author     android-build-team Robot <android-build-team-robot@google.com>  2020-04-28 20:25:35 +0000
committer  android-build-team Robot <android-build-team-robot@google.com>  2020-04-28 20:25:35 +0000
commit     98af15c140c60757492824f3d5ae34e775ce310a (patch)
tree       14d8c016827399c8e4e83829a2a9ff94f972ed11
parent     83319a71929d901ff455ca43d777cf9efd192cdc (diff)
parent     932fa4323a1a4147fc0df55f120dbce86554ab2c (diff)
download   vulkan-validation-layers-98af15c140c60757492824f3d5ae34e775ce310a.tar.gz
Change-Id: If67063e4307bc6bd8e16aeaaa411b485132594c5
-rw-r--r--  .appveyor.yml  18
-rw-r--r--  .gn  22
-rw-r--r--  .travis.yml  42
-rw-r--r--  BUILD.gn  303
-rw-r--r--  BUILD.md  76
-rw-r--r--  CMakeLists.txt  60
-rw-r--r--  CONTRIBUTING.md  16
-rw-r--r--  build-android/android-generate.bat  73
-rwxr-xr-x  build-android/android-generate.sh  79
-rwxr-xr-x  build-android/build.py  14
-rwxr-xr-x  build-android/build_all.sh  1
-rw-r--r--  build-android/cmake/layerlib/CMakeLists.txt  42
-rw-r--r--  build-android/jni/Android.mk  88
-rw-r--r--  build-android/jni/Application.mk  2
-rw-r--r--  build-android/known_good.json  10
-rwxr-xr-x  build-android/test_APK.sh  17
-rw-r--r--  build-gn/DEPS  63
-rwxr-xr-x  build-gn/commit_id.py  64
-rwxr-xr-x  build-gn/generate_vulkan_layers_json.py  126
-rwxr-xr-x  build-gn/remove_files.py  41
-rw-r--r--  build-gn/secondary/build_overrides/build.gni  18
-rw-r--r--  build-gn/secondary/build_overrides/spirv_tools.gni  20
-rw-r--r--  build-gn/secondary/build_overrides/vulkan_validation_layers.gni  23
-rwxr-xr-x  build-gn/update_deps.sh  38
-rw-r--r--  docs/gpu_validation.md  241
-rw-r--r--  docs/khronos_validation_layer.md  25
-rw-r--r--  external/x64/lib/vulkan-1.lib  bin 48602 -> 0 bytes
-rw-r--r--  external/x86/lib/vulkan-1.lib  bin 52334 -> 0 bytes
-rw-r--r--  layers/CMakeLists.txt  157
-rw-r--r--  layers/README.md  32
-rw-r--r--  layers/VkLayer_khronos_validation.def  30
-rw-r--r--  layers/best_practices.cpp  393
-rw-r--r--  layers/best_practices.h  90
-rw-r--r--  layers/buffer_validation.cpp  2676
-rw-r--r--  layers/buffer_validation.h  10
-rw-r--r--  layers/cast_utils.h  93
-rw-r--r--  layers/core_validation.cpp  11447
-rw-r--r--  layers/core_validation.h  2115
-rw-r--r--  layers/core_validation_error_enums.h  20
-rw-r--r--  layers/core_validation_types.h  1120
-rw-r--r--  layers/descriptor_sets.cpp  1616
-rw-r--r--  layers/descriptor_sets.h  270
-rw-r--r--  layers/drawdispatch.cpp  484
-rw-r--r--  layers/generated/.clang-format  5
-rw-r--r--  layers/generated/chassis.cpp  9752
-rw-r--r--  layers/generated/chassis.h  3739
-rw-r--r--  layers/generated/layer_chassis_dispatch.cpp  6778
-rw-r--r--  layers/generated/layer_chassis_dispatch.h  1793
-rw-r--r--  layers/generated/object_tracker.cpp  5145
-rw-r--r--  layers/generated/object_tracker.h  2315
-rw-r--r--  layers/generated/parameter_validation.cpp  10392
-rw-r--r--  layers/generated/parameter_validation.h  1757
-rw-r--r--  layers/generated/spirv_tools_commit_id.h  29
-rw-r--r--  layers/generated/thread_safety.cpp  5849
-rw-r--r--  layers/generated/thread_safety.h  4250
-rw-r--r--  layers/generated/vk_dispatch_table_helper.h  981
-rw-r--r--  layers/generated/vk_enum_string_helper.h  5994
-rw-r--r--  layers/generated/vk_extension_helper.h  969
-rw-r--r--  layers/generated/vk_layer_dispatch_table.h  646
-rw-r--r--  layers/generated/vk_object_types.h  767
-rw-r--r--  layers/generated/vk_safe_struct.cpp  29684
-rw-r--r--  layers/generated/vk_safe_struct.h  6665
-rw-r--r--  layers/generated/vk_typemap_helper.h  3473
-rw-r--r--  layers/gpu_validation.cpp  1218
-rw-r--r--  layers/gpu_validation.h  105
-rw-r--r--  layers/json/VkLayer_khronos_validation.json.in  38
-rw-r--r--  layers/json/VkLayer_standard_validation.json.in  6
-rw-r--r--  layers/libVkLayer_core_validation.map  10
-rw-r--r--  layers/libVkLayer_khronos_validation.map  10
-rw-r--r--  layers/libVkLayer_object_lifetimes.map  10
-rw-r--r--  layers/libVkLayer_stateless_validation.map  10
-rw-r--r--  layers/libVkLayer_thread_safety.map  10
-rw-r--r--  layers/libVkLayer_unique_objects.map  10
-rw-r--r--  layers/object_lifetime_validation.h  128
-rw-r--r--  layers/object_tracker_utils.cpp  379
-rw-r--r--  layers/parameter_validation_utils.cpp  850
-rw-r--r--  layers/shader_validation.cpp  1462
-rw-r--r--  layers/shader_validation.h  118
-rw-r--r--  layers/sparse_containers.h  404
-rw-r--r--  layers/stateless_validation.h  180
-rw-r--r--  layers/vk_format_utils.cpp  36
-rw-r--r--  layers/vk_format_utils.h  5
-rw-r--r--  layers/vk_layer_config.cpp  16
-rw-r--r--  layers/vk_layer_config.h  8
-rw-r--r--  layers/vk_layer_logging.h  382
-rw-r--r--  layers/vk_layer_settings.txt  44
-rw-r--r--  layers/vk_layer_utils.cpp  16
-rw-r--r--  layers/vk_layer_utils.h  169
-rw-r--r--  layers/vk_mem_alloc.h  16813
-rw-r--r--  layers/vk_validation_error_messages.h (renamed from layers/generated/vk_validation_error_messages.h)  1554
-rwxr-xr-x  scripts/check_commit_message_format.sh  19
-rw-r--r--  scripts/common_codegen.py  51
-rw-r--r--  scripts/dispatch_table_helper_generator.py  36
-rw-r--r--  scripts/external_revision_generator.py  48
-rwxr-xr-x  scripts/generate_source.py  132
-rw-r--r--  scripts/helper_file_generator.py  507
-rw-r--r--  scripts/known_good.json  14
-rw-r--r--  scripts/layer_chassis_dispatch_generator.py  511
-rw-r--r--  scripts/layer_chassis_generator.py  705
-rw-r--r--  scripts/layer_dispatch_table_generator.py  24
-rw-r--r--  scripts/lvl_genvk.py  28
-rw-r--r--  scripts/object_tracker_generator.py  78
-rw-r--r--  scripts/parameter_validation_generator.py  112
-rw-r--r--  scripts/thread_safety_generator.py  249
-rwxr-xr-x  scripts/update_deps.py  26
-rwxr-xr-x  scripts/vk_validation_stats.py  151
-rw-r--r--  tests/CMakeLists.txt  31
-rw-r--r--  tests/layer_validation_tests.cpp  38108
-rw-r--r--  tests/layer_validation_tests.h  786
-rw-r--r--  tests/layers/CMakeLists.txt  42
-rw-r--r--  tests/layers/libVkLayer_device_profile_api.map  9
-rw-r--r--  tests/vklayertests_buffer_image_memory_sampler.cpp  7169
-rw-r--r--  tests/vklayertests_command.cpp  5016
-rw-r--r--  tests/vklayertests_descriptor_renderpass_framebuffer.cpp  7242
-rw-r--r--  tests/vklayertests_imageless_framebuffer.cpp  1084
-rw-r--r--  tests/vklayertests_others.cpp  4938
-rw-r--r--  tests/vklayertests_pipeline_shader.cpp  5754
-rw-r--r--  tests/vkpositivelayertests.cpp  8350
-rw-r--r--  tests/vkrenderframework.cpp  229
-rw-r--r--  tests/vkrenderframework.h  60
-rw-r--r--  tests/vktestbinding.cpp  88
-rw-r--r--  tests/vktestbinding.h  62
-rw-r--r--  tests/vktestframework.cpp  3
-rw-r--r--  tests/vktestframework.h  8
-rw-r--r--  tests/vktestframeworkandroid.cpp  4
-rw-r--r--  tests/vktestframeworkandroid.h  2
126 files changed, 48873 insertions, 179852 deletions
diff --git a/.appveyor.yml b/.appveyor.yml
index 70390dba0..88148189a 100644
--- a/.appveyor.yml
+++ b/.appveyor.yml
@@ -12,26 +12,16 @@ version: "{build}"
max_jobs: 4
os:
- - Visual Studio 2015
-
-init:
- - git config --global core.autocrlf true
+ - Visual Studio 2013
environment:
PYTHON_PATH: "C:/Python35"
PYTHON_PACKAGE_PATH: "C:/Python35/Scripts"
- CMAKE_URL: "http://cmake.org/files/v3.10/cmake-3.10.2-win64-x64.zip"
branches:
only:
- master
-install:
- - appveyor DownloadFile %CMAKE_URL% -FileName cmake.zip
- - 7z x cmake.zip -oC:\cmake > nul
- - set path=C:\cmake\bin;%path%
- - cmake --version
-
before_build:
- "SET PATH=C:\\Python35;C:\\Python35\\Scripts;%PATH%"
- echo.
@@ -44,8 +34,6 @@ before_build:
- git checkout tags/release-1.8.1
- cd %APPVEYOR_BUILD_FOLDER%
- python scripts/update_deps.py --dir=external --arch=%PLATFORM% --config=%CONFIGURATION%
- - echo Verifying consistency between source file generators and output
- - python scripts/generate_source.py --verify external/Vulkan-Headers/registry
- echo Generating Vulkan-ValidationLayers CMake files for %PLATFORM% %CONFIGURATION%
- mkdir build
- cd build
@@ -74,7 +62,3 @@ build:
parallel: true # enable MSBuild parallel builds
project: build/Vulkan-ValidationLayers.sln # path to Visual Studio solution or project
verbosity: quiet # quiet|minimal|normal|detailed
-
-artifacts:
- - path: build\layers\$(configuration)
- name: Vulkan-ValidationLayers-$(platform)-$(configuration)
diff --git a/.gn b/.gn
deleted file mode 100644
index e190259e7..000000000
--- a/.gn
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright (C) 2019 LunarG, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-buildconfig = "//build/config/BUILDCONFIG.gn"
-secondary_source = "//build-gn/secondary/"
-
-default_args = {
- clang_use_chrome_plugins = false
- use_custom_libcxx = false
-}
-
diff --git a/.travis.yml b/.travis.yml
index 9e3cb5874..5013051e2 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -10,7 +10,6 @@ matrix:
fast_finish: true
allow_failures:
- env: CHECK_COMMIT_FORMAT=ON
- - env: VULKAN_BUILD_TARGET=GN
include:
# Android build.
- os: linux
@@ -28,9 +27,6 @@ matrix:
- os: linux
compiler: clang
env: VULKAN_BUILD_TARGET=LINUX
- # Linux GN debug build.
- - os: linux
- env: VULKAN_BUILD_TARGET=GN
# Check for proper clang formatting in the pull request.
- env: CHECK_FORMAT=ON
# Check for proper commit message formatting for commits in PR
@@ -45,25 +41,13 @@ cache: ccache
before_install:
- set -e
- - CMAKE_VERSION=3.10.2
- - |
- if [[ "${TRAVIS_OS_NAME}" == "linux" ]]; then
- # Upgrade to the desired version of CMake
- CMAKE_URL="https://cmake.org/files/v${CMAKE_VERSION%.*}/cmake-${CMAKE_VERSION}-Linux-x86_64.tar.gz"
- echo CMAKE_URL=${CMAKE_URL}
- mkdir cmake-${CMAKE_VERSION} && travis_retry wget --no-check-certificate -O - ${CMAKE_URL} | tar --strip-components=1 -xz -C cmake-${CMAKE_VERSION}
- export PATH=${PWD}/cmake-${CMAKE_VERSION}/bin:${PATH}
- else
- brew install cmake || brew upgrade cmake
- fi
- cmake --version
- |
- if [[ "$VULKAN_BUILD_TARGET" == "LINUX" ]] || [[ "$VULKAN_BUILD_TARGET" == "GN" ]]; then
+ if [[ "$VULKAN_BUILD_TARGET" == "LINUX" ]]; then
# Install the appropriate Linux packages.
sudo apt-get -qq update
sudo apt-get -y install libxkbcommon-dev libwayland-dev libmirclient-dev libxrandr-dev \
libx11-xcb-dev libxcb-keysyms1 libxcb-keysyms1-dev libxcb-ewmh-dev \
- libxcb-randr0-dev python-pathlib
+ libxcb-randr0-dev
fi
- |
if [[ "$VULKAN_BUILD_TARGET" == "ANDROID" ]]; then
@@ -109,19 +93,12 @@ script:
fi
- |
if [[ "$VULKAN_BUILD_TARGET" == "LINUX" ]]; then
- # Verify consistency between source file generators and output
- echo Verifying consistency between source file generators and output
- python3 ${TRAVIS_BUILD_DIR}/scripts/generate_source.py --verify ${TRAVIS_BUILD_DIR}/external/Vulkan-Headers/registry
- fi
- - |
- if [[ "$VULKAN_BUILD_TARGET" == "LINUX" ]]; then
# Build Vulkan-ValidationLayers
cd ${TRAVIS_BUILD_DIR}
mkdir build
cd build
cmake -C ${TRAVIS_BUILD_DIR}/external/helper.cmake -DCMAKE_BUILD_TYPE=Debug \
- -DCMAKE_INSTALL_PREFIX=${TRAVIS_BUILD_DIR}/build/install -DBUILD_LAYER_SUPPORT_FILES=ON \
- -DUSE_CCACHE=ON ..
+ -DCMAKE_INSTALL_PREFIX=${TRAVIS_BUILD_DIR}/build/install -DUSE_CCACHE=ON ..
cmake --build . --target install -- -j$core_count
fi
- |
@@ -133,8 +110,6 @@ script:
mkdir build
cd build
cmake -DCMAKE_BUILD_TYPE=Debug \
- -DBUILD_VIA=NO -DBUILD_VKTRACE=NO -DBUILD_VLF=NO -DBUILD_TESTS=NO -DBUILD_LAYERMGR=NO \
- -DBUILD_VKTRACEVIEWER=NO -DBUILD_VKTRACE_LAYER=NO -DBUILD_VKTRACE_REPLAY=NO \
-DVULKAN_HEADERS_INSTALL_DIR=${TRAVIS_BUILD_DIR}/external/Vulkan-Headers/build/install \
-DVULKAN_LOADER_INSTALL_DIR=${TRAVIS_BUILD_DIR}/external/Vulkan-Loader/build/install \
-DVULKAN_VALIDATIONLAYERS_INSTALL_DIR=${TRAVIS_BUILD_DIR}/build/install \
@@ -160,6 +135,7 @@ script:
if [[ "$VULKAN_BUILD_TARGET" == "ANDROID" ]]; then
pushd build-android
./update_external_sources_android.sh --abi $ANDROID_ABI --no-build
+ ./android-generate.sh
USE_CCACHE=1 NDK_CCACHE=ccache ndk-build APP_ABI=$ANDROID_ABI -j $core_count
popd
fi
@@ -181,24 +157,16 @@ script:
./scripts/check_commit_message_format.sh
fi
fi
- - |
- if [[ "$VULKAN_BUILD_TARGET" == "GN" ]]; then
- git clone https://chromium.googlesource.com/chromium/tools/depot_tools.git depot_tools
- export PATH=$PATH:$PWD/depot_tools
- ./build-gn/update_deps.sh
- gn gen out/Debug
- ninja -C out/Debug
- fi
- ccache --show-stats
- set +e
notifications:
email:
recipients:
+ - karl@lunarg.com
- cnorthrop@google.com
- tobine@google.com
- chrisforbes@google.com
- shannon@lunarg.com
- - mikes@lunarg.com
on_success: change
on_failure: always
diff --git a/BUILD.gn b/BUILD.gn
deleted file mode 100644
index ff08295cc..000000000
--- a/BUILD.gn
+++ /dev/null
@@ -1,303 +0,0 @@
-# Copyright (C) 2018-2019 The ANGLE Project Authors.
-# Copyright (C) 2019 LunarG, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import("//build_overrides/vulkan_validation_layers.gni")
-
-# Fuchsia has non-upstream changes to the vulkan layers, so we don't want
-# to build it from upstream sources.
-assert(!is_fuchsia)
-
-vulkan_undefine_configs = []
-if (is_win) {
- vulkan_undefine_configs += [
- "//build/config/win:nominmax",
- "//build/config/win:unicode",
- ]
-}
-
-vulkan_gen_dir = "$target_gen_dir/$vulkan_gen_subdir"
-raw_vulkan_gen_dir = rebase_path(vulkan_gen_dir, root_build_dir)
-
-vulkan_data_dir = "$root_out_dir/$vulkan_data_subdir"
-raw_vulkan_data_dir = rebase_path(vulkan_data_dir, root_build_dir)
-
-raw_root_out_dir = rebase_path(root_out_dir, root_build_dir)
-
-# This special action is needed to remove old VVL objects that are now renamed.
-action("vulkan_clean_old_validation_layer_objects") {
- script = "build-gn/remove_files.py"
-
- # inputs is a (random) new file since the vvl roll, used to ensure the cleanup is done only once
- inputs = [
- "layers/gpu_validation.cpp",
- ]
- outputs = [
- "$vulkan_gen_dir/old_vvl_files_are_removed",
- ]
- args = [
- "$raw_vulkan_gen_dir/old_vvl_files_are_removed",
- "$raw_root_out_dir/libVkLayer*",
- "$raw_root_out_dir/VkLayer*",
- "$raw_vulkan_data_dir/VkLayer*.json",
- ]
-}
-
-config("vulkan_internal_config") {
- defines = [
- "VULKAN_NON_CMAKE_BUILD",
- "API_NAME=\"Vulkan\"",
- ]
- if (is_clang || !is_win) {
- cflags = [ "-Wno-unused-function" ]
- }
- if (is_linux) {
- defines += [
- "SYSCONFDIR=\"/etc\"",
- "FALLBACK_CONFIG_DIRS=\"/etc/xdg\"",
- "FALLBACK_DATA_DIRS=\"/usr/local/share:/usr/share\"",
- ]
- }
-}
-
-# The validation layers
-# ---------------------
-
-config("vulkan_layer_config") {
- include_dirs = [
- "layers",
- "layers/generated",
- ]
-}
-
-core_validation_sources = [
- # This file is manually included in the layer
- # "layers/generated/vk_safe_struct.cpp",
- "layers/buffer_validation.cpp",
- "layers/buffer_validation.h",
- "layers/core_validation.cpp",
- "layers/core_validation.h",
- "layers/convert_to_renderpass2.cpp",
- "layers/descriptor_sets.cpp",
- "layers/descriptor_sets.h",
- "layers/drawdispatch.cpp",
- "layers/gpu_validation.cpp",
- "layers/gpu_validation.h",
- "layers/shader_validation.cpp",
- "layers/shader_validation.h",
- "layers/xxhash.c",
- "layers/xxhash.h",
-]
-
-object_lifetimes_sources = [
- "layers/generated/object_tracker.cpp",
- "layers/generated/object_tracker.h",
- "layers/object_tracker_utils.cpp",
-]
-
-stateless_validation_sources = [
- "layers/generated/parameter_validation.cpp",
- "layers/generated/parameter_validation.h",
- "layers/parameter_validation_utils.cpp",
-]
-
-thread_safety_sources = [
- "layers/generated/thread_safety.cpp",
- "layers/generated/thread_safety.h",
-]
-
-unique_objects_sources = []
-
-chassis_sources = [
- "layers/core_validation.h",
- "layers/generated/vk_safe_struct.h",
- "layers/generated/thread_safety.h",
- "layers/generated/chassis.cpp",
- "layers/generated/chassis.h",
- "layers/generated/layer_chassis_dispatch.cpp",
- "layers/generated/layer_chassis_dispatch.h",
- "$vulkan_headers_dir/include/vulkan/vk_layer.h",
- "$vulkan_headers_dir/include/vulkan/vulkan.h",
-]
-
-layers = [
- [
- "core_validation",
- core_validation_sources + chassis_sources + thread_safety_sources,
- [ ":vulkan_core_validation_glslang" ],
- [ "BUILD_CORE_VALIDATION" ],
- ],
- [
- "object_lifetimes",
- object_lifetimes_sources + chassis_sources + thread_safety_sources + core_validation_sources,
- [ ":vulkan_core_validation_glslang" ],
- [ "BUILD_OBJECT_TRACKER" ],
- ],
- [
- "stateless_validation",
- stateless_validation_sources + chassis_sources + core_validation_sources,
- [ ":vulkan_core_validation_glslang" ],
- [ "BUILD_PARAMETER_VALIDATION" ],
- ],
- [
- "thread_safety",
- thread_safety_sources + chassis_sources + core_validation_sources,
- [ ":vulkan_core_validation_glslang" ],
- [ "BUILD_THREAD_SAFETY" ],
- ],
- [
- "unique_objects",
- unique_objects_sources + chassis_sources + core_validation_sources,
- [ ":vulkan_core_validation_glslang" ],
- [ "LAYER_CHASSIS_CAN_WRAP_HANDLES" ],
- ],
- [
- "khronos_validation",
- core_validation_sources + object_lifetimes_sources +
- stateless_validation_sources + thread_safety_sources +
- unique_objects_sources + chassis_sources,
- [ ":vulkan_core_validation_glslang" ],
- [
- "BUILD_KHRONOS_VALIDATION",
- "BUILD_CORE_VALIDATION",
- "BUILD_OBJECT_TRACKER",
- "BUILD_THREAD_SAFETY",
- "BUILD_PARAMETER_VALIDATION",
- "LAYER_CHASSIS_CAN_WRAP_HANDLES",
- ],
- ],
-]
-
-if (!is_android) {
- action("vulkan_gen_json_files") {
- script = "build-gn/generate_vulkan_layers_json.py"
- public_deps = [
- ":vulkan_clean_old_validation_layer_objects",
- ]
- json_names = [
- "VkLayer_core_validation.json",
- "VkLayer_object_lifetimes.json",
- "VkLayer_stateless_validation.json",
- "VkLayer_standard_validation.json",
- "VkLayer_thread_safety.json",
- "VkLayer_unique_objects.json",
- "VkLayer_khronos_validation.json",
- ]
- sources = [
- "$vulkan_headers_dir/include/vulkan/vulkan_core.h",
- "$vulkan_headers_dir/include/vulkan/vk_layer.h",
- ]
- outputs = []
- foreach(json_name, json_names) {
- sources += [ "layers/json/$json_name.in" ]
- outputs += [ "$vulkan_data_dir/$json_name" ]
- }
- args = [
- rebase_path("layers/json", root_build_dir),
- rebase_path(vulkan_data_dir, root_build_dir),
- ] + rebase_path(sources, root_build_dir)
-
- # The layer JSON files are part of the necessary data deps.
- data = outputs
- }
-}
-
-source_set("vulkan_layer_utils") {
- include_dirs = [
- "layers",
- "layers/generated",
- ]
- sources = [
- "layers/vk_format_utils.cpp",
- "layers/vk_format_utils.h",
- "layers/vk_layer_config.cpp",
- "layers/vk_layer_config.h",
- "layers/vk_layer_extension_utils.cpp",
- "layers/vk_layer_extension_utils.h",
- "layers/vk_layer_utils.cpp",
- "layers/vk_layer_utils.h",
- "$vulkan_headers_dir/include/vulkan/vk_layer.h",
- "$vulkan_headers_dir/include/vulkan/vulkan.h",
- "$vulkan_headers_dir/include/vulkan/vk_sdk_platform.h",
- ]
- public_configs = [
- "$vulkan_headers_dir:vulkan_headers_config",
- ":vulkan_internal_config",
- ]
- configs -= [ "//build/config/compiler:chromium_code" ]
- configs += [ "//build/config/compiler:no_chromium_code" ]
- public_deps = []
- configs -= vulkan_undefine_configs
-}
-
-config("vulkan_core_validation_config") {
- include_dirs = [ "$vvl_glslang_dir" ]
-}
-
-source_set("vulkan_core_validation_glslang") {
- public_deps = [
- "${vvl_spirv_tools_dir}:spvtools",
- "${vvl_spirv_tools_dir}:spvtools_opt",
- "${vvl_spirv_tools_dir}:spvtools_val",
- ]
- public_configs = [
- "$vulkan_headers_dir:vulkan_headers_config",
- ":vulkan_core_validation_config",
- ]
-}
-
-config("vulkan_stateless_validation_config") {
- if (is_clang) {
- cflags_cc = [ "-Wno-unused-const-variable" ]
- }
-}
-
-foreach(layer_info, layers) {
- name = layer_info[0]
- shared_library("VkLayer_$name") {
- configs -= [ "//build/config/compiler:chromium_code" ]
- configs += [ "//build/config/compiler:no_chromium_code" ]
- configs -= vulkan_undefine_configs
- public_configs = [ ":vulkan_layer_config" ]
- deps = [
- ":vulkan_layer_utils",
- ]
- if (layer_info[2] != "") {
- deps += layer_info[2]
- }
- sources = layer_info[1]
- if (is_win) {
- sources += [ "layers/VkLayer_$name.def" ]
- }
- if (is_linux || is_android) {
- ldflags = [ "-Wl,-Bsymbolic,--exclude-libs,ALL" ]
- }
- if (is_android) {
- libs = [
- "log",
- "nativewindow",
- ]
- configs -= [ "//build/config/android:hide_all_but_jni_onload" ]
- }
- defines = layer_info[3]
- }
-}
-
-group("vulkan_validation_layers") {
- data_deps = []
- foreach(layer_info, layers) {
- name = layer_info[0]
- data_deps += [ ":VkLayer_$name" ]
- }
-}
diff --git a/BUILD.md b/BUILD.md
index a974d7ad9..b35143368 100644
--- a/BUILD.md
+++ b/BUILD.md
@@ -134,41 +134,22 @@ directories and place them in any location.
### Building Dependent Repositories with Known-Good Revisions
-There is a Python utility script, `scripts/update_deps.py`, that you can use to
-gather and build the dependent repositories mentioned above. This script uses
-information stored in the `scripts/known_good.json` file to check out dependent
-repository revisions that are known to be compatible with the revision of this
-repository that you currently have checked out. As such, this script is useful
-as a quick-start tool for common use cases and default configurations.
+There is a Python utility script, `scripts/update_deps.py`, that you can use
+to gather and build the dependent repositories mentioned above. This program
+also uses information stored in the `scripts/known-good.json` file to checkout
+dependent repository revisions that are known to be compatible with the
+revision of this repository that you currently have checked out.
-For all platforms, start with:
+Here is a usage example for this repository:
git clone git@github.com:KhronosGroup/Vulkan-ValidationLayers.git
cd Vulkan-ValidationLayers
mkdir build
cd build
-
-For 64-bit Linux and MacOS, continue with:
-
../scripts/update_deps.py
cmake -C helper.cmake ..
cmake --build .
-For 64-bit Windows, continue with:
-
- ..\scripts\update_deps.py --arch x64
- cmake -A x64 -C helper.cmake ..
- cmake --build .
-
-For 32-bit Windows, continue with:
-
- ..\scripts\update_deps.py --arch Win32
- cmake -A Win32 -C helper.cmake ..
- cmake --build .
-
-Please see the more detailed build information later in this file if you have
-specific requirements for configuring and building these components.
-
#### Notes
- You may need to adjust some of the CMake options based on your platform. See
@@ -197,20 +178,6 @@ specific requirements for configuring and building these components.
- Please use `update_deps.py --help` to list additional options and read the
internal documentation in `update_deps.py` for further information.
-### Generated source code
-
-This repository contains generated source code in the `layers/generated`
-directory which is not intended to be modified directly. Instead, changes should be
-made to the corresponding generator in the `scripts` directory. The source files can
-then be regenerated using `scripts/generate_source.py`:
-
- python3 scripts/generate_source.py PATH_TO_VULKAN_HEADERS_REGISTRY_DIR
-
-A helper CMake target `VulkanVL_generated_source` is also provided to simplify
-the invocation of `scripts/generate_source.py` from the build directory:
-
- cmake --build . --target VulkanVL_generated_source
-
### Build Options
When generating native platform build files through CMake, several options can
@@ -246,11 +213,12 @@ generate the native platform files.
- Any Personal Computer version supported by Microsoft
- Microsoft [Visual Studio](https://www.visualstudio.com/)
- Versions
+ - [2013 (update 4)](https://www.visualstudio.com/vs/older-downloads/)
- [2015](https://www.visualstudio.com/vs/older-downloads/)
- [2017](https://www.visualstudio.com/vs/downloads/)
- The Community Edition of each of the above versions is sufficient, as
well as any more capable edition.
-- [CMake 3.10.2](https://cmake.org/files/v3.10/cmake-3.10.2-win64-x64.zip) is recommended.
+- [CMake](http://www.cmake.org/download/) (Version 2.8.11 or better)
- Use the installer option to add CMake to the system PATH
- Git Client Support
- [Git for Windows](http://git-scm.com/download/win) is a popular solution
@@ -408,6 +376,7 @@ include:
| Build Platform | 64-bit Generator | 32-bit Generator |
|------------------------------|-------------------------------|-------------------------|
+| Microsoft Visual Studio 2013 | "Visual Studio 12 2013 Win64" | "Visual Studio 12 2013" |
| Microsoft Visual Studio 2015 | "Visual Studio 14 2015 Win64" | "Visual Studio 14 2015" |
| Microsoft Visual Studio 2017 | "Visual Studio 15 2017 Win64" | "Visual Studio 15 2017" |
@@ -428,16 +397,14 @@ layer validation tests use the "googletest" testing framework.)
### Linux Build Requirements
This repository has been built and tested on the two most recent Ubuntu LTS
-versions. Currently, the oldest supported version is Ubuntu 16.04, meaning
-that the minimum officially supported C++11 compiler version is GCC 5.4.0,
+versions. Currently, the oldest supported version is Ubuntu 14.04, meaning
+that the minimum supported compiler versions are GCC 4.8.2 and Clang 3.4,
although earlier versions may work. It should be straightforward to adapt this
repository to other Linux distributions.
-[CMake 3.10.2](https://cmake.org/files/v3.10/cmake-3.10.2-Linux-x86_64.tar.gz) is recommended.
-
#### Required Package List
- sudo apt-get install git build-essential libx11-xcb-dev \
+ sudo apt-get install git cmake build-essential libx11-xcb-dev \
libxkbcommon-dev libwayland-dev libxrandr-dev \
libegl1-mesa-dev
@@ -618,14 +585,11 @@ following.
### Android Build Requirements
-Note that the minimum supported Android SDK API Level is 26, revision
-level 3.
-
- Install [Android Studio 2.3](https://developer.android.com/studio/index.html)
or later.
- From the "Welcome to Android Studio" splash screen, add the following
components using Configure > SDK Manager:
- - SDK Platforms > Android 8.0.0 and newer
+ - SDK Platforms > Android 6.0 and newer
- SDK Tools > Android SDK Build-Tools
- SDK Tools > Android SDK Platform-Tools
- SDK Tools > Android SDK Tools
@@ -642,7 +606,7 @@ On Linux:
export ANDROID_NDK_HOME=$HOME/Android/sdk/ndk-bundle
export PATH=$ANDROID_SDK_HOME:$PATH
export PATH=$ANDROID_NDK_HOME:$PATH
- export PATH=$ANDROID_SDK_HOME/build-tools/26.0.3:$PATH
+ export PATH=$ANDROID_SDK_HOME/build-tools/23.0.3:$PATH
On Windows:
@@ -655,7 +619,7 @@ On OSX:
export ANDROID_SDK_HOME=$HOME/Library/Android/sdk
export ANDROID_NDK_HOME=$HOME/Library/Android/sdk/ndk-bundle
export PATH=$ANDROID_NDK_PATH:$PATH
- export PATH=$ANDROID_SDK_HOME/build-tools/26.0.3:$PATH
+ export PATH=$ANDROID_SDK_HOME/build-tools/23.0.3:$PATH
Note: If `jarsigner` is missing from your platform, you can find it in the
Android Studio install or in your Java installation. If you do not have Java,
@@ -679,7 +643,7 @@ Setup Homebrew and components
- Add packages with the following:
- brew install python
+ brew install cmake python
### Android Build
@@ -710,15 +674,17 @@ Follow the setup steps for Linux or OSX above, then from your terminal:
cd build-android
./update_external_sources_android.sh --no-build
+ ./android-generate.sh
ndk-build -j4
#### Windows
Follow the setup steps for Windows above, then from Developer Command Prompt
-for VS2015:
+for VS2013:
cd build-android
update_external_sources_android.bat
+ android-generate.bat
ndk-build
### Android Tests and Demos
@@ -748,8 +714,6 @@ validation tests:
Tested on OSX version 10.12.6
-[CMake 3.10.2](https://cmake.org/files/v3.10/cmake-3.10.2-Darwin-x86_64.tar.gz) is recommended.
-
Setup Homebrew and components
- Follow instructions on [brew.sh](http://brew.sh) to get Homebrew installed.
@@ -762,7 +726,7 @@ Setup Homebrew and components
- Add packages with the following (may need refinement)
- brew install python python3 git
+ brew install cmake python python3 git
### Clone the Repository
diff --git a/CMakeLists.txt b/CMakeLists.txt
index b1ce624c9..61a266c08 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -18,15 +18,13 @@
# CMake project initialization ---------------------------------------------------------------------------------------------------
# This section contains pre-project() initialization, and ends with the project() command.
-cmake_minimum_required(VERSION 3.10.2)
+cmake_minimum_required(VERSION 3.4)
# Apple: Must be set before enable_language() or project() as it may influence configuration of the toolchain and flags.
set(CMAKE_OSX_DEPLOYMENT_TARGET "10.12" CACHE STRING "Minimum OS X deployment version")
project(Vulkan-ValidationLayers)
-enable_testing()
-
# User-interface declarations ----------------------------------------------------------------------------------------------------
# This section contains variables that affect development GUIs (e.g. CMake GUI and IDEs), such as option(), folders, and variables
# with the CACHE property.
@@ -38,7 +36,7 @@ add_definitions(-DAPI_NAME="${API_NAME}")
set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_CURRENT_SOURCE_DIR}/cmake")
-find_package(PythonInterp 3 QUIET)
+find_package(PythonInterp 3 REQUIRED)
if (TARGET Vulkan::Headers)
message(STATUS "Using Vulkan headers from Vulkan::Headers target")
@@ -73,6 +71,8 @@ endif()
if(APPLE)
# CMake versions 3 or later need CMAKE_MACOSX_RPATH defined. This avoids the CMP0042 policy message.
set(CMAKE_MACOSX_RPATH 1)
+ # The "install" target for MacOS fixes up bundles in place.
+ set(CMAKE_INSTALL_PREFIX ${CMAKE_BINARY_DIR})
endif()
# Enable IDE GUI folders
@@ -291,6 +291,40 @@ endif()
set(SCRIPTS_DIR "${PROJECT_SOURCE_DIR}/scripts")
+# Generate a source file from vk.xml
+macro(GenerateFromVkXml dependency output)
+ add_custom_command(OUTPUT ${output}
+ COMMAND ${PYTHON_EXECUTABLE} ${SCRIPTS_DIR}/lvl_genvk.py -registry ${VulkanRegistry_DIR}/vk.xml -scripts
+ ${VulkanRegistry_DIR} ${output}
+ DEPENDS ${VulkanRegistry_DIR}/vk.xml
+ ${VulkanRegistry_DIR}/generator.py
+ ${SCRIPTS_DIR}/${dependency}
+ ${SCRIPTS_DIR}/lvl_genvk.py
+ ${VulkanRegistry_DIR}/reg.py)
+endmacro()
+
+# Add rules to generate XML-derived source files.
+GenerateFromVkXml(layer_dispatch_table_generator.py vk_layer_dispatch_table.h)
+GenerateFromVkXml(dispatch_table_helper_generator.py vk_dispatch_table_helper.h)
+GenerateFromVkXml(helper_file_generator.py vk_safe_struct.h)
+GenerateFromVkXml(helper_file_generator.py vk_safe_struct.cpp)
+GenerateFromVkXml(helper_file_generator.py vk_enum_string_helper.h)
+GenerateFromVkXml(helper_file_generator.py vk_object_types.h)
+GenerateFromVkXml(helper_file_generator.py vk_extension_helper.h)
+GenerateFromVkXml(helper_file_generator.py vk_typemap_helper.h)
+
+# This target causes the source files to be generated.
+add_custom_target(VulkanVL_generate_helper_files
+ DEPENDS vk_enum_string_helper.h
+ vk_safe_struct.h
+ vk_safe_struct.cpp
+ vk_object_types.h
+ vk_layer_dispatch_table.h
+ vk_dispatch_table_helper.h
+ vk_extension_helper.h
+ vk_typemap_helper.h)
+set_target_properties(VulkanVL_generate_helper_files PROPERTIES FOLDER ${LAYERS_HELPER_FOLDER})
+
# VkLayer_utils library ----------------------------------------------------------------------------------------------------------
# For Windows, we use a static lib because the Windows loader has a fairly restrictive loader search path that can't be easily
# modified to point it to the same directory that contains the layers. TODO: This should not be a library -- in future, include
@@ -308,13 +342,12 @@ if(WIN32)
endif()
install(TARGETS VkLayer_utils DESTINATION ${CMAKE_INSTALL_LIBDIR})
set_target_properties(VkLayer_utils PROPERTIES LINKER_LANGUAGE CXX)
+add_dependencies(VkLayer_utils VulkanVL_generate_helper_files)
target_include_directories(VkLayer_utils
- PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/layers
- ${CMAKE_CURRENT_SOURCE_DIR}/layers/generated
+ PUBLIC ${VulkanHeaders_INCLUDE_DIR}
${CMAKE_CURRENT_BINARY_DIR}
${CMAKE_CURRENT_BINARY_DIR}/layers
- ${PROJECT_BINARY_DIR}
- ${VulkanHeaders_INCLUDE_DIR})
+ ${PROJECT_BINARY_DIR})
# uninstall target ---------------------------------------------------------------------------------------------------------------
if(NOT TARGET uninstall)
@@ -338,17 +371,6 @@ else()
message(FATAL_ERROR "Unable to fetch version from vulkan_core.h")
endif()
-# Optional codegen target --------------------------------------------------------------------------------------------------------
-if(PYTHONINTERP_FOUND)
- add_custom_target(VulkanVL_generated_source
- COMMAND ${PYTHON_EXECUTABLE} ${PROJECT_SOURCE_DIR}/scripts/generate_source.py
- ${VulkanRegistry_DIR} --incremental
- WORKING_DIRECTORY ${PROJECT_SOURCE_DIR}/layers/generated
- )
-else()
- message("WARNING: VulkanVL_generated_source target requires python 3")
-endif()
-
# Add subprojects ----------------------------------------------------------------------------------------------------------------
add_subdirectory(external)
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index bca881c7e..ba8a0161c 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -77,7 +77,7 @@ a good reason is "This violates the style guide, but it improves type safety."
> $ git commit
* **Commit Messages**
- * Limit the subject line to 64 characters -- this allows the information to display correctly in git/GitHub logs
+ * Limit the subject line to 50 characters -- this allows the information to display correctly in git/Github logs
* Begin subject line with a one-word component description followed by a colon (e.g. build, docs, layers, tests, etc.)
* Separate subject from body with a blank line
* Wrap the body at 72 characters
@@ -92,7 +92,7 @@ that to be accepted into the repository, the pull request must [pass all tests](
-- the automatic Github Travis and AppVeyor continuous integration features will assist in enforcing this requirement.
#### **Testing Your Changes**
-* Run the existing tests in the repository before and after each of your commits to check for any regressions.
+* Run the existing tests in the repository before and after each if your commits to check for any regressions.
There are some tests that appear in all repositories.
These tests can be found in the following folders inside of your target build directory:
@@ -124,22 +124,16 @@ if the report is received. Otherwise, the test should indicate "failure".
This new test should be added to the validation layer test program in the `tests` directory and contributed
at the same time as the new validation check itself. There are many existing validation tests in this directory that can be
used as a starting point.
-* **Validation Checks:** Validation checks are carried out by the Khronos Validation layer. The CoreChecks validation object
-contains checks that require significant amounts of application state to carry out. In contrast, the stateless validation object contains
+* **Validation Checks:** The majority of validation checks are carried out by the Core Validation layer. In general, this layer
+contains checks that require some amount of application state to carry out. In contrast, the parameter validation layer contains
checks that require (mostly) no state at all. Please inquire if you are unsure of the location for your contribution. The other
-validation objects (thread_safety, object lifetimes) are more special-purpose and are mostly code-generated from the specification.
+layers (threading, object_tracker, unique_objects) are more special-purpose and are mostly code-generated from the specification.
* **Validation Error/Warning Messages:** Strive to give specific information describing the particulars of the failure, including
output all of the applicable Vulkan Objects and related values. Also, ensure that when messages can give suggestions about _how_ to
fix the problem, they should do so to better assist the user.
* **Validation Statistics:** The `vk_validation_stats.py` script (in the scripts directory) inspects the layer and test source files
and reports a variety of statistics on validation completeness and correctness. Before submitting a change you should run this
script with the consistency check (`-c`) argument to ensure that your changes have not introduced any inconsistencies in the code.
-* **Generated Source Code:** The `layers/generated` directory contains source code that is created by several
-generator scripts in the `scripts` directory. All changes to these scripts _must_ be submitted with the
-corresponding generated output to keep the repository self-consistent. This requirement is enforced by both
-Travis CI and AppVeyor test configurations. Regenerate source files after modifying any of the generator
-scripts and before building and testing your changes. More details can be found in
-[BUILD.md](https://github.com/KhronosGroup/Vulkan-ValidationLayers/blob/master/BUILD.md#generated-source-code).
#### Coding Conventions for [CMake](http://cmake.org) files
diff --git a/build-android/android-generate.bat b/build-android/android-generate.bat
new file mode 100644
index 000000000..106dd455e
--- /dev/null
+++ b/build-android/android-generate.bat
@@ -0,0 +1,73 @@
+@echo off
+REM # Copyright 2015 The Android Open Source Project
+REM # Copyright (C) 2015 Valve Corporation
+REM
+REM # Licensed under the Apache License, Version 2.0 (the "License");
+REM # you may not use this file except in compliance with the License.
+REM # You may obtain a copy of the License at
+REM
+REM # http://www.apache.org/licenses/LICENSE-2.0
+REM
+REM # Unless required by applicable law or agreed to in writing, software
+REM # distributed under the License is distributed on an "AS IS" BASIS,
+REM # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+REM # See the License for the specific language governing permissions and
+REM # limitations under the License.
+
+if exist generated (
+ rmdir /s /q generated
+)
+mkdir generated\include generated\common
+
+set HEADERS_REGISTRY_PATH=%CD%/third_party/Vulkan-Headers/registry
+
+cd generated/include
+py -3 ../../../scripts/lvl_genvk.py -registry %HEADERS_REGISTRY_PATH%/vk.xml -scripts %HEADERS_REGISTRY_PATH% vk_safe_struct.h
+py -3 ../../../scripts/lvl_genvk.py -registry %HEADERS_REGISTRY_PATH%/vk.xml -scripts %HEADERS_REGISTRY_PATH% vk_safe_struct.cpp
+py -3 ../../../scripts/lvl_genvk.py -registry %HEADERS_REGISTRY_PATH%/vk.xml -scripts %HEADERS_REGISTRY_PATH% vk_enum_string_helper.h
+py -3 ../../../scripts/lvl_genvk.py -registry %HEADERS_REGISTRY_PATH%/vk.xml -scripts %HEADERS_REGISTRY_PATH% vk_object_types.h
+py -3 ../../../scripts/lvl_genvk.py -registry %HEADERS_REGISTRY_PATH%/vk.xml -scripts %HEADERS_REGISTRY_PATH% vk_dispatch_table_helper.h
+py -3 ../../../scripts/lvl_genvk.py -registry %HEADERS_REGISTRY_PATH%/vk.xml -scripts %HEADERS_REGISTRY_PATH% thread_safety.cpp
+py -3 ../../../scripts/lvl_genvk.py -registry %HEADERS_REGISTRY_PATH%/vk.xml -scripts %HEADERS_REGISTRY_PATH% thread_safety.h
+py -3 ../../../scripts/lvl_genvk.py -registry %HEADERS_REGISTRY_PATH%/vk.xml -scripts %HEADERS_REGISTRY_PATH% parameter_validation.cpp
+py -3 ../../../scripts/lvl_genvk.py -registry %HEADERS_REGISTRY_PATH%/vk.xml -scripts %HEADERS_REGISTRY_PATH% parameter_validation.h
+py -3 ../../../scripts/lvl_genvk.py -registry %HEADERS_REGISTRY_PATH%/vk.xml -scripts %HEADERS_REGISTRY_PATH% vk_layer_dispatch_table.h
+py -3 ../../../scripts/lvl_genvk.py -registry %HEADERS_REGISTRY_PATH%/vk.xml -scripts %HEADERS_REGISTRY_PATH% vk_extension_helper.h
+py -3 ../../../scripts/lvl_genvk.py -registry %HEADERS_REGISTRY_PATH%/vk.xml -scripts %HEADERS_REGISTRY_PATH% vk_typemap_helper.h
+
+py -3 ../../../scripts/lvl_genvk.py -registry %HEADERS_REGISTRY_PATH%/vk.xml -scripts %HEADERS_REGISTRY_PATH% object_tracker.cpp
+py -3 ../../../scripts/lvl_genvk.py -registry %HEADERS_REGISTRY_PATH%/vk.xml -scripts %HEADERS_REGISTRY_PATH% object_tracker.h
+py -3 ../../../scripts/lvl_genvk.py -registry %HEADERS_REGISTRY_PATH%/vk.xml -scripts %HEADERS_REGISTRY_PATH% layer_chassis_dispatch.cpp
+py -3 ../../../scripts/lvl_genvk.py -registry %HEADERS_REGISTRY_PATH%/vk.xml -scripts %HEADERS_REGISTRY_PATH% layer_chassis_dispatch.h
+py -3 ../../../scripts/lvl_genvk.py -registry %HEADERS_REGISTRY_PATH%/vk.xml -scripts %HEADERS_REGISTRY_PATH% chassis.cpp
+py -3 ../../../scripts/lvl_genvk.py -registry %HEADERS_REGISTRY_PATH%/vk.xml -scripts %HEADERS_REGISTRY_PATH% chassis.h
+
+set SPIRV_TOOLS_PATH=../../third_party/shaderc/third_party/spirv-tools
+set SPIRV_TOOLS_UUID=spirv_tools_uuid.txt
+
+if exist %SPIRV_TOOLS_PATH% (
+
+ echo Found spirv-tools, using git_dir for external_revision_generator.py
+ py -3 ../../../scripts/external_revision_generator.py ^
+ --git_dir %SPIRV_TOOLS_PATH% ^
+ -s SPIRV_TOOLS_COMMIT_ID ^
+ -o spirv_tools_commit_id.h
+
+) else (
+
+ echo No spirv-tools git_dir found, generating UUID for external_revision_generator.py
+
+ REM Ensure uuidgen is installed, this should error if not found
+ uuidgen.exe -v
+
+ uuidgen.exe > %SPIRV_TOOLS_UUID%
+ type %SPIRV_TOOLS_UUID%
+ py -3 ../../../scripts/external_revision_generator.py ^
+ --rev_file %SPIRV_TOOLS_UUID% ^
+ -s SPIRV_TOOLS_COMMIT_ID ^
+ -o spirv_tools_commit_id.h
+
+)
+
+cd ../..
+
diff --git a/build-android/android-generate.sh b/build-android/android-generate.sh
new file mode 100755
index 000000000..f6de5e397
--- /dev/null
+++ b/build-android/android-generate.sh
@@ -0,0 +1,79 @@
+#!/bin/bash
+
+# Copyright 2015 The Android Open Source Project
+# Copyright (C) 2015 Valve Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+dir=$(cd -P -- "$(dirname -- "$0")" && pwd -P)
+cd $dir
+
+rm -rf generated
+mkdir -p generated/include generated/common
+HEADERS_REGISTRY_PATH="$1"
+echo HEADERS_REGISTRY_PATH defined as $HEADERS_REGISTRY_PATH
+
+( cd generated/include; python3 ../../../scripts/lvl_genvk.py -registry $HEADERS_REGISTRY_PATH/vk.xml -scripts $HEADERS_REGISTRY_PATH vk_safe_struct.h )
+( cd generated/include; python3 ../../../scripts/lvl_genvk.py -registry $HEADERS_REGISTRY_PATH/vk.xml -scripts $HEADERS_REGISTRY_PATH vk_safe_struct.cpp )
+( cd generated/include; python3 ../../../scripts/lvl_genvk.py -registry $HEADERS_REGISTRY_PATH/vk.xml -scripts $HEADERS_REGISTRY_PATH vk_enum_string_helper.h )
+( cd generated/include; python3 ../../../scripts/lvl_genvk.py -registry $HEADERS_REGISTRY_PATH/vk.xml -scripts $HEADERS_REGISTRY_PATH vk_object_types.h )
+( cd generated/include; python3 ../../../scripts/lvl_genvk.py -registry $HEADERS_REGISTRY_PATH/vk.xml -scripts $HEADERS_REGISTRY_PATH vk_dispatch_table_helper.h )
+( cd generated/include; python3 ../../../scripts/lvl_genvk.py -registry $HEADERS_REGISTRY_PATH/vk.xml -scripts $HEADERS_REGISTRY_PATH thread_safety.cpp )
+( cd generated/include; python3 ../../../scripts/lvl_genvk.py -registry $HEADERS_REGISTRY_PATH/vk.xml -scripts $HEADERS_REGISTRY_PATH thread_safety.h )
+( cd generated/include; python3 ../../../scripts/lvl_genvk.py -registry $HEADERS_REGISTRY_PATH/vk.xml -scripts $HEADERS_REGISTRY_PATH parameter_validation.cpp )
+( cd generated/include; python3 ../../../scripts/lvl_genvk.py -registry $HEADERS_REGISTRY_PATH/vk.xml -scripts $HEADERS_REGISTRY_PATH parameter_validation.h )
+( cd generated/include; python3 ../../../scripts/lvl_genvk.py -registry $HEADERS_REGISTRY_PATH/vk.xml -scripts $HEADERS_REGISTRY_PATH vk_layer_dispatch_table.h )
+( cd generated/include; python3 ../../../scripts/lvl_genvk.py -registry $HEADERS_REGISTRY_PATH/vk.xml -scripts $HEADERS_REGISTRY_PATH vk_extension_helper.h )
+( cd generated/include; python3 ../../../scripts/lvl_genvk.py -registry $HEADERS_REGISTRY_PATH/vk.xml -scripts $HEADERS_REGISTRY_PATH vk_typemap_helper.h )
+( cd generated/include; python3 ../../../scripts/lvl_genvk.py -registry $HEADERS_REGISTRY_PATH/vk.xml -scripts $HEADERS_REGISTRY_PATH object_tracker.cpp )
+( cd generated/include; python3 ../../../scripts/lvl_genvk.py -registry $HEADERS_REGISTRY_PATH/vk.xml -scripts $HEADERS_REGISTRY_PATH object_tracker.h )
+( cd generated/include; python3 ../../../scripts/lvl_genvk.py -registry $HEADERS_REGISTRY_PATH/vk.xml -scripts $HEADERS_REGISTRY_PATH layer_chassis_dispatch.cpp )
+( cd generated/include; python3 ../../../scripts/lvl_genvk.py -registry $HEADERS_REGISTRY_PATH/vk.xml -scripts $HEADERS_REGISTRY_PATH layer_chassis_dispatch.h )
+( cd generated/include; python3 ../../../scripts/lvl_genvk.py -registry $HEADERS_REGISTRY_PATH/vk.xml -scripts $HEADERS_REGISTRY_PATH chassis.cpp )
+( cd generated/include; python3 ../../../scripts/lvl_genvk.py -registry $HEADERS_REGISTRY_PATH/vk.xml -scripts $HEADERS_REGISTRY_PATH chassis.h )
+
+SPIRV_TOOLS_PATH=../../third_party/shaderc/third_party/spirv-tools
+SPIRV_TOOLS_UUID=spirv_tools_uuid.txt
+
+set -e
+
+( cd generated/include;
+
+ if [[ -d $SPIRV_TOOLS_PATH ]]; then
+
+ echo Found spirv-tools, using git_dir for external_revision_generator.py
+
+ python3 ../../../scripts/external_revision_generator.py \
+ --git_dir $SPIRV_TOOLS_PATH \
+ -s SPIRV_TOOLS_COMMIT_ID \
+ -o spirv_tools_commit_id.h
+
+ else
+
+ echo No spirv-tools git_dir found, generating UUID for external_revision_generator.py
+
+ # Ensure uuidgen is installed, this should error if not found
+ type uuidgen
+
+ uuidgen > $SPIRV_TOOLS_UUID;
+ cat $SPIRV_TOOLS_UUID;
+ python3 ../../../scripts/external_revision_generator.py \
+ --rev_file $SPIRV_TOOLS_UUID \
+ -s SPIRV_TOOLS_COMMIT_ID \
+ -o spirv_tools_commit_id.h
+
+ fi
+)
+
+
+exit 0
diff --git a/build-android/build.py b/build-android/build.py
index 7b4d3d32a..4cee83077 100755
--- a/build-android/build.py
+++ b/build-android/build.py
@@ -65,6 +65,8 @@ THIS_DIR = os.path.realpath(os.path.dirname(__file__))
ALL_ARCHITECTURES = (
'arm',
'arm64',
+ 'mips',
+ 'mips64',
'x86',
'x86_64',
)
@@ -74,6 +76,8 @@ ALL_ARCHITECTURES = (
ALL_ABIS = (
'armeabi-v7a',
'arm64-v8a',
+ 'mips',
+ 'mips64',
'x86',
'x86_64',
)
@@ -85,6 +89,8 @@ def arch_to_abis(arch):
return {
'arm': ['armeabi-v7a'],
'arm64': ['arm64-v8a'],
+ 'mips': ['mips'],
+ 'mips64': ['mips64'],
'x86': ['x86'],
'x86_64': ['x86_64'],
}[arch]
@@ -264,6 +270,14 @@ def main():
print('Constructing Vulkan validation layer source...')
build_cmd = [
+ 'bash', build_dir + '/vulkan/src/build-android/android-generate.sh',
+ build_dir + '/vulkan/src/registry'
+ ]
+ print('Generating generated layers...')
+ subprocess.check_call(build_cmd)
+ print('Generation finished')
+
+ build_cmd = [
'bash', ndk_build, '-C', build_dir + '/vulkan/src/build-android',
jobs_arg(),
'APP_ABI=' + ' '.join(abis),
diff --git a/build-android/build_all.sh b/build-android/build_all.sh
index 36d3fbd65..e280e8614 100755
--- a/build-android/build_all.sh
+++ b/build-android/build_all.sh
@@ -60,6 +60,7 @@ function create_APK() {
# build layers
#
./update_external_sources_android.sh --no-build
+./android-generate.sh
ndk-build -j $cores
#
diff --git a/build-android/cmake/layerlib/CMakeLists.txt b/build-android/cmake/layerlib/CMakeLists.txt
index 2e59ecd5e..5c183a40c 100644
--- a/build-android/cmake/layerlib/CMakeLists.txt
+++ b/build-android/cmake/layerlib/CMakeLists.txt
@@ -57,47 +57,11 @@ add_library(SPIRV-Tools-prebuilt STATIC IMPORTED)
set_target_properties(SPIRV-Tools-prebuilt PROPERTIES IMPORTED_LOCATION
${SPIRV_LIB})
+# build core_validation layers which including shader validation
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -DVK_USE_PLATFORM_ANDROID_KHR \
-fvisibility=hidden")
-add_library(VkLayer_khronos_validation SHARED
- ${SRC_DIR}/layers/core_validation.cpp
- ${SRC_DIR}/layers/drawdispatch.cpp
- ${SRC_DIR}/layers/convert_to_renderpass2.cpp
- ${SRC_DIR}/layers/descriptor_sets.cpp
- ${SRC_DIR}/layers/buffer_validation.cpp
- ${SRC_DIR}/layers/shader_validation.cpp
- ${SRC_DIR}/layers/gpu_validation.cpp
- ${COMMON_DIR}/include/layer_chassis_dispatch.cpp
- ${COMMON_DIR}/include/chassis.cpp
- ${COMMON_DIR}/include/parameter_validation.cpp
- ${SRC_DIR}/layers/parameter_validation_utils.cpp
- ${COMMON_DIR}/include/object_tracker.cpp
- ${SRC_DIR}/layers/object_tracker_utils.cpp
- ${COMMON_DIR}/include/thread_safety.cpp
- ${SRC_DIR}/layers/xxhash.c)
-target_compile_definitions(VkLayer_khronos_validation PUBLIC "BUILD_KHRONOS_VALIDATION;BUILD_CORE_VALIDATION;BUILD_PARAMETER_VALIDATION;BUILD_OBJECT_TRACKER;BUILD_THREAD_SAFETY;LAYER_CHASSIS_CAN_WRAP_HANDLES")
-target_include_directories(VkLayer_khronos_validation PRIVATE
- ${SRC_DIR}/include
- ${SRC_DIR}/layers
- ${COMMON_DIR}/include
- ${SRC_DIR}/loader
- ${EXTERNAL_DIR}/glslang
- ${EXTERNAL_DIR}/spirv-tools/include)
-target_link_libraries(VkLayer_khronos_validation PRIVATE
- log layer_utils SPIRV-Tools-prebuilt)
-if (NOT BUILD_IN_NDK)
- set(SPIRV_OPT_LIB
- "${SRC_DIR}/build-android/third_party/shaderc/android_test/obj/local/${ANDROID_ABI}/libSPIRV-Tools-opt.a")
- add_library(SPIRV-Tools-opt-prebuilt STATIC IMPORTED)
- set_target_properties(SPIRV-Tools-opt-prebuilt PROPERTIES IMPORTED_LOCATION
- ${SPIRV_OPT_LIB})
- target_link_libraries(VkLayer_khronos_validation PRIVATE
- SPIRV-Tools-opt-prebuilt)
-endif()
-
add_library(VkLayer_core_validation SHARED
${SRC_DIR}/layers/core_validation.cpp
- ${SRC_DIR}/layers/drawdispatch.cpp
${SRC_DIR}/layers/convert_to_renderpass2.cpp
${SRC_DIR}/layers/descriptor_sets.cpp
${SRC_DIR}/layers/buffer_validation.cpp
@@ -155,7 +119,7 @@ target_link_libraries(VkLayer_object_tracker PRIVATE log layer_utils)
add_library(VkLayer_threading SHARED
${COMMON_DIR}/include/thread_safety.cpp
${COMMON_DIR}/include/layer_chassis_dispatch.cpp
- ${COMMON_DIR}/include/chassis.cpp)
+ ${COMMON_DIR}/include/chassis.cpp
target_compile_definitions(VkLayer_threading PUBLIC "BUILD_THREAD_SAFETY")
target_include_directories(VkLayer_threading PRIVATE
${SRC_DIR}/include
@@ -165,7 +129,7 @@ target_include_directories(VkLayer_threading PRIVATE
target_link_libraries(VkLayer_threading PRIVATE log layer_utils)
add_library(VkLayer_unique_objects SHARED
- ${COMMON_DIR}/include/layer_chassis_dispatch.cpp
+ ${COMMON_DIR}/include/layer_chassis_dispatch.cpp)
${COMMON_DIR}/include/chassis.cpp)
target_compile_definitions(VkLayer_object_tracker PUBLIC "LAYER_CHASSIS_CAN_WRAP_HANDLES")
target_include_directories(VkLayer_unique_objects PRIVATE
diff --git a/build-android/jni/Android.mk b/build-android/jni/Android.mk
index 6f93c7ff8..215c1ce48 100644
--- a/build-android/jni/Android.mk
+++ b/build-android/jni/Android.mk
@@ -27,56 +27,26 @@ LOCAL_SRC_FILES += $(SRC_DIR)/layers/vk_layer_extension_utils.cpp
LOCAL_SRC_FILES += $(SRC_DIR)/layers/vk_layer_utils.cpp
LOCAL_SRC_FILES += $(SRC_DIR)/layers/vk_format_utils.cpp
LOCAL_C_INCLUDES += $(VULKAN_INCLUDE) \
- $(LOCAL_PATH)/$(SRC_DIR)/layers/generated \
+ $(LOCAL_PATH)/$(LAYER_DIR)/include \
$(LOCAL_PATH)/$(SRC_DIR)/layers
LOCAL_CPPFLAGS += -std=c++11 -Wall -Werror -Wno-unused-function -Wno-unused-const-variable
LOCAL_CPPFLAGS += -DVK_USE_PLATFORM_ANDROID_KHR -DVK_PROTOTYPES -fvisibility=hidden
include $(BUILD_STATIC_LIBRARY)
include $(CLEAR_VARS)
-LOCAL_MODULE := VkLayer_khronos_validation
-LOCAL_SRC_FILES += $(SRC_DIR)/layers/core_validation.cpp
-LOCAL_SRC_FILES += $(SRC_DIR)/layers/drawdispatch.cpp
-LOCAL_SRC_FILES += $(SRC_DIR)/layers/descriptor_sets.cpp
-LOCAL_SRC_FILES += $(SRC_DIR)/layers/buffer_validation.cpp
-LOCAL_SRC_FILES += $(SRC_DIR)/layers/shader_validation.cpp
-LOCAL_SRC_FILES += $(SRC_DIR)/layers/gpu_validation.cpp
-LOCAL_SRC_FILES += $(SRC_DIR)/layers/convert_to_renderpass2.cpp
-LOCAL_SRC_FILES += $(SRC_DIR)/layers/generated/layer_chassis_dispatch.cpp
-LOCAL_SRC_FILES += $(SRC_DIR)/layers/generated/chassis.cpp
-LOCAL_SRC_FILES += $(SRC_DIR)/layers/xxhash.c
-LOCAL_SRC_FILES += $(SRC_DIR)/layers/generated/parameter_validation.cpp
-LOCAL_SRC_FILES += $(SRC_DIR)/layers/parameter_validation_utils.cpp
-LOCAL_SRC_FILES += $(SRC_DIR)/layers/generated/object_tracker.cpp
-LOCAL_SRC_FILES += $(SRC_DIR)/layers/object_tracker_utils.cpp
-LOCAL_SRC_FILES += $(SRC_DIR)/layers/generated/thread_safety.cpp
-LOCAL_C_INCLUDES += $(VULKAN_INCLUDE) \
- $(LOCAL_PATH)/$(SRC_DIR)/layers \
- $(LOCAL_PATH)/$(SRC_DIR)/layers/generated
-LOCAL_STATIC_LIBRARIES += layer_utils glslang SPIRV-Tools SPIRV-Tools-opt
-LOCAL_CPPFLAGS += -std=c++11 -Wall -Werror -Wno-unused-function -Wno-unused-const-variable
-LOCAL_CPPFLAGS += -DVK_USE_PLATFORM_ANDROID_KHR -DVK_PROTOTYPES -fvisibility=hidden -DBUILD_KHRONOS_VALIDATION -DBUILD_CORE_VALIDATION -DBUILD_PARAMETER_VALIDATION -DBUILD_OBJECT_TRACKER -DBUILD_THREAD_SAFETY -DLAYER_CHASSIS_CAN_WRAP_HANDLES
-LOCAL_LDLIBS := -llog -landroid
-LOCAL_LDFLAGS += -Wl,-Bsymbolic
-LOCAL_LDFLAGS += -Wl,--exclude-libs,ALL
-include $(BUILD_SHARED_LIBRARY)
-
-
-include $(CLEAR_VARS)
LOCAL_MODULE := VkLayer_core_validation
LOCAL_SRC_FILES += $(SRC_DIR)/layers/core_validation.cpp
-LOCAL_SRC_FILES += $(SRC_DIR)/layers/drawdispatch.cpp
LOCAL_SRC_FILES += $(SRC_DIR)/layers/descriptor_sets.cpp
LOCAL_SRC_FILES += $(SRC_DIR)/layers/buffer_validation.cpp
LOCAL_SRC_FILES += $(SRC_DIR)/layers/shader_validation.cpp
LOCAL_SRC_FILES += $(SRC_DIR)/layers/gpu_validation.cpp
LOCAL_SRC_FILES += $(SRC_DIR)/layers/convert_to_renderpass2.cpp
-LOCAL_SRC_FILES += $(SRC_DIR)/layers/generated/layer_chassis_dispatch.cpp
-LOCAL_SRC_FILES += $(SRC_DIR)/layers/generated/chassis.cpp
+LOCAL_SRC_FILES += $(LAYER_DIR)/include/layer_chassis_dispatch.cpp
+LOCAL_SRC_FILES += $(LAYER_DIR)/include/chassis.cpp
LOCAL_SRC_FILES += $(SRC_DIR)/layers/xxhash.c
LOCAL_C_INCLUDES += $(VULKAN_INCLUDE) \
$(LOCAL_PATH)/$(SRC_DIR)/layers \
- $(LOCAL_PATH)/$(SRC_DIR)/layers/generated
+ $(LOCAL_PATH)/$(LAYER_DIR)/include
LOCAL_STATIC_LIBRARIES += layer_utils glslang SPIRV-Tools SPIRV-Tools-opt
LOCAL_CPPFLAGS += -std=c++11 -Wall -Werror -Wno-unused-function -Wno-unused-const-variable
LOCAL_CPPFLAGS += -DVK_USE_PLATFORM_ANDROID_KHR -DVK_PROTOTYPES -fvisibility=hidden -DBUILD_CORE_VALIDATION
@@ -87,12 +57,12 @@ include $(BUILD_SHARED_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE := VkLayer_parameter_validation
-LOCAL_SRC_FILES += $(SRC_DIR)/layers/generated/parameter_validation.cpp
-LOCAL_SRC_FILES += $(SRC_DIR)/layers/generated/layer_chassis_dispatch.cpp
-LOCAL_SRC_FILES += $(SRC_DIR)/layers/generated/chassis.cpp
+LOCAL_SRC_FILES += $(LAYER_DIR)/include/parameter_validation.cpp
+LOCAL_SRC_FILES += $(LAYER_DIR)/include/layer_chassis_dispatch.cpp
+LOCAL_SRC_FILES += $(LAYER_DIR)/include/chassis.cpp
LOCAL_SRC_FILES += $(SRC_DIR)/layers/parameter_validation_utils.cpp
LOCAL_C_INCLUDES += $(VULKAN_INCLUDE) \
- $(LOCAL_PATH)/$(SRC_DIR)/layers/generated \
+ $(LOCAL_PATH)/$(LAYER_DIR)/include \
$(LOCAL_PATH)/$(SRC_DIR)/layers
LOCAL_STATIC_LIBRARIES += layer_utils
LOCAL_CPPFLAGS += -std=c++11 -DVK_PROTOTYPES -Wall -Werror -Wno-unused-function -Wno-unused-const-variable
@@ -104,13 +74,13 @@ include $(BUILD_SHARED_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE := VkLayer_object_tracker
-LOCAL_SRC_FILES += $(SRC_DIR)/layers/generated/object_tracker.cpp
-LOCAL_SRC_FILES += $(SRC_DIR)/layers/generated/layer_chassis_dispatch.cpp
-LOCAL_SRC_FILES += $(SRC_DIR)/layers/generated/chassis.cpp
+LOCAL_SRC_FILES += $(LAYER_DIR)/include/object_tracker.cpp
+LOCAL_SRC_FILES += $(LAYER_DIR)/include/layer_chassis_dispatch.cpp
+LOCAL_SRC_FILES += $(LAYER_DIR)/include/chassis.cpp
LOCAL_SRC_FILES += $(SRC_DIR)/layers/object_tracker_utils.cpp
LOCAL_C_INCLUDES += $(VULKAN_INCLUDE) \
$(LOCAL_PATH)/$(SRC_DIR)/layers \
- $(LOCAL_PATH)/$(SRC_DIR)/layers/generated
+ $(LOCAL_PATH)/$(LAYER_DIR)/include
LOCAL_STATIC_LIBRARIES += layer_utils
LOCAL_CPPFLAGS += -std=c++11 -DVK_PROTOTYPES -Wall -Werror -Wno-unused-function -Wno-unused-const-variable
LOCAL_CPPFLAGS += -DVK_USE_PLATFORM_ANDROID_KHR -fvisibility=hidden -DBUILD_OBJECT_TRACKER
@@ -121,12 +91,12 @@ include $(BUILD_SHARED_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE := VkLayer_threading
-LOCAL_SRC_FILES += $(SRC_DIR)/layers/generated/thread_safety.cpp
-LOCAL_SRC_FILES += $(SRC_DIR)/layers/generated/layer_chassis_dispatch.cpp
-LOCAL_SRC_FILES += $(SRC_DIR)/layers/generated/chassis.cpp
+LOCAL_SRC_FILES += $(LAYER_DIR)/include/thread_safety.cpp
+LOCAL_SRC_FILES += $(LAYER_DIR)/include/layer_chassis_dispatch.cpp
+LOCAL_SRC_FILES += $(LAYER_DIR)/include/chassis.cpp
LOCAL_C_INCLUDES += $(VULKAN_INCLUDE) \
$(LOCAL_PATH)/$(SRC_DIR)/layers \
- $(LOCAL_PATH)/$(SRC_DIR)/layers/generated
+ $(LOCAL_PATH)/$(LAYER_DIR)/include
LOCAL_STATIC_LIBRARIES += layer_utils
LOCAL_CPPFLAGS += -std=c++11 -DVK_PROTOTYPES -Wall -Werror -Wno-unused-function -Wno-unused-const-variable
LOCAL_CPPFLAGS += -DVK_USE_PLATFORM_ANDROID_KHR -fvisibility=hidden -DBUILD_THREAD_SAFETY
@@ -137,11 +107,11 @@ include $(BUILD_SHARED_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE := VkLayer_unique_objects
-LOCAL_SRC_FILES += $(SRC_DIR)/layers/generated/layer_chassis_dispatch.cpp
-LOCAL_SRC_FILES += $(SRC_DIR)/layers/generated/chassis.cpp
+LOCAL_SRC_FILES += $(LAYER_DIR)/include/layer_chassis_dispatch.cpp
+LOCAL_SRC_FILES += $(LAYER_DIR)/include/chassis.cpp
LOCAL_C_INCLUDES += $(VULKAN_INCLUDE) \
$(LOCAL_PATH)/$(SRC_DIR)/layers \
- $(LOCAL_PATH)/$(SRC_DIR)/layers/generated
+ $(LOCAL_PATH)/$(LAYER_DIR)/include
LOCAL_STATIC_LIBRARIES += layer_utils
LOCAL_CPPFLAGS += -std=c++11 -Wall -Werror -Wno-unused-function -Wno-unused-const-variable
LOCAL_CPPFLAGS += -DVK_USE_PLATFORM_ANDROID_KHR -DVK_PROTOTYPES -fvisibility=hidden -DLAYER_CHASSIS_CAN_WRAP_HANDLES
@@ -153,20 +123,14 @@ include $(BUILD_SHARED_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE := VkLayerValidationTests
LOCAL_SRC_FILES += $(SRC_DIR)/tests/layer_validation_tests.cpp \
- $(SRC_DIR)/tests/vklayertests_pipeline_shader.cpp \
- $(SRC_DIR)/tests/vklayertests_buffer_image_memory_sampler.cpp \
- $(SRC_DIR)/tests/vklayertests_others.cpp \
- $(SRC_DIR)/tests/vklayertests_descriptor_renderpass_framebuffer.cpp \
- $(SRC_DIR)/tests/vklayertests_command.cpp \
- $(SRC_DIR)/tests/vkpositivelayertests.cpp \
$(SRC_DIR)/tests/vktestbinding.cpp \
$(SRC_DIR)/tests/vktestframeworkandroid.cpp \
$(SRC_DIR)/tests/vkrenderframework.cpp \
$(SRC_DIR)/layers/convert_to_renderpass2.cpp \
- $(SRC_DIR)/layers/generated/vk_safe_struct.cpp \
+ $(LAYER_DIR)/include/vk_safe_struct.cpp \
$(THIRD_PARTY)/Vulkan-Tools/common/vulkan_wrapper.cpp
LOCAL_C_INCLUDES += $(VULKAN_INCLUDE) \
- $(LOCAL_PATH)/$(SRC_DIR)/layers/generated \
+ $(LOCAL_PATH)/$(LAYER_DIR)/include \
$(LOCAL_PATH)/$(SRC_DIR)/layers \
$(LOCAL_PATH)/$(SRC_DIR)/libs \
$(LOCAL_PATH)/$(THIRD_PARTY)/Vulkan-Tools/common
@@ -183,20 +147,14 @@ include $(BUILD_EXECUTABLE)
include $(CLEAR_VARS)
LOCAL_MODULE := VulkanLayerValidationTests
LOCAL_SRC_FILES += $(SRC_DIR)/tests/layer_validation_tests.cpp \
- $(SRC_DIR)/tests/vklayertests_pipeline_shader.cpp \
- $(SRC_DIR)/tests/vklayertests_buffer_image_memory_sampler.cpp \
- $(SRC_DIR)/tests/vklayertests_others.cpp \
- $(SRC_DIR)/tests/vklayertests_descriptor_renderpass_framebuffer.cpp \
- $(SRC_DIR)/tests/vklayertests_command.cpp \
- $(SRC_DIR)/tests/vkpositivelayertests.cpp \
$(SRC_DIR)/tests/vktestbinding.cpp \
$(SRC_DIR)/tests/vktestframeworkandroid.cpp \
$(SRC_DIR)/tests/vkrenderframework.cpp \
$(SRC_DIR)/layers/convert_to_renderpass2.cpp \
- $(SRC_DIR)/layers/generated/vk_safe_struct.cpp \
+ $(LAYER_DIR)/include/vk_safe_struct.cpp \
$(THIRD_PARTY)/Vulkan-Tools/common/vulkan_wrapper.cpp
LOCAL_C_INCLUDES += $(VULKAN_INCLUDE) \
- $(LOCAL_PATH)/$(SRC_DIR)/layers/generated \
+ $(LOCAL_PATH)/$(LAYER_DIR)/include \
$(LOCAL_PATH)/$(SRC_DIR)/layers \
$(LOCAL_PATH)/$(SRC_DIR)/libs \
$(LOCAL_PATH)/$(THIRD_PARTY)/Vulkan-Tools/common
diff --git a/build-android/jni/Application.mk b/build-android/jni/Application.mk
index fffccc2f2..66128c065 100644
--- a/build-android/jni/Application.mk
+++ b/build-android/jni/Application.mk
@@ -17,6 +17,6 @@ APP_ABI := armeabi-v7a arm64-v8a x86 x86_64
# APP_ABI := arm64-v8a # just build for pixel2 (don't check in)
APP_PLATFORM := android-26
APP_STL := c++_static
-APP_MODULES := VkLayer_khronos_validation VkLayer_core_validation VkLayer_parameter_validation VkLayer_object_tracker VkLayer_threading VkLayer_unique_objects
+APP_MODULES := VkLayer_core_validation VkLayer_parameter_validation VkLayer_object_tracker VkLayer_threading VkLayer_unique_objects
NDK_TOOLCHAIN_VERSION := clang
NDK_MODULE_PATH := .
diff --git a/build-android/known_good.json b/build-android/known_good.json
index dac58e97f..f5c53bcf6 100644
--- a/build-android/known_good.json
+++ b/build-android/known_good.json
@@ -10,31 +10,31 @@
"name" : "glslang",
"url" : "https://github.com/KhronosGroup/glslang.git",
"sub_dir" : "shaderc/third_party/glslang",
- "commit" : "74426f7570fdd23a3bcf7ac0987347197f44c92e"
+ "commit" : "9983f99e87ab0b6608b236ea59bcf873f90e1435"
},
{
"name" : "Vulkan-Headers",
"url" : "https://github.com/KhronosGroup/Vulkan-Headers.git",
"sub_dir" : "Vulkan-Headers",
- "commit" : "v1.1.121"
+ "commit" : "v1.1.102"
},
{
"name" : "Vulkan-Tools",
"url" : "https://github.com/KhronosGroup/Vulkan-Tools.git",
"sub_dir" : "Vulkan-Tools",
- "commit" : "e36c760bbde1c56b5b2a934347ff0fb9dce4d793"
+ "commit" : "v1.1.102"
},
{
"name" : "SPIRV-Tools",
"url" : "https://github.com/KhronosGroup/SPIRV-Tools.git",
"sub_dir" : "shaderc/third_party/spirv-tools",
- "commit" : "1fedf72e500b7cf72098a3f800c8ef4b9d9dc84f"
+ "commit" : "0f4bf0720a9cd49d7375ae1296c874133df5ea34"
},
{
"name" : "SPIRV-Headers",
"url" : "https://github.com/KhronosGroup/SPIRV-Headers.git",
"sub_dir" : "shaderc/third_party/spirv-tools/external/spirv-headers",
- "commit" : "dcce859e34cf0c23625ec75ac44df750aa2f4d70"
+ "commit" : "8bea0a266ac9b718aa0818d9e3a47c0b77c2cb23"
}
]
}
diff --git a/build-android/test_APK.sh b/build-android/test_APK.sh
index b51dafbeb..f1822abbf 100755
--- a/build-android/test_APK.sh
+++ b/build-android/test_APK.sh
@@ -31,7 +31,6 @@ function printUsage {
echo " -p|--platform <platform> (optional)"
echo " -f|--filter <gtest filter list> (optional)"
echo " -s|--serial <target device serial number> (optional)"
- echo " -a|--abi <target abi> (optional)"
echo
echo "i.e. ${0##*/} -p <platform> -f <test filter> -s <serial number>"
exit 1
@@ -61,10 +60,6 @@ do
serial="$2"
shift 2
;;
- -a|--abi)
- abi="$2"
- shift 2
- ;;
-*)
# unknown option
echo Unknown option: $1
@@ -103,7 +98,6 @@ fi
if [[ $platform ]]; then echo platform = "${platform}"; fi
if [[ $filter ]]; then echo filter = "${filter}"; fi
if [[ $serial ]]; then echo serial = "${serial}"; fi
-if [[ $abi ]]; then echo abi = "${abi}"; fi
set -e
@@ -165,12 +159,7 @@ echo
echo Installing ./bin/VulkanLayerValidationTests.apk...
# Install the current build
-if [[ -z $abi ]]
-then
- adb $serialFlag install -r bin/VulkanLayerValidationTests.apk
-else
- adb $serialFlag install --abi $abi -r bin/VulkanLayerValidationTests.apk
-fi
+adb $serialFlag install -r bin/VulkanLayerValidationTests.apk
echo
echo Launching tests...
@@ -251,9 +240,7 @@ adb $serialFlag shell am force-stop com.example.VulkanLayerValidationTests
echo
echo Fetching test output and filtered logcat text...
-# Avoid characters that are illegal in Windows filenames, so these
-# files can be archived to a Windows host system for later reference
-today=$(date +%Y%m%d-%H%M%S)
+today=$(date +%Y-%m-%d.%H:%M:%S)
outFile="VulkanLayerValidationTests.$platform.$today.out.txt"
errFile="VulkanLayerValidationTests.$platform.$today.err.txt"
logFile="VulkanLayerValidationTests.$platform.$today.logcat.txt"
diff --git a/build-gn/DEPS b/build-gn/DEPS
deleted file mode 100644
index 5b8213142..000000000
--- a/build-gn/DEPS
+++ /dev/null
@@ -1,63 +0,0 @@
-vars = {
- 'chromium_git': 'https://chromium.googlesource.com',
- 'build_with_chromium': False,
-}
-
-deps = {
-
- './build': {
- 'url': '{chromium_git}/chromium/src/build.git@a660b0b9174e3a808f620222017566e8d1b2669b',
- 'condition': 'not build_with_chromium',
- },
-
- './buildtools': {
- 'url': '{chromium_git}/chromium/src/buildtools.git@459baaf66bee809f6eb288e0215cf524f4d2429a',
- 'condition': 'not build_with_chromium',
- },
-
- './testing': {
- 'url': '{chromium_git}/chromium/src/testing@083d633e752e7a57cbe62a468a06e51e28c49ee9',
- 'condition': 'not build_with_chromium',
- },
-
- './tools/clang': {
- 'url': '{chromium_git}/chromium/src/tools/clang.git@3114fbc11f9644c54dd0a4cdbfa867bac50ff983',
- 'condition': 'not build_with_chromium',
- },
-
-}
-
-hooks = [
- # Pull clang-format binaries using checked-in hashes.
- {
- 'name': 'clang_format_linux',
- 'pattern': '.',
- 'condition': 'host_os == "linux" and not build_with_chromium',
- 'action': [ 'download_from_google_storage',
- '--no_resume',
- '--platform=linux*',
- '--no_auth',
- '--bucket', 'chromium-clang-format',
- '-s', './buildtools/linux64/clang-format.sha1',
- ],
- },
- {
- 'name': 'sysroot_x64',
- 'pattern': '.',
- 'condition': 'checkout_linux and (checkout_x64 and not build_with_chromium)',
- 'action': ['python', './build/linux/sysroot_scripts/install-sysroot.py',
- '--arch=x64'],
- },
- {
- # Note: On Win, this should run after win_toolchain, as it may use it.
- 'name': 'clang',
- 'pattern': '.',
- 'action': ['python', './tools/clang/scripts/update.py'],
- 'condition': 'not build_with_chromium',
- },
-]
-
-recursedeps = [
- # buildtools provides clang_format.
- './buildtools',
-]
diff --git a/build-gn/commit_id.py b/build-gn/commit_id.py
deleted file mode 100755
index 78e978e80..000000000
--- a/build-gn/commit_id.py
+++ /dev/null
@@ -1,64 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (C) 2018 The ANGLE Project Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Generate commit.h with git commit hash.
-#
-
-import subprocess as sp
-import sys
-import os
-
-usage = """\
-Usage: commit_id.py check <angle_dir> - check if git is present
- commit_id.py gen <angle_dir> <file_to_write> - generate commit.h"""
-
-def grab_output(command, cwd):
- return sp.Popen(command, stdout=sp.PIPE, shell=True, cwd=cwd).communicate()[0].strip()
-
-if len(sys.argv) < 3:
- sys.exit(usage)
-
-operation = sys.argv[1]
-cwd = sys.argv[2]
-
-if operation == 'check':
- index_path = os.path.join(cwd, '.git', 'index')
- if os.path.exists(index_path):
- print("1")
- else:
- print("0")
- sys.exit(0)
-
-if len(sys.argv) < 4 or operation != 'gen':
- sys.exit(usage)
-
-output_file = sys.argv[3]
-commit_id_size = 12
-
-try:
- commit_id = grab_output('git rev-parse --short=%d HEAD' % commit_id_size, cwd)
- commit_date = grab_output('git show -s --format=%ci HEAD', cwd)
-except:
- commit_id = 'invalid-hash'
- commit_date = 'invalid-date'
-
-hfile = open(output_file, 'w')
-
-hfile.write('#define ANGLE_COMMIT_HASH "%s"\n' % commit_id)
-hfile.write('#define ANGLE_COMMIT_HASH_SIZE %d\n' % commit_id_size)
-hfile.write('#define ANGLE_COMMIT_DATE "%s"\n' % commit_date)
-
-hfile.close()
diff --git a/build-gn/generate_vulkan_layers_json.py b/build-gn/generate_vulkan_layers_json.py
deleted file mode 100755
index 2999cd815..000000000
--- a/build-gn/generate_vulkan_layers_json.py
+++ /dev/null
@@ -1,126 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (C) 2016 The ANGLE Project Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Generate copies of the Vulkan layers JSON files, with no paths, forcing
-Vulkan to use the default search path to look for layers."""
-
-from __future__ import print_function
-
-import argparse
-import glob
-import json
-import os
-import platform
-import sys
-
-
-def glob_slash(dirname):
- """Like regular glob but replaces \ with / in returned paths."""
- return [s.replace('\\', '/') for s in glob.glob(dirname)]
-
-
-def main():
- parser = argparse.ArgumentParser(description=__doc__)
- parser.add_argument('--icd', action='store_true')
- parser.add_argument('source_dir')
- parser.add_argument('target_dir')
- parser.add_argument('version_header', help='path to vulkan_core.h')
- parser.add_argument('json_files', nargs='*')
- args = parser.parse_args()
-
- source_dir = args.source_dir
- target_dir = args.target_dir
-
- json_files = [j for j in args.json_files if j.endswith('.json')]
- json_in_files = [j for j in args.json_files if j.endswith('.json.in')]
-
- data_key = 'ICD' if args.icd else 'layer'
-
- if not os.path.isdir(source_dir):
- print(source_dir + ' is not a directory.', file=sys.stderr)
- return 1
-
- if not os.path.exists(target_dir):
- os.makedirs(target_dir)
-
- # Copy the *.json files from source dir to target dir
- if (set(glob_slash(os.path.join(source_dir, '*.json'))) != set(json_files)):
- print(glob.glob(os.path.join(source_dir, '*.json')))
- print('.json list in gn file is out-of-date', file=sys.stderr)
- return 1
-
- for json_fname in json_files:
- if not json_fname.endswith('.json'):
- continue
- with open(json_fname) as infile:
- data = json.load(infile)
-
- # Update the path.
- if not data_key in data:
- raise Exception(
- "Could not find '%s' key in %s" % (data_key, json_fname))
-
- # The standard validation layer has no library path.
- if 'library_path' in data[data_key]:
- prev_name = os.path.basename(data[data_key]['library_path'])
- data[data_key]['library_path'] = prev_name
-
- target_fname = os.path.join(target_dir, os.path.basename(json_fname))
- with open(target_fname, 'wb') as outfile:
- json.dump(data, outfile)
-
- # Get the Vulkan version from the vulkan_core.h file
- vk_header_filename = args.version_header
- vk_version = None
- with open(vk_header_filename) as vk_header_file:
- for line in vk_header_file:
- if line.startswith('#define VK_HEADER_VERSION'):
- vk_version = line.split()[-1]
- break
- if not vk_version:
- print('failed to extract vk_version', file=sys.stderr)
- return 1
-
- # Set json file prefix and suffix for generating files, default to Linux.
- relative_path_prefix = '../lib'
- file_type_suffix = '.so'
- if platform.system() == 'Windows':
- relative_path_prefix = r'..\\' # json-escaped, hence two backslashes.
- file_type_suffix = '.dll'
-
- # For each *.json.in template files in source dir generate actual json file
- # in target dir
- if (set(glob_slash(os.path.join(source_dir, '*.json.in'))) !=
- set(json_in_files)):
- print('.json.in list in gn file is out-of-date', file=sys.stderr)
- return 1
- for json_in_name in json_in_files:
- if not json_in_name.endswith('.json.in'):
- continue
- json_in_fname = os.path.basename(json_in_name)
- layer_name = json_in_fname[:-len('.json.in')]
- layer_lib_name = layer_name + file_type_suffix
- json_out_fname = os.path.join(target_dir, json_in_fname[:-len('.in')])
- with open(json_out_fname,'w') as json_out_file, \
- open(json_in_name) as infile:
- for line in infile:
- line = line.replace('@RELATIVE_LAYER_BINARY@',
- relative_path_prefix + layer_lib_name)
- line = line.replace('@VK_VERSION@', '1.1.' + vk_version)
- json_out_file.write(line)
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/build-gn/remove_files.py b/build-gn/remove_files.py
deleted file mode 100755
index 7095913af..000000000
--- a/build-gn/remove_files.py
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/usr/bin/python2
-
-# Copyright (C) 2019 The ANGLE Project Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# remove_files.py:
-# This special action is used to cleanup old files from the build directory.
-# Otherwise ANGLE will pick up the old file(s), causing build or runtime errors.
-#
-
-import glob
-import os
-import sys
-
-if len(sys.argv) < 3:
- print("Usage: " + sys.argv[0] + " <stamp_file> <remove_patterns>")
-
-stamp_file = sys.argv[1]
-
-for i in range(2, len(sys.argv)):
- remove_pattern = sys.argv[i]
- remove_files = glob.glob(remove_pattern)
- for f in remove_files:
- if os.path.isfile(f):
- os.remove(f)
-
-# touch a dummy file to keep a timestamp
-with open(stamp_file, "w") as f:
- f.write("blah")
- f.close()
diff --git a/build-gn/secondary/build_overrides/build.gni b/build-gn/secondary/build_overrides/build.gni
deleted file mode 100644
index c6c11fa9b..000000000
--- a/build-gn/secondary/build_overrides/build.gni
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright (c) 2019 LunarG, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-build_with_chromium = false
-ignore_elf32_limitations = true
-linux_use_bundled_binutils_override = false
-use_system_xcode = true
diff --git a/build-gn/secondary/build_overrides/spirv_tools.gni b/build-gn/secondary/build_overrides/spirv_tools.gni
deleted file mode 100644
index cd58574f6..000000000
--- a/build-gn/secondary/build_overrides/spirv_tools.gni
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright (c) 2019 LunarG, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# We are building inside Vulkan-ValidationLayers
-spirv_tools_standalone = false
-
-# Paths to SPIRV-Tools dependencies
-spirv_tools_googletest_dir = "//external/googletest"
-spirv_tools_spirv_headers_dir = "//external/glslang/External/spirv-tools/external/spirv-headers"
diff --git a/build-gn/secondary/build_overrides/vulkan_validation_layers.gni b/build-gn/secondary/build_overrides/vulkan_validation_layers.gni
deleted file mode 100644
index 54e94fff5..000000000
--- a/build-gn/secondary/build_overrides/vulkan_validation_layers.gni
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright (c) 2019 LunarG, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Paths to validation layer dependencies
-vulkan_headers_dir = "//external/Vulkan-Headers"
-vvl_spirv_tools_dir = "//external/glslang/External/spirv-tools"
-vvl_glslang_dir = "//external/glslang/"
-
-# Subdirectories for generated files
-vulkan_data_subdir = ""
-vulkan_gen_subdir = ""
-
diff --git a/build-gn/update_deps.sh b/build-gn/update_deps.sh
deleted file mode 100755
index f9792f7d4..000000000
--- a/build-gn/update_deps.sh
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/bin/sh
-
-# Copyright (c) 2019 LunarG, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Execute at repo root
-cd "$(dirname $0)/.."
-
-# Use update_deps.py to update source dependencies from /scripts/known_good.json
-scripts/update_deps.py --dir="external" --no-build
-(cd external/glslang; ./update_glslang_sources.py)
-
-# Use gclient to update toolchain dependencies from /build-gn/DEPS (from chromium)
-cat << EOF >> .gclient
-solutions = [
- { "name" : ".",
- "url" : "https://github.com/KhronosGroup/Vulkan-ValidationLayers",
- "deps_file" : "build-gn/DEPS",
- "managed" : False,
- "custom_deps" : {
- },
- "custom_vars": {},
- },
-]
-EOF
-gclient sync
-
diff --git a/docs/gpu_validation.md b/docs/gpu_validation.md
index 99520d9cf..ac1d1d2d3 100644
--- a/docs/gpu_validation.md
+++ b/docs/gpu_validation.md
@@ -12,9 +12,8 @@
[3]: https://i.creativecommons.org/l/by-nd/4.0/88x31.png "Creative Commons License"
[4]: https://creativecommons.org/licenses/by-nd/4.0/
-GPU-Assisted validation is implemented in the SPIR-V Tools optimizer and the `VK_LAYER_KHRONOS_validation` layer (or, in the
-soon-to-be-deprecated `VK_LAYER_LUNARG_core_validation` layer). This document covers the design of the layer portion of the
-implementation.
+GPU-Assisted validation is implemented in the SPIR-V Tools optimizer and the `VK_LAYER_LUNARG_core_validation` layer.
+This document covers the design of the layer portion of the implementation.
## Basic Operation
@@ -26,9 +25,6 @@ The layer instruments the shaders by passing the shader's SPIR-V bytecode to the
instructs the optimizer to perform an instrumentation pass to add the additional instructions to perform the run-time checking.
The layer then passes the resulting modified SPIR-V bytecode to the driver as part of the process of creating a ShaderModule.
-The layer also allocates a buffer that describes the length of all descriptor arrays and the write state of each element of each array.
-It only does this if the VK_EXT_descriptor_indexing extension is enabled.
-
As the shader is executed, the instrumented shader code performs the run-time checks.
If a check detects an error condition, the instrumentation code writes an error record into the GPU's device memory.
This record is small and is on the order of a dozen 32-bit words.
@@ -47,8 +43,8 @@ also provides the line of shader source code that provoked the error as part of
The initial release (Jan 2019) of GPU-Assisted Validation includes checking for out-of-bounds descriptor array indexing
for image/texel descriptor types.
-The second release (Apr 2019) adds validation for out-of-bounds descriptor array indexing and use of unwritten descriptors when the
-VK_EXT_descriptor_indexing extension is enabled. Also added (June 2019) was validation for buffer descriptors.
+Future releases are planned to add checking for other hazards such as proper population of descriptors when using the
+`descriptorBindingPartiallyBound` feature of the `VK_EXT_descriptor_indexing` extension.
### Out-of-Bounds(OOB) Descriptor Array Indexing
@@ -75,20 +71,6 @@ ERROR : VALIDATION - Message Id Number: 0 | Message Id Name: UNASSIGNED-Image de
/home/user/src/Vulkan-ValidationLayers/external/Vulkan-Tools/cube/cube.frag at line 45.
45: uFragColor = light * texture(tex[tex_ind], texcoord.xy);
```
-The VK_EXT_descriptor_indexing extension allows a shader to declare a descriptor array without specifying its size
-```glsl
-layout(set = 0, binding = 1) uniform sampler2D tex[];
-```
-In this case, the layer needs to tell the optimization code how big the descriptor array is so the code can determine what is out of
-bounds and what is not.
-
-The extension also allows descriptor set bindings to be partially bound, meaning that as long as the shader doesn't use certain
-array elements, those elements are not required to have been written.
-The instrumentation code needs to know which elements of a descriptor array have been written, so that it can tell if one is used
-that has not been written.
-
-Note that currently, VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT validation is not working and all accesses are reported as valid.
-
## GPU-Assisted Validation Options
@@ -120,18 +102,15 @@ To turn on GPU validation, add the following to your layer settings file, which
named `vk_layer_settings.txt`.
```code
-khronos_validation.gpu_validation = all
+lunarg_core_validation.gpu_validation = all
```
To turn on GPU validation and request to reserve a binding slot:
```code
-khronos_validation.gpu_validation = all,reserve_binding_slot
+lunarg_core_validation.gpu_validation = all,reserve_binding_slot
```
-Note: When using the core_validation layer, the above settings should use `lunarg_core_validation` in place of
-`khronos_validation`.
-
Some platforms do not support configuration of the validation layers with this configuration file.
Programs running on these platforms must then use the programmatic interface.
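For reference, here is a minimal sketch of that programmatic interface, assuming an application-provided `VkInstanceCreateInfo` named `instance_create_info` (illustrative only, not taken from the layer sources):

```C
// Enable GPU-Assisted Validation via VK_EXT_validation_features at instance creation.
VkValidationFeatureEnableEXT enables[] = {VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT};
VkValidationFeaturesEXT features = {};
features.sType = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT;
features.enabledValidationFeatureCount = 1;
features.pEnabledValidationFeatures = enables;
features.pNext = instance_create_info.pNext;   // preserve any existing pNext chain
instance_create_info.pNext = &features;
// vkCreateInstance(&instance_create_info, NULL, &instance) is then called as usual.
```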
@@ -165,7 +144,7 @@ Vulkan 1.1 is required to ensure that SPIR-V 1.3 is available.
### Descriptor Types
-The current implementation works with image, texel, and buffer descriptor types.
+The current implementation works with image and texel descriptor types.
A complete list appears later in this document.
### Descriptor Set Binding Limit
@@ -193,21 +172,12 @@ changing the application to free a slot is difficult.
### Device Memory
-GPU-Assisted Validation does allocate device memory for the error report buffers, and if
-descriptor indexing is enabled, for the input buffer of descriptor sizes and write state.
+GPU-Assisted Validation does allocate device memory for the error report buffers.
This can lead to a greater chance of memory exhaustion, especially in cases where
the application is trying to use all of the available memory.
The extra memory allocations are also not visible to the application, making it
impossible for the application to account for them.
-Note that if descriptor indexing is enabled, the input buffer size will be equal to
-(1 + (number_of_sets * 2) + (binding_count * 2) + descriptor_count) words of memory where
-binding_count is the binding number of the largest binding in the set.
-This means that sparsely populated sets and sets with a very large binding will cause
-the input buffer to be much larger than it could be with more densely packed binding numbers.
-As a best practice, when using GPU-Assisted Validation with descriptor indexing enabled,
-make sure descriptor bindings are densely packed.
-
If GPU-Assisted Validation device memory allocations fail, the device could become
unstable because some previously-built pipelines may contain instrumented shaders.
This is a condition that is nearly impossible to recover from, so the layer just
@@ -258,13 +228,11 @@ It isn't necessarily required for using the feature.
In general, the implementation does:
-* For each draw, dispatch, and trace rays call, allocate a buffer with enough device memory to hold a single debug output record written by the
+* For each draw call, allocate a block of device memory to hold a single debug output record written by the
instrumented shader code.
- If descriptor indexing is enabled, calculate the amount of memory needed to describe the descriptor array sizes and
- write states and allocate device memory and a buffer for input to the instrumented shader.
- The Vulkan Memory Allocator is used to handle this efficiently.
+ There is a device memory manager to handle this efficiently.
- There is probably little advantage in providing a larger output buffer in order to obtain more debug records.
+ There is probably little advantage in providing a larger buffer in order to obtain more debug records.
It is likely, especially for fragment shaders, that multiple errors occurring near each other have the same root cause.
A block is allocated on a per draw basis to make it possible to associate a shader debug error record with
@@ -273,14 +241,12 @@ In general, the implementation does:
An alternative design allocates this block on a per-device or per-queue basis and should work.
However, it is not possible to identify the command buffer that causes the error if multiple command buffers
are submitted at once.
-* For each draw, dispatch, and trace rays call, allocate a descriptor set and update it to point to the block of device memory just allocated.
- If descriptor indexing is enabled, also update the descriptor set to point to the allocated input buffer.
- Fill the input buffer with the size and write state information for each descriptor array.
+* For each draw call, allocate a descriptor set and update it to point to the block of device memory just allocated.
There is a descriptor set manager to handle this efficiently.
Also make an additional call down the chain to create a bind descriptor set command to bind our descriptor set at the desired index.
This has the effect of binding the device memory block belonging to this draw so that the GPU instrumentation
writes into this buffer for when the draw is executed.
- The end result is that each draw call has its own buffer containing GPU instrumentation error
+ The end result is that each draw call has its own device memory block containing GPU instrumentation error
records, if any occurred while executing that draw.
* Determine the descriptor set binding index that is eventually used to bind the descriptor set just allocated and updated.
Usually, it is `VkPhysicalDeviceLimits::maxBoundDescriptorSets` minus one.
@@ -289,7 +255,6 @@ In general, the implementation does:
* When creating a ShaderModule, pass the SPIR-V bytecode to the SPIR-V optimizer to perform the instrumentation pass.
Pass the desired descriptor set binding index to the optimizer via a parameter so that the instrumented
code knows which descriptor to use for writing error report data to the memory block.
- If descriptor indexing is enabled, turn on OOB and write state checking in the instrumentation pass.
Use the instrumented bytecode to create the ShaderModule.
* For all pipeline layouts, add our descriptor set to the layout, at the binding index determined earlier.
Fill any gaps with empty descriptor sets.
@@ -299,13 +264,10 @@ In general, the implementation does:
Instead, the layer leaves the layout alone and later replaces the instrumented shaders with
non-instrumented ones when the pipeline layout is later used to create a graphics pipeline.
The layer issues an error message to report this condition.
-* When creating a GraphicsPipeline, ComputePipeline, or RayTracingPipeline, check to see if the pipeline is using the debug binding index.
+* When creating a GraphicsPipeline, check to see if the pipeline is using the debug binding index.
If it is, replace the instrumented shaders in the pipeline with non-instrumented ones.
-* Before calling QueueSubmit, if descriptor indexing is enabled, check to see if there were any unwritten descriptors that were declared
- update-after-bind.
- If there were, update the write state of those elements.
* After calling QueueSubmit, perform a wait on the queue to allow the queue to finish executing.
- Then map and examine the device memory block for each draw or trace ray command that was submitted.
+ Then map and examine the device memory block for each draw that was submitted.
If any debug record is found, generate a validation error message for each record found.
The above describes only the high-level details of GPU-Assisted Validation operation.
@@ -313,21 +275,22 @@ More detail is found in the discussion of the individual hooked functions below.
### Initialization
-When the validation layer loads, it examines the user options from both the layer settings file and the
+When the core validation layer loads, it examines the user options from both the layer settings file and the
`VK_EXT_validation_features` extension.
Note that it also processes the subsumed `VK_EXT_validation_flags` extension for simple backwards compatibility.
-From these options, the layer sets instance-scope flags in the validation layer tracking data to indicate if
+From these options, the layer sets instance-scope flags in the core validation layer tracking data to indicate if
GPU-Assisted Validation has been requested, along with any other associated options.
### "Calling Down the Chain"
Much of the GPU-Assisted Validation implementation involves making "application level" Vulkan API
calls outside of the application's API usage to create resources and perform its required operations
-inside of the validation layer.
+inside of the core validation layer.
These calls are not routed up through the top of the loader/layer/driver call stack via the loader.
-Instead, they are simply dispatched via the containing layer's dispatch table.
+Instead, they are simply dispatched via the core validation layer's dispatch table.
-These calls therefore don't pass through any validation checks that occur before the gpu validation checks are run.
+These calls therefore don't pass through core validation or any other validation layers that may be
+loaded/dispatched prior to core validation.
This doesn't present any particular problem, but it does raise some issues:
* The additional API calls are not fully validated
@@ -336,22 +299,22 @@ This doesn't present any particular problem, but it does raise some issues:
To address this, the code can "just" be written carefully so that it is "valid" Vulkan,
which is hard to do.
- Or, this code can be checked by loading a khronos validation layer with
+ Or, this code can be checked by loading a core validation layer with
GPU validation enabled on top of "normal" standard validation in the
layer stack, which effectively validates the API usage of this code.
This sort of checking is performed by layer developers to check that the additional
Vulkan usage is valid.
This validation can be accomplished by:
-
- * Building the validation layer with a hack to force GPU-Assisted Validation to be enabled.
+
+ * Building the core validation layer with a hack to force GPU-Assisted Validation to be enabled.
Can't use the exposed mechanisms because we probably don't want it on twice.
- * Rename this layer binary to something else like "khronos_validation2" to keep it apart from the
- "normal" khronos validation.
+ * Rename this layer binary to something else like "core_validation2" to keep it apart from the
+ "normal" core validation.
* Create a new JSON file with the new layer name.
- * Set up the layer stack so that the "khronos_validation2" layer is on top of or before the actual khronos
- validation layer
- * Then run tests and check for validation errors pointing to API usage in the "khronos_validation2" layer.
+ * Set up the layer stack so that the "core_validation2" layer is on top of or before the standard validation
+ layer
+ * Then run tests and check for validation errors pointing to API usage in the "core_validation2" layer.
This should only need to be done after making any major changes to the implementation.
@@ -375,8 +338,8 @@ This doesn't present any particular problem, but it does raise some issues:
The GPU-Assisted Validation code is largely contained in one
[file](https://github.com/KhronosGroup/Vulkan-ValidationLayers/blob/master/layers/gpu_validation.cpp), with "hooks" in
-the other validation code that call functions in this file.
-These hooks in the validation code look something like this:
+the other core validation code that call functions in this file.
+These hooks in the core validation code look something like this:
```C
if (GetEnables(dev_data)->gpu_validation) {
@@ -384,11 +347,11 @@ if (GetEnables(dev_data)->gpu_validation) {
}
```
-The GPU-Assisted Validation code is linked into the shared library for the khronos and core validation layers.
+The GPU-Assisted Validation code is linked into the shared library for the core validation layer.
-#### Review of Khronos Validation Code Structure
+#### Review of Core Validation Code Structure
-Each function for a Vulkan API command intercepted in the khronos validation layer is usually split up
+Each function for a Vulkan API command intercepted in the core validation layer is usually split up
into several decomposed functions in order to organize the implementation.
These functions take the form of:
@@ -397,8 +360,11 @@ These functions take the form of:
* PreCallRecord&lt;foo&gt;: Perform state recording before calling down the chain
* PostCallRecord&lt;foo&gt;: Perform state recording after calling down the chain
-The GPU-Assisted Validation functions follow this pattern not by hooking into the top-level validation API shim, but
+The GPU-Assisted Validation functions follow this pattern not by hooking into the top-level core validation API shim, but
by hooking one of these decomposed functions.
+In a few unusual cases, the GPU-Assisted Validation function "takes over" the call to the driver (down the chain) and so
+must hook the top-level API shim.
+These functions deviate from the above naming convention to make their purpose more evident.
The design of each hooked function follows:
@@ -411,7 +377,7 @@ The design of each hooked function follows:
#### GpuPostCallRecordCreateDevice
* Determine and record (save in device state) the desired descriptor set binding index.
-* Initialize Vulkan Memory Allocator
+* Initialize device memory manager
* Determine error record block size based on the maximum size of the error record and alignment limits of the device.
* Initialize descriptor set manager
* Make a descriptor set layout to describe our descriptor set
@@ -423,31 +389,31 @@ The design of each hooked function follows:
* Destroy descriptor set layouts created in CreateDevice
* Clean up descriptor set manager
-* Clean up Vulkan Memory Allocator (VMA)
+* Clean up device memory manager
* Clean up device state
#### GpuAllocateValidationResources
-* For each Draw, Dispatch, or TraceRays call:
+* For each Draw or Dispatch call:
* Get a descriptor set from the descriptor set manager
- * Get an output buffer and associated memory from VMA
- * If descriptor indexing is enabled, get an input buffer and fill with descriptor array information
+ * Get a device memory block from the device memory manager
* Update (write) the descriptor set with the memory info
* Check to see if the layout for the pipeline just bound is using our selected bind index
* If no conflict, add an additional command to the command buffer to bind our descriptor set at our selected index
* Record the above objects in the per-CB state
-Note that the Draw and Dispatch calls include vkCmdDraw, vkCmdDrawIndexed, vkCmdDrawIndirect, vkCmdDrawIndexedIndirect, vkCmdDispatch, vkCmdDispatchIndirect, and vkCmdTraceRaysNV.
+Note that the Draw and Dispatch calls include vkCmdDraw, vkCmdDrawIndexed, vkCmdDrawIndirect, vkCmdDrawIndexedIndirect, vkCmdDispatch, and vkCmdDispatchIndirect.
#### GpuPreCallRecordFreeCommandBuffers
* For each command buffer:
- * Destroy the VMA buffer(s), releasing the memory
+ * Give the memory blocks back to the device memory manager
* Give the descriptor sets back to the descriptor set manager
* Clean up CB state
#### GpuOverrideDispatchCreateShaderModule
-This function is called from PreCallRecordCreateShaderModule.
+This function is called from CreateShaderModule and can't really be called from one of the decomposed functions
+because it replaces the SPIR-V, which requires modifying the bytecode passed down to the driver.
This routine sets up to call the SPIR-V optimizer to run the "BindlessCheckPass", replacing the original SPIR-V with the instrumented SPIR-V
which is then used in the call down the chain to CreateShaderModule.
@@ -476,7 +442,7 @@ This ensures that the original SPIR-V bytecode is available if we need it to rep
#### GpuOverrideDispatchCreatePipelineLayout
-This function is called through PreCallRecordCreatePipelineLayout.
+This is another function that replaces the parameters and so can't be called from a decomposed function.
* Check for a descriptor set binding index conflict.
* If there is one, issue an error message and leave the pipeline layout unmodified
@@ -487,11 +453,6 @@ This is function is called through PreCallRecordCreatePipelineLayout.
* Add our descriptor set layout as the last one in the new pipeline layout
* Create the pipeline layouts by calling down the chain with the original or modified create info
-#### GpuPreCallQueueSubmit
-
-* For each primary and secondary command buffer in the submission:
- * Call helper function to see if there are any update after bind descriptors whose write state may need to be updated
- and if so, map the input buffer and update the state.
#### GpuPostCallQueueSubmit
@@ -526,7 +487,7 @@ This function is called through PreCallRecordCreatePipelineLayout.
This tracker is used to attach the shader bytecode to the shader in case it is needed
later to get the shader source code debug info.
-The current shader module tracker in the validation code stores the bytecode,
+The current shader module tracker in core validation stores the bytecode,
but this tracker has the same life cycle as the shader module itself.
It is possible for the application to destroy the shader module after
creating a graphics pipeline and before submitting work that uses the shader,
@@ -552,10 +513,6 @@ to descriptors of the following types:
VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER
VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
- VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER
- VK_DESCRIPTOR_TYPE_STORAGE_BUFFER
- VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
- VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
Instrumentation is applied to the following SPIR-V operations:
@@ -592,12 +549,6 @@ Instrumentation is applied to the following SPIR-V operations:
OpImageSparseRead
OpImageWrite
-Also, OpLoad and OpStore with an AccessChain into a base of OpVariable with
-either Uniform or StorageBuffer storage class and a type which is either a
-struct decorated with Block, or a runtime or statically-sized array of such
-a struct.
-
-
### Shader Instrumentation Error Record Format
The instrumented shader code generates "error records" in a specific format.
@@ -655,41 +606,29 @@ The Instruction Index is the instruction within the original function at which t
For bindless, this will be the instruction which consumes the descriptor in question,
or the instruction that consumes the OpSampledImage that consumes the descriptor.
-The Stage is the integer value used in SPIR-V for each of the Execution Models:
-
-| Stage | Value |
-|---------------|:-----:|
-|Vertex |0 |
-|TessCtrl |1 |
-|TessEval |2 |
-|Geometry |3 |
-|Fragment |4 |
-|Compute |5 |
-|RayGenerationNV|5313 |
-|IntersectionNV |5314 |
-|AnyHitNV |5315 |
-|ClosestHitNV |5316 |
-|MissNV |5317 |
-|CallableNV |5318 |
+The Stage is the integer value used in SPIR-V for each of the Graphics Execution Models:
+
+| Stage | Value |
+|--------|:-----:|
+|Vertex |0 |
+|TessCtrl|1 |
+|TessEval|2 |
+|Geometry|3 |
+|Fragment|4 |
+|Compute |5 |
### Stage Specific Words
These are words that identify which "instance" of the shader the validation error occurred in.
Here are words for each stage:
-| Stage | Word 0 | Word 1 | Word 2 |
-|---------------|------------------|------------|------------|
-|Vertex |VertexID |InstanceID | unused |
-|Tess* |InvocationID |unused | unused |
-|Geometry |PrimitiveID |InvocationID| unused |
-|Fragment |FragCoord.x |FragCoord.y | unused |
-|Compute |GlobalInvocationID|unused | unused |
-|RayGenerationNV|LaunchIdNV.x |LaunchIdNV.y|LaunchIdNV.z|
-|IntersectionNV |LaunchIdNV.x |LaunchIdNV.y|LaunchIdNV.z|
-|AnyHitNV |LaunchIdNV.x |LaunchIdNV.y|LaunchIdNV.z|
-|ClosestHitNV |LaunchIdNV.x |LaunchIdNV.y|LaunchIdNV.z|
-|MissNV |LaunchIdNV.x |LaunchIdNV.y|LaunchIdNV.z|
-|CallableNV |LaunchIdNV.x |LaunchIdNV.y|LaunchIdNV.z|
+| Stage | Word 0 | Word 1 |
+|--------|------------------|------------|
+|Vertex |VertexID |InstanceID |
+|Tess* |InvocationID |unused |
+|Geometry|PrimitiveID |InvocationID|
+|Fragment|FragCoord.x |FragCoord.y |
+|Compute |GlobalInvocationID|unused |
"unused" means not relevant, but still present.
@@ -817,56 +756,6 @@ then added to the validation error message.
For example, if the OpLine line number is 15, and there is a "#line 10" on line 40
in the OpSource source, then line 45 in the OpSource contains the correct source line.
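A worked sketch of that adjustment (hypothetical variable names, not code from the layer):

```C
int hash_line_text_line = 40;  // OpSource line that contains "#line 10"
int hash_line_value     = 10;  // value supplied by that #line directive
int opline_number       = 15;  // line number reported by OpLine
int source_line = hash_line_text_line + (opline_number - hash_line_value);
// 40 + (15 - 10) == 45, matching the example above.
```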
-### Shader Instrumentation Input Record Format
-
-Although the input buffer is a linear array of unsigned integers, conceptually there are arrays within the linear array.
-
-Word 1 starts an array (denoted by sets_to_sizes) that is number_of_sets long, with an index that indicates the start of that set's entries in the sizes array.
-
-After the sets_to_sizes array is the sizes array, which contains the array size (or 1 if the descriptor is not an array) of each descriptor in the set. Bindings with no descriptor are filled in with zeros.
-
-After the sizes array is the sets_to_bindings array, which for each descriptor set indexes into the bindings_to_written array. Word 0 contains the index that is the start of the sets_to_bindings array.
-
-After the sets_to_bindings array is the bindings_to_written array, which for each binding in the set indexes to the start of that binding's entries in the written array.
-
-Lastly comes the written array, which indicates whether a given binding / array element has been written.
-
-Example:
-```
-Assume Descriptor Set 0 looks like: And Descriptor Set 1 looks like:
- Binding Binding
- 0 Array[3] 2 Array[4]
- 1 Non Array 3 Array[5]
- 3 Array[2]
-
-Here is what the input buffer should look like:
-
- Index of sets_to_sizes sizes sets_to_bindings bindings_to_written written
- sets_to_bindings
-
- 0 |11| sets_to_bindings 1 |3| set 0 sizes start at 3 3 |3| S0B0 11 |13| set 0 bindings start at 13 13 |21| S0B0 21 |1| S0B0I0 was written
- starts at 11 2 |7| set 1 sizes start at 7 4 |1| S0B1 12 |17| set 1 bindings start at 17 14 |24| S0B1 22 |1| S0B0I1 was written
- 5 |0| S0B2 15 |0 | S0B2 23 |1| S0B0I3 was written
- 6 |2| S0B3 16 |25| S0B3 24 |1| S0B1 was written
- 7 |0| S1B0 17 |0 | S1B0 25 |1| S0B3I0 was written
- 8 |0| S1B1 18 |0 | S1B1 26 |1| S0B3I1 was written
- 9 |4| S1B2 19 |27| S1B2 27 |0| S1B2I0 was not written
- 10 |5| S1B3 20 |31| S1B3 28 |1| S1B2I1 was written
- 29 |1| S1B2I2 was written
- 30 |1| S1B2I3 was written
- 31 |1| S1B3I0 was written
- 32 |1| S1B3I1 was written
- 33 |1| S1B3I2 was written
- 34 |1| S1B3I3 was written
- 35 |1| S1B3I4 was written
-```
-Alternately, you could describe the array size and write state data as:
-(set = s, binding = b, index = i) is not initialized if
-```
-Input[ i + Input[ b + Input[ s + Input[ Input[0] ] ] ] ] == 0
-```
-and the array's size = Input[ Input[ s + 1 ] + b ]
-
## GPU-Assisted Validation Testing
Validation Layer Tests (VLTs) exist for GPU-Assisted Validation.
diff --git a/docs/khronos_validation_layer.md b/docs/khronos_validation_layer.md
deleted file mode 100644
index 54dcdaade..000000000
--- a/docs/khronos_validation_layer.md
+++ /dev/null
@@ -1,25 +0,0 @@
-<!-- markdownlint-disable MD041 -->
-<!-- Copyright 2015-2019 LunarG, Inc. -->
-
-[![Khronos Vulkan][1]][2]
-
-[1]: https://vulkan.lunarg.com/img/Vulkan_100px_Dec16.png "https://www.khronos.org/vulkan/"
-[2]: https://www.khronos.org/vulkan/
-
-# VK\_LAYER\_KHRONOS\_validation
-
-[![Creative Commons][3]][4]
-
-[3]: https://i.creativecommons.org/l/by-nd/4.0/88x31.png "Creative Commons License"
-[4]: https://creativecommons.org/licenses/by-nd/4.0/
-The `VK_LAYER_KHRONOS_validation` layer encompasses the complete functionality that had previously been provided in the following layers:
-
-- VK_LAYER_GOOGLE_threading
-- VK_LAYER_LUNARG_parameter_validation
-- VK_LAYER_LUNARG_object_tracker
-- VK_LAYER_LUNARG_core_validation
-- VK_LAYER_GOOGLE_unique_objects
-
-
-Details for the functionality contained in each of these areas can be viewed in their respective layer detail documents, located in this directory.
-
diff --git a/external/x64/lib/vulkan-1.lib b/external/x64/lib/vulkan-1.lib
deleted file mode 100644
index 3e23a7bcf..000000000
--- a/external/x64/lib/vulkan-1.lib
+++ /dev/null
Binary files differ
diff --git a/external/x86/lib/vulkan-1.lib b/external/x86/lib/vulkan-1.lib
deleted file mode 100644
index 3caa7ba04..000000000
--- a/external/x86/lib/vulkan-1.lib
+++ /dev/null
Binary files differ
diff --git a/layers/CMakeLists.txt b/layers/CMakeLists.txt
index c908ffc1a..eb3dbe394 100644
--- a/layers/CMakeLists.txt
+++ b/layers/CMakeLists.txt
@@ -44,38 +44,46 @@ else()
message(FATAL_ERROR "Unsupported Platform!")
endif()
-# Configure installation of source files that are dependencies of other repos.
-if(BUILD_LAYER_SUPPORT_FILES)
- set(LAYER_UTIL_FILES
- cast_utils.h
- hash_util.h
- hash_vk_types.h
- vk_format_utils.h
- vk_format_utils.cpp
- vk_layer_config.h
- vk_layer_config.cpp
- vk_layer_data.h
- vk_layer_extension_utils.h
- vk_layer_extension_utils.cpp
- vk_layer_logging.h
- vk_layer_utils.h
- vk_layer_utils.cpp
- vk_loader_layer.h
- vk_loader_platform.h
- generated/vk_validation_error_messages.h
- generated/vk_layer_dispatch_table.h
- generated/vk_dispatch_table_helper.h
- generated/vk_safe_struct.h
- generated/vk_safe_struct.cpp
- generated/vk_enum_string_helper.h
- generated/vk_object_types.h
- generated/vk_extension_helper.h
- generated/vk_typemap_helper.h)
- install(FILES ${LAYER_UTIL_FILES} DESTINATION ${CMAKE_INSTALL_INCLUDEDIR})
+# Custom targets for generated validation layer helper file dependencies
+add_custom_target(spirv_tools_revision_file DEPENDS spirv_tools_commit_id.h)
+set_target_properties(spirv_tools_revision_file PROPERTIES FOLDER ${LAYERS_HELPER_FOLDER})
+
+if(BUILD_LAYERS)
+ # generate header file containing commit IDs of external dependencies
+ add_custom_command(OUTPUT spirv_tools_commit_id.h
+ COMMAND ${PYTHON_EXECUTABLE} ${SCRIPTS_DIR}/external_revision_generator.py
+ --from_uuid -s SPIRV_TOOLS_COMMIT_ID -o spirv_tools_commit_id.h
+ DEPENDS ${SCRIPTS_DIR}/external_revision_generator.py)
endif()
+# Configure installation of source files that are dependencies of other repos.
+set(LAYER_UTIL_FILES
+ hash_util.h
+ hash_vk_types.h
+ vk_format_utils.h
+ vk_format_utils.cpp
+ vk_layer_config.h
+ vk_layer_config.cpp
+ vk_layer_data.h
+ vk_layer_extension_utils.h
+ vk_layer_extension_utils.cpp
+ vk_layer_logging.h
+ vk_layer_utils.h
+ vk_layer_utils.cpp
+ vk_loader_layer.h
+ vk_loader_platform.h
+ vk_validation_error_messages.h
+ ${PROJECT_BINARY_DIR}/vk_layer_dispatch_table.h
+ ${PROJECT_BINARY_DIR}/vk_dispatch_table_helper.h
+ ${PROJECT_BINARY_DIR}/vk_safe_struct.h
+ ${PROJECT_BINARY_DIR}/vk_safe_struct.cpp
+ ${PROJECT_BINARY_DIR}/vk_enum_string_helper.h
+ ${PROJECT_BINARY_DIR}/vk_object_types.h
+ ${PROJECT_BINARY_DIR}/vk_extension_helper.h
+ ${PROJECT_BINARY_DIR}/vk_typemap_helper.h)
+install(FILES ${LAYER_UTIL_FILES} DESTINATION ${CMAKE_INSTALL_INCLUDEDIR})
+
set(TARGET_NAMES
- VkLayer_khronos_validation
VkLayer_core_validation
VkLayer_object_lifetimes
VkLayer_unique_objects
@@ -115,7 +123,7 @@ if(WIN32)
add_library(VkLayer_${target} SHARED ${ARGN} VkLayer_${target}.def)
target_compile_definitions(VkLayer_${target} PUBLIC ${LAYER_COMPILE_DEFINITIONS})
target_link_libraries(VkLayer_${target} PRIVATE VkLayer_utils)
- add_dependencies(VkLayer_${target} VkLayer_utils)
+ add_dependencies(VkLayer_${target} VulkanVL_generate_helper_files VulkanVL_generate_chassis_files VkLayer_utils)
install(TARGETS VkLayer_${target} DESTINATION ${CMAKE_INSTALL_LIBDIR})
endmacro()
elseif(APPLE)
@@ -123,7 +131,7 @@ elseif(APPLE)
add_library(VkLayer_${target} SHARED ${ARGN})
target_compile_definitions(VkLayer_${target} PUBLIC ${LAYER_COMPILE_DEFINITIONS})
target_link_libraries(VkLayer_${target} PRIVATE VkLayer_utils)
- add_dependencies(VkLayer_${target} VkLayer_utils)
+ add_dependencies(VkLayer_${target} VulkanVL_generate_helper_files VulkanVL_generate_chassis_files VkLayer_utils)
set_target_properties(VkLayer_${target}
PROPERTIES LINK_FLAGS
"-Wl"
@@ -136,21 +144,19 @@ else(UNIX AND NOT APPLE) # i.e.: Linux
add_library(VkLayer_${target} SHARED ${ARGN})
target_compile_definitions(VkLayer_${target} PUBLIC ${LAYER_COMPILE_DEFINITIONS})
target_link_libraries(VkLayer_${target} PRIVATE VkLayer_utils)
- add_dependencies(VkLayer_${target} VkLayer_utils)
- set_target_properties(VkLayer_${target} PROPERTIES LINK_FLAGS "-Wl,--version-script=${CMAKE_CURRENT_SOURCE_DIR}/libVkLayer_${target}.map,-Bsymbolic,--exclude-libs,ALL")
+ add_dependencies(VkLayer_${target} VulkanVL_generate_helper_files VulkanVL_generate_chassis_files VkLayer_utils)
+ set_target_properties(VkLayer_${target} PROPERTIES LINK_FLAGS "-Wl,-Bsymbolic,--exclude-libs,ALL")
install(TARGETS VkLayer_${target} DESTINATION ${CMAKE_INSTALL_LIBDIR})
endmacro()
endif()
-include_directories(${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_CURRENT_SOURCE_DIR}/generated ${VulkanHeaders_INCLUDE_DIR})
+include_directories(${CMAKE_CURRENT_SOURCE_DIR} ${VulkanHeaders_INCLUDE_DIR})
if(WIN32)
# Applies to all configurations
add_definitions(-D_CRT_SECURE_NO_WARNINGS)
# Avoid: fatal error C1128: number of sections exceeded object file format limit: compile with /bigobj
add_compile_options("/bigobj")
- # Allow Windows to use multiprocessor compilation
- add_compile_options(/MP)
# Turn off transitional "changed behavior" warning message for Visual Studio versions prior to 2015. The changed behavior is
# that constructor initializers are now fixed to clear the struct members.
add_compile_options("$<$<AND:$<CXX_COMPILER_ID:MSVC>,$<VERSION_LESS:$<CXX_COMPILER_VERSION>,19>>:/wd4351>")
@@ -164,61 +170,44 @@ if(CMAKE_C_COMPILER_ID MATCHES "Clang")
set_source_files_properties(parameter_validation.cpp PROPERTIES COMPILE_FLAGS "-Wno-unused-const-variable")
endif()
-set(CHASSIS_LIBRARY_FILES
- generated/chassis.cpp
- generated/layer_chassis_dispatch.cpp)
-
-set(CORE_VALIDATION_LIBRARY_FILES
- core_validation.cpp
- drawdispatch.cpp
- convert_to_renderpass2.cpp
- descriptor_sets.cpp
- buffer_validation.cpp
- shader_validation.cpp
- gpu_validation.cpp
- xxhash.c)
-
-set(OBJECT_LIFETIMES_LIBRARY_FILES
- generated/object_tracker.cpp
- generated/object_tracker.h
- object_tracker_utils.cpp)
-
-set(THREAD_SAFETY_LIBRARY_FILES
- generated/thread_safety.cpp
- generated/thread_safety.h)
-
-set(STATELESS_VALIDATION_LIBRARY_FILES
- generated/parameter_validation.cpp
- generated/parameter_validation.h
- parameter_validation_utils.cpp)
+GenerateFromVkXml(thread_safety_generator.py thread_safety.h)
+GenerateFromVkXml(thread_safety_generator.py thread_safety.cpp)
+GenerateFromVkXml(parameter_validation_generator.py parameter_validation.cpp)
+GenerateFromVkXml(parameter_validation_generator.py parameter_validation.h)
+GenerateFromVkXml(dispatch_table_helper_generator.py vk_dispatch_table_helper.h)
+GenerateFromVkXml(object_tracker_generator.py object_tracker.cpp)
+GenerateFromVkXml(object_tracker_generator.py object_tracker.h)
+GenerateFromVkXml(layer_chassis_generator.py chassis.cpp)
+GenerateFromVkXml(layer_chassis_generator.py chassis.h)
+GenerateFromVkXml(layer_chassis_dispatch_generator.py layer_chassis_dispatch.h)
+GenerateFromVkXml(layer_chassis_dispatch_generator.py layer_chassis_dispatch.cpp)
-set(BEST_PRACTICES_LIBRARY_FILES
- best_practices.cpp
- best_practices.h)
+# This target causes the chassis source files to be generated.
+add_custom_target(VulkanVL_generate_chassis_files
+ DEPENDS chassis.cpp
+ chassis.h
+ layer_chassis_dispatch.h
+ layer_chassis_dispatch.cpp)
+set_target_properties(VulkanVL_generate_chassis_files PROPERTIES FOLDER ${LAYERS_HELPER_FOLDER})
+# Inter-layer dependencies are temporarily necessary to serialize layer builds which avoids contention for common generated files
if(BUILD_LAYERS)
- AddVkLayer(core_validation "BUILD_CORE_VALIDATION"
- ${CHASSIS_LIBRARY_FILES}
- ${CORE_VALIDATION_LIBRARY_FILES})
- AddVkLayer(object_lifetimes "BUILD_OBJECT_TRACKER" ${CHASSIS_LIBRARY_FILES} ${OBJECT_LIFETIMES_LIBRARY_FILES})
- AddVkLayer(thread_safety "BUILD_THREAD_SAFETY" ${CHASSIS_LIBRARY_FILES} ${THREAD_SAFETY_LIBRARY_FILES})
- AddVkLayer(stateless_validation "BUILD_PARAMETER_VALIDATION" ${CHASSIS_LIBRARY_FILES} ${STATELESS_VALIDATION_LIBRARY_FILES})
- AddVkLayer(unique_objects "LAYER_CHASSIS_CAN_WRAP_HANDLES" ${CHASSIS_LIBRARY_FILES})
- AddVkLayer(khronos_validation "BUILD_KHRONOS_VALIDATION;BUILD_CORE_VALIDATION;BUILD_OBJECT_TRACKER;BUILD_THREAD_SAFETY;BUILD_PARAMETER_VALIDATION;LAYER_CHASSIS_CAN_WRAP_HANDLES;BUILD_BEST_PRACTICES"
- ${CHASSIS_LIBRARY_FILES}
- ${CORE_VALIDATION_LIBRARY_FILES}
- ${OBJECT_LIFETIMES_LIBRARY_FILES}
- ${THREAD_SAFETY_LIBRARY_FILES}
- ${STATELESS_VALIDATION_LIBRARY_FILES}
- ${BEST_PRACTICES_LIBRARY_FILES})
+ AddVkLayer(core_validation "BUILD_CORE_VALIDATION" chassis.cpp layer_chassis_dispatch.cpp core_validation.cpp convert_to_renderpass2.cpp descriptor_sets.cpp buffer_validation.cpp shader_validation.cpp gpu_validation.cpp xxhash.c)
+ add_dependencies(VkLayer_core_validation VulkanVL_generate_chassis_files)
+ AddVkLayer(object_lifetimes "BUILD_OBJECT_TRACKER" object_tracker.cpp object_tracker.h object_tracker_utils.cpp chassis.cpp layer_chassis_dispatch.cpp)
+ add_dependencies(VkLayer_object_lifetimes VkLayer_core_validation)
+ AddVkLayer(thread_safety "BUILD_THREAD_SAFETY" thread_safety.cpp thread_safety.h chassis.cpp layer_chassis_dispatch.cpp)
+ add_dependencies(VkLayer_thread_safety VkLayer_object_lifetimes)
+ AddVkLayer(unique_objects "LAYER_CHASSIS_CAN_WRAP_HANDLES" chassis.cpp layer_chassis_dispatch.cpp)
+ add_dependencies(VkLayer_unique_objects VkLayer_thread_safety)
+ AddVkLayer(stateless_validation "BUILD_PARAMETER_VALIDATION" parameter_validation.cpp parameter_validation.h parameter_validation_utils.cpp chassis.cpp layer_chassis_dispatch.cpp)
+ add_dependencies(VkLayer_stateless_validation VkLayer_unique_objects)
- # Core validation and Khronos validation have additional dependencies
+ # Core validation has additional dependencies
target_include_directories(VkLayer_core_validation PRIVATE ${GLSLANG_SPIRV_INCLUDE_DIR})
target_include_directories(VkLayer_core_validation PRIVATE ${SPIRV_TOOLS_INCLUDE_DIR})
target_link_libraries(VkLayer_core_validation PRIVATE ${SPIRV_TOOLS_LIBRARIES})
- target_include_directories(VkLayer_khronos_validation PRIVATE ${GLSLANG_SPIRV_INCLUDE_DIR})
- target_include_directories(VkLayer_khronos_validation PRIVATE ${SPIRV_TOOLS_INCLUDE_DIR})
- target_link_libraries(VkLayer_khronos_validation PRIVATE ${SPIRV_TOOLS_LIBRARIES})
+ add_dependencies(VkLayer_core_validation spirv_tools_revision_file)
    # The output file needs Unix "/" separators or Windows "\" separators. On top of that, Windows separators actually need to be
    # doubled because the JSON format uses backslash escapes
diff --git a/layers/README.md b/layers/README.md
index 4ed1e186c..eb9ab02e6 100644
--- a/layers/README.md
+++ b/layers/README.md
@@ -36,35 +36,33 @@ Note that some layers are code-generated and will therefore exist in the directo
`include/vkLayer.h` - header file for layer code.
-### Standard Validation
-NOTE: This meta-layer is being deprecated -- users should load the Khronos validation layer (name = `VK_LAYER_KHRONOS_validation`) in its place.
-This is a meta-layer managed by the loader. (name = `VK_LAYER_LUNARG_standard_validation`) - specifying this layer name will cause the loader to load the standard validation layer: `VK_LAYER_KHRONOS_validation`. Other layers can be specified and the loader will remove duplicates.
-
-### The Khronos Validation Layer
+### Layer Details
+For complete details of current validation layers, including all of the validation checks that they perform, please refer to the document `layers/vk_validation_layer_details.md`. Below is a brief overview of each layer.
-This layer emcompasses all of the functionality that used to be contained in the following layers: VK_LAYER_GOOGLE_threading, VK_LAYER_LUNARG_parameter_validation, VK_LAYER_LUNARG_object_tracker, VK_LAYER_LUNARG_core_validation, and VK_LAYER_GOOGLE_unique_objects. Each of these functional areas can still disabled individually, and are described below.
+### Standard Validation
+This is a meta-layer managed by the loader (name = `VK_LAYER_LUNARG_standard_validation`). Specifying this layer name will cause the loader to load all of the standard validation layers (listed below) in the following optimal order: `VK_LAYER_GOOGLE_threading`, `VK_LAYER_LUNARG_parameter_validation`, `VK_LAYER_LUNARG_object_tracker`, `VK_LAYER_LUNARG_core_validation`, and `VK_LAYER_GOOGLE_unique_objects`. Other layers can be specified and the loader will remove duplicates.
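As a usage sketch (the exact invocation depends on your platform and assumes the loader can already find the standard validation layer manifests), the meta-layer is requested by name like any other instance layer:
   `export VK\_INSTANCE\_LAYERS=VK\_LAYER\_LUNARG\_standard\_validation`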
### Object Validation and Statistics
-The object lifetime tracking will track object creation, use, and destruction. As objects are created their handles are stored in a data structure. As objects are used the layer verifies they exist in the data structure and output errors for unknown objects. As objects are destroyed they are removed from the data structure. At `vkDestroyDevice()` and `vkDestroyInstance()` times, if any objects have not been destroyed they are reported as leaked objects. If a debug callback function is registered this layer will use callback function(s) for reporting, otherwise it will use stdout.
+(build_dir)/layers/object_tracker.cpp (name=`VK_LAYER_LUNARG_object_tracker`) - Track object creation, use, and destruction. As objects are created they are stored in a map. As objects are used the layer verifies they exist in the map, flagging errors for unknown objects. As objects are destroyed they are removed from the map. At `vkDestroyDevice()` and `vkDestroyInstance()` times, if any objects have not been destroyed they are reported as leaked objects. If a debug callback function is registered, this layer will use the callback function(s) for reporting; otherwise it uses stdout.
### Validate API State and Shaders
-The set of core checks does the bulk of the API validation that requires storing state. Some of the state it tracks includes the Descriptor Set, Pipeline State, Shaders, and dynamic state, and memory objects and bindings. It performs some point validation as states are created and used, and further validation Draw call and QueueSubmit time. Of primary interest is making sure that the resources bound to Descriptor Sets correctly align with the layout specified for the Set. Also, all of the image and buffer layouts are validated to make sure explicit layout transitions are properly managed. Related to memory, core\_validation includes tracking object bindings, memory hazards, and memory object lifetimes. It also validates several other hazard-related issues related to command buffers, fences, and memory mapping. Additionally core\_validation include shader validation (formerly separate shader\_checker layer) that inspects the SPIR-V shader images and fixed function pipeline stages at PSO creation time. It flags errors when inconsistencies are found across interfaces between shader stages. The exact behavior of the checks depends on the pair of pipeline stages involved. If a debug callback function is registered, this layer will use callback function(s) for reporting, otherwise uses stdout. This layer also validates correct usage of image- and buffer-related APIs, including image and buffer parameters, formats, and correct use.
+layers/core\_validation.cpp (name=`VK_LAYER_LUNARG_core_validation`) - The core\_validation layer does the bulk of the API validation that requires storing state. Some of the state it tracks includes the Descriptor Set, Pipeline State, Shaders, dynamic state, and memory objects and bindings. It performs some point validation as states are created and used, and further validation at Draw call and QueueSubmit time. Of primary interest is making sure that the resources bound to Descriptor Sets correctly align with the layout specified for the Set. Also, all of the image and buffer layouts are validated to make sure explicit layout transitions are properly managed. Related to memory, core\_validation includes tracking object bindings, memory hazards, and memory object lifetimes. It also validates several other hazard-related issues related to command buffers, fences, and memory mapping. Additionally, core\_validation includes shader validation (formerly the separate shader\_checker layer) that inspects the SPIR-V shader images and fixed function pipeline stages at PSO creation time. It flags errors when inconsistencies are found across interfaces between shader stages. The exact behavior of the checks depends on the pair of pipeline stages involved. If a debug callback function is registered, this layer will use the callback function(s) for reporting; otherwise it uses stdout. This layer also validates correct usage of image- and buffer-related APIs, including image and buffer parameters, formats, and correct use.
-### Stateless parameter checking
-The stateless validation checks the input parameters to API calls for validity. If a debug callback function is registered, this layer will use callback function(s) for reporting otherwise uses stdout.
+### Check parameters
+(build_dir)/layers/parameter_validation.cpp (name=`VK_LAYER_LUNARG_parameter_validation`) - Check the input parameters to API calls for validity. If a debug callback function is registered, this layer will use the callback function(s) for reporting; otherwise it uses stdout.
-### Thread Safety Checking
-The thread-safety validation will check the multithreading of API calls for validity. Currently this checks that only one thread at a time uses an object in free-threaded API calls. If a debug callback function is registered, this layer will use callback function(s) for reporting, otherwise uses stdout.
+### Check threading
+(build_dir)/layers/threading.cpp (name=`VK_LAYER_GOOGLE_threading`) - Check multithreading of API calls for validity. Currently this checks that only one thread at a time uses an object in free-threaded API calls. If a debug callback function is registered, this layer will use the callback function(s) for reporting; otherwise it uses stdout.
-### Handle Wrapping
-The khronos layer framework also supports Vulkan handle wrapping. The Vulkan specification allows objects to have non-unique handles. This makes tracking object lifetimes difficult in that it is unclear which object is being referenced on deletion. When this functionalty is enabled (as it is by default) it will alias all objects with a unique object representation, allowing proper object lifetime tracking. This functionality may interfere with the development of proprietary Vulkan extension development, and is not strictly required for the proper operation of validation. One sign that it is needed is the appearance of errors emitted from the object_tracker layer indicating the use of previously destroyed objects.
+### Unique Objects
+(build_dir)/layers/layer_chassis_dispatch.cpp (name=`VK_LAYER_GOOGLE_unique_objects`) - The Vulkan specification allows objects to have non-unique handles. This makes tracking object lifetimes difficult in that it is unclear which object is being referenced on deletion. The unique_objects layer was created to address this problem. If loaded in the correct position (last, which is closest to the display driver), it will alias all objects with a unique object representation, allowing proper object lifetime tracking. This layer does no validation on its own and may not be required for the proper operation of all layers or all platforms. One sign that it is needed is the appearance of errors emitted from the object_tracker layer indicating the use of previously destroyed objects.
## Using Layers
1. Build VK loader using normal steps (cmake and make)
-2. Place `libVkLayer_khronos_validation.so` in the same directory as your VK test or app:
+2. Place `libVkLayer_<name>.so` in the same directory as your VK test or app:
- `cp build/layer/libVkLayer_khronos_validation.so build/tests`
+ `cp build/layer/libVkLayer_threading.so build/tests`
This is required for the Loader to be able to scan and enumerate your library.
Alternatively, use the `VK_LAYER_PATH` environment variable to specify where the layer libraries reside.
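   For example, a minimal sketch assuming both the layer libraries and their JSON manifests were built into `build/layer` (adjust the path to match your build tree):
   `export VK\_LAYER\_PATH=$PWD/build/layer`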
@@ -75,7 +73,7 @@ The khronos layer framework also supports Vulkan handle wrapping. The Vulkan sp
4. Specify which layers to activate using environment variables.
- `export VK\_INSTANCE\_LAYERS=VK\_LAYER\_KHRONOS\_validation`
+ `export VK\_INSTANCE\_LAYERS=VK\_LAYER\_LUNARG\_parameter\_validation:VK\_LAYER\_LUNARG\_core\_validation`
`cd build/tests; ./vkinfo`
diff --git a/layers/VkLayer_khronos_validation.def b/layers/VkLayer_khronos_validation.def
deleted file mode 100644
index 4cbfcf42a..000000000
--- a/layers/VkLayer_khronos_validation.def
+++ /dev/null
@@ -1,30 +0,0 @@
-
-;;;; Begin Copyright Notice ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
-;
-; Copyright (c) 2015-2019 The Khronos Group Inc.
-; Copyright (c) 2015-2019 Valve Corporation
-; Copyright (c) 2015-2019 LunarG, Inc.
-;
-; Licensed under the Apache License, Version 2.0 (the "License");
-; you may not use this file except in compliance with the License.
-; You may obtain a copy of the License at
-;
-; http://www.apache.org/licenses/LICENSE-2.0
-;
-; Unless required by applicable law or agreed to in writing, software
-; distributed under the License is distributed on an "AS IS" BASIS,
-; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-; See the License for the specific language governing permissions and
-; limitations under the License.
-;
-; Author: Mark Lobodzinski <mark@LunarG.com>
-;
-;;;; End Copyright Notice ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
-
-LIBRARY VkLayer_khronos_validation
-EXPORTS
-vkGetInstanceProcAddr
-vkGetDeviceProcAddr
-vkEnumerateInstanceLayerProperties
-vkEnumerateInstanceExtensionProperties
-vkNegotiateLoaderLayerInterfaceVersion
diff --git a/layers/best_practices.cpp b/layers/best_practices.cpp
deleted file mode 100644
index 85d1dc35a..000000000
--- a/layers/best_practices.cpp
+++ /dev/null
@@ -1,393 +0,0 @@
-/* Copyright (c) 2015-2019 The Khronos Group Inc.
- * Copyright (c) 2015-2019 Valve Corporation
- * Copyright (c) 2015-2019 LunarG, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * Author: Camden Stocker <camden@lunarg.com>
- */
-
-#include "best_practices.h"
-#include "layer_chassis_dispatch.h"
-
-#include <string>
-#include <iomanip>
-
-// get the API name is proper format
-std::string BestPractices::GetAPIVersionName(uint32_t version) {
- std::stringstream version_name;
- uint32_t major = VK_VERSION_MAJOR(version);
- uint32_t minor = VK_VERSION_MINOR(version);
- uint32_t patch = VK_VERSION_PATCH(version);
-
- version_name << major << "." << minor << "." << patch << " (0x" << std::setfill('0') << std::setw(8) << std::hex << version
- << ")";
-
- return version_name.str();
-}
-
-bool BestPractices::PreCallValidateCreateInstance(const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator,
- VkInstance* pInstance) {
- bool skip = false;
-
- for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
- if (white_list(pCreateInfo->ppEnabledExtensionNames[i], kDeviceExtensionNames)) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- layer_name.c_str(), "vkCreateInstance(): Attempting to enable Device Extension %s at CreateInstance time.",
- pCreateInfo->ppEnabledExtensionNames[i]);
- }
- }
-
- return skip;
-}
-
-void BestPractices::PreCallRecordCreateInstance(const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator,
- VkInstance* pInstance) {
- instance_api_version = pCreateInfo->pApplicationInfo->apiVersion;
-}
-
-bool BestPractices::PreCallValidateCreateDevice(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator, VkDevice* pDevice) {
- bool skip = false;
-
- // get API version of physical device passed when creating device.
- VkPhysicalDeviceProperties physical_device_properties{};
- DispatchGetPhysicalDeviceProperties(physicalDevice, &physical_device_properties);
- device_api_version = physical_device_properties.apiVersion;
-
- // check api versions and warn if instance api Version is higher than version on device.
- if (instance_api_version > device_api_version) {
- std::string inst_api_name = GetAPIVersionName(instance_api_version);
- std::string dev_api_name = GetAPIVersionName(device_api_version);
-
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, layer_name.c_str(),
- "vkCreateDevice(): API Version of current instance, %s is higher than API Version on device, %s",
- inst_api_name.c_str(), dev_api_name.c_str());
- }
-
- for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
- if (white_list(pCreateInfo->ppEnabledExtensionNames[i], kInstanceExtensionNames)) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- layer_name.c_str(), "vkCreateDevice(): Attempting to enable Instance Extension %s at CreateDevice time.",
- pCreateInfo->ppEnabledExtensionNames[i]);
- }
- }
-
- return skip;
-}
-
-bool BestPractices::PreCallValidateCreateBuffer(VkDevice device, const VkBufferCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer) {
- bool skip = false;
-
- if ((pCreateInfo->queueFamilyIndexCount > 1) && (pCreateInfo->sharingMode == VK_SHARING_MODE_EXCLUSIVE)) {
- std::stringstream bufferHex;
- bufferHex << "0x" << std::hex << HandleToUint64(pBuffer);
-
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, layer_name.c_str(),
- "Warning: Buffer (%s) specifies a sharing mode of VK_SHARING_MODE_EXCLUSIVE while specifying multiple queues "
- "(queueFamilyIndexCount of %" PRIu32 ").",
- bufferHex.str().c_str(), pCreateInfo->queueFamilyIndexCount);
- }
-
- return skip;
-}
-
-bool BestPractices::PreCallValidateCreateImage(VkDevice device, const VkImageCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator, VkImage* pImage) {
- bool skip = false;
-
- if ((pCreateInfo->queueFamilyIndexCount > 1) && (pCreateInfo->sharingMode == VK_SHARING_MODE_EXCLUSIVE)) {
- std::stringstream imageHex;
- imageHex << "0x" << std::hex << HandleToUint64(pImage);
-
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, layer_name.c_str(),
- "Warning: Image (%s) specifies a sharing mode of VK_SHARING_MODE_EXCLUSIVE while specifying multiple queues "
- "(queueFamilyIndexCount of %" PRIu32 ").",
- imageHex.str().c_str(), pCreateInfo->queueFamilyIndexCount);
- }
-
- return skip;
-}
-
-bool BestPractices::PreCallValidateCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain) {
- bool skip = false;
-
- if ((pCreateInfo->queueFamilyIndexCount > 1) && (pCreateInfo->imageSharingMode == VK_SHARING_MODE_EXCLUSIVE)) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, layer_name.c_str(),
- "Warning: A Swapchain is being created which specifies a sharing mode of VK_SHARING_MODE_EXCULSIVE while "
- "specifying multiple queues (queueFamilyIndexCount of %" PRIu32 ").",
- pCreateInfo->queueFamilyIndexCount);
- }
-
- return skip;
-}
-
-bool BestPractices::PreCallValidateCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
- const VkSwapchainCreateInfoKHR* pCreateInfos,
- const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains) {
- bool skip = false;
-
- for (uint32_t i = 0; i < swapchainCount; i++) {
- if ((pCreateInfos[i].queueFamilyIndexCount > 1) && (pCreateInfos[i].imageSharingMode == VK_SHARING_MODE_EXCLUSIVE)) {
- skip |= log_msg(
- report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, layer_name.c_str(),
- "Warning: A shared swapchain (index %" PRIu32
- ") is being created which specifies a sharing mode of VK_SHARING_MODE_EXCLUSIVE while specifying multiple "
- "queues (queueFamilyIndexCount of %" PRIu32 ").",
- i, pCreateInfos[i].queueFamilyIndexCount);
- }
- }
-
- return skip;
-}
-
-bool BestPractices::PreCallValidateCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass) {
- bool skip = false;
-
- for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
- VkFormat format = pCreateInfo->pAttachments[i].format;
- if (pCreateInfo->pAttachments[i].initialLayout == VK_IMAGE_LAYOUT_UNDEFINED) {
- if ((FormatIsColor(format) || FormatHasDepth(format)) &&
- pCreateInfo->pAttachments[i].loadOp == VK_ATTACHMENT_LOAD_OP_LOAD) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- layer_name.c_str(),
- "Render pass has an attachment with loadOp == VK_ATTACHMENT_LOAD_OP_LOAD and "
- "initialLayout == VK_IMAGE_LAYOUT_UNDEFINED. This is probably not what you "
- "intended. Consider using VK_ATTACHMENT_LOAD_OP_DONT_CARE instead if the "
- "image truely is undefined at the start of the render pass.");
- }
- if (FormatHasStencil(format) && pCreateInfo->pAttachments[i].stencilLoadOp == VK_ATTACHMENT_LOAD_OP_LOAD) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- layer_name.c_str(),
- "Render pass has an attachment with stencilLoadOp == VK_ATTACHMENT_LOAD_OP_LOAD "
- "and initialLayout == VK_IMAGE_LAYOUT_UNDEFINED. This is probably not what you "
- "intended. Consider using VK_ATTACHMENT_LOAD_OP_DONT_CARE instead if the "
- "image truely is undefined at the start of the render pass.");
- }
- }
- }
-
- for (uint32_t dependency = 0; dependency < pCreateInfo->dependencyCount; dependency++) {
- skip |= CheckPipelineStageFlags("vkCreateRenderPass", pCreateInfo->pDependencies[dependency].srcStageMask);
- skip |= CheckPipelineStageFlags("vkCreateRenderPass", pCreateInfo->pDependencies[dependency].dstStageMask);
- }
-
- return skip;
-}
-
-bool BestPractices::PreCallValidateAllocateMemory(VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo,
- const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory) {
- bool skip = false;
-
- num_mem_objects++;
-
- if (num_mem_objects > kMemoryObjectWarningLimit) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- layer_name.c_str(), "Performance Warning: This app has > %" PRIu32 " memory objects.",
- kMemoryObjectWarningLimit);
- }
-
- return skip;
-}
-
-void BestPractices::PreCallRecordFreeMemory(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator) {
- if (memory != VK_NULL_HANDLE) {
- num_mem_objects--;
- }
-}
-
-bool BestPractices::PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
- const VkGraphicsPipelineCreateInfo* pCreateInfos,
- const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines) {
- bool skip = false;
-
- if ((createInfoCount > 1) && (!pipelineCache)) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- layer_name.c_str(),
- "Performance Warning: This vkCreateGraphicsPipelines call is creating multiple pipelines but is not using a "
- "pipeline cache, which may help with performance");
- }
-
- return skip;
-}
-
-bool BestPractices::PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
- const VkComputePipelineCreateInfo* pCreateInfos,
- const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines) {
- bool skip = false;
-
- if ((createInfoCount > 1) && (!pipelineCache)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- layer_name.c_str(),
- "Performance Warning: This vkCreateComputePipelines call is creating multiple pipelines but is not using a "
- "pipeline cache, which may help with performance");
- }
-
- return skip;
-}
-
-bool BestPractices::CheckPipelineStageFlags(std::string api_name, const VkPipelineStageFlags flags) {
- bool skip = false;
-
- if (flags & VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, layer_name.c_str(),
- "You are using VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT when %s is called\n", api_name.c_str());
- } else if (flags & VK_PIPELINE_STAGE_ALL_COMMANDS_BIT) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, layer_name.c_str(),
- "You are using VK_PIPELINE_STAGE_ALL_COMMANDS_BIT when %s is called\n", api_name.c_str());
- }
-
- return skip;
-}
-
-bool BestPractices::PreCallValidateQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence) {
- bool skip = false;
-
- for (uint32_t submit = 0; submit < submitCount; submit++) {
- for (uint32_t semaphore = 0; semaphore < pSubmits[submit].waitSemaphoreCount; semaphore++) {
- skip |= CheckPipelineStageFlags("vkQueueSubmit", pSubmits[submit].pWaitDstStageMask[semaphore]);
- }
- }
-
- return skip;
-}
-
-bool BestPractices::PreCallValidateCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
- bool skip = false;
-
- skip |= CheckPipelineStageFlags("vkCmdSetEvent", stageMask);
-
- return skip;
-}
-
-bool BestPractices::PreCallValidateCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
- bool skip = false;
-
- skip |= CheckPipelineStageFlags("vkCmdResetEvent", stageMask);
-
- return skip;
-}
-
-bool BestPractices::PreCallValidateCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents,
- VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask,
- uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers,
- uint32_t bufferMemoryBarrierCount,
- const VkBufferMemoryBarrier* pBufferMemoryBarriers,
- uint32_t imageMemoryBarrierCount,
- const VkImageMemoryBarrier* pImageMemoryBarriers) {
- bool skip = false;
-
- skip |= CheckPipelineStageFlags("vkCmdWaitEvents", srcStageMask);
- skip |= CheckPipelineStageFlags("vkCmdWaitEvents", dstStageMask);
-
- return skip;
-}
-
-bool BestPractices::PreCallValidateCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
- VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
- uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers,
- uint32_t bufferMemoryBarrierCount,
- const VkBufferMemoryBarrier* pBufferMemoryBarriers,
- uint32_t imageMemoryBarrierCount,
- const VkImageMemoryBarrier* pImageMemoryBarriers) {
- bool skip = false;
-
- skip |= CheckPipelineStageFlags("vkCmdPipelineBarrier", srcStageMask);
- skip |= CheckPipelineStageFlags("vkCmdPipelineBarrier", dstStageMask);
-
- return skip;
-}
-
-bool BestPractices::PreCallValidateCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
- VkQueryPool queryPool, uint32_t query) {
- bool skip = false;
-
- skip |= CheckPipelineStageFlags("vkCmdWriteTimestamp", pipelineStage);
-
- return skip;
-}
-
-bool BestPractices::PreCallValidateCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
- uint32_t firstVertex, uint32_t firstInstance) {
- bool skip = false;
-
- if (instanceCount == 0) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- layer_name.c_str(), "Warning: You are calling vkCmdDraw() with an instanceCount of Zero.");
- }
-
- return skip;
-}
-
-bool BestPractices::PreCallValidateCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
- uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) {
- bool skip = false;
-
- if (instanceCount == 0) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- layer_name.c_str(), "Warning: You are calling vkCmdDrawIndexed() with an instanceCount of Zero.");
- }
-
- return skip;
-}
-
-bool BestPractices::PreCallValidateCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
- uint32_t drawCount, uint32_t stride) {
- bool skip = false;
-
- if (drawCount == 0) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- layer_name.c_str(), "Warning: You are calling vkCmdDrawIndirect() with a drawCount of Zero.");
- }
-
- return skip;
-}
-
-bool BestPractices::PreCallValidateCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
- uint32_t drawCount, uint32_t stride) {
- bool skip = false;
-
- if (drawCount == 0) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- layer_name.c_str(), "Warning: You are calling vkCmdDrawIndexedIndirect() with a drawCount of Zero.");
- }
-
- return skip;
-}
-
-bool BestPractices::PreCallValidateCmdDispatch(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY,
- uint32_t groupCountZ) {
- bool skip = false;
-
- if ((groupCountX == 0) || (groupCountY == 0) || (groupCountZ == 0)) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, layer_name.c_str(),
- "Warning: You are calling vkCmdDispatch() while one or more groupCounts are zero (groupCountX = %" PRIu32
- ", groupCountY = %" PRIu32 ", groupCountZ = %" PRIu32 ").",
- groupCountX, groupCountY, groupCountZ);
- }
-
- return skip;
-}
diff --git a/layers/best_practices.h b/layers/best_practices.h
deleted file mode 100644
index 394716ab7..000000000
--- a/layers/best_practices.h
+++ /dev/null
@@ -1,90 +0,0 @@
-/* Copyright (c) 2015-2019 The Khronos Group Inc.
- * Copyright (c) 2015-2019 Valve Corporation
- * Copyright (c) 2015-2019 LunarG, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * Author: Camden Stocker <camden@lunarg.com>
- */
-
-#pragma once
-
-#include "chassis.h"
-#include <string>
-
-static const uint32_t kMemoryObjectWarningLimit = 250;
-
-class BestPractices : public ValidationObject {
- public:
- std::string GetAPIVersionName(uint32_t version);
-
- bool PreCallValidateCreateInstance(const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator,
- VkInstance* pInstance);
- void PreCallRecordCreateInstance(const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator,
- VkInstance* pInstance);
- bool PreCallValidateCreateDevice(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator, VkDevice* pDevice);
- bool PreCallValidateCreateBuffer(VkDevice device, const VkBufferCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer);
- bool PreCallValidateCreateImage(VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator,
- VkImage* pImage);
- bool PreCallValidateCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain);
- bool PreCallValidateCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
- const VkSwapchainCreateInfoKHR* pCreateInfos,
- const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains);
- bool PreCallValidateCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass);
- bool PreCallValidateAllocateMemory(VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo,
- const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory);
- void PreCallRecordFreeMemory(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator);
- bool PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
- const VkGraphicsPipelineCreateInfo* pCreateInfos,
- const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines);
- bool PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
- const VkComputePipelineCreateInfo* pCreateInfos,
- const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines);
-
- bool CheckPipelineStageFlags(std::string api_name, const VkPipelineStageFlags flags);
- bool PreCallValidateQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence);
- bool PreCallValidateCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask);
- bool PreCallValidateCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask);
- bool PreCallValidateCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents,
- VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask,
- uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers,
- uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers,
- uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers);
- bool PreCallValidateCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
- VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
- uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers,
- uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers,
- uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers);
- bool PreCallValidateCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
- VkQueryPool queryPool, uint32_t query);
- bool PreCallValidateCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex,
- uint32_t firstInstance);
- bool PreCallValidateCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
- uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance);
- bool PreCallValidateCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount,
- uint32_t stride);
- bool PreCallValidateCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
- uint32_t drawCount, uint32_t stride);
- bool PreCallValidateCmdDispatch(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY,
- uint32_t groupCountZ);
-
- private:
- uint32_t instance_api_version;
- uint32_t device_api_version;
-
- uint32_t num_mem_objects;
-};
diff --git a/layers/buffer_validation.cpp b/layers/buffer_validation.cpp
index 4e16e8e63..ccf377ee4 100644
--- a/layers/buffer_validation.cpp
+++ b/layers/buffer_validation.cpp
@@ -29,7 +29,6 @@
#include <string>
#include "vk_enum_string_helper.h"
-#include "vk_format_utils.h"
#include "vk_layer_data.h"
#include "vk_layer_utils.h"
#include "vk_layer_logging.h"
@@ -41,84 +40,6 @@
#include "descriptor_sets.h"
#include "buffer_validation.h"
-// Transfer VkImageSubresourceLayers into VkImageSubresourceRange struct
-static VkImageSubresourceRange RangeFromLayers(const VkImageSubresourceLayers &subresource_layers) {
- VkImageSubresourceRange subresource_range;
- subresource_range.aspectMask = subresource_layers.aspectMask;
- subresource_range.baseArrayLayer = subresource_layers.baseArrayLayer;
- subresource_range.layerCount = subresource_layers.layerCount;
- subresource_range.baseMipLevel = subresource_layers.mipLevel;
- subresource_range.levelCount = 1;
- return subresource_range;
-}
-
-IMAGE_STATE::IMAGE_STATE(VkImage img, const VkImageCreateInfo *pCreateInfo)
- : image(img),
- createInfo(*pCreateInfo),
- valid(false),
- acquired(false),
- shared_presentable(false),
- layout_locked(false),
- get_sparse_reqs_called(false),
- sparse_metadata_required(false),
- sparse_metadata_bound(false),
- imported_ahb(false),
- has_ahb_format(false),
- ahb_format(0),
- full_range{},
- create_from_swapchain(VK_NULL_HANDLE),
- bind_swapchain(VK_NULL_HANDLE),
- bind_swapchain_imageIndex(0),
- sparse_requirements{} {
- if ((createInfo.sharingMode == VK_SHARING_MODE_CONCURRENT) && (createInfo.queueFamilyIndexCount > 0)) {
- uint32_t *pQueueFamilyIndices = new uint32_t[createInfo.queueFamilyIndexCount];
- for (uint32_t i = 0; i < createInfo.queueFamilyIndexCount; i++) {
- pQueueFamilyIndices[i] = pCreateInfo->pQueueFamilyIndices[i];
- }
- createInfo.pQueueFamilyIndices = pQueueFamilyIndices;
- }
-
- if (createInfo.flags & VK_IMAGE_CREATE_SPARSE_BINDING_BIT) {
- sparse = true;
- }
- const auto format = createInfo.format;
- VkImageSubresourceRange init_range{0, 0, VK_REMAINING_MIP_LEVELS, 0, VK_REMAINING_ARRAY_LAYERS};
- if (FormatIsColor(format) || FormatIsMultiplane(format)) {
- init_range.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; // Normalization will expand this for multiplane
- } else {
- init_range.aspectMask =
- (FormatHasDepth(format) ? VK_IMAGE_ASPECT_DEPTH_BIT : 0) | (FormatHasStencil(format) ? VK_IMAGE_ASPECT_STENCIL_BIT : 0);
- }
- full_range = NormalizeSubresourceRange(*this, init_range);
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
- auto external_format = lvl_find_in_chain<VkExternalFormatANDROID>(createInfo.pNext);
- if (external_format) {
- external_format_android = external_format->externalFormat;
- } else {
- // If externalFormat is zero, the effect is as if the VkExternalFormatANDROID structure was not present.
- external_format_android = 0;
- }
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-}
-
-IMAGE_VIEW_STATE::IMAGE_VIEW_STATE(const IMAGE_STATE *image_state, VkImageView iv, const VkImageViewCreateInfo *ci)
- : image_view(iv), create_info(*ci), normalized_subresource_range(ci->subresourceRange), samplerConversion(VK_NULL_HANDLE) {
- auto *conversionInfo = lvl_find_in_chain<VkSamplerYcbcrConversionInfo>(create_info.pNext);
- if (conversionInfo) samplerConversion = conversionInfo->conversion;
- if (image_state) {
- // A light normalization of the createInfo range
- auto &sub_res_range = create_info.subresourceRange;
- sub_res_range.levelCount = ResolveRemainingLevels(&sub_res_range, image_state->createInfo.mipLevels);
- sub_res_range.layerCount = ResolveRemainingLayers(&sub_res_range, image_state->createInfo.arrayLayers);
-
- // Cache a full normalization (for "full image/whole image" comparisons)
- normalized_subresource_range = NormalizeSubresourceRange(*image_state, ci->subresourceRange);
- samples = image_state->createInfo.samples;
- descriptor_format_bits = DescriptorRequirementsBitsFromFormat(create_info.format);
- }
-}
-
uint32_t FullMipChainLevels(uint32_t height, uint32_t width, uint32_t depth) {
// uint cast applies floor()
return 1u + (uint32_t)log2(std::max({height, width, depth}));
@@ -128,56 +49,48 @@ uint32_t FullMipChainLevels(VkExtent3D extent) { return FullMipChainLevels(exten
uint32_t FullMipChainLevels(VkExtent2D extent) { return FullMipChainLevels(extent.height, extent.width); }
-VkImageSubresourceRange NormalizeSubresourceRange(const IMAGE_STATE &image_state, const VkImageSubresourceRange &range) {
- const VkImageCreateInfo &image_create_info = image_state.createInfo;
- VkImageSubresourceRange norm = range;
- norm.levelCount = ResolveRemainingLevels(&range, image_create_info.mipLevels);
-
- // Special case for 3D images with VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR flag bit, where <extent.depth> and
- // <arrayLayers> can potentially alias.
- uint32_t layer_limit = (0 != (image_create_info.flags & VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR))
- ? image_create_info.extent.depth
- : image_create_info.arrayLayers;
- norm.layerCount = ResolveRemainingLayers(&range, layer_limit);
-
- // For multiplanar formats, IMAGE_ASPECT_COLOR is equivalent to adding the aspect of the individual planes
- VkImageAspectFlags &aspect_mask = norm.aspectMask;
- if (FormatIsMultiplane(image_create_info.format)) {
- if (aspect_mask & VK_IMAGE_ASPECT_COLOR_BIT) {
- aspect_mask &= ~VK_IMAGE_ASPECT_COLOR_BIT;
- aspect_mask |= (VK_IMAGE_ASPECT_PLANE_0_BIT | VK_IMAGE_ASPECT_PLANE_1_BIT);
- if (FormatPlaneCount(image_create_info.format) > 2) {
- aspect_mask |= VK_IMAGE_ASPECT_PLANE_2_BIT;
- }
+void CoreChecks::SetLayout(layer_data *device_data, GLOBAL_CB_NODE *pCB, ImageSubresourcePair imgpair,
+ const VkImageLayout &layout) {
+ auto it = pCB->imageLayoutMap.find(imgpair);
+ if (it != pCB->imageLayoutMap.end()) {
+ it->second.layout = layout;
+ } else {
+ assert(imgpair.hasSubresource);
+ IMAGE_CMD_BUF_LAYOUT_NODE node;
+ if (!FindCmdBufLayout(device_data, pCB, imgpair.image, imgpair.subresource, node)) {
+ node.initialLayout = layout;
}
+ SetLayout(device_data, pCB, imgpair, {node.initialLayout, layout});
}
- return norm;
}
template <class OBJECT, class LAYOUT>
-void CoreChecks::SetLayout(OBJECT *pObject, VkImage image, VkImageSubresource range, const LAYOUT &layout) {
+void CoreChecks::SetLayout(layer_data *device_data, OBJECT *pObject, VkImage image, VkImageSubresource range,
+ const LAYOUT &layout) {
ImageSubresourcePair imgpair = {image, true, range};
- SetLayout(pObject, imgpair, layout, VK_IMAGE_ASPECT_COLOR_BIT);
- SetLayout(pObject, imgpair, layout, VK_IMAGE_ASPECT_DEPTH_BIT);
- SetLayout(pObject, imgpair, layout, VK_IMAGE_ASPECT_STENCIL_BIT);
- SetLayout(pObject, imgpair, layout, VK_IMAGE_ASPECT_METADATA_BIT);
- if (device_extensions.vk_khr_sampler_ycbcr_conversion) {
- SetLayout(pObject, imgpair, layout, VK_IMAGE_ASPECT_PLANE_0_BIT_KHR);
- SetLayout(pObject, imgpair, layout, VK_IMAGE_ASPECT_PLANE_1_BIT_KHR);
- SetLayout(pObject, imgpair, layout, VK_IMAGE_ASPECT_PLANE_2_BIT_KHR);
+ SetLayout(device_data, pObject, imgpair, layout, VK_IMAGE_ASPECT_COLOR_BIT);
+ SetLayout(device_data, pObject, imgpair, layout, VK_IMAGE_ASPECT_DEPTH_BIT);
+ SetLayout(device_data, pObject, imgpair, layout, VK_IMAGE_ASPECT_STENCIL_BIT);
+ SetLayout(device_data, pObject, imgpair, layout, VK_IMAGE_ASPECT_METADATA_BIT);
+ if (GetDeviceExtensions()->vk_khr_sampler_ycbcr_conversion) {
+ SetLayout(device_data, pObject, imgpair, layout, VK_IMAGE_ASPECT_PLANE_0_BIT_KHR);
+ SetLayout(device_data, pObject, imgpair, layout, VK_IMAGE_ASPECT_PLANE_1_BIT_KHR);
+ SetLayout(device_data, pObject, imgpair, layout, VK_IMAGE_ASPECT_PLANE_2_BIT_KHR);
}
}
template <class OBJECT, class LAYOUT>
-void CoreChecks::SetLayout(OBJECT *pObject, ImageSubresourcePair imgpair, const LAYOUT &layout, VkImageAspectFlags aspectMask) {
+void CoreChecks::SetLayout(layer_data *device_data, OBJECT *pObject, ImageSubresourcePair imgpair, const LAYOUT &layout,
+ VkImageAspectFlags aspectMask) {
if (imgpair.subresource.aspectMask & aspectMask) {
imgpair.subresource.aspectMask = aspectMask;
- SetLayout(pObject, imgpair, layout);
+ SetLayout(device_data, pObject, imgpair, layout);
}
}
// Set the layout in supplied map
-void CoreChecks::SetLayout(ImageSubresPairLayoutMap &imageLayoutMap, ImageSubresourcePair imgpair, VkImageLayout layout) {
+void CoreChecks::SetLayout(std::unordered_map<ImageSubresourcePair, IMAGE_LAYOUT_NODE> &imageLayoutMap,
+ ImageSubresourcePair imgpair, VkImageLayout layout) {
auto it = imageLayoutMap.find(imgpair);
if (it != imageLayoutMap.end()) {
it->second.layout = layout; // Update
@@ -186,20 +99,51 @@ void CoreChecks::SetLayout(ImageSubresPairLayoutMap &imageLayoutMap, ImageSubres
}
}
-bool CoreChecks::FindLayoutVerifyLayout(ImageSubresourcePair imgpair, VkImageLayout &layout, const VkImageAspectFlags aspectMask) {
+bool CoreChecks::FindLayoutVerifyNode(layer_data const *device_data, GLOBAL_CB_NODE const *pCB, ImageSubresourcePair imgpair,
+ IMAGE_CMD_BUF_LAYOUT_NODE &node, const VkImageAspectFlags aspectMask) {
if (!(imgpair.subresource.aspectMask & aspectMask)) {
return false;
}
VkImageAspectFlags oldAspectMask = imgpair.subresource.aspectMask;
imgpair.subresource.aspectMask = aspectMask;
- auto imgsubIt = imageLayoutMap.find(imgpair);
- if (imgsubIt == imageLayoutMap.end()) {
+ auto imgsubIt = pCB->imageLayoutMap.find(imgpair);
+ if (imgsubIt == pCB->imageLayoutMap.end()) {
+ return false;
+ }
+ if (node.layout != VK_IMAGE_LAYOUT_MAX_ENUM && node.layout != imgsubIt->second.layout) {
+ log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(imgpair.image),
+ kVUID_Core_DrawState_InvalidLayout,
+ "Cannot query for VkImage %s layout when combined aspect mask %d has multiple layout types: %s and %s",
+ report_data->FormatHandle(imgpair.image).c_str(), oldAspectMask, string_VkImageLayout(node.layout),
+ string_VkImageLayout(imgsubIt->second.layout));
+ }
+ if (node.initialLayout != VK_IMAGE_LAYOUT_MAX_ENUM && node.initialLayout != imgsubIt->second.initialLayout) {
+ log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(imgpair.image),
+ kVUID_Core_DrawState_InvalidLayout,
+ "Cannot query for VkImage %s"
+ " layout when combined aspect mask %d has multiple initial layout types: %s and %s",
+ report_data->FormatHandle(imgpair.image).c_str(), oldAspectMask, string_VkImageLayout(node.initialLayout),
+ string_VkImageLayout(imgsubIt->second.initialLayout));
+ }
+ node = imgsubIt->second;
+ return true;
+}
+
+bool CoreChecks::FindLayoutVerifyLayout(layer_data const *device_data, ImageSubresourcePair imgpair, VkImageLayout &layout,
+ const VkImageAspectFlags aspectMask) {
+ if (!(imgpair.subresource.aspectMask & aspectMask)) {
+ return false;
+ }
+ VkImageAspectFlags oldAspectMask = imgpair.subresource.aspectMask;
+ imgpair.subresource.aspectMask = aspectMask;
+ auto imgsubIt = (*GetImageLayoutMap()).find(imgpair);
+ if (imgsubIt == (*GetImageLayoutMap()).end()) {
return false;
}
if (layout != VK_IMAGE_LAYOUT_MAX_ENUM && layout != imgsubIt->second.layout) {
log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(imgpair.image),
kVUID_Core_DrawState_InvalidLayout,
- "Cannot query for %s layout when combined aspect mask %d has multiple layout types: %s and %s",
+ "Cannot query for VkImage %s layout when combined aspect mask %d has multiple layout types: %s and %s",
report_data->FormatHandle(imgpair.image).c_str(), oldAspectMask, string_VkImageLayout(layout),
string_VkImageLayout(imgsubIt->second.layout));
}
@@ -207,30 +151,54 @@ bool CoreChecks::FindLayoutVerifyLayout(ImageSubresourcePair imgpair, VkImageLay
return true;
}
+// Find layout(s) on the command buffer level
+bool CoreChecks::FindCmdBufLayout(layer_data const *device_data, GLOBAL_CB_NODE const *pCB, VkImage image, VkImageSubresource range,
+ IMAGE_CMD_BUF_LAYOUT_NODE &node) {
+ ImageSubresourcePair imgpair = {image, true, range};
+ node = IMAGE_CMD_BUF_LAYOUT_NODE(VK_IMAGE_LAYOUT_MAX_ENUM, VK_IMAGE_LAYOUT_MAX_ENUM);
+ FindLayoutVerifyNode(device_data, pCB, imgpair, node, VK_IMAGE_ASPECT_COLOR_BIT);
+ FindLayoutVerifyNode(device_data, pCB, imgpair, node, VK_IMAGE_ASPECT_DEPTH_BIT);
+ FindLayoutVerifyNode(device_data, pCB, imgpair, node, VK_IMAGE_ASPECT_STENCIL_BIT);
+ FindLayoutVerifyNode(device_data, pCB, imgpair, node, VK_IMAGE_ASPECT_METADATA_BIT);
+ if (GetDeviceExtensions()->vk_khr_sampler_ycbcr_conversion) {
+ FindLayoutVerifyNode(device_data, pCB, imgpair, node, VK_IMAGE_ASPECT_PLANE_0_BIT_KHR);
+ FindLayoutVerifyNode(device_data, pCB, imgpair, node, VK_IMAGE_ASPECT_PLANE_1_BIT_KHR);
+ FindLayoutVerifyNode(device_data, pCB, imgpair, node, VK_IMAGE_ASPECT_PLANE_2_BIT_KHR);
+ }
+ if (node.layout == VK_IMAGE_LAYOUT_MAX_ENUM) {
+ imgpair = {image, false, VkImageSubresource()};
+ auto imgsubIt = pCB->imageLayoutMap.find(imgpair);
+ if (imgsubIt == pCB->imageLayoutMap.end()) return false;
+ // TODO: This is ostensibly a find function but it changes state here
+ node = imgsubIt->second;
+ }
+ return true;
+}
+
// Find layout(s) on the global level
-bool CoreChecks::FindGlobalLayout(ImageSubresourcePair imgpair, VkImageLayout &layout) {
+bool CoreChecks::FindGlobalLayout(layer_data *device_data, ImageSubresourcePair imgpair, VkImageLayout &layout) {
layout = VK_IMAGE_LAYOUT_MAX_ENUM;
- FindLayoutVerifyLayout(imgpair, layout, VK_IMAGE_ASPECT_COLOR_BIT);
- FindLayoutVerifyLayout(imgpair, layout, VK_IMAGE_ASPECT_DEPTH_BIT);
- FindLayoutVerifyLayout(imgpair, layout, VK_IMAGE_ASPECT_STENCIL_BIT);
- FindLayoutVerifyLayout(imgpair, layout, VK_IMAGE_ASPECT_METADATA_BIT);
- if (device_extensions.vk_khr_sampler_ycbcr_conversion) {
- FindLayoutVerifyLayout(imgpair, layout, VK_IMAGE_ASPECT_PLANE_0_BIT_KHR);
- FindLayoutVerifyLayout(imgpair, layout, VK_IMAGE_ASPECT_PLANE_1_BIT_KHR);
- FindLayoutVerifyLayout(imgpair, layout, VK_IMAGE_ASPECT_PLANE_2_BIT_KHR);
+ FindLayoutVerifyLayout(device_data, imgpair, layout, VK_IMAGE_ASPECT_COLOR_BIT);
+ FindLayoutVerifyLayout(device_data, imgpair, layout, VK_IMAGE_ASPECT_DEPTH_BIT);
+ FindLayoutVerifyLayout(device_data, imgpair, layout, VK_IMAGE_ASPECT_STENCIL_BIT);
+ FindLayoutVerifyLayout(device_data, imgpair, layout, VK_IMAGE_ASPECT_METADATA_BIT);
+ if (GetDeviceExtensions()->vk_khr_sampler_ycbcr_conversion) {
+ FindLayoutVerifyLayout(device_data, imgpair, layout, VK_IMAGE_ASPECT_PLANE_0_BIT_KHR);
+ FindLayoutVerifyLayout(device_data, imgpair, layout, VK_IMAGE_ASPECT_PLANE_1_BIT_KHR);
+ FindLayoutVerifyLayout(device_data, imgpair, layout, VK_IMAGE_ASPECT_PLANE_2_BIT_KHR);
}
if (layout == VK_IMAGE_LAYOUT_MAX_ENUM) {
imgpair = {imgpair.image, false, VkImageSubresource()};
- auto imgsubIt = imageLayoutMap.find(imgpair);
- if (imgsubIt == imageLayoutMap.end()) return false;
+ auto imgsubIt = (*GetImageLayoutMap()).find(imgpair);
+ if (imgsubIt == (*GetImageLayoutMap()).end()) return false;
layout = imgsubIt->second.layout;
}
return true;
}
-bool CoreChecks::FindLayouts(VkImage image, std::vector<VkImageLayout> &layouts) {
- auto sub_data = imageSubresourceMap.find(image);
- if (sub_data == imageSubresourceMap.end()) return false;
+bool CoreChecks::FindLayouts(layer_data *device_data, VkImage image, std::vector<VkImageLayout> &layouts) {
+ auto sub_data = (*GetImageSubresourceMap()).find(image);
+ if (sub_data == (*GetImageSubresourceMap()).end()) return false;
auto image_state = GetImageState(image);
if (!image_state) return false;
bool ignoreGlobal = false;
@@ -240,16 +208,16 @@ bool CoreChecks::FindLayouts(VkImage image, std::vector<VkImageLayout> &layouts)
}
for (auto imgsubpair : sub_data->second) {
if (ignoreGlobal && !imgsubpair.hasSubresource) continue;
- auto img_data = imageLayoutMap.find(imgsubpair);
- if (img_data != imageLayoutMap.end()) {
+ auto img_data = (*GetImageLayoutMap()).find(imgsubpair);
+ if (img_data != (*GetImageLayoutMap()).end()) {
layouts.push_back(img_data->second.layout);
}
}
return true;
}
-bool CoreChecks::FindLayout(const ImageSubresPairLayoutMap &imageLayoutMap, ImageSubresourcePair imgpair, VkImageLayout &layout,
- const VkImageAspectFlags aspectMask) {
+bool CoreChecks::FindLayout(const std::unordered_map<ImageSubresourcePair, IMAGE_LAYOUT_NODE> &imageLayoutMap,
+ ImageSubresourcePair imgpair, VkImageLayout &layout, const VkImageAspectFlags aspectMask) {
if (!(imgpair.subresource.aspectMask & aspectMask)) {
return false;
}
@@ -263,14 +231,15 @@ bool CoreChecks::FindLayout(const ImageSubresPairLayoutMap &imageLayoutMap, Imag
}
// find layout in supplied map
-bool CoreChecks::FindLayout(const ImageSubresPairLayoutMap &imageLayoutMap, ImageSubresourcePair imgpair,
- VkImageLayout &layout) const {
+bool CoreChecks::FindLayout(layer_data *device_data,
+ const std::unordered_map<ImageSubresourcePair, IMAGE_LAYOUT_NODE> &imageLayoutMap,
+ ImageSubresourcePair imgpair, VkImageLayout &layout) {
layout = VK_IMAGE_LAYOUT_MAX_ENUM;
FindLayout(imageLayoutMap, imgpair, layout, VK_IMAGE_ASPECT_COLOR_BIT);
FindLayout(imageLayoutMap, imgpair, layout, VK_IMAGE_ASPECT_DEPTH_BIT);
FindLayout(imageLayoutMap, imgpair, layout, VK_IMAGE_ASPECT_STENCIL_BIT);
FindLayout(imageLayoutMap, imgpair, layout, VK_IMAGE_ASPECT_METADATA_BIT);
- if (device_extensions.vk_khr_sampler_ycbcr_conversion) {
+ if (GetDeviceExtensions()->vk_khr_sampler_ycbcr_conversion) {
FindLayout(imageLayoutMap, imgpair, layout, VK_IMAGE_ASPECT_PLANE_0_BIT_KHR);
FindLayout(imageLayoutMap, imgpair, layout, VK_IMAGE_ASPECT_PLANE_1_BIT_KHR);
FindLayout(imageLayoutMap, imgpair, layout, VK_IMAGE_ASPECT_PLANE_2_BIT_KHR);
@@ -286,84 +255,106 @@ bool CoreChecks::FindLayout(const ImageSubresPairLayoutMap &imageLayoutMap, Imag
}
// Set the layout on the global level
-void CoreChecks::SetGlobalLayout(ImageSubresourcePair imgpair, const VkImageLayout &layout) {
+void CoreChecks::SetGlobalLayout(layer_data *device_data, ImageSubresourcePair imgpair, const VkImageLayout &layout) {
VkImage &image = imgpair.image;
- auto data = imageLayoutMap.find(imgpair);
- if (data != imageLayoutMap.end()) {
+ auto &lmap = (*GetImageLayoutMap());
+ auto data = lmap.find(imgpair);
+ if (data != lmap.end()) {
data->second.layout = layout; // Update
} else {
- imageLayoutMap[imgpair].layout = layout; // Insert
+ lmap[imgpair].layout = layout; // Insert
}
- auto &image_subresources = imageSubresourceMap[image];
+ auto &image_subresources = (*GetImageSubresourceMap())[image];
auto subresource = std::find(image_subresources.begin(), image_subresources.end(), imgpair);
if (subresource == image_subresources.end()) {
image_subresources.push_back(imgpair);
}
}
-// Set image layout for given VkImageSubresourceRange struct
-void CoreChecks::SetImageLayout(CMD_BUFFER_STATE *cb_node, const IMAGE_STATE &image_state,
- const VkImageSubresourceRange &image_subresource_range, VkImageLayout layout,
- VkImageLayout expected_layout) {
- auto *subresource_map = GetImageSubresourceLayoutMap(cb_node, image_state);
- assert(subresource_map); // the non-const getter must return a valid pointer
- if (subresource_map->SetSubresourceRangeLayout(*cb_node, image_subresource_range, layout, expected_layout)) {
- cb_node->image_layout_change_count++; // Change the version of this data to force revalidation
+// Set the layout on the cmdbuf level
+void CoreChecks::SetLayout(layer_data *device_data, GLOBAL_CB_NODE *pCB, ImageSubresourcePair imgpair,
+ const IMAGE_CMD_BUF_LAYOUT_NODE &node) {
+ auto it = pCB->imageLayoutMap.find(imgpair);
+ if (it != pCB->imageLayoutMap.end()) {
+ it->second = node; // Update
+ } else {
+ pCB->imageLayoutMap[imgpair] = node; // Insert
}
}
-
-// Set the initial image layout for all slices of an image view
-void CoreChecks::SetImageViewInitialLayout(CMD_BUFFER_STATE *cb_node, const IMAGE_VIEW_STATE &view_state, VkImageLayout layout) {
- if (disabled.image_layout_validation) {
- return;
- }
- IMAGE_STATE *image_state = GetImageState(view_state.create_info.image);
- if (image_state) {
- auto *subresource_map = GetImageSubresourceLayoutMap(cb_node, *image_state);
- subresource_map->SetSubresourceRangeInitialLayout(*cb_node, view_state.normalized_subresource_range, layout, &view_state);
+// Set image layout for given VkImageSubresourceRange struct
+void CoreChecks::SetImageLayout(layer_data *device_data, GLOBAL_CB_NODE *cb_node, const IMAGE_STATE *image_state,
+ VkImageSubresourceRange image_subresource_range, const VkImageLayout &layout) {
+ assert(image_state);
+ cb_node->image_layout_change_count++; // Change the version of this data to force revalidation
+ for (uint32_t level_index = 0; level_index < image_subresource_range.levelCount; ++level_index) {
+ uint32_t level = image_subresource_range.baseMipLevel + level_index;
+ for (uint32_t layer_index = 0; layer_index < image_subresource_range.layerCount; layer_index++) {
+ uint32_t layer = image_subresource_range.baseArrayLayer + layer_index;
+ VkImageSubresource sub = {image_subresource_range.aspectMask, level, layer};
+ // TODO: If ImageView was created with depth or stencil, transition both layouts as the aspectMask is ignored and both
+ // are used. Verify that the extra implicit layout is OK for descriptor set layout validation
+ if (image_subresource_range.aspectMask & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) {
+ if (FormatIsDepthAndStencil(image_state->createInfo.format)) {
+ sub.aspectMask |= (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT);
+ }
+ }
+ // For multiplane images, IMAGE_ASPECT_COLOR is an alias for all of the plane bits
+ if (GetDeviceExtensions()->vk_khr_sampler_ycbcr_conversion) {
+ if (FormatIsMultiplane(image_state->createInfo.format)) {
+ if (sub.aspectMask & VK_IMAGE_ASPECT_COLOR_BIT) {
+ sub.aspectMask &= ~VK_IMAGE_ASPECT_COLOR_BIT;
+ sub.aspectMask |= VK_IMAGE_ASPECT_PLANE_0_BIT_KHR | VK_IMAGE_ASPECT_PLANE_1_BIT_KHR;
+ if (FormatPlaneCount(image_state->createInfo.format) > 2) {
+ sub.aspectMask |= VK_IMAGE_ASPECT_PLANE_2_BIT_KHR;
+ }
+ }
+ }
+ }
+ SetLayout(device_data, cb_node, image_state->image, sub, layout);
+ }
}
}
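
// Illustration only: a standalone sketch of the aspect expansion the loop above applies per
// subresource when VK_KHR_sampler_ycbcr_conversion is enabled. For multi-planar formats,
// VK_IMAGE_ASPECT_COLOR_BIT stands in for the individual plane bits; plane_count would come
// from the layer's FormatPlaneCount() helper.
static VkImageAspectFlags ExpandColorAspectForMultiplaneSketch(VkImageAspectFlags aspect_mask, uint32_t plane_count) {
    if ((plane_count >= 2) && (aspect_mask & VK_IMAGE_ASPECT_COLOR_BIT)) {
        aspect_mask &= ~VK_IMAGE_ASPECT_COLOR_BIT;
        aspect_mask |= VK_IMAGE_ASPECT_PLANE_0_BIT_KHR | VK_IMAGE_ASPECT_PLANE_1_BIT_KHR;
        if (plane_count > 2) {
            aspect_mask |= VK_IMAGE_ASPECT_PLANE_2_BIT_KHR;  // e.g. VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM
        }
    }
    return aspect_mask;
}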
-
-// Set the initial image layout for a passed non-normalized subresource range
-void CoreChecks::SetImageInitialLayout(CMD_BUFFER_STATE *cb_node, const IMAGE_STATE &image_state,
- const VkImageSubresourceRange &range, VkImageLayout layout) {
- auto *subresource_map = GetImageSubresourceLayoutMap(cb_node, image_state);
- assert(subresource_map);
- subresource_map->SetSubresourceRangeInitialLayout(*cb_node, NormalizeSubresourceRange(image_state, range), layout);
-}
-
-void CoreChecks::SetImageInitialLayout(CMD_BUFFER_STATE *cb_node, VkImage image, const VkImageSubresourceRange &range,
- VkImageLayout layout) {
- const IMAGE_STATE *image_state = GetImageState(image);
- if (!image_state) return;
- SetImageInitialLayout(cb_node, *image_state, range, layout);
-};
-
-void CoreChecks::SetImageInitialLayout(CMD_BUFFER_STATE *cb_node, const IMAGE_STATE &image_state,
- const VkImageSubresourceLayers &layers, VkImageLayout layout) {
- SetImageInitialLayout(cb_node, image_state, RangeFromLayers(layers), layout);
+// Set image layout for given VkImageSubresourceLayers struct
+void CoreChecks::SetImageLayout(layer_data *device_data, GLOBAL_CB_NODE *cb_node, const IMAGE_STATE *image_state,
+ VkImageSubresourceLayers image_subresource_layers, const VkImageLayout &layout) {
+ // Transfer VkImageSubresourceLayers into VkImageSubresourceRange struct
+ VkImageSubresourceRange image_subresource_range;
+ image_subresource_range.aspectMask = image_subresource_layers.aspectMask;
+ image_subresource_range.baseArrayLayer = image_subresource_layers.baseArrayLayer;
+ image_subresource_range.layerCount = image_subresource_layers.layerCount;
+ image_subresource_range.baseMipLevel = image_subresource_layers.mipLevel;
+ image_subresource_range.levelCount = 1;
+ SetImageLayout(device_data, cb_node, image_state, image_subresource_range, layout);
}
// Set image layout for all slices of an image view
-void CoreChecks::SetImageViewLayout(CMD_BUFFER_STATE *cb_node, const IMAGE_VIEW_STATE &view_state, VkImageLayout layout) {
- IMAGE_STATE *image_state = GetImageState(view_state.create_info.image);
- if (!image_state) return; // TODO: track/report stale image references
+void CoreChecks::SetImageViewLayout(layer_data *device_data, GLOBAL_CB_NODE *cb_node, IMAGE_VIEW_STATE *view_state,
+ const VkImageLayout &layout) {
+ assert(view_state);
+
+ IMAGE_STATE *image_state = GetImageState(view_state->create_info.image);
+ VkImageSubresourceRange sub_range = view_state->create_info.subresourceRange;
- VkImageSubresourceRange sub_range = view_state.normalized_subresource_range;
    // When changing the layout of a 3D image subresource via a 2D or 2D_ARRAY image view, all depth slices of
// the subresource mip level(s) are transitioned, ignoring any layers restriction in the subresource info.
- if ((image_state->createInfo.imageType == VK_IMAGE_TYPE_3D) && (view_state.create_info.viewType != VK_IMAGE_VIEW_TYPE_3D)) {
+ if ((image_state->createInfo.imageType == VK_IMAGE_TYPE_3D) && (view_state->create_info.viewType != VK_IMAGE_VIEW_TYPE_3D)) {
sub_range.baseArrayLayer = 0;
sub_range.layerCount = image_state->createInfo.extent.depth;
}
- SetImageLayout(cb_node, *image_state, sub_range, layout);
+ SetImageLayout(device_data, cb_node, image_state, sub_range, layout);
+}
+
+void CoreChecks::SetImageViewLayout(layer_data *device_data, GLOBAL_CB_NODE *cb_node, VkImageView imageView,
+ const VkImageLayout &layout) {
+ auto view_state = GetImageViewState(imageView);
+ SetImageViewLayout(device_data, cb_node, view_state, layout);
}
-bool CoreChecks::ValidateRenderPassLayoutAgainstFramebufferImageUsage(RenderPassCreateVersion rp_version, VkImageLayout layout,
- VkImage image, VkImageView image_view,
+bool CoreChecks::ValidateRenderPassLayoutAgainstFramebufferImageUsage(layer_data *device_data, RenderPassCreateVersion rp_version,
+ VkImageLayout layout, VkImage image, VkImageView image_view,
VkFramebuffer framebuffer, VkRenderPass renderpass,
- uint32_t attachment_index, const char *variable_name) const {
+ uint32_t attachment_index, const char *variable_name) {
bool skip = false;
auto image_state = GetImageState(image);
const char *vuid;
@@ -372,7 +363,8 @@ bool CoreChecks::ValidateRenderPassLayoutAgainstFramebufferImageUsage(RenderPass
if (!image_state) {
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(image),
"VUID-VkRenderPassBeginInfo-framebuffer-parameter",
- "Render Pass begin with %s uses %s where pAttachments[%" PRIu32 "] = %s, which refers to an invalid image",
+ "Render Pass begin with renderpass %s uses framebuffer %s where pAttachments[%" PRIu32
+ "] = image view %s, which refers to an invalid image",
report_data->FormatHandle(renderpass).c_str(), report_data->FormatHandle(framebuffer).c_str(),
attachment_index, report_data->FormatHandle(image_view).c_str());
return skip;
@@ -385,11 +377,11 @@ bool CoreChecks::ValidateRenderPassLayoutAgainstFramebufferImageUsage(RenderPass
vuid = use_rp2 ? "VUID-vkCmdBeginRenderPass2KHR-initialLayout-03094" : "VUID-vkCmdBeginRenderPass-initialLayout-00895";
skip |=
log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(image), vuid,
- "Layout/usage mismatch for attachment %u in %s"
- " - the %s is %s but the image attached to %s via %s"
+ "Layout/usage mismatch for attachment %u in render pass %s"
+ " - the %s is %s but the image attached to framebuffer %s via image view %s"
" was not created with VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT",
- attachment_index, report_data->FormatHandle(renderpass).c_str(), variable_name, string_VkImageLayout(layout),
- report_data->FormatHandle(framebuffer).c_str(), report_data->FormatHandle(image_view).c_str());
+ attachment_index, report_data->FormatHandle(framebuffer).c_str(), variable_name, string_VkImageLayout(layout),
+ report_data->FormatHandle(renderpass).c_str(), report_data->FormatHandle(image_view).c_str());
}
if (layout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL &&
@@ -397,36 +389,36 @@ bool CoreChecks::ValidateRenderPassLayoutAgainstFramebufferImageUsage(RenderPass
vuid = use_rp2 ? "VUID-vkCmdBeginRenderPass2KHR-initialLayout-03097" : "VUID-vkCmdBeginRenderPass-initialLayout-00897";
skip |=
log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(image), vuid,
- "Layout/usage mismatch for attachment %u in %s"
- " - the %s is %s but the image attached to %s via %s"
+ "Layout/usage mismatch for attachment %u in render pass %s"
+ " - the %s is %s but the image attached to framebuffer %s via image view %s"
" was not created with VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT or VK_IMAGE_USAGE_SAMPLED_BIT",
- attachment_index, report_data->FormatHandle(renderpass).c_str(), variable_name, string_VkImageLayout(layout),
- report_data->FormatHandle(framebuffer).c_str(), report_data->FormatHandle(image_view).c_str());
+ attachment_index, report_data->FormatHandle(framebuffer).c_str(), variable_name, string_VkImageLayout(layout),
+ report_data->FormatHandle(renderpass).c_str(), report_data->FormatHandle(image_view).c_str());
}
if (layout == VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL && !(image_usage & VK_IMAGE_USAGE_TRANSFER_SRC_BIT)) {
vuid = use_rp2 ? "VUID-vkCmdBeginRenderPass2KHR-initialLayout-03098" : "VUID-vkCmdBeginRenderPass-initialLayout-00898";
skip |=
log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(image), vuid,
- "Layout/usage mismatch for attachment %u in %s"
- " - the %s is %s but the image attached to %s via %s"
+ "Layout/usage mismatch for attachment %u in render pass %s"
+ " - the %s is %s but the image attached to framebuffer %s via image view %s"
" was not created with VK_IMAGE_USAGE_TRANSFER_SRC_BIT",
- attachment_index, report_data->FormatHandle(renderpass).c_str(), variable_name, string_VkImageLayout(layout),
- report_data->FormatHandle(framebuffer).c_str(), report_data->FormatHandle(image_view).c_str());
+ attachment_index, report_data->FormatHandle(framebuffer).c_str(), variable_name, string_VkImageLayout(layout),
+ report_data->FormatHandle(renderpass).c_str(), report_data->FormatHandle(image_view).c_str());
}
if (layout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL && !(image_usage & VK_IMAGE_USAGE_TRANSFER_DST_BIT)) {
vuid = use_rp2 ? "VUID-vkCmdBeginRenderPass2KHR-initialLayout-03099" : "VUID-vkCmdBeginRenderPass-initialLayout-00899";
skip |=
log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(image), vuid,
- "Layout/usage mismatch for attachment %u in %s"
- " - the %s is %s but the image attached to %s via %s"
+ "Layout/usage mismatch for attachment %u in render pass %s"
+ " - the %s is %s but the image attached to framebuffer %s via image view %s"
" was not created with VK_IMAGE_USAGE_TRANSFER_DST_BIT",
- attachment_index, report_data->FormatHandle(renderpass).c_str(), variable_name, string_VkImageLayout(layout),
- report_data->FormatHandle(framebuffer).c_str(), report_data->FormatHandle(image_view).c_str());
+ attachment_index, report_data->FormatHandle(framebuffer).c_str(), variable_name, string_VkImageLayout(layout),
+ report_data->FormatHandle(renderpass).c_str(), report_data->FormatHandle(image_view).c_str());
}
- if (device_extensions.vk_khr_maintenance2) {
+ if (GetDeviceExtensions()->vk_khr_maintenance2) {
if ((layout == VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL ||
layout == VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL ||
layout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL ||
@@ -435,11 +427,11 @@ bool CoreChecks::ValidateRenderPassLayoutAgainstFramebufferImageUsage(RenderPass
vuid = use_rp2 ? "VUID-vkCmdBeginRenderPass2KHR-initialLayout-03096" : "VUID-vkCmdBeginRenderPass-initialLayout-01758";
skip |= log_msg(
report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(image), vuid,
- "Layout/usage mismatch for attachment %u in %s"
- " - the %s is %s but the image attached to %s via %s"
+ "Layout/usage mismatch for attachment %u in render pass %s"
+ " - the %s is %s but the image attached to framebuffer %s via image view %s"
" was not created with VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT",
- attachment_index, report_data->FormatHandle(renderpass).c_str(), variable_name, string_VkImageLayout(layout),
- report_data->FormatHandle(framebuffer).c_str(), report_data->FormatHandle(image_view).c_str());
+ attachment_index, report_data->FormatHandle(framebuffer).c_str(), variable_name, string_VkImageLayout(layout),
+ report_data->FormatHandle(renderpass).c_str(), report_data->FormatHandle(image_view).c_str());
}
} else {
// The create render pass 2 extension requires maintenance 2 (the previous branch), so no vuid switch needed here.
@@ -448,24 +440,23 @@ bool CoreChecks::ValidateRenderPassLayoutAgainstFramebufferImageUsage(RenderPass
!(image_usage & VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
HandleToUint64(image), "VUID-vkCmdBeginRenderPass-initialLayout-00896",
- "Layout/usage mismatch for attachment %u in %s"
- " - the %s is %s but the image attached to %s via %s"
+ "Layout/usage mismatch for attachment %u in render pass %s"
+ " - the %s is %s but the image attached to framebuffer %s via image view %s"
" was not created with VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT",
- attachment_index, report_data->FormatHandle(renderpass).c_str(), variable_name,
- string_VkImageLayout(layout), report_data->FormatHandle(framebuffer).c_str(),
+ attachment_index, report_data->FormatHandle(framebuffer).c_str(), variable_name,
+ string_VkImageLayout(layout), report_data->FormatHandle(renderpass).c_str(),
report_data->FormatHandle(image_view).c_str());
}
}
return skip;
}
-bool CoreChecks::VerifyFramebufferAndRenderPassLayouts(RenderPassCreateVersion rp_version, const CMD_BUFFER_STATE *pCB,
- const VkRenderPassBeginInfo *pRenderPassBegin,
- const FRAMEBUFFER_STATE *framebuffer_state) const {
+bool CoreChecks::VerifyFramebufferAndRenderPassLayouts(layer_data *device_data, RenderPassCreateVersion rp_version,
+ GLOBAL_CB_NODE *pCB, const VkRenderPassBeginInfo *pRenderPassBegin,
+ const FRAMEBUFFER_STATE *framebuffer_state) {
bool skip = false;
auto const pRenderPassInfo = GetRenderPassState(pRenderPassBegin->renderPass)->createInfo.ptr();
auto const &framebufferInfo = framebuffer_state->createInfo;
- const VkImageView *attachments = framebufferInfo.pAttachments;
auto render_pass = GetRenderPassState(pRenderPassBegin->renderPass)->renderPass;
auto framebuffer = framebuffer_state->framebuffer;
@@ -475,124 +466,102 @@ bool CoreChecks::VerifyFramebufferAndRenderPassLayouts(RenderPassCreateVersion r
HandleToUint64(pCB->commandBuffer), kVUID_Core_DrawState_InvalidRenderpass,
"You cannot start a render pass using a framebuffer with a different number of attachments.");
}
+ for (uint32_t i = 0; i < pRenderPassInfo->attachmentCount; ++i) {
+ const VkImageView &image_view = framebufferInfo.pAttachments[i];
+ auto view_state = GetImageViewState(image_view);
- const auto *attachmentInfo = lvl_find_in_chain<VkRenderPassAttachmentBeginInfoKHR>(pRenderPassBegin->pNext);
- if (((framebufferInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) != 0) && attachmentInfo != nullptr) {
- attachments = attachmentInfo->pAttachments;
- }
-
- if (attachments != nullptr) {
- const auto *const_pCB = static_cast<const CMD_BUFFER_STATE *>(pCB);
- for (uint32_t i = 0; i < pRenderPassInfo->attachmentCount; ++i) {
- auto image_view = attachments[i];
- auto view_state = GetImageViewState(image_view);
-
- if (!view_state) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
- HandleToUint64(pRenderPassBegin->renderPass), "VUID-VkRenderPassBeginInfo-framebuffer-parameter",
- "vkCmdBeginRenderPass(): %s pAttachments[%" PRIu32 "] = %s is not a valid VkImageView handle",
- report_data->FormatHandle(framebuffer_state->framebuffer).c_str(), i,
- report_data->FormatHandle(image_view).c_str());
- continue;
- }
-
- const VkImage image = view_state->create_info.image;
- const IMAGE_STATE *image_state = GetImageState(image);
+ if (!view_state) {
+ skip |=
+ log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+ HandleToUint64(pRenderPassBegin->renderPass), "VUID-VkRenderPassBeginInfo-framebuffer-parameter",
+ "vkCmdBeginRenderPass(): framebuffer %s pAttachments[%" PRIu32 "] = %s is not a valid VkImageView handle",
+ report_data->FormatHandle(framebuffer_state->framebuffer).c_str(), i,
+ report_data->FormatHandle(image_view).c_str());
+ continue;
+ }
- if (!image_state) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
- HandleToUint64(pRenderPassBegin->renderPass), "VUID-VkRenderPassBeginInfo-framebuffer-parameter",
- "vkCmdBeginRenderPass(): %s pAttachments[%" PRIu32 "] = %s references non-extant %s.",
- report_data->FormatHandle(framebuffer_state->framebuffer).c_str(), i,
- report_data->FormatHandle(image_view).c_str(), report_data->FormatHandle(image).c_str());
- continue;
- }
- auto attachment_initial_layout = pRenderPassInfo->pAttachments[i].initialLayout;
- auto final_layout = pRenderPassInfo->pAttachments[i].finalLayout;
-
- // Cast pCB to const because we don't want to create entries that don't exist here (in case the key changes to something
- // in common with the non-const version.)
- const ImageSubresourceLayoutMap *subresource_map =
- (attachment_initial_layout != VK_IMAGE_LAYOUT_UNDEFINED) ? GetImageSubresourceLayoutMap(const_pCB, image) : nullptr;
-
- if (subresource_map) { // If no layout information for image yet, will be checked at QueueSubmit time
- LayoutUseCheckAndMessage layout_check(subresource_map);
- bool subres_skip = false;
- auto subresource_cb = [this, i, attachment_initial_layout, &layout_check, &subres_skip](
- const VkImageSubresource &subres, VkImageLayout layout, VkImageLayout initial_layout) {
- if (!layout_check.Check(subres, attachment_initial_layout, layout, initial_layout)) {
- subres_skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ const VkImage &image = view_state->create_info.image;
+ const VkImageSubresourceRange &subRange = view_state->create_info.subresourceRange;
+ auto initial_layout = pRenderPassInfo->pAttachments[i].initialLayout;
+ auto final_layout = pRenderPassInfo->pAttachments[i].finalLayout;
+
+ // TODO: Do not iterate over every possibility - consolidate where possible
+ for (uint32_t j = 0; j < subRange.levelCount; j++) {
+ uint32_t level = subRange.baseMipLevel + j;
+ for (uint32_t k = 0; k < subRange.layerCount; k++) {
+ uint32_t layer = subRange.baseArrayLayer + k;
+ VkImageSubresource sub = {subRange.aspectMask, level, layer};
+ IMAGE_CMD_BUF_LAYOUT_NODE node;
+ if (!FindCmdBufLayout(device_data, pCB, image, sub, node)) {
+ // Missing layouts will be added during state update
+ continue;
+ }
+ if (initial_layout != VK_IMAGE_LAYOUT_UNDEFINED && initial_layout != node.layout) {
+ skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
kVUID_Core_DrawState_InvalidRenderpass,
"You cannot start a render pass using attachment %u where the render pass initial layout is %s "
- "and the %s layout of the attachment is %s. The layouts must match, or the render "
+ "and the previous known layout of the attachment is %s. The layouts must match, or the render "
"pass initial layout for the attachment must be VK_IMAGE_LAYOUT_UNDEFINED",
- i, string_VkImageLayout(attachment_initial_layout), layout_check.message,
- string_VkImageLayout(layout_check.layout));
- }
- return !subres_skip; // quit checking subresources once we fail once
- };
-
- subresource_map->ForRange(view_state->normalized_subresource_range, subresource_cb);
- skip |= subres_skip;
+ i, string_VkImageLayout(initial_layout), string_VkImageLayout(node.layout));
+ }
}
+ }
- ValidateRenderPassLayoutAgainstFramebufferImageUsage(rp_version, attachment_initial_layout, image, image_view,
- framebuffer, render_pass, i, "initial layout");
+ ValidateRenderPassLayoutAgainstFramebufferImageUsage(device_data, rp_version, initial_layout, image, image_view,
+ framebuffer, render_pass, i, "initial layout");
- ValidateRenderPassLayoutAgainstFramebufferImageUsage(rp_version, final_layout, image, image_view, framebuffer,
- render_pass, i, "final layout");
- }
+ ValidateRenderPassLayoutAgainstFramebufferImageUsage(device_data, rp_version, final_layout, image, image_view, framebuffer,
+ render_pass, i, "final layout");
+ }
- for (uint32_t j = 0; j < pRenderPassInfo->subpassCount; ++j) {
- auto &subpass = pRenderPassInfo->pSubpasses[j];
- for (uint32_t k = 0; k < pRenderPassInfo->pSubpasses[j].inputAttachmentCount; ++k) {
- auto &attachment_ref = subpass.pInputAttachments[k];
- if (attachment_ref.attachment != VK_ATTACHMENT_UNUSED) {
- auto image_view = attachments[attachment_ref.attachment];
- auto view_state = GetImageViewState(image_view);
+ for (uint32_t j = 0; j < pRenderPassInfo->subpassCount; ++j) {
+ auto &subpass = pRenderPassInfo->pSubpasses[j];
+ for (uint32_t k = 0; k < pRenderPassInfo->pSubpasses[j].inputAttachmentCount; ++k) {
+ auto &attachment_ref = subpass.pInputAttachments[k];
+ if (attachment_ref.attachment != VK_ATTACHMENT_UNUSED) {
+ auto image_view = framebufferInfo.pAttachments[attachment_ref.attachment];
+ auto view_state = GetImageViewState(image_view);
- if (view_state) {
- auto image = view_state->create_info.image;
- ValidateRenderPassLayoutAgainstFramebufferImageUsage(rp_version, attachment_ref.layout, image, image_view,
- framebuffer, render_pass, attachment_ref.attachment,
- "input attachment layout");
- }
+ if (view_state) {
+ auto image = view_state->create_info.image;
+ ValidateRenderPassLayoutAgainstFramebufferImageUsage(device_data, rp_version, attachment_ref.layout, image,
+ image_view, framebuffer, render_pass,
+ attachment_ref.attachment, "input attachment layout");
}
}
+ }
- for (uint32_t k = 0; k < pRenderPassInfo->pSubpasses[j].colorAttachmentCount; ++k) {
- auto &attachment_ref = subpass.pColorAttachments[k];
- if (attachment_ref.attachment != VK_ATTACHMENT_UNUSED) {
- auto image_view = attachments[attachment_ref.attachment];
- auto view_state = GetImageViewState(image_view);
-
- if (view_state) {
- auto image = view_state->create_info.image;
- ValidateRenderPassLayoutAgainstFramebufferImageUsage(rp_version, attachment_ref.layout, image, image_view,
- framebuffer, render_pass, attachment_ref.attachment,
- "color attachment layout");
- if (subpass.pResolveAttachments) {
- ValidateRenderPassLayoutAgainstFramebufferImageUsage(
- rp_version, attachment_ref.layout, image, image_view, framebuffer, render_pass,
- attachment_ref.attachment, "resolve attachment layout");
- }
+ for (uint32_t k = 0; k < pRenderPassInfo->pSubpasses[j].colorAttachmentCount; ++k) {
+ auto &attachment_ref = subpass.pColorAttachments[k];
+ if (attachment_ref.attachment != VK_ATTACHMENT_UNUSED) {
+ auto image_view = framebufferInfo.pAttachments[attachment_ref.attachment];
+ auto view_state = GetImageViewState(image_view);
+
+ if (view_state) {
+ auto image = view_state->create_info.image;
+ ValidateRenderPassLayoutAgainstFramebufferImageUsage(device_data, rp_version, attachment_ref.layout, image,
+ image_view, framebuffer, render_pass,
+ attachment_ref.attachment, "color attachment layout");
+ if (subpass.pResolveAttachments) {
+ ValidateRenderPassLayoutAgainstFramebufferImageUsage(
+ device_data, rp_version, attachment_ref.layout, image, image_view, framebuffer, render_pass,
+ attachment_ref.attachment, "resolve attachment layout");
}
}
}
+ }
- if (pRenderPassInfo->pSubpasses[j].pDepthStencilAttachment) {
- auto &attachment_ref = *subpass.pDepthStencilAttachment;
- if (attachment_ref.attachment != VK_ATTACHMENT_UNUSED) {
- auto image_view = attachments[attachment_ref.attachment];
- auto view_state = GetImageViewState(image_view);
-
- if (view_state) {
- auto image = view_state->create_info.image;
- ValidateRenderPassLayoutAgainstFramebufferImageUsage(rp_version, attachment_ref.layout, image, image_view,
- framebuffer, render_pass, attachment_ref.attachment,
- "input attachment layout");
- }
+ if (pRenderPassInfo->pSubpasses[j].pDepthStencilAttachment) {
+ auto &attachment_ref = *subpass.pDepthStencilAttachment;
+ if (attachment_ref.attachment != VK_ATTACHMENT_UNUSED) {
+ auto image_view = framebufferInfo.pAttachments[attachment_ref.attachment];
+ auto view_state = GetImageViewState(image_view);
+
+ if (view_state) {
+ auto image = view_state->create_info.image;
+ ValidateRenderPassLayoutAgainstFramebufferImageUsage(device_data, rp_version, attachment_ref.layout, image,
+ image_view, framebuffer, render_pass,
+ attachment_ref.attachment, "input attachment layout");
}
}
}
@@ -600,49 +569,95 @@ bool CoreChecks::VerifyFramebufferAndRenderPassLayouts(RenderPassCreateVersion r
return skip;
}
-void CoreChecks::TransitionAttachmentRefLayout(CMD_BUFFER_STATE *pCB, FRAMEBUFFER_STATE *pFramebuffer,
+void CoreChecks::TransitionAttachmentRefLayout(layer_data *device_data, GLOBAL_CB_NODE *pCB, FRAMEBUFFER_STATE *pFramebuffer,
const safe_VkAttachmentReference2KHR &ref) {
if (ref.attachment != VK_ATTACHMENT_UNUSED) {
auto image_view = GetAttachmentImageViewState(pFramebuffer, ref.attachment);
if (image_view) {
- SetImageViewLayout(pCB, *image_view, ref.layout);
+ SetImageViewLayout(device_data, pCB, image_view, ref.layout);
}
}
}
-void CoreChecks::TransitionSubpassLayouts(CMD_BUFFER_STATE *pCB, const RENDER_PASS_STATE *render_pass_state,
+void CoreChecks::TransitionSubpassLayouts(layer_data *device_data, GLOBAL_CB_NODE *pCB, const RENDER_PASS_STATE *render_pass_state,
const int subpass_index, FRAMEBUFFER_STATE *framebuffer_state) {
assert(render_pass_state);
if (framebuffer_state) {
auto const &subpass = render_pass_state->createInfo.pSubpasses[subpass_index];
for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
- TransitionAttachmentRefLayout(pCB, framebuffer_state, subpass.pInputAttachments[j]);
+ TransitionAttachmentRefLayout(device_data, pCB, framebuffer_state, subpass.pInputAttachments[j]);
}
for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
- TransitionAttachmentRefLayout(pCB, framebuffer_state, subpass.pColorAttachments[j]);
+ TransitionAttachmentRefLayout(device_data, pCB, framebuffer_state, subpass.pColorAttachments[j]);
}
if (subpass.pDepthStencilAttachment) {
- TransitionAttachmentRefLayout(pCB, framebuffer_state, *subpass.pDepthStencilAttachment);
+ TransitionAttachmentRefLayout(device_data, pCB, framebuffer_state, *subpass.pDepthStencilAttachment);
}
}
}
+bool CoreChecks::ValidateImageAspectLayout(layer_data *device_data, GLOBAL_CB_NODE const *pCB,
+ const VkImageMemoryBarrier *mem_barrier, uint32_t level, uint32_t layer,
+ VkImageAspectFlags aspect) {
+ if (!(mem_barrier->subresourceRange.aspectMask & aspect)) {
+ return false;
+ }
+ VkImageSubresource sub = {aspect, level, layer};
+ IMAGE_CMD_BUF_LAYOUT_NODE node;
+ if (!FindCmdBufLayout(device_data, pCB, mem_barrier->image, sub, node)) {
+ return false;
+ }
+ bool skip = false;
+ if (mem_barrier->oldLayout == VK_IMAGE_LAYOUT_UNDEFINED) {
+ // TODO: Set memory invalid which is in mem_tracker currently
+ } else if (node.layout != mem_barrier->oldLayout) {
+ skip = log_msg(
+ report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ HandleToUint64(pCB->commandBuffer), "VUID-VkImageMemoryBarrier-oldLayout-01197",
+ "For image %s you cannot transition the layout of aspect=%d level=%d layer=%d from %s when current layout is %s.",
+ report_data->FormatHandle(mem_barrier->image).c_str(), aspect, level, layer,
+ string_VkImageLayout(mem_barrier->oldLayout), string_VkImageLayout(node.layout));
+ }
+ return skip;
+}
+
// Transition the layout state for renderpass attachments based on the BeginRenderPass() call. This includes:
// 1. Transition into initialLayout state
// 2. Transition from initialLayout to layout used in subpass 0
-void CoreChecks::TransitionBeginRenderPassLayouts(CMD_BUFFER_STATE *cb_state, const RENDER_PASS_STATE *render_pass_state,
+void CoreChecks::TransitionBeginRenderPassLayouts(layer_data *device_data, GLOBAL_CB_NODE *cb_state,
+ const RENDER_PASS_STATE *render_pass_state,
FRAMEBUFFER_STATE *framebuffer_state) {
// First transition into initialLayout
auto const rpci = render_pass_state->createInfo.ptr();
for (uint32_t i = 0; i < rpci->attachmentCount; ++i) {
auto view_state = GetAttachmentImageViewState(framebuffer_state, i);
if (view_state) {
- SetImageViewLayout(cb_state, *view_state, rpci->pAttachments[i].initialLayout);
+ SetImageViewLayout(device_data, cb_state, view_state, rpci->pAttachments[i].initialLayout);
}
}
// Now transition for first subpass (index 0)
- TransitionSubpassLayouts(cb_state, render_pass_state, 0, framebuffer_state);
+ TransitionSubpassLayouts(device_data, cb_state, render_pass_state, 0, framebuffer_state);
+}
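
// Illustration only: how the implicit render pass transitions map onto an attachment
// description. All values are placeholders.
static void AttachmentImplicitTransitionsExample() {
    VkAttachmentDescription color_attachment = {};
    color_attachment.format = VK_FORMAT_B8G8R8A8_UNORM;
    color_attachment.samples = VK_SAMPLE_COUNT_1_BIT;
    color_attachment.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
    color_attachment.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
    color_attachment.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;      // recorded first at vkCmdBeginRenderPass
    color_attachment.finalLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;  // recorded when the render pass ends

    VkAttachmentReference color_ref = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};  // layout used by subpass 0
    // TransitionBeginRenderPassLayouts records UNDEFINED for the attachment, then TransitionSubpassLayouts
    // records the subpass-0 transition to COLOR_ATTACHMENT_OPTIMAL; the final transition to
    // PRESENT_SRC_KHR is recorded later by TransitionFinalSubpassLayouts.
    (void)color_attachment;
    (void)color_ref;
}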
+
+void CoreChecks::TransitionImageAspectLayout(layer_data *device_data, GLOBAL_CB_NODE *pCB, const VkImageMemoryBarrier *mem_barrier,
+ uint32_t level, uint32_t layer, VkImageAspectFlags aspect_mask,
+ VkImageAspectFlags aspect) {
+ if (!(aspect_mask & aspect)) {
+ return;
+ }
+ VkImageSubresource sub = {aspect, level, layer};
+ IMAGE_CMD_BUF_LAYOUT_NODE node;
+ if (!FindCmdBufLayout(device_data, pCB, mem_barrier->image, sub, node)) {
+ pCB->image_layout_change_count++; // Change the version of this data to force revalidation
+ SetLayout(device_data, pCB, mem_barrier->image, sub,
+ IMAGE_CMD_BUF_LAYOUT_NODE(mem_barrier->oldLayout, mem_barrier->newLayout));
+ return;
+ }
+ if (mem_barrier->oldLayout == VK_IMAGE_LAYOUT_UNDEFINED) {
+ // TODO: Set memory invalid
+ }
+ SetLayout(device_data, pCB, mem_barrier->image, sub, mem_barrier->newLayout);
}
bool VerifyAspectsPresent(VkImageAspectFlags aspect_mask, VkFormat format) {
@@ -663,11 +678,10 @@ bool VerifyAspectsPresent(VkImageAspectFlags aspect_mask, VkFormat format) {
}
// Verify an ImageMemoryBarrier's old/new ImageLayouts are compatible with the Image's ImageUsageFlags.
-bool CoreChecks::ValidateBarrierLayoutToImageUsage(const VkImageMemoryBarrier &img_barrier, bool new_not_old,
- VkImageUsageFlags usage_flags, const char *func_name,
- const char *barrier_pname) {
+bool CoreChecks::ValidateBarrierLayoutToImageUsage(layer_data *device_data, const VkImageMemoryBarrier *img_barrier,
+ bool new_not_old, VkImageUsageFlags usage_flags, const char *func_name) {
bool skip = false;
- const VkImageLayout layout = (new_not_old) ? img_barrier.newLayout : img_barrier.oldLayout;
+ const VkImageLayout layout = (new_not_old) ? img_barrier->newLayout : img_barrier->oldLayout;
const char *msg_code = kVUIDUndefined; // sentinel value meaning "no error"
switch (layout) {
@@ -713,147 +727,142 @@ bool CoreChecks::ValidateBarrierLayoutToImageUsage(const VkImageMemoryBarrier &i
if (msg_code != kVUIDUndefined) {
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
- HandleToUint64(img_barrier.image), msg_code,
- "%s: Image barrier %s %s Layout=%s is not compatible with %s usage flags 0x%" PRIx32 ".", func_name,
- barrier_pname, ((new_not_old) ? "new" : "old"), string_VkImageLayout(layout),
- report_data->FormatHandle(img_barrier.image).c_str(), usage_flags);
+ HandleToUint64(img_barrier->image), msg_code,
+ "%s: Image barrier 0x%p %sLayout=%s is not compatible with image %s usage flags 0x%" PRIx32 ".", func_name,
+ static_cast<const void *>(img_barrier), ((new_not_old) ? "new" : "old"), string_VkImageLayout(layout),
+ report_data->FormatHandle(img_barrier->image).c_str(), usage_flags);
}
return skip;
}
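
// Illustration only: a compact sketch of the layout-to-usage requirements enforced by the switch
// elided in the hunk above. Layouts not listed here carry no usage requirement in this check.
static bool LayoutCompatibleWithUsageSketch(VkImageLayout layout, VkImageUsageFlags usage) {
    switch (layout) {
        case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
            return (usage & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) != 0;
        case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
        case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL:
            return (usage & VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) != 0;
        case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
            return (usage & (VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT)) != 0;
        case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:
            return (usage & VK_IMAGE_USAGE_TRANSFER_SRC_BIT) != 0;
        case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:
            return (usage & VK_IMAGE_USAGE_TRANSFER_DST_BIT) != 0;
        default:
            return true;
    }
}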
+// Scoreboard for checking for duplicate and inconsistent barriers to images
+struct ImageBarrierScoreboardEntry {
+ uint32_t index;
+ // This is designed for temporary storage within the scope of the API call. If retained storage of the barriers is
+    // required, copies should be made and smart or unique pointers used in some other structure (or this one refactored)
+ const VkImageMemoryBarrier *barrier;
+};
+using ImageBarrierScoreboardSubresMap = std::unordered_map<VkImageSubresourceRange, ImageBarrierScoreboardEntry>;
+using ImageBarrierScoreboardImageMap = std::unordered_map<VkImage, ImageBarrierScoreboardSubresMap>;
+
// Verify image barriers are compatible with the images they reference.
-bool CoreChecks::ValidateBarriersToImages(CMD_BUFFER_STATE const *cb_state, uint32_t imageMemoryBarrierCount,
+bool CoreChecks::ValidateBarriersToImages(layer_data *device_data, GLOBAL_CB_NODE const *cb_state, uint32_t imageMemoryBarrierCount,
const VkImageMemoryBarrier *pImageMemoryBarriers, const char *func_name) {
bool skip = false;
- // Scoreboard for checking for duplicate and inconsistent barriers to images
- struct ImageBarrierScoreboardEntry {
- uint32_t index;
- // This is designed for temporary storage within the scope of the API call. If retained storage of the barriers is
-        // required, copies should be made and smart or unique pointers used in some other structure (or this one refactored)
- const VkImageMemoryBarrier *barrier;
- };
- using ImageBarrierScoreboardSubresMap = std::unordered_map<VkImageSubresourceRange, ImageBarrierScoreboardEntry>;
- using ImageBarrierScoreboardImageMap = std::unordered_map<VkImage, ImageBarrierScoreboardSubresMap>;
-
// Scoreboard for duplicate layout transition barriers within the list
// Pointers retained in the scoreboard only have the lifetime of *this* call (i.e. within the scope of the API call)
ImageBarrierScoreboardImageMap layout_transitions;
for (uint32_t i = 0; i < imageMemoryBarrierCount; ++i) {
- const auto &img_barrier = pImageMemoryBarriers[i];
- const std::string barrier_pname = "pImageMemoryBarrier[" + std::to_string(i) + "]";
+ auto img_barrier = &pImageMemoryBarriers[i];
+ if (!img_barrier) continue;
// Update the scoreboard of layout transitions and check for barriers affecting the same image and subresource
// TODO: a higher precision could be gained by adapting the command_buffer image_layout_map logic looking for conflicts
// at a per sub-resource level
- if (img_barrier.oldLayout != img_barrier.newLayout) {
- const ImageBarrierScoreboardEntry new_entry{i, &img_barrier};
- const auto image_it = layout_transitions.find(img_barrier.image);
+ if (img_barrier->oldLayout != img_barrier->newLayout) {
+ ImageBarrierScoreboardEntry new_entry{i, img_barrier};
+ auto image_it = layout_transitions.find(img_barrier->image);
if (image_it != layout_transitions.end()) {
auto &subres_map = image_it->second;
- auto subres_it = subres_map.find(img_barrier.subresourceRange);
+ auto subres_it = subres_map.find(img_barrier->subresourceRange);
if (subres_it != subres_map.end()) {
auto &entry = subres_it->second;
- if ((entry.barrier->newLayout != img_barrier.oldLayout) &&
- (img_barrier.oldLayout != VK_IMAGE_LAYOUT_UNDEFINED)) {
- const VkImageSubresourceRange &range = img_barrier.subresourceRange;
+ if ((entry.barrier->newLayout != img_barrier->oldLayout) &&
+ (img_barrier->oldLayout != VK_IMAGE_LAYOUT_UNDEFINED)) {
+ const VkImageSubresourceRange &range = img_barrier->subresourceRange;
skip = log_msg(
report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(cb_state->commandBuffer), "VUID-VkImageMemoryBarrier-oldLayout-01197",
- "%s: %s conflicts with earlier entry pImageMemoryBarrier[%u]. %s"
+ "%s: pImageMemoryBarrier[%u] conflicts with earlier entry pImageMemoryBarrier[%u]. Image %s"
" subresourceRange: aspectMask=%u baseMipLevel=%u levelCount=%u, baseArrayLayer=%u, layerCount=%u; "
"conflicting barrier transitions image layout from %s when earlier barrier transitioned to layout %s.",
- func_name, barrier_pname.c_str(), entry.index, report_data->FormatHandle(img_barrier.image).c_str(),
- range.aspectMask, range.baseMipLevel, range.levelCount, range.baseArrayLayer, range.layerCount,
- string_VkImageLayout(img_barrier.oldLayout), string_VkImageLayout(entry.barrier->newLayout));
+ func_name, i, entry.index, report_data->FormatHandle(img_barrier->image).c_str(), range.aspectMask,
+ range.baseMipLevel, range.levelCount, range.baseArrayLayer, range.layerCount,
+ string_VkImageLayout(img_barrier->oldLayout), string_VkImageLayout(entry.barrier->newLayout));
}
entry = new_entry;
} else {
- subres_map[img_barrier.subresourceRange] = new_entry;
+ subres_map[img_barrier->subresourceRange] = new_entry;
}
} else {
- layout_transitions[img_barrier.image][img_barrier.subresourceRange] = new_entry;
+ layout_transitions[img_barrier->image][img_barrier->subresourceRange] = new_entry;
}
}
- auto image_state = GetImageState(img_barrier.image);
+ auto image_state = GetImageState(img_barrier->image);
if (image_state) {
VkImageUsageFlags usage_flags = image_state->createInfo.usage;
- skip |= ValidateBarrierLayoutToImageUsage(img_barrier, false, usage_flags, func_name, barrier_pname.c_str());
- skip |= ValidateBarrierLayoutToImageUsage(img_barrier, true, usage_flags, func_name, barrier_pname.c_str());
+ skip |= ValidateBarrierLayoutToImageUsage(device_data, img_barrier, false, usage_flags, func_name);
+ skip |= ValidateBarrierLayoutToImageUsage(device_data, img_barrier, true, usage_flags, func_name);
// Make sure layout is able to be transitioned, currently only presented shared presentable images are locked
if (image_state->layout_locked) {
// TODO: Add unique id for error when available
skip |= log_msg(
report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
- HandleToUint64(img_barrier.image), 0,
- "Attempting to transition shared presentable %s"
+ HandleToUint64(img_barrier->image), 0,
+ "Attempting to transition shared presentable image %s"
" from layout %s to layout %s, but image has already been presented and cannot have its layout transitioned.",
- report_data->FormatHandle(img_barrier.image).c_str(), string_VkImageLayout(img_barrier.oldLayout),
- string_VkImageLayout(img_barrier.newLayout));
+ report_data->FormatHandle(img_barrier->image).c_str(), string_VkImageLayout(img_barrier->oldLayout),
+ string_VkImageLayout(img_barrier->newLayout));
}
+ }
- VkImageCreateInfo *image_create_info = &image_state->createInfo;
- // For a Depth/Stencil image both aspects MUST be set
- if (FormatIsDepthAndStencil(image_create_info->format)) {
- auto const aspect_mask = img_barrier.subresourceRange.aspectMask;
- auto const ds_mask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
- if ((aspect_mask & ds_mask) != (ds_mask)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
- HandleToUint64(img_barrier.image), "VUID-VkImageMemoryBarrier-image-01207",
- "%s: Image barrier %s references %s of format %s that must have the depth and stencil "
- "aspects set, but its aspectMask is 0x%" PRIx32 ".",
- func_name, barrier_pname.c_str(), report_data->FormatHandle(img_barrier.image).c_str(),
- string_VkFormat(image_create_info->format), aspect_mask);
- }
+ VkImageCreateInfo *image_create_info = &(GetImageState(img_barrier->image)->createInfo);
+ // For a Depth/Stencil image both aspects MUST be set
+ if (FormatIsDepthAndStencil(image_create_info->format)) {
+ auto const aspect_mask = img_barrier->subresourceRange.aspectMask;
+ auto const ds_mask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
+ if ((aspect_mask & ds_mask) != (ds_mask)) {
+ skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+ HandleToUint64(img_barrier->image), "VUID-VkImageMemoryBarrier-image-01207",
+ "%s: Image barrier 0x%p references image %s of format %s that must have the depth and stencil "
+ "aspects set, but its aspectMask is 0x%" PRIx32 ".",
+ func_name, static_cast<const void *>(img_barrier),
+ report_data->FormatHandle(img_barrier->image).c_str(), string_VkFormat(image_create_info->format),
+ aspect_mask);
}
-
- const auto *subresource_map = GetImageSubresourceLayoutMap(cb_state, img_barrier.image);
- if (img_barrier.oldLayout == VK_IMAGE_LAYOUT_UNDEFINED) {
- // TODO: Set memory invalid which is in mem_tracker currently
- // Not sure if this needs to be in the ForRange traversal, pulling it out as it is currently invariant with
- // subresource.
- } else if (subresource_map) {
- bool subres_skip = false;
- LayoutUseCheckAndMessage layout_check(subresource_map);
- VkImageSubresourceRange normalized_isr = NormalizeSubresourceRange(*image_state, img_barrier.subresourceRange);
- auto subres_callback = [this, img_barrier, cb_state, &layout_check, &subres_skip](
- const VkImageSubresource &subres, VkImageLayout layout, VkImageLayout initial_layout) {
- if (!layout_check.Check(subres, img_barrier.oldLayout, layout, initial_layout)) {
- subres_skip =
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(cb_state->commandBuffer), "VUID-VkImageMemoryBarrier-oldLayout-01197",
- "For %s you cannot transition the layout of aspect=%d level=%d layer=%d from %s when the "
- "%s layout is %s.",
- report_data->FormatHandle(img_barrier.image).c_str(), subres.aspectMask, subres.mipLevel,
- subres.arrayLayer, string_VkImageLayout(img_barrier.oldLayout), layout_check.message,
- string_VkImageLayout(layout_check.layout));
- }
- return !subres_skip;
- };
- subresource_map->ForRange(normalized_isr, subres_callback);
- skip |= subres_skip;
+ }
+ uint32_t level_count = ResolveRemainingLevels(&img_barrier->subresourceRange, image_create_info->mipLevels);
+ uint32_t layer_count = ResolveRemainingLayers(&img_barrier->subresourceRange, image_create_info->arrayLayers);
+
+ for (uint32_t j = 0; j < level_count; j++) {
+ uint32_t level = img_barrier->subresourceRange.baseMipLevel + j;
+ for (uint32_t k = 0; k < layer_count; k++) {
+ uint32_t layer = img_barrier->subresourceRange.baseArrayLayer + k;
+ skip |= ValidateImageAspectLayout(device_data, cb_state, img_barrier, level, layer, VK_IMAGE_ASPECT_COLOR_BIT);
+ skip |= ValidateImageAspectLayout(device_data, cb_state, img_barrier, level, layer, VK_IMAGE_ASPECT_DEPTH_BIT);
+ skip |= ValidateImageAspectLayout(device_data, cb_state, img_barrier, level, layer, VK_IMAGE_ASPECT_STENCIL_BIT);
+ skip |= ValidateImageAspectLayout(device_data, cb_state, img_barrier, level, layer, VK_IMAGE_ASPECT_METADATA_BIT);
+ if (GetDeviceExtensions()->vk_khr_sampler_ycbcr_conversion) {
+ skip |= ValidateImageAspectLayout(device_data, cb_state, img_barrier, level, layer,
+ VK_IMAGE_ASPECT_PLANE_0_BIT_KHR);
+ skip |= ValidateImageAspectLayout(device_data, cb_state, img_barrier, level, layer,
+ VK_IMAGE_ASPECT_PLANE_1_BIT_KHR);
+ skip |= ValidateImageAspectLayout(device_data, cb_state, img_barrier, level, layer,
+ VK_IMAGE_ASPECT_PLANE_2_BIT_KHR);
+ }
}
}
}
return skip;
}
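
// Illustration only (not part of this change): the application-side pattern the scoreboard in
// ValidateBarriersToImages catches -- two barriers in one vkCmdPipelineBarrier call touch the
// same image and subresourceRange, and the second one's oldLayout does not match the layout the
// first one transitioned to. The function name and parameters are placeholders.
static void RecordConflictingBarriersExample(VkCommandBuffer cmd, VkImage image) {
    VkImageMemoryBarrier barriers[2] = {};
    barriers[0].sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
    barriers[0].srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    barriers[0].dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    barriers[0].image = image;
    barriers[0].subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
    barriers[0].oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    barriers[0].newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
    barriers[1] = barriers[0];
    barriers[1].oldLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;  // conflicts with barriers[0].newLayout
    barriers[1].newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
    // The layer reports VUID-VkImageMemoryBarrier-oldLayout-01197 for barriers[1], since the
    // earlier scoreboard entry left the subresource in TRANSFER_DST_OPTIMAL.
    vkCmdPipelineBarrier(cmd, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0,
                         0, nullptr, 0, nullptr, 2, barriers);
}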
-bool CoreChecks::IsReleaseOp(CMD_BUFFER_STATE *cb_state, const VkImageMemoryBarrier &barrier) const {
- if (!IsTransferOp(&barrier)) return false;
+bool CoreChecks::IsReleaseOp(GLOBAL_CB_NODE *cb_state, VkImageMemoryBarrier const *barrier) {
+ if (!IsTransferOp(barrier)) return false;
- auto pool = GetCommandPoolState(cb_state->createInfo.commandPool);
- return pool && TempIsReleaseOp<VkImageMemoryBarrier, true>(pool, &barrier);
+ auto pool = GetCommandPoolNode(cb_state->createInfo.commandPool);
+ return pool && TempIsReleaseOp<VkImageMemoryBarrier, true>(pool, barrier);
}
template <typename Barrier>
-bool CoreChecks::ValidateQFOTransferBarrierUniqueness(const char *func_name, CMD_BUFFER_STATE *cb_state, uint32_t barrier_count,
- const Barrier *barriers) {
+bool CoreChecks::ValidateQFOTransferBarrierUniqueness(layer_data *device_data, const char *func_name, GLOBAL_CB_NODE *cb_state,
+ uint32_t barrier_count, const Barrier *barriers) {
using BarrierRecord = QFOTransferBarrier<Barrier>;
bool skip = false;
- auto pool = GetCommandPoolState(cb_state->createInfo.commandPool);
+ auto pool = GetCommandPoolNode(cb_state->createInfo.commandPool);
auto &barrier_sets = GetQFOBarrierSets(cb_state, typename BarrierRecord::Tag());
const char *barrier_name = BarrierRecord::BarrierName();
const char *handle_name = BarrierRecord::HandleName();
@@ -890,8 +899,9 @@ bool CoreChecks::ValidateQFOTransferBarrierUniqueness(const char *func_name, CMD
}
template <typename Barrier>
-void CoreChecks::RecordQFOTransferBarriers(CMD_BUFFER_STATE *cb_state, uint32_t barrier_count, const Barrier *barriers) {
- auto pool = GetCommandPoolState(cb_state->createInfo.commandPool);
+void CoreChecks::RecordQFOTransferBarriers(layer_data *device_data, GLOBAL_CB_NODE *cb_state, uint32_t barrier_count,
+ const Barrier *barriers) {
+ auto pool = GetCommandPoolNode(cb_state->createInfo.commandPool);
auto &barrier_sets = GetQFOBarrierSets(cb_state, typename QFOTransferBarrier<Barrier>::Tag());
for (uint32_t b = 0; b < barrier_count; b++) {
if (!IsTransferOp(&barriers[b])) continue;
@@ -905,26 +915,26 @@ void CoreChecks::RecordQFOTransferBarriers(CMD_BUFFER_STATE *cb_state, uint32_t
}
}
-bool CoreChecks::ValidateBarriersQFOTransferUniqueness(const char *func_name, CMD_BUFFER_STATE *cb_state,
+bool CoreChecks::ValidateBarriersQFOTransferUniqueness(layer_data *device_data, const char *func_name, GLOBAL_CB_NODE *cb_state,
uint32_t bufferBarrierCount, const VkBufferMemoryBarrier *pBufferMemBarriers,
uint32_t imageMemBarrierCount,
const VkImageMemoryBarrier *pImageMemBarriers) {
bool skip = false;
- skip |= ValidateQFOTransferBarrierUniqueness(func_name, cb_state, bufferBarrierCount, pBufferMemBarriers);
- skip |= ValidateQFOTransferBarrierUniqueness(func_name, cb_state, imageMemBarrierCount, pImageMemBarriers);
+ skip |= ValidateQFOTransferBarrierUniqueness(device_data, func_name, cb_state, bufferBarrierCount, pBufferMemBarriers);
+ skip |= ValidateQFOTransferBarrierUniqueness(device_data, func_name, cb_state, imageMemBarrierCount, pImageMemBarriers);
return skip;
}
-void CoreChecks::RecordBarriersQFOTransfers(CMD_BUFFER_STATE *cb_state, uint32_t bufferBarrierCount,
+void CoreChecks::RecordBarriersQFOTransfers(layer_data *device_data, GLOBAL_CB_NODE *cb_state, uint32_t bufferBarrierCount,
const VkBufferMemoryBarrier *pBufferMemBarriers, uint32_t imageMemBarrierCount,
const VkImageMemoryBarrier *pImageMemBarriers) {
- RecordQFOTransferBarriers(cb_state, bufferBarrierCount, pBufferMemBarriers);
- RecordQFOTransferBarriers(cb_state, imageMemBarrierCount, pImageMemBarriers);
+ RecordQFOTransferBarriers(device_data, cb_state, bufferBarrierCount, pBufferMemBarriers);
+ RecordQFOTransferBarriers(device_data, cb_state, imageMemBarrierCount, pImageMemBarriers);
}
template <typename BarrierRecord, typename Scoreboard>
-bool CoreChecks::ValidateAndUpdateQFOScoreboard(const debug_report_data *report_data, const CMD_BUFFER_STATE *cb_state,
- const char *operation, const BarrierRecord &barrier, Scoreboard *scoreboard) const {
+bool CoreChecks::ValidateAndUpdateQFOScoreboard(const debug_report_data *report_data, const GLOBAL_CB_NODE *cb_state,
+ const char *operation, const BarrierRecord &barrier, Scoreboard *scoreboard) {
// Record to the scoreboard or report that we have a duplication
bool skip = false;
auto inserted = scoreboard->insert(std::make_pair(barrier, cb_state));
@@ -933,17 +943,17 @@ bool CoreChecks::ValidateAndUpdateQFOScoreboard(const debug_report_data *report_
skip = log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(cb_state->commandBuffer), BarrierRecord::ErrMsgDuplicateQFOInSubmit(),
"%s: %s %s queue ownership of %s (%s), from srcQueueFamilyIndex %" PRIu32 " to dstQueueFamilyIndex %" PRIu32
- " duplicates existing barrier submitted in this batch from %s.",
+ " duplicates existing barrier submitted in this batch from command buffer %s.",
"vkQueueSubmit()", BarrierRecord::BarrierName(), operation, BarrierRecord::HandleName(),
report_data->FormatHandle(barrier.handle).c_str(), barrier.srcQueueFamilyIndex, barrier.dstQueueFamilyIndex,
- report_data->FormatHandle(inserted.first->second->commandBuffer).c_str());
+ report_data->FormatHandle(inserted.first->second).c_str());
}
return skip;
}
template <typename Barrier>
-bool CoreChecks::ValidateQueuedQFOTransferBarriers(const CMD_BUFFER_STATE *cb_state,
- QFOTransferCBScoreboards<Barrier> *scoreboards) const {
+bool CoreChecks::ValidateQueuedQFOTransferBarriers(layer_data *device_data, GLOBAL_CB_NODE *cb_state,
+ QFOTransferCBScoreboards<Barrier> *scoreboards) {
using BarrierRecord = QFOTransferBarrier<Barrier>;
using TypeTag = typename BarrierRecord::Tag;
bool skip = false;
@@ -991,17 +1001,17 @@ bool CoreChecks::ValidateQueuedQFOTransferBarriers(const CMD_BUFFER_STATE *cb_st
return skip;
}
-bool CoreChecks::ValidateQueuedQFOTransfers(const CMD_BUFFER_STATE *cb_state,
+bool CoreChecks::ValidateQueuedQFOTransfers(layer_data *device_data, GLOBAL_CB_NODE *cb_state,
QFOTransferCBScoreboards<VkImageMemoryBarrier> *qfo_image_scoreboards,
- QFOTransferCBScoreboards<VkBufferMemoryBarrier> *qfo_buffer_scoreboards) const {
+ QFOTransferCBScoreboards<VkBufferMemoryBarrier> *qfo_buffer_scoreboards) {
bool skip = false;
- skip |= ValidateQueuedQFOTransferBarriers<VkImageMemoryBarrier>(cb_state, qfo_image_scoreboards);
- skip |= ValidateQueuedQFOTransferBarriers<VkBufferMemoryBarrier>(cb_state, qfo_buffer_scoreboards);
+ skip |= ValidateQueuedQFOTransferBarriers<VkImageMemoryBarrier>(device_data, cb_state, qfo_image_scoreboards);
+ skip |= ValidateQueuedQFOTransferBarriers<VkBufferMemoryBarrier>(device_data, cb_state, qfo_buffer_scoreboards);
return skip;
}
template <typename Barrier>
-void CoreChecks::RecordQueuedQFOTransferBarriers(CMD_BUFFER_STATE *cb_state) {
+void CoreChecks::RecordQueuedQFOTransferBarriers(layer_data *device_data, GLOBAL_CB_NODE *cb_state) {
using BarrierRecord = QFOTransferBarrier<Barrier>;
using TypeTag = typename BarrierRecord::Tag;
const auto &cb_barriers = GetQFOBarrierSets(cb_state, TypeTag());
@@ -1028,18 +1038,21 @@ void CoreChecks::RecordQueuedQFOTransferBarriers(CMD_BUFFER_STATE *cb_state) {
}
}
-void CoreChecks::RecordQueuedQFOTransfers(CMD_BUFFER_STATE *cb_state) {
- RecordQueuedQFOTransferBarriers<VkImageMemoryBarrier>(cb_state);
- RecordQueuedQFOTransferBarriers<VkBufferMemoryBarrier>(cb_state);
+void CoreChecks::RecordQueuedQFOTransfers(layer_data *device_data, GLOBAL_CB_NODE *cb_state) {
+ RecordQueuedQFOTransferBarriers<VkImageMemoryBarrier>(device_data, cb_state);
+ RecordQueuedQFOTransferBarriers<VkBufferMemoryBarrier>(device_data, cb_state);
}
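
// Illustration only: the two-sided queue family ownership (QFO) transfer that the QFO helpers
// above track across command buffers. The same barrier is recorded once as a release on the
// source family and once as an acquire on the destination family. All names are placeholders.
static void QfoTransferExample(VkCommandBuffer transfer_cmd, VkCommandBuffer graphics_cmd, VkImage image,
                               uint32_t transfer_family, uint32_t graphics_family) {
    VkImageMemoryBarrier qfo = {};
    qfo.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
    qfo.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;  // ignored on the acquire side
    qfo.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;     // ignored on the release side
    qfo.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
    qfo.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
    qfo.srcQueueFamilyIndex = transfer_family;  // releasing family
    qfo.dstQueueFamilyIndex = graphics_family;  // acquiring family
    qfo.image = image;
    qfo.subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
    // Release: recorded in a command buffer allocated from the transfer family's pool.
    vkCmdPipelineBarrier(transfer_cmd, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 0,
                         0, nullptr, 0, nullptr, 1, &qfo);
    // Acquire: the identical barrier, recorded in a command buffer from the graphics family's pool.
    vkCmdPipelineBarrier(graphics_cmd, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, 0,
                         0, nullptr, 0, nullptr, 1, &qfo);
    // IsReleaseOp distinguishes the two sides by comparing the recording pool's queue family with
    // srcQueueFamilyIndex; the scoreboards then flag duplicate releases/acquires within a single
    // queue submission and warn when an acquire has no matching release.
}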
// Avoid making the template globally visible by exporting the one instance of it we need.
-void CoreChecks::EraseQFOImageRelaseBarriers(const VkImage &image) { EraseQFOReleaseBarriers<VkImageMemoryBarrier>(image); }
+void CoreChecks::EraseQFOImageRelaseBarriers(layer_data *device_data, const VkImage &image) {
+ EraseQFOReleaseBarriers<VkImageMemoryBarrier>(device_data, image);
+}
-void CoreChecks::TransitionImageLayouts(CMD_BUFFER_STATE *cb_state, uint32_t memBarrierCount,
+void CoreChecks::TransitionImageLayouts(layer_data *device_data, GLOBAL_CB_NODE *cb_state, uint32_t memBarrierCount,
const VkImageMemoryBarrier *pImgMemBarriers) {
for (uint32_t i = 0; i < memBarrierCount; ++i) {
- const auto &mem_barrier = pImgMemBarriers[i];
+ auto mem_barrier = &pImgMemBarriers[i];
+ if (!mem_barrier) continue;
// For ownership transfers, the barrier is specified twice; as a release
// operation on the yielding queue family, and as an acquire operation
@@ -1052,56 +1065,80 @@ void CoreChecks::TransitionImageLayouts(CMD_BUFFER_STATE *cb_state, uint32_t mem
continue;
}
- auto *image_state = GetImageState(mem_barrier.image);
- if (!image_state) continue;
-
- VkImageSubresourceRange normalized_isr = NormalizeSubresourceRange(*image_state, mem_barrier.subresourceRange);
- const auto &image_create_info = image_state->createInfo;
+ VkImageCreateInfo *image_create_info = &(GetImageState(mem_barrier->image)->createInfo);
+ uint32_t level_count = ResolveRemainingLevels(&mem_barrier->subresourceRange, image_create_info->mipLevels);
+ uint32_t layer_count = ResolveRemainingLayers(&mem_barrier->subresourceRange, image_create_info->arrayLayers);
// Special case for 3D images with VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR flag bit, where <extent.depth> and
// <arrayLayers> can potentially alias. When recording layout for the entire image, pre-emptively record layouts
// for all (potential) layer sub_resources.
- if (0 != (image_create_info.flags & VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR)) {
- normalized_isr.baseArrayLayer = 0;
- normalized_isr.layerCount = image_create_info.extent.depth; // Treat each depth slice as a layer subresource
+ if ((0 != (image_create_info->flags & VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR)) &&
+ (mem_barrier->subresourceRange.baseArrayLayer == 0) && (layer_count == 1)) {
+ layer_count = image_create_info->extent.depth; // Treat each depth slice as a layer subresource
+ }
+
+ // For multiplanar formats, IMAGE_ASPECT_COLOR is equivalent to adding the aspect of the individual planes
+ VkImageAspectFlags aspect_mask = mem_barrier->subresourceRange.aspectMask;
+ if (GetDeviceExtensions()->vk_khr_sampler_ycbcr_conversion) {
+ if (FormatIsMultiplane(image_create_info->format)) {
+ if (aspect_mask & VK_IMAGE_ASPECT_COLOR_BIT) {
+ aspect_mask &= ~VK_IMAGE_ASPECT_COLOR_BIT;
+ aspect_mask |= (VK_IMAGE_ASPECT_PLANE_0_BIT | VK_IMAGE_ASPECT_PLANE_1_BIT);
+ if (FormatPlaneCount(image_create_info->format) > 2) {
+ aspect_mask |= VK_IMAGE_ASPECT_PLANE_2_BIT;
+ }
+ }
+ }
}
- SetImageLayout(cb_state, *image_state, normalized_isr, mem_barrier.newLayout, mem_barrier.oldLayout);
+ for (uint32_t j = 0; j < level_count; j++) {
+ uint32_t level = mem_barrier->subresourceRange.baseMipLevel + j;
+ for (uint32_t k = 0; k < layer_count; k++) {
+ uint32_t layer = mem_barrier->subresourceRange.baseArrayLayer + k;
+ TransitionImageAspectLayout(device_data, cb_state, mem_barrier, level, layer, aspect_mask,
+ VK_IMAGE_ASPECT_COLOR_BIT);
+ TransitionImageAspectLayout(device_data, cb_state, mem_barrier, level, layer, aspect_mask,
+ VK_IMAGE_ASPECT_DEPTH_BIT);
+ TransitionImageAspectLayout(device_data, cb_state, mem_barrier, level, layer, aspect_mask,
+ VK_IMAGE_ASPECT_STENCIL_BIT);
+ TransitionImageAspectLayout(device_data, cb_state, mem_barrier, level, layer, aspect_mask,
+ VK_IMAGE_ASPECT_METADATA_BIT);
+ if (GetDeviceExtensions()->vk_khr_sampler_ycbcr_conversion) {
+ TransitionImageAspectLayout(device_data, cb_state, mem_barrier, level, layer, aspect_mask,
+ VK_IMAGE_ASPECT_PLANE_0_BIT_KHR);
+ TransitionImageAspectLayout(device_data, cb_state, mem_barrier, level, layer, aspect_mask,
+ VK_IMAGE_ASPECT_PLANE_1_BIT_KHR);
+ TransitionImageAspectLayout(device_data, cb_state, mem_barrier, level, layer, aspect_mask,
+ VK_IMAGE_ASPECT_PLANE_2_BIT_KHR);
+ }
+ }
+ }
}
}
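
// Illustration only: the kind of application barrier whose layout change TransitionImageLayouts
// records. VK_REMAINING_MIP_LEVELS / VK_REMAINING_ARRAY_LAYERS are what ResolveRemainingLevels /
// ResolveRemainingLayers resolve against the image's creation parameters. Names are placeholders.
static void RecordWholeImageTransitionExample(VkCommandBuffer cmd, VkImage image) {
    VkImageMemoryBarrier barrier = {};
    barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
    barrier.srcAccessMask = 0;
    barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
    barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    barrier.image = image;
    barrier.subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, VK_REMAINING_MIP_LEVELS, 0, VK_REMAINING_ARRAY_LAYERS};
    vkCmdPipelineBarrier(cmd, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0,
                         0, nullptr, 0, nullptr, 1, &barrier);
    // After recording, every level/layer/aspect covered by the range carries TRANSFER_DST_OPTIMAL in
    // the command buffer's layout map, so later copies and barriers are checked against it.
}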
-bool CoreChecks::VerifyImageLayout(const CMD_BUFFER_STATE *cb_node, const IMAGE_STATE *image_state,
- const VkImageSubresourceRange &range, VkImageAspectFlags aspect_mask,
- VkImageLayout explicit_layout, VkImageLayout optimal_layout, const char *caller,
- const char *layout_invalid_msg_code, const char *layout_mismatch_msg_code, bool *error) const {
- if (disabled.image_layout_validation) return false;
- assert(cb_node);
- assert(image_state);
+bool CoreChecks::VerifyImageLayout(layer_data const *device_data, GLOBAL_CB_NODE const *cb_node, IMAGE_STATE *image_state,
+ VkImageSubresourceLayers subLayers, VkImageLayout explicit_layout, VkImageLayout optimal_layout,
+ const char *caller, const char *layout_invalid_msg_code, const char *layout_mismatch_msg_code,
+ bool *error) {
const auto image = image_state->image;
bool skip = false;
- const auto *subresource_map = GetImageSubresourceLayoutMap(cb_node, image);
- if (subresource_map) {
- bool subres_skip = false;
- LayoutUseCheckAndMessage layout_check(subresource_map, aspect_mask);
- auto subresource_cb = [this, explicit_layout, cb_node, layout_mismatch_msg_code, caller, image, &layout_check, &error,
- &subres_skip](const VkImageSubresource &subres, VkImageLayout layout, VkImageLayout initial_layout) {
- if (!layout_check.Check(subres, explicit_layout, layout, initial_layout)) {
+ for (uint32_t i = 0; i < subLayers.layerCount; ++i) {
+ uint32_t layer = i + subLayers.baseArrayLayer;
+ VkImageSubresource sub = {subLayers.aspectMask, subLayers.mipLevel, layer};
+ IMAGE_CMD_BUF_LAYOUT_NODE node;
+ if (FindCmdBufLayout(device_data, cb_node, image, sub, node)) {
+ if (node.layout != explicit_layout) {
*error = true;
- subres_skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(cb_node->commandBuffer), layout_mismatch_msg_code,
- "%s: Cannot use %s (layer=%u mip=%u) with specific layout %s that doesn't match the "
- "%s layout %s.",
- caller, report_data->FormatHandle(image).c_str(), subres.arrayLayer, subres.mipLevel,
- string_VkImageLayout(explicit_layout), layout_check.message, string_VkImageLayout(layout_check.layout));
+ skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ HandleToUint64(cb_node->commandBuffer), layout_mismatch_msg_code,
+ "%s: Cannot use image %s (layer=%u mip=%u) with specific layout %s that doesn't match the actual "
+ "current layout %s.",
+ caller, report_data->FormatHandle(image).c_str(), layer, subLayers.mipLevel,
+ string_VkImageLayout(explicit_layout), string_VkImageLayout(node.layout));
}
- return !subres_skip;
- };
- subresource_map->ForRange(range, subresource_cb);
- skip |= subres_skip;
+ }
}
-
// If optimal_layout is not UNDEFINED, check that layout matches optimal for this case
if ((VK_IMAGE_LAYOUT_UNDEFINED != optimal_layout) && (explicit_layout != optimal_layout)) {
if (VK_IMAGE_LAYOUT_GENERAL == explicit_layout) {
@@ -1110,10 +1147,10 @@ bool CoreChecks::VerifyImageLayout(const CMD_BUFFER_STATE *cb_node, const IMAGE_
skip |= log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, HandleToUint64(cb_node->commandBuffer),
kVUID_Core_DrawState_InvalidImageLayout,
- "%s: For optimal performance %s layout should be %s instead of GENERAL.", caller,
+ "%s: For optimal performance image %s layout should be %s instead of GENERAL.", caller,
report_data->FormatHandle(image).c_str(), string_VkImageLayout(optimal_layout));
}
- } else if (device_extensions.vk_khr_shared_presentable_image) {
+ } else if (GetDeviceExtensions()->vk_khr_shared_presentable_image) {
if (image_state->shared_presentable) {
if (VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR != explicit_layout) {
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
@@ -1126,22 +1163,16 @@ bool CoreChecks::VerifyImageLayout(const CMD_BUFFER_STATE *cb_node, const IMAGE_
*error = true;
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(cb_node->commandBuffer), layout_invalid_msg_code,
- "%s: Layout for %s is %s but can only be %s or VK_IMAGE_LAYOUT_GENERAL.", caller,
+ "%s: Layout for image %s is %s but can only be %s or VK_IMAGE_LAYOUT_GENERAL.", caller,
report_data->FormatHandle(image).c_str(), string_VkImageLayout(explicit_layout),
string_VkImageLayout(optimal_layout));
}
}
return skip;
}
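// --- Illustrative sketch, not part of this diff: the essence of the per-layer layout
// --- check performed by VerifyImageLayout above. The map key and helper name below are
// --- invented for illustration; the real tracker uses FindCmdBufLayout and
// --- IMAGE_CMD_BUF_LAYOUT_NODE rather than a plain std::map.
// #include <vulkan/vulkan.h>
// #include <map>
// #include <tuple>
//
// using SubresKey = std::tuple<VkImageAspectFlags, uint32_t, uint32_t>;  // aspect, mip, layer
//
// // Returns true when every tracked layer already matches the layout the caller expects.
// static bool LayersMatchExpectedLayout(const std::map<SubresKey, VkImageLayout> &recorded,
//                                       const VkImageSubresourceLayers &sub, VkImageLayout expected) {
//     for (uint32_t i = 0; i < sub.layerCount; ++i) {
//         const SubresKey key{sub.aspectMask, sub.mipLevel, sub.baseArrayLayer + i};
//         const auto it = recorded.find(key);
//         if (it != recorded.end() && it->second != expected) return false;  // mismatch -> error reported above
//     }
//     return true;  // untracked subresources are not flagged, mirroring a FindCmdBufLayout miss
// }
// --- end sketch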
-bool CoreChecks::VerifyImageLayout(const CMD_BUFFER_STATE *cb_node, const IMAGE_STATE *image_state,
- const VkImageSubresourceLayers &subLayers, VkImageLayout explicit_layout,
- VkImageLayout optimal_layout, const char *caller, const char *layout_invalid_msg_code,
- const char *layout_mismatch_msg_code, bool *error) const {
- return VerifyImageLayout(cb_node, image_state, RangeFromLayers(subLayers), explicit_layout, optimal_layout, caller,
- layout_invalid_msg_code, layout_mismatch_msg_code, error);
-}
-void CoreChecks::TransitionFinalSubpassLayouts(CMD_BUFFER_STATE *pCB, const VkRenderPassBeginInfo *pRenderPassBegin,
+void CoreChecks::TransitionFinalSubpassLayouts(layer_data *device_data, GLOBAL_CB_NODE *pCB,
+ const VkRenderPassBeginInfo *pRenderPassBegin,
FRAMEBUFFER_STATE *framebuffer_state) {
auto renderPass = GetRenderPassState(pRenderPassBegin->renderPass);
if (!renderPass) return;
@@ -1151,7 +1182,7 @@ void CoreChecks::TransitionFinalSubpassLayouts(CMD_BUFFER_STATE *pCB, const VkRe
for (uint32_t i = 0; i < pRenderPassInfo->attachmentCount; ++i) {
auto view_state = GetAttachmentImageViewState(framebuffer_state, i);
if (view_state) {
- SetImageViewLayout(pCB, *view_state, pRenderPassInfo->pAttachments[i].finalLayout);
+ SetImageViewLayout(device_data, pCB, view_state, pRenderPassInfo->pAttachments[i].finalLayout);
}
}
}
@@ -1164,7 +1195,8 @@ void CoreChecks::TransitionFinalSubpassLayouts(CMD_BUFFER_STATE *pCB, const VkRe
//
// AHB-specific validation within non-AHB APIs
//
-bool CoreChecks::ValidateCreateImageANDROID(const debug_report_data *report_data, const VkImageCreateInfo *create_info) {
+bool CoreChecks::ValidateCreateImageANDROID(layer_data *device_data, const debug_report_data *report_data,
+ const VkImageCreateInfo *create_info) {
bool skip = false;
const VkExternalFormatANDROID *ext_fmt_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
@@ -1200,7 +1232,8 @@ bool CoreChecks::ValidateCreateImageANDROID(const debug_report_data *report_data
}
}
- if ((0 != ext_fmt_android->externalFormat) && (0 == ahb_ext_formats_set.count(ext_fmt_android->externalFormat))) {
+ auto ahb_formats = GetAHBExternalFormatsSet();
+ if ((0 != ext_fmt_android->externalFormat) && (0 == ahb_formats->count(ext_fmt_android->externalFormat))) {
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-VkExternalFormatANDROID-externalFormat-01894",
"vkCreateImage(): Chained VkExternalFormatANDROID struct contains a non-zero externalFormat which has "
@@ -1241,7 +1274,7 @@ bool CoreChecks::ValidateCreateImageANDROID(const debug_report_data *report_data
return skip;
}
-void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {
+void CoreChecks::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {
const VkExternalMemoryImageCreateInfo *emici = lvl_find_in_chain<VkExternalMemoryImageCreateInfo>(create_info->pNext);
if (emici && (emici->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)) {
is_node->imported_ahb = true;
@@ -1253,7 +1286,7 @@ void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *c
}
}
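// --- Illustrative sketch, not part of this diff: RecordCreateImageANDROID above relies on
// --- lvl_find_in_chain<> to pull a typed struct out of the create info's pNext chain. This is
// --- roughly what that lookup does; FindInChain is an invented name, only the VkBaseInStructure
// --- walk itself is standard Vulkan usage.
// #include <vulkan/vulkan.h>
//
// static const void *FindInChain(const void *pNext, VkStructureType wanted) {
//     for (auto node = reinterpret_cast<const VkBaseInStructure *>(pNext); node; node = node->pNext) {
//         if (node->sType == wanted) return node;  // first match wins
//     }
//     return nullptr;  // struct not present in the chain
// }
//
// // Hypothetical usage, mirroring the emici lookup above:
// // auto emici = static_cast<const VkExternalMemoryImageCreateInfo *>(
// //     FindInChain(create_info->pNext, VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO));
// --- end sketch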
-bool CoreChecks::ValidateCreateImageViewANDROID(const VkImageViewCreateInfo *create_info) {
+bool CoreChecks::ValidateCreateImageViewANDROID(layer_data *device_data, const VkImageViewCreateInfo *create_info) {
bool skip = false;
IMAGE_STATE *image_state = GetImageState(create_info->image);
@@ -1272,9 +1305,10 @@ bool CoreChecks::ValidateCreateImageViewANDROID(const VkImageViewCreateInfo *cre
const VkSamplerYcbcrConversionInfo *ycbcr_conv_info = lvl_find_in_chain<VkSamplerYcbcrConversionInfo>(create_info->pNext);
if (ycbcr_conv_info != nullptr) {
VkSamplerYcbcrConversion conv_handle = ycbcr_conv_info->conversion;
- if (ycbcr_conversion_ahb_fmt_map.find(conv_handle) != ycbcr_conversion_ahb_fmt_map.end()) {
+ auto fmap = GetYcbcrConversionFormatMap();
+ if (fmap->find(conv_handle) != fmap->end()) {
conv_found = true;
- external_format = ycbcr_conversion_ahb_fmt_map.at(conv_handle);
+ external_format = fmap->at(conv_handle);
}
}
if ((!conv_found) || (external_format != image_state->ahb_format)) {
@@ -1299,10 +1333,10 @@ bool CoreChecks::ValidateCreateImageViewANDROID(const VkImageViewCreateInfo *cre
return skip;
}
-bool CoreChecks::ValidateGetImageSubresourceLayoutANDROID(const VkImage image) const {
+bool CoreChecks::ValidateGetImageSubresourceLayoutANDROID(layer_data *device_data, const VkImage image) {
bool skip = false;
- const IMAGE_STATE *image_state = GetImageState(image);
+ IMAGE_STATE *image_state = GetImageState(image);
if (image_state->imported_ahb && (0 == image_state->GetBoundMemory().size())) {
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(image),
"VUID-vkGetImageSubresourceLayout-image-01895",
@@ -1315,24 +1349,26 @@ bool CoreChecks::ValidateGetImageSubresourceLayoutANDROID(const VkImage image) c
#else
-bool CoreChecks::ValidateCreateImageANDROID(const debug_report_data *report_data, const VkImageCreateInfo *create_info) {
+bool CoreChecks::ValidateCreateImageANDROID(layer_data *device_data, const debug_report_data *report_data,
+ const VkImageCreateInfo *create_info) {
return false;
}
-void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {}
+void CoreChecks::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {}
-bool CoreChecks::ValidateCreateImageViewANDROID(const VkImageViewCreateInfo *create_info) { return false; }
+bool CoreChecks::ValidateCreateImageViewANDROID(layer_data *device_data, const VkImageViewCreateInfo *create_info) { return false; }
-bool CoreChecks::ValidateGetImageSubresourceLayoutANDROID(const VkImage image) const { return false; }
+bool CoreChecks::ValidateGetImageSubresourceLayoutANDROID(layer_data *device_data, const VkImage image) { return false; }
#endif // VK_USE_PLATFORM_ANDROID_KHR
bool CoreChecks::PreCallValidateCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator, VkImage *pImage) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
bool skip = false;
- if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
- skip |= ValidateCreateImageANDROID(report_data, pCreateInfo);
+ if (GetDeviceExtensions()->vk_android_external_memory_android_hardware_buffer) {
+ skip |= ValidateCreateImageANDROID(device_data, report_data, pCreateInfo);
} else { // These checks are omitted or replaced when Android HW Buffer extension is active
if (pCreateInfo->format == VK_FORMAT_UNDEFINED) {
return log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
@@ -1341,24 +1377,14 @@ bool CoreChecks::PreCallValidateCreateImage(VkDevice device, const VkImageCreate
}
}
- if (pCreateInfo->flags & VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT) {
- if (VK_IMAGE_TYPE_2D != pCreateInfo->imageType) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkImageCreateInfo-flags-00949",
- "vkCreateImage(): Image type must be VK_IMAGE_TYPE_2D when VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT "
- "flag bit is set");
- }
-
- if ((pCreateInfo->extent.width != pCreateInfo->extent.height) || (pCreateInfo->arrayLayers < 6)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkImageCreateInfo-imageType-00954",
- "vkCreateImage(): If VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT flag bit is set, width (%d) must equal "
- "height (%d) and arrayLayers (%d) must be >= 6.",
- pCreateInfo->extent.width, pCreateInfo->extent.height, pCreateInfo->arrayLayers);
- }
+ if ((pCreateInfo->flags & VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT) && (VK_IMAGE_TYPE_2D != pCreateInfo->imageType)) {
+ skip |= log_msg(
+ report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ "VUID-VkImageCreateInfo-flags-00949",
+ "vkCreateImage(): Image type must be VK_IMAGE_TYPE_2D when VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT flag bit is set");
}
- const VkPhysicalDeviceLimits *device_limits = &phys_dev_props.limits;
+ const VkPhysicalDeviceLimits *device_limits = &(GetPDProperties()->limits);
VkImageUsageFlags attach_flags = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT |
VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
if ((pCreateInfo->usage & attach_flags) && (pCreateInfo->extent.width > device_limits->maxFramebufferWidth)) {
@@ -1375,45 +1401,12 @@ bool CoreChecks::PreCallValidateCreateImage(VkDevice device, const VkImageCreate
"maxFramebufferHeight");
}
- if (device_extensions.vk_ext_fragment_density_map) {
- uint32_t ceiling_width =
- (uint32_t)ceil((float)device_limits->maxFramebufferWidth /
- std::max((float)phys_dev_ext_props.fragment_density_map_props.minFragmentDensityTexelSize.width, 1.0f));
- if ((pCreateInfo->usage & VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT) && (pCreateInfo->extent.width > ceiling_width)) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkImageCreateInfo-usage-02559",
- "vkCreateImage(): Image usage flags include a fragment density map bit and image width (%u) exceeds the "
- "ceiling of device "
- "maxFramebufferWidth (%u) / minFragmentDensityTexelSize.width (%u). The ceiling value: %u",
- pCreateInfo->extent.width, device_limits->maxFramebufferWidth,
- phys_dev_ext_props.fragment_density_map_props.minFragmentDensityTexelSize.width, ceiling_width);
- }
-
- uint32_t ceiling_height =
- (uint32_t)ceil((float)device_limits->maxFramebufferHeight /
- std::max((float)phys_dev_ext_props.fragment_density_map_props.minFragmentDensityTexelSize.height, 1.0f));
- if ((pCreateInfo->usage & VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT) && (pCreateInfo->extent.height > ceiling_height)) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkImageCreateInfo-usage-02560",
- "vkCreateImage(): Image usage flags include a fragment density map bit and image height (%u) exceeds the "
- "ceiling of device "
- "maxFramebufferHeight (%u) / minFragmentDensityTexelSize.height (%u). The ceiling value: %u",
- pCreateInfo->extent.height, device_limits->maxFramebufferHeight,
- phys_dev_ext_props.fragment_density_map_props.minFragmentDensityTexelSize.height, ceiling_height);
- }
- }
-
VkImageFormatProperties format_limits = {};
VkResult res = GetPDImageFormatProperties(pCreateInfo, &format_limits);
if (res == VK_ERROR_FORMAT_NOT_SUPPORTED) {
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
- if (!lvl_find_in_chain<VkExternalFormatANDROID>(pCreateInfo->pNext))
-#endif // VK_USE_PLATFORM_ANDROID_KHR
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, kVUIDUndefined,
- "vkCreateImage(): Format %s is not supported for this combination of parameters.",
- string_VkFormat(pCreateInfo->format));
+ skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, kVUIDUndefined,
+ "vkCreateImage(): Format %s is not supported for this combination of parameters.",
+ string_VkFormat(pCreateInfo->format));
} else {
if (pCreateInfo->mipLevels > format_limits.maxMipLevels) {
const char *format_string = string_VkFormat(pCreateInfo->format);
@@ -1429,7 +1422,7 @@ bool CoreChecks::PreCallValidateCreateImage(VkDevice device, const VkImageCreate
uint64_t total_size = (uint64_t)std::ceil(FormatTexelSize(pCreateInfo->format) * texel_count);
// Round up to imageGranularity boundary
- VkDeviceSize imageGranularity = phys_dev_props.limits.bufferImageGranularity;
+ VkDeviceSize imageGranularity = GetPDProperties()->limits.bufferImageGranularity;
uint64_t ig_mask = imageGranularity - 1;
total_size = (total_size + ig_mask) & ~ig_mask;
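// --- Illustrative sketch, not part of this diff: the two lines above round total_size up to
// --- bufferImageGranularity with a mask, which assumes a power-of-two granularity (the same
// --- assumption the layer code makes). RoundUpToGranularity is an invented helper name.
// #include <cstdint>
// #include <cassert>
//
// static uint64_t RoundUpToGranularity(uint64_t size, uint64_t granularity) {
//     assert(granularity != 0 && (granularity & (granularity - 1)) == 0);  // power of two
//     const uint64_t mask = granularity - 1;
//     return (size + mask) & ~mask;
// }
// // e.g. RoundUpToGranularity(1000, 1024) == 1024 and RoundUpToGranularity(2048, 1024) == 2048
// --- end sketch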
@@ -1448,15 +1441,6 @@ bool CoreChecks::PreCallValidateCreateImage(VkDevice device, const VkImageCreate
pCreateInfo->arrayLayers, format_limits.maxArrayLayers);
}
- if (device_extensions.vk_khr_sampler_ycbcr_conversion && FormatRequiresYcbcrConversion(pCreateInfo->format) &&
- !device_extensions.vk_ext_ycbcr_image_arrays && pCreateInfo->arrayLayers > 1) {
- skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 0,
- "VUID-VkImageCreateInfo-format-02653",
- "vkCreateImage(): arrayLayers=%d exceeds the maximum allowed of 1 for formats requiring sampler ycbcr conversion",
- pCreateInfo->arrayLayers);
- }
-
if ((pCreateInfo->samples & format_limits.sampleCounts) == 0) {
skip |=
log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 0,
@@ -1465,14 +1449,14 @@ bool CoreChecks::PreCallValidateCreateImage(VkDevice device, const VkImageCreate
}
}
- if ((pCreateInfo->flags & VK_IMAGE_CREATE_SPARSE_ALIASED_BIT) && (!enabled_features.core.sparseResidencyAliased)) {
+ if ((pCreateInfo->flags & VK_IMAGE_CREATE_SPARSE_ALIASED_BIT) && (!GetEnabledFeatures()->core.sparseResidencyAliased)) {
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-VkImageCreateInfo-flags-01924",
"vkCreateImage(): the sparseResidencyAliased device feature is disabled: Images cannot be created with the "
"VK_IMAGE_CREATE_SPARSE_ALIASED_BIT set.");
}
- if (device_extensions.vk_khr_maintenance2) {
+ if (GetDeviceExtensions()->vk_khr_maintenance2) {
if (pCreateInfo->flags & VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR) {
if (!(FormatIsCompressed_BC(pCreateInfo->format) || FormatIsCompressed_ASTC_LDR(pCreateInfo->format) ||
FormatIsCompressed_ETC2_EAC(pCreateInfo->format))) {
@@ -1494,98 +1478,71 @@ bool CoreChecks::PreCallValidateCreateImage(VkDevice device, const VkImageCreate
}
if (pCreateInfo->sharingMode == VK_SHARING_MODE_CONCURRENT && pCreateInfo->pQueueFamilyIndices) {
- skip |= ValidateQueueFamilies(pCreateInfo->queueFamilyIndexCount, pCreateInfo->pQueueFamilyIndices, "vkCreateImage",
- "pCreateInfo->pQueueFamilyIndices", "VUID-VkImageCreateInfo-sharingMode-01420",
- "VUID-VkImageCreateInfo-sharingMode-01420", false);
+ skip |=
+ ValidateQueueFamilies(device_data, pCreateInfo->queueFamilyIndexCount, pCreateInfo->pQueueFamilyIndices,
+ "vkCreateImage", "pCreateInfo->pQueueFamilyIndices", "VUID-VkImageCreateInfo-sharingMode-01420",
+ "VUID-VkImageCreateInfo-sharingMode-01420", false);
}
return skip;
}
-void ValidationStateTracker::PostCallRecordCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
- const VkAllocationCallbacks *pAllocator, VkImage *pImage, VkResult result) {
- if (VK_SUCCESS != result) return;
- std::unique_ptr<IMAGE_STATE> is_node(new IMAGE_STATE(*pImage, pCreateInfo));
- if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
- RecordCreateImageANDROID(pCreateInfo, is_node.get());
- }
- const auto swapchain_info = lvl_find_in_chain<VkImageSwapchainCreateInfoKHR>(pCreateInfo->pNext);
- if (swapchain_info) {
- is_node->create_from_swapchain = swapchain_info->swapchain;
- }
-
- bool pre_fetch_memory_reqs = true;
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
- if (is_node->external_format_android) {
- // Do not fetch requirements for external memory images
- pre_fetch_memory_reqs = false;
- }
-#endif
- // Record the memory requirements in case they won't be queried
- if (pre_fetch_memory_reqs) {
- DispatchGetImageMemoryRequirements(device, *pImage, &is_node->requirements);
- }
- imageMap.insert(std::make_pair(*pImage, std::move(is_node)));
-}
-
void CoreChecks::PostCallRecordCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator, VkImage *pImage, VkResult result) {
if (VK_SUCCESS != result) return;
-
- StateTracker::PostCallRecordCreateImage(device, pCreateInfo, pAllocator, pImage, result);
-
- IMAGE_LAYOUT_STATE image_state;
+ IMAGE_LAYOUT_NODE image_state;
image_state.layout = pCreateInfo->initialLayout;
image_state.format = pCreateInfo->format;
+ IMAGE_STATE *is_node = new IMAGE_STATE(*pImage, pCreateInfo);
+ if (GetDeviceExtensions()->vk_android_external_memory_android_hardware_buffer) {
+ RecordCreateImageANDROID(pCreateInfo, is_node);
+ }
+ GetImageMap()->insert(std::make_pair(*pImage, std::unique_ptr<IMAGE_STATE>(is_node)));
ImageSubresourcePair subpair{*pImage, false, VkImageSubresource()};
- imageSubresourceMap[*pImage].push_back(subpair);
- imageLayoutMap[subpair] = image_state;
+ (*GetImageSubresourceMap())[*pImage].push_back(subpair);
+ (*GetImageLayoutMap())[subpair] = image_state;
}
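// --- Illustrative sketch, not part of this diff: the global layout bookkeeping that
// --- PostCallRecordCreateImage seeds above, reduced to a toy map. The key and value types here
// --- are invented; the layer's real key is ImageSubresourcePair and the value IMAGE_LAYOUT_NODE.
// #include <vulkan/vulkan.h>
// #include <map>
//
// struct ToyLayoutEntry {
//     VkImageLayout layout;
//     VkFormat format;
// };
//
// static void SeedInitialLayout(std::map<VkImage, ToyLayoutEntry> &layout_map, VkImage image,
//                               const VkImageCreateInfo &ci) {
//     // A freshly created image starts in whatever layout the app requested
//     // (UNDEFINED or PREINITIALIZED), so that is what gets recorded first.
//     layout_map[image] = {ci.initialLayout, ci.format};
// }
// --- end sketch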
bool CoreChecks::PreCallValidateDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
IMAGE_STATE *image_state = GetImageState(image);
- const VulkanTypedHandle obj_struct(image, kVulkanObjectTypeImage);
+ const VK_OBJECT obj_struct = {HandleToUint64(image), kVulkanObjectTypeImage};
bool skip = false;
if (image_state) {
- skip |= ValidateObjectNotInUse(image_state, obj_struct, "vkDestroyImage", "VUID-vkDestroyImage-image-01000");
+ skip |= ValidateObjectNotInUse(device_data, image_state, obj_struct, "vkDestroyImage", "VUID-vkDestroyImage-image-01000");
}
return skip;
}
-void ValidationStateTracker::PreCallRecordDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
+void CoreChecks::PreCallRecordDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (!image) return;
IMAGE_STATE *image_state = GetImageState(image);
- const VulkanTypedHandle obj_struct(image, kVulkanObjectTypeImage);
- InvalidateCommandBuffers(image_state->cb_bindings, obj_struct);
+ VK_OBJECT obj_struct = {HandleToUint64(image), kVulkanObjectTypeImage};
+ InvalidateCommandBuffers(device_data, image_state->cb_bindings, obj_struct);
// Clean up memory mapping, bindings and range references for image
for (auto mem_binding : image_state->GetBoundMemory()) {
- auto mem_info = GetDevMemState(mem_binding);
+ auto mem_info = GetMemObjInfo(mem_binding);
if (mem_info) {
RemoveImageMemoryRange(obj_struct.handle, mem_info);
}
}
- ClearMemoryObjectBindings(obj_struct);
+ ClearMemoryObjectBindings(obj_struct.handle, kVulkanObjectTypeImage);
+ EraseQFOReleaseBarriers<VkImageMemoryBarrier>(device_data, image);
// Remove image from imageMap
- imageMap.erase(image);
-}
-
-void CoreChecks::PreCallRecordDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
- // Clean up validation specific data
- EraseQFOReleaseBarriers<VkImageMemoryBarrier>(image);
+ GetImageMap()->erase(image);
+ std::unordered_map<VkImage, std::vector<ImageSubresourcePair>> *imageSubresourceMap = GetImageSubresourceMap();
- const auto &sub_entry = imageSubresourceMap.find(image);
- if (sub_entry != imageSubresourceMap.end()) {
+ const auto &sub_entry = imageSubresourceMap->find(image);
+ if (sub_entry != imageSubresourceMap->end()) {
for (const auto &pair : sub_entry->second) {
- imageLayoutMap.erase(pair);
+ GetImageLayoutMap()->erase(pair);
}
- imageSubresourceMap.erase(sub_entry);
+ imageSubresourceMap->erase(sub_entry);
}
-
- // Clean up generic image state
- StateTracker::PreCallRecordDestroyImage(device, image, pAllocator);
}
-bool CoreChecks::ValidateImageAttributes(const IMAGE_STATE *image_state, const VkImageSubresourceRange &range) const {
+bool CoreChecks::ValidateImageAttributes(layer_data *device_data, IMAGE_STATE *image_state, VkImageSubresourceRange range) {
bool skip = false;
if (range.aspectMask != VK_IMAGE_ASPECT_COLOR_BIT) {
@@ -1630,11 +1587,13 @@ uint32_t ResolveRemainingLayers(const VkImageSubresourceRange *range, uint32_t l
return array_layer_count;
}
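// --- Illustrative sketch, not part of this diff: ResolveRemainingLevels/ResolveRemainingLayers
// --- above translate the VK_REMAINING_* sentinels into concrete counts against the image's
// --- createInfo. ResolveCount is an invented name for the shared idea.
// #include <vulkan/vulkan.h>
//
// static uint32_t ResolveCount(uint32_t requested, uint32_t base, uint32_t total) {
//     // VK_REMAINING_MIP_LEVELS and VK_REMAINING_ARRAY_LAYERS share the same sentinel value (~0U).
//     return (requested == VK_REMAINING_ARRAY_LAYERS) ? (total - base) : requested;
// }
//
// // Hypothetical usage: layer_count = ResolveCount(range.layerCount, range.baseArrayLayer,
// //                                                image_create_info.arrayLayers);
// --- end sketch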
-bool CoreChecks::VerifyClearImageLayout(const CMD_BUFFER_STATE *cb_node, const IMAGE_STATE *image_state,
- const VkImageSubresourceRange &range, VkImageLayout dest_image_layout,
- const char *func_name) const {
+bool CoreChecks::VerifyClearImageLayout(layer_data *device_data, GLOBAL_CB_NODE *cb_node, IMAGE_STATE *image_state,
+ VkImageSubresourceRange range, VkImageLayout dest_image_layout, const char *func_name) {
bool skip = false;
+ uint32_t level_count = ResolveRemainingLevels(&range, image_state->createInfo.mipLevels);
+ uint32_t layer_count = ResolveRemainingLayers(&range, image_state->createInfo.arrayLayers);
+
if (dest_image_layout != VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL) {
if (dest_image_layout == VK_IMAGE_LAYOUT_GENERAL) {
if (image_state->createInfo.tiling != VK_IMAGE_TILING_LINEAR) {
@@ -1644,7 +1603,7 @@ bool CoreChecks::VerifyClearImageLayout(const CMD_BUFFER_STATE *cb_node, const I
"%s: Layout for cleared image should be TRANSFER_DST_OPTIMAL instead of GENERAL.", func_name);
}
} else if (VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR == dest_image_layout) {
- if (!device_extensions.vk_khr_shared_presentable_image) {
+ if (!GetDeviceExtensions()->vk_khr_shared_presentable_image) {
// TODO: Add unique error id when available.
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
HandleToUint64(image_state->image), 0,
@@ -1674,84 +1633,93 @@ bool CoreChecks::VerifyClearImageLayout(const CMD_BUFFER_STATE *cb_node, const I
}
}
- // Cast to const to prevent creation at validate time.
- const auto *subresource_map = GetImageSubresourceLayoutMap(cb_node, image_state->image);
- if (subresource_map) {
- bool subres_skip = false;
- LayoutUseCheckAndMessage layout_check(subresource_map);
- VkImageSubresourceRange normalized_isr = NormalizeSubresourceRange(*image_state, range);
- auto subres_callback = [this, cb_node, dest_image_layout, func_name, &layout_check, &subres_skip](
- const VkImageSubresource &subres, VkImageLayout layout, VkImageLayout initial_layout) {
- if (!layout_check.Check(subres, dest_image_layout, layout, initial_layout)) {
- const char *error_code = "VUID-vkCmdClearColorImage-imageLayout-00004";
- if (strcmp(func_name, "vkCmdClearDepthStencilImage()") == 0) {
- error_code = "VUID-vkCmdClearDepthStencilImage-imageLayout-00011";
- } else {
- assert(strcmp(func_name, "vkCmdClearColorImage()") == 0);
+ for (uint32_t level_index = 0; level_index < level_count; ++level_index) {
+ uint32_t level = level_index + range.baseMipLevel;
+ for (uint32_t layer_index = 0; layer_index < layer_count; ++layer_index) {
+ uint32_t layer = layer_index + range.baseArrayLayer;
+ VkImageSubresource sub = {range.aspectMask, level, layer};
+ IMAGE_CMD_BUF_LAYOUT_NODE node;
+ if (FindCmdBufLayout(device_data, cb_node, image_state->image, sub, node)) {
+ if (node.layout != dest_image_layout) {
+ const char *error_code = "VUID-vkCmdClearColorImage-imageLayout-00004";
+ if (strcmp(func_name, "vkCmdClearDepthStencilImage()") == 0) {
+ error_code = "VUID-vkCmdClearDepthStencilImage-imageLayout-00011";
+ } else {
+ assert(strcmp(func_name, "vkCmdClearColorImage()") == 0);
+ }
+ skip |=
+ log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0,
+ error_code, "%s: Cannot clear an image whose layout is %s and doesn't match the current layout %s.",
+ func_name, string_VkImageLayout(dest_image_layout), string_VkImageLayout(node.layout));
}
- subres_skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(cb_node->commandBuffer), error_code,
- "%s: Cannot clear an image whose layout is %s and doesn't match the %s layout %s.",
- func_name, string_VkImageLayout(dest_image_layout), layout_check.message,
- string_VkImageLayout(layout_check.layout));
}
- return !subres_skip;
- };
- subresource_map->ForRange(normalized_isr, subres_callback);
- skip |= subres_skip;
+ }
}
return skip;
}
+void CoreChecks::RecordClearImageLayout(layer_data *device_data, GLOBAL_CB_NODE *cb_node, VkImage image,
+ VkImageSubresourceRange range, VkImageLayout dest_image_layout) {
+ VkImageCreateInfo *image_create_info = &(GetImageState(image)->createInfo);
+ uint32_t level_count = ResolveRemainingLevels(&range, image_create_info->mipLevels);
+ uint32_t layer_count = ResolveRemainingLayers(&range, image_create_info->arrayLayers);
+
+ for (uint32_t level_index = 0; level_index < level_count; ++level_index) {
+ uint32_t level = level_index + range.baseMipLevel;
+ for (uint32_t layer_index = 0; layer_index < layer_count; ++layer_index) {
+ uint32_t layer = layer_index + range.baseArrayLayer;
+ VkImageSubresource sub = {range.aspectMask, level, layer};
+ IMAGE_CMD_BUF_LAYOUT_NODE node;
+ if (!FindCmdBufLayout(device_data, cb_node, image, sub, node)) {
+ SetLayout(device_data, cb_node, image, sub, IMAGE_CMD_BUF_LAYOUT_NODE(dest_image_layout, dest_image_layout));
+ }
+ }
+ }
+}
+
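// --- Illustrative sketch, not part of this diff: VerifyClearImageLayout and RecordClearImageLayout
// --- above both walk the same level x layer grid of a VkImageSubresourceRange. Factored into a
// --- generic helper it looks like this; ForEachSubresource is an invented name.
// #include <vulkan/vulkan.h>
// #include <functional>
//
// static void ForEachSubresource(const VkImageSubresourceRange &range, uint32_t level_count,
//                                uint32_t layer_count,
//                                const std::function<void(const VkImageSubresource &)> &fn) {
//     for (uint32_t level = 0; level < level_count; ++level) {
//         for (uint32_t layer = 0; layer < layer_count; ++layer) {
//             fn({range.aspectMask, range.baseMipLevel + level, range.baseArrayLayer + layer});
//         }
//     }
// }
// --- end sketch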
bool CoreChecks::PreCallValidateCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
const VkClearColorValue *pColor, uint32_t rangeCount,
const VkImageSubresourceRange *pRanges) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+
bool skip = false;
// TODO : Verify memory is in VK_IMAGE_STATE_CLEAR state
- const auto *cb_node = GetCBState(commandBuffer);
- const auto *image_state = GetImageState(image);
+ auto cb_node = GetCBNode(commandBuffer);
+ auto image_state = GetImageState(image);
if (cb_node && image_state) {
- skip |= ValidateMemoryIsBoundToImage(image_state, "vkCmdClearColorImage()", "VUID-vkCmdClearColorImage-image-00003");
- skip |= ValidateCmdQueueFlags(cb_node, "vkCmdClearColorImage()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
+ skip |= ValidateMemoryIsBoundToImage(device_data, image_state, "vkCmdClearColorImage()",
+ "VUID-vkCmdClearColorImage-image-00003");
+ skip |= ValidateCmdQueueFlags(device_data, cb_node, "vkCmdClearColorImage()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
"VUID-vkCmdClearColorImage-commandBuffer-cmdpool");
- skip |= ValidateCmd(cb_node, CMD_CLEARCOLORIMAGE, "vkCmdClearColorImage()");
- if (api_version >= VK_API_VERSION_1_1 || device_extensions.vk_khr_maintenance1) {
- skip |=
- ValidateImageFormatFeatureFlags(image_state, VK_FORMAT_FEATURE_TRANSFER_DST_BIT, "vkCmdClearColorImage",
- "VUID-vkCmdClearColorImage-image-01993", "VUID-vkCmdClearColorImage-image-01993");
+ skip |= ValidateCmd(device_data, cb_node, CMD_CLEARCOLORIMAGE, "vkCmdClearColorImage()");
+ if (GetApiVersion() >= VK_API_VERSION_1_1 || GetDeviceExtensions()->vk_khr_maintenance1) {
+ skip |= ValidateImageFormatFeatureFlags(device_data, image_state, VK_FORMAT_FEATURE_TRANSFER_DST_BIT,
+ "vkCmdClearColorImage", "VUID-vkCmdClearColorImage-image-01993",
+ "VUID-vkCmdClearColorImage-image-01993");
}
- skip |= InsideRenderPass(cb_node, "vkCmdClearColorImage()", "VUID-vkCmdClearColorImage-renderpass");
+ skip |= InsideRenderPass(device_data, cb_node, "vkCmdClearColorImage()", "VUID-vkCmdClearColorImage-renderpass");
for (uint32_t i = 0; i < rangeCount; ++i) {
std::string param_name = "pRanges[" + std::to_string(i) + "]";
- skip |= ValidateCmdClearColorSubresourceRange(image_state, pRanges[i], param_name.c_str());
- skip |= ValidateImageAttributes(image_state, pRanges[i]);
- skip |= VerifyClearImageLayout(cb_node, image_state, pRanges[i], imageLayout, "vkCmdClearColorImage()");
+ skip |= ValidateCmdClearColorSubresourceRange(device_data, image_state, pRanges[i], param_name.c_str());
+ skip |= ValidateImageAttributes(device_data, image_state, pRanges[i]);
+ skip |= VerifyClearImageLayout(device_data, cb_node, image_state, pRanges[i], imageLayout, "vkCmdClearColorImage()");
}
}
return skip;
}
-void ValidationStateTracker::PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image,
- VkImageLayout imageLayout, const VkClearColorValue *pColor,
- uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
- auto cb_node = GetCBState(commandBuffer);
- auto image_state = GetImageState(image);
- if (cb_node && image_state) {
- AddCommandBufferBindingImage(cb_node, image_state);
- }
-}
-
void CoreChecks::PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
const VkClearColorValue *pColor, uint32_t rangeCount,
const VkImageSubresourceRange *pRanges) {
- StateTracker::PreCallRecordCmdClearColorImage(commandBuffer, image, imageLayout, pColor, rangeCount, pRanges);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- auto cb_node = GetCBState(commandBuffer);
+ auto cb_node = GetCBNode(commandBuffer);
auto image_state = GetImageState(image);
if (cb_node && image_state) {
+ AddCommandBufferBindingImage(device_data, cb_node, image_state);
for (uint32_t i = 0; i < rangeCount; ++i) {
- SetImageInitialLayout(cb_node, image, pRanges[i], imageLayout);
+ RecordClearImageLayout(device_data, cb_node, image, pRanges[i], imageLayout);
}
}
}
@@ -1759,27 +1727,30 @@ void CoreChecks::PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer,
bool CoreChecks::PreCallValidateCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount,
const VkImageSubresourceRange *pRanges) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
bool skip = false;
// TODO : Verify memory is in VK_IMAGE_STATE_CLEAR state
- const auto *cb_node = GetCBState(commandBuffer);
- const auto *image_state = GetImageState(image);
+ auto cb_node = GetCBNode(commandBuffer);
+ auto image_state = GetImageState(image);
if (cb_node && image_state) {
- skip |= ValidateMemoryIsBoundToImage(image_state, "vkCmdClearDepthStencilImage()",
+ skip |= ValidateMemoryIsBoundToImage(device_data, image_state, "vkCmdClearDepthStencilImage()",
"VUID-vkCmdClearDepthStencilImage-image-00010");
- skip |= ValidateCmdQueueFlags(cb_node, "vkCmdClearDepthStencilImage()", VK_QUEUE_GRAPHICS_BIT,
+ skip |= ValidateCmdQueueFlags(device_data, cb_node, "vkCmdClearDepthStencilImage()", VK_QUEUE_GRAPHICS_BIT,
"VUID-vkCmdClearDepthStencilImage-commandBuffer-cmdpool");
- skip |= ValidateCmd(cb_node, CMD_CLEARDEPTHSTENCILIMAGE, "vkCmdClearDepthStencilImage()");
- if (api_version >= VK_API_VERSION_1_1 || device_extensions.vk_khr_maintenance1) {
- skip |= ValidateImageFormatFeatureFlags(image_state, VK_FORMAT_FEATURE_TRANSFER_DST_BIT, "vkCmdClearDepthStencilImage",
- "VUID-vkCmdClearDepthStencilImage-image-01994",
+ skip |= ValidateCmd(device_data, cb_node, CMD_CLEARDEPTHSTENCILIMAGE, "vkCmdClearDepthStencilImage()");
+ if (GetApiVersion() >= VK_API_VERSION_1_1 || GetDeviceExtensions()->vk_khr_maintenance1) {
+ skip |= ValidateImageFormatFeatureFlags(device_data, image_state, VK_FORMAT_FEATURE_TRANSFER_DST_BIT,
+ "vkCmdClearDepthStencilImage", "VUID-vkCmdClearDepthStencilImage-image-01994",
"VUID-vkCmdClearDepthStencilImage-image-01994");
}
- skip |= InsideRenderPass(cb_node, "vkCmdClearDepthStencilImage()", "VUID-vkCmdClearDepthStencilImage-renderpass");
+ skip |=
+ InsideRenderPass(device_data, cb_node, "vkCmdClearDepthStencilImage()", "VUID-vkCmdClearDepthStencilImage-renderpass");
for (uint32_t i = 0; i < rangeCount; ++i) {
std::string param_name = "pRanges[" + std::to_string(i) + "]";
- skip |= ValidateCmdClearDepthSubresourceRange(image_state, pRanges[i], param_name.c_str());
- skip |= VerifyClearImageLayout(cb_node, image_state, pRanges[i], imageLayout, "vkCmdClearDepthStencilImage()");
+ skip |= ValidateCmdClearDepthSubresourceRange(device_data, image_state, pRanges[i], param_name.c_str());
+ skip |=
+ VerifyClearImageLayout(device_data, cb_node, image_state, pRanges[i], imageLayout, "vkCmdClearDepthStencilImage()");
// Image aspect must be depth or stencil or both
VkImageAspectFlags valid_aspects = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
if (((pRanges[i].aspectMask & valid_aspects) == 0) || ((pRanges[i].aspectMask & ~valid_aspects) != 0)) {
@@ -1806,26 +1777,17 @@ bool CoreChecks::PreCallValidateCmdClearDepthStencilImage(VkCommandBuffer comman
return skip;
}
-void ValidationStateTracker::PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image,
- VkImageLayout imageLayout,
- const VkClearDepthStencilValue *pDepthStencil,
- uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
- auto cb_node = GetCBState(commandBuffer);
- auto image_state = GetImageState(image);
- if (cb_node && image_state) {
- AddCommandBufferBindingImage(cb_node, image_state);
- }
-}
-
void CoreChecks::PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount,
const VkImageSubresourceRange *pRanges) {
- StateTracker::PreCallRecordCmdClearDepthStencilImage(commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges);
- auto cb_node = GetCBState(commandBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+
+ auto cb_node = GetCBNode(commandBuffer);
auto image_state = GetImageState(image);
if (cb_node && image_state) {
+ AddCommandBufferBindingImage(device_data, cb_node, image_state);
for (uint32_t i = 0; i < rangeCount; ++i) {
- SetImageInitialLayout(cb_node, image, pRanges[i], imageLayout);
+ RecordClearImageLayout(device_data, cb_node, image, pRanges[i], imageLayout);
}
}
}
@@ -1972,10 +1934,10 @@ static inline bool IsExtentSizeZero(const VkExtent3D *extent) {
}
// Returns the image transfer granularity for a specific image scaled by compressed block size if necessary.
-VkExtent3D CoreChecks::GetScaledItg(const CMD_BUFFER_STATE *cb_node, const IMAGE_STATE *img) const {
+VkExtent3D CoreChecks::GetScaledItg(layer_data *device_data, const GLOBAL_CB_NODE *cb_node, const IMAGE_STATE *img) {
// Default to (0, 0, 0) granularity in case we can't find the real granularity for the physical device.
VkExtent3D granularity = {0, 0, 0};
- auto pPool = GetCommandPoolState(cb_node->createInfo.commandPool);
+ auto pPool = GetCommandPoolNode(cb_node->createInfo.commandPool);
if (pPool) {
granularity = GetPhysicalDeviceState()->queue_family_properties[pPool->queueFamilyIndex].minImageTransferGranularity;
if (FormatIsCompressed(img->createInfo.format)) {
@@ -1998,8 +1960,9 @@ static inline bool IsExtentAligned(const VkExtent3D *extent, const VkExtent3D *g
}
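// --- Illustrative sketch, not part of this diff: a granularity alignment test like IsExtentAligned
// --- above boils down to checking that each dimension is a whole multiple of the queue family's
// --- minImageTransferGranularity in that dimension. The zero guard here is only to keep the sketch
// --- safe; the layer handles the all-zero granularity case separately via IsExtentAllZeroes.
// #include <vulkan/vulkan.h>
//
// static bool ExtentIsAlignedTo(const VkExtent3D &extent, const VkExtent3D &granularity) {
//     if (granularity.width == 0 || granularity.height == 0 || granularity.depth == 0) return false;
//     return (extent.width % granularity.width == 0) && (extent.height % granularity.height == 0) &&
//            (extent.depth % granularity.depth == 0);
// }
// --- end sketch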
// Check elements of a VkOffset3D structure against a queue family's Image Transfer Granularity values
-bool CoreChecks::CheckItgOffset(const CMD_BUFFER_STATE *cb_node, const VkOffset3D *offset, const VkExtent3D *granularity,
- const uint32_t i, const char *function, const char *member, const char *vuid) const {
+bool CoreChecks::CheckItgOffset(layer_data *device_data, const GLOBAL_CB_NODE *cb_node, const VkOffset3D *offset,
+ const VkExtent3D *granularity, const uint32_t i, const char *function, const char *member,
+ const char *vuid) {
bool skip = false;
VkExtent3D offset_extent = {};
offset_extent.width = static_cast<uint32_t>(abs(offset->x));
@@ -2030,9 +1993,10 @@ bool CoreChecks::CheckItgOffset(const CMD_BUFFER_STATE *cb_node, const VkOffset3
}
// Check elements of a VkExtent3D structure against a queue family's Image Transfer Granularity values
-bool CoreChecks::CheckItgExtent(const CMD_BUFFER_STATE *cb_node, const VkExtent3D *extent, const VkOffset3D *offset,
- const VkExtent3D *granularity, const VkExtent3D *subresource_extent, const VkImageType image_type,
- const uint32_t i, const char *function, const char *member, const char *vuid) const {
+bool CoreChecks::CheckItgExtent(layer_data *device_data, const GLOBAL_CB_NODE *cb_node, const VkExtent3D *extent,
+ const VkOffset3D *offset, const VkExtent3D *granularity, const VkExtent3D *subresource_extent,
+ const VkImageType image_type, const uint32_t i, const char *function, const char *member,
+ const char *vuid) {
bool skip = false;
if (IsExtentAllZeroes(granularity)) {
// If the queue family image transfer granularity is (0, 0, 0), then the extent must always match the image
@@ -2087,28 +2051,29 @@ bool CoreChecks::CheckItgExtent(const CMD_BUFFER_STATE *cb_node, const VkExtent3
return skip;
}
-bool CoreChecks::ValidateImageMipLevel(const CMD_BUFFER_STATE *cb_node, const IMAGE_STATE *img, uint32_t mip_level,
- const uint32_t i, const char *function, const char *member, const char *vuid) const {
+bool CoreChecks::ValidateImageMipLevel(layer_data *device_data, const GLOBAL_CB_NODE *cb_node, const IMAGE_STATE *img,
+ uint32_t mip_level, const uint32_t i, const char *function, const char *member,
+ const char *vuid) {
bool skip = false;
if (mip_level >= img->createInfo.mipLevels) {
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(cb_node->commandBuffer), vuid,
- "In %s, pRegions[%u].%s.mipLevel is %u, but provided %s has %u mip levels.", function, i, member, mip_level,
- report_data->FormatHandle(img->image).c_str(), img->createInfo.mipLevels);
+ "In %s, pRegions[%u].%s.mipLevel is %u, but provided image %s has %u mip levels.", function, i, member,
+ mip_level, report_data->FormatHandle(img->image).c_str(), img->createInfo.mipLevels);
}
return skip;
}
-bool CoreChecks::ValidateImageArrayLayerRange(const CMD_BUFFER_STATE *cb_node, const IMAGE_STATE *img, const uint32_t base_layer,
- const uint32_t layer_count, const uint32_t i, const char *function,
- const char *member, const char *vuid) const {
+bool CoreChecks::ValidateImageArrayLayerRange(layer_data *device_data, const GLOBAL_CB_NODE *cb_node, const IMAGE_STATE *img,
+ const uint32_t base_layer, const uint32_t layer_count, const uint32_t i,
+ const char *function, const char *member, const char *vuid) {
bool skip = false;
if (base_layer >= img->createInfo.arrayLayers || layer_count > img->createInfo.arrayLayers ||
(base_layer + layer_count) > img->createInfo.arrayLayers) {
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(cb_node->commandBuffer), vuid,
"In %s, pRegions[%u].%s.baseArrayLayer is %u and .layerCount is "
- "%u, but provided %s has %u array layers.",
+ "%u, but provided image %s has %u array layers.",
function, i, member, base_layer, layer_count, report_data->FormatHandle(img->image).c_str(),
img->createInfo.arrayLayers);
}
@@ -2116,48 +2081,50 @@ bool CoreChecks::ValidateImageArrayLayerRange(const CMD_BUFFER_STATE *cb_node, c
}
// Check valid usage Image Transfer Granularity requirements for elements of a VkBufferImageCopy structure
-bool CoreChecks::ValidateCopyBufferImageTransferGranularityRequirements(const CMD_BUFFER_STATE *cb_node, const IMAGE_STATE *img,
- const VkBufferImageCopy *region, const uint32_t i,
- const char *function, const char *vuid) const {
+bool CoreChecks::ValidateCopyBufferImageTransferGranularityRequirements(layer_data *device_data, const GLOBAL_CB_NODE *cb_node,
+ const IMAGE_STATE *img, const VkBufferImageCopy *region,
+ const uint32_t i, const char *function, const char *vuid) {
bool skip = false;
- VkExtent3D granularity = GetScaledItg(cb_node, img);
- skip |= CheckItgOffset(cb_node, &region->imageOffset, &granularity, i, function, "imageOffset", vuid);
+ VkExtent3D granularity = GetScaledItg(device_data, cb_node, img);
+ skip |= CheckItgOffset(device_data, cb_node, &region->imageOffset, &granularity, i, function, "imageOffset", vuid);
VkExtent3D subresource_extent = GetImageSubresourceExtent(img, &region->imageSubresource);
- skip |= CheckItgExtent(cb_node, &region->imageExtent, &region->imageOffset, &granularity, &subresource_extent,
+ skip |= CheckItgExtent(device_data, cb_node, &region->imageExtent, &region->imageOffset, &granularity, &subresource_extent,
img->createInfo.imageType, i, function, "imageExtent", vuid);
return skip;
}
// Check valid usage Image Transfer Granularity requirements for elements of a VkImageCopy structure
-bool CoreChecks::ValidateCopyImageTransferGranularityRequirements(const CMD_BUFFER_STATE *cb_node, const IMAGE_STATE *src_img,
- const IMAGE_STATE *dst_img, const VkImageCopy *region,
- const uint32_t i, const char *function) const {
+bool CoreChecks::ValidateCopyImageTransferGranularityRequirements(layer_data *device_data, const GLOBAL_CB_NODE *cb_node,
+ const IMAGE_STATE *src_img, const IMAGE_STATE *dst_img,
+ const VkImageCopy *region, const uint32_t i,
+ const char *function) {
bool skip = false;
// Source image checks
- VkExtent3D granularity = GetScaledItg(cb_node, src_img);
- skip |=
- CheckItgOffset(cb_node, &region->srcOffset, &granularity, i, function, "srcOffset", "VUID-vkCmdCopyImage-srcOffset-01783");
+ VkExtent3D granularity = GetScaledItg(device_data, cb_node, src_img);
+ skip |= CheckItgOffset(device_data, cb_node, &region->srcOffset, &granularity, i, function, "srcOffset",
+ "VUID-vkCmdCopyImage-srcOffset-01783");
VkExtent3D subresource_extent = GetImageSubresourceExtent(src_img, &region->srcSubresource);
const VkExtent3D extent = region->extent;
- skip |= CheckItgExtent(cb_node, &extent, &region->srcOffset, &granularity, &subresource_extent, src_img->createInfo.imageType,
- i, function, "extent", "VUID-vkCmdCopyImage-srcOffset-01783");
+ skip |= CheckItgExtent(device_data, cb_node, &extent, &region->srcOffset, &granularity, &subresource_extent,
+ src_img->createInfo.imageType, i, function, "extent", "VUID-vkCmdCopyImage-srcOffset-01783");
// Destination image checks
- granularity = GetScaledItg(cb_node, dst_img);
- skip |=
- CheckItgOffset(cb_node, &region->dstOffset, &granularity, i, function, "dstOffset", "VUID-vkCmdCopyImage-dstOffset-01784");
+ granularity = GetScaledItg(device_data, cb_node, dst_img);
+ skip |= CheckItgOffset(device_data, cb_node, &region->dstOffset, &granularity, i, function, "dstOffset",
+ "VUID-vkCmdCopyImage-dstOffset-01784");
// Adjust dest extent, if necessary
const VkExtent3D dest_effective_extent =
GetAdjustedDestImageExtent(src_img->createInfo.format, dst_img->createInfo.format, extent);
subresource_extent = GetImageSubresourceExtent(dst_img, &region->dstSubresource);
- skip |= CheckItgExtent(cb_node, &dest_effective_extent, &region->dstOffset, &granularity, &subresource_extent,
+ skip |= CheckItgExtent(device_data, cb_node, &dest_effective_extent, &region->dstOffset, &granularity, &subresource_extent,
dst_img->createInfo.imageType, i, function, "extent", "VUID-vkCmdCopyImage-dstOffset-01784");
return skip;
}
// Validate contents of a VkImageCopy struct
-bool CoreChecks::ValidateImageCopyData(const uint32_t regionCount, const VkImageCopy *ic_regions, const IMAGE_STATE *src_state,
- const IMAGE_STATE *dst_state) const {
+bool CoreChecks::ValidateImageCopyData(const layer_data *device_data, const debug_report_data *report_data,
+ const uint32_t regionCount, const VkImageCopy *ic_regions, const IMAGE_STATE *src_state,
+ const IMAGE_STATE *dst_state) {
bool skip = false;
for (uint32_t i = 0; i < regionCount; i++) {
@@ -2210,7 +2177,7 @@ bool CoreChecks::ValidateImageCopyData(const uint32_t regionCount, const VkImage
region.srcOffset.z);
}
- if (device_extensions.vk_khr_maintenance1) {
+ if (GetDeviceExtensions()->vk_khr_maintenance1) {
if (src_state->createInfo.imageType == VK_IMAGE_TYPE_3D) {
if ((0 != region.srcSubresource.baseArrayLayer) || (1 != region.srcSubresource.layerCount)) {
skip |=
@@ -2235,7 +2202,7 @@ bool CoreChecks::ValidateImageCopyData(const uint32_t regionCount, const VkImage
}
// Source checks that apply only to compressed images (or to _422 images if ycbcr enabled)
- bool ext_ycbcr = device_extensions.vk_khr_sampler_ycbcr_conversion;
+ bool ext_ycbcr = GetDeviceExtensions()->vk_khr_sampler_ycbcr_conversion;
if (FormatIsCompressed(src_state->createInfo.format) ||
(ext_ycbcr && FormatIsSinglePlane_422(src_state->createInfo.format))) {
const VkExtent3D block_size = FormatTexelBlockExtent(src_state->createInfo.format);
@@ -2327,7 +2294,7 @@ bool CoreChecks::ValidateImageCopyData(const uint32_t regionCount, const VkImage
}
}
// VU01199 changed with mnt1
- if (device_extensions.vk_khr_maintenance1) {
+ if (GetDeviceExtensions()->vk_khr_maintenance1) {
if (dst_state->createInfo.imageType == VK_IMAGE_TYPE_3D) {
if ((0 != region.dstSubresource.baseArrayLayer) || (1 != region.dstSubresource.layerCount)) {
skip |=
@@ -2409,8 +2376,9 @@ bool CoreChecks::ValidateImageCopyData(const uint32_t regionCount, const VkImage
}
// vkCmdCopyImage checks that only apply if the multiplane extension is enabled
-bool CoreChecks::CopyImageMultiplaneValidation(VkCommandBuffer command_buffer, const IMAGE_STATE *src_image_state,
- const IMAGE_STATE *dst_image_state, const VkImageCopy region) const {
+bool CoreChecks::CopyImageMultiplaneValidation(const layer_data *dev_data, VkCommandBuffer command_buffer,
+ const IMAGE_STATE *src_image_state, const IMAGE_STATE *dst_image_state,
+ const VkImageCopy region) {
bool skip = false;
// Neither image is multiplane
@@ -2481,12 +2449,13 @@ bool CoreChecks::CopyImageMultiplaneValidation(VkCommandBuffer command_buffer, c
bool CoreChecks::PreCallValidateCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
const VkImageCopy *pRegions) {
- const auto *cb_node = GetCBState(commandBuffer);
- const auto *src_image_state = GetImageState(srcImage);
- const auto *dst_image_state = GetImageState(dstImage);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ auto cb_node = GetCBNode(commandBuffer);
+ auto src_image_state = GetImageState(srcImage);
+ auto dst_image_state = GetImageState(dstImage);
bool skip = false;
- skip = ValidateImageCopyData(regionCount, pRegions, src_image_state, dst_image_state);
+ skip = ValidateImageCopyData(device_data, report_data, regionCount, pRegions, src_image_state, dst_image_state);
VkCommandBuffer command_buffer = cb_node->commandBuffer;
@@ -2513,20 +2482,20 @@ bool CoreChecks::PreCallValidateCmdCopyImage(VkCommandBuffer commandBuffer, VkIm
slice_override = (depth_slices != 1);
}
- skip |= ValidateImageSubresourceLayers(cb_node, &region.srcSubresource, "vkCmdCopyImage", "srcSubresource", i);
- skip |= ValidateImageSubresourceLayers(cb_node, &region.dstSubresource, "vkCmdCopyImage", "dstSubresource", i);
- skip |= ValidateImageMipLevel(cb_node, src_image_state, region.srcSubresource.mipLevel, i, "vkCmdCopyImage",
+ skip |= ValidateImageSubresourceLayers(device_data, cb_node, &region.srcSubresource, "vkCmdCopyImage", "srcSubresource", i);
+ skip |= ValidateImageSubresourceLayers(device_data, cb_node, &region.dstSubresource, "vkCmdCopyImage", "dstSubresource", i);
+ skip |= ValidateImageMipLevel(device_data, cb_node, src_image_state, region.srcSubresource.mipLevel, i, "vkCmdCopyImage",
"srcSubresource", "VUID-vkCmdCopyImage-srcSubresource-01696");
- skip |= ValidateImageMipLevel(cb_node, dst_image_state, region.dstSubresource.mipLevel, i, "vkCmdCopyImage",
+ skip |= ValidateImageMipLevel(device_data, cb_node, dst_image_state, region.dstSubresource.mipLevel, i, "vkCmdCopyImage",
"dstSubresource", "VUID-vkCmdCopyImage-dstSubresource-01697");
- skip |= ValidateImageArrayLayerRange(cb_node, src_image_state, region.srcSubresource.baseArrayLayer,
+ skip |= ValidateImageArrayLayerRange(device_data, cb_node, src_image_state, region.srcSubresource.baseArrayLayer,
region.srcSubresource.layerCount, i, "vkCmdCopyImage", "srcSubresource",
"VUID-vkCmdCopyImage-srcSubresource-01698");
- skip |= ValidateImageArrayLayerRange(cb_node, dst_image_state, region.dstSubresource.baseArrayLayer,
+ skip |= ValidateImageArrayLayerRange(device_data, cb_node, dst_image_state, region.dstSubresource.baseArrayLayer,
region.dstSubresource.layerCount, i, "vkCmdCopyImage", "dstSubresource",
"VUID-vkCmdCopyImage-dstSubresource-01699");
- if (device_extensions.vk_khr_maintenance1) {
+ if (GetDeviceExtensions()->vk_khr_maintenance1) {
// No chance of mismatch if we're overriding depth slice count
if (!slice_override) {
// The number of depth slices in srcSubresource and dstSubresource must match
@@ -2557,11 +2526,11 @@ bool CoreChecks::PreCallValidateCmdCopyImage(VkCommandBuffer commandBuffer, VkIm
}
// Do multiplane-specific checks, if extension enabled
- if (device_extensions.vk_khr_sampler_ycbcr_conversion) {
- skip |= CopyImageMultiplaneValidation(command_buffer, src_image_state, dst_image_state, region);
+ if (GetDeviceExtensions()->vk_khr_sampler_ycbcr_conversion) {
+ skip |= CopyImageMultiplaneValidation(device_data, command_buffer, src_image_state, dst_image_state, region);
}
- if (!device_extensions.vk_khr_sampler_ycbcr_conversion) {
+ if (!GetDeviceExtensions()->vk_khr_sampler_ycbcr_conversion) {
// not multi-plane, the aspectMask member of srcSubresource and dstSubresource must match
if (region.srcSubresource.aspectMask != region.dstSubresource.aspectMask) {
char const str[] = "vkCmdCopyImage(): Src and dest aspectMasks for each region must match";
@@ -2711,70 +2680,65 @@ bool CoreChecks::PreCallValidateCmdCopyImage(VkCommandBuffer commandBuffer, VkIm
HandleToUint64(command_buffer), "VUID-vkCmdCopyImage-srcImage-00136", "%s", str);
}
- skip |= ValidateMemoryIsBoundToImage(src_image_state, "vkCmdCopyImage()", "VUID-vkCmdCopyImage-srcImage-00127");
- skip |= ValidateMemoryIsBoundToImage(dst_image_state, "vkCmdCopyImage()", "VUID-vkCmdCopyImage-dstImage-00132");
+ skip |= ValidateMemoryIsBoundToImage(device_data, src_image_state, "vkCmdCopyImage()", "VUID-vkCmdCopyImage-srcImage-00127");
+ skip |= ValidateMemoryIsBoundToImage(device_data, dst_image_state, "vkCmdCopyImage()", "VUID-vkCmdCopyImage-dstImage-00132");
// Validate that SRC & DST images have correct usage flags set
- skip |= ValidateImageUsageFlags(src_image_state, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, true, "VUID-vkCmdCopyImage-srcImage-00126",
- "vkCmdCopyImage()", "VK_IMAGE_USAGE_TRANSFER_SRC_BIT");
- skip |= ValidateImageUsageFlags(dst_image_state, VK_IMAGE_USAGE_TRANSFER_DST_BIT, true, "VUID-vkCmdCopyImage-dstImage-00131",
- "vkCmdCopyImage()", "VK_IMAGE_USAGE_TRANSFER_DST_BIT");
- if (api_version >= VK_API_VERSION_1_1 || device_extensions.vk_khr_maintenance1) {
- skip |= ValidateImageFormatFeatureFlags(src_image_state, VK_FORMAT_FEATURE_TRANSFER_SRC_BIT, "vkCmdCopyImage()",
- "VUID-vkCmdCopyImage-srcImage-01995", "VUID-vkCmdCopyImage-srcImage-01995");
- skip |= ValidateImageFormatFeatureFlags(dst_image_state, VK_FORMAT_FEATURE_TRANSFER_DST_BIT, "vkCmdCopyImage()",
- "VUID-vkCmdCopyImage-dstImage-01996", "VUID-vkCmdCopyImage-dstImage-01996");
- }
- skip |= ValidateCmdQueueFlags(cb_node, "vkCmdCopyImage()", VK_QUEUE_TRANSFER_BIT | VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
+ skip |= ValidateImageUsageFlags(device_data, src_image_state, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, true,
+ "VUID-vkCmdCopyImage-srcImage-00126", "vkCmdCopyImage()", "VK_IMAGE_USAGE_TRANSFER_SRC_BIT");
+ skip |= ValidateImageUsageFlags(device_data, dst_image_state, VK_IMAGE_USAGE_TRANSFER_DST_BIT, true,
+ "VUID-vkCmdCopyImage-dstImage-00131", "vkCmdCopyImage()", "VK_IMAGE_USAGE_TRANSFER_DST_BIT");
+ if (GetApiVersion() >= VK_API_VERSION_1_1 || GetDeviceExtensions()->vk_khr_maintenance1) {
+ skip |=
+ ValidateImageFormatFeatureFlags(device_data, src_image_state, VK_FORMAT_FEATURE_TRANSFER_SRC_BIT, "vkCmdCopyImage()",
+ "VUID-vkCmdCopyImage-srcImage-01995", "VUID-vkCmdCopyImage-srcImage-01995");
+ skip |=
+ ValidateImageFormatFeatureFlags(device_data, dst_image_state, VK_FORMAT_FEATURE_TRANSFER_DST_BIT, "vkCmdCopyImage()",
+ "VUID-vkCmdCopyImage-dstImage-01996", "VUID-vkCmdCopyImage-dstImage-01996");
+ }
+ skip |= ValidateCmdQueueFlags(device_data, cb_node, "vkCmdCopyImage()",
+ VK_QUEUE_TRANSFER_BIT | VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
"VUID-vkCmdCopyImage-commandBuffer-cmdpool");
- skip |= ValidateCmd(cb_node, CMD_COPYIMAGE, "vkCmdCopyImage()");
- skip |= InsideRenderPass(cb_node, "vkCmdCopyImage()", "VUID-vkCmdCopyImage-renderpass");
+ skip |= ValidateCmd(device_data, cb_node, CMD_COPYIMAGE, "vkCmdCopyImage()");
+ skip |= InsideRenderPass(device_data, cb_node, "vkCmdCopyImage()", "VUID-vkCmdCopyImage-renderpass");
bool hit_error = false;
- const char *invalid_src_layout_vuid = (src_image_state->shared_presentable && device_extensions.vk_khr_shared_presentable_image)
- ? "VUID-vkCmdCopyImage-srcImageLayout-01917"
- : "VUID-vkCmdCopyImage-srcImageLayout-00129";
- const char *invalid_dst_layout_vuid = (dst_image_state->shared_presentable && device_extensions.vk_khr_shared_presentable_image)
- ? "VUID-vkCmdCopyImage-dstImageLayout-01395"
- : "VUID-vkCmdCopyImage-dstImageLayout-00134";
+ const char *invalid_src_layout_vuid =
+ (src_image_state->shared_presentable && GetDeviceExtensions()->vk_khr_shared_presentable_image)
+ ? "VUID-vkCmdCopyImage-srcImageLayout-01917"
+ : "VUID-vkCmdCopyImage-srcImageLayout-00129";
+ const char *invalid_dst_layout_vuid =
+ (dst_image_state->shared_presentable && GetDeviceExtensions()->vk_khr_shared_presentable_image)
+ ? "VUID-vkCmdCopyImage-dstImageLayout-01395"
+ : "VUID-vkCmdCopyImage-dstImageLayout-00134";
for (uint32_t i = 0; i < regionCount; ++i) {
- skip |= VerifyImageLayout(cb_node, src_image_state, pRegions[i].srcSubresource, srcImageLayout,
+ skip |= VerifyImageLayout(device_data, cb_node, src_image_state, pRegions[i].srcSubresource, srcImageLayout,
VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, "vkCmdCopyImage()", invalid_src_layout_vuid,
"VUID-vkCmdCopyImage-srcImageLayout-00128", &hit_error);
- skip |= VerifyImageLayout(cb_node, dst_image_state, pRegions[i].dstSubresource, dstImageLayout,
+ skip |= VerifyImageLayout(device_data, cb_node, dst_image_state, pRegions[i].dstSubresource, dstImageLayout,
VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, "vkCmdCopyImage()", invalid_dst_layout_vuid,
"VUID-vkCmdCopyImage-dstImageLayout-00133", &hit_error);
- skip |= ValidateCopyImageTransferGranularityRequirements(cb_node, src_image_state, dst_image_state, &pRegions[i], i,
- "vkCmdCopyImage()");
+ skip |= ValidateCopyImageTransferGranularityRequirements(device_data, cb_node, src_image_state, dst_image_state,
+ &pRegions[i], i, "vkCmdCopyImage()");
}
return skip;
}
-void ValidationStateTracker::PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage,
- VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
- uint32_t regionCount, const VkImageCopy *pRegions) {
- auto cb_node = GetCBState(commandBuffer);
- auto src_image_state = GetImageState(srcImage);
- auto dst_image_state = GetImageState(dstImage);
-
- // Update bindings between images and cmd buffer
- AddCommandBufferBindingImage(cb_node, src_image_state);
- AddCommandBufferBindingImage(cb_node, dst_image_state);
-}
-
void CoreChecks::PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
const VkImageCopy *pRegions) {
- StateTracker::PreCallRecordCmdCopyImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount,
- pRegions);
- auto cb_node = GetCBState(commandBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ auto cb_node = GetCBNode(commandBuffer);
auto src_image_state = GetImageState(srcImage);
auto dst_image_state = GetImageState(dstImage);
// Make sure that all image slices are updated to correct layout
for (uint32_t i = 0; i < regionCount; ++i) {
- SetImageInitialLayout(cb_node, *src_image_state, pRegions[i].srcSubresource, srcImageLayout);
- SetImageInitialLayout(cb_node, *dst_image_state, pRegions[i].dstSubresource, dstImageLayout);
+ SetImageLayout(device_data, cb_node, src_image_state, pRegions[i].srcSubresource, srcImageLayout);
+ SetImageLayout(device_data, cb_node, dst_image_state, pRegions[i].dstSubresource, dstImageLayout);
}
+ // Update bindings between images and cmd buffer
+ AddCommandBufferBindingImage(device_data, cb_node, src_image_state);
+ AddCommandBufferBindingImage(device_data, cb_node, dst_image_state);
}
// Returns true if sub_rect is entirely contained within rect
@@ -2785,10 +2749,9 @@ static inline bool ContainsRect(VkRect2D rect, VkRect2D sub_rect) {
return true;
}
-bool CoreChecks::ValidateClearAttachmentExtent(VkCommandBuffer command_buffer, uint32_t attachment_index,
- const FRAMEBUFFER_STATE *framebuffer, uint32_t fb_attachment,
- const VkRect2D &render_area, uint32_t rect_count,
- const VkClearRect *clear_rects) const {
+bool CoreChecks::ValidateClearAttachmentExtent(layer_data *device_data, VkCommandBuffer command_buffer, uint32_t attachment_index,
+ FRAMEBUFFER_STATE *framebuffer, uint32_t fb_attachment, const VkRect2D &render_area,
+ uint32_t rect_count, const VkClearRect *clear_rects) {
bool skip = false;
const IMAGE_VIEW_STATE *image_view_state = nullptr;
if (framebuffer && (fb_attachment != VK_ATTACHMENT_UNUSED) && (fb_attachment < framebuffer->createInfo.attachmentCount)) {
@@ -2797,7 +2760,7 @@ bool CoreChecks::ValidateClearAttachmentExtent(VkCommandBuffer command_buffer, u
for (uint32_t j = 0; j < rect_count; j++) {
if (!ContainsRect(render_area, clear_rects[j].rect)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(command_buffer), "VUID-vkCmdClearAttachments-pRects-00016",
"vkCmdClearAttachments(): The area defined by pRects[%d] is not contained in the area of "
"the current render pass instance.",
@@ -2810,11 +2773,12 @@ bool CoreChecks::ValidateClearAttachmentExtent(VkCommandBuffer command_buffer, u
const auto attachment_layer_count = image_view_state->create_info.subresourceRange.layerCount;
if ((clear_rects[j].baseArrayLayer >= attachment_layer_count) ||
(clear_rects[j].baseArrayLayer + clear_rects[j].layerCount > attachment_layer_count)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(command_buffer), "VUID-vkCmdClearAttachments-pRects-00017",
- "vkCmdClearAttachments(): The layers defined in pRects[%d] are not contained in the layers "
- "of pAttachment[%d].",
- j, attachment_index);
+ skip |=
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ HandleToUint64(command_buffer), "VUID-vkCmdClearAttachments-pRects-00017",
+ "vkCmdClearAttachments(): The layers defined in pRects[%d] are not contained in the layers "
+ "of pAttachment[%d].",
+ j, attachment_index);
}
}
}
@@ -2824,34 +2788,39 @@ bool CoreChecks::ValidateClearAttachmentExtent(VkCommandBuffer command_buffer, u
bool CoreChecks::PreCallValidateCmdClearAttachments(VkCommandBuffer commandBuffer, uint32_t attachmentCount,
const VkClearAttachment *pAttachments, uint32_t rectCount,
const VkClearRect *pRects) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+
+ GLOBAL_CB_NODE *cb_node = GetCBNode(commandBuffer);
+
bool skip = false;
- const CMD_BUFFER_STATE *cb_node = GetCBState(commandBuffer); // TODO: Should be const, and never modified during validation
- if (!cb_node) return skip;
-
- skip |= ValidateCmdQueueFlags(cb_node, "vkCmdClearAttachments()", VK_QUEUE_GRAPHICS_BIT,
- "VUID-vkCmdClearAttachments-commandBuffer-cmdpool");
- skip |= ValidateCmd(cb_node, CMD_CLEARATTACHMENTS, "vkCmdClearAttachments()");
- // Warn if this is issued prior to Draw Cmd and clearing the entire attachment
- if (!cb_node->hasDrawCmd && (cb_node->activeRenderPassBeginInfo.renderArea.extent.width == pRects[0].rect.extent.width) &&
- (cb_node->activeRenderPassBeginInfo.renderArea.extent.height == pRects[0].rect.extent.height)) {
- // There are times where app needs to use ClearAttachments (generally when reusing a buffer inside of a render pass)
- // This warning should be made more specific. It'd be best to avoid triggering this test if it's a use that must call
- // CmdClearAttachments.
- skip |= log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ if (cb_node) {
+ skip |= ValidateCmdQueueFlags(device_data, cb_node, "vkCmdClearAttachments()", VK_QUEUE_GRAPHICS_BIT,
+ "VUID-vkCmdClearAttachments-commandBuffer-cmdpool");
+ skip |= ValidateCmd(device_data, cb_node, CMD_CLEARATTACHMENTS, "vkCmdClearAttachments()");
+ // Warn if this is issued prior to Draw Cmd and clearing the entire attachment
+ if (!cb_node->hasDrawCmd && (cb_node->activeRenderPassBeginInfo.renderArea.extent.width == pRects[0].rect.extent.width) &&
+ (cb_node->activeRenderPassBeginInfo.renderArea.extent.height == pRects[0].rect.extent.height)) {
+ // There are times where app needs to use ClearAttachments (generally when reusing a buffer inside of a render pass)
+ // This warning should be made more specific. It'd be best to avoid triggering this test if it's a use that must call
+ // CmdClearAttachments.
+ skip |=
+ log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(commandBuffer), kVUID_Core_DrawState_ClearCmdBeforeDraw,
- "vkCmdClearAttachments() issued on %s prior to any Draw Cmds. It is recommended you "
+ "vkCmdClearAttachments() issued on command buffer object %s prior to any Draw Cmds. It is recommended you "
"use RenderPass LOAD_OP_CLEAR on Attachments prior to any Draw.",
report_data->FormatHandle(commandBuffer).c_str());
+ }
+ skip |= OutsideRenderPass(device_data, cb_node, "vkCmdClearAttachments()", "VUID-vkCmdClearAttachments-renderpass");
}
- skip |= OutsideRenderPass(cb_node, "vkCmdClearAttachments()", "VUID-vkCmdClearAttachments-renderpass");
// Validate that attachment is in reference list of active subpass
if (cb_node->activeRenderPass) {
const VkRenderPassCreateInfo2KHR *renderpass_create_info = cb_node->activeRenderPass->createInfo.ptr();
const uint32_t renderpass_attachment_count = renderpass_create_info->attachmentCount;
const VkSubpassDescription2KHR *subpass_desc = &renderpass_create_info->pSubpasses[cb_node->activeSubpass];
- const auto *framebuffer = GetFramebufferState(cb_node->activeFramebuffer);
+ auto framebuffer = GetFramebufferState(cb_node->activeFramebuffer);
const auto &render_area = cb_node->activeRenderPassBeginInfo.renderArea;
+ std::shared_ptr<std::vector<VkClearRect>> clear_rect_copy;
for (uint32_t attachment_index = 0; attachment_index < attachmentCount; attachment_index++) {
auto clear_desc = &pAttachments[attachment_index];
@@ -2868,21 +2837,21 @@ bool CoreChecks::PreCallValidateCmdClearAttachments(VkCommandBuffer commandBuffe
if (clear_desc->colorAttachment < subpass_desc->colorAttachmentCount) {
color_attachment = subpass_desc->pColorAttachments[clear_desc->colorAttachment].attachment;
if ((color_attachment != VK_ATTACHMENT_UNUSED) && (color_attachment >= renderpass_attachment_count)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(commandBuffer), "VUID-vkCmdClearAttachments-aspectMask-02501",
- "vkCmdClearAttachments() pAttachments[%u].colorAttachment=%u is not VK_ATTACHMENT_UNUSED "
- "and not a valid attachment for %s attachmentCount=%u. Subpass %u pColorAttachment[%u]=%u.",
- attachment_index, clear_desc->colorAttachment,
- report_data->FormatHandle(cb_node->activeRenderPass->renderPass).c_str(),
- cb_node->activeSubpass, clear_desc->colorAttachment, color_attachment,
- renderpass_attachment_count);
+ skip |= log_msg(
+ report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ HandleToUint64(commandBuffer), "VUID-vkCmdClearAttachments-aspectMask-02501",
+ "vkCmdClearAttachments() pAttachments[%u].colorAttachment=%u is not VK_ATTACHMENT_UNUSED "
+ "and not a valid attachment for render pass %s attachmentCount=%u. Subpass %u pColorAttachment[%u]=%u.",
+ attachment_index, clear_desc->colorAttachment,
+ report_data->FormatHandle(cb_node->activeRenderPass->renderPass).c_str(), cb_node->activeSubpass,
+ clear_desc->colorAttachment, color_attachment, renderpass_attachment_count);
color_attachment = VK_ATTACHMENT_UNUSED; // Defensive, prevent lookup past end of renderpass attachment
}
} else {
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(commandBuffer), "VUID-vkCmdClearAttachments-aspectMask-02501",
- "vkCmdClearAttachments() pAttachments[%u].colorAttachment=%u out of range for %s"
+ "vkCmdClearAttachments() pAttachments[%u].colorAttachment=%u out of range for render pass %s"
" subpass %u. colorAttachmentCount=%u",
attachment_index, clear_desc->colorAttachment,
report_data->FormatHandle(cb_node->activeRenderPass->renderPass).c_str(),
@@ -2917,104 +2886,87 @@ bool CoreChecks::PreCallValidateCmdClearAttachments(VkCommandBuffer commandBuffe
}
}
if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_PRIMARY) {
- skip |= ValidateClearAttachmentExtent(commandBuffer, attachment_index, framebuffer, fb_attachment, render_area,
- rectCount, pRects);
- }
- }
- }
- return skip;
-}
-
-void CoreChecks::PreCallRecordCmdClearAttachments(VkCommandBuffer commandBuffer, uint32_t attachmentCount,
- const VkClearAttachment *pAttachments, uint32_t rectCount,
- const VkClearRect *pRects) {
- auto *cb_node = GetCBState(commandBuffer);
- if (cb_node->activeRenderPass && (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY)) {
- const VkRenderPassCreateInfo2KHR *renderpass_create_info = cb_node->activeRenderPass->createInfo.ptr();
- const VkSubpassDescription2KHR *subpass_desc = &renderpass_create_info->pSubpasses[cb_node->activeSubpass];
- std::shared_ptr<std::vector<VkClearRect>> clear_rect_copy;
- for (uint32_t attachment_index = 0; attachment_index < attachmentCount; attachment_index++) {
- const auto clear_desc = &pAttachments[attachment_index];
- uint32_t fb_attachment = VK_ATTACHMENT_UNUSED;
- if ((clear_desc->aspectMask & VK_IMAGE_ASPECT_COLOR_BIT) &&
- (clear_desc->colorAttachment < subpass_desc->colorAttachmentCount)) {
- fb_attachment = subpass_desc->pColorAttachments[clear_desc->colorAttachment].attachment;
- } else if ((clear_desc->aspectMask & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) &&
- subpass_desc->pDepthStencilAttachment) {
- fb_attachment = subpass_desc->pDepthStencilAttachment->attachment;
- }
- if (fb_attachment != VK_ATTACHMENT_UNUSED) {
+ skip |= ValidateClearAttachmentExtent(device_data, commandBuffer, attachment_index, framebuffer, fb_attachment,
+ render_area, rectCount, pRects);
+ } else {
+ // if a secondary level command buffer inherits the framebuffer from the primary command buffer
+ // (see VkCommandBufferInheritanceInfo), this validation must be deferred until queue submit time
if (!clear_rect_copy) {
// We need a copy of the clear rectangles that will persist until the last lambda executes
// but we want to create it as lazily as possible
clear_rect_copy.reset(new std::vector<VkClearRect>(pRects, pRects + rectCount));
}
- // if a secondary level command buffer inherits the framebuffer from the primary command buffer
- // (see VkCommandBufferInheritanceInfo), this validation must be deferred until queue submit time
- auto val_fn = [this, commandBuffer, attachment_index, fb_attachment, rectCount, clear_rect_copy](
- const CMD_BUFFER_STATE *prim_cb, VkFramebuffer fb) {
+
+ auto val_fn = [device_data, commandBuffer, attachment_index, fb_attachment, rectCount, clear_rect_copy](
+ GLOBAL_CB_NODE *prim_cb, VkFramebuffer fb) {
assert(rectCount == clear_rect_copy->size());
- const FRAMEBUFFER_STATE *framebuffer = GetFramebufferState(fb);
+ FRAMEBUFFER_STATE *framebuffer = device_data->GetFramebufferState(fb);
const auto &render_area = prim_cb->activeRenderPassBeginInfo.renderArea;
bool skip = false;
- skip = ValidateClearAttachmentExtent(commandBuffer, attachment_index, framebuffer, fb_attachment, render_area,
- rectCount, clear_rect_copy->data());
+ skip =
+ device_data->ValidateClearAttachmentExtent(device_data, commandBuffer, attachment_index, framebuffer,
+ fb_attachment, render_area, rectCount, clear_rect_copy->data());
return skip;
};
cb_node->cmd_execute_commands_functions.emplace_back(val_fn);
}
}
}
+ return skip;
}
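// Illustrative sketch of the deferral pattern used above, not layer code (DeferredChecksSketch and
// DeferClearRectCheck are hypothetical names): when a secondary command buffer inherits its framebuffer,
// the clear-rect extents cannot be checked until the primary command buffer is known, so the rects are
// copied once into a shared_ptr and captured by a callback that runs when the secondary buffer is executed.
#include <functional>
#include <memory>
#include <vector>
#include <vulkan/vulkan.h>
struct DeferredChecksSketch {
    std::vector<std::function<bool()>> checks;  // analogous to cmd_execute_commands_functions above
};
static void DeferClearRectCheck(DeferredChecksSketch &cb, const VkClearRect *rects, uint32_t count) {
    // Copy lazily, and share the one copy between every deferred check created for this call.
    auto rect_copy = std::make_shared<std::vector<VkClearRect>>(rects, rects + count);
    cb.checks.emplace_back([rect_copy]() {
        bool skip = false;
        // The real callback re-runs ValidateClearAttachmentExtent() against the primary command buffer's
        // render area; this placeholder only demonstrates the capture/lifetime pattern.
        for (const VkClearRect &r : *rect_copy) skip |= (r.layerCount == 0);
        return skip;
    });
}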
bool CoreChecks::PreCallValidateCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
const VkImageResolve *pRegions) {
- const auto *cb_node = GetCBState(commandBuffer);
- const auto *src_image_state = GetImageState(srcImage);
- const auto *dst_image_state = GetImageState(dstImage);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ auto cb_node = GetCBNode(commandBuffer);
+ auto src_image_state = GetImageState(srcImage);
+ auto dst_image_state = GetImageState(dstImage);
bool skip = false;
if (cb_node && src_image_state && dst_image_state) {
- skip |= ValidateMemoryIsBoundToImage(src_image_state, "vkCmdResolveImage()", "VUID-vkCmdResolveImage-srcImage-00256");
- skip |= ValidateMemoryIsBoundToImage(dst_image_state, "vkCmdResolveImage()", "VUID-vkCmdResolveImage-dstImage-00258");
- skip |= ValidateCmdQueueFlags(cb_node, "vkCmdResolveImage()", VK_QUEUE_GRAPHICS_BIT,
+ skip |= ValidateMemoryIsBoundToImage(device_data, src_image_state, "vkCmdResolveImage()",
+ "VUID-vkCmdResolveImage-srcImage-00256");
+ skip |= ValidateMemoryIsBoundToImage(device_data, dst_image_state, "vkCmdResolveImage()",
+ "VUID-vkCmdResolveImage-dstImage-00258");
+ skip |= ValidateCmdQueueFlags(device_data, cb_node, "vkCmdResolveImage()", VK_QUEUE_GRAPHICS_BIT,
"VUID-vkCmdResolveImage-commandBuffer-cmdpool");
- skip |= ValidateCmd(cb_node, CMD_RESOLVEIMAGE, "vkCmdResolveImage()");
- skip |= InsideRenderPass(cb_node, "vkCmdResolveImage()", "VUID-vkCmdResolveImage-renderpass");
- skip |= ValidateImageFormatFeatureFlags(dst_image_state, VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT, "vkCmdResolveImage()",
- "VUID-vkCmdResolveImage-dstImage-02003", "VUID-vkCmdResolveImage-dstImage-02003");
+ skip |= ValidateCmd(device_data, cb_node, CMD_RESOLVEIMAGE, "vkCmdResolveImage()");
+ skip |= InsideRenderPass(device_data, cb_node, "vkCmdResolveImage()", "VUID-vkCmdResolveImage-renderpass");
+ skip |= ValidateImageFormatFeatureFlags(device_data, dst_image_state, VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT,
+ "vkCmdResolveImage()", "VUID-vkCmdResolveImage-dstImage-02003",
+ "VUID-vkCmdResolveImage-dstImage-02003");
bool hit_error = false;
const char *invalid_src_layout_vuid =
- (src_image_state->shared_presentable && device_extensions.vk_khr_shared_presentable_image)
+ (src_image_state->shared_presentable && GetDeviceExtensions()->vk_khr_shared_presentable_image)
? "VUID-vkCmdResolveImage-srcImageLayout-01400"
: "VUID-vkCmdResolveImage-srcImageLayout-00261";
const char *invalid_dst_layout_vuid =
- (dst_image_state->shared_presentable && device_extensions.vk_khr_shared_presentable_image)
+ (dst_image_state->shared_presentable && GetDeviceExtensions()->vk_khr_shared_presentable_image)
? "VUID-vkCmdResolveImage-dstImageLayout-01401"
: "VUID-vkCmdResolveImage-dstImageLayout-00263";
// For each region, the number of layers in the image subresource should not be zero
// For each region, src and dest image aspect must be color only
for (uint32_t i = 0; i < regionCount; i++) {
- skip |=
- ValidateImageSubresourceLayers(cb_node, &pRegions[i].srcSubresource, "vkCmdResolveImage()", "srcSubresource", i);
- skip |=
- ValidateImageSubresourceLayers(cb_node, &pRegions[i].dstSubresource, "vkCmdResolveImage()", "dstSubresource", i);
- skip |= VerifyImageLayout(cb_node, src_image_state, pRegions[i].srcSubresource, srcImageLayout,
+ skip |= ValidateImageSubresourceLayers(device_data, cb_node, &pRegions[i].srcSubresource, "vkCmdResolveImage()",
+ "srcSubresource", i);
+ skip |= ValidateImageSubresourceLayers(device_data, cb_node, &pRegions[i].dstSubresource, "vkCmdResolveImage()",
+ "dstSubresource", i);
+ skip |= VerifyImageLayout(device_data, cb_node, src_image_state, pRegions[i].srcSubresource, srcImageLayout,
VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, "vkCmdResolveImage()", invalid_src_layout_vuid,
"VUID-vkCmdResolveImage-srcImageLayout-00260", &hit_error);
- skip |= VerifyImageLayout(cb_node, dst_image_state, pRegions[i].dstSubresource, dstImageLayout,
+ skip |= VerifyImageLayout(device_data, cb_node, dst_image_state, pRegions[i].dstSubresource, dstImageLayout,
VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, "vkCmdResolveImage()", invalid_dst_layout_vuid,
"VUID-vkCmdResolveImage-dstImageLayout-00262", &hit_error);
- skip |= ValidateImageMipLevel(cb_node, src_image_state, pRegions[i].srcSubresource.mipLevel, i, "vkCmdResolveImage()",
- "srcSubresource", "VUID-vkCmdResolveImage-srcSubresource-01709");
- skip |= ValidateImageMipLevel(cb_node, dst_image_state, pRegions[i].dstSubresource.mipLevel, i, "vkCmdResolveImage()",
- "dstSubresource", "VUID-vkCmdResolveImage-dstSubresource-01710");
- skip |= ValidateImageArrayLayerRange(cb_node, src_image_state, pRegions[i].srcSubresource.baseArrayLayer,
+ skip |= ValidateImageMipLevel(device_data, cb_node, src_image_state, pRegions[i].srcSubresource.mipLevel, i,
+ "vkCmdResolveImage()", "srcSubresource", "VUID-vkCmdResolveImage-srcSubresource-01709");
+ skip |= ValidateImageMipLevel(device_data, cb_node, dst_image_state, pRegions[i].dstSubresource.mipLevel, i,
+ "vkCmdResolveImage()", "dstSubresource", "VUID-vkCmdResolveImage-dstSubresource-01710");
+ skip |= ValidateImageArrayLayerRange(device_data, cb_node, src_image_state, pRegions[i].srcSubresource.baseArrayLayer,
pRegions[i].srcSubresource.layerCount, i, "vkCmdResolveImage()", "srcSubresource",
"VUID-vkCmdResolveImage-srcSubresource-01711");
- skip |= ValidateImageArrayLayerRange(cb_node, dst_image_state, pRegions[i].dstSubresource.baseArrayLayer,
+ skip |= ValidateImageArrayLayerRange(device_data, cb_node, dst_image_state, pRegions[i].dstSubresource.baseArrayLayer,
                                                 pRegions[i].dstSubresource.layerCount, i, "vkCmdResolveImage()", "dstSubresource",
"VUID-vkCmdResolveImage-dstSubresource-01712");
@@ -3061,50 +3013,53 @@ bool CoreChecks::PreCallValidateCmdResolveImage(VkCommandBuffer commandBuffer, V
return skip;
}
-void ValidationStateTracker::PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage,
- VkImageLayout srcImageLayout, VkImage dstImage,
- VkImageLayout dstImageLayout, uint32_t regionCount,
- const VkImageResolve *pRegions) {
- auto cb_node = GetCBState(commandBuffer);
+void CoreChecks::PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
+ VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
+ const VkImageResolve *pRegions) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ auto cb_node = GetCBNode(commandBuffer);
auto src_image_state = GetImageState(srcImage);
auto dst_image_state = GetImageState(dstImage);
// Update bindings between images and cmd buffer
- AddCommandBufferBindingImage(cb_node, src_image_state);
- AddCommandBufferBindingImage(cb_node, dst_image_state);
+ AddCommandBufferBindingImage(device_data, cb_node, src_image_state);
+ AddCommandBufferBindingImage(device_data, cb_node, dst_image_state);
}
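// Illustrative sketch, not layer code (hypothetical names): the record callback above only updates
// bookkeeping. Each image touched by the command is linked to the command buffer so that destroying the
// image while the command buffer is still recorded or pending can be detected and reported.
#include <cstdint>
#include <unordered_set>
struct CmdBufferBindingsSketch {
    std::unordered_set<uint64_t> bound_image_handles;  // images this command buffer reads or writes
};
static void BindImageToCmdBuffer(CmdBufferBindingsSketch &cb, uint64_t image_handle) {
    cb.bound_image_handles.insert(image_handle);  // mirrors the intent of AddCommandBufferBindingImage()
}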
bool CoreChecks::PreCallValidateCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
const VkImageBlit *pRegions, VkFilter filter) {
- const auto *cb_node = GetCBState(commandBuffer);
- const auto *src_image_state = GetImageState(srcImage);
- const auto *dst_image_state = GetImageState(dstImage);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ auto cb_node = GetCBNode(commandBuffer);
+ auto src_image_state = GetImageState(srcImage);
+ auto dst_image_state = GetImageState(dstImage);
bool skip = false;
if (cb_node) {
- skip |= ValidateCmd(cb_node, CMD_BLITIMAGE, "vkCmdBlitImage()");
+ skip |= ValidateCmd(device_data, cb_node, CMD_BLITIMAGE, "vkCmdBlitImage()");
}
if (cb_node && src_image_state && dst_image_state) {
- skip |= ValidateImageSampleCount(src_image_state, VK_SAMPLE_COUNT_1_BIT, "vkCmdBlitImage(): srcImage",
+ skip |= ValidateImageSampleCount(device_data, src_image_state, VK_SAMPLE_COUNT_1_BIT, "vkCmdBlitImage(): srcImage",
"VUID-vkCmdBlitImage-srcImage-00233");
- skip |= ValidateImageSampleCount(dst_image_state, VK_SAMPLE_COUNT_1_BIT, "vkCmdBlitImage(): dstImage",
+ skip |= ValidateImageSampleCount(device_data, dst_image_state, VK_SAMPLE_COUNT_1_BIT, "vkCmdBlitImage(): dstImage",
"VUID-vkCmdBlitImage-dstImage-00234");
- skip |= ValidateMemoryIsBoundToImage(src_image_state, "vkCmdBlitImage()", "VUID-vkCmdBlitImage-srcImage-00220");
- skip |= ValidateMemoryIsBoundToImage(dst_image_state, "vkCmdBlitImage()", "VUID-vkCmdBlitImage-dstImage-00225");
skip |=
- ValidateImageUsageFlags(src_image_state, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, true, "VUID-vkCmdBlitImage-srcImage-00219",
- "vkCmdBlitImage()", "VK_IMAGE_USAGE_TRANSFER_SRC_BIT");
+ ValidateMemoryIsBoundToImage(device_data, src_image_state, "vkCmdBlitImage()", "VUID-vkCmdBlitImage-srcImage-00220");
skip |=
- ValidateImageUsageFlags(dst_image_state, VK_IMAGE_USAGE_TRANSFER_DST_BIT, true, "VUID-vkCmdBlitImage-dstImage-00224",
- "vkCmdBlitImage()", "VK_IMAGE_USAGE_TRANSFER_DST_BIT");
+ ValidateMemoryIsBoundToImage(device_data, dst_image_state, "vkCmdBlitImage()", "VUID-vkCmdBlitImage-dstImage-00225");
skip |=
- ValidateCmdQueueFlags(cb_node, "vkCmdBlitImage()", VK_QUEUE_GRAPHICS_BIT, "VUID-vkCmdBlitImage-commandBuffer-cmdpool");
- skip |= ValidateCmd(cb_node, CMD_BLITIMAGE, "vkCmdBlitImage()");
- skip |= InsideRenderPass(cb_node, "vkCmdBlitImage()", "VUID-vkCmdBlitImage-renderpass");
- skip |= ValidateImageFormatFeatureFlags(src_image_state, VK_FORMAT_FEATURE_BLIT_SRC_BIT, "vkCmdBlitImage()",
+ ValidateImageUsageFlags(device_data, src_image_state, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, true,
+ "VUID-vkCmdBlitImage-srcImage-00219", "vkCmdBlitImage()", "VK_IMAGE_USAGE_TRANSFER_SRC_BIT");
+ skip |=
+ ValidateImageUsageFlags(device_data, dst_image_state, VK_IMAGE_USAGE_TRANSFER_DST_BIT, true,
+ "VUID-vkCmdBlitImage-dstImage-00224", "vkCmdBlitImage()", "VK_IMAGE_USAGE_TRANSFER_DST_BIT");
+ skip |= ValidateCmdQueueFlags(device_data, cb_node, "vkCmdBlitImage()", VK_QUEUE_GRAPHICS_BIT,
+ "VUID-vkCmdBlitImage-commandBuffer-cmdpool");
+ skip |= ValidateCmd(device_data, cb_node, CMD_BLITIMAGE, "vkCmdBlitImage()");
+ skip |= InsideRenderPass(device_data, cb_node, "vkCmdBlitImage()", "VUID-vkCmdBlitImage-renderpass");
+ skip |= ValidateImageFormatFeatureFlags(device_data, src_image_state, VK_FORMAT_FEATURE_BLIT_SRC_BIT, "vkCmdBlitImage()",
"VUID-vkCmdBlitImage-srcImage-01999", "VUID-vkCmdBlitImage-srcImage-01999");
- skip |= ValidateImageFormatFeatureFlags(dst_image_state, VK_FORMAT_FEATURE_BLIT_DST_BIT, "vkCmdBlitImage()",
+ skip |= ValidateImageFormatFeatureFlags(device_data, dst_image_state, VK_FORMAT_FEATURE_BLIT_DST_BIT, "vkCmdBlitImage()",
"VUID-vkCmdBlitImage-dstImage-02000", "VUID-vkCmdBlitImage-dstImage-02000");
// TODO: Need to validate image layouts, which will include layout validation for shared presentable images
@@ -3115,13 +3070,13 @@ bool CoreChecks::PreCallValidateCmdBlitImage(VkCommandBuffer commandBuffer, VkIm
VkImageType dst_type = dst_image_state->createInfo.imageType;
if (VK_FILTER_LINEAR == filter) {
- skip |= ValidateImageFormatFeatureFlags(src_image_state, VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT,
+ skip |= ValidateImageFormatFeatureFlags(device_data, src_image_state, VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT,
"vkCmdBlitImage()", "VUID-vkCmdBlitImage-filter-02001",
"VUID-vkCmdBlitImage-filter-02001");
} else if (VK_FILTER_CUBIC_IMG == filter) {
- skip |= ValidateImageFormatFeatureFlags(src_image_state, VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG,
- "vkCmdBlitImage()", "VUID-vkCmdBlitImage-filter-02002",
- "VUID-vkCmdBlitImage-filter-02002");
+ skip |= ValidateImageFormatFeatureFlags(device_data, src_image_state,
+ VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG, "vkCmdBlitImage()",
+ "VUID-vkCmdBlitImage-filter-02002", "VUID-vkCmdBlitImage-filter-02002");
}
if ((VK_FILTER_CUBIC_IMG == filter) && (VK_IMAGE_TYPE_3D != src_type)) {
@@ -3182,32 +3137,34 @@ bool CoreChecks::PreCallValidateCmdBlitImage(VkCommandBuffer commandBuffer, VkIm
// Do per-region checks
const char *invalid_src_layout_vuid =
- (src_image_state->shared_presentable && device_extensions.vk_khr_shared_presentable_image)
+ (src_image_state->shared_presentable && GetDeviceExtensions()->vk_khr_shared_presentable_image)
? "VUID-vkCmdBlitImage-srcImageLayout-01398"
: "VUID-vkCmdBlitImage-srcImageLayout-00222";
const char *invalid_dst_layout_vuid =
- (dst_image_state->shared_presentable && device_extensions.vk_khr_shared_presentable_image)
+ (dst_image_state->shared_presentable && GetDeviceExtensions()->vk_khr_shared_presentable_image)
? "VUID-vkCmdBlitImage-dstImageLayout-01399"
: "VUID-vkCmdBlitImage-dstImageLayout-00227";
for (uint32_t i = 0; i < regionCount; i++) {
const VkImageBlit rgn = pRegions[i];
bool hit_error = false;
- skip |= VerifyImageLayout(cb_node, src_image_state, rgn.srcSubresource, srcImageLayout,
+ skip |= VerifyImageLayout(device_data, cb_node, src_image_state, rgn.srcSubresource, srcImageLayout,
VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, "vkCmdBlitImage()", invalid_src_layout_vuid,
"VUID-vkCmdBlitImage-srcImageLayout-00221", &hit_error);
- skip |= VerifyImageLayout(cb_node, dst_image_state, rgn.dstSubresource, dstImageLayout,
+ skip |= VerifyImageLayout(device_data, cb_node, dst_image_state, rgn.dstSubresource, dstImageLayout,
VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, "vkCmdBlitImage()", invalid_dst_layout_vuid,
"VUID-vkCmdBlitImage-dstImageLayout-00226", &hit_error);
- skip |= ValidateImageSubresourceLayers(cb_node, &rgn.srcSubresource, "vkCmdBlitImage()", "srcSubresource", i);
- skip |= ValidateImageSubresourceLayers(cb_node, &rgn.dstSubresource, "vkCmdBlitImage()", "dstSubresource", i);
- skip |= ValidateImageMipLevel(cb_node, src_image_state, rgn.srcSubresource.mipLevel, i, "vkCmdBlitImage()",
+ skip |=
+ ValidateImageSubresourceLayers(device_data, cb_node, &rgn.srcSubresource, "vkCmdBlitImage()", "srcSubresource", i);
+ skip |=
+ ValidateImageSubresourceLayers(device_data, cb_node, &rgn.dstSubresource, "vkCmdBlitImage()", "dstSubresource", i);
+ skip |= ValidateImageMipLevel(device_data, cb_node, src_image_state, rgn.srcSubresource.mipLevel, i, "vkCmdBlitImage()",
"srcSubresource", "VUID-vkCmdBlitImage-srcSubresource-01705");
- skip |= ValidateImageMipLevel(cb_node, dst_image_state, rgn.dstSubresource.mipLevel, i, "vkCmdBlitImage()",
+ skip |= ValidateImageMipLevel(device_data, cb_node, dst_image_state, rgn.dstSubresource.mipLevel, i, "vkCmdBlitImage()",
"dstSubresource", "VUID-vkCmdBlitImage-dstSubresource-01706");
- skip |= ValidateImageArrayLayerRange(cb_node, src_image_state, rgn.srcSubresource.baseArrayLayer,
+ skip |= ValidateImageArrayLayerRange(device_data, cb_node, src_image_state, rgn.srcSubresource.baseArrayLayer,
rgn.srcSubresource.layerCount, i, "vkCmdBlitImage()", "srcSubresource",
"VUID-vkCmdBlitImage-srcSubresource-01707");
- skip |= ValidateImageArrayLayerRange(cb_node, dst_image_state, rgn.dstSubresource.baseArrayLayer,
+ skip |= ValidateImageArrayLayerRange(device_data, cb_node, dst_image_state, rgn.dstSubresource.baseArrayLayer,
rgn.dstSubresource.layerCount, i, "vkCmdBlitImage()", "dstSubresource",
"VUID-vkCmdBlitImage-dstSubresource-01708");
// Warn for zero-sized regions
@@ -3390,116 +3347,68 @@ bool CoreChecks::PreCallValidateCmdBlitImage(VkCommandBuffer commandBuffer, VkIm
return skip;
}
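// Illustrative sketch of the filter-dependent format-feature rule checked above, not layer code (the helper
// name is hypothetical): a VK_FILTER_LINEAR blit requires the source format to expose the linear sampled
// filtering feature for its tiling, and VK_FILTER_CUBIC_IMG requires the IMG cubic filtering bit.
#include <vulkan/vulkan.h>
static bool BlitFilterFeatureSatisfied(VkFilter filter, VkFormatFeatureFlags src_format_features) {
    switch (filter) {
        case VK_FILTER_LINEAR:
            return (src_format_features & VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT) != 0;
        case VK_FILTER_CUBIC_IMG:
            return (src_format_features & VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG) != 0;
        default:
            return true;  // VK_FILTER_NEAREST adds no extra format-feature requirement here
    }
}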
-void ValidationStateTracker::PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage,
- VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
- uint32_t regionCount, const VkImageBlit *pRegions, VkFilter filter) {
- auto cb_node = GetCBState(commandBuffer);
- auto src_image_state = GetImageState(srcImage);
- auto dst_image_state = GetImageState(dstImage);
-
- // Update bindings between images and cmd buffer
- AddCommandBufferBindingImage(cb_node, src_image_state);
- AddCommandBufferBindingImage(cb_node, dst_image_state);
-}
-
void CoreChecks::PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
const VkImageBlit *pRegions, VkFilter filter) {
- StateTracker::PreCallRecordCmdBlitImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount,
- pRegions, filter);
- auto cb_node = GetCBState(commandBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ auto cb_node = GetCBNode(commandBuffer);
auto src_image_state = GetImageState(srcImage);
auto dst_image_state = GetImageState(dstImage);
// Make sure that all image slices are updated to correct layout
for (uint32_t i = 0; i < regionCount; ++i) {
- SetImageInitialLayout(cb_node, *src_image_state, pRegions[i].srcSubresource, srcImageLayout);
- SetImageInitialLayout(cb_node, *dst_image_state, pRegions[i].dstSubresource, dstImageLayout);
+ SetImageLayout(device_data, cb_node, src_image_state, pRegions[i].srcSubresource, srcImageLayout);
+ SetImageLayout(device_data, cb_node, dst_image_state, pRegions[i].dstSubresource, dstImageLayout);
}
+ // Update bindings between images and cmd buffer
+ AddCommandBufferBindingImage(device_data, cb_node, src_image_state);
+ AddCommandBufferBindingImage(device_data, cb_node, dst_image_state);
}
// This validates that the initial layout specified in the command buffer for the IMAGE is the same as the global IMAGE layout
-bool CoreChecks::ValidateCmdBufImageLayouts(const CMD_BUFFER_STATE *pCB, const ImageSubresPairLayoutMap &globalImageLayoutMap,
- ImageSubresPairLayoutMap *overlayLayoutMap_arg) const {
- if (disabled.image_layout_validation) return false;
+bool CoreChecks::ValidateCmdBufImageLayouts(layer_data *device_data, GLOBAL_CB_NODE *pCB,
+ std::unordered_map<ImageSubresourcePair, IMAGE_LAYOUT_NODE> const &globalImageLayoutMap,
+ std::unordered_map<ImageSubresourcePair, IMAGE_LAYOUT_NODE> &overlayLayoutMap) {
bool skip = false;
- ImageSubresPairLayoutMap &overlayLayoutMap = *overlayLayoutMap_arg;
- // Iterate over the layout maps for each referenced image
- for (const auto &layout_map_entry : pCB->image_layout_map) {
- const auto image = layout_map_entry.first;
- const auto *image_state = GetImageState(image);
- if (!image_state) continue; // Can't check layouts of a dead image
- auto subres_map = layout_map_entry.second.get();
- ImageSubresourcePair isr_pair;
- isr_pair.image = image;
- isr_pair.hasSubresource = true;
-
- std::string bind_swapchain_msg = "";
- if (image_state->bind_swapchain) {
- auto swapchain_node = GetSwapchainState(image_state->bind_swapchain);
- const auto swapchain_image = swapchain_node->images[image_state->bind_swapchain_imageIndex];
- isr_pair.image = swapchain_image;
-
- string_sprintf(&bind_swapchain_msg, "bind %s imageIndex %d (%s)",
- report_data->FormatHandle(image_state->bind_swapchain).c_str(), image_state->bind_swapchain_imageIndex,
- report_data->FormatHandle(swapchain_image).c_str());
- }
-
- // Validate the initial_uses for each subresource referenced
- for (auto it_init = subres_map->BeginInitialUse(); !it_init.AtEnd(); ++it_init) {
- isr_pair.subresource = (*it_init).subresource;
- VkImageLayout initial_layout = (*it_init).layout;
- VkImageLayout image_layout;
- if (FindLayout(overlayLayoutMap, isr_pair, image_layout) || FindLayout(globalImageLayoutMap, isr_pair, image_layout)) {
- if (initial_layout == VK_IMAGE_LAYOUT_UNDEFINED) {
- // TODO: Set memory invalid which is in mem_tracker currently
- } else if (image_layout != initial_layout) {
-                    // Need to look up the initial layout *state* to get a bit more information
- const auto *initial_layout_state = subres_map->GetSubresourceInitialLayoutState(isr_pair.subresource);
- assert(initial_layout_state); // There's no way we should have an initial layout without matching state...
- bool matches = ImageLayoutMatches(initial_layout_state->aspect_mask, image_layout, initial_layout);
- if (!matches) {
- std::string formatted_label = FormatDebugLabel(" ", pCB->debug_label);
- skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(pCB->commandBuffer), kVUID_Core_DrawState_InvalidImageLayout,
- "Submitted command buffer expects %s %s (subresource: aspectMask 0x%X array layer %u, mip level %u) "
- "to be in layout %s--instead, current layout is %s.%s",
- report_data->FormatHandle(image).c_str(), bind_swapchain_msg.c_str(), isr_pair.subresource.aspectMask,
- isr_pair.subresource.arrayLayer, isr_pair.subresource.mipLevel, string_VkImageLayout(initial_layout),
- string_VkImageLayout(image_layout), formatted_label.c_str());
- }
+ for (auto cb_image_data : pCB->imageLayoutMap) {
+ VkImageLayout imageLayout;
+
+ if (FindLayout(device_data, overlayLayoutMap, cb_image_data.first, imageLayout) ||
+ FindLayout(device_data, globalImageLayoutMap, cb_image_data.first, imageLayout)) {
+ if (cb_image_data.second.initialLayout == VK_IMAGE_LAYOUT_UNDEFINED) {
+ // TODO: Set memory invalid which is in mem_tracker currently
+ } else if (imageLayout != cb_image_data.second.initialLayout) {
+ if (cb_image_data.first.hasSubresource) {
+ skip |=
+ log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ HandleToUint64(pCB->commandBuffer), kVUID_Core_DrawState_InvalidImageLayout,
+ "Submitted command buffer expects image %s (subresource: aspectMask 0x%X array layer %u, mip level "
+ "%u) to be in layout %s--instead, image %s's current layout is %s.",
+ report_data->FormatHandle(cb_image_data.first.image).c_str(),
+ cb_image_data.first.subresource.aspectMask, cb_image_data.first.subresource.arrayLayer,
+ cb_image_data.first.subresource.mipLevel, string_VkImageLayout(cb_image_data.second.initialLayout),
+ report_data->FormatHandle(cb_image_data.first.image).c_str(), string_VkImageLayout(imageLayout));
+ } else {
+ skip |= log_msg(
+ report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ HandleToUint64(pCB->commandBuffer), kVUID_Core_DrawState_InvalidImageLayout,
+ "Submitted command buffer expects image %s to be in layout %s--instead, image %s's current layout is %s.",
+ report_data->FormatHandle(cb_image_data.first.image).c_str(),
+ string_VkImageLayout(cb_image_data.second.initialLayout),
+ report_data->FormatHandle(cb_image_data.first.image).c_str(), string_VkImageLayout(imageLayout));
}
}
- }
-
-        // Update all layout set operations (which will be a subset of the initial_layouts)
- for (auto it_set = subres_map->BeginSetLayout(); !it_set.AtEnd(); ++it_set) {
- VkImageLayout layout = (*it_set).layout;
- isr_pair.subresource = (*it_set).subresource;
- SetLayout(overlayLayoutMap, isr_pair, layout);
+ SetLayout(overlayLayoutMap, cb_image_data.first, cb_image_data.second.layout);
}
}
-
return skip;
}
-void CoreChecks::UpdateCmdBufImageLayouts(CMD_BUFFER_STATE *pCB) {
- for (const auto &layout_map_entry : pCB->image_layout_map) {
- const auto image = layout_map_entry.first;
- const auto *image_state = GetImageState(image);
- if (!image_state) continue; // Can't set layouts of a dead image
- const auto &subres_map = layout_map_entry.second;
- ImageSubresourcePair isr_pair;
- isr_pair.image = image;
- isr_pair.hasSubresource = true;
-
-        // Update all layout set operations (which will be a subset of the initial_layouts)
- for (auto it_set = subres_map->BeginSetLayout(); !it_set.AtEnd(); ++it_set) {
- VkImageLayout layout = (*it_set).layout;
- isr_pair.subresource = (*it_set).subresource;
- SetGlobalLayout(isr_pair, layout);
- }
+void CoreChecks::UpdateCmdBufImageLayouts(layer_data *device_data, GLOBAL_CB_NODE *pCB) {
+ for (auto cb_image_data : pCB->imageLayoutMap) {
+ VkImageLayout imageLayout;
+ FindGlobalLayout(device_data, cb_image_data.first, imageLayout);
+ SetGlobalLayout(device_data, cb_image_data.first, cb_image_data.second.layout);
}
}
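// Illustrative sketch of the submit-time layout check above, not layer code (the packed key and helper
// names are hypothetical): the initial layout each command buffer expects for a subresource is compared
// against the overlay map (layouts produced by earlier command buffers in this submission) or, failing
// that, the global map; the command buffer's final layouts are then overlaid so later command buffers are
// validated against the updated state.
#include <unordered_map>
#include <vulkan/vulkan.h>
using LayoutMapSketch = std::unordered_map<uint64_t, VkImageLayout>;  // key packs image + subresource
static bool FindCurrentLayout(const LayoutMapSketch &overlay, const LayoutMapSketch &global, uint64_t key,
                              VkImageLayout &out) {
    auto it = overlay.find(key);
    if (it != overlay.end()) { out = it->second; return true; }
    auto git = global.find(key);
    if (git != global.end()) { out = git->second; return true; }
    return false;
}
static bool InitialLayoutMismatch(VkImageLayout expected_initial, VkImageLayout current) {
    // UNDEFINED means the command buffer does not care what layout the subresource is already in.
    return expected_initial != VK_IMAGE_LAYOUT_UNDEFINED && expected_initial != current;
}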
@@ -3508,7 +3417,7 @@ void CoreChecks::UpdateCmdBufImageLayouts(CMD_BUFFER_STATE *pCB) {
// layout attachments don't have CLEAR as their loadOp.
bool CoreChecks::ValidateLayoutVsAttachmentDescription(const debug_report_data *report_data, RenderPassCreateVersion rp_version,
const VkImageLayout first_layout, const uint32_t attachment,
- const VkAttachmentDescription2KHR &attachment_description) const {
+ const VkAttachmentDescription2KHR &attachment_description) {
bool skip = false;
const char *vuid;
const bool use_rp2 = (rp_version == RENDER_PASS_VERSION_2);
@@ -3550,8 +3459,8 @@ bool CoreChecks::ValidateLayoutVsAttachmentDescription(const debug_report_data *
return skip;
}
-bool CoreChecks::ValidateLayouts(RenderPassCreateVersion rp_version, VkDevice device,
- const VkRenderPassCreateInfo2KHR *pCreateInfo) const {
+bool CoreChecks::ValidateLayouts(layer_data *device_data, RenderPassCreateVersion rp_version, VkDevice device,
+ const VkRenderPassCreateInfo2KHR *pCreateInfo) {
bool skip = false;
const char *vuid;
const bool use_rp2 = (rp_version == RENDER_PASS_VERSION_2);
@@ -3608,12 +3517,12 @@ bool CoreChecks::ValidateLayouts(RenderPassCreateVersion rp_version, VkDevice de
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
"Layout for input attachment reference %u in subpass %u is %s but must be "
"DEPTH_STENCIL_READ_ONLY, SHADER_READ_ONLY_OPTIMAL, or GENERAL.",
- j, i, string_VkImageLayout(subpass.pInputAttachments[j].layout));
+ j, i, string_VkImageLayout(subpass.pDepthStencilAttachment->layout));
break;
case VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR:
case VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR:
- if (device_extensions.vk_khr_maintenance2) {
+ if (GetDeviceExtensions()->vk_khr_maintenance2) {
break;
} else {
// Intentionally fall through to generic error message
@@ -3729,7 +3638,7 @@ bool CoreChecks::ValidateLayouts(RenderPassCreateVersion rp_version, VkDevice de
case VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR:
case VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR:
- if (device_extensions.vk_khr_maintenance2) {
+ if (GetDeviceExtensions()->vk_khr_maintenance2) {
break;
} else {
// Intentionally fall through to generic error message
@@ -3756,13 +3665,44 @@ bool CoreChecks::ValidateLayouts(RenderPassCreateVersion rp_version, VkDevice de
return skip;
}
+// For any image objects that overlap mapped memory, verify that their layouts are PREINIT or GENERAL
+bool CoreChecks::ValidateMapImageLayouts(layer_data *device_data, VkDevice device, DEVICE_MEM_INFO const *mem_info,
+ VkDeviceSize offset, VkDeviceSize end_offset) {
+ bool skip = false;
+ // Iterate over all bound image ranges and verify that for any that overlap the map ranges, the layouts are
+ // VK_IMAGE_LAYOUT_PREINITIALIZED or VK_IMAGE_LAYOUT_GENERAL
+ // TODO : This can be optimized if we store ranges based on starting address and early exit when we pass our range
+ for (auto image_handle : mem_info->bound_images) {
+ auto img_it = mem_info->bound_ranges.find(image_handle);
+ if (img_it != mem_info->bound_ranges.end()) {
+ if (RangesIntersect(device_data, &img_it->second, offset, end_offset)) {
+ std::vector<VkImageLayout> layouts;
+ if (FindLayouts(device_data, VkImage(image_handle), layouts)) {
+ for (auto layout : layouts) {
+ if (layout != VK_IMAGE_LAYOUT_PREINITIALIZED && layout != VK_IMAGE_LAYOUT_GENERAL) {
+ skip |=
+ log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+ HandleToUint64(mem_info->mem), kVUID_Core_DrawState_InvalidImageLayout,
+ "Mapping an image with layout %s can result in undefined behavior if this memory is used "
+ "by the device. Only GENERAL or PREINITIALIZED should be used.",
+ string_VkImageLayout(layout));
+ }
+ }
+ }
+ }
+ }
+ }
+ return skip;
+}
+
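// Illustrative sketch of the rule the added ValidateMapImageLayouts() enforces, not layer code (helper
// names are hypothetical): any image bound to memory that overlaps the mapped byte range should currently
// be in GENERAL or PREINITIALIZED; other layouts can result in undefined behavior if the device also uses
// that memory, which is the warning emitted above.
#include <vulkan/vulkan.h>
static bool MappedImageLayoutOk(VkImageLayout layout) {
    return layout == VK_IMAGE_LAYOUT_GENERAL || layout == VK_IMAGE_LAYOUT_PREINITIALIZED;
}
static bool RangesOverlapSketch(VkDeviceSize a_begin, VkDeviceSize a_end, VkDeviceSize b_begin, VkDeviceSize b_end) {
    return a_begin < b_end && b_begin < a_end;  // half-open interval test, like the RangesIntersect() call above
}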
// Helper function to validate correct usage bits set for buffers or images. Verify that (actual & desired) flags != 0 or, if strict
// is true, verify that (actual & desired) flags == desired
-bool CoreChecks::ValidateUsageFlags(VkFlags actual, VkFlags desired, VkBool32 strict, const VulkanTypedHandle &typed_handle,
- const char *msgCode, char const *func_name, char const *usage_str) const {
+bool CoreChecks::ValidateUsageFlags(const layer_data *device_data, VkFlags actual, VkFlags desired, VkBool32 strict,
+ uint64_t obj_handle, VulkanObjectType obj_type, const char *msgCode, char const *func_name,
+ char const *usage_str) {
bool correct_usage = false;
bool skip = false;
- const char *type_str = object_string[typed_handle.type];
+ const char *type_str = object_string[obj_type];
if (strict) {
correct_usage = ((actual & desired) == desired);
} else {
@@ -3771,15 +3711,14 @@ bool CoreChecks::ValidateUsageFlags(VkFlags actual, VkFlags desired, VkBool32 st
if (!correct_usage) {
if (msgCode == kVUIDUndefined) {
// TODO: Fix callers with kVUIDUndefined to use correct validation checks.
- skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, get_debug_report_enum[typed_handle.type],
- typed_handle.handle, kVUID_Core_MemTrack_InvalidUsageFlag,
- "Invalid usage flag for %s used by %s. In this case, %s should have %s set during creation.",
- report_data->FormatHandle(typed_handle).c_str(), func_name, type_str, usage_str);
+ skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, get_debug_report_enum[obj_type], obj_handle,
+ kVUID_Core_MemTrack_InvalidUsageFlag,
+ "Invalid usage flag for %s %s used by %s. In this case, %s should have %s set during creation.",
+ type_str, report_data->FormatHandle(obj_handle).c_str(), func_name, type_str, usage_str);
} else {
- skip =
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, get_debug_report_enum[typed_handle.type], typed_handle.handle,
- msgCode, "Invalid usage flag for %s used by %s. In this case, %s should have %s set during creation.",
- report_data->FormatHandle(typed_handle).c_str(), func_name, type_str, usage_str);
+ skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, get_debug_report_enum[obj_type], obj_handle, msgCode,
+ "Invalid usage flag for %s %s used by %s. In this case, %s should have %s set during creation.",
+ type_str, report_data->FormatHandle(obj_handle).c_str(), func_name, type_str, usage_str);
}
}
return skip;
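// Illustrative sketch of the strict/non-strict distinction in ValidateUsageFlags() above, not layer code
// (the helper name is hypothetical): strict requires every desired usage bit to be present, while
// non-strict only requires some overlap with the desired set.
#include <vulkan/vulkan.h>
static bool UsageFlagsSatisfied(VkFlags actual, VkFlags desired, bool strict) {
    return strict ? ((actual & desired) == desired) : ((actual & desired) != 0);
}
// Example from this file: the blit checks pass strict=true with a single TRANSFER usage bit, whereas the
// buffer view check passes strict=false with both texel-buffer usage bits, since either one is sufficient.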
@@ -3787,21 +3726,21 @@ bool CoreChecks::ValidateUsageFlags(VkFlags actual, VkFlags desired, VkBool32 st
// Helper function to validate usage flags for images. For given image_state send actual vs. desired usage off to helper above
// where an error will be flagged if usage is not correct
-bool CoreChecks::ValidateImageUsageFlags(IMAGE_STATE const *image_state, VkFlags desired, bool strict, const char *msgCode,
- char const *func_name, char const *usage_string) const {
- return ValidateUsageFlags(image_state->createInfo.usage, desired, strict,
- VulkanTypedHandle(image_state->image, kVulkanObjectTypeImage), msgCode, func_name, usage_string);
+bool CoreChecks::ValidateImageUsageFlags(layer_data *device_data, IMAGE_STATE const *image_state, VkFlags desired, bool strict,
+ const char *msgCode, char const *func_name, char const *usage_string) {
+ return ValidateUsageFlags(device_data, image_state->createInfo.usage, desired, strict, HandleToUint64(image_state->image),
+ kVulkanObjectTypeImage, msgCode, func_name, usage_string);
}
-bool CoreChecks::ValidateImageFormatFeatureFlags(IMAGE_STATE const *image_state, VkFormatFeatureFlags desired,
- char const *func_name, const char *linear_vuid, const char *optimal_vuid) const {
+bool CoreChecks::ValidateImageFormatFeatureFlags(layer_data *dev_data, IMAGE_STATE const *image_state, VkFormatFeatureFlags desired,
+ char const *func_name, const char *linear_vuid, const char *optimal_vuid) {
VkFormatProperties format_properties = GetPDFormatProperties(image_state->createInfo.format);
bool skip = false;
if (image_state->createInfo.tiling == VK_IMAGE_TILING_LINEAR) {
if ((format_properties.linearTilingFeatures & desired) != desired) {
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
HandleToUint64(image_state->image), linear_vuid,
- "In %s, invalid linearTilingFeatures (0x%08X) for format %u used by %s.", func_name,
+ "In %s, invalid linearTilingFeatures (0x%08X) for format %u used by image %s.", func_name,
format_properties.linearTilingFeatures, image_state->createInfo.format,
report_data->FormatHandle(image_state->image).c_str());
}
@@ -3809,7 +3748,7 @@ bool CoreChecks::ValidateImageFormatFeatureFlags(IMAGE_STATE const *image_state,
if ((format_properties.optimalTilingFeatures & desired) != desired) {
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
HandleToUint64(image_state->image), optimal_vuid,
- "In %s, invalid optimalTilingFeatures (0x%08X) for format %u used by %s.", func_name,
+ "In %s, invalid optimalTilingFeatures (0x%08X) for format %u used by image %s.", func_name,
format_properties.optimalTilingFeatures, image_state->createInfo.format,
report_data->FormatHandle(image_state->image).c_str());
}
@@ -3817,8 +3756,9 @@ bool CoreChecks::ValidateImageFormatFeatureFlags(IMAGE_STATE const *image_state,
return skip;
}
-bool CoreChecks::ValidateImageSubresourceLayers(const CMD_BUFFER_STATE *cb_node, const VkImageSubresourceLayers *subresource_layers,
- char const *func_name, char const *member, uint32_t i) const {
+bool CoreChecks::ValidateImageSubresourceLayers(layer_data *dev_data, const GLOBAL_CB_NODE *cb_node,
+ const VkImageSubresourceLayers *subresource_layers, char const *func_name,
+ char const *member, uint32_t i) {
bool skip = false;
// layerCount must not be zero
if (subresource_layers->layerCount == 0) {
@@ -3846,14 +3786,14 @@ bool CoreChecks::ValidateImageSubresourceLayers(const CMD_BUFFER_STATE *cb_node,
// Helper function to validate usage flags for buffers. For given buffer_state send actual vs. desired usage off to helper above
// where an error will be flagged if usage is not correct
-bool CoreChecks::ValidateBufferUsageFlags(BUFFER_STATE const *buffer_state, VkFlags desired, bool strict, const char *msgCode,
- char const *func_name, char const *usage_string) const {
- return ValidateUsageFlags(buffer_state->createInfo.usage, desired, strict,
- VulkanTypedHandle(buffer_state->buffer, kVulkanObjectTypeBuffer), msgCode, func_name, usage_string);
+bool CoreChecks::ValidateBufferUsageFlags(const layer_data *device_data, BUFFER_STATE const *buffer_state, VkFlags desired,
+ bool strict, const char *msgCode, char const *func_name, char const *usage_string) {
+ return ValidateUsageFlags(device_data, buffer_state->createInfo.usage, desired, strict, HandleToUint64(buffer_state->buffer),
+ kVulkanObjectTypeBuffer, msgCode, func_name, usage_string);
}
-bool CoreChecks::ValidateBufferViewRange(const BUFFER_STATE *buffer_state, const VkBufferViewCreateInfo *pCreateInfo,
- const VkPhysicalDeviceLimits *device_limits) {
+bool CoreChecks::ValidateBufferViewRange(const layer_data *device_data, const BUFFER_STATE *buffer_state,
+ const VkBufferViewCreateInfo *pCreateInfo, const VkPhysicalDeviceLimits *device_limits) {
bool skip = false;
const VkDeviceSize &range = pCreateInfo->range;
@@ -3867,23 +3807,24 @@ bool CoreChecks::ValidateBufferViewRange(const BUFFER_STATE *buffer_state, const
range);
}
// Range must be a multiple of the element size of format
- const uint32_t format_size = FormatElementSize(pCreateInfo->format);
- if (SafeModulo(range, format_size) != 0) {
+ const size_t format_size = FormatElementSize(pCreateInfo->format);
+ if (range % format_size != 0) {
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
HandleToUint64(buffer_state->buffer), "VUID-VkBufferViewCreateInfo-range-00929",
"If VkBufferViewCreateInfo range (%" PRIuLEAST64
") does not equal VK_WHOLE_SIZE, range must be a multiple of the element size of the format "
- "(%" PRIu32 ").",
+ "(" PRINTF_SIZE_T_SPECIFIER ").",
range, format_size);
}
// Range divided by the element size of format must be less than or equal to VkPhysicalDeviceLimits::maxTexelBufferElements
- if (SafeDivision(range, format_size) > device_limits->maxTexelBufferElements) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
- HandleToUint64(buffer_state->buffer), "VUID-VkBufferViewCreateInfo-range-00930",
- "If VkBufferViewCreateInfo range (%" PRIuLEAST64
- ") does not equal VK_WHOLE_SIZE, range divided by the element size of the format (%" PRIu32
- ") must be less than or equal to VkPhysicalDeviceLimits::maxTexelBufferElements (%" PRIuLEAST32 ").",
- range, format_size, device_limits->maxTexelBufferElements);
+ if (range / format_size > device_limits->maxTexelBufferElements) {
+ skip |=
+ log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+ HandleToUint64(buffer_state->buffer), "VUID-VkBufferViewCreateInfo-range-00930",
+ "If VkBufferViewCreateInfo range (%" PRIuLEAST64
+ ") does not equal VK_WHOLE_SIZE, range divided by the element size of the format (" PRINTF_SIZE_T_SPECIFIER
+ ") must be less than or equal to VkPhysicalDeviceLimits::maxTexelBufferElements (%" PRIuLEAST32 ").",
+ range, format_size, device_limits->maxTexelBufferElements);
}
// The sum of range and offset must be less than or equal to the size of buffer
if (range + pCreateInfo->offset > buffer_state->createInfo.size) {
@@ -3898,7 +3839,8 @@ bool CoreChecks::ValidateBufferViewRange(const BUFFER_STATE *buffer_state, const
return skip;
}
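// Illustrative sketch of the range rules validated above, not layer code (the helper name is hypothetical,
// and the VUID for the final rule is omitted since it is not shown in this hunk): the view range must be a
// whole number of texels, the texel count must not exceed VkPhysicalDeviceLimits::maxTexelBufferElements,
// and offset + range must fit inside the buffer.
#include <vulkan/vulkan.h>
static bool BufferViewRangeOk(VkDeviceSize range, VkDeviceSize offset, VkDeviceSize buffer_size,
                              VkDeviceSize texel_size, uint32_t max_texel_buffer_elements) {
    if (range == VK_WHOLE_SIZE) return true;  // VK_WHOLE_SIZE means "the rest of the buffer from offset"
    if (range == 0 || texel_size == 0) return false;
    if (range % texel_size != 0) return false;                          // "VUID-VkBufferViewCreateInfo-range-00929"
    if (range / texel_size > max_texel_buffer_elements) return false;   // "VUID-VkBufferViewCreateInfo-range-00930"
    return offset + range <= buffer_size;                               // sum of offset and range must fit in the buffer
}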
-bool CoreChecks::ValidateBufferViewBuffer(const BUFFER_STATE *buffer_state, const VkBufferViewCreateInfo *pCreateInfo) {
+bool CoreChecks::ValidateBufferViewBuffer(const layer_data *device_data, const BUFFER_STATE *buffer_state,
+ const VkBufferViewCreateInfo *pCreateInfo) {
bool skip = false;
const VkFormatProperties format_properties = GetPDFormatProperties(pCreateInfo->format);
if ((buffer_state->createInfo.usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) &&
@@ -3920,25 +3862,27 @@ bool CoreChecks::ValidateBufferViewBuffer(const BUFFER_STATE *buffer_state, cons
bool CoreChecks::PreCallValidateCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+
bool skip = false;
// TODO: Add check for "VUID-vkCreateBuffer-flags-00911" (sparse address space accounting)
- if ((pCreateInfo->flags & VK_BUFFER_CREATE_SPARSE_BINDING_BIT) && (!enabled_features.core.sparseBinding)) {
+ if ((pCreateInfo->flags & VK_BUFFER_CREATE_SPARSE_BINDING_BIT) && (!GetEnabledFeatures()->core.sparseBinding)) {
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-VkBufferCreateInfo-flags-00915",
"vkCreateBuffer(): the sparseBinding device feature is disabled: Buffers cannot be created with the "
"VK_BUFFER_CREATE_SPARSE_BINDING_BIT set.");
}
- if ((pCreateInfo->flags & VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT) && (!enabled_features.core.sparseResidencyBuffer)) {
+ if ((pCreateInfo->flags & VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT) && (!GetEnabledFeatures()->core.sparseResidencyBuffer)) {
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-VkBufferCreateInfo-flags-00916",
"vkCreateBuffer(): the sparseResidencyBuffer device feature is disabled: Buffers cannot be created with "
"the VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT set.");
}
- if ((pCreateInfo->flags & VK_BUFFER_CREATE_SPARSE_ALIASED_BIT) && (!enabled_features.core.sparseResidencyAliased)) {
+ if ((pCreateInfo->flags & VK_BUFFER_CREATE_SPARSE_ALIASED_BIT) && (!GetEnabledFeatures()->core.sparseResidencyAliased)) {
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-VkBufferCreateInfo-flags-00917",
"vkCreateBuffer(): the sparseResidencyAliased device feature is disabled: Buffers cannot be created with "
@@ -3957,7 +3901,7 @@ bool CoreChecks::PreCallValidateCreateBuffer(VkDevice device, const VkBufferCrea
}
if ((pCreateInfo->flags & VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT) &&
- !enabled_features.buffer_address.bufferDeviceAddressCaptureReplay) {
+ !GetEnabledFeatures()->buffer_address.bufferDeviceAddressCaptureReplay) {
skip |= log_msg(
report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-VkBufferCreateInfo-flags-02605",
@@ -3966,7 +3910,7 @@ bool CoreChecks::PreCallValidateCreateBuffer(VkDevice device, const VkBufferCrea
}
if ((pCreateInfo->usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT) &&
- !enabled_features.buffer_address.bufferDeviceAddress) {
+ !GetEnabledFeatures()->buffer_address.bufferDeviceAddress) {
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-VkBufferCreateInfo-usage-02606",
"vkCreateBuffer(): the bufferDeviceAddress device feature is disabled: Buffers cannot be created with "
@@ -3974,37 +3918,35 @@ bool CoreChecks::PreCallValidateCreateBuffer(VkDevice device, const VkBufferCrea
}
if (pCreateInfo->sharingMode == VK_SHARING_MODE_CONCURRENT && pCreateInfo->pQueueFamilyIndices) {
- skip |= ValidateQueueFamilies(pCreateInfo->queueFamilyIndexCount, pCreateInfo->pQueueFamilyIndices, "vkCreateBuffer",
- "pCreateInfo->pQueueFamilyIndices", "VUID-VkBufferCreateInfo-sharingMode-01419",
- "VUID-VkBufferCreateInfo-sharingMode-01419", false);
+ skip |=
+ ValidateQueueFamilies(device_data, pCreateInfo->queueFamilyIndexCount, pCreateInfo->pQueueFamilyIndices,
+ "vkCreateBuffer", "pCreateInfo->pQueueFamilyIndices", "VUID-VkBufferCreateInfo-sharingMode-01419",
+ "VUID-VkBufferCreateInfo-sharingMode-01419", false);
}
return skip;
}
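// Illustrative sketch of the sparse-flag checks above, not layer code (the helper name is hypothetical):
// each VK_BUFFER_CREATE_SPARSE_* flag is only valid if the matching VkPhysicalDeviceFeatures member was
// enabled when the device was created.
#include <vulkan/vulkan.h>
static bool SparseBufferFlagsSupported(VkBufferCreateFlags flags, const VkPhysicalDeviceFeatures &enabled) {
    if ((flags & VK_BUFFER_CREATE_SPARSE_BINDING_BIT) && !enabled.sparseBinding) return false;            // flags-00915
    if ((flags & VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT) && !enabled.sparseResidencyBuffer) return false;  // flags-00916
    if ((flags & VK_BUFFER_CREATE_SPARSE_ALIASED_BIT) && !enabled.sparseResidencyAliased) return false;   // flags-00917
    return true;
}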
-void ValidationStateTracker::PostCallRecordCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
- const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer,
- VkResult result) {
+void CoreChecks::PostCallRecordCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer, VkResult result) {
if (result != VK_SUCCESS) return;
// TODO : This doesn't create deep copy of pQueueFamilyIndices so need to fix that if/when we want that data to be valid
- std::unique_ptr<BUFFER_STATE> buffer_state(new BUFFER_STATE(*pBuffer, pCreateInfo));
-
-    // Get a set of memory requirements now, in case the app does not query them itself
- DispatchGetBufferMemoryRequirements(device, *pBuffer, &buffer_state->requirements);
-
- bufferMap.insert(std::make_pair(*pBuffer, std::move(buffer_state)));
+ GetBufferMap()->insert(std::make_pair(*pBuffer, std::unique_ptr<BUFFER_STATE>(new BUFFER_STATE(*pBuffer, pCreateInfo))));
}
bool CoreChecks::PreCallValidateCreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator, VkBufferView *pView) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+
bool skip = false;
BUFFER_STATE *buffer_state = GetBufferState(pCreateInfo->buffer);
// If this isn't a sparse buffer, it needs to have memory backing it at CreateBufferView time
if (buffer_state) {
- skip |= ValidateMemoryIsBoundToBuffer(buffer_state, "vkCreateBufferView()", "VUID-VkBufferViewCreateInfo-buffer-00935");
+ skip |= ValidateMemoryIsBoundToBuffer(device_data, buffer_state, "vkCreateBufferView()",
+ "VUID-VkBufferViewCreateInfo-buffer-00935");
// In order to create a valid buffer view, the buffer must have been created with at least one of the following flags:
// UNIFORM_TEXEL_BUFFER_BIT or STORAGE_TEXEL_BUFFER_BIT
- skip |= ValidateBufferUsageFlags(buffer_state,
+ skip |= ValidateBufferUsageFlags(device_data, buffer_state,
VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT | VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT, false,
"VUID-VkBufferViewCreateInfo-buffer-00932", "vkCreateBufferView()",
"VK_BUFFER_USAGE_[STORAGE|UNIFORM]_TEXEL_BUFFER_BIT");
@@ -4018,87 +3960,32 @@ bool CoreChecks::PreCallValidateCreateBufferView(VkDevice device, const VkBuffer
pCreateInfo->offset, buffer_state->createInfo.size);
}
- const VkPhysicalDeviceLimits *device_limits = &phys_dev_props.limits;
+ const VkPhysicalDeviceLimits *device_limits = &(GetPDProperties()->limits);
// Buffer view offset must be a multiple of VkPhysicalDeviceLimits::minTexelBufferOffsetAlignment
- if ((pCreateInfo->offset % device_limits->minTexelBufferOffsetAlignment) != 0 &&
- !enabled_features.texel_buffer_alignment_features.texelBufferAlignment) {
+ if ((pCreateInfo->offset % device_limits->minTexelBufferOffsetAlignment) != 0) {
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
- HandleToUint64(buffer_state->buffer), "VUID-VkBufferViewCreateInfo-offset-02749",
+ HandleToUint64(buffer_state->buffer), "VUID-VkBufferViewCreateInfo-offset-00926",
"VkBufferViewCreateInfo offset (%" PRIuLEAST64
") must be a multiple of VkPhysicalDeviceLimits::minTexelBufferOffsetAlignment (%" PRIuLEAST64 ").",
pCreateInfo->offset, device_limits->minTexelBufferOffsetAlignment);
}
- if (enabled_features.texel_buffer_alignment_features.texelBufferAlignment) {
- VkDeviceSize elementSize = FormatElementSize(pCreateInfo->format);
- if ((elementSize % 3) == 0) {
- elementSize /= 3;
- }
- if (buffer_state->createInfo.usage & VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT) {
- VkDeviceSize alignmentRequirement =
- phys_dev_ext_props.texel_buffer_alignment_props.storageTexelBufferOffsetAlignmentBytes;
- if (phys_dev_ext_props.texel_buffer_alignment_props.storageTexelBufferOffsetSingleTexelAlignment) {
- alignmentRequirement = std::min(alignmentRequirement, elementSize);
- }
- if (SafeModulo(pCreateInfo->offset, alignmentRequirement) != 0) {
- skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
- HandleToUint64(buffer_state->buffer), "VUID-VkBufferViewCreateInfo-buffer-02750",
- "If buffer was created with usage containing VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT, "
- "VkBufferViewCreateInfo offset (%" PRIuLEAST64
- ") must be a multiple of the lesser of "
- "VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT::storageTexelBufferOffsetAlignmentBytes (%" PRIuLEAST64
- ") or, if VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT::storageTexelBufferOffsetSingleTexelAlignment "
- "(%" PRId32
- ") is VK_TRUE, the size of a texel of the requested format. "
- "If the size of a texel is a multiple of three bytes, then the size of a "
- "single component of format is used instead",
- pCreateInfo->offset, phys_dev_ext_props.texel_buffer_alignment_props.storageTexelBufferOffsetAlignmentBytes,
- phys_dev_ext_props.texel_buffer_alignment_props.storageTexelBufferOffsetSingleTexelAlignment);
- }
- }
- if (buffer_state->createInfo.usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) {
- VkDeviceSize alignmentRequirement =
- phys_dev_ext_props.texel_buffer_alignment_props.uniformTexelBufferOffsetAlignmentBytes;
- if (phys_dev_ext_props.texel_buffer_alignment_props.uniformTexelBufferOffsetSingleTexelAlignment) {
- alignmentRequirement = std::min(alignmentRequirement, elementSize);
- }
- if (SafeModulo(pCreateInfo->offset, alignmentRequirement) != 0) {
- skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
- HandleToUint64(buffer_state->buffer), "VUID-VkBufferViewCreateInfo-buffer-02751",
- "If buffer was created with usage containing VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT, "
- "VkBufferViewCreateInfo offset (%" PRIuLEAST64
- ") must be a multiple of the lesser of "
- "VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT::uniformTexelBufferOffsetAlignmentBytes (%" PRIuLEAST64
- ") or, if VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT::uniformTexelBufferOffsetSingleTexelAlignment "
- "(%" PRId32
- ") is VK_TRUE, the size of a texel of the requested format. "
- "If the size of a texel is a multiple of three bytes, then the size of a "
- "single component of format is used instead",
- pCreateInfo->offset, phys_dev_ext_props.texel_buffer_alignment_props.uniformTexelBufferOffsetAlignmentBytes,
- phys_dev_ext_props.texel_buffer_alignment_props.uniformTexelBufferOffsetSingleTexelAlignment);
- }
- }
- }
-
- skip |= ValidateBufferViewRange(buffer_state, pCreateInfo, device_limits);
+ skip |= ValidateBufferViewRange(device_data, buffer_state, pCreateInfo, device_limits);
- skip |= ValidateBufferViewBuffer(buffer_state, pCreateInfo);
+ skip |= ValidateBufferViewBuffer(device_data, buffer_state, pCreateInfo);
}
return skip;
}
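// A sketch of how an application satisfies the minTexelBufferOffsetAlignment check above;
// `physical_device`, `device`, `buffer` and `desired_offset` are hypothetical, assumed-valid values.
VkPhysicalDeviceProperties props;
vkGetPhysicalDeviceProperties(physical_device, &props);
const VkDeviceSize align = props.limits.minTexelBufferOffsetAlignment;
VkBufferViewCreateInfo view_info = {};
view_info.sType = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO;
view_info.buffer = buffer;                                    // must have *_TEXEL_BUFFER_BIT usage (VUID ...-buffer-00932)
view_info.format = VK_FORMAT_R32G32B32A32_SFLOAT;
view_info.offset = ((desired_offset + align - 1) / align) * align;  // round up to the required multiple
view_info.range = VK_WHOLE_SIZE;
VkBufferView view = VK_NULL_HANDLE;
vkCreateBufferView(device, &view_info, nullptr, &view);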
-void ValidationStateTracker::PostCallRecordCreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo,
- const VkAllocationCallbacks *pAllocator, VkBufferView *pView,
- VkResult result) {
+void CoreChecks::PostCallRecordCreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator, VkBufferView *pView, VkResult result) {
if (result != VK_SUCCESS) return;
- bufferViewMap[*pView] = std::unique_ptr<BUFFER_VIEW_STATE>(new BUFFER_VIEW_STATE(*pView, pCreateInfo));
+ (*GetBufferViewMap())[*pView] = std::unique_ptr<BUFFER_VIEW_STATE>(new BUFFER_VIEW_STATE(*pView, pCreateInfo));
}
// For the given format verify that the aspect masks make sense
-bool CoreChecks::ValidateImageAspectMask(VkImage image, VkFormat format, VkImageAspectFlags aspect_mask, const char *func_name,
- const char *vuid) const {
+bool CoreChecks::ValidateImageAspectMask(const layer_data *device_data, VkImage image, VkFormat format,
+ VkImageAspectFlags aspect_mask, const char *func_name, const char *vuid) {
bool skip = false;
VkDebugReportObjectTypeEXT objectType = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT;
if (image != VK_NULL_HANDLE) {
@@ -4156,10 +4043,10 @@ bool CoreChecks::ValidateImageAspectMask(VkImage image, VkFormat format, VkImage
return skip;
}
-bool CoreChecks::ValidateImageSubresourceRange(const uint32_t image_mip_count, const uint32_t image_layer_count,
- const VkImageSubresourceRange &subresourceRange, const char *cmd_name,
- const char *param_name, const char *image_layer_count_var_name,
- const uint64_t image_handle, SubresourceRangeErrorCodes errorCodes) const {
+bool CoreChecks::ValidateImageSubresourceRange(const layer_data *device_data, const uint32_t image_mip_count,
+ const uint32_t image_layer_count, const VkImageSubresourceRange &subresourceRange,
+ const char *cmd_name, const char *param_name, const char *image_layer_count_var_name,
+ const uint64_t image_handle, SubresourceRangeErrorCodes errorCodes) {
bool skip = false;
// Validate mip levels
@@ -4220,9 +4107,10 @@ bool CoreChecks::ValidateImageSubresourceRange(const uint32_t image_mip_count, c
return skip;
}
-bool CoreChecks::ValidateCreateImageViewSubresourceRange(const IMAGE_STATE *image_state, bool is_imageview_2d_type,
+bool CoreChecks::ValidateCreateImageViewSubresourceRange(const layer_data *device_data, const IMAGE_STATE *image_state,
+ bool is_imageview_2d_type,
const VkImageSubresourceRange &subresourceRange) {
- bool is_khr_maintenance1 = device_extensions.vk_khr_maintenance1;
+ bool is_khr_maintenance1 = GetDeviceExtensions()->vk_khr_maintenance1;
bool is_image_slicable = image_state->createInfo.imageType == VK_IMAGE_TYPE_3D &&
(image_state->createInfo.flags & VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR);
bool is_3D_to_2D_map = is_khr_maintenance1 && is_image_slicable && is_imageview_2d_type;
@@ -4233,48 +4121,46 @@ bool CoreChecks::ValidateCreateImageViewSubresourceRange(const IMAGE_STATE *imag
SubresourceRangeErrorCodes subresourceRangeErrorCodes = {};
subresourceRangeErrorCodes.base_mip_err = "VUID-VkImageViewCreateInfo-subresourceRange-01478";
subresourceRangeErrorCodes.mip_count_err = "VUID-VkImageViewCreateInfo-subresourceRange-01718";
- subresourceRangeErrorCodes.base_layer_err = is_khr_maintenance1 ? (is_3D_to_2D_map ? "VUID-VkImageViewCreateInfo-image-02724"
+ subresourceRangeErrorCodes.base_layer_err = is_khr_maintenance1 ? (is_3D_to_2D_map ? "VUID-VkImageViewCreateInfo-image-01484"
: "VUID-VkImageViewCreateInfo-image-01482")
: "VUID-VkImageViewCreateInfo-subresourceRange-01480";
subresourceRangeErrorCodes.layer_count_err = is_khr_maintenance1
- ? (is_3D_to_2D_map ? "VUID-VkImageViewCreateInfo-subresourceRange-02725"
+ ? (is_3D_to_2D_map ? "VUID-VkImageViewCreateInfo-subresourceRange-01485"
: "VUID-VkImageViewCreateInfo-subresourceRange-01483")
: "VUID-VkImageViewCreateInfo-subresourceRange-01719";
- return ValidateImageSubresourceRange(image_state->createInfo.mipLevels, image_layer_count, subresourceRange,
+ return ValidateImageSubresourceRange(device_data, image_state->createInfo.mipLevels, image_layer_count, subresourceRange,
"vkCreateImageView", "pCreateInfo->subresourceRange", image_layer_count_var_name,
HandleToUint64(image_state->image), subresourceRangeErrorCodes);
}
-bool CoreChecks::ValidateCmdClearColorSubresourceRange(const IMAGE_STATE *image_state,
- const VkImageSubresourceRange &subresourceRange,
- const char *param_name) const {
+bool CoreChecks::ValidateCmdClearColorSubresourceRange(const layer_data *device_data, const IMAGE_STATE *image_state,
+ const VkImageSubresourceRange &subresourceRange, const char *param_name) {
SubresourceRangeErrorCodes subresourceRangeErrorCodes = {};
subresourceRangeErrorCodes.base_mip_err = "VUID-vkCmdClearColorImage-baseMipLevel-01470";
subresourceRangeErrorCodes.mip_count_err = "VUID-vkCmdClearColorImage-pRanges-01692";
subresourceRangeErrorCodes.base_layer_err = "VUID-vkCmdClearColorImage-baseArrayLayer-01472";
subresourceRangeErrorCodes.layer_count_err = "VUID-vkCmdClearColorImage-pRanges-01693";
- return ValidateImageSubresourceRange(image_state->createInfo.mipLevels, image_state->createInfo.arrayLayers, subresourceRange,
- "vkCmdClearColorImage", param_name, "arrayLayers", HandleToUint64(image_state->image),
- subresourceRangeErrorCodes);
+ return ValidateImageSubresourceRange(device_data, image_state->createInfo.mipLevels, image_state->createInfo.arrayLayers,
+ subresourceRange, "vkCmdClearColorImage", param_name, "arrayLayers",
+ HandleToUint64(image_state->image), subresourceRangeErrorCodes);
}
-bool CoreChecks::ValidateCmdClearDepthSubresourceRange(const IMAGE_STATE *image_state,
- const VkImageSubresourceRange &subresourceRange,
- const char *param_name) const {
+bool CoreChecks::ValidateCmdClearDepthSubresourceRange(const layer_data *device_data, const IMAGE_STATE *image_state,
+ const VkImageSubresourceRange &subresourceRange, const char *param_name) {
SubresourceRangeErrorCodes subresourceRangeErrorCodes = {};
subresourceRangeErrorCodes.base_mip_err = "VUID-vkCmdClearDepthStencilImage-baseMipLevel-01474";
subresourceRangeErrorCodes.mip_count_err = "VUID-vkCmdClearDepthStencilImage-pRanges-01694";
subresourceRangeErrorCodes.base_layer_err = "VUID-vkCmdClearDepthStencilImage-baseArrayLayer-01476";
subresourceRangeErrorCodes.layer_count_err = "VUID-vkCmdClearDepthStencilImage-pRanges-01695";
- return ValidateImageSubresourceRange(image_state->createInfo.mipLevels, image_state->createInfo.arrayLayers, subresourceRange,
- "vkCmdClearDepthStencilImage", param_name, "arrayLayers",
+ return ValidateImageSubresourceRange(device_data, image_state->createInfo.mipLevels, image_state->createInfo.arrayLayers,
+ subresourceRange, "vkCmdClearDepthStencilImage", param_name, "arrayLayers",
HandleToUint64(image_state->image), subresourceRangeErrorCodes);
}
-bool CoreChecks::ValidateImageBarrierSubresourceRange(const IMAGE_STATE *image_state,
+bool CoreChecks::ValidateImageBarrierSubresourceRange(const layer_data *device_data, const IMAGE_STATE *image_state,
const VkImageSubresourceRange &subresourceRange, const char *cmd_name,
const char *param_name) {
SubresourceRangeErrorCodes subresourceRangeErrorCodes = {};
@@ -4283,29 +4169,31 @@ bool CoreChecks::ValidateImageBarrierSubresourceRange(const IMAGE_STATE *image_s
subresourceRangeErrorCodes.base_layer_err = "VUID-VkImageMemoryBarrier-subresourceRange-01488";
subresourceRangeErrorCodes.layer_count_err = "VUID-VkImageMemoryBarrier-subresourceRange-01725";
- return ValidateImageSubresourceRange(image_state->createInfo.mipLevels, image_state->createInfo.arrayLayers, subresourceRange,
- cmd_name, param_name, "arrayLayers", HandleToUint64(image_state->image),
+ return ValidateImageSubresourceRange(device_data, image_state->createInfo.mipLevels, image_state->createInfo.arrayLayers,
+ subresourceRange, cmd_name, param_name, "arrayLayers", HandleToUint64(image_state->image),
subresourceRangeErrorCodes);
}
bool CoreChecks::PreCallValidateCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator, VkImageView *pView) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
bool skip = false;
IMAGE_STATE *image_state = GetImageState(pCreateInfo->image);
if (image_state) {
- skip |=
- ValidateImageUsageFlags(image_state,
- VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT |
- VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
- VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV | VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT,
- false, kVUIDUndefined, "vkCreateImageView()",
- "VK_IMAGE_USAGE_[SAMPLED|STORAGE|COLOR_ATTACHMENT|DEPTH_STENCIL_ATTACHMENT|INPUT_ATTACHMENT|"
- "SHADING_RATE_IMAGE|FRAGMENT_DENSITY_MAP]_BIT");
+ skip |= ValidateImageUsageFlags(
+ device_data, image_state,
+ VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT |
+ VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
+ VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV,
+ false, kVUIDUndefined, "vkCreateImageView()",
+ "VK_IMAGE_USAGE_[SAMPLED|STORAGE|COLOR_ATTACHMENT|DEPTH_STENCIL_ATTACHMENT|INPUT_ATTACHMENT|SHADING_RATE_IMAGE]_BIT");
// If this isn't a sparse image, it needs to have memory backing it at CreateImageView time
- skip |= ValidateMemoryIsBoundToImage(image_state, "vkCreateImageView()", "VUID-VkImageViewCreateInfo-image-01020");
+ skip |=
+ ValidateMemoryIsBoundToImage(device_data, image_state, "vkCreateImageView()", "VUID-VkImageViewCreateInfo-image-01020");
// Checks imported from image layer
skip |= ValidateCreateImageViewSubresourceRange(
- image_state, pCreateInfo->viewType == VK_IMAGE_VIEW_TYPE_2D || pCreateInfo->viewType == VK_IMAGE_VIEW_TYPE_2D_ARRAY,
+ device_data, image_state,
+ pCreateInfo->viewType == VK_IMAGE_VIEW_TYPE_2D || pCreateInfo->viewType == VK_IMAGE_VIEW_TYPE_2D_ARRAY,
pCreateInfo->subresourceRange);
VkImageCreateFlags image_flags = image_state->createInfo.flags;
@@ -4320,32 +4208,57 @@ bool CoreChecks::PreCallValidateCreateImageView(VkDevice device, const VkImageVi
// If there's a chained VkImageViewUsageCreateInfo struct, modify image_usage to match
auto chained_ivuci_struct = lvl_find_in_chain<VkImageViewUsageCreateInfoKHR>(pCreateInfo->pNext);
if (chained_ivuci_struct) {
+ if (chained_ivuci_struct->usage & ~image_usage) {
+ std::stringstream ss;
+ ss << "vkCreateImageView(): Chained VkImageViewUsageCreateInfo usage field (0x" << std::hex
+ << chained_ivuci_struct->usage << ") must not include flags not present in underlying image's usage (0x"
+ << image_usage << ").";
+ skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+ HandleToUint64(pCreateInfo->image), "VUID-VkImageViewUsageCreateInfo-usage-01587", "%s",
+ ss.str().c_str());
+ }
+
image_usage = chained_ivuci_struct->usage;
}
// Validate VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT state, if view/image formats differ
if ((image_flags & VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT) && (image_format != view_format)) {
if (FormatIsMultiplane(image_format)) {
- VkFormat compat_format = FindMultiplaneCompatibleFormat(image_format, aspect_mask);
+ // View format must match the multiplane compatible format
+ uint32_t plane = 3; // invalid
+ switch (aspect_mask) {
+ case VK_IMAGE_ASPECT_PLANE_0_BIT:
+ plane = 0;
+ break;
+ case VK_IMAGE_ASPECT_PLANE_1_BIT:
+ plane = 1;
+ break;
+ case VK_IMAGE_ASPECT_PLANE_2_BIT:
+ plane = 2;
+ break;
+ default:
+ break;
+ }
+
+ VkFormat compat_format = FindMultiplaneCompatibleFormat(image_format, plane);
if (view_format != compat_format) {
- // View format must match the multiplane compatible format
std::stringstream ss;
ss << "vkCreateImageView(): ImageView format " << string_VkFormat(view_format)
- << " is not compatible with plane " << GetPlaneIndex(aspect_mask) << " of underlying image format "
+ << " is not compatible with plane " << plane << " of underlying image format "
<< string_VkFormat(image_format) << ", must be " << string_VkFormat(compat_format) << ".";
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
HandleToUint64(pCreateInfo->image), "VUID-VkImageViewCreateInfo-image-01586", "%s",
ss.str().c_str());
}
} else {
- if ((!device_extensions.vk_khr_maintenance2 ||
+ if ((!GetDeviceExtensions()->vk_khr_maintenance2 ||
!(image_flags & VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR))) {
                // Format MUST be compatible with (in the same format compatibility class as) the format the image was created with
if (FormatCompatibilityClass(image_format) != FormatCompatibilityClass(view_format)) {
std::stringstream ss;
ss << "vkCreateImageView(): ImageView format " << string_VkFormat(view_format)
- << " is not in the same format compatibility class as "
- << report_data->FormatHandle(pCreateInfo->image).c_str() << " format " << string_VkFormat(image_format)
+ << " is not in the same format compatibility class as image ("
+ << report_data->FormatHandle(pCreateInfo->image).c_str() << ") format " << string_VkFormat(image_format)
<< ". Images created with the VK_IMAGE_CREATE_MUTABLE_FORMAT BIT "
<< "can support ImageViews with differing formats but they must be in the same compatibility class.";
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
@@ -4358,7 +4271,7 @@ bool CoreChecks::PreCallValidateCreateImageView(VkDevice device, const VkImageVi
// Format MUST be IDENTICAL to the format the image was created with
if (image_format != view_format) {
std::stringstream ss;
- ss << "vkCreateImageView() format " << string_VkFormat(view_format) << " differs from "
+ ss << "vkCreateImageView() format " << string_VkFormat(view_format) << " differs from image "
<< report_data->FormatHandle(pCreateInfo->image).c_str() << " format " << string_VkFormat(image_format)
<< ". Formats MUST be IDENTICAL unless VK_IMAGE_CREATE_MUTABLE_FORMAT BIT was set on image creation.";
skip |=
@@ -4368,7 +4281,7 @@ bool CoreChecks::PreCallValidateCreateImageView(VkDevice device, const VkImageVi
}
// Validate correct image aspect bits for desired formats and format consistency
- skip |= ValidateImageAspectMask(image_state->image, image_format, aspect_mask, "vkCreateImageView()");
+ skip |= ValidateImageAspectMask(device_data, image_state->image, image_format, aspect_mask, "vkCreateImageView()");
switch (image_type) {
case VK_IMAGE_TYPE_1D:
@@ -4396,7 +4309,7 @@ bool CoreChecks::PreCallValidateCreateImageView(VkDevice device, const VkImageVi
}
break;
case VK_IMAGE_TYPE_3D:
- if (device_extensions.vk_khr_maintenance1) {
+ if (GetDeviceExtensions()->vk_khr_maintenance1) {
if (view_type != VK_IMAGE_VIEW_TYPE_3D) {
if ((view_type == VK_IMAGE_VIEW_TYPE_2D || view_type == VK_IMAGE_VIEW_TYPE_2D_ARRAY)) {
if (!(image_flags & VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR)) {
@@ -4436,8 +4349,8 @@ bool CoreChecks::PreCallValidateCreateImageView(VkDevice device, const VkImageVi
}
// External format checks needed when VK_ANDROID_external_memory_android_hardware_buffer enabled
- if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
- skip |= ValidateCreateImageViewANDROID(pCreateInfo);
+ if (GetDeviceExtensions()->vk_android_external_memory_android_hardware_buffer) {
+ skip |= ValidateCreateImageViewANDROID(device_data, pCreateInfo);
}
VkFormatProperties format_properties = GetPDFormatProperties(view_format);
@@ -4497,174 +4410,181 @@ bool CoreChecks::PreCallValidateCreateImageView(VkDevice device, const VkImageVi
return skip;
}
-void ValidationStateTracker::PostCallRecordCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
- const VkAllocationCallbacks *pAllocator, VkImageView *pView,
- VkResult result) {
+void CoreChecks::PostCallRecordCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator, VkImageView *pView, VkResult result) {
if (result != VK_SUCCESS) return;
+ auto image_view_map = GetImageViewMap();
+ (*image_view_map)[*pView] = std::unique_ptr<IMAGE_VIEW_STATE>(new IMAGE_VIEW_STATE(*pView, pCreateInfo));
+
auto image_state = GetImageState(pCreateInfo->image);
- imageViewMap[*pView] = std::unique_ptr<IMAGE_VIEW_STATE>(new IMAGE_VIEW_STATE(image_state, *pView, pCreateInfo));
+ auto &sub_res_range = (*image_view_map)[*pView].get()->create_info.subresourceRange;
+ sub_res_range.levelCount = ResolveRemainingLevels(&sub_res_range, image_state->createInfo.mipLevels);
+ sub_res_range.layerCount = ResolveRemainingLayers(&sub_res_range, image_state->createInfo.arrayLayers);
}
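// ResolveRemainingLevels/ResolveRemainingLayers (declared in buffer_validation.h) are assumed to
// collapse VK_REMAINING_MIP_LEVELS / VK_REMAINING_ARRAY_LAYERS into concrete counts; a sketch of
// that assumed behavior, not the layer's actual implementation:
static uint32_t ResolveRemainingLevelsSketch(const VkImageSubresourceRange *range, uint32_t mip_levels) {
    // "The rest of the mip chain" resolves against the image's total mip count
    return (range->levelCount == VK_REMAINING_MIP_LEVELS) ? (mip_levels - range->baseMipLevel) : range->levelCount;
}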
bool CoreChecks::PreCallValidateCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
uint32_t regionCount, const VkBufferCopy *pRegions) {
- const auto cb_node = GetCBState(commandBuffer);
- const auto src_buffer_state = GetBufferState(srcBuffer);
- const auto dst_buffer_state = GetBufferState(dstBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ auto cb_node = GetCBNode(commandBuffer);
+ auto src_buffer_state = GetBufferState(srcBuffer);
+ auto dst_buffer_state = GetBufferState(dstBuffer);
bool skip = false;
- skip |= ValidateMemoryIsBoundToBuffer(src_buffer_state, "vkCmdCopyBuffer()", "VUID-vkCmdCopyBuffer-srcBuffer-00119");
- skip |= ValidateMemoryIsBoundToBuffer(dst_buffer_state, "vkCmdCopyBuffer()", "VUID-vkCmdCopyBuffer-dstBuffer-00121");
- // Validate that SRC & DST buffers have correct usage flags set
skip |=
- ValidateBufferUsageFlags(src_buffer_state, VK_BUFFER_USAGE_TRANSFER_SRC_BIT, true, "VUID-vkCmdCopyBuffer-srcBuffer-00118",
- "vkCmdCopyBuffer()", "VK_BUFFER_USAGE_TRANSFER_SRC_BIT");
+ ValidateMemoryIsBoundToBuffer(device_data, src_buffer_state, "vkCmdCopyBuffer()", "VUID-vkCmdCopyBuffer-srcBuffer-00119");
skip |=
- ValidateBufferUsageFlags(dst_buffer_state, VK_BUFFER_USAGE_TRANSFER_DST_BIT, true, "VUID-vkCmdCopyBuffer-dstBuffer-00120",
- "vkCmdCopyBuffer()", "VK_BUFFER_USAGE_TRANSFER_DST_BIT");
+ ValidateMemoryIsBoundToBuffer(device_data, dst_buffer_state, "vkCmdCopyBuffer()", "VUID-vkCmdCopyBuffer-dstBuffer-00121");
+ // Validate that SRC & DST buffers have correct usage flags set
+ skip |=
+ ValidateBufferUsageFlags(device_data, src_buffer_state, VK_BUFFER_USAGE_TRANSFER_SRC_BIT, true,
+ "VUID-vkCmdCopyBuffer-srcBuffer-00118", "vkCmdCopyBuffer()", "VK_BUFFER_USAGE_TRANSFER_SRC_BIT");
skip |=
- ValidateCmdQueueFlags(cb_node, "vkCmdCopyBuffer()", VK_QUEUE_TRANSFER_BIT | VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
- "VUID-vkCmdCopyBuffer-commandBuffer-cmdpool");
- skip |= ValidateCmd(cb_node, CMD_COPYBUFFER, "vkCmdCopyBuffer()");
- skip |= InsideRenderPass(cb_node, "vkCmdCopyBuffer()", "VUID-vkCmdCopyBuffer-renderpass");
+ ValidateBufferUsageFlags(device_data, dst_buffer_state, VK_BUFFER_USAGE_TRANSFER_DST_BIT, true,
+ "VUID-vkCmdCopyBuffer-dstBuffer-00120", "vkCmdCopyBuffer()", "VK_BUFFER_USAGE_TRANSFER_DST_BIT");
+ skip |= ValidateCmdQueueFlags(device_data, cb_node, "vkCmdCopyBuffer()",
+ VK_QUEUE_TRANSFER_BIT | VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
+ "VUID-vkCmdCopyBuffer-commandBuffer-cmdpool");
+ skip |= ValidateCmd(device_data, cb_node, CMD_COPYBUFFER, "vkCmdCopyBuffer()");
+ skip |= InsideRenderPass(device_data, cb_node, "vkCmdCopyBuffer()", "VUID-vkCmdCopyBuffer-renderpass");
return skip;
}
-void ValidationStateTracker::PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
- uint32_t regionCount, const VkBufferCopy *pRegions) {
- auto cb_node = GetCBState(commandBuffer);
+void CoreChecks::PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
+ uint32_t regionCount, const VkBufferCopy *pRegions) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ auto cb_node = GetCBNode(commandBuffer);
auto src_buffer_state = GetBufferState(srcBuffer);
auto dst_buffer_state = GetBufferState(dstBuffer);
// Update bindings between buffers and cmd buffer
- AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
- AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
+ AddCommandBufferBindingBuffer(device_data, cb_node, src_buffer_state);
+ AddCommandBufferBindingBuffer(device_data, cb_node, dst_buffer_state);
}
-bool CoreChecks::ValidateIdleBuffer(VkBuffer buffer) {
+bool CoreChecks::ValidateIdleBuffer(layer_data *device_data, VkBuffer buffer) {
bool skip = false;
auto buffer_state = GetBufferState(buffer);
if (!buffer_state) {
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, HandleToUint64(buffer),
- kVUID_Core_DrawState_DoubleDestroy, "Cannot free %s that has not been allocated.",
+ kVUID_Core_DrawState_DoubleDestroy, "Cannot free buffer %s that has not been allocated.",
report_data->FormatHandle(buffer).c_str());
} else {
if (buffer_state->in_use.load()) {
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
HandleToUint64(buffer), "VUID-vkDestroyBuffer-buffer-00922",
- "Cannot free %s that is in use by a command buffer.", report_data->FormatHandle(buffer).c_str());
+ "Cannot free buffer %s that is in use by a command buffer.", report_data->FormatHandle(buffer).c_str());
}
}
return skip;
}
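// The in-use check above flags patterns like this hedged sketch; `queue`, `device`, `buffer` and
// `submit_info` are hypothetical, and the submitted command buffer is assumed to reference `buffer`.
vkQueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);
vkDestroyBuffer(device, buffer, nullptr);   // reported under VUID-vkDestroyBuffer-buffer-00922
// Waiting first (vkQueueWaitIdle(queue) or a fence) before the destroy avoids the error.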
bool CoreChecks::PreCallValidateDestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks *pAllocator) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
IMAGE_VIEW_STATE *image_view_state = GetImageViewState(imageView);
- const VulkanTypedHandle obj_struct(imageView, kVulkanObjectTypeImageView);
+ VK_OBJECT obj_struct = {HandleToUint64(imageView), kVulkanObjectTypeImageView};
bool skip = false;
if (image_view_state) {
- skip |=
- ValidateObjectNotInUse(image_view_state, obj_struct, "vkDestroyImageView", "VUID-vkDestroyImageView-imageView-01026");
+ skip |= ValidateObjectNotInUse(device_data, image_view_state, obj_struct, "vkDestroyImageView",
+ "VUID-vkDestroyImageView-imageView-01026");
}
return skip;
}
-void ValidationStateTracker::PreCallRecordDestroyImageView(VkDevice device, VkImageView imageView,
- const VkAllocationCallbacks *pAllocator) {
+void CoreChecks::PreCallRecordDestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks *pAllocator) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
IMAGE_VIEW_STATE *image_view_state = GetImageViewState(imageView);
if (!image_view_state) return;
- const VulkanTypedHandle obj_struct(imageView, kVulkanObjectTypeImageView);
+ VK_OBJECT obj_struct = {HandleToUint64(imageView), kVulkanObjectTypeImageView};
// Any bound cmd buffers are now invalid
- InvalidateCommandBuffers(image_view_state->cb_bindings, obj_struct);
- imageViewMap.erase(imageView);
+ InvalidateCommandBuffers(device_data, image_view_state->cb_bindings, obj_struct);
+ (*GetImageViewMap()).erase(imageView);
}
bool CoreChecks::PreCallValidateDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
auto buffer_state = GetBufferState(buffer);
bool skip = false;
if (buffer_state) {
- skip |= ValidateIdleBuffer(buffer);
+ skip |= ValidateIdleBuffer(device_data, buffer);
}
return skip;
}
-void ValidationStateTracker::PreCallRecordDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
+void CoreChecks::PreCallRecordDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (!buffer) return;
auto buffer_state = GetBufferState(buffer);
- const VulkanTypedHandle obj_struct(buffer, kVulkanObjectTypeBuffer);
+ VK_OBJECT obj_struct = {HandleToUint64(buffer), kVulkanObjectTypeBuffer};
- InvalidateCommandBuffers(buffer_state->cb_bindings, obj_struct);
+ InvalidateCommandBuffers(device_data, buffer_state->cb_bindings, obj_struct);
for (auto mem_binding : buffer_state->GetBoundMemory()) {
- auto mem_info = GetDevMemState(mem_binding);
+ auto mem_info = GetMemObjInfo(mem_binding);
if (mem_info) {
RemoveBufferMemoryRange(HandleToUint64(buffer), mem_info);
}
}
- ClearMemoryObjectBindings(obj_struct);
- bufferMap.erase(buffer_state->buffer);
+ ClearMemoryObjectBindings(HandleToUint64(buffer), kVulkanObjectTypeBuffer);
+ EraseQFOReleaseBarriers<VkBufferMemoryBarrier>(device_data, buffer);
+ GetBufferMap()->erase(buffer_state->buffer);
}
-void CoreChecks::PreCallRecordDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
- if (!buffer) return;
-
- // Clean up validation specific data
- EraseQFOReleaseBarriers<VkBufferMemoryBarrier>(buffer);
-
- // Clean up generic buffer state
- StateTracker::PreCallRecordDestroyBuffer(device, buffer, pAllocator);
-}
bool CoreChecks::PreCallValidateDestroyBufferView(VkDevice device, VkBufferView bufferView,
const VkAllocationCallbacks *pAllocator) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
auto buffer_view_state = GetBufferViewState(bufferView);
- const VulkanTypedHandle obj_struct(bufferView, kVulkanObjectTypeBufferView);
+ VK_OBJECT obj_struct = {HandleToUint64(bufferView), kVulkanObjectTypeBufferView};
bool skip = false;
if (buffer_view_state) {
- skip |= ValidateObjectNotInUse(buffer_view_state, obj_struct, "vkDestroyBufferView",
+ skip |= ValidateObjectNotInUse(device_data, buffer_view_state, obj_struct, "vkDestroyBufferView",
"VUID-vkDestroyBufferView-bufferView-00936");
}
return skip;
}
-void ValidationStateTracker::PreCallRecordDestroyBufferView(VkDevice device, VkBufferView bufferView,
- const VkAllocationCallbacks *pAllocator) {
+void CoreChecks::PreCallRecordDestroyBufferView(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks *pAllocator) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (!bufferView) return;
auto buffer_view_state = GetBufferViewState(bufferView);
- const VulkanTypedHandle obj_struct(bufferView, kVulkanObjectTypeBufferView);
+ VK_OBJECT obj_struct = {HandleToUint64(bufferView), kVulkanObjectTypeBufferView};
// Any bound cmd buffers are now invalid
- InvalidateCommandBuffers(buffer_view_state->cb_bindings, obj_struct);
- bufferViewMap.erase(bufferView);
+ InvalidateCommandBuffers(device_data, buffer_view_state->cb_bindings, obj_struct);
+ GetBufferViewMap()->erase(bufferView);
}
bool CoreChecks::PreCallValidateCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
VkDeviceSize size, uint32_t data) {
- auto cb_node = GetCBState(commandBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ auto cb_node = GetCBNode(commandBuffer);
auto buffer_state = GetBufferState(dstBuffer);
bool skip = false;
- skip |= ValidateMemoryIsBoundToBuffer(buffer_state, "vkCmdFillBuffer()", "VUID-vkCmdFillBuffer-dstBuffer-00031");
- skip |=
- ValidateCmdQueueFlags(cb_node, "vkCmdFillBuffer()", VK_QUEUE_TRANSFER_BIT | VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
- "VUID-vkCmdFillBuffer-commandBuffer-cmdpool");
- skip |= ValidateCmd(cb_node, CMD_FILLBUFFER, "vkCmdFillBuffer()");
+ skip |= ValidateMemoryIsBoundToBuffer(device_data, buffer_state, "vkCmdFillBuffer()", "VUID-vkCmdFillBuffer-dstBuffer-00031");
+ skip |= ValidateCmdQueueFlags(device_data, cb_node, "vkCmdFillBuffer()",
+ VK_QUEUE_TRANSFER_BIT | VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
+ "VUID-vkCmdFillBuffer-commandBuffer-cmdpool");
+ skip |= ValidateCmd(device_data, cb_node, CMD_FILLBUFFER, "vkCmdFillBuffer()");
// Validate that DST buffer has correct usage flags set
- skip |= ValidateBufferUsageFlags(buffer_state, VK_BUFFER_USAGE_TRANSFER_DST_BIT, true, "VUID-vkCmdFillBuffer-dstBuffer-00029",
- "vkCmdFillBuffer()", "VK_BUFFER_USAGE_TRANSFER_DST_BIT");
- skip |= InsideRenderPass(cb_node, "vkCmdFillBuffer()", "VUID-vkCmdFillBuffer-renderpass");
+ skip |=
+ ValidateBufferUsageFlags(device_data, buffer_state, VK_BUFFER_USAGE_TRANSFER_DST_BIT, true,
+ "VUID-vkCmdFillBuffer-dstBuffer-00029", "vkCmdFillBuffer()", "VK_BUFFER_USAGE_TRANSFER_DST_BIT");
+ skip |= InsideRenderPass(device_data, cb_node, "vkCmdFillBuffer()", "VUID-vkCmdFillBuffer-renderpass");
return skip;
}
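// A sketch of the call being validated above; `cmd` and `dst_buffer` are hypothetical, the buffer is
// assumed to carry VK_BUFFER_USAGE_TRANSFER_DST_BIT, and the command buffer is outside a render pass.
vkCmdFillBuffer(cmd, dst_buffer, 0 /*dstOffset*/, VK_WHOLE_SIZE /*size*/, 0 /*data*/);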
-void ValidationStateTracker::PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
- VkDeviceSize size, uint32_t data) {
- auto cb_node = GetCBState(commandBuffer);
+void CoreChecks::PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
+ VkDeviceSize size, uint32_t data) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ auto cb_node = GetCBNode(commandBuffer);
auto buffer_state = GetBufferState(dstBuffer);
// Update bindings between buffer and cmd buffer
- AddCommandBufferBindingBuffer(cb_node, buffer_state);
+ AddCommandBufferBindingBuffer(device_data, cb_node, buffer_state);
}
-bool CoreChecks::ValidateBufferImageCopyData(uint32_t regionCount, const VkBufferImageCopy *pRegions, IMAGE_STATE *image_state,
- const char *function) {
+bool CoreChecks::ValidateBufferImageCopyData(const debug_report_data *report_data, uint32_t regionCount,
+ const VkBufferImageCopy *pRegions, IMAGE_STATE *image_state, const char *function) {
bool skip = false;
for (uint32_t i = 0; i < regionCount; i++) {
@@ -4700,8 +4620,7 @@ bool CoreChecks::ValidateBufferImageCopyData(uint32_t regionCount, const VkBuffe
        // If the calling command's VkImage parameter's format is not a depth/stencil format,
// then bufferOffset must be a multiple of the calling command's VkImage parameter's element size
- uint32_t element_size = FormatElementSize(image_state->createInfo.format, pRegions[i].imageSubresource.aspectMask);
-
+ uint32_t element_size = FormatElementSize(image_state->createInfo.format);
if (!FormatIsDepthAndStencil(image_state->createInfo.format) && SafeModulo(pRegions[i].bufferOffset, element_size) != 0) {
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
HandleToUint64(image_state->image), "VUID-VkBufferImageCopy-bufferOffset-00193",
@@ -4736,41 +4655,6 @@ bool CoreChecks::ValidateBufferImageCopyData(uint32_t regionCount, const VkBuffe
function, i, pRegions[i].bufferImageHeight, pRegions[i].imageExtent.height);
}
- // Calculate adjusted image extent, accounting for multiplane image factors
- VkExtent3D adusted_image_extent = GetImageSubresourceExtent(image_state, &pRegions[i].imageSubresource);
- // imageOffset.x and (imageExtent.width + imageOffset.x) must both be >= 0 and <= image subresource width
- if ((pRegions[i].imageOffset.x < 0) || (pRegions[i].imageOffset.x > static_cast<int32_t>(adusted_image_extent.width)) ||
- ((pRegions[i].imageOffset.x + pRegions[i].imageExtent.width) > static_cast<int32_t>(adusted_image_extent.width))) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
- HandleToUint64(image_state->image), "VUID-VkBufferImageCopy-imageOffset-00197",
- "%s(): Both pRegion[%d] imageoffset.x (%d) and (imageExtent.width + imageOffset.x) (%d) must be >= "
- "zero or <= image subresource width (%d).",
- function, i, pRegions[i].imageOffset.x, (pRegions[i].imageOffset.x + pRegions[i].imageExtent.width),
- adusted_image_extent.width);
- }
-
- // imageOffset.y and (imageExtent.height + imageOffset.y) must both be >= 0 and <= image subresource height
- if ((pRegions[i].imageOffset.y < 0) || (pRegions[i].imageOffset.y > static_cast<int32_t>(adusted_image_extent.height)) ||
- ((pRegions[i].imageOffset.y + pRegions[i].imageExtent.height) > static_cast<int32_t>(adusted_image_extent.height))) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
- HandleToUint64(image_state->image), "VUID-VkBufferImageCopy-imageOffset-00198",
- "%s(): Both pRegion[%d] imageoffset.y (%d) and (imageExtent.height + imageOffset.y) (%d) must be >= "
- "zero or <= image subresource height (%d).",
- function, i, pRegions[i].imageOffset.y, (pRegions[i].imageOffset.y + pRegions[i].imageExtent.height),
- adusted_image_extent.height);
- }
-
- // imageOffset.z and (imageExtent.depth + imageOffset.z) must both be >= 0 and <= image subresource depth
- if ((pRegions[i].imageOffset.z < 0) || (pRegions[i].imageOffset.z > static_cast<int32_t>(adusted_image_extent.depth)) ||
- ((pRegions[i].imageOffset.z + pRegions[i].imageExtent.depth) > static_cast<int32_t>(adusted_image_extent.depth))) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
- HandleToUint64(image_state->image), "VUID-VkBufferImageCopy-imageOffset-00200",
- "%s(): Both pRegion[%d] imageoffset.z (%d) and (imageExtent.depth + imageOffset.z) (%d) must be >= "
- "zero or <= image subresource depth (%d).",
- function, i, pRegions[i].imageOffset.z, (pRegions[i].imageOffset.z + pRegions[i].imageExtent.depth),
- adusted_image_extent.depth);
- }
-
// subresource aspectMask must have exactly 1 bit set
const int num_bits = sizeof(VkFlags) * CHAR_BIT;
std::bitset<num_bits> aspect_mask_bits(pRegions[i].imageSubresource.aspectMask);
@@ -4926,8 +4810,27 @@ static inline bool ValidateBufferBounds(const debug_report_data *report_data, IM
VkDeviceSize buffer_width = (0 == pRegions[i].bufferRowLength ? copy_extent.width : pRegions[i].bufferRowLength);
VkDeviceSize buffer_height = (0 == pRegions[i].bufferImageHeight ? copy_extent.height : pRegions[i].bufferImageHeight);
- VkDeviceSize unit_size = FormatElementSize(image_state->createInfo.format,
- pRegions[i].imageSubresource.aspectMask); // size (bytes) of texel or block
+ VkDeviceSize unit_size = FormatElementSize(image_state->createInfo.format); // size (bytes) of texel or block
+
+ // Handle special buffer packing rules for specific depth/stencil formats
+ if (pRegions[i].imageSubresource.aspectMask & VK_IMAGE_ASPECT_STENCIL_BIT) {
+ unit_size = FormatElementSize(VK_FORMAT_S8_UINT);
+ } else if (pRegions[i].imageSubresource.aspectMask & VK_IMAGE_ASPECT_DEPTH_BIT) {
+ switch (image_state->createInfo.format) {
+ case VK_FORMAT_D16_UNORM_S8_UINT:
+ unit_size = FormatElementSize(VK_FORMAT_D16_UNORM);
+ break;
+ case VK_FORMAT_D32_SFLOAT_S8_UINT:
+ unit_size = FormatElementSize(VK_FORMAT_D32_SFLOAT);
+ break;
+ case VK_FORMAT_X8_D24_UNORM_PACK32: // Fall through
+ case VK_FORMAT_D24_UNORM_S8_UINT:
+ unit_size = 4;
+ break;
+ default:
+ break;
+ }
+ }
if (FormatIsCompressed(image_state->createInfo.format) || FormatIsSinglePlane_422(image_state->createInfo.format)) {
// Switch to texel block units, rounding up for any partially-used blocks
@@ -4964,17 +4867,18 @@ static inline bool ValidateBufferBounds(const debug_report_data *report_data, IM
bool CoreChecks::PreCallValidateCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions) {
- const auto cb_node = GetCBState(commandBuffer);
- const auto src_image_state = GetImageState(srcImage);
- const auto dst_buffer_state = GetBufferState(dstBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ auto cb_node = GetCBNode(commandBuffer);
+ auto src_image_state = GetImageState(srcImage);
+ auto dst_buffer_state = GetBufferState(dstBuffer);
- bool skip = ValidateBufferImageCopyData(regionCount, pRegions, src_image_state, "vkCmdCopyImageToBuffer");
+ bool skip = ValidateBufferImageCopyData(report_data, regionCount, pRegions, src_image_state, "vkCmdCopyImageToBuffer");
// Validate command buffer state
- skip |= ValidateCmd(cb_node, CMD_COPYIMAGETOBUFFER, "vkCmdCopyImageToBuffer()");
+ skip |= ValidateCmd(device_data, cb_node, CMD_COPYIMAGETOBUFFER, "vkCmdCopyImageToBuffer()");
// Command pool must support graphics, compute, or transfer operations
- const auto pPool = GetCommandPoolState(cb_node->createInfo.commandPool);
+ auto pPool = GetCommandPoolNode(cb_node->createInfo.commandPool);
VkQueueFlags queue_flags = GetPhysicalDeviceState()->queue_family_properties[pPool->queueFamilyIndex].queueFlags;
@@ -4989,84 +4893,81 @@ bool CoreChecks::PreCallValidateCmdCopyImageToBuffer(VkCommandBuffer commandBuff
skip |= ValidateBufferBounds(report_data, src_image_state, dst_buffer_state, regionCount, pRegions, "vkCmdCopyImageToBuffer()",
"VUID-vkCmdCopyImageToBuffer-pRegions-00183");
- skip |= ValidateImageSampleCount(src_image_state, VK_SAMPLE_COUNT_1_BIT, "vkCmdCopyImageToBuffer(): srcImage",
+ skip |= ValidateImageSampleCount(device_data, src_image_state, VK_SAMPLE_COUNT_1_BIT, "vkCmdCopyImageToBuffer(): srcImage",
"VUID-vkCmdCopyImageToBuffer-srcImage-00188");
- skip |= ValidateMemoryIsBoundToImage(src_image_state, "vkCmdCopyImageToBuffer()", "VUID-vkCmdCopyImageToBuffer-srcImage-00187");
- skip |=
- ValidateMemoryIsBoundToBuffer(dst_buffer_state, "vkCmdCopyImageToBuffer()", "VUID-vkCmdCopyImageToBuffer-dstBuffer-00192");
+ skip |= ValidateMemoryIsBoundToImage(device_data, src_image_state, "vkCmdCopyImageToBuffer()",
+ "VUID-vkCmdCopyImageToBuffer-srcImage-00187");
+ skip |= ValidateMemoryIsBoundToBuffer(device_data, dst_buffer_state, "vkCmdCopyImageToBuffer()",
+ "VUID-vkCmdCopyImageToBuffer-dstBuffer-00192");
// Validate that SRC image & DST buffer have correct usage flags set
- skip |= ValidateImageUsageFlags(src_image_state, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, true,
+ skip |= ValidateImageUsageFlags(device_data, src_image_state, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, true,
"VUID-vkCmdCopyImageToBuffer-srcImage-00186", "vkCmdCopyImageToBuffer()",
"VK_IMAGE_USAGE_TRANSFER_SRC_BIT");
- skip |= ValidateBufferUsageFlags(dst_buffer_state, VK_BUFFER_USAGE_TRANSFER_DST_BIT, true,
+ skip |= ValidateBufferUsageFlags(device_data, dst_buffer_state, VK_BUFFER_USAGE_TRANSFER_DST_BIT, true,
"VUID-vkCmdCopyImageToBuffer-dstBuffer-00191", "vkCmdCopyImageToBuffer()",
"VK_BUFFER_USAGE_TRANSFER_DST_BIT");
- if (api_version >= VK_API_VERSION_1_1 || device_extensions.vk_khr_maintenance1) {
- skip |= ValidateImageFormatFeatureFlags(src_image_state, VK_FORMAT_FEATURE_TRANSFER_SRC_BIT, "vkCmdCopyImageToBuffer()",
- "VUID-vkCmdCopyImageToBuffer-srcImage-01998",
+ if (GetApiVersion() >= VK_API_VERSION_1_1 || GetDeviceExtensions()->vk_khr_maintenance1) {
+ skip |= ValidateImageFormatFeatureFlags(device_data, src_image_state, VK_FORMAT_FEATURE_TRANSFER_SRC_BIT,
+ "vkCmdCopyImageToBuffer()", "VUID-vkCmdCopyImageToBuffer-srcImage-01998",
"VUID-vkCmdCopyImageToBuffer-srcImage-01998");
}
- skip |= InsideRenderPass(cb_node, "vkCmdCopyImageToBuffer()", "VUID-vkCmdCopyImageToBuffer-renderpass");
+ skip |= InsideRenderPass(device_data, cb_node, "vkCmdCopyImageToBuffer()", "VUID-vkCmdCopyImageToBuffer-renderpass");
bool hit_error = false;
- const char *src_invalid_layout_vuid = (src_image_state->shared_presentable && device_extensions.vk_khr_shared_presentable_image)
- ? "VUID-vkCmdCopyImageToBuffer-srcImageLayout-01397"
- : "VUID-vkCmdCopyImageToBuffer-srcImageLayout-00190";
+ const char *src_invalid_layout_vuid =
+ (src_image_state->shared_presentable && GetDeviceExtensions()->vk_khr_shared_presentable_image)
+ ? "VUID-vkCmdCopyImageToBuffer-srcImageLayout-01397"
+ : "VUID-vkCmdCopyImageToBuffer-srcImageLayout-00190";
for (uint32_t i = 0; i < regionCount; ++i) {
- skip |= ValidateImageSubresourceLayers(cb_node, &pRegions[i].imageSubresource, "vkCmdCopyImageToBuffer()",
+ skip |= ValidateImageSubresourceLayers(device_data, cb_node, &pRegions[i].imageSubresource, "vkCmdCopyImageToBuffer()",
"imageSubresource", i);
- skip |= VerifyImageLayout(cb_node, src_image_state, pRegions[i].imageSubresource, srcImageLayout,
+ skip |= VerifyImageLayout(device_data, cb_node, src_image_state, pRegions[i].imageSubresource, srcImageLayout,
VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, "vkCmdCopyImageToBuffer()", src_invalid_layout_vuid,
"VUID-vkCmdCopyImageToBuffer-srcImageLayout-00189", &hit_error);
- skip |= ValidateCopyBufferImageTransferGranularityRequirements(
- cb_node, src_image_state, &pRegions[i], i, "vkCmdCopyImageToBuffer()", "VUID-vkCmdCopyImageToBuffer-imageOffset-01794");
- skip |=
- ValidateImageMipLevel(cb_node, src_image_state, pRegions[i].imageSubresource.mipLevel, i, "vkCmdCopyImageToBuffer()",
- "imageSubresource", "VUID-vkCmdCopyImageToBuffer-imageSubresource-01703");
- skip |= ValidateImageArrayLayerRange(cb_node, src_image_state, pRegions[i].imageSubresource.baseArrayLayer,
+ skip |= ValidateCopyBufferImageTransferGranularityRequirements(device_data, cb_node, src_image_state, &pRegions[i], i,
+ "vkCmdCopyImageToBuffer()",
+ "VUID-vkCmdCopyImageToBuffer-imageOffset-01794");
+ skip |= ValidateImageMipLevel(device_data, cb_node, src_image_state, pRegions[i].imageSubresource.mipLevel, i,
+ "vkCmdCopyImageToBuffer()", "imageSubresource",
+ "VUID-vkCmdCopyImageToBuffer-imageSubresource-01703");
+ skip |= ValidateImageArrayLayerRange(device_data, cb_node, src_image_state, pRegions[i].imageSubresource.baseArrayLayer,
pRegions[i].imageSubresource.layerCount, i, "vkCmdCopyImageToBuffer()",
"imageSubresource", "VUID-vkCmdCopyImageToBuffer-imageSubresource-01704");
}
return skip;
}
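// A sketch of a vkCmdCopyImageToBuffer call that satisfies the checks above; `cmd`, `image`,
// `dst_buffer`, `width` and `height` are hypothetical, and `image` is assumed to be in
// VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL with VK_IMAGE_USAGE_TRANSFER_SRC_BIT usage.
VkBufferImageCopy region = {};
region.bufferOffset = 0;                                         // must respect the element-size multiple rule
region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;  // exactly one aspect bit set
region.imageSubresource.mipLevel = 0;
region.imageSubresource.baseArrayLayer = 0;
region.imageSubresource.layerCount = 1;
region.imageOffset = {0, 0, 0};
region.imageExtent = {width, height, 1};
vkCmdCopyImageToBuffer(cmd, image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, dst_buffer, 1, &region);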
-void ValidationStateTracker::PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage,
- VkImageLayout srcImageLayout, VkBuffer dstBuffer,
- uint32_t regionCount, const VkBufferImageCopy *pRegions) {
- auto cb_node = GetCBState(commandBuffer);
- auto src_image_state = GetImageState(srcImage);
- auto dst_buffer_state = GetBufferState(dstBuffer);
-
- // Update bindings between buffer/image and cmd buffer
- AddCommandBufferBindingImage(cb_node, src_image_state);
- AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
-}
void CoreChecks::PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions) {
- StateTracker::PreCallRecordCmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions);
-
- auto cb_node = GetCBState(commandBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ auto cb_node = GetCBNode(commandBuffer);
auto src_image_state = GetImageState(srcImage);
- // Make sure that all image slices record referenced layout
+ auto dst_buffer_state = GetBufferState(dstBuffer);
+
+    // Make sure that all image slices are updated to the correct layout
for (uint32_t i = 0; i < regionCount; ++i) {
- SetImageInitialLayout(cb_node, *src_image_state, pRegions[i].imageSubresource, srcImageLayout);
+ SetImageLayout(device_data, cb_node, src_image_state, pRegions[i].imageSubresource, srcImageLayout);
}
+ // Update bindings between buffer/image and cmd buffer
+ AddCommandBufferBindingImage(device_data, cb_node, src_image_state);
+ AddCommandBufferBindingBuffer(device_data, cb_node, dst_buffer_state);
}
bool CoreChecks::PreCallValidateCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
VkImageLayout dstImageLayout, uint32_t regionCount,
const VkBufferImageCopy *pRegions) {
- const auto cb_node = GetCBState(commandBuffer);
- const auto src_buffer_state = GetBufferState(srcBuffer);
- const auto dst_image_state = GetImageState(dstImage);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ auto cb_node = GetCBNode(commandBuffer);
+ auto src_buffer_state = GetBufferState(srcBuffer);
+ auto dst_image_state = GetImageState(dstImage);
- bool skip = ValidateBufferImageCopyData(regionCount, pRegions, dst_image_state, "vkCmdCopyBufferToImage");
+ bool skip = ValidateBufferImageCopyData(report_data, regionCount, pRegions, dst_image_state, "vkCmdCopyBufferToImage");
// Validate command buffer state
- skip |= ValidateCmd(cb_node, CMD_COPYBUFFERTOIMAGE, "vkCmdCopyBufferToImage()");
+ skip |= ValidateCmd(device_data, cb_node, CMD_COPYBUFFERTOIMAGE, "vkCmdCopyBufferToImage()");
// Command pool must support graphics, compute, or transfer operations
- const auto pPool = GetCommandPoolState(cb_node->createInfo.commandPool);
+ auto pPool = GetCommandPoolNode(cb_node->createInfo.commandPool);
VkQueueFlags queue_flags = GetPhysicalDeviceState()->queue_family_properties[pPool->queueFamilyIndex].queueFlags;
if (0 == (queue_flags & (VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT | VK_QUEUE_TRANSFER_BIT))) {
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
@@ -5078,71 +4979,68 @@ bool CoreChecks::PreCallValidateCmdCopyBufferToImage(VkCommandBuffer commandBuff
"VUID-vkCmdCopyBufferToImage-pRegions-00172");
skip |= ValidateBufferBounds(report_data, dst_image_state, src_buffer_state, regionCount, pRegions, "vkCmdCopyBufferToImage()",
"VUID-vkCmdCopyBufferToImage-pRegions-00171");
- skip |= ValidateImageSampleCount(dst_image_state, VK_SAMPLE_COUNT_1_BIT, "vkCmdCopyBufferToImage(): dstImage",
+ skip |= ValidateImageSampleCount(device_data, dst_image_state, VK_SAMPLE_COUNT_1_BIT, "vkCmdCopyBufferToImage(): dstImage",
"VUID-vkCmdCopyBufferToImage-dstImage-00179");
- skip |=
- ValidateMemoryIsBoundToBuffer(src_buffer_state, "vkCmdCopyBufferToImage()", "VUID-vkCmdCopyBufferToImage-srcBuffer-00176");
- skip |= ValidateMemoryIsBoundToImage(dst_image_state, "vkCmdCopyBufferToImage()", "VUID-vkCmdCopyBufferToImage-dstImage-00178");
- skip |= ValidateBufferUsageFlags(src_buffer_state, VK_BUFFER_USAGE_TRANSFER_SRC_BIT, true,
+ skip |= ValidateMemoryIsBoundToBuffer(device_data, src_buffer_state, "vkCmdCopyBufferToImage()",
+ "VUID-vkCmdCopyBufferToImage-srcBuffer-00176");
+ skip |= ValidateMemoryIsBoundToImage(device_data, dst_image_state, "vkCmdCopyBufferToImage()",
+ "VUID-vkCmdCopyBufferToImage-dstImage-00178");
+ skip |= ValidateBufferUsageFlags(device_data, src_buffer_state, VK_BUFFER_USAGE_TRANSFER_SRC_BIT, true,
"VUID-vkCmdCopyBufferToImage-srcBuffer-00174", "vkCmdCopyBufferToImage()",
"VK_BUFFER_USAGE_TRANSFER_SRC_BIT");
- skip |= ValidateImageUsageFlags(dst_image_state, VK_IMAGE_USAGE_TRANSFER_DST_BIT, true,
+ skip |= ValidateImageUsageFlags(device_data, dst_image_state, VK_IMAGE_USAGE_TRANSFER_DST_BIT, true,
"VUID-vkCmdCopyBufferToImage-dstImage-00177", "vkCmdCopyBufferToImage()",
"VK_IMAGE_USAGE_TRANSFER_DST_BIT");
- if (api_version >= VK_API_VERSION_1_1 || device_extensions.vk_khr_maintenance1) {
- skip |= ValidateImageFormatFeatureFlags(dst_image_state, VK_FORMAT_FEATURE_TRANSFER_DST_BIT, "vkCmdCopyBufferToImage()",
- "VUID-vkCmdCopyBufferToImage-dstImage-01997",
+ if (GetApiVersion() >= VK_API_VERSION_1_1 || GetDeviceExtensions()->vk_khr_maintenance1) {
+ skip |= ValidateImageFormatFeatureFlags(device_data, dst_image_state, VK_FORMAT_FEATURE_TRANSFER_DST_BIT,
+ "vkCmdCopyBufferToImage()", "VUID-vkCmdCopyBufferToImage-dstImage-01997",
"VUID-vkCmdCopyBufferToImage-dstImage-01997");
}
- skip |= InsideRenderPass(cb_node, "vkCmdCopyBufferToImage()", "VUID-vkCmdCopyBufferToImage-renderpass");
+ skip |= InsideRenderPass(device_data, cb_node, "vkCmdCopyBufferToImage()", "VUID-vkCmdCopyBufferToImage-renderpass");
bool hit_error = false;
- const char *dst_invalid_layout_vuid = (dst_image_state->shared_presentable && device_extensions.vk_khr_shared_presentable_image)
- ? "VUID-vkCmdCopyBufferToImage-dstImageLayout-01396"
- : "VUID-vkCmdCopyBufferToImage-dstImageLayout-00181";
+ const char *dst_invalid_layout_vuid =
+ (dst_image_state->shared_presentable && GetDeviceExtensions()->vk_khr_shared_presentable_image)
+ ? "VUID-vkCmdCopyBufferToImage-dstImageLayout-01396"
+ : "VUID-vkCmdCopyBufferToImage-dstImageLayout-00181";
for (uint32_t i = 0; i < regionCount; ++i) {
- skip |= ValidateImageSubresourceLayers(cb_node, &pRegions[i].imageSubresource, "vkCmdCopyBufferToImage()",
+ skip |= ValidateImageSubresourceLayers(device_data, cb_node, &pRegions[i].imageSubresource, "vkCmdCopyBufferToImage()",
"imageSubresource", i);
- skip |= VerifyImageLayout(cb_node, dst_image_state, pRegions[i].imageSubresource, dstImageLayout,
+ skip |= VerifyImageLayout(device_data, cb_node, dst_image_state, pRegions[i].imageSubresource, dstImageLayout,
VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, "vkCmdCopyBufferToImage()", dst_invalid_layout_vuid,
"VUID-vkCmdCopyBufferToImage-dstImageLayout-00180", &hit_error);
- skip |= ValidateCopyBufferImageTransferGranularityRequirements(
- cb_node, dst_image_state, &pRegions[i], i, "vkCmdCopyBufferToImage()", "VUID-vkCmdCopyBufferToImage-imageOffset-01793");
- skip |=
- ValidateImageMipLevel(cb_node, dst_image_state, pRegions[i].imageSubresource.mipLevel, i, "vkCmdCopyBufferToImage()",
- "imageSubresource", "VUID-vkCmdCopyBufferToImage-imageSubresource-01701");
- skip |= ValidateImageArrayLayerRange(cb_node, dst_image_state, pRegions[i].imageSubresource.baseArrayLayer,
+ skip |= ValidateCopyBufferImageTransferGranularityRequirements(device_data, cb_node, dst_image_state, &pRegions[i], i,
+ "vkCmdCopyBufferToImage()",
+ "VUID-vkCmdCopyBufferToImage-imageOffset-01793");
+ skip |= ValidateImageMipLevel(device_data, cb_node, dst_image_state, pRegions[i].imageSubresource.mipLevel, i,
+ "vkCmdCopyBufferToImage()", "imageSubresource",
+ "VUID-vkCmdCopyBufferToImage-imageSubresource-01701");
+ skip |= ValidateImageArrayLayerRange(device_data, cb_node, dst_image_state, pRegions[i].imageSubresource.baseArrayLayer,
pRegions[i].imageSubresource.layerCount, i, "vkCmdCopyBufferToImage()",
"imageSubresource", "VUID-vkCmdCopyBufferToImage-imageSubresource-01702");
}
return skip;
}
-void ValidationStateTracker::PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
- VkImageLayout dstImageLayout, uint32_t regionCount,
- const VkBufferImageCopy *pRegions) {
- auto cb_node = GetCBState(commandBuffer);
- auto src_buffer_state = GetBufferState(srcBuffer);
- auto dst_image_state = GetImageState(dstImage);
-
- AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
- AddCommandBufferBindingImage(cb_node, dst_image_state);
-}
-
void CoreChecks::PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
VkImageLayout dstImageLayout, uint32_t regionCount,
const VkBufferImageCopy *pRegions) {
- StateTracker::PreCallRecordCmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions);
-
- auto cb_node = GetCBState(commandBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ auto cb_node = GetCBNode(commandBuffer);
+ auto src_buffer_state = GetBufferState(srcBuffer);
auto dst_image_state = GetImageState(dstImage);
- // Make sure that all image slices are record referenced layout
+
+    // Make sure that all image slices are updated to the correct layout
for (uint32_t i = 0; i < regionCount; ++i) {
- SetImageInitialLayout(cb_node, *dst_image_state, pRegions[i].imageSubresource, dstImageLayout);
+ SetImageLayout(device_data, cb_node, dst_image_state, pRegions[i].imageSubresource, dstImageLayout);
}
+ AddCommandBufferBindingBuffer(device_data, cb_node, src_buffer_state);
+ AddCommandBufferBindingImage(device_data, cb_node, dst_image_state);
}
bool CoreChecks::PreCallValidateGetImageSubresourceLayout(VkDevice device, VkImage image, const VkImageSubresource *pSubresource,
VkSubresourceLayout *pLayout) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ const auto report_data = device_data->report_data;
bool skip = false;
const VkImageAspectFlags sub_aspect = pSubresource->aspectMask;
@@ -5155,7 +5053,7 @@ bool CoreChecks::PreCallValidateGetImageSubresourceLayout(VkDevice device, VkIma
"vkGetImageSubresourceLayout(): VkImageSubresource.aspectMask must have exactly 1 bit set.");
}
- const IMAGE_STATE *image_entry = GetImageState(image);
+ IMAGE_STATE *image_entry = GetImageState(image);
if (!image_entry) {
return skip;
}
@@ -5215,8 +5113,8 @@ bool CoreChecks::PreCallValidateGetImageSubresourceLayout(VkDevice device, VkIma
}
}
- if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
- skip |= ValidateGetImageSubresourceLayoutANDROID(image);
+ if (GetDeviceExtensions()->vk_android_external_memory_android_hardware_buffer) {
+ skip |= ValidateGetImageSubresourceLayoutANDROID(device_data, image);
}
return skip;
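The "exactly 1 bit set" aspectMask precondition enforced in this function is a plain bitmask property; a minimal sketch of that test, assuming nothing beyond an integer flags word:

#include <cstdint>

// True when exactly one bit of a flags word is set (e.g. a single VkImageAspectFlagBits value).
static bool ExactlyOneBitSet(uint32_t flags) {
    return flags != 0 && (flags & (flags - 1)) == 0;
}
// Example: ExactlyOneBitSet(0x1) is true, ExactlyOneBitSet(0x3) is false.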
diff --git a/layers/buffer_validation.h b/layers/buffer_validation.h
index a4cf548e8..d3abb7198 100644
--- a/layers/buffer_validation.h
+++ b/layers/buffer_validation.h
@@ -30,6 +30,9 @@
#include <algorithm>
#include <bitset>
+class CoreChecks;
+typedef CoreChecks layer_data;
+
uint32_t FullMipChainLevels(uint32_t height, uint32_t width = 1, uint32_t depth = 1);
uint32_t FullMipChainLevels(VkExtent3D);
uint32_t FullMipChainLevels(VkExtent2D);
@@ -37,6 +40,11 @@ uint32_t FullMipChainLevels(VkExtent2D);
uint32_t ResolveRemainingLevels(const VkImageSubresourceRange *range, uint32_t mip_levels);
uint32_t ResolveRemainingLayers(const VkImageSubresourceRange *range, uint32_t layers);
-VkImageSubresourceRange NormalizeSubresourceRange(const IMAGE_STATE &image_state, const VkImageSubresourceRange &range);
+
+bool FindLayout(const std::unordered_map<ImageSubresourcePair, IMAGE_LAYOUT_NODE> &imageLayoutMap, ImageSubresourcePair imgpair,
+ VkImageLayout &layout, const VkImageAspectFlags aspectMask);
+
+void SetLayout(std::unordered_map<ImageSubresourcePair, IMAGE_LAYOUT_NODE> &imageLayoutMap, ImageSubresourcePair imgpair,
+ VkImageLayout layout);
#endif // CORE_VALIDATION_BUFFER_VALIDATION_H_
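The FindLayout()/SetLayout() declarations above are thin wrappers over an unordered map keyed by an image subresource; a simplified, self-contained sketch of that find-or-insert pattern, where SubresourceKey and Layout are hypothetical stand-ins for the layer's ImageSubresourcePair and IMAGE_LAYOUT_NODE:

#include <cstddef>
#include <cstdint>
#include <unordered_map>

// Hypothetical key type; the real ImageSubresourcePair also carries aspect information,
// but the map mechanics are the same.
struct SubresourceKey {
    uint64_t image;        // raw handle value
    uint32_t mip_level;
    uint32_t array_layer;
    bool operator==(const SubresourceKey &o) const {
        return image == o.image && mip_level == o.mip_level && array_layer == o.array_layer;
    }
};
struct SubresourceKeyHash {
    std::size_t operator()(const SubresourceKey &k) const {
        return std::hash<uint64_t>()(k.image) ^
               std::hash<uint64_t>()((uint64_t(k.mip_level) << 32) | k.array_layer);
    }
};

enum class Layout { Undefined, TransferDst, ShaderReadOnly };
using LayoutMap = std::unordered_map<SubresourceKey, Layout, SubresourceKeyHash>;

// Record the layout last seen for a subresource (insert or overwrite).
void SetLayout(LayoutMap &map, const SubresourceKey &key, Layout layout) { map[key] = layout; }

// Report the tracked layout for a subresource, if any.
bool FindLayout(const LayoutMap &map, const SubresourceKey &key, Layout &layout_out) {
    auto it = map.find(key);
    if (it == map.end()) return false;
    layout_out = it->second;
    return true;
}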
diff --git a/layers/cast_utils.h b/layers/cast_utils.h
deleted file mode 100644
index b7ff77bc7..000000000
--- a/layers/cast_utils.h
+++ /dev/null
@@ -1,93 +0,0 @@
-/* Copyright (c) 2019 The Khronos Group Inc.
- * Copyright (c) 2019 Valve Corporation
- * Copyright (c) 2019 LunarG, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * Author: John Zulauf <jzulauf@lunarg.com>
- *
- */
-#pragma once
-#ifndef CAST_UTILS_H_
-#define CAST_UTILS_H_
-
-#include <cassert>
-#include <cstddef>
-#include <cstdint>
-#include <functional>
-
-#define CAST_TO_FROM_UTILS
-// Casts to allow various types of less than 64 bits to be cast to and from uint64_t safely and portably
-template <typename HandleType, typename Uint>
-static inline HandleType CastFromUint(Uint untyped_handle) {
- static_assert(sizeof(HandleType) == sizeof(Uint), "HandleType must be the same size as untyped handle");
- return *reinterpret_cast<HandleType *>(&untyped_handle);
-}
-template <typename HandleType, typename Uint>
-static inline Uint CastToUint(HandleType handle) {
- static_assert(sizeof(HandleType) == sizeof(Uint), "HandleType must be the same size as untyped handle");
- return *reinterpret_cast<Uint *>(&handle);
-}
-
-// Ensure that the size changing casts are *static* to ensure portability
-template <typename HandleType>
-static inline HandleType CastFromUint64(uint64_t untyped_handle) {
- static_assert(sizeof(HandleType) <= sizeof(uint64_t), "HandleType must be not larger than the untyped handle size");
- typedef
- typename std::conditional<sizeof(HandleType) == sizeof(uint8_t), uint8_t,
- typename std::conditional<sizeof(HandleType) == sizeof(uint16_t), uint16_t,
- typename std::conditional<sizeof(HandleType) == sizeof(uint32_t),
- uint32_t, uint64_t>::type>::type>::type Uint;
- return CastFromUint<HandleType, Uint>(static_cast<Uint>(untyped_handle));
-}
-
-template <typename HandleType>
-static uint64_t CastToUint64(HandleType handle) {
- static_assert(sizeof(HandleType) <= sizeof(uint64_t), "HandleType must be not larger than the untyped handle size");
- typedef
- typename std::conditional<sizeof(HandleType) == sizeof(uint8_t), uint8_t,
- typename std::conditional<sizeof(HandleType) == sizeof(uint16_t), uint16_t,
- typename std::conditional<sizeof(HandleType) == sizeof(uint32_t),
- uint32_t, uint64_t>::type>::type>::type Uint;
- return static_cast<uint64_t>(CastToUint<HandleType, Uint>(handle));
-}
-
-// Convenience functions to cast between handles and the types the handles abstract, reflecting the Vulkan handle scheme, where
-// Handles are either pointers (dispatchable) or sizeof(uint64_t) (non-dispatchable), s.t. full size-safe casts are used and
-// we ensure that handles are large enough to contain the underlying type.
-template <typename HandleType, typename ValueType>
-void CastToHandle(ValueType value, HandleType *handle) {
- static_assert(sizeof(HandleType) >= sizeof(ValueType), "HandleType must be large enough to hold internal value");
- *handle = CastFromUint64<HandleType>(CastToUint64<ValueType>(value));
-}
-// This form is conveniently "inline"; you should only need to specify the handle type (the value type being deducible from the arg)
-template <typename HandleType, typename ValueType>
-HandleType CastToHandle(ValueType value) {
- HandleType handle;
- CastToHandle(value, &handle);
- return handle;
-}
-
-template <typename ValueType, typename HandleType>
-void CastFromHandle(HandleType handle, ValueType *value) {
- static_assert(sizeof(HandleType) >= sizeof(ValueType), "HandleType must be large enough to hold internal value");
- *value = CastFromUint64<ValueType>(CastToUint64<HandleType>(handle));
-}
-template <typename ValueType, typename HandleType>
-ValueType CastFromHandle(HandleType handle) {
- ValueType value;
- CastFromHandle(handle, &value);
- return value;
-}
-
-#endif // CAST_UTILS_H_
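The header removed above exists to move handle bits through uint64_t without ever widening or narrowing through a wrong-size type; a self-contained sketch of the same size-dispatching cast, using plain integers and pointers rather than Vulkan handles:

#include <cassert>
#include <cstdint>
#include <cstring>
#include <type_traits>

// Same idea as the removed CastToUint64(): pick an unsigned type of exactly the handle's
// width, reinterpret through it, then widen to uint64_t.
template <typename HandleType>
static uint64_t ToUint64(HandleType handle) {
    static_assert(sizeof(HandleType) <= sizeof(uint64_t), "handle must fit in 64 bits");
    using Uint = typename std::conditional<
        sizeof(HandleType) == sizeof(uint8_t), uint8_t,
        typename std::conditional<
            sizeof(HandleType) == sizeof(uint16_t), uint16_t,
            typename std::conditional<sizeof(HandleType) == sizeof(uint32_t), uint32_t, uint64_t>::type>::type>::type;
    static_assert(sizeof(Uint) == sizeof(HandleType), "width mismatch");
    Uint bits;
    std::memcpy(&bits, &handle, sizeof(bits));  // size-exact, portable reinterpretation
    return static_cast<uint64_t>(bits);
}

int main() {
    uint32_t narrow_handle = 0xCAFEBABEu;  // stand-in for a 32-bit non-dispatchable handle value
    int dummy = 0;
    int *dispatchable = &dummy;            // pointer-sized handles round-trip the same way
    assert(ToUint64(narrow_handle) == 0xCAFEBABEull);
    (void)ToUint64(dispatchable);
    return 0;
}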
diff --git a/layers/core_validation.cpp b/layers/core_validation.cpp
index 51173f2c9..d33dc03ab 100644
--- a/layers/core_validation.cpp
+++ b/layers/core_validation.cpp
@@ -44,7 +44,6 @@
#include <cmath>
#include <iostream>
#include <list>
-#include <math.h>
#include <map>
#include <memory>
#include <mutex>
@@ -59,6 +58,12 @@
#include "vk_loader_platform.h"
#include "vk_dispatch_table_helper.h"
#include "vk_enum_string_helper.h"
+#if defined(__GNUC__)
+#pragma GCC diagnostic ignored "-Wwrite-strings"
+#endif
+#if defined(__GNUC__)
+#pragma GCC diagnostic warning "-Wwrite-strings"
+#endif
#include "chassis.h"
#include "convert_to_renderpass2.h"
#include "core_validation.h"
@@ -116,15 +121,25 @@ static const VkDeviceMemory MEMTRACKER_SWAP_CHAIN_IMAGE_KEY = (VkDeviceMemory)(-
// 2nd special memory handle used to flag object as unbound from memory
static const VkDeviceMemory MEMORY_UNBOUND = VkDeviceMemory(~((uint64_t)(0)) - 1);
-// Get the global maps of pending releases
-const GlobalQFOTransferBarrierMap<VkImageMemoryBarrier> &CoreChecks::GetGlobalQFOReleaseBarrierMap(
- const QFOTransferBarrier<VkImageMemoryBarrier>::Tag &type_tag) const {
- return qfo_release_image_barrier_map;
+// Return buffer state ptr for specified buffer or else NULL
+BUFFER_STATE *CoreChecks::GetBufferState(VkBuffer buffer) {
+ auto buff_it = bufferMap.find(buffer);
+ if (buff_it == bufferMap.end()) {
+ return nullptr;
+ }
+ return buff_it->second.get();
}
-const GlobalQFOTransferBarrierMap<VkBufferMemoryBarrier> &CoreChecks::GetGlobalQFOReleaseBarrierMap(
- const QFOTransferBarrier<VkBufferMemoryBarrier>::Tag &type_tag) const {
- return qfo_release_buffer_barrier_map;
+
+// Return IMAGE_VIEW_STATE ptr for specified imageView or else NULL
+IMAGE_VIEW_STATE *CoreChecks::GetImageViewState(VkImageView image_view) {
+ auto iv_it = imageViewMap.find(image_view);
+ if (iv_it == imageViewMap.end()) {
+ return nullptr;
+ }
+ return iv_it->second.get();
}
+
+// Get the global map of pending releases
GlobalQFOTransferBarrierMap<VkImageMemoryBarrier> &CoreChecks::GetGlobalQFOReleaseBarrierMap(
const QFOTransferBarrier<VkImageMemoryBarrier>::Tag &type_tag) {
return qfo_release_image_barrier_map;
@@ -135,13 +150,58 @@ GlobalQFOTransferBarrierMap<VkBufferMemoryBarrier> &CoreChecks::GetGlobalQFORele
}
// Get the image viewstate for a given framebuffer attachment
-IMAGE_VIEW_STATE *ValidationStateTracker::GetAttachmentImageViewState(FRAMEBUFFER_STATE *framebuffer, uint32_t index) {
+IMAGE_VIEW_STATE *CoreChecks::GetAttachmentImageViewState(FRAMEBUFFER_STATE *framebuffer, uint32_t index) {
assert(framebuffer && (index < framebuffer->createInfo.attachmentCount));
+#ifdef FRAMEBUFFER_ATTACHMENT_STATE_CACHE
+ return framebuffer->attachments[index].view_state;
+#else
const VkImageView &image_view = framebuffer->createInfo.pAttachments[index];
return GetImageViewState(image_view);
+#endif
+}
+
+// Return sampler node ptr for specified sampler or else NULL
+SAMPLER_STATE *CoreChecks::GetSamplerState(VkSampler sampler) {
+ auto sampler_it = samplerMap.find(sampler);
+ if (sampler_it == samplerMap.end()) {
+ return nullptr;
+ }
+ return sampler_it->second.get();
+}
+// Return image state ptr for specified image or else NULL
+IMAGE_STATE *CoreChecks::GetImageState(VkImage image) {
+ auto img_it = imageMap.find(image);
+ if (img_it == imageMap.end()) {
+ return nullptr;
+ }
+ return img_it->second.get();
+}
+// Return swapchain node for specified swapchain or else NULL
+SWAPCHAIN_NODE *CoreChecks::GetSwapchainNode(VkSwapchainKHR swapchain) {
+ auto swp_it = swapchainMap.find(swapchain);
+ if (swp_it == swapchainMap.end()) {
+ return nullptr;
+ }
+ return swp_it->second.get();
+}
+// Return buffer view state ptr for specified bufferView or else NULL
+BUFFER_VIEW_STATE *CoreChecks::GetBufferViewState(VkBufferView buffer_view) {
+ auto bv_it = bufferViewMap.find(buffer_view);
+ if (bv_it == bufferViewMap.end()) {
+ return nullptr;
+ }
+ return bv_it->second.get();
+}
+
+FENCE_NODE *CoreChecks::GetFenceNode(VkFence fence) {
+ auto it = fenceMap.find(fence);
+ if (it == fenceMap.end()) {
+ return nullptr;
+ }
+ return &it->second;
}
-EVENT_STATE *ValidationStateTracker::GetEventState(VkEvent event) {
+EVENT_STATE *CoreChecks::GetEventNode(VkEvent event) {
auto it = eventMap.find(event);
if (it == eventMap.end()) {
return nullptr;
@@ -149,14 +209,15 @@ EVENT_STATE *ValidationStateTracker::GetEventState(VkEvent event) {
return &it->second;
}
-const QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) const {
- auto it = queueMap.find(queue);
- if (it == queueMap.cend()) {
+QUERY_POOL_NODE *CoreChecks::GetQueryPoolNode(VkQueryPool query_pool) {
+ auto it = queryPoolMap.find(query_pool);
+ if (it == queryPoolMap.end()) {
return nullptr;
}
return &it->second;
}
-QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) {
+
+QUEUE_STATE *CoreChecks::GetQueueState(VkQueue queue) {
auto it = queueMap.find(queue);
if (it == queueMap.end()) {
return nullptr;
@@ -164,16 +225,23 @@ QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) {
return &it->second;
}
-const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) const {
- auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
- auto it = phys_dev_map->find(phys);
- if (it == phys_dev_map->end()) {
+SEMAPHORE_NODE *CoreChecks::GetSemaphoreNode(VkSemaphore semaphore) {
+ auto it = semaphoreMap.find(semaphore);
+ if (it == semaphoreMap.end()) {
return nullptr;
}
return &it->second;
}
-PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) {
+COMMAND_POOL_NODE *CoreChecks::GetCommandPoolNode(VkCommandPool pool) {
+ auto it = commandPoolMap.find(pool);
+ if (it == commandPoolMap.end()) {
+ return nullptr;
+ }
+ return &it->second;
+}
+
+PHYSICAL_DEVICE_STATE *CoreChecks::GetPhysicalDeviceState(VkPhysicalDevice phys) {
auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
auto it = phys_dev_map->find(phys);
if (it == phys_dev_map->end()) {
@@ -182,118 +250,55 @@ PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysical
return &it->second;
}
-PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() { return physical_device_state; }
-const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() const { return physical_device_state; }
+PHYSICAL_DEVICE_STATE *CoreChecks::GetPhysicalDeviceState() { return physical_device_state; }
+
+SURFACE_STATE *CoreChecks::GetSurfaceState(VkSurfaceKHR surface) {
+ auto *surf_map = ((surface_map.size() > 0) ? &surface_map : &instance_state->surface_map);
+ auto it = surf_map->find(surface);
+ if (it == surf_map->end()) {
+ return nullptr;
+ }
+ return &it->second;
+}
// Return ptr to memory binding for given handle of specified type
-template <typename State, typename Result>
-static Result GetObjectMemBindingImpl(State state, const VulkanTypedHandle &typed_handle) {
- switch (typed_handle.type) {
+BINDABLE *CoreChecks::GetObjectMemBinding(uint64_t handle, VulkanObjectType type) {
+ switch (type) {
case kVulkanObjectTypeImage:
- return state->GetImageState(typed_handle.Cast<VkImage>());
+ return GetImageState(VkImage(handle));
case kVulkanObjectTypeBuffer:
- return state->GetBufferState(typed_handle.Cast<VkBuffer>());
- case kVulkanObjectTypeAccelerationStructureNV:
- return state->GetAccelerationStructureState(typed_handle.Cast<VkAccelerationStructureNV>());
+ return GetBufferState(VkBuffer(handle));
default:
break;
}
return nullptr;
}
-const BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) const {
- return GetObjectMemBindingImpl<const ValidationStateTracker *, const BINDABLE *>(this, typed_handle);
-}
-BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) {
- return GetObjectMemBindingImpl<ValidationStateTracker *, BINDABLE *>(this, typed_handle);
-}
-
-ImageSubresourceLayoutMap::InitialLayoutState::InitialLayoutState(const CMD_BUFFER_STATE &cb_state,
- const IMAGE_VIEW_STATE *view_state)
- : image_view(VK_NULL_HANDLE), aspect_mask(0), label(cb_state.debug_label) {
- if (view_state) {
- image_view = view_state->image_view;
- aspect_mask = view_state->create_info.subresourceRange.aspectMask;
- }
-}
-std::string FormatDebugLabel(const char *prefix, const LoggingLabel &label) {
- if (label.Empty()) return std::string();
- std::string out;
- string_sprintf(&out, "%sVkDebugUtilsLabel(name='%s' color=[%g, %g %g, %g])", prefix, label.name.c_str(), label.color[0],
- label.color[1], label.color[2], label.color[3]);
- return out;
+std::unordered_map<VkSamplerYcbcrConversion, uint64_t> *CoreChecks::GetYcbcrConversionFormatMap() {
+ return &ycbcr_conversion_ahb_fmt_map;
}
-// the ImageLayoutMap implementation bakes in the number of valid aspects -- we have to choose the correct one at construction time
-template <uint32_t kThreshold>
-static std::unique_ptr<ImageSubresourceLayoutMap> LayoutMapFactoryByAspect(const IMAGE_STATE &image_state) {
- ImageSubresourceLayoutMap *map = nullptr;
- switch (image_state.full_range.aspectMask) {
- case VK_IMAGE_ASPECT_COLOR_BIT:
- map = new ImageSubresourceLayoutMapImpl<ColorAspectTraits, kThreshold>(image_state);
- break;
- case VK_IMAGE_ASPECT_DEPTH_BIT:
- map = new ImageSubresourceLayoutMapImpl<DepthAspectTraits, kThreshold>(image_state);
- break;
- case VK_IMAGE_ASPECT_STENCIL_BIT:
- map = new ImageSubresourceLayoutMapImpl<StencilAspectTraits, kThreshold>(image_state);
- break;
- case VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT:
- map = new ImageSubresourceLayoutMapImpl<DepthStencilAspectTraits, kThreshold>(image_state);
- break;
- case VK_IMAGE_ASPECT_PLANE_0_BIT | VK_IMAGE_ASPECT_PLANE_1_BIT:
- map = new ImageSubresourceLayoutMapImpl<Multiplane2AspectTraits, kThreshold>(image_state);
- break;
- case VK_IMAGE_ASPECT_PLANE_0_BIT | VK_IMAGE_ASPECT_PLANE_1_BIT | VK_IMAGE_ASPECT_PLANE_2_BIT:
- map = new ImageSubresourceLayoutMapImpl<Multiplane3AspectTraits, kThreshold>(image_state);
- break;
- }
+std::unordered_set<uint64_t> *CoreChecks::GetAHBExternalFormatsSet() { return &ahb_ext_formats_set; }
- assert(map); // We shouldn't be able to get here with a null map unless the traits cases are incomplete
- return std::unique_ptr<ImageSubresourceLayoutMap>(map);
-}
+// prototype
+GLOBAL_CB_NODE *GetCBNode(layer_data const *, const VkCommandBuffer);
-static std::unique_ptr<ImageSubresourceLayoutMap> LayoutMapFactory(const IMAGE_STATE &image_state) {
- std::unique_ptr<ImageSubresourceLayoutMap> map;
- const uint32_t kAlwaysDenseLimit = 16; // About a cacheline on desktop architectures
- if (image_state.full_range.layerCount <= kAlwaysDenseLimit) {
- // Create a dense row map
- map = LayoutMapFactoryByAspect<0>(image_state);
- } else {
- // Create an initially sparse row map
- map = LayoutMapFactoryByAspect<kAlwaysDenseLimit>(image_state);
+// Return ptr to info in map container containing mem, or NULL if not found
+// Calls to this function should be wrapped in mutex
+DEVICE_MEM_INFO *CoreChecks::GetMemObjInfo(const VkDeviceMemory mem) {
+ auto mem_it = memObjMap.find(mem);
+ if (mem_it == memObjMap.end()) {
+ return NULL;
}
- return map;
+ return mem_it->second.get();
}
-// The const variant only needs the image as it is the key for the map
-const ImageSubresourceLayoutMap *GetImageSubresourceLayoutMap(const CMD_BUFFER_STATE *cb_state, VkImage image) {
- auto it = cb_state->image_layout_map.find(image);
- if (it == cb_state->image_layout_map.cend()) {
- return nullptr;
- }
- return it->second.get();
-}
-
-// The non-const variant only needs the image state, as the factory requires it to construct a new entry
-ImageSubresourceLayoutMap *GetImageSubresourceLayoutMap(CMD_BUFFER_STATE *cb_state, const IMAGE_STATE &image_state) {
- auto it = cb_state->image_layout_map.find(image_state.image);
- if (it == cb_state->image_layout_map.end()) {
- // Empty slot... fill it in.
- auto insert_pair = cb_state->image_layout_map.insert(std::make_pair(image_state.image, LayoutMapFactory(image_state)));
- assert(insert_pair.second);
- ImageSubresourceLayoutMap *new_map = insert_pair.first->second.get();
- assert(new_map);
- return new_map;
- }
- return it->second.get();
-}
-
-void ValidationStateTracker::AddMemObjInfo(void *object, const VkDeviceMemory mem, const VkMemoryAllocateInfo *pAllocateInfo) {
+void CoreChecks::AddMemObjInfo(layer_data *dev_data, void *object, const VkDeviceMemory mem,
+ const VkMemoryAllocateInfo *pAllocateInfo) {
assert(object != NULL);
- auto *mem_info = new DEVICE_MEMORY_STATE(object, mem, pAllocateInfo);
- memObjMap[mem] = unique_ptr<DEVICE_MEMORY_STATE>(mem_info);
+ auto *mem_info = new DEVICE_MEM_INFO(object, mem, pAllocateInfo);
+ dev_data->memObjMap[mem] = unique_ptr<DEVICE_MEM_INFO>(mem_info);
auto dedicated = lvl_find_in_chain<VkMemoryDedicatedAllocateInfoKHR>(pAllocateInfo->pNext);
if (dedicated) {
@@ -309,137 +314,78 @@ void ValidationStateTracker::AddMemObjInfo(void *object, const VkDeviceMemory me
}
// Create binding link between given sampler and command buffer node
-void ValidationStateTracker::AddCommandBufferBindingSampler(CMD_BUFFER_STATE *cb_node, SAMPLER_STATE *sampler_state) {
- if (disabled.command_buffer_state) {
- return;
- }
- auto inserted = cb_node->object_bindings.emplace(sampler_state->sampler, kVulkanObjectTypeSampler);
- if (inserted.second) {
- // Only need to complete the cross-reference if this is a new item
- sampler_state->cb_bindings.insert(cb_node);
- }
+void CoreChecks::AddCommandBufferBindingSampler(GLOBAL_CB_NODE *cb_node, SAMPLER_STATE *sampler_state) {
+ sampler_state->cb_bindings.insert(cb_node);
+ cb_node->object_bindings.insert({HandleToUint64(sampler_state->sampler), kVulkanObjectTypeSampler});
}
// Create binding link between given image node and command buffer node
-void ValidationStateTracker::AddCommandBufferBindingImage(CMD_BUFFER_STATE *cb_node, IMAGE_STATE *image_state) {
- if (disabled.command_buffer_state) {
- return;
- }
+void CoreChecks::AddCommandBufferBindingImage(const layer_data *dev_data, GLOBAL_CB_NODE *cb_node, IMAGE_STATE *image_state) {
// Skip validation if this image was created through WSI
if (image_state->binding.mem != MEMTRACKER_SWAP_CHAIN_IMAGE_KEY) {
- // First update cb binding for image
- auto image_inserted = cb_node->object_bindings.emplace(image_state->image, kVulkanObjectTypeImage);
- if (image_inserted.second) {
- // Only need to continue if this is a new item (the rest of the work would have be done previous)
- image_state->cb_bindings.insert(cb_node);
- // Now update CB binding in MemObj mini CB list
- for (auto mem_binding : image_state->GetBoundMemory()) {
- DEVICE_MEMORY_STATE *pMemInfo = GetDevMemState(mem_binding);
- if (pMemInfo) {
- // Now update CBInfo's Mem reference list
- auto mem_inserted = cb_node->memObjs.insert(mem_binding);
- if (mem_inserted.second) {
- // Only need to complete the cross-reference if this is a new item
- pMemInfo->cb_bindings.insert(cb_node);
- }
- }
+ // First update CB binding in MemObj mini CB list
+ for (auto mem_binding : image_state->GetBoundMemory()) {
+ DEVICE_MEM_INFO *pMemInfo = GetMemObjInfo(mem_binding);
+ if (pMemInfo) {
+ pMemInfo->cb_bindings.insert(cb_node);
+ // Now update CBInfo's Mem reference list
+ cb_node->memObjs.insert(mem_binding);
}
}
+ // Now update cb binding for image
+ cb_node->object_bindings.insert({HandleToUint64(image_state->image), kVulkanObjectTypeImage});
+ image_state->cb_bindings.insert(cb_node);
}
}
// Create binding link between given image view node and its image with command buffer node
-void ValidationStateTracker::AddCommandBufferBindingImageView(CMD_BUFFER_STATE *cb_node, IMAGE_VIEW_STATE *view_state) {
- if (disabled.command_buffer_state) {
- return;
- }
+void CoreChecks::AddCommandBufferBindingImageView(const layer_data *dev_data, GLOBAL_CB_NODE *cb_node,
+ IMAGE_VIEW_STATE *view_state) {
// First add bindings for imageView
- auto inserted = cb_node->object_bindings.emplace(view_state->image_view, kVulkanObjectTypeImageView);
- if (inserted.second) {
- // Only need to continue if this is a new item
- view_state->cb_bindings.insert(cb_node);
- auto image_state = GetImageState(view_state->create_info.image);
- // Add bindings for image within imageView
- if (image_state) {
- AddCommandBufferBindingImage(cb_node, image_state);
- }
+ view_state->cb_bindings.insert(cb_node);
+ cb_node->object_bindings.insert({HandleToUint64(view_state->image_view), kVulkanObjectTypeImageView});
+ auto image_state = GetImageState(view_state->create_info.image);
+ // Add bindings for image within imageView
+ if (image_state) {
+ AddCommandBufferBindingImage(dev_data, cb_node, image_state);
}
}
// Create binding link between given buffer node and command buffer node
-void ValidationStateTracker::AddCommandBufferBindingBuffer(CMD_BUFFER_STATE *cb_node, BUFFER_STATE *buffer_state) {
- if (disabled.command_buffer_state) {
- return;
- }
- // First update cb binding for buffer
- auto buffer_inserted = cb_node->object_bindings.emplace(buffer_state->buffer, kVulkanObjectTypeBuffer);
- if (buffer_inserted.second) {
- // Only need to continue if this is a new item
- buffer_state->cb_bindings.insert(cb_node);
- // Now update CB binding in MemObj mini CB list
- for (auto mem_binding : buffer_state->GetBoundMemory()) {
- DEVICE_MEMORY_STATE *pMemInfo = GetDevMemState(mem_binding);
- if (pMemInfo) {
- // Now update CBInfo's Mem reference list
- auto inserted = cb_node->memObjs.insert(mem_binding);
- if (inserted.second) {
- // Only need to complete the cross-reference if this is a new item
- pMemInfo->cb_bindings.insert(cb_node);
- }
- }
+void CoreChecks::AddCommandBufferBindingBuffer(const layer_data *dev_data, GLOBAL_CB_NODE *cb_node, BUFFER_STATE *buffer_state) {
+ // First update CB binding in MemObj mini CB list
+ for (auto mem_binding : buffer_state->GetBoundMemory()) {
+ DEVICE_MEM_INFO *pMemInfo = GetMemObjInfo(mem_binding);
+ if (pMemInfo) {
+ pMemInfo->cb_bindings.insert(cb_node);
+ // Now update CBInfo's Mem reference list
+ cb_node->memObjs.insert(mem_binding);
}
}
+ // Now update cb binding for buffer
+ cb_node->object_bindings.insert({HandleToUint64(buffer_state->buffer), kVulkanObjectTypeBuffer});
+ buffer_state->cb_bindings.insert(cb_node);
}
// Create binding link between given buffer view node and its buffer with command buffer node
-void ValidationStateTracker::AddCommandBufferBindingBufferView(CMD_BUFFER_STATE *cb_node, BUFFER_VIEW_STATE *view_state) {
- if (disabled.command_buffer_state) {
- return;
- }
+void CoreChecks::AddCommandBufferBindingBufferView(const layer_data *dev_data, GLOBAL_CB_NODE *cb_node,
+ BUFFER_VIEW_STATE *view_state) {
// First add bindings for bufferView
- auto inserted = cb_node->object_bindings.emplace(view_state->buffer_view, kVulkanObjectTypeBufferView);
- if (inserted.second) {
- // Only need to complete the cross-reference if this is a new item
- view_state->cb_bindings.insert(cb_node);
- auto buffer_state = GetBufferState(view_state->create_info.buffer);
- // Add bindings for buffer within bufferView
- if (buffer_state) {
- AddCommandBufferBindingBuffer(cb_node, buffer_state);
- }
- }
-}
-
-// Create binding link between given acceleration structure and command buffer node
-void ValidationStateTracker::AddCommandBufferBindingAccelerationStructure(CMD_BUFFER_STATE *cb_node,
- ACCELERATION_STRUCTURE_STATE *as_state) {
- if (disabled.command_buffer_state) {
- return;
- }
- auto as_inserted = cb_node->object_bindings.emplace(as_state->acceleration_structure, kVulkanObjectTypeAccelerationStructureNV);
- if (as_inserted.second) {
- // Only need to complete the cross-reference if this is a new item
- as_state->cb_bindings.insert(cb_node);
- // Now update CB binding in MemObj mini CB list
- for (auto mem_binding : as_state->GetBoundMemory()) {
- DEVICE_MEMORY_STATE *pMemInfo = GetDevMemState(mem_binding);
- if (pMemInfo) {
- // Now update CBInfo's Mem reference list
- auto mem_inserted = cb_node->memObjs.insert(mem_binding);
- if (mem_inserted.second) {
- // Only need to complete the cross-reference if this is a new item
- pMemInfo->cb_bindings.insert(cb_node);
- }
- }
- }
+ view_state->cb_bindings.insert(cb_node);
+ cb_node->object_bindings.insert({HandleToUint64(view_state->buffer_view), kVulkanObjectTypeBufferView});
+ auto buffer_state = GetBufferState(view_state->create_info.buffer);
+ // Add bindings for buffer within bufferView
+ if (buffer_state) {
+ AddCommandBufferBindingBuffer(dev_data, cb_node, buffer_state);
}
}
// For every mem obj bound to particular CB, free bindings related to that CB
-void ValidationStateTracker::ClearCmdBufAndMemReferences(CMD_BUFFER_STATE *cb_node) {
+void CoreChecks::ClearCmdBufAndMemReferences(layer_data *dev_data, GLOBAL_CB_NODE *cb_node) {
if (cb_node) {
if (cb_node->memObjs.size() > 0) {
for (auto mem : cb_node->memObjs) {
- DEVICE_MEMORY_STATE *pInfo = GetDevMemState(mem);
+ DEVICE_MEM_INFO *pInfo = GetMemObjInfo(mem);
if (pInfo) {
pInfo->cb_bindings.erase(cb_node);
}
@@ -450,91 +396,75 @@ void ValidationStateTracker::ClearCmdBufAndMemReferences(CMD_BUFFER_STATE *cb_no
}
// Clear a single object binding from given memory object
-void ValidationStateTracker::ClearMemoryObjectBinding(const VulkanTypedHandle &typed_handle, VkDeviceMemory mem) {
- DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
+void CoreChecks::ClearMemoryObjectBinding(uint64_t handle, VulkanObjectType type, VkDeviceMemory mem) {
+ DEVICE_MEM_INFO *mem_info = GetMemObjInfo(mem);
// This obj is bound to a memory object. Remove the reference to this object in that memory object's list
if (mem_info) {
- mem_info->obj_bindings.erase(typed_handle);
+ mem_info->obj_bindings.erase({handle, type});
}
}
// ClearMemoryObjectBindings clears the binding of objects to memory
// For the given object it pulls the memory bindings and makes sure that the bindings
// no longer refer to the object being cleared. This occurs when objects are destroyed.
-void ValidationStateTracker::ClearMemoryObjectBindings(const VulkanTypedHandle &typed_handle) {
- BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
+void CoreChecks::ClearMemoryObjectBindings(uint64_t handle, VulkanObjectType type) {
+ BINDABLE *mem_binding = GetObjectMemBinding(handle, type);
if (mem_binding) {
if (!mem_binding->sparse) {
- ClearMemoryObjectBinding(typed_handle, mem_binding->binding.mem);
+ ClearMemoryObjectBinding(handle, type, mem_binding->binding.mem);
} else { // Sparse, clear all bindings
for (auto &sparse_mem_binding : mem_binding->sparse_bindings) {
- ClearMemoryObjectBinding(typed_handle, sparse_mem_binding.mem);
+ ClearMemoryObjectBinding(handle, type, sparse_mem_binding.mem);
}
}
}
}
// For given mem object, verify that it is not null or UNBOUND, if it is, report error. Return skip value.
-bool CoreChecks::VerifyBoundMemoryIsValid(VkDeviceMemory mem, const VulkanTypedHandle &typed_handle, const char *api_name,
- const char *error_code) const {
+bool VerifyBoundMemoryIsValid(const layer_data *dev_data, VkDeviceMemory mem, uint64_t handle, const char *api_name,
+ const char *type_name, const char *error_code) {
bool result = false;
- auto type_name = object_string[typed_handle.type];
if (VK_NULL_HANDLE == mem) {
- result = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, typed_handle.handle,
- error_code, "%s: %s used with no memory bound. Memory should be bound by calling vkBind%sMemory().",
- api_name, report_data->FormatHandle(typed_handle).c_str(), type_name + 2);
+ result =
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, handle, error_code,
+ "%s: Vk%s object %s used with no memory bound. Memory should be bound by calling vkBind%sMemory().", api_name,
+ type_name, dev_data->report_data->FormatHandle(handle).c_str(), type_name);
} else if (MEMORY_UNBOUND == mem) {
- result = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, typed_handle.handle,
- error_code,
- "%s: %s used with no memory bound and previously bound memory was freed. Memory must not be freed "
- "prior to this operation.",
- api_name, report_data->FormatHandle(typed_handle).c_str());
+ result =
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, handle, error_code,
+ "%s: Vk%s object %s used with no memory bound and previously bound memory was freed. Memory must not be freed "
+ "prior to this operation.",
+ api_name, type_name, dev_data->report_data->FormatHandle(handle).c_str());
}
return result;
}
// Check to see if memory was ever bound to this image
-bool CoreChecks::ValidateMemoryIsBoundToImage(const IMAGE_STATE *image_state, const char *api_name, const char *error_code) const {
+bool CoreChecks::ValidateMemoryIsBoundToImage(const layer_data *dev_data, const IMAGE_STATE *image_state, const char *api_name,
+ const char *error_code) {
bool result = false;
- if (image_state->create_from_swapchain != VK_NULL_HANDLE) {
- if (image_state->bind_swapchain == VK_NULL_HANDLE) {
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
- HandleToUint64(image_state->image), error_code,
- "%s: %s is created by %s, and the image should be bound by calling vkBindImageMemory2(), and the pNext chain "
- "includes VkBindImageMemorySwapchainInfoKHR.",
- api_name, report_data->FormatHandle(image_state->image).c_str(),
- report_data->FormatHandle(image_state->create_from_swapchain).c_str());
- }
- } else if (0 == (static_cast<uint32_t>(image_state->createInfo.flags) & VK_IMAGE_CREATE_SPARSE_BINDING_BIT)) {
- result = VerifyBoundMemoryIsValid(image_state->binding.mem, VulkanTypedHandle(image_state->image, kVulkanObjectTypeImage),
- api_name, error_code);
+ if (0 == (static_cast<uint32_t>(image_state->createInfo.flags) & VK_IMAGE_CREATE_SPARSE_BINDING_BIT)) {
+ result = VerifyBoundMemoryIsValid(dev_data, image_state->binding.mem, HandleToUint64(image_state->image), api_name, "Image",
+ error_code);
}
return result;
}
// Check to see if memory was bound to this buffer
-bool CoreChecks::ValidateMemoryIsBoundToBuffer(const BUFFER_STATE *buffer_state, const char *api_name,
- const char *error_code) const {
+bool CoreChecks::ValidateMemoryIsBoundToBuffer(const layer_data *dev_data, const BUFFER_STATE *buffer_state, const char *api_name,
+ const char *error_code) {
bool result = false;
if (0 == (static_cast<uint32_t>(buffer_state->createInfo.flags) & VK_BUFFER_CREATE_SPARSE_BINDING_BIT)) {
- result = VerifyBoundMemoryIsValid(buffer_state->binding.mem,
- VulkanTypedHandle(buffer_state->buffer, kVulkanObjectTypeBuffer), api_name, error_code);
+ result = VerifyBoundMemoryIsValid(dev_data, buffer_state->binding.mem, HandleToUint64(buffer_state->buffer), api_name,
+ "Buffer", error_code);
}
return result;
}
-// Check to see if memory was bound to this acceleration structure
-bool CoreChecks::ValidateMemoryIsBoundToAccelerationStructure(const ACCELERATION_STRUCTURE_STATE *as_state, const char *api_name,
- const char *error_code) const {
- return VerifyBoundMemoryIsValid(as_state->binding.mem,
- VulkanTypedHandle(as_state->acceleration_structure, kVulkanObjectTypeAccelerationStructureNV),
- api_name, error_code);
-}
-
// SetMemBinding is used to establish immutable, non-sparse binding between a single image/buffer object and memory object.
// Corresponding valid usage checks are in ValidateSetMemBinding().
-void ValidationStateTracker::SetMemBinding(VkDeviceMemory mem, BINDABLE *mem_binding, VkDeviceSize memory_offset,
- const VulkanTypedHandle &typed_handle) {
+void CoreChecks::SetMemBinding(layer_data *dev_data, VkDeviceMemory mem, BINDABLE *mem_binding, VkDeviceSize memory_offset,
+ uint64_t handle, VulkanObjectType type) {
assert(mem_binding);
mem_binding->binding.mem = mem;
mem_binding->UpdateBoundMemorySet(); // force recreation of cached set
@@ -542,12 +472,12 @@ void ValidationStateTracker::SetMemBinding(VkDeviceMemory mem, BINDABLE *mem_bin
mem_binding->binding.size = mem_binding->requirements.size;
if (mem != VK_NULL_HANDLE) {
- DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
+ DEVICE_MEM_INFO *mem_info = GetMemObjInfo(mem);
if (mem_info) {
- mem_info->obj_bindings.insert(typed_handle);
+ mem_info->obj_bindings.insert({handle, type});
// For image objects, make sure default memory state is correctly set
// TODO : What's the best/correct way to handle this?
- if (kVulkanObjectTypeImage == typed_handle.type) {
+ if (kVulkanObjectTypeImage == type) {
auto const image_state = reinterpret_cast<const IMAGE_STATE *>(mem_binding);
if (image_state) {
VkImageCreateInfo ici = image_state->createInfo;
@@ -567,50 +497,53 @@ void ValidationStateTracker::SetMemBinding(VkDeviceMemory mem, BINDABLE *mem_bin
// Otherwise, add reference from objectInfo to memoryInfo
// Add reference off of objInfo
// TODO: We may need to refactor or pass in multiple valid usage statements to handle multiple valid usage conditions.
-bool CoreChecks::ValidateSetMemBinding(VkDeviceMemory mem, const VulkanTypedHandle &typed_handle, const char *apiName) const {
+bool CoreChecks::ValidateSetMemBinding(layer_data *dev_data, VkDeviceMemory mem, uint64_t handle, VulkanObjectType type,
+ const char *apiName) {
bool skip = false;
// It's an error to bind an object to NULL memory
if (mem != VK_NULL_HANDLE) {
- const BINDABLE *mem_binding = ValidationStateTracker::GetObjectMemBinding(typed_handle);
+ BINDABLE *mem_binding = GetObjectMemBinding(handle, type);
assert(mem_binding);
if (mem_binding->sparse) {
const char *error_code = "VUID-vkBindImageMemory-image-01045";
const char *handle_type = "IMAGE";
- if (typed_handle.type == kVulkanObjectTypeBuffer) {
+ if (type == kVulkanObjectTypeBuffer) {
error_code = "VUID-vkBindBufferMemory-buffer-01030";
handle_type = "BUFFER";
} else {
- assert(typed_handle.type == kVulkanObjectTypeImage);
+ assert(type == kVulkanObjectTypeImage);
}
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
HandleToUint64(mem), error_code,
- "In %s, attempting to bind %s to %s which was created with sparse memory flags "
+ "In %s, attempting to bind memory (%s) to object (%s) which was created with sparse memory flags "
"(VK_%s_CREATE_SPARSE_*_BIT).",
- apiName, report_data->FormatHandle(mem).c_str(), report_data->FormatHandle(typed_handle).c_str(),
- handle_type);
+ apiName, dev_data->report_data->FormatHandle(mem).c_str(),
+ dev_data->report_data->FormatHandle(handle).c_str(), handle_type);
}
- const DEVICE_MEMORY_STATE *mem_info = ValidationStateTracker::GetDevMemState(mem);
+ DEVICE_MEM_INFO *mem_info = GetMemObjInfo(mem);
if (mem_info) {
- const DEVICE_MEMORY_STATE *prev_binding = ValidationStateTracker::GetDevMemState(mem_binding->binding.mem);
+ DEVICE_MEM_INFO *prev_binding = GetMemObjInfo(mem_binding->binding.mem);
if (prev_binding) {
const char *error_code = "VUID-vkBindImageMemory-image-01044";
- if (typed_handle.type == kVulkanObjectTypeBuffer) {
+ if (type == kVulkanObjectTypeBuffer) {
error_code = "VUID-vkBindBufferMemory-buffer-01029";
} else {
- assert(typed_handle.type == kVulkanObjectTypeImage);
+ assert(type == kVulkanObjectTypeImage);
}
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
- HandleToUint64(mem), error_code,
- "In %s, attempting to bind %s to %s which has already been bound to %s.", apiName,
- report_data->FormatHandle(mem).c_str(), report_data->FormatHandle(typed_handle).c_str(),
- report_data->FormatHandle(prev_binding->mem).c_str());
+ skip |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+ HandleToUint64(mem), error_code,
+ "In %s, attempting to bind memory (%s) to object (%s) which has already been bound to mem object %s.", apiName,
+ dev_data->report_data->FormatHandle(mem).c_str(), dev_data->report_data->FormatHandle(handle).c_str(),
+ dev_data->report_data->FormatHandle(prev_binding->mem).c_str());
} else if (mem_binding->binding.mem == MEMORY_UNBOUND) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
HandleToUint64(mem), kVUID_Core_MemTrack_RebindObject,
- "In %s, attempting to bind %s to %s which was previous bound to memory that has "
+ "In %s, attempting to bind memory (%s) to object (%s) which was previous bound to memory that has "
"since been freed. Memory bindings are immutable in "
"Vulkan so this attempt to bind to new memory is not allowed.",
- apiName, report_data->FormatHandle(mem).c_str(), report_data->FormatHandle(typed_handle).c_str());
+ apiName, dev_data->report_data->FormatHandle(mem).c_str(),
+ dev_data->report_data->FormatHandle(handle).c_str());
}
}
}
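ValidateSetMemBinding() above enforces that non-sparse bindings are immutable: an object created with sparse flags may not use vkBind*Memory, an already-bound object may not be rebound, and an object whose previously bound memory was freed may not be bound again. A condensed sketch of those three rules with a hypothetical Resource model:

#include <cstdint>
#include <string>

constexpr uint64_t kUnbound = 0;             // never bound
constexpr uint64_t kFreed   = ~uint64_t(0);  // previously bound memory was freed

struct Resource { bool sparse = false; uint64_t bound_mem = kUnbound; };

// Returns true when binding new_mem to res is legal under the immutable-binding rules.
static bool ValidateBind(const Resource &res, uint64_t new_mem, std::string *error) {
    if (new_mem == kUnbound) return true;  // binding to a null handle is checked elsewhere
    if (res.sparse) { *error = "resource was created with sparse memory flags"; return false; }
    if (res.bound_mem == kFreed) { *error = "previously bound memory was freed"; return false; }
    if (res.bound_mem != kUnbound) { *error = "memory bindings are immutable"; return false; }
    return true;
}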
@@ -623,19 +556,19 @@ bool CoreChecks::ValidateSetMemBinding(VkDeviceMemory mem, const VulkanTypedHand
// Add reference from objectInfo to memoryInfo
// Add reference off of object's binding info
// Return VK_TRUE if addition is successful, VK_FALSE otherwise
-bool CoreChecks::SetSparseMemBinding(MEM_BINDING binding, const VulkanTypedHandle &typed_handle) {
+bool CoreChecks::SetSparseMemBinding(layer_data *dev_data, MEM_BINDING binding, uint64_t handle, VulkanObjectType type) {
bool skip = VK_FALSE;
// Handle NULL case separately, just clear previous binding & decrement reference
if (binding.mem == VK_NULL_HANDLE) {
// TODO : This should cause the range of the resource to be unbound according to spec
} else {
- BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
+ BINDABLE *mem_binding = GetObjectMemBinding(handle, type);
assert(mem_binding);
if (mem_binding) { // Invalid handles are reported by object tracker, but Get returns NULL for them, so avoid SEGV here
assert(mem_binding->sparse);
- DEVICE_MEMORY_STATE *mem_info = GetDevMemState(binding.mem);
+ DEVICE_MEM_INFO *mem_info = GetMemObjInfo(binding.mem);
if (mem_info) {
- mem_info->obj_bindings.insert(typed_handle);
+ mem_info->obj_bindings.insert({handle, type});
// Need to set mem binding for this object
mem_binding->sparse_bindings.insert(binding);
mem_binding->UpdateBoundMemorySet();
@@ -645,28 +578,29 @@ bool CoreChecks::SetSparseMemBinding(MEM_BINDING binding, const VulkanTypedHandl
return skip;
}
-bool CoreChecks::ValidateDeviceQueueFamily(uint32_t queue_family, const char *cmd_name, const char *parameter_name,
- const char *error_code, bool optional = false) const {
+bool CoreChecks::ValidateDeviceQueueFamily(layer_data *device_data, uint32_t queue_family, const char *cmd_name,
+ const char *parameter_name, const char *error_code, bool optional = false) {
bool skip = false;
if (!optional && queue_family == VK_QUEUE_FAMILY_IGNORED) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
- error_code,
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ HandleToUint64(device_data->device), error_code,
"%s: %s is VK_QUEUE_FAMILY_IGNORED, but it is required to provide a valid queue family index value.",
cmd_name, parameter_name);
- } else if (queue_family_index_map.find(queue_family) == queue_family_index_map.end()) {
- skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device), error_code,
- "%s: %s (= %" PRIu32
- ") is not one of the queue families given via VkDeviceQueueCreateInfo structures when the device was created.",
- cmd_name, parameter_name, queue_family);
+ } else if (device_data->queue_family_index_map.find(queue_family) == device_data->queue_family_index_map.end()) {
+ skip |=
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ HandleToUint64(device_data->device), error_code,
+ "%s: %s (= %" PRIu32
+ ") is not one of the queue families given via VkDeviceQueueCreateInfo structures when the device was created.",
+ cmd_name, parameter_name, queue_family);
}
return skip;
}
-bool CoreChecks::ValidateQueueFamilies(uint32_t queue_family_count, const uint32_t *queue_families, const char *cmd_name,
- const char *array_parameter_name, const char *unique_error_code,
- const char *valid_error_code, bool optional = false) const {
+bool CoreChecks::ValidateQueueFamilies(layer_data *device_data, uint32_t queue_family_count, const uint32_t *queue_families,
+ const char *cmd_name, const char *array_parameter_name, const char *unique_error_code,
+ const char *valid_error_code, bool optional = false) {
bool skip = false;
if (queue_families) {
std::unordered_set<uint32_t> set;
@@ -674,12 +608,14 @@ bool CoreChecks::ValidateQueueFamilies(uint32_t queue_family_count, const uint32
std::string parameter_name = std::string(array_parameter_name) + "[" + std::to_string(i) + "]";
if (set.count(queue_families[i])) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- HandleToUint64(device), unique_error_code, "%s: %s (=%" PRIu32 ") is not unique within %s array.",
- cmd_name, parameter_name.c_str(), queue_families[i], array_parameter_name);
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ HandleToUint64(device_data->device), unique_error_code,
+ "%s: %s (=%" PRIu32 ") is not unique within %s array.", cmd_name, parameter_name.c_str(),
+ queue_families[i], array_parameter_name);
} else {
set.insert(queue_families[i]);
- skip |= ValidateDeviceQueueFamily(queue_families[i], cmd_name, parameter_name.c_str(), valid_error_code, optional);
+ skip |= ValidateDeviceQueueFamily(device_data, queue_families[i], cmd_name, parameter_name.c_str(),
+ valid_error_code, optional);
}
}
}
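The loop above detects repeated queue family indices with an unordered_set before validating each one individually; a standalone sketch of that uniqueness pass (reporting through a plain string vector rather than log_msg):

#include <cstddef>
#include <cstdint>
#include <string>
#include <unordered_set>
#include <vector>

// Report each queue family index that repeats within the caller-provided array.
static std::vector<std::string> FindDuplicateQueueFamilies(const std::vector<uint32_t> &families) {
    std::unordered_set<uint32_t> seen;
    std::vector<std::string> errors;
    for (std::size_t i = 0; i < families.size(); ++i) {
        if (!seen.insert(families[i]).second) {
            errors.push_back("pQueueFamilyIndices[" + std::to_string(i) + "] = " +
                             std::to_string(families[i]) + " is not unique");
        }
    }
    return errors;
}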
@@ -687,24 +623,26 @@ bool CoreChecks::ValidateQueueFamilies(uint32_t queue_family_count, const uint32
}
// Check object status for selected flag state
-bool CoreChecks::ValidateStatus(const CMD_BUFFER_STATE *pNode, CBStatusFlags status_mask, VkFlags msg_flags, const char *fail_msg,
- const char *msg_code) const {
+bool CoreChecks::ValidateStatus(layer_data *dev_data, GLOBAL_CB_NODE *pNode, CBStatusFlags status_mask, VkFlags msg_flags,
+ const char *fail_msg, const char *msg_code) {
if (!(pNode->status & status_mask)) {
- return log_msg(report_data, msg_flags, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, HandleToUint64(pNode->commandBuffer),
- msg_code, "%s: %s..", report_data->FormatHandle(pNode->commandBuffer).c_str(), fail_msg);
+ return log_msg(dev_data->report_data, msg_flags, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ HandleToUint64(pNode->commandBuffer), msg_code, "command buffer object %s: %s..",
+ dev_data->report_data->FormatHandle(pNode->commandBuffer).c_str(), fail_msg);
}
return false;
}
-const RENDER_PASS_STATE *ValidationStateTracker::GetRenderPassState(VkRenderPass renderpass) const {
- auto it = renderPassMap.find(renderpass);
- if (it == renderPassMap.end()) {
+// Retrieve pipeline node ptr for given pipeline object
+PIPELINE_STATE *CoreChecks::GetPipelineState(VkPipeline pipeline) {
+ auto it = pipelineMap.find(pipeline);
+ if (it == pipelineMap.end()) {
return nullptr;
}
return it->second.get();
}
-RENDER_PASS_STATE *ValidationStateTracker::GetRenderPassState(VkRenderPass renderpass) {
+RENDER_PASS_STATE *CoreChecks::GetRenderPassState(VkRenderPass renderpass) {
auto it = renderPassMap.find(renderpass);
if (it == renderPassMap.end()) {
return nullptr;
@@ -712,7 +650,7 @@ RENDER_PASS_STATE *ValidationStateTracker::GetRenderPassState(VkRenderPass rende
return it->second.get();
}
-std::shared_ptr<RENDER_PASS_STATE> ValidationStateTracker::GetRenderPassStateSharedPtr(VkRenderPass renderpass) {
+std::shared_ptr<RENDER_PASS_STATE> CoreChecks::GetRenderPassStateSharedPtr(VkRenderPass renderpass) {
auto it = renderPassMap.find(renderpass);
if (it == renderPassMap.end()) {
return nullptr;
@@ -720,15 +658,48 @@ std::shared_ptr<RENDER_PASS_STATE> ValidationStateTracker::GetRenderPassStateSha
return it->second;
}
-std::shared_ptr<cvdescriptorset::DescriptorSetLayout const> const GetDescriptorSetLayout(const ValidationStateTracker *state_data,
+FRAMEBUFFER_STATE *CoreChecks::GetFramebufferState(VkFramebuffer framebuffer) {
+ auto it = frameBufferMap.find(framebuffer);
+ if (it == frameBufferMap.end()) {
+ return nullptr;
+ }
+ return it->second.get();
+}
+
+std::shared_ptr<cvdescriptorset::DescriptorSetLayout const> const GetDescriptorSetLayout(layer_data const *dev_data,
VkDescriptorSetLayout dsLayout) {
- auto it = state_data->descriptorSetLayoutMap.find(dsLayout);
- if (it == state_data->descriptorSetLayoutMap.end()) {
+ auto it = dev_data->descriptorSetLayoutMap.find(dsLayout);
+ if (it == dev_data->descriptorSetLayoutMap.end()) {
return nullptr;
}
return it->second;
}
+PIPELINE_LAYOUT_NODE const *CoreChecks::GetPipelineLayout(layer_data const *dev_data, VkPipelineLayout pipeLayout) {
+ auto it = dev_data->pipelineLayoutMap.find(pipeLayout);
+ if (it == dev_data->pipelineLayoutMap.end()) {
+ return nullptr;
+ }
+ return &it->second;
+}
+
+shader_module const *CoreChecks::GetShaderModuleState(VkShaderModule module) {
+ auto it = shaderModuleMap.find(module);
+ if (it == shaderModuleMap.end()) {
+ return nullptr;
+ }
+ return it->second.get();
+}
+
+const TEMPLATE_STATE *CoreChecks::GetDescriptorTemplateState(const layer_data *dev_data,
+ VkDescriptorUpdateTemplateKHR descriptor_update_template) {
+ const auto it = dev_data->desc_template_map.find(descriptor_update_template);
+ if (it == dev_data->desc_template_map.cend()) {
+ return nullptr;
+ }
+ return it->second.get();
+}
+
// Return true if for a given PSO, the given state enum is dynamic, else return false
static bool IsDynamic(const PIPELINE_STATE *pPipeline, const VkDynamicState state) {
if (pPipeline && pPipeline->graphicsPipelineCI.pDynamicState) {
@@ -740,69 +711,61 @@ static bool IsDynamic(const PIPELINE_STATE *pPipeline, const VkDynamicState stat
}
// Validate state stored as flags at time of draw call
-bool CoreChecks::ValidateDrawStateFlags(const CMD_BUFFER_STATE *pCB, const PIPELINE_STATE *pPipe, bool indexed,
- const char *msg_code) const {
+bool CoreChecks::ValidateDrawStateFlags(layer_data *dev_data, GLOBAL_CB_NODE *pCB, const PIPELINE_STATE *pPipe, bool indexed,
+ const char *msg_code) {
bool result = false;
if (pPipe->topology_at_rasterizer == VK_PRIMITIVE_TOPOLOGY_LINE_LIST ||
pPipe->topology_at_rasterizer == VK_PRIMITIVE_TOPOLOGY_LINE_STRIP) {
- result |= ValidateStatus(pCB, CBSTATUS_LINE_WIDTH_SET, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ result |= ValidateStatus(dev_data, pCB, CBSTATUS_LINE_WIDTH_SET, VK_DEBUG_REPORT_ERROR_BIT_EXT,
"Dynamic line width state not set for this command buffer", msg_code);
}
if (pPipe->graphicsPipelineCI.pRasterizationState &&
(pPipe->graphicsPipelineCI.pRasterizationState->depthBiasEnable == VK_TRUE)) {
- result |= ValidateStatus(pCB, CBSTATUS_DEPTH_BIAS_SET, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ result |= ValidateStatus(dev_data, pCB, CBSTATUS_DEPTH_BIAS_SET, VK_DEBUG_REPORT_ERROR_BIT_EXT,
"Dynamic depth bias state not set for this command buffer", msg_code);
}
if (pPipe->blendConstantsEnabled) {
- result |= ValidateStatus(pCB, CBSTATUS_BLEND_CONSTANTS_SET, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ result |= ValidateStatus(dev_data, pCB, CBSTATUS_BLEND_CONSTANTS_SET, VK_DEBUG_REPORT_ERROR_BIT_EXT,
"Dynamic blend constants state not set for this command buffer", msg_code);
}
if (pPipe->graphicsPipelineCI.pDepthStencilState &&
(pPipe->graphicsPipelineCI.pDepthStencilState->depthBoundsTestEnable == VK_TRUE)) {
- result |= ValidateStatus(pCB, CBSTATUS_DEPTH_BOUNDS_SET, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ result |= ValidateStatus(dev_data, pCB, CBSTATUS_DEPTH_BOUNDS_SET, VK_DEBUG_REPORT_ERROR_BIT_EXT,
"Dynamic depth bounds state not set for this command buffer", msg_code);
}
if (pPipe->graphicsPipelineCI.pDepthStencilState &&
(pPipe->graphicsPipelineCI.pDepthStencilState->stencilTestEnable == VK_TRUE)) {
- result |= ValidateStatus(pCB, CBSTATUS_STENCIL_READ_MASK_SET, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ result |= ValidateStatus(dev_data, pCB, CBSTATUS_STENCIL_READ_MASK_SET, VK_DEBUG_REPORT_ERROR_BIT_EXT,
"Dynamic stencil read mask state not set for this command buffer", msg_code);
- result |= ValidateStatus(pCB, CBSTATUS_STENCIL_WRITE_MASK_SET, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ result |= ValidateStatus(dev_data, pCB, CBSTATUS_STENCIL_WRITE_MASK_SET, VK_DEBUG_REPORT_ERROR_BIT_EXT,
"Dynamic stencil write mask state not set for this command buffer", msg_code);
- result |= ValidateStatus(pCB, CBSTATUS_STENCIL_REFERENCE_SET, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ result |= ValidateStatus(dev_data, pCB, CBSTATUS_STENCIL_REFERENCE_SET, VK_DEBUG_REPORT_ERROR_BIT_EXT,
"Dynamic stencil reference state not set for this command buffer", msg_code);
}
if (indexed) {
- result |= ValidateStatus(pCB, CBSTATUS_INDEX_BUFFER_BOUND, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ result |= ValidateStatus(dev_data, pCB, CBSTATUS_INDEX_BUFFER_BOUND, VK_DEBUG_REPORT_ERROR_BIT_EXT,
"Index buffer object not bound to this command buffer when Indexed Draw attempted", msg_code);
}
- if (pPipe->topology_at_rasterizer == VK_PRIMITIVE_TOPOLOGY_LINE_LIST ||
- pPipe->topology_at_rasterizer == VK_PRIMITIVE_TOPOLOGY_LINE_STRIP) {
- const auto *line_state =
- lvl_find_in_chain<VkPipelineRasterizationLineStateCreateInfoEXT>(pPipe->graphicsPipelineCI.pRasterizationState->pNext);
- if (line_state && line_state->stippledLineEnable) {
- result |= ValidateStatus(pCB, CBSTATUS_LINE_STIPPLE_SET, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "Dynamic line stipple state not set for this command buffer", msg_code);
- }
- }
return result;
}
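ValidateDrawStateFlags() above reduces to testing required CBSTATUS_* bits against the command buffer's status word; a minimal, runnable model of that flag test with hypothetical status names:

#include <cstdint>
#include <cstdio>

// Each piece of required dynamic state is one bit; a draw is flagged when a required bit is missing.
enum : uint32_t {
    STATUS_LINE_WIDTH_SET     = 1u << 0,
    STATUS_DEPTH_BIAS_SET     = 1u << 1,
    STATUS_INDEX_BUFFER_BOUND = 1u << 2,
};

static bool ValidateStatusBit(uint32_t status, uint32_t required, const char *fail_msg) {
    if ((status & required) != required) {
        std::fprintf(stderr, "validation: %s\n", fail_msg);
        return true;  // "skip" result, mirroring the layer's convention
    }
    return false;
}

int main() {
    uint32_t status = STATUS_LINE_WIDTH_SET;  // depth bias state was never set
    bool skip = false;
    skip |= ValidateStatusBit(status, STATUS_DEPTH_BIAS_SET, "Dynamic depth bias state not set");
    skip |= ValidateStatusBit(status, STATUS_LINE_WIDTH_SET, "Dynamic line width state not set");
    return skip ? 1 : 0;
}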
-bool CoreChecks::LogInvalidAttachmentMessage(const char *type1_string, const RENDER_PASS_STATE *rp1_state, const char *type2_string,
+bool CoreChecks::LogInvalidAttachmentMessage(layer_data const *dev_data, const char *type1_string,
+ const RENDER_PASS_STATE *rp1_state, const char *type2_string,
const RENDER_PASS_STATE *rp2_state, uint32_t primary_attach, uint32_t secondary_attach,
- const char *msg, const char *caller, const char *error_code) const {
- return log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+ const char *msg, const char *caller, const char *error_code) {
+ return log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
HandleToUint64(rp1_state->renderPass), error_code,
- "%s: RenderPasses incompatible between %s w/ %s and %s w/ %s Attachment %u is not "
+ "%s: RenderPasses incompatible between %s w/ renderPass %s and %s w/ renderPass %s Attachment %u is not "
"compatible with %u: %s.",
- caller, type1_string, report_data->FormatHandle(rp1_state->renderPass).c_str(), type2_string,
- report_data->FormatHandle(rp2_state->renderPass).c_str(), primary_attach, secondary_attach, msg);
+ caller, type1_string, dev_data->report_data->FormatHandle(rp1_state->renderPass).c_str(), type2_string,
+ dev_data->report_data->FormatHandle(rp2_state->renderPass).c_str(), primary_attach, secondary_attach, msg);
}
-bool CoreChecks::ValidateAttachmentCompatibility(const char *type1_string, const RENDER_PASS_STATE *rp1_state,
- const char *type2_string, const RENDER_PASS_STATE *rp2_state,
- uint32_t primary_attach, uint32_t secondary_attach, const char *caller,
- const char *error_code) const {
+bool CoreChecks::ValidateAttachmentCompatibility(layer_data const *dev_data, const char *type1_string,
+ const RENDER_PASS_STATE *rp1_state, const char *type2_string,
+ const RENDER_PASS_STATE *rp2_state, uint32_t primary_attach,
+ uint32_t secondary_attach, const char *caller, const char *error_code) {
bool skip = false;
const auto &primaryPassCI = rp1_state->createInfo;
const auto &secondaryPassCI = rp2_state->createInfo;
@@ -816,34 +779,35 @@ bool CoreChecks::ValidateAttachmentCompatibility(const char *type1_string, const
return skip;
}
if (primary_attach == VK_ATTACHMENT_UNUSED) {
- skip |= LogInvalidAttachmentMessage(type1_string, rp1_state, type2_string, rp2_state, primary_attach, secondary_attach,
- "The first is unused while the second is not.", caller, error_code);
+ skip |= LogInvalidAttachmentMessage(dev_data, type1_string, rp1_state, type2_string, rp2_state, primary_attach,
+ secondary_attach, "The first is unused while the second is not.", caller, error_code);
return skip;
}
if (secondary_attach == VK_ATTACHMENT_UNUSED) {
- skip |= LogInvalidAttachmentMessage(type1_string, rp1_state, type2_string, rp2_state, primary_attach, secondary_attach,
- "The second is unused while the first is not.", caller, error_code);
+ skip |= LogInvalidAttachmentMessage(dev_data, type1_string, rp1_state, type2_string, rp2_state, primary_attach,
+ secondary_attach, "The second is unused while the first is not.", caller, error_code);
return skip;
}
if (primaryPassCI.pAttachments[primary_attach].format != secondaryPassCI.pAttachments[secondary_attach].format) {
- skip |= LogInvalidAttachmentMessage(type1_string, rp1_state, type2_string, rp2_state, primary_attach, secondary_attach,
- "They have different formats.", caller, error_code);
+ skip |= LogInvalidAttachmentMessage(dev_data, type1_string, rp1_state, type2_string, rp2_state, primary_attach,
+ secondary_attach, "They have different formats.", caller, error_code);
}
if (primaryPassCI.pAttachments[primary_attach].samples != secondaryPassCI.pAttachments[secondary_attach].samples) {
- skip |= LogInvalidAttachmentMessage(type1_string, rp1_state, type2_string, rp2_state, primary_attach, secondary_attach,
- "They have different samples.", caller, error_code);
+ skip |= LogInvalidAttachmentMessage(dev_data, type1_string, rp1_state, type2_string, rp2_state, primary_attach,
+ secondary_attach, "They have different samples.", caller, error_code);
}
if (primaryPassCI.pAttachments[primary_attach].flags != secondaryPassCI.pAttachments[secondary_attach].flags) {
- skip |= LogInvalidAttachmentMessage(type1_string, rp1_state, type2_string, rp2_state, primary_attach, secondary_attach,
- "They have different flags.", caller, error_code);
+ skip |= LogInvalidAttachmentMessage(dev_data, type1_string, rp1_state, type2_string, rp2_state, primary_attach,
+ secondary_attach, "They have different flags.", caller, error_code);
}
return skip;
}
-bool CoreChecks::ValidateSubpassCompatibility(const char *type1_string, const RENDER_PASS_STATE *rp1_state,
- const char *type2_string, const RENDER_PASS_STATE *rp2_state, const int subpass,
- const char *caller, const char *error_code) const {
+bool CoreChecks::ValidateSubpassCompatibility(layer_data const *dev_data, const char *type1_string,
+ const RENDER_PASS_STATE *rp1_state, const char *type2_string,
+ const RENDER_PASS_STATE *rp2_state, const int subpass, const char *caller,
+ const char *error_code) {
bool skip = false;
const auto &primary_desc = rp1_state->createInfo.pSubpasses[subpass];
const auto &secondary_desc = rp2_state->createInfo.pSubpasses[subpass];
@@ -856,7 +820,7 @@ bool CoreChecks::ValidateSubpassCompatibility(const char *type1_string, const RE
if (i < secondary_desc.inputAttachmentCount) {
secondary_input_attach = secondary_desc.pInputAttachments[i].attachment;
}
- skip |= ValidateAttachmentCompatibility(type1_string, rp1_state, type2_string, rp2_state, primary_input_attach,
+ skip |= ValidateAttachmentCompatibility(dev_data, type1_string, rp1_state, type2_string, rp2_state, primary_input_attach,
secondary_input_attach, caller, error_code);
}
uint32_t maxColorAttachmentCount = std::max(primary_desc.colorAttachmentCount, secondary_desc.colorAttachmentCount);
@@ -868,7 +832,7 @@ bool CoreChecks::ValidateSubpassCompatibility(const char *type1_string, const RE
if (i < secondary_desc.colorAttachmentCount) {
secondary_color_attach = secondary_desc.pColorAttachments[i].attachment;
}
- skip |= ValidateAttachmentCompatibility(type1_string, rp1_state, type2_string, rp2_state, primary_color_attach,
+ skip |= ValidateAttachmentCompatibility(dev_data, type1_string, rp1_state, type2_string, rp2_state, primary_color_attach,
secondary_color_attach, caller, error_code);
if (rp1_state->createInfo.subpassCount > 1) {
uint32_t primary_resolve_attach = VK_ATTACHMENT_UNUSED, secondary_resolve_attach = VK_ATTACHMENT_UNUSED;
@@ -878,8 +842,8 @@ bool CoreChecks::ValidateSubpassCompatibility(const char *type1_string, const RE
if (i < secondary_desc.colorAttachmentCount && secondary_desc.pResolveAttachments) {
secondary_resolve_attach = secondary_desc.pResolveAttachments[i].attachment;
}
- skip |= ValidateAttachmentCompatibility(type1_string, rp1_state, type2_string, rp2_state, primary_resolve_attach,
- secondary_resolve_attach, caller, error_code);
+ skip |= ValidateAttachmentCompatibility(dev_data, type1_string, rp1_state, type2_string, rp2_state,
+ primary_resolve_attach, secondary_resolve_attach, caller, error_code);
}
}
uint32_t primary_depthstencil_attach = VK_ATTACHMENT_UNUSED, secondary_depthstencil_attach = VK_ATTACHMENT_UNUSED;
@@ -889,7 +853,7 @@ bool CoreChecks::ValidateSubpassCompatibility(const char *type1_string, const RE
if (secondary_desc.pDepthStencilAttachment) {
secondary_depthstencil_attach = secondary_desc.pDepthStencilAttachment[0].attachment;
}
- skip |= ValidateAttachmentCompatibility(type1_string, rp1_state, type2_string, rp2_state, primary_depthstencil_attach,
+ skip |= ValidateAttachmentCompatibility(dev_data, type1_string, rp1_state, type2_string, rp2_state, primary_depthstencil_attach,
secondary_depthstencil_attach, caller, error_code);
return skip;
}
@@ -897,27 +861,36 @@ bool CoreChecks::ValidateSubpassCompatibility(const char *type1_string, const RE
// Verify that given renderPass CreateInfo for primary and secondary command buffers are compatible.
// This function deals directly with the CreateInfo, there are overloaded versions below that can take the renderPass handle and
// will then feed into this function
-bool CoreChecks::ValidateRenderPassCompatibility(const char *type1_string, const RENDER_PASS_STATE *rp1_state,
- const char *type2_string, const RENDER_PASS_STATE *rp2_state, const char *caller,
- const char *error_code) const {
+bool CoreChecks::ValidateRenderPassCompatibility(layer_data const *dev_data, const char *type1_string,
+ const RENDER_PASS_STATE *rp1_state, const char *type2_string,
+ const RENDER_PASS_STATE *rp2_state, const char *caller, const char *error_code) {
bool skip = false;
if (rp1_state->createInfo.subpassCount != rp2_state->createInfo.subpassCount) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
HandleToUint64(rp1_state->renderPass), error_code,
- "%s: RenderPasses incompatible between %s w/ %s with a subpassCount of %u and %s w/ "
+ "%s: RenderPasses incompatible between %s w/ renderPass %s with a subpassCount of %u and %s w/ renderPass "
"%s with a subpassCount of %u.",
- caller, type1_string, report_data->FormatHandle(rp1_state->renderPass).c_str(),
- rp1_state->createInfo.subpassCount, type2_string, report_data->FormatHandle(rp2_state->renderPass).c_str(),
- rp2_state->createInfo.subpassCount);
+ caller, type1_string, dev_data->report_data->FormatHandle(rp1_state->renderPass).c_str(),
+ rp1_state->createInfo.subpassCount, type2_string,
+ dev_data->report_data->FormatHandle(rp2_state->renderPass).c_str(), rp2_state->createInfo.subpassCount);
} else {
for (uint32_t i = 0; i < rp1_state->createInfo.subpassCount; ++i) {
- skip |= ValidateSubpassCompatibility(type1_string, rp1_state, type2_string, rp2_state, i, caller, error_code);
+ skip |= ValidateSubpassCompatibility(dev_data, type1_string, rp1_state, type2_string, rp2_state, i, caller, error_code);
}
}
return skip;
}
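// A minimal standalone sketch of the compatibility rule the functions above enforce, using
// simplified stand-in structs (AttachmentDesc and RenderPassDesc are hypothetical, not the
// layer's types): two render passes are compatible only if their subpass counts match and
// corresponding attachments agree on format, sample count, and flags.
#include <algorithm>
#include <cstdint>
#include <vector>

struct AttachmentDesc { uint32_t format; uint32_t samples; uint32_t flags; };
struct RenderPassDesc { uint32_t subpassCount; std::vector<AttachmentDesc> attachments; };

// Mirrors the format/samples/flags comparisons in ValidateAttachmentCompatibility.
static bool AttachmentsCompatible(const AttachmentDesc &a, const AttachmentDesc &b) {
    return a.format == b.format && a.samples == b.samples && a.flags == b.flags;
}

static bool RenderPassesCompatible(const RenderPassDesc &rp1, const RenderPassDesc &rp2) {
    if (rp1.subpassCount != rp2.subpassCount) return false;  // mirrors the subpassCount error path above
    const size_t count = std::min(rp1.attachments.size(), rp2.attachments.size());
    for (size_t i = 0; i < count; ++i) {
        if (!AttachmentsCompatible(rp1.attachments[i], rp2.attachments[i])) return false;
    }
    return true;
}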
+// Return Set node ptr for specified set or else NULL
+cvdescriptorset::DescriptorSet *CoreChecks::GetSetNode(VkDescriptorSet set) {
+ auto set_it = setMap.find(set);
+ if (set_it == setMap.end()) {
+ return NULL;
+ }
+ return set_it->second;
+}
+
// For given pipeline, return number of MSAA samples, or one if MSAA disabled
static VkSampleCountFlagBits GetNumSamples(PIPELINE_STATE const *pipe) {
if (pipe->graphicsPipelineCI.pMultisampleState != NULL &&
@@ -940,24 +913,23 @@ static void ListBits(std::ostream &s, uint32_t bits) {
}
// Validate draw-time state related to the PSO
-bool CoreChecks::ValidatePipelineDrawtimeState(const LAST_BOUND_STATE &state, const CMD_BUFFER_STATE *pCB, CMD_TYPE cmd_type,
- const PIPELINE_STATE *pPipeline, const char *caller) const {
+bool CoreChecks::ValidatePipelineDrawtimeState(layer_data const *dev_data, LAST_BOUND_STATE const &state, const GLOBAL_CB_NODE *pCB,
+ CMD_TYPE cmd_type, PIPELINE_STATE const *pPipeline, const char *caller) {
bool skip = false;
- const auto &current_vtx_bfr_binding_info = pCB->current_vertex_buffer_binding_info.vertex_buffer_bindings;
// Verify vertex binding
if (pPipeline->vertex_binding_descriptions_.size() > 0) {
for (size_t i = 0; i < pPipeline->vertex_binding_descriptions_.size(); i++) {
const auto vertex_binding = pPipeline->vertex_binding_descriptions_[i].binding;
- if ((current_vtx_bfr_binding_info.size() < (vertex_binding + 1)) ||
- (current_vtx_bfr_binding_info[vertex_binding].buffer == VK_NULL_HANDLE)) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(pCB->commandBuffer), kVUID_Core_DrawState_VtxIndexOutOfBounds,
- "%s expects that this Command Buffer's vertex binding Index %u should be set via "
- "vkCmdBindVertexBuffers. This is because VkVertexInputBindingDescription struct at "
- "index " PRINTF_SIZE_T_SPECIFIER " of pVertexBindingDescriptions has a binding value of %u.",
- report_data->FormatHandle(state.pipeline_state->pipeline).c_str(), vertex_binding, i, vertex_binding);
+ if ((pCB->current_draw_data.vertex_buffer_bindings.size() < (vertex_binding + 1)) ||
+ (pCB->current_draw_data.vertex_buffer_bindings[vertex_binding].buffer == VK_NULL_HANDLE)) {
+ skip |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ HandleToUint64(pCB->commandBuffer), kVUID_Core_DrawState_VtxIndexOutOfBounds,
+ "The Pipeline State Object (%s) expects that this Command Buffer's vertex binding Index %u should be set via "
+ "vkCmdBindVertexBuffers. This is because VkVertexInputBindingDescription struct at "
+ "index " PRINTF_SIZE_T_SPECIFIER " of pVertexBindingDescriptions has a binding value of %u.",
+ dev_data->report_data->FormatHandle(state.pipeline_state->pipeline).c_str(), vertex_binding, i, vertex_binding);
}
}
@@ -970,41 +942,44 @@ bool CoreChecks::ValidatePipelineDrawtimeState(const LAST_BOUND_STATE &state, co
const auto &vertex_binding_map_it = pPipeline->vertex_binding_to_index_map_.find(vertex_binding);
if ((vertex_binding_map_it != pPipeline->vertex_binding_to_index_map_.cend()) &&
- (vertex_binding < current_vtx_bfr_binding_info.size()) &&
- (current_vtx_bfr_binding_info[vertex_binding].buffer != VK_NULL_HANDLE)) {
+ (vertex_binding < pCB->current_draw_data.vertex_buffer_bindings.size()) &&
+ (pCB->current_draw_data.vertex_buffer_bindings[vertex_binding].buffer != VK_NULL_HANDLE)) {
const auto vertex_buffer_stride = pPipeline->vertex_binding_descriptions_[vertex_binding_map_it->second].stride;
- const auto vertex_buffer_offset = current_vtx_bfr_binding_info[vertex_binding].offset;
- const auto buffer_state = GetBufferState(current_vtx_bfr_binding_info[vertex_binding].buffer);
+ const auto vertex_buffer_offset = pCB->current_draw_data.vertex_buffer_bindings[vertex_binding].offset;
+ const auto buffer_state = GetBufferState(pCB->current_draw_data.vertex_buffer_bindings[vertex_binding].buffer);
// Use only memory binding offset as base memory should be properly aligned by the driver
const auto buffer_binding_address = buffer_state->binding.offset + vertex_buffer_offset;
// Use 1 as vertex/instance index to use buffer stride as well
const auto attrib_address = buffer_binding_address + vertex_buffer_stride + attribute_offset;
- VkDeviceSize vtx_attrib_req_alignment = FormatElementSize(attribute_format);
+ uint32_t vtx_attrib_req_alignment = FormatElementSize(attribute_format);
if (FormatElementIsTexel(attribute_format)) {
- vtx_attrib_req_alignment = SafeDivision(vtx_attrib_req_alignment, FormatChannelCount(attribute_format));
+ vtx_attrib_req_alignment /= FormatChannelCount(attribute_format);
}
if (SafeModulo(attrib_address, vtx_attrib_req_alignment) != 0) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
- HandleToUint64(current_vtx_bfr_binding_info[vertex_binding].buffer),
- kVUID_Core_DrawState_InvalidVtxAttributeAlignment,
- "Invalid attribAddress alignment for vertex attribute " PRINTF_SIZE_T_SPECIFIER
- " from %s and vertex %s.",
- i, report_data->FormatHandle(state.pipeline_state->pipeline).c_str(),
- report_data->FormatHandle(current_vtx_bfr_binding_info[vertex_binding].buffer).c_str());
+ skip |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+ HandleToUint64(pCB->current_draw_data.vertex_buffer_bindings[vertex_binding].buffer),
+ kVUID_Core_DrawState_InvalidVtxAttributeAlignment,
+ "Invalid attribAddress alignment for vertex attribute " PRINTF_SIZE_T_SPECIFIER
+ " from pipeline (%s) and vertex buffer (%s).",
+ i, dev_data->report_data->FormatHandle(state.pipeline_state->pipeline).c_str(),
+ dev_data->report_data->FormatHandle(pCB->current_draw_data.vertex_buffer_bindings[vertex_binding].buffer)
+ .c_str());
}
}
}
} else {
- if ((!current_vtx_bfr_binding_info.empty()) && (!pCB->vertex_buffer_used)) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(pCB->commandBuffer), kVUID_Core_DrawState_VtxIndexOutOfBounds,
- "Vertex buffers are bound to %s but no vertex buffers are attached to %s.",
- report_data->FormatHandle(pCB->commandBuffer).c_str(),
- report_data->FormatHandle(state.pipeline_state->pipeline).c_str());
+ if ((!pCB->current_draw_data.vertex_buffer_bindings.empty()) && (!pCB->vertex_buffer_used)) {
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, HandleToUint64(pCB->commandBuffer),
+ kVUID_Core_DrawState_VtxIndexOutOfBounds,
+ "Vertex buffers are bound to command buffer (%s) but no vertex buffers are attached to this Pipeline "
+ "State Object (%s).",
+ dev_data->report_data->FormatHandle(pCB->commandBuffer).c_str(),
+ dev_data->report_data->FormatHandle(state.pipeline_state->pipeline).c_str());
}
}
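// A self-contained sketch of the alignment arithmetic checked above; the parameter names are
// illustrative stand-ins for buffer_state->binding.offset, the vkCmdBindVertexBuffers offset,
// the binding stride, the attribute offset, and the format element size / channel count, and
// the example at the end is hypothetical.
#include <cstdint>

static bool VertexAttributeIsAligned(uint64_t memory_binding_offset, uint64_t vertex_buffer_offset,
                                     uint32_t binding_stride, uint32_t attribute_offset,
                                     uint32_t format_element_size, uint32_t format_channel_count,
                                     bool format_is_texel) {
    // Use vertex/instance index 1 so the binding stride participates in the check.
    const uint64_t attrib_address =
        memory_binding_offset + vertex_buffer_offset + binding_stride + attribute_offset;
    uint32_t required_alignment = format_element_size;
    if (format_is_texel && format_channel_count != 0) {
        required_alignment /= format_channel_count;  // per-component alignment for texel formats
    }
    return required_alignment == 0 || (attrib_address % required_alignment) == 0;
}

// Example: a 16-byte attribute at offset 4 on a 20-byte-stride binding whose memory binding
// offset is 0 yields address 0 + 0 + 20 + 4 = 24, which fails a 16-byte alignment requirement.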
@@ -1024,7 +999,7 @@ bool CoreChecks::ValidatePipelineDrawtimeState(const LAST_BOUND_STATE &state, co
ss << "Dynamic viewport(s) ";
ListBits(ss, missingViewportMask);
ss << " are used by pipeline state object, but were not provided via calls to vkCmdSetViewport().";
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
kVUID_Core_DrawState_ViewportScissorMismatch, "%s", ss.str().c_str());
}
}
@@ -1037,7 +1012,7 @@ bool CoreChecks::ValidatePipelineDrawtimeState(const LAST_BOUND_STATE &state, co
ss << "Dynamic scissor(s) ";
ListBits(ss, missingScissorMask);
ss << " are used by pipeline state object, but were not provided via calls to vkCmdSetScissor().";
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
kVUID_Core_DrawState_ViewportScissorMismatch, "%s", ss.str().c_str());
}
}
@@ -1066,20 +1041,22 @@ bool CoreChecks::ValidatePipelineDrawtimeState(const LAST_BOUND_STATE &state, co
subpass_num_samples |= (unsigned)render_pass_info->pAttachments[attachment].samples;
}
- if (!(device_extensions.vk_amd_mixed_attachment_samples || device_extensions.vk_nv_framebuffer_mixed_samples) &&
+ if (!(dev_data->device_extensions.vk_amd_mixed_attachment_samples ||
+ dev_data->device_extensions.vk_nv_framebuffer_mixed_samples) &&
((subpass_num_samples & static_cast<unsigned>(pso_num_samples)) != subpass_num_samples)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
- HandleToUint64(pPipeline->pipeline), kVUID_Core_DrawState_NumSamplesMismatch,
- "Num samples mismatch! At draw-time in %s with %u samples while current %s w/ "
- "%u samples!",
- report_data->FormatHandle(pPipeline->pipeline).c_str(), pso_num_samples,
- report_data->FormatHandle(pCB->activeRenderPass->renderPass).c_str(), subpass_num_samples);
+ skip |=
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+ HandleToUint64(pPipeline->pipeline), kVUID_Core_DrawState_NumSamplesMismatch,
+ "Num samples mismatch! At draw-time in Pipeline (%s) with %u samples while current RenderPass (%s) w/ "
+ "%u samples!",
+ dev_data->report_data->FormatHandle(pPipeline->pipeline).c_str(), pso_num_samples,
+ dev_data->report_data->FormatHandle(pCB->activeRenderPass->renderPass).c_str(), subpass_num_samples);
}
} else {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
- HandleToUint64(pPipeline->pipeline), kVUID_Core_DrawState_NoActiveRenderpass,
- "No active render pass found at draw-time in %s!", report_data->FormatHandle(pPipeline->pipeline).c_str());
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+ HandleToUint64(pPipeline->pipeline), kVUID_Core_DrawState_NoActiveRenderpass,
+ "No active render pass found at draw-time in Pipeline (%s)!",
+ dev_data->report_data->FormatHandle(pPipeline->pipeline).c_str());
}
}
// Verify that PSO creation renderPass is compatible with active renderPass
@@ -1087,39 +1064,47 @@ bool CoreChecks::ValidatePipelineDrawtimeState(const LAST_BOUND_STATE &state, co
// TODO: Move all of the error codes common across different Draws into a LUT accessed by cmd_type
// TODO: AMD extension codes are included here, but actual function entrypoints are not yet intercepted
// Error codes for renderpass and subpass mismatches
- auto rp_error = "VUID-vkCmdDraw-renderPass-02684", sp_error = "VUID-vkCmdDraw-subpass-02685";
+ auto rp_error = "VUID-vkCmdDraw-renderPass-00435", sp_error = "VUID-vkCmdDraw-subpass-00436";
switch (cmd_type) {
case CMD_DRAWINDEXED:
- rp_error = "VUID-vkCmdDrawIndexed-renderPass-02684";
- sp_error = "VUID-vkCmdDrawIndexed-subpass-02685";
+ rp_error = "VUID-vkCmdDrawIndexed-renderPass-00454";
+ sp_error = "VUID-vkCmdDrawIndexed-subpass-00455";
break;
case CMD_DRAWINDIRECT:
- rp_error = "VUID-vkCmdDrawIndirect-renderPass-02684";
- sp_error = "VUID-vkCmdDrawIndirect-subpass-02685";
+ rp_error = "VUID-vkCmdDrawIndirect-renderPass-00479";
+ sp_error = "VUID-vkCmdDrawIndirect-subpass-00480";
+ break;
+ case CMD_DRAWINDIRECTCOUNTAMD:
+ rp_error = "VUID-vkCmdDrawIndirectCountAMD-renderPass-00507";
+ sp_error = "VUID-vkCmdDrawIndirectCountAMD-subpass-00508";
break;
case CMD_DRAWINDIRECTCOUNTKHR:
- rp_error = "VUID-vkCmdDrawIndirectCountKHR-renderPass-02684";
- sp_error = "VUID-vkCmdDrawIndirectCountKHR-subpass-02685";
+ rp_error = "VUID-vkCmdDrawIndirectCountKHR-renderPass-03113";
+ sp_error = "VUID-vkCmdDrawIndirectCountKHR-subpass-03114";
break;
case CMD_DRAWINDEXEDINDIRECT:
- rp_error = "VUID-vkCmdDrawIndexedIndirect-renderPass-02684";
- sp_error = "VUID-vkCmdDrawIndexedIndirect-subpass-02685";
+ rp_error = "VUID-vkCmdDrawIndexedIndirect-renderPass-00531";
+ sp_error = "VUID-vkCmdDrawIndexedIndirect-subpass-00532";
+ break;
+ case CMD_DRAWINDEXEDINDIRECTCOUNTAMD:
+ rp_error = "VUID-vkCmdDrawIndexedIndirectCountAMD-renderPass-00560";
+ sp_error = "VUID-vkCmdDrawIndexedIndirectCountAMD-subpass-00561";
break;
case CMD_DRAWINDEXEDINDIRECTCOUNTKHR:
- rp_error = "VUID-vkCmdDrawIndexedIndirectCountKHR-renderPass-02684";
- sp_error = "VUID-vkCmdDrawIndexedIndirectCountKHR-subpass-02685";
+ rp_error = "VUID-vkCmdDrawIndexedIndirectCountKHR-renderPass-03145";
+ sp_error = "VUID-vkCmdDrawIndexedIndirectCountKHR-subpass-03146";
break;
case CMD_DRAWMESHTASKSNV:
- rp_error = "VUID-vkCmdDrawMeshTasksNV-renderPass-02684";
- sp_error = "VUID-vkCmdDrawMeshTasksNV-subpass-02685";
+ rp_error = "VUID-vkCmdDrawMeshTasksNV-renderPass-02120";
+ sp_error = "VUID-vkCmdDrawMeshTasksNV-subpass-02121";
break;
case CMD_DRAWMESHTASKSINDIRECTNV:
- rp_error = "VUID-vkCmdDrawMeshTasksIndirectNV-renderPass-02684";
- sp_error = "VUID-vkCmdDrawMeshTasksIndirectNV-subpass-02685";
+ rp_error = "VUID-vkCmdDrawMeshTasksIndirectNV-renderPass-02148";
+ sp_error = "VUID-vkCmdDrawMeshTasksIndirectNV-subpass-02149";
break;
case CMD_DRAWMESHTASKSINDIRECTCOUNTNV:
- rp_error = "VUID-vkCmdDrawMeshTasksIndirectCountNV-renderPass-02684";
- sp_error = "VUID-vkCmdDrawMeshTasksIndirectCountNV-subpass-02685";
+ rp_error = "VUID-vkCmdDrawMeshTasksIndirectCountNV-renderPass-02184";
+ sp_error = "VUID-vkCmdDrawMeshTasksIndirectCountNV-subpass-02185";
break;
default:
assert(CMD_DRAW == cmd_type);
@@ -1127,12 +1112,12 @@ bool CoreChecks::ValidatePipelineDrawtimeState(const LAST_BOUND_STATE &state, co
}
if (pCB->activeRenderPass->renderPass != pPipeline->rp_state->renderPass) {
// renderPass that PSO was created with must be compatible with active renderPass that PSO is being used with
- skip |= ValidateRenderPassCompatibility("active render pass", pCB->activeRenderPass, "pipeline state object",
+ skip |= ValidateRenderPassCompatibility(dev_data, "active render pass", pCB->activeRenderPass, "pipeline state object",
pPipeline->rp_state.get(), caller, rp_error);
}
if (pPipeline->graphicsPipelineCI.subpass != pCB->activeSubpass) {
skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
HandleToUint64(pPipeline->pipeline), sp_error, "Pipeline was built for subpass %u but used in subpass %u.",
pPipeline->graphicsPipelineCI.subpass, pCB->activeSubpass);
}
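// The TODO above suggests replacing the switch with a lookup table keyed by cmd_type; a minimal
// sketch of that idea follows. SketchCmdType, DrawErrorCodes, and the three-entry table are
// illustrative (only a subset of the real CMD_TYPE values), while the VUID strings are the ones
// used in the added lines above.
#include <map>

enum SketchCmdType { SKETCH_CMD_DRAW, SKETCH_CMD_DRAWINDEXED, SKETCH_CMD_DRAWINDIRECT };

struct DrawErrorCodes { const char *renderpass_vuid; const char *subpass_vuid; };

static const std::map<SketchCmdType, DrawErrorCodes> kDrawErrorCodes = {
    {SKETCH_CMD_DRAW, {"VUID-vkCmdDraw-renderPass-00435", "VUID-vkCmdDraw-subpass-00436"}},
    {SKETCH_CMD_DRAWINDEXED, {"VUID-vkCmdDrawIndexed-renderPass-00454", "VUID-vkCmdDrawIndexed-subpass-00455"}},
    {SKETCH_CMD_DRAWINDIRECT, {"VUID-vkCmdDrawIndirect-renderPass-00479", "VUID-vkCmdDrawIndirect-subpass-00480"}},
};

static DrawErrorCodes LookupDrawErrorCodes(SketchCmdType cmd) {
    auto it = kDrawErrorCodes.find(cmd);
    // Fall back to the plain vkCmdDraw codes, as the switch's default case does above.
    return (it != kDrawErrorCodes.end())
               ? it->second
               : DrawErrorCodes{"VUID-vkCmdDraw-renderPass-00435", "VUID-vkCmdDraw-subpass-00436"};
}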
@@ -1144,7 +1129,7 @@ bool CoreChecks::ValidatePipelineDrawtimeState(const LAST_BOUND_STATE &state, co
// For given cvdescriptorset::DescriptorSet, verify that its Set is compatible w/ the setLayout corresponding to
// pipelineLayout[layoutIndex]
static bool VerifySetLayoutCompatibility(const cvdescriptorset::DescriptorSet *descriptor_set,
- PIPELINE_LAYOUT_STATE const *pipeline_layout, const uint32_t layoutIndex,
+ PIPELINE_LAYOUT_NODE const *pipeline_layout, const uint32_t layoutIndex,
string &errorMsg) {
auto num_sets = pipeline_layout->set_layouts.size();
if (layoutIndex >= num_sets) {
@@ -1157,31 +1142,26 @@ static bool VerifySetLayoutCompatibility(const cvdescriptorset::DescriptorSet *d
}
if (descriptor_set->IsPushDescriptor()) return true;
auto layout_node = pipeline_layout->set_layouts[layoutIndex];
- return cvdescriptorset::VerifySetLayoutCompatibility(layout_node.get(), descriptor_set->GetLayout().get(), &errorMsg);
+ return descriptor_set->IsCompatible(layout_node.get(), &errorMsg);
}
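// In outline, VerifySetLayoutCompatibility above is a bounds test on the pipeline layout's
// set_layouts array followed by a layout-compatibility query on the bound set. A simplified
// sketch with stand-in types (SetLayoutHandle and PipelineLayoutSketch are hypothetical, and
// identity comparison stands in for the real binding-by-binding compatibility check):
#include <cstdint>
#include <sstream>
#include <string>
#include <vector>

using SetLayoutHandle = const void *;  // stands in for a shared_ptr to a descriptor set layout

struct PipelineLayoutSketch { std::vector<SetLayoutHandle> set_layouts; };

static bool SetLayoutIsCompatible(SetLayoutHandle bound_layout, const PipelineLayoutSketch &pipeline_layout,
                                  uint32_t layout_index, std::string *error_msg) {
    if (layout_index >= pipeline_layout.set_layouts.size()) {
        std::stringstream ss;
        ss << "layoutIndex " << layout_index << " is out of range; the pipeline layout only contains "
           << pipeline_layout.set_layouts.size() << " set layouts";
        *error_msg = ss.str();
        return false;
    }
    // The real check compares bindings and descriptor types; pointer equality is the degenerate
    // stand-in used here.
    return bound_layout == pipeline_layout.set_layouts[layout_index];
}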
// Validate overall state at the time of a draw call
-bool CoreChecks::ValidateCmdBufDrawState(const CMD_BUFFER_STATE *cb_node, CMD_TYPE cmd_type, const bool indexed,
+bool CoreChecks::ValidateCmdBufDrawState(layer_data *dev_data, GLOBAL_CB_NODE *cb_node, CMD_TYPE cmd_type, const bool indexed,
const VkPipelineBindPoint bind_point, const char *function, const char *pipe_err_code,
- const char *state_err_code) const {
- const auto last_bound_it = cb_node->lastBound.find(bind_point);
- const PIPELINE_STATE *pPipe = nullptr;
- if (last_bound_it != cb_node->lastBound.cend()) {
- pPipe = last_bound_it->second.pipeline_state;
- }
-
+ const char *state_err_code) {
+ bool result = false;
+ auto const &state = cb_node->lastBound[bind_point];
+ PIPELINE_STATE *pPipe = state.pipeline_state;
if (nullptr == pPipe) {
- return log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ return log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(cb_node->commandBuffer), pipe_err_code,
"Must not call %s on this command buffer while there is no %s pipeline bound.", function,
bind_point == VK_PIPELINE_BIND_POINT_GRAPHICS ? "Graphics" : "Compute");
}
- bool result = false;
- auto const &state = last_bound_it->second;
-
// First check flag states
- if (VK_PIPELINE_BIND_POINT_GRAPHICS == bind_point) result = ValidateDrawStateFlags(cb_node, pPipe, indexed, state_err_code);
+ if (VK_PIPELINE_BIND_POINT_GRAPHICS == bind_point)
+ result = ValidateDrawStateFlags(dev_data, cb_node, pPipe, indexed, state_err_code);
// Now complete other state checks
string errorString;
@@ -1190,23 +1170,23 @@ bool CoreChecks::ValidateCmdBufDrawState(const CMD_BUFFER_STATE *cb_node, CMD_TY
for (const auto &set_binding_pair : pPipe->active_slots) {
uint32_t setIndex = set_binding_pair.first;
// If valid set is not bound throw an error
- if ((state.per_set.size() <= setIndex) || (!state.per_set[setIndex].bound_descriptor_set)) {
- result |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(cb_node->commandBuffer), kVUID_Core_DrawState_DescriptorSetNotBound,
- "%s uses set #%u but that set is not bound.", report_data->FormatHandle(pPipe->pipeline).c_str(), setIndex);
- } else if (!VerifySetLayoutCompatibility(state.per_set[setIndex].bound_descriptor_set, &pipeline_layout, setIndex,
- errorString)) {
+ if ((state.boundDescriptorSets.size() <= setIndex) || (!state.boundDescriptorSets[setIndex])) {
+ result |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ HandleToUint64(cb_node->commandBuffer), kVUID_Core_DrawState_DescriptorSetNotBound,
+ "VkPipeline %s uses set #%u but that set is not bound.",
+ dev_data->report_data->FormatHandle(pPipe->pipeline).c_str(), setIndex);
+ } else if (!VerifySetLayoutCompatibility(state.boundDescriptorSets[setIndex], &pipeline_layout, setIndex, errorString)) {
// Set is bound but not compatible w/ overlapping pipeline_layout from PSO
- VkDescriptorSet setHandle = state.per_set[setIndex].bound_descriptor_set->GetSet();
- result |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
- HandleToUint64(setHandle), kVUID_Core_DrawState_PipelineLayoutsIncompatible,
- "%s bound as set #%u is not compatible with overlapping %s due to: %s",
- report_data->FormatHandle(setHandle).c_str(), setIndex,
- report_data->FormatHandle(pipeline_layout.layout).c_str(), errorString.c_str());
+ VkDescriptorSet setHandle = state.boundDescriptorSets[setIndex]->GetSet();
+ result |=
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+ HandleToUint64(setHandle), kVUID_Core_DrawState_PipelineLayoutsIncompatible,
+ "VkDescriptorSet (%s) bound as set #%u is not compatible with overlapping VkPipelineLayout %s due to: %s",
+ dev_data->report_data->FormatHandle(setHandle).c_str(), setIndex,
+ dev_data->report_data->FormatHandle(pipeline_layout.layout).c_str(), errorString.c_str());
} else { // Valid set is bound and layout compatible, validate that it's updated
// Pull the set node
- const cvdescriptorset::DescriptorSet *descriptor_set = state.per_set[setIndex].bound_descriptor_set;
+ cvdescriptorset::DescriptorSet *descriptor_set = state.boundDescriptorSets[setIndex];
// Validate the draw-time state for this descriptor set
std::string err_str;
if (!descriptor_set->IsPushDescriptor()) {
@@ -1214,38 +1194,18 @@ bool CoreChecks::ValidateCmdBufDrawState(const CMD_BUFFER_STATE *cb_node, CMD_TY
// binding validation. Take the requested binding set and prefilter it to eliminate redundant validation checks.
// Here, the currently bound pipeline determines whether an image validation check is redundant...
// for images are the "req" portion of the binding_req is indirectly (but tightly) coupled to the pipeline.
- cvdescriptorset::PrefilterBindRequestMap reduced_map(*descriptor_set, set_binding_pair.second);
- const auto &binding_req_map = reduced_map.FilteredMap(*cb_node, *pPipe);
-
- // We can skip validating the descriptor set if "nothing" has changed since the last validation.
- // Same set, no image layout changes, and same "pipeline state" (binding_req_map). If there are
- // any dynamic descriptors, always revalidate rather than caching the values. We currently only
- // apply this optimization if IsManyDescriptors is true, to avoid the overhead of copying the
- // binding_req_map which could potentially be expensive.
- bool need_validate =
- !reduced_map.IsManyDescriptors() ||
- // Revalidate each time if the set has dynamic offsets
- state.per_set[setIndex].dynamicOffsets.size() > 0 ||
- // Revalidate if descriptor set (or contents) has changed
- state.per_set[setIndex].validated_set != descriptor_set ||
- state.per_set[setIndex].validated_set_change_count != descriptor_set->GetChangeCount() ||
- (!disabled.image_layout_validation &&
- state.per_set[setIndex].validated_set_image_layout_change_count != cb_node->image_layout_change_count) ||
- // Revalidate if previous bindingReqMap doesn't include new bindingReqMap
- !std::includes(state.per_set[setIndex].validated_set_binding_req_map.begin(),
- state.per_set[setIndex].validated_set_binding_req_map.end(), set_binding_pair.second.begin(),
- set_binding_pair.second.end());
-
- if (need_validate) {
- if (!ValidateDrawState(descriptor_set, binding_req_map, state.per_set[setIndex].dynamicOffsets, cb_node,
- function, &err_str)) {
- auto set = descriptor_set->GetSet();
- result |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
- HandleToUint64(set), kVUID_Core_DrawState_DescriptorSetNotUpdated,
- "%s bound as set #%u encountered the following validation error at %s time: %s",
- report_data->FormatHandle(set).c_str(), setIndex, function, err_str.c_str());
- }
+ const cvdescriptorset::PrefilterBindRequestMap reduced_map(*descriptor_set, set_binding_pair.second, cb_node,
+ pPipe);
+ const auto &binding_req_map = reduced_map.Map();
+
+ if (!descriptor_set->ValidateDrawState(binding_req_map, state.dynamicOffsets[setIndex], cb_node, function,
+ &err_str)) {
+ auto set = descriptor_set->GetSet();
+ result |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+ HandleToUint64(set), kVUID_Core_DrawState_DescriptorSetNotUpdated,
+ "Descriptor set %s bound as set #%u encountered the following validation error at %s time: %s",
+ dev_data->report_data->FormatHandle(set).c_str(), setIndex, function, err_str.c_str());
}
}
}
@@ -1253,63 +1213,28 @@ bool CoreChecks::ValidateCmdBufDrawState(const CMD_BUFFER_STATE *cb_node, CMD_TY
// Check general pipeline state that needs to be validated at drawtime
if (VK_PIPELINE_BIND_POINT_GRAPHICS == bind_point)
- result |= ValidatePipelineDrawtimeState(state, cb_node, cmd_type, pPipe, function);
+ result |= ValidatePipelineDrawtimeState(dev_data, state, cb_node, cmd_type, pPipe, function);
return result;
}
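// The removed block above cached per-set validation results and skipped revalidation when
// nothing relevant had changed. A compact sketch of that "need to revalidate?" decision with
// hypothetical names (ValidationCacheEntry is not a real layer type; the fields stand in for
// the per_set tracking members deleted above):
#include <algorithm>
#include <cstdint>
#include <set>

struct ValidationCacheEntry {
    const void *validated_set = nullptr;        // descriptor set pointer last validated
    uint64_t validated_change_count = 0;        // the set's change count at that time
    uint64_t validated_image_layout_count = 0;  // command buffer image-layout change count at that time
    std::set<uint32_t> validated_bindings;      // binding requirements that were covered
};

static bool NeedRevalidate(const ValidationCacheEntry &cache, const void *descriptor_set, bool many_descriptors,
                           bool has_dynamic_offsets, uint64_t set_change_count, uint64_t cb_image_layout_count,
                           const std::set<uint32_t> &requested_bindings) {
    if (!many_descriptors) return true;                      // caching only pays off for large sets
    if (has_dynamic_offsets) return true;                    // dynamic offsets always revalidate
    if (cache.validated_set != descriptor_set) return true;  // a different set is bound
    if (cache.validated_change_count != set_change_count) return true;
    if (cache.validated_image_layout_count != cb_image_layout_count) return true;
    // Revalidate if the cached coverage does not include every requested binding.
    return !std::includes(cache.validated_bindings.begin(), cache.validated_bindings.end(),
                          requested_bindings.begin(), requested_bindings.end());
}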
-void ValidationStateTracker::UpdateDrawState(CMD_BUFFER_STATE *cb_state, const VkPipelineBindPoint bind_point) {
- auto &state = cb_state->lastBound[bind_point];
+void CoreChecks::UpdateDrawState(layer_data *dev_data, GLOBAL_CB_NODE *cb_state, const VkPipelineBindPoint bind_point) {
+ auto const &state = cb_state->lastBound[bind_point];
PIPELINE_STATE *pPipe = state.pipeline_state;
if (VK_NULL_HANDLE != state.pipeline_layout) {
for (const auto &set_binding_pair : pPipe->active_slots) {
uint32_t setIndex = set_binding_pair.first;
// Pull the set node
- cvdescriptorset::DescriptorSet *descriptor_set = state.per_set[setIndex].bound_descriptor_set;
+ cvdescriptorset::DescriptorSet *descriptor_set = state.boundDescriptorSets[setIndex];
if (!descriptor_set->IsPushDescriptor()) {
// For the "bindless" style resource usage with many descriptors, need to optimize command <-> descriptor binding
+ const cvdescriptorset::PrefilterBindRequestMap reduced_map(*descriptor_set, set_binding_pair.second, cb_state);
+ const auto &binding_req_map = reduced_map.Map();
- // TODO: If recreating the reduced_map here shows up in profiling, need to find a way of sharing with the
- // Validate pass. Though in the case of "many" descriptors, typically the descriptor count >> binding count
- cvdescriptorset::PrefilterBindRequestMap reduced_map(*descriptor_set, set_binding_pair.second);
- const auto &binding_req_map = reduced_map.FilteredMap(*cb_state, *pPipe);
-
- if (reduced_map.IsManyDescriptors()) {
- // Only update validate binding tags if we meet the "many" criteria in the Prefilter class
- descriptor_set->UpdateValidationCache(*cb_state, *pPipe, binding_req_map);
- }
-
- // We can skip updating the state if "nothing" has changed since the last validation.
- // See CoreChecks::ValidateCmdBufDrawState for more details.
- bool need_update =
- !reduced_map.IsManyDescriptors() ||
- // Update if descriptor set (or contents) has changed
- state.per_set[setIndex].validated_set != descriptor_set ||
- state.per_set[setIndex].validated_set_change_count != descriptor_set->GetChangeCount() ||
- (!disabled.image_layout_validation &&
- state.per_set[setIndex].validated_set_image_layout_change_count != cb_state->image_layout_change_count) ||
- // Update if previous bindingReqMap doesn't include new bindingReqMap
- !std::includes(state.per_set[setIndex].validated_set_binding_req_map.begin(),
- state.per_set[setIndex].validated_set_binding_req_map.end(), set_binding_pair.second.begin(),
- set_binding_pair.second.end());
-
- if (need_update) {
- // Bind this set and its active descriptor resources to the command buffer
- descriptor_set->UpdateDrawState(this, cb_state, binding_req_map);
-
- state.per_set[setIndex].validated_set = descriptor_set;
- state.per_set[setIndex].validated_set_change_count = descriptor_set->GetChangeCount();
- state.per_set[setIndex].validated_set_image_layout_change_count = cb_state->image_layout_change_count;
- if (reduced_map.IsManyDescriptors()) {
- // Check whether old == new before assigning, the equality check is much cheaper than
- // freeing and reallocating the map.
- if (state.per_set[setIndex].validated_set_binding_req_map != set_binding_pair.second) {
- state.per_set[setIndex].validated_set_binding_req_map = set_binding_pair.second;
- }
- } else {
- state.per_set[setIndex].validated_set_binding_req_map = BindingReqMap();
- }
- }
+ // Bind this set and its active descriptor resources to the command buffer
+ descriptor_set->UpdateDrawState(cb_state, binding_req_map);
+ // For given active slots record updated images & buffers
+ descriptor_set->GetStorageUpdates(binding_req_map, &cb_state->updateBuffers, &cb_state->updateImages);
}
}
}
@@ -1318,27 +1243,28 @@ void ValidationStateTracker::UpdateDrawState(CMD_BUFFER_STATE *cb_state, const V
}
}
-bool CoreChecks::ValidatePipelineLocked(std::vector<std::unique_ptr<PIPELINE_STATE>> const &pPipelines, int pipelineIndex) const {
+bool CoreChecks::ValidatePipelineLocked(layer_data *dev_data, std::vector<std::unique_ptr<PIPELINE_STATE>> const &pPipelines,
+ int pipelineIndex) {
bool skip = false;
- const PIPELINE_STATE *pPipeline = pPipelines[pipelineIndex].get();
+ PIPELINE_STATE *pPipeline = pPipelines[pipelineIndex].get();
// If create derivative bit is set, check that we've specified a base
// pipeline correctly, and that the base pipeline was created to allow
// derivatives.
if (pPipeline->graphicsPipelineCI.flags & VK_PIPELINE_CREATE_DERIVATIVE_BIT) {
- const PIPELINE_STATE *pBasePipeline = nullptr;
+ PIPELINE_STATE *pBasePipeline = nullptr;
if (!((pPipeline->graphicsPipelineCI.basePipelineHandle != VK_NULL_HANDLE) ^
(pPipeline->graphicsPipelineCI.basePipelineIndex != -1))) {
// This check is a superset of "VUID-VkGraphicsPipelineCreateInfo-flags-00724" and
// "VUID-VkGraphicsPipelineCreateInfo-flags-00725"
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- HandleToUint64(device), kVUID_Core_DrawState_InvalidPipelineCreateState,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+ HandleToUint64(pPipeline->pipeline), kVUID_Core_DrawState_InvalidPipelineCreateState,
"Invalid Pipeline CreateInfo: exactly one of base pipeline index and handle must be specified");
} else if (pPipeline->graphicsPipelineCI.basePipelineIndex != -1) {
if (pPipeline->graphicsPipelineCI.basePipelineIndex >= pipelineIndex) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- HandleToUint64(device), "VUID-vkCreateGraphicsPipelines-flags-00720",
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+ HandleToUint64(pPipeline->pipeline), "VUID-vkCreateGraphicsPipelines-flags-00720",
"Invalid Pipeline CreateInfo: base pipeline must occur earlier in array than derivative pipeline.");
} else {
pBasePipeline = pPipelines[pPipeline->graphicsPipelineCI.basePipelineIndex].get();
@@ -1348,8 +1274,8 @@ bool CoreChecks::ValidatePipelineLocked(std::vector<std::unique_ptr<PIPELINE_STA
}
if (pBasePipeline && !(pBasePipeline->graphicsPipelineCI.flags & VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- HandleToUint64(device), kVUID_Core_DrawState_InvalidPipelineCreateState,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+ HandleToUint64(pPipeline->pipeline), kVUID_Core_DrawState_InvalidPipelineCreateState,
"Invalid Pipeline CreateInfo: base pipeline does not allow derivatives.");
}
}
@@ -1357,17 +1283,20 @@ bool CoreChecks::ValidatePipelineLocked(std::vector<std::unique_ptr<PIPELINE_STA
return skip;
}
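// A standalone sketch of the derivative-pipeline rules enforced above: exactly one of
// basePipelineHandle / basePipelineIndex may be specified, a base referenced by index must
// occur earlier in the create-info array, and that base must allow derivatives.
// PipelineCreateSketch is a simplified stand-in struct, not the layer's PIPELINE_STATE.
#include <cstdint>

struct PipelineCreateSketch {
    uint64_t base_pipeline_handle = 0;  // 0 plays the role of VK_NULL_HANDLE
    int32_t base_pipeline_index = -1;
    bool allows_derivatives = false;    // VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT
    bool is_derivative = false;         // VK_PIPELINE_CREATE_DERIVATIVE_BIT
};

// Returns false when any of the three derivative rules is violated.
static bool DerivativeBaseIsValid(const PipelineCreateSketch *pipelines, int pipeline_index) {
    const PipelineCreateSketch &p = pipelines[pipeline_index];
    if (!p.is_derivative) return true;
    const bool has_handle = (p.base_pipeline_handle != 0);
    const bool has_index = (p.base_pipeline_index != -1);
    if (has_handle == has_index) return false;  // exactly one of index and handle must be specified
    if (has_index) {
        if (p.base_pipeline_index >= pipeline_index) return false;  // base must occur earlier in the array
        return pipelines[p.base_pipeline_index].allows_derivatives;
    }
    // With only a handle, the base's ALLOW_DERIVATIVES flag is checked through the handle lookup,
    // which is outside this sketch.
    return true;
}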
-// UNLOCKED pipeline validation. DO NOT lookup objects in the CoreChecks->* maps in this function.
-bool CoreChecks::ValidatePipelineUnlocked(const PIPELINE_STATE *pPipeline, uint32_t pipelineIndex) const {
+// UNLOCKED pipeline validation. DO NOT lookup objects in the layer_data->* maps in this function.
+bool CoreChecks::ValidatePipelineUnlocked(layer_data *dev_data, std::vector<std::unique_ptr<PIPELINE_STATE>> const &pPipelines,
+ int pipelineIndex) {
bool skip = false;
- // Ensure the subpass index is valid. If not, then ValidateGraphicsPipelineShaderState
+ PIPELINE_STATE *pPipeline = pPipelines[pipelineIndex].get();
+
+ // Ensure the subpass index is valid. If not, then ValidateAndCapturePipelineShaderState
// produces nonsense errors that confuse users. Other layers should already
// emit errors for renderpass being invalid.
auto subpass_desc = &pPipeline->rp_state->createInfo.pSubpasses[pPipeline->graphicsPipelineCI.subpass];
if (pPipeline->graphicsPipelineCI.subpass >= pPipeline->rp_state->createInfo.subpassCount) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
- "VUID-VkGraphicsPipelineCreateInfo-subpass-00759",
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+ HandleToUint64(pPipeline->pipeline), "VUID-VkGraphicsPipelineCreateInfo-subpass-00759",
"Invalid Pipeline CreateInfo State: Subpass index %u is out of range for this renderpass (0..%u).",
pPipeline->graphicsPipelineCI.subpass, pPipeline->rp_state->createInfo.subpassCount - 1);
subpass_desc = nullptr;
@@ -1376,25 +1305,26 @@ bool CoreChecks::ValidatePipelineUnlocked(const PIPELINE_STATE *pPipeline, uint3
if (pPipeline->graphicsPipelineCI.pColorBlendState != NULL) {
const safe_VkPipelineColorBlendStateCreateInfo *color_blend_state = pPipeline->graphicsPipelineCI.pColorBlendState;
if (color_blend_state->attachmentCount != subpass_desc->colorAttachmentCount) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
- "VUID-VkGraphicsPipelineCreateInfo-attachmentCount-00746",
- "vkCreateGraphicsPipelines(): %s subpass %u has colorAttachmentCount of %u which doesn't "
- "match the pColorBlendState->attachmentCount of %u.",
- report_data->FormatHandle(pPipeline->rp_state->renderPass).c_str(), pPipeline->graphicsPipelineCI.subpass,
- subpass_desc->colorAttachmentCount, color_blend_state->attachmentCount);
- }
- if (!enabled_features.core.independentBlend) {
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+ HandleToUint64(pPipeline->pipeline), "VUID-VkGraphicsPipelineCreateInfo-attachmentCount-00746",
+ "vkCreateGraphicsPipelines(): Render pass (%s) subpass %u has colorAttachmentCount of %u which doesn't "
+ "match the pColorBlendState->attachmentCount of %u.",
+ dev_data->report_data->FormatHandle(pPipeline->rp_state->renderPass).c_str(),
+ pPipeline->graphicsPipelineCI.subpass, subpass_desc->colorAttachmentCount,
+ color_blend_state->attachmentCount);
+ }
+ if (!dev_data->enabled_features.core.independentBlend) {
if (pPipeline->attachments.size() > 1) {
- const VkPipelineColorBlendAttachmentState *const pAttachments = &pPipeline->attachments[0];
+ VkPipelineColorBlendAttachmentState *pAttachments = &pPipeline->attachments[0];
for (size_t i = 1; i < pPipeline->attachments.size(); i++) {
// Quoting the spec: "If [the independent blend] feature is not enabled, the VkPipelineColorBlendAttachmentState
// settings for all color attachments must be identical." VkPipelineColorBlendAttachmentState contains
// only attachment state, so memcmp is best suited for the comparison
if (memcmp(static_cast<const void *>(pAttachments), static_cast<const void *>(&pAttachments[i]),
sizeof(pAttachments[0]))) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- HandleToUint64(device), "VUID-VkPipelineColorBlendStateCreateInfo-pAttachments-00605",
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, HandleToUint64(pPipeline->pipeline),
+ "VUID-VkPipelineColorBlendStateCreateInfo-pAttachments-00605",
"Invalid Pipeline CreateInfo: If independent blend feature not enabled, all elements of "
"pAttachments must be identical.");
break;
@@ -1402,10 +1332,11 @@ bool CoreChecks::ValidatePipelineUnlocked(const PIPELINE_STATE *pPipeline, uint3
}
}
}
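// The memcmp comparison above works because VkPipelineColorBlendAttachmentState holds only plain
// attachment state; a minimal sketch of the same "all elements identical" test over a generic POD
// array (AllAttachmentStatesIdentical is an illustrative helper, not part of the layer):
#include <cstddef>
#include <cstring>

// Returns true when every element is byte-identical to the first, the condition required when
// the independentBlend feature is disabled.
template <typename T>
static bool AllAttachmentStatesIdentical(const T *attachments, size_t count) {
    for (size_t i = 1; i < count; ++i) {
        if (std::memcmp(static_cast<const void *>(&attachments[0]),
                        static_cast<const void *>(&attachments[i]), sizeof(T)) != 0) {
            return false;
        }
    }
    return true;
}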
- if (!enabled_features.core.logicOp && (pPipeline->graphicsPipelineCI.pColorBlendState->logicOpEnable != VK_FALSE)) {
+ if (!dev_data->enabled_features.core.logicOp &&
+ (pPipeline->graphicsPipelineCI.pColorBlendState->logicOpEnable != VK_FALSE)) {
skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
- "VUID-VkPipelineColorBlendStateCreateInfo-logicOpEnable-00606",
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+ HandleToUint64(pPipeline->pipeline), "VUID-VkPipelineColorBlendStateCreateInfo-logicOpEnable-00606",
"Invalid Pipeline CreateInfo: If logic operations feature not enabled, logicOpEnable must be VK_FALSE.");
}
for (size_t i = 0; i < pPipeline->attachments.size(); i++) {
@@ -1413,80 +1344,80 @@ bool CoreChecks::ValidatePipelineUnlocked(const PIPELINE_STATE *pPipeline, uint3
(pPipeline->attachments[i].srcColorBlendFactor == VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR) ||
(pPipeline->attachments[i].srcColorBlendFactor == VK_BLEND_FACTOR_SRC1_ALPHA) ||
(pPipeline->attachments[i].srcColorBlendFactor == VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA)) {
- if (!enabled_features.core.dualSrcBlend) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- HandleToUint64(device), "VUID-VkPipelineColorBlendAttachmentState-srcColorBlendFactor-00608",
- "vkCreateGraphicsPipelines(): pPipelines[%d].pColorBlendState.pAttachments[" PRINTF_SIZE_T_SPECIFIER
- "].srcColorBlendFactor uses a dual-source blend factor (%d), but this device feature is not "
- "enabled.",
- pipelineIndex, i, pPipeline->attachments[i].srcColorBlendFactor);
+ if (!dev_data->enabled_features.core.dualSrcBlend) {
+ skip |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+ HandleToUint64(pPipeline->pipeline), "VUID-VkPipelineColorBlendAttachmentState-srcColorBlendFactor-00608",
+ "vkCreateGraphicsPipelines(): pPipelines[%d].pColorBlendState.pAttachments[" PRINTF_SIZE_T_SPECIFIER
+ "].srcColorBlendFactor uses a dual-source blend factor (%d), but this device feature is not "
+ "enabled.",
+ pipelineIndex, i, pPipeline->attachments[i].srcColorBlendFactor);
}
}
if ((pPipeline->attachments[i].dstColorBlendFactor == VK_BLEND_FACTOR_SRC1_COLOR) ||
(pPipeline->attachments[i].dstColorBlendFactor == VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR) ||
(pPipeline->attachments[i].dstColorBlendFactor == VK_BLEND_FACTOR_SRC1_ALPHA) ||
(pPipeline->attachments[i].dstColorBlendFactor == VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA)) {
- if (!enabled_features.core.dualSrcBlend) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- HandleToUint64(device), "VUID-VkPipelineColorBlendAttachmentState-dstColorBlendFactor-00609",
- "vkCreateGraphicsPipelines(): pPipelines[%d].pColorBlendState.pAttachments[" PRINTF_SIZE_T_SPECIFIER
- "].dstColorBlendFactor uses a dual-source blend factor (%d), but this device feature is not "
- "enabled.",
- pipelineIndex, i, pPipeline->attachments[i].dstColorBlendFactor);
+ if (!dev_data->enabled_features.core.dualSrcBlend) {
+ skip |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+ HandleToUint64(pPipeline->pipeline), "VUID-VkPipelineColorBlendAttachmentState-dstColorBlendFactor-00609",
+ "vkCreateGraphicsPipelines(): pPipelines[%d].pColorBlendState.pAttachments[" PRINTF_SIZE_T_SPECIFIER
+ "].dstColorBlendFactor uses a dual-source blend factor (%d), but this device feature is not "
+ "enabled.",
+ pipelineIndex, i, pPipeline->attachments[i].dstColorBlendFactor);
}
}
if ((pPipeline->attachments[i].srcAlphaBlendFactor == VK_BLEND_FACTOR_SRC1_COLOR) ||
(pPipeline->attachments[i].srcAlphaBlendFactor == VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR) ||
(pPipeline->attachments[i].srcAlphaBlendFactor == VK_BLEND_FACTOR_SRC1_ALPHA) ||
(pPipeline->attachments[i].srcAlphaBlendFactor == VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA)) {
- if (!enabled_features.core.dualSrcBlend) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- HandleToUint64(device), "VUID-VkPipelineColorBlendAttachmentState-srcAlphaBlendFactor-00610",
- "vkCreateGraphicsPipelines(): pPipelines[%d].pColorBlendState.pAttachments[" PRINTF_SIZE_T_SPECIFIER
- "].srcAlphaBlendFactor uses a dual-source blend factor (%d), but this device feature is not "
- "enabled.",
- pipelineIndex, i, pPipeline->attachments[i].srcAlphaBlendFactor);
+ if (!dev_data->enabled_features.core.dualSrcBlend) {
+ skip |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+ HandleToUint64(pPipeline->pipeline), "VUID-VkPipelineColorBlendAttachmentState-srcAlphaBlendFactor-00610",
+ "vkCreateGraphicsPipelines(): pPipelines[%d].pColorBlendState.pAttachments[" PRINTF_SIZE_T_SPECIFIER
+ "].srcAlphaBlendFactor uses a dual-source blend factor (%d), but this device feature is not "
+ "enabled.",
+ pipelineIndex, i, pPipeline->attachments[i].srcAlphaBlendFactor);
}
}
if ((pPipeline->attachments[i].dstAlphaBlendFactor == VK_BLEND_FACTOR_SRC1_COLOR) ||
(pPipeline->attachments[i].dstAlphaBlendFactor == VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR) ||
(pPipeline->attachments[i].dstAlphaBlendFactor == VK_BLEND_FACTOR_SRC1_ALPHA) ||
(pPipeline->attachments[i].dstAlphaBlendFactor == VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA)) {
- if (!enabled_features.core.dualSrcBlend) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- HandleToUint64(device), "VUID-VkPipelineColorBlendAttachmentState-dstAlphaBlendFactor-00611",
- "vkCreateGraphicsPipelines(): pPipelines[%d].pColorBlendState.pAttachments[" PRINTF_SIZE_T_SPECIFIER
- "].dstAlphaBlendFactor uses a dual-source blend factor (%d), but this device feature is not "
- "enabled.",
- pipelineIndex, i, pPipeline->attachments[i].dstAlphaBlendFactor);
+ if (!dev_data->enabled_features.core.dualSrcBlend) {
+ skip |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+ HandleToUint64(pPipeline->pipeline), "VUID-VkPipelineColorBlendAttachmentState-dstAlphaBlendFactor-00611",
+ "vkCreateGraphicsPipelines(): pPipelines[%d].pColorBlendState.pAttachments[" PRINTF_SIZE_T_SPECIFIER
+ "].dstAlphaBlendFactor uses a dual-source blend factor (%d), but this device feature is not "
+ "enabled.",
+ pipelineIndex, i, pPipeline->attachments[i].dstAlphaBlendFactor);
}
}
}
}
- if (ValidateGraphicsPipelineShaderState(pPipeline)) {
+ if (ValidateAndCapturePipelineShaderState(dev_data, pPipeline)) {
skip = true;
}
// Each shader's stage must be unique
if (pPipeline->duplicate_shaders) {
for (uint32_t stage = VK_SHADER_STAGE_VERTEX_BIT; stage & VK_SHADER_STAGE_ALL_GRAPHICS; stage <<= 1) {
if (pPipeline->duplicate_shaders & stage) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- HandleToUint64(device), kVUID_Core_DrawState_InvalidPipelineCreateState,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+ HandleToUint64(pPipeline->pipeline), kVUID_Core_DrawState_InvalidPipelineCreateState,
"Invalid Pipeline CreateInfo State: Multiple shaders provided for stage %s",
string_VkShaderStageFlagBits(VkShaderStageFlagBits(stage)));
}
}
}
- if (device_extensions.vk_nv_mesh_shader) {
+ if (dev_data->device_extensions.vk_nv_mesh_shader) {
// VS or mesh is required
if (!(pPipeline->active_shaders & (VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_MESH_BIT_NV))) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- HandleToUint64(device), "VUID-VkGraphicsPipelineCreateInfo-stage-02096",
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+ HandleToUint64(pPipeline->pipeline), "VUID-VkGraphicsPipelineCreateInfo-stage-02096",
"Invalid Pipeline CreateInfo State: Vertex Shader or Mesh Shader required.");
}
// Can't mix mesh and VTG
@@ -1494,29 +1425,29 @@ bool CoreChecks::ValidatePipelineUnlocked(const PIPELINE_STATE *pPipeline, uint3
(pPipeline->active_shaders &
(VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_GEOMETRY_BIT | VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT |
VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT))) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- HandleToUint64(device), "VUID-VkGraphicsPipelineCreateInfo-pStages-02095",
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+ HandleToUint64(pPipeline->pipeline), "VUID-VkGraphicsPipelineCreateInfo-pStages-02095",
"Invalid Pipeline CreateInfo State: Geometric shader stages must either be all mesh (mesh | task) "
"or all VTG (vertex, tess control, tess eval, geom).");
}
} else {
// VS is required
if (!(pPipeline->active_shaders & VK_SHADER_STAGE_VERTEX_BIT)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- HandleToUint64(device), "VUID-VkGraphicsPipelineCreateInfo-stage-00727",
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+ HandleToUint64(pPipeline->pipeline), "VUID-VkGraphicsPipelineCreateInfo-stage-00727",
"Invalid Pipeline CreateInfo State: Vertex Shader required.");
}
}
- if (!enabled_features.mesh_shader.meshShader && (pPipeline->active_shaders & VK_SHADER_STAGE_MESH_BIT_NV)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
- "VUID-VkPipelineShaderStageCreateInfo-stage-02091",
+ if (!dev_data->enabled_features.mesh_shader.meshShader && (pPipeline->active_shaders & VK_SHADER_STAGE_MESH_BIT_NV)) {
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+ HandleToUint64(pPipeline->pipeline), "VUID-VkPipelineShaderStageCreateInfo-stage-02091",
"Invalid Pipeline CreateInfo State: Mesh Shader not supported.");
}
- if (!enabled_features.mesh_shader.taskShader && (pPipeline->active_shaders & VK_SHADER_STAGE_TASK_BIT_NV)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
- "VUID-VkPipelineShaderStageCreateInfo-stage-02092",
+ if (!dev_data->enabled_features.mesh_shader.taskShader && (pPipeline->active_shaders & VK_SHADER_STAGE_TASK_BIT_NV)) {
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+ HandleToUint64(pPipeline->pipeline), "VUID-VkPipelineShaderStageCreateInfo-stage-02092",
"Invalid Pipeline CreateInfo State: Task Shader not supported.");
}
@@ -1524,25 +1455,25 @@ bool CoreChecks::ValidatePipelineUnlocked(const PIPELINE_STATE *pPipeline, uint3
bool has_control = (pPipeline->active_shaders & VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT) != 0;
bool has_eval = (pPipeline->active_shaders & VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT) != 0;
if (has_control && !has_eval) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
- "VUID-VkGraphicsPipelineCreateInfo-pStages-00729",
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+ HandleToUint64(pPipeline->pipeline), "VUID-VkGraphicsPipelineCreateInfo-pStages-00729",
"Invalid Pipeline CreateInfo State: TE and TC shaders must be included or excluded as a pair.");
}
if (!has_control && has_eval) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
- "VUID-VkGraphicsPipelineCreateInfo-pStages-00730",
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+ HandleToUint64(pPipeline->pipeline), "VUID-VkGraphicsPipelineCreateInfo-pStages-00730",
"Invalid Pipeline CreateInfo State: TE and TC shaders must be included or excluded as a pair.");
}
// Compute shaders should be specified independent of Gfx shaders
if (pPipeline->active_shaders & VK_SHADER_STAGE_COMPUTE_BIT) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
- "VUID-VkGraphicsPipelineCreateInfo-stage-00728",
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+ HandleToUint64(pPipeline->pipeline), "VUID-VkGraphicsPipelineCreateInfo-stage-00728",
"Invalid Pipeline CreateInfo State: Do not specify Compute Shader for Gfx Pipeline.");
}
if ((pPipeline->active_shaders & VK_SHADER_STAGE_VERTEX_BIT) && !pPipeline->graphicsPipelineCI.pInputAssemblyState) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
- "VUID-VkGraphicsPipelineCreateInfo-pStages-02098",
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+ HandleToUint64(pPipeline->pipeline), "VUID-VkGraphicsPipelineCreateInfo-pStages-02098",
"Invalid Pipeline CreateInfo State: Missing pInputAssemblyState.");
}
@@ -1551,67 +1482,37 @@ bool CoreChecks::ValidatePipelineUnlocked(const PIPELINE_STATE *pPipeline, uint3
if (has_control && has_eval &&
(!pPipeline->graphicsPipelineCI.pInputAssemblyState ||
pPipeline->graphicsPipelineCI.pInputAssemblyState->topology != VK_PRIMITIVE_TOPOLOGY_PATCH_LIST)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
- "VUID-VkGraphicsPipelineCreateInfo-pStages-00736",
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+ HandleToUint64(pPipeline->pipeline), "VUID-VkGraphicsPipelineCreateInfo-pStages-00736",
"Invalid Pipeline CreateInfo State: VK_PRIMITIVE_TOPOLOGY_PATCH_LIST must be set as IA topology for "
"tessellation pipelines.");
}
- if (pPipeline->graphicsPipelineCI.pInputAssemblyState) {
- if (pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_PATCH_LIST) {
- if (!has_control || !has_eval) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- HandleToUint64(device), "VUID-VkGraphicsPipelineCreateInfo-topology-00737",
+ if (pPipeline->graphicsPipelineCI.pInputAssemblyState &&
+ pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_PATCH_LIST) {
+ if (!has_control || !has_eval) {
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+ HandleToUint64(pPipeline->pipeline), "VUID-VkGraphicsPipelineCreateInfo-topology-00737",
"Invalid Pipeline CreateInfo State: VK_PRIMITIVE_TOPOLOGY_PATCH_LIST primitive topology is only valid "
"for tessellation pipelines.");
- }
- }
-
- if ((pPipeline->graphicsPipelineCI.pInputAssemblyState->primitiveRestartEnable == VK_TRUE) &&
- (pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_POINT_LIST ||
- pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_LINE_LIST ||
- pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST ||
- pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY ||
- pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY ||
- pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_PATCH_LIST)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- HandleToUint64(device), "VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00428",
- "topology is %s and primitiveRestartEnable is VK_TRUE. It is invalid.",
- string_VkPrimitiveTopology(pPipeline->graphicsPipelineCI.pInputAssemblyState->topology));
- }
- if ((enabled_features.core.geometryShader == VK_FALSE) &&
- (pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY ||
- pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY ||
- pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY ||
- pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- HandleToUint64(device), "VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00429",
- "topology is %s and geometry shaders feature is not enabled. It is invalid.",
- string_VkPrimitiveTopology(pPipeline->graphicsPipelineCI.pInputAssemblyState->topology));
- }
- if ((enabled_features.core.tessellationShader == VK_FALSE) &&
- (pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_PATCH_LIST)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- HandleToUint64(device), "VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00430",
- "topology is %s and tessellation shaders feature is not enabled. It is invalid.",
- string_VkPrimitiveTopology(pPipeline->graphicsPipelineCI.pInputAssemblyState->topology));
}
}
// If a rasterization state is provided...
if (pPipeline->graphicsPipelineCI.pRasterizationState) {
if ((pPipeline->graphicsPipelineCI.pRasterizationState->depthClampEnable == VK_TRUE) &&
- (!enabled_features.core.depthClamp)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- HandleToUint64(device), "VUID-VkPipelineRasterizationStateCreateInfo-depthClampEnable-00782",
- "vkCreateGraphicsPipelines(): the depthClamp device feature is disabled: the depthClampEnable member "
- "of the VkPipelineRasterizationStateCreateInfo structure must be set to VK_FALSE.");
+ (!dev_data->enabled_features.core.depthClamp)) {
+ skip |=
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+ HandleToUint64(pPipeline->pipeline), "VUID-VkPipelineRasterizationStateCreateInfo-depthClampEnable-00782",
+ "vkCreateGraphicsPipelines(): the depthClamp device feature is disabled: the depthClampEnable member "
+ "of the VkPipelineRasterizationStateCreateInfo structure must be set to VK_FALSE.");
}
if (!IsDynamic(pPipeline, VK_DYNAMIC_STATE_DEPTH_BIAS) &&
- (pPipeline->graphicsPipelineCI.pRasterizationState->depthBiasClamp != 0.0) && (!enabled_features.core.depthBiasClamp)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- HandleToUint64(device), kVUID_Core_DrawState_InvalidFeature,
+ (pPipeline->graphicsPipelineCI.pRasterizationState->depthBiasClamp != 0.0) &&
+ (!dev_data->enabled_features.core.depthBiasClamp)) {
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+ HandleToUint64(pPipeline->pipeline), kVUID_Core_DrawState_InvalidFeature,
"vkCreateGraphicsPipelines(): the depthBiasClamp device feature is disabled: the depthBiasClamp member "
"of the VkPipelineRasterizationStateCreateInfo structure must be set to 0.0 unless the "
"VK_DYNAMIC_STATE_DEPTH_BIAS dynamic state is enabled");
@@ -1620,30 +1521,32 @@ bool CoreChecks::ValidatePipelineUnlocked(const PIPELINE_STATE *pPipeline, uint3
// If rasterization is enabled...
if (pPipeline->graphicsPipelineCI.pRasterizationState->rasterizerDiscardEnable == VK_FALSE) {
if ((pPipeline->graphicsPipelineCI.pMultisampleState->alphaToOneEnable == VK_TRUE) &&
- (!enabled_features.core.alphaToOne)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- HandleToUint64(device), "VUID-VkPipelineMultisampleStateCreateInfo-alphaToOneEnable-00785",
- "vkCreateGraphicsPipelines(): the alphaToOne device feature is disabled: the alphaToOneEnable "
- "member of the VkPipelineMultisampleStateCreateInfo structure must be set to VK_FALSE.");
+ (!dev_data->enabled_features.core.alphaToOne)) {
+ skip |=
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+ HandleToUint64(pPipeline->pipeline), "VUID-VkPipelineMultisampleStateCreateInfo-alphaToOneEnable-00785",
+ "vkCreateGraphicsPipelines(): the alphaToOne device feature is disabled: the alphaToOneEnable "
+ "member of the VkPipelineMultisampleStateCreateInfo structure must be set to VK_FALSE.");
}
// If subpass uses a depth/stencil attachment, pDepthStencilState must be a pointer to a valid structure
if (subpass_desc && subpass_desc->pDepthStencilAttachment &&
subpass_desc->pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
if (!pPipeline->graphicsPipelineCI.pDepthStencilState) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- HandleToUint64(device), "VUID-VkGraphicsPipelineCreateInfo-rasterizerDiscardEnable-00752",
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+ HandleToUint64(pPipeline->pipeline),
+ "VUID-VkGraphicsPipelineCreateInfo-rasterizerDiscardEnable-00752",
"Invalid Pipeline CreateInfo State: pDepthStencilState is NULL when rasterization is enabled "
"and subpass uses a depth/stencil attachment.");
} else if ((pPipeline->graphicsPipelineCI.pDepthStencilState->depthBoundsTestEnable == VK_TRUE) &&
- (!enabled_features.core.depthBounds)) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- HandleToUint64(device), "VUID-VkPipelineDepthStencilStateCreateInfo-depthBoundsTestEnable-00598",
- "vkCreateGraphicsPipelines(): the depthBounds device feature is disabled: the "
- "depthBoundsTestEnable member of the VkPipelineDepthStencilStateCreateInfo structure must be "
- "set to VK_FALSE.");
+ (!dev_data->enabled_features.core.depthBounds)) {
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+ HandleToUint64(pPipeline->pipeline),
+ "VUID-VkPipelineDepthStencilStateCreateInfo-depthBoundsTestEnable-00598",
+ "vkCreateGraphicsPipelines(): the depthBounds device feature is disabled: the "
+ "depthBoundsTestEnable member of the VkPipelineDepthStencilStateCreateInfo structure must be "
+ "set to VK_FALSE.");
}
}
@@ -1656,8 +1559,9 @@ bool CoreChecks::ValidatePipelineUnlocked(const PIPELINE_STATE *pPipeline, uint3
}
}
if (color_attachment_count > 0 && pPipeline->graphicsPipelineCI.pColorBlendState == nullptr) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- HandleToUint64(device), "VUID-VkGraphicsPipelineCreateInfo-rasterizerDiscardEnable-00753",
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+ HandleToUint64(pPipeline->pipeline),
+ "VUID-VkGraphicsPipelineCreateInfo-rasterizerDiscardEnable-00753",
"Invalid Pipeline CreateInfo State: pColorBlendState is NULL when rasterization is enabled and "
"subpass uses color attachments.");
}
@@ -1666,8 +1570,8 @@ bool CoreChecks::ValidatePipelineUnlocked(const PIPELINE_STATE *pPipeline, uint3
}
if ((pPipeline->active_shaders & VK_SHADER_STAGE_VERTEX_BIT) && !pPipeline->graphicsPipelineCI.pVertexInputState) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
- "VUID-VkGraphicsPipelineCreateInfo-pStages-02097",
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+ HandleToUint64(pPipeline->pipeline), "VUID-VkGraphicsPipelineCreateInfo-pStages-02097",
"Invalid Pipeline CreateInfo State: Missing pVertexInputState.");
}
@@ -1677,11 +1581,11 @@ bool CoreChecks::ValidatePipelineUnlocked(const PIPELINE_STATE *pPipeline, uint3
VkFormat format = vi->pVertexAttributeDescriptions[j].format;
// Internal call to get format info. Still goes through layers, could potentially go directly to ICD.
VkFormatProperties properties;
- DispatchGetPhysicalDeviceFormatProperties(physical_device, format, &properties);
+ dev_data->instance_dispatch_table.GetPhysicalDeviceFormatProperties(dev_data->physical_device, format, &properties);
if ((properties.bufferFeatures & VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT) == 0) {
skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- HandleToUint64(device), "VUID-VkVertexInputAttributeDescription-format-00623",
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ "VUID-VkVertexInputAttributeDescription-format-00623",
"vkCreateGraphicsPipelines: pCreateInfo[%d].pVertexInputState->vertexAttributeDescriptions[%d].format "
"(%s) is not a supported vertex buffer format.",
pipelineIndex, j, string_VkFormat(format));
@@ -1689,185 +1593,176 @@ bool CoreChecks::ValidatePipelineUnlocked(const PIPELINE_STATE *pPipeline, uint3
}
}
- if (pPipeline->graphicsPipelineCI.pMultisampleState) {
- auto accumColorSamples = [subpass_desc, pPipeline](uint32_t &samples) {
- for (uint32_t i = 0; i < subpass_desc->colorAttachmentCount; i++) {
- const auto attachment = subpass_desc->pColorAttachments[i].attachment;
- if (attachment != VK_ATTACHMENT_UNUSED) {
- samples |= static_cast<uint32_t>(pPipeline->rp_state->createInfo.pAttachments[attachment].samples);
- }
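+    // Accumulate (bitwise OR) the sample counts of every color attachment used by this subpass.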
+ auto accumColorSamples = [subpass_desc, pPipeline](uint32_t &samples) {
+ for (uint32_t i = 0; i < subpass_desc->colorAttachmentCount; i++) {
+ const auto attachment = subpass_desc->pColorAttachments[i].attachment;
+ if (attachment != VK_ATTACHMENT_UNUSED) {
+ samples |= static_cast<uint32_t>(pPipeline->rp_state->createInfo.pAttachments[attachment].samples);
}
- };
+ }
+ };
- if (!(device_extensions.vk_amd_mixed_attachment_samples || device_extensions.vk_nv_framebuffer_mixed_samples)) {
- uint32_t raster_samples = static_cast<uint32_t>(GetNumSamples(pPipeline));
- uint32_t subpass_num_samples = 0;
+ if (!(dev_data->device_extensions.vk_amd_mixed_attachment_samples ||
+ dev_data->device_extensions.vk_nv_framebuffer_mixed_samples)) {
+ uint32_t raster_samples = static_cast<uint32_t>(GetNumSamples(pPipeline));
+ uint32_t subpass_num_samples = 0;
- accumColorSamples(subpass_num_samples);
+ accumColorSamples(subpass_num_samples);
- if (subpass_desc->pDepthStencilAttachment &&
- subpass_desc->pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
- const auto attachment = subpass_desc->pDepthStencilAttachment->attachment;
- subpass_num_samples |= static_cast<uint32_t>(pPipeline->rp_state->createInfo.pAttachments[attachment].samples);
- }
+ if (subpass_desc->pDepthStencilAttachment && subpass_desc->pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
+ const auto attachment = subpass_desc->pDepthStencilAttachment->attachment;
+ subpass_num_samples |= static_cast<uint32_t>(pPipeline->rp_state->createInfo.pAttachments[attachment].samples);
+ }
- // subpass_num_samples is 0 when the subpass has no attachments or if all attachments are VK_ATTACHMENT_UNUSED.
- // Only validate the value of subpass_num_samples if the subpass has attachments that are not VK_ATTACHMENT_UNUSED.
- if (subpass_num_samples && (!IsPowerOfTwo(subpass_num_samples) || (subpass_num_samples != raster_samples))) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- HandleToUint64(device), "VUID-VkGraphicsPipelineCreateInfo-subpass-00757",
- "vkCreateGraphicsPipelines: pCreateInfo[%d].pMultisampleState->rasterizationSamples (%u) "
- "does not match the number of samples of the RenderPass color and/or depth attachment.",
- pipelineIndex, raster_samples);
- }
+ // subpass_num_samples is 0 when the subpass has no attachments or if all attachments are VK_ATTACHMENT_UNUSED.
+ // Only validate the value of subpass_num_samples if the subpass has attachments that are not VK_ATTACHMENT_UNUSED.
+ if (subpass_num_samples && (!IsPowerOfTwo(subpass_num_samples) || (subpass_num_samples != raster_samples))) {
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+ HandleToUint64(pPipeline->pipeline), "VUID-VkGraphicsPipelineCreateInfo-subpass-00757",
+ "vkCreateGraphicsPipelines: pCreateInfo[%d].pMultisampleState->rasterizationSamples (%u) "
+ "does not match the number of samples of the RenderPass color and/or depth attachment.",
+ pipelineIndex, raster_samples);
}
+ }
- if (device_extensions.vk_amd_mixed_attachment_samples) {
- VkSampleCountFlagBits max_sample_count = static_cast<VkSampleCountFlagBits>(0);
- for (uint32_t i = 0; i < subpass_desc->colorAttachmentCount; ++i) {
- if (subpass_desc->pColorAttachments[i].attachment != VK_ATTACHMENT_UNUSED) {
- max_sample_count = std::max(
- max_sample_count,
- pPipeline->rp_state->createInfo.pAttachments[subpass_desc->pColorAttachments[i].attachment].samples);
- }
- }
- if (subpass_desc->pDepthStencilAttachment &&
- subpass_desc->pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
- max_sample_count = std::max(
- max_sample_count,
- pPipeline->rp_state->createInfo.pAttachments[subpass_desc->pDepthStencilAttachment->attachment].samples);
- }
- if ((pPipeline->graphicsPipelineCI.pRasterizationState->rasterizerDiscardEnable == VK_FALSE) &&
- (pPipeline->graphicsPipelineCI.pMultisampleState->rasterizationSamples != max_sample_count)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- HandleToUint64(device), "VUID-VkGraphicsPipelineCreateInfo-subpass-01505",
- "vkCreateGraphicsPipelines: pCreateInfo[%d].pMultisampleState->rasterizationSamples (%s) != max "
- "attachment samples (%s) used in subpass %u.",
- pipelineIndex,
- string_VkSampleCountFlagBits(pPipeline->graphicsPipelineCI.pMultisampleState->rasterizationSamples),
- string_VkSampleCountFlagBits(max_sample_count), pPipeline->graphicsPipelineCI.subpass);
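+    // With VK_AMD_mixed_attachment_samples, rasterizationSamples must equal the largest attachment sample count
+    // used by the subpass whenever rasterization is enabled.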
+ if (dev_data->device_extensions.vk_amd_mixed_attachment_samples) {
+ VkSampleCountFlagBits max_sample_count = static_cast<VkSampleCountFlagBits>(0);
+ for (uint32_t i = 0; i < subpass_desc->colorAttachmentCount; ++i) {
+ if (subpass_desc->pColorAttachments[i].attachment != VK_ATTACHMENT_UNUSED) {
+ max_sample_count =
+ std::max(max_sample_count,
+ pPipeline->rp_state->createInfo.pAttachments[subpass_desc->pColorAttachments[i].attachment].samples);
}
}
+ if (subpass_desc->pDepthStencilAttachment && subpass_desc->pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
+ max_sample_count =
+ std::max(max_sample_count,
+ pPipeline->rp_state->createInfo.pAttachments[subpass_desc->pDepthStencilAttachment->attachment].samples);
+ }
+ if ((pPipeline->graphicsPipelineCI.pRasterizationState->rasterizerDiscardEnable == VK_FALSE) &&
+ (pPipeline->graphicsPipelineCI.pMultisampleState->rasterizationSamples != max_sample_count)) {
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+ HandleToUint64(pPipeline->pipeline), "VUID-VkGraphicsPipelineCreateInfo-subpass-01505",
+ "vkCreateGraphicsPipelines: pCreateInfo[%d].pMultisampleState->rasterizationSamples (%s) != max "
+ "attachment samples (%s) used in subpass %u.",
+ pipelineIndex,
+ string_VkSampleCountFlagBits(pPipeline->graphicsPipelineCI.pMultisampleState->rasterizationSamples),
+ string_VkSampleCountFlagBits(max_sample_count), pPipeline->graphicsPipelineCI.subpass);
+ }
+ }
- if (device_extensions.vk_nv_framebuffer_mixed_samples) {
- uint32_t raster_samples = static_cast<uint32_t>(GetNumSamples(pPipeline));
- uint32_t subpass_color_samples = 0;
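+    // With VK_NV_framebuffer_mixed_samples, rasterizationSamples may exceed the color sample count, but the
+    // depth/stencil and coverage modulation constraints checked below must still hold.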
+ if (dev_data->device_extensions.vk_nv_framebuffer_mixed_samples) {
+ uint32_t raster_samples = static_cast<uint32_t>(GetNumSamples(pPipeline));
+ uint32_t subpass_color_samples = 0;
- accumColorSamples(subpass_color_samples);
+ accumColorSamples(subpass_color_samples);
- if (subpass_desc->pDepthStencilAttachment &&
- subpass_desc->pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
- const auto attachment = subpass_desc->pDepthStencilAttachment->attachment;
- const uint32_t subpass_depth_samples =
- static_cast<uint32_t>(pPipeline->rp_state->createInfo.pAttachments[attachment].samples);
-
- if (pPipeline->graphicsPipelineCI.pDepthStencilState) {
- const bool ds_test_enabled =
- (pPipeline->graphicsPipelineCI.pDepthStencilState->depthTestEnable == VK_TRUE) ||
- (pPipeline->graphicsPipelineCI.pDepthStencilState->depthBoundsTestEnable == VK_TRUE) ||
- (pPipeline->graphicsPipelineCI.pDepthStencilState->stencilTestEnable == VK_TRUE);
-
- if (ds_test_enabled && (!IsPowerOfTwo(subpass_depth_samples) || (raster_samples != subpass_depth_samples))) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- HandleToUint64(device), "VUID-VkGraphicsPipelineCreateInfo-subpass-01411",
- "vkCreateGraphicsPipelines: pCreateInfo[%d].pMultisampleState->rasterizationSamples (%u) "
- "does not match the number of samples of the RenderPass depth attachment (%u).",
- pipelineIndex, raster_samples, subpass_depth_samples);
- }
- }
- }
+ if (subpass_desc->pDepthStencilAttachment && subpass_desc->pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
+ const auto attachment = subpass_desc->pDepthStencilAttachment->attachment;
+ const uint32_t subpass_depth_samples =
+ static_cast<uint32_t>(pPipeline->rp_state->createInfo.pAttachments[attachment].samples);
+
+ if (pPipeline->graphicsPipelineCI.pDepthStencilState) {
+ const bool ds_test_enabled = (pPipeline->graphicsPipelineCI.pDepthStencilState->depthTestEnable == VK_TRUE) ||
+ (pPipeline->graphicsPipelineCI.pDepthStencilState->depthBoundsTestEnable == VK_TRUE) ||
+ (pPipeline->graphicsPipelineCI.pDepthStencilState->stencilTestEnable == VK_TRUE);
- if (IsPowerOfTwo(subpass_color_samples)) {
- if (raster_samples < subpass_color_samples) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- HandleToUint64(device), "VUID-VkGraphicsPipelineCreateInfo-subpass-01412",
+ if (ds_test_enabled && (!IsPowerOfTwo(subpass_depth_samples) || (raster_samples != subpass_depth_samples))) {
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+ HandleToUint64(pPipeline->pipeline), "VUID-VkGraphicsPipelineCreateInfo-subpass-01411",
"vkCreateGraphicsPipelines: pCreateInfo[%d].pMultisampleState->rasterizationSamples (%u) "
- "is not greater or equal to the number of samples of the RenderPass color attachment (%u).",
- pipelineIndex, raster_samples, subpass_color_samples);
+ "does not match the number of samples of the RenderPass depth attachment (%u).",
+ pipelineIndex, raster_samples, subpass_depth_samples);
}
+ }
+ }
- if (pPipeline->graphicsPipelineCI.pMultisampleState) {
- if ((raster_samples > subpass_color_samples) &&
- (pPipeline->graphicsPipelineCI.pMultisampleState->sampleShadingEnable == VK_TRUE)) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- HandleToUint64(device), "VUID-VkPipelineMultisampleStateCreateInfo-rasterizationSamples-01415",
- "vkCreateGraphicsPipelines: pCreateInfo[%d].pMultisampleState->sampleShadingEnable must be "
- "VK_FALSE when "
- "pCreateInfo[%d].pMultisampleState->rasterizationSamples (%u) is greater than the number of "
- "samples of the "
- "subpass color attachment (%u).",
- pipelineIndex, pipelineIndex, raster_samples, subpass_color_samples);
- }
+ if (IsPowerOfTwo(subpass_color_samples)) {
+ if (raster_samples < subpass_color_samples) {
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+ HandleToUint64(pPipeline->pipeline), "VUID-VkGraphicsPipelineCreateInfo-subpass-01412",
+ "vkCreateGraphicsPipelines: pCreateInfo[%d].pMultisampleState->rasterizationSamples (%u) "
+                                "is not greater than or equal to the number of samples of the RenderPass color attachment (%u).",
+ pipelineIndex, raster_samples, subpass_color_samples);
+ }
+
+ if (pPipeline->graphicsPipelineCI.pMultisampleState) {
+ if ((raster_samples > subpass_color_samples) &&
+ (pPipeline->graphicsPipelineCI.pMultisampleState->sampleShadingEnable == VK_TRUE)) {
+ skip |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+ HandleToUint64(pPipeline->pipeline), "VUID-VkPipelineMultisampleStateCreateInfo-rasterizationSamples-01415",
+ "vkCreateGraphicsPipelines: pCreateInfo[%d].pMultisampleState->sampleShadingEnable must be VK_FALSE when "
+ "pCreateInfo[%d].pMultisampleState->rasterizationSamples (%u) is greater than the number of samples of the "
+ "subpass color attachment (%u).",
+ pipelineIndex, pipelineIndex, raster_samples, subpass_color_samples);
+ }
- const auto *coverage_modulation_state = lvl_find_in_chain<VkPipelineCoverageModulationStateCreateInfoNV>(
- pPipeline->graphicsPipelineCI.pMultisampleState->pNext);
+ const auto *coverage_modulation_state = lvl_find_in_chain<VkPipelineCoverageModulationStateCreateInfoNV>(
+ pPipeline->graphicsPipelineCI.pMultisampleState->pNext);
- if (coverage_modulation_state && (coverage_modulation_state->coverageModulationTableEnable == VK_TRUE)) {
- if (coverage_modulation_state->coverageModulationTableCount != (raster_samples / subpass_color_samples)) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- HandleToUint64(device),
+ if (coverage_modulation_state && (coverage_modulation_state->coverageModulationTableEnable == VK_TRUE)) {
+ if (coverage_modulation_state->coverageModulationTableCount != (raster_samples / subpass_color_samples)) {
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, HandleToUint64(pPipeline->pipeline),
"VUID-VkPipelineCoverageModulationStateCreateInfoNV-coverageModulationTableEnable-01405",
"vkCreateGraphicsPipelines: pCreateInfos[%d] VkPipelineCoverageModulationStateCreateInfoNV "
"coverageModulationTableCount of %u is invalid.",
pipelineIndex, coverage_modulation_state->coverageModulationTableCount);
- }
}
}
}
}
+ }
- if (device_extensions.vk_nv_fragment_coverage_to_color) {
- const auto coverage_to_color_state =
- lvl_find_in_chain<VkPipelineCoverageToColorStateCreateInfoNV>(pPipeline->graphicsPipelineCI.pMultisampleState);
-
- if (coverage_to_color_state && coverage_to_color_state->coverageToColorEnable == VK_TRUE) {
- bool attachment_is_valid = false;
- std::string error_detail;
-
- if (coverage_to_color_state->coverageToColorLocation < subpass_desc->colorAttachmentCount) {
- const auto color_attachment_ref =
- subpass_desc->pColorAttachments[coverage_to_color_state->coverageToColorLocation];
- if (color_attachment_ref.attachment != VK_ATTACHMENT_UNUSED) {
- const auto color_attachment = pPipeline->rp_state->createInfo.pAttachments[color_attachment_ref.attachment];
-
- switch (color_attachment.format) {
- case VK_FORMAT_R8_UINT:
- case VK_FORMAT_R8_SINT:
- case VK_FORMAT_R16_UINT:
- case VK_FORMAT_R16_SINT:
- case VK_FORMAT_R32_UINT:
- case VK_FORMAT_R32_SINT:
- attachment_is_valid = true;
- break;
- default:
- string_sprintf(&error_detail, "references an attachment with an invalid format (%s).",
- string_VkFormat(color_attachment.format));
- break;
- }
- } else {
- string_sprintf(&error_detail,
- "references an invalid attachment. The subpass pColorAttachments[%" PRIu32
- "].attachment has the value "
- "VK_ATTACHMENT_UNUSED.",
- coverage_to_color_state->coverageToColorLocation);
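+    // VK_NV_fragment_coverage_to_color: coverageToColorLocation must reference an existing color attachment
+    // whose format is a single-component integer format (R8/R16/R32 UINT or SINT).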
+ if (dev_data->device_extensions.vk_nv_fragment_coverage_to_color) {
+ const auto coverage_to_color_state =
+ lvl_find_in_chain<VkPipelineCoverageToColorStateCreateInfoNV>(pPipeline->graphicsPipelineCI.pMultisampleState);
+
+ if (coverage_to_color_state && coverage_to_color_state->coverageToColorEnable == VK_TRUE) {
+ bool attachment_is_valid = false;
+ std::string error_detail;
+
+ if (coverage_to_color_state->coverageToColorLocation < subpass_desc->colorAttachmentCount) {
+ const auto color_attachment_ref = subpass_desc->pColorAttachments[coverage_to_color_state->coverageToColorLocation];
+ if (color_attachment_ref.attachment != VK_ATTACHMENT_UNUSED) {
+ const auto color_attachment = pPipeline->rp_state->createInfo.pAttachments[color_attachment_ref.attachment];
+
+ switch (color_attachment.format) {
+ case VK_FORMAT_R8_UINT:
+ case VK_FORMAT_R8_SINT:
+ case VK_FORMAT_R16_UINT:
+ case VK_FORMAT_R16_SINT:
+ case VK_FORMAT_R32_UINT:
+ case VK_FORMAT_R32_SINT:
+ attachment_is_valid = true;
+ break;
+ default:
+ string_sprintf(&error_detail, "references an attachment with an invalid format (%s).",
+ string_VkFormat(color_attachment.format));
+ break;
}
} else {
string_sprintf(&error_detail,
- "references an non-existing attachment since the subpass colorAttachmentCount is %" PRIu32 ".",
- subpass_desc->colorAttachmentCount);
+ "references an invalid attachment. The subpass pColorAttachments[%" PRIu32
+ "].attachment has the value "
+ "VK_ATTACHMENT_UNUSED.",
+ coverage_to_color_state->coverageToColorLocation);
}
+ } else {
+ string_sprintf(&error_detail,
+                               "references a non-existent attachment since the subpass colorAttachmentCount is %" PRIu32 ".",
+ subpass_desc->colorAttachmentCount);
+ }
- if (!attachment_is_valid) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- HandleToUint64(device),
- "VUID-VkPipelineCoverageToColorStateCreateInfoNV-coverageToColorEnable-01404",
- "vkCreateGraphicsPipelines: pCreateInfos[%" PRId32
- "].pMultisampleState VkPipelineCoverageToColorStateCreateInfoNV "
- "coverageToColorLocation = %" PRIu32 " %s",
- pipelineIndex, coverage_to_color_state->coverageToColorLocation, error_detail.c_str());
- }
+ if (!attachment_is_valid) {
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+ HandleToUint64(pPipeline->pipeline),
+ "VUID-VkPipelineCoverageToColorStateCreateInfoNV-coverageToColorEnable-01404",
+ "vkCreateGraphicsPipelines: pCreateInfos[%" PRId32
+ "].pMultisampleState VkPipelineCoverageToColorStateCreateInfoNV "
+ "coverageToColorLocation = %" PRIu32 " %s",
+ pipelineIndex, coverage_to_color_state->coverageToColorLocation, error_detail.c_str());
}
}
}
@@ -1877,69 +1772,89 @@ bool CoreChecks::ValidatePipelineUnlocked(const PIPELINE_STATE *pPipeline, uint3
// Block of code at start here specifically for managing/tracking DSs
+// Return Pool node ptr for specified pool or else NULL
+DESCRIPTOR_POOL_STATE *CoreChecks::GetDescriptorPoolState(const VkDescriptorPool pool) {
+ auto pool_it = descriptorPoolMap.find(pool);
+ if (pool_it == descriptorPoolMap.end()) {
+ return NULL;
+ }
+ return pool_it->second;
+}
+
// Validate that given set is valid and that it's not being used by an in-flight CmdBuffer
// func_str is the name of the calling function
// Return false if no errors occur
// Return true if validation error occurs and callback returns true (to skip upcoming API call down the chain)
-bool CoreChecks::ValidateIdleDescriptorSet(VkDescriptorSet set, const char *func_str) {
- if (disabled.idle_descriptor_set) return false;
+bool CoreChecks::ValidateIdleDescriptorSet(const layer_data *dev_data, VkDescriptorSet set, const char *func_str) {
+ if (dev_data->disabled.idle_descriptor_set) return false;
bool skip = false;
- auto set_node = setMap.find(set);
- if (set_node == setMap.end()) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+ auto set_node = dev_data->setMap.find(set);
+ if (set_node == dev_data->setMap.end()) {
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
HandleToUint64(set), kVUID_Core_DrawState_DoubleDestroy,
- "Cannot call %s() on %s that has not been allocated.", func_str, report_data->FormatHandle(set).c_str());
+ "Cannot call %s() on descriptor set %s that has not been allocated.", func_str,
+ dev_data->report_data->FormatHandle(set).c_str());
} else {
// TODO : This covers various error cases so should pass error enum into this function and use passed in enum here
if (set_node->second->in_use.load()) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
HandleToUint64(set), "VUID-vkFreeDescriptorSets-pDescriptorSets-00309",
- "Cannot call %s() on %s that is in use by a command buffer.", func_str,
- report_data->FormatHandle(set).c_str());
+ "Cannot call %s() on descriptor set %s that is in use by a command buffer.", func_str,
+ dev_data->report_data->FormatHandle(set).c_str());
}
}
return skip;
}
// Remove set from setMap and delete the set
-void ValidationStateTracker::FreeDescriptorSet(cvdescriptorset::DescriptorSet *descriptor_set) {
- setMap.erase(descriptor_set->GetSet());
+void CoreChecks::FreeDescriptorSet(layer_data *dev_data, cvdescriptorset::DescriptorSet *descriptor_set) {
+ dev_data->setMap.erase(descriptor_set->GetSet());
+ delete descriptor_set;
}
-
// Free all DS Pools including their Sets & related sub-structs
 // NOTE : Calls to this function should be wrapped in a mutex
-void ValidationStateTracker::DeleteDescriptorSetPools() {
- for (auto ii = descriptorPoolMap.begin(); ii != descriptorPoolMap.end();) {
+void CoreChecks::DeletePools(layer_data *dev_data) {
+ for (auto ii = dev_data->descriptorPoolMap.begin(); ii != dev_data->descriptorPoolMap.end();) {
         // Remove this pool's sets from setMap and delete them
for (auto ds : ii->second->sets) {
- FreeDescriptorSet(ds);
+ FreeDescriptorSet(dev_data, ds);
}
ii->second->sets.clear();
- ii = descriptorPoolMap.erase(ii);
+ delete ii->second;
+ ii = dev_data->descriptorPoolMap.erase(ii);
+ }
+}
+
+// For given CB object, fetch associated CB Node from map
+GLOBAL_CB_NODE *CoreChecks::GetCBNode(const VkCommandBuffer cb) {
+ auto it = commandBufferMap.find(cb);
+ if (it == commandBufferMap.end()) {
+ return NULL;
}
+ return it->second;
}
// If a renderpass is active, verify that the given command type is appropriate for current subpass state
-bool CoreChecks::ValidateCmdSubpassState(const CMD_BUFFER_STATE *pCB, const CMD_TYPE cmd_type) const {
+bool CoreChecks::ValidateCmdSubpassState(const layer_data *dev_data, const GLOBAL_CB_NODE *pCB, const CMD_TYPE cmd_type) {
if (!pCB->activeRenderPass) return false;
bool skip = false;
if (pCB->activeSubpassContents == VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS &&
(cmd_type != CMD_EXECUTECOMMANDS && cmd_type != CMD_NEXTSUBPASS && cmd_type != CMD_ENDRENDERPASS &&
cmd_type != CMD_NEXTSUBPASS2KHR && cmd_type != CMD_ENDRENDERPASS2KHR)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(pCB->commandBuffer), kVUID_Core_DrawState_InvalidCommandBuffer,
"Commands cannot be called in a subpass using secondary command buffers.");
} else if (pCB->activeSubpassContents == VK_SUBPASS_CONTENTS_INLINE && cmd_type == CMD_EXECUTECOMMANDS) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(pCB->commandBuffer), kVUID_Core_DrawState_InvalidCommandBuffer,
"vkCmdExecuteCommands() cannot be called in a subpass using inline commands.");
}
return skip;
}
-bool CoreChecks::ValidateCmdQueueFlags(const CMD_BUFFER_STATE *cb_node, const char *caller_name, VkQueueFlags required_flags,
- const char *error_code) const {
- auto pool = GetCommandPoolState(cb_node->createInfo.commandPool);
+bool CoreChecks::ValidateCmdQueueFlags(layer_data *dev_data, const GLOBAL_CB_NODE *cb_node, const char *caller_name,
+ VkQueueFlags required_flags, const char *error_code) {
+ auto pool = GetCommandPoolNode(cb_node->createInfo.commandPool);
if (pool) {
VkQueueFlags queue_flags = GetPhysicalDeviceState()->queue_family_properties[pool->queueFamilyIndex].queueFlags;
if (!(required_flags & queue_flags)) {
@@ -1952,7 +1867,7 @@ bool CoreChecks::ValidateCmdQueueFlags(const CMD_BUFFER_STATE *cb_node, const ch
required_flags_string += string_VkQueueFlagBits(flag);
}
}
- return log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ return log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(cb_node->commandBuffer), error_code,
"Cannot call %s on a command buffer allocated from a pool without %s capabilities..", caller_name,
required_flags_string.c_str());
@@ -1961,159 +1876,192 @@ bool CoreChecks::ValidateCmdQueueFlags(const CMD_BUFFER_STATE *cb_node, const ch
return false;
}
-static char const *GetCauseStr(VulkanTypedHandle obj) {
+static char const *GetCauseStr(VK_OBJECT obj) {
if (obj.type == kVulkanObjectTypeDescriptorSet) return "destroyed or updated";
if (obj.type == kVulkanObjectTypeCommandBuffer) return "destroyed or rerecorded";
return "destroyed";
}
-bool CoreChecks::ReportInvalidCommandBuffer(const CMD_BUFFER_STATE *cb_state, const char *call_source) const {
+bool CoreChecks::ReportInvalidCommandBuffer(layer_data *dev_data, const GLOBAL_CB_NODE *cb_state, const char *call_source) {
bool skip = false;
for (auto obj : cb_state->broken_bindings) {
+ const char *type_str = object_string[obj.type];
const char *cause_str = GetCauseStr(obj);
- string VUID;
- string_sprintf(&VUID, "%s-%s", kVUID_Core_DrawState_InvalidCommandBuffer, object_string[obj.type]);
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(cb_state->commandBuffer), VUID.c_str(),
- "You are adding %s to %s that is invalid because bound %s was %s.", call_source,
- report_data->FormatHandle(cb_state->commandBuffer).c_str(), report_data->FormatHandle(obj).c_str(), cause_str);
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ HandleToUint64(cb_state->commandBuffer), kVUID_Core_DrawState_InvalidCommandBuffer,
+ "You are adding %s to command buffer %s that is invalid because bound %s %s was %s.", call_source,
+ dev_data->report_data->FormatHandle(cb_state->commandBuffer).c_str(), type_str,
+ dev_data->report_data->FormatHandle(obj.handle).c_str(), cause_str);
}
return skip;
}
// 'commandBuffer must be in the recording state' valid usage error code for each command
-// Autogenerated as part of the vk_validation_error_message.h codegen
-static const std::array<const char *, CMD_RANGE_SIZE> must_be_recording_list = {{VUID_MUST_BE_RECORDING_LIST}};
+// Note: grepping for ^^^^^^^^^ in vk_validation_database yields output that is easily massaged into the following list
+// Note: C++11 doesn't automatically devolve enum types to the underlying type for hash traits purposes (fixed in C++14)
+using CmdTypeHashType = std::underlying_type<CMD_TYPE>::type;
+static const std::unordered_map<CmdTypeHashType, std::string> must_be_recording_map = {
+ {CMD_NONE, kVUIDUndefined}, // UNMATCHED
+ {CMD_BEGINQUERY, "VUID-vkCmdBeginQuery-commandBuffer-recording"},
+ {CMD_BEGINRENDERPASS, "VUID-vkCmdBeginRenderPass-commandBuffer-recording"},
+ {CMD_BEGINRENDERPASS2KHR, "VUID-vkCmdBeginRenderPass2KHR-commandBuffer-recording"},
+ {CMD_BINDDESCRIPTORSETS, "VUID-vkCmdBindDescriptorSets-commandBuffer-recording"},
+ {CMD_BINDINDEXBUFFER, "VUID-vkCmdBindIndexBuffer-commandBuffer-recording"},
+ {CMD_BINDPIPELINE, "VUID-vkCmdBindPipeline-commandBuffer-recording"},
+ {CMD_BINDSHADINGRATEIMAGE, "VUID-vkCmdBindShadingRateImageNV-commandBuffer-recording"},
+ {CMD_BINDVERTEXBUFFERS, "VUID-vkCmdBindVertexBuffers-commandBuffer-recording"},
+ {CMD_BLITIMAGE, "VUID-vkCmdBlitImage-commandBuffer-recording"},
+ {CMD_CLEARATTACHMENTS, "VUID-vkCmdClearAttachments-commandBuffer-recording"},
+ {CMD_CLEARCOLORIMAGE, "VUID-vkCmdClearColorImage-commandBuffer-recording"},
+ {CMD_CLEARDEPTHSTENCILIMAGE, "VUID-vkCmdClearDepthStencilImage-commandBuffer-recording"},
+ {CMD_COPYBUFFER, "VUID-vkCmdCopyBuffer-commandBuffer-recording"},
+ {CMD_COPYBUFFERTOIMAGE, "VUID-vkCmdCopyBufferToImage-commandBuffer-recording"},
+ {CMD_COPYIMAGE, "VUID-vkCmdCopyImage-commandBuffer-recording"},
+ {CMD_COPYIMAGETOBUFFER, "VUID-vkCmdCopyImageToBuffer-commandBuffer-recording"},
+ {CMD_COPYQUERYPOOLRESULTS, "VUID-vkCmdCopyQueryPoolResults-commandBuffer-recording"},
+ {CMD_DEBUGMARKERBEGINEXT, "VUID-vkCmdDebugMarkerBeginEXT-commandBuffer-recording"},
+ {CMD_DEBUGMARKERENDEXT, "VUID-vkCmdDebugMarkerEndEXT-commandBuffer-recording"},
+ {CMD_DEBUGMARKERINSERTEXT, "VUID-vkCmdDebugMarkerInsertEXT-commandBuffer-recording"},
+ {CMD_DISPATCH, "VUID-vkCmdDispatch-commandBuffer-recording"},
+ // Exclude KHX (if not already present) { CMD_DISPATCHBASEKHX, "VUID-vkCmdDispatchBase-commandBuffer-recording" },
+ {CMD_DISPATCHINDIRECT, "VUID-vkCmdDispatchIndirect-commandBuffer-recording"},
+ {CMD_DRAW, "VUID-vkCmdDraw-commandBuffer-recording"},
+ {CMD_DRAWINDEXED, "VUID-vkCmdDrawIndexed-commandBuffer-recording"},
+ {CMD_DRAWINDEXEDINDIRECT, "VUID-vkCmdDrawIndexedIndirect-commandBuffer-recording"},
+ // Exclude vendor ext (if not already present) { CMD_DRAWINDEXEDINDIRECTCOUNTAMD,
+ // "VUID-vkCmdDrawIndexedIndirectCountAMD-commandBuffer-recording" },
+ {CMD_DRAWINDEXEDINDIRECTCOUNTKHR, "VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-recording"},
+ {CMD_DRAWINDIRECT, "VUID-vkCmdDrawIndirect-commandBuffer-recording"},
+ // Exclude vendor ext (if not already present) { CMD_DRAWINDIRECTCOUNTAMD,
+ // "VUID-vkCmdDrawIndirectCountAMD-commandBuffer-recording" },
+ {CMD_DRAWINDIRECTCOUNTKHR, "VUID-vkCmdDrawIndirectCountKHR-commandBuffer-recording"},
+ {CMD_DRAWMESHTASKSNV, "VUID-vkCmdDrawMeshTasksNV-commandBuffer-recording"},
+ {CMD_DRAWMESHTASKSINDIRECTNV, "VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-recording"},
+ {CMD_DRAWMESHTASKSINDIRECTCOUNTNV, "VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-recording"},
+ {CMD_ENDCOMMANDBUFFER, "VUID-vkEndCommandBuffer-commandBuffer-00059"},
+ {CMD_ENDQUERY, "VUID-vkCmdEndQuery-commandBuffer-recording"},
+ {CMD_ENDRENDERPASS, "VUID-vkCmdEndRenderPass-commandBuffer-recording"},
+ {CMD_ENDRENDERPASS2KHR, "VUID-vkCmdEndRenderPass2KHR-commandBuffer-recording"},
+ {CMD_EXECUTECOMMANDS, "VUID-vkCmdExecuteCommands-commandBuffer-recording"},
+ {CMD_FILLBUFFER, "VUID-vkCmdFillBuffer-commandBuffer-recording"},
+ {CMD_NEXTSUBPASS, "VUID-vkCmdNextSubpass-commandBuffer-recording"},
+ {CMD_NEXTSUBPASS2KHR, "VUID-vkCmdNextSubpass2KHR-commandBuffer-recording"},
+ {CMD_PIPELINEBARRIER, "VUID-vkCmdPipelineBarrier-commandBuffer-recording"},
+ // Exclude vendor ext (if not already present) { CMD_PROCESSCOMMANDSNVX, "VUID-vkCmdProcessCommandsNVX-commandBuffer-recording"
+ // },
+ {CMD_PUSHCONSTANTS, "VUID-vkCmdPushConstants-commandBuffer-recording"},
+ {CMD_PUSHDESCRIPTORSETKHR, "VUID-vkCmdPushDescriptorSetKHR-commandBuffer-recording"},
+ {CMD_PUSHDESCRIPTORSETWITHTEMPLATEKHR, "VUID-vkCmdPushDescriptorSetWithTemplateKHR-commandBuffer-recording"},
+ // Exclude vendor ext (if not already present) { CMD_RESERVESPACEFORCOMMANDSNVX,
+ // "VUID-vkCmdReserveSpaceForCommandsNVX-commandBuffer-recording" },
+ {CMD_RESETEVENT, "VUID-vkCmdResetEvent-commandBuffer-recording"},
+ {CMD_RESETQUERYPOOL, "VUID-vkCmdResetQueryPool-commandBuffer-recording"},
+ {CMD_RESOLVEIMAGE, "VUID-vkCmdResolveImage-commandBuffer-recording"},
+ {CMD_SETBLENDCONSTANTS, "VUID-vkCmdSetBlendConstants-commandBuffer-recording"},
+ {CMD_SETDEPTHBIAS, "VUID-vkCmdSetDepthBias-commandBuffer-recording"},
+ {CMD_SETDEPTHBOUNDS, "VUID-vkCmdSetDepthBounds-commandBuffer-recording"},
+ // Exclude KHX (if not already present) { CMD_SETDEVICEMASKKHX, "VUID-vkCmdSetDeviceMask-commandBuffer-recording" },
+ {CMD_SETDISCARDRECTANGLEEXT, "VUID-vkCmdSetDiscardRectangleEXT-commandBuffer-recording"},
+ {CMD_SETEVENT, "VUID-vkCmdSetEvent-commandBuffer-recording"},
+ {CMD_SETEXCLUSIVESCISSOR, "VUID-vkCmdSetExclusiveScissorNV-commandBuffer-recording"},
+ {CMD_SETLINEWIDTH, "VUID-vkCmdSetLineWidth-commandBuffer-recording"},
+ {CMD_SETSAMPLELOCATIONSEXT, "VUID-vkCmdSetSampleLocationsEXT-commandBuffer-recording"},
+ {CMD_SETSCISSOR, "VUID-vkCmdSetScissor-commandBuffer-recording"},
+ {CMD_SETSTENCILCOMPAREMASK, "VUID-vkCmdSetStencilCompareMask-commandBuffer-recording"},
+ {CMD_SETSTENCILREFERENCE, "VUID-vkCmdSetStencilReference-commandBuffer-recording"},
+ {CMD_SETSTENCILWRITEMASK, "VUID-vkCmdSetStencilWriteMask-commandBuffer-recording"},
+ {CMD_SETVIEWPORT, "VUID-vkCmdSetViewport-commandBuffer-recording"},
+ {CMD_SETVIEWPORTSHADINGRATEPALETTE, "VUID-vkCmdSetViewportShadingRatePaletteNV-commandBuffer-recording"},
+ // Exclude vendor ext (if not already present) { CMD_SETVIEWPORTWSCALINGNV,
+ // "VUID-vkCmdSetViewportWScalingNV-commandBuffer-recording" },
+ {CMD_UPDATEBUFFER, "VUID-vkCmdUpdateBuffer-commandBuffer-recording"},
+ {CMD_WAITEVENTS, "VUID-vkCmdWaitEvents-commandBuffer-recording"},
+ {CMD_WRITETIMESTAMP, "VUID-vkCmdWriteTimestamp-commandBuffer-recording"},
+};
// Validate the given command being added to the specified cmd buffer, flagging errors if CB is not in the recording state or if
// there's an issue with the Cmd ordering
-bool CoreChecks::ValidateCmd(const CMD_BUFFER_STATE *cb_state, const CMD_TYPE cmd, const char *caller_name) const {
+bool CoreChecks::ValidateCmd(layer_data *dev_data, const GLOBAL_CB_NODE *cb_state, const CMD_TYPE cmd, const char *caller_name) {
switch (cb_state->state) {
case CB_RECORDING:
- return ValidateCmdSubpassState(cb_state, cmd);
+ return ValidateCmdSubpassState(dev_data, cb_state, cmd);
case CB_INVALID_COMPLETE:
case CB_INVALID_INCOMPLETE:
- return ReportInvalidCommandBuffer(cb_state, caller_name);
+ return ReportInvalidCommandBuffer(dev_data, cb_state, caller_name);
default:
- assert(cmd != CMD_NONE);
- const auto error = must_be_recording_list[cmd];
- return log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ auto error_it = must_be_recording_map.find(cmd);
+ // This assert lets us know that a vkCmd.* entrypoint has been added without enabling it in the map
+ assert(error_it != must_be_recording_map.cend());
+ if (error_it == must_be_recording_map.cend()) {
+ error_it = must_be_recording_map.find(CMD_NONE); // But we'll handle the asserting case, in case of a test gap
+ }
+ const auto error = error_it->second;
+ return log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(cb_state->commandBuffer), error,
"You must call vkBeginCommandBuffer() before this call to %s.", caller_name);
}
}
-bool CoreChecks::ValidateDeviceMaskToPhysicalDeviceCount(uint32_t deviceMask, VkDebugReportObjectTypeEXT VUID_handle_type,
- uint64_t VUID_handle, const char *VUID) const {
- bool skip = false;
- uint32_t count = 1 << physical_device_count;
- if (count <= deviceMask) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VUID_handle_type, VUID_handle, VUID,
- "deviceMask(0x%" PRIx32 ") is invaild. Physical device count is %" PRIu32 ".", deviceMask,
- physical_device_count);
- }
- return skip;
-}
-
-bool CoreChecks::ValidateDeviceMaskToZero(uint32_t deviceMask, VkDebugReportObjectTypeEXT VUID_handle_type, uint64_t VUID_handle,
- const char *VUID) const {
- bool skip = false;
- if (deviceMask == 0) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VUID_handle_type, VUID_handle, VUID,
- "deviceMask(0x%" PRIx32 ") must be non-zero.", deviceMask);
- }
- return skip;
-}
-
-bool CoreChecks::ValidateDeviceMaskToCommandBuffer(const CMD_BUFFER_STATE *pCB, uint32_t deviceMask,
- VkDebugReportObjectTypeEXT VUID_handle_type, uint64_t VUID_handle,
- const char *VUID) const {
- bool skip = false;
- if ((deviceMask & pCB->initial_device_mask) != deviceMask) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VUID_handle_type, VUID_handle, VUID,
- "deviceMask(0x%" PRIx32 ") is not a subset of %s initial device mask(0x%" PRIx32 ").", deviceMask,
- report_data->FormatHandle(pCB->commandBuffer).c_str(), pCB->initial_device_mask);
- }
- return skip;
-}
-
-bool CoreChecks::ValidateDeviceMaskToRenderPass(const CMD_BUFFER_STATE *pCB, uint32_t deviceMask,
- VkDebugReportObjectTypeEXT VUID_handle_type, uint64_t VUID_handle,
- const char *VUID) {
- bool skip = false;
- if ((deviceMask & pCB->active_render_pass_device_mask) != deviceMask) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VUID_handle_type, VUID_handle, VUID,
- "deviceMask(0x%" PRIx32 ") is not a subset of %s device mask(0x%" PRIx32 ").", deviceMask,
- report_data->FormatHandle(pCB->activeRenderPass->renderPass).c_str(), pCB->active_render_pass_device_mask);
- }
- return skip;
-}
-
// For given object struct return a ptr of BASE_NODE type for its wrapping struct
-BASE_NODE *ValidationStateTracker::GetStateStructPtrFromObject(const VulkanTypedHandle &object_struct) {
+BASE_NODE *CoreChecks::GetStateStructPtrFromObject(layer_data *dev_data, VK_OBJECT object_struct) {
BASE_NODE *base_ptr = nullptr;
switch (object_struct.type) {
case kVulkanObjectTypeDescriptorSet: {
- base_ptr = GetSetNode(object_struct.Cast<VkDescriptorSet>());
+ base_ptr = GetSetNode(reinterpret_cast<VkDescriptorSet &>(object_struct.handle));
break;
}
case kVulkanObjectTypeSampler: {
- base_ptr = GetSamplerState(object_struct.Cast<VkSampler>());
+ base_ptr = GetSamplerState(reinterpret_cast<VkSampler &>(object_struct.handle));
break;
}
case kVulkanObjectTypeQueryPool: {
- base_ptr = GetQueryPoolState(object_struct.Cast<VkQueryPool>());
+ base_ptr = GetQueryPoolNode(reinterpret_cast<VkQueryPool &>(object_struct.handle));
break;
}
case kVulkanObjectTypePipeline: {
- base_ptr = GetPipelineState(object_struct.Cast<VkPipeline>());
+ base_ptr = GetPipelineState(reinterpret_cast<VkPipeline &>(object_struct.handle));
break;
}
case kVulkanObjectTypeBuffer: {
- base_ptr = GetBufferState(object_struct.Cast<VkBuffer>());
+ base_ptr = GetBufferState(reinterpret_cast<VkBuffer &>(object_struct.handle));
break;
}
case kVulkanObjectTypeBufferView: {
- base_ptr = GetBufferViewState(object_struct.Cast<VkBufferView>());
+ base_ptr = GetBufferViewState(reinterpret_cast<VkBufferView &>(object_struct.handle));
break;
}
case kVulkanObjectTypeImage: {
- base_ptr = GetImageState(object_struct.Cast<VkImage>());
+ base_ptr = GetImageState(reinterpret_cast<VkImage &>(object_struct.handle));
break;
}
case kVulkanObjectTypeImageView: {
- base_ptr = GetImageViewState(object_struct.Cast<VkImageView>());
+ base_ptr = GetImageViewState(reinterpret_cast<VkImageView &>(object_struct.handle));
break;
}
case kVulkanObjectTypeEvent: {
- base_ptr = GetEventState(object_struct.Cast<VkEvent>());
+ base_ptr = GetEventNode(reinterpret_cast<VkEvent &>(object_struct.handle));
break;
}
case kVulkanObjectTypeDescriptorPool: {
- base_ptr = GetDescriptorPoolState(object_struct.Cast<VkDescriptorPool>());
+ base_ptr = GetDescriptorPoolState(reinterpret_cast<VkDescriptorPool &>(object_struct.handle));
break;
}
case kVulkanObjectTypeCommandPool: {
- base_ptr = GetCommandPoolState(object_struct.Cast<VkCommandPool>());
+ base_ptr = GetCommandPoolNode(reinterpret_cast<VkCommandPool &>(object_struct.handle));
break;
}
case kVulkanObjectTypeFramebuffer: {
- base_ptr = GetFramebufferState(object_struct.Cast<VkFramebuffer>());
+ base_ptr = GetFramebufferState(reinterpret_cast<VkFramebuffer &>(object_struct.handle));
break;
}
case kVulkanObjectTypeRenderPass: {
- base_ptr = GetRenderPassState(object_struct.Cast<VkRenderPass>());
+ base_ptr = GetRenderPassState(reinterpret_cast<VkRenderPass &>(object_struct.handle));
break;
}
case kVulkanObjectTypeDeviceMemory: {
- base_ptr = GetDevMemState(object_struct.Cast<VkDeviceMemory>());
- break;
- }
- case kVulkanObjectTypeAccelerationStructureNV: {
- base_ptr = GetAccelerationStructureState(object_struct.Cast<VkAccelerationStructureNV>());
+ base_ptr = GetMemObjInfo(reinterpret_cast<VkDeviceMemory &>(object_struct.handle));
break;
}
default:
@@ -2124,26 +2072,22 @@ BASE_NODE *ValidationStateTracker::GetStateStructPtrFromObject(const VulkanTyped
return base_ptr;
}
-// Tie the VulkanTypedHandle to the cmd buffer which includes:
+// Tie the VK_OBJECT to the cmd buffer which includes:
// Add object_binding to cmd buffer
// Add cb_binding to object
-void ValidationStateTracker::AddCommandBufferBinding(std::unordered_set<CMD_BUFFER_STATE *> *cb_bindings,
- const VulkanTypedHandle &obj, CMD_BUFFER_STATE *cb_node) {
- if (disabled.command_buffer_state) {
- return;
- }
+static void AddCommandBufferBinding(std::unordered_set<GLOBAL_CB_NODE *> *cb_bindings, VK_OBJECT obj, GLOBAL_CB_NODE *cb_node) {
cb_bindings->insert(cb_node);
cb_node->object_bindings.insert(obj);
}
 // For a given object, if cb_node is in that object's cb_bindings, remove cb_node
-void ValidationStateTracker::RemoveCommandBufferBinding(VulkanTypedHandle const &object, CMD_BUFFER_STATE *cb_node) {
- BASE_NODE *base_obj = GetStateStructPtrFromObject(object);
+void CoreChecks::RemoveCommandBufferBinding(layer_data *dev_data, VK_OBJECT const *object, GLOBAL_CB_NODE *cb_node) {
+ BASE_NODE *base_obj = GetStateStructPtrFromObject(dev_data, *object);
if (base_obj) base_obj->cb_bindings.erase(cb_node);
}
// Reset the command buffer state
// Maintain the createInfo and set state to CB_NEW, but clear all other state
-void ValidationStateTracker::ResetCommandBufferState(const VkCommandBuffer cb) {
- CMD_BUFFER_STATE *pCB = GetCBState(cb);
+void CoreChecks::ResetCommandBufferState(layer_data *dev_data, const VkCommandBuffer cb) {
+ GLOBAL_CB_NODE *pCB = dev_data->commandBufferMap[cb];
if (pCB) {
pCB->in_use.store(0);
// Reset CB state (note that createInfo is not cleared)
@@ -2151,8 +2095,6 @@ void ValidationStateTracker::ResetCommandBufferState(const VkCommandBuffer cb) {
memset(&pCB->beginInfo, 0, sizeof(VkCommandBufferBeginInfo));
memset(&pCB->inheritanceInfo, 0, sizeof(VkCommandBufferInheritanceInfo));
pCB->hasDrawCmd = false;
- pCB->hasTraceRaysCmd = false;
- pCB->hasDispatchCmd = false;
pCB->state = CB_NEW;
pCB->submitCount = 0;
pCB->image_layout_change_count = 1; // Start at 1. 0 is insert value for validation cache versions, s.t. new == dirty
@@ -2173,18 +2115,19 @@ void ValidationStateTracker::ResetCommandBufferState(const VkCommandBuffer cb) {
pCB->waitedEvents.clear();
pCB->events.clear();
pCB->writeEventsBeforeWait.clear();
+ pCB->waitedEventsBeforeQueryReset.clear();
pCB->queryToStateMap.clear();
pCB->activeQueries.clear();
pCB->startedQueries.clear();
- pCB->image_layout_map.clear();
+ pCB->imageLayoutMap.clear();
pCB->eventToStageMap.clear();
- pCB->cb_vertex_buffer_binding_info.clear();
- pCB->current_vertex_buffer_binding_info.vertex_buffer_bindings.clear();
+ pCB->draw_data.clear();
+ pCB->current_draw_data.vertex_buffer_bindings.clear();
pCB->vertex_buffer_used = false;
pCB->primaryCommandBuffer = VK_NULL_HANDLE;
// If secondary, invalidate any primary command buffer that may call us.
if (pCB->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
- InvalidateCommandBuffers(pCB->linkedCommandBuffers, VulkanTypedHandle(cb, kVulkanObjectTypeCommandBuffer));
+ InvalidateCommandBuffers(dev_data, pCB->linkedCommandBuffers, {HandleToUint64(cb), kVulkanObjectTypeCommandBuffer});
}
// Remove reverse command buffer links.
@@ -2192,15 +2135,17 @@ void ValidationStateTracker::ResetCommandBufferState(const VkCommandBuffer cb) {
pSubCB->linkedCommandBuffers.erase(pCB);
}
pCB->linkedCommandBuffers.clear();
- ClearCmdBufAndMemReferences(pCB);
+ pCB->updateImages.clear();
+ pCB->updateBuffers.clear();
+ ClearCmdBufAndMemReferences(dev_data, pCB);
pCB->queue_submit_functions.clear();
pCB->cmd_execute_commands_functions.clear();
pCB->eventUpdates.clear();
pCB->queryUpdates.clear();
// Remove object bindings
- for (const auto &obj : pCB->object_bindings) {
- RemoveCommandBufferBinding(obj, pCB);
+ for (auto obj : pCB->object_bindings) {
+ RemoveCommandBufferBinding(dev_data, &obj, pCB);
}
pCB->object_bindings.clear();
// Remove this cmdBuffer's reference from each FrameBuffer's CB ref list
@@ -2214,13 +2159,6 @@ void ValidationStateTracker::ResetCommandBufferState(const VkCommandBuffer cb) {
pCB->qfo_transfer_image_barriers.Reset();
pCB->qfo_transfer_buffer_barriers.Reset();
-
- // Clean up the label data
- ResetCmdDebugUtilsLabel(report_data, pCB->commandBuffer);
- pCB->debug_label.Reset();
- }
- if (command_buffer_reset_callback) {
- (*command_buffer_reset_callback)(cb);
}
}
@@ -2264,9 +2202,6 @@ CBStatusFlags MakeStaticStateMask(VkPipelineDynamicStateCreateInfo const *ds) {
case VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV:
flags &= ~CBSTATUS_SHADING_RATE_PALETTE_SET;
break;
- case VK_DYNAMIC_STATE_LINE_STIPPLE_EXT:
- flags &= ~CBSTATUS_LINE_STIPPLE_SET;
- break;
default:
break;
}
@@ -2278,31 +2213,32 @@ CBStatusFlags MakeStaticStateMask(VkPipelineDynamicStateCreateInfo const *ds) {
// Flags validation error if the associated call is made inside a render pass. The apiName routine should ONLY be called outside a
// render pass.
-bool CoreChecks::InsideRenderPass(const CMD_BUFFER_STATE *pCB, const char *apiName, const char *msgCode) const {
+bool CoreChecks::InsideRenderPass(const layer_data *dev_data, const GLOBAL_CB_NODE *pCB, const char *apiName, const char *msgCode) {
bool inside = false;
if (pCB->activeRenderPass) {
- inside = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(pCB->commandBuffer), msgCode, "%s: It is invalid to issue this call inside an active %s.",
- apiName, report_data->FormatHandle(pCB->activeRenderPass->renderPass).c_str());
+ inside = log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ HandleToUint64(pCB->commandBuffer), msgCode,
+ "%s: It is invalid to issue this call inside an active render pass (%s).", apiName,
+ dev_data->report_data->FormatHandle(pCB->activeRenderPass->renderPass).c_str());
}
return inside;
}
// Flags validation error if the associated call is made outside a render pass. The apiName
// routine should ONLY be called inside a render pass.
-bool CoreChecks::OutsideRenderPass(const CMD_BUFFER_STATE *pCB, const char *apiName, const char *msgCode) const {
+bool CoreChecks::OutsideRenderPass(const layer_data *dev_data, GLOBAL_CB_NODE *pCB, const char *apiName, const char *msgCode) {
bool outside = false;
if (((pCB->createInfo.level == VK_COMMAND_BUFFER_LEVEL_PRIMARY) && (!pCB->activeRenderPass)) ||
((pCB->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) && (!pCB->activeRenderPass) &&
!(pCB->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT))) {
- outside = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ outside = log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(pCB->commandBuffer), msgCode, "%s: This call must be issued inside an active render pass.",
apiName);
}
return outside;
}
-void CoreChecks::InitGpuValidation() {
+void CoreChecks::InitGpuValidation(instance_layer_data *instance_data) {
// Process the layer settings file.
enum CoreValidationGpuFlagBits {
CORE_VALIDATION_GPU_VALIDATION_ALL_BIT = 0x00000001,
@@ -2315,35 +2251,36 @@ void CoreChecks::InitGpuValidation() {
};
std::string gpu_flags_key = "lunarg_core_validation.gpu_validation";
CoreGPUFlags gpu_flags = GetLayerOptionFlags(gpu_flags_key, gpu_flags_option_definitions, 0);
- gpu_flags_key = "khronos_validation.gpu_validation";
- gpu_flags |= GetLayerOptionFlags(gpu_flags_key, gpu_flags_option_definitions, 0);
if (gpu_flags & CORE_VALIDATION_GPU_VALIDATION_ALL_BIT) {
- instance_state->enabled.gpu_validation = true;
+ instance_data->instance_state->enabled.gpu_validation = true;
}
if (gpu_flags & CORE_VALIDATION_GPU_VALIDATION_RESERVE_BINDING_SLOT_BIT) {
- instance_state->enabled.gpu_validation_reserve_binding_slot = true;
+ instance_data->instance_state->enabled.gpu_validation_reserve_binding_slot = true;
}
}
void CoreChecks::PostCallRecordCreateInstance(const VkInstanceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator,
VkInstance *pInstance, VkResult result) {
+ instance_layer_data *instance_data = GetLayerDataPtr(get_dispatch_key(*pInstance), instance_layer_data_map);
if (VK_SUCCESS != result) return;
- InitGpuValidation();
+ InitGpuValidation(instance_data);
}
-bool CoreChecks::ValidateQueueFamilyIndex(const PHYSICAL_DEVICE_STATE *pd_state, uint32_t requested_queue_family,
- const char *err_code, const char *cmd_name, const char *queue_family_var_name) {
+static bool ValidatePhysicalDeviceQueueFamily(instance_layer_data *instance_data, const PHYSICAL_DEVICE_STATE *pd_state,
+ uint32_t requested_queue_family, const char *err_code, const char *cmd_name,
+ const char *queue_family_var_name) {
bool skip = false;
- if (requested_queue_family >= pd_state->queue_family_known_count) {
- const char *conditional_ext_cmd =
- instance_extensions.vk_khr_get_physical_device_properties_2 ? " or vkGetPhysicalDeviceQueueFamilyProperties2[KHR]" : "";
+ const char *conditional_ext_cmd = instance_data->instance_extensions.vk_khr_get_physical_device_properties_2
+ ? " or vkGetPhysicalDeviceQueueFamilyProperties2[KHR]"
+ : "";
- const std::string count_note = (UNCALLED == pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState)
- ? "the pQueueFamilyPropertyCount was never obtained"
- : "i.e. is not less than " + std::to_string(pd_state->queue_family_known_count);
+ std::string count_note = (UNCALLED == pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState)
+ ? "the pQueueFamilyPropertyCount was never obtained"
+ : "i.e. is not less than " + std::to_string(pd_state->queue_family_count);
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+ if (requested_queue_family >= pd_state->queue_family_count) {
+ skip |= log_msg(instance_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
HandleToUint64(pd_state->phys_device), err_code,
"%s: %s (= %" PRIu32
") is not less than any previously obtained pQueueFamilyPropertyCount from "
@@ -2354,8 +2291,8 @@ bool CoreChecks::ValidateQueueFamilyIndex(const PHYSICAL_DEVICE_STATE *pd_state,
}
// Verify VkDeviceQueueCreateInfos
-bool CoreChecks::ValidateDeviceQueueCreateInfos(const PHYSICAL_DEVICE_STATE *pd_state, uint32_t info_count,
- const VkDeviceQueueCreateInfo *infos) {
+static bool ValidateDeviceQueueCreateInfos(instance_layer_data *instance_data, const PHYSICAL_DEVICE_STATE *pd_state,
+ uint32_t info_count, const VkDeviceQueueCreateInfo *infos) {
bool skip = false;
std::unordered_set<uint32_t> queue_family_set;
@@ -2363,37 +2300,38 @@ bool CoreChecks::ValidateDeviceQueueCreateInfos(const PHYSICAL_DEVICE_STATE *pd_
for (uint32_t i = 0; i < info_count; ++i) {
const auto requested_queue_family = infos[i].queueFamilyIndex;
+ // Verify that requested queue family is known to be valid at this point in time
std::string queue_family_var_name = "pCreateInfo->pQueueCreateInfos[" + std::to_string(i) + "].queueFamilyIndex";
- skip |= ValidateQueueFamilyIndex(pd_state, requested_queue_family, "VUID-VkDeviceQueueCreateInfo-queueFamilyIndex-00381",
- "vkCreateDevice", queue_family_var_name.c_str());
-
- if (queue_family_set.insert(requested_queue_family).second == false) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ skip |= ValidatePhysicalDeviceQueueFamily(instance_data, pd_state, requested_queue_family,
+ "VUID-VkDeviceQueueCreateInfo-queueFamilyIndex-00381", "vkCreateDevice",
+ queue_family_var_name.c_str());
+ if (queue_family_set.count(requested_queue_family)) {
+ skip |= log_msg(instance_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
HandleToUint64(pd_state->phys_device), "VUID-VkDeviceCreateInfo-queueFamilyIndex-00372",
"CreateDevice(): %s (=%" PRIu32 ") is not unique within pQueueCreateInfos.",
queue_family_var_name.c_str(), requested_queue_family);
+ } else {
+ queue_family_set.insert(requested_queue_family);
}
- // Verify that requested queue count of queue family is known to be valid at this point in time
- if (requested_queue_family < pd_state->queue_family_known_count) {
+ // Verify that requested queue count of queue family is known to be valid at this point in time
+ if (requested_queue_family < pd_state->queue_family_count) {
const auto requested_queue_count = infos[i].queueCount;
- const bool queue_family_has_props = requested_queue_family < pd_state->queue_family_properties.size();
- // spec guarantees at least one queue for each queue family
- const uint32_t available_queue_count =
- queue_family_has_props ? pd_state->queue_family_properties[requested_queue_family].queueCount : 1;
- const char *conditional_ext_cmd = instance_extensions.vk_khr_get_physical_device_properties_2
+ const auto queue_family_props_count = pd_state->queue_family_properties.size();
+ const bool queue_family_has_props = requested_queue_family < queue_family_props_count;
+ const char *conditional_ext_cmd = instance_data->instance_extensions.vk_khr_get_physical_device_properties_2
? " or vkGetPhysicalDeviceQueueFamilyProperties2[KHR]"
: "";
-
- if (requested_queue_count > available_queue_count) {
- const std::string count_note =
- queue_family_has_props
- ? "i.e. is not less than or equal to " +
- std::to_string(pd_state->queue_family_properties[requested_queue_family].queueCount)
- : "the pQueueFamilyProperties[" + std::to_string(requested_queue_family) + "] was never obtained";
-
+ std::string count_note =
+ !queue_family_has_props
+ ? "the pQueueFamilyProperties[" + std::to_string(requested_queue_family) + "] was never obtained"
+ : "i.e. is not less than or equal to " +
+ std::to_string(pd_state->queue_family_properties[requested_queue_family].queueCount);
+
+ if (!queue_family_has_props ||
+ requested_queue_count > pd_state->queue_family_properties[requested_queue_family].queueCount) {
skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+ instance_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
HandleToUint64(pd_state->phys_device), "VUID-VkDeviceQueueCreateInfo-queueCount-00382",
"vkCreateDevice: pCreateInfo->pQueueCreateInfos[%" PRIu32 "].queueCount (=%" PRIu32
") is not less than or equal to available queue count for this pCreateInfo->pQueueCreateInfos[%" PRIu32
@@ -2408,59 +2346,35 @@ bool CoreChecks::ValidateDeviceQueueCreateInfos(const PHYSICAL_DEVICE_STATE *pd_
bool CoreChecks::PreCallValidateCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator, VkDevice *pDevice) {
+ instance_layer_data *instance_data = GetLayerDataPtr(get_dispatch_key(gpu), instance_layer_data_map);
bool skip = false;
auto pd_state = GetPhysicalDeviceState(gpu);
// TODO: object_tracker should perhaps do this instead
// and it does not seem to currently work anyway -- the loader just crashes before this point
if (!pd_state) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0,
- kVUID_Core_DevLimit_MustQueryCount,
+ skip |= log_msg(instance_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+ 0, kVUID_Core_DevLimit_MustQueryCount,
"Invalid call to vkCreateDevice() w/o first calling vkEnumeratePhysicalDevices().");
- } else {
- skip |= ValidateDeviceQueueCreateInfos(pd_state, pCreateInfo->queueCreateInfoCount, pCreateInfo->pQueueCreateInfos);
}
+ skip |=
+ ValidateDeviceQueueCreateInfos(instance_data, pd_state, pCreateInfo->queueCreateInfoCount, pCreateInfo->pQueueCreateInfos);
return skip;
}
void CoreChecks::PreCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator, VkDevice *pDevice,
- safe_VkDeviceCreateInfo *modified_create_info) {
+ std::unique_ptr<safe_VkDeviceCreateInfo> &modified_create_info) {
// GPU Validation can possibly turn on device features, so give it a chance to change the create info.
- if (enabled.gpu_validation) {
+ if (GetEnables()->gpu_validation) {
VkPhysicalDeviceFeatures supported_features;
- DispatchGetPhysicalDeviceFeatures(gpu, &supported_features);
+ instance_dispatch_table.GetPhysicalDeviceFeatures(gpu, &supported_features);
GpuPreCallRecordCreateDevice(gpu, modified_create_info, &supported_features);
}
}
void CoreChecks::PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator, VkDevice *pDevice, VkResult result) {
- // The state tracker sets up the device state
- StateTracker::PostCallRecordCreateDevice(gpu, pCreateInfo, pAllocator, pDevice, result);
-
- // Add the callback hooks for the functions that are either broadly or deeply used and that the ValidationStateTracker refactor
- // would be messier without.
- // TODO: Find a good way to do this hooklessly.
- ValidationObject *device_object = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
- ValidationObject *validation_data = GetValidationObject(device_object->object_dispatch, LayerObjectTypeCoreValidation);
- CoreChecks *core_checks = static_cast<CoreChecks *>(validation_data);
-
- if (enabled.gpu_validation) {
- // The only CoreCheck specific init is for gpu_validation
- core_checks->GpuPostCallRecordCreateDevice(&enabled, pCreateInfo);
- core_checks->SetCommandBufferResetCallback(
- [core_checks](VkCommandBuffer command_buffer) -> void { core_checks->GpuResetCommandBuffer(command_buffer); });
- }
- core_checks->SetSetImageViewInitialLayoutCallback(
- [core_checks](CMD_BUFFER_STATE *cb_node, const IMAGE_VIEW_STATE &iv_state, VkImageLayout layout) -> void {
- core_checks->SetImageViewInitialLayout(cb_node, iv_state, layout);
- });
-}
-
-void ValidationStateTracker::PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
- const VkAllocationCallbacks *pAllocator, VkDevice *pDevice,
- VkResult result) {
if (VK_SUCCESS != result) return;
const VkPhysicalDeviceFeatures *enabled_features_found = pCreateInfo->pEnabledFeatures;
@@ -2471,305 +2385,252 @@ void ValidationStateTracker::PostCallRecordCreateDevice(VkPhysicalDevice gpu, co
}
}
- ValidationObject *device_object = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
+ ValidationObject *device_object = ::GetLayerDataPtr(::get_dispatch_key(*pDevice), ::layer_data_map);
ValidationObject *validation_data = GetValidationObject(device_object->object_dispatch, LayerObjectTypeCoreValidation);
- ValidationStateTracker *state_tracker = static_cast<ValidationStateTracker *>(validation_data);
+ CoreChecks *core_checks = static_cast<CoreChecks *>(validation_data);
if (nullptr == enabled_features_found) {
- state_tracker->enabled_features.core = {};
+ core_checks->enabled_features.core = {};
} else {
- state_tracker->enabled_features.core = *enabled_features_found;
+ core_checks->enabled_features.core = *enabled_features_found;
}
// Make sure that queue_family_properties are obtained for this device's physical_device, even if the app has not
// previously set them through an explicit API call.
uint32_t count;
auto pd_state = GetPhysicalDeviceState(gpu);
- DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, nullptr);
+ instance_dispatch_table.GetPhysicalDeviceQueueFamilyProperties(gpu, &count, nullptr);
+ pd_state->queue_family_count = std::max(pd_state->queue_family_count, count);
pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
- DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, &pd_state->queue_family_properties[0]);
+ instance_dispatch_table.GetPhysicalDeviceQueueFamilyProperties(gpu, &count, &pd_state->queue_family_properties[0]);
// Save local link to this device's physical device state
- state_tracker->physical_device_state = pd_state;
+ core_checks->physical_device_state = pd_state;
const auto *device_group_ci = lvl_find_in_chain<VkDeviceGroupDeviceCreateInfo>(pCreateInfo->pNext);
- state_tracker->physical_device_count =
+ core_checks->physical_device_count =
device_group_ci && device_group_ci->physicalDeviceCount > 0 ? device_group_ci->physicalDeviceCount : 1;
const auto *descriptor_indexing_features = lvl_find_in_chain<VkPhysicalDeviceDescriptorIndexingFeaturesEXT>(pCreateInfo->pNext);
if (descriptor_indexing_features) {
- state_tracker->enabled_features.descriptor_indexing = *descriptor_indexing_features;
+ core_checks->enabled_features.descriptor_indexing = *descriptor_indexing_features;
}
const auto *eight_bit_storage_features = lvl_find_in_chain<VkPhysicalDevice8BitStorageFeaturesKHR>(pCreateInfo->pNext);
if (eight_bit_storage_features) {
- state_tracker->enabled_features.eight_bit_storage = *eight_bit_storage_features;
+ core_checks->enabled_features.eight_bit_storage = *eight_bit_storage_features;
}
const auto *exclusive_scissor_features = lvl_find_in_chain<VkPhysicalDeviceExclusiveScissorFeaturesNV>(pCreateInfo->pNext);
if (exclusive_scissor_features) {
- state_tracker->enabled_features.exclusive_scissor = *exclusive_scissor_features;
+ core_checks->enabled_features.exclusive_scissor = *exclusive_scissor_features;
}
const auto *shading_rate_image_features = lvl_find_in_chain<VkPhysicalDeviceShadingRateImageFeaturesNV>(pCreateInfo->pNext);
if (shading_rate_image_features) {
- state_tracker->enabled_features.shading_rate_image = *shading_rate_image_features;
+ core_checks->enabled_features.shading_rate_image = *shading_rate_image_features;
}
const auto *mesh_shader_features = lvl_find_in_chain<VkPhysicalDeviceMeshShaderFeaturesNV>(pCreateInfo->pNext);
if (mesh_shader_features) {
- state_tracker->enabled_features.mesh_shader = *mesh_shader_features;
+ core_checks->enabled_features.mesh_shader = *mesh_shader_features;
}
const auto *inline_uniform_block_features =
lvl_find_in_chain<VkPhysicalDeviceInlineUniformBlockFeaturesEXT>(pCreateInfo->pNext);
if (inline_uniform_block_features) {
- state_tracker->enabled_features.inline_uniform_block = *inline_uniform_block_features;
+ core_checks->enabled_features.inline_uniform_block = *inline_uniform_block_features;
}
const auto *transform_feedback_features = lvl_find_in_chain<VkPhysicalDeviceTransformFeedbackFeaturesEXT>(pCreateInfo->pNext);
if (transform_feedback_features) {
- state_tracker->enabled_features.transform_feedback_features = *transform_feedback_features;
+ core_checks->enabled_features.transform_feedback_features = *transform_feedback_features;
}
const auto *float16_int8_features = lvl_find_in_chain<VkPhysicalDeviceFloat16Int8FeaturesKHR>(pCreateInfo->pNext);
if (float16_int8_features) {
- state_tracker->enabled_features.float16_int8 = *float16_int8_features;
+ core_checks->enabled_features.float16_int8 = *float16_int8_features;
}
const auto *vtx_attrib_div_features = lvl_find_in_chain<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT>(pCreateInfo->pNext);
if (vtx_attrib_div_features) {
- state_tracker->enabled_features.vtx_attrib_divisor_features = *vtx_attrib_div_features;
- }
-
- const auto *uniform_buffer_standard_layout_features =
- lvl_find_in_chain<VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR>(pCreateInfo->pNext);
- if (uniform_buffer_standard_layout_features) {
- state_tracker->enabled_features.uniform_buffer_standard_layout = *uniform_buffer_standard_layout_features;
+ core_checks->enabled_features.vtx_attrib_divisor_features = *vtx_attrib_div_features;
}
const auto *scalar_block_layout_features = lvl_find_in_chain<VkPhysicalDeviceScalarBlockLayoutFeaturesEXT>(pCreateInfo->pNext);
if (scalar_block_layout_features) {
- state_tracker->enabled_features.scalar_block_layout_features = *scalar_block_layout_features;
+ core_checks->enabled_features.scalar_block_layout_features = *scalar_block_layout_features;
}
const auto *buffer_address = lvl_find_in_chain<VkPhysicalDeviceBufferAddressFeaturesEXT>(pCreateInfo->pNext);
if (buffer_address) {
- state_tracker->enabled_features.buffer_address = *buffer_address;
- }
-
- const auto *cooperative_matrix_features = lvl_find_in_chain<VkPhysicalDeviceCooperativeMatrixFeaturesNV>(pCreateInfo->pNext);
- if (cooperative_matrix_features) {
- state_tracker->enabled_features.cooperative_matrix_features = *cooperative_matrix_features;
- }
-
- const auto *float_controls_features = lvl_find_in_chain<VkPhysicalDeviceFloatControlsPropertiesKHR>(pCreateInfo->pNext);
- if (float_controls_features) {
- state_tracker->enabled_features.float_controls = *float_controls_features;
- }
-
- const auto *host_query_reset_features = lvl_find_in_chain<VkPhysicalDeviceHostQueryResetFeaturesEXT>(pCreateInfo->pNext);
- if (host_query_reset_features) {
- state_tracker->enabled_features.host_query_reset_features = *host_query_reset_features;
- }
-
- const auto *compute_shader_derivatives_features =
- lvl_find_in_chain<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV>(pCreateInfo->pNext);
- if (compute_shader_derivatives_features) {
- state_tracker->enabled_features.compute_shader_derivatives_features = *compute_shader_derivatives_features;
+ core_checks->enabled_features.buffer_address = *buffer_address;
}
- const auto *fragment_shader_barycentric_features =
- lvl_find_in_chain<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV>(pCreateInfo->pNext);
- if (fragment_shader_barycentric_features) {
- state_tracker->enabled_features.fragment_shader_barycentric_features = *fragment_shader_barycentric_features;
- }
+ // Store physical device properties and physical device mem limits into device layer_data structs
+ instance_dispatch_table.GetPhysicalDeviceMemoryProperties(gpu, &core_checks->phys_dev_mem_props);
+ instance_dispatch_table.GetPhysicalDeviceProperties(gpu, &core_checks->phys_dev_props);
- const auto *shader_image_footprint_features =
- lvl_find_in_chain<VkPhysicalDeviceShaderImageFootprintFeaturesNV>(pCreateInfo->pNext);
- if (shader_image_footprint_features) {
- state_tracker->enabled_features.shader_image_footprint_features = *shader_image_footprint_features;
+ if (core_checks->device_extensions.vk_khr_push_descriptor) {
+ // Get the needed push_descriptor limits
+ auto push_descriptor_prop = lvl_init_struct<VkPhysicalDevicePushDescriptorPropertiesKHR>();
+ auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&push_descriptor_prop);
+ instance_dispatch_table.GetPhysicalDeviceProperties2KHR(gpu, &prop2);
+ core_checks->phys_dev_ext_props.max_push_descriptors = push_descriptor_prop.maxPushDescriptors;
}
-
- const auto *fragment_shader_interlock_features =
- lvl_find_in_chain<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT>(pCreateInfo->pNext);
- if (fragment_shader_interlock_features) {
- state_tracker->enabled_features.fragment_shader_interlock_features = *fragment_shader_interlock_features;
+ if (core_checks->device_extensions.vk_ext_descriptor_indexing) {
+ // Get the needed descriptor_indexing limits
+ auto descriptor_indexing_props = lvl_init_struct<VkPhysicalDeviceDescriptorIndexingPropertiesEXT>();
+ auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&descriptor_indexing_props);
+ instance_dispatch_table.GetPhysicalDeviceProperties2KHR(gpu, &prop2);
+ core_checks->phys_dev_ext_props.descriptor_indexing_props = descriptor_indexing_props;
}
-
- const auto *demote_to_helper_invocation_features =
- lvl_find_in_chain<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>(pCreateInfo->pNext);
- if (demote_to_helper_invocation_features) {
- state_tracker->enabled_features.demote_to_helper_invocation_features = *demote_to_helper_invocation_features;
+ if (core_checks->device_extensions.vk_nv_shading_rate_image) {
+ // Get the needed shading rate image limits
+ auto shading_rate_image_props = lvl_init_struct<VkPhysicalDeviceShadingRateImagePropertiesNV>();
+ auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&shading_rate_image_props);
+ instance_dispatch_table.GetPhysicalDeviceProperties2KHR(gpu, &prop2);
+ core_checks->phys_dev_ext_props.shading_rate_image_props = shading_rate_image_props;
}
-
- const auto *texel_buffer_alignment_features =
- lvl_find_in_chain<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT>(pCreateInfo->pNext);
- if (texel_buffer_alignment_features) {
- state_tracker->enabled_features.texel_buffer_alignment_features = *texel_buffer_alignment_features;
+ if (core_checks->device_extensions.vk_nv_mesh_shader) {
+ // Get the needed mesh shader limits
+ auto mesh_shader_props = lvl_init_struct<VkPhysicalDeviceMeshShaderPropertiesNV>();
+ auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&mesh_shader_props);
+ instance_dispatch_table.GetPhysicalDeviceProperties2KHR(gpu, &prop2);
+ core_checks->phys_dev_ext_props.mesh_shader_props = mesh_shader_props;
}
-
- const auto *imageless_framebuffer_features =
- lvl_find_in_chain<VkPhysicalDeviceImagelessFramebufferFeaturesKHR>(pCreateInfo->pNext);
- if (imageless_framebuffer_features) {
- state_tracker->enabled_features.imageless_framebuffer_features = *imageless_framebuffer_features;
+ if (core_checks->device_extensions.vk_ext_inline_uniform_block) {
+ // Get the needed inline uniform block limits
+ auto inline_uniform_block_props = lvl_init_struct<VkPhysicalDeviceInlineUniformBlockPropertiesEXT>();
+ auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&inline_uniform_block_props);
+ instance_dispatch_table.GetPhysicalDeviceProperties2KHR(gpu, &prop2);
+ core_checks->phys_dev_ext_props.inline_uniform_block_props = inline_uniform_block_props;
}
-
- const auto *pipeline_exe_props_features =
- lvl_find_in_chain<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR>(pCreateInfo->pNext);
- if (pipeline_exe_props_features) {
- state_tracker->enabled_features.pipeline_exe_props_features = *pipeline_exe_props_features;
+ if (core_checks->device_extensions.vk_ext_vertex_attribute_divisor) {
+ // Get the needed vertex attribute divisor limits
+ auto vtx_attrib_divisor_props = lvl_init_struct<VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT>();
+ auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&vtx_attrib_divisor_props);
+ instance_dispatch_table.GetPhysicalDeviceProperties2KHR(gpu, &prop2);
+ core_checks->phys_dev_ext_props.vtx_attrib_divisor_props = vtx_attrib_divisor_props;
}
-
- // Store physical device properties and physical device mem limits into CoreChecks structs
- DispatchGetPhysicalDeviceMemoryProperties(gpu, &state_tracker->phys_dev_mem_props);
- DispatchGetPhysicalDeviceProperties(gpu, &state_tracker->phys_dev_props);
-
- const auto &dev_ext = state_tracker->device_extensions;
- auto *phys_dev_props = &state_tracker->phys_dev_ext_props;
-
- if (dev_ext.vk_khr_push_descriptor) {
- // Get the needed push_descriptor limits
- VkPhysicalDevicePushDescriptorPropertiesKHR push_descriptor_prop;
- GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_push_descriptor, &push_descriptor_prop);
- phys_dev_props->max_push_descriptors = push_descriptor_prop.maxPushDescriptors;
- }
-
- GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_descriptor_indexing, &phys_dev_props->descriptor_indexing_props);
- GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_shading_rate_image, &phys_dev_props->shading_rate_image_props);
- GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_mesh_shader, &phys_dev_props->mesh_shader_props);
- GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_inline_uniform_block, &phys_dev_props->inline_uniform_block_props);
- GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_vertex_attribute_divisor, &phys_dev_props->vtx_attrib_divisor_props);
- GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_depth_stencil_resolve, &phys_dev_props->depth_stencil_resolve_props);
- GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_transform_feedback, &phys_dev_props->transform_feedback_props);
- GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_ray_tracing, &phys_dev_props->ray_tracing_props);
- GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_texel_buffer_alignment, &phys_dev_props->texel_buffer_alignment_props);
- GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_fragment_density_map, &phys_dev_props->fragment_density_map_props);
- if (state_tracker->device_extensions.vk_nv_cooperative_matrix) {
- // Get the needed cooperative_matrix properties
- auto cooperative_matrix_props = lvl_init_struct<VkPhysicalDeviceCooperativeMatrixPropertiesNV>();
- auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&cooperative_matrix_props);
+ if (core_checks->device_extensions.vk_khr_depth_stencil_resolve) {
+ // Get the needed depth and stencil resolve modes
+ auto depth_stencil_resolve_props = lvl_init_struct<VkPhysicalDeviceDepthStencilResolvePropertiesKHR>();
+ auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&depth_stencil_resolve_props);
instance_dispatch_table.GetPhysicalDeviceProperties2KHR(gpu, &prop2);
- state_tracker->phys_dev_ext_props.cooperative_matrix_props = cooperative_matrix_props;
-
- uint32_t numCooperativeMatrixProperties = 0;
- instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &numCooperativeMatrixProperties, NULL);
- state_tracker->cooperative_matrix_properties.resize(numCooperativeMatrixProperties,
- lvl_init_struct<VkCooperativeMatrixPropertiesNV>());
-
- instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &numCooperativeMatrixProperties,
- state_tracker->cooperative_matrix_properties.data());
+ core_checks->phys_dev_ext_props.depth_stencil_resolve_props = depth_stencil_resolve_props;
}
- if (state_tracker->api_version >= VK_API_VERSION_1_1) {
- // Get the needed subgroup limits
- auto subgroup_prop = lvl_init_struct<VkPhysicalDeviceSubgroupProperties>();
- auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&subgroup_prop);
- instance_dispatch_table.GetPhysicalDeviceProperties2(gpu, &prop2);
-
- state_tracker->phys_dev_ext_props.subgroup_props = subgroup_prop;
+ if (GetEnables()->gpu_validation) {
+ // Copy any needed instance data into the gpu validation state
+ core_checks->gpu_validation_state.reserve_binding_slot = GetEnables()->gpu_validation_reserve_binding_slot;
+ core_checks->GpuPostCallRecordCreateDevice(core_checks);
}
// Store queue family data
if ((pCreateInfo != nullptr) && (pCreateInfo->pQueueCreateInfos != nullptr)) {
for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; ++i) {
- state_tracker->queue_family_index_map.insert(
+ core_checks->queue_family_index_map.insert(
std::make_pair(pCreateInfo->pQueueCreateInfos[i].queueFamilyIndex, pCreateInfo->pQueueCreateInfos[i].queueCount));
}
}
}
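
Each extension-limit query in the function above chains an extension struct onto VkPhysicalDeviceProperties2 and lets the driver fill it in. A minimal sketch of that query pattern using only core Vulkan 1.1 entry points; the QueryPushDescriptorLimit name is illustrative, not part of the layer:

    // Sketch only: same pattern as the push-descriptor limit query above.
    #include <vulkan/vulkan.h>

    static uint32_t QueryPushDescriptorLimit(VkPhysicalDevice gpu) {
        VkPhysicalDevicePushDescriptorPropertiesKHR push_props = {};
        push_props.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR;

        VkPhysicalDeviceProperties2 props2 = {};
        props2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
        props2.pNext = &push_props;  // chain the extension struct so the driver fills it in

        // Requires Vulkan 1.1 (or VK_KHR_get_physical_device_properties_2 via the KHR entry point).
        vkGetPhysicalDeviceProperties2(gpu, &props2);
        return push_props.maxPushDescriptors;
    }
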
-void ValidationStateTracker::PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
+void CoreChecks::PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
if (!device) return;
-
- pipelineMap.clear();
- renderPassMap.clear();
-
- // Reset all command buffers before destroying them, to unlink object_bindings.
- for (auto &commandBuffer : commandBufferMap) {
- ResetCommandBufferState(commandBuffer.first);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ if (GetEnables()->gpu_validation) {
+ GpuPreCallRecordDestroyDevice(device_data);
}
- commandBufferMap.clear();
-
+ device_data->pipelineMap.clear();
+ device_data->renderPassMap.clear();
+ for (auto ii = device_data->commandBufferMap.begin(); ii != device_data->commandBufferMap.end(); ++ii) {
+ delete (*ii).second;
+ }
+ device_data->commandBufferMap.clear();
// This will also delete all sets in the pool & remove them from setMap
- DeleteDescriptorSetPools();
+ DeletePools(device_data);
// All sets should be removed
- assert(setMap.empty());
- descriptorSetLayoutMap.clear();
- imageViewMap.clear();
- imageMap.clear();
- bufferViewMap.clear();
- bufferMap.clear();
+ assert(device_data->setMap.empty());
+ device_data->descriptorSetLayoutMap.clear();
+ device_data->imageViewMap.clear();
+ device_data->imageMap.clear();
+ device_data->imageSubresourceMap.clear();
+ device_data->imageLayoutMap.clear();
+ device_data->bufferViewMap.clear();
+ device_data->bufferMap.clear();
// Queues persist until device is destroyed
- queueMap.clear();
+ device_data->queueMap.clear();
layer_debug_utils_destroy_device(device);
}
-void CoreChecks::PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
- if (!device) return;
- if (enabled.gpu_validation) {
- GpuPreCallRecordDestroyDevice();
- }
- imageSubresourceMap.clear();
- imageLayoutMap.clear();
-
- StateTracker::PreCallRecordDestroyDevice(device, pAllocator);
-}
// For given stage mask, if Geometry shader stage is on w/o GS being enabled, report geo_error_id
// and if Tessellation Control or Evaluation shader stages are on w/o TS being enabled, report tess_error_id.
// Similarly for mesh and task shaders.
-bool CoreChecks::ValidateStageMaskGsTsEnables(VkPipelineStageFlags stageMask, const char *caller, const char *geo_error_id,
- const char *tess_error_id, const char *mesh_error_id,
- const char *task_error_id) const {
+static bool ValidateStageMaskGsTsEnables(const layer_data *dev_data, VkPipelineStageFlags stageMask, const char *caller,
+ const char *geo_error_id, const char *tess_error_id, const char *mesh_error_id,
+ const char *task_error_id) {
bool skip = false;
- if (!enabled_features.core.geometryShader && (stageMask & VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, geo_error_id,
- "%s call includes a stageMask with VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT bit set when device does not have "
- "geometryShader feature enabled.",
- caller);
+ if (!dev_data->enabled_features.core.geometryShader && (stageMask & VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT)) {
+ skip |=
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, geo_error_id,
+ "%s call includes a stageMask with VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT bit set when device does not have "
+ "geometryShader feature enabled.",
+ caller);
}
- if (!enabled_features.core.tessellationShader &&
+ if (!dev_data->enabled_features.core.tessellationShader &&
(stageMask & (VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT | VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT))) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, tess_error_id,
- "%s call includes a stageMask with VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT and/or "
- "VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT bit(s) set when device does not have "
- "tessellationShader feature enabled.",
- caller);
+ skip |=
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, tess_error_id,
+ "%s call includes a stageMask with VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT and/or "
+ "VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT bit(s) set when device does not have "
+ "tessellationShader feature enabled.",
+ caller);
}
- if (!enabled_features.mesh_shader.meshShader && (stageMask & VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, mesh_error_id,
- "%s call includes a stageMask with VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV bit set when device does not have "
- "VkPhysicalDeviceMeshShaderFeaturesNV::meshShader feature enabled.",
- caller);
+ if (!dev_data->enabled_features.mesh_shader.meshShader && (stageMask & VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV)) {
+ skip |=
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, mesh_error_id,
+ "%s call includes a stageMask with VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV bit set when device does not have "
+ "VkPhysicalDeviceMeshShaderFeaturesNV::meshShader feature enabled.",
+ caller);
}
- if (!enabled_features.mesh_shader.taskShader && (stageMask & VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, task_error_id,
- "%s call includes a stageMask with VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV bit set when device does not have "
- "VkPhysicalDeviceMeshShaderFeaturesNV::taskShader feature enabled.",
- caller);
+ if (!dev_data->enabled_features.mesh_shader.taskShader && (stageMask & VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV)) {
+ skip |=
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, task_error_id,
+ "%s call includes a stageMask with VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV bit set when device does not have "
+ "VkPhysicalDeviceMeshShaderFeaturesNV::taskShader feature enabled.",
+ caller);
}
return skip;
}
// Loop through bound objects and increment their in_use counts.
-void ValidationStateTracker::IncrementBoundObjects(CMD_BUFFER_STATE const *cb_node) {
+void CoreChecks::IncrementBoundObjects(layer_data *dev_data, GLOBAL_CB_NODE const *cb_node) {
for (auto obj : cb_node->object_bindings) {
- auto base_obj = GetStateStructPtrFromObject(obj);
+ auto base_obj = GetStateStructPtrFromObject(dev_data, obj);
if (base_obj) {
base_obj->in_use.fetch_add(1);
}
}
}
// Track which resources are in-flight by atomically incrementing their "in_use" count
-void ValidationStateTracker::IncrementResources(CMD_BUFFER_STATE *cb_node) {
+void CoreChecks::IncrementResources(layer_data *dev_data, GLOBAL_CB_NODE *cb_node) {
cb_node->submitCount++;
cb_node->in_use.fetch_add(1);
// First Increment for all "generic" objects bound to cmd buffer, followed by special-case objects below
- IncrementBoundObjects(cb_node);
+ IncrementBoundObjects(dev_data, cb_node);
// TODO : We should be able to remove the NULL look-up checks from the code below as long as
// all the corresponding cases are verified to cause CB_INVALID state and the CB_INVALID state
// should then be flagged prior to calling this function
+ for (auto draw_data_element : cb_node->draw_data) {
+ for (auto &vertex_buffer : draw_data_element.vertex_buffer_bindings) {
+ auto buffer_state = GetBufferState(vertex_buffer.buffer);
+ if (buffer_state) {
+ buffer_state->in_use.fetch_add(1);
+ }
+ }
+ }
for (auto event : cb_node->writeEventsBeforeWait) {
- auto event_state = GetEventState(event);
+ auto event_state = GetEventNode(event);
if (event_state) event_state->write_in_use++;
}
}
@@ -2778,7 +2639,7 @@ void ValidationStateTracker::IncrementResources(CMD_BUFFER_STATE *cb_node) {
// For the given queue, verify the queue state up to the given seq number.
// Currently the only check is to make sure that if there are events to be waited on prior to
// a QueryReset, make sure that all such events have been signalled.
-bool CoreChecks::VerifyQueueStateToSeq(QUEUE_STATE *initial_queue, uint64_t initial_seq) {
+bool CoreChecks::VerifyQueueStateToSeq(layer_data *dev_data, QUEUE_STATE *initial_queue, uint64_t initial_seq) {
bool skip = false;
// sequence number we want to validate up to, per queue
@@ -2822,26 +2683,26 @@ bool CoreChecks::VerifyQueueStateToSeq(QUEUE_STATE *initial_queue, uint64_t init
}
// When the given fence is retired, verify outstanding queue operations through the point of the fence
-bool CoreChecks::VerifyQueueStateToFence(VkFence fence) {
- auto fence_state = GetFenceState(fence);
+bool CoreChecks::VerifyQueueStateToFence(layer_data *dev_data, VkFence fence) {
+ auto fence_state = GetFenceNode(fence);
if (fence_state && fence_state->scope == kSyncScopeInternal && VK_NULL_HANDLE != fence_state->signaler.first) {
- return VerifyQueueStateToSeq(GetQueueState(fence_state->signaler.first), fence_state->signaler.second);
+ return VerifyQueueStateToSeq(dev_data, GetQueueState(fence_state->signaler.first), fence_state->signaler.second);
}
return false;
}
// Decrement in-use count for objects bound to command buffer
-void ValidationStateTracker::DecrementBoundResources(CMD_BUFFER_STATE const *cb_node) {
+void CoreChecks::DecrementBoundResources(layer_data *dev_data, GLOBAL_CB_NODE const *cb_node) {
BASE_NODE *base_obj = nullptr;
for (auto obj : cb_node->object_bindings) {
- base_obj = GetStateStructPtrFromObject(obj);
+ base_obj = GetStateStructPtrFromObject(dev_data, obj);
if (base_obj) {
base_obj->in_use.fetch_sub(1);
}
}
}
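
IncrementBoundObjects and DecrementBoundResources implement a simple atomic reference count on every object bound to a submitted command buffer, so destroy-time checks can tell whether an object is still in flight. A reduced sketch of that pattern with a stand-in BaseNode type (illustrative names only):

    // Sketch only: the in_use counting used by the bound-object bookkeeping above.
    #include <atomic>
    #include <vector>

    struct BaseNode {
        std::atomic<uint32_t> in_use{0};
    };

    static void IncrementBound(const std::vector<BaseNode *> &bindings) {
        for (auto *obj : bindings) {
            if (obj) obj->in_use.fetch_add(1);  // object now referenced by queued work
        }
    }

    static void DecrementBound(const std::vector<BaseNode *> &bindings) {
        for (auto *obj : bindings) {
            if (obj) obj->in_use.fetch_sub(1);  // queued work retired; release the reference
        }
    }
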
-void ValidationStateTracker::RetireWorkOnQueue(QUEUE_STATE *pQueue, uint64_t seq, bool switch_finished_queries) {
+void CoreChecks::RetireWorkOnQueue(layer_data *dev_data, QUEUE_STATE *pQueue, uint64_t seq) {
std::unordered_map<VkQueue, uint64_t> otherQueueSeqs;
// Roll this queue forward, one submission at a time.
@@ -2849,7 +2710,7 @@ void ValidationStateTracker::RetireWorkOnQueue(QUEUE_STATE *pQueue, uint64_t seq
auto &submission = pQueue->submissions.front();
for (auto &wait : submission.waitSemaphores) {
- auto pSemaphore = GetSemaphoreState(wait.semaphore);
+ auto pSemaphore = GetSemaphoreNode(wait.semaphore);
if (pSemaphore) {
pSemaphore->in_use.fetch_sub(1);
}
@@ -2858,47 +2719,51 @@ void ValidationStateTracker::RetireWorkOnQueue(QUEUE_STATE *pQueue, uint64_t seq
}
for (auto &semaphore : submission.signalSemaphores) {
- auto pSemaphore = GetSemaphoreState(semaphore);
+ auto pSemaphore = GetSemaphoreNode(semaphore);
if (pSemaphore) {
pSemaphore->in_use.fetch_sub(1);
}
}
for (auto &semaphore : submission.externalSemaphores) {
- auto pSemaphore = GetSemaphoreState(semaphore);
+ auto pSemaphore = GetSemaphoreNode(semaphore);
if (pSemaphore) {
pSemaphore->in_use.fetch_sub(1);
}
}
for (auto cb : submission.cbs) {
- auto cb_node = GetCBState(cb);
+ auto cb_node = GetCBNode(cb);
if (!cb_node) {
continue;
}
// First perform decrement on general case bound objects
- DecrementBoundResources(cb_node);
+ DecrementBoundResources(dev_data, cb_node);
+ for (auto draw_data_element : cb_node->draw_data) {
+ for (auto &vertex_buffer_binding : draw_data_element.vertex_buffer_bindings) {
+ auto buffer_state = GetBufferState(vertex_buffer_binding.buffer);
+ if (buffer_state) {
+ buffer_state->in_use.fetch_sub(1);
+ }
+ }
+ }
for (auto event : cb_node->writeEventsBeforeWait) {
- auto eventNode = eventMap.find(event);
- if (eventNode != eventMap.end()) {
+ auto eventNode = dev_data->eventMap.find(event);
+ if (eventNode != dev_data->eventMap.end()) {
eventNode->second.write_in_use--;
}
}
for (auto queryStatePair : cb_node->queryToStateMap) {
- const QueryState newState =
- ((queryStatePair.second == QUERYSTATE_ENDED && switch_finished_queries) ? QUERYSTATE_AVAILABLE
- : queryStatePair.second);
- pQueue->queryToStateMap[queryStatePair.first] = newState;
- queryToStateMap[queryStatePair.first] = newState;
+ dev_data->queryToStateMap[queryStatePair.first] = queryStatePair.second;
}
for (auto eventStagePair : cb_node->eventToStageMap) {
- eventMap[eventStagePair.first].stageMask = eventStagePair.second;
+ dev_data->eventMap[eventStagePair.first].stageMask = eventStagePair.second;
}
cb_node->in_use.fetch_sub(1);
}
- auto pFence = GetFenceState(submission.fence);
+ auto pFence = GetFenceNode(submission.fence);
if (pFence && pFence->scope == kSyncScopeInternal) {
pFence->state = FENCE_RETIRED;
}
@@ -2909,62 +2774,64 @@ void ValidationStateTracker::RetireWorkOnQueue(QUEUE_STATE *pQueue, uint64_t seq
// Roll other queues forward to the highest seq we saw a wait for
for (auto qs : otherQueueSeqs) {
- RetireWorkOnQueue(GetQueueState(qs.first), qs.second, switch_finished_queries);
+ RetireWorkOnQueue(dev_data, GetQueueState(qs.first), qs.second);
}
}
// Submit a fence to a queue, delimiting previous fences and previous untracked
// work by it.
-static void SubmitFence(QUEUE_STATE *pQueue, FENCE_STATE *pFence, uint64_t submitCount) {
+static void SubmitFence(QUEUE_STATE *pQueue, FENCE_NODE *pFence, uint64_t submitCount) {
pFence->state = FENCE_INFLIGHT;
pFence->signaler.first = pQueue->queue;
pFence->signaler.second = pQueue->seq + pQueue->submissions.size() + submitCount;
}
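
SubmitFence records which queue, and which submission sequence number on that queue, will eventually signal the fence, so the retire path can later roll the queue forward exactly that far. A toy model of that bookkeeping, with purely illustrative types:

    // Sketch only: the (queue, sequence) signaler pair assigned in SubmitFence() above.
    #include <cstdint>
    #include <deque>
    #include <utility>

    struct ToyQueue {
        uint64_t seq = 0;             // submissions already retired
        std::deque<int> submissions;  // submissions still in flight
    };

    struct ToyFence {
        std::pair<ToyQueue *, uint64_t> signaler{nullptr, 0};
    };

    static void SubmitFenceSketch(ToyQueue *queue, ToyFence *fence, uint64_t submit_count) {
        fence->signaler.first = queue;
        fence->signaler.second = queue->seq + queue->submissions.size() + submit_count;
    }
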
-bool CoreChecks::ValidateCommandBufferSimultaneousUse(const CMD_BUFFER_STATE *pCB, int current_submit_count) const {
+bool CoreChecks::ValidateCommandBufferSimultaneousUse(layer_data *dev_data, GLOBAL_CB_NODE *pCB, int current_submit_count) {
bool skip = false;
if ((pCB->in_use.load() || current_submit_count > 1) &&
!(pCB->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0,
- "VUID-vkQueueSubmit-pCommandBuffers-00071", "%s is already in use and is not marked for simultaneous use.",
- report_data->FormatHandle(pCB->commandBuffer).c_str());
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0,
+ "VUID-vkQueueSubmit-pCommandBuffers-00071",
+ "Command Buffer %s is already in use and is not marked for simultaneous use.",
+ dev_data->report_data->FormatHandle(pCB->commandBuffer).c_str());
}
return skip;
}
-bool CoreChecks::ValidateCommandBufferState(const CMD_BUFFER_STATE *cb_state, const char *call_source, int current_submit_count,
- const char *vu_id) const {
+bool CoreChecks::ValidateCommandBufferState(layer_data *dev_data, GLOBAL_CB_NODE *cb_state, const char *call_source,
+ int current_submit_count, const char *vu_id) {
bool skip = false;
- if (disabled.command_buffer_state) return skip;
+ if (dev_data->instance_data->disabled.command_buffer_state) return skip;
// Validate ONE_TIME_SUBMIT_BIT CB is not being submitted more than once
if ((cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT) &&
(cb_state->submitCount + current_submit_count > 1)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0,
- kVUID_Core_DrawState_CommandBufferSingleSubmitViolation,
- "%s was begun w/ VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT set, but has been submitted 0x%" PRIxLEAST64
- "times.",
- report_data->FormatHandle(cb_state->commandBuffer).c_str(), cb_state->submitCount + current_submit_count);
+ skip |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0,
+ kVUID_Core_DrawState_CommandBufferSingleSubmitViolation,
+ "Commandbuffer %s was begun w/ VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT set, but has been submitted 0x%" PRIxLEAST64
+ "times.",
+ dev_data->report_data->FormatHandle(cb_state->commandBuffer).c_str(), cb_state->submitCount + current_submit_count);
}
// Validate that cmd buffers have been updated
switch (cb_state->state) {
case CB_INVALID_INCOMPLETE:
case CB_INVALID_COMPLETE:
- skip |= ReportInvalidCommandBuffer(cb_state, call_source);
+ skip |= ReportInvalidCommandBuffer(dev_data, cb_state, call_source);
break;
case CB_NEW:
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
(uint64_t)(cb_state->commandBuffer), vu_id,
- "%s used in the call to %s is unrecorded and contains no commands.",
- report_data->FormatHandle(cb_state->commandBuffer).c_str(), call_source);
+ "Command buffer %s used in the call to %s is unrecorded and contains no commands.",
+ dev_data->report_data->FormatHandle(cb_state->commandBuffer).c_str(), call_source);
break;
case CB_RECORDING:
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(cb_state->commandBuffer), kVUID_Core_DrawState_NoEndCommandBuffer,
- "You must call vkEndCommandBuffer() on %s before this call to %s!",
- report_data->FormatHandle(cb_state->commandBuffer).c_str(), call_source);
+ "You must call vkEndCommandBuffer() on command buffer %s before this call to %s!",
+ dev_data->report_data->FormatHandle(cb_state->commandBuffer).c_str(), call_source);
break;
default: /* recorded */
@@ -2973,9 +2840,29 @@ bool CoreChecks::ValidateCommandBufferState(const CMD_BUFFER_STATE *cb_state, co
return skip;
}
+bool CoreChecks::ValidateResources(layer_data *dev_data, GLOBAL_CB_NODE *cb_node) {
+ bool skip = false;
+
+ // TODO : We should be able to remove the NULL look-up checks from the code below as long as
+ // all the corresponding cases are verified to cause CB_INVALID state and the CB_INVALID state
+ // should then be flagged prior to calling this function
+ for (const auto &draw_data_element : cb_node->draw_data) {
+ for (const auto &vertex_buffer_binding : draw_data_element.vertex_buffer_bindings) {
+ auto buffer_state = GetBufferState(vertex_buffer_binding.buffer);
+ if ((vertex_buffer_binding.buffer != VK_NULL_HANDLE) && (!buffer_state)) {
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+ HandleToUint64(vertex_buffer_binding.buffer), kVUID_Core_DrawState_InvalidBuffer,
+ "Cannot submit cmd buffer using deleted buffer %s.",
+ dev_data->report_data->FormatHandle(vertex_buffer_binding.buffer).c_str());
+ }
+ }
+ }
+ return skip;
+}
+
// Check that the queue family index of 'queue' matches one of the entries in pQueueFamilyIndices
-bool CoreChecks::ValidImageBufferQueue(const CMD_BUFFER_STATE *cb_node, const VulkanTypedHandle &object, VkQueue queue,
- uint32_t count, const uint32_t *indices) const {
+bool CoreChecks::ValidImageBufferQueue(layer_data *dev_data, GLOBAL_CB_NODE *cb_node, const VK_OBJECT *object, VkQueue queue,
+ uint32_t count, const uint32_t *indices) {
bool found = false;
bool skip = false;
auto queue_state = GetQueueState(queue);
@@ -2988,12 +2875,12 @@ bool CoreChecks::ValidImageBufferQueue(const CMD_BUFFER_STATE *cb_node, const Vu
}
if (!found) {
- skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, get_debug_report_enum[object.type], object.handle,
- kVUID_Core_DrawState_InvalidQueueFamily,
- "vkQueueSubmit: %s contains %s which was not created allowing concurrent access to "
+ skip = log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, get_debug_report_enum[object->type],
+ object->handle, kVUID_Core_DrawState_InvalidQueueFamily,
+ "vkQueueSubmit: Command buffer %s contains %s %s which was not created allowing concurrent access to "
"this queue family %d.",
- report_data->FormatHandle(cb_node->commandBuffer).c_str(), report_data->FormatHandle(object).c_str(),
- queue_state->queueFamilyIndex);
+ dev_data->report_data->FormatHandle(cb_node->commandBuffer).c_str(), object_string[object->type],
+ dev_data->report_data->FormatHandle(object->handle).c_str(), queue_state->queueFamilyIndex);
}
}
return skip;
@@ -3001,33 +2888,33 @@ bool CoreChecks::ValidImageBufferQueue(const CMD_BUFFER_STATE *cb_node, const Vu
// Validate that queueFamilyIndices of primary command buffers match this queue
// Secondary command buffers were previously validated in vkCmdExecuteCommands().
-bool CoreChecks::ValidateQueueFamilyIndices(const CMD_BUFFER_STATE *pCB, VkQueue queue) const {
+bool CoreChecks::ValidateQueueFamilyIndices(layer_data *dev_data, GLOBAL_CB_NODE *pCB, VkQueue queue) {
bool skip = false;
- auto pPool = GetCommandPoolState(pCB->createInfo.commandPool);
+ auto pPool = GetCommandPoolNode(pCB->createInfo.commandPool);
auto queue_state = GetQueueState(queue);
if (pPool && queue_state) {
if (pPool->queueFamilyIndex != queue_state->queueFamilyIndex) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(pCB->commandBuffer), "VUID-vkQueueSubmit-pCommandBuffers-00074",
- "vkQueueSubmit: Primary %s created in queue family %d is being submitted on %s "
+ "vkQueueSubmit: Primary command buffer %s created in queue family %d is being submitted on queue %s "
"from queue family %d.",
- report_data->FormatHandle(pCB->commandBuffer).c_str(), pPool->queueFamilyIndex,
- report_data->FormatHandle(queue).c_str(), queue_state->queueFamilyIndex);
+ dev_data->report_data->FormatHandle(pCB->commandBuffer).c_str(), pPool->queueFamilyIndex,
+ dev_data->report_data->FormatHandle(queue).c_str(), queue_state->queueFamilyIndex);
}
// Ensure that any bound images or buffers created with SHARING_MODE_CONCURRENT have access to the current queue family
- for (const auto &object : pCB->object_bindings) {
+ for (auto object : pCB->object_bindings) {
if (object.type == kVulkanObjectTypeImage) {
- auto image_state = GetImageState(object.Cast<VkImage>());
+ auto image_state = GetImageState(reinterpret_cast<VkImage &>(object.handle));
if (image_state && image_state->createInfo.sharingMode == VK_SHARING_MODE_CONCURRENT) {
- skip |= ValidImageBufferQueue(pCB, object, queue, image_state->createInfo.queueFamilyIndexCount,
+ skip |= ValidImageBufferQueue(dev_data, pCB, &object, queue, image_state->createInfo.queueFamilyIndexCount,
image_state->createInfo.pQueueFamilyIndices);
}
} else if (object.type == kVulkanObjectTypeBuffer) {
- auto buffer_state = GetBufferState(object.Cast<VkBuffer>());
+ auto buffer_state = GetBufferState(reinterpret_cast<VkBuffer &>(object.handle));
if (buffer_state && buffer_state->createInfo.sharingMode == VK_SHARING_MODE_CONCURRENT) {
- skip |= ValidImageBufferQueue(pCB, object, queue, buffer_state->createInfo.queueFamilyIndexCount,
+ skip |= ValidImageBufferQueue(dev_data, pCB, &object, queue, buffer_state->createInfo.queueFamilyIndexCount,
buffer_state->createInfo.pQueueFamilyIndices);
}
}
@@ -3037,66 +2924,73 @@ bool CoreChecks::ValidateQueueFamilyIndices(const CMD_BUFFER_STATE *pCB, VkQueue
return skip;
}
-bool CoreChecks::ValidatePrimaryCommandBufferState(const CMD_BUFFER_STATE *pCB, int current_submit_count,
+bool CoreChecks::ValidatePrimaryCommandBufferState(layer_data *dev_data, GLOBAL_CB_NODE *pCB, int current_submit_count,
QFOTransferCBScoreboards<VkImageMemoryBarrier> *qfo_image_scoreboards,
- QFOTransferCBScoreboards<VkBufferMemoryBarrier> *qfo_buffer_scoreboards) const {
+ QFOTransferCBScoreboards<VkBufferMemoryBarrier> *qfo_buffer_scoreboards) {
// Track in-use for resources off of primary and any secondary CBs
bool skip = false;
- // If USAGE_SIMULTANEOUS_USE_BIT not set then CB cannot already be executing on device
- skip |= ValidateCommandBufferSimultaneousUse(pCB, current_submit_count);
+ // If USAGE_SIMULTANEOUS_USE_BIT not set then CB cannot already be executing
+ // on device
+ skip |= ValidateCommandBufferSimultaneousUse(dev_data, pCB, current_submit_count);
- skip |= ValidateQueuedQFOTransfers(pCB, qfo_image_scoreboards, qfo_buffer_scoreboards);
+ skip |= ValidateResources(dev_data, pCB);
+ skip |= ValidateQueuedQFOTransfers(dev_data, pCB, qfo_image_scoreboards, qfo_buffer_scoreboards);
for (auto pSubCB : pCB->linkedCommandBuffers) {
- skip |= ValidateQueuedQFOTransfers(pSubCB, qfo_image_scoreboards, qfo_buffer_scoreboards);
+ skip |= ValidateResources(dev_data, pSubCB);
+ skip |= ValidateQueuedQFOTransfers(dev_data, pSubCB, qfo_image_scoreboards, qfo_buffer_scoreboards);
// TODO: replace with InvalidateCommandBuffers() at recording.
if ((pSubCB->primaryCommandBuffer != pCB->commandBuffer) &&
!(pSubCB->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0,
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0,
"VUID-vkQueueSubmit-pCommandBuffers-00073",
- "%s was submitted with secondary %s but that buffer has subsequently been bound to "
- "primary %s and it does not have VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT set.",
- report_data->FormatHandle(pCB->commandBuffer).c_str(), report_data->FormatHandle(pSubCB->commandBuffer).c_str(),
- report_data->FormatHandle(pSubCB->primaryCommandBuffer).c_str());
+ "Commandbuffer %s was submitted with secondary buffer %s but that buffer has subsequently been bound to "
+ "primary cmd buffer %s and it does not have VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT set.",
+ dev_data->report_data->FormatHandle(pCB->commandBuffer).c_str(),
+ dev_data->report_data->FormatHandle(pSubCB->commandBuffer).c_str(),
+ dev_data->report_data->FormatHandle(pSubCB->primaryCommandBuffer).c_str());
}
}
- skip |= ValidateCommandBufferState(pCB, "vkQueueSubmit()", current_submit_count, "VUID-vkQueueSubmit-pCommandBuffers-00072");
+ skip |= ValidateCommandBufferState(dev_data, pCB, "vkQueueSubmit()", current_submit_count,
+ "VUID-vkQueueSubmit-pCommandBuffers-00072");
return skip;
}
-bool CoreChecks::ValidateFenceForSubmit(const FENCE_STATE *pFence) const {
+bool CoreChecks::ValidateFenceForSubmit(layer_data *dev_data, FENCE_NODE *pFence) {
bool skip = false;
if (pFence && pFence->scope == kSyncScopeInternal) {
if (pFence->state == FENCE_INFLIGHT) {
// TODO: opportunities for "VUID-vkQueueSubmit-fence-00064", "VUID-vkQueueBindSparse-fence-01114",
// "VUID-vkAcquireNextImageKHR-fence-01287"
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT,
HandleToUint64(pFence->fence), kVUID_Core_DrawState_InvalidFence,
- "%s is already in use by another submission.", report_data->FormatHandle(pFence->fence).c_str());
+ "Fence %s is already in use by another submission.",
+ dev_data->report_data->FormatHandle(pFence->fence).c_str());
}
else if (pFence->state == FENCE_RETIRED) {
// TODO: opportunities for "VUID-vkQueueSubmit-fence-00063", "VUID-vkQueueBindSparse-fence-01113",
// "VUID-vkAcquireNextImageKHR-fence-01287"
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT,
HandleToUint64(pFence->fence), kVUID_Core_MemTrack_FenceState,
- "%s submitted in SIGNALED state. Fences must be reset before being submitted",
- report_data->FormatHandle(pFence->fence).c_str());
+ "Fence %s submitted in SIGNALED state. Fences must be reset before being submitted",
+ dev_data->report_data->FormatHandle(pFence->fence).c_str());
}
}
return skip;
}
-void ValidationStateTracker::PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits,
- VkFence fence, VkResult result) {
+void CoreChecks::PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits, VkFence fence,
+ VkResult result) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
uint64_t early_retire_seq = 0;
auto pQueue = GetQueueState(queue);
- auto pFence = GetFenceState(fence);
+ auto pFence = GetFenceNode(fence);
if (pFence) {
if (pFence->scope == kSyncScopeInternal) {
@@ -3112,13 +3006,14 @@ void ValidationStateTracker::PostCallRecordQueueSubmit(VkQueue queue, uint32_t s
} else {
// Retire work up until this fence early, we will not see the wait that corresponds to this signal
early_retire_seq = pQueue->seq + pQueue->submissions.size();
- if (!external_sync_warning) {
- external_sync_warning = true;
- log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, HandleToUint64(fence),
- kVUID_Core_DrawState_QueueForwardProgress,
- "vkQueueSubmit(): Signaling external %s on %s will disable validation of preceding command "
+ if (!device_data->external_sync_warning) {
+ device_data->external_sync_warning = true;
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT,
+ HandleToUint64(fence), kVUID_Core_DrawState_QueueForwardProgress,
+ "vkQueueSubmit(): Signaling external fence %s on queue %s will disable validation of preceding command "
"buffer lifecycle states and the in-use status of associated objects.",
- report_data->FormatHandle(fence).c_str(), report_data->FormatHandle(queue).c_str());
+ device_data->report_data->FormatHandle(fence).c_str(),
+ device_data->report_data->FormatHandle(queue).c_str());
}
}
}
@@ -3132,7 +3027,7 @@ void ValidationStateTracker::PostCallRecordQueueSubmit(VkQueue queue, uint32_t s
vector<VkSemaphore> semaphore_externals;
for (uint32_t i = 0; i < submit->waitSemaphoreCount; ++i) {
VkSemaphore semaphore = submit->pWaitSemaphores[i];
- auto pSemaphore = GetSemaphoreState(semaphore);
+ auto pSemaphore = GetSemaphoreNode(semaphore);
if (pSemaphore) {
if (pSemaphore->scope == kSyncScopeInternal) {
if (pSemaphore->signaler.first != VK_NULL_HANDLE) {
@@ -3152,7 +3047,7 @@ void ValidationStateTracker::PostCallRecordQueueSubmit(VkQueue queue, uint32_t s
}
for (uint32_t i = 0; i < submit->signalSemaphoreCount; ++i) {
VkSemaphore semaphore = submit->pSignalSemaphores[i];
- auto pSemaphore = GetSemaphoreState(semaphore);
+ auto pSemaphore = GetSemaphoreNode(semaphore);
if (pSemaphore) {
if (pSemaphore->scope == kSyncScopeInternal) {
pSemaphore->signaler.first = queue;
@@ -3163,156 +3058,51 @@ void ValidationStateTracker::PostCallRecordQueueSubmit(VkQueue queue, uint32_t s
} else {
// Retire work up until this submit early, we will not see the wait that corresponds to this signal
early_retire_seq = std::max(early_retire_seq, pQueue->seq + pQueue->submissions.size() + 1);
- if (!external_sync_warning) {
- external_sync_warning = true;
- log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
- HandleToUint64(semaphore), kVUID_Core_DrawState_QueueForwardProgress,
- "vkQueueSubmit(): Signaling external %s on %s will disable validation of preceding "
+ if (!device_data->external_sync_warning) {
+ device_data->external_sync_warning = true;
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, HandleToUint64(semaphore),
+ kVUID_Core_DrawState_QueueForwardProgress,
+ "vkQueueSubmit(): Signaling external semaphore %s on queue %s will disable validation of preceding "
"command buffer lifecycle states and the in-use status of associated objects.",
- report_data->FormatHandle(semaphore).c_str(), report_data->FormatHandle(queue).c_str());
+ device_data->report_data->FormatHandle(semaphore).c_str(),
+ device_data->report_data->FormatHandle(queue).c_str());
}
}
}
}
for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
- auto cb_node = GetCBState(submit->pCommandBuffers[i]);
+ auto cb_node = GetCBNode(submit->pCommandBuffers[i]);
if (cb_node) {
cbs.push_back(submit->pCommandBuffers[i]);
for (auto secondaryCmdBuffer : cb_node->linkedCommandBuffers) {
cbs.push_back(secondaryCmdBuffer->commandBuffer);
- IncrementResources(secondaryCmdBuffer);
+ UpdateCmdBufImageLayouts(device_data, secondaryCmdBuffer);
+ IncrementResources(device_data, secondaryCmdBuffer);
+ RecordQueuedQFOTransfers(device_data, secondaryCmdBuffer);
}
- IncrementResources(cb_node);
+ UpdateCmdBufImageLayouts(device_data, cb_node);
+ IncrementResources(device_data, cb_node);
+ RecordQueuedQFOTransfers(device_data, cb_node);
}
}
pQueue->submissions.emplace_back(cbs, semaphore_waits, semaphore_signals, semaphore_externals,
- submit_idx == submitCount - 1 ? fence : (VkFence)VK_NULL_HANDLE);
+ submit_idx == submitCount - 1 ? fence : VK_NULL_HANDLE);
}
if (early_retire_seq) {
- RetireWorkOnQueue(pQueue, early_retire_seq, true);
+ RetireWorkOnQueue(device_data, pQueue, early_retire_seq);
}
-}
-void CoreChecks::PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits, VkFence fence,
- VkResult result) {
- StateTracker::PostCallRecordQueueSubmit(queue, submitCount, pSubmits, fence, result);
-
-    // The triply nested loop below duplicates what is in the StateTracker, but avoids the need for two additional callbacks.
- for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
- const VkSubmitInfo *submit = &pSubmits[submit_idx];
- for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
- auto cb_node = GetCBState(submit->pCommandBuffers[i]);
- if (cb_node) {
- for (auto secondaryCmdBuffer : cb_node->linkedCommandBuffers) {
- UpdateCmdBufImageLayouts(secondaryCmdBuffer);
- RecordQueuedQFOTransfers(secondaryCmdBuffer);
- }
- UpdateCmdBufImageLayouts(cb_node);
- RecordQueuedQFOTransfers(cb_node);
- }
- }
- }
-
- if (enabled.gpu_validation) {
- GpuPostCallQueueSubmit(queue, submitCount, pSubmits, fence);
+ if (GetEnables()->gpu_validation) {
+ GpuPostCallQueueSubmit(device_data, queue, submitCount, pSubmits, fence);
}
}
-bool CoreChecks::ValidateSemaphoresForSubmit(VkQueue queue, const VkSubmitInfo *submit,
- unordered_set<VkSemaphore> *unsignaled_sema_arg,
- unordered_set<VkSemaphore> *signaled_sema_arg,
- unordered_set<VkSemaphore> *internal_sema_arg) const {
- bool skip = false;
- unordered_set<VkSemaphore> &signaled_semaphores = *signaled_sema_arg;
- unordered_set<VkSemaphore> &unsignaled_semaphores = *unsignaled_sema_arg;
- unordered_set<VkSemaphore> &internal_semaphores = *internal_sema_arg;
-
- for (uint32_t i = 0; i < submit->waitSemaphoreCount; ++i) {
- skip |=
- ValidateStageMaskGsTsEnables(submit->pWaitDstStageMask[i], "vkQueueSubmit()",
- "VUID-VkSubmitInfo-pWaitDstStageMask-00076", "VUID-VkSubmitInfo-pWaitDstStageMask-00077",
- "VUID-VkSubmitInfo-pWaitDstStageMask-02089", "VUID-VkSubmitInfo-pWaitDstStageMask-02090");
- VkSemaphore semaphore = submit->pWaitSemaphores[i];
- const auto *pSemaphore = GetSemaphoreState(semaphore);
- if (pSemaphore && (pSemaphore->scope == kSyncScopeInternal || internal_semaphores.count(semaphore))) {
- if (unsignaled_semaphores.count(semaphore) || (!(signaled_semaphores.count(semaphore)) && !(pSemaphore->signaled))) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
- HandleToUint64(semaphore), kVUID_Core_DrawState_QueueForwardProgress,
- "%s is waiting on %s that has no way to be signaled.", report_data->FormatHandle(queue).c_str(),
- report_data->FormatHandle(semaphore).c_str());
- } else {
- signaled_semaphores.erase(semaphore);
- unsignaled_semaphores.insert(semaphore);
- }
- }
- if (pSemaphore && pSemaphore->scope == kSyncScopeExternalTemporary) {
- internal_semaphores.insert(semaphore);
- }
- }
- for (uint32_t i = 0; i < submit->signalSemaphoreCount; ++i) {
- VkSemaphore semaphore = submit->pSignalSemaphores[i];
- const auto *pSemaphore = GetSemaphoreState(semaphore);
- if (pSemaphore && (pSemaphore->scope == kSyncScopeInternal || internal_semaphores.count(semaphore))) {
- if (signaled_semaphores.count(semaphore) || (!(unsignaled_semaphores.count(semaphore)) && pSemaphore->signaled)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
- HandleToUint64(semaphore), kVUID_Core_DrawState_QueueForwardProgress,
- "%s is signaling %s that was previously signaled by %s but has not since "
- "been waited on by any queue.",
- report_data->FormatHandle(queue).c_str(), report_data->FormatHandle(semaphore).c_str(),
- report_data->FormatHandle(pSemaphore->signaler.first).c_str());
- } else {
- unsignaled_semaphores.erase(semaphore);
- signaled_semaphores.insert(semaphore);
- }
- }
- }
-
- return skip;
-}
-bool CoreChecks::ValidateCommandBuffersForSubmit(VkQueue queue, const VkSubmitInfo *submit,
- ImageSubresPairLayoutMap *localImageLayoutMap_arg,
- vector<VkCommandBuffer> *current_cmds_arg) const {
- bool skip = false;
-
- ImageSubresPairLayoutMap &localImageLayoutMap = *localImageLayoutMap_arg;
- vector<VkCommandBuffer> &current_cmds = *current_cmds_arg;
-
- QFOTransferCBScoreboards<VkImageMemoryBarrier> qfo_image_scoreboards;
- QFOTransferCBScoreboards<VkBufferMemoryBarrier> qfo_buffer_scoreboards;
-
- for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
- const auto *cb_node = GetCBState(submit->pCommandBuffers[i]);
- if (cb_node) {
- skip |= ValidateCmdBufImageLayouts(cb_node, imageLayoutMap, &localImageLayoutMap);
- current_cmds.push_back(submit->pCommandBuffers[i]);
- skip |= ValidatePrimaryCommandBufferState(
- cb_node, (int)std::count(current_cmds.begin(), current_cmds.end(), submit->pCommandBuffers[i]),
- &qfo_image_scoreboards, &qfo_buffer_scoreboards);
- skip |= ValidateQueueFamilyIndices(cb_node, queue);
-
- // Potential early exit here as bad object state may crash in delayed function calls
- if (skip) {
- return true;
- }
-
- // Call submit-time functions to validate/update state
- for (auto &function : cb_node->queue_submit_functions) {
- skip |= function();
- }
- for (auto &function : cb_node->eventUpdates) {
- skip |= function(queue);
- }
- for (auto &function : cb_node->queryUpdates) {
- skip |= function(queue);
- }
- }
- }
- return skip;
-}
bool CoreChecks::PreCallValidateQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits, VkFence fence) {
- const auto *pFence = GetFenceState(fence);
- bool skip = ValidateFenceForSubmit(pFence);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
+ auto pFence = GetFenceNode(fence);
+ bool skip = ValidateFenceForSubmit(device_data, pFence);
if (skip) {
return true;
}
@@ -3321,29 +3111,87 @@ bool CoreChecks::PreCallValidateQueueSubmit(VkQueue queue, uint32_t submitCount,
unordered_set<VkSemaphore> unsignaled_semaphores;
unordered_set<VkSemaphore> internal_semaphores;
vector<VkCommandBuffer> current_cmds;
- ImageSubresPairLayoutMap localImageLayoutMap;
+ unordered_map<ImageSubresourcePair, IMAGE_LAYOUT_NODE> localImageLayoutMap;
// Now verify each individual submit
for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
const VkSubmitInfo *submit = &pSubmits[submit_idx];
- skip |= ValidateSemaphoresForSubmit(queue, submit, &unsignaled_semaphores, &signaled_semaphores, &internal_semaphores);
- skip |= ValidateCommandBuffersForSubmit(queue, submit, &localImageLayoutMap, &current_cmds);
+ for (uint32_t i = 0; i < submit->waitSemaphoreCount; ++i) {
+ skip |= ValidateStageMaskGsTsEnables(
+ device_data, submit->pWaitDstStageMask[i], "vkQueueSubmit()", "VUID-VkSubmitInfo-pWaitDstStageMask-00076",
+ "VUID-VkSubmitInfo-pWaitDstStageMask-00077", "VUID-VkSubmitInfo-pWaitDstStageMask-02089",
+ "VUID-VkSubmitInfo-pWaitDstStageMask-02090");
+ VkSemaphore semaphore = submit->pWaitSemaphores[i];
+ auto pSemaphore = GetSemaphoreNode(semaphore);
+ if (pSemaphore && (pSemaphore->scope == kSyncScopeInternal || internal_semaphores.count(semaphore))) {
+ if (unsignaled_semaphores.count(semaphore) ||
+ (!(signaled_semaphores.count(semaphore)) && !(pSemaphore->signaled))) {
+ skip |=
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
+ HandleToUint64(semaphore), kVUID_Core_DrawState_QueueForwardProgress,
+ "Queue %s is waiting on semaphore %s that has no way to be signaled.",
+ device_data->report_data->FormatHandle(queue).c_str(),
+ device_data->report_data->FormatHandle(semaphore).c_str());
+ } else {
+ signaled_semaphores.erase(semaphore);
+ unsignaled_semaphores.insert(semaphore);
+ }
+ }
+ if (pSemaphore && pSemaphore->scope == kSyncScopeExternalTemporary) {
+ internal_semaphores.insert(semaphore);
+ }
+ }
+ for (uint32_t i = 0; i < submit->signalSemaphoreCount; ++i) {
+ VkSemaphore semaphore = submit->pSignalSemaphores[i];
+ auto pSemaphore = GetSemaphoreNode(semaphore);
+ if (pSemaphore && (pSemaphore->scope == kSyncScopeInternal || internal_semaphores.count(semaphore))) {
+ if (signaled_semaphores.count(semaphore) || (!(unsignaled_semaphores.count(semaphore)) && pSemaphore->signaled)) {
+ skip |=
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
+ HandleToUint64(semaphore), kVUID_Core_DrawState_QueueForwardProgress,
+ "Queue %s is signaling semaphore %s that was previously signaled by queue %s but has not since "
+ "been waited on by any queue.",
+ device_data->report_data->FormatHandle(queue).c_str(),
+ device_data->report_data->FormatHandle(semaphore).c_str(),
+ device_data->report_data->FormatHandle(pSemaphore->signaler.first).c_str());
+ } else {
+ unsignaled_semaphores.erase(semaphore);
+ signaled_semaphores.insert(semaphore);
+ }
+ }
+ }
+ QFOTransferCBScoreboards<VkImageMemoryBarrier> qfo_image_scoreboards;
+ QFOTransferCBScoreboards<VkBufferMemoryBarrier> qfo_buffer_scoreboards;
- auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupSubmitInfo>(submit->pNext);
- if (chained_device_group_struct && chained_device_group_struct->commandBufferCount > 0) {
- for (uint32_t i = 0; i < chained_device_group_struct->commandBufferCount; ++i) {
- skip |= ValidateDeviceMaskToPhysicalDeviceCount(chained_device_group_struct->pCommandBufferDeviceMasks[i],
- VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, HandleToUint64(queue),
- "VUID-VkDeviceGroupSubmitInfo-pCommandBufferDeviceMasks-00086");
+ for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
+ auto cb_node = GetCBNode(submit->pCommandBuffers[i]);
+ if (cb_node) {
+ skip |= ValidateCmdBufImageLayouts(device_data, cb_node, device_data->imageLayoutMap, localImageLayoutMap);
+ current_cmds.push_back(submit->pCommandBuffers[i]);
+ skip |= ValidatePrimaryCommandBufferState(
+ device_data, cb_node, (int)std::count(current_cmds.begin(), current_cmds.end(), submit->pCommandBuffers[i]),
+ &qfo_image_scoreboards, &qfo_buffer_scoreboards);
+ skip |= ValidateQueueFamilyIndices(device_data, cb_node, queue);
+
+ // Potential early exit here as bad object state may crash in delayed function calls
+ if (skip) {
+ return true;
+ }
+
+ // Call submit-time functions to validate/update state
+ for (auto &function : cb_node->queue_submit_functions) {
+ skip |= function();
+ }
+ for (auto &function : cb_node->eventUpdates) {
+ skip |= function(queue);
+ }
+ for (auto &function : cb_node->queryUpdates) {
+ skip |= function(queue);
+ }
}
}
}
return skip;
}
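
As a concrete illustration of the semaphore bookkeeping above (the signaled/unsignaled sets), a submission like the following sketch trips the "has no way to be signaled" kVUID_Core_DrawState_QueueForwardProgress message, because nothing signals the semaphore before it is waited on; the handles are assumed valid and the semaphore freshly created:

    #include <vulkan/vulkan.h>

    // Hypothetical misuse: wait on a semaphore that no prior submit signals.
    void SubmitWithUnsignalableWait(VkDevice device, VkQueue queue, VkCommandBuffer cmd) {
        VkSemaphoreCreateInfo sem_ci = {VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO};
        VkSemaphore sem = VK_NULL_HANDLE;
        vkCreateSemaphore(device, &sem_ci, nullptr, &sem);

        VkPipelineStageFlags wait_stage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
        VkSubmitInfo submit = {VK_STRUCTURE_TYPE_SUBMIT_INFO};
        submit.waitSemaphoreCount = 1;
        submit.pWaitSemaphores = &sem;          // never signaled by any earlier VkSubmitInfo
        submit.pWaitDstStageMask = &wait_stage;
        submit.commandBufferCount = 1;
        submit.pCommandBuffers = &cmd;

        // PreCallValidateQueueSubmit flags this wait: `sem` is neither already
        // signaled nor present in the signaled_semaphores set built above.
        vkQueueSubmit(queue, 1, &submit, VK_NULL_HANDLE);
    }
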
-void CoreChecks::PreCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits, VkFence fence) {
- if (enabled.gpu_validation && device_extensions.vk_ext_descriptor_indexing) {
- GpuPreCallRecordQueueSubmit(queue, submitCount, pSubmits, fence);
- }
-}
#ifdef VK_USE_PLATFORM_ANDROID_KHR
// Android-specific validation that uses types defined only on Android and only for NDK versions
@@ -3429,8 +3277,9 @@ std::map<VkImageCreateFlags, uint64_t> ahb_create_map_v2a = {
//
// AHB-extension new APIs
//
-bool CoreChecks::PreCallValidateGetAndroidHardwareBufferPropertiesANDROID(VkDevice device, const struct AHardwareBuffer *buffer,
- VkAndroidHardwareBufferPropertiesANDROID *pProperties) {
+bool CoreChecks::PreCallValidateGetAndroidHardwareBufferProperties(VkDevice device, const struct AHardwareBuffer *buffer,
+ VkAndroidHardwareBufferPropertiesANDROID *pProperties) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
bool skip = false;
// buffer must be a valid Android hardware buffer object with at least one of the AHARDWAREBUFFER_USAGE_GPU_* usage flags.
AHardwareBuffer_Desc ahb_desc;
@@ -3439,8 +3288,8 @@ bool CoreChecks::PreCallValidateGetAndroidHardwareBufferPropertiesANDROID(VkDevi
AHARDWAREBUFFER_USAGE_GPU_CUBE_MAP | AHARDWAREBUFFER_USAGE_GPU_MIPMAP_COMPLETE |
AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER;
if (0 == (ahb_desc.usage & required_flags)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
- "VUID-vkGetAndroidHardwareBufferPropertiesANDROID-buffer-01884",
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ HandleToUint64(device_data->device), "VUID-vkGetAndroidHardwareBufferPropertiesANDROID-buffer-01884",
"vkGetAndroidHardwareBufferPropertiesANDROID: The AHardwareBuffer's AHardwareBuffer_Desc.usage (0x%" PRIx64
") does not have any AHARDWAREBUFFER_USAGE_GPU_* flags set.",
ahb_desc.usage);
@@ -3448,45 +3297,47 @@ bool CoreChecks::PreCallValidateGetAndroidHardwareBufferPropertiesANDROID(VkDevi
return skip;
}
-void CoreChecks::PostCallRecordGetAndroidHardwareBufferPropertiesANDROID(VkDevice device, const struct AHardwareBuffer *buffer,
- VkAndroidHardwareBufferPropertiesANDROID *pProperties,
- VkResult result) {
+void CoreChecks::PostCallRecordGetAndroidHardwareBufferProperties(VkDevice device, const struct AHardwareBuffer *buffer,
+ VkAndroidHardwareBufferPropertiesANDROID *pProperties,
+ VkResult result) {
if (VK_SUCCESS != result) return;
auto ahb_format_props = lvl_find_in_chain<VkAndroidHardwareBufferFormatPropertiesANDROID>(pProperties->pNext);
if (ahb_format_props) {
- ahb_ext_formats_set.insert(ahb_format_props->externalFormat);
+ auto ext_formats = GetAHBExternalFormatsSet();
+ ext_formats->insert(ahb_format_props->externalFormat);
}
}
-bool CoreChecks::PreCallValidateGetMemoryAndroidHardwareBufferANDROID(VkDevice device,
- const VkMemoryGetAndroidHardwareBufferInfoANDROID *pInfo,
- struct AHardwareBuffer **pBuffer) {
+bool CoreChecks::PreCallValidateGetMemoryAndroidHardwareBuffer(VkDevice device,
+ const VkMemoryGetAndroidHardwareBufferInfoANDROID *pInfo,
+ struct AHardwareBuffer **pBuffer) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
bool skip = false;
- const DEVICE_MEMORY_STATE *mem_info = GetDevMemState(pInfo->memory);
+ DEVICE_MEM_INFO *mem_info = GetMemObjInfo(pInfo->memory);
// VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID must have been included in
// VkExportMemoryAllocateInfoKHR::handleTypes when memory was created.
if (!mem_info->is_export ||
(0 == (mem_info->export_handle_type_flags & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID))) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
- "VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-handleTypes-01882",
- "vkGetMemoryAndroidHardwareBufferANDROID: %s was not allocated for export, or the "
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ HandleToUint64(device), "VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-handleTypes-01882",
+ "vkGetMemoryAndroidHardwareBufferANDROID: The VkDeviceMemory (%s) was not allocated for export, or the "
"export handleTypes (0x%" PRIx32
") did not contain VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID.",
- report_data->FormatHandle(pInfo->memory).c_str(), mem_info->export_handle_type_flags);
+ device_data->report_data->FormatHandle(pInfo->memory).c_str(), mem_info->export_handle_type_flags);
}
// If the pNext chain of the VkMemoryAllocateInfo used to allocate memory included a VkMemoryDedicatedAllocateInfo
// with non-NULL image member, then that image must already be bound to memory.
if (mem_info->is_dedicated && (VK_NULL_HANDLE != mem_info->dedicated_image)) {
- const auto image_state = GetImageState(mem_info->dedicated_image);
+ auto image_state = GetImageState(mem_info->dedicated_image);
if ((nullptr == image_state) || (0 == (image_state->GetBoundMemory().count(pInfo->memory)))) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
HandleToUint64(device), "VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-pNext-01883",
- "vkGetMemoryAndroidHardwareBufferANDROID: %s was allocated using a dedicated "
- "%s, but that image is not bound to the VkDeviceMemory object.",
- report_data->FormatHandle(pInfo->memory).c_str(),
- report_data->FormatHandle(mem_info->dedicated_image).c_str());
+ "vkGetMemoryAndroidHardwareBufferANDROID: The VkDeviceMemory (%s) was allocated using a dedicated "
+ "image (%s), but that image is not bound to the VkDeviceMemory object.",
+ device_data->report_data->FormatHandle(pInfo->memory).c_str(),
+ device_data->report_data->FormatHandle(mem_info->dedicated_image).c_str());
}
}
@@ -3496,7 +3347,7 @@ bool CoreChecks::PreCallValidateGetMemoryAndroidHardwareBufferANDROID(VkDevice d
//
// AHB-specific validation within non-AHB APIs
//
-bool CoreChecks::ValidateAllocateMemoryANDROID(const VkMemoryAllocateInfo *alloc_info) const {
+bool CoreChecks::ValidateAllocateMemoryANDROID(layer_data *dev_data, const VkMemoryAllocateInfo *alloc_info) {
bool skip = false;
auto import_ahb_info = lvl_find_in_chain<VkImportAndroidHardwareBufferInfoANDROID>(alloc_info->pNext);
auto exp_mem_alloc_info = lvl_find_in_chain<VkExportMemoryAllocateInfo>(alloc_info->pNext);
@@ -3518,8 +3369,8 @@ bool CoreChecks::ValidateAllocateMemoryANDROID(const VkMemoryAllocateInfo *alloc
AHARDWAREBUFFER_USAGE_GPU_CUBE_MAP | AHARDWAREBUFFER_USAGE_GPU_MIPMAP_COMPLETE |
AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT;
if ((0 == (ahb_desc.usage & ahb_equiv_usage_bits)) || (0 == ahb_format_map_a2v.count(ahb_desc.format))) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- HandleToUint64(device), "VUID-VkImportAndroidHardwareBufferInfoANDROID-buffer-01881",
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ HandleToUint64(dev_data->device), "VUID-VkImportAndroidHardwareBufferInfoANDROID-buffer-01881",
"vkAllocateMemory: The AHardwareBuffer_Desc's format ( %u ) and/or usage ( 0x%" PRIx64
" ) are not compatible with Vulkan.",
ahb_desc.format, ahb_desc.usage);
@@ -3539,7 +3390,9 @@ bool CoreChecks::ValidateAllocateMemoryANDROID(const VkMemoryAllocateInfo *alloc
VkExternalBufferProperties ext_buf_props = {};
ext_buf_props.sType = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES;
- DispatchGetPhysicalDeviceExternalBufferProperties(physical_device, &pdebi, &ext_buf_props);
+ instance_layer_data *instance_data = GetLayerDataPtr(get_dispatch_key(dev_data->instance_data->instance), layer_data_map);
+ instance_data->instance_dispatch_table.GetPhysicalDeviceExternalBufferProperties(dev_data->physical_device, &pdebi,
+ &ext_buf_props);
// Collect external format info
VkPhysicalDeviceExternalImageFormatInfo pdeifi = {};
@@ -3577,8 +3430,8 @@ bool CoreChecks::ValidateAllocateMemoryANDROID(const VkMemoryAllocateInfo *alloc
if (0 == (ext_buf_props.externalMemoryProperties.externalMemoryFeatures & VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT)) {
if ((VK_SUCCESS != fmt_lookup_result) || (0 == (ext_img_fmt_props.externalMemoryProperties.externalMemoryFeatures &
VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT))) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- HandleToUint64(device), "VUID-VkImportAndroidHardwareBufferInfoANDROID-buffer-01880",
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ HandleToUint64(dev_data->device), "VUID-VkImportAndroidHardwareBufferInfoANDROID-buffer-01880",
"vkAllocateMemory: Neither the VkExternalImageFormatProperties nor the VkExternalBufferProperties "
"structs for the AHardwareBuffer include the VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT flag.");
}
@@ -3590,12 +3443,13 @@ bool CoreChecks::ValidateAllocateMemoryANDROID(const VkMemoryAllocateInfo *alloc
VkAndroidHardwareBufferPropertiesANDROID ahb_props = {};
ahb_props.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
ahb_props.pNext = &ahb_format_props;
- DispatchGetAndroidHardwareBufferPropertiesANDROID(device, import_ahb_info->buffer, &ahb_props);
+ dev_data->device_dispatch_table.GetAndroidHardwareBufferPropertiesANDROID(dev_data->device, import_ahb_info->buffer,
+ &ahb_props);
// allocationSize must be the size returned by vkGetAndroidHardwareBufferPropertiesANDROID for the Android hardware buffer
if (alloc_info->allocationSize != ahb_props.allocationSize) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- HandleToUint64(device), "VUID-VkMemoryAllocateInfo-allocationSize-02383",
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ HandleToUint64(dev_data->device), "VUID-VkMemoryAllocateInfo-allocationSize-02383",
"vkAllocateMemory: VkMemoryAllocateInfo struct with chained VkImportAndroidHardwareBufferInfoANDROID "
"struct, allocationSize (%" PRId64
") does not match the AHardwareBuffer's reported allocationSize (%" PRId64 ").",
@@ -3606,8 +3460,8 @@ bool CoreChecks::ValidateAllocateMemoryANDROID(const VkMemoryAllocateInfo *alloc
// Note: memoryTypeIndex is an index, memoryTypeBits is a bitmask
uint32_t mem_type_bitmask = 1 << alloc_info->memoryTypeIndex;
if (0 == (mem_type_bitmask & ahb_props.memoryTypeBits)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- HandleToUint64(device), "VUID-VkMemoryAllocateInfo-memoryTypeIndex-02385",
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ HandleToUint64(dev_data->device), "VUID-VkMemoryAllocateInfo-memoryTypeIndex-02385",
"vkAllocateMemory: VkMemoryAllocateInfo struct with chained VkImportAndroidHardwareBufferInfoANDROID "
"struct, memoryTypeIndex (%" PRId32
") does not correspond to a bit set in AHardwareBuffer's reported "
@@ -3619,25 +3473,26 @@ bool CoreChecks::ValidateAllocateMemoryANDROID(const VkMemoryAllocateInfo *alloc
if ((nullptr == mem_ded_alloc_info) || (VK_NULL_HANDLE == mem_ded_alloc_info->image)) {
// the Android hardware buffer must have a format of AHARDWAREBUFFER_FORMAT_BLOB and a usage that includes
// AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER
- if (((uint64_t)AHARDWAREBUFFER_FORMAT_BLOB != ahb_desc.format) ||
+ if (((uint64_t)AHARDWAREBUFFER_FORMAT_BLOB != ahb_format_props.externalFormat) ||
(0 == (ahb_desc.usage & AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER))) {
skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
- "VUID-VkMemoryAllocateInfo-pNext-02384",
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ HandleToUint64(dev_data->device), "VUID-VkMemoryAllocateInfo-pNext-02384",
"vkAllocateMemory: VkMemoryAllocateInfo struct with chained VkImportAndroidHardwareBufferInfoANDROID "
- "struct without a dedicated allocation requirement, while the AHardwareBuffer_Desc's format ( %u ) is not "
- "AHARDWAREBUFFER_FORMAT_BLOB or usage (0x%" PRIx64 ") does not include AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER.",
- ahb_desc.format, ahb_desc.usage);
+ "struct without a dedicated allocation requirement, while the AHardwareBuffer's external format (0x%" PRIx64
+ ") is not AHARDWAREBUFFER_FORMAT_BLOB or usage (0x%" PRIx64
+ ") does not include AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER.",
+ ahb_format_props.externalFormat, ahb_desc.usage);
}
} else { // Checks specific to import with a dedicated allocation requirement
- const VkImageCreateInfo *ici = &(GetImageState(mem_ded_alloc_info->image)->createInfo);
+ VkImageCreateInfo *ici = &(GetImageState(mem_ded_alloc_info->image)->createInfo);
// The Android hardware buffer's usage must include at least one of AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT or
// AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE
if (0 == (ahb_desc.usage & (AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT | AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE))) {
skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
- "VUID-VkMemoryAllocateInfo-pNext-02386",
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ HandleToUint64(dev_data->device), "VUID-VkMemoryAllocateInfo-pNext-02386",
"vkAllocateMemory: VkMemoryAllocateInfo struct with chained VkImportAndroidHardwareBufferInfoANDROID and a "
"dedicated allocation requirement, while the AHardwareBuffer's usage (0x%" PRIx64
") contains neither AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT nor AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE.",
@@ -3647,8 +3502,8 @@ bool CoreChecks::ValidateAllocateMemoryANDROID(const VkMemoryAllocateInfo *alloc
// the format of image must be VK_FORMAT_UNDEFINED or the format returned by
// vkGetAndroidHardwareBufferPropertiesANDROID
if ((ici->format != ahb_format_props.format) && (VK_FORMAT_UNDEFINED != ici->format)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- HandleToUint64(device), "VUID-VkMemoryAllocateInfo-pNext-02387",
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ HandleToUint64(dev_data->device), "VUID-VkMemoryAllocateInfo-pNext-02387",
"vkAllocateMemory: VkMemoryAllocateInfo struct with chained "
"VkImportAndroidHardwareBufferInfoANDROID, the dedicated allocation image's "
"format (%s) is not VK_FORMAT_UNDEFINED and does not match the AHardwareBuffer's format (%s).",
@@ -3658,8 +3513,8 @@ bool CoreChecks::ValidateAllocateMemoryANDROID(const VkMemoryAllocateInfo *alloc
// The width, height, and array layer dimensions of image and the Android hardwarebuffer must be identical
if ((ici->extent.width != ahb_desc.width) || (ici->extent.height != ahb_desc.height) ||
(ici->arrayLayers != ahb_desc.layers)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- HandleToUint64(device), "VUID-VkMemoryAllocateInfo-pNext-02388",
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ HandleToUint64(dev_data->device), "VUID-VkMemoryAllocateInfo-pNext-02388",
"vkAllocateMemory: VkMemoryAllocateInfo struct with chained "
"VkImportAndroidHardwareBufferInfoANDROID, the dedicated allocation image's "
"width, height, and arrayLayers (%" PRId32 " %" PRId32 " %" PRId32
@@ -3681,8 +3536,8 @@ bool CoreChecks::ValidateAllocateMemoryANDROID(const VkMemoryAllocateInfo *alloc
if ((ahb_desc.usage & AHARDWAREBUFFER_USAGE_GPU_MIPMAP_COMPLETE) && (ici->mipLevels != 1) &&
(ici->mipLevels != FullMipChainLevels(ici->extent))) {
skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- HandleToUint64(device), "VUID-VkMemoryAllocateInfo-pNext-02389",
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ HandleToUint64(dev_data->device), "VUID-VkMemoryAllocateInfo-pNext-02389",
"vkAllocateMemory: VkMemoryAllocateInfo struct with chained VkImportAndroidHardwareBufferInfoANDROID, "
"usage includes AHARDWAREBUFFER_USAGE_GPU_MIPMAP_COMPLETE but mipLevels (%" PRId32
") is neither 1 nor full mip "
@@ -3697,8 +3552,8 @@ bool CoreChecks::ValidateAllocateMemoryANDROID(const VkMemoryAllocateInfo *alloc
~(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT)) {
skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- HandleToUint64(device), "VUID-VkMemoryAllocateInfo-pNext-02390",
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ HandleToUint64(dev_data->device), "VUID-VkMemoryAllocateInfo-pNext-02390",
"vkAllocateMemory: VkMemoryAllocateInfo struct with chained VkImportAndroidHardwareBufferInfoANDROID, "
"dedicated image usage bits include one or more with no AHardwareBuffer equivalent.");
}
@@ -3713,8 +3568,8 @@ bool CoreChecks::ValidateAllocateMemoryANDROID(const VkMemoryAllocateInfo *alloc
}
}
if (illegal_usage) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- HandleToUint64(device), "VUID-VkMemoryAllocateInfo-pNext-02390",
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ HandleToUint64(dev_data->device), "VUID-VkMemoryAllocateInfo-pNext-02390",
"vkAllocateMemory: VkMemoryAllocateInfo struct with chained "
"VkImportAndroidHardwareBufferInfoANDROID, one or more AHardwareBuffer usage bits equivalent to "
"the provided image's usage bits are missing from AHardwareBuffer_Desc.usage.");
@@ -3726,16 +3581,16 @@ bool CoreChecks::ValidateAllocateMemoryANDROID(const VkMemoryAllocateInfo *alloc
(VK_NULL_HANDLE != mem_ded_alloc_info->image)) {
// This is an Android HW Buffer export
if (0 != alloc_info->allocationSize) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- HandleToUint64(device), "VUID-VkMemoryAllocateInfo-pNext-01874",
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ HandleToUint64(dev_data->device), "VUID-VkMemoryAllocateInfo-pNext-01874",
"vkAllocateMemory: pNext chain indicates a dedicated Android Hardware Buffer export allocation, "
"but allocationSize is non-zero.");
}
} else {
if (0 == alloc_info->allocationSize) {
skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
- "VUID-VkMemoryAllocateInfo-pNext-01874",
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ HandleToUint64(dev_data->device), "VUID-VkMemoryAllocateInfo-pNext-01874",
"vkAllocateMemory: pNext chain does not indicate a dedicated export allocation, but allocationSize is 0.");
};
}
@@ -3743,10 +3598,10 @@ bool CoreChecks::ValidateAllocateMemoryANDROID(const VkMemoryAllocateInfo *alloc
return skip;
}
-bool CoreChecks::ValidateGetImageMemoryRequirements2ANDROID(const VkImage image) const {
+bool CoreChecks::ValidateGetImageMemoryRequirements2ANDROID(layer_data *dev_data, const VkImage image) {
bool skip = false;
- const IMAGE_STATE *image_state = GetImageState(image);
+ IMAGE_STATE *image_state = GetImageState(image);
if (image_state->imported_ahb && (0 == image_state->GetBoundMemory().size())) {
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(image),
"VUID-VkImageMemoryRequirementsInfo2-image-01897",
@@ -3778,17 +3633,20 @@ static bool ValidateGetPhysicalDeviceImageFormatProperties2ANDROID(const debug_r
return skip;
}
-bool CoreChecks::ValidateCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info) const {
+bool CoreChecks::ValidateCreateSamplerYcbcrConversionANDROID(const layer_data *dev_data,
+ const VkSamplerYcbcrConversionCreateInfo *create_info) {
const VkExternalFormatANDROID *ext_format_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
if ((nullptr != ext_format_android) && (0 != ext_format_android->externalFormat)) {
if (VK_FORMAT_UNDEFINED != create_info->format) {
- return log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, 0,
+ return log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, 0,
"VUID-VkSamplerYcbcrConversionCreateInfo-format-01904",
"vkCreateSamplerYcbcrConversion[KHR]: CreateInfo format is not VK_FORMAT_UNDEFINED while "
"there is a chained VkExternalFormatANDROID struct.");
}
} else if (VK_FORMAT_UNDEFINED == create_info->format) {
- return log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, 0,
+ return log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, 0,
"VUID-VkSamplerYcbcrConversionCreateInfo-format-01904",
"vkCreateSamplerYcbcrConversion[KHR]: CreateInfo format is VK_FORMAT_UNDEFINED with no chained "
"VkExternalFormatANDROID struct.");
@@ -3796,21 +3654,22 @@ bool CoreChecks::ValidateCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbc
return false;
}
-void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
- VkSamplerYcbcrConversion ycbcr_conversion) {
+void CoreChecks::RecordCreateSamplerYcbcrConversionANDROID(layer_data *dev_data,
+ const VkSamplerYcbcrConversionCreateInfo *create_info,
+ VkSamplerYcbcrConversion ycbcr_conversion) {
const VkExternalFormatANDROID *ext_format_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
if (ext_format_android && (0 != ext_format_android->externalFormat)) {
- ycbcr_conversion_ahb_fmt_map.emplace(ycbcr_conversion, ext_format_android->externalFormat);
+ dev_data->ycbcr_conversion_ahb_fmt_map.emplace(ycbcr_conversion, ext_format_android->externalFormat);
}
};
-void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion) {
- ycbcr_conversion_ahb_fmt_map.erase(ycbcr_conversion);
+void CoreChecks::RecordDestroySamplerYcbcrConversionANDROID(layer_data *dev_data, VkSamplerYcbcrConversion ycbcr_conversion) {
+ dev_data->ycbcr_conversion_ahb_fmt_map.erase(ycbcr_conversion);
};
#else // !VK_USE_PLATFORM_ANDROID_KHR
-bool CoreChecks::ValidateAllocateMemoryANDROID(const VkMemoryAllocateInfo *alloc_info) const { return false; }
+bool CoreChecks::ValidateAllocateMemoryANDROID(layer_data *dev_data, const VkMemoryAllocateInfo *alloc_info) { return false; }
static bool ValidateGetPhysicalDeviceImageFormatProperties2ANDROID(const debug_report_data *report_data,
const VkPhysicalDeviceImageFormatInfo2 *pImageFormatInfo,
@@ -3818,107 +3677,101 @@ static bool ValidateGetPhysicalDeviceImageFormatProperties2ANDROID(const debug_r
return false;
}
-bool CoreChecks::ValidateCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info) const {
+bool CoreChecks::ValidateCreateSamplerYcbcrConversionANDROID(const layer_data *dev_data,
+ const VkSamplerYcbcrConversionCreateInfo *create_info) {
return false;
}
-bool CoreChecks::ValidateGetImageMemoryRequirements2ANDROID(const VkImage image) const { return false; }
+bool CoreChecks::ValidateGetImageMemoryRequirements2ANDROID(layer_data *dev_data, const VkImage image) { return false; }
-void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
- VkSamplerYcbcrConversion ycbcr_conversion){};
+void CoreChecks::RecordCreateSamplerYcbcrConversionANDROID(layer_data *dev_data,
+ const VkSamplerYcbcrConversionCreateInfo *create_info,
+ VkSamplerYcbcrConversion ycbcr_conversion){};
-void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion){};
+void CoreChecks::RecordDestroySamplerYcbcrConversionANDROID(layer_data *dev_data, VkSamplerYcbcrConversion ycbcr_conversion){};
#endif // VK_USE_PLATFORM_ANDROID_KHR
bool CoreChecks::PreCallValidateAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
bool skip = false;
- if (memObjMap.size() >= phys_dev_props.limits.maxMemoryAllocationCount) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
- kVUIDUndefined, "Number of currently valid memory objects is not less than the maximum allowed (%u).",
- phys_dev_props.limits.maxMemoryAllocationCount);
+ if (device_data->memObjMap.size() >= device_data->phys_dev_props.limits.maxMemoryAllocationCount) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ HandleToUint64(device), kVUIDUndefined,
+ "Number of currently valid memory objects is not less than the maximum allowed (%u).",
+ device_data->phys_dev_props.limits.maxMemoryAllocationCount);
}
- if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
- skip |= ValidateAllocateMemoryANDROID(pAllocateInfo);
+ if (GetDeviceExtensions()->vk_android_external_memory_android_hardware_buffer) {
+ skip |= ValidateAllocateMemoryANDROID(device_data, pAllocateInfo);
} else {
if (0 == pAllocateInfo->allocationSize) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
- "VUID-VkMemoryAllocateInfo-allocationSize-00638", "vkAllocateMemory: allocationSize is 0.");
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ HandleToUint64(device), "VUID-VkMemoryAllocateInfo-allocationSize-00638",
+ "vkAllocateMemory: allocationSize is 0.");
};
}
-
- auto chained_flags_struct = lvl_find_in_chain<VkMemoryAllocateFlagsInfo>(pAllocateInfo->pNext);
- if (chained_flags_struct && chained_flags_struct->flags == VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT) {
- skip |= ValidateDeviceMaskToPhysicalDeviceCount(chained_flags_struct->deviceMask, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- HandleToUint64(device), "VUID-VkMemoryAllocateFlagsInfo-deviceMask-00675");
- skip |= ValidateDeviceMaskToZero(chained_flags_struct->deviceMask, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- HandleToUint64(device), "VUID-VkMemoryAllocateFlagsInfo-deviceMask-00676");
- }
// TODO: VUIDs ending in 00643, 00644, 00646, 00647, 01742, 01743, 01745, 00645, 00648, 01744
return skip;
}
-void ValidationStateTracker::PostCallRecordAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
- const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory,
- VkResult result) {
+void CoreChecks::PostCallRecordAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
+ const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory, VkResult result) {
if (VK_SUCCESS == result) {
- AddMemObjInfo(device, *pMemory, pAllocateInfo);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ AddMemObjInfo(device_data, device, *pMemory, pAllocateInfo);
}
return;
}
// For given obj node, if it is use, flag a validation error and return callback result, else return false
-bool CoreChecks::ValidateObjectNotInUse(const BASE_NODE *obj_node, const VulkanTypedHandle &obj_struct, const char *caller_name,
- const char *error_code) const {
- if (disabled.object_in_use) return false;
+bool CoreChecks::ValidateObjectNotInUse(const layer_data *dev_data, BASE_NODE *obj_node, VK_OBJECT obj_struct,
+ const char *caller_name, const char *error_code) {
+ if (dev_data->instance_data->disabled.object_in_use) return false;
bool skip = false;
if (obj_node->in_use.load()) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, get_debug_report_enum[obj_struct.type], obj_struct.handle,
- error_code, "Cannot call %s on %s that is currently in use by a command buffer.", caller_name,
- report_data->FormatHandle(obj_struct).c_str());
+ skip |=
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, get_debug_report_enum[obj_struct.type], obj_struct.handle,
+ error_code, "Cannot call %s on %s %s that is currently in use by a command buffer.", caller_name,
+ object_string[obj_struct.type], dev_data->report_data->FormatHandle(obj_struct.handle).c_str());
}
return skip;
}
bool CoreChecks::PreCallValidateFreeMemory(VkDevice device, VkDeviceMemory mem, const VkAllocationCallbacks *pAllocator) {
- const DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
- const VulkanTypedHandle obj_struct(mem, kVulkanObjectTypeDeviceMemory);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ DEVICE_MEM_INFO *mem_info = GetMemObjInfo(mem);
+ VK_OBJECT obj_struct = {HandleToUint64(mem), kVulkanObjectTypeDeviceMemory};
bool skip = false;
if (mem_info) {
- skip |= ValidateObjectNotInUse(mem_info, obj_struct, "vkFreeMemory", "VUID-vkFreeMemory-memory-00677");
- for (const auto &obj : mem_info->obj_bindings) {
- log_msg(report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, get_debug_report_enum[obj.type], obj.handle,
- kVUID_Core_MemTrack_FreedMemRef, "%s still has a reference to %s.", report_data->FormatHandle(obj).c_str(),
- report_data->FormatHandle(mem_info->mem).c_str());
- }
+ skip |= ValidateObjectNotInUse(device_data, mem_info, obj_struct, "vkFreeMemory", "VUID-vkFreeMemory-memory-00677");
}
return skip;
}
-void ValidationStateTracker::PreCallRecordFreeMemory(VkDevice device, VkDeviceMemory mem, const VkAllocationCallbacks *pAllocator) {
+void CoreChecks::PreCallRecordFreeMemory(VkDevice device, VkDeviceMemory mem, const VkAllocationCallbacks *pAllocator) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (!mem) return;
- DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
- const VulkanTypedHandle obj_struct(mem, kVulkanObjectTypeDeviceMemory);
+ DEVICE_MEM_INFO *mem_info = GetMemObjInfo(mem);
+ VK_OBJECT obj_struct = {HandleToUint64(mem), kVulkanObjectTypeDeviceMemory};
// Clear mem binding for any bound objects
- for (const auto &obj : mem_info->obj_bindings) {
+ for (auto obj : mem_info->obj_bindings) {
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, get_debug_report_enum[obj.type], obj.handle,
+ kVUID_Core_MemTrack_FreedMemRef, "VK Object %s still has a reference to mem obj %s.",
+ device_data->report_data->FormatHandle(obj.handle).c_str(),
+ device_data->report_data->FormatHandle(mem_info->mem).c_str());
BINDABLE *bindable_state = nullptr;
switch (obj.type) {
case kVulkanObjectTypeImage:
- bindable_state = GetImageState(obj.Cast<VkImage>());
+ bindable_state = GetImageState(reinterpret_cast<VkImage &>(obj.handle));
break;
case kVulkanObjectTypeBuffer:
- bindable_state = GetBufferState(obj.Cast<VkBuffer>());
- break;
- case kVulkanObjectTypeAccelerationStructureNV:
- bindable_state = GetAccelerationStructureState(obj.Cast<VkAccelerationStructureNV>());
+ bindable_state = GetBufferState(reinterpret_cast<VkBuffer &>(obj.handle));
break;
-
default:
- // Should only have acceleration structure, buffer, or image objects bound to memory
+ // Should only have buffer or image objects bound to memory
assert(0);
}
@@ -3927,38 +3780,38 @@ void ValidationStateTracker::PreCallRecordFreeMemory(VkDevice device, VkDeviceMe
bindable_state->UpdateBoundMemorySet();
}
// Any bound cmd buffers are now invalid
- InvalidateCommandBuffers(mem_info->cb_bindings, obj_struct);
- memObjMap.erase(mem);
+ InvalidateCommandBuffers(device_data, mem_info->cb_bindings, obj_struct);
+ device_data->memObjMap.erase(mem);
}
// Validate that given Map memory range is valid. This means that the memory should not already be mapped,
// and that the size of the map range should be:
// 1. Not zero
// 2. Within the size of the memory allocation
-bool CoreChecks::ValidateMapMemRange(VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size) {
+static bool ValidateMapMemRange(layer_data *dev_data, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size) {
bool skip = false;
if (size == 0) {
- skip =
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, HandleToUint64(mem),
- kVUID_Core_MemTrack_InvalidMap, "VkMapMemory: Attempting to map memory range of size zero");
+ skip = log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+ HandleToUint64(mem), kVUID_Core_MemTrack_InvalidMap,
+ "VkMapMemory: Attempting to map memory range of size zero");
}
- auto mem_element = memObjMap.find(mem);
- if (mem_element != memObjMap.end()) {
+ auto mem_element = dev_data->memObjMap.find(mem);
+ if (mem_element != dev_data->memObjMap.end()) {
auto mem_info = mem_element->second.get();
// It is an application error to call VkMapMemory on an object that is already mapped
if (mem_info->mem_range.size != 0) {
- skip =
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
- HandleToUint64(mem), kVUID_Core_MemTrack_InvalidMap,
- "VkMapMemory: Attempting to map memory on an already-mapped %s.", report_data->FormatHandle(mem).c_str());
+ skip = log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+ HandleToUint64(mem), kVUID_Core_MemTrack_InvalidMap,
+ "VkMapMemory: Attempting to map memory on an already-mapped object %s.",
+ dev_data->report_data->FormatHandle(mem).c_str());
}
// Validate that offset + size is within object's allocationSize
if (size == VK_WHOLE_SIZE) {
if (offset >= mem_info->alloc_info.allocationSize) {
- skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+ skip = log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
HandleToUint64(mem), kVUID_Core_MemTrack_InvalidMap,
"Mapping Memory from 0x%" PRIx64 " to 0x%" PRIx64
" with size of VK_WHOLE_SIZE oversteps total array size 0x%" PRIx64,
@@ -3966,7 +3819,7 @@ bool CoreChecks::ValidateMapMemRange(VkDeviceMemory mem, VkDeviceSize offset, Vk
}
} else {
if ((offset + size) > mem_info->alloc_info.allocationSize) {
- skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+ skip = log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
HandleToUint64(mem), "VUID-vkMapMemory-size-00681",
"Mapping Memory from 0x%" PRIx64 " to 0x%" PRIx64 " oversteps total array size 0x%" PRIx64 ".",
offset, size + offset, mem_info->alloc_info.allocationSize);
@@ -3976,8 +3829,8 @@ bool CoreChecks::ValidateMapMemRange(VkDeviceMemory mem, VkDeviceSize offset, Vk
return skip;
}
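
The range arithmetic above reduces to two bounds checks; a standalone restatement of the same rule, using assumed illustrative values rather than the layer's state tracking:

    #include <vulkan/vulkan.h>

    // Simplified restatement of the map-range rule: size must be non-zero, and
    // either size == VK_WHOLE_SIZE with offset inside the allocation, or
    // offset + size must not exceed allocationSize.
    bool MapRangeIsValid(VkDeviceSize allocationSize, VkDeviceSize offset, VkDeviceSize size) {
        if (size == 0) return false;
        if (size == VK_WHOLE_SIZE) return offset < allocationSize;
        return (offset + size) <= allocationSize;
    }

    // Example: a 64 KiB allocation mapped at offset 4096 for 8192 bytes is fine,
    // but mapping 64 KiB starting at offset 4096 oversteps the allocation.
    // MapRangeIsValid(65536, 4096, 8192)  -> true
    // MapRangeIsValid(65536, 4096, 65536) -> false
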
-void CoreChecks::StoreMemRanges(VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size) {
- auto mem_info = GetDevMemState(mem);
+void CoreChecks::StoreMemRanges(layer_data *dev_data, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size) {
+ auto mem_info = GetMemObjInfo(mem);
if (mem_info) {
mem_info->mem_range.offset = offset;
mem_info->mem_range.size = size;
@@ -3987,21 +3840,22 @@ void CoreChecks::StoreMemRanges(VkDeviceMemory mem, VkDeviceSize offset, VkDevic
// Guard value for pad data
static char NoncoherentMemoryFillValue = 0xb;
-void CoreChecks::InitializeAndTrackMemory(VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size, void **ppData) {
- auto mem_info = GetDevMemState(mem);
+void CoreChecks::InitializeAndTrackMemory(layer_data *dev_data, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size,
+ void **ppData) {
+ auto mem_info = GetMemObjInfo(mem);
if (mem_info) {
mem_info->p_driver_data = *ppData;
uint32_t index = mem_info->alloc_info.memoryTypeIndex;
- if (phys_dev_mem_props.memoryTypes[index].propertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) {
+ if (dev_data->phys_dev_mem_props.memoryTypes[index].propertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) {
mem_info->shadow_copy = 0;
} else {
if (size == VK_WHOLE_SIZE) {
size = mem_info->alloc_info.allocationSize - offset;
}
- mem_info->shadow_pad_size = phys_dev_props.limits.minMemoryMapAlignment;
- assert(SafeModulo(mem_info->shadow_pad_size, phys_dev_props.limits.minMemoryMapAlignment) == 0);
+ mem_info->shadow_pad_size = dev_data->phys_dev_props.limits.minMemoryMapAlignment;
+ assert(SafeModulo(mem_info->shadow_pad_size, dev_data->phys_dev_props.limits.minMemoryMapAlignment) == 0);
// Ensure start of mapped region reflects hardware alignment constraints
- uint64_t map_alignment = phys_dev_props.limits.minMemoryMapAlignment;
+ uint64_t map_alignment = dev_data->phys_dev_props.limits.minMemoryMapAlignment;
// From spec: (ppData - offset) must be aligned to at least limits::minMemoryMapAlignment.
uint64_t start_offset = offset % map_alignment;
@@ -4022,12 +3876,30 @@ void CoreChecks::InitializeAndTrackMemory(VkDeviceMemory mem, VkDeviceSize offse
}
}
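
To make the alignment note quoted above concrete, a minimal sketch of carving an aligned pointer out of a padded shadow buffer might look like the following; the layer's actual bookkeeping is more involved, and `shadow_base`, `offset`, and `map_alignment` are assumed inputs:

    #include <cstdint>

    // Return a pointer p such that (p - offset) is a multiple of map_alignment,
    // mirroring the "(ppData - offset) must be aligned" requirement above.
    char *AlignedShadowPointer(char *shadow_base, uint64_t offset, uint64_t map_alignment) {
        uint64_t start_offset = offset % map_alignment;
        uintptr_t base = reinterpret_cast<uintptr_t>(shadow_base);
        // Round up to the next multiple of map_alignment, then add the phase so
        // subtracting `offset` lands back on an aligned address.
        uintptr_t aligned = ((base + map_alignment - 1) / map_alignment) * map_alignment;
        return reinterpret_cast<char *>(aligned + start_offset);
    }
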
-void CoreChecks::RetireFence(VkFence fence) {
- auto pFence = GetFenceState(fence);
+// Verify that state for fence being waited on is appropriate. That is,
+// a fence being waited on should not already be signaled and
+// it should have been submitted on a queue or during acquire next image
+bool CoreChecks::VerifyWaitFenceState(layer_data *dev_data, VkFence fence, const char *apiCall) {
+ bool skip = false;
+
+ auto pFence = GetFenceNode(fence);
+ if (pFence && pFence->scope == kSyncScopeInternal) {
+ if (pFence->state == FENCE_UNSIGNALED) {
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT,
+ HandleToUint64(fence), kVUID_Core_MemTrack_FenceState,
+ "%s called for fence %s which has not been submitted on a Queue or during acquire next image.", apiCall,
+ dev_data->report_data->FormatHandle(fence).c_str());
+ }
+ }
+ return skip;
+}
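
Assuming a freshly created fence, the following sketch shows the situation this warning targets: the fence was never handed to a queue submission or acquire, so the wait can never complete:

    #include <vulkan/vulkan.h>
    #include <cstdint>

    // Hypothetical misuse flagged by VerifyWaitFenceState: waiting on a fence
    // that was never passed to vkQueueSubmit or an acquire-next-image call.
    void WaitOnUnsubmittedFence(VkDevice device) {
        VkFenceCreateInfo fence_ci = {VK_STRUCTURE_TYPE_FENCE_CREATE_INFO};
        VkFence fence = VK_NULL_HANDLE;
        vkCreateFence(device, &fence_ci, nullptr, &fence);

        // The fence starts unsignaled and nothing will ever signal it, so this
        // wait blocks or times out; the layer reports kVUID_Core_MemTrack_FenceState.
        vkWaitForFences(device, 1, &fence, VK_TRUE, UINT64_MAX);
    }
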
+
+void CoreChecks::RetireFence(layer_data *dev_data, VkFence fence) {
+ auto pFence = GetFenceNode(fence);
if (pFence && pFence->scope == kSyncScopeInternal) {
if (pFence->signaler.first != VK_NULL_HANDLE) {
// Fence signaller is a queue -- use this as proof that prior operations on that queue have completed.
- RetireWorkOnQueue(GetQueueState(pFence->signaler.first), pFence->signaler.second, true);
+ RetireWorkOnQueue(dev_data, GetQueueState(pFence->signaler.first), pFence->signaler.second);
} else {
// Fence signaller is the WSI. We're not tracking what the WSI op actually /was/ in CV yet, but we need to mark
// the fence as retired.
@@ -4038,22 +3910,26 @@ void CoreChecks::RetireFence(VkFence fence) {
bool CoreChecks::PreCallValidateWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences, VkBool32 waitAll,
uint64_t timeout) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
// Verify fence status of submitted fences
+ if (device_data->instance_data->disabled.wait_for_fences) return false;
bool skip = false;
for (uint32_t i = 0; i < fenceCount; i++) {
- skip |= VerifyQueueStateToFence(pFences[i]);
+ skip |= VerifyWaitFenceState(device_data, pFences[i], "vkWaitForFences");
+ skip |= VerifyQueueStateToFence(device_data, pFences[i]);
}
return skip;
}
void CoreChecks::PostCallRecordWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences, VkBool32 waitAll,
uint64_t timeout, VkResult result) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (VK_SUCCESS != result) return;
// When we know that all fences are complete we can clean/remove their CBs
if ((VK_TRUE == waitAll) || (1 == fenceCount)) {
for (uint32_t i = 0; i < fenceCount; i++) {
- RetireFence(pFences[i]);
+ RetireFence(device_data, pFences[i]);
}
}
// NOTE : Alternate case not handled here is when some fences have completed. In
@@ -4061,31 +3937,37 @@ void CoreChecks::PostCallRecordWaitForFences(VkDevice device, uint32_t fenceCoun
// vkGetFenceStatus() at which point we'll clean/remove their CBs if complete.
}
+bool CoreChecks::PreCallValidateGetFenceStatus(VkDevice device, VkFence fence) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ return VerifyWaitFenceState(device_data, fence, "vkGetFenceStatus()");
+}
+
void CoreChecks::PostCallRecordGetFenceStatus(VkDevice device, VkFence fence, VkResult result) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (VK_SUCCESS != result) return;
- RetireFence(fence);
+ RetireFence(device_data, fence);
}
-void ValidationStateTracker::RecordGetDeviceQueueState(uint32_t queue_family_index, VkQueue queue) {
+void CoreChecks::RecordGetDeviceQueueState(layer_data *device_data, uint32_t queue_family_index, VkQueue queue) {
// Add queue to tracking set only if it is new
- auto queue_is_new = queues.emplace(queue);
+ auto queue_is_new = device_data->queues.emplace(queue);
if (queue_is_new.second == true) {
- QUEUE_STATE *queue_state = &queueMap[queue];
+ QUEUE_STATE *queue_state = &device_data->queueMap[queue];
queue_state->queue = queue;
queue_state->queueFamilyIndex = queue_family_index;
queue_state->seq = 0;
}
}
-bool CoreChecks::ValidateGetDeviceQueue(uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue *pQueue, const char *valid_qfi_vuid,
- const char *qfi_in_range_vuid) const {
+bool CoreChecks::ValidateGetDeviceQueue(layer_data *device_data, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue *pQueue,
+ const char *valid_qfi_vuid, const char *qfi_in_range_vuid) {
bool skip = false;
- skip |= ValidateDeviceQueueFamily(queueFamilyIndex, "vkGetDeviceQueue", "queueFamilyIndex", valid_qfi_vuid);
- const auto &queue_data = queue_family_index_map.find(queueFamilyIndex);
- if (queue_data != queue_family_index_map.end() && queue_data->second <= queueIndex) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
- qfi_in_range_vuid,
+ skip |= ValidateDeviceQueueFamily(device_data, queueFamilyIndex, "vkGetDeviceQueue", "queueFamilyIndex", valid_qfi_vuid);
+ const auto &queue_data = device_data->queue_family_index_map.find(queueFamilyIndex);
+ if (queue_data != device_data->queue_family_index_map.end() && queue_data->second <= queueIndex) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ HandleToUint64(device_data->device), qfi_in_range_vuid,
"vkGetDeviceQueue: queueIndex (=%" PRIu32
") is not less than the number of queues requested from queueFamilyIndex (=%" PRIu32
") when the device was created (i.e. is not less than %" PRIu32 ").",
@@ -4095,184 +3977,273 @@ bool CoreChecks::ValidateGetDeviceQueue(uint32_t queueFamilyIndex, uint32_t queu
}
bool CoreChecks::PreCallValidateGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue *pQueue) {
- return ValidateGetDeviceQueue(queueFamilyIndex, queueIndex, pQueue, "VUID-vkGetDeviceQueue-queueFamilyIndex-00384",
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ return ValidateGetDeviceQueue(device_data, queueFamilyIndex, queueIndex, pQueue, "VUID-vkGetDeviceQueue-queueFamilyIndex-00384",
"VUID-vkGetDeviceQueue-queueIndex-00385");
}
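
The queueIndex bound checked here comes from device creation; assuming the device was created with a single VkDeviceQueueCreateInfo as in the sketch below, only queueIndex 0 and 1 are valid for that family, and anything larger triggers VUID-vkGetDeviceQueue-queueIndex-00385 above:

    #include <vulkan/vulkan.h>

    // Illustrative device creation requesting two queues from one family; the
    // physical device and family index are assumed to be valid.
    VkDevice CreateDeviceWithTwoQueues(VkPhysicalDevice gpu, uint32_t family) {
        float priorities[2] = {1.0f, 1.0f};
        VkDeviceQueueCreateInfo queue_ci = {VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO};
        queue_ci.queueFamilyIndex = family;
        queue_ci.queueCount = 2;              // valid queueIndex values: 0 and 1
        queue_ci.pQueuePriorities = priorities;

        VkDeviceCreateInfo device_ci = {VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO};
        device_ci.queueCreateInfoCount = 1;
        device_ci.pQueueCreateInfos = &queue_ci;

        VkDevice device = VK_NULL_HANDLE;
        vkCreateDevice(gpu, &device_ci, nullptr, &device);
        return device;
    }
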
-void ValidationStateTracker::PostCallRecordGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex,
- VkQueue *pQueue) {
- RecordGetDeviceQueueState(queueFamilyIndex, *pQueue);
+void CoreChecks::PostCallRecordGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue *pQueue) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ RecordGetDeviceQueueState(device_data, queueFamilyIndex, *pQueue);
}
-void ValidationStateTracker::PostCallRecordGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2 *pQueueInfo, VkQueue *pQueue) {
- RecordGetDeviceQueueState(pQueueInfo->queueFamilyIndex, *pQueue);
+void CoreChecks::PostCallRecordGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2 *pQueueInfo, VkQueue *pQueue) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ RecordGetDeviceQueueState(device_data, pQueueInfo->queueFamilyIndex, *pQueue);
}
bool CoreChecks::PreCallValidateQueueWaitIdle(VkQueue queue) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
QUEUE_STATE *queue_state = GetQueueState(queue);
- return VerifyQueueStateToSeq(queue_state, queue_state->seq + queue_state->submissions.size());
+ if (device_data->instance_data->disabled.queue_wait_idle) return false;
+ return VerifyQueueStateToSeq(device_data, queue_state, queue_state->seq + queue_state->submissions.size());
}
void CoreChecks::PostCallRecordQueueWaitIdle(VkQueue queue, VkResult result) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
if (VK_SUCCESS != result) return;
QUEUE_STATE *queue_state = GetQueueState(queue);
- RetireWorkOnQueue(queue_state, queue_state->seq + queue_state->submissions.size(), true);
+ RetireWorkOnQueue(device_data, queue_state, queue_state->seq + queue_state->submissions.size());
}
bool CoreChecks::PreCallValidateDeviceWaitIdle(VkDevice device) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ if (device_data->instance_data->disabled.device_wait_idle) return false;
bool skip = false;
- for (auto &queue : queueMap) {
- skip |= VerifyQueueStateToSeq(&queue.second, queue.second.seq + queue.second.submissions.size());
+ for (auto &queue : device_data->queueMap) {
+ skip |= VerifyQueueStateToSeq(device_data, &queue.second, queue.second.seq + queue.second.submissions.size());
}
return skip;
}
void CoreChecks::PostCallRecordDeviceWaitIdle(VkDevice device, VkResult result) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (VK_SUCCESS != result) return;
- for (auto &queue : queueMap) {
- RetireWorkOnQueue(&queue.second, queue.second.seq + queue.second.submissions.size(), true);
+ for (auto &queue : device_data->queueMap) {
+ RetireWorkOnQueue(device_data, &queue.second, queue.second.seq + queue.second.submissions.size());
}
}
bool CoreChecks::PreCallValidateDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) {
- const FENCE_STATE *fence_node = GetFenceState(fence);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ FENCE_NODE *fence_node = GetFenceNode(fence);
bool skip = false;
if (fence_node) {
if (fence_node->scope == kSyncScopeInternal && fence_node->state == FENCE_INFLIGHT) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, HandleToUint64(fence),
- "VUID-vkDestroyFence-fence-01120", "%s is in use.", report_data->FormatHandle(fence).c_str());
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT,
+ HandleToUint64(fence), "VUID-vkDestroyFence-fence-01120", "Fence %s is in use.",
+ device_data->report_data->FormatHandle(fence).c_str());
}
}
return skip;
}
-void ValidationStateTracker::PreCallRecordDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) {
+void CoreChecks::PreCallRecordDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (!fence) return;
- fenceMap.erase(fence);
+ device_data->fenceMap.erase(fence);
}
bool CoreChecks::PreCallValidateDestroySemaphore(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks *pAllocator) {
- const SEMAPHORE_STATE *sema_node = GetSemaphoreState(semaphore);
- const VulkanTypedHandle obj_struct(semaphore, kVulkanObjectTypeSemaphore);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ SEMAPHORE_NODE *sema_node = GetSemaphoreNode(semaphore);
+ VK_OBJECT obj_struct = {HandleToUint64(semaphore), kVulkanObjectTypeSemaphore};
+ if (device_data->instance_data->disabled.destroy_semaphore) return false;
bool skip = false;
if (sema_node) {
- skip |= ValidateObjectNotInUse(sema_node, obj_struct, "vkDestroySemaphore", "VUID-vkDestroySemaphore-semaphore-01137");
+ skip |= ValidateObjectNotInUse(device_data, sema_node, obj_struct, "vkDestroySemaphore",
+ "VUID-vkDestroySemaphore-semaphore-01137");
}
return skip;
}
-void ValidationStateTracker::PreCallRecordDestroySemaphore(VkDevice device, VkSemaphore semaphore,
- const VkAllocationCallbacks *pAllocator) {
+void CoreChecks::PreCallRecordDestroySemaphore(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks *pAllocator) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (!semaphore) return;
- semaphoreMap.erase(semaphore);
+ device_data->semaphoreMap.erase(semaphore);
}
bool CoreChecks::PreCallValidateDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) {
- const EVENT_STATE *event_state = GetEventState(event);
- const VulkanTypedHandle obj_struct(event, kVulkanObjectTypeEvent);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ EVENT_STATE *event_state = GetEventNode(event);
+ VK_OBJECT obj_struct = {HandleToUint64(event), kVulkanObjectTypeEvent};
bool skip = false;
if (event_state) {
- skip |= ValidateObjectNotInUse(event_state, obj_struct, "vkDestroyEvent", "VUID-vkDestroyEvent-event-01145");
+ skip |= ValidateObjectNotInUse(device_data, event_state, obj_struct, "vkDestroyEvent", "VUID-vkDestroyEvent-event-01145");
}
return skip;
}
-void ValidationStateTracker::PreCallRecordDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) {
+void CoreChecks::PreCallRecordDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (!event) return;
- EVENT_STATE *event_state = GetEventState(event);
- const VulkanTypedHandle obj_struct(event, kVulkanObjectTypeEvent);
- InvalidateCommandBuffers(event_state->cb_bindings, obj_struct);
- eventMap.erase(event);
+ EVENT_STATE *event_state = GetEventNode(event);
+ VK_OBJECT obj_struct = {HandleToUint64(event), kVulkanObjectTypeEvent};
+ InvalidateCommandBuffers(device_data, event_state->cb_bindings, obj_struct);
+ device_data->eventMap.erase(event);
}
bool CoreChecks::PreCallValidateDestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks *pAllocator) {
- if (disabled.query_validation) return false;
- const QUERY_POOL_STATE *qp_state = GetQueryPoolState(queryPool);
- const VulkanTypedHandle obj_struct(queryPool, kVulkanObjectTypeQueryPool);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ QUERY_POOL_NODE *qp_state = GetQueryPoolNode(queryPool);
+ VK_OBJECT obj_struct = {HandleToUint64(queryPool), kVulkanObjectTypeQueryPool};
bool skip = false;
if (qp_state) {
- skip |= ValidateObjectNotInUse(qp_state, obj_struct, "vkDestroyQueryPool", "VUID-vkDestroyQueryPool-queryPool-00793");
+ skip |= ValidateObjectNotInUse(device_data, qp_state, obj_struct, "vkDestroyQueryPool",
+ "VUID-vkDestroyQueryPool-queryPool-00793");
}
return skip;
}
-void ValidationStateTracker::PreCallRecordDestroyQueryPool(VkDevice device, VkQueryPool queryPool,
- const VkAllocationCallbacks *pAllocator) {
+void CoreChecks::PreCallRecordDestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks *pAllocator) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (!queryPool) return;
- QUERY_POOL_STATE *qp_state = GetQueryPoolState(queryPool);
- const VulkanTypedHandle obj_struct(queryPool, kVulkanObjectTypeQueryPool);
- InvalidateCommandBuffers(qp_state->cb_bindings, obj_struct);
- queryPoolMap.erase(queryPool);
+ QUERY_POOL_NODE *qp_state = GetQueryPoolNode(queryPool);
+ VK_OBJECT obj_struct = {HandleToUint64(queryPool), kVulkanObjectTypeQueryPool};
+ InvalidateCommandBuffers(device_data, qp_state->cb_bindings, obj_struct);
+ device_data->queryPoolMap.erase(queryPool);
}
-bool CoreChecks::ValidateGetQueryPoolResultsFlags(VkQueryPool queryPool, VkQueryResultFlags flags) const {
+bool CoreChecks::PreCallValidateGetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
+ uint32_t queryCount, size_t dataSize, void *pData, VkDeviceSize stride,
+ VkQueryResultFlags flags) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
bool skip = false;
- const auto query_pool_state = GetQueryPoolState(queryPool);
- if (query_pool_state) {
- if ((query_pool_state->createInfo.queryType == VK_QUERY_TYPE_TIMESTAMP) && (flags & VK_QUERY_RESULT_PARTIAL_BIT)) {
- skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, HandleToUint64(queryPool),
- "VUID-vkGetQueryPoolResults-queryType-00818",
- "%s was created with a queryType of VK_QUERY_TYPE_TIMESTAMP but flags contains VK_QUERY_RESULT_PARTIAL_BIT.",
- report_data->FormatHandle(queryPool).c_str());
+ auto query_pool_state = device_data->queryPoolMap.find(queryPool);
+ if (query_pool_state != device_data->queryPoolMap.end()) {
+ if ((query_pool_state->second.createInfo.queryType == VK_QUERY_TYPE_TIMESTAMP) && (flags & VK_QUERY_RESULT_PARTIAL_BIT)) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, 0,
+ "VUID-vkGetQueryPoolResults-queryType-00818",
+ "QueryPool %s was created with a queryType of VK_QUERY_TYPE_TIMESTAMP but flags contains "
+ "VK_QUERY_RESULT_PARTIAL_BIT.",
+ device_data->report_data->FormatHandle(queryPool).c_str());
}
}
return skip;
}
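
The flags check above rejects VK_QUERY_RESULT_PARTIAL_BIT on timestamp query pools. A small usage sketch of a read-back that passes the check, assuming "device" is a valid VkDevice and the two timestamps have already been written on a queue:

    VkQueryPoolCreateInfo qp_ci = {};
    qp_ci.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
    qp_ci.queryType = VK_QUERY_TYPE_TIMESTAMP;
    qp_ci.queryCount = 2;
    VkQueryPool pool = VK_NULL_HANDLE;
    vkCreateQueryPool(device, &qp_ci, nullptr, &pool);

    uint64_t results[2] = {};
    // 64-bit results, waiting until both timestamps are available.
    vkGetQueryPoolResults(device, pool, 0, 2, sizeof(results), results, sizeof(uint64_t),
                          VK_QUERY_RESULT_64_BIT | VK_QUERY_RESULT_WAIT_BIT);
    // Adding VK_QUERY_RESULT_PARTIAL_BIT to that flags value would trigger
    // VUID-vkGetQueryPoolResults-queryType-00818 for this timestamp pool.
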
-bool CoreChecks::ValidateGetQueryPoolResultsQueries(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount) const {
- bool skip = false;
- QueryObject query_obj{queryPool, 0u};
+void CoreChecks::PostCallRecordGetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount,
+ size_t dataSize, void *pData, VkDeviceSize stride, VkQueryResultFlags flags,
+ VkResult result) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+
+ if ((VK_SUCCESS != result) && (VK_NOT_READY != result)) return;
+ // TODO: clean this up, it's insanely wasteful.
+ unordered_map<QueryObject, std::vector<VkCommandBuffer>> queries_in_flight;
+ for (auto cmd_buffer : device_data->commandBufferMap) {
+ if (cmd_buffer.second->in_use.load()) {
+ for (auto query_state_pair : cmd_buffer.second->queryToStateMap) {
+ queries_in_flight[query_state_pair.first].push_back(cmd_buffer.first);
+ }
+ }
+ }
for (uint32_t i = 0; i < queryCount; ++i) {
- query_obj.query = firstQuery + i;
- if (queryToStateMap.count(query_obj) == 0) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT,
- HandleToUint64(queryPool), kVUID_Core_DrawState_InvalidQuery,
- "vkGetQueryPoolResults() on %s and query %" PRIu32 ": unknown query",
- report_data->FormatHandle(queryPool).c_str(), query_obj.query);
+ QueryObject query = {queryPool, firstQuery + i};
+ auto qif_pair = queries_in_flight.find(query);
+ auto query_state_pair = device_data->queryToStateMap.find(query);
+ if (query_state_pair != device_data->queryToStateMap.end()) {
+ // Available and in flight
+ if (qif_pair != queries_in_flight.end() && query_state_pair != device_data->queryToStateMap.end() &&
+ query_state_pair->second) {
+ for (auto cmd_buffer : qif_pair->second) {
+ auto cb = GetCBNode(cmd_buffer);
+ auto query_event_pair = cb->waitedEventsBeforeQueryReset.find(query);
+ if (query_event_pair != cb->waitedEventsBeforeQueryReset.end()) {
+ for (auto event : query_event_pair->second) {
+ device_data->eventMap[event].needsSignaled = true;
+ }
+ }
+ }
+ }
}
}
- return skip;
}
-bool CoreChecks::PreCallValidateGetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
- uint32_t queryCount, size_t dataSize, void *pData, VkDeviceSize stride,
- VkQueryResultFlags flags) {
- if (disabled.query_validation) return false;
- bool skip = false;
- skip |= ValidateQueryPoolStride("VUID-vkGetQueryPoolResults-flags-00814", "VUID-vkGetQueryPoolResults-flags-00815", stride,
- "dataSize", dataSize, flags);
- skip |= ValidateGetQueryPoolResultsFlags(queryPool, flags);
- skip |= ValidateGetQueryPoolResultsQueries(queryPool, firstQuery, queryCount);
-
- return skip;
+// Return true if the given ranges intersect, else false.
+// Prereq: for both ranges, range->end - range->start > 0; violating that would already have been
+//  reported as an error, so it is not re-checked here.
+// When one range is linear and the other is non-linear, the comparison is padded out to
+//  bufferImageGranularity. In that padded case, if an alias is detected a validation warning is
+//  reported and *skip may be set by the callback, so callers on the validation path should merge
+//  in the returned skip value. Passing skip_checks=true suppresses the warning for call sites
+//  outside the validation path.
+bool CoreChecks::RangesIntersect(layer_data const *dev_data, MEMORY_RANGE const *range1, MEMORY_RANGE const *range2, bool *skip,
+ bool skip_checks) {
+ *skip = false;
+ auto r1_start = range1->start;
+ auto r1_end = range1->end;
+ auto r2_start = range2->start;
+ auto r2_end = range2->end;
+ VkDeviceSize pad_align = 1;
+ if (range1->linear != range2->linear) {
+ pad_align = dev_data->phys_dev_props.limits.bufferImageGranularity;
+ }
+ if ((r1_end & ~(pad_align - 1)) < (r2_start & ~(pad_align - 1))) return false;
+ if ((r1_start & ~(pad_align - 1)) > (r2_end & ~(pad_align - 1))) return false;
+
+ if (!skip_checks && (range1->linear != range2->linear)) {
+ // In linear vs. non-linear case, warn of aliasing
+ const char *r1_linear_str = range1->linear ? "Linear" : "Non-linear";
+ const char *r1_type_str = range1->image ? "image" : "buffer";
+ const char *r2_linear_str = range2->linear ? "linear" : "non-linear";
+ const char *r2_type_str = range2->image ? "image" : "buffer";
+ auto obj_type = range1->image ? VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT : VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT;
+ *skip |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, obj_type, range1->handle, kVUID_Core_MemTrack_InvalidAliasing,
+ "%s %s %s is aliased with %s %s %s which may indicate a bug. For further info refer to the Buffer-Image Granularity "
+ "section of the Vulkan specification. "
+ "(https://www.khronos.org/registry/vulkan/specs/1.0-extensions/xhtml/vkspec.html#resources-bufferimagegranularity)",
+ r1_linear_str, r1_type_str, dev_data->report_data->FormatHandle(range1->handle).c_str(), r2_linear_str, r2_type_str,
+ dev_data->report_data->FormatHandle(range2->handle).c_str());
+ }
+ // Ranges intersect
+ return true;
+}
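
A worked example of the masking used above, under an assumed bufferImageGranularity of 0x400 bytes; the addresses are illustrative only:

    VkDeviceSize pad_align = 0x400;                 // assumption for illustration only
    VkDeviceSize r1_start = 0x000, r1_end = 0x0ff;  // e.g. a linear buffer entirely within block 0
    VkDeviceSize r2_start = 0x300, r2_end = 0x5ff;  // e.g. a non-linear image touching blocks 0 and 1
    bool disjoint = ((r1_end & ~(pad_align - 1)) < (r2_start & ~(pad_align - 1))) ||
                    ((r1_start & ~(pad_align - 1)) > (r2_end & ~(pad_align - 1)));
    // Both starts mask down to 0x000 and r2_end masks to 0x400, so disjoint == false even though
    // the byte ranges themselves never overlap; RangesIntersect() would warn about the aliasing
    // because the two resources share a granularity-sized block.
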
+// Simplified RangesIntersect that calls above function to check range1 for intersection with offset & end addresses
+bool CoreChecks::RangesIntersect(layer_data const *dev_data, MEMORY_RANGE const *range1, VkDeviceSize offset, VkDeviceSize end) {
+ // Create a local MEMORY_RANGE struct to wrap offset/size
+ MEMORY_RANGE range_wrap;
+    // Sync linear with range1 to avoid padding and the potential aliasing warning
+ range_wrap.linear = range1->linear;
+ range_wrap.start = offset;
+ range_wrap.end = end;
+ bool tmp_bool;
+ return RangesIntersect(dev_data, range1, &range_wrap, &tmp_bool, true);
}
-bool CoreChecks::ValidateInsertMemoryRange(const VulkanTypedHandle &typed_handle, const DEVICE_MEMORY_STATE *mem_info,
- VkDeviceSize memoryOffset, const VkMemoryRequirements &memRequirements, bool is_linear,
- const char *api_name) const {
+bool CoreChecks::ValidateInsertMemoryRange(layer_data const *dev_data, uint64_t handle, DEVICE_MEM_INFO *mem_info,
+ VkDeviceSize memoryOffset, VkMemoryRequirements memRequirements, bool is_image,
+ bool is_linear, const char *api_name) {
bool skip = false;
- if (memoryOffset >= mem_info->alloc_info.allocationSize) {
- const char *error_code = nullptr;
- if (typed_handle.type == kVulkanObjectTypeBuffer) {
- error_code = "VUID-vkBindBufferMemory-memoryOffset-01031";
- } else if (typed_handle.type == kVulkanObjectTypeImage) {
- error_code = "VUID-vkBindImageMemory-memoryOffset-01046";
- } else if (typed_handle.type == kVulkanObjectTypeAccelerationStructureNV) {
- error_code = "VUID-VkBindAccelerationStructureMemoryInfoNV-memoryOffset-02451";
- } else {
- // Unsupported object type
- assert(false);
+ MEMORY_RANGE range;
+ range.image = is_image;
+ range.handle = handle;
+ range.linear = is_linear;
+ range.memory = mem_info->mem;
+ range.start = memoryOffset;
+ range.size = memRequirements.size;
+ range.end = memoryOffset + memRequirements.size - 1;
+ range.aliases.clear();
+
+ // Check for aliasing problems.
+ for (auto &obj_range_pair : mem_info->bound_ranges) {
+ auto check_range = &obj_range_pair.second;
+ bool intersection_error = false;
+ if (RangesIntersect(dev_data, &range, check_range, &intersection_error, false)) {
+ skip |= intersection_error;
+ range.aliases.insert(check_range);
}
+ }
- skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+ if (memoryOffset >= mem_info->alloc_info.allocationSize) {
+ const char *error_code =
+ is_image ? "VUID-vkBindImageMemory-memoryOffset-01046" : "VUID-vkBindBufferMemory-memoryOffset-01031";
+ skip = log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
HandleToUint64(mem_info->mem), error_code,
- "In %s, attempting to bind %s to %s, memoryOffset=0x%" PRIxLEAST64
+ "In %s, attempting to bind memory (%s) to object (%s), memoryOffset=0x%" PRIxLEAST64
" must be less than the memory allocation size 0x%" PRIxLEAST64 ".",
- api_name, report_data->FormatHandle(mem_info->mem).c_str(), report_data->FormatHandle(typed_handle).c_str(),
- memoryOffset, mem_info->alloc_info.allocationSize);
+ api_name, dev_data->report_data->FormatHandle(mem_info->mem).c_str(),
+ dev_data->report_data->FormatHandle(handle).c_str(), memoryOffset, mem_info->alloc_info.allocationSize);
}
return skip;
@@ -4285,124 +4256,131 @@ bool CoreChecks::ValidateInsertMemoryRange(const VulkanTypedHandle &typed_handle
// Record a memory range binding for the given handle in mem_info; no validation messages are emitted here
// is_image indicates an image object, otherwise handle is for a buffer
// is_linear indicates a buffer or linear image
-void ValidationStateTracker::InsertMemoryRange(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info,
- VkDeviceSize memoryOffset, VkMemoryRequirements memRequirements, bool is_linear) {
- if (typed_handle.type == kVulkanObjectTypeImage) {
- mem_info->bound_images.insert(typed_handle.handle);
- } else if (typed_handle.type == kVulkanObjectTypeBuffer) {
- mem_info->bound_buffers.insert(typed_handle.handle);
- } else if (typed_handle.type == kVulkanObjectTypeAccelerationStructureNV) {
- mem_info->bound_acceleration_structures.insert(typed_handle.handle);
- } else {
- // Unsupported object type
- assert(false);
- }
-}
-
-bool CoreChecks::ValidateInsertImageMemoryRange(VkImage image, const DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset,
- const VkMemoryRequirements &mem_reqs, bool is_linear, const char *api_name) const {
- return ValidateInsertMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info, mem_offset, mem_reqs, is_linear,
- api_name);
-}
-void ValidationStateTracker::InsertImageMemoryRange(VkImage image, DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset,
- VkMemoryRequirements mem_reqs, bool is_linear) {
- InsertMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info, mem_offset, mem_reqs, is_linear);
-}
-
-bool CoreChecks::ValidateInsertBufferMemoryRange(VkBuffer buffer, const DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset,
- const VkMemoryRequirements &mem_reqs, const char *api_name) const {
- return ValidateInsertMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info, mem_offset, mem_reqs, true,
- api_name);
-}
-void ValidationStateTracker::InsertBufferMemoryRange(VkBuffer buffer, DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset,
- const VkMemoryRequirements &mem_reqs) {
- InsertMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info, mem_offset, mem_reqs, true);
-}
-
-bool CoreChecks::ValidateInsertAccelerationStructureMemoryRange(VkAccelerationStructureNV as, const DEVICE_MEMORY_STATE *mem_info,
- VkDeviceSize mem_offset, const VkMemoryRequirements &mem_reqs,
- const char *api_name) const {
- return ValidateInsertMemoryRange(VulkanTypedHandle(as, kVulkanObjectTypeAccelerationStructureNV), mem_info, mem_offset,
- mem_reqs, true, api_name);
-}
-void ValidationStateTracker::InsertAccelerationStructureMemoryRange(VkAccelerationStructureNV as, DEVICE_MEMORY_STATE *mem_info,
- VkDeviceSize mem_offset, const VkMemoryRequirements &mem_reqs) {
- InsertMemoryRange(VulkanTypedHandle(as, kVulkanObjectTypeAccelerationStructureNV), mem_info, mem_offset, mem_reqs, true);
-}
-
-// This function will remove the handle-to-index mapping from the appropriate map.
-static void RemoveMemoryRange(uint64_t handle, DEVICE_MEMORY_STATE *mem_info, VulkanObjectType object_type) {
- if (object_type == kVulkanObjectTypeImage) {
+void CoreChecks::InsertMemoryRange(layer_data const *dev_data, uint64_t handle, DEVICE_MEM_INFO *mem_info,
+ VkDeviceSize memoryOffset, VkMemoryRequirements memRequirements, bool is_image, bool is_linear) {
+ MEMORY_RANGE range;
+
+ range.image = is_image;
+ range.handle = handle;
+ range.linear = is_linear;
+ range.memory = mem_info->mem;
+ range.start = memoryOffset;
+ range.size = memRequirements.size;
+ range.end = memoryOffset + memRequirements.size - 1;
+ range.aliases.clear();
+    // Update memory aliasing info
+    // Save aliased ranges so we can copy them into the final map entry below. We can't do it inside the loop because we
+    // don't yet have the final pointer; inserting into the map before the loop to get that pointer could make us enter
+    // the loop unnecessarily and compare the range against itself
+ std::unordered_set<MEMORY_RANGE *> tmp_alias_ranges;
+ for (auto &obj_range_pair : mem_info->bound_ranges) {
+ auto check_range = &obj_range_pair.second;
+ bool intersection_error = false;
+ if (RangesIntersect(dev_data, &range, check_range, &intersection_error, true)) {
+ range.aliases.insert(check_range);
+ tmp_alias_ranges.insert(check_range);
+ }
+ }
+ mem_info->bound_ranges[handle] = std::move(range);
+ for (auto tmp_range : tmp_alias_ranges) {
+ tmp_range->aliases.insert(&mem_info->bound_ranges[handle]);
+ }
+ if (is_image)
+ mem_info->bound_images.insert(handle);
+ else
+ mem_info->bound_buffers.insert(handle);
+}
+
+bool CoreChecks::ValidateInsertImageMemoryRange(layer_data const *dev_data, VkImage image, DEVICE_MEM_INFO *mem_info,
+ VkDeviceSize mem_offset, VkMemoryRequirements mem_reqs, bool is_linear,
+ const char *api_name) {
+ return ValidateInsertMemoryRange(dev_data, HandleToUint64(image), mem_info, mem_offset, mem_reqs, true, is_linear, api_name);
+}
+void CoreChecks::InsertImageMemoryRange(layer_data const *dev_data, VkImage image, DEVICE_MEM_INFO *mem_info,
+ VkDeviceSize mem_offset, VkMemoryRequirements mem_reqs, bool is_linear) {
+ InsertMemoryRange(dev_data, HandleToUint64(image), mem_info, mem_offset, mem_reqs, true, is_linear);
+}
+
+bool CoreChecks::ValidateInsertBufferMemoryRange(layer_data const *dev_data, VkBuffer buffer, DEVICE_MEM_INFO *mem_info,
+ VkDeviceSize mem_offset, VkMemoryRequirements mem_reqs, const char *api_name) {
+ return ValidateInsertMemoryRange(dev_data, HandleToUint64(buffer), mem_info, mem_offset, mem_reqs, false, true, api_name);
+}
+void CoreChecks::InsertBufferMemoryRange(layer_data const *dev_data, VkBuffer buffer, DEVICE_MEM_INFO *mem_info,
+ VkDeviceSize mem_offset, VkMemoryRequirements mem_reqs) {
+ InsertMemoryRange(dev_data, HandleToUint64(buffer), mem_info, mem_offset, mem_reqs, false, true);
+}
+
+// Remove the MEMORY_RANGE struct for the given handle from bound_ranges of mem_info
+// is_image indicates whether handle refers to an image or a buffer
+// This function also removes the handle from the appropriate bound_images/bound_buffers
+// set and cleans up any aliases of the range being removed.
+static void RemoveMemoryRange(uint64_t handle, DEVICE_MEM_INFO *mem_info, bool is_image) {
+ auto erase_range = &mem_info->bound_ranges[handle];
+ for (auto alias_range : erase_range->aliases) {
+ alias_range->aliases.erase(erase_range);
+ }
+ erase_range->aliases.clear();
+ mem_info->bound_ranges.erase(handle);
+ if (is_image) {
mem_info->bound_images.erase(handle);
- } else if (object_type == kVulkanObjectTypeBuffer) {
- mem_info->bound_buffers.erase(handle);
- } else if (object_type == kVulkanObjectTypeAccelerationStructureNV) {
- mem_info->bound_acceleration_structures.erase(handle);
} else {
- // Unsupported object type
- assert(false);
+ mem_info->bound_buffers.erase(handle);
}
}
-void ValidationStateTracker::RemoveBufferMemoryRange(uint64_t handle, DEVICE_MEMORY_STATE *mem_info) {
- RemoveMemoryRange(handle, mem_info, kVulkanObjectTypeBuffer);
-}
+void CoreChecks::RemoveBufferMemoryRange(uint64_t handle, DEVICE_MEM_INFO *mem_info) { RemoveMemoryRange(handle, mem_info, false); }
-void ValidationStateTracker::RemoveImageMemoryRange(uint64_t handle, DEVICE_MEMORY_STATE *mem_info) {
- RemoveMemoryRange(handle, mem_info, kVulkanObjectTypeImage);
-}
+void CoreChecks::RemoveImageMemoryRange(uint64_t handle, DEVICE_MEM_INFO *mem_info) { RemoveMemoryRange(handle, mem_info, true); }
-void ValidationStateTracker::RemoveAccelerationStructureMemoryRange(uint64_t handle, DEVICE_MEMORY_STATE *mem_info) {
- RemoveMemoryRange(handle, mem_info, kVulkanObjectTypeAccelerationStructureNV);
-}
-
-bool CoreChecks::ValidateMemoryTypes(const DEVICE_MEMORY_STATE *mem_info, const uint32_t memory_type_bits, const char *funcName,
- const char *msgCode) const {
+bool CoreChecks::ValidateMemoryTypes(const layer_data *dev_data, const DEVICE_MEM_INFO *mem_info, const uint32_t memory_type_bits,
+ const char *funcName, const char *msgCode) {
bool skip = false;
if (((1 << mem_info->alloc_info.memoryTypeIndex) & memory_type_bits) == 0) {
- skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+ skip = log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
HandleToUint64(mem_info->mem), msgCode,
"%s(): MemoryRequirements->memoryTypeBits (0x%X) for this object type are not compatible with the memory "
- "type (0x%X) of %s.",
+ "type (0x%X) of this memory object %s.",
funcName, memory_type_bits, mem_info->alloc_info.memoryTypeIndex,
- report_data->FormatHandle(mem_info->mem).c_str());
+ dev_data->report_data->FormatHandle(mem_info->mem).c_str());
}
return skip;
}
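
A hypothetical application-side helper (not part of this layer) showing how a memoryTypeIndex compatible with VkMemoryRequirements::memoryTypeBits is normally chosen, which is the condition the check above enforces:

    uint32_t FindMemoryTypeIndex(const VkPhysicalDeviceMemoryProperties &props, uint32_t memory_type_bits,
                                 VkMemoryPropertyFlags required) {
        for (uint32_t i = 0; i < props.memoryTypeCount; ++i) {
            const bool allowed_by_resource = (memory_type_bits & (1u << i)) != 0;
            const bool has_required_props = (props.memoryTypes[i].propertyFlags & required) == required;
            if (allowed_by_resource && has_required_props) return i;
        }
        return UINT32_MAX;  // no suitable memory type; the caller must handle this case
    }
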
-bool CoreChecks::ValidateBindBufferMemory(VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset,
- const char *api_name) const {
- const BUFFER_STATE *buffer_state = GetBufferState(buffer);
+bool CoreChecks::ValidateBindBufferMemory(layer_data *device_data, VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset,
+ const char *api_name) {
+ BUFFER_STATE *buffer_state = GetBufferState(buffer);
bool skip = false;
if (buffer_state) {
// Track objects tied to memory
uint64_t buffer_handle = HandleToUint64(buffer);
- const VulkanTypedHandle obj_struct(buffer, kVulkanObjectTypeBuffer);
- skip = ValidateSetMemBinding(mem, obj_struct, api_name);
+ skip = ValidateSetMemBinding(device_data, mem, buffer_handle, kVulkanObjectTypeBuffer, api_name);
if (!buffer_state->memory_requirements_checked) {
            // There's no explicit requirement in the spec to call vkGetBufferMemoryRequirements() prior to calling
            // BindBufferMemory, but it is implied: the memory being bound must conform to the VkMemoryRequirements
            // returned by vkGetBufferMemoryRequirements()
- skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, buffer_handle,
- kVUID_Core_BindBuffer_NoMemReqQuery,
- "%s: Binding memory to %s but vkGetBufferMemoryRequirements() has not been called on that buffer.",
- api_name, report_data->FormatHandle(buffer).c_str());
- // In the following we'll use the information we got in CreateBuffer
+ skip |=
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+ buffer_handle, kVUID_Core_DrawState_InvalidBuffer,
+ "%s: Binding memory to buffer %s but vkGetBufferMemoryRequirements() has not been called on that buffer.",
+ api_name, device_data->report_data->FormatHandle(buffer_handle).c_str());
+ // Make the call for them so we can verify the state
+ device_data->device_dispatch_table.GetBufferMemoryRequirements(device_data->device, buffer,
+ &buffer_state->requirements);
}
// Validate bound memory range information
- const auto mem_info = GetDevMemState(mem);
+ const auto mem_info = GetMemObjInfo(mem);
if (mem_info) {
- skip |= ValidateInsertBufferMemoryRange(buffer, mem_info, memoryOffset, buffer_state->requirements, api_name);
- skip |= ValidateMemoryTypes(mem_info, buffer_state->requirements.memoryTypeBits, api_name,
+ skip |=
+ ValidateInsertBufferMemoryRange(device_data, buffer, mem_info, memoryOffset, buffer_state->requirements, api_name);
+ skip |= ValidateMemoryTypes(device_data, mem_info, buffer_state->requirements.memoryTypeBits, api_name,
"VUID-vkBindBufferMemory-memory-01035");
}
// Validate memory requirements alignment
if (SafeModulo(memoryOffset, buffer_state->requirements.alignment) != 0) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, buffer_handle,
- "VUID-vkBindBufferMemory-memoryOffset-01036",
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+ buffer_handle, "VUID-vkBindBufferMemory-memoryOffset-01036",
"%s: memoryOffset is 0x%" PRIxLEAST64
" but must be an integer multiple of the VkMemoryRequirements::alignment value 0x%" PRIxLEAST64
", returned from a call to vkGetBufferMemoryRequirements with buffer.",
@@ -4412,8 +4390,8 @@ bool CoreChecks::ValidateBindBufferMemory(VkBuffer buffer, VkDeviceMemory mem, V
if (mem_info) {
// Validate memory requirements size
if (buffer_state->requirements.size > (mem_info->alloc_info.allocationSize - memoryOffset)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, buffer_handle,
- "VUID-vkBindBufferMemory-size-01037",
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+ buffer_handle, "VUID-vkBindBufferMemory-size-01037",
"%s: memory size minus memoryOffset is 0x%" PRIxLEAST64
" but must be at least as large as VkMemoryRequirements::size value 0x%" PRIxLEAST64
", returned from a call to vkGetBufferMemoryRequirements with buffer.",
@@ -4427,13 +4405,13 @@ bool CoreChecks::ValidateBindBufferMemory(VkBuffer buffer, VkDeviceMemory mem, V
if (strcmp(api_name, "vkBindBufferMemory()") == 0) {
validation_error = "VUID-vkBindBufferMemory-memory-01508";
}
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, buffer_handle,
- validation_error,
- "%s: for dedicated %s, VkMemoryDedicatedAllocateInfoKHR::buffer %s must be equal "
- "to %s and memoryOffset 0x%" PRIxLEAST64 " must be zero.",
- api_name, report_data->FormatHandle(mem).c_str(),
- report_data->FormatHandle(mem_info->dedicated_buffer).c_str(),
- report_data->FormatHandle(buffer).c_str(), memoryOffset);
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+ buffer_handle, validation_error,
+ "%s: for dedicated memory allocation %s, VkMemoryDedicatedAllocateInfoKHR::buffer %s must be equal "
+ "to buffer %s and memoryOffset 0x%" PRIxLEAST64 " must be zero.",
+ api_name, device_data->report_data->FormatHandle(mem).c_str(),
+ device_data->report_data->FormatHandle(mem_info->dedicated_buffer).c_str(),
+ device_data->report_data->FormatHandle(buffer_handle).c_str(), memoryOffset);
}
}
}
@@ -4441,68 +4419,79 @@ bool CoreChecks::ValidateBindBufferMemory(VkBuffer buffer, VkDeviceMemory mem, V
}
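
An application-side sketch of the call order the checks above expect: query requirements before binding, pick a compatible memory type (the hypothetical FindMemoryTypeIndex helper sketched earlier is assumed), and bind at an offset that respects alignment and leaves enough space. "physical_device", "device" and "buffer" are assumed to already exist:

    VkPhysicalDeviceMemoryProperties mem_props;
    vkGetPhysicalDeviceMemoryProperties(physical_device, &mem_props);

    VkMemoryRequirements reqs;
    vkGetBufferMemoryRequirements(device, buffer, &reqs);   // query before binding

    VkMemoryAllocateInfo alloc_info = {};
    alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
    alloc_info.allocationSize = reqs.size;
    alloc_info.memoryTypeIndex =
        FindMemoryTypeIndex(mem_props, reqs.memoryTypeBits, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);

    VkDeviceMemory memory = VK_NULL_HANDLE;
    vkAllocateMemory(device, &alloc_info, nullptr, &memory);
    // memoryOffset (0 here) must be a multiple of reqs.alignment and must leave at least
    // reqs.size bytes of the allocation, otherwise the 01036/01037 checks above fire.
    vkBindBufferMemory(device, buffer, memory, 0);
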
bool CoreChecks::PreCallValidateBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
const char *api_name = "vkBindBufferMemory()";
- return ValidateBindBufferMemory(buffer, mem, memoryOffset, api_name);
+ return ValidateBindBufferMemory(device_data, buffer, mem, memoryOffset, api_name);
}
-void ValidationStateTracker::UpdateBindBufferMemoryState(VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset) {
+void CoreChecks::UpdateBindBufferMemoryState(layer_data *device_data, VkBuffer buffer, VkDeviceMemory mem,
+ VkDeviceSize memoryOffset) {
BUFFER_STATE *buffer_state = GetBufferState(buffer);
if (buffer_state) {
// Track bound memory range information
- auto mem_info = GetDevMemState(mem);
+ auto mem_info = GetMemObjInfo(mem);
if (mem_info) {
- InsertBufferMemoryRange(buffer, mem_info, memoryOffset, buffer_state->requirements);
+ InsertBufferMemoryRange(device_data, buffer, mem_info, memoryOffset, buffer_state->requirements);
}
// Track objects tied to memory
- SetMemBinding(mem, buffer_state, memoryOffset, VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer));
+ uint64_t buffer_handle = HandleToUint64(buffer);
+ SetMemBinding(device_data, mem, buffer_state, memoryOffset, buffer_handle, kVulkanObjectTypeBuffer);
}
}
-void ValidationStateTracker::PostCallRecordBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem,
- VkDeviceSize memoryOffset, VkResult result) {
+void CoreChecks::PostCallRecordBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset,
+ VkResult result) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (VK_SUCCESS != result) return;
- UpdateBindBufferMemoryState(buffer, mem, memoryOffset);
+ UpdateBindBufferMemoryState(device_data, buffer, mem, memoryOffset);
}
bool CoreChecks::PreCallValidateBindBufferMemory2(VkDevice device, uint32_t bindInfoCount,
const VkBindBufferMemoryInfoKHR *pBindInfos) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
char api_name[64];
bool skip = false;
for (uint32_t i = 0; i < bindInfoCount; i++) {
sprintf(api_name, "vkBindBufferMemory2() pBindInfos[%u]", i);
- skip |= ValidateBindBufferMemory(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset, api_name);
+ skip |=
+ ValidateBindBufferMemory(device_data, pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset, api_name);
}
return skip;
}
bool CoreChecks::PreCallValidateBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount,
const VkBindBufferMemoryInfoKHR *pBindInfos) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
char api_name[64];
bool skip = false;
for (uint32_t i = 0; i < bindInfoCount; i++) {
sprintf(api_name, "vkBindBufferMemory2KHR() pBindInfos[%u]", i);
- skip |= ValidateBindBufferMemory(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset, api_name);
+ skip |=
+ ValidateBindBufferMemory(device_data, pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset, api_name);
}
return skip;
}
-void ValidationStateTracker::PostCallRecordBindBufferMemory2(VkDevice device, uint32_t bindInfoCount,
- const VkBindBufferMemoryInfoKHR *pBindInfos, VkResult result) {
+void CoreChecks::PostCallRecordBindBufferMemory2(VkDevice device, uint32_t bindInfoCount,
+ const VkBindBufferMemoryInfoKHR *pBindInfos, VkResult result) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
for (uint32_t i = 0; i < bindInfoCount; i++) {
- UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
+ UpdateBindBufferMemoryState(device_data, pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
}
}
-void ValidationStateTracker::PostCallRecordBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount,
- const VkBindBufferMemoryInfoKHR *pBindInfos, VkResult result) {
+void CoreChecks::PostCallRecordBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount,
+ const VkBindBufferMemoryInfoKHR *pBindInfos, VkResult result) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
for (uint32_t i = 0; i < bindInfoCount; i++) {
- UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
+ UpdateBindBufferMemoryState(device_data, pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
}
}
-void ValidationStateTracker::RecordGetBufferMemoryRequirementsState(VkBuffer buffer, VkMemoryRequirements *pMemoryRequirements) {
+void CoreChecks::RecordGetBufferMemoryRequirementsState(layer_data *device_data, VkBuffer buffer,
+ VkMemoryRequirements *pMemoryRequirements) {
BUFFER_STATE *buffer_state = GetBufferState(buffer);
if (buffer_state) {
buffer_state->requirements = *pMemoryRequirements;
@@ -4510,42 +4499,46 @@ void ValidationStateTracker::RecordGetBufferMemoryRequirementsState(VkBuffer buf
}
}
-void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
- VkMemoryRequirements *pMemoryRequirements) {
- RecordGetBufferMemoryRequirementsState(buffer, pMemoryRequirements);
+void CoreChecks::PostCallRecordGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
+ VkMemoryRequirements *pMemoryRequirements) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ RecordGetBufferMemoryRequirementsState(device_data, buffer, pMemoryRequirements);
}
-void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2(VkDevice device,
- const VkBufferMemoryRequirementsInfo2KHR *pInfo,
- VkMemoryRequirements2KHR *pMemoryRequirements) {
- RecordGetBufferMemoryRequirementsState(pInfo->buffer, &pMemoryRequirements->memoryRequirements);
+void CoreChecks::PostCallRecordGetBufferMemoryRequirements2(VkDevice device, const VkBufferMemoryRequirementsInfo2KHR *pInfo,
+ VkMemoryRequirements2KHR *pMemoryRequirements) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ RecordGetBufferMemoryRequirementsState(device_data, pInfo->buffer, &pMemoryRequirements->memoryRequirements);
}
-void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2KHR(VkDevice device,
- const VkBufferMemoryRequirementsInfo2KHR *pInfo,
- VkMemoryRequirements2KHR *pMemoryRequirements) {
- RecordGetBufferMemoryRequirementsState(pInfo->buffer, &pMemoryRequirements->memoryRequirements);
+void CoreChecks::PostCallRecordGetBufferMemoryRequirements2KHR(VkDevice device, const VkBufferMemoryRequirementsInfo2KHR *pInfo,
+ VkMemoryRequirements2KHR *pMemoryRequirements) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ RecordGetBufferMemoryRequirementsState(device_data, pInfo->buffer, &pMemoryRequirements->memoryRequirements);
}
-bool CoreChecks::ValidateGetImageMemoryRequirements2(const VkImageMemoryRequirementsInfo2 *pInfo) const {
+bool CoreChecks::ValidateGetImageMemoryRequirements2(layer_data *dev_data, const VkImageMemoryRequirementsInfo2 *pInfo) {
bool skip = false;
- if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
- skip |= ValidateGetImageMemoryRequirements2ANDROID(pInfo->image);
+ if (GetDeviceExtensions()->vk_android_external_memory_android_hardware_buffer) {
+ skip |= ValidateGetImageMemoryRequirements2ANDROID(dev_data, pInfo->image);
}
return skip;
}
bool CoreChecks::PreCallValidateGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
VkMemoryRequirements2 *pMemoryRequirements) {
- return ValidateGetImageMemoryRequirements2(pInfo);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ return ValidateGetImageMemoryRequirements2(device_data, pInfo);
}
bool CoreChecks::PreCallValidateGetImageMemoryRequirements2KHR(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
VkMemoryRequirements2 *pMemoryRequirements) {
- return ValidateGetImageMemoryRequirements2(pInfo);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ return ValidateGetImageMemoryRequirements2(device_data, pInfo);
}
-void ValidationStateTracker::RecordGetImageMemoryRequiementsState(VkImage image, VkMemoryRequirements *pMemoryRequirements) {
+void CoreChecks::RecordGetImageMemoryRequiementsState(layer_data *device_data, VkImage image,
+ VkMemoryRequirements *pMemoryRequirements) {
IMAGE_STATE *image_state = GetImageState(image);
if (image_state) {
image_state->requirements = *pMemoryRequirements;
@@ -4553,20 +4546,22 @@ void ValidationStateTracker::RecordGetImageMemoryRequiementsState(VkImage image,
}
}
-void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements(VkDevice device, VkImage image,
- VkMemoryRequirements *pMemoryRequirements) {
- RecordGetImageMemoryRequiementsState(image, pMemoryRequirements);
+void CoreChecks::PostCallRecordGetImageMemoryRequirements(VkDevice device, VkImage image,
+ VkMemoryRequirements *pMemoryRequirements) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ RecordGetImageMemoryRequiementsState(device_data, image, pMemoryRequirements);
}
-void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
- VkMemoryRequirements2 *pMemoryRequirements) {
- RecordGetImageMemoryRequiementsState(pInfo->image, &pMemoryRequirements->memoryRequirements);
+void CoreChecks::PostCallRecordGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
+ VkMemoryRequirements2 *pMemoryRequirements) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ RecordGetImageMemoryRequiementsState(device_data, pInfo->image, &pMemoryRequirements->memoryRequirements);
}
-void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2KHR(VkDevice device,
- const VkImageMemoryRequirementsInfo2 *pInfo,
- VkMemoryRequirements2 *pMemoryRequirements) {
- RecordGetImageMemoryRequiementsState(pInfo->image, &pMemoryRequirements->memoryRequirements);
+void CoreChecks::PostCallRecordGetImageMemoryRequirements2KHR(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
+ VkMemoryRequirements2 *pMemoryRequirements) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ RecordGetImageMemoryRequiementsState(device_data, pInfo->image, &pMemoryRequirements->memoryRequirements);
}
static void RecordGetImageSparseMemoryRequirementsState(IMAGE_STATE *image_state,
@@ -4577,9 +4572,9 @@ static void RecordGetImageSparseMemoryRequirementsState(IMAGE_STATE *image_state
}
}
-void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements(
- VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount,
- VkSparseImageMemoryRequirements *pSparseMemoryRequirements) {
+void CoreChecks::PostCallRecordGetImageSparseMemoryRequirements(VkDevice device, VkImage image,
+ uint32_t *pSparseMemoryRequirementCount,
+ VkSparseImageMemoryRequirements *pSparseMemoryRequirements) {
auto image_state = GetImageState(image);
image_state->get_sparse_reqs_called = true;
if (!pSparseMemoryRequirements) return;
@@ -4588,9 +4583,10 @@ void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements(
}
}
-void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2(
- VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR *pInfo, uint32_t *pSparseMemoryRequirementCount,
- VkSparseImageMemoryRequirements2KHR *pSparseMemoryRequirements) {
+void CoreChecks::PostCallRecordGetImageSparseMemoryRequirements2(VkDevice device,
+ const VkImageSparseMemoryRequirementsInfo2KHR *pInfo,
+ uint32_t *pSparseMemoryRequirementCount,
+ VkSparseImageMemoryRequirements2KHR *pSparseMemoryRequirements) {
auto image_state = GetImageState(pInfo->image);
image_state->get_sparse_reqs_called = true;
if (!pSparseMemoryRequirements) return;
@@ -4600,7 +4596,7 @@ void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2(
}
}
-void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2KHR(
+void CoreChecks::PostCallRecordGetImageSparseMemoryRequirements2KHR(
VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR *pInfo, uint32_t *pSparseMemoryRequirementCount,
VkSparseImageMemoryRequirements2KHR *pSparseMemoryRequirements) {
auto image_state = GetImageState(pInfo->image);
@@ -4615,116 +4611,128 @@ void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2KHR(
bool CoreChecks::PreCallValidateGetPhysicalDeviceImageFormatProperties2(VkPhysicalDevice physicalDevice,
const VkPhysicalDeviceImageFormatInfo2 *pImageFormatInfo,
VkImageFormatProperties2 *pImageFormatProperties) {
+ instance_layer_data *instance_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), instance_layer_data_map);
// Can't wrap AHB-specific validation in a device extension check here, but no harm
- bool skip = ValidateGetPhysicalDeviceImageFormatProperties2ANDROID(report_data, pImageFormatInfo, pImageFormatProperties);
+ bool skip = ValidateGetPhysicalDeviceImageFormatProperties2ANDROID(instance_data->report_data, pImageFormatInfo,
+ pImageFormatProperties);
return skip;
}
bool CoreChecks::PreCallValidateGetPhysicalDeviceImageFormatProperties2KHR(VkPhysicalDevice physicalDevice,
const VkPhysicalDeviceImageFormatInfo2 *pImageFormatInfo,
VkImageFormatProperties2 *pImageFormatProperties) {
+ instance_layer_data *instance_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), instance_layer_data_map);
// Can't wrap AHB-specific validation in a device extension check here, but no harm
- bool skip = ValidateGetPhysicalDeviceImageFormatProperties2ANDROID(report_data, pImageFormatInfo, pImageFormatProperties);
+ bool skip = ValidateGetPhysicalDeviceImageFormatProperties2ANDROID(instance_data->report_data, pImageFormatInfo,
+ pImageFormatProperties);
return skip;
}
-void ValidationStateTracker::PreCallRecordDestroyShaderModule(VkDevice device, VkShaderModule shaderModule,
- const VkAllocationCallbacks *pAllocator) {
+void CoreChecks::PreCallRecordDestroyShaderModule(VkDevice device, VkShaderModule shaderModule,
+ const VkAllocationCallbacks *pAllocator) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (!shaderModule) return;
- shaderModuleMap.erase(shaderModule);
+ device_data->shaderModuleMap.erase(shaderModule);
}
bool CoreChecks::PreCallValidateDestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks *pAllocator) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
PIPELINE_STATE *pipeline_state = GetPipelineState(pipeline);
- const VulkanTypedHandle obj_struct(pipeline, kVulkanObjectTypePipeline);
+ VK_OBJECT obj_struct = {HandleToUint64(pipeline), kVulkanObjectTypePipeline};
+ if (device_data->instance_data->disabled.destroy_pipeline) return false;
bool skip = false;
if (pipeline_state) {
- skip |= ValidateObjectNotInUse(pipeline_state, obj_struct, "vkDestroyPipeline", "VUID-vkDestroyPipeline-pipeline-00765");
+ skip |= ValidateObjectNotInUse(device_data, pipeline_state, obj_struct, "vkDestroyPipeline",
+ "VUID-vkDestroyPipeline-pipeline-00765");
}
return skip;
}
-void ValidationStateTracker::PreCallRecordDestroyPipeline(VkDevice device, VkPipeline pipeline,
- const VkAllocationCallbacks *pAllocator) {
+void CoreChecks::PreCallRecordDestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks *pAllocator) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (!pipeline) return;
PIPELINE_STATE *pipeline_state = GetPipelineState(pipeline);
- const VulkanTypedHandle obj_struct(pipeline, kVulkanObjectTypePipeline);
+ VK_OBJECT obj_struct = {HandleToUint64(pipeline), kVulkanObjectTypePipeline};
// Any bound cmd buffers are now invalid
- InvalidateCommandBuffers(pipeline_state->cb_bindings, obj_struct);
- pipelineMap.erase(pipeline);
-}
-
-void CoreChecks::PreCallRecordDestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks *pAllocator) {
- if (pipeline && enabled.gpu_validation) {
- GpuPreCallRecordDestroyPipeline(pipeline);
+ InvalidateCommandBuffers(device_data, pipeline_state->cb_bindings, obj_struct);
+ if (GetEnables()->gpu_validation) {
+ GpuPreCallRecordDestroyPipeline(device_data, pipeline);
}
-
- StateTracker::PreCallRecordDestroyPipeline(device, pipeline, pAllocator);
+ device_data->pipelineMap.erase(pipeline);
}
-void ValidationStateTracker::PreCallRecordDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout,
- const VkAllocationCallbacks *pAllocator) {
+void CoreChecks::PreCallRecordDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout,
+ const VkAllocationCallbacks *pAllocator) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (!pipelineLayout) return;
- pipelineLayoutMap.erase(pipelineLayout);
+ device_data->pipelineLayoutMap.erase(pipelineLayout);
}
bool CoreChecks::PreCallValidateDestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks *pAllocator) {
- const SAMPLER_STATE *sampler_state = GetSamplerState(sampler);
- const VulkanTypedHandle obj_struct(sampler, kVulkanObjectTypeSampler);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ SAMPLER_STATE *sampler_state = GetSamplerState(sampler);
+ VK_OBJECT obj_struct = {HandleToUint64(sampler), kVulkanObjectTypeSampler};
+ if (device_data->instance_data->disabled.destroy_sampler) return false;
bool skip = false;
if (sampler_state) {
- skip |= ValidateObjectNotInUse(sampler_state, obj_struct, "vkDestroySampler", "VUID-vkDestroySampler-sampler-01082");
+ skip |= ValidateObjectNotInUse(device_data, sampler_state, obj_struct, "vkDestroySampler",
+ "VUID-vkDestroySampler-sampler-01082");
}
return skip;
}
-void ValidationStateTracker ::PreCallRecordDestroySampler(VkDevice device, VkSampler sampler,
- const VkAllocationCallbacks *pAllocator) {
+void CoreChecks::PreCallRecordDestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks *pAllocator) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (!sampler) return;
SAMPLER_STATE *sampler_state = GetSamplerState(sampler);
- const VulkanTypedHandle obj_struct(sampler, kVulkanObjectTypeSampler);
+ VK_OBJECT obj_struct = {HandleToUint64(sampler), kVulkanObjectTypeSampler};
// Any bound cmd buffers are now invalid
if (sampler_state) {
- InvalidateCommandBuffers(sampler_state->cb_bindings, obj_struct);
+ InvalidateCommandBuffers(device_data, sampler_state->cb_bindings, obj_struct);
}
- samplerMap.erase(sampler);
+ device_data->samplerMap.erase(sampler);
}
-void ValidationStateTracker::PreCallRecordDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
- const VkAllocationCallbacks *pAllocator) {
+void CoreChecks::PreCallRecordDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
+ const VkAllocationCallbacks *pAllocator) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (!descriptorSetLayout) return;
- auto layout_it = descriptorSetLayoutMap.find(descriptorSetLayout);
- if (layout_it != descriptorSetLayoutMap.end()) {
+ auto layout_it = device_data->descriptorSetLayoutMap.find(descriptorSetLayout);
+ if (layout_it != device_data->descriptorSetLayoutMap.end()) {
layout_it->second.get()->MarkDestroyed();
- descriptorSetLayoutMap.erase(layout_it);
+ device_data->descriptorSetLayoutMap.erase(layout_it);
}
}
bool CoreChecks::PreCallValidateDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
const VkAllocationCallbacks *pAllocator) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
DESCRIPTOR_POOL_STATE *desc_pool_state = GetDescriptorPoolState(descriptorPool);
- const VulkanTypedHandle obj_struct(descriptorPool, kVulkanObjectTypeDescriptorPool);
+ VK_OBJECT obj_struct = {HandleToUint64(descriptorPool), kVulkanObjectTypeDescriptorPool};
+ if (device_data->instance_data->disabled.destroy_descriptor_pool) return false;
bool skip = false;
if (desc_pool_state) {
- skip |= ValidateObjectNotInUse(desc_pool_state, obj_struct, "vkDestroyDescriptorPool",
+ skip |= ValidateObjectNotInUse(device_data, desc_pool_state, obj_struct, "vkDestroyDescriptorPool",
"VUID-vkDestroyDescriptorPool-descriptorPool-00303");
}
return skip;
}
-void ValidationStateTracker::PreCallRecordDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
- const VkAllocationCallbacks *pAllocator) {
+void CoreChecks::PreCallRecordDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
+ const VkAllocationCallbacks *pAllocator) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (!descriptorPool) return;
DESCRIPTOR_POOL_STATE *desc_pool_state = GetDescriptorPoolState(descriptorPool);
- const VulkanTypedHandle obj_struct(descriptorPool, kVulkanObjectTypeDescriptorPool);
+ VK_OBJECT obj_struct = {HandleToUint64(descriptorPool), kVulkanObjectTypeDescriptorPool};
if (desc_pool_state) {
// Any bound cmd buffers are now invalid
- InvalidateCommandBuffers(desc_pool_state->cb_bindings, obj_struct);
+ InvalidateCommandBuffers(device_data, desc_pool_state->cb_bindings, obj_struct);
// Free sets that were in this pool
for (auto ds : desc_pool_state->sets) {
- FreeDescriptorSet(ds);
+ FreeDescriptorSet(device_data, ds);
}
- descriptorPoolMap.erase(descriptorPool);
+ device_data->descriptorPoolMap.erase(descriptorPool);
+ delete desc_pool_state;
}
}
@@ -4732,87 +4740,92 @@ void ValidationStateTracker::PreCallRecordDestroyDescriptorPool(VkDevice device,
// Verify that the given command buffer is not in-flight (still in use by a submitted queue operation)
// This function is only valid at a point when cmdBuffer is being reset or freed
-bool CoreChecks::CheckCommandBufferInFlight(const CMD_BUFFER_STATE *cb_node, const char *action, const char *error_code) const {
+bool CoreChecks::CheckCommandBufferInFlight(layer_data *dev_data, const GLOBAL_CB_NODE *cb_node, const char *action,
+ const char *error_code) {
bool skip = false;
if (cb_node->in_use.load()) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(cb_node->commandBuffer), error_code, "Attempt to %s %s which is in use.", action,
- report_data->FormatHandle(cb_node->commandBuffer).c_str());
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ HandleToUint64(cb_node->commandBuffer), error_code, "Attempt to %s command buffer (%s) which is in use.",
+ action, dev_data->report_data->FormatHandle(cb_node->commandBuffer).c_str());
}
return skip;
}
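
The in-flight check above is what an application trips when it frees or resets a command buffer the GPU may still be executing. A minimal sketch of the pattern that avoids it, assuming "device", "queue", "pool", "cmd" and "fence" already exist:

    VkSubmitInfo submit = {};
    submit.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    submit.commandBufferCount = 1;
    submit.pCommandBuffers = &cmd;
    vkQueueSubmit(queue, 1, &submit, fence);

    vkWaitForFences(device, 1, &fence, VK_TRUE, UINT64_MAX);  // command buffer is no longer in flight
    vkFreeCommandBuffers(device, pool, 1, &cmd);              // now passes the in-flight check
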
// Iterate over all cmdBuffers in given commandPool and verify that each is not in use
-bool CoreChecks::CheckCommandBuffersInFlight(const COMMAND_POOL_STATE *pPool, const char *action, const char *error_code) const {
+bool CoreChecks::CheckCommandBuffersInFlight(layer_data *dev_data, COMMAND_POOL_NODE *pPool, const char *action,
+ const char *error_code) {
bool skip = false;
for (auto cmd_buffer : pPool->commandBuffers) {
- skip |= CheckCommandBufferInFlight(GetCBState(cmd_buffer), action, error_code);
+ skip |= CheckCommandBufferInFlight(dev_data, GetCBNode(cmd_buffer), action, error_code);
}
return skip;
}
// Free all command buffers in given list, removing all references/links to them using ResetCommandBufferState
-void ValidationStateTracker::FreeCommandBufferStates(COMMAND_POOL_STATE *pool_state, const uint32_t command_buffer_count,
- const VkCommandBuffer *command_buffers) {
+void CoreChecks::FreeCommandBufferStates(layer_data *dev_data, COMMAND_POOL_NODE *pool_state, const uint32_t command_buffer_count,
+ const VkCommandBuffer *command_buffers) {
+ if (GetEnables()->gpu_validation) {
+ GpuPreCallRecordFreeCommandBuffers(dev_data, command_buffer_count, command_buffers);
+ }
for (uint32_t i = 0; i < command_buffer_count; i++) {
- auto cb_state = GetCBState(command_buffers[i]);
+ auto cb_state = GetCBNode(command_buffers[i]);
// Remove references to command buffer's state and delete
if (cb_state) {
// reset prior to delete, removing various references to it.
// TODO: fix this, it's insane.
- ResetCommandBufferState(cb_state->commandBuffer);
- // Remove the cb_state's references from COMMAND_POOL_STATEs
+ ResetCommandBufferState(dev_data, cb_state->commandBuffer);
+ // Remove the cb_state's references from layer_data and COMMAND_POOL_NODE
+ dev_data->commandBufferMap.erase(cb_state->commandBuffer);
pool_state->commandBuffers.erase(command_buffers[i]);
- // Remove the cb debug labels
- EraseCmdDebugUtilsLabel(report_data, cb_state->commandBuffer);
- // Remove CBState from CB map
- commandBufferMap.erase(cb_state->commandBuffer);
+ delete cb_state;
}
}
}
bool CoreChecks::PreCallValidateFreeCommandBuffers(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount,
const VkCommandBuffer *pCommandBuffers) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
bool skip = false;
for (uint32_t i = 0; i < commandBufferCount; i++) {
- const auto *cb_node = GetCBState(pCommandBuffers[i]);
+ auto cb_node = GetCBNode(pCommandBuffers[i]);
// Delete CB information structure, and remove from commandBufferMap
if (cb_node) {
- skip |= CheckCommandBufferInFlight(cb_node, "free", "VUID-vkFreeCommandBuffers-pCommandBuffers-00047");
+ skip |= CheckCommandBufferInFlight(device_data, cb_node, "free", "VUID-vkFreeCommandBuffers-pCommandBuffers-00047");
}
}
return skip;
}
-void ValidationStateTracker::PreCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool,
- uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers) {
- auto pPool = GetCommandPoolState(commandPool);
- FreeCommandBufferStates(pPool, commandBufferCount, pCommandBuffers);
+void CoreChecks::PreCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount,
+ const VkCommandBuffer *pCommandBuffers) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ auto pPool = GetCommandPoolNode(commandPool);
+ FreeCommandBufferStates(device_data, pPool, commandBufferCount, pCommandBuffers);
}
bool CoreChecks::PreCallValidateCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool) {
- return ValidateDeviceQueueFamily(pCreateInfo->queueFamilyIndex, "vkCreateCommandPool", "pCreateInfo->queueFamilyIndex",
- "VUID-vkCreateCommandPool-queueFamilyIndex-01937");
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ return ValidateDeviceQueueFamily(device_data, pCreateInfo->queueFamilyIndex, "vkCreateCommandPool",
+ "pCreateInfo->queueFamilyIndex", "VUID-vkCreateCommandPool-queueFamilyIndex-01937");
}
-void ValidationStateTracker::PostCallRecordCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo,
- const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool,
- VkResult result) {
+void CoreChecks::PostCallRecordCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool,
+ VkResult result) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (VK_SUCCESS != result) return;
- std::unique_ptr<COMMAND_POOL_STATE> cmd_pool_state(new COMMAND_POOL_STATE{});
- cmd_pool_state->createFlags = pCreateInfo->flags;
- cmd_pool_state->queueFamilyIndex = pCreateInfo->queueFamilyIndex;
- commandPoolMap[*pCommandPool] = std::move(cmd_pool_state);
+ device_data->commandPoolMap[*pCommandPool].createFlags = pCreateInfo->flags;
+ device_data->commandPoolMap[*pCommandPool].queueFamilyIndex = pCreateInfo->queueFamilyIndex;
}
bool CoreChecks::PreCallValidateCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool) {
- if (disabled.query_validation) return false;
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
bool skip = false;
if (pCreateInfo && pCreateInfo->queryType == VK_QUERY_TYPE_PIPELINE_STATISTICS) {
- if (!enabled_features.core.pipelineStatisticsQuery) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, 0,
+ if (!device_data->enabled_features.core.pipelineStatisticsQuery) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, 0,
"VUID-VkQueryPoolCreateInfo-queryType-00791",
"Query pool with type VK_QUERY_TYPE_PIPELINE_STATISTICS created on a device with "
"VkDeviceCreateInfo.pEnabledFeatures.pipelineStatisticsQuery == VK_FALSE.");
@@ -4821,69 +4834,71 @@ bool CoreChecks::PreCallValidateCreateQueryPool(VkDevice device, const VkQueryPo
return skip;
}
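[Editor's note, not part of the diff] An illustrative application-side sketch of the feature requirement checked above; `device` is assumed to have been created with the features shown. A VK_QUERY_TYPE_PIPELINE_STATISTICS pool is only legal when pipelineStatisticsQuery was enabled at device creation, otherwise VUID-VkQueryPoolCreateInfo-queryType-00791 fires.

// Illustrative sketch only.
VkPhysicalDeviceFeatures features = {};
features.pipelineStatisticsQuery = VK_TRUE;  // must be passed via VkDeviceCreateInfo::pEnabledFeatures

VkQueryPoolCreateInfo qp_ci = {};
qp_ci.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
qp_ci.queryType = VK_QUERY_TYPE_PIPELINE_STATISTICS;
qp_ci.queryCount = 1;
qp_ci.pipelineStatistics = VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT;

VkQueryPool query_pool = VK_NULL_HANDLE;
vkCreateQueryPool(device, &qp_ci, nullptr, &query_pool);  // passes the check when the feature is enabled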
-void ValidationStateTracker::PostCallRecordCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
- const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool,
- VkResult result) {
+void CoreChecks::PostCallRecordCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool, VkResult result) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (VK_SUCCESS != result) return;
- std::unique_ptr<QUERY_POOL_STATE> query_pool_state(new QUERY_POOL_STATE{});
- query_pool_state->createInfo = *pCreateInfo;
- queryPoolMap[*pQueryPool] = std::move(query_pool_state);
-
- QueryObject query_obj{*pQueryPool, 0u};
- for (uint32_t i = 0; i < pCreateInfo->queryCount; ++i) {
- query_obj.query = i;
- queryToStateMap[query_obj] = QUERYSTATE_UNKNOWN;
- }
+ QUERY_POOL_NODE *qp_node = &device_data->queryPoolMap[*pQueryPool];
+ qp_node->createInfo = *pCreateInfo;
}
bool CoreChecks::PreCallValidateDestroyCommandPool(VkDevice device, VkCommandPool commandPool,
const VkAllocationCallbacks *pAllocator) {
- const COMMAND_POOL_STATE *cp_state = GetCommandPoolState(commandPool);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+
+ COMMAND_POOL_NODE *cp_state = GetCommandPoolNode(commandPool);
+ if (device_data->instance_data->disabled.destroy_command_pool) return false;
bool skip = false;
if (cp_state) {
// Verify that command buffers in pool are complete (not in-flight)
- skip |= CheckCommandBuffersInFlight(cp_state, "destroy command pool with", "VUID-vkDestroyCommandPool-commandPool-00041");
+ skip |= CheckCommandBuffersInFlight(device_data, cp_state, "destroy command pool with",
+ "VUID-vkDestroyCommandPool-commandPool-00041");
}
return skip;
}
-void ValidationStateTracker::PreCallRecordDestroyCommandPool(VkDevice device, VkCommandPool commandPool,
- const VkAllocationCallbacks *pAllocator) {
+void CoreChecks::PreCallRecordDestroyCommandPool(VkDevice device, VkCommandPool commandPool,
+ const VkAllocationCallbacks *pAllocator) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (!commandPool) return;
- COMMAND_POOL_STATE *cp_state = GetCommandPoolState(commandPool);
+ COMMAND_POOL_NODE *cp_state = GetCommandPoolNode(commandPool);
// Remove cmdpool from cmdpoolmap, after freeing layer data for the command buffers
// "When a pool is destroyed, all command buffers allocated from the pool are freed."
if (cp_state) {
// Create a vector, as FreeCommandBufferStates deletes from cp_state->commandBuffers during iteration.
std::vector<VkCommandBuffer> cb_vec{cp_state->commandBuffers.begin(), cp_state->commandBuffers.end()};
- FreeCommandBufferStates(cp_state, static_cast<uint32_t>(cb_vec.size()), cb_vec.data());
- commandPoolMap.erase(commandPool);
+ FreeCommandBufferStates(device_data, cp_state, static_cast<uint32_t>(cb_vec.size()), cb_vec.data());
+ device_data->commandPoolMap.erase(commandPool);
}
}
bool CoreChecks::PreCallValidateResetCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags) {
- const auto *command_pool_state = GetCommandPoolState(commandPool);
- return CheckCommandBuffersInFlight(command_pool_state, "reset command pool with", "VUID-vkResetCommandPool-commandPool-00040");
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ auto command_pool_state = GetCommandPoolNode(commandPool);
+ return CheckCommandBuffersInFlight(device_data, command_pool_state, "reset command pool with",
+ "VUID-vkResetCommandPool-commandPool-00040");
}
-void ValidationStateTracker::PostCallRecordResetCommandPool(VkDevice device, VkCommandPool commandPool,
- VkCommandPoolResetFlags flags, VkResult result) {
+void CoreChecks::PostCallRecordResetCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags,
+ VkResult result) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (VK_SUCCESS != result) return;
// Reset all of the CBs allocated from this pool
- auto command_pool_state = GetCommandPoolState(commandPool);
+ auto command_pool_state = GetCommandPoolNode(commandPool);
for (auto cmdBuffer : command_pool_state->commandBuffers) {
- ResetCommandBufferState(cmdBuffer);
+ ResetCommandBufferState(device_data, cmdBuffer);
}
}
bool CoreChecks::PreCallValidateResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
bool skip = false;
for (uint32_t i = 0; i < fenceCount; ++i) {
- auto pFence = GetFenceState(pFences[i]);
+ auto pFence = GetFenceNode(pFences[i]);
if (pFence && pFence->scope == kSyncScopeInternal && pFence->state == FENCE_INFLIGHT) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT,
- HandleToUint64(pFences[i]), "VUID-vkResetFences-pFences-01123", "%s is in use.",
- report_data->FormatHandle(pFences[i]).c_str());
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT,
+ HandleToUint64(pFences[i]), "VUID-vkResetFences-pFences-01123", "Fence %s is in use.",
+ device_data->report_data->FormatHandle(pFences[i]).c_str());
}
}
return skip;
@@ -4891,7 +4906,7 @@ bool CoreChecks::PreCallValidateResetFences(VkDevice device, uint32_t fenceCount
void CoreChecks::PostCallRecordResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences, VkResult result) {
for (uint32_t i = 0; i < fenceCount; ++i) {
- auto pFence = GetFenceState(pFences[i]);
+ auto pFence = GetFenceNode(pFences[i]);
if (pFence) {
if (pFence->scope == kSyncScopeInternal) {
pFence->state = FENCE_UNSIGNALED;
@@ -4903,10 +4918,14 @@ void CoreChecks::PostCallRecordResetFences(VkDevice device, uint32_t fenceCount,
}
// For given cb_nodes, invalidate them and track object causing invalidation
-void ValidationStateTracker::InvalidateCommandBuffers(std::unordered_set<CMD_BUFFER_STATE *> const &cb_nodes,
- const VulkanTypedHandle &obj) {
+void CoreChecks::InvalidateCommandBuffers(const layer_data *dev_data, std::unordered_set<GLOBAL_CB_NODE *> const &cb_nodes,
+ VK_OBJECT obj) {
for (auto cb_node : cb_nodes) {
if (cb_node->state == CB_RECORDING) {
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ HandleToUint64(cb_node->commandBuffer), kVUID_Core_DrawState_InvalidCommandBuffer,
+ "Invalidating a command buffer that's currently being recorded: %s.",
+ dev_data->report_data->FormatHandle(cb_node->commandBuffer).c_str());
cb_node->state = CB_INVALID_INCOMPLETE;
} else if (cb_node->state == CB_RECORDED) {
cb_node->state = CB_INVALID_COMPLETE;
@@ -4915,79 +4934,121 @@ void ValidationStateTracker::InvalidateCommandBuffers(std::unordered_set<CMD_BUF
// if secondary, then propagate the invalidation to the primaries that will call us.
if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
- InvalidateCommandBuffers(cb_node->linkedCommandBuffers, obj);
+ InvalidateCommandBuffers(dev_data, cb_node->linkedCommandBuffers, obj);
}
}
}
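[Editor's note, not part of the diff] A brief application-side sketch of what drives this invalidation; `device`, `framebuffer`, `cmd` and `queue` are assumed, and `cmd` is assumed to have been recorded against `framebuffer`. Destroying an object still referenced by a recorded command buffer moves that buffer to CB_INVALID_COMPLETE, so a later resubmission is reported instead of silently using a dangling reference.

// Illustrative sketch only.
vkDestroyFramebuffer(device, framebuffer, nullptr);  // cb_bindings feed InvalidateCommandBuffers()

// 'cmd' is now CB_INVALID_COMPLETE; submitting it again would be flagged by core validation.
VkSubmitInfo submit = {};
submit.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
submit.commandBufferCount = 1;
submit.pCommandBuffers = &cmd;
// vkQueueSubmit(queue, 1, &submit, VK_NULL_HANDLE);  // would generate an invalid-command-buffer error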
bool CoreChecks::PreCallValidateDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer,
const VkAllocationCallbacks *pAllocator) {
- const FRAMEBUFFER_STATE *framebuffer_state = GetFramebufferState(framebuffer);
- const VulkanTypedHandle obj_struct(framebuffer, kVulkanObjectTypeFramebuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ FRAMEBUFFER_STATE *framebuffer_state = GetFramebufferState(framebuffer);
+ VK_OBJECT obj_struct = {HandleToUint64(framebuffer), kVulkanObjectTypeFramebuffer};
bool skip = false;
if (framebuffer_state) {
- skip |= ValidateObjectNotInUse(framebuffer_state, obj_struct, "vkDestroyFramebuffer",
+ skip |= ValidateObjectNotInUse(device_data, framebuffer_state, obj_struct, "vkDestroyFramebuffer",
"VUID-vkDestroyFramebuffer-framebuffer-00892");
}
return skip;
}
-void ValidationStateTracker::PreCallRecordDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer,
- const VkAllocationCallbacks *pAllocator) {
+void CoreChecks::PreCallRecordDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer,
+ const VkAllocationCallbacks *pAllocator) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (!framebuffer) return;
FRAMEBUFFER_STATE *framebuffer_state = GetFramebufferState(framebuffer);
- const VulkanTypedHandle obj_struct(framebuffer, kVulkanObjectTypeFramebuffer);
- InvalidateCommandBuffers(framebuffer_state->cb_bindings, obj_struct);
- frameBufferMap.erase(framebuffer);
+ VK_OBJECT obj_struct = {HandleToUint64(framebuffer), kVulkanObjectTypeFramebuffer};
+ InvalidateCommandBuffers(device_data, framebuffer_state->cb_bindings, obj_struct);
+ device_data->frameBufferMap.erase(framebuffer);
}
bool CoreChecks::PreCallValidateDestroyRenderPass(VkDevice device, VkRenderPass renderPass,
const VkAllocationCallbacks *pAllocator) {
- const RENDER_PASS_STATE *rp_state = GetRenderPassState(renderPass);
- const VulkanTypedHandle obj_struct(renderPass, kVulkanObjectTypeRenderPass);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ RENDER_PASS_STATE *rp_state = GetRenderPassState(renderPass);
+ VK_OBJECT obj_struct = {HandleToUint64(renderPass), kVulkanObjectTypeRenderPass};
bool skip = false;
if (rp_state) {
- skip |= ValidateObjectNotInUse(rp_state, obj_struct, "vkDestroyRenderPass", "VUID-vkDestroyRenderPass-renderPass-00873");
+ skip |= ValidateObjectNotInUse(device_data, rp_state, obj_struct, "vkDestroyRenderPass",
+ "VUID-vkDestroyRenderPass-renderPass-00873");
}
return skip;
}
-void ValidationStateTracker::PreCallRecordDestroyRenderPass(VkDevice device, VkRenderPass renderPass,
- const VkAllocationCallbacks *pAllocator) {
+void CoreChecks::PreCallRecordDestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks *pAllocator) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (!renderPass) return;
RENDER_PASS_STATE *rp_state = GetRenderPassState(renderPass);
- const VulkanTypedHandle obj_struct(renderPass, kVulkanObjectTypeRenderPass);
- InvalidateCommandBuffers(rp_state->cb_bindings, obj_struct);
- renderPassMap.erase(renderPass);
+ VK_OBJECT obj_struct = {HandleToUint64(renderPass), kVulkanObjectTypeRenderPass};
+ InvalidateCommandBuffers(device_data, rp_state->cb_bindings, obj_struct);
+ device_data->renderPassMap.erase(renderPass);
}
// Access helper functions for external modules
-VkFormatProperties CoreChecks::GetPDFormatProperties(const VkFormat format) const {
+VkFormatProperties CoreChecks::GetPDFormatProperties(const VkFormat format) {
VkFormatProperties format_properties;
- DispatchGetPhysicalDeviceFormatProperties(physical_device, format, &format_properties);
+ instance_dispatch_table.GetPhysicalDeviceFormatProperties(physical_device, format, &format_properties);
return format_properties;
}
VkResult CoreChecks::GetPDImageFormatProperties(const VkImageCreateInfo *image_ci,
VkImageFormatProperties *pImageFormatProperties) {
- return DispatchGetPhysicalDeviceImageFormatProperties(physical_device, image_ci->format, image_ci->imageType, image_ci->tiling,
- image_ci->usage, image_ci->flags, pImageFormatProperties);
+ return instance_dispatch_table.GetPhysicalDeviceImageFormatProperties(physical_device, image_ci->format, image_ci->imageType,
+ image_ci->tiling, image_ci->usage, image_ci->flags,
+ pImageFormatProperties);
}
VkResult CoreChecks::GetPDImageFormatProperties2(const VkPhysicalDeviceImageFormatInfo2 *phys_dev_image_fmt_info,
- VkImageFormatProperties2 *pImageFormatProperties) const {
+ VkImageFormatProperties2 *pImageFormatProperties) {
if (!instance_extensions.vk_khr_get_physical_device_properties_2) return VK_ERROR_EXTENSION_NOT_PRESENT;
- return DispatchGetPhysicalDeviceImageFormatProperties2(physical_device, phys_dev_image_fmt_info, pImageFormatProperties);
+ return instance_dispatch_table.GetPhysicalDeviceImageFormatProperties2(physical_device, phys_dev_image_fmt_info,
+ pImageFormatProperties);
}
-void ValidationStateTracker::PostCallRecordCreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo,
- const VkAllocationCallbacks *pAllocator, VkFence *pFence, VkResult result) {
+const debug_report_data *CoreChecks::GetReportData() { return report_data; }
+
+const VkLayerDispatchTable *CoreChecks::GetDispatchTable() { return &device_dispatch_table; }
+
+const VkPhysicalDeviceProperties *CoreChecks::GetPDProperties() { return &phys_dev_props; }
+
+const VkPhysicalDeviceMemoryProperties *CoreChecks::GetPhysicalDeviceMemoryProperties() { return &phys_dev_mem_props; }
+
+const CHECK_DISABLED *CoreChecks::GetDisables() { return &instance_state->disabled; }
+
+const CHECK_ENABLED *CoreChecks::GetEnables() { return &instance_state->enabled; }
+
+std::unordered_map<VkImage, std::unique_ptr<IMAGE_STATE>> *CoreChecks::GetImageMap() { return &imageMap; }
+
+std::unordered_map<VkImage, std::vector<ImageSubresourcePair>> *CoreChecks::GetImageSubresourceMap() {
+ return &imageSubresourceMap;
+}
+
+std::unordered_map<ImageSubresourcePair, IMAGE_LAYOUT_NODE> *CoreChecks::GetImageLayoutMap() { return &imageLayoutMap; }
+
+std::unordered_map<VkBuffer, std::unique_ptr<BUFFER_STATE>> *CoreChecks::GetBufferMap() { return &bufferMap; }
+
+std::unordered_map<VkBufferView, std::unique_ptr<BUFFER_VIEW_STATE>> *CoreChecks::GetBufferViewMap() { return &bufferViewMap; }
+
+std::unordered_map<VkImageView, std::unique_ptr<IMAGE_VIEW_STATE>> *CoreChecks::GetImageViewMap() { return &imageViewMap; }
+
+const DeviceFeatures *CoreChecks::GetEnabledFeatures() { return &enabled_features; }
+
+const DeviceExtensions *CoreChecks::GetDeviceExtensions() { return &device_extensions; }
+
+GpuValidationState *CoreChecks::GetGpuValidationState() { return &gpu_validation_state; }
+
+VkDevice CoreChecks::GetDevice() { return device; }
+
+uint32_t CoreChecks::GetApiVersion() { return api_version; }
+
+void CoreChecks::PostCallRecordCreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator, VkFence *pFence, VkResult result) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (VK_SUCCESS != result) return;
- std::unique_ptr<FENCE_STATE> fence_state(new FENCE_STATE{});
- fence_state->fence = *pFence;
- fence_state->createInfo = *pCreateInfo;
- fence_state->state = (pCreateInfo->flags & VK_FENCE_CREATE_SIGNALED_BIT) ? FENCE_RETIRED : FENCE_UNSIGNALED;
- fenceMap[*pFence] = std::move(fence_state);
+ auto &fence_node = device_data->fenceMap[*pFence];
+ fence_node.fence = *pFence;
+ fence_node.createInfo = *pCreateInfo;
+ fence_node.state = (pCreateInfo->flags & VK_FENCE_CREATE_SIGNALED_BIT) ? FENCE_RETIRED : FENCE_UNSIGNALED;
}
// Validation cache:
@@ -5013,10 +5074,11 @@ void SetPipelineState(PIPELINE_STATE *pPipe) {
}
}
-bool CoreChecks::ValidatePipelineVertexDivisors(std::vector<std::unique_ptr<PIPELINE_STATE>> const &pipe_state_vec,
- const uint32_t count, const VkGraphicsPipelineCreateInfo *pipe_cis) const {
+bool CoreChecks::ValidatePipelineVertexDivisors(layer_data *dev_data,
+ std::vector<std::unique_ptr<PIPELINE_STATE>> const &pipe_state_vec,
+ const uint32_t count, const VkGraphicsPipelineCreateInfo *pipe_cis) {
bool skip = false;
- const VkPhysicalDeviceLimits *device_limits = &phys_dev_props.limits;
+ const VkPhysicalDeviceLimits *device_limits = &(GetPDProperties()->limits);
for (uint32_t i = 0; i < count; i++) {
auto pvids_ci = lvl_find_in_chain<VkPipelineVertexInputDivisorStateCreateInfoEXT>(pipe_cis[i].pVertexInputState->pNext);
@@ -5027,32 +5089,36 @@ bool CoreChecks::ValidatePipelineVertexDivisors(std::vector<std::unique_ptr<PIPE
const VkVertexInputBindingDivisorDescriptionEXT *vibdd = &(pvids_ci->pVertexBindingDivisors[j]);
if (vibdd->binding >= device_limits->maxVertexInputBindings) {
skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
- "VUID-VkVertexInputBindingDivisorDescriptionEXT-binding-01869",
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+ HandleToUint64(pipe_state->pipeline), "VUID-VkVertexInputBindingDivisorDescriptionEXT-binding-01869",
"vkCreateGraphicsPipelines(): Pipeline[%1u] with chained VkPipelineVertexInputDivisorStateCreateInfoEXT, "
"pVertexBindingDivisors[%1u] binding index of (%1u) exceeds device maxVertexInputBindings (%1u).",
i, j, vibdd->binding, device_limits->maxVertexInputBindings);
}
- if (vibdd->divisor > phys_dev_ext_props.vtx_attrib_divisor_props.maxVertexAttribDivisor) {
+ if (vibdd->divisor > dev_data->phys_dev_ext_props.vtx_attrib_divisor_props.maxVertexAttribDivisor) {
skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
- "VUID-VkVertexInputBindingDivisorDescriptionEXT-divisor-01870",
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+ HandleToUint64(pipe_state->pipeline), "VUID-VkVertexInputBindingDivisorDescriptionEXT-divisor-01870",
"vkCreateGraphicsPipelines(): Pipeline[%1u] with chained VkPipelineVertexInputDivisorStateCreateInfoEXT, "
"pVertexBindingDivisors[%1u] divisor of (%1u) exceeds extension maxVertexAttribDivisor (%1u).",
- i, j, vibdd->divisor, phys_dev_ext_props.vtx_attrib_divisor_props.maxVertexAttribDivisor);
+ i, j, vibdd->divisor, dev_data->phys_dev_ext_props.vtx_attrib_divisor_props.maxVertexAttribDivisor);
}
- if ((0 == vibdd->divisor) && !enabled_features.vtx_attrib_divisor_features.vertexAttributeInstanceRateZeroDivisor) {
+ if ((0 == vibdd->divisor) &&
+ !dev_data->enabled_features.vtx_attrib_divisor_features.vertexAttributeInstanceRateZeroDivisor) {
skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+ HandleToUint64(pipe_state->pipeline),
"VUID-VkVertexInputBindingDivisorDescriptionEXT-vertexAttributeInstanceRateZeroDivisor-02228",
"vkCreateGraphicsPipelines(): Pipeline[%1u] with chained VkPipelineVertexInputDivisorStateCreateInfoEXT, "
"pVertexBindingDivisors[%1u] divisor must not be 0 when vertexAttributeInstanceRateZeroDivisor feature is not "
"enabled.",
i, j);
}
- if ((1 != vibdd->divisor) && !enabled_features.vtx_attrib_divisor_features.vertexAttributeInstanceRateDivisor) {
+ if ((1 != vibdd->divisor) &&
+ !dev_data->enabled_features.vtx_attrib_divisor_features.vertexAttributeInstanceRateDivisor) {
skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+ HandleToUint64(pipe_state->pipeline),
"VUID-VkVertexInputBindingDivisorDescriptionEXT-vertexAttributeInstanceRateDivisor-02229",
"vkCreateGraphicsPipelines(): Pipeline[%1u] with chained VkPipelineVertexInputDivisorStateCreateInfoEXT, "
"pVertexBindingDivisors[%1u] divisor (%1u) must be 1 when vertexAttributeInstanceRateDivisor feature is not "
@@ -5071,8 +5137,8 @@ bool CoreChecks::ValidatePipelineVertexDivisors(std::vector<std::unique_ptr<PIPE
}
if (failed_01871) { // Description not found, or has incorrect inputRate value
skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
- "VUID-VkVertexInputBindingDivisorDescriptionEXT-inputRate-01871",
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+ HandleToUint64(pipe_state->pipeline), "VUID-VkVertexInputBindingDivisorDescriptionEXT-inputRate-01871",
"vkCreateGraphicsPipelines(): Pipeline[%1u] with chained VkPipelineVertexInputDivisorStateCreateInfoEXT, "
"pVertexBindingDivisors[%1u] specifies binding index (%1u), but that binding index's "
"VkVertexInputBindingDescription.inputRate member is not VK_VERTEX_INPUT_RATE_INSTANCE.",
@@ -5083,41 +5149,32 @@ bool CoreChecks::ValidatePipelineVertexDivisors(std::vector<std::unique_ptr<PIPE
return skip;
}
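[Editor's note, not part of the diff] For reference, a hedged sketch of the structure these divisor checks inspect; the binding index and divisor value are illustrative, and binding 1 is assumed to be declared with VK_VERTEX_INPUT_RATE_INSTANCE so the inputRate-01871 check is satisfied.

// Illustrative sketch of a VkPipelineVertexInputDivisorStateCreateInfoEXT chain.
VkVertexInputBindingDivisorDescriptionEXT divisor_desc = {};
divisor_desc.binding = 1;  // must be < maxVertexInputBindings (binding-01869)
divisor_desc.divisor = 4;  // must be <= maxVertexAttribDivisor (divisor-01870)

VkPipelineVertexInputDivisorStateCreateInfoEXT divisor_ci = {};
divisor_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT;
divisor_ci.vertexBindingDivisorCount = 1;
divisor_ci.pVertexBindingDivisors = &divisor_desc;

VkPipelineVertexInputStateCreateInfo vi_ci = {};
vi_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
vi_ci.pNext = &divisor_ci;  // chained exactly where ValidatePipelineVertexDivisors looks for it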
-bool ValidationStateTracker::PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
- const VkGraphicsPipelineCreateInfo *pCreateInfos,
- const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
- void *cgpl_state_data) {
- // Set up the state that CoreChecks, gpu_validation and later StateTracker Record will use.
+bool CoreChecks::PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
+ const VkGraphicsPipelineCreateInfo *pCreateInfos,
+ const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
+ void *cgpl_state_data) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+
+ bool skip = false;
create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
- cgpl_state->pCreateInfos = pCreateInfos; // GPU validation can alter this, so we have to set a default value for the Chassis
cgpl_state->pipe_state.reserve(count);
for (uint32_t i = 0; i < count; i++) {
cgpl_state->pipe_state.push_back(std::unique_ptr<PIPELINE_STATE>(new PIPELINE_STATE));
- (cgpl_state->pipe_state)[i]->initGraphicsPipeline(this, &pCreateInfos[i],
+ (cgpl_state->pipe_state)[i]->initGraphicsPipeline(&pCreateInfos[i],
GetRenderPassStateSharedPtr(pCreateInfos[i].renderPass));
- (cgpl_state->pipe_state)[i]->pipeline_layout = *GetPipelineLayout(pCreateInfos[i].layout);
+ (cgpl_state->pipe_state)[i]->pipeline_layout = *GetPipelineLayout(device_data, pCreateInfos[i].layout);
}
- return false;
-}
-
-bool CoreChecks::PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
- const VkGraphicsPipelineCreateInfo *pCreateInfos,
- const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
- void *cgpl_state_data) {
- bool skip = StateTracker::PreCallValidateCreateGraphicsPipelines(device, pipelineCache, count, pCreateInfos, pAllocator,
- pPipelines, cgpl_state_data);
- create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
for (uint32_t i = 0; i < count; i++) {
- skip |= ValidatePipelineLocked(cgpl_state->pipe_state, i);
+ skip |= ValidatePipelineLocked(device_data, cgpl_state->pipe_state, i);
}
for (uint32_t i = 0; i < count; i++) {
- skip |= ValidatePipelineUnlocked(cgpl_state->pipe_state[i].get(), i);
+ skip |= ValidatePipelineUnlocked(device_data, cgpl_state->pipe_state, i);
}
- if (device_extensions.vk_ext_vertex_attribute_divisor) {
- skip |= ValidatePipelineVertexDivisors(cgpl_state->pipe_state, count, pCreateInfos);
+ if (device_data->device_extensions.vk_ext_vertex_attribute_divisor) {
+ skip |= ValidatePipelineVertexDivisors(device_data, cgpl_state->pipe_state, count, pCreateInfos);
}
return skip;
@@ -5128,296 +5185,152 @@ void CoreChecks::PreCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelin
const VkGraphicsPipelineCreateInfo *pCreateInfos,
const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
void *cgpl_state_data) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
+ cgpl_state->pCreateInfos = pCreateInfos;
// GPU Validation may replace instrumented shaders with non-instrumented ones, so allow it to modify the createinfos.
- if (enabled.gpu_validation) {
- create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
- cgpl_state->gpu_create_infos = GpuPreCallRecordCreateGraphicsPipelines(pipelineCache, count, pCreateInfos, pAllocator,
- pPipelines, cgpl_state->pipe_state);
+ if (GetEnables()->gpu_validation) {
+ cgpl_state->gpu_create_infos = GpuPreCallRecordCreateGraphicsPipelines(device_data, pipelineCache, count, pCreateInfos,
+ pAllocator, pPipelines, cgpl_state->pipe_state);
cgpl_state->pCreateInfos = reinterpret_cast<VkGraphicsPipelineCreateInfo *>(cgpl_state->gpu_create_infos.data());
}
}
-void ValidationStateTracker::PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
- const VkGraphicsPipelineCreateInfo *pCreateInfos,
- const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
- VkResult result, void *cgpl_state_data) {
+void CoreChecks::PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
+ const VkGraphicsPipelineCreateInfo *pCreateInfos,
+ const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
+ VkResult result, void *cgpl_state_data) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
// This API may create pipelines regardless of the return value
for (uint32_t i = 0; i < count; i++) {
if (pPipelines[i] != VK_NULL_HANDLE) {
(cgpl_state->pipe_state)[i]->pipeline = pPipelines[i];
- pipelineMap[pPipelines[i]] = std::move((cgpl_state->pipe_state)[i]);
+ device_data->pipelineMap[pPipelines[i]] = std::move((cgpl_state->pipe_state)[i]);
}
}
- cgpl_state->pipe_state.clear();
-}
-
-void CoreChecks::PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
- const VkGraphicsPipelineCreateInfo *pCreateInfos,
- const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
- VkResult result, void *cgpl_state_data) {
- StateTracker::PostCallRecordCreateGraphicsPipelines(device, pipelineCache, count, pCreateInfos, pAllocator, pPipelines, result,
- cgpl_state_data);
// GPU val needs clean up regardless of result
- if (enabled.gpu_validation) {
- create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
- GpuPostCallRecordCreateGraphicsPipelines(count, pCreateInfos, pAllocator, pPipelines);
+ if (GetEnables()->gpu_validation) {
+ GpuPostCallRecordCreateGraphicsPipelines(device_data, count, pCreateInfos, pAllocator, pPipelines);
cgpl_state->gpu_create_infos.clear();
}
-}
-
-bool ValidationStateTracker::PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
- const VkComputePipelineCreateInfo *pCreateInfos,
- const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
- void *ccpl_state_data) {
- auto *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
- ccpl_state->pCreateInfos = pCreateInfos; // GPU validation can alter this, so we have to set a default value for the Chassis
- ccpl_state->pipe_state.reserve(count);
- for (uint32_t i = 0; i < count; i++) {
- // Create and initialize internal tracking data structure
- ccpl_state->pipe_state.push_back(unique_ptr<PIPELINE_STATE>(new PIPELINE_STATE));
- ccpl_state->pipe_state.back()->initComputePipeline(this, &pCreateInfos[i]);
- ccpl_state->pipe_state.back()->pipeline_layout = *GetPipelineLayout(pCreateInfos[i].layout);
- }
- return false;
+ cgpl_state->pipe_state.clear();
}
bool CoreChecks::PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
const VkComputePipelineCreateInfo *pCreateInfos,
const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
- void *ccpl_state_data) {
- bool skip = StateTracker::PreCallValidateCreateComputePipelines(device, pipelineCache, count, pCreateInfos, pAllocator,
- pPipelines, ccpl_state_data);
-
- auto *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
+ void *pipe_state_data) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ bool skip = false;
+ std::vector<std::unique_ptr<PIPELINE_STATE>> *pipe_state =
+ reinterpret_cast<std::vector<std::unique_ptr<PIPELINE_STATE>> *>(pipe_state_data);
+ pipe_state->reserve(count);
for (uint32_t i = 0; i < count; i++) {
+ // Create and initialize internal tracking data structure
+ pipe_state->push_back(unique_ptr<PIPELINE_STATE>(new PIPELINE_STATE));
+ (*pipe_state)[i]->initComputePipeline(&pCreateInfos[i]);
+ (*pipe_state)[i]->pipeline_layout = *GetPipelineLayout(device_data, pCreateInfos[i].layout);
+
// TODO: Add Compute Pipeline Verification
- skip |= ValidateComputePipeline(ccpl_state->pipe_state.back().get());
+ skip |= ValidateComputePipeline(device_data, (*pipe_state)[i].get());
}
return skip;
}
-void CoreChecks::PreCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
- const VkComputePipelineCreateInfo *pCreateInfos,
- const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
- void *ccpl_state_data) {
- // GPU Validation may replace instrumented shaders with non-instrumented ones, so allow it to modify the createinfos.
- if (enabled.gpu_validation) {
- auto *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
- ccpl_state->gpu_create_infos = GpuPreCallRecordCreateComputePipelines(pipelineCache, count, pCreateInfos, pAllocator,
- pPipelines, ccpl_state->pipe_state);
- ccpl_state->pCreateInfos = reinterpret_cast<VkComputePipelineCreateInfo *>(ccpl_state->gpu_create_infos.data());
- }
-}
-
-void ValidationStateTracker::PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
- const VkComputePipelineCreateInfo *pCreateInfos,
- const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
- VkResult result, void *ccpl_state_data) {
- create_compute_pipeline_api_state *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
-
- // This API may create pipelines regardless of the return value
- for (uint32_t i = 0; i < count; i++) {
- if (pPipelines[i] != VK_NULL_HANDLE) {
- (ccpl_state->pipe_state)[i]->pipeline = pPipelines[i];
- pipelineMap[pPipelines[i]] = std::move((ccpl_state->pipe_state)[i]);
- }
- }
- ccpl_state->pipe_state.clear();
-}
-
void CoreChecks::PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
const VkComputePipelineCreateInfo *pCreateInfos,
const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
- VkResult result, void *ccpl_state_data) {
- StateTracker::PostCallRecordCreateComputePipelines(device, pipelineCache, count, pCreateInfos, pAllocator, pPipelines, result,
- ccpl_state_data);
+ VkResult result, void *pipe_state_data) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ std::vector<std::unique_ptr<PIPELINE_STATE>> *pipe_state =
+ reinterpret_cast<std::vector<std::unique_ptr<PIPELINE_STATE>> *>(pipe_state_data);
- // GPU val needs clean up regardless of result
- if (enabled.gpu_validation) {
- create_compute_pipeline_api_state *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
- GpuPostCallRecordCreateComputePipelines(count, pCreateInfos, pAllocator, pPipelines);
- ccpl_state->gpu_create_infos.clear();
- }
-}
-
-bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache,
- uint32_t count,
- const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
- const VkAllocationCallbacks *pAllocator,
- VkPipeline *pPipelines, void *crtpl_state_data) {
- auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
- crtpl_state->pipe_state.reserve(count);
+ // This API may create pipelines regardless of the return value
for (uint32_t i = 0; i < count; i++) {
- // Create and initialize internal tracking data structure
- crtpl_state->pipe_state.push_back(unique_ptr<PIPELINE_STATE>(new PIPELINE_STATE));
- crtpl_state->pipe_state.back()->initRayTracingPipelineNV(this, &pCreateInfos[i]);
- crtpl_state->pipe_state.back()->pipeline_layout = *GetPipelineLayout(pCreateInfos[i].layout);
+ if (pPipelines[i] != VK_NULL_HANDLE) {
+ (*pipe_state)[i]->pipeline = pPipelines[i];
+ device_data->pipelineMap[pPipelines[i]] = std::move((*pipe_state)[i]);
+ }
}
- return false;
}
bool CoreChecks::PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
- void *crtpl_state_data) {
- bool skip = StateTracker::PreCallValidateCreateRayTracingPipelinesNV(device, pipelineCache, count, pCreateInfos, pAllocator,
- pPipelines, crtpl_state_data);
-
- auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
- for (uint32_t i = 0; i < count; i++) {
- skip |= ValidateRayTracingPipelineNV(crtpl_state->pipe_state[i].get());
+ void *pipe_state_data) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ bool skip = false;
+ // The order of operations here is a little convoluted but gets the job done
+ // 1. Pipeline create state is first shadowed into PIPELINE_STATE struct
+ // 2. Create state is then validated (which uses flags setup during shadowing)
+ // 3. If everything looks good, we'll then create the pipeline and add NODE to pipelineMap
+ uint32_t i = 0;
+ vector<std::unique_ptr<PIPELINE_STATE>> *pipe_state =
+ reinterpret_cast<vector<std::unique_ptr<PIPELINE_STATE>> *>(pipe_state_data);
+ pipe_state->reserve(count);
+ for (i = 0; i < count; i++) {
+ pipe_state->push_back(std::unique_ptr<PIPELINE_STATE>(new PIPELINE_STATE));
+ (*pipe_state)[i]->initRayTracingPipelineNV(&pCreateInfos[i]);
+ (*pipe_state)[i]->pipeline_layout = *GetPipelineLayout(device_data, pCreateInfos[i].layout);
}
- return skip;
-}
-void CoreChecks::PreCallRecordCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
- const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
- const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
- void *crtpl_state_data) {
- // GPU Validation may replace instrumented shaders with non-instrumented ones, so allow it to modify the createinfos.
- if (enabled.gpu_validation) {
- auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
- crtpl_state->gpu_create_infos = GpuPreCallRecordCreateRayTracingPipelinesNV(pipelineCache, count, pCreateInfos, pAllocator,
- pPipelines, crtpl_state->pipe_state);
- crtpl_state->pCreateInfos = reinterpret_cast<VkRayTracingPipelineCreateInfoNV *>(crtpl_state->gpu_create_infos.data());
+ for (i = 0; i < count; i++) {
+ skip |= ValidateRayTracingPipelineNV(device_data, (*pipe_state)[i].get());
}
-}
-void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesNV(
- VkDevice device, VkPipelineCache pipelineCache, uint32_t count, const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
- const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines, VkResult result, void *crtpl_state_data) {
- auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
- // This API may create pipelines regardless of the return value
- for (uint32_t i = 0; i < count; i++) {
- if (pPipelines[i] != VK_NULL_HANDLE) {
- (crtpl_state->pipe_state)[i]->pipeline = pPipelines[i];
- pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
- }
- }
- crtpl_state->pipe_state.clear();
+ return skip;
}
void CoreChecks::PostCallRecordCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
- VkResult result, void *crtpl_state_data) {
- StateTracker::PostCallRecordCreateRayTracingPipelinesNV(device, pipelineCache, count, pCreateInfos, pAllocator, pPipelines,
- result, crtpl_state_data);
- // GPU val needs clean up regardless of result
- if (enabled.gpu_validation) {
- auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
- GpuPostCallRecordCreateRayTracingPipelinesNV(count, pCreateInfos, pAllocator, pPipelines);
- crtpl_state->gpu_create_infos.clear();
- }
-}
-
-bool CoreChecks::PreCallValidateGetPipelineExecutablePropertiesKHR(VkDevice device, const VkPipelineInfoKHR *pPipelineInfo,
- uint32_t *pExecutableCount,
- VkPipelineExecutablePropertiesKHR *pProperties) {
- bool skip = false;
-
- if (!enabled_features.pipeline_exe_props_features.pipelineExecutableInfo) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
- "VUID-vkGetPipelineExecutablePropertiesKHR-pipelineExecutableProperties-03270",
- "vkGetPipelineExecutablePropertiesKHR called when pipelineExecutableInfo feature is not enabled.");
- }
-
- return skip;
-}
-
-bool CoreChecks::ValidatePipelineExecutableInfo(VkDevice device, const VkPipelineExecutableInfoKHR *pExecutableInfo) const {
- bool skip = false;
-
- if (!enabled_features.pipeline_exe_props_features.pipelineExecutableInfo) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
- "VUID-vkGetPipelineExecutableStatisticsKHR-pipelineExecutableInfo-03272",
- "vkGetPipelineExecutableStatisticsKHR called when pipelineExecutableInfo feature is not enabled.");
- }
-
- VkPipelineInfoKHR pi = {};
- pi.sType = VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR;
- pi.pipeline = pExecutableInfo->pipeline;
-
- // We could probably cache this instead of fetching it every time
- uint32_t executableCount = 0;
- DispatchGetPipelineExecutablePropertiesKHR(device, &pi, &executableCount, NULL);
-
- if (pExecutableInfo->executableIndex >= executableCount) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
- HandleToUint64(pExecutableInfo->pipeline), "VUID-VkPipelineExecutableInfoKHR-executableIndex-03275",
-                    "VkPipelineExecutableInfoKHR::executableIndex (%1u) must be less than the number of executables associated with "
-                    "the pipeline (%1u) as returned by vkGetPipelineExecutablePropertiesKHR",
- pExecutableInfo->executableIndex, executableCount);
- }
-
- return skip;
-}
-
-bool CoreChecks::PreCallValidateGetPipelineExecutableStatisticsKHR(VkDevice device,
- const VkPipelineExecutableInfoKHR *pExecutableInfo,
- uint32_t *pStatisticCount,
- VkPipelineExecutableStatisticKHR *pStatistics) {
- bool skip = ValidatePipelineExecutableInfo(device, pExecutableInfo);
-
- const PIPELINE_STATE *pipeline_state = GetPipelineState(pExecutableInfo->pipeline);
- if (!(pipeline_state->getPipelineCreateFlags() & VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
- HandleToUint64(pExecutableInfo->pipeline), "VUID-vkGetPipelineExecutableStatisticsKHR-pipeline-03274",
- "vkGetPipelineExecutableStatisticsKHR called on a pipeline created without the "
- "VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR flag set");
+ VkResult result, void *pipe_state_data) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ vector<std::unique_ptr<PIPELINE_STATE>> *pipe_state =
+ reinterpret_cast<vector<std::unique_ptr<PIPELINE_STATE>> *>(pipe_state_data);
+ // This API may create pipelines regardless of the return value
+ for (uint32_t i = 0; i < count; i++) {
+ if (pPipelines[i] != VK_NULL_HANDLE) {
+ (*pipe_state)[i]->pipeline = pPipelines[i];
+ device_data->pipelineMap[pPipelines[i]] = std::move((*pipe_state)[i]);
+ }
}
-
- return skip;
}
-bool CoreChecks::PreCallValidateGetPipelineExecutableInternalRepresentationsKHR(
- VkDevice device, const VkPipelineExecutableInfoKHR *pExecutableInfo, uint32_t *pInternalRepresentationCount,
- VkPipelineExecutableInternalRepresentationKHR *pStatistics) {
- bool skip = ValidatePipelineExecutableInfo(device, pExecutableInfo);
-
- const PIPELINE_STATE *pipeline_state = GetPipelineState(pExecutableInfo->pipeline);
- if (!(pipeline_state->getPipelineCreateFlags() & VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
- HandleToUint64(pExecutableInfo->pipeline),
- "VUID-vkGetPipelineExecutableInternalRepresentationsKHR-pipeline-03278",
- "vkGetPipelineExecutableInternalRepresentationsKHR called on a pipeline created without the "
- "VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR flag set");
- }
-
- return skip;
-}
-
-void ValidationStateTracker::PostCallRecordCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
- const VkAllocationCallbacks *pAllocator, VkSampler *pSampler,
- VkResult result) {
- samplerMap[*pSampler] = unique_ptr<SAMPLER_STATE>(new SAMPLER_STATE(pSampler, pCreateInfo));
+void CoreChecks::PostCallRecordCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator, VkSampler *pSampler, VkResult result) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ device_data->samplerMap[*pSampler] = unique_ptr<SAMPLER_STATE>(new SAMPLER_STATE(pSampler, pCreateInfo));
}
bool CoreChecks::PreCallValidateCreateDescriptorSetLayout(VkDevice device, const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator,
VkDescriptorSetLayout *pSetLayout) {
- return cvdescriptorset::ValidateDescriptorSetLayoutCreateInfo(
- report_data, pCreateInfo, device_extensions.vk_khr_push_descriptor, phys_dev_ext_props.max_push_descriptors,
- device_extensions.vk_ext_descriptor_indexing, &enabled_features.descriptor_indexing, &enabled_features.inline_uniform_block,
- &phys_dev_ext_props.inline_uniform_block_props, &device_extensions);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ if (device_data->instance_data->disabled.create_descriptor_set_layout) return false;
+ return cvdescriptorset::DescriptorSetLayout::ValidateCreateInfo(
+ device_data->report_data, pCreateInfo, device_data->device_extensions.vk_khr_push_descriptor,
+ device_data->phys_dev_ext_props.max_push_descriptors, device_data->device_extensions.vk_ext_descriptor_indexing,
+ &device_data->enabled_features.descriptor_indexing, &device_data->enabled_features.inline_uniform_block,
+ &device_data->phys_dev_ext_props.inline_uniform_block_props);
}
-void ValidationStateTracker::PostCallRecordCreateDescriptorSetLayout(VkDevice device,
- const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
- const VkAllocationCallbacks *pAllocator,
- VkDescriptorSetLayout *pSetLayout, VkResult result) {
+void CoreChecks::PostCallRecordCreateDescriptorSetLayout(VkDevice device, const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator, VkDescriptorSetLayout *pSetLayout,
+ VkResult result) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (VK_SUCCESS != result) return;
- descriptorSetLayoutMap[*pSetLayout] = std::make_shared<cvdescriptorset::DescriptorSetLayout>(pCreateInfo, *pSetLayout);
+ device_data->descriptorSetLayoutMap[*pSetLayout] =
+ std::make_shared<cvdescriptorset::DescriptorSetLayout>(pCreateInfo, *pSetLayout);
}
// Used by CreatePipelineLayout and CmdPushConstants.
// Note that the index argument is optional and only used by CreatePipelineLayout.
-bool CoreChecks::ValidatePushConstantRange(const uint32_t offset, const uint32_t size, const char *caller_name,
- uint32_t index = 0) const {
- if (disabled.push_constant_range) return false;
- uint32_t const maxPushConstantsSize = phys_dev_props.limits.maxPushConstantsSize;
+static bool ValidatePushConstantRange(const layer_data *dev_data, const uint32_t offset, const uint32_t size,
+ const char *caller_name, uint32_t index = 0) {
+ if (dev_data->instance_data->disabled.push_constant_range) return false;
+ uint32_t const maxPushConstantsSize = dev_data->phys_dev_props.limits.maxPushConstantsSize;
bool skip = false;
// Check that offset + size don't exceed the max.
    // Prevent arithmetic overflow here by avoiding addition and testing in this order.
@@ -5426,13 +5339,13 @@ bool CoreChecks::ValidatePushConstantRange(const uint32_t offset, const uint32_t
if (0 == strcmp(caller_name, "vkCreatePipelineLayout()")) {
if (offset >= maxPushConstantsSize) {
skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-VkPushConstantRange-offset-00294",
"%s call has push constants index %u with offset %u that exceeds this device's maxPushConstantSize of %u.",
caller_name, index, offset, maxPushConstantsSize);
}
if (size > maxPushConstantsSize - offset) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-VkPushConstantRange-size-00298",
"%s call has push constants index %u with offset %u and size %u that exceeds this device's "
"maxPushConstantSize of %u.",
@@ -5441,20 +5354,20 @@ bool CoreChecks::ValidatePushConstantRange(const uint32_t offset, const uint32_t
} else if (0 == strcmp(caller_name, "vkCmdPushConstants()")) {
if (offset >= maxPushConstantsSize) {
skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-vkCmdPushConstants-offset-00370",
"%s call has push constants index %u with offset %u that exceeds this device's maxPushConstantSize of %u.",
caller_name, index, offset, maxPushConstantsSize);
}
if (size > maxPushConstantsSize - offset) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-vkCmdPushConstants-size-00371",
"%s call has push constants index %u with offset %u and size %u that exceeds this device's "
"maxPushConstantSize of %u.",
caller_name, index, offset, size, maxPushConstantsSize);
}
} else {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
kVUID_Core_DrawState_InternalError, "%s caller not supported.", caller_name);
}
}
@@ -5462,48 +5375,48 @@ bool CoreChecks::ValidatePushConstantRange(const uint32_t offset, const uint32_t
if ((size == 0) || ((size & 0x3) != 0)) {
if (0 == strcmp(caller_name, "vkCreatePipelineLayout()")) {
if (size == 0) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-VkPushConstantRange-size-00296",
"%s call has push constants index %u with size %u. Size must be greater than zero.", caller_name,
index, size);
}
if (size & 0x3) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-VkPushConstantRange-size-00297",
"%s call has push constants index %u with size %u. Size must be a multiple of 4.", caller_name,
index, size);
}
} else if (0 == strcmp(caller_name, "vkCmdPushConstants()")) {
if (size == 0) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-vkCmdPushConstants-size-arraylength",
"%s call has push constants index %u with size %u. Size must be greater than zero.", caller_name,
index, size);
}
if (size & 0x3) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-vkCmdPushConstants-size-00369",
"%s call has push constants index %u with size %u. Size must be a multiple of 4.", caller_name,
index, size);
}
} else {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
kVUID_Core_DrawState_InternalError, "%s caller not supported.", caller_name);
}
}
// offset needs to be a multiple of 4.
if ((offset & 0x3) != 0) {
if (0 == strcmp(caller_name, "vkCreatePipelineLayout()")) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-VkPushConstantRange-offset-00295",
"%s call has push constants index %u with offset %u. Offset must be a multiple of 4.", caller_name,
index, offset);
} else if (0 == strcmp(caller_name, "vkCmdPushConstants()")) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-vkCmdPushConstants-offset-00368",
"%s call has push constants with offset %u. Offset must be a multiple of 4.", caller_name, offset);
} else {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
kVUID_Core_DrawState_InternalError, "%s caller not supported.", caller_name);
}
}
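[Editor's note, not part of the diff] To ground the alignment rules above, a short application-side sketch; `cmd` and `layout` are assumed, and the layout is assumed to declare a VK_SHADER_STAGE_VERTEX_BIT push constant range covering bytes [0, 16). Offset and size must each be multiples of 4 and stay within maxPushConstantsSize.

// Illustrative sketch only.
struct { float tint[4]; } pc = {{1.0f, 0.5f, 0.25f, 1.0f}};

// Violates VUID-vkCmdPushConstants-offset-00368: offset 2 is not a multiple of 4.
// vkCmdPushConstants(cmd, layout, VK_SHADER_STAGE_VERTEX_BIT, 2, sizeof(pc), &pc);

// Passes ValidatePushConstantRange(): offset 0 and size 16 are 4-byte aligned and in range.
vkCmdPushConstants(cmd, layout, VK_SHADER_STAGE_VERTEX_BIT, 0, sizeof(pc), &pc);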
@@ -5524,15 +5437,15 @@ enum DSL_DESCRIPTOR_GROUPS {
// Used by PreCallValidateCreatePipelineLayout.
// Returns an array of size DSL_NUM_DESCRIPTOR_GROUPS of the maximum number of descriptors used in any single pipeline stage
std::valarray<uint32_t> GetDescriptorCountMaxPerStage(
- const DeviceFeatures *enabled_features,
- const std::vector<std::shared_ptr<cvdescriptorset::DescriptorSetLayout const>> &set_layouts, bool skip_update_after_bind) {
+ const layer_data *dev_data, const std::vector<std::shared_ptr<cvdescriptorset::DescriptorSetLayout const>> set_layouts,
+ bool skip_update_after_bind) {
// Identify active pipeline stages
std::vector<VkShaderStageFlags> stage_flags = {VK_SHADER_STAGE_VERTEX_BIT, VK_SHADER_STAGE_FRAGMENT_BIT,
VK_SHADER_STAGE_COMPUTE_BIT};
- if (enabled_features->core.geometryShader) {
+ if (dev_data->enabled_features.core.geometryShader) {
stage_flags.push_back(VK_SHADER_STAGE_GEOMETRY_BIT);
}
- if (enabled_features->core.tessellationShader) {
+ if (dev_data->enabled_features.core.tessellationShader) {
stage_flags.push_back(VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT);
stage_flags.push_back(VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT);
}
@@ -5604,7 +5517,8 @@ std::valarray<uint32_t> GetDescriptorCountMaxPerStage(
// Returns a map indexed by VK_DESCRIPTOR_TYPE_* enum of the summed descriptors by type.
// Note: descriptors only count against the limit once even if used by multiple stages.
std::map<uint32_t, uint32_t> GetDescriptorSum(
- const std::vector<std::shared_ptr<cvdescriptorset::DescriptorSetLayout const>> &set_layouts, bool skip_update_after_bind) {
+ const layer_data *dev_data, const std::vector<std::shared_ptr<cvdescriptorset::DescriptorSetLayout const>> &set_layouts,
+ bool skip_update_after_bind) {
std::map<uint32_t, uint32_t> sum_by_type;
for (auto dsl : set_layouts) {
if (skip_update_after_bind && (dsl->GetCreateFlags() & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT)) {
@@ -5629,23 +5543,24 @@ std::map<uint32_t, uint32_t> GetDescriptorSum(
bool CoreChecks::PreCallValidateCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator, VkPipelineLayout *pPipelineLayout) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
bool skip = false;
// Validate layout count against device physical limit
- if (pCreateInfo->setLayoutCount > phys_dev_props.limits.maxBoundDescriptorSets) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ if (pCreateInfo->setLayoutCount > device_data->phys_dev_props.limits.maxBoundDescriptorSets) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-VkPipelineLayoutCreateInfo-setLayoutCount-00286",
"vkCreatePipelineLayout(): setLayoutCount (%d) exceeds physical device maxBoundDescriptorSets limit (%d).",
- pCreateInfo->setLayoutCount, phys_dev_props.limits.maxBoundDescriptorSets);
+ pCreateInfo->setLayoutCount, device_data->phys_dev_props.limits.maxBoundDescriptorSets);
}
// Validate Push Constant ranges
uint32_t i, j;
for (i = 0; i < pCreateInfo->pushConstantRangeCount; ++i) {
- skip |= ValidatePushConstantRange(pCreateInfo->pPushConstantRanges[i].offset, pCreateInfo->pPushConstantRanges[i].size,
- "vkCreatePipelineLayout()", i);
+ skip |= ValidatePushConstantRange(device_data, pCreateInfo->pPushConstantRanges[i].offset,
+ pCreateInfo->pPushConstantRanges[i].size, "vkCreatePipelineLayout()", i);
if (0 == pCreateInfo->pPushConstantRanges[i].stageFlags) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-VkPushConstantRange-stageFlags-requiredbitmask",
"vkCreatePipelineLayout() call has no stageFlags set.");
}
@@ -5655,7 +5570,7 @@ bool CoreChecks::PreCallValidateCreatePipelineLayout(VkDevice device, const VkPi
for (i = 0; i < pCreateInfo->pushConstantRangeCount; ++i) {
for (j = i + 1; j < pCreateInfo->pushConstantRangeCount; ++j) {
if (0 != (pCreateInfo->pPushConstantRanges[i].stageFlags & pCreateInfo->pPushConstantRanges[j].stageFlags)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-VkPipelineLayoutCreateInfo-pPushConstantRanges-00292",
"vkCreatePipelineLayout() Duplicate stage flags found in ranges %d and %d.", i, j);
}
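// Illustrative sketch only (values are made up): two ranges that pass the duplicate
// stage flags check above because their stageFlags share no stage bits.
VkPushConstantRange disjoint_ranges[2] = {};
disjoint_ranges[0].stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
disjoint_ranges[0].offset = 0;
disjoint_ranges[0].size = 16;
disjoint_ranges[1].stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
disjoint_ranges[1].offset = 16;
disjoint_ranges[1].size = 16;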
@@ -5669,366 +5584,390 @@ bool CoreChecks::PreCallValidateCreatePipelineLayout(VkDevice device, const VkPi
unsigned int push_descriptor_set_count = 0;
{
for (i = 0; i < pCreateInfo->setLayoutCount; ++i) {
- set_layouts[i] = GetDescriptorSetLayout(this, pCreateInfo->pSetLayouts[i]);
+ set_layouts[i] = GetDescriptorSetLayout(device_data, pCreateInfo->pSetLayouts[i]);
if (set_layouts[i]->IsPushDescriptor()) ++push_descriptor_set_count;
}
}
if (push_descriptor_set_count > 1) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00293",
"vkCreatePipelineLayout() Multiple push descriptor sets found.");
}
// Max descriptors by type, within a single pipeline stage
- std::valarray<uint32_t> max_descriptors_per_stage = GetDescriptorCountMaxPerStage(&enabled_features, set_layouts, true);
+ std::valarray<uint32_t> max_descriptors_per_stage = GetDescriptorCountMaxPerStage(device_data, set_layouts, true);
// Samplers
- if (max_descriptors_per_stage[DSL_TYPE_SAMPLERS] > phys_dev_props.limits.maxPerStageDescriptorSamplers) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00287",
- "vkCreatePipelineLayout(): max per-stage sampler bindings count (%d) exceeds device "
- "maxPerStageDescriptorSamplers limit (%d).",
- max_descriptors_per_stage[DSL_TYPE_SAMPLERS], phys_dev_props.limits.maxPerStageDescriptorSamplers);
+ if (max_descriptors_per_stage[DSL_TYPE_SAMPLERS] > device_data->phys_dev_props.limits.maxPerStageDescriptorSamplers) {
+ skip |=
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00287",
+ "vkCreatePipelineLayout(): max per-stage sampler bindings count (%d) exceeds device "
+ "maxPerStageDescriptorSamplers limit (%d).",
+ max_descriptors_per_stage[DSL_TYPE_SAMPLERS], device_data->phys_dev_props.limits.maxPerStageDescriptorSamplers);
}
// Uniform buffers
- if (max_descriptors_per_stage[DSL_TYPE_UNIFORM_BUFFERS] > phys_dev_props.limits.maxPerStageDescriptorUniformBuffers) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00288",
- "vkCreatePipelineLayout(): max per-stage uniform buffer bindings count (%d) exceeds device "
- "maxPerStageDescriptorUniformBuffers limit (%d).",
- max_descriptors_per_stage[DSL_TYPE_UNIFORM_BUFFERS], phys_dev_props.limits.maxPerStageDescriptorUniformBuffers);
+ if (max_descriptors_per_stage[DSL_TYPE_UNIFORM_BUFFERS] >
+ device_data->phys_dev_props.limits.maxPerStageDescriptorUniformBuffers) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00288",
+ "vkCreatePipelineLayout(): max per-stage uniform buffer bindings count (%d) exceeds device "
+ "maxPerStageDescriptorUniformBuffers limit (%d).",
+ max_descriptors_per_stage[DSL_TYPE_UNIFORM_BUFFERS],
+ device_data->phys_dev_props.limits.maxPerStageDescriptorUniformBuffers);
}
// Storage buffers
- if (max_descriptors_per_stage[DSL_TYPE_STORAGE_BUFFERS] > phys_dev_props.limits.maxPerStageDescriptorStorageBuffers) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00289",
- "vkCreatePipelineLayout(): max per-stage storage buffer bindings count (%d) exceeds device "
- "maxPerStageDescriptorStorageBuffers limit (%d).",
- max_descriptors_per_stage[DSL_TYPE_STORAGE_BUFFERS], phys_dev_props.limits.maxPerStageDescriptorStorageBuffers);
+ if (max_descriptors_per_stage[DSL_TYPE_STORAGE_BUFFERS] >
+ device_data->phys_dev_props.limits.maxPerStageDescriptorStorageBuffers) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00289",
+ "vkCreatePipelineLayout(): max per-stage storage buffer bindings count (%d) exceeds device "
+ "maxPerStageDescriptorStorageBuffers limit (%d).",
+ max_descriptors_per_stage[DSL_TYPE_STORAGE_BUFFERS],
+ device_data->phys_dev_props.limits.maxPerStageDescriptorStorageBuffers);
}
// Sampled images
- if (max_descriptors_per_stage[DSL_TYPE_SAMPLED_IMAGES] > phys_dev_props.limits.maxPerStageDescriptorSampledImages) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00290",
- "vkCreatePipelineLayout(): max per-stage sampled image bindings count (%d) exceeds device "
- "maxPerStageDescriptorSampledImages limit (%d).",
- max_descriptors_per_stage[DSL_TYPE_SAMPLED_IMAGES], phys_dev_props.limits.maxPerStageDescriptorSampledImages);
+ if (max_descriptors_per_stage[DSL_TYPE_SAMPLED_IMAGES] >
+ device_data->phys_dev_props.limits.maxPerStageDescriptorSampledImages) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00290",
+ "vkCreatePipelineLayout(): max per-stage sampled image bindings count (%d) exceeds device "
+ "maxPerStageDescriptorSampledImages limit (%d).",
+ max_descriptors_per_stage[DSL_TYPE_SAMPLED_IMAGES],
+ device_data->phys_dev_props.limits.maxPerStageDescriptorSampledImages);
}
// Storage images
- if (max_descriptors_per_stage[DSL_TYPE_STORAGE_IMAGES] > phys_dev_props.limits.maxPerStageDescriptorStorageImages) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00291",
- "vkCreatePipelineLayout(): max per-stage storage image bindings count (%d) exceeds device "
- "maxPerStageDescriptorStorageImages limit (%d).",
- max_descriptors_per_stage[DSL_TYPE_STORAGE_IMAGES], phys_dev_props.limits.maxPerStageDescriptorStorageImages);
+ if (max_descriptors_per_stage[DSL_TYPE_STORAGE_IMAGES] >
+ device_data->phys_dev_props.limits.maxPerStageDescriptorStorageImages) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00291",
+ "vkCreatePipelineLayout(): max per-stage storage image bindings count (%d) exceeds device "
+ "maxPerStageDescriptorStorageImages limit (%d).",
+ max_descriptors_per_stage[DSL_TYPE_STORAGE_IMAGES],
+ device_data->phys_dev_props.limits.maxPerStageDescriptorStorageImages);
}
// Input attachments
- if (max_descriptors_per_stage[DSL_TYPE_INPUT_ATTACHMENTS] > phys_dev_props.limits.maxPerStageDescriptorInputAttachments) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ if (max_descriptors_per_stage[DSL_TYPE_INPUT_ATTACHMENTS] >
+ device_data->phys_dev_props.limits.maxPerStageDescriptorInputAttachments) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01676",
"vkCreatePipelineLayout(): max per-stage input attachment bindings count (%d) exceeds device "
"maxPerStageDescriptorInputAttachments limit (%d).",
max_descriptors_per_stage[DSL_TYPE_INPUT_ATTACHMENTS],
- phys_dev_props.limits.maxPerStageDescriptorInputAttachments);
+ device_data->phys_dev_props.limits.maxPerStageDescriptorInputAttachments);
}
// Inline uniform blocks
if (max_descriptors_per_stage[DSL_TYPE_INLINE_UNIFORM_BLOCK] >
- phys_dev_ext_props.inline_uniform_block_props.maxPerStageDescriptorInlineUniformBlocks) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ device_data->phys_dev_ext_props.inline_uniform_block_props.maxPerStageDescriptorInlineUniformBlocks) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-VkPipelineLayoutCreateInfo-descriptorType-02214",
"vkCreatePipelineLayout(): max per-stage inline uniform block bindings count (%d) exceeds device "
"maxPerStageDescriptorInlineUniformBlocks limit (%d).",
max_descriptors_per_stage[DSL_TYPE_INLINE_UNIFORM_BLOCK],
- phys_dev_ext_props.inline_uniform_block_props.maxPerStageDescriptorInlineUniformBlocks);
+ device_data->phys_dev_ext_props.inline_uniform_block_props.maxPerStageDescriptorInlineUniformBlocks);
}
// Total descriptors by type
//
- std::map<uint32_t, uint32_t> sum_all_stages = GetDescriptorSum(set_layouts, true);
+ std::map<uint32_t, uint32_t> sum_all_stages = GetDescriptorSum(device_data, set_layouts, true);
// Samplers
uint32_t sum = sum_all_stages[VK_DESCRIPTOR_TYPE_SAMPLER] + sum_all_stages[VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER];
- if (sum > phys_dev_props.limits.maxDescriptorSetSamplers) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ if (sum > device_data->phys_dev_props.limits.maxDescriptorSetSamplers) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01677",
"vkCreatePipelineLayout(): sum of sampler bindings among all stages (%d) exceeds device "
"maxDescriptorSetSamplers limit (%d).",
- sum, phys_dev_props.limits.maxDescriptorSetSamplers);
+ sum, device_data->phys_dev_props.limits.maxDescriptorSetSamplers);
}
// Uniform buffers
- if (sum_all_stages[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER] > phys_dev_props.limits.maxDescriptorSetUniformBuffers) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ if (sum_all_stages[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER] > device_data->phys_dev_props.limits.maxDescriptorSetUniformBuffers) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01678",
"vkCreatePipelineLayout(): sum of uniform buffer bindings among all stages (%d) exceeds device "
"maxDescriptorSetUniformBuffers limit (%d).",
- sum_all_stages[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER], phys_dev_props.limits.maxDescriptorSetUniformBuffers);
+ sum_all_stages[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER],
+ device_data->phys_dev_props.limits.maxDescriptorSetUniformBuffers);
}
// Dynamic uniform buffers
- if (sum_all_stages[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC] > phys_dev_props.limits.maxDescriptorSetUniformBuffersDynamic) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ if (sum_all_stages[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC] >
+ device_data->phys_dev_props.limits.maxDescriptorSetUniformBuffersDynamic) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01679",
"vkCreatePipelineLayout(): sum of dynamic uniform buffer bindings among all stages (%d) exceeds device "
"maxDescriptorSetUniformBuffersDynamic limit (%d).",
sum_all_stages[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC],
- phys_dev_props.limits.maxDescriptorSetUniformBuffersDynamic);
+ device_data->phys_dev_props.limits.maxDescriptorSetUniformBuffersDynamic);
}
// Storage buffers
- if (sum_all_stages[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER] > phys_dev_props.limits.maxDescriptorSetStorageBuffers) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ if (sum_all_stages[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER] > device_data->phys_dev_props.limits.maxDescriptorSetStorageBuffers) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01680",
"vkCreatePipelineLayout(): sum of storage buffer bindings among all stages (%d) exceeds device "
"maxDescriptorSetStorageBuffers limit (%d).",
- sum_all_stages[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER], phys_dev_props.limits.maxDescriptorSetStorageBuffers);
+ sum_all_stages[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER],
+ device_data->phys_dev_props.limits.maxDescriptorSetStorageBuffers);
}
// Dynamic storage buffers
- if (sum_all_stages[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC] > phys_dev_props.limits.maxDescriptorSetStorageBuffersDynamic) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ if (sum_all_stages[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC] >
+ device_data->phys_dev_props.limits.maxDescriptorSetStorageBuffersDynamic) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01681",
"vkCreatePipelineLayout(): sum of dynamic storage buffer bindings among all stages (%d) exceeds device "
"maxDescriptorSetStorageBuffersDynamic limit (%d).",
sum_all_stages[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC],
- phys_dev_props.limits.maxDescriptorSetStorageBuffersDynamic);
+ device_data->phys_dev_props.limits.maxDescriptorSetStorageBuffersDynamic);
}
// Sampled images
sum = sum_all_stages[VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE] + sum_all_stages[VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER] +
sum_all_stages[VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER];
- if (sum > phys_dev_props.limits.maxDescriptorSetSampledImages) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ if (sum > device_data->phys_dev_props.limits.maxDescriptorSetSampledImages) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01682",
"vkCreatePipelineLayout(): sum of sampled image bindings among all stages (%d) exceeds device "
"maxDescriptorSetSampledImages limit (%d).",
- sum, phys_dev_props.limits.maxDescriptorSetSampledImages);
+ sum, device_data->phys_dev_props.limits.maxDescriptorSetSampledImages);
}
// Storage images
sum = sum_all_stages[VK_DESCRIPTOR_TYPE_STORAGE_IMAGE] + sum_all_stages[VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER];
- if (sum > phys_dev_props.limits.maxDescriptorSetStorageImages) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ if (sum > device_data->phys_dev_props.limits.maxDescriptorSetStorageImages) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01683",
"vkCreatePipelineLayout(): sum of storage image bindings among all stages (%d) exceeds device "
"maxDescriptorSetStorageImages limit (%d).",
- sum, phys_dev_props.limits.maxDescriptorSetStorageImages);
+ sum, device_data->phys_dev_props.limits.maxDescriptorSetStorageImages);
}
// Input attachments
- if (sum_all_stages[VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT] > phys_dev_props.limits.maxDescriptorSetInputAttachments) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01684",
- "vkCreatePipelineLayout(): sum of input attachment bindings among all stages (%d) exceeds device "
- "maxDescriptorSetInputAttachments limit (%d).",
- sum_all_stages[VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT], phys_dev_props.limits.maxDescriptorSetInputAttachments);
+ if (sum_all_stages[VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT] > device_data->phys_dev_props.limits.maxDescriptorSetInputAttachments) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01684",
+ "vkCreatePipelineLayout(): sum of input attachment bindings among all stages (%d) exceeds device "
+ "maxDescriptorSetInputAttachments limit (%d).",
+ sum_all_stages[VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT],
+ device_data->phys_dev_props.limits.maxDescriptorSetInputAttachments);
}
// Inline uniform blocks
if (sum_all_stages[VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT] >
- phys_dev_ext_props.inline_uniform_block_props.maxDescriptorSetInlineUniformBlocks) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ device_data->phys_dev_ext_props.inline_uniform_block_props.maxDescriptorSetInlineUniformBlocks) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-VkPipelineLayoutCreateInfo-descriptorType-02216",
"vkCreatePipelineLayout(): sum of inline uniform block bindings among all stages (%d) exceeds device "
"maxDescriptorSetInlineUniformBlocks limit (%d).",
sum_all_stages[VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT],
- phys_dev_ext_props.inline_uniform_block_props.maxDescriptorSetInlineUniformBlocks);
+ device_data->phys_dev_ext_props.inline_uniform_block_props.maxDescriptorSetInlineUniformBlocks);
}
- if (device_extensions.vk_ext_descriptor_indexing) {
+ if (device_data->device_extensions.vk_ext_descriptor_indexing) {
// XXX TODO: replace with correct VU messages
// Max descriptors by type, within a single pipeline stage
std::valarray<uint32_t> max_descriptors_per_stage_update_after_bind =
- GetDescriptorCountMaxPerStage(&enabled_features, set_layouts, false);
+ GetDescriptorCountMaxPerStage(device_data, set_layouts, false);
// Samplers
if (max_descriptors_per_stage_update_after_bind[DSL_TYPE_SAMPLERS] >
- phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindSamplers) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ device_data->phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindSamplers) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-VkPipelineLayoutCreateInfo-descriptorType-03022",
"vkCreatePipelineLayout(): max per-stage sampler bindings count (%d) exceeds device "
"maxPerStageDescriptorUpdateAfterBindSamplers limit (%d).",
max_descriptors_per_stage_update_after_bind[DSL_TYPE_SAMPLERS],
- phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindSamplers);
+ device_data->phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindSamplers);
}
// Uniform buffers
if (max_descriptors_per_stage_update_after_bind[DSL_TYPE_UNIFORM_BUFFERS] >
- phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindUniformBuffers) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkPipelineLayoutCreateInfo-descriptorType-03023",
- "vkCreatePipelineLayout(): max per-stage uniform buffer bindings count (%d) exceeds device "
- "maxPerStageDescriptorUpdateAfterBindUniformBuffers limit (%d).",
- max_descriptors_per_stage_update_after_bind[DSL_TYPE_UNIFORM_BUFFERS],
- phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindUniformBuffers);
+ device_data->phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindUniformBuffers) {
+ skip |= log_msg(
+ device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ "VUID-VkPipelineLayoutCreateInfo-descriptorType-03023",
+ "vkCreatePipelineLayout(): max per-stage uniform buffer bindings count (%d) exceeds device "
+ "maxPerStageDescriptorUpdateAfterBindUniformBuffers limit (%d).",
+ max_descriptors_per_stage_update_after_bind[DSL_TYPE_UNIFORM_BUFFERS],
+ device_data->phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindUniformBuffers);
}
// Storage buffers
if (max_descriptors_per_stage_update_after_bind[DSL_TYPE_STORAGE_BUFFERS] >
- phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindStorageBuffers) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkPipelineLayoutCreateInfo-descriptorType-03024",
- "vkCreatePipelineLayout(): max per-stage storage buffer bindings count (%d) exceeds device "
- "maxPerStageDescriptorUpdateAfterBindStorageBuffers limit (%d).",
- max_descriptors_per_stage_update_after_bind[DSL_TYPE_STORAGE_BUFFERS],
- phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindStorageBuffers);
+ device_data->phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindStorageBuffers) {
+ skip |= log_msg(
+ device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ "VUID-VkPipelineLayoutCreateInfo-descriptorType-03024",
+ "vkCreatePipelineLayout(): max per-stage storage buffer bindings count (%d) exceeds device "
+ "maxPerStageDescriptorUpdateAfterBindStorageBuffers limit (%d).",
+ max_descriptors_per_stage_update_after_bind[DSL_TYPE_STORAGE_BUFFERS],
+ device_data->phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindStorageBuffers);
}
// Sampled images
if (max_descriptors_per_stage_update_after_bind[DSL_TYPE_SAMPLED_IMAGES] >
- phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindSampledImages) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkPipelineLayoutCreateInfo-descriptorType-03025",
- "vkCreatePipelineLayout(): max per-stage sampled image bindings count (%d) exceeds device "
- "maxPerStageDescriptorUpdateAfterBindSampledImages limit (%d).",
- max_descriptors_per_stage_update_after_bind[DSL_TYPE_SAMPLED_IMAGES],
- phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindSampledImages);
+ device_data->phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindSampledImages) {
+ skip |= log_msg(
+ device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ "VUID-VkPipelineLayoutCreateInfo-descriptorType-03025",
+ "vkCreatePipelineLayout(): max per-stage sampled image bindings count (%d) exceeds device "
+ "maxPerStageDescriptorUpdateAfterBindSampledImages limit (%d).",
+ max_descriptors_per_stage_update_after_bind[DSL_TYPE_SAMPLED_IMAGES],
+ device_data->phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindSampledImages);
}
// Storage images
if (max_descriptors_per_stage_update_after_bind[DSL_TYPE_STORAGE_IMAGES] >
- phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindStorageImages) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkPipelineLayoutCreateInfo-descriptorType-03026",
- "vkCreatePipelineLayout(): max per-stage storage image bindings count (%d) exceeds device "
- "maxPerStageDescriptorUpdateAfterBindStorageImages limit (%d).",
- max_descriptors_per_stage_update_after_bind[DSL_TYPE_STORAGE_IMAGES],
- phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindStorageImages);
+ device_data->phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindStorageImages) {
+ skip |= log_msg(
+ device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ "VUID-VkPipelineLayoutCreateInfo-descriptorType-03026",
+ "vkCreatePipelineLayout(): max per-stage storage image bindings count (%d) exceeds device "
+ "maxPerStageDescriptorUpdateAfterBindStorageImages limit (%d).",
+ max_descriptors_per_stage_update_after_bind[DSL_TYPE_STORAGE_IMAGES],
+ device_data->phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindStorageImages);
}
// Input attachments
if (max_descriptors_per_stage_update_after_bind[DSL_TYPE_INPUT_ATTACHMENTS] >
- phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindInputAttachments) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkPipelineLayoutCreateInfo-descriptorType-03027",
- "vkCreatePipelineLayout(): max per-stage input attachment bindings count (%d) exceeds device "
- "maxPerStageDescriptorUpdateAfterBindInputAttachments limit (%d).",
- max_descriptors_per_stage_update_after_bind[DSL_TYPE_INPUT_ATTACHMENTS],
- phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindInputAttachments);
+ device_data->phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindInputAttachments) {
+ skip |= log_msg(
+ device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ "VUID-VkPipelineLayoutCreateInfo-descriptorType-03027",
+ "vkCreatePipelineLayout(): max per-stage input attachment bindings count (%d) exceeds device "
+ "maxPerStageDescriptorUpdateAfterBindInputAttachments limit (%d).",
+ max_descriptors_per_stage_update_after_bind[DSL_TYPE_INPUT_ATTACHMENTS],
+ device_data->phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindInputAttachments);
}
// Inline uniform blocks
if (max_descriptors_per_stage_update_after_bind[DSL_TYPE_INLINE_UNIFORM_BLOCK] >
- phys_dev_ext_props.inline_uniform_block_props.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkPipelineLayoutCreateInfo-descriptorType-02215",
- "vkCreatePipelineLayout(): max per-stage inline uniform block bindings count (%d) exceeds device "
- "maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks limit (%d).",
- max_descriptors_per_stage_update_after_bind[DSL_TYPE_INLINE_UNIFORM_BLOCK],
- phys_dev_ext_props.inline_uniform_block_props.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks);
+ device_data->phys_dev_ext_props.inline_uniform_block_props.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks) {
+ skip |= log_msg(
+ device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ "VUID-VkPipelineLayoutCreateInfo-descriptorType-02215",
+ "vkCreatePipelineLayout(): max per-stage inline uniform block bindings count (%d) exceeds device "
+ "maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks limit (%d).",
+ max_descriptors_per_stage_update_after_bind[DSL_TYPE_INLINE_UNIFORM_BLOCK],
+ device_data->phys_dev_ext_props.inline_uniform_block_props.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks);
}
// Total descriptors by type, summed across all pipeline stages
//
- std::map<uint32_t, uint32_t> sum_all_stages_update_after_bind = GetDescriptorSum(set_layouts, false);
+ std::map<uint32_t, uint32_t> sum_all_stages_update_after_bind = GetDescriptorSum(device_data, set_layouts, false);
// Samplers
sum = sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_SAMPLER] +
sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER];
- if (sum > phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindSamplers) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ if (sum > device_data->phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindSamplers) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03036",
"vkCreatePipelineLayout(): sum of sampler bindings among all stages (%d) exceeds device "
"maxDescriptorSetUpdateAfterBindSamplers limit (%d).",
- sum, phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindSamplers);
+ sum, device_data->phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindSamplers);
}
// Uniform buffers
if (sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER] >
- phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindUniformBuffers) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03037",
- "vkCreatePipelineLayout(): sum of uniform buffer bindings among all stages (%d) exceeds device "
- "maxDescriptorSetUpdateAfterBindUniformBuffers limit (%d).",
- sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER],
- phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindUniformBuffers);
+ device_data->phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindUniformBuffers) {
+ skip |=
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03037",
+ "vkCreatePipelineLayout(): sum of uniform buffer bindings among all stages (%d) exceeds device "
+ "maxDescriptorSetUpdateAfterBindUniformBuffers limit (%d).",
+ sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER],
+ device_data->phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindUniformBuffers);
}
// Dynamic uniform buffers
if (sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC] >
- phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03038",
- "vkCreatePipelineLayout(): sum of dynamic uniform buffer bindings among all stages (%d) exceeds device "
- "maxDescriptorSetUpdateAfterBindUniformBuffersDynamic limit (%d).",
- sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC],
- phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic);
+ device_data->phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic) {
+ skip |= log_msg(
+ device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03038",
+ "vkCreatePipelineLayout(): sum of dynamic uniform buffer bindings among all stages (%d) exceeds device "
+ "maxDescriptorSetUpdateAfterBindUniformBuffersDynamic limit (%d).",
+ sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC],
+ device_data->phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic);
}
// Storage buffers
if (sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER] >
- phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindStorageBuffers) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03039",
- "vkCreatePipelineLayout(): sum of storage buffer bindings among all stages (%d) exceeds device "
- "maxDescriptorSetUpdateAfterBindStorageBuffers limit (%d).",
- sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER],
- phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindStorageBuffers);
+ device_data->phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindStorageBuffers) {
+ skip |=
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03039",
+ "vkCreatePipelineLayout(): sum of storage buffer bindings among all stages (%d) exceeds device "
+ "maxDescriptorSetUpdateAfterBindStorageBuffers limit (%d).",
+ sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER],
+ device_data->phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindStorageBuffers);
}
// Dynamic storage buffers
if (sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC] >
- phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03040",
- "vkCreatePipelineLayout(): sum of dynamic storage buffer bindings among all stages (%d) exceeds device "
- "maxDescriptorSetUpdateAfterBindStorageBuffersDynamic limit (%d).",
- sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC],
- phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic);
+ device_data->phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic) {
+ skip |= log_msg(
+ device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03040",
+ "vkCreatePipelineLayout(): sum of dynamic storage buffer bindings among all stages (%d) exceeds device "
+ "maxDescriptorSetUpdateAfterBindStorageBuffersDynamic limit (%d).",
+ sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC],
+ device_data->phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic);
}
// Sampled images
sum = sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE] +
sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER] +
sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER];
- if (sum > phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindSampledImages) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ if (sum > device_data->phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindSampledImages) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03041",
"vkCreatePipelineLayout(): sum of sampled image bindings among all stages (%d) exceeds device "
"maxDescriptorSetUpdateAfterBindSampledImages limit (%d).",
- sum, phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindSampledImages);
+ sum,
+ device_data->phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindSampledImages);
}
// Storage images
sum = sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_STORAGE_IMAGE] +
sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER];
- if (sum > phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindStorageImages) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ if (sum > device_data->phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindStorageImages) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03042",
"vkCreatePipelineLayout(): sum of storage image bindings among all stages (%d) exceeds device "
"maxDescriptorSetUpdateAfterBindStorageImages limit (%d).",
- sum, phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindStorageImages);
+ sum,
+ device_data->phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindStorageImages);
}
// Input attachments
if (sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT] >
- phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindInputAttachments) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03043",
- "vkCreatePipelineLayout(): sum of input attachment bindings among all stages (%d) exceeds device "
- "maxDescriptorSetUpdateAfterBindInputAttachments limit (%d).",
- sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT],
- phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindInputAttachments);
+ device_data->phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindInputAttachments) {
+ skip |=
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03043",
+ "vkCreatePipelineLayout(): sum of input attachment bindings among all stages (%d) exceeds device "
+ "maxDescriptorSetUpdateAfterBindInputAttachments limit (%d).",
+ sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT],
+ device_data->phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindInputAttachments);
}
// Inline uniform blocks
if (sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT] >
- phys_dev_ext_props.inline_uniform_block_props.maxDescriptorSetUpdateAfterBindInlineUniformBlocks) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkPipelineLayoutCreateInfo-descriptorType-02217",
- "vkCreatePipelineLayout(): sum of inline uniform block bindings among all stages (%d) exceeds device "
- "maxDescriptorSetUpdateAfterBindInlineUniformBlocks limit (%d).",
- sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT],
- phys_dev_ext_props.inline_uniform_block_props.maxDescriptorSetUpdateAfterBindInlineUniformBlocks);
+ device_data->phys_dev_ext_props.inline_uniform_block_props.maxDescriptorSetUpdateAfterBindInlineUniformBlocks) {
+ skip |= log_msg(
+ device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ "VUID-VkPipelineLayoutCreateInfo-descriptorType-02217",
+ "vkCreatePipelineLayout(): sum of inline uniform block bindings among all stages (%d) exceeds device "
+ "maxDescriptorSetUpdateAfterBindInlineUniformBlocks limit (%d).",
+ sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT],
+ device_data->phys_dev_ext_props.inline_uniform_block_props.maxDescriptorSetUpdateAfterBindInlineUniformBlocks);
}
}
return skip;
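// Illustrative application-side sketch only (device, set_layout, and example_range
// are assumed handles/structs, not defined in this patch): the call that the limit
// checks above are validating against VkPhysicalDeviceLimits and the
// descriptor-indexing properties.
VkPipelineLayoutCreateInfo layout_ci = {};
layout_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
layout_ci.setLayoutCount = 1;  // compared against maxBoundDescriptorSets above
layout_ci.pSetLayouts = &set_layout;
layout_ci.pushConstantRangeCount = 1;
layout_ci.pPushConstantRanges = &example_range;
VkPipelineLayout layout = VK_NULL_HANDLE;
VkResult res = vkCreatePipelineLayout(device, &layout_ci, nullptr, &layout);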
@@ -6084,9 +6023,10 @@ static PipelineLayoutCompatId GetCanonicalId(const uint32_t set_index, const Pus
void CoreChecks::PreCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator, VkPipelineLayout *pPipelineLayout,
void *cpl_state_data) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
create_pipeline_layout_api_state *cpl_state = reinterpret_cast<create_pipeline_layout_api_state *>(cpl_state_data);
- if (enabled.gpu_validation) {
- GpuPreCallCreatePipelineLayout(pCreateInfo, pAllocator, pPipelineLayout, &cpl_state->new_layouts,
+ if (GetEnables()->gpu_validation) {
+ GpuPreCallCreatePipelineLayout(device_data, pCreateInfo, pAllocator, pPipelineLayout, &cpl_state->new_layouts,
&cpl_state->modified_create_info);
}
}
@@ -6094,60 +6034,57 @@ void CoreChecks::PreCallRecordCreatePipelineLayout(VkDevice device, const VkPipe
void CoreChecks::PostCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator, VkPipelineLayout *pPipelineLayout,
VkResult result) {
- StateTracker::PostCallRecordCreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout, result);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
// Clean up GPU validation
- if (enabled.gpu_validation) {
- GpuPostCallCreatePipelineLayout(result);
+ if (GetEnables()->gpu_validation) {
+ GpuPostCallCreatePipelineLayout(device_data, result);
}
-}
-
-void ValidationStateTracker::PostCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
- const VkAllocationCallbacks *pAllocator,
- VkPipelineLayout *pPipelineLayout, VkResult result) {
if (VK_SUCCESS != result) return;
- std::unique_ptr<PIPELINE_LAYOUT_STATE> pipeline_layout_state(new PIPELINE_LAYOUT_STATE{});
- pipeline_layout_state->layout = *pPipelineLayout;
- pipeline_layout_state->set_layouts.resize(pCreateInfo->setLayoutCount);
+ PIPELINE_LAYOUT_NODE &plNode = device_data->pipelineLayoutMap[*pPipelineLayout];
+ plNode.layout = *pPipelineLayout;
+ plNode.set_layouts.resize(pCreateInfo->setLayoutCount);
PipelineLayoutSetLayoutsDef set_layouts(pCreateInfo->setLayoutCount);
for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
- pipeline_layout_state->set_layouts[i] = GetDescriptorSetLayout(this, pCreateInfo->pSetLayouts[i]);
- set_layouts[i] = pipeline_layout_state->set_layouts[i]->GetLayoutId();
+ plNode.set_layouts[i] = GetDescriptorSetLayout(device_data, pCreateInfo->pSetLayouts[i]);
+ set_layouts[i] = plNode.set_layouts[i]->GetLayoutId();
}
// Get canonical form IDs for the "compatible for set" contents
- pipeline_layout_state->push_constant_ranges = GetCanonicalId(pCreateInfo);
+ plNode.push_constant_ranges = GetCanonicalId(pCreateInfo);
auto set_layouts_id = pipeline_layout_set_layouts_dict.look_up(set_layouts);
- pipeline_layout_state->compat_for_set.reserve(pCreateInfo->setLayoutCount);
+ plNode.compat_for_set.reserve(pCreateInfo->setLayoutCount);
    // Create table of "compatible for set N" canonical forms for trivial accept validation
for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
- pipeline_layout_state->compat_for_set.emplace_back(
- GetCanonicalId(i, pipeline_layout_state->push_constant_ranges, set_layouts_id));
+ plNode.compat_for_set.emplace_back(GetCanonicalId(i, plNode.push_constant_ranges, set_layouts_id));
}
- pipelineLayoutMap[*pPipelineLayout] = std::move(pipeline_layout_state);
}
-void ValidationStateTracker::PostCallRecordCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo,
- const VkAllocationCallbacks *pAllocator,
- VkDescriptorPool *pDescriptorPool, VkResult result) {
+void CoreChecks::PostCallRecordCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator, VkDescriptorPool *pDescriptorPool,
+ VkResult result) {
+ layer_data *dev_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (VK_SUCCESS != result) return;
- descriptorPoolMap[*pDescriptorPool] =
- std::unique_ptr<DESCRIPTOR_POOL_STATE>(new DESCRIPTOR_POOL_STATE(*pDescriptorPool, pCreateInfo));
+ DESCRIPTOR_POOL_STATE *pNewNode = new DESCRIPTOR_POOL_STATE(*pDescriptorPool, pCreateInfo);
+ assert(pNewNode);
+ dev_data->descriptorPoolMap[*pDescriptorPool] = pNewNode;
}
bool CoreChecks::PreCallValidateResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
VkDescriptorPoolResetFlags flags) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
// Make sure sets being destroyed are not currently in-use
- if (disabled.idle_descriptor_set) return false;
+ if (device_data->instance_data->disabled.idle_descriptor_set) return false;
bool skip = false;
DESCRIPTOR_POOL_STATE *pPool = GetDescriptorPoolState(descriptorPool);
if (pPool != nullptr) {
for (auto ds : pPool->sets) {
if (ds && ds->in_use.load()) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,
- HandleToUint64(descriptorPool), "VUID-vkResetDescriptorPool-descriptorPool-00313",
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, HandleToUint64(descriptorPool),
+ "VUID-vkResetDescriptorPool-descriptorPool-00313",
"It is invalid to call vkResetDescriptorPool() with descriptor sets in use by a command buffer.");
if (skip) break;
}
@@ -6156,14 +6093,15 @@ bool CoreChecks::PreCallValidateResetDescriptorPool(VkDevice device, VkDescripto
return skip;
}
-void ValidationStateTracker::PostCallRecordResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
- VkDescriptorPoolResetFlags flags, VkResult result) {
+void CoreChecks::PostCallRecordResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
+ VkDescriptorPoolResetFlags flags, VkResult result) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (VK_SUCCESS != result) return;
DESCRIPTOR_POOL_STATE *pPool = GetDescriptorPoolState(descriptorPool);
// TODO: validate flags
// For every set off of this pool, clear it, remove from setMap, and free cvdescriptorset::DescriptorSet
for (auto ds : pPool->sets) {
- FreeDescriptorSet(ds);
+ FreeDescriptorSet(device_data, ds);
}
pPool->sets.clear();
// Reset available count for each type and available sets for this pool
@@ -6178,39 +6116,44 @@ void ValidationStateTracker::PostCallRecordResetDescriptorPool(VkDevice device,
// as well as DescriptorSetLayout ptrs used for later update.
bool CoreChecks::PreCallValidateAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
VkDescriptorSet *pDescriptorSets, void *ads_state_data) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+
// Always update common data
cvdescriptorset::AllocateDescriptorSetsData *ads_state =
reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
- UpdateAllocateDescriptorSetsData(pAllocateInfo, ads_state);
+ UpdateAllocateDescriptorSetsData(device_data, pAllocateInfo, ads_state);
+ if (device_data->instance_data->disabled.allocate_descriptor_sets) return false;
    // All state checks for AllocateDescriptorSets are done in a single function
- return ValidateAllocateDescriptorSets(pAllocateInfo, ads_state);
+ return ValidateAllocateDescriptorSets(device_data, pAllocateInfo, ads_state);
}
// Allocation state was good and call down chain was made so update state based on allocating descriptor sets
-void ValidationStateTracker::PostCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
- VkDescriptorSet *pDescriptorSets, VkResult result,
- void *ads_state_data) {
+void CoreChecks::PostCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
+ VkDescriptorSet *pDescriptorSets, VkResult result, void *ads_state_data) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (VK_SUCCESS != result) return;
// All the updates are contained in a single cvdescriptorset function
cvdescriptorset::AllocateDescriptorSetsData *ads_state =
reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
- PerformAllocateDescriptorSets(pAllocateInfo, pDescriptorSets, ads_state);
+ PerformAllocateDescriptorSets(pAllocateInfo, pDescriptorSets, ads_state, &device_data->descriptorPoolMap, &device_data->setMap,
+ device_data);
}
bool CoreChecks::PreCallValidateFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count,
const VkDescriptorSet *pDescriptorSets) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
// Make sure that no sets being destroyed are in-flight
bool skip = false;
// First make sure sets being destroyed are not currently in-use
for (uint32_t i = 0; i < count; ++i) {
if (pDescriptorSets[i] != VK_NULL_HANDLE) {
- skip |= ValidateIdleDescriptorSet(pDescriptorSets[i], "vkFreeDescriptorSets");
+ skip |= ValidateIdleDescriptorSet(device_data, pDescriptorSets[i], "vkFreeDescriptorSets");
}
}
DESCRIPTOR_POOL_STATE *pool_state = GetDescriptorPoolState(descriptorPool);
if (pool_state && !(VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT & pool_state->createInfo.flags)) {
// Can't Free from a NON_FREE pool
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,
HandleToUint64(descriptorPool), "VUID-vkFreeDescriptorSets-descriptorPool-00312",
"It is invalid to call vkFreeDescriptorSets() with a pool created without setting "
"VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT.");
@@ -6218,8 +6161,9 @@ bool CoreChecks::PreCallValidateFreeDescriptorSets(VkDevice device, VkDescriptor
return skip;
}
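// Illustrative application-side sketch only (pool_size is an assumed
// VkDescriptorPoolSize): a pool created with the FREE_DESCRIPTOR_SET flag, which the
// check above requires before individual sets may be released with vkFreeDescriptorSets.
VkDescriptorPoolCreateInfo pool_ci = {};
pool_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
pool_ci.flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT;
pool_ci.maxSets = 4;
pool_ci.poolSizeCount = 1;
pool_ci.pPoolSizes = &pool_size;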
-void ValidationStateTracker::PreCallRecordFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count,
- const VkDescriptorSet *pDescriptorSets) {
+void CoreChecks::PreCallRecordFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count,
+ const VkDescriptorSet *pDescriptorSets) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
DESCRIPTOR_POOL_STATE *pool_state = GetDescriptorPoolState(descriptorPool);
// Update available descriptor sets in pool
pool_state->availableSets += count;
@@ -6227,14 +6171,14 @@ void ValidationStateTracker::PreCallRecordFreeDescriptorSets(VkDevice device, Vk
// For each freed descriptor add its resources back into the pool as available and remove from pool and setMap
for (uint32_t i = 0; i < count; ++i) {
if (pDescriptorSets[i] != VK_NULL_HANDLE) {
- auto descriptor_set = setMap[pDescriptorSets[i]].get();
+ auto descriptor_set = device_data->setMap[pDescriptorSets[i]];
uint32_t type_index = 0, descriptor_count = 0;
for (uint32_t j = 0; j < descriptor_set->GetBindingCount(); ++j) {
type_index = static_cast<uint32_t>(descriptor_set->GetTypeFromIndex(j));
descriptor_count = descriptor_set->GetDescriptorCountFromIndex(j);
pool_state->availableDescriptorTypeCount[type_index] += descriptor_count;
}
- FreeDescriptorSet(descriptor_set);
+ FreeDescriptorSet(device_data, descriptor_set);
pool_state->sets.erase(descriptor_set);
}
}
@@ -6243,6 +6187,8 @@ void ValidationStateTracker::PreCallRecordFreeDescriptorSets(VkDevice device, Vk
bool CoreChecks::PreCallValidateUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
const VkWriteDescriptorSet *pDescriptorWrites, uint32_t descriptorCopyCount,
const VkCopyDescriptorSet *pDescriptorCopies) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ if (device_data->disabled.update_descriptor_sets) return false;
// First thing to do is perform map look-ups.
// NOTE : UpdateDescriptorSets is somewhat unique in that it's operating on a number of DescriptorSets
// so we can't just do a single map look-up up-front, but do them individually in functions below
@@ -6250,141 +6196,138 @@ bool CoreChecks::PreCallValidateUpdateDescriptorSets(VkDevice device, uint32_t d
// Now make call(s) that validate state, but don't perform state updates in this function
// Note, here DescriptorSets is unique in that we don't yet have an instance. Using a helper function in the
// namespace which will parse params and make calls into specific class instances
- return ValidateUpdateDescriptorSets(descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies,
- "vkUpdateDescriptorSets()");
+ return ValidateUpdateDescriptorSets(device_data->report_data, device_data, descriptorWriteCount, pDescriptorWrites,
+ descriptorCopyCount, pDescriptorCopies, "vkUpdateDescriptorSets()");
}
-void ValidationStateTracker::PreCallRecordUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
- const VkWriteDescriptorSet *pDescriptorWrites,
- uint32_t descriptorCopyCount,
- const VkCopyDescriptorSet *pDescriptorCopies) {
- cvdescriptorset::PerformUpdateDescriptorSets(this, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount,
+void CoreChecks::PreCallRecordUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
+ const VkWriteDescriptorSet *pDescriptorWrites, uint32_t descriptorCopyCount,
+ const VkCopyDescriptorSet *pDescriptorCopies) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ cvdescriptorset::PerformUpdateDescriptorSets(device_data, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount,
pDescriptorCopies);
}
-void ValidationStateTracker::PostCallRecordAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pCreateInfo,
- VkCommandBuffer *pCommandBuffer, VkResult result) {
+void CoreChecks::PostCallRecordAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pCreateInfo,
+ VkCommandBuffer *pCommandBuffer, VkResult result) {
if (VK_SUCCESS != result) return;
- auto pPool = GetCommandPoolState(pCreateInfo->commandPool);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ auto pPool = GetCommandPoolNode(pCreateInfo->commandPool);
if (pPool) {
for (uint32_t i = 0; i < pCreateInfo->commandBufferCount; i++) {
// Add command buffer to its commandPool map
pPool->commandBuffers.insert(pCommandBuffer[i]);
- std::unique_ptr<CMD_BUFFER_STATE> pCB(new CMD_BUFFER_STATE{});
+ GLOBAL_CB_NODE *pCB = new GLOBAL_CB_NODE;
+ // Add command buffer to map
+ device_data->commandBufferMap[pCommandBuffer[i]] = pCB;
+ ResetCommandBufferState(device_data, pCommandBuffer[i]);
pCB->createInfo = *pCreateInfo;
pCB->device = device;
- // Add command buffer to map
- commandBufferMap[pCommandBuffer[i]] = std::move(pCB);
- ResetCommandBufferState(pCommandBuffer[i]);
}
}
}
// Add bindings between the given cmd buffer & framebuffer and the framebuffer's children
-void ValidationStateTracker::AddFramebufferBinding(CMD_BUFFER_STATE *cb_state, FRAMEBUFFER_STATE *fb_state) {
- AddCommandBufferBinding(&fb_state->cb_bindings, VulkanTypedHandle(fb_state->framebuffer, kVulkanObjectTypeFramebuffer),
+void CoreChecks::AddFramebufferBinding(layer_data *dev_data, GLOBAL_CB_NODE *cb_state, FRAMEBUFFER_STATE *fb_state) {
+ AddCommandBufferBinding(&fb_state->cb_bindings, {HandleToUint64(fb_state->framebuffer), kVulkanObjectTypeFramebuffer},
cb_state);
const uint32_t attachmentCount = fb_state->createInfo.attachmentCount;
for (uint32_t attachment = 0; attachment < attachmentCount; ++attachment) {
auto view_state = GetAttachmentImageViewState(fb_state, attachment);
if (view_state) {
- AddCommandBufferBindingImageView(cb_state, view_state);
+ AddCommandBufferBindingImageView(dev_data, cb_state, view_state);
}
}
}
bool CoreChecks::PreCallValidateBeginCommandBuffer(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo *pBeginInfo) {
- const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
if (!cb_state) return false;
bool skip = false;
if (cb_state->in_use.load()) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(commandBuffer), "VUID-vkBeginCommandBuffer-commandBuffer-00049",
- "Calling vkBeginCommandBuffer() on active %s before it has completed. You must check "
+ "Calling vkBeginCommandBuffer() on active command buffer %s before it has completed. You must check "
"command buffer fence before this call.",
- report_data->FormatHandle(commandBuffer).c_str());
+ device_data->report_data->FormatHandle(commandBuffer).c_str());
}
if (cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) {
// Secondary Command Buffer
const VkCommandBufferInheritanceInfo *pInfo = pBeginInfo->pInheritanceInfo;
if (!pInfo) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(commandBuffer), "VUID-vkBeginCommandBuffer-commandBuffer-00051",
- "vkBeginCommandBuffer(): Secondary %s must have inheritance info.",
- report_data->FormatHandle(commandBuffer).c_str());
+ "vkBeginCommandBuffer(): Secondary Command Buffer (%s) must have inheritance info.",
+ device_data->report_data->FormatHandle(commandBuffer).c_str());
} else {
if (pBeginInfo->flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT) {
assert(pInfo->renderPass);
- const auto *framebuffer = GetFramebufferState(pInfo->framebuffer);
+ auto framebuffer = GetFramebufferState(pInfo->framebuffer);
if (framebuffer) {
if (framebuffer->createInfo.renderPass != pInfo->renderPass) {
- const auto *render_pass = GetRenderPassState(pInfo->renderPass);
// renderPass that framebuffer was created with must be compatible with local renderPass
- skip |= ValidateRenderPassCompatibility("framebuffer", framebuffer->rp_state.get(), "command buffer",
- render_pass, "vkBeginCommandBuffer()",
- "VUID-VkCommandBufferBeginInfo-flags-00055");
+ skip |=
+ ValidateRenderPassCompatibility(device_data, "framebuffer", framebuffer->rp_state.get(),
+ "command buffer", GetRenderPassState(pInfo->renderPass),
+ "vkBeginCommandBuffer()", "VUID-VkCommandBufferBeginInfo-flags-00055");
}
}
}
- if ((pInfo->occlusionQueryEnable == VK_FALSE || enabled_features.core.occlusionQueryPrecise == VK_FALSE) &&
+ if ((pInfo->occlusionQueryEnable == VK_FALSE || device_data->enabled_features.core.occlusionQueryPrecise == VK_FALSE) &&
(pInfo->queryFlags & VK_QUERY_CONTROL_PRECISE_BIT)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(commandBuffer), "VUID-vkBeginCommandBuffer-commandBuffer-00052",
- "vkBeginCommandBuffer(): Secondary %s must not have VK_QUERY_CONTROL_PRECISE_BIT if "
-                                "occlusionQuery is disabled or the device does not support precise occlusion queries.",
- report_data->FormatHandle(commandBuffer).c_str());
+ skip |=
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ HandleToUint64(commandBuffer), "VUID-vkBeginCommandBuffer-commandBuffer-00052",
+ "vkBeginCommandBuffer(): Secondary Command Buffer (%s) must not have VK_QUERY_CONTROL_PRECISE_BIT if "
+                            "occlusionQuery is disabled or the device does not support precise occlusion queries.",
+ device_data->report_data->FormatHandle(commandBuffer).c_str());
}
}
if (pInfo && pInfo->renderPass != VK_NULL_HANDLE) {
- const auto *renderPass = GetRenderPassState(pInfo->renderPass);
+ auto renderPass = GetRenderPassState(pInfo->renderPass);
if (renderPass) {
if (pInfo->subpass >= renderPass->createInfo.subpassCount) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(commandBuffer), "VUID-VkCommandBufferBeginInfo-flags-00054",
- "vkBeginCommandBuffer(): Secondary %s must have a subpass index (%d) that is "
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, HandleToUint64(commandBuffer),
+ "VUID-VkCommandBufferBeginInfo-flags-00054",
+                                    "vkBeginCommandBuffer(): Secondary Command Buffer (%s) must have a subpass index (%d) that is "
"less than the number of subpasses (%d).",
- report_data->FormatHandle(commandBuffer).c_str(), pInfo->subpass,
+ device_data->report_data->FormatHandle(commandBuffer).c_str(), pInfo->subpass,
renderPass->createInfo.subpassCount);
}
}
}
}
if (CB_RECORDING == cb_state->state) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(commandBuffer), "VUID-vkBeginCommandBuffer-commandBuffer-00049",
- "vkBeginCommandBuffer(): Cannot call Begin on %s in the RECORDING state. Must first call "
+ "vkBeginCommandBuffer(): Cannot call Begin on command buffer (%s) in the RECORDING state. Must first call "
"vkEndCommandBuffer().",
- report_data->FormatHandle(commandBuffer).c_str());
+ device_data->report_data->FormatHandle(commandBuffer).c_str());
} else if (CB_RECORDED == cb_state->state || CB_INVALID_COMPLETE == cb_state->state) {
VkCommandPool cmdPool = cb_state->createInfo.commandPool;
- const auto *pPool = GetCommandPoolState(cmdPool);
+ auto pPool = GetCommandPoolNode(cmdPool);
if (!(VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT & pPool->createFlags)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(commandBuffer), "VUID-vkBeginCommandBuffer-commandBuffer-00050",
- "Call to vkBeginCommandBuffer() on %s attempts to implicitly reset cmdBuffer created from "
- "%s that does NOT have the VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT bit set.",
- report_data->FormatHandle(commandBuffer).c_str(), report_data->FormatHandle(cmdPool).c_str());
- }
- }
- auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupCommandBufferBeginInfo>(pBeginInfo->pNext);
- if (chained_device_group_struct) {
- skip |= ValidateDeviceMaskToPhysicalDeviceCount(
- chained_device_group_struct->deviceMask, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, HandleToUint64(commandBuffer),
- "VUID-VkDeviceGroupCommandBufferBeginInfo-deviceMask-00106");
- skip |=
- ValidateDeviceMaskToZero(chained_device_group_struct->deviceMask, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(commandBuffer), "VUID-VkDeviceGroupCommandBufferBeginInfo-deviceMask-00107");
+ skip |=
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ HandleToUint64(commandBuffer), "VUID-vkBeginCommandBuffer-commandBuffer-00050",
+ "Call to vkBeginCommandBuffer() on command buffer (%s) attempts to implicitly reset cmdBuffer created from "
+ "command pool (%s) that does NOT have the VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT bit set.",
+ device_data->report_data->FormatHandle(commandBuffer).c_str(),
+ device_data->report_data->FormatHandle(cmdPool).c_str());
+ }
}
return skip;
}
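// Illustration (assumed application-side usage, not part of the layer source): beginning a secondary
// command buffer with RENDER_PASS_CONTINUE requires inheritance info, which the checks above enforce.
// 'secondary_cb', 'render_pass' and 'framebuffer' are placeholder handles.
//
//     VkCommandBufferInheritanceInfo inheritance{VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO};
//     inheritance.renderPass = render_pass;
//     inheritance.subpass = 0;
//     inheritance.framebuffer = framebuffer;  // may be VK_NULL_HANDLE if not yet known
//     VkCommandBufferBeginInfo begin_info{VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO};
//     begin_info.flags = VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT;
//     begin_info.pInheritanceInfo = &inheritance;
//     vkBeginCommandBuffer(secondary_cb, &begin_info);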
-void ValidationStateTracker::PreCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer,
- const VkCommandBufferBeginInfo *pBeginInfo) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+void CoreChecks::PreCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo *pBeginInfo) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
if (!cb_state) return;
// This implicitly resets the Cmd Buffer so make sure any fence is done and then clear memory references
- ClearCmdBufAndMemReferences(cb_state);
+ ClearCmdBufAndMemReferences(device_data, cb_state);
if (cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) {
// Secondary Command Buffer
const VkCommandBufferInheritanceInfo *pInfo = pBeginInfo->pInheritanceInfo;
@@ -6394,13 +6337,13 @@ void ValidationStateTracker::PreCallRecordBeginCommandBuffer(VkCommandBuffer com
auto framebuffer = GetFramebufferState(pInfo->framebuffer);
if (framebuffer) {
// Connect this framebuffer and its children to this cmdBuffer
- AddFramebufferBinding(cb_state, framebuffer);
+ AddFramebufferBinding(device_data, cb_state, framebuffer);
}
}
}
}
if (CB_RECORDED == cb_state->state || CB_INVALID_COMPLETE == cb_state->state) {
- ResetCommandBufferState(commandBuffer);
+ ResetCommandBufferState(device_data, commandBuffer);
}
// Set updated state here in case implicit reset occurs above
cb_state->state = CB_RECORDING;
@@ -6417,38 +6360,31 @@ void ValidationStateTracker::PreCallRecordBeginCommandBuffer(VkCommandBuffer com
cb_state->framebuffers.insert(cb_state->beginInfo.pInheritanceInfo->framebuffer);
}
}
-
- auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupCommandBufferBeginInfo>(pBeginInfo->pNext);
- if (chained_device_group_struct) {
- cb_state->initial_device_mask = chained_device_group_struct->deviceMask;
- } else {
- cb_state->initial_device_mask = (1 << physical_device_count) - 1;
- }
}
bool CoreChecks::PreCallValidateEndCommandBuffer(VkCommandBuffer commandBuffer) {
- const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
if (!cb_state) return false;
bool skip = false;
if ((VK_COMMAND_BUFFER_LEVEL_PRIMARY == cb_state->createInfo.level) ||
!(cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
// This needs spec clarification to update valid usage, see comments in PR:
// https://github.com/KhronosGroup/Vulkan-ValidationLayers/issues/165
- skip |= InsideRenderPass(cb_state, "vkEndCommandBuffer()", "VUID-vkEndCommandBuffer-commandBuffer-00060");
+ skip |= InsideRenderPass(device_data, cb_state, "vkEndCommandBuffer()", "VUID-vkEndCommandBuffer-commandBuffer-00060");
}
-
- skip |= ValidateCmd(cb_state, CMD_ENDCOMMANDBUFFER, "vkEndCommandBuffer()");
+ skip |= ValidateCmd(device_data, cb_state, CMD_ENDCOMMANDBUFFER, "vkEndCommandBuffer()");
for (auto query : cb_state->activeQueries) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(commandBuffer), "VUID-vkEndCommandBuffer-commandBuffer-00061",
- "Ending command buffer with in progress query: %s, query %d.",
- report_data->FormatHandle(query.pool).c_str(), query.query);
+ "Ending command buffer with in progress query: queryPool %s, index %d.",
+ device_data->report_data->FormatHandle(query.pool).c_str(), query.index);
}
return skip;
}
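// Illustration (assumed usage, not part of the layer source): the active-query check above expects every
// vkCmdBeginQuery to be balanced by a vkCmdEndQuery before the command buffer is ended. 'query_pool' is a
// placeholder handle.
//
//     vkCmdBeginQuery(command_buffer, query_pool, 0, 0);
//     // ... commands being measured ...
//     vkCmdEndQuery(command_buffer, query_pool, 0);
//     vkEndCommandBuffer(command_buffer);  // leaving the query open triggers VUID-vkEndCommandBuffer-commandBuffer-00061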
-void ValidationStateTracker::PostCallRecordEndCommandBuffer(VkCommandBuffer commandBuffer, VkResult result) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+void CoreChecks::PostCallRecordEndCommandBuffer(VkCommandBuffer commandBuffer, VkResult result) {
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
if (!cb_state) return;
// Cached validation is specific to a specific recording of a specific command buffer.
for (auto descriptor_set : cb_state->validated_descriptor_sets) {
@@ -6461,89 +6397,50 @@ void ValidationStateTracker::PostCallRecordEndCommandBuffer(VkCommandBuffer comm
}
bool CoreChecks::PreCallValidateResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
bool skip = false;
- const CMD_BUFFER_STATE *pCB = GetCBState(commandBuffer);
+ GLOBAL_CB_NODE *pCB = GetCBNode(commandBuffer);
if (!pCB) return false;
VkCommandPool cmdPool = pCB->createInfo.commandPool;
- const auto *pPool = GetCommandPoolState(cmdPool);
+ auto pPool = GetCommandPoolNode(cmdPool);
if (!(VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT & pPool->createFlags)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(commandBuffer), "VUID-vkResetCommandBuffer-commandBuffer-00046",
- "Attempt to reset %s created from %s that does NOT have the "
+ "Attempt to reset command buffer (%s) created from command pool (%s) that does NOT have the "
"VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT bit set.",
- report_data->FormatHandle(commandBuffer).c_str(), report_data->FormatHandle(cmdPool).c_str());
+ device_data->report_data->FormatHandle(commandBuffer).c_str(),
+ device_data->report_data->FormatHandle(cmdPool).c_str());
}
- skip |= CheckCommandBufferInFlight(pCB, "reset", "VUID-vkResetCommandBuffer-commandBuffer-00045");
+ skip |= CheckCommandBufferInFlight(device_data, pCB, "reset", "VUID-vkResetCommandBuffer-commandBuffer-00045");
return skip;
}
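// Illustration (assumed usage, not part of the layer source): an explicit reset only passes the check
// above when the parent pool was created with the reset flag. 'queue_family_index' is a placeholder value.
//
//     VkCommandPoolCreateInfo pool_info{VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO};
//     pool_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
//     pool_info.queueFamilyIndex = queue_family_index;
//     vkCreateCommandPool(device, &pool_info, nullptr, &command_pool);
//     // ...
//     vkResetCommandBuffer(command_buffer, 0);  // allowed; without the pool flag this violates 00046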
-void ValidationStateTracker::PostCallRecordResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags,
- VkResult result) {
+void CoreChecks::PostCallRecordResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags, VkResult result) {
if (VK_SUCCESS == result) {
- ResetCommandBufferState(commandBuffer);
- }
-}
-
-static const char *GetPipelineTypeName(VkPipelineBindPoint pipelineBindPoint) {
- switch (pipelineBindPoint) {
- case VK_PIPELINE_BIND_POINT_GRAPHICS:
- return "graphics";
- case VK_PIPELINE_BIND_POINT_COMPUTE:
- return "compute";
- case VK_PIPELINE_BIND_POINT_RAY_TRACING_NV:
- return "ray-tracing";
- default:
- return "unknown";
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ ResetCommandBufferState(device_data, commandBuffer);
}
}
bool CoreChecks::PreCallValidateCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
VkPipeline pipeline) {
- const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
assert(cb_state);
- bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdBindPipeline()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
+ bool skip = ValidateCmdQueueFlags(device_data, cb_state, "vkCmdBindPipeline()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
"VUID-vkCmdBindPipeline-commandBuffer-cmdpool");
- skip |= ValidateCmd(cb_state, CMD_BINDPIPELINE, "vkCmdBindPipeline()");
- static const std::map<VkPipelineBindPoint, std::string> bindpoint_errors = {
- std::make_pair(VK_PIPELINE_BIND_POINT_GRAPHICS, "VUID-vkCmdBindPipeline-pipelineBindPoint-00777"),
- std::make_pair(VK_PIPELINE_BIND_POINT_COMPUTE, "VUID-vkCmdBindPipeline-pipelineBindPoint-00778"),
- std::make_pair(VK_PIPELINE_BIND_POINT_RAY_TRACING_NV, "VUID-vkCmdBindPipeline-pipelineBindPoint-02391")};
-
- skip |= ValidatePipelineBindPoint(cb_state, pipelineBindPoint, "vkCmdBindPipeline()", bindpoint_errors);
-
- const auto *pipeline_state = GetPipelineState(pipeline);
- assert(pipeline_state);
-
- const auto &pipeline_state_bind_point = pipeline_state->getPipelineType();
-
- if (pipelineBindPoint != pipeline_state_bind_point) {
- if (pipelineBindPoint == VK_PIPELINE_BIND_POINT_GRAPHICS) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdBindPipeline-pipelineBindPoint-00779",
- "Cannot bind a pipeline of type %s to the graphics pipeline bind point",
- GetPipelineTypeName(pipeline_state_bind_point));
- } else if (pipelineBindPoint == VK_PIPELINE_BIND_POINT_COMPUTE) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdBindPipeline-pipelineBindPoint-00780",
- "Cannot bind a pipeline of type %s to the compute pipeline bind point",
- GetPipelineTypeName(pipeline_state_bind_point));
- } else if (pipelineBindPoint == VK_PIPELINE_BIND_POINT_RAY_TRACING_NV) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdBindPipeline-pipelineBindPoint-02392",
- "Cannot bind a pipeline of type %s to the ray-tracing pipeline bind point",
- GetPipelineTypeName(pipeline_state_bind_point));
- }
- }
-
+ skip |= ValidateCmd(device_data, cb_state, CMD_BINDPIPELINE, "vkCmdBindPipeline()");
+    // TODO: Check "VUID-vkCmdBindPipeline-pipelineBindPoint-00777" and "VUID-vkCmdBindPipeline-pipelineBindPoint-00779"
+    // using ValidatePipelineBindPoint
return skip;
}
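// Illustration (assumed usage, not part of the layer source): the bind point passed to vkCmdBindPipeline
// must match the kind of pipeline being bound. 'graphics_pipeline' is a placeholder handle created with
// vkCreateGraphicsPipelines.
//
//     vkCmdBindPipeline(command_buffer, VK_PIPELINE_BIND_POINT_GRAPHICS, graphics_pipeline);
//     // Binding the same handle at VK_PIPELINE_BIND_POINT_COMPUTE would violate
//     // VUID-vkCmdBindPipeline-pipelineBindPoint-00780.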
-void ValidationStateTracker::PreCallRecordCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
- VkPipeline pipeline) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+void CoreChecks::PreCallRecordCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
+ VkPipeline pipeline) {
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
assert(cb_state);
auto pipe_state = GetPipelineState(pipeline);
@@ -6554,68 +6451,71 @@ void ValidationStateTracker::PreCallRecordCmdBindPipeline(VkCommandBuffer comman
}
cb_state->lastBound[pipelineBindPoint].pipeline_state = pipe_state;
SetPipelineState(pipe_state);
- AddCommandBufferBinding(&pipe_state->cb_bindings, VulkanTypedHandle(pipeline, kVulkanObjectTypePipeline), cb_state);
+ AddCommandBufferBinding(&pipe_state->cb_bindings, {HandleToUint64(pipeline), kVulkanObjectTypePipeline}, cb_state);
}
bool CoreChecks::PreCallValidateCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount,
const VkViewport *pViewports) {
- const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
assert(cb_state);
- bool skip =
- ValidateCmdQueueFlags(cb_state, "vkCmdSetViewport()", VK_QUEUE_GRAPHICS_BIT, "VUID-vkCmdSetViewport-commandBuffer-cmdpool");
- skip |= ValidateCmd(cb_state, CMD_SETVIEWPORT, "vkCmdSetViewport()");
+ bool skip = ValidateCmdQueueFlags(device_data, cb_state, "vkCmdSetViewport()", VK_QUEUE_GRAPHICS_BIT,
+ "VUID-vkCmdSetViewport-commandBuffer-cmdpool");
+ skip |= ValidateCmd(device_data, cb_state, CMD_SETVIEWPORT, "vkCmdSetViewport()");
if (cb_state->static_status & CBSTATUS_VIEWPORT_SET) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(commandBuffer), "VUID-vkCmdSetViewport-None-01221",
"vkCmdSetViewport(): pipeline was created without VK_DYNAMIC_STATE_VIEWPORT flag.");
}
return skip;
}
-void ValidationStateTracker::PreCallRecordCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport,
- uint32_t viewportCount, const VkViewport *pViewports) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+void CoreChecks::PreCallRecordCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount,
+ const VkViewport *pViewports) {
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
cb_state->viewportMask |= ((1u << viewportCount) - 1u) << firstViewport;
cb_state->status |= CBSTATUS_VIEWPORT_SET;
}
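// Worked example of the mask update above: with firstViewport = 1 and viewportCount = 2,
// ((1u << 2) - 1u) << 1 == 0b110, so bits 1 and 2 of viewportMask are set, recording that dynamic
// viewports 1 and 2 have been supplied.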
bool CoreChecks::PreCallValidateCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount,
const VkRect2D *pScissors) {
- const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
assert(cb_state);
- bool skip =
- ValidateCmdQueueFlags(cb_state, "vkCmdSetScissor()", VK_QUEUE_GRAPHICS_BIT, "VUID-vkCmdSetScissor-commandBuffer-cmdpool");
- skip |= ValidateCmd(cb_state, CMD_SETSCISSOR, "vkCmdSetScissor()");
+ bool skip = ValidateCmdQueueFlags(device_data, cb_state, "vkCmdSetScissor()", VK_QUEUE_GRAPHICS_BIT,
+ "VUID-vkCmdSetScissor-commandBuffer-cmdpool");
+ skip |= ValidateCmd(device_data, cb_state, CMD_SETSCISSOR, "vkCmdSetScissor()");
if (cb_state->static_status & CBSTATUS_SCISSOR_SET) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(commandBuffer), "VUID-vkCmdSetScissor-None-00590",
"vkCmdSetScissor(): pipeline was created without VK_DYNAMIC_STATE_SCISSOR flag..");
}
return skip;
}
-void ValidationStateTracker ::PreCallRecordCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor,
- uint32_t scissorCount, const VkRect2D *pScissors) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+void CoreChecks::PreCallRecordCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount,
+ const VkRect2D *pScissors) {
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
cb_state->scissorMask |= ((1u << scissorCount) - 1u) << firstScissor;
cb_state->status |= CBSTATUS_SCISSOR_SET;
}
bool CoreChecks::PreCallValidateCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor,
uint32_t exclusiveScissorCount, const VkRect2D *pExclusiveScissors) {
- const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
assert(cb_state);
- bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetExclusiveScissorNV()", VK_QUEUE_GRAPHICS_BIT,
+ bool skip = ValidateCmdQueueFlags(device_data, cb_state, "vkCmdSetExclusiveScissorNV()", VK_QUEUE_GRAPHICS_BIT,
"VUID-vkCmdSetExclusiveScissorNV-commandBuffer-cmdpool");
- skip |= ValidateCmd(cb_state, CMD_SETEXCLUSIVESCISSORNV, "vkCmdSetExclusiveScissorNV()");
+ skip |= ValidateCmd(device_data, cb_state, CMD_SETEXCLUSIVESCISSOR, "vkCmdSetExclusiveScissorNV()");
if (cb_state->static_status & CBSTATUS_EXCLUSIVE_SCISSOR_SET) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(commandBuffer), "VUID-vkCmdSetExclusiveScissorNV-None-02032",
"vkCmdSetExclusiveScissorNV(): pipeline was created without VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV flag.");
}
- if (!enabled_features.exclusive_scissor.exclusiveScissor) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ if (!GetEnabledFeatures()->exclusive_scissor.exclusiveScissor) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(commandBuffer), "VUID-vkCmdSetExclusiveScissorNV-None-02031",
"vkCmdSetExclusiveScissorNV: The exclusiveScissor feature is disabled.");
}
@@ -6623,10 +6523,9 @@ bool CoreChecks::PreCallValidateCmdSetExclusiveScissorNV(VkCommandBuffer command
return skip;
}
-void ValidationStateTracker::PreCallRecordCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor,
- uint32_t exclusiveScissorCount,
- const VkRect2D *pExclusiveScissors) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+void CoreChecks::PreCallRecordCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor,
+ uint32_t exclusiveScissorCount, const VkRect2D *pExclusiveScissors) {
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
// TODO: We don't have VUIDs for validating that all exclusive scissors have been set.
// cb_state->exclusiveScissorMask |= ((1u << exclusiveScissorCount) - 1u) << firstExclusiveScissor;
cb_state->status |= CBSTATUS_EXCLUSIVE_SCISSOR_SET;
@@ -6634,25 +6533,26 @@ void ValidationStateTracker::PreCallRecordCmdSetExclusiveScissorNV(VkCommandBuff
bool CoreChecks::PreCallValidateCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView,
VkImageLayout imageLayout) {
- const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
assert(cb_state);
- bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdBindShadingRateImageNV()", VK_QUEUE_GRAPHICS_BIT,
+ bool skip = ValidateCmdQueueFlags(device_data, cb_state, "vkCmdBindShadingRateImageNV()", VK_QUEUE_GRAPHICS_BIT,
"VUID-vkCmdBindShadingRateImageNV-commandBuffer-cmdpool");
- skip |= ValidateCmd(cb_state, CMD_BINDSHADINGRATEIMAGENV, "vkCmdBindShadingRateImageNV()");
+ skip |= ValidateCmd(device_data, cb_state, CMD_BINDSHADINGRATEIMAGE, "vkCmdBindShadingRateImageNV()");
- if (!enabled_features.shading_rate_image.shadingRateImage) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ if (!GetEnabledFeatures()->shading_rate_image.shadingRateImage) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(commandBuffer), "VUID-vkCmdBindShadingRateImageNV-None-02058",
"vkCmdBindShadingRateImageNV: The shadingRateImage feature is disabled.");
}
if (imageView != VK_NULL_HANDLE) {
- const auto view_state = GetImageViewState(imageView);
+ auto view_state = GetImageViewState(imageView);
auto &ivci = view_state->create_info;
if (!view_state || (ivci.viewType != VK_IMAGE_VIEW_TYPE_2D && ivci.viewType != VK_IMAGE_VIEW_TYPE_2D_ARRAY)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT,
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT,
HandleToUint64(imageView), "VUID-vkCmdBindShadingRateImageNV-imageView-02059",
"vkCmdBindShadingRateImageNV: If imageView is not VK_NULL_HANDLE, it must be a valid "
"VkImageView handle of type VK_IMAGE_VIEW_TYPE_2D or VK_IMAGE_VIEW_TYPE_2D_ARRAY.");
@@ -6660,21 +6560,21 @@ bool CoreChecks::PreCallValidateCmdBindShadingRateImageNV(VkCommandBuffer comman
if (view_state && ivci.format != VK_FORMAT_R8_UINT) {
skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, HandleToUint64(imageView),
- "VUID-vkCmdBindShadingRateImageNV-imageView-02060",
+ device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT,
+ HandleToUint64(imageView), "VUID-vkCmdBindShadingRateImageNV-imageView-02060",
"vkCmdBindShadingRateImageNV: If imageView is not VK_NULL_HANDLE, it must have a format of VK_FORMAT_R8_UINT.");
}
const VkImageCreateInfo *ici = view_state ? &GetImageState(view_state->create_info.image)->createInfo : nullptr;
if (ici && !(ici->usage & VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT,
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT,
HandleToUint64(imageView), "VUID-vkCmdBindShadingRateImageNV-imageView-02061",
"vkCmdBindShadingRateImageNV: If imageView is not VK_NULL_HANDLE, the image must have been "
"created with VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV set.");
}
if (view_state) {
- const auto image_state = GetImageState(view_state->create_info.image);
+ auto image_state = GetImageState(view_state->create_info.image);
bool hit_error = false;
// XXX TODO: While the VUID says "each subresource", only the base mip level is
@@ -6684,8 +6584,9 @@ bool CoreChecks::PreCallValidateCmdBindShadingRateImageNV(VkCommandBuffer comman
VkImageSubresourceLayers subresource = {range.aspectMask, range.baseMipLevel, range.baseArrayLayer, range.layerCount};
if (image_state) {
- skip |= VerifyImageLayout(cb_state, image_state, subresource, imageLayout, VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV,
-                                      "vkCmdBindShadingRateImageNV()", "VUID-vkCmdBindShadingRateImageNV-imageLayout-02063",
+ skip |= VerifyImageLayout(device_data, cb_state, image_state, subresource, imageLayout,
+                                      VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV, "vkCmdBindShadingRateImageNV()",
+ "VUID-vkCmdBindShadingRateImageNV-imageLayout-02063",
"VUID-vkCmdBindShadingRateImageNV-imageView-02062", &hit_error);
}
}
@@ -6694,34 +6595,36 @@ bool CoreChecks::PreCallValidateCmdBindShadingRateImageNV(VkCommandBuffer comman
return skip;
}
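// Illustration (assumed usage, not part of the layer source): an image view that satisfies the checks
// above is a 2D (or 2D array) view with format VK_FORMAT_R8_UINT, whose image was created with the
// shading-rate usage bit and is in VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV when bound. Handle names
// below are placeholders.
//
//     VkImageCreateInfo image_info{VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO};
//     image_info.imageType = VK_IMAGE_TYPE_2D;
//     image_info.format = VK_FORMAT_R8_UINT;
//     image_info.usage = VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV;
//     // ... remaining members (extent, mipLevels, etc.) filled as usual ...
//     VkImageViewCreateInfo view_info{VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO};
//     view_info.viewType = VK_IMAGE_VIEW_TYPE_2D;
//     view_info.format = VK_FORMAT_R8_UINT;
//     // ...
//     vkCmdBindShadingRateImageNV(command_buffer, shading_rate_view, VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV);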
-void ValidationStateTracker::PreCallRecordCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView,
- VkImageLayout imageLayout) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+void CoreChecks::PreCallRecordCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView,
+ VkImageLayout imageLayout) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
if (imageView != VK_NULL_HANDLE) {
auto view_state = GetImageViewState(imageView);
- AddCommandBufferBindingImageView(cb_state, view_state);
+ AddCommandBufferBindingImageView(device_data, cb_state, view_state);
}
}
bool CoreChecks::PreCallValidateCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
uint32_t viewportCount,
const VkShadingRatePaletteNV *pShadingRatePalettes) {
- const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
assert(cb_state);
- bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetViewportShadingRatePaletteNV()", VK_QUEUE_GRAPHICS_BIT,
+ bool skip = ValidateCmdQueueFlags(device_data, cb_state, "vkCmdSetViewportShadingRatePaletteNV()", VK_QUEUE_GRAPHICS_BIT,
"VUID-vkCmdSetViewportShadingRatePaletteNV-commandBuffer-cmdpool");
- skip |= ValidateCmd(cb_state, CMD_SETVIEWPORTSHADINGRATEPALETTENV, "vkCmdSetViewportShadingRatePaletteNV()");
+ skip |= ValidateCmd(device_data, cb_state, CMD_SETVIEWPORTSHADINGRATEPALETTE, "vkCmdSetViewportShadingRatePaletteNV()");
- if (!enabled_features.shading_rate_image.shadingRateImage) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ if (!GetEnabledFeatures()->shading_rate_image.shadingRateImage) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(commandBuffer), "VUID-vkCmdSetViewportShadingRatePaletteNV-None-02064",
"vkCmdSetViewportShadingRatePaletteNV: The shadingRateImage feature is disabled.");
}
if (cb_state->static_status & CBSTATUS_SHADING_RATE_PALETTE_SET) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(commandBuffer), "VUID-vkCmdSetViewportShadingRatePaletteNV-None-02065",
"vkCmdSetViewportShadingRatePaletteNV(): pipeline was created without "
"VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV flag.");
@@ -6730,9 +6633,10 @@ bool CoreChecks::PreCallValidateCmdSetViewportShadingRatePaletteNV(VkCommandBuff
for (uint32_t i = 0; i < viewportCount; ++i) {
auto *palette = &pShadingRatePalettes[i];
if (palette->shadingRatePaletteEntryCount == 0 ||
- palette->shadingRatePaletteEntryCount > phys_dev_ext_props.shading_rate_image_props.shadingRatePaletteSize) {
+ palette->shadingRatePaletteEntryCount >
+ device_data->phys_dev_ext_props.shading_rate_image_props.shadingRatePaletteSize) {
skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(commandBuffer), "VUID-VkShadingRatePaletteNV-shadingRatePaletteEntryCount-02071",
"vkCmdSetViewportShadingRatePaletteNV: shadingRatePaletteEntryCount must be between 1 and shadingRatePaletteSize.");
}
@@ -6741,554 +6645,51 @@ bool CoreChecks::PreCallValidateCmdSetViewportShadingRatePaletteNV(VkCommandBuff
return skip;
}
-void ValidationStateTracker::PreCallRecordCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
- uint32_t viewportCount,
- const VkShadingRatePaletteNV *pShadingRatePalettes) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+void CoreChecks::PreCallRecordCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
+ uint32_t viewportCount,
+ const VkShadingRatePaletteNV *pShadingRatePalettes) {
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
// TODO: We don't have VUIDs for validating that all shading rate palettes have been set.
// cb_state->shadingRatePaletteMask |= ((1u << viewportCount) - 1u) << firstViewport;
cb_state->status |= CBSTATUS_SHADING_RATE_PALETTE_SET;
}
-bool CoreChecks::ValidateGeometryTrianglesNV(const VkGeometryTrianglesNV &triangles, VkDebugReportObjectTypeEXT object_type,
- uint64_t object_handle, const char *func_name) const {
- bool skip = false;
-
- const BUFFER_STATE *vb_state = GetBufferState(triangles.vertexData);
- if (vb_state != nullptr && vb_state->binding.size <= triangles.vertexOffset) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle,
- "VUID-VkGeometryTrianglesNV-vertexOffset-02428", "%s", func_name);
- }
-
- const BUFFER_STATE *ib_state = GetBufferState(triangles.indexData);
- if (ib_state != nullptr && ib_state->binding.size <= triangles.indexOffset) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle,
- "VUID-VkGeometryTrianglesNV-indexOffset-02431", "%s", func_name);
- }
-
- const BUFFER_STATE *td_state = GetBufferState(triangles.transformData);
- if (td_state != nullptr && td_state->binding.size <= triangles.transformOffset) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle,
- "VUID-VkGeometryTrianglesNV-transformOffset-02437", "%s", func_name);
- }
-
- return skip;
-}
-
-bool CoreChecks::ValidateGeometryAABBNV(const VkGeometryAABBNV &aabbs, VkDebugReportObjectTypeEXT object_type,
- uint64_t object_handle, const char *func_name) const {
- bool skip = false;
-
- const BUFFER_STATE *aabb_state = GetBufferState(aabbs.aabbData);
- if (aabb_state != nullptr && aabb_state->binding.size > 0 && aabb_state->binding.size <= aabbs.offset) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle,
- "VUID-VkGeometryAABBNV-offset-02439", "%s", func_name);
- }
-
- return skip;
-}
-
-bool CoreChecks::ValidateGeometryNV(const VkGeometryNV &geometry, VkDebugReportObjectTypeEXT object_type, uint64_t object_handle,
- const char *func_name) const {
- bool skip = false;
- if (geometry.geometryType == VK_GEOMETRY_TYPE_TRIANGLES_NV) {
- skip = ValidateGeometryTrianglesNV(geometry.geometry.triangles, object_type, object_handle, func_name);
- } else if (geometry.geometryType == VK_GEOMETRY_TYPE_AABBS_NV) {
- skip = ValidateGeometryAABBNV(geometry.geometry.aabbs, object_type, object_handle, func_name);
- }
- return skip;
-}
-
-bool CoreChecks::PreCallValidateCreateAccelerationStructureNV(VkDevice device,
- const VkAccelerationStructureCreateInfoNV *pCreateInfo,
- const VkAllocationCallbacks *pAllocator,
- VkAccelerationStructureNV *pAccelerationStructure) {
- bool skip = false;
- if (pCreateInfo != nullptr && pCreateInfo->info.type == VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV) {
- for (uint32_t i = 0; i < pCreateInfo->info.geometryCount; i++) {
- skip |= ValidateGeometryNV(pCreateInfo->info.pGeometries[i], VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- HandleToUint64(device), "vkCreateAccelerationStructureNV():");
- }
- }
- return skip;
-}
-
-void ValidationStateTracker::PostCallRecordCreateAccelerationStructureNV(VkDevice device,
- const VkAccelerationStructureCreateInfoNV *pCreateInfo,
- const VkAllocationCallbacks *pAllocator,
- VkAccelerationStructureNV *pAccelerationStructure,
- VkResult result) {
- if (VK_SUCCESS != result) return;
- std::unique_ptr<ACCELERATION_STRUCTURE_STATE> as_state(new ACCELERATION_STRUCTURE_STATE(*pAccelerationStructure, pCreateInfo));
-
- // Query the requirements in case the application doesn't (to avoid bind/validation time query)
- VkAccelerationStructureMemoryRequirementsInfoNV as_memory_requirements_info = {};
- as_memory_requirements_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
- as_memory_requirements_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV;
- as_memory_requirements_info.accelerationStructure = as_state->acceleration_structure;
- DispatchGetAccelerationStructureMemoryRequirementsNV(device, &as_memory_requirements_info, &as_state->memory_requirements);
-
- VkAccelerationStructureMemoryRequirementsInfoNV scratch_memory_req_info = {};
- scratch_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
- scratch_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV;
- scratch_memory_req_info.accelerationStructure = as_state->acceleration_structure;
- DispatchGetAccelerationStructureMemoryRequirementsNV(device, &scratch_memory_req_info,
- &as_state->build_scratch_memory_requirements);
-
- VkAccelerationStructureMemoryRequirementsInfoNV update_memory_req_info = {};
- update_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
- update_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV;
- update_memory_req_info.accelerationStructure = as_state->acceleration_structure;
- DispatchGetAccelerationStructureMemoryRequirementsNV(device, &update_memory_req_info,
- &as_state->update_scratch_memory_requirements);
-
- accelerationStructureMap[*pAccelerationStructure] = std::move(as_state);
-}
-
-void ValidationStateTracker::PostCallRecordGetAccelerationStructureMemoryRequirementsNV(
- VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV *pInfo, VkMemoryRequirements2KHR *pMemoryRequirements) {
- ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(pInfo->accelerationStructure);
- if (as_state != nullptr) {
- if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV) {
- as_state->memory_requirements = *pMemoryRequirements;
- as_state->memory_requirements_checked = true;
- } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV) {
- as_state->build_scratch_memory_requirements = *pMemoryRequirements;
- as_state->build_scratch_memory_requirements_checked = true;
- } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV) {
- as_state->update_scratch_memory_requirements = *pMemoryRequirements;
- as_state->update_scratch_memory_requirements_checked = true;
- }
- }
-}
-bool CoreChecks::ValidateBindAccelerationStructureMemoryNV(VkDevice device,
- const VkBindAccelerationStructureMemoryInfoNV &info) const {
- bool skip = false;
-
- const ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(info.accelerationStructure);
- if (!as_state) {
- return skip;
- }
- uint64_t as_handle = HandleToUint64(info.accelerationStructure);
- if (!as_state->GetBoundMemory().empty()) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT,
- as_handle, "VUID-VkBindAccelerationStructureMemoryInfoNV-accelerationStructure-02450",
- "vkBindAccelerationStructureMemoryNV(): accelerationStructure must not already be backed by a memory object.");
- }
-
- if (!as_state->memory_requirements_checked) {
- // There's not an explicit requirement in the spec to call vkGetImageMemoryRequirements() prior to calling
- // BindAccelerationStructureMemoryNV but it's implied in that memory being bound must conform with
- // VkAccelerationStructureMemoryRequirementsInfoNV from vkGetAccelerationStructureMemoryRequirementsNV
- skip |= log_msg(
- report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, as_handle,
- kVUID_Core_BindAccelNV_NoMemReqQuery,
- "vkBindAccelerationStructureMemoryNV(): "
- "Binding memory to %s but vkGetAccelerationStructureMemoryRequirementsNV() has not been called on that structure.",
- report_data->FormatHandle(info.accelerationStructure).c_str());
- // Use requirements gathered at create time for validation below...
- }
-
- // Validate bound memory range information
- const auto mem_info = GetDevMemState(info.memory);
- if (mem_info) {
- skip |= ValidateInsertAccelerationStructureMemoryRange(info.accelerationStructure, mem_info, info.memoryOffset,
- as_state->memory_requirements.memoryRequirements,
- "vkBindAccelerationStructureMemoryNV()");
- skip |= ValidateMemoryTypes(mem_info, as_state->memory_requirements.memoryRequirements.memoryTypeBits,
- "vkBindAccelerationStructureMemoryNV()",
- "VUID-VkBindAccelerationStructureMemoryInfoNV-memory-02593");
- }
-
- // Validate memory requirements alignment
- if (SafeModulo(info.memoryOffset, as_state->memory_requirements.memoryRequirements.alignment) != 0) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT,
- as_handle, "VUID-VkBindAccelerationStructureMemoryInfoNV-memoryOffset-02594",
- "vkBindAccelerationStructureMemoryNV(): memoryOffset is 0x%" PRIxLEAST64
- " but must be an integer multiple of the VkMemoryRequirements::alignment value 0x%" PRIxLEAST64
- ", returned from a call to vkGetAccelerationStructureMemoryRequirementsNV with accelerationStructure"
- "and type of VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV.",
- info.memoryOffset, as_state->memory_requirements.memoryRequirements.alignment);
- }
-
- if (mem_info) {
- // Validate memory requirements size
- if (as_state->memory_requirements.memoryRequirements.size > (mem_info->alloc_info.allocationSize - info.memoryOffset)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, as_handle,
- "VUID-VkBindAccelerationStructureMemoryInfoNV-size-02595",
- "vkBindAccelerationStructureMemoryNV(): memory size minus memoryOffset is 0x%" PRIxLEAST64
- " but must be at least as large as VkMemoryRequirements::size value 0x%" PRIxLEAST64
- ", returned from a call to vkGetAccelerationStructureMemoryRequirementsNV with accelerationStructure"
- "and type of VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV.",
- mem_info->alloc_info.allocationSize - info.memoryOffset,
- as_state->memory_requirements.memoryRequirements.size);
- }
- }
-
- return skip;
-}
-bool CoreChecks::PreCallValidateBindAccelerationStructureMemoryNV(VkDevice device, uint32_t bindInfoCount,
- const VkBindAccelerationStructureMemoryInfoNV *pBindInfos) {
- bool skip = false;
- for (uint32_t i = 0; i < bindInfoCount; i++) {
- skip |= ValidateBindAccelerationStructureMemoryNV(device, pBindInfos[i]);
- }
- return skip;
-}
-
-bool CoreChecks::PreCallValidateGetAccelerationStructureHandleNV(VkDevice device, VkAccelerationStructureNV accelerationStructure,
- size_t dataSize, void *pData) {
- bool skip = false;
-
- const ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(accelerationStructure);
- if (as_state != nullptr) {
- // TODO: update the fake VUID below once the real one is generated.
- skip = ValidateMemoryIsBoundToAccelerationStructure(
- as_state, "vkGetAccelerationStructureHandleNV",
- "UNASSIGNED-vkGetAccelerationStructureHandleNV-accelerationStructure-XXXX");
- }
-
- return skip;
-}
-
-void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryNV(
- VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV *pBindInfos, VkResult result) {
- if (VK_SUCCESS != result) return;
- for (uint32_t i = 0; i < bindInfoCount; i++) {
- const VkBindAccelerationStructureMemoryInfoNV &info = pBindInfos[i];
-
- ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(info.accelerationStructure);
- if (as_state) {
- // Track bound memory range information
- auto mem_info = GetDevMemState(info.memory);
- if (mem_info) {
- InsertAccelerationStructureMemoryRange(info.accelerationStructure, mem_info, info.memoryOffset,
- as_state->requirements);
- }
- // Track objects tied to memory
- SetMemBinding(info.memory, as_state, info.memoryOffset,
- VulkanTypedHandle(info.accelerationStructure, kVulkanObjectTypeAccelerationStructureNV));
- }
- }
-}
-
-bool CoreChecks::PreCallValidateCmdBuildAccelerationStructureNV(VkCommandBuffer commandBuffer,
- const VkAccelerationStructureInfoNV *pInfo, VkBuffer instanceData,
- VkDeviceSize instanceOffset, VkBool32 update,
- VkAccelerationStructureNV dst, VkAccelerationStructureNV src,
- VkBuffer scratch, VkDeviceSize scratchOffset) {
- const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
- assert(cb_state);
- bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdBuildAccelerationStructureNV()", VK_QUEUE_COMPUTE_BIT,
- "VUID-vkCmdBuildAccelerationStructureNV-commandBuffer-cmdpool");
-
- skip |= ValidateCmd(cb_state, CMD_BUILDACCELERATIONSTRUCTURENV, "vkCmdBuildAccelerationStructureNV()");
-
- if (pInfo != nullptr && pInfo->type == VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV) {
- for (uint32_t i = 0; i < pInfo->geometryCount; i++) {
- skip |= ValidateGeometryNV(pInfo->pGeometries[i], VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT,
- HandleToUint64(device), "vkCmdBuildAccelerationStructureNV():");
- }
- }
-
- if (pInfo != nullptr && pInfo->geometryCount > phys_dev_ext_props.ray_tracing_props.maxGeometryCount) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(commandBuffer), "VUID-vkCmdBuildAccelerationStructureNV-geometryCount-02241",
- "vkCmdBuildAccelerationStructureNV(): geometryCount [%d] must be less than or equal to "
- "VkPhysicalDeviceRayTracingPropertiesNV::maxGeometryCount.",
- pInfo->geometryCount);
- }
-
- const ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
- const ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
- const BUFFER_STATE *scratch_buffer_state = GetBufferState(scratch);
-
- if (dst_as_state != nullptr && pInfo != nullptr) {
- if (dst_as_state->create_info.info.type != pInfo->type) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(commandBuffer), "VUID-vkCmdBuildAccelerationStructureNV-dst-02488",
- "vkCmdBuildAccelerationStructureNV(): create info VkAccelerationStructureInfoNV::type"
- "[%s] must be identical to build info VkAccelerationStructureInfoNV::type [%s].",
- string_VkAccelerationStructureTypeNV(dst_as_state->create_info.info.type),
- string_VkAccelerationStructureTypeNV(pInfo->type));
- }
- if (dst_as_state->create_info.info.flags != pInfo->flags) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(commandBuffer), "VUID-vkCmdBuildAccelerationStructureNV-dst-02488",
- "vkCmdBuildAccelerationStructureNV(): create info VkAccelerationStructureInfoNV::flags"
- "[0x%X] must be identical to build info VkAccelerationStructureInfoNV::flags [0x%X].",
- dst_as_state->create_info.info.flags, pInfo->flags);
- }
- if (dst_as_state->create_info.info.instanceCount < pInfo->instanceCount) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(commandBuffer), "VUID-vkCmdBuildAccelerationStructureNV-dst-02488",
- "vkCmdBuildAccelerationStructureNV(): create info VkAccelerationStructureInfoNV::instanceCount "
- "[%d] must be greater than or equal to build info VkAccelerationStructureInfoNV::instanceCount [%d].",
- dst_as_state->create_info.info.instanceCount, pInfo->instanceCount);
- }
- if (dst_as_state->create_info.info.geometryCount < pInfo->geometryCount) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(commandBuffer), "VUID-vkCmdBuildAccelerationStructureNV-dst-02488",
- "vkCmdBuildAccelerationStructureNV(): create info VkAccelerationStructureInfoNV::geometryCount"
- "[%d] must be greater than or equal to build info VkAccelerationStructureInfoNV::geometryCount [%d].",
- dst_as_state->create_info.info.geometryCount, pInfo->geometryCount);
- } else {
- for (uint32_t i = 0; i < pInfo->geometryCount; i++) {
- const VkGeometryDataNV &create_geometry_data = dst_as_state->create_info.info.pGeometries[i].geometry;
- const VkGeometryDataNV &build_geometry_data = pInfo->pGeometries[i].geometry;
- if (create_geometry_data.triangles.vertexCount < build_geometry_data.triangles.vertexCount) {
- skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(commandBuffer), "VUID-vkCmdBuildAccelerationStructureNV-dst-02488",
- "vkCmdBuildAccelerationStructureNV(): create info pGeometries[%d].geometry.triangles.vertexCount [%d]"
- "must be greater than or equal to build info pGeometries[%d].geometry.triangles.vertexCount [%d].",
- i, create_geometry_data.triangles.vertexCount, i, build_geometry_data.triangles.vertexCount);
- break;
- }
- if (create_geometry_data.triangles.indexCount < build_geometry_data.triangles.indexCount) {
- skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(commandBuffer), "VUID-vkCmdBuildAccelerationStructureNV-dst-02488",
- "vkCmdBuildAccelerationStructureNV(): create info pGeometries[%d].geometry.triangles.indexCount [%d]"
- "must be greater than or equal to build info pGeometries[%d].geometry.triangles.indexCount [%d].",
- i, create_geometry_data.triangles.indexCount, i, build_geometry_data.triangles.indexCount);
- break;
- }
- if (create_geometry_data.aabbs.numAABBs < build_geometry_data.aabbs.numAABBs) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(commandBuffer), "VUID-vkCmdBuildAccelerationStructureNV-dst-02488",
- "vkCmdBuildAccelerationStructureNV(): create info pGeometries[%d].geometry.aabbs.numAABBs [%d]"
- "must be greater than or equal to build info pGeometries[%d].geometry.aabbs.numAABBs [%d].",
- i, create_geometry_data.aabbs.numAABBs, i, build_geometry_data.aabbs.numAABBs);
- break;
- }
- }
- }
- }
-
- if (dst_as_state != nullptr) {
- skip |= ValidateMemoryIsBoundToAccelerationStructure(
- dst_as_state, "vkCmdBuildAccelerationStructureNV()",
- "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBuffer-VkAccelerationStructureNV");
- }
-
- if (update == VK_TRUE) {
- if (src == VK_NULL_HANDLE) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(commandBuffer), "VUID-vkCmdBuildAccelerationStructureNV-update-02489",
- "vkCmdBuildAccelerationStructureNV(): If update is VK_TRUE, src must not be VK_NULL_HANDLE.");
- } else {
- if (src_as_state == nullptr || !src_as_state->built ||
- !(src_as_state->build_info.flags & VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(commandBuffer), "VUID-vkCmdBuildAccelerationStructureNV-update-02489",
- "vkCmdBuildAccelerationStructureNV(): If update is VK_TRUE, src must have been built before "
- "with VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV set in "
- "VkAccelerationStructureInfoNV::flags.");
- }
- }
- if (dst_as_state != nullptr && !dst_as_state->update_scratch_memory_requirements_checked) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT,
- HandleToUint64(dst), kVUID_Core_CmdBuildAccelNV_NoUpdateMemReqQuery,
- "vkCmdBuildAccelerationStructureNV(): Updating %s but vkGetAccelerationStructureMemoryRequirementsNV() "
- "has not been called for update scratch memory.",
- report_data->FormatHandle(dst_as_state->acceleration_structure).c_str());
- // Use requirements fetched at create time
- }
- if (scratch_buffer_state != nullptr && dst_as_state != nullptr &&
- dst_as_state->update_scratch_memory_requirements.memoryRequirements.size >
- (scratch_buffer_state->binding.size - (scratch_buffer_state->binding.offset + scratchOffset))) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(commandBuffer), "VUID-vkCmdBuildAccelerationStructureNV-update-02492",
- "vkCmdBuildAccelerationStructureNV(): If update is VK_TRUE, The size member of the "
- "VkMemoryRequirements structure returned from a call to "
- "vkGetAccelerationStructureMemoryRequirementsNV with "
- "VkAccelerationStructureMemoryRequirementsInfoNV::accelerationStructure set to dst and "
- "VkAccelerationStructureMemoryRequirementsInfoNV::type set to "
- "VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV must be less than "
- "or equal to the size of scratch minus scratchOffset");
- }
- } else {
- if (dst_as_state != nullptr && !dst_as_state->build_scratch_memory_requirements_checked) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT,
- HandleToUint64(dst), kVUID_Core_CmdBuildAccelNV_NoScratchMemReqQuery,
- "vkCmdBuildAccelerationStructureNV(): Assigning scratch buffer to %s but "
- "vkGetAccelerationStructureMemoryRequirementsNV() has not been called for scratch memory.",
- report_data->FormatHandle(dst_as_state->acceleration_structure).c_str());
- // Use requirements fetched at create time
- }
- if (scratch_buffer_state != nullptr && dst_as_state != nullptr &&
- dst_as_state->build_scratch_memory_requirements.memoryRequirements.size >
- (scratch_buffer_state->binding.size - (scratch_buffer_state->binding.offset + scratchOffset))) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(commandBuffer), "VUID-vkCmdBuildAccelerationStructureNV-update-02491",
- "vkCmdBuildAccelerationStructureNV(): If update is VK_FALSE, The size member of the "
- "VkMemoryRequirements structure returned from a call to "
- "vkGetAccelerationStructureMemoryRequirementsNV with "
- "VkAccelerationStructureMemoryRequirementsInfoNV::accelerationStructure set to dst and "
- "VkAccelerationStructureMemoryRequirementsInfoNV::type set to "
- "VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV must be less than "
- "or equal to the size of scratch minus scratchOffset");
- }
- }
- return skip;
-}
-
-void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructureNV(
- VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV *pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset,
- VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
- if (cb_state) {
- ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
- ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
- if (dst_as_state != nullptr) {
- dst_as_state->built = true;
- dst_as_state->build_info.initialize(pInfo);
- AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
- }
- if (src_as_state != nullptr) {
- AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
- }
- }
-}
-
-bool CoreChecks::PreCallValidateCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst,
- VkAccelerationStructureNV src,
- VkCopyAccelerationStructureModeNV mode) {
- const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
- assert(cb_state);
- bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdCopyAccelerationStructureNV()", VK_QUEUE_COMPUTE_BIT,
- "VUID-vkCmdCopyAccelerationStructureNV-commandBuffer-cmdpool");
-
- skip |= ValidateCmd(cb_state, CMD_COPYACCELERATIONSTRUCTURENV, "vkCmdCopyAccelerationStructureNV()");
-
- const ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
- const ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
-
- if (dst_as_state != nullptr) {
- skip |= ValidateMemoryIsBoundToAccelerationStructure(
- dst_as_state, "vkCmdBuildAccelerationStructureNV()",
- "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBuffer-VkAccelerationStructureNV");
- }
-
- if (mode == VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV) {
- if (src_as_state != nullptr &&
- (!src_as_state->built || !(src_as_state->build_info.flags & VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV))) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(commandBuffer), "VUID-vkCmdCopyAccelerationStructureNV-src-02497",
- "vkCmdCopyAccelerationStructureNV(): src must have been built with "
- "VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV if mode is "
- "VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV.");
- }
- }
- return skip;
-}
-
-void ValidationStateTracker::PostCallRecordCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer,
- VkAccelerationStructureNV dst,
- VkAccelerationStructureNV src,
- VkCopyAccelerationStructureModeNV mode) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
- if (cb_state) {
- ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
- ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
- if (dst_as_state != nullptr && src_as_state != nullptr) {
- dst_as_state->built = true;
- dst_as_state->build_info = src_as_state->build_info;
- AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
- AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
- }
- }
-}
-
-bool CoreChecks::PreCallValidateDestroyAccelerationStructureNV(VkDevice device, VkAccelerationStructureNV accelerationStructure,
- const VkAllocationCallbacks *pAllocator) {
- const ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(accelerationStructure);
- const VulkanTypedHandle obj_struct(accelerationStructure, kVulkanObjectTypeAccelerationStructureNV);
- bool skip = false;
- if (as_state) {
- skip |= ValidateObjectNotInUse(as_state, obj_struct, "vkDestroyAccelerationStructureNV",
- "VUID-vkDestroyAccelerationStructureNV-accelerationStructure-02442");
- }
- return skip;
-}
-
-void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureNV(VkDevice device,
- VkAccelerationStructureNV accelerationStructure,
- const VkAllocationCallbacks *pAllocator) {
- if (!accelerationStructure) return;
- auto *as_state = GetAccelerationStructureState(accelerationStructure);
- if (as_state) {
- const VulkanTypedHandle obj_struct(accelerationStructure, kVulkanObjectTypeAccelerationStructureNV);
- InvalidateCommandBuffers(as_state->cb_bindings, obj_struct);
- for (auto mem_binding : as_state->GetBoundMemory()) {
- auto mem_info = GetDevMemState(mem_binding);
- if (mem_info) {
- RemoveAccelerationStructureMemoryRange(HandleToUint64(accelerationStructure), mem_info);
- }
- }
- ClearMemoryObjectBindings(obj_struct);
- accelerationStructureMap.erase(accelerationStructure);
- }
-}
-
bool CoreChecks::PreCallValidateCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
- const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
assert(cb_state);
- bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetLineWidth()", VK_QUEUE_GRAPHICS_BIT,
+ bool skip = ValidateCmdQueueFlags(device_data, cb_state, "vkCmdSetLineWidth()", VK_QUEUE_GRAPHICS_BIT,
"VUID-vkCmdSetLineWidth-commandBuffer-cmdpool");
- skip |= ValidateCmd(cb_state, CMD_SETLINEWIDTH, "vkCmdSetLineWidth()");
+ skip |= ValidateCmd(device_data, cb_state, CMD_SETLINEWIDTH, "vkCmdSetLineWidth()");
if (cb_state->static_status & CBSTATUS_LINE_WIDTH_SET) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(commandBuffer), "VUID-vkCmdSetLineWidth-None-00787",
"vkCmdSetLineWidth called but pipeline was created without VK_DYNAMIC_STATE_LINE_WIDTH flag.");
}
return skip;
}
-void ValidationStateTracker::PreCallRecordCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+void CoreChecks::PreCallRecordCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
cb_state->status |= CBSTATUS_LINE_WIDTH_SET;
}
-bool CoreChecks::PreCallValidateCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor,
- uint16_t lineStipplePattern) {
- const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
- assert(cb_state);
- bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetLineStippleEXT()", VK_QUEUE_GRAPHICS_BIT,
- "VUID-vkCmdSetLineStippleEXT-commandBuffer-cmdpool");
- skip |= ValidateCmd(cb_state, CMD_SETLINESTIPPLEEXT, "vkCmdSetLineStippleEXT()");
-
- if (cb_state->static_status & CBSTATUS_LINE_STIPPLE_SET) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(commandBuffer), "VUID-vkCmdSetLineStippleEXT-None-02775",
- "vkCmdSetLineStippleEXT called but pipeline was created without VK_DYNAMIC_STATE_LINE_STIPPLE_EXT flag.");
- }
- return skip;
-}
-
-void ValidationStateTracker::PreCallRecordCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor,
- uint16_t lineStipplePattern) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
- cb_state->status |= CBSTATUS_LINE_STIPPLE_SET;
-}
-
bool CoreChecks::PreCallValidateCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp,
float depthBiasSlopeFactor) {
- const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
assert(cb_state);
- bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetDepthBias()", VK_QUEUE_GRAPHICS_BIT,
+ bool skip = ValidateCmdQueueFlags(device_data, cb_state, "vkCmdSetDepthBias()", VK_QUEUE_GRAPHICS_BIT,
"VUID-vkCmdSetDepthBias-commandBuffer-cmdpool");
- skip |= ValidateCmd(cb_state, CMD_SETDEPTHBIAS, "vkCmdSetDepthBias()");
+ skip |= ValidateCmd(device_data, cb_state, CMD_SETDEPTHBIAS, "vkCmdSetDepthBias()");
if (cb_state->static_status & CBSTATUS_DEPTH_BIAS_SET) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(commandBuffer), "VUID-vkCmdSetDepthBias-None-00789",
"vkCmdSetDepthBias(): pipeline was created without VK_DYNAMIC_STATE_DEPTH_BIAS flag..");
}
- if ((depthBiasClamp != 0.0) && (!enabled_features.core.depthBiasClamp)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ if ((depthBiasClamp != 0.0) && (!device_data->enabled_features.core.depthBiasClamp)) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(commandBuffer), "VUID-vkCmdSetDepthBias-depthBiasClamp-00790",
"vkCmdSetDepthBias(): the depthBiasClamp device feature is disabled: the depthBiasClamp parameter must "
"be set to 0.0.");
@@ -7296,124 +6697,127 @@ bool CoreChecks::PreCallValidateCmdSetDepthBias(VkCommandBuffer commandBuffer, f
return skip;
}
-void ValidationStateTracker::PreCallRecordCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor,
- float depthBiasClamp, float depthBiasSlopeFactor) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+void CoreChecks::PreCallRecordCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp,
+ float depthBiasSlopeFactor) {
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
cb_state->status |= CBSTATUS_DEPTH_BIAS_SET;
}
bool CoreChecks::PreCallValidateCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {
- const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
assert(cb_state);
- bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetBlendConstants()", VK_QUEUE_GRAPHICS_BIT,
+ bool skip = ValidateCmdQueueFlags(device_data, cb_state, "vkCmdSetBlendConstants()", VK_QUEUE_GRAPHICS_BIT,
"VUID-vkCmdSetBlendConstants-commandBuffer-cmdpool");
- skip |= ValidateCmd(cb_state, CMD_SETBLENDCONSTANTS, "vkCmdSetBlendConstants()");
+ skip |= ValidateCmd(device_data, cb_state, CMD_SETBLENDCONSTANTS, "vkCmdSetBlendConstants()");
if (cb_state->static_status & CBSTATUS_BLEND_CONSTANTS_SET) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(commandBuffer), "VUID-vkCmdSetBlendConstants-None-00612",
"vkCmdSetBlendConstants(): pipeline was created without VK_DYNAMIC_STATE_BLEND_CONSTANTS flag..");
}
return skip;
}
-void ValidationStateTracker::PreCallRecordCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+void CoreChecks::PreCallRecordCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
cb_state->status |= CBSTATUS_BLEND_CONSTANTS_SET;
}
bool CoreChecks::PreCallValidateCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds) {
- const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
assert(cb_state);
- bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetDepthBounds()", VK_QUEUE_GRAPHICS_BIT,
+ bool skip = ValidateCmdQueueFlags(device_data, cb_state, "vkCmdSetDepthBounds()", VK_QUEUE_GRAPHICS_BIT,
"VUID-vkCmdSetDepthBounds-commandBuffer-cmdpool");
- skip |= ValidateCmd(cb_state, CMD_SETDEPTHBOUNDS, "vkCmdSetDepthBounds()");
+ skip |= ValidateCmd(device_data, cb_state, CMD_SETDEPTHBOUNDS, "vkCmdSetDepthBounds()");
if (cb_state->static_status & CBSTATUS_DEPTH_BOUNDS_SET) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(commandBuffer), "VUID-vkCmdSetDepthBounds-None-00599",
"vkCmdSetDepthBounds(): pipeline was created without VK_DYNAMIC_STATE_DEPTH_BOUNDS flag..");
}
return skip;
}
-void ValidationStateTracker::PreCallRecordCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds,
- float maxDepthBounds) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+void CoreChecks::PreCallRecordCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds) {
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
cb_state->status |= CBSTATUS_DEPTH_BOUNDS_SET;
}
bool CoreChecks::PreCallValidateCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
uint32_t compareMask) {
- const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
assert(cb_state);
- bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetStencilCompareMask()", VK_QUEUE_GRAPHICS_BIT,
+ bool skip = ValidateCmdQueueFlags(device_data, cb_state, "vkCmdSetStencilCompareMask()", VK_QUEUE_GRAPHICS_BIT,
"VUID-vkCmdSetStencilCompareMask-commandBuffer-cmdpool");
- skip |= ValidateCmd(cb_state, CMD_SETSTENCILCOMPAREMASK, "vkCmdSetStencilCompareMask()");
+ skip |= ValidateCmd(device_data, cb_state, CMD_SETSTENCILCOMPAREMASK, "vkCmdSetStencilCompareMask()");
if (cb_state->static_status & CBSTATUS_STENCIL_READ_MASK_SET) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(commandBuffer), "VUID-vkCmdSetStencilCompareMask-None-00602",
"vkCmdSetStencilCompareMask(): pipeline was created without VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK flag..");
}
return skip;
}
-void ValidationStateTracker::PreCallRecordCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
- uint32_t compareMask) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+void CoreChecks::PreCallRecordCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
+ uint32_t compareMask) {
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
cb_state->status |= CBSTATUS_STENCIL_READ_MASK_SET;
}
bool CoreChecks::PreCallValidateCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
uint32_t writeMask) {
- const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
assert(cb_state);
- bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetStencilWriteMask()", VK_QUEUE_GRAPHICS_BIT,
+ bool skip = ValidateCmdQueueFlags(device_data, cb_state, "vkCmdSetStencilWriteMask()", VK_QUEUE_GRAPHICS_BIT,
"VUID-vkCmdSetStencilWriteMask-commandBuffer-cmdpool");
- skip |= ValidateCmd(cb_state, CMD_SETSTENCILWRITEMASK, "vkCmdSetStencilWriteMask()");
+ skip |= ValidateCmd(device_data, cb_state, CMD_SETSTENCILWRITEMASK, "vkCmdSetStencilWriteMask()");
if (cb_state->static_status & CBSTATUS_STENCIL_WRITE_MASK_SET) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(commandBuffer), "VUID-vkCmdSetStencilWriteMask-None-00603",
"vkCmdSetStencilWriteMask(): pipeline was created without VK_DYNAMIC_STATE_STENCIL_WRITE_MASK flag..");
}
return skip;
}
-void ValidationStateTracker::PreCallRecordCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
- uint32_t writeMask) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+void CoreChecks::PreCallRecordCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
+ uint32_t writeMask) {
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
cb_state->status |= CBSTATUS_STENCIL_WRITE_MASK_SET;
}
bool CoreChecks::PreCallValidateCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
uint32_t reference) {
- const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
assert(cb_state);
- bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetStencilReference()", VK_QUEUE_GRAPHICS_BIT,
+ bool skip = ValidateCmdQueueFlags(device_data, cb_state, "vkCmdSetStencilReference()", VK_QUEUE_GRAPHICS_BIT,
"VUID-vkCmdSetStencilReference-commandBuffer-cmdpool");
- skip |= ValidateCmd(cb_state, CMD_SETSTENCILREFERENCE, "vkCmdSetStencilReference()");
+ skip |= ValidateCmd(device_data, cb_state, CMD_SETSTENCILREFERENCE, "vkCmdSetStencilReference()");
if (cb_state->static_status & CBSTATUS_STENCIL_REFERENCE_SET) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(commandBuffer), "VUID-vkCmdSetStencilReference-None-00604",
"vkCmdSetStencilReference(): pipeline was created without VK_DYNAMIC_STATE_STENCIL_REFERENCE flag..");
}
return skip;
}
-void ValidationStateTracker::PreCallRecordCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
- uint32_t reference) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+void CoreChecks::PreCallRecordCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
+ uint32_t reference) {
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
cb_state->status |= CBSTATUS_STENCIL_REFERENCE_SET;
}
-// Update pipeline_layout bind points applying the "Pipeline Layout Compatibility" rules.
-// One of pDescriptorSets or push_descriptor_set should be nullptr, indicating whether this
-// is called for CmdBindDescriptorSets or CmdPushDescriptorSet.
-void ValidationStateTracker::UpdateLastBoundDescriptorSets(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipeline_bind_point,
- const PIPELINE_LAYOUT_STATE *pipeline_layout, uint32_t first_set,
- uint32_t set_count, const VkDescriptorSet *pDescriptorSets,
- cvdescriptorset::DescriptorSet *push_descriptor_set,
- uint32_t dynamic_offset_count, const uint32_t *p_dynamic_offsets) {
- assert((pDescriptorSets == nullptr) ^ (push_descriptor_set == nullptr));
+// Update pipeline_layout bind points applying the "Pipeline Layout Compatibility" rules
+static void UpdateLastBoundDescriptorSets(layer_data *device_data, GLOBAL_CB_NODE *cb_state,
+ VkPipelineBindPoint pipeline_bind_point, const PIPELINE_LAYOUT_NODE *pipeline_layout,
+ uint32_t first_set, uint32_t set_count,
+ const std::vector<cvdescriptorset::DescriptorSet *> descriptor_sets,
+ uint32_t dynamic_offset_count, const uint32_t *p_dynamic_offsets) {
// Defensive
+ assert(set_count);
+ if (0 == set_count) return;
assert(pipeline_layout);
if (!pipeline_layout) return;
@@ -7423,8 +6827,15 @@ void ValidationStateTracker::UpdateLastBoundDescriptorSets(CMD_BUFFER_STATE *cb_
// Some useful shorthand
auto &last_bound = cb_state->lastBound[pipeline_bind_point];
+
+ auto &bound_sets = last_bound.boundDescriptorSets;
+ auto &dynamic_offsets = last_bound.dynamicOffsets;
+ auto &bound_compat_ids = last_bound.compat_id_for_set;
auto &pipe_compat_ids = pipeline_layout->compat_for_set;
- const uint32_t current_size = static_cast<uint32_t>(last_bound.per_set.size());
+
+ const uint32_t current_size = static_cast<uint32_t>(bound_sets.size());
+ assert(current_size == dynamic_offsets.size());
+ assert(current_size == bound_compat_ids.size());
// We need this three times in this function, but nowhere else
auto push_descriptor_cleanup = [&last_bound](const cvdescriptorset::DescriptorSet *ds) -> bool {
@@ -7438,10 +6849,10 @@ void ValidationStateTracker::UpdateLastBoundDescriptorSets(CMD_BUFFER_STATE *cb_
// Clean up the "disturbed" before and after the range to be set
if (required_size < current_size) {
- if (last_bound.per_set[last_binding_index].compat_id_for_set != pipe_compat_ids[last_binding_index]) {
+ if (bound_compat_ids[last_binding_index] != pipe_compat_ids[last_binding_index]) {
// We're disturbing those after last, we'll shrink below, but first need to check for and cleanup the push_descriptor
for (auto set_idx = required_size; set_idx < current_size; ++set_idx) {
- if (push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set)) break;
+ if (push_descriptor_cleanup(bound_sets[set_idx])) break;
}
} else {
// We're not disturbing past last, so leave the upper binding data alone.
@@ -7451,16 +6862,19 @@ void ValidationStateTracker::UpdateLastBoundDescriptorSets(CMD_BUFFER_STATE *cb_
// We resize if we need more set entries or if those past "last" are disturbed
if (required_size != current_size) {
- last_bound.per_set.resize(required_size);
+ // TODO: put these size tied things in a struct (touches many lines)
+ bound_sets.resize(required_size);
+ dynamic_offsets.resize(required_size);
+ bound_compat_ids.resize(required_size);
}
// For any previously bound sets, need to set them to "invalid" if they were disturbed by this update
for (uint32_t set_idx = 0; set_idx < first_set; ++set_idx) {
- if (last_bound.per_set[set_idx].compat_id_for_set != pipe_compat_ids[set_idx]) {
- push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
- last_bound.per_set[set_idx].bound_descriptor_set = nullptr;
- last_bound.per_set[set_idx].dynamicOffsets.clear();
- last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];
+ if (bound_compat_ids[set_idx] != pipe_compat_ids[set_idx]) {
+ push_descriptor_cleanup(bound_sets[set_idx]);
+ bound_sets[set_idx] = nullptr;
+ dynamic_offsets[set_idx].clear();
+ bound_compat_ids[set_idx] = pipe_compat_ids[set_idx];
}
}
@@ -7468,27 +6882,26 @@ void ValidationStateTracker::UpdateLastBoundDescriptorSets(CMD_BUFFER_STATE *cb_
const uint32_t *input_dynamic_offsets = p_dynamic_offsets; // "read" pointer for dynamic offset data
for (uint32_t input_idx = 0; input_idx < set_count; input_idx++) {
auto set_idx = input_idx + first_set; // set_idx is index within layout, input_idx is index within input descriptor sets
- cvdescriptorset::DescriptorSet *descriptor_set =
- push_descriptor_set ? push_descriptor_set : GetSetNode(pDescriptorSets[input_idx]);
+ cvdescriptorset::DescriptorSet *descriptor_set = descriptor_sets[input_idx];
// Record binding (or push)
if (descriptor_set != last_bound.push_descriptor_set.get()) {
// Only cleanup the push descriptors if they aren't the currently used set.
- push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
+ push_descriptor_cleanup(bound_sets[set_idx]);
}
- last_bound.per_set[set_idx].bound_descriptor_set = descriptor_set;
- last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx]; // compat ids are canonical *per* set index
+ bound_sets[set_idx] = descriptor_set;
+ bound_compat_ids[set_idx] = pipe_compat_ids[set_idx]; // compat ids are canonical *per* set index
if (descriptor_set) {
auto set_dynamic_descriptor_count = descriptor_set->GetDynamicDescriptorCount();
// TODO: Add logic for tracking push_descriptor offsets (here or in caller)
if (set_dynamic_descriptor_count && input_dynamic_offsets) {
const uint32_t *end_offset = input_dynamic_offsets + set_dynamic_descriptor_count;
- last_bound.per_set[set_idx].dynamicOffsets = std::vector<uint32_t>(input_dynamic_offsets, end_offset);
+ dynamic_offsets[set_idx] = std::vector<uint32_t>(input_dynamic_offsets, end_offset);
input_dynamic_offsets = end_offset;
assert(input_dynamic_offsets <= (p_dynamic_offsets + dynamic_offset_count));
} else {
- last_bound.per_set[set_idx].dynamicOffsets.clear();
+ dynamic_offsets[set_idx].clear();
}
if (!descriptor_set->IsPushDescriptor()) {
// Can't cache validation of push_descriptors
@@ -7499,23 +6912,28 @@ void ValidationStateTracker::UpdateLastBoundDescriptorSets(CMD_BUFFER_STATE *cb_
}
// Update the bound state for the bind point, including the effects of incompatible pipeline layouts
-void ValidationStateTracker::PreCallRecordCmdBindDescriptorSets(VkCommandBuffer commandBuffer,
- VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
- uint32_t firstSet, uint32_t setCount,
- const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
- const uint32_t *pDynamicOffsets) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
- auto pipeline_layout = GetPipelineLayout(layout);
-
- // Resize binding arrays
- uint32_t last_set_index = firstSet + setCount - 1;
- if (last_set_index >= cb_state->lastBound[pipelineBindPoint].per_set.size()) {
- cb_state->lastBound[pipelineBindPoint].per_set.resize(last_set_index + 1);
+void CoreChecks::PreCallRecordCmdBindDescriptorSets(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
+ VkPipelineLayout layout, uint32_t firstSet, uint32_t setCount,
+ const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
+ const uint32_t *pDynamicOffsets) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
+ auto pipeline_layout = GetPipelineLayout(device_data, layout);
+ std::vector<cvdescriptorset::DescriptorSet *> descriptor_sets;
+ descriptor_sets.reserve(setCount);
+
+ // Construct a list of the descriptors
+ bool found_non_null = false;
+ for (uint32_t i = 0; i < setCount; i++) {
+ cvdescriptorset::DescriptorSet *descriptor_set = GetSetNode(pDescriptorSets[i]);
+ descriptor_sets.emplace_back(descriptor_set);
+ found_non_null |= descriptor_set != nullptr;
+ }
+ if (found_non_null) { // which implies setCount > 0
+ UpdateLastBoundDescriptorSets(device_data, cb_state, pipelineBindPoint, pipeline_layout, firstSet, setCount,
+ descriptor_sets, dynamicOffsetCount, pDynamicOffsets);
+ cb_state->lastBound[pipelineBindPoint].pipeline_layout = layout;
}
-
- UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, firstSet, setCount, pDescriptorSets, nullptr,
- dynamicOffsetCount, pDynamicOffsets);
- cb_state->lastBound[pipelineBindPoint].pipeline_layout = layout;
}
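
The compatibility bookkeeping driving the invalidation above can be summarized with a minimal, hypothetical C++ sketch (simplified stand-in types, not the layer's real GLOBAL_CB_NODE / lastBound state): a slot bound below firstSet is cleared whenever its recorded compatibility id differs from the id the incoming pipeline layout assigns to that set index, which is exactly the "disturbed set" cleanup UpdateLastBoundDescriptorSets performs.

#include <cstdint>
#include <vector>

struct BoundSlot {                        // stand-in for one lastBound per-set entry
    const void *descriptor_set = nullptr; // stands in for cvdescriptorset::DescriptorSet*
    std::vector<uint32_t> dynamic_offsets;
    uint32_t compat_id = 0;               // canonical "Pipeline Layout Compatibility" id for this set index
};

// Clear any slot below first_set whose compat id is disturbed by the new layout.
// Assumes pipe_compat_ids holds at least first_set entries, as the real layout state does.
static void InvalidateDisturbedSlots(std::vector<BoundSlot> &bound, uint32_t first_set,
                                     const std::vector<uint32_t> &pipe_compat_ids) {
    for (uint32_t set_idx = 0; set_idx < first_set && set_idx < bound.size(); ++set_idx) {
        if (bound[set_idx].compat_id != pipe_compat_ids[set_idx]) {
            bound[set_idx].descriptor_set = nullptr;
            bound[set_idx].dynamic_offsets.clear();
            bound[set_idx].compat_id = pipe_compat_ids[set_idx];
        }
    }
}
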
static bool ValidateDynamicOffsetAlignment(const debug_report_data *report_data, const VkDescriptorSetLayoutBinding *binding,
@@ -7542,28 +6960,35 @@ bool CoreChecks::PreCallValidateCmdBindDescriptorSets(VkCommandBuffer commandBuf
VkPipelineLayout layout, uint32_t firstSet, uint32_t setCount,
const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
const uint32_t *pDynamicOffsets) {
- const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
assert(cb_state);
bool skip = false;
- skip |= ValidateCmdQueueFlags(cb_state, "vkCmdBindDescriptorSets()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
+ skip |= ValidateCmdQueueFlags(device_data, cb_state, "vkCmdBindDescriptorSets()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
"VUID-vkCmdBindDescriptorSets-commandBuffer-cmdpool");
- skip |= ValidateCmd(cb_state, CMD_BINDDESCRIPTORSETS, "vkCmdBindDescriptorSets()");
+ skip |= ValidateCmd(device_data, cb_state, CMD_BINDDESCRIPTORSETS, "vkCmdBindDescriptorSets()");
// Track total count of dynamic descriptor types to make sure we have an offset for each one
uint32_t total_dynamic_descriptors = 0;
string error_string = "";
+ uint32_t last_set_index = firstSet + setCount - 1;
- const auto *pipeline_layout = GetPipelineLayout(layout);
+ if (last_set_index >= cb_state->lastBound[pipelineBindPoint].boundDescriptorSets.size()) {
+ cb_state->lastBound[pipelineBindPoint].boundDescriptorSets.resize(last_set_index + 1);
+ cb_state->lastBound[pipelineBindPoint].dynamicOffsets.resize(last_set_index + 1);
+ cb_state->lastBound[pipelineBindPoint].compat_id_for_set.resize(last_set_index + 1);
+ }
+ auto pipeline_layout = GetPipelineLayout(device_data, layout);
for (uint32_t set_idx = 0; set_idx < setCount; set_idx++) {
- const cvdescriptorset::DescriptorSet *descriptor_set = GetSetNode(pDescriptorSets[set_idx]);
+ cvdescriptorset::DescriptorSet *descriptor_set = GetSetNode(pDescriptorSets[set_idx]);
if (descriptor_set) {
// Verify that set being bound is compatible with overlapping setLayout of pipelineLayout
if (!VerifySetLayoutCompatibility(descriptor_set, pipeline_layout, set_idx + firstSet, error_string)) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
- HandleToUint64(pDescriptorSets[set_idx]), "VUID-vkCmdBindDescriptorSets-pDescriptorSets-00358",
- "descriptorSet #%u being bound is not compatible with overlapping descriptorSetLayout at index %u of "
- "%s due to: %s.",
- set_idx, set_idx + firstSet, report_data->FormatHandle(layout).c_str(), error_string.c_str());
+ skip |= log_msg(
+ device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+ HandleToUint64(pDescriptorSets[set_idx]), "VUID-vkCmdBindDescriptorSets-pDescriptorSets-00358",
+ "descriptorSet #%u being bound is not compatible with overlapping descriptorSetLayout at index %u of "
+ "pipelineLayout %s due to: %s.",
+ set_idx, set_idx + firstSet, device_data->report_data->FormatHandle(layout).c_str(), error_string.c_str());
}
auto set_dynamic_descriptor_count = descriptor_set->GetDynamicDescriptorCount();
@@ -7571,13 +6996,13 @@ bool CoreChecks::PreCallValidateCmdBindDescriptorSets(VkCommandBuffer commandBuf
// First make sure we won't overstep bounds of pDynamicOffsets array
if ((total_dynamic_descriptors + set_dynamic_descriptor_count) > dynamicOffsetCount) {
// Test/report this here, such that we don't run past the end of pDynamicOffsets in the else clause
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
- HandleToUint64(pDescriptorSets[set_idx]), "VUID-vkCmdBindDescriptorSets-dynamicOffsetCount-00359",
- "descriptorSet #%u (%s) requires %u dynamicOffsets, but only %u dynamicOffsets are left in "
- "pDynamicOffsets array. There must be one dynamic offset for each dynamic descriptor being bound.",
- set_idx, report_data->FormatHandle(pDescriptorSets[set_idx]).c_str(),
- descriptor_set->GetDynamicDescriptorCount(), (dynamicOffsetCount - total_dynamic_descriptors));
+ skip |= log_msg(
+ device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+ HandleToUint64(pDescriptorSets[set_idx]), "VUID-vkCmdBindDescriptorSets-dynamicOffsetCount-00359",
+ "descriptorSet #%u (%s) requires %u dynamicOffsets, but only %u dynamicOffsets are left in "
+ "pDynamicOffsets array. There must be one dynamic offset for each dynamic descriptor being bound.",
+ set_idx, device_data->report_data->FormatHandle(pDescriptorSets[set_idx]).c_str(),
+ descriptor_set->GetDynamicDescriptorCount(), (dynamicOffsetCount - total_dynamic_descriptors));
                // Set the number found to the maximum to prevent duplicate messages, or subsequent descriptor sets from
// testing against the "short tail" we're skipping below.
total_dynamic_descriptors = dynamicOffsetCount;
@@ -7585,14 +7010,16 @@ bool CoreChecks::PreCallValidateCmdBindDescriptorSets(VkCommandBuffer commandBuf
uint32_t cur_dyn_offset = total_dynamic_descriptors;
const auto dsl = descriptor_set->GetLayout();
const auto binding_count = dsl->GetBindingCount();
- const auto &limits = phys_dev_props.limits;
+ const auto &limits = device_data->phys_dev_props.limits;
for (uint32_t binding_idx = 0; binding_idx < binding_count; binding_idx++) {
const auto *binding = dsl->GetDescriptorSetLayoutBindingPtrFromIndex(binding_idx);
- skip |= ValidateDynamicOffsetAlignment(report_data, binding, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC,
+ skip |= ValidateDynamicOffsetAlignment(device_data->report_data, binding,
+ VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC,
limits.minUniformBufferOffsetAlignment, pDynamicOffsets,
"VUID-vkCmdBindDescriptorSets-pDynamicOffsets-01971",
"minUniformBufferOffsetAlignment", &cur_dyn_offset);
- skip |= ValidateDynamicOffsetAlignment(report_data, binding, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC,
+ skip |= ValidateDynamicOffsetAlignment(device_data->report_data, binding,
+ VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC,
limits.minStorageBufferOffsetAlignment, pDynamicOffsets,
"VUID-vkCmdBindDescriptorSets-pDynamicOffsets-01972",
"minStorageBufferOffsetAlignment", &cur_dyn_offset);
@@ -7602,14 +7029,15 @@ bool CoreChecks::PreCallValidateCmdBindDescriptorSets(VkCommandBuffer commandBuf
}
}
} else {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
HandleToUint64(pDescriptorSets[set_idx]), kVUID_Core_DrawState_InvalidSet,
- "Attempt to bind %s that doesn't exist!", report_data->FormatHandle(pDescriptorSets[set_idx]).c_str());
+ "Attempt to bind descriptor set %s that doesn't exist!",
+ device_data->report_data->FormatHandle(pDescriptorSets[set_idx]).c_str());
}
}
// dynamicOffsetCount must equal the total number of dynamic descriptors in the sets being bound
if (total_dynamic_descriptors != dynamicOffsetCount) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdBindDescriptorSets-dynamicOffsetCount-00359",
"Attempting to bind %u descriptorSets with %u dynamic descriptors, but dynamicOffsetCount is %u. It should "
"exactly match the number of dynamic descriptors.",
@@ -7621,10 +7049,10 @@ bool CoreChecks::PreCallValidateCmdBindDescriptorSets(VkCommandBuffer commandBuf
// Validates that the supplied bind point is supported for the command buffer (vis. the command pool)
 // Takes array of error codes as some of the VUIDs (e.g. vkCmdBindPipeline) are written per bindpoint
// TODO add vkCmdBindPipeline bind_point validation using this call.
-bool CoreChecks::ValidatePipelineBindPoint(const CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint bind_point, const char *func_name,
- const std::map<VkPipelineBindPoint, std::string> &bind_errors) const {
+bool CoreChecks::ValidatePipelineBindPoint(layer_data *device_data, GLOBAL_CB_NODE *cb_state, VkPipelineBindPoint bind_point,
+ const char *func_name, const std::map<VkPipelineBindPoint, std::string> &bind_errors) {
bool skip = false;
- auto pool = GetCommandPoolState(cb_state->createInfo.commandPool);
+ auto pool = GetCommandPoolNode(cb_state->createInfo.commandPool);
if (pool) { // The loss of a pool in a recording cmd is reported in DestroyCommandPool
static const std::map<VkPipelineBindPoint, VkQueueFlags> flag_mask = {
std::make_pair(VK_PIPELINE_BIND_POINT_GRAPHICS, static_cast<VkQueueFlags>(VK_QUEUE_GRAPHICS_BIT)),
@@ -7636,11 +7064,12 @@ bool CoreChecks::ValidatePipelineBindPoint(const CMD_BUFFER_STATE *cb_state, VkP
if (0 == (qfp.queueFlags & flag_mask.at(bind_point))) {
const std::string &error = bind_errors.at(bind_point);
auto cb_u64 = HandleToUint64(cb_state->commandBuffer);
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, cb_u64,
- error, "%s: %s was allocated from %s that does not support bindpoint %s.", func_name,
- report_data->FormatHandle(cb_state->commandBuffer).c_str(),
- report_data->FormatHandle(cb_state->createInfo.commandPool).c_str(),
- string_VkPipelineBindPoint(bind_point));
+ auto cp_u64 = HandleToUint64(cb_state->createInfo.commandPool);
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ cb_u64, error,
+ "%s: CommandBuffer %s was allocated from VkCommandPool %s that does not support bindpoint %s.",
+ func_name, device_data->report_data->FormatHandle(cb_u64).c_str(),
+ device_data->report_data->FormatHandle(cp_u64).c_str(), string_VkPipelineBindPoint(bind_point));
}
}
return skip;
@@ -7649,12 +7078,13 @@ bool CoreChecks::ValidatePipelineBindPoint(const CMD_BUFFER_STATE *cb_state, VkP
bool CoreChecks::PreCallValidateCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount,
const VkWriteDescriptorSet *pDescriptorWrites) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
assert(cb_state);
const char *func_name = "vkCmdPushDescriptorSetKHR()";
bool skip = false;
- skip |= ValidateCmd(cb_state, CMD_PUSHDESCRIPTORSETKHR, func_name);
- skip |= ValidateCmdQueueFlags(cb_state, func_name, (VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT),
+ skip |= ValidateCmd(device_data, cb_state, CMD_PUSHDESCRIPTORSETKHR, func_name);
+ skip |= ValidateCmdQueueFlags(device_data, cb_state, func_name, (VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT),
"VUID-vkCmdPushDescriptorSetKHR-commandBuffer-cmdpool");
static const std::map<VkPipelineBindPoint, std::string> bind_errors = {
@@ -7662,8 +7092,8 @@ bool CoreChecks::PreCallValidateCmdPushDescriptorSetKHR(VkCommandBuffer commandB
std::make_pair(VK_PIPELINE_BIND_POINT_COMPUTE, "VUID-vkCmdPushDescriptorSetKHR-pipelineBindPoint-00363"),
std::make_pair(VK_PIPELINE_BIND_POINT_RAY_TRACING_NV, "VUID-vkCmdPushDescriptorSetKHR-pipelineBindPoint-00363")};
- skip |= ValidatePipelineBindPoint(cb_state, pipelineBindPoint, func_name, bind_errors);
- auto layout_data = GetPipelineLayout(layout);
+ skip |= ValidatePipelineBindPoint(device_data, cb_state, pipelineBindPoint, func_name, bind_errors);
+ auto layout_data = GetPipelineLayout(device_data, layout);
// Validate the set index points to a push descriptor set and is in range
if (layout_data) {
@@ -7673,33 +7103,36 @@ bool CoreChecks::PreCallValidateCmdPushDescriptorSetKHR(VkCommandBuffer commandB
const auto dsl = set_layouts[set];
if (dsl) {
if (!dsl->IsPushDescriptor()) {
- skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT,
- layout_u64, "VUID-vkCmdPushDescriptorSetKHR-set-00365",
- "%s: Set index %" PRIu32 " does not match push descriptor set layout index for %s.", func_name,
- set, report_data->FormatHandle(layout).c_str());
+ skip = log_msg(
+ device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT,
+ layout_u64, "VUID-vkCmdPushDescriptorSetKHR-set-00365",
+ "%s: Set index %" PRIu32 " does not match push descriptor set layout index for VkPipelineLayout %s.",
+ func_name, set, device_data->report_data->FormatHandle(layout_u64).c_str());
} else {
// Create an empty proxy in order to use the existing descriptor set update validation
// TODO move the validation (like this) that doesn't need descriptor set state to the DSL object so we
// don't have to do this.
- cvdescriptorset::DescriptorSet proxy_ds(VK_NULL_HANDLE, VK_NULL_HANDLE, dsl, 0, this);
- skip |= ValidatePushDescriptorsUpdate(&proxy_ds, descriptorWriteCount, pDescriptorWrites, func_name);
+ cvdescriptorset::DescriptorSet proxy_ds(VK_NULL_HANDLE, VK_NULL_HANDLE, dsl, 0, device_data);
+ skip |= proxy_ds.ValidatePushDescriptorsUpdate(device_data->report_data, descriptorWriteCount,
+ pDescriptorWrites, func_name);
}
}
} else {
- skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, layout_u64,
- "VUID-vkCmdPushDescriptorSetKHR-set-00364",
- "%s: Set index %" PRIu32 " is outside of range for %s (set < %" PRIu32 ").", func_name, set,
- report_data->FormatHandle(layout).c_str(), static_cast<uint32_t>(set_layouts.size()));
+ skip =
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT,
+ layout_u64, "VUID-vkCmdPushDescriptorSetKHR-set-00364",
+ "%s: Set index %" PRIu32 " is outside of range for VkPipelineLayout %s (set < %" PRIu32 ").", func_name,
+ set, device_data->report_data->FormatHandle(layout_u64).c_str(), static_cast<uint32_t>(set_layouts.size()));
}
}
return skip;
}
-void CoreChecks::RecordCmdPushDescriptorSetState(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipelineBindPoint,
- VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount,
- const VkWriteDescriptorSet *pDescriptorWrites) {
- const auto &pipeline_layout = GetPipelineLayout(layout);
+void CoreChecks::RecordCmdPushDescriptorSetState(layer_data *device_data, GLOBAL_CB_NODE *cb_state,
+ VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set,
+ uint32_t descriptorWriteCount, const VkWriteDescriptorSet *pDescriptorWrites) {
+ const auto &pipeline_layout = GetPipelineLayout(device_data, layout);
// Short circuit invalid updates
if (!pipeline_layout || (set >= pipeline_layout->set_layouts.size()) || !pipeline_layout->set_layouts[set] ||
!pipeline_layout->set_layouts[set]->IsPushDescriptor())
@@ -7710,12 +7143,12 @@ void CoreChecks::RecordCmdPushDescriptorSetState(CMD_BUFFER_STATE *cb_state, VkP
auto &last_bound = cb_state->lastBound[pipelineBindPoint];
auto &push_descriptor_set = last_bound.push_descriptor_set;
     // If we are disturbing the current push_descriptor_set, clear it
- if (!push_descriptor_set || !CompatForSet(set, last_bound, pipeline_layout->compat_for_set)) {
- last_bound.UnbindAndResetPushDescriptorSet(new cvdescriptorset::DescriptorSet(0, 0, dsl, 0, this));
+ if (!push_descriptor_set || !CompatForSet(set, last_bound.compat_id_for_set, pipeline_layout->compat_for_set)) {
+ push_descriptor_set.reset(new cvdescriptorset::DescriptorSet(0, 0, dsl, 0, device_data));
}
- UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, set, 1, nullptr, push_descriptor_set.get(), 0,
- nullptr);
+ std::vector<cvdescriptorset::DescriptorSet *> descriptor_sets = {push_descriptor_set.get()};
+ UpdateLastBoundDescriptorSets(device_data, cb_state, pipelineBindPoint, pipeline_layout, set, 1, descriptor_sets, 0, nullptr);
last_bound.pipeline_layout = layout;
// Now that we have either the new or extant push_descriptor set ... do the write updates against it
@@ -7725,8 +7158,9 @@ void CoreChecks::RecordCmdPushDescriptorSetState(CMD_BUFFER_STATE *cb_state, VkP
void CoreChecks::PreCallRecordCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount,
const VkWriteDescriptorSet *pDescriptorWrites) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
- RecordCmdPushDescriptorSetState(cb_state, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
+ RecordCmdPushDescriptorSetState(device_data, cb_state, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites);
}
static VkDeviceSize GetIndexAlignment(VkIndexType indexType) {
@@ -7735,8 +7169,6 @@ static VkDeviceSize GetIndexAlignment(VkIndexType indexType) {
return 2;
case VK_INDEX_TYPE_UINT32:
return 4;
- case VK_INDEX_TYPE_UINT8_EXT:
- return 1;
default:
// Not a real index type. Express no alignment requirement here; we expect upper layer
// to have already picked up on the enum being nonsense.
@@ -7746,21 +7178,23 @@ static VkDeviceSize GetIndexAlignment(VkIndexType indexType) {
bool CoreChecks::PreCallValidateCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
VkIndexType indexType) {
- const auto buffer_state = GetBufferState(buffer);
- const auto cb_node = GetCBState(commandBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ auto buffer_state = GetBufferState(buffer);
+ auto cb_node = GetCBNode(commandBuffer);
assert(buffer_state);
assert(cb_node);
- bool skip =
- ValidateBufferUsageFlags(buffer_state, VK_BUFFER_USAGE_INDEX_BUFFER_BIT, true, "VUID-vkCmdBindIndexBuffer-buffer-00433",
- "vkCmdBindIndexBuffer()", "VK_BUFFER_USAGE_INDEX_BUFFER_BIT");
- skip |= ValidateCmdQueueFlags(cb_node, "vkCmdBindIndexBuffer()", VK_QUEUE_GRAPHICS_BIT,
+ bool skip = ValidateBufferUsageFlags(device_data, buffer_state, VK_BUFFER_USAGE_INDEX_BUFFER_BIT, true,
+ "VUID-vkCmdBindIndexBuffer-buffer-00433", "vkCmdBindIndexBuffer()",
+ "VK_BUFFER_USAGE_INDEX_BUFFER_BIT");
+ skip |= ValidateCmdQueueFlags(device_data, cb_node, "vkCmdBindIndexBuffer()", VK_QUEUE_GRAPHICS_BIT,
"VUID-vkCmdBindIndexBuffer-commandBuffer-cmdpool");
- skip |= ValidateCmd(cb_node, CMD_BINDINDEXBUFFER, "vkCmdBindIndexBuffer()");
- skip |= ValidateMemoryIsBoundToBuffer(buffer_state, "vkCmdBindIndexBuffer()", "VUID-vkCmdBindIndexBuffer-buffer-00434");
- const auto offset_align = GetIndexAlignment(indexType);
+ skip |= ValidateCmd(device_data, cb_node, CMD_BINDINDEXBUFFER, "vkCmdBindIndexBuffer()");
+ skip |= ValidateMemoryIsBoundToBuffer(device_data, buffer_state, "vkCmdBindIndexBuffer()",
+ "VUID-vkCmdBindIndexBuffer-buffer-00434");
+ auto offset_align = GetIndexAlignment(indexType);
if (offset % offset_align) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(commandBuffer), "VUID-vkCmdBindIndexBuffer-offset-00432",
"vkCmdBindIndexBuffer() offset (0x%" PRIxLEAST64 ") does not fall on alignment (%s) boundary.", offset,
string_VkIndexType(indexType));
@@ -7769,38 +7203,39 @@ bool CoreChecks::PreCallValidateCmdBindIndexBuffer(VkCommandBuffer commandBuffer
return skip;
}
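
As a hedged, hypothetical worked example of the offset alignment check above (values are illustrative only):

// GetIndexAlignment(VK_INDEX_TYPE_UINT32) == 4
// offset = 6  ->  6 % 4 == 2 != 0  ->  VUID-vkCmdBindIndexBuffer-offset-00432 is reported
// offset = 8  ->  8 % 4 == 0       ->  the check passes
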
-void ValidationStateTracker::PreCallRecordCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
- VkIndexType indexType) {
+void CoreChecks::PreCallRecordCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
+ VkIndexType indexType) {
auto buffer_state = GetBufferState(buffer);
- auto cb_state = GetCBState(commandBuffer);
+ auto cb_node = GetCBNode(commandBuffer);
- cb_state->status |= CBSTATUS_INDEX_BUFFER_BOUND;
- cb_state->index_buffer_binding.buffer = buffer;
- cb_state->index_buffer_binding.size = buffer_state->createInfo.size;
- cb_state->index_buffer_binding.offset = offset;
- cb_state->index_buffer_binding.index_type = indexType;
- // Add binding for this index buffer to this commandbuffer
- AddCommandBufferBindingBuffer(cb_state, buffer_state);
+ cb_node->status |= CBSTATUS_INDEX_BUFFER_BOUND;
+ cb_node->index_buffer_binding.buffer = buffer;
+ cb_node->index_buffer_binding.size = buffer_state->createInfo.size;
+ cb_node->index_buffer_binding.offset = offset;
+ cb_node->index_buffer_binding.index_type = indexType;
}
+static inline void UpdateResourceTrackingOnDraw(GLOBAL_CB_NODE *pCB) { pCB->draw_data.push_back(pCB->current_draw_data); }
+
bool CoreChecks::PreCallValidateCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount,
const VkBuffer *pBuffers, const VkDeviceSize *pOffsets) {
- const auto cb_state = GetCBState(commandBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ auto cb_state = GetCBNode(commandBuffer);
assert(cb_state);
- bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdBindVertexBuffers()", VK_QUEUE_GRAPHICS_BIT,
+ bool skip = ValidateCmdQueueFlags(device_data, cb_state, "vkCmdBindVertexBuffers()", VK_QUEUE_GRAPHICS_BIT,
"VUID-vkCmdBindVertexBuffers-commandBuffer-cmdpool");
- skip |= ValidateCmd(cb_state, CMD_BINDVERTEXBUFFERS, "vkCmdBindVertexBuffers()");
+ skip |= ValidateCmd(device_data, cb_state, CMD_BINDVERTEXBUFFERS, "vkCmdBindVertexBuffers()");
for (uint32_t i = 0; i < bindingCount; ++i) {
- const auto buffer_state = GetBufferState(pBuffers[i]);
+ auto buffer_state = GetBufferState(pBuffers[i]);
assert(buffer_state);
- skip |= ValidateBufferUsageFlags(buffer_state, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, true,
+ skip |= ValidateBufferUsageFlags(device_data, buffer_state, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, true,
"VUID-vkCmdBindVertexBuffers-pBuffers-00627", "vkCmdBindVertexBuffers()",
"VK_BUFFER_USAGE_VERTEX_BUFFER_BIT");
- skip |=
- ValidateMemoryIsBoundToBuffer(buffer_state, "vkCmdBindVertexBuffers()", "VUID-vkCmdBindVertexBuffers-pBuffers-00628");
+ skip |= ValidateMemoryIsBoundToBuffer(device_data, buffer_state, "vkCmdBindVertexBuffers()",
+ "VUID-vkCmdBindVertexBuffers-pBuffers-00628");
if (pOffsets[i] >= buffer_state->createInfo.size) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
HandleToUint64(buffer_state->buffer), "VUID-vkCmdBindVertexBuffers-pOffsets-00626",
"vkCmdBindVertexBuffers() offset (0x%" PRIxLEAST64 ") is beyond the end of the buffer.", pOffsets[i]);
}
@@ -7808,99 +7243,298 @@ bool CoreChecks::PreCallValidateCmdBindVertexBuffers(VkCommandBuffer commandBuff
return skip;
}
-void ValidationStateTracker::PreCallRecordCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding,
- uint32_t bindingCount, const VkBuffer *pBuffers,
- const VkDeviceSize *pOffsets) {
- auto cb_state = GetCBState(commandBuffer);
+void CoreChecks::PreCallRecordCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount,
+ const VkBuffer *pBuffers, const VkDeviceSize *pOffsets) {
+ auto cb_state = GetCBNode(commandBuffer);
uint32_t end = firstBinding + bindingCount;
- if (cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.size() < end) {
- cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.resize(end);
+ if (cb_state->current_draw_data.vertex_buffer_bindings.size() < end) {
+ cb_state->current_draw_data.vertex_buffer_bindings.resize(end);
}
for (uint32_t i = 0; i < bindingCount; ++i) {
- auto &vertex_buffer_binding = cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings[i + firstBinding];
+ auto &vertex_buffer_binding = cb_state->current_draw_data.vertex_buffer_bindings[i + firstBinding];
vertex_buffer_binding.buffer = pBuffers[i];
vertex_buffer_binding.offset = pOffsets[i];
- // Add binding for this vertex buffer to this commandbuffer
- AddCommandBufferBindingBuffer(cb_state, GetBufferState(pBuffers[i]));
}
}
+// Generic function to handle validation for all CmdDraw* type functions
+bool CoreChecks::ValidateCmdDrawType(layer_data *dev_data, VkCommandBuffer cmd_buffer, bool indexed, VkPipelineBindPoint bind_point,
+ CMD_TYPE cmd_type, const char *caller, VkQueueFlags queue_flags, const char *queue_flag_code,
+ const char *renderpass_msg_code, const char *pipebound_msg_code,
+ const char *dynamic_state_msg_code) {
+ bool skip = false;
+ GLOBAL_CB_NODE *cb_state = GetCBNode(cmd_buffer);
+ if (cb_state) {
+ skip |= ValidateCmdQueueFlags(dev_data, cb_state, caller, queue_flags, queue_flag_code);
+ skip |= ValidateCmd(dev_data, cb_state, cmd_type, caller);
+ skip |= ValidateCmdBufDrawState(dev_data, cb_state, cmd_type, indexed, bind_point, caller, pipebound_msg_code,
+ dynamic_state_msg_code);
+ skip |= (VK_PIPELINE_BIND_POINT_GRAPHICS == bind_point) ? OutsideRenderPass(dev_data, cb_state, caller, renderpass_msg_code)
+ : InsideRenderPass(dev_data, cb_state, caller, renderpass_msg_code);
+ }
+ return skip;
+}
+
+// Generic function to handle state update for all CmdDraw* and CmdDispatch* type functions
+void CoreChecks::UpdateStateCmdDrawDispatchType(layer_data *dev_data, GLOBAL_CB_NODE *cb_state, VkPipelineBindPoint bind_point) {
+ UpdateDrawState(dev_data, cb_state, bind_point);
+}
+
+// Generic function to handle state update for all CmdDraw* type functions
+void CoreChecks::UpdateStateCmdDrawType(layer_data *dev_data, GLOBAL_CB_NODE *cb_state, VkPipelineBindPoint bind_point) {
+ UpdateStateCmdDrawDispatchType(dev_data, cb_state, bind_point);
+ UpdateResourceTrackingOnDraw(cb_state);
+ cb_state->hasDrawCmd = true;
+}
+
+bool CoreChecks::PreCallValidateCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
+ uint32_t firstVertex, uint32_t firstInstance) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ return ValidateCmdDrawType(device_data, commandBuffer, false, VK_PIPELINE_BIND_POINT_GRAPHICS, CMD_DRAW, "vkCmdDraw()",
+ VK_QUEUE_GRAPHICS_BIT, "VUID-vkCmdDraw-commandBuffer-cmdpool", "VUID-vkCmdDraw-renderpass",
+ "VUID-vkCmdDraw-None-00442", "VUID-vkCmdDraw-None-00443");
+}
+
+void CoreChecks::PreCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
+ uint32_t firstVertex, uint32_t firstInstance) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GpuAllocateValidationResources(device_data, commandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS);
+}
+
+void CoreChecks::PostCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
+ uint32_t firstVertex, uint32_t firstInstance) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
+ UpdateStateCmdDrawType(device_data, cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
+}
+
+bool CoreChecks::PreCallValidateCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
+ uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ bool skip = ValidateCmdDrawType(device_data, commandBuffer, true, VK_PIPELINE_BIND_POINT_GRAPHICS, CMD_DRAWINDEXED,
+ "vkCmdDrawIndexed()", VK_QUEUE_GRAPHICS_BIT, "VUID-vkCmdDrawIndexed-commandBuffer-cmdpool",
+ "VUID-vkCmdDrawIndexed-renderpass", "VUID-vkCmdDrawIndexed-None-00461",
+ "VUID-vkCmdDrawIndexed-None-00462");
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
+ if (!skip && (cb_state->status & CBSTATUS_INDEX_BUFFER_BOUND)) {
+ unsigned int index_size = 0;
+ const auto &index_buffer_binding = cb_state->index_buffer_binding;
+ if (index_buffer_binding.index_type == VK_INDEX_TYPE_UINT16) {
+ index_size = 2;
+ } else if (index_buffer_binding.index_type == VK_INDEX_TYPE_UINT32) {
+ index_size = 4;
+ }
+ VkDeviceSize end_offset = (index_size * ((VkDeviceSize)firstIndex + indexCount)) + index_buffer_binding.offset;
+ if (end_offset > index_buffer_binding.size) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+ HandleToUint64(index_buffer_binding.buffer), "VUID-vkCmdDrawIndexed-indexSize-00463",
+ "vkCmdDrawIndexed() index size (%d) * (firstIndex (%d) + indexCount (%d)) "
+ "+ binding offset (%" PRIuLEAST64 ") = an ending offset of %" PRIuLEAST64
+ " bytes, "
+ "which is greater than the index buffer size (%" PRIuLEAST64 ").",
+ index_size, firstIndex, indexCount, index_buffer_binding.offset, end_offset, index_buffer_binding.size);
+ }
+ }
+ return skip;
+}
+
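
A hedged, hypothetical worked example of the end-offset bound computed in PreCallValidateCmdDrawIndexed above (values are illustrative only):

// index_type = VK_INDEX_TYPE_UINT16        ->  index_size = 2
// firstIndex = 10, indexCount = 100, index_buffer_binding.offset = 256
// end_offset = 2 * (10 + 100) + 256 = 476 bytes
// An index buffer bound with size 400 would be flagged under
// VUID-vkCmdDrawIndexed-indexSize-00463, since 476 > 400.
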
+void CoreChecks::PreCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
+ uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GpuAllocateValidationResources(device_data, commandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS);
+}
+
+void CoreChecks::PostCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
+ uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
+ UpdateStateCmdDrawType(device_data, cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
+}
+
+bool CoreChecks::PreCallValidateCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count,
+ uint32_t stride) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ bool skip = ValidateCmdDrawType(device_data, commandBuffer, false, VK_PIPELINE_BIND_POINT_GRAPHICS, CMD_DRAWINDIRECT,
+ "vkCmdDrawIndirect()", VK_QUEUE_GRAPHICS_BIT, "VUID-vkCmdDrawIndirect-commandBuffer-cmdpool",
+ "VUID-vkCmdDrawIndirect-renderpass", "VUID-vkCmdDrawIndirect-None-00485",
+ "VUID-vkCmdDrawIndirect-None-00486");
+ BUFFER_STATE *buffer_state = GetBufferState(buffer);
+ skip |= ValidateMemoryIsBoundToBuffer(device_data, buffer_state, "vkCmdDrawIndirect()", "VUID-vkCmdDrawIndirect-buffer-00474");
+ // TODO: If the drawIndirectFirstInstance feature is not enabled, all the firstInstance members of the
+ // VkDrawIndirectCommand structures accessed by this command must be 0, which will require access to the contents of 'buffer'.
+ return skip;
+}
+
+void CoreChecks::PreCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count,
+ uint32_t stride) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GpuAllocateValidationResources(device_data, commandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS);
+}
+
+void CoreChecks::PostCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count,
+ uint32_t stride) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
+ BUFFER_STATE *buffer_state = GetBufferState(buffer);
+ UpdateStateCmdDrawType(device_data, cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
+ AddCommandBufferBindingBuffer(device_data, cb_state, buffer_state);
+}
+
+bool CoreChecks::PreCallValidateCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
+ uint32_t count, uint32_t stride) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ bool skip = ValidateCmdDrawType(
+ device_data, commandBuffer, true, VK_PIPELINE_BIND_POINT_GRAPHICS, CMD_DRAWINDEXEDINDIRECT, "vkCmdDrawIndexedIndirect()",
+ VK_QUEUE_GRAPHICS_BIT, "VUID-vkCmdDrawIndexedIndirect-commandBuffer-cmdpool", "VUID-vkCmdDrawIndexedIndirect-renderpass",
+ "VUID-vkCmdDrawIndexedIndirect-None-00537", "VUID-vkCmdDrawIndexedIndirect-None-00538");
+ BUFFER_STATE *buffer_state = GetBufferState(buffer);
+ skip |= ValidateMemoryIsBoundToBuffer(device_data, buffer_state, "vkCmdDrawIndexedIndirect()",
+ "VUID-vkCmdDrawIndexedIndirect-buffer-00526");
+ // TODO: If the drawIndirectFirstInstance feature is not enabled, all the firstInstance members of the
+ // VkDrawIndexedIndirectCommand structures accessed by this command must be 0, which will require access to the contents of
+ // 'buffer'.
+ return skip;
+}
+
+void CoreChecks::PreCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
+ uint32_t count, uint32_t stride) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GpuAllocateValidationResources(device_data, commandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS);
+}
+
+void CoreChecks::PostCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
+ uint32_t count, uint32_t stride) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
+ BUFFER_STATE *buffer_state = GetBufferState(buffer);
+ UpdateStateCmdDrawType(device_data, cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
+ AddCommandBufferBindingBuffer(device_data, cb_state, buffer_state);
+}
+
+bool CoreChecks::PreCallValidateCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ return ValidateCmdDrawType(device_data, commandBuffer, false, VK_PIPELINE_BIND_POINT_COMPUTE, CMD_DISPATCH, "vkCmdDispatch()",
+ VK_QUEUE_COMPUTE_BIT, "VUID-vkCmdDispatch-commandBuffer-cmdpool", "VUID-vkCmdDispatch-renderpass",
+ "VUID-vkCmdDispatch-None-00391", kVUIDUndefined);
+}
+
+void CoreChecks::PreCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GpuAllocateValidationResources(device_data, commandBuffer, VK_PIPELINE_BIND_POINT_COMPUTE);
+}
+
+void CoreChecks::PostCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
+ UpdateStateCmdDrawDispatchType(device_data, cb_state, VK_PIPELINE_BIND_POINT_COMPUTE);
+}
+
+bool CoreChecks::PreCallValidateCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ bool skip =
+ ValidateCmdDrawType(device_data, commandBuffer, false, VK_PIPELINE_BIND_POINT_COMPUTE, CMD_DISPATCHINDIRECT,
+ "vkCmdDispatchIndirect()", VK_QUEUE_COMPUTE_BIT, "VUID-vkCmdDispatchIndirect-commandBuffer-cmdpool",
+ "VUID-vkCmdDispatchIndirect-renderpass", "VUID-vkCmdDispatchIndirect-None-00404", kVUIDUndefined);
+ BUFFER_STATE *buffer_state = GetBufferState(buffer);
+ skip |= ValidateMemoryIsBoundToBuffer(device_data, buffer_state, "vkCmdDispatchIndirect()",
+ "VUID-vkCmdDispatchIndirect-buffer-00401");
+ return skip;
+}
+
+void CoreChecks::PreCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GpuAllocateValidationResources(device_data, commandBuffer, VK_PIPELINE_BIND_POINT_COMPUTE);
+}
+
+void CoreChecks::PostCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
+ UpdateStateCmdDrawDispatchType(device_data, cb_state, VK_PIPELINE_BIND_POINT_COMPUTE);
+ BUFFER_STATE *buffer_state = GetBufferState(buffer);
+ AddCommandBufferBindingBuffer(device_data, cb_state, buffer_state);
+}
+
// Validate that an image's sampleCount matches the requirement for a specific API call
-bool CoreChecks::ValidateImageSampleCount(const IMAGE_STATE *image_state, VkSampleCountFlagBits sample_count, const char *location,
- const std::string &msgCode) const {
+bool CoreChecks::ValidateImageSampleCount(layer_data *dev_data, IMAGE_STATE *image_state, VkSampleCountFlagBits sample_count,
+ const char *location, const std::string &msgCode) {
bool skip = false;
if (image_state->createInfo.samples != sample_count) {
- skip =
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
- HandleToUint64(image_state->image), msgCode, "%s for %s was created with a sample count of %s but must be %s.",
- location, report_data->FormatHandle(image_state->image).c_str(),
- string_VkSampleCountFlagBits(image_state->createInfo.samples), string_VkSampleCountFlagBits(sample_count));
+ skip = log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+ HandleToUint64(image_state->image), msgCode,
+ "%s for image %s was created with a sample count of %s but must be %s.", location,
+ dev_data->report_data->FormatHandle(image_state->image).c_str(),
+ string_VkSampleCountFlagBits(image_state->createInfo.samples), string_VkSampleCountFlagBits(sample_count));
}
return skip;
}
bool CoreChecks::PreCallValidateCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
VkDeviceSize dataSize, const void *pData) {
- const auto cb_state = GetCBState(commandBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ auto cb_state = GetCBNode(commandBuffer);
assert(cb_state);
- const auto dst_buffer_state = GetBufferState(dstBuffer);
+ auto dst_buffer_state = GetBufferState(dstBuffer);
assert(dst_buffer_state);
bool skip = false;
- skip |= ValidateMemoryIsBoundToBuffer(dst_buffer_state, "vkCmdUpdateBuffer()", "VUID-vkCmdUpdateBuffer-dstBuffer-00035");
+ skip |= ValidateMemoryIsBoundToBuffer(device_data, dst_buffer_state, "vkCmdUpdateBuffer()",
+ "VUID-vkCmdUpdateBuffer-dstBuffer-00035");
// Validate that DST buffer has correct usage flags set
- skip |=
- ValidateBufferUsageFlags(dst_buffer_state, VK_BUFFER_USAGE_TRANSFER_DST_BIT, true, "VUID-vkCmdUpdateBuffer-dstBuffer-00034",
- "vkCmdUpdateBuffer()", "VK_BUFFER_USAGE_TRANSFER_DST_BIT");
- skip |=
- ValidateCmdQueueFlags(cb_state, "vkCmdUpdateBuffer()", VK_QUEUE_TRANSFER_BIT | VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
- "VUID-vkCmdUpdateBuffer-commandBuffer-cmdpool");
- skip |= ValidateCmd(cb_state, CMD_UPDATEBUFFER, "vkCmdUpdateBuffer()");
- skip |= InsideRenderPass(cb_state, "vkCmdUpdateBuffer()", "VUID-vkCmdUpdateBuffer-renderpass");
+ skip |= ValidateBufferUsageFlags(device_data, dst_buffer_state, VK_BUFFER_USAGE_TRANSFER_DST_BIT, true,
+ "VUID-vkCmdUpdateBuffer-dstBuffer-00034", "vkCmdUpdateBuffer()",
+ "VK_BUFFER_USAGE_TRANSFER_DST_BIT");
+ skip |= ValidateCmdQueueFlags(device_data, cb_state, "vkCmdUpdateBuffer()",
+ VK_QUEUE_TRANSFER_BIT | VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
+ "VUID-vkCmdUpdateBuffer-commandBuffer-cmdpool");
+ skip |= ValidateCmd(device_data, cb_state, CMD_UPDATEBUFFER, "vkCmdUpdateBuffer()");
+ skip |= InsideRenderPass(device_data, cb_state, "vkCmdUpdateBuffer()", "VUID-vkCmdUpdateBuffer-renderpass");
return skip;
}
-void ValidationStateTracker::PostCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer,
- VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData) {
- auto cb_state = GetCBState(commandBuffer);
+void CoreChecks::PostCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
+ VkDeviceSize dataSize, const void *pData) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ auto cb_state = GetCBNode(commandBuffer);
auto dst_buffer_state = GetBufferState(dstBuffer);
// Update bindings between buffer and cmd buffer
- AddCommandBufferBindingBuffer(cb_state, dst_buffer_state);
+ AddCommandBufferBindingBuffer(device_data, cb_state, dst_buffer_state);
}
bool CoreChecks::SetEventStageMask(VkQueue queue, VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
- CMD_BUFFER_STATE *pCB = GetCBState(commandBuffer);
+ layer_data *dev_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = GetCBNode(commandBuffer);
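+    // Record the signalled stage mask on both the command buffer and the submitting queue so that later
+    // vkCmdWaitEvents validation can compare against it.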
if (pCB) {
pCB->eventToStageMap[event] = stageMask;
}
- auto queue_data = queueMap.find(queue);
- if (queue_data != queueMap.end()) {
+ auto queue_data = dev_data->queueMap.find(queue);
+ if (queue_data != dev_data->queueMap.end()) {
queue_data->second.eventToStageMap[event] = stageMask;
}
return false;
}
bool CoreChecks::PreCallValidateCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
assert(cb_state);
- bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetEvent()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
+ bool skip = ValidateCmdQueueFlags(device_data, cb_state, "vkCmdSetEvent()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
"VUID-vkCmdSetEvent-commandBuffer-cmdpool");
- skip |= ValidateCmd(cb_state, CMD_SETEVENT, "vkCmdSetEvent()");
- skip |= InsideRenderPass(cb_state, "vkCmdSetEvent()", "VUID-vkCmdSetEvent-renderpass");
- skip |= ValidateStageMaskGsTsEnables(stageMask, "vkCmdSetEvent()", "VUID-vkCmdSetEvent-stageMask-01150",
+ skip |= ValidateCmd(device_data, cb_state, CMD_SETEVENT, "vkCmdSetEvent()");
+ skip |= InsideRenderPass(device_data, cb_state, "vkCmdSetEvent()", "VUID-vkCmdSetEvent-renderpass");
+ skip |= ValidateStageMaskGsTsEnables(device_data, stageMask, "vkCmdSetEvent()", "VUID-vkCmdSetEvent-stageMask-01150",
"VUID-vkCmdSetEvent-stageMask-01151", "VUID-vkCmdSetEvent-stageMask-02107",
"VUID-vkCmdSetEvent-stageMask-02108");
return skip;
}
void CoreChecks::PreCallRecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
- auto event_state = GetEventState(event);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
+ auto event_state = GetEventNode(event);
if (event_state) {
- AddCommandBufferBinding(&event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent), cb_state);
+ AddCommandBufferBinding(&event_state->cb_bindings, {HandleToUint64(event), kVulkanObjectTypeEvent}, cb_state);
event_state->cb_bindings.insert(cb_state);
}
cb_state->events.push_back(event);
@@ -7911,24 +7545,25 @@ void CoreChecks::PreCallRecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent
}
bool CoreChecks::PreCallValidateCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
assert(cb_state);
- bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdResetEvent()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
+ bool skip = ValidateCmdQueueFlags(device_data, cb_state, "vkCmdResetEvent()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
"VUID-vkCmdResetEvent-commandBuffer-cmdpool");
- skip |= ValidateCmd(cb_state, CMD_RESETEVENT, "vkCmdResetEvent()");
- skip |= InsideRenderPass(cb_state, "vkCmdResetEvent()", "VUID-vkCmdResetEvent-renderpass");
- skip |= ValidateStageMaskGsTsEnables(stageMask, "vkCmdResetEvent()", "VUID-vkCmdResetEvent-stageMask-01154",
+ skip |= ValidateCmd(device_data, cb_state, CMD_RESETEVENT, "vkCmdResetEvent()");
+ skip |= InsideRenderPass(device_data, cb_state, "vkCmdResetEvent()", "VUID-vkCmdResetEvent-renderpass");
+ skip |= ValidateStageMaskGsTsEnables(device_data, stageMask, "vkCmdResetEvent()", "VUID-vkCmdResetEvent-stageMask-01154",
"VUID-vkCmdResetEvent-stageMask-01155", "VUID-vkCmdResetEvent-stageMask-02109",
"VUID-vkCmdResetEvent-stageMask-02110");
return skip;
}
void CoreChecks::PreCallRecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
- auto event_state = GetEventState(event);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
+ auto event_state = GetEventNode(event);
if (event_state) {
- AddCommandBufferBinding(&event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent), cb_state);
+ AddCommandBufferBinding(&event_state->cb_bindings, {HandleToUint64(event), kVulkanObjectTypeEvent}, cb_state);
event_state->cb_bindings.insert(cb_state);
}
cb_state->events.push_back(event);
@@ -8045,9 +7680,9 @@ static VkPipelineStageFlagBits GetLogicallyLatestGraphicsPipelineStage(VkPipelin
}
// Verify image barrier image state and that the image is consistent with FB image
-bool CoreChecks::ValidateImageBarrierImage(const char *funcName, CMD_BUFFER_STATE const *cb_state, VkFramebuffer framebuffer,
- uint32_t active_subpass, const safe_VkSubpassDescription2KHR &sub_desc,
- const VulkanTypedHandle &rp_handle, uint32_t img_index,
+bool CoreChecks::ValidateImageBarrierImage(layer_data *device_data, const char *funcName, GLOBAL_CB_NODE const *cb_state,
+ VkFramebuffer framebuffer, uint32_t active_subpass,
+ const safe_VkSubpassDescription2KHR &sub_desc, uint64_t rp_handle, uint32_t img_index,
const VkImageMemoryBarrier &img_barrier) {
bool skip = false;
const auto &fb_state = GetFramebufferState(framebuffer);
@@ -8071,7 +7706,7 @@ bool CoreChecks::ValidateImageBarrierImage(const char *funcName, CMD_BUFFER_STAT
if (sub_desc.pDepthStencilAttachment && sub_desc.pDepthStencilAttachment->attachment == attach_index) {
sub_image_layout = sub_desc.pDepthStencilAttachment->layout;
sub_image_found = true;
- } else if (device_extensions.vk_khr_depth_stencil_resolve) {
+ } else if (GetDeviceExtensions()->vk_khr_depth_stencil_resolve) {
const auto *resolve = lvl_find_in_chain<VkSubpassDescriptionDepthStencilResolveKHR>(sub_desc.pNext);
if (resolve && resolve->pDepthStencilResolveAttachment &&
resolve->pDepthStencilResolveAttachment->attachment == attach_index) {
@@ -8092,35 +7727,37 @@ bool CoreChecks::ValidateImageBarrierImage(const char *funcName, CMD_BUFFER_STAT
}
}
if (!sub_image_found) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
- rp_handle.handle, "VUID-vkCmdPipelineBarrier-image-02635",
- "%s: Barrier pImageMemoryBarriers[%d].%s is not referenced by the VkSubpassDescription for "
- "active subpass (%d) of current %s.",
- funcName, img_index, report_data->FormatHandle(img_bar_image).c_str(), active_subpass,
- report_data->FormatHandle(rp_handle).c_str());
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+ rp_handle, "VUID-vkCmdPipelineBarrier-image-02635",
+ "%s: Barrier pImageMemoryBarriers[%d].image (%s) is not referenced by the VkSubpassDescription for "
+ "active subpass (%d) of current renderPass (%s).",
+ funcName, img_index, device_data->report_data->FormatHandle(img_bar_image).c_str(), active_subpass,
+ device_data->report_data->FormatHandle(rp_handle).c_str());
}
} else { // !image_match
auto const fb_handle = HandleToUint64(fb_state->framebuffer);
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, fb_handle,
- "VUID-vkCmdPipelineBarrier-image-02635",
- "%s: Barrier pImageMemoryBarriers[%d].%s does not match an image from the current %s.", funcName, img_index,
- report_data->FormatHandle(img_bar_image).c_str(), report_data->FormatHandle(fb_state->framebuffer).c_str());
+ skip |=
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, fb_handle,
+ "VUID-vkCmdPipelineBarrier-image-02635",
+ "%s: Barrier pImageMemoryBarriers[%d].image (%s) does not match an image from the current framebuffer (%s).",
+ funcName, img_index, device_data->report_data->FormatHandle(img_bar_image).c_str(),
+ device_data->report_data->FormatHandle(fb_handle).c_str());
}
if (img_barrier.oldLayout != img_barrier.newLayout) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdPipelineBarrier-oldLayout-01181",
- "%s: As the Image Barrier for %s is being executed within a render pass instance, oldLayout must "
+ "%s: As the Image Barrier for image %s is being executed within a render pass instance, oldLayout must "
"equal newLayout yet they are %s and %s.",
- funcName, report_data->FormatHandle(img_barrier.image).c_str(), string_VkImageLayout(img_barrier.oldLayout),
- string_VkImageLayout(img_barrier.newLayout));
+ funcName, device_data->report_data->FormatHandle(img_barrier.image).c_str(),
+ string_VkImageLayout(img_barrier.oldLayout), string_VkImageLayout(img_barrier.newLayout));
} else {
if (sub_image_found && sub_image_layout != img_barrier.oldLayout) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
- rp_handle.handle, "VUID-vkCmdPipelineBarrier-oldLayout-02636",
- "%s: Barrier pImageMemoryBarriers[%d].%s is referenced by the VkSubpassDescription for active "
- "subpass (%d) of current %s as having layout %s, but image barrier has layout %s.",
- funcName, img_index, report_data->FormatHandle(img_bar_image).c_str(), active_subpass,
- report_data->FormatHandle(rp_handle).c_str(), string_VkImageLayout(sub_image_layout),
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+ rp_handle, "VUID-vkCmdPipelineBarrier-oldLayout-02636",
+ "%s: Barrier pImageMemoryBarriers[%d].image (%s) is referenced by the VkSubpassDescription for active "
+ "subpass (%d) of current renderPass (%s) as having layout %s, but image barrier has layout %s.",
+ funcName, img_index, device_data->report_data->FormatHandle(img_bar_image).c_str(), active_subpass,
+ device_data->report_data->FormatHandle(rp_handle).c_str(), string_VkImageLayout(sub_image_layout),
string_VkImageLayout(img_barrier.oldLayout));
}
}
@@ -8128,9 +7765,9 @@ bool CoreChecks::ValidateImageBarrierImage(const char *funcName, CMD_BUFFER_STAT
}
// Validate image barriers within a renderPass
-bool CoreChecks::ValidateRenderPassImageBarriers(const char *funcName, CMD_BUFFER_STATE *cb_state, uint32_t active_subpass,
- const safe_VkSubpassDescription2KHR &sub_desc, const VulkanTypedHandle &rp_handle,
- const safe_VkSubpassDependency2KHR *dependencies,
+bool CoreChecks::ValidateRenderPassImageBarriers(layer_data *device_data, const char *funcName, GLOBAL_CB_NODE *cb_state,
+ uint32_t active_subpass, const safe_VkSubpassDescription2KHR &sub_desc,
+ uint64_t rp_handle, const safe_VkSubpassDependency2KHR *dependencies,
const std::vector<uint32_t> &self_dependencies, uint32_t image_mem_barrier_count,
const VkImageMemoryBarrier *image_barriers) {
bool skip = false;
@@ -8148,23 +7785,25 @@ bool CoreChecks::ValidateRenderPassImageBarriers(const char *funcName, CMD_BUFFE
if (!access_mask_match) {
std::stringstream self_dep_ss;
stream_join(self_dep_ss, ", ", self_dependencies);
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
- rp_handle.handle, "VUID-vkCmdPipelineBarrier-pDependencies-02285",
- "%s: Barrier pImageMemoryBarriers[%d].srcAccessMask(0x%X) is not a subset of VkSubpassDependency "
- "srcAccessMask of subpass %d of %s. Candidate VkSubpassDependency are pDependencies entries [%s].",
- funcName, i, img_src_access_mask, active_subpass, report_data->FormatHandle(rp_handle).c_str(),
- self_dep_ss.str().c_str());
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
- rp_handle.handle, "VUID-vkCmdPipelineBarrier-pDependencies-02285",
- "%s: Barrier pImageMemoryBarriers[%d].dstAccessMask(0x%X) is not a subset of VkSubpassDependency "
- "dstAccessMask of subpass %d of %s. Candidate VkSubpassDependency are pDependencies entries [%s].",
- funcName, i, img_dst_access_mask, active_subpass, report_data->FormatHandle(rp_handle).c_str(),
- self_dep_ss.str().c_str());
+ skip |= log_msg(
+ device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, rp_handle,
+ "VUID-vkCmdPipelineBarrier-pDependencies-02285",
+ "%s: Barrier pImageMemoryBarriers[%d].srcAccessMask(0x%X) is not a subset of VkSubpassDependency "
+ "srcAccessMask of subpass %d of renderPass %s. Candidate VkSubpassDependency are pDependencies entries [%s].",
+ funcName, i, img_src_access_mask, active_subpass, device_data->report_data->FormatHandle(rp_handle).c_str(),
+ self_dep_ss.str().c_str());
+ skip |= log_msg(
+ device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, rp_handle,
+ "VUID-vkCmdPipelineBarrier-pDependencies-02285",
+ "%s: Barrier pImageMemoryBarriers[%d].dstAccessMask(0x%X) is not a subset of VkSubpassDependency "
+ "dstAccessMask of subpass %d of renderPass %s. Candidate VkSubpassDependency are pDependencies entries [%s].",
+ funcName, i, img_dst_access_mask, active_subpass, device_data->report_data->FormatHandle(rp_handle).c_str(),
+ self_dep_ss.str().c_str());
}
if (VK_QUEUE_FAMILY_IGNORED != img_barrier.srcQueueFamilyIndex ||
VK_QUEUE_FAMILY_IGNORED != img_barrier.dstQueueFamilyIndex) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
- rp_handle.handle, "VUID-vkCmdPipelineBarrier-srcQueueFamilyIndex-01182",
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+ rp_handle, "VUID-vkCmdPipelineBarrier-srcQueueFamilyIndex-01182",
"%s: Barrier pImageMemoryBarriers[%d].srcQueueFamilyIndex is %d and "
"pImageMemoryBarriers[%d].dstQueueFamilyIndex is %d but both must be VK_QUEUE_FAMILY_IGNORED.",
funcName, i, img_barrier.srcQueueFamilyIndex, i, img_barrier.dstQueueFamilyIndex);
@@ -8173,12 +7812,13 @@ bool CoreChecks::ValidateRenderPassImageBarriers(const char *funcName, CMD_BUFFE
if (VK_NULL_HANDLE == cb_state->activeFramebuffer) {
assert(VK_COMMAND_BUFFER_LEVEL_SECONDARY == cb_state->createInfo.level);
// Secondary CB case w/o FB specified delay validation
- cb_state->cmd_execute_commands_functions.emplace_back([=](const CMD_BUFFER_STATE *primary_cb, VkFramebuffer fb) {
- return ValidateImageBarrierImage(funcName, cb_state, fb, active_subpass, sub_desc, rp_handle, i, img_barrier);
+ cb_state->cmd_execute_commands_functions.emplace_back([=](GLOBAL_CB_NODE *primary_cb, VkFramebuffer fb) {
+ return ValidateImageBarrierImage(device_data, funcName, cb_state, fb, active_subpass, sub_desc, rp_handle, i,
+ img_barrier);
});
} else {
- skip |= ValidateImageBarrierImage(funcName, cb_state, cb_state->activeFramebuffer, active_subpass, sub_desc, rp_handle,
- i, img_barrier);
+ skip |= ValidateImageBarrierImage(device_data, funcName, cb_state, cb_state->activeFramebuffer, active_subpass,
+ sub_desc, rp_handle, i, img_barrier);
}
}
return skip;
@@ -8186,7 +7826,7 @@ bool CoreChecks::ValidateRenderPassImageBarriers(const char *funcName, CMD_BUFFE
// Validate VUs for Pipeline Barriers that are within a renderPass
// Pre: cb_state->activeRenderPass must be a pointer to valid renderPass state
-bool CoreChecks::ValidateRenderPassPipelineBarriers(const char *funcName, CMD_BUFFER_STATE *cb_state,
+bool CoreChecks::ValidateRenderPassPipelineBarriers(layer_data *device_data, const char *funcName, GLOBAL_CB_NODE *cb_state,
VkPipelineStageFlags src_stage_mask, VkPipelineStageFlags dst_stage_mask,
VkDependencyFlags dependency_flags, uint32_t mem_barrier_count,
const VkMemoryBarrier *mem_barriers, uint32_t buffer_mem_barrier_count,
@@ -8195,14 +7835,14 @@ bool CoreChecks::ValidateRenderPassPipelineBarriers(const char *funcName, CMD_BU
bool skip = false;
const auto rp_state = cb_state->activeRenderPass;
const auto active_subpass = cb_state->activeSubpass;
- const VulkanTypedHandle rp_handle(rp_state->renderPass, kVulkanObjectTypeRenderPass);
+ auto rp_handle = HandleToUint64(rp_state->renderPass);
const auto &self_dependencies = rp_state->self_dependencies[active_subpass];
const auto &dependencies = rp_state->createInfo.pDependencies;
if (self_dependencies.size() == 0) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, rp_handle.handle,
- "VUID-vkCmdPipelineBarrier-pDependencies-02285",
- "%s: Barriers cannot be set during subpass %d of %s with no self-dependency specified.", funcName,
- active_subpass, report_data->FormatHandle(rp_handle).c_str());
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+ rp_handle, "VUID-vkCmdPipelineBarrier-pDependencies-02285",
+ "%s: Barriers cannot be set during subpass %d of renderPass %s with no self-dependency specified.",
+ funcName, active_subpass, device_data->report_data->FormatHandle(rp_handle).c_str());
} else {
// Grab ref to current subpassDescription up-front for use below
const auto &sub_desc = rp_state->createInfo.pSubpasses[active_subpass];
@@ -8210,8 +7850,8 @@ bool CoreChecks::ValidateRenderPassPipelineBarriers(const char *funcName, CMD_BU
bool stage_mask_match = false;
for (const auto self_dep_index : self_dependencies) {
const auto &sub_dep = dependencies[self_dep_index];
- const auto &sub_src_stage_mask = ExpandPipelineStageFlags(device_extensions, sub_dep.srcStageMask);
- const auto &sub_dst_stage_mask = ExpandPipelineStageFlags(device_extensions, sub_dep.dstStageMask);
+ const auto &sub_src_stage_mask = ExpandPipelineStageFlags(device_data->device_extensions, sub_dep.srcStageMask);
+ const auto &sub_dst_stage_mask = ExpandPipelineStageFlags(device_data->device_extensions, sub_dep.dstStageMask);
stage_mask_match = ((sub_src_stage_mask == VK_PIPELINE_STAGE_ALL_COMMANDS_BIT) ||
(src_stage_mask == (sub_src_stage_mask & src_stage_mask))) &&
((sub_dst_stage_mask == VK_PIPELINE_STAGE_ALL_COMMANDS_BIT) ||
@@ -8221,27 +7861,27 @@ bool CoreChecks::ValidateRenderPassPipelineBarriers(const char *funcName, CMD_BU
if (!stage_mask_match) {
std::stringstream self_dep_ss;
stream_join(self_dep_ss, ", ", self_dependencies);
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
- rp_handle.handle, "VUID-vkCmdPipelineBarrier-pDependencies-02285",
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+ rp_handle, "VUID-vkCmdPipelineBarrier-pDependencies-02285",
"%s: Barrier srcStageMask(0x%X) is not a subset of VkSubpassDependency srcStageMask of any "
- "self-dependency of subpass %d of %s for which dstStageMask is also a subset. "
+ "self-dependency of subpass %d of renderPass %s for which dstStageMask is also a subset. "
"Candidate VkSubpassDependency are pDependencies entries [%s].",
- funcName, src_stage_mask, active_subpass, report_data->FormatHandle(rp_handle).c_str(),
+ funcName, src_stage_mask, active_subpass, device_data->report_data->FormatHandle(rp_handle).c_str(),
self_dep_ss.str().c_str());
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
- rp_handle.handle, "VUID-vkCmdPipelineBarrier-pDependencies-02285",
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+ rp_handle, "VUID-vkCmdPipelineBarrier-pDependencies-02285",
"%s: Barrier dstStageMask(0x%X) is not a subset of VkSubpassDependency dstStageMask of any "
- "self-dependency of subpass %d of %s for which srcStageMask is also a subset. "
+ "self-dependency of subpass %d of renderPass %s for which srcStageMask is also a subset. "
"Candidate VkSubpassDependency are pDependencies entries [%s].",
- funcName, dst_stage_mask, active_subpass, report_data->FormatHandle(rp_handle).c_str(),
+ funcName, dst_stage_mask, active_subpass, device_data->report_data->FormatHandle(rp_handle).c_str(),
self_dep_ss.str().c_str());
}
if (0 != buffer_mem_barrier_count) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
- rp_handle.handle, "VUID-vkCmdPipelineBarrier-bufferMemoryBarrierCount-01178",
- "%s: bufferMemoryBarrierCount is non-zero (%d) for subpass %d of %s.", funcName,
- buffer_mem_barrier_count, active_subpass, report_data->FormatHandle(rp_handle).c_str());
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+ rp_handle, "VUID-vkCmdPipelineBarrier-bufferMemoryBarrierCount-01178",
+ "%s: bufferMemoryBarrierCount is non-zero (%d) for subpass %d of renderPass %s.", funcName,
+ buffer_mem_barrier_count, active_subpass, device_data->report_data->FormatHandle(rp_handle).c_str());
}
for (uint32_t i = 0; i < mem_barrier_count; ++i) {
const auto &mb_src_access_mask = mem_barriers[i].srcAccessMask;
@@ -8258,25 +7898,25 @@ bool CoreChecks::ValidateRenderPassPipelineBarriers(const char *funcName, CMD_BU
std::stringstream self_dep_ss;
stream_join(self_dep_ss, ", ", self_dependencies);
skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, rp_handle.handle,
+ device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, rp_handle,
"VUID-vkCmdPipelineBarrier-pDependencies-02285",
"%s: Barrier pMemoryBarriers[%d].srcAccessMask(0x%X) is not a subset of VkSubpassDependency srcAccessMask "
- "for any self-dependency of subpass %d of %s for which dstAccessMask is also a subset. "
+ "for any self-dependency of subpass %d of renderPass %s for which dstAccessMask is also a subset. "
"Candidate VkSubpassDependency are pDependencies entries [%s].",
- funcName, i, mb_src_access_mask, active_subpass, report_data->FormatHandle(rp_handle).c_str(),
+ funcName, i, mb_src_access_mask, active_subpass, device_data->report_data->FormatHandle(rp_handle).c_str(),
self_dep_ss.str().c_str());
skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, rp_handle.handle,
+ device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, rp_handle,
"VUID-vkCmdPipelineBarrier-pDependencies-02285",
"%s: Barrier pMemoryBarriers[%d].dstAccessMask(0x%X) is not a subset of VkSubpassDependency dstAccessMask "
- "for any self-dependency of subpass %d of %s for which srcAccessMask is also a subset. "
+ "for any self-dependency of subpass %d of renderPass %s for which srcAccessMask is also a subset. "
"Candidate VkSubpassDependency are pDependencies entries [%s].",
- funcName, i, mb_dst_access_mask, active_subpass, report_data->FormatHandle(rp_handle).c_str(),
+ funcName, i, mb_dst_access_mask, active_subpass, device_data->report_data->FormatHandle(rp_handle).c_str(),
self_dep_ss.str().c_str());
}
}
- skip |= ValidateRenderPassImageBarriers(funcName, cb_state, active_subpass, sub_desc, rp_handle, dependencies,
+ skip |= ValidateRenderPassImageBarriers(device_data, funcName, cb_state, active_subpass, sub_desc, rp_handle, dependencies,
self_dependencies, image_mem_barrier_count, image_barriers);
bool flag_match = false;
@@ -8288,12 +7928,13 @@ bool CoreChecks::ValidateRenderPassPipelineBarriers(const char *funcName, CMD_BU
if (!flag_match) {
std::stringstream self_dep_ss;
stream_join(self_dep_ss, ", ", self_dependencies);
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
- rp_handle.handle, "VUID-vkCmdPipelineBarrier-pDependencies-02285",
- "%s: dependencyFlags param (0x%X) does not equal VkSubpassDependency dependencyFlags value for any "
- "self-dependency of subpass %d of %s. Candidate VkSubpassDependency are pDependencies entries [%s].",
- funcName, dependency_flags, cb_state->activeSubpass, report_data->FormatHandle(rp_handle).c_str(),
- self_dep_ss.str().c_str());
+ skip |= log_msg(
+ device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, rp_handle,
+ "VUID-vkCmdPipelineBarrier-pDependencies-02285",
+ "%s: dependencyFlags param (0x%X) does not equal VkSubpassDependency dependencyFlags value for any "
+ "self-dependency of subpass %d of renderPass %s. Candidate VkSubpassDependency are pDependencies entries [%s].",
+ funcName, dependency_flags, cb_state->activeSubpass, device_data->report_data->FormatHandle(rp_handle).c_str(),
+ self_dep_ss.str().c_str());
}
}
return skip;
@@ -8432,28 +8073,30 @@ static const std::string buffer_error_codes[] = {
class ValidatorState {
public:
- ValidatorState(const CoreChecks *device_data, const char *func_name, const CMD_BUFFER_STATE *cb_state,
- const VulkanTypedHandle &barrier_handle, const VkSharingMode sharing_mode, const std::string *val_codes)
+ ValidatorState(const layer_data *device_data, const char *func_name, const GLOBAL_CB_NODE *cb_state,
+ const uint64_t barrier_handle64, const VkSharingMode sharing_mode, const VulkanObjectType object_type,
+ const std::string *val_codes)
: report_data_(device_data->report_data),
func_name_(func_name),
cb_handle64_(HandleToUint64(cb_state->commandBuffer)),
- barrier_handle_(barrier_handle),
+ barrier_handle64_(barrier_handle64),
sharing_mode_(sharing_mode),
+ object_type_(object_type),
val_codes_(val_codes),
limit_(static_cast<uint32_t>(device_data->physical_device_state->queue_family_properties.size())),
mem_ext_(device_data->device_extensions.vk_khr_external_memory) {}
     // Create a validator state from an image state... reducing the image-specific info to the generic version.
- ValidatorState(const CoreChecks *device_data, const char *func_name, const CMD_BUFFER_STATE *cb_state,
+ ValidatorState(const layer_data *device_data, const char *func_name, const GLOBAL_CB_NODE *cb_state,
const VkImageMemoryBarrier *barrier, const IMAGE_STATE *state)
- : ValidatorState(device_data, func_name, cb_state, VulkanTypedHandle(barrier->image, kVulkanObjectTypeImage),
- state->createInfo.sharingMode, image_error_codes) {}
+ : ValidatorState(device_data, func_name, cb_state, HandleToUint64(barrier->image), state->createInfo.sharingMode,
+ kVulkanObjectTypeImage, image_error_codes) {}
     // Create a validator state from a buffer state... reducing the buffer-specific info to the generic version.
- ValidatorState(const CoreChecks *device_data, const char *func_name, const CMD_BUFFER_STATE *cb_state,
+ ValidatorState(const layer_data *device_data, const char *func_name, const GLOBAL_CB_NODE *cb_state,
const VkBufferMemoryBarrier *barrier, const BUFFER_STATE *state)
- : ValidatorState(device_data, func_name, cb_state, VulkanTypedHandle(barrier->buffer, kVulkanObjectTypeBuffer),
- state->createInfo.sharingMode, buffer_error_codes) {}
+ : ValidatorState(device_data, func_name, cb_state, HandleToUint64(barrier->buffer), state->createInfo.sharingMode,
+                         kVulkanObjectTypeBuffer, buffer_error_codes) {}
    // Log the messages using boilerplate from object state, and Vu-specific information from the template arg
    // One- and two-family versions; in the single-family version, Vu holds the name of the passed parameter
@@ -8462,7 +8105,7 @@ class ValidatorState {
const char *annotation = GetFamilyAnnotation(family);
return log_msg(report_data_, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, cb_handle64_,
val_code, "%s: Barrier using %s %s created with sharingMode %s, has %s %u%s. %s", func_name_,
- GetTypeString(), report_data_->FormatHandle(barrier_handle_).c_str(), GetModeString(), param_name, family,
+ GetTypeString(), report_data_->FormatHandle(barrier_handle64_).c_str(), GetModeString(), param_name, family,
annotation, vu_summary[vu_index]);
}
@@ -8473,14 +8116,14 @@ class ValidatorState {
return log_msg(
report_data_, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, cb_handle64_, val_code,
"%s: Barrier using %s %s created with sharingMode %s, has srcQueueFamilyIndex %u%s and dstQueueFamilyIndex %u%s. %s",
- func_name_, GetTypeString(), report_data_->FormatHandle(barrier_handle_).c_str(), GetModeString(), src_family,
+ func_name_, GetTypeString(), report_data_->FormatHandle(barrier_handle64_).c_str(), GetModeString(), src_family,
src_annotation, dst_family, dst_annotation, vu_summary[vu_index]);
}
// This abstract Vu can only be tested at submit time, thus we need a callback from the closure containing the needed
    // data. Note that the mem_barrier is copied to the closure as the lambda lifespan exceeds the guarantees of validity for
// application input.
- static bool ValidateAtQueueSubmit(const VkQueue queue, const CoreChecks *device_data, uint32_t src_family, uint32_t dst_family,
+ static bool ValidateAtQueueSubmit(const VkQueue queue, const layer_data *device_data, uint32_t src_family, uint32_t dst_family,
const ValidatorState &val) {
auto queue_data_it = device_data->queueMap.find(queue);
if (queue_data_it == device_data->queueMap.end()) return false;
@@ -8495,7 +8138,7 @@ class ValidatorState {
"%s: Barrier submitted to queue with family index %u, using %s %s created with sharingMode %s, has "
"srcQueueFamilyIndex %u%s and dstQueueFamilyIndex %u%s. %s",
"vkQueueSubmit", queue_family, val.GetTypeString(),
- device_data->report_data->FormatHandle(val.barrier_handle_).c_str(), val.GetModeString(), src_family,
+ device_data->report_data->FormatHandle(val.barrier_handle64_).c_str(), val.GetModeString(), src_family,
src_annotation, dst_family, dst_annotation, vu_summary[kSubmitQueueMustMatchSrcOrDst]);
}
return false;
@@ -8532,21 +8175,22 @@ class ValidatorState {
return invalid;
};
}
- const char *GetTypeString() const { return object_string[barrier_handle_.type]; }
+ const char *GetTypeString() const { return object_string[object_type_]; }
VkSharingMode GetSharingMode() const { return sharing_mode_; }
protected:
const debug_report_data *const report_data_;
const char *const func_name_;
const uint64_t cb_handle64_;
- const VulkanTypedHandle barrier_handle_;
+ const uint64_t barrier_handle64_;
const VkSharingMode sharing_mode_;
+ const VulkanObjectType object_type_;
const std::string *val_codes_;
const uint32_t limit_;
const bool mem_ext_;
};
-bool Validate(const CoreChecks *device_data, const char *func_name, CMD_BUFFER_STATE *cb_state, const ValidatorState &val,
+bool Validate(const layer_data *device_data, const char *func_name, GLOBAL_CB_NODE *cb_state, const ValidatorState &val,
const uint32_t src_queue_family, const uint32_t dst_queue_family) {
bool skip = false;
@@ -8601,76 +8245,76 @@ bool Validate(const CoreChecks *device_data, const char *func_name, CMD_BUFFER_S
} // namespace barrier_queue_families
// Type specific wrapper for image barriers
-bool CoreChecks::ValidateBarrierQueueFamilies(const char *func_name, CMD_BUFFER_STATE *cb_state,
- const VkImageMemoryBarrier &barrier, const IMAGE_STATE *state_data) {
+bool ValidateBarrierQueueFamilies(const layer_data *device_data, const char *func_name, GLOBAL_CB_NODE *cb_state,
+ const VkImageMemoryBarrier *barrier, const IMAGE_STATE *state_data) {
// State data is required
if (!state_data) {
return false;
}
// Create the validator state from the image state
- barrier_queue_families::ValidatorState val(this, func_name, cb_state, &barrier, state_data);
- const uint32_t src_queue_family = barrier.srcQueueFamilyIndex;
- const uint32_t dst_queue_family = barrier.dstQueueFamilyIndex;
- return barrier_queue_families::Validate(this, func_name, cb_state, val, src_queue_family, dst_queue_family);
+ barrier_queue_families::ValidatorState val(device_data, func_name, cb_state, barrier, state_data);
+ const uint32_t src_queue_family = barrier->srcQueueFamilyIndex;
+ const uint32_t dst_queue_family = barrier->dstQueueFamilyIndex;
+ return barrier_queue_families::Validate(device_data, func_name, cb_state, val, src_queue_family, dst_queue_family);
}
// Type specific wrapper for buffer barriers
-bool CoreChecks::ValidateBarrierQueueFamilies(const char *func_name, CMD_BUFFER_STATE *cb_state,
- const VkBufferMemoryBarrier &barrier, const BUFFER_STATE *state_data) {
+bool ValidateBarrierQueueFamilies(const layer_data *device_data, const char *func_name, GLOBAL_CB_NODE *cb_state,
+ const VkBufferMemoryBarrier *barrier, const BUFFER_STATE *state_data) {
// State data is required
if (!state_data) {
return false;
}
// Create the validator state from the buffer state
- barrier_queue_families::ValidatorState val(this, func_name, cb_state, &barrier, state_data);
- const uint32_t src_queue_family = barrier.srcQueueFamilyIndex;
- const uint32_t dst_queue_family = barrier.dstQueueFamilyIndex;
- return barrier_queue_families::Validate(this, func_name, cb_state, val, src_queue_family, dst_queue_family);
+ barrier_queue_families::ValidatorState val(device_data, func_name, cb_state, barrier, state_data);
+ const uint32_t src_queue_family = barrier->srcQueueFamilyIndex;
+ const uint32_t dst_queue_family = barrier->dstQueueFamilyIndex;
+ return barrier_queue_families::Validate(device_data, func_name, cb_state, val, src_queue_family, dst_queue_family);
}
-bool CoreChecks::ValidateBarriers(const char *funcName, CMD_BUFFER_STATE *cb_state, VkPipelineStageFlags src_stage_mask,
- VkPipelineStageFlags dst_stage_mask, uint32_t memBarrierCount,
- const VkMemoryBarrier *pMemBarriers, uint32_t bufferBarrierCount,
+bool CoreChecks::ValidateBarriers(layer_data *device_data, const char *funcName, GLOBAL_CB_NODE *cb_state,
+ VkPipelineStageFlags src_stage_mask, VkPipelineStageFlags dst_stage_mask,
+ uint32_t memBarrierCount, const VkMemoryBarrier *pMemBarriers, uint32_t bufferBarrierCount,
const VkBufferMemoryBarrier *pBufferMemBarriers, uint32_t imageMemBarrierCount,
const VkImageMemoryBarrier *pImageMemBarriers) {
bool skip = false;
for (uint32_t i = 0; i < memBarrierCount; ++i) {
const auto &mem_barrier = pMemBarriers[i];
- if (!ValidateAccessMaskPipelineStage(device_extensions, mem_barrier.srcAccessMask, src_stage_mask)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ if (!ValidateAccessMaskPipelineStage(device_data->device_extensions, mem_barrier.srcAccessMask, src_stage_mask)) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdPipelineBarrier-pMemoryBarriers-01184",
"%s: pMemBarriers[%d].srcAccessMask (0x%X) is not supported by srcStageMask (0x%X).", funcName, i,
mem_barrier.srcAccessMask, src_stage_mask);
}
- if (!ValidateAccessMaskPipelineStage(device_extensions, mem_barrier.dstAccessMask, dst_stage_mask)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ if (!ValidateAccessMaskPipelineStage(device_data->device_extensions, mem_barrier.dstAccessMask, dst_stage_mask)) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdPipelineBarrier-pMemoryBarriers-01185",
"%s: pMemBarriers[%d].dstAccessMask (0x%X) is not supported by dstStageMask (0x%X).", funcName, i,
mem_barrier.dstAccessMask, dst_stage_mask);
}
}
for (uint32_t i = 0; i < imageMemBarrierCount; ++i) {
- const auto &mem_barrier = pImageMemBarriers[i];
- if (!ValidateAccessMaskPipelineStage(device_extensions, mem_barrier.srcAccessMask, src_stage_mask)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ auto mem_barrier = &pImageMemBarriers[i];
+ if (!ValidateAccessMaskPipelineStage(device_data->device_extensions, mem_barrier->srcAccessMask, src_stage_mask)) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdPipelineBarrier-pMemoryBarriers-01184",
"%s: pImageMemBarriers[%d].srcAccessMask (0x%X) is not supported by srcStageMask (0x%X).", funcName, i,
- mem_barrier.srcAccessMask, src_stage_mask);
+ mem_barrier->srcAccessMask, src_stage_mask);
}
- if (!ValidateAccessMaskPipelineStage(device_extensions, mem_barrier.dstAccessMask, dst_stage_mask)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ if (!ValidateAccessMaskPipelineStage(device_data->device_extensions, mem_barrier->dstAccessMask, dst_stage_mask)) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdPipelineBarrier-pMemoryBarriers-01185",
"%s: pImageMemBarriers[%d].dstAccessMask (0x%X) is not supported by dstStageMask (0x%X).", funcName, i,
- mem_barrier.dstAccessMask, dst_stage_mask);
+ mem_barrier->dstAccessMask, dst_stage_mask);
}
- auto image_data = GetImageState(mem_barrier.image);
- skip |= ValidateBarrierQueueFamilies(funcName, cb_state, mem_barrier, image_data);
+ auto image_data = GetImageState(mem_barrier->image);
+ skip |= ValidateBarrierQueueFamilies(device_data, funcName, cb_state, mem_barrier, image_data);
- if (mem_barrier.newLayout == VK_IMAGE_LAYOUT_UNDEFINED || mem_barrier.newLayout == VK_IMAGE_LAYOUT_PREINITIALIZED) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ if (mem_barrier->newLayout == VK_IMAGE_LAYOUT_UNDEFINED || mem_barrier->newLayout == VK_IMAGE_LAYOUT_PREINITIALIZED) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(cb_state->commandBuffer), "VUID-VkImageMemoryBarrier-newLayout-01198",
"%s: Image Layout cannot be transitioned to UNDEFINED or PREINITIALIZED.", funcName);
}
@@ -8680,83 +8324,89 @@ bool CoreChecks::ValidateBarriers(const char *funcName, CMD_BUFFER_STATE *cb_sta
// "Non-sparse resources must be bound completely and contiguously to a single VkDeviceMemory object before
// recording commands in a command buffer."
// TODO: Update this when VUID is defined
- skip |= ValidateMemoryIsBoundToImage(image_data, funcName, kVUIDUndefined);
+ skip |= ValidateMemoryIsBoundToImage(device_data, image_data, funcName, kVUIDUndefined);
- const auto aspect_mask = mem_barrier.subresourceRange.aspectMask;
- skip |= ValidateImageAspectMask(image_data->image, image_data->createInfo.format, aspect_mask, funcName);
+ auto aspect_mask = mem_barrier->subresourceRange.aspectMask;
+ skip |= ValidateImageAspectMask(device_data, image_data->image, image_data->createInfo.format, aspect_mask, funcName);
- const std::string param_name = "pImageMemoryBarriers[" + std::to_string(i) + "].subresourceRange";
- skip |= ValidateImageBarrierSubresourceRange(image_data, mem_barrier.subresourceRange, funcName, param_name.c_str());
+ std::string param_name = "pImageMemoryBarriers[" + std::to_string(i) + "].subresourceRange";
+ skip |= ValidateImageBarrierSubresourceRange(device_data, image_data, mem_barrier->subresourceRange, funcName,
+ param_name.c_str());
}
}
for (uint32_t i = 0; i < bufferBarrierCount; ++i) {
- const auto &mem_barrier = pBufferMemBarriers[i];
+ auto mem_barrier = &pBufferMemBarriers[i];
+ if (!mem_barrier) continue;
- if (!ValidateAccessMaskPipelineStage(device_extensions, mem_barrier.srcAccessMask, src_stage_mask)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ if (!ValidateAccessMaskPipelineStage(device_data->device_extensions, mem_barrier->srcAccessMask, src_stage_mask)) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdPipelineBarrier-pMemoryBarriers-01184",
"%s: pBufferMemBarriers[%d].srcAccessMask (0x%X) is not supported by srcStageMask (0x%X).", funcName, i,
- mem_barrier.srcAccessMask, src_stage_mask);
+ mem_barrier->srcAccessMask, src_stage_mask);
}
- if (!ValidateAccessMaskPipelineStage(device_extensions, mem_barrier.dstAccessMask, dst_stage_mask)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ if (!ValidateAccessMaskPipelineStage(device_data->device_extensions, mem_barrier->dstAccessMask, dst_stage_mask)) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdPipelineBarrier-pMemoryBarriers-01185",
"%s: pBufferMemBarriers[%d].dstAccessMask (0x%X) is not supported by dstStageMask (0x%X).", funcName, i,
- mem_barrier.dstAccessMask, dst_stage_mask);
+ mem_barrier->dstAccessMask, dst_stage_mask);
}
// Validate buffer barrier queue family indices
- auto buffer_state = GetBufferState(mem_barrier.buffer);
- skip |= ValidateBarrierQueueFamilies(funcName, cb_state, mem_barrier, buffer_state);
+ auto buffer_state = GetBufferState(mem_barrier->buffer);
+ skip |= ValidateBarrierQueueFamilies(device_data, funcName, cb_state, mem_barrier, buffer_state);
if (buffer_state) {
// There is no VUID for this, but there is blanket text:
// "Non-sparse resources must be bound completely and contiguously to a single VkDeviceMemory object before
// recording commands in a command buffer"
// TODO: Update this when VUID is defined
- skip |= ValidateMemoryIsBoundToBuffer(buffer_state, funcName, kVUIDUndefined);
+ skip |= ValidateMemoryIsBoundToBuffer(device_data, buffer_state, funcName, kVUIDUndefined);
auto buffer_size = buffer_state->createInfo.size;
- if (mem_barrier.offset >= buffer_size) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(cb_state->commandBuffer), "VUID-VkBufferMemoryBarrier-offset-01187",
- "%s: Buffer Barrier %s has offset 0x%" PRIx64 " which is not less than total size 0x%" PRIx64 ".",
- funcName, report_data->FormatHandle(mem_barrier.buffer).c_str(), HandleToUint64(mem_barrier.offset),
- HandleToUint64(buffer_size));
- } else if (mem_barrier.size != VK_WHOLE_SIZE && (mem_barrier.offset + mem_barrier.size > buffer_size)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(cb_state->commandBuffer), "VUID-VkBufferMemoryBarrier-size-01189",
- "%s: Buffer Barrier %s has offset 0x%" PRIx64 " and size 0x%" PRIx64
- " whose sum is greater than total size 0x%" PRIx64 ".",
- funcName, report_data->FormatHandle(mem_barrier.buffer).c_str(), HandleToUint64(mem_barrier.offset),
- HandleToUint64(mem_barrier.size), HandleToUint64(buffer_size));
+ if (mem_barrier->offset >= buffer_size) {
+ skip |=
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ HandleToUint64(cb_state->commandBuffer), "VUID-VkBufferMemoryBarrier-offset-01187",
+ "%s: Buffer Barrier %s has offset 0x%" PRIx64 " which is not less than total size 0x%" PRIx64 ".",
+ funcName, device_data->report_data->FormatHandle(mem_barrier->buffer).c_str(),
+ HandleToUint64(mem_barrier->offset), HandleToUint64(buffer_size));
+ } else if (mem_barrier->size != VK_WHOLE_SIZE && (mem_barrier->offset + mem_barrier->size > buffer_size)) {
+ skip |=
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ HandleToUint64(cb_state->commandBuffer), "VUID-VkBufferMemoryBarrier-size-01189",
+ "%s: Buffer Barrier %s has offset 0x%" PRIx64 " and size 0x%" PRIx64
+ " whose sum is greater than total size 0x%" PRIx64 ".",
+ funcName, device_data->report_data->FormatHandle(mem_barrier->buffer).c_str(),
+ HandleToUint64(mem_barrier->offset), HandleToUint64(mem_barrier->size), HandleToUint64(buffer_size));
}
}
}
- skip |= ValidateBarriersQFOTransferUniqueness(funcName, cb_state, bufferBarrierCount, pBufferMemBarriers, imageMemBarrierCount,
- pImageMemBarriers);
+ skip |= ValidateBarriersQFOTransferUniqueness(device_data, funcName, cb_state, bufferBarrierCount, pBufferMemBarriers,
+ imageMemBarrierCount, pImageMemBarriers);
return skip;
}
-bool CoreChecks::ValidateEventStageMask(VkQueue queue, CMD_BUFFER_STATE *pCB, uint32_t eventCount, size_t firstEventIndex,
+bool CoreChecks::ValidateEventStageMask(VkQueue queue, GLOBAL_CB_NODE *pCB, uint32_t eventCount, size_t firstEventIndex,
VkPipelineStageFlags sourceStageMask) {
bool skip = false;
VkPipelineStageFlags stageMask = 0;
+ layer_data *dev_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
for (uint32_t i = 0; i < eventCount; ++i) {
auto event = pCB->events[firstEventIndex + i];
- auto queue_data = queueMap.find(queue);
- if (queue_data == queueMap.end()) return false;
+ auto queue_data = dev_data->queueMap.find(queue);
+ if (queue_data == dev_data->queueMap.end()) return false;
auto event_data = queue_data->second.eventToStageMap.find(event);
if (event_data != queue_data->second.eventToStageMap.end()) {
stageMask |= event_data->second;
} else {
- auto global_event_data = GetEventState(event);
+ auto global_event_data = GetEventNode(event);
if (!global_event_data) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT,
HandleToUint64(event), kVUID_Core_DrawState_InvalidEvent,
- "%s cannot be waited on if it has never been set.", report_data->FormatHandle(event).c_str());
+ "Event %s cannot be waited on if it has never been set.",
+ dev_data->report_data->FormatHandle(event).c_str());
} else {
stageMask |= global_event_data->stageMask;
}
@@ -8765,7 +8415,7 @@ bool CoreChecks::ValidateEventStageMask(VkQueue queue, CMD_BUFFER_STATE *pCB, ui
// TODO: Need to validate that host_bit is only set if set event is called
// but set event can be called at any time.
if (sourceStageMask != stageMask && sourceStageMask != (stageMask | VK_PIPELINE_STAGE_HOST_BIT)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(pCB->commandBuffer), "VUID-vkCmdWaitEvents-srcStageMask-parameter",
"Submitting cmdbuffer with call to VkCmdWaitEvents using srcStageMask 0x%X which must be the bitwise OR of "
"the stageMask parameters used in calls to vkCmdSetEvent and VK_PIPELINE_STAGE_HOST_BIT if used with "
@@ -8807,16 +8457,16 @@ static const VkPipelineStageFlags stage_flag_bit_array[] = {VK_PIPELINE_STAGE_CO
VK_PIPELINE_STAGE_TRANSFER_BIT,
VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT};
-bool CoreChecks::CheckStageMaskQueueCompatibility(VkCommandBuffer command_buffer, VkPipelineStageFlags stage_mask,
- VkQueueFlags queue_flags, const char *function, const char *src_or_dest,
- const char *error_code) {
+bool CoreChecks::CheckStageMaskQueueCompatibility(layer_data *dev_data, VkCommandBuffer command_buffer,
+ VkPipelineStageFlags stage_mask, VkQueueFlags queue_flags, const char *function,
+ const char *src_or_dest, const char *error_code) {
bool skip = false;
    // Look up each bit in the stage mask and check for overlap between its table bits and queue_flags
for (const auto &item : stage_flag_bit_array) {
if (stage_mask & item) {
if ((supported_pipeline_stages_table[item] & queue_flags) == 0) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(command_buffer), error_code,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, HandleToUint64(command_buffer), error_code,
"%s(): %s flag %s is not compatible with the queue family properties of this command buffer.",
function, src_or_dest, string_VkPipelineStageFlagBits(static_cast<VkPipelineStageFlagBits>(item)));
}
@@ -8827,7 +8477,7 @@ bool CoreChecks::CheckStageMaskQueueCompatibility(VkCommandBuffer command_buffer
// Check if all barriers are of a given operation type.
template <typename Barrier, typename OpCheck>
-bool AllTransferOp(const COMMAND_POOL_STATE *pool, OpCheck &op_check, uint32_t count, const Barrier *barriers) {
+bool AllTransferOp(const COMMAND_POOL_NODE *pool, OpCheck &op_check, uint32_t count, const Barrier *barriers) {
if (!pool) return false;
for (uint32_t b = 0; b < count; b++) {
@@ -8837,11 +8487,12 @@ bool AllTransferOp(const COMMAND_POOL_STATE *pool, OpCheck &op_check, uint32_t c
}
// Look at the barriers to see if they are all release or all acquire; the result impacts queue properties validation
-BarrierOperationsType CoreChecks::ComputeBarrierOperationsType(CMD_BUFFER_STATE *cb_state, uint32_t buffer_barrier_count,
+BarrierOperationsType CoreChecks::ComputeBarrierOperationsType(layer_data *device_data, GLOBAL_CB_NODE *cb_state,
+ uint32_t buffer_barrier_count,
const VkBufferMemoryBarrier *buffer_barriers,
uint32_t image_barrier_count,
const VkImageMemoryBarrier *image_barriers) {
- auto pool = GetCommandPoolState(cb_state->createInfo.commandPool);
+ auto pool = GetCommandPoolNode(cb_state->createInfo.commandPool);
BarrierOperationsType op_type = kGeneral;
// Look at the barrier details only if they exist
@@ -8859,13 +8510,13 @@ BarrierOperationsType CoreChecks::ComputeBarrierOperationsType(CMD_BUFFER_STATE
return op_type;
}
-bool CoreChecks::ValidateStageMasksAgainstQueueCapabilities(CMD_BUFFER_STATE const *cb_state,
+bool CoreChecks::ValidateStageMasksAgainstQueueCapabilities(layer_data *dev_data, GLOBAL_CB_NODE const *cb_state,
VkPipelineStageFlags source_stage_mask,
VkPipelineStageFlags dest_stage_mask,
BarrierOperationsType barrier_op_type, const char *function,
const char *error_code) {
bool skip = false;
- uint32_t queue_family_index = commandPoolMap[cb_state->createInfo.commandPool].get()->queueFamilyIndex;
+ uint32_t queue_family_index = dev_data->commandPoolMap[cb_state->createInfo.commandPool].queueFamilyIndex;
auto physical_device_state = GetPhysicalDeviceState();
// Any pipeline stage included in srcStageMask or dstStageMask must be supported by the capabilities of the queue family
@@ -8877,13 +8528,13 @@ bool CoreChecks::ValidateStageMasksAgainstQueueCapabilities(CMD_BUFFER_STATE con
// Only check the source stage mask if any barriers aren't "acquire ownership"
if ((barrier_op_type != kAllAcquire) && (source_stage_mask & VK_PIPELINE_STAGE_ALL_COMMANDS_BIT) == 0) {
- skip |= CheckStageMaskQueueCompatibility(cb_state->commandBuffer, source_stage_mask, specified_queue_flags, function,
- "srcStageMask", error_code);
+ skip |= CheckStageMaskQueueCompatibility(dev_data, cb_state->commandBuffer, source_stage_mask, specified_queue_flags,
+ function, "srcStageMask", error_code);
}
// Only check the dest stage mask if any barriers aren't "release ownership"
if ((barrier_op_type != kAllRelease) && (dest_stage_mask & VK_PIPELINE_STAGE_ALL_COMMANDS_BIT) == 0) {
- skip |= CheckStageMaskQueueCompatibility(cb_state->commandBuffer, dest_stage_mask, specified_queue_flags, function,
- "dstStageMask", error_code);
+ skip |= CheckStageMaskQueueCompatibility(dev_data, cb_state->commandBuffer, dest_stage_mask, specified_queue_flags,
+ function, "dstStageMask", error_code);
}
}
return skip;
@@ -8894,25 +8545,27 @@ bool CoreChecks::PreCallValidateCmdWaitEvents(VkCommandBuffer commandBuffer, uin
uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
assert(cb_state);
- auto barrier_op_type = ComputeBarrierOperationsType(cb_state, bufferMemoryBarrierCount, pBufferMemoryBarriers,
+ auto barrier_op_type = ComputeBarrierOperationsType(device_data, cb_state, bufferMemoryBarrierCount, pBufferMemoryBarriers,
imageMemoryBarrierCount, pImageMemoryBarriers);
- bool skip = ValidateStageMasksAgainstQueueCapabilities(cb_state, sourceStageMask, dstStageMask, barrier_op_type,
+ bool skip = ValidateStageMasksAgainstQueueCapabilities(device_data, cb_state, sourceStageMask, dstStageMask, barrier_op_type,
"vkCmdWaitEvents", "VUID-vkCmdWaitEvents-srcStageMask-01164");
- skip |= ValidateStageMaskGsTsEnables(sourceStageMask, "vkCmdWaitEvents()", "VUID-vkCmdWaitEvents-srcStageMask-01159",
- "VUID-vkCmdWaitEvents-srcStageMask-01161", "VUID-vkCmdWaitEvents-srcStageMask-02111",
- "VUID-vkCmdWaitEvents-srcStageMask-02112");
- skip |= ValidateStageMaskGsTsEnables(dstStageMask, "vkCmdWaitEvents()", "VUID-vkCmdWaitEvents-dstStageMask-01160",
+ skip |= ValidateStageMaskGsTsEnables(device_data, sourceStageMask, "vkCmdWaitEvents()",
+ "VUID-vkCmdWaitEvents-srcStageMask-01159", "VUID-vkCmdWaitEvents-srcStageMask-01161",
+ "VUID-vkCmdWaitEvents-srcStageMask-02111", "VUID-vkCmdWaitEvents-srcStageMask-02112");
+ skip |= ValidateStageMaskGsTsEnables(device_data, dstStageMask, "vkCmdWaitEvents()", "VUID-vkCmdWaitEvents-dstStageMask-01160",
"VUID-vkCmdWaitEvents-dstStageMask-01162", "VUID-vkCmdWaitEvents-dstStageMask-02113",
"VUID-vkCmdWaitEvents-dstStageMask-02114");
- skip |= ValidateCmdQueueFlags(cb_state, "vkCmdWaitEvents()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
+ skip |= ValidateCmdQueueFlags(device_data, cb_state, "vkCmdWaitEvents()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
"VUID-vkCmdWaitEvents-commandBuffer-cmdpool");
- skip |= ValidateCmd(cb_state, CMD_WAITEVENTS, "vkCmdWaitEvents()");
- skip |= ValidateBarriersToImages(cb_state, imageMemoryBarrierCount, pImageMemoryBarriers, "vkCmdWaitEvents()");
- skip |= ValidateBarriers("vkCmdWaitEvents()", cb_state, sourceStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers,
- bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
+ skip |= ValidateCmd(device_data, cb_state, CMD_WAITEVENTS, "vkCmdWaitEvents()");
+ skip |= ValidateBarriersToImages(device_data, cb_state, imageMemoryBarrierCount, pImageMemoryBarriers, "vkCmdWaitEvents()");
+ skip |= ValidateBarriers(device_data, "vkCmdWaitEvents()", cb_state, sourceStageMask, dstStageMask, memoryBarrierCount,
+ pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount,
+ pImageMemoryBarriers);
return skip;
}
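
The ValidateEventStageMask path above requires the wait's srcStageMask to be the bitwise OR of the stage masks used in the matching vkCmdSetEvent calls (optionally plus VK_PIPELINE_STAGE_HOST_BIT). A minimal sketch, assuming hypothetical cmd/event handles (not part of this diff):

    // Sketch only: cmd and event are assumed valid handles.
    vkCmdSetEvent(cmd, event, VK_PIPELINE_STAGE_TRANSFER_BIT);
    // ... work whose completion the event signals ...
    // srcStageMask repeats the stage mask passed to vkCmdSetEvent, so the
    // submit-time check recorded above passes.
    vkCmdWaitEvents(cmd, 1, &event,
                    VK_PIPELINE_STAGE_TRANSFER_BIT,         // srcStageMask
                    VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,  // dstStageMask
                    0, nullptr, 0, nullptr, 0, nullptr);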
@@ -8921,12 +8574,13 @@ void CoreChecks::PreCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint3
uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
auto first_event_index = cb_state->events.size();
for (uint32_t i = 0; i < eventCount; ++i) {
- auto event_state = GetEventState(pEvents[i]);
+ auto event_state = GetEventNode(pEvents[i]);
if (event_state) {
- AddCommandBufferBinding(&event_state->cb_bindings, VulkanTypedHandle(pEvents[i], kVulkanObjectTypeEvent), cb_state);
+ AddCommandBufferBinding(&event_state->cb_bindings, {HandleToUint64(pEvents[i]), kVulkanObjectTypeEvent}, cb_state);
event_state->cb_bindings.insert(cb_state);
}
cb_state->waitedEvents.insert(pEvents[i]);
@@ -8934,9 +8588,9 @@ void CoreChecks::PreCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint3
}
cb_state->eventUpdates.emplace_back(
[=](VkQueue q) { return ValidateEventStageMask(q, cb_state, eventCount, first_event_index, sourceStageMask); });
- TransitionImageLayouts(cb_state, imageMemoryBarrierCount, pImageMemoryBarriers);
- if (enabled.gpu_validation) {
- GpuPreCallValidateCmdWaitEvents(sourceStageMask);
+ TransitionImageLayouts(device_data, cb_state, imageMemoryBarrierCount, pImageMemoryBarriers);
+ if (GetEnables()->gpu_validation) {
+ GpuPreCallValidateCmdWaitEvents(device_data, sourceStageMask);
}
}
@@ -8945,8 +8599,9 @@ void CoreChecks::PostCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint
uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
- RecordBarriersQFOTransfers(cb_state, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount,
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
+ RecordBarriersQFOTransfers(device_data, cb_state, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount,
pImageMemoryBarriers);
}
@@ -8957,35 +8612,38 @@ bool CoreChecks::PreCallValidateCmdPipelineBarrier(VkCommandBuffer commandBuffer
const VkBufferMemoryBarrier *pBufferMemoryBarriers,
uint32_t imageMemoryBarrierCount,
const VkImageMemoryBarrier *pImageMemoryBarriers) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
assert(cb_state);
bool skip = false;
- auto barrier_op_type = ComputeBarrierOperationsType(cb_state, bufferMemoryBarrierCount, pBufferMemoryBarriers,
+ auto barrier_op_type = ComputeBarrierOperationsType(device_data, cb_state, bufferMemoryBarrierCount, pBufferMemoryBarriers,
imageMemoryBarrierCount, pImageMemoryBarriers);
- skip |= ValidateStageMasksAgainstQueueCapabilities(cb_state, srcStageMask, dstStageMask, barrier_op_type,
+ skip |= ValidateStageMasksAgainstQueueCapabilities(device_data, cb_state, srcStageMask, dstStageMask, barrier_op_type,
"vkCmdPipelineBarrier", "VUID-vkCmdPipelineBarrier-srcStageMask-01183");
- skip |= ValidateCmdQueueFlags(cb_state, "vkCmdPipelineBarrier()",
+ skip |= ValidateCmdQueueFlags(device_data, cb_state, "vkCmdPipelineBarrier()",
VK_QUEUE_TRANSFER_BIT | VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
"VUID-vkCmdPipelineBarrier-commandBuffer-cmdpool");
- skip |= ValidateCmd(cb_state, CMD_PIPELINEBARRIER, "vkCmdPipelineBarrier()");
- skip |=
- ValidateStageMaskGsTsEnables(srcStageMask, "vkCmdPipelineBarrier()", "VUID-vkCmdPipelineBarrier-srcStageMask-01168",
- "VUID-vkCmdPipelineBarrier-srcStageMask-01170", "VUID-vkCmdPipelineBarrier-srcStageMask-02115",
- "VUID-vkCmdPipelineBarrier-srcStageMask-02116");
- skip |=
- ValidateStageMaskGsTsEnables(dstStageMask, "vkCmdPipelineBarrier()", "VUID-vkCmdPipelineBarrier-dstStageMask-01169",
- "VUID-vkCmdPipelineBarrier-dstStageMask-01171", "VUID-vkCmdPipelineBarrier-dstStageMask-02117",
- "VUID-vkCmdPipelineBarrier-dstStageMask-02118");
+ skip |= ValidateCmd(device_data, cb_state, CMD_PIPELINEBARRIER, "vkCmdPipelineBarrier()");
+ skip |= ValidateStageMaskGsTsEnables(
+ device_data, srcStageMask, "vkCmdPipelineBarrier()", "VUID-vkCmdPipelineBarrier-srcStageMask-01168",
+ "VUID-vkCmdPipelineBarrier-srcStageMask-01170", "VUID-vkCmdPipelineBarrier-srcStageMask-02115",
+ "VUID-vkCmdPipelineBarrier-srcStageMask-02116");
+ skip |= ValidateStageMaskGsTsEnables(
+ device_data, dstStageMask, "vkCmdPipelineBarrier()", "VUID-vkCmdPipelineBarrier-dstStageMask-01169",
+ "VUID-vkCmdPipelineBarrier-dstStageMask-01171", "VUID-vkCmdPipelineBarrier-dstStageMask-02117",
+ "VUID-vkCmdPipelineBarrier-dstStageMask-02118");
if (cb_state->activeRenderPass) {
- skip |= ValidateRenderPassPipelineBarriers("vkCmdPipelineBarrier()", cb_state, srcStageMask, dstStageMask, dependencyFlags,
- memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount,
+ skip |= ValidateRenderPassPipelineBarriers(device_data, "vkCmdPipelineBarrier()", cb_state, srcStageMask, dstStageMask,
+ dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount,
pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
if (skip) return true; // Early return to avoid redundant errors from below calls
}
- skip |= ValidateBarriersToImages(cb_state, imageMemoryBarrierCount, pImageMemoryBarriers, "vkCmdPipelineBarrier()");
- skip |= ValidateBarriers("vkCmdPipelineBarrier()", cb_state, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers,
- bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
+ skip |=
+ ValidateBarriersToImages(device_data, cb_state, imageMemoryBarrierCount, pImageMemoryBarriers, "vkCmdPipelineBarrier()");
+ skip |= ValidateBarriers(device_data, "vkCmdPipelineBarrier()", cb_state, srcStageMask, dstStageMask, memoryBarrierCount,
+ pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount,
+ pImageMemoryBarriers);
return skip;
}
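
The queue-capability check above ("VUID-vkCmdPipelineBarrier-srcStageMask-01183") compares every stage bit against the flags of the queue family that owns the command pool. A minimal sketch of a barrier that is compatible with a compute-only queue family, assuming a hypothetical compute_cmd handle (not part of this diff):

    // Sketch only: compute_cmd is assumed to be allocated from a compute-only
    // queue family, so graphics-only stages (e.g. FRAGMENT_SHADER) would be
    // rejected here, while compute/transfer stages are accepted.
    vkCmdPipelineBarrier(compute_cmd,
                         VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,  // srcStageMask
                         VK_PIPELINE_STAGE_TRANSFER_BIT,        // dstStageMask
                         0, 0, nullptr, 0, nullptr, 0, nullptr);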
@@ -8996,262 +8654,141 @@ void CoreChecks::PreCallRecordCmdPipelineBarrier(VkCommandBuffer commandBuffer,
const VkBufferMemoryBarrier *pBufferMemoryBarriers,
uint32_t imageMemoryBarrierCount,
const VkImageMemoryBarrier *pImageMemoryBarriers) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
- RecordBarriersQFOTransfers(cb_state, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount,
+ RecordBarriersQFOTransfers(device_data, cb_state, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount,
pImageMemoryBarriers);
- TransitionImageLayouts(cb_state, imageMemoryBarrierCount, pImageMemoryBarriers);
+ TransitionImageLayouts(device_data, cb_state, imageMemoryBarrierCount, pImageMemoryBarriers);
}
-bool ValidationStateTracker::SetQueryState(VkQueue queue, VkCommandBuffer commandBuffer, QueryObject object, QueryState value) {
- CMD_BUFFER_STATE *pCB = GetCBState(commandBuffer);
+bool CoreChecks::SetQueryState(VkQueue queue, VkCommandBuffer commandBuffer, QueryObject object, bool value) {
+ layer_data *dev_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = GetCBNode(commandBuffer);
if (pCB) {
pCB->queryToStateMap[object] = value;
}
- auto queue_data = queueMap.find(queue);
- if (queue_data != queueMap.end()) {
+ auto queue_data = dev_data->queueMap.find(queue);
+ if (queue_data != dev_data->queueMap.end()) {
queue_data->second.queryToStateMap[object] = value;
}
return false;
}
-bool ValidationStateTracker::SetQueryStateMulti(VkQueue queue, VkCommandBuffer commandBuffer, VkQueryPool queryPool,
- uint32_t firstQuery, uint32_t queryCount, QueryState value) {
- CMD_BUFFER_STATE *pCB = GetCBState(commandBuffer);
- auto queue_data = queueMap.find(queue);
-
- for (uint32_t i = 0; i < queryCount; i++) {
- QueryObject object = {queryPool, firstQuery + i};
- if (pCB) {
- pCB->queryToStateMap[object] = value;
- }
- if (queue_data != queueMap.end()) {
- queue_data->second.queryToStateMap[object] = value;
- }
- }
- return false;
-}
-
-bool CoreChecks::ValidateBeginQuery(const CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj, VkFlags flags, CMD_TYPE cmd,
- const char *cmd_name, const char *vuid_queue_flags, const char *vuid_queue_feedback,
- const char *vuid_queue_occlusion, const char *vuid_precise,
- const char *vuid_query_count) const {
- bool skip = false;
- const auto &query_pool_ci = GetQueryPoolState(query_obj.pool)->createInfo;
-
- // There are tighter queue constraints to test for certain query pools
- if (query_pool_ci.queryType == VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT) {
- skip |= ValidateCmdQueueFlags(cb_state, cmd_name, VK_QUEUE_GRAPHICS_BIT, vuid_queue_feedback);
- }
- if (query_pool_ci.queryType == VK_QUERY_TYPE_OCCLUSION) {
- skip |= ValidateCmdQueueFlags(cb_state, cmd_name, VK_QUEUE_GRAPHICS_BIT, vuid_queue_occlusion);
- }
-
- skip |= ValidateCmdQueueFlags(cb_state, cmd_name, VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT, vuid_queue_flags);
+bool CoreChecks::PreCallValidateCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot, VkFlags flags) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
+ assert(cb_state);
+ bool skip = ValidateCmdQueueFlags(device_data, cb_state, "vkCmdBeginQuery()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
+ "VUID-vkCmdBeginQuery-commandBuffer-cmdpool");
+ auto queryType = GetQueryPoolNode(queryPool)->createInfo.queryType;
if (flags & VK_QUERY_CONTROL_PRECISE_BIT) {
- if (!enabled_features.core.occlusionQueryPrecise) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(cb_state->commandBuffer), vuid_precise,
- "%s: VK_QUERY_CONTROL_PRECISE_BIT provided, but precise occlusion queries not enabled on the device.",
- cmd_name);
+ if (!device_data->enabled_features.core.occlusionQueryPrecise) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdBeginQuery-queryType-00800",
+ "VK_QUERY_CONTROL_PRECISE_BIT provided to vkCmdBeginQuery, but precise occlusion queries not enabled "
+ "on the device.");
}
- if (query_pool_ci.queryType != VK_QUERY_TYPE_OCCLUSION) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(cb_state->commandBuffer), vuid_precise,
- "%s: VK_QUERY_CONTROL_PRECISE_BIT provided, but pool query type is not VK_QUERY_TYPE_OCCLUSION", cmd_name);
+ if (queryType != VK_QUERY_TYPE_OCCLUSION) {
+ skip |= log_msg(
+ device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdBeginQuery-queryType-00800",
+ "VK_QUERY_CONTROL_PRECISE_BIT provided to vkCmdBeginQuery, but pool query type is not VK_QUERY_TYPE_OCCLUSION");
}
}
- if (query_obj.query >= query_pool_ci.queryCount) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(cb_state->commandBuffer), vuid_query_count,
- "%s: Query index %" PRIu32 " must be less than query count %" PRIu32 " of %s.", cmd_name, query_obj.query,
- query_pool_ci.queryCount, report_data->FormatHandle(query_obj.pool).c_str());
- }
-
- skip |= ValidateCmd(cb_state, cmd, cmd_name);
+ skip |= ValidateCmd(device_data, cb_state, CMD_BEGINQUERY, "vkCmdBeginQuery()");
return skip;
}
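
The PRECISE_BIT checks above require both the occlusionQueryPrecise device feature and an occlusion query pool. A minimal sketch, assuming hypothetical device/cmd handles and that occlusionQueryPrecise was enabled at device creation (not part of this diff):

    // Sketch only: device and cmd are assumed valid; occlusionQueryPrecise is
    // assumed to have been enabled in VkDeviceCreateInfo::pEnabledFeatures.
    VkQueryPoolCreateInfo qpci = {};
    qpci.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
    qpci.queryType = VK_QUERY_TYPE_OCCLUSION;  // PRECISE_BIT is only valid for occlusion pools
    qpci.queryCount = 1;
    VkQueryPool pool;
    vkCreateQueryPool(device, &qpci, nullptr, &pool);

    vkCmdResetQueryPool(cmd, pool, 0, 1);  // reset before first use, outside a render pass
    vkCmdBeginQuery(cmd, pool, 0, VK_QUERY_CONTROL_PRECISE_BIT);
    // ... draws being measured ...
    vkCmdEndQuery(cmd, pool, 0);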
-void ValidationStateTracker::RecordCmdBeginQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
- cb_state->activeQueries.insert(query_obj);
- cb_state->startedQueries.insert(query_obj);
- cb_state->queryUpdates.emplace_back([this, cb_state, query_obj](VkQueue q) {
- SetQueryState(q, cb_state->commandBuffer, query_obj, QUERYSTATE_RUNNING);
- return false;
- });
- AddCommandBufferBinding(&GetQueryPoolState(query_obj.pool)->cb_bindings,
- VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool), cb_state);
+void CoreChecks::PostCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot, VkFlags flags) {
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
+ QueryObject query = {queryPool, slot};
+ cb_state->activeQueries.insert(query);
+ cb_state->startedQueries.insert(query);
+ AddCommandBufferBinding(&GetQueryPoolNode(queryPool)->cb_bindings, {HandleToUint64(queryPool), kVulkanObjectTypeQueryPool},
+ cb_state);
}
-bool CoreChecks::PreCallValidateCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot, VkFlags flags) {
- if (disabled.query_validation) return false;
- const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+bool CoreChecks::PreCallValidateCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ QueryObject query = {queryPool, slot};
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
assert(cb_state);
- QueryObject query_obj(queryPool, slot);
- return ValidateBeginQuery(cb_state, query_obj, flags, CMD_BEGINQUERY, "vkCmdBeginQuery()",
- "VUID-vkCmdBeginQuery-commandBuffer-cmdpool", "VUID-vkCmdBeginQuery-queryType-02327",
- "VUID-vkCmdBeginQuery-queryType-00803", "VUID-vkCmdBeginQuery-queryType-00800",
- "VUID-vkCmdBeginQuery-query-00802");
-}
-
-bool CoreChecks::VerifyQueryIsReset(VkQueue queue, VkCommandBuffer commandBuffer, QueryObject query_obj) const {
bool skip = false;
-
- auto queue_data = GetQueueState(queue);
- if (!queue_data) return false;
-
- QueryState state = GetQueryState(queue_data, query_obj.pool, query_obj.query);
- if (state != QUERYSTATE_RESET) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(commandBuffer), kVUID_Core_DrawState_QueryNotReset,
- "vkCmdBeginQuery(): %s and query %" PRIu32
- ": query not reset. "
- "After query pool creation, each query must be reset before it is used. "
- "Queries must also be reset between uses.",
- report_data->FormatHandle(query_obj.pool).c_str(), query_obj.query);
+ if (!cb_state->activeQueries.count(query)) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ HandleToUint64(commandBuffer), "VUID-vkCmdEndQuery-None-01923",
+ "Ending a query before it was started: queryPool %s, index %d.",
+ device_data->report_data->FormatHandle(queryPool).c_str(), slot);
}
-
+ skip |= ValidateCmdQueueFlags(device_data, cb_state, "VkCmdEndQuery()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
+ "VUID-vkCmdEndQuery-commandBuffer-cmdpool");
+ skip |= ValidateCmd(device_data, cb_state, CMD_ENDQUERY, "VkCmdEndQuery()");
return skip;
}
-void ValidationStateTracker::PostCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot,
- VkFlags flags) {
+void CoreChecks::PostCallRecordCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot) {
QueryObject query = {queryPool, slot};
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
- RecordCmdBeginQuery(cb_state, query);
-}
-
-void CoreChecks::EnqueueVerifyBeginQuery(VkCommandBuffer command_buffer, const QueryObject &query_obj) {
- CMD_BUFFER_STATE *cb_state = GetCBState(command_buffer);
-
- // Enqueue the submit time validation here, ahead of the submit time state update in the StateTracker's PostCallRecord
- cb_state->queryUpdates.emplace_back(
- [this, cb_state, query_obj](VkQueue q) { return VerifyQueryIsReset(q, cb_state->commandBuffer, query_obj); });
-}
-
-void CoreChecks::PreCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot, VkFlags flags) {
- QueryObject query_obj = {queryPool, slot};
- EnqueueVerifyBeginQuery(commandBuffer, query_obj);
-}
-
-bool CoreChecks::ValidateCmdEndQuery(const CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj, CMD_TYPE cmd,
- const char *cmd_name, const char *vuid_queue_flags, const char *vuid_active_queries) const {
- bool skip = false;
- if (!cb_state->activeQueries.count(query_obj)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(cb_state->commandBuffer), vuid_active_queries,
- "%s: Ending a query before it was started: %s, index %d.", cmd_name,
- report_data->FormatHandle(query_obj.pool).c_str(), query_obj.query);
- }
- skip |= ValidateCmdQueueFlags(cb_state, cmd_name, VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT, vuid_queue_flags);
- skip |= ValidateCmd(cb_state, cmd, cmd_name);
- return skip;
-}
-
-bool CoreChecks::PreCallValidateCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot) {
- if (disabled.query_validation) return false;
- QueryObject query_obj = {queryPool, slot};
- const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
- assert(cb_state);
- return ValidateCmdEndQuery(cb_state, query_obj, CMD_ENDQUERY, "vkCmdEndQuery()", "VUID-vkCmdEndQuery-commandBuffer-cmdpool",
- "VUID-vkCmdEndQuery-None-01923");
-}
-
-void ValidationStateTracker::RecordCmdEndQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
- cb_state->activeQueries.erase(query_obj);
- cb_state->queryUpdates.emplace_back(
- [this, cb_state, query_obj](VkQueue q) { return SetQueryState(q, cb_state->commandBuffer, query_obj, QUERYSTATE_ENDED); });
- AddCommandBufferBinding(&GetQueryPoolState(query_obj.pool)->cb_bindings,
- VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool), cb_state);
-}
-
-void ValidationStateTracker::PostCallRecordCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot) {
- QueryObject query_obj = {queryPool, slot};
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
- RecordCmdEndQuery(cb_state, query_obj);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
+ cb_state->activeQueries.erase(query);
+ cb_state->queryUpdates.emplace_back([=](VkQueue q) { return SetQueryState(q, commandBuffer, query, true); });
+ AddCommandBufferBinding(&GetQueryPoolNode(queryPool)->cb_bindings, {HandleToUint64(queryPool), kVulkanObjectTypeQueryPool},
+ cb_state);
}
bool CoreChecks::PreCallValidateCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
uint32_t queryCount) {
- if (disabled.query_validation) return false;
- const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
- bool skip = InsideRenderPass(cb_state, "vkCmdResetQueryPool()", "VUID-vkCmdResetQueryPool-renderpass");
- skip |= ValidateCmd(cb_state, CMD_RESETQUERYPOOL, "VkCmdResetQueryPool()");
- skip |= ValidateCmdQueueFlags(cb_state, "VkCmdResetQueryPool()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
+ bool skip = InsideRenderPass(device_data, cb_state, "vkCmdResetQueryPool()", "VUID-vkCmdResetQueryPool-renderpass");
+ skip |= ValidateCmd(device_data, cb_state, CMD_RESETQUERYPOOL, "VkCmdResetQueryPool()");
+ skip |= ValidateCmdQueueFlags(device_data, cb_state, "VkCmdResetQueryPool()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
"VUID-vkCmdResetQueryPool-commandBuffer-cmdpool");
return skip;
}
-void ValidationStateTracker::PostCallRecordCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
- uint32_t firstQuery, uint32_t queryCount) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+void CoreChecks::PostCallRecordCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
+ uint32_t queryCount) {
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
- cb_state->queryUpdates.emplace_back([this, commandBuffer, queryPool, firstQuery, queryCount](VkQueue q) {
- return SetQueryStateMulti(q, commandBuffer, queryPool, firstQuery, queryCount, QUERYSTATE_RESET);
- });
- AddCommandBufferBinding(&GetQueryPoolState(queryPool)->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool),
+ for (uint32_t i = 0; i < queryCount; i++) {
+ QueryObject query = {queryPool, firstQuery + i};
+ cb_state->waitedEventsBeforeQueryReset[query] = cb_state->waitedEvents;
+ cb_state->queryUpdates.emplace_back([=](VkQueue q) { return SetQueryState(q, commandBuffer, query, false); });
+ }
+ AddCommandBufferBinding(&GetQueryPoolNode(queryPool)->cb_bindings, {HandleToUint64(queryPool), kVulkanObjectTypeQueryPool},
cb_state);
}
-QueryState CoreChecks::GetQueryState(const QUEUE_STATE *queue_data, VkQueryPool queryPool, uint32_t queryIndex) const {
+bool CoreChecks::IsQueryInvalid(layer_data *dev_data, QUEUE_STATE *queue_data, VkQueryPool queryPool, uint32_t queryIndex) {
QueryObject query = {queryPool, queryIndex};
-
- const std::array<const decltype(queryToStateMap) *, 2> map_list = {{&queue_data->queryToStateMap, &queryToStateMap}};
-
- for (const auto map : map_list) {
- auto query_data = map->find(query);
- if (query_data != map->end()) {
- return query_data->second;
- }
+ auto query_data = queue_data->queryToStateMap.find(query);
+ if (query_data != queue_data->queryToStateMap.end()) {
+ if (!query_data->second) return true;
+ } else {
+ auto it = dev_data->queryToStateMap.find(query);
+ if (it == dev_data->queryToStateMap.end() || !it->second) return true;
}
- return QUERYSTATE_UNKNOWN;
-}
-static QueryResultType GetQueryResultType(QueryState state, VkQueryResultFlags flags) {
- switch (state) {
- case QUERYSTATE_UNKNOWN:
- return QUERYRESULT_UNKNOWN;
- case QUERYSTATE_RESET:
- case QUERYSTATE_RUNNING:
- if (flags & VK_QUERY_RESULT_WAIT_BIT) {
- return ((state == QUERYSTATE_RESET) ? QUERYRESULT_WAIT_ON_RESET : QUERYRESULT_WAIT_ON_RUNNING);
- } else if ((flags & VK_QUERY_RESULT_PARTIAL_BIT) || (flags & VK_QUERY_RESULT_WITH_AVAILABILITY_BIT)) {
- return QUERYRESULT_SOME_DATA;
- } else {
- return QUERYRESULT_NO_DATA;
- }
- case QUERYSTATE_ENDED:
- if ((flags & VK_QUERY_RESULT_WAIT_BIT) || (flags & VK_QUERY_RESULT_PARTIAL_BIT) ||
- (flags & VK_QUERY_RESULT_WITH_AVAILABILITY_BIT)) {
- return QUERYRESULT_SOME_DATA;
- } else {
- return QUERYRESULT_MAYBE_NO_DATA;
- }
- case QUERYSTATE_AVAILABLE:
- return QUERYRESULT_SOME_DATA;
- }
- assert(false);
- return QUERYRESULT_UNKNOWN;
+ return false;
}
-bool CoreChecks::ValidateQuery(VkQueue queue, CMD_BUFFER_STATE *pCB, VkQueryPool queryPool, uint32_t firstQuery,
- uint32_t queryCount, VkQueryResultFlags flags) const {
+bool CoreChecks::ValidateQuery(VkQueue queue, GLOBAL_CB_NODE *pCB, VkQueryPool queryPool, uint32_t firstQuery,
+ uint32_t queryCount) {
bool skip = false;
+ layer_data *dev_data = GetLayerDataPtr(get_dispatch_key(pCB->commandBuffer), layer_data_map);
auto queue_data = GetQueueState(queue);
if (!queue_data) return false;
for (uint32_t i = 0; i < queryCount; i++) {
- QueryState state = GetQueryState(queue_data, queryPool, firstQuery + i);
- QueryResultType result_type = GetQueryResultType(state, flags);
- if (result_type != QUERYRESULT_SOME_DATA) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ if (IsQueryInvalid(dev_data, queue_data, queryPool, firstQuery + i)) {
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(pCB->commandBuffer), kVUID_Core_DrawState_InvalidQuery,
- "Requesting a copy from query to buffer on %s query %" PRIu32 ": %s",
- report_data->FormatHandle(queryPool).c_str(), firstQuery + i, string_QueryResultType(result_type));
+ "Requesting a copy from query to buffer with invalid query: queryPool %s, index %d",
+ dev_data->report_data->FormatHandle(queryPool).c_str(), firstQuery + i);
}
}
return skip;
@@ -9260,58 +8797,50 @@ bool CoreChecks::ValidateQuery(VkQueue queue, CMD_BUFFER_STATE *pCB, VkQueryPool
bool CoreChecks::PreCallValidateCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset,
VkDeviceSize stride, VkQueryResultFlags flags) {
- if (disabled.query_validation) return false;
- const auto cb_state = GetCBState(commandBuffer);
- const auto dst_buff_state = GetBufferState(dstBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ auto cb_state = GetCBNode(commandBuffer);
+ auto dst_buff_state = GetBufferState(dstBuffer);
assert(cb_state);
assert(dst_buff_state);
- bool skip = ValidateMemoryIsBoundToBuffer(dst_buff_state, "vkCmdCopyQueryPoolResults()",
+ bool skip = ValidateMemoryIsBoundToBuffer(device_data, dst_buff_state, "vkCmdCopyQueryPoolResults()",
"VUID-vkCmdCopyQueryPoolResults-dstBuffer-00826");
- skip |= ValidateQueryPoolStride("VUID-vkCmdCopyQueryPoolResults-flags-00822", "VUID-vkCmdCopyQueryPoolResults-flags-00823",
- stride, "dstOffset", dstOffset, flags);
// Validate that DST buffer has correct usage flags set
- skip |= ValidateBufferUsageFlags(dst_buff_state, VK_BUFFER_USAGE_TRANSFER_DST_BIT, true,
+ skip |= ValidateBufferUsageFlags(device_data, dst_buff_state, VK_BUFFER_USAGE_TRANSFER_DST_BIT, true,
"VUID-vkCmdCopyQueryPoolResults-dstBuffer-00825", "vkCmdCopyQueryPoolResults()",
"VK_BUFFER_USAGE_TRANSFER_DST_BIT");
- skip |= ValidateCmdQueueFlags(cb_state, "vkCmdCopyQueryPoolResults()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
- "VUID-vkCmdCopyQueryPoolResults-commandBuffer-cmdpool");
- skip |= ValidateCmd(cb_state, CMD_COPYQUERYPOOLRESULTS, "vkCmdCopyQueryPoolResults()");
- skip |= InsideRenderPass(cb_state, "vkCmdCopyQueryPoolResults()", "VUID-vkCmdCopyQueryPoolResults-renderpass");
+ skip |=
+ ValidateCmdQueueFlags(device_data, cb_state, "vkCmdCopyQueryPoolResults()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
+ "VUID-vkCmdCopyQueryPoolResults-commandBuffer-cmdpool");
+ skip |= ValidateCmd(device_data, cb_state, CMD_COPYQUERYPOOLRESULTS, "vkCmdCopyQueryPoolResults()");
+ skip |= InsideRenderPass(device_data, cb_state, "vkCmdCopyQueryPoolResults()", "VUID-vkCmdCopyQueryPoolResults-renderpass");
return skip;
}
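
The checks above require the destination buffer to be bound to memory, created with VK_BUFFER_USAGE_TRANSFER_DST_BIT, and the copy to be recorded outside a render pass on a graphics or compute queue. A minimal sketch, assuming hypothetical cmd/pool/dst_buffer handles (not part of this diff):

    // Sketch only: dst_buffer is assumed bound to memory and created with
    // VK_BUFFER_USAGE_TRANSFER_DST_BIT; the copy is recorded outside a render pass.
    vkCmdCopyQueryPoolResults(cmd, pool,
                              0, 2,              // firstQuery, queryCount
                              dst_buffer, 0,     // dstBuffer, dstOffset
                              sizeof(uint64_t),  // stride between query results
                              VK_QUERY_RESULT_64_BIT | VK_QUERY_RESULT_WAIT_BIT);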
-void ValidationStateTracker::PostCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
- uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer,
- VkDeviceSize dstOffset, VkDeviceSize stride,
- VkQueryResultFlags flags) {
- auto cb_state = GetCBState(commandBuffer);
+void CoreChecks::PostCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
+ uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset,
+ VkDeviceSize stride, VkQueryResultFlags flags) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ auto cb_state = GetCBNode(commandBuffer);
auto dst_buff_state = GetBufferState(dstBuffer);
- AddCommandBufferBindingBuffer(cb_state, dst_buff_state);
- AddCommandBufferBinding(&GetQueryPoolState(queryPool)->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool),
+ AddCommandBufferBindingBuffer(device_data, cb_state, dst_buff_state);
+ cb_state->queryUpdates.emplace_back([=](VkQueue q) { return ValidateQuery(q, cb_state, queryPool, firstQuery, queryCount); });
+ AddCommandBufferBinding(&GetQueryPoolNode(queryPool)->cb_bindings, {HandleToUint64(queryPool), kVulkanObjectTypeQueryPool},
cb_state);
}
-void CoreChecks::PreCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
- uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset,
- VkDeviceSize stride, VkQueryResultFlags flags) {
- auto cb_state = GetCBState(commandBuffer);
- cb_state->queryUpdates.emplace_back([this, cb_state, queryPool, firstQuery, queryCount, flags](VkQueue q) {
- return ValidateQuery(q, cb_state, queryPool, firstQuery, queryCount, flags);
- });
-}
-
bool CoreChecks::PreCallValidateCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout,
VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size,
const void *pValues) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
bool skip = false;
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
assert(cb_state);
- skip |= ValidateCmdQueueFlags(cb_state, "vkCmdPushConstants()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
+ skip |= ValidateCmdQueueFlags(device_data, cb_state, "vkCmdPushConstants()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
"VUID-vkCmdPushConstants-commandBuffer-cmdpool");
- skip |= ValidateCmd(cb_state, CMD_PUSHCONSTANTS, "vkCmdPushConstants()");
- skip |= ValidatePushConstantRange(offset, size, "vkCmdPushConstants()");
+ skip |= ValidateCmd(device_data, cb_state, CMD_PUSHCONSTANTS, "vkCmdPushConstants()");
+ skip |= ValidatePushConstantRange(device_data, offset, size, "vkCmdPushConstants()");
if (0 == stageFlags) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(commandBuffer), "VUID-vkCmdPushConstants-stageFlags-requiredbitmask",
"vkCmdPushConstants() call has no stageFlags set.");
}
@@ -9319,20 +8848,21 @@ bool CoreChecks::PreCallValidateCmdPushConstants(VkCommandBuffer commandBuffer,
// Check if pipeline_layout VkPushConstantRange(s) overlapping offset, size have stageFlags set for each stage in the command
// stageFlags argument, *and* that the command stageFlags argument has bits set for the stageFlags in each overlapping range.
if (!skip) {
- const auto &ranges = *GetPipelineLayout(layout)->push_constant_ranges;
+ const auto &ranges = *GetPipelineLayout(device_data, layout)->push_constant_ranges;
VkShaderStageFlags found_stages = 0;
for (const auto &range : ranges) {
if ((offset >= range.offset) && (offset + size <= range.offset + range.size)) {
VkShaderStageFlags matching_stages = range.stageFlags & stageFlags;
if (matching_stages != range.stageFlags) {
// "VUID-vkCmdPushConstants-offset-01796" VUID-vkCmdPushConstants-offset-01796
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(commandBuffer), "VUID-vkCmdPushConstants-offset-01796",
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, HandleToUint64(commandBuffer),
+ "VUID-vkCmdPushConstants-offset-01796",
"vkCmdPushConstants(): stageFlags (0x%" PRIx32 ", offset (%" PRIu32 "), and size (%" PRIu32
"), must contain all stages in overlapping VkPushConstantRange stageFlags (0x%" PRIx32
- "), offset (%" PRIu32 "), and size (%" PRIu32 ") in %s.",
+ "), offset (%" PRIu32 "), and size (%" PRIu32 ") in pipeline layout %s.",
(uint32_t)stageFlags, offset, size, (uint32_t)range.stageFlags, range.offset, range.size,
- report_data->FormatHandle(layout).c_str());
+ device_data->report_data->FormatHandle(layout).c_str());
}
// Accumulate all stages we've found
@@ -9342,12 +8872,13 @@ bool CoreChecks::PreCallValidateCmdPushConstants(VkCommandBuffer commandBuffer,
if (found_stages != stageFlags) {
// "VUID-vkCmdPushConstants-offset-01795" VUID-vkCmdPushConstants-offset-01795
uint32_t missing_stages = ~found_stages & stageFlags;
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(commandBuffer), "VUID-vkCmdPushConstants-offset-01795",
- "vkCmdPushConstants(): stageFlags = 0x%" PRIx32
- ", VkPushConstantRange in %s overlapping offset = %d and size = %d, do not contain "
- "stageFlags 0x%" PRIx32 ".",
- (uint32_t)stageFlags, report_data->FormatHandle(layout).c_str(), offset, size, missing_stages);
+ skip |=
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ HandleToUint64(commandBuffer), "VUID-vkCmdPushConstants-offset-01795",
+ "vkCmdPushConstants(): stageFlags = 0x%" PRIx32
+ ", VkPushConstantRange in pipeline layout %s overlapping offset = %d and size = %d, do not contain "
+ "stageFlags 0x%" PRIx32 ".",
+ (uint32_t)stageFlags, device_data->report_data->FormatHandle(layout).c_str(), offset, size, missing_stages);
}
}
return skip;
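
The overlap checks above ("VUID-vkCmdPushConstants-offset-01795" and "-01796") require the update's stageFlags, offset, and size to be covered by a VkPushConstantRange declared in the pipeline layout. A minimal sketch, assuming hypothetical device/cmd handles (not part of this diff):

    // Sketch only: device and cmd are assumed valid handles.
    VkPushConstantRange range = {VK_SHADER_STAGE_VERTEX_BIT, 0, 64};

    VkPipelineLayoutCreateInfo plci = {};
    plci.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
    plci.pushConstantRangeCount = 1;
    plci.pPushConstantRanges = &range;
    VkPipelineLayout layout;
    vkCreatePipelineLayout(device, &plci, nullptr, &layout);

    float data[16] = {};  // 64 bytes, matching the declared range
    // stageFlags, offset and size all fall inside the range above, so both VUIDs are satisfied.
    vkCmdPushConstants(cmd, layout, VK_SHADER_STAGE_VERTEX_BIT, 0, sizeof(data), data);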
@@ -9355,67 +8886,42 @@ bool CoreChecks::PreCallValidateCmdPushConstants(VkCommandBuffer commandBuffer,
bool CoreChecks::PreCallValidateCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
VkQueryPool queryPool, uint32_t slot) {
- if (disabled.query_validation) return false;
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
assert(cb_state);
- bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdWriteTimestamp()",
+ bool skip = ValidateCmdQueueFlags(device_data, cb_state, "vkCmdWriteTimestamp()",
VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT | VK_QUEUE_TRANSFER_BIT,
"VUID-vkCmdWriteTimestamp-commandBuffer-cmdpool");
- skip |= ValidateCmd(cb_state, CMD_WRITETIMESTAMP, "vkCmdWriteTimestamp()");
+ skip |= ValidateCmd(device_data, cb_state, CMD_WRITETIMESTAMP, "vkCmdWriteTimestamp()");
return skip;
}
void CoreChecks::PostCallRecordCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
VkQueryPool queryPool, uint32_t slot) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
QueryObject query = {queryPool, slot};
- cb_state->queryUpdates.emplace_back([this, commandBuffer, query](VkQueue q) {
- bool skip = false;
- skip |= VerifyQueryIsReset(q, commandBuffer, query);
- skip |= SetQueryState(q, commandBuffer, query, QUERYSTATE_ENDED);
- return skip;
- });
- AddCommandBufferBinding(&GetQueryPoolState(queryPool)->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool),
- cb_state);
-}
-
-bool CoreChecks::MatchUsage(uint32_t count, const VkAttachmentReference2KHR *attachments, const VkFramebufferCreateInfo *fbci,
- VkImageUsageFlagBits usage_flag, const char *error_code) const {
- bool skip = false;
-
- if (attachments) {
- for (uint32_t attach = 0; attach < count; attach++) {
- if (attachments[attach].attachment != VK_ATTACHMENT_UNUSED) {
- // Attachment counts are verified elsewhere, but prevent an invalid access
- if (attachments[attach].attachment < fbci->attachmentCount) {
- if ((fbci->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) == 0) {
- const VkImageView *image_view = &fbci->pAttachments[attachments[attach].attachment];
- auto view_state = GetImageViewState(*image_view);
- if (view_state) {
- const VkImageCreateInfo *ici = &GetImageState(view_state->create_info.image)->createInfo;
- if (ici != nullptr) {
- if ((ici->usage & usage_flag) == 0) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, error_code,
- "vkCreateFramebuffer: Framebuffer Attachment (%d) conflicts with the image's "
- "IMAGE_USAGE flags (%s).",
- attachments[attach].attachment, string_VkImageUsageFlagBits(usage_flag));
- }
- }
- }
- } else {
- const VkFramebufferAttachmentsCreateInfoKHR *fbaci =
- lvl_find_in_chain<VkFramebufferAttachmentsCreateInfoKHR>(fbci->pNext);
- if (fbaci != nullptr && fbaci->pAttachmentImageInfos != nullptr &&
- fbaci->attachmentImageInfoCount > attachments[attach].attachment) {
- uint32_t image_usage = fbaci->pAttachmentImageInfos[attachments[attach].attachment].usage;
- if ((image_usage & usage_flag) == 0) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
- 0, error_code,
- "vkCreateFramebuffer: Framebuffer attachment info (%d) conflicts with the image's "
- "IMAGE_USAGE flags (%s).",
- attachments[attach].attachment, string_VkImageUsageFlagBits(usage_flag));
- }
+ cb_state->queryUpdates.emplace_back([=](VkQueue q) { return SetQueryState(q, commandBuffer, query, true); });
+}
+
+bool CoreChecks::MatchUsage(layer_data *dev_data, uint32_t count, const VkAttachmentReference2KHR *attachments,
+ const VkFramebufferCreateInfo *fbci, VkImageUsageFlagBits usage_flag, const char *error_code) {
+ bool skip = false;
+
+ for (uint32_t attach = 0; attach < count; attach++) {
+ if (attachments[attach].attachment != VK_ATTACHMENT_UNUSED) {
+ // Attachment counts are verified elsewhere, but prevent an invalid access
+ if (attachments[attach].attachment < fbci->attachmentCount) {
+ const VkImageView *image_view = &fbci->pAttachments[attachments[attach].attachment];
+ auto view_state = GetImageViewState(*image_view);
+ if (view_state) {
+ const VkImageCreateInfo *ici = &GetImageState(view_state->create_info.image)->createInfo;
+ if (ici != nullptr) {
+ if ((ici->usage & usage_flag) == 0) {
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, error_code,
+ "vkCreateFramebuffer: Framebuffer Attachment (%d) conflicts with the image's "
+ "IMAGE_USAGE flags (%s).",
+ attachments[attach].attachment, string_VkImageUsageFlagBits(usage_flag));
}
}
}
@@ -9434,101 +8940,59 @@ bool CoreChecks::MatchUsage(uint32_t count, const VkAttachmentReference2KHR *att
// 6. fb attachments use identity swizzle
// 7. fb attachments used by renderPass for color/input/ds have correct usage bit set
// 8. fb dimensions are within physical device limits
-bool CoreChecks::ValidateFramebufferCreateInfo(const VkFramebufferCreateInfo *pCreateInfo) const {
+bool CoreChecks::ValidateFramebufferCreateInfo(layer_data *dev_data, const VkFramebufferCreateInfo *pCreateInfo) {
bool skip = false;
- const VkFramebufferAttachmentsCreateInfoKHR *pFramebufferAttachmentsCreateInfo =
- lvl_find_in_chain<VkFramebufferAttachmentsCreateInfoKHR>(pCreateInfo->pNext);
- if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) != 0) {
- if (!enabled_features.imageless_framebuffer_features.imagelessFramebuffer) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkFramebufferCreateInfo-flags-03189",
- "vkCreateFramebuffer(): VkFramebufferCreateInfo flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, "
- "but the imagelessFramebuffer feature is not enabled.");
- }
-
- if (pFramebufferAttachmentsCreateInfo == nullptr) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkFramebufferCreateInfo-flags-03190",
- "vkCreateFramebuffer(): VkFramebufferCreateInfo flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, "
- "but no instance of VkFramebufferAttachmentsCreateInfoKHR is present in the pNext chain.");
- } else {
- if (pFramebufferAttachmentsCreateInfo->attachmentImageInfoCount != 0 &&
- pFramebufferAttachmentsCreateInfo->attachmentImageInfoCount != pCreateInfo->attachmentCount) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkFramebufferCreateInfo-flags-03191",
- "vkCreateFramebuffer(): VkFramebufferCreateInfo attachmentCount is %u, but "
- "VkFramebufferAttachmentsCreateInfoKHR attachmentImageInfoCount is %u.",
- pCreateInfo->attachmentCount, pFramebufferAttachmentsCreateInfo->attachmentImageInfoCount);
- }
- }
- }
-
auto rp_state = GetRenderPassState(pCreateInfo->renderPass);
if (rp_state) {
const VkRenderPassCreateInfo2KHR *rpci = rp_state->createInfo.ptr();
if (rpci->attachmentCount != pCreateInfo->attachmentCount) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
HandleToUint64(pCreateInfo->renderPass), "VUID-VkFramebufferCreateInfo-attachmentCount-00876",
"vkCreateFramebuffer(): VkFramebufferCreateInfo attachmentCount of %u does not match attachmentCount "
- "of %u of %s being used to create Framebuffer.",
+ "of %u of renderPass (%s) being used to create Framebuffer.",
pCreateInfo->attachmentCount, rpci->attachmentCount,
- report_data->FormatHandle(pCreateInfo->renderPass).c_str());
+ dev_data->report_data->FormatHandle(pCreateInfo->renderPass).c_str());
} else {
// attachmentCounts match, so make sure corresponding attachment details line up
- if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) == 0) {
- const VkImageView *image_views = pCreateInfo->pAttachments;
- for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
- auto view_state = GetImageViewState(image_views[i]);
- if (view_state == nullptr) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT,
- HandleToUint64(image_views[i]), "VUID-VkFramebufferCreateInfo-flags-03188",
- "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment #%u is not a valid VkImageView.", i);
- } else {
- auto &ivci = view_state->create_info;
- if (ivci.format != rpci->pAttachments[i].format) {
- skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+ const VkImageView *image_views = pCreateInfo->pAttachments;
+ for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
+ auto view_state = GetImageViewState(image_views[i]);
+ auto &ivci = view_state->create_info;
+ if (ivci.format != rpci->pAttachments[i].format) {
+ skip |=
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
HandleToUint64(pCreateInfo->renderPass), "VUID-VkFramebufferCreateInfo-pAttachments-00880",
"vkCreateFramebuffer(): VkFramebufferCreateInfo attachment #%u has format of %s that does not "
- "match the format of %s used by the corresponding attachment for %s.",
+ "match the format of %s used by the corresponding attachment for renderPass (%s).",
i, string_VkFormat(ivci.format), string_VkFormat(rpci->pAttachments[i].format),
- report_data->FormatHandle(pCreateInfo->renderPass).c_str());
- }
- const VkImageCreateInfo *ici = &GetImageState(ivci.image)->createInfo;
- if (ici->samples != rpci->pAttachments[i].samples) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
- HandleToUint64(pCreateInfo->renderPass), "VUID-VkFramebufferCreateInfo-pAttachments-00881",
- "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment #%u has %s samples that do not "
- "match the %s "
- "samples used by the corresponding attachment for %s.",
- i, string_VkSampleCountFlagBits(ici->samples),
- string_VkSampleCountFlagBits(rpci->pAttachments[i].samples),
- report_data->FormatHandle(pCreateInfo->renderPass).c_str());
- }
- // Verify that view only has a single mip level
- if (ivci.subresourceRange.levelCount != 1) {
- skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkFramebufferCreateInfo-pAttachments-00883",
- "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment #%u has mip levelCount of %u but "
- "only a single mip level (levelCount == 1) is allowed when creating a Framebuffer.",
- i, ivci.subresourceRange.levelCount);
- }
- const uint32_t mip_level = ivci.subresourceRange.baseMipLevel;
- uint32_t mip_width = max(1u, ici->extent.width >> mip_level);
- uint32_t mip_height = max(1u, ici->extent.height >> mip_level);
- if (!(rpci->pAttachments[i].initialLayout == VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT ||
- rpci->pAttachments[i].finalLayout == VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT)) {
- if ((ivci.subresourceRange.layerCount < pCreateInfo->layers) || (mip_width < pCreateInfo->width) ||
- (mip_height < pCreateInfo->height)) {
- skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkFramebufferCreateInfo-pAttachments-00882",
+ dev_data->report_data->FormatHandle(pCreateInfo->renderPass).c_str());
+ }
+ const VkImageCreateInfo *ici = &GetImageState(ivci.image)->createInfo;
+ if (ici->samples != rpci->pAttachments[i].samples) {
+ skip |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+ HandleToUint64(pCreateInfo->renderPass), "VUID-VkFramebufferCreateInfo-pAttachments-00881",
+ "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment #%u has %s samples that do not match the %s "
+ "samples used by the corresponding attachment for renderPass (%s).",
+ i, string_VkSampleCountFlagBits(ici->samples), string_VkSampleCountFlagBits(rpci->pAttachments[i].samples),
+ dev_data->report_data->FormatHandle(pCreateInfo->renderPass).c_str());
+ }
+ // Verify that view only has a single mip level
+ if (ivci.subresourceRange.levelCount != 1) {
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
+ 0, "VUID-VkFramebufferCreateInfo-pAttachments-00883",
+ "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment #%u has mip levelCount of %u but "
+ "only a single mip level (levelCount == 1) is allowed when creating a Framebuffer.",
+ i, ivci.subresourceRange.levelCount);
+ }
+ const uint32_t mip_level = ivci.subresourceRange.baseMipLevel;
+ uint32_t mip_width = max(1u, ici->extent.width >> mip_level);
+ uint32_t mip_height = max(1u, ici->extent.height >> mip_level);
+ if ((ivci.subresourceRange.layerCount < pCreateInfo->layers) || (mip_width < pCreateInfo->width) ||
+ (mip_height < pCreateInfo->height)) {
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
+ 0, "VUID-VkFramebufferCreateInfo-pAttachments-00882",
"vkCreateFramebuffer(): VkFramebufferCreateInfo attachment #%u mip level %u has dimensions "
"smaller than the corresponding framebuffer dimensions. Here are the respective dimensions for "
"attachment #%u, framebuffer:\n"
@@ -9537,313 +9001,77 @@ bool CoreChecks::ValidateFramebufferCreateInfo(const VkFramebufferCreateInfo *pC
"layerCount: %u, %u\n",
i, ivci.subresourceRange.baseMipLevel, i, mip_width, pCreateInfo->width, mip_height,
pCreateInfo->height, ivci.subresourceRange.layerCount, pCreateInfo->layers);
- }
- } else {
- if (device_extensions.vk_ext_fragment_density_map) {
- uint32_t ceiling_width = (uint32_t)ceil(
- (float)pCreateInfo->width /
- std::max((float)phys_dev_ext_props.fragment_density_map_props.maxFragmentDensityTexelSize.width,
- 1.0f));
- if (mip_width < ceiling_width) {
- skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkFramebufferCreateInfo-pAttachments-02555",
- "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment #%u mip level %u has width "
-                                "smaller than the corresponding ceiling of framebuffer width / "
- "maxFragmentDensityTexelSize.width "
- "Here are the respective dimensions for attachment #%u, the ceiling value:\n "
- "attachment #%u, framebuffer:\n"
- "width: %u, the ceiling value: %u\n",
- i, ivci.subresourceRange.baseMipLevel, i, i, mip_width, ceiling_width);
- }
- uint32_t ceiling_height = (uint32_t)ceil(
- (float)pCreateInfo->height /
- std::max(
- (float)phys_dev_ext_props.fragment_density_map_props.maxFragmentDensityTexelSize.height,
- 1.0f));
- if (mip_height < ceiling_height) {
- skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkFramebufferCreateInfo-pAttachments-02556",
- "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment #%u mip level %u has height "
-                                "smaller than the corresponding ceiling of framebuffer height / "
- "maxFragmentDensityTexelSize.height "
- "Here are the respective dimensions for attachment #%u, the ceiling value:\n "
- "attachment #%u, framebuffer:\n"
- "height: %u, the ceiling value: %u\n",
- i, ivci.subresourceRange.baseMipLevel, i, i, mip_height, ceiling_height);
- }
- }
- }
- if (((ivci.components.r != VK_COMPONENT_SWIZZLE_IDENTITY) &&
- (ivci.components.r != VK_COMPONENT_SWIZZLE_R)) ||
- ((ivci.components.g != VK_COMPONENT_SWIZZLE_IDENTITY) &&
- (ivci.components.g != VK_COMPONENT_SWIZZLE_G)) ||
- ((ivci.components.b != VK_COMPONENT_SWIZZLE_IDENTITY) &&
- (ivci.components.b != VK_COMPONENT_SWIZZLE_B)) ||
- ((ivci.components.a != VK_COMPONENT_SWIZZLE_IDENTITY) &&
- (ivci.components.a != VK_COMPONENT_SWIZZLE_A))) {
- skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkFramebufferCreateInfo-pAttachments-00884",
- "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment #%u has non-identy swizzle. All "
- "framebuffer attachments must have been created with the identity swizzle. Here are the actual "
- "swizzle values:\n"
- "r swizzle = %s\n"
- "g swizzle = %s\n"
- "b swizzle = %s\n"
- "a swizzle = %s\n",
- i, string_VkComponentSwizzle(ivci.components.r), string_VkComponentSwizzle(ivci.components.g),
- string_VkComponentSwizzle(ivci.components.b), string_VkComponentSwizzle(ivci.components.a));
- }
- }
}
- } else if (pFramebufferAttachmentsCreateInfo) {
- // VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR is set
- for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
- auto &aii = pFramebufferAttachmentsCreateInfo->pAttachmentImageInfos[i];
- bool formatFound = false;
- for (uint32_t j = 0; j < aii.viewFormatCount; ++j) {
- if (aii.pViewFormats[j] == rpci->pAttachments[i].format) {
- formatFound = true;
- }
- }
- if (!formatFound) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
- HandleToUint64(pCreateInfo->renderPass), "VUID-VkFramebufferCreateInfo-flags-03205",
- "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment info #%u does not include "
- "format %s used "
- "by the corresponding attachment for renderPass (%s).",
- i, string_VkFormat(rpci->pAttachments[i].format),
- report_data->FormatHandle(pCreateInfo->renderPass).c_str());
- }
-
- const char *mismatchedLayersNoMultiviewVuid = device_extensions.vk_khr_multiview
- ? "VUID-VkFramebufferCreateInfo-renderPass-03199"
- : "VUID-VkFramebufferCreateInfo-flags-03200";
- if ((rpci->subpassCount == 0) || (rpci->pSubpasses[0].viewMask == 0)) {
- if (aii.layerCount < pCreateInfo->layers) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- mismatchedLayersNoMultiviewVuid,
- "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment info #%u has only #%u layers, "
- "but framebuffer has #%u layers.",
- i, aii.layerCount, pCreateInfo->layers);
- }
- }
-
- if (!device_extensions.vk_ext_fragment_density_map) {
- if (aii.width < pCreateInfo->width) {
- skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkFramebufferCreateInfo-flags-03192",
- "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment info #%u has a width of only #%u, "
- "but framebuffer has a width of #%u.",
- i, aii.width, pCreateInfo->width);
- }
-
- if (aii.height < pCreateInfo->height) {
- skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkFramebufferCreateInfo-flags-03193",
- "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment info #%u has a height of only #%u, "
- "but framebuffer has a height of #%u.",
- i, aii.height, pCreateInfo->height);
- }
- }
- }
-
- // Validate image usage
- uint32_t attachment_index = VK_ATTACHMENT_UNUSED;
- for (uint32_t i = 0; i < rpci->subpassCount; ++i) {
- skip |= MatchUsage(rpci->pSubpasses[i].colorAttachmentCount, rpci->pSubpasses[i].pColorAttachments, pCreateInfo,
- VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, "VUID-VkFramebufferCreateInfo-flags-03201");
- skip |=
- MatchUsage(rpci->pSubpasses[i].colorAttachmentCount, rpci->pSubpasses[i].pResolveAttachments, pCreateInfo,
- VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, "VUID-VkFramebufferCreateInfo-flags-03201");
- skip |= MatchUsage(1, rpci->pSubpasses[i].pDepthStencilAttachment, pCreateInfo,
- VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, "VUID-VkFramebufferCreateInfo-flags-03202");
- skip |= MatchUsage(rpci->pSubpasses[i].inputAttachmentCount, rpci->pSubpasses[i].pInputAttachments, pCreateInfo,
- VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, "VUID-VkFramebufferCreateInfo-flags-03204");
-
- const VkSubpassDescriptionDepthStencilResolveKHR *pDepthStencilResolve =
- lvl_find_in_chain<VkSubpassDescriptionDepthStencilResolveKHR>(rpci->pSubpasses[i].pNext);
- if (device_extensions.vk_khr_depth_stencil_resolve && pDepthStencilResolve != nullptr) {
- skip |= MatchUsage(1, pDepthStencilResolve->pDepthStencilResolveAttachment, pCreateInfo,
- VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, "VUID-VkFramebufferCreateInfo-flags-03203");
- }
- }
-
- if (device_extensions.vk_khr_multiview) {
- if ((rpci->subpassCount > 0) && (rpci->pSubpasses[0].viewMask != 0)) {
- for (uint32_t i = 0; i < rpci->subpassCount; ++i) {
- const VkSubpassDescriptionDepthStencilResolveKHR *pDepthStencilResolve =
- lvl_find_in_chain<VkSubpassDescriptionDepthStencilResolveKHR>(rpci->pSubpasses[i].pNext);
- uint32_t view_bits = rpci->pSubpasses[i].viewMask;
- uint32_t highest_view_bit = 0;
-
- for (int j = 0; j < 32; ++j) {
- if (((view_bits >> j) & 1) != 0) {
- highest_view_bit = j;
- }
- }
-
- for (uint32_t j = 0; j < rpci->pSubpasses[i].colorAttachmentCount; ++j) {
- attachment_index = rpci->pSubpasses[i].pColorAttachments[j].attachment;
- if (attachment_index != VK_ATTACHMENT_UNUSED) {
- uint32_t layer_count =
- pFramebufferAttachmentsCreateInfo->pAttachmentImageInfos[attachment_index].layerCount;
- if (layer_count <= highest_view_bit) {
- skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
- HandleToUint64(pCreateInfo->renderPass),
- "VUID-VkFramebufferCreateInfo-renderPass-03198",
- "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment info %u "
- "only specifies %u layers, but the view mask for subpass %u in renderPass (%s) "
- "includes layer %u, with that attachment specified as a color attachment %u.",
- attachment_index, layer_count, i,
- report_data->FormatHandle(pCreateInfo->renderPass).c_str(), highest_view_bit, j);
- }
- }
- if (rpci->pSubpasses[i].pResolveAttachments) {
- attachment_index = rpci->pSubpasses[i].pResolveAttachments[j].attachment;
- if (attachment_index != VK_ATTACHMENT_UNUSED) {
- uint32_t layer_count =
- pFramebufferAttachmentsCreateInfo->pAttachmentImageInfos[attachment_index].layerCount;
- if (layer_count <= highest_view_bit) {
- skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
- HandleToUint64(pCreateInfo->renderPass),
- "VUID-VkFramebufferCreateInfo-renderPass-03198",
- "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment info %u "
- "only specifies %u layers, but the view mask for subpass %u in renderPass (%s) "
- "includes layer %u, with that attachment specified as a resolve attachment %u.",
- attachment_index, layer_count, i,
- report_data->FormatHandle(pCreateInfo->renderPass).c_str(), highest_view_bit, j);
- }
- }
- }
- }
-
- for (uint32_t j = 0; j < rpci->pSubpasses[i].inputAttachmentCount; ++j) {
- attachment_index = rpci->pSubpasses[i].pInputAttachments[j].attachment;
- if (attachment_index != VK_ATTACHMENT_UNUSED) {
- uint32_t layer_count =
- pFramebufferAttachmentsCreateInfo->pAttachmentImageInfos[attachment_index].layerCount;
- if (layer_count <= highest_view_bit) {
- skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
- HandleToUint64(pCreateInfo->renderPass),
- "VUID-VkFramebufferCreateInfo-renderPass-03198",
- "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment info %u "
- "only specifies %u layers, but the view mask for subpass %u in renderPass (%s) "
- "includes layer %u, with that attachment specified as an input attachment %u.",
- attachment_index, layer_count, i,
- report_data->FormatHandle(pCreateInfo->renderPass).c_str(), highest_view_bit, j);
- }
- }
- }
-
- if (rpci->pSubpasses[i].pDepthStencilAttachment != nullptr) {
- attachment_index = rpci->pSubpasses[i].pDepthStencilAttachment->attachment;
- if (attachment_index != VK_ATTACHMENT_UNUSED) {
- uint32_t layer_count =
- pFramebufferAttachmentsCreateInfo->pAttachmentImageInfos[attachment_index].layerCount;
- if (layer_count <= highest_view_bit) {
- skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
- HandleToUint64(pCreateInfo->renderPass),
- "VUID-VkFramebufferCreateInfo-renderPass-03198",
- "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment info %u "
- "only specifies %u layers, but the view mask for subpass %u in renderPass (%s) "
- "includes layer %u, with that attachment specified as a depth/stencil attachment.",
- attachment_index, layer_count, i,
- report_data->FormatHandle(pCreateInfo->renderPass).c_str(), highest_view_bit);
- }
- }
-
- if (device_extensions.vk_khr_depth_stencil_resolve && pDepthStencilResolve != nullptr &&
- pDepthStencilResolve->pDepthStencilResolveAttachment != nullptr) {
- attachment_index = pDepthStencilResolve->pDepthStencilResolveAttachment->attachment;
- if (attachment_index != VK_ATTACHMENT_UNUSED) {
- uint32_t layer_count =
- pFramebufferAttachmentsCreateInfo->pAttachmentImageInfos[attachment_index].layerCount;
- if (layer_count <= highest_view_bit) {
- skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
- HandleToUint64(pCreateInfo->renderPass),
- "VUID-VkFramebufferCreateInfo-renderPass-03198",
- "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment info %u "
- "only specifies %u layers, but the view mask for subpass %u in renderPass (%s) "
- "includes layer %u, with that attachment specified as a depth/stencil resolve "
- "attachment.",
- attachment_index, layer_count, i,
- report_data->FormatHandle(pCreateInfo->renderPass).c_str(), highest_view_bit);
- }
- }
- }
- }
- }
- }
+ if (((ivci.components.r != VK_COMPONENT_SWIZZLE_IDENTITY) && (ivci.components.r != VK_COMPONENT_SWIZZLE_R)) ||
+ ((ivci.components.g != VK_COMPONENT_SWIZZLE_IDENTITY) && (ivci.components.g != VK_COMPONENT_SWIZZLE_G)) ||
+ ((ivci.components.b != VK_COMPONENT_SWIZZLE_IDENTITY) && (ivci.components.b != VK_COMPONENT_SWIZZLE_B)) ||
+ ((ivci.components.a != VK_COMPONENT_SWIZZLE_IDENTITY) && (ivci.components.a != VK_COMPONENT_SWIZZLE_A))) {
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
+ 0, "VUID-VkFramebufferCreateInfo-pAttachments-00884",
+                                "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment #%u has a non-identity swizzle. All "
+ "framebuffer attachments must have been created with the identity swizzle. Here are the actual "
+ "swizzle values:\n"
+ "r swizzle = %s\n"
+ "g swizzle = %s\n"
+ "b swizzle = %s\n"
+ "a swizzle = %s\n",
+ i, string_VkComponentSwizzle(ivci.components.r), string_VkComponentSwizzle(ivci.components.g),
+ string_VkComponentSwizzle(ivci.components.b), string_VkComponentSwizzle(ivci.components.a));
}
}
-
- if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) == 0) {
- // Verify correct attachment usage flags
- for (uint32_t subpass = 0; subpass < rpci->subpassCount; subpass++) {
- // Verify input attachments:
- skip |= MatchUsage(rpci->pSubpasses[subpass].inputAttachmentCount, rpci->pSubpasses[subpass].pInputAttachments,
- pCreateInfo, VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT,
- "VUID-VkFramebufferCreateInfo-pAttachments-00879");
- // Verify color attachments:
- skip |= MatchUsage(rpci->pSubpasses[subpass].colorAttachmentCount, rpci->pSubpasses[subpass].pColorAttachments,
- pCreateInfo, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
- "VUID-VkFramebufferCreateInfo-pAttachments-00877");
- // Verify depth/stencil attachments:
- skip |=
- MatchUsage(1, rpci->pSubpasses[subpass].pDepthStencilAttachment, pCreateInfo,
+ }
+ // Verify correct attachment usage flags
+ for (uint32_t subpass = 0; subpass < rpci->subpassCount; subpass++) {
+ // Verify input attachments:
+ skip |=
+ MatchUsage(dev_data, rpci->pSubpasses[subpass].inputAttachmentCount, rpci->pSubpasses[subpass].pInputAttachments,
+ pCreateInfo, VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, "VUID-VkFramebufferCreateInfo-pAttachments-00879");
+ // Verify color attachments:
+ skip |=
+ MatchUsage(dev_data, rpci->pSubpasses[subpass].colorAttachmentCount, rpci->pSubpasses[subpass].pColorAttachments,
+ pCreateInfo, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, "VUID-VkFramebufferCreateInfo-pAttachments-00877");
+ // Verify depth/stencil attachments:
+ if (rpci->pSubpasses[subpass].pDepthStencilAttachment != nullptr) {
+ skip |= MatchUsage(dev_data, 1, rpci->pSubpasses[subpass].pDepthStencilAttachment, pCreateInfo,
VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, "VUID-VkFramebufferCreateInfo-pAttachments-02633");
- }
}
}
}
// Verify FB dimensions are within physical device limits
- if (pCreateInfo->width > phys_dev_props.limits.maxFramebufferWidth) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ if (pCreateInfo->width > dev_data->phys_dev_props.limits.maxFramebufferWidth) {
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-VkFramebufferCreateInfo-width-00886",
"vkCreateFramebuffer(): Requested VkFramebufferCreateInfo width exceeds physical device limits. Requested "
"width: %u, device max: %u\n",
- pCreateInfo->width, phys_dev_props.limits.maxFramebufferWidth);
+ pCreateInfo->width, dev_data->phys_dev_props.limits.maxFramebufferWidth);
}
- if (pCreateInfo->height > phys_dev_props.limits.maxFramebufferHeight) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ if (pCreateInfo->height > dev_data->phys_dev_props.limits.maxFramebufferHeight) {
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-VkFramebufferCreateInfo-height-00888",
"vkCreateFramebuffer(): Requested VkFramebufferCreateInfo height exceeds physical device limits. Requested "
"height: %u, device max: %u\n",
- pCreateInfo->height, phys_dev_props.limits.maxFramebufferHeight);
+ pCreateInfo->height, dev_data->phys_dev_props.limits.maxFramebufferHeight);
}
- if (pCreateInfo->layers > phys_dev_props.limits.maxFramebufferLayers) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ if (pCreateInfo->layers > dev_data->phys_dev_props.limits.maxFramebufferLayers) {
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-VkFramebufferCreateInfo-layers-00890",
"vkCreateFramebuffer(): Requested VkFramebufferCreateInfo layers exceeds physical device limits. Requested "
"layers: %u, device max: %u\n",
- pCreateInfo->layers, phys_dev_props.limits.maxFramebufferLayers);
+ pCreateInfo->layers, dev_data->phys_dev_props.limits.maxFramebufferLayers);
}
// Verify FB dimensions are greater than zero
if (pCreateInfo->width <= 0) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-VkFramebufferCreateInfo-width-00885",
"vkCreateFramebuffer(): Requested VkFramebufferCreateInfo width must be greater than zero.");
}
if (pCreateInfo->height <= 0) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-VkFramebufferCreateInfo-height-00887",
"vkCreateFramebuffer(): Requested VkFramebufferCreateInfo height must be greater than zero.");
}
if (pCreateInfo->layers <= 0) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-VkFramebufferCreateInfo-layers-00889",
"vkCreateFramebuffer(): Requested VkFramebufferCreateInfo layers must be greater than zero.");
}
@@ -9852,30 +9080,36 @@ bool CoreChecks::ValidateFramebufferCreateInfo(const VkFramebufferCreateInfo *pC
bool CoreChecks::PreCallValidateCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
    // TODO : Verify that the renderPass this FB is created with is compatible with the FB
bool skip = false;
- skip |= ValidateFramebufferCreateInfo(pCreateInfo);
+ skip |= ValidateFramebufferCreateInfo(device_data, pCreateInfo);
return skip;
}
-void ValidationStateTracker::PostCallRecordCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
- const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer,
- VkResult result) {
+void CoreChecks::PostCallRecordCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer,
+ VkResult result) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (VK_SUCCESS != result) return;
// Shadow create info and store in map
std::unique_ptr<FRAMEBUFFER_STATE> fb_state(
new FRAMEBUFFER_STATE(*pFramebuffer, pCreateInfo, GetRenderPassStateSharedPtr(pCreateInfo->renderPass)));
- if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) == 0) {
- for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
- VkImageView view = pCreateInfo->pAttachments[i];
- auto view_state = GetImageViewState(view);
- if (!view_state) {
- continue;
- }
+ for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
+ VkImageView view = pCreateInfo->pAttachments[i];
+ auto view_state = GetImageViewState(view);
+ if (!view_state) {
+ continue;
}
+#ifdef FRAMEBUFFER_ATTACHMENT_STATE_CACHE
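+        // When this compile-time cache is enabled, record each attachment's view state and backing image
+        // on the framebuffer state object.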
+ MT_FB_ATTACHMENT_INFO fb_info;
+ fb_info.view_state = view_state;
+ fb_info.image = view_state->create_info.image;
+ fb_state->attachments.push_back(fb_info);
+#endif
}
- frameBufferMap[*pFramebuffer] = std::move(fb_state);
+ device_data->frameBufferMap[*pFramebuffer] = std::move(fb_state);
}
static bool FindDependency(const uint32_t index, const uint32_t dependent, const std::vector<DAGNode> &subpass_to_node,
@@ -9895,38 +9129,26 @@ static bool FindDependency(const uint32_t index, const uint32_t dependent, const
return false;
}
-bool CoreChecks::IsImageLayoutReadOnly(VkImageLayout layout) const {
- if ((layout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL) || (layout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL) ||
- (layout == VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL) ||
- (layout == VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL)) {
- return true;
- }
- return false;
-}
-
-bool CoreChecks::CheckDependencyExists(const uint32_t subpass, const VkImageLayout layout,
- const std::vector<SubpassLayout> &dependent_subpasses,
- const std::vector<DAGNode> &subpass_to_node, bool &skip) const {
+bool CoreChecks::CheckDependencyExists(const layer_data *dev_data, const uint32_t subpass,
+ const std::vector<uint32_t> &dependent_subpasses,
+ const std::vector<DAGNode> &subpass_to_node, bool &skip) {
bool result = true;
- bool bImageLayoutReadOnly = IsImageLayoutReadOnly(layout);
// Loop through all subpasses that share the same attachment and make sure a dependency exists
for (uint32_t k = 0; k < dependent_subpasses.size(); ++k) {
- const SubpassLayout &sp = dependent_subpasses[k];
- if (subpass == sp.index) continue;
- if (bImageLayoutReadOnly && IsImageLayoutReadOnly(sp.layout)) continue;
-
+ if (static_cast<uint32_t>(subpass) == dependent_subpasses[k]) continue;
const DAGNode &node = subpass_to_node[subpass];
// Check for a specified dependency between the two nodes. If one exists we are done.
- auto prev_elem = std::find(node.prev.begin(), node.prev.end(), sp.index);
- auto next_elem = std::find(node.next.begin(), node.next.end(), sp.index);
+ auto prev_elem = std::find(node.prev.begin(), node.prev.end(), dependent_subpasses[k]);
+ auto next_elem = std::find(node.next.begin(), node.next.end(), dependent_subpasses[k]);
if (prev_elem == node.prev.end() && next_elem == node.next.end()) {
            // If no dependency exists, an implicit dependency still might. If not, throw an error.
std::unordered_set<uint32_t> processed_nodes;
- if (!(FindDependency(subpass, sp.index, subpass_to_node, processed_nodes) ||
- FindDependency(sp.index, subpass, subpass_to_node, processed_nodes))) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ if (!(FindDependency(subpass, dependent_subpasses[k], subpass_to_node, processed_nodes) ||
+ FindDependency(dependent_subpasses[k], subpass, subpass_to_node, processed_nodes))) {
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
kVUID_Core_DrawState_InvalidRenderpass,
- "A dependency between subpasses %d and %d must exist but one is not specified.", subpass, sp.index);
+ "A dependency between subpasses %d and %d must exist but one is not specified.", subpass,
+ dependent_subpasses[k]);
result = false;
}
}
@@ -9934,8 +9156,8 @@ bool CoreChecks::CheckDependencyExists(const uint32_t subpass, const VkImageLayo
return result;
}
-bool CoreChecks::CheckPreserved(const VkRenderPassCreateInfo2KHR *pCreateInfo, const int index, const uint32_t attachment,
- const std::vector<DAGNode> &subpass_to_node, int depth, bool &skip) const {
+bool CoreChecks::CheckPreserved(const layer_data *dev_data, const VkRenderPassCreateInfo2KHR *pCreateInfo, const int index,
+ const uint32_t attachment, const std::vector<DAGNode> &subpass_to_node, int depth, bool &skip) {
const DAGNode &node = subpass_to_node[index];
// If this node writes to the attachment return true as next nodes need to preserve the attachment.
const VkSubpassDescription2KHR &subpass = pCreateInfo->pSubpasses[index];
@@ -9951,7 +9173,7 @@ bool CoreChecks::CheckPreserved(const VkRenderPassCreateInfo2KHR *pCreateInfo, c
bool result = false;
// Loop through previous nodes and see if any of them write to the attachment.
for (auto elem : node.prev) {
- result |= CheckPreserved(pCreateInfo, elem, attachment, subpass_to_node, depth + 1, skip);
+ result |= CheckPreserved(dev_data, pCreateInfo, elem, attachment, subpass_to_node, depth + 1, skip);
}
    // If the attachment was written to by a previous node, then this node needs to preserve it.
if (result && depth > 0) {
@@ -9963,7 +9185,7 @@ bool CoreChecks::CheckPreserved(const VkRenderPassCreateInfo2KHR *pCreateInfo, c
}
}
if (!has_preserved) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
kVUID_Core_DrawState_InvalidRenderpass,
"Attachment %d is used by a later subpass and must be preserved in subpass %d.", attachment, index);
}
@@ -9982,28 +9204,23 @@ bool IsRegionOverlapping(VkImageSubresourceRange range1, VkImageSubresourceRange
IsRangeOverlapping(range1.baseArrayLayer, range1.layerCount, range2.baseArrayLayer, range2.layerCount));
}
-bool CoreChecks::ValidateDependencies(FRAMEBUFFER_STATE const *framebuffer, RENDER_PASS_STATE const *renderPass) const {
+bool CoreChecks::ValidateDependencies(layer_data *dev_data, FRAMEBUFFER_STATE const *framebuffer,
+ RENDER_PASS_STATE const *renderPass) {
bool skip = false;
auto const pFramebufferInfo = framebuffer->createInfo.ptr();
auto const pCreateInfo = renderPass->createInfo.ptr();
auto const &subpass_to_node = renderPass->subpassToNode;
-
- struct Attachment {
- std::vector<SubpassLayout> outputs;
- std::vector<SubpassLayout> inputs;
- std::vector<uint32_t> overlapping;
- };
-
- std::vector<Attachment> attachments(pCreateInfo->attachmentCount);
-
+ std::vector<std::vector<uint32_t>> output_attachment_to_subpass(pCreateInfo->attachmentCount);
+ std::vector<std::vector<uint32_t>> input_attachment_to_subpass(pCreateInfo->attachmentCount);
+ std::vector<std::vector<uint32_t>> overlapping_attachments(pCreateInfo->attachmentCount);
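+    // Per-attachment bookkeeping: which subpasses write each attachment, which read it, and which other
+    // attachments alias the same image memory.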
// Find overlapping attachments
for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
for (uint32_t j = i + 1; j < pCreateInfo->attachmentCount; ++j) {
VkImageView viewi = pFramebufferInfo->pAttachments[i];
VkImageView viewj = pFramebufferInfo->pAttachments[j];
if (viewi == viewj) {
- attachments[i].overlapping.emplace_back(j);
- attachments[j].overlapping.emplace_back(i);
+ overlapping_attachments[i].push_back(j);
+ overlapping_attachments[j].push_back(i);
continue;
}
auto view_state_i = GetImageViewState(viewi);
@@ -10014,8 +9231,8 @@ bool CoreChecks::ValidateDependencies(FRAMEBUFFER_STATE const *framebuffer, REND
auto view_ci_i = view_state_i->create_info;
auto view_ci_j = view_state_j->create_info;
if (view_ci_i.image == view_ci_j.image && IsRegionOverlapping(view_ci_i.subresourceRange, view_ci_j.subresourceRange)) {
- attachments[i].overlapping.emplace_back(j);
- attachments[j].overlapping.emplace_back(i);
+ overlapping_attachments[i].push_back(j);
+ overlapping_attachments[j].push_back(i);
continue;
}
auto image_data_i = GetImageState(view_ci_i.image);
@@ -10026,8 +9243,8 @@ bool CoreChecks::ValidateDependencies(FRAMEBUFFER_STATE const *framebuffer, REND
if (image_data_i->binding.mem == image_data_j->binding.mem &&
IsRangeOverlapping(image_data_i->binding.offset, image_data_i->binding.size, image_data_j->binding.offset,
image_data_j->binding.size)) {
- attachments[i].overlapping.emplace_back(j);
- attachments[j].overlapping.emplace_back(i);
+ overlapping_attachments[i].push_back(j);
+ overlapping_attachments[j].push_back(i);
}
}
}
@@ -10039,33 +9256,30 @@ bool CoreChecks::ValidateDependencies(FRAMEBUFFER_STATE const *framebuffer, REND
for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
uint32_t attachment = subpass.pInputAttachments[j].attachment;
if (attachment == VK_ATTACHMENT_UNUSED) continue;
- SubpassLayout sp = {i, subpass.pInputAttachments[j].layout};
- attachments[attachment].inputs.emplace_back(sp);
- for (auto overlapping_attachment : attachments[attachment].overlapping) {
- attachments[overlapping_attachment].inputs.emplace_back(sp);
+ input_attachment_to_subpass[attachment].push_back(i);
+ for (auto overlapping_attachment : overlapping_attachments[attachment]) {
+ input_attachment_to_subpass[overlapping_attachment].push_back(i);
}
}
for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
uint32_t attachment = subpass.pColorAttachments[j].attachment;
if (attachment == VK_ATTACHMENT_UNUSED) continue;
- SubpassLayout sp = {i, subpass.pColorAttachments[j].layout};
- attachments[attachment].outputs.emplace_back(sp);
- for (auto overlapping_attachment : attachments[attachment].overlapping) {
- attachments[overlapping_attachment].outputs.emplace_back(sp);
+ output_attachment_to_subpass[attachment].push_back(i);
+ for (auto overlapping_attachment : overlapping_attachments[attachment]) {
+ output_attachment_to_subpass[overlapping_attachment].push_back(i);
}
attachmentIndices.insert(attachment);
}
if (subpass.pDepthStencilAttachment && subpass.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
uint32_t attachment = subpass.pDepthStencilAttachment->attachment;
- SubpassLayout sp = {i, subpass.pDepthStencilAttachment->layout};
- attachments[attachment].outputs.emplace_back(sp);
- for (auto overlapping_attachment : attachments[attachment].overlapping) {
- attachments[overlapping_attachment].outputs.emplace_back(sp);
+ output_attachment_to_subpass[attachment].push_back(i);
+ for (auto overlapping_attachment : overlapping_attachments[attachment]) {
+ output_attachment_to_subpass[overlapping_attachment].push_back(i);
}
if (attachmentIndices.count(attachment)) {
skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
kVUID_Core_DrawState_InvalidRenderpass,
"Cannot use same attachment (%u) as both color and depth output in same subpass (%u).", attachment, i);
}
@@ -10078,21 +9292,19 @@ bool CoreChecks::ValidateDependencies(FRAMEBUFFER_STATE const *framebuffer, REND
for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
uint32_t attachment = subpass.pInputAttachments[j].attachment;
if (attachment == VK_ATTACHMENT_UNUSED) continue;
- CheckDependencyExists(i, subpass.pInputAttachments[j].layout, attachments[attachment].outputs, subpass_to_node, skip);
+ CheckDependencyExists(dev_data, i, output_attachment_to_subpass[attachment], subpass_to_node, skip);
}
// If the attachment is an output then all subpasses that use the attachment must have a dependency relationship
for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
uint32_t attachment = subpass.pColorAttachments[j].attachment;
if (attachment == VK_ATTACHMENT_UNUSED) continue;
- CheckDependencyExists(i, subpass.pColorAttachments[j].layout, attachments[attachment].outputs, subpass_to_node, skip);
- CheckDependencyExists(i, subpass.pColorAttachments[j].layout, attachments[attachment].inputs, subpass_to_node, skip);
+ CheckDependencyExists(dev_data, i, output_attachment_to_subpass[attachment], subpass_to_node, skip);
+ CheckDependencyExists(dev_data, i, input_attachment_to_subpass[attachment], subpass_to_node, skip);
}
if (subpass.pDepthStencilAttachment && subpass.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
const uint32_t &attachment = subpass.pDepthStencilAttachment->attachment;
- CheckDependencyExists(i, subpass.pDepthStencilAttachment->layout, attachments[attachment].outputs, subpass_to_node,
- skip);
- CheckDependencyExists(i, subpass.pDepthStencilAttachment->layout, attachments[attachment].inputs, subpass_to_node,
- skip);
+ CheckDependencyExists(dev_data, i, output_attachment_to_subpass[attachment], subpass_to_node, skip);
+ CheckDependencyExists(dev_data, i, input_attachment_to_subpass[attachment], subpass_to_node, skip);
}
}
// Loop through implicit dependencies, if this pass reads make sure the attachment is preserved for all passes after it was
@@ -10100,14 +9312,14 @@ bool CoreChecks::ValidateDependencies(FRAMEBUFFER_STATE const *framebuffer, REND
for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
const VkSubpassDescription2KHR &subpass = pCreateInfo->pSubpasses[i];
for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
- CheckPreserved(pCreateInfo, i, subpass.pInputAttachments[j].attachment, subpass_to_node, 0, skip);
+ CheckPreserved(dev_data, pCreateInfo, i, subpass.pInputAttachments[j].attachment, subpass_to_node, 0, skip);
}
}
return skip;
}
-void ValidationStateTracker::RecordRenderPassDAG(RenderPassCreateVersion rp_version, const VkRenderPassCreateInfo2KHR *pCreateInfo,
- RENDER_PASS_STATE *render_pass) {
+static void RecordRenderPassDAG(const layer_data *dev_data, RenderPassCreateVersion rp_version,
+ const VkRenderPassCreateInfo2KHR *pCreateInfo, RENDER_PASS_STATE *render_pass) {
auto &subpass_to_node = render_pass->subpassToNode;
subpass_to_node.resize(pCreateInfo->subpassCount);
auto &self_dependencies = render_pass->self_dependencies;
@@ -10130,33 +9342,92 @@ void ValidationStateTracker::RecordRenderPassDAG(RenderPassCreateVersion rp_vers
}
}
-bool CoreChecks::ValidateRenderPassDAG(RenderPassCreateVersion rp_version, const VkRenderPassCreateInfo2KHR *pCreateInfo) const {
+static bool ValidateRenderPassDAG(const layer_data *dev_data, RenderPassCreateVersion rp_version,
+ const VkRenderPassCreateInfo2KHR *pCreateInfo, RENDER_PASS_STATE *render_pass) {
+ // Shorthand...
+ auto &subpass_to_node = render_pass->subpassToNode;
+ subpass_to_node.resize(pCreateInfo->subpassCount);
+ auto &self_dependencies = render_pass->self_dependencies;
+ self_dependencies.resize(pCreateInfo->subpassCount);
+
bool skip = false;
const char *vuid;
const bool use_rp2 = (rp_version == RENDER_PASS_VERSION_2);
+ for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
+ subpass_to_node[i].pass = i;
+ self_dependencies[i].clear();
+ }
for (uint32_t i = 0; i < pCreateInfo->dependencyCount; ++i) {
const VkSubpassDependency2KHR &dependency = pCreateInfo->pDependencies[i];
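+            // Mask of stage bits that are not part of the graphics pipeline: expand ALL_GRAPHICS into its
+            // constituent stages, OR in the umbrella bit, then invert.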
+ VkPipelineStageFlags exclude_graphics_pipeline_stages =
+ ~(VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT |
+ ExpandPipelineStageFlags(dev_data->device_extensions, VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT));
VkPipelineStageFlagBits latest_src_stage = GetLogicallyLatestGraphicsPipelineStage(dependency.srcStageMask);
VkPipelineStageFlagBits earliest_dst_stage = GetLogicallyEarliestGraphicsPipelineStage(dependency.dstStageMask);
+ // This VU is actually generalised to *any* pipeline - not just graphics - but only graphics render passes are
+ // currently supported by the spec - so only that pipeline is checked here.
+ // If that is ever relaxed, this check should be extended to cover those pipelines.
+ if (dependency.srcSubpass == dependency.dstSubpass && (dependency.srcStageMask & exclude_graphics_pipeline_stages) != 0u &&
+ (dependency.dstStageMask & exclude_graphics_pipeline_stages) != 0u) {
+ vuid = use_rp2 ? "VUID-VkSubpassDependency2KHR-srcSubpass-02244" : "VUID-VkSubpassDependency-srcSubpass-01989";
+ skip |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+ "Dependency %u is a self-dependency, but specifies stage masks that contain stages not in the GRAPHICS pipeline.",
+ i);
+ } else if (dependency.srcSubpass != VK_SUBPASS_EXTERNAL && (dependency.srcStageMask & VK_PIPELINE_STAGE_HOST_BIT)) {
+ vuid = use_rp2 ? "VUID-VkSubpassDependency2KHR-srcSubpass-03078" : "VUID-VkSubpassDependency-srcSubpass-00858";
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+ "Dependency %u specifies a dependency from subpass %u, but includes HOST_BIT in the source stage mask.",
+ i, dependency.srcSubpass);
+ } else if (dependency.dstSubpass != VK_SUBPASS_EXTERNAL && (dependency.dstStageMask & VK_PIPELINE_STAGE_HOST_BIT)) {
+ vuid = use_rp2 ? "VUID-VkSubpassDependency2KHR-dstSubpass-03079" : "VUID-VkSubpassDependency-dstSubpass-00859";
+ skip |=
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+ "Dependency %u specifies a dependency to subpass %u, but includes HOST_BIT in the destination stage mask.",
+ i, dependency.dstSubpass);
+ }
+ // These next two VUs are actually generalised to *any* pipeline - not just graphics - but only graphics render passes are
+ // currently supported by the spec - so only that pipeline is checked here.
+ // If that is ever relaxed, these next two checks should be extended to cover those pipelines.
+ else if (dependency.srcSubpass != VK_SUBPASS_EXTERNAL &&
+ pCreateInfo->pSubpasses[dependency.srcSubpass].pipelineBindPoint == VK_PIPELINE_BIND_POINT_GRAPHICS &&
+ (dependency.srcStageMask & exclude_graphics_pipeline_stages) != 0u) {
+ vuid =
+ use_rp2 ? "VUID-VkRenderPassCreateInfo2KHR-pDependencies-03054" : "VUID-VkRenderPassCreateInfo-pDependencies-00837";
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+ "Dependency %u specifies a source stage mask that contains stages not in the GRAPHICS pipeline as used "
+ "by the source subpass %u.",
+ i, dependency.srcSubpass);
+ } else if (dependency.dstSubpass != VK_SUBPASS_EXTERNAL &&
+ pCreateInfo->pSubpasses[dependency.dstSubpass].pipelineBindPoint == VK_PIPELINE_BIND_POINT_GRAPHICS &&
+ (dependency.dstStageMask & exclude_graphics_pipeline_stages) != 0u) {
+ vuid =
+ use_rp2 ? "VUID-VkRenderPassCreateInfo2KHR-pDependencies-03055" : "VUID-VkRenderPassCreateInfo-pDependencies-00838";
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+ "Dependency %u specifies a destination stage mask that contains stages not in the GRAPHICS pipeline as "
+ "used by the destination subpass %u.",
+ i, dependency.dstSubpass);
+ }
// The first subpass here serves as a good proxy for "is multiview enabled" - since all view masks need to be non-zero if
// any are, which enables multiview.
- if (use_rp2 && (dependency.dependencyFlags & VK_DEPENDENCY_VIEW_LOCAL_BIT) && (pCreateInfo->pSubpasses[0].viewMask == 0)) {
+ else if (use_rp2 && (dependency.dependencyFlags & VK_DEPENDENCY_VIEW_LOCAL_BIT) &&
+ (pCreateInfo->pSubpasses[0].viewMask == 0)) {
skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-VkRenderPassCreateInfo2KHR-viewMask-03059",
"Dependency %u specifies the VK_DEPENDENCY_VIEW_LOCAL_BIT, but multiview is not enabled for this render pass.", i);
} else if (use_rp2 && !(dependency.dependencyFlags & VK_DEPENDENCY_VIEW_LOCAL_BIT) && dependency.viewOffset != 0) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-VkSubpassDependency2KHR-dependencyFlags-03092",
"Dependency %u specifies the VK_DEPENDENCY_VIEW_LOCAL_BIT, but also specifies a view offset of %u.", i,
dependency.viewOffset);
} else if (dependency.srcSubpass == VK_SUBPASS_EXTERNAL || dependency.dstSubpass == VK_SUBPASS_EXTERNAL) {
if (dependency.srcSubpass == dependency.dstSubpass) {
vuid = use_rp2 ? "VUID-VkSubpassDependency2KHR-srcSubpass-03085" : "VUID-VkSubpassDependency-srcSubpass-00865";
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
- "The src and dst subpasses in dependency %u are both external.", i);
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ vuid, "The src and dst subpasses in dependency %u are both external.", i);
} else if (dependency.dependencyFlags & VK_DEPENDENCY_VIEW_LOCAL_BIT) {
if (dependency.srcSubpass == VK_SUBPASS_EXTERNAL) {
vuid = "VUID-VkSubpassDependency-dependencyFlags-02520";
@@ -10172,46 +9443,52 @@ bool CoreChecks::ValidateRenderPassDAG(RenderPassCreateVersion rp_version, const
}
}
skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
"Dependency %u specifies an external dependency but also specifies VK_DEPENDENCY_VIEW_LOCAL_BIT.", i);
}
} else if (dependency.srcSubpass > dependency.dstSubpass) {
vuid = use_rp2 ? "VUID-VkSubpassDependency2KHR-srcSubpass-03084" : "VUID-VkSubpassDependency-srcSubpass-00864";
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
"Dependency %u specifies a dependency from a later subpass (%u) to an earlier subpass (%u), which is "
"disallowed to prevent cyclic dependencies.",
i, dependency.srcSubpass, dependency.dstSubpass);
} else if (dependency.srcSubpass == dependency.dstSubpass) {
if (dependency.viewOffset != 0) {
vuid = use_rp2 ? kVUID_Core_DrawState_InvalidRenderpass : "VUID-VkRenderPassCreateInfo-pNext-01930";
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
- "Dependency %u specifies a self-dependency but has a non-zero view offset of %u", i,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ vuid, "Dependency %u specifies a self-dependency but has a non-zero view offset of %u", i,
dependency.viewOffset);
} else if ((dependency.dependencyFlags | VK_DEPENDENCY_VIEW_LOCAL_BIT) != dependency.dependencyFlags &&
pCreateInfo->pSubpasses[dependency.srcSubpass].viewMask > 1) {
vuid =
use_rp2 ? "VUID-VkRenderPassCreateInfo2KHR-pDependencies-03060" : "VUID-VkSubpassDependency-srcSubpass-00872";
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
- "Dependency %u specifies a self-dependency for subpass %u with a non-zero view mask, but does not "
- "specify VK_DEPENDENCY_VIEW_LOCAL_BIT.",
- i, dependency.srcSubpass);
+ skip |=
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+ "Dependency %u specifies a self-dependency for subpass %u with a non-zero view mask, but does not "
+ "specify VK_DEPENDENCY_VIEW_LOCAL_BIT.",
+ i, dependency.srcSubpass);
} else if ((HasNonFramebufferStagePipelineStageFlags(dependency.srcStageMask) ||
HasNonFramebufferStagePipelineStageFlags(dependency.dstStageMask)) &&
(GetGraphicsPipelineStageLogicalOrdinal(latest_src_stage) >
GetGraphicsPipelineStageLogicalOrdinal(earliest_dst_stage))) {
vuid = use_rp2 ? "VUID-VkSubpassDependency2KHR-srcSubpass-03087" : "VUID-VkSubpassDependency-srcSubpass-00867";
skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
"Dependency %u specifies a self-dependency from logically-later stage (%s) to a logically-earlier stage (%s).",
i, string_VkPipelineStageFlagBits(latest_src_stage), string_VkPipelineStageFlagBits(earliest_dst_stage));
+ } else {
+ self_dependencies[dependency.srcSubpass].push_back(i);
}
+ } else {
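+            // Normal subpass-to-subpass dependency: record the edge in the render pass DAG used by the
+            // later dependency and preserve-attachment checks.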
+ subpass_to_node[dependency.dstSubpass].prev.push_back(dependency.srcSubpass);
+ subpass_to_node[dependency.srcSubpass].next.push_back(dependency.dstSubpass);
}
}
return skip;
}
-bool CoreChecks::ValidateAttachmentIndex(RenderPassCreateVersion rp_version, uint32_t attachment, uint32_t attachment_count,
- const char *type) const {
+bool CoreChecks::ValidateAttachmentIndex(const layer_data *dev_data, RenderPassCreateVersion rp_version, uint32_t attachment,
+ uint32_t attachment_count, const char *type) {
bool skip = false;
const bool use_rp2 = (rp_version == RENDER_PASS_VERSION_2);
const char *const function_name = use_rp2 ? "vkCreateRenderPass2KHR()" : "vkCreateRenderPass()";
@@ -10219,7 +9496,7 @@ bool CoreChecks::ValidateAttachmentIndex(RenderPassCreateVersion rp_version, uin
if (attachment >= attachment_count && attachment != VK_ATTACHMENT_UNUSED) {
const char *vuid =
use_rp2 ? "VUID-VkRenderPassCreateInfo2KHR-attachment-03051" : "VUID-VkRenderPassCreateInfo-attachment-00834";
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
"%s: %s attachment %d must be less than the total number of attachments %d.", type, function_name,
attachment, attachment_count);
}
@@ -10251,9 +9528,9 @@ char const *StringAttachmentType(uint8_t type) {
}
}
-bool CoreChecks::AddAttachmentUse(RenderPassCreateVersion rp_version, uint32_t subpass, std::vector<uint8_t> &attachment_uses,
- std::vector<VkImageLayout> &attachment_layouts, uint32_t attachment, uint8_t new_use,
- VkImageLayout new_layout) const {
+bool CoreChecks::AddAttachmentUse(const layer_data *dev_data, RenderPassCreateVersion rp_version, uint32_t subpass,
+ std::vector<uint8_t> &attachment_uses, std::vector<VkImageLayout> &attachment_layouts,
+ uint32_t attachment, uint8_t new_use, VkImageLayout new_layout) {
if (attachment >= attachment_uses.size()) return false; /* out of range, but already reported */
bool skip = false;
@@ -10265,7 +9542,7 @@ bool CoreChecks::AddAttachmentUse(RenderPassCreateVersion rp_version, uint32_t s
if (uses & new_use) {
if (attachment_layouts[attachment] != new_layout) {
vuid = use_rp2 ? "VUID-VkSubpassDescription2KHR-layout-02528" : "VUID-VkSubpassDescription-layout-02519";
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
"%s: subpass %u already uses attachment %u with a different image layout (%s vs %s).", function_name, subpass,
attachment, string_VkImageLayout(attachment_layouts[attachment]), string_VkImageLayout(new_layout));
}
@@ -10273,7 +9550,7 @@ bool CoreChecks::AddAttachmentUse(RenderPassCreateVersion rp_version, uint32_t s
/* Note: input attachments are assumed to be done first. */
vuid = use_rp2 ? "VUID-VkSubpassDescription2KHR-pPreserveAttachments-03074"
: "VUID-VkSubpassDescription-pPreserveAttachments-00854";
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
"%s: subpass %u uses attachment %u as both %s and %s attachment.", function_name, subpass, attachment,
StringAttachmentType(uses), StringAttachmentType(new_use));
} else {
@@ -10284,8 +9561,8 @@ bool CoreChecks::AddAttachmentUse(RenderPassCreateVersion rp_version, uint32_t s
return skip;
}
-bool CoreChecks::ValidateRenderpassAttachmentUsage(RenderPassCreateVersion rp_version,
- const VkRenderPassCreateInfo2KHR *pCreateInfo) const {
+bool CoreChecks::ValidateRenderpassAttachmentUsage(const layer_data *dev_data, RenderPassCreateVersion rp_version,
+ const VkRenderPassCreateInfo2KHR *pCreateInfo) {
bool skip = false;
const bool use_rp2 = (rp_version == RENDER_PASS_VERSION_2);
const char *vuid;
@@ -10299,30 +9576,32 @@ bool CoreChecks::ValidateRenderpassAttachmentUsage(RenderPassCreateVersion rp_ve
if (subpass.pipelineBindPoint != VK_PIPELINE_BIND_POINT_GRAPHICS) {
vuid = use_rp2 ? "VUID-VkSubpassDescription2KHR-pipelineBindPoint-03062"
: "VUID-VkSubpassDescription-pipelineBindPoint-00844";
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
"%s: Pipeline bind point for subpass %d must be VK_PIPELINE_BIND_POINT_GRAPHICS.", function_name, i);
}
for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
auto const &attachment_ref = subpass.pInputAttachments[j];
if (attachment_ref.attachment != VK_ATTACHMENT_UNUSED) {
- skip |= ValidateAttachmentIndex(rp_version, attachment_ref.attachment, pCreateInfo->attachmentCount, "Input");
+ skip |=
+ ValidateAttachmentIndex(dev_data, rp_version, attachment_ref.attachment, pCreateInfo->attachmentCount, "Input");
if (attachment_ref.aspectMask & VK_IMAGE_ASPECT_METADATA_BIT) {
vuid =
use_rp2 ? kVUID_Core_DrawState_InvalidRenderpass : "VUID-VkInputAttachmentAspectReference-aspectMask-01964";
skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
"%s: Aspect mask for input attachment reference %d in subpass %d includes VK_IMAGE_ASPECT_METADATA_BIT.",
function_name, i, j);
}
if (attachment_ref.attachment < pCreateInfo->attachmentCount) {
- skip |= AddAttachmentUse(rp_version, i, attachment_uses, attachment_layouts, attachment_ref.attachment,
- ATTACHMENT_INPUT, attachment_ref.layout);
+ skip |= AddAttachmentUse(dev_data, rp_version, i, attachment_uses, attachment_layouts,
+ attachment_ref.attachment, ATTACHMENT_INPUT, attachment_ref.layout);
vuid = use_rp2 ? kVUID_Core_DrawState_InvalidRenderpass : "VUID-VkRenderPassCreateInfo-pNext-01963";
- skip |= ValidateImageAspectMask(VK_NULL_HANDLE, pCreateInfo->pAttachments[attachment_ref.attachment].format,
+ skip |= ValidateImageAspectMask(dev_data, VK_NULL_HANDLE,
+ pCreateInfo->pAttachments[attachment_ref.attachment].format,
attachment_ref.aspectMask, function_name, vuid);
}
}
@@ -10333,8 +9612,8 @@ bool CoreChecks::ValidateRenderpassAttachmentUsage(RenderPassCreateVersion rp_ve
// Check for 0
if (attachment_ref.aspectMask == 0) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkSubpassDescription2KHR-aspectMask-03176",
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
+ 0, "VUID-VkSubpassDescription2KHR-aspectMask-03176",
"%s: Input attachment (%d) aspect mask must not be 0.", function_name, j);
} else {
const VkImageAspectFlags valid_bits =
@@ -10344,10 +9623,11 @@ bool CoreChecks::ValidateRenderpassAttachmentUsage(RenderPassCreateVersion rp_ve
// Check for valid aspect mask bits
if (attachment_ref.aspectMask & ~valid_bits) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkSubpassDescription2KHR-aspectMask-03175",
- "%s: Input attachment (%d) aspect mask (0x%" PRIx32 ")is invalid.", function_name, j,
- attachment_ref.aspectMask);
+ skip |=
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
+ 0, "VUID-VkSubpassDescription2KHR-aspectMask-03175",
+                            "%s: Input attachment (%d) aspect mask (0x%" PRIx32 ") is invalid.", function_name, j,
+ attachment_ref.aspectMask);
}
}
}
@@ -10357,13 +9637,13 @@ bool CoreChecks::ValidateRenderpassAttachmentUsage(RenderPassCreateVersion rp_ve
uint32_t attachment = subpass.pPreserveAttachments[j];
if (attachment == VK_ATTACHMENT_UNUSED) {
vuid = use_rp2 ? "VUID-VkSubpassDescription2KHR-attachment-03073" : "VUID-VkSubpassDescription-attachment-00853";
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
- "%s: Preserve attachment (%d) must not be VK_ATTACHMENT_UNUSED.", function_name, j);
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ vuid, "%s: Preserve attachment (%d) must not be VK_ATTACHMENT_UNUSED.", function_name, j);
} else {
- skip |= ValidateAttachmentIndex(rp_version, attachment, pCreateInfo->attachmentCount, "Preserve");
+ skip |= ValidateAttachmentIndex(dev_data, rp_version, attachment, pCreateInfo->attachmentCount, "Preserve");
if (attachment < pCreateInfo->attachmentCount) {
- skip |= AddAttachmentUse(rp_version, i, attachment_uses, attachment_layouts, attachment, ATTACHMENT_PRESERVE,
- VkImageLayout(0) /* preserve doesn't have any layout */);
+ skip |= AddAttachmentUse(dev_data, rp_version, i, attachment_uses, attachment_layouts, attachment,
+ ATTACHMENT_PRESERVE, VkImageLayout(0) /* preserve doesn't have any layout */);
}
}
}
@@ -10374,23 +9654,25 @@ bool CoreChecks::ValidateRenderpassAttachmentUsage(RenderPassCreateVersion rp_ve
if (subpass.pResolveAttachments) {
auto const &attachment_ref = subpass.pResolveAttachments[j];
if (attachment_ref.attachment != VK_ATTACHMENT_UNUSED) {
- skip |= ValidateAttachmentIndex(rp_version, attachment_ref.attachment, pCreateInfo->attachmentCount, "Resolve");
+ skip |= ValidateAttachmentIndex(dev_data, rp_version, attachment_ref.attachment, pCreateInfo->attachmentCount,
+ "Resolve");
if (attachment_ref.attachment < pCreateInfo->attachmentCount) {
- skip |= AddAttachmentUse(rp_version, i, attachment_uses, attachment_layouts, attachment_ref.attachment,
- ATTACHMENT_RESOLVE, attachment_ref.layout);
+ skip |= AddAttachmentUse(dev_data, rp_version, i, attachment_uses, attachment_layouts,
+ attachment_ref.attachment, ATTACHMENT_RESOLVE, attachment_ref.layout);
subpass_performs_resolve = true;
if (pCreateInfo->pAttachments[attachment_ref.attachment].samples != VK_SAMPLE_COUNT_1_BIT) {
vuid = use_rp2 ? "VUID-VkSubpassDescription2KHR-pResolveAttachments-03067"
: "VUID-VkSubpassDescription-pResolveAttachments-00849";
- skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
- "%s: Subpass %u requests multisample resolve into attachment %u, which must "
- "have VK_SAMPLE_COUNT_1_BIT but has %s.",
- function_name, i, attachment_ref.attachment,
- string_VkSampleCountFlagBits(pCreateInfo->pAttachments[attachment_ref.attachment].samples));
+ skip |=
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+ "%s: Subpass %u requests multisample resolve into attachment %u, which must "
+ "have VK_SAMPLE_COUNT_1_BIT but has %s.",
+ function_name, i, attachment_ref.attachment,
+ string_VkSampleCountFlagBits(pCreateInfo->pAttachments[attachment_ref.attachment].samples));
}
}
}
@@ -10399,10 +9681,10 @@ bool CoreChecks::ValidateRenderpassAttachmentUsage(RenderPassCreateVersion rp_ve
if (subpass.pDepthStencilAttachment) {
if (subpass.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
- skip |= ValidateAttachmentIndex(rp_version, subpass.pDepthStencilAttachment->attachment,
+ skip |= ValidateAttachmentIndex(dev_data, rp_version, subpass.pDepthStencilAttachment->attachment,
pCreateInfo->attachmentCount, "Depth");
if (subpass.pDepthStencilAttachment->attachment < pCreateInfo->attachmentCount) {
- skip |= AddAttachmentUse(rp_version, i, attachment_uses, attachment_layouts,
+ skip |= AddAttachmentUse(dev_data, rp_version, i, attachment_uses, attachment_layouts,
subpass.pDepthStencilAttachment->attachment, ATTACHMENT_DEPTH,
subpass.pDepthStencilAttachment->layout);
}
@@ -10412,9 +9694,9 @@ bool CoreChecks::ValidateRenderpassAttachmentUsage(RenderPassCreateVersion rp_ve
uint32_t last_sample_count_attachment = VK_ATTACHMENT_UNUSED;
for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
auto const &attachment_ref = subpass.pColorAttachments[j];
- skip |= ValidateAttachmentIndex(rp_version, attachment_ref.attachment, pCreateInfo->attachmentCount, "Color");
+ skip |= ValidateAttachmentIndex(dev_data, rp_version, attachment_ref.attachment, pCreateInfo->attachmentCount, "Color");
if (attachment_ref.attachment != VK_ATTACHMENT_UNUSED && attachment_ref.attachment < pCreateInfo->attachmentCount) {
- skip |= AddAttachmentUse(rp_version, i, attachment_uses, attachment_layouts, attachment_ref.attachment,
+ skip |= AddAttachmentUse(dev_data, rp_version, i, attachment_uses, attachment_layouts, attachment_ref.attachment,
ATTACHMENT_COLOR, attachment_ref.layout);
VkSampleCountFlagBits current_sample_count = pCreateInfo->pAttachments[attachment_ref.attachment].samples;
@@ -10424,13 +9706,13 @@ bool CoreChecks::ValidateRenderpassAttachmentUsage(RenderPassCreateVersion rp_ve
if (current_sample_count != last_sample_count) {
vuid = use_rp2 ? "VUID-VkSubpassDescription2KHR-pColorAttachments-03069"
: "VUID-VkSubpassDescription-pColorAttachments-01417";
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
- "%s: Subpass %u attempts to render to color attachments with inconsistent sample counts."
- "Color attachment ref %u has sample count %s, whereas previous color attachment ref %u has "
- "sample count %s.",
- function_name, i, j, string_VkSampleCountFlagBits(current_sample_count),
- last_sample_count_attachment, string_VkSampleCountFlagBits(last_sample_count));
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                                "%s: Subpass %u attempts to render to color attachments with inconsistent sample counts. "
+ "Color attachment ref %u has sample count %s, whereas previous color attachment ref %u has "
+ "sample count %s.",
+ function_name, i, j, string_VkSampleCountFlagBits(current_sample_count),
+ last_sample_count_attachment, string_VkSampleCountFlagBits(last_sample_count));
}
}
last_sample_count_attachment = j;
@@ -10438,7 +9720,8 @@ bool CoreChecks::ValidateRenderpassAttachmentUsage(RenderPassCreateVersion rp_ve
if (subpass_performs_resolve && current_sample_count == VK_SAMPLE_COUNT_1_BIT) {
vuid = use_rp2 ? "VUID-VkSubpassDescription2KHR-pResolveAttachments-03066"
: "VUID-VkSubpassDescription-pResolveAttachments-00848";
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
+ 0, vuid,
"%s: Subpass %u requests multisample resolve from attachment %u which has "
"VK_SAMPLE_COUNT_1_BIT.",
function_name, i, attachment_ref.attachment);
@@ -10449,27 +9732,29 @@ bool CoreChecks::ValidateRenderpassAttachmentUsage(RenderPassCreateVersion rp_ve
const auto depth_stencil_sample_count =
pCreateInfo->pAttachments[subpass.pDepthStencilAttachment->attachment].samples;
- if (device_extensions.vk_amd_mixed_attachment_samples) {
+ if (dev_data->device_extensions.vk_amd_mixed_attachment_samples) {
if (pCreateInfo->pAttachments[attachment_ref.attachment].samples > depth_stencil_sample_count) {
vuid = use_rp2 ? "VUID-VkSubpassDescription2KHR-pColorAttachments-03070"
: "VUID-VkSubpassDescription-pColorAttachments-01506";
- skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
- "%s: Subpass %u pColorAttachments[%u] has %s which is larger than "
- "depth/stencil attachment %s.",
- function_name, i, j,
- string_VkSampleCountFlagBits(pCreateInfo->pAttachments[attachment_ref.attachment].samples),
- string_VkSampleCountFlagBits(depth_stencil_sample_count));
+ skip |=
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+ "%s: Subpass %u pColorAttachments[%u] has %s which is larger than "
+ "depth/stencil attachment %s.",
+ function_name, i, j,
+ string_VkSampleCountFlagBits(pCreateInfo->pAttachments[attachment_ref.attachment].samples),
+ string_VkSampleCountFlagBits(depth_stencil_sample_count));
break;
}
}
- if (!device_extensions.vk_amd_mixed_attachment_samples && !device_extensions.vk_nv_framebuffer_mixed_samples &&
+ if (!dev_data->device_extensions.vk_amd_mixed_attachment_samples &&
+ !dev_data->device_extensions.vk_nv_framebuffer_mixed_samples &&
current_sample_count != depth_stencil_sample_count) {
vuid = use_rp2 ? "VUID-VkSubpassDescription2KHR-pDepthStencilAttachment-03071"
: "VUID-VkSubpassDescription-pDepthStencilAttachment-01418";
skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
"%s: Subpass %u attempts to render to use a depth/stencil attachment with sample count that differs "
"from color attachment %u."
"The depth attachment ref has sample count %s, whereas color attachment ref %u has sample count %s.",
@@ -10485,7 +9770,8 @@ bool CoreChecks::ValidateRenderpassAttachmentUsage(RenderPassCreateVersion rp_ve
if (attachment_ref.attachment == VK_ATTACHMENT_UNUSED) {
vuid = use_rp2 ? "VUID-VkSubpassDescription2KHR-pResolveAttachments-03065"
: "VUID-VkSubpassDescription-pResolveAttachments-00847";
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
+ 0, vuid,
"%s: Subpass %u requests multisample resolve from attachment %u which has "
"attachment=VK_ATTACHMENT_UNUSED.",
function_name, i, attachment_ref.attachment);
@@ -10495,11 +9781,11 @@ bool CoreChecks::ValidateRenderpassAttachmentUsage(RenderPassCreateVersion rp_ve
if (color_desc.format != resolve_desc.format) {
vuid = use_rp2 ? "VUID-VkSubpassDescription2KHR-pResolveAttachments-03068"
: "VUID-VkSubpassDescription-pResolveAttachments-00850";
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
- "%s: Subpass %u pColorAttachments[%u] resolves to an attachment with a "
- "different format. color format: %u, resolve format: %u.",
- function_name, i, j, color_desc.format, resolve_desc.format);
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+ "%s: Subpass %u pColorAttachments[%u] resolves to an attachment with a "
+ "different format. color format: %u, resolve format: %u.",
+ function_name, i, j, color_desc.format, resolve_desc.format);
}
}
}
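// A resolve attachment must use the same format as the color attachment it resolves; a hedged,
// application-side sketch of a subpass that the check above would flag with
// VUID-VkSubpassDescription-pResolveAttachments-00850. The device handle, formats, and sample
// counts below are illustrative assumptions, not values taken from this change.
#include <vulkan/vulkan.h>

static VkResult CreateRenderPassWithMismatchedResolveFormat(VkDevice device, VkRenderPass *out_rp) {
    VkAttachmentDescription attachments[2] = {};
    attachments[0].format = VK_FORMAT_B8G8R8A8_UNORM;        // multisampled color attachment
    attachments[0].samples = VK_SAMPLE_COUNT_4_BIT;
    attachments[0].loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
    attachments[0].storeOp = VK_ATTACHMENT_STORE_OP_STORE;
    attachments[0].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    attachments[0].finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
    attachments[1] = attachments[0];
    attachments[1].format = VK_FORMAT_R8G8B8A8_UNORM;        // resolve target: different format
    attachments[1].samples = VK_SAMPLE_COUNT_1_BIT;

    VkAttachmentReference color_ref = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
    VkAttachmentReference resolve_ref = {1, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};

    VkSubpassDescription subpass = {};
    subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
    subpass.colorAttachmentCount = 1;
    subpass.pColorAttachments = &color_ref;
    subpass.pResolveAttachments = &resolve_ref;              // format differs from attachment 0 -> 00850

    VkRenderPassCreateInfo rpci = {};
    rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
    rpci.attachmentCount = 2;
    rpci.pAttachments = attachments;
    rpci.subpassCount = 1;
    rpci.pSubpasses = &subpass;
    return vkCreateRenderPass(device, &rpci, nullptr, out_rp);
}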
@@ -10514,8 +9800,8 @@ static void MarkAttachmentFirstUse(RENDER_PASS_STATE *render_pass, uint32_t inde
if (!render_pass->attachment_first_read.count(index)) render_pass->attachment_first_read[index] = is_read;
}
-bool CoreChecks::ValidateCreateRenderPass(VkDevice device, RenderPassCreateVersion rp_version,
- const VkRenderPassCreateInfo2KHR *pCreateInfo) const {
+bool CoreChecks::ValidateCreateRenderPass(layer_data *dev_data, VkDevice device, RenderPassCreateVersion rp_version,
+ const VkRenderPassCreateInfo2KHR *pCreateInfo, RENDER_PASS_STATE *render_pass) {
bool skip = false;
const bool use_rp2 = (rp_version == RENDER_PASS_VERSION_2);
const char *vuid;
@@ -10523,9 +9809,10 @@ bool CoreChecks::ValidateCreateRenderPass(VkDevice device, RenderPassCreateVersi
// TODO: As part of wrapping up the mem_tracker/core_validation merge the following routine should be consolidated with
// ValidateLayouts.
- skip |= ValidateRenderpassAttachmentUsage(rp_version, pCreateInfo);
+ skip |= ValidateRenderpassAttachmentUsage(dev_data, rp_version, pCreateInfo);
- skip |= ValidateRenderPassDAG(rp_version, pCreateInfo);
+ render_pass->renderPass = VK_NULL_HANDLE;
+ skip |= ValidateRenderPassDAG(dev_data, rp_version, pCreateInfo, render_pass);
// Validate multiview correlation and view masks
bool viewMaskZero = false;
@@ -10542,7 +9829,7 @@ bool CoreChecks::ValidateCreateRenderPass(VkDevice device, RenderPassCreateVersi
if ((subpass.flags & VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX) != 0 &&
(subpass.flags & VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX) == 0) {
vuid = use_rp2 ? "VUID-VkSubpassDescription2KHR-flags-03076" : "VUID-VkSubpassDescription-flags-00856";
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
"%s: The flags parameter of subpass description %u includes "
"VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX but does not also include "
"VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX.",
@@ -10552,13 +9839,13 @@ bool CoreChecks::ValidateCreateRenderPass(VkDevice device, RenderPassCreateVersi
if (rp_version == RENDER_PASS_VERSION_2) {
if (viewMaskNonZero && viewMaskZero) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-VkRenderPassCreateInfo2KHR-viewMask-03058",
"%s: Some view masks are non-zero whilst others are zero.", function_name);
}
if (viewMaskZero && pCreateInfo->correlatedViewMaskCount != 0) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-VkRenderPassCreateInfo2KHR-viewMask-03057",
"%s: Multiview is not enabled but correlation masks are still provided", function_name);
}
@@ -10568,7 +9855,7 @@ bool CoreChecks::ValidateCreateRenderPass(VkDevice device, RenderPassCreateVersi
if (aggregated_cvms & pCreateInfo->pCorrelatedViewMasks[i]) {
vuid = use_rp2 ? "VUID-VkRenderPassCreateInfo2KHR-pCorrelatedViewMasks-03056"
: "VUID-VkRenderPassMultiviewCreateInfo-pCorrelationMasks-00841";
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
"%s: pCorrelatedViewMasks[%u] contains a previously appearing view bit.", function_name, i);
}
aggregated_cvms |= pCreateInfo->pCorrelatedViewMasks[i];
@@ -10578,57 +9865,59 @@ bool CoreChecks::ValidateCreateRenderPass(VkDevice device, RenderPassCreateVersi
auto const &dependency = pCreateInfo->pDependencies[i];
if (rp_version == RENDER_PASS_VERSION_2) {
skip |= ValidateStageMaskGsTsEnables(
- dependency.srcStageMask, function_name, "VUID-VkSubpassDependency2KHR-srcStageMask-03080",
+ dev_data, dependency.srcStageMask, function_name, "VUID-VkSubpassDependency2KHR-srcStageMask-03080",
"VUID-VkSubpassDependency2KHR-srcStageMask-03082", "VUID-VkSubpassDependency2KHR-srcStageMask-02103",
"VUID-VkSubpassDependency2KHR-srcStageMask-02104");
skip |= ValidateStageMaskGsTsEnables(
- dependency.dstStageMask, function_name, "VUID-VkSubpassDependency2KHR-dstStageMask-03081",
+ dev_data, dependency.dstStageMask, function_name, "VUID-VkSubpassDependency2KHR-dstStageMask-03081",
"VUID-VkSubpassDependency2KHR-dstStageMask-03083", "VUID-VkSubpassDependency2KHR-dstStageMask-02105",
"VUID-VkSubpassDependency2KHR-dstStageMask-02106");
} else {
skip |= ValidateStageMaskGsTsEnables(
- dependency.srcStageMask, function_name, "VUID-VkSubpassDependency-srcStageMask-00860",
+ dev_data, dependency.srcStageMask, function_name, "VUID-VkSubpassDependency-srcStageMask-00860",
"VUID-VkSubpassDependency-srcStageMask-00862", "VUID-VkSubpassDependency-srcStageMask-02099",
"VUID-VkSubpassDependency-srcStageMask-02100");
skip |= ValidateStageMaskGsTsEnables(
- dependency.dstStageMask, function_name, "VUID-VkSubpassDependency-dstStageMask-00861",
+ dev_data, dependency.dstStageMask, function_name, "VUID-VkSubpassDependency-dstStageMask-00861",
"VUID-VkSubpassDependency-dstStageMask-00863", "VUID-VkSubpassDependency-dstStageMask-02101",
"VUID-VkSubpassDependency-dstStageMask-02102");
}
- if (!ValidateAccessMaskPipelineStage(device_extensions, dependency.srcAccessMask, dependency.srcStageMask)) {
+ if (!ValidateAccessMaskPipelineStage(dev_data->device_extensions, dependency.srcAccessMask, dependency.srcStageMask)) {
vuid = use_rp2 ? "VUID-VkSubpassDependency2KHR-srcAccessMask-03088" : "VUID-VkSubpassDependency-srcAccessMask-00868";
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
"%s: pDependencies[%u].srcAccessMask (0x%" PRIx32 ") is not supported by srcStageMask (0x%" PRIx32 ").",
function_name, i, dependency.srcAccessMask, dependency.srcStageMask);
}
- if (!ValidateAccessMaskPipelineStage(device_extensions, dependency.dstAccessMask, dependency.dstStageMask)) {
+ if (!ValidateAccessMaskPipelineStage(dev_data->device_extensions, dependency.dstAccessMask, dependency.dstStageMask)) {
vuid = use_rp2 ? "VUID-VkSubpassDependency2KHR-dstAccessMask-03089" : "VUID-VkSubpassDependency-dstAccessMask-00869";
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
"%s: pDependencies[%u].dstAccessMask (0x%" PRIx32 ") is not supported by dstStageMask (0x%" PRIx32 ").",
function_name, i, dependency.dstAccessMask, dependency.dstStageMask);
}
}
if (!skip) {
- skip |= ValidateLayouts(rp_version, device, pCreateInfo);
+ skip |= ValidateLayouts(dev_data, rp_version, device, pCreateInfo);
}
return skip;
}
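// The dependency loop above rejects access masks that the corresponding stage mask cannot produce.
// A hedged sketch of one such combination (the particular stage/access pairing is an illustrative
// assumption): VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT is only available from the color-attachment-output
// stage, so pairing it with the vertex-shader stage is reported under
// VUID-VkSubpassDependency-srcAccessMask-00868.
#include <vulkan/vulkan.h>

static VkSubpassDependency MakeUnsupportedAccessDependency() {
    VkSubpassDependency dep = {};
    dep.srcSubpass = 0;
    dep.dstSubpass = VK_SUBPASS_EXTERNAL;
    dep.srcStageMask = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT;     // vertex shading ...
    dep.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;   // ... cannot perform color attachment writes
    dep.dstStageMask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
    dep.dstAccessMask = 0;
    return dep;
}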
bool CoreChecks::PreCallValidateCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+
bool skip = false;
// Handle extension structs from KHR_multiview and KHR_maintenance2 that can only be validated for RP1 (indices out of bounds)
const VkRenderPassMultiviewCreateInfo *pMultiviewInfo = lvl_find_in_chain<VkRenderPassMultiviewCreateInfo>(pCreateInfo->pNext);
if (pMultiviewInfo) {
if (pMultiviewInfo->subpassCount && pMultiviewInfo->subpassCount != pCreateInfo->subpassCount) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-VkRenderPassCreateInfo-pNext-01928",
"Subpass count is %u but multiview info has a subpass count of %u.", pCreateInfo->subpassCount,
pMultiviewInfo->subpassCount);
} else if (pMultiviewInfo->dependencyCount && pMultiviewInfo->dependencyCount != pCreateInfo->dependencyCount) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-VkRenderPassCreateInfo-pNext-01929",
"Dependency count is %u but multiview info has a dependency count of %u.", pCreateInfo->dependencyCount,
pMultiviewInfo->dependencyCount);
@@ -10641,13 +9930,13 @@ bool CoreChecks::PreCallValidateCreateRenderPass(VkDevice device, const VkRender
uint32_t subpass = pInputAttachmentAspectInfo->pAspectReferences[i].subpass;
uint32_t attachment = pInputAttachmentAspectInfo->pAspectReferences[i].inputAttachmentIndex;
if (subpass >= pCreateInfo->subpassCount) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-VkRenderPassCreateInfo-pNext-01926",
"Subpass index %u specified by input attachment aspect info %u is greater than the subpass "
"count of %u for this render pass.",
subpass, i, pCreateInfo->subpassCount);
} else if (pCreateInfo->pSubpasses && attachment >= pCreateInfo->pSubpasses[subpass].inputAttachmentCount) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-VkRenderPassCreateInfo-pNext-01927",
"Input attachment index %u specified by input attachment aspect info %u is greater than the "
"input attachment count of %u for this subpass.",
@@ -10655,63 +9944,22 @@ bool CoreChecks::PreCallValidateCreateRenderPass(VkDevice device, const VkRender
}
}
}
- const VkRenderPassFragmentDensityMapCreateInfoEXT *pFragmentDensityMapInfo =
- lvl_find_in_chain<VkRenderPassFragmentDensityMapCreateInfoEXT>(pCreateInfo->pNext);
- if (pFragmentDensityMapInfo) {
- if (pFragmentDensityMapInfo->fragmentDensityMapAttachment.attachment != VK_ATTACHMENT_UNUSED) {
- if (pFragmentDensityMapInfo->fragmentDensityMapAttachment.attachment >= pCreateInfo->attachmentCount) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkRenderPassFragmentDensityMapCreateInfoEXT-fragmentDensityMapAttachment-02547",
- "fragmentDensityMapAttachment %u must be less than attachmentCount %u of for this render pass.",
- pFragmentDensityMapInfo->fragmentDensityMapAttachment.attachment, pCreateInfo->attachmentCount);
- } else {
- if (!(pFragmentDensityMapInfo->fragmentDensityMapAttachment.layout ==
- VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT ||
- pFragmentDensityMapInfo->fragmentDensityMapAttachment.layout == VK_IMAGE_LAYOUT_GENERAL)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkRenderPassFragmentDensityMapCreateInfoEXT-fragmentDensityMapAttachment-02549",
- "Layout of fragmentDensityMapAttachment %u' must be equal to "
- "VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT, or VK_IMAGE_LAYOUT_GENERAL.",
- pFragmentDensityMapInfo->fragmentDensityMapAttachment.attachment);
- }
- if (!(pCreateInfo->pAttachments[pFragmentDensityMapInfo->fragmentDensityMapAttachment.attachment].loadOp ==
- VK_ATTACHMENT_LOAD_OP_LOAD ||
- pCreateInfo->pAttachments[pFragmentDensityMapInfo->fragmentDensityMapAttachment.attachment].loadOp ==
- VK_ATTACHMENT_LOAD_OP_DONT_CARE)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkRenderPassFragmentDensityMapCreateInfoEXT-fragmentDensityMapAttachment-02550",
- "FragmentDensityMapAttachment %u' must reference an attachment with a loadOp "
- "equal to VK_ATTACHMENT_LOAD_OP_LOAD or VK_ATTACHMENT_LOAD_OP_DONT_CARE.",
- pFragmentDensityMapInfo->fragmentDensityMapAttachment.attachment);
- }
- if (pCreateInfo->pAttachments[pFragmentDensityMapInfo->fragmentDensityMapAttachment.attachment].storeOp !=
- VK_ATTACHMENT_STORE_OP_DONT_CARE) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkRenderPassFragmentDensityMapCreateInfoEXT-fragmentDensityMapAttachment-02551",
- "FragmentDensityMapAttachment %u' must reference an attachment with a storeOp "
- "equal to VK_ATTACHMENT_STORE_OP_DONT_CARE.",
- pFragmentDensityMapInfo->fragmentDensityMapAttachment.attachment);
- }
- }
- }
- }
if (!skip) {
- safe_VkRenderPassCreateInfo2KHR create_info_2;
- ConvertVkRenderPassCreateInfoToV2KHR(pCreateInfo, &create_info_2);
- skip |= ValidateCreateRenderPass(device, RENDER_PASS_VERSION_1, create_info_2.ptr());
+ auto render_pass = std::unique_ptr<RENDER_PASS_STATE>(new RENDER_PASS_STATE(pCreateInfo));
+ skip |=
+ ValidateCreateRenderPass(device_data, device, RENDER_PASS_VERSION_1, render_pass->createInfo.ptr(), render_pass.get());
}
return skip;
}
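// The multiview checks above compare the chained VkRenderPassMultiviewCreateInfo against the main
// create info. A hedged sketch of the subpass-count mismatch they report
// (VUID-VkRenderPassCreateInfo-pNext-01928); the counts and view masks are illustrative assumptions.
#include <vulkan/vulkan.h>

static void ChainMismatchedMultiviewInfo(VkRenderPassCreateInfo *rpci,
                                         VkRenderPassMultiviewCreateInfo *multiview,
                                         const uint32_t view_masks[2]) {
    multiview->sType = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO;
    multiview->pNext = nullptr;
    multiview->subpassCount = 2;        // two view-mask entries ...
    multiview->pViewMasks = view_masks;
    rpci->subpassCount = 1;             // ... while the render pass declares only one subpass
    rpci->pNext = multiview;
}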
-void ValidationStateTracker::RecordCreateRenderPassState(RenderPassCreateVersion rp_version,
- std::shared_ptr<RENDER_PASS_STATE> &render_pass,
- VkRenderPass *pRenderPass) {
+void RecordCreateRenderPassState(layer_data *device_data, RenderPassCreateVersion rp_version,
+ std::shared_ptr<RENDER_PASS_STATE> &render_pass, VkRenderPass *pRenderPass) {
render_pass->renderPass = *pRenderPass;
auto create_info = render_pass->createInfo.ptr();
- RecordRenderPassDAG(RENDER_PASS_VERSION_1, create_info, render_pass.get());
+ RecordRenderPassDAG(device_data, RENDER_PASS_VERSION_1, create_info, render_pass.get());
for (uint32_t i = 0; i < create_info->subpassCount; ++i) {
const VkSubpassDescription2KHR &subpass = create_info->pSubpasses[i];
@@ -10732,27 +9980,29 @@ void ValidationStateTracker::RecordCreateRenderPassState(RenderPassCreateVersion
}
// Even though render_pass is an rvalue-ref parameter, still must move s.t. move assignment is invoked.
- renderPassMap[*pRenderPass] = std::move(render_pass);
+ device_data->renderPassMap[*pRenderPass] = std::move(render_pass);
}
// Style note:
// Use of rvalue reference exceeds recommended usage of rvalue refs in the Google style guide, but intentionally forces caller to move
// or copy. This is clearer than passing a pointer to shared_ptr and avoids the atomic increment/decrement of shared_ptr copy
// construction or assignment.
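// A minimal, stand-alone illustration of the pattern described in the note above (placeholder
// names, not layer code): accepting the shared_ptr by rvalue reference makes the transfer of
// ownership explicit at the call site, and the callee must still std::move so that move
// assignment rather than a refcount copy runs.
#include <memory>
#include <unordered_map>
#include <utility>

struct ExampleState {};
static std::unordered_map<int, std::shared_ptr<ExampleState>> example_map;

static void RecordExampleState(int key, std::shared_ptr<ExampleState> &&state) {
    example_map[key] = std::move(state);   // without std::move, the shared_ptr would be copied
}

// Usage: RecordExampleState(1, std::make_shared<ExampleState>());   // temporary binds directly
//        auto s = std::make_shared<ExampleState>();
//        RecordExampleState(2, std::move(s));                       // caller opts in to giving up ownership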
-void ValidationStateTracker::PostCallRecordCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
- const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
- VkResult result) {
+void CoreChecks::PostCallRecordCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
+ VkResult result) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (VK_SUCCESS != result) return;
auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
- RecordCreateRenderPassState(RENDER_PASS_VERSION_1, render_pass_state, pRenderPass);
+ RecordCreateRenderPassState(device_data, RENDER_PASS_VERSION_1, render_pass_state, pRenderPass);
}
-void ValidationStateTracker::PostCallRecordCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
- const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
- VkResult result) {
+void CoreChecks::PostCallRecordCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
+ VkResult result) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (VK_SUCCESS != result) return;
auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
- RecordCreateRenderPassState(RENDER_PASS_VERSION_2, render_pass_state, pRenderPass);
+ RecordCreateRenderPassState(device_data, RENDER_PASS_VERSION_2, render_pass_state, pRenderPass);
}
static bool ValidateDepthStencilResolve(const debug_report_data *report_data,
@@ -10885,29 +10135,32 @@ static bool ValidateDepthStencilResolve(const debug_report_data *report_data,
bool CoreChecks::PreCallValidateCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
bool skip = false;
- if (device_extensions.vk_khr_depth_stencil_resolve) {
- skip |= ValidateDepthStencilResolve(report_data, phys_dev_ext_props.depth_stencil_resolve_props, pCreateInfo);
+ if (GetDeviceExtensions()->vk_khr_depth_stencil_resolve) {
+ skip |= ValidateDepthStencilResolve(device_data->report_data, device_data->phys_dev_ext_props.depth_stencil_resolve_props,
+ pCreateInfo);
}
- safe_VkRenderPassCreateInfo2KHR create_info_2(pCreateInfo);
- skip |= ValidateCreateRenderPass(device, RENDER_PASS_VERSION_2, create_info_2.ptr());
+ auto render_pass = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
+ skip |= ValidateCreateRenderPass(device_data, device, RENDER_PASS_VERSION_2, render_pass->createInfo.ptr(), render_pass.get());
return skip;
}
-bool CoreChecks::ValidatePrimaryCommandBuffer(const CMD_BUFFER_STATE *pCB, char const *cmd_name, const char *error_code) const {
+bool CoreChecks::ValidatePrimaryCommandBuffer(const layer_data *dev_data, const GLOBAL_CB_NODE *pCB, char const *cmd_name,
+ const char *error_code) {
bool skip = false;
if (pCB->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(pCB->commandBuffer), error_code, "Cannot execute command %s on a secondary command buffer.",
cmd_name);
}
return skip;
}
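// The check above backs the "bufferlevel" VUIDs: render-pass commands may only be recorded into
// primary command buffers. A hedged, application-side sketch of the failing case; 'device',
// 'pool', and 'rp_begin' are assumed to be valid objects created elsewhere.
#include <vulkan/vulkan.h>

static void RecordBeginRenderPassIntoSecondary(VkDevice device, VkCommandPool pool,
                                               const VkRenderPassBeginInfo *rp_begin) {
    VkCommandBufferAllocateInfo alloc_info = {};
    alloc_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
    alloc_info.commandPool = pool;
    alloc_info.level = VK_COMMAND_BUFFER_LEVEL_SECONDARY;    // not PRIMARY
    alloc_info.commandBufferCount = 1;

    VkCommandBuffer cb = VK_NULL_HANDLE;
    vkAllocateCommandBuffers(device, &alloc_info, &cb);

    VkCommandBufferInheritanceInfo inherit = {};
    inherit.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
    VkCommandBufferBeginInfo begin = {};
    begin.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
    begin.pInheritanceInfo = &inherit;
    vkBeginCommandBuffer(cb, &begin);

    vkCmdBeginRenderPass(cb, rp_begin, VK_SUBPASS_CONTENTS_INLINE);  // flagged: primary-only command
    vkCmdEndRenderPass(cb);
    vkEndCommandBuffer(cb);
}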
-bool CoreChecks::VerifyRenderAreaBounds(const VkRenderPassBeginInfo *pRenderPassBegin) const {
+bool CoreChecks::VerifyRenderAreaBounds(const layer_data *dev_data, const VkRenderPassBeginInfo *pRenderPassBegin) {
bool skip = false;
const safe_VkFramebufferCreateInfo *pFramebufferInfo = &GetFramebufferState(pRenderPassBegin->framebuffer)->createInfo;
if (pRenderPassBegin->renderArea.offset.x < 0 ||
@@ -10915,7 +10168,7 @@ bool CoreChecks::VerifyRenderAreaBounds(const VkRenderPassBeginInfo *pRenderPass
pRenderPassBegin->renderArea.offset.y < 0 ||
(pRenderPassBegin->renderArea.offset.y + pRenderPassBegin->renderArea.extent.height) > pFramebufferInfo->height) {
skip |= static_cast<bool>(log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
kVUID_Core_DrawState_InvalidRenderArea,
"Cannot execute a render pass with renderArea not within the bound of the framebuffer. RenderArea: x %d, y %d, width "
"%d, height %d. Framebuffer: width %d, height %d.",
@@ -10925,181 +10178,9 @@ bool CoreChecks::VerifyRenderAreaBounds(const VkRenderPassBeginInfo *pRenderPass
return skip;
}
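// A stand-alone restatement of the rule enforced above (illustrative only): the render area,
// offset included, must lie entirely within the framebuffer's width and height.
#include <vulkan/vulkan.h>

static bool RenderAreaFitsFramebuffer(const VkRect2D &render_area, uint32_t fb_width, uint32_t fb_height) {
    if (render_area.offset.x < 0 || render_area.offset.y < 0) return false;
    const uint32_t right = static_cast<uint32_t>(render_area.offset.x) + render_area.extent.width;
    const uint32_t bottom = static_cast<uint32_t>(render_area.offset.y) + render_area.extent.height;
    return (right <= fb_width) && (bottom <= fb_height);
}

// Example: on a 256x256 framebuffer, a renderArea with offset (128,128) and extent 256x256 fails,
// since 128 + 256 = 384 exceeds 256 in both dimensions.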
-bool CoreChecks::VerifyFramebufferAndRenderPassImageViews(const VkRenderPassBeginInfo *pRenderPassBeginInfo) const {
- bool skip = false;
- const VkRenderPassAttachmentBeginInfoKHR *pRenderPassAttachmentBeginInfo =
- lvl_find_in_chain<VkRenderPassAttachmentBeginInfoKHR>(pRenderPassBeginInfo->pNext);
-
- if (pRenderPassAttachmentBeginInfo && pRenderPassAttachmentBeginInfo->attachmentCount != 0) {
- const safe_VkFramebufferCreateInfo *pFramebufferCreateInfo =
- &GetFramebufferState(pRenderPassBeginInfo->framebuffer)->createInfo;
- const VkFramebufferAttachmentsCreateInfoKHR *pFramebufferAttachmentsCreateInfo =
- lvl_find_in_chain<VkFramebufferAttachmentsCreateInfoKHR>(pFramebufferCreateInfo->pNext);
- if ((pFramebufferCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) == 0) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
- HandleToUint64(pRenderPassBeginInfo->renderPass), "VUID-VkRenderPassBeginInfo-framebuffer-03207",
- "VkRenderPassBeginInfo: Image views specified at render pass begin, but framebuffer not created with "
- "VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR");
- } else if (pFramebufferAttachmentsCreateInfo) {
- if (pFramebufferAttachmentsCreateInfo->attachmentImageInfoCount != pRenderPassAttachmentBeginInfo->attachmentCount) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
- HandleToUint64(pRenderPassBeginInfo->renderPass), "VUID-VkRenderPassBeginInfo-framebuffer-03208",
- "VkRenderPassBeginInfo: %u image views specified at render pass begin, but framebuffer "
- "created expecting %u attachments",
- pRenderPassAttachmentBeginInfo->attachmentCount,
- pFramebufferAttachmentsCreateInfo->attachmentImageInfoCount);
- } else {
- const safe_VkRenderPassCreateInfo2KHR *pRenderPassCreateInfo =
- &GetRenderPassState(pRenderPassBeginInfo->renderPass)->createInfo;
- for (uint32_t i = 0; i < pRenderPassAttachmentBeginInfo->attachmentCount; ++i) {
- const VkImageViewCreateInfo *pImageViewCreateInfo =
- &GetImageViewState(pRenderPassAttachmentBeginInfo->pAttachments[i])->create_info;
- const VkFramebufferAttachmentImageInfoKHR *pFramebufferAttachmentImageInfo =
- &pFramebufferAttachmentsCreateInfo->pAttachmentImageInfos[i];
- const VkImageCreateInfo *pImageCreateInfo = &GetImageState(pImageViewCreateInfo->image)->createInfo;
-
- if (pFramebufferAttachmentImageInfo->flags != pImageCreateInfo->flags) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
- HandleToUint64(pRenderPassBeginInfo->renderPass),
- "VUID-VkRenderPassBeginInfo-framebuffer-03209",
- "VkRenderPassBeginInfo: Image view #%u created from an image with flags set as 0x%X, "
- "but image info #%u used to create the framebuffer had flags set as 0x%X",
- i, pImageCreateInfo->flags, i, pFramebufferAttachmentImageInfo->flags);
- }
-
- if (pFramebufferAttachmentImageInfo->usage != pImageCreateInfo->usage) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
- HandleToUint64(pRenderPassBeginInfo->renderPass),
- "VUID-VkRenderPassBeginInfo-framebuffer-03210",
- "VkRenderPassBeginInfo: Image view #%u created from an image with usage set as 0x%X, "
- "but image info #%u used to create the framebuffer had usage set as 0x%X",
- i, pImageCreateInfo->usage, i, pFramebufferAttachmentImageInfo->usage);
- }
-
- if (pFramebufferAttachmentImageInfo->width != pImageCreateInfo->extent.width) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
- HandleToUint64(pRenderPassBeginInfo->renderPass),
- "VUID-VkRenderPassBeginInfo-framebuffer-03211",
- "VkRenderPassBeginInfo: Image view #%u created from an image with width set as %u, "
- "but image info #%u used to create the framebuffer had width set as %u",
- i, pImageCreateInfo->extent.width, i, pFramebufferAttachmentImageInfo->width);
- }
-
- if (pFramebufferAttachmentImageInfo->height != pImageCreateInfo->extent.height) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
- HandleToUint64(pRenderPassBeginInfo->renderPass),
- "VUID-VkRenderPassBeginInfo-framebuffer-03212",
- "VkRenderPassBeginInfo: Image view #%u created from an image with height set as %u, "
- "but image info #%u used to create the framebuffer had height set as %u",
- i, pImageCreateInfo->extent.height, i, pFramebufferAttachmentImageInfo->height);
- }
-
- if (pFramebufferAttachmentImageInfo->layerCount != pImageViewCreateInfo->subresourceRange.layerCount) {
- skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
- HandleToUint64(pRenderPassBeginInfo->renderPass), "VUID-VkRenderPassBeginInfo-framebuffer-03213",
- "VkRenderPassBeginInfo: Image view #%u created with a subresource range with a layerCount of %u, "
- "but image info #%u used to create the framebuffer had layerCount set as %u",
- i, pImageViewCreateInfo->subresourceRange.layerCount, i, pFramebufferAttachmentImageInfo->layerCount);
- }
-
- const VkImageFormatListCreateInfoKHR *pImageFormatListCreateInfo =
- lvl_find_in_chain<VkImageFormatListCreateInfoKHR>(pImageCreateInfo->pNext);
- if (pImageFormatListCreateInfo) {
- if (pImageFormatListCreateInfo->viewFormatCount != pFramebufferAttachmentImageInfo->viewFormatCount) {
- skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
- HandleToUint64(pRenderPassBeginInfo->renderPass), "VUID-VkRenderPassBeginInfo-framebuffer-03214",
- "VkRenderPassBeginInfo: Image view #%u created with an image with a viewFormatCount of %u, "
- "but image info #%u used to create the framebuffer had viewFormatCount set as %u",
- i, pImageFormatListCreateInfo->viewFormatCount, i,
- pFramebufferAttachmentImageInfo->viewFormatCount);
- }
-
- for (uint32_t j = 0; j < pImageFormatListCreateInfo->viewFormatCount; ++j) {
- bool formatFound = false;
- for (uint32_t k = 0; k < pFramebufferAttachmentImageInfo->viewFormatCount; ++k) {
- if (pImageFormatListCreateInfo->pViewFormats[j] ==
- pFramebufferAttachmentImageInfo->pViewFormats[k]) {
- formatFound = true;
- }
- }
- if (!formatFound) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
- HandleToUint64(pRenderPassBeginInfo->renderPass),
- "VUID-VkRenderPassBeginInfo-framebuffer-03215",
- "VkRenderPassBeginInfo: Image view #%u created with an image including the format "
- "%s in its view format list, "
- "but image info #%u used to create the framebuffer does not include this format",
- i, string_VkFormat(pImageFormatListCreateInfo->pViewFormats[j]), i);
- }
- }
- }
-
- if (pRenderPassCreateInfo->pAttachments[i].format != pImageViewCreateInfo->format) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
- HandleToUint64(pRenderPassBeginInfo->renderPass),
- "VUID-VkRenderPassBeginInfo-framebuffer-03216",
- "VkRenderPassBeginInfo: Image view #%u created with a format of %s, "
- "but render pass attachment description #%u created with a format of %s",
- i, string_VkFormat(pImageViewCreateInfo->format), i,
- string_VkFormat(pRenderPassCreateInfo->pAttachments[i].format));
- }
-
- if (pRenderPassCreateInfo->pAttachments[i].samples != pImageCreateInfo->samples) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
- HandleToUint64(pRenderPassBeginInfo->renderPass),
- "VUID-VkRenderPassBeginInfo-framebuffer-03217",
- "VkRenderPassBeginInfo: Image view #%u created with an image with %s samples, "
- "but render pass attachment description #%u created with %s samples",
- i, string_VkSampleCountFlagBits(pImageCreateInfo->samples), i,
- string_VkSampleCountFlagBits(pRenderPassCreateInfo->pAttachments[i].samples));
- }
-
- if (pImageViewCreateInfo->subresourceRange.levelCount != 1) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT,
- HandleToUint64(pRenderPassAttachmentBeginInfo->pAttachments[i]),
- "VUID-VkRenderPassAttachmentBeginInfoKHR-pAttachments-03218",
- "VkRenderPassAttachmentBeginInfo: Image view #%u created with multiple (%u) mip levels.", i,
- pImageViewCreateInfo->subresourceRange.levelCount);
- }
-
- if (((pImageViewCreateInfo->components.r != VK_COMPONENT_SWIZZLE_IDENTITY) &&
- (pImageViewCreateInfo->components.r != VK_COMPONENT_SWIZZLE_R)) ||
- ((pImageViewCreateInfo->components.g != VK_COMPONENT_SWIZZLE_IDENTITY) &&
- (pImageViewCreateInfo->components.g != VK_COMPONENT_SWIZZLE_G)) ||
- ((pImageViewCreateInfo->components.b != VK_COMPONENT_SWIZZLE_IDENTITY) &&
- (pImageViewCreateInfo->components.b != VK_COMPONENT_SWIZZLE_B)) ||
- ((pImageViewCreateInfo->components.a != VK_COMPONENT_SWIZZLE_IDENTITY) &&
- (pImageViewCreateInfo->components.a != VK_COMPONENT_SWIZZLE_A))) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT,
- HandleToUint64(pRenderPassAttachmentBeginInfo->pAttachments[i]),
- "VUID-VkRenderPassAttachmentBeginInfoKHR-pAttachments-03219",
- "VkRenderPassAttachmentBeginInfo: Image view #%u created with non-identity swizzle. All "
- "framebuffer attachments must have been created with the identity swizzle. Here are the actual "
- "swizzle values:\n"
- "r swizzle = %s\n"
- "g swizzle = %s\n"
- "b swizzle = %s\n"
- "a swizzle = %s\n",
- i, string_VkComponentSwizzle(pImageViewCreateInfo->components.r),
- string_VkComponentSwizzle(pImageViewCreateInfo->components.g),
- string_VkComponentSwizzle(pImageViewCreateInfo->components.b),
- string_VkComponentSwizzle(pImageViewCreateInfo->components.a));
- }
- }
- }
- }
- }
-
- return skip;
-}
-
// If this is a stencil format, make sure the stencil[Load|Store]Op flag is checked, while if it is a depth/color attachment the
// [load|store]Op flag must be checked
-// TODO: The memory valid flag in DEVICE_MEMORY_STATE should probably be split to track the validity of stencil memory separately.
+// TODO: The memory valid flag in DEVICE_MEM_INFO should probably be split to track the validity of stencil memory separately.
template <typename T>
static bool FormatSpecificLoadAndStoreOpSettings(VkFormat format, T color_depth_op, T stencil_op, T op) {
if (color_depth_op != op && stencil_op != op) {
@@ -11111,9 +10192,9 @@ static bool FormatSpecificLoadAndStoreOpSettings(VkFormat format, T color_depth_
return ((check_color_depth_load_op && (color_depth_op == op)) || (check_stencil_load_op && (stencil_op == op)));
}
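// Hedged usage sketch for the helper above (assumes the template is in scope; the attachment
// values are illustrative): for a combined depth/stencil format an attachment counts as using
// the queried op if either loadOp or stencilLoadOp matches, while for a color or depth-only
// format only loadOp is consulted.
static bool AttachmentUsesLoadOpClear(const VkAttachmentDescription &att) {
    return FormatSpecificLoadAndStoreOpSettings(att.format, att.loadOp, att.stencilLoadOp,
                                                VK_ATTACHMENT_LOAD_OP_CLEAR);
}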
-bool CoreChecks::ValidateCmdBeginRenderPass(VkCommandBuffer commandBuffer, RenderPassCreateVersion rp_version,
- const VkRenderPassBeginInfo *pRenderPassBegin) const {
- const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+bool CoreChecks::ValidateCmdBeginRenderPass(layer_data *device_data, VkCommandBuffer commandBuffer,
+ RenderPassCreateVersion rp_version, const VkRenderPassBeginInfo *pRenderPassBegin) {
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
assert(cb_state);
auto render_pass_state = pRenderPassBegin ? GetRenderPassState(pRenderPassBegin->renderPass) : nullptr;
auto framebuffer = pRenderPassBegin ? GetFramebufferState(pRenderPassBegin->framebuffer) : nullptr;
@@ -11133,24 +10214,26 @@ bool CoreChecks::ValidateCmdBeginRenderPass(VkCommandBuffer commandBuffer, Rende
for (uint32_t i = 0; i < pSampleLocationsBeginInfo->attachmentInitialSampleLocationsCount; ++i) {
if (pSampleLocationsBeginInfo->pAttachmentInitialSampleLocations[i].attachmentIndex >=
render_pass_state->createInfo.attachmentCount) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkAttachmentSampleLocationsEXT-attachmentIndex-01531",
- "Attachment index %u specified by attachment sample locations %u is greater than the "
- "attachment count of %u for the render pass being begun.",
- pSampleLocationsBeginInfo->pAttachmentInitialSampleLocations[i].attachmentIndex, i,
- render_pass_state->createInfo.attachmentCount);
+ skip |=
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ "VUID-VkAttachmentSampleLocationsEXT-attachmentIndex-01531",
+ "Attachment index %u specified by attachment sample locations %u is greater than the "
+ "attachment count of %u for the render pass being begun.",
+ pSampleLocationsBeginInfo->pAttachmentInitialSampleLocations[i].attachmentIndex, i,
+ render_pass_state->createInfo.attachmentCount);
}
}
for (uint32_t i = 0; i < pSampleLocationsBeginInfo->postSubpassSampleLocationsCount; ++i) {
if (pSampleLocationsBeginInfo->pPostSubpassSampleLocations[i].subpassIndex >=
render_pass_state->createInfo.subpassCount) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkSubpassSampleLocationsEXT-subpassIndex-01532",
- "Subpass index %u specified by subpass sample locations %u is greater than the subpass count "
- "of %u for the render pass being begun.",
- pSampleLocationsBeginInfo->pPostSubpassSampleLocations[i].subpassIndex, i,
- render_pass_state->createInfo.subpassCount);
+ skip |=
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ "VUID-VkSubpassSampleLocationsEXT-subpassIndex-01532",
+ "Subpass index %u specified by subpass sample locations %u is greater than the subpass count "
+ "of %u for the render pass being begun.",
+ pSampleLocationsBeginInfo->pPostSubpassSampleLocations[i].subpassIndex, i,
+ render_pass_state->createInfo.subpassCount);
}
}
}
@@ -11164,79 +10247,59 @@ bool CoreChecks::ValidateCmdBeginRenderPass(VkCommandBuffer commandBuffer, Rende
}
if (clear_op_size > pRenderPassBegin->clearValueCount) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
- HandleToUint64(render_pass_state->renderPass), "VUID-VkRenderPassBeginInfo-clearValueCount-00902",
- "In %s the VkRenderPassBeginInfo struct has a clearValueCount of %u but there "
- "must be at least %u entries in pClearValues array to account for the highest index attachment in "
- "%s that uses VK_ATTACHMENT_LOAD_OP_CLEAR is %u. Note that the pClearValues array is indexed by "
- "attachment number so even if some pClearValues entries between 0 and %u correspond to attachments "
- "that aren't cleared they will be ignored.",
- function_name, pRenderPassBegin->clearValueCount, clear_op_size,
- report_data->FormatHandle(render_pass_state->renderPass).c_str(), clear_op_size, clear_op_size - 1);
- }
- skip |= VerifyFramebufferAndRenderPassImageViews(pRenderPassBegin);
- skip |= VerifyRenderAreaBounds(pRenderPassBegin);
- skip |= VerifyFramebufferAndRenderPassLayouts(rp_version, cb_state, pRenderPassBegin,
+ skip |= log_msg(
+ device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+ HandleToUint64(render_pass_state->renderPass), "VUID-VkRenderPassBeginInfo-clearValueCount-00902",
+ "In %s the VkRenderPassBeginInfo struct has a clearValueCount of %u but there "
+ "must be at least %u entries in pClearValues array to account for the highest index attachment in "
+ "renderPass %s that uses VK_ATTACHMENT_LOAD_OP_CLEAR is %u. Note that the pClearValues array is indexed by "
+ "attachment number so even if some pClearValues entries between 0 and %u correspond to attachments "
+ "that aren't cleared they will be ignored.",
+ function_name, pRenderPassBegin->clearValueCount, clear_op_size,
+ device_data->report_data->FormatHandle(render_pass_state->renderPass).c_str(), clear_op_size, clear_op_size - 1);
+ }
+ skip |= VerifyRenderAreaBounds(device_data, pRenderPassBegin);
+ skip |= VerifyFramebufferAndRenderPassLayouts(device_data, rp_version, cb_state, pRenderPassBegin,
GetFramebufferState(pRenderPassBegin->framebuffer));
if (framebuffer->rp_state->renderPass != render_pass_state->renderPass) {
- skip |= ValidateRenderPassCompatibility("render pass", render_pass_state, "framebuffer", framebuffer->rp_state.get(),
- function_name, "VUID-VkRenderPassBeginInfo-renderPass-00904");
+ skip |= ValidateRenderPassCompatibility(device_data, "render pass", render_pass_state, "framebuffer",
+ framebuffer->rp_state.get(), function_name,
+ "VUID-VkRenderPassBeginInfo-renderPass-00904");
}
vuid = use_rp2 ? "VUID-vkCmdBeginRenderPass2KHR-renderpass" : "VUID-vkCmdBeginRenderPass-renderpass";
- skip |= InsideRenderPass(cb_state, function_name, vuid);
- skip |= ValidateDependencies(framebuffer, render_pass_state);
+ skip |= InsideRenderPass(device_data, cb_state, function_name, vuid);
+ skip |= ValidateDependencies(device_data, framebuffer, render_pass_state);
vuid = use_rp2 ? "VUID-vkCmdBeginRenderPass2KHR-bufferlevel" : "VUID-vkCmdBeginRenderPass-bufferlevel";
- skip |= ValidatePrimaryCommandBuffer(cb_state, function_name, vuid);
+ skip |= ValidatePrimaryCommandBuffer(device_data, cb_state, function_name, vuid);
vuid = use_rp2 ? "VUID-vkCmdBeginRenderPass2KHR-commandBuffer-cmdpool" : "VUID-vkCmdBeginRenderPass-commandBuffer-cmdpool";
- skip |= ValidateCmdQueueFlags(cb_state, function_name, VK_QUEUE_GRAPHICS_BIT, vuid);
+ skip |= ValidateCmdQueueFlags(device_data, cb_state, function_name, VK_QUEUE_GRAPHICS_BIT, vuid);
const CMD_TYPE cmd_type = use_rp2 ? CMD_BEGINRENDERPASS2KHR : CMD_BEGINRENDERPASS;
- skip |= ValidateCmd(cb_state, cmd_type, function_name);
- }
-
- auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupRenderPassBeginInfo>(pRenderPassBegin->pNext);
- if (chained_device_group_struct) {
- skip |= ValidateDeviceMaskToPhysicalDeviceCount(
- chained_device_group_struct->deviceMask, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
- HandleToUint64(pRenderPassBegin->renderPass), "VUID-VkDeviceGroupRenderPassBeginInfo-deviceMask-00905");
- skip |= ValidateDeviceMaskToZero(chained_device_group_struct->deviceMask, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
- HandleToUint64(pRenderPassBegin->renderPass),
- "VUID-VkDeviceGroupRenderPassBeginInfo-deviceMask-00906");
- skip |= ValidateDeviceMaskToCommandBuffer(
- cb_state, chained_device_group_struct->deviceMask, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
- HandleToUint64(pRenderPassBegin->renderPass), "VUID-VkDeviceGroupRenderPassBeginInfo-deviceMask-00907");
-
- if (chained_device_group_struct->deviceRenderAreaCount != 0 &&
- chained_device_group_struct->deviceRenderAreaCount != physical_device_count) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
- HandleToUint64(pRenderPassBegin->renderPass),
- "VUID-VkDeviceGroupRenderPassBeginInfo-deviceRenderAreaCount-00908",
- "deviceRenderAreaCount[%" PRIu32 "] is invaild. Physical device count is %" PRIu32 ".",
- chained_device_group_struct->deviceRenderAreaCount, physical_device_count);
- }
+ skip |= ValidateCmd(device_data, cb_state, cmd_type, function_name);
}
return skip;
}
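// A hedged, application-side sketch of the clearValueCount rule reported above: if the
// highest-indexed attachment using VK_ATTACHMENT_LOAD_OP_CLEAR is attachment 2, pClearValues
// needs at least three entries, even when attachments 0 and 1 are not cleared (their entries
// are simply ignored). 'cb', 'render_pass', and 'framebuffer' are assumed to be valid handles.
#include <vulkan/vulkan.h>

static void BeginRenderPassWithClears(VkCommandBuffer cb, VkRenderPass render_pass,
                                      VkFramebuffer framebuffer, VkExtent2D extent) {
    VkClearValue clears[3] = {};                   // indexed by attachment number; only [2] is consumed
    clears[2].color = {{0.0f, 0.0f, 0.0f, 1.0f}};

    VkRenderPassBeginInfo begin = {};
    begin.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
    begin.renderPass = render_pass;
    begin.framebuffer = framebuffer;
    begin.renderArea = {{0, 0}, extent};
    begin.clearValueCount = 3;                     // highest cleared attachment index (2) + 1
    begin.pClearValues = clears;
    vkCmdBeginRenderPass(cb, &begin, VK_SUBPASS_CONTENTS_INLINE);
}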
bool CoreChecks::PreCallValidateCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
VkSubpassContents contents) {
- bool skip = ValidateCmdBeginRenderPass(commandBuffer, RENDER_PASS_VERSION_1, pRenderPassBegin);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ bool skip = ValidateCmdBeginRenderPass(device_data, commandBuffer, RENDER_PASS_VERSION_1, pRenderPassBegin);
return skip;
}
bool CoreChecks::PreCallValidateCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
const VkSubpassBeginInfoKHR *pSubpassBeginInfo) {
- bool skip = ValidateCmdBeginRenderPass(commandBuffer, RENDER_PASS_VERSION_2, pRenderPassBegin);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ bool skip = ValidateCmdBeginRenderPass(device_data, commandBuffer, RENDER_PASS_VERSION_2, pRenderPassBegin);
return skip;
}
-void ValidationStateTracker::RecordCmdBeginRenderPassState(VkCommandBuffer commandBuffer,
- const VkRenderPassBeginInfo *pRenderPassBegin,
- const VkSubpassContents contents) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+void CoreChecks::RecordCmdBeginRenderPassState(layer_data *device_data, VkCommandBuffer commandBuffer,
+ const VkRenderPassBeginInfo *pRenderPassBegin, const VkSubpassContents contents) {
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
auto render_pass_state = pRenderPassBegin ? GetRenderPassState(pRenderPassBegin->renderPass) : nullptr;
auto framebuffer = pRenderPassBegin ? GetFramebufferState(pRenderPassBegin->framebuffer) : nullptr;
@@ -11249,57 +10312,30 @@ void ValidationStateTracker::RecordCmdBeginRenderPassState(VkCommandBuffer comma
cb_state->activeSubpassContents = contents;
cb_state->framebuffers.insert(pRenderPassBegin->framebuffer);
// Connect this framebuffer and its children to this cmdBuffer
- AddFramebufferBinding(cb_state, framebuffer);
+ AddFramebufferBinding(device_data, cb_state, framebuffer);
// Connect this RP to cmdBuffer
AddCommandBufferBinding(&render_pass_state->cb_bindings,
- VulkanTypedHandle(render_pass_state->renderPass, kVulkanObjectTypeRenderPass), cb_state);
-
- auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupRenderPassBeginInfo>(pRenderPassBegin->pNext);
- if (chained_device_group_struct) {
- cb_state->active_render_pass_device_mask = chained_device_group_struct->deviceMask;
- } else {
- cb_state->active_render_pass_device_mask = cb_state->initial_device_mask;
- }
- }
-}
-
-void CoreChecks::RecordCmdBeginRenderPassLayouts(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
- const VkSubpassContents contents) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
- auto render_pass_state = pRenderPassBegin ? GetRenderPassState(pRenderPassBegin->renderPass) : nullptr;
- auto framebuffer = pRenderPassBegin ? GetFramebufferState(pRenderPassBegin->framebuffer) : nullptr;
- if (render_pass_state) {
+ {HandleToUint64(render_pass_state->renderPass), kVulkanObjectTypeRenderPass}, cb_state);
// transition attachments to the correct layouts for beginning of renderPass and first subpass
- TransitionBeginRenderPassLayouts(cb_state, render_pass_state, framebuffer);
+ TransitionBeginRenderPassLayouts(device_data, cb_state, render_pass_state, framebuffer);
}
}
-void ValidationStateTracker::PreCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer,
- const VkRenderPassBeginInfo *pRenderPassBegin,
- VkSubpassContents contents) {
- RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, contents);
-}
-
void CoreChecks::PreCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
VkSubpassContents contents) {
- StateTracker::PreCallRecordCmdBeginRenderPass(commandBuffer, pRenderPassBegin, contents);
- RecordCmdBeginRenderPassLayouts(commandBuffer, pRenderPassBegin, contents);
-}
-
-void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer,
- const VkRenderPassBeginInfo *pRenderPassBegin,
- const VkSubpassBeginInfoKHR *pSubpassBeginInfo) {
- RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ RecordCmdBeginRenderPassState(device_data, commandBuffer, pRenderPassBegin, contents);
}
void CoreChecks::PreCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
const VkSubpassBeginInfoKHR *pSubpassBeginInfo) {
- StateTracker::PreCallRecordCmdBeginRenderPass2KHR(commandBuffer, pRenderPassBegin, pSubpassBeginInfo);
- RecordCmdBeginRenderPassLayouts(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ RecordCmdBeginRenderPassState(device_data, commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
}
-bool CoreChecks::ValidateCmdNextSubpass(RenderPassCreateVersion rp_version, VkCommandBuffer commandBuffer) const {
- const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+bool CoreChecks::ValidateCmdNextSubpass(layer_data *device_data, RenderPassCreateVersion rp_version,
+ VkCommandBuffer commandBuffer) {
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
assert(cb_state);
bool skip = false;
const bool use_rp2 = (rp_version == RENDER_PASS_VERSION_2);
@@ -11307,69 +10343,58 @@ bool CoreChecks::ValidateCmdNextSubpass(RenderPassCreateVersion rp_version, VkCo
const char *const function_name = use_rp2 ? "vkCmdNextSubpass2KHR()" : "vkCmdNextSubpass()";
vuid = use_rp2 ? "VUID-vkCmdNextSubpass2KHR-bufferlevel" : "VUID-vkCmdNextSubpass-bufferlevel";
- skip |= ValidatePrimaryCommandBuffer(cb_state, function_name, vuid);
+ skip |= ValidatePrimaryCommandBuffer(device_data, cb_state, function_name, vuid);
vuid = use_rp2 ? "VUID-vkCmdNextSubpass2KHR-commandBuffer-cmdpool" : "VUID-vkCmdNextSubpass-commandBuffer-cmdpool";
- skip |= ValidateCmdQueueFlags(cb_state, function_name, VK_QUEUE_GRAPHICS_BIT, vuid);
+ skip |= ValidateCmdQueueFlags(device_data, cb_state, function_name, VK_QUEUE_GRAPHICS_BIT, vuid);
const CMD_TYPE cmd_type = use_rp2 ? CMD_NEXTSUBPASS2KHR : CMD_NEXTSUBPASS;
- skip |= ValidateCmd(cb_state, cmd_type, function_name);
+ skip |= ValidateCmd(device_data, cb_state, cmd_type, function_name);
vuid = use_rp2 ? "VUID-vkCmdNextSubpass2KHR-renderpass" : "VUID-vkCmdNextSubpass-renderpass";
- skip |= OutsideRenderPass(cb_state, function_name, vuid);
+ skip |= OutsideRenderPass(device_data, cb_state, function_name, vuid);
auto subpassCount = cb_state->activeRenderPass->createInfo.subpassCount;
if (cb_state->activeSubpass == subpassCount - 1) {
vuid = use_rp2 ? "VUID-vkCmdNextSubpass2KHR-None-03102" : "VUID-vkCmdNextSubpass-None-00909";
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(commandBuffer), vuid, "%s: Attempted to advance beyond final subpass.", function_name);
}
return skip;
}
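// Hedged sketch of the "advance beyond final subpass" case reported above: a render pass with
// two subpasses allows exactly one vkCmdNextSubpass call, so the second call below is flagged
// (VUID-vkCmdNextSubpass-None-00909). 'cb' and 'rp_begin' (for a two-subpass render pass) are
// assumed to be valid.
#include <vulkan/vulkan.h>

static void AdvancePastFinalSubpass(VkCommandBuffer cb, const VkRenderPassBeginInfo *rp_begin) {
    vkCmdBeginRenderPass(cb, rp_begin, VK_SUBPASS_CONTENTS_INLINE);   // subpass 0
    vkCmdNextSubpass(cb, VK_SUBPASS_CONTENTS_INLINE);                 // -> subpass 1 (the last one)
    vkCmdNextSubpass(cb, VK_SUBPASS_CONTENTS_INLINE);                 // flagged: there is no subpass 2
    vkCmdEndRenderPass(cb);
}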
bool CoreChecks::PreCallValidateCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
- return ValidateCmdNextSubpass(RENDER_PASS_VERSION_1, commandBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ return ValidateCmdNextSubpass(device_data, RENDER_PASS_VERSION_1, commandBuffer);
}
bool CoreChecks::PreCallValidateCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
const VkSubpassEndInfoKHR *pSubpassEndInfo) {
- return ValidateCmdNextSubpass(RENDER_PASS_VERSION_2, commandBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ return ValidateCmdNextSubpass(device_data, RENDER_PASS_VERSION_2, commandBuffer);
}
-void ValidationStateTracker::RecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+void CoreChecks::RecordCmdNextSubpass(layer_data *device_data, VkCommandBuffer commandBuffer, VkSubpassContents contents) {
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
cb_state->activeSubpass++;
cb_state->activeSubpassContents = contents;
-}
-
-void CoreChecks::RecordCmdNextSubpassLayouts(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
- TransitionSubpassLayouts(cb_state, cb_state->activeRenderPass, cb_state->activeSubpass,
+ TransitionSubpassLayouts(device_data, cb_state, cb_state->activeRenderPass, cb_state->activeSubpass,
GetFramebufferState(cb_state->activeRenderPassBeginInfo.framebuffer));
}
-void ValidationStateTracker::PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
- RecordCmdNextSubpass(commandBuffer, contents);
-}
-
void CoreChecks::PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
- StateTracker::PostCallRecordCmdNextSubpass(commandBuffer, contents);
- RecordCmdNextSubpassLayouts(commandBuffer, contents);
-}
-
-void ValidationStateTracker::PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer,
- const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
- const VkSubpassEndInfoKHR *pSubpassEndInfo) {
- RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ RecordCmdNextSubpass(device_data, commandBuffer, contents);
}
void CoreChecks::PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
const VkSubpassEndInfoKHR *pSubpassEndInfo) {
- StateTracker::PostCallRecordCmdNextSubpass2KHR(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo);
- RecordCmdNextSubpassLayouts(commandBuffer, pSubpassBeginInfo->contents);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ RecordCmdNextSubpass(device_data, commandBuffer, pSubpassBeginInfo->contents);
}
-bool CoreChecks::ValidateCmdEndRenderPass(RenderPassCreateVersion rp_version, VkCommandBuffer commandBuffer) const {
- const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+bool CoreChecks::ValidateCmdEndRenderPass(layer_data *device_data, RenderPassCreateVersion rp_version,
+ VkCommandBuffer commandBuffer) {
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
assert(cb_state);
bool skip = false;
const bool use_rp2 = (rp_version == RENDER_PASS_VERSION_2);
@@ -11380,70 +10405,58 @@ bool CoreChecks::ValidateCmdEndRenderPass(RenderPassCreateVersion rp_version, Vk
if (rp_state) {
if (cb_state->activeSubpass != rp_state->createInfo.subpassCount - 1) {
vuid = use_rp2 ? "VUID-vkCmdEndRenderPass2KHR-None-03103" : "VUID-vkCmdEndRenderPass-None-00910";
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(commandBuffer), vuid, "%s: Called before reaching final subpass.", function_name);
}
}
vuid = use_rp2 ? "VUID-vkCmdEndRenderPass2KHR-renderpass" : "VUID-vkCmdEndRenderPass-renderpass";
- skip |= OutsideRenderPass(cb_state, function_name, vuid);
+ skip |= OutsideRenderPass(device_data, cb_state, function_name, vuid);
vuid = use_rp2 ? "VUID-vkCmdEndRenderPass2KHR-bufferlevel" : "VUID-vkCmdEndRenderPass-bufferlevel";
- skip |= ValidatePrimaryCommandBuffer(cb_state, function_name, vuid);
+ skip |= ValidatePrimaryCommandBuffer(device_data, cb_state, function_name, vuid);
vuid = use_rp2 ? "VUID-vkCmdEndRenderPass2KHR-commandBuffer-cmdpool" : "VUID-vkCmdEndRenderPass-commandBuffer-cmdpool";
- skip |= ValidateCmdQueueFlags(cb_state, function_name, VK_QUEUE_GRAPHICS_BIT, vuid);
+ skip |= ValidateCmdQueueFlags(device_data, cb_state, function_name, VK_QUEUE_GRAPHICS_BIT, vuid);
const CMD_TYPE cmd_type = use_rp2 ? CMD_ENDRENDERPASS2KHR : CMD_ENDRENDERPASS;
- skip |= ValidateCmd(cb_state, cmd_type, function_name);
+ skip |= ValidateCmd(device_data, cb_state, cmd_type, function_name);
return skip;
}
bool CoreChecks::PreCallValidateCmdEndRenderPass(VkCommandBuffer commandBuffer) {
- bool skip = ValidateCmdEndRenderPass(RENDER_PASS_VERSION_1, commandBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ bool skip = ValidateCmdEndRenderPass(device_data, RENDER_PASS_VERSION_1, commandBuffer);
return skip;
}
bool CoreChecks::PreCallValidateCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR *pSubpassEndInfo) {
- bool skip = ValidateCmdEndRenderPass(RENDER_PASS_VERSION_2, commandBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ bool skip = ValidateCmdEndRenderPass(device_data, RENDER_PASS_VERSION_2, commandBuffer);
return skip;
}
-void ValidationStateTracker::RecordCmdEndRenderPassState(VkCommandBuffer commandBuffer) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+void CoreChecks::RecordCmdEndRenderPassState(layer_data *device_data, VkCommandBuffer commandBuffer) {
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
+ FRAMEBUFFER_STATE *framebuffer = GetFramebufferState(cb_state->activeFramebuffer);
+ TransitionFinalSubpassLayouts(device_data, cb_state, &cb_state->activeRenderPassBeginInfo, framebuffer);
cb_state->activeRenderPass = nullptr;
cb_state->activeSubpass = 0;
cb_state->activeFramebuffer = VK_NULL_HANDLE;
}
-void CoreChecks::RecordCmdEndRenderPassLayouts(VkCommandBuffer commandBuffer) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
- FRAMEBUFFER_STATE *framebuffer = GetFramebufferState(cb_state->activeFramebuffer);
- TransitionFinalSubpassLayouts(cb_state, &cb_state->activeRenderPassBeginInfo, framebuffer);
-}
-
-void ValidationStateTracker::PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) {
- RecordCmdEndRenderPassState(commandBuffer);
-}
-
void CoreChecks::PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) {
- // Record the end at the CoreLevel to ensure StateTracker cleanup doesn't step on anything we need.
- RecordCmdEndRenderPassLayouts(commandBuffer);
- StateTracker::PostCallRecordCmdEndRenderPass(commandBuffer);
-}
-
-void ValidationStateTracker::PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer,
- const VkSubpassEndInfoKHR *pSubpassEndInfo) {
- RecordCmdEndRenderPassState(commandBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ RecordCmdEndRenderPassState(device_data, commandBuffer);
}
void CoreChecks::PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR *pSubpassEndInfo) {
- StateTracker::PostCallRecordCmdEndRenderPass2KHR(commandBuffer, pSubpassEndInfo);
- RecordCmdEndRenderPassLayouts(commandBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ RecordCmdEndRenderPassState(device_data, commandBuffer);
}
-bool CoreChecks::ValidateFramebuffer(VkCommandBuffer primaryBuffer, const CMD_BUFFER_STATE *pCB, VkCommandBuffer secondaryBuffer,
- const CMD_BUFFER_STATE *pSubCB, const char *caller) {
+bool CoreChecks::ValidateFramebuffer(layer_data *dev_data, VkCommandBuffer primaryBuffer, const GLOBAL_CB_NODE *pCB,
+ VkCommandBuffer secondaryBuffer, const GLOBAL_CB_NODE *pSubCB, const char *caller) {
bool skip = false;
if (!pSubCB->beginInfo.pInheritanceInfo) {
return skip;
@@ -11452,70 +10465,72 @@ bool CoreChecks::ValidateFramebuffer(VkCommandBuffer primaryBuffer, const CMD_BU
VkFramebuffer secondary_fb = pSubCB->beginInfo.pInheritanceInfo->framebuffer;
if (secondary_fb != VK_NULL_HANDLE) {
if (primary_fb != secondary_fb) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(primaryBuffer), "VUID-vkCmdExecuteCommands-pCommandBuffers-00099",
- "vkCmdExecuteCommands() called w/ invalid secondary %s which has a %s"
- " that is not the same as the primary command buffer's current active %s.",
- report_data->FormatHandle(secondaryBuffer).c_str(), report_data->FormatHandle(secondary_fb).c_str(),
- report_data->FormatHandle(primary_fb).c_str());
+ "vkCmdExecuteCommands() called w/ invalid secondary command buffer %s which has a framebuffer %s"
+ " that is not the same as the primary command buffer's current active framebuffer %s.",
+ dev_data->report_data->FormatHandle(secondaryBuffer).c_str(),
+ dev_data->report_data->FormatHandle(secondary_fb).c_str(),
+ dev_data->report_data->FormatHandle(primary_fb).c_str());
}
auto fb = GetFramebufferState(secondary_fb);
if (!fb) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(primaryBuffer), kVUID_Core_DrawState_InvalidSecondaryCommandBuffer,
- "vkCmdExecuteCommands() called w/ invalid %s which has invalid %s.",
- report_data->FormatHandle(secondaryBuffer).c_str(), report_data->FormatHandle(secondary_fb).c_str());
+ "vkCmdExecuteCommands() called w/ invalid Cmd Buffer %s which has invalid framebuffer %s.",
+ dev_data->report_data->FormatHandle(secondaryBuffer).c_str(),
+ dev_data->report_data->FormatHandle(secondary_fb).c_str());
return skip;
}
}
return skip;
}
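
ValidateFramebuffer above reduces to one comparison: if the secondary command buffer inherited a framebuffer through VkCommandBufferInheritanceInfo, that handle must equal the primary's currently active framebuffer. The following standalone sketch shows only that comparison; CmdBufSketch and FakeHandle are stand-ins for the layer's command-buffer state, not the actual structures.

#include <cstdint>
#include <cstdio>

using FakeHandle = std::uint64_t;      // stand-in for VkFramebuffer
constexpr FakeHandle kNullHandle = 0;  // stand-in for VK_NULL_HANDLE

struct CmdBufSketch {
    FakeHandle active_framebuffer;     // primary: currently bound framebuffer
    FakeHandle inherited_framebuffer;  // secondary: pInheritanceInfo->framebuffer (or null)
};

static bool ValidateFramebufferSketch(const CmdBufSketch &primary, const CmdBufSketch &secondary) {
    bool skip = false;
    // A secondary may leave the framebuffer unspecified (VK_NULL_HANDLE); only a non-null,
    // mismatching handle trips VUID-vkCmdExecuteCommands-pCommandBuffers-00099.
    if (secondary.inherited_framebuffer != kNullHandle &&
        secondary.inherited_framebuffer != primary.active_framebuffer) {
        std::printf("secondary inherits framebuffer 0x%llx but primary's active framebuffer is 0x%llx\n",
                    (unsigned long long)secondary.inherited_framebuffer,
                    (unsigned long long)primary.active_framebuffer);
        skip = true;
    }
    return skip;
}

int main() {
    CmdBufSketch primary{2, kNullHandle}, secondary{kNullHandle, 3};
    return ValidateFramebufferSketch(primary, secondary) ? 1 : 0;
}
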
-bool CoreChecks::ValidateSecondaryCommandBufferState(const CMD_BUFFER_STATE *pCB, const CMD_BUFFER_STATE *pSubCB) {
+bool CoreChecks::ValidateSecondaryCommandBufferState(layer_data *dev_data, GLOBAL_CB_NODE *pCB, GLOBAL_CB_NODE *pSubCB) {
bool skip = false;
unordered_set<int> activeTypes;
- if (!disabled.query_validation) {
- for (auto queryObject : pCB->activeQueries) {
- auto query_pool_state = GetQueryPoolState(queryObject.pool);
- if (query_pool_state) {
- if (query_pool_state->createInfo.queryType == VK_QUERY_TYPE_PIPELINE_STATISTICS &&
- pSubCB->beginInfo.pInheritanceInfo) {
- VkQueryPipelineStatisticFlags cmdBufStatistics = pSubCB->beginInfo.pInheritanceInfo->pipelineStatistics;
- if ((cmdBufStatistics & query_pool_state->createInfo.pipelineStatistics) != cmdBufStatistics) {
- skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(pCB->commandBuffer), "VUID-vkCmdExecuteCommands-commandBuffer-00104",
- "vkCmdExecuteCommands() called w/ invalid %s which has invalid active %s"
- ". Pipeline statistics is being queried so the command buffer must have all bits set on the queryPool.",
- report_data->FormatHandle(pCB->commandBuffer).c_str(),
- report_data->FormatHandle(queryObject.pool).c_str());
- }
+ for (auto queryObject : pCB->activeQueries) {
+ auto queryPoolData = dev_data->queryPoolMap.find(queryObject.pool);
+ if (queryPoolData != dev_data->queryPoolMap.end()) {
+ if (queryPoolData->second.createInfo.queryType == VK_QUERY_TYPE_PIPELINE_STATISTICS &&
+ pSubCB->beginInfo.pInheritanceInfo) {
+ VkQueryPipelineStatisticFlags cmdBufStatistics = pSubCB->beginInfo.pInheritanceInfo->pipelineStatistics;
+ if ((cmdBufStatistics & queryPoolData->second.createInfo.pipelineStatistics) != cmdBufStatistics) {
+ skip |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ HandleToUint64(pCB->commandBuffer), "VUID-vkCmdExecuteCommands-commandBuffer-00104",
+ "vkCmdExecuteCommands() called w/ invalid Cmd Buffer %s which has invalid active query pool %s"
+ ". Pipeline statistics is being queried so the command buffer must have all bits set on the queryPool.",
+ dev_data->report_data->FormatHandle(pCB->commandBuffer).c_str(),
+ dev_data->report_data->FormatHandle(queryPoolData->first).c_str());
}
- activeTypes.insert(query_pool_state->createInfo.queryType);
}
+ activeTypes.insert(queryPoolData->second.createInfo.queryType);
}
- for (auto queryObject : pSubCB->startedQueries) {
- auto query_pool_state = GetQueryPoolState(queryObject.pool);
- if (query_pool_state && activeTypes.count(query_pool_state->createInfo.queryType)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(pCB->commandBuffer), kVUID_Core_DrawState_InvalidSecondaryCommandBuffer,
- "vkCmdExecuteCommands() called w/ invalid %s which has invalid active %s"
- " of type %d but a query of that type has been started on secondary %s.",
- report_data->FormatHandle(pCB->commandBuffer).c_str(),
- report_data->FormatHandle(queryObject.pool).c_str(), query_pool_state->createInfo.queryType,
- report_data->FormatHandle(pSubCB->commandBuffer).c_str());
- }
+ }
+ for (auto queryObject : pSubCB->startedQueries) {
+ auto queryPoolData = dev_data->queryPoolMap.find(queryObject.pool);
+ if (queryPoolData != dev_data->queryPoolMap.end() && activeTypes.count(queryPoolData->second.createInfo.queryType)) {
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ HandleToUint64(pCB->commandBuffer), kVUID_Core_DrawState_InvalidSecondaryCommandBuffer,
+ "vkCmdExecuteCommands() called w/ invalid Cmd Buffer %s which has invalid active query pool %s"
+ " of type %d but a query of that type has been started on secondary Cmd Buffer %s.",
+ dev_data->report_data->FormatHandle(pCB->commandBuffer).c_str(),
+ dev_data->report_data->FormatHandle(queryPoolData->first).c_str(),
+ queryPoolData->second.createInfo.queryType,
+ dev_data->report_data->FormatHandle(pSubCB->commandBuffer).c_str());
}
}
- auto primary_pool = GetCommandPoolState(pCB->createInfo.commandPool);
- auto secondary_pool = GetCommandPoolState(pSubCB->createInfo.commandPool);
+
+ auto primary_pool = GetCommandPoolNode(pCB->createInfo.commandPool);
+ auto secondary_pool = GetCommandPoolNode(pSubCB->createInfo.commandPool);
if (primary_pool && secondary_pool && (primary_pool->queueFamilyIndex != secondary_pool->queueFamilyIndex)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(pSubCB->commandBuffer), kVUID_Core_DrawState_InvalidQueueFamily,
- "vkCmdExecuteCommands(): Primary %s created in queue family %d has secondary "
+ "vkCmdExecuteCommands(): Primary command buffer %s created in queue family %d has secondary command buffer "
"%s created in queue family %d.",
- report_data->FormatHandle(pCB->commandBuffer).c_str(), primary_pool->queueFamilyIndex,
- report_data->FormatHandle(pSubCB->commandBuffer).c_str(), secondary_pool->queueFamilyIndex);
+ dev_data->report_data->FormatHandle(pCB->commandBuffer).c_str(), primary_pool->queueFamilyIndex,
+ dev_data->report_data->FormatHandle(pSubCB->commandBuffer).c_str(), secondary_pool->queueFamilyIndex);
}
return skip;
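
The pipeline-statistics check in ValidateSecondaryCommandBufferState boils down to a bit-subset test of the form (a & b) == a: the secondary command buffer's inherited pipelineStatistics flags are compared against the active query pool's flags before flagging VUID-vkCmdExecuteCommands-commandBuffer-00104. A tiny sketch of that test; the flag values here are arbitrary stand-ins, not real VkQueryPipelineStatisticFlagBits.

#include <cassert>
#include <cstdint>

using StatisticFlags = std::uint32_t;  // stand-in for VkQueryPipelineStatisticFlags

// True when every bit set in 'required' is also set in 'available' -- the same
// (required & available) != required comparison used above, inverted.
static bool IsSubset(StatisticFlags required, StatisticFlags available) {
    return (required & available) == required;
}

int main() {
    assert(IsSubset(0x3, 0x7));   // ok: all requested statistics are available
    assert(!IsSubset(0x5, 0x3));  // flagged: one requested bit is not available
    return 0;
}
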
@@ -11523,54 +10538,44 @@ bool CoreChecks::ValidateSecondaryCommandBufferState(const CMD_BUFFER_STATE *pCB
bool CoreChecks::PreCallValidateCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBuffersCount,
const VkCommandBuffer *pCommandBuffers) {
- const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
assert(cb_state);
bool skip = false;
- const CMD_BUFFER_STATE *sub_cb_state = NULL;
- std::unordered_set<const CMD_BUFFER_STATE *> linked_command_buffers;
+ GLOBAL_CB_NODE *sub_cb_state = NULL;
+ std::unordered_set<GLOBAL_CB_NODE *> linked_command_buffers = cb_state->linkedCommandBuffers;
for (uint32_t i = 0; i < commandBuffersCount; i++) {
- sub_cb_state = GetCBState(pCommandBuffers[i]);
+ sub_cb_state = GetCBNode(pCommandBuffers[i]);
assert(sub_cb_state);
if (VK_COMMAND_BUFFER_LEVEL_PRIMARY == sub_cb_state->createInfo.level) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(pCommandBuffers[i]), "VUID-vkCmdExecuteCommands-pCommandBuffers-00088",
- "vkCmdExecuteCommands() called w/ Primary %s in element %u of pCommandBuffers array. All "
+ "vkCmdExecuteCommands() called w/ Primary Cmd Buffer %s in element %u of pCommandBuffers array. All "
"cmd buffers in pCommandBuffers array must be secondary.",
- report_data->FormatHandle(pCommandBuffers[i]).c_str(), i);
- } else if (VK_COMMAND_BUFFER_LEVEL_SECONDARY == sub_cb_state->createInfo.level) {
+ device_data->report_data->FormatHandle(pCommandBuffers[i]).c_str(), i);
+ } else if (cb_state->activeRenderPass) { // Secondary CB w/i RenderPass must have *CONTINUE_BIT set
if (sub_cb_state->beginInfo.pInheritanceInfo != nullptr) {
- const auto secondary_rp_state = GetRenderPassState(sub_cb_state->beginInfo.pInheritanceInfo->renderPass);
- if (cb_state->activeRenderPass &&
- !(sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(pCommandBuffers[i]), "VUID-vkCmdExecuteCommands-pCommandBuffers-00096",
- "vkCmdExecuteCommands(): Secondary %s is executed within a %s "
- "instance scope, but the Secondary Command Buffer does not have the "
- "VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT set in VkCommandBufferBeginInfo::flags when "
- "the vkBeginCommandBuffer() was called.",
- report_data->FormatHandle(pCommandBuffers[i]).c_str(),
- report_data->FormatHandle(cb_state->activeRenderPass->renderPass).c_str());
- } else if (!cb_state->activeRenderPass &&
- (sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(pCommandBuffers[i]), "VUID-vkCmdExecuteCommands-pCommandBuffers-00100",
- "vkCmdExecuteCommands(): Secondary %s is executed outside a render pass "
- "instance scope, but the Secondary Command Buffer does have the "
- "VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT set in VkCommandBufferBeginInfo::flags when "
- "the vkBeginCommandBuffer() was called.",
- report_data->FormatHandle(pCommandBuffers[i]).c_str());
- } else if (cb_state->activeRenderPass &&
- (sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
+ auto secondary_rp_state = GetRenderPassState(sub_cb_state->beginInfo.pInheritanceInfo->renderPass);
+ if (!(sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, HandleToUint64(pCommandBuffers[i]),
+ "VUID-vkCmdExecuteCommands-pCommandBuffers-00096",
+ "vkCmdExecuteCommands(): Secondary Command Buffer (%s) executed within render pass (%s) must "
+ "have had vkBeginCommandBuffer() called w/ "
+ "VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT set.",
+ device_data->report_data->FormatHandle(pCommandBuffers[i]).c_str(),
+ device_data->report_data->FormatHandle(cb_state->activeRenderPass->renderPass).c_str());
+ } else {
// Make sure render pass is compatible with parent command buffer pass if has continue
if (cb_state->activeRenderPass->renderPass != secondary_rp_state->renderPass) {
skip |= ValidateRenderPassCompatibility(
- "primary command buffer", cb_state->activeRenderPass, "secondary command buffer", secondary_rp_state,
- "vkCmdExecuteCommands()", "VUID-vkCmdExecuteCommands-pInheritanceInfo-00098");
+ device_data, "primary command buffer", cb_state->activeRenderPass, "secondary command buffer",
+ secondary_rp_state, "vkCmdExecuteCommands()", "VUID-vkCmdExecuteCommands-pInheritanceInfo-00098");
}
// If framebuffer for secondary CB is not NULL, then it must match active FB from primaryCB
- skip |=
- ValidateFramebuffer(commandBuffer, cb_state, pCommandBuffers[i], sub_cb_state, "vkCmdExecuteCommands()");
+ skip |= ValidateFramebuffer(device_data, commandBuffer, cb_state, pCommandBuffers[i], sub_cb_state,
+ "vkCmdExecuteCommands()");
if (!sub_cb_state->cmd_execute_commands_functions.empty()) {
// Inherit primary's activeFramebuffer and while running validate functions
for (auto &function : sub_cb_state->cmd_execute_commands_functions) {
@@ -11580,112 +10585,98 @@ bool CoreChecks::PreCallValidateCmdExecuteCommands(VkCommandBuffer commandBuffer
}
}
}
-
// TODO(mlentine): Move more logic into this method
- skip |= ValidateSecondaryCommandBufferState(cb_state, sub_cb_state);
- skip |= ValidateCommandBufferState(sub_cb_state, "vkCmdExecuteCommands()", 0,
+ skip |= ValidateSecondaryCommandBufferState(device_data, cb_state, sub_cb_state);
+ skip |= ValidateCommandBufferState(device_data, sub_cb_state, "vkCmdExecuteCommands()", 0,
"VUID-vkCmdExecuteCommands-pCommandBuffers-00089");
if (!(sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
- if (sub_cb_state->in_use.load()) {
- // TODO: Find some way to differentiate between the -00090 and -00091 conditions
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdExecuteCommands-pCommandBuffers-00090",
- "Cannot execute pending %s without VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT set.",
- report_data->FormatHandle(sub_cb_state->commandBuffer).c_str());
- }
- // We use an const_cast, because one cannot query a container keyed on a non-const pointer using a const pointer
- if (cb_state->linkedCommandBuffers.count(const_cast<CMD_BUFFER_STATE *>(sub_cb_state))) {
+ if (sub_cb_state->in_use.load() || linked_command_buffers.count(sub_cb_state)) {
skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdExecuteCommands-pCommandBuffers-00092",
- "Cannot execute %s without VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT set if previously executed in %s",
- report_data->FormatHandle(sub_cb_state->commandBuffer).c_str(),
- report_data->FormatHandle(cb_state->commandBuffer).c_str());
+ device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdExecuteCommands-pCommandBuffers-00090",
+ "Attempt to simultaneously execute command buffer %s without VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT set!",
+ device_data->report_data->FormatHandle(cb_state->commandBuffer).c_str());
}
-
- const auto insert_pair = linked_command_buffers.insert(sub_cb_state);
- if (!insert_pair.second) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdExecuteCommands-pCommandBuffers-00093",
- "Cannot duplicate %s in pCommandBuffers without VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT set.",
- report_data->FormatHandle(cb_state->commandBuffer).c_str());
- }
-
if (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT) {
// Warn that non-simultaneous secondary cmd buffer renders primary non-simultaneous
- skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(pCommandBuffers[i]), kVUID_Core_DrawState_InvalidCommandBufferSimultaneousUse,
- "vkCmdExecuteCommands(): Secondary %s does not have "
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, HandleToUint64(pCommandBuffers[i]),
+ kVUID_Core_DrawState_InvalidCommandBufferSimultaneousUse,
+ "vkCmdExecuteCommands(): Secondary Command Buffer (%s) does not have "
"VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT set and will cause primary "
- "%s to be treated as if it does not have "
+ "command buffer (%s) to be treated as if it does not have "
"VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT set, even though it does.",
- report_data->FormatHandle(pCommandBuffers[i]).c_str(),
- report_data->FormatHandle(cb_state->commandBuffer).c_str());
+ device_data->report_data->FormatHandle(pCommandBuffers[i]).c_str(),
+ device_data->report_data->FormatHandle(cb_state->commandBuffer).c_str());
}
}
- if (!cb_state->activeQueries.empty() && !enabled_features.core.inheritedQueries) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ if (!cb_state->activeQueries.empty() && !device_data->enabled_features.core.inheritedQueries) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(pCommandBuffers[i]), "VUID-vkCmdExecuteCommands-commandBuffer-00101",
- "vkCmdExecuteCommands(): Secondary %s cannot be submitted with a query in flight and "
+ "vkCmdExecuteCommands(): Secondary Command Buffer (%s) cannot be submitted with a query in flight and "
"inherited queries not supported on this device.",
- report_data->FormatHandle(pCommandBuffers[i]).c_str());
+ device_data->report_data->FormatHandle(pCommandBuffers[i]).c_str());
}
- // Validate initial layout uses vs. the primary cmd buffer state
+ // Propagate layout transitions to the primary cmd buffer
// Novel Valid usage: "UNASSIGNED-vkCmdExecuteCommands-commandBuffer-00001"
// initial layout usage of secondary command buffers resources must match parent command buffer
- const auto *const_cb_state = static_cast<const CMD_BUFFER_STATE *>(cb_state);
- for (const auto &sub_layout_map_entry : sub_cb_state->image_layout_map) {
- const auto image = sub_layout_map_entry.first;
- const auto *image_state = GetImageState(image);
- if (!image_state) continue; // Can't set layouts of a dead image
-
- const auto *cb_subres_map = GetImageSubresourceLayoutMap(const_cb_state, image);
- // Const getter can be null in which case we have nothing to check against for this image...
- if (!cb_subres_map) continue;
-
- const auto &sub_cb_subres_map = sub_layout_map_entry.second;
-        // Validate the initial_uses: that they match the current state of the primary cb, or, absent a current state,
-        // that they match any initial_layout.
- for (auto it_init = sub_cb_subres_map->BeginInitialUse(); !it_init.AtEnd(); ++it_init) {
- const auto &sub_layout = (*it_init).layout;
- if (VK_IMAGE_LAYOUT_UNDEFINED == sub_layout) continue; // secondary doesn't care about current or initial
- const auto &subresource = (*it_init).subresource;
- // Look up the current layout (if any)
- VkImageLayout cb_layout = cb_subres_map->GetSubresourceLayout(subresource);
- const char *layout_type = "current";
- if (cb_layout == kInvalidLayout) {
- // Find initial layout (if any)
- cb_layout = cb_subres_map->GetSubresourceInitialLayout(subresource);
- layout_type = "initial";
- }
- if ((cb_layout != kInvalidLayout) && (cb_layout != sub_layout)) {
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ for (const auto &ilm_entry : sub_cb_state->imageLayoutMap) {
+ auto cb_entry = cb_state->imageLayoutMap.find(ilm_entry.first);
+ if (cb_entry != cb_state->imageLayoutMap.end()) {
+                // For an exact ImageSubresourcePair match, validate and update the parent entry
+ if ((VK_IMAGE_LAYOUT_UNDEFINED != ilm_entry.second.initialLayout) &&
+ (cb_entry->second.layout != ilm_entry.second.initialLayout)) {
+ const VkImageSubresource &subresource = ilm_entry.first.subresource;
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(pCommandBuffers[i]), "UNASSIGNED-vkCmdExecuteCommands-commandBuffer-00001",
- "%s: Executed secondary command buffer using %s (subresource: aspectMask 0x%X array layer %u, "
- "mip level %u) which expects layout %s--instead, image %s layout is %s.",
- "vkCmdExecuteCommands():", report_data->FormatHandle(image).c_str(), subresource.aspectMask,
- subresource.arrayLayer, subresource.mipLevel, string_VkImageLayout(sub_layout), layout_type,
- string_VkImageLayout(cb_layout));
+ "%s: Executed secondary command buffer using image %s (subresource: aspectMask 0x%X array layer %u, "
+ "mip level %u) which expects layout %s--instead, image %s's current layout is %s.",
+ "vkCmdExecuteCommands():", device_data->report_data->FormatHandle(ilm_entry.first.image).c_str(),
+ subresource.aspectMask, subresource.arrayLayer, subresource.mipLevel,
+ string_VkImageLayout(ilm_entry.second.initialLayout),
+ device_data->report_data->FormatHandle(ilm_entry.first.image).c_str(),
+ string_VkImageLayout(cb_entry->second.layout));
+ }
+ } else {
+ // Look for partial matches (in aspectMask), and update or create parent map entry in SetLayout
+ assert(ilm_entry.first.hasSubresource);
+ IMAGE_CMD_BUF_LAYOUT_NODE node;
+ if (FindCmdBufLayout(device_data, cb_state, ilm_entry.first.image, ilm_entry.first.subresource, node)) {
+ if ((VK_IMAGE_LAYOUT_UNDEFINED != ilm_entry.second.initialLayout) &&
+ (node.layout != ilm_entry.second.initialLayout)) {
+ const VkImageSubresource &subresource = ilm_entry.first.subresource;
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, HandleToUint64(pCommandBuffers[i]),
+ "UNASSIGNED-vkCmdExecuteCommands-commandBuffer-00001",
+ "%s: Executed secondary command buffer using image %s (subresource: aspectMask 0x%X array layer "
+ "%u, mip level %u) which expects layout %s--instead, image %s's current layout is %s.",
+ "vkCmdExecuteCommands():", device_data->report_data->FormatHandle(ilm_entry.first.image).c_str(),
+ subresource.aspectMask, subresource.arrayLayer, subresource.mipLevel,
+ string_VkImageLayout(ilm_entry.second.initialLayout),
+ device_data->report_data->FormatHandle(ilm_entry.first.image).c_str(),
+ string_VkImageLayout(node.layout));
+ }
}
}
}
+ linked_command_buffers.insert(sub_cb_state);
}
-
- skip |= ValidatePrimaryCommandBuffer(cb_state, "vkCmdExecuteCommands()", "VUID-vkCmdExecuteCommands-bufferlevel");
- skip |= ValidateCmdQueueFlags(cb_state, "vkCmdExecuteCommands()",
+ skip |= ValidatePrimaryCommandBuffer(device_data, cb_state, "vkCmdExecuteCommands()", "VUID-vkCmdExecuteCommands-bufferlevel");
+ skip |= ValidateCmdQueueFlags(device_data, cb_state, "vkCmdExecuteCommands()",
VK_QUEUE_TRANSFER_BIT | VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
"VUID-vkCmdExecuteCommands-commandBuffer-cmdpool");
- skip |= ValidateCmd(cb_state, CMD_EXECUTECOMMANDS, "vkCmdExecuteCommands()");
+ skip |= ValidateCmd(device_data, cb_state, CMD_EXECUTECOMMANDS, "vkCmdExecuteCommands()");
return skip;
}
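
The layout-matching loop above enforces a per-(image, subresource) invariant: whatever initial layout a secondary command buffer assumed when it was recorded must equal the layout the primary has that subresource in at the point of vkCmdExecuteCommands, unless the secondary recorded VK_IMAGE_LAYOUT_UNDEFINED (meaning it makes no assumption). A standalone sketch of that comparison over simple maps; SubresourceKey and the Layout enum are stand-ins, not the layer's imageLayoutMap types.

#include <cstdio>
#include <map>
#include <string>
#include <tuple>

enum class Layout { Undefined, General, ColorAttachment, ShaderReadOnly };

struct SubresourceKey {  // stand-in for (VkImage, VkImageSubresource)
    std::string image;
    int mip;
    int layer;
    bool operator<(const SubresourceKey &o) const {
        return std::tie(image, mip, layer) < std::tie(o.image, o.mip, o.layer);
    }
};

static bool ValidateExecuteLayoutsSketch(const std::map<SubresourceKey, Layout> &primary_current,
                                         const std::map<SubresourceKey, Layout> &secondary_initial) {
    bool skip = false;
    for (const auto &entry : secondary_initial) {
        if (entry.second == Layout::Undefined) continue;  // the secondary made no assumption
        auto it = primary_current.find(entry.first);
        if (it == primary_current.end()) continue;        // the primary has no recorded layout yet
        if (it->second != entry.second) {
            std::printf("image %s mip %d layer %d: secondary expects layout %d, primary is in layout %d\n",
                        entry.first.image.c_str(), entry.first.mip, entry.first.layer,
                        static_cast<int>(entry.second), static_cast<int>(it->second));
            skip = true;
        }
    }
    return skip;
}

int main() {
    std::map<SubresourceKey, Layout> primary, secondary;
    primary[{"img0", 0, 0}] = Layout::General;           // layout the primary left the subresource in
    secondary[{"img0", 0, 0}] = Layout::ShaderReadOnly;  // layout the secondary was recorded against
    return ValidateExecuteLayoutsSketch(primary, secondary) ? 1 : 0;
}
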
-void ValidationStateTracker::PreCallRecordCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBuffersCount,
- const VkCommandBuffer *pCommandBuffers) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+void CoreChecks::PreCallRecordCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBuffersCount,
+ const VkCommandBuffer *pCommandBuffers) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
- CMD_BUFFER_STATE *sub_cb_state = NULL;
+ GLOBAL_CB_NODE *sub_cb_state = NULL;
for (uint32_t i = 0; i < commandBuffersCount; i++) {
- sub_cb_state = GetCBState(pCommandBuffers[i]);
+ sub_cb_state = GetCBNode(pCommandBuffers[i]);
assert(sub_cb_state);
if (!(sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
if (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT) {
@@ -11694,22 +10685,25 @@ void ValidationStateTracker::PreCallRecordCmdExecuteCommands(VkCommandBuffer com
cb_state->beginInfo.flags &= ~VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
}
}
-
-    // Propagate initial layout and current layout state to the primary cmd buffer
-    // NOTE: The update/population of the image_layout_map is done in CoreChecks, but for other classes derived from
-    // ValidationStateTracker these maps will be empty, so leaving the propagation in the state tracker should be a no-op
- // for those other classes.
- for (const auto &sub_layout_map_entry : sub_cb_state->image_layout_map) {
- const auto image = sub_layout_map_entry.first;
- const auto *image_state = GetImageState(image);
- if (!image_state) continue; // Can't set layouts of a dead image
-
- auto *cb_subres_map = GetImageSubresourceLayoutMap(cb_state, *image_state);
- const auto *sub_cb_subres_map = sub_layout_map_entry.second.get();
- assert(cb_subres_map && sub_cb_subres_map); // Non const get and map traversal should never be null
- cb_subres_map->UpdateFrom(*sub_cb_subres_map);
+ // Propagate layout transitions to the primary cmd buffer
+ // Novel Valid usage: "UNASSIGNED-vkCmdExecuteCommands-commandBuffer-00001"
+ // initial layout usage of secondary command buffers resources must match parent command buffer
+ for (const auto &ilm_entry : sub_cb_state->imageLayoutMap) {
+ auto cb_entry = cb_state->imageLayoutMap.find(ilm_entry.first);
+ if (cb_entry != cb_state->imageLayoutMap.end()) {
+                // For an exact ImageSubresourcePair match, update the parent entry
+ cb_entry->second.layout = ilm_entry.second.layout;
+ } else {
+ // Look for partial matches (in aspectMask), and update or create parent map entry in SetLayout
+ assert(ilm_entry.first.hasSubresource);
+ IMAGE_CMD_BUF_LAYOUT_NODE node;
+ if (!FindCmdBufLayout(device_data, cb_state, ilm_entry.first.image, ilm_entry.first.subresource, node)) {
+ node.initialLayout = ilm_entry.second.initialLayout;
+ }
+ node.layout = ilm_entry.second.layout;
+ SetLayout(device_data, cb_state, ilm_entry.first, node);
+ }
}
-
sub_cb_state->primaryCommandBuffer = cb_state->commandBuffer;
cb_state->linkedCommandBuffers.insert(sub_cb_state);
sub_cb_state->linkedCommandBuffers.insert(cb_state);
@@ -11724,43 +10718,49 @@ void ValidationStateTracker::PreCallRecordCmdExecuteCommands(VkCommandBuffer com
bool CoreChecks::PreCallValidateMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size,
VkFlags flags, void **ppData) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
bool skip = false;
- DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
+ DEVICE_MEM_INFO *mem_info = GetMemObjInfo(mem);
if (mem_info) {
- if ((phys_dev_mem_props.memoryTypes[mem_info->alloc_info.memoryTypeIndex].propertyFlags &
+ auto end_offset = (VK_WHOLE_SIZE == size) ? mem_info->alloc_info.allocationSize - 1 : offset + size - 1;
+ skip |= ValidateMapImageLayouts(device_data, device, mem_info, offset, end_offset);
+ if ((device_data->phys_dev_mem_props.memoryTypes[mem_info->alloc_info.memoryTypeIndex].propertyFlags &
VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0) {
- skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+ skip = log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
HandleToUint64(mem), "VUID-vkMapMemory-memory-00682",
- "Mapping Memory without VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT set: %s.",
- report_data->FormatHandle(mem).c_str());
+ "Mapping Memory without VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT set: mem obj %s.",
+ device_data->report_data->FormatHandle(mem).c_str());
}
}
- skip |= ValidateMapMemRange(mem, offset, size);
+ skip |= ValidateMapMemRange(device_data, mem, offset, size);
return skip;
}
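
PreCallValidateMapMemory checks two things: the allocation's memory type must advertise VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT (VUID-vkMapMemory-memory-00682), and the requested (offset, size) pair, with VK_WHOLE_SIZE meaning "to the end of the allocation", must fall inside the allocation (the range half is delegated to ValidateMapMemRange). A minimal sketch of both checks; MemAllocSketch and the flag constant are stand-ins, not the layer's DEVICE_MEM_INFO.

#include <cstdint>
#include <cstdio>

constexpr std::uint64_t kWholeSize = ~0ull;     // stand-in for VK_WHOLE_SIZE
constexpr std::uint32_t kHostVisibleBit = 0x2;  // stand-in flag bit

struct MemAllocSketch {
    std::uint64_t allocation_size;
    std::uint32_t property_flags;  // property flags of the allocation's memory type
};

static bool ValidateMapMemorySketch(const MemAllocSketch &mem, std::uint64_t offset, std::uint64_t size) {
    bool skip = false;
    if ((mem.property_flags & kHostVisibleBit) == 0) {
        std::printf("mapping memory that is not host-visible (VUID-vkMapMemory-memory-00682)\n");
        skip = true;
    }
    const std::uint64_t end = (size == kWholeSize) ? mem.allocation_size : offset + size;
    if (offset >= mem.allocation_size || end > mem.allocation_size) {
        std::printf("map range [%llu, %llu) exceeds allocation of size %llu\n",
                    (unsigned long long)offset, (unsigned long long)end,
                    (unsigned long long)mem.allocation_size);
        skip = true;
    }
    return skip;
}

int main() {
    MemAllocSketch mem{4096, kHostVisibleBit};
    return ValidateMapMemorySketch(mem, 0, kWholeSize) ? 1 : 0;
}
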
void CoreChecks::PostCallRecordMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size, VkFlags flags,
void **ppData, VkResult result) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (VK_SUCCESS != result) return;
// TODO : What's the point of this range? See comment on creating new "bound_range" above, which may replace this
- StoreMemRanges(mem, offset, size);
- InitializeAndTrackMemory(mem, offset, size, ppData);
+ StoreMemRanges(device_data, mem, offset, size);
+ InitializeAndTrackMemory(device_data, mem, offset, size, ppData);
}
bool CoreChecks::PreCallValidateUnmapMemory(VkDevice device, VkDeviceMemory mem) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
bool skip = false;
- auto mem_info = GetDevMemState(mem);
+ auto mem_info = GetMemObjInfo(mem);
if (mem_info && !mem_info->mem_range.size) {
// Valid Usage: memory must currently be mapped
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
- HandleToUint64(mem), "VUID-vkUnmapMemory-memory-00689", "Unmapping Memory without memory being mapped: %s.",
- report_data->FormatHandle(mem).c_str());
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+ HandleToUint64(mem), "VUID-vkUnmapMemory-memory-00689",
+ "Unmapping Memory without memory being mapped: mem obj %s.",
+ device_data->report_data->FormatHandle(mem).c_str());
}
return skip;
}
void CoreChecks::PreCallRecordUnmapMemory(VkDevice device, VkDeviceMemory mem) {
- auto mem_info = GetDevMemState(mem);
+ auto mem_info = GetMemObjInfo(mem);
mem_info->mem_range.size = 0;
if (mem_info->shadow_copy) {
free(mem_info->shadow_copy_base);
@@ -11769,19 +10769,20 @@ void CoreChecks::PreCallRecordUnmapMemory(VkDevice device, VkDeviceMemory mem) {
}
}
-bool CoreChecks::ValidateMemoryIsMapped(const char *funcName, uint32_t memRangeCount, const VkMappedMemoryRange *pMemRanges) {
+bool CoreChecks::ValidateMemoryIsMapped(layer_data *dev_data, const char *funcName, uint32_t memRangeCount,
+ const VkMappedMemoryRange *pMemRanges) {
bool skip = false;
for (uint32_t i = 0; i < memRangeCount; ++i) {
- auto mem_info = GetDevMemState(pMemRanges[i].memory);
+ auto mem_info = GetMemObjInfo(pMemRanges[i].memory);
if (mem_info) {
if (pMemRanges[i].size == VK_WHOLE_SIZE) {
if (mem_info->mem_range.offset > pMemRanges[i].offset) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
- HandleToUint64(pMemRanges[i].memory), "VUID-VkMappedMemoryRange-size-00686",
- "%s: Flush/Invalidate offset (" PRINTF_SIZE_T_SPECIFIER
- ") is less than Memory Object's offset (" PRINTF_SIZE_T_SPECIFIER ").",
- funcName, static_cast<size_t>(pMemRanges[i].offset),
- static_cast<size_t>(mem_info->mem_range.offset));
+ skip |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+ HandleToUint64(pMemRanges[i].memory), "VUID-VkMappedMemoryRange-size-00686",
+ "%s: Flush/Invalidate offset (" PRINTF_SIZE_T_SPECIFIER
+ ") is less than Memory Object's offset (" PRINTF_SIZE_T_SPECIFIER ").",
+ funcName, static_cast<size_t>(pMemRanges[i].offset), static_cast<size_t>(mem_info->mem_range.offset));
}
} else {
const uint64_t data_end = (mem_info->mem_range.size == VK_WHOLE_SIZE)
@@ -11789,12 +10790,13 @@ bool CoreChecks::ValidateMemoryIsMapped(const char *funcName, uint32_t memRangeC
: (mem_info->mem_range.offset + mem_info->mem_range.size);
if ((mem_info->mem_range.offset > pMemRanges[i].offset) ||
(data_end < (pMemRanges[i].offset + pMemRanges[i].size))) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
- HandleToUint64(pMemRanges[i].memory), "VUID-VkMappedMemoryRange-size-00685",
- "%s: Flush/Invalidate size or offset (" PRINTF_SIZE_T_SPECIFIER ", " PRINTF_SIZE_T_SPECIFIER
- ") exceed the Memory Object's upper-bound (" PRINTF_SIZE_T_SPECIFIER ").",
- funcName, static_cast<size_t>(pMemRanges[i].offset + pMemRanges[i].size),
- static_cast<size_t>(pMemRanges[i].offset), static_cast<size_t>(data_end));
+ skip |=
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+ HandleToUint64(pMemRanges[i].memory), "VUID-VkMappedMemoryRange-size-00685",
+ "%s: Flush/Invalidate size or offset (" PRINTF_SIZE_T_SPECIFIER ", " PRINTF_SIZE_T_SPECIFIER
+ ") exceed the Memory Object's upper-bound (" PRINTF_SIZE_T_SPECIFIER ").",
+ funcName, static_cast<size_t>(pMemRanges[i].offset + pMemRanges[i].size),
+ static_cast<size_t>(pMemRanges[i].offset), static_cast<size_t>(data_end));
}
}
}
@@ -11802,10 +10804,11 @@ bool CoreChecks::ValidateMemoryIsMapped(const char *funcName, uint32_t memRangeC
return skip;
}
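
ValidateMemoryIsMapped enforces that each flush/invalidate range lies inside the range the application actually mapped: the range's offset must not precede the mapped offset (VUID-VkMappedMemoryRange-size-00686), and for a bounded range its end must not pass the mapped end (VUID-VkMappedMemoryRange-size-00685), with VK_WHOLE_SIZE on either side meaning "to the end". A compact arithmetic sketch of that containment test with stand-in structures.

#include <cassert>
#include <cstdint>

constexpr std::uint64_t kWholeSize = ~0ull;  // stand-in for VK_WHOLE_SIZE

struct RangeSketch {
    std::uint64_t offset;
    std::uint64_t size;
};

// 'mapped' comes from vkMapMemory, 'requested' from a VkMappedMemoryRange;
// alloc_size resolves VK_WHOLE_SIZE on the mapped side.
static bool RangeIsMapped(const RangeSketch &mapped, const RangeSketch &requested, std::uint64_t alloc_size) {
    if (mapped.offset > requested.offset) return false;       // flush/invalidate starts before the mapping
    if (requested.size == kWholeSize) return true;            // only the offset rule applies
    const std::uint64_t mapped_end =
        (mapped.size == kWholeSize) ? alloc_size : mapped.offset + mapped.size;
    return requested.offset + requested.size <= mapped_end;   // flush/invalidate ends inside the mapping
}

int main() {
    assert(RangeIsMapped({0, kWholeSize}, {256, 512}, 4096));
    assert(!RangeIsMapped({1024, 1024}, {0, 512}, 4096));  // starts before the mapped offset
    return 0;
}
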
-bool CoreChecks::ValidateAndCopyNoncoherentMemoryToDriver(uint32_t mem_range_count, const VkMappedMemoryRange *mem_ranges) {
+bool CoreChecks::ValidateAndCopyNoncoherentMemoryToDriver(layer_data *dev_data, uint32_t mem_range_count,
+ const VkMappedMemoryRange *mem_ranges) {
bool skip = false;
for (uint32_t i = 0; i < mem_range_count; ++i) {
- auto mem_info = GetDevMemState(mem_ranges[i].memory);
+ auto mem_info = GetMemObjInfo(mem_ranges[i].memory);
if (mem_info) {
if (mem_info->shadow_copy) {
VkDeviceSize size = (mem_info->mem_range.size != VK_WHOLE_SIZE)
@@ -11814,18 +10817,18 @@ bool CoreChecks::ValidateAndCopyNoncoherentMemoryToDriver(uint32_t mem_range_cou
char *data = static_cast<char *>(mem_info->shadow_copy);
for (uint64_t j = 0; j < mem_info->shadow_pad_size; ++j) {
if (data[j] != NoncoherentMemoryFillValue) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
- HandleToUint64(mem_ranges[i].memory), kVUID_Core_MemTrack_InvalidMap,
- "Memory underflow was detected on %s.",
- report_data->FormatHandle(mem_ranges[i].memory).c_str());
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, HandleToUint64(mem_ranges[i].memory),
+ kVUID_Core_MemTrack_InvalidMap, "Memory underflow was detected on mem obj %s.",
+ dev_data->report_data->FormatHandle(mem_ranges[i].memory).c_str());
}
}
for (uint64_t j = (size + mem_info->shadow_pad_size); j < (2 * mem_info->shadow_pad_size + size); ++j) {
if (data[j] != NoncoherentMemoryFillValue) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
- HandleToUint64(mem_ranges[i].memory), kVUID_Core_MemTrack_InvalidMap,
- "Memory overflow was detected on %s.", report_data->FormatHandle(mem_ranges[i].memory).c_str());
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, HandleToUint64(mem_ranges[i].memory),
+ kVUID_Core_MemTrack_InvalidMap, "Memory overflow was detected on mem obj %s.",
+ dev_data->report_data->FormatHandle(mem_ranges[i].memory).c_str());
}
}
memcpy(mem_info->p_driver_data, static_cast<void *>(data + mem_info->shadow_pad_size), (size_t)(size));
@@ -11835,9 +10838,10 @@ bool CoreChecks::ValidateAndCopyNoncoherentMemoryToDriver(uint32_t mem_range_cou
return skip;
}
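
The shadow-copy logic above brackets each non-coherent mapping with guard bands of shadow_pad_size bytes filled with NoncoherentMemoryFillValue; if any guard byte changed by flush time, the application wrote outside its mapped range (underflow before it, overflow after it). A self-contained sketch of that guard-band technique; the pad size and fill value here are arbitrary stand-ins, not the layer's actual constants.

#include <cstdio>
#include <cstring>
#include <vector>

constexpr unsigned char kFillValue = 0xCD;  // stand-in for NoncoherentMemoryFillValue

struct ShadowCopySketch {
    std::size_t pad = 64;              // stand-in for shadow_pad_size
    std::vector<unsigned char> bytes;  // [pad guard][user data][pad guard]

    explicit ShadowCopySketch(std::size_t user_size) : bytes(2 * pad + user_size, kFillValue) {}
    unsigned char *user_data() { return bytes.data() + pad; }
    std::size_t user_size() const { return bytes.size() - 2 * pad; }

    // True if either guard band was disturbed (mirrors the under/overflow scan above).
    bool CheckGuards() const {
        bool corrupted = false;
        for (std::size_t j = 0; j < pad; ++j) {
            if (bytes[j] != kFillValue) { std::puts("memory underflow detected"); corrupted = true; break; }
        }
        for (std::size_t j = pad + user_size(); j < bytes.size(); ++j) {
            if (bytes[j] != kFillValue) { std::puts("memory overflow detected"); corrupted = true; break; }
        }
        return corrupted;
    }
};

int main() {
    ShadowCopySketch shadow(256);
    std::memset(shadow.user_data(), 0, shadow.user_size());  // legitimate write within the mapping
    shadow.user_data()[shadow.user_size()] = 0;              // one byte past the end -> overflow
    return shadow.CheckGuards() ? 1 : 0;
}
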
-void CoreChecks::CopyNoncoherentMemoryFromDriver(uint32_t mem_range_count, const VkMappedMemoryRange *mem_ranges) {
+void CoreChecks::CopyNoncoherentMemoryFromDriver(layer_data *dev_data, uint32_t mem_range_count,
+ const VkMappedMemoryRange *mem_ranges) {
for (uint32_t i = 0; i < mem_range_count; ++i) {
- auto mem_info = GetDevMemState(mem_ranges[i].memory);
+ auto mem_info = GetMemObjInfo(mem_ranges[i].memory);
if (mem_info && mem_info->shadow_copy) {
VkDeviceSize size = (mem_info->mem_range.size != VK_WHOLE_SIZE)
? mem_info->mem_range.size
@@ -11848,23 +10852,23 @@ void CoreChecks::CopyNoncoherentMemoryFromDriver(uint32_t mem_range_count, const
}
}
-bool CoreChecks::ValidateMappedMemoryRangeDeviceLimits(const char *func_name, uint32_t mem_range_count,
+bool CoreChecks::ValidateMappedMemoryRangeDeviceLimits(layer_data *dev_data, const char *func_name, uint32_t mem_range_count,
const VkMappedMemoryRange *mem_ranges) {
bool skip = false;
for (uint32_t i = 0; i < mem_range_count; ++i) {
- uint64_t atom_size = phys_dev_props.limits.nonCoherentAtomSize;
+ uint64_t atom_size = dev_data->phys_dev_props.limits.nonCoherentAtomSize;
if (SafeModulo(mem_ranges[i].offset, atom_size) != 0) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
HandleToUint64(mem_ranges->memory), "VUID-VkMappedMemoryRange-offset-00687",
"%s: Offset in pMemRanges[%d] is 0x%" PRIxLEAST64
", which is not a multiple of VkPhysicalDeviceLimits::nonCoherentAtomSize (0x%" PRIxLEAST64 ").",
func_name, i, mem_ranges[i].offset, atom_size);
}
- auto mem_info = GetDevMemState(mem_ranges[i].memory);
+ auto mem_info = GetMemObjInfo(mem_ranges[i].memory);
if ((mem_ranges[i].size != VK_WHOLE_SIZE) &&
(mem_ranges[i].size + mem_ranges[i].offset != mem_info->alloc_info.allocationSize) &&
(SafeModulo(mem_ranges[i].size, atom_size) != 0)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
HandleToUint64(mem_ranges->memory), "VUID-VkMappedMemoryRange-size-01390",
"%s: Size in pMemRanges[%d] is 0x%" PRIxLEAST64
", which is not a multiple of VkPhysicalDeviceLimits::nonCoherentAtomSize (0x%" PRIxLEAST64 ").",
@@ -11876,148 +10880,113 @@ bool CoreChecks::ValidateMappedMemoryRangeDeviceLimits(const char *func_name, ui
bool CoreChecks::PreCallValidateFlushMappedMemoryRanges(VkDevice device, uint32_t memRangeCount,
const VkMappedMemoryRange *pMemRanges) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
bool skip = false;
- skip |= ValidateMappedMemoryRangeDeviceLimits("vkFlushMappedMemoryRanges", memRangeCount, pMemRanges);
- skip |= ValidateAndCopyNoncoherentMemoryToDriver(memRangeCount, pMemRanges);
- skip |= ValidateMemoryIsMapped("vkFlushMappedMemoryRanges", memRangeCount, pMemRanges);
+ skip |= ValidateMappedMemoryRangeDeviceLimits(device_data, "vkFlushMappedMemoryRanges", memRangeCount, pMemRanges);
+ skip |= ValidateAndCopyNoncoherentMemoryToDriver(device_data, memRangeCount, pMemRanges);
+ skip |= ValidateMemoryIsMapped(device_data, "vkFlushMappedMemoryRanges", memRangeCount, pMemRanges);
return skip;
}
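
ValidateMappedMemoryRangeDeviceLimits, shared by the flush and invalidate paths above, is an alignment rule: the range's offset, and its size when the range neither uses VK_WHOLE_SIZE nor runs to the end of the allocation, must be a multiple of VkPhysicalDeviceLimits::nonCoherentAtomSize, using a divide-by-zero-safe modulo. A short sketch of that rule; the SafeModulo shown is a stand-in for the layer's helper and the atom size is only an example value.

#include <cassert>
#include <cstdint>

constexpr std::uint64_t kWholeSize = ~0ull;  // stand-in for VK_WHOLE_SIZE

// Stand-in for the layer's SafeModulo helper: avoid dividing by zero.
static std::uint64_t SafeModulo(std::uint64_t dividend, std::uint64_t divisor) {
    return divisor ? dividend % divisor : 0;
}

static bool RangeRespectsAtomSize(std::uint64_t offset, std::uint64_t size, std::uint64_t alloc_size,
                                  std::uint64_t atom_size) {
    if (SafeModulo(offset, atom_size) != 0) return false;  // VUID-VkMappedMemoryRange-offset-00687
    if (size == kWholeSize) return true;                   // whole-size ranges are exempt from the size rule
    const bool runs_to_end = (offset + size == alloc_size);
    if (!runs_to_end && SafeModulo(size, atom_size) != 0) return false;  // VUID-VkMappedMemoryRange-size-01390
    return true;
}

int main() {
    assert(RangeRespectsAtomSize(128, 256, 4096, 64));
    assert(!RangeRespectsAtomSize(100, 256, 4096, 64));  // offset not a multiple of the atom size
    assert(RangeRespectsAtomSize(0, 4096, 4096, 64));    // runs to the end: size rule waived
    return 0;
}
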
bool CoreChecks::PreCallValidateInvalidateMappedMemoryRanges(VkDevice device, uint32_t memRangeCount,
const VkMappedMemoryRange *pMemRanges) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
bool skip = false;
- skip |= ValidateMappedMemoryRangeDeviceLimits("vkInvalidateMappedMemoryRanges", memRangeCount, pMemRanges);
- skip |= ValidateMemoryIsMapped("vkInvalidateMappedMemoryRanges", memRangeCount, pMemRanges);
+ skip |= ValidateMappedMemoryRangeDeviceLimits(device_data, "vkInvalidateMappedMemoryRanges", memRangeCount, pMemRanges);
+ skip |= ValidateMemoryIsMapped(device_data, "vkInvalidateMappedMemoryRanges", memRangeCount, pMemRanges);
return skip;
}
void CoreChecks::PostCallRecordInvalidateMappedMemoryRanges(VkDevice device, uint32_t memRangeCount,
const VkMappedMemoryRange *pMemRanges, VkResult result) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (VK_SUCCESS == result) {
// Update our shadow copy with modified driver data
- CopyNoncoherentMemoryFromDriver(memRangeCount, pMemRanges);
+ CopyNoncoherentMemoryFromDriver(device_data, memRangeCount, pMemRanges);
}
}
bool CoreChecks::PreCallValidateGetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory mem, VkDeviceSize *pCommittedMem) {
+ layer_data *dev_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
bool skip = false;
- const auto mem_info = GetDevMemState(mem);
+ auto mem_info = GetMemObjInfo(mem);
if (mem_info) {
- if ((phys_dev_mem_props.memoryTypes[mem_info->alloc_info.memoryTypeIndex].propertyFlags &
+ if ((dev_data->phys_dev_mem_props.memoryTypes[mem_info->alloc_info.memoryTypeIndex].propertyFlags &
VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) == 0) {
- skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+ skip = log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
HandleToUint64(mem), "VUID-vkGetDeviceMemoryCommitment-memory-00690",
- "Querying commitment for memory without VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT set: %s.",
- report_data->FormatHandle(mem).c_str());
+ "Querying commitment for memory without VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT set: mem obj %s.",
+ dev_data->report_data->FormatHandle(mem).c_str());
}
}
return skip;
}
-bool CoreChecks::ValidateBindImageMemory(const VkBindImageMemoryInfo &bindInfo, const char *api_name) const {
+bool CoreChecks::ValidateBindImageMemory(layer_data *device_data, VkImage image, VkDeviceMemory mem, VkDeviceSize memoryOffset,
+ const char *api_name) {
bool skip = false;
- const IMAGE_STATE *image_state = GetImageState(bindInfo.image);
+ IMAGE_STATE *image_state = GetImageState(image);
if (image_state) {
// Track objects tied to memory
- uint64_t image_handle = HandleToUint64(bindInfo.image);
- skip = ValidateSetMemBinding(bindInfo.memory, VulkanTypedHandle(bindInfo.image, kVulkanObjectTypeImage), api_name);
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
- if (image_state->external_format_android) {
- if (image_state->memory_requirements_checked) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, image_handle,
- kVUID_Core_BindImage_InvalidMemReqQuery,
- "%s: Must not call vkGetImageMemoryRequirements on %s that will be bound to an external "
- "Android hardware buffer.",
- api_name, report_data->FormatHandle(bindInfo.image).c_str());
- }
- return skip;
- }
-#endif // VK_USE_PLATFORM_ANDROID_KHR
+ uint64_t image_handle = HandleToUint64(image);
+ skip = ValidateSetMemBinding(device_data, mem, image_handle, kVulkanObjectTypeImage, api_name);
if (!image_state->memory_requirements_checked) {
// There's not an explicit requirement in the spec to call vkGetImageMemoryRequirements() prior to calling
// BindImageMemory but it's implied in that memory being bound must conform with VkMemoryRequirements from
// vkGetImageMemoryRequirements()
- skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, image_handle,
- kVUID_Core_BindImage_NoMemReqQuery,
- "%s: Binding memory to %s but vkGetImageMemoryRequirements() has not been called on that image.",
- api_name, report_data->FormatHandle(bindInfo.image).c_str());
- // Use this information fetched at CreateImage time, in validation below.
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+ image_handle, kVUID_Core_DrawState_InvalidImage,
+ "%s: Binding memory to image %s but vkGetImageMemoryRequirements() has not been called on that image.",
+ api_name, device_data->report_data->FormatHandle(image_handle).c_str());
+ // Make the call for them so we can verify the state
+ device_data->device_dispatch_table.GetImageMemoryRequirements(device_data->device, image, &image_state->requirements);
}
// Validate bound memory range information
- const auto mem_info = GetDevMemState(bindInfo.memory);
+ auto mem_info = GetMemObjInfo(mem);
if (mem_info) {
- skip |= ValidateInsertImageMemoryRange(bindInfo.image, mem_info, bindInfo.memoryOffset, image_state->requirements,
+ skip |= ValidateInsertImageMemoryRange(device_data, image, mem_info, memoryOffset, image_state->requirements,
image_state->createInfo.tiling == VK_IMAGE_TILING_LINEAR, api_name);
- skip |= ValidateMemoryTypes(mem_info, image_state->requirements.memoryTypeBits, api_name,
+ skip |= ValidateMemoryTypes(device_data, mem_info, image_state->requirements.memoryTypeBits, api_name,
"VUID-vkBindImageMemory-memory-01047");
}
// Validate memory requirements alignment
- if (SafeModulo(bindInfo.memoryOffset, image_state->requirements.alignment) != 0) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, image_handle,
- "VUID-vkBindImageMemory-memoryOffset-01048",
+ if (SafeModulo(memoryOffset, image_state->requirements.alignment) != 0) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+ image_handle, "VUID-vkBindImageMemory-memoryOffset-01048",
"%s: memoryOffset is 0x%" PRIxLEAST64
" but must be an integer multiple of the VkMemoryRequirements::alignment value 0x%" PRIxLEAST64
", returned from a call to vkGetImageMemoryRequirements with image.",
- api_name, bindInfo.memoryOffset, image_state->requirements.alignment);
+ api_name, memoryOffset, image_state->requirements.alignment);
}
if (mem_info) {
// Validate memory requirements size
- if (image_state->requirements.size > mem_info->alloc_info.allocationSize - bindInfo.memoryOffset) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, image_handle,
- "VUID-vkBindImageMemory-size-01049",
- "%s: memory size minus memoryOffset is 0x%" PRIxLEAST64
- " but must be at least as large as VkMemoryRequirements::size value 0x%" PRIxLEAST64
- ", returned from a call to vkGetImageMemoryRequirements with image.",
- api_name, mem_info->alloc_info.allocationSize - bindInfo.memoryOffset, image_state->requirements.size);
+ if (image_state->requirements.size > mem_info->alloc_info.allocationSize - memoryOffset) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+ image_handle, "VUID-vkBindImageMemory-size-01049",
+ "%s: memory size minus memoryOffset is 0x%" PRIxLEAST64
+ " but must be at least as large as VkMemoryRequirements::size value 0x%" PRIxLEAST64
+ ", returned from a call to vkGetImageMemoryRequirements with image.",
+ api_name, mem_info->alloc_info.allocationSize - memoryOffset, image_state->requirements.size);
}
// Validate dedicated allocation
- if (mem_info->is_dedicated && ((mem_info->dedicated_image != bindInfo.image) || (bindInfo.memoryOffset != 0))) {
+ if (mem_info->is_dedicated && ((mem_info->dedicated_image != image) || (memoryOffset != 0))) {
// TODO: Add vkBindImageMemory2KHR error message when added to spec.
auto validation_error = kVUIDUndefined;
if (strcmp(api_name, "vkBindImageMemory()") == 0) {
validation_error = "VUID-vkBindImageMemory-memory-01509";
}
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, image_handle,
- validation_error,
- "%s: for dedicated memory allocation %s, VkMemoryDedicatedAllocateInfoKHR:: %s must be equal "
- "to %s and memoryOffset 0x%" PRIxLEAST64 " must be zero.",
- api_name, report_data->FormatHandle(bindInfo.memory).c_str(),
- report_data->FormatHandle(mem_info->dedicated_image).c_str(),
- report_data->FormatHandle(bindInfo.image).c_str(), bindInfo.memoryOffset);
- }
- }
-
- const auto swapchain_info = lvl_find_in_chain<VkBindImageMemorySwapchainInfoKHR>(bindInfo.pNext);
- if (swapchain_info) {
- if (bindInfo.memory != VK_NULL_HANDLE) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, image_handle,
- "VUID-VkBindImageMemoryInfo-pNext-01631", "%s: %s is not VK_NULL_HANDLE.", api_name,
- report_data->FormatHandle(bindInfo.memory).c_str());
- }
- const auto swapchain_state = GetSwapchainState(swapchain_info->swapchain);
- if (swapchain_state && swapchain_state->images.size() <= swapchain_info->imageIndex) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, image_handle,
- "VUID-VkBindImageMemorySwapchainInfoKHR-imageIndex-01644",
- "%s: imageIndex (%i) is out of bounds of %s images (size: %i)", api_name, swapchain_info->imageIndex,
- report_data->FormatHandle(swapchain_info->swapchain).c_str(), (int)swapchain_state->images.size());
- }
- } else {
- if (image_state->create_from_swapchain) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, image_handle,
- "VUID-VkBindImageMemoryInfo-image-01630",
- "%s: pNext of VkBindImageMemoryInfo doesn't include VkBindImageMemorySwapchainInfoKHR.", api_name);
- }
- if (!mem_info) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, image_handle,
- "VUID-VkBindImageMemoryInfo-pNext-01632", "%s: %s is invalid.", api_name,
- report_data->FormatHandle(bindInfo.memory).c_str());
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+ image_handle, validation_error,
+ "%s: for dedicated memory allocation %s, VkMemoryDedicatedAllocateInfoKHR::image %s must be equal "
+ "to image %s and memoryOffset 0x%" PRIxLEAST64 " must be zero.",
+ api_name, device_data->report_data->FormatHandle(mem).c_str(),
+ device_data->report_data->FormatHandle(mem_info->dedicated_image).c_str(),
+ device_data->report_data->FormatHandle(image_handle).c_str(), memoryOffset);
}
}
}
@@ -12025,108 +10994,103 @@ bool CoreChecks::ValidateBindImageMemory(const VkBindImageMemoryInfo &bindInfo,
}
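
ValidateBindImageMemory checks the bind against the image's VkMemoryRequirements: memoryOffset must be a multiple of requirements.alignment (VUID-vkBindImageMemory-memoryOffset-01048), the bytes remaining after the offset must cover requirements.size (VUID-vkBindImageMemory-size-01049), the memory type must be acceptable, and a dedicated allocation may only be bound to its dedicated image at offset zero. A sketch of the two arithmetic checks only; MemReqSketch is a stand-in, not VkMemoryRequirements itself.

#include <cassert>
#include <cstdint>

struct MemReqSketch {  // stand-in for VkMemoryRequirements
    std::uint64_t size;
    std::uint64_t alignment;
};

static bool BindOffsetIsValid(const MemReqSketch &reqs, std::uint64_t allocation_size, std::uint64_t memory_offset) {
    // Offset must be an integer multiple of the reported alignment.
    if (reqs.alignment != 0 && (memory_offset % reqs.alignment) != 0) return false;
    if (memory_offset > allocation_size) return false;
    // The remainder of the allocation must be at least as large as the reported size.
    return reqs.size <= allocation_size - memory_offset;
}

int main() {
    MemReqSketch reqs{65536, 256};
    assert(BindOffsetIsValid(reqs, 1u << 20, 0));
    assert(!BindOffsetIsValid(reqs, 1u << 20, 100));  // misaligned offset
    assert(!BindOffsetIsValid(reqs, 65536, 4096));    // not enough space after the offset
    return 0;
}
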
bool CoreChecks::PreCallValidateBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem, VkDeviceSize memoryOffset) {
- VkBindImageMemoryInfo bindInfo = {};
- bindInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
- bindInfo.image = image;
- bindInfo.memory = mem;
- bindInfo.memoryOffset = memoryOffset;
- return ValidateBindImageMemory(bindInfo, "vkBindImageMemory()");
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ return ValidateBindImageMemory(device_data, image, mem, memoryOffset, "vkBindImageMemory()");
}
-void ValidationStateTracker::UpdateBindImageMemoryState(const VkBindImageMemoryInfo &bindInfo) {
- IMAGE_STATE *image_state = GetImageState(bindInfo.image);
+void CoreChecks::UpdateBindImageMemoryState(layer_data *device_data, VkImage image, VkDeviceMemory mem, VkDeviceSize memoryOffset) {
+ IMAGE_STATE *image_state = GetImageState(image);
if (image_state) {
- const auto swapchain_info = lvl_find_in_chain<VkBindImageMemorySwapchainInfoKHR>(bindInfo.pNext);
- if (swapchain_info) {
- image_state->bind_swapchain = swapchain_info->swapchain;
- image_state->bind_swapchain_imageIndex = swapchain_info->imageIndex;
- } else {
- // Track bound memory range information
- auto mem_info = GetDevMemState(bindInfo.memory);
- if (mem_info) {
- InsertImageMemoryRange(bindInfo.image, mem_info, bindInfo.memoryOffset, image_state->requirements,
- image_state->createInfo.tiling == VK_IMAGE_TILING_LINEAR);
- }
-
- // Track objects tied to memory
- SetMemBinding(bindInfo.memory, image_state, bindInfo.memoryOffset,
- VulkanTypedHandle(bindInfo.image, kVulkanObjectTypeImage));
+ // Track bound memory range information
+ auto mem_info = GetMemObjInfo(mem);
+ if (mem_info) {
+ InsertImageMemoryRange(device_data, image, mem_info, memoryOffset, image_state->requirements,
+ image_state->createInfo.tiling == VK_IMAGE_TILING_LINEAR);
}
+
+ // Track objects tied to memory
+ uint64_t image_handle = HandleToUint64(image);
+ SetMemBinding(device_data, mem, image_state, memoryOffset, image_handle, kVulkanObjectTypeImage);
}
}
-void ValidationStateTracker::PostCallRecordBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem,
- VkDeviceSize memoryOffset, VkResult result) {
+void CoreChecks::PostCallRecordBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem, VkDeviceSize memoryOffset,
+ VkResult result) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (VK_SUCCESS != result) return;
- VkBindImageMemoryInfo bindInfo = {};
- bindInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
- bindInfo.image = image;
- bindInfo.memory = mem;
- bindInfo.memoryOffset = memoryOffset;
- UpdateBindImageMemoryState(bindInfo);
+ UpdateBindImageMemoryState(device_data, image, mem, memoryOffset);
}
bool CoreChecks::PreCallValidateBindImageMemory2(VkDevice device, uint32_t bindInfoCount,
const VkBindImageMemoryInfoKHR *pBindInfos) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
bool skip = false;
char api_name[128];
for (uint32_t i = 0; i < bindInfoCount; i++) {
sprintf(api_name, "vkBindImageMemory2() pBindInfos[%u]", i);
- skip |= ValidateBindImageMemory(pBindInfos[i], api_name);
+ skip |=
+ ValidateBindImageMemory(device_data, pBindInfos[i].image, pBindInfos[i].memory, pBindInfos[i].memoryOffset, api_name);
}
return skip;
}
bool CoreChecks::PreCallValidateBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount,
const VkBindImageMemoryInfoKHR *pBindInfos) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
bool skip = false;
char api_name[128];
for (uint32_t i = 0; i < bindInfoCount; i++) {
sprintf(api_name, "vkBindImageMemory2KHR() pBindInfos[%u]", i);
- skip |= ValidateBindImageMemory(pBindInfos[i], api_name);
+ skip |=
+ ValidateBindImageMemory(device_data, pBindInfos[i].image, pBindInfos[i].memory, pBindInfos[i].memoryOffset, api_name);
}
return skip;
}
-void ValidationStateTracker::PostCallRecordBindImageMemory2(VkDevice device, uint32_t bindInfoCount,
- const VkBindImageMemoryInfoKHR *pBindInfos, VkResult result) {
+void CoreChecks::PostCallRecordBindImageMemory2(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfoKHR *pBindInfos,
+ VkResult result) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (VK_SUCCESS != result) return;
for (uint32_t i = 0; i < bindInfoCount; i++) {
- UpdateBindImageMemoryState(pBindInfos[i]);
+ UpdateBindImageMemoryState(device_data, pBindInfos[i].image, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
}
}
-void ValidationStateTracker::PostCallRecordBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount,
- const VkBindImageMemoryInfoKHR *pBindInfos, VkResult result) {
+void CoreChecks::PostCallRecordBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount,
+ const VkBindImageMemoryInfoKHR *pBindInfos, VkResult result) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (VK_SUCCESS != result) return;
for (uint32_t i = 0; i < bindInfoCount; i++) {
- UpdateBindImageMemoryState(pBindInfos[i]);
+ UpdateBindImageMemoryState(device_data, pBindInfos[i].image, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
}
}
bool CoreChecks::PreCallValidateSetEvent(VkDevice device, VkEvent event) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
bool skip = false;
- auto event_state = GetEventState(event);
+ auto event_state = GetEventNode(event);
if (event_state) {
if (event_state->write_in_use) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT,
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT,
HandleToUint64(event), kVUID_Core_DrawState_QueueForwardProgress,
- "Cannot call vkSetEvent() on %s that is already in use by a command buffer.",
- report_data->FormatHandle(event).c_str());
+ "Cannot call vkSetEvent() on event %s that is already in use by a command buffer.",
+ device_data->report_data->FormatHandle(event).c_str());
}
}
return skip;
}
void CoreChecks::PreCallRecordSetEvent(VkDevice device, VkEvent event) {
- auto event_state = GetEventState(event);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ auto event_state = GetEventNode(event);
if (event_state) {
+ event_state->needsSignaled = false;
event_state->stageMask = VK_PIPELINE_STAGE_HOST_BIT;
}
// Host setting event is visible to all queues immediately so update stageMask for any queue that's seen this event
// TODO : For correctness this needs separate fix to verify that app doesn't make incorrect assumptions about the
// ordering of this command in relation to vkCmd[Set|Reset]Events (see GH297)
- for (auto queue_data : queueMap) {
+ for (auto queue_data : device_data->queueMap) {
auto event_entry = queue_data.second.eventToStageMap.find(event);
if (event_entry != queue_data.second.eventToStageMap.end()) {
event_entry->second |= VK_PIPELINE_STAGE_HOST_BIT;
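// Illustrative sketch, not part of this commit: the host-signal semantics the record hook above
// models by or-ing VK_PIPELINE_STAGE_HOST_BIT into every queue's stage mask. A wait recorded with
// the HOST source stage is satisfied by a plain vkSetEvent from the CPU, with no queue submission
// needed for the signal. Function and parameter names below are hypothetical.
#include <vulkan/vulkan.h>

// Recorded into a command buffer that will later be submitted to some queue.
void RecordWaitOnHostSetEvent(VkCommandBuffer cmd, VkEvent event) {
    vkCmdWaitEvents(cmd, 1, &event,
                    VK_PIPELINE_STAGE_HOST_BIT,          // the signal comes from the host
                    VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,  // everything after the wait observes it
                    0, nullptr, 0, nullptr, 0, nullptr);
}

// Called later from application code on the CPU.
void SignalEventFromHost(VkDevice device, VkEvent event) {
    vkSetEvent(device, event);
}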
@@ -12136,21 +11100,13 @@ void CoreChecks::PreCallRecordSetEvent(VkDevice device, VkEvent event) {
bool CoreChecks::PreCallValidateQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo,
VkFence fence) {
- auto queue_data = GetQueueState(queue);
- auto pFence = GetFenceState(fence);
- bool skip = ValidateFenceForSubmit(pFence);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
+ auto pFence = GetFenceNode(fence);
+ bool skip = ValidateFenceForSubmit(device_data, pFence);
if (skip) {
return true;
}
- auto queueFlags = GetPhysicalDeviceState()->queue_family_properties[queue_data->queueFamilyIndex].queueFlags;
- if (!(queueFlags & VK_QUEUE_SPARSE_BINDING_BIT)) {
- skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, HandleToUint64(queue),
- "VUID-vkQueueBindSparse-queuetype",
- "Attempting vkQueueBindSparse on a non-memory-management capable queue -- VK_QUEUE_SPARSE_BINDING_BIT not set.");
- }
-
unordered_set<VkSemaphore> signaled_semaphores;
unordered_set<VkSemaphore> unsignaled_semaphores;
unordered_set<VkSemaphore> internal_semaphores;
@@ -12161,14 +11117,16 @@ bool CoreChecks::PreCallValidateQueueBindSparse(VkQueue queue, uint32_t bindInfo
std::vector<VkSemaphore> semaphore_signals;
for (uint32_t i = 0; i < bindInfo.waitSemaphoreCount; ++i) {
VkSemaphore semaphore = bindInfo.pWaitSemaphores[i];
- auto pSemaphore = GetSemaphoreState(semaphore);
+ auto pSemaphore = GetSemaphoreNode(semaphore);
if (pSemaphore && (pSemaphore->scope == kSyncScopeInternal || internal_semaphores.count(semaphore))) {
if (unsignaled_semaphores.count(semaphore) ||
(!(signaled_semaphores.count(semaphore)) && !(pSemaphore->signaled))) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
- HandleToUint64(semaphore), kVUID_Core_DrawState_QueueForwardProgress,
- "%s is waiting on %s that has no way to be signaled.", report_data->FormatHandle(queue).c_str(),
- report_data->FormatHandle(semaphore).c_str());
+ skip |=
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
+ HandleToUint64(semaphore), kVUID_Core_DrawState_QueueForwardProgress,
+ "Queue %s is waiting on semaphore %s that has no way to be signaled.",
+ device_data->report_data->FormatHandle(queue).c_str(),
+ device_data->report_data->FormatHandle(semaphore).c_str());
} else {
signaled_semaphores.erase(semaphore);
unsignaled_semaphores.insert(semaphore);
@@ -12180,15 +11138,17 @@ bool CoreChecks::PreCallValidateQueueBindSparse(VkQueue queue, uint32_t bindInfo
}
for (uint32_t i = 0; i < bindInfo.signalSemaphoreCount; ++i) {
VkSemaphore semaphore = bindInfo.pSignalSemaphores[i];
- auto pSemaphore = GetSemaphoreState(semaphore);
+ auto pSemaphore = GetSemaphoreNode(semaphore);
if (pSemaphore && pSemaphore->scope == kSyncScopeInternal) {
if (signaled_semaphores.count(semaphore) || (!(unsignaled_semaphores.count(semaphore)) && pSemaphore->signaled)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
- HandleToUint64(semaphore), kVUID_Core_DrawState_QueueForwardProgress,
- "%s is signaling %s that was previously signaled by %s but has not since "
- "been waited on by any queue.",
- report_data->FormatHandle(queue).c_str(), report_data->FormatHandle(semaphore).c_str(),
- report_data->FormatHandle(pSemaphore->signaler.first).c_str());
+ skip |=
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
+ HandleToUint64(semaphore), kVUID_Core_DrawState_QueueForwardProgress,
+ "Queue %s is signaling semaphore %s that was previously signaled by queue %s but has not since "
+ "been waited on by any queue.",
+ device_data->report_data->FormatHandle(queue).c_str(),
+ device_data->report_data->FormatHandle(semaphore).c_str(),
+ device_data->report_data->FormatHandle(pSemaphore->signaler.first).c_str());
} else {
unsignaled_semaphores.erase(semaphore);
signaled_semaphores.insert(semaphore);
@@ -12207,20 +11167,20 @@ bool CoreChecks::PreCallValidateQueueBindSparse(VkQueue queue, uint32_t bindInfo
if (image_state->createInfo.flags & VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT) {
if (!image_state->get_sparse_reqs_called || image_state->sparse_requirements.empty()) {
// For now just warning if sparse image binding occurs without calling to get reqs first
- return log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+ return log_msg(device_data->report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
HandleToUint64(image_state->image), kVUID_Core_MemTrack_InvalidState,
- "vkQueueBindSparse(): Binding sparse memory to %s without first calling "
+ "vkQueueBindSparse(): Binding sparse memory to image %s without first calling "
"vkGetImageSparseMemoryRequirements[2KHR]() to retrieve requirements.",
- report_data->FormatHandle(image_state->image).c_str());
+ device_data->report_data->FormatHandle(image_state->image).c_str());
}
}
if (!image_state->memory_requirements_checked) {
// For now just warning if sparse image binding occurs without calling to get reqs first
- return log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+ return log_msg(device_data->report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
HandleToUint64(image_state->image), kVUID_Core_MemTrack_InvalidState,
- "vkQueueBindSparse(): Binding sparse memory to %s without first calling "
+ "vkQueueBindSparse(): Binding sparse memory to image %s without first calling "
"vkGetImageMemoryRequirements() to retrieve requirements.",
- report_data->FormatHandle(image_state->image).c_str());
+ device_data->report_data->FormatHandle(image_state->image).c_str());
}
}
for (uint32_t i = 0; i < bindInfo.imageOpaqueBindCount; ++i) {
@@ -12232,20 +11192,20 @@ bool CoreChecks::PreCallValidateQueueBindSparse(VkQueue queue, uint32_t bindInfo
if (image_state->createInfo.flags & VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT) {
if (!image_state->get_sparse_reqs_called || image_state->sparse_requirements.empty()) {
// For now just warning if sparse image binding occurs without calling to get reqs first
- return log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+ return log_msg(device_data->report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
HandleToUint64(image_state->image), kVUID_Core_MemTrack_InvalidState,
- "vkQueueBindSparse(): Binding opaque sparse memory to %s without first calling "
+ "vkQueueBindSparse(): Binding opaque sparse memory to image %s without first calling "
"vkGetImageSparseMemoryRequirements[2KHR]() to retrieve requirements.",
- report_data->FormatHandle(image_state->image).c_str());
+ device_data->report_data->FormatHandle(image_state->image).c_str());
}
}
if (!image_state->memory_requirements_checked) {
// For now just warning if sparse image binding occurs without calling to get reqs first
- return log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+ return log_msg(device_data->report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
HandleToUint64(image_state->image), kVUID_Core_MemTrack_InvalidState,
- "vkQueueBindSparse(): Binding opaque sparse memory to %s without first calling "
+ "vkQueueBindSparse(): Binding opaque sparse memory to image %s without first calling "
"vkGetImageMemoryRequirements() to retrieve requirements.",
- report_data->FormatHandle(image_state->image).c_str());
+ device_data->report_data->FormatHandle(image_state->image).c_str());
}
for (uint32_t j = 0; j < image_opaque_bind.bindCount; ++j) {
if (image_opaque_bind.pBinds[j].flags & VK_SPARSE_MEMORY_BIND_METADATA_BIT) {
@@ -12256,11 +11216,11 @@ bool CoreChecks::PreCallValidateQueueBindSparse(VkQueue queue, uint32_t bindInfo
for (const auto &sparse_image_state : sparse_images) {
if (sparse_image_state->sparse_metadata_required && !sparse_image_state->sparse_metadata_bound) {
// Warn if sparse image binding metadata required for image with sparse binding, but metadata not bound
- return log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+ return log_msg(device_data->report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
HandleToUint64(sparse_image_state->image), kVUID_Core_MemTrack_InvalidState,
- "vkQueueBindSparse(): Binding sparse memory to %s which requires a metadata aspect but no "
+ "vkQueueBindSparse(): Binding sparse memory to image %s which requires a metadata aspect but no "
"binding with VK_SPARSE_MEMORY_BIND_METADATA_BIT set was made.",
- report_data->FormatHandle(sparse_image_state->image).c_str());
+ device_data->report_data->FormatHandle(sparse_image_state->image).c_str());
}
}
}
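// Illustrative sketch, not part of this commit: the queries the warnings above expect an
// application to have made before sparse-binding an image. Helper and variable names are
// hypothetical; both entry points are core Vulkan 1.0.
#include <vulkan/vulkan.h>
#include <vector>

std::vector<VkSparseImageMemoryRequirements> QuerySparseImageRequirements(VkDevice device, VkImage image) {
    // Size/alignment/memoryTypeBits for the image as a whole.
    VkMemoryRequirements mem_reqs = {};
    vkGetImageMemoryRequirements(device, image, &mem_reqs);

    // Per-aspect sparse requirements, including whether a metadata aspect must be bound
    // with VK_SPARSE_MEMORY_BIND_METADATA_BIT.
    uint32_t count = 0;
    vkGetImageSparseMemoryRequirements(device, image, &count, nullptr);
    std::vector<VkSparseImageMemoryRequirements> sparse_reqs(count);
    vkGetImageSparseMemoryRequirements(device, image, &count, sparse_reqs.data());
    return sparse_reqs;
}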
@@ -12269,9 +11229,10 @@ bool CoreChecks::PreCallValidateQueueBindSparse(VkQueue queue, uint32_t bindInfo
}
void CoreChecks::PostCallRecordQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo,
VkFence fence, VkResult result) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
if (result != VK_SUCCESS) return;
uint64_t early_retire_seq = 0;
- auto pFence = GetFenceState(fence);
+ auto pFence = GetFenceNode(fence);
auto pQueue = GetQueueState(queue);
if (pFence) {
@@ -12285,13 +11246,14 @@ void CoreChecks::PostCallRecordQueueBindSparse(VkQueue queue, uint32_t bindInfoC
} else {
// Retire work up until this fence early, we will not see the wait that corresponds to this signal
early_retire_seq = pQueue->seq + pQueue->submissions.size();
- if (!external_sync_warning) {
- external_sync_warning = true;
- log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, HandleToUint64(fence),
- kVUID_Core_DrawState_QueueForwardProgress,
- "vkQueueBindSparse(): Signaling external %s on %s will disable validation of preceding command "
+ if (!device_data->external_sync_warning) {
+ device_data->external_sync_warning = true;
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT,
+ HandleToUint64(fence), kVUID_Core_DrawState_QueueForwardProgress,
+ "vkQueueBindSparse(): Signaling external fence %s on queue %s will disable validation of preceding command "
"buffer lifecycle states and the in-use status of associated objects.",
- report_data->FormatHandle(fence).c_str(), report_data->FormatHandle(queue).c_str());
+ device_data->report_data->FormatHandle(fence).c_str(),
+ device_data->report_data->FormatHandle(queue).c_str());
}
}
}
@@ -12302,15 +11264,15 @@ void CoreChecks::PostCallRecordQueueBindSparse(VkQueue queue, uint32_t bindInfoC
for (uint32_t j = 0; j < bindInfo.bufferBindCount; j++) {
for (uint32_t k = 0; k < bindInfo.pBufferBinds[j].bindCount; k++) {
auto sparse_binding = bindInfo.pBufferBinds[j].pBinds[k];
- SetSparseMemBinding({sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size},
- VulkanTypedHandle(bindInfo.pBufferBinds[j].buffer, kVulkanObjectTypeBuffer));
+ SetSparseMemBinding(device_data, {sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size},
+ HandleToUint64(bindInfo.pBufferBinds[j].buffer), kVulkanObjectTypeBuffer);
}
}
for (uint32_t j = 0; j < bindInfo.imageOpaqueBindCount; j++) {
for (uint32_t k = 0; k < bindInfo.pImageOpaqueBinds[j].bindCount; k++) {
auto sparse_binding = bindInfo.pImageOpaqueBinds[j].pBinds[k];
- SetSparseMemBinding({sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size},
- VulkanTypedHandle(bindInfo.pImageOpaqueBinds[j].image, kVulkanObjectTypeImage));
+ SetSparseMemBinding(device_data, {sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size},
+ HandleToUint64(bindInfo.pImageOpaqueBinds[j].image), kVulkanObjectTypeImage);
}
}
for (uint32_t j = 0; j < bindInfo.imageBindCount; j++) {
@@ -12318,8 +11280,8 @@ void CoreChecks::PostCallRecordQueueBindSparse(VkQueue queue, uint32_t bindInfoC
auto sparse_binding = bindInfo.pImageBinds[j].pBinds[k];
// TODO: This size is broken for non-opaque bindings, need to update to comprehend full sparse binding data
VkDeviceSize size = sparse_binding.extent.depth * sparse_binding.extent.height * sparse_binding.extent.width * 4;
- SetSparseMemBinding({sparse_binding.memory, sparse_binding.memoryOffset, size},
- VulkanTypedHandle(bindInfo.pImageBinds[j].image, kVulkanObjectTypeImage));
+ SetSparseMemBinding(device_data, {sparse_binding.memory, sparse_binding.memoryOffset, size},
+ HandleToUint64(bindInfo.pImageBinds[j].image), kVulkanObjectTypeImage);
}
}
@@ -12328,7 +11290,7 @@ void CoreChecks::PostCallRecordQueueBindSparse(VkQueue queue, uint32_t bindInfoC
std::vector<VkSemaphore> semaphore_externals;
for (uint32_t i = 0; i < bindInfo.waitSemaphoreCount; ++i) {
VkSemaphore semaphore = bindInfo.pWaitSemaphores[i];
- auto pSemaphore = GetSemaphoreState(semaphore);
+ auto pSemaphore = GetSemaphoreNode(semaphore);
if (pSemaphore) {
if (pSemaphore->scope == kSyncScopeInternal) {
if (pSemaphore->signaler.first != VK_NULL_HANDLE) {
@@ -12348,7 +11310,7 @@ void CoreChecks::PostCallRecordQueueBindSparse(VkQueue queue, uint32_t bindInfoC
}
for (uint32_t i = 0; i < bindInfo.signalSemaphoreCount; ++i) {
VkSemaphore semaphore = bindInfo.pSignalSemaphores[i];
- auto pSemaphore = GetSemaphoreState(semaphore);
+ auto pSemaphore = GetSemaphoreNode(semaphore);
if (pSemaphore) {
if (pSemaphore->scope == kSyncScopeInternal) {
pSemaphore->signaler.first = queue;
@@ -12359,52 +11321,53 @@ void CoreChecks::PostCallRecordQueueBindSparse(VkQueue queue, uint32_t bindInfoC
} else {
// Retire work up until this submit early, we will not see the wait that corresponds to this signal
early_retire_seq = std::max(early_retire_seq, pQueue->seq + pQueue->submissions.size() + 1);
- if (!external_sync_warning) {
- external_sync_warning = true;
- log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
- HandleToUint64(semaphore), kVUID_Core_DrawState_QueueForwardProgress,
- "vkQueueBindSparse(): Signaling external %s on %s will disable validation of "
+ if (!device_data->external_sync_warning) {
+ device_data->external_sync_warning = true;
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, HandleToUint64(semaphore),
+ kVUID_Core_DrawState_QueueForwardProgress,
+ "vkQueueBindSparse(): Signaling external semaphore %s on queue %s will disable validation of "
"preceding command buffer lifecycle states and the in-use status of associated objects.",
- report_data->FormatHandle(semaphore).c_str(), report_data->FormatHandle(queue).c_str());
+ device_data->report_data->FormatHandle(semaphore).c_str(),
+ device_data->report_data->FormatHandle(queue).c_str());
}
}
}
}
pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), semaphore_waits, semaphore_signals, semaphore_externals,
- bindIdx == bindInfoCount - 1 ? fence : (VkFence)VK_NULL_HANDLE);
+ bindIdx == bindInfoCount - 1 ? fence : VK_NULL_HANDLE);
}
if (early_retire_seq) {
- RetireWorkOnQueue(pQueue, early_retire_seq, true);
+ RetireWorkOnQueue(device_data, pQueue, early_retire_seq);
}
}
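// Illustrative sketch, not part of this commit: the application-side vkQueueBindSparse call whose
// wait/signal semaphores and fence the hooks above validate and record. The queue must come from a
// family advertising VK_QUEUE_SPARSE_BINDING_BIT; all handles and the size below are hypothetical.
#include <vulkan/vulkan.h>

VkResult BindSparseBufferRange(VkQueue sparse_queue, VkBuffer buffer, VkDeviceMemory memory,
                               VkDeviceSize size, VkSemaphore wait_sem, VkSemaphore signal_sem,
                               VkFence fence) {
    VkSparseMemoryBind bind = {};
    bind.resourceOffset = 0;
    bind.size = size;
    bind.memory = memory;  // VK_NULL_HANDLE here would unbind the range instead
    bind.memoryOffset = 0;

    VkSparseBufferMemoryBindInfo buffer_bind = {};
    buffer_bind.buffer = buffer;
    buffer_bind.bindCount = 1;
    buffer_bind.pBinds = &bind;

    VkBindSparseInfo info = {};
    info.sType = VK_STRUCTURE_TYPE_BIND_SPARSE_INFO;
    info.waitSemaphoreCount = 1;
    info.pWaitSemaphores = &wait_sem;      // must have a pending signal, or the layer warns
    info.bufferBindCount = 1;
    info.pBufferBinds = &buffer_bind;
    info.signalSemaphoreCount = 1;
    info.pSignalSemaphores = &signal_sem;  // must not already be signaled and left unwaited

    return vkQueueBindSparse(sparse_queue, 1, &info, fence);
}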
-void ValidationStateTracker::PostCallRecordCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
- const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore,
- VkResult result) {
+void CoreChecks::PostCallRecordCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore, VkResult result) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (VK_SUCCESS != result) return;
- std::unique_ptr<SEMAPHORE_STATE> semaphore_state(new SEMAPHORE_STATE{});
- semaphore_state->signaler.first = VK_NULL_HANDLE;
- semaphore_state->signaler.second = 0;
- semaphore_state->signaled = false;
- semaphore_state->scope = kSyncScopeInternal;
- semaphoreMap[*pSemaphore] = std::move(semaphore_state);
+ SEMAPHORE_NODE *sNode = &device_data->semaphoreMap[*pSemaphore];
+ sNode->signaler.first = VK_NULL_HANDLE;
+ sNode->signaler.second = 0;
+ sNode->signaled = false;
+ sNode->scope = kSyncScopeInternal;
}
-bool CoreChecks::ValidateImportSemaphore(VkSemaphore semaphore, const char *caller_name) {
+bool CoreChecks::ValidateImportSemaphore(layer_data *device_data, VkSemaphore semaphore, const char *caller_name) {
bool skip = false;
- SEMAPHORE_STATE *sema_node = GetSemaphoreState(semaphore);
+ SEMAPHORE_NODE *sema_node = GetSemaphoreNode(semaphore);
if (sema_node) {
- const VulkanTypedHandle obj_struct(semaphore, kVulkanObjectTypeSemaphore);
- skip |= ValidateObjectNotInUse(sema_node, obj_struct, caller_name, kVUIDUndefined);
+ VK_OBJECT obj_struct = {HandleToUint64(semaphore), kVulkanObjectTypeSemaphore};
+ skip |= ValidateObjectNotInUse(device_data, sema_node, obj_struct, caller_name, kVUIDUndefined);
}
return skip;
}
-void CoreChecks::RecordImportSemaphoreState(VkSemaphore semaphore, VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type,
- VkSemaphoreImportFlagsKHR flags) {
- SEMAPHORE_STATE *sema_node = GetSemaphoreState(semaphore);
+void CoreChecks::RecordImportSemaphoreState(layer_data *device_data, VkSemaphore semaphore,
+ VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type, VkSemaphoreImportFlagsKHR flags) {
+ SEMAPHORE_NODE *sema_node = GetSemaphoreNode(semaphore);
if (sema_node && sema_node->scope != kSyncScopeExternalPermanent) {
if ((handle_type == VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR || flags & VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR) &&
sema_node->scope == kSyncScopeInternal) {
@@ -12418,30 +11381,35 @@ void CoreChecks::RecordImportSemaphoreState(VkSemaphore semaphore, VkExternalSem
#ifdef VK_USE_PLATFORM_WIN32_KHR
bool CoreChecks::PreCallValidateImportSemaphoreWin32HandleKHR(
VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR *pImportSemaphoreWin32HandleInfo) {
- return ValidateImportSemaphore(pImportSemaphoreWin32HandleInfo->semaphore, "vkImportSemaphoreWin32HandleKHR");
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ return ValidateImportSemaphore(device_data, pImportSemaphoreWin32HandleInfo->semaphore, "vkImportSemaphoreWin32HandleKHR");
}
void CoreChecks::PostCallRecordImportSemaphoreWin32HandleKHR(
VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR *pImportSemaphoreWin32HandleInfo, VkResult result) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (VK_SUCCESS != result) return;
- RecordImportSemaphoreState(pImportSemaphoreWin32HandleInfo->semaphore, pImportSemaphoreWin32HandleInfo->handleType,
+ RecordImportSemaphoreState(device_data, pImportSemaphoreWin32HandleInfo->semaphore, pImportSemaphoreWin32HandleInfo->handleType,
pImportSemaphoreWin32HandleInfo->flags);
}
#endif // VK_USE_PLATFORM_WIN32_KHR
bool CoreChecks::PreCallValidateImportSemaphoreFdKHR(VkDevice device, const VkImportSemaphoreFdInfoKHR *pImportSemaphoreFdInfo) {
- return ValidateImportSemaphore(pImportSemaphoreFdInfo->semaphore, "vkImportSemaphoreFdKHR");
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ return ValidateImportSemaphore(device_data, pImportSemaphoreFdInfo->semaphore, "vkImportSemaphoreFdKHR");
}
void CoreChecks::PostCallRecordImportSemaphoreFdKHR(VkDevice device, const VkImportSemaphoreFdInfoKHR *pImportSemaphoreFdInfo,
VkResult result) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (VK_SUCCESS != result) return;
- RecordImportSemaphoreState(pImportSemaphoreFdInfo->semaphore, pImportSemaphoreFdInfo->handleType,
+ RecordImportSemaphoreState(device_data, pImportSemaphoreFdInfo->semaphore, pImportSemaphoreFdInfo->handleType,
pImportSemaphoreFdInfo->flags);
}
-void CoreChecks::RecordGetExternalSemaphoreState(VkSemaphore semaphore, VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type) {
- SEMAPHORE_STATE *semaphore_state = GetSemaphoreState(semaphore);
+void CoreChecks::RecordGetExternalSemaphoreState(layer_data *device_data, VkSemaphore semaphore,
+ VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type) {
+ SEMAPHORE_NODE *semaphore_state = GetSemaphoreNode(semaphore);
if (semaphore_state && handle_type != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR) {
// Cannot track semaphore state once it is exported, except for Sync FD handle types which have copy transference
semaphore_state->scope = kSyncScopeExternalPermanent;
@@ -12452,31 +11420,33 @@ void CoreChecks::RecordGetExternalSemaphoreState(VkSemaphore semaphore, VkExtern
void CoreChecks::PostCallRecordGetSemaphoreWin32HandleKHR(VkDevice device,
const VkSemaphoreGetWin32HandleInfoKHR *pGetWin32HandleInfo,
HANDLE *pHandle, VkResult result) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (VK_SUCCESS != result) return;
- RecordGetExternalSemaphoreState(pGetWin32HandleInfo->semaphore, pGetWin32HandleInfo->handleType);
+ RecordGetExternalSemaphoreState(device_data, pGetWin32HandleInfo->semaphore, pGetWin32HandleInfo->handleType);
}
#endif
void CoreChecks::PostCallRecordGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR *pGetFdInfo, int *pFd,
VkResult result) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (VK_SUCCESS != result) return;
- RecordGetExternalSemaphoreState(pGetFdInfo->semaphore, pGetFdInfo->handleType);
+ RecordGetExternalSemaphoreState(device_data, pGetFdInfo->semaphore, pGetFdInfo->handleType);
}
-bool CoreChecks::ValidateImportFence(VkFence fence, const char *caller_name) {
- FENCE_STATE *fence_node = GetFenceState(fence);
+bool CoreChecks::ValidateImportFence(layer_data *device_data, VkFence fence, const char *caller_name) {
+ FENCE_NODE *fence_node = GetFenceNode(fence);
bool skip = false;
if (fence_node && fence_node->scope == kSyncScopeInternal && fence_node->state == FENCE_INFLIGHT) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, HandleToUint64(fence),
- kVUIDUndefined, "Cannot call %s on %s that is currently in use.", caller_name,
- report_data->FormatHandle(fence).c_str());
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT,
+ HandleToUint64(fence), kVUIDUndefined, "Cannot call %s on fence %s that is currently in use.", caller_name,
+ device_data->report_data->FormatHandle(fence).c_str());
}
return skip;
}
-void CoreChecks::RecordImportFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type,
+void CoreChecks::RecordImportFenceState(layer_data *device_data, VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type,
VkFenceImportFlagsKHR flags) {
- FENCE_STATE *fence_node = GetFenceState(fence);
+ FENCE_NODE *fence_node = GetFenceNode(fence);
if (fence_node && fence_node->scope != kSyncScopeExternalPermanent) {
if ((handle_type == VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR || flags & VK_FENCE_IMPORT_TEMPORARY_BIT_KHR) &&
fence_node->scope == kSyncScopeInternal) {
@@ -12490,28 +11460,33 @@ void CoreChecks::RecordImportFenceState(VkFence fence, VkExternalFenceHandleType
#ifdef VK_USE_PLATFORM_WIN32_KHR
bool CoreChecks::PreCallValidateImportFenceWin32HandleKHR(VkDevice device,
const VkImportFenceWin32HandleInfoKHR *pImportFenceWin32HandleInfo) {
- return ValidateImportFence(pImportFenceWin32HandleInfo->fence, "vkImportFenceWin32HandleKHR");
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ return ValidateImportFence(device_data, pImportFenceWin32HandleInfo->fence, "vkImportFenceWin32HandleKHR");
}
void CoreChecks::PostCallRecordImportFenceWin32HandleKHR(VkDevice device,
const VkImportFenceWin32HandleInfoKHR *pImportFenceWin32HandleInfo,
VkResult result) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (VK_SUCCESS != result) return;
- RecordImportFenceState(pImportFenceWin32HandleInfo->fence, pImportFenceWin32HandleInfo->handleType,
+ RecordImportFenceState(device_data, pImportFenceWin32HandleInfo->fence, pImportFenceWin32HandleInfo->handleType,
pImportFenceWin32HandleInfo->flags);
}
#endif // VK_USE_PLATFORM_WIN32_KHR
bool CoreChecks::PreCallValidateImportFenceFdKHR(VkDevice device, const VkImportFenceFdInfoKHR *pImportFenceFdInfo) {
- return ValidateImportFence(pImportFenceFdInfo->fence, "vkImportFenceFdKHR");
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ return ValidateImportFence(device_data, pImportFenceFdInfo->fence, "vkImportFenceFdKHR");
}
void CoreChecks::PostCallRecordImportFenceFdKHR(VkDevice device, const VkImportFenceFdInfoKHR *pImportFenceFdInfo,
VkResult result) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (VK_SUCCESS != result) return;
- RecordImportFenceState(pImportFenceFdInfo->fence, pImportFenceFdInfo->handleType, pImportFenceFdInfo->flags);
+ RecordImportFenceState(device_data, pImportFenceFdInfo->fence, pImportFenceFdInfo->handleType, pImportFenceFdInfo->flags);
}
-void CoreChecks::RecordGetExternalFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type) {
- FENCE_STATE *fence_state = GetFenceState(fence);
+void CoreChecks::RecordGetExternalFenceState(layer_data *device_data, VkFence fence,
+ VkExternalFenceHandleTypeFlagBitsKHR handle_type) {
+ FENCE_NODE *fence_state = GetFenceNode(fence);
if (fence_state) {
if (handle_type != VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR) {
// Export with reference transference becomes external
@@ -12526,38 +11501,45 @@ void CoreChecks::RecordGetExternalFenceState(VkFence fence, VkExternalFenceHandl
#ifdef VK_USE_PLATFORM_WIN32_KHR
void CoreChecks::PostCallRecordGetFenceWin32HandleKHR(VkDevice device, const VkFenceGetWin32HandleInfoKHR *pGetWin32HandleInfo,
HANDLE *pHandle, VkResult result) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (VK_SUCCESS != result) return;
- RecordGetExternalFenceState(pGetWin32HandleInfo->fence, pGetWin32HandleInfo->handleType);
+ RecordGetExternalFenceState(device_data, pGetWin32HandleInfo->fence, pGetWin32HandleInfo->handleType);
}
#endif
void CoreChecks::PostCallRecordGetFenceFdKHR(VkDevice device, const VkFenceGetFdInfoKHR *pGetFdInfo, int *pFd, VkResult result) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (VK_SUCCESS != result) return;
- RecordGetExternalFenceState(pGetFdInfo->fence, pGetFdInfo->handleType);
+ RecordGetExternalFenceState(device_data, pGetFdInfo->fence, pGetFdInfo->handleType);
}
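// Illustrative sketch, not part of this commit: exporting a fence payload as a POSIX fd, the call
// whose PostCallRecord hook above updates the fence scope. With an opaque-fd handle type the export
// has reference transference, so the layer stops tracking the fence; with
// VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR (copy transference) tracking continues. Assumes the
// VK_KHR_external_fence_fd extension is enabled; the helper name is hypothetical, and in real code
// this entry point is usually loaded through vkGetDeviceProcAddr rather than linked directly.
#include <vulkan/vulkan.h>

VkResult ExportFenceAsOpaqueFd(VkDevice device, VkFence fence, int *out_fd) {
    VkFenceGetFdInfoKHR get_info = {};
    get_info.sType = VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR;
    get_info.fence = fence;
    get_info.handleType = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR;
    return vkGetFenceFdKHR(device, &get_info, out_fd);
}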
-void ValidationStateTracker::PostCallRecordCreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo,
- const VkAllocationCallbacks *pAllocator, VkEvent *pEvent, VkResult result) {
+void CoreChecks::PostCallRecordCreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator, VkEvent *pEvent, VkResult result) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (VK_SUCCESS != result) return;
- eventMap[*pEvent].write_in_use = 0;
- eventMap[*pEvent].stageMask = VkPipelineStageFlags(0);
+ device_data->eventMap[*pEvent].needsSignaled = false;
+ device_data->eventMap[*pEvent].write_in_use = 0;
+ device_data->eventMap[*pEvent].stageMask = VkPipelineStageFlags(0);
}
-bool CoreChecks::ValidateCreateSwapchain(const char *func_name, VkSwapchainCreateInfoKHR const *pCreateInfo,
- const SURFACE_STATE *surface_state, const SWAPCHAIN_NODE *old_swapchain_state) const {
+bool CoreChecks::ValidateCreateSwapchain(layer_data *device_data, const char *func_name,
+ VkSwapchainCreateInfoKHR const *pCreateInfo, SURFACE_STATE *surface_state,
+ SWAPCHAIN_NODE *old_swapchain_state) {
+ VkDevice device = device_data->device;
+
// All physical devices and queue families are required to be able to present to any native window on Android; require the
// application to have established support on any other platform.
- if (!instance_extensions.vk_khr_android_surface) {
- auto support_predicate = [this](decltype(surface_state->gpu_queue_support)::value_type qs) -> bool {
+ if (!device_data->instance_extensions.vk_khr_android_surface) {
+ auto support_predicate = [device_data](decltype(surface_state->gpu_queue_support)::value_type qs) -> bool {
// TODO: should restrict search only to queue families of VkDeviceQueueCreateInfos, not whole phys. device
- return (qs.first.gpu == physical_device) && qs.second;
+ return (qs.first.gpu == device_data->physical_device) && qs.second;
};
const auto &support = surface_state->gpu_queue_support;
bool is_supported = std::any_of(support.begin(), support.end(), support_predicate);
if (!is_supported) {
- if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
- "VUID-VkSwapchainCreateInfoKHR-surface-01270",
+ if (log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ HandleToUint64(device), "VUID-VkSwapchainCreateInfoKHR-surface-01270",
"%s: pCreateInfo->surface is not known at this time to be supported for presentation by this device. The "
"vkGetPhysicalDeviceSurfaceSupportKHR() must be called beforehand, and it must return VK_TRUE support with "
"this surface for at least one queue family of this device.",
@@ -12568,13 +11550,13 @@ bool CoreChecks::ValidateCreateSwapchain(const char *func_name, VkSwapchainCreat
if (old_swapchain_state) {
if (old_swapchain_state->createInfo.surface != pCreateInfo->surface) {
- if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
+ if (log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
HandleToUint64(pCreateInfo->oldSwapchain), "VUID-VkSwapchainCreateInfoKHR-oldSwapchain-01933",
"%s: pCreateInfo->oldSwapchain's surface is not pCreateInfo->surface", func_name))
return true;
}
if (old_swapchain_state->retired) {
- if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
+ if (log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
HandleToUint64(pCreateInfo->oldSwapchain), "VUID-VkSwapchainCreateInfoKHR-oldSwapchain-01933",
"%s: pCreateInfo->oldSwapchain is retired", func_name))
return true;
@@ -12582,179 +11564,136 @@ bool CoreChecks::ValidateCreateSwapchain(const char *func_name, VkSwapchainCreat
}
if ((pCreateInfo->imageExtent.width == 0) || (pCreateInfo->imageExtent.height == 0)) {
- if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
- "VUID-VkSwapchainCreateInfoKHR-imageExtent-01689", "%s: pCreateInfo->imageExtent = (%d, %d) which is illegal.",
- func_name, pCreateInfo->imageExtent.width, pCreateInfo->imageExtent.height))
+ if (log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ HandleToUint64(device), "VUID-VkSwapchainCreateInfoKHR-imageExtent-01689",
+ "%s: pCreateInfo->imageExtent = (%d, %d) which is illegal.", func_name, pCreateInfo->imageExtent.width,
+ pCreateInfo->imageExtent.height))
return true;
}
auto physical_device_state = GetPhysicalDeviceState();
- bool skip = false;
- VkSurfaceTransformFlagBitsKHR currentTransform = physical_device_state->surfaceCapabilities.currentTransform;
- if ((pCreateInfo->preTransform & currentTransform) != pCreateInfo->preTransform) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
- HandleToUint64(physical_device), kVUID_Core_Swapchain_PreTransform,
- "%s: pCreateInfo->preTransform (%s) doesn't match the currentTransform (%s) returned by "
- "vkGetPhysicalDeviceSurfaceCapabilitiesKHR, the presentation engine will transform the image "
- "content as part of the presentation operation.",
- func_name, string_VkSurfaceTransformFlagBitsKHR(pCreateInfo->preTransform),
- string_VkSurfaceTransformFlagBitsKHR(currentTransform));
- }
-
if (physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState == UNCALLED) {
- if (log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
- HandleToUint64(physical_device), kVUID_Core_DrawState_SwapchainCreateBeforeQuery,
+ if (log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+ HandleToUint64(device_data->physical_device), kVUID_Core_DrawState_SwapchainCreateBeforeQuery,
"%s: surface capabilities not retrieved for this physical device", func_name))
return true;
- }
-
- VkSurfaceCapabilitiesKHR capabilities{};
- DispatchGetPhysicalDeviceSurfaceCapabilitiesKHR(physical_device_state->phys_device, pCreateInfo->surface, &capabilities);
- // Validate pCreateInfo->minImageCount against VkSurfaceCapabilitiesKHR::{min|max}ImageCount:
- if (pCreateInfo->minImageCount < capabilities.minImageCount) {
- if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
- "VUID-VkSwapchainCreateInfoKHR-minImageCount-01271",
- "%s called with minImageCount = %d, which is outside the bounds returned by "
- "vkGetPhysicalDeviceSurfaceCapabilitiesKHR() (i.e. minImageCount = %d, maxImageCount = %d).",
- func_name, pCreateInfo->minImageCount, capabilities.minImageCount, capabilities.maxImageCount))
- return true;
- }
-
- if ((capabilities.maxImageCount > 0) && (pCreateInfo->minImageCount > capabilities.maxImageCount)) {
- if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
- "VUID-VkSwapchainCreateInfoKHR-minImageCount-01272",
- "%s called with minImageCount = %d, which is outside the bounds returned by "
- "vkGetPhysicalDeviceSurfaceCapabilitiesKHR() (i.e. minImageCount = %d, maxImageCount = %d).",
- func_name, pCreateInfo->minImageCount, capabilities.minImageCount, capabilities.maxImageCount))
- return true;
- }
-
- // Validate pCreateInfo->imageExtent against VkSurfaceCapabilitiesKHR::{current|min|max}ImageExtent:
- if ((pCreateInfo->imageExtent.width < capabilities.minImageExtent.width) ||
- (pCreateInfo->imageExtent.width > capabilities.maxImageExtent.width) ||
- (pCreateInfo->imageExtent.height < capabilities.minImageExtent.height) ||
- (pCreateInfo->imageExtent.height > capabilities.maxImageExtent.height)) {
- if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
- "VUID-VkSwapchainCreateInfoKHR-imageExtent-01274",
- "%s called with imageExtent = (%d,%d), which is outside the bounds returned by "
- "vkGetPhysicalDeviceSurfaceCapabilitiesKHR(): currentExtent = (%d,%d), minImageExtent = (%d,%d), "
- "maxImageExtent = (%d,%d).",
- func_name, pCreateInfo->imageExtent.width, pCreateInfo->imageExtent.height, capabilities.currentExtent.width,
- capabilities.currentExtent.height, capabilities.minImageExtent.width, capabilities.minImageExtent.height,
- capabilities.maxImageExtent.width, capabilities.maxImageExtent.height))
- return true;
- }
- // pCreateInfo->preTransform should have exactly one bit set, and that bit must also be set in
- // VkSurfaceCapabilitiesKHR::supportedTransforms.
- if (!pCreateInfo->preTransform || (pCreateInfo->preTransform & (pCreateInfo->preTransform - 1)) ||
- !(pCreateInfo->preTransform & capabilities.supportedTransforms)) {
- // This is an error situation; one for which we'd like to give the developer a helpful, multi-line error message. Build
- // it up a little at a time, and then log it:
- std::string errorString = "";
- char str[1024];
- // Here's the first part of the message:
- sprintf(str, "%s called with a non-supported pCreateInfo->preTransform (i.e. %s). Supported values are:\n", func_name,
- string_VkSurfaceTransformFlagBitsKHR(pCreateInfo->preTransform));
- errorString += str;
- for (int i = 0; i < 32; i++) {
- // Build up the rest of the message:
- if ((1 << i) & capabilities.supportedTransforms) {
- const char *newStr = string_VkSurfaceTransformFlagBitsKHR((VkSurfaceTransformFlagBitsKHR)(1 << i));
- sprintf(str, " %s\n", newStr);
- errorString += str;
- }
- }
- // Log the message that we've built up:
- if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
- "VUID-VkSwapchainCreateInfoKHR-preTransform-01279", "%s.", errorString.c_str()))
- return true;
- }
+ } else { // have valid capabilities
+ auto &capabilities = physical_device_state->surfaceCapabilities;
+ // Validate pCreateInfo->minImageCount against VkSurfaceCapabilitiesKHR::{min|max}ImageCount:
+ if (pCreateInfo->minImageCount < capabilities.minImageCount) {
+ if (log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ HandleToUint64(device), "VUID-VkSwapchainCreateInfoKHR-minImageCount-01271",
+ "%s called with minImageCount = %d, which is outside the bounds returned by "
+ "vkGetPhysicalDeviceSurfaceCapabilitiesKHR() (i.e. minImageCount = %d, maxImageCount = %d).",
+ func_name, pCreateInfo->minImageCount, capabilities.minImageCount, capabilities.maxImageCount))
+ return true;
+ }
- // pCreateInfo->compositeAlpha should have exactly one bit set, and that bit must also be set in
- // VkSurfaceCapabilitiesKHR::supportedCompositeAlpha
- if (!pCreateInfo->compositeAlpha || (pCreateInfo->compositeAlpha & (pCreateInfo->compositeAlpha - 1)) ||
- !((pCreateInfo->compositeAlpha) & capabilities.supportedCompositeAlpha)) {
- // This is an error situation; one for which we'd like to give the developer a helpful, multi-line error message. Build
- // it up a little at a time, and then log it:
- std::string errorString = "";
- char str[1024];
- // Here's the first part of the message:
- sprintf(str, "%s called with a non-supported pCreateInfo->compositeAlpha (i.e. %s). Supported values are:\n", func_name,
- string_VkCompositeAlphaFlagBitsKHR(pCreateInfo->compositeAlpha));
- errorString += str;
- for (int i = 0; i < 32; i++) {
- // Build up the rest of the message:
- if ((1 << i) & capabilities.supportedCompositeAlpha) {
- const char *newStr = string_VkCompositeAlphaFlagBitsKHR((VkCompositeAlphaFlagBitsKHR)(1 << i));
- sprintf(str, " %s\n", newStr);
- errorString += str;
- }
- }
- // Log the message that we've built up:
- if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
- "VUID-VkSwapchainCreateInfoKHR-compositeAlpha-01280", "%s.", errorString.c_str()))
- return true;
- }
- // Validate pCreateInfo->imageArrayLayers against VkSurfaceCapabilitiesKHR::maxImageArrayLayers:
- if (pCreateInfo->imageArrayLayers > capabilities.maxImageArrayLayers) {
- if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
- "VUID-VkSwapchainCreateInfoKHR-imageArrayLayers-01275",
- "%s called with a non-supported imageArrayLayers (i.e. %d). Maximum value is %d.", func_name,
- pCreateInfo->imageArrayLayers, capabilities.maxImageArrayLayers))
- return true;
- }
- // Validate pCreateInfo->imageUsage against VkSurfaceCapabilitiesKHR::supportedUsageFlags:
- if (pCreateInfo->imageUsage != (pCreateInfo->imageUsage & capabilities.supportedUsageFlags)) {
- if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
- "VUID-VkSwapchainCreateInfoKHR-imageUsage-01276",
- "%s called with a non-supported pCreateInfo->imageUsage (i.e. 0x%08x). Supported flag bits are 0x%08x.",
- func_name, pCreateInfo->imageUsage, capabilities.supportedUsageFlags))
- return true;
- }
+ if ((capabilities.maxImageCount > 0) && (pCreateInfo->minImageCount > capabilities.maxImageCount)) {
+ if (log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ HandleToUint64(device), "VUID-VkSwapchainCreateInfoKHR-minImageCount-01272",
+ "%s called with minImageCount = %d, which is outside the bounds returned by "
+ "vkGetPhysicalDeviceSurfaceCapabilitiesKHR() (i.e. minImageCount = %d, maxImageCount = %d).",
+ func_name, pCreateInfo->minImageCount, capabilities.minImageCount, capabilities.maxImageCount))
+ return true;
+ }
- if (device_extensions.vk_khr_surface_protected_capabilities && (pCreateInfo->flags & VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR)) {
- VkPhysicalDeviceSurfaceInfo2KHR surfaceInfo = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR};
- surfaceInfo.surface = pCreateInfo->surface;
- VkSurfaceProtectedCapabilitiesKHR surfaceProtectedCapabilities = {VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR};
- VkSurfaceCapabilities2KHR surfaceCapabilities = {VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR};
- surfaceCapabilities.pNext = &surfaceProtectedCapabilities;
- DispatchGetPhysicalDeviceSurfaceCapabilities2KHR(physical_device_state->phys_device, &surfaceInfo, &surfaceCapabilities);
+ // Validate pCreateInfo->imageExtent against VkSurfaceCapabilitiesKHR::{current|min|max}ImageExtent:
+ if ((pCreateInfo->imageExtent.width < capabilities.minImageExtent.width) ||
+ (pCreateInfo->imageExtent.width > capabilities.maxImageExtent.width) ||
+ (pCreateInfo->imageExtent.height < capabilities.minImageExtent.height) ||
+ (pCreateInfo->imageExtent.height > capabilities.maxImageExtent.height)) {
+ if (log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ HandleToUint64(device), "VUID-VkSwapchainCreateInfoKHR-imageExtent-01274",
+ "%s called with imageExtent = (%d,%d), which is outside the bounds returned by "
+ "vkGetPhysicalDeviceSurfaceCapabilitiesKHR(): currentExtent = (%d,%d), minImageExtent = (%d,%d), "
+ "maxImageExtent = (%d,%d).",
+ func_name, pCreateInfo->imageExtent.width, pCreateInfo->imageExtent.height,
+ capabilities.currentExtent.width, capabilities.currentExtent.height, capabilities.minImageExtent.width,
+ capabilities.minImageExtent.height, capabilities.maxImageExtent.width, capabilities.maxImageExtent.height))
+ return true;
+ }
+ // pCreateInfo->preTransform should have exactly one bit set, and that bit must also be set in
+ // VkSurfaceCapabilitiesKHR::supportedTransforms.
+ if (!pCreateInfo->preTransform || (pCreateInfo->preTransform & (pCreateInfo->preTransform - 1)) ||
+ !(pCreateInfo->preTransform & capabilities.supportedTransforms)) {
+ // This is an error situation; one for which we'd like to give the developer a helpful, multi-line error message. Build
+ // it up a little at a time, and then log it:
+ std::string errorString = "";
+ char str[1024];
+ // Here's the first part of the message:
+ sprintf(str, "%s called with a non-supported pCreateInfo->preTransform (i.e. %s). Supported values are:\n", func_name,
+ string_VkSurfaceTransformFlagBitsKHR(pCreateInfo->preTransform));
+ errorString += str;
+ for (int i = 0; i < 32; i++) {
+ // Build up the rest of the message:
+ if ((1 << i) & capabilities.supportedTransforms) {
+ const char *newStr = string_VkSurfaceTransformFlagBitsKHR((VkSurfaceTransformFlagBitsKHR)(1 << i));
+ sprintf(str, " %s\n", newStr);
+ errorString += str;
+ }
+ }
+ // Log the message that we've built up:
+ if (log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ HandleToUint64(device), "VUID-VkSwapchainCreateInfoKHR-preTransform-01279", "%s.", errorString.c_str()))
+ return true;
+ }
- if (!surfaceProtectedCapabilities.supportsProtected) {
- if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
- "VUID-VkSwapchainCreateInfoKHR-flags-03187",
- "%s: pCreateInfo->flags contains VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR but the surface "
- "capabilities does not have VkSurfaceProtectedCapabilitiesKHR.supportsProtected set to VK_TRUE.",
- func_name))
+ // pCreateInfo->compositeAlpha should have exactly one bit set, and that bit must also be set in
+ // VkSurfaceCapabilitiesKHR::supportedCompositeAlpha
+ if (!pCreateInfo->compositeAlpha || (pCreateInfo->compositeAlpha & (pCreateInfo->compositeAlpha - 1)) ||
+ !((pCreateInfo->compositeAlpha) & capabilities.supportedCompositeAlpha)) {
+ // This is an error situation; one for which we'd like to give the developer a helpful, multi-line error message. Build
+ // it up a little at a time, and then log it:
+ std::string errorString = "";
+ char str[1024];
+ // Here's the first part of the message:
+ sprintf(str, "%s called with a non-supported pCreateInfo->compositeAlpha (i.e. %s). Supported values are:\n",
+ func_name, string_VkCompositeAlphaFlagBitsKHR(pCreateInfo->compositeAlpha));
+ errorString += str;
+ for (int i = 0; i < 32; i++) {
+ // Build up the rest of the message:
+ if ((1 << i) & capabilities.supportedCompositeAlpha) {
+ const char *newStr = string_VkCompositeAlphaFlagBitsKHR((VkCompositeAlphaFlagBitsKHR)(1 << i));
+ sprintf(str, " %s\n", newStr);
+ errorString += str;
+ }
+ }
+ // Log the message that we've built up:
+ if (log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ HandleToUint64(device), "VUID-VkSwapchainCreateInfoKHR-compositeAlpha-01280", "%s.", errorString.c_str()))
+ return true;
+ }
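// Illustrative sketch, not part of this commit: the "exactly one bit set, and that bit advertised
// as supported" test that the preTransform and compositeAlpha checks above encode inline.
// (x & (x - 1)) clears the lowest set bit, so it is zero exactly when at most one bit is set.
// The helper name is hypothetical.
#include <cstdint>

static bool IsSingleSupportedBit(uint32_t requested, uint32_t supported_mask) {
    const bool exactly_one_bit = (requested != 0) && ((requested & (requested - 1)) == 0);
    return exactly_one_bit && ((requested & supported_mask) != 0);
}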
+ // Validate pCreateInfo->imageArrayLayers against VkSurfaceCapabilitiesKHR::maxImageArrayLayers:
+ if (pCreateInfo->imageArrayLayers > capabilities.maxImageArrayLayers) {
+ if (log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ HandleToUint64(device), "VUID-VkSwapchainCreateInfoKHR-imageArrayLayers-01275",
+ "%s called with a non-supported imageArrayLayers (i.e. %d). Maximum value is %d.", func_name,
+ pCreateInfo->imageArrayLayers, capabilities.maxImageArrayLayers))
+ return true;
+ }
+ // Validate pCreateInfo->imageUsage against VkSurfaceCapabilitiesKHR::supportedUsageFlags:
+ if (pCreateInfo->imageUsage != (pCreateInfo->imageUsage & capabilities.supportedUsageFlags)) {
+ if (log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ HandleToUint64(device), "VUID-VkSwapchainCreateInfoKHR-imageUsage-01276",
+ "%s called with a non-supported pCreateInfo->imageUsage (i.e. 0x%08x). Supported flag bits are 0x%08x.",
+ func_name, pCreateInfo->imageUsage, capabilities.supportedUsageFlags))
return true;
}
}
- std::vector<VkSurfaceFormatKHR> surface_formats;
- const auto *surface_formats_ref = &surface_formats;
-
// Validate pCreateInfo values with the results of vkGetPhysicalDeviceSurfaceFormatsKHR():
if (physical_device_state->vkGetPhysicalDeviceSurfaceFormatsKHRState != QUERY_DETAILS) {
- if (log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
- kVUID_Core_DrawState_SwapchainCreateBeforeQuery,
- "%s called before getting format(s) from vkGetPhysicalDeviceSurfaceFormatsKHR().", func_name)) {
+ if (log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ HandleToUint64(device), kVUID_Core_DrawState_SwapchainCreateBeforeQuery,
+ "%s called before calling vkGetPhysicalDeviceSurfaceFormatsKHR().", func_name))
return true;
- }
- uint32_t surface_format_count = 0;
- DispatchGetPhysicalDeviceSurfaceFormatsKHR(physical_device, pCreateInfo->surface, &surface_format_count, nullptr);
- surface_formats.resize(surface_format_count);
- DispatchGetPhysicalDeviceSurfaceFormatsKHR(physical_device, pCreateInfo->surface, &surface_format_count,
- &surface_formats[0]);
} else {
- surface_formats_ref = &physical_device_state->surface_formats;
- }
-
- {
// Validate pCreateInfo->imageFormat against VkSurfaceFormatKHR::format:
bool foundFormat = false;
bool foundColorSpace = false;
bool foundMatch = false;
- for (auto const &format : *surface_formats_ref) {
+ for (auto const &format : physical_device_state->surface_formats) {
if (pCreateInfo->imageFormat == format.format) {
// Validate pCreateInfo->imageColorSpace against VkSurfaceFormatKHR::colorSpace:
foundFormat = true;
@@ -12770,14 +11709,14 @@ bool CoreChecks::ValidateCreateSwapchain(const char *func_name, VkSwapchainCreat
}
if (!foundMatch) {
if (!foundFormat) {
- if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ if (log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
HandleToUint64(device), "VUID-VkSwapchainCreateInfoKHR-imageFormat-01273",
"%s called with a non-supported pCreateInfo->imageFormat (i.e. %d).", func_name,
pCreateInfo->imageFormat))
return true;
}
if (!foundColorSpace) {
- if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ if (log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
HandleToUint64(device), "VUID-VkSwapchainCreateInfoKHR-imageFormat-01273",
"%s called with a non-supported pCreateInfo->imageColorSpace (i.e. %d).", func_name,
pCreateInfo->imageColorSpace))
@@ -12790,9 +11729,9 @@ bool CoreChecks::ValidateCreateSwapchain(const char *func_name, VkSwapchainCreat
if (physical_device_state->vkGetPhysicalDeviceSurfacePresentModesKHRState != QUERY_DETAILS) {
// FIFO is required to always be supported
if (pCreateInfo->presentMode != VK_PRESENT_MODE_FIFO_KHR) {
- if (log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ if (log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
HandleToUint64(device), kVUID_Core_DrawState_SwapchainCreateBeforeQuery,
- "%s called before getting present mode(s) from vkGetPhysicalDeviceSurfacePresentModesKHR().", func_name))
+ "%s called before calling vkGetPhysicalDeviceSurfacePresentModesKHR().", func_name))
return true;
}
} else {
@@ -12800,25 +11739,26 @@ bool CoreChecks::ValidateCreateSwapchain(const char *func_name, VkSwapchainCreat
bool foundMatch = std::find(physical_device_state->present_modes.begin(), physical_device_state->present_modes.end(),
pCreateInfo->presentMode) != physical_device_state->present_modes.end();
if (!foundMatch) {
- if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
- "VUID-VkSwapchainCreateInfoKHR-presentMode-01281", "%s called with a non-supported presentMode (i.e. %s).",
- func_name, string_VkPresentModeKHR(pCreateInfo->presentMode)))
+ if (log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ HandleToUint64(device), "VUID-VkSwapchainCreateInfoKHR-presentMode-01281",
+ "%s called with a non-supported presentMode (i.e. %s).", func_name,
+ string_VkPresentModeKHR(pCreateInfo->presentMode)))
return true;
}
}
// Validate state for shared presentable case
if (VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR == pCreateInfo->presentMode ||
VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR == pCreateInfo->presentMode) {
- if (!device_extensions.vk_khr_shared_presentable_image) {
- if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
- kVUID_Core_DrawState_ExtensionNotEnabled,
+ if (!device_data->device_extensions.vk_khr_shared_presentable_image) {
+ if (log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ HandleToUint64(device), kVUID_Core_DrawState_ExtensionNotEnabled,
"%s called with presentMode %s which requires the VK_KHR_shared_presentable_image extension, which has not "
"been enabled.",
func_name, string_VkPresentModeKHR(pCreateInfo->presentMode)))
return true;
} else if (pCreateInfo->minImageCount != 1) {
- if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
- "VUID-VkSwapchainCreateInfoKHR-minImageCount-01383",
+ if (log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ HandleToUint64(device), "VUID-VkSwapchainCreateInfoKHR-minImageCount-01383",
"%s called with presentMode %s, but minImageCount value is %d. For shared presentable image, minImageCount "
"must be 1.",
func_name, string_VkPresentModeKHR(pCreateInfo->presentMode), pCreateInfo->minImageCount))
@@ -12827,9 +11767,9 @@ bool CoreChecks::ValidateCreateSwapchain(const char *func_name, VkSwapchainCreat
}
if (pCreateInfo->flags & VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR) {
- if (!device_extensions.vk_khr_swapchain_mutable_format) {
- if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
- kVUID_Core_DrawState_ExtensionNotEnabled,
+ if (!device_data->device_extensions.vk_khr_swapchain_mutable_format) {
+ if (log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ HandleToUint64(device), kVUID_Core_DrawState_ExtensionNotEnabled,
"%s: pCreateInfo->flags contains VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR which requires the "
"VK_KHR_swapchain_mutable_format extension, which has not been enabled.",
func_name))
@@ -12837,14 +11777,14 @@ bool CoreChecks::ValidateCreateSwapchain(const char *func_name, VkSwapchainCreat
} else {
const auto *image_format_list = lvl_find_in_chain<VkImageFormatListCreateInfoKHR>(pCreateInfo->pNext);
if (image_format_list == nullptr) {
- if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ if (log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
HandleToUint64(device), "VUID-VkSwapchainCreateInfoKHR-flags-03168",
"%s: pCreateInfo->flags contains VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR but the pNext chain of "
"pCreateInfo does not contain an instance of VkImageFormatListCreateInfoKHR.",
func_name))
return true;
} else if (image_format_list->viewFormatCount == 0) {
- if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ if (log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
HandleToUint64(device), "VUID-VkSwapchainCreateInfoKHR-flags-03168",
"%s: pCreateInfo->flags contains VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR but the viewFormatCount "
"member of VkImageFormatListCreateInfoKHR in the pNext chain is zero.",
@@ -12859,7 +11799,7 @@ bool CoreChecks::ValidateCreateSwapchain(const char *func_name, VkSwapchainCreat
}
}
if (!found_base_format) {
- if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ if (log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
HandleToUint64(device), "VUID-VkSwapchainCreateInfoKHR-flags-03168",
"%s: pCreateInfo->flags contains VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR but none of the "
"elements of the pViewFormats member of VkImageFormatListCreateInfoKHR match "
@@ -12872,26 +11812,27 @@ bool CoreChecks::ValidateCreateSwapchain(const char *func_name, VkSwapchainCreat
}
if ((pCreateInfo->imageSharingMode == VK_SHARING_MODE_CONCURRENT) && pCreateInfo->pQueueFamilyIndices) {
- bool skip1 =
- ValidateQueueFamilies(pCreateInfo->queueFamilyIndexCount, pCreateInfo->pQueueFamilyIndices, "vkCreateBuffer",
- "pCreateInfo->pQueueFamilyIndices", "VUID-VkSwapchainCreateInfoKHR-imageSharingMode-01428",
- "VUID-VkSwapchainCreateInfoKHR-imageSharingMode-01428", false);
- if (skip1) return true;
+ bool skip = ValidateQueueFamilies(device_data, pCreateInfo->queueFamilyIndexCount, pCreateInfo->pQueueFamilyIndices,
+                                          "vkCreateSwapchainKHR", "pCreateInfo->pQueueFamilyIndices",
+ "VUID-VkSwapchainCreateInfoKHR-imageSharingMode-01428",
+ "VUID-VkSwapchainCreateInfoKHR-imageSharingMode-01428", false);
+ if (skip) return true;
}
- return skip;
+ return false;
}
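For readers tracing the checks above, a minimal application-side sketch of a VkSwapchainCreateInfoKHR that satisfies the mutable-format, shared-presentable, and sharing-mode rules; this is illustrative only, not part of the patch, and the `surface`, `surface_caps`, `device`, and `swapchain` names are assumed to have been obtained earlier.

// Illustrative sketch, not part of this patch; assumes VK_KHR_swapchain_mutable_format is enabled
// and that surface capabilities were queried beforehand (hypothetical variable names).
VkFormat view_formats[2] = {VK_FORMAT_B8G8R8A8_UNORM, VK_FORMAT_B8G8R8A8_SRGB};
VkImageFormatListCreateInfoKHR format_list = {};
format_list.sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR;
format_list.viewFormatCount = 2;          // must be non-zero when the mutable-format flag is set
format_list.pViewFormats = view_formats;  // must include the base imageFormat chosen below
VkSwapchainCreateInfoKHR create_info = {};
create_info.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
create_info.pNext = &format_list;         // required whenever the mutable-format flag is used
create_info.flags = VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR;
create_info.surface = surface;
create_info.minImageCount = surface_caps.minImageCount;  // must be exactly 1 for shared presentable modes
create_info.imageFormat = VK_FORMAT_B8G8R8A8_UNORM;
create_info.imageColorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
create_info.imageExtent = surface_caps.currentExtent;
create_info.imageArrayLayers = 1;
create_info.imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
create_info.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;  // exclusive mode avoids the pQueueFamilyIndices checks
create_info.preTransform = surface_caps.currentTransform;
create_info.compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
create_info.presentMode = VK_PRESENT_MODE_FIFO_KHR;
create_info.clipped = VK_TRUE;
vkCreateSwapchainKHR(device, &create_info, nullptr, &swapchain);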
bool CoreChecks::PreCallValidateCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain) {
- const auto surface_state = GetSurfaceState(pCreateInfo->surface);
- const auto old_swapchain_state = GetSwapchainState(pCreateInfo->oldSwapchain);
- return ValidateCreateSwapchain("vkCreateSwapchainKHR()", pCreateInfo, surface_state, old_swapchain_state);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ auto surface_state = GetSurfaceState(pCreateInfo->surface);
+ auto old_swapchain_state = GetSwapchainNode(pCreateInfo->oldSwapchain);
+ return ValidateCreateSwapchain(device_data, "vkCreateSwapchainKHR()", pCreateInfo, surface_state, old_swapchain_state);
}
-void ValidationStateTracker::RecordCreateSwapchainState(VkResult result, const VkSwapchainCreateInfoKHR *pCreateInfo,
- VkSwapchainKHR *pSwapchain, SURFACE_STATE *surface_state,
- SWAPCHAIN_NODE *old_swapchain_state) {
+static void RecordCreateSwapchainState(layer_data *device_data, VkResult result, const VkSwapchainCreateInfoKHR *pCreateInfo,
+ VkSwapchainKHR *pSwapchain, SURFACE_STATE *surface_state,
+ SWAPCHAIN_NODE *old_swapchain_state) {
if (VK_SUCCESS == result) {
auto swapchain_state = unique_ptr<SWAPCHAIN_NODE>(new SWAPCHAIN_NODE(pCreateInfo, *pSwapchain));
if (VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR == pCreateInfo->presentMode ||
@@ -12899,7 +11840,7 @@ void ValidationStateTracker::RecordCreateSwapchainState(VkResult result, const V
swapchain_state->shared_presentable = true;
}
surface_state->swapchain = swapchain_state.get();
- swapchainMap[*pSwapchain] = std::move(swapchain_state);
+ device_data->swapchainMap[*pSwapchain] = std::move(swapchain_state);
} else {
surface_state->swapchain = nullptr;
}
@@ -12910,22 +11851,37 @@ void ValidationStateTracker::RecordCreateSwapchainState(VkResult result, const V
return;
}
-void ValidationStateTracker::PostCallRecordCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
- const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain,
- VkResult result) {
+void CoreChecks::PostCallRecordCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain,
+ VkResult result) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
auto surface_state = GetSurfaceState(pCreateInfo->surface);
- auto old_swapchain_state = GetSwapchainState(pCreateInfo->oldSwapchain);
- RecordCreateSwapchainState(result, pCreateInfo, pSwapchain, surface_state, old_swapchain_state);
+ auto old_swapchain_state = GetSwapchainNode(pCreateInfo->oldSwapchain);
+ RecordCreateSwapchainState(device_data, result, pCreateInfo, pSwapchain, surface_state, old_swapchain_state);
}
-void ValidationStateTracker::PreCallRecordDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain,
- const VkAllocationCallbacks *pAllocator) {
+void CoreChecks::PreCallRecordDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain,
+ const VkAllocationCallbacks *pAllocator) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (!swapchain) return;
- auto swapchain_data = GetSwapchainState(swapchain);
+ auto swapchain_data = GetSwapchainNode(swapchain);
if (swapchain_data) {
- for (const auto &swapchain_image : swapchain_data->images) {
- ClearMemoryObjectBindings(VulkanTypedHandle(swapchain_image, kVulkanObjectTypeImage));
- imageMap.erase(swapchain_image);
+ if (swapchain_data->images.size() > 0) {
+ for (auto swapchain_image : swapchain_data->images) {
+ auto image_sub = device_data->imageSubresourceMap.find(swapchain_image);
+ if (image_sub != device_data->imageSubresourceMap.end()) {
+ for (auto imgsubpair : image_sub->second) {
+ auto image_item = device_data->imageLayoutMap.find(imgsubpair);
+ if (image_item != device_data->imageLayoutMap.end()) {
+ device_data->imageLayoutMap.erase(image_item);
+ }
+ }
+ device_data->imageSubresourceMap.erase(image_sub);
+ }
+ ClearMemoryObjectBindings(HandleToUint64(swapchain_image), kVulkanObjectTypeSwapchainKHR);
+ EraseQFOImageRelaseBarriers(device_data, swapchain_image);
+ device_data->imageMap.erase(swapchain_image);
+ }
}
auto surface_state = GetSurfaceState(swapchain_data->createInfo.surface);
@@ -12933,47 +11889,26 @@ void ValidationStateTracker::PreCallRecordDestroySwapchainKHR(VkDevice device, V
if (surface_state->swapchain == swapchain_data) surface_state->swapchain = nullptr;
}
- swapchainMap.erase(swapchain);
+ device_data->swapchainMap.erase(swapchain);
}
}
-void CoreChecks::PreCallRecordDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain,
- const VkAllocationCallbacks *pAllocator) {
- if (swapchain) {
- auto swapchain_data = GetSwapchainState(swapchain);
- if (swapchain_data) {
- for (const auto &swapchain_image : swapchain_data->images) {
- auto image_sub = imageSubresourceMap.find(swapchain_image);
- if (image_sub != imageSubresourceMap.end()) {
- for (auto imgsubpair : image_sub->second) {
- auto image_item = imageLayoutMap.find(imgsubpair);
- if (image_item != imageLayoutMap.end()) {
- imageLayoutMap.erase(image_item);
- }
- }
- imageSubresourceMap.erase(image_sub);
- }
- EraseQFOImageRelaseBarriers(swapchain_image);
- }
- }
- }
- StateTracker::PreCallRecordDestroySwapchainKHR(device, swapchain, pAllocator);
-}
bool CoreChecks::PreCallValidateGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t *pSwapchainImageCount,
VkImage *pSwapchainImages) {
- auto swapchain_state = GetSwapchainState(swapchain);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ auto swapchain_state = GetSwapchainNode(swapchain);
bool skip = false;
if (swapchain_state && pSwapchainImages) {
// Compare the preliminary value of *pSwapchainImageCount with the value this time:
if (swapchain_state->vkGetSwapchainImagesKHRState == UNCALLED) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
HandleToUint64(device), kVUID_Core_Swapchain_PriorCount,
"vkGetSwapchainImagesKHR() called with non-NULL pSwapchainImageCount; but no prior positive value has "
"been seen for pSwapchainImages.");
} else if (*pSwapchainImageCount > swapchain_state->get_swapchain_image_count) {
skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
- kVUID_Core_Swapchain_InvalidCount,
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ HandleToUint64(device), kVUID_Core_Swapchain_InvalidCount,
"vkGetSwapchainImagesKHR() called with non-NULL pSwapchainImageCount, and with pSwapchainImages set to a "
"value (%d) that is greater than the value (%d) that was returned when pSwapchainImageCount was NULL.",
*pSwapchainImageCount, swapchain_state->get_swapchain_image_count);
@@ -12984,8 +11919,10 @@ bool CoreChecks::PreCallValidateGetSwapchainImagesKHR(VkDevice device, VkSwapcha
void CoreChecks::PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t *pSwapchainImageCount,
VkImage *pSwapchainImages, VkResult result) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+
if ((result != VK_SUCCESS) && (result != VK_INCOMPLETE)) return;
- auto swapchain_state = GetSwapchainState(swapchain);
+ auto swapchain_state = GetSwapchainNode(swapchain);
if (*pSwapchainImageCount > swapchain_state->images.size()) swapchain_state->images.resize(*pSwapchainImageCount);
@@ -12996,7 +11933,7 @@ void CoreChecks::PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapchai
for (uint32_t i = 0; i < *pSwapchainImageCount; ++i) {
if (swapchain_state->images[i] != VK_NULL_HANDLE) continue; // Already retrieved this.
- IMAGE_LAYOUT_STATE image_layout_node;
+ IMAGE_LAYOUT_NODE image_layout_node;
image_layout_node.layout = VK_IMAGE_LAYOUT_UNDEFINED;
image_layout_node.format = swapchain_state->createInfo.imageFormat;
// Add imageMap entries for each swapchain image
@@ -13013,14 +11950,14 @@ void CoreChecks::PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapchai
image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
image_ci.usage = swapchain_state->createInfo.imageUsage;
image_ci.sharingMode = swapchain_state->createInfo.imageSharingMode;
- imageMap[pSwapchainImages[i]] = unique_ptr<IMAGE_STATE>(new IMAGE_STATE(pSwapchainImages[i], &image_ci));
- auto &image_state = imageMap[pSwapchainImages[i]];
+ device_data->imageMap[pSwapchainImages[i]] = unique_ptr<IMAGE_STATE>(new IMAGE_STATE(pSwapchainImages[i], &image_ci));
+ auto &image_state = device_data->imageMap[pSwapchainImages[i]];
image_state->valid = false;
image_state->binding.mem = MEMTRACKER_SWAP_CHAIN_IMAGE_KEY;
swapchain_state->images[i] = pSwapchainImages[i];
ImageSubresourcePair subpair = {pSwapchainImages[i], false, VkImageSubresource()};
- imageSubresourceMap[pSwapchainImages[i]].push_back(subpair);
- imageLayoutMap[subpair] = image_layout_node;
+ device_data->imageSubresourceMap[pSwapchainImages[i]].push_back(subpair);
+ device_data->imageLayoutMap[subpair] = image_layout_node;
}
}
@@ -13033,25 +11970,27 @@ void CoreChecks::PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapchai
}
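The count checks in PreCallValidateGetSwapchainImagesKHR above assume the standard enumerate-twice idiom; a minimal sketch of that usage, illustrative only, with `device` and `swapchain` assumed to exist.

// Illustrative sketch, not part of this patch.
uint32_t image_count = 0;
vkGetSwapchainImagesKHR(device, swapchain, &image_count, nullptr);                   // first call: query the count
std::vector<VkImage> swapchain_images(image_count);
vkGetSwapchainImagesKHR(device, swapchain, &image_count, swapchain_images.data());   // second call: fetch the handles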
bool CoreChecks::PreCallValidateQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
bool skip = false;
auto queue_state = GetQueueState(queue);
for (uint32_t i = 0; i < pPresentInfo->waitSemaphoreCount; ++i) {
- auto pSemaphore = GetSemaphoreState(pPresentInfo->pWaitSemaphores[i]);
+ auto pSemaphore = GetSemaphoreNode(pPresentInfo->pWaitSemaphores[i]);
if (pSemaphore && !pSemaphore->signaled) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0,
- kVUID_Core_DrawState_QueueForwardProgress, "%s is waiting on %s that has no way to be signaled.",
- report_data->FormatHandle(queue).c_str(),
- report_data->FormatHandle(pPresentInfo->pWaitSemaphores[i]).c_str());
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ 0, kVUID_Core_DrawState_QueueForwardProgress,
+ "Queue %s is waiting on semaphore %s that has no way to be signaled.",
+ device_data->report_data->FormatHandle(queue).c_str(),
+ device_data->report_data->FormatHandle(pPresentInfo->pWaitSemaphores[i]).c_str());
}
}
for (uint32_t i = 0; i < pPresentInfo->swapchainCount; ++i) {
- auto swapchain_data = GetSwapchainState(pPresentInfo->pSwapchains[i]);
+ auto swapchain_data = GetSwapchainNode(pPresentInfo->pSwapchains[i]);
if (swapchain_data) {
if (pPresentInfo->pImageIndices[i] >= swapchain_data->images.size()) {
skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
HandleToUint64(pPresentInfo->pSwapchains[i]), kVUID_Core_DrawState_SwapchainInvalidImage,
"vkQueuePresentKHR: Swapchain image index too large (%u). There are only %u images in this swapchain.",
pPresentInfo->pImageIndices[i], (uint32_t)swapchain_data->images.size());
@@ -13064,19 +12003,21 @@ bool CoreChecks::PreCallValidateQueuePresentKHR(VkQueue queue, const VkPresentIn
}
if (!image_state->acquired) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
- HandleToUint64(pPresentInfo->pSwapchains[i]), kVUID_Core_DrawState_SwapchainImageNotAcquired,
- "vkQueuePresentKHR: Swapchain image index %u has not been acquired.",
- pPresentInfo->pImageIndices[i]);
+ skip |= log_msg(
+ device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
+ HandleToUint64(pPresentInfo->pSwapchains[i]), kVUID_Core_DrawState_SwapchainImageNotAcquired,
+ "vkQueuePresentKHR: Swapchain image index %u has not been acquired.", pPresentInfo->pImageIndices[i]);
}
vector<VkImageLayout> layouts;
- if (FindLayouts(image, layouts)) {
+ if (FindLayouts(device_data, image, layouts)) {
for (auto layout : layouts) {
- if ((layout != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) && (!device_extensions.vk_khr_shared_presentable_image ||
- (layout != VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR))) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT,
- HandleToUint64(queue), "VUID-VkPresentInfoKHR-pImageIndices-01296",
+ if ((layout != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) &&
+ (!device_data->device_extensions.vk_khr_shared_presentable_image ||
+ (layout != VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR))) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, HandleToUint64(queue),
+ "VUID-VkPresentInfoKHR-pImageIndices-01296",
"Images passed to present must be in layout VK_IMAGE_LAYOUT_PRESENT_SRC_KHR or "
"VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR but is in %s.",
string_VkImageLayout(layout));
@@ -13087,17 +12028,20 @@ bool CoreChecks::PreCallValidateQueuePresentKHR(VkQueue queue, const VkPresentIn
// All physical devices and queue families are required to be able to present to any native window on Android; require
// the application to have established support on any other platform.
- if (!instance_extensions.vk_khr_android_surface) {
+ if (!device_data->instance_extensions.vk_khr_android_surface) {
auto surface_state = GetSurfaceState(swapchain_data->createInfo.surface);
- auto support_it = surface_state->gpu_queue_support.find({physical_device, queue_state->queueFamilyIndex});
+ auto support_it =
+ surface_state->gpu_queue_support.find({device_data->physical_device, queue_state->queueFamilyIndex});
if (support_it == surface_state->gpu_queue_support.end()) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
- HandleToUint64(pPresentInfo->pSwapchains[i]), kVUID_Core_DrawState_SwapchainUnsupportedQueue,
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, HandleToUint64(pPresentInfo->pSwapchains[i]),
+ kVUID_Core_DrawState_SwapchainUnsupportedQueue,
"vkQueuePresentKHR: Presenting image without calling vkGetPhysicalDeviceSurfaceSupportKHR");
} else if (!support_it->second) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
- HandleToUint64(pPresentInfo->pSwapchains[i]), "VUID-vkQueuePresentKHR-pSwapchains-01292",
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, HandleToUint64(pPresentInfo->pSwapchains[i]),
+ "VUID-vkQueuePresentKHR-pSwapchains-01292",
"vkQueuePresentKHR: Presenting image on queue that cannot present to this surface.");
}
}
@@ -13108,22 +12052,24 @@ bool CoreChecks::PreCallValidateQueuePresentKHR(VkQueue queue, const VkPresentIn
const auto *present_regions = lvl_find_in_chain<VkPresentRegionsKHR>(pPresentInfo->pNext);
if (present_regions) {
for (uint32_t i = 0; i < present_regions->swapchainCount; ++i) {
- auto swapchain_data = GetSwapchainState(pPresentInfo->pSwapchains[i]);
+ auto swapchain_data = GetSwapchainNode(pPresentInfo->pSwapchains[i]);
assert(swapchain_data);
VkPresentRegionKHR region = present_regions->pRegions[i];
for (uint32_t j = 0; j < region.rectangleCount; ++j) {
VkRectLayerKHR rect = region.pRectangles[j];
if ((rect.offset.x + rect.extent.width) > swapchain_data->createInfo.imageExtent.width) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
- HandleToUint64(pPresentInfo->pSwapchains[i]), "VUID-VkRectLayerKHR-offset-01261",
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, HandleToUint64(pPresentInfo->pSwapchains[i]),
+ "VUID-VkRectLayerKHR-offset-01261",
"vkQueuePresentKHR(): For VkPresentRegionKHR down pNext chain, "
"pRegion[%i].pRectangles[%i], the sum of offset.x (%i) and extent.width (%i) is greater "
"than the corresponding swapchain's imageExtent.width (%i).",
i, j, rect.offset.x, rect.extent.width, swapchain_data->createInfo.imageExtent.width);
}
if ((rect.offset.y + rect.extent.height) > swapchain_data->createInfo.imageExtent.height) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
- HandleToUint64(pPresentInfo->pSwapchains[i]), "VUID-VkRectLayerKHR-offset-01261",
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, HandleToUint64(pPresentInfo->pSwapchains[i]),
+ "VUID-VkRectLayerKHR-offset-01261",
"vkQueuePresentKHR(): For VkPresentRegionKHR down pNext chain, "
"pRegion[%i].pRectangles[%i], the sum of offset.y (%i) and extent.height (%i) is greater "
"than the corresponding swapchain's imageExtent.height (%i).",
@@ -13131,7 +12077,7 @@ bool CoreChecks::PreCallValidateQueuePresentKHR(VkQueue queue, const VkPresentIn
}
if (rect.layer > swapchain_data->createInfo.imageArrayLayers) {
skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
+ device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
HandleToUint64(pPresentInfo->pSwapchains[i]), "VUID-VkRectLayerKHR-layer-01262",
"vkQueuePresentKHR(): For VkPresentRegionKHR down pNext chain, pRegion[%i].pRectangles[%i], the layer "
"(%i) is greater than the corresponding swapchain's imageArrayLayers (%i).",
@@ -13145,7 +12091,7 @@ bool CoreChecks::PreCallValidateQueuePresentKHR(VkQueue queue, const VkPresentIn
if (present_times_info) {
if (pPresentInfo->swapchainCount != present_times_info->swapchainCount) {
skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
HandleToUint64(pPresentInfo->pSwapchains[0]), "VUID-VkPresentTimesInfoGOOGLE-swapchainCount-01247",
"vkQueuePresentKHR(): VkPresentTimesInfoGOOGLE.swapchainCount is %i but pPresentInfo->swapchainCount "
"is %i. For VkPresentTimesInfoGOOGLE down pNext chain of VkPresentInfoKHR, "
@@ -13161,7 +12107,7 @@ bool CoreChecks::PreCallValidateQueuePresentKHR(VkQueue queue, const VkPresentIn
void CoreChecks::PostCallRecordQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo, VkResult result) {
// Semaphore waits occur before error generation, if the call reached the ICD. (Confirm?)
for (uint32_t i = 0; i < pPresentInfo->waitSemaphoreCount; ++i) {
- auto pSemaphore = GetSemaphoreState(pPresentInfo->pWaitSemaphores[i]);
+ auto pSemaphore = GetSemaphoreNode(pPresentInfo->pWaitSemaphores[i]);
if (pSemaphore) {
pSemaphore->signaler.first = VK_NULL_HANDLE;
pSemaphore->signaled = false;
@@ -13174,7 +12120,7 @@ void CoreChecks::PostCallRecordQueuePresentKHR(VkQueue queue, const VkPresentInf
auto local_result = pPresentInfo->pResults ? pPresentInfo->pResults[i] : result;
if (local_result != VK_SUCCESS && local_result != VK_SUBOPTIMAL_KHR) continue; // this present didn't actually happen.
// Mark the image as having been released to the WSI
- auto swapchain_data = GetSwapchainState(pPresentInfo->pSwapchains[i]);
+ auto swapchain_data = GetSwapchainNode(pPresentInfo->pSwapchains[i]);
if (swapchain_data && (swapchain_data->images.size() > pPresentInfo->pImageIndices[i])) {
auto image = swapchain_data->images[pPresentInfo->pImageIndices[i]];
auto image_state = GetImageState(image);
@@ -13190,109 +12136,106 @@ void CoreChecks::PostCallRecordQueuePresentKHR(VkQueue queue, const VkPresentInf
bool CoreChecks::PreCallValidateCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
const VkSwapchainCreateInfoKHR *pCreateInfos,
const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchains) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
bool skip = false;
if (pCreateInfos) {
for (uint32_t i = 0; i < swapchainCount; i++) {
- const auto surface_state = GetSurfaceState(pCreateInfos[i].surface);
- const auto old_swapchain_state = GetSwapchainState(pCreateInfos[i].oldSwapchain);
+ auto surface_state = GetSurfaceState(pCreateInfos[i].surface);
+ auto old_swapchain_state = GetSwapchainNode(pCreateInfos[i].oldSwapchain);
std::stringstream func_name;
func_name << "vkCreateSharedSwapchainsKHR[" << swapchainCount << "]()";
- skip |= ValidateCreateSwapchain(func_name.str().c_str(), &pCreateInfos[i], surface_state, old_swapchain_state);
+ skip |=
+ ValidateCreateSwapchain(device_data, func_name.str().c_str(), &pCreateInfos[i], surface_state, old_swapchain_state);
}
}
return skip;
}
-void ValidationStateTracker::PostCallRecordCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
- const VkSwapchainCreateInfoKHR *pCreateInfos,
- const VkAllocationCallbacks *pAllocator,
- VkSwapchainKHR *pSwapchains, VkResult result) {
+void CoreChecks::PostCallRecordCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
+ const VkSwapchainCreateInfoKHR *pCreateInfos,
+ const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchains,
+ VkResult result) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (pCreateInfos) {
for (uint32_t i = 0; i < swapchainCount; i++) {
auto surface_state = GetSurfaceState(pCreateInfos[i].surface);
- auto old_swapchain_state = GetSwapchainState(pCreateInfos[i].oldSwapchain);
- RecordCreateSwapchainState(result, &pCreateInfos[i], &pSwapchains[i], surface_state, old_swapchain_state);
+ auto old_swapchain_state = GetSwapchainNode(pCreateInfos[i].oldSwapchain);
+ RecordCreateSwapchainState(device_data, result, &pCreateInfos[i], &pSwapchains[i], surface_state, old_swapchain_state);
}
}
}
-bool CoreChecks::ValidateAcquireNextImage(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore,
- VkFence fence, uint32_t *pImageIndex, const char *func_name) const {
+bool CoreChecks::ValidateAcquireNextImage(layer_data *device_data, VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
+ VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex, const char *func_name) {
bool skip = false;
if (fence == VK_NULL_HANDLE && semaphore == VK_NULL_HANDLE) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
- "VUID-vkAcquireNextImageKHR-semaphore-01780",
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ HandleToUint64(device), "VUID-vkAcquireNextImageKHR-semaphore-01780",
"%s: Semaphore and fence cannot both be VK_NULL_HANDLE. There would be no way to "
"determine the completion of this operation.",
func_name);
}
- auto pSemaphore = GetSemaphoreState(semaphore);
+ auto pSemaphore = GetSemaphoreNode(semaphore);
if (pSemaphore && pSemaphore->scope == kSyncScopeInternal && pSemaphore->signaled) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
HandleToUint64(semaphore), "VUID-vkAcquireNextImageKHR-semaphore-01286",
"%s: Semaphore must not be currently signaled or in a wait state.", func_name);
}
- auto pFence = GetFenceState(fence);
+ auto pFence = GetFenceNode(fence);
if (pFence) {
- skip |= ValidateFenceForSubmit(pFence);
+ skip |= ValidateFenceForSubmit(device_data, pFence);
}
- const auto swapchain_data = GetSwapchainState(swapchain);
- if (swapchain_data) {
- if (swapchain_data->retired) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
- HandleToUint64(swapchain), "VUID-vkAcquireNextImageKHR-swapchain-01285",
- "%s: This swapchain has been retired. The application can still present any images it "
- "has acquired, but cannot acquire any more.",
- func_name);
- }
-
- auto physical_device_state = GetPhysicalDeviceState();
- if (physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState != UNCALLED) {
- uint64_t acquired_images = std::count_if(swapchain_data->images.begin(), swapchain_data->images.end(),
- [=](VkImage image) { return GetImageState(image)->acquired; });
- if (acquired_images > swapchain_data->images.size() - physical_device_state->surfaceCapabilities.minImageCount) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
- HandleToUint64(swapchain), kVUID_Core_DrawState_SwapchainTooManyImages,
- "%s: Application has already acquired the maximum number of images (0x%" PRIxLEAST64 ")", func_name,
- acquired_images);
- }
- }
+ auto swapchain_data = GetSwapchainNode(swapchain);
+ if (swapchain_data && swapchain_data->retired) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
+ HandleToUint64(swapchain), "VUID-vkAcquireNextImageKHR-swapchain-01285",
+ "%s: This swapchain has been retired. The application can still present any images it "
+ "has acquired, but cannot acquire any more.",
+ func_name);
+ }
- if (swapchain_data->images.size() == 0) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
- HandleToUint64(swapchain), kVUID_Core_DrawState_SwapchainImagesNotFound,
- "%s: No images found to acquire from. Application probably did not call "
- "vkGetSwapchainImagesKHR after swapchain creation.",
- func_name);
- }
+ auto physical_device_state = GetPhysicalDeviceState();
+    if (swapchain_data && (physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState != UNCALLED)) {
+ uint64_t acquired_images = std::count_if(swapchain_data->images.begin(), swapchain_data->images.end(),
+ [=](VkImage image) { return GetImageState(image)->acquired; });
+ if (acquired_images > swapchain_data->images.size() - physical_device_state->surfaceCapabilities.minImageCount) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
+ HandleToUint64(swapchain), kVUID_Core_DrawState_SwapchainTooManyImages,
+ "%s: Application has already acquired the maximum number of images (0x%" PRIxLEAST64 ")", func_name,
+ acquired_images);
+ }
+ }
+
+ if (swapchain_data && swapchain_data->images.size() == 0) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
+ HandleToUint64(swapchain), kVUID_Core_DrawState_SwapchainImagesNotFound,
+ "%s: No images found to acquire from. Application probably did not call "
+ "vkGetSwapchainImagesKHR after swapchain creation.",
+ func_name);
}
return skip;
}
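ValidateAcquireNextImage above rejects calls where both the semaphore and the fence are VK_NULL_HANDLE and warns when more images are acquired than the surface capabilities allow; a minimal sketch of a conforming acquire, illustrative only, with `device`, `swapchain`, and `acquire_semaphore` assumed to exist.

// Illustrative sketch, not part of this patch.
uint32_t image_index = 0;
VkResult acquire_result = vkAcquireNextImageKHR(device, swapchain, UINT64_MAX,
                                                acquire_semaphore,  // at least one of semaphore/fence must be valid
                                                VK_NULL_HANDLE, &image_index);
if (acquire_result == VK_ERROR_OUT_OF_DATE_KHR) {
    // Swapchain is retired or out of date; recreate it before acquiring again.
}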
bool CoreChecks::PreCallValidateAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) {
- return ValidateAcquireNextImage(device, swapchain, timeout, semaphore, fence, pImageIndex, "vkAcquireNextImageKHR");
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ return ValidateAcquireNextImage(device_data, device, swapchain, timeout, semaphore, fence, pImageIndex,
+ "vkAcquireNextImageKHR");
}
bool CoreChecks::PreCallValidateAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR *pAcquireInfo,
uint32_t *pImageIndex) {
- bool skip = false;
- skip |= ValidateDeviceMaskToPhysicalDeviceCount(pAcquireInfo->deviceMask, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
- HandleToUint64(pAcquireInfo->swapchain),
- "VUID-VkAcquireNextImageInfoKHR-deviceMask-01290");
- skip |= ValidateDeviceMaskToZero(pAcquireInfo->deviceMask, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
- HandleToUint64(pAcquireInfo->swapchain), "VUID-VkAcquireNextImageInfoKHR-deviceMask-01291");
- skip |= ValidateAcquireNextImage(device, pAcquireInfo->swapchain, pAcquireInfo->timeout, pAcquireInfo->semaphore,
- pAcquireInfo->fence, pImageIndex, "vkAcquireNextImage2KHR");
- return skip;
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ return ValidateAcquireNextImage(device_data, device, pAcquireInfo->swapchain, pAcquireInfo->timeout, pAcquireInfo->semaphore,
+ pAcquireInfo->fence, pImageIndex, "vkAcquireNextImage2KHR");
}
-void ValidationStateTracker::RecordAcquireNextImageState(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
- VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) {
- auto pFence = GetFenceState(fence);
+void CoreChecks::RecordAcquireNextImageState(layer_data *device_data, VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
+ VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) {
+ auto pFence = GetFenceNode(fence);
if (pFence && pFence->scope == kSyncScopeInternal) {
// Treat as inflight since it is valid to wait on this fence, even in cases where it is technically a temporary
// import
@@ -13300,7 +12243,7 @@ void ValidationStateTracker::RecordAcquireNextImageState(VkDevice device, VkSwap
pFence->signaler.first = VK_NULL_HANDLE; // ANI isn't on a queue, so this can't participate in a completion proof.
}
- auto pSemaphore = GetSemaphoreState(semaphore);
+ auto pSemaphore = GetSemaphoreNode(semaphore);
if (pSemaphore && pSemaphore->scope == kSyncScopeInternal) {
// Treat as signaled since it is valid to wait on this semaphore, even in cases where it is technically a
// temporary import
@@ -13309,7 +12252,7 @@ void ValidationStateTracker::RecordAcquireNextImageState(VkDevice device, VkSwap
}
// Mark the image as acquired.
- auto swapchain_data = GetSwapchainState(swapchain);
+ auto swapchain_data = GetSwapchainNode(swapchain);
if (swapchain_data && (swapchain_data->images.size() > *pImageIndex)) {
auto image = swapchain_data->images[*pImageIndex];
auto image_state = GetImageState(image);
@@ -13320,35 +12263,38 @@ void ValidationStateTracker::RecordAcquireNextImageState(VkDevice device, VkSwap
}
}
-void ValidationStateTracker::PostCallRecordAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
- VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex,
- VkResult result) {
+void CoreChecks::PostCallRecordAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
+ VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex, VkResult result) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
- RecordAcquireNextImageState(device, swapchain, timeout, semaphore, fence, pImageIndex);
+ RecordAcquireNextImageState(device_data, device, swapchain, timeout, semaphore, fence, pImageIndex);
}
-void ValidationStateTracker::PostCallRecordAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR *pAcquireInfo,
- uint32_t *pImageIndex, VkResult result) {
+void CoreChecks::PostCallRecordAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR *pAcquireInfo,
+ uint32_t *pImageIndex, VkResult result) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
- RecordAcquireNextImageState(device, pAcquireInfo->swapchain, pAcquireInfo->timeout, pAcquireInfo->semaphore,
+ RecordAcquireNextImageState(device_data, device, pAcquireInfo->swapchain, pAcquireInfo->timeout, pAcquireInfo->semaphore,
pAcquireInfo->fence, pImageIndex);
}
-void ValidationStateTracker::PostCallRecordEnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount,
- VkPhysicalDevice *pPhysicalDevices, VkResult result) {
+void CoreChecks::PostCallRecordEnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount,
+ VkPhysicalDevice *pPhysicalDevices, VkResult result) {
+ instance_layer_data *instance_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
if ((NULL != pPhysicalDevices) && ((result == VK_SUCCESS || result == VK_INCOMPLETE))) {
for (uint32_t i = 0; i < *pPhysicalDeviceCount; i++) {
- auto &phys_device_state = physical_device_map[pPhysicalDevices[i]];
+ auto &phys_device_state = instance_data->physical_device_map[pPhysicalDevices[i]];
phys_device_state.phys_device = pPhysicalDevices[i];
// Init actual features for each physical device
- DispatchGetPhysicalDeviceFeatures(pPhysicalDevices[i], &phys_device_state.features2.features);
+ instance_data->instance_dispatch_table.GetPhysicalDeviceFeatures(pPhysicalDevices[i],
+ &phys_device_state.features2.features);
}
}
}
// Common function to handle validation for GetPhysicalDeviceQueueFamilyProperties & 2KHR version
-static bool ValidateCommonGetPhysicalDeviceQueueFamilyProperties(debug_report_data *report_data,
- const PHYSICAL_DEVICE_STATE *pd_state,
+static bool ValidateCommonGetPhysicalDeviceQueueFamilyProperties(instance_layer_data *instance_data,
+ PHYSICAL_DEVICE_STATE *pd_state,
uint32_t requested_queue_family_property_count, bool qfp_null,
const char *caller_name) {
bool skip = false;
@@ -13356,22 +12302,23 @@ static bool ValidateCommonGetPhysicalDeviceQueueFamilyProperties(debug_report_da
// Verify that for each physical device, this command is called first with NULL pQueueFamilyProperties in order to get count
if (UNCALLED == pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState) {
skip |= log_msg(
- report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+ instance_data->report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
HandleToUint64(pd_state->phys_device), kVUID_Core_DevLimit_MissingQueryCount,
"%s is called with non-NULL pQueueFamilyProperties before obtaining pQueueFamilyPropertyCount. It is recommended "
"to first call %s with NULL pQueueFamilyProperties in order to obtain the maximal pQueueFamilyPropertyCount.",
caller_name, caller_name);
// Then verify that pCount that is passed in on second call matches what was returned
- } else if (pd_state->queue_family_known_count != requested_queue_family_property_count) {
+ } else if (pd_state->queue_family_count != requested_queue_family_property_count) {
skip |= log_msg(
- report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+ instance_data->report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
HandleToUint64(pd_state->phys_device), kVUID_Core_DevLimit_CountMismatch,
"%s is called with non-NULL pQueueFamilyProperties and pQueueFamilyPropertyCount value %" PRIu32
", but the largest previously returned pQueueFamilyPropertyCount for this physicalDevice is %" PRIu32
". It is recommended to instead receive all the properties by calling %s with pQueueFamilyPropertyCount that was "
"previously obtained by calling %s with NULL pQueueFamilyProperties.",
- caller_name, requested_queue_family_property_count, pd_state->queue_family_known_count, caller_name, caller_name);
+ caller_name, requested_queue_family_property_count, pd_state->queue_family_count, caller_name, caller_name);
}
+ pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState = QUERY_DETAILS;
}
return skip;
@@ -13380,9 +12327,10 @@ static bool ValidateCommonGetPhysicalDeviceQueueFamilyProperties(debug_report_da
bool CoreChecks::PreCallValidateGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
uint32_t *pQueueFamilyPropertyCount,
VkQueueFamilyProperties *pQueueFamilyProperties) {
- const auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
+ instance_layer_data *instance_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), instance_layer_data_map);
+ auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
assert(physical_device_state);
- return ValidateCommonGetPhysicalDeviceQueueFamilyProperties(report_data, physical_device_state, *pQueueFamilyPropertyCount,
+ return ValidateCommonGetPhysicalDeviceQueueFamilyProperties(instance_data, physical_device_state, *pQueueFamilyPropertyCount,
(nullptr == pQueueFamilyProperties),
"vkGetPhysicalDeviceQueueFamilyProperties()");
}
@@ -13390,9 +12338,10 @@ bool CoreChecks::PreCallValidateGetPhysicalDeviceQueueFamilyProperties(VkPhysica
bool CoreChecks::PreCallValidateGetPhysicalDeviceQueueFamilyProperties2(VkPhysicalDevice physicalDevice,
uint32_t *pQueueFamilyPropertyCount,
VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
- const auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
+ instance_layer_data *instance_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), instance_layer_data_map);
+ auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
assert(physical_device_state);
- return ValidateCommonGetPhysicalDeviceQueueFamilyProperties(report_data, physical_device_state, *pQueueFamilyPropertyCount,
+ return ValidateCommonGetPhysicalDeviceQueueFamilyProperties(instance_data, physical_device_state, *pQueueFamilyPropertyCount,
(nullptr == pQueueFamilyProperties),
"vkGetPhysicalDeviceQueueFamilyProperties2()");
}
@@ -13400,9 +12349,10 @@ bool CoreChecks::PreCallValidateGetPhysicalDeviceQueueFamilyProperties2(VkPhysic
bool CoreChecks::PreCallValidateGetPhysicalDeviceQueueFamilyProperties2KHR(VkPhysicalDevice physicalDevice,
uint32_t *pQueueFamilyPropertyCount,
VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
+ instance_layer_data *instance_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), instance_layer_data_map);
auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
assert(physical_device_state);
- return ValidateCommonGetPhysicalDeviceQueueFamilyProperties(report_data, physical_device_state, *pQueueFamilyPropertyCount,
+ return ValidateCommonGetPhysicalDeviceQueueFamilyProperties(instance_data, physical_device_state, *pQueueFamilyPropertyCount,
(nullptr == pQueueFamilyProperties),
"vkGetPhysicalDeviceQueueFamilyProperties2KHR()");
}
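ValidateCommonGetPhysicalDeviceQueueFamilyProperties, shared by the three entry points above, expects the same enumerate-twice idiom; a minimal sketch of the usage it validates, illustrative only, with `physical_device` assumed to exist.

// Illustrative sketch, not part of this patch.
uint32_t family_count = 0;
vkGetPhysicalDeviceQueueFamilyProperties(physical_device, &family_count, nullptr);         // first call: count only
std::vector<VkQueueFamilyProperties> families(family_count);
vkGetPhysicalDeviceQueueFamilyProperties(physical_device, &family_count, families.data()); // second call: properties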
@@ -13410,13 +12360,13 @@ bool CoreChecks::PreCallValidateGetPhysicalDeviceQueueFamilyProperties2KHR(VkPhy
// Common function to update state for GetPhysicalDeviceQueueFamilyProperties & 2KHR version
static void StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(PHYSICAL_DEVICE_STATE *pd_state, uint32_t count,
VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
- pd_state->queue_family_known_count = std::max(pd_state->queue_family_known_count, count);
-
if (!pQueueFamilyProperties) {
if (UNCALLED == pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState)
pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState = QUERY_COUNT;
+ pd_state->queue_family_count = count;
} else { // Save queue family properties
pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState = QUERY_DETAILS;
+ pd_state->queue_family_count = std::max(pd_state->queue_family_count, count);
pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
for (uint32_t i = 0; i < count; ++i) {
@@ -13425,9 +12375,9 @@ static void StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(PHYSICAL_DEV
}
}
-void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
- uint32_t *pQueueFamilyPropertyCount,
- VkQueueFamilyProperties *pQueueFamilyProperties) {
+void CoreChecks::PostCallRecordGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
+ uint32_t *pQueueFamilyPropertyCount,
+ VkQueueFamilyProperties *pQueueFamilyProperties) {
auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
assert(physical_device_state);
VkQueueFamilyProperties2KHR *pqfp = nullptr;
@@ -13444,16 +12394,18 @@ void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyPropertie
StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount, pqfp);
}
-void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2(
- VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
+void CoreChecks::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2(VkPhysicalDevice physicalDevice,
+ uint32_t *pQueueFamilyPropertyCount,
+ VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
assert(physical_device_state);
StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
pQueueFamilyProperties);
}
-void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(
- VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
+void CoreChecks::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(VkPhysicalDevice physicalDevice,
+ uint32_t *pQueueFamilyPropertyCount,
+ VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
assert(physical_device_state);
StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
@@ -13462,158 +12414,166 @@ void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyPropertie
bool CoreChecks::PreCallValidateDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface,
const VkAllocationCallbacks *pAllocator) {
- const auto surface_state = GetSurfaceState(surface);
+ instance_layer_data *instance_data = GetLayerDataPtr(get_dispatch_key(instance), instance_layer_data_map);
+ auto surface_state = GetSurfaceState(surface);
bool skip = false;
if ((surface_state) && (surface_state->swapchain)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
+ skip |= log_msg(instance_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
HandleToUint64(instance), "VUID-vkDestroySurfaceKHR-surface-01266",
"vkDestroySurfaceKHR() called before its associated VkSwapchainKHR was destroyed.");
}
return skip;
}
-void ValidationStateTracker::PreCallRecordDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface,
- const VkAllocationCallbacks *pAllocator) {
- surface_map.erase(surface);
+void CoreChecks::PreCallRecordValidateDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface,
+ const VkAllocationCallbacks *pAllocator) {
+ instance_layer_data *instance_data = GetLayerDataPtr(get_dispatch_key(instance), instance_layer_data_map);
+ instance_data->surface_map.erase(surface);
}
-void ValidationStateTracker::RecordVulkanSurface(VkSurfaceKHR *pSurface) {
- surface_map[*pSurface] = std::unique_ptr<SURFACE_STATE>(new SURFACE_STATE{*pSurface});
+static void RecordVulkanSurface(instance_layer_data *instance_data, VkSurfaceKHR *pSurface) {
+ instance_data->surface_map[*pSurface] = SURFACE_STATE(*pSurface);
}
-void ValidationStateTracker::PostCallRecordCreateDisplayPlaneSurfaceKHR(VkInstance instance,
- const VkDisplaySurfaceCreateInfoKHR *pCreateInfo,
- const VkAllocationCallbacks *pAllocator,
- VkSurfaceKHR *pSurface, VkResult result) {
+void CoreChecks::PostCallRecordCreateDisplayPlaneSurfaceKHR(VkInstance instance, const VkDisplaySurfaceCreateInfoKHR *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
+ VkResult result) {
+ instance_layer_data *instance_data = GetLayerDataPtr(get_dispatch_key(instance), instance_layer_data_map);
if (VK_SUCCESS != result) return;
- RecordVulkanSurface(pSurface);
+ RecordVulkanSurface(instance_data, pSurface);
}
#ifdef VK_USE_PLATFORM_ANDROID_KHR
-void ValidationStateTracker::PostCallRecordCreateAndroidSurfaceKHR(VkInstance instance,
- const VkAndroidSurfaceCreateInfoKHR *pCreateInfo,
- const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
- VkResult result) {
+void CoreChecks::PostCallRecordCreateAndroidSurfaceKHR(VkInstance instance, const VkAndroidSurfaceCreateInfoKHR *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
+ VkResult result) {
+ instance_layer_data *instance_data = GetLayerDataPtr(get_dispatch_key(instance), instance_layer_data_map);
if (VK_SUCCESS != result) return;
- RecordVulkanSurface(pSurface);
+ RecordVulkanSurface(instance_data, pSurface);
}
#endif // VK_USE_PLATFORM_ANDROID_KHR
#ifdef VK_USE_PLATFORM_IOS_MVK
-void ValidationStateTracker::PostCallRecordCreateIOSSurfaceMVK(VkInstance instance, const VkIOSSurfaceCreateInfoMVK *pCreateInfo,
- const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
- VkResult result) {
+void CoreChecks::PostCallRecordCreateIOSSurfaceMVK(VkInstance instance, const VkIOSSurfaceCreateInfoMVK *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
+ VkResult result) {
+ instance_layer_data *instance_data = GetLayerDataPtr(get_dispatch_key(instance), instance_layer_data_map);
if (VK_SUCCESS != result) return;
- RecordVulkanSurface(pSurface);
+ RecordVulkanSurface(instance_data, pSurface);
}
#endif // VK_USE_PLATFORM_IOS_MVK
#ifdef VK_USE_PLATFORM_MACOS_MVK
-void ValidationStateTracker::PostCallRecordCreateMacOSSurfaceMVK(VkInstance instance,
- const VkMacOSSurfaceCreateInfoMVK *pCreateInfo,
- const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
- VkResult result) {
+void CoreChecks::PostCallRecordCreateMacOSSurfaceMVK(VkInstance instance, const VkMacOSSurfaceCreateInfoMVK *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
+ VkResult result) {
+ instance_layer_data *instance_data = GetLayerDataPtr(get_dispatch_key(instance), instance_layer_data_map);
if (VK_SUCCESS != result) return;
- RecordVulkanSurface(pSurface);
+ RecordVulkanSurface(instance_data, pSurface);
}
#endif // VK_USE_PLATFORM_MACOS_MVK
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
-void ValidationStateTracker::PostCallRecordCreateWaylandSurfaceKHR(VkInstance instance,
- const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
- const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
- VkResult result) {
+void CoreChecks::PostCallRecordCreateWaylandSurfaceKHR(VkInstance instance, const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
+ VkResult result) {
+ instance_layer_data *instance_data = GetLayerDataPtr(get_dispatch_key(instance), instance_layer_data_map);
if (VK_SUCCESS != result) return;
- RecordVulkanSurface(pSurface);
+ RecordVulkanSurface(instance_data, pSurface);
}
bool CoreChecks::PreCallValidateGetPhysicalDeviceWaylandPresentationSupportKHR(VkPhysicalDevice physicalDevice,
uint32_t queueFamilyIndex,
struct wl_display *display) {
+ instance_layer_data *instance_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), instance_layer_data_map);
const auto pd_state = GetPhysicalDeviceState(physicalDevice);
- return ValidateQueueFamilyIndex(pd_state, queueFamilyIndex,
- "VUID-vkGetPhysicalDeviceWaylandPresentationSupportKHR-queueFamilyIndex-01306",
- "vkGetPhysicalDeviceWaylandPresentationSupportKHR", "queueFamilyIndex");
+ return ValidatePhysicalDeviceQueueFamily(instance_data, pd_state, queueFamilyIndex,
+ "VUID-vkGetPhysicalDeviceWaylandPresentationSupportKHR-queueFamilyIndex-01306",
+ "vkGetPhysicalDeviceWaylandPresentationSupportKHR", "queueFamilyIndex");
}
#endif // VK_USE_PLATFORM_WAYLAND_KHR
#ifdef VK_USE_PLATFORM_WIN32_KHR
-void ValidationStateTracker::PostCallRecordCreateWin32SurfaceKHR(VkInstance instance,
- const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
- const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
- VkResult result) {
+void CoreChecks::PostCallRecordCreateWin32SurfaceKHR(VkInstance instance, const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
+ VkResult result) {
+ instance_layer_data *instance_data = GetLayerDataPtr(get_dispatch_key(instance), instance_layer_data_map);
if (VK_SUCCESS != result) return;
- RecordVulkanSurface(pSurface);
+ RecordVulkanSurface(instance_data, pSurface);
}
bool CoreChecks::PreCallValidateGetPhysicalDeviceWin32PresentationSupportKHR(VkPhysicalDevice physicalDevice,
uint32_t queueFamilyIndex) {
+ instance_layer_data *instance_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), instance_layer_data_map);
const auto pd_state = GetPhysicalDeviceState(physicalDevice);
- return ValidateQueueFamilyIndex(pd_state, queueFamilyIndex,
- "VUID-vkGetPhysicalDeviceWin32PresentationSupportKHR-queueFamilyIndex-01309",
- "vkGetPhysicalDeviceWin32PresentationSupportKHR", "queueFamilyIndex");
+ return ValidatePhysicalDeviceQueueFamily(instance_data, pd_state, queueFamilyIndex,
+ "VUID-vkGetPhysicalDeviceWin32PresentationSupportKHR-queueFamilyIndex-01309",
+ "vkGetPhysicalDeviceWin32PresentationSupportKHR", "queueFamilyIndex");
}
#endif // VK_USE_PLATFORM_WIN32_KHR
#ifdef VK_USE_PLATFORM_XCB_KHR
-void ValidationStateTracker::PostCallRecordCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo,
- const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
- VkResult result) {
+void CoreChecks::PostCallRecordCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
+ VkResult result) {
+ instance_layer_data *instance_data = GetLayerDataPtr(get_dispatch_key(instance), instance_layer_data_map);
if (VK_SUCCESS != result) return;
- RecordVulkanSurface(pSurface);
+ RecordVulkanSurface(instance_data, pSurface);
}
bool CoreChecks::PreCallValidateGetPhysicalDeviceXcbPresentationSupportKHR(VkPhysicalDevice physicalDevice,
uint32_t queueFamilyIndex, xcb_connection_t *connection,
xcb_visualid_t visual_id) {
+ instance_layer_data *instance_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), instance_layer_data_map);
const auto pd_state = GetPhysicalDeviceState(physicalDevice);
- return ValidateQueueFamilyIndex(pd_state, queueFamilyIndex,
- "VUID-vkGetPhysicalDeviceXcbPresentationSupportKHR-queueFamilyIndex-01312",
- "vkGetPhysicalDeviceXcbPresentationSupportKHR", "queueFamilyIndex");
+ return ValidatePhysicalDeviceQueueFamily(instance_data, pd_state, queueFamilyIndex,
+ "VUID-vkGetPhysicalDeviceXcbPresentationSupportKHR-queueFamilyIndex-01312",
+ "vkGetPhysicalDeviceXcbPresentationSupportKHR", "queueFamilyIndex");
}
#endif // VK_USE_PLATFORM_XCB_KHR
#ifdef VK_USE_PLATFORM_XLIB_KHR
-void ValidationStateTracker::PostCallRecordCreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo,
- const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
- VkResult result) {
+void CoreChecks::PostCallRecordCreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
+ VkResult result) {
+ instance_layer_data *instance_data = GetLayerDataPtr(get_dispatch_key(instance), instance_layer_data_map);
if (VK_SUCCESS != result) return;
- RecordVulkanSurface(pSurface);
+ RecordVulkanSurface(instance_data, pSurface);
}
bool CoreChecks::PreCallValidateGetPhysicalDeviceXlibPresentationSupportKHR(VkPhysicalDevice physicalDevice,
uint32_t queueFamilyIndex, Display *dpy,
VisualID visualID) {
+ instance_layer_data *instance_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), instance_layer_data_map);
const auto pd_state = GetPhysicalDeviceState(physicalDevice);
- return ValidateQueueFamilyIndex(pd_state, queueFamilyIndex,
- "VUID-vkGetPhysicalDeviceXlibPresentationSupportKHR-queueFamilyIndex-01315",
- "vkGetPhysicalDeviceXlibPresentationSupportKHR", "queueFamilyIndex");
+ return ValidatePhysicalDeviceQueueFamily(instance_data, pd_state, queueFamilyIndex,
+ "VUID-vkGetPhysicalDeviceXlibPresentationSupportKHR-queueFamilyIndex-01315",
+ "vkGetPhysicalDeviceXlibPresentationSupportKHR", "queueFamilyIndex");
}
#endif // VK_USE_PLATFORM_XLIB_KHR
-void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- VkSurfaceCapabilitiesKHR *pSurfaceCapabilities,
- VkResult result) {
+void CoreChecks::PostCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
+ VkSurfaceCapabilitiesKHR *pSurfaceCapabilities,
+ VkResult result) {
if (VK_SUCCESS != result) return;
auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
physical_device_state->surfaceCapabilities = *pSurfaceCapabilities;
}
-void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2KHR(
- VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
- VkSurfaceCapabilities2KHR *pSurfaceCapabilities, VkResult result) {
+void CoreChecks::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2KHR(VkPhysicalDevice physicalDevice,
+ const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
+ VkSurfaceCapabilities2KHR *pSurfaceCapabilities,
+ VkResult result) {
if (VK_SUCCESS != result) return;
auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
physical_device_state->surfaceCapabilities = pSurfaceCapabilities->surfaceCapabilities;
}
-void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- VkSurfaceCapabilities2EXT *pSurfaceCapabilities,
- VkResult result) {
+void CoreChecks::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
+ VkSurfaceCapabilities2EXT *pSurfaceCapabilities,
+ VkResult result) {
auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
physical_device_state->surfaceCapabilities.minImageCount = pSurfaceCapabilities->minImageCount;
@@ -13630,25 +12590,23 @@ void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2
bool CoreChecks::PreCallValidateGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex,
VkSurfaceKHR surface, VkBool32 *pSupported) {
+ auto instance_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), instance_layer_data_map);
const auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
- return ValidateQueueFamilyIndex(physical_device_state, queueFamilyIndex,
- "VUID-vkGetPhysicalDeviceSurfaceSupportKHR-queueFamilyIndex-01269",
- "vkGetPhysicalDeviceSurfaceSupportKHR", "queueFamilyIndex");
+ return ValidatePhysicalDeviceQueueFamily(instance_data, physical_device_state, queueFamilyIndex,
+ "VUID-vkGetPhysicalDeviceSurfaceSupportKHR-queueFamilyIndex-01269",
+ "vkGetPhysicalDeviceSurfaceSupportKHR", "queueFamilyIndex");
}
-void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex, VkSurfaceKHR surface,
- VkBool32 *pSupported, VkResult result) {
+void CoreChecks::PostCallRecordGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex,
+ VkSurfaceKHR surface, VkBool32 *pSupported, VkResult result) {
if (VK_SUCCESS != result) return;
auto surface_state = GetSurfaceState(surface);
surface_state->gpu_queue_support[{physicalDevice, queueFamilyIndex}] = (*pSupported == VK_TRUE);
}
-void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- uint32_t *pPresentModeCount,
- VkPresentModeKHR *pPresentModes,
- VkResult result) {
+void CoreChecks::PostCallRecordGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
+ uint32_t *pPresentModeCount, VkPresentModeKHR *pPresentModes,
+ VkResult result) {
if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
// TODO: This isn't quite right -- available modes may differ by surface AND physical device.
@@ -13671,24 +12629,27 @@ void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfacePresentModesK
bool CoreChecks::PreCallValidateGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
uint32_t *pSurfaceFormatCount,
VkSurfaceFormatKHR *pSurfaceFormats) {
+ auto instance_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), instance_layer_data_map);
if (!pSurfaceFormats) return false;
- const auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
- const auto &call_state = physical_device_state->vkGetPhysicalDeviceSurfaceFormatsKHRState;
+ auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
+ auto &call_state = physical_device_state->vkGetPhysicalDeviceSurfaceFormatsKHRState;
bool skip = false;
switch (call_state) {
case UNCALLED:
// Since we haven't recorded a preliminary value of *pSurfaceFormatCount, that likely means that the application didn't
// previously call this function with a NULL value of pSurfaceFormats:
- skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
- HandleToUint64(physicalDevice), kVUID_Core_DevLimit_MustQueryCount,
+ skip |= log_msg(instance_data->report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, HandleToUint64(physicalDevice),
+ kVUID_Core_DevLimit_MustQueryCount,
"vkGetPhysicalDeviceSurfaceFormatsKHR() called with non-NULL pSurfaceFormatCount; but no prior "
"positive value has been seen for pSurfaceFormats.");
break;
default:
auto prev_format_count = (uint32_t)physical_device_state->surface_formats.size();
if (prev_format_count != *pSurfaceFormatCount) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
- HandleToUint64(physicalDevice), kVUID_Core_DevLimit_CountMismatch,
+ skip |= log_msg(instance_data->report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, HandleToUint64(physicalDevice),
+ kVUID_Core_DevLimit_CountMismatch,
"vkGetPhysicalDeviceSurfaceFormatsKHR() called with non-NULL pSurfaceFormatCount, and with "
"pSurfaceFormats set to a value (%u) that is greater than the value (%u) that was returned "
"when pSurfaceFormatCount was NULL.",
@@ -13699,10 +12660,9 @@ bool CoreChecks::PreCallValidateGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDev
return skip;
}
-void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
- uint32_t *pSurfaceFormatCount,
- VkSurfaceFormatKHR *pSurfaceFormats,
- VkResult result) {
+void CoreChecks::PostCallRecordGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
+ uint32_t *pSurfaceFormatCount,
+ VkSurfaceFormatKHR *pSurfaceFormats, VkResult result) {
if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
@@ -13721,11 +12681,10 @@ void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormatsKHR(Vk
}
}
-void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
- uint32_t *pSurfaceFormatCount,
- VkSurfaceFormat2KHR *pSurfaceFormats,
- VkResult result) {
+void CoreChecks::PostCallRecordGetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice,
+ const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
+ uint32_t *pSurfaceFormatCount,
+ VkSurfaceFormat2KHR *pSurfaceFormats, VkResult result) {
if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
auto physicalDeviceState = GetPhysicalDeviceState(physicalDevice);
@@ -13746,88 +12705,127 @@ void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormats2KHR(V
}
}
-void ValidationStateTracker::PreCallRecordCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
- const VkDebugUtilsLabelEXT *pLabelInfo) {
- BeginCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
+void CoreChecks::PreCallRecordQueueBeginDebugUtilsLabelEXT(VkQueue queue, const VkDebugUtilsLabelEXT *pLabelInfo) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
+ BeginQueueDebugUtilsLabel(device_data->report_data, queue, pLabelInfo);
+}
+
+void CoreChecks::PostCallRecordQueueEndDebugUtilsLabelEXT(VkQueue queue) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
+ EndQueueDebugUtilsLabel(device_data->report_data, queue);
}
-void ValidationStateTracker::PostCallRecordCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer) {
- EndCmdDebugUtilsLabel(report_data, commandBuffer);
+void CoreChecks::PreCallRecordQueueInsertDebugUtilsLabelEXT(VkQueue queue, const VkDebugUtilsLabelEXT *pLabelInfo) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
+ InsertQueueDebugUtilsLabel(device_data->report_data, queue, pLabelInfo);
}
-void ValidationStateTracker::PreCallRecordCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
- const VkDebugUtilsLabelEXT *pLabelInfo) {
- InsertCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
+void CoreChecks::PreCallRecordCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT *pLabelInfo) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ BeginCmdDebugUtilsLabel(device_data->report_data, commandBuffer, pLabelInfo);
+}
+
+void CoreChecks::PostCallRecordCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ EndCmdDebugUtilsLabel(device_data->report_data, commandBuffer);
+}
- // Squirrel away an easily accessible copy.
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
- cb_state->debug_label = LoggingLabel(pLabelInfo);
+void CoreChecks::PreCallRecordCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT *pLabelInfo) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ InsertCmdDebugUtilsLabel(device_data->report_data, commandBuffer, pLabelInfo);
}
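
// Illustrative sketch, not from this change: the debug-utils hooks above forward to shared
// helpers (BeginQueueDebugUtilsLabel, EndCmdDebugUtilsLabel, ...) that live in the common
// logging code. One plausible way such helpers track labels is a per-queue or per-command-buffer
// stack; this DebugLabelStack is an invented stand-in, not the real implementation.
#include <string>
#include <vector>

struct DebugLabelStack {
    std::vector<std::string> open_labels;  // labels begun but not yet ended
    std::string last_inserted;             // most recent single-shot label

    void Begin(const std::string &name) { open_labels.push_back(name); }
    void End() {
        if (!open_labels.empty()) open_labels.pop_back();  // tolerate unbalanced End calls
    }
    void Insert(const std::string &name) { last_inserted = name; }
};
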
-void ValidationStateTracker::RecordEnumeratePhysicalDeviceGroupsState(
- uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties) {
+void CoreChecks::PostCallRecordCreateDebugUtilsMessengerEXT(VkInstance instance,
+ const VkDebugUtilsMessengerCreateInfoEXT *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkDebugUtilsMessengerEXT *pMessenger, VkResult result) {
+ instance_layer_data *instance_data = GetLayerDataPtr(get_dispatch_key(instance), instance_layer_data_map);
+ if (VK_SUCCESS != result) return;
+ layer_create_messenger_callback(instance_data->report_data, false, pCreateInfo, pAllocator, pMessenger);
+}
+
+void CoreChecks::PostCallRecordDestroyDebugUtilsMessengerEXT(VkInstance instance, VkDebugUtilsMessengerEXT messenger,
+ const VkAllocationCallbacks *pAllocator) {
+ if (!messenger) return;
+ instance_layer_data *instance_data = GetLayerDataPtr(get_dispatch_key(instance), instance_layer_data_map);
+ layer_destroy_messenger_callback(instance_data->report_data, messenger, pAllocator);
+}
+
+void CoreChecks::PostCallRecordDestroyDebugReportCallbackEXT(VkInstance instance, VkDebugReportCallbackEXT msgCallback,
+ const VkAllocationCallbacks *pAllocator) {
+ instance_layer_data *instance_data = GetLayerDataPtr(get_dispatch_key(instance), instance_layer_data_map);
+ layer_destroy_report_callback(instance_data->report_data, msgCallback, pAllocator);
+}
+
+static void PostRecordEnumeratePhysicalDeviceGroupsState(instance_layer_data *instance_data, uint32_t *pPhysicalDeviceGroupCount,
+ VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties) {
if (NULL != pPhysicalDeviceGroupProperties) {
for (uint32_t i = 0; i < *pPhysicalDeviceGroupCount; i++) {
for (uint32_t j = 0; j < pPhysicalDeviceGroupProperties[i].physicalDeviceCount; j++) {
VkPhysicalDevice cur_phys_dev = pPhysicalDeviceGroupProperties[i].physicalDevices[j];
- auto &phys_device_state = physical_device_map[cur_phys_dev];
+ auto &phys_device_state = instance_data->physical_device_map[cur_phys_dev];
phys_device_state.phys_device = cur_phys_dev;
// Init actual features for each physical device
- DispatchGetPhysicalDeviceFeatures(cur_phys_dev, &phys_device_state.features2.features);
+ instance_data->instance_dispatch_table.GetPhysicalDeviceFeatures(cur_phys_dev,
+ &phys_device_state.features2.features);
}
}
}
}
-void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroups(
- VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties,
- VkResult result) {
+void CoreChecks::PostCallRecordEnumeratePhysicalDeviceGroups(VkInstance instance, uint32_t *pPhysicalDeviceGroupCount,
+ VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties,
+ VkResult result) {
+ instance_layer_data *instance_data = GetLayerDataPtr(get_dispatch_key(instance), instance_layer_data_map);
if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
- RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
+ PostRecordEnumeratePhysicalDeviceGroupsState(instance_data, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
}
-void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroupsKHR(
- VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties,
- VkResult result) {
+void CoreChecks::PostCallRecordEnumeratePhysicalDeviceGroupsKHR(VkInstance instance, uint32_t *pPhysicalDeviceGroupCount,
+ VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties,
+ VkResult result) {
+ instance_layer_data *instance_data = GetLayerDataPtr(get_dispatch_key(instance), instance_layer_data_map);
if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
- RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
+ PostRecordEnumeratePhysicalDeviceGroupsState(instance_data, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
}
-bool CoreChecks::ValidateDescriptorUpdateTemplate(const char *func_name,
+bool CoreChecks::ValidateDescriptorUpdateTemplate(const char *func_name, layer_data *device_data,
const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo) {
bool skip = false;
- const auto layout = GetDescriptorSetLayout(this, pCreateInfo->descriptorSetLayout);
+ const auto layout = GetDescriptorSetLayout(device_data, pCreateInfo->descriptorSetLayout);
if (VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET == pCreateInfo->templateType && !layout) {
- const VulkanTypedHandle ds_typed(pCreateInfo->descriptorSetLayout, kVulkanObjectTypeDescriptorSetLayout);
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, ds_typed.handle,
- "VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00350",
- "%s: Invalid pCreateInfo->descriptorSetLayout (%s)", func_name, report_data->FormatHandle(ds_typed).c_str());
+ auto ds_uint = HandleToUint64(pCreateInfo->descriptorSetLayout);
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,
+ ds_uint, "VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00350",
+ "%s: Invalid pCreateInfo->descriptorSetLayout (%s)", func_name,
+ device_data->report_data->FormatHandle(ds_uint).c_str());
} else if (VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR == pCreateInfo->templateType) {
auto bind_point = pCreateInfo->pipelineBindPoint;
bool valid_bp = (bind_point == VK_PIPELINE_BIND_POINT_GRAPHICS) || (bind_point == VK_PIPELINE_BIND_POINT_COMPUTE);
if (!valid_bp) {
skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00351",
"%s: Invalid pCreateInfo->pipelineBindPoint (%" PRIu32 ").", func_name, static_cast<uint32_t>(bind_point));
}
- const auto pipeline_layout = GetPipelineLayout(pCreateInfo->pipelineLayout);
+ const auto pipeline_layout = GetPipelineLayout(device_data, pCreateInfo->pipelineLayout);
if (!pipeline_layout) {
- const VulkanTypedHandle pl_typed(pCreateInfo->pipelineLayout, kVulkanObjectTypePipelineLayout);
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT,
- pl_typed.handle, "VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00352",
- "%s: Invalid pCreateInfo->pipelineLayout (%s)", func_name, report_data->FormatHandle(pl_typed).c_str());
+ uint64_t pl_uint = HandleToUint64(pCreateInfo->pipelineLayout);
+ skip |= log_msg(
+ device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, pl_uint,
+ "VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00352", "%s: Invalid pCreateInfo->pipelineLayout (%s)",
+ func_name, device_data->report_data->FormatHandle(pl_uint).c_str());
} else {
const uint32_t pd_set = pCreateInfo->set;
if ((pd_set >= pipeline_layout->set_layouts.size()) || !pipeline_layout->set_layouts[pd_set] ||
!pipeline_layout->set_layouts[pd_set]->IsPushDescriptor()) {
- const VulkanTypedHandle pl_typed(pCreateInfo->pipelineLayout, kVulkanObjectTypePipelineLayout);
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT,
- pl_typed.handle, "VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00353",
+ uint64_t pl_uint = HandleToUint64(pCreateInfo->pipelineLayout);
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, pl_uint,
+ "VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00353",
"%s: pCreateInfo->set (%" PRIu32
") does not refer to the push descriptor set layout for pCreateInfo->pipelineLayout (%s).",
- func_name, pd_set, report_data->FormatHandle(pl_typed).c_str());
+ func_name, pd_set, device_data->report_data->FormatHandle(pl_uint).c_str());
}
}
}
@@ -13838,7 +12836,9 @@ bool CoreChecks::PreCallValidateCreateDescriptorUpdateTemplate(VkDevice device,
const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
const VkAllocationCallbacks *pAllocator,
VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate) {
- bool skip = ValidateDescriptorUpdateTemplate("vkCreateDescriptorUpdateTemplate()", pCreateInfo);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+
+ bool skip = ValidateDescriptorUpdateTemplate("vkCreateDescriptorUpdateTemplate()", device_data, pCreateInfo);
return skip;
}
@@ -13846,51 +12846,61 @@ bool CoreChecks::PreCallValidateCreateDescriptorUpdateTemplateKHR(VkDevice devic
const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
const VkAllocationCallbacks *pAllocator,
VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate) {
- bool skip = ValidateDescriptorUpdateTemplate("vkCreateDescriptorUpdateTemplateKHR()", pCreateInfo);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+
+ bool skip = ValidateDescriptorUpdateTemplate("vkCreateDescriptorUpdateTemplateKHR()", device_data, pCreateInfo);
return skip;
}
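
// Illustrative sketch, not from this change: nearly every hook in this diff starts with
// GetLayerDataPtr(get_dispatch_key(handle), layer_data_map) to find the per-device (or
// per-instance) layer state. A simplified model of that idiom follows; the *_sketch names
// are invented, and the real get_dispatch_key() reads the loader's dispatch pointer stored
// at the start of every dispatchable handle rather than reusing the handle value.
#include <unordered_map>

struct layer_data_sketch {  // simplified stand-in for the real per-device layer_data
    int example_state = 0;
};

static inline void *get_dispatch_key_sketch(void *dispatchable_handle) {
    // All handles created from one VkDevice/VkInstance share a dispatch key, so one map
    // entry serves every queue and command buffer of that device.
    return dispatchable_handle;
}

static std::unordered_map<void *, layer_data_sketch> g_layer_data_map_sketch;

static layer_data_sketch *GetLayerDataPtrSketch(void *key) {
    return &g_layer_data_map_sketch[key];  // creates the entry on first use
}
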
-void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplate(VkDevice device,
- VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
- const VkAllocationCallbacks *pAllocator) {
+void CoreChecks::PreCallRecordDestroyDescriptorUpdateTemplate(VkDevice device,
+ VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
+ const VkAllocationCallbacks *pAllocator) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (!descriptorUpdateTemplate) return;
- desc_template_map.erase(descriptorUpdateTemplate);
+ device_data->desc_template_map.erase(descriptorUpdateTemplate);
}
-void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplateKHR(VkDevice device,
- VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
- const VkAllocationCallbacks *pAllocator) {
+void CoreChecks::PreCallRecordDestroyDescriptorUpdateTemplateKHR(VkDevice device,
+ VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
+ const VkAllocationCallbacks *pAllocator) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (!descriptorUpdateTemplate) return;
- desc_template_map.erase(descriptorUpdateTemplate);
+ device_data->desc_template_map.erase(descriptorUpdateTemplate);
}
-void ValidationStateTracker::RecordCreateDescriptorUpdateTemplateState(const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
- VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate) {
+void RecordCreateDescriptorUpdateTemplateState(layer_data *device_data, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
+ VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate) {
safe_VkDescriptorUpdateTemplateCreateInfo *local_create_info = new safe_VkDescriptorUpdateTemplateCreateInfo(pCreateInfo);
std::unique_ptr<TEMPLATE_STATE> template_state(new TEMPLATE_STATE(*pDescriptorUpdateTemplate, local_create_info));
- desc_template_map[*pDescriptorUpdateTemplate] = std::move(template_state);
+ device_data->desc_template_map[*pDescriptorUpdateTemplate] = std::move(template_state);
}
-void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplate(
- VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator,
- VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate, VkResult result) {
+void CoreChecks::PostCallRecordCreateDescriptorUpdateTemplate(VkDevice device,
+ const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate,
+ VkResult result) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (VK_SUCCESS != result) return;
- RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
+ RecordCreateDescriptorUpdateTemplateState(device_data, pCreateInfo, pDescriptorUpdateTemplate);
}
-void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplateKHR(
- VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator,
- VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate, VkResult result) {
+void CoreChecks::PostCallRecordCreateDescriptorUpdateTemplateKHR(VkDevice device,
+ const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate,
+ VkResult result) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (VK_SUCCESS != result) return;
- RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
+ RecordCreateDescriptorUpdateTemplateState(device_data, pCreateInfo, pDescriptorUpdateTemplate);
}
-bool CoreChecks::ValidateUpdateDescriptorSetWithTemplate(VkDescriptorSet descriptorSet,
+bool CoreChecks::ValidateUpdateDescriptorSetWithTemplate(layer_data *device_data, VkDescriptorSet descriptorSet,
VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
const void *pData) {
bool skip = false;
- auto const template_map_entry = desc_template_map.find(descriptorUpdateTemplate);
- if ((template_map_entry == desc_template_map.end()) || (template_map_entry->second.get() == nullptr)) {
+ auto const template_map_entry = device_data->desc_template_map.find(descriptorUpdateTemplate);
+ if ((template_map_entry == device_data->desc_template_map.end()) || (template_map_entry->second.get() == nullptr)) {
// Object tracker will report errors for invalid descriptorUpdateTemplate values, avoiding a crash in release builds
// but retaining the assert as template support is new enough to want to investigate these in debug builds.
assert(0);
@@ -13898,7 +12908,7 @@ bool CoreChecks::ValidateUpdateDescriptorSetWithTemplate(VkDescriptorSet descrip
const TEMPLATE_STATE *template_state = template_map_entry->second.get();
// TODO: Validate template push descriptor updates
if (template_state->create_info.templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET) {
- skip = ValidateUpdateDescriptorSetsWithTemplateKHR(descriptorSet, template_state, pData);
+ skip = ValidateUpdateDescriptorSetsWithTemplateKHR(device_data, descriptorSet, template_state, pData);
}
}
return skip;
@@ -13907,44 +12917,48 @@ bool CoreChecks::ValidateUpdateDescriptorSetWithTemplate(VkDescriptorSet descrip
bool CoreChecks::PreCallValidateUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
VkDescriptorUpdateTemplate descriptorUpdateTemplate,
const void *pData) {
- return ValidateUpdateDescriptorSetWithTemplate(descriptorSet, descriptorUpdateTemplate, pData);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ return ValidateUpdateDescriptorSetWithTemplate(device_data, descriptorSet, descriptorUpdateTemplate, pData);
}
bool CoreChecks::PreCallValidateUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet,
VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
const void *pData) {
- return ValidateUpdateDescriptorSetWithTemplate(descriptorSet, descriptorUpdateTemplate, pData);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ return ValidateUpdateDescriptorSetWithTemplate(device_data, descriptorSet, descriptorUpdateTemplate, pData);
}
-void ValidationStateTracker::RecordUpdateDescriptorSetWithTemplateState(VkDescriptorSet descriptorSet,
- VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
- const void *pData) {
- auto const template_map_entry = desc_template_map.find(descriptorUpdateTemplate);
- if ((template_map_entry == desc_template_map.end()) || (template_map_entry->second.get() == nullptr)) {
+void CoreChecks::RecordUpdateDescriptorSetWithTemplateState(layer_data *device_data, VkDescriptorSet descriptorSet,
+ VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
+ const void *pData) {
+ auto const template_map_entry = device_data->desc_template_map.find(descriptorUpdateTemplate);
+ if ((template_map_entry == device_data->desc_template_map.end()) || (template_map_entry->second.get() == nullptr)) {
assert(0);
} else {
const TEMPLATE_STATE *template_state = template_map_entry->second.get();
// TODO: Record template push descriptor updates
if (template_state->create_info.templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET) {
- PerformUpdateDescriptorSetsWithTemplateKHR(descriptorSet, template_state, pData);
+ PerformUpdateDescriptorSetsWithTemplateKHR(device_data, descriptorSet, template_state, pData);
}
}
}
-void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const void *pData) {
- RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
+void CoreChecks::PreCallRecordUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
+ VkDescriptorUpdateTemplate descriptorUpdateTemplate,
+ const void *pData) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ RecordUpdateDescriptorSetWithTemplateState(device_data, descriptorSet, descriptorUpdateTemplate, pData);
}
-void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet,
- VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
- const void *pData) {
- RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
+void CoreChecks::PreCallRecordUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet,
+ VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
+ const void *pData) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ RecordUpdateDescriptorSetWithTemplateState(device_data, descriptorSet, descriptorUpdateTemplate, pData);
}
-static std::shared_ptr<cvdescriptorset::DescriptorSetLayout const> GetDslFromPipelineLayout(
- PIPELINE_LAYOUT_STATE const *layout_data, uint32_t set) {
+static std::shared_ptr<cvdescriptorset::DescriptorSetLayout const> GetDslFromPipelineLayout(PIPELINE_LAYOUT_NODE const *layout_data,
+ uint32_t set) {
std::shared_ptr<cvdescriptorset::DescriptorSetLayout const> dsl = nullptr;
if (layout_data && (set < layout_data->set_layouts.size())) {
dsl = layout_data->set_layouts[set];
@@ -13955,32 +12969,34 @@ static std::shared_ptr<cvdescriptorset::DescriptorSetLayout const> GetDslFromPip
bool CoreChecks::PreCallValidateCmdPushDescriptorSetWithTemplateKHR(VkCommandBuffer commandBuffer,
VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
VkPipelineLayout layout, uint32_t set, const void *pData) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
assert(cb_state);
const char *const func_name = "vkPushDescriptorSetWithTemplateKHR()";
bool skip = false;
- skip |= ValidateCmd(cb_state, CMD_PUSHDESCRIPTORSETWITHTEMPLATEKHR, func_name);
+ skip |= ValidateCmd(device_data, cb_state, CMD_PUSHDESCRIPTORSETWITHTEMPLATEKHR, func_name);
- auto layout_data = GetPipelineLayout(layout);
+ auto layout_data = GetPipelineLayout(device_data, layout);
auto dsl = GetDslFromPipelineLayout(layout_data, set);
- const VulkanTypedHandle layout_typed(layout, kVulkanObjectTypePipelineLayout);
+ const auto layout_u64 = HandleToUint64(layout);
// Validate the set index points to a push descriptor set and is in range
if (dsl) {
if (!dsl->IsPushDescriptor()) {
- skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT,
- layout_typed.handle, "VUID-vkCmdPushDescriptorSetKHR-set-00365",
- "%s: Set index %" PRIu32 " does not match push descriptor set layout index for %s.", func_name, set,
- report_data->FormatHandle(layout_typed).c_str());
+ skip = log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT,
+ layout_u64, "VUID-vkCmdPushDescriptorSetKHR-set-00365",
+ "%s: Set index %" PRIu32 " does not match push descriptor set layout index for VkPipelineLayout %s.",
+ func_name, set, device_data->report_data->FormatHandle(layout_u64).c_str());
}
} else if (layout_data && (set >= layout_data->set_layouts.size())) {
- skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT,
- layout_typed.handle, "VUID-vkCmdPushDescriptorSetKHR-set-00364",
- "%s: Set index %" PRIu32 " is outside of range for %s (set < %" PRIu32 ").", func_name, set,
- report_data->FormatHandle(layout_typed).c_str(), static_cast<uint32_t>(layout_data->set_layouts.size()));
+ skip = log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT,
+ layout_u64, "VUID-vkCmdPushDescriptorSetKHR-set-00364",
+ "%s: Set index %" PRIu32 " is outside of range for VkPipelineLayout %s (set < %" PRIu32 ").", func_name, set,
+ device_data->report_data->FormatHandle(layout_u64).c_str(),
+ static_cast<uint32_t>(layout_data->set_layouts.size()));
}
- const auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
+ const auto template_state = GetDescriptorTemplateState(device_data, descriptorUpdateTemplate);
if (template_state) {
const auto &template_ci = template_state->create_info;
static const std::map<VkPipelineBindPoint, std::string> bind_errors = {
@@ -13988,42 +13004,43 @@ bool CoreChecks::PreCallValidateCmdPushDescriptorSetWithTemplateKHR(VkCommandBuf
std::make_pair(VK_PIPELINE_BIND_POINT_COMPUTE, "VUID-vkCmdPushDescriptorSetWithTemplateKHR-commandBuffer-00366"),
std::make_pair(VK_PIPELINE_BIND_POINT_RAY_TRACING_NV,
"VUID-vkCmdPushDescriptorSetWithTemplateKHR-commandBuffer-00366")};
- skip |= ValidatePipelineBindPoint(cb_state, template_ci.pipelineBindPoint, func_name, bind_errors);
+ skip |= ValidatePipelineBindPoint(device_data, cb_state, template_ci.pipelineBindPoint, func_name, bind_errors);
if (template_ci.templateType != VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(cb_state->commandBuffer), kVUID_Core_PushDescriptorUpdate_TemplateType,
"%s: descriptorUpdateTemplate %s was not created with flag "
"VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR.",
- func_name, report_data->FormatHandle(descriptorUpdateTemplate).c_str());
+ func_name, device_data->report_data->FormatHandle(descriptorUpdateTemplate).c_str());
}
if (template_ci.set != set) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(cb_state->commandBuffer), kVUID_Core_PushDescriptorUpdate_Template_SetMismatched,
- "%s: descriptorUpdateTemplate %s created with set %" PRIu32
- " does not match command parameter set %" PRIu32 ".",
- func_name, report_data->FormatHandle(descriptorUpdateTemplate).c_str(), template_ci.set, set);
- }
- if (!CompatForSet(set, layout_data, GetPipelineLayout(template_ci.pipelineLayout))) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ skip |= log_msg(
+ device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ HandleToUint64(cb_state->commandBuffer), kVUID_Core_PushDescriptorUpdate_Template_SetMismatched,
+ "%s: descriptorUpdateTemplate %s created with set %" PRIu32 " does not match command parameter set %" PRIu32 ".",
+ func_name, device_data->report_data->FormatHandle(descriptorUpdateTemplate).c_str(), template_ci.set, set);
+ }
+ if (!CompatForSet(set, layout_data, GetPipelineLayout(device_data, template_ci.pipelineLayout))) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(cb_state->commandBuffer), kVUID_Core_PushDescriptorUpdate_Template_LayoutMismatched,
- "%s: descriptorUpdateTemplate %s created with %s is incompatible with command parameter "
- "%s for set %" PRIu32,
- func_name, report_data->FormatHandle(descriptorUpdateTemplate).c_str(),
- report_data->FormatHandle(template_ci.pipelineLayout).c_str(),
- report_data->FormatHandle(layout).c_str(), set);
+ "%s: descriptorUpdateTemplate %s created with pipelineLayout %s is incompatible with command parameter "
+ "layout %s for set %" PRIu32,
+ func_name, device_data->report_data->FormatHandle(descriptorUpdateTemplate).c_str(),
+ device_data->report_data->FormatHandle(template_ci.pipelineLayout).c_str(),
+ device_data->report_data->FormatHandle(layout).c_str(), set);
}
}
if (dsl && template_state) {
// Create an empty proxy in order to use the existing descriptor set update validation
- cvdescriptorset::DescriptorSet proxy_ds(VK_NULL_HANDLE, VK_NULL_HANDLE, dsl, 0, this);
+ cvdescriptorset::DescriptorSet proxy_ds(VK_NULL_HANDLE, VK_NULL_HANDLE, dsl, 0, device_data);
// Decode the template into a set of write updates
- cvdescriptorset::DecodedTemplateUpdate decoded_template(this, VK_NULL_HANDLE, template_state, pData,
+ cvdescriptorset::DecodedTemplateUpdate decoded_template(device_data, VK_NULL_HANDLE, template_state, pData,
dsl->GetDescriptorSetLayout());
// Validate the decoded update against the proxy_ds
- skip |= ValidatePushDescriptorsUpdate(&proxy_ds, static_cast<uint32_t>(decoded_template.desc_writes.size()),
- decoded_template.desc_writes.data(), func_name);
+ skip |= proxy_ds.ValidatePushDescriptorsUpdate(device_data->report_data,
+ static_cast<uint32_t>(decoded_template.desc_writes.size()),
+ decoded_template.desc_writes.data(), func_name);
}
return skip;
@@ -14032,26 +13049,28 @@ bool CoreChecks::PreCallValidateCmdPushDescriptorSetWithTemplateKHR(VkCommandBuf
void CoreChecks::PreCallRecordCmdPushDescriptorSetWithTemplateKHR(VkCommandBuffer commandBuffer,
VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
VkPipelineLayout layout, uint32_t set, const void *pData) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
- const auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
+ const auto template_state = GetDescriptorTemplateState(device_data, descriptorUpdateTemplate);
if (template_state) {
- auto layout_data = GetPipelineLayout(layout);
+ auto layout_data = GetPipelineLayout(device_data, layout);
auto dsl = GetDslFromPipelineLayout(layout_data, set);
const auto &template_ci = template_state->create_info;
if (dsl && !dsl->IsDestroyed()) {
// Decode the template into a set of write updates
- cvdescriptorset::DecodedTemplateUpdate decoded_template(this, VK_NULL_HANDLE, template_state, pData,
+ cvdescriptorset::DecodedTemplateUpdate decoded_template(device_data, VK_NULL_HANDLE, template_state, pData,
dsl->GetDescriptorSetLayout());
- RecordCmdPushDescriptorSetState(cb_state, template_ci.pipelineBindPoint, layout, set,
+ RecordCmdPushDescriptorSetState(device_data, cb_state, template_ci.pipelineBindPoint, layout, set,
static_cast<uint32_t>(decoded_template.desc_writes.size()),
decoded_template.desc_writes.data());
}
}
}
-void ValidationStateTracker::RecordGetPhysicalDeviceDisplayPlanePropertiesState(VkPhysicalDevice physicalDevice,
- uint32_t *pPropertyCount, void *pProperties) {
+void CoreChecks::RecordGetPhysicalDeviceDisplayPlanePropertiesState(instance_layer_data *instance_data,
+ VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount,
+ void *pProperties) {
auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
if (*pPropertyCount) {
if (physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState < QUERY_COUNT) {
@@ -14066,36 +13085,39 @@ void ValidationStateTracker::RecordGetPhysicalDeviceDisplayPlanePropertiesState(
}
}
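
// Illustrative sketch, not from this change: the surface-format and display-plane records in
// this file advance a per-physical-device call state (UNCALLED -> QUERY_COUNT -> QUERY_DETAILS)
// so later validation can warn when an application skips the count-only query. A compact model
// of that progression, with invented *_SKETCH names.
#include <cstdint>

enum CallStateSketch { UNCALLED_SKETCH, QUERY_COUNT_SKETCH, QUERY_DETAILS_SKETCH };

struct QueryProgress {
    CallStateSketch state = UNCALLED_SKETCH;
    std::uint32_t recorded_count = 0;

    // pData == nullptr models the "count only" call; non-null models the data-returning call.
    void Record(std::uint32_t count, const void *pData) {
        if (pData == nullptr) {
            if (state < QUERY_COUNT_SKETCH) state = QUERY_COUNT_SKETCH;
            recorded_count = count;
        } else {
            state = QUERY_DETAILS_SKETCH;
        }
    }

    // Mirrors the warnings above: asking for data before ever querying the count.
    bool ShouldWarnMissingCountQuery(const void *pData) const {
        return pData != nullptr && state == UNCALLED_SKETCH;
    }
};
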
-void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice,
- uint32_t *pPropertyCount,
- VkDisplayPlanePropertiesKHR *pProperties,
- VkResult result) {
+void CoreChecks::PostCallRecordGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount,
+ VkDisplayPlanePropertiesKHR *pProperties,
+ VkResult result) {
+ instance_layer_data *instance_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), instance_layer_data_map);
if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
- RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
+ RecordGetPhysicalDeviceDisplayPlanePropertiesState(instance_data, physicalDevice, pPropertyCount, pProperties);
}
-void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice,
- uint32_t *pPropertyCount,
- VkDisplayPlaneProperties2KHR *pProperties,
- VkResult result) {
+void CoreChecks::PostCallRecordGetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice,
+ uint32_t *pPropertyCount,
+ VkDisplayPlaneProperties2KHR *pProperties,
+ VkResult result) {
+ instance_layer_data *instance_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), instance_layer_data_map);
if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
- RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
+ RecordGetPhysicalDeviceDisplayPlanePropertiesState(instance_data, physicalDevice, pPropertyCount, pProperties);
}
-bool CoreChecks::ValidateGetPhysicalDeviceDisplayPlanePropertiesKHRQuery(VkPhysicalDevice physicalDevice, uint32_t planeIndex,
- const char *api_name) const {
+bool CoreChecks::ValidateGetPhysicalDeviceDisplayPlanePropertiesKHRQuery(instance_layer_data *instance_data,
+ VkPhysicalDevice physicalDevice, uint32_t planeIndex,
+ const char *api_name) {
bool skip = false;
- const auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
+ auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
if (physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState == UNCALLED) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
- HandleToUint64(physicalDevice), kVUID_Core_Swapchain_GetSupportedDisplaysWithoutQuery,
- "Potential problem with calling %s() without first retrieving properties from "
- "vkGetPhysicalDeviceDisplayPlanePropertiesKHR or vkGetPhysicalDeviceDisplayPlaneProperties2KHR.",
- api_name);
+ skip |=
+ log_msg(instance_data->report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+ HandleToUint64(physicalDevice), kVUID_Core_Swapchain_GetSupportedDisplaysWithoutQuery,
+ "Potential problem with calling %s() without first querying vkGetPhysicalDeviceDisplayPlanePropertiesKHR "
+ "or vkGetPhysicalDeviceDisplayPlaneProperties2KHR.",
+ api_name);
} else {
if (planeIndex >= physical_device_state->display_plane_property_count) {
skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+ instance_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
HandleToUint64(physicalDevice), "VUID-vkGetDisplayPlaneSupportedDisplaysKHR-planeIndex-01249",
"%s(): planeIndex must be in the range [0, %d] that was returned by vkGetPhysicalDeviceDisplayPlanePropertiesKHR "
"or vkGetPhysicalDeviceDisplayPlaneProperties2KHR. Do you have the plane index hardcoded?",
@@ -14107,127 +13129,257 @@ bool CoreChecks::ValidateGetPhysicalDeviceDisplayPlanePropertiesKHRQuery(VkPhysi
bool CoreChecks::PreCallValidateGetDisplayPlaneSupportedDisplaysKHR(VkPhysicalDevice physicalDevice, uint32_t planeIndex,
uint32_t *pDisplayCount, VkDisplayKHR *pDisplays) {
+ instance_layer_data *instance_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), instance_layer_data_map);
bool skip = false;
- skip |= ValidateGetPhysicalDeviceDisplayPlanePropertiesKHRQuery(physicalDevice, planeIndex,
+ skip |= ValidateGetPhysicalDeviceDisplayPlanePropertiesKHRQuery(instance_data, physicalDevice, planeIndex,
"vkGetDisplayPlaneSupportedDisplaysKHR");
return skip;
}
bool CoreChecks::PreCallValidateGetDisplayPlaneCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode,
uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR *pCapabilities) {
+ instance_layer_data *instance_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), instance_layer_data_map);
bool skip = false;
- skip |= ValidateGetPhysicalDeviceDisplayPlanePropertiesKHRQuery(physicalDevice, planeIndex, "vkGetDisplayPlaneCapabilitiesKHR");
+ skip |= ValidateGetPhysicalDeviceDisplayPlanePropertiesKHRQuery(instance_data, physicalDevice, planeIndex,
+ "vkGetDisplayPlaneCapabilitiesKHR");
return skip;
}
bool CoreChecks::PreCallValidateGetDisplayPlaneCapabilities2KHR(VkPhysicalDevice physicalDevice,
const VkDisplayPlaneInfo2KHR *pDisplayPlaneInfo,
VkDisplayPlaneCapabilities2KHR *pCapabilities) {
+ instance_layer_data *instance_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), instance_layer_data_map);
bool skip = false;
- skip |= ValidateGetPhysicalDeviceDisplayPlanePropertiesKHRQuery(physicalDevice, pDisplayPlaneInfo->planeIndex,
+ skip |= ValidateGetPhysicalDeviceDisplayPlanePropertiesKHRQuery(instance_data, physicalDevice, pDisplayPlaneInfo->planeIndex,
"vkGetDisplayPlaneCapabilities2KHR");
return skip;
}
bool CoreChecks::PreCallValidateCmdDebugMarkerBeginEXT(VkCommandBuffer commandBuffer,
const VkDebugMarkerMarkerInfoEXT *pMarkerInfo) {
- const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
assert(cb_state);
- return ValidateCmd(cb_state, CMD_DEBUGMARKERBEGINEXT, "vkCmdDebugMarkerBeginEXT()");
+ return ValidateCmd(device_data, cb_state, CMD_DEBUGMARKERBEGINEXT, "vkCmdDebugMarkerBeginEXT()");
}
bool CoreChecks::PreCallValidateCmdDebugMarkerEndEXT(VkCommandBuffer commandBuffer) {
- const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
assert(cb_state);
- return ValidateCmd(cb_state, CMD_DEBUGMARKERENDEXT, "vkCmdDebugMarkerEndEXT()");
+ return ValidateCmd(device_data, cb_state, CMD_DEBUGMARKERENDEXT, "vkCmdDebugMarkerEndEXT()");
}
-bool CoreChecks::PreCallValidateCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query,
- VkQueryControlFlags flags, uint32_t index) {
- if (disabled.query_validation) return false;
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
- assert(cb_state);
- QueryObject query_obj(queryPool, query, index);
- const char *cmd_name = "vkCmdBeginQueryIndexedEXT()";
- bool skip = ValidateBeginQuery(
- cb_state, query_obj, flags, CMD_BEGINQUERYINDEXEDEXT, cmd_name, "VUID-vkCmdBeginQueryIndexedEXT-commandBuffer-cmdpool",
- "VUID-vkCmdBeginQueryIndexedEXT-queryType-02338", "VUID-vkCmdBeginQueryIndexedEXT-queryType-00803",
- "VUID-vkCmdBeginQueryIndexedEXT-queryType-00800", "VUID-vkCmdBeginQueryIndexedEXT-query-00802");
-
- // Extension specific VU's
- const auto &query_pool_ci = GetQueryPoolState(query_obj.pool)->createInfo;
- if (query_pool_ci.queryType == VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT) {
- if (device_extensions.vk_ext_transform_feedback &&
- (index >= phys_dev_ext_props.transform_feedback_props.maxTransformFeedbackStreams)) {
- skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdBeginQueryIndexedEXT-queryType-02339",
- "%s: index %" PRIu32
- " must be less than VkPhysicalDeviceTransformFeedbackPropertiesEXT::maxTransformFeedbackStreams %" PRIu32 ".",
- cmd_name, index, phys_dev_ext_props.transform_feedback_props.maxTransformFeedbackStreams);
- }
- } else if (index != 0) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdBeginQueryIndexedEXT-queryType-02340",
- "%s: index %" PRIu32
- " must be zero if %s was not created with type VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT.",
- cmd_name, index, report_data->FormatHandle(queryPool).c_str());
+bool CoreChecks::PreCallValidateCmdSetDiscardRectangleEXT(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle,
+ uint32_t discardRectangleCount, const VkRect2D *pDiscardRectangles) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
+ // Minimal validation for command buffer state
+ return ValidateCmd(device_data, cb_state, CMD_SETDISCARDRECTANGLEEXT, "vkCmdSetDiscardRectangleEXT()");
+}
+
+bool CoreChecks::PreCallValidateCmdSetSampleLocationsEXT(VkCommandBuffer commandBuffer,
+ const VkSampleLocationsInfoEXT *pSampleLocationsInfo) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
+ // Minimal validation for command buffer state
+ return ValidateCmd(device_data, cb_state, CMD_SETSAMPLELOCATIONSEXT, "vkCmdSetSampleLocationsEXT()");
+}
+
+bool CoreChecks::PreCallValidateCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
+ VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
+ uint32_t stride) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ bool skip = false;
+ if (offset & 3) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ HandleToUint64(commandBuffer), "VUID-vkCmdDrawIndirectCountKHR-offset-03108",
+ "vkCmdDrawIndirectCountKHR() parameter, VkDeviceSize offset (0x%" PRIxLEAST64 "), is not a multiple of 4.",
+ offset);
+ }
+
+ if (countBufferOffset & 3) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ HandleToUint64(commandBuffer), "VUID-vkCmdDrawIndirectCountKHR-countBufferOffset-03109",
+ "vkCmdDrawIndirectCountKHR() parameter, VkDeviceSize countBufferOffset (0x%" PRIxLEAST64
+ "), is not a multiple of 4.",
+ countBufferOffset);
+ }
+
+ if ((stride & 3) || stride < sizeof(VkDrawIndirectCommand)) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ HandleToUint64(commandBuffer), "VUID-vkCmdDrawIndirectCountKHR-stride-03110",
+ "vkCmdDrawIndirectCountKHR() parameter, uint32_t stride (0x%" PRIxLEAST32
+                        "), is not a multiple of 4 or is smaller than sizeof(VkDrawIndirectCommand).",
+ stride);
}
+
+ skip |= ValidateCmdDrawType(device_data, commandBuffer, false, VK_PIPELINE_BIND_POINT_GRAPHICS, CMD_DRAWINDIRECTCOUNTKHR,
+ "vkCmdDrawIndirectCountKHR()", VK_QUEUE_GRAPHICS_BIT,
+ "VUID-vkCmdDrawIndirectCountKHR-commandBuffer-cmdpool", "VUID-vkCmdDrawIndirectCountKHR-renderpass",
+ "VUID-vkCmdDrawIndirectCountKHR-None-03119", "VUID-vkCmdDrawIndirectCountKHR-None-03120");
+ BUFFER_STATE *buffer_state = GetBufferState(buffer);
+ BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
+ skip |= ValidateMemoryIsBoundToBuffer(device_data, buffer_state, "vkCmdDrawIndirectCountKHR()",
+ "VUID-vkCmdDrawIndirectCountKHR-buffer-03104");
+ skip |= ValidateMemoryIsBoundToBuffer(device_data, count_buffer_state, "vkCmdDrawIndirectCountKHR()",
+ "VUID-vkCmdDrawIndirectCountKHR-countBuffer-03106");
+
return skip;
}
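
// Illustrative sketch, not from this change: the offset, countBufferOffset, and stride checks
// above use (value & 3) to test 4-byte alignment; for unsigned integers this is equivalent to
// value % 4 != 0 because 4 is a power of two. A tiny self-contained example with concrete values:
#include <cassert>
#include <cstdint>

// True when v is a multiple of 4; (v & 3) keeps only the two low bits,
// which are zero exactly for multiples of 4.
static inline bool IsMultipleOfFour(std::uint64_t v) { return (v & 3u) == 0; }

int main() {
    assert(IsMultipleOfFour(0));   // 0 counts as aligned
    assert(IsMultipleOfFour(16));  // typical aligned offset
    assert(!IsMultipleOfFour(6));  // would trigger the VUIDs checked above
    return 0;
}
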
-void ValidationStateTracker::PostCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
- uint32_t query, VkQueryControlFlags flags, uint32_t index) {
- QueryObject query_obj = {queryPool, query, index};
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
- RecordCmdBeginQuery(cb_state, query_obj);
+void CoreChecks::PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
+ VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
+ uint32_t stride) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
+ BUFFER_STATE *buffer_state = GetBufferState(buffer);
+ BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
+ UpdateStateCmdDrawType(device_data, cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
+ AddCommandBufferBindingBuffer(device_data, cb_state, buffer_state);
+ AddCommandBufferBindingBuffer(device_data, cb_state, count_buffer_state);
+}
+
+bool CoreChecks::PreCallValidateCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
+ VkBuffer countBuffer, VkDeviceSize countBufferOffset,
+ uint32_t maxDrawCount, uint32_t stride) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ bool skip = false;
+ if (offset & 3) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ HandleToUint64(commandBuffer), "VUID-vkCmdDrawIndexedIndirectCountKHR-offset-03140",
+ "vkCmdDrawIndexedIndirectCountKHR() parameter, VkDeviceSize offset (0x%" PRIxLEAST64
+ "), is not a multiple of 4.",
+ offset);
+ }
+
+ if (countBufferOffset & 3) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ HandleToUint64(commandBuffer), "VUID-vkCmdDrawIndexedIndirectCountKHR-countBufferOffset-03141",
+ "vkCmdDrawIndexedIndirectCountKHR() parameter, VkDeviceSize countBufferOffset (0x%" PRIxLEAST64
+ "), is not a multiple of 4.",
+ countBufferOffset);
+ }
+
+ if ((stride & 3) || stride < sizeof(VkDrawIndexedIndirectCommand)) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ HandleToUint64(commandBuffer), "VUID-vkCmdDrawIndexedIndirectCountKHR-stride-03142",
+ "vkCmdDrawIndexedIndirectCountKHR() parameter, uint32_t stride (0x%" PRIxLEAST32
+                        "), is not a multiple of 4 or is smaller than sizeof(VkDrawIndexedIndirectCommand).",
+ stride);
+ }
+
+ skip |= ValidateCmdDrawType(
+ device_data, commandBuffer, true, VK_PIPELINE_BIND_POINT_GRAPHICS, CMD_DRAWINDEXEDINDIRECTCOUNTKHR,
+ "vkCmdDrawIndexedIndirectCountKHR()", VK_QUEUE_GRAPHICS_BIT, "VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-cmdpool",
+ "VUID-vkCmdDrawIndexedIndirectCountKHR-renderpass", "VUID-vkCmdDrawIndexedIndirectCountKHR-None-03151",
+ "VUID-vkCmdDrawIndexedIndirectCountKHR-None-03152");
+ BUFFER_STATE *buffer_state = GetBufferState(buffer);
+ BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
+ skip |= ValidateMemoryIsBoundToBuffer(device_data, buffer_state, "vkCmdDrawIndexedIndirectCountKHR()",
+ "VUID-vkCmdDrawIndexedIndirectCountKHR-buffer-03136");
+ skip |= ValidateMemoryIsBoundToBuffer(device_data, count_buffer_state, "vkCmdDrawIndexedIndirectCountKHR()",
+ "VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-03138");
+ return skip;
}
-void CoreChecks::PreCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query,
- VkQueryControlFlags flags, uint32_t index) {
- QueryObject query_obj = {queryPool, query, index};
- EnqueueVerifyBeginQuery(commandBuffer, query_obj);
+void CoreChecks::PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
+ VkBuffer countBuffer, VkDeviceSize countBufferOffset,
+ uint32_t maxDrawCount, uint32_t stride) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
+ BUFFER_STATE *buffer_state = GetBufferState(buffer);
+ BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
+ UpdateStateCmdDrawType(device_data, cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
+ AddCommandBufferBindingBuffer(device_data, cb_state, buffer_state);
+ AddCommandBufferBindingBuffer(device_data, cb_state, count_buffer_state);
}
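
// Illustrative sketch, not from this change: the record hooks above call
// AddCommandBufferBindingBuffer so the layer can later detect buffers destroyed or unbound
// while still referenced by a recorded command buffer. A minimal model of that bookkeeping,
// assuming a per-command-buffer set of handles; the struct and method names are invented.
#include <cstdint>
#include <unordered_set>

struct CommandBufferBindingsSketch {
    std::unordered_set<std::uint64_t> bound_buffers;  // handles referenced by recorded commands

    void AddBufferBinding(std::uint64_t buffer_handle) { bound_buffers.insert(buffer_handle); }

    // At destroy/unbind time the layer can flag handles that are still referenced here.
    bool References(std::uint64_t buffer_handle) const {
        return bound_buffers.count(buffer_handle) != 0;
    }
};
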
-bool CoreChecks::PreCallValidateCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query,
- uint32_t index) {
- if (disabled.query_validation) return false;
- QueryObject query_obj = {queryPool, query, index};
- const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
- assert(cb_state);
- return ValidateCmdEndQuery(cb_state, query_obj, CMD_ENDQUERYINDEXEDEXT, "vkCmdEndQueryIndexedEXT()",
- "VUID-vkCmdEndQueryIndexedEXT-commandBuffer-cmdpool", "VUID-vkCmdEndQueryIndexedEXT-None-02342");
+bool CoreChecks::PreCallValidateCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ bool skip = ValidateCmdDrawType(device_data, commandBuffer, false, VK_PIPELINE_BIND_POINT_GRAPHICS, CMD_DRAWMESHTASKSNV,
+ "vkCmdDrawMeshTasksNV()", VK_QUEUE_GRAPHICS_BIT,
+ "VUID-vkCmdDrawMeshTasksNV-commandBuffer-cmdpool", "VUID-vkCmdDrawMeshTasksNV-renderpass",
+ "VUID-vkCmdDrawMeshTasksNV-None-02125", "VUID-vkCmdDrawMeshTasksNV-None-02126");
+ return skip;
}
-void ValidationStateTracker::PostCallRecordCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
- uint32_t query, uint32_t index) {
- QueryObject query_obj = {queryPool, query, index};
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
- RecordCmdEndQuery(cb_state, query_obj);
+void CoreChecks::PreCallRecordCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
+ UpdateStateCmdDrawType(device_data, cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
}
-bool CoreChecks::PreCallValidateCmdSetDiscardRectangleEXT(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle,
- uint32_t discardRectangleCount, const VkRect2D *pDiscardRectangles) {
- const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
- // Minimal validation for command buffer state
- return ValidateCmd(cb_state, CMD_SETDISCARDRECTANGLEEXT, "vkCmdSetDiscardRectangleEXT()");
+bool CoreChecks::PreCallValidateCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
+ uint32_t drawCount, uint32_t stride) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ bool skip = ValidateCmdDrawType(device_data, commandBuffer, false, VK_PIPELINE_BIND_POINT_GRAPHICS, CMD_DRAWMESHTASKSINDIRECTNV,
+ "vkCmdDrawMeshTasksIndirectNV()", VK_QUEUE_GRAPHICS_BIT,
+ "VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-cmdpool",
+ "VUID-vkCmdDrawMeshTasksIndirectNV-renderpass", "VUID-vkCmdDrawMeshTasksIndirectNV-None-02154",
+ "VUID-vkCmdDrawMeshTasksIndirectNV-None-02155");
+ BUFFER_STATE *buffer_state = GetBufferState(buffer);
+ skip |= ValidateMemoryIsBoundToBuffer(device_data, buffer_state, "vkCmdDrawMeshTasksIndirectNV()",
+ "VUID-vkCmdDrawMeshTasksIndirectNV-buffer-02143");
+
+ return skip;
}
-bool CoreChecks::PreCallValidateCmdSetSampleLocationsEXT(VkCommandBuffer commandBuffer,
- const VkSampleLocationsInfoEXT *pSampleLocationsInfo) {
- const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
- // Minimal validation for command buffer state
- return ValidateCmd(cb_state, CMD_SETSAMPLELOCATIONSEXT, "vkCmdSetSampleLocationsEXT()");
+void CoreChecks::PreCallRecordCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
+ uint32_t drawCount, uint32_t stride) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
+ UpdateStateCmdDrawType(device_data, cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
+ BUFFER_STATE *buffer_state = GetBufferState(buffer);
+ if (buffer_state) {
+ AddCommandBufferBindingBuffer(device_data, cb_state, buffer_state);
+ }
+}
+
+bool CoreChecks::PreCallValidateCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
+ VkBuffer countBuffer, VkDeviceSize countBufferOffset,
+ uint32_t maxDrawCount, uint32_t stride) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ bool skip = ValidateCmdDrawType(
+ device_data, commandBuffer, false, VK_PIPELINE_BIND_POINT_GRAPHICS, CMD_DRAWMESHTASKSINDIRECTCOUNTNV,
+ "vkCmdDrawMeshTasksIndirectCountNV()", VK_QUEUE_GRAPHICS_BIT,
+ "VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-cmdpool", "VUID-vkCmdDrawMeshTasksIndirectCountNV-renderpass",
+ "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02189", "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02190");
+ BUFFER_STATE *buffer_state = GetBufferState(buffer);
+ BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
+ skip |= ValidateMemoryIsBoundToBuffer(device_data, buffer_state, "vkCmdDrawMeshTasksIndirectCountNV()",
+ "VUID-vkCmdDrawMeshTasksIndirectCountNV-buffer-02176");
+ skip |= ValidateMemoryIsBoundToBuffer(device_data, count_buffer_state, "vkCmdDrawMeshTasksIndirectCountNV()",
+ "VUID-vkCmdDrawMeshTasksIndirectCountNV-countBuffer-02178");
+
+ return skip;
}
-bool CoreChecks::ValidateCreateSamplerYcbcrConversion(const char *func_name,
- const VkSamplerYcbcrConversionCreateInfo *create_info) const {
+void CoreChecks::PreCallRecordCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
+ VkBuffer countBuffer, VkDeviceSize countBufferOffset,
+ uint32_t maxDrawCount, uint32_t stride) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *cb_state = GetCBNode(commandBuffer);
+ BUFFER_STATE *buffer_state = GetBufferState(buffer);
+ BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
+ UpdateStateCmdDrawType(device_data, cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
+ if (buffer_state) {
+ AddCommandBufferBindingBuffer(device_data, cb_state, buffer_state);
+ }
+ if (count_buffer_state) {
+ AddCommandBufferBindingBuffer(device_data, cb_state, count_buffer_state);
+ }
+}
+
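The two validate/record pairs above only check command-buffer state and that the indirect and count buffers have device memory bound. A hedged, application-side fragment of what a conforming caller looks like; cmdBuf and indirectBuffer are placeholder names, the device is assumed to have VK_NV_mesh_shader enabled, the command buffer is recording inside a render pass on a graphics queue, and the buffer was created with VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT and bound to memory (buffer creation and the upload of the records are elided):

// Two packed indirect records written at offset 0 of indirectBuffer.
VkDrawMeshTasksIndirectCommandNV records[2] = {
    {64, 0},   // taskCount, firstTask
    {128, 0},
};
// ... copy 'records' into indirectBuffer's memory ...

// drawCount = 2, stride = sizeof(VkDrawMeshTasksIndirectCommandNV); the layer code above
// verifies the buffer has bound memory, and the spec additionally requires the records to
// fit inside the buffer (see ValidateCmdDrawStrideWithBuffer later in this diff).
vkCmdDrawMeshTasksIndirectNV(cmdBuf, indirectBuffer, /*offset*/ 0,
                             /*drawCount*/ 2, sizeof(VkDrawMeshTasksIndirectCommandNV));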
+bool CoreChecks::ValidateCreateSamplerYcbcrConversion(const layer_data *device_data, const char *func_name,
+ const VkSamplerYcbcrConversionCreateInfo *create_info) {
bool skip = false;
- if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
- skip |= ValidateCreateSamplerYcbcrConversionANDROID(create_info);
+ if (GetDeviceExtensions()->vk_android_external_memory_android_hardware_buffer) {
+ skip |= ValidateCreateSamplerYcbcrConversionANDROID(device_data, create_info);
} else { // Not android hardware buffer
if (VK_FORMAT_UNDEFINED == create_info->format) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, 0,
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, 0,
"VUID-VkSamplerYcbcrConversionCreateInfo-format-01649",
"%s: CreateInfo format type is VK_FORMAT_UNDEFINED.", func_name);
}
@@ -14238,82 +13390,86 @@ bool CoreChecks::ValidateCreateSamplerYcbcrConversion(const char *func_name,
bool CoreChecks::PreCallValidateCreateSamplerYcbcrConversion(VkDevice device, const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator,
VkSamplerYcbcrConversion *pYcbcrConversion) {
- return ValidateCreateSamplerYcbcrConversion("vkCreateSamplerYcbcrConversion()", pCreateInfo);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ return ValidateCreateSamplerYcbcrConversion(device_data, "vkCreateSamplerYcbcrConversion()", pCreateInfo);
}
bool CoreChecks::PreCallValidateCreateSamplerYcbcrConversionKHR(VkDevice device,
const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator,
VkSamplerYcbcrConversion *pYcbcrConversion) {
- return ValidateCreateSamplerYcbcrConversion("vkCreateSamplerYcbcrConversionKHR()", pCreateInfo);
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+ return ValidateCreateSamplerYcbcrConversion(device_data, "vkCreateSamplerYcbcrConversionKHR()", pCreateInfo);
}
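On the non-Android path, the check above rejects VK_FORMAT_UNDEFINED (VUID 01649). A hedged fragment of a create info that satisfies it; 'device' is a placeholder VkDevice assumed to have the samplerYcbcrConversion feature enabled, and the chosen format/model/range values are just one valid combination:

VkSamplerYcbcrConversionCreateInfo ci = {};
ci.sType = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO;
ci.format = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM;  // must not be VK_FORMAT_UNDEFINED here
ci.ycbcrModel = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709;
ci.ycbcrRange = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW;
ci.components = {VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY,
                 VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY};
ci.xChromaOffset = VK_CHROMA_LOCATION_COSITED_EVEN;
ci.yChromaOffset = VK_CHROMA_LOCATION_COSITED_EVEN;
ci.chromaFilter = VK_FILTER_NEAREST;
ci.forceExplicitReconstruction = VK_FALSE;

VkSamplerYcbcrConversion conversion = VK_NULL_HANDLE;
VkResult res = vkCreateSamplerYcbcrConversion(device, &ci, nullptr, &conversion);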
-void ValidationStateTracker::RecordCreateSamplerYcbcrConversionState(const VkSamplerYcbcrConversionCreateInfo *create_info,
- VkSamplerYcbcrConversion ycbcr_conversion) {
- if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
- RecordCreateSamplerYcbcrConversionANDROID(create_info, ycbcr_conversion);
+void CoreChecks::RecordCreateSamplerYcbcrConversionState(layer_data *device_data,
+ const VkSamplerYcbcrConversionCreateInfo *create_info,
+ VkSamplerYcbcrConversion ycbcr_conversion) {
+ if (GetDeviceExtensions()->vk_android_external_memory_android_hardware_buffer) {
+ RecordCreateSamplerYcbcrConversionANDROID(device_data, create_info, ycbcr_conversion);
}
}
-void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversion(VkDevice device,
- const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
- const VkAllocationCallbacks *pAllocator,
- VkSamplerYcbcrConversion *pYcbcrConversion,
- VkResult result) {
+void CoreChecks::PostCallRecordCreateSamplerYcbcrConversion(VkDevice device, const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkSamplerYcbcrConversion *pYcbcrConversion, VkResult result) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (VK_SUCCESS != result) return;
- RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
+ RecordCreateSamplerYcbcrConversionState(device_data, pCreateInfo, *pYcbcrConversion);
}
-void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversionKHR(VkDevice device,
- const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
- const VkAllocationCallbacks *pAllocator,
- VkSamplerYcbcrConversion *pYcbcrConversion,
- VkResult result) {
+void CoreChecks::PostCallRecordCreateSamplerYcbcrConversionKHR(VkDevice device,
+ const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkSamplerYcbcrConversion *pYcbcrConversion, VkResult result) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (VK_SUCCESS != result) return;
- RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
+ RecordCreateSamplerYcbcrConversionState(device_data, pCreateInfo, *pYcbcrConversion);
}
-void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion,
- const VkAllocationCallbacks *pAllocator) {
+void CoreChecks::PostCallRecordDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion,
+ const VkAllocationCallbacks *pAllocator) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (!ycbcrConversion) return;
- if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
- RecordDestroySamplerYcbcrConversionANDROID(ycbcrConversion);
+ if (GetDeviceExtensions()->vk_android_external_memory_android_hardware_buffer) {
+ RecordDestroySamplerYcbcrConversionANDROID(device_data, ycbcrConversion);
}
}
-void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversionKHR(VkDevice device,
- VkSamplerYcbcrConversion ycbcrConversion,
- const VkAllocationCallbacks *pAllocator) {
+void CoreChecks::PostCallRecordDestroySamplerYcbcrConversionKHR(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion,
+ const VkAllocationCallbacks *pAllocator) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (!ycbcrConversion) return;
- if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
- RecordDestroySamplerYcbcrConversionANDROID(ycbcrConversion);
+ if (GetDeviceExtensions()->vk_android_external_memory_android_hardware_buffer) {
+ RecordDestroySamplerYcbcrConversionANDROID(device_data, ycbcrConversion);
}
}
bool CoreChecks::PreCallValidateGetBufferDeviceAddressEXT(VkDevice device, const VkBufferDeviceAddressInfoEXT *pInfo) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
bool skip = false;
- if (!enabled_features.buffer_address.bufferDeviceAddress) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+ if (!GetEnabledFeatures()->buffer_address.bufferDeviceAddress) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
HandleToUint64(pInfo->buffer), "VUID-vkGetBufferDeviceAddressEXT-None-02598",
"The bufferDeviceAddress feature must: be enabled.");
}
- if (physical_device_count > 1 && !enabled_features.buffer_address.bufferDeviceAddressMultiDevice) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+ if (device_data->physical_device_count > 1 && !GetEnabledFeatures()->buffer_address.bufferDeviceAddressMultiDevice) {
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
HandleToUint64(pInfo->buffer), "VUID-vkGetBufferDeviceAddressEXT-device-02599",
"If device was created with multiple physical devices, then the "
"bufferDeviceAddressMultiDevice feature must: be enabled.");
}
- const auto buffer_state = GetBufferState(pInfo->buffer);
+ auto buffer_state = GetBufferState(pInfo->buffer);
if (buffer_state) {
if (!(buffer_state->createInfo.flags & VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT)) {
- skip |= ValidateMemoryIsBoundToBuffer(buffer_state, "vkGetBufferDeviceAddressEXT()",
+ skip |= ValidateMemoryIsBoundToBuffer(device_data, buffer_state, "vkGetBufferDeviceAddressEXT()",
"VUID-VkBufferDeviceAddressInfoEXT-buffer-02600");
}
- skip |= ValidateBufferUsageFlags(buffer_state, VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT, true,
+ skip |= ValidateBufferUsageFlags(device_data, buffer_state, VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT, true,
"VUID-VkBufferDeviceAddressInfoEXT-buffer-02601", "vkGetBufferDeviceAddressEXT()",
"VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT");
}
@@ -14321,73 +13477,14 @@ bool CoreChecks::PreCallValidateGetBufferDeviceAddressEXT(VkDevice device, const
return skip;
}
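The three checks above correspond to the feature bit, the multi-device feature bit, and the usage flag on the buffer. A hedged, application-side fragment of what they expect; 'device' and 'buffer' are placeholders, device creation and buffer creation/binding are elided, and the struct names come from VK_EXT_buffer_device_address:

// At device creation: chain the feature struct and enable bufferDeviceAddress.
VkPhysicalDeviceBufferDeviceAddressFeaturesEXT bda_features = {};
bda_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT;
bda_features.bufferDeviceAddress = VK_TRUE;
// deviceCreateInfo.pNext = &bda_features;   (vkCreateDevice call elided)

// The queried buffer must have been created with the shader-device-address usage bit:
// bufferCreateInfo.usage |= VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT;

VkBufferDeviceAddressInfoEXT info = {};
info.sType = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT;
info.buffer = buffer;  // assumed created and bound to memory
VkDeviceAddress addr = vkGetBufferDeviceAddressEXT(device, &info);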
-bool CoreChecks::ValidateQueryRange(VkDevice device, VkQueryPool queryPool, uint32_t totalCount, uint32_t firstQuery,
- uint32_t queryCount, const char *vuid_badfirst, const char *vuid_badrange) const {
- bool skip = false;
-
- if (firstQuery >= totalCount) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
- vuid_badfirst, "firstQuery (%" PRIu32 ") greater than or equal to query pool count (%" PRIu32 ") for %s",
- firstQuery, totalCount, report_data->FormatHandle(queryPool).c_str());
- }
-
- if ((firstQuery + queryCount) > totalCount) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
- vuid_badrange, "Query range [%" PRIu32 ", %" PRIu32 ") goes beyond query pool count (%" PRIu32 ") for %s",
- firstQuery, firstQuery + queryCount, totalCount, report_data->FormatHandle(queryPool).c_str());
- }
-
- return skip;
-}
-
-bool CoreChecks::PreCallValidateResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
- uint32_t queryCount) {
- if (disabled.query_validation) return false;
-
- bool skip = false;
-
- if (!enabled_features.host_query_reset_features.hostQueryReset) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
- "VUID-vkResetQueryPoolEXT-None-02665", "Host query reset not enabled for device");
- }
-
- const auto query_pool_state = GetQueryPoolState(queryPool);
- if (query_pool_state) {
- skip |= ValidateQueryRange(device, queryPool, query_pool_state->createInfo.queryCount, firstQuery, queryCount,
- "VUID-vkResetQueryPoolEXT-firstQuery-02666", "VUID-vkResetQueryPoolEXT-firstQuery-02667");
- }
-
- return skip;
-}
-
-void ValidationStateTracker::PostCallRecordResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
- uint32_t queryCount) {
- // Do nothing if the feature is not enabled.
- if (!enabled_features.host_query_reset_features.hostQueryReset) return;
-
- // Do nothing if the query pool has been destroyed.
- auto query_pool_state = GetQueryPoolState(queryPool);
- if (!query_pool_state) return;
-
- // Reset the state of existing entries.
- QueryObject query_obj{queryPool, 0};
- const uint32_t max_query_count = std::min(queryCount, query_pool_state->createInfo.queryCount - firstQuery);
- for (uint32_t i = 0; i < max_query_count; ++i) {
- query_obj.query = firstQuery + i;
- auto query_it = queryToStateMap.find(query_obj);
- if (query_it != queryToStateMap.end()) query_it->second = QUERYSTATE_RESET;
- }
-}
-
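The removed PostCallRecordResetQueryPoolEXT clamps the reset range to the pool's query count and only touches queries that already have a tracked state. A self-contained sketch of that pattern with simplified types (the enum and map here are stand-ins for queryToStateMap, not the layer's actual types):

#include <algorithm>
#include <cstdint>
#include <map>

enum QueryState { QUERYSTATE_UNKNOWN, QUERYSTATE_RESET, QUERYSTATE_AVAILABLE };

// Reset [firstQuery, firstQuery + queryCount), never past pool_query_count, and only
// for entries already present. Assumes firstQuery < pool_query_count, which the
// matching PreCallValidate range check enforces.
void ResetTrackedQueries(std::map<uint32_t, QueryState>& query_state, uint32_t pool_query_count,
                         uint32_t firstQuery, uint32_t queryCount) {
    const uint32_t max_query_count = std::min(queryCount, pool_query_count - firstQuery);
    for (uint32_t i = 0; i < max_query_count; ++i) {
        auto it = query_state.find(firstQuery + i);
        if (it != query_state.end()) it->second = QUERYSTATE_RESET;
    }
}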
void CoreChecks::PreCallRecordGetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice,
VkPhysicalDeviceProperties *pPhysicalDeviceProperties) {
- // There is an implicit layer that can cause this call to return 0 for maxBoundDescriptorSets - Ignore such calls
- if (enabled.gpu_validation && enabled.gpu_validation_reserve_binding_slot &&
- pPhysicalDeviceProperties->limits.maxBoundDescriptorSets > 0) {
+ instance_layer_data *instance_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), instance_layer_data_map);
+ if (GetEnables()->gpu_validation && GetEnables()->gpu_validation_reserve_binding_slot) {
if (pPhysicalDeviceProperties->limits.maxBoundDescriptorSets > 1) {
pPhysicalDeviceProperties->limits.maxBoundDescriptorSets -= 1;
} else {
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+ log_msg(instance_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
HandleToUint64(physicalDevice), "UNASSIGNED-GPU-Assisted Validation Setup Error.",
"Unable to reserve descriptor binding slot on a device with only one slot.");
}
@@ -14403,207 +13500,35 @@ VkResult CoreChecks::CoreLayerCreateValidationCacheEXT(VkDevice device, const Vk
void CoreChecks::CoreLayerDestroyValidationCacheEXT(VkDevice device, VkValidationCacheEXT validationCache,
const VkAllocationCallbacks *pAllocator) {
- delete CastFromHandle<ValidationCache *>(validationCache);
+ delete (ValidationCache *)validationCache;
}
VkResult CoreChecks::CoreLayerGetValidationCacheDataEXT(VkDevice device, VkValidationCacheEXT validationCache, size_t *pDataSize,
void *pData) {
size_t inSize = *pDataSize;
- CastFromHandle<ValidationCache *>(validationCache)->Write(pDataSize, pData);
+ ((ValidationCache *)validationCache)->Write(pDataSize, pData);
return (pData && *pDataSize != inSize) ? VK_INCOMPLETE : VK_SUCCESS;
}
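CoreLayerGetValidationCacheDataEXT implements the usual Vulkan two-call size query, returning VK_INCOMPLETE when the caller's buffer is smaller than the serialized cache. A hedged caller-side fragment (includes elided; 'device' and 'cache' are placeholders and VK_EXT_validation_cache is assumed enabled):

size_t size = 0;
vkGetValidationCacheDataEXT(device, cache, &size, nullptr);   // first call: query the size
std::vector<uint8_t> blob(size);
VkResult res = vkGetValidationCacheDataEXT(device, cache, &size, blob.data());  // second call: fill
// res == VK_INCOMPLETE would mean 'blob' was smaller than the data actually available.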
VkResult CoreChecks::CoreLayerMergeValidationCachesEXT(VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount,
const VkValidationCacheEXT *pSrcCaches) {
+ layer_data *dev_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
bool skip = false;
- auto dst = CastFromHandle<ValidationCache *>(dstCache);
+ auto dst = (ValidationCache *)dstCache;
+ auto src = (ValidationCache const *const *)pSrcCaches;
VkResult result = VK_SUCCESS;
for (uint32_t i = 0; i < srcCacheCount; i++) {
- auto src = CastFromHandle<const ValidationCache *>(pSrcCaches[i]);
- if (src == dst) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT, 0,
- "VUID-vkMergeValidationCachesEXT-dstCache-01536",
+ if (src[i] == dst) {
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT,
+ 0, "VUID-vkMergeValidationCachesEXT-dstCache-01536",
"vkMergeValidationCachesEXT: dstCache (0x%" PRIx64 ") must not appear in pSrcCaches array.",
HandleToUint64(dstCache));
result = VK_ERROR_VALIDATION_FAILED_EXT;
}
if (!skip) {
- dst->Merge(src);
+ dst->Merge(src[i]);
}
}
return result;
}
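The merge loop above refuses to merge once the destination cache is found among the sources (VUID-vkMergeValidationCachesEXT-dstCache-01536). A self-contained sketch of the same guard with a toy cache type standing in for ValidationCache:

#include <set>
#include <string>
#include <vector>

struct ShaderHashCache {
    std::set<std::string> hashes;
    void Merge(const ShaderHashCache* other) { hashes.insert(other->hashes.begin(), other->hashes.end()); }
};

// Returns false (and stops merging) if dst appears in the source list, mirroring the VUID above.
bool MergeCaches(ShaderHashCache* dst, const std::vector<const ShaderHashCache*>& srcs) {
    bool ok = true;
    for (const ShaderHashCache* src : srcs) {
        if (src == dst) ok = false;   // self-merge is an error
        if (ok) dst->Merge(src);
    }
    return ok;
}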
-
-bool CoreChecks::PreCallValidateCmdSetDeviceMask(VkCommandBuffer commandBuffer, uint32_t deviceMask) {
- bool skip = false;
- const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
-
- skip |= ValidateDeviceMaskToPhysicalDeviceCount(deviceMask, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(commandBuffer), "VUID-vkCmdSetDeviceMask-deviceMask-00108");
- skip |= ValidateDeviceMaskToZero(deviceMask, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, HandleToUint64(commandBuffer),
- "VUID-vkCmdSetDeviceMask-deviceMask-00109");
- skip |= ValidateDeviceMaskToCommandBuffer(cb_state, deviceMask, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(commandBuffer), "VUID-vkCmdSetDeviceMask-deviceMask-00110");
- if (cb_state->activeRenderPass) {
- skip |= ValidateDeviceMaskToRenderPass(cb_state, deviceMask, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(commandBuffer), "VUID-vkCmdSetDeviceMask-deviceMask-00111");
- }
- return skip;
-}
-
-bool CoreChecks::ValidateQueryPoolStride(const std::string &vuid_not_64, const std::string &vuid_64, const VkDeviceSize stride,
- const char *parameter_name, const uint64_t parameter_value,
- const VkQueryResultFlags flags) const {
- bool skip = false;
- if (flags & VK_QUERY_RESULT_64_BIT) {
- static const int condition_multiples = 0b0111;
- if ((stride & condition_multiples) || (parameter_value & condition_multiples)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid_64,
- "stride %" PRIx64 " or %s %" PRIx64 " is invalid.", stride, parameter_name, parameter_value);
- }
- } else {
- static const int condition_multiples = 0b0011;
- if ((stride & condition_multiples) || (parameter_value & condition_multiples)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid_not_64,
- "stride %" PRIx64 " or %s %" PRIx64 " is invalid.", stride, parameter_name, parameter_value);
- }
- }
- return skip;
-}
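The removed ValidateQueryPoolStride uses a low-bit mask (0b0111 or 0b0011) to test 8-byte versus 4-byte alignment depending on VK_QUERY_RESULT_64_BIT. A self-contained version of the same trick:

#include <cstdint>

// A value is N-aligned (N a power of two) iff its low log2(N) bits are all zero.
inline bool IsAligned(uint64_t value, uint64_t pow2_alignment) {
    return (value & (pow2_alignment - 1)) == 0;
}

inline bool StrideAndOffsetValid(uint64_t stride, uint64_t offset, bool results_are_64bit) {
    const uint64_t alignment = results_are_64bit ? 8 : 4;  // the 0b0111 / 0b0011 masks above
    return IsAligned(stride, alignment) && IsAligned(offset, alignment);
}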
-
-bool CoreChecks::ValidateCmdDrawStrideWithStruct(VkCommandBuffer commandBuffer, const std::string &vuid, const uint32_t stride,
- const char *struct_name, const uint32_t struct_size) const {
- bool skip = false;
- static const int condition_multiples = 0b0011;
- if ((stride & condition_multiples) || (stride < struct_size)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(commandBuffer), vuid, "stride %d is invalid or less than sizeof(%s) %d.", stride,
- struct_name, struct_size);
- }
- return skip;
-}
-
-bool CoreChecks::ValidateCmdDrawStrideWithBuffer(VkCommandBuffer commandBuffer, const std::string &vuid, const uint32_t stride,
- const char *struct_name, const uint32_t struct_size, const uint32_t drawCount,
- const VkDeviceSize offset, const BUFFER_STATE *buffer_state) const {
- bool skip = false;
- uint64_t validation_value = stride * (drawCount - 1) + offset + struct_size;
- if (validation_value > buffer_state->createInfo.size) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(commandBuffer), vuid,
- "stride[%d] * (drawCount[%d] - 1) + offset[%" PRIx64 "] + sizeof(%s)[%d] = %" PRIx64
- " is greater than the size[%" PRIx64 "] of %s.",
- stride, drawCount, offset, struct_name, struct_size, validation_value, buffer_state->createInfo.size,
- report_data->FormatHandle(buffer_state->buffer).c_str());
- }
- return skip;
-}
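ValidateCmdDrawStrideWithBuffer checks that the last indirect record still ends inside the buffer. A self-contained version of the arithmetic, done in 64 bits so the multiply cannot overflow 32-bit math:

#include <cstdint>

// True when record i = drawCount - 1, laid out at offset + i * stride and struct_size bytes
// long, ends within buffer_size bytes. drawCount must be >= 1.
bool LastIndirectRecordInBounds(uint32_t stride, uint32_t drawCount, uint64_t offset,
                                uint32_t struct_size, uint64_t buffer_size) {
    const uint64_t end = uint64_t(stride) * (drawCount - 1) + offset + struct_size;
    return end <= buffer_size;
}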
-
-void PIPELINE_STATE::initGraphicsPipeline(ValidationStateTracker *state_data, const VkGraphicsPipelineCreateInfo *pCreateInfo,
- std::shared_ptr<RENDER_PASS_STATE> &&rpstate) {
- reset();
- bool uses_color_attachment = false;
- bool uses_depthstencil_attachment = false;
- if (pCreateInfo->subpass < rpstate->createInfo.subpassCount) {
- const auto &subpass = rpstate->createInfo.pSubpasses[pCreateInfo->subpass];
-
- for (uint32_t i = 0; i < subpass.colorAttachmentCount; ++i) {
- if (subpass.pColorAttachments[i].attachment != VK_ATTACHMENT_UNUSED) {
- uses_color_attachment = true;
- break;
- }
- }
-
- if (subpass.pDepthStencilAttachment && subpass.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
- uses_depthstencil_attachment = true;
- }
- }
- graphicsPipelineCI.initialize(pCreateInfo, uses_color_attachment, uses_depthstencil_attachment);
- if (graphicsPipelineCI.pInputAssemblyState) {
- topology_at_rasterizer = graphicsPipelineCI.pInputAssemblyState->topology;
- }
-
- stage_state.resize(pCreateInfo->stageCount);
- for (uint32_t i = 0; i < pCreateInfo->stageCount; i++) {
- const VkPipelineShaderStageCreateInfo *pPSSCI = &pCreateInfo->pStages[i];
- this->duplicate_shaders |= this->active_shaders & pPSSCI->stage;
- this->active_shaders |= pPSSCI->stage;
- state_data->RecordPipelineShaderStage(pPSSCI, this, &stage_state[i]);
- }
-
- if (graphicsPipelineCI.pVertexInputState) {
- const auto pVICI = graphicsPipelineCI.pVertexInputState;
- if (pVICI->vertexBindingDescriptionCount) {
- this->vertex_binding_descriptions_ = std::vector<VkVertexInputBindingDescription>(
- pVICI->pVertexBindingDescriptions, pVICI->pVertexBindingDescriptions + pVICI->vertexBindingDescriptionCount);
-
- this->vertex_binding_to_index_map_.reserve(pVICI->vertexBindingDescriptionCount);
- for (uint32_t i = 0; i < pVICI->vertexBindingDescriptionCount; ++i) {
- this->vertex_binding_to_index_map_[pVICI->pVertexBindingDescriptions[i].binding] = i;
- }
- }
- if (pVICI->vertexAttributeDescriptionCount) {
- this->vertex_attribute_descriptions_ = std::vector<VkVertexInputAttributeDescription>(
- pVICI->pVertexAttributeDescriptions, pVICI->pVertexAttributeDescriptions + pVICI->vertexAttributeDescriptionCount);
- }
- }
- if (graphicsPipelineCI.pColorBlendState) {
- const auto pCBCI = graphicsPipelineCI.pColorBlendState;
- if (pCBCI->attachmentCount) {
- this->attachments =
- std::vector<VkPipelineColorBlendAttachmentState>(pCBCI->pAttachments, pCBCI->pAttachments + pCBCI->attachmentCount);
- }
- }
- rp_state = rpstate;
-}
-
-void PIPELINE_STATE::initComputePipeline(ValidationStateTracker *state_data, const VkComputePipelineCreateInfo *pCreateInfo) {
- reset();
- computePipelineCI.initialize(pCreateInfo);
- switch (computePipelineCI.stage.stage) {
- case VK_SHADER_STAGE_COMPUTE_BIT:
- this->active_shaders |= VK_SHADER_STAGE_COMPUTE_BIT;
- stage_state.resize(1);
- state_data->RecordPipelineShaderStage(&pCreateInfo->stage, this, &stage_state[0]);
- break;
- default:
- // TODO : Flag error
- break;
- }
-}
-
-void PIPELINE_STATE::initRayTracingPipelineNV(ValidationStateTracker *state_data,
- const VkRayTracingPipelineCreateInfoNV *pCreateInfo) {
- reset();
- raytracingPipelineCI.initialize(pCreateInfo);
-
- stage_state.resize(pCreateInfo->stageCount);
- for (uint32_t stage_index = 0; stage_index < pCreateInfo->stageCount; stage_index++) {
- const auto &shader_stage = pCreateInfo->pStages[stage_index];
- switch (shader_stage.stage) {
- case VK_SHADER_STAGE_RAYGEN_BIT_NV:
- this->active_shaders |= VK_SHADER_STAGE_RAYGEN_BIT_NV;
- break;
- case VK_SHADER_STAGE_ANY_HIT_BIT_NV:
- this->active_shaders |= VK_SHADER_STAGE_ANY_HIT_BIT_NV;
- break;
- case VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV:
- this->active_shaders |= VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV;
- break;
- case VK_SHADER_STAGE_MISS_BIT_NV:
- this->active_shaders |= VK_SHADER_STAGE_MISS_BIT_NV;
- break;
- case VK_SHADER_STAGE_INTERSECTION_BIT_NV:
- this->active_shaders |= VK_SHADER_STAGE_INTERSECTION_BIT_NV;
- break;
- case VK_SHADER_STAGE_CALLABLE_BIT_NV:
- this->active_shaders |= VK_SHADER_STAGE_CALLABLE_BIT_NV;
- break;
- default:
- // TODO : Flag error
- break;
- }
- state_data->RecordPipelineShaderStage(&shader_stage, this, &stage_state[stage_index]);
- }
-}
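All three init* routines above fold each stage's VkShaderStageFlagBits into active_shaders, and initGraphicsPipeline records a duplicate whenever a stage bit is already set. A self-contained sketch of that bitmask trick, using plain uint32_t in place of VkShaderStageFlags:

#include <cstdint>
#include <vector>

// Accumulates stage bits into *active_out and reports whether any stage bit appeared twice.
bool HasDuplicateStage(const std::vector<uint32_t>& stage_bits, uint32_t* active_out) {
    uint32_t active = 0, duplicates = 0;
    for (uint32_t bit : stage_bits) {
        duplicates |= active & bit;  // bit already present -> duplicate
        active |= bit;
    }
    *active_out = active;
    return duplicates != 0;
}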
diff --git a/layers/core_validation.h b/layers/core_validation.h
index 3f24e1296..333e28a51 100644
--- a/layers/core_validation.h
+++ b/layers/core_validation.h
@@ -40,7 +40,6 @@
#include <vector>
#include <list>
#include <deque>
-#include <map>
enum SyncScope {
kSyncScopeInternal,
@@ -48,21 +47,21 @@ enum SyncScope {
kSyncScopeExternalPermanent,
};
-enum FENCE_STATUS { FENCE_UNSIGNALED, FENCE_INFLIGHT, FENCE_RETIRED };
+enum FENCE_STATE { FENCE_UNSIGNALED, FENCE_INFLIGHT, FENCE_RETIRED };
-class FENCE_STATE {
+class FENCE_NODE {
public:
VkFence fence;
VkFenceCreateInfo createInfo;
std::pair<VkQueue, uint64_t> signaler;
- FENCE_STATUS state;
+ FENCE_STATE state;
SyncScope scope;
// Default constructor
- FENCE_STATE() : state(FENCE_UNSIGNALED), scope(kSyncScopeInternal) {}
+ FENCE_NODE() : state(FENCE_UNSIGNALED), scope(kSyncScopeInternal) {}
};
-class SEMAPHORE_STATE : public BASE_NODE {
+class SEMAPHORE_NODE : public BASE_NODE {
public:
std::pair<VkQueue, uint64_t> signaler;
bool signaled;
@@ -72,6 +71,7 @@ class SEMAPHORE_STATE : public BASE_NODE {
class EVENT_STATE : public BASE_NODE {
public:
int write_in_use;
+ bool needsSignaled;
VkPipelineStageFlags stageMask;
};
@@ -80,13 +80,13 @@ class QUEUE_STATE {
VkQueue queue;
uint32_t queueFamilyIndex;
std::unordered_map<VkEvent, VkPipelineStageFlags> eventToStageMap;
- std::map<QueryObject, QueryState> queryToStateMap;
+    std::unordered_map<QueryObject, bool> queryToStateMap;  // false = unavailable, true = available

uint64_t seq;
std::deque<CB_SUBMISSION> submissions;
};
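Switching queryToStateMap to an unordered_map keyed by QueryObject requires the key type to be equality-comparable and hashable. A self-contained sketch of the usual std::hash specialization; QueryKey and its fields are hypothetical stand-ins for QueryObject's pool/query pair, and the hash combination is just one reasonable choice:

#include <cstdint>
#include <functional>
#include <unordered_map>

struct QueryKey {
    uint64_t pool;   // stand-in for the VkQueryPool handle value
    uint32_t query;
    bool operator==(const QueryKey& rhs) const { return pool == rhs.pool && query == rhs.query; }
};

namespace std {
template <>
struct hash<QueryKey> {
    size_t operator()(const QueryKey& k) const {
        return hash<uint64_t>()(k.pool) ^ (hash<uint32_t>()(k.query) << 1);
    }
};
}  // namespace std

// usage: std::unordered_map<QueryKey, bool> query_available;  query_available[{pool_handle, 3}] = true;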
-class QUERY_POOL_STATE : public BASE_NODE {
+class QUERY_POOL_NODE : public BASE_NODE {
public:
VkQueryPoolCreateInfo createInfo;
};
@@ -103,7 +103,7 @@ struct PHYSICAL_DEVICE_STATE {
CALL_STATE vkGetPhysicalDeviceDisplayPlanePropertiesKHRState = UNCALLED;
safe_VkPhysicalDeviceFeatures2 features2 = {};
VkPhysicalDevice phys_device = VK_NULL_HANDLE;
- uint32_t queue_family_known_count = 1; // spec implies one QF must always be supported
+ uint32_t queue_family_count = 0;
std::vector<VkQueueFamilyProperties> queue_family_properties;
VkSurfaceCapabilitiesKHR surfaceCapabilities = {};
std::vector<VkPresentModeKHR> present_modes;
@@ -118,20 +118,6 @@ struct create_graphics_pipeline_api_state {
const VkGraphicsPipelineCreateInfo* pCreateInfos;
};
-// This structure is used to save data across the CreateComputePipelines down-chain API call
-struct create_compute_pipeline_api_state {
- std::vector<safe_VkComputePipelineCreateInfo> gpu_create_infos;
- std::vector<std::unique_ptr<PIPELINE_STATE>> pipe_state;
- const VkComputePipelineCreateInfo* pCreateInfos;
-};
-
-// This structure is used to save data across the CreateRayTracingPipelinesNV down-chain API call.
-struct create_ray_tracing_pipeline_api_state {
- std::vector<safe_VkRayTracingPipelineCreateInfoNV> gpu_create_infos;
- std::vector<std::unique_ptr<PIPELINE_STATE>> pipe_state;
- const VkRayTracingPipelineCreateInfoNV* pCreateInfos;
-};
-
// This structure is used modify parameters for the CreatePipelineLayout down-chain API call
struct create_pipeline_layout_api_state {
std::vector<VkDescriptorSetLayout> new_layouts;
@@ -177,685 +163,71 @@ struct SURFACE_STATE {
SURFACE_STATE(VkSurfaceKHR surface) : surface(surface) {}
};
-struct SubpassLayout {
- uint32_t index;
- VkImageLayout layout;
-};
-
using std::unordered_map;
-struct GpuValidationState;
-
-#define VALSTATETRACK_MAP_AND_TRAITS_IMPL(handle_type, state_type, map_member, instance_scope) \
- template <typename Dummy> \
- struct AccessorStateHandle<state_type, Dummy> { \
- using StateType = state_type; \
- using HandleType = handle_type; \
- }; \
- AccessorTraitsTypes<state_type>::MapType map_member; \
- template <typename Dummy> \
- struct AccessorTraits<state_type, Dummy> : AccessorTraitsTypes<state_type> { \
- static const bool kInstanceScope = instance_scope; \
- static MapType ValidationStateTracker::*Map() { return &ValidationStateTracker::map_member; } \
- };
-#define VALSTATETRACK_MAP_AND_TRAITS(handle_type, state_type, map_member) \
- VALSTATETRACK_MAP_AND_TRAITS_IMPL(handle_type, state_type, map_member, false)
-#define VALSTATETRACK_MAP_AND_TRAITS_INSTANCE_SCOPE(handle_type, state_type, map_member) \
- VALSTATETRACK_MAP_AND_TRAITS_IMPL(handle_type, state_type, map_member, true)
-
-class ValidationStateTracker : public ValidationObject {
+class CoreChecks : public ValidationObject {
public:
- // TODO -- move to private
- // TODO -- make consistent with traits approach below.
+ std::unordered_set<VkQueue> queues; // All queues under given device
+ unordered_map<VkSampler, std::unique_ptr<SAMPLER_STATE>> samplerMap;
+ unordered_map<VkImageView, std::unique_ptr<IMAGE_VIEW_STATE>> imageViewMap;
+ unordered_map<VkImage, std::unique_ptr<IMAGE_STATE>> imageMap;
+ unordered_map<VkBufferView, std::unique_ptr<BUFFER_VIEW_STATE>> bufferViewMap;
+ unordered_map<VkBuffer, std::unique_ptr<BUFFER_STATE>> bufferMap;
+ unordered_map<VkPipeline, std::unique_ptr<PIPELINE_STATE>> pipelineMap;
+ unordered_map<VkCommandPool, COMMAND_POOL_NODE> commandPoolMap;
+ unordered_map<VkDescriptorPool, DESCRIPTOR_POOL_STATE*> descriptorPoolMap;
+ unordered_map<VkDescriptorSet, cvdescriptorset::DescriptorSet*> setMap;
+ unordered_map<VkDescriptorSetLayout, std::shared_ptr<cvdescriptorset::DescriptorSetLayout>> descriptorSetLayoutMap;
+ unordered_map<VkPipelineLayout, PIPELINE_LAYOUT_NODE> pipelineLayoutMap;
+ unordered_map<VkDeviceMemory, std::unique_ptr<DEVICE_MEM_INFO>> memObjMap;
+ unordered_map<VkFence, FENCE_NODE> fenceMap;
unordered_map<VkQueue, QUEUE_STATE> queueMap;
unordered_map<VkEvent, EVENT_STATE> eventMap;
-
+ unordered_map<QueryObject, bool> queryToStateMap;
+ unordered_map<VkQueryPool, QUERY_POOL_NODE> queryPoolMap;
+ unordered_map<VkSemaphore, SEMAPHORE_NODE> semaphoreMap;
+ unordered_map<VkCommandBuffer, GLOBAL_CB_NODE*> commandBufferMap;
+ unordered_map<VkFramebuffer, std::unique_ptr<FRAMEBUFFER_STATE>> frameBufferMap;
+ unordered_map<VkImage, std::vector<ImageSubresourcePair>> imageSubresourceMap;
+ unordered_map<ImageSubresourcePair, IMAGE_LAYOUT_NODE> imageLayoutMap;
unordered_map<VkRenderPass, std::shared_ptr<RENDER_PASS_STATE>> renderPassMap;
- unordered_map<VkDescriptorSetLayout, std::shared_ptr<cvdescriptorset::DescriptorSetLayout>> descriptorSetLayoutMap;
-
- std::unordered_set<VkQueue> queues; // All queues under given device
- std::map<QueryObject, QueryState> queryToStateMap;
+ unordered_map<VkShaderModule, std::unique_ptr<shader_module>> shaderModuleMap;
+ unordered_map<VkDescriptorUpdateTemplateKHR, std::unique_ptr<TEMPLATE_STATE>> desc_template_map;
+ unordered_map<VkSwapchainKHR, std::unique_ptr<SWAPCHAIN_NODE>> swapchainMap;
unordered_map<VkSamplerYcbcrConversion, uint64_t> ycbcr_conversion_ahb_fmt_map;
-
- // Traits for State function resolution. Specializations defined in the macro.
- // NOTE: The Dummy argument allows for *partial* specialization at class scope, as full specialization at class scope
- // isn't supported until C++17. Since the Dummy has a default all instantiations of the template can ignore it, but all
- // specializations of the template must list it (and not give it a default).
- template <typename StateType, typename Dummy = int>
- struct AccessorStateHandle {};
- template <typename StateType, typename Dummy = int>
- struct AccessorTraits {};
- template <typename StateType_>
- struct AccessorTraitsTypes {
- using StateType = StateType_;
- using HandleType = typename AccessorStateHandle<StateType>::HandleType;
- using ReturnType = StateType*;
- using MappedType = std::unique_ptr<StateType>;
- using MapType = unordered_map<HandleType, MappedType>;
- };
-
- VALSTATETRACK_MAP_AND_TRAITS(VkSampler, SAMPLER_STATE, samplerMap)
- VALSTATETRACK_MAP_AND_TRAITS(VkImageView, IMAGE_VIEW_STATE, imageViewMap)
- VALSTATETRACK_MAP_AND_TRAITS(VkImage, IMAGE_STATE, imageMap)
- VALSTATETRACK_MAP_AND_TRAITS(VkBufferView, BUFFER_VIEW_STATE, bufferViewMap)
- VALSTATETRACK_MAP_AND_TRAITS(VkBuffer, BUFFER_STATE, bufferMap)
- VALSTATETRACK_MAP_AND_TRAITS(VkPipeline, PIPELINE_STATE, pipelineMap)
- VALSTATETRACK_MAP_AND_TRAITS(VkDeviceMemory, DEVICE_MEMORY_STATE, memObjMap)
- VALSTATETRACK_MAP_AND_TRAITS(VkFramebuffer, FRAMEBUFFER_STATE, frameBufferMap)
- VALSTATETRACK_MAP_AND_TRAITS(VkShaderModule, SHADER_MODULE_STATE, shaderModuleMap)
- VALSTATETRACK_MAP_AND_TRAITS(VkDescriptorUpdateTemplateKHR, TEMPLATE_STATE, desc_template_map)
- VALSTATETRACK_MAP_AND_TRAITS(VkSwapchainKHR, SWAPCHAIN_NODE, swapchainMap)
- VALSTATETRACK_MAP_AND_TRAITS(VkDescriptorPool, DESCRIPTOR_POOL_STATE, descriptorPoolMap)
- VALSTATETRACK_MAP_AND_TRAITS(VkDescriptorSet, cvdescriptorset::DescriptorSet, setMap)
- VALSTATETRACK_MAP_AND_TRAITS(VkCommandBuffer, CMD_BUFFER_STATE, commandBufferMap)
- VALSTATETRACK_MAP_AND_TRAITS(VkCommandPool, COMMAND_POOL_STATE, commandPoolMap)
- VALSTATETRACK_MAP_AND_TRAITS(VkPipelineLayout, PIPELINE_LAYOUT_STATE, pipelineLayoutMap)
- VALSTATETRACK_MAP_AND_TRAITS(VkFence, FENCE_STATE, fenceMap)
- VALSTATETRACK_MAP_AND_TRAITS(VkQueryPool, QUERY_POOL_STATE, queryPoolMap)
- VALSTATETRACK_MAP_AND_TRAITS(VkSemaphore, SEMAPHORE_STATE, semaphoreMap)
- VALSTATETRACK_MAP_AND_TRAITS(VkAccelerationStructureNV, ACCELERATION_STRUCTURE_STATE, accelerationStructureMap)
- VALSTATETRACK_MAP_AND_TRAITS_INSTANCE_SCOPE(VkSurfaceKHR, SURFACE_STATE, surface_map)
-
- public:
- template <typename State>
- typename AccessorTraits<State>::ReturnType Get(typename AccessorTraits<State>::Handle handle) {
- using Traits = AccessorTraits<State>;
- auto map_member = Traits::Map();
- const typename Traits::MapType& map =
- (Traits::kInstanceScope && (this->*map_member).size() == 0) ? instance_state->*map_member : this->*map_member;
-
- const auto found_it = map.find(handle);
- if (found_it == map.end()) {
- return nullptr;
- }
- return found_it->second.get();
- };
-
- template <typename State>
- const typename AccessorTraits<State>::ReturnType Get(typename AccessorTraits<State>::HandleType handle) const {
- using Traits = AccessorTraits<State>;
- auto map_member = Traits::Map();
- const typename Traits::MapType& map =
- (Traits::kInstanceScope && (this->*map_member).size() == 0) ? instance_state->*map_member : this->*map_member;
-
- const auto found_it = map.find(handle);
- if (found_it == map.cend()) {
- return nullptr;
- }
- return found_it->second.get();
- };
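The NOTE in the removed block explains the unusual Dummy parameter: full specialization at class scope is not allowed before C++17, but partial specialization is, so every per-type accessor keeps a defaulted, unused Dummy argument. A self-contained sketch of that trick together with a traits-driven Get(); ImageState, BufferState, and the int handles are hypothetical simplifications of the real handle/state types:

#include <memory>
#include <unordered_map>

struct ImageState { int width = 0; };
struct BufferState { int size = 0; };

class Tracker {
  public:
    std::unordered_map<int, std::unique_ptr<ImageState>> images_;
    std::unordered_map<int, std::unique_ptr<BufferState>> buffers_;

    // Primary template; every real use goes through one of the partial specializations below.
    template <typename StateT, typename Dummy = int>
    struct Traits {};

    // The Dummy parameter keeps these *partial* specializations, which C++11/14 allow at class scope.
    template <typename Dummy>
    struct Traits<ImageState, Dummy> {
        static std::unordered_map<int, std::unique_ptr<ImageState>> Tracker::*Map() { return &Tracker::images_; }
    };
    template <typename Dummy>
    struct Traits<BufferState, Dummy> {
        static std::unordered_map<int, std::unique_ptr<BufferState>> Tracker::*Map() { return &Tracker::buffers_; }
    };

    // Single lookup routine: the traits pick which member map to search for each state type.
    template <typename StateT>
    StateT* Get(int handle) {
        auto& map = this->*Traits<StateT>::Map();
        auto it = map.find(handle);
        return (it == map.end()) ? nullptr : it->second.get();
    }
};

// usage: Tracker t; t.images_[7] = std::make_unique<ImageState>(); ImageState* s = t.Get<ImageState>(7);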
-
- // Accessors for the VALSTATE... maps
- const SAMPLER_STATE* GetSamplerState(VkSampler sampler) const { return Get<SAMPLER_STATE>(sampler); }
- SAMPLER_STATE* GetSamplerState(VkSampler sampler) { return Get<SAMPLER_STATE>(sampler); }
- const IMAGE_VIEW_STATE* GetImageViewState(VkImageView image_view) const { return Get<IMAGE_VIEW_STATE>(image_view); }
- IMAGE_VIEW_STATE* GetImageViewState(VkImageView image_view) { return Get<IMAGE_VIEW_STATE>(image_view); }
- const IMAGE_STATE* GetImageState(VkImage image) const { return Get<IMAGE_STATE>(image); }
- IMAGE_STATE* GetImageState(VkImage image) { return Get<IMAGE_STATE>(image); }
- const BUFFER_VIEW_STATE* GetBufferViewState(VkBufferView buffer_view) const { return Get<BUFFER_VIEW_STATE>(buffer_view); }
- BUFFER_VIEW_STATE* GetBufferViewState(VkBufferView buffer_view) { return Get<BUFFER_VIEW_STATE>(buffer_view); }
- const BUFFER_STATE* GetBufferState(VkBuffer buffer) const { return Get<BUFFER_STATE>(buffer); }
- BUFFER_STATE* GetBufferState(VkBuffer buffer) { return Get<BUFFER_STATE>(buffer); }
- const PIPELINE_STATE* GetPipelineState(VkPipeline pipeline) const { return Get<PIPELINE_STATE>(pipeline); }
- PIPELINE_STATE* GetPipelineState(VkPipeline pipeline) { return Get<PIPELINE_STATE>(pipeline); }
- const DEVICE_MEMORY_STATE* GetDevMemState(VkDeviceMemory mem) const { return Get<DEVICE_MEMORY_STATE>(mem); }
- DEVICE_MEMORY_STATE* GetDevMemState(VkDeviceMemory mem) { return Get<DEVICE_MEMORY_STATE>(mem); }
- const FRAMEBUFFER_STATE* GetFramebufferState(VkFramebuffer framebuffer) const { return Get<FRAMEBUFFER_STATE>(framebuffer); }
- FRAMEBUFFER_STATE* GetFramebufferState(VkFramebuffer framebuffer) { return Get<FRAMEBUFFER_STATE>(framebuffer); }
- const SHADER_MODULE_STATE* GetShaderModuleState(VkShaderModule module) const { return Get<SHADER_MODULE_STATE>(module); }
- SHADER_MODULE_STATE* GetShaderModuleState(VkShaderModule module) { return Get<SHADER_MODULE_STATE>(module); }
- const TEMPLATE_STATE* GetDescriptorTemplateState(VkDescriptorUpdateTemplateKHR descriptor_update_template) const {
- return Get<TEMPLATE_STATE>(descriptor_update_template);
- }
- TEMPLATE_STATE* GetDescriptorTemplateState(VkDescriptorUpdateTemplateKHR descriptor_update_template) {
- return Get<TEMPLATE_STATE>(descriptor_update_template);
- }
- const SWAPCHAIN_NODE* GetSwapchainState(VkSwapchainKHR swapchain) const { return Get<SWAPCHAIN_NODE>(swapchain); }
- SWAPCHAIN_NODE* GetSwapchainState(VkSwapchainKHR swapchain) { return Get<SWAPCHAIN_NODE>(swapchain); }
- const DESCRIPTOR_POOL_STATE* GetDescriptorPoolState(const VkDescriptorPool pool) const {
- return Get<DESCRIPTOR_POOL_STATE>(pool);
- }
- DESCRIPTOR_POOL_STATE* GetDescriptorPoolState(const VkDescriptorPool pool) { return Get<DESCRIPTOR_POOL_STATE>(pool); }
- const cvdescriptorset::DescriptorSet* GetSetNode(VkDescriptorSet set) const { return Get<cvdescriptorset::DescriptorSet>(set); }
- cvdescriptorset::DescriptorSet* GetSetNode(VkDescriptorSet set) { return Get<cvdescriptorset::DescriptorSet>(set); }
- const CMD_BUFFER_STATE* GetCBState(const VkCommandBuffer cb) const { return Get<CMD_BUFFER_STATE>(cb); }
- CMD_BUFFER_STATE* GetCBState(const VkCommandBuffer cb) { return Get<CMD_BUFFER_STATE>(cb); }
- const COMMAND_POOL_STATE* GetCommandPoolState(VkCommandPool pool) const { return Get<COMMAND_POOL_STATE>(pool); }
- COMMAND_POOL_STATE* GetCommandPoolState(VkCommandPool pool) { return Get<COMMAND_POOL_STATE>(pool); }
- const PIPELINE_LAYOUT_STATE* GetPipelineLayout(VkPipelineLayout pipeLayout) const {
- return Get<PIPELINE_LAYOUT_STATE>(pipeLayout);
- }
- PIPELINE_LAYOUT_STATE* GetPipelineLayout(VkPipelineLayout pipeLayout) { return Get<PIPELINE_LAYOUT_STATE>(pipeLayout); }
- const FENCE_STATE* GetFenceState(VkFence fence) const { return Get<FENCE_STATE>(fence); }
- FENCE_STATE* GetFenceState(VkFence fence) { return Get<FENCE_STATE>(fence); }
- const QUERY_POOL_STATE* GetQueryPoolState(VkQueryPool query_pool) const { return Get<QUERY_POOL_STATE>(query_pool); }
- QUERY_POOL_STATE* GetQueryPoolState(VkQueryPool query_pool) { return Get<QUERY_POOL_STATE>(query_pool); }
- const SEMAPHORE_STATE* GetSemaphoreState(VkSemaphore semaphore) const { return Get<SEMAPHORE_STATE>(semaphore); }
- SEMAPHORE_STATE* GetSemaphoreState(VkSemaphore semaphore) { return Get<SEMAPHORE_STATE>(semaphore); }
- const ACCELERATION_STRUCTURE_STATE* GetAccelerationStructureState(VkAccelerationStructureNV as) const {
- return Get<ACCELERATION_STRUCTURE_STATE>(as);
- }
- ACCELERATION_STRUCTURE_STATE* GetAccelerationStructureState(VkAccelerationStructureNV as) {
- return Get<ACCELERATION_STRUCTURE_STATE>(as);
- }
- const SURFACE_STATE* GetSurfaceState(VkSurfaceKHR surface) const { return Get<SURFACE_STATE>(surface); }
- SURFACE_STATE* GetSurfaceState(VkSurfaceKHR surface) { return Get<SURFACE_STATE>(surface); }
-
- // Class Declarations for helper functions
- IMAGE_VIEW_STATE* GetAttachmentImageViewState(FRAMEBUFFER_STATE* framebuffer, uint32_t index);
- const RENDER_PASS_STATE* GetRenderPassState(VkRenderPass renderpass) const;
- RENDER_PASS_STATE* GetRenderPassState(VkRenderPass renderpass);
- std::shared_ptr<RENDER_PASS_STATE> GetRenderPassStateSharedPtr(VkRenderPass renderpass);
- EVENT_STATE* GetEventState(VkEvent event);
- const QUEUE_STATE* GetQueueState(VkQueue queue) const;
- QUEUE_STATE* GetQueueState(VkQueue queue);
- const BINDABLE* GetObjectMemBinding(const VulkanTypedHandle& typed_handle) const;
- BINDABLE* GetObjectMemBinding(const VulkanTypedHandle& typed_handle);
+ std::unordered_set<uint64_t> ahb_ext_formats_set;
+ GlobalQFOTransferBarrierMap<VkImageMemoryBarrier> qfo_release_image_barrier_map;
+ GlobalQFOTransferBarrierMap<VkBufferMemoryBarrier> qfo_release_buffer_barrier_map;
+    // Map from queue family index to queue count
+ unordered_map<uint32_t, uint32_t> queue_family_index_map;
// Used for instance versions of this object
unordered_map<VkPhysicalDevice, PHYSICAL_DEVICE_STATE> physical_device_map;
// Link to the device's physical-device data
PHYSICAL_DEVICE_STATE* physical_device_state;
+ unordered_map<VkSurfaceKHR, SURFACE_STATE> surface_map;
// Link for derived device objects back to their parent instance object
- ValidationStateTracker* instance_state;
-
- const PHYSICAL_DEVICE_STATE* GetPhysicalDeviceState(VkPhysicalDevice phys) const;
- PHYSICAL_DEVICE_STATE* GetPhysicalDeviceState(VkPhysicalDevice phys);
- PHYSICAL_DEVICE_STATE* GetPhysicalDeviceState();
- const PHYSICAL_DEVICE_STATE* GetPhysicalDeviceState() const;
+ CoreChecks* instance_state;
- using CommandBufferResetCallback = std::function<void(VkCommandBuffer)>;
- std::unique_ptr<CommandBufferResetCallback> command_buffer_reset_callback;
- template <typename Fn>
- void SetCommandBufferResetCallback(Fn&& fn) {
- command_buffer_reset_callback.reset(new CommandBufferResetCallback(std::forward<Fn>(fn)));
- }
+ // Temporary object pointers
+ layer_data* device_data = this;
+ layer_data* instance_data = this;
+ layer_data* dev_data = this;
+ std::unordered_map<void*, layer_data*> layer_data_map;
+ std::unordered_map<void*, layer_data*> instance_layer_data_map;
- using SetImageViewInitialLayoutCallback = std::function<void(CMD_BUFFER_STATE*, const IMAGE_VIEW_STATE&, VkImageLayout)>;
- std::unique_ptr<SetImageViewInitialLayoutCallback> set_image_view_initial_layout_callback;
- template <typename Fn>
- void SetSetImageViewInitialLayoutCallback(Fn&& fn) {
- set_image_view_initial_layout_callback.reset(new SetImageViewInitialLayoutCallback(std::forward<Fn>(fn)));
- }
+ dispatch_key get_dispatch_key(const void* object) { return nullptr; }
- void CallSetImageViewInitialLayoutCallback(CMD_BUFFER_STATE* cb_node, const IMAGE_VIEW_STATE& iv_state, VkImageLayout layout) {
- if (set_image_view_initial_layout_callback) {
- (*set_image_view_initial_layout_callback)(cb_node, iv_state, layout);
- }
+ template <typename DATA_T>
+ DATA_T* GetLayerDataPtr(void* data_key, std::unordered_map<void*, DATA_T*>& layer_data_map) {
+ return this;
}
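The shim above returns `this` because CoreChecks now owns all state directly, but the name comes from the older per-dispatch-key scheme, which looked roughly like the self-contained sketch below (a simplified reconstruction for illustration, not the exact upstream vk_layer_data.h utility):

#include <unordered_map>

// One data object per dispatch key (per device or per instance), created on first use.
template <typename DATA_T>
DATA_T* GetLayerDataPtr(void* data_key, std::unordered_map<void*, DATA_T*>& data_map) {
    DATA_T*& entry = data_map[data_key];       // inserts nullptr on first lookup
    if (entry == nullptr) entry = new DATA_T;  // lazily allocate the per-key state
    return entry;
}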
- // State update functions
- // Gets/Enumerations
- void PostCallRecordEnumeratePhysicalDeviceGroups(VkInstance instance, uint32_t* pPhysicalDeviceGroupCount,
- VkPhysicalDeviceGroupPropertiesKHR* pPhysicalDeviceGroupProperties,
- VkResult result);
- void PostCallRecordEnumeratePhysicalDeviceGroupsKHR(VkInstance instance, uint32_t* pPhysicalDeviceGroupCount,
- VkPhysicalDeviceGroupPropertiesKHR* pPhysicalDeviceGroupProperties,
- VkResult result);
- void PostCallRecordEnumeratePhysicalDevices(VkInstance instance, uint32_t* pPhysicalDeviceCount,
- VkPhysicalDevice* pPhysicalDevices, VkResult result);
- void PostCallRecordGetAccelerationStructureMemoryRequirementsNV(VkDevice device,
- const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo,
- VkMemoryRequirements2KHR* pMemoryRequirements);
- void PostCallRecordGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer, VkMemoryRequirements* pMemoryRequirements);
- void PostCallRecordGetBufferMemoryRequirements2(VkDevice device, const VkBufferMemoryRequirementsInfo2KHR* pInfo,
- VkMemoryRequirements2KHR* pMemoryRequirements);
- void PostCallRecordGetBufferMemoryRequirements2KHR(VkDevice device, const VkBufferMemoryRequirementsInfo2KHR* pInfo,
- VkMemoryRequirements2KHR* pMemoryRequirements);
- void PostCallRecordGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue);
- void PostCallRecordGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2* pQueueInfo, VkQueue* pQueue);
- void PostCallRecordGetImageMemoryRequirements(VkDevice device, VkImage image, VkMemoryRequirements* pMemoryRequirements);
- void PostCallRecordGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements);
- void PostCallRecordGetImageMemoryRequirements2KHR(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements);
- void PostCallRecordGetImageSparseMemoryRequirements(VkDevice device, VkImage image, uint32_t* pSparseMemoryRequirementCount,
- VkSparseImageMemoryRequirements* pSparseMemoryRequirements);
- void PostCallRecordGetImageSparseMemoryRequirements2(VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR* pInfo,
- uint32_t* pSparseMemoryRequirementCount,
- VkSparseImageMemoryRequirements2KHR* pSparseMemoryRequirements);
- void PostCallRecordGetImageSparseMemoryRequirements2KHR(VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR* pInfo,
- uint32_t* pSparseMemoryRequirementCount,
- VkSparseImageMemoryRequirements2KHR* pSparseMemoryRequirements);
- void PostCallRecordGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount,
- VkDisplayPlanePropertiesKHR* pProperties, VkResult result);
- void PostCallRecordGetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount,
- VkDisplayPlaneProperties2KHR* pProperties, VkResult result);
- void PostCallRecordGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount,
- VkQueueFamilyProperties* pQueueFamilyProperties);
- void PostCallRecordGetPhysicalDeviceQueueFamilyProperties2(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount,
- VkQueueFamilyProperties2KHR* pQueueFamilyProperties);
- void PostCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(VkPhysicalDevice physicalDevice,
- uint32_t* pQueueFamilyPropertyCount,
- VkQueueFamilyProperties2KHR* pQueueFamilyProperties);
- void PostCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
- VkSurfaceCapabilitiesKHR* pSurfaceCapabilities, VkResult result);
- void PostCallRecordGetPhysicalDeviceSurfaceCapabilities2KHR(VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
- VkSurfaceCapabilities2KHR* pSurfaceCapabilities, VkResult result);
- void PostCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
- VkSurfaceCapabilities2EXT* pSurfaceCapabilities, VkResult result);
- void PostCallRecordGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
- uint32_t* pSurfaceFormatCount, VkSurfaceFormatKHR* pSurfaceFormats,
- VkResult result);
- void PostCallRecordGetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
- uint32_t* pSurfaceFormatCount, VkSurfaceFormat2KHR* pSurfaceFormats,
- VkResult result);
- void PostCallRecordGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
- uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes,
- VkResult result);
- void PostCallRecordGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex,
- VkSurfaceKHR surface, VkBool32* pSupported, VkResult result);
-
- // Create/Destroy/Bind
- void PostCallRecordBindAccelerationStructureMemoryNV(VkDevice device, uint32_t bindInfoCount,
- const VkBindAccelerationStructureMemoryInfoNV* pBindInfos,
- VkResult result);
- void PostCallRecordBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset,
- VkResult result);
- void PostCallRecordBindBufferMemory2(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfoKHR* pBindInfos,
- VkResult result);
- void PostCallRecordBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfoKHR* pBindInfos,
- VkResult result);
- void PostCallRecordBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem, VkDeviceSize memoryOffset,
- VkResult result);
- void PostCallRecordBindImageMemory2(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfoKHR* pBindInfos,
- VkResult result);
- void PostCallRecordBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfoKHR* pBindInfos,
- VkResult result);
-
- void PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator, VkDevice* pDevice, VkResult result);
- void PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks* pAllocator);
-
- void PostCallRecordCreateAccelerationStructureNV(VkDevice device, const VkAccelerationStructureCreateInfoNV* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkAccelerationStructureNV* pAccelerationStructure, VkResult result);
- void PreCallRecordDestroyAccelerationStructureNV(VkDevice device, VkAccelerationStructureNV accelerationStructure,
- const VkAllocationCallbacks* pAllocator);
- void PostCallRecordCreateBuffer(VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator,
- VkBuffer* pBuffer, VkResult result);
- void PreCallRecordDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator);
- void PostCallRecordCreateBufferView(VkDevice device, const VkBufferViewCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator, VkBufferView* pView, VkResult result);
- void PreCallRecordDestroyBufferView(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* pAllocator);
- void PostCallRecordCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool, VkResult result);
- void PreCallRecordDestroyCommandPool(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator);
- void PostCallRecordCreateDisplayPlaneSurfaceKHR(VkInstance instance, const VkDisplaySurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface,
- VkResult result);
- void PostCallRecordCreateEvent(VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator,
- VkEvent* pEvent, VkResult result);
- void PreCallRecordDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks* pAllocator);
- void PostCallRecordCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool,
- VkResult result);
- void PreCallRecordDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
- const VkAllocationCallbacks* pAllocator);
- void PostCallRecordCreateDescriptorSetLayout(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout,
- VkResult result);
- void PostCallRecordResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags, VkResult result);
- void PostCallRecordResetCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags, VkResult result);
- bool PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
- const VkComputePipelineCreateInfo* pCreateInfos,
- const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, void* pipe_state);
- void PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
- const VkComputePipelineCreateInfo* pCreateInfos,
- const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, VkResult result,
- void* pipe_state);
- void PostCallRecordResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags,
- VkResult result);
- void PreCallRecordDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
- const VkAllocationCallbacks* pAllocator);
- void PostCallRecordCreateDescriptorUpdateTemplate(VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorUpdateTemplateKHR* pDescriptorUpdateTemplate, VkResult result);
- void PostCallRecordCreateDescriptorUpdateTemplateKHR(VkDevice device,
- const VkDescriptorUpdateTemplateCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorUpdateTemplateKHR* pDescriptorUpdateTemplate, VkResult result);
- void PreCallRecordDestroyDescriptorUpdateTemplate(VkDevice device, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
- const VkAllocationCallbacks* pAllocator);
- void PreCallRecordDestroyDescriptorUpdateTemplateKHR(VkDevice device, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
- const VkAllocationCallbacks* pAllocator);
- void PostCallRecordCreateFence(VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator,
- VkFence* pFence, VkResult result);
- void PreCallRecordDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator);
- void PostCallRecordCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer, VkResult result);
- void PreCallRecordDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* pAllocator);
- bool PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
- const VkGraphicsPipelineCreateInfo* pCreateInfos,
- const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, void* cgpl_state);
- void PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
- const VkGraphicsPipelineCreateInfo* pCreateInfos,
- const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, VkResult result,
- void* cgpl_state);
- void PostCallRecordCreateImage(VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator,
- VkImage* pImage, VkResult result);
- void PreCallRecordDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator);
- void PostCallRecordCreateImageView(VkDevice device, const VkImageViewCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator, VkImageView* pView, VkResult result);
- void PreCallRecordDestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks* pAllocator);
-
- void PreCallRecordDestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator);
- void PostCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout,
- VkResult result);
- void PreCallRecordDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout,
- const VkAllocationCallbacks* pAllocator);
- void PostCallRecordCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool, VkResult result);
- void PreCallRecordDestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator);
- void PostCallRecordResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount);
- bool PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
- const VkRayTracingPipelineCreateInfoNV* pCreateInfos,
- const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines,
- void* pipe_state);
- void PostCallRecordCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
- const VkRayTracingPipelineCreateInfoNV* pCreateInfos,
- const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, VkResult result,
- void* pipe_state);
- void PostCallRecordCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass, VkResult result);
- void PostCallRecordCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass, VkResult result);
- void PreCallRecordDestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator);
- void PostCallRecordCreateSampler(VkDevice device, const VkSamplerCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator, VkSampler* pSampler, VkResult result);
- void PreCallRecordDestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks* pAllocator);
- void PostCallRecordCreateSamplerYcbcrConversion(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSamplerYcbcrConversion* pYcbcrConversion, VkResult result);
- void PostCallRecordDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion,
- const VkAllocationCallbacks* pAllocator);
- void PostCallRecordCreateSamplerYcbcrConversionKHR(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSamplerYcbcrConversion* pYcbcrConversion, VkResult result);
- void PostCallRecordDestroySamplerYcbcrConversionKHR(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion,
- const VkAllocationCallbacks* pAllocator);
- void PostCallRecordCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator, VkSemaphore* pSemaphore, VkResult result);
- void PreCallRecordDestroySemaphore(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator);
- void PostCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule, VkResult result,
- void* csm_state);
- void PreCallRecordDestroyShaderModule(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* pAllocator);
- void PreCallRecordDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator);
- void PostCallRecordCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
- const VkSwapchainCreateInfoKHR* pCreateInfos,
- const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains,
- VkResult result);
- void PostCallRecordCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain, VkResult result);
- void PreCallRecordDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator);
-
- // CommandBuffer Control
- void PreCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo);
- void PostCallRecordEndCommandBuffer(VkCommandBuffer commandBuffer, VkResult result);
- void PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence,
- VkResult result);
-
- // Allocate/Free
- void PostCallRecordAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo* pCreateInfo,
- VkCommandBuffer* pCommandBuffer, VkResult result);
- void PostCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo,
- VkDescriptorSet* pDescriptorSets, VkResult result, void* ads_state);
- void PostCallRecordAllocateMemory(VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo,
- const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory, VkResult result);
- void PreCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount,
- const VkCommandBuffer* pCommandBuffers);
- void PreCallRecordFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count,
- const VkDescriptorSet* pDescriptorSets);
- void PreCallRecordFreeMemory(VkDevice device, VkDeviceMemory mem, const VkAllocationCallbacks* pAllocator);
- void PreCallRecordUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
- const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount,
- const VkCopyDescriptorSet* pDescriptorCopies);
- void PreCallRecordUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData);
- void PreCallRecordUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet,
- VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, const void* pData);
-
- // Recorded Commands
- void PreCallRecordCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo);
- void PostCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot, VkFlags flags);
- void PostCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query,
- VkQueryControlFlags flags, uint32_t index);
- void PreCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin,
- VkSubpassContents contents);
- void PreCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin,
- const VkSubpassBeginInfoKHR* pSubpassBeginInfo);
- void PreCallRecordCmdBindDescriptorSets(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
- VkPipelineLayout layout, uint32_t firstSet, uint32_t setCount,
- const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount,
- const uint32_t* pDynamicOffsets);
- void PreCallRecordCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
- VkIndexType indexType);
- void PreCallRecordCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline);
- void PreCallRecordCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout);
- void PreCallRecordCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount,
- const VkBuffer* pBuffers, const VkDeviceSize* pOffsets);
- void PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
- VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit* pRegions,
- VkFilter filter);
- void PostCallRecordCmdBuildAccelerationStructureNV(VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo,
- VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update,
- VkAccelerationStructureNV dst, VkAccelerationStructureNV src,
- VkBuffer scratch, VkDeviceSize scratchOffset);
- void PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
- const VkClearColorValue* pColor, uint32_t rangeCount,
- const VkImageSubresourceRange* pRanges);
- void PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
- const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount,
- const VkImageSubresourceRange* pRanges);
- void PostCallRecordCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst,
- VkAccelerationStructureNV src, VkCopyAccelerationStructureModeNV mode);
- void PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount,
- const VkBufferCopy* pRegions);
- void PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
- VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions);
- void PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
- VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy* pRegions);
- void PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
- VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions);
- void PostCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
- uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride,
- VkQueryResultFlags flags);
- void PostCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z);
- void PostCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset);
- void PostCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex,
- uint32_t firstInstance);
- void PostCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
- uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance);
- void PostCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count,
- uint32_t stride);
- void PostCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count,
- uint32_t stride);
- void PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
- VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
- uint32_t stride);
- void PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
- VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
- uint32_t stride);
- void PreCallRecordCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
- VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
- uint32_t stride);
- void PreCallRecordCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
- uint32_t drawCount, uint32_t stride);
- void PreCallRecordCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask);
- void PostCallRecordCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer);
- void PostCallRecordCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot);
- void PostCallRecordCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index);
- void PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer);
- void PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR* pSubpassEndInfo);
- void PreCallRecordCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBuffersCount,
- const VkCommandBuffer* pCommandBuffers);
- void PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size,
- uint32_t data);
- void PreCallRecordCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo);
- void PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents);
- void PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR* pSubpassBeginInfo,
- const VkSubpassEndInfoKHR* pSubpassEndInfo);
- void PostCallRecordCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
- uint32_t queryCount);
- void PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
- VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
- const VkImageResolve* pRegions);
- void PreCallRecordCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]);
- void PreCallRecordCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp,
- float depthBiasSlopeFactor);
- void PreCallRecordCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds);
- void PreCallRecordCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor,
- uint32_t exclusiveScissorCount, const VkRect2D* pExclusiveScissors);
- void PreCallRecordCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth);
- void PreCallRecordCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern);
- void PreCallRecordCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount,
- const VkRect2D* pScissors);
- void PreCallRecordCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask);
- void PreCallRecordCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference);
- void PreCallRecordCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask);
- void PreCallRecordCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount,
- const VkViewport* pViewports);
- void PreCallRecordCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
- uint32_t viewportCount,
- const VkShadingRatePaletteNV* pShadingRatePalettes);
- void PostCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
- VkDeviceSize dataSize, const void* pData);
-
- // WSI
- void PostCallRecordAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore,
- VkFence fence, uint32_t* pImageIndex, VkResult result);
- void PostCallRecordAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex,
- VkResult result);
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
- void PostCallRecordCreateAndroidSurfaceKHR(VkInstance instance, const VkAndroidSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface, VkResult result);
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-#ifdef VK_USE_PLATFORM_IOS_MVK
- void PostCallRecordCreateIOSSurfaceMVK(VkInstance instance, const VkIOSSurfaceCreateInfoMVK* pCreateInfo,
- const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface, VkResult result);
-#endif // VK_USE_PLATFORM_IOS_MVK
-#ifdef VK_USE_PLATFORM_MACOS_MVK
- void PostCallRecordCreateMacOSSurfaceMVK(VkInstance instance, const VkMacOSSurfaceCreateInfoMVK* pCreateInfo,
- const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface, VkResult result);
-#endif // VK_USE_PLATFORM_MACOS_MVK
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- void PostCallRecordCreateWin32SurfaceKHR(VkInstance instance, const VkWin32SurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface, VkResult result);
-#endif // VK_USE_PLATFORM_WIN32_KHR
-#ifdef VK_USE_PLATFORM_WAYLAND_KHR
- void PostCallRecordCreateWaylandSurfaceKHR(VkInstance instance, const VkWaylandSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface, VkResult result);
-#endif // VK_USE_PLATFORM_WAYLAND_KHR
-#ifdef VK_USE_PLATFORM_XCB_KHR
- void PostCallRecordCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface, VkResult result);
-#endif // VK_USE_PLATFORM_XCB_KHR
-#ifdef VK_USE_PLATFORM_XLIB_KHR
- void PostCallRecordCreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface, VkResult result);
-#endif // VK_USE_PLATFORM_XLIB_KHR
-
-    // State Utility functions
- void AddCommandBufferBinding(std::unordered_set<CMD_BUFFER_STATE*>* cb_bindings, const VulkanTypedHandle& obj,
- CMD_BUFFER_STATE* cb_node);
- void AddCommandBufferBindingAccelerationStructure(CMD_BUFFER_STATE*, ACCELERATION_STRUCTURE_STATE*);
- void AddCommandBufferBindingBuffer(CMD_BUFFER_STATE*, BUFFER_STATE*);
- void AddCommandBufferBindingBufferView(CMD_BUFFER_STATE*, BUFFER_VIEW_STATE*);
- void AddCommandBufferBindingImage(CMD_BUFFER_STATE*, IMAGE_STATE*);
- void AddCommandBufferBindingImageView(CMD_BUFFER_STATE*, IMAGE_VIEW_STATE*);
- void AddCommandBufferBindingSampler(CMD_BUFFER_STATE*, SAMPLER_STATE*);
- void AddMemObjInfo(void* object, const VkDeviceMemory mem, const VkMemoryAllocateInfo* pAllocateInfo);
- void AddFramebufferBinding(CMD_BUFFER_STATE* cb_state, FRAMEBUFFER_STATE* fb_state);
- void ClearCmdBufAndMemReferences(CMD_BUFFER_STATE* cb_node);
- void ClearMemoryObjectBindings(const VulkanTypedHandle& typed_handle);
- void ClearMemoryObjectBinding(const VulkanTypedHandle& typed_handle, VkDeviceMemory mem);
- void DecrementBoundResources(CMD_BUFFER_STATE const* cb_node);
- void DeleteDescriptorSetPools();
- void FreeCommandBufferStates(COMMAND_POOL_STATE* pool_state, const uint32_t command_buffer_count,
- const VkCommandBuffer* command_buffers);
- void FreeDescriptorSet(cvdescriptorset::DescriptorSet* descriptor_set);
- BASE_NODE* GetStateStructPtrFromObject(const VulkanTypedHandle& object_struct);
- void IncrementBoundObjects(CMD_BUFFER_STATE const* cb_node);
- void IncrementResources(CMD_BUFFER_STATE* cb_node);
- void InsertAccelerationStructureMemoryRange(VkAccelerationStructureNV as, DEVICE_MEMORY_STATE* mem_info,
- VkDeviceSize mem_offset, const VkMemoryRequirements& mem_reqs);
- void InsertBufferMemoryRange(VkBuffer buffer, DEVICE_MEMORY_STATE* mem_info, VkDeviceSize mem_offset,
- const VkMemoryRequirements& mem_reqs);
- void InsertImageMemoryRange(VkImage image, DEVICE_MEMORY_STATE* mem_info, VkDeviceSize mem_offset,
- VkMemoryRequirements mem_reqs, bool is_linear);
- void InsertMemoryRange(const VulkanTypedHandle& typed_handle, DEVICE_MEMORY_STATE* mem_info, VkDeviceSize memoryOffset,
- VkMemoryRequirements memRequirements, bool is_linear);
- void InvalidateCommandBuffers(std::unordered_set<CMD_BUFFER_STATE*> const& cb_nodes, const VulkanTypedHandle& obj);
- void PerformAllocateDescriptorSets(const VkDescriptorSetAllocateInfo*, const VkDescriptorSet*,
- const cvdescriptorset::AllocateDescriptorSetsData*);
- void PerformUpdateDescriptorSetsWithTemplateKHR(VkDescriptorSet descriptorSet, const TEMPLATE_STATE* template_state,
- const void* pData);
- void RecordAcquireNextImageState(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore,
- VkFence fence, uint32_t* pImageIndex);
- void RecordCmdBeginQuery(CMD_BUFFER_STATE* cb_state, const QueryObject& query_obj);
- void RecordCmdEndQuery(CMD_BUFFER_STATE* cb_state, const QueryObject& query_obj);
- void RecordCmdEndRenderPassState(VkCommandBuffer commandBuffer);
- void RecordCmdBeginRenderPassState(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin,
- const VkSubpassContents contents);
- void RecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents);
- void RecordCreateImageANDROID(const VkImageCreateInfo* create_info, IMAGE_STATE* is_node);
- void RecordCreateRenderPassState(RenderPassCreateVersion rp_version, std::shared_ptr<RENDER_PASS_STATE>& render_pass,
- VkRenderPass* pRenderPass);
- void RecordCreateSamplerYcbcrConversionState(const VkSamplerYcbcrConversionCreateInfo* create_info,
- VkSamplerYcbcrConversion ycbcr_conversion);
- void RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo* create_info,
- VkSamplerYcbcrConversion ycbcr_conversion);
- void RecordCreateSwapchainState(VkResult result, const VkSwapchainCreateInfoKHR* pCreateInfo, VkSwapchainKHR* pSwapchain,
- SURFACE_STATE* surface_state, SWAPCHAIN_NODE* old_swapchain_state);
- void RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion);
- void RecordEnumeratePhysicalDeviceGroupsState(uint32_t* pPhysicalDeviceGroupCount,
- VkPhysicalDeviceGroupPropertiesKHR* pPhysicalDeviceGroupProperties);
- void RecordGetBufferMemoryRequirementsState(VkBuffer buffer, VkMemoryRequirements* pMemoryRequirements);
- void RecordGetDeviceQueueState(uint32_t queue_family_index, VkQueue queue);
- void RecordGetImageMemoryRequiementsState(VkImage image, VkMemoryRequirements* pMemoryRequirements);
- void RecordGetPhysicalDeviceDisplayPlanePropertiesState(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount,
- void* pProperties);
- void RecordUpdateDescriptorSetWithTemplateState(VkDescriptorSet descriptorSet,
- VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, const void* pData);
- void RecordCreateDescriptorUpdateTemplateState(const VkDescriptorUpdateTemplateCreateInfoKHR* pCreateInfo,
- VkDescriptorUpdateTemplateKHR* pDescriptorUpdateTemplate);
- void RecordPipelineShaderStage(const VkPipelineShaderStageCreateInfo* pStage, PIPELINE_STATE* pipeline,
- PIPELINE_STATE::StageState* stage_state);
- void RecordRenderPassDAG(RenderPassCreateVersion rp_version, const VkRenderPassCreateInfo2KHR* pCreateInfo,
- RENDER_PASS_STATE* render_pass);
- void RecordVulkanSurface(VkSurfaceKHR* pSurface);
- void RemoveAccelerationStructureMemoryRange(uint64_t handle, DEVICE_MEMORY_STATE* mem_info);
- void RemoveCommandBufferBinding(const VulkanTypedHandle& object, CMD_BUFFER_STATE* cb_node);
- void RemoveBufferMemoryRange(uint64_t handle, DEVICE_MEMORY_STATE* mem_info);
- void RemoveImageMemoryRange(uint64_t handle, DEVICE_MEMORY_STATE* mem_info);
- void ResetCommandBufferState(const VkCommandBuffer cb);
- void RetireWorkOnQueue(QUEUE_STATE* pQueue, uint64_t seq, bool switch_finished_queries);
- void SetMemBinding(VkDeviceMemory mem, BINDABLE* mem_binding, VkDeviceSize memory_offset,
- const VulkanTypedHandle& typed_handle);
- bool SetQueryState(VkQueue queue, VkCommandBuffer commandBuffer, QueryObject object, QueryState value);
- bool SetQueryStateMulti(VkQueue queue, VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
- uint32_t queryCount, QueryState value);
- void UpdateBindBufferMemoryState(VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset);
- void UpdateBindImageMemoryState(const VkBindImageMemoryInfo& bindInfo);
- void UpdateLastBoundDescriptorSets(CMD_BUFFER_STATE* cb_state, VkPipelineBindPoint pipeline_bind_point,
- const PIPELINE_LAYOUT_STATE* pipeline_layout, uint32_t first_set, uint32_t set_count,
- const VkDescriptorSet* pDescriptorSets, cvdescriptorset::DescriptorSet* push_descriptor_set,
- uint32_t dynamic_offset_count, const uint32_t* p_dynamic_offsets);
- void UpdateStateCmdDrawDispatchType(CMD_BUFFER_STATE* cb_state, VkPipelineBindPoint bind_point);
- void UpdateStateCmdDrawType(CMD_BUFFER_STATE* cb_state, VkPipelineBindPoint bind_point);
- void UpdateDrawState(CMD_BUFFER_STATE* cb_state, const VkPipelineBindPoint bind_point);
-
DeviceFeatures enabled_features = {};
// Device specific data
VkPhysicalDeviceMemoryProperties phys_dev_mem_props = {};
VkPhysicalDeviceProperties phys_dev_props = {};
- uint32_t physical_device_count;
-
// Device extension properties -- storing properties gathered from VkPhysicalDeviceProperties2KHR::pNext chain
struct DeviceExtensionProperties {
uint32_t max_push_descriptors; // from VkPhysicalDevicePushDescriptorPropertiesKHR::maxPushDescriptors
@@ -865,294 +237,347 @@ class ValidationStateTracker : public ValidationObject {
VkPhysicalDeviceInlineUniformBlockPropertiesEXT inline_uniform_block_props;
VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT vtx_attrib_divisor_props;
VkPhysicalDeviceDepthStencilResolvePropertiesKHR depth_stencil_resolve_props;
- VkPhysicalDeviceCooperativeMatrixPropertiesNV cooperative_matrix_props;
- VkPhysicalDeviceTransformFeedbackPropertiesEXT transform_feedback_props;
- VkPhysicalDeviceSubgroupProperties subgroup_props;
- VkPhysicalDeviceRayTracingPropertiesNV ray_tracing_props;
- VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT texel_buffer_alignment_props;
- VkPhysicalDeviceFragmentDensityMapPropertiesEXT fragment_density_map_props;
};
DeviceExtensionProperties phys_dev_ext_props = {};
- std::vector<VkCooperativeMatrixPropertiesNV> cooperative_matrix_properties;
-
-    // Map from queue family index to queue count
- unordered_map<uint32_t, uint32_t> queue_family_index_map;
-
- template <typename ExtProp>
- void GetPhysicalDeviceExtProperties(VkPhysicalDevice gpu, bool enabled, ExtProp* ext_prop) {
- assert(ext_prop);
- if (enabled) {
- *ext_prop = lvl_init_struct<ExtProp>();
- auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(ext_prop);
- DispatchGetPhysicalDeviceProperties2KHR(gpu, &prop2);
- }
- }
-
-    // This controls output of a state tracking warning (so that it only emits once)
bool external_sync_warning = false;
-};
+ uint32_t api_version = 0;
+ GpuValidationState gpu_validation_state = {};
+ uint32_t physical_device_count;
-class CoreChecks : public ValidationStateTracker {
- public:
- using StateTracker = ValidationStateTracker;
- std::unordered_set<uint64_t> ahb_ext_formats_set;
- GlobalQFOTransferBarrierMap<VkImageMemoryBarrier> qfo_release_image_barrier_map;
- GlobalQFOTransferBarrierMap<VkBufferMemoryBarrier> qfo_release_buffer_barrier_map;
- unordered_map<VkImage, std::vector<ImageSubresourcePair>> imageSubresourceMap;
- using ImageSubresPairLayoutMap = std::unordered_map<ImageSubresourcePair, IMAGE_LAYOUT_STATE>;
- ImageSubresPairLayoutMap imageLayoutMap;
-
- std::unique_ptr<GpuValidationState> gpu_validation_state;
-
- bool VerifyQueueStateToSeq(QUEUE_STATE* initial_queue, uint64_t initial_seq);
- bool ValidateSetMemBinding(VkDeviceMemory mem, const VulkanTypedHandle& typed_handle, const char* apiName) const;
- bool SetSparseMemBinding(MEM_BINDING binding, const VulkanTypedHandle& typed_handle);
- bool ValidateDeviceQueueFamily(uint32_t queue_family, const char* cmd_name, const char* parameter_name, const char* error_code,
- bool optional) const;
- bool ValidateBindBufferMemory(VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset, const char* api_name) const;
- bool ValidateGetImageMemoryRequirements2(const VkImageMemoryRequirementsInfo2* pInfo) const;
- bool CheckCommandBuffersInFlight(const COMMAND_POOL_STATE* pPool, const char* action, const char* error_code) const;
- bool CheckCommandBufferInFlight(const CMD_BUFFER_STATE* cb_node, const char* action, const char* error_code) const;
- bool VerifyQueueStateToFence(VkFence fence);
- bool VerifyWaitFenceState(VkFence fence, const char* apiCall);
- void RetireFence(VkFence fence);
- void StoreMemRanges(VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size);
- bool ValidateIdleDescriptorSet(VkDescriptorSet set, const char* func_str);
- void InitializeAndTrackMemory(VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size, void** ppData);
- bool ValidatePipelineLocked(std::vector<std::unique_ptr<PIPELINE_STATE>> const& pPipelines, int pipelineIndex) const;
- bool ValidatePipelineUnlocked(const PIPELINE_STATE* pPipeline, uint32_t pipelineIndex) const;
- bool ValidImageBufferQueue(const CMD_BUFFER_STATE* cb_node, const VulkanTypedHandle& object, VkQueue queue, uint32_t count,
- const uint32_t* indices) const;
- bool ValidateFenceForSubmit(const FENCE_STATE* pFence) const;
- bool ValidateSemaphoresForSubmit(VkQueue queue, const VkSubmitInfo* submit,
- std::unordered_set<VkSemaphore>* unsignaled_sema_arg,
- std::unordered_set<VkSemaphore>* signaled_sema_arg,
- std::unordered_set<VkSemaphore>* internal_sema_arg) const;
- bool ValidateCommandBuffersForSubmit(VkQueue queue, const VkSubmitInfo* submit,
- ImageSubresPairLayoutMap* localImageLayoutMap_arg,
- std::vector<VkCommandBuffer>* current_cmds_arg) const;
- bool ValidateStatus(const CMD_BUFFER_STATE* pNode, CBStatusFlags status_mask, VkFlags msg_flags, const char* fail_msg,
- const char* msg_code) const;
- bool ValidateDrawStateFlags(const CMD_BUFFER_STATE* pCB, const PIPELINE_STATE* pPipe, bool indexed, const char* msg_code) const;
- bool LogInvalidAttachmentMessage(const char* type1_string, const RENDER_PASS_STATE* rp1_state, const char* type2_string,
- const RENDER_PASS_STATE* rp2_state, uint32_t primary_attach, uint32_t secondary_attach,
- const char* msg, const char* caller, const char* error_code) const;
- bool ValidateStageMaskGsTsEnables(VkPipelineStageFlags stageMask, const char* caller, const char* geo_error_id,
- const char* tess_error_id, const char* mesh_error_id, const char* task_error_id) const;
- bool ValidateMapMemRange(VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size);
- bool ValidatePushConstantRange(const uint32_t offset, const uint32_t size, const char* caller_name, uint32_t index) const;
- bool ValidateRenderPassDAG(RenderPassCreateVersion rp_version, const VkRenderPassCreateInfo2KHR* pCreateInfo) const;
- bool ValidateAttachmentCompatibility(const char* type1_string, const RENDER_PASS_STATE* rp1_state, const char* type2_string,
- const RENDER_PASS_STATE* rp2_state, uint32_t primary_attach, uint32_t secondary_attach,
- const char* caller, const char* error_code) const;
- bool ValidateSubpassCompatibility(const char* type1_string, const RENDER_PASS_STATE* rp1_state, const char* type2_string,
- const RENDER_PASS_STATE* rp2_state, const int subpass, const char* caller,
- const char* error_code) const;
- bool ValidateRenderPassCompatibility(const char* type1_string, const RENDER_PASS_STATE* rp1_state, const char* type2_string,
- const RENDER_PASS_STATE* rp2_state, const char* caller, const char* error_code) const;
- bool ReportInvalidCommandBuffer(const CMD_BUFFER_STATE* cb_state, const char* call_source) const;
- void InitGpuValidation();
- bool ValidateQueueFamilyIndex(const PHYSICAL_DEVICE_STATE* pd_state, uint32_t requested_queue_family, const char* err_code,
- const char* cmd_name, const char* queue_family_var_name);
- bool ValidateDeviceQueueCreateInfos(const PHYSICAL_DEVICE_STATE* pd_state, uint32_t info_count,
- const VkDeviceQueueCreateInfo* infos);
-
- bool ValidatePipelineVertexDivisors(std::vector<std::unique_ptr<PIPELINE_STATE>> const& pipe_state_vec, const uint32_t count,
- const VkGraphicsPipelineCreateInfo* pipe_cis) const;
- bool ValidateImageBarrierImage(const char* funcName, CMD_BUFFER_STATE const* cb_state, VkFramebuffer framebuffer,
- uint32_t active_subpass, const safe_VkSubpassDescription2KHR& sub_desc,
- const VulkanTypedHandle& rp_handle, uint32_t img_index, const VkImageMemoryBarrier& img_barrier);
- bool ValidateCmdBeginRenderPass(VkCommandBuffer commandBuffer, RenderPassCreateVersion rp_version,
- const VkRenderPassBeginInfo* pRenderPassBegin) const;
- bool ValidateDependencies(FRAMEBUFFER_STATE const* framebuffer, RENDER_PASS_STATE const* renderPass) const;
- bool ValidateBarriers(const char* funcName, CMD_BUFFER_STATE* cb_state, VkPipelineStageFlags src_stage_mask,
- VkPipelineStageFlags dst_stage_mask, uint32_t memBarrierCount, const VkMemoryBarrier* pMemBarriers,
- uint32_t bufferBarrierCount, const VkBufferMemoryBarrier* pBufferMemBarriers,
- uint32_t imageMemBarrierCount, const VkImageMemoryBarrier* pImageMemBarriers);
- bool ValidateBarrierQueueFamilies(const char* func_name, CMD_BUFFER_STATE* cb_state, const VkImageMemoryBarrier& barrier,
- const IMAGE_STATE* state_data);
- bool ValidateBarrierQueueFamilies(const char* func_name, CMD_BUFFER_STATE* cb_state, const VkBufferMemoryBarrier& barrier,
- const BUFFER_STATE* state_data);
- bool ValidateCreateSwapchain(const char* func_name, VkSwapchainCreateInfoKHR const* pCreateInfo,
- const SURFACE_STATE* surface_state, const SWAPCHAIN_NODE* old_swapchain_state) const;
- void RecordCmdPushDescriptorSetState(CMD_BUFFER_STATE* cb_state, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
- uint32_t set, uint32_t descriptorWriteCount,
+    // Declarations for helper functions
+ cvdescriptorset::DescriptorSet* GetSetNode(VkDescriptorSet);
+ DESCRIPTOR_POOL_STATE* GetDescriptorPoolState(const VkDescriptorPool);
+ BUFFER_STATE* GetBufferState(VkBuffer);
+ IMAGE_STATE* GetImageState(VkImage);
+ DEVICE_MEM_INFO* GetMemObjInfo(VkDeviceMemory);
+ BUFFER_VIEW_STATE* GetBufferViewState(VkBufferView);
+ SAMPLER_STATE* GetSamplerState(VkSampler);
+ IMAGE_VIEW_STATE* GetAttachmentImageViewState(FRAMEBUFFER_STATE* framebuffer, uint32_t index);
+ IMAGE_VIEW_STATE* GetImageViewState(VkImageView);
+ SWAPCHAIN_NODE* GetSwapchainNode(VkSwapchainKHR);
+ GLOBAL_CB_NODE* GetCBNode(const VkCommandBuffer cb);
+ PIPELINE_STATE* GetPipelineState(VkPipeline pipeline);
+ RENDER_PASS_STATE* GetRenderPassState(VkRenderPass renderpass);
+ std::shared_ptr<RENDER_PASS_STATE> GetRenderPassStateSharedPtr(VkRenderPass renderpass);
+ FRAMEBUFFER_STATE* GetFramebufferState(VkFramebuffer framebuffer);
+ COMMAND_POOL_NODE* GetCommandPoolNode(VkCommandPool pool);
+ shader_module const* GetShaderModuleState(VkShaderModule module);
+ const DeviceFeatures* GetEnabledFeatures();
+ FENCE_NODE* GetFenceNode(VkFence fence);
+ EVENT_STATE* GetEventNode(VkEvent event);
+ QUERY_POOL_NODE* GetQueryPoolNode(VkQueryPool query_pool);
+ QUEUE_STATE* GetQueueState(VkQueue queue);
+ SEMAPHORE_NODE* GetSemaphoreNode(VkSemaphore semaphore);
+ PHYSICAL_DEVICE_STATE* GetPhysicalDeviceState(VkPhysicalDevice phys);
+ PHYSICAL_DEVICE_STATE* GetPhysicalDeviceState();
+ SURFACE_STATE* GetSurfaceState(VkSurfaceKHR surface);
+ BINDABLE* GetObjectMemBinding(uint64_t handle, VulkanObjectType type);
+
+ bool VerifyQueueStateToSeq(layer_data* dev_data, QUEUE_STATE* initial_queue, uint64_t initial_seq);
+ void ClearCmdBufAndMemReferences(layer_data* dev_data, GLOBAL_CB_NODE* cb_node);
+ void ClearMemoryObjectBinding(uint64_t handle, VulkanObjectType type, VkDeviceMemory mem);
+ void ResetCommandBufferState(layer_data* dev_data, const VkCommandBuffer cb);
+ void SetMemBinding(layer_data* dev_data, VkDeviceMemory mem, BINDABLE* mem_binding, VkDeviceSize memory_offset, uint64_t handle,
+ VulkanObjectType type);
+ bool ValidateSetMemBinding(layer_data* dev_data, VkDeviceMemory mem, uint64_t handle, VulkanObjectType type,
+ const char* apiName);
+ bool SetSparseMemBinding(layer_data* dev_data, MEM_BINDING binding, uint64_t handle, VulkanObjectType type);
+ bool ValidateDeviceQueueFamily(layer_data* device_data, uint32_t queue_family, const char* cmd_name, const char* parameter_name,
+ const char* error_code, bool optional);
+ BASE_NODE* GetStateStructPtrFromObject(layer_data* dev_data, VK_OBJECT object_struct);
+ void RemoveCommandBufferBinding(layer_data* dev_data, VK_OBJECT const* object, GLOBAL_CB_NODE* cb_node);
+ bool ValidateBindBufferMemory(layer_data* device_data, VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset,
+ const char* api_name);
+ void RecordGetBufferMemoryRequirementsState(layer_data* device_data, VkBuffer buffer,
+ VkMemoryRequirements* pMemoryRequirements);
+ void UpdateBindBufferMemoryState(layer_data* device_data, VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset);
+ PIPELINE_LAYOUT_NODE const* GetPipelineLayout(layer_data const* dev_data, VkPipelineLayout pipeLayout);
+ const TEMPLATE_STATE* GetDescriptorTemplateState(const layer_data* dev_data,
+ VkDescriptorUpdateTemplateKHR descriptor_update_template);
+ bool ValidateGetImageMemoryRequirements2(layer_data* dev_data, const VkImageMemoryRequirementsInfo2* pInfo);
+ void RecordGetImageMemoryRequiementsState(layer_data* device_data, VkImage image, VkMemoryRequirements* pMemoryRequirements);
+ void FreeCommandBufferStates(layer_data* dev_data, COMMAND_POOL_NODE* pool_state, const uint32_t command_buffer_count,
+ const VkCommandBuffer* command_buffers);
+ bool CheckCommandBuffersInFlight(layer_data* dev_data, COMMAND_POOL_NODE* pPool, const char* action, const char* error_code);
+ bool CheckCommandBufferInFlight(layer_data* dev_data, const GLOBAL_CB_NODE* cb_node, const char* action,
+ const char* error_code);
+ bool VerifyQueueStateToFence(layer_data* dev_data, VkFence fence);
+ void DecrementBoundResources(layer_data* dev_data, GLOBAL_CB_NODE const* cb_node);
+ bool VerifyWaitFenceState(layer_data* dev_data, VkFence fence, const char* apiCall);
+ void RetireFence(layer_data* dev_data, VkFence fence);
+ void StoreMemRanges(layer_data* dev_data, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size);
+ bool ValidateIdleDescriptorSet(const layer_data* dev_data, VkDescriptorSet set, const char* func_str);
+ void InitializeAndTrackMemory(layer_data* dev_data, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size, void** ppData);
+ bool ValidatePipelineLocked(layer_data* dev_data, std::vector<std::unique_ptr<PIPELINE_STATE>> const& pPipelines,
+ int pipelineIndex);
+ bool ValidatePipelineUnlocked(layer_data* dev_data, std::vector<std::unique_ptr<PIPELINE_STATE>> const& pPipelines,
+ int pipelineIndex);
+ void FreeDescriptorSet(layer_data* dev_data, cvdescriptorset::DescriptorSet* descriptor_set);
+ void DeletePools(layer_data* dev_data);
+ bool ValidImageBufferQueue(layer_data* dev_data, GLOBAL_CB_NODE* cb_node, const VK_OBJECT* object, VkQueue queue,
+ uint32_t count, const uint32_t* indices);
+ bool ValidateFenceForSubmit(layer_data* dev_data, FENCE_NODE* pFence);
+ void AddMemObjInfo(layer_data* dev_data, void* object, const VkDeviceMemory mem, const VkMemoryAllocateInfo* pAllocateInfo);
+ bool ValidateStatus(layer_data* dev_data, GLOBAL_CB_NODE* pNode, CBStatusFlags status_mask, VkFlags msg_flags,
+ const char* fail_msg, const char* msg_code);
+ bool ValidateDrawStateFlags(layer_data* dev_data, GLOBAL_CB_NODE* pCB, const PIPELINE_STATE* pPipe, bool indexed,
+ const char* msg_code);
+ bool LogInvalidAttachmentMessage(layer_data const* dev_data, const char* type1_string, const RENDER_PASS_STATE* rp1_state,
+ const char* type2_string, const RENDER_PASS_STATE* rp2_state, uint32_t primary_attach,
+ uint32_t secondary_attach, const char* msg, const char* caller, const char* error_code);
+ bool ValidateAttachmentCompatibility(layer_data const* dev_data, const char* type1_string, const RENDER_PASS_STATE* rp1_state,
+ const char* type2_string, const RENDER_PASS_STATE* rp2_state, uint32_t primary_attach,
+ uint32_t secondary_attach, const char* caller, const char* error_code);
+ bool ValidateSubpassCompatibility(layer_data const* dev_data, const char* type1_string, const RENDER_PASS_STATE* rp1_state,
+ const char* type2_string, const RENDER_PASS_STATE* rp2_state, const int subpass,
+ const char* caller, const char* error_code);
+ bool ValidateRenderPassCompatibility(layer_data const* dev_data, const char* type1_string, const RENDER_PASS_STATE* rp1_state,
+ const char* type2_string, const RENDER_PASS_STATE* rp2_state, const char* caller,
+ const char* error_code);
+ void UpdateDrawState(layer_data* dev_data, GLOBAL_CB_NODE* cb_state, const VkPipelineBindPoint bind_point);
+ bool ReportInvalidCommandBuffer(layer_data* dev_data, const GLOBAL_CB_NODE* cb_state, const char* call_source);
+ void InitGpuValidation(layer_data* instance_data);
+
+ bool ValidatePipelineVertexDivisors(layer_data* dev_data, std::vector<std::unique_ptr<PIPELINE_STATE>> const& pipe_state_vec,
+ const uint32_t count, const VkGraphicsPipelineCreateInfo* pipe_cis);
+ void AddFramebufferBinding(layer_data* dev_data, GLOBAL_CB_NODE* cb_state, FRAMEBUFFER_STATE* fb_state);
+ bool ValidateImageBarrierImage(layer_data* device_data, const char* funcName, GLOBAL_CB_NODE const* cb_state,
+ VkFramebuffer framebuffer, uint32_t active_subpass,
+ const safe_VkSubpassDescription2KHR& sub_desc, uint64_t rp_handle, uint32_t img_index,
+ const VkImageMemoryBarrier& img_barrier);
+ void RecordCmdBeginRenderPassState(layer_data* device_data, VkCommandBuffer commandBuffer,
+ const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassContents contents);
+ bool ValidateCmdBeginRenderPass(layer_data* device_data, VkCommandBuffer commandBuffer, RenderPassCreateVersion rp_version,
+ const VkRenderPassBeginInfo* pRenderPassBegin);
+ bool ValidateDependencies(layer_data* dev_data, FRAMEBUFFER_STATE const* framebuffer, RENDER_PASS_STATE const* renderPass);
+ bool ValidateBarriers(layer_data* device_data, const char* funcName, GLOBAL_CB_NODE* cb_state,
+ VkPipelineStageFlags src_stage_mask, VkPipelineStageFlags dst_stage_mask, uint32_t memBarrierCount,
+ const VkMemoryBarrier* pMemBarriers, uint32_t bufferBarrierCount,
+ const VkBufferMemoryBarrier* pBufferMemBarriers, uint32_t imageMemBarrierCount,
+ const VkImageMemoryBarrier* pImageMemBarriers);
+ bool ValidateCreateSwapchain(layer_data* device_data, const char* func_name, VkSwapchainCreateInfoKHR const* pCreateInfo,
+ SURFACE_STATE* surface_state, SWAPCHAIN_NODE* old_swapchain_state);
+ void RecordCmdPushDescriptorSetState(layer_data* device_data, GLOBAL_CB_NODE* cb_state, VkPipelineBindPoint pipelineBindPoint,
+ VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount,
const VkWriteDescriptorSet* pDescriptorWrites);
- bool ValidatePipelineBindPoint(const CMD_BUFFER_STATE* cb_state, VkPipelineBindPoint bind_point, const char* func_name,
- const std::map<VkPipelineBindPoint, std::string>& bind_errors) const;
- bool ValidateMemoryIsMapped(const char* funcName, uint32_t memRangeCount, const VkMappedMemoryRange* pMemRanges);
- bool ValidateAndCopyNoncoherentMemoryToDriver(uint32_t mem_range_count, const VkMappedMemoryRange* mem_ranges);
- void CopyNoncoherentMemoryFromDriver(uint32_t mem_range_count, const VkMappedMemoryRange* mem_ranges);
- bool ValidateMappedMemoryRangeDeviceLimits(const char* func_name, uint32_t mem_range_count,
+ bool ValidatePipelineBindPoint(layer_data* device_data, GLOBAL_CB_NODE* cb_state, VkPipelineBindPoint bind_point,
+ const char* func_name, const std::map<VkPipelineBindPoint, std::string>& bind_errors);
+ bool ValidateMemoryIsMapped(layer_data* dev_data, const char* funcName, uint32_t memRangeCount,
+ const VkMappedMemoryRange* pMemRanges);
+ bool ValidateAndCopyNoncoherentMemoryToDriver(layer_data* dev_data, uint32_t mem_range_count,
+ const VkMappedMemoryRange* mem_ranges);
+ void CopyNoncoherentMemoryFromDriver(layer_data* dev_data, uint32_t mem_range_count, const VkMappedMemoryRange* mem_ranges);
+ bool ValidateMappedMemoryRangeDeviceLimits(layer_data* dev_data, const char* func_name, uint32_t mem_range_count,
const VkMappedMemoryRange* mem_ranges);
- BarrierOperationsType ComputeBarrierOperationsType(CMD_BUFFER_STATE* cb_state, uint32_t buffer_barrier_count,
- const VkBufferMemoryBarrier* buffer_barriers, uint32_t image_barrier_count,
- const VkImageMemoryBarrier* image_barriers);
- bool ValidateStageMasksAgainstQueueCapabilities(CMD_BUFFER_STATE const* cb_state, VkPipelineStageFlags source_stage_mask,
- VkPipelineStageFlags dest_stage_mask, BarrierOperationsType barrier_op_type,
- const char* function, const char* error_code);
+ BarrierOperationsType ComputeBarrierOperationsType(layer_data* device_data, GLOBAL_CB_NODE* cb_state,
+ uint32_t buffer_barrier_count, const VkBufferMemoryBarrier* buffer_barriers,
+ uint32_t image_barrier_count, const VkImageMemoryBarrier* image_barriers);
+ bool ValidateStageMasksAgainstQueueCapabilities(layer_data* dev_data, GLOBAL_CB_NODE const* cb_state,
+ VkPipelineStageFlags source_stage_mask, VkPipelineStageFlags dest_stage_mask,
+ BarrierOperationsType barrier_op_type, const char* function,
+ const char* error_code);
bool SetEventStageMask(VkQueue queue, VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask);
- bool ValidateRenderPassImageBarriers(const char* funcName, CMD_BUFFER_STATE* cb_state, uint32_t active_subpass,
- const safe_VkSubpassDescription2KHR& sub_desc, const VulkanTypedHandle& rp_handle,
+ bool ValidateRenderPassImageBarriers(layer_data* device_data, const char* funcName, GLOBAL_CB_NODE* cb_state,
+ uint32_t active_subpass, const safe_VkSubpassDescription2KHR& sub_desc, uint64_t rp_handle,
const safe_VkSubpassDependency2KHR* dependencies,
const std::vector<uint32_t>& self_dependencies, uint32_t image_mem_barrier_count,
const VkImageMemoryBarrier* image_barriers);
- bool ValidateSecondaryCommandBufferState(const CMD_BUFFER_STATE* pCB, const CMD_BUFFER_STATE* pSubCB);
- bool ValidateFramebuffer(VkCommandBuffer primaryBuffer, const CMD_BUFFER_STATE* pCB, VkCommandBuffer secondaryBuffer,
- const CMD_BUFFER_STATE* pSubCB, const char* caller);
- bool ValidateDescriptorUpdateTemplate(const char* func_name, const VkDescriptorUpdateTemplateCreateInfoKHR* pCreateInfo);
- bool ValidateCreateSamplerYcbcrConversion(const char* func_name, const VkSamplerYcbcrConversionCreateInfo* create_info) const;
- bool ValidateImportFence(VkFence fence, const char* caller_name);
- void RecordImportFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type, VkFenceImportFlagsKHR flags);
- void RecordGetExternalFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type);
- bool ValidateAcquireNextImage(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence,
- uint32_t* pImageIndex, const char* func_name) const;
- bool VerifyRenderAreaBounds(const VkRenderPassBeginInfo* pRenderPassBegin) const;
- bool VerifyFramebufferAndRenderPassImageViews(const VkRenderPassBeginInfo* pRenderPassBeginInfo) const;
- bool ValidatePrimaryCommandBuffer(const CMD_BUFFER_STATE* pCB, char const* cmd_name, const char* error_code) const;
- void RecordCmdNextSubpassLayouts(VkCommandBuffer commandBuffer, VkSubpassContents contents);
- bool ValidateCmdEndRenderPass(RenderPassCreateVersion rp_version, VkCommandBuffer commandBuffer) const;
- void RecordCmdEndRenderPassLayouts(VkCommandBuffer commandBuffer);
- bool ValidateFramebufferCreateInfo(const VkFramebufferCreateInfo* pCreateInfo) const;
- bool MatchUsage(uint32_t count, const VkAttachmentReference2KHR* attachments, const VkFramebufferCreateInfo* fbci,
- VkImageUsageFlagBits usage_flag, const char* error_code) const;
- bool IsImageLayoutReadOnly(VkImageLayout layout) const;
- bool CheckDependencyExists(const uint32_t subpass, const VkImageLayout layout,
- const std::vector<SubpassLayout>& dependent_subpasses, const std::vector<DAGNode>& subpass_to_node,
- bool& skip) const;
- bool CheckPreserved(const VkRenderPassCreateInfo2KHR* pCreateInfo, const int index, const uint32_t attachment,
- const std::vector<DAGNode>& subpass_to_node, int depth, bool& skip) const;
- bool ValidateBindImageMemory(const VkBindImageMemoryInfo& bindInfo, const char* api_name) const;
- bool ValidateGetPhysicalDeviceDisplayPlanePropertiesKHRQuery(VkPhysicalDevice physicalDevice, uint32_t planeIndex,
- const char* api_name) const;
- bool ValidateQuery(VkQueue queue, CMD_BUFFER_STATE* pCB, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount,
- VkQueryResultFlags flags) const;
- QueryState GetQueryState(const QUEUE_STATE* queue_data, VkQueryPool queryPool, uint32_t queryIndex) const;
- bool VerifyQueryIsReset(VkQueue queue, VkCommandBuffer commandBuffer, QueryObject query_obj) const;
- bool ValidateImportSemaphore(VkSemaphore semaphore, const char* caller_name);
- void RecordImportSemaphoreState(VkSemaphore semaphore, VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type,
- VkSemaphoreImportFlagsKHR flags);
- void RecordGetExternalSemaphoreState(VkSemaphore semaphore, VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type);
- bool ValidateBeginQuery(const CMD_BUFFER_STATE* cb_state, const QueryObject& query_obj, VkFlags flags, CMD_TYPE cmd,
- const char* cmd_name, const char* vuid_queue_flags, const char* vuid_queue_feedback,
- const char* vuid_queue_occlusion, const char* vuid_precise, const char* vuid_query_count) const;
- bool ValidateCmdEndQuery(const CMD_BUFFER_STATE* cb_state, const QueryObject& query_obj, CMD_TYPE cmd, const char* cmd_name,
- const char* vuid_queue_flags, const char* vuid_active_queries) const;
- bool ValidateCmdDrawType(VkCommandBuffer cmd_buffer, bool indexed, VkPipelineBindPoint bind_point, CMD_TYPE cmd_type,
- const char* caller, VkQueueFlags queue_flags, const char* queue_flag_code,
- const char* renderpass_msg_code, const char* pipebound_msg_code,
- const char* dynamic_state_msg_code) const;
- bool ValidateCmdNextSubpass(RenderPassCreateVersion rp_version, VkCommandBuffer commandBuffer) const;
- bool ValidateInsertMemoryRange(const VulkanTypedHandle& typed_handle, const DEVICE_MEMORY_STATE* mem_info,
- VkDeviceSize memoryOffset, const VkMemoryRequirements& memRequirements, bool is_linear,
- const char* api_name) const;
- bool ValidateInsertImageMemoryRange(VkImage image, const DEVICE_MEMORY_STATE* mem_info, VkDeviceSize mem_offset,
- const VkMemoryRequirements& mem_reqs, bool is_linear, const char* api_name) const;
- bool ValidateInsertBufferMemoryRange(VkBuffer buffer, const DEVICE_MEMORY_STATE* mem_info, VkDeviceSize mem_offset,
- const VkMemoryRequirements& mem_reqs, const char* api_name) const;
- bool ValidateInsertAccelerationStructureMemoryRange(VkAccelerationStructureNV as, const DEVICE_MEMORY_STATE* mem_info,
- VkDeviceSize mem_offset, const VkMemoryRequirements& mem_reqs,
- const char* api_name) const;
-
- bool ValidateMemoryTypes(const DEVICE_MEMORY_STATE* mem_info, const uint32_t memory_type_bits, const char* funcName,
- const char* msgCode) const;
- bool ValidateCommandBufferState(const CMD_BUFFER_STATE* cb_state, const char* call_source, int current_submit_count,
- const char* vu_id) const;
- bool ValidateCommandBufferSimultaneousUse(const CMD_BUFFER_STATE* pCB, int current_submit_count) const;
- bool ValidateGetDeviceQueue(uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue, const char* valid_qfi_vuid,
- const char* qfi_in_range_vuid) const;
- bool ValidateRenderpassAttachmentUsage(RenderPassCreateVersion rp_version, const VkRenderPassCreateInfo2KHR* pCreateInfo) const;
- bool AddAttachmentUse(RenderPassCreateVersion rp_version, uint32_t subpass, std::vector<uint8_t>& attachment_uses,
- std::vector<VkImageLayout>& attachment_layouts, uint32_t attachment, uint8_t new_use,
- VkImageLayout new_layout) const;
- bool ValidateAttachmentIndex(RenderPassCreateVersion rp_version, uint32_t attachment, uint32_t attachment_count,
- const char* type) const;
- bool ValidateCreateRenderPass(VkDevice device, RenderPassCreateVersion rp_version,
- const VkRenderPassCreateInfo2KHR* pCreateInfo) const;
- bool ValidateRenderPassPipelineBarriers(const char* funcName, CMD_BUFFER_STATE* cb_state, VkPipelineStageFlags src_stage_mask,
- VkPipelineStageFlags dst_stage_mask, VkDependencyFlags dependency_flags,
- uint32_t mem_barrier_count, const VkMemoryBarrier* mem_barriers,
- uint32_t buffer_mem_barrier_count, const VkBufferMemoryBarrier* buffer_mem_barriers,
- uint32_t image_mem_barrier_count, const VkImageMemoryBarrier* image_barriers);
- bool CheckStageMaskQueueCompatibility(VkCommandBuffer command_buffer, VkPipelineStageFlags stage_mask, VkQueueFlags queue_flags,
- const char* function, const char* src_or_dest, const char* error_code);
- bool ValidateUpdateDescriptorSetWithTemplate(VkDescriptorSet descriptorSet,
+ bool ValidateSecondaryCommandBufferState(layer_data* dev_data, GLOBAL_CB_NODE* pCB, GLOBAL_CB_NODE* pSubCB);
+ bool ValidateFramebuffer(layer_data* dev_data, VkCommandBuffer primaryBuffer, const GLOBAL_CB_NODE* pCB,
+ VkCommandBuffer secondaryBuffer, const GLOBAL_CB_NODE* pSubCB, const char* caller);
+ bool ValidateDescriptorUpdateTemplate(const char* func_name, layer_data* device_data,
+ const VkDescriptorUpdateTemplateCreateInfoKHR* pCreateInfo);
+ bool ValidateCreateSamplerYcbcrConversion(const layer_data* device_data, const char* func_name,
+ const VkSamplerYcbcrConversionCreateInfo* create_info);
+ void RecordCreateSamplerYcbcrConversionState(layer_data* device_data, const VkSamplerYcbcrConversionCreateInfo* create_info,
+ VkSamplerYcbcrConversion ycbcr_conversion);
+ bool ValidateImportFence(layer_data* device_data, VkFence fence, const char* caller_name);
+ void RecordImportFenceState(layer_data* device_data, VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type,
+ VkFenceImportFlagsKHR flags);
+ void RecordGetExternalFenceState(layer_data* device_data, VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type);
+ bool ValidateAcquireNextImage(layer_data* device_data, VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
+ VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex, const char* func_name);
+ void RecordAcquireNextImageState(layer_data* device_data, VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
+ VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex);
+ bool VerifyRenderAreaBounds(const layer_data* dev_data, const VkRenderPassBeginInfo* pRenderPassBegin);
+ bool ValidatePrimaryCommandBuffer(const layer_data* dev_data, const GLOBAL_CB_NODE* pCB, char const* cmd_name,
+ const char* error_code);
+ void RecordCmdNextSubpass(layer_data* device_data, VkCommandBuffer commandBuffer, VkSubpassContents contents);
+ bool ValidateCmdEndRenderPass(layer_data* device_data, RenderPassCreateVersion rp_version, VkCommandBuffer commandBuffer);
+ void RecordCmdEndRenderPassState(layer_data* device_data, VkCommandBuffer commandBuffer);
+ bool ValidateFramebufferCreateInfo(layer_data* dev_data, const VkFramebufferCreateInfo* pCreateInfo);
+ bool MatchUsage(layer_data* dev_data, uint32_t count, const VkAttachmentReference2KHR* attachments,
+ const VkFramebufferCreateInfo* fbci, VkImageUsageFlagBits usage_flag, const char* error_code);
+ bool CheckDependencyExists(const layer_data* dev_data, const uint32_t subpass, const std::vector<uint32_t>& dependent_subpasses,
+ const std::vector<DAGNode>& subpass_to_node, bool& skip);
+ bool CheckPreserved(const layer_data* dev_data, const VkRenderPassCreateInfo2KHR* pCreateInfo, const int index,
+ const uint32_t attachment, const std::vector<DAGNode>& subpass_to_node, int depth, bool& skip);
+ bool ValidateBindImageMemory(layer_data* device_data, VkImage image, VkDeviceMemory mem, VkDeviceSize memoryOffset,
+ const char* api_name);
+ void UpdateBindImageMemoryState(layer_data* device_data, VkImage image, VkDeviceMemory mem, VkDeviceSize memoryOffset);
+ void RecordGetPhysicalDeviceDisplayPlanePropertiesState(instance_layer_data* instance_data, VkPhysicalDevice physicalDevice,
+ uint32_t* pPropertyCount, void* pProperties);
+ bool ValidateGetPhysicalDeviceDisplayPlanePropertiesKHRQuery(instance_layer_data* instance_data,
+ VkPhysicalDevice physicalDevice, uint32_t planeIndex,
+ const char* api_name);
+ bool ValidateQuery(VkQueue queue, GLOBAL_CB_NODE* pCB, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount);
+ bool IsQueryInvalid(layer_data* dev_data, QUEUE_STATE* queue_data, VkQueryPool queryPool, uint32_t queryIndex);
+ bool ValidateImportSemaphore(layer_data* device_data, VkSemaphore semaphore, const char* caller_name);
+ void RecordImportSemaphoreState(layer_data* device_data, VkSemaphore semaphore,
+ VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type, VkSemaphoreImportFlagsKHR flags);
+ void RecordGetExternalSemaphoreState(layer_data* device_data, VkSemaphore semaphore,
+ VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type);
+ bool SetQueryState(VkQueue queue, VkCommandBuffer commandBuffer, QueryObject object, bool value);
+ bool ValidateCmdDrawType(layer_data* dev_data, VkCommandBuffer cmd_buffer, bool indexed, VkPipelineBindPoint bind_point,
+ CMD_TYPE cmd_type, const char* caller, VkQueueFlags queue_flags, const char* queue_flag_code,
+ const char* renderpass_msg_code, const char* pipebound_msg_code, const char* dynamic_state_msg_code);
+ void UpdateStateCmdDrawDispatchType(layer_data* dev_data, GLOBAL_CB_NODE* cb_state, VkPipelineBindPoint bind_point);
+ void UpdateStateCmdDrawType(layer_data* dev_data, GLOBAL_CB_NODE* cb_state, VkPipelineBindPoint bind_point);
+ bool ValidateCmdNextSubpass(layer_data* device_data, RenderPassCreateVersion rp_version, VkCommandBuffer commandBuffer);
+ bool RangesIntersect(layer_data const* dev_data, MEMORY_RANGE const* range1, VkDeviceSize offset, VkDeviceSize end);
+ bool RangesIntersect(layer_data const* dev_data, MEMORY_RANGE const* range1, MEMORY_RANGE const* range2, bool* skip,
+ bool skip_checks);
+ bool ValidateInsertMemoryRange(layer_data const* dev_data, uint64_t handle, DEVICE_MEM_INFO* mem_info,
+ VkDeviceSize memoryOffset, VkMemoryRequirements memRequirements, bool is_image, bool is_linear,
+ const char* api_name);
+ void InsertMemoryRange(layer_data const* dev_data, uint64_t handle, DEVICE_MEM_INFO* mem_info, VkDeviceSize memoryOffset,
+ VkMemoryRequirements memRequirements, bool is_image, bool is_linear);
+ bool ValidateInsertImageMemoryRange(layer_data const* dev_data, VkImage image, DEVICE_MEM_INFO* mem_info,
+ VkDeviceSize mem_offset, VkMemoryRequirements mem_reqs, bool is_linear,
+ const char* api_name);
+ void InsertImageMemoryRange(layer_data const* dev_data, VkImage image, DEVICE_MEM_INFO* mem_info, VkDeviceSize mem_offset,
+ VkMemoryRequirements mem_reqs, bool is_linear);
+ bool ValidateInsertBufferMemoryRange(layer_data const* dev_data, VkBuffer buffer, DEVICE_MEM_INFO* mem_info,
+ VkDeviceSize mem_offset, VkMemoryRequirements mem_reqs, const char* api_name);
+ void InsertBufferMemoryRange(layer_data const* dev_data, VkBuffer buffer, DEVICE_MEM_INFO* mem_info, VkDeviceSize mem_offset,
+ VkMemoryRequirements mem_reqs);
+ bool ValidateMemoryTypes(const layer_data* dev_data, const DEVICE_MEM_INFO* mem_info, const uint32_t memory_type_bits,
+ const char* funcName, const char* msgCode);
+ bool ValidateCommandBufferState(layer_data* dev_data, GLOBAL_CB_NODE* cb_state, const char* call_source,
+ int current_submit_count, const char* vu_id);
+ bool ValidateCommandBufferSimultaneousUse(layer_data* dev_data, GLOBAL_CB_NODE* pCB, int current_submit_count);
+ bool ValidateGetDeviceQueue(layer_data* device_data, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue,
+ const char* valid_qfi_vuid, const char* qfi_in_range_vuid);
+ void RecordGetDeviceQueueState(layer_data* device_data, uint32_t queue_family_index, VkQueue queue);
+ bool ValidateRenderpassAttachmentUsage(const layer_data* dev_data, RenderPassCreateVersion rp_version,
+ const VkRenderPassCreateInfo2KHR* pCreateInfo);
+ bool AddAttachmentUse(const layer_data* dev_data, RenderPassCreateVersion rp_version, uint32_t subpass,
+ std::vector<uint8_t>& attachment_uses, std::vector<VkImageLayout>& attachment_layouts,
+ uint32_t attachment, uint8_t new_use, VkImageLayout new_layout);
+ bool ValidateAttachmentIndex(const layer_data* dev_data, RenderPassCreateVersion rp_version, uint32_t attachment,
+ uint32_t attachment_count, const char* type);
+ bool ValidateCreateRenderPass(layer_data* dev_data, VkDevice device, RenderPassCreateVersion rp_version,
+ const VkRenderPassCreateInfo2KHR* pCreateInfo, RENDER_PASS_STATE* render_pass);
+ bool ValidateRenderPassPipelineBarriers(layer_data* device_data, const char* funcName, GLOBAL_CB_NODE* cb_state,
+ VkPipelineStageFlags src_stage_mask, VkPipelineStageFlags dst_stage_mask,
+ VkDependencyFlags dependency_flags, uint32_t mem_barrier_count,
+ const VkMemoryBarrier* mem_barriers, uint32_t buffer_mem_barrier_count,
+ const VkBufferMemoryBarrier* buffer_mem_barriers, uint32_t image_mem_barrier_count,
+ const VkImageMemoryBarrier* image_barriers);
+ bool CheckStageMaskQueueCompatibility(layer_data* dev_data, VkCommandBuffer command_buffer, VkPipelineStageFlags stage_mask,
+ VkQueueFlags queue_flags, const char* function, const char* src_or_dest,
+ const char* error_code);
+ void RecordUpdateDescriptorSetWithTemplateState(layer_data* device_data, VkDescriptorSet descriptorSet,
+ VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, const void* pData);
+ bool ValidateUpdateDescriptorSetWithTemplate(layer_data* device_data, VkDescriptorSet descriptorSet,
VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, const void* pData);
- bool ValidateMemoryIsBoundToBuffer(const BUFFER_STATE*, const char*, const char*) const;
- bool ValidateMemoryIsBoundToImage(const IMAGE_STATE*, const char*, const char*) const;
- bool ValidateMemoryIsBoundToAccelerationStructure(const ACCELERATION_STRUCTURE_STATE*, const char*, const char*) const;
- bool ValidateObjectNotInUse(const BASE_NODE* obj_node, const VulkanTypedHandle& obj_struct, const char* caller_name,
- const char* error_code) const;
- bool ValidateCmdQueueFlags(const CMD_BUFFER_STATE* cb_node, const char* caller_name, VkQueueFlags flags,
- const char* error_code) const;
- bool InsideRenderPass(const CMD_BUFFER_STATE* pCB, const char* apiName, const char* msgCode) const;
- bool OutsideRenderPass(const CMD_BUFFER_STATE* pCB, const char* apiName, const char* msgCode) const;
-
- static void SetLayout(ImageSubresPairLayoutMap& imageLayoutMap, ImageSubresourcePair imgpair, VkImageLayout layout);
-
- bool ValidateImageSampleCount(const IMAGE_STATE* image_state, VkSampleCountFlagBits sample_count, const char* location,
- const std::string& msgCode) const;
- bool ValidateCmdSubpassState(const CMD_BUFFER_STATE* pCB, const CMD_TYPE cmd_type) const;
- bool ValidateCmd(const CMD_BUFFER_STATE* cb_state, const CMD_TYPE cmd, const char* caller_name) const;
-
- bool ValidateDeviceMaskToPhysicalDeviceCount(uint32_t deviceMask, VkDebugReportObjectTypeEXT VUID_handle_type,
- uint64_t VUID_handle, const char* VUID) const;
- bool ValidateDeviceMaskToZero(uint32_t deviceMask, VkDebugReportObjectTypeEXT VUID_handle_type, uint64_t VUID_handle,
- const char* VUID) const;
- bool ValidateDeviceMaskToCommandBuffer(const CMD_BUFFER_STATE* pCB, uint32_t deviceMask,
- VkDebugReportObjectTypeEXT VUID_handle_type, uint64_t VUID_handle,
- const char* VUID) const;
- bool ValidateDeviceMaskToRenderPass(const CMD_BUFFER_STATE* pCB, uint32_t deviceMask,
- VkDebugReportObjectTypeEXT VUID_handle_type, uint64_t VUID_handle, const char* VUID);
-
- bool ValidateBindAccelerationStructureMemoryNV(VkDevice device, const VkBindAccelerationStructureMemoryInfoNV& info) const;
- // Prototypes for CoreChecks accessor functions
- VkFormatProperties GetPDFormatProperties(const VkFormat format) const;
+ bool ValidateMemoryIsBoundToBuffer(const layer_data*, const BUFFER_STATE*, const char*, const char*);
+ bool ValidateMemoryIsBoundToImage(const layer_data*, const IMAGE_STATE*, const char*, const char*);
+ void AddCommandBufferBindingSampler(GLOBAL_CB_NODE*, SAMPLER_STATE*);
+ void AddCommandBufferBindingImage(const layer_data*, GLOBAL_CB_NODE*, IMAGE_STATE*);
+ void AddCommandBufferBindingImageView(const layer_data*, GLOBAL_CB_NODE*, IMAGE_VIEW_STATE*);
+ void AddCommandBufferBindingBuffer(const layer_data*, GLOBAL_CB_NODE*, BUFFER_STATE*);
+ void AddCommandBufferBindingBufferView(const layer_data*, GLOBAL_CB_NODE*, BUFFER_VIEW_STATE*);
+ bool ValidateObjectNotInUse(const layer_data* dev_data, BASE_NODE* obj_node, VK_OBJECT obj_struct, const char* caller_name,
+ const char* error_code);
+ void InvalidateCommandBuffers(const layer_data* dev_data, std::unordered_set<GLOBAL_CB_NODE*> const& cb_nodes, VK_OBJECT obj);
+ void RemoveImageMemoryRange(uint64_t handle, DEVICE_MEM_INFO* mem_info);
+ void RemoveBufferMemoryRange(uint64_t handle, DEVICE_MEM_INFO* mem_info);
+ void ClearMemoryObjectBindings(uint64_t handle, VulkanObjectType type);
+ bool ValidateCmdQueueFlags(layer_data* dev_data, const GLOBAL_CB_NODE* cb_node, const char* caller_name, VkQueueFlags flags,
+ const char* error_code);
+ bool InsideRenderPass(const layer_data* my_data, const GLOBAL_CB_NODE* pCB, const char* apiName, const char* msgCode);
+ bool OutsideRenderPass(const layer_data* my_data, GLOBAL_CB_NODE* pCB, const char* apiName, const char* msgCode);
+
+ void SetLayout(layer_data* device_data, GLOBAL_CB_NODE* pCB, ImageSubresourcePair imgpair, const VkImageLayout& layout);
+ void SetLayout(layer_data* device_data, GLOBAL_CB_NODE* pCB, ImageSubresourcePair imgpair,
+ const IMAGE_CMD_BUF_LAYOUT_NODE& node);
+ void SetLayout(std::unordered_map<ImageSubresourcePair, IMAGE_LAYOUT_NODE>& imageLayoutMap, ImageSubresourcePair imgpair,
+ VkImageLayout layout);
+
+ bool ValidateImageSampleCount(layer_data* dev_data, IMAGE_STATE* image_state, VkSampleCountFlagBits sample_count,
+ const char* location, const std::string& msgCode);
+ bool ValidateCmdSubpassState(const layer_data* dev_data, const GLOBAL_CB_NODE* pCB, const CMD_TYPE cmd_type);
+ bool ValidateCmd(layer_data* dev_data, const GLOBAL_CB_NODE* cb_state, const CMD_TYPE cmd, const char* caller_name);
+
+ // Prototypes for layer_data accessor functions. These should be in their own header file at some point
+ VkFormatProperties GetPDFormatProperties(const VkFormat format);
VkResult GetPDImageFormatProperties(const VkImageCreateInfo*, VkImageFormatProperties*);
- VkResult GetPDImageFormatProperties2(const VkPhysicalDeviceImageFormatInfo2*, VkImageFormatProperties2*) const;
+ VkResult GetPDImageFormatProperties2(const VkPhysicalDeviceImageFormatInfo2*, VkImageFormatProperties2*);
+ const debug_report_data* GetReportData();
+ const VkLayerDispatchTable* GetDispatchTable();
+ const VkPhysicalDeviceProperties* GetPDProperties();
const VkPhysicalDeviceMemoryProperties* GetPhysicalDeviceMemoryProperties();
+ const CHECK_DISABLED* GetDisables();
+ const CHECK_ENABLED* GetEnables();
+ std::unordered_map<VkImage, std::unique_ptr<IMAGE_STATE>>* GetImageMap();
+ std::unordered_map<VkImage, std::vector<ImageSubresourcePair>>* GetImageSubresourceMap();
+ std::unordered_map<ImageSubresourcePair, IMAGE_LAYOUT_NODE>* GetImageLayoutMap();
+ std::unordered_map<VkBuffer, std::unique_ptr<BUFFER_STATE>>* GetBufferMap();
+ std::unordered_map<VkBufferView, std::unique_ptr<BUFFER_VIEW_STATE>>* GetBufferViewMap();
+ std::unordered_map<VkImageView, std::unique_ptr<IMAGE_VIEW_STATE>>* GetImageViewMap();
+ std::unordered_map<VkSamplerYcbcrConversion, uint64_t>* GetYcbcrConversionFormatMap();
+ std::unordered_set<uint64_t>* GetAHBExternalFormatsSet();
+
+ const DeviceExtensions* GetDeviceExtensions();
+ GpuValidationState* GetGpuValidationState();
+ VkDevice GetDevice();
+
+ uint32_t GetApiVersion();
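The comment introducing this run of prototypes says the layer_data accessor functions should eventually live in their own header. The sketch below is a minimal, standalone illustration of the motivation, using hypothetical ImageState/StateTrackerSketch types rather than the layer's real classes: callers reach tracked state only through Get* accessors, so the underlying maps can later move into their own class or header without touching every call site.

```cpp
#include <cstdint>
#include <memory>
#include <unordered_map>

// Hypothetical stand-ins for the layer's tracked state and its owner.
struct ImageState {
    bool bound_to_memory = false;
};

class StateTrackerSketch {
  public:
    // Accessor mirrors the Get*Map() prototypes above; callers never see the member.
    std::unordered_map<uint64_t, std::unique_ptr<ImageState>> *GetImageMap() { return &image_map_; }

  private:
    std::unordered_map<uint64_t, std::unique_ptr<ImageState>> image_map_;  // storage stays an implementation detail
};

// A validation helper depends only on the accessor, not on the map member itself.
static bool HasImageState(StateTrackerSketch *tracker, uint64_t image_handle) {
    auto *map = tracker->GetImageMap();
    return map->find(image_handle) != map->end();
}
```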
- const GlobalQFOTransferBarrierMap<VkImageMemoryBarrier>& GetGlobalQFOReleaseBarrierMap(
- const QFOTransferBarrier<VkImageMemoryBarrier>::Tag& type_tag) const;
- const GlobalQFOTransferBarrierMap<VkBufferMemoryBarrier>& GetGlobalQFOReleaseBarrierMap(
- const QFOTransferBarrier<VkBufferMemoryBarrier>::Tag& type_tag) const;
GlobalQFOTransferBarrierMap<VkImageMemoryBarrier>& GetGlobalQFOReleaseBarrierMap(
const QFOTransferBarrier<VkImageMemoryBarrier>::Tag& type_tag);
GlobalQFOTransferBarrierMap<VkBufferMemoryBarrier>& GetGlobalQFOReleaseBarrierMap(
const QFOTransferBarrier<VkBufferMemoryBarrier>::Tag& type_tag);
template <typename Barrier>
- void RecordQueuedQFOTransferBarriers(CMD_BUFFER_STATE* cb_state);
+ void RecordQueuedQFOTransferBarriers(layer_data* device_data, GLOBAL_CB_NODE* cb_state);
template <typename Barrier>
- bool ValidateQueuedQFOTransferBarriers(const CMD_BUFFER_STATE* cb_state, QFOTransferCBScoreboards<Barrier>* scoreboards) const;
- bool ValidateQueuedQFOTransfers(const CMD_BUFFER_STATE* cb_state,
+ bool ValidateQueuedQFOTransferBarriers(layer_data* device_data, GLOBAL_CB_NODE* cb_state,
+ QFOTransferCBScoreboards<Barrier>* scoreboards);
+ bool ValidateQueuedQFOTransfers(layer_data* device_data, GLOBAL_CB_NODE* cb_state,
QFOTransferCBScoreboards<VkImageMemoryBarrier>* qfo_image_scoreboards,
- QFOTransferCBScoreboards<VkBufferMemoryBarrier>* qfo_buffer_scoreboards) const;
+ QFOTransferCBScoreboards<VkBufferMemoryBarrier>* qfo_buffer_scoreboards);
template <typename BarrierRecord, typename Scoreboard>
- bool ValidateAndUpdateQFOScoreboard(const debug_report_data* report_data, const CMD_BUFFER_STATE* cb_state,
- const char* operation, const BarrierRecord& barrier, Scoreboard* scoreboard) const;
+ bool ValidateAndUpdateQFOScoreboard(const debug_report_data* report_data, const GLOBAL_CB_NODE* cb_state, const char* operation,
+ const BarrierRecord& barrier, Scoreboard* scoreboard);
template <typename Barrier>
- void RecordQFOTransferBarriers(CMD_BUFFER_STATE* cb_state, uint32_t barrier_count, const Barrier* barriers);
- void RecordBarriersQFOTransfers(CMD_BUFFER_STATE* cb_state, uint32_t bufferBarrierCount,
+ void RecordQFOTransferBarriers(layer_data* device_data, GLOBAL_CB_NODE* cb_state, uint32_t barrier_count,
+ const Barrier* barriers);
+ void RecordBarriersQFOTransfers(layer_data* device_data, GLOBAL_CB_NODE* cb_state, uint32_t bufferBarrierCount,
const VkBufferMemoryBarrier* pBufferMemBarriers, uint32_t imageMemBarrierCount,
const VkImageMemoryBarrier* pImageMemBarriers);
template <typename Barrier>
- bool ValidateQFOTransferBarrierUniqueness(const char* func_name, CMD_BUFFER_STATE* cb_state, uint32_t barrier_count,
- const Barrier* barriers);
- bool IsReleaseOp(CMD_BUFFER_STATE* cb_state, const VkImageMemoryBarrier& barrier) const;
- bool ValidateBarriersQFOTransferUniqueness(const char* func_name, CMD_BUFFER_STATE* cb_state, uint32_t bufferBarrierCount,
- const VkBufferMemoryBarrier* pBufferMemBarriers, uint32_t imageMemBarrierCount,
- const VkImageMemoryBarrier* pImageMemBarriers);
- bool ValidatePrimaryCommandBufferState(const CMD_BUFFER_STATE* pCB, int current_submit_count,
+ bool ValidateQFOTransferBarrierUniqueness(layer_data* device_data, const char* func_name, GLOBAL_CB_NODE* cb_state,
+ uint32_t barrier_count, const Barrier* barriers);
+ bool IsReleaseOp(GLOBAL_CB_NODE* cb_state, VkImageMemoryBarrier const* barrier);
+ bool ValidateBarriersQFOTransferUniqueness(layer_data* device_data, const char* func_name, GLOBAL_CB_NODE* cb_state,
+ uint32_t bufferBarrierCount, const VkBufferMemoryBarrier* pBufferMemBarriers,
+ uint32_t imageMemBarrierCount, const VkImageMemoryBarrier* pImageMemBarriers);
+ bool ValidatePrimaryCommandBufferState(layer_data* dev_data, GLOBAL_CB_NODE* pCB, int current_submit_count,
QFOTransferCBScoreboards<VkImageMemoryBarrier>* qfo_image_scoreboards,
- QFOTransferCBScoreboards<VkBufferMemoryBarrier>* qfo_buffer_scoreboards) const;
- bool ValidatePipelineDrawtimeState(const LAST_BOUND_STATE& state, const CMD_BUFFER_STATE* pCB, CMD_TYPE cmd_type,
- const PIPELINE_STATE* pPipeline, const char* caller) const;
- bool ValidateCmdBufDrawState(const CMD_BUFFER_STATE* cb_node, CMD_TYPE cmd_type, const bool indexed,
+ QFOTransferCBScoreboards<VkBufferMemoryBarrier>* qfo_buffer_scoreboards);
+ bool ValidatePipelineDrawtimeState(layer_data const* dev_data, LAST_BOUND_STATE const& state, const GLOBAL_CB_NODE* pCB,
+ CMD_TYPE cmd_type, PIPELINE_STATE const* pPipeline, const char* caller);
+ bool ValidateCmdBufDrawState(layer_data* dev_data, GLOBAL_CB_NODE* cb_node, CMD_TYPE cmd_type, const bool indexed,
const VkPipelineBindPoint bind_point, const char* function, const char* pipe_err_code,
- const char* state_err_code) const;
- bool ValidateEventStageMask(VkQueue queue, CMD_BUFFER_STATE* pCB, uint32_t eventCount, size_t firstEventIndex,
+ const char* state_err_code);
+ void IncrementBoundObjects(layer_data* dev_data, GLOBAL_CB_NODE const* cb_node);
+ void IncrementResources(layer_data* dev_data, GLOBAL_CB_NODE* cb_node);
+ bool ValidateEventStageMask(VkQueue queue, GLOBAL_CB_NODE* pCB, uint32_t eventCount, size_t firstEventIndex,
VkPipelineStageFlags sourceStageMask);
- bool ValidateQueueFamilyIndices(const CMD_BUFFER_STATE* pCB, VkQueue queue) const;
+ void RetireWorkOnQueue(layer_data* dev_data, QUEUE_STATE* pQueue, uint64_t seq);
+ bool ValidateResources(layer_data* dev_data, GLOBAL_CB_NODE* cb_node);
+ bool ValidateQueueFamilyIndices(layer_data* dev_data, GLOBAL_CB_NODE* pCB, VkQueue queue);
VkResult CoreLayerCreateValidationCacheEXT(VkDevice device, const VkValidationCacheCreateInfoEXT* pCreateInfo,
const VkAllocationCallbacks* pAllocator, VkValidationCacheEXT* pValidationCache);
void CoreLayerDestroyValidationCacheEXT(VkDevice device, VkValidationCacheEXT validationCache,
@@ -1161,152 +586,119 @@ class CoreChecks : public ValidationStateTracker {
const VkValidationCacheEXT* pSrcCaches);
VkResult CoreLayerGetValidationCacheDataEXT(VkDevice device, VkValidationCacheEXT validationCache, size_t* pDataSize,
void* pData);
-    // For the given bindings, validate that state at draw time is correct; returns false on error and writes error details into string*
- bool ValidateDrawState(const cvdescriptorset::DescriptorSet* descriptor_set, const std::map<uint32_t, descriptor_req>& bindings,
- const std::vector<uint32_t>& dynamic_offsets, const CMD_BUFFER_STATE* cb_node, const char* caller,
- std::string* error) const;
- // Validate contents of a CopyUpdate
- using DescriptorSet = cvdescriptorset::DescriptorSet;
- bool ValidateCopyUpdate(const VkCopyDescriptorSet* update, const DescriptorSet* dst_set, const DescriptorSet* src_set,
- const char* func_name, std::string* error_code, std::string* error_msg);
- bool VerifyCopyUpdateContents(const VkCopyDescriptorSet* update, const DescriptorSet* src_set, VkDescriptorType type,
- uint32_t index, const char* func_name, std::string* error_code, std::string* error_msg);
- // Validate contents of a WriteUpdate
- bool ValidateWriteUpdate(const DescriptorSet* descriptor_set, const VkWriteDescriptorSet* update, const char* func_name,
- std::string* error_code, std::string* error_msg);
- bool VerifyWriteUpdateContents(const DescriptorSet* dest_set, const VkWriteDescriptorSet* update, const uint32_t index,
- const char* func_name, std::string* error_code, std::string* error_msg);
-    // Shared helper functions - useful because the combined image sampler descriptor type
-    // performs operations common to both sampler and image descriptors, so those descriptors can share these helpers
- bool ValidateImageUpdate(VkImageView, VkImageLayout, VkDescriptorType, const char* func_name, std::string*, std::string*);
- // Validate contents of a push descriptor update
- bool ValidatePushDescriptorsUpdate(const DescriptorSet* push_set, uint32_t write_count, const VkWriteDescriptorSet* p_wds,
- const char* func_name);
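The removed comment above ("For the given bindings, validate that state at draw time is correct...") describes the convention these descriptor validators follow: return a bool and hand the error text back through an out-parameter so the caller can assemble the log message. A minimal standalone sketch of that convention, with a hypothetical Binding type rather than the layer's descriptor classes:

```cpp
#include <string>

// Hypothetical binding state used only for this sketch.
struct Binding {
    bool bound_to_resource = false;
};

// Returns false on error and writes the details into the caller-provided string.
static bool ValidateBindingSketch(const Binding &binding, std::string *error) {
    if (!binding.bound_to_resource) {
        *error = "descriptor binding is not bound to a valid resource";  // details for the caller's log message
        return false;  // signal failure; the caller decides how to report it
    }
    return true;
}
```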
+
// Descriptor Set Validation Functions
- bool ValidateSampler(VkSampler) const;
- bool ValidateBufferUpdate(VkDescriptorBufferInfo const* buffer_info, VkDescriptorType type, const char* func_name,
- std::string* error_code, std::string* error_msg);
- bool ValidateUpdateDescriptorSetsWithTemplateKHR(VkDescriptorSet descriptorSet, const TEMPLATE_STATE* template_state,
- const void* pData);
- void UpdateAllocateDescriptorSetsData(const VkDescriptorSetAllocateInfo*, cvdescriptorset::AllocateDescriptorSetsData*);
- bool ValidateAllocateDescriptorSets(const VkDescriptorSetAllocateInfo*, const cvdescriptorset::AllocateDescriptorSetsData*);
- bool ValidateUpdateDescriptorSets(uint32_t write_count, const VkWriteDescriptorSet* p_wds, uint32_t copy_count,
- const VkCopyDescriptorSet* p_cds, const char* func_name);
+ bool ValidateUpdateDescriptorSetsWithTemplateKHR(layer_data* device_data, VkDescriptorSet descriptorSet,
+ const TEMPLATE_STATE* template_state, const void* pData);
+ void PerformUpdateDescriptorSetsWithTemplateKHR(layer_data* device_data, VkDescriptorSet descriptorSet,
+ const TEMPLATE_STATE* template_state, const void* pData);
+ void UpdateAllocateDescriptorSetsData(const layer_data* dev_data, const VkDescriptorSetAllocateInfo*,
+ cvdescriptorset::AllocateDescriptorSetsData*);
+ bool ValidateAllocateDescriptorSets(const layer_data*, const VkDescriptorSetAllocateInfo*,
+ const cvdescriptorset::AllocateDescriptorSetsData*);
+ void PerformAllocateDescriptorSets(const VkDescriptorSetAllocateInfo*, const VkDescriptorSet*,
+ const cvdescriptorset::AllocateDescriptorSetsData*,
+ std::unordered_map<VkDescriptorPool, DESCRIPTOR_POOL_STATE*>*,
+ std::unordered_map<VkDescriptorSet, cvdescriptorset::DescriptorSet*>*, layer_data*);
+ bool ValidateUpdateDescriptorSets(const debug_report_data* report_data, const layer_data* dev_data, uint32_t write_count,
+ const VkWriteDescriptorSet* p_wds, uint32_t copy_count, const VkCopyDescriptorSet* p_cds,
+ const char* func_name);
    // Shader validation functions (from shader_validation)
- bool ValidateGraphicsPipelineShaderState(const PIPELINE_STATE* pPipeline) const;
- bool ValidateComputePipeline(PIPELINE_STATE* pPipeline) const;
- bool ValidateRayTracingPipelineNV(PIPELINE_STATE* pipeline) const;
+ bool ValidateAndCapturePipelineShaderState(layer_data* dev_data, PIPELINE_STATE* pPipeline);
+ bool ValidateComputePipeline(layer_data* dev_data, PIPELINE_STATE* pPipeline);
+ bool ValidateRayTracingPipelineNV(layer_data* dev_data, PIPELINE_STATE* pipeline);
bool PreCallValidateCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule);
void PreCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule, void* csm_state);
- bool ValidatePipelineShaderStage(VkPipelineShaderStageCreateInfo const* pStage, const PIPELINE_STATE* pipeline,
- const PIPELINE_STATE::StageState& stage_state, const SHADER_MODULE_STATE* module,
- const spirv_inst_iter& entrypoint, bool check_point_size) const;
- bool ValidatePointListShaderState(const PIPELINE_STATE* pipeline, SHADER_MODULE_STATE const* src, spirv_inst_iter entrypoint,
- VkShaderStageFlagBits stage) const;
- bool ValidateShaderCapabilities(SHADER_MODULE_STATE const* src, VkShaderStageFlagBits stage) const;
- bool ValidateShaderStageWritableDescriptor(VkShaderStageFlagBits stage, bool has_writable_descriptor) const;
- bool ValidateShaderStageInputOutputLimits(SHADER_MODULE_STATE const* src, VkPipelineShaderStageCreateInfo const* pStage,
- const PIPELINE_STATE* pipeline, spirv_inst_iter entrypoint) const;
- bool ValidateShaderStageGroupNonUniform(SHADER_MODULE_STATE const* src, VkShaderStageFlagBits stage,
- std::unordered_set<uint32_t> const& accessible_ids) const;
- bool ValidateCooperativeMatrix(SHADER_MODULE_STATE const* src, VkPipelineShaderStageCreateInfo const* pStage,
- const PIPELINE_STATE* pipeline) const;
- bool ValidateExecutionModes(SHADER_MODULE_STATE const* src, spirv_inst_iter entrypoint) const;
+ void PostCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule, VkResult result,
+ void* csm_state);
+ bool ValidatePipelineShaderStage(layer_data* dev_data, VkPipelineShaderStageCreateInfo const* pStage, PIPELINE_STATE* pipeline,
+ shader_module const** out_module, spirv_inst_iter* out_entrypoint, bool check_point_size);
+ bool ValidatePointListShaderState(const layer_data* dev_data, const PIPELINE_STATE* pipeline, shader_module const* src,
+ spirv_inst_iter entrypoint, VkShaderStageFlagBits stage);
+ bool ValidateShaderCapabilities(layer_data* dev_data, shader_module const* src, VkShaderStageFlagBits stage,
+ bool has_writable_descriptor);
+ bool ValidateShaderStageInputOutputLimits(layer_data* dev_data, shader_module const* src,
+ VkPipelineShaderStageCreateInfo const* pStage, PIPELINE_STATE* pipeline);
// Gpu Validation Functions
- void GpuPreCallRecordCreateDevice(VkPhysicalDevice gpu, safe_VkDeviceCreateInfo* modified_create_info,
+ void GpuPreCallRecordCreateDevice(VkPhysicalDevice gpu, std::unique_ptr<safe_VkDeviceCreateInfo>& modified_create_info,
VkPhysicalDeviceFeatures* supported_features);
- void GpuPostCallRecordCreateDevice(const CHECK_ENABLED* enables, const VkDeviceCreateInfo* pCreateInfo);
- void GpuPreCallRecordDestroyDevice();
- void GpuResetCommandBuffer(const VkCommandBuffer commandBuffer);
- bool GpuPreCallCreateShaderModule(const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator,
- VkShaderModule* pShaderModule, uint32_t* unique_shader_id,
- VkShaderModuleCreateInfo* instrumented_create_info,
+ void GpuPostCallRecordCreateDevice(layer_data* dev_data);
+ void GpuPreCallRecordDestroyDevice(layer_data* dev_data);
+ void GpuPreCallRecordFreeCommandBuffers(layer_data* dev_data, uint32_t commandBufferCount,
+ const VkCommandBuffer* pCommandBuffers);
+ bool GpuPreCallCreateShaderModule(layer_data* dev_data, const VkShaderModuleCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule,
+ uint32_t* unique_shader_id, VkShaderModuleCreateInfo* instrumented_create_info,
std::vector<unsigned int>* instrumented_pgm);
- bool GpuPreCallCreatePipelineLayout(const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator,
- VkPipelineLayout* pPipelineLayout, std::vector<VkDescriptorSetLayout>* new_layouts,
+ bool GpuPreCallCreatePipelineLayout(layer_data* device_data, const VkPipelineLayoutCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout,
+ std::vector<VkDescriptorSetLayout>* new_layouts,
VkPipelineLayoutCreateInfo* modified_create_info);
- void GpuPostCallCreatePipelineLayout(VkResult result);
- void GpuPreCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence);
- void GpuPostCallQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence);
- void GpuPreCallValidateCmdWaitEvents(VkPipelineStageFlags sourceStageMask);
+ void GpuPostCallCreatePipelineLayout(layer_data* device_data, VkResult result);
+ void GpuPostCallQueueSubmit(layer_data* dev_data, VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits,
+ VkFence fence);
+ void GpuPreCallValidateCmdWaitEvents(layer_data* dev_data, VkPipelineStageFlags sourceStageMask);
std::vector<safe_VkGraphicsPipelineCreateInfo> GpuPreCallRecordCreateGraphicsPipelines(
- VkPipelineCache pipelineCache, uint32_t count, const VkGraphicsPipelineCreateInfo* pCreateInfos,
+ layer_data* dev_data, VkPipelineCache pipelineCache, uint32_t count, const VkGraphicsPipelineCreateInfo* pCreateInfos,
const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, std::vector<std::unique_ptr<PIPELINE_STATE>>& pipe_state);
- void GpuPostCallRecordCreateGraphicsPipelines(const uint32_t count, const VkGraphicsPipelineCreateInfo* pCreateInfos,
+ void GpuPostCallRecordCreateGraphicsPipelines(layer_data* dev_data, const uint32_t count,
+ const VkGraphicsPipelineCreateInfo* pCreateInfos,
const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines);
- void GpuPreCallRecordDestroyPipeline(const VkPipeline pipeline);
- void GpuAllocateValidationResources(const VkCommandBuffer cmd_buffer, VkPipelineBindPoint bind_point);
- void AnalyzeAndReportError(CMD_BUFFER_STATE* cb_node, VkQueue queue, VkPipelineBindPoint bind_point, uint32_t operation_index,
+ void GpuPreCallRecordDestroyPipeline(layer_data* dev_data, const VkPipeline pipeline);
+ void GpuAllocateValidationResources(layer_data* dev_data, const VkCommandBuffer cmd_buffer, VkPipelineBindPoint bind_point);
+ void AnalyzeAndReportError(const layer_data* dev_data, GLOBAL_CB_NODE* cb_node, VkQueue queue, uint32_t draw_index,
uint32_t* const debug_output_buffer);
- void ProcessInstrumentationBuffer(VkQueue queue, CMD_BUFFER_STATE* cb_node);
- void UpdateInstrumentationBuffer(CMD_BUFFER_STATE* cb_node);
- void SubmitBarrier(VkQueue queue);
- bool GpuInstrumentShader(const VkShaderModuleCreateInfo* pCreateInfo, std::vector<unsigned int>& new_pgm,
+ void ProcessInstrumentationBuffer(const layer_data* dev_data, VkQueue queue, GLOBAL_CB_NODE* cb_node);
+ void SubmitBarrier(layer_data* dev_data, VkQueue queue);
+ bool GpuInstrumentShader(layer_data* dev_data, const VkShaderModuleCreateInfo* pCreateInfo, std::vector<unsigned int>& new_pgm,
uint32_t* unique_shader_id);
- template <typename CreateInfo, typename SafeCreateInfo>
- void GpuPreCallRecordPipelineCreations(uint32_t count, const CreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator,
- VkPipeline* pPipelines, std::vector<std::unique_ptr<PIPELINE_STATE>>& pipe_state,
- std::vector<SafeCreateInfo>* new_pipeline_create_infos,
- const VkPipelineBindPoint bind_point);
- template <typename CreateInfo>
- void GpuPostCallRecordPipelineCreations(const uint32_t count, const CreateInfo* pCreateInfos,
- const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines,
- const VkPipelineBindPoint bind_point);
- std::vector<safe_VkComputePipelineCreateInfo> GpuPreCallRecordCreateComputePipelines(
- VkPipelineCache pipelineCache, uint32_t count, const VkComputePipelineCreateInfo* pCreateInfos,
- const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, std::vector<std::unique_ptr<PIPELINE_STATE>>& pipe_state);
- void GpuPostCallRecordCreateComputePipelines(const uint32_t count, const VkComputePipelineCreateInfo* pCreateInfos,
- const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines);
- std::vector<safe_VkRayTracingPipelineCreateInfoNV> GpuPreCallRecordCreateRayTracingPipelinesNV(
- VkPipelineCache pipelineCache, uint32_t count, const VkRayTracingPipelineCreateInfoNV* pCreateInfos,
- const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, std::vector<std::unique_ptr<PIPELINE_STATE>>& pipe_state);
- void GpuPostCallRecordCreateRayTracingPipelinesNV(const uint32_t count, const VkRayTracingPipelineCreateInfoNV* pCreateInfos,
- const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines);
- VkResult GpuInitializeVma();
- void ReportSetupProblem(VkDebugReportObjectTypeEXT object_type, uint64_t object_handle, const char* const specific_message);
+ void ReportSetupProblem(const layer_data* dev_data, VkDebugReportObjectTypeEXT object_type, uint64_t object_handle,
+ const char* const specific_message);
// Buffer Validation Functions
template <class OBJECT, class LAYOUT>
- void SetLayout(OBJECT* pObject, VkImage image, VkImageSubresource range, const LAYOUT& layout);
+ void SetLayout(layer_data* device_data, OBJECT* pObject, VkImage image, VkImageSubresource range, const LAYOUT& layout);
template <class OBJECT, class LAYOUT>
- void SetLayout(OBJECT* pObject, ImageSubresourcePair imgpair, const LAYOUT& layout, VkImageAspectFlags aspectMask);
+ void SetLayout(layer_data* device_data, OBJECT* pObject, ImageSubresourcePair imgpair, const LAYOUT& layout,
+ VkImageAspectFlags aspectMask);
// Remove the pending QFO release records from the global set
    // Note that the type of the handle argument is constrained to match the Barrier type
    // The defaulted BarrierRecord argument allows us to declare the type once, but is not intended to be specified by the caller
template <typename Barrier, typename BarrierRecord = QFOTransferBarrier<Barrier>>
- void EraseQFOReleaseBarriers(const typename BarrierRecord::HandleType& handle) {
+ void EraseQFOReleaseBarriers(layer_data* device_data, const typename BarrierRecord::HandleType& handle) {
GlobalQFOTransferBarrierMap<Barrier>& global_release_barriers =
GetGlobalQFOReleaseBarrierMap(typename BarrierRecord::Tag());
global_release_barriers.erase(handle);
}
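EraseQFOReleaseBarriers above combines three pieces: a per-barrier record type (QFOTransferBarrier<Barrier>) exposing nested Tag and HandleType typedefs, tag-overloaded GetGlobalQFOReleaseBarrierMap accessors that select the image or buffer map, and a defaulted BarrierRecord template parameter so callers only name the Barrier type. The sketch below reproduces that pattern with hypothetical standalone types (TransferRecord, GlobalMaps), not the layer's real classes; calling maps.EraseReleaseRecord<ImageBarrier>(handle) erases from the image map without the caller ever spelling out the record type.

```cpp
#include <cstdint>
#include <unordered_map>

// Hypothetical barrier types standing in for VkImageMemoryBarrier / VkBufferMemoryBarrier.
struct ImageBarrier {};
struct BufferBarrier {};

// Per-barrier record, mirroring the nested Tag / HandleType idea.
template <typename Barrier>
struct TransferRecord {
    struct Tag {};                // tag type used to pick the right map overload
    using HandleType = uint64_t;  // handle of the resource the barrier refers to
    uint32_t src_queue_family = 0;
};

struct GlobalMaps {
    std::unordered_map<uint64_t, TransferRecord<ImageBarrier>> image_records;
    std::unordered_map<uint64_t, TransferRecord<BufferBarrier>> buffer_records;

    // Overloads resolved by the record's Tag type (tag dispatch).
    std::unordered_map<uint64_t, TransferRecord<ImageBarrier>> &Get(TransferRecord<ImageBarrier>::Tag) {
        return image_records;
    }
    std::unordered_map<uint64_t, TransferRecord<BufferBarrier>> &Get(TransferRecord<BufferBarrier>::Tag) {
        return buffer_records;
    }

    // Callers write EraseReleaseRecord<ImageBarrier>(handle); Record is derived from Barrier.
    template <typename Barrier, typename Record = TransferRecord<Barrier>>
    void EraseReleaseRecord(const typename Record::HandleType &handle) {
        Get(typename Record::Tag()).erase(handle);
    }
};
```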
- bool ValidateCopyImageTransferGranularityRequirements(const CMD_BUFFER_STATE* cb_node, const IMAGE_STATE* src_img,
- const IMAGE_STATE* dst_img, const VkImageCopy* region, const uint32_t i,
- const char* function) const;
- bool ValidateIdleBuffer(VkBuffer buffer);
- bool ValidateUsageFlags(VkFlags actual, VkFlags desired, VkBool32 strict, const VulkanTypedHandle& typed_handle,
- const char* msgCode, char const* func_name, char const* usage_str) const;
- bool ValidateImageSubresourceRange(const uint32_t image_mip_count, const uint32_t image_layer_count,
- const VkImageSubresourceRange& subresourceRange, const char* cmd_name,
- const char* param_name, const char* image_layer_count_var_name, const uint64_t image_handle,
- SubresourceRangeErrorCodes errorCodes) const;
- void SetImageLayout(CMD_BUFFER_STATE* cb_node, const IMAGE_STATE& image_state,
- const VkImageSubresourceRange& image_subresource_range, VkImageLayout layout,
- VkImageLayout expected_layout = kInvalidLayout);
- void SetImageLayout(CMD_BUFFER_STATE* cb_node, const IMAGE_STATE& image_state,
- const VkImageSubresourceLayers& image_subresource_layers, VkImageLayout layout);
- bool ValidateRenderPassLayoutAgainstFramebufferImageUsage(RenderPassCreateVersion rp_version, VkImageLayout layout,
- VkImage image, VkImageView image_view, VkFramebuffer framebuffer,
- VkRenderPass renderpass, uint32_t attachment_index,
- const char* variable_name) const;
- bool ValidateBufferImageCopyData(uint32_t regionCount, const VkBufferImageCopy* pRegions, IMAGE_STATE* image_state,
- const char* function);
- bool ValidateBufferViewRange(const BUFFER_STATE* buffer_state, const VkBufferViewCreateInfo* pCreateInfo,
- const VkPhysicalDeviceLimits* device_limits);
- bool ValidateBufferViewBuffer(const BUFFER_STATE* buffer_state, const VkBufferViewCreateInfo* pCreateInfo);
+ bool ValidateCopyImageTransferGranularityRequirements(layer_data* device_data, const GLOBAL_CB_NODE* cb_node,
+ const IMAGE_STATE* src_img, const IMAGE_STATE* dst_img,
+ const VkImageCopy* region, const uint32_t i, const char* function);
+ bool ValidateIdleBuffer(layer_data* device_data, VkBuffer buffer);
+ bool ValidateUsageFlags(const layer_data* device_data, VkFlags actual, VkFlags desired, VkBool32 strict, uint64_t obj_handle,
+ VulkanObjectType obj_type, const char* msgCode, char const* func_name, char const* usage_str);
+ bool ValidateImageSubresourceRange(const layer_data* device_data, const uint32_t image_mip_count,
+ const uint32_t image_layer_count, const VkImageSubresourceRange& subresourceRange,
+ const char* cmd_name, const char* param_name, const char* image_layer_count_var_name,
+ const uint64_t image_handle, SubresourceRangeErrorCodes errorCodes);
+ void SetImageLayout(layer_data* device_data, GLOBAL_CB_NODE* cb_node, const IMAGE_STATE* image_state,
+ VkImageSubresourceRange image_subresource_range, const VkImageLayout& layout);
+ void SetImageLayout(layer_data* device_data, GLOBAL_CB_NODE* cb_node, const IMAGE_STATE* image_state,
+ VkImageSubresourceLayers image_subresource_layers, const VkImageLayout& layout);
+ bool ValidateRenderPassLayoutAgainstFramebufferImageUsage(layer_data* device_data, RenderPassCreateVersion rp_version,
+ VkImageLayout layout, VkImage image, VkImageView image_view,
+ VkFramebuffer framebuffer, VkRenderPass renderpass,
+ uint32_t attachment_index, const char* variable_name);
+ bool ValidateBufferImageCopyData(const debug_report_data* report_data, uint32_t regionCount, const VkBufferImageCopy* pRegions,
+ IMAGE_STATE* image_state, const char* function);
+ bool ValidateBufferViewRange(const layer_data* device_data, const BUFFER_STATE* buffer_state,
+ const VkBufferViewCreateInfo* pCreateInfo, const VkPhysicalDeviceLimits* device_limits);
+ bool ValidateBufferViewBuffer(const layer_data* device_data, const BUFFER_STATE* buffer_state,
+ const VkBufferViewCreateInfo* pCreateInfo);
bool PreCallValidateCreateImage(VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator,
VkImage* pImage);
@@ -1318,43 +710,36 @@ class CoreChecks : public ValidationStateTracker {
bool PreCallValidateDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator);
- bool ValidateImageAttributes(const IMAGE_STATE* image_state, const VkImageSubresourceRange& range) const;
-
- bool ValidateClearAttachmentExtent(VkCommandBuffer command_buffer, uint32_t attachment_index,
- const FRAMEBUFFER_STATE* framebuffer, uint32_t fb_attachment, const VkRect2D& render_area,
- uint32_t rect_count, const VkClearRect* clear_rects) const;
- bool ValidateImageCopyData(const uint32_t regionCount, const VkImageCopy* ic_regions, const IMAGE_STATE* src_state,
- const IMAGE_STATE* dst_state) const;
-
- bool VerifyClearImageLayout(const CMD_BUFFER_STATE* cb_node, const IMAGE_STATE* image_state,
- const VkImageSubresourceRange& range, VkImageLayout dest_image_layout, const char* func_name) const;
+ bool ValidateImageAttributes(layer_data* device_data, IMAGE_STATE* image_state, VkImageSubresourceRange range);
- bool VerifyImageLayout(const CMD_BUFFER_STATE* cb_node, const IMAGE_STATE* image_state, const VkImageSubresourceRange& range,
- VkImageAspectFlags view_aspect, VkImageLayout explicit_layout, VkImageLayout optimal_layout,
- const char* caller, const char* layout_invalid_msg_code, const char* layout_mismatch_msg_code,
- bool* error) const;
+ bool ValidateClearAttachmentExtent(layer_data* device_data, VkCommandBuffer command_buffer, uint32_t attachment_index,
+ FRAMEBUFFER_STATE* framebuffer, uint32_t fb_attachment, const VkRect2D& render_area,
+ uint32_t rect_count, const VkClearRect* clear_rects);
+ bool ValidateImageCopyData(const layer_data* device_data, const debug_report_data* report_data, const uint32_t regionCount,
+ const VkImageCopy* ic_regions, const IMAGE_STATE* src_state, const IMAGE_STATE* dst_state);
- bool VerifyImageLayout(const CMD_BUFFER_STATE* cb_node, const IMAGE_STATE* image_state, const VkImageSubresourceRange& range,
- VkImageLayout explicit_layout, VkImageLayout optimal_layout, const char* caller,
- const char* layout_invalid_msg_code, const char* layout_mismatch_msg_code, bool* error) const {
- return VerifyImageLayout(cb_node, image_state, range, 0, explicit_layout, optimal_layout, caller, layout_invalid_msg_code,
- layout_mismatch_msg_code, error);
- }
+ bool VerifyClearImageLayout(layer_data* device_data, GLOBAL_CB_NODE* cb_node, IMAGE_STATE* image_state,
+ VkImageSubresourceRange range, VkImageLayout dest_image_layout, const char* func_name);
- bool VerifyImageLayout(const CMD_BUFFER_STATE* cb_node, const IMAGE_STATE* image_state,
- const VkImageSubresourceLayers& subLayers, VkImageLayout explicit_layout, VkImageLayout optimal_layout,
+ bool VerifyImageLayout(layer_data const* device_data, GLOBAL_CB_NODE const* cb_node, IMAGE_STATE* image_state,
+ VkImageSubresourceLayers subLayers, VkImageLayout explicit_layout, VkImageLayout optimal_layout,
const char* caller, const char* layout_invalid_msg_code, const char* layout_mismatch_msg_code,
- bool* error) const;
+ bool* error);
- bool CheckItgExtent(const CMD_BUFFER_STATE* cb_node, const VkExtent3D* extent, const VkOffset3D* offset,
+ bool CheckItgExtent(layer_data* device_data, const GLOBAL_CB_NODE* cb_node, const VkExtent3D* extent, const VkOffset3D* offset,
const VkExtent3D* granularity, const VkExtent3D* subresource_extent, const VkImageType image_type,
- const uint32_t i, const char* function, const char* member, const char* vuid) const;
+ const uint32_t i, const char* function, const char* member, const char* vuid);
+
+ bool CheckItgOffset(layer_data* device_data, const GLOBAL_CB_NODE* cb_node, const VkOffset3D* offset,
+ const VkExtent3D* granularity, const uint32_t i, const char* function, const char* member,
+ const char* vuid);
+ VkExtent3D GetScaledItg(layer_data* device_data, const GLOBAL_CB_NODE* cb_node, const IMAGE_STATE* img);
+ bool CopyImageMultiplaneValidation(const layer_data* dev_data, VkCommandBuffer command_buffer,
+ const IMAGE_STATE* src_image_state, const IMAGE_STATE* dst_image_state,
+ const VkImageCopy region);
- bool CheckItgOffset(const CMD_BUFFER_STATE* cb_node, const VkOffset3D* offset, const VkExtent3D* granularity, const uint32_t i,
- const char* function, const char* member, const char* vuid) const;
- VkExtent3D GetScaledItg(const CMD_BUFFER_STATE* cb_node, const IMAGE_STATE* img) const;
- bool CopyImageMultiplaneValidation(VkCommandBuffer command_buffer, const IMAGE_STATE* src_image_state,
- const IMAGE_STATE* dst_image_state, const VkImageCopy region) const;
+ void RecordClearImageLayout(layer_data* dev_data, GLOBAL_CB_NODE* cb_node, VkImage image, VkImageSubresourceRange range,
+ VkImageLayout dest_image_layout);
bool PreCallValidateCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
const VkClearColorValue* pColor, uint32_t rangeCount,
@@ -1372,53 +757,62 @@ class CoreChecks : public ValidationStateTracker {
const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount,
const VkImageSubresourceRange* pRanges);
- bool FindLayoutVerifyLayout(ImageSubresourcePair imgpair, VkImageLayout& layout, const VkImageAspectFlags aspectMask);
+ bool FindLayoutVerifyNode(layer_data const* device_data, GLOBAL_CB_NODE const* pCB, ImageSubresourcePair imgpair,
+ IMAGE_CMD_BUF_LAYOUT_NODE& node, const VkImageAspectFlags aspectMask);
- bool FindGlobalLayout(ImageSubresourcePair imgpair, VkImageLayout& layout);
+ bool FindLayoutVerifyLayout(layer_data const* device_data, ImageSubresourcePair imgpair, VkImageLayout& layout,
+ const VkImageAspectFlags aspectMask);
- bool FindLayouts(VkImage image, std::vector<VkImageLayout>& layouts);
+ bool FindCmdBufLayout(layer_data const* device_data, GLOBAL_CB_NODE const* pCB, VkImage image, VkImageSubresource range,
+ IMAGE_CMD_BUF_LAYOUT_NODE& node);
- bool FindLayout(const ImageSubresPairLayoutMap& imageLayoutMap, ImageSubresourcePair imgpair, VkImageLayout& layout) const;
+ bool FindGlobalLayout(layer_data* device_data, ImageSubresourcePair imgpair, VkImageLayout& layout);
- static bool FindLayout(const ImageSubresPairLayoutMap& imageLayoutMap, ImageSubresourcePair imgpair, VkImageLayout& layout,
- const VkImageAspectFlags aspectMask);
+ bool FindLayouts(layer_data* device_data, VkImage image, std::vector<VkImageLayout>& layouts);
- void SetGlobalLayout(ImageSubresourcePair imgpair, const VkImageLayout& layout);
+ bool FindLayout(layer_data* device_data, const std::unordered_map<ImageSubresourcePair, IMAGE_LAYOUT_NODE>& imageLayoutMap,
+ ImageSubresourcePair imgpair, VkImageLayout& layout);
- void SetImageViewLayout(CMD_BUFFER_STATE* cb_node, const IMAGE_VIEW_STATE& view_state, VkImageLayout layout);
- void SetImageViewInitialLayout(CMD_BUFFER_STATE* cb_node, const IMAGE_VIEW_STATE& view_state, VkImageLayout layout);
+ bool FindLayout(const std::unordered_map<ImageSubresourcePair, IMAGE_LAYOUT_NODE>& imageLayoutMap, ImageSubresourcePair imgpair,
+ VkImageLayout& layout, const VkImageAspectFlags aspectMask);
- void SetImageInitialLayout(CMD_BUFFER_STATE* cb_node, VkImage image, const VkImageSubresourceRange& range,
- VkImageLayout layout);
- void SetImageInitialLayout(CMD_BUFFER_STATE* cb_node, const IMAGE_STATE& image_state, const VkImageSubresourceRange& range,
- VkImageLayout layout);
- void SetImageInitialLayout(CMD_BUFFER_STATE* cb_node, const IMAGE_STATE& image_state, const VkImageSubresourceLayers& layers,
- VkImageLayout layout);
+ void SetGlobalLayout(layer_data* device_data, ImageSubresourcePair imgpair, const VkImageLayout& layout);
- bool VerifyFramebufferAndRenderPassLayouts(RenderPassCreateVersion rp_version, const CMD_BUFFER_STATE* pCB,
+ void SetImageViewLayout(layer_data* device_data, GLOBAL_CB_NODE* pCB, VkImageView imageView, const VkImageLayout& layout);
+
+ void SetImageViewLayout(layer_data* device_data, GLOBAL_CB_NODE* cb_node, IMAGE_VIEW_STATE* view_state,
+ const VkImageLayout& layout);
+
+ bool VerifyFramebufferAndRenderPassLayouts(layer_data* dev_data, RenderPassCreateVersion rp_version, GLOBAL_CB_NODE* pCB,
const VkRenderPassBeginInfo* pRenderPassBegin,
- const FRAMEBUFFER_STATE* framebuffer_state) const;
- void RecordCmdBeginRenderPassLayouts(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin,
- const VkSubpassContents contents);
- void TransitionAttachmentRefLayout(CMD_BUFFER_STATE* pCB, FRAMEBUFFER_STATE* pFramebuffer,
+ const FRAMEBUFFER_STATE* framebuffer_state);
+
+ void TransitionAttachmentRefLayout(layer_data* dev_data, GLOBAL_CB_NODE* pCB, FRAMEBUFFER_STATE* pFramebuffer,
const safe_VkAttachmentReference2KHR& ref);
- void TransitionSubpassLayouts(CMD_BUFFER_STATE*, const RENDER_PASS_STATE*, const int, FRAMEBUFFER_STATE*);
+ void TransitionSubpassLayouts(layer_data*, GLOBAL_CB_NODE*, const RENDER_PASS_STATE*, const int, FRAMEBUFFER_STATE*);
- void TransitionBeginRenderPassLayouts(CMD_BUFFER_STATE*, const RENDER_PASS_STATE*, FRAMEBUFFER_STATE*);
+ void TransitionBeginRenderPassLayouts(layer_data*, GLOBAL_CB_NODE*, const RENDER_PASS_STATE*, FRAMEBUFFER_STATE*);
- bool ValidateBarrierLayoutToImageUsage(const VkImageMemoryBarrier& img_barrier, bool new_not_old, VkImageUsageFlags usage,
- const char* func_name, const char* barrier_pname);
+ bool ValidateImageAspectLayout(layer_data* device_data, GLOBAL_CB_NODE const* pCB, const VkImageMemoryBarrier* mem_barrier,
+ uint32_t level, uint32_t layer, VkImageAspectFlags aspect);
- bool ValidateBarriersToImages(CMD_BUFFER_STATE const* cb_state, uint32_t imageMemoryBarrierCount,
+ void TransitionImageAspectLayout(layer_data* device_data, GLOBAL_CB_NODE* pCB, const VkImageMemoryBarrier* mem_barrier,
+ uint32_t level, uint32_t layer, VkImageAspectFlags aspect_mask, VkImageAspectFlags aspect);
+
+ bool ValidateBarrierLayoutToImageUsage(layer_data* device_data, const VkImageMemoryBarrier* img_barrier, bool new_not_old,
+ VkImageUsageFlags usage, const char* func_name);
+
+ bool ValidateBarriersToImages(layer_data* device_data, GLOBAL_CB_NODE const* cb_state, uint32_t imageMemoryBarrierCount,
const VkImageMemoryBarrier* pImageMemoryBarriers, const char* func_name);
- void RecordQueuedQFOTransfers(CMD_BUFFER_STATE* pCB);
- void EraseQFOImageRelaseBarriers(const VkImage& image);
+ void RecordQueuedQFOTransfers(layer_data* dev_data, GLOBAL_CB_NODE* pCB);
+ void EraseQFOImageRelaseBarriers(layer_data* device_data, const VkImage& image);
- void TransitionImageLayouts(CMD_BUFFER_STATE* cb_state, uint32_t memBarrierCount, const VkImageMemoryBarrier* pImgMemBarriers);
+ void TransitionImageLayouts(layer_data* device_data, GLOBAL_CB_NODE* cb_state, uint32_t memBarrierCount,
+ const VkImageMemoryBarrier* pImgMemBarriers);
- void TransitionFinalSubpassLayouts(CMD_BUFFER_STATE* pCB, const VkRenderPassBeginInfo* pRenderPassBegin,
+ void TransitionFinalSubpassLayouts(layer_data* dev_data, GLOBAL_CB_NODE* pCB, const VkRenderPassBeginInfo* pRenderPassBegin,
FRAMEBUFFER_STATE* framebuffer_state);
bool PreCallValidateCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
@@ -1427,13 +821,15 @@ class CoreChecks : public ValidationStateTracker {
bool PreCallValidateCmdClearAttachments(VkCommandBuffer commandBuffer, uint32_t attachmentCount,
const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects);
- void PreCallRecordCmdClearAttachments(VkCommandBuffer commandBuffer, uint32_t attachmentCount,
- const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects);
bool PreCallValidateCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
const VkImageResolve* pRegions);
+ void PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
+ VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
+ const VkImageResolve* pRegions);
+
bool PreCallValidateCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
const VkImageBlit* pRegions, VkFilter filter);
@@ -1442,83 +838,110 @@ class CoreChecks : public ValidationStateTracker {
VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit* pRegions,
VkFilter filter);
- bool ValidateCmdBufImageLayouts(const CMD_BUFFER_STATE* pCB, const ImageSubresPairLayoutMap& globalImageLayoutMap,
- ImageSubresPairLayoutMap* overlayLayoutMap_arg) const;
+ bool ValidateCmdBufImageLayouts(layer_data* device_data, GLOBAL_CB_NODE* pCB,
+ std::unordered_map<ImageSubresourcePair, IMAGE_LAYOUT_NODE> const& globalImageLayoutMap,
+ std::unordered_map<ImageSubresourcePair, IMAGE_LAYOUT_NODE>& overlayLayoutMap);
- void UpdateCmdBufImageLayouts(CMD_BUFFER_STATE* pCB);
+ void UpdateCmdBufImageLayouts(layer_data* device_data, GLOBAL_CB_NODE* pCB);
- bool VerifyBoundMemoryIsValid(VkDeviceMemory mem, const VulkanTypedHandle& typed_handle, const char* api_name,
- const char* error_code) const;
+ bool ValidateMaskBitsFromLayouts(layer_data* device_data, VkCommandBuffer cmdBuffer, const VkAccessFlags& accessMask,
+ const VkImageLayout& layout, const char* type);
bool ValidateLayoutVsAttachmentDescription(const debug_report_data* report_data, RenderPassCreateVersion rp_version,
const VkImageLayout first_layout, const uint32_t attachment,
- const VkAttachmentDescription2KHR& attachment_description) const;
+ const VkAttachmentDescription2KHR& attachment_description);
- bool ValidateLayouts(RenderPassCreateVersion rp_version, VkDevice device, const VkRenderPassCreateInfo2KHR* pCreateInfo) const;
+ bool ValidateLayouts(layer_data* dev_data, RenderPassCreateVersion rp_version, VkDevice device,
+ const VkRenderPassCreateInfo2KHR* pCreateInfo);
- bool ValidateImageUsageFlags(IMAGE_STATE const* image_state, VkFlags desired, bool strict, const char* msgCode,
- char const* func_name, char const* usage_string) const;
+ bool ValidateMapImageLayouts(layer_data* dev_data, VkDevice device, DEVICE_MEM_INFO const* mem_info, VkDeviceSize offset,
+ VkDeviceSize end_offset);
- bool ValidateImageFormatFeatureFlags(IMAGE_STATE const* image_state, VkFormatFeatureFlags desired, char const* func_name,
- const char* linear_vuid, const char* optimal_vuid) const;
+ bool ValidateImageUsageFlags(layer_data* dev_data, IMAGE_STATE const* image_state, VkFlags desired, bool strict,
+ const char* msgCode, char const* func_name, char const* usage_string);
- bool ValidateImageSubresourceLayers(const CMD_BUFFER_STATE* cb_node, const VkImageSubresourceLayers* subresource_layers,
- char const* func_name, char const* member, uint32_t i) const;
+ bool ValidateImageFormatFeatureFlags(layer_data* dev_data, IMAGE_STATE const* image_state, VkFormatFeatureFlags desired,
+ char const* func_name, const char* linear_vuid, const char* optimal_vuid);
- bool ValidateBufferUsageFlags(BUFFER_STATE const* buffer_state, VkFlags desired, bool strict, const char* msgCode,
- char const* func_name, char const* usage_string) const;
+ bool ValidateImageSubresourceLayers(layer_data* dev_data, const GLOBAL_CB_NODE* cb_node,
+ const VkImageSubresourceLayers* subresource_layers, char const* func_name,
+ char const* member, uint32_t i);
+
+ bool ValidateBufferUsageFlags(const layer_data* dev_data, BUFFER_STATE const* buffer_state, VkFlags desired, bool strict,
+ const char* msgCode, char const* func_name, char const* usage_string);
bool PreCallValidateCreateBuffer(VkDevice device, const VkBufferCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer);
+ void PostCallRecordCreateBuffer(VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator,
+ VkBuffer* pBuffer, VkResult result);
+
bool PreCallValidateCreateBufferView(VkDevice device, const VkBufferViewCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator, VkBufferView* pView);
- bool ValidateImageAspectMask(VkImage image, VkFormat format, VkImageAspectFlags aspect_mask, const char* func_name,
- const char* vuid = "VUID-VkImageSubresource-aspectMask-parameter") const;
+ void PostCallRecordCreateBufferView(VkDevice device, const VkBufferViewCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator, VkBufferView* pView, VkResult result);
- bool ValidateCreateImageViewSubresourceRange(const IMAGE_STATE* image_state, bool is_imageview_2d_type,
- const VkImageSubresourceRange& subresourceRange);
+ bool ValidateImageAspectMask(const layer_data* device_data, VkImage image, VkFormat format, VkImageAspectFlags aspect_mask,
+ const char* func_name, const char* vuid = "VUID-VkImageSubresource-aspectMask-parameter");
- bool ValidateCmdClearColorSubresourceRange(const IMAGE_STATE* image_state, const VkImageSubresourceRange& subresourceRange,
- const char* param_name) const;
+ bool ValidateCreateImageViewSubresourceRange(const layer_data* device_data, const IMAGE_STATE* image_state,
+ bool is_imageview_2d_type, const VkImageSubresourceRange& subresourceRange);
- bool ValidateCmdClearDepthSubresourceRange(const IMAGE_STATE* image_state, const VkImageSubresourceRange& subresourceRange,
- const char* param_name) const;
+ bool ValidateCmdClearColorSubresourceRange(const layer_data* device_data, const IMAGE_STATE* image_state,
+ const VkImageSubresourceRange& subresourceRange, const char* param_name);
- bool ValidateImageBarrierSubresourceRange(const IMAGE_STATE* image_state, const VkImageSubresourceRange& subresourceRange,
- const char* cmd_name, const char* param_name);
+ bool ValidateCmdClearDepthSubresourceRange(const layer_data* device_data, const IMAGE_STATE* image_state,
+ const VkImageSubresourceRange& subresourceRange, const char* param_name);
+
+ bool ValidateImageBarrierSubresourceRange(const layer_data* device_data, const IMAGE_STATE* image_state,
+ const VkImageSubresourceRange& subresourceRange, const char* cmd_name,
+ const char* param_name);
bool PreCallValidateCreateImageView(VkDevice device, const VkImageViewCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator, VkImageView* pView);
- bool ValidateCopyBufferImageTransferGranularityRequirements(const CMD_BUFFER_STATE* cb_node, const IMAGE_STATE* img,
- const VkBufferImageCopy* region, const uint32_t i,
- const char* function, const char* vuid) const;
+ void PostCallRecordCreateImageView(VkDevice device, const VkImageViewCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator, VkImageView* pView, VkResult result);
+
+ bool ValidateCopyBufferImageTransferGranularityRequirements(layer_data* device_data, const GLOBAL_CB_NODE* cb_node,
+ const IMAGE_STATE* img, const VkBufferImageCopy* region,
+ const uint32_t i, const char* function, const char* vuid);
- bool ValidateImageMipLevel(const CMD_BUFFER_STATE* cb_node, const IMAGE_STATE* img, uint32_t mip_level, const uint32_t i,
- const char* function, const char* member, const char* vuid) const;
+ bool ValidateImageMipLevel(layer_data* device_data, const GLOBAL_CB_NODE* cb_node, const IMAGE_STATE* img, uint32_t mip_level,
+ const uint32_t i, const char* function, const char* member, const char* vuid);
- bool ValidateImageArrayLayerRange(const CMD_BUFFER_STATE* cb_node, const IMAGE_STATE* img, const uint32_t base_layer,
- const uint32_t layer_count, const uint32_t i, const char* function, const char* member,
- const char* vuid) const;
+ bool ValidateImageArrayLayerRange(layer_data* device_data, const GLOBAL_CB_NODE* cb_node, const IMAGE_STATE* img,
+ const uint32_t base_layer, const uint32_t layer_count, const uint32_t i, const char* function,
+ const char* member, const char* vuid);
void PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy* pRegions);
bool PreCallValidateCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount,
const VkBufferCopy* pRegions);
+
+ void PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount,
+ const VkBufferCopy* pRegions);
+
bool PreCallValidateDestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks* pAllocator);
+ void PreCallRecordDestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks* pAllocator);
+
bool PreCallValidateDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator);
void PreCallRecordDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator);
bool PreCallValidateDestroyBufferView(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* pAllocator);
+ void PreCallRecordDestroyBufferView(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* pAllocator);
+
bool PreCallValidateCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size,
uint32_t data);
+ void PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size,
+ uint32_t data);
+
bool PreCallValidateCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions);
@@ -1533,15 +956,21 @@ class CoreChecks : public ValidationStateTracker {
bool PreCallValidateGetImageSubresourceLayout(VkDevice device, VkImage image, const VkImageSubresource* pSubresource,
VkSubresourceLayout* pLayout);
- bool ValidateCreateImageANDROID(const debug_report_data* report_data, const VkImageCreateInfo* create_info);
- bool ValidateCreateImageViewANDROID(const VkImageViewCreateInfo* create_info);
- bool ValidateGetImageSubresourceLayoutANDROID(const VkImage image) const;
- bool ValidateQueueFamilies(uint32_t queue_family_count, const uint32_t* queue_families, const char* cmd_name,
- const char* array_parameter_name, const char* unique_error_code, const char* valid_error_code,
- bool optional) const;
- bool ValidateAllocateMemoryANDROID(const VkMemoryAllocateInfo* alloc_info) const;
- bool ValidateGetImageMemoryRequirements2ANDROID(const VkImage image) const;
- bool ValidateCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo* create_info) const;
+ bool ValidateCreateImageANDROID(layer_data* device_data, const debug_report_data* report_data,
+ const VkImageCreateInfo* create_info);
+ void RecordCreateImageANDROID(const VkImageCreateInfo* create_info, IMAGE_STATE* is_node);
+ bool ValidateCreateImageViewANDROID(layer_data* device_data, const VkImageViewCreateInfo* create_info);
+ bool ValidateGetImageSubresourceLayoutANDROID(layer_data* device_data, const VkImage image);
+ bool ValidateQueueFamilies(layer_data* device_data, uint32_t queue_family_count, const uint32_t* queue_families,
+ const char* cmd_name, const char* array_parameter_name, const char* unique_error_code,
+ const char* valid_error_code, bool optional);
+ bool ValidateAllocateMemoryANDROID(layer_data* dev_data, const VkMemoryAllocateInfo* alloc_info);
+ bool ValidateGetImageMemoryRequirements2ANDROID(layer_data* dev_data, const VkImage image);
+ bool ValidateCreateSamplerYcbcrConversionANDROID(const layer_data* dev_data,
+ const VkSamplerYcbcrConversionCreateInfo* create_info);
+ void RecordCreateSamplerYcbcrConversionANDROID(layer_data* dev_data, const VkSamplerYcbcrConversionCreateInfo* create_info,
+ VkSamplerYcbcrConversion ycbcr_conversion);
+ void RecordDestroySamplerYcbcrConversionANDROID(layer_data* dev_data, VkSamplerYcbcrConversion ycbcr_conversion);
bool PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
const VkGraphicsPipelineCreateInfo* pCreateInfos,
const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, void* cgpl_state);
@@ -1555,25 +984,10 @@ class CoreChecks : public ValidationStateTracker {
bool PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
const VkComputePipelineCreateInfo* pCreateInfos,
const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, void* pipe_state);
- void PreCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
- const VkComputePipelineCreateInfo* pCreateInfos,
- const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines,
- void* ccpl_state_data);
void PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
const VkComputePipelineCreateInfo* pCreateInfos,
const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, VkResult result,
void* pipe_state);
- bool PreCallValidateGetPipelineExecutablePropertiesKHR(VkDevice device, const VkPipelineInfoKHR* pPipelineInfo,
- uint32_t* pExecutableCount,
- VkPipelineExecutablePropertiesKHR* pProperties);
- bool ValidatePipelineExecutableInfo(VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo) const;
- bool PreCallValidateGetPipelineExecutableStatisticsKHR(VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo,
- uint32_t* pStatisticCount,
- VkPipelineExecutableStatisticKHR* pStatistics);
- bool PreCallValidateGetPipelineExecutableInternalRepresentationsKHR(VkDevice device,
- const VkPipelineExecutableInfoKHR* pExecutableInfo,
- uint32_t* pInternalRepresentationCount,
- VkPipelineExecutableInternalRepresentationKHR* pStatistics);
bool PreCallValidateCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout);
void PreCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo,
@@ -1584,163 +998,229 @@ class CoreChecks : public ValidationStateTracker {
VkResult result);
bool PreCallValidateAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo,
VkDescriptorSet* pDescriptorSets, void* ads_state);
+ void PostCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo,
+ VkDescriptorSet* pDescriptorSets, VkResult result, void* ads_state);
bool PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
const VkRayTracingPipelineCreateInfoNV* pCreateInfos,
const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines,
void* pipe_state);
- void PreCallRecordCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
- const VkRayTracingPipelineCreateInfoNV* pCreateInfos,
- const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines,
- void* crtpl_state_data);
void PostCallRecordCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
const VkRayTracingPipelineCreateInfoNV* pCreateInfos,
const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, VkResult result,
- void* crtpl_state_data);
- void PreCallRecordCmdTraceRaysNV(VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer,
- VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer,
- VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride,
- VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset,
- VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer,
- VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride,
- uint32_t width, uint32_t height, uint32_t depth);
- void PostCallRecordCmdTraceRaysNV(VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer,
- VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer,
- VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride,
- VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset,
- VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer,
- VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride,
- uint32_t width, uint32_t height, uint32_t depth);
+ void* pipe_state);
void PostCallRecordCreateInstance(const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator,
VkInstance* pInstance, VkResult result);
bool PreCallValidateCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator, VkDevice* pDevice);
void PreCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator, VkDevice* pDevice,
- safe_VkDeviceCreateInfo* modified_create_info);
+ std::unique_ptr<safe_VkDeviceCreateInfo>& modified_create_info);
void PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator, VkDevice* pDevice, VkResult result);
bool PreCallValidateCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
VkDeviceSize dataSize, const void* pData);
+ void PostCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
+ VkDeviceSize dataSize, const void* pData);
+ void PostCallRecordCreateFence(VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator,
+ VkFence* pFence, VkResult result);
bool PreCallValidateGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue);
+ void PostCallRecordGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue);
+ void PostCallRecordGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2* pQueueInfo, VkQueue* pQueue);
bool PreCallValidateCreateSamplerYcbcrConversion(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkSamplerYcbcrConversion* pYcbcrConversion);
+ void PostCallRecordCreateSamplerYcbcrConversion(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator,
+ VkSamplerYcbcrConversion* pYcbcrConversion, VkResult result);
bool PreCallValidateCreateSamplerYcbcrConversionKHR(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkSamplerYcbcrConversion* pYcbcrConversion);
+ void PostCallRecordCreateSamplerYcbcrConversionKHR(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator,
+ VkSamplerYcbcrConversion* pYcbcrConversion, VkResult result);
bool PreCallValidateCmdDebugMarkerBeginEXT(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo);
void PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks* pAllocator);
bool PreCallValidateQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence);
- void PreCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence);
void PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence,
VkResult result);
bool PreCallValidateAllocateMemory(VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo,
const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory);
+ void PostCallRecordAllocateMemory(VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo,
+ const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory, VkResult result);
bool PreCallValidateFreeMemory(VkDevice device, VkDeviceMemory mem, const VkAllocationCallbacks* pAllocator);
+ void PreCallRecordFreeMemory(VkDevice device, VkDeviceMemory mem, const VkAllocationCallbacks* pAllocator);
bool PreCallValidateWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll,
uint64_t timeout);
void PostCallRecordWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll,
uint64_t timeout, VkResult result);
+ bool PreCallValidateGetFenceStatus(VkDevice device, VkFence fence);
void PostCallRecordGetFenceStatus(VkDevice device, VkFence fence, VkResult result);
bool PreCallValidateQueueWaitIdle(VkQueue queue);
void PostCallRecordQueueWaitIdle(VkQueue queue, VkResult result);
bool PreCallValidateDeviceWaitIdle(VkDevice device);
void PostCallRecordDeviceWaitIdle(VkDevice device, VkResult result);
bool PreCallValidateDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator);
+ void PreCallRecordDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator);
bool PreCallValidateDestroySemaphore(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator);
+ void PreCallRecordDestroySemaphore(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator);
bool PreCallValidateDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks* pAllocator);
+ void PreCallRecordDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks* pAllocator);
bool PreCallValidateDestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator);
- bool ValidateGetQueryPoolResultsFlags(VkQueryPool queryPool, VkQueryResultFlags flags) const;
- bool ValidateGetQueryPoolResultsQueries(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount) const;
+ void PreCallRecordDestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator);
bool PreCallValidateGetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount,
size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags);
+ void PostCallRecordGetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount,
+ size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags,
+ VkResult result);
bool PreCallValidateBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfoKHR* pBindInfos);
+ void PostCallRecordBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfoKHR* pBindInfos,
+ VkResult result);
bool PreCallValidateBindBufferMemory2(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfoKHR* pBindInfos);
+ void PostCallRecordBindBufferMemory2(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfoKHR* pBindInfos,
+ VkResult result);
bool PreCallValidateBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset);
+ void PostCallRecordBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset,
+ VkResult result);
+ void PostCallRecordGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer, VkMemoryRequirements* pMemoryRequirements);
+ void PostCallRecordGetBufferMemoryRequirements2(VkDevice device, const VkBufferMemoryRequirementsInfo2KHR* pInfo,
+ VkMemoryRequirements2KHR* pMemoryRequirements);
+ void PostCallRecordGetBufferMemoryRequirements2KHR(VkDevice device, const VkBufferMemoryRequirementsInfo2KHR* pInfo,
+ VkMemoryRequirements2KHR* pMemoryRequirements);
bool PreCallValidateGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo,
VkMemoryRequirements2* pMemoryRequirements);
bool PreCallValidateGetImageMemoryRequirements2KHR(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo,
VkMemoryRequirements2* pMemoryRequirements);
+ void PostCallRecordGetImageMemoryRequirements(VkDevice device, VkImage image, VkMemoryRequirements* pMemoryRequirements);
+ void PostCallRecordGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo,
+ VkMemoryRequirements2* pMemoryRequirements);
+ void PostCallRecordGetImageMemoryRequirements2KHR(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo,
+ VkMemoryRequirements2* pMemoryRequirements);
+ void PostCallRecordGetImageSparseMemoryRequirements(VkDevice device, VkImage image, uint32_t* pSparseMemoryRequirementCount,
+ VkSparseImageMemoryRequirements* pSparseMemoryRequirements);
+ void PostCallRecordGetImageSparseMemoryRequirements2(VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR* pInfo,
+ uint32_t* pSparseMemoryRequirementCount,
+ VkSparseImageMemoryRequirements2KHR* pSparseMemoryRequirements);
+ void PostCallRecordGetImageSparseMemoryRequirements2KHR(VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR* pInfo,
+ uint32_t* pSparseMemoryRequirementCount,
+ VkSparseImageMemoryRequirements2KHR* pSparseMemoryRequirements);
bool PreCallValidateGetPhysicalDeviceImageFormatProperties2(VkPhysicalDevice physicalDevice,
const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
VkImageFormatProperties2* pImageFormatProperties);
bool PreCallValidateGetPhysicalDeviceImageFormatProperties2KHR(VkPhysicalDevice physicalDevice,
const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
VkImageFormatProperties2* pImageFormatProperties);
+ void PreCallRecordDestroyShaderModule(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* pAllocator);
bool PreCallValidateDestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator);
void PreCallRecordDestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator);
+ void PreCallRecordDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout,
+ const VkAllocationCallbacks* pAllocator);
bool PreCallValidateDestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks* pAllocator);
+ void PreCallRecordDestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks* pAllocator);
+ void PreCallRecordDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
+ const VkAllocationCallbacks* pAllocator);
bool PreCallValidateDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
const VkAllocationCallbacks* pAllocator);
+ void PreCallRecordDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
+ const VkAllocationCallbacks* pAllocator);
bool PreCallValidateFreeCommandBuffers(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount,
const VkCommandBuffer* pCommandBuffers);
+ void PreCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount,
+ const VkCommandBuffer* pCommandBuffers);
bool PreCallValidateCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool);
+ void PostCallRecordCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool, VkResult result);
bool PreCallValidateCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool);
+ void PostCallRecordCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool, VkResult result);
bool PreCallValidateDestroyCommandPool(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator);
+ void PreCallRecordDestroyCommandPool(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator);
bool PreCallValidateResetCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags);
+ void PostCallRecordResetCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags, VkResult result);
bool PreCallValidateResetFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences);
void PostCallRecordResetFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkResult result);
bool PreCallValidateDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* pAllocator);
+ void PreCallRecordDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* pAllocator);
bool PreCallValidateDestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator);
+ void PreCallRecordDestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator);
+ void PostCallRecordCreateSampler(VkDevice device, const VkSamplerCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator, VkSampler* pSampler, VkResult result);
bool PreCallValidateCreateDescriptorSetLayout(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout);
+ void PostCallRecordCreateDescriptorSetLayout(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout,
+ VkResult result);
+ void PostCallRecordCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool,
+ VkResult result);
bool PreCallValidateResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags);
+ void PostCallRecordResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags,
+ VkResult result);
bool PreCallValidateFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count,
const VkDescriptorSet* pDescriptorSets);
+ void PreCallRecordFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count,
+ const VkDescriptorSet* pDescriptorSets);
bool PreCallValidateUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount,
const VkCopyDescriptorSet* pDescriptorCopies);
+ void PreCallRecordUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
+ const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount,
+ const VkCopyDescriptorSet* pDescriptorCopies);
+ void PostCallRecordAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo* pCreateInfo,
+ VkCommandBuffer* pCommandBuffer, VkResult result);
bool PreCallValidateBeginCommandBuffer(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo);
+ void PreCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo);
bool PreCallValidateEndCommandBuffer(VkCommandBuffer commandBuffer);
+ void PostCallRecordEndCommandBuffer(VkCommandBuffer commandBuffer, VkResult result);
bool PreCallValidateResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags);
+ void PostCallRecordResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags, VkResult result);
bool PreCallValidateCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline);
+ void PreCallRecordCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline);
bool PreCallValidateCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount,
const VkViewport* pViewports);
+ void PreCallRecordCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount,
+ const VkViewport* pViewports);
bool PreCallValidateCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount,
const VkRect2D* pScissors);
+ void PreCallRecordCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount,
+ const VkRect2D* pScissors);
bool PreCallValidateCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor,
uint32_t exclusiveScissorCount, const VkRect2D* pExclusiveScissors);
+ void PreCallRecordCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor,
+ uint32_t exclusiveScissorCount, const VkRect2D* pExclusiveScissors);
bool PreCallValidateCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout);
+ void PreCallRecordCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout);
bool PreCallValidateCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
uint32_t viewportCount,
const VkShadingRatePaletteNV* pShadingRatePalettes);
- bool ValidateGeometryTrianglesNV(const VkGeometryTrianglesNV& triangles, VkDebugReportObjectTypeEXT object_type,
- uint64_t object_handle, const char* func_name) const;
- bool ValidateGeometryAABBNV(const VkGeometryAABBNV& geometry, VkDebugReportObjectTypeEXT object_type, uint64_t object_handle,
- const char* func_name) const;
- bool ValidateGeometryNV(const VkGeometryNV& geometry, VkDebugReportObjectTypeEXT object_type, uint64_t object_handle,
- const char* func_name) const;
- bool PreCallValidateCreateAccelerationStructureNV(VkDevice device, const VkAccelerationStructureCreateInfoNV* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkAccelerationStructureNV* pAccelerationStructure);
- bool PreCallValidateBindAccelerationStructureMemoryNV(VkDevice device, uint32_t bindInfoCount,
- const VkBindAccelerationStructureMemoryInfoNV* pBindInfos);
- bool PreCallValidateGetAccelerationStructureHandleNV(VkDevice device, VkAccelerationStructureNV accelerationStructure,
- size_t dataSize, void* pData);
- bool PreCallValidateCmdBuildAccelerationStructureNV(VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo,
- VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update,
- VkAccelerationStructureNV dst, VkAccelerationStructureNV src,
- VkBuffer scratch, VkDeviceSize scratchOffset);
- bool PreCallValidateCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst,
- VkAccelerationStructureNV src, VkCopyAccelerationStructureModeNV mode);
- bool PreCallValidateDestroyAccelerationStructureNV(VkDevice device, VkAccelerationStructureNV accelerationStructure,
- const VkAllocationCallbacks* pAllocator);
+ void PreCallRecordCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
+ uint32_t viewportCount,
+ const VkShadingRatePaletteNV* pShadingRatePalettes);
bool PreCallValidateCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth);
- bool PreCallValidateCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor,
- uint16_t lineStipplePattern);
+ void PreCallRecordCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth);
bool PreCallValidateCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp,
float depthBiasSlopeFactor);
+ void PreCallRecordCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp,
+ float depthBiasSlopeFactor);
bool PreCallValidateCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]);
+ void PreCallRecordCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]);
bool PreCallValidateCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds);
+ void PreCallRecordCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds);
bool PreCallValidateCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask);
+ void PreCallRecordCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask);
bool PreCallValidateCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask);
+ void PreCallRecordCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask);
bool PreCallValidateCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference);
+ void PreCallRecordCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference);
bool PreCallValidateCmdBindDescriptorSets(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
VkPipelineLayout layout, uint32_t firstSet, uint32_t setCount,
const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount,
const uint32_t* pDynamicOffsets);
+ void PreCallRecordCmdBindDescriptorSets(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
+ VkPipelineLayout layout, uint32_t firstSet, uint32_t setCount,
+ const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount,
+ const uint32_t* pDynamicOffsets);
bool PreCallValidateCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount,
const VkWriteDescriptorSet* pDescriptorWrites);
@@ -1749,31 +1229,45 @@ class CoreChecks : public ValidationStateTracker {
const VkWriteDescriptorSet* pDescriptorWrites);
bool PreCallValidateCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
VkIndexType indexType);
+ void PreCallRecordCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
+ VkIndexType indexType);
bool PreCallValidateCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount,
const VkBuffer* pBuffers, const VkDeviceSize* pOffsets);
+ void PreCallRecordCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount,
+ const VkBuffer* pBuffers, const VkDeviceSize* pOffsets);
bool PreCallValidateCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex,
uint32_t firstInstance);
void PreCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex,
uint32_t firstInstance);
+ void PostCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex,
+ uint32_t firstInstance);
bool PreCallValidateCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance);
void PreCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance);
+ void PostCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
+ uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance);
bool PreCallValidateCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count,
uint32_t stride);
void PreCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count,
uint32_t stride);
+ void PostCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count,
+ uint32_t stride);
bool PreCallValidateCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
uint32_t stride);
bool PreCallValidateCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z);
void PreCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z);
+ void PostCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z);
bool PreCallValidateCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset);
void PreCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset);
+ void PostCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset);
bool PreCallValidateCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count,
uint32_t stride);
void PreCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count,
uint32_t stride);
+ void PostCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count,
+ uint32_t stride);
bool PreCallValidateCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask);
void PreCallRecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask);
bool PreCallValidateCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask);
@@ -1803,19 +1297,20 @@ class CoreChecks : public ValidationStateTracker {
uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers,
uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers,
uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers);
-
- void EnqueueVerifyBeginQuery(VkCommandBuffer, const QueryObject& query_obj);
bool PreCallValidateCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot, VkFlags flags);
- void PreCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot, VkFlags flags);
+ void PostCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot, VkFlags flags);
bool PreCallValidateCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot);
+ void PostCallRecordCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot);
bool PreCallValidateCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
uint32_t queryCount);
+ void PostCallRecordCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
+ uint32_t queryCount);
bool PreCallValidateCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset,
VkDeviceSize stride, VkQueryResultFlags flags);
- void PreCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
- uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride,
- VkQueryResultFlags flags);
+ void PostCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
+ uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride,
+ VkQueryResultFlags flags);
bool PreCallValidateCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags,
uint32_t offset, uint32_t size, const void* pValues);
bool PreCallValidateCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
@@ -1824,11 +1319,17 @@ class CoreChecks : public ValidationStateTracker {
VkQueryPool queryPool, uint32_t slot);
bool PreCallValidateCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer);
+ void PostCallRecordCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer, VkResult result);
bool PreCallValidateCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo* pCreateInfo,
const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass);
+ void PostCallRecordCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass, VkResult result);
bool PreCallValidateGetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory mem, VkDeviceSize* pCommittedMem);
bool PreCallValidateCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR* pCreateInfo,
const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass);
+ void PostCallRecordCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass, VkResult result);
bool PreCallValidateCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin,
VkSubpassContents contents);
void PreCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin,
@@ -1849,6 +1350,8 @@ class CoreChecks : public ValidationStateTracker {
void PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR* pSubpassEndInfo);
bool PreCallValidateCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBuffersCount,
const VkCommandBuffer* pCommandBuffers);
+ void PreCallRecordCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBuffersCount,
+ const VkCommandBuffer* pCommandBuffers);
bool PreCallValidateMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size, VkFlags flags,
void** ppData);
void PostCallRecordMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size, VkFlags flags,
@@ -1861,13 +1364,21 @@ class CoreChecks : public ValidationStateTracker {
void PostCallRecordInvalidateMappedMemoryRanges(VkDevice device, uint32_t memRangeCount, const VkMappedMemoryRange* pMemRanges,
VkResult result);
bool PreCallValidateBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem, VkDeviceSize memoryOffset);
+ void PostCallRecordBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem, VkDeviceSize memoryOffset,
+ VkResult result);
bool PreCallValidateBindImageMemory2(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfoKHR* pBindInfos);
+ void PostCallRecordBindImageMemory2(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfoKHR* pBindInfos,
+ VkResult result);
bool PreCallValidateBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfoKHR* pBindInfos);
+ void PostCallRecordBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfoKHR* pBindInfos,
+ VkResult result);
bool PreCallValidateSetEvent(VkDevice device, VkEvent event);
void PreCallRecordSetEvent(VkDevice device, VkEvent event);
bool PreCallValidateQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence);
void PostCallRecordQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence,
VkResult result);
+ void PostCallRecordCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator, VkSemaphore* pSemaphore, VkResult result);
bool PreCallValidateImportSemaphoreFdKHR(VkDevice device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo);
void PostCallRecordImportSemaphoreFdKHR(VkDevice device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo,
VkResult result);
@@ -1892,8 +1403,12 @@ class CoreChecks : public ValidationStateTracker {
void PostCallRecordGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd, VkResult result);
void PostCallRecordGetFenceFdKHR(VkDevice device, const VkFenceGetFdInfoKHR* pGetFdInfo, int* pFd, VkResult result);
+ void PostCallRecordCreateEvent(VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator,
+ VkEvent* pEvent, VkResult result);
bool PreCallValidateCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo,
const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain);
+ void PostCallRecordCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain, VkResult result);
void PreCallRecordDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator);
bool PreCallValidateGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount,
VkImage* pSwapchainImages);
@@ -1904,34 +1419,108 @@ class CoreChecks : public ValidationStateTracker {
bool PreCallValidateCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
const VkSwapchainCreateInfoKHR* pCreateInfos,
const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains);
+ void PostCallRecordCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
+ const VkSwapchainCreateInfoKHR* pCreateInfos,
+ const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains,
+ VkResult result);
bool PreCallValidateAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore,
VkFence fence, uint32_t* pImageIndex);
bool PreCallValidateAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex);
+ void PostCallRecordAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore,
+ VkFence fence, uint32_t* pImageIndex, VkResult result);
+ void PostCallRecordAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex,
+ VkResult result);
+ void PostCallRecordEnumeratePhysicalDevices(VkInstance instance, uint32_t* pPhysicalDeviceCount,
+ VkPhysicalDevice* pPhysicalDevices, VkResult result);
bool PreCallValidateGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount,
VkQueueFamilyProperties* pQueueFamilyProperties);
+ void PostCallRecordGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount,
+ VkQueueFamilyProperties* pQueueFamilyProperties);
bool PreCallValidateGetPhysicalDeviceQueueFamilyProperties2(VkPhysicalDevice physicalDevice,
uint32_t* pQueueFamilyPropertyCount,
VkQueueFamilyProperties2KHR* pQueueFamilyProperties);
+ void PostCallRecordGetPhysicalDeviceQueueFamilyProperties2(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount,
+ VkQueueFamilyProperties2KHR* pQueueFamilyProperties);
bool PreCallValidateGetPhysicalDeviceQueueFamilyProperties2KHR(VkPhysicalDevice physicalDevice,
uint32_t* pQueueFamilyPropertyCount,
VkQueueFamilyProperties2KHR* pQueueFamilyProperties);
+ void PostCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(VkPhysicalDevice physicalDevice,
+ uint32_t* pQueueFamilyPropertyCount,
+ VkQueueFamilyProperties2KHR* pQueueFamilyProperties);
bool PreCallValidateDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator);
+ void PreCallRecordValidateDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator);
+ void PostCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
+ VkSurfaceCapabilitiesKHR* pSurfaceCapabilities, VkResult result);
+ void PostCallRecordGetPhysicalDeviceSurfaceCapabilities2KHR(VkPhysicalDevice physicalDevice,
+ const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
+ VkSurfaceCapabilities2KHR* pSurfaceCapabilities, VkResult result);
+ void PostCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
+ VkSurfaceCapabilities2EXT* pSurfaceCapabilities, VkResult result);
bool PreCallValidateGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex,
VkSurfaceKHR surface, VkBool32* pSupported);
+ void PostCallRecordGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex,
+ VkSurfaceKHR surface, VkBool32* pSupported, VkResult result);
+ void PostCallRecordGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
+ uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes,
+ VkResult result);
bool PreCallValidateGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
uint32_t* pSurfaceFormatCount, VkSurfaceFormatKHR* pSurfaceFormats);
+ void PostCallRecordGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
+ uint32_t* pSurfaceFormatCount, VkSurfaceFormatKHR* pSurfaceFormats,
+ VkResult result);
+ void PostCallRecordGetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice,
+ const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
+ uint32_t* pSurfaceFormatCount, VkSurfaceFormat2KHR* pSurfaceFormats,
+ VkResult result);
+ void PostCallRecordCreateDisplayPlaneSurfaceKHR(VkInstance instance, const VkDisplaySurfaceCreateInfoKHR* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface,
+ VkResult result);
+ void PreCallRecordQueueBeginDebugUtilsLabelEXT(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo);
+ void PostCallRecordQueueEndDebugUtilsLabelEXT(VkQueue queue);
+ void PreCallRecordQueueInsertDebugUtilsLabelEXT(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo);
+ void PreCallRecordCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo);
+ void PostCallRecordCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer);
+ void PreCallRecordCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo);
+ void PostCallRecordCreateDebugUtilsMessengerEXT(VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator, VkDebugUtilsMessengerEXT* pMessenger,
+ VkResult result);
+ void PostCallRecordDestroyDebugUtilsMessengerEXT(VkInstance instance, VkDebugUtilsMessengerEXT messenger,
+ const VkAllocationCallbacks* pAllocator);
+ void PostCallRecordDestroyDebugReportCallbackEXT(VkInstance instance, VkDebugReportCallbackEXT msgCallback,
+ const VkAllocationCallbacks* pAllocator);
+ void PostCallRecordEnumeratePhysicalDeviceGroups(VkInstance instance, uint32_t* pPhysicalDeviceGroupCount,
+ VkPhysicalDeviceGroupPropertiesKHR* pPhysicalDeviceGroupProperties,
+ VkResult result);
+ void PostCallRecordEnumeratePhysicalDeviceGroupsKHR(VkInstance instance, uint32_t* pPhysicalDeviceGroupCount,
+ VkPhysicalDeviceGroupPropertiesKHR* pPhysicalDeviceGroupProperties,
+ VkResult result);
bool PreCallValidateCreateDescriptorUpdateTemplate(VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkDescriptorUpdateTemplateKHR* pDescriptorUpdateTemplate);
+ void PostCallRecordCreateDescriptorUpdateTemplate(VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator,
+ VkDescriptorUpdateTemplateKHR* pDescriptorUpdateTemplate, VkResult result);
bool PreCallValidateCreateDescriptorUpdateTemplateKHR(VkDevice device,
const VkDescriptorUpdateTemplateCreateInfoKHR* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkDescriptorUpdateTemplateKHR* pDescriptorUpdateTemplate);
+ void PostCallRecordCreateDescriptorUpdateTemplateKHR(VkDevice device,
+ const VkDescriptorUpdateTemplateCreateInfoKHR* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator,
+ VkDescriptorUpdateTemplateKHR* pDescriptorUpdateTemplate, VkResult result);
+ void PreCallRecordDestroyDescriptorUpdateTemplate(VkDevice device, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
+ const VkAllocationCallbacks* pAllocator);
+ void PreCallRecordDestroyDescriptorUpdateTemplateKHR(VkDevice device, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
+ const VkAllocationCallbacks* pAllocator);
bool PreCallValidateUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData);
+ void PreCallRecordUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
+ VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData);
bool PreCallValidateUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet,
VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
const void* pData);
+ void PreCallRecordUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet,
+ VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, const void* pData);
bool PreCallValidateCmdPushDescriptorSetWithTemplateKHR(VkCommandBuffer commandBuffer,
VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
@@ -1939,6 +1528,10 @@ class CoreChecks : public ValidationStateTracker {
void PreCallRecordCmdPushDescriptorSetWithTemplateKHR(VkCommandBuffer commandBuffer,
VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
VkPipelineLayout layout, uint32_t set, const void* pData);
+ void PostCallRecordGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount,
+ VkDisplayPlanePropertiesKHR* pProperties, VkResult result);
+ void PostCallRecordGetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount,
+ VkDisplayPlaneProperties2KHR* pProperties, VkResult result);
bool PreCallValidateGetDisplayPlaneSupportedDisplaysKHR(VkPhysicalDevice physicalDevice, uint32_t planeIndex,
uint32_t* pDisplayCount, VkDisplayKHR* pDisplays);
bool PreCallValidateGetDisplayPlaneCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex,
@@ -1947,13 +1540,6 @@ class CoreChecks : public ValidationStateTracker {
const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo,
VkDisplayPlaneCapabilities2KHR* pCapabilities);
bool PreCallValidateCmdDebugMarkerEndEXT(VkCommandBuffer commandBuffer);
-
- bool PreCallValidateCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query,
- VkQueryControlFlags flags, uint32_t index);
- void PreCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query,
- VkQueryControlFlags flags, uint32_t index);
- bool PreCallValidateCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index);
-
bool PreCallValidateCmdSetDiscardRectangleEXT(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle,
uint32_t discardRectangleCount, const VkRect2D* pDiscardRectangles);
bool PreCallValidateCmdSetSampleLocationsEXT(VkCommandBuffer commandBuffer,
@@ -1961,56 +1547,71 @@ class CoreChecks : public ValidationStateTracker {
bool PreCallValidateCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
uint32_t stride);
+ void PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
+ VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
+ uint32_t stride);
+ void PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
+ VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
+ uint32_t stride);
bool PreCallValidateCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask);
+ void PreCallRecordCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask);
bool PreCallValidateCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
uint32_t drawCount, uint32_t stride);
+ void PreCallRecordCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
+ uint32_t drawCount, uint32_t stride);
bool PreCallValidateCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
uint32_t stride);
+ void PreCallRecordCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
+ VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
+ uint32_t stride);
+ void PostCallRecordDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion,
+ const VkAllocationCallbacks* pAllocator);
void PreCallRecordGetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice,
VkPhysicalDeviceProperties* pPhysicalDeviceProperties);
+ void PostCallRecordDestroySamplerYcbcrConversionKHR(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion,
+ const VkAllocationCallbacks* pAllocator);
bool PreCallValidateGetBufferDeviceAddressEXT(VkDevice device, const VkBufferDeviceAddressInfoEXT* pInfo);
- bool PreCallValidateCmdSetDeviceMask(VkCommandBuffer commandBuffer, uint32_t deviceMask);
- bool ValidateComputeWorkGroupSizes(const SHADER_MODULE_STATE* shader) const;
-
- bool ValidateQueryRange(VkDevice device, VkQueryPool queryPool, uint32_t totalCount, uint32_t firstQuery, uint32_t queryCount,
- const char* vuid_badfirst, const char* vuid_badrange) const;
- bool PreCallValidateResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount);
-
- bool ValidateComputeWorkGroupInvocations(CMD_BUFFER_STATE* cb_state, uint32_t groupCountX, uint32_t groupCountY,
- uint32_t groupCountZ);
- bool ValidateQueryPoolStride(const std::string& vuid_not_64, const std::string& vuid_64, const VkDeviceSize stride,
- const char* parameter_name, const uint64_t parameter_value, const VkQueryResultFlags flags) const;
- bool ValidateCmdDrawStrideWithStruct(VkCommandBuffer commandBuffer, const std::string& vuid, const uint32_t stride,
- const char* struct_name, const uint32_t struct_size) const;
- bool ValidateCmdDrawStrideWithBuffer(VkCommandBuffer commandBuffer, const std::string& vuid, const uint32_t stride,
- const char* struct_name, const uint32_t struct_size, const uint32_t drawCount,
- const VkDeviceSize offset, const BUFFER_STATE* buffer_state) const;
-
#ifdef VK_USE_PLATFORM_ANDROID_KHR
- bool PreCallValidateGetAndroidHardwareBufferPropertiesANDROID(VkDevice device, const struct AHardwareBuffer* buffer,
- VkAndroidHardwareBufferPropertiesANDROID* pProperties);
- void PostCallRecordGetAndroidHardwareBufferPropertiesANDROID(VkDevice device, const struct AHardwareBuffer* buffer,
- VkAndroidHardwareBufferPropertiesANDROID* pProperties,
- VkResult result);
- bool PreCallValidateGetMemoryAndroidHardwareBufferANDROID(VkDevice device,
- const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo,
- struct AHardwareBuffer** pBuffer);
+ bool PreCallValidateGetAndroidHardwareBufferProperties(VkDevice device, const struct AHardwareBuffer* buffer,
+ VkAndroidHardwareBufferPropertiesANDROID* pProperties);
+ void PostCallRecordGetAndroidHardwareBufferProperties(VkDevice device, const struct AHardwareBuffer* buffer,
+ VkAndroidHardwareBufferPropertiesANDROID* pProperties, VkResult result);
+ bool PreCallValidateGetMemoryAndroidHardwareBuffer(VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo,
+ struct AHardwareBuffer** pBuffer);
+ void PostCallRecordCreateAndroidSurfaceKHR(VkInstance instance, const VkAndroidSurfaceCreateInfoKHR* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface, VkResult result);
#endif // VK_USE_PLATFORM_ANDROID_KHR
+#ifdef VK_USE_PLATFORM_IOS_MVK
+ void PostCallRecordCreateIOSSurfaceMVK(VkInstance instance, const VkIOSSurfaceCreateInfoMVK* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface, VkResult result);
+#endif // VK_USE_PLATFORM_IOS_MVK
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+ void PostCallRecordCreateMacOSSurfaceMVK(VkInstance instance, const VkMacOSSurfaceCreateInfoMVK* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface, VkResult result);
+#endif // VK_USE_PLATFORM_MACOS_MVK
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
bool PreCallValidateGetPhysicalDeviceWaylandPresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex,
struct wl_display* display);
+ void PostCallRecordCreateWaylandSurfaceKHR(VkInstance instance, const VkWaylandSurfaceCreateInfoKHR* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface, VkResult result);
#endif // VK_USE_PLATFORM_WAYLAND_KHR
#ifdef VK_USE_PLATFORM_WIN32_KHR
bool PreCallValidateGetPhysicalDeviceWin32PresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex);
+ void PostCallRecordCreateWin32SurfaceKHR(VkInstance instance, const VkWin32SurfaceCreateInfoKHR* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface, VkResult result);
#endif // VK_USE_PLATFORM_WIN32_KHR
#ifdef VK_USE_PLATFORM_XCB_KHR
bool PreCallValidateGetPhysicalDeviceXcbPresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex,
xcb_connection_t* connection, xcb_visualid_t visual_id);
+ void PostCallRecordCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface, VkResult result);
#endif // VK_USE_PLATFORM_XCB_KHR
#ifdef VK_USE_PLATFORM_XLIB_KHR
bool PreCallValidateGetPhysicalDeviceXlibPresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex,
Display* dpy, VisualID visualID);
+ void PostCallRecordCreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface, VkResult result);
#endif // VK_USE_PLATFORM_XLIB_KHR
}; // Class CoreChecks
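
The declarations above follow the layer-chassis convention used throughout this header: a Vulkan entry point may have a PreCallValidate* hook (read-only checks before the call reaches the driver) and PreCallRecord*/PostCallRecord* hooks (state bookkeeping before/after the downstream call). A minimal sketch of that interception order for one entry point, using hypothetical wrapper and parameter names (InterceptQueueSubmit, down_chain_submit) rather than the generated chassis code, which iterates over every enabled validation object:

    #include <vulkan/vulkan.h>

    // Sketch only: assumes a CoreChecks instance and the next layer's vkQueueSubmit
    // pointer are available; the real dispatch lives in generated/chassis.cpp.
    VkResult InterceptQueueSubmit(CoreChecks* core_checks, PFN_vkQueueSubmit down_chain_submit,
                                  VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits,
                                  VkFence fence) {
        // Read-only validation before the call is forwarded.
        bool skip = core_checks->PreCallValidateQueueSubmit(queue, submitCount, pSubmits, fence);
        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
        // Forward to the next layer (or the driver) in the dispatch chain.
        VkResult result = down_chain_submit(queue, submitCount, pSubmits, fence);
        // Record tracked state once the downstream call has returned.
        core_checks->PostCallRecordQueueSubmit(queue, submitCount, pSubmits, fence, result);
        return result;
    }
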
diff --git a/layers/core_validation_error_enums.h b/layers/core_validation_error_enums.h
index f3fe510e7..0b546fe9d 100644
--- a/layers/core_validation_error_enums.h
+++ b/layers/core_validation_error_enums.h
@@ -36,6 +36,7 @@
static const char DECORATE_UNUSED *kVUID_Core_MemTrack_FenceState = "UNASSIGNED-CoreValidation-MemTrack-FenceState";
static const char DECORATE_UNUSED *kVUID_Core_MemTrack_FreedMemRef = "UNASSIGNED-CoreValidation-MemTrack-FreedMemRef";
+static const char DECORATE_UNUSED *kVUID_Core_MemTrack_InvalidAliasing = "UNASSIGNED-CoreValidation-MemTrack-InvalidAliasing";
static const char DECORATE_UNUSED *kVUID_Core_MemTrack_InvalidMap = "UNASSIGNED-CoreValidation-MemTrack-InvalidMap";
static const char DECORATE_UNUSED *kVUID_Core_MemTrack_InvalidState = "UNASSIGNED-CoreValidation-MemTrack-InvalidState";
static const char DECORATE_UNUSED *kVUID_Core_MemTrack_InvalidUsageFlag = "UNASSIGNED-CoreValidation-MemTrack-InvalidUsageFlag";
@@ -50,7 +51,6 @@ static const char DECORATE_UNUSED *kVUID_Core_MemTrack_RebindObject = "UNASSIGNE
//static const char DECORATE_UNUSED *kVUID_Core_MemTrack_MemoryLeak = "UNASSIGNED-CoreValidation-MemTrack-MemoryLeak";
//static const char DECORATE_UNUSED *kVUID_Core_MemTrack_ObjNotBound = "UNASSIGNED-CoreValidation-MemTrack-ObjNotBound";
//static const char DECORATE_UNUSED *kVUID_Core_MemTrack_ResetCBWhileInFlight = "UNASSIGNED-CoreValidation-MemTrack-ResetCBWhileInFlight";
-//static const char DECORATE_UNUSED *kVUID_Core_MemTrack_InvalidAliasing = "UNASSIGNED-CoreValidation-MemTrack-InvalidAliasing";
static const char DECORATE_UNUSED *kVUID_Core_DrawState_ClearCmdBeforeDraw = "UNASSIGNED-CoreValidation-DrawState-ClearCmdBeforeDraw";
static const char DECORATE_UNUSED *kVUID_Core_DrawState_CommandBufferSingleSubmitViolation = "UNASSIGNED-CoreValidation-DrawState-CommandBufferSingleSubmitViolation";
@@ -60,7 +60,7 @@ static const char DECORATE_UNUSED *kVUID_Core_DrawState_DoubleDestroy = "UNASSIG
static const char DECORATE_UNUSED *kVUID_Core_DrawState_ExtensionNotEnabled = "UNASSIGNED-CoreValidation-DrawState-ExtensionNotEnabled";
static const char DECORATE_UNUSED *kVUID_Core_DrawState_InternalError = "UNASSIGNED-CoreValidation-DrawState-InternalError";
static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidBarrier = "UNASSIGNED-CoreValidation-DrawState-InvalidBarrier";
-//static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidBuffer = "UNASSIGNED-CoreValidation-DrawState-InvalidBuffer";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidBuffer = "UNASSIGNED-CoreValidation-DrawState-InvalidBuffer";
static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidCommandBuffer = "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBuffer";
static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidCommandBufferSimultaneousUse = "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBufferSimultaneousUse";
static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidDescriptorSet = "UNASSIGNED-CoreValidation-DrawState-InvalidDescriptorSet";
@@ -68,14 +68,13 @@ static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidEvent = "UNASSIGN
static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidExtents = "UNASSIGNED-CoreValidation-DrawState-InvalidExtents";
static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidFeature = "UNASSIGNED-CoreValidation-DrawState-InvalidFeature";
static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidFence = "UNASSIGNED-CoreValidation-DrawState-InvalidFence";
-//static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidImage = "UNASSIGNED-CoreValidation-DrawState-InvalidImage";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidImage = "UNASSIGNED-CoreValidation-DrawState-InvalidImage";
static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidImageAspect = "UNASSIGNED-CoreValidation-DrawState-InvalidImageAspect";
static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidImageLayout = "UNASSIGNED-CoreValidation-DrawState-InvalidImageLayout";
static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidLayout = "UNASSIGNED-CoreValidation-DrawState-InvalidLayout";
static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidPipeline = "UNASSIGNED-CoreValidation-DrawState-InvalidPipeline";
static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidPipelineCreateState = "UNASSIGNED-CoreValidation-DrawState-InvalidPipelineCreateState";
static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidQuery = "UNASSIGNED-CoreValidation-DrawState-InvalidQuery";
-static const char DECORATE_UNUSED *kVUID_Core_DrawState_QueryNotReset = "UNASSIGNED-CoreValidation-DrawState-QueryNotReset";
static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidQueueFamily = "UNASSIGNED-CoreValidation-DrawState-InvalidQueueFamily";
static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidRenderArea = "UNASSIGNED-CoreValidation-DrawState-InvalidRenderArea";
static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidRenderpass = "UNASSIGNED-CoreValidation-DrawState-InvalidRenderpass";
@@ -101,7 +100,6 @@ static const char DECORATE_UNUSED *kVUID_Core_DrawState_SwapchainUnsupportedQueu
static const char DECORATE_UNUSED *kVUID_Core_DrawState_ViewportScissorMismatch = "UNASSIGNED-CoreValidation-DrawState-ViewportScissorMismatch";
static const char DECORATE_UNUSED *kVUID_Core_DrawState_VtxIndexOutOfBounds = "UNASSIGNED-CoreValidation-DrawState-VtxIndexOutOfBounds";
static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidVtxAttributeAlignment = "UNASSIGNED-CoreValidation-DrawState-InvalidVtxAttributeAlignment";
-static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidImageView = "UNASSIGNED-CoreValidation-DrawState-InvalidImageView";
// Previously defined but unused - uncomment as needed
//static const char DECORATE_UNUSED *kVUID_Core_DrawState_BeginCommandBufferInvalidState = "UNASSIGNED-CoreValidation-DrawState-BeginCommandBufferInvalidState";
//static const char DECORATE_UNUSED *kVUID_Core_DrawState_BlendNotBound = "UNASSIGNED-CoreValidation-DrawState-BlendNotBound";
@@ -184,9 +182,6 @@ static const char DECORATE_UNUSED *kVUID_Core_Shader_PushConstantOutOfRange = "U
static const char DECORATE_UNUSED *kVUID_Core_Shader_MissingPointSizeBuiltIn = "UNASSIGNED-CoreValidation-Shader-PointSizeMissing";
static const char DECORATE_UNUSED *kVUID_Core_Shader_PointSizeBuiltInOverSpecified = "UNASSIGNED-CoreValidation-Shader-PointSizeOverSpecified";
static const char DECORATE_UNUSED *kVUID_Core_Shader_NoAlphaAtLocation0WithAlphaToCoverage = "UNASSIGNED-CoreValidation-Shader-NoAlphaAtLocation0WithAlphaToCoverage";
-static const char DECORATE_UNUSED *kVUID_Core_Shader_CooperativeMatrixSupportedStages = "UNASSIGNED-CoreValidation-Shader-CooperativeMatrixSupportedStages";
-static const char DECORATE_UNUSED *kVUID_Core_Shader_CooperativeMatrixType = "UNASSIGNED-CoreValidation-Shader-CooperativeMatrixType";
-static const char DECORATE_UNUSED *kVUID_Core_Shader_CooperativeMatrixMulAdd = "UNASSIGNED-CoreValidation-Shader-CooperativeMatrixMulAdd";
// Previously defined but unused - uncomment as needed
//static const char DECORATE_UNUSED *kVUID_Core_Shader_BadCapability = "UNASSIGNED-CoreValidation-Shader-BadCapability";
//static const char DECORATE_UNUSED *kVUID_Core_Shader_BadSpecialization = "UNASSIGNED-CoreValidation-Shader-BadSpecialization";
@@ -205,8 +200,6 @@ static const char DECORATE_UNUSED *kVUID_Core_DevLimit_MustQueryCount = "UNASSIG
static const char DECORATE_UNUSED *kVUID_Core_Swapchain_GetSupportedDisplaysWithoutQuery = "UNASSIGNED-CoreValidation-Swapchain-GetSupportedDisplaysWithoutQuery";
static const char DECORATE_UNUSED *kVUID_Core_Swapchain_InvalidCount = "UNASSIGNED-CoreValidation-SwapchainInvalidCount";
static const char DECORATE_UNUSED *kVUID_Core_Swapchain_PriorCount = "UNASSIGNED-CoreValidation-SwapchainPriorCount";
-static const char DECORATE_UNUSED *kVUID_Core_Swapchain_PreTransform = "UNASSIGNED-CoreValidation-SwapchainPreTransform";
-
// Previously defined but unused - uncomment as needed
//static const char DECORATE_UNUSED *kVUID_Core_Swapchain_BadBool = "UNASSIGNED-CoreValidation-SwapchainBadBool";
//static const char DECORATE_UNUSED *kVUID_Core_Swapchain_CreateSwapBadCompositeAlpha = "UNASSIGNED-CoreValidation-Swapchain-CreateSwapBadCompositeAlpha";
@@ -239,13 +232,6 @@ static const char DECORATE_UNUSED *kVUID_Core_PushDescriptorUpdate_TemplateType
static const char DECORATE_UNUSED *kVUID_Core_PushDescriptorUpdate_Template_SetMismatched = "UNASSIGNED-CoreValidation-vkCmdPushDescriptorSetWithTemplateKHR-set";
static const char DECORATE_UNUSED *kVUID_Core_PushDescriptorUpdate_Template_LayoutMismatched = "UNASSIGNED-CoreValidation-vkCmdPushDescriptorSetWithTemplateKHR-layout";
-static const char DECORATE_UNUSED *kVUID_Core_BindImage_InvalidMemReqQuery = "UNASSIGNED-CoreValidation-vkBindImageMemory-invalid-requirements";
-static const char DECORATE_UNUSED *kVUID_Core_BindImage_NoMemReqQuery = "UNASSIGNED-CoreValidation-vkBindImageMemory-memory-requirements";
-static const char DECORATE_UNUSED *kVUID_Core_BindBuffer_NoMemReqQuery = "UNASSIGNED-CoreValidation-vkBindBufferMemory-memory-requirements";
-
-static const char DECORATE_UNUSED *kVUID_Core_BindAccelNV_NoMemReqQuery = "UNASSIGNED-CoreValidation-vkBindAccelerationStructureMemoryNV-object-requirements";
-static const char DECORATE_UNUSED *kVUID_Core_CmdBuildAccelNV_NoScratchMemReqQuery = "UNASSIGNED-CoreValidation-vkCmdBuildAccelerationStructureNV-scratch-requirements";
-static const char DECORATE_UNUSED *kVUID_Core_CmdBuildAccelNV_NoUpdateMemReqQuery = "UNASSIGNED-CoreValidation-vkCmdBuildAccelerationStructureNV-update-requirements";
// clang-format on
#undef DECORATE_UNUSED
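
For context on the pattern above: each kVUID_* constant is a file-scope string naming an unassigned validation check, and DECORATE_UNUSED exists so that constants with no current user do not trigger unused-variable warnings. The fragment below is only a sketch of that idea; the real macro definition lives near the top of core_validation_error_enums.h and may differ.

    // Hedged sketch of the DECORATE_UNUSED pattern (illustrative, not the layer's exact definition).
    #if defined(__GNUC__) || defined(__clang__)
    #define DECORATE_UNUSED __attribute__((unused))
    #else
    #define DECORATE_UNUSED
    #endif

    static const char DECORATE_UNUSED *kVUID_Example_Check = "UNASSIGNED-CoreValidation-Example-Check";
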
diff --git a/layers/core_validation_types.h b/layers/core_validation_types.h
index f4b2239e1..bed42da42 100644
--- a/layers/core_validation_types.h
+++ b/layers/core_validation_types.h
@@ -20,14 +20,11 @@
* Author: Chris Forbes <chrisf@ijw.co.nz>
* Author: Mark Lobodzinski <mark@lunarg.com>
* Author: Dave Houlton <daveh@lunarg.com>
- * Author: John Zulauf <jzulauf@lunarg.com>
*/
#ifndef CORE_VALIDATION_TYPES_H_
#define CORE_VALIDATION_TYPES_H_
-#include "cast_utils.h"
#include "hash_vk_types.h"
-#include "sparse_containers.h"
#include "vk_safe_struct.h"
#include "vulkan/vulkan.h"
#include "vk_layer_logging.h"
@@ -35,9 +32,6 @@
#include "vk_extension_helper.h"
#include "vk_typemap_helper.h"
#include "convert_to_renderpass2.h"
-#include "layer_chassis_dispatch.h"
-
-#include <array>
#include <atomic>
#include <functional>
#include <list>
@@ -55,6 +49,10 @@
#include "android_ndk_types.h"
#endif // VK_USE_PLATFORM_ANDROID_KHR
+class CoreChecks;
+typedef CoreChecks layer_data;
+typedef CoreChecks instance_layer_data;
+
// Fwd declarations -- including descriptor_set.h creates an ugly include loop
namespace cvdescriptorset {
class DescriptorSetLayoutDef;
@@ -62,9 +60,7 @@ class DescriptorSetLayout;
class DescriptorSet;
} // namespace cvdescriptorset
-struct CMD_BUFFER_STATE;
-class CoreChecks;
-class ValidationStateTracker;
+struct GLOBAL_CB_NODE;
enum CALL_STATE {
UNCALLED, // Function has not been called
@@ -80,13 +76,13 @@ class BASE_NODE {
// binding initialized when cmd referencing object is bound to command buffer
// binding removed when command buffer is reset or destroyed
// When an object is destroyed, any bound cbs are set to INVALID
- std::unordered_set<CMD_BUFFER_STATE *> cb_bindings;
+ std::unordered_set<GLOBAL_CB_NODE *> cb_bindings;
BASE_NODE() { in_use.store(0); };
};
// Track command pools and their command buffers
-struct COMMAND_POOL_STATE : public BASE_NODE {
+struct COMMAND_POOL_NODE : public BASE_NODE {
VkCommandPoolCreateFlags createFlags;
uint32_t queueFamilyIndex;
// Cmd buffers allocated from this pool
@@ -100,12 +96,12 @@ static bool IsTransferOp(const Barrier *barrier) {
}
template <typename Barrier, bool assume_transfer = false>
-static bool TempIsReleaseOp(const COMMAND_POOL_STATE *pool, const Barrier *barrier) {
+static bool TempIsReleaseOp(const COMMAND_POOL_NODE *pool, const Barrier *barrier) {
return (assume_transfer || IsTransferOp(barrier)) && (pool->queueFamilyIndex == barrier->srcQueueFamilyIndex);
}
template <typename Barrier, bool assume_transfer = false>
-static bool IsAcquireOp(const COMMAND_POOL_STATE *pool, const Barrier *barrier) {
+static bool IsAcquireOp(const COMMAND_POOL_NODE *pool, const Barrier *barrier) {
return (assume_transfer || IsTransferOp(barrier)) && (pool->queueFamilyIndex == barrier->dstQueueFamilyIndex);
}
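
To make the release/acquire classification above concrete: a barrier describes a queue family ownership transfer when its source and destination queue family indices differ; the command pool whose family matches the source index records the release half, and the pool matching the destination index records the acquire half. The sketch below restates that logic against an invented PoolStub type (IsTransferOpSketch, IsReleaseOpSketch, and IsAcquireOpSketch are illustrative stand-ins, not the layer's functions):

    #include <vulkan/vulkan.h>
    #include <cassert>

    struct PoolStub { uint32_t queueFamilyIndex; };  // stand-in for COMMAND_POOL_NODE

    static bool IsTransferOpSketch(const VkImageMemoryBarrier *b) {
        return b->srcQueueFamilyIndex != b->dstQueueFamilyIndex;
    }
    static bool IsReleaseOpSketch(const PoolStub *pool, const VkImageMemoryBarrier *b) {
        return IsTransferOpSketch(b) && (pool->queueFamilyIndex == b->srcQueueFamilyIndex);
    }
    static bool IsAcquireOpSketch(const PoolStub *pool, const VkImageMemoryBarrier *b) {
        return IsTransferOpSketch(b) && (pool->queueFamilyIndex == b->dstQueueFamilyIndex);
    }

    int main() {
        VkImageMemoryBarrier barrier = {};
        barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
        barrier.srcQueueFamilyIndex = 0;  // e.g. the graphics family gives up ownership
        barrier.dstQueueFamilyIndex = 1;  // e.g. the transfer family takes it
        PoolStub graphics_pool{0}, transfer_pool{1};
        assert(IsReleaseOpSketch(&graphics_pool, &barrier));  // recorded on the source family
        assert(IsAcquireOpSketch(&transfer_pool, &barrier));  // recorded on the destination family
        return 0;
    }
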
@@ -113,14 +109,18 @@ inline bool IsSpecial(const uint32_t queue_family_index) {
return (queue_family_index == VK_QUEUE_FAMILY_EXTERNAL_KHR) || (queue_family_index == VK_QUEUE_FAMILY_FOREIGN_EXT);
}
-inline bool operator==(const VulkanTypedHandle &a, const VulkanTypedHandle &b) NOEXCEPT {
- return a.handle == b.handle && a.type == b.type;
-}
+// Generic wrapper for vulkan objects
+struct VK_OBJECT {
+ uint64_t handle;
+ VulkanObjectType type;
+};
+
+inline bool operator==(VK_OBJECT a, VK_OBJECT b) NOEXCEPT { return a.handle == b.handle && a.type == b.type; }
namespace std {
template <>
-struct hash<VulkanTypedHandle> {
- size_t operator()(VulkanTypedHandle obj) const NOEXCEPT { return hash<uint64_t>()(obj.handle) ^ hash<uint32_t>()(obj.type); }
+struct hash<VK_OBJECT> {
+ size_t operator()(VK_OBJECT obj) const NOEXCEPT { return hash<uint64_t>()(obj.handle) ^ hash<uint32_t>()(obj.type); }
};
} // namespace std
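
The operator== and std::hash specialization just added exist so that VK_OBJECT can serve as a key in unordered containers, for example the obj_bindings set in DEVICE_MEM_INFO further down. A self-contained sketch of the same pattern, with ObjKey as a local mirror of VK_OBJECT and a made-up handle value:

    #include <cstdint>
    #include <functional>
    #include <unordered_set>

    struct ObjKey { uint64_t handle; uint32_t type; };  // local mirror of VK_OBJECT
    inline bool operator==(ObjKey a, ObjKey b) { return a.handle == b.handle && a.type == b.type; }
    namespace std {
    template <>
    struct hash<ObjKey> {
        size_t operator()(ObjKey o) const noexcept { return hash<uint64_t>()(o.handle) ^ hash<uint32_t>()(o.type); }
    };
    }  // namespace std

    int main() {
        std::unordered_set<ObjKey> bindings;  // plays the role of DEVICE_MEM_INFO::obj_bindings
        bindings.insert({0x1234u, 7u});       // e.g. an image handle bound to this memory
        return bindings.count({0x1234u, 7u}) == 1 ? 0 : 1;
    }
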
@@ -146,10 +146,6 @@ enum descriptor_req {
DESCRIPTOR_REQ_COMPONENT_TYPE_UINT = DESCRIPTOR_REQ_COMPONENT_TYPE_SINT << 1,
};
-extern unsigned DescriptorRequirementsBitsFromFormat(VkFormat fmt);
-
-typedef std::map<uint32_t, descriptor_req> BindingReqMap;
-
struct DESCRIPTOR_POOL_STATE : BASE_NODE {
VkDescriptorPool pool;
uint32_t maxSets; // Max descriptor sets allowed in this pool
@@ -295,27 +291,44 @@ class IMAGE_STATE : public BINDABLE {
public:
VkImage image;
VkImageCreateInfo createInfo;
- bool valid; // If this is a swapchain image backing memory track valid here as it doesn't have DEVICE_MEMORY_STATE
- bool acquired; // If this is a swapchain image, has it been acquired by the app.
- bool shared_presentable; // True for a front-buffered swapchain image
- bool layout_locked; // A front-buffered image that has been presented can never have layout transitioned
- bool get_sparse_reqs_called; // Track if GetImageSparseMemoryRequirements() has been called for this image
- bool sparse_metadata_required; // Track if sparse metadata aspect is required for this image
- bool sparse_metadata_bound; // Track if sparse metadata aspect is bound to this image
- bool imported_ahb; // True if image was imported from an Android Hardware Buffer
- bool has_ahb_format; // True if image was created with an external Android format
- uint64_t ahb_format; // External Android format, if provided
- VkImageSubresourceRange full_range; // The normalized ISR for all levels, layers (slices), and aspects
- VkSwapchainKHR create_from_swapchain;
- VkSwapchainKHR bind_swapchain;
- uint32_t bind_swapchain_imageIndex;
+ bool valid; // If this is a swapchain image backing memory track valid here as it doesn't have DEVICE_MEM_INFO
+ bool acquired; // If this is a swapchain image, has it been acquired by the app.
+ bool shared_presentable; // True for a front-buffered swapchain image
+ bool layout_locked; // A front-buffered image that has been presented can never have layout transitioned
+ bool get_sparse_reqs_called; // Track if GetImageSparseMemoryRequirements() has been called for this image
+ bool sparse_metadata_required; // Track if sparse metadata aspect is required for this image
+ bool sparse_metadata_bound; // Track if sparse metadata aspect is bound to this image
+ bool imported_ahb; // True if image was imported from an Android Hardware Buffer
+ bool has_ahb_format; // True if image was created with an external Android format
+ uint64_t ahb_format; // External Android format, if provided
+ std::vector<VkSparseImageMemoryRequirements> sparse_requirements;
+ IMAGE_STATE(VkImage img, const VkImageCreateInfo *pCreateInfo)
+ : image(img),
+ createInfo(*pCreateInfo),
+ valid(false),
+ acquired(false),
+ shared_presentable(false),
+ layout_locked(false),
+ get_sparse_reqs_called(false),
+ sparse_metadata_required(false),
+ sparse_metadata_bound(false),
+ imported_ahb(false),
+ has_ahb_format(false),
+ ahb_format(0),
+ sparse_requirements{} {
+ if ((createInfo.sharingMode == VK_SHARING_MODE_CONCURRENT) && (createInfo.queueFamilyIndexCount > 0)) {
+ uint32_t *pQueueFamilyIndices = new uint32_t[createInfo.queueFamilyIndexCount];
+ for (uint32_t i = 0; i < createInfo.queueFamilyIndexCount; i++) {
+ pQueueFamilyIndices[i] = pCreateInfo->pQueueFamilyIndices[i];
+ }
+ createInfo.pQueueFamilyIndices = pQueueFamilyIndices;
+ }
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
- uint64_t external_format_android;
-#endif // VK_USE_PLATFORM_ANDROID_KHR
+ if (createInfo.flags & VK_IMAGE_CREATE_SPARSE_BINDING_BIT) {
+ sparse = true;
+ }
+ };
- std::vector<VkSparseImageMemoryRequirements> sparse_requirements;
- IMAGE_STATE(VkImage img, const VkImageCreateInfo *pCreateInfo);
IMAGE_STATE(IMAGE_STATE const &rh_obj) = delete;
~IMAGE_STATE() {
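
The queue-family-index copy in the constructor above matters because VkImageCreateInfo::pQueueFamilyIndices is only required to stay valid for the duration of the vkCreateImage call, while the tracked createInfo outlives it; the layer therefore clones the array (and presumably releases it again in the destructor this hunk truncates). A small illustrative sketch of that ownership transfer, with invented helper names:

    #include <vulkan/vulkan.h>
    #include <cstring>

    // The caller's index array is short-lived, so a state tracker that stores the create
    // info must own its own heap copy of pQueueFamilyIndices.
    static const uint32_t *CloneQueueFamilies(const VkImageCreateInfo &ci) {
        uint32_t *copy = new uint32_t[ci.queueFamilyIndexCount];
        std::memcpy(copy, ci.pQueueFamilyIndices, ci.queueFamilyIndexCount * sizeof(uint32_t));
        return copy;  // owner must delete[] when the tracking object is destroyed
    }

    int main() {
        uint32_t families[] = {2, 3};  // caller-owned, short-lived storage
        VkImageCreateInfo ci = {};
        ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
        ci.sharingMode = VK_SHARING_MODE_CONCURRENT;
        ci.queueFamilyIndexCount = 2;
        ci.pQueueFamilyIndices = families;
        const uint32_t *owned = CloneQueueFamilies(ci);  // safe to keep after 'families' dies
        bool ok = (owned[0] == 2) && (owned[1] == 3);
        delete[] owned;
        return ok ? 0 : 1;
    }
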
@@ -330,42 +343,34 @@ class IMAGE_VIEW_STATE : public BASE_NODE {
public:
VkImageView image_view;
VkImageViewCreateInfo create_info;
- VkImageSubresourceRange normalized_subresource_range;
- VkSampleCountFlagBits samples;
- unsigned descriptor_format_bits;
VkSamplerYcbcrConversion samplerConversion; // Handle of the ycbcr sampler conversion the image was created with, if any
- IMAGE_VIEW_STATE(const IMAGE_STATE *image_state, VkImageView iv, const VkImageViewCreateInfo *ci);
+ IMAGE_VIEW_STATE(VkImageView iv, const VkImageViewCreateInfo *ci)
+ : image_view(iv), create_info(*ci), samplerConversion(VK_NULL_HANDLE) {
+ auto *conversionInfo = lvl_find_in_chain<VkSamplerYcbcrConversionInfo>(create_info.pNext);
+ if (conversionInfo) samplerConversion = conversionInfo->conversion;
+ };
IMAGE_VIEW_STATE(const IMAGE_VIEW_STATE &rh_obj) = delete;
};
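
The IMAGE_VIEW_STATE constructor above pulls the optional VkSamplerYcbcrConversionInfo out of the create info's pNext chain with lvl_find_in_chain. For readers unfamiliar with that utility, the sketch below shows the general chain-walking idea; FindInChainSketch is an invented approximation, not the layer's implementation:

    #include <vulkan/vulkan.h>

    // Every extension struct begins with sType/pNext, so a chain can be walked generically.
    static const void *FindInChainSketch(const void *chain, VkStructureType wanted) {
        for (auto node = reinterpret_cast<const VkBaseInStructure *>(chain); node; node = node->pNext) {
            if (node->sType == wanted) return node;
        }
        return nullptr;
    }

    int main() {
        VkSamplerYcbcrConversionInfo conv = {};
        conv.sType = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO;
        VkImageViewCreateInfo ci = {};
        ci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
        ci.pNext = &conv;  // extension struct chained onto the create info
        auto *found = static_cast<const VkSamplerYcbcrConversionInfo *>(
            FindInChainSketch(ci.pNext, VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO));
        return found ? 0 : 1;
    }
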
-class ACCELERATION_STRUCTURE_STATE : public BINDABLE {
- public:
- VkAccelerationStructureNV acceleration_structure;
- safe_VkAccelerationStructureCreateInfoNV create_info;
- bool memory_requirements_checked = false;
- VkMemoryRequirements2KHR memory_requirements;
- bool build_scratch_memory_requirements_checked = false;
- VkMemoryRequirements2KHR build_scratch_memory_requirements;
- bool update_scratch_memory_requirements_checked = false;
- VkMemoryRequirements2KHR update_scratch_memory_requirements;
- bool built = false;
- safe_VkAccelerationStructureInfoNV build_info;
- ACCELERATION_STRUCTURE_STATE(VkAccelerationStructureNV as, const VkAccelerationStructureCreateInfoNV *ci)
- : acceleration_structure(as),
- create_info(ci),
- memory_requirements{},
- build_scratch_memory_requirements_checked{},
- update_scratch_memory_requirements_checked{} {}
- ACCELERATION_STRUCTURE_STATE(const ACCELERATION_STRUCTURE_STATE &rh_obj) = delete;
-};
-
struct MemRange {
VkDeviceSize offset;
VkDeviceSize size;
};
+struct MEMORY_RANGE {
+ uint64_t handle;
+ bool image; // True for image, false for buffer
+ bool linear; // True for buffers and linear images
+ VkDeviceMemory memory;
+ VkDeviceSize start;
+ VkDeviceSize size;
+ VkDeviceSize end; // Store this pre-computed for simplicity
+ // Set of ptrs to every range aliased with this one
+ std::unordered_set<MEMORY_RANGE *> aliases;
+};
+
// Data struct for tracking memory object
-struct DEVICE_MEMORY_STATE : public BASE_NODE {
+struct DEVICE_MEM_INFO : public BASE_NODE {
void *object; // Dispatchable object used to create this memory (device or swapchain)
VkDeviceMemory mem;
VkMemoryAllocateInfo alloc_info;
@@ -374,11 +379,11 @@ struct DEVICE_MEMORY_STATE : public BASE_NODE {
VkImage dedicated_image;
bool is_export;
VkExternalMemoryHandleTypeFlags export_handle_type_flags;
- std::unordered_set<VulkanTypedHandle> obj_bindings; // objects bound to this memory
- // Convenience vectors of handles to speed up iterating over objects independently
+ std::unordered_set<VK_OBJECT> obj_bindings; // objects bound to this memory
+ std::unordered_map<uint64_t, MEMORY_RANGE> bound_ranges; // Map of object to its binding range
+ // Convenience vectors image/buff handles to speed up iterating over images or buffers independently
std::unordered_set<uint64_t> bound_images;
std::unordered_set<uint64_t> bound_buffers;
- std::unordered_set<uint64_t> bound_acceleration_structures;
MemRange mem_range;
void *shadow_copy_base; // Base of layer's allocation for guard band, data, and alignment space
@@ -387,7 +392,7 @@ struct DEVICE_MEMORY_STATE : public BASE_NODE {
// multiple of limits.minMemoryMapAlignment
void *p_driver_data; // Pointer to application's actual memory
- DEVICE_MEMORY_STATE(void *disp_object, const VkDeviceMemory in_mem, const VkMemoryAllocateInfo *p_alloc_info)
+ DEVICE_MEM_INFO(void *disp_object, const VkDeviceMemory in_mem, const VkMemoryAllocateInfo *p_alloc_info)
: object(disp_object),
mem(in_mem),
alloc_info(*p_alloc_info),
@@ -416,514 +421,14 @@ class SWAPCHAIN_NODE {
: createInfo(pCreateInfo), swapchain(swapchain) {}
};
-struct ColorAspectTraits {
- static const uint32_t kAspectCount = 1;
- static int Index(VkImageAspectFlags mask) { return 0; };
- static VkImageAspectFlags AspectMask() { return VK_IMAGE_ASPECT_COLOR_BIT; }
- static const std::array<VkImageAspectFlagBits, kAspectCount> &AspectBits() {
- static std::array<VkImageAspectFlagBits, kAspectCount> kAspectBits{{VK_IMAGE_ASPECT_COLOR_BIT}};
- return kAspectBits;
- }
-};
-
-struct DepthAspectTraits {
- static const uint32_t kAspectCount = 1;
- static int Index(VkImageAspectFlags mask) { return 0; };
- static VkImageAspectFlags AspectMask() { return VK_IMAGE_ASPECT_DEPTH_BIT; }
- static const std::array<VkImageAspectFlagBits, kAspectCount> &AspectBits() {
- static std::array<VkImageAspectFlagBits, kAspectCount> kAspectBits{{VK_IMAGE_ASPECT_DEPTH_BIT}};
- return kAspectBits;
- }
-};
-
-struct StencilAspectTraits {
- static const uint32_t kAspectCount = 1;
- static int Index(VkImageAspectFlags mask) { return 0; };
- static VkImageAspectFlags AspectMask() { return VK_IMAGE_ASPECT_STENCIL_BIT; }
- static const std::array<VkImageAspectFlagBits, kAspectCount> &AspectBits() {
- static std::array<VkImageAspectFlagBits, kAspectCount> kAspectBits{{VK_IMAGE_ASPECT_STENCIL_BIT}};
- return kAspectBits;
- }
-};
-
-struct DepthStencilAspectTraits {
- // VK_IMAGE_ASPECT_DEPTH_BIT = 0x00000002, >> 1 -> 1 -1 -> 0
- // VK_IMAGE_ASPECT_STENCIL_BIT = 0x00000004, >> 1 -> 2 -1 = 1
- static const uint32_t kAspectCount = 2;
- static uint32_t Index(VkImageAspectFlags mask) {
- uint32_t index = (mask >> 1) - 1;
- assert((index == 0) || (index == 1));
- return index;
- };
- static VkImageAspectFlags AspectMask() { return VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT; }
- static const std::array<VkImageAspectFlagBits, kAspectCount> &AspectBits() {
- static std::array<VkImageAspectFlagBits, kAspectCount> kAspectBits{
- {VK_IMAGE_ASPECT_DEPTH_BIT, VK_IMAGE_ASPECT_STENCIL_BIT}};
- return kAspectBits;
- }
-};
-
-struct Multiplane2AspectTraits {
- // VK_IMAGE_ASPECT_PLANE_0_BIT = 0x00000010, >> 4 - 1 -> 0
- // VK_IMAGE_ASPECT_PLANE_1_BIT = 0x00000020, >> 4 - 1 -> 1
- static const uint32_t kAspectCount = 2;
- static uint32_t Index(VkImageAspectFlags mask) {
- uint32_t index = (mask >> 4) - 1;
- assert((index == 0) || (index == 1));
- return index;
- };
- static VkImageAspectFlags AspectMask() { return VK_IMAGE_ASPECT_PLANE_0_BIT | VK_IMAGE_ASPECT_PLANE_1_BIT; }
- static const std::array<VkImageAspectFlagBits, kAspectCount> &AspectBits() {
- static std::array<VkImageAspectFlagBits, kAspectCount> kAspectBits{
- {VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT}};
- return kAspectBits;
- }
-};
-
-struct Multiplane3AspectTraits {
- // VK_IMAGE_ASPECT_PLANE_0_BIT = 0x00000010, >> 4 - 1 -> 0
- // VK_IMAGE_ASPECT_PLANE_1_BIT = 0x00000020, >> 4 - 1 -> 1
- // VK_IMAGE_ASPECT_PLANE_2_BIT = 0x00000040, >> 4 - 1 -> 3
- static const uint32_t kAspectCount = 3;
- static uint32_t Index(VkImageAspectFlags mask) {
- uint32_t index = (mask >> 4) - 1;
- index = index > 2 ? 2 : index;
- assert((index == 0) || (index == 1) || (index == 2));
- return index;
- };
- static VkImageAspectFlags AspectMask() {
- return VK_IMAGE_ASPECT_PLANE_0_BIT | VK_IMAGE_ASPECT_PLANE_1_BIT | VK_IMAGE_ASPECT_PLANE_2_BIT;
- }
- static const std::array<VkImageAspectFlagBits, kAspectCount> &AspectBits() {
- static std::array<VkImageAspectFlagBits, kAspectCount> kAspectBits{
- {VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT, VK_IMAGE_ASPECT_PLANE_2_BIT}};
- return kAspectBits;
- }
-};
-
-std::string FormatDebugLabel(const char *prefix, const LoggingLabel &label);
-
-const static VkImageLayout kInvalidLayout = VK_IMAGE_LAYOUT_MAX_ENUM;
-// Interface class.
-class ImageSubresourceLayoutMap {
+class IMAGE_CMD_BUF_LAYOUT_NODE {
public:
- typedef std::function<bool(const VkImageSubresource &, VkImageLayout, VkImageLayout)> Callback;
- struct InitialLayoutState {
- VkImageView image_view; // For relaxed matching rule evaluation, else VK_NULL_HANDLE
- VkImageAspectFlags aspect_mask; // For relaxed matching rules... else 0
- LoggingLabel label;
- InitialLayoutState(const CMD_BUFFER_STATE &cb_state_, const IMAGE_VIEW_STATE *view_state);
- InitialLayoutState() : image_view(VK_NULL_HANDLE), aspect_mask(0), label() {}
- };
+ IMAGE_CMD_BUF_LAYOUT_NODE() = default;
+ IMAGE_CMD_BUF_LAYOUT_NODE(VkImageLayout initialLayoutInput, VkImageLayout layoutInput)
+ : initialLayout(initialLayoutInput), layout(layoutInput) {}
- struct SubresourceLayout {
- VkImageSubresource subresource;
- VkImageLayout layout;
- };
-
- struct SubresourceRangeLayout {
- VkImageSubresourceRange range;
- VkImageLayout layout;
- };
-
- class ConstIteratorInterface {
- public:
- // Make the value accessor non virtual
- const SubresourceLayout &operator*() const { return value_; }
-
- virtual ConstIteratorInterface &operator++() = 0;
- virtual bool AtEnd() const = 0;
- virtual ~ConstIteratorInterface(){};
-
- protected:
- SubresourceLayout value_;
- };
-
- class ConstIterator {
- public:
- ConstIterator &operator++() {
- ++(*it_);
- return *this;
- }
- const SubresourceLayout &operator*() const { return *(*it_); }
- ConstIterator(ConstIteratorInterface *it) : it_(it){};
- bool AtEnd() const { return it_->AtEnd(); }
-
- protected:
- std::unique_ptr<ConstIteratorInterface> it_;
- };
-
- virtual ConstIterator BeginInitialUse() const = 0;
- virtual ConstIterator BeginSetLayout() const = 0;
-
- virtual bool SetSubresourceRangeLayout(const CMD_BUFFER_STATE &cb_state, const VkImageSubresourceRange &range,
- VkImageLayout layout, VkImageLayout expected_layout = kInvalidLayout) = 0;
- virtual bool SetSubresourceRangeInitialLayout(const CMD_BUFFER_STATE &cb_state, const VkImageSubresourceRange &range,
- VkImageLayout layout, const IMAGE_VIEW_STATE *view_state = nullptr) = 0;
- virtual bool ForRange(const VkImageSubresourceRange &range, const Callback &callback, bool skip_invalid = true,
- bool always_get_initial = false) const = 0;
- virtual VkImageLayout GetSubresourceLayout(const VkImageSubresource subresource) const = 0;
- virtual VkImageLayout GetSubresourceInitialLayout(const VkImageSubresource subresource) const = 0;
- virtual const InitialLayoutState *GetSubresourceInitialLayoutState(const VkImageSubresource subresource) const = 0;
- virtual bool UpdateFrom(const ImageSubresourceLayoutMap &from) = 0;
- virtual uintptr_t CompatibilityKey() const = 0;
- ImageSubresourceLayoutMap() {}
- virtual ~ImageSubresourceLayoutMap() {}
-};
-
-template <typename AspectTraits_, size_t kSparseThreshold = 64U>
-class ImageSubresourceLayoutMapImpl : public ImageSubresourceLayoutMap {
- public:
- typedef ImageSubresourceLayoutMap Base;
- typedef AspectTraits_ AspectTraits;
- typedef Base::SubresourceLayout SubresourceLayout;
- typedef sparse_container::SparseVector<size_t, VkImageLayout, true, kInvalidLayout, kSparseThreshold> LayoutMap;
- typedef sparse_container::SparseVector<size_t, VkImageLayout, false, kInvalidLayout, kSparseThreshold> InitialLayoutMap;
-
- struct Layouts {
- LayoutMap current;
- InitialLayoutMap initial;
- Layouts(size_t size) : current(0, size), initial(0, size) {}
- };
-
- template <typename Container>
- class ConstIteratorImpl : public Base::ConstIteratorInterface {
- public:
- ConstIteratorImpl &operator++() override {
- ++it_;
- UpdateValue();
- return *this;
- }
- // Just good enough for cend checks
- ConstIteratorImpl(const ImageSubresourceLayoutMapImpl &map, const Container &container)
- : map_(&map), container_(&container), the_end_(false) {
- it_ = container_->cbegin();
- UpdateValue();
- }
- ~ConstIteratorImpl() override {}
- virtual bool AtEnd() const override { return the_end_; }
-
- protected:
- void UpdateValue() {
- if (it_ != container_->cend()) {
- value_.subresource = map_->Decode((*it_).first);
- value_.layout = (*it_).second;
- } else {
- the_end_ = true;
- value_.layout = kInvalidLayout;
- }
- }
-
- typedef typename Container::const_iterator ContainerIterator;
- const ImageSubresourceLayoutMapImpl *map_;
- const Container *container_;
- bool the_end_;
- ContainerIterator it_;
- };
-
- Base::ConstIterator BeginInitialUse() const override {
- return Base::ConstIterator(new ConstIteratorImpl<InitialLayoutMap>(*this, layouts_.initial));
- }
-
- Base::ConstIterator BeginSetLayout() const override {
- return Base::ConstIterator(new ConstIteratorImpl<LayoutMap>(*this, layouts_.current));
- }
-
- bool SetSubresourceRangeLayout(const CMD_BUFFER_STATE &cb_state, const VkImageSubresourceRange &range, VkImageLayout layout,
- VkImageLayout expected_layout = kInvalidLayout) override {
- bool updated = false;
- if (expected_layout == kInvalidLayout) {
- // Set the initial layout to the set layout as we had no other layout to reference
- expected_layout = layout;
- }
- if (!InRange(range)) return false; // Don't even try to track bogus subresources
-
- InitialLayoutState *initial_state = nullptr;
- const uint32_t end_mip = range.baseMipLevel + range.levelCount;
- const auto &aspects = AspectTraits::AspectBits();
- for (uint32_t aspect_index = 0; aspect_index < AspectTraits::kAspectCount; aspect_index++) {
- if (0 == (range.aspectMask & aspects[aspect_index])) continue;
- size_t array_offset = Encode(aspect_index, range.baseMipLevel);
- for (uint32_t mip_level = range.baseMipLevel; mip_level < end_mip; ++mip_level, array_offset += mip_size_) {
- size_t start = array_offset + range.baseArrayLayer;
- size_t end = start + range.layerCount;
- bool updated_level = layouts_.current.SetRange(start, end, layout);
- if (updated_level) {
- // We only need to try setting the initial layout, if we changed any of the layout values above
- updated = true;
- if (layouts_.initial.SetRange(start, end, expected_layout)) {
- // We only need to try setting the initial layout *state* if the initial layout was updated
- initial_state = UpdateInitialLayoutState(start, end, initial_state, cb_state, nullptr);
- }
- }
- }
- }
- if (updated) version_++;
- return updated;
- }
-
- bool SetSubresourceRangeInitialLayout(const CMD_BUFFER_STATE &cb_state, const VkImageSubresourceRange &range,
- VkImageLayout layout, const IMAGE_VIEW_STATE *view_state = nullptr) override {
- bool updated = false;
- if (!InRange(range)) return false; // Don't even try to track bogus subresources
-
- InitialLayoutState *initial_state = nullptr;
- const uint32_t end_mip = range.baseMipLevel + range.levelCount;
- const auto &aspects = AspectTraits::AspectBits();
- for (uint32_t aspect_index = 0; aspect_index < AspectTraits::kAspectCount; aspect_index++) {
- if (0 == (range.aspectMask & aspects[aspect_index])) continue;
- size_t array_offset = Encode(aspect_index, range.baseMipLevel);
- for (uint32_t mip_level = range.baseMipLevel; mip_level < end_mip; ++mip_level, array_offset += mip_size_) {
- size_t start = array_offset + range.baseArrayLayer;
- size_t end = start + range.layerCount;
- bool updated_level = layouts_.initial.SetRange(start, end, layout);
- if (updated_level) {
- updated = true;
- // We only need to try setting the initial layout *state* if the initial layout was updated
- initial_state = UpdateInitialLayoutState(start, end, initial_state, cb_state, view_state);
- }
- }
- }
- if (updated) version_++;
- return updated;
- }
-
- // Loop over the given range calling the callback, primarily for
- // validation checks. By default the initial_value is only looked
- // up if the set value isn't found.
- bool ForRange(const VkImageSubresourceRange &range, const Callback &callback, bool skip_invalid = true,
- bool always_get_initial = false) const override {
- if (!InRange(range)) return false; // Don't even try to process bogus subresources
-
- VkImageSubresource subres;
- auto &level = subres.mipLevel;
- auto &layer = subres.arrayLayer;
- auto &aspect = subres.aspectMask;
- const auto &aspects = AspectTraits::AspectBits();
- bool keep_on = true;
- const uint32_t end_mip = range.baseMipLevel + range.levelCount;
- const uint32_t end_layer = range.baseArrayLayer + range.layerCount;
- for (uint32_t aspect_index = 0; aspect_index < AspectTraits::kAspectCount; aspect_index++) {
- if (0 == (range.aspectMask & aspects[aspect_index])) continue;
- aspect = aspects[aspect_index]; // noting that this and the following loop indices are references
- size_t array_offset = Encode(aspect_index, range.baseMipLevel);
- for (level = range.baseMipLevel; level < end_mip; ++level, array_offset += mip_size_) {
- for (layer = range.baseArrayLayer; layer < end_layer; layer++) {
- // TODO -- would an iterator with range check be faster?
- size_t index = array_offset + layer;
- VkImageLayout layout = layouts_.current.Get(index);
- VkImageLayout initial_layout = kInvalidLayout;
- if (always_get_initial || (layout == kInvalidLayout)) {
- initial_layout = layouts_.initial.Get(index);
- }
-
- if (!skip_invalid || (layout != kInvalidLayout) || (initial_layout != kInvalidLayout)) {
- keep_on = callback(subres, layout, initial_layout);
- if (!keep_on) return keep_on; // False value from the callback aborts the range traversal
- }
- }
- }
- }
- return keep_on;
- }
- VkImageLayout GetSubresourceInitialLayout(const VkImageSubresource subresource) const override {
- if (!InRange(subresource)) return kInvalidLayout;
- uint32_t aspect_index = AspectTraits::Index(subresource.aspectMask);
- size_t index = Encode(aspect_index, subresource.mipLevel, subresource.arrayLayer);
- return layouts_.initial.Get(index);
- }
-
- const InitialLayoutState *GetSubresourceInitialLayoutState(const VkImageSubresource subresource) const override {
- if (!InRange(subresource)) return nullptr;
- uint32_t aspect_index = AspectTraits::Index(subresource.aspectMask);
- size_t index = Encode(aspect_index, subresource.mipLevel, subresource.arrayLayer);
- return initial_layout_state_map_.Get(index);
- }
-
- VkImageLayout GetSubresourceLayout(const VkImageSubresource subresource) const override {
- if (!InRange(subresource)) return kInvalidLayout;
- uint32_t aspect_index = AspectTraits::Index(subresource.aspectMask);
- size_t index = Encode(aspect_index, subresource.mipLevel, subresource.arrayLayer);
- return layouts_.current.Get(index);
- }
-
- // TODO: make sure this paranoia check is sufficient and not too much.
- uintptr_t CompatibilityKey() const override {
- return (reinterpret_cast<const uintptr_t>(&image_state_) ^ AspectTraits::AspectMask() ^ kSparseThreshold);
- }
-
- bool UpdateFrom(const ImageSubresourceLayoutMap &other) override {
- // Must be from matching images for the reinterpret cast to be valid
- assert(CompatibilityKey() == other.CompatibilityKey());
- if (CompatibilityKey() != other.CompatibilityKey()) return false;
-
- const auto &from = reinterpret_cast<const ImageSubresourceLayoutMapImpl &>(other);
- bool updated = false;
- updated |= layouts_.initial.Merge(from.layouts_.initial);
- updated |= layouts_.current.Merge(from.layouts_.current);
- initial_layout_state_map_.Merge(from.initial_layout_state_map_);
-
- return updated;
- }
-
- ImageSubresourceLayoutMapImpl() : Base() {}
- ImageSubresourceLayoutMapImpl(const IMAGE_STATE &image_state)
- : Base(),
- image_state_(image_state),
- mip_size_(image_state.full_range.layerCount),
- aspect_size_(mip_size_ * image_state.full_range.levelCount),
- version_(0),
- layouts_(aspect_size_ * AspectTraits::kAspectCount),
- initial_layout_states_(),
- initial_layout_state_map_(0, aspect_size_ * AspectTraits::kAspectCount) {
- // Setup the row <-> aspect/mip_level base Encode/Decode LUT...
- aspect_offsets_[0] = 0;
- for (size_t i = 1; i < aspect_offsets_.size(); ++i) { // Size is a compile time constant
- aspect_offsets_[i] = aspect_offsets_[i - 1] + aspect_size_;
- }
- }
- ~ImageSubresourceLayoutMapImpl() override {}
-
- protected:
- // This looks a bit ponderous but kAspectCount is a compile time constant
- VkImageSubresource Decode(size_t index) const {
- VkImageSubresource subres;
- // find aspect index
- uint32_t aspect_index = 0;
- if (AspectTraits::kAspectCount == 2) {
- if (index >= aspect_offsets_[1]) {
- aspect_index = 1;
- index = index - aspect_offsets_[aspect_index];
- }
- } else if (AspectTraits::kAspectCount == 3) {
- if (index >= aspect_offsets_[2]) {
- aspect_index = 2;
- } else if (index >= aspect_offsets_[1]) {
- aspect_index = 1;
- }
- index = index - aspect_offsets_[aspect_index];
- } else {
- assert(AspectTraits::kAspectCount == 1); // Only aspect counts of 1, 2, and 3 supported
- }
-
- subres.aspectMask = AspectTraits::AspectBits()[aspect_index];
- subres.mipLevel =
- static_cast<uint32_t>(index / mip_size_); // One hopes the compiler will optimize this pair of divisions...
- subres.arrayLayer = static_cast<uint32_t>(index % mip_size_);
-
- return subres;
- }
-
- uint32_t LevelLimit(uint32_t level) const { return (std::min)(image_state_.full_range.levelCount, level); }
- uint32_t LayerLimit(uint32_t layer) const { return (std::min)(image_state_.full_range.layerCount, layer); }
-
- bool InRange(const VkImageSubresource &subres) const {
- bool in_range = (subres.mipLevel < image_state_.full_range.levelCount) &&
- (subres.arrayLayer < image_state_.full_range.layerCount) &&
- (subres.aspectMask & AspectTraits::AspectMask());
- return in_range;
- }
-
- bool InRange(const VkImageSubresourceRange &range) const {
- bool in_range = (range.baseMipLevel < image_state_.full_range.levelCount) &&
- ((range.baseMipLevel + range.levelCount) <= image_state_.full_range.levelCount) &&
- (range.baseArrayLayer < image_state_.full_range.layerCount) &&
- ((range.baseArrayLayer + range.layerCount) <= image_state_.full_range.layerCount) &&
- (range.aspectMask & AspectTraits::AspectMask());
- return in_range;
- }
-
- inline size_t Encode(uint32_t aspect_index) const {
- return (AspectTraits::kAspectCount == 1) ? 0 : aspect_offsets_[aspect_index];
- }
- inline size_t Encode(uint32_t aspect_index, uint32_t mip_level) const { return Encode(aspect_index) + mip_level * mip_size_; }
- inline size_t Encode(uint32_t aspect_index, uint32_t mip_level, uint32_t array_layer) const {
- return Encode(aspect_index, mip_level) + array_layer;
- }
-
- InitialLayoutState *UpdateInitialLayoutState(size_t start, size_t end, InitialLayoutState *initial_state,
- const CMD_BUFFER_STATE &cb_state, const IMAGE_VIEW_STATE *view_state) {
- if (!initial_state) {
- // Allocate on demand... initial_layout_states_ holds ownership as a unique_ptr, while
- // each subresource has a non-owning copy of the plain pointer.
- initial_state = new InitialLayoutState(cb_state, view_state);
- initial_layout_states_.emplace_back(initial_state);
- }
- assert(initial_state);
- initial_layout_state_map_.SetRange(start, end, initial_state);
- return initial_state;
- }
-
- typedef std::vector<std::unique_ptr<InitialLayoutState>> InitialLayoutStates;
- // This map *also* needs "write once" semantics
- typedef sparse_container::SparseVector<size_t, InitialLayoutState *, false, nullptr, kSparseThreshold> InitialLayoutStateMap;
-
- const IMAGE_STATE &image_state_;
- const size_t mip_size_;
- const size_t aspect_size_;
- uint64_t version_ = 0;
- Layouts layouts_;
- InitialLayoutStates initial_layout_states_;
- InitialLayoutStateMap initial_layout_state_map_;
- std::array<size_t, AspectTraits::kAspectCount> aspect_offsets_;
-};
-
-static VkImageLayout NormalizeImageLayout(VkImageLayout layout, VkImageLayout non_normal, VkImageLayout normal) {
- return (layout == non_normal) ? normal : layout;
-}
-
-static VkImageLayout NormalizeDepthImageLayout(VkImageLayout layout) {
- return NormalizeImageLayout(layout, VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL,
- VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL);
-}
-
-static VkImageLayout NormalizeStencilImageLayout(VkImageLayout layout) {
- return NormalizeImageLayout(layout, VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL,
- VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL);
-}
-
-static bool ImageLayoutMatches(const VkImageAspectFlags aspect_mask, VkImageLayout a, VkImageLayout b) {
- bool matches = (a == b);
- if (!matches) {
- // Relaxed rules when referencing *only* the depth or stencil aspects
- if (aspect_mask == VK_IMAGE_ASPECT_DEPTH_BIT) {
- matches = NormalizeDepthImageLayout(a) == NormalizeDepthImageLayout(b);
- } else if (aspect_mask == VK_IMAGE_ASPECT_STENCIL_BIT) {
- matches = NormalizeStencilImageLayout(a) == NormalizeStencilImageLayout(b);
- }
- }
- return matches;
-}
-
-// Utility type for ForRange callbacks
-struct LayoutUseCheckAndMessage {
- const static VkImageAspectFlags kDepthOrStencil = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
- const ImageSubresourceLayoutMap *layout_map;
- const VkImageAspectFlags aspect_mask;
- const char *message;
+ VkImageLayout initialLayout;
VkImageLayout layout;
-
- LayoutUseCheckAndMessage() = delete;
- LayoutUseCheckAndMessage(const ImageSubresourceLayoutMap *layout_map_, const VkImageAspectFlags aspect_mask_ = 0)
- : layout_map(layout_map_), aspect_mask{aspect_mask_}, message(nullptr), layout(kInvalidLayout) {}
- bool Check(const VkImageSubresource &subres, VkImageLayout check, VkImageLayout current_layout, VkImageLayout initial_layout) {
- message = nullptr;
- layout = kInvalidLayout; // Success status
- if (current_layout != kInvalidLayout && !ImageLayoutMatches(aspect_mask, check, current_layout)) {
- message = "previous known";
- layout = current_layout;
- } else if ((initial_layout != kInvalidLayout) && !ImageLayoutMatches(aspect_mask, check, initial_layout)) {
- // To check the relaxed rule matching we need to see how the initial use was used
- const auto initial_layout_state = layout_map->GetSubresourceInitialLayoutState(subres);
- assert(initial_layout_state); // If we have an initial layout, we better have a state for it
- if (!((initial_layout_state->aspect_mask & kDepthOrStencil) &&
- ImageLayoutMatches(initial_layout_state->aspect_mask, check, initial_layout))) {
- message = "previously used";
- layout = initial_layout;
- }
- }
- return layout == kInvalidLayout;
- }
};
// Store the DAG.
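
To summarize the bookkeeping the restored IMAGE_CMD_BUF_LAYOUT_NODE provides: each command buffer maps an image subresource to a pair of layouts, where initialLayout records the layout the subresource was first assumed to be in when the command buffer starts executing and layout tracks what subsequent barriers in the same command buffer changed it to. A minimal sketch of that map, with SubresKey as an invented stand-in for the layer's ImageSubresourcePair key:

    #include <vulkan/vulkan.h>
    #include <cstdint>
    #include <functional>
    #include <unordered_map>

    struct SubresKey {  // stand-in for ImageSubresourcePair
        uint64_t image;
        uint32_t mip;
        uint32_t layer;
        bool operator==(const SubresKey &o) const { return image == o.image && mip == o.mip && layer == o.layer; }
    };
    struct SubresKeyHash {
        size_t operator()(const SubresKey &k) const { return std::hash<uint64_t>()(k.image ^ (uint64_t(k.mip) << 40) ^ k.layer); }
    };
    struct LayoutNode { VkImageLayout initialLayout; VkImageLayout layout; };  // mirrors IMAGE_CMD_BUF_LAYOUT_NODE

    int main() {
        std::unordered_map<SubresKey, LayoutNode, SubresKeyHash> image_layout_map;
        SubresKey subres{0xABCDu, 0, 0};
        // First use in this command buffer: record what layout the subresource must already be in.
        image_layout_map[subres] = {VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL};
        // A later pipeline barrier only moves the current layout; the initial expectation stays put.
        image_layout_map[subres].layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
        return image_layout_map.at(subres).initialLayout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL ? 0 : 1;
    }
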
@@ -944,8 +449,83 @@ struct RENDER_PASS_STATE : public BASE_NODE {
RENDER_PASS_STATE(VkRenderPassCreateInfo const *pCreateInfo) { ConvertVkRenderPassCreateInfoToV2KHR(pCreateInfo, &createInfo); }
};
-// Autogenerated as part of the vk_validation_error_message.h codegen
-enum CMD_TYPE { VUID_CMD_ENUM_LIST(CMD_) };
+// vkCmd tracking -- complete as of header 1.0.68
+// please keep in "none, then sorted" order
+// Note: grepping vulkan.h for VKAPI_CALL.*vkCmd will return all functions except vkEndCommandBuffer
+
+enum CMD_TYPE {
+ CMD_NONE,
+ CMD_BEGINQUERY,
+ CMD_BEGINRENDERPASS,
+ CMD_BEGINRENDERPASS2KHR,
+ CMD_BINDDESCRIPTORSETS,
+ CMD_BINDINDEXBUFFER,
+ CMD_BINDPIPELINE,
+ CMD_BINDSHADINGRATEIMAGE,
+ CMD_BINDVERTEXBUFFERS,
+ CMD_BLITIMAGE,
+ CMD_CLEARATTACHMENTS,
+ CMD_CLEARCOLORIMAGE,
+ CMD_CLEARDEPTHSTENCILIMAGE,
+ CMD_COPYBUFFER,
+ CMD_COPYBUFFERTOIMAGE,
+ CMD_COPYIMAGE,
+ CMD_COPYIMAGETOBUFFER,
+ CMD_COPYQUERYPOOLRESULTS,
+ CMD_DEBUGMARKERBEGINEXT,
+ CMD_DEBUGMARKERENDEXT,
+ CMD_DEBUGMARKERINSERTEXT,
+ CMD_DISPATCH,
+ CMD_DISPATCHBASEKHX,
+ CMD_DISPATCHINDIRECT,
+ CMD_DRAW,
+ CMD_DRAWINDEXED,
+ CMD_DRAWINDEXEDINDIRECT,
+ CMD_DRAWINDEXEDINDIRECTCOUNTAMD,
+ CMD_DRAWINDEXEDINDIRECTCOUNTKHR,
+ CMD_DRAWINDIRECT,
+ CMD_DRAWINDIRECTCOUNTAMD,
+ CMD_DRAWINDIRECTCOUNTKHR,
+ CMD_DRAWMESHTASKSNV,
+ CMD_DRAWMESHTASKSINDIRECTNV,
+ CMD_DRAWMESHTASKSINDIRECTCOUNTNV,
+ CMD_ENDCOMMANDBUFFER, // Should be the last command in any RECORDED cmd buffer
+ CMD_ENDQUERY,
+ CMD_ENDRENDERPASS,
+ CMD_ENDRENDERPASS2KHR,
+ CMD_EXECUTECOMMANDS,
+ CMD_FILLBUFFER,
+ CMD_NEXTSUBPASS,
+ CMD_NEXTSUBPASS2KHR,
+ CMD_PIPELINEBARRIER,
+ CMD_PROCESSCOMMANDSNVX,
+ CMD_PUSHCONSTANTS,
+ CMD_PUSHDESCRIPTORSETKHR,
+ CMD_PUSHDESCRIPTORSETWITHTEMPLATEKHR,
+ CMD_RESERVESPACEFORCOMMANDSNVX,
+ CMD_RESETEVENT,
+ CMD_RESETQUERYPOOL,
+ CMD_RESOLVEIMAGE,
+ CMD_SETBLENDCONSTANTS,
+ CMD_SETDEPTHBIAS,
+ CMD_SETDEPTHBOUNDS,
+ CMD_SETDEVICEMASKKHX,
+ CMD_SETDISCARDRECTANGLEEXT,
+ CMD_SETEVENT,
+ CMD_SETEXCLUSIVESCISSOR,
+ CMD_SETLINEWIDTH,
+ CMD_SETSAMPLELOCATIONSEXT,
+ CMD_SETSCISSOR,
+ CMD_SETSTENCILCOMPAREMASK,
+ CMD_SETSTENCILREFERENCE,
+ CMD_SETSTENCILWRITEMASK,
+ CMD_SETVIEWPORT,
+ CMD_SETVIEWPORTSHADINGRATEPALETTE,
+ CMD_SETVIEWPORTWSCALINGNV,
+ CMD_UPDATEBUFFER,
+ CMD_WAITEVENTS,
+ CMD_WRITETIMESTAMP,
+};
enum CB_STATE {
CB_NEW, // Newly created CB w/o any cmds
@@ -972,71 +552,29 @@ enum CBStatusFlagBits {
CBSTATUS_INDEX_BUFFER_BOUND = 0x00000200, // Index buffer has been set
CBSTATUS_EXCLUSIVE_SCISSOR_SET = 0x00000400,
CBSTATUS_SHADING_RATE_PALETTE_SET = 0x00000800,
- CBSTATUS_LINE_STIPPLE_SET = 0x00001000,
- CBSTATUS_ALL_STATE_SET = 0x00001DFF, // All state set (intentionally exclude index buffer)
+ CBSTATUS_ALL_STATE_SET = 0x00000DFF, // All state set (intentionally exclude index buffer)
// clang-format on
};
struct QueryObject {
VkQueryPool pool;
- uint32_t query;
- // These next two fields are *not* used in hash or comparison, they are effectively a data payload
- uint32_t index; // must be zero if !indexed
- bool indexed;
- QueryObject(VkQueryPool pool_, uint32_t query_) : pool(pool_), query(query_), index(0), indexed(false) {}
- QueryObject(VkQueryPool pool_, uint32_t query_, uint32_t index_) : pool(pool_), query(query_), index(index_), indexed(true) {}
- bool operator<(const QueryObject &rhs) const { return (pool == rhs.pool) ? query < rhs.query : pool < rhs.pool; }
-};
-
-enum QueryState {
- QUERYSTATE_UNKNOWN, // Initial state.
- QUERYSTATE_RESET, // After resetting.
- QUERYSTATE_RUNNING, // Query running.
- QUERYSTATE_ENDED, // Query ended but results may not be available.
- QUERYSTATE_AVAILABLE, // Results available.
-};
-
-enum QueryResultType {
- QUERYRESULT_UNKNOWN,
- QUERYRESULT_NO_DATA,
- QUERYRESULT_MAYBE_NO_DATA,
- QUERYRESULT_SOME_DATA,
- QUERYRESULT_WAIT_ON_RESET,
- QUERYRESULT_WAIT_ON_RUNNING,
+ uint32_t index;
};
-inline const char *string_QueryResultType(QueryResultType result_type) {
- switch (result_type) {
- case QUERYRESULT_UNKNOWN:
- return "query may be in an unknown state";
- case QUERYRESULT_NO_DATA:
- case QUERYRESULT_MAYBE_NO_DATA:
- return "query may return no data";
- case QUERYRESULT_SOME_DATA:
- return "query will return some data or availability bit";
- case QUERYRESULT_WAIT_ON_RESET:
- return "waiting on a query that has been reset and not issued yet";
- case QUERYRESULT_WAIT_ON_RUNNING:
- return "waiting on a query that has not ended yet";
- }
- assert(false);
- return "UNKNOWN QUERY STATE"; // Unreachable.
-}
-
inline bool operator==(const QueryObject &query1, const QueryObject &query2) {
- return ((query1.pool == query2.pool) && (query1.query == query2.query));
+ return (query1.pool == query2.pool && query1.index == query2.index);
}
namespace std {
template <>
struct hash<QueryObject> {
size_t operator()(QueryObject query) const throw() {
- return hash<uint64_t>()((uint64_t)(query.pool)) ^ hash<uint32_t>()(query.query);
+ return hash<uint64_t>()((uint64_t)(query.pool)) ^ hash<uint32_t>()(query.index);
}
};
} // namespace std
-struct CBVertexBufferBindingInfo {
+struct DrawData {
std::vector<BufferBinding> vertex_buffer_bindings;
};
@@ -1100,13 +638,13 @@ using PipelineLayoutCompatDict = hash_util::Dictionary<PipelineLayoutCompatDef,
using PipelineLayoutCompatId = PipelineLayoutCompatDict::Id;
// Store layouts and pushconstants for PipelineLayout
-struct PIPELINE_LAYOUT_STATE {
+struct PIPELINE_LAYOUT_NODE {
VkPipelineLayout layout;
std::vector<std::shared_ptr<cvdescriptorset::DescriptorSetLayout const>> set_layouts;
PushConstantRangesId push_constant_ranges;
std::vector<PipelineLayoutCompatId> compat_for_set;
- PIPELINE_LAYOUT_STATE() : layout(VK_NULL_HANDLE), set_layouts{}, push_constant_ranges{}, compat_for_set{} {}
+ PIPELINE_LAYOUT_NODE() : layout(VK_NULL_HANDLE), set_layouts{}, push_constant_ranges{}, compat_for_set{} {}
void reset() {
layout = VK_NULL_HANDLE;
@@ -1115,55 +653,48 @@ struct PIPELINE_LAYOUT_STATE {
compat_for_set.clear();
}
};
-// Shader typedefs needed to store StageStage below
-struct interface_var {
- uint32_t id;
- uint32_t type_id;
- uint32_t offset;
- bool is_patch;
- bool is_block_member;
- bool is_relaxed_precision;
- // TODO: collect the name, too? Isn't required to be present.
-};
-typedef std::pair<unsigned, unsigned> descriptor_slot_t;
+
+static inline bool CompatForSet(uint32_t set, const std::vector<PipelineLayoutCompatId> &a,
+ const std::vector<PipelineLayoutCompatId> &b) {
+ bool result = (set < a.size()) && (set < b.size()) && (a[set] == b[set]);
+ return result;
+}
+
+static inline bool CompatForSet(uint32_t set, const PIPELINE_LAYOUT_NODE *a, const PIPELINE_LAYOUT_NODE *b) {
+ // Intentionally have a result variable to simplify debugging
+ bool result = a && b && CompatForSet(set, a->compat_for_set, b->compat_for_set);
+ return result;
+}
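
A brief usage sketch for the compatibility helpers just restored: when a pipeline layout changes, previously bound descriptor sets remain valid only up to the first set number whose compat id differs between the old and new layouts. The integers below are arbitrary stand-ins for PipelineLayoutCompatId values:

    #include <cstdint>
    #include <vector>

    using CompatId = int;  // stand-in for the interned PipelineLayoutCompatId

    static bool CompatForSetSketch(uint32_t set, const std::vector<CompatId> &a, const std::vector<CompatId> &b) {
        return (set < a.size()) && (set < b.size()) && (a[set] == b[set]);
    }

    int main() {
        std::vector<CompatId> layout_a{7, 3};  // set 0 -> id 7, set 1 -> id 3
        std::vector<CompatId> layout_b{7, 9};  // agrees on set 0, diverges at set 1
        bool set0_still_bound = CompatForSetSketch(0, layout_a, layout_b);  // true
        bool set1_still_bound = CompatForSetSketch(1, layout_a, layout_b);  // false: set 1 is disturbed
        return (set0_still_bound && !set1_still_bound) ? 0 : 1;
    }
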
class PIPELINE_STATE : public BASE_NODE {
public:
- struct StageState {
- std::unordered_set<uint32_t> accessible_ids;
- std::vector<std::pair<descriptor_slot_t, interface_var>> descriptor_uses;
- bool has_writable_descriptor;
- };
-
VkPipeline pipeline;
safe_VkGraphicsPipelineCreateInfo graphicsPipelineCI;
- safe_VkComputePipelineCreateInfo computePipelineCI;
- safe_VkRayTracingPipelineCreateInfoNV raytracingPipelineCI;
// Hold shared ptr to RP in case RP itself is destroyed
std::shared_ptr<RENDER_PASS_STATE> rp_state;
+ safe_VkComputePipelineCreateInfo computePipelineCI;
+ safe_VkRayTracingPipelineCreateInfoNV raytracingPipelineCI;
// Flag of which shader stages are active for this pipeline
uint32_t active_shaders;
uint32_t duplicate_shaders;
// Capture which slots (set#->bindings) are actually used by the shaders of this pipeline
- std::unordered_map<uint32_t, BindingReqMap> active_slots;
- // Additional metadata needed by pipeline_state initialization and validation
- std::vector<StageState> stage_state;
+ std::unordered_map<uint32_t, std::map<uint32_t, descriptor_req>> active_slots;
// Vtx input info (if any)
std::vector<VkVertexInputBindingDescription> vertex_binding_descriptions_;
std::vector<VkVertexInputAttributeDescription> vertex_attribute_descriptions_;
std::unordered_map<uint32_t, uint32_t> vertex_binding_to_index_map_;
std::vector<VkPipelineColorBlendAttachmentState> attachments;
bool blendConstantsEnabled; // Blend constants enabled for any attachments
- PIPELINE_LAYOUT_STATE pipeline_layout;
+ PIPELINE_LAYOUT_NODE pipeline_layout;
VkPrimitiveTopology topology_at_rasterizer;
// Default constructor
PIPELINE_STATE()
: pipeline{},
graphicsPipelineCI{},
+ rp_state(nullptr),
computePipelineCI{},
raytracingPipelineCI{},
- rp_state(nullptr),
active_shaders(0),
duplicate_shaders(0),
active_slots(),
@@ -1175,41 +706,106 @@ class PIPELINE_STATE : public BASE_NODE {
pipeline_layout(),
topology_at_rasterizer{} {}
- void reset() {
- VkGraphicsPipelineCreateInfo emptyGraphicsCI = {};
- graphicsPipelineCI.initialize(&emptyGraphicsCI, false, false);
+ void initGraphicsPipeline(const VkGraphicsPipelineCreateInfo *pCreateInfo, std::shared_ptr<RENDER_PASS_STATE> &&rpstate) {
+ bool uses_color_attachment = false;
+ bool uses_depthstencil_attachment = false;
+ if (pCreateInfo->subpass < rpstate->createInfo.subpassCount) {
+ const auto &subpass = rpstate->createInfo.pSubpasses[pCreateInfo->subpass];
+
+ for (uint32_t i = 0; i < subpass.colorAttachmentCount; ++i) {
+ if (subpass.pColorAttachments[i].attachment != VK_ATTACHMENT_UNUSED) {
+ uses_color_attachment = true;
+ break;
+ }
+ }
+
+ if (subpass.pDepthStencilAttachment && subpass.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
+ uses_depthstencil_attachment = true;
+ }
+ }
+ graphicsPipelineCI.initialize(pCreateInfo, uses_color_attachment, uses_depthstencil_attachment);
+ // Make sure compute pipeline is null
VkComputePipelineCreateInfo emptyComputeCI = {};
computePipelineCI.initialize(&emptyComputeCI);
- VkRayTracingPipelineCreateInfoNV emptyRayTracingCI = {};
- raytracingPipelineCI.initialize(&emptyRayTracingCI);
- stage_state.clear();
+ for (uint32_t i = 0; i < pCreateInfo->stageCount; i++) {
+ const VkPipelineShaderStageCreateInfo *pPSSCI = &pCreateInfo->pStages[i];
+ this->duplicate_shaders |= this->active_shaders & pPSSCI->stage;
+ this->active_shaders |= pPSSCI->stage;
+ }
+ if (graphicsPipelineCI.pVertexInputState) {
+ const auto pVICI = graphicsPipelineCI.pVertexInputState;
+ if (pVICI->vertexBindingDescriptionCount) {
+ this->vertex_binding_descriptions_ = std::vector<VkVertexInputBindingDescription>(
+ pVICI->pVertexBindingDescriptions, pVICI->pVertexBindingDescriptions + pVICI->vertexBindingDescriptionCount);
+
+ this->vertex_binding_to_index_map_.reserve(pVICI->vertexBindingDescriptionCount);
+ for (uint32_t i = 0; i < pVICI->vertexBindingDescriptionCount; ++i) {
+ this->vertex_binding_to_index_map_[pVICI->pVertexBindingDescriptions[i].binding] = i;
+ }
+ }
+ if (pVICI->vertexAttributeDescriptionCount) {
+ this->vertex_attribute_descriptions_ = std::vector<VkVertexInputAttributeDescription>(
+ pVICI->pVertexAttributeDescriptions,
+ pVICI->pVertexAttributeDescriptions + pVICI->vertexAttributeDescriptionCount);
+ }
+ }
+ if (graphicsPipelineCI.pColorBlendState) {
+ const auto pCBCI = graphicsPipelineCI.pColorBlendState;
+ if (pCBCI->attachmentCount) {
+ this->attachments = std::vector<VkPipelineColorBlendAttachmentState>(pCBCI->pAttachments,
+ pCBCI->pAttachments + pCBCI->attachmentCount);
+ }
+ }
+ if (graphicsPipelineCI.pInputAssemblyState) {
+ topology_at_rasterizer = graphicsPipelineCI.pInputAssemblyState->topology;
+ }
+ rp_state = rpstate;
}
- void initGraphicsPipeline(ValidationStateTracker *state_data, const VkGraphicsPipelineCreateInfo *pCreateInfo,
- std::shared_ptr<RENDER_PASS_STATE> &&rpstate);
- void initComputePipeline(ValidationStateTracker *state_data, const VkComputePipelineCreateInfo *pCreateInfo);
- void initRayTracingPipelineNV(ValidationStateTracker *state_data, const VkRayTracingPipelineCreateInfoNV *pCreateInfo);
-
- inline VkPipelineBindPoint getPipelineType() const {
- if (graphicsPipelineCI.sType == VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO)
- return VK_PIPELINE_BIND_POINT_GRAPHICS;
- else if (computePipelineCI.sType == VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO)
- return VK_PIPELINE_BIND_POINT_COMPUTE;
- else if (raytracingPipelineCI.sType == VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV)
- return VK_PIPELINE_BIND_POINT_RAY_TRACING_NV;
- else
- return VK_PIPELINE_BIND_POINT_MAX_ENUM;
+ void initComputePipeline(const VkComputePipelineCreateInfo *pCreateInfo) {
+ computePipelineCI.initialize(pCreateInfo);
+ // Make sure gfx pipeline is null
+ VkGraphicsPipelineCreateInfo emptyGraphicsCI = {};
+ graphicsPipelineCI.initialize(&emptyGraphicsCI, false, false);
+ switch (computePipelineCI.stage.stage) {
+ case VK_SHADER_STAGE_COMPUTE_BIT:
+ this->active_shaders |= VK_SHADER_STAGE_COMPUTE_BIT;
+ break;
+ default:
+ // TODO : Flag error
+ break;
+ }
}
-
- inline VkPipelineCreateFlags getPipelineCreateFlags() const {
- if (graphicsPipelineCI.sType == VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO)
- return graphicsPipelineCI.flags;
- else if (computePipelineCI.sType == VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO)
- return computePipelineCI.flags;
- else if (raytracingPipelineCI.sType == VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV)
- return raytracingPipelineCI.flags;
- else
- return 0;
+ void initRayTracingPipelineNV(const VkRayTracingPipelineCreateInfoNV *pCreateInfo) {
+ raytracingPipelineCI.initialize(pCreateInfo);
+ // Make sure gfx and compute pipeline is null
+ VkGraphicsPipelineCreateInfo emptyGraphicsCI = {};
+ VkComputePipelineCreateInfo emptyComputeCI = {};
+ computePipelineCI.initialize(&emptyComputeCI);
+ graphicsPipelineCI.initialize(&emptyGraphicsCI, false, false);
+ switch (raytracingPipelineCI.pStages->stage) {
+ case VK_SHADER_STAGE_RAYGEN_BIT_NV:
+ this->active_shaders |= VK_SHADER_STAGE_RAYGEN_BIT_NV;
+ break;
+ case VK_SHADER_STAGE_ANY_HIT_BIT_NV:
+ this->active_shaders |= VK_SHADER_STAGE_ANY_HIT_BIT_NV;
+ break;
+ case VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV:
+ this->active_shaders |= VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV;
+ break;
+ case VK_SHADER_STAGE_MISS_BIT_NV:
+ this->active_shaders |= VK_SHADER_STAGE_MISS_BIT_NV;
+ break;
+ case VK_SHADER_STAGE_INTERSECTION_BIT_NV:
+ this->active_shaders |= VK_SHADER_STAGE_INTERSECTION_BIT_NV;
+ break;
+ case VK_SHADER_STAGE_CALLABLE_BIT_NV:
+ this->active_shaders |= VK_SHADER_STAGE_CALLABLE_BIT_NV;
+ break;
+ default:
+ // TODO : Flag error
+ break;
+ }
}
};
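
For reference, the attachment scan at the top of initGraphicsPipeline above reduces to the two predicates below: a subpass uses color output only if at least one color attachment reference is not VK_ATTACHMENT_UNUSED, and uses depth/stencil only if pDepthStencilAttachment is present and not unused. In the layer these booleans are then fed to graphicsPipelineCI.initialize(); the standalone restatement here is a sketch over a bare VkSubpassDescription rather than RENDER_PASS_STATE:

    #include <vulkan/vulkan.h>

    static bool SubpassUsesColor(const VkSubpassDescription &subpass) {
        for (uint32_t i = 0; i < subpass.colorAttachmentCount; ++i) {
            if (subpass.pColorAttachments[i].attachment != VK_ATTACHMENT_UNUSED) return true;
        }
        return false;
    }

    static bool SubpassUsesDepthStencil(const VkSubpassDescription &subpass) {
        return subpass.pDepthStencilAttachment && subpass.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED;
    }

    int main() {
        VkAttachmentReference refs[2] = {{VK_ATTACHMENT_UNUSED, VK_IMAGE_LAYOUT_UNDEFINED},
                                         {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL}};
        VkSubpassDescription subpass = {};
        subpass.colorAttachmentCount = 2;
        subpass.pColorAttachments = refs;
        // Color is used via the second reference; no depth/stencil attachment is set.
        return (SubpassUsesColor(subpass) && !SubpassUsesDepthStencil(subpass)) ? 0 : 1;
    }
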
@@ -1218,63 +814,24 @@ struct LAST_BOUND_STATE {
LAST_BOUND_STATE() { reset(); } // must define default constructor for portability reasons
PIPELINE_STATE *pipeline_state;
VkPipelineLayout pipeline_layout;
- std::unique_ptr<cvdescriptorset::DescriptorSet> push_descriptor_set;
-
+ // Track each set that has been bound
// Ordered bound set tracking where index is set# that given set is bound to
- struct PER_SET {
- PER_SET()
- : bound_descriptor_set(nullptr),
- compat_id_for_set(0),
- validated_set(nullptr),
- validated_set_change_count(~0ULL),
- validated_set_image_layout_change_count(~0ULL),
- validated_set_binding_req_map() {}
-
- cvdescriptorset::DescriptorSet *bound_descriptor_set;
- // one dynamic offset per dynamic descriptor bound to this CB
- std::vector<uint32_t> dynamicOffsets;
- PipelineLayoutCompatId compat_id_for_set;
-
- // Cache most recently validated descriptor state for ValidateCmdBufDrawState/UpdateDrawState
- const cvdescriptorset::DescriptorSet *validated_set;
- uint64_t validated_set_change_count;
- uint64_t validated_set_image_layout_change_count;
- BindingReqMap validated_set_binding_req_map;
- };
-
- std::vector<PER_SET> per_set;
+ std::vector<cvdescriptorset::DescriptorSet *> boundDescriptorSets;
+ std::unique_ptr<cvdescriptorset::DescriptorSet> push_descriptor_set;
+ // one dynamic offset per dynamic descriptor bound to this CB
+ std::vector<std::vector<uint32_t>> dynamicOffsets;
+ std::vector<PipelineLayoutCompatId> compat_id_for_set;
void reset() {
pipeline_state = nullptr;
pipeline_layout = VK_NULL_HANDLE;
+ boundDescriptorSets.clear();
push_descriptor_set = nullptr;
- per_set.clear();
- }
-
- void UnbindAndResetPushDescriptorSet(cvdescriptorset::DescriptorSet *ds) {
- if (push_descriptor_set) {
- for (std::size_t i = 0; i < per_set.size(); i++) {
- if (per_set[i].bound_descriptor_set == push_descriptor_set.get()) {
- per_set[i].bound_descriptor_set = nullptr;
- }
- }
- }
- push_descriptor_set.reset(ds);
+ dynamicOffsets.clear();
+ compat_id_for_set.clear();
}
};
-static inline bool CompatForSet(uint32_t set, const LAST_BOUND_STATE &a, const std::vector<PipelineLayoutCompatId> &b) {
- bool result = (set < a.per_set.size()) && (set < b.size()) && (a.per_set[set].compat_id_for_set == b[set]);
- return result;
-}
-
-static inline bool CompatForSet(uint32_t set, const PIPELINE_LAYOUT_STATE *a, const PIPELINE_LAYOUT_STATE *b) {
- // Intentionally have a result variable to simplify debugging
- bool result = a && b && (set < a->compat_for_set.size()) && (set < b->compat_for_set.size()) &&
- (a->compat_for_set[set] == b->compat_for_set[set]);
- return result;
-}
-
// Types to store queue family ownership (QFO) Transfers
// Common to image and buffer memory barriers
@@ -1396,23 +953,35 @@ using GlobalQFOTransferBarrierMap =
// Submit queue uses the Scoreboard to track all release/acquire operations in a batch.
template <typename Barrier>
using QFOTransferCBScoreboard =
- std::unordered_map<QFOTransferBarrier<Barrier>, const CMD_BUFFER_STATE *, QFOTransferBarrierHash<Barrier>>;
+ std::unordered_map<QFOTransferBarrier<Barrier>, const GLOBAL_CB_NODE *, QFOTransferBarrierHash<Barrier>>;
template <typename Barrier>
struct QFOTransferCBScoreboards {
QFOTransferCBScoreboard<Barrier> acquire;
QFOTransferCBScoreboard<Barrier> release;
};
+struct GpuDeviceMemoryBlock {
+ VkBuffer buffer;
+ VkDeviceMemory memory;
+ uint32_t offset;
+};
+
+struct GpuBufferInfo {
+ GpuDeviceMemoryBlock mem_block;
+ VkDescriptorSet desc_set;
+ VkDescriptorPool desc_pool;
+ GpuBufferInfo(GpuDeviceMemoryBlock mem_block, VkDescriptorSet desc_set, VkDescriptorPool desc_pool)
+ : mem_block(mem_block), desc_set(desc_set), desc_pool(desc_pool){};
+};
+
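
Some context for the two structs just added: GPU-assisted validation instruments shaders to write error records into a device-visible buffer, and the command buffer appears to keep one GpuBufferInfo per instrumented draw (gpu_buffer_list below) so a record read back at queue submit time can be traced to the draw and descriptor set that produced it. A toy sketch of that one-record-per-draw bookkeeping, with locally defined mirrors of the structs and null handles since no real device is involved:

    #include <vulkan/vulkan.h>
    #include <vector>

    struct MemBlockSketch { VkBuffer buffer; VkDeviceMemory memory; uint32_t offset; };  // mirrors GpuDeviceMemoryBlock
    struct BufferInfoSketch { MemBlockSketch mem_block; VkDescriptorSet desc_set; VkDescriptorPool desc_pool; };

    int main() {
        std::vector<BufferInfoSketch> gpu_buffer_list;  // plays the role of GLOBAL_CB_NODE::gpu_buffer_list
        // One entry is appended per draw/dispatch recorded into the command buffer...
        for (int draw = 0; draw < 3; ++draw) {
            gpu_buffer_list.push_back({{VK_NULL_HANDLE, VK_NULL_HANDLE, 0}, VK_NULL_HANDLE, VK_NULL_HANDLE});
        }
        // ...so the index of an error record identifies which draw wrote it.
        return gpu_buffer_list.size() == 3 ? 0 : 1;
    }
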
// Cmd Buffer Wrapper Struct - TODO : This desperately needs its own class
-struct CMD_BUFFER_STATE : public BASE_NODE {
+struct GLOBAL_CB_NODE : public BASE_NODE {
VkCommandBuffer commandBuffer;
VkCommandBufferAllocateInfo createInfo = {};
VkCommandBufferBeginInfo beginInfo;
VkCommandBufferInheritanceInfo inheritanceInfo;
VkDevice device; // device this CB belongs to
bool hasDrawCmd;
- bool hasTraceRaysCmd;
- bool hasDispatchCmd;
CB_STATE state; // Track cmd buffer update state
uint64_t submitCount; // Number of times CB has been submitted
typedef uint64_t ImageLayoutUpdateCount;
@@ -1428,19 +997,16 @@ struct CMD_BUFFER_STATE : public BASE_NODE {
uint32_t viewportMask;
uint32_t scissorMask;
- uint32_t initial_device_mask;
-
VkRenderPassBeginInfo activeRenderPassBeginInfo;
RENDER_PASS_STATE *activeRenderPass;
VkSubpassContents activeSubpassContents;
- uint32_t active_render_pass_device_mask;
uint32_t activeSubpass;
VkFramebuffer activeFramebuffer;
std::unordered_set<VkFramebuffer> framebuffers;
// Unified data structs to track objects bound to this command buffer as well as object
// dependencies that have been broken : either destroyed objects, or updated descriptor sets
- std::unordered_set<VulkanTypedHandle> object_bindings;
- std::vector<VulkanTypedHandle> broken_bindings;
+ std::unordered_set<VK_OBJECT> object_bindings;
+ std::vector<VK_OBJECT> broken_bindings;
QFOTransferBarrierSets<VkBufferMemoryBarrier> qfo_transfer_buffer_barriers;
QFOTransferBarrierSets<VkImageMemoryBarrier> qfo_transfer_image_barriers;
@@ -1448,48 +1014,42 @@ struct CMD_BUFFER_STATE : public BASE_NODE {
std::unordered_set<VkEvent> waitedEvents;
std::vector<VkEvent> writeEventsBeforeWait;
std::vector<VkEvent> events;
- std::map<QueryObject, QueryState> queryToStateMap;
+ std::unordered_map<QueryObject, std::unordered_set<VkEvent>> waitedEventsBeforeQueryReset;
+ std::unordered_map<QueryObject, bool> queryToStateMap; // false means unavailable, true means available
std::unordered_set<QueryObject> activeQueries;
std::unordered_set<QueryObject> startedQueries;
- typedef std::unordered_map<VkImage, std::unique_ptr<ImageSubresourceLayoutMap>> ImageLayoutMap;
- ImageLayoutMap image_layout_map;
+ std::unordered_map<ImageSubresourcePair, IMAGE_CMD_BUF_LAYOUT_NODE> imageLayoutMap;
std::unordered_map<VkEvent, VkPipelineStageFlags> eventToStageMap;
- std::vector<CBVertexBufferBindingInfo> cb_vertex_buffer_binding_info;
- CBVertexBufferBindingInfo current_vertex_buffer_binding_info;
+ std::vector<DrawData> draw_data;
+ DrawData current_draw_data;
bool vertex_buffer_used; // Track for perf warning to make sure any bound vtx buffer used
VkCommandBuffer primaryCommandBuffer;
+ // Track images and buffers that are updated by this CB at the point of a draw
+ std::unordered_set<VkImageView> updateImages;
+ std::unordered_set<VkBuffer> updateBuffers;
// If primary, the secondary command buffers we will call.
// If secondary, the primary command buffers we will be called by.
- std::unordered_set<CMD_BUFFER_STATE *> linkedCommandBuffers;
+ std::unordered_set<GLOBAL_CB_NODE *> linkedCommandBuffers;
// Validation functions run at primary CB queue submit time
std::vector<std::function<bool()>> queue_submit_functions;
// Validation functions run when secondary CB is executed in primary
- std::vector<std::function<bool(const CMD_BUFFER_STATE *, VkFramebuffer)>> cmd_execute_commands_functions;
+ std::vector<std::function<bool(GLOBAL_CB_NODE *, VkFramebuffer)>> cmd_execute_commands_functions;
std::unordered_set<VkDeviceMemory> memObjs;
std::vector<std::function<bool(VkQueue)>> eventUpdates;
std::vector<std::function<bool(VkQueue)>> queryUpdates;
std::unordered_set<cvdescriptorset::DescriptorSet *> validated_descriptor_sets;
// Contents valid only after an index buffer is bound (CBSTATUS_INDEX_BUFFER_BOUND set)
IndexBufferBinding index_buffer_binding;
-
- // Cache of current insert label...
- LoggingLabel debug_label;
+ // GPU Validation data
+ std::vector<GpuBufferInfo> gpu_buffer_list;
};
-static inline const QFOTransferBarrierSets<VkImageMemoryBarrier> &GetQFOBarrierSets(
- const CMD_BUFFER_STATE *cb, const QFOTransferBarrier<VkImageMemoryBarrier>::Tag &type_tag) {
- return cb->qfo_transfer_image_barriers;
-}
-static inline const QFOTransferBarrierSets<VkBufferMemoryBarrier> &GetQFOBarrierSets(
- const CMD_BUFFER_STATE *cb, const QFOTransferBarrier<VkBufferMemoryBarrier>::Tag &type_tag) {
- return cb->qfo_transfer_buffer_barriers;
-}
static inline QFOTransferBarrierSets<VkImageMemoryBarrier> &GetQFOBarrierSets(
- CMD_BUFFER_STATE *cb, const QFOTransferBarrier<VkImageMemoryBarrier>::Tag &type_tag) {
+ GLOBAL_CB_NODE *cb, const QFOTransferBarrier<VkImageMemoryBarrier>::Tag &type_tag) {
return cb->qfo_transfer_image_barriers;
}
static inline QFOTransferBarrierSets<VkBufferMemoryBarrier> &GetQFOBarrierSets(
- CMD_BUFFER_STATE *cb, const QFOTransferBarrier<VkBufferMemoryBarrier>::Tag &type_tag) {
+ GLOBAL_CB_NODE *cb, const QFOTransferBarrier<VkBufferMemoryBarrier>::Tag &type_tag) {
return cb->qfo_transfer_buffer_barriers;
}
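Illustrative aside (editor's sketch, not part of this change): the pair of GetQFOBarrierSets overloads above selects the image or buffer barrier set purely from the barrier's Tag type, so templated callers pick the right member of the command buffer without branching. A self-contained mock of that tag-dispatch pattern, with hypothetical names standing in for the validation-layer types:

    // Hypothetical standalone sketch of the tag-dispatch pattern used by GetQFOBarrierSets
    // (mock types only; the real QFOTransferBarrier/GLOBAL_CB_NODE live in this header).
    #include <cstdio>

    struct ImageTag {};
    struct BufferTag {};
    struct MockCB { int image_barriers = 0; int buffer_barriers = 0; };

    static int &GetBarrierCount(MockCB *cb, const ImageTag &) { return cb->image_barriers; }
    static int &GetBarrierCount(MockCB *cb, const BufferTag &) { return cb->buffer_barriers; }

    int main() {
        MockCB cb;
        GetBarrierCount(&cb, ImageTag{})++;   // overload chosen purely by the tag type
        GetBarrierCount(&cb, BufferTag{})++;
        printf("image: %d, buffer: %d\n", cb.image_barriers, cb.buffer_barriers);
        return 0;
    }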
@@ -1516,7 +1076,7 @@ struct CB_SUBMISSION {
VkFence fence;
};
-struct IMAGE_LAYOUT_STATE {
+struct IMAGE_LAYOUT_NODE {
VkImageLayout layout;
VkFormat format;
};
@@ -1531,11 +1091,17 @@ class FRAMEBUFFER_STATE : public BASE_NODE {
VkFramebuffer framebuffer;
safe_VkFramebufferCreateInfo createInfo;
std::shared_ptr<RENDER_PASS_STATE> rp_state;
+#ifdef FRAMEBUFFER_ATTACHMENT_STATE_CACHE
+ // TODO Re-enable attachment state cache once staleness protection is implemented
+ // For staleness protection, destroyed images and image views must invalidate the cached data and tag the framebuffer object
+ // as no longer valid
+ std::vector<MT_FB_ATTACHMENT_INFO> attachments;
+#endif
FRAMEBUFFER_STATE(VkFramebuffer fb, const VkFramebufferCreateInfo *pCreateInfo, std::shared_ptr<RENDER_PASS_STATE> &&rpstate)
: framebuffer(fb), createInfo(pCreateInfo), rp_state(rpstate){};
};
-struct SHADER_MODULE_STATE;
+struct shader_module;
struct DeviceExtensions;
struct DeviceFeatures {
@@ -1549,29 +1115,33 @@ struct DeviceFeatures {
VkPhysicalDeviceTransformFeedbackFeaturesEXT transform_feedback_features;
VkPhysicalDeviceFloat16Int8FeaturesKHR float16_int8;
VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT vtx_attrib_divisor_features;
- VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR uniform_buffer_standard_layout;
VkPhysicalDeviceScalarBlockLayoutFeaturesEXT scalar_block_layout_features;
VkPhysicalDeviceBufferAddressFeaturesEXT buffer_address;
- VkPhysicalDeviceCooperativeMatrixFeaturesNV cooperative_matrix_features;
- VkPhysicalDeviceFloatControlsPropertiesKHR float_controls;
- VkPhysicalDeviceHostQueryResetFeaturesEXT host_query_reset_features;
- VkPhysicalDeviceComputeShaderDerivativesFeaturesNV compute_shader_derivatives_features;
- VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV fragment_shader_barycentric_features;
- VkPhysicalDeviceShaderImageFootprintFeaturesNV shader_image_footprint_features;
- VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT fragment_shader_interlock_features;
- VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT demote_to_helper_invocation_features;
- VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT texel_buffer_alignment_features;
- VkPhysicalDeviceImagelessFramebufferFeaturesKHR imageless_framebuffer_features;
- VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR pipeline_exe_props_features;
};
enum RenderPassCreateVersion { RENDER_PASS_VERSION_1 = 0, RENDER_PASS_VERSION_2 = 1 };
+class GpuDeviceMemoryManager;
+class GpuDescriptorSetManager;
struct ShaderTracker {
VkPipeline pipeline;
VkShaderModule shader_module;
std::vector<unsigned int> pgm;
};
+struct GpuValidationState {
+ bool aborted;
+ bool reserve_binding_slot;
+ VkDescriptorSetLayout debug_desc_layout;
+ VkDescriptorSetLayout dummy_desc_layout;
+ uint32_t adjusted_max_desc_sets;
+ uint32_t desc_set_bind_index;
+ uint32_t unique_shader_module_id;
+ std::unordered_map<uint32_t, ShaderTracker> shader_map;
+ std::unique_ptr<GpuDeviceMemoryManager> memory_manager;
+ std::unique_ptr<GpuDescriptorSetManager> desc_set_manager;
+ VkCommandPool barrier_command_pool;
+ VkCommandBuffer barrier_command_buffer;
+};
enum BarrierOperationsType {
kAllAcquire, // All Barrier operations are "ownership acquire" operations
@@ -1579,10 +1149,6 @@ enum BarrierOperationsType {
kGeneral, // Either no ownership operations or a mix of ownership operation types and/or non-ownership operations
};
-std::shared_ptr<cvdescriptorset::DescriptorSetLayout const> const GetDescriptorSetLayout(const ValidationStateTracker *,
- VkDescriptorSetLayout);
-
-ImageSubresourceLayoutMap *GetImageSubresourceLayoutMap(CMD_BUFFER_STATE *cb_state, const IMAGE_STATE &image_state);
-const ImageSubresourceLayoutMap *GetImageSubresourceLayoutMap(const CMD_BUFFER_STATE *cb_state, VkImage image);
+std::shared_ptr<cvdescriptorset::DescriptorSetLayout const> const GetDescriptorSetLayout(layer_data const *, VkDescriptorSetLayout);
#endif // CORE_VALIDATION_TYPES_H_
diff --git a/layers/descriptor_sets.cpp b/layers/descriptor_sets.cpp
index 8dcdf8b44..9cb7df2b9 100644
--- a/layers/descriptor_sets.cpp
+++ b/layers/descriptor_sets.cpp
@@ -52,8 +52,6 @@ struct BindingNumCmp {
}
};
-using DescriptorSet = cvdescriptorset::DescriptorSet;
-using DescriptorSetLayout = cvdescriptorset::DescriptorSetLayout;
using DescriptorSetLayoutDef = cvdescriptorset::DescriptorSetLayoutDef;
using DescriptorSetLayoutId = cvdescriptorset::DescriptorSetLayoutId;
@@ -117,11 +115,15 @@ cvdescriptorset::DescriptorSetLayoutDef::DescriptorSetLayoutDef(const VkDescript
assert(bindings_.size() == binding_count_);
assert(binding_flags_.size() == binding_count_);
uint32_t global_index = 0;
- global_index_range_.reserve(binding_count_);
- // Vector order is finalized so build vectors of descriptors and dynamic offsets by binding index
+ binding_to_global_index_range_map_.reserve(binding_count_);
+ // Vector order is finalized so build maps from binding numbers to global index ranges and from global start indices to binding indices
for (uint32_t i = 0; i < binding_count_; ++i) {
+ auto binding_num = bindings_[i].binding;
auto final_index = global_index + bindings_[i].descriptorCount;
- global_index_range_.emplace_back(global_index, final_index);
+ binding_to_global_index_range_map_[binding_num] = IndexRange(global_index, final_index);
+ if (final_index != global_index) {
+ global_start_to_index_map_[global_index] = i;
+ }
global_index = final_index;
}
@@ -180,18 +182,34 @@ VkDescriptorBindingFlagsEXT cvdescriptorset::DescriptorSetLayoutDef::GetDescript
return binding_flags_[index];
}
-const cvdescriptorset::IndexRange &cvdescriptorset::DescriptorSetLayoutDef::GetGlobalIndexRangeFromIndex(uint32_t index) const {
- const static IndexRange kInvalidRange = {0xFFFFFFFF, 0xFFFFFFFF};
- if (index >= binding_flags_.size()) return kInvalidRange;
- return global_index_range_[index];
+// For the given global descriptor index, return the index of the binding that contains it
+uint32_t cvdescriptorset::DescriptorSetLayoutDef::GetIndexFromGlobalIndex(const uint32_t global_index) const {
+ auto start_it = global_start_to_index_map_.upper_bound(global_index);
+ uint32_t index = binding_count_;
+ assert(start_it != global_start_to_index_map_.cbegin());
+ if (start_it != global_start_to_index_map_.cbegin()) {
+ --start_it;
+ index = start_it->second;
+#ifndef NDEBUG
+ const auto &range = GetGlobalIndexRangeFromBinding(bindings_[index].binding);
+ assert(range.start <= global_index && global_index < range.end);
+#endif
+ }
+ return index;
}
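Brief aside on the lookup above (editor's sketch): upper_bound returns the first entry whose global start index is strictly greater than global_index, so stepping back one entry lands on the binding whose half-open range contains it. A standalone sketch with hypothetical data:

    // Hypothetical sketch of the upper_bound interval lookup used by GetIndexFromGlobalIndex.
    #include <cassert>
    #include <cstdint>
    #include <map>

    int main() {
        // global start index -> binding index, mirroring global_start_to_index_map_
        std::map<uint32_t, uint32_t> start_to_index = {{0, 0}, {3, 1}, {7, 2}};
        const uint32_t global_index = 5;                     // falls inside the second binding's range [3,7)
        auto it = start_to_index.upper_bound(global_index);  // first start strictly greater than 5 -> key 7
        assert(it != start_to_index.cbegin());
        --it;                                                // previous entry: start 3, binding index 1
        assert(it->second == 1);
        return 0;
    }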
-// For the given binding, return the global index range (half open)
-// As start and end are often needed in pairs, get both with a single lookup.
+// For the given binding, return the global index range
+// As start and end are often needed in pairs, get both with a single hash lookup.
const cvdescriptorset::IndexRange &cvdescriptorset::DescriptorSetLayoutDef::GetGlobalIndexRangeFromBinding(
const uint32_t binding) const {
- uint32_t index = GetIndexFromBinding(binding);
- return GetGlobalIndexRangeFromIndex(index);
+ assert(binding_to_global_index_range_map_.count(binding));
+ // In the error case, return max uint32_t values so any derived index is out of bounds and fails as soon as possible
+ const static IndexRange kInvalidRange = {0xFFFFFFFF, 0xFFFFFFFF};
+ const auto &range_it = binding_to_global_index_range_map_.find(binding);
+ if (range_it != binding_to_global_index_range_map_.end()) {
+ return range_it->second;
+ }
+ return kInvalidRange;
}
// For given binding, return ptr to ImmutableSampler array
@@ -216,72 +234,66 @@ VkSampler const *cvdescriptorset::DescriptorSetLayoutDef::GetImmutableSamplerPtr
}
return nullptr;
}
-
-// If our layout is compatible with rh_ds_layout, return true.
-bool cvdescriptorset::DescriptorSetLayout::IsCompatible(DescriptorSetLayout const *rh_ds_layout) const {
- bool compatible = (this == rh_ds_layout) || (GetLayoutDef() == rh_ds_layout->GetLayoutDef());
- return compatible;
-}
// If our layout is compatible with rh_ds_layout, return true,
// else return false and fill in error_msg with a description of what causes the incompatibility
-bool cvdescriptorset::VerifySetLayoutCompatibility(DescriptorSetLayout const *lh_ds_layout, DescriptorSetLayout const *rh_ds_layout,
- std::string *error_msg) {
- // Short circuit the detailed check.
- if (lh_ds_layout->IsCompatible(rh_ds_layout)) return true;
-
- // Do a detailed compatibility check of this lhs def (referenced by lh_ds_layout), vs. the rhs (layout and def)
- // Should only be run if trivial accept has failed, and in that context should return false.
- VkDescriptorSetLayout lh_dsl_handle = lh_ds_layout->GetDescriptorSetLayout();
- VkDescriptorSetLayout rh_dsl_handle = rh_ds_layout->GetDescriptorSetLayout();
- DescriptorSetLayoutDef const *lh_ds_layout_def = lh_ds_layout->GetLayoutDef();
- DescriptorSetLayoutDef const *rh_ds_layout_def = rh_ds_layout->GetLayoutDef();
-
- // Check descriptor counts
- if (lh_ds_layout_def->GetTotalDescriptorCount() != rh_ds_layout_def->GetTotalDescriptorCount()) {
+bool cvdescriptorset::DescriptorSetLayout::IsCompatible(DescriptorSetLayout const *const rh_ds_layout,
+ std::string *error_msg) const {
+ // Trivial case
+ if (layout_ == rh_ds_layout->GetDescriptorSetLayout()) return true;
+ if (GetLayoutDef() == rh_ds_layout->GetLayoutDef()) return true;
+ bool detailed_compat_check =
+ GetLayoutDef()->IsCompatible(layout_, rh_ds_layout->GetDescriptorSetLayout(), rh_ds_layout->GetLayoutDef(), error_msg);
+ // The detailed check should never report mismatching DSLs as compatible
+ assert(!detailed_compat_check);
+ return detailed_compat_check;
+}
+
+// Do a detailed compatibility check of this def (referenced by ds_layout), vs. the rhs (layout and def)
+// Should only be called if trivial accept has failed, and in that context should return false.
+bool cvdescriptorset::DescriptorSetLayoutDef::IsCompatible(VkDescriptorSetLayout ds_layout, VkDescriptorSetLayout rh_ds_layout,
+ DescriptorSetLayoutDef const *const rh_ds_layout_def,
+ std::string *error_msg) const {
+ if (descriptor_count_ != rh_ds_layout_def->descriptor_count_) {
std::stringstream error_str;
- error_str << "DescriptorSetLayout " << lh_dsl_handle << " has " << lh_ds_layout_def->GetTotalDescriptorCount()
- << " descriptors, but DescriptorSetLayout " << rh_dsl_handle << ", which comes from pipelineLayout, has "
- << rh_ds_layout_def->GetTotalDescriptorCount() << " descriptors.";
+ error_str << "DescriptorSetLayout " << ds_layout << " has " << descriptor_count_ << " descriptors, but DescriptorSetLayout "
+ << rh_ds_layout << ", which comes from pipelineLayout, has " << rh_ds_layout_def->descriptor_count_
+ << " descriptors.";
*error_msg = error_str.str();
return false; // trivial fail case
}
// Descriptor counts match so need to go through bindings one-by-one
// and verify that type and stageFlags match
- for (const auto &binding : lh_ds_layout_def->GetBindings()) {
+ for (auto binding : bindings_) {
// TODO : Do we also need to check immutable samplers?
// VkDescriptorSetLayoutBinding *rh_binding;
if (binding.descriptorCount != rh_ds_layout_def->GetDescriptorCountFromBinding(binding.binding)) {
std::stringstream error_str;
- error_str << "Binding " << binding.binding << " for DescriptorSetLayout " << lh_dsl_handle
- << " has a descriptorCount of " << binding.descriptorCount << " but binding " << binding.binding
- << " for DescriptorSetLayout " << rh_dsl_handle
- << ", which comes from pipelineLayout, has a descriptorCount of "
+ error_str << "Binding " << binding.binding << " for DescriptorSetLayout " << ds_layout << " has a descriptorCount of "
+ << binding.descriptorCount << " but binding " << binding.binding << " for DescriptorSetLayout "
+ << rh_ds_layout << ", which comes from pipelineLayout, has a descriptorCount of "
<< rh_ds_layout_def->GetDescriptorCountFromBinding(binding.binding);
*error_msg = error_str.str();
return false;
} else if (binding.descriptorType != rh_ds_layout_def->GetTypeFromBinding(binding.binding)) {
std::stringstream error_str;
- error_str << "Binding " << binding.binding << " for DescriptorSetLayout " << lh_dsl_handle << " is type '"
+ error_str << "Binding " << binding.binding << " for DescriptorSetLayout " << ds_layout << " is type '"
<< string_VkDescriptorType(binding.descriptorType) << "' but binding " << binding.binding
- << " for DescriptorSetLayout " << rh_dsl_handle << ", which comes from pipelineLayout, is type '"
+ << " for DescriptorSetLayout " << rh_ds_layout << ", which comes from pipelineLayout, is type '"
<< string_VkDescriptorType(rh_ds_layout_def->GetTypeFromBinding(binding.binding)) << "'";
*error_msg = error_str.str();
return false;
} else if (binding.stageFlags != rh_ds_layout_def->GetStageFlagsFromBinding(binding.binding)) {
std::stringstream error_str;
- error_str << "Binding " << binding.binding << " for DescriptorSetLayout " << lh_dsl_handle << " has stageFlags "
- << binding.stageFlags << " but binding " << binding.binding << " for DescriptorSetLayout " << rh_dsl_handle
+ error_str << "Binding " << binding.binding << " for DescriptorSetLayout " << ds_layout << " has stageFlags "
+ << binding.stageFlags << " but binding " << binding.binding << " for DescriptorSetLayout " << rh_ds_layout
<< ", which comes from pipelineLayout, has stageFlags "
<< rh_ds_layout_def->GetStageFlagsFromBinding(binding.binding);
*error_msg = error_str.str();
return false;
}
}
- // No detailed check should succeed if the trivial check failed -- or the dictionary has failed somehow.
- bool compatible = true;
- assert(!compatible);
- return compatible;
+ return true;
}
bool cvdescriptorset::DescriptorSetLayoutDef::IsNextBindingConsistent(const uint32_t binding) const {
@@ -305,6 +317,47 @@ bool cvdescriptorset::DescriptorSetLayoutDef::IsNextBindingConsistent(const uint
}
return false;
}
+// Starting at the descriptor at 'offset' within the given binding, walk over update_count
+// descriptor updates and verify that, for any binding boundaries that are crossed, the next binding(s) are all consistent
+// Consistency means that their type, stage flags, and whether or not they use immutable samplers all match
+// If so, return true. If not, fill in error_msg and return false
+bool cvdescriptorset::DescriptorSetLayoutDef::VerifyUpdateConsistency(uint32_t current_binding, uint32_t offset,
+ uint32_t update_count, const char *type,
+ const VkDescriptorSet set, std::string *error_msg) const {
+ // Verify consecutive bindings match (if needed)
+ auto orig_binding = current_binding;
+ // Track count of descriptors in the current_bindings that are remaining to be updated
+ auto binding_remaining = GetDescriptorCountFromBinding(current_binding);
+ // First, it's legal to offset beyond your own binding so handle that case
+ // Really this is just searching for the binding in which the update begins and adjusting offset accordingly
+ while (offset >= binding_remaining) {
+ // Advance to next binding, decrement offset by binding size
+ offset -= binding_remaining;
+ binding_remaining = GetDescriptorCountFromBinding(++current_binding);
+ }
+ binding_remaining -= offset;
+ while (update_count > binding_remaining) { // While our updates overstep current binding
+ // Verify next consecutive binding matches type, stage flags & immutable sampler use
+ if (!IsNextBindingConsistent(current_binding++)) {
+ std::stringstream error_str;
+ error_str << "Attempting " << type;
+ if (IsPushDescriptor()) {
+ error_str << " push descriptors";
+ } else {
+ error_str << " descriptor set " << set;
+ }
+ error_str << " binding #" << orig_binding << " with #" << update_count
+ << " descriptors being updated but this update oversteps the bounds of this binding and the next binding is "
+ "not consistent with current binding so this update is invalid.";
+ *error_msg = error_str.str();
+ return false;
+ }
+ // For the sake of this check, consider the bindings updated and grab the count for the next binding
+ update_count -= binding_remaining;
+ binding_remaining = GetDescriptorCountFromBinding(current_binding);
+ }
+ return true;
+}
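To make the walk above concrete (editor's illustration, hypothetical numbers): with consecutive binding sizes {3, 4, 2}, an update of 5 descriptors targeting binding 0 at offset 2 consumes the last descriptor of binding 0 and the first four of binding 1, so only the 0-to-1 boundary needs the consistency check. A standalone sketch of that bookkeeping, with the consistency test itself elided:

    // Hypothetical sketch of how an update spanning bindings is walked (consistency check elided).
    #include <cstdint>
    #include <cstdio>
    #include <vector>

    int main() {
        std::vector<uint32_t> counts = {3, 4, 2};  // descriptorCount per consecutive binding
        uint32_t binding = 0, offset = 2, update_count = 5;
        uint32_t remaining = counts[binding];
        while (offset >= remaining) {              // find the binding where the update begins
            offset -= remaining;
            remaining = counts[++binding];
        }
        remaining -= offset;
        while (update_count > remaining) {         // each pass crosses one binding boundary
            printf("crossing boundary from binding %u to %u\n", static_cast<unsigned>(binding),
                   static_cast<unsigned>(binding + 1));
            update_count -= remaining;
            remaining = counts[++binding];
        }
        return 0;
    }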
// The DescriptorSetLayout stores the per-handle data for a descriptor set layout, and references the common definition for the
// handle invariant portion
@@ -313,12 +366,12 @@ cvdescriptorset::DescriptorSetLayout::DescriptorSetLayout(const VkDescriptorSetL
: layout_(layout), layout_destroyed_(false), layout_id_(GetCanonicalId(p_create_info)) {}
// Validate descriptor set layout create info
-bool cvdescriptorset::ValidateDescriptorSetLayoutCreateInfo(
+bool cvdescriptorset::DescriptorSetLayout::ValidateCreateInfo(
const debug_report_data *report_data, const VkDescriptorSetLayoutCreateInfo *create_info, const bool push_descriptor_ext,
const uint32_t max_push_descriptors, const bool descriptor_indexing_ext,
const VkPhysicalDeviceDescriptorIndexingFeaturesEXT *descriptor_indexing_features,
const VkPhysicalDeviceInlineUniformBlockFeaturesEXT *inline_uniform_block_features,
- const VkPhysicalDeviceInlineUniformBlockPropertiesEXT *inline_uniform_block_props, const DeviceExtensions *device_extensions) {
+ const VkPhysicalDeviceInlineUniformBlockPropertiesEXT *inline_uniform_block_props) {
bool skip = false;
std::unordered_set<uint32_t> bindings;
uint64_t total_descriptors = 0;
@@ -370,24 +423,16 @@ bool cvdescriptorset::ValidateDescriptorSetLayoutCreateInfo(
}
if (binding_info.descriptorType == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) {
- if (!device_extensions->vk_ext_inline_uniform_block) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, 0,
- "UNASSIGNED-Extension not enabled",
- "Creating VkDescriptorSetLayout with descriptor type VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT "
- "but extension %s is missing",
- VK_EXT_INLINE_UNIFORM_BLOCK_EXTENSION_NAME);
- } else {
- if ((binding_info.descriptorCount % 4) != 0) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkDescriptorSetLayoutBinding-descriptorType-02209",
- "descriptorCount =(%" PRIu32 ") must be a multiple of 4", binding_info.descriptorCount);
- }
- if (binding_info.descriptorCount > inline_uniform_block_props->maxInlineUniformBlockSize) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkDescriptorSetLayoutBinding-descriptorType-02210",
- "descriptorCount =(%" PRIu32 ") must be less than or equal to maxInlineUniformBlockSize",
- binding_info.descriptorCount);
- }
+ if ((binding_info.descriptorCount % 4) != 0) {
+ skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ "VUID-VkDescriptorSetLayoutBinding-descriptorType-02209",
+ "descriptorCount =(%" PRIu32 ") must be a multiple of 4", binding_info.descriptorCount);
+ }
+ if (binding_info.descriptorCount > inline_uniform_block_props->maxInlineUniformBlockSize) {
+ skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ "VUID-VkDescriptorSetLayoutBinding-descriptorType-02210",
+ "descriptorCount =(%" PRIu32 ") must be less than or equal to maxInlineUniformBlockSize",
+ binding_info.descriptorCount);
}
}
@@ -546,15 +591,15 @@ cvdescriptorset::AllocateDescriptorSetsData::AllocateDescriptorSetsData(uint32_t
cvdescriptorset::DescriptorSet::DescriptorSet(const VkDescriptorSet set, const VkDescriptorPool pool,
const std::shared_ptr<DescriptorSetLayout const> &layout, uint32_t variable_count,
- cvdescriptorset::DescriptorSet::StateTracker *state_data)
+ layer_data *dev_data)
: some_update_(false),
set_(set),
pool_state_(nullptr),
p_layout_(layout),
- state_data_(state_data),
- variable_count_(variable_count),
- change_count_(0) {
- pool_state_ = state_data->GetDescriptorPoolState(pool);
+ device_data_(dev_data),
+ limits_(dev_data->phys_dev_props.limits),
+ variable_count_(variable_count) {
+ pool_state_ = dev_data->GetDescriptorPoolState(pool);
// Foreach binding, create default descriptors of given type
descriptors_.reserve(p_layout_->GetTotalDescriptorCount());
for (uint32_t i = 0; i < p_layout_->GetBindingCount(); ++i) {
@@ -639,7 +684,12 @@ static char const *StringDescriptorReqComponentType(descriptor_req req) {
return "(none)";
}
-unsigned DescriptorRequirementsBitsFromFormat(VkFormat fmt) {
+// Is this set's underlying layout compatible with the passed-in layout according to "Pipeline Layout Compatibility" in the spec?
+bool cvdescriptorset::DescriptorSet::IsCompatible(DescriptorSetLayout const *const layout, std::string *error) const {
+ return layout->IsCompatible(p_layout_.get(), error);
+}
+
+static unsigned DescriptorRequirementsBitsFromFormat(VkFormat fmt) {
if (FormatIsSInt(fmt)) return DESCRIPTOR_REQ_COMPONENT_TYPE_SINT;
if (FormatIsUInt(fmt)) return DESCRIPTOR_REQ_COMPONENT_TYPE_UINT;
if (FormatIsDepthAndStencil(fmt)) return DESCRIPTOR_REQ_COMPONENT_TYPE_FLOAT | DESCRIPTOR_REQ_COMPONENT_TYPE_UINT;
@@ -652,162 +702,151 @@ unsigned DescriptorRequirementsBitsFromFormat(VkFormat fmt) {
// This includes validating that all descriptors in the given bindings are updated,
// that any update buffers are valid, and that any dynamic offsets are within the bounds of their buffers.
// Return true if state is acceptable, or false and write an error message into error string
-bool CoreChecks::ValidateDrawState(const DescriptorSet *descriptor_set, const std::map<uint32_t, descriptor_req> &bindings,
- const std::vector<uint32_t> &dynamic_offsets, const CMD_BUFFER_STATE *cb_node,
- const char *caller, std::string *error) const {
- using DescriptorClass = cvdescriptorset::DescriptorClass;
- using BufferDescriptor = cvdescriptorset::BufferDescriptor;
- using ImageDescriptor = cvdescriptorset::ImageDescriptor;
- using ImageSamplerDescriptor = cvdescriptorset::ImageSamplerDescriptor;
- using SamplerDescriptor = cvdescriptorset::SamplerDescriptor;
- using TexelDescriptor = cvdescriptorset::TexelDescriptor;
+bool cvdescriptorset::DescriptorSet::ValidateDrawState(const std::map<uint32_t, descriptor_req> &bindings,
+ const std::vector<uint32_t> &dynamic_offsets, GLOBAL_CB_NODE *cb_node,
+ const char *caller, std::string *error) const {
for (auto binding_pair : bindings) {
auto binding = binding_pair.first;
- DescriptorSetLayout::ConstBindingIterator binding_it(descriptor_set->GetLayout().get(), binding);
- if (binding_it.AtEnd()) { // End at construction is the condition for an invalid binding.
+ if (!p_layout_->HasBinding(binding)) {
std::stringstream error_str;
error_str << "Attempting to validate DrawState for binding #" << binding
<< " which is an invalid binding for this descriptor set.";
*error = error_str.str();
return false;
}
-
- if (binding_it.GetDescriptorBindingFlags() &
- (VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT | VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT)) {
- // Can't validate the descriptor because it may not have been updated,
- // or the view could have been destroyed
- continue;
- }
-
- // Copy the range, the end range is subject to update based on variable length descriptor arrays.
- cvdescriptorset::IndexRange index_range = binding_it.GetGlobalIndexRange();
+ IndexRange index_range = p_layout_->GetGlobalIndexRangeFromBinding(binding);
auto array_idx = 0; // Track array idx if we're dealing with array descriptors
- if (binding_it.IsVariableDescriptorCount()) {
+ if (IsVariableDescriptorCount(binding)) {
// Only validate the first N descriptors if it uses variable_count
- index_range.end = index_range.start + descriptor_set->GetVariableDescriptorCount();
+ index_range.end = index_range.start + GetVariableDescriptorCount();
}
for (uint32_t i = index_range.start; i < index_range.end; ++i, ++array_idx) {
- uint32_t index = i - index_range.start;
- const auto *descriptor = descriptor_set->GetDescriptorFromGlobalIndex(i);
-
- if (descriptor->GetClass() == DescriptorClass::InlineUniform) {
- // Can't validate the descriptor because it may not have been updated.
+ if ((p_layout_->GetDescriptorBindingFlagsFromBinding(binding) &
+ (VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT | VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT)) ||
+ descriptors_[i]->GetClass() == InlineUniform) {
+ // Can't validate the descriptor because it may not have been updated,
+ // or the view could have been destroyed
continue;
- } else if (!descriptor->updated) {
+ } else if (!descriptors_[i]->updated) {
std::stringstream error_str;
- error_str << "Descriptor in binding #" << binding << " index " << index
- << " is being used in draw but has never been updated via vkUpdateDescriptorSets() or a similar call.";
+ error_str << "Descriptor in binding #" << binding << " at global descriptor index " << i
+ << " is being used in draw but has not been updated.";
*error = error_str.str();
return false;
} else {
- auto descriptor_class = descriptor->GetClass();
- if (descriptor_class == DescriptorClass::GeneralBuffer) {
+ auto descriptor_class = descriptors_[i]->GetClass();
+ if (descriptor_class == GeneralBuffer) {
// Verify that buffers are valid
- auto buffer = static_cast<const BufferDescriptor *>(descriptor)->GetBuffer();
- auto buffer_node = GetBufferState(buffer);
+ auto buffer = static_cast<BufferDescriptor *>(descriptors_[i].get())->GetBuffer();
+ auto buffer_node = device_data_->GetBufferState(buffer);
if (!buffer_node) {
std::stringstream error_str;
- error_str << "Descriptor in binding #" << binding << " index " << index << " references invalid buffer "
- << buffer << ".";
+ error_str << "Descriptor in binding #" << binding << " at global descriptor index " << i
+ << " references invalid buffer " << buffer << ".";
*error = error_str.str();
return false;
} else if (!buffer_node->sparse) {
for (auto mem_binding : buffer_node->GetBoundMemory()) {
- if (!GetDevMemState(mem_binding)) {
+ if (!device_data_->GetMemObjInfo(mem_binding)) {
std::stringstream error_str;
- error_str << "Descriptor in binding #" << binding << " index " << index << " uses buffer " << buffer
- << " that references invalid memory " << mem_binding << ".";
+ error_str << "Descriptor in binding #" << binding << " at global descriptor index " << i
+ << " uses buffer " << buffer << " that references invalid memory " << mem_binding << ".";
*error = error_str.str();
return false;
}
}
}
- if (descriptor->IsDynamic()) {
+ if (descriptors_[i]->IsDynamic()) {
// Validate that dynamic offsets are within the buffer
auto buffer_size = buffer_node->createInfo.size;
- auto range = static_cast<const BufferDescriptor *>(descriptor)->GetRange();
- auto desc_offset = static_cast<const BufferDescriptor *>(descriptor)->GetOffset();
- auto dyn_offset = dynamic_offsets[binding_it.GetDynamicOffsetIndex() + array_idx];
+ auto range = static_cast<BufferDescriptor *>(descriptors_[i].get())->GetRange();
+ auto desc_offset = static_cast<BufferDescriptor *>(descriptors_[i].get())->GetOffset();
+ auto dyn_offset = dynamic_offsets[GetDynamicOffsetIndexFromBinding(binding) + array_idx];
if (VK_WHOLE_SIZE == range) {
if ((dyn_offset + desc_offset) > buffer_size) {
std::stringstream error_str;
- error_str << "Dynamic descriptor in binding #" << binding << " index " << index << " uses buffer "
- << buffer << " with update range of VK_WHOLE_SIZE has dynamic offset " << dyn_offset
- << " combined with offset " << desc_offset << " that oversteps the buffer size of "
- << buffer_size << ".";
+ error_str << "Dynamic descriptor in binding #" << binding << " at global descriptor index " << i
+ << " uses buffer " << buffer << " with update range of VK_WHOLE_SIZE has dynamic offset "
+ << dyn_offset << " combined with offset " << desc_offset
+ << " that oversteps the buffer size of " << buffer_size << ".";
*error = error_str.str();
return false;
}
} else {
if ((dyn_offset + desc_offset + range) > buffer_size) {
std::stringstream error_str;
- error_str << "Dynamic descriptor in binding #" << binding << " index " << index << " uses buffer "
- << buffer << " with dynamic offset " << dyn_offset << " combined with offset "
- << desc_offset << " and range " << range << " that oversteps the buffer size of "
- << buffer_size << ".";
+ error_str << "Dynamic descriptor in binding #" << binding << " at global descriptor index " << i
+ << " uses buffer " << buffer << " with dynamic offset " << dyn_offset
+ << " combined with offset " << desc_offset << " and range " << range
+ << " that oversteps the buffer size of " << buffer_size << ".";
*error = error_str.str();
return false;
}
}
}
- } else if (descriptor_class == DescriptorClass::ImageSampler || descriptor_class == DescriptorClass::Image) {
+ } else if (descriptor_class == ImageSampler || descriptor_class == Image) {
VkImageView image_view;
VkImageLayout image_layout;
- if (descriptor_class == DescriptorClass::ImageSampler) {
- image_view = static_cast<const ImageSamplerDescriptor *>(descriptor)->GetImageView();
- image_layout = static_cast<const ImageSamplerDescriptor *>(descriptor)->GetImageLayout();
+ if (descriptor_class == ImageSampler) {
+ image_view = static_cast<ImageSamplerDescriptor *>(descriptors_[i].get())->GetImageView();
+ image_layout = static_cast<ImageSamplerDescriptor *>(descriptors_[i].get())->GetImageLayout();
} else {
- image_view = static_cast<const ImageDescriptor *>(descriptor)->GetImageView();
- image_layout = static_cast<const ImageDescriptor *>(descriptor)->GetImageLayout();
+ image_view = static_cast<ImageDescriptor *>(descriptors_[i].get())->GetImageView();
+ image_layout = static_cast<ImageDescriptor *>(descriptors_[i].get())->GetImageLayout();
}
auto reqs = binding_pair.second;
- auto image_view_state = GetImageViewState(image_view);
+ auto image_view_state = device_data_->GetImageViewState(image_view);
if (nullptr == image_view_state) {
// Image view must have been destroyed since initial update. Could potentially flag the descriptor
// as "invalid" (updated = false) at DestroyImageView() time and detect this error at bind time
std::stringstream error_str;
- error_str << "Descriptor in binding #" << binding << " index " << index << " is using imageView "
- << report_data->FormatHandle(image_view).c_str() << " that has been destroyed.";
+ error_str << "Descriptor in binding #" << binding << " at global descriptor index " << i
+ << " is using imageView " << image_view << " that has been destroyed.";
*error = error_str.str();
return false;
}
- const auto &image_view_ci = image_view_state->create_info;
-
- if (reqs & DESCRIPTOR_REQ_ALL_VIEW_TYPE_BITS) {
- if (~reqs & (1 << image_view_ci.viewType)) {
- // bad view type
- std::stringstream error_str;
- error_str << "Descriptor in binding #" << binding << " index " << index
- << " requires an image view of type " << StringDescriptorReqViewType(reqs) << " but got "
- << string_VkImageViewType(image_view_ci.viewType) << ".";
- *error = error_str.str();
- return false;
- }
+ auto image_view_ci = image_view_state->create_info;
- if (!(reqs & image_view_state->descriptor_format_bits)) {
- // bad component type
- std::stringstream error_str;
- error_str << "Descriptor in binding #" << binding << " index " << index << " requires "
- << StringDescriptorReqComponentType(reqs)
- << " component type, but bound descriptor format is " << string_VkFormat(image_view_ci.format)
- << ".";
- *error = error_str.str();
- return false;
- }
+ if ((reqs & DESCRIPTOR_REQ_ALL_VIEW_TYPE_BITS) && (~reqs & (1 << image_view_ci.viewType))) {
+ // bad view type
+ std::stringstream error_str;
+ error_str << "Descriptor in binding #" << binding << " at global descriptor index " << i
+ << " requires an image view of type " << StringDescriptorReqViewType(reqs) << " but got "
+ << string_VkImageViewType(image_view_ci.viewType) << ".";
+ *error = error_str.str();
+ return false;
}
- if (!disabled.image_layout_validation) {
- auto image_node = GetImageState(image_view_ci.image);
- assert(image_node);
- // Verify Image Layout
+ auto format_bits = DescriptorRequirementsBitsFromFormat(image_view_ci.format);
+ if (!(reqs & format_bits)) {
+ // bad component type
+ std::stringstream error_str;
+ error_str << "Descriptor in binding #" << binding << " at global descriptor index " << i << " requires "
+ << StringDescriptorReqComponentType(reqs) << " component type, but bound descriptor format is "
+ << string_VkFormat(image_view_ci.format) << ".";
+ *error = error_str.str();
+ return false;
+ }
+
+ auto image_node = device_data_->GetImageState(image_view_ci.image);
+ assert(image_node);
+ // Verify Image Layout
+ // Copy first mip level into sub_layers and loop over each mip level to verify layout
+ VkImageSubresourceLayers sub_layers;
+ sub_layers.aspectMask = image_view_ci.subresourceRange.aspectMask;
+ sub_layers.baseArrayLayer = image_view_ci.subresourceRange.baseArrayLayer;
+ sub_layers.layerCount = image_view_ci.subresourceRange.layerCount;
+ bool hit_error = false;
+ for (auto cur_level = image_view_ci.subresourceRange.baseMipLevel;
+ cur_level < image_view_ci.subresourceRange.levelCount; ++cur_level) {
+ sub_layers.mipLevel = cur_level;
// No "invalid layout" VUID required for this call, since the optimal_layout parameter is UNDEFINED.
- bool hit_error = false;
- VerifyImageLayout(cb_node, image_node, image_view_state->normalized_subresource_range,
- image_view_ci.subresourceRange.aspectMask, image_layout, VK_IMAGE_LAYOUT_UNDEFINED,
- caller, kVUIDUndefined, "VUID-VkDescriptorImageInfo-imageLayout-00344", &hit_error);
+ device_data_->VerifyImageLayout(device_data_, cb_node, image_node, sub_layers, image_layout,
+ VK_IMAGE_LAYOUT_UNDEFINED, caller, kVUIDUndefined,
+ "VUID-VkDescriptorImageInfo-imageLayout-00344", &hit_error);
if (hit_error) {
*error =
"Image layout specified at vkUpdateDescriptorSet* or vkCmdPushDescriptorSet* time "
@@ -816,79 +855,53 @@ bool CoreChecks::ValidateDrawState(const DescriptorSet *descriptor_set, const st
return false;
}
}
-
// Verify Sample counts
- if ((reqs & DESCRIPTOR_REQ_SINGLE_SAMPLE) && image_view_state->samples != VK_SAMPLE_COUNT_1_BIT) {
+ if ((reqs & DESCRIPTOR_REQ_SINGLE_SAMPLE) && image_node->createInfo.samples != VK_SAMPLE_COUNT_1_BIT) {
std::stringstream error_str;
- error_str << "Descriptor in binding #" << binding << " index " << index
+ error_str << "Descriptor in binding #" << binding << " at global descriptor index " << i
<< " requires bound image to have VK_SAMPLE_COUNT_1_BIT but got "
- << string_VkSampleCountFlagBits(image_view_state->samples) << ".";
+ << string_VkSampleCountFlagBits(image_node->createInfo.samples) << ".";
*error = error_str.str();
return false;
}
- if ((reqs & DESCRIPTOR_REQ_MULTI_SAMPLE) && image_view_state->samples == VK_SAMPLE_COUNT_1_BIT) {
+ if ((reqs & DESCRIPTOR_REQ_MULTI_SAMPLE) && image_node->createInfo.samples == VK_SAMPLE_COUNT_1_BIT) {
std::stringstream error_str;
- error_str << "Descriptor in binding #" << binding << " index " << index
+ error_str << "Descriptor in binding #" << binding << " at global descriptor index " << i
<< " requires bound image to have multiple samples, but got VK_SAMPLE_COUNT_1_BIT.";
*error = error_str.str();
return false;
}
- } else if (descriptor_class == DescriptorClass::TexelBuffer) {
- auto texel_buffer = static_cast<const TexelDescriptor *>(descriptor);
- auto buffer_view = GetBufferViewState(texel_buffer->GetBufferView());
+ } else if (descriptor_class == TexelBuffer) {
+ auto texel_buffer = static_cast<TexelDescriptor *>(descriptors_[i].get());
+ auto buffer_view = device_data_->GetBufferViewState(texel_buffer->GetBufferView());
- if (nullptr == buffer_view) {
- std::stringstream error_str;
- error_str << "Descriptor in binding #" << binding << " index " << index << " is using bufferView "
- << buffer_view << " that has been destroyed.";
- *error = error_str.str();
- return false;
- }
- auto buffer = buffer_view->create_info.buffer;
- auto buffer_state = GetBufferState(buffer);
- if (!buffer_state) {
- std::stringstream error_str;
- error_str << "Descriptor in binding #" << binding << " index " << index << " is using buffer "
- << buffer_state << " that has been destroyed.";
- *error = error_str.str();
- return false;
- }
auto reqs = binding_pair.second;
auto format_bits = DescriptorRequirementsBitsFromFormat(buffer_view->create_info.format);
if (!(reqs & format_bits)) {
// bad component type
std::stringstream error_str;
- error_str << "Descriptor in binding #" << binding << " index " << index << " requires "
+ error_str << "Descriptor in binding #" << binding << " at global descriptor index " << i << " requires "
<< StringDescriptorReqComponentType(reqs) << " component type, but bound descriptor format is "
<< string_VkFormat(buffer_view->create_info.format) << ".";
*error = error_str.str();
return false;
}
}
- if (descriptor_class == DescriptorClass::ImageSampler || descriptor_class == DescriptorClass::PlainSampler) {
+ if (descriptor_class == ImageSampler || descriptor_class == PlainSampler) {
// Verify Sampler still valid
VkSampler sampler;
- if (descriptor_class == DescriptorClass::ImageSampler) {
- sampler = static_cast<const ImageSamplerDescriptor *>(descriptor)->GetSampler();
+ if (descriptor_class == ImageSampler) {
+ sampler = static_cast<ImageSamplerDescriptor *>(descriptors_[i].get())->GetSampler();
} else {
- sampler = static_cast<const SamplerDescriptor *>(descriptor)->GetSampler();
+ sampler = static_cast<SamplerDescriptor *>(descriptors_[i].get())->GetSampler();
}
- if (!ValidateSampler(sampler)) {
+ if (!ValidateSampler(sampler, device_data_)) {
std::stringstream error_str;
- error_str << "Descriptor in binding #" << binding << " index " << index << " is using sampler " << sampler
- << " that has been destroyed.";
+ error_str << "Descriptor in binding #" << binding << " at global descriptor index " << i
+ << " is using sampler " << sampler << " that has been destroyed.";
*error = error_str.str();
return false;
- } else {
- const SAMPLER_STATE *sampler_state = GetSamplerState(sampler);
- if (sampler_state->samplerConversion && !descriptor->IsImmutableSampler()) {
- std::stringstream error_str;
- error_str << "sampler (" << sampler << ") in the descriptor set (" << descriptor_set->GetSet()
- << ") contains a YCBCR conversion (" << sampler_state->samplerConversion
- << ") , then the sampler MUST also exists as an immutable sampler.";
- *error = error_str.str();
- }
}
}
}
@@ -897,9 +910,52 @@ bool CoreChecks::ValidateDrawState(const DescriptorSet *descriptor_set, const st
return true;
}
+// For given bindings, place any update buffers or images into the passed-in unordered_sets
+uint32_t cvdescriptorset::DescriptorSet::GetStorageUpdates(const std::map<uint32_t, descriptor_req> &bindings,
+ std::unordered_set<VkBuffer> *buffer_set,
+ std::unordered_set<VkImageView> *image_set) const {
+ auto num_updates = 0;
+ for (auto binding_pair : bindings) {
+ auto binding = binding_pair.first;
+ // If a binding doesn't exist, skip it
+ if (!p_layout_->HasBinding(binding)) {
+ continue;
+ }
+ uint32_t start_idx = p_layout_->GetGlobalIndexRangeFromBinding(binding).start;
+ if (descriptors_[start_idx]->IsStorage()) {
+ if (Image == descriptors_[start_idx]->descriptor_class) {
+ for (uint32_t i = 0; i < p_layout_->GetDescriptorCountFromBinding(binding); ++i) {
+ if (descriptors_[start_idx + i]->updated) {
+ image_set->insert(static_cast<ImageDescriptor *>(descriptors_[start_idx + i].get())->GetImageView());
+ num_updates++;
+ }
+ }
+ } else if (TexelBuffer == descriptors_[start_idx]->descriptor_class) {
+ for (uint32_t i = 0; i < p_layout_->GetDescriptorCountFromBinding(binding); ++i) {
+ if (descriptors_[start_idx + i]->updated) {
+ auto bufferview = static_cast<TexelDescriptor *>(descriptors_[start_idx + i].get())->GetBufferView();
+ auto bv_state = device_data_->GetBufferViewState(bufferview);
+ if (bv_state) {
+ buffer_set->insert(bv_state->create_info.buffer);
+ num_updates++;
+ }
+ }
+ }
+ } else if (GeneralBuffer == descriptors_[start_idx]->descriptor_class) {
+ for (uint32_t i = 0; i < p_layout_->GetDescriptorCountFromBinding(binding); ++i) {
+ if (descriptors_[start_idx + i]->updated) {
+ buffer_set->insert(static_cast<BufferDescriptor *>(descriptors_[start_idx + i].get())->GetBuffer());
+ num_updates++;
+ }
+ }
+ }
+ }
+ }
+ return num_updates;
+}
// Set is being deleted or updates so invalidate all bound cmd buffers
void cvdescriptorset::DescriptorSet::InvalidateBoundCmdBuffers() {
- state_data_->InvalidateCommandBuffers(cb_bindings, VulkanTypedHandle(set_, kVulkanObjectTypeDescriptorSet));
+ device_data_->InvalidateCommandBuffers(device_data_, cb_bindings, {HandleToUint64(set_), kVulkanObjectTypeDescriptorSet});
}
// Loop through the write updates to do for a push descriptor set, ignoring dstSet
@@ -929,10 +985,7 @@ void cvdescriptorset::DescriptorSet::PerformWriteUpdate(const VkWriteDescriptorS
offset = 0;
binding_being_updated++;
}
- if (update->descriptorCount) {
- some_update_ = true;
- change_count_++;
- }
+ if (update->descriptorCount) some_update_ = true;
if (!(p_layout_->GetDescriptorBindingFlagsFromBinding(update->dstBinding) &
(VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT | VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT))) {
@@ -940,57 +993,54 @@ void cvdescriptorset::DescriptorSet::PerformWriteUpdate(const VkWriteDescriptorS
}
}
// Validate Copy update
-bool CoreChecks::ValidateCopyUpdate(const VkCopyDescriptorSet *update, const DescriptorSet *dst_set, const DescriptorSet *src_set,
- const char *func_name, std::string *error_code, std::string *error_msg) {
- auto dst_layout = dst_set->GetLayout();
- auto src_layout = src_set->GetLayout();
-
+bool cvdescriptorset::DescriptorSet::ValidateCopyUpdate(const debug_report_data *report_data, const VkCopyDescriptorSet *update,
+ const DescriptorSet *src_set, const char *func_name,
+ std::string *error_code, std::string *error_msg) {
// Verify dst layout still valid
- if (dst_layout->IsDestroyed()) {
+ if (p_layout_->IsDestroyed()) {
*error_code = "VUID-VkCopyDescriptorSet-dstSet-parameter";
string_sprintf(error_msg,
- "Cannot call %s to perform copy update on dstSet %s"
- " created with destroyed %s.",
- func_name, report_data->FormatHandle(dst_set->GetSet()).c_str(),
- report_data->FormatHandle(dst_layout->GetDescriptorSetLayout()).c_str());
+ "Cannot call %s to perform copy update on descriptor set dstSet %s"
+ " created with destroyed VkDescriptorSetLayout %s.",
+ func_name, report_data->FormatHandle(set_).c_str(),
+ report_data->FormatHandle(p_layout_->GetDescriptorSetLayout()).c_str());
return false;
}
// Verify src layout still valid
- if (src_layout->IsDestroyed()) {
+ if (src_set->p_layout_->IsDestroyed()) {
*error_code = "VUID-VkCopyDescriptorSet-srcSet-parameter";
string_sprintf(error_msg,
"Cannot call %s to perform copy update of dstSet %s"
- " from srcSet %s"
- " created with destroyed %s.",
- func_name, report_data->FormatHandle(dst_set->GetSet()).c_str(),
- report_data->FormatHandle(src_set->GetSet()).c_str(),
- report_data->FormatHandle(src_layout->GetDescriptorSetLayout()).c_str());
+ " from descriptor set srcSet %s"
+ " created with destroyed VkDescriptorSetLayout %s.",
+ func_name, report_data->FormatHandle(set_).c_str(), report_data->FormatHandle(src_set->set_).c_str(),
+ report_data->FormatHandle(src_set->p_layout_->GetDescriptorSetLayout()).c_str());
return false;
}
- if (!dst_layout->HasBinding(update->dstBinding)) {
+ if (!p_layout_->HasBinding(update->dstBinding)) {
*error_code = "VUID-VkCopyDescriptorSet-dstBinding-00347";
std::stringstream error_str;
- error_str << "DescriptorSet " << dst_set->GetSet() << " does not have copy update dest binding of " << update->dstBinding;
+ error_str << "DescriptorSet " << set_ << " does not have copy update dest binding of " << update->dstBinding;
*error_msg = error_str.str();
return false;
}
if (!src_set->HasBinding(update->srcBinding)) {
*error_code = "VUID-VkCopyDescriptorSet-srcBinding-00345";
std::stringstream error_str;
- error_str << "DescriptorSet " << dst_set->GetSet() << " does not have copy update src binding of " << update->srcBinding;
+ error_str << "DescriptorSet " << set_ << " does not have copy update src binding of " << update->srcBinding;
*error_msg = error_str.str();
return false;
}
// Verify idle ds
- if (dst_set->in_use.load() &&
- !(dst_layout->GetDescriptorBindingFlagsFromBinding(update->dstBinding) &
+ if (in_use.load() &&
+ !(p_layout_->GetDescriptorBindingFlagsFromBinding(update->dstBinding) &
(VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT | VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT))) {
// TODO : Re-using Free Idle error code, need copy update idle error code
*error_code = "VUID-vkFreeDescriptorSets-pDescriptorSets-00309";
std::stringstream error_str;
- error_str << "Cannot call " << func_name << " to perform copy update on descriptor set " << dst_set->GetSet()
+ error_str << "Cannot call " << func_name << " to perform copy update on descriptor set " << set_
<< " that is in use by a command buffer";
*error_msg = error_str.str();
return false;
@@ -1009,15 +1059,15 @@ bool CoreChecks::ValidateCopyUpdate(const VkCopyDescriptorSet *update, const Des
*error_msg = error_str.str();
return false;
}
- auto dst_start_idx = dst_layout->GetGlobalIndexRangeFromBinding(update->dstBinding).start + update->dstArrayElement;
- if ((dst_start_idx + update->descriptorCount) > dst_layout->GetTotalDescriptorCount()) {
+ auto dst_start_idx = p_layout_->GetGlobalIndexRangeFromBinding(update->dstBinding).start + update->dstArrayElement;
+ if ((dst_start_idx + update->descriptorCount) > p_layout_->GetTotalDescriptorCount()) {
// DST update out of bounds
*error_code = "VUID-VkCopyDescriptorSet-dstArrayElement-00348";
std::stringstream error_str;
- error_str << "Attempting copy update to descriptorSet " << dst_set->GetSet() << " binding#" << update->dstBinding
- << " with offset index of " << dst_layout->GetGlobalIndexRangeFromBinding(update->dstBinding).start
+ error_str << "Attempting copy update to descriptorSet " << set_ << " binding#" << update->dstBinding
+ << " with offset index of " << p_layout_->GetGlobalIndexRangeFromBinding(update->dstBinding).start
<< " plus update array offset of " << update->dstArrayElement << " and update of " << update->descriptorCount
- << " descriptors oversteps total number of descriptors in set: " << dst_layout->GetTotalDescriptorCount();
+ << " descriptors oversteps total number of descriptors in set: " << p_layout_->GetTotalDescriptorCount();
*error_msg = error_str.str();
return false;
}
@@ -1026,28 +1076,25 @@ bool CoreChecks::ValidateCopyUpdate(const VkCopyDescriptorSet *update, const Des
// consistency issues, need more fine-grained error codes
*error_code = "VUID-VkCopyDescriptorSet-srcSet-00349";
auto src_type = src_set->GetTypeFromBinding(update->srcBinding);
- auto dst_type = dst_layout->GetTypeFromBinding(update->dstBinding);
+ auto dst_type = p_layout_->GetTypeFromBinding(update->dstBinding);
if (src_type != dst_type) {
std::stringstream error_str;
- error_str << "Attempting copy update to descriptorSet " << dst_set->GetSet() << " binding #" << update->dstBinding
- << " with type " << string_VkDescriptorType(dst_type) << " from descriptorSet " << src_set->GetSet()
- << " binding #" << update->srcBinding << " with type " << string_VkDescriptorType(src_type)
- << ". Types do not match";
+ error_str << "Attempting copy update to descriptorSet " << set_ << " binding #" << update->dstBinding << " with type "
+ << string_VkDescriptorType(dst_type) << " from descriptorSet " << src_set->GetSet() << " binding #"
+ << update->srcBinding << " with type " << string_VkDescriptorType(src_type) << ". Types do not match";
*error_msg = error_str.str();
return false;
}
// Verify consistency of src & dst bindings if update crosses binding boundaries
- if ((!VerifyUpdateConsistency(DescriptorSetLayout::ConstBindingIterator(src_layout.get(), update->srcBinding),
- update->srcArrayElement, update->descriptorCount, "copy update from", src_set->GetSet(),
- error_msg)) ||
- (!VerifyUpdateConsistency(DescriptorSetLayout::ConstBindingIterator(dst_layout.get(), update->dstBinding),
- update->dstArrayElement, update->descriptorCount, "copy update to", dst_set->GetSet(),
- error_msg))) {
+ if ((!src_set->GetLayout()->VerifyUpdateConsistency(update->srcBinding, update->srcArrayElement, update->descriptorCount,
+ "copy update from", src_set->GetSet(), error_msg)) ||
+ (!p_layout_->VerifyUpdateConsistency(update->dstBinding, update->dstArrayElement, update->descriptorCount, "copy update to",
+ set_, error_msg))) {
return false;
}
- if ((src_layout->GetCreateFlags() & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT) &&
- !(dst_layout->GetCreateFlags() & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT)) {
+ if ((src_set->GetLayout()->GetCreateFlags() & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT) &&
+ !(GetLayout()->GetCreateFlags() & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT)) {
*error_code = "VUID-VkCopyDescriptorSet-srcSet-01918";
std::stringstream error_str;
error_str << "If pname:srcSet's (" << update->srcSet
@@ -1061,8 +1108,8 @@ bool CoreChecks::ValidateCopyUpdate(const VkCopyDescriptorSet *update, const Des
return false;
}
- if (!(src_layout->GetCreateFlags() & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT) &&
- (dst_layout->GetCreateFlags() & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT)) {
+ if (!(src_set->GetLayout()->GetCreateFlags() & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT) &&
+ (GetLayout()->GetCreateFlags() & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT)) {
*error_code = "VUID-VkCopyDescriptorSet-srcSet-01919";
std::stringstream error_str;
error_str << "If pname:srcSet's (" << update->srcSet
@@ -1077,7 +1124,7 @@ bool CoreChecks::ValidateCopyUpdate(const VkCopyDescriptorSet *update, const Des
}
if ((src_set->GetPoolState()->createInfo.flags & VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT) &&
- !(dst_set->GetPoolState()->createInfo.flags & VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT)) {
+ !(GetPoolState()->createInfo.flags & VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT)) {
*error_code = "VUID-VkCopyDescriptorSet-srcSet-01920";
std::stringstream error_str;
error_str << "If the descriptor pool from which pname:srcSet (" << update->srcSet
@@ -1092,7 +1139,7 @@ bool CoreChecks::ValidateCopyUpdate(const VkCopyDescriptorSet *update, const Des
}
if (!(src_set->GetPoolState()->createInfo.flags & VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT) &&
- (dst_set->GetPoolState()->createInfo.flags & VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT)) {
+ (GetPoolState()->createInfo.flags & VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT)) {
*error_code = "VUID-VkCopyDescriptorSet-srcSet-01921";
std::stringstream error_str;
error_str << "If the descriptor pool from which pname:srcSet (" << update->srcSet
@@ -1150,7 +1197,6 @@ void cvdescriptorset::DescriptorSet::PerformCopyUpdate(const VkCopyDescriptorSet
if (src->updated) {
dst->CopyUpdate(src);
some_update_ = true;
- change_count_++;
} else {
dst->updated = false;
}
@@ -1169,27 +1215,14 @@ void cvdescriptorset::DescriptorSet::PerformCopyUpdate(const VkCopyDescriptorSet
// TODO: Modify the UpdateDrawState virtual functions to *only* set initial layout and not change layouts
// Prereq: This should be called for a set that has been confirmed to be active for the given cb_node, meaning it's going
// to be used in a draw by the given cb_node
-void cvdescriptorset::DescriptorSet::UpdateDrawState(ValidationStateTracker *device_data, CMD_BUFFER_STATE *cb_node,
+void cvdescriptorset::DescriptorSet::UpdateDrawState(GLOBAL_CB_NODE *cb_node,
const std::map<uint32_t, descriptor_req> &binding_req_map) {
- if (!device_data->disabled.command_buffer_state) {
- // bind cb to this descriptor set
- // Add bindings for descriptor set, the set's pool, and individual objects in the set
- auto inserted = cb_node->object_bindings.emplace(set_, kVulkanObjectTypeDescriptorSet);
- if (inserted.second) {
- cb_bindings.insert(cb_node);
- auto inserted2 = cb_node->object_bindings.emplace(pool_state_->pool, kVulkanObjectTypeDescriptorPool);
- if (inserted2.second) {
- pool_state_->cb_bindings.insert(cb_node);
- }
- }
- }
-
- // Descriptor UpdateDrawState functions do two things - associate resources to the command buffer,
- // and call image layout validation callbacks. If both are disabled, skip the entire loop.
- if (device_data->disabled.command_buffer_state && device_data->disabled.image_layout_validation) {
- return;
- }
-
+ // bind cb to this descriptor set
+ cb_bindings.insert(cb_node);
+ // Add bindings for descriptor set, the set's pool, and individual objects in the set
+ cb_node->object_bindings.insert({HandleToUint64(set_), kVulkanObjectTypeDescriptorSet});
+ pool_state_->cb_bindings.insert(cb_node);
+ cb_node->object_bindings.insert({HandleToUint64(pool_state_->pool), kVulkanObjectTypeDescriptorPool});
// For the active slots, use set# to look up descriptorSet from boundDescriptorSets, and bind all of that descriptor set's
// resources
for (auto binding_req_pair : binding_req_map) {
@@ -1201,39 +1234,52 @@ void cvdescriptorset::DescriptorSet::UpdateDrawState(ValidationStateTracker *dev
}
auto range = p_layout_->GetGlobalIndexRangeFromBinding(binding);
for (uint32_t i = range.start; i < range.end; ++i) {
- descriptors_[i]->UpdateDrawState(device_data, cb_node);
+ descriptors_[i]->UpdateDrawState(device_data_, cb_node);
}
}
}
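
The hunk above rewires UpdateDrawState to record, on the command buffer, which descriptor set and pool it will use, and to record the command buffer on those objects in turn. As a rough illustration of that two-way bookkeeping, here is a minimal, self-contained C++ sketch; CommandBuffer, DescriptorSetState, and the helper names are invented for the example and are not the layer's real classes.

#include <iostream>
#include <set>

// Hypothetical, simplified stand-ins for the tracker's state objects (not the layer's real classes).
struct CommandBuffer;

struct DescriptorSetState {
    std::set<CommandBuffer*> cb_bindings;           // command buffers that reference this set
};

struct CommandBuffer {
    std::set<DescriptorSetState*> object_bindings;  // descriptor sets this command buffer references
};

// Record, in both directions, that 'cb' will use 'set' in a draw.
void BindSetToCommandBuffer(CommandBuffer* cb, DescriptorSetState* set) {
    cb->object_bindings.insert(set);
    set->cb_bindings.insert(cb);
}

// When the set is destroyed, every command buffer that recorded it must be invalidated.
void InvalidateOnDestroy(DescriptorSetState* set) {
    for (CommandBuffer* cb : set->cb_bindings) {
        cb->object_bindings.erase(set);
        // A real tracker would also flag 'cb' so that submitting it reports an error.
    }
    set->cb_bindings.clear();
}

int main() {
    CommandBuffer cb;
    DescriptorSetState ds;
    BindSetToCommandBuffer(&cb, &ds);
    std::cout << "cb references " << cb.object_bindings.size() << " set(s)\n";
    InvalidateOnDestroy(&ds);
    std::cout << "after destroy, cb references " << cb.object_bindings.size() << " set(s)\n";
}
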
-void cvdescriptorset::DescriptorSet::FilterOneBindingReq(const BindingReqMap::value_type &binding_req_pair, BindingReqMap *out_req,
- const TrackedBindings &bindings, uint32_t limit) {
- if (bindings.size() < limit) {
- const auto it = bindings.find(binding_req_pair.first);
- if (it == bindings.cend()) out_req->emplace(binding_req_pair);
+void cvdescriptorset::DescriptorSet::FilterAndTrackOneBindingReq(const BindingReqMap::value_type &binding_req_pair,
+ const BindingReqMap &in_req, BindingReqMap *out_req,
+ TrackedBindings *bindings) {
+ assert(out_req);
+ assert(bindings);
+ const auto binding = binding_req_pair.first;
+ // Use insert and look at the boolean ("was inserted") in the returned pair to see if this is a new set member.
+ // Saves one hash lookup vs. find ... compare w/ end ... insert.
+ const auto it_bool_pair = bindings->insert(binding);
+ if (it_bool_pair.second) {
+ out_req->emplace(binding_req_pair);
}
}
-void cvdescriptorset::DescriptorSet::FilterBindingReqs(const CMD_BUFFER_STATE &cb_state, const PIPELINE_STATE &pipeline,
- const BindingReqMap &in_req, BindingReqMap *out_req) const {
- // For const cleanliness we have to find in the maps...
- const auto validated_it = cached_validation_.find(&cb_state);
- if (validated_it == cached_validation_.cend()) {
- // We have nothing validated, copy in to out
- for (const auto &binding_req_pair : in_req) {
- out_req->emplace(binding_req_pair);
+void cvdescriptorset::DescriptorSet::FilterAndTrackOneBindingReq(const BindingReqMap::value_type &binding_req_pair,
+ const BindingReqMap &in_req, BindingReqMap *out_req,
+ TrackedBindings *bindings, uint32_t limit) {
+ if (bindings->size() < limit) FilterAndTrackOneBindingReq(binding_req_pair, in_req, out_req, bindings);
+}
+
+void cvdescriptorset::DescriptorSet::FilterAndTrackBindingReqs(GLOBAL_CB_NODE *cb_state, const BindingReqMap &in_req,
+ BindingReqMap *out_req) {
+ TrackedBindings &bound = cached_validation_[cb_state].command_binding_and_usage;
+ if (bound.size() == GetBindingCount()) {
+ return; // All bindings are bound, out req is empty
+ }
+ for (const auto &binding_req_pair : in_req) {
+ const auto binding = binding_req_pair.first;
+ // If a binding doesn't exist, or has already been bound, skip it
+ if (p_layout_->HasBinding(binding)) {
+ FilterAndTrackOneBindingReq(binding_req_pair, in_req, out_req, &bound);
}
- return;
}
- const auto &validated = validated_it->second;
+}
- const auto image_sample_version_it = validated.image_samplers.find(&pipeline);
- const VersionedBindings *image_sample_version = nullptr;
- if (image_sample_version_it != validated.image_samplers.cend()) {
- image_sample_version = &(image_sample_version_it->second);
- }
- const auto &dynamic_buffers = validated.dynamic_buffers;
- const auto &non_dynamic_buffers = validated.non_dynamic_buffers;
+void cvdescriptorset::DescriptorSet::FilterAndTrackBindingReqs(GLOBAL_CB_NODE *cb_state, PIPELINE_STATE *pipeline,
+ const BindingReqMap &in_req, BindingReqMap *out_req) {
+ auto &validated = cached_validation_[cb_state];
+ auto &image_sample_val = validated.image_samplers[pipeline];
+ auto *const dynamic_buffers = &validated.dynamic_buffers;
+ auto *const non_dynamic_buffers = &validated.non_dynamic_buffers;
const auto &stats = p_layout_->GetBindingTypeStats();
for (const auto &binding_req_pair : in_req) {
auto binding = binding_req_pair.first;
@@ -1245,55 +1291,22 @@ void cvdescriptorset::DescriptorSet::FilterBindingReqs(const CMD_BUFFER_STATE &c
// If the image layouts have changed, the image descriptors need to be validated against them.
if ((layout_binding->descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) ||
(layout_binding->descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC)) {
- FilterOneBindingReq(binding_req_pair, out_req, dynamic_buffers, stats.dynamic_buffer_count);
+ FilterAndTrackOneBindingReq(binding_req_pair, in_req, out_req, dynamic_buffers, stats.dynamic_buffer_count);
} else if ((layout_binding->descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) ||
(layout_binding->descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)) {
- FilterOneBindingReq(binding_req_pair, out_req, non_dynamic_buffers, stats.non_dynamic_buffer_count);
+ FilterAndTrackOneBindingReq(binding_req_pair, in_req, out_req, non_dynamic_buffers, stats.non_dynamic_buffer_count);
} else {
// This is rather crude, as the changed layouts may not impact the bound descriptors,
// but the simple "versioning" serves as a cheap dirty-flag test.
- bool stale = true;
- if (image_sample_version) {
- const auto version_it = image_sample_version->find(binding);
- if (version_it != image_sample_version->cend() && (version_it->second == cb_state.image_layout_change_count)) {
- stale = false;
- }
- }
- if (stale) {
+ auto &version = image_sample_val[binding]; // Take advantage of the default constructor zero-initializing new entries
+ if (version != cb_state->image_layout_change_count) {
+ version = cb_state->image_layout_change_count;
out_req->emplace(binding_req_pair);
}
}
}
}
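
The comments above describe the caching scheme: dynamic and non-dynamic buffer bindings are tracked in per-command-buffer sets, while image/sampler bindings remember the command buffer's image-layout change counter and are only re-validated when that counter has moved. A minimal sketch of the version-counter part follows, under the assumption of a simple std::map cache; CbState, BindingVersionCache, and FilterStaleBindings are invented names, not the layer's API.

#include <cstdint>
#include <iostream>
#include <map>
#include <vector>

// Hypothetical command-buffer state: a counter bumped whenever an image layout changes.
// Starting it at 1 means freshly created cache entries (which default to 0) always read as stale.
struct CbState {
    uint32_t layout_change_count = 1;
};

// Per-binding "last validated at" versions; new entries are zero-initialized by std::map.
using BindingVersionCache = std::map<uint32_t, uint32_t>;

// Return the bindings that still need validation, updating the cache as we go.
std::vector<uint32_t> FilterStaleBindings(const CbState& cb, const std::vector<uint32_t>& requested,
                                          BindingVersionCache* cache) {
    std::vector<uint32_t> stale;
    for (uint32_t binding : requested) {
        uint32_t& version = (*cache)[binding];   // zero-initialized on first use
        if (version != cb.layout_change_count) {
            version = cb.layout_change_count;    // remember the version we validated against
            stale.push_back(binding);
        }
    }
    return stale;
}

int main() {
    CbState cb;
    BindingVersionCache cache;
    std::vector<uint32_t> req = {0, 1, 2};
    std::cout << FilterStaleBindings(cb, req, &cache).size() << " bindings need validation\n";  // 3
    std::cout << FilterStaleBindings(cb, req, &cache).size() << " after caching\n";             // 0
    cb.layout_change_count++;  // an image layout changed in this command buffer
    std::cout << FilterStaleBindings(cb, req, &cache).size() << " after a layout change\n";     // 3
}
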
-void cvdescriptorset::DescriptorSet::UpdateValidationCache(const CMD_BUFFER_STATE &cb_state, const PIPELINE_STATE &pipeline,
- const BindingReqMap &updated_bindings) {
- // For const cleanliness we have to find in the maps...
- auto &validated = cached_validation_[&cb_state];
-
- auto &image_sample_version = validated.image_samplers[&pipeline];
- auto &dynamic_buffers = validated.dynamic_buffers;
- auto &non_dynamic_buffers = validated.non_dynamic_buffers;
- for (const auto &binding_req_pair : updated_bindings) {
- auto binding = binding_req_pair.first;
- VkDescriptorSetLayoutBinding const *layout_binding = p_layout_->GetDescriptorSetLayoutBindingPtrFromBinding(binding);
- if (!layout_binding) {
- continue;
- }
- // Caching criteria differs per type.
- if ((layout_binding->descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) ||
- (layout_binding->descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC)) {
- dynamic_buffers.emplace(binding);
- } else if ((layout_binding->descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) ||
- (layout_binding->descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)) {
- non_dynamic_buffers.emplace(binding);
- } else {
- // Save the layout change version...
- image_sample_version[binding] = cb_state.image_layout_change_count;
- }
- }
-}
-
cvdescriptorset::SamplerDescriptor::SamplerDescriptor(const VkSampler *immut) : sampler_(VK_NULL_HANDLE), immutable_(false) {
updated = false;
descriptor_class = PlainSampler;
@@ -1304,14 +1317,21 @@ cvdescriptorset::SamplerDescriptor::SamplerDescriptor(const VkSampler *immut) :
}
}
// Validate given sampler. Currently this only checks to make sure it exists in the samplerMap
-bool CoreChecks::ValidateSampler(const VkSampler sampler) const { return (GetSamplerState(sampler) != nullptr); }
+bool cvdescriptorset::ValidateSampler(const VkSampler sampler, layer_data *dev_data) {
+ return (dev_data->GetSamplerState(sampler) != nullptr);
+}
-bool CoreChecks::ValidateImageUpdate(VkImageView image_view, VkImageLayout image_layout, VkDescriptorType type,
- const char *func_name, std::string *error_code, std::string *error_msg) {
+bool cvdescriptorset::ValidateImageUpdate(VkImageView image_view, VkImageLayout image_layout, VkDescriptorType type,
+ layer_data *dev_data, const char *func_name, std::string *error_code,
+ std::string *error_msg) {
*error_code = "VUID-VkWriteDescriptorSet-descriptorType-00326";
- auto iv_state = GetImageViewState(image_view);
- assert(iv_state);
-
+ auto iv_state = dev_data->GetImageViewState(image_view);
+ if (!iv_state) {
+ std::stringstream error_str;
+ error_str << "Invalid VkImageView: " << image_view;
+ *error_msg = error_str.str();
+ return false;
+ }
// Note that when an imageview is created, we validated that memory is bound so no need to re-check here
// Validate that imageLayout is compatible with aspect_mask and image format
// and validate that image usage bits are correct for given usage
@@ -1319,32 +1339,38 @@ bool CoreChecks::ValidateImageUpdate(VkImageView image_view, VkImageLayout image
VkImage image = iv_state->create_info.image;
VkFormat format = VK_FORMAT_MAX_ENUM;
VkImageUsageFlags usage = 0;
- auto image_node = GetImageState(image);
- assert(image_node);
-
- format = image_node->createInfo.format;
- usage = image_node->createInfo.usage;
- // Validate that memory is bound to image
- // TODO: This should have its own valid usage id apart from 2524, which is from the CreateImageView case. The only
- // way the error here occurs is if memory bound to a created imageView has been freed.
- if (ValidateMemoryIsBoundToImage(image_node, func_name, "VUID-VkImageViewCreateInfo-image-01020")) {
- *error_code = "VUID-VkImageViewCreateInfo-image-01020";
- *error_msg = "No memory bound to image.";
- return false;
- }
+ auto image_node = dev_data->GetImageState(image);
+ if (image_node) {
+ format = image_node->createInfo.format;
+ usage = image_node->createInfo.usage;
+ // Validate that memory is bound to image
+ // TODO: This should have its own valid usage id apart from 2524, which is from the CreateImageView case. The only
+ // way the error here occurs is if memory bound to a created imageView has been freed.
+ if (dev_data->ValidateMemoryIsBoundToImage(dev_data, image_node, func_name, "VUID-VkImageViewCreateInfo-image-01020")) {
+ *error_code = "VUID-VkImageViewCreateInfo-image-01020";
+ *error_msg = "No memory bound to image.";
+ return false;
+ }
- // KHR_maintenance1 allows rendering into 2D or 2DArray views which slice a 3D image,
- // but not binding them to descriptor sets.
- if (image_node->createInfo.imageType == VK_IMAGE_TYPE_3D && (iv_state->create_info.viewType == VK_IMAGE_VIEW_TYPE_2D ||
- iv_state->create_info.viewType == VK_IMAGE_VIEW_TYPE_2D_ARRAY)) {
- *error_code = "VUID-VkDescriptorImageInfo-imageView-00343";
- *error_msg = "ImageView must not be a 2D or 2DArray view of a 3D image";
+ // KHR_maintenance1 allows rendering into 2D or 2DArray views which slice a 3D image,
+ // but not binding them to descriptor sets.
+ if (image_node->createInfo.imageType == VK_IMAGE_TYPE_3D &&
+ (iv_state->create_info.viewType == VK_IMAGE_VIEW_TYPE_2D ||
+ iv_state->create_info.viewType == VK_IMAGE_VIEW_TYPE_2D_ARRAY)) {
+ *error_code = "VUID-VkDescriptorImageInfo-imageView-00343";
+ *error_msg = "ImageView must not be a 2D or 2DArray view of a 3D image";
+ return false;
+ }
+ }
+ // First validate that format and layout are compatible
+ if (format == VK_FORMAT_MAX_ENUM) {
+ std::stringstream error_str;
+ error_str << "Invalid image (" << image << ") in imageView (" << image_view << ").";
+ *error_msg = error_str.str();
return false;
}
-
// TODO : The various image aspect and format checks here are based on general spec language in 11.5 Image Views section under
// vkCreateImageView(). What's the best way to create unique id for these cases?
- *error_code = "UNASSIGNED-CoreValidation-DrawState-InvalidImageView";
bool ds = FormatIsDepthOrStencil(format);
switch (image_layout) {
case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
@@ -1352,7 +1378,7 @@ bool CoreChecks::ValidateImageUpdate(VkImageView image_view, VkImageLayout image
if ((aspect_mask & VK_IMAGE_ASPECT_COLOR_BIT) != VK_IMAGE_ASPECT_COLOR_BIT) {
std::stringstream error_str;
error_str
- << "ImageView (" << report_data->FormatHandle(image_view).c_str()
+ << "ImageView (" << image_view
<< ") uses layout VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL but does not have VK_IMAGE_ASPECT_COLOR_BIT set.";
*error_msg = error_str.str();
return false;
@@ -1360,7 +1386,7 @@ bool CoreChecks::ValidateImageUpdate(VkImageView image_view, VkImageLayout image
// format must NOT be DS
if (ds) {
std::stringstream error_str;
- error_str << "ImageView (" << report_data->FormatHandle(image_view).c_str()
+ error_str << "ImageView (" << image_view
<< ") uses layout VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL but the image format is "
<< string_VkFormat(format) << " which is not a color format.";
*error_msg = error_str.str();
@@ -1374,25 +1400,23 @@ bool CoreChecks::ValidateImageUpdate(VkImageView image_view, VkImageLayout image
if (aspect_mask & VK_IMAGE_ASPECT_STENCIL_BIT) {
// both must NOT be set
std::stringstream error_str;
- error_str << "ImageView (" << report_data->FormatHandle(image_view).c_str()
- << ") has both STENCIL and DEPTH aspects set";
+ error_str << "ImageView (" << image_view << ") has both STENCIL and DEPTH aspects set";
*error_msg = error_str.str();
return false;
}
} else if (!(aspect_mask & VK_IMAGE_ASPECT_STENCIL_BIT)) {
// Neither were set
std::stringstream error_str;
- error_str << "ImageView (" << report_data->FormatHandle(image_view).c_str() << ") has layout "
- << string_VkImageLayout(image_layout) << " but does not have STENCIL or DEPTH aspects set";
+ error_str << "ImageView (" << image_view << ") has layout " << string_VkImageLayout(image_layout)
+ << " but does not have STENCIL or DEPTH aspects set";
*error_msg = error_str.str();
return false;
}
// format must be DS
if (!ds) {
std::stringstream error_str;
- error_str << "ImageView (" << report_data->FormatHandle(image_view).c_str() << ") has layout "
- << string_VkImageLayout(image_layout) << " but the image format is " << string_VkFormat(format)
- << " which is not a depth/stencil format.";
+ error_str << "ImageView (" << image_view << ") has layout " << string_VkImageLayout(image_layout)
+ << " but the image format is " << string_VkFormat(format) << " which is not a depth/stencil format.";
*error_msg = error_str.str();
return false;
}
@@ -1404,14 +1428,12 @@ bool CoreChecks::ValidateImageUpdate(VkImageView image_view, VkImageLayout image
if (aspect_mask & VK_IMAGE_ASPECT_STENCIL_BIT) {
// both must NOT be set
std::stringstream error_str;
- error_str << "ImageView (" << report_data->FormatHandle(image_view).c_str() << ") has layout "
- << string_VkImageLayout(image_layout) << " and is using depth/stencil image of format "
- << string_VkFormat(format)
+ error_str << "ImageView (" << image_view << ") has layout " << string_VkImageLayout(image_layout)
+ << " and is using depth/stencil image of format " << string_VkFormat(format)
<< " but it has both STENCIL and DEPTH aspects set, which is illegal. When using a depth/stencil "
"image in a descriptor set, please only set either VK_IMAGE_ASPECT_DEPTH_BIT or "
"VK_IMAGE_ASPECT_STENCIL_BIT depending on whether it will be used for depth reads or stencil "
"reads respectively.";
- *error_code = "VUID-VkDescriptorImageInfo-imageView-01976";
*error_msg = error_str.str();
return false;
}
@@ -1442,7 +1464,7 @@ bool CoreChecks::ValidateImageUpdate(VkImageView image_view, VkImageLayout image
// TODO : Need to create custom enum error codes for these cases
if (image_node->shared_presentable) {
if (VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR != image_layout) {
- error_str << "ImageView (" << report_data->FormatHandle(image_view).c_str()
+ error_str << "ImageView (" << image_view
<< ") of VK_DESCRIPTOR_TYPE_STORAGE_IMAGE type with a front-buffered image is being updated with "
"layout "
<< string_VkImageLayout(image_layout)
@@ -1453,7 +1475,7 @@ bool CoreChecks::ValidateImageUpdate(VkImageView image_view, VkImageLayout image
return false;
}
} else if (VK_IMAGE_LAYOUT_GENERAL != image_layout) {
- error_str << "ImageView (" << report_data->FormatHandle(image_view).c_str()
+ error_str << "ImageView (" << image_view
<< ") of VK_DESCRIPTOR_TYPE_STORAGE_IMAGE type is being updated with layout "
<< string_VkImageLayout(image_layout)
<< " but according to spec section 13.1 Descriptor Types, 'Load and store operations on storage "
@@ -1475,9 +1497,9 @@ bool CoreChecks::ValidateImageUpdate(VkImageView image_view, VkImageLayout image
}
if (error_usage_bit) {
std::stringstream error_str;
- error_str << "ImageView (" << report_data->FormatHandle(image_view).c_str() << ") with usage mask " << std::hex
- << std::showbase << usage << " being used for a descriptor update of type " << string_VkDescriptorType(type)
- << " does not have " << error_usage_bit << " set.";
+ error_str << "ImageView (" << image_view << ") with usage mask 0x" << usage
+ << " being used for a descriptor update of type " << string_VkDescriptorType(type) << " does not have "
+ << error_usage_bit << " set.";
*error_msg = error_str.str();
return false;
}
@@ -1497,8 +1519,8 @@ bool CoreChecks::ValidateImageUpdate(VkImageView image_view, VkImageLayout image
{VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR, &DeviceExtensions::vk_khr_shared_presentable_image},
{VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL, &DeviceExtensions::vk_khr_maintenance2},
{VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, &DeviceExtensions::vk_khr_maintenance2}}};
- auto is_layout = [image_layout, this](const ExtensionLayout &ext_layout) {
- return device_extensions.*(ext_layout.extension) && (ext_layout.layout == image_layout);
+ auto is_layout = [image_layout, dev_data](const ExtensionLayout &ext_layout) {
+ return dev_data->device_extensions.*(ext_layout.extension) && (ext_layout.layout == image_layout);
};
bool valid_layout = (std::find(valid_layouts.cbegin(), valid_layouts.cend(), image_layout) != valid_layouts.cend()) ||
@@ -1508,13 +1530,11 @@ bool CoreChecks::ValidateImageUpdate(VkImageView image_view, VkImageLayout image
*error_code = "VUID-VkWriteDescriptorSet-descriptorType-01403";
std::stringstream error_str;
error_str << "Descriptor update with descriptorType " << string_VkDescriptorType(type)
- << " is being updated with invalid imageLayout " << string_VkImageLayout(image_layout) << " for image "
- << report_data->FormatHandle(image).c_str() << " in imageView "
- << report_data->FormatHandle(image_view).c_str()
+ << " is being updated with invalid imageLayout " << string_VkImageLayout(image_layout)
<< ". Allowed layouts are: VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, "
<< "VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL";
for (auto &ext_layout : extended_layouts) {
- if (device_extensions.*(ext_layout.extension)) {
+ if (dev_data->device_extensions.*(ext_layout.extension)) {
error_str << ", " << string_VkImageLayout(ext_layout.layout);
}
}
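
The switch above pairs the requested imageLayout with the view's aspect mask and format: color layouts require the color aspect and a non-depth/stencil format, while depth/stencil layouts require a depth/stencil format and exactly one of the depth or stencil aspects. A compact, self-contained sketch of that decision follows; LayoutCompatibleWithView is a hypothetical helper written for illustration, not the layer's function, and it covers only the layouts shown above.

#include <vulkan/vulkan.h>
#include <cstdio>

// Hypothetical helper: does 'layout' make sense for a view with 'aspect_mask' whose format is
// (or is not) depth/stencil? Mirrors the kind of checks made above; not the layer's actual code.
bool LayoutCompatibleWithView(VkImageLayout layout, VkImageAspectFlags aspect_mask, bool ds_format) {
    switch (layout) {
        case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
            // Color layouts need the color aspect and a color (non-depth/stencil) format.
            return (aspect_mask & VK_IMAGE_ASPECT_COLOR_BIT) && !ds_format;
        case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
        case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL: {
            // Depth/stencil layouts need a DS format and exactly one of the depth/stencil aspects.
            const bool depth = (aspect_mask & VK_IMAGE_ASPECT_DEPTH_BIT) != 0;
            const bool stencil = (aspect_mask & VK_IMAGE_ASPECT_STENCIL_BIT) != 0;
            return ds_format && (depth != stencil);
        }
        default:
            return true;  // other layouts are validated elsewhere
    }
}

int main() {
    std::printf("%d\n", LayoutCompatibleWithView(VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
                                                 VK_IMAGE_ASPECT_COLOR_BIT, false));  // 1: compatible
    std::printf("%d\n", LayoutCompatibleWithView(VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL,
                                                 VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT,
                                                 true));                              // 0: both aspects set
}
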
@@ -1541,7 +1561,7 @@ void cvdescriptorset::SamplerDescriptor::CopyUpdate(const Descriptor *src) {
updated = true;
}
-void cvdescriptorset::SamplerDescriptor::UpdateDrawState(ValidationStateTracker *dev_data, CMD_BUFFER_STATE *cb_node) {
+void cvdescriptorset::SamplerDescriptor::UpdateDrawState(layer_data *dev_data, GLOBAL_CB_NODE *cb_node) {
if (!immutable_) {
auto sampler_state = dev_data->GetSamplerState(sampler_);
if (sampler_state) dev_data->AddCommandBufferBindingSampler(cb_node, sampler_state);
@@ -1580,7 +1600,7 @@ void cvdescriptorset::ImageSamplerDescriptor::CopyUpdate(const Descriptor *src)
image_layout_ = image_layout;
}
-void cvdescriptorset::ImageSamplerDescriptor::UpdateDrawState(ValidationStateTracker *dev_data, CMD_BUFFER_STATE *cb_node) {
+void cvdescriptorset::ImageSamplerDescriptor::UpdateDrawState(layer_data *dev_data, GLOBAL_CB_NODE *cb_node) {
// First add binding for any non-immutable sampler
if (!immutable_) {
auto sampler_state = dev_data->GetSamplerState(sampler_);
@@ -1589,8 +1609,10 @@ void cvdescriptorset::ImageSamplerDescriptor::UpdateDrawState(ValidationStateTra
// Add binding for image
auto iv_state = dev_data->GetImageViewState(image_view_);
if (iv_state) {
- dev_data->AddCommandBufferBindingImageView(cb_node, iv_state);
- dev_data->CallSetImageViewInitialLayoutCallback(cb_node, *iv_state, image_layout_);
+ dev_data->AddCommandBufferBindingImageView(dev_data, cb_node, iv_state);
+ }
+ if (image_view_) {
+ dev_data->SetImageViewLayout(dev_data, cb_node, image_view_, image_layout_);
}
}
@@ -1616,12 +1638,14 @@ void cvdescriptorset::ImageDescriptor::CopyUpdate(const Descriptor *src) {
image_layout_ = image_layout;
}
-void cvdescriptorset::ImageDescriptor::UpdateDrawState(ValidationStateTracker *dev_data, CMD_BUFFER_STATE *cb_node) {
+void cvdescriptorset::ImageDescriptor::UpdateDrawState(layer_data *dev_data, GLOBAL_CB_NODE *cb_node) {
// Add binding for image
auto iv_state = dev_data->GetImageViewState(image_view_);
if (iv_state) {
- dev_data->AddCommandBufferBindingImageView(cb_node, iv_state);
- dev_data->CallSetImageViewInitialLayoutCallback(cb_node, *iv_state, image_layout_);
+ dev_data->AddCommandBufferBindingImageView(dev_data, cb_node, iv_state);
+ }
+ if (image_view_) {
+ dev_data->SetImageViewLayout(dev_data, cb_node, image_view_, image_layout_);
}
}
@@ -1654,9 +1678,9 @@ void cvdescriptorset::BufferDescriptor::CopyUpdate(const Descriptor *src) {
range_ = buff_desc->range_;
}
-void cvdescriptorset::BufferDescriptor::UpdateDrawState(ValidationStateTracker *dev_data, CMD_BUFFER_STATE *cb_node) {
+void cvdescriptorset::BufferDescriptor::UpdateDrawState(layer_data *dev_data, GLOBAL_CB_NODE *cb_node) {
auto buffer_node = dev_data->GetBufferState(buffer_);
- if (buffer_node) dev_data->AddCommandBufferBindingBuffer(cb_node, buffer_node);
+ if (buffer_node) dev_data->AddCommandBufferBindingBuffer(dev_data, cb_node, buffer_node);
}
cvdescriptorset::TexelDescriptor::TexelDescriptor(const VkDescriptorType type) : buffer_view_(VK_NULL_HANDLE), storage_(false) {
@@ -1675,10 +1699,10 @@ void cvdescriptorset::TexelDescriptor::CopyUpdate(const Descriptor *src) {
buffer_view_ = static_cast<const TexelDescriptor *>(src)->buffer_view_;
}
-void cvdescriptorset::TexelDescriptor::UpdateDrawState(ValidationStateTracker *dev_data, CMD_BUFFER_STATE *cb_node) {
+void cvdescriptorset::TexelDescriptor::UpdateDrawState(layer_data *dev_data, GLOBAL_CB_NODE *cb_node) {
auto bv_state = dev_data->GetBufferViewState(buffer_view_);
if (bv_state) {
- dev_data->AddCommandBufferBindingBufferView(cb_node, bv_state);
+ dev_data->AddCommandBufferBindingBufferView(dev_data, cb_node, bv_state);
}
}
@@ -1687,7 +1711,8 @@ void cvdescriptorset::TexelDescriptor::UpdateDrawState(ValidationStateTracker *d
// If the update hits an issue for which the callback returns "true", meaning that the call down the chain should
// be skipped, then true is returned.
// If there is no issue with the update, then false is returned.
-bool CoreChecks::ValidateUpdateDescriptorSets(uint32_t write_count, const VkWriteDescriptorSet *p_wds, uint32_t copy_count,
+bool CoreChecks::ValidateUpdateDescriptorSets(const debug_report_data *report_data, const layer_data *dev_data,
+ uint32_t write_count, const VkWriteDescriptorSet *p_wds, uint32_t copy_count,
const VkCopyDescriptorSet *p_cds, const char *func_name) {
bool skip = false;
// Validate Write updates
@@ -1697,15 +1722,16 @@ bool CoreChecks::ValidateUpdateDescriptorSets(uint32_t write_count, const VkWrit
if (!set_node) {
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
HandleToUint64(dest_set), kVUID_Core_DrawState_InvalidDescriptorSet,
- "Cannot call %s on %s that has not been allocated.", func_name,
+ "Cannot call %s on descriptor set %s that has not been allocated.", func_name,
report_data->FormatHandle(dest_set).c_str());
} else {
std::string error_code;
std::string error_str;
- if (!ValidateWriteUpdate(set_node, &p_wds[i], func_name, &error_code, &error_str)) {
+ if (!set_node->ValidateWriteUpdate(report_data, &p_wds[i], func_name, &error_code, &error_str)) {
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
- HandleToUint64(dest_set), error_code, "%s failed write update validation for %s with error: %s.",
- func_name, report_data->FormatHandle(dest_set).c_str(), error_str.c_str());
+ HandleToUint64(dest_set), error_code,
+ "%s failed write update validation for Descriptor Set %s with error: %s.", func_name,
+ report_data->FormatHandle(dest_set).c_str(), error_str.c_str());
}
}
}
@@ -1720,11 +1746,11 @@ bool CoreChecks::ValidateUpdateDescriptorSets(uint32_t write_count, const VkWrit
assert(dst_node);
std::string error_code;
std::string error_str;
- if (!ValidateCopyUpdate(&p_cds[i], dst_node, src_node, func_name, &error_code, &error_str)) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
- HandleToUint64(dst_set), error_code, "%s failed copy update from %s to %s with error: %s.", func_name,
- report_data->FormatHandle(src_set).c_str(), report_data->FormatHandle(dst_set).c_str(), error_str.c_str());
+ if (!dst_node->ValidateCopyUpdate(report_data, &p_cds[i], src_node, func_name, &error_code, &error_str)) {
+ skip |= log_msg(
+ report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, HandleToUint64(dst_set),
+ error_code, "%s failed copy update from Descriptor Set %s to Descriptor Set %s with error: %s.", func_name,
+ report_data->FormatHandle(src_set).c_str(), report_data->FormatHandle(dst_set).c_str(), error_str.c_str());
}
}
return skip;
@@ -1735,9 +1761,8 @@ bool CoreChecks::ValidateUpdateDescriptorSets(uint32_t write_count, const VkWrit
// with the same set of updates.
// This is split from the validate code to allow validation prior to calling down the chain, and then update after
// calling down the chain.
-void cvdescriptorset::PerformUpdateDescriptorSets(ValidationStateTracker *dev_data, uint32_t write_count,
- const VkWriteDescriptorSet *p_wds, uint32_t copy_count,
- const VkCopyDescriptorSet *p_cds) {
+void cvdescriptorset::PerformUpdateDescriptorSets(layer_data *dev_data, uint32_t write_count, const VkWriteDescriptorSet *p_wds,
+ uint32_t copy_count, const VkCopyDescriptorSet *p_cds) {
// Write updates first
uint32_t i = 0;
for (i = 0; i < write_count; ++i) {
@@ -1759,9 +1784,9 @@ void cvdescriptorset::PerformUpdateDescriptorSets(ValidationStateTracker *dev_da
}
}
-cvdescriptorset::DecodedTemplateUpdate::DecodedTemplateUpdate(const ValidationStateTracker *device_data,
- VkDescriptorSet descriptorSet, const TEMPLATE_STATE *template_state,
- const void *pData, VkDescriptorSetLayout push_layout) {
+cvdescriptorset::DecodedTemplateUpdate::DecodedTemplateUpdate(layer_data *device_data, VkDescriptorSet descriptorSet,
+ const TEMPLATE_STATE *template_state, const void *pData,
+ VkDescriptorSetLayout push_layout) {
auto const &create_info = template_state->create_info;
inline_infos.resize(create_info.descriptorUpdateEntryCount); // Make sure we have one if we need it
desc_writes.reserve(create_info.descriptorUpdateEntryCount); // emplaced, so reserved without initialization
@@ -1824,8 +1849,6 @@ cvdescriptorset::DecodedTemplateUpdate::DecodedTemplateUpdate(const ValidationSt
inline_info->dataSize = create_info.pDescriptorUpdateEntries[i].descriptorCount;
inline_info->pData = update_entry;
write_entry.pNext = inline_info;
- // descriptorCount must match the dataSize member of the VkWriteDescriptorSetInlineUniformBlockEXT structure
- write_entry.descriptorCount = inline_info->dataSize;
// skip the rest of the array, they just represent bytes in the update
j = create_info.pDescriptorUpdateEntries[i].descriptorCount;
break;
@@ -1840,57 +1863,181 @@ cvdescriptorset::DecodedTemplateUpdate::DecodedTemplateUpdate(const ValidationSt
}
// These helper functions carry out the validation and recording of descriptor updates performed via update templates. They decode
// the templatized data and leverage the non-template UpdateDescriptor helper functions.
-bool CoreChecks::ValidateUpdateDescriptorSetsWithTemplateKHR(VkDescriptorSet descriptorSet, const TEMPLATE_STATE *template_state,
- const void *pData) {
+bool CoreChecks::ValidateUpdateDescriptorSetsWithTemplateKHR(layer_data *device_data, VkDescriptorSet descriptorSet,
+ const TEMPLATE_STATE *template_state, const void *pData) {
// Translate the templated update into a normal update for validation...
- cvdescriptorset::DecodedTemplateUpdate decoded_update(this, descriptorSet, template_state, pData);
- return ValidateUpdateDescriptorSets(static_cast<uint32_t>(decoded_update.desc_writes.size()), decoded_update.desc_writes.data(),
- 0, NULL, "vkUpdateDescriptorSetWithTemplate()");
+ cvdescriptorset::DecodedTemplateUpdate decoded_update(device_data, descriptorSet, template_state, pData);
+ return ValidateUpdateDescriptorSets(GetReportData(), device_data, static_cast<uint32_t>(decoded_update.desc_writes.size()),
+ decoded_update.desc_writes.data(), 0, NULL, "vkUpdateDescriptorSetWithTemplate()");
}
-void ValidationStateTracker::PerformUpdateDescriptorSetsWithTemplateKHR(VkDescriptorSet descriptorSet,
- const TEMPLATE_STATE *template_state, const void *pData) {
+void CoreChecks::PerformUpdateDescriptorSetsWithTemplateKHR(layer_data *device_data, VkDescriptorSet descriptorSet,
+ const TEMPLATE_STATE *template_state, const void *pData) {
// Translate the templated update into a normal update for validation...
- cvdescriptorset::DecodedTemplateUpdate decoded_update(this, descriptorSet, template_state, pData);
- cvdescriptorset::PerformUpdateDescriptorSets(this, static_cast<uint32_t>(decoded_update.desc_writes.size()),
+ cvdescriptorset::DecodedTemplateUpdate decoded_update(device_data, descriptorSet, template_state, pData);
+ cvdescriptorset::PerformUpdateDescriptorSets(device_data, static_cast<uint32_t>(decoded_update.desc_writes.size()),
decoded_update.desc_writes.data(), 0, NULL);
}
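
DecodedTemplateUpdate translates a descriptor update template plus the caller's raw pData blob into ordinary VkWriteDescriptorSet entries, so the non-template validate and record paths can be reused. The core of that translation is address arithmetic over each VkDescriptorUpdateTemplateEntry's offset and stride; a minimal sketch follows (DecodeEntryPointers is an invented helper and omits the surrounding VkWriteDescriptorSet construction).

#include <vulkan/vulkan.h>
#include <cstdint>
#include <cstdio>
#include <vector>

// Sketch of the decode step: descriptor j of a template entry lives at pData + offset + j * stride.
// Hypothetical helper, not the layer's DecodedTemplateUpdate.
std::vector<const void*> DecodeEntryPointers(const void* pData, const VkDescriptorUpdateTemplateEntry& entry) {
    std::vector<const void*> ptrs;
    const uint8_t* base = static_cast<const uint8_t*>(pData);
    for (uint32_t j = 0; j < entry.descriptorCount; ++j) {
        ptrs.push_back(base + entry.offset + j * entry.stride);
    }
    return ptrs;
}

int main() {
    // Pretend the application packed three VkDescriptorImageInfo structs back to back.
    VkDescriptorImageInfo infos[3] = {};
    VkDescriptorUpdateTemplateEntry entry{};
    entry.descriptorCount = 3;
    entry.offset = 0;
    entry.stride = sizeof(VkDescriptorImageInfo);
    auto ptrs = DecodeEntryPointers(infos, entry);
    std::printf("decoded %zu descriptor payload pointers\n", ptrs.size());
}
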
std::string cvdescriptorset::DescriptorSet::StringifySetAndLayout() const {
std::string out;
- auto layout_handle = p_layout_->GetDescriptorSetLayout();
+ uint64_t layout_handle = HandleToUint64(p_layout_->GetDescriptorSetLayout());
if (IsPushDescriptor()) {
- string_sprintf(&out, "Push Descriptors defined with VkDescriptorSetLayout %s",
- state_data_->report_data->FormatHandle(layout_handle).c_str());
+ string_sprintf(&out, "Push Descriptors defined with VkDescriptorSetLayout 0x%" PRIxLEAST64, layout_handle);
} else {
- string_sprintf(&out, "VkDescriptorSet %s allocated with VkDescriptorSetLayout %s",
- state_data_->report_data->FormatHandle(set_).c_str(),
- state_data_->report_data->FormatHandle(layout_handle).c_str());
+ string_sprintf(&out, "VkDescriptorSet 0x%" PRIxLEAST64 "allocated with VkDescriptorSetLayout 0x%" PRIxLEAST64,
+ HandleToUint64(set_), layout_handle);
}
return out;
};
// Loop through the write updates to validate for a push descriptor set, ignoring dstSet
-bool CoreChecks::ValidatePushDescriptorsUpdate(const DescriptorSet *push_set, uint32_t write_count,
- const VkWriteDescriptorSet *p_wds, const char *func_name) {
- assert(push_set->IsPushDescriptor());
+bool cvdescriptorset::DescriptorSet::ValidatePushDescriptorsUpdate(const debug_report_data *report_data, uint32_t write_count,
+ const VkWriteDescriptorSet *p_wds, const char *func_name) {
+ assert(IsPushDescriptor());
bool skip = false;
for (uint32_t i = 0; i < write_count; i++) {
std::string error_code;
std::string error_str;
- if (!ValidateWriteUpdate(push_set, &p_wds[i], func_name, &error_code, &error_str)) {
+ if (!ValidateWriteUpdate(report_data, &p_wds[i], func_name, &error_code, &error_str)) {
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT,
- HandleToUint64(push_set->GetDescriptorSetLayout()), error_code, "%s failed update validation: %s.",
+ HandleToUint64(p_layout_->GetDescriptorSetLayout()), error_code, "%s failed update validation: %s.",
func_name, error_str.c_str());
}
}
return skip;
}
+// Validate the state for a given write update but don't actually perform the update
+// If an error would occur for this update, return false and fill in details in error_msg string
+bool cvdescriptorset::DescriptorSet::ValidateWriteUpdate(const debug_report_data *report_data, const VkWriteDescriptorSet *update,
+ const char *func_name, std::string *error_code, std::string *error_msg) {
+ // Verify dst layout still valid
+ if (p_layout_->IsDestroyed()) {
+ *error_code = "VUID-VkWriteDescriptorSet-dstSet-00320";
+ string_sprintf(error_msg, "Cannot call %s to perform write update on %s which has been destroyed", func_name,
+ StringifySetAndLayout().c_str());
+ return false;
+ }
+ // Verify dst binding exists
+ if (!p_layout_->HasBinding(update->dstBinding)) {
+ *error_code = "VUID-VkWriteDescriptorSet-dstBinding-00315";
+ std::stringstream error_str;
+ error_str << StringifySetAndLayout() << " does not have binding " << update->dstBinding;
+ *error_msg = error_str.str();
+ return false;
+ } else {
+ // Make sure binding isn't empty
+ if (0 == p_layout_->GetDescriptorCountFromBinding(update->dstBinding)) {
+ *error_code = "VUID-VkWriteDescriptorSet-dstBinding-00316";
+ std::stringstream error_str;
+ error_str << StringifySetAndLayout() << " cannot update binding " << update->dstBinding << " that has 0 descriptors";
+ *error_msg = error_str.str();
+ return false;
+ }
+ }
+ // Verify idle ds
+ if (in_use.load() &&
+ !(p_layout_->GetDescriptorBindingFlagsFromBinding(update->dstBinding) &
+ (VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT | VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT))) {
+ // TODO : Re-using Free Idle error code, need write update idle error code
+ *error_code = "VUID-vkFreeDescriptorSets-pDescriptorSets-00309";
+ std::stringstream error_str;
+ error_str << "Cannot call " << func_name << " to perform write update on " << StringifySetAndLayout()
+ << " that is in use by a command buffer";
+ *error_msg = error_str.str();
+ return false;
+ }
+ // We know that binding is valid, verify update and do update on each descriptor
+ auto start_idx = p_layout_->GetGlobalIndexRangeFromBinding(update->dstBinding).start + update->dstArrayElement;
+ auto type = p_layout_->GetTypeFromBinding(update->dstBinding);
+ if (type != update->descriptorType) {
+ *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00319";
+ std::stringstream error_str;
+ error_str << "Attempting write update to " << StringifySetAndLayout() << " binding #" << update->dstBinding << " with type "
+ << string_VkDescriptorType(type) << " but update type is " << string_VkDescriptorType(update->descriptorType);
+ *error_msg = error_str.str();
+ return false;
+ }
+ if (update->descriptorCount > (descriptors_.size() - start_idx)) {
+ *error_code = "VUID-VkWriteDescriptorSet-dstArrayElement-00321";
+ std::stringstream error_str;
+ error_str << "Attempting write update to " << StringifySetAndLayout() << " binding #" << update->dstBinding << " with "
+ << descriptors_.size() - start_idx
+ << " descriptors in that binding and all successive bindings of the set, but update of "
+ << update->descriptorCount << " descriptors combined with update array element offset of "
+ << update->dstArrayElement << " oversteps the available number of consecutive descriptors";
+ *error_msg = error_str.str();
+ return false;
+ }
+ if (type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) {
+ if ((update->dstArrayElement % 4) != 0) {
+ *error_code = "VUID-VkWriteDescriptorSet-descriptorType-02219";
+ std::stringstream error_str;
+ error_str << "Attempting write update to " << StringifySetAndLayout() << " binding #" << update->dstBinding << " with "
+ << "dstArrayElement " << update->dstArrayElement << " not a multiple of 4";
+ *error_msg = error_str.str();
+ return false;
+ }
+ if ((update->descriptorCount % 4) != 0) {
+ *error_code = "VUID-VkWriteDescriptorSet-descriptorType-02220";
+ std::stringstream error_str;
+ error_str << "Attempting write update to " << StringifySetAndLayout() << " binding #" << update->dstBinding << " with "
+ << "descriptorCount " << update->descriptorCount << " not a multiple of 4";
+ *error_msg = error_str.str();
+ return false;
+ }
+ const auto *write_inline_info = lvl_find_in_chain<VkWriteDescriptorSetInlineUniformBlockEXT>(update->pNext);
+ if (!write_inline_info || write_inline_info->dataSize != update->descriptorCount) {
+ *error_code = "VUID-VkWriteDescriptorSet-descriptorType-02221";
+ std::stringstream error_str;
+ if (!write_inline_info) {
+ error_str << "Attempting write update to " << StringifySetAndLayout() << " binding #" << update->dstBinding
+ << " with "
+ << "VkWriteDescriptorSetInlineUniformBlockEXT missing";
+ } else {
+ error_str << "Attempting write update to " << StringifySetAndLayout() << " binding #" << update->dstBinding
+ << " with "
+ << "VkWriteDescriptorSetInlineUniformBlockEXT dataSize " << write_inline_info->dataSize
+ << " not equal to "
+ << "VkWriteDescriptorSet descriptorCount " << update->descriptorCount;
+ }
+ *error_msg = error_str.str();
+ return false;
+ }
+ // This error is probably unreachable due to the previous two errors
+ if (write_inline_info && (write_inline_info->dataSize % 4) != 0) {
+ *error_code = "VUID-VkWriteDescriptorSetInlineUniformBlockEXT-dataSize-02222";
+ std::stringstream error_str;
+ error_str << "Attempting write update to " << StringifySetAndLayout() << " binding #" << update->dstBinding << " with "
+ << "VkWriteDescriptorSetInlineUniformBlockEXT dataSize " << write_inline_info->dataSize
+ << " not a multiple of 4";
+ *error_msg = error_str.str();
+ return false;
+ }
+ }
+ // Verify consecutive bindings match (if needed)
+ if (!p_layout_->VerifyUpdateConsistency(update->dstBinding, update->dstArrayElement, update->descriptorCount, "write update to",
+ set_, error_msg)) {
+ // TODO : Should break out "consecutive binding updates" language into valid usage statements
+ *error_code = "VUID-VkWriteDescriptorSet-dstArrayElement-00321";
+ return false;
+ }
+ // Update is within bounds and consistent so last step is to validate update contents
+ if (!VerifyWriteUpdateContents(update, start_idx, func_name, error_code, error_msg)) {
+ std::stringstream error_str;
+ error_str << "Write update to " << StringifySetAndLayout() << " binding #" << update->dstBinding
+ << " failed with error message: " << error_msg->c_str();
+ *error_msg = error_str.str();
+ return false;
+ }
+ // All checks passed, update is clean
+ return true;
+}
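
ValidateWriteUpdate above proceeds in stages: the layout must still exist, the destination binding must exist and be non-empty, the set must be idle (or allow update-after-bind), the descriptor type must match, the write must stay within the consecutive descriptors available from the start index, inline uniform block writes must be 4-byte aligned with a matching dataSize, and only then are the update contents themselves checked. The bounds and alignment arithmetic is sketched below with hypothetical types (FlatBinding, CheckWriteBounds); it illustrates the rules, it is not the layer's implementation.

#include <cstdint>
#include <cstdio>

// Hypothetical, flattened view of one destination binding: 'available' counts the descriptors from
// the binding's global start index through the end of the set. Not the layer's real layout class.
struct FlatBinding {
    uint32_t available;
    bool inline_uniform_block;
};

// Returns nullptr on success, or a short reason mirroring the bounds/alignment checks above.
const char* CheckWriteBounds(const FlatBinding& b, uint32_t dst_array_element, uint32_t descriptor_count,
                             uint32_t inline_data_size) {
    if (dst_array_element + descriptor_count > b.available)
        return "update oversteps the available consecutive descriptors";
    if (b.inline_uniform_block) {
        // Inline uniform blocks are addressed in bytes: offset and count must be multiples of 4,
        // and the chained dataSize must equal descriptorCount.
        if (dst_array_element % 4) return "dstArrayElement is not a multiple of 4";
        if (descriptor_count % 4) return "descriptorCount is not a multiple of 4";
        if (inline_data_size != descriptor_count) return "dataSize does not equal descriptorCount";
    }
    return nullptr;
}

int main() {
    FlatBinding block{64, true};
    const char* err = CheckWriteBounds(block, 8, 6, 6);  // descriptorCount 6 is not a multiple of 4
    std::printf("%s\n", err ? err : "ok");
}
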
// For the given buffer, verify that its creation parameters are appropriate for the given type
// If there's an error, update the error_msg string with details and return false, else return true
-bool cvdescriptorset::ValidateBufferUsage(BUFFER_STATE const *buffer_node, VkDescriptorType type, std::string *error_code,
- std::string *error_msg) {
+bool cvdescriptorset::DescriptorSet::ValidateBufferUsage(BUFFER_STATE const *buffer_node, VkDescriptorType type,
+ std::string *error_code, std::string *error_msg) const {
// Verify that usage bits set correctly for given type
auto usage = buffer_node->createInfo.usage;
const char *error_usage_bit = nullptr;
@@ -1926,7 +2073,7 @@ bool cvdescriptorset::ValidateBufferUsage(BUFFER_STATE const *buffer_node, VkDes
}
if (error_usage_bit) {
std::stringstream error_str;
- error_str << "Buffer (" << buffer_node->buffer << ") with usage mask " << std::hex << std::showbase << usage
+ error_str << "Buffer (" << buffer_node->buffer << ") with usage mask 0x" << usage
<< " being used for a descriptor update of type " << string_VkDescriptorType(type) << " does not have "
<< error_usage_bit << " set.";
*error_msg = error_str.str();
@@ -1941,19 +2088,21 @@ bool cvdescriptorset::ValidateBufferUsage(BUFFER_STATE const *buffer_node, VkDes
// 4. range is either VK_WHOLE_SIZE or falls in (0, (buffer size - offset)]
// 5. range and offset are within the device's limits
// If there's an error, update the error_msg string with details and return false, else return true
-bool CoreChecks::ValidateBufferUpdate(VkDescriptorBufferInfo const *buffer_info, VkDescriptorType type, const char *func_name,
- std::string *error_code, std::string *error_msg) {
+bool cvdescriptorset::DescriptorSet::ValidateBufferUpdate(VkDescriptorBufferInfo const *buffer_info, VkDescriptorType type,
+ const char *func_name, std::string *error_code,
+ std::string *error_msg) const {
// First make sure that buffer is valid
- auto buffer_node = GetBufferState(buffer_info->buffer);
+ auto buffer_node = device_data_->GetBufferState(buffer_info->buffer);
// Any invalid buffer should already be caught by object_tracker
assert(buffer_node);
- if (ValidateMemoryIsBoundToBuffer(buffer_node, func_name, "VUID-VkWriteDescriptorSet-descriptorType-00329")) {
+ if (device_data_->ValidateMemoryIsBoundToBuffer(device_data_, buffer_node, func_name,
+ "VUID-VkWriteDescriptorSet-descriptorType-00329")) {
*error_code = "VUID-VkWriteDescriptorSet-descriptorType-00329";
*error_msg = "No memory bound to buffer.";
return false;
}
// Verify usage bits
- if (!cvdescriptorset::ValidateBufferUsage(buffer_node, type, error_code, error_msg)) {
+ if (!ValidateBufferUsage(buffer_node, type, error_code, error_msg)) {
// error_msg will have been updated by ValidateBufferUsage()
return false;
}
@@ -1986,9 +2135,8 @@ bool CoreChecks::ValidateBufferUpdate(VkDescriptorBufferInfo const *buffer_info,
}
}
// Check buffer update sizes against device limits
- const auto &limits = phys_dev_props.limits;
if (VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER == type || VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC == type) {
- auto max_ub_range = limits.maxUniformBufferRange;
+ auto max_ub_range = limits_.maxUniformBufferRange;
if (buffer_info->range != VK_WHOLE_SIZE && buffer_info->range > max_ub_range) {
*error_code = "VUID-VkWriteDescriptorSet-descriptorType-00332";
std::stringstream error_str;
@@ -2006,7 +2154,7 @@ bool CoreChecks::ValidateBufferUpdate(VkDescriptorBufferInfo const *buffer_info,
return false;
}
} else if (VK_DESCRIPTOR_TYPE_STORAGE_BUFFER == type || VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC == type) {
- auto max_sb_range = limits.maxStorageBufferRange;
+ auto max_sb_range = limits_.maxStorageBufferRange;
if (buffer_info->range != VK_WHOLE_SIZE && buffer_info->range > max_sb_range) {
*error_code = "VUID-VkWriteDescriptorSet-descriptorType-00333";
std::stringstream error_str;
@@ -2026,27 +2174,150 @@ bool CoreChecks::ValidateBufferUpdate(VkDescriptorBufferInfo const *buffer_info,
}
return true;
}
+
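
ValidateBufferUpdate enforces the numbered rules in the comment block above it: the buffer must be valid with memory bound, the usage flags must match the descriptor type, the offset must be less than the buffer size, the range must be VK_WHOLE_SIZE or fall in (0, size - offset], and uniform/storage ranges must respect the device limits. A self-contained sketch of the range arithmetic, assuming only the Vulkan headers (CheckBufferRange is an invented helper, not the layer's function):

#include <vulkan/vulkan.h>
#include <cstdio>

// Hypothetical check combining the offset/range rules with the device-limit checks shown above.
const char* CheckBufferRange(VkDeviceSize buffer_size, VkDeviceSize offset, VkDeviceSize range,
                             VkDescriptorType type, const VkPhysicalDeviceLimits& limits) {
    if (offset >= buffer_size) return "offset is not less than the buffer size";
    if (range != VK_WHOLE_SIZE) {
        if (range == 0 || range > buffer_size - offset) return "range does not fall in (0, size - offset]";
        if ((type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER || type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) &&
            range > limits.maxUniformBufferRange)
            return "range exceeds maxUniformBufferRange";
        if ((type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER || type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC) &&
            range > limits.maxStorageBufferRange)
            return "range exceeds maxStorageBufferRange";
    }
    return nullptr;
}

int main() {
    VkPhysicalDeviceLimits limits{};
    limits.maxUniformBufferRange = 16384;  // illustrative value only
    const char* err = CheckBufferRange(65536, 0, 32768, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, limits);
    std::printf("%s\n", err ? err : "ok");
}
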
+// Verify that the contents of the update are ok, but don't perform actual update
+bool cvdescriptorset::DescriptorSet::VerifyWriteUpdateContents(const VkWriteDescriptorSet *update, const uint32_t index,
+ const char *func_name, std::string *error_code,
+ std::string *error_msg) const {
+ switch (update->descriptorType) {
+ case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: {
+ for (uint32_t di = 0; di < update->descriptorCount; ++di) {
+ // Validate image
+ auto image_view = update->pImageInfo[di].imageView;
+ auto image_layout = update->pImageInfo[di].imageLayout;
+ if (!ValidateImageUpdate(image_view, image_layout, update->descriptorType, device_data_, func_name, error_code,
+ error_msg)) {
+ std::stringstream error_str;
+ error_str << "Attempted write update to combined image sampler descriptor failed due to: "
+ << error_msg->c_str();
+ *error_msg = error_str.str();
+ return false;
+ }
+ if (device_data_->GetDeviceExtensions()->vk_khr_sampler_ycbcr_conversion) {
+ ImageSamplerDescriptor *desc = (ImageSamplerDescriptor *)descriptors_[index].get();
+ if (desc->IsImmutableSampler()) {
+ auto sampler_state = device_data_->GetSamplerState(desc->GetSampler());
+ auto iv_state = device_data_->GetImageViewState(image_view);
+ if (iv_state && sampler_state) {
+ if (iv_state->samplerConversion != sampler_state->samplerConversion) {
+ *error_code = "VUID-VkWriteDescriptorSet-descriptorType-01948";
+ std::stringstream error_str;
+ error_str << "Attempted write update to combined image sampler and image view and sampler ycbcr "
+ "conversions are not identical, sampler: "
+ << desc->GetSampler() << " image view: " << iv_state->image_view << ".";
+ *error_msg = error_str.str();
+ return false;
+ }
+ }
+ }
+ }
+ }
+ }
+ // fall through
+ case VK_DESCRIPTOR_TYPE_SAMPLER: {
+ for (uint32_t di = 0; di < update->descriptorCount; ++di) {
+ if (!descriptors_[index + di].get()->IsImmutableSampler()) {
+ if (!ValidateSampler(update->pImageInfo[di].sampler, device_data_)) {
+ *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00325";
+ std::stringstream error_str;
+ error_str << "Attempted write update to sampler descriptor with invalid sampler: "
+ << update->pImageInfo[di].sampler << ".";
+ *error_msg = error_str.str();
+ return false;
+ }
+ } else {
+ // TODO : Warn here
+ }
+ }
+ break;
+ }
+ case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
+ case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
+ case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE: {
+ for (uint32_t di = 0; di < update->descriptorCount; ++di) {
+ auto image_view = update->pImageInfo[di].imageView;
+ auto image_layout = update->pImageInfo[di].imageLayout;
+ if (!ValidateImageUpdate(image_view, image_layout, update->descriptorType, device_data_, func_name, error_code,
+ error_msg)) {
+ std::stringstream error_str;
+ error_str << "Attempted write update to image descriptor failed due to: " << error_msg->c_str();
+ *error_msg = error_str.str();
+ return false;
+ }
+ }
+ break;
+ }
+ case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
+ case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: {
+ for (uint32_t di = 0; di < update->descriptorCount; ++di) {
+ auto buffer_view = update->pTexelBufferView[di];
+ auto bv_state = device_data_->GetBufferViewState(buffer_view);
+ if (!bv_state) {
+ *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00323";
+ std::stringstream error_str;
+ error_str << "Attempted write update to texel buffer descriptor with invalid buffer view: " << buffer_view;
+ *error_msg = error_str.str();
+ return false;
+ }
+ auto buffer = bv_state->create_info.buffer;
+ auto buffer_state = device_data_->GetBufferState(buffer);
+ // Verify that buffer underlying the view hasn't been destroyed prematurely
+ if (!buffer_state) {
+ *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00323";
+ std::stringstream error_str;
+ error_str << "Attempted write update to texel buffer descriptor failed because underlying buffer (" << buffer
+ << ") has been destroyed: " << error_msg->c_str();
+ *error_msg = error_str.str();
+ return false;
+ } else if (!ValidateBufferUsage(buffer_state, update->descriptorType, error_code, error_msg)) {
+ std::stringstream error_str;
+ error_str << "Attempted write update to texel buffer descriptor failed due to: " << error_msg->c_str();
+ *error_msg = error_str.str();
+ return false;
+ }
+ }
+ break;
+ }
+ case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
+ case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
+ case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
+ case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
+ for (uint32_t di = 0; di < update->descriptorCount; ++di) {
+ if (!ValidateBufferUpdate(update->pBufferInfo + di, update->descriptorType, func_name, error_code, error_msg)) {
+ std::stringstream error_str;
+ error_str << "Attempted write update to buffer descriptor failed due to: " << error_msg->c_str();
+ *error_msg = error_str.str();
+ return false;
+ }
+ }
+ break;
+ }
+ case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT:
+ break;
+ case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV:
+ // XXX TODO
+ break;
+ default:
+ assert(0); // We've already verified update type so should never get here
+ break;
+ }
+ // All checks passed so update contents are good
+ return true;
+}
// Verify that the contents of the update are ok, but don't perform actual update
-bool CoreChecks::VerifyCopyUpdateContents(const VkCopyDescriptorSet *update, const DescriptorSet *src_set, VkDescriptorType type,
- uint32_t index, const char *func_name, std::string *error_code, std::string *error_msg) {
+bool cvdescriptorset::DescriptorSet::VerifyCopyUpdateContents(const VkCopyDescriptorSet *update, const DescriptorSet *src_set,
+ VkDescriptorType type, uint32_t index, const char *func_name,
+ std::string *error_code, std::string *error_msg) const {
// Note : Repurposing some Write update error codes here as specific details aren't called out for copy updates like they are
// for write updates
- using DescriptorClass = cvdescriptorset::DescriptorClass;
- using BufferDescriptor = cvdescriptorset::BufferDescriptor;
- using ImageDescriptor = cvdescriptorset::ImageDescriptor;
- using ImageSamplerDescriptor = cvdescriptorset::ImageSamplerDescriptor;
- using SamplerDescriptor = cvdescriptorset::SamplerDescriptor;
- using TexelDescriptor = cvdescriptorset::TexelDescriptor;
-
- auto device_data = this;
- switch (src_set->GetDescriptorFromGlobalIndex(index)->descriptor_class) {
- case DescriptorClass::PlainSampler: {
+ switch (src_set->descriptors_[index]->descriptor_class) {
+ case PlainSampler: {
for (uint32_t di = 0; di < update->descriptorCount; ++di) {
- const auto src_desc = src_set->GetDescriptorFromGlobalIndex(index + di);
+ const auto src_desc = src_set->descriptors_[index + di].get();
if (!src_desc->updated) continue;
if (!src_desc->IsImmutableSampler()) {
- auto update_sampler = static_cast<const SamplerDescriptor *>(src_desc)->GetSampler();
- if (!ValidateSampler(update_sampler)) {
+ auto update_sampler = static_cast<SamplerDescriptor *>(src_desc)->GetSampler();
+ if (!ValidateSampler(update_sampler, device_data_)) {
*error_code = "VUID-VkWriteDescriptorSet-descriptorType-00325";
std::stringstream error_str;
error_str << "Attempted copy update to sampler descriptor with invalid sampler: " << update_sampler << ".";
@@ -2059,15 +2330,15 @@ bool CoreChecks::VerifyCopyUpdateContents(const VkCopyDescriptorSet *update, con
}
break;
}
- case DescriptorClass::ImageSampler: {
+ case ImageSampler: {
for (uint32_t di = 0; di < update->descriptorCount; ++di) {
- const auto src_desc = src_set->GetDescriptorFromGlobalIndex(index + di);
+ const auto src_desc = src_set->descriptors_[index + di].get();
if (!src_desc->updated) continue;
auto img_samp_desc = static_cast<const ImageSamplerDescriptor *>(src_desc);
// First validate sampler
if (!img_samp_desc->IsImmutableSampler()) {
auto update_sampler = img_samp_desc->GetSampler();
- if (!ValidateSampler(update_sampler)) {
+ if (!ValidateSampler(update_sampler, device_data_)) {
*error_code = "VUID-VkWriteDescriptorSet-descriptorType-00325";
std::stringstream error_str;
error_str << "Attempted copy update to sampler descriptor with invalid sampler: " << update_sampler << ".";
@@ -2080,7 +2351,7 @@ bool CoreChecks::VerifyCopyUpdateContents(const VkCopyDescriptorSet *update, con
// Validate image
auto image_view = img_samp_desc->GetImageView();
auto image_layout = img_samp_desc->GetImageLayout();
- if (!ValidateImageUpdate(image_view, image_layout, type, func_name, error_code, error_msg)) {
+ if (!ValidateImageUpdate(image_view, image_layout, type, device_data_, func_name, error_code, error_msg)) {
std::stringstream error_str;
error_str << "Attempted copy update to combined image sampler descriptor failed due to: " << error_msg->c_str();
*error_msg = error_str.str();
@@ -2089,14 +2360,14 @@ bool CoreChecks::VerifyCopyUpdateContents(const VkCopyDescriptorSet *update, con
}
break;
}
- case DescriptorClass::Image: {
+ case Image: {
for (uint32_t di = 0; di < update->descriptorCount; ++di) {
- const auto src_desc = src_set->GetDescriptorFromGlobalIndex(index + di);
+ const auto src_desc = src_set->descriptors_[index + di].get();
if (!src_desc->updated) continue;
auto img_desc = static_cast<const ImageDescriptor *>(src_desc);
auto image_view = img_desc->GetImageView();
auto image_layout = img_desc->GetImageLayout();
- if (!ValidateImageUpdate(image_view, image_layout, type, func_name, error_code, error_msg)) {
+ if (!ValidateImageUpdate(image_view, image_layout, type, device_data_, func_name, error_code, error_msg)) {
std::stringstream error_str;
error_str << "Attempted copy update to image descriptor failed due to: " << error_msg->c_str();
*error_msg = error_str.str();
@@ -2105,12 +2376,12 @@ bool CoreChecks::VerifyCopyUpdateContents(const VkCopyDescriptorSet *update, con
}
break;
}
- case DescriptorClass::TexelBuffer: {
+ case TexelBuffer: {
for (uint32_t di = 0; di < update->descriptorCount; ++di) {
- const auto src_desc = src_set->GetDescriptorFromGlobalIndex(index + di);
+ const auto src_desc = src_set->descriptors_[index + di].get();
if (!src_desc->updated) continue;
- auto buffer_view = static_cast<const TexelDescriptor *>(src_desc)->GetBufferView();
- auto bv_state = device_data->GetBufferViewState(buffer_view);
+ auto buffer_view = static_cast<TexelDescriptor *>(src_desc)->GetBufferView();
+ auto bv_state = device_data_->GetBufferViewState(buffer_view);
if (!bv_state) {
*error_code = "VUID-VkWriteDescriptorSet-descriptorType-00323";
std::stringstream error_str;
@@ -2119,7 +2390,7 @@ bool CoreChecks::VerifyCopyUpdateContents(const VkCopyDescriptorSet *update, con
return false;
}
auto buffer = bv_state->create_info.buffer;
- if (!cvdescriptorset::ValidateBufferUsage(GetBufferState(buffer), type, error_code, error_msg)) {
+ if (!ValidateBufferUsage(device_data_->GetBufferState(buffer), type, error_code, error_msg)) {
std::stringstream error_str;
error_str << "Attempted copy update to texel buffer descriptor failed due to: " << error_msg->c_str();
*error_msg = error_str.str();
@@ -2128,12 +2399,12 @@ bool CoreChecks::VerifyCopyUpdateContents(const VkCopyDescriptorSet *update, con
}
break;
}
- case DescriptorClass::GeneralBuffer: {
+ case GeneralBuffer: {
for (uint32_t di = 0; di < update->descriptorCount; ++di) {
- const auto src_desc = src_set->GetDescriptorFromGlobalIndex(index + di);
+ const auto src_desc = src_set->descriptors_[index + di].get();
if (!src_desc->updated) continue;
- auto buffer = static_cast<const BufferDescriptor *>(src_desc)->GetBuffer();
- if (!cvdescriptorset::ValidateBufferUsage(GetBufferState(buffer), type, error_code, error_msg)) {
+ auto buffer = static_cast<BufferDescriptor *>(src_desc)->GetBuffer();
+ if (!ValidateBufferUsage(device_data_->GetBufferState(buffer), type, error_code, error_msg)) {
std::stringstream error_str;
error_str << "Attempted copy update to buffer descriptor failed due to: " << error_msg->c_str();
*error_msg = error_str.str();
@@ -2142,8 +2413,8 @@ bool CoreChecks::VerifyCopyUpdateContents(const VkCopyDescriptorSet *update, con
}
break;
}
- case DescriptorClass::InlineUniform:
- case DescriptorClass::AccelerationStructure:
+ case InlineUniform:
+ case AccelerationStructure:
break;
default:
assert(0); // We've already verified update type so should never get here
@@ -2153,10 +2424,10 @@ bool CoreChecks::VerifyCopyUpdateContents(const VkCopyDescriptorSet *update, con
return true;
}
// Update the common AllocateDescriptorSetsData
-void CoreChecks::UpdateAllocateDescriptorSetsData(const VkDescriptorSetAllocateInfo *p_alloc_info,
+void CoreChecks::UpdateAllocateDescriptorSetsData(const layer_data *dev_data, const VkDescriptorSetAllocateInfo *p_alloc_info,
cvdescriptorset::AllocateDescriptorSetsData *ds_data) {
for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
- auto layout = GetDescriptorSetLayout(this, p_alloc_info->pSetLayouts[i]);
+ auto layout = GetDescriptorSetLayout(dev_data, p_alloc_info->pSetLayouts[i]);
if (layout) {
ds_data->layout_nodes[i] = layout;
// Count total descriptors required per type
@@ -2170,18 +2441,18 @@ void CoreChecks::UpdateAllocateDescriptorSetsData(const VkDescriptorSetAllocateI
}
}
// Verify that the state at allocate time is correct, but don't actually allocate the sets yet
-bool CoreChecks::ValidateAllocateDescriptorSets(const VkDescriptorSetAllocateInfo *p_alloc_info,
+bool CoreChecks::ValidateAllocateDescriptorSets(const layer_data *dev_data, const VkDescriptorSetAllocateInfo *p_alloc_info,
const cvdescriptorset::AllocateDescriptorSetsData *ds_data) {
bool skip = false;
auto pool_state = GetDescriptorPoolState(p_alloc_info->descriptorPool);
for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
- auto layout = GetDescriptorSetLayout(this, p_alloc_info->pSetLayouts[i]);
+ auto layout = GetDescriptorSetLayout(dev_data, p_alloc_info->pSetLayouts[i]);
if (layout) { // nullptr layout indicates no valid layout handle for this device, validated/logged in object_tracker
if (layout->IsPushDescriptor()) {
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT,
HandleToUint64(p_alloc_info->pSetLayouts[i]), "VUID-VkDescriptorSetAllocateInfo-pSetLayouts-00308",
- "%s specified at pSetLayouts[%" PRIu32
+ "Layout %s specified at pSetLayouts[%" PRIu32
"] in vkAllocateDescriptorSets() was created with invalid flag %s set.",
report_data->FormatHandle(p_alloc_info->pSetLayouts[i]).c_str(), i,
"VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR");
@@ -2194,12 +2465,12 @@ bool CoreChecks::ValidateAllocateDescriptorSets(const VkDescriptorSetAllocateInf
}
}
}
- if (!device_extensions.vk_khr_maintenance1) {
+ if (!GetDeviceExtensions()->vk_khr_maintenance1) {
// Track number of descriptorSets allowable in this pool
if (pool_state->availableSets < p_alloc_info->descriptorSetCount) {
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,
HandleToUint64(pool_state->pool), "VUID-VkDescriptorSetAllocateInfo-descriptorSetCount-00306",
- "Unable to allocate %u descriptorSets from %s"
+ "Unable to allocate %u descriptorSets from pool %s"
". This pool only has %d descriptorSets remaining.",
p_alloc_info->descriptorSetCount, report_data->FormatHandle(pool_state->pool).c_str(),
pool_state->availableSets);
@@ -2210,7 +2481,7 @@ bool CoreChecks::ValidateAllocateDescriptorSets(const VkDescriptorSetAllocateInf
skip |= log_msg(
report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,
HandleToUint64(pool_state->pool), "VUID-VkDescriptorSetAllocateInfo-descriptorPool-00307",
- "Unable to allocate %u descriptors of type %s from %s"
+ "Unable to allocate %u descriptors of type %s from pool %s"
". This pool only has %d descriptors of this type remaining.",
ds_data->required_descriptors_by_type.at(it->first), string_VkDescriptorType(VkDescriptorType(it->first)),
report_data->FormatHandle(pool_state->pool).c_str(), pool_state->availableDescriptorTypeCount[it->first]);
@@ -2231,7 +2502,7 @@ bool CoreChecks::ValidateAllocateDescriptorSets(const VkDescriptorSetAllocateInf
}
if (count_allocate_info->descriptorSetCount == p_alloc_info->descriptorSetCount) {
for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
- auto layout = GetDescriptorSetLayout(this, p_alloc_info->pSetLayouts[i]);
+ auto layout = GetDescriptorSetLayout(dev_data, p_alloc_info->pSetLayouts[i]);
if (count_allocate_info->pDescriptorCounts[i] > layout->GetDescriptorCountFromBinding(layout->GetMaxBinding())) {
skip |= log_msg(
report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, 0,
@@ -2246,10 +2517,13 @@ bool CoreChecks::ValidateAllocateDescriptorSets(const VkDescriptorSetAllocateInf
return skip;
}
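The allocation-time check above reduces to two pool-capacity comparisons: enough free sets, and enough free descriptors of each requested type (and, as the hunk shows, the set-count/type-count checks are skipped when VK_KHR_maintenance1 is enabled). A minimal standalone sketch of that bookkeeping, using a hypothetical SimplePoolState rather than the layer's DESCRIPTOR_POOL_STATE:

#include <cstdint>
#include <map>
#include <string>

struct SimplePoolState {
    uint32_t availableSets;                                     // sets still unallocated in the pool
    std::map<uint32_t, uint32_t> availableDescriptorTypeCount;  // VkDescriptorType (as int) -> descriptors remaining
};

// required_by_type mirrors AllocateDescriptorSetsData::required_descriptors_by_type.
bool PoolCanSatisfy(const SimplePoolState &pool, uint32_t set_count,
                    const std::map<uint32_t, uint32_t> &required_by_type, std::string *error) {
    if (pool.availableSets < set_count) {
        *error = "not enough free descriptor sets in pool";  // cf. VUID-VkDescriptorSetAllocateInfo-descriptorSetCount-00306
        return false;
    }
    for (const auto &req : required_by_type) {
        auto it = pool.availableDescriptorTypeCount.find(req.first);
        const uint32_t remaining = (it == pool.availableDescriptorTypeCount.end()) ? 0 : it->second;
        if (remaining < req.second) {
            *error = "not enough descriptors of the requested type remaining";  // cf. VUID ...descriptorPool-00307
            return false;
        }
    }
    return true;
}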
// Decrement allocated sets from the pool and insert new sets into set_map
-void ValidationStateTracker::PerformAllocateDescriptorSets(const VkDescriptorSetAllocateInfo *p_alloc_info,
- const VkDescriptorSet *descriptor_sets,
- const cvdescriptorset::AllocateDescriptorSetsData *ds_data) {
- auto pool_state = descriptorPoolMap[p_alloc_info->descriptorPool].get();
+void CoreChecks::PerformAllocateDescriptorSets(const VkDescriptorSetAllocateInfo *p_alloc_info,
+ const VkDescriptorSet *descriptor_sets,
+ const cvdescriptorset::AllocateDescriptorSetsData *ds_data,
+ std::unordered_map<VkDescriptorPool, DESCRIPTOR_POOL_STATE *> *pool_map,
+ std::unordered_map<VkDescriptorSet, cvdescriptorset::DescriptorSet *> *set_map,
+ layer_data *dev_data) {
+ auto pool_state = (*pool_map)[p_alloc_info->descriptorPool];
// Account for sets and individual descriptors allocated from pool
pool_state->availableSets -= p_alloc_info->descriptorSetCount;
for (auto it = ds_data->required_descriptors_by_type.begin(); it != ds_data->required_descriptors_by_type.end(); ++it) {
@@ -2263,346 +2537,28 @@ void ValidationStateTracker::PerformAllocateDescriptorSets(const VkDescriptorSet
for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
uint32_t variable_count = variable_count_valid ? variable_count_info->pDescriptorCounts[i] : 0;
- std::unique_ptr<cvdescriptorset::DescriptorSet> new_ds(new cvdescriptorset::DescriptorSet(
- descriptor_sets[i], p_alloc_info->descriptorPool, ds_data->layout_nodes[i], variable_count, this));
- pool_state->sets.insert(new_ds.get());
+ auto new_ds = new cvdescriptorset::DescriptorSet(descriptor_sets[i], p_alloc_info->descriptorPool, ds_data->layout_nodes[i],
+ variable_count, this);
+
+ pool_state->sets.insert(new_ds);
new_ds->in_use.store(0);
- setMap[descriptor_sets[i]] = std::move(new_ds);
+ (*set_map)[descriptor_sets[i]] = new_ds;
}
}
-const BindingReqMap &cvdescriptorset::PrefilterBindRequestMap::FilteredMap(const CMD_BUFFER_STATE &cb_state,
- const PIPELINE_STATE &pipeline) {
- if (IsManyDescriptors()) {
+cvdescriptorset::PrefilterBindRequestMap::PrefilterBindRequestMap(cvdescriptorset::DescriptorSet &ds, const BindingReqMap &in_map,
+ GLOBAL_CB_NODE *cb_state)
+ : filtered_map_(), orig_map_(in_map) {
+ if (ds.GetTotalDescriptorCount() > kManyDescriptors_) {
filtered_map_.reset(new std::map<uint32_t, descriptor_req>());
- descriptor_set_.FilterBindingReqs(cb_state, pipeline, orig_map_, filtered_map_.get());
- return *filtered_map_;
- }
- return orig_map_;
-}
-
-// Starting at offset descriptor of given binding, parse over update_count
-// descriptor updates and verify that for any binding boundaries that are crossed, the next binding(s) are all consistent
-// Consistency means that their type, stage flags, and whether or not they use immutable samplers matches
-// If so, return true. If not, fill in error_msg and return false
-bool cvdescriptorset::VerifyUpdateConsistency(DescriptorSetLayout::ConstBindingIterator current_binding, uint32_t offset,
- uint32_t update_count, const char *type, const VkDescriptorSet set,
- std::string *error_msg) {
- // Verify consecutive bindings match (if needed)
- auto orig_binding = current_binding;
- // Track count of descriptors in the current_bindings that are remaining to be updated
- auto binding_remaining = current_binding.GetDescriptorCount();
- // First, it's legal to offset beyond your own binding so handle that case
- // Really this is just searching for the binding in which the update begins and adjusting offset accordingly
- while (offset >= binding_remaining && !current_binding.AtEnd()) {
- // Advance to next binding, decrement offset by binding size
- offset -= binding_remaining;
- ++current_binding;
- binding_remaining = current_binding.GetDescriptorCount(); // Accessors are safe if AtEnd
- }
- assert(!current_binding.AtEnd()); // As written assumes range check has been made before calling
- binding_remaining -= offset;
- while (update_count > binding_remaining) { // While our updates overstep current binding
- // Verify next consecutive binding matches type, stage flags & immutable sampler use
- auto next_binding = current_binding.Next();
- if (!current_binding.IsConsistent(next_binding)) {
- std::stringstream error_str;
- error_str << "Attempting " << type;
- if (current_binding.Layout()->IsPushDescriptor()) {
- error_str << " push descriptors";
- } else {
- error_str << " descriptor set " << set;
- }
- error_str << " binding #" << orig_binding.Binding() << " with #" << update_count
- << " descriptors being updated but this update oversteps the bounds of this binding and the next binding is "
- "not consistent with current binding so this update is invalid.";
- *error_msg = error_str.str();
- return false;
- }
- current_binding = next_binding;
- // For sake of this check consider the bindings updated and grab count for next binding
- update_count -= binding_remaining;
- binding_remaining = current_binding.GetDescriptorCount();
+ ds.FilterAndTrackBindingReqs(cb_state, orig_map_, filtered_map_.get());
}
- return true;
}
-
-// Validate the state for a given write update but don't actually perform the update
-// If an error would occur for this update, return false and fill in details in error_msg string
-bool CoreChecks::ValidateWriteUpdate(const DescriptorSet *dest_set, const VkWriteDescriptorSet *update, const char *func_name,
- std::string *error_code, std::string *error_msg) {
- const auto dest_layout = dest_set->GetLayout();
-
- // Verify dst layout still valid
- if (dest_layout->IsDestroyed()) {
- *error_code = "VUID-VkWriteDescriptorSet-dstSet-00320";
- string_sprintf(error_msg, "Cannot call %s to perform write update on %s which has been destroyed", func_name,
- dest_set->StringifySetAndLayout().c_str());
- return false;
- }
- // Verify dst binding exists
- if (!dest_layout->HasBinding(update->dstBinding)) {
- *error_code = "VUID-VkWriteDescriptorSet-dstBinding-00315";
- std::stringstream error_str;
- error_str << dest_set->StringifySetAndLayout() << " does not have binding " << update->dstBinding;
- *error_msg = error_str.str();
- return false;
- }
-
- DescriptorSetLayout::ConstBindingIterator dest(dest_layout.get(), update->dstBinding);
- // Make sure binding isn't empty
- if (0 == dest.GetDescriptorCount()) {
- *error_code = "VUID-VkWriteDescriptorSet-dstBinding-00316";
- std::stringstream error_str;
-        error_str << dest_set->StringifySetAndLayout() << " cannot update binding " << update->dstBinding
- << " that has 0 descriptors";
- *error_msg = error_str.str();
- return false;
- }
-
- // Verify idle ds
- if (dest_set->in_use.load() && !(dest.GetDescriptorBindingFlags() & (VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT |
- VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT))) {
- // TODO : Re-using Free Idle error code, need write update idle error code
- *error_code = "VUID-vkFreeDescriptorSets-pDescriptorSets-00309";
- std::stringstream error_str;
- error_str << "Cannot call " << func_name << " to perform write update on " << dest_set->StringifySetAndLayout()
- << " that is in use by a command buffer";
- *error_msg = error_str.str();
- return false;
- }
- // We know that binding is valid, verify update and do update on each descriptor
- auto start_idx = dest.GetGlobalIndexRange().start + update->dstArrayElement;
- auto type = dest.GetType();
- if (type != update->descriptorType) {
- *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00319";
- std::stringstream error_str;
- error_str << "Attempting write update to " << dest_set->StringifySetAndLayout() << " binding #" << update->dstBinding
- << " with type " << string_VkDescriptorType(type) << " but update type is "
- << string_VkDescriptorType(update->descriptorType);
- *error_msg = error_str.str();
- return false;
- }
- auto total_descriptors = dest_layout->GetTotalDescriptorCount();
- if (update->descriptorCount > (total_descriptors - start_idx)) {
- *error_code = "VUID-VkWriteDescriptorSet-dstArrayElement-00321";
- std::stringstream error_str;
- error_str << "Attempting write update to " << dest_set->StringifySetAndLayout() << " binding #" << update->dstBinding
- << " with " << total_descriptors - start_idx
- << " descriptors in that binding and all successive bindings of the set, but update of "
- << update->descriptorCount << " descriptors combined with update array element offset of "
- << update->dstArrayElement << " oversteps the available number of consecutive descriptors";
- *error_msg = error_str.str();
- return false;
- }
- if (type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) {
- if ((update->dstArrayElement % 4) != 0) {
- *error_code = "VUID-VkWriteDescriptorSet-descriptorType-02219";
- std::stringstream error_str;
- error_str << "Attempting write update to " << dest_set->StringifySetAndLayout() << " binding #" << update->dstBinding
- << " with "
- << "dstArrayElement " << update->dstArrayElement << " not a multiple of 4";
- *error_msg = error_str.str();
- return false;
- }
- if ((update->descriptorCount % 4) != 0) {
- *error_code = "VUID-VkWriteDescriptorSet-descriptorType-02220";
- std::stringstream error_str;
- error_str << "Attempting write update to " << dest_set->StringifySetAndLayout() << " binding #" << update->dstBinding
- << " with "
- << "descriptorCount " << update->descriptorCount << " not a multiple of 4";
- *error_msg = error_str.str();
- return false;
- }
- const auto *write_inline_info = lvl_find_in_chain<VkWriteDescriptorSetInlineUniformBlockEXT>(update->pNext);
- if (!write_inline_info || write_inline_info->dataSize != update->descriptorCount) {
- *error_code = "VUID-VkWriteDescriptorSet-descriptorType-02221";
- std::stringstream error_str;
- if (!write_inline_info) {
- error_str << "Attempting write update to " << dest_set->StringifySetAndLayout() << " binding #"
- << update->dstBinding << " with "
- << "VkWriteDescriptorSetInlineUniformBlockEXT missing";
- } else {
- error_str << "Attempting write update to " << dest_set->StringifySetAndLayout() << " binding #"
- << update->dstBinding << " with "
- << "VkWriteDescriptorSetInlineUniformBlockEXT dataSize " << write_inline_info->dataSize
- << " not equal to "
- << "VkWriteDescriptorSet descriptorCount " << update->descriptorCount;
- }
- *error_msg = error_str.str();
- return false;
- }
- // This error is probably unreachable due to the previous two errors
- if (write_inline_info && (write_inline_info->dataSize % 4) != 0) {
- *error_code = "VUID-VkWriteDescriptorSetInlineUniformBlockEXT-dataSize-02222";
- std::stringstream error_str;
- error_str << "Attempting write update to " << dest_set->StringifySetAndLayout() << " binding #" << update->dstBinding
- << " with "
- << "VkWriteDescriptorSetInlineUniformBlockEXT dataSize " << write_inline_info->dataSize
- << " not a multiple of 4";
- *error_msg = error_str.str();
- return false;
- }
- }
- // Verify consecutive bindings match (if needed)
- if (!VerifyUpdateConsistency(DescriptorSetLayout::ConstBindingIterator(dest_layout.get(), update->dstBinding),
- update->dstArrayElement, update->descriptorCount, "write update to", dest_set->GetSet(),
- error_msg)) {
- // TODO : Should break out "consecutive binding updates" language into valid usage statements
- *error_code = "VUID-VkWriteDescriptorSet-dstArrayElement-00321";
- return false;
- }
- // Update is within bounds and consistent so last step is to validate update contents
- if (!VerifyWriteUpdateContents(dest_set, update, start_idx, func_name, error_code, error_msg)) {
- std::stringstream error_str;
- error_str << "Write update to " << dest_set->StringifySetAndLayout() << " binding #" << update->dstBinding
- << " failed with error message: " << error_msg->c_str();
- *error_msg = error_str.str();
- return false;
- }
- // All checks passed, update is clean
- return true;
-}
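Two of the checks in the removed ValidateWriteUpdate are pure arithmetic: the update must fit between its flattened start index and the end of the set, and inline uniform block updates (which are byte-sized) must be 4-byte aligned. A small sketch of just that arithmetic, with plain integers standing in for the layout accessors used by the layer:

#include <cstdint>

// Bounds: descriptors are addressed by a flattened global index; the update starts at the binding's
// global start plus dstArrayElement and must not run past the set's total descriptor count
// (cf. VUID-VkWriteDescriptorSet-dstArrayElement-00321).
bool WriteFitsInSet(uint32_t binding_global_start, uint32_t dst_array_element,
                    uint32_t descriptor_count, uint32_t total_descriptors) {
    const uint32_t start_idx = binding_global_start + dst_array_element;
    if (start_idx > total_descriptors) return false;
    return descriptor_count <= (total_descriptors - start_idx);
}

// Inline uniform block updates express offset and size in bytes; both must be multiples of 4
// (cf. VUIDs ...descriptorType-02219 and ...descriptorType-02220).
bool InlineUniformUpdateAligned(uint32_t dst_array_element, uint32_t descriptor_count) {
    return (dst_array_element % 4) == 0 && (descriptor_count % 4) == 0;
}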
-
-// Verify that the contents of the update are ok, but don't perform actual update
-bool CoreChecks::VerifyWriteUpdateContents(const DescriptorSet *dest_set, const VkWriteDescriptorSet *update, const uint32_t index,
- const char *func_name, std::string *error_code, std::string *error_msg) {
- using ImageSamplerDescriptor = cvdescriptorset::ImageSamplerDescriptor;
- using SamplerDescriptor = cvdescriptorset::SamplerDescriptor;
-
- switch (update->descriptorType) {
- case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: {
- for (uint32_t di = 0; di < update->descriptorCount; ++di) {
- // Validate image
- auto image_view = update->pImageInfo[di].imageView;
- auto image_layout = update->pImageInfo[di].imageLayout;
- if (!ValidateImageUpdate(image_view, image_layout, update->descriptorType, func_name, error_code, error_msg)) {
- std::stringstream error_str;
- error_str << "Attempted write update to combined image sampler descriptor failed due to: "
- << error_msg->c_str();
- *error_msg = error_str.str();
- return false;
- }
- if (device_extensions.vk_khr_sampler_ycbcr_conversion) {
- ImageSamplerDescriptor *desc = (ImageSamplerDescriptor *)dest_set->GetDescriptorFromGlobalIndex(index + di);
- if (desc->IsImmutableSampler()) {
- auto sampler_state = GetSamplerState(desc->GetSampler());
- auto iv_state = GetImageViewState(image_view);
- if (iv_state && sampler_state) {
- if (iv_state->samplerConversion != sampler_state->samplerConversion) {
- *error_code = "VUID-VkWriteDescriptorSet-descriptorType-01948";
- std::stringstream error_str;
- error_str << "Attempted write update to combined image sampler and image view and sampler ycbcr "
- "conversions are not identical, sampler: "
- << desc->GetSampler() << " image view: " << iv_state->image_view << ".";
- *error_msg = error_str.str();
- return false;
- }
- }
- } else {
- auto iv_state = GetImageViewState(image_view);
- if (iv_state && (iv_state->samplerConversion != VK_NULL_HANDLE)) {
- *error_code = "VUID-VkWriteDescriptorSet-descriptorType-02738";
- std::stringstream error_str;
- error_str << "Because dstSet (" << update->dstSet << ") is bound to image view ("
- << iv_state->image_view
- << ") that includes a YCBCR conversion, it must have been allocated with a layout that "
- "includes an immutable sampler.";
- *error_msg = error_str.str();
- return false;
- }
- }
- }
- }
- }
- // fall through
- case VK_DESCRIPTOR_TYPE_SAMPLER: {
- for (uint32_t di = 0; di < update->descriptorCount; ++di) {
- SamplerDescriptor *desc = (SamplerDescriptor *)dest_set->GetDescriptorFromGlobalIndex(index + di);
- if (!desc->IsImmutableSampler()) {
- if (!ValidateSampler(update->pImageInfo[di].sampler)) {
- *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00325";
- std::stringstream error_str;
- error_str << "Attempted write update to sampler descriptor with invalid sampler: "
- << update->pImageInfo[di].sampler << ".";
- *error_msg = error_str.str();
- return false;
- }
- } else {
- // TODO : Warn here
- }
- }
- break;
- }
- case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
- case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
- case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE: {
- for (uint32_t di = 0; di < update->descriptorCount; ++di) {
- auto image_view = update->pImageInfo[di].imageView;
- auto image_layout = update->pImageInfo[di].imageLayout;
- if (!ValidateImageUpdate(image_view, image_layout, update->descriptorType, func_name, error_code, error_msg)) {
- std::stringstream error_str;
- error_str << "Attempted write update to image descriptor failed due to: " << error_msg->c_str();
- *error_msg = error_str.str();
- return false;
- }
- }
- break;
- }
- case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
- case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: {
- for (uint32_t di = 0; di < update->descriptorCount; ++di) {
- auto buffer_view = update->pTexelBufferView[di];
- auto bv_state = GetBufferViewState(buffer_view);
- if (!bv_state) {
- *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00323";
- std::stringstream error_str;
- error_str << "Attempted write update to texel buffer descriptor with invalid buffer view: " << buffer_view;
- *error_msg = error_str.str();
- return false;
- }
- auto buffer = bv_state->create_info.buffer;
- auto buffer_state = GetBufferState(buffer);
- // Verify that buffer underlying the view hasn't been destroyed prematurely
- if (!buffer_state) {
- *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00323";
- std::stringstream error_str;
- error_str << "Attempted write update to texel buffer descriptor failed because underlying buffer (" << buffer
- << ") has been destroyed: " << error_msg->c_str();
- *error_msg = error_str.str();
- return false;
- } else if (!cvdescriptorset::ValidateBufferUsage(buffer_state, update->descriptorType, error_code, error_msg)) {
- std::stringstream error_str;
- error_str << "Attempted write update to texel buffer descriptor failed due to: " << error_msg->c_str();
- *error_msg = error_str.str();
- return false;
- }
- }
- break;
- }
- case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
- case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
- case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
- case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
- for (uint32_t di = 0; di < update->descriptorCount; ++di) {
- if (!ValidateBufferUpdate(update->pBufferInfo + di, update->descriptorType, func_name, error_code, error_msg)) {
- std::stringstream error_str;
- error_str << "Attempted write update to buffer descriptor failed due to: " << error_msg->c_str();
- *error_msg = error_str.str();
- return false;
- }
- }
- break;
- }
- case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT:
- break;
- case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV:
- // XXX TODO
- break;
- default:
- assert(0); // We've already verified update type so should never get here
- break;
+cvdescriptorset::PrefilterBindRequestMap::PrefilterBindRequestMap(cvdescriptorset::DescriptorSet &ds, const BindingReqMap &in_map,
+ GLOBAL_CB_NODE *cb_state, PIPELINE_STATE *pipeline)
+ : filtered_map_(), orig_map_(in_map) {
+ if (ds.GetTotalDescriptorCount() > kManyDescriptors_) {
+ filtered_map_.reset(new std::map<uint32_t, descriptor_req>());
+ ds.FilterAndTrackBindingReqs(cb_state, pipeline, orig_map_, filtered_map_.get());
}
- // All checks passed so update contents are good
- return true;
}
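The two constructors added above implement a lazy prefilter: small sets keep the caller's original request map, and only sets with more than kManyDescriptors_ (64) descriptors pay for building a filtered copy. A self-contained sketch of that pattern, where the filter body is a placeholder rather than the layer's FilterAndTrackBindingReqs logic:

#include <cstdint>
#include <map>
#include <memory>

using ReqMap = std::map<uint32_t, int>;  // binding -> requirement flags

class LazyFilteredMap {
  public:
    static constexpr uint32_t kManyDescriptors = 64;  // same threshold as the layer's kManyDescriptors_
    LazyFilteredMap(uint32_t total_descriptor_count, const ReqMap &in_map) : orig_map_(in_map) {
        if (total_descriptor_count > kManyDescriptors) {
            filtered_map_.reset(new ReqMap());
            for (const auto &entry : in_map) {
                // The real code asks the DescriptorSet which bindings were already bound/validated;
                // here everything is kept as a placeholder.
                filtered_map_->insert(entry);
            }
        }
    }
    // Callers iterate Map(): the filtered copy when one was built, otherwise the original.
    const ReqMap &Map() const { return filtered_map_ ? *filtered_map_ : orig_map_; }

  private:
    std::unique_ptr<ReqMap> filtered_map_;
    const ReqMap &orig_map_;
};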
diff --git a/layers/descriptor_sets.h b/layers/descriptor_sets.h
index 598f4395e..012a17817 100644
--- a/layers/descriptor_sets.h
+++ b/layers/descriptor_sets.h
@@ -35,7 +35,7 @@
#include <vector>
class CoreChecks;
-class ValidationStateTracker;
+typedef CoreChecks layer_data;
// Descriptor Data structures
namespace cvdescriptorset {
@@ -48,6 +48,7 @@ struct IndexRange {
uint32_t start;
uint32_t end;
};
+typedef std::map<uint32_t, descriptor_req> BindingReqMap;
/*
* DescriptorSetLayoutDef/DescriptorSetLayout classes
@@ -100,6 +101,9 @@ class DescriptorSetLayoutDef {
const std::set<uint32_t> &GetSortedBindingSet() const { return non_empty_bindings_; }
// Return true if given binding is present in this layout
bool HasBinding(const uint32_t binding) const { return binding_to_index_map_.count(binding) > 0; };
+ // Return true if this DSL Def (referenced by the 1st layout) is compatible with another DSL Def (ref'd from the 2nd layout)
+ // else return false and update error_msg with description of incompatibility
+ bool IsCompatible(VkDescriptorSetLayout, VkDescriptorSetLayout, DescriptorSetLayoutDef const *const, std::string *) const;
// Return true if binding 1 beyond given exists and has same type, stageFlags & immutable sampler use
bool IsNextBindingConsistent(const uint32_t) const;
uint32_t GetIndexFromBinding(uint32_t binding) const;
@@ -126,6 +130,10 @@ class DescriptorSetLayoutDef {
VkDescriptorBindingFlagsEXT GetDescriptorBindingFlagsFromBinding(const uint32_t binding) const {
return GetDescriptorBindingFlagsFromIndex(GetIndexFromBinding(binding));
}
+ uint32_t GetIndexFromGlobalIndex(const uint32_t global_index) const;
+ VkDescriptorType GetTypeFromGlobalIndex(const uint32_t global_index) const {
+ return GetTypeFromIndex(GetIndexFromGlobalIndex(global_index));
+ }
VkSampler const *GetImmutableSamplerPtrFromBinding(const uint32_t) const;
VkSampler const *GetImmutableSamplerPtrFromIndex(const uint32_t) const;
// For a given binding and array index, return the corresponding index into the dynamic offset array
@@ -140,10 +148,12 @@ class DescriptorSetLayoutDef {
// For a particular binding, get the global index range
// This call should be guarded by a call to "HasBinding(binding)" to verify that the given binding exists
const IndexRange &GetGlobalIndexRangeFromBinding(const uint32_t) const;
- const cvdescriptorset::IndexRange &GetGlobalIndexRangeFromIndex(uint32_t index) const;
// Helper function to get the next valid binding for a descriptor
uint32_t GetNextValidBinding(const uint32_t) const;
+ // For a particular binding starting at offset and having update_count descriptors
+ // updated, verify that for any binding boundaries crossed, the update is consistent
+ bool VerifyUpdateConsistency(uint32_t, uint32_t, uint32_t, const char *, const VkDescriptorSet, std::string *) const;
bool IsPushDescriptor() const { return GetCreateFlags() & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR; };
struct BindingTypeStats {
@@ -164,7 +174,8 @@ class DescriptorSetLayoutDef {
     std::set<uint32_t> non_empty_bindings_;  // Contains non-empty bindings in numerical order
std::unordered_map<uint32_t, uint32_t> binding_to_index_map_;
     // The following map allows a non-iterative lookup of a binding from a global index...
- std::vector<IndexRange> global_index_range_; // range is exclusive of .end
+    std::map<uint32_t, uint32_t> global_start_to_index_map_;  // The binding index corresponding to a starting global (descriptor) index
+ std::unordered_map<uint32_t, IndexRange> binding_to_global_index_range_map_; // range is exclusive of .end
// For a given binding map to associated index in the dynamic offset array
std::unordered_map<uint32_t, uint32_t> binding_to_dynamic_array_idx_map_;
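The new global_start_to_index_map_ member backs GetIndexFromGlobalIndex: given a flattened descriptor index, find which binding it falls in. The implementation is not shown in this hunk; one plausible sketch, assuming the map is keyed by each binding's starting global index, is:

#include <cassert>
#include <cstdint>
#include <map>

uint32_t IndexFromGlobalIndexSketch(const std::map<uint32_t, uint32_t> &global_start_to_index,
                                    uint32_t global_index) {
    // upper_bound finds the first binding that starts *after* global_index;
    // the entry before it is the binding that contains it.
    auto it = global_start_to_index.upper_bound(global_index);
    assert(it != global_start_to_index.begin());  // global_index is expected to be within the layout's range
    --it;
    return it->second;
}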
@@ -188,11 +199,15 @@ class DescriptorSetLayout {
public:
// Constructors and destructor
DescriptorSetLayout(const VkDescriptorSetLayoutCreateInfo *p_create_info, const VkDescriptorSetLayout layout);
+ // Validate create info - should be called prior to creation
+ static bool ValidateCreateInfo(const debug_report_data *, const VkDescriptorSetLayoutCreateInfo *, const bool, const uint32_t,
+ const bool, const VkPhysicalDeviceDescriptorIndexingFeaturesEXT *descriptor_indexing_features,
+ const VkPhysicalDeviceInlineUniformBlockFeaturesEXT *inline_uniform_block_features,
+ const VkPhysicalDeviceInlineUniformBlockPropertiesEXT *inline_uniform_block_props);
bool HasBinding(const uint32_t binding) const { return layout_id_->HasBinding(binding); }
// Return true if this layout is compatible with passed in layout from a pipelineLayout,
// else return false and update error_msg with description of incompatibility
- // Return true if this layout is compatible with passed in layout
- bool IsCompatible(DescriptorSetLayout const *rh_ds_layout) const;
+ bool IsCompatible(DescriptorSetLayout const *const, std::string *) const;
// Straightforward Get functions
VkDescriptorSetLayout GetDescriptorSetLayout() const { return layout_; };
bool IsDestroyed() const { return layout_destroyed_; }
@@ -215,7 +230,6 @@ class DescriptorSetLayout {
return layout_id_->GetDescriptorSetLayoutBindingPtrFromBinding(binding);
}
const std::vector<safe_VkDescriptorSetLayoutBinding> &GetBindings() const { return layout_id_->GetBindings(); }
- const std::set<uint32_t> &GetSortedBindingSet() const { return layout_id_->GetSortedBindingSet(); }
uint32_t GetDescriptorCountFromIndex(const uint32_t index) const { return layout_id_->GetDescriptorCountFromIndex(index); }
uint32_t GetDescriptorCountFromBinding(const uint32_t binding) const {
return layout_id_->GetDescriptorCountFromBinding(binding);
@@ -232,6 +246,12 @@ class DescriptorSetLayout {
VkDescriptorBindingFlagsEXT GetDescriptorBindingFlagsFromBinding(const uint32_t binding) const {
return layout_id_->GetDescriptorBindingFlagsFromBinding(binding);
}
+ uint32_t GetIndexFromGlobalIndex(const uint32_t global_index) const {
+ return layout_id_->GetIndexFromGlobalIndex(global_index);
+ }
+ VkDescriptorType GetTypeFromGlobalIndex(const uint32_t global_index) const {
+ return GetTypeFromIndex(GetIndexFromGlobalIndex(global_index));
+ }
VkSampler const *GetImmutableSamplerPtrFromBinding(const uint32_t binding) const {
return layout_id_->GetImmutableSamplerPtrFromBinding(binding);
}
@@ -247,95 +267,19 @@ class DescriptorSetLayout {
const IndexRange &GetGlobalIndexRangeFromBinding(const uint32_t binding) const {
return layout_id_->GetGlobalIndexRangeFromBinding(binding);
}
- const IndexRange &GetGlobalIndexRangeFromIndex(uint32_t index) const { return layout_id_->GetGlobalIndexRangeFromIndex(index); }
-
// Helper function to get the next valid binding for a descriptor
uint32_t GetNextValidBinding(const uint32_t binding) const { return layout_id_->GetNextValidBinding(binding); }
- bool IsPushDescriptor() const { return layout_id_->IsPushDescriptor(); }
- bool IsVariableDescriptorCountFromIndex(uint32_t index) const {
- return !!(GetDescriptorBindingFlagsFromIndex(index) & VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT);
- }
- bool IsVariableDescriptorCount(uint32_t binding) const {
- return IsVariableDescriptorCountFromIndex(GetIndexFromBinding(binding));
+ // For a particular binding starting at offset and having update_count descriptors
+ // updated, verify that for any binding boundaries crossed, the update is consistent
+ bool VerifyUpdateConsistency(uint32_t current_binding, uint32_t offset, uint32_t update_count, const char *type,
+ const VkDescriptorSet set, std::string *error_msg) const {
+ return layout_id_->VerifyUpdateConsistency(current_binding, offset, update_count, type, set, error_msg);
}
+ bool IsPushDescriptor() const { return layout_id_->IsPushDescriptor(); }
using BindingTypeStats = DescriptorSetLayoutDef::BindingTypeStats;
const BindingTypeStats &GetBindingTypeStats() const { return layout_id_->GetBindingTypeStats(); }
- // Binding Iterator
- class ConstBindingIterator {
- public:
- ConstBindingIterator() = delete;
- ConstBindingIterator(const ConstBindingIterator &other) = default;
- ConstBindingIterator &operator=(const ConstBindingIterator &rhs) = default;
-
- ConstBindingIterator(const DescriptorSetLayout *layout) : layout_(layout), index_(0) { assert(layout); }
- ConstBindingIterator(const DescriptorSetLayout *layout, uint32_t binding) : ConstBindingIterator(layout) {
- index_ = layout->GetIndexFromBinding(binding);
- }
-
- VkDescriptorSetLayoutBinding const *GetDescriptorSetLayoutBindingPtr() const {
- return layout_->GetDescriptorSetLayoutBindingPtrFromIndex(index_);
- }
- uint32_t GetDescriptorCount() const { return layout_->GetDescriptorCountFromIndex(index_); }
- VkDescriptorType GetType() const { return layout_->GetTypeFromIndex(index_); }
- VkShaderStageFlags GetStageFlags() const { return layout_->GetStageFlagsFromIndex(index_); }
-
- VkDescriptorBindingFlagsEXT GetDescriptorBindingFlags() const {
- return layout_->GetDescriptorBindingFlagsFromIndex(index_);
- }
-
- bool IsVariableDescriptorCount() const { return layout_->IsVariableDescriptorCountFromIndex(index_); }
-
- VkSampler const *GetImmutableSamplerPtr() const { return layout_->GetImmutableSamplerPtrFromIndex(index_); }
- const IndexRange &GetGlobalIndexRange() const { return layout_->GetGlobalIndexRangeFromIndex(index_); }
- bool AtEnd() const { return index_ == layout_->GetBindingCount(); }
-
- // Return index into dynamic offset array for given binding
- int32_t GetDynamicOffsetIndex() const {
- return layout_->GetDynamicOffsetIndexFromBinding(Binding()); // There is only binding mapped access in layout_
- }
-
-        bool operator==(const ConstBindingIterator &rhs) { return (index_ == rhs.index_) && (layout_ == rhs.layout_); }
-
- ConstBindingIterator &operator++() {
- if (!AtEnd()) {
- index_++;
- }
- return *this;
- }
-
- bool IsConsistent(const ConstBindingIterator &other) const {
- if (AtEnd() || other.AtEnd()) {
- return false;
- }
- const auto *binding_ci = GetDescriptorSetLayoutBindingPtr();
- const auto *other_binding_ci = other.GetDescriptorSetLayoutBindingPtr();
- assert((binding_ci != nullptr) && (other_binding_ci != nullptr));
-
- if ((binding_ci->descriptorType != other_binding_ci->descriptorType) ||
- (binding_ci->stageFlags != other_binding_ci->stageFlags) ||
- (!hash_util::similar_for_nullity(binding_ci->pImmutableSamplers, other_binding_ci->pImmutableSamplers)) ||
- (GetDescriptorBindingFlags() != other.GetDescriptorBindingFlags())) {
- return false;
- }
- return true;
- }
-
- const DescriptorSetLayout *Layout() const { return layout_; }
- uint32_t Binding() const { return layout_->GetBindings()[index_].binding; }
- ConstBindingIterator Next() {
- ConstBindingIterator next(*this);
- ++next;
- return next;
- }
-
- private:
- const DescriptorSetLayout *layout_;
- uint32_t index_;
- };
- ConstBindingIterator end() const { return ConstBindingIterator(this, GetBindingCount()); }
-
private:
VkDescriptorSetLayout layout_;
bool layout_destroyed_;
@@ -358,7 +302,7 @@ class Descriptor {
virtual void WriteUpdate(const VkWriteDescriptorSet *, const uint32_t) = 0;
virtual void CopyUpdate(const Descriptor *) = 0;
// Create binding between resources of this descriptor and given cb_node
- virtual void UpdateDrawState(ValidationStateTracker *, CMD_BUFFER_STATE *) = 0;
+ virtual void UpdateDrawState(layer_data *, GLOBAL_CB_NODE *) = 0;
virtual DescriptorClass GetClass() const { return descriptor_class; };
// Special fast-path check for SamplerDescriptors that are immutable
virtual bool IsImmutableSampler() const { return false; };
@@ -369,29 +313,23 @@ class Descriptor {
bool updated; // Has descriptor been updated?
DescriptorClass descriptor_class;
};
-
-// Return true if this layout is compatible with passed in layout from a pipelineLayout,
-// else return false and update error_msg with description of incompatibility
-bool VerifySetLayoutCompatibility(DescriptorSetLayout const *lh_ds_layout, DescriptorSetLayout const *rh_ds_layout,
- std::string *error_msg);
-bool ValidateDescriptorSetLayoutCreateInfo(const debug_report_data *report_data, const VkDescriptorSetLayoutCreateInfo *create_info,
- const bool push_descriptor_ext, const uint32_t max_push_descriptors,
- const bool descriptor_indexing_ext,
- const VkPhysicalDeviceDescriptorIndexingFeaturesEXT *descriptor_indexing_features,
- const VkPhysicalDeviceInlineUniformBlockFeaturesEXT *inline_uniform_block_features,
- const VkPhysicalDeviceInlineUniformBlockPropertiesEXT *inline_uniform_block_props,
- const DeviceExtensions *device_extensions);
+// Shared helper functions - these are useful because the combined image sampler descriptor type
+// performs validation common to both sampler and image descriptors, so those descriptor types can share these helpers
+bool ValidateSampler(const VkSampler, layer_data *);
+bool ValidateImageUpdate(VkImageView, VkImageLayout, VkDescriptorType, layer_data *, const char *func_name, std::string *,
+ std::string *);
class SamplerDescriptor : public Descriptor {
public:
SamplerDescriptor(const VkSampler *);
void WriteUpdate(const VkWriteDescriptorSet *, const uint32_t) override;
void CopyUpdate(const Descriptor *) override;
- void UpdateDrawState(ValidationStateTracker *, CMD_BUFFER_STATE *) override;
+ void UpdateDrawState(layer_data *, GLOBAL_CB_NODE *) override;
virtual bool IsImmutableSampler() const override { return immutable_; };
VkSampler GetSampler() const { return sampler_; }
private:
+ // bool ValidateSampler(const VkSampler) const;
VkSampler sampler_;
bool immutable_;
};
@@ -401,7 +339,7 @@ class ImageSamplerDescriptor : public Descriptor {
ImageSamplerDescriptor(const VkSampler *);
void WriteUpdate(const VkWriteDescriptorSet *, const uint32_t) override;
void CopyUpdate(const Descriptor *) override;
- void UpdateDrawState(ValidationStateTracker *, CMD_BUFFER_STATE *) override;
+ void UpdateDrawState(layer_data *, GLOBAL_CB_NODE *) override;
virtual bool IsImmutableSampler() const override { return immutable_; };
VkSampler GetSampler() const { return sampler_; }
VkImageView GetImageView() const { return image_view_; }
@@ -419,7 +357,7 @@ class ImageDescriptor : public Descriptor {
ImageDescriptor(const VkDescriptorType);
void WriteUpdate(const VkWriteDescriptorSet *, const uint32_t) override;
void CopyUpdate(const Descriptor *) override;
- void UpdateDrawState(ValidationStateTracker *, CMD_BUFFER_STATE *) override;
+ void UpdateDrawState(layer_data *, GLOBAL_CB_NODE *) override;
virtual bool IsStorage() const override { return storage_; }
VkImageView GetImageView() const { return image_view_; }
VkImageLayout GetImageLayout() const { return image_layout_; }
@@ -435,7 +373,7 @@ class TexelDescriptor : public Descriptor {
TexelDescriptor(const VkDescriptorType);
void WriteUpdate(const VkWriteDescriptorSet *, const uint32_t) override;
void CopyUpdate(const Descriptor *) override;
- void UpdateDrawState(ValidationStateTracker *, CMD_BUFFER_STATE *) override;
+ void UpdateDrawState(layer_data *, GLOBAL_CB_NODE *) override;
virtual bool IsStorage() const override { return storage_; }
VkBufferView GetBufferView() const { return buffer_view_; }
@@ -449,7 +387,7 @@ class BufferDescriptor : public Descriptor {
BufferDescriptor(const VkDescriptorType);
void WriteUpdate(const VkWriteDescriptorSet *, const uint32_t) override;
void CopyUpdate(const Descriptor *) override;
- void UpdateDrawState(ValidationStateTracker *, CMD_BUFFER_STATE *) override;
+ void UpdateDrawState(layer_data *, GLOBAL_CB_NODE *) override;
virtual bool IsDynamic() const override { return dynamic_; }
virtual bool IsStorage() const override { return storage_; }
VkBuffer GetBuffer() const { return buffer_; }
@@ -472,7 +410,7 @@ class InlineUniformDescriptor : public Descriptor {
}
void WriteUpdate(const VkWriteDescriptorSet *, const uint32_t) override { updated = true; }
void CopyUpdate(const Descriptor *) override { updated = true; }
- void UpdateDrawState(ValidationStateTracker *, CMD_BUFFER_STATE *) override {}
+ void UpdateDrawState(layer_data *, GLOBAL_CB_NODE *) override {}
};
class AccelerationStructureDescriptor : public Descriptor {
@@ -483,7 +421,7 @@ class AccelerationStructureDescriptor : public Descriptor {
}
void WriteUpdate(const VkWriteDescriptorSet *, const uint32_t) override { updated = true; }
void CopyUpdate(const Descriptor *) override { updated = true; }
- void UpdateDrawState(ValidationStateTracker *, CMD_BUFFER_STATE *) override {}
+ void UpdateDrawState(layer_data *, GLOBAL_CB_NODE *) override {}
};
// Structs to contain common elements that need to be shared between Validate* and Perform* calls below
@@ -494,29 +432,17 @@ struct AllocateDescriptorSetsData {
};
// Helper functions for descriptor set functions that cross multiple sets
// "Validate" will make sure an update is ok without actually performing it
-bool ValidateUpdateDescriptorSets(const debug_report_data *, const CoreChecks *, uint32_t, const VkWriteDescriptorSet *, uint32_t,
+bool ValidateUpdateDescriptorSets(const debug_report_data *, const layer_data *, uint32_t, const VkWriteDescriptorSet *, uint32_t,
const VkCopyDescriptorSet *, const char *func_name);
// "Perform" does the update with the assumption that ValidateUpdateDescriptorSets() has passed for the given update
-void PerformUpdateDescriptorSets(ValidationStateTracker *, uint32_t, const VkWriteDescriptorSet *, uint32_t,
- const VkCopyDescriptorSet *);
-
-// Core Validation specific validation checks using DescriptorSet and DescriptorSetLayoutAccessors
-// TODO: migrate out of descriptor_set.cpp/h
-// For a particular binding starting at offset and having update_count descriptors
-// updated, verify that for any binding boundaries crossed, the update is consistent
-bool VerifyUpdateConsistency(DescriptorSetLayout::ConstBindingIterator current_binding, uint32_t offset, uint32_t update_count,
- const char *type, const VkDescriptorSet set, std::string *error_msg);
-
-// Validate buffer descriptor update info
-bool ValidateBufferUsage(BUFFER_STATE const *buffer_node, VkDescriptorType type, std::string *error_code, std::string *error_msg);
+void PerformUpdateDescriptorSets(layer_data *, uint32_t, const VkWriteDescriptorSet *, uint32_t, const VkCopyDescriptorSet *);
// Helper class to encapsulate the descriptor update template decoding logic
struct DecodedTemplateUpdate {
std::vector<VkWriteDescriptorSet> desc_writes;
std::vector<VkWriteDescriptorSetInlineUniformBlockEXT> inline_infos;
- DecodedTemplateUpdate(const ValidationStateTracker *device_data, VkDescriptorSet descriptorSet,
- const TEMPLATE_STATE *template_state, const void *pData,
- VkDescriptorSetLayout push_layout = VK_NULL_HANDLE);
+ DecodedTemplateUpdate(layer_data *device_data, VkDescriptorSet descriptorSet, const TEMPLATE_STATE *template_state,
+ const void *pData, VkDescriptorSetLayout push_layout = VK_NULL_HANDLE);
};
/*
@@ -539,15 +465,15 @@ struct DecodedTemplateUpdate {
*/
class DescriptorSet : public BASE_NODE {
public:
- using StateTracker = ValidationStateTracker;
DescriptorSet(const VkDescriptorSet, const VkDescriptorPool, const std::shared_ptr<DescriptorSetLayout const> &,
- uint32_t variable_count, StateTracker *);
+ uint32_t variable_count, layer_data *);
~DescriptorSet();
// A number of common Get* functions that return data based on layout from which this set was created
uint32_t GetTotalDescriptorCount() const { return p_layout_->GetTotalDescriptorCount(); };
uint32_t GetDynamicDescriptorCount() const { return p_layout_->GetDynamicDescriptorCount(); };
uint32_t GetBindingCount() const { return p_layout_->GetBindingCount(); };
VkDescriptorType GetTypeFromIndex(const uint32_t index) const { return p_layout_->GetTypeFromIndex(index); };
+ VkDescriptorType GetTypeFromGlobalIndex(const uint32_t index) const { return p_layout_->GetTypeFromGlobalIndex(index); };
VkDescriptorType GetTypeFromBinding(const uint32_t binding) const { return p_layout_->GetTypeFromBinding(binding); };
uint32_t GetDescriptorCountFromIndex(const uint32_t index) const { return p_layout_->GetDescriptorCountFromIndex(index); };
uint32_t GetDescriptorCountFromBinding(const uint32_t binding) const {
@@ -559,40 +485,54 @@ class DescriptorSet : public BASE_NODE {
}
// Return true if given binding is present in this set
bool HasBinding(const uint32_t binding) const { return p_layout_->HasBinding(binding); };
+ // Is this set compatible with the given layout?
+ bool IsCompatible(DescriptorSetLayout const *const, std::string *) const;
+    // For the given bindings, validate that state at draw time is correct, returning false on error and writing error details into the std::string*
+ bool ValidateDrawState(const std::map<uint32_t, descriptor_req> &, const std::vector<uint32_t> &, GLOBAL_CB_NODE *,
+ const char *caller, std::string *) const;
+    // For a given set of bindings, add any buffers and images that will be updated to their respective unordered_sets and
+    // return the number of objects inserted
+ uint32_t GetStorageUpdates(const std::map<uint32_t, descriptor_req> &, std::unordered_set<VkBuffer> *,
+ std::unordered_set<VkImageView> *) const;
std::string StringifySetAndLayout() const;
-
+ // Descriptor Update functions. These functions validate state and perform update separately
+ // Validate contents of a push descriptor update
+ bool ValidatePushDescriptorsUpdate(const debug_report_data *report_data, uint32_t write_count,
+ const VkWriteDescriptorSet *p_wds, const char *func_name);
// Perform a push update whose contents were just validated using ValidatePushDescriptorsUpdate
void PerformPushDescriptorsUpdate(uint32_t write_count, const VkWriteDescriptorSet *p_wds);
+ // Validate contents of a WriteUpdate
+ bool ValidateWriteUpdate(const debug_report_data *, const VkWriteDescriptorSet *, const char *, std::string *, std::string *);
// Perform a WriteUpdate whose contents were just validated using ValidateWriteUpdate
void PerformWriteUpdate(const VkWriteDescriptorSet *);
+ // Validate contents of a CopyUpdate
+ bool ValidateCopyUpdate(const debug_report_data *, const VkCopyDescriptorSet *, const DescriptorSet *, const char *func_name,
+ std::string *, std::string *);
// Perform a CopyUpdate whose contents were just validated using ValidateCopyUpdate
void PerformCopyUpdate(const VkCopyDescriptorSet *, const DescriptorSet *);
const std::shared_ptr<DescriptorSetLayout const> GetLayout() const { return p_layout_; };
- VkDescriptorSetLayout GetDescriptorSetLayout() const { return p_layout_->GetDescriptorSetLayout(); }
VkDescriptorSet GetSet() const { return set_; };
// Return unordered_set of all command buffers that this set is bound to
- std::unordered_set<CMD_BUFFER_STATE *> GetBoundCmdBuffers() const { return cb_bindings; }
+ std::unordered_set<GLOBAL_CB_NODE *> GetBoundCmdBuffers() const { return cb_bindings; }
// Bind given cmd_buffer to this descriptor set and
// update CB image layout map with image/imagesampler descriptor image layouts
- void UpdateDrawState(ValidationStateTracker *, CMD_BUFFER_STATE *, const std::map<uint32_t, descriptor_req> &);
+ void UpdateDrawState(GLOBAL_CB_NODE *, const std::map<uint32_t, descriptor_req> &);
// Track work that has been bound or validated to avoid duplicate work, important when large descriptor arrays
// are present
typedef std::unordered_set<uint32_t> TrackedBindings;
- static void FilterOneBindingReq(const BindingReqMap::value_type &binding_req_pair, BindingReqMap *out_req,
- const TrackedBindings &set, uint32_t limit);
- void FilterBindingReqs(const CMD_BUFFER_STATE &, const PIPELINE_STATE &, const BindingReqMap &in_req,
- BindingReqMap *out_req) const;
- void UpdateValidationCache(const CMD_BUFFER_STATE &cb_state, const PIPELINE_STATE &pipeline,
- const BindingReqMap &updated_bindings);
- void ClearCachedDynamicDescriptorValidation(CMD_BUFFER_STATE *cb_state) {
- cached_validation_[cb_state].dynamic_buffers.clear();
- }
- void ClearCachedValidation(CMD_BUFFER_STATE *cb_state) { cached_validation_.erase(cb_state); }
+ static void FilterAndTrackOneBindingReq(const BindingReqMap::value_type &binding_req_pair, const BindingReqMap &in_req,
+ BindingReqMap *out_req, TrackedBindings *set);
+ static void FilterAndTrackOneBindingReq(const BindingReqMap::value_type &binding_req_pair, const BindingReqMap &in_req,
+ BindingReqMap *out_req, TrackedBindings *set, uint32_t limit);
+ void FilterAndTrackBindingReqs(GLOBAL_CB_NODE *, const BindingReqMap &in_req, BindingReqMap *out_req);
+ void FilterAndTrackBindingReqs(GLOBAL_CB_NODE *, PIPELINE_STATE *, const BindingReqMap &in_req, BindingReqMap *out_req);
+ void ClearCachedDynamicDescriptorValidation(GLOBAL_CB_NODE *cb_state) { cached_validation_[cb_state].dynamic_buffers.clear(); }
+ void ClearCachedValidation(GLOBAL_CB_NODE *cb_state) { cached_validation_.erase(cb_state); }
// If given cmd_buffer is in the cb_bindings set, remove it
- void RemoveBoundCommandBuffer(CMD_BUFFER_STATE *cb_node) {
+ void RemoveBoundCommandBuffer(GLOBAL_CB_NODE *cb_node) {
cb_bindings.erase(cb_node);
ClearCachedValidation(cb_node);
}
@@ -600,28 +540,25 @@ class DescriptorSet : public BASE_NODE {
return p_layout_->GetImmutableSamplerPtrFromBinding(index);
};
// For a particular binding, get the global index
- const IndexRange GetGlobalIndexRangeFromBinding(const uint32_t binding, bool actual_length = false) const {
- if (actual_length && binding == p_layout_->GetMaxBinding() && IsVariableDescriptorCount(binding)) {
- IndexRange range = p_layout_->GetGlobalIndexRangeFromBinding(binding);
- auto diff = GetDescriptorCountFromBinding(binding) - GetVariableDescriptorCount();
- range.end -= diff;
- return range;
- }
+ const IndexRange &GetGlobalIndexRangeFromBinding(const uint32_t binding) const {
return p_layout_->GetGlobalIndexRangeFromBinding(binding);
};
// Return true if any part of set has ever been updated
bool IsUpdated() const { return some_update_; };
bool IsPushDescriptor() const { return p_layout_->IsPushDescriptor(); };
- bool IsVariableDescriptorCount(uint32_t binding) const { return p_layout_->IsVariableDescriptorCount(binding); }
- bool IsUpdateAfterBind(uint32_t binding) const {
- return !!(p_layout_->GetDescriptorBindingFlagsFromBinding(binding) & VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT);
+ bool IsVariableDescriptorCount(uint32_t binding) const {
+ return !!(p_layout_->GetDescriptorBindingFlagsFromBinding(binding) &
+ VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT);
}
uint32_t GetVariableDescriptorCount() const { return variable_count_; }
DESCRIPTOR_POOL_STATE *GetPoolState() const { return pool_state_; }
- const Descriptor *GetDescriptorFromGlobalIndex(const uint32_t index) const { return descriptors_[index].get(); }
- uint64_t GetChangeCount() const { return change_count_; }
private:
+ bool VerifyWriteUpdateContents(const VkWriteDescriptorSet *, const uint32_t, const char *, std::string *, std::string *) const;
+ bool VerifyCopyUpdateContents(const VkCopyDescriptorSet *, const DescriptorSet *, VkDescriptorType, uint32_t, const char *,
+ std::string *, std::string *) const;
+ bool ValidateBufferUsage(BUFFER_STATE const *, VkDescriptorType, std::string *, std::string *) const;
+ bool ValidateBufferUpdate(VkDescriptorBufferInfo const *, VkDescriptorType, const char *, std::string *, std::string *) const;
// Private helper to set all bound cmd buffers to INVALID state
void InvalidateBoundCmdBuffers();
bool some_update_; // has any part of the set ever been updated?
@@ -629,24 +566,25 @@ class DescriptorSet : public BASE_NODE {
DESCRIPTOR_POOL_STATE *pool_state_;
const std::shared_ptr<DescriptorSetLayout const> p_layout_;
std::vector<std::unique_ptr<Descriptor>> descriptors_;
- StateTracker *state_data_;
+ layer_data *device_data_;
+ const VkPhysicalDeviceLimits limits_;
uint32_t variable_count_;
- uint64_t change_count_;
// Cached binding and validation support:
//
// For the lifespan of a given command buffer recording, do lazy evaluation, caching, and dirtying of
// expensive validation operation (typically per-draw)
- typedef std::unordered_map<CMD_BUFFER_STATE *, TrackedBindings> TrackedBindingMap;
+ typedef std::unordered_map<GLOBAL_CB_NODE *, TrackedBindings> TrackedBindingMap;
+ typedef std::unordered_map<PIPELINE_STATE *, TrackedBindingMap> ValidatedBindings;
// Track the validation caching of bindings vs. the command buffer and draw state
- typedef std::unordered_map<uint32_t, CMD_BUFFER_STATE::ImageLayoutUpdateCount> VersionedBindings;
+ typedef std::unordered_map<uint32_t, GLOBAL_CB_NODE::ImageLayoutUpdateCount> VersionedBindings;
struct CachedValidation {
- TrackedBindings command_binding_and_usage; // Persistent for the life of the recording
- TrackedBindings non_dynamic_buffers; // Persistent for the life of the recording
- TrackedBindings dynamic_buffers; // Dirtied (flushed) each BindDescriptorSet
- std::unordered_map<const PIPELINE_STATE *, VersionedBindings> image_samplers; // Tested vs. changes to CB's ImageLayout
+ TrackedBindings command_binding_and_usage; // Persistent for the life of the recording
+ TrackedBindings non_dynamic_buffers; // Persistent for the life of the recording
+ TrackedBindings dynamic_buffers; // Dirtied (flushed) each BindDescriptorSet
+ std::unordered_map<PIPELINE_STATE *, VersionedBindings> image_samplers; // Tested vs. changes to CB's ImageLayout
};
- typedef std::unordered_map<const CMD_BUFFER_STATE *, CachedValidation> CachedValidationMap;
+ typedef std::unordered_map<GLOBAL_CB_NODE *, CachedValidation> CachedValidationMap;
// Image and ImageView bindings are validated per pipeline and not invalidate by repeated binding
CachedValidationMap cached_validation_;
};
@@ -656,12 +594,10 @@ class PrefilterBindRequestMap {
static const uint32_t kManyDescriptors_ = 64; // TODO base this number on measured data
std::unique_ptr<BindingReqMap> filtered_map_;
const BindingReqMap &orig_map_;
- const DescriptorSet &descriptor_set_;
- PrefilterBindRequestMap(const DescriptorSet &ds, const BindingReqMap &in_map)
- : filtered_map_(), orig_map_(in_map), descriptor_set_(ds) {}
- const BindingReqMap &FilteredMap(const CMD_BUFFER_STATE &cb_state, const PIPELINE_STATE &);
- bool IsManyDescriptors() const { return descriptor_set_.GetTotalDescriptorCount() > kManyDescriptors_; }
+ PrefilterBindRequestMap(DescriptorSet &ds, const BindingReqMap &in_map, GLOBAL_CB_NODE *cb_state);
+ PrefilterBindRequestMap(DescriptorSet &ds, const BindingReqMap &in_map, GLOBAL_CB_NODE *cb_state, PIPELINE_STATE *);
+ const BindingReqMap &Map() const { return (filtered_map_) ? *filtered_map_ : orig_map_; }
};
} // namespace cvdescriptorset
#endif // CORE_VALIDATION_DESCRIPTOR_SETS_H_
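The CachedValidation/TrackedBindings machinery above exists so the layer does not re-validate the same bindings on every draw within one command buffer recording. The core idea fits in a few lines; this sketch uses stand-in types, whereas the real cache is additionally keyed by pipeline and by the command buffer's image-layout version:

#include <cstdint>
#include <unordered_set>

using TrackedBindingsSketch = std::unordered_set<uint32_t>;

bool NeedsValidation(TrackedBindingsSketch *already_checked, uint32_t binding) {
    // insert() returns {iterator, inserted}; a fresh insert means this binding has not
    // been validated during the current recording yet, so the caller should check it now.
    return already_checked->insert(binding).second;
}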
diff --git a/layers/drawdispatch.cpp b/layers/drawdispatch.cpp
deleted file mode 100644
index 8779d956e..000000000
--- a/layers/drawdispatch.cpp
+++ /dev/null
@@ -1,484 +0,0 @@
-/* Copyright (c) 2015-2019 The Khronos Group Inc.
- * Copyright (c) 2015-2019 Valve Corporation
- * Copyright (c) 2015-2019 LunarG, Inc.
- * Copyright (C) 2015-2019 Google Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * Author: Cody Northrop <cnorthrop@google.com>
- * Author: Michael Lentine <mlentine@google.com>
- * Author: Tobin Ehlis <tobine@google.com>
- * Author: Chia-I Wu <olv@google.com>
- * Author: Chris Forbes <chrisf@ijw.co.nz>
- * Author: Mark Lobodzinski <mark@lunarg.com>
- * Author: Ian Elliott <ianelliott@google.com>
- * Author: Dave Houlton <daveh@lunarg.com>
- * Author: Dustin Graves <dustin@lunarg.com>
- * Author: Jeremy Hayes <jeremy@lunarg.com>
- * Author: Jon Ashburn <jon@lunarg.com>
- * Author: Karl Schultz <karl@lunarg.com>
- * Author: Mark Young <marky@lunarg.com>
- * Author: Mike Schuchardt <mikes@lunarg.com>
- * Author: Mike Weiblen <mikew@lunarg.com>
- * Author: Tony Barbour <tony@LunarG.com>
- * Author: John Zulauf <jzulauf@lunarg.com>
- * Author: Shannon McPherson <shannon@lunarg.com>
- */
-
-// Allow use of STL min and max functions in Windows
-#define NOMINMAX
-
-#include "chassis.h"
-#include "core_validation.h"
-
-static inline void UpdateResourceTrackingOnDraw(CMD_BUFFER_STATE *pCB) {
- pCB->cb_vertex_buffer_binding_info.push_back(pCB->current_vertex_buffer_binding_info);
-}
-
-// Generic function to handle validation for all CmdDraw* type functions
-bool CoreChecks::ValidateCmdDrawType(VkCommandBuffer cmd_buffer, bool indexed, VkPipelineBindPoint bind_point, CMD_TYPE cmd_type,
- const char *caller, VkQueueFlags queue_flags, const char *queue_flag_code,
- const char *renderpass_msg_code, const char *pipebound_msg_code,
- const char *dynamic_state_msg_code) const {
- bool skip = false;
- const CMD_BUFFER_STATE *cb_state = GetCBState(cmd_buffer);
- if (cb_state) {
- skip |= ValidateCmdQueueFlags(cb_state, caller, queue_flags, queue_flag_code);
- skip |= ValidateCmd(cb_state, cmd_type, caller);
- skip |=
- ValidateCmdBufDrawState(cb_state, cmd_type, indexed, bind_point, caller, pipebound_msg_code, dynamic_state_msg_code);
- skip |= (VK_PIPELINE_BIND_POINT_GRAPHICS == bind_point) ? OutsideRenderPass(cb_state, caller, renderpass_msg_code)
- : InsideRenderPass(cb_state, caller, renderpass_msg_code);
- }
- return skip;
-}
-
-// Generic function to handle state update for all CmdDraw* and CmdDispatch* type functions
-void ValidationStateTracker::UpdateStateCmdDrawDispatchType(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint bind_point) {
- UpdateDrawState(cb_state, bind_point);
- cb_state->hasDispatchCmd = true;
-}
-
-// Generic function to handle state update for all CmdDraw* type functions
-void ValidationStateTracker::UpdateStateCmdDrawType(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint bind_point) {
- UpdateStateCmdDrawDispatchType(cb_state, bind_point);
- UpdateResourceTrackingOnDraw(cb_state);
- cb_state->hasDrawCmd = true;
-}
-
-bool CoreChecks::PreCallValidateCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
- uint32_t firstVertex, uint32_t firstInstance) {
- return ValidateCmdDrawType(commandBuffer, false, VK_PIPELINE_BIND_POINT_GRAPHICS, CMD_DRAW, "vkCmdDraw()",
- VK_QUEUE_GRAPHICS_BIT, "VUID-vkCmdDraw-commandBuffer-cmdpool", "VUID-vkCmdDraw-renderpass",
- "VUID-vkCmdDraw-None-02700", "VUID-vkCmdDraw-commandBuffer-02701");
-}
-
-void CoreChecks::PreCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
- uint32_t firstVertex, uint32_t firstInstance) {
- GpuAllocateValidationResources(commandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS);
-}
-
-void ValidationStateTracker::PostCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
- uint32_t firstVertex, uint32_t firstInstance) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
- UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
-}
-
-bool CoreChecks::PreCallValidateCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
- uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) {
- bool skip = ValidateCmdDrawType(commandBuffer, true, VK_PIPELINE_BIND_POINT_GRAPHICS, CMD_DRAWINDEXED, "vkCmdDrawIndexed()",
- VK_QUEUE_GRAPHICS_BIT, "VUID-vkCmdDrawIndexed-commandBuffer-cmdpool",
- "VUID-vkCmdDrawIndexed-renderpass", "VUID-vkCmdDrawIndexed-None-02700",
- "VUID-vkCmdDrawIndexed-commandBuffer-02701");
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
- if (!skip && (cb_state->status & CBSTATUS_INDEX_BUFFER_BOUND)) {
- unsigned int index_size = 0;
- const auto &index_buffer_binding = cb_state->index_buffer_binding;
- if (index_buffer_binding.index_type == VK_INDEX_TYPE_UINT16) {
- index_size = 2;
- } else if (index_buffer_binding.index_type == VK_INDEX_TYPE_UINT32) {
- index_size = 4;
- } else if (index_buffer_binding.index_type == VK_INDEX_TYPE_UINT8_EXT) {
- index_size = 1;
- }
- VkDeviceSize end_offset = (index_size * ((VkDeviceSize)firstIndex + indexCount)) + index_buffer_binding.offset;
- if (end_offset > index_buffer_binding.size) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
- HandleToUint64(index_buffer_binding.buffer), "VUID-vkCmdDrawIndexed-indexSize-00463",
- "vkCmdDrawIndexed() index size (%d) * (firstIndex (%d) + indexCount (%d)) "
- "+ binding offset (%" PRIuLEAST64 ") = an ending offset of %" PRIuLEAST64
- " bytes, "
- "which is greater than the index buffer size (%" PRIuLEAST64 ").",
- index_size, firstIndex, indexCount, index_buffer_binding.offset, end_offset, index_buffer_binding.size);
- }
- }
- return skip;
-}
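A worked example of the bounds check above, using hypothetical values (not taken from the source): with VK_INDEX_TYPE_UINT16 the index size is 2 bytes, so firstIndex = 100, indexCount = 50, and a binding offset of 64 bytes give

    VkDeviceSize end_offset = 2 * ((VkDeviceSize)100 + 50) + 64;  // = 364 bytes
    // VUID-vkCmdDrawIndexed-indexSize-00463 is reported when end_offset > index_buffer_binding.size

so any bound index-buffer range smaller than 364 bytes would be flagged.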
-
-void CoreChecks::PreCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
- uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) {
- GpuAllocateValidationResources(commandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS);
-}
-
-void ValidationStateTracker::PostCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount,
- uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset,
- uint32_t firstInstance) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
- UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
-}
-
-bool CoreChecks::PreCallValidateCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count,
- uint32_t stride) {
- bool skip = ValidateCmdDrawType(commandBuffer, false, VK_PIPELINE_BIND_POINT_GRAPHICS, CMD_DRAWINDIRECT, "vkCmdDrawIndirect()",
- VK_QUEUE_GRAPHICS_BIT, "VUID-vkCmdDrawIndirect-commandBuffer-cmdpool",
- "VUID-vkCmdDrawIndirect-renderpass", "VUID-vkCmdDrawIndirect-None-02700",
- "VUID-vkCmdDrawIndirect-commandBuffer-02701");
- const BUFFER_STATE *buffer_state = GetBufferState(buffer);
- skip |= ValidateMemoryIsBoundToBuffer(buffer_state, "vkCmdDrawIndirect()", "VUID-vkCmdDrawIndirect-buffer-02708");
- skip |= ValidateBufferUsageFlags(buffer_state, VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT, true, "VUID-vkCmdDrawIndirect-buffer-02709",
- "vkCmdDrawIndirect()", "VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT");
- if (count > 1) {
- skip |= ValidateCmdDrawStrideWithStruct(commandBuffer, "VUID-vkCmdDrawIndirect-drawCount-00476", stride,
- "VkDrawIndirectCommand", sizeof(VkDrawIndirectCommand));
- skip |=
- ValidateCmdDrawStrideWithBuffer(commandBuffer, "VUID-vkCmdDrawIndirect-drawCount-00488", stride,
- "VkDrawIndirectCommand", sizeof(VkDrawIndirectCommand), count, offset, buffer_state);
- }
- // TODO: If the drawIndirectFirstInstance feature is not enabled, all the firstInstance members of the
- // VkDrawIndirectCommand structures accessed by this command must be 0, which will require access to the contents of 'buffer'.
- return skip;
-}
-
-void CoreChecks::PreCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count,
- uint32_t stride) {
- GpuAllocateValidationResources(commandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS);
-}
-
-void ValidationStateTracker::PostCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
- uint32_t count, uint32_t stride) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
- BUFFER_STATE *buffer_state = GetBufferState(buffer);
- UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
- AddCommandBufferBindingBuffer(cb_state, buffer_state);
-}
-
-bool CoreChecks::PreCallValidateCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
- uint32_t count, uint32_t stride) {
- bool skip = ValidateCmdDrawType(
- commandBuffer, true, VK_PIPELINE_BIND_POINT_GRAPHICS, CMD_DRAWINDEXEDINDIRECT, "vkCmdDrawIndexedIndirect()",
- VK_QUEUE_GRAPHICS_BIT, "VUID-vkCmdDrawIndexedIndirect-commandBuffer-cmdpool", "VUID-vkCmdDrawIndexedIndirect-renderpass",
- "VUID-vkCmdDrawIndexedIndirect-None-02700", "VUID-vkCmdDrawIndexedIndirect-commandBuffer-02701");
- const BUFFER_STATE *buffer_state = GetBufferState(buffer);
- skip |= ValidateMemoryIsBoundToBuffer(buffer_state, "vkCmdDrawIndexedIndirect()", "VUID-vkCmdDrawIndexedIndirect-buffer-02708");
- skip |= ValidateBufferUsageFlags(buffer_state, VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT, true,
- "VUID-vkCmdDrawIndexedIndirect-buffer-02709", "vkCmdDrawIndexedIndirect()",
- "VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT");
- if (count > 1) {
- skip |= ValidateCmdDrawStrideWithStruct(commandBuffer, "VUID-vkCmdDrawIndexedIndirect-drawCount-00528", stride,
- "VkDrawIndexedIndirectCommand", sizeof(VkDrawIndexedIndirectCommand));
- skip |= ValidateCmdDrawStrideWithBuffer(commandBuffer, "VUID-vkCmdDrawIndexedIndirect-drawCount-00540", stride,
- "VkDrawIndexedIndirectCommand", sizeof(VkDrawIndexedIndirectCommand), count, offset,
- buffer_state);
- }
- // TODO: If the drawIndirectFirstInstance feature is not enabled, all the firstInstance members of the
- // VkDrawIndexedIndirectCommand structures accessed by this command must be 0, which will require access to the contents of
- // 'buffer'.
- return skip;
-}
-
-void CoreChecks::PreCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
- uint32_t count, uint32_t stride) {
- GpuAllocateValidationResources(commandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS);
-}
-
-void ValidationStateTracker::PostCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
- VkDeviceSize offset, uint32_t count, uint32_t stride) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
- BUFFER_STATE *buffer_state = GetBufferState(buffer);
- UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
- AddCommandBufferBindingBuffer(cb_state, buffer_state);
-}
-
-bool CoreChecks::PreCallValidateCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
- bool skip = false;
- skip |= ValidateCmdDrawType(commandBuffer, false, VK_PIPELINE_BIND_POINT_COMPUTE, CMD_DISPATCH, "vkCmdDispatch()",
- VK_QUEUE_COMPUTE_BIT, "VUID-vkCmdDispatch-commandBuffer-cmdpool", "VUID-vkCmdDispatch-renderpass",
- "VUID-vkCmdDispatch-None-02700", kVUIDUndefined);
- return skip;
-}
-
-void CoreChecks::PreCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
- GpuAllocateValidationResources(commandBuffer, VK_PIPELINE_BIND_POINT_COMPUTE);
-}
-
-void ValidationStateTracker::PostCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
- UpdateStateCmdDrawDispatchType(cb_state, VK_PIPELINE_BIND_POINT_COMPUTE);
-}
-
-bool CoreChecks::PreCallValidateCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) {
- bool skip =
- ValidateCmdDrawType(commandBuffer, false, VK_PIPELINE_BIND_POINT_COMPUTE, CMD_DISPATCHINDIRECT, "vkCmdDispatchIndirect()",
- VK_QUEUE_COMPUTE_BIT, "VUID-vkCmdDispatchIndirect-commandBuffer-cmdpool",
- "VUID-vkCmdDispatchIndirect-renderpass", "VUID-vkCmdDispatchIndirect-None-02700", kVUIDUndefined);
- const BUFFER_STATE *buffer_state = GetBufferState(buffer);
- skip |= ValidateMemoryIsBoundToBuffer(buffer_state, "vkCmdDispatchIndirect()", "VUID-vkCmdDispatchIndirect-buffer-02708");
- skip |=
- ValidateBufferUsageFlags(buffer_state, VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT, true, "VUID-vkCmdDispatchIndirect-buffer-02709",
- "vkCmdDispatchIndirect()", "VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT");
- return skip;
-}
-
-void CoreChecks::PreCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) {
- GpuAllocateValidationResources(commandBuffer, VK_PIPELINE_BIND_POINT_COMPUTE);
-}
-
-void ValidationStateTracker::PostCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
- VkDeviceSize offset) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
- UpdateStateCmdDrawDispatchType(cb_state, VK_PIPELINE_BIND_POINT_COMPUTE);
- BUFFER_STATE *buffer_state = GetBufferState(buffer);
- AddCommandBufferBindingBuffer(cb_state, buffer_state);
-}
-
-bool CoreChecks::PreCallValidateCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
- VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
- uint32_t stride) {
- bool skip = false;
- if (offset & 3) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(commandBuffer), "VUID-vkCmdDrawIndirectCountKHR-offset-02710",
- "vkCmdDrawIndirectCountKHR() parameter, VkDeviceSize offset (0x%" PRIxLEAST64 "), is not a multiple of 4.",
- offset);
- }
-
- if (countBufferOffset & 3) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(commandBuffer), "VUID-vkCmdDrawIndirectCountKHR-countBufferOffset-02716",
- "vkCmdDrawIndirectCountKHR() parameter, VkDeviceSize countBufferOffset (0x%" PRIxLEAST64
- "), is not a multiple of 4.",
- countBufferOffset);
- }
- skip |= ValidateCmdDrawStrideWithStruct(commandBuffer, "VUID-vkCmdDrawIndirectCountKHR-stride-03110", stride,
- "VkDrawIndirectCommand", sizeof(VkDrawIndirectCommand));
- if (maxDrawCount > 1) {
- const BUFFER_STATE *buffer_state = GetBufferState(buffer);
- skip |= ValidateCmdDrawStrideWithBuffer(commandBuffer, "VUID-vkCmdDrawIndirectCountKHR-maxDrawCount-03111", stride,
- "VkDrawIndirectCommand", sizeof(VkDrawIndirectCommand), maxDrawCount, offset,
- buffer_state);
- }
-
- skip |= ValidateCmdDrawType(commandBuffer, false, VK_PIPELINE_BIND_POINT_GRAPHICS, CMD_DRAWINDIRECTCOUNTKHR,
- "vkCmdDrawIndirectCountKHR()", VK_QUEUE_GRAPHICS_BIT,
- "VUID-vkCmdDrawIndirectCountKHR-commandBuffer-cmdpool", "VUID-vkCmdDrawIndirectCountKHR-renderpass",
- "VUID-vkCmdDrawIndirectCountKHR-None-02700", "VUID-vkCmdDrawIndirectCountKHR-commandBuffer-02701");
- const BUFFER_STATE *buffer_state = GetBufferState(buffer);
- const BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
- skip |=
- ValidateMemoryIsBoundToBuffer(buffer_state, "vkCmdDrawIndirectCountKHR()", "VUID-vkCmdDrawIndirectCountKHR-buffer-02708");
- skip |= ValidateMemoryIsBoundToBuffer(count_buffer_state, "vkCmdDrawIndirectCountKHR()",
- "VUID-vkCmdDrawIndirectCountKHR-countBuffer-02714");
- skip |= ValidateBufferUsageFlags(buffer_state, VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT, true,
- "VUID-vkCmdDrawIndirectCountKHR-buffer-02709", "vkCmdDrawIndirectCountKHR()",
- "VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT");
- skip |= ValidateBufferUsageFlags(count_buffer_state, VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT, true,
- "VUID-vkCmdDrawIndirectCountKHR-countBuffer-02715", "vkCmdDrawIndirectCountKHR()",
- "VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT");
- return skip;
-}
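The (offset & 3) test above is the usual power-of-two alignment check: the low two bits are non-zero exactly when the value is not a multiple of 4. A small sketch with a hypothetical offset:

    VkDeviceSize offset = 0x1006;           // hypothetical value, 2 bytes past a 4-byte boundary
    bool misaligned = (offset & 3) != 0;    // true, so VUID-vkCmdDrawIndirectCountKHR-offset-02710 would be reported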
-
-void ValidationStateTracker::PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
- VkDeviceSize offset, VkBuffer countBuffer,
- VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
- uint32_t stride) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
- BUFFER_STATE *buffer_state = GetBufferState(buffer);
- BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
- UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
- AddCommandBufferBindingBuffer(cb_state, buffer_state);
- AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
-}
-
-bool CoreChecks::PreCallValidateCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
- VkBuffer countBuffer, VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount, uint32_t stride) {
- bool skip = false;
- if (offset & 3) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(commandBuffer), "VUID-vkCmdDrawIndexedIndirectCountKHR-offset-02710",
- "vkCmdDrawIndexedIndirectCountKHR() parameter, VkDeviceSize offset (0x%" PRIxLEAST64
- "), is not a multiple of 4.",
- offset);
- }
-
- if (countBufferOffset & 3) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(commandBuffer), "VUID-vkCmdDrawIndexedIndirectCountKHR-countBufferOffset-02716",
- "vkCmdDrawIndexedIndirectCountKHR() parameter, VkDeviceSize countBufferOffset (0x%" PRIxLEAST64
- "), is not a multiple of 4.",
- countBufferOffset);
- }
-
- skip |= ValidateCmdDrawStrideWithStruct(commandBuffer, "VUID-vkCmdDrawIndexedIndirectCountKHR-stride-03142", stride,
- "VkDrawIndirectCommand", sizeof(VkDrawIndexedIndirectCommand));
- if (maxDrawCount > 1) {
- const BUFFER_STATE *buffer_state = GetBufferState(buffer);
- skip |= ValidateCmdDrawStrideWithBuffer(commandBuffer, "VUID-vkCmdDrawIndexedIndirectCountKHR-maxDrawCount-03143", stride,
- "VkDrawIndirectCommand", sizeof(VkDrawIndexedIndirectCommand), maxDrawCount, offset,
- buffer_state);
- }
-
- skip |= ValidateCmdDrawType(
- commandBuffer, true, VK_PIPELINE_BIND_POINT_GRAPHICS, CMD_DRAWINDEXEDINDIRECTCOUNTKHR, "vkCmdDrawIndexedIndirectCountKHR()",
- VK_QUEUE_GRAPHICS_BIT, "VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-cmdpool",
- "VUID-vkCmdDrawIndexedIndirectCountKHR-renderpass", "VUID-vkCmdDrawIndexedIndirectCountKHR-None-02700",
- "VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-02701");
- const BUFFER_STATE *buffer_state = GetBufferState(buffer);
- const BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
- skip |= ValidateMemoryIsBoundToBuffer(buffer_state, "vkCmdDrawIndexedIndirectCountKHR()",
- "VUID-vkCmdDrawIndexedIndirectCountKHR-buffer-02708");
- skip |= ValidateMemoryIsBoundToBuffer(count_buffer_state, "vkCmdDrawIndexedIndirectCountKHR()",
- "VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-02714");
- skip |= ValidateBufferUsageFlags(buffer_state, VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT, true,
- "VUID-vkCmdDrawIndexedIndirectCountKHR-buffer-02709", "vkCmdDrawIndexedIndirectCountKHR()",
- "VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT");
- skip |= ValidateBufferUsageFlags(count_buffer_state, VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT, true,
- "VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-02715",
- "vkCmdDrawIndexedIndirectCountKHR()", "VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT");
- return skip;
-}
-
-void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
- VkDeviceSize offset, VkBuffer countBuffer,
- VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
- uint32_t stride) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
- BUFFER_STATE *buffer_state = GetBufferState(buffer);
- BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
- UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
- AddCommandBufferBindingBuffer(cb_state, buffer_state);
- AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
-}
-
-void CoreChecks::PreCallRecordCmdTraceRaysNV(VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer,
- VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer,
- VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride,
- VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset,
- VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer,
- VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride,
- uint32_t width, uint32_t height, uint32_t depth) {
- GpuAllocateValidationResources(commandBuffer, VK_PIPELINE_BIND_POINT_RAY_TRACING_NV);
-}
-
-void CoreChecks::PostCallRecordCmdTraceRaysNV(VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer,
- VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer,
- VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride,
- VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset,
- VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer,
- VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride,
- uint32_t width, uint32_t height, uint32_t depth) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
- UpdateStateCmdDrawDispatchType(cb_state, VK_PIPELINE_BIND_POINT_RAY_TRACING_NV);
- cb_state->hasTraceRaysCmd = true;
-}
-
-bool CoreChecks::PreCallValidateCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask) {
- bool skip = ValidateCmdDrawType(commandBuffer, false, VK_PIPELINE_BIND_POINT_GRAPHICS, CMD_DRAWMESHTASKSNV,
- "vkCmdDrawMeshTasksNV()", VK_QUEUE_GRAPHICS_BIT,
- "VUID-vkCmdDrawMeshTasksNV-commandBuffer-cmdpool", "VUID-vkCmdDrawMeshTasksNV-renderpass",
- "VUID-vkCmdDrawMeshTasksNV-None-02700", "VUID-vkCmdDrawMeshTasksNV-commandBuffer-02701");
- return skip;
-}
-
-void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount,
- uint32_t firstTask) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
- UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
-}
-
-bool CoreChecks::PreCallValidateCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
- uint32_t drawCount, uint32_t stride) {
- bool skip = ValidateCmdDrawType(commandBuffer, false, VK_PIPELINE_BIND_POINT_GRAPHICS, CMD_DRAWMESHTASKSINDIRECTNV,
- "vkCmdDrawMeshTasksIndirectNV()", VK_QUEUE_GRAPHICS_BIT,
- "VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-cmdpool",
- "VUID-vkCmdDrawMeshTasksIndirectNV-renderpass", "VUID-vkCmdDrawMeshTasksIndirectNV-None-02700",
- "VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-02701");
- const BUFFER_STATE *buffer_state = GetBufferState(buffer);
- skip |= ValidateMemoryIsBoundToBuffer(buffer_state, "vkCmdDrawMeshTasksIndirectNV()",
- "VUID-vkCmdDrawMeshTasksIndirectNV-buffer-02708");
- skip |= ValidateBufferUsageFlags(buffer_state, VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT, true,
- "VUID-vkCmdDrawMeshTasksIndirectNV-buffer-02709", "vkCmdDrawMeshTasksIndirectNV()",
- "VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT");
- if (drawCount > 1) {
- skip |= ValidateCmdDrawStrideWithBuffer(commandBuffer, "VUID-vkCmdDrawMeshTasksIndirectNV-drawCount-02157", stride,
- "VkDrawMeshTasksIndirectCommandNV", sizeof(VkDrawMeshTasksIndirectCommandNV),
- drawCount, offset, buffer_state);
- }
- return skip;
-}
-
-void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
- VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
- UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
- BUFFER_STATE *buffer_state = GetBufferState(buffer);
- if (buffer_state) {
- AddCommandBufferBindingBuffer(cb_state, buffer_state);
- }
-}
-
-bool CoreChecks::PreCallValidateCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
- VkBuffer countBuffer, VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount, uint32_t stride) {
- bool skip = ValidateCmdDrawType(
- commandBuffer, false, VK_PIPELINE_BIND_POINT_GRAPHICS, CMD_DRAWMESHTASKSINDIRECTCOUNTNV,
- "vkCmdDrawMeshTasksIndirectCountNV()", VK_QUEUE_GRAPHICS_BIT,
- "VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-cmdpool", "VUID-vkCmdDrawMeshTasksIndirectCountNV-renderpass",
- "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02700", "VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-02701");
- const BUFFER_STATE *buffer_state = GetBufferState(buffer);
- const BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
- skip |= ValidateMemoryIsBoundToBuffer(buffer_state, "vkCmdDrawMeshTasksIndirectCountNV()",
- "VUID-vkCmdDrawMeshTasksIndirectCountNV-buffer-02708");
- skip |= ValidateMemoryIsBoundToBuffer(count_buffer_state, "vkCmdDrawMeshTasksIndirectCountNV()",
- "VUID-vkCmdDrawMeshTasksIndirectCountNV-countBuffer-02714");
- skip |= ValidateBufferUsageFlags(buffer_state, VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT, true,
- "VUID-vkCmdDrawMeshTasksIndirectCountNV-buffer-02709", "vkCmdDrawIndexedIndirectCountKHR()",
- "VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT");
- skip |= ValidateBufferUsageFlags(count_buffer_state, VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT, true,
- "VUID-vkCmdDrawMeshTasksIndirectCountNV-countBuffer-02715",
- "vkCmdDrawIndexedIndirectCountKHR()", "VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT");
- skip |= ValidateCmdDrawStrideWithStruct(commandBuffer, "VUID-vkCmdDrawMeshTasksIndirectCountNV-stride-02182", stride,
- "VkDrawMeshTasksIndirectCommandNV", sizeof(VkDrawMeshTasksIndirectCommandNV));
- if (maxDrawCount > 1) {
- skip |= ValidateCmdDrawStrideWithBuffer(commandBuffer, "VUID-vkCmdDrawMeshTasksIndirectCountNV-maxDrawCount-02183", stride,
- "VkDrawMeshTasksIndirectCommandNV", sizeof(VkDrawMeshTasksIndirectCommandNV),
- maxDrawCount, offset, buffer_state);
- }
- return skip;
-}
-
-void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
- VkDeviceSize offset, VkBuffer countBuffer,
- VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
- uint32_t stride) {
- CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
- BUFFER_STATE *buffer_state = GetBufferState(buffer);
- BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
- UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
- if (buffer_state) {
- AddCommandBufferBindingBuffer(cb_state, buffer_state);
- }
- if (count_buffer_state) {
- AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
- }
-}
diff --git a/layers/generated/.clang-format b/layers/generated/.clang-format
deleted file mode 100644
index 3bb983a45..000000000
--- a/layers/generated/.clang-format
+++ /dev/null
@@ -1,5 +0,0 @@
----
-# Disable clang-format for generated code
-DisableFormat: true
-SortIncludes: false
-...
diff --git a/layers/generated/chassis.cpp b/layers/generated/chassis.cpp
deleted file mode 100644
index 9923262ea..000000000
--- a/layers/generated/chassis.cpp
+++ /dev/null
@@ -1,9752 +0,0 @@
-
-// This file is ***GENERATED***. Do Not Edit.
-// See layer_chassis_generator.py for modifications.
-
-/* Copyright (c) 2015-2019 The Khronos Group Inc.
- * Copyright (c) 2015-2019 Valve Corporation
- * Copyright (c) 2015-2019 LunarG, Inc.
- * Copyright (c) 2015-2019 Google Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * Author: Mark Lobodzinski <mark@lunarg.com>
- */
-
-
-#include <string.h>
-#include <mutex>
-
-#define VALIDATION_ERROR_MAP_IMPL
-
-#include "chassis.h"
-#include "layer_chassis_dispatch.h"
-
-std::unordered_map<void*, ValidationObject*> layer_data_map;
-
-// Global unique object identifier.
-std::atomic<uint64_t> global_unique_id(1ULL);
-// Map uniqueID to actual object handle. Accesses to the map itself are
-// internally synchronized.
-vl_concurrent_unordered_map<uint64_t, uint64_t, 4> unique_id_mapping;
-
-// TODO: This variable controls handle wrapping -- in the future it should be hooked
-// up to the new VALIDATION_FEATURES extension. Temporarily, control with a compile-time flag.
-#if defined(LAYER_CHASSIS_CAN_WRAP_HANDLES)
-bool wrap_handles = true;
-#else
-bool wrap_handles = false;
-#endif
-
-// Set layer name -- Khronos layer name overrides any other defined names
-#if BUILD_KHRONOS_VALIDATION
-#define OBJECT_LAYER_NAME "VK_LAYER_KHRONOS_validation"
-#define OBJECT_LAYER_DESCRIPTION "khronos_validation"
-#elif BUILD_OBJECT_TRACKER
-#define OBJECT_LAYER_NAME "VK_LAYER_LUNARG_object_tracker"
-#define OBJECT_LAYER_DESCRIPTION "lunarg_object_tracker"
-#elif BUILD_THREAD_SAFETY
-#define OBJECT_LAYER_NAME "VK_LAYER_GOOGLE_threading"
-#define OBJECT_LAYER_DESCRIPTION "google_thread_checker"
-#elif BUILD_PARAMETER_VALIDATION
-#define OBJECT_LAYER_NAME "VK_LAYER_LUNARG_parameter_validation"
-#define OBJECT_LAYER_DESCRIPTION "lunarg_parameter_validation"
-#elif BUILD_CORE_VALIDATION
-#define OBJECT_LAYER_NAME "VK_LAYER_LUNARG_core_validation"
-#define OBJECT_LAYER_DESCRIPTION "lunarg_core_validation"
-#else
-#define OBJECT_LAYER_NAME "VK_LAYER_GOOGLE_unique_objects"
-#define OBJECT_LAYER_DESCRIPTION "lunarg_unique_objects"
-#endif
-
-// Include layer validation object definitions
-#if BUILD_OBJECT_TRACKER
-#include "object_lifetime_validation.h"
-#endif
-#if BUILD_THREAD_SAFETY
-#include "thread_safety.h"
-#endif
-#if BUILD_PARAMETER_VALIDATION
-#include "stateless_validation.h"
-#endif
-#if BUILD_CORE_VALIDATION
-#include "core_validation.h"
-#endif
-#if BUILD_BEST_PRACTICES
-#include "best_practices.h"
-#endif
-
-namespace vulkan_layer_chassis {
-
-using std::unordered_map;
-
-static const VkLayerProperties global_layer = {
- OBJECT_LAYER_NAME, VK_LAYER_API_VERSION, 1, "LunarG Validation Layer",
-};
-
-static const VkExtensionProperties instance_extensions[] = {{VK_EXT_DEBUG_REPORT_EXTENSION_NAME, VK_EXT_DEBUG_REPORT_SPEC_VERSION},
- {VK_EXT_DEBUG_UTILS_EXTENSION_NAME, VK_EXT_DEBUG_UTILS_SPEC_VERSION}};
-static const VkExtensionProperties device_extensions[] = {
- {VK_EXT_VALIDATION_CACHE_EXTENSION_NAME, VK_EXT_VALIDATION_CACHE_SPEC_VERSION},
- {VK_EXT_DEBUG_MARKER_EXTENSION_NAME, VK_EXT_DEBUG_MARKER_SPEC_VERSION},
-};
-
-typedef struct {
- bool is_instance_api;
- void* funcptr;
-} function_data;
-
-extern const std::unordered_map<std::string, function_data> name_to_funcptr_map;
-
-// Manually written functions
-
-// Check enabled instance extensions against supported instance extension whitelist
-static void InstanceExtensionWhitelist(ValidationObject *layer_data, const VkInstanceCreateInfo *pCreateInfo, VkInstance instance) {
- for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
- // Check for recognized instance extensions
- if (!white_list(pCreateInfo->ppEnabledExtensionNames[i], kInstanceExtensionNames)) {
- log_msg(layer_data->report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- kVUIDUndefined,
- "Instance Extension %s is not supported by this layer. Using this extension may adversely affect validation "
- "results and/or produce undefined behavior.",
- pCreateInfo->ppEnabledExtensionNames[i]);
- }
- }
-}
-
-// Check enabled device extensions against supported device extension whitelist
-static void DeviceExtensionWhitelist(ValidationObject *layer_data, const VkDeviceCreateInfo *pCreateInfo, VkDevice device) {
- for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
- // Check for recognized device extensions
- if (!white_list(pCreateInfo->ppEnabledExtensionNames[i], kDeviceExtensionNames)) {
- log_msg(layer_data->report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- kVUIDUndefined,
- "Device Extension %s is not supported by this layer. Using this extension may adversely affect validation "
- "results and/or produce undefined behavior.",
- pCreateInfo->ppEnabledExtensionNames[i]);
- }
- }
-}
-
-
-// Process validation features, flags and settings specified through extensions, a layer settings file, or environment variables
-
-static const std::unordered_map<std::string, VkValidationFeatureDisableEXT> VkValFeatureDisableLookup = {
- {"VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT", VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT},
- {"VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT", VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT},
- {"VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT", VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT},
- {"VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT", VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT},
- {"VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT", VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT},
- {"VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT", VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT},
- {"VK_VALIDATION_FEATURE_DISABLE_ALL_EXT", VK_VALIDATION_FEATURE_DISABLE_ALL_EXT},
-};
-
-static const std::unordered_map<std::string, VkValidationFeatureEnableEXT> VkValFeatureEnableLookup = {
- {"VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT", VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT},
- {"VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT", VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT},
-};
-
-static const std::unordered_map<std::string, VkValidationFeatureEnable> VkValFeatureEnableLookup2 = {
- {"VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES", VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES},
-};
-
-static const std::unordered_map<std::string, ValidationCheckDisables> ValidationDisableLookup = {
- {"VALIDATION_CHECK_DISABLE_COMMAND_BUFFER_STATE", VALIDATION_CHECK_DISABLE_COMMAND_BUFFER_STATE},
- {"VALIDATION_CHECK_DISABLE_OBJECT_IN_USE", VALIDATION_CHECK_DISABLE_OBJECT_IN_USE},
- {"VALIDATION_CHECK_DISABLE_IDLE_DESCRIPTOR_SET", VALIDATION_CHECK_DISABLE_IDLE_DESCRIPTOR_SET},
- {"VALIDATION_CHECK_DISABLE_PUSH_CONSTANT_RANGE", VALIDATION_CHECK_DISABLE_PUSH_CONSTANT_RANGE},
- {"VALIDATION_CHECK_DISABLE_QUERY_VALIDATION", VALIDATION_CHECK_DISABLE_QUERY_VALIDATION},
- {"VALIDATION_CHECK_DISABLE_IMAGE_LAYOUT_VALIDATION", VALIDATION_CHECK_DISABLE_IMAGE_LAYOUT_VALIDATION},
-};
-
-// Set the local disable flag for the appropriate VALIDATION_CHECK_DISABLE enum
-void SetValidationDisable(CHECK_DISABLED* disable_data, const ValidationCheckDisables disable_id) {
- switch (disable_id) {
- case VALIDATION_CHECK_DISABLE_COMMAND_BUFFER_STATE:
- disable_data->command_buffer_state = true;
- break;
- case VALIDATION_CHECK_DISABLE_OBJECT_IN_USE:
- disable_data->object_in_use = true;
- break;
- case VALIDATION_CHECK_DISABLE_IDLE_DESCRIPTOR_SET:
- disable_data->idle_descriptor_set = true;
- break;
- case VALIDATION_CHECK_DISABLE_PUSH_CONSTANT_RANGE:
- disable_data->push_constant_range = true;
- break;
- case VALIDATION_CHECK_DISABLE_QUERY_VALIDATION:
- disable_data->query_validation = true;
- break;
- case VALIDATION_CHECK_DISABLE_IMAGE_LAYOUT_VALIDATION:
- disable_data->image_layout_validation = true;
- break;
- default:
- assert(false);
- }
-}
-
-// Set the local disable flag for a single VK_VALIDATION_FEATURE_DISABLE_* flag
-void SetValidationFeatureDisable(CHECK_DISABLED* disable_data, const VkValidationFeatureDisableEXT feature_disable) {
- switch (feature_disable) {
- case VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT:
- disable_data->shader_validation = true;
- break;
- case VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT:
- disable_data->thread_safety = true;
- break;
- case VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT:
- disable_data->stateless_checks = true;
- break;
- case VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT:
- disable_data->object_tracking = true;
- break;
- case VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT:
- disable_data->core_checks = true;
- break;
- case VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT:
- disable_data->handle_wrapping = true;
- break;
- case VK_VALIDATION_FEATURE_DISABLE_ALL_EXT:
- // Set all disabled flags to true
- disable_data->SetAll(true);
- break;
- default:
- break;
- }
-}
-
-// Set the local enable flag for a single VK_VALIDATION_FEATURE_ENABLE_* flag
-void SetValidationFeatureEnable(CHECK_ENABLED *enable_data, const VkValidationFeatureEnableEXT feature_enable) {
- switch (feature_enable) {
- case VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT:
- enable_data->gpu_validation = true;
- break;
- case VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT:
- enable_data->gpu_validation_reserve_binding_slot = true;
- break;
- default:
- break;
- }
-}
-
-void SetValidationFeatureEnable(CHECK_ENABLED *enable_data, const VkValidationFeatureEnable feature_enable) {
- switch(feature_enable) {
- case VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES:
- enable_data->best_practices = true;
- break;
- default:
- break;
- }
-}
-
-// Set the local disable flag for settings specified through the VK_EXT_validation_flags extension
-void SetValidationFlags(CHECK_DISABLED* disables, const VkValidationFlagsEXT* val_flags_struct) {
- for (uint32_t i = 0; i < val_flags_struct->disabledValidationCheckCount; ++i) {
- switch (val_flags_struct->pDisabledValidationChecks[i]) {
- case VK_VALIDATION_CHECK_SHADERS_EXT:
- disables->shader_validation = true;
- break;
- case VK_VALIDATION_CHECK_ALL_EXT:
- // Set all disabled flags to true
- disables->SetAll(true);
- break;
- default:
- break;
- }
- }
-}
-
-// Process Validation Features flags specified through the ValidationFeature extension
-void SetValidationFeatures(CHECK_DISABLED *disable_data, CHECK_ENABLED *enable_data,
- const VkValidationFeaturesEXT *val_features_struct) {
- for (uint32_t i = 0; i < val_features_struct->disabledValidationFeatureCount; ++i) {
- SetValidationFeatureDisable(disable_data, val_features_struct->pDisabledValidationFeatures[i]);
- }
- for (uint32_t i = 0; i < val_features_struct->enabledValidationFeatureCount; ++i) {
- SetValidationFeatureEnable(enable_data, val_features_struct->pEnabledValidationFeatures[i]);
- }
-}
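For context, a minimal application-side sketch of how a VkValidationFeaturesEXT structure reaches this setter through the VkInstanceCreateInfo pNext chain; the chosen enable is illustrative, not required by the layer:

    // Sketch: enable GPU-assisted validation via VK_EXT_validation_features
    VkValidationFeatureEnableEXT enabled_features[] = {VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT};
    VkValidationFeaturesEXT validation_features = {};
    validation_features.sType = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT;
    validation_features.enabledValidationFeatureCount = 1;
    validation_features.pEnabledValidationFeatures = enabled_features;

    VkInstanceCreateInfo instance_create_info = {};
    instance_create_info.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
    instance_create_info.pNext = &validation_features;
    // ... remaining VkInstanceCreateInfo fields, then vkCreateInstance(&instance_create_info, nullptr, &instance);

CreateInstance below locates this structure with lvl_find_in_chain<VkValidationFeaturesEXT> and forwards it to SetValidationFeatures.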
-
-// Given a string representation of a list of enable enum values, call the appropriate setter function
-void SetLocalEnableSetting(std::string list_of_enables, std::string delimiter, CHECK_ENABLED* enables) {
- size_t pos = 0;
- std::string token;
- while (list_of_enables.length() != 0) {
- pos = list_of_enables.find(delimiter);
- if (pos != std::string::npos) {
- token = list_of_enables.substr(0, pos);
- } else {
- pos = list_of_enables.length() - delimiter.length();
- token = list_of_enables;
- }
- if (token.find("VK_VALIDATION_FEATURE_ENABLE_") != std::string::npos) {
- auto result = VkValFeatureEnableLookup.find(token);
- if (result != VkValFeatureEnableLookup.end()) {
- SetValidationFeatureEnable(enables, result->second);
- } else {
- auto result2 = VkValFeatureEnableLookup2.find(token);
- if (result2 != VkValFeatureEnableLookup2.end()) {
- SetValidationFeatureEnable(enables, result2->second);
- }
- }
- }
- list_of_enables.erase(0, pos + delimiter.length());
- }
-}
-
-// Given a string representation of a list of disable enum values, call the appropriate setter function
-void SetLocalDisableSetting(std::string list_of_disables, std::string delimiter, CHECK_DISABLED* disables) {
- size_t pos = 0;
- std::string token;
- while (list_of_disables.length() != 0) {
- pos = list_of_disables.find(delimiter);
- if (pos != std::string::npos) {
- token = list_of_disables.substr(0, pos);
- } else {
- pos = list_of_disables.length() - delimiter.length();
- token = list_of_disables;
- }
- if (token.find("VK_VALIDATION_FEATURE_DISABLE_") != std::string::npos) {
- auto result = VkValFeatureDisableLookup.find(token);
- if (result != VkValFeatureDisableLookup.end()) {
- SetValidationFeatureDisable(disables, result->second);
- }
- }
- if (token.find("VALIDATION_CHECK_DISABLE_") != std::string::npos) {
- auto result = ValidationDisableLookup.find(token);
- if (result != ValidationDisableLookup.end()) {
- SetValidationDisable(disables, result->second);
- }
- }
- list_of_disables.erase(0, pos + delimiter.length());
- }
-}
-
- // Process enables and disables set through the vk_layer_settings.txt config file or through an environment variable
-void ProcessConfigAndEnvSettings(const char* layer_description, CHECK_ENABLED* enables, CHECK_DISABLED* disables) {
- std::string enable_key = layer_description;
- std::string disable_key = layer_description;
- enable_key.append(".enables");
- disable_key.append(".disables");
- std::string list_of_config_enables = getLayerOption(enable_key.c_str());
- std::string list_of_env_enables = GetLayerEnvVar("VK_LAYER_ENABLES");
- std::string list_of_config_disables = getLayerOption(disable_key.c_str());
- std::string list_of_env_disables = GetLayerEnvVar("VK_LAYER_DISABLES");
-#if defined(_WIN32)
- std::string env_delimiter = ";";
-#else
- std::string env_delimiter = ":";
-#endif
- SetLocalEnableSetting(list_of_config_enables, ",", enables);
- SetLocalEnableSetting(list_of_env_enables, env_delimiter, enables);
- SetLocalDisableSetting(list_of_config_disables, ",", disables);
- SetLocalDisableSetting(list_of_env_disables, env_delimiter, disables);
-}
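An illustrative pair of inputs this function consumes; the option names come from the lookup tables above, and the khronos_validation prefix assumes the Khronos layer build, where OBJECT_LAYER_DESCRIPTION is "khronos_validation". The same setting can arrive from vk_layer_settings.txt, where lists are comma-separated:

    khronos_validation.enables = VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES
    khronos_validation.disables = VALIDATION_CHECK_DISABLE_QUERY_VALIDATION

or from the environment, where the list is ';'-separated on Windows and ':'-separated elsewhere:

    VK_LAYER_ENABLES=VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES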
-
-
-// Non-code-generated chassis API functions
-
-VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetDeviceProcAddr(VkDevice device, const char *funcName) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!ApiParentExtensionEnabled(funcName, &layer_data->device_extensions)) {
- return nullptr;
- }
- const auto &item = name_to_funcptr_map.find(funcName);
- if (item != name_to_funcptr_map.end()) {
- if (item->second.is_instance_api) {
- return nullptr;
- } else {
- return reinterpret_cast<PFN_vkVoidFunction>(item->second.funcptr);
- }
- }
- auto &table = layer_data->device_dispatch_table;
- if (!table.GetDeviceProcAddr) return nullptr;
- return table.GetDeviceProcAddr(device, funcName);
-}
-
-VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetInstanceProcAddr(VkInstance instance, const char *funcName) {
- const auto &item = name_to_funcptr_map.find(funcName);
- if (item != name_to_funcptr_map.end()) {
- return reinterpret_cast<PFN_vkVoidFunction>(item->second.funcptr);
- }
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- auto &table = layer_data->instance_dispatch_table;
- if (!table.GetInstanceProcAddr) return nullptr;
- return table.GetInstanceProcAddr(instance, funcName);
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceLayerProperties(uint32_t *pCount, VkLayerProperties *pProperties) {
- return util_GetLayerProperties(1, &global_layer, pCount, pProperties);
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pCount,
- VkLayerProperties *pProperties) {
- return util_GetLayerProperties(1, &global_layer, pCount, pProperties);
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pCount,
- VkExtensionProperties *pProperties) {
- if (pLayerName && !strcmp(pLayerName, global_layer.layerName))
- return util_GetExtensionProperties(ARRAY_SIZE(instance_extensions), instance_extensions, pCount, pProperties);
-
- return VK_ERROR_LAYER_NOT_PRESENT;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, const char *pLayerName,
- uint32_t *pCount, VkExtensionProperties *pProperties) {
- if (pLayerName && !strcmp(pLayerName, global_layer.layerName)) return util_GetExtensionProperties(ARRAY_SIZE(device_extensions), device_extensions, pCount, pProperties);
- assert(physicalDevice);
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- return layer_data->instance_dispatch_table.EnumerateDeviceExtensionProperties(physicalDevice, pLayerName, pCount, pProperties);
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateInstance(const VkInstanceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator,
- VkInstance *pInstance) {
- VkLayerInstanceCreateInfo* chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
-
- assert(chain_info->u.pLayerInfo);
- PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
- PFN_vkCreateInstance fpCreateInstance = (PFN_vkCreateInstance)fpGetInstanceProcAddr(NULL, "vkCreateInstance");
- if (fpCreateInstance == NULL) return VK_ERROR_INITIALIZATION_FAILED;
- chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;
- uint32_t specified_version = (pCreateInfo->pApplicationInfo ? pCreateInfo->pApplicationInfo->apiVersion : VK_API_VERSION_1_0);
- uint32_t api_version = (specified_version < VK_API_VERSION_1_1) ? VK_API_VERSION_1_0 : VK_API_VERSION_1_1;
-
- CHECK_ENABLED local_enables {};
- CHECK_DISABLED local_disables {};
- const auto *validation_features_ext = lvl_find_in_chain<VkValidationFeaturesEXT>(pCreateInfo->pNext);
- if (validation_features_ext) {
- SetValidationFeatures(&local_disables, &local_enables, validation_features_ext);
- }
- const auto *validation_flags_ext = lvl_find_in_chain<VkValidationFlagsEXT>(pCreateInfo->pNext);
- if (validation_flags_ext) {
- SetValidationFlags(&local_disables, validation_flags_ext);
- }
- ProcessConfigAndEnvSettings(OBJECT_LAYER_DESCRIPTION, &local_enables, &local_disables);
-
- // Create temporary dispatch vector for pre-calls until instance is created
- std::vector<ValidationObject*> local_object_dispatch;
- // Add VOs to dispatch vector. Order here will be the validation dispatch order!
-#if BUILD_THREAD_SAFETY
- auto thread_checker = new ThreadSafety;
- if (!local_disables.thread_safety) {
- local_object_dispatch.emplace_back(thread_checker);
- }
- thread_checker->container_type = LayerObjectTypeThreading;
- thread_checker->api_version = api_version;
-#endif
-#if BUILD_PARAMETER_VALIDATION
- auto parameter_validation = new StatelessValidation;
- if (!local_disables.stateless_checks) {
- local_object_dispatch.emplace_back(parameter_validation);
- }
- parameter_validation->container_type = LayerObjectTypeParameterValidation;
- parameter_validation->api_version = api_version;
-#endif
-#if BUILD_OBJECT_TRACKER
- auto object_tracker = new ObjectLifetimes;
- if (!local_disables.object_tracking) {
- local_object_dispatch.emplace_back(object_tracker);
- }
- object_tracker->container_type = LayerObjectTypeObjectTracker;
- object_tracker->api_version = api_version;
-#endif
-#if BUILD_CORE_VALIDATION
- auto core_checks = new CoreChecks;
- if (!local_disables.core_checks) {
- local_object_dispatch.emplace_back(core_checks);
- }
- core_checks->container_type = LayerObjectTypeCoreValidation;
- core_checks->api_version = api_version;
-#endif
-#if BUILD_BEST_PRACTICES
- auto best_practices = new BestPractices;
- if (local_enables.best_practices) {
- local_object_dispatch.emplace_back(best_practices);
- }
- best_practices->container_type = LayerObjectTypeBestPractices;
- best_practices->api_version = api_version;
-#endif
-
- // If handle wrapping is disabled via the ValidationFeatures extension, override build flag
- if (local_disables.handle_wrapping) {
- wrap_handles = false;
- }
-
- // Init dispatch array and call registration functions
- for (auto intercept : local_object_dispatch) {
- intercept->PreCallValidateCreateInstance(pCreateInfo, pAllocator, pInstance);
- }
- for (auto intercept : local_object_dispatch) {
- intercept->PreCallRecordCreateInstance(pCreateInfo, pAllocator, pInstance);
- }
-
- VkResult result = fpCreateInstance(pCreateInfo, pAllocator, pInstance);
- if (result != VK_SUCCESS) return result;
-
- auto framework = GetLayerDataPtr(get_dispatch_key(*pInstance), layer_data_map);
-
- framework->object_dispatch = local_object_dispatch;
- framework->container_type = LayerObjectTypeInstance;
- framework->disabled = local_disables;
- framework->enabled = local_enables;
-
- framework->instance = *pInstance;
- layer_init_instance_dispatch_table(*pInstance, &framework->instance_dispatch_table, fpGetInstanceProcAddr);
- framework->report_data = debug_utils_create_instance(&framework->instance_dispatch_table, *pInstance, pCreateInfo->enabledExtensionCount,
- pCreateInfo->ppEnabledExtensionNames);
- framework->api_version = api_version;
- framework->instance_extensions.InitFromInstanceCreateInfo(specified_version, pCreateInfo);
-
- layer_debug_messenger_actions(framework->report_data, framework->logging_messenger, pAllocator, OBJECT_LAYER_DESCRIPTION);
-
-#if BUILD_OBJECT_TRACKER
- object_tracker->report_data = framework->report_data;
- object_tracker->instance_dispatch_table = framework->instance_dispatch_table;
- object_tracker->enabled = framework->enabled;
- object_tracker->disabled = framework->disabled;
-#endif
-#if BUILD_THREAD_SAFETY
- thread_checker->report_data = framework->report_data;
- thread_checker->instance_dispatch_table = framework->instance_dispatch_table;
- thread_checker->enabled = framework->enabled;
- thread_checker->disabled = framework->disabled;
-#endif
-#if BUILD_PARAMETER_VALIDATION
- parameter_validation->report_data = framework->report_data;
- parameter_validation->instance_dispatch_table = framework->instance_dispatch_table;
- parameter_validation->enabled = framework->enabled;
- parameter_validation->disabled = framework->disabled;
-#endif
-#if BUILD_CORE_VALIDATION
- core_checks->report_data = framework->report_data;
- core_checks->instance_dispatch_table = framework->instance_dispatch_table;
- core_checks->instance = *pInstance;
- core_checks->enabled = framework->enabled;
- core_checks->disabled = framework->disabled;
- core_checks->instance_state = core_checks;
-#endif
-#if BUILD_BEST_PRACTICES
- best_practices->report_data = framework->report_data;
- best_practices->instance_dispatch_table = framework->instance_dispatch_table;
- best_practices->enabled = framework->enabled;
- best_practices->disabled = framework->disabled;
-#endif
-
- for (auto intercept : framework->object_dispatch) {
- intercept->PostCallRecordCreateInstance(pCreateInfo, pAllocator, pInstance, result);
- }
-
- InstanceExtensionWhitelist(framework, pCreateInfo, *pInstance);
-
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL DestroyInstance(VkInstance instance, const VkAllocationCallbacks *pAllocator) {
- dispatch_key key = get_dispatch_key(instance);
- auto layer_data = GetLayerDataPtr(key, layer_data_map);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallValidateDestroyInstance(instance, pAllocator);
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordDestroyInstance(instance, pAllocator);
- }
-
- layer_data->instance_dispatch_table.DestroyInstance(instance, pAllocator);
-
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordDestroyInstance(instance, pAllocator);
- }
- // Clean up logging callback, if any
- while (layer_data->logging_messenger.size() > 0) {
- VkDebugUtilsMessengerEXT messenger = layer_data->logging_messenger.back();
- layer_destroy_messenger_callback(layer_data->report_data, messenger, pAllocator);
- layer_data->logging_messenger.pop_back();
- }
- while (layer_data->logging_callback.size() > 0) {
- VkDebugReportCallbackEXT callback = layer_data->logging_callback.back();
- layer_destroy_report_callback(layer_data->report_data, callback, pAllocator);
- layer_data->logging_callback.pop_back();
- }
-
- layer_debug_utils_destroy_instance(layer_data->report_data);
-
- for (auto item = layer_data->object_dispatch.begin(); item != layer_data->object_dispatch.end(); item++) {
- delete *item;
- }
- FreeLayerDataPtr(key, layer_data_map);
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
- const VkAllocationCallbacks *pAllocator, VkDevice *pDevice) {
- VkLayerDeviceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
-
- auto instance_interceptor = GetLayerDataPtr(get_dispatch_key(gpu), layer_data_map);
-
- PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
- PFN_vkGetDeviceProcAddr fpGetDeviceProcAddr = chain_info->u.pLayerInfo->pfnNextGetDeviceProcAddr;
- PFN_vkCreateDevice fpCreateDevice = (PFN_vkCreateDevice)fpGetInstanceProcAddr(instance_interceptor->instance, "vkCreateDevice");
- if (fpCreateDevice == NULL) {
- return VK_ERROR_INITIALIZATION_FAILED;
- }
- chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;
-
- // Get physical device limits for device
- VkPhysicalDeviceProperties device_properties = {};
- instance_interceptor->instance_dispatch_table.GetPhysicalDeviceProperties(gpu, &device_properties);
-
- // Set up the validation tables based on the application API version from the instance and the capabilities of the device driver
- uint32_t effective_api_version = std::min(device_properties.apiVersion, instance_interceptor->api_version);
-
- DeviceExtensions device_extensions = {};
- device_extensions.InitFromDeviceCreateInfo(&instance_interceptor->instance_extensions, effective_api_version, pCreateInfo);
- for (auto item : instance_interceptor->object_dispatch) {
- item->device_extensions = device_extensions;
- }
-
- safe_VkDeviceCreateInfo modified_create_info(pCreateInfo);
-
- bool skip = false;
- for (auto intercept : instance_interceptor->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreateDevice(gpu, pCreateInfo, pAllocator, pDevice);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : instance_interceptor->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateDevice(gpu, pCreateInfo, pAllocator, pDevice, &modified_create_info);
- }
-
- VkResult result = fpCreateDevice(gpu, reinterpret_cast<VkDeviceCreateInfo *>(&modified_create_info), pAllocator, pDevice);
- if (result != VK_SUCCESS) {
- return result;
- }
-
- auto device_interceptor = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
- device_interceptor->container_type = LayerObjectTypeDevice;
-
- // Save local info in device object
- device_interceptor->phys_dev_properties.properties = device_properties;
- device_interceptor->api_version = device_interceptor->device_extensions.InitFromDeviceCreateInfo(
- &instance_interceptor->instance_extensions, effective_api_version, pCreateInfo);
- device_interceptor->device_extensions = device_extensions;
-
- layer_init_device_dispatch_table(*pDevice, &device_interceptor->device_dispatch_table, fpGetDeviceProcAddr);
-
- device_interceptor->device = *pDevice;
- device_interceptor->physical_device = gpu;
- device_interceptor->instance = instance_interceptor->instance;
- device_interceptor->report_data = layer_debug_utils_create_device(instance_interceptor->report_data, *pDevice);
-
- // Note that this defines the order in which the layer validation objects are called
-#if BUILD_THREAD_SAFETY
- auto thread_safety = new ThreadSafety;
- thread_safety->container_type = LayerObjectTypeThreading;
- if (!instance_interceptor->disabled.thread_safety) {
- device_interceptor->object_dispatch.emplace_back(thread_safety);
- }
-#endif
-#if BUILD_PARAMETER_VALIDATION
- auto stateless_validation = new StatelessValidation;
- stateless_validation->container_type = LayerObjectTypeParameterValidation;
- if (!instance_interceptor->disabled.stateless_checks) {
- device_interceptor->object_dispatch.emplace_back(stateless_validation);
- }
-#endif
-#if BUILD_OBJECT_TRACKER
- auto object_tracker = new ObjectLifetimes;
- object_tracker->container_type = LayerObjectTypeObjectTracker;
- if (!instance_interceptor->disabled.object_tracking) {
- device_interceptor->object_dispatch.emplace_back(object_tracker);
- }
-#endif
-#if BUILD_CORE_VALIDATION
- auto core_checks = new CoreChecks;
- core_checks->container_type = LayerObjectTypeCoreValidation;
- core_checks->instance_state = reinterpret_cast<CoreChecks *>(
- core_checks->GetValidationObject(instance_interceptor->object_dispatch, LayerObjectTypeCoreValidation));
- if (!instance_interceptor->disabled.core_checks) {
- device_interceptor->object_dispatch.emplace_back(core_checks);
- }
-#endif
-#if BUILD_BEST_PRACTICES
- auto best_practices = new BestPractices;
- best_practices->container_type = LayerObjectTypeBestPractices;
- if (instance_interceptor->enabled.best_practices) {
- device_interceptor->object_dispatch.emplace_back(best_practices);
- }
-#endif
-
- // Set per-intercept common data items
- for (auto dev_intercept : device_interceptor->object_dispatch) {
- dev_intercept->device = *pDevice;
- dev_intercept->physical_device = gpu;
- dev_intercept->instance = instance_interceptor->instance;
- dev_intercept->report_data = device_interceptor->report_data;
- dev_intercept->device_dispatch_table = device_interceptor->device_dispatch_table;
- dev_intercept->api_version = device_interceptor->api_version;
- dev_intercept->disabled = instance_interceptor->disabled;
- dev_intercept->enabled = instance_interceptor->enabled;
- dev_intercept->instance_dispatch_table = instance_interceptor->instance_dispatch_table;
- dev_intercept->instance_extensions = instance_interceptor->instance_extensions;
- dev_intercept->device_extensions = device_interceptor->device_extensions;
- }
-
- for (auto intercept : instance_interceptor->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCreateDevice(gpu, pCreateInfo, pAllocator, pDevice, result);
- }
-
- DeviceExtensionWhitelist(device_interceptor, pCreateInfo, *pDevice);
-
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL DestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
- dispatch_key key = get_dispatch_key(device);
- auto layer_data = GetLayerDataPtr(key, layer_data_map);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallValidateDestroyDevice(device, pAllocator);
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordDestroyDevice(device, pAllocator);
- }
- layer_debug_utils_destroy_device(device);
-
- layer_data->device_dispatch_table.DestroyDevice(device, pAllocator);
-
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordDestroyDevice(device, pAllocator);
- }
-
- for (auto item = layer_data->object_dispatch.begin(); item != layer_data->object_dispatch.end(); item++) {
- delete *item;
- }
- FreeLayerDataPtr(key, layer_data_map);
-}
-
-
-// Special-case APIs for which core_validation needs custom parameter lists and/or modifies parameters
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateGraphicsPipelines(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VkGraphicsPipelineCreateInfo* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkPipeline* pPipelines) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
-
-#ifdef BUILD_CORE_VALIDATION
- create_graphics_pipeline_api_state cgpl_state{};
-#else
- struct create_graphics_pipeline_api_state {
- const VkGraphicsPipelineCreateInfo* pCreateInfos;
- } cgpl_state;
-#endif
- cgpl_state.pCreateInfos = pCreateInfos;
-
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, &cgpl_state);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, &cgpl_state);
- }
-
- VkResult result = DispatchCreateGraphicsPipelines(device, pipelineCache, createInfoCount, cgpl_state.pCreateInfos, pAllocator, pPipelines);
-
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, result, &cgpl_state);
- }
- return result;
-}
-
- // This API saves some core_validation pipeline state on the stack for performance purposes
-VKAPI_ATTR VkResult VKAPI_CALL CreateComputePipelines(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VkComputePipelineCreateInfo* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkPipeline* pPipelines) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
-
-#ifdef BUILD_CORE_VALIDATION
- create_compute_pipeline_api_state ccpl_state{};
-#else
- struct create_compute_pipeline_api_state {
- const VkComputePipelineCreateInfo* pCreateInfos;
- } ccpl_state;
-#endif
- ccpl_state.pCreateInfos = pCreateInfos;
-
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, &ccpl_state);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, &ccpl_state);
- }
- VkResult result = DispatchCreateComputePipelines(device, pipelineCache, createInfoCount, ccpl_state.pCreateInfos, pAllocator, pPipelines);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, result, &ccpl_state);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateRayTracingPipelinesNV(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VkRayTracingPipelineCreateInfoNV* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkPipeline* pPipelines) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
-
-#ifdef BUILD_CORE_VALIDATION
- create_ray_tracing_pipeline_api_state crtpl_state{};
-#else
- struct create_ray_tracing_pipeline_api_state {
- const VkRayTracingPipelineCreateInfoNV* pCreateInfos;
- } crtpl_state;
-#endif
- crtpl_state.pCreateInfos = pCreateInfos;
-
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos,
- pAllocator, pPipelines, &crtpl_state);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator,
- pPipelines, &crtpl_state);
- }
- VkResult result = DispatchCreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator,
- pPipelines, result, &crtpl_state);
- }
- return result;
-}
-
-// This API needs the ability to modify a down-chain parameter
-VKAPI_ATTR VkResult VKAPI_CALL CreatePipelineLayout(
- VkDevice device,
- const VkPipelineLayoutCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkPipelineLayout* pPipelineLayout) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
-
-#ifndef BUILD_CORE_VALIDATION
- struct create_pipeline_layout_api_state {
- VkPipelineLayoutCreateInfo modified_create_info;
- };
-#endif
- create_pipeline_layout_api_state cpl_state{};
- cpl_state.modified_create_info = *pCreateInfo;
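-    // The PreCallRecord hooks receive cpl_state and may rewrite modified_create_info (for example, to append
-    // an extra descriptor set layout for instrumentation); the dispatch below forwards the modified copy
-    // rather than the application's original pCreateInfo.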
-
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout, &cpl_state);
- }
- VkResult result = DispatchCreatePipelineLayout(device, &cpl_state.modified_create_info, pAllocator, pPipelineLayout);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout, result);
- }
- return result;
-}
-
-// This API needs some local stack data for performance reasons and may also modify a parameter
-VKAPI_ATTR VkResult VKAPI_CALL CreateShaderModule(
- VkDevice device,
- const VkShaderModuleCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkShaderModule* pShaderModule) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
-
-#ifndef BUILD_CORE_VALIDATION
- struct create_shader_module_api_state {
- VkShaderModuleCreateInfo instrumented_create_info;
- };
-#endif
- create_shader_module_api_state csm_state{};
- csm_state.instrumented_create_info = *pCreateInfo;
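-    // csm_state starts as a copy of the application's create info; a hook may swap in instrumented SPIR-V
-    // (hence the field name), and the dispatch below consumes csm_state.instrumented_create_info instead of
-    // the caller's pCreateInfo.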
-
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule, &csm_state);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule, &csm_state);
- }
- VkResult result = DispatchCreateShaderModule(device, &csm_state.instrumented_create_info, pAllocator, pShaderModule);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule, result, &csm_state);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL AllocateDescriptorSets(
- VkDevice device,
- const VkDescriptorSetAllocateInfo* pAllocateInfo,
- VkDescriptorSet* pDescriptorSets) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
-
-#ifdef BUILD_CORE_VALIDATION
- cvdescriptorset::AllocateDescriptorSetsData ads_state(pAllocateInfo->descriptorSetCount);
-#else
- struct ads_state {} ads_state;
-#endif
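-    // ads_state is only meaningful when core validation is compiled in: it is handed to both the
-    // PreCallValidate and PostCallRecord hooks so state gathered while validating the allocation can be
-    // reused once the descriptor sets actually exist.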
-
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateAllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets, &ads_state);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordAllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets);
- }
- VkResult result = DispatchAllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordAllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets, result, &ads_state);
- }
- return result;
-}
-
-// ValidationCache APIs do not dispatch
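-// They are serviced entirely inside the layer: each call is routed to the core validation object
-// (LayerObjectTypeCoreValidation) if it is enabled, and is never forwarded down the dispatch chain.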
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateValidationCacheEXT(
- VkDevice device,
- const VkValidationCacheCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkValidationCacheEXT* pValidationCache) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- VkResult result = VK_SUCCESS;
-
- ValidationObject *validation_data = layer_data->GetValidationObject(layer_data->object_dispatch, LayerObjectTypeCoreValidation);
- if (validation_data) {
- auto lock = validation_data->write_lock();
- result = validation_data->CoreLayerCreateValidationCacheEXT(device, pCreateInfo, pAllocator, pValidationCache);
- }
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL DestroyValidationCacheEXT(
- VkDevice device,
- VkValidationCacheEXT validationCache,
- const VkAllocationCallbacks* pAllocator) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
-
- ValidationObject *validation_data = layer_data->GetValidationObject(layer_data->object_dispatch, LayerObjectTypeCoreValidation);
- if (validation_data) {
- auto lock = validation_data->write_lock();
- validation_data->CoreLayerDestroyValidationCacheEXT(device, validationCache, pAllocator);
- }
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL MergeValidationCachesEXT(
- VkDevice device,
- VkValidationCacheEXT dstCache,
- uint32_t srcCacheCount,
- const VkValidationCacheEXT* pSrcCaches) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- VkResult result = VK_SUCCESS;
-
- ValidationObject *validation_data = layer_data->GetValidationObject(layer_data->object_dispatch, LayerObjectTypeCoreValidation);
- if (validation_data) {
- auto lock = validation_data->write_lock();
- result = validation_data->CoreLayerMergeValidationCachesEXT(device, dstCache, srcCacheCount, pSrcCaches);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL GetValidationCacheDataEXT(
- VkDevice device,
- VkValidationCacheEXT validationCache,
- size_t* pDataSize,
- void* pData) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- VkResult result = VK_SUCCESS;
-
- ValidationObject *validation_data = layer_data->GetValidationObject(layer_data->object_dispatch, LayerObjectTypeCoreValidation);
- if (validation_data) {
- auto lock = validation_data->write_lock();
- result = validation_data->CoreLayerGetValidationCacheDataEXT(device, validationCache, pDataSize, pData);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL EnumeratePhysicalDevices(
- VkInstance instance,
- uint32_t* pPhysicalDeviceCount,
- VkPhysicalDevice* pPhysicalDevices) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateEnumeratePhysicalDevices(instance, pPhysicalDeviceCount, pPhysicalDevices);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordEnumeratePhysicalDevices(instance, pPhysicalDeviceCount, pPhysicalDevices);
- }
- VkResult result = DispatchEnumeratePhysicalDevices(instance, pPhysicalDeviceCount, pPhysicalDevices);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordEnumeratePhysicalDevices(instance, pPhysicalDeviceCount, pPhysicalDevices, result);
- }
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFeatures(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceFeatures* pFeatures) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceFeatures(physicalDevice, pFeatures);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceFeatures(physicalDevice, pFeatures);
- }
- DispatchGetPhysicalDeviceFeatures(physicalDevice, pFeatures);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceFeatures(physicalDevice, pFeatures);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFormatProperties(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkFormatProperties* pFormatProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceFormatProperties(physicalDevice, format, pFormatProperties);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceFormatProperties(physicalDevice, format, pFormatProperties);
- }
- DispatchGetPhysicalDeviceFormatProperties(physicalDevice, format, pFormatProperties);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceFormatProperties(physicalDevice, format, pFormatProperties);
- }
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceImageFormatProperties(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkImageType type,
- VkImageTiling tiling,
- VkImageUsageFlags usage,
- VkImageCreateFlags flags,
- VkImageFormatProperties* pImageFormatProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceImageFormatProperties(physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceImageFormatProperties(physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties);
- }
- VkResult result = DispatchGetPhysicalDeviceImageFormatProperties(physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceImageFormatProperties(physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties, result);
- }
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceProperties(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceProperties* pProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceProperties(physicalDevice, pProperties);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceProperties(physicalDevice, pProperties);
- }
- DispatchGetPhysicalDeviceProperties(physicalDevice, pProperties);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceProperties(physicalDevice, pProperties);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceQueueFamilyProperties(
- VkPhysicalDevice physicalDevice,
- uint32_t* pQueueFamilyPropertyCount,
- VkQueueFamilyProperties* pQueueFamilyProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceQueueFamilyProperties(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceQueueFamilyProperties(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
- }
- DispatchGetPhysicalDeviceQueueFamilyProperties(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceQueueFamilyProperties(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceMemoryProperties(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceMemoryProperties* pMemoryProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceMemoryProperties(physicalDevice, pMemoryProperties);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceMemoryProperties(physicalDevice, pMemoryProperties);
- }
- DispatchGetPhysicalDeviceMemoryProperties(physicalDevice, pMemoryProperties);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceMemoryProperties(physicalDevice, pMemoryProperties);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL GetDeviceQueue(
- VkDevice device,
- uint32_t queueFamilyIndex,
- uint32_t queueIndex,
- VkQueue* pQueue) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetDeviceQueue(device, queueFamilyIndex, queueIndex, pQueue);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetDeviceQueue(device, queueFamilyIndex, queueIndex, pQueue);
- }
- DispatchGetDeviceQueue(device, queueFamilyIndex, queueIndex, pQueue);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetDeviceQueue(device, queueFamilyIndex, queueIndex, pQueue);
- }
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL QueueSubmit(
- VkQueue queue,
- uint32_t submitCount,
- const VkSubmitInfo* pSubmits,
- VkFence fence) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateQueueSubmit(queue, submitCount, pSubmits, fence);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordQueueSubmit(queue, submitCount, pSubmits, fence);
- }
- VkResult result = DispatchQueueSubmit(queue, submitCount, pSubmits, fence);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordQueueSubmit(queue, submitCount, pSubmits, fence, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL QueueWaitIdle(
- VkQueue queue) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateQueueWaitIdle(queue);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordQueueWaitIdle(queue);
- }
- VkResult result = DispatchQueueWaitIdle(queue);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordQueueWaitIdle(queue, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL DeviceWaitIdle(
- VkDevice device) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateDeviceWaitIdle(device);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordDeviceWaitIdle(device);
- }
- VkResult result = DispatchDeviceWaitIdle(device);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordDeviceWaitIdle(device, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL AllocateMemory(
- VkDevice device,
- const VkMemoryAllocateInfo* pAllocateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDeviceMemory* pMemory) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateAllocateMemory(device, pAllocateInfo, pAllocator, pMemory);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordAllocateMemory(device, pAllocateInfo, pAllocator, pMemory);
- }
- VkResult result = DispatchAllocateMemory(device, pAllocateInfo, pAllocator, pMemory);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordAllocateMemory(device, pAllocateInfo, pAllocator, pMemory, result);
- }
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL FreeMemory(
- VkDevice device,
- VkDeviceMemory memory,
- const VkAllocationCallbacks* pAllocator) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateFreeMemory(device, memory, pAllocator);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordFreeMemory(device, memory, pAllocator);
- }
- DispatchFreeMemory(device, memory, pAllocator);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordFreeMemory(device, memory, pAllocator);
- }
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL MapMemory(
- VkDevice device,
- VkDeviceMemory memory,
- VkDeviceSize offset,
- VkDeviceSize size,
- VkMemoryMapFlags flags,
- void** ppData) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateMapMemory(device, memory, offset, size, flags, ppData);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordMapMemory(device, memory, offset, size, flags, ppData);
- }
- VkResult result = DispatchMapMemory(device, memory, offset, size, flags, ppData);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordMapMemory(device, memory, offset, size, flags, ppData, result);
- }
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL UnmapMemory(
- VkDevice device,
- VkDeviceMemory memory) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateUnmapMemory(device, memory);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordUnmapMemory(device, memory);
- }
- DispatchUnmapMemory(device, memory);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordUnmapMemory(device, memory);
- }
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL FlushMappedMemoryRanges(
- VkDevice device,
- uint32_t memoryRangeCount,
- const VkMappedMemoryRange* pMemoryRanges) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateFlushMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordFlushMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
- }
- VkResult result = DispatchFlushMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordFlushMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL InvalidateMappedMemoryRanges(
- VkDevice device,
- uint32_t memoryRangeCount,
- const VkMappedMemoryRange* pMemoryRanges) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateInvalidateMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordInvalidateMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
- }
- VkResult result = DispatchInvalidateMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordInvalidateMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges, result);
- }
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL GetDeviceMemoryCommitment(
- VkDevice device,
- VkDeviceMemory memory,
- VkDeviceSize* pCommittedMemoryInBytes) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetDeviceMemoryCommitment(device, memory, pCommittedMemoryInBytes);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetDeviceMemoryCommitment(device, memory, pCommittedMemoryInBytes);
- }
- DispatchGetDeviceMemoryCommitment(device, memory, pCommittedMemoryInBytes);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetDeviceMemoryCommitment(device, memory, pCommittedMemoryInBytes);
- }
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL BindBufferMemory(
- VkDevice device,
- VkBuffer buffer,
- VkDeviceMemory memory,
- VkDeviceSize memoryOffset) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateBindBufferMemory(device, buffer, memory, memoryOffset);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordBindBufferMemory(device, buffer, memory, memoryOffset);
- }
- VkResult result = DispatchBindBufferMemory(device, buffer, memory, memoryOffset);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordBindBufferMemory(device, buffer, memory, memoryOffset, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL BindImageMemory(
- VkDevice device,
- VkImage image,
- VkDeviceMemory memory,
- VkDeviceSize memoryOffset) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateBindImageMemory(device, image, memory, memoryOffset);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordBindImageMemory(device, image, memory, memoryOffset);
- }
- VkResult result = DispatchBindImageMemory(device, image, memory, memoryOffset);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordBindImageMemory(device, image, memory, memoryOffset, result);
- }
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL GetBufferMemoryRequirements(
- VkDevice device,
- VkBuffer buffer,
- VkMemoryRequirements* pMemoryRequirements) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetBufferMemoryRequirements(device, buffer, pMemoryRequirements);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetBufferMemoryRequirements(device, buffer, pMemoryRequirements);
- }
- DispatchGetBufferMemoryRequirements(device, buffer, pMemoryRequirements);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetBufferMemoryRequirements(device, buffer, pMemoryRequirements);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL GetImageMemoryRequirements(
- VkDevice device,
- VkImage image,
- VkMemoryRequirements* pMemoryRequirements) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetImageMemoryRequirements(device, image, pMemoryRequirements);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetImageMemoryRequirements(device, image, pMemoryRequirements);
- }
- DispatchGetImageMemoryRequirements(device, image, pMemoryRequirements);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetImageMemoryRequirements(device, image, pMemoryRequirements);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL GetImageSparseMemoryRequirements(
- VkDevice device,
- VkImage image,
- uint32_t* pSparseMemoryRequirementCount,
- VkSparseImageMemoryRequirements* pSparseMemoryRequirements) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetImageSparseMemoryRequirements(device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetImageSparseMemoryRequirements(device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
- }
- DispatchGetImageSparseMemoryRequirements(device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetImageSparseMemoryRequirements(device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceSparseImageFormatProperties(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkImageType type,
- VkSampleCountFlagBits samples,
- VkImageUsageFlags usage,
- VkImageTiling tiling,
- uint32_t* pPropertyCount,
- VkSparseImageFormatProperties* pProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceSparseImageFormatProperties(physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceSparseImageFormatProperties(physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties);
- }
- DispatchGetPhysicalDeviceSparseImageFormatProperties(physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceSparseImageFormatProperties(physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties);
- }
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL QueueBindSparse(
- VkQueue queue,
- uint32_t bindInfoCount,
- const VkBindSparseInfo* pBindInfo,
- VkFence fence) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateQueueBindSparse(queue, bindInfoCount, pBindInfo, fence);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordQueueBindSparse(queue, bindInfoCount, pBindInfo, fence);
- }
- VkResult result = DispatchQueueBindSparse(queue, bindInfoCount, pBindInfo, fence);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordQueueBindSparse(queue, bindInfoCount, pBindInfo, fence, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateFence(
- VkDevice device,
- const VkFenceCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFence* pFence) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreateFence(device, pCreateInfo, pAllocator, pFence);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateFence(device, pCreateInfo, pAllocator, pFence);
- }
- VkResult result = DispatchCreateFence(device, pCreateInfo, pAllocator, pFence);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCreateFence(device, pCreateInfo, pAllocator, pFence, result);
- }
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL DestroyFence(
- VkDevice device,
- VkFence fence,
- const VkAllocationCallbacks* pAllocator) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateDestroyFence(device, fence, pAllocator);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordDestroyFence(device, fence, pAllocator);
- }
- DispatchDestroyFence(device, fence, pAllocator);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordDestroyFence(device, fence, pAllocator);
- }
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL ResetFences(
- VkDevice device,
- uint32_t fenceCount,
- const VkFence* pFences) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateResetFences(device, fenceCount, pFences);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordResetFences(device, fenceCount, pFences);
- }
- VkResult result = DispatchResetFences(device, fenceCount, pFences);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordResetFences(device, fenceCount, pFences, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL GetFenceStatus(
- VkDevice device,
- VkFence fence) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetFenceStatus(device, fence);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetFenceStatus(device, fence);
- }
- VkResult result = DispatchGetFenceStatus(device, fence);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetFenceStatus(device, fence, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL WaitForFences(
- VkDevice device,
- uint32_t fenceCount,
- const VkFence* pFences,
- VkBool32 waitAll,
- uint64_t timeout) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateWaitForFences(device, fenceCount, pFences, waitAll, timeout);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordWaitForFences(device, fenceCount, pFences, waitAll, timeout);
- }
- VkResult result = DispatchWaitForFences(device, fenceCount, pFences, waitAll, timeout);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordWaitForFences(device, fenceCount, pFences, waitAll, timeout, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateSemaphore(
- VkDevice device,
- const VkSemaphoreCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSemaphore* pSemaphore) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreateSemaphore(device, pCreateInfo, pAllocator, pSemaphore);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateSemaphore(device, pCreateInfo, pAllocator, pSemaphore);
- }
- VkResult result = DispatchCreateSemaphore(device, pCreateInfo, pAllocator, pSemaphore);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCreateSemaphore(device, pCreateInfo, pAllocator, pSemaphore, result);
- }
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL DestroySemaphore(
- VkDevice device,
- VkSemaphore semaphore,
- const VkAllocationCallbacks* pAllocator) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateDestroySemaphore(device, semaphore, pAllocator);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordDestroySemaphore(device, semaphore, pAllocator);
- }
- DispatchDestroySemaphore(device, semaphore, pAllocator);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordDestroySemaphore(device, semaphore, pAllocator);
- }
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateEvent(
- VkDevice device,
- const VkEventCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkEvent* pEvent) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreateEvent(device, pCreateInfo, pAllocator, pEvent);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateEvent(device, pCreateInfo, pAllocator, pEvent);
- }
- VkResult result = DispatchCreateEvent(device, pCreateInfo, pAllocator, pEvent);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCreateEvent(device, pCreateInfo, pAllocator, pEvent, result);
- }
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL DestroyEvent(
- VkDevice device,
- VkEvent event,
- const VkAllocationCallbacks* pAllocator) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateDestroyEvent(device, event, pAllocator);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordDestroyEvent(device, event, pAllocator);
- }
- DispatchDestroyEvent(device, event, pAllocator);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordDestroyEvent(device, event, pAllocator);
- }
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL GetEventStatus(
- VkDevice device,
- VkEvent event) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetEventStatus(device, event);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetEventStatus(device, event);
- }
- VkResult result = DispatchGetEventStatus(device, event);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetEventStatus(device, event, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL SetEvent(
- VkDevice device,
- VkEvent event) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateSetEvent(device, event);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordSetEvent(device, event);
- }
- VkResult result = DispatchSetEvent(device, event);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordSetEvent(device, event, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL ResetEvent(
- VkDevice device,
- VkEvent event) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateResetEvent(device, event);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordResetEvent(device, event);
- }
- VkResult result = DispatchResetEvent(device, event);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordResetEvent(device, event, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateQueryPool(
- VkDevice device,
- const VkQueryPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkQueryPool* pQueryPool) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreateQueryPool(device, pCreateInfo, pAllocator, pQueryPool);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateQueryPool(device, pCreateInfo, pAllocator, pQueryPool);
- }
- VkResult result = DispatchCreateQueryPool(device, pCreateInfo, pAllocator, pQueryPool);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCreateQueryPool(device, pCreateInfo, pAllocator, pQueryPool, result);
- }
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL DestroyQueryPool(
- VkDevice device,
- VkQueryPool queryPool,
- const VkAllocationCallbacks* pAllocator) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateDestroyQueryPool(device, queryPool, pAllocator);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordDestroyQueryPool(device, queryPool, pAllocator);
- }
- DispatchDestroyQueryPool(device, queryPool, pAllocator);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordDestroyQueryPool(device, queryPool, pAllocator);
- }
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL GetQueryPoolResults(
- VkDevice device,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- size_t dataSize,
- void* pData,
- VkDeviceSize stride,
- VkQueryResultFlags flags) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetQueryPoolResults(device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetQueryPoolResults(device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags);
- }
- VkResult result = DispatchGetQueryPoolResults(device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetQueryPoolResults(device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateBuffer(
- VkDevice device,
- const VkBufferCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkBuffer* pBuffer) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreateBuffer(device, pCreateInfo, pAllocator, pBuffer);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateBuffer(device, pCreateInfo, pAllocator, pBuffer);
- }
- VkResult result = DispatchCreateBuffer(device, pCreateInfo, pAllocator, pBuffer);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCreateBuffer(device, pCreateInfo, pAllocator, pBuffer, result);
- }
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL DestroyBuffer(
- VkDevice device,
- VkBuffer buffer,
- const VkAllocationCallbacks* pAllocator) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateDestroyBuffer(device, buffer, pAllocator);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordDestroyBuffer(device, buffer, pAllocator);
- }
- DispatchDestroyBuffer(device, buffer, pAllocator);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordDestroyBuffer(device, buffer, pAllocator);
- }
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateBufferView(
- VkDevice device,
- const VkBufferViewCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkBufferView* pView) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreateBufferView(device, pCreateInfo, pAllocator, pView);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateBufferView(device, pCreateInfo, pAllocator, pView);
- }
- VkResult result = DispatchCreateBufferView(device, pCreateInfo, pAllocator, pView);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCreateBufferView(device, pCreateInfo, pAllocator, pView, result);
- }
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL DestroyBufferView(
- VkDevice device,
- VkBufferView bufferView,
- const VkAllocationCallbacks* pAllocator) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateDestroyBufferView(device, bufferView, pAllocator);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordDestroyBufferView(device, bufferView, pAllocator);
- }
- DispatchDestroyBufferView(device, bufferView, pAllocator);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordDestroyBufferView(device, bufferView, pAllocator);
- }
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateImage(
- VkDevice device,
- const VkImageCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkImage* pImage) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreateImage(device, pCreateInfo, pAllocator, pImage);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateImage(device, pCreateInfo, pAllocator, pImage);
- }
- VkResult result = DispatchCreateImage(device, pCreateInfo, pAllocator, pImage);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCreateImage(device, pCreateInfo, pAllocator, pImage, result);
- }
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL DestroyImage(
- VkDevice device,
- VkImage image,
- const VkAllocationCallbacks* pAllocator) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateDestroyImage(device, image, pAllocator);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordDestroyImage(device, image, pAllocator);
- }
- DispatchDestroyImage(device, image, pAllocator);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordDestroyImage(device, image, pAllocator);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL GetImageSubresourceLayout(
- VkDevice device,
- VkImage image,
- const VkImageSubresource* pSubresource,
- VkSubresourceLayout* pLayout) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetImageSubresourceLayout(device, image, pSubresource, pLayout);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetImageSubresourceLayout(device, image, pSubresource, pLayout);
- }
- DispatchGetImageSubresourceLayout(device, image, pSubresource, pLayout);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetImageSubresourceLayout(device, image, pSubresource, pLayout);
- }
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateImageView(
- VkDevice device,
- const VkImageViewCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkImageView* pView) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreateImageView(device, pCreateInfo, pAllocator, pView);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateImageView(device, pCreateInfo, pAllocator, pView);
- }
- VkResult result = DispatchCreateImageView(device, pCreateInfo, pAllocator, pView);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCreateImageView(device, pCreateInfo, pAllocator, pView, result);
- }
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL DestroyImageView(
- VkDevice device,
- VkImageView imageView,
- const VkAllocationCallbacks* pAllocator) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateDestroyImageView(device, imageView, pAllocator);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordDestroyImageView(device, imageView, pAllocator);
- }
- DispatchDestroyImageView(device, imageView, pAllocator);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordDestroyImageView(device, imageView, pAllocator);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL DestroyShaderModule(
- VkDevice device,
- VkShaderModule shaderModule,
- const VkAllocationCallbacks* pAllocator) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateDestroyShaderModule(device, shaderModule, pAllocator);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordDestroyShaderModule(device, shaderModule, pAllocator);
- }
- DispatchDestroyShaderModule(device, shaderModule, pAllocator);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordDestroyShaderModule(device, shaderModule, pAllocator);
- }
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL CreatePipelineCache(
- VkDevice device,
- const VkPipelineCacheCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkPipelineCache* pPipelineCache) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreatePipelineCache(device, pCreateInfo, pAllocator, pPipelineCache);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCreatePipelineCache(device, pCreateInfo, pAllocator, pPipelineCache);
- }
- VkResult result = DispatchCreatePipelineCache(device, pCreateInfo, pAllocator, pPipelineCache);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCreatePipelineCache(device, pCreateInfo, pAllocator, pPipelineCache, result);
- }
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL DestroyPipelineCache(
- VkDevice device,
- VkPipelineCache pipelineCache,
- const VkAllocationCallbacks* pAllocator) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateDestroyPipelineCache(device, pipelineCache, pAllocator);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordDestroyPipelineCache(device, pipelineCache, pAllocator);
- }
- DispatchDestroyPipelineCache(device, pipelineCache, pAllocator);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordDestroyPipelineCache(device, pipelineCache, pAllocator);
- }
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPipelineCacheData(
- VkDevice device,
- VkPipelineCache pipelineCache,
- size_t* pDataSize,
- void* pData) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPipelineCacheData(device, pipelineCache, pDataSize, pData);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPipelineCacheData(device, pipelineCache, pDataSize, pData);
- }
- VkResult result = DispatchGetPipelineCacheData(device, pipelineCache, pDataSize, pData);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPipelineCacheData(device, pipelineCache, pDataSize, pData, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL MergePipelineCaches(
- VkDevice device,
- VkPipelineCache dstCache,
- uint32_t srcCacheCount,
- const VkPipelineCache* pSrcCaches) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateMergePipelineCaches(device, dstCache, srcCacheCount, pSrcCaches);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordMergePipelineCaches(device, dstCache, srcCacheCount, pSrcCaches);
- }
- VkResult result = DispatchMergePipelineCaches(device, dstCache, srcCacheCount, pSrcCaches);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordMergePipelineCaches(device, dstCache, srcCacheCount, pSrcCaches, result);
- }
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL DestroyPipeline(
- VkDevice device,
- VkPipeline pipeline,
- const VkAllocationCallbacks* pAllocator) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateDestroyPipeline(device, pipeline, pAllocator);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordDestroyPipeline(device, pipeline, pAllocator);
- }
- DispatchDestroyPipeline(device, pipeline, pAllocator);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordDestroyPipeline(device, pipeline, pAllocator);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL DestroyPipelineLayout(
- VkDevice device,
- VkPipelineLayout pipelineLayout,
- const VkAllocationCallbacks* pAllocator) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateDestroyPipelineLayout(device, pipelineLayout, pAllocator);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordDestroyPipelineLayout(device, pipelineLayout, pAllocator);
- }
- DispatchDestroyPipelineLayout(device, pipelineLayout, pAllocator);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordDestroyPipelineLayout(device, pipelineLayout, pAllocator);
- }
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateSampler(
- VkDevice device,
- const VkSamplerCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSampler* pSampler) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreateSampler(device, pCreateInfo, pAllocator, pSampler);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateSampler(device, pCreateInfo, pAllocator, pSampler);
- }
- VkResult result = DispatchCreateSampler(device, pCreateInfo, pAllocator, pSampler);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCreateSampler(device, pCreateInfo, pAllocator, pSampler, result);
- }
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL DestroySampler(
- VkDevice device,
- VkSampler sampler,
- const VkAllocationCallbacks* pAllocator) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateDestroySampler(device, sampler, pAllocator);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordDestroySampler(device, sampler, pAllocator);
- }
- DispatchDestroySampler(device, sampler, pAllocator);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordDestroySampler(device, sampler, pAllocator);
- }
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateDescriptorSetLayout(
- VkDevice device,
- const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorSetLayout* pSetLayout) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreateDescriptorSetLayout(device, pCreateInfo, pAllocator, pSetLayout);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateDescriptorSetLayout(device, pCreateInfo, pAllocator, pSetLayout);
- }
- VkResult result = DispatchCreateDescriptorSetLayout(device, pCreateInfo, pAllocator, pSetLayout);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCreateDescriptorSetLayout(device, pCreateInfo, pAllocator, pSetLayout, result);
- }
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL DestroyDescriptorSetLayout(
- VkDevice device,
- VkDescriptorSetLayout descriptorSetLayout,
- const VkAllocationCallbacks* pAllocator) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateDestroyDescriptorSetLayout(device, descriptorSetLayout, pAllocator);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordDestroyDescriptorSetLayout(device, descriptorSetLayout, pAllocator);
- }
- DispatchDestroyDescriptorSetLayout(device, descriptorSetLayout, pAllocator);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordDestroyDescriptorSetLayout(device, descriptorSetLayout, pAllocator);
- }
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateDescriptorPool(
- VkDevice device,
- const VkDescriptorPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorPool* pDescriptorPool) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreateDescriptorPool(device, pCreateInfo, pAllocator, pDescriptorPool);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateDescriptorPool(device, pCreateInfo, pAllocator, pDescriptorPool);
- }
- VkResult result = DispatchCreateDescriptorPool(device, pCreateInfo, pAllocator, pDescriptorPool);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCreateDescriptorPool(device, pCreateInfo, pAllocator, pDescriptorPool, result);
- }
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL DestroyDescriptorPool(
- VkDevice device,
- VkDescriptorPool descriptorPool,
- const VkAllocationCallbacks* pAllocator) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateDestroyDescriptorPool(device, descriptorPool, pAllocator);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordDestroyDescriptorPool(device, descriptorPool, pAllocator);
- }
- DispatchDestroyDescriptorPool(device, descriptorPool, pAllocator);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordDestroyDescriptorPool(device, descriptorPool, pAllocator);
- }
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL ResetDescriptorPool(
- VkDevice device,
- VkDescriptorPool descriptorPool,
- VkDescriptorPoolResetFlags flags) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateResetDescriptorPool(device, descriptorPool, flags);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordResetDescriptorPool(device, descriptorPool, flags);
- }
- VkResult result = DispatchResetDescriptorPool(device, descriptorPool, flags);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordResetDescriptorPool(device, descriptorPool, flags, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL FreeDescriptorSets(
- VkDevice device,
- VkDescriptorPool descriptorPool,
- uint32_t descriptorSetCount,
- const VkDescriptorSet* pDescriptorSets) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateFreeDescriptorSets(device, descriptorPool, descriptorSetCount, pDescriptorSets);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordFreeDescriptorSets(device, descriptorPool, descriptorSetCount, pDescriptorSets);
- }
- VkResult result = DispatchFreeDescriptorSets(device, descriptorPool, descriptorSetCount, pDescriptorSets);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordFreeDescriptorSets(device, descriptorPool, descriptorSetCount, pDescriptorSets, result);
- }
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL UpdateDescriptorSets(
- VkDevice device,
- uint32_t descriptorWriteCount,
- const VkWriteDescriptorSet* pDescriptorWrites,
- uint32_t descriptorCopyCount,
- const VkCopyDescriptorSet* pDescriptorCopies) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateUpdateDescriptorSets(device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordUpdateDescriptorSets(device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies);
- }
- DispatchUpdateDescriptorSets(device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordUpdateDescriptorSets(device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies);
- }
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateFramebuffer(
- VkDevice device,
- const VkFramebufferCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFramebuffer* pFramebuffer) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreateFramebuffer(device, pCreateInfo, pAllocator, pFramebuffer);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateFramebuffer(device, pCreateInfo, pAllocator, pFramebuffer);
- }
- VkResult result = DispatchCreateFramebuffer(device, pCreateInfo, pAllocator, pFramebuffer);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCreateFramebuffer(device, pCreateInfo, pAllocator, pFramebuffer, result);
- }
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL DestroyFramebuffer(
- VkDevice device,
- VkFramebuffer framebuffer,
- const VkAllocationCallbacks* pAllocator) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateDestroyFramebuffer(device, framebuffer, pAllocator);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordDestroyFramebuffer(device, framebuffer, pAllocator);
- }
- DispatchDestroyFramebuffer(device, framebuffer, pAllocator);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordDestroyFramebuffer(device, framebuffer, pAllocator);
- }
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateRenderPass(
- VkDevice device,
- const VkRenderPassCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkRenderPass* pRenderPass) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreateRenderPass(device, pCreateInfo, pAllocator, pRenderPass);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateRenderPass(device, pCreateInfo, pAllocator, pRenderPass);
- }
- VkResult result = DispatchCreateRenderPass(device, pCreateInfo, pAllocator, pRenderPass);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCreateRenderPass(device, pCreateInfo, pAllocator, pRenderPass, result);
- }
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL DestroyRenderPass(
- VkDevice device,
- VkRenderPass renderPass,
- const VkAllocationCallbacks* pAllocator) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateDestroyRenderPass(device, renderPass, pAllocator);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordDestroyRenderPass(device, renderPass, pAllocator);
- }
- DispatchDestroyRenderPass(device, renderPass, pAllocator);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordDestroyRenderPass(device, renderPass, pAllocator);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL GetRenderAreaGranularity(
- VkDevice device,
- VkRenderPass renderPass,
- VkExtent2D* pGranularity) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetRenderAreaGranularity(device, renderPass, pGranularity);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetRenderAreaGranularity(device, renderPass, pGranularity);
- }
- DispatchGetRenderAreaGranularity(device, renderPass, pGranularity);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetRenderAreaGranularity(device, renderPass, pGranularity);
- }
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateCommandPool(
- VkDevice device,
- const VkCommandPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkCommandPool* pCommandPool) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreateCommandPool(device, pCreateInfo, pAllocator, pCommandPool);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateCommandPool(device, pCreateInfo, pAllocator, pCommandPool);
- }
- VkResult result = DispatchCreateCommandPool(device, pCreateInfo, pAllocator, pCommandPool);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCreateCommandPool(device, pCreateInfo, pAllocator, pCommandPool, result);
- }
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL DestroyCommandPool(
- VkDevice device,
- VkCommandPool commandPool,
- const VkAllocationCallbacks* pAllocator) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateDestroyCommandPool(device, commandPool, pAllocator);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordDestroyCommandPool(device, commandPool, pAllocator);
- }
- DispatchDestroyCommandPool(device, commandPool, pAllocator);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordDestroyCommandPool(device, commandPool, pAllocator);
- }
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL ResetCommandPool(
- VkDevice device,
- VkCommandPool commandPool,
- VkCommandPoolResetFlags flags) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateResetCommandPool(device, commandPool, flags);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordResetCommandPool(device, commandPool, flags);
- }
- VkResult result = DispatchResetCommandPool(device, commandPool, flags);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordResetCommandPool(device, commandPool, flags, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL AllocateCommandBuffers(
- VkDevice device,
- const VkCommandBufferAllocateInfo* pAllocateInfo,
- VkCommandBuffer* pCommandBuffers) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateAllocateCommandBuffers(device, pAllocateInfo, pCommandBuffers);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordAllocateCommandBuffers(device, pAllocateInfo, pCommandBuffers);
- }
- VkResult result = DispatchAllocateCommandBuffers(device, pAllocateInfo, pCommandBuffers);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordAllocateCommandBuffers(device, pAllocateInfo, pCommandBuffers, result);
- }
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL FreeCommandBuffers(
- VkDevice device,
- VkCommandPool commandPool,
- uint32_t commandBufferCount,
- const VkCommandBuffer* pCommandBuffers) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateFreeCommandBuffers(device, commandPool, commandBufferCount, pCommandBuffers);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordFreeCommandBuffers(device, commandPool, commandBufferCount, pCommandBuffers);
- }
- DispatchFreeCommandBuffers(device, commandPool, commandBufferCount, pCommandBuffers);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordFreeCommandBuffers(device, commandPool, commandBufferCount, pCommandBuffers);
- }
-}
-
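// Editorial note (not part of the generated source): from BeginCommandBuffer
// onward the removed wrappers operate on VkCommandBuffer handles, so layer_data
// is looked up with get_dispatch_key(commandBuffer) rather than the VkDevice
// key used by the device-level entry points above; the validate/record/dispatch
// sequence itself is unchanged.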
-VKAPI_ATTR VkResult VKAPI_CALL BeginCommandBuffer(
- VkCommandBuffer commandBuffer,
- const VkCommandBufferBeginInfo* pBeginInfo) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateBeginCommandBuffer(commandBuffer, pBeginInfo);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordBeginCommandBuffer(commandBuffer, pBeginInfo);
- }
- VkResult result = DispatchBeginCommandBuffer(commandBuffer, pBeginInfo);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordBeginCommandBuffer(commandBuffer, pBeginInfo, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL EndCommandBuffer(
- VkCommandBuffer commandBuffer) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateEndCommandBuffer(commandBuffer);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordEndCommandBuffer(commandBuffer);
- }
- VkResult result = DispatchEndCommandBuffer(commandBuffer);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordEndCommandBuffer(commandBuffer, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL ResetCommandBuffer(
- VkCommandBuffer commandBuffer,
- VkCommandBufferResetFlags flags) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateResetCommandBuffer(commandBuffer, flags);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordResetCommandBuffer(commandBuffer, flags);
- }
- VkResult result = DispatchResetCommandBuffer(commandBuffer, flags);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordResetCommandBuffer(commandBuffer, flags, result);
- }
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdBindPipeline(
- VkCommandBuffer commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipeline pipeline) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdBindPipeline(commandBuffer, pipelineBindPoint, pipeline);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdBindPipeline(commandBuffer, pipelineBindPoint, pipeline);
- }
- DispatchCmdBindPipeline(commandBuffer, pipelineBindPoint, pipeline);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdBindPipeline(commandBuffer, pipelineBindPoint, pipeline);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdSetViewport(
- VkCommandBuffer commandBuffer,
- uint32_t firstViewport,
- uint32_t viewportCount,
- const VkViewport* pViewports) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdSetViewport(commandBuffer, firstViewport, viewportCount, pViewports);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdSetViewport(commandBuffer, firstViewport, viewportCount, pViewports);
- }
- DispatchCmdSetViewport(commandBuffer, firstViewport, viewportCount, pViewports);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdSetViewport(commandBuffer, firstViewport, viewportCount, pViewports);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdSetScissor(
- VkCommandBuffer commandBuffer,
- uint32_t firstScissor,
- uint32_t scissorCount,
- const VkRect2D* pScissors) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdSetScissor(commandBuffer, firstScissor, scissorCount, pScissors);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdSetScissor(commandBuffer, firstScissor, scissorCount, pScissors);
- }
- DispatchCmdSetScissor(commandBuffer, firstScissor, scissorCount, pScissors);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdSetScissor(commandBuffer, firstScissor, scissorCount, pScissors);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdSetLineWidth(
- VkCommandBuffer commandBuffer,
- float lineWidth) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdSetLineWidth(commandBuffer, lineWidth);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdSetLineWidth(commandBuffer, lineWidth);
- }
- DispatchCmdSetLineWidth(commandBuffer, lineWidth);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdSetLineWidth(commandBuffer, lineWidth);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdSetDepthBias(
- VkCommandBuffer commandBuffer,
- float depthBiasConstantFactor,
- float depthBiasClamp,
- float depthBiasSlopeFactor) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdSetDepthBias(commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdSetDepthBias(commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor);
- }
- DispatchCmdSetDepthBias(commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdSetDepthBias(commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdSetBlendConstants(
- VkCommandBuffer commandBuffer,
- const float blendConstants[4]) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdSetBlendConstants(commandBuffer, blendConstants);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdSetBlendConstants(commandBuffer, blendConstants);
- }
- DispatchCmdSetBlendConstants(commandBuffer, blendConstants);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdSetBlendConstants(commandBuffer, blendConstants);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdSetDepthBounds(
- VkCommandBuffer commandBuffer,
- float minDepthBounds,
- float maxDepthBounds) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdSetDepthBounds(commandBuffer, minDepthBounds, maxDepthBounds);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdSetDepthBounds(commandBuffer, minDepthBounds, maxDepthBounds);
- }
- DispatchCmdSetDepthBounds(commandBuffer, minDepthBounds, maxDepthBounds);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdSetDepthBounds(commandBuffer, minDepthBounds, maxDepthBounds);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdSetStencilCompareMask(
- VkCommandBuffer commandBuffer,
- VkStencilFaceFlags faceMask,
- uint32_t compareMask) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdSetStencilCompareMask(commandBuffer, faceMask, compareMask);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdSetStencilCompareMask(commandBuffer, faceMask, compareMask);
- }
- DispatchCmdSetStencilCompareMask(commandBuffer, faceMask, compareMask);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdSetStencilCompareMask(commandBuffer, faceMask, compareMask);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdSetStencilWriteMask(
- VkCommandBuffer commandBuffer,
- VkStencilFaceFlags faceMask,
- uint32_t writeMask) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdSetStencilWriteMask(commandBuffer, faceMask, writeMask);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdSetStencilWriteMask(commandBuffer, faceMask, writeMask);
- }
- DispatchCmdSetStencilWriteMask(commandBuffer, faceMask, writeMask);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdSetStencilWriteMask(commandBuffer, faceMask, writeMask);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdSetStencilReference(
- VkCommandBuffer commandBuffer,
- VkStencilFaceFlags faceMask,
- uint32_t reference) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdSetStencilReference(commandBuffer, faceMask, reference);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdSetStencilReference(commandBuffer, faceMask, reference);
- }
- DispatchCmdSetStencilReference(commandBuffer, faceMask, reference);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdSetStencilReference(commandBuffer, faceMask, reference);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdBindDescriptorSets(
- VkCommandBuffer commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipelineLayout layout,
- uint32_t firstSet,
- uint32_t descriptorSetCount,
- const VkDescriptorSet* pDescriptorSets,
- uint32_t dynamicOffsetCount,
- const uint32_t* pDynamicOffsets) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdBindDescriptorSets(commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdBindDescriptorSets(commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets);
- }
- DispatchCmdBindDescriptorSets(commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdBindDescriptorSets(commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdBindIndexBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkIndexType indexType) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdBindIndexBuffer(commandBuffer, buffer, offset, indexType);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdBindIndexBuffer(commandBuffer, buffer, offset, indexType);
- }
- DispatchCmdBindIndexBuffer(commandBuffer, buffer, offset, indexType);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdBindIndexBuffer(commandBuffer, buffer, offset, indexType);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdBindVertexBuffers(
- VkCommandBuffer commandBuffer,
- uint32_t firstBinding,
- uint32_t bindingCount,
- const VkBuffer* pBuffers,
- const VkDeviceSize* pOffsets) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdBindVertexBuffers(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdBindVertexBuffers(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets);
- }
- DispatchCmdBindVertexBuffers(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdBindVertexBuffers(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdDraw(
- VkCommandBuffer commandBuffer,
- uint32_t vertexCount,
- uint32_t instanceCount,
- uint32_t firstVertex,
- uint32_t firstInstance) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdDraw(commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdDraw(commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance);
- }
- DispatchCmdDraw(commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdDraw(commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdDrawIndexed(
- VkCommandBuffer commandBuffer,
- uint32_t indexCount,
- uint32_t instanceCount,
- uint32_t firstIndex,
- int32_t vertexOffset,
- uint32_t firstInstance) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdDrawIndexed(commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdDrawIndexed(commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
- }
- DispatchCmdDrawIndexed(commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdDrawIndexed(commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdDrawIndirect(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- uint32_t drawCount,
- uint32_t stride) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdDrawIndirect(commandBuffer, buffer, offset, drawCount, stride);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdDrawIndirect(commandBuffer, buffer, offset, drawCount, stride);
- }
- DispatchCmdDrawIndirect(commandBuffer, buffer, offset, drawCount, stride);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdDrawIndirect(commandBuffer, buffer, offset, drawCount, stride);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdDrawIndexedIndirect(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- uint32_t drawCount,
- uint32_t stride) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdDrawIndexedIndirect(commandBuffer, buffer, offset, drawCount, stride);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdDrawIndexedIndirect(commandBuffer, buffer, offset, drawCount, stride);
- }
- DispatchCmdDrawIndexedIndirect(commandBuffer, buffer, offset, drawCount, stride);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdDrawIndexedIndirect(commandBuffer, buffer, offset, drawCount, stride);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdDispatch(
- VkCommandBuffer commandBuffer,
- uint32_t groupCountX,
- uint32_t groupCountY,
- uint32_t groupCountZ) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdDispatch(commandBuffer, groupCountX, groupCountY, groupCountZ);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdDispatch(commandBuffer, groupCountX, groupCountY, groupCountZ);
- }
- DispatchCmdDispatch(commandBuffer, groupCountX, groupCountY, groupCountZ);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdDispatch(commandBuffer, groupCountX, groupCountY, groupCountZ);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdDispatchIndirect(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdDispatchIndirect(commandBuffer, buffer, offset);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdDispatchIndirect(commandBuffer, buffer, offset);
- }
- DispatchCmdDispatchIndirect(commandBuffer, buffer, offset);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdDispatchIndirect(commandBuffer, buffer, offset);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdCopyBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer srcBuffer,
- VkBuffer dstBuffer,
- uint32_t regionCount,
- const VkBufferCopy* pRegions) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdCopyBuffer(commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdCopyBuffer(commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions);
- }
- DispatchCmdCopyBuffer(commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdCopyBuffer(commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdCopyImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkImageCopy* pRegions) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdCopyImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdCopyImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
- }
- DispatchCmdCopyImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdCopyImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdBlitImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkImageBlit* pRegions,
- VkFilter filter) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdBlitImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdBlitImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter);
- }
- DispatchCmdBlitImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdBlitImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdCopyBufferToImage(
- VkCommandBuffer commandBuffer,
- VkBuffer srcBuffer,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkBufferImageCopy* pRegions) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions);
- }
- DispatchCmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdCopyImageToBuffer(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkBuffer dstBuffer,
- uint32_t regionCount,
- const VkBufferImageCopy* pRegions) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions);
- }
- DispatchCmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdUpdateBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize dataSize,
- const void* pData) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdUpdateBuffer(commandBuffer, dstBuffer, dstOffset, dataSize, pData);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdUpdateBuffer(commandBuffer, dstBuffer, dstOffset, dataSize, pData);
- }
- DispatchCmdUpdateBuffer(commandBuffer, dstBuffer, dstOffset, dataSize, pData);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdUpdateBuffer(commandBuffer, dstBuffer, dstOffset, dataSize, pData);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdFillBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize size,
- uint32_t data) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdFillBuffer(commandBuffer, dstBuffer, dstOffset, size, data);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdFillBuffer(commandBuffer, dstBuffer, dstOffset, size, data);
- }
- DispatchCmdFillBuffer(commandBuffer, dstBuffer, dstOffset, size, data);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdFillBuffer(commandBuffer, dstBuffer, dstOffset, size, data);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdClearColorImage(
- VkCommandBuffer commandBuffer,
- VkImage image,
- VkImageLayout imageLayout,
- const VkClearColorValue* pColor,
- uint32_t rangeCount,
- const VkImageSubresourceRange* pRanges) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdClearColorImage(commandBuffer, image, imageLayout, pColor, rangeCount, pRanges);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdClearColorImage(commandBuffer, image, imageLayout, pColor, rangeCount, pRanges);
- }
- DispatchCmdClearColorImage(commandBuffer, image, imageLayout, pColor, rangeCount, pRanges);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdClearColorImage(commandBuffer, image, imageLayout, pColor, rangeCount, pRanges);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdClearDepthStencilImage(
- VkCommandBuffer commandBuffer,
- VkImage image,
- VkImageLayout imageLayout,
- const VkClearDepthStencilValue* pDepthStencil,
- uint32_t rangeCount,
- const VkImageSubresourceRange* pRanges) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdClearDepthStencilImage(commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdClearDepthStencilImage(commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges);
- }
- DispatchCmdClearDepthStencilImage(commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdClearDepthStencilImage(commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdClearAttachments(
- VkCommandBuffer commandBuffer,
- uint32_t attachmentCount,
- const VkClearAttachment* pAttachments,
- uint32_t rectCount,
- const VkClearRect* pRects) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdClearAttachments(commandBuffer, attachmentCount, pAttachments, rectCount, pRects);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdClearAttachments(commandBuffer, attachmentCount, pAttachments, rectCount, pRects);
- }
- DispatchCmdClearAttachments(commandBuffer, attachmentCount, pAttachments, rectCount, pRects);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdClearAttachments(commandBuffer, attachmentCount, pAttachments, rectCount, pRects);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdResolveImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkImageResolve* pRegions) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdResolveImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdResolveImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
- }
- DispatchCmdResolveImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdResolveImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdSetEvent(
- VkCommandBuffer commandBuffer,
- VkEvent event,
- VkPipelineStageFlags stageMask) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdSetEvent(commandBuffer, event, stageMask);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdSetEvent(commandBuffer, event, stageMask);
- }
- DispatchCmdSetEvent(commandBuffer, event, stageMask);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdSetEvent(commandBuffer, event, stageMask);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdResetEvent(
- VkCommandBuffer commandBuffer,
- VkEvent event,
- VkPipelineStageFlags stageMask) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdResetEvent(commandBuffer, event, stageMask);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdResetEvent(commandBuffer, event, stageMask);
- }
- DispatchCmdResetEvent(commandBuffer, event, stageMask);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdResetEvent(commandBuffer, event, stageMask);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdWaitEvents(
- VkCommandBuffer commandBuffer,
- uint32_t eventCount,
- const VkEvent* pEvents,
- VkPipelineStageFlags srcStageMask,
- VkPipelineStageFlags dstStageMask,
- uint32_t memoryBarrierCount,
- const VkMemoryBarrier* pMemoryBarriers,
- uint32_t bufferMemoryBarrierCount,
- const VkBufferMemoryBarrier* pBufferMemoryBarriers,
- uint32_t imageMemoryBarrierCount,
- const VkImageMemoryBarrier* pImageMemoryBarriers) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdWaitEvents(commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdWaitEvents(commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
- }
- DispatchCmdWaitEvents(commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdWaitEvents(commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdPipelineBarrier(
- VkCommandBuffer commandBuffer,
- VkPipelineStageFlags srcStageMask,
- VkPipelineStageFlags dstStageMask,
- VkDependencyFlags dependencyFlags,
- uint32_t memoryBarrierCount,
- const VkMemoryBarrier* pMemoryBarriers,
- uint32_t bufferMemoryBarrierCount,
- const VkBufferMemoryBarrier* pBufferMemoryBarriers,
- uint32_t imageMemoryBarrierCount,
- const VkImageMemoryBarrier* pImageMemoryBarriers) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
- }
- DispatchCmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdBeginQuery(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query,
- VkQueryControlFlags flags) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdBeginQuery(commandBuffer, queryPool, query, flags);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdBeginQuery(commandBuffer, queryPool, query, flags);
- }
- DispatchCmdBeginQuery(commandBuffer, queryPool, query, flags);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdBeginQuery(commandBuffer, queryPool, query, flags);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdEndQuery(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdEndQuery(commandBuffer, queryPool, query);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdEndQuery(commandBuffer, queryPool, query);
- }
- DispatchCmdEndQuery(commandBuffer, queryPool, query);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdEndQuery(commandBuffer, queryPool, query);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdResetQueryPool(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdResetQueryPool(commandBuffer, queryPool, firstQuery, queryCount);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdResetQueryPool(commandBuffer, queryPool, firstQuery, queryCount);
- }
- DispatchCmdResetQueryPool(commandBuffer, queryPool, firstQuery, queryCount);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdResetQueryPool(commandBuffer, queryPool, firstQuery, queryCount);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdWriteTimestamp(
- VkCommandBuffer commandBuffer,
- VkPipelineStageFlagBits pipelineStage,
- VkQueryPool queryPool,
- uint32_t query) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdWriteTimestamp(commandBuffer, pipelineStage, queryPool, query);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdWriteTimestamp(commandBuffer, pipelineStage, queryPool, query);
- }
- DispatchCmdWriteTimestamp(commandBuffer, pipelineStage, queryPool, query);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdWriteTimestamp(commandBuffer, pipelineStage, queryPool, query);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdCopyQueryPoolResults(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize stride,
- VkQueryResultFlags flags) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdCopyQueryPoolResults(commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdCopyQueryPoolResults(commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags);
- }
- DispatchCmdCopyQueryPoolResults(commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdCopyQueryPoolResults(commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdPushConstants(
- VkCommandBuffer commandBuffer,
- VkPipelineLayout layout,
- VkShaderStageFlags stageFlags,
- uint32_t offset,
- uint32_t size,
- const void* pValues) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdPushConstants(commandBuffer, layout, stageFlags, offset, size, pValues);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdPushConstants(commandBuffer, layout, stageFlags, offset, size, pValues);
- }
- DispatchCmdPushConstants(commandBuffer, layout, stageFlags, offset, size, pValues);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdPushConstants(commandBuffer, layout, stageFlags, offset, size, pValues);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdBeginRenderPass(
- VkCommandBuffer commandBuffer,
- const VkRenderPassBeginInfo* pRenderPassBegin,
- VkSubpassContents contents) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdBeginRenderPass(commandBuffer, pRenderPassBegin, contents);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdBeginRenderPass(commandBuffer, pRenderPassBegin, contents);
- }
- DispatchCmdBeginRenderPass(commandBuffer, pRenderPassBegin, contents);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdBeginRenderPass(commandBuffer, pRenderPassBegin, contents);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdNextSubpass(
- VkCommandBuffer commandBuffer,
- VkSubpassContents contents) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdNextSubpass(commandBuffer, contents);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdNextSubpass(commandBuffer, contents);
- }
- DispatchCmdNextSubpass(commandBuffer, contents);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdNextSubpass(commandBuffer, contents);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdEndRenderPass(
- VkCommandBuffer commandBuffer) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdEndRenderPass(commandBuffer);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdEndRenderPass(commandBuffer);
- }
- DispatchCmdEndRenderPass(commandBuffer);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdEndRenderPass(commandBuffer);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdExecuteCommands(
- VkCommandBuffer commandBuffer,
- uint32_t commandBufferCount,
- const VkCommandBuffer* pCommandBuffers) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdExecuteCommands(commandBuffer, commandBufferCount, pCommandBuffers);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdExecuteCommands(commandBuffer, commandBufferCount, pCommandBuffers);
- }
- DispatchCmdExecuteCommands(commandBuffer, commandBufferCount, pCommandBuffers);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdExecuteCommands(commandBuffer, commandBufferCount, pCommandBuffers);
- }
-}
-
-
-VKAPI_ATTR VkResult VKAPI_CALL BindBufferMemory2(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindBufferMemoryInfo* pBindInfos) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateBindBufferMemory2(device, bindInfoCount, pBindInfos);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordBindBufferMemory2(device, bindInfoCount, pBindInfos);
- }
- VkResult result = DispatchBindBufferMemory2(device, bindInfoCount, pBindInfos);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordBindBufferMemory2(device, bindInfoCount, pBindInfos, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL BindImageMemory2(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindImageMemoryInfo* pBindInfos) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateBindImageMemory2(device, bindInfoCount, pBindInfos);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordBindImageMemory2(device, bindInfoCount, pBindInfos);
- }
- VkResult result = DispatchBindImageMemory2(device, bindInfoCount, pBindInfos);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordBindImageMemory2(device, bindInfoCount, pBindInfos, result);
- }
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL GetDeviceGroupPeerMemoryFeatures(
- VkDevice device,
- uint32_t heapIndex,
- uint32_t localDeviceIndex,
- uint32_t remoteDeviceIndex,
- VkPeerMemoryFeatureFlags* pPeerMemoryFeatures) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetDeviceGroupPeerMemoryFeatures(device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetDeviceGroupPeerMemoryFeatures(device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures);
- }
- DispatchGetDeviceGroupPeerMemoryFeatures(device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetDeviceGroupPeerMemoryFeatures(device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdSetDeviceMask(
- VkCommandBuffer commandBuffer,
- uint32_t deviceMask) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdSetDeviceMask(commandBuffer, deviceMask);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdSetDeviceMask(commandBuffer, deviceMask);
- }
- DispatchCmdSetDeviceMask(commandBuffer, deviceMask);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdSetDeviceMask(commandBuffer, deviceMask);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdDispatchBase(
- VkCommandBuffer commandBuffer,
- uint32_t baseGroupX,
- uint32_t baseGroupY,
- uint32_t baseGroupZ,
- uint32_t groupCountX,
- uint32_t groupCountY,
- uint32_t groupCountZ) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdDispatchBase(commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdDispatchBase(commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ);
- }
- DispatchCmdDispatchBase(commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdDispatchBase(commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ);
- }
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL EnumeratePhysicalDeviceGroups(
- VkInstance instance,
- uint32_t* pPhysicalDeviceGroupCount,
- VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateEnumeratePhysicalDeviceGroups(instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordEnumeratePhysicalDeviceGroups(instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
- }
- VkResult result = DispatchEnumeratePhysicalDeviceGroups(instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordEnumeratePhysicalDeviceGroups(instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties, result);
- }
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL GetImageMemoryRequirements2(
- VkDevice device,
- const VkImageMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetImageMemoryRequirements2(device, pInfo, pMemoryRequirements);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetImageMemoryRequirements2(device, pInfo, pMemoryRequirements);
- }
- DispatchGetImageMemoryRequirements2(device, pInfo, pMemoryRequirements);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetImageMemoryRequirements2(device, pInfo, pMemoryRequirements);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL GetBufferMemoryRequirements2(
- VkDevice device,
- const VkBufferMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetBufferMemoryRequirements2(device, pInfo, pMemoryRequirements);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetBufferMemoryRequirements2(device, pInfo, pMemoryRequirements);
- }
- DispatchGetBufferMemoryRequirements2(device, pInfo, pMemoryRequirements);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetBufferMemoryRequirements2(device, pInfo, pMemoryRequirements);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL GetImageSparseMemoryRequirements2(
- VkDevice device,
- const VkImageSparseMemoryRequirementsInfo2* pInfo,
- uint32_t* pSparseMemoryRequirementCount,
- VkSparseImageMemoryRequirements2* pSparseMemoryRequirements) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetImageSparseMemoryRequirements2(device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetImageSparseMemoryRequirements2(device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
- }
- DispatchGetImageSparseMemoryRequirements2(device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetImageSparseMemoryRequirements2(device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFeatures2(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceFeatures2* pFeatures) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceFeatures2(physicalDevice, pFeatures);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceFeatures2(physicalDevice, pFeatures);
- }
- DispatchGetPhysicalDeviceFeatures2(physicalDevice, pFeatures);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceFeatures2(physicalDevice, pFeatures);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceProperties2(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceProperties2* pProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceProperties2(physicalDevice, pProperties);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceProperties2(physicalDevice, pProperties);
- }
- DispatchGetPhysicalDeviceProperties2(physicalDevice, pProperties);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceProperties2(physicalDevice, pProperties);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFormatProperties2(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkFormatProperties2* pFormatProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceFormatProperties2(physicalDevice, format, pFormatProperties);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceFormatProperties2(physicalDevice, format, pFormatProperties);
- }
- DispatchGetPhysicalDeviceFormatProperties2(physicalDevice, format, pFormatProperties);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceFormatProperties2(physicalDevice, format, pFormatProperties);
- }
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceImageFormatProperties2(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
- VkImageFormatProperties2* pImageFormatProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceImageFormatProperties2(physicalDevice, pImageFormatInfo, pImageFormatProperties);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceImageFormatProperties2(physicalDevice, pImageFormatInfo, pImageFormatProperties);
- }
- VkResult result = DispatchGetPhysicalDeviceImageFormatProperties2(physicalDevice, pImageFormatInfo, pImageFormatProperties);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceImageFormatProperties2(physicalDevice, pImageFormatInfo, pImageFormatProperties, result);
- }
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceQueueFamilyProperties2(
- VkPhysicalDevice physicalDevice,
- uint32_t* pQueueFamilyPropertyCount,
- VkQueueFamilyProperties2* pQueueFamilyProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceQueueFamilyProperties2(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceQueueFamilyProperties2(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
- }
- DispatchGetPhysicalDeviceQueueFamilyProperties2(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceQueueFamilyProperties2(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceMemoryProperties2(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceMemoryProperties2* pMemoryProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceMemoryProperties2(physicalDevice, pMemoryProperties);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceMemoryProperties2(physicalDevice, pMemoryProperties);
- }
- DispatchGetPhysicalDeviceMemoryProperties2(physicalDevice, pMemoryProperties);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceMemoryProperties2(physicalDevice, pMemoryProperties);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceSparseImageFormatProperties2(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo,
- uint32_t* pPropertyCount,
- VkSparseImageFormatProperties2* pProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceSparseImageFormatProperties2(physicalDevice, pFormatInfo, pPropertyCount, pProperties);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceSparseImageFormatProperties2(physicalDevice, pFormatInfo, pPropertyCount, pProperties);
- }
- DispatchGetPhysicalDeviceSparseImageFormatProperties2(physicalDevice, pFormatInfo, pPropertyCount, pProperties);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceSparseImageFormatProperties2(physicalDevice, pFormatInfo, pPropertyCount, pProperties);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL TrimCommandPool(
- VkDevice device,
- VkCommandPool commandPool,
- VkCommandPoolTrimFlags flags) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateTrimCommandPool(device, commandPool, flags);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordTrimCommandPool(device, commandPool, flags);
- }
- DispatchTrimCommandPool(device, commandPool, flags);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordTrimCommandPool(device, commandPool, flags);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL GetDeviceQueue2(
- VkDevice device,
- const VkDeviceQueueInfo2* pQueueInfo,
- VkQueue* pQueue) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetDeviceQueue2(device, pQueueInfo, pQueue);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetDeviceQueue2(device, pQueueInfo, pQueue);
- }
- DispatchGetDeviceQueue2(device, pQueueInfo, pQueue);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetDeviceQueue2(device, pQueueInfo, pQueue);
- }
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateSamplerYcbcrConversion(
- VkDevice device,
- const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSamplerYcbcrConversion* pYcbcrConversion) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreateSamplerYcbcrConversion(device, pCreateInfo, pAllocator, pYcbcrConversion);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateSamplerYcbcrConversion(device, pCreateInfo, pAllocator, pYcbcrConversion);
- }
- VkResult result = DispatchCreateSamplerYcbcrConversion(device, pCreateInfo, pAllocator, pYcbcrConversion);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCreateSamplerYcbcrConversion(device, pCreateInfo, pAllocator, pYcbcrConversion, result);
- }
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL DestroySamplerYcbcrConversion(
- VkDevice device,
- VkSamplerYcbcrConversion ycbcrConversion,
- const VkAllocationCallbacks* pAllocator) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateDestroySamplerYcbcrConversion(device, ycbcrConversion, pAllocator);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordDestroySamplerYcbcrConversion(device, ycbcrConversion, pAllocator);
- }
- DispatchDestroySamplerYcbcrConversion(device, ycbcrConversion, pAllocator);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordDestroySamplerYcbcrConversion(device, ycbcrConversion, pAllocator);
- }
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateDescriptorUpdateTemplate(
- VkDevice device,
- const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreateDescriptorUpdateTemplate(device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateDescriptorUpdateTemplate(device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate);
- }
- VkResult result = DispatchCreateDescriptorUpdateTemplate(device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCreateDescriptorUpdateTemplate(device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate, result);
- }
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL DestroyDescriptorUpdateTemplate(
- VkDevice device,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const VkAllocationCallbacks* pAllocator) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateDestroyDescriptorUpdateTemplate(device, descriptorUpdateTemplate, pAllocator);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordDestroyDescriptorUpdateTemplate(device, descriptorUpdateTemplate, pAllocator);
- }
- DispatchDestroyDescriptorUpdateTemplate(device, descriptorUpdateTemplate, pAllocator);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordDestroyDescriptorUpdateTemplate(device, descriptorUpdateTemplate, pAllocator);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL UpdateDescriptorSetWithTemplate(
- VkDevice device,
- VkDescriptorSet descriptorSet,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const void* pData) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateUpdateDescriptorSetWithTemplate(device, descriptorSet, descriptorUpdateTemplate, pData);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordUpdateDescriptorSetWithTemplate(device, descriptorSet, descriptorUpdateTemplate, pData);
- }
- DispatchUpdateDescriptorSetWithTemplate(device, descriptorSet, descriptorUpdateTemplate, pData);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordUpdateDescriptorSetWithTemplate(device, descriptorSet, descriptorUpdateTemplate, pData);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceExternalBufferProperties(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo,
- VkExternalBufferProperties* pExternalBufferProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceExternalBufferProperties(physicalDevice, pExternalBufferInfo, pExternalBufferProperties);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceExternalBufferProperties(physicalDevice, pExternalBufferInfo, pExternalBufferProperties);
- }
- DispatchGetPhysicalDeviceExternalBufferProperties(physicalDevice, pExternalBufferInfo, pExternalBufferProperties);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceExternalBufferProperties(physicalDevice, pExternalBufferInfo, pExternalBufferProperties);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceExternalFenceProperties(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo,
- VkExternalFenceProperties* pExternalFenceProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceExternalFenceProperties(physicalDevice, pExternalFenceInfo, pExternalFenceProperties);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceExternalFenceProperties(physicalDevice, pExternalFenceInfo, pExternalFenceProperties);
- }
- DispatchGetPhysicalDeviceExternalFenceProperties(physicalDevice, pExternalFenceInfo, pExternalFenceProperties);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceExternalFenceProperties(physicalDevice, pExternalFenceInfo, pExternalFenceProperties);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceExternalSemaphoreProperties(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
- VkExternalSemaphoreProperties* pExternalSemaphoreProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceExternalSemaphoreProperties(physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceExternalSemaphoreProperties(physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
- }
- DispatchGetPhysicalDeviceExternalSemaphoreProperties(physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceExternalSemaphoreProperties(physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL GetDescriptorSetLayoutSupport(
- VkDevice device,
- const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
- VkDescriptorSetLayoutSupport* pSupport) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetDescriptorSetLayoutSupport(device, pCreateInfo, pSupport);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetDescriptorSetLayoutSupport(device, pCreateInfo, pSupport);
- }
- DispatchGetDescriptorSetLayoutSupport(device, pCreateInfo, pSupport);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetDescriptorSetLayoutSupport(device, pCreateInfo, pSupport);
- }
-}
-
-
-VKAPI_ATTR void VKAPI_CALL DestroySurfaceKHR(
- VkInstance instance,
- VkSurfaceKHR surface,
- const VkAllocationCallbacks* pAllocator) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateDestroySurfaceKHR(instance, surface, pAllocator);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordDestroySurfaceKHR(instance, surface, pAllocator);
- }
- DispatchDestroySurfaceKHR(instance, surface, pAllocator);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordDestroySurfaceKHR(instance, surface, pAllocator);
- }
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex,
- VkSurfaceKHR surface,
- VkBool32* pSupported) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceSurfaceSupportKHR(physicalDevice, queueFamilyIndex, surface, pSupported);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceSurfaceSupportKHR(physicalDevice, queueFamilyIndex, surface, pSupported);
- }
- VkResult result = DispatchGetPhysicalDeviceSurfaceSupportKHR(physicalDevice, queueFamilyIndex, surface, pSupported);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceSurfaceSupportKHR(physicalDevice, queueFamilyIndex, surface, pSupported, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceCapabilitiesKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- VkSurfaceCapabilitiesKHR* pSurfaceCapabilities) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, surface, pSurfaceCapabilities);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, surface, pSurfaceCapabilities);
- }
- VkResult result = DispatchGetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, surface, pSurfaceCapabilities);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, surface, pSurfaceCapabilities, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceFormatsKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- uint32_t* pSurfaceFormatCount,
- VkSurfaceFormatKHR* pSurfaceFormats) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats);
- }
- VkResult result = DispatchGetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfacePresentModesKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- uint32_t* pPresentModeCount,
- VkPresentModeKHR* pPresentModes) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceSurfacePresentModesKHR(physicalDevice, surface, pPresentModeCount, pPresentModes);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceSurfacePresentModesKHR(physicalDevice, surface, pPresentModeCount, pPresentModes);
- }
- VkResult result = DispatchGetPhysicalDeviceSurfacePresentModesKHR(physicalDevice, surface, pPresentModeCount, pPresentModes);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceSurfacePresentModesKHR(physicalDevice, surface, pPresentModeCount, pPresentModes, result);
- }
- return result;
-}
-
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateSwapchainKHR(
- VkDevice device,
- const VkSwapchainCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSwapchainKHR* pSwapchain) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreateSwapchainKHR(device, pCreateInfo, pAllocator, pSwapchain);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateSwapchainKHR(device, pCreateInfo, pAllocator, pSwapchain);
- }
- VkResult result = DispatchCreateSwapchainKHR(device, pCreateInfo, pAllocator, pSwapchain);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCreateSwapchainKHR(device, pCreateInfo, pAllocator, pSwapchain, result);
- }
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL DestroySwapchainKHR(
- VkDevice device,
- VkSwapchainKHR swapchain,
- const VkAllocationCallbacks* pAllocator) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateDestroySwapchainKHR(device, swapchain, pAllocator);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordDestroySwapchainKHR(device, swapchain, pAllocator);
- }
- DispatchDestroySwapchainKHR(device, swapchain, pAllocator);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordDestroySwapchainKHR(device, swapchain, pAllocator);
- }
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL GetSwapchainImagesKHR(
- VkDevice device,
- VkSwapchainKHR swapchain,
- uint32_t* pSwapchainImageCount,
- VkImage* pSwapchainImages) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetSwapchainImagesKHR(device, swapchain, pSwapchainImageCount, pSwapchainImages);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetSwapchainImagesKHR(device, swapchain, pSwapchainImageCount, pSwapchainImages);
- }
- VkResult result = DispatchGetSwapchainImagesKHR(device, swapchain, pSwapchainImageCount, pSwapchainImages);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetSwapchainImagesKHR(device, swapchain, pSwapchainImageCount, pSwapchainImages, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL AcquireNextImageKHR(
- VkDevice device,
- VkSwapchainKHR swapchain,
- uint64_t timeout,
- VkSemaphore semaphore,
- VkFence fence,
- uint32_t* pImageIndex) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateAcquireNextImageKHR(device, swapchain, timeout, semaphore, fence, pImageIndex);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordAcquireNextImageKHR(device, swapchain, timeout, semaphore, fence, pImageIndex);
- }
- VkResult result = DispatchAcquireNextImageKHR(device, swapchain, timeout, semaphore, fence, pImageIndex);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordAcquireNextImageKHR(device, swapchain, timeout, semaphore, fence, pImageIndex, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL QueuePresentKHR(
- VkQueue queue,
- const VkPresentInfoKHR* pPresentInfo) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateQueuePresentKHR(queue, pPresentInfo);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordQueuePresentKHR(queue, pPresentInfo);
- }
- VkResult result = DispatchQueuePresentKHR(queue, pPresentInfo);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordQueuePresentKHR(queue, pPresentInfo, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL GetDeviceGroupPresentCapabilitiesKHR(
- VkDevice device,
- VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetDeviceGroupPresentCapabilitiesKHR(device, pDeviceGroupPresentCapabilities);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetDeviceGroupPresentCapabilitiesKHR(device, pDeviceGroupPresentCapabilities);
- }
- VkResult result = DispatchGetDeviceGroupPresentCapabilitiesKHR(device, pDeviceGroupPresentCapabilities);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetDeviceGroupPresentCapabilitiesKHR(device, pDeviceGroupPresentCapabilities, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL GetDeviceGroupSurfacePresentModesKHR(
- VkDevice device,
- VkSurfaceKHR surface,
- VkDeviceGroupPresentModeFlagsKHR* pModes) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetDeviceGroupSurfacePresentModesKHR(device, surface, pModes);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetDeviceGroupSurfacePresentModesKHR(device, surface, pModes);
- }
- VkResult result = DispatchGetDeviceGroupSurfacePresentModesKHR(device, surface, pModes);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetDeviceGroupSurfacePresentModesKHR(device, surface, pModes, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDevicePresentRectanglesKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- uint32_t* pRectCount,
- VkRect2D* pRects) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDevicePresentRectanglesKHR(physicalDevice, surface, pRectCount, pRects);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDevicePresentRectanglesKHR(physicalDevice, surface, pRectCount, pRects);
- }
- VkResult result = DispatchGetPhysicalDevicePresentRectanglesKHR(physicalDevice, surface, pRectCount, pRects);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDevicePresentRectanglesKHR(physicalDevice, surface, pRectCount, pRects, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL AcquireNextImage2KHR(
- VkDevice device,
- const VkAcquireNextImageInfoKHR* pAcquireInfo,
- uint32_t* pImageIndex) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateAcquireNextImage2KHR(device, pAcquireInfo, pImageIndex);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordAcquireNextImage2KHR(device, pAcquireInfo, pImageIndex);
- }
- VkResult result = DispatchAcquireNextImage2KHR(device, pAcquireInfo, pImageIndex);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordAcquireNextImage2KHR(device, pAcquireInfo, pImageIndex, result);
- }
- return result;
-}
-
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceDisplayPropertiesKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t* pPropertyCount,
- VkDisplayPropertiesKHR* pProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceDisplayPropertiesKHR(physicalDevice, pPropertyCount, pProperties);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceDisplayPropertiesKHR(physicalDevice, pPropertyCount, pProperties);
- }
- VkResult result = DispatchGetPhysicalDeviceDisplayPropertiesKHR(physicalDevice, pPropertyCount, pProperties);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceDisplayPropertiesKHR(physicalDevice, pPropertyCount, pProperties, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceDisplayPlanePropertiesKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t* pPropertyCount,
- VkDisplayPlanePropertiesKHR* pProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceDisplayPlanePropertiesKHR(physicalDevice, pPropertyCount, pProperties);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceDisplayPlanePropertiesKHR(physicalDevice, pPropertyCount, pProperties);
- }
- VkResult result = DispatchGetPhysicalDeviceDisplayPlanePropertiesKHR(physicalDevice, pPropertyCount, pProperties);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceDisplayPlanePropertiesKHR(physicalDevice, pPropertyCount, pProperties, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL GetDisplayPlaneSupportedDisplaysKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t planeIndex,
- uint32_t* pDisplayCount,
- VkDisplayKHR* pDisplays) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetDisplayPlaneSupportedDisplaysKHR(physicalDevice, planeIndex, pDisplayCount, pDisplays);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetDisplayPlaneSupportedDisplaysKHR(physicalDevice, planeIndex, pDisplayCount, pDisplays);
- }
- VkResult result = DispatchGetDisplayPlaneSupportedDisplaysKHR(physicalDevice, planeIndex, pDisplayCount, pDisplays);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetDisplayPlaneSupportedDisplaysKHR(physicalDevice, planeIndex, pDisplayCount, pDisplays, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL GetDisplayModePropertiesKHR(
- VkPhysicalDevice physicalDevice,
- VkDisplayKHR display,
- uint32_t* pPropertyCount,
- VkDisplayModePropertiesKHR* pProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetDisplayModePropertiesKHR(physicalDevice, display, pPropertyCount, pProperties);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetDisplayModePropertiesKHR(physicalDevice, display, pPropertyCount, pProperties);
- }
- VkResult result = DispatchGetDisplayModePropertiesKHR(physicalDevice, display, pPropertyCount, pProperties);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetDisplayModePropertiesKHR(physicalDevice, display, pPropertyCount, pProperties, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateDisplayModeKHR(
- VkPhysicalDevice physicalDevice,
- VkDisplayKHR display,
- const VkDisplayModeCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDisplayModeKHR* pMode) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreateDisplayModeKHR(physicalDevice, display, pCreateInfo, pAllocator, pMode);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateDisplayModeKHR(physicalDevice, display, pCreateInfo, pAllocator, pMode);
- }
- VkResult result = DispatchCreateDisplayModeKHR(physicalDevice, display, pCreateInfo, pAllocator, pMode);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCreateDisplayModeKHR(physicalDevice, display, pCreateInfo, pAllocator, pMode, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL GetDisplayPlaneCapabilitiesKHR(
- VkPhysicalDevice physicalDevice,
- VkDisplayModeKHR mode,
- uint32_t planeIndex,
- VkDisplayPlaneCapabilitiesKHR* pCapabilities) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetDisplayPlaneCapabilitiesKHR(physicalDevice, mode, planeIndex, pCapabilities);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetDisplayPlaneCapabilitiesKHR(physicalDevice, mode, planeIndex, pCapabilities);
- }
- VkResult result = DispatchGetDisplayPlaneCapabilitiesKHR(physicalDevice, mode, planeIndex, pCapabilities);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetDisplayPlaneCapabilitiesKHR(physicalDevice, mode, planeIndex, pCapabilities, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateDisplayPlaneSurfaceKHR(
- VkInstance instance,
- const VkDisplaySurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreateDisplayPlaneSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateDisplayPlaneSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
- }
- VkResult result = DispatchCreateDisplayPlaneSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCreateDisplayPlaneSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface, result);
- }
- return result;
-}
-
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateSharedSwapchainsKHR(
- VkDevice device,
- uint32_t swapchainCount,
- const VkSwapchainCreateInfoKHR* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkSwapchainKHR* pSwapchains) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreateSharedSwapchainsKHR(device, swapchainCount, pCreateInfos, pAllocator, pSwapchains);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateSharedSwapchainsKHR(device, swapchainCount, pCreateInfos, pAllocator, pSwapchains);
- }
- VkResult result = DispatchCreateSharedSwapchainsKHR(device, swapchainCount, pCreateInfos, pAllocator, pSwapchains);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCreateSharedSwapchainsKHR(device, swapchainCount, pCreateInfos, pAllocator, pSwapchains, result);
- }
- return result;
-}
-
-#ifdef VK_USE_PLATFORM_XLIB_KHR
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateXlibSurfaceKHR(
- VkInstance instance,
- const VkXlibSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreateXlibSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateXlibSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
- }
- VkResult result = DispatchCreateXlibSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCreateXlibSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceXlibPresentationSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex,
- Display* dpy,
- VisualID visualID) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceXlibPresentationSupportKHR(physicalDevice, queueFamilyIndex, dpy, visualID);
- if (skip) return VK_FALSE;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceXlibPresentationSupportKHR(physicalDevice, queueFamilyIndex, dpy, visualID);
- }
- VkBool32 result = DispatchGetPhysicalDeviceXlibPresentationSupportKHR(physicalDevice, queueFamilyIndex, dpy, visualID);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceXlibPresentationSupportKHR(physicalDevice, queueFamilyIndex, dpy, visualID);
- }
- return result;
-}
-#endif // VK_USE_PLATFORM_XLIB_KHR
-
-#ifdef VK_USE_PLATFORM_XCB_KHR
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateXcbSurfaceKHR(
- VkInstance instance,
- const VkXcbSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreateXcbSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateXcbSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
- }
- VkResult result = DispatchCreateXcbSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCreateXcbSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceXcbPresentationSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex,
- xcb_connection_t* connection,
- xcb_visualid_t visual_id) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceXcbPresentationSupportKHR(physicalDevice, queueFamilyIndex, connection, visual_id);
- if (skip) return VK_FALSE;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceXcbPresentationSupportKHR(physicalDevice, queueFamilyIndex, connection, visual_id);
- }
- VkBool32 result = DispatchGetPhysicalDeviceXcbPresentationSupportKHR(physicalDevice, queueFamilyIndex, connection, visual_id);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceXcbPresentationSupportKHR(physicalDevice, queueFamilyIndex, connection, visual_id);
- }
- return result;
-}
-#endif // VK_USE_PLATFORM_XCB_KHR
-
-#ifdef VK_USE_PLATFORM_WAYLAND_KHR
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateWaylandSurfaceKHR(
- VkInstance instance,
- const VkWaylandSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreateWaylandSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateWaylandSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
- }
- VkResult result = DispatchCreateWaylandSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCreateWaylandSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceWaylandPresentationSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex,
- struct wl_display* display) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceWaylandPresentationSupportKHR(physicalDevice, queueFamilyIndex, display);
- if (skip) return VK_FALSE;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceWaylandPresentationSupportKHR(physicalDevice, queueFamilyIndex, display);
- }
- VkBool32 result = DispatchGetPhysicalDeviceWaylandPresentationSupportKHR(physicalDevice, queueFamilyIndex, display);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceWaylandPresentationSupportKHR(physicalDevice, queueFamilyIndex, display);
- }
- return result;
-}
-#endif // VK_USE_PLATFORM_WAYLAND_KHR
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateAndroidSurfaceKHR(
- VkInstance instance,
- const VkAndroidSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreateAndroidSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateAndroidSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
- }
- VkResult result = DispatchCreateAndroidSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCreateAndroidSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface, result);
- }
- return result;
-}
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateWin32SurfaceKHR(
- VkInstance instance,
- const VkWin32SurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreateWin32SurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateWin32SurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
- }
- VkResult result = DispatchCreateWin32SurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCreateWin32SurfaceKHR(instance, pCreateInfo, pAllocator, pSurface, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceWin32PresentationSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceWin32PresentationSupportKHR(physicalDevice, queueFamilyIndex);
- if (skip) return VK_FALSE;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceWin32PresentationSupportKHR(physicalDevice, queueFamilyIndex);
- }
- VkBool32 result = DispatchGetPhysicalDeviceWin32PresentationSupportKHR(physicalDevice, queueFamilyIndex);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceWin32PresentationSupportKHR(physicalDevice, queueFamilyIndex);
- }
- return result;
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-
-
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFeatures2KHR(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceFeatures2* pFeatures) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceFeatures2KHR(physicalDevice, pFeatures);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceFeatures2KHR(physicalDevice, pFeatures);
- }
- DispatchGetPhysicalDeviceFeatures2KHR(physicalDevice, pFeatures);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceFeatures2KHR(physicalDevice, pFeatures);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceProperties2KHR(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceProperties2* pProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceProperties2KHR(physicalDevice, pProperties);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceProperties2KHR(physicalDevice, pProperties);
- }
- DispatchGetPhysicalDeviceProperties2KHR(physicalDevice, pProperties);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceProperties2KHR(physicalDevice, pProperties);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFormatProperties2KHR(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkFormatProperties2* pFormatProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceFormatProperties2KHR(physicalDevice, format, pFormatProperties);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceFormatProperties2KHR(physicalDevice, format, pFormatProperties);
- }
- DispatchGetPhysicalDeviceFormatProperties2KHR(physicalDevice, format, pFormatProperties);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceFormatProperties2KHR(physicalDevice, format, pFormatProperties);
- }
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceImageFormatProperties2KHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
- VkImageFormatProperties2* pImageFormatProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceImageFormatProperties2KHR(physicalDevice, pImageFormatInfo, pImageFormatProperties);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceImageFormatProperties2KHR(physicalDevice, pImageFormatInfo, pImageFormatProperties);
- }
- VkResult result = DispatchGetPhysicalDeviceImageFormatProperties2KHR(physicalDevice, pImageFormatInfo, pImageFormatProperties);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceImageFormatProperties2KHR(physicalDevice, pImageFormatInfo, pImageFormatProperties, result);
- }
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceQueueFamilyProperties2KHR(
- VkPhysicalDevice physicalDevice,
- uint32_t* pQueueFamilyPropertyCount,
- VkQueueFamilyProperties2* pQueueFamilyProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceQueueFamilyProperties2KHR(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
- }
- DispatchGetPhysicalDeviceQueueFamilyProperties2KHR(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceMemoryProperties2KHR(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceMemoryProperties2* pMemoryProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceMemoryProperties2KHR(physicalDevice, pMemoryProperties);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceMemoryProperties2KHR(physicalDevice, pMemoryProperties);
- }
- DispatchGetPhysicalDeviceMemoryProperties2KHR(physicalDevice, pMemoryProperties);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceMemoryProperties2KHR(physicalDevice, pMemoryProperties);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceSparseImageFormatProperties2KHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo,
- uint32_t* pPropertyCount,
- VkSparseImageFormatProperties2* pProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceSparseImageFormatProperties2KHR(physicalDevice, pFormatInfo, pPropertyCount, pProperties);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceSparseImageFormatProperties2KHR(physicalDevice, pFormatInfo, pPropertyCount, pProperties);
- }
- DispatchGetPhysicalDeviceSparseImageFormatProperties2KHR(physicalDevice, pFormatInfo, pPropertyCount, pProperties);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceSparseImageFormatProperties2KHR(physicalDevice, pFormatInfo, pPropertyCount, pProperties);
- }
-}
-
-
-VKAPI_ATTR void VKAPI_CALL GetDeviceGroupPeerMemoryFeaturesKHR(
- VkDevice device,
- uint32_t heapIndex,
- uint32_t localDeviceIndex,
- uint32_t remoteDeviceIndex,
- VkPeerMemoryFeatureFlags* pPeerMemoryFeatures) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetDeviceGroupPeerMemoryFeaturesKHR(device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetDeviceGroupPeerMemoryFeaturesKHR(device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures);
- }
- DispatchGetDeviceGroupPeerMemoryFeaturesKHR(device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetDeviceGroupPeerMemoryFeaturesKHR(device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdSetDeviceMaskKHR(
- VkCommandBuffer commandBuffer,
- uint32_t deviceMask) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdSetDeviceMaskKHR(commandBuffer, deviceMask);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdSetDeviceMaskKHR(commandBuffer, deviceMask);
- }
- DispatchCmdSetDeviceMaskKHR(commandBuffer, deviceMask);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdSetDeviceMaskKHR(commandBuffer, deviceMask);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdDispatchBaseKHR(
- VkCommandBuffer commandBuffer,
- uint32_t baseGroupX,
- uint32_t baseGroupY,
- uint32_t baseGroupZ,
- uint32_t groupCountX,
- uint32_t groupCountY,
- uint32_t groupCountZ) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdDispatchBaseKHR(commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdDispatchBaseKHR(commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ);
- }
- DispatchCmdDispatchBaseKHR(commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdDispatchBaseKHR(commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ);
- }
-}
-
-
-
-VKAPI_ATTR void VKAPI_CALL TrimCommandPoolKHR(
- VkDevice device,
- VkCommandPool commandPool,
- VkCommandPoolTrimFlags flags) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateTrimCommandPoolKHR(device, commandPool, flags);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordTrimCommandPoolKHR(device, commandPool, flags);
- }
- DispatchTrimCommandPoolKHR(device, commandPool, flags);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordTrimCommandPoolKHR(device, commandPool, flags);
- }
-}
-
-
-VKAPI_ATTR VkResult VKAPI_CALL EnumeratePhysicalDeviceGroupsKHR(
- VkInstance instance,
- uint32_t* pPhysicalDeviceGroupCount,
- VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateEnumeratePhysicalDeviceGroupsKHR(instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordEnumeratePhysicalDeviceGroupsKHR(instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
- }
- VkResult result = DispatchEnumeratePhysicalDeviceGroupsKHR(instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordEnumeratePhysicalDeviceGroupsKHR(instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties, result);
- }
- return result;
-}
-
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceExternalBufferPropertiesKHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo,
- VkExternalBufferProperties* pExternalBufferProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceExternalBufferPropertiesKHR(physicalDevice, pExternalBufferInfo, pExternalBufferProperties);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceExternalBufferPropertiesKHR(physicalDevice, pExternalBufferInfo, pExternalBufferProperties);
- }
- DispatchGetPhysicalDeviceExternalBufferPropertiesKHR(physicalDevice, pExternalBufferInfo, pExternalBufferProperties);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceExternalBufferPropertiesKHR(physicalDevice, pExternalBufferInfo, pExternalBufferProperties);
- }
-}
-
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-VKAPI_ATTR VkResult VKAPI_CALL GetMemoryWin32HandleKHR(
- VkDevice device,
- const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo,
- HANDLE* pHandle) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetMemoryWin32HandleKHR(device, pGetWin32HandleInfo, pHandle);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetMemoryWin32HandleKHR(device, pGetWin32HandleInfo, pHandle);
- }
- VkResult result = DispatchGetMemoryWin32HandleKHR(device, pGetWin32HandleInfo, pHandle);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetMemoryWin32HandleKHR(device, pGetWin32HandleInfo, pHandle, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL GetMemoryWin32HandlePropertiesKHR(
- VkDevice device,
- VkExternalMemoryHandleTypeFlagBits handleType,
- HANDLE handle,
- VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetMemoryWin32HandlePropertiesKHR(device, handleType, handle, pMemoryWin32HandleProperties);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetMemoryWin32HandlePropertiesKHR(device, handleType, handle, pMemoryWin32HandleProperties);
- }
- VkResult result = DispatchGetMemoryWin32HandlePropertiesKHR(device, handleType, handle, pMemoryWin32HandleProperties);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetMemoryWin32HandlePropertiesKHR(device, handleType, handle, pMemoryWin32HandleProperties, result);
- }
- return result;
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-
-VKAPI_ATTR VkResult VKAPI_CALL GetMemoryFdKHR(
- VkDevice device,
- const VkMemoryGetFdInfoKHR* pGetFdInfo,
- int* pFd) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetMemoryFdKHR(device, pGetFdInfo, pFd);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetMemoryFdKHR(device, pGetFdInfo, pFd);
- }
- VkResult result = DispatchGetMemoryFdKHR(device, pGetFdInfo, pFd);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetMemoryFdKHR(device, pGetFdInfo, pFd, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL GetMemoryFdPropertiesKHR(
- VkDevice device,
- VkExternalMemoryHandleTypeFlagBits handleType,
- int fd,
- VkMemoryFdPropertiesKHR* pMemoryFdProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetMemoryFdPropertiesKHR(device, handleType, fd, pMemoryFdProperties);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetMemoryFdPropertiesKHR(device, handleType, fd, pMemoryFdProperties);
- }
- VkResult result = DispatchGetMemoryFdPropertiesKHR(device, handleType, fd, pMemoryFdProperties);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetMemoryFdPropertiesKHR(device, handleType, fd, pMemoryFdProperties, result);
- }
- return result;
-}
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceExternalSemaphorePropertiesKHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
- VkExternalSemaphoreProperties* pExternalSemaphoreProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceExternalSemaphorePropertiesKHR(physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceExternalSemaphorePropertiesKHR(physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
- }
- DispatchGetPhysicalDeviceExternalSemaphorePropertiesKHR(physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceExternalSemaphorePropertiesKHR(physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
- }
-}
-
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-VKAPI_ATTR VkResult VKAPI_CALL ImportSemaphoreWin32HandleKHR(
- VkDevice device,
- const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateImportSemaphoreWin32HandleKHR(device, pImportSemaphoreWin32HandleInfo);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordImportSemaphoreWin32HandleKHR(device, pImportSemaphoreWin32HandleInfo);
- }
- VkResult result = DispatchImportSemaphoreWin32HandleKHR(device, pImportSemaphoreWin32HandleInfo);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordImportSemaphoreWin32HandleKHR(device, pImportSemaphoreWin32HandleInfo, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL GetSemaphoreWin32HandleKHR(
- VkDevice device,
- const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo,
- HANDLE* pHandle) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetSemaphoreWin32HandleKHR(device, pGetWin32HandleInfo, pHandle);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetSemaphoreWin32HandleKHR(device, pGetWin32HandleInfo, pHandle);
- }
- VkResult result = DispatchGetSemaphoreWin32HandleKHR(device, pGetWin32HandleInfo, pHandle);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetSemaphoreWin32HandleKHR(device, pGetWin32HandleInfo, pHandle, result);
- }
- return result;
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-
-VKAPI_ATTR VkResult VKAPI_CALL ImportSemaphoreFdKHR(
- VkDevice device,
- const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateImportSemaphoreFdKHR(device, pImportSemaphoreFdInfo);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordImportSemaphoreFdKHR(device, pImportSemaphoreFdInfo);
- }
- VkResult result = DispatchImportSemaphoreFdKHR(device, pImportSemaphoreFdInfo);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordImportSemaphoreFdKHR(device, pImportSemaphoreFdInfo, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL GetSemaphoreFdKHR(
- VkDevice device,
- const VkSemaphoreGetFdInfoKHR* pGetFdInfo,
- int* pFd) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetSemaphoreFdKHR(device, pGetFdInfo, pFd);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetSemaphoreFdKHR(device, pGetFdInfo, pFd);
- }
- VkResult result = DispatchGetSemaphoreFdKHR(device, pGetFdInfo, pFd);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetSemaphoreFdKHR(device, pGetFdInfo, pFd, result);
- }
- return result;
-}
-
-
-VKAPI_ATTR void VKAPI_CALL CmdPushDescriptorSetKHR(
- VkCommandBuffer commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipelineLayout layout,
- uint32_t set,
- uint32_t descriptorWriteCount,
- const VkWriteDescriptorSet* pDescriptorWrites) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdPushDescriptorSetKHR(commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdPushDescriptorSetKHR(commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites);
- }
- DispatchCmdPushDescriptorSetKHR(commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdPushDescriptorSetKHR(commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdPushDescriptorSetWithTemplateKHR(
- VkCommandBuffer commandBuffer,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- VkPipelineLayout layout,
- uint32_t set,
- const void* pData) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdPushDescriptorSetWithTemplateKHR(commandBuffer, descriptorUpdateTemplate, layout, set, pData);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdPushDescriptorSetWithTemplateKHR(commandBuffer, descriptorUpdateTemplate, layout, set, pData);
- }
- DispatchCmdPushDescriptorSetWithTemplateKHR(commandBuffer, descriptorUpdateTemplate, layout, set, pData);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdPushDescriptorSetWithTemplateKHR(commandBuffer, descriptorUpdateTemplate, layout, set, pData);
- }
-}
-
-
-
-
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateDescriptorUpdateTemplateKHR(
- VkDevice device,
- const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreateDescriptorUpdateTemplateKHR(device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateDescriptorUpdateTemplateKHR(device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate);
- }
- VkResult result = DispatchCreateDescriptorUpdateTemplateKHR(device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCreateDescriptorUpdateTemplateKHR(device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate, result);
- }
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL DestroyDescriptorUpdateTemplateKHR(
- VkDevice device,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const VkAllocationCallbacks* pAllocator) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateDestroyDescriptorUpdateTemplateKHR(device, descriptorUpdateTemplate, pAllocator);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordDestroyDescriptorUpdateTemplateKHR(device, descriptorUpdateTemplate, pAllocator);
- }
- DispatchDestroyDescriptorUpdateTemplateKHR(device, descriptorUpdateTemplate, pAllocator);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordDestroyDescriptorUpdateTemplateKHR(device, descriptorUpdateTemplate, pAllocator);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL UpdateDescriptorSetWithTemplateKHR(
- VkDevice device,
- VkDescriptorSet descriptorSet,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const void* pData) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateUpdateDescriptorSetWithTemplateKHR(device, descriptorSet, descriptorUpdateTemplate, pData);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordUpdateDescriptorSetWithTemplateKHR(device, descriptorSet, descriptorUpdateTemplate, pData);
- }
- DispatchUpdateDescriptorSetWithTemplateKHR(device, descriptorSet, descriptorUpdateTemplate, pData);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordUpdateDescriptorSetWithTemplateKHR(device, descriptorSet, descriptorUpdateTemplate, pData);
- }
-}
-
-
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateRenderPass2KHR(
- VkDevice device,
- const VkRenderPassCreateInfo2KHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkRenderPass* pRenderPass) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreateRenderPass2KHR(device, pCreateInfo, pAllocator, pRenderPass);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateRenderPass2KHR(device, pCreateInfo, pAllocator, pRenderPass);
- }
- VkResult result = DispatchCreateRenderPass2KHR(device, pCreateInfo, pAllocator, pRenderPass);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCreateRenderPass2KHR(device, pCreateInfo, pAllocator, pRenderPass, result);
- }
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdBeginRenderPass2KHR(
- VkCommandBuffer commandBuffer,
- const VkRenderPassBeginInfo* pRenderPassBegin,
- const VkSubpassBeginInfoKHR* pSubpassBeginInfo) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdBeginRenderPass2KHR(commandBuffer, pRenderPassBegin, pSubpassBeginInfo);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdBeginRenderPass2KHR(commandBuffer, pRenderPassBegin, pSubpassBeginInfo);
- }
- DispatchCmdBeginRenderPass2KHR(commandBuffer, pRenderPassBegin, pSubpassBeginInfo);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdBeginRenderPass2KHR(commandBuffer, pRenderPassBegin, pSubpassBeginInfo);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdNextSubpass2KHR(
- VkCommandBuffer commandBuffer,
- const VkSubpassBeginInfoKHR* pSubpassBeginInfo,
- const VkSubpassEndInfoKHR* pSubpassEndInfo) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdNextSubpass2KHR(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdNextSubpass2KHR(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo);
- }
- DispatchCmdNextSubpass2KHR(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdNextSubpass2KHR(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdEndRenderPass2KHR(
- VkCommandBuffer commandBuffer,
- const VkSubpassEndInfoKHR* pSubpassEndInfo) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdEndRenderPass2KHR(commandBuffer, pSubpassEndInfo);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdEndRenderPass2KHR(commandBuffer, pSubpassEndInfo);
- }
- DispatchCmdEndRenderPass2KHR(commandBuffer, pSubpassEndInfo);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdEndRenderPass2KHR(commandBuffer, pSubpassEndInfo);
- }
-}
-
-
-VKAPI_ATTR VkResult VKAPI_CALL GetSwapchainStatusKHR(
- VkDevice device,
- VkSwapchainKHR swapchain) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetSwapchainStatusKHR(device, swapchain);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetSwapchainStatusKHR(device, swapchain);
- }
- VkResult result = DispatchGetSwapchainStatusKHR(device, swapchain);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetSwapchainStatusKHR(device, swapchain, result);
- }
- return result;
-}
-
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceExternalFencePropertiesKHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo,
- VkExternalFenceProperties* pExternalFenceProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceExternalFencePropertiesKHR(physicalDevice, pExternalFenceInfo, pExternalFenceProperties);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceExternalFencePropertiesKHR(physicalDevice, pExternalFenceInfo, pExternalFenceProperties);
- }
- DispatchGetPhysicalDeviceExternalFencePropertiesKHR(physicalDevice, pExternalFenceInfo, pExternalFenceProperties);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceExternalFencePropertiesKHR(physicalDevice, pExternalFenceInfo, pExternalFenceProperties);
- }
-}
-
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-VKAPI_ATTR VkResult VKAPI_CALL ImportFenceWin32HandleKHR(
- VkDevice device,
- const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateImportFenceWin32HandleKHR(device, pImportFenceWin32HandleInfo);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordImportFenceWin32HandleKHR(device, pImportFenceWin32HandleInfo);
- }
- VkResult result = DispatchImportFenceWin32HandleKHR(device, pImportFenceWin32HandleInfo);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordImportFenceWin32HandleKHR(device, pImportFenceWin32HandleInfo, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL GetFenceWin32HandleKHR(
- VkDevice device,
- const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo,
- HANDLE* pHandle) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetFenceWin32HandleKHR(device, pGetWin32HandleInfo, pHandle);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetFenceWin32HandleKHR(device, pGetWin32HandleInfo, pHandle);
- }
- VkResult result = DispatchGetFenceWin32HandleKHR(device, pGetWin32HandleInfo, pHandle);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetFenceWin32HandleKHR(device, pGetWin32HandleInfo, pHandle, result);
- }
- return result;
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-
-VKAPI_ATTR VkResult VKAPI_CALL ImportFenceFdKHR(
- VkDevice device,
- const VkImportFenceFdInfoKHR* pImportFenceFdInfo) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateImportFenceFdKHR(device, pImportFenceFdInfo);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordImportFenceFdKHR(device, pImportFenceFdInfo);
- }
- VkResult result = DispatchImportFenceFdKHR(device, pImportFenceFdInfo);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordImportFenceFdKHR(device, pImportFenceFdInfo, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL GetFenceFdKHR(
- VkDevice device,
- const VkFenceGetFdInfoKHR* pGetFdInfo,
- int* pFd) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetFenceFdKHR(device, pGetFdInfo, pFd);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetFenceFdKHR(device, pGetFdInfo, pFd);
- }
- VkResult result = DispatchGetFenceFdKHR(device, pGetFdInfo, pFd);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetFenceFdKHR(device, pGetFdInfo, pFd, result);
- }
- return result;
-}
-
-
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceCapabilities2KHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
- VkSurfaceCapabilities2KHR* pSurfaceCapabilities) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceSurfaceCapabilities2KHR(physicalDevice, pSurfaceInfo, pSurfaceCapabilities);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceSurfaceCapabilities2KHR(physicalDevice, pSurfaceInfo, pSurfaceCapabilities);
- }
- VkResult result = DispatchGetPhysicalDeviceSurfaceCapabilities2KHR(physicalDevice, pSurfaceInfo, pSurfaceCapabilities);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceSurfaceCapabilities2KHR(physicalDevice, pSurfaceInfo, pSurfaceCapabilities, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceFormats2KHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
- uint32_t* pSurfaceFormatCount,
- VkSurfaceFormat2KHR* pSurfaceFormats) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceSurfaceFormats2KHR(physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceSurfaceFormats2KHR(physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats);
- }
- VkResult result = DispatchGetPhysicalDeviceSurfaceFormats2KHR(physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceSurfaceFormats2KHR(physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats, result);
- }
- return result;
-}
-
-
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceDisplayProperties2KHR(
- VkPhysicalDevice physicalDevice,
- uint32_t* pPropertyCount,
- VkDisplayProperties2KHR* pProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceDisplayProperties2KHR(physicalDevice, pPropertyCount, pProperties);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceDisplayProperties2KHR(physicalDevice, pPropertyCount, pProperties);
- }
- VkResult result = DispatchGetPhysicalDeviceDisplayProperties2KHR(physicalDevice, pPropertyCount, pProperties);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceDisplayProperties2KHR(physicalDevice, pPropertyCount, pProperties, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceDisplayPlaneProperties2KHR(
- VkPhysicalDevice physicalDevice,
- uint32_t* pPropertyCount,
- VkDisplayPlaneProperties2KHR* pProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceDisplayPlaneProperties2KHR(physicalDevice, pPropertyCount, pProperties);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceDisplayPlaneProperties2KHR(physicalDevice, pPropertyCount, pProperties);
- }
- VkResult result = DispatchGetPhysicalDeviceDisplayPlaneProperties2KHR(physicalDevice, pPropertyCount, pProperties);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceDisplayPlaneProperties2KHR(physicalDevice, pPropertyCount, pProperties, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL GetDisplayModeProperties2KHR(
- VkPhysicalDevice physicalDevice,
- VkDisplayKHR display,
- uint32_t* pPropertyCount,
- VkDisplayModeProperties2KHR* pProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetDisplayModeProperties2KHR(physicalDevice, display, pPropertyCount, pProperties);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetDisplayModeProperties2KHR(physicalDevice, display, pPropertyCount, pProperties);
- }
- VkResult result = DispatchGetDisplayModeProperties2KHR(physicalDevice, display, pPropertyCount, pProperties);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetDisplayModeProperties2KHR(physicalDevice, display, pPropertyCount, pProperties, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL GetDisplayPlaneCapabilities2KHR(
- VkPhysicalDevice physicalDevice,
- const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo,
- VkDisplayPlaneCapabilities2KHR* pCapabilities) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetDisplayPlaneCapabilities2KHR(physicalDevice, pDisplayPlaneInfo, pCapabilities);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetDisplayPlaneCapabilities2KHR(physicalDevice, pDisplayPlaneInfo, pCapabilities);
- }
- VkResult result = DispatchGetDisplayPlaneCapabilities2KHR(physicalDevice, pDisplayPlaneInfo, pCapabilities);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetDisplayPlaneCapabilities2KHR(physicalDevice, pDisplayPlaneInfo, pCapabilities, result);
- }
- return result;
-}
-
-
-
-
-
-VKAPI_ATTR void VKAPI_CALL GetImageMemoryRequirements2KHR(
- VkDevice device,
- const VkImageMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetImageMemoryRequirements2KHR(device, pInfo, pMemoryRequirements);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetImageMemoryRequirements2KHR(device, pInfo, pMemoryRequirements);
- }
- DispatchGetImageMemoryRequirements2KHR(device, pInfo, pMemoryRequirements);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetImageMemoryRequirements2KHR(device, pInfo, pMemoryRequirements);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL GetBufferMemoryRequirements2KHR(
- VkDevice device,
- const VkBufferMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetBufferMemoryRequirements2KHR(device, pInfo, pMemoryRequirements);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetBufferMemoryRequirements2KHR(device, pInfo, pMemoryRequirements);
- }
- DispatchGetBufferMemoryRequirements2KHR(device, pInfo, pMemoryRequirements);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetBufferMemoryRequirements2KHR(device, pInfo, pMemoryRequirements);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL GetImageSparseMemoryRequirements2KHR(
- VkDevice device,
- const VkImageSparseMemoryRequirementsInfo2* pInfo,
- uint32_t* pSparseMemoryRequirementCount,
- VkSparseImageMemoryRequirements2* pSparseMemoryRequirements) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetImageSparseMemoryRequirements2KHR(device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetImageSparseMemoryRequirements2KHR(device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
- }
- DispatchGetImageSparseMemoryRequirements2KHR(device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetImageSparseMemoryRequirements2KHR(device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
- }
-}
-
-
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateSamplerYcbcrConversionKHR(
- VkDevice device,
- const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSamplerYcbcrConversion* pYcbcrConversion) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreateSamplerYcbcrConversionKHR(device, pCreateInfo, pAllocator, pYcbcrConversion);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateSamplerYcbcrConversionKHR(device, pCreateInfo, pAllocator, pYcbcrConversion);
- }
- VkResult result = DispatchCreateSamplerYcbcrConversionKHR(device, pCreateInfo, pAllocator, pYcbcrConversion);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCreateSamplerYcbcrConversionKHR(device, pCreateInfo, pAllocator, pYcbcrConversion, result);
- }
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL DestroySamplerYcbcrConversionKHR(
- VkDevice device,
- VkSamplerYcbcrConversion ycbcrConversion,
- const VkAllocationCallbacks* pAllocator) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateDestroySamplerYcbcrConversionKHR(device, ycbcrConversion, pAllocator);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordDestroySamplerYcbcrConversionKHR(device, ycbcrConversion, pAllocator);
- }
- DispatchDestroySamplerYcbcrConversionKHR(device, ycbcrConversion, pAllocator);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordDestroySamplerYcbcrConversionKHR(device, ycbcrConversion, pAllocator);
- }
-}
-
-
-VKAPI_ATTR VkResult VKAPI_CALL BindBufferMemory2KHR(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindBufferMemoryInfo* pBindInfos) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateBindBufferMemory2KHR(device, bindInfoCount, pBindInfos);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordBindBufferMemory2KHR(device, bindInfoCount, pBindInfos);
- }
- VkResult result = DispatchBindBufferMemory2KHR(device, bindInfoCount, pBindInfos);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordBindBufferMemory2KHR(device, bindInfoCount, pBindInfos, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL BindImageMemory2KHR(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindImageMemoryInfo* pBindInfos) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateBindImageMemory2KHR(device, bindInfoCount, pBindInfos);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordBindImageMemory2KHR(device, bindInfoCount, pBindInfos);
- }
- VkResult result = DispatchBindImageMemory2KHR(device, bindInfoCount, pBindInfos);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordBindImageMemory2KHR(device, bindInfoCount, pBindInfos, result);
- }
- return result;
-}
-
-
-VKAPI_ATTR void VKAPI_CALL GetDescriptorSetLayoutSupportKHR(
- VkDevice device,
- const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
- VkDescriptorSetLayoutSupport* pSupport) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetDescriptorSetLayoutSupportKHR(device, pCreateInfo, pSupport);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetDescriptorSetLayoutSupportKHR(device, pCreateInfo, pSupport);
- }
- DispatchGetDescriptorSetLayoutSupportKHR(device, pCreateInfo, pSupport);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetDescriptorSetLayoutSupportKHR(device, pCreateInfo, pSupport);
- }
-}
-
-
-VKAPI_ATTR void VKAPI_CALL CmdDrawIndirectCountKHR(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdDrawIndirectCountKHR(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdDrawIndirectCountKHR(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
- }
- DispatchCmdDrawIndirectCountKHR(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdDrawIndirectCountKHR(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdDrawIndexedIndirectCountKHR(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdDrawIndexedIndirectCountKHR(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdDrawIndexedIndirectCountKHR(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
- }
- DispatchCmdDrawIndexedIndirectCountKHR(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdDrawIndexedIndirectCountKHR(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
- }
-}
-
-
-
-
-
-
-
-
-
-
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPipelineExecutablePropertiesKHR(
- VkDevice device,
- const VkPipelineInfoKHR* pPipelineInfo,
- uint32_t* pExecutableCount,
- VkPipelineExecutablePropertiesKHR* pProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPipelineExecutablePropertiesKHR(device, pPipelineInfo, pExecutableCount, pProperties);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPipelineExecutablePropertiesKHR(device, pPipelineInfo, pExecutableCount, pProperties);
- }
- VkResult result = DispatchGetPipelineExecutablePropertiesKHR(device, pPipelineInfo, pExecutableCount, pProperties);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPipelineExecutablePropertiesKHR(device, pPipelineInfo, pExecutableCount, pProperties, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPipelineExecutableStatisticsKHR(
- VkDevice device,
- const VkPipelineExecutableInfoKHR* pExecutableInfo,
- uint32_t* pStatisticCount,
- VkPipelineExecutableStatisticKHR* pStatistics) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPipelineExecutableStatisticsKHR(device, pExecutableInfo, pStatisticCount, pStatistics);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPipelineExecutableStatisticsKHR(device, pExecutableInfo, pStatisticCount, pStatistics);
- }
- VkResult result = DispatchGetPipelineExecutableStatisticsKHR(device, pExecutableInfo, pStatisticCount, pStatistics);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPipelineExecutableStatisticsKHR(device, pExecutableInfo, pStatisticCount, pStatistics, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPipelineExecutableInternalRepresentationsKHR(
- VkDevice device,
- const VkPipelineExecutableInfoKHR* pExecutableInfo,
- uint32_t* pInternalRepresentationCount,
- VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPipelineExecutableInternalRepresentationsKHR(device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPipelineExecutableInternalRepresentationsKHR(device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations);
- }
- VkResult result = DispatchGetPipelineExecutableInternalRepresentationsKHR(device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPipelineExecutableInternalRepresentationsKHR(device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations, result);
- }
- return result;
-}
-
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateDebugReportCallbackEXT(
- VkInstance instance,
- const VkDebugReportCallbackCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDebugReportCallbackEXT* pCallback) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreateDebugReportCallbackEXT(instance, pCreateInfo, pAllocator, pCallback);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateDebugReportCallbackEXT(instance, pCreateInfo, pAllocator, pCallback);
- }
- VkResult result = DispatchCreateDebugReportCallbackEXT(instance, pCreateInfo, pAllocator, pCallback);
- layer_create_report_callback(layer_data->report_data, false, pCreateInfo, pAllocator, pCallback);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCreateDebugReportCallbackEXT(instance, pCreateInfo, pAllocator, pCallback, result);
- }
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL DestroyDebugReportCallbackEXT(
- VkInstance instance,
- VkDebugReportCallbackEXT callback,
- const VkAllocationCallbacks* pAllocator) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateDestroyDebugReportCallbackEXT(instance, callback, pAllocator);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordDestroyDebugReportCallbackEXT(instance, callback, pAllocator);
- }
- DispatchDestroyDebugReportCallbackEXT(instance, callback, pAllocator);
- layer_destroy_report_callback(layer_data->report_data, callback, pAllocator);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordDestroyDebugReportCallbackEXT(instance, callback, pAllocator);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL DebugReportMessageEXT(
- VkInstance instance,
- VkDebugReportFlagsEXT flags,
- VkDebugReportObjectTypeEXT objectType,
- uint64_t object,
- size_t location,
- int32_t messageCode,
- const char* pLayerPrefix,
- const char* pMessage) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateDebugReportMessageEXT(instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordDebugReportMessageEXT(instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage);
- }
- DispatchDebugReportMessageEXT(instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordDebugReportMessageEXT(instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage);
- }
-}
-
-
-
-
-
-
-
-
-VKAPI_ATTR VkResult VKAPI_CALL DebugMarkerSetObjectTagEXT(
- VkDevice device,
- const VkDebugMarkerObjectTagInfoEXT* pTagInfo) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateDebugMarkerSetObjectTagEXT(device, pTagInfo);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordDebugMarkerSetObjectTagEXT(device, pTagInfo);
- }
- VkResult result = DispatchDebugMarkerSetObjectTagEXT(device, pTagInfo);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordDebugMarkerSetObjectTagEXT(device, pTagInfo, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL DebugMarkerSetObjectNameEXT(
- VkDevice device,
- const VkDebugMarkerObjectNameInfoEXT* pNameInfo) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateDebugMarkerSetObjectNameEXT(device, pNameInfo);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordDebugMarkerSetObjectNameEXT(device, pNameInfo);
- }
- layer_data->report_data->DebugReportSetMarkerObjectName(pNameInfo);
- VkResult result = DispatchDebugMarkerSetObjectNameEXT(device, pNameInfo);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordDebugMarkerSetObjectNameEXT(device, pNameInfo, result);
- }
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdDebugMarkerBeginEXT(
- VkCommandBuffer commandBuffer,
- const VkDebugMarkerMarkerInfoEXT* pMarkerInfo) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdDebugMarkerBeginEXT(commandBuffer, pMarkerInfo);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdDebugMarkerBeginEXT(commandBuffer, pMarkerInfo);
- }
- DispatchCmdDebugMarkerBeginEXT(commandBuffer, pMarkerInfo);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdDebugMarkerBeginEXT(commandBuffer, pMarkerInfo);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdDebugMarkerEndEXT(
- VkCommandBuffer commandBuffer) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdDebugMarkerEndEXT(commandBuffer);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdDebugMarkerEndEXT(commandBuffer);
- }
- DispatchCmdDebugMarkerEndEXT(commandBuffer);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdDebugMarkerEndEXT(commandBuffer);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdDebugMarkerInsertEXT(
- VkCommandBuffer commandBuffer,
- const VkDebugMarkerMarkerInfoEXT* pMarkerInfo) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdDebugMarkerInsertEXT(commandBuffer, pMarkerInfo);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdDebugMarkerInsertEXT(commandBuffer, pMarkerInfo);
- }
- DispatchCmdDebugMarkerInsertEXT(commandBuffer, pMarkerInfo);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdDebugMarkerInsertEXT(commandBuffer, pMarkerInfo);
- }
-}
-
-
-
-
-VKAPI_ATTR void VKAPI_CALL CmdBindTransformFeedbackBuffersEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstBinding,
- uint32_t bindingCount,
- const VkBuffer* pBuffers,
- const VkDeviceSize* pOffsets,
- const VkDeviceSize* pSizes) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdBindTransformFeedbackBuffersEXT(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdBindTransformFeedbackBuffersEXT(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes);
- }
- DispatchCmdBindTransformFeedbackBuffersEXT(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdBindTransformFeedbackBuffersEXT(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdBeginTransformFeedbackEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstCounterBuffer,
- uint32_t counterBufferCount,
- const VkBuffer* pCounterBuffers,
- const VkDeviceSize* pCounterBufferOffsets) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdBeginTransformFeedbackEXT(commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdBeginTransformFeedbackEXT(commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets);
- }
- DispatchCmdBeginTransformFeedbackEXT(commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdBeginTransformFeedbackEXT(commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdEndTransformFeedbackEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstCounterBuffer,
- uint32_t counterBufferCount,
- const VkBuffer* pCounterBuffers,
- const VkDeviceSize* pCounterBufferOffsets) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdEndTransformFeedbackEXT(commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdEndTransformFeedbackEXT(commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets);
- }
- DispatchCmdEndTransformFeedbackEXT(commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdEndTransformFeedbackEXT(commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdBeginQueryIndexedEXT(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query,
- VkQueryControlFlags flags,
- uint32_t index) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdBeginQueryIndexedEXT(commandBuffer, queryPool, query, flags, index);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdBeginQueryIndexedEXT(commandBuffer, queryPool, query, flags, index);
- }
- DispatchCmdBeginQueryIndexedEXT(commandBuffer, queryPool, query, flags, index);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdBeginQueryIndexedEXT(commandBuffer, queryPool, query, flags, index);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdEndQueryIndexedEXT(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query,
- uint32_t index) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdEndQueryIndexedEXT(commandBuffer, queryPool, query, index);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdEndQueryIndexedEXT(commandBuffer, queryPool, query, index);
- }
- DispatchCmdEndQueryIndexedEXT(commandBuffer, queryPool, query, index);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdEndQueryIndexedEXT(commandBuffer, queryPool, query, index);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdDrawIndirectByteCountEXT(
- VkCommandBuffer commandBuffer,
- uint32_t instanceCount,
- uint32_t firstInstance,
- VkBuffer counterBuffer,
- VkDeviceSize counterBufferOffset,
- uint32_t counterOffset,
- uint32_t vertexStride) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdDrawIndirectByteCountEXT(commandBuffer, instanceCount, firstInstance, counterBuffer, counterBufferOffset, counterOffset, vertexStride);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdDrawIndirectByteCountEXT(commandBuffer, instanceCount, firstInstance, counterBuffer, counterBufferOffset, counterOffset, vertexStride);
- }
- DispatchCmdDrawIndirectByteCountEXT(commandBuffer, instanceCount, firstInstance, counterBuffer, counterBufferOffset, counterOffset, vertexStride);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdDrawIndirectByteCountEXT(commandBuffer, instanceCount, firstInstance, counterBuffer, counterBufferOffset, counterOffset, vertexStride);
- }
-}
-
-
-VKAPI_ATTR uint32_t VKAPI_CALL GetImageViewHandleNVX(
- VkDevice device,
- const VkImageViewHandleInfoNVX* pInfo) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetImageViewHandleNVX(device, pInfo);
- if (skip) return 0;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetImageViewHandleNVX(device, pInfo);
- }
- uint32_t result = DispatchGetImageViewHandleNVX(device, pInfo);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetImageViewHandleNVX(device, pInfo);
- }
- return result;
-}
-
-
-VKAPI_ATTR void VKAPI_CALL CmdDrawIndirectCountAMD(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdDrawIndirectCountAMD(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdDrawIndirectCountAMD(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
- }
- DispatchCmdDrawIndirectCountAMD(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdDrawIndirectCountAMD(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdDrawIndexedIndirectCountAMD(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdDrawIndexedIndirectCountAMD(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdDrawIndexedIndirectCountAMD(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
- }
- DispatchCmdDrawIndexedIndirectCountAMD(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdDrawIndexedIndirectCountAMD(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
- }
-}
-
-
-
-
-
-
-VKAPI_ATTR VkResult VKAPI_CALL GetShaderInfoAMD(
- VkDevice device,
- VkPipeline pipeline,
- VkShaderStageFlagBits shaderStage,
- VkShaderInfoTypeAMD infoType,
- size_t* pInfoSize,
- void* pInfo) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetShaderInfoAMD(device, pipeline, shaderStage, infoType, pInfoSize, pInfo);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetShaderInfoAMD(device, pipeline, shaderStage, infoType, pInfoSize, pInfo);
- }
- VkResult result = DispatchGetShaderInfoAMD(device, pipeline, shaderStage, infoType, pInfoSize, pInfo);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetShaderInfoAMD(device, pipeline, shaderStage, infoType, pInfoSize, pInfo, result);
- }
- return result;
-}
-
-
-#ifdef VK_USE_PLATFORM_GGP
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateStreamDescriptorSurfaceGGP(
- VkInstance instance,
- const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreateStreamDescriptorSurfaceGGP(instance, pCreateInfo, pAllocator, pSurface);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateStreamDescriptorSurfaceGGP(instance, pCreateInfo, pAllocator, pSurface);
- }
- VkResult result = DispatchCreateStreamDescriptorSurfaceGGP(instance, pCreateInfo, pAllocator, pSurface);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCreateStreamDescriptorSurfaceGGP(instance, pCreateInfo, pAllocator, pSurface, result);
- }
- return result;
-}
-#endif // VK_USE_PLATFORM_GGP
-
-
-
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceExternalImageFormatPropertiesNV(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkImageType type,
- VkImageTiling tiling,
- VkImageUsageFlags usage,
- VkImageCreateFlags flags,
- VkExternalMemoryHandleTypeFlagsNV externalHandleType,
- VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceExternalImageFormatPropertiesNV(physicalDevice, format, type, tiling, usage, flags, externalHandleType, pExternalImageFormatProperties);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceExternalImageFormatPropertiesNV(physicalDevice, format, type, tiling, usage, flags, externalHandleType, pExternalImageFormatProperties);
- }
- VkResult result = DispatchGetPhysicalDeviceExternalImageFormatPropertiesNV(physicalDevice, format, type, tiling, usage, flags, externalHandleType, pExternalImageFormatProperties);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceExternalImageFormatPropertiesNV(physicalDevice, format, type, tiling, usage, flags, externalHandleType, pExternalImageFormatProperties, result);
- }
- return result;
-}
-
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-VKAPI_ATTR VkResult VKAPI_CALL GetMemoryWin32HandleNV(
- VkDevice device,
- VkDeviceMemory memory,
- VkExternalMemoryHandleTypeFlagsNV handleType,
- HANDLE* pHandle) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetMemoryWin32HandleNV(device, memory, handleType, pHandle);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetMemoryWin32HandleNV(device, memory, handleType, pHandle);
- }
- VkResult result = DispatchGetMemoryWin32HandleNV(device, memory, handleType, pHandle);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetMemoryWin32HandleNV(device, memory, handleType, pHandle, result);
- }
- return result;
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-
-#ifdef VK_USE_PLATFORM_VI_NN
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateViSurfaceNN(
- VkInstance instance,
- const VkViSurfaceCreateInfoNN* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreateViSurfaceNN(instance, pCreateInfo, pAllocator, pSurface);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateViSurfaceNN(instance, pCreateInfo, pAllocator, pSurface);
- }
- VkResult result = DispatchCreateViSurfaceNN(instance, pCreateInfo, pAllocator, pSurface);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCreateViSurfaceNN(instance, pCreateInfo, pAllocator, pSurface, result);
- }
- return result;
-}
-#endif // VK_USE_PLATFORM_VI_NN
-
-
-
-
-
-
-VKAPI_ATTR void VKAPI_CALL CmdBeginConditionalRenderingEXT(
- VkCommandBuffer commandBuffer,
- const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdBeginConditionalRenderingEXT(commandBuffer, pConditionalRenderingBegin);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdBeginConditionalRenderingEXT(commandBuffer, pConditionalRenderingBegin);
- }
- DispatchCmdBeginConditionalRenderingEXT(commandBuffer, pConditionalRenderingBegin);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdBeginConditionalRenderingEXT(commandBuffer, pConditionalRenderingBegin);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdEndConditionalRenderingEXT(
- VkCommandBuffer commandBuffer) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdEndConditionalRenderingEXT(commandBuffer);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdEndConditionalRenderingEXT(commandBuffer);
- }
- DispatchCmdEndConditionalRenderingEXT(commandBuffer);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdEndConditionalRenderingEXT(commandBuffer);
- }
-}
-
-
-VKAPI_ATTR void VKAPI_CALL CmdProcessCommandsNVX(
- VkCommandBuffer commandBuffer,
- const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdProcessCommandsNVX(commandBuffer, pProcessCommandsInfo);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdProcessCommandsNVX(commandBuffer, pProcessCommandsInfo);
- }
- DispatchCmdProcessCommandsNVX(commandBuffer, pProcessCommandsInfo);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdProcessCommandsNVX(commandBuffer, pProcessCommandsInfo);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdReserveSpaceForCommandsNVX(
- VkCommandBuffer commandBuffer,
- const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdReserveSpaceForCommandsNVX(commandBuffer, pReserveSpaceInfo);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdReserveSpaceForCommandsNVX(commandBuffer, pReserveSpaceInfo);
- }
- DispatchCmdReserveSpaceForCommandsNVX(commandBuffer, pReserveSpaceInfo);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdReserveSpaceForCommandsNVX(commandBuffer, pReserveSpaceInfo);
- }
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateIndirectCommandsLayoutNVX(
- VkDevice device,
- const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreateIndirectCommandsLayoutNVX(device, pCreateInfo, pAllocator, pIndirectCommandsLayout);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateIndirectCommandsLayoutNVX(device, pCreateInfo, pAllocator, pIndirectCommandsLayout);
- }
- VkResult result = DispatchCreateIndirectCommandsLayoutNVX(device, pCreateInfo, pAllocator, pIndirectCommandsLayout);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCreateIndirectCommandsLayoutNVX(device, pCreateInfo, pAllocator, pIndirectCommandsLayout, result);
- }
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL DestroyIndirectCommandsLayoutNVX(
- VkDevice device,
- VkIndirectCommandsLayoutNVX indirectCommandsLayout,
- const VkAllocationCallbacks* pAllocator) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateDestroyIndirectCommandsLayoutNVX(device, indirectCommandsLayout, pAllocator);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordDestroyIndirectCommandsLayoutNVX(device, indirectCommandsLayout, pAllocator);
- }
- DispatchDestroyIndirectCommandsLayoutNVX(device, indirectCommandsLayout, pAllocator);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordDestroyIndirectCommandsLayoutNVX(device, indirectCommandsLayout, pAllocator);
- }
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateObjectTableNVX(
- VkDevice device,
- const VkObjectTableCreateInfoNVX* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkObjectTableNVX* pObjectTable) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreateObjectTableNVX(device, pCreateInfo, pAllocator, pObjectTable);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateObjectTableNVX(device, pCreateInfo, pAllocator, pObjectTable);
- }
- VkResult result = DispatchCreateObjectTableNVX(device, pCreateInfo, pAllocator, pObjectTable);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCreateObjectTableNVX(device, pCreateInfo, pAllocator, pObjectTable, result);
- }
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL DestroyObjectTableNVX(
- VkDevice device,
- VkObjectTableNVX objectTable,
- const VkAllocationCallbacks* pAllocator) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateDestroyObjectTableNVX(device, objectTable, pAllocator);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordDestroyObjectTableNVX(device, objectTable, pAllocator);
- }
- DispatchDestroyObjectTableNVX(device, objectTable, pAllocator);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordDestroyObjectTableNVX(device, objectTable, pAllocator);
- }
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL RegisterObjectsNVX(
- VkDevice device,
- VkObjectTableNVX objectTable,
- uint32_t objectCount,
- const VkObjectTableEntryNVX* const* ppObjectTableEntries,
- const uint32_t* pObjectIndices) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateRegisterObjectsNVX(device, objectTable, objectCount, ppObjectTableEntries, pObjectIndices);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordRegisterObjectsNVX(device, objectTable, objectCount, ppObjectTableEntries, pObjectIndices);
- }
- VkResult result = DispatchRegisterObjectsNVX(device, objectTable, objectCount, ppObjectTableEntries, pObjectIndices);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordRegisterObjectsNVX(device, objectTable, objectCount, ppObjectTableEntries, pObjectIndices, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL UnregisterObjectsNVX(
- VkDevice device,
- VkObjectTableNVX objectTable,
- uint32_t objectCount,
- const VkObjectEntryTypeNVX* pObjectEntryTypes,
- const uint32_t* pObjectIndices) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateUnregisterObjectsNVX(device, objectTable, objectCount, pObjectEntryTypes, pObjectIndices);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordUnregisterObjectsNVX(device, objectTable, objectCount, pObjectEntryTypes, pObjectIndices);
- }
- VkResult result = DispatchUnregisterObjectsNVX(device, objectTable, objectCount, pObjectEntryTypes, pObjectIndices);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordUnregisterObjectsNVX(device, objectTable, objectCount, pObjectEntryTypes, pObjectIndices, result);
- }
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceGeneratedCommandsPropertiesNVX(
- VkPhysicalDevice physicalDevice,
- VkDeviceGeneratedCommandsFeaturesNVX* pFeatures,
- VkDeviceGeneratedCommandsLimitsNVX* pLimits) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceGeneratedCommandsPropertiesNVX(physicalDevice, pFeatures, pLimits);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceGeneratedCommandsPropertiesNVX(physicalDevice, pFeatures, pLimits);
- }
- DispatchGetPhysicalDeviceGeneratedCommandsPropertiesNVX(physicalDevice, pFeatures, pLimits);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceGeneratedCommandsPropertiesNVX(physicalDevice, pFeatures, pLimits);
- }
-}
-
-
-VKAPI_ATTR void VKAPI_CALL CmdSetViewportWScalingNV(
- VkCommandBuffer commandBuffer,
- uint32_t firstViewport,
- uint32_t viewportCount,
- const VkViewportWScalingNV* pViewportWScalings) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdSetViewportWScalingNV(commandBuffer, firstViewport, viewportCount, pViewportWScalings);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdSetViewportWScalingNV(commandBuffer, firstViewport, viewportCount, pViewportWScalings);
- }
- DispatchCmdSetViewportWScalingNV(commandBuffer, firstViewport, viewportCount, pViewportWScalings);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdSetViewportWScalingNV(commandBuffer, firstViewport, viewportCount, pViewportWScalings);
- }
-}
-
-
-VKAPI_ATTR VkResult VKAPI_CALL ReleaseDisplayEXT(
- VkPhysicalDevice physicalDevice,
- VkDisplayKHR display) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateReleaseDisplayEXT(physicalDevice, display);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordReleaseDisplayEXT(physicalDevice, display);
- }
- VkResult result = DispatchReleaseDisplayEXT(physicalDevice, display);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordReleaseDisplayEXT(physicalDevice, display, result);
- }
- return result;
-}
-
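Every trampoline in this generated chassis follows the same four-phase shape; the surrounding functions differ only in their Vulkan signature and in which Pre/Post hooks they invoke. The sketch below is an editorial condensation, not part of the generated file: "InterceptedCall" and "DispatchInterceptedCall" are hypothetical stand-ins for any VkResult-returning entry point, and the chassis declarations (GetLayerDataPtr, object_dispatch, write_lock, the hook methods) are assumed from chassis.h. Void entry points follow the same phases, except that a skip simply returns and the PostCallRecord hooks take no result.

VKAPI_ATTR VkResult VKAPI_CALL InterceptedCall(VkDevice device /* , remaining parameters */) {
    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
    bool skip = false;
    // Phase 1: validation-only hooks; any enabled validation object can veto the call.
    for (auto intercept : layer_data->object_dispatch) {
        auto lock = intercept->write_lock();
        skip |= intercept->PreCallValidateInterceptedCall(device /* , ... */);
        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    // Phase 2: pre-call state recording, once validation has passed.
    for (auto intercept : layer_data->object_dispatch) {
        auto lock = intercept->write_lock();
        intercept->PreCallRecordInterceptedCall(device /* , ... */);
    }
    // Phase 3: forward the call down the dispatch chain to the next layer or the driver.
    VkResult result = DispatchInterceptedCall(device /* , ... */);
    // Phase 4: post-call state recording, with the downstream result available.
    for (auto intercept : layer_data->object_dispatch) {
        auto lock = intercept->write_lock();
        intercept->PostCallRecordInterceptedCall(device /* , ... */, result);
    }
    return result;
}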
-#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
-
-VKAPI_ATTR VkResult VKAPI_CALL AcquireXlibDisplayEXT(
- VkPhysicalDevice physicalDevice,
- Display* dpy,
- VkDisplayKHR display) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateAcquireXlibDisplayEXT(physicalDevice, dpy, display);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordAcquireXlibDisplayEXT(physicalDevice, dpy, display);
- }
- VkResult result = DispatchAcquireXlibDisplayEXT(physicalDevice, dpy, display);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordAcquireXlibDisplayEXT(physicalDevice, dpy, display, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL GetRandROutputDisplayEXT(
- VkPhysicalDevice physicalDevice,
- Display* dpy,
- RROutput rrOutput,
- VkDisplayKHR* pDisplay) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetRandROutputDisplayEXT(physicalDevice, dpy, rrOutput, pDisplay);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetRandROutputDisplayEXT(physicalDevice, dpy, rrOutput, pDisplay);
- }
- VkResult result = DispatchGetRandROutputDisplayEXT(physicalDevice, dpy, rrOutput, pDisplay);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetRandROutputDisplayEXT(physicalDevice, dpy, rrOutput, pDisplay, result);
- }
- return result;
-}
-#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
-
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceCapabilities2EXT(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- VkSurfaceCapabilities2EXT* pSurfaceCapabilities) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceSurfaceCapabilities2EXT(physicalDevice, surface, pSurfaceCapabilities);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(physicalDevice, surface, pSurfaceCapabilities);
- }
- VkResult result = DispatchGetPhysicalDeviceSurfaceCapabilities2EXT(physicalDevice, surface, pSurfaceCapabilities);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(physicalDevice, surface, pSurfaceCapabilities, result);
- }
- return result;
-}
-
-
-VKAPI_ATTR VkResult VKAPI_CALL DisplayPowerControlEXT(
- VkDevice device,
- VkDisplayKHR display,
- const VkDisplayPowerInfoEXT* pDisplayPowerInfo) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateDisplayPowerControlEXT(device, display, pDisplayPowerInfo);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordDisplayPowerControlEXT(device, display, pDisplayPowerInfo);
- }
- VkResult result = DispatchDisplayPowerControlEXT(device, display, pDisplayPowerInfo);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordDisplayPowerControlEXT(device, display, pDisplayPowerInfo, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL RegisterDeviceEventEXT(
- VkDevice device,
- const VkDeviceEventInfoEXT* pDeviceEventInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFence* pFence) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateRegisterDeviceEventEXT(device, pDeviceEventInfo, pAllocator, pFence);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordRegisterDeviceEventEXT(device, pDeviceEventInfo, pAllocator, pFence);
- }
- VkResult result = DispatchRegisterDeviceEventEXT(device, pDeviceEventInfo, pAllocator, pFence);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordRegisterDeviceEventEXT(device, pDeviceEventInfo, pAllocator, pFence, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL RegisterDisplayEventEXT(
- VkDevice device,
- VkDisplayKHR display,
- const VkDisplayEventInfoEXT* pDisplayEventInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFence* pFence) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateRegisterDisplayEventEXT(device, display, pDisplayEventInfo, pAllocator, pFence);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordRegisterDisplayEventEXT(device, display, pDisplayEventInfo, pAllocator, pFence);
- }
- VkResult result = DispatchRegisterDisplayEventEXT(device, display, pDisplayEventInfo, pAllocator, pFence);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordRegisterDisplayEventEXT(device, display, pDisplayEventInfo, pAllocator, pFence, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL GetSwapchainCounterEXT(
- VkDevice device,
- VkSwapchainKHR swapchain,
- VkSurfaceCounterFlagBitsEXT counter,
- uint64_t* pCounterValue) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetSwapchainCounterEXT(device, swapchain, counter, pCounterValue);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetSwapchainCounterEXT(device, swapchain, counter, pCounterValue);
- }
- VkResult result = DispatchGetSwapchainCounterEXT(device, swapchain, counter, pCounterValue);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetSwapchainCounterEXT(device, swapchain, counter, pCounterValue, result);
- }
- return result;
-}
-
-
-VKAPI_ATTR VkResult VKAPI_CALL GetRefreshCycleDurationGOOGLE(
- VkDevice device,
- VkSwapchainKHR swapchain,
- VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetRefreshCycleDurationGOOGLE(device, swapchain, pDisplayTimingProperties);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetRefreshCycleDurationGOOGLE(device, swapchain, pDisplayTimingProperties);
- }
- VkResult result = DispatchGetRefreshCycleDurationGOOGLE(device, swapchain, pDisplayTimingProperties);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetRefreshCycleDurationGOOGLE(device, swapchain, pDisplayTimingProperties, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPastPresentationTimingGOOGLE(
- VkDevice device,
- VkSwapchainKHR swapchain,
- uint32_t* pPresentationTimingCount,
- VkPastPresentationTimingGOOGLE* pPresentationTimings) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPastPresentationTimingGOOGLE(device, swapchain, pPresentationTimingCount, pPresentationTimings);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPastPresentationTimingGOOGLE(device, swapchain, pPresentationTimingCount, pPresentationTimings);
- }
- VkResult result = DispatchGetPastPresentationTimingGOOGLE(device, swapchain, pPresentationTimingCount, pPresentationTimings);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPastPresentationTimingGOOGLE(device, swapchain, pPresentationTimingCount, pPresentationTimings, result);
- }
- return result;
-}
-
-
-
-
-
-
-
-VKAPI_ATTR void VKAPI_CALL CmdSetDiscardRectangleEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstDiscardRectangle,
- uint32_t discardRectangleCount,
- const VkRect2D* pDiscardRectangles) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdSetDiscardRectangleEXT(commandBuffer, firstDiscardRectangle, discardRectangleCount, pDiscardRectangles);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdSetDiscardRectangleEXT(commandBuffer, firstDiscardRectangle, discardRectangleCount, pDiscardRectangles);
- }
- DispatchCmdSetDiscardRectangleEXT(commandBuffer, firstDiscardRectangle, discardRectangleCount, pDiscardRectangles);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdSetDiscardRectangleEXT(commandBuffer, firstDiscardRectangle, discardRectangleCount, pDiscardRectangles);
- }
-}
-
-
-
-
-
-VKAPI_ATTR void VKAPI_CALL SetHdrMetadataEXT(
- VkDevice device,
- uint32_t swapchainCount,
- const VkSwapchainKHR* pSwapchains,
- const VkHdrMetadataEXT* pMetadata) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateSetHdrMetadataEXT(device, swapchainCount, pSwapchains, pMetadata);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordSetHdrMetadataEXT(device, swapchainCount, pSwapchains, pMetadata);
- }
- DispatchSetHdrMetadataEXT(device, swapchainCount, pSwapchains, pMetadata);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordSetHdrMetadataEXT(device, swapchainCount, pSwapchains, pMetadata);
- }
-}
-
-#ifdef VK_USE_PLATFORM_IOS_MVK
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateIOSSurfaceMVK(
- VkInstance instance,
- const VkIOSSurfaceCreateInfoMVK* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreateIOSSurfaceMVK(instance, pCreateInfo, pAllocator, pSurface);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateIOSSurfaceMVK(instance, pCreateInfo, pAllocator, pSurface);
- }
- VkResult result = DispatchCreateIOSSurfaceMVK(instance, pCreateInfo, pAllocator, pSurface);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCreateIOSSurfaceMVK(instance, pCreateInfo, pAllocator, pSurface, result);
- }
- return result;
-}
-#endif // VK_USE_PLATFORM_IOS_MVK
-
-#ifdef VK_USE_PLATFORM_MACOS_MVK
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateMacOSSurfaceMVK(
- VkInstance instance,
- const VkMacOSSurfaceCreateInfoMVK* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreateMacOSSurfaceMVK(instance, pCreateInfo, pAllocator, pSurface);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateMacOSSurfaceMVK(instance, pCreateInfo, pAllocator, pSurface);
- }
- VkResult result = DispatchCreateMacOSSurfaceMVK(instance, pCreateInfo, pAllocator, pSurface);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCreateMacOSSurfaceMVK(instance, pCreateInfo, pAllocator, pSurface, result);
- }
- return result;
-}
-#endif // VK_USE_PLATFORM_MACOS_MVK
-
-
-
-
-VKAPI_ATTR VkResult VKAPI_CALL SetDebugUtilsObjectNameEXT(
- VkDevice device,
- const VkDebugUtilsObjectNameInfoEXT* pNameInfo) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateSetDebugUtilsObjectNameEXT(device, pNameInfo);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordSetDebugUtilsObjectNameEXT(device, pNameInfo);
- }
- layer_data->report_data->DebugReportSetUtilsObjectName(pNameInfo);
- VkResult result = DispatchSetDebugUtilsObjectNameEXT(device, pNameInfo);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordSetDebugUtilsObjectNameEXT(device, pNameInfo, result);
- }
- return result;
-}
-
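In addition to the standard phases, the SetDebugUtilsObjectNameEXT trampoline above hands the name to the layer's own report_data (DebugReportSetUtilsObjectName) so that later validation messages can refer to the object by its label. A hypothetical application-side usage sketch, assuming `device` and `buffer` already exist and VK_EXT_debug_utils is enabled:

#include <vulkan/vulkan.h>

void NameStagingBuffer(VkDevice device, VkBuffer buffer) {
    // Extension entry points are reached through the loader; querying the pointer is
    // what routes the call into the layer trampoline above.
    auto set_name = reinterpret_cast<PFN_vkSetDebugUtilsObjectNameEXT>(
        vkGetDeviceProcAddr(device, "vkSetDebugUtilsObjectNameEXT"));
    if (set_name == nullptr) return;

    VkDebugUtilsObjectNameInfoEXT name_info = {};
    name_info.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT;
    name_info.objectType = VK_OBJECT_TYPE_BUFFER;
    name_info.objectHandle = (uint64_t)buffer;        // non-dispatchable handle widened to uint64_t
    name_info.pObjectName = "vertex-staging-buffer";  // arbitrary, application-chosen label
    set_name(device, &name_info);
}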
-VKAPI_ATTR VkResult VKAPI_CALL SetDebugUtilsObjectTagEXT(
- VkDevice device,
- const VkDebugUtilsObjectTagInfoEXT* pTagInfo) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateSetDebugUtilsObjectTagEXT(device, pTagInfo);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordSetDebugUtilsObjectTagEXT(device, pTagInfo);
- }
- VkResult result = DispatchSetDebugUtilsObjectTagEXT(device, pTagInfo);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordSetDebugUtilsObjectTagEXT(device, pTagInfo, result);
- }
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL QueueBeginDebugUtilsLabelEXT(
- VkQueue queue,
- const VkDebugUtilsLabelEXT* pLabelInfo) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateQueueBeginDebugUtilsLabelEXT(queue, pLabelInfo);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordQueueBeginDebugUtilsLabelEXT(queue, pLabelInfo);
- }
- BeginQueueDebugUtilsLabel(layer_data->report_data, queue, pLabelInfo);
- DispatchQueueBeginDebugUtilsLabelEXT(queue, pLabelInfo);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordQueueBeginDebugUtilsLabelEXT(queue, pLabelInfo);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL QueueEndDebugUtilsLabelEXT(
- VkQueue queue) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateQueueEndDebugUtilsLabelEXT(queue);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordQueueEndDebugUtilsLabelEXT(queue);
- }
- DispatchQueueEndDebugUtilsLabelEXT(queue);
- EndQueueDebugUtilsLabel(layer_data->report_data, queue);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordQueueEndDebugUtilsLabelEXT(queue);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL QueueInsertDebugUtilsLabelEXT(
- VkQueue queue,
- const VkDebugUtilsLabelEXT* pLabelInfo) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateQueueInsertDebugUtilsLabelEXT(queue, pLabelInfo);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordQueueInsertDebugUtilsLabelEXT(queue, pLabelInfo);
- }
- InsertQueueDebugUtilsLabel(layer_data->report_data, queue, pLabelInfo);
- DispatchQueueInsertDebugUtilsLabelEXT(queue, pLabelInfo);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordQueueInsertDebugUtilsLabelEXT(queue, pLabelInfo);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdBeginDebugUtilsLabelEXT(
- VkCommandBuffer commandBuffer,
- const VkDebugUtilsLabelEXT* pLabelInfo) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdBeginDebugUtilsLabelEXT(commandBuffer, pLabelInfo);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdBeginDebugUtilsLabelEXT(commandBuffer, pLabelInfo);
- }
- DispatchCmdBeginDebugUtilsLabelEXT(commandBuffer, pLabelInfo);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdBeginDebugUtilsLabelEXT(commandBuffer, pLabelInfo);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdEndDebugUtilsLabelEXT(
- VkCommandBuffer commandBuffer) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdEndDebugUtilsLabelEXT(commandBuffer);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdEndDebugUtilsLabelEXT(commandBuffer);
- }
- DispatchCmdEndDebugUtilsLabelEXT(commandBuffer);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdEndDebugUtilsLabelEXT(commandBuffer);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdInsertDebugUtilsLabelEXT(
- VkCommandBuffer commandBuffer,
- const VkDebugUtilsLabelEXT* pLabelInfo) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdInsertDebugUtilsLabelEXT(commandBuffer, pLabelInfo);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdInsertDebugUtilsLabelEXT(commandBuffer, pLabelInfo);
- }
- DispatchCmdInsertDebugUtilsLabelEXT(commandBuffer, pLabelInfo);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdInsertDebugUtilsLabelEXT(commandBuffer, pLabelInfo);
- }
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateDebugUtilsMessengerEXT(
- VkInstance instance,
- const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDebugUtilsMessengerEXT* pMessenger) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreateDebugUtilsMessengerEXT(instance, pCreateInfo, pAllocator, pMessenger);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateDebugUtilsMessengerEXT(instance, pCreateInfo, pAllocator, pMessenger);
- }
- VkResult result = DispatchCreateDebugUtilsMessengerEXT(instance, pCreateInfo, pAllocator, pMessenger);
- layer_create_messenger_callback(layer_data->report_data, false, pCreateInfo, pAllocator, pMessenger);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCreateDebugUtilsMessengerEXT(instance, pCreateInfo, pAllocator, pMessenger, result);
- }
- return result;
-}
-
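Note that after dispatching, the chassis also registers the new messenger with its own report_data via layer_create_messenger_callback, so messages generated inside the validation layer itself (not only those forwarded from lower layers) reach the application's callback; DestroyDebugUtilsMessengerEXT below unregisters it symmetrically. A hypothetical application-side sketch of the callback and create info that reach this trampoline:

#include <cstdio>
#include <vulkan/vulkan.h>

static VKAPI_ATTR VkBool32 VKAPI_CALL DebugUtilsCallback(
    VkDebugUtilsMessageSeverityFlagBitsEXT severity,
    VkDebugUtilsMessageTypeFlagsEXT types,
    const VkDebugUtilsMessengerCallbackDataEXT* callback_data,
    void* user_data) {
    std::fprintf(stderr, "[validation] %s\n", callback_data->pMessage);
    return VK_FALSE;  // applications should always return VK_FALSE; VK_TRUE is reserved
}

VkDebugUtilsMessengerCreateInfoEXT MakeMessengerCreateInfo() {
    VkDebugUtilsMessengerCreateInfoEXT ci = {};
    ci.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT;
    ci.messageSeverity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT |
                         VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT;
    ci.messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT |
                     VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT |
                     VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT;
    ci.pfnUserCallback = DebugUtilsCallback;
    return ci;
}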
-VKAPI_ATTR void VKAPI_CALL DestroyDebugUtilsMessengerEXT(
- VkInstance instance,
- VkDebugUtilsMessengerEXT messenger,
- const VkAllocationCallbacks* pAllocator) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateDestroyDebugUtilsMessengerEXT(instance, messenger, pAllocator);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordDestroyDebugUtilsMessengerEXT(instance, messenger, pAllocator);
- }
- DispatchDestroyDebugUtilsMessengerEXT(instance, messenger, pAllocator);
- layer_destroy_messenger_callback(layer_data->report_data, messenger, pAllocator);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordDestroyDebugUtilsMessengerEXT(instance, messenger, pAllocator);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL SubmitDebugUtilsMessageEXT(
- VkInstance instance,
- VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
- VkDebugUtilsMessageTypeFlagsEXT messageTypes,
- const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateSubmitDebugUtilsMessageEXT(instance, messageSeverity, messageTypes, pCallbackData);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordSubmitDebugUtilsMessageEXT(instance, messageSeverity, messageTypes, pCallbackData);
- }
- DispatchSubmitDebugUtilsMessageEXT(instance, messageSeverity, messageTypes, pCallbackData);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordSubmitDebugUtilsMessageEXT(instance, messageSeverity, messageTypes, pCallbackData);
- }
-}
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-
-VKAPI_ATTR VkResult VKAPI_CALL GetAndroidHardwareBufferPropertiesANDROID(
- VkDevice device,
- const struct AHardwareBuffer* buffer,
- VkAndroidHardwareBufferPropertiesANDROID* pProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetAndroidHardwareBufferPropertiesANDROID(device, buffer, pProperties);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetAndroidHardwareBufferPropertiesANDROID(device, buffer, pProperties);
- }
- VkResult result = DispatchGetAndroidHardwareBufferPropertiesANDROID(device, buffer, pProperties);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetAndroidHardwareBufferPropertiesANDROID(device, buffer, pProperties, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL GetMemoryAndroidHardwareBufferANDROID(
- VkDevice device,
- const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo,
- struct AHardwareBuffer** pBuffer) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetMemoryAndroidHardwareBufferANDROID(device, pInfo, pBuffer);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetMemoryAndroidHardwareBufferANDROID(device, pInfo, pBuffer);
- }
- VkResult result = DispatchGetMemoryAndroidHardwareBufferANDROID(device, pInfo, pBuffer);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetMemoryAndroidHardwareBufferANDROID(device, pInfo, pBuffer, result);
- }
- return result;
-}
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-
-
-
-
-
-
-
-
-VKAPI_ATTR void VKAPI_CALL CmdSetSampleLocationsEXT(
- VkCommandBuffer commandBuffer,
- const VkSampleLocationsInfoEXT* pSampleLocationsInfo) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdSetSampleLocationsEXT(commandBuffer, pSampleLocationsInfo);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdSetSampleLocationsEXT(commandBuffer, pSampleLocationsInfo);
- }
- DispatchCmdSetSampleLocationsEXT(commandBuffer, pSampleLocationsInfo);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdSetSampleLocationsEXT(commandBuffer, pSampleLocationsInfo);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceMultisamplePropertiesEXT(
- VkPhysicalDevice physicalDevice,
- VkSampleCountFlagBits samples,
- VkMultisamplePropertiesEXT* pMultisampleProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceMultisamplePropertiesEXT(physicalDevice, samples, pMultisampleProperties);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceMultisamplePropertiesEXT(physicalDevice, samples, pMultisampleProperties);
- }
- DispatchGetPhysicalDeviceMultisamplePropertiesEXT(physicalDevice, samples, pMultisampleProperties);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceMultisamplePropertiesEXT(physicalDevice, samples, pMultisampleProperties);
- }
-}
-
-
-
-
-
-
-
-
-VKAPI_ATTR VkResult VKAPI_CALL GetImageDrmFormatModifierPropertiesEXT(
- VkDevice device,
- VkImage image,
- VkImageDrmFormatModifierPropertiesEXT* pProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetImageDrmFormatModifierPropertiesEXT(device, image, pProperties);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetImageDrmFormatModifierPropertiesEXT(device, image, pProperties);
- }
- VkResult result = DispatchGetImageDrmFormatModifierPropertiesEXT(device, image, pProperties);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetImageDrmFormatModifierPropertiesEXT(device, image, pProperties, result);
- }
- return result;
-}
-
-
-
-
-
-VKAPI_ATTR void VKAPI_CALL CmdBindShadingRateImageNV(
- VkCommandBuffer commandBuffer,
- VkImageView imageView,
- VkImageLayout imageLayout) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdBindShadingRateImageNV(commandBuffer, imageView, imageLayout);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdBindShadingRateImageNV(commandBuffer, imageView, imageLayout);
- }
- DispatchCmdBindShadingRateImageNV(commandBuffer, imageView, imageLayout);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdBindShadingRateImageNV(commandBuffer, imageView, imageLayout);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdSetViewportShadingRatePaletteNV(
- VkCommandBuffer commandBuffer,
- uint32_t firstViewport,
- uint32_t viewportCount,
- const VkShadingRatePaletteNV* pShadingRatePalettes) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdSetViewportShadingRatePaletteNV(commandBuffer, firstViewport, viewportCount, pShadingRatePalettes);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdSetViewportShadingRatePaletteNV(commandBuffer, firstViewport, viewportCount, pShadingRatePalettes);
- }
- DispatchCmdSetViewportShadingRatePaletteNV(commandBuffer, firstViewport, viewportCount, pShadingRatePalettes);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdSetViewportShadingRatePaletteNV(commandBuffer, firstViewport, viewportCount, pShadingRatePalettes);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdSetCoarseSampleOrderNV(
- VkCommandBuffer commandBuffer,
- VkCoarseSampleOrderTypeNV sampleOrderType,
- uint32_t customSampleOrderCount,
- const VkCoarseSampleOrderCustomNV* pCustomSampleOrders) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdSetCoarseSampleOrderNV(commandBuffer, sampleOrderType, customSampleOrderCount, pCustomSampleOrders);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdSetCoarseSampleOrderNV(commandBuffer, sampleOrderType, customSampleOrderCount, pCustomSampleOrders);
- }
- DispatchCmdSetCoarseSampleOrderNV(commandBuffer, sampleOrderType, customSampleOrderCount, pCustomSampleOrders);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdSetCoarseSampleOrderNV(commandBuffer, sampleOrderType, customSampleOrderCount, pCustomSampleOrders);
- }
-}
-
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateAccelerationStructureNV(
- VkDevice device,
- const VkAccelerationStructureCreateInfoNV* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkAccelerationStructureNV* pAccelerationStructure) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreateAccelerationStructureNV(device, pCreateInfo, pAllocator, pAccelerationStructure);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateAccelerationStructureNV(device, pCreateInfo, pAllocator, pAccelerationStructure);
- }
- VkResult result = DispatchCreateAccelerationStructureNV(device, pCreateInfo, pAllocator, pAccelerationStructure);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCreateAccelerationStructureNV(device, pCreateInfo, pAllocator, pAccelerationStructure, result);
- }
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL DestroyAccelerationStructureNV(
- VkDevice device,
- VkAccelerationStructureNV accelerationStructure,
- const VkAllocationCallbacks* pAllocator) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateDestroyAccelerationStructureNV(device, accelerationStructure, pAllocator);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordDestroyAccelerationStructureNV(device, accelerationStructure, pAllocator);
- }
- DispatchDestroyAccelerationStructureNV(device, accelerationStructure, pAllocator);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordDestroyAccelerationStructureNV(device, accelerationStructure, pAllocator);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL GetAccelerationStructureMemoryRequirementsNV(
- VkDevice device,
- const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo,
- VkMemoryRequirements2KHR* pMemoryRequirements) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetAccelerationStructureMemoryRequirementsNV(device, pInfo, pMemoryRequirements);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetAccelerationStructureMemoryRequirementsNV(device, pInfo, pMemoryRequirements);
- }
- DispatchGetAccelerationStructureMemoryRequirementsNV(device, pInfo, pMemoryRequirements);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetAccelerationStructureMemoryRequirementsNV(device, pInfo, pMemoryRequirements);
- }
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL BindAccelerationStructureMemoryNV(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindAccelerationStructureMemoryInfoNV* pBindInfos) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateBindAccelerationStructureMemoryNV(device, bindInfoCount, pBindInfos);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordBindAccelerationStructureMemoryNV(device, bindInfoCount, pBindInfos);
- }
- VkResult result = DispatchBindAccelerationStructureMemoryNV(device, bindInfoCount, pBindInfos);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordBindAccelerationStructureMemoryNV(device, bindInfoCount, pBindInfos, result);
- }
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdBuildAccelerationStructureNV(
- VkCommandBuffer commandBuffer,
- const VkAccelerationStructureInfoNV* pInfo,
- VkBuffer instanceData,
- VkDeviceSize instanceOffset,
- VkBool32 update,
- VkAccelerationStructureNV dst,
- VkAccelerationStructureNV src,
- VkBuffer scratch,
- VkDeviceSize scratchOffset) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdBuildAccelerationStructureNV(commandBuffer, pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdBuildAccelerationStructureNV(commandBuffer, pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset);
- }
- DispatchCmdBuildAccelerationStructureNV(commandBuffer, pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdBuildAccelerationStructureNV(commandBuffer, pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdCopyAccelerationStructureNV(
- VkCommandBuffer commandBuffer,
- VkAccelerationStructureNV dst,
- VkAccelerationStructureNV src,
- VkCopyAccelerationStructureModeNV mode) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdCopyAccelerationStructureNV(commandBuffer, dst, src, mode);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdCopyAccelerationStructureNV(commandBuffer, dst, src, mode);
- }
- DispatchCmdCopyAccelerationStructureNV(commandBuffer, dst, src, mode);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdCopyAccelerationStructureNV(commandBuffer, dst, src, mode);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdTraceRaysNV(
- VkCommandBuffer commandBuffer,
- VkBuffer raygenShaderBindingTableBuffer,
- VkDeviceSize raygenShaderBindingOffset,
- VkBuffer missShaderBindingTableBuffer,
- VkDeviceSize missShaderBindingOffset,
- VkDeviceSize missShaderBindingStride,
- VkBuffer hitShaderBindingTableBuffer,
- VkDeviceSize hitShaderBindingOffset,
- VkDeviceSize hitShaderBindingStride,
- VkBuffer callableShaderBindingTableBuffer,
- VkDeviceSize callableShaderBindingOffset,
- VkDeviceSize callableShaderBindingStride,
- uint32_t width,
- uint32_t height,
- uint32_t depth) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdTraceRaysNV(commandBuffer, raygenShaderBindingTableBuffer, raygenShaderBindingOffset, missShaderBindingTableBuffer, missShaderBindingOffset, missShaderBindingStride, hitShaderBindingTableBuffer, hitShaderBindingOffset, hitShaderBindingStride, callableShaderBindingTableBuffer, callableShaderBindingOffset, callableShaderBindingStride, width, height, depth);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdTraceRaysNV(commandBuffer, raygenShaderBindingTableBuffer, raygenShaderBindingOffset, missShaderBindingTableBuffer, missShaderBindingOffset, missShaderBindingStride, hitShaderBindingTableBuffer, hitShaderBindingOffset, hitShaderBindingStride, callableShaderBindingTableBuffer, callableShaderBindingOffset, callableShaderBindingStride, width, height, depth);
- }
- DispatchCmdTraceRaysNV(commandBuffer, raygenShaderBindingTableBuffer, raygenShaderBindingOffset, missShaderBindingTableBuffer, missShaderBindingOffset, missShaderBindingStride, hitShaderBindingTableBuffer, hitShaderBindingOffset, hitShaderBindingStride, callableShaderBindingTableBuffer, callableShaderBindingOffset, callableShaderBindingStride, width, height, depth);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdTraceRaysNV(commandBuffer, raygenShaderBindingTableBuffer, raygenShaderBindingOffset, missShaderBindingTableBuffer, missShaderBindingOffset, missShaderBindingStride, hitShaderBindingTableBuffer, hitShaderBindingOffset, hitShaderBindingStride, callableShaderBindingTableBuffer, callableShaderBindingOffset, callableShaderBindingStride, width, height, depth);
- }
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL GetRayTracingShaderGroupHandlesNV(
- VkDevice device,
- VkPipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- size_t dataSize,
- void* pData) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetRayTracingShaderGroupHandlesNV(device, pipeline, firstGroup, groupCount, dataSize, pData);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetRayTracingShaderGroupHandlesNV(device, pipeline, firstGroup, groupCount, dataSize, pData);
- }
- VkResult result = DispatchGetRayTracingShaderGroupHandlesNV(device, pipeline, firstGroup, groupCount, dataSize, pData);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetRayTracingShaderGroupHandlesNV(device, pipeline, firstGroup, groupCount, dataSize, pData, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL GetAccelerationStructureHandleNV(
- VkDevice device,
- VkAccelerationStructureNV accelerationStructure,
- size_t dataSize,
- void* pData) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetAccelerationStructureHandleNV(device, accelerationStructure, dataSize, pData);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetAccelerationStructureHandleNV(device, accelerationStructure, dataSize, pData);
- }
- VkResult result = DispatchGetAccelerationStructureHandleNV(device, accelerationStructure, dataSize, pData);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetAccelerationStructureHandleNV(device, accelerationStructure, dataSize, pData, result);
- }
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdWriteAccelerationStructuresPropertiesNV(
- VkCommandBuffer commandBuffer,
- uint32_t accelerationStructureCount,
- const VkAccelerationStructureNV* pAccelerationStructures,
- VkQueryType queryType,
- VkQueryPool queryPool,
- uint32_t firstQuery) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdWriteAccelerationStructuresPropertiesNV(commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdWriteAccelerationStructuresPropertiesNV(commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery);
- }
- DispatchCmdWriteAccelerationStructuresPropertiesNV(commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdWriteAccelerationStructuresPropertiesNV(commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery);
- }
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL CompileDeferredNV(
- VkDevice device,
- VkPipeline pipeline,
- uint32_t shader) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCompileDeferredNV(device, pipeline, shader);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCompileDeferredNV(device, pipeline, shader);
- }
- VkResult result = DispatchCompileDeferredNV(device, pipeline, shader);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCompileDeferredNV(device, pipeline, shader, result);
- }
- return result;
-}
-
-
-
-
-
-VKAPI_ATTR VkResult VKAPI_CALL GetMemoryHostPointerPropertiesEXT(
- VkDevice device,
- VkExternalMemoryHandleTypeFlagBits handleType,
- const void* pHostPointer,
- VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetMemoryHostPointerPropertiesEXT(device, handleType, pHostPointer, pMemoryHostPointerProperties);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetMemoryHostPointerPropertiesEXT(device, handleType, pHostPointer, pMemoryHostPointerProperties);
- }
- VkResult result = DispatchGetMemoryHostPointerPropertiesEXT(device, handleType, pHostPointer, pMemoryHostPointerProperties);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetMemoryHostPointerPropertiesEXT(device, handleType, pHostPointer, pMemoryHostPointerProperties, result);
- }
- return result;
-}
-
-
-VKAPI_ATTR void VKAPI_CALL CmdWriteBufferMarkerAMD(
- VkCommandBuffer commandBuffer,
- VkPipelineStageFlagBits pipelineStage,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- uint32_t marker) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdWriteBufferMarkerAMD(commandBuffer, pipelineStage, dstBuffer, dstOffset, marker);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdWriteBufferMarkerAMD(commandBuffer, pipelineStage, dstBuffer, dstOffset, marker);
- }
- DispatchCmdWriteBufferMarkerAMD(commandBuffer, pipelineStage, dstBuffer, dstOffset, marker);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdWriteBufferMarkerAMD(commandBuffer, pipelineStage, dstBuffer, dstOffset, marker);
- }
-}
-
-
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceCalibrateableTimeDomainsEXT(
- VkPhysicalDevice physicalDevice,
- uint32_t* pTimeDomainCount,
- VkTimeDomainEXT* pTimeDomains) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceCalibrateableTimeDomainsEXT(physicalDevice, pTimeDomainCount, pTimeDomains);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceCalibrateableTimeDomainsEXT(physicalDevice, pTimeDomainCount, pTimeDomains);
- }
- VkResult result = DispatchGetPhysicalDeviceCalibrateableTimeDomainsEXT(physicalDevice, pTimeDomainCount, pTimeDomains);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceCalibrateableTimeDomainsEXT(physicalDevice, pTimeDomainCount, pTimeDomains, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL GetCalibratedTimestampsEXT(
- VkDevice device,
- uint32_t timestampCount,
- const VkCalibratedTimestampInfoEXT* pTimestampInfos,
- uint64_t* pTimestamps,
- uint64_t* pMaxDeviation) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetCalibratedTimestampsEXT(device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetCalibratedTimestampsEXT(device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation);
- }
- VkResult result = DispatchGetCalibratedTimestampsEXT(device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetCalibratedTimestampsEXT(device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation, result);
- }
- return result;
-}
-
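A hypothetical usage sketch for the two calibrated-timestamp entry points above, assuming VK_EXT_calibrated_timestamps is enabled and that `physical_device` and `device` are valid; in real code the functions would be fetched through vkGetInstanceProcAddr/vkGetDeviceProcAddr rather than called by prototype as shown:

#include <vector>
#include <vulkan/vulkan.h>

void SampleCalibratedTimestamps(VkPhysicalDevice physical_device, VkDevice device) {
    // Enumerate which time domains the implementation can calibrate against.
    uint32_t domain_count = 0;
    vkGetPhysicalDeviceCalibrateableTimeDomainsEXT(physical_device, &domain_count, nullptr);
    std::vector<VkTimeDomainEXT> domains(domain_count);
    vkGetPhysicalDeviceCalibrateableTimeDomainsEXT(physical_device, &domain_count, domains.data());

    // Sample a device timestamp and a host timestamp in one call so they can be
    // correlated; pMaxDeviation bounds how far apart the two samples were taken.
    VkCalibratedTimestampInfoEXT infos[2] = {};
    infos[0].sType = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT;
    infos[0].timeDomain = VK_TIME_DOMAIN_DEVICE_EXT;
    infos[1].sType = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT;
    infos[1].timeDomain = VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT;  // assumes this domain was reported above
    uint64_t timestamps[2] = {};
    uint64_t max_deviation = 0;
    vkGetCalibratedTimestampsEXT(device, 2, infos, timestamps, &max_deviation);
}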
-
-
-
-#ifdef VK_USE_PLATFORM_GGP
-#endif // VK_USE_PLATFORM_GGP
-
-
-
-
-
-VKAPI_ATTR void VKAPI_CALL CmdDrawMeshTasksNV(
- VkCommandBuffer commandBuffer,
- uint32_t taskCount,
- uint32_t firstTask) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdDrawMeshTasksNV(commandBuffer, taskCount, firstTask);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdDrawMeshTasksNV(commandBuffer, taskCount, firstTask);
- }
- DispatchCmdDrawMeshTasksNV(commandBuffer, taskCount, firstTask);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdDrawMeshTasksNV(commandBuffer, taskCount, firstTask);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdDrawMeshTasksIndirectNV(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- uint32_t drawCount,
- uint32_t stride) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdDrawMeshTasksIndirectNV(commandBuffer, buffer, offset, drawCount, stride);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdDrawMeshTasksIndirectNV(commandBuffer, buffer, offset, drawCount, stride);
- }
- DispatchCmdDrawMeshTasksIndirectNV(commandBuffer, buffer, offset, drawCount, stride);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdDrawMeshTasksIndirectNV(commandBuffer, buffer, offset, drawCount, stride);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL CmdDrawMeshTasksIndirectCountNV(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdDrawMeshTasksIndirectCountNV(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdDrawMeshTasksIndirectCountNV(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
- }
- DispatchCmdDrawMeshTasksIndirectCountNV(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdDrawMeshTasksIndirectCountNV(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
- }
-}
-
-
-
-
-VKAPI_ATTR void VKAPI_CALL CmdSetExclusiveScissorNV(
- VkCommandBuffer commandBuffer,
- uint32_t firstExclusiveScissor,
- uint32_t exclusiveScissorCount,
- const VkRect2D* pExclusiveScissors) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdSetExclusiveScissorNV(commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdSetExclusiveScissorNV(commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors);
- }
- DispatchCmdSetExclusiveScissorNV(commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdSetExclusiveScissorNV(commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors);
- }
-}
-
-
-VKAPI_ATTR void VKAPI_CALL CmdSetCheckpointNV(
- VkCommandBuffer commandBuffer,
- const void* pCheckpointMarker) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdSetCheckpointNV(commandBuffer, pCheckpointMarker);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdSetCheckpointNV(commandBuffer, pCheckpointMarker);
- }
- DispatchCmdSetCheckpointNV(commandBuffer, pCheckpointMarker);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdSetCheckpointNV(commandBuffer, pCheckpointMarker);
- }
-}
-
-VKAPI_ATTR void VKAPI_CALL GetQueueCheckpointDataNV(
- VkQueue queue,
- uint32_t* pCheckpointDataCount,
- VkCheckpointDataNV* pCheckpointData) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetQueueCheckpointDataNV(queue, pCheckpointDataCount, pCheckpointData);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetQueueCheckpointDataNV(queue, pCheckpointDataCount, pCheckpointData);
- }
- DispatchGetQueueCheckpointDataNV(queue, pCheckpointDataCount, pCheckpointData);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetQueueCheckpointDataNV(queue, pCheckpointDataCount, pCheckpointData);
- }
-}
-
-
-
-VKAPI_ATTR VkResult VKAPI_CALL InitializePerformanceApiINTEL(
- VkDevice device,
- const VkInitializePerformanceApiInfoINTEL* pInitializeInfo) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateInitializePerformanceApiINTEL(device, pInitializeInfo);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordInitializePerformanceApiINTEL(device, pInitializeInfo);
- }
- VkResult result = DispatchInitializePerformanceApiINTEL(device, pInitializeInfo);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordInitializePerformanceApiINTEL(device, pInitializeInfo, result);
- }
- return result;
-}
-
-VKAPI_ATTR void VKAPI_CALL UninitializePerformanceApiINTEL(
- VkDevice device) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateUninitializePerformanceApiINTEL(device);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordUninitializePerformanceApiINTEL(device);
- }
- DispatchUninitializePerformanceApiINTEL(device);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordUninitializePerformanceApiINTEL(device);
- }
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL CmdSetPerformanceMarkerINTEL(
- VkCommandBuffer commandBuffer,
- const VkPerformanceMarkerInfoINTEL* pMarkerInfo) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdSetPerformanceMarkerINTEL(commandBuffer, pMarkerInfo);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdSetPerformanceMarkerINTEL(commandBuffer, pMarkerInfo);
- }
- VkResult result = DispatchCmdSetPerformanceMarkerINTEL(commandBuffer, pMarkerInfo);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdSetPerformanceMarkerINTEL(commandBuffer, pMarkerInfo, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL CmdSetPerformanceStreamMarkerINTEL(
- VkCommandBuffer commandBuffer,
- const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdSetPerformanceStreamMarkerINTEL(commandBuffer, pMarkerInfo);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdSetPerformanceStreamMarkerINTEL(commandBuffer, pMarkerInfo);
- }
- VkResult result = DispatchCmdSetPerformanceStreamMarkerINTEL(commandBuffer, pMarkerInfo);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdSetPerformanceStreamMarkerINTEL(commandBuffer, pMarkerInfo, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL CmdSetPerformanceOverrideINTEL(
- VkCommandBuffer commandBuffer,
- const VkPerformanceOverrideInfoINTEL* pOverrideInfo) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdSetPerformanceOverrideINTEL(commandBuffer, pOverrideInfo);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdSetPerformanceOverrideINTEL(commandBuffer, pOverrideInfo);
- }
- VkResult result = DispatchCmdSetPerformanceOverrideINTEL(commandBuffer, pOverrideInfo);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdSetPerformanceOverrideINTEL(commandBuffer, pOverrideInfo, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL AcquirePerformanceConfigurationINTEL(
- VkDevice device,
- const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo,
- VkPerformanceConfigurationINTEL* pConfiguration) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateAcquirePerformanceConfigurationINTEL(device, pAcquireInfo, pConfiguration);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordAcquirePerformanceConfigurationINTEL(device, pAcquireInfo, pConfiguration);
- }
- VkResult result = DispatchAcquirePerformanceConfigurationINTEL(device, pAcquireInfo, pConfiguration);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordAcquirePerformanceConfigurationINTEL(device, pAcquireInfo, pConfiguration, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL ReleasePerformanceConfigurationINTEL(
- VkDevice device,
- VkPerformanceConfigurationINTEL configuration) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateReleasePerformanceConfigurationINTEL(device, configuration);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordReleasePerformanceConfigurationINTEL(device, configuration);
- }
- VkResult result = DispatchReleasePerformanceConfigurationINTEL(device, configuration);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordReleasePerformanceConfigurationINTEL(device, configuration, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL QueueSetPerformanceConfigurationINTEL(
- VkQueue queue,
- VkPerformanceConfigurationINTEL configuration) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateQueueSetPerformanceConfigurationINTEL(queue, configuration);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordQueueSetPerformanceConfigurationINTEL(queue, configuration);
- }
- VkResult result = DispatchQueueSetPerformanceConfigurationINTEL(queue, configuration);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordQueueSetPerformanceConfigurationINTEL(queue, configuration, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPerformanceParameterINTEL(
- VkDevice device,
- VkPerformanceParameterTypeINTEL parameter,
- VkPerformanceValueINTEL* pValue) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPerformanceParameterINTEL(device, parameter, pValue);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPerformanceParameterINTEL(device, parameter, pValue);
- }
- VkResult result = DispatchGetPerformanceParameterINTEL(device, parameter, pValue);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPerformanceParameterINTEL(device, parameter, pValue, result);
- }
- return result;
-}
-
-
-
-VKAPI_ATTR void VKAPI_CALL SetLocalDimmingAMD(
- VkDevice device,
- VkSwapchainKHR swapChain,
- VkBool32 localDimmingEnable) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateSetLocalDimmingAMD(device, swapChain, localDimmingEnable);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordSetLocalDimmingAMD(device, swapChain, localDimmingEnable);
- }
- DispatchSetLocalDimmingAMD(device, swapChain, localDimmingEnable);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordSetLocalDimmingAMD(device, swapChain, localDimmingEnable);
- }
-}
-
-#ifdef VK_USE_PLATFORM_FUCHSIA
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateImagePipeSurfaceFUCHSIA(
- VkInstance instance,
- const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreateImagePipeSurfaceFUCHSIA(instance, pCreateInfo, pAllocator, pSurface);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateImagePipeSurfaceFUCHSIA(instance, pCreateInfo, pAllocator, pSurface);
- }
- VkResult result = DispatchCreateImagePipeSurfaceFUCHSIA(instance, pCreateInfo, pAllocator, pSurface);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCreateImagePipeSurfaceFUCHSIA(instance, pCreateInfo, pAllocator, pSurface, result);
- }
- return result;
-}
-#endif // VK_USE_PLATFORM_FUCHSIA
-
-#ifdef VK_USE_PLATFORM_METAL_EXT
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateMetalSurfaceEXT(
- VkInstance instance,
- const VkMetalSurfaceCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreateMetalSurfaceEXT(instance, pCreateInfo, pAllocator, pSurface);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateMetalSurfaceEXT(instance, pCreateInfo, pAllocator, pSurface);
- }
- VkResult result = DispatchCreateMetalSurfaceEXT(instance, pCreateInfo, pAllocator, pSurface);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCreateMetalSurfaceEXT(instance, pCreateInfo, pAllocator, pSurface, result);
- }
- return result;
-}
-#endif // VK_USE_PLATFORM_METAL_EXT
-
-
-
-
-
-
-
-
-
-
-
-
-VKAPI_ATTR VkDeviceAddress VKAPI_CALL GetBufferDeviceAddressEXT(
- VkDevice device,
- const VkBufferDeviceAddressInfoEXT* pInfo) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetBufferDeviceAddressEXT(device, pInfo);
- if (skip) return 0;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetBufferDeviceAddressEXT(device, pInfo);
- }
- VkDeviceAddress result = DispatchGetBufferDeviceAddressEXT(device, pInfo);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetBufferDeviceAddressEXT(device, pInfo);
- }
- return result;
-}
-
-
-
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceCooperativeMatrixPropertiesNV(
- VkPhysicalDevice physicalDevice,
- uint32_t* pPropertyCount,
- VkCooperativeMatrixPropertiesNV* pProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceCooperativeMatrixPropertiesNV(physicalDevice, pPropertyCount, pProperties);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceCooperativeMatrixPropertiesNV(physicalDevice, pPropertyCount, pProperties);
- }
- VkResult result = DispatchGetPhysicalDeviceCooperativeMatrixPropertiesNV(physicalDevice, pPropertyCount, pProperties);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceCooperativeMatrixPropertiesNV(physicalDevice, pPropertyCount, pProperties, result);
- }
- return result;
-}
-
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
- VkPhysicalDevice physicalDevice,
- uint32_t* pCombinationCount,
- VkFramebufferMixedSamplesCombinationNV* pCombinations) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(physicalDevice, pCombinationCount, pCombinations);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(physicalDevice, pCombinationCount, pCombinations);
- }
- VkResult result = DispatchGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(physicalDevice, pCombinationCount, pCombinations);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(physicalDevice, pCombinationCount, pCombinations, result);
- }
- return result;
-}
-
-
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfacePresentModes2EXT(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
- uint32_t* pPresentModeCount,
- VkPresentModeKHR* pPresentModes) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetPhysicalDeviceSurfacePresentModes2EXT(physicalDevice, pSurfaceInfo, pPresentModeCount, pPresentModes);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetPhysicalDeviceSurfacePresentModes2EXT(physicalDevice, pSurfaceInfo, pPresentModeCount, pPresentModes);
- }
- VkResult result = DispatchGetPhysicalDeviceSurfacePresentModes2EXT(physicalDevice, pSurfaceInfo, pPresentModeCount, pPresentModes);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetPhysicalDeviceSurfacePresentModes2EXT(physicalDevice, pSurfaceInfo, pPresentModeCount, pPresentModes, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL AcquireFullScreenExclusiveModeEXT(
- VkDevice device,
- VkSwapchainKHR swapchain) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateAcquireFullScreenExclusiveModeEXT(device, swapchain);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordAcquireFullScreenExclusiveModeEXT(device, swapchain);
- }
- VkResult result = DispatchAcquireFullScreenExclusiveModeEXT(device, swapchain);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordAcquireFullScreenExclusiveModeEXT(device, swapchain, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL ReleaseFullScreenExclusiveModeEXT(
- VkDevice device,
- VkSwapchainKHR swapchain) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateReleaseFullScreenExclusiveModeEXT(device, swapchain);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordReleaseFullScreenExclusiveModeEXT(device, swapchain);
- }
- VkResult result = DispatchReleaseFullScreenExclusiveModeEXT(device, swapchain);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordReleaseFullScreenExclusiveModeEXT(device, swapchain, result);
- }
- return result;
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL GetDeviceGroupSurfacePresentModes2EXT(
- VkDevice device,
- const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
- VkDeviceGroupPresentModeFlagsKHR* pModes) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateGetDeviceGroupSurfacePresentModes2EXT(device, pSurfaceInfo, pModes);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordGetDeviceGroupSurfacePresentModes2EXT(device, pSurfaceInfo, pModes);
- }
- VkResult result = DispatchGetDeviceGroupSurfacePresentModes2EXT(device, pSurfaceInfo, pModes);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordGetDeviceGroupSurfacePresentModes2EXT(device, pSurfaceInfo, pModes, result);
- }
- return result;
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateHeadlessSurfaceEXT(
- VkInstance instance,
- const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreateHeadlessSurfaceEXT(instance, pCreateInfo, pAllocator, pSurface);
- if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateHeadlessSurfaceEXT(instance, pCreateInfo, pAllocator, pSurface);
- }
- VkResult result = DispatchCreateHeadlessSurfaceEXT(instance, pCreateInfo, pAllocator, pSurface);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCreateHeadlessSurfaceEXT(instance, pCreateInfo, pAllocator, pSurface, result);
- }
- return result;
-}
-
-
-VKAPI_ATTR void VKAPI_CALL CmdSetLineStippleEXT(
- VkCommandBuffer commandBuffer,
- uint32_t lineStippleFactor,
- uint16_t lineStipplePattern) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCmdSetLineStippleEXT(commandBuffer, lineStippleFactor, lineStipplePattern);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordCmdSetLineStippleEXT(commandBuffer, lineStippleFactor, lineStipplePattern);
- }
- DispatchCmdSetLineStippleEXT(commandBuffer, lineStippleFactor, lineStipplePattern);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordCmdSetLineStippleEXT(commandBuffer, lineStippleFactor, lineStipplePattern);
- }
-}
-
-
-VKAPI_ATTR void VKAPI_CALL ResetQueryPoolEXT(
- VkDevice device,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- bool skip = false;
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateResetQueryPoolEXT(device, queryPool, firstQuery, queryCount);
- if (skip) return;
- }
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PreCallRecordResetQueryPoolEXT(device, queryPool, firstQuery, queryCount);
- }
- DispatchResetQueryPoolEXT(device, queryPool, firstQuery, queryCount);
- for (auto intercept : layer_data->object_dispatch) {
- auto lock = intercept->write_lock();
- intercept->PostCallRecordResetQueryPoolEXT(device, queryPool, firstQuery, queryCount);
- }
-}
-
-
-
-
-// Map of each intercepted API name to its associated function data
-const std::unordered_map<std::string, function_data> name_to_funcptr_map = {
- {"vkCreateInstance", {true, (void*)CreateInstance}},
- {"vkDestroyInstance", {true, (void*)DestroyInstance}},
- {"vkEnumeratePhysicalDevices", {true, (void*)EnumeratePhysicalDevices}},
- {"vkGetPhysicalDeviceFeatures", {true, (void*)GetPhysicalDeviceFeatures}},
- {"vkGetPhysicalDeviceFormatProperties", {true, (void*)GetPhysicalDeviceFormatProperties}},
- {"vkGetPhysicalDeviceImageFormatProperties", {true, (void*)GetPhysicalDeviceImageFormatProperties}},
- {"vkGetPhysicalDeviceProperties", {true, (void*)GetPhysicalDeviceProperties}},
- {"vkGetPhysicalDeviceQueueFamilyProperties", {true, (void*)GetPhysicalDeviceQueueFamilyProperties}},
- {"vkGetPhysicalDeviceMemoryProperties", {true, (void*)GetPhysicalDeviceMemoryProperties}},
- {"vkGetInstanceProcAddr", {true, (void*)GetInstanceProcAddr}},
- {"vkGetDeviceProcAddr", {false, (void*)GetDeviceProcAddr}},
- {"vkCreateDevice", {true, (void*)CreateDevice}},
- {"vkDestroyDevice", {false, (void*)DestroyDevice}},
- {"vkEnumerateInstanceExtensionProperties", {false, (void*)EnumerateInstanceExtensionProperties}},
- {"vkEnumerateDeviceExtensionProperties", {true, (void*)EnumerateDeviceExtensionProperties}},
- {"vkEnumerateInstanceLayerProperties", {false, (void*)EnumerateInstanceLayerProperties}},
- {"vkEnumerateDeviceLayerProperties", {true, (void*)EnumerateDeviceLayerProperties}},
- {"vkGetDeviceQueue", {false, (void*)GetDeviceQueue}},
- {"vkQueueSubmit", {false, (void*)QueueSubmit}},
- {"vkQueueWaitIdle", {false, (void*)QueueWaitIdle}},
- {"vkDeviceWaitIdle", {false, (void*)DeviceWaitIdle}},
- {"vkAllocateMemory", {false, (void*)AllocateMemory}},
- {"vkFreeMemory", {false, (void*)FreeMemory}},
- {"vkMapMemory", {false, (void*)MapMemory}},
- {"vkUnmapMemory", {false, (void*)UnmapMemory}},
- {"vkFlushMappedMemoryRanges", {false, (void*)FlushMappedMemoryRanges}},
- {"vkInvalidateMappedMemoryRanges", {false, (void*)InvalidateMappedMemoryRanges}},
- {"vkGetDeviceMemoryCommitment", {false, (void*)GetDeviceMemoryCommitment}},
- {"vkBindBufferMemory", {false, (void*)BindBufferMemory}},
- {"vkBindImageMemory", {false, (void*)BindImageMemory}},
- {"vkGetBufferMemoryRequirements", {false, (void*)GetBufferMemoryRequirements}},
- {"vkGetImageMemoryRequirements", {false, (void*)GetImageMemoryRequirements}},
- {"vkGetImageSparseMemoryRequirements", {false, (void*)GetImageSparseMemoryRequirements}},
- {"vkGetPhysicalDeviceSparseImageFormatProperties", {true, (void*)GetPhysicalDeviceSparseImageFormatProperties}},
- {"vkQueueBindSparse", {false, (void*)QueueBindSparse}},
- {"vkCreateFence", {false, (void*)CreateFence}},
- {"vkDestroyFence", {false, (void*)DestroyFence}},
- {"vkResetFences", {false, (void*)ResetFences}},
- {"vkGetFenceStatus", {false, (void*)GetFenceStatus}},
- {"vkWaitForFences", {false, (void*)WaitForFences}},
- {"vkCreateSemaphore", {false, (void*)CreateSemaphore}},
- {"vkDestroySemaphore", {false, (void*)DestroySemaphore}},
- {"vkCreateEvent", {false, (void*)CreateEvent}},
- {"vkDestroyEvent", {false, (void*)DestroyEvent}},
- {"vkGetEventStatus", {false, (void*)GetEventStatus}},
- {"vkSetEvent", {false, (void*)SetEvent}},
- {"vkResetEvent", {false, (void*)ResetEvent}},
- {"vkCreateQueryPool", {false, (void*)CreateQueryPool}},
- {"vkDestroyQueryPool", {false, (void*)DestroyQueryPool}},
- {"vkGetQueryPoolResults", {false, (void*)GetQueryPoolResults}},
- {"vkCreateBuffer", {false, (void*)CreateBuffer}},
- {"vkDestroyBuffer", {false, (void*)DestroyBuffer}},
- {"vkCreateBufferView", {false, (void*)CreateBufferView}},
- {"vkDestroyBufferView", {false, (void*)DestroyBufferView}},
- {"vkCreateImage", {false, (void*)CreateImage}},
- {"vkDestroyImage", {false, (void*)DestroyImage}},
- {"vkGetImageSubresourceLayout", {false, (void*)GetImageSubresourceLayout}},
- {"vkCreateImageView", {false, (void*)CreateImageView}},
- {"vkDestroyImageView", {false, (void*)DestroyImageView}},
- {"vkCreateShaderModule", {false, (void*)CreateShaderModule}},
- {"vkDestroyShaderModule", {false, (void*)DestroyShaderModule}},
- {"vkCreatePipelineCache", {false, (void*)CreatePipelineCache}},
- {"vkDestroyPipelineCache", {false, (void*)DestroyPipelineCache}},
- {"vkGetPipelineCacheData", {false, (void*)GetPipelineCacheData}},
- {"vkMergePipelineCaches", {false, (void*)MergePipelineCaches}},
- {"vkCreateGraphicsPipelines", {false, (void*)CreateGraphicsPipelines}},
- {"vkCreateComputePipelines", {false, (void*)CreateComputePipelines}},
- {"vkDestroyPipeline", {false, (void*)DestroyPipeline}},
- {"vkCreatePipelineLayout", {false, (void*)CreatePipelineLayout}},
- {"vkDestroyPipelineLayout", {false, (void*)DestroyPipelineLayout}},
- {"vkCreateSampler", {false, (void*)CreateSampler}},
- {"vkDestroySampler", {false, (void*)DestroySampler}},
- {"vkCreateDescriptorSetLayout", {false, (void*)CreateDescriptorSetLayout}},
- {"vkDestroyDescriptorSetLayout", {false, (void*)DestroyDescriptorSetLayout}},
- {"vkCreateDescriptorPool", {false, (void*)CreateDescriptorPool}},
- {"vkDestroyDescriptorPool", {false, (void*)DestroyDescriptorPool}},
- {"vkResetDescriptorPool", {false, (void*)ResetDescriptorPool}},
- {"vkAllocateDescriptorSets", {false, (void*)AllocateDescriptorSets}},
- {"vkFreeDescriptorSets", {false, (void*)FreeDescriptorSets}},
- {"vkUpdateDescriptorSets", {false, (void*)UpdateDescriptorSets}},
- {"vkCreateFramebuffer", {false, (void*)CreateFramebuffer}},
- {"vkDestroyFramebuffer", {false, (void*)DestroyFramebuffer}},
- {"vkCreateRenderPass", {false, (void*)CreateRenderPass}},
- {"vkDestroyRenderPass", {false, (void*)DestroyRenderPass}},
- {"vkGetRenderAreaGranularity", {false, (void*)GetRenderAreaGranularity}},
- {"vkCreateCommandPool", {false, (void*)CreateCommandPool}},
- {"vkDestroyCommandPool", {false, (void*)DestroyCommandPool}},
- {"vkResetCommandPool", {false, (void*)ResetCommandPool}},
- {"vkAllocateCommandBuffers", {false, (void*)AllocateCommandBuffers}},
- {"vkFreeCommandBuffers", {false, (void*)FreeCommandBuffers}},
- {"vkBeginCommandBuffer", {false, (void*)BeginCommandBuffer}},
- {"vkEndCommandBuffer", {false, (void*)EndCommandBuffer}},
- {"vkResetCommandBuffer", {false, (void*)ResetCommandBuffer}},
- {"vkCmdBindPipeline", {false, (void*)CmdBindPipeline}},
- {"vkCmdSetViewport", {false, (void*)CmdSetViewport}},
- {"vkCmdSetScissor", {false, (void*)CmdSetScissor}},
- {"vkCmdSetLineWidth", {false, (void*)CmdSetLineWidth}},
- {"vkCmdSetDepthBias", {false, (void*)CmdSetDepthBias}},
- {"vkCmdSetBlendConstants", {false, (void*)CmdSetBlendConstants}},
- {"vkCmdSetDepthBounds", {false, (void*)CmdSetDepthBounds}},
- {"vkCmdSetStencilCompareMask", {false, (void*)CmdSetStencilCompareMask}},
- {"vkCmdSetStencilWriteMask", {false, (void*)CmdSetStencilWriteMask}},
- {"vkCmdSetStencilReference", {false, (void*)CmdSetStencilReference}},
- {"vkCmdBindDescriptorSets", {false, (void*)CmdBindDescriptorSets}},
- {"vkCmdBindIndexBuffer", {false, (void*)CmdBindIndexBuffer}},
- {"vkCmdBindVertexBuffers", {false, (void*)CmdBindVertexBuffers}},
- {"vkCmdDraw", {false, (void*)CmdDraw}},
- {"vkCmdDrawIndexed", {false, (void*)CmdDrawIndexed}},
- {"vkCmdDrawIndirect", {false, (void*)CmdDrawIndirect}},
- {"vkCmdDrawIndexedIndirect", {false, (void*)CmdDrawIndexedIndirect}},
- {"vkCmdDispatch", {false, (void*)CmdDispatch}},
- {"vkCmdDispatchIndirect", {false, (void*)CmdDispatchIndirect}},
- {"vkCmdCopyBuffer", {false, (void*)CmdCopyBuffer}},
- {"vkCmdCopyImage", {false, (void*)CmdCopyImage}},
- {"vkCmdBlitImage", {false, (void*)CmdBlitImage}},
- {"vkCmdCopyBufferToImage", {false, (void*)CmdCopyBufferToImage}},
- {"vkCmdCopyImageToBuffer", {false, (void*)CmdCopyImageToBuffer}},
- {"vkCmdUpdateBuffer", {false, (void*)CmdUpdateBuffer}},
- {"vkCmdFillBuffer", {false, (void*)CmdFillBuffer}},
- {"vkCmdClearColorImage", {false, (void*)CmdClearColorImage}},
- {"vkCmdClearDepthStencilImage", {false, (void*)CmdClearDepthStencilImage}},
- {"vkCmdClearAttachments", {false, (void*)CmdClearAttachments}},
- {"vkCmdResolveImage", {false, (void*)CmdResolveImage}},
- {"vkCmdSetEvent", {false, (void*)CmdSetEvent}},
- {"vkCmdResetEvent", {false, (void*)CmdResetEvent}},
- {"vkCmdWaitEvents", {false, (void*)CmdWaitEvents}},
- {"vkCmdPipelineBarrier", {false, (void*)CmdPipelineBarrier}},
- {"vkCmdBeginQuery", {false, (void*)CmdBeginQuery}},
- {"vkCmdEndQuery", {false, (void*)CmdEndQuery}},
- {"vkCmdResetQueryPool", {false, (void*)CmdResetQueryPool}},
- {"vkCmdWriteTimestamp", {false, (void*)CmdWriteTimestamp}},
- {"vkCmdCopyQueryPoolResults", {false, (void*)CmdCopyQueryPoolResults}},
- {"vkCmdPushConstants", {false, (void*)CmdPushConstants}},
- {"vkCmdBeginRenderPass", {false, (void*)CmdBeginRenderPass}},
- {"vkCmdNextSubpass", {false, (void*)CmdNextSubpass}},
- {"vkCmdEndRenderPass", {false, (void*)CmdEndRenderPass}},
- {"vkCmdExecuteCommands", {false, (void*)CmdExecuteCommands}},
- {"vkBindBufferMemory2", {false, (void*)BindBufferMemory2}},
- {"vkBindImageMemory2", {false, (void*)BindImageMemory2}},
- {"vkGetDeviceGroupPeerMemoryFeatures", {false, (void*)GetDeviceGroupPeerMemoryFeatures}},
- {"vkCmdSetDeviceMask", {false, (void*)CmdSetDeviceMask}},
- {"vkCmdDispatchBase", {false, (void*)CmdDispatchBase}},
- {"vkEnumeratePhysicalDeviceGroups", {true, (void*)EnumeratePhysicalDeviceGroups}},
- {"vkGetImageMemoryRequirements2", {false, (void*)GetImageMemoryRequirements2}},
- {"vkGetBufferMemoryRequirements2", {false, (void*)GetBufferMemoryRequirements2}},
- {"vkGetImageSparseMemoryRequirements2", {false, (void*)GetImageSparseMemoryRequirements2}},
- {"vkGetPhysicalDeviceFeatures2", {true, (void*)GetPhysicalDeviceFeatures2}},
- {"vkGetPhysicalDeviceProperties2", {true, (void*)GetPhysicalDeviceProperties2}},
- {"vkGetPhysicalDeviceFormatProperties2", {true, (void*)GetPhysicalDeviceFormatProperties2}},
- {"vkGetPhysicalDeviceImageFormatProperties2", {true, (void*)GetPhysicalDeviceImageFormatProperties2}},
- {"vkGetPhysicalDeviceQueueFamilyProperties2", {true, (void*)GetPhysicalDeviceQueueFamilyProperties2}},
- {"vkGetPhysicalDeviceMemoryProperties2", {true, (void*)GetPhysicalDeviceMemoryProperties2}},
- {"vkGetPhysicalDeviceSparseImageFormatProperties2", {true, (void*)GetPhysicalDeviceSparseImageFormatProperties2}},
- {"vkTrimCommandPool", {false, (void*)TrimCommandPool}},
- {"vkGetDeviceQueue2", {false, (void*)GetDeviceQueue2}},
- {"vkCreateSamplerYcbcrConversion", {false, (void*)CreateSamplerYcbcrConversion}},
- {"vkDestroySamplerYcbcrConversion", {false, (void*)DestroySamplerYcbcrConversion}},
- {"vkCreateDescriptorUpdateTemplate", {false, (void*)CreateDescriptorUpdateTemplate}},
- {"vkDestroyDescriptorUpdateTemplate", {false, (void*)DestroyDescriptorUpdateTemplate}},
- {"vkUpdateDescriptorSetWithTemplate", {false, (void*)UpdateDescriptorSetWithTemplate}},
- {"vkGetPhysicalDeviceExternalBufferProperties", {true, (void*)GetPhysicalDeviceExternalBufferProperties}},
- {"vkGetPhysicalDeviceExternalFenceProperties", {true, (void*)GetPhysicalDeviceExternalFenceProperties}},
- {"vkGetPhysicalDeviceExternalSemaphoreProperties", {true, (void*)GetPhysicalDeviceExternalSemaphoreProperties}},
- {"vkGetDescriptorSetLayoutSupport", {false, (void*)GetDescriptorSetLayoutSupport}},
- {"vkDestroySurfaceKHR", {true, (void*)DestroySurfaceKHR}},
- {"vkGetPhysicalDeviceSurfaceSupportKHR", {true, (void*)GetPhysicalDeviceSurfaceSupportKHR}},
- {"vkGetPhysicalDeviceSurfaceCapabilitiesKHR", {true, (void*)GetPhysicalDeviceSurfaceCapabilitiesKHR}},
- {"vkGetPhysicalDeviceSurfaceFormatsKHR", {true, (void*)GetPhysicalDeviceSurfaceFormatsKHR}},
- {"vkGetPhysicalDeviceSurfacePresentModesKHR", {true, (void*)GetPhysicalDeviceSurfacePresentModesKHR}},
- {"vkCreateSwapchainKHR", {false, (void*)CreateSwapchainKHR}},
- {"vkDestroySwapchainKHR", {false, (void*)DestroySwapchainKHR}},
- {"vkGetSwapchainImagesKHR", {false, (void*)GetSwapchainImagesKHR}},
- {"vkAcquireNextImageKHR", {false, (void*)AcquireNextImageKHR}},
- {"vkQueuePresentKHR", {false, (void*)QueuePresentKHR}},
- {"vkGetDeviceGroupPresentCapabilitiesKHR", {false, (void*)GetDeviceGroupPresentCapabilitiesKHR}},
- {"vkGetDeviceGroupSurfacePresentModesKHR", {false, (void*)GetDeviceGroupSurfacePresentModesKHR}},
- {"vkGetPhysicalDevicePresentRectanglesKHR", {true, (void*)GetPhysicalDevicePresentRectanglesKHR}},
- {"vkAcquireNextImage2KHR", {false, (void*)AcquireNextImage2KHR}},
- {"vkGetPhysicalDeviceDisplayPropertiesKHR", {true, (void*)GetPhysicalDeviceDisplayPropertiesKHR}},
- {"vkGetPhysicalDeviceDisplayPlanePropertiesKHR", {true, (void*)GetPhysicalDeviceDisplayPlanePropertiesKHR}},
- {"vkGetDisplayPlaneSupportedDisplaysKHR", {true, (void*)GetDisplayPlaneSupportedDisplaysKHR}},
- {"vkGetDisplayModePropertiesKHR", {true, (void*)GetDisplayModePropertiesKHR}},
- {"vkCreateDisplayModeKHR", {true, (void*)CreateDisplayModeKHR}},
- {"vkGetDisplayPlaneCapabilitiesKHR", {true, (void*)GetDisplayPlaneCapabilitiesKHR}},
- {"vkCreateDisplayPlaneSurfaceKHR", {true, (void*)CreateDisplayPlaneSurfaceKHR}},
- {"vkCreateSharedSwapchainsKHR", {false, (void*)CreateSharedSwapchainsKHR}},
-#ifdef VK_USE_PLATFORM_XLIB_KHR
- {"vkCreateXlibSurfaceKHR", {true, (void*)CreateXlibSurfaceKHR}},
-#endif
-#ifdef VK_USE_PLATFORM_XLIB_KHR
- {"vkGetPhysicalDeviceXlibPresentationSupportKHR", {true, (void*)GetPhysicalDeviceXlibPresentationSupportKHR}},
-#endif
-#ifdef VK_USE_PLATFORM_XCB_KHR
- {"vkCreateXcbSurfaceKHR", {true, (void*)CreateXcbSurfaceKHR}},
-#endif
-#ifdef VK_USE_PLATFORM_XCB_KHR
- {"vkGetPhysicalDeviceXcbPresentationSupportKHR", {true, (void*)GetPhysicalDeviceXcbPresentationSupportKHR}},
-#endif
-#ifdef VK_USE_PLATFORM_WAYLAND_KHR
- {"vkCreateWaylandSurfaceKHR", {true, (void*)CreateWaylandSurfaceKHR}},
-#endif
-#ifdef VK_USE_PLATFORM_WAYLAND_KHR
- {"vkGetPhysicalDeviceWaylandPresentationSupportKHR", {true, (void*)GetPhysicalDeviceWaylandPresentationSupportKHR}},
-#endif
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
- {"vkCreateAndroidSurfaceKHR", {true, (void*)CreateAndroidSurfaceKHR}},
-#endif
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- {"vkCreateWin32SurfaceKHR", {true, (void*)CreateWin32SurfaceKHR}},
-#endif
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- {"vkGetPhysicalDeviceWin32PresentationSupportKHR", {true, (void*)GetPhysicalDeviceWin32PresentationSupportKHR}},
-#endif
- {"vkGetPhysicalDeviceFeatures2KHR", {true, (void*)GetPhysicalDeviceFeatures2KHR}},
- {"vkGetPhysicalDeviceProperties2KHR", {true, (void*)GetPhysicalDeviceProperties2KHR}},
- {"vkGetPhysicalDeviceFormatProperties2KHR", {true, (void*)GetPhysicalDeviceFormatProperties2KHR}},
- {"vkGetPhysicalDeviceImageFormatProperties2KHR", {true, (void*)GetPhysicalDeviceImageFormatProperties2KHR}},
- {"vkGetPhysicalDeviceQueueFamilyProperties2KHR", {true, (void*)GetPhysicalDeviceQueueFamilyProperties2KHR}},
- {"vkGetPhysicalDeviceMemoryProperties2KHR", {true, (void*)GetPhysicalDeviceMemoryProperties2KHR}},
- {"vkGetPhysicalDeviceSparseImageFormatProperties2KHR", {true, (void*)GetPhysicalDeviceSparseImageFormatProperties2KHR}},
- {"vkGetDeviceGroupPeerMemoryFeaturesKHR", {false, (void*)GetDeviceGroupPeerMemoryFeaturesKHR}},
- {"vkCmdSetDeviceMaskKHR", {false, (void*)CmdSetDeviceMaskKHR}},
- {"vkCmdDispatchBaseKHR", {false, (void*)CmdDispatchBaseKHR}},
- {"vkTrimCommandPoolKHR", {false, (void*)TrimCommandPoolKHR}},
- {"vkEnumeratePhysicalDeviceGroupsKHR", {true, (void*)EnumeratePhysicalDeviceGroupsKHR}},
- {"vkGetPhysicalDeviceExternalBufferPropertiesKHR", {true, (void*)GetPhysicalDeviceExternalBufferPropertiesKHR}},
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- {"vkGetMemoryWin32HandleKHR", {false, (void*)GetMemoryWin32HandleKHR}},
-#endif
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- {"vkGetMemoryWin32HandlePropertiesKHR", {false, (void*)GetMemoryWin32HandlePropertiesKHR}},
-#endif
- {"vkGetMemoryFdKHR", {false, (void*)GetMemoryFdKHR}},
- {"vkGetMemoryFdPropertiesKHR", {false, (void*)GetMemoryFdPropertiesKHR}},
- {"vkGetPhysicalDeviceExternalSemaphorePropertiesKHR", {true, (void*)GetPhysicalDeviceExternalSemaphorePropertiesKHR}},
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- {"vkImportSemaphoreWin32HandleKHR", {false, (void*)ImportSemaphoreWin32HandleKHR}},
-#endif
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- {"vkGetSemaphoreWin32HandleKHR", {false, (void*)GetSemaphoreWin32HandleKHR}},
-#endif
- {"vkImportSemaphoreFdKHR", {false, (void*)ImportSemaphoreFdKHR}},
- {"vkGetSemaphoreFdKHR", {false, (void*)GetSemaphoreFdKHR}},
- {"vkCmdPushDescriptorSetKHR", {false, (void*)CmdPushDescriptorSetKHR}},
- {"vkCmdPushDescriptorSetWithTemplateKHR", {false, (void*)CmdPushDescriptorSetWithTemplateKHR}},
- {"vkCreateDescriptorUpdateTemplateKHR", {false, (void*)CreateDescriptorUpdateTemplateKHR}},
- {"vkDestroyDescriptorUpdateTemplateKHR", {false, (void*)DestroyDescriptorUpdateTemplateKHR}},
- {"vkUpdateDescriptorSetWithTemplateKHR", {false, (void*)UpdateDescriptorSetWithTemplateKHR}},
- {"vkCreateRenderPass2KHR", {false, (void*)CreateRenderPass2KHR}},
- {"vkCmdBeginRenderPass2KHR", {false, (void*)CmdBeginRenderPass2KHR}},
- {"vkCmdNextSubpass2KHR", {false, (void*)CmdNextSubpass2KHR}},
- {"vkCmdEndRenderPass2KHR", {false, (void*)CmdEndRenderPass2KHR}},
- {"vkGetSwapchainStatusKHR", {false, (void*)GetSwapchainStatusKHR}},
- {"vkGetPhysicalDeviceExternalFencePropertiesKHR", {true, (void*)GetPhysicalDeviceExternalFencePropertiesKHR}},
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- {"vkImportFenceWin32HandleKHR", {false, (void*)ImportFenceWin32HandleKHR}},
-#endif
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- {"vkGetFenceWin32HandleKHR", {false, (void*)GetFenceWin32HandleKHR}},
-#endif
- {"vkImportFenceFdKHR", {false, (void*)ImportFenceFdKHR}},
- {"vkGetFenceFdKHR", {false, (void*)GetFenceFdKHR}},
- {"vkGetPhysicalDeviceSurfaceCapabilities2KHR", {true, (void*)GetPhysicalDeviceSurfaceCapabilities2KHR}},
- {"vkGetPhysicalDeviceSurfaceFormats2KHR", {true, (void*)GetPhysicalDeviceSurfaceFormats2KHR}},
- {"vkGetPhysicalDeviceDisplayProperties2KHR", {true, (void*)GetPhysicalDeviceDisplayProperties2KHR}},
- {"vkGetPhysicalDeviceDisplayPlaneProperties2KHR", {true, (void*)GetPhysicalDeviceDisplayPlaneProperties2KHR}},
- {"vkGetDisplayModeProperties2KHR", {true, (void*)GetDisplayModeProperties2KHR}},
- {"vkGetDisplayPlaneCapabilities2KHR", {true, (void*)GetDisplayPlaneCapabilities2KHR}},
- {"vkGetImageMemoryRequirements2KHR", {false, (void*)GetImageMemoryRequirements2KHR}},
- {"vkGetBufferMemoryRequirements2KHR", {false, (void*)GetBufferMemoryRequirements2KHR}},
- {"vkGetImageSparseMemoryRequirements2KHR", {false, (void*)GetImageSparseMemoryRequirements2KHR}},
- {"vkCreateSamplerYcbcrConversionKHR", {false, (void*)CreateSamplerYcbcrConversionKHR}},
- {"vkDestroySamplerYcbcrConversionKHR", {false, (void*)DestroySamplerYcbcrConversionKHR}},
- {"vkBindBufferMemory2KHR", {false, (void*)BindBufferMemory2KHR}},
- {"vkBindImageMemory2KHR", {false, (void*)BindImageMemory2KHR}},
- {"vkGetDescriptorSetLayoutSupportKHR", {false, (void*)GetDescriptorSetLayoutSupportKHR}},
- {"vkCmdDrawIndirectCountKHR", {false, (void*)CmdDrawIndirectCountKHR}},
- {"vkCmdDrawIndexedIndirectCountKHR", {false, (void*)CmdDrawIndexedIndirectCountKHR}},
- {"vkGetPipelineExecutablePropertiesKHR", {false, (void*)GetPipelineExecutablePropertiesKHR}},
- {"vkGetPipelineExecutableStatisticsKHR", {false, (void*)GetPipelineExecutableStatisticsKHR}},
- {"vkGetPipelineExecutableInternalRepresentationsKHR", {false, (void*)GetPipelineExecutableInternalRepresentationsKHR}},
- {"vkCreateDebugReportCallbackEXT", {true, (void*)CreateDebugReportCallbackEXT}},
- {"vkDestroyDebugReportCallbackEXT", {true, (void*)DestroyDebugReportCallbackEXT}},
- {"vkDebugReportMessageEXT", {true, (void*)DebugReportMessageEXT}},
- {"vkDebugMarkerSetObjectTagEXT", {false, (void*)DebugMarkerSetObjectTagEXT}},
- {"vkDebugMarkerSetObjectNameEXT", {false, (void*)DebugMarkerSetObjectNameEXT}},
- {"vkCmdDebugMarkerBeginEXT", {false, (void*)CmdDebugMarkerBeginEXT}},
- {"vkCmdDebugMarkerEndEXT", {false, (void*)CmdDebugMarkerEndEXT}},
- {"vkCmdDebugMarkerInsertEXT", {false, (void*)CmdDebugMarkerInsertEXT}},
- {"vkCmdBindTransformFeedbackBuffersEXT", {false, (void*)CmdBindTransformFeedbackBuffersEXT}},
- {"vkCmdBeginTransformFeedbackEXT", {false, (void*)CmdBeginTransformFeedbackEXT}},
- {"vkCmdEndTransformFeedbackEXT", {false, (void*)CmdEndTransformFeedbackEXT}},
- {"vkCmdBeginQueryIndexedEXT", {false, (void*)CmdBeginQueryIndexedEXT}},
- {"vkCmdEndQueryIndexedEXT", {false, (void*)CmdEndQueryIndexedEXT}},
- {"vkCmdDrawIndirectByteCountEXT", {false, (void*)CmdDrawIndirectByteCountEXT}},
- {"vkGetImageViewHandleNVX", {false, (void*)GetImageViewHandleNVX}},
- {"vkCmdDrawIndirectCountAMD", {false, (void*)CmdDrawIndirectCountAMD}},
- {"vkCmdDrawIndexedIndirectCountAMD", {false, (void*)CmdDrawIndexedIndirectCountAMD}},
- {"vkGetShaderInfoAMD", {false, (void*)GetShaderInfoAMD}},
-#ifdef VK_USE_PLATFORM_GGP
- {"vkCreateStreamDescriptorSurfaceGGP", {true, (void*)CreateStreamDescriptorSurfaceGGP}},
-#endif
- {"vkGetPhysicalDeviceExternalImageFormatPropertiesNV", {true, (void*)GetPhysicalDeviceExternalImageFormatPropertiesNV}},
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- {"vkGetMemoryWin32HandleNV", {false, (void*)GetMemoryWin32HandleNV}},
-#endif
-#ifdef VK_USE_PLATFORM_VI_NN
- {"vkCreateViSurfaceNN", {true, (void*)CreateViSurfaceNN}},
-#endif
- {"vkCmdBeginConditionalRenderingEXT", {false, (void*)CmdBeginConditionalRenderingEXT}},
- {"vkCmdEndConditionalRenderingEXT", {false, (void*)CmdEndConditionalRenderingEXT}},
- {"vkCmdProcessCommandsNVX", {false, (void*)CmdProcessCommandsNVX}},
- {"vkCmdReserveSpaceForCommandsNVX", {false, (void*)CmdReserveSpaceForCommandsNVX}},
- {"vkCreateIndirectCommandsLayoutNVX", {false, (void*)CreateIndirectCommandsLayoutNVX}},
- {"vkDestroyIndirectCommandsLayoutNVX", {false, (void*)DestroyIndirectCommandsLayoutNVX}},
- {"vkCreateObjectTableNVX", {false, (void*)CreateObjectTableNVX}},
- {"vkDestroyObjectTableNVX", {false, (void*)DestroyObjectTableNVX}},
- {"vkRegisterObjectsNVX", {false, (void*)RegisterObjectsNVX}},
- {"vkUnregisterObjectsNVX", {false, (void*)UnregisterObjectsNVX}},
- {"vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX", {true, (void*)GetPhysicalDeviceGeneratedCommandsPropertiesNVX}},
- {"vkCmdSetViewportWScalingNV", {false, (void*)CmdSetViewportWScalingNV}},
- {"vkReleaseDisplayEXT", {true, (void*)ReleaseDisplayEXT}},
-#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
- {"vkAcquireXlibDisplayEXT", {true, (void*)AcquireXlibDisplayEXT}},
-#endif
-#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
- {"vkGetRandROutputDisplayEXT", {true, (void*)GetRandROutputDisplayEXT}},
-#endif
- {"vkGetPhysicalDeviceSurfaceCapabilities2EXT", {true, (void*)GetPhysicalDeviceSurfaceCapabilities2EXT}},
- {"vkDisplayPowerControlEXT", {false, (void*)DisplayPowerControlEXT}},
- {"vkRegisterDeviceEventEXT", {false, (void*)RegisterDeviceEventEXT}},
- {"vkRegisterDisplayEventEXT", {false, (void*)RegisterDisplayEventEXT}},
- {"vkGetSwapchainCounterEXT", {false, (void*)GetSwapchainCounterEXT}},
- {"vkGetRefreshCycleDurationGOOGLE", {false, (void*)GetRefreshCycleDurationGOOGLE}},
- {"vkGetPastPresentationTimingGOOGLE", {false, (void*)GetPastPresentationTimingGOOGLE}},
- {"vkCmdSetDiscardRectangleEXT", {false, (void*)CmdSetDiscardRectangleEXT}},
- {"vkSetHdrMetadataEXT", {false, (void*)SetHdrMetadataEXT}},
-#ifdef VK_USE_PLATFORM_IOS_MVK
- {"vkCreateIOSSurfaceMVK", {true, (void*)CreateIOSSurfaceMVK}},
-#endif
-#ifdef VK_USE_PLATFORM_MACOS_MVK
- {"vkCreateMacOSSurfaceMVK", {true, (void*)CreateMacOSSurfaceMVK}},
-#endif
- {"vkSetDebugUtilsObjectNameEXT", {false, (void*)SetDebugUtilsObjectNameEXT}},
- {"vkSetDebugUtilsObjectTagEXT", {false, (void*)SetDebugUtilsObjectTagEXT}},
- {"vkQueueBeginDebugUtilsLabelEXT", {false, (void*)QueueBeginDebugUtilsLabelEXT}},
- {"vkQueueEndDebugUtilsLabelEXT", {false, (void*)QueueEndDebugUtilsLabelEXT}},
- {"vkQueueInsertDebugUtilsLabelEXT", {false, (void*)QueueInsertDebugUtilsLabelEXT}},
- {"vkCmdBeginDebugUtilsLabelEXT", {false, (void*)CmdBeginDebugUtilsLabelEXT}},
- {"vkCmdEndDebugUtilsLabelEXT", {false, (void*)CmdEndDebugUtilsLabelEXT}},
- {"vkCmdInsertDebugUtilsLabelEXT", {false, (void*)CmdInsertDebugUtilsLabelEXT}},
- {"vkCreateDebugUtilsMessengerEXT", {true, (void*)CreateDebugUtilsMessengerEXT}},
- {"vkDestroyDebugUtilsMessengerEXT", {true, (void*)DestroyDebugUtilsMessengerEXT}},
- {"vkSubmitDebugUtilsMessageEXT", {true, (void*)SubmitDebugUtilsMessageEXT}},
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
- {"vkGetAndroidHardwareBufferPropertiesANDROID", {false, (void*)GetAndroidHardwareBufferPropertiesANDROID}},
-#endif
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
- {"vkGetMemoryAndroidHardwareBufferANDROID", {false, (void*)GetMemoryAndroidHardwareBufferANDROID}},
-#endif
- {"vkCmdSetSampleLocationsEXT", {false, (void*)CmdSetSampleLocationsEXT}},
- {"vkGetPhysicalDeviceMultisamplePropertiesEXT", {true, (void*)GetPhysicalDeviceMultisamplePropertiesEXT}},
- {"vkGetImageDrmFormatModifierPropertiesEXT", {false, (void*)GetImageDrmFormatModifierPropertiesEXT}},
-#ifdef BUILD_CORE_VALIDATION
- {"vkCreateValidationCacheEXT", {false, (void*)CreateValidationCacheEXT}},
-#endif
-#ifdef BUILD_CORE_VALIDATION
- {"vkDestroyValidationCacheEXT", {false, (void*)DestroyValidationCacheEXT}},
-#endif
-#ifdef BUILD_CORE_VALIDATION
- {"vkMergeValidationCachesEXT", {false, (void*)MergeValidationCachesEXT}},
-#endif
-#ifdef BUILD_CORE_VALIDATION
- {"vkGetValidationCacheDataEXT", {false, (void*)GetValidationCacheDataEXT}},
-#endif
- {"vkCmdBindShadingRateImageNV", {false, (void*)CmdBindShadingRateImageNV}},
- {"vkCmdSetViewportShadingRatePaletteNV", {false, (void*)CmdSetViewportShadingRatePaletteNV}},
- {"vkCmdSetCoarseSampleOrderNV", {false, (void*)CmdSetCoarseSampleOrderNV}},
- {"vkCreateAccelerationStructureNV", {false, (void*)CreateAccelerationStructureNV}},
- {"vkDestroyAccelerationStructureNV", {false, (void*)DestroyAccelerationStructureNV}},
- {"vkGetAccelerationStructureMemoryRequirementsNV", {false, (void*)GetAccelerationStructureMemoryRequirementsNV}},
- {"vkBindAccelerationStructureMemoryNV", {false, (void*)BindAccelerationStructureMemoryNV}},
- {"vkCmdBuildAccelerationStructureNV", {false, (void*)CmdBuildAccelerationStructureNV}},
- {"vkCmdCopyAccelerationStructureNV", {false, (void*)CmdCopyAccelerationStructureNV}},
- {"vkCmdTraceRaysNV", {false, (void*)CmdTraceRaysNV}},
- {"vkCreateRayTracingPipelinesNV", {false, (void*)CreateRayTracingPipelinesNV}},
- {"vkGetRayTracingShaderGroupHandlesNV", {false, (void*)GetRayTracingShaderGroupHandlesNV}},
- {"vkGetAccelerationStructureHandleNV", {false, (void*)GetAccelerationStructureHandleNV}},
- {"vkCmdWriteAccelerationStructuresPropertiesNV", {false, (void*)CmdWriteAccelerationStructuresPropertiesNV}},
- {"vkCompileDeferredNV", {false, (void*)CompileDeferredNV}},
- {"vkGetMemoryHostPointerPropertiesEXT", {false, (void*)GetMemoryHostPointerPropertiesEXT}},
- {"vkCmdWriteBufferMarkerAMD", {false, (void*)CmdWriteBufferMarkerAMD}},
- {"vkGetPhysicalDeviceCalibrateableTimeDomainsEXT", {true, (void*)GetPhysicalDeviceCalibrateableTimeDomainsEXT}},
- {"vkGetCalibratedTimestampsEXT", {false, (void*)GetCalibratedTimestampsEXT}},
- {"vkCmdDrawMeshTasksNV", {false, (void*)CmdDrawMeshTasksNV}},
- {"vkCmdDrawMeshTasksIndirectNV", {false, (void*)CmdDrawMeshTasksIndirectNV}},
- {"vkCmdDrawMeshTasksIndirectCountNV", {false, (void*)CmdDrawMeshTasksIndirectCountNV}},
- {"vkCmdSetExclusiveScissorNV", {false, (void*)CmdSetExclusiveScissorNV}},
- {"vkCmdSetCheckpointNV", {false, (void*)CmdSetCheckpointNV}},
- {"vkGetQueueCheckpointDataNV", {false, (void*)GetQueueCheckpointDataNV}},
- {"vkInitializePerformanceApiINTEL", {false, (void*)InitializePerformanceApiINTEL}},
- {"vkUninitializePerformanceApiINTEL", {false, (void*)UninitializePerformanceApiINTEL}},
- {"vkCmdSetPerformanceMarkerINTEL", {false, (void*)CmdSetPerformanceMarkerINTEL}},
- {"vkCmdSetPerformanceStreamMarkerINTEL", {false, (void*)CmdSetPerformanceStreamMarkerINTEL}},
- {"vkCmdSetPerformanceOverrideINTEL", {false, (void*)CmdSetPerformanceOverrideINTEL}},
- {"vkAcquirePerformanceConfigurationINTEL", {false, (void*)AcquirePerformanceConfigurationINTEL}},
- {"vkReleasePerformanceConfigurationINTEL", {false, (void*)ReleasePerformanceConfigurationINTEL}},
- {"vkQueueSetPerformanceConfigurationINTEL", {false, (void*)QueueSetPerformanceConfigurationINTEL}},
- {"vkGetPerformanceParameterINTEL", {false, (void*)GetPerformanceParameterINTEL}},
- {"vkSetLocalDimmingAMD", {false, (void*)SetLocalDimmingAMD}},
-#ifdef VK_USE_PLATFORM_FUCHSIA
- {"vkCreateImagePipeSurfaceFUCHSIA", {true, (void*)CreateImagePipeSurfaceFUCHSIA}},
-#endif
-#ifdef VK_USE_PLATFORM_METAL_EXT
- {"vkCreateMetalSurfaceEXT", {true, (void*)CreateMetalSurfaceEXT}},
-#endif
- {"vkGetBufferDeviceAddressEXT", {false, (void*)GetBufferDeviceAddressEXT}},
- {"vkGetPhysicalDeviceCooperativeMatrixPropertiesNV", {true, (void*)GetPhysicalDeviceCooperativeMatrixPropertiesNV}},
- {"vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV", {true, (void*)GetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV}},
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- {"vkGetPhysicalDeviceSurfacePresentModes2EXT", {true, (void*)GetPhysicalDeviceSurfacePresentModes2EXT}},
-#endif
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- {"vkAcquireFullScreenExclusiveModeEXT", {false, (void*)AcquireFullScreenExclusiveModeEXT}},
-#endif
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- {"vkReleaseFullScreenExclusiveModeEXT", {false, (void*)ReleaseFullScreenExclusiveModeEXT}},
-#endif
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- {"vkGetDeviceGroupSurfacePresentModes2EXT", {false, (void*)GetDeviceGroupSurfacePresentModes2EXT}},
-#endif
- {"vkCreateHeadlessSurfaceEXT", {true, (void*)CreateHeadlessSurfaceEXT}},
- {"vkCmdSetLineStippleEXT", {false, (void*)CmdSetLineStippleEXT}},
- {"vkResetQueryPoolEXT", {false, (void*)ResetQueryPoolEXT}},
-};
-
-
-} // namespace vulkan_layer_chassis
-
-// Loader-layer interface v0: just wrappers, since there is only one layer
-
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pCount,
- VkExtensionProperties *pProperties) {
- return vulkan_layer_chassis::EnumerateInstanceExtensionProperties(pLayerName, pCount, pProperties);
-}
-
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties(uint32_t *pCount,
- VkLayerProperties *pProperties) {
- return vulkan_layer_chassis::EnumerateInstanceLayerProperties(pCount, pProperties);
-}
-
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pCount,
- VkLayerProperties *pProperties) {
- // the layer command handles VK_NULL_HANDLE just fine internally
- assert(physicalDevice == VK_NULL_HANDLE);
- return vulkan_layer_chassis::EnumerateDeviceLayerProperties(VK_NULL_HANDLE, pCount, pProperties);
-}
-
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice,
- const char *pLayerName, uint32_t *pCount,
- VkExtensionProperties *pProperties) {
- // the layer command handles VK_NULL_HANDLE just fine internally
- assert(physicalDevice == VK_NULL_HANDLE);
- return vulkan_layer_chassis::EnumerateDeviceExtensionProperties(VK_NULL_HANDLE, pLayerName, pCount, pProperties);
-}
-
-VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr(VkDevice dev, const char *funcName) {
- return vulkan_layer_chassis::GetDeviceProcAddr(dev, funcName);
-}
-
-VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr(VkInstance instance, const char *funcName) {
- return vulkan_layer_chassis::GetInstanceProcAddr(instance, funcName);
-}
-
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkNegotiateLoaderLayerInterfaceVersion(VkNegotiateLayerInterface *pVersionStruct) {
- assert(pVersionStruct != NULL);
- assert(pVersionStruct->sType == LAYER_NEGOTIATE_INTERFACE_STRUCT);
-
-    // Fill in the function pointers if the negotiated interface version is new enough for the structure to contain them.
- if (pVersionStruct->loaderLayerInterfaceVersion >= 2) {
- pVersionStruct->pfnGetInstanceProcAddr = vkGetInstanceProcAddr;
- pVersionStruct->pfnGetDeviceProcAddr = vkGetDeviceProcAddr;
- pVersionStruct->pfnGetPhysicalDeviceProcAddr = nullptr;
- }
-
- return VK_SUCCESS;
-}
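
The exported vkNegotiateLoaderLayerInterfaceVersion above is the layer's side of the loader-layer handshake: the loader passes in a VkNegotiateLayerInterface structure, and for interface version 2 or later the layer hands back its GetInstanceProcAddr/GetDeviceProcAddr entry points through it. A minimal, hypothetical caller-side sketch follows; the dlsym lookup, the PFN_NegotiateVersion typedef and the NegotiateWithLayer helper are illustrative assumptions and not actual Vulkan loader code, while the structure and its fields are the same ones the layer fills in above.

    // Hypothetical loader-side sketch of the v2 negotiation handshake (assumptions noted above).
    #include <dlfcn.h>
    #include "vulkan/vulkan.h"
    #include "vulkan/vk_layer.h"

    typedef VkResult(VKAPI_PTR *PFN_NegotiateVersion)(VkNegotiateLayerInterface *);

    PFN_vkGetInstanceProcAddr NegotiateWithLayer(void *layer_lib) {
        // Look up the negotiation export; a layer library without it only supports the older interface.
        auto negotiate = reinterpret_cast<PFN_NegotiateVersion>(
            dlsym(layer_lib, "vkNegotiateLoaderLayerInterfaceVersion"));
        if (!negotiate) return nullptr;

        VkNegotiateLayerInterface iface = {};
        iface.sType = LAYER_NEGOTIATE_INTERFACE_STRUCT;
        iface.loaderLayerInterfaceVersion = 2;  // highest version the caller supports
        if (negotiate(&iface) != VK_SUCCESS) return nullptr;

        // For version >= 2 the layer has filled in its proc-addr pointers (see the function above);
        // all further dispatch into the layer goes through them.
        return iface.pfnGetInstanceProcAddr;
    }
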
diff --git a/layers/generated/chassis.h b/layers/generated/chassis.h
deleted file mode 100644
index 80695d78f..000000000
--- a/layers/generated/chassis.h
+++ /dev/null
@@ -1,3739 +0,0 @@
-
-// This file is ***GENERATED***. Do Not Edit.
-// See layer_chassis_generator.py for modifications.
-
-/* Copyright (c) 2015-2019 The Khronos Group Inc.
- * Copyright (c) 2015-2019 Valve Corporation
- * Copyright (c) 2015-2019 LunarG, Inc.
- * Copyright (c) 2015-2019 Google Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * Author: Mark Lobodzinski <mark@lunarg.com>
- */
-#pragma once
-
-
-#define NOMINMAX
-#include <atomic>
-#include <mutex>
-#include <cinttypes>
-#include <stdio.h>
-#include <stdlib.h>
-#include <string.h>
-#include <unordered_map>
-#include <unordered_set>
-#include <algorithm>
-#include <memory>
-
-#include "vk_loader_platform.h"
-#include "vulkan/vulkan.h"
-#include "vk_layer_config.h"
-#include "vk_layer_data.h"
-#include "vk_layer_logging.h"
-#include "vk_object_types.h"
-#include "vulkan/vk_layer.h"
-#include "vk_enum_string_helper.h"
-#include "vk_layer_extension_utils.h"
-#include "vk_layer_utils.h"
-#include "vulkan/vk_layer.h"
-#include "vk_dispatch_table_helper.h"
-#include "vk_extension_helper.h"
-#include "vk_safe_struct.h"
-#include "vk_typemap_helper.h"
-
-
-extern std::atomic<uint64_t> global_unique_id;
-extern vl_concurrent_unordered_map<uint64_t, uint64_t, 4> unique_id_mapping;
-
-
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateInstance(
- const VkInstanceCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkInstance* pInstance);
-
-VKAPI_ATTR void VKAPI_CALL DestroyInstance(
- VkInstance instance,
- const VkAllocationCallbacks* pAllocator);
-
-VKAPI_ATTR VkResult VKAPI_CALL EnumeratePhysicalDevices(
- VkInstance instance,
- uint32_t* pPhysicalDeviceCount,
- VkPhysicalDevice* pPhysicalDevices);
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFeatures(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceFeatures* pFeatures);
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFormatProperties(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkFormatProperties* pFormatProperties);
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceImageFormatProperties(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkImageType type,
- VkImageTiling tiling,
- VkImageUsageFlags usage,
- VkImageCreateFlags flags,
- VkImageFormatProperties* pImageFormatProperties);
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceProperties(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceProperties* pProperties);
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceQueueFamilyProperties(
- VkPhysicalDevice physicalDevice,
- uint32_t* pQueueFamilyPropertyCount,
- VkQueueFamilyProperties* pQueueFamilyProperties);
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceMemoryProperties(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceMemoryProperties* pMemoryProperties);
-
-VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetInstanceProcAddr(
- VkInstance instance,
- const char* pName);
-
-VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetDeviceProcAddr(
- VkDevice device,
- const char* pName);
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateDevice(
- VkPhysicalDevice physicalDevice,
- const VkDeviceCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDevice* pDevice);
-
-VKAPI_ATTR void VKAPI_CALL DestroyDevice(
- VkDevice device,
- const VkAllocationCallbacks* pAllocator);
-
-VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceExtensionProperties(
- const char* pLayerName,
- uint32_t* pPropertyCount,
- VkExtensionProperties* pProperties);
-
-VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceExtensionProperties(
- VkPhysicalDevice physicalDevice,
- const char* pLayerName,
- uint32_t* pPropertyCount,
- VkExtensionProperties* pProperties);
-
-VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceLayerProperties(
- uint32_t* pPropertyCount,
- VkLayerProperties* pProperties);
-
-VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceLayerProperties(
- VkPhysicalDevice physicalDevice,
- uint32_t* pPropertyCount,
- VkLayerProperties* pProperties);
-
-VKAPI_ATTR void VKAPI_CALL GetDeviceQueue(
- VkDevice device,
- uint32_t queueFamilyIndex,
- uint32_t queueIndex,
- VkQueue* pQueue);
-
-VKAPI_ATTR VkResult VKAPI_CALL QueueSubmit(
- VkQueue queue,
- uint32_t submitCount,
- const VkSubmitInfo* pSubmits,
- VkFence fence);
-
-VKAPI_ATTR VkResult VKAPI_CALL QueueWaitIdle(
- VkQueue queue);
-
-VKAPI_ATTR VkResult VKAPI_CALL DeviceWaitIdle(
- VkDevice device);
-
-VKAPI_ATTR VkResult VKAPI_CALL AllocateMemory(
- VkDevice device,
- const VkMemoryAllocateInfo* pAllocateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDeviceMemory* pMemory);
-
-VKAPI_ATTR void VKAPI_CALL FreeMemory(
- VkDevice device,
- VkDeviceMemory memory,
- const VkAllocationCallbacks* pAllocator);
-
-VKAPI_ATTR VkResult VKAPI_CALL MapMemory(
- VkDevice device,
- VkDeviceMemory memory,
- VkDeviceSize offset,
- VkDeviceSize size,
- VkMemoryMapFlags flags,
- void** ppData);
-
-VKAPI_ATTR void VKAPI_CALL UnmapMemory(
- VkDevice device,
- VkDeviceMemory memory);
-
-VKAPI_ATTR VkResult VKAPI_CALL FlushMappedMemoryRanges(
- VkDevice device,
- uint32_t memoryRangeCount,
- const VkMappedMemoryRange* pMemoryRanges);
-
-VKAPI_ATTR VkResult VKAPI_CALL InvalidateMappedMemoryRanges(
- VkDevice device,
- uint32_t memoryRangeCount,
- const VkMappedMemoryRange* pMemoryRanges);
-
-VKAPI_ATTR void VKAPI_CALL GetDeviceMemoryCommitment(
- VkDevice device,
- VkDeviceMemory memory,
- VkDeviceSize* pCommittedMemoryInBytes);
-
-VKAPI_ATTR VkResult VKAPI_CALL BindBufferMemory(
- VkDevice device,
- VkBuffer buffer,
- VkDeviceMemory memory,
- VkDeviceSize memoryOffset);
-
-VKAPI_ATTR VkResult VKAPI_CALL BindImageMemory(
- VkDevice device,
- VkImage image,
- VkDeviceMemory memory,
- VkDeviceSize memoryOffset);
-
-VKAPI_ATTR void VKAPI_CALL GetBufferMemoryRequirements(
- VkDevice device,
- VkBuffer buffer,
- VkMemoryRequirements* pMemoryRequirements);
-
-VKAPI_ATTR void VKAPI_CALL GetImageMemoryRequirements(
- VkDevice device,
- VkImage image,
- VkMemoryRequirements* pMemoryRequirements);
-
-VKAPI_ATTR void VKAPI_CALL GetImageSparseMemoryRequirements(
- VkDevice device,
- VkImage image,
- uint32_t* pSparseMemoryRequirementCount,
- VkSparseImageMemoryRequirements* pSparseMemoryRequirements);
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceSparseImageFormatProperties(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkImageType type,
- VkSampleCountFlagBits samples,
- VkImageUsageFlags usage,
- VkImageTiling tiling,
- uint32_t* pPropertyCount,
- VkSparseImageFormatProperties* pProperties);
-
-VKAPI_ATTR VkResult VKAPI_CALL QueueBindSparse(
- VkQueue queue,
- uint32_t bindInfoCount,
- const VkBindSparseInfo* pBindInfo,
- VkFence fence);
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateFence(
- VkDevice device,
- const VkFenceCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFence* pFence);
-
-VKAPI_ATTR void VKAPI_CALL DestroyFence(
- VkDevice device,
- VkFence fence,
- const VkAllocationCallbacks* pAllocator);
-
-VKAPI_ATTR VkResult VKAPI_CALL ResetFences(
- VkDevice device,
- uint32_t fenceCount,
- const VkFence* pFences);
-
-VKAPI_ATTR VkResult VKAPI_CALL GetFenceStatus(
- VkDevice device,
- VkFence fence);
-
-VKAPI_ATTR VkResult VKAPI_CALL WaitForFences(
- VkDevice device,
- uint32_t fenceCount,
- const VkFence* pFences,
- VkBool32 waitAll,
- uint64_t timeout);
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateSemaphore(
- VkDevice device,
- const VkSemaphoreCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSemaphore* pSemaphore);
-
-VKAPI_ATTR void VKAPI_CALL DestroySemaphore(
- VkDevice device,
- VkSemaphore semaphore,
- const VkAllocationCallbacks* pAllocator);
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateEvent(
- VkDevice device,
- const VkEventCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkEvent* pEvent);
-
-VKAPI_ATTR void VKAPI_CALL DestroyEvent(
- VkDevice device,
- VkEvent event,
- const VkAllocationCallbacks* pAllocator);
-
-VKAPI_ATTR VkResult VKAPI_CALL GetEventStatus(
- VkDevice device,
- VkEvent event);
-
-VKAPI_ATTR VkResult VKAPI_CALL SetEvent(
- VkDevice device,
- VkEvent event);
-
-VKAPI_ATTR VkResult VKAPI_CALL ResetEvent(
- VkDevice device,
- VkEvent event);
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateQueryPool(
- VkDevice device,
- const VkQueryPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkQueryPool* pQueryPool);
-
-VKAPI_ATTR void VKAPI_CALL DestroyQueryPool(
- VkDevice device,
- VkQueryPool queryPool,
- const VkAllocationCallbacks* pAllocator);
-
-VKAPI_ATTR VkResult VKAPI_CALL GetQueryPoolResults(
- VkDevice device,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- size_t dataSize,
- void* pData,
- VkDeviceSize stride,
- VkQueryResultFlags flags);
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateBuffer(
- VkDevice device,
- const VkBufferCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkBuffer* pBuffer);
-
-VKAPI_ATTR void VKAPI_CALL DestroyBuffer(
- VkDevice device,
- VkBuffer buffer,
- const VkAllocationCallbacks* pAllocator);
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateBufferView(
- VkDevice device,
- const VkBufferViewCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkBufferView* pView);
-
-VKAPI_ATTR void VKAPI_CALL DestroyBufferView(
- VkDevice device,
- VkBufferView bufferView,
- const VkAllocationCallbacks* pAllocator);
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateImage(
- VkDevice device,
- const VkImageCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkImage* pImage);
-
-VKAPI_ATTR void VKAPI_CALL DestroyImage(
- VkDevice device,
- VkImage image,
- const VkAllocationCallbacks* pAllocator);
-
-VKAPI_ATTR void VKAPI_CALL GetImageSubresourceLayout(
- VkDevice device,
- VkImage image,
- const VkImageSubresource* pSubresource,
- VkSubresourceLayout* pLayout);
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateImageView(
- VkDevice device,
- const VkImageViewCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkImageView* pView);
-
-VKAPI_ATTR void VKAPI_CALL DestroyImageView(
- VkDevice device,
- VkImageView imageView,
- const VkAllocationCallbacks* pAllocator);
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateShaderModule(
- VkDevice device,
- const VkShaderModuleCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkShaderModule* pShaderModule);
-
-VKAPI_ATTR void VKAPI_CALL DestroyShaderModule(
- VkDevice device,
- VkShaderModule shaderModule,
- const VkAllocationCallbacks* pAllocator);
-
-VKAPI_ATTR VkResult VKAPI_CALL CreatePipelineCache(
- VkDevice device,
- const VkPipelineCacheCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkPipelineCache* pPipelineCache);
-
-VKAPI_ATTR void VKAPI_CALL DestroyPipelineCache(
- VkDevice device,
- VkPipelineCache pipelineCache,
- const VkAllocationCallbacks* pAllocator);
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPipelineCacheData(
- VkDevice device,
- VkPipelineCache pipelineCache,
- size_t* pDataSize,
- void* pData);
-
-VKAPI_ATTR VkResult VKAPI_CALL MergePipelineCaches(
- VkDevice device,
- VkPipelineCache dstCache,
- uint32_t srcCacheCount,
- const VkPipelineCache* pSrcCaches);
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateGraphicsPipelines(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VkGraphicsPipelineCreateInfo* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkPipeline* pPipelines);
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateComputePipelines(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VkComputePipelineCreateInfo* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkPipeline* pPipelines);
-
-VKAPI_ATTR void VKAPI_CALL DestroyPipeline(
- VkDevice device,
- VkPipeline pipeline,
- const VkAllocationCallbacks* pAllocator);
-
-VKAPI_ATTR VkResult VKAPI_CALL CreatePipelineLayout(
- VkDevice device,
- const VkPipelineLayoutCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkPipelineLayout* pPipelineLayout);
-
-VKAPI_ATTR void VKAPI_CALL DestroyPipelineLayout(
- VkDevice device,
- VkPipelineLayout pipelineLayout,
- const VkAllocationCallbacks* pAllocator);
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateSampler(
- VkDevice device,
- const VkSamplerCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSampler* pSampler);
-
-VKAPI_ATTR void VKAPI_CALL DestroySampler(
- VkDevice device,
- VkSampler sampler,
- const VkAllocationCallbacks* pAllocator);
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateDescriptorSetLayout(
- VkDevice device,
- const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorSetLayout* pSetLayout);
-
-VKAPI_ATTR void VKAPI_CALL DestroyDescriptorSetLayout(
- VkDevice device,
- VkDescriptorSetLayout descriptorSetLayout,
- const VkAllocationCallbacks* pAllocator);
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateDescriptorPool(
- VkDevice device,
- const VkDescriptorPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorPool* pDescriptorPool);
-
-VKAPI_ATTR void VKAPI_CALL DestroyDescriptorPool(
- VkDevice device,
- VkDescriptorPool descriptorPool,
- const VkAllocationCallbacks* pAllocator);
-
-VKAPI_ATTR VkResult VKAPI_CALL ResetDescriptorPool(
- VkDevice device,
- VkDescriptorPool descriptorPool,
- VkDescriptorPoolResetFlags flags);
-
-VKAPI_ATTR VkResult VKAPI_CALL AllocateDescriptorSets(
- VkDevice device,
- const VkDescriptorSetAllocateInfo* pAllocateInfo,
- VkDescriptorSet* pDescriptorSets);
-
-VKAPI_ATTR VkResult VKAPI_CALL FreeDescriptorSets(
- VkDevice device,
- VkDescriptorPool descriptorPool,
- uint32_t descriptorSetCount,
- const VkDescriptorSet* pDescriptorSets);
-
-VKAPI_ATTR void VKAPI_CALL UpdateDescriptorSets(
- VkDevice device,
- uint32_t descriptorWriteCount,
- const VkWriteDescriptorSet* pDescriptorWrites,
- uint32_t descriptorCopyCount,
- const VkCopyDescriptorSet* pDescriptorCopies);
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateFramebuffer(
- VkDevice device,
- const VkFramebufferCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFramebuffer* pFramebuffer);
-
-VKAPI_ATTR void VKAPI_CALL DestroyFramebuffer(
- VkDevice device,
- VkFramebuffer framebuffer,
- const VkAllocationCallbacks* pAllocator);
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateRenderPass(
- VkDevice device,
- const VkRenderPassCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkRenderPass* pRenderPass);
-
-VKAPI_ATTR void VKAPI_CALL DestroyRenderPass(
- VkDevice device,
- VkRenderPass renderPass,
- const VkAllocationCallbacks* pAllocator);
-
-VKAPI_ATTR void VKAPI_CALL GetRenderAreaGranularity(
- VkDevice device,
- VkRenderPass renderPass,
- VkExtent2D* pGranularity);
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateCommandPool(
- VkDevice device,
- const VkCommandPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkCommandPool* pCommandPool);
-
-VKAPI_ATTR void VKAPI_CALL DestroyCommandPool(
- VkDevice device,
- VkCommandPool commandPool,
- const VkAllocationCallbacks* pAllocator);
-
-VKAPI_ATTR VkResult VKAPI_CALL ResetCommandPool(
- VkDevice device,
- VkCommandPool commandPool,
- VkCommandPoolResetFlags flags);
-
-VKAPI_ATTR VkResult VKAPI_CALL AllocateCommandBuffers(
- VkDevice device,
- const VkCommandBufferAllocateInfo* pAllocateInfo,
- VkCommandBuffer* pCommandBuffers);
-
-VKAPI_ATTR void VKAPI_CALL FreeCommandBuffers(
- VkDevice device,
- VkCommandPool commandPool,
- uint32_t commandBufferCount,
- const VkCommandBuffer* pCommandBuffers);
-
-VKAPI_ATTR VkResult VKAPI_CALL BeginCommandBuffer(
- VkCommandBuffer commandBuffer,
- const VkCommandBufferBeginInfo* pBeginInfo);
-
-VKAPI_ATTR VkResult VKAPI_CALL EndCommandBuffer(
- VkCommandBuffer commandBuffer);
-
-VKAPI_ATTR VkResult VKAPI_CALL ResetCommandBuffer(
- VkCommandBuffer commandBuffer,
- VkCommandBufferResetFlags flags);
-
-VKAPI_ATTR void VKAPI_CALL CmdBindPipeline(
- VkCommandBuffer commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipeline pipeline);
-
-VKAPI_ATTR void VKAPI_CALL CmdSetViewport(
- VkCommandBuffer commandBuffer,
- uint32_t firstViewport,
- uint32_t viewportCount,
- const VkViewport* pViewports);
-
-VKAPI_ATTR void VKAPI_CALL CmdSetScissor(
- VkCommandBuffer commandBuffer,
- uint32_t firstScissor,
- uint32_t scissorCount,
- const VkRect2D* pScissors);
-
-VKAPI_ATTR void VKAPI_CALL CmdSetLineWidth(
- VkCommandBuffer commandBuffer,
- float lineWidth);
-
-VKAPI_ATTR void VKAPI_CALL CmdSetDepthBias(
- VkCommandBuffer commandBuffer,
- float depthBiasConstantFactor,
- float depthBiasClamp,
- float depthBiasSlopeFactor);
-
-VKAPI_ATTR void VKAPI_CALL CmdSetBlendConstants(
- VkCommandBuffer commandBuffer,
- const float blendConstants[4]);
-
-VKAPI_ATTR void VKAPI_CALL CmdSetDepthBounds(
- VkCommandBuffer commandBuffer,
- float minDepthBounds,
- float maxDepthBounds);
-
-VKAPI_ATTR void VKAPI_CALL CmdSetStencilCompareMask(
- VkCommandBuffer commandBuffer,
- VkStencilFaceFlags faceMask,
- uint32_t compareMask);
-
-VKAPI_ATTR void VKAPI_CALL CmdSetStencilWriteMask(
- VkCommandBuffer commandBuffer,
- VkStencilFaceFlags faceMask,
- uint32_t writeMask);
-
-VKAPI_ATTR void VKAPI_CALL CmdSetStencilReference(
- VkCommandBuffer commandBuffer,
- VkStencilFaceFlags faceMask,
- uint32_t reference);
-
-VKAPI_ATTR void VKAPI_CALL CmdBindDescriptorSets(
- VkCommandBuffer commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipelineLayout layout,
- uint32_t firstSet,
- uint32_t descriptorSetCount,
- const VkDescriptorSet* pDescriptorSets,
- uint32_t dynamicOffsetCount,
- const uint32_t* pDynamicOffsets);
-
-VKAPI_ATTR void VKAPI_CALL CmdBindIndexBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkIndexType indexType);
-
-VKAPI_ATTR void VKAPI_CALL CmdBindVertexBuffers(
- VkCommandBuffer commandBuffer,
- uint32_t firstBinding,
- uint32_t bindingCount,
- const VkBuffer* pBuffers,
- const VkDeviceSize* pOffsets);
-
-VKAPI_ATTR void VKAPI_CALL CmdDraw(
- VkCommandBuffer commandBuffer,
- uint32_t vertexCount,
- uint32_t instanceCount,
- uint32_t firstVertex,
- uint32_t firstInstance);
-
-VKAPI_ATTR void VKAPI_CALL CmdDrawIndexed(
- VkCommandBuffer commandBuffer,
- uint32_t indexCount,
- uint32_t instanceCount,
- uint32_t firstIndex,
- int32_t vertexOffset,
- uint32_t firstInstance);
-
-VKAPI_ATTR void VKAPI_CALL CmdDrawIndirect(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- uint32_t drawCount,
- uint32_t stride);
-
-VKAPI_ATTR void VKAPI_CALL CmdDrawIndexedIndirect(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- uint32_t drawCount,
- uint32_t stride);
-
-VKAPI_ATTR void VKAPI_CALL CmdDispatch(
- VkCommandBuffer commandBuffer,
- uint32_t groupCountX,
- uint32_t groupCountY,
- uint32_t groupCountZ);
-
-VKAPI_ATTR void VKAPI_CALL CmdDispatchIndirect(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset);
-
-VKAPI_ATTR void VKAPI_CALL CmdCopyBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer srcBuffer,
- VkBuffer dstBuffer,
- uint32_t regionCount,
- const VkBufferCopy* pRegions);
-
-VKAPI_ATTR void VKAPI_CALL CmdCopyImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkImageCopy* pRegions);
-
-VKAPI_ATTR void VKAPI_CALL CmdBlitImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkImageBlit* pRegions,
- VkFilter filter);
-
-VKAPI_ATTR void VKAPI_CALL CmdCopyBufferToImage(
- VkCommandBuffer commandBuffer,
- VkBuffer srcBuffer,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkBufferImageCopy* pRegions);
-
-VKAPI_ATTR void VKAPI_CALL CmdCopyImageToBuffer(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkBuffer dstBuffer,
- uint32_t regionCount,
- const VkBufferImageCopy* pRegions);
-
-VKAPI_ATTR void VKAPI_CALL CmdUpdateBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize dataSize,
- const void* pData);
-
-VKAPI_ATTR void VKAPI_CALL CmdFillBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize size,
- uint32_t data);
-
-VKAPI_ATTR void VKAPI_CALL CmdClearColorImage(
- VkCommandBuffer commandBuffer,
- VkImage image,
- VkImageLayout imageLayout,
- const VkClearColorValue* pColor,
- uint32_t rangeCount,
- const VkImageSubresourceRange* pRanges);
-
-VKAPI_ATTR void VKAPI_CALL CmdClearDepthStencilImage(
- VkCommandBuffer commandBuffer,
- VkImage image,
- VkImageLayout imageLayout,
- const VkClearDepthStencilValue* pDepthStencil,
- uint32_t rangeCount,
- const VkImageSubresourceRange* pRanges);
-
-VKAPI_ATTR void VKAPI_CALL CmdClearAttachments(
- VkCommandBuffer commandBuffer,
- uint32_t attachmentCount,
- const VkClearAttachment* pAttachments,
- uint32_t rectCount,
- const VkClearRect* pRects);
-
-VKAPI_ATTR void VKAPI_CALL CmdResolveImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkImageResolve* pRegions);
-
-VKAPI_ATTR void VKAPI_CALL CmdSetEvent(
- VkCommandBuffer commandBuffer,
- VkEvent event,
- VkPipelineStageFlags stageMask);
-
-VKAPI_ATTR void VKAPI_CALL CmdResetEvent(
- VkCommandBuffer commandBuffer,
- VkEvent event,
- VkPipelineStageFlags stageMask);
-
-VKAPI_ATTR void VKAPI_CALL CmdWaitEvents(
- VkCommandBuffer commandBuffer,
- uint32_t eventCount,
- const VkEvent* pEvents,
- VkPipelineStageFlags srcStageMask,
- VkPipelineStageFlags dstStageMask,
- uint32_t memoryBarrierCount,
- const VkMemoryBarrier* pMemoryBarriers,
- uint32_t bufferMemoryBarrierCount,
- const VkBufferMemoryBarrier* pBufferMemoryBarriers,
- uint32_t imageMemoryBarrierCount,
- const VkImageMemoryBarrier* pImageMemoryBarriers);
-
-VKAPI_ATTR void VKAPI_CALL CmdPipelineBarrier(
- VkCommandBuffer commandBuffer,
- VkPipelineStageFlags srcStageMask,
- VkPipelineStageFlags dstStageMask,
- VkDependencyFlags dependencyFlags,
- uint32_t memoryBarrierCount,
- const VkMemoryBarrier* pMemoryBarriers,
- uint32_t bufferMemoryBarrierCount,
- const VkBufferMemoryBarrier* pBufferMemoryBarriers,
- uint32_t imageMemoryBarrierCount,
- const VkImageMemoryBarrier* pImageMemoryBarriers);
-
-VKAPI_ATTR void VKAPI_CALL CmdBeginQuery(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query,
- VkQueryControlFlags flags);
-
-VKAPI_ATTR void VKAPI_CALL CmdEndQuery(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query);
-
-VKAPI_ATTR void VKAPI_CALL CmdResetQueryPool(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount);
-
-VKAPI_ATTR void VKAPI_CALL CmdWriteTimestamp(
- VkCommandBuffer commandBuffer,
- VkPipelineStageFlagBits pipelineStage,
- VkQueryPool queryPool,
- uint32_t query);
-
-VKAPI_ATTR void VKAPI_CALL CmdCopyQueryPoolResults(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize stride,
- VkQueryResultFlags flags);
-
-VKAPI_ATTR void VKAPI_CALL CmdPushConstants(
- VkCommandBuffer commandBuffer,
- VkPipelineLayout layout,
- VkShaderStageFlags stageFlags,
- uint32_t offset,
- uint32_t size,
- const void* pValues);
-
-VKAPI_ATTR void VKAPI_CALL CmdBeginRenderPass(
- VkCommandBuffer commandBuffer,
- const VkRenderPassBeginInfo* pRenderPassBegin,
- VkSubpassContents contents);
-
-VKAPI_ATTR void VKAPI_CALL CmdNextSubpass(
- VkCommandBuffer commandBuffer,
- VkSubpassContents contents);
-
-VKAPI_ATTR void VKAPI_CALL CmdEndRenderPass(
- VkCommandBuffer commandBuffer);
-
-VKAPI_ATTR void VKAPI_CALL CmdExecuteCommands(
- VkCommandBuffer commandBuffer,
- uint32_t commandBufferCount,
- const VkCommandBuffer* pCommandBuffers);
-
-
-VKAPI_ATTR VkResult VKAPI_CALL BindBufferMemory2(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindBufferMemoryInfo* pBindInfos);
-
-VKAPI_ATTR VkResult VKAPI_CALL BindImageMemory2(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindImageMemoryInfo* pBindInfos);
-
-VKAPI_ATTR void VKAPI_CALL GetDeviceGroupPeerMemoryFeatures(
- VkDevice device,
- uint32_t heapIndex,
- uint32_t localDeviceIndex,
- uint32_t remoteDeviceIndex,
- VkPeerMemoryFeatureFlags* pPeerMemoryFeatures);
-
-VKAPI_ATTR void VKAPI_CALL CmdSetDeviceMask(
- VkCommandBuffer commandBuffer,
- uint32_t deviceMask);
-
-VKAPI_ATTR void VKAPI_CALL CmdDispatchBase(
- VkCommandBuffer commandBuffer,
- uint32_t baseGroupX,
- uint32_t baseGroupY,
- uint32_t baseGroupZ,
- uint32_t groupCountX,
- uint32_t groupCountY,
- uint32_t groupCountZ);
-
-VKAPI_ATTR VkResult VKAPI_CALL EnumeratePhysicalDeviceGroups(
- VkInstance instance,
- uint32_t* pPhysicalDeviceGroupCount,
- VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties);
-
-VKAPI_ATTR void VKAPI_CALL GetImageMemoryRequirements2(
- VkDevice device,
- const VkImageMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements);
-
-VKAPI_ATTR void VKAPI_CALL GetBufferMemoryRequirements2(
- VkDevice device,
- const VkBufferMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements);
-
-VKAPI_ATTR void VKAPI_CALL GetImageSparseMemoryRequirements2(
- VkDevice device,
- const VkImageSparseMemoryRequirementsInfo2* pInfo,
- uint32_t* pSparseMemoryRequirementCount,
- VkSparseImageMemoryRequirements2* pSparseMemoryRequirements);
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFeatures2(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceFeatures2* pFeatures);
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceProperties2(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceProperties2* pProperties);
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFormatProperties2(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkFormatProperties2* pFormatProperties);
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceImageFormatProperties2(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
- VkImageFormatProperties2* pImageFormatProperties);
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceQueueFamilyProperties2(
- VkPhysicalDevice physicalDevice,
- uint32_t* pQueueFamilyPropertyCount,
- VkQueueFamilyProperties2* pQueueFamilyProperties);
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceMemoryProperties2(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceMemoryProperties2* pMemoryProperties);
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceSparseImageFormatProperties2(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo,
- uint32_t* pPropertyCount,
- VkSparseImageFormatProperties2* pProperties);
-
-VKAPI_ATTR void VKAPI_CALL TrimCommandPool(
- VkDevice device,
- VkCommandPool commandPool,
- VkCommandPoolTrimFlags flags);
-
-VKAPI_ATTR void VKAPI_CALL GetDeviceQueue2(
- VkDevice device,
- const VkDeviceQueueInfo2* pQueueInfo,
- VkQueue* pQueue);
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateSamplerYcbcrConversion(
- VkDevice device,
- const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSamplerYcbcrConversion* pYcbcrConversion);
-
-VKAPI_ATTR void VKAPI_CALL DestroySamplerYcbcrConversion(
- VkDevice device,
- VkSamplerYcbcrConversion ycbcrConversion,
- const VkAllocationCallbacks* pAllocator);
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateDescriptorUpdateTemplate(
- VkDevice device,
- const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate);
-
-VKAPI_ATTR void VKAPI_CALL DestroyDescriptorUpdateTemplate(
- VkDevice device,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const VkAllocationCallbacks* pAllocator);
-
-VKAPI_ATTR void VKAPI_CALL UpdateDescriptorSetWithTemplate(
- VkDevice device,
- VkDescriptorSet descriptorSet,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const void* pData);
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceExternalBufferProperties(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo,
- VkExternalBufferProperties* pExternalBufferProperties);
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceExternalFenceProperties(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo,
- VkExternalFenceProperties* pExternalFenceProperties);
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceExternalSemaphoreProperties(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
- VkExternalSemaphoreProperties* pExternalSemaphoreProperties);
-
-VKAPI_ATTR void VKAPI_CALL GetDescriptorSetLayoutSupport(
- VkDevice device,
- const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
- VkDescriptorSetLayoutSupport* pSupport);
-
-
-VKAPI_ATTR void VKAPI_CALL DestroySurfaceKHR(
- VkInstance instance,
- VkSurfaceKHR surface,
- const VkAllocationCallbacks* pAllocator);
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex,
- VkSurfaceKHR surface,
- VkBool32* pSupported);
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceCapabilitiesKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- VkSurfaceCapabilitiesKHR* pSurfaceCapabilities);
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceFormatsKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- uint32_t* pSurfaceFormatCount,
- VkSurfaceFormatKHR* pSurfaceFormats);
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfacePresentModesKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- uint32_t* pPresentModeCount,
- VkPresentModeKHR* pPresentModes);
-
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateSwapchainKHR(
- VkDevice device,
- const VkSwapchainCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSwapchainKHR* pSwapchain);
-
-VKAPI_ATTR void VKAPI_CALL DestroySwapchainKHR(
- VkDevice device,
- VkSwapchainKHR swapchain,
- const VkAllocationCallbacks* pAllocator);
-
-VKAPI_ATTR VkResult VKAPI_CALL GetSwapchainImagesKHR(
- VkDevice device,
- VkSwapchainKHR swapchain,
- uint32_t* pSwapchainImageCount,
- VkImage* pSwapchainImages);
-
-VKAPI_ATTR VkResult VKAPI_CALL AcquireNextImageKHR(
- VkDevice device,
- VkSwapchainKHR swapchain,
- uint64_t timeout,
- VkSemaphore semaphore,
- VkFence fence,
- uint32_t* pImageIndex);
-
-VKAPI_ATTR VkResult VKAPI_CALL QueuePresentKHR(
- VkQueue queue,
- const VkPresentInfoKHR* pPresentInfo);
-
-VKAPI_ATTR VkResult VKAPI_CALL GetDeviceGroupPresentCapabilitiesKHR(
- VkDevice device,
- VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities);
-
-VKAPI_ATTR VkResult VKAPI_CALL GetDeviceGroupSurfacePresentModesKHR(
- VkDevice device,
- VkSurfaceKHR surface,
- VkDeviceGroupPresentModeFlagsKHR* pModes);
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDevicePresentRectanglesKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- uint32_t* pRectCount,
- VkRect2D* pRects);
-
-VKAPI_ATTR VkResult VKAPI_CALL AcquireNextImage2KHR(
- VkDevice device,
- const VkAcquireNextImageInfoKHR* pAcquireInfo,
- uint32_t* pImageIndex);
-
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceDisplayPropertiesKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t* pPropertyCount,
- VkDisplayPropertiesKHR* pProperties);
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceDisplayPlanePropertiesKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t* pPropertyCount,
- VkDisplayPlanePropertiesKHR* pProperties);
-
-VKAPI_ATTR VkResult VKAPI_CALL GetDisplayPlaneSupportedDisplaysKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t planeIndex,
- uint32_t* pDisplayCount,
- VkDisplayKHR* pDisplays);
-
-VKAPI_ATTR VkResult VKAPI_CALL GetDisplayModePropertiesKHR(
- VkPhysicalDevice physicalDevice,
- VkDisplayKHR display,
- uint32_t* pPropertyCount,
- VkDisplayModePropertiesKHR* pProperties);
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateDisplayModeKHR(
- VkPhysicalDevice physicalDevice,
- VkDisplayKHR display,
- const VkDisplayModeCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDisplayModeKHR* pMode);
-
-VKAPI_ATTR VkResult VKAPI_CALL GetDisplayPlaneCapabilitiesKHR(
- VkPhysicalDevice physicalDevice,
- VkDisplayModeKHR mode,
- uint32_t planeIndex,
- VkDisplayPlaneCapabilitiesKHR* pCapabilities);
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateDisplayPlaneSurfaceKHR(
- VkInstance instance,
- const VkDisplaySurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateSharedSwapchainsKHR(
- VkDevice device,
- uint32_t swapchainCount,
- const VkSwapchainCreateInfoKHR* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkSwapchainKHR* pSwapchains);
-
-#ifdef VK_USE_PLATFORM_XLIB_KHR
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateXlibSurfaceKHR(
- VkInstance instance,
- const VkXlibSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-
-VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceXlibPresentationSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex,
- Display* dpy,
- VisualID visualID);
-#endif // VK_USE_PLATFORM_XLIB_KHR
-
-#ifdef VK_USE_PLATFORM_XCB_KHR
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateXcbSurfaceKHR(
- VkInstance instance,
- const VkXcbSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-
-VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceXcbPresentationSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex,
- xcb_connection_t* connection,
- xcb_visualid_t visual_id);
-#endif // VK_USE_PLATFORM_XCB_KHR
-
-#ifdef VK_USE_PLATFORM_WAYLAND_KHR
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateWaylandSurfaceKHR(
- VkInstance instance,
- const VkWaylandSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-
-VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceWaylandPresentationSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex,
- struct wl_display* display);
-#endif // VK_USE_PLATFORM_WAYLAND_KHR
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateAndroidSurfaceKHR(
- VkInstance instance,
- const VkAndroidSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateWin32SurfaceKHR(
- VkInstance instance,
- const VkWin32SurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-
-VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceWin32PresentationSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex);
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-
-
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFeatures2KHR(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceFeatures2* pFeatures);
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceProperties2KHR(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceProperties2* pProperties);
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFormatProperties2KHR(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkFormatProperties2* pFormatProperties);
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceImageFormatProperties2KHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
- VkImageFormatProperties2* pImageFormatProperties);
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceQueueFamilyProperties2KHR(
- VkPhysicalDevice physicalDevice,
- uint32_t* pQueueFamilyPropertyCount,
- VkQueueFamilyProperties2* pQueueFamilyProperties);
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceMemoryProperties2KHR(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceMemoryProperties2* pMemoryProperties);
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceSparseImageFormatProperties2KHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo,
- uint32_t* pPropertyCount,
- VkSparseImageFormatProperties2* pProperties);
-
-
-VKAPI_ATTR void VKAPI_CALL GetDeviceGroupPeerMemoryFeaturesKHR(
- VkDevice device,
- uint32_t heapIndex,
- uint32_t localDeviceIndex,
- uint32_t remoteDeviceIndex,
- VkPeerMemoryFeatureFlags* pPeerMemoryFeatures);
-
-VKAPI_ATTR void VKAPI_CALL CmdSetDeviceMaskKHR(
- VkCommandBuffer commandBuffer,
- uint32_t deviceMask);
-
-VKAPI_ATTR void VKAPI_CALL CmdDispatchBaseKHR(
- VkCommandBuffer commandBuffer,
- uint32_t baseGroupX,
- uint32_t baseGroupY,
- uint32_t baseGroupZ,
- uint32_t groupCountX,
- uint32_t groupCountY,
- uint32_t groupCountZ);
-
-
-
-VKAPI_ATTR void VKAPI_CALL TrimCommandPoolKHR(
- VkDevice device,
- VkCommandPool commandPool,
- VkCommandPoolTrimFlags flags);
-
-
-VKAPI_ATTR VkResult VKAPI_CALL EnumeratePhysicalDeviceGroupsKHR(
- VkInstance instance,
- uint32_t* pPhysicalDeviceGroupCount,
- VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties);
-
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceExternalBufferPropertiesKHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo,
- VkExternalBufferProperties* pExternalBufferProperties);
-
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-VKAPI_ATTR VkResult VKAPI_CALL GetMemoryWin32HandleKHR(
- VkDevice device,
- const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo,
- HANDLE* pHandle);
-
-VKAPI_ATTR VkResult VKAPI_CALL GetMemoryWin32HandlePropertiesKHR(
- VkDevice device,
- VkExternalMemoryHandleTypeFlagBits handleType,
- HANDLE handle,
- VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties);
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-
-VKAPI_ATTR VkResult VKAPI_CALL GetMemoryFdKHR(
- VkDevice device,
- const VkMemoryGetFdInfoKHR* pGetFdInfo,
- int* pFd);
-
-VKAPI_ATTR VkResult VKAPI_CALL GetMemoryFdPropertiesKHR(
- VkDevice device,
- VkExternalMemoryHandleTypeFlagBits handleType,
- int fd,
- VkMemoryFdPropertiesKHR* pMemoryFdProperties);
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceExternalSemaphorePropertiesKHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
- VkExternalSemaphoreProperties* pExternalSemaphoreProperties);
-
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-VKAPI_ATTR VkResult VKAPI_CALL ImportSemaphoreWin32HandleKHR(
- VkDevice device,
- const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo);
-
-VKAPI_ATTR VkResult VKAPI_CALL GetSemaphoreWin32HandleKHR(
- VkDevice device,
- const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo,
- HANDLE* pHandle);
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-
-VKAPI_ATTR VkResult VKAPI_CALL ImportSemaphoreFdKHR(
- VkDevice device,
- const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo);
-
-VKAPI_ATTR VkResult VKAPI_CALL GetSemaphoreFdKHR(
- VkDevice device,
- const VkSemaphoreGetFdInfoKHR* pGetFdInfo,
- int* pFd);
-
-
-VKAPI_ATTR void VKAPI_CALL CmdPushDescriptorSetKHR(
- VkCommandBuffer commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipelineLayout layout,
- uint32_t set,
- uint32_t descriptorWriteCount,
- const VkWriteDescriptorSet* pDescriptorWrites);
-
-VKAPI_ATTR void VKAPI_CALL CmdPushDescriptorSetWithTemplateKHR(
- VkCommandBuffer commandBuffer,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- VkPipelineLayout layout,
- uint32_t set,
- const void* pData);
-
-
-
-
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateDescriptorUpdateTemplateKHR(
- VkDevice device,
- const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate);
-
-VKAPI_ATTR void VKAPI_CALL DestroyDescriptorUpdateTemplateKHR(
- VkDevice device,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const VkAllocationCallbacks* pAllocator);
-
-VKAPI_ATTR void VKAPI_CALL UpdateDescriptorSetWithTemplateKHR(
- VkDevice device,
- VkDescriptorSet descriptorSet,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const void* pData);
-
-
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateRenderPass2KHR(
- VkDevice device,
- const VkRenderPassCreateInfo2KHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkRenderPass* pRenderPass);
-
-VKAPI_ATTR void VKAPI_CALL CmdBeginRenderPass2KHR(
- VkCommandBuffer commandBuffer,
- const VkRenderPassBeginInfo* pRenderPassBegin,
- const VkSubpassBeginInfoKHR* pSubpassBeginInfo);
-
-VKAPI_ATTR void VKAPI_CALL CmdNextSubpass2KHR(
- VkCommandBuffer commandBuffer,
- const VkSubpassBeginInfoKHR* pSubpassBeginInfo,
- const VkSubpassEndInfoKHR* pSubpassEndInfo);
-
-VKAPI_ATTR void VKAPI_CALL CmdEndRenderPass2KHR(
- VkCommandBuffer commandBuffer,
- const VkSubpassEndInfoKHR* pSubpassEndInfo);
-
-
-VKAPI_ATTR VkResult VKAPI_CALL GetSwapchainStatusKHR(
- VkDevice device,
- VkSwapchainKHR swapchain);
-
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceExternalFencePropertiesKHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo,
- VkExternalFenceProperties* pExternalFenceProperties);
-
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-VKAPI_ATTR VkResult VKAPI_CALL ImportFenceWin32HandleKHR(
- VkDevice device,
- const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo);
-
-VKAPI_ATTR VkResult VKAPI_CALL GetFenceWin32HandleKHR(
- VkDevice device,
- const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo,
- HANDLE* pHandle);
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-
-VKAPI_ATTR VkResult VKAPI_CALL ImportFenceFdKHR(
- VkDevice device,
- const VkImportFenceFdInfoKHR* pImportFenceFdInfo);
-
-VKAPI_ATTR VkResult VKAPI_CALL GetFenceFdKHR(
- VkDevice device,
- const VkFenceGetFdInfoKHR* pGetFdInfo,
- int* pFd);
-
-
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceCapabilities2KHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
- VkSurfaceCapabilities2KHR* pSurfaceCapabilities);
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceFormats2KHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
- uint32_t* pSurfaceFormatCount,
- VkSurfaceFormat2KHR* pSurfaceFormats);
-
-
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceDisplayProperties2KHR(
- VkPhysicalDevice physicalDevice,
- uint32_t* pPropertyCount,
- VkDisplayProperties2KHR* pProperties);
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceDisplayPlaneProperties2KHR(
- VkPhysicalDevice physicalDevice,
- uint32_t* pPropertyCount,
- VkDisplayPlaneProperties2KHR* pProperties);
-
-VKAPI_ATTR VkResult VKAPI_CALL GetDisplayModeProperties2KHR(
- VkPhysicalDevice physicalDevice,
- VkDisplayKHR display,
- uint32_t* pPropertyCount,
- VkDisplayModeProperties2KHR* pProperties);
-
-VKAPI_ATTR VkResult VKAPI_CALL GetDisplayPlaneCapabilities2KHR(
- VkPhysicalDevice physicalDevice,
- const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo,
- VkDisplayPlaneCapabilities2KHR* pCapabilities);
-
-
-
-
-
-VKAPI_ATTR void VKAPI_CALL GetImageMemoryRequirements2KHR(
- VkDevice device,
- const VkImageMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements);
-
-VKAPI_ATTR void VKAPI_CALL GetBufferMemoryRequirements2KHR(
- VkDevice device,
- const VkBufferMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements);
-
-VKAPI_ATTR void VKAPI_CALL GetImageSparseMemoryRequirements2KHR(
- VkDevice device,
- const VkImageSparseMemoryRequirementsInfo2* pInfo,
- uint32_t* pSparseMemoryRequirementCount,
- VkSparseImageMemoryRequirements2* pSparseMemoryRequirements);
-
-
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateSamplerYcbcrConversionKHR(
- VkDevice device,
- const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSamplerYcbcrConversion* pYcbcrConversion);
-
-VKAPI_ATTR void VKAPI_CALL DestroySamplerYcbcrConversionKHR(
- VkDevice device,
- VkSamplerYcbcrConversion ycbcrConversion,
- const VkAllocationCallbacks* pAllocator);
-
-
-VKAPI_ATTR VkResult VKAPI_CALL BindBufferMemory2KHR(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindBufferMemoryInfo* pBindInfos);
-
-VKAPI_ATTR VkResult VKAPI_CALL BindImageMemory2KHR(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindImageMemoryInfo* pBindInfos);
-
-
-VKAPI_ATTR void VKAPI_CALL GetDescriptorSetLayoutSupportKHR(
- VkDevice device,
- const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
- VkDescriptorSetLayoutSupport* pSupport);
-
-
-VKAPI_ATTR void VKAPI_CALL CmdDrawIndirectCountKHR(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride);
-
-VKAPI_ATTR void VKAPI_CALL CmdDrawIndexedIndirectCountKHR(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride);
-
-
-
-
-
-
-
-
-
-
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPipelineExecutablePropertiesKHR(
- VkDevice device,
- const VkPipelineInfoKHR* pPipelineInfo,
- uint32_t* pExecutableCount,
- VkPipelineExecutablePropertiesKHR* pProperties);
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPipelineExecutableStatisticsKHR(
- VkDevice device,
- const VkPipelineExecutableInfoKHR* pExecutableInfo,
- uint32_t* pStatisticCount,
- VkPipelineExecutableStatisticKHR* pStatistics);
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPipelineExecutableInternalRepresentationsKHR(
- VkDevice device,
- const VkPipelineExecutableInfoKHR* pExecutableInfo,
- uint32_t* pInternalRepresentationCount,
- VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations);
-
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateDebugReportCallbackEXT(
- VkInstance instance,
- const VkDebugReportCallbackCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDebugReportCallbackEXT* pCallback);
-
-VKAPI_ATTR void VKAPI_CALL DestroyDebugReportCallbackEXT(
- VkInstance instance,
- VkDebugReportCallbackEXT callback,
- const VkAllocationCallbacks* pAllocator);
-
-VKAPI_ATTR void VKAPI_CALL DebugReportMessageEXT(
- VkInstance instance,
- VkDebugReportFlagsEXT flags,
- VkDebugReportObjectTypeEXT objectType,
- uint64_t object,
- size_t location,
- int32_t messageCode,
- const char* pLayerPrefix,
- const char* pMessage);
-
-
-
-
-
-
-
-
-VKAPI_ATTR VkResult VKAPI_CALL DebugMarkerSetObjectTagEXT(
- VkDevice device,
- const VkDebugMarkerObjectTagInfoEXT* pTagInfo);
-
-VKAPI_ATTR VkResult VKAPI_CALL DebugMarkerSetObjectNameEXT(
- VkDevice device,
- const VkDebugMarkerObjectNameInfoEXT* pNameInfo);
-
-VKAPI_ATTR void VKAPI_CALL CmdDebugMarkerBeginEXT(
- VkCommandBuffer commandBuffer,
- const VkDebugMarkerMarkerInfoEXT* pMarkerInfo);
-
-VKAPI_ATTR void VKAPI_CALL CmdDebugMarkerEndEXT(
- VkCommandBuffer commandBuffer);
-
-VKAPI_ATTR void VKAPI_CALL CmdDebugMarkerInsertEXT(
- VkCommandBuffer commandBuffer,
- const VkDebugMarkerMarkerInfoEXT* pMarkerInfo);
-
-
-
-
-VKAPI_ATTR void VKAPI_CALL CmdBindTransformFeedbackBuffersEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstBinding,
- uint32_t bindingCount,
- const VkBuffer* pBuffers,
- const VkDeviceSize* pOffsets,
- const VkDeviceSize* pSizes);
-
-VKAPI_ATTR void VKAPI_CALL CmdBeginTransformFeedbackEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstCounterBuffer,
- uint32_t counterBufferCount,
- const VkBuffer* pCounterBuffers,
- const VkDeviceSize* pCounterBufferOffsets);
-
-VKAPI_ATTR void VKAPI_CALL CmdEndTransformFeedbackEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstCounterBuffer,
- uint32_t counterBufferCount,
- const VkBuffer* pCounterBuffers,
- const VkDeviceSize* pCounterBufferOffsets);
-
-VKAPI_ATTR void VKAPI_CALL CmdBeginQueryIndexedEXT(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query,
- VkQueryControlFlags flags,
- uint32_t index);
-
-VKAPI_ATTR void VKAPI_CALL CmdEndQueryIndexedEXT(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query,
- uint32_t index);
-
-VKAPI_ATTR void VKAPI_CALL CmdDrawIndirectByteCountEXT(
- VkCommandBuffer commandBuffer,
- uint32_t instanceCount,
- uint32_t firstInstance,
- VkBuffer counterBuffer,
- VkDeviceSize counterBufferOffset,
- uint32_t counterOffset,
- uint32_t vertexStride);
-
-
-VKAPI_ATTR uint32_t VKAPI_CALL GetImageViewHandleNVX(
- VkDevice device,
- const VkImageViewHandleInfoNVX* pInfo);
-
-
-VKAPI_ATTR void VKAPI_CALL CmdDrawIndirectCountAMD(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride);
-
-VKAPI_ATTR void VKAPI_CALL CmdDrawIndexedIndirectCountAMD(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride);
-
-
-
-
-
-
-VKAPI_ATTR VkResult VKAPI_CALL GetShaderInfoAMD(
- VkDevice device,
- VkPipeline pipeline,
- VkShaderStageFlagBits shaderStage,
- VkShaderInfoTypeAMD infoType,
- size_t* pInfoSize,
- void* pInfo);
-
-
-#ifdef VK_USE_PLATFORM_GGP
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateStreamDescriptorSurfaceGGP(
- VkInstance instance,
- const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-#endif // VK_USE_PLATFORM_GGP
-
-
-
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceExternalImageFormatPropertiesNV(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkImageType type,
- VkImageTiling tiling,
- VkImageUsageFlags usage,
- VkImageCreateFlags flags,
- VkExternalMemoryHandleTypeFlagsNV externalHandleType,
- VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties);
-
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-VKAPI_ATTR VkResult VKAPI_CALL GetMemoryWin32HandleNV(
- VkDevice device,
- VkDeviceMemory memory,
- VkExternalMemoryHandleTypeFlagsNV handleType,
- HANDLE* pHandle);
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-
-#ifdef VK_USE_PLATFORM_VI_NN
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateViSurfaceNN(
- VkInstance instance,
- const VkViSurfaceCreateInfoNN* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-#endif // VK_USE_PLATFORM_VI_NN
-
-
-
-
-
-
-VKAPI_ATTR void VKAPI_CALL CmdBeginConditionalRenderingEXT(
- VkCommandBuffer commandBuffer,
- const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin);
-
-VKAPI_ATTR void VKAPI_CALL CmdEndConditionalRenderingEXT(
- VkCommandBuffer commandBuffer);
-
-
-VKAPI_ATTR void VKAPI_CALL CmdProcessCommandsNVX(
- VkCommandBuffer commandBuffer,
- const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo);
-
-VKAPI_ATTR void VKAPI_CALL CmdReserveSpaceForCommandsNVX(
- VkCommandBuffer commandBuffer,
- const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo);
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateIndirectCommandsLayoutNVX(
- VkDevice device,
- const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout);
-
-VKAPI_ATTR void VKAPI_CALL DestroyIndirectCommandsLayoutNVX(
- VkDevice device,
- VkIndirectCommandsLayoutNVX indirectCommandsLayout,
- const VkAllocationCallbacks* pAllocator);
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateObjectTableNVX(
- VkDevice device,
- const VkObjectTableCreateInfoNVX* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkObjectTableNVX* pObjectTable);
-
-VKAPI_ATTR void VKAPI_CALL DestroyObjectTableNVX(
- VkDevice device,
- VkObjectTableNVX objectTable,
- const VkAllocationCallbacks* pAllocator);
-
-VKAPI_ATTR VkResult VKAPI_CALL RegisterObjectsNVX(
- VkDevice device,
- VkObjectTableNVX objectTable,
- uint32_t objectCount,
- const VkObjectTableEntryNVX* const* ppObjectTableEntries,
- const uint32_t* pObjectIndices);
-
-VKAPI_ATTR VkResult VKAPI_CALL UnregisterObjectsNVX(
- VkDevice device,
- VkObjectTableNVX objectTable,
- uint32_t objectCount,
- const VkObjectEntryTypeNVX* pObjectEntryTypes,
- const uint32_t* pObjectIndices);
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceGeneratedCommandsPropertiesNVX(
- VkPhysicalDevice physicalDevice,
- VkDeviceGeneratedCommandsFeaturesNVX* pFeatures,
- VkDeviceGeneratedCommandsLimitsNVX* pLimits);
-
-
-VKAPI_ATTR void VKAPI_CALL CmdSetViewportWScalingNV(
- VkCommandBuffer commandBuffer,
- uint32_t firstViewport,
- uint32_t viewportCount,
- const VkViewportWScalingNV* pViewportWScalings);
-
-
-VKAPI_ATTR VkResult VKAPI_CALL ReleaseDisplayEXT(
- VkPhysicalDevice physicalDevice,
- VkDisplayKHR display);
-
-#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
-
-VKAPI_ATTR VkResult VKAPI_CALL AcquireXlibDisplayEXT(
- VkPhysicalDevice physicalDevice,
- Display* dpy,
- VkDisplayKHR display);
-
-VKAPI_ATTR VkResult VKAPI_CALL GetRandROutputDisplayEXT(
- VkPhysicalDevice physicalDevice,
- Display* dpy,
- RROutput rrOutput,
- VkDisplayKHR* pDisplay);
-#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
-
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceCapabilities2EXT(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- VkSurfaceCapabilities2EXT* pSurfaceCapabilities);
-
-
-VKAPI_ATTR VkResult VKAPI_CALL DisplayPowerControlEXT(
- VkDevice device,
- VkDisplayKHR display,
- const VkDisplayPowerInfoEXT* pDisplayPowerInfo);
-
-VKAPI_ATTR VkResult VKAPI_CALL RegisterDeviceEventEXT(
- VkDevice device,
- const VkDeviceEventInfoEXT* pDeviceEventInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFence* pFence);
-
-VKAPI_ATTR VkResult VKAPI_CALL RegisterDisplayEventEXT(
- VkDevice device,
- VkDisplayKHR display,
- const VkDisplayEventInfoEXT* pDisplayEventInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFence* pFence);
-
-VKAPI_ATTR VkResult VKAPI_CALL GetSwapchainCounterEXT(
- VkDevice device,
- VkSwapchainKHR swapchain,
- VkSurfaceCounterFlagBitsEXT counter,
- uint64_t* pCounterValue);
-
-
-VKAPI_ATTR VkResult VKAPI_CALL GetRefreshCycleDurationGOOGLE(
- VkDevice device,
- VkSwapchainKHR swapchain,
- VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties);
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPastPresentationTimingGOOGLE(
- VkDevice device,
- VkSwapchainKHR swapchain,
- uint32_t* pPresentationTimingCount,
- VkPastPresentationTimingGOOGLE* pPresentationTimings);
-
-
-
-
-
-
-
-VKAPI_ATTR void VKAPI_CALL CmdSetDiscardRectangleEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstDiscardRectangle,
- uint32_t discardRectangleCount,
- const VkRect2D* pDiscardRectangles);
-
-
-
-
-
-VKAPI_ATTR void VKAPI_CALL SetHdrMetadataEXT(
- VkDevice device,
- uint32_t swapchainCount,
- const VkSwapchainKHR* pSwapchains,
- const VkHdrMetadataEXT* pMetadata);
-
-#ifdef VK_USE_PLATFORM_IOS_MVK
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateIOSSurfaceMVK(
- VkInstance instance,
- const VkIOSSurfaceCreateInfoMVK* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-#endif // VK_USE_PLATFORM_IOS_MVK
-
-#ifdef VK_USE_PLATFORM_MACOS_MVK
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateMacOSSurfaceMVK(
- VkInstance instance,
- const VkMacOSSurfaceCreateInfoMVK* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-#endif // VK_USE_PLATFORM_MACOS_MVK
-
-
-
-
-VKAPI_ATTR VkResult VKAPI_CALL SetDebugUtilsObjectNameEXT(
- VkDevice device,
- const VkDebugUtilsObjectNameInfoEXT* pNameInfo);
-
-VKAPI_ATTR VkResult VKAPI_CALL SetDebugUtilsObjectTagEXT(
- VkDevice device,
- const VkDebugUtilsObjectTagInfoEXT* pTagInfo);
-
-VKAPI_ATTR void VKAPI_CALL QueueBeginDebugUtilsLabelEXT(
- VkQueue queue,
- const VkDebugUtilsLabelEXT* pLabelInfo);
-
-VKAPI_ATTR void VKAPI_CALL QueueEndDebugUtilsLabelEXT(
- VkQueue queue);
-
-VKAPI_ATTR void VKAPI_CALL QueueInsertDebugUtilsLabelEXT(
- VkQueue queue,
- const VkDebugUtilsLabelEXT* pLabelInfo);
-
-VKAPI_ATTR void VKAPI_CALL CmdBeginDebugUtilsLabelEXT(
- VkCommandBuffer commandBuffer,
- const VkDebugUtilsLabelEXT* pLabelInfo);
-
-VKAPI_ATTR void VKAPI_CALL CmdEndDebugUtilsLabelEXT(
- VkCommandBuffer commandBuffer);
-
-VKAPI_ATTR void VKAPI_CALL CmdInsertDebugUtilsLabelEXT(
- VkCommandBuffer commandBuffer,
- const VkDebugUtilsLabelEXT* pLabelInfo);
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateDebugUtilsMessengerEXT(
- VkInstance instance,
- const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDebugUtilsMessengerEXT* pMessenger);
-
-VKAPI_ATTR void VKAPI_CALL DestroyDebugUtilsMessengerEXT(
- VkInstance instance,
- VkDebugUtilsMessengerEXT messenger,
- const VkAllocationCallbacks* pAllocator);
-
-VKAPI_ATTR void VKAPI_CALL SubmitDebugUtilsMessageEXT(
- VkInstance instance,
- VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
- VkDebugUtilsMessageTypeFlagsEXT messageTypes,
- const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData);
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-
-VKAPI_ATTR VkResult VKAPI_CALL GetAndroidHardwareBufferPropertiesANDROID(
- VkDevice device,
- const struct AHardwareBuffer* buffer,
- VkAndroidHardwareBufferPropertiesANDROID* pProperties);
-
-VKAPI_ATTR VkResult VKAPI_CALL GetMemoryAndroidHardwareBufferANDROID(
- VkDevice device,
- const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo,
- struct AHardwareBuffer** pBuffer);
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-
-
-
-
-
-
-
-
-VKAPI_ATTR void VKAPI_CALL CmdSetSampleLocationsEXT(
- VkCommandBuffer commandBuffer,
- const VkSampleLocationsInfoEXT* pSampleLocationsInfo);
-
-VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceMultisamplePropertiesEXT(
- VkPhysicalDevice physicalDevice,
- VkSampleCountFlagBits samples,
- VkMultisamplePropertiesEXT* pMultisampleProperties);
-
-
-
-
-
-
-
-
-VKAPI_ATTR VkResult VKAPI_CALL GetImageDrmFormatModifierPropertiesEXT(
- VkDevice device,
- VkImage image,
- VkImageDrmFormatModifierPropertiesEXT* pProperties);
-
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateValidationCacheEXT(
- VkDevice device,
- const VkValidationCacheCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkValidationCacheEXT* pValidationCache);
-
-VKAPI_ATTR void VKAPI_CALL DestroyValidationCacheEXT(
- VkDevice device,
- VkValidationCacheEXT validationCache,
- const VkAllocationCallbacks* pAllocator);
-
-VKAPI_ATTR VkResult VKAPI_CALL MergeValidationCachesEXT(
- VkDevice device,
- VkValidationCacheEXT dstCache,
- uint32_t srcCacheCount,
- const VkValidationCacheEXT* pSrcCaches);
-
-VKAPI_ATTR VkResult VKAPI_CALL GetValidationCacheDataEXT(
- VkDevice device,
- VkValidationCacheEXT validationCache,
- size_t* pDataSize,
- void* pData);
-
-
-
-
-VKAPI_ATTR void VKAPI_CALL CmdBindShadingRateImageNV(
- VkCommandBuffer commandBuffer,
- VkImageView imageView,
- VkImageLayout imageLayout);
-
-VKAPI_ATTR void VKAPI_CALL CmdSetViewportShadingRatePaletteNV(
- VkCommandBuffer commandBuffer,
- uint32_t firstViewport,
- uint32_t viewportCount,
- const VkShadingRatePaletteNV* pShadingRatePalettes);
-
-VKAPI_ATTR void VKAPI_CALL CmdSetCoarseSampleOrderNV(
- VkCommandBuffer commandBuffer,
- VkCoarseSampleOrderTypeNV sampleOrderType,
- uint32_t customSampleOrderCount,
- const VkCoarseSampleOrderCustomNV* pCustomSampleOrders);
-
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateAccelerationStructureNV(
- VkDevice device,
- const VkAccelerationStructureCreateInfoNV* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkAccelerationStructureNV* pAccelerationStructure);
-
-VKAPI_ATTR void VKAPI_CALL DestroyAccelerationStructureNV(
- VkDevice device,
- VkAccelerationStructureNV accelerationStructure,
- const VkAllocationCallbacks* pAllocator);
-
-VKAPI_ATTR void VKAPI_CALL GetAccelerationStructureMemoryRequirementsNV(
- VkDevice device,
- const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo,
- VkMemoryRequirements2KHR* pMemoryRequirements);
-
-VKAPI_ATTR VkResult VKAPI_CALL BindAccelerationStructureMemoryNV(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindAccelerationStructureMemoryInfoNV* pBindInfos);
-
-VKAPI_ATTR void VKAPI_CALL CmdBuildAccelerationStructureNV(
- VkCommandBuffer commandBuffer,
- const VkAccelerationStructureInfoNV* pInfo,
- VkBuffer instanceData,
- VkDeviceSize instanceOffset,
- VkBool32 update,
- VkAccelerationStructureNV dst,
- VkAccelerationStructureNV src,
- VkBuffer scratch,
- VkDeviceSize scratchOffset);
-
-VKAPI_ATTR void VKAPI_CALL CmdCopyAccelerationStructureNV(
- VkCommandBuffer commandBuffer,
- VkAccelerationStructureNV dst,
- VkAccelerationStructureNV src,
- VkCopyAccelerationStructureModeNV mode);
-
-VKAPI_ATTR void VKAPI_CALL CmdTraceRaysNV(
- VkCommandBuffer commandBuffer,
- VkBuffer raygenShaderBindingTableBuffer,
- VkDeviceSize raygenShaderBindingOffset,
- VkBuffer missShaderBindingTableBuffer,
- VkDeviceSize missShaderBindingOffset,
- VkDeviceSize missShaderBindingStride,
- VkBuffer hitShaderBindingTableBuffer,
- VkDeviceSize hitShaderBindingOffset,
- VkDeviceSize hitShaderBindingStride,
- VkBuffer callableShaderBindingTableBuffer,
- VkDeviceSize callableShaderBindingOffset,
- VkDeviceSize callableShaderBindingStride,
- uint32_t width,
- uint32_t height,
- uint32_t depth);
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateRayTracingPipelinesNV(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VkRayTracingPipelineCreateInfoNV* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkPipeline* pPipelines);
-
-VKAPI_ATTR VkResult VKAPI_CALL GetRayTracingShaderGroupHandlesNV(
- VkDevice device,
- VkPipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- size_t dataSize,
- void* pData);
-
-VKAPI_ATTR VkResult VKAPI_CALL GetAccelerationStructureHandleNV(
- VkDevice device,
- VkAccelerationStructureNV accelerationStructure,
- size_t dataSize,
- void* pData);
-
-VKAPI_ATTR void VKAPI_CALL CmdWriteAccelerationStructuresPropertiesNV(
- VkCommandBuffer commandBuffer,
- uint32_t accelerationStructureCount,
- const VkAccelerationStructureNV* pAccelerationStructures,
- VkQueryType queryType,
- VkQueryPool queryPool,
- uint32_t firstQuery);
-
-VKAPI_ATTR VkResult VKAPI_CALL CompileDeferredNV(
- VkDevice device,
- VkPipeline pipeline,
- uint32_t shader);
-
-
-
-
-
-VKAPI_ATTR VkResult VKAPI_CALL GetMemoryHostPointerPropertiesEXT(
- VkDevice device,
- VkExternalMemoryHandleTypeFlagBits handleType,
- const void* pHostPointer,
- VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties);
-
-
-VKAPI_ATTR void VKAPI_CALL CmdWriteBufferMarkerAMD(
- VkCommandBuffer commandBuffer,
- VkPipelineStageFlagBits pipelineStage,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- uint32_t marker);
-
-
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceCalibrateableTimeDomainsEXT(
- VkPhysicalDevice physicalDevice,
- uint32_t* pTimeDomainCount,
- VkTimeDomainEXT* pTimeDomains);
-
-VKAPI_ATTR VkResult VKAPI_CALL GetCalibratedTimestampsEXT(
- VkDevice device,
- uint32_t timestampCount,
- const VkCalibratedTimestampInfoEXT* pTimestampInfos,
- uint64_t* pTimestamps,
- uint64_t* pMaxDeviation);
-
-
-
-
-#ifdef VK_USE_PLATFORM_GGP
-#endif // VK_USE_PLATFORM_GGP
-
-
-
-
-
-VKAPI_ATTR void VKAPI_CALL CmdDrawMeshTasksNV(
- VkCommandBuffer commandBuffer,
- uint32_t taskCount,
- uint32_t firstTask);
-
-VKAPI_ATTR void VKAPI_CALL CmdDrawMeshTasksIndirectNV(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- uint32_t drawCount,
- uint32_t stride);
-
-VKAPI_ATTR void VKAPI_CALL CmdDrawMeshTasksIndirectCountNV(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride);
-
-
-
-
-VKAPI_ATTR void VKAPI_CALL CmdSetExclusiveScissorNV(
- VkCommandBuffer commandBuffer,
- uint32_t firstExclusiveScissor,
- uint32_t exclusiveScissorCount,
- const VkRect2D* pExclusiveScissors);
-
-
-VKAPI_ATTR void VKAPI_CALL CmdSetCheckpointNV(
- VkCommandBuffer commandBuffer,
- const void* pCheckpointMarker);
-
-VKAPI_ATTR void VKAPI_CALL GetQueueCheckpointDataNV(
- VkQueue queue,
- uint32_t* pCheckpointDataCount,
- VkCheckpointDataNV* pCheckpointData);
-
-
-
-VKAPI_ATTR VkResult VKAPI_CALL InitializePerformanceApiINTEL(
- VkDevice device,
- const VkInitializePerformanceApiInfoINTEL* pInitializeInfo);
-
-VKAPI_ATTR void VKAPI_CALL UninitializePerformanceApiINTEL(
- VkDevice device);
-
-VKAPI_ATTR VkResult VKAPI_CALL CmdSetPerformanceMarkerINTEL(
- VkCommandBuffer commandBuffer,
- const VkPerformanceMarkerInfoINTEL* pMarkerInfo);
-
-VKAPI_ATTR VkResult VKAPI_CALL CmdSetPerformanceStreamMarkerINTEL(
- VkCommandBuffer commandBuffer,
- const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo);
-
-VKAPI_ATTR VkResult VKAPI_CALL CmdSetPerformanceOverrideINTEL(
- VkCommandBuffer commandBuffer,
- const VkPerformanceOverrideInfoINTEL* pOverrideInfo);
-
-VKAPI_ATTR VkResult VKAPI_CALL AcquirePerformanceConfigurationINTEL(
- VkDevice device,
- const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo,
- VkPerformanceConfigurationINTEL* pConfiguration);
-
-VKAPI_ATTR VkResult VKAPI_CALL ReleasePerformanceConfigurationINTEL(
- VkDevice device,
- VkPerformanceConfigurationINTEL configuration);
-
-VKAPI_ATTR VkResult VKAPI_CALL QueueSetPerformanceConfigurationINTEL(
- VkQueue queue,
- VkPerformanceConfigurationINTEL configuration);
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPerformanceParameterINTEL(
- VkDevice device,
- VkPerformanceParameterTypeINTEL parameter,
- VkPerformanceValueINTEL* pValue);
-
-
-
-VKAPI_ATTR void VKAPI_CALL SetLocalDimmingAMD(
- VkDevice device,
- VkSwapchainKHR swapChain,
- VkBool32 localDimmingEnable);
-
-#ifdef VK_USE_PLATFORM_FUCHSIA
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateImagePipeSurfaceFUCHSIA(
- VkInstance instance,
- const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-#endif // VK_USE_PLATFORM_FUCHSIA
-
-#ifdef VK_USE_PLATFORM_METAL_EXT
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateMetalSurfaceEXT(
- VkInstance instance,
- const VkMetalSurfaceCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-#endif // VK_USE_PLATFORM_METAL_EXT
-
-
-
-
-
-
-
-
-
-
-
-
-VKAPI_ATTR VkDeviceAddress VKAPI_CALL GetBufferDeviceAddressEXT(
- VkDevice device,
- const VkBufferDeviceAddressInfoEXT* pInfo);
-
-
-
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceCooperativeMatrixPropertiesNV(
- VkPhysicalDevice physicalDevice,
- uint32_t* pPropertyCount,
- VkCooperativeMatrixPropertiesNV* pProperties);
-
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
- VkPhysicalDevice physicalDevice,
- uint32_t* pCombinationCount,
- VkFramebufferMixedSamplesCombinationNV* pCombinations);
-
-
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfacePresentModes2EXT(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
- uint32_t* pPresentModeCount,
- VkPresentModeKHR* pPresentModes);
-
-VKAPI_ATTR VkResult VKAPI_CALL AcquireFullScreenExclusiveModeEXT(
- VkDevice device,
- VkSwapchainKHR swapchain);
-
-VKAPI_ATTR VkResult VKAPI_CALL ReleaseFullScreenExclusiveModeEXT(
- VkDevice device,
- VkSwapchainKHR swapchain);
-
-VKAPI_ATTR VkResult VKAPI_CALL GetDeviceGroupSurfacePresentModes2EXT(
- VkDevice device,
- const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
- VkDeviceGroupPresentModeFlagsKHR* pModes);
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-
-VKAPI_ATTR VkResult VKAPI_CALL CreateHeadlessSurfaceEXT(
- VkInstance instance,
- const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-
-
-VKAPI_ATTR void VKAPI_CALL CmdSetLineStippleEXT(
- VkCommandBuffer commandBuffer,
- uint32_t lineStippleFactor,
- uint16_t lineStipplePattern);
-
-
-VKAPI_ATTR void VKAPI_CALL ResetQueryPoolEXT(
- VkDevice device,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount);
-
-
-
-
-
-
-
-// Layer object type identifiers
-enum LayerObjectTypeId {
- LayerObjectTypeInstance, // Container for an instance dispatch object
- LayerObjectTypeDevice, // Container for a device dispatch object
- LayerObjectTypeThreading, // Instance or device threading layer object
- LayerObjectTypeParameterValidation, // Instance or device parameter validation layer object
- LayerObjectTypeObjectTracker, // Instance or device object tracker layer object
- LayerObjectTypeCoreValidation, // Instance or device core validation layer object
- LayerObjectTypeBestPractices, // Instance or device best practices layer object
-};
-
-struct TEMPLATE_STATE {
- VkDescriptorUpdateTemplateKHR desc_update_template;
- safe_VkDescriptorUpdateTemplateCreateInfo create_info;
-
- TEMPLATE_STATE(VkDescriptorUpdateTemplateKHR update_template, safe_VkDescriptorUpdateTemplateCreateInfo *pCreateInfo)
- : desc_update_template(update_template), create_info(*pCreateInfo) {}
-};
-
-class LAYER_PHYS_DEV_PROPERTIES {
-public:
- VkPhysicalDeviceProperties properties;
- std::vector<VkQueueFamilyProperties> queue_family_properties;
-};
-
-typedef enum ValidationCheckDisables {
- VALIDATION_CHECK_DISABLE_COMMAND_BUFFER_STATE,
- VALIDATION_CHECK_DISABLE_OBJECT_IN_USE,
- VALIDATION_CHECK_DISABLE_IDLE_DESCRIPTOR_SET,
- VALIDATION_CHECK_DISABLE_PUSH_CONSTANT_RANGE,
- VALIDATION_CHECK_DISABLE_QUERY_VALIDATION,
- VALIDATION_CHECK_DISABLE_IMAGE_LAYOUT_VALIDATION,
-} ValidationCheckDisables;
-
-typedef enum VkValidationFeatureEnable {
- VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES,
-} VkValidationFeatureEnable;
-
-
-// The CHECK_DISABLED struct is a container for bools that can block validation checks from being performed.
-// These bools are all "false" by default, meaning that all checks are enabled. Disables can be specified
-// via the vk_layer_settings.txt config file or at CreateInstance time via the VK_EXT_validation_features
-// extension, which can selectively disable individual checks.
-struct CHECK_DISABLED {
- bool command_buffer_state; // Skip command buffer state validation
- bool object_in_use; // Skip all object in_use checking
- bool idle_descriptor_set; // Skip check to verify that descriptor set is not in-use
- bool push_constant_range; // Skip push constant range checks
- bool query_validation; // Disable all core validation query-related checks
- bool image_layout_validation; // Disable image layout validation
- bool object_tracking; // Disable object lifetime validation
- bool core_checks; // Disable core validation checks
- bool thread_safety; // Disable thread safety validation
- bool stateless_checks; // Disable stateless validation checks
- bool handle_wrapping; // Disable unique handles/handle wrapping
- bool shader_validation; // Skip validation for shaders
-
- void SetAll(bool value) { std::fill(&command_buffer_state, &shader_validation + 1, value); }
-};
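-
-// Illustrative sketch (application-side code, not part of this header): selectively disabling a check at
-// CreateInstance time through VK_EXT_validation_features, as described above. The struct and enum names
-// come from the Vulkan headers; instance_create_info stands for the application's VkInstanceCreateInfo,
-// and the final comment describes how the layer is expected to map the request onto CHECK_DISABLED.
-//
-//   VkValidationFeatureDisableEXT disables[] = {VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT};
-//   VkValidationFeaturesEXT features = {VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT};
-//   features.disabledValidationFeatureCount = 1;
-//   features.pDisabledValidationFeatures = disables;
-//   instance_create_info.pNext = &features;  // maps to CHECK_DISABLED::thread_safety in this layer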
-
-struct CHECK_ENABLED {
- bool gpu_validation;
- bool gpu_validation_reserve_binding_slot;
- bool best_practices;
-
- void SetAll(bool value) { std::fill(&gpu_validation, &gpu_validation_reserve_binding_slot + 1, value); }
-};
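-
-// Illustrative sketch (application-side code, not part of this header): the enable path uses the same
-// extension. VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT is the Vulkan-header spelling of the
-// best-practices enable, which the layer is expected to surface as CHECK_ENABLED::best_practices;
-// instance_create_info again stands for the application's VkInstanceCreateInfo.
-//
-//   VkValidationFeatureEnableEXT enables[] = {VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT};
-//   VkValidationFeaturesEXT features = {VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT};
-//   features.enabledValidationFeatureCount = 1;
-//   features.pEnabledValidationFeatures = enables;
-//   instance_create_info.pNext = &features;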
-
-// Layer chassis validation object base class definition
-class ValidationObject {
- public:
- uint32_t api_version;
- debug_report_data* report_data = nullptr;
- std::vector<VkDebugReportCallbackEXT> logging_callback;
- std::vector<VkDebugUtilsMessengerEXT> logging_messenger;
-
- VkLayerInstanceDispatchTable instance_dispatch_table;
- VkLayerDispatchTable device_dispatch_table;
-
- InstanceExtensions instance_extensions;
- DeviceExtensions device_extensions = {};
- CHECK_DISABLED disabled = {};
- CHECK_ENABLED enabled = {};
-
- VkInstance instance = VK_NULL_HANDLE;
- VkPhysicalDevice physical_device = VK_NULL_HANDLE;
- VkDevice device = VK_NULL_HANDLE;
- LAYER_PHYS_DEV_PROPERTIES phys_dev_properties = {};
-
- std::vector<ValidationObject*> object_dispatch;
- LayerObjectTypeId container_type;
-
- std::string layer_name = "CHASSIS";
-
- // Constructor
- ValidationObject(){};
- // Destructor
- virtual ~ValidationObject() {};
-
- std::mutex validation_object_mutex;
- virtual std::unique_lock<std::mutex> write_lock() {
- return std::unique_lock<std::mutex>(validation_object_mutex);
- }
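-
-    // Typical (illustrative) use in a derived validation object's hook implementation; the lock guards
-    // this object's state for the duration of a single intercepted call:
-    //
-    //   auto lock = write_lock();
-    //   // ... read/modify per-object tracking state ...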
-
- ValidationObject* GetValidationObject(std::vector<ValidationObject*>& object_dispatch, LayerObjectTypeId object_type) {
- for (auto validation_object : object_dispatch) {
- if (validation_object->container_type == object_type) {
- return validation_object;
- }
- }
- return nullptr;
- };
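-
-    // Illustrative use (hypothetical call site): locate a specific layer object in the dispatch chain by
-    // its container type, then downcast to the concrete class before use:
-    //
-    //   ValidationObject *core = GetValidationObject(object_dispatch, LayerObjectTypeCoreValidation);
-    //   if (core) { /* static_cast to the core-validation class defined elsewhere */ }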
-
- // Handle Wrapping Data
- // Reverse map display handles
- vl_concurrent_unordered_map<VkDisplayKHR, uint64_t, 0> display_id_reverse_mapping;
- // Wrapping Descriptor Template Update structures requires access to the template createinfo structs
- std::unordered_map<uint64_t, std::unique_ptr<TEMPLATE_STATE>> desc_template_createinfo_map;
- struct SubpassesUsageStates {
- std::unordered_set<uint32_t> subpasses_using_color_attachment;
- std::unordered_set<uint32_t> subpasses_using_depthstencil_attachment;
- };
- // Uses unwrapped handles
- std::unordered_map<VkRenderPass, SubpassesUsageStates> renderpasses_states;
- // Map of wrapped swapchain handles to arrays of wrapped swapchain image IDs
- // Each swapchain has an immutable list of wrapped swapchain image IDs -- always return these IDs if they exist
- std::unordered_map<VkSwapchainKHR, std::vector<VkImage>> swapchain_wrapped_image_handle_map;
- // Map of wrapped descriptor pools to set of wrapped descriptor sets allocated from each pool
- std::unordered_map<VkDescriptorPool, std::unordered_set<VkDescriptorSet>> pool_descriptor_sets_map;
-
-
- // Unwrap a handle.
- template <typename HandleType>
- HandleType Unwrap(HandleType wrappedHandle) {
- auto iter = unique_id_mapping.find(reinterpret_cast<uint64_t const &>(wrappedHandle));
- if (iter == unique_id_mapping.end())
- return (HandleType)0;
- return (HandleType)iter->second;
- }
-
- // Wrap a newly created handle with a new unique ID, and return the new ID.
- template <typename HandleType>
- HandleType WrapNew(HandleType newlyCreatedHandle) {
- auto unique_id = global_unique_id++;
- unique_id_mapping.insert_or_assign(unique_id, reinterpret_cast<uint64_t const &>(newlyCreatedHandle));
- return (HandleType)unique_id;
- }
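-
-    // Illustrative round trip (hypothetical handles): WrapNew() hands out a unique ID in place of the
-    // driver handle, and Unwrap() recovers the driver handle before dispatching down the chain.
-    // Unwrap() of an ID that is not in the map returns a null handle of the requested type.
-    //
-    //   VkSampler wrapped = WrapNew(driver_sampler);  // returned to the application
-    //   VkSampler real    = Unwrap(wrapped);          // used when calling the next layer/driver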
-
- // Specialized handling for VkDisplayKHR. Adds an entry to enable reverse-lookup.
- VkDisplayKHR WrapDisplay(VkDisplayKHR newlyCreatedHandle, ValidationObject *map_data) {
- auto unique_id = global_unique_id++;
- unique_id_mapping.insert_or_assign(unique_id, reinterpret_cast<uint64_t const &>(newlyCreatedHandle));
- map_data->display_id_reverse_mapping.insert_or_assign(newlyCreatedHandle, unique_id);
- return (VkDisplayKHR)unique_id;
- }
-
- // VkDisplayKHR objects don't have a single point of creation, so we need to see if one already exists in the map before
- // creating another.
- VkDisplayKHR MaybeWrapDisplay(VkDisplayKHR handle, ValidationObject *map_data) {
- // See if this display is already known
- auto it = map_data->display_id_reverse_mapping.find(handle);
- if (it != map_data->display_id_reverse_mapping.end()) return (VkDisplayKHR)it->second;
- // Unknown, so wrap
- return WrapDisplay(handle, map_data);
- }
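-
-    // Illustrative note (assumption about call sites, which live in the generated dispatch code):
-    // queries that return VkDisplayKHR handles, e.g. vkGetPhysicalDeviceDisplayPropertiesKHR, are
-    // expected to route each returned handle through MaybeWrapDisplay() so that repeated queries
-    // yield the same wrapped ID for the same underlying display.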
-
- // Pre/post hook point declarations
- virtual bool PreCallValidateCreateInstance(const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance) { return false; };
- virtual void PreCallRecordCreateInstance(const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance) {};
- virtual void PostCallRecordCreateInstance(const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance, VkResult result) {};
- virtual bool PreCallValidateDestroyInstance(VkInstance instance, const VkAllocationCallbacks* pAllocator) { return false; };
- virtual void PreCallRecordDestroyInstance(VkInstance instance, const VkAllocationCallbacks* pAllocator) {};
- virtual void PostCallRecordDestroyInstance(VkInstance instance, const VkAllocationCallbacks* pAllocator) {};
- virtual bool PreCallValidateEnumeratePhysicalDevices(VkInstance instance, uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices) { return false; };
- virtual void PreCallRecordEnumeratePhysicalDevices(VkInstance instance, uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices) {};
- virtual void PostCallRecordEnumeratePhysicalDevices(VkInstance instance, uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices, VkResult result) {};
- virtual bool PreCallValidateGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures) {};
- virtual void PostCallRecordGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures) {};
- virtual bool PreCallValidateGetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties) {};
- virtual void PostCallRecordGetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties) {};
- virtual bool PreCallValidateGetPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties) {};
- virtual void PostCallRecordGetPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties, VkResult result) {};
- virtual bool PreCallValidateGetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties* pProperties) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties* pProperties) {};
- virtual void PostCallRecordGetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties* pProperties) {};
- virtual bool PreCallValidateGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties* pQueueFamilyProperties) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties* pQueueFamilyProperties) {};
- virtual void PostCallRecordGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties* pQueueFamilyProperties) {};
- virtual bool PreCallValidateGetPhysicalDeviceMemoryProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties* pMemoryProperties) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceMemoryProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties* pMemoryProperties) {};
- virtual void PostCallRecordGetPhysicalDeviceMemoryProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties* pMemoryProperties) {};
- virtual bool PreCallValidateGetInstanceProcAddr(VkInstance instance, const char* pName) { return false; };
- virtual void PreCallRecordGetInstanceProcAddr(VkInstance instance, const char* pName) {};
- virtual void PostCallRecordGetInstanceProcAddr(VkInstance instance, const char* pName) {};
- virtual bool PreCallValidateGetDeviceProcAddr(VkDevice device, const char* pName) { return false; };
- virtual void PreCallRecordGetDeviceProcAddr(VkDevice device, const char* pName) {};
- virtual void PostCallRecordGetDeviceProcAddr(VkDevice device, const char* pName) {};
- virtual bool PreCallValidateCreateDevice(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice) { return false; };
- virtual void PreCallRecordCreateDevice(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice) {};
- virtual void PostCallRecordCreateDevice(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice, VkResult result) {};
- virtual bool PreCallValidateDestroyDevice(VkDevice device, const VkAllocationCallbacks* pAllocator) { return false; };
- virtual void PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks* pAllocator) {};
- virtual void PostCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks* pAllocator) {};
- virtual bool PreCallValidateEnumerateInstanceExtensionProperties(const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties) { return false; };
- virtual void PreCallRecordEnumerateInstanceExtensionProperties(const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties) {};
- virtual void PostCallRecordEnumerateInstanceExtensionProperties(const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties, VkResult result) {};
- virtual bool PreCallValidateEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties) { return false; };
- virtual void PreCallRecordEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties) {};
- virtual void PostCallRecordEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties, VkResult result) {};
- virtual bool PreCallValidateEnumerateInstanceLayerProperties(uint32_t* pPropertyCount, VkLayerProperties* pProperties) { return false; };
- virtual void PreCallRecordEnumerateInstanceLayerProperties(uint32_t* pPropertyCount, VkLayerProperties* pProperties) {};
- virtual void PostCallRecordEnumerateInstanceLayerProperties(uint32_t* pPropertyCount, VkLayerProperties* pProperties, VkResult result) {};
- virtual bool PreCallValidateEnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkLayerProperties* pProperties) { return false; };
- virtual void PreCallRecordEnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkLayerProperties* pProperties) {};
- virtual void PostCallRecordEnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkLayerProperties* pProperties, VkResult result) {};
- virtual bool PreCallValidateGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue) { return false; };
- virtual void PreCallRecordGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue) {};
- virtual void PostCallRecordGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue) {};
- virtual bool PreCallValidateQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence) { return false; };
- virtual void PreCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence) {};
- virtual void PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence, VkResult result) {};
- virtual bool PreCallValidateQueueWaitIdle(VkQueue queue) { return false; };
- virtual void PreCallRecordQueueWaitIdle(VkQueue queue) {};
- virtual void PostCallRecordQueueWaitIdle(VkQueue queue, VkResult result) {};
- virtual bool PreCallValidateDeviceWaitIdle(VkDevice device) { return false; };
- virtual void PreCallRecordDeviceWaitIdle(VkDevice device) {};
- virtual void PostCallRecordDeviceWaitIdle(VkDevice device, VkResult result) {};
- virtual bool PreCallValidateAllocateMemory(VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory) { return false; };
- virtual void PreCallRecordAllocateMemory(VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory) {};
- virtual void PostCallRecordAllocateMemory(VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory, VkResult result) {};
- virtual bool PreCallValidateFreeMemory(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator) { return false; };
- virtual void PreCallRecordFreeMemory(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator) {};
- virtual void PostCallRecordFreeMemory(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator) {};
- virtual bool PreCallValidateMapMemory(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData) { return false; };
- virtual void PreCallRecordMapMemory(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData) {};
- virtual void PostCallRecordMapMemory(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData, VkResult result) {};
- virtual bool PreCallValidateUnmapMemory(VkDevice device, VkDeviceMemory memory) { return false; };
- virtual void PreCallRecordUnmapMemory(VkDevice device, VkDeviceMemory memory) {};
- virtual void PostCallRecordUnmapMemory(VkDevice device, VkDeviceMemory memory) {};
- virtual bool PreCallValidateFlushMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges) { return false; };
- virtual void PreCallRecordFlushMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges) {};
- virtual void PostCallRecordFlushMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges, VkResult result) {};
- virtual bool PreCallValidateInvalidateMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges) { return false; };
- virtual void PreCallRecordInvalidateMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges) {};
- virtual void PostCallRecordInvalidateMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges, VkResult result) {};
- virtual bool PreCallValidateGetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes) { return false; };
- virtual void PreCallRecordGetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes) {};
- virtual void PostCallRecordGetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes) {};
- virtual bool PreCallValidateBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset) { return false; };
- virtual void PreCallRecordBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset) {};
- virtual void PostCallRecordBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset, VkResult result) {};
- virtual bool PreCallValidateBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset) { return false; };
- virtual void PreCallRecordBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset) {};
- virtual void PostCallRecordBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset, VkResult result) {};
- virtual bool PreCallValidateGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer, VkMemoryRequirements* pMemoryRequirements) { return false; };
- virtual void PreCallRecordGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer, VkMemoryRequirements* pMemoryRequirements) {};
- virtual void PostCallRecordGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer, VkMemoryRequirements* pMemoryRequirements) {};
- virtual bool PreCallValidateGetImageMemoryRequirements(VkDevice device, VkImage image, VkMemoryRequirements* pMemoryRequirements) { return false; };
- virtual void PreCallRecordGetImageMemoryRequirements(VkDevice device, VkImage image, VkMemoryRequirements* pMemoryRequirements) {};
- virtual void PostCallRecordGetImageMemoryRequirements(VkDevice device, VkImage image, VkMemoryRequirements* pMemoryRequirements) {};
- virtual bool PreCallValidateGetImageSparseMemoryRequirements(VkDevice device, VkImage image, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements* pSparseMemoryRequirements) { return false; };
- virtual void PreCallRecordGetImageSparseMemoryRequirements(VkDevice device, VkImage image, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements* pSparseMemoryRequirements) {};
- virtual void PostCallRecordGetImageSparseMemoryRequirements(VkDevice device, VkImage image, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements* pSparseMemoryRequirements) {};
- virtual bool PreCallValidateGetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pPropertyCount, VkSparseImageFormatProperties* pProperties) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pPropertyCount, VkSparseImageFormatProperties* pProperties) {};
- virtual void PostCallRecordGetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pPropertyCount, VkSparseImageFormatProperties* pProperties) {};
- virtual bool PreCallValidateQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence) { return false; };
- virtual void PreCallRecordQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence) {};
- virtual void PostCallRecordQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence, VkResult result) {};
- virtual bool PreCallValidateCreateFence(VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence) { return false; };
- virtual void PreCallRecordCreateFence(VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence) {};
- virtual void PostCallRecordCreateFence(VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence, VkResult result) {};
- virtual bool PreCallValidateDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator) { return false; };
- virtual void PreCallRecordDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator) {};
- virtual void PostCallRecordDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator) {};
- virtual bool PreCallValidateResetFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences) { return false; };
- virtual void PreCallRecordResetFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences) {};
- virtual void PostCallRecordResetFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkResult result) {};
- virtual bool PreCallValidateGetFenceStatus(VkDevice device, VkFence fence) { return false; };
- virtual void PreCallRecordGetFenceStatus(VkDevice device, VkFence fence) {};
- virtual void PostCallRecordGetFenceStatus(VkDevice device, VkFence fence, VkResult result) {};
- virtual bool PreCallValidateWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout) { return false; };
- virtual void PreCallRecordWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout) {};
- virtual void PostCallRecordWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout, VkResult result) {};
- virtual bool PreCallValidateCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSemaphore* pSemaphore) { return false; };
- virtual void PreCallRecordCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSemaphore* pSemaphore) {};
- virtual void PostCallRecordCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSemaphore* pSemaphore, VkResult result) {};
- virtual bool PreCallValidateDestroySemaphore(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator) { return false; };
- virtual void PreCallRecordDestroySemaphore(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator) {};
- virtual void PostCallRecordDestroySemaphore(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator) {};
- virtual bool PreCallValidateCreateEvent(VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkEvent* pEvent) { return false; };
- virtual void PreCallRecordCreateEvent(VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkEvent* pEvent) {};
- virtual void PostCallRecordCreateEvent(VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkEvent* pEvent, VkResult result) {};
- virtual bool PreCallValidateDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks* pAllocator) { return false; };
- virtual void PreCallRecordDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks* pAllocator) {};
- virtual void PostCallRecordDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks* pAllocator) {};
- virtual bool PreCallValidateGetEventStatus(VkDevice device, VkEvent event) { return false; };
- virtual void PreCallRecordGetEventStatus(VkDevice device, VkEvent event) {};
- virtual void PostCallRecordGetEventStatus(VkDevice device, VkEvent event, VkResult result) {};
- virtual bool PreCallValidateSetEvent(VkDevice device, VkEvent event) { return false; };
- virtual void PreCallRecordSetEvent(VkDevice device, VkEvent event) {};
- virtual void PostCallRecordSetEvent(VkDevice device, VkEvent event, VkResult result) {};
- virtual bool PreCallValidateResetEvent(VkDevice device, VkEvent event) { return false; };
- virtual void PreCallRecordResetEvent(VkDevice device, VkEvent event) {};
- virtual void PostCallRecordResetEvent(VkDevice device, VkEvent event, VkResult result) {};
- virtual bool PreCallValidateCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool) { return false; };
- virtual void PreCallRecordCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool) {};
- virtual void PostCallRecordCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool, VkResult result) {};
- virtual bool PreCallValidateDestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator) { return false; };
- virtual void PreCallRecordDestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator) {};
- virtual void PostCallRecordDestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator) {};
- virtual bool PreCallValidateGetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags) { return false; };
- virtual void PreCallRecordGetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags) {};
- virtual void PostCallRecordGetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags, VkResult result) {};
- virtual bool PreCallValidateCreateBuffer(VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer) { return false; };
- virtual void PreCallRecordCreateBuffer(VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer) {};
- virtual void PostCallRecordCreateBuffer(VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer, VkResult result) {};
- virtual bool PreCallValidateDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator) { return false; };
- virtual void PreCallRecordDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator) {};
- virtual void PostCallRecordDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator) {};
- virtual bool PreCallValidateCreateBufferView(VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferView* pView) { return false; };
- virtual void PreCallRecordCreateBufferView(VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferView* pView) {};
- virtual void PostCallRecordCreateBufferView(VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferView* pView, VkResult result) {};
- virtual bool PreCallValidateDestroyBufferView(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* pAllocator) { return false; };
- virtual void PreCallRecordDestroyBufferView(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* pAllocator) {};
- virtual void PostCallRecordDestroyBufferView(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* pAllocator) {};
- virtual bool PreCallValidateCreateImage(VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage) { return false; };
- virtual void PreCallRecordCreateImage(VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage) {};
- virtual void PostCallRecordCreateImage(VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage, VkResult result) {};
- virtual bool PreCallValidateDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator) { return false; };
- virtual void PreCallRecordDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator) {};
- virtual void PostCallRecordDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator) {};
- virtual bool PreCallValidateGetImageSubresourceLayout(VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout) { return false; };
- virtual void PreCallRecordGetImageSubresourceLayout(VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout) {};
- virtual void PostCallRecordGetImageSubresourceLayout(VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout) {};
- virtual bool PreCallValidateCreateImageView(VkDevice device, const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImageView* pView) { return false; };
- virtual void PreCallRecordCreateImageView(VkDevice device, const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImageView* pView) {};
- virtual void PostCallRecordCreateImageView(VkDevice device, const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImageView* pView, VkResult result) {};
- virtual bool PreCallValidateDestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks* pAllocator) { return false; };
- virtual void PreCallRecordDestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks* pAllocator) {};
- virtual void PostCallRecordDestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks* pAllocator) {};
- virtual bool PreCallValidateCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule) { return false; };
- virtual void PreCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule) {};
- virtual void PostCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule, VkResult result) {};
- virtual bool PreCallValidateDestroyShaderModule(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* pAllocator) { return false; };
- virtual void PreCallRecordDestroyShaderModule(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* pAllocator) {};
- virtual void PostCallRecordDestroyShaderModule(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* pAllocator) {};
- virtual bool PreCallValidateCreatePipelineCache(VkDevice device, const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineCache* pPipelineCache) { return false; };
- virtual void PreCallRecordCreatePipelineCache(VkDevice device, const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineCache* pPipelineCache) {};
- virtual void PostCallRecordCreatePipelineCache(VkDevice device, const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineCache* pPipelineCache, VkResult result) {};
- virtual bool PreCallValidateDestroyPipelineCache(VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* pAllocator) { return false; };
- virtual void PreCallRecordDestroyPipelineCache(VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* pAllocator) {};
- virtual void PostCallRecordDestroyPipelineCache(VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* pAllocator) {};
- virtual bool PreCallValidateGetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData) { return false; };
- virtual void PreCallRecordGetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData) {};
- virtual void PostCallRecordGetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData, VkResult result) {};
- virtual bool PreCallValidateMergePipelineCaches(VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches) { return false; };
- virtual void PreCallRecordMergePipelineCaches(VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches) {};
- virtual void PostCallRecordMergePipelineCaches(VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches, VkResult result) {};
- virtual bool PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines) { return false; };
- virtual void PreCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines) {};
- virtual void PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, VkResult result) {};
- virtual bool PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines) { return false; };
- virtual void PreCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines) {};
- virtual void PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, VkResult result) {};
- virtual bool PreCallValidateDestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator) { return false; };
- virtual void PreCallRecordDestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator) {};
- virtual void PostCallRecordDestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator) {};
- virtual bool PreCallValidateCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout) { return false; };
- virtual void PreCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout) {};
- virtual void PostCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout, VkResult result) {};
- virtual bool PreCallValidateDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* pAllocator) { return false; };
- virtual void PreCallRecordDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* pAllocator) {};
- virtual void PostCallRecordDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* pAllocator) {};
- virtual bool PreCallValidateCreateSampler(VkDevice device, const VkSamplerCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSampler* pSampler) { return false; };
- virtual void PreCallRecordCreateSampler(VkDevice device, const VkSamplerCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSampler* pSampler) {};
- virtual void PostCallRecordCreateSampler(VkDevice device, const VkSamplerCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSampler* pSampler, VkResult result) {};
- virtual bool PreCallValidateDestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks* pAllocator) { return false; };
- virtual void PreCallRecordDestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks* pAllocator) {};
- virtual void PostCallRecordDestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks* pAllocator) {};
- virtual bool PreCallValidateCreateDescriptorSetLayout(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout) { return false; };
- virtual void PreCallRecordCreateDescriptorSetLayout(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout) {};
- virtual void PostCallRecordCreateDescriptorSetLayout(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout, VkResult result) {};
- virtual bool PreCallValidateDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* pAllocator) { return false; };
- virtual void PreCallRecordDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* pAllocator) {};
- virtual void PostCallRecordDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* pAllocator) {};
- virtual bool PreCallValidateCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool) { return false; };
- virtual void PreCallRecordCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool) {};
- virtual void PostCallRecordCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool, VkResult result) {};
- virtual bool PreCallValidateDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator) { return false; };
- virtual void PreCallRecordDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator) {};
- virtual void PostCallRecordDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator) {};
- virtual bool PreCallValidateResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags) { return false; };
- virtual void PreCallRecordResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags) {};
- virtual void PostCallRecordResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags, VkResult result) {};
- virtual bool PreCallValidateAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets) { return false; };
- virtual void PreCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets) {};
- virtual void PostCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets, VkResult result) {};
- virtual bool PreCallValidateFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets) { return false; };
- virtual void PreCallRecordFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets) {};
- virtual void PostCallRecordFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets, VkResult result) {};
- virtual bool PreCallValidateUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet* pDescriptorCopies) { return false; };
- virtual void PreCallRecordUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet* pDescriptorCopies) {};
- virtual void PostCallRecordUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet* pDescriptorCopies) {};
- virtual bool PreCallValidateCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer) { return false; };
- virtual void PreCallRecordCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer) {};
- virtual void PostCallRecordCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer, VkResult result) {};
- virtual bool PreCallValidateDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* pAllocator) { return false; };
- virtual void PreCallRecordDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* pAllocator) {};
- virtual void PostCallRecordDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* pAllocator) {};
- virtual bool PreCallValidateCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass) { return false; };
- virtual void PreCallRecordCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass) {};
- virtual void PostCallRecordCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass, VkResult result) {};
- virtual bool PreCallValidateDestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator) { return false; };
- virtual void PreCallRecordDestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator) {};
- virtual void PostCallRecordDestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator) {};
- virtual bool PreCallValidateGetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity) { return false; };
- virtual void PreCallRecordGetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity) {};
- virtual void PostCallRecordGetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity) {};
- virtual bool PreCallValidateCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool) { return false; };
- virtual void PreCallRecordCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool) {};
- virtual void PostCallRecordCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool, VkResult result) {};
- virtual bool PreCallValidateDestroyCommandPool(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator) { return false; };
- virtual void PreCallRecordDestroyCommandPool(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator) {};
- virtual void PostCallRecordDestroyCommandPool(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator) {};
- virtual bool PreCallValidateResetCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags) { return false; };
- virtual void PreCallRecordResetCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags) {};
- virtual void PostCallRecordResetCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags, VkResult result) {};
- virtual bool PreCallValidateAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers) { return false; };
- virtual void PreCallRecordAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers) {};
- virtual void PostCallRecordAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers, VkResult result) {};
- virtual bool PreCallValidateFreeCommandBuffers(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers) { return false; };
- virtual void PreCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers) {};
- virtual void PostCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers) {};
- virtual bool PreCallValidateBeginCommandBuffer(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo) { return false; };
- virtual void PreCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo) {};
- virtual void PostCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo, VkResult result) {};
- virtual bool PreCallValidateEndCommandBuffer(VkCommandBuffer commandBuffer) { return false; };
- virtual void PreCallRecordEndCommandBuffer(VkCommandBuffer commandBuffer) {};
- virtual void PostCallRecordEndCommandBuffer(VkCommandBuffer commandBuffer, VkResult result) {};
- virtual bool PreCallValidateResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags) { return false; };
- virtual void PreCallRecordResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags) {};
- virtual void PostCallRecordResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags, VkResult result) {};
- virtual bool PreCallValidateCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline) { return false; };
- virtual void PreCallRecordCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline) {};
- virtual void PostCallRecordCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline) {};
- virtual bool PreCallValidateCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports) { return false; };
- virtual void PreCallRecordCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports) {};
- virtual void PostCallRecordCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports) {};
- virtual bool PreCallValidateCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors) { return false; };
- virtual void PreCallRecordCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors) {};
- virtual void PostCallRecordCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors) {};
- virtual bool PreCallValidateCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) { return false; };
- virtual void PreCallRecordCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {};
- virtual void PostCallRecordCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {};
- virtual bool PreCallValidateCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor) { return false; };
- virtual void PreCallRecordCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor) {};
- virtual void PostCallRecordCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor) {};
- virtual bool PreCallValidateCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) { return false; };
- virtual void PreCallRecordCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {};
- virtual void PostCallRecordCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {};
- virtual bool PreCallValidateCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds) { return false; };
- virtual void PreCallRecordCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds) {};
- virtual void PostCallRecordCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds) {};
- virtual bool PreCallValidateCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask) { return false; };
- virtual void PreCallRecordCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask) {};
- virtual void PostCallRecordCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask) {};
- virtual bool PreCallValidateCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask) { return false; };
- virtual void PreCallRecordCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask) {};
- virtual void PostCallRecordCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask) {};
- virtual bool PreCallValidateCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference) { return false; };
- virtual void PreCallRecordCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference) {};
- virtual void PostCallRecordCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference) {};
- virtual bool PreCallValidateCmdBindDescriptorSets(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets) { return false; };
- virtual void PreCallRecordCmdBindDescriptorSets(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets) {};
- virtual void PostCallRecordCmdBindDescriptorSets(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets) {};
- virtual bool PreCallValidateCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType) { return false; };
- virtual void PreCallRecordCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType) {};
- virtual void PostCallRecordCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType) {};
- virtual bool PreCallValidateCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets) { return false; };
- virtual void PreCallRecordCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets) {};
- virtual void PostCallRecordCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets) {};
- virtual bool PreCallValidateCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance) { return false; };
- virtual void PreCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance) {};
- virtual void PostCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance) {};
- virtual bool PreCallValidateCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) { return false; };
- virtual void PreCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) {};
- virtual void PostCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) {};
- virtual bool PreCallValidateCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride) { return false; };
- virtual void PreCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {};
- virtual void PostCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {};
- virtual bool PreCallValidateCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride) { return false; };
- virtual void PreCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {};
- virtual void PostCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {};
- virtual bool PreCallValidateCmdDispatch(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ) { return false; };
- virtual void PreCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ) {};
- virtual void PostCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ) {};
- virtual bool PreCallValidateCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) { return false; };
- virtual void PreCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) {};
- virtual void PostCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) {};
- virtual bool PreCallValidateCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy* pRegions) { return false; };
- virtual void PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy* pRegions) {};
- virtual void PostCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy* pRegions) {};
- virtual bool PreCallValidateCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy* pRegions) { return false; };
- virtual void PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy* pRegions) {};
- virtual void PostCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy* pRegions) {};
- virtual bool PreCallValidateCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter) { return false; };
- virtual void PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter) {};
- virtual void PostCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter) {};
- virtual bool PreCallValidateCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions) { return false; };
- virtual void PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions) {};
- virtual void PostCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions) {};
- virtual bool PreCallValidateCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions) { return false; };
- virtual void PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions) {};
- virtual void PostCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions) {};
- virtual bool PreCallValidateCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void* pData) { return false; };
- virtual void PreCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void* pData) {};
- virtual void PostCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void* pData) {};
- virtual bool PreCallValidateCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data) { return false; };
- virtual void PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data) {};
- virtual void PostCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data) {};
- virtual bool PreCallValidateCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) { return false; };
- virtual void PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) {};
- virtual void PostCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) {};
- virtual bool PreCallValidateCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) { return false; };
- virtual void PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) {};
- virtual void PostCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) {};
- virtual bool PreCallValidateCmdClearAttachments(VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects) { return false; };
- virtual void PreCallRecordCmdClearAttachments(VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects) {};
- virtual void PostCallRecordCmdClearAttachments(VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects) {};
- virtual bool PreCallValidateCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve* pRegions) { return false; };
- virtual void PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve* pRegions) {};
- virtual void PostCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve* pRegions) {};
- virtual bool PreCallValidateCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) { return false; };
- virtual void PreCallRecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) {};
- virtual void PostCallRecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) {};
- virtual bool PreCallValidateCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) { return false; };
- virtual void PreCallRecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) {};
- virtual void PostCallRecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) {};
- virtual bool PreCallValidateCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) { return false; };
- virtual void PreCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) {};
- virtual void PostCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) {};
- virtual bool PreCallValidateCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) { return false; };
- virtual void PreCallRecordCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) {};
- virtual void PostCallRecordCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) {};
- virtual bool PreCallValidateCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags) { return false; };
- virtual void PreCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags) {};
- virtual void PostCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags) {};
- virtual bool PreCallValidateCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query) { return false; };
- virtual void PreCallRecordCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query) {};
- virtual void PostCallRecordCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query) {};
- virtual bool PreCallValidateCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount) { return false; };
- virtual void PreCallRecordCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount) {};
- virtual void PostCallRecordCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount) {};
- virtual bool PreCallValidateCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query) { return false; };
- virtual void PreCallRecordCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query) {};
- virtual void PostCallRecordCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query) {};
- virtual bool PreCallValidateCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags) { return false; };
- virtual void PreCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags) {};
- virtual void PostCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags) {};
- virtual bool PreCallValidateCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues) { return false; };
- virtual void PreCallRecordCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues) {};
- virtual void PostCallRecordCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues) {};
- virtual bool PreCallValidateCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents) { return false; };
- virtual void PreCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents) {};
- virtual void PostCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents) {};
- virtual bool PreCallValidateCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) { return false; };
- virtual void PreCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {};
- virtual void PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {};
- virtual bool PreCallValidateCmdEndRenderPass(VkCommandBuffer commandBuffer) { return false; };
- virtual void PreCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) {};
- virtual void PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) {};
- virtual bool PreCallValidateCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers) { return false; };
- virtual void PreCallRecordCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers) {};
- virtual void PostCallRecordCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers) {};
- virtual bool PreCallValidateBindBufferMemory2(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos) { return false; };
- virtual void PreCallRecordBindBufferMemory2(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos) {};
- virtual void PostCallRecordBindBufferMemory2(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos, VkResult result) {};
- virtual bool PreCallValidateBindImageMemory2(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos) { return false; };
- virtual void PreCallRecordBindImageMemory2(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos) {};
- virtual void PostCallRecordBindImageMemory2(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos, VkResult result) {};
- virtual bool PreCallValidateGetDeviceGroupPeerMemoryFeatures(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures) { return false; };
- virtual void PreCallRecordGetDeviceGroupPeerMemoryFeatures(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures) {};
- virtual void PostCallRecordGetDeviceGroupPeerMemoryFeatures(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures) {};
- virtual bool PreCallValidateCmdSetDeviceMask(VkCommandBuffer commandBuffer, uint32_t deviceMask) { return false; };
- virtual void PreCallRecordCmdSetDeviceMask(VkCommandBuffer commandBuffer, uint32_t deviceMask) {};
- virtual void PostCallRecordCmdSetDeviceMask(VkCommandBuffer commandBuffer, uint32_t deviceMask) {};
- virtual bool PreCallValidateCmdDispatchBase(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ) { return false; };
- virtual void PreCallRecordCmdDispatchBase(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ) {};
- virtual void PostCallRecordCmdDispatchBase(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ) {};
- virtual bool PreCallValidateEnumeratePhysicalDeviceGroups(VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties) { return false; };
- virtual void PreCallRecordEnumeratePhysicalDeviceGroups(VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties) {};
- virtual void PostCallRecordEnumeratePhysicalDeviceGroups(VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, VkResult result) {};
- virtual bool PreCallValidateGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements) { return false; };
- virtual void PreCallRecordGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements) {};
- virtual void PostCallRecordGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements) {};
- virtual bool PreCallValidateGetBufferMemoryRequirements2(VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements) { return false; };
- virtual void PreCallRecordGetBufferMemoryRequirements2(VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements) {};
- virtual void PostCallRecordGetBufferMemoryRequirements2(VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements) {};
- virtual bool PreCallValidateGetImageSparseMemoryRequirements2(VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements) { return false; };
- virtual void PreCallRecordGetImageSparseMemoryRequirements2(VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements) {};
- virtual void PostCallRecordGetImageSparseMemoryRequirements2(VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements) {};
- virtual bool PreCallValidateGetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures) {};
- virtual void PostCallRecordGetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures) {};
- virtual bool PreCallValidateGetPhysicalDeviceProperties2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceProperties2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties) {};
- virtual void PostCallRecordGetPhysicalDeviceProperties2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties) {};
- virtual bool PreCallValidateGetPhysicalDeviceFormatProperties2(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceFormatProperties2(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties) {};
- virtual void PostCallRecordGetPhysicalDeviceFormatProperties2(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties) {};
- virtual bool PreCallValidateGetPhysicalDeviceImageFormatProperties2(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceImageFormatProperties2(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties) {};
- virtual void PostCallRecordGetPhysicalDeviceImageFormatProperties2(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties, VkResult result) {};
- virtual bool PreCallValidateGetPhysicalDeviceQueueFamilyProperties2(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceQueueFamilyProperties2(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties) {};
- virtual void PostCallRecordGetPhysicalDeviceQueueFamilyProperties2(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties) {};
- virtual bool PreCallValidateGetPhysicalDeviceMemoryProperties2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceMemoryProperties2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties) {};
- virtual void PostCallRecordGetPhysicalDeviceMemoryProperties2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties) {};
- virtual bool PreCallValidateGetPhysicalDeviceSparseImageFormatProperties2(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceSparseImageFormatProperties2(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties) {};
- virtual void PostCallRecordGetPhysicalDeviceSparseImageFormatProperties2(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties) {};
- virtual bool PreCallValidateTrimCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags) { return false; };
- virtual void PreCallRecordTrimCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags) {};
- virtual void PostCallRecordTrimCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags) {};
- virtual bool PreCallValidateGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2* pQueueInfo, VkQueue* pQueue) { return false; };
- virtual void PreCallRecordGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2* pQueueInfo, VkQueue* pQueue) {};
- virtual void PostCallRecordGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2* pQueueInfo, VkQueue* pQueue) {};
- virtual bool PreCallValidateCreateSamplerYcbcrConversion(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion) { return false; };
- virtual void PreCallRecordCreateSamplerYcbcrConversion(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion) {};
- virtual void PostCallRecordCreateSamplerYcbcrConversion(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion, VkResult result) {};
- virtual bool PreCallValidateDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator) { return false; };
- virtual void PreCallRecordDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator) {};
- virtual void PostCallRecordDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator) {};
- virtual bool PreCallValidateCreateDescriptorUpdateTemplate(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) { return false; };
- virtual void PreCallRecordCreateDescriptorUpdateTemplate(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {};
- virtual void PostCallRecordCreateDescriptorUpdateTemplate(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate, VkResult result) {};
- virtual bool PreCallValidateDestroyDescriptorUpdateTemplate(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator) { return false; };
- virtual void PreCallRecordDestroyDescriptorUpdateTemplate(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator) {};
- virtual void PostCallRecordDestroyDescriptorUpdateTemplate(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator) {};
- virtual bool PreCallValidateUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData) { return false; };
- virtual void PreCallRecordUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData) {};
- virtual void PostCallRecordUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData) {};
- virtual bool PreCallValidateGetPhysicalDeviceExternalBufferProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceExternalBufferProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties) {};
- virtual void PostCallRecordGetPhysicalDeviceExternalBufferProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties) {};
- virtual bool PreCallValidateGetPhysicalDeviceExternalFenceProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceExternalFenceProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties) {};
- virtual void PostCallRecordGetPhysicalDeviceExternalFenceProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties) {};
- virtual bool PreCallValidateGetPhysicalDeviceExternalSemaphoreProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceExternalSemaphoreProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties) {};
- virtual void PostCallRecordGetPhysicalDeviceExternalSemaphoreProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties) {};
- virtual bool PreCallValidateGetDescriptorSetLayoutSupport(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport) { return false; };
- virtual void PreCallRecordGetDescriptorSetLayoutSupport(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport) {};
- virtual void PostCallRecordGetDescriptorSetLayoutSupport(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport) {};
- virtual bool PreCallValidateDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator) { return false; };
- virtual void PreCallRecordDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator) {};
- virtual void PostCallRecordDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator) {};
- virtual bool PreCallValidateGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32* pSupported) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32* pSupported) {};
- virtual void PostCallRecordGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32* pSupported, VkResult result) {};
- virtual bool PreCallValidateGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities) {};
- virtual void PostCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities, VkResult result) {};
- virtual bool PreCallValidateGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pSurfaceFormatCount, VkSurfaceFormatKHR* pSurfaceFormats) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pSurfaceFormatCount, VkSurfaceFormatKHR* pSurfaceFormats) {};
- virtual void PostCallRecordGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pSurfaceFormatCount, VkSurfaceFormatKHR* pSurfaceFormats, VkResult result) {};
- virtual bool PreCallValidateGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes) {};
- virtual void PostCallRecordGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes, VkResult result) {};
- virtual bool PreCallValidateCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain) { return false; };
- virtual void PreCallRecordCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain) {};
- virtual void PostCallRecordCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain, VkResult result) {};
- virtual bool PreCallValidateDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator) { return false; };
- virtual void PreCallRecordDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator) {};
- virtual void PostCallRecordDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator) {};
- virtual bool PreCallValidateGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages) { return false; };
- virtual void PreCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages) {};
- virtual void PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages, VkResult result) {};
- virtual bool PreCallValidateAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex) { return false; };
- virtual void PreCallRecordAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex) {};
- virtual void PostCallRecordAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex, VkResult result) {};
- virtual bool PreCallValidateQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR* pPresentInfo) { return false; };
- virtual void PreCallRecordQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR* pPresentInfo) {};
- virtual void PostCallRecordQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR* pPresentInfo, VkResult result) {};
- virtual bool PreCallValidateGetDeviceGroupPresentCapabilitiesKHR(VkDevice device, VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities) { return false; };
- virtual void PreCallRecordGetDeviceGroupPresentCapabilitiesKHR(VkDevice device, VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities) {};
- virtual void PostCallRecordGetDeviceGroupPresentCapabilitiesKHR(VkDevice device, VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities, VkResult result) {};
- virtual bool PreCallValidateGetDeviceGroupSurfacePresentModesKHR(VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR* pModes) { return false; };
- virtual void PreCallRecordGetDeviceGroupSurfacePresentModesKHR(VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR* pModes) {};
- virtual void PostCallRecordGetDeviceGroupSurfacePresentModesKHR(VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR* pModes, VkResult result) {};
- virtual bool PreCallValidateGetPhysicalDevicePresentRectanglesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pRectCount, VkRect2D* pRects) { return false; };
- virtual void PreCallRecordGetPhysicalDevicePresentRectanglesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pRectCount, VkRect2D* pRects) {};
- virtual void PostCallRecordGetPhysicalDevicePresentRectanglesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pRectCount, VkRect2D* pRects, VkResult result) {};
- virtual bool PreCallValidateAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex) { return false; };
- virtual void PreCallRecordAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex) {};
- virtual void PostCallRecordAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex, VkResult result) {};
- virtual bool PreCallValidateGetPhysicalDeviceDisplayPropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPropertiesKHR* pProperties) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceDisplayPropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPropertiesKHR* pProperties) {};
- virtual void PostCallRecordGetPhysicalDeviceDisplayPropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPropertiesKHR* pProperties, VkResult result) {};
- virtual bool PreCallValidateGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlanePropertiesKHR* pProperties) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlanePropertiesKHR* pProperties) {};
- virtual void PostCallRecordGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlanePropertiesKHR* pProperties, VkResult result) {};
- virtual bool PreCallValidateGetDisplayPlaneSupportedDisplaysKHR(VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t* pDisplayCount, VkDisplayKHR* pDisplays) { return false; };
- virtual void PreCallRecordGetDisplayPlaneSupportedDisplaysKHR(VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t* pDisplayCount, VkDisplayKHR* pDisplays) {};
- virtual void PostCallRecordGetDisplayPlaneSupportedDisplaysKHR(VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t* pDisplayCount, VkDisplayKHR* pDisplays, VkResult result) {};
- virtual bool PreCallValidateGetDisplayModePropertiesKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModePropertiesKHR* pProperties) { return false; };
- virtual void PreCallRecordGetDisplayModePropertiesKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModePropertiesKHR* pProperties) {};
- virtual void PostCallRecordGetDisplayModePropertiesKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModePropertiesKHR* pProperties, VkResult result) {};
- virtual bool PreCallValidateCreateDisplayModeKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode) { return false; };
- virtual void PreCallRecordCreateDisplayModeKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode) {};
- virtual void PostCallRecordCreateDisplayModeKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode, VkResult result) {};
- virtual bool PreCallValidateGetDisplayPlaneCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR* pCapabilities) { return false; };
- virtual void PreCallRecordGetDisplayPlaneCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR* pCapabilities) {};
- virtual void PostCallRecordGetDisplayPlaneCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR* pCapabilities, VkResult result) {};
- virtual bool PreCallValidateCreateDisplayPlaneSurfaceKHR(VkInstance instance, const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) { return false; };
- virtual void PreCallRecordCreateDisplayPlaneSurfaceKHR(VkInstance instance, const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) {};
- virtual void PostCallRecordCreateDisplayPlaneSurfaceKHR(VkInstance instance, const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface, VkResult result) {};
- virtual bool PreCallValidateCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains) { return false; };
- virtual void PreCallRecordCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains) {};
- virtual void PostCallRecordCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains, VkResult result) {};
-#ifdef VK_USE_PLATFORM_XLIB_KHR
- virtual bool PreCallValidateCreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) { return false; };
- virtual void PreCallRecordCreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) {};
- virtual void PostCallRecordCreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface, VkResult result) {};
-#endif
-#ifdef VK_USE_PLATFORM_XLIB_KHR
- virtual bool PreCallValidateGetPhysicalDeviceXlibPresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, Display* dpy, VisualID visualID) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceXlibPresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, Display* dpy, VisualID visualID) {};
- virtual void PostCallRecordGetPhysicalDeviceXlibPresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, Display* dpy, VisualID visualID) {};
-#endif
-#ifdef VK_USE_PLATFORM_XCB_KHR
- virtual bool PreCallValidateCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) { return false; };
- virtual void PreCallRecordCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) {};
- virtual void PostCallRecordCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface, VkResult result) {};
-#endif
-#ifdef VK_USE_PLATFORM_XCB_KHR
- virtual bool PreCallValidateGetPhysicalDeviceXcbPresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceXcbPresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id) {};
- virtual void PostCallRecordGetPhysicalDeviceXcbPresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id) {};
-#endif
-#ifdef VK_USE_PLATFORM_WAYLAND_KHR
- virtual bool PreCallValidateCreateWaylandSurfaceKHR(VkInstance instance, const VkWaylandSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) { return false; };
- virtual void PreCallRecordCreateWaylandSurfaceKHR(VkInstance instance, const VkWaylandSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) {};
- virtual void PostCallRecordCreateWaylandSurfaceKHR(VkInstance instance, const VkWaylandSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface, VkResult result) {};
-#endif
-#ifdef VK_USE_PLATFORM_WAYLAND_KHR
- virtual bool PreCallValidateGetPhysicalDeviceWaylandPresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct wl_display* display) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceWaylandPresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct wl_display* display) {};
- virtual void PostCallRecordGetPhysicalDeviceWaylandPresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct wl_display* display) {};
-#endif
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
- virtual bool PreCallValidateCreateAndroidSurfaceKHR(VkInstance instance, const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) { return false; };
- virtual void PreCallRecordCreateAndroidSurfaceKHR(VkInstance instance, const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) {};
- virtual void PostCallRecordCreateAndroidSurfaceKHR(VkInstance instance, const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface, VkResult result) {};
-#endif
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- virtual bool PreCallValidateCreateWin32SurfaceKHR(VkInstance instance, const VkWin32SurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) { return false; };
- virtual void PreCallRecordCreateWin32SurfaceKHR(VkInstance instance, const VkWin32SurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) {};
- virtual void PostCallRecordCreateWin32SurfaceKHR(VkInstance instance, const VkWin32SurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface, VkResult result) {};
-#endif
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- virtual bool PreCallValidateGetPhysicalDeviceWin32PresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceWin32PresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex) {};
- virtual void PostCallRecordGetPhysicalDeviceWin32PresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex) {};
-#endif
- virtual bool PreCallValidateGetPhysicalDeviceFeatures2KHR(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceFeatures2KHR(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures) {};
- virtual void PostCallRecordGetPhysicalDeviceFeatures2KHR(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures) {};
- virtual bool PreCallValidateGetPhysicalDeviceProperties2KHR(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceProperties2KHR(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties) {};
- virtual void PostCallRecordGetPhysicalDeviceProperties2KHR(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties) {};
- virtual bool PreCallValidateGetPhysicalDeviceFormatProperties2KHR(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceFormatProperties2KHR(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties) {};
- virtual void PostCallRecordGetPhysicalDeviceFormatProperties2KHR(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties) {};
- virtual bool PreCallValidateGetPhysicalDeviceImageFormatProperties2KHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceImageFormatProperties2KHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties) {};
- virtual void PostCallRecordGetPhysicalDeviceImageFormatProperties2KHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties, VkResult result) {};
- virtual bool PreCallValidateGetPhysicalDeviceQueueFamilyProperties2KHR(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties) {};
- virtual void PostCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties) {};
- virtual bool PreCallValidateGetPhysicalDeviceMemoryProperties2KHR(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceMemoryProperties2KHR(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties) {};
- virtual void PostCallRecordGetPhysicalDeviceMemoryProperties2KHR(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties) {};
- virtual bool PreCallValidateGetPhysicalDeviceSparseImageFormatProperties2KHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceSparseImageFormatProperties2KHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties) {};
- virtual void PostCallRecordGetPhysicalDeviceSparseImageFormatProperties2KHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties) {};
- virtual bool PreCallValidateGetDeviceGroupPeerMemoryFeaturesKHR(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures) { return false; };
- virtual void PreCallRecordGetDeviceGroupPeerMemoryFeaturesKHR(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures) {};
- virtual void PostCallRecordGetDeviceGroupPeerMemoryFeaturesKHR(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures) {};
- virtual bool PreCallValidateCmdSetDeviceMaskKHR(VkCommandBuffer commandBuffer, uint32_t deviceMask) { return false; };
- virtual void PreCallRecordCmdSetDeviceMaskKHR(VkCommandBuffer commandBuffer, uint32_t deviceMask) {};
- virtual void PostCallRecordCmdSetDeviceMaskKHR(VkCommandBuffer commandBuffer, uint32_t deviceMask) {};
- virtual bool PreCallValidateCmdDispatchBaseKHR(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ) { return false; };
- virtual void PreCallRecordCmdDispatchBaseKHR(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ) {};
- virtual void PostCallRecordCmdDispatchBaseKHR(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ) {};
- virtual bool PreCallValidateTrimCommandPoolKHR(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags) { return false; };
- virtual void PreCallRecordTrimCommandPoolKHR(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags) {};
- virtual void PostCallRecordTrimCommandPoolKHR(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags) {};
- virtual bool PreCallValidateEnumeratePhysicalDeviceGroupsKHR(VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties) { return false; };
- virtual void PreCallRecordEnumeratePhysicalDeviceGroupsKHR(VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties) {};
- virtual void PostCallRecordEnumeratePhysicalDeviceGroupsKHR(VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, VkResult result) {};
- virtual bool PreCallValidateGetPhysicalDeviceExternalBufferPropertiesKHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceExternalBufferPropertiesKHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties) {};
- virtual void PostCallRecordGetPhysicalDeviceExternalBufferPropertiesKHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties) {};
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- virtual bool PreCallValidateGetMemoryWin32HandleKHR(VkDevice device, const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle) { return false; };
- virtual void PreCallRecordGetMemoryWin32HandleKHR(VkDevice device, const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle) {};
- virtual void PostCallRecordGetMemoryWin32HandleKHR(VkDevice device, const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, VkResult result) {};
-#endif
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- virtual bool PreCallValidateGetMemoryWin32HandlePropertiesKHR(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties) { return false; };
- virtual void PreCallRecordGetMemoryWin32HandlePropertiesKHR(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties) {};
- virtual void PostCallRecordGetMemoryWin32HandlePropertiesKHR(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties, VkResult result) {};
-#endif
- virtual bool PreCallValidateGetMemoryFdKHR(VkDevice device, const VkMemoryGetFdInfoKHR* pGetFdInfo, int* pFd) { return false; };
- virtual void PreCallRecordGetMemoryFdKHR(VkDevice device, const VkMemoryGetFdInfoKHR* pGetFdInfo, int* pFd) {};
- virtual void PostCallRecordGetMemoryFdKHR(VkDevice device, const VkMemoryGetFdInfoKHR* pGetFdInfo, int* pFd, VkResult result) {};
- virtual bool PreCallValidateGetMemoryFdPropertiesKHR(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, int fd, VkMemoryFdPropertiesKHR* pMemoryFdProperties) { return false; };
- virtual void PreCallRecordGetMemoryFdPropertiesKHR(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, int fd, VkMemoryFdPropertiesKHR* pMemoryFdProperties) {};
- virtual void PostCallRecordGetMemoryFdPropertiesKHR(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, int fd, VkMemoryFdPropertiesKHR* pMemoryFdProperties, VkResult result) {};
- virtual bool PreCallValidateGetPhysicalDeviceExternalSemaphorePropertiesKHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceExternalSemaphorePropertiesKHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties) {};
- virtual void PostCallRecordGetPhysicalDeviceExternalSemaphorePropertiesKHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties) {};
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- virtual bool PreCallValidateImportSemaphoreWin32HandleKHR(VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo) { return false; };
- virtual void PreCallRecordImportSemaphoreWin32HandleKHR(VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo) {};
- virtual void PostCallRecordImportSemaphoreWin32HandleKHR(VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo, VkResult result) {};
-#endif
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- virtual bool PreCallValidateGetSemaphoreWin32HandleKHR(VkDevice device, const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle) { return false; };
- virtual void PreCallRecordGetSemaphoreWin32HandleKHR(VkDevice device, const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle) {};
- virtual void PostCallRecordGetSemaphoreWin32HandleKHR(VkDevice device, const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, VkResult result) {};
-#endif
- virtual bool PreCallValidateImportSemaphoreFdKHR(VkDevice device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo) { return false; };
- virtual void PreCallRecordImportSemaphoreFdKHR(VkDevice device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo) {};
- virtual void PostCallRecordImportSemaphoreFdKHR(VkDevice device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo, VkResult result) {};
- virtual bool PreCallValidateGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd) { return false; };
- virtual void PreCallRecordGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd) {};
- virtual void PostCallRecordGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd, VkResult result) {};
- virtual bool PreCallValidateCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites) { return false; };
- virtual void PreCallRecordCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites) {};
- virtual void PostCallRecordCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites) {};
- virtual bool PreCallValidateCmdPushDescriptorSetWithTemplateKHR(VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData) { return false; };
- virtual void PreCallRecordCmdPushDescriptorSetWithTemplateKHR(VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData) {};
- virtual void PostCallRecordCmdPushDescriptorSetWithTemplateKHR(VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData) {};
- virtual bool PreCallValidateCreateDescriptorUpdateTemplateKHR(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) { return false; };
- virtual void PreCallRecordCreateDescriptorUpdateTemplateKHR(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {};
- virtual void PostCallRecordCreateDescriptorUpdateTemplateKHR(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate, VkResult result) {};
- virtual bool PreCallValidateDestroyDescriptorUpdateTemplateKHR(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator) { return false; };
- virtual void PreCallRecordDestroyDescriptorUpdateTemplateKHR(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator) {};
- virtual void PostCallRecordDestroyDescriptorUpdateTemplateKHR(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator) {};
- virtual bool PreCallValidateUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData) { return false; };
- virtual void PreCallRecordUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData) {};
- virtual void PostCallRecordUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData) {};
- virtual bool PreCallValidateCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass) { return false; };
- virtual void PreCallRecordCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass) {};
- virtual void PostCallRecordCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass, VkResult result) {};
- virtual bool PreCallValidateCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfoKHR* pSubpassBeginInfo) { return false; };
- virtual void PreCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfoKHR* pSubpassBeginInfo) {};
- virtual void PostCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfoKHR* pSubpassBeginInfo) {};
- virtual bool PreCallValidateCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR* pSubpassBeginInfo, const VkSubpassEndInfoKHR* pSubpassEndInfo) { return false; };
- virtual void PreCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR* pSubpassBeginInfo, const VkSubpassEndInfoKHR* pSubpassEndInfo) {};
- virtual void PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR* pSubpassBeginInfo, const VkSubpassEndInfoKHR* pSubpassEndInfo) {};
- virtual bool PreCallValidateCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR* pSubpassEndInfo) { return false; };
- virtual void PreCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR* pSubpassEndInfo) {};
- virtual void PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR* pSubpassEndInfo) {};
- virtual bool PreCallValidateGetSwapchainStatusKHR(VkDevice device, VkSwapchainKHR swapchain) { return false; };
- virtual void PreCallRecordGetSwapchainStatusKHR(VkDevice device, VkSwapchainKHR swapchain) {};
- virtual void PostCallRecordGetSwapchainStatusKHR(VkDevice device, VkSwapchainKHR swapchain, VkResult result) {};
- virtual bool PreCallValidateGetPhysicalDeviceExternalFencePropertiesKHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceExternalFencePropertiesKHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties) {};
- virtual void PostCallRecordGetPhysicalDeviceExternalFencePropertiesKHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties) {};
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- virtual bool PreCallValidateImportFenceWin32HandleKHR(VkDevice device, const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo) { return false; };
- virtual void PreCallRecordImportFenceWin32HandleKHR(VkDevice device, const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo) {};
- virtual void PostCallRecordImportFenceWin32HandleKHR(VkDevice device, const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo, VkResult result) {};
-#endif
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- virtual bool PreCallValidateGetFenceWin32HandleKHR(VkDevice device, const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle) { return false; };
- virtual void PreCallRecordGetFenceWin32HandleKHR(VkDevice device, const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle) {};
- virtual void PostCallRecordGetFenceWin32HandleKHR(VkDevice device, const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, VkResult result) {};
-#endif
- virtual bool PreCallValidateImportFenceFdKHR(VkDevice device, const VkImportFenceFdInfoKHR* pImportFenceFdInfo) { return false; };
- virtual void PreCallRecordImportFenceFdKHR(VkDevice device, const VkImportFenceFdInfoKHR* pImportFenceFdInfo) {};
- virtual void PostCallRecordImportFenceFdKHR(VkDevice device, const VkImportFenceFdInfoKHR* pImportFenceFdInfo, VkResult result) {};
- virtual bool PreCallValidateGetFenceFdKHR(VkDevice device, const VkFenceGetFdInfoKHR* pGetFdInfo, int* pFd) { return false; };
- virtual void PreCallRecordGetFenceFdKHR(VkDevice device, const VkFenceGetFdInfoKHR* pGetFdInfo, int* pFd) {};
- virtual void PostCallRecordGetFenceFdKHR(VkDevice device, const VkFenceGetFdInfoKHR* pGetFdInfo, int* pFd, VkResult result) {};
- virtual bool PreCallValidateGetPhysicalDeviceSurfaceCapabilities2KHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkSurfaceCapabilities2KHR* pSurfaceCapabilities) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceSurfaceCapabilities2KHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkSurfaceCapabilities2KHR* pSurfaceCapabilities) {};
- virtual void PostCallRecordGetPhysicalDeviceSurfaceCapabilities2KHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkSurfaceCapabilities2KHR* pSurfaceCapabilities, VkResult result) {};
- virtual bool PreCallValidateGetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VkSurfaceFormat2KHR* pSurfaceFormats) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VkSurfaceFormat2KHR* pSurfaceFormats) {};
- virtual void PostCallRecordGetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VkSurfaceFormat2KHR* pSurfaceFormats, VkResult result) {};
- virtual bool PreCallValidateGetPhysicalDeviceDisplayProperties2KHR(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayProperties2KHR* pProperties) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceDisplayProperties2KHR(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayProperties2KHR* pProperties) {};
- virtual void PostCallRecordGetPhysicalDeviceDisplayProperties2KHR(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayProperties2KHR* pProperties, VkResult result) {};
- virtual bool PreCallValidateGetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlaneProperties2KHR* pProperties) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlaneProperties2KHR* pProperties) {};
- virtual void PostCallRecordGetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlaneProperties2KHR* pProperties, VkResult result) {};
- virtual bool PreCallValidateGetDisplayModeProperties2KHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModeProperties2KHR* pProperties) { return false; };
- virtual void PreCallRecordGetDisplayModeProperties2KHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModeProperties2KHR* pProperties) {};
- virtual void PostCallRecordGetDisplayModeProperties2KHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModeProperties2KHR* pProperties, VkResult result) {};
- virtual bool PreCallValidateGetDisplayPlaneCapabilities2KHR(VkPhysicalDevice physicalDevice, const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, VkDisplayPlaneCapabilities2KHR* pCapabilities) { return false; };
- virtual void PreCallRecordGetDisplayPlaneCapabilities2KHR(VkPhysicalDevice physicalDevice, const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, VkDisplayPlaneCapabilities2KHR* pCapabilities) {};
- virtual void PostCallRecordGetDisplayPlaneCapabilities2KHR(VkPhysicalDevice physicalDevice, const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, VkDisplayPlaneCapabilities2KHR* pCapabilities, VkResult result) {};
- virtual bool PreCallValidateGetImageMemoryRequirements2KHR(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements) { return false; };
- virtual void PreCallRecordGetImageMemoryRequirements2KHR(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements) {};
- virtual void PostCallRecordGetImageMemoryRequirements2KHR(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements) {};
- virtual bool PreCallValidateGetBufferMemoryRequirements2KHR(VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements) { return false; };
- virtual void PreCallRecordGetBufferMemoryRequirements2KHR(VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements) {};
- virtual void PostCallRecordGetBufferMemoryRequirements2KHR(VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements) {};
- virtual bool PreCallValidateGetImageSparseMemoryRequirements2KHR(VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements) { return false; };
- virtual void PreCallRecordGetImageSparseMemoryRequirements2KHR(VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements) {};
- virtual void PostCallRecordGetImageSparseMemoryRequirements2KHR(VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements) {};
- virtual bool PreCallValidateCreateSamplerYcbcrConversionKHR(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion) { return false; };
- virtual void PreCallRecordCreateSamplerYcbcrConversionKHR(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion) {};
- virtual void PostCallRecordCreateSamplerYcbcrConversionKHR(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion, VkResult result) {};
- virtual bool PreCallValidateDestroySamplerYcbcrConversionKHR(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator) { return false; };
- virtual void PreCallRecordDestroySamplerYcbcrConversionKHR(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator) {};
- virtual void PostCallRecordDestroySamplerYcbcrConversionKHR(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator) {};
- virtual bool PreCallValidateBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos) { return false; };
- virtual void PreCallRecordBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos) {};
- virtual void PostCallRecordBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos, VkResult result) {};
- virtual bool PreCallValidateBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos) { return false; };
- virtual void PreCallRecordBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos) {};
- virtual void PostCallRecordBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos, VkResult result) {};
- virtual bool PreCallValidateGetDescriptorSetLayoutSupportKHR(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport) { return false; };
- virtual void PreCallRecordGetDescriptorSetLayoutSupportKHR(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport) {};
- virtual void PostCallRecordGetDescriptorSetLayoutSupportKHR(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport) {};
- virtual bool PreCallValidateCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) { return false; };
- virtual void PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) {};
- virtual void PostCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) {};
- virtual bool PreCallValidateCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) { return false; };
- virtual void PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) {};
- virtual void PostCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) {};
- virtual bool PreCallValidateGetPipelineExecutablePropertiesKHR(VkDevice device, const VkPipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VkPipelineExecutablePropertiesKHR* pProperties) { return false; };
- virtual void PreCallRecordGetPipelineExecutablePropertiesKHR(VkDevice device, const VkPipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VkPipelineExecutablePropertiesKHR* pProperties) {};
- virtual void PostCallRecordGetPipelineExecutablePropertiesKHR(VkDevice device, const VkPipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VkPipelineExecutablePropertiesKHR* pProperties, VkResult result) {};
- virtual bool PreCallValidateGetPipelineExecutableStatisticsKHR(VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VkPipelineExecutableStatisticKHR* pStatistics) { return false; };
- virtual void PreCallRecordGetPipelineExecutableStatisticsKHR(VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VkPipelineExecutableStatisticKHR* pStatistics) {};
- virtual void PostCallRecordGetPipelineExecutableStatisticsKHR(VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VkPipelineExecutableStatisticKHR* pStatistics, VkResult result) {};
- virtual bool PreCallValidateGetPipelineExecutableInternalRepresentationsKHR(VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations) { return false; };
- virtual void PreCallRecordGetPipelineExecutableInternalRepresentationsKHR(VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations) {};
- virtual void PostCallRecordGetPipelineExecutableInternalRepresentationsKHR(VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations, VkResult result) {};
- virtual bool PreCallValidateCreateDebugReportCallbackEXT(VkInstance instance, const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugReportCallbackEXT* pCallback) { return false; };
- virtual void PreCallRecordCreateDebugReportCallbackEXT(VkInstance instance, const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugReportCallbackEXT* pCallback) {};
- virtual void PostCallRecordCreateDebugReportCallbackEXT(VkInstance instance, const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugReportCallbackEXT* pCallback, VkResult result) {};
- virtual bool PreCallValidateDestroyDebugReportCallbackEXT(VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator) { return false; };
- virtual void PreCallRecordDestroyDebugReportCallbackEXT(VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator) {};
- virtual void PostCallRecordDestroyDebugReportCallbackEXT(VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator) {};
- virtual bool PreCallValidateDebugReportMessageEXT(VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage) { return false; };
- virtual void PreCallRecordDebugReportMessageEXT(VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage) {};
- virtual void PostCallRecordDebugReportMessageEXT(VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage) {};
- virtual bool PreCallValidateDebugMarkerSetObjectTagEXT(VkDevice device, const VkDebugMarkerObjectTagInfoEXT* pTagInfo) { return false; };
- virtual void PreCallRecordDebugMarkerSetObjectTagEXT(VkDevice device, const VkDebugMarkerObjectTagInfoEXT* pTagInfo) {};
- virtual void PostCallRecordDebugMarkerSetObjectTagEXT(VkDevice device, const VkDebugMarkerObjectTagInfoEXT* pTagInfo, VkResult result) {};
- virtual bool PreCallValidateDebugMarkerSetObjectNameEXT(VkDevice device, const VkDebugMarkerObjectNameInfoEXT* pNameInfo) { return false; };
- virtual void PreCallRecordDebugMarkerSetObjectNameEXT(VkDevice device, const VkDebugMarkerObjectNameInfoEXT* pNameInfo) {};
- virtual void PostCallRecordDebugMarkerSetObjectNameEXT(VkDevice device, const VkDebugMarkerObjectNameInfoEXT* pNameInfo, VkResult result) {};
- virtual bool PreCallValidateCmdDebugMarkerBeginEXT(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo) { return false; };
- virtual void PreCallRecordCmdDebugMarkerBeginEXT(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo) {};
- virtual void PostCallRecordCmdDebugMarkerBeginEXT(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo) {};
- virtual bool PreCallValidateCmdDebugMarkerEndEXT(VkCommandBuffer commandBuffer) { return false; };
- virtual void PreCallRecordCmdDebugMarkerEndEXT(VkCommandBuffer commandBuffer) {};
- virtual void PostCallRecordCmdDebugMarkerEndEXT(VkCommandBuffer commandBuffer) {};
- virtual bool PreCallValidateCmdDebugMarkerInsertEXT(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo) { return false; };
- virtual void PreCallRecordCmdDebugMarkerInsertEXT(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo) {};
- virtual void PostCallRecordCmdDebugMarkerInsertEXT(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo) {};
- virtual bool PreCallValidateCmdBindTransformFeedbackBuffersEXT(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes) { return false; };
- virtual void PreCallRecordCmdBindTransformFeedbackBuffersEXT(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes) {};
- virtual void PostCallRecordCmdBindTransformFeedbackBuffersEXT(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes) {};
- virtual bool PreCallValidateCmdBeginTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets) { return false; };
- virtual void PreCallRecordCmdBeginTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets) {};
- virtual void PostCallRecordCmdBeginTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets) {};
- virtual bool PreCallValidateCmdEndTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets) { return false; };
- virtual void PreCallRecordCmdEndTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets) {};
- virtual void PostCallRecordCmdEndTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets) {};
- virtual bool PreCallValidateCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index) { return false; };
- virtual void PreCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index) {};
- virtual void PostCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index) {};
- virtual bool PreCallValidateCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index) { return false; };
- virtual void PreCallRecordCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index) {};
- virtual void PostCallRecordCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index) {};
- virtual bool PreCallValidateCmdDrawIndirectByteCountEXT(VkCommandBuffer commandBuffer, uint32_t instanceCount, uint32_t firstInstance, VkBuffer counterBuffer, VkDeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride) { return false; };
- virtual void PreCallRecordCmdDrawIndirectByteCountEXT(VkCommandBuffer commandBuffer, uint32_t instanceCount, uint32_t firstInstance, VkBuffer counterBuffer, VkDeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride) {};
- virtual void PostCallRecordCmdDrawIndirectByteCountEXT(VkCommandBuffer commandBuffer, uint32_t instanceCount, uint32_t firstInstance, VkBuffer counterBuffer, VkDeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride) {};
- virtual bool PreCallValidateGetImageViewHandleNVX(VkDevice device, const VkImageViewHandleInfoNVX* pInfo) { return false; };
- virtual void PreCallRecordGetImageViewHandleNVX(VkDevice device, const VkImageViewHandleInfoNVX* pInfo) {};
- virtual void PostCallRecordGetImageViewHandleNVX(VkDevice device, const VkImageViewHandleInfoNVX* pInfo) {};
- virtual bool PreCallValidateCmdDrawIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) { return false; };
- virtual void PreCallRecordCmdDrawIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) {};
- virtual void PostCallRecordCmdDrawIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) {};
- virtual bool PreCallValidateCmdDrawIndexedIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) { return false; };
- virtual void PreCallRecordCmdDrawIndexedIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) {};
- virtual void PostCallRecordCmdDrawIndexedIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) {};
- virtual bool PreCallValidateGetShaderInfoAMD(VkDevice device, VkPipeline pipeline, VkShaderStageFlagBits shaderStage, VkShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo) { return false; };
- virtual void PreCallRecordGetShaderInfoAMD(VkDevice device, VkPipeline pipeline, VkShaderStageFlagBits shaderStage, VkShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo) {};
- virtual void PostCallRecordGetShaderInfoAMD(VkDevice device, VkPipeline pipeline, VkShaderStageFlagBits shaderStage, VkShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo, VkResult result) {};
-#ifdef VK_USE_PLATFORM_GGP
- virtual bool PreCallValidateCreateStreamDescriptorSurfaceGGP(VkInstance instance, const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) { return false; };
- virtual void PreCallRecordCreateStreamDescriptorSurfaceGGP(VkInstance instance, const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) {};
- virtual void PostCallRecordCreateStreamDescriptorSurfaceGGP(VkInstance instance, const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface, VkResult result) {};
-#endif
- virtual bool PreCallValidateGetPhysicalDeviceExternalImageFormatPropertiesNV(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkExternalMemoryHandleTypeFlagsNV externalHandleType, VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceExternalImageFormatPropertiesNV(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkExternalMemoryHandleTypeFlagsNV externalHandleType, VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties) {};
- virtual void PostCallRecordGetPhysicalDeviceExternalImageFormatPropertiesNV(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkExternalMemoryHandleTypeFlagsNV externalHandleType, VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties, VkResult result) {};
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- virtual bool PreCallValidateGetMemoryWin32HandleNV(VkDevice device, VkDeviceMemory memory, VkExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle) { return false; };
- virtual void PreCallRecordGetMemoryWin32HandleNV(VkDevice device, VkDeviceMemory memory, VkExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle) {};
- virtual void PostCallRecordGetMemoryWin32HandleNV(VkDevice device, VkDeviceMemory memory, VkExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle, VkResult result) {};
-#endif
-#ifdef VK_USE_PLATFORM_VI_NN
- virtual bool PreCallValidateCreateViSurfaceNN(VkInstance instance, const VkViSurfaceCreateInfoNN* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) { return false; };
- virtual void PreCallRecordCreateViSurfaceNN(VkInstance instance, const VkViSurfaceCreateInfoNN* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) {};
- virtual void PostCallRecordCreateViSurfaceNN(VkInstance instance, const VkViSurfaceCreateInfoNN* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface, VkResult result) {};
-#endif
- virtual bool PreCallValidateCmdBeginConditionalRenderingEXT(VkCommandBuffer commandBuffer, const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin) { return false; };
- virtual void PreCallRecordCmdBeginConditionalRenderingEXT(VkCommandBuffer commandBuffer, const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin) {};
- virtual void PostCallRecordCmdBeginConditionalRenderingEXT(VkCommandBuffer commandBuffer, const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin) {};
- virtual bool PreCallValidateCmdEndConditionalRenderingEXT(VkCommandBuffer commandBuffer) { return false; };
- virtual void PreCallRecordCmdEndConditionalRenderingEXT(VkCommandBuffer commandBuffer) {};
- virtual void PostCallRecordCmdEndConditionalRenderingEXT(VkCommandBuffer commandBuffer) {};
- virtual bool PreCallValidateCmdProcessCommandsNVX(VkCommandBuffer commandBuffer, const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo) { return false; };
- virtual void PreCallRecordCmdProcessCommandsNVX(VkCommandBuffer commandBuffer, const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo) {};
- virtual void PostCallRecordCmdProcessCommandsNVX(VkCommandBuffer commandBuffer, const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo) {};
- virtual bool PreCallValidateCmdReserveSpaceForCommandsNVX(VkCommandBuffer commandBuffer, const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo) { return false; };
- virtual void PreCallRecordCmdReserveSpaceForCommandsNVX(VkCommandBuffer commandBuffer, const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo) {};
- virtual void PostCallRecordCmdReserveSpaceForCommandsNVX(VkCommandBuffer commandBuffer, const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo) {};
- virtual bool PreCallValidateCreateIndirectCommandsLayoutNVX(VkDevice device, const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout) { return false; };
- virtual void PreCallRecordCreateIndirectCommandsLayoutNVX(VkDevice device, const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout) {};
- virtual void PostCallRecordCreateIndirectCommandsLayoutNVX(VkDevice device, const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout, VkResult result) {};
- virtual bool PreCallValidateDestroyIndirectCommandsLayoutNVX(VkDevice device, VkIndirectCommandsLayoutNVX indirectCommandsLayout, const VkAllocationCallbacks* pAllocator) { return false; };
- virtual void PreCallRecordDestroyIndirectCommandsLayoutNVX(VkDevice device, VkIndirectCommandsLayoutNVX indirectCommandsLayout, const VkAllocationCallbacks* pAllocator) {};
- virtual void PostCallRecordDestroyIndirectCommandsLayoutNVX(VkDevice device, VkIndirectCommandsLayoutNVX indirectCommandsLayout, const VkAllocationCallbacks* pAllocator) {};
- virtual bool PreCallValidateCreateObjectTableNVX(VkDevice device, const VkObjectTableCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkObjectTableNVX* pObjectTable) { return false; };
- virtual void PreCallRecordCreateObjectTableNVX(VkDevice device, const VkObjectTableCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkObjectTableNVX* pObjectTable) {};
- virtual void PostCallRecordCreateObjectTableNVX(VkDevice device, const VkObjectTableCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkObjectTableNVX* pObjectTable, VkResult result) {};
- virtual bool PreCallValidateDestroyObjectTableNVX(VkDevice device, VkObjectTableNVX objectTable, const VkAllocationCallbacks* pAllocator) { return false; };
- virtual void PreCallRecordDestroyObjectTableNVX(VkDevice device, VkObjectTableNVX objectTable, const VkAllocationCallbacks* pAllocator) {};
- virtual void PostCallRecordDestroyObjectTableNVX(VkDevice device, VkObjectTableNVX objectTable, const VkAllocationCallbacks* pAllocator) {};
- virtual bool PreCallValidateRegisterObjectsNVX(VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices) { return false; };
- virtual void PreCallRecordRegisterObjectsNVX(VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices) {};
- virtual void PostCallRecordRegisterObjectsNVX(VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices, VkResult result) {};
- virtual bool PreCallValidateUnregisterObjectsNVX(VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices) { return false; };
- virtual void PreCallRecordUnregisterObjectsNVX(VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices) {};
- virtual void PostCallRecordUnregisterObjectsNVX(VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices, VkResult result) {};
- virtual bool PreCallValidateGetPhysicalDeviceGeneratedCommandsPropertiesNVX(VkPhysicalDevice physicalDevice, VkDeviceGeneratedCommandsFeaturesNVX* pFeatures, VkDeviceGeneratedCommandsLimitsNVX* pLimits) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceGeneratedCommandsPropertiesNVX(VkPhysicalDevice physicalDevice, VkDeviceGeneratedCommandsFeaturesNVX* pFeatures, VkDeviceGeneratedCommandsLimitsNVX* pLimits) {};
- virtual void PostCallRecordGetPhysicalDeviceGeneratedCommandsPropertiesNVX(VkPhysicalDevice physicalDevice, VkDeviceGeneratedCommandsFeaturesNVX* pFeatures, VkDeviceGeneratedCommandsLimitsNVX* pLimits) {};
- virtual bool PreCallValidateCmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportWScalingNV* pViewportWScalings) { return false; };
- virtual void PreCallRecordCmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportWScalingNV* pViewportWScalings) {};
- virtual void PostCallRecordCmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportWScalingNV* pViewportWScalings) {};
- virtual bool PreCallValidateReleaseDisplayEXT(VkPhysicalDevice physicalDevice, VkDisplayKHR display) { return false; };
- virtual void PreCallRecordReleaseDisplayEXT(VkPhysicalDevice physicalDevice, VkDisplayKHR display) {};
- virtual void PostCallRecordReleaseDisplayEXT(VkPhysicalDevice physicalDevice, VkDisplayKHR display, VkResult result) {};
-#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
- virtual bool PreCallValidateAcquireXlibDisplayEXT(VkPhysicalDevice physicalDevice, Display* dpy, VkDisplayKHR display) { return false; };
- virtual void PreCallRecordAcquireXlibDisplayEXT(VkPhysicalDevice physicalDevice, Display* dpy, VkDisplayKHR display) {};
- virtual void PostCallRecordAcquireXlibDisplayEXT(VkPhysicalDevice physicalDevice, Display* dpy, VkDisplayKHR display, VkResult result) {};
-#endif
-#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
- virtual bool PreCallValidateGetRandROutputDisplayEXT(VkPhysicalDevice physicalDevice, Display* dpy, RROutput rrOutput, VkDisplayKHR* pDisplay) { return false; };
- virtual void PreCallRecordGetRandROutputDisplayEXT(VkPhysicalDevice physicalDevice, Display* dpy, RROutput rrOutput, VkDisplayKHR* pDisplay) {};
- virtual void PostCallRecordGetRandROutputDisplayEXT(VkPhysicalDevice physicalDevice, Display* dpy, RROutput rrOutput, VkDisplayKHR* pDisplay, VkResult result) {};
-#endif
- virtual bool PreCallValidateGetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilities2EXT* pSurfaceCapabilities) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilities2EXT* pSurfaceCapabilities) {};
- virtual void PostCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilities2EXT* pSurfaceCapabilities, VkResult result) {};
- virtual bool PreCallValidateDisplayPowerControlEXT(VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT* pDisplayPowerInfo) { return false; };
- virtual void PreCallRecordDisplayPowerControlEXT(VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT* pDisplayPowerInfo) {};
- virtual void PostCallRecordDisplayPowerControlEXT(VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT* pDisplayPowerInfo, VkResult result) {};
- virtual bool PreCallValidateRegisterDeviceEventEXT(VkDevice device, const VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence) { return false; };
- virtual void PreCallRecordRegisterDeviceEventEXT(VkDevice device, const VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence) {};
- virtual void PostCallRecordRegisterDeviceEventEXT(VkDevice device, const VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence, VkResult result) {};
- virtual bool PreCallValidateRegisterDisplayEventEXT(VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT* pDisplayEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence) { return false; };
- virtual void PreCallRecordRegisterDisplayEventEXT(VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT* pDisplayEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence) {};
- virtual void PostCallRecordRegisterDisplayEventEXT(VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT* pDisplayEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence, VkResult result) {};
- virtual bool PreCallValidateGetSwapchainCounterEXT(VkDevice device, VkSwapchainKHR swapchain, VkSurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue) { return false; };
- virtual void PreCallRecordGetSwapchainCounterEXT(VkDevice device, VkSwapchainKHR swapchain, VkSurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue) {};
- virtual void PostCallRecordGetSwapchainCounterEXT(VkDevice device, VkSwapchainKHR swapchain, VkSurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue, VkResult result) {};
- virtual bool PreCallValidateGetRefreshCycleDurationGOOGLE(VkDevice device, VkSwapchainKHR swapchain, VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties) { return false; };
- virtual void PreCallRecordGetRefreshCycleDurationGOOGLE(VkDevice device, VkSwapchainKHR swapchain, VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties) {};
- virtual void PostCallRecordGetRefreshCycleDurationGOOGLE(VkDevice device, VkSwapchainKHR swapchain, VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties, VkResult result) {};
- virtual bool PreCallValidateGetPastPresentationTimingGOOGLE(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VkPastPresentationTimingGOOGLE* pPresentationTimings) { return false; };
- virtual void PreCallRecordGetPastPresentationTimingGOOGLE(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VkPastPresentationTimingGOOGLE* pPresentationTimings) {};
- virtual void PostCallRecordGetPastPresentationTimingGOOGLE(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VkPastPresentationTimingGOOGLE* pPresentationTimings, VkResult result) {};
- virtual bool PreCallValidateCmdSetDiscardRectangleEXT(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VkRect2D* pDiscardRectangles) { return false; };
- virtual void PreCallRecordCmdSetDiscardRectangleEXT(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VkRect2D* pDiscardRectangles) {};
- virtual void PostCallRecordCmdSetDiscardRectangleEXT(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VkRect2D* pDiscardRectangles) {};
- virtual bool PreCallValidateSetHdrMetadataEXT(VkDevice device, uint32_t swapchainCount, const VkSwapchainKHR* pSwapchains, const VkHdrMetadataEXT* pMetadata) { return false; };
- virtual void PreCallRecordSetHdrMetadataEXT(VkDevice device, uint32_t swapchainCount, const VkSwapchainKHR* pSwapchains, const VkHdrMetadataEXT* pMetadata) {};
- virtual void PostCallRecordSetHdrMetadataEXT(VkDevice device, uint32_t swapchainCount, const VkSwapchainKHR* pSwapchains, const VkHdrMetadataEXT* pMetadata) {};
-#ifdef VK_USE_PLATFORM_IOS_MVK
- virtual bool PreCallValidateCreateIOSSurfaceMVK(VkInstance instance, const VkIOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) { return false; };
- virtual void PreCallRecordCreateIOSSurfaceMVK(VkInstance instance, const VkIOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) {};
- virtual void PostCallRecordCreateIOSSurfaceMVK(VkInstance instance, const VkIOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface, VkResult result) {};
-#endif
-#ifdef VK_USE_PLATFORM_MACOS_MVK
- virtual bool PreCallValidateCreateMacOSSurfaceMVK(VkInstance instance, const VkMacOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) { return false; };
- virtual void PreCallRecordCreateMacOSSurfaceMVK(VkInstance instance, const VkMacOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) {};
- virtual void PostCallRecordCreateMacOSSurfaceMVK(VkInstance instance, const VkMacOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface, VkResult result) {};
-#endif
- virtual bool PreCallValidateSetDebugUtilsObjectNameEXT(VkDevice device, const VkDebugUtilsObjectNameInfoEXT* pNameInfo) { return false; };
- virtual void PreCallRecordSetDebugUtilsObjectNameEXT(VkDevice device, const VkDebugUtilsObjectNameInfoEXT* pNameInfo) {};
- virtual void PostCallRecordSetDebugUtilsObjectNameEXT(VkDevice device, const VkDebugUtilsObjectNameInfoEXT* pNameInfo, VkResult result) {};
- virtual bool PreCallValidateSetDebugUtilsObjectTagEXT(VkDevice device, const VkDebugUtilsObjectTagInfoEXT* pTagInfo) { return false; };
- virtual void PreCallRecordSetDebugUtilsObjectTagEXT(VkDevice device, const VkDebugUtilsObjectTagInfoEXT* pTagInfo) {};
- virtual void PostCallRecordSetDebugUtilsObjectTagEXT(VkDevice device, const VkDebugUtilsObjectTagInfoEXT* pTagInfo, VkResult result) {};
- virtual bool PreCallValidateQueueBeginDebugUtilsLabelEXT(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo) { return false; };
- virtual void PreCallRecordQueueBeginDebugUtilsLabelEXT(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo) {};
- virtual void PostCallRecordQueueBeginDebugUtilsLabelEXT(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo) {};
- virtual bool PreCallValidateQueueEndDebugUtilsLabelEXT(VkQueue queue) { return false; };
- virtual void PreCallRecordQueueEndDebugUtilsLabelEXT(VkQueue queue) {};
- virtual void PostCallRecordQueueEndDebugUtilsLabelEXT(VkQueue queue) {};
- virtual bool PreCallValidateQueueInsertDebugUtilsLabelEXT(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo) { return false; };
- virtual void PreCallRecordQueueInsertDebugUtilsLabelEXT(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo) {};
- virtual void PostCallRecordQueueInsertDebugUtilsLabelEXT(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo) {};
- virtual bool PreCallValidateCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo) { return false; };
- virtual void PreCallRecordCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo) {};
- virtual void PostCallRecordCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo) {};
- virtual bool PreCallValidateCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer) { return false; };
- virtual void PreCallRecordCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer) {};
- virtual void PostCallRecordCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer) {};
- virtual bool PreCallValidateCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo) { return false; };
- virtual void PreCallRecordCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo) {};
- virtual void PostCallRecordCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo) {};
- virtual bool PreCallValidateCreateDebugUtilsMessengerEXT(VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugUtilsMessengerEXT* pMessenger) { return false; };
- virtual void PreCallRecordCreateDebugUtilsMessengerEXT(VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugUtilsMessengerEXT* pMessenger) {};
- virtual void PostCallRecordCreateDebugUtilsMessengerEXT(VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugUtilsMessengerEXT* pMessenger, VkResult result) {};
- virtual bool PreCallValidateDestroyDebugUtilsMessengerEXT(VkInstance instance, VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks* pAllocator) { return false; };
- virtual void PreCallRecordDestroyDebugUtilsMessengerEXT(VkInstance instance, VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks* pAllocator) {};
- virtual void PostCallRecordDestroyDebugUtilsMessengerEXT(VkInstance instance, VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks* pAllocator) {};
- virtual bool PreCallValidateSubmitDebugUtilsMessageEXT(VkInstance instance, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData) { return false; };
- virtual void PreCallRecordSubmitDebugUtilsMessageEXT(VkInstance instance, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData) {};
- virtual void PostCallRecordSubmitDebugUtilsMessageEXT(VkInstance instance, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData) {};
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
- virtual bool PreCallValidateGetAndroidHardwareBufferPropertiesANDROID(VkDevice device, const struct AHardwareBuffer* buffer, VkAndroidHardwareBufferPropertiesANDROID* pProperties) { return false; };
- virtual void PreCallRecordGetAndroidHardwareBufferPropertiesANDROID(VkDevice device, const struct AHardwareBuffer* buffer, VkAndroidHardwareBufferPropertiesANDROID* pProperties) {};
- virtual void PostCallRecordGetAndroidHardwareBufferPropertiesANDROID(VkDevice device, const struct AHardwareBuffer* buffer, VkAndroidHardwareBufferPropertiesANDROID* pProperties, VkResult result) {};
-#endif
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
- virtual bool PreCallValidateGetMemoryAndroidHardwareBufferANDROID(VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer) { return false; };
- virtual void PreCallRecordGetMemoryAndroidHardwareBufferANDROID(VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer) {};
- virtual void PostCallRecordGetMemoryAndroidHardwareBufferANDROID(VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer, VkResult result) {};
-#endif
- virtual bool PreCallValidateCmdSetSampleLocationsEXT(VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT* pSampleLocationsInfo) { return false; };
- virtual void PreCallRecordCmdSetSampleLocationsEXT(VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT* pSampleLocationsInfo) {};
- virtual void PostCallRecordCmdSetSampleLocationsEXT(VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT* pSampleLocationsInfo) {};
- virtual bool PreCallValidateGetPhysicalDeviceMultisamplePropertiesEXT(VkPhysicalDevice physicalDevice, VkSampleCountFlagBits samples, VkMultisamplePropertiesEXT* pMultisampleProperties) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceMultisamplePropertiesEXT(VkPhysicalDevice physicalDevice, VkSampleCountFlagBits samples, VkMultisamplePropertiesEXT* pMultisampleProperties) {};
- virtual void PostCallRecordGetPhysicalDeviceMultisamplePropertiesEXT(VkPhysicalDevice physicalDevice, VkSampleCountFlagBits samples, VkMultisamplePropertiesEXT* pMultisampleProperties) {};
- virtual bool PreCallValidateGetImageDrmFormatModifierPropertiesEXT(VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT* pProperties) { return false; };
- virtual void PreCallRecordGetImageDrmFormatModifierPropertiesEXT(VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT* pProperties) {};
- virtual void PostCallRecordGetImageDrmFormatModifierPropertiesEXT(VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT* pProperties, VkResult result) {};
- virtual bool PreCallValidateCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout) { return false; };
- virtual void PreCallRecordCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout) {};
- virtual void PostCallRecordCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout) {};
- virtual bool PreCallValidateCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkShadingRatePaletteNV* pShadingRatePalettes) { return false; };
- virtual void PreCallRecordCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkShadingRatePaletteNV* pShadingRatePalettes) {};
- virtual void PostCallRecordCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkShadingRatePaletteNV* pShadingRatePalettes) {};
- virtual bool PreCallValidateCmdSetCoarseSampleOrderNV(VkCommandBuffer commandBuffer, VkCoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VkCoarseSampleOrderCustomNV* pCustomSampleOrders) { return false; };
- virtual void PreCallRecordCmdSetCoarseSampleOrderNV(VkCommandBuffer commandBuffer, VkCoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VkCoarseSampleOrderCustomNV* pCustomSampleOrders) {};
- virtual void PostCallRecordCmdSetCoarseSampleOrderNV(VkCommandBuffer commandBuffer, VkCoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VkCoarseSampleOrderCustomNV* pCustomSampleOrders) {};
- virtual bool PreCallValidateCreateAccelerationStructureNV(VkDevice device, const VkAccelerationStructureCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureNV* pAccelerationStructure) { return false; };
- virtual void PreCallRecordCreateAccelerationStructureNV(VkDevice device, const VkAccelerationStructureCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureNV* pAccelerationStructure) {};
- virtual void PostCallRecordCreateAccelerationStructureNV(VkDevice device, const VkAccelerationStructureCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureNV* pAccelerationStructure, VkResult result) {};
- virtual bool PreCallValidateDestroyAccelerationStructureNV(VkDevice device, VkAccelerationStructureNV accelerationStructure, const VkAllocationCallbacks* pAllocator) { return false; };
- virtual void PreCallRecordDestroyAccelerationStructureNV(VkDevice device, VkAccelerationStructureNV accelerationStructure, const VkAllocationCallbacks* pAllocator) {};
- virtual void PostCallRecordDestroyAccelerationStructureNV(VkDevice device, VkAccelerationStructureNV accelerationStructure, const VkAllocationCallbacks* pAllocator) {};
- virtual bool PreCallValidateGetAccelerationStructureMemoryRequirementsNV(VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements) { return false; };
- virtual void PreCallRecordGetAccelerationStructureMemoryRequirementsNV(VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements) {};
- virtual void PostCallRecordGetAccelerationStructureMemoryRequirementsNV(VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements) {};
- virtual bool PreCallValidateBindAccelerationStructureMemoryNV(VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV* pBindInfos) { return false; };
- virtual void PreCallRecordBindAccelerationStructureMemoryNV(VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV* pBindInfos) {};
- virtual void PostCallRecordBindAccelerationStructureMemoryNV(VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV* pBindInfos, VkResult result) {};
- virtual bool PreCallValidateCmdBuildAccelerationStructureNV(VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset) { return false; };
- virtual void PreCallRecordCmdBuildAccelerationStructureNV(VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset) {};
- virtual void PostCallRecordCmdBuildAccelerationStructureNV(VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset) {};
- virtual bool PreCallValidateCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkCopyAccelerationStructureModeNV mode) { return false; };
- virtual void PreCallRecordCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkCopyAccelerationStructureModeNV mode) {};
- virtual void PostCallRecordCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkCopyAccelerationStructureModeNV mode) {};
- virtual bool PreCallValidateCmdTraceRaysNV(VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer, VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer, VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride, VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset, VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer, VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth) { return false; };
- virtual void PreCallRecordCmdTraceRaysNV(VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer, VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer, VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride, VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset, VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer, VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth) {};
- virtual void PostCallRecordCmdTraceRaysNV(VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer, VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer, VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride, VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset, VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer, VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth) {};
- virtual bool PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines) { return false; };
- virtual void PreCallRecordCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines) {};
- virtual void PostCallRecordCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, VkResult result) {};
- virtual bool PreCallValidateGetRayTracingShaderGroupHandlesNV(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData) { return false; };
- virtual void PreCallRecordGetRayTracingShaderGroupHandlesNV(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData) {};
- virtual void PostCallRecordGetRayTracingShaderGroupHandlesNV(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, VkResult result) {};
- virtual bool PreCallValidateGetAccelerationStructureHandleNV(VkDevice device, VkAccelerationStructureNV accelerationStructure, size_t dataSize, void* pData) { return false; };
- virtual void PreCallRecordGetAccelerationStructureHandleNV(VkDevice device, VkAccelerationStructureNV accelerationStructure, size_t dataSize, void* pData) {};
- virtual void PostCallRecordGetAccelerationStructureHandleNV(VkDevice device, VkAccelerationStructureNV accelerationStructure, size_t dataSize, void* pData, VkResult result) {};
- virtual bool PreCallValidateCmdWriteAccelerationStructuresPropertiesNV(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureNV* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery) { return false; };
- virtual void PreCallRecordCmdWriteAccelerationStructuresPropertiesNV(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureNV* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery) {};
- virtual void PostCallRecordCmdWriteAccelerationStructuresPropertiesNV(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureNV* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery) {};
- virtual bool PreCallValidateCompileDeferredNV(VkDevice device, VkPipeline pipeline, uint32_t shader) { return false; };
- virtual void PreCallRecordCompileDeferredNV(VkDevice device, VkPipeline pipeline, uint32_t shader) {};
- virtual void PostCallRecordCompileDeferredNV(VkDevice device, VkPipeline pipeline, uint32_t shader, VkResult result) {};
- virtual bool PreCallValidateGetMemoryHostPointerPropertiesEXT(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties) { return false; };
- virtual void PreCallRecordGetMemoryHostPointerPropertiesEXT(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties) {};
- virtual void PostCallRecordGetMemoryHostPointerPropertiesEXT(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties, VkResult result) {};
- virtual bool PreCallValidateCmdWriteBufferMarkerAMD(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker) { return false; };
- virtual void PreCallRecordCmdWriteBufferMarkerAMD(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker) {};
- virtual void PostCallRecordCmdWriteBufferMarkerAMD(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker) {};
- virtual bool PreCallValidateGetPhysicalDeviceCalibrateableTimeDomainsEXT(VkPhysicalDevice physicalDevice, uint32_t* pTimeDomainCount, VkTimeDomainEXT* pTimeDomains) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceCalibrateableTimeDomainsEXT(VkPhysicalDevice physicalDevice, uint32_t* pTimeDomainCount, VkTimeDomainEXT* pTimeDomains) {};
- virtual void PostCallRecordGetPhysicalDeviceCalibrateableTimeDomainsEXT(VkPhysicalDevice physicalDevice, uint32_t* pTimeDomainCount, VkTimeDomainEXT* pTimeDomains, VkResult result) {};
- virtual bool PreCallValidateGetCalibratedTimestampsEXT(VkDevice device, uint32_t timestampCount, const VkCalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation) { return false; };
- virtual void PreCallRecordGetCalibratedTimestampsEXT(VkDevice device, uint32_t timestampCount, const VkCalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation) {};
- virtual void PostCallRecordGetCalibratedTimestampsEXT(VkDevice device, uint32_t timestampCount, const VkCalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation, VkResult result) {};
- virtual bool PreCallValidateCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask) { return false; };
- virtual void PreCallRecordCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask) {};
- virtual void PostCallRecordCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask) {};
- virtual bool PreCallValidateCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride) { return false; };
- virtual void PreCallRecordCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {};
- virtual void PostCallRecordCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {};
- virtual bool PreCallValidateCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) { return false; };
- virtual void PreCallRecordCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) {};
- virtual void PostCallRecordCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) {};
- virtual bool PreCallValidateCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VkRect2D* pExclusiveScissors) { return false; };
- virtual void PreCallRecordCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VkRect2D* pExclusiveScissors) {};
- virtual void PostCallRecordCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VkRect2D* pExclusiveScissors) {};
- virtual bool PreCallValidateCmdSetCheckpointNV(VkCommandBuffer commandBuffer, const void* pCheckpointMarker) { return false; };
- virtual void PreCallRecordCmdSetCheckpointNV(VkCommandBuffer commandBuffer, const void* pCheckpointMarker) {};
- virtual void PostCallRecordCmdSetCheckpointNV(VkCommandBuffer commandBuffer, const void* pCheckpointMarker) {};
- virtual bool PreCallValidateGetQueueCheckpointDataNV(VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointDataNV* pCheckpointData) { return false; };
- virtual void PreCallRecordGetQueueCheckpointDataNV(VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointDataNV* pCheckpointData) {};
- virtual void PostCallRecordGetQueueCheckpointDataNV(VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointDataNV* pCheckpointData) {};
- virtual bool PreCallValidateInitializePerformanceApiINTEL(VkDevice device, const VkInitializePerformanceApiInfoINTEL* pInitializeInfo) { return false; };
- virtual void PreCallRecordInitializePerformanceApiINTEL(VkDevice device, const VkInitializePerformanceApiInfoINTEL* pInitializeInfo) {};
- virtual void PostCallRecordInitializePerformanceApiINTEL(VkDevice device, const VkInitializePerformanceApiInfoINTEL* pInitializeInfo, VkResult result) {};
- virtual bool PreCallValidateUninitializePerformanceApiINTEL(VkDevice device) { return false; };
- virtual void PreCallRecordUninitializePerformanceApiINTEL(VkDevice device) {};
- virtual void PostCallRecordUninitializePerformanceApiINTEL(VkDevice device) {};
- virtual bool PreCallValidateCmdSetPerformanceMarkerINTEL(VkCommandBuffer commandBuffer, const VkPerformanceMarkerInfoINTEL* pMarkerInfo) { return false; };
- virtual void PreCallRecordCmdSetPerformanceMarkerINTEL(VkCommandBuffer commandBuffer, const VkPerformanceMarkerInfoINTEL* pMarkerInfo) {};
- virtual void PostCallRecordCmdSetPerformanceMarkerINTEL(VkCommandBuffer commandBuffer, const VkPerformanceMarkerInfoINTEL* pMarkerInfo, VkResult result) {};
- virtual bool PreCallValidateCmdSetPerformanceStreamMarkerINTEL(VkCommandBuffer commandBuffer, const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo) { return false; };
- virtual void PreCallRecordCmdSetPerformanceStreamMarkerINTEL(VkCommandBuffer commandBuffer, const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo) {};
- virtual void PostCallRecordCmdSetPerformanceStreamMarkerINTEL(VkCommandBuffer commandBuffer, const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo, VkResult result) {};
- virtual bool PreCallValidateCmdSetPerformanceOverrideINTEL(VkCommandBuffer commandBuffer, const VkPerformanceOverrideInfoINTEL* pOverrideInfo) { return false; };
- virtual void PreCallRecordCmdSetPerformanceOverrideINTEL(VkCommandBuffer commandBuffer, const VkPerformanceOverrideInfoINTEL* pOverrideInfo) {};
- virtual void PostCallRecordCmdSetPerformanceOverrideINTEL(VkCommandBuffer commandBuffer, const VkPerformanceOverrideInfoINTEL* pOverrideInfo, VkResult result) {};
- virtual bool PreCallValidateAcquirePerformanceConfigurationINTEL(VkDevice device, const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VkPerformanceConfigurationINTEL* pConfiguration) { return false; };
- virtual void PreCallRecordAcquirePerformanceConfigurationINTEL(VkDevice device, const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VkPerformanceConfigurationINTEL* pConfiguration) {};
- virtual void PostCallRecordAcquirePerformanceConfigurationINTEL(VkDevice device, const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VkPerformanceConfigurationINTEL* pConfiguration, VkResult result) {};
- virtual bool PreCallValidateReleasePerformanceConfigurationINTEL(VkDevice device, VkPerformanceConfigurationINTEL configuration) { return false; };
- virtual void PreCallRecordReleasePerformanceConfigurationINTEL(VkDevice device, VkPerformanceConfigurationINTEL configuration) {};
- virtual void PostCallRecordReleasePerformanceConfigurationINTEL(VkDevice device, VkPerformanceConfigurationINTEL configuration, VkResult result) {};
- virtual bool PreCallValidateQueueSetPerformanceConfigurationINTEL(VkQueue queue, VkPerformanceConfigurationINTEL configuration) { return false; };
- virtual void PreCallRecordQueueSetPerformanceConfigurationINTEL(VkQueue queue, VkPerformanceConfigurationINTEL configuration) {};
- virtual void PostCallRecordQueueSetPerformanceConfigurationINTEL(VkQueue queue, VkPerformanceConfigurationINTEL configuration, VkResult result) {};
- virtual bool PreCallValidateGetPerformanceParameterINTEL(VkDevice device, VkPerformanceParameterTypeINTEL parameter, VkPerformanceValueINTEL* pValue) { return false; };
- virtual void PreCallRecordGetPerformanceParameterINTEL(VkDevice device, VkPerformanceParameterTypeINTEL parameter, VkPerformanceValueINTEL* pValue) {};
- virtual void PostCallRecordGetPerformanceParameterINTEL(VkDevice device, VkPerformanceParameterTypeINTEL parameter, VkPerformanceValueINTEL* pValue, VkResult result) {};
- virtual bool PreCallValidateSetLocalDimmingAMD(VkDevice device, VkSwapchainKHR swapChain, VkBool32 localDimmingEnable) { return false; };
- virtual void PreCallRecordSetLocalDimmingAMD(VkDevice device, VkSwapchainKHR swapChain, VkBool32 localDimmingEnable) {};
- virtual void PostCallRecordSetLocalDimmingAMD(VkDevice device, VkSwapchainKHR swapChain, VkBool32 localDimmingEnable) {};
-#ifdef VK_USE_PLATFORM_FUCHSIA
- virtual bool PreCallValidateCreateImagePipeSurfaceFUCHSIA(VkInstance instance, const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) { return false; };
- virtual void PreCallRecordCreateImagePipeSurfaceFUCHSIA(VkInstance instance, const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) {};
- virtual void PostCallRecordCreateImagePipeSurfaceFUCHSIA(VkInstance instance, const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface, VkResult result) {};
-#endif
-#ifdef VK_USE_PLATFORM_METAL_EXT
- virtual bool PreCallValidateCreateMetalSurfaceEXT(VkInstance instance, const VkMetalSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) { return false; };
- virtual void PreCallRecordCreateMetalSurfaceEXT(VkInstance instance, const VkMetalSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) {};
- virtual void PostCallRecordCreateMetalSurfaceEXT(VkInstance instance, const VkMetalSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface, VkResult result) {};
-#endif
- virtual bool PreCallValidateGetBufferDeviceAddressEXT(VkDevice device, const VkBufferDeviceAddressInfoEXT* pInfo) { return false; };
- virtual void PreCallRecordGetBufferDeviceAddressEXT(VkDevice device, const VkBufferDeviceAddressInfoEXT* pInfo) {};
- virtual void PostCallRecordGetBufferDeviceAddressEXT(VkDevice device, const VkBufferDeviceAddressInfoEXT* pInfo) {};
- virtual bool PreCallValidateGetPhysicalDeviceCooperativeMatrixPropertiesNV(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkCooperativeMatrixPropertiesNV* pProperties) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceCooperativeMatrixPropertiesNV(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkCooperativeMatrixPropertiesNV* pProperties) {};
- virtual void PostCallRecordGetPhysicalDeviceCooperativeMatrixPropertiesNV(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkCooperativeMatrixPropertiesNV* pProperties, VkResult result) {};
- virtual bool PreCallValidateGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(VkPhysicalDevice physicalDevice, uint32_t* pCombinationCount, VkFramebufferMixedSamplesCombinationNV* pCombinations) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(VkPhysicalDevice physicalDevice, uint32_t* pCombinationCount, VkFramebufferMixedSamplesCombinationNV* pCombinations) {};
- virtual void PostCallRecordGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(VkPhysicalDevice physicalDevice, uint32_t* pCombinationCount, VkFramebufferMixedSamplesCombinationNV* pCombinations, VkResult result) {};
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- virtual bool PreCallValidateGetPhysicalDeviceSurfacePresentModes2EXT(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes) { return false; };
- virtual void PreCallRecordGetPhysicalDeviceSurfacePresentModes2EXT(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes) {};
- virtual void PostCallRecordGetPhysicalDeviceSurfacePresentModes2EXT(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes, VkResult result) {};
-#endif
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- virtual bool PreCallValidateAcquireFullScreenExclusiveModeEXT(VkDevice device, VkSwapchainKHR swapchain) { return false; };
- virtual void PreCallRecordAcquireFullScreenExclusiveModeEXT(VkDevice device, VkSwapchainKHR swapchain) {};
- virtual void PostCallRecordAcquireFullScreenExclusiveModeEXT(VkDevice device, VkSwapchainKHR swapchain, VkResult result) {};
-#endif
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- virtual bool PreCallValidateReleaseFullScreenExclusiveModeEXT(VkDevice device, VkSwapchainKHR swapchain) { return false; };
- virtual void PreCallRecordReleaseFullScreenExclusiveModeEXT(VkDevice device, VkSwapchainKHR swapchain) {};
- virtual void PostCallRecordReleaseFullScreenExclusiveModeEXT(VkDevice device, VkSwapchainKHR swapchain, VkResult result) {};
-#endif
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- virtual bool PreCallValidateGetDeviceGroupSurfacePresentModes2EXT(VkDevice device, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkDeviceGroupPresentModeFlagsKHR* pModes) { return false; };
- virtual void PreCallRecordGetDeviceGroupSurfacePresentModes2EXT(VkDevice device, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkDeviceGroupPresentModeFlagsKHR* pModes) {};
- virtual void PostCallRecordGetDeviceGroupSurfacePresentModes2EXT(VkDevice device, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkDeviceGroupPresentModeFlagsKHR* pModes, VkResult result) {};
-#endif
- virtual bool PreCallValidateCreateHeadlessSurfaceEXT(VkInstance instance, const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) { return false; };
- virtual void PreCallRecordCreateHeadlessSurfaceEXT(VkInstance instance, const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) {};
- virtual void PostCallRecordCreateHeadlessSurfaceEXT(VkInstance instance, const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface, VkResult result) {};
- virtual bool PreCallValidateCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern) { return false; };
- virtual void PreCallRecordCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern) {};
- virtual void PostCallRecordCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern) {};
- virtual bool PreCallValidateResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount) { return false; };
- virtual void PreCallRecordResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount) {};
- virtual void PostCallRecordResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount) {};
-
- virtual VkResult CoreLayerCreateValidationCacheEXT(VkDevice device, const VkValidationCacheCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkValidationCacheEXT* pValidationCache) { return VK_SUCCESS; };
- virtual void CoreLayerDestroyValidationCacheEXT(VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks* pAllocator) {};
- virtual VkResult CoreLayerMergeValidationCachesEXT(VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount, const VkValidationCacheEXT* pSrcCaches) { return VK_SUCCESS; };
- virtual VkResult CoreLayerGetValidationCacheDataEXT(VkDevice device, VkValidationCacheEXT validationCache, size_t* pDataSize, void* pData) { return VK_SUCCESS; };
-
- // Allow additional state parameter for CreateGraphicsPipelines
- virtual bool PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, void* cgpl_state) {
- return PreCallValidateCreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
- };
- virtual void PreCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, void* cgpl_state) {
- PreCallRecordCreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
- };
- virtual void PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, VkResult result, void* cgpl_state) {
- PostCallRecordCreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, result);
- };
-
- // Allow additional state parameter for CreateComputePipelines
- virtual bool PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, void* pipe_state) {
- return PreCallValidateCreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
- };
- virtual void PreCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, void* ccpl_state) {
- PreCallRecordCreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
- };
- virtual void PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, VkResult result, void* pipe_state) {
- PostCallRecordCreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, result);
- };
-
- // Allow additional state parameter for CreateRayTracingPipelinesNV
- virtual bool PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, void* pipe_state) {
- return PreCallValidateCreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
- };
- virtual void PreCallRecordCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, void* ccpl_state) {
- PreCallRecordCreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
- };
- virtual void PostCallRecordCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, VkResult result, void* pipe_state) {
- PostCallRecordCreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, result);
- };
-
- // Allow modification of a down-chain parameter for CreatePipelineLayout
- virtual void PreCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout, void *cpl_state) {
- PreCallRecordCreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout);
- };
-
-    // Enable the CreateShaderModule API to take an extra argument for state preservation and parameter modification
- virtual bool PreCallValidateCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule, void* csm_state) {
- return PreCallValidateCreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule);
- };
- virtual void PreCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule, void* csm_state) {
- PreCallRecordCreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule);
- };
- virtual void PostCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule, VkResult result, void* csm_state) {
- PostCallRecordCreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule, result);
- };
-
- // Allow AllocateDescriptorSets to use some local stack storage for performance purposes
- virtual bool PreCallValidateAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets, void* ads_state) {
- return PreCallValidateAllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets);
- };
- virtual void PostCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets, VkResult result, void* ads_state) {
- PostCallRecordAllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets, result);
- };
-
- // Modify a parameter to CreateDevice
- virtual void PreCallRecordCreateDevice(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice, safe_VkDeviceCreateInfo *modified_create_info) {
- PreCallRecordCreateDevice(physicalDevice, pCreateInfo, pAllocator, pDevice);
- };
-};
-
-extern std::unordered_map<void*, ValidationObject*> layer_data_map;
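Every hook in the ValidationObject interface above is a no-op virtual, so a concrete validation object only overrides the entry points it actually inspects; the extra-state overloads near the end simply forward to the plain versions unless a subclass needs the additional parameter. A minimal sketch of that override pattern, assuming a hypothetical checker class (the class name and the checks themselves are illustrative, not part of the deleted header):

    // Sketch only: derive from ValidationObject and override the hooks of interest.
    // "MeshDrawChecker" is a made-up name; real layer objects are registered with the chassis elsewhere.
    class MeshDrawChecker : public ValidationObject {
      public:
        bool PreCallValidateCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount,
                                               uint32_t firstTask) override {
            // Pre-validate hooks return a skip flag; returning true asks the chassis
            // to suppress the down-chain driver call for this command.
            bool skip = false;
            if (taskCount == 0) {
                // A real check would emit a validation message here; this sketch stays inert.
            }
            return skip;
        }
        void PostCallRecordCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount,
                                              uint32_t firstTask) override {
            // Post-record hooks run after the driver call, typically to update tracked state.
        }
    };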
diff --git a/layers/generated/layer_chassis_dispatch.cpp b/layers/generated/layer_chassis_dispatch.cpp
deleted file mode 100644
index 36b6f9e3f..000000000
--- a/layers/generated/layer_chassis_dispatch.cpp
+++ /dev/null
@@ -1,6778 +0,0 @@
-
-// This file is ***GENERATED***. Do Not Edit.
-// See layer_chassis_dispatch_generator.py for modifications.
-
-/* Copyright (c) 2015-2019 The Khronos Group Inc.
- * Copyright (c) 2015-2019 Valve Corporation
- * Copyright (c) 2015-2019 LunarG, Inc.
- * Copyright (c) 2015-2019 Google Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * Author: Mark Lobodzinski <mark@lunarg.com>
- */
-
-#include <mutex>
-#include "chassis.h"
-#include "layer_chassis_dispatch.h"
-#include "vk_layer_utils.h"
-
-// This intentionally includes a cpp file
-#include "vk_safe_struct.cpp"
-
-// shared_mutex support added in MSVC 2015 update 2
-#if defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && NTDDI_VERSION > NTDDI_WIN10_RS2
- #include <shared_mutex>
- typedef std::shared_mutex dispatch_lock_t;
- typedef std::shared_lock<dispatch_lock_t> read_dispatch_lock_guard_t;
- typedef std::unique_lock<dispatch_lock_t> write_dispatch_lock_guard_t;
-#else
- typedef std::mutex dispatch_lock_t;
- typedef std::unique_lock<dispatch_lock_t> read_dispatch_lock_guard_t;
- typedef std::unique_lock<dispatch_lock_t> write_dispatch_lock_guard_t;
-#endif
-dispatch_lock_t dispatch_lock;
-
-// Unique Objects pNext extension handling function
-void WrapPnextChainHandles(ValidationObject *layer_data, const void *pNext) {
- void *cur_pnext = const_cast<void *>(pNext);
- while (cur_pnext != NULL) {
- VkBaseOutStructure *header = reinterpret_cast<VkBaseOutStructure *>(cur_pnext);
-
- switch (header->sType) {
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- case VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR: {
- safe_VkWin32KeyedMutexAcquireReleaseInfoKHR *safe_struct = reinterpret_cast<safe_VkWin32KeyedMutexAcquireReleaseInfoKHR *>(cur_pnext);
- if (safe_struct->pAcquireSyncs) {
- for (uint32_t index0 = 0; index0 < safe_struct->acquireCount; ++index0) {
- safe_struct->pAcquireSyncs[index0] = layer_data->Unwrap(safe_struct->pAcquireSyncs[index0]);
- }
- }
- if (safe_struct->pReleaseSyncs) {
- for (uint32_t index0 = 0; index0 < safe_struct->releaseCount; ++index0) {
- safe_struct->pReleaseSyncs[index0] = layer_data->Unwrap(safe_struct->pReleaseSyncs[index0]);
- }
- }
- } break;
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- case VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV: {
- safe_VkWin32KeyedMutexAcquireReleaseInfoNV *safe_struct = reinterpret_cast<safe_VkWin32KeyedMutexAcquireReleaseInfoNV *>(cur_pnext);
- if (safe_struct->pAcquireSyncs) {
- for (uint32_t index0 = 0; index0 < safe_struct->acquireCount; ++index0) {
- safe_struct->pAcquireSyncs[index0] = layer_data->Unwrap(safe_struct->pAcquireSyncs[index0]);
- }
- }
- if (safe_struct->pReleaseSyncs) {
- for (uint32_t index0 = 0; index0 < safe_struct->releaseCount; ++index0) {
- safe_struct->pReleaseSyncs[index0] = layer_data->Unwrap(safe_struct->pReleaseSyncs[index0]);
- }
- }
- } break;
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
- case VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV: {
- safe_VkDedicatedAllocationMemoryAllocateInfoNV *safe_struct = reinterpret_cast<safe_VkDedicatedAllocationMemoryAllocateInfoNV *>(cur_pnext);
- if (safe_struct->image) {
- safe_struct->image = layer_data->Unwrap(safe_struct->image);
- }
- if (safe_struct->buffer) {
- safe_struct->buffer = layer_data->Unwrap(safe_struct->buffer);
- }
- } break;
-
- case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO: {
- safe_VkMemoryDedicatedAllocateInfo *safe_struct = reinterpret_cast<safe_VkMemoryDedicatedAllocateInfo *>(cur_pnext);
- if (safe_struct->image) {
- safe_struct->image = layer_data->Unwrap(safe_struct->image);
- }
- if (safe_struct->buffer) {
- safe_struct->buffer = layer_data->Unwrap(safe_struct->buffer);
- }
- } break;
-
- case VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR: {
- safe_VkImageSwapchainCreateInfoKHR *safe_struct = reinterpret_cast<safe_VkImageSwapchainCreateInfoKHR *>(cur_pnext);
- if (safe_struct->swapchain) {
- safe_struct->swapchain = layer_data->Unwrap(safe_struct->swapchain);
- }
- } break;
-
- case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO: {
- safe_VkSamplerYcbcrConversionInfo *safe_struct = reinterpret_cast<safe_VkSamplerYcbcrConversionInfo *>(cur_pnext);
- if (safe_struct->conversion) {
- safe_struct->conversion = layer_data->Unwrap(safe_struct->conversion);
- }
- } break;
-
- case VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT: {
- safe_VkShaderModuleValidationCacheCreateInfoEXT *safe_struct = reinterpret_cast<safe_VkShaderModuleValidationCacheCreateInfoEXT *>(cur_pnext);
- if (safe_struct->validationCache) {
- safe_struct->validationCache = layer_data->Unwrap(safe_struct->validationCache);
- }
- } break;
-
- case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV: {
- safe_VkWriteDescriptorSetAccelerationStructureNV *safe_struct = reinterpret_cast<safe_VkWriteDescriptorSetAccelerationStructureNV *>(cur_pnext);
- if (safe_struct->pAccelerationStructures) {
- for (uint32_t index0 = 0; index0 < safe_struct->accelerationStructureCount; ++index0) {
- safe_struct->pAccelerationStructures[index0] = layer_data->Unwrap(safe_struct->pAccelerationStructures[index0]);
- }
- }
- } break;
-
- case VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR: {
- safe_VkRenderPassAttachmentBeginInfoKHR *safe_struct = reinterpret_cast<safe_VkRenderPassAttachmentBeginInfoKHR *>(cur_pnext);
- if (safe_struct->pAttachments) {
- for (uint32_t index0 = 0; index0 < safe_struct->attachmentCount; ++index0) {
- safe_struct->pAttachments[index0] = layer_data->Unwrap(safe_struct->pAttachments[index0]);
- }
- }
- } break;
-
- case VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR: {
- safe_VkBindImageMemorySwapchainInfoKHR *safe_struct = reinterpret_cast<safe_VkBindImageMemorySwapchainInfoKHR *>(cur_pnext);
- if (safe_struct->swapchain) {
- safe_struct->swapchain = layer_data->Unwrap(safe_struct->swapchain);
- }
- } break;
-
- default:
- break;
- }
-
- // Process the next structure in the chain
- cur_pnext = header->pNext;
- }
-}
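WrapPnextChainHandles above walks an already deep-copied pNext chain and replaces the wrapped handles it recognizes with the unwrapped driver handles. The generated dispatch routines pair it with safe-struct copies and the Unwrap/WrapNew helpers seen below; a rough sketch of that calling pattern, assuming handle wrapping is enabled, is shown here. It is not a verbatim copy of any generated function, and the function name is illustrative:

    // Sketch of the common dispatch pattern: copy the input, unwrap handles
    // (including those carried in pNext extensions), call down, wrap new handles.
    VkResult DispatchAllocateMemorySketch(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
                                          const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory) {
        auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
        if (!wrap_handles)
            return layer_data->device_dispatch_table.AllocateMemory(device, pAllocateInfo, pAllocator, pMemory);
        safe_VkMemoryAllocateInfo local_allocate_info(pAllocateInfo);  // deep copy, including the pNext chain
        // Dedicated-allocation and similar extension structs may carry handles in pNext.
        WrapPnextChainHandles(layer_data, local_allocate_info.pNext);
        VkResult result = layer_data->device_dispatch_table.AllocateMemory(device, local_allocate_info.ptr(),
                                                                           pAllocator, pMemory);
        if (result == VK_SUCCESS) {
            *pMemory = layer_data->WrapNew(*pMemory);  // return a wrapped handle to the caller
        }
        return result;
    }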
-
-
-// Manually written Dispatch routines
-
-VkResult DispatchCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
- const VkComputePipelineCreateInfo *pCreateInfos,
- const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CreateComputePipelines(device, pipelineCache, createInfoCount,
- pCreateInfos, pAllocator, pPipelines);
- safe_VkComputePipelineCreateInfo *local_pCreateInfos = NULL;
- if (pCreateInfos) {
- local_pCreateInfos = new safe_VkComputePipelineCreateInfo[createInfoCount];
- for (uint32_t idx0 = 0; idx0 < createInfoCount; ++idx0) {
- local_pCreateInfos[idx0].initialize(&pCreateInfos[idx0]);
- if (pCreateInfos[idx0].basePipelineHandle) {
- local_pCreateInfos[idx0].basePipelineHandle = layer_data->Unwrap(pCreateInfos[idx0].basePipelineHandle);
- }
- if (pCreateInfos[idx0].layout) {
- local_pCreateInfos[idx0].layout = layer_data->Unwrap(pCreateInfos[idx0].layout);
- }
- if (pCreateInfos[idx0].stage.module) {
- local_pCreateInfos[idx0].stage.module = layer_data->Unwrap(pCreateInfos[idx0].stage.module);
- }
- }
- }
- if (pipelineCache) {
- pipelineCache = layer_data->Unwrap(pipelineCache);
- }
-
- VkResult result = layer_data->device_dispatch_table.CreateComputePipelines(device, pipelineCache, createInfoCount,
- local_pCreateInfos->ptr(), pAllocator, pPipelines);
- delete[] local_pCreateInfos;
- {
- for (uint32_t i = 0; i < createInfoCount; ++i) {
- if (pPipelines[i] != VK_NULL_HANDLE) {
- pPipelines[i] = layer_data->WrapNew(pPipelines[i]);
- }
- }
- }
- return result;
-}
-
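- // Graphics pipelines additionally look up renderpasses_states (under the dispatch read
- // lock) to tell the safe-struct copy whether the target subpass uses color and/or
- // depth/stencil attachments; initialize() takes these flags, presumably so it can skip
- // copying state that the subpass does not consume.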
-VkResult DispatchCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
- const VkGraphicsPipelineCreateInfo *pCreateInfos,
- const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CreateGraphicsPipelines(device, pipelineCache, createInfoCount,
- pCreateInfos, pAllocator, pPipelines);
- safe_VkGraphicsPipelineCreateInfo *local_pCreateInfos = nullptr;
- if (pCreateInfos) {
- local_pCreateInfos = new safe_VkGraphicsPipelineCreateInfo[createInfoCount];
- read_dispatch_lock_guard_t lock(dispatch_lock);
- for (uint32_t idx0 = 0; idx0 < createInfoCount; ++idx0) {
- bool uses_color_attachment = false;
- bool uses_depthstencil_attachment = false;
- {
- const auto subpasses_uses_it = layer_data->renderpasses_states.find(layer_data->Unwrap(pCreateInfos[idx0].renderPass));
- if (subpasses_uses_it != layer_data->renderpasses_states.end()) {
- const auto &subpasses_uses = subpasses_uses_it->second;
- if (subpasses_uses.subpasses_using_color_attachment.count(pCreateInfos[idx0].subpass))
- uses_color_attachment = true;
- if (subpasses_uses.subpasses_using_depthstencil_attachment.count(pCreateInfos[idx0].subpass))
- uses_depthstencil_attachment = true;
- }
- }
-
- local_pCreateInfos[idx0].initialize(&pCreateInfos[idx0], uses_color_attachment, uses_depthstencil_attachment);
-
- if (pCreateInfos[idx0].basePipelineHandle) {
- local_pCreateInfos[idx0].basePipelineHandle = layer_data->Unwrap(pCreateInfos[idx0].basePipelineHandle);
- }
- if (pCreateInfos[idx0].layout) {
- local_pCreateInfos[idx0].layout = layer_data->Unwrap(pCreateInfos[idx0].layout);
- }
- if (pCreateInfos[idx0].pStages) {
- for (uint32_t idx1 = 0; idx1 < pCreateInfos[idx0].stageCount; ++idx1) {
- if (pCreateInfos[idx0].pStages[idx1].module) {
- local_pCreateInfos[idx0].pStages[idx1].module = layer_data->Unwrap(pCreateInfos[idx0].pStages[idx1].module);
- }
- }
- }
- if (pCreateInfos[idx0].renderPass) {
- local_pCreateInfos[idx0].renderPass = layer_data->Unwrap(pCreateInfos[idx0].renderPass);
- }
- }
- }
- if (pipelineCache) {
- pipelineCache = layer_data->Unwrap(pipelineCache);
- }
-
- VkResult result = layer_data->device_dispatch_table.CreateGraphicsPipelines(device, pipelineCache, createInfoCount,
- local_pCreateInfos->ptr(), pAllocator, pPipelines);
- delete[] local_pCreateInfos;
- {
- for (uint32_t i = 0; i < createInfoCount; ++i) {
- if (pPipelines[i] != VK_NULL_HANDLE) {
- pPipelines[i] = layer_data->WrapNew(pPipelines[i]);
- }
- }
- }
- return result;
-}
-
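- // Record, for each subpass of a newly created render pass, whether it uses any color or
- // depth/stencil attachments. The map is keyed by the handle value passed in here, i.e.
- // the unwrapped handle, since the Create*RenderPass dispatchers call this before
- // wrapping *pRenderPass.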
-template <typename T>
-static void UpdateCreateRenderPassState(ValidationObject *layer_data, const T *pCreateInfo, VkRenderPass renderPass) {
- auto &renderpass_state = layer_data->renderpasses_states[renderPass];
-
- for (uint32_t subpass = 0; subpass < pCreateInfo->subpassCount; ++subpass) {
- bool uses_color = false;
- for (uint32_t i = 0; i < pCreateInfo->pSubpasses[subpass].colorAttachmentCount && !uses_color; ++i)
- if (pCreateInfo->pSubpasses[subpass].pColorAttachments[i].attachment != VK_ATTACHMENT_UNUSED) uses_color = true;
-
- bool uses_depthstencil = false;
- if (pCreateInfo->pSubpasses[subpass].pDepthStencilAttachment)
- if (pCreateInfo->pSubpasses[subpass].pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED)
- uses_depthstencil = true;
-
- if (uses_color) renderpass_state.subpasses_using_color_attachment.insert(subpass);
- if (uses_depthstencil) renderpass_state.subpasses_using_depthstencil_attachment.insert(subpass);
- }
-}
-
-VkResult DispatchCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
- const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- VkResult result = layer_data->device_dispatch_table.CreateRenderPass(device, pCreateInfo, pAllocator, pRenderPass);
- if (!wrap_handles) return result;
- if (VK_SUCCESS == result) {
- write_dispatch_lock_guard_t lock(dispatch_lock);
- UpdateCreateRenderPassState(layer_data, pCreateInfo, *pRenderPass);
- *pRenderPass = layer_data->WrapNew(*pRenderPass);
- }
- return result;
-}
-
-VkResult DispatchCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
- const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- VkResult result = layer_data->device_dispatch_table.CreateRenderPass2KHR(device, pCreateInfo, pAllocator, pRenderPass);
- if (!wrap_handles) return result;
- if (VK_SUCCESS == result) {
- write_dispatch_lock_guard_t lock(dispatch_lock);
- UpdateCreateRenderPassState(layer_data, pCreateInfo, *pRenderPass);
- *pRenderPass = layer_data->WrapNew(*pRenderPass);
- }
- return result;
-}
-
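- // Destroy routines pop the wrapped handle's entry from unique_id_mapping and forward the
- // original (unwrapped) handle to the driver; renderpasses_states is erased with that
- // unwrapped handle, matching how it was keyed at creation time.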
-void DispatchDestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks *pAllocator) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.DestroyRenderPass(device, renderPass, pAllocator);
- uint64_t renderPass_id = reinterpret_cast<uint64_t &>(renderPass);
-
- auto iter = unique_id_mapping.pop(renderPass_id);
- if (iter != unique_id_mapping.end()) {
- renderPass = (VkRenderPass)iter->second;
- } else {
- renderPass = (VkRenderPass)0;
- }
-
- layer_data->device_dispatch_table.DestroyRenderPass(device, renderPass, pAllocator);
-
- write_dispatch_lock_guard_t lock(dispatch_lock);
- layer_data->renderpasses_states.erase(renderPass);
-}
-
-VkResult DispatchCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
- const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CreateSwapchainKHR(device, pCreateInfo, pAllocator, pSwapchain);
- safe_VkSwapchainCreateInfoKHR *local_pCreateInfo = NULL;
- if (pCreateInfo) {
- local_pCreateInfo = new safe_VkSwapchainCreateInfoKHR(pCreateInfo);
- local_pCreateInfo->oldSwapchain = layer_data->Unwrap(pCreateInfo->oldSwapchain);
- // Surface is an instance-level object
- local_pCreateInfo->surface = layer_data->Unwrap(pCreateInfo->surface);
- }
-
- VkResult result = layer_data->device_dispatch_table.CreateSwapchainKHR(device, local_pCreateInfo->ptr(), pAllocator, pSwapchain);
- delete local_pCreateInfo;
-
- if (VK_SUCCESS == result) {
- *pSwapchain = layer_data->WrapNew(*pSwapchain);
- }
- return result;
-}
-
-VkResult DispatchCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR *pCreateInfos,
- const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchains) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles)
- return layer_data->device_dispatch_table.CreateSharedSwapchainsKHR(device, swapchainCount, pCreateInfos, pAllocator,
- pSwapchains);
- safe_VkSwapchainCreateInfoKHR *local_pCreateInfos = NULL;
- {
- if (pCreateInfos) {
- local_pCreateInfos = new safe_VkSwapchainCreateInfoKHR[swapchainCount];
- for (uint32_t i = 0; i < swapchainCount; ++i) {
- local_pCreateInfos[i].initialize(&pCreateInfos[i]);
- if (pCreateInfos[i].surface) {
- // Surface is an instance-level object
- local_pCreateInfos[i].surface = layer_data->Unwrap(pCreateInfos[i].surface);
- }
- if (pCreateInfos[i].oldSwapchain) {
- local_pCreateInfos[i].oldSwapchain = layer_data->Unwrap(pCreateInfos[i].oldSwapchain);
- }
- }
- }
- }
- VkResult result = layer_data->device_dispatch_table.CreateSharedSwapchainsKHR(device, swapchainCount, local_pCreateInfos->ptr(),
- pAllocator, pSwapchains);
- delete[] local_pCreateInfos;
- if (VK_SUCCESS == result) {
- for (uint32_t i = 0; i < swapchainCount; i++) {
- pSwapchains[i] = layer_data->WrapNew(pSwapchains[i]);
- }
- }
- return result;
-}
-
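- // Swapchain images are not created through a dispatchable Create call, so wrapped
- // handles are cached per (wrapped) swapchain in swapchain_wrapped_image_handle_map and
- // reused on later queries; only images reported for the first time get new unique IDs.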
-VkResult DispatchGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t *pSwapchainImageCount,
- VkImage *pSwapchainImages) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles)
- return layer_data->device_dispatch_table.GetSwapchainImagesKHR(device, swapchain, pSwapchainImageCount, pSwapchainImages);
- VkSwapchainKHR wrapped_swapchain_handle = swapchain;
- if (VK_NULL_HANDLE != swapchain) {
- swapchain = layer_data->Unwrap(swapchain);
- }
- VkResult result =
- layer_data->device_dispatch_table.GetSwapchainImagesKHR(device, swapchain, pSwapchainImageCount, pSwapchainImages);
- if ((VK_SUCCESS == result) || (VK_INCOMPLETE == result)) {
- if ((*pSwapchainImageCount > 0) && pSwapchainImages) {
- write_dispatch_lock_guard_t lock(dispatch_lock);
- auto &wrapped_swapchain_image_handles = layer_data->swapchain_wrapped_image_handle_map[wrapped_swapchain_handle];
- for (uint32_t i = static_cast<uint32_t>(wrapped_swapchain_image_handles.size()); i < *pSwapchainImageCount; i++) {
- wrapped_swapchain_image_handles.emplace_back(layer_data->WrapNew(pSwapchainImages[i]));
- }
- for (uint32_t i = 0; i < *pSwapchainImageCount; i++) {
- pSwapchainImages[i] = wrapped_swapchain_image_handles[i];
- }
- }
- }
- return result;
-}
-
-void DispatchDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks *pAllocator) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.DestroySwapchainKHR(device, swapchain, pAllocator);
- write_dispatch_lock_guard_t lock(dispatch_lock);
-
- auto &image_array = layer_data->swapchain_wrapped_image_handle_map[swapchain];
- for (auto &image_handle : image_array) {
- unique_id_mapping.erase(HandleToUint64(image_handle));
- }
- layer_data->swapchain_wrapped_image_handle_map.erase(swapchain);
- lock.unlock();
-
- uint64_t swapchain_id = HandleToUint64(swapchain);
-
- auto iter = unique_id_mapping.pop(swapchain_id);
- if (iter != unique_id_mapping.end()) {
- swapchain = (VkSwapchainKHR)iter->second;
- } else {
- swapchain = (VkSwapchainKHR)0;
- }
-
- layer_data->device_dispatch_table.DestroySwapchainKHR(device, swapchain, pAllocator);
-}
-
-VkResult DispatchQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.QueuePresentKHR(queue, pPresentInfo);
- safe_VkPresentInfoKHR *local_pPresentInfo = NULL;
- {
- if (pPresentInfo) {
- local_pPresentInfo = new safe_VkPresentInfoKHR(pPresentInfo);
- if (local_pPresentInfo->pWaitSemaphores) {
- for (uint32_t index1 = 0; index1 < local_pPresentInfo->waitSemaphoreCount; ++index1) {
- local_pPresentInfo->pWaitSemaphores[index1] = layer_data->Unwrap(pPresentInfo->pWaitSemaphores[index1]);
- }
- }
- if (local_pPresentInfo->pSwapchains) {
- for (uint32_t index1 = 0; index1 < local_pPresentInfo->swapchainCount; ++index1) {
- local_pPresentInfo->pSwapchains[index1] = layer_data->Unwrap(pPresentInfo->pSwapchains[index1]);
- }
- }
- }
- }
- VkResult result = layer_data->device_dispatch_table.QueuePresentKHR(queue, local_pPresentInfo->ptr());
-
- // pResults is an output array embedded in a structure. The code generator neglects to copy back from the safe_* version,
- // so handle it as a special case here:
- if (pPresentInfo && pPresentInfo->pResults) {
- for (uint32_t i = 0; i < pPresentInfo->swapchainCount; i++) {
- pPresentInfo->pResults[i] = local_pPresentInfo->pResults[i];
- }
- }
- delete local_pPresentInfo;
- return result;
-}
-
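- // Destroying or resetting a descriptor pool implicitly frees its descriptor sets, so the
- // unique-ID mappings of every set tracked in pool_descriptor_sets_map must be erased as
- // well.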
-void DispatchDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks *pAllocator) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.DestroyDescriptorPool(device, descriptorPool, pAllocator);
- write_dispatch_lock_guard_t lock(dispatch_lock);
-
- // Remove references to implicitly freed descriptor sets
- for (auto descriptor_set : layer_data->pool_descriptor_sets_map[descriptorPool]) {
- unique_id_mapping.erase(reinterpret_cast<uint64_t &>(descriptor_set));
- }
- layer_data->pool_descriptor_sets_map.erase(descriptorPool);
- lock.unlock();
-
- uint64_t descriptorPool_id = reinterpret_cast<uint64_t &>(descriptorPool);
-
- auto iter = unique_id_mapping.pop(descriptorPool_id);
- if (iter != unique_id_mapping.end()) {
- descriptorPool = (VkDescriptorPool)iter->second;
- } else {
- descriptorPool = (VkDescriptorPool)0;
- }
-
- layer_data->device_dispatch_table.DestroyDescriptorPool(device, descriptorPool, pAllocator);
-}
-
-VkResult DispatchResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.ResetDescriptorPool(device, descriptorPool, flags);
- VkDescriptorPool local_descriptor_pool = VK_NULL_HANDLE;
- {
- local_descriptor_pool = layer_data->Unwrap(descriptorPool);
- }
- VkResult result = layer_data->device_dispatch_table.ResetDescriptorPool(device, local_descriptor_pool, flags);
- if (VK_SUCCESS == result) {
- write_dispatch_lock_guard_t lock(dispatch_lock);
- // Remove references to implicitly freed descriptor sets
- for (auto descriptor_set : layer_data->pool_descriptor_sets_map[descriptorPool]) {
- unique_id_mapping.erase(reinterpret_cast<uint64_t &>(descriptor_set));
- }
- layer_data->pool_descriptor_sets_map[descriptorPool].clear();
- }
-
- return result;
-}
-
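- // Newly allocated descriptor sets are wrapped and recorded under their (wrapped) pool so
- // they can be cleaned up when the pool is later reset or destroyed.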
-VkResult DispatchAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
- VkDescriptorSet *pDescriptorSets) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.AllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets);
- safe_VkDescriptorSetAllocateInfo *local_pAllocateInfo = NULL;
- {
- if (pAllocateInfo) {
- local_pAllocateInfo = new safe_VkDescriptorSetAllocateInfo(pAllocateInfo);
- if (pAllocateInfo->descriptorPool) {
- local_pAllocateInfo->descriptorPool = layer_data->Unwrap(pAllocateInfo->descriptorPool);
- }
- if (local_pAllocateInfo->pSetLayouts) {
- for (uint32_t index1 = 0; index1 < local_pAllocateInfo->descriptorSetCount; ++index1) {
- local_pAllocateInfo->pSetLayouts[index1] = layer_data->Unwrap(local_pAllocateInfo->pSetLayouts[index1]);
- }
- }
- }
- }
- VkResult result = layer_data->device_dispatch_table.AllocateDescriptorSets(
- device, (const VkDescriptorSetAllocateInfo *)local_pAllocateInfo, pDescriptorSets);
- if (local_pAllocateInfo) {
- delete local_pAllocateInfo;
- }
- if (VK_SUCCESS == result) {
- write_dispatch_lock_guard_t lock(dispatch_lock);
- auto &pool_descriptor_sets = layer_data->pool_descriptor_sets_map[pAllocateInfo->descriptorPool];
- for (uint32_t index0 = 0; index0 < pAllocateInfo->descriptorSetCount; index0++) {
- pDescriptorSets[index0] = layer_data->WrapNew(pDescriptorSets[index0]);
- pool_descriptor_sets.insert(pDescriptorSets[index0]);
- }
- }
- return result;
-}
-
-VkResult DispatchFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount,
- const VkDescriptorSet *pDescriptorSets) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles)
- return layer_data->device_dispatch_table.FreeDescriptorSets(device, descriptorPool, descriptorSetCount, pDescriptorSets);
- VkDescriptorSet *local_pDescriptorSets = NULL;
- VkDescriptorPool local_descriptor_pool = VK_NULL_HANDLE;
- {
- local_descriptor_pool = layer_data->Unwrap(descriptorPool);
- if (pDescriptorSets) {
- local_pDescriptorSets = new VkDescriptorSet[descriptorSetCount];
- for (uint32_t index0 = 0; index0 < descriptorSetCount; ++index0) {
- local_pDescriptorSets[index0] = layer_data->Unwrap(pDescriptorSets[index0]);
- }
- }
- }
- VkResult result = layer_data->device_dispatch_table.FreeDescriptorSets(device, local_descriptor_pool, descriptorSetCount,
- (const VkDescriptorSet *)local_pDescriptorSets);
- if (local_pDescriptorSets) delete[] local_pDescriptorSets;
- if ((VK_SUCCESS == result) && (pDescriptorSets)) {
- write_dispatch_lock_guard_t lock(dispatch_lock);
- auto &pool_descriptor_sets = layer_data->pool_descriptor_sets_map[descriptorPool];
- for (uint32_t index0 = 0; index0 < descriptorSetCount; index0++) {
- VkDescriptorSet handle = pDescriptorSets[index0];
- pool_descriptor_sets.erase(handle);
- uint64_t unique_id = reinterpret_cast<uint64_t &>(handle);
- unique_id_mapping.erase(unique_id);
- }
- }
- return result;
-}
-
-// This is the core version of this routine. The extension version is below.
-VkResult DispatchCreateDescriptorUpdateTemplate(VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
- const VkAllocationCallbacks *pAllocator,
- VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles)
- return layer_data->device_dispatch_table.CreateDescriptorUpdateTemplate(device, pCreateInfo, pAllocator,
- pDescriptorUpdateTemplate);
- safe_VkDescriptorUpdateTemplateCreateInfo *local_create_info = NULL;
- {
- if (pCreateInfo) {
- local_create_info = new safe_VkDescriptorUpdateTemplateCreateInfo(pCreateInfo);
- if (pCreateInfo->descriptorSetLayout) {
- local_create_info->descriptorSetLayout = layer_data->Unwrap(pCreateInfo->descriptorSetLayout);
- }
- if (pCreateInfo->pipelineLayout) {
- local_create_info->pipelineLayout = layer_data->Unwrap(pCreateInfo->pipelineLayout);
- }
- }
- }
- VkResult result = layer_data->device_dispatch_table.CreateDescriptorUpdateTemplate(device, local_create_info->ptr(), pAllocator,
- pDescriptorUpdateTemplate);
- if (VK_SUCCESS == result) {
- write_dispatch_lock_guard_t lock(dispatch_lock);
- *pDescriptorUpdateTemplate = layer_data->WrapNew(*pDescriptorUpdateTemplate);
-
- // Shadow template createInfo for later updates
- std::unique_ptr<TEMPLATE_STATE> template_state(new TEMPLATE_STATE(*pDescriptorUpdateTemplate, local_create_info));
- layer_data->desc_template_createinfo_map[(uint64_t)*pDescriptorUpdateTemplate] = std::move(template_state);
- }
- return result;
-}
-
-// This is the extension version of this routine. The core version is above.
-VkResult DispatchCreateDescriptorUpdateTemplateKHR(VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
- const VkAllocationCallbacks *pAllocator,
- VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles)
- return layer_data->device_dispatch_table.CreateDescriptorUpdateTemplateKHR(device, pCreateInfo, pAllocator,
- pDescriptorUpdateTemplate);
- safe_VkDescriptorUpdateTemplateCreateInfo *local_create_info = NULL;
- {
- if (pCreateInfo) {
- local_create_info = new safe_VkDescriptorUpdateTemplateCreateInfo(pCreateInfo);
- if (pCreateInfo->descriptorSetLayout) {
- local_create_info->descriptorSetLayout = layer_data->Unwrap(pCreateInfo->descriptorSetLayout);
- }
- if (pCreateInfo->pipelineLayout) {
- local_create_info->pipelineLayout = layer_data->Unwrap(pCreateInfo->pipelineLayout);
- }
- }
- }
- VkResult result = layer_data->device_dispatch_table.CreateDescriptorUpdateTemplateKHR(device, local_create_info->ptr(), pAllocator,
- pDescriptorUpdateTemplate);
- if (VK_SUCCESS == result) {
- write_dispatch_lock_guard_t lock(dispatch_lock);
- *pDescriptorUpdateTemplate = layer_data->WrapNew(*pDescriptorUpdateTemplate);
-
- // Shadow template createInfo for later updates
- std::unique_ptr<TEMPLATE_STATE> template_state(new TEMPLATE_STATE(*pDescriptorUpdateTemplate, local_create_info));
- layer_data->desc_template_createinfo_map[(uint64_t)*pDescriptorUpdateTemplate] = std::move(template_state);
- }
- return result;
-}
-
-// This is the core version of this routine. The extension version is below.
-void DispatchDestroyDescriptorUpdateTemplate(VkDevice device, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
- const VkAllocationCallbacks *pAllocator) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles)
- return layer_data->device_dispatch_table.DestroyDescriptorUpdateTemplate(device, descriptorUpdateTemplate, pAllocator);
- write_dispatch_lock_guard_t lock(dispatch_lock);
- uint64_t descriptor_update_template_id = reinterpret_cast<uint64_t &>(descriptorUpdateTemplate);
- layer_data->desc_template_createinfo_map.erase(descriptor_update_template_id);
- lock.unlock();
-
- auto iter = unique_id_mapping.pop(descriptor_update_template_id);
- if (iter != unique_id_mapping.end()) {
- descriptorUpdateTemplate = (VkDescriptorUpdateTemplate)iter->second;
- } else {
- descriptorUpdateTemplate = (VkDescriptorUpdateTemplate)0;
- }
-
- layer_data->device_dispatch_table.DestroyDescriptorUpdateTemplate(device, descriptorUpdateTemplate, pAllocator);
-}
-
-// This is the extension version of this routine. The core version is above.
-void DispatchDestroyDescriptorUpdateTemplateKHR(VkDevice device, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
- const VkAllocationCallbacks *pAllocator) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles)
- return layer_data->device_dispatch_table.DestroyDescriptorUpdateTemplateKHR(device, descriptorUpdateTemplate, pAllocator);
- write_dispatch_lock_guard_t lock(dispatch_lock);
- uint64_t descriptor_update_template_id = reinterpret_cast<uint64_t &>(descriptorUpdateTemplate);
- layer_data->desc_template_createinfo_map.erase(descriptor_update_template_id);
- lock.unlock();
-
- auto iter = unique_id_mapping.pop(descriptor_update_template_id);
- if (iter != unique_id_mapping.end()) {
- descriptorUpdateTemplate = (VkDescriptorUpdateTemplate)iter->second;
- } else {
- descriptorUpdateTemplate = (VkDescriptorUpdateTemplate)0;
- }
-
- layer_data->device_dispatch_table.DestroyDescriptorUpdateTemplateKHR(device, descriptorUpdateTemplate, pAllocator);
-}
-
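- // Rebuild the raw pData blob for a descriptor update template with all embedded handles
- // unwrapped. Pass 1 walks the shadowed create_info entries to compute the required size
- // and collect (offset, object type, data, byte size) tuples; pass 2 writes the unwrapped
- // VkDescriptorImageInfo/VkDescriptorBufferInfo/VkBufferView values (or raw inline
- // uniform block bytes) into a single malloc'd buffer that the caller free()s.
- // Note: the template is expected to have been created through this layer; only an
- // assert() guards the map lookup.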
-void *BuildUnwrappedUpdateTemplateBuffer(ValidationObject *layer_data, uint64_t descriptorUpdateTemplate, const void *pData) {
- auto const template_map_entry = layer_data->desc_template_createinfo_map.find(descriptorUpdateTemplate);
- if (template_map_entry == layer_data->desc_template_createinfo_map.end()) {
- assert(0);
- }
- auto const &create_info = template_map_entry->second->create_info;
- size_t allocation_size = 0;
- std::vector<std::tuple<size_t, VulkanObjectType, uint64_t, size_t>> template_entries;
-
- for (uint32_t i = 0; i < create_info.descriptorUpdateEntryCount; i++) {
- for (uint32_t j = 0; j < create_info.pDescriptorUpdateEntries[i].descriptorCount; j++) {
- size_t offset = create_info.pDescriptorUpdateEntries[i].offset + j * create_info.pDescriptorUpdateEntries[i].stride;
- char *update_entry = (char *)(pData) + offset;
-
- switch (create_info.pDescriptorUpdateEntries[i].descriptorType) {
- case VK_DESCRIPTOR_TYPE_SAMPLER:
- case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
- case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
- case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
- case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: {
- auto image_entry = reinterpret_cast<VkDescriptorImageInfo *>(update_entry);
- allocation_size = std::max(allocation_size, offset + sizeof(VkDescriptorImageInfo));
-
- VkDescriptorImageInfo *wrapped_entry = new VkDescriptorImageInfo(*image_entry);
- wrapped_entry->sampler = layer_data->Unwrap(image_entry->sampler);
- wrapped_entry->imageView = layer_data->Unwrap(image_entry->imageView);
- template_entries.emplace_back(offset, kVulkanObjectTypeImage, CastToUint64(wrapped_entry), 0);
- } break;
-
- case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
- case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
- case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
- case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
- auto buffer_entry = reinterpret_cast<VkDescriptorBufferInfo *>(update_entry);
- allocation_size = std::max(allocation_size, offset + sizeof(VkDescriptorBufferInfo));
-
- VkDescriptorBufferInfo *wrapped_entry = new VkDescriptorBufferInfo(*buffer_entry);
- wrapped_entry->buffer = layer_data->Unwrap(buffer_entry->buffer);
- template_entries.emplace_back(offset, kVulkanObjectTypeBuffer, CastToUint64(wrapped_entry), 0);
- } break;
-
- case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
- case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: {
- auto buffer_view_handle = reinterpret_cast<VkBufferView *>(update_entry);
- allocation_size = std::max(allocation_size, offset + sizeof(VkBufferView));
-
- VkBufferView wrapped_entry = layer_data->Unwrap(*buffer_view_handle);
- template_entries.emplace_back(offset, kVulkanObjectTypeBufferView, CastToUint64(wrapped_entry), 0);
- } break;
- case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT: {
- size_t numBytes = create_info.pDescriptorUpdateEntries[i].descriptorCount;
- allocation_size = std::max(allocation_size, offset + numBytes);
- // nothing to unwrap, just plain data
- template_entries.emplace_back(offset, kVulkanObjectTypeUnknown, CastToUint64(update_entry),
- numBytes);
- // descriptorCount is a byte count for inline uniform blocks and was consumed in
- // one chunk above, so terminate the inner per-descriptor loop
- j = create_info.pDescriptorUpdateEntries[i].descriptorCount;
- } break;
- default:
- assert(0);
- break;
- }
- }
- }
- // Allocate required buffer size and populate with source/unwrapped data
- void *unwrapped_data = malloc(allocation_size);
- for (auto &this_entry : template_entries) {
- VulkanObjectType type = std::get<1>(this_entry);
- void *destination = (char *)unwrapped_data + std::get<0>(this_entry);
- uint64_t source = std::get<2>(this_entry);
- size_t size = std::get<3>(this_entry);
-
- if (size != 0) {
- assert(type == kVulkanObjectTypeUnknown);
- memcpy(destination, CastFromUint64<void *>(source), size);
- } else {
- switch (type) {
- case kVulkanObjectTypeImage:
- *(reinterpret_cast<VkDescriptorImageInfo *>(destination)) =
- *(reinterpret_cast<VkDescriptorImageInfo *>(source));
- delete CastFromUint64<VkDescriptorImageInfo *>(source);
- break;
- case kVulkanObjectTypeBuffer:
- *(reinterpret_cast<VkDescriptorBufferInfo *>(destination)) =
- *(CastFromUint64<VkDescriptorBufferInfo *>(source));
- delete CastFromUint64<VkDescriptorBufferInfo *>(source);
- break;
- case kVulkanObjectTypeBufferView:
- *(reinterpret_cast<VkBufferView *>(destination)) = CastFromUint64<VkBufferView>(source);
- break;
- default:
- assert(0);
- break;
- }
- }
- }
- return (void *)unwrapped_data;
-}
-
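- // The template-update entry points capture the wrapped template handle first (it keys
- // desc_template_createinfo_map), then unwrap the handles passed to the driver and
- // replace pData with the unwrapped buffer built above, freeing it after the call.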
-void DispatchUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
- VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, const void *pData) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles)
- return layer_data->device_dispatch_table.UpdateDescriptorSetWithTemplate(device, descriptorSet, descriptorUpdateTemplate,
- pData);
- uint64_t template_handle = reinterpret_cast<uint64_t &>(descriptorUpdateTemplate);
- void *unwrapped_buffer = nullptr;
- {
- read_dispatch_lock_guard_t lock(dispatch_lock);
- descriptorSet = layer_data->Unwrap(descriptorSet);
- descriptorUpdateTemplate = (VkDescriptorUpdateTemplate)layer_data->Unwrap(descriptorUpdateTemplate);
- unwrapped_buffer = BuildUnwrappedUpdateTemplateBuffer(layer_data, template_handle, pData);
- }
- layer_data->device_dispatch_table.UpdateDescriptorSetWithTemplate(device, descriptorSet, descriptorUpdateTemplate, unwrapped_buffer);
- free(unwrapped_buffer);
-}
-
-void DispatchUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet,
- VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, const void *pData) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles)
- return layer_data->device_dispatch_table.UpdateDescriptorSetWithTemplateKHR(device, descriptorSet, descriptorUpdateTemplate,
- pData);
- uint64_t template_handle = reinterpret_cast<uint64_t &>(descriptorUpdateTemplate);
- void *unwrapped_buffer = nullptr;
- {
- read_dispatch_lock_guard_t lock(dispatch_lock);
- descriptorSet = layer_data->Unwrap(descriptorSet);
- descriptorUpdateTemplate = layer_data->Unwrap(descriptorUpdateTemplate);
- unwrapped_buffer = BuildUnwrappedUpdateTemplateBuffer(layer_data, template_handle, pData);
- }
- layer_data->device_dispatch_table.UpdateDescriptorSetWithTemplateKHR(device, descriptorSet, descriptorUpdateTemplate, unwrapped_buffer);
- free(unwrapped_buffer);
-}
-
-void DispatchCmdPushDescriptorSetWithTemplateKHR(VkCommandBuffer commandBuffer,
- VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, VkPipelineLayout layout,
- uint32_t set, const void *pData) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles)
- return layer_data->device_dispatch_table.CmdPushDescriptorSetWithTemplateKHR(commandBuffer, descriptorUpdateTemplate,
- layout, set, pData);
- uint64_t template_handle = reinterpret_cast<uint64_t &>(descriptorUpdateTemplate);
- void *unwrapped_buffer = nullptr;
- {
- read_dispatch_lock_guard_t lock(dispatch_lock);
- descriptorUpdateTemplate = layer_data->Unwrap(descriptorUpdateTemplate);
- layout = layer_data->Unwrap(layout);
- unwrapped_buffer = BuildUnwrappedUpdateTemplateBuffer(layer_data, template_handle, pData);
- }
- layer_data->device_dispatch_table.CmdPushDescriptorSetWithTemplateKHR(commandBuffer, descriptorUpdateTemplate, layout, set,
- unwrapped_buffer);
- free(unwrapped_buffer);
-}
-
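- // VkDisplayKHR handles come from the driver via these queries rather than from a Create
- // call, so they are wrapped on first sight via MaybeWrapDisplay instead of at creation.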
-VkResult DispatchGetPhysicalDeviceDisplayPropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount,
- VkDisplayPropertiesKHR *pProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- VkResult result =
- layer_data->instance_dispatch_table.GetPhysicalDeviceDisplayPropertiesKHR(physicalDevice, pPropertyCount, pProperties);
- if (!wrap_handles) return result;
- if ((result == VK_SUCCESS || result == VK_INCOMPLETE) && pProperties) {
- for (uint32_t idx0 = 0; idx0 < *pPropertyCount; ++idx0) {
- pProperties[idx0].display = layer_data->MaybeWrapDisplay(pProperties[idx0].display, layer_data);
- }
- }
- return result;
-}
-
-VkResult DispatchGetPhysicalDeviceDisplayProperties2KHR(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount,
- VkDisplayProperties2KHR *pProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- VkResult result =
- layer_data->instance_dispatch_table.GetPhysicalDeviceDisplayProperties2KHR(physicalDevice, pPropertyCount, pProperties);
- if (!wrap_handles) return result;
- if ((result == VK_SUCCESS || result == VK_INCOMPLETE) && pProperties) {
- for (uint32_t idx0 = 0; idx0 < *pPropertyCount; ++idx0) {
- pProperties[idx0].displayProperties.display =
- layer_data->MaybeWrapDisplay(pProperties[idx0].displayProperties.display, layer_data);
- }
- }
- return result;
-}
-
-VkResult DispatchGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount,
- VkDisplayPlanePropertiesKHR *pProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- VkResult result =
- layer_data->instance_dispatch_table.GetPhysicalDeviceDisplayPlanePropertiesKHR(physicalDevice, pPropertyCount, pProperties);
- if (!wrap_handles) return result;
- if ((result == VK_SUCCESS || result == VK_INCOMPLETE) && pProperties) {
- for (uint32_t idx0 = 0; idx0 < *pPropertyCount; ++idx0) {
- VkDisplayKHR &opt_display = pProperties[idx0].currentDisplay;
- if (opt_display) opt_display = layer_data->MaybeWrapDisplay(opt_display, layer_data);
- }
- }
- return result;
-}
-
-VkResult DispatchGetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount,
- VkDisplayPlaneProperties2KHR *pProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- VkResult result = layer_data->instance_dispatch_table.GetPhysicalDeviceDisplayPlaneProperties2KHR(physicalDevice,
- pPropertyCount, pProperties);
- if (!wrap_handles) return result;
- if ((result == VK_SUCCESS || result == VK_INCOMPLETE) && pProperties) {
- for (uint32_t idx0 = 0; idx0 < *pPropertyCount; ++idx0) {
- VkDisplayKHR &opt_display = pProperties[idx0].displayPlaneProperties.currentDisplay;
- if (opt_display) opt_display = layer_data->MaybeWrapDisplay(opt_display, layer_data);
- }
- }
- return result;
-}
-
-VkResult DispatchGetDisplayPlaneSupportedDisplaysKHR(VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t *pDisplayCount,
- VkDisplayKHR *pDisplays) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- VkResult result = layer_data->instance_dispatch_table.GetDisplayPlaneSupportedDisplaysKHR(physicalDevice, planeIndex,
- pDisplayCount, pDisplays);
- if (!wrap_handles) return result;
- if ((result == VK_SUCCESS || result == VK_INCOMPLETE) && pDisplays) {
- for (uint32_t i = 0; i < *pDisplayCount; ++i) {
- if (pDisplays[i]) pDisplays[i] = layer_data->MaybeWrapDisplay(pDisplays[i], layer_data);
- }
- }
- return result;
-}
-
-VkResult DispatchGetDisplayModePropertiesKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t *pPropertyCount,
- VkDisplayModePropertiesKHR *pProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- if (!wrap_handles)
- return layer_data->instance_dispatch_table.GetDisplayModePropertiesKHR(physicalDevice, display, pPropertyCount,
- pProperties);
- {
- display = layer_data->Unwrap(display);
- }
-
- VkResult result = layer_data->instance_dispatch_table.GetDisplayModePropertiesKHR(physicalDevice, display, pPropertyCount, pProperties);
- if ((result == VK_SUCCESS || result == VK_INCOMPLETE) && pProperties) {
- for (uint32_t idx0 = 0; idx0 < *pPropertyCount; ++idx0) {
- pProperties[idx0].displayMode = layer_data->WrapNew(pProperties[idx0].displayMode);
- }
- }
- return result;
-}
-
-VkResult DispatchGetDisplayModeProperties2KHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t *pPropertyCount,
- VkDisplayModeProperties2KHR *pProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- if (!wrap_handles)
- return layer_data->instance_dispatch_table.GetDisplayModeProperties2KHR(physicalDevice, display, pPropertyCount,
- pProperties);
- {
- display = layer_data->Unwrap(display);
- }
-
- VkResult result =
- layer_data->instance_dispatch_table.GetDisplayModeProperties2KHR(physicalDevice, display, pPropertyCount, pProperties);
- if ((result == VK_SUCCESS || result == VK_INCOMPLETE) && pProperties) {
- for (uint32_t idx0 = 0; idx0 < *pPropertyCount; ++idx0) {
- pProperties[idx0].displayModeProperties.displayMode = layer_data->WrapNew(pProperties[idx0].displayModeProperties.displayMode);
- }
- }
- return result;
-}
-
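- // The debug marker/utils setters carry the target as a raw uint64_t handle of unknown
- // type, so it is translated through unique_id_mapping directly instead of a typed
- // Unwrap; handles that were never wrapped are passed through unchanged.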
-VkResult DispatchDebugMarkerSetObjectTagEXT(VkDevice device, const VkDebugMarkerObjectTagInfoEXT *pTagInfo) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.DebugMarkerSetObjectTagEXT(device, pTagInfo);
- safe_VkDebugMarkerObjectTagInfoEXT local_tag_info(pTagInfo);
- {
- auto it = unique_id_mapping.find(reinterpret_cast<uint64_t &>(local_tag_info.object));
- if (it != unique_id_mapping.end()) {
- local_tag_info.object = it->second;
- }
- }
- VkResult result = layer_data->device_dispatch_table.DebugMarkerSetObjectTagEXT(device,
- reinterpret_cast<VkDebugMarkerObjectTagInfoEXT *>(&local_tag_info));
- return result;
-}
-
-VkResult DispatchDebugMarkerSetObjectNameEXT(VkDevice device, const VkDebugMarkerObjectNameInfoEXT *pNameInfo) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.DebugMarkerSetObjectNameEXT(device, pNameInfo);
- safe_VkDebugMarkerObjectNameInfoEXT local_name_info(pNameInfo);
- {
- auto it = unique_id_mapping.find(reinterpret_cast<uint64_t &>(local_name_info.object));
- if (it != unique_id_mapping.end()) {
- local_name_info.object = it->second;
- }
- }
- VkResult result = layer_data->device_dispatch_table.DebugMarkerSetObjectNameEXT(
- device, reinterpret_cast<VkDebugMarkerObjectNameInfoEXT *>(&local_name_info));
- return result;
-}
-
-// VK_EXT_debug_utils
-VkResult DispatchSetDebugUtilsObjectTagEXT(VkDevice device, const VkDebugUtilsObjectTagInfoEXT *pTagInfo) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.SetDebugUtilsObjectTagEXT(device, pTagInfo);
- safe_VkDebugUtilsObjectTagInfoEXT local_tag_info(pTagInfo);
- {
- auto it = unique_id_mapping.find(reinterpret_cast<uint64_t &>(local_tag_info.objectHandle));
- if (it != unique_id_mapping.end()) {
- local_tag_info.objectHandle = it->second;
- }
- }
- VkResult result = layer_data->device_dispatch_table.SetDebugUtilsObjectTagEXT(
- device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>(&local_tag_info));
- return result;
-}
-
-VkResult DispatchSetDebugUtilsObjectNameEXT(VkDevice device, const VkDebugUtilsObjectNameInfoEXT *pNameInfo) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.SetDebugUtilsObjectNameEXT(device, pNameInfo);
- safe_VkDebugUtilsObjectNameInfoEXT local_name_info(pNameInfo);
- {
- auto it = unique_id_mapping.find(reinterpret_cast<uint64_t &>(local_name_info.objectHandle));
- if (it != unique_id_mapping.end()) {
- local_name_info.objectHandle = it->second;
- }
- }
- VkResult result = layer_data->device_dispatch_table.SetDebugUtilsObjectNameEXT(
- device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>(&local_name_info));
- return result;
-}
-
-
-
-
-// Skip vkCreateInstance dispatch, manually generated
-
-// Skip vkDestroyInstance dispatch, manually generated
-
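- // The remaining dispatch routines follow the generated pattern: unwrap any handle
- // parameters (including handles inside arrays, structs, and pNext chains), call the next
- // layer through the stored dispatch table, and wrap any handles it returns.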
-VkResult DispatchEnumeratePhysicalDevices(
- VkInstance instance,
- uint32_t* pPhysicalDeviceCount,
- VkPhysicalDevice* pPhysicalDevices)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- VkResult result = layer_data->instance_dispatch_table.EnumeratePhysicalDevices(instance, pPhysicalDeviceCount, pPhysicalDevices);
-
- return result;
-}
-
-void DispatchGetPhysicalDeviceFeatures(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceFeatures* pFeatures)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- layer_data->instance_dispatch_table.GetPhysicalDeviceFeatures(physicalDevice, pFeatures);
-
-}
-
-void DispatchGetPhysicalDeviceFormatProperties(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkFormatProperties* pFormatProperties)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- layer_data->instance_dispatch_table.GetPhysicalDeviceFormatProperties(physicalDevice, format, pFormatProperties);
-
-}
-
-VkResult DispatchGetPhysicalDeviceImageFormatProperties(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkImageType type,
- VkImageTiling tiling,
- VkImageUsageFlags usage,
- VkImageCreateFlags flags,
- VkImageFormatProperties* pImageFormatProperties)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- VkResult result = layer_data->instance_dispatch_table.GetPhysicalDeviceImageFormatProperties(physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties);
-
- return result;
-}
-
-void DispatchGetPhysicalDeviceProperties(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceProperties* pProperties)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- layer_data->instance_dispatch_table.GetPhysicalDeviceProperties(physicalDevice, pProperties);
-
-}
-
-void DispatchGetPhysicalDeviceQueueFamilyProperties(
- VkPhysicalDevice physicalDevice,
- uint32_t* pQueueFamilyPropertyCount,
- VkQueueFamilyProperties* pQueueFamilyProperties)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- layer_data->instance_dispatch_table.GetPhysicalDeviceQueueFamilyProperties(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
-
-}
-
-void DispatchGetPhysicalDeviceMemoryProperties(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceMemoryProperties* pMemoryProperties)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- layer_data->instance_dispatch_table.GetPhysicalDeviceMemoryProperties(physicalDevice, pMemoryProperties);
-
-}
-
-PFN_vkVoidFunction DispatchGetInstanceProcAddr(
- VkInstance instance,
- const char* pName)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- PFN_vkVoidFunction result = layer_data->instance_dispatch_table.GetInstanceProcAddr(instance, pName);
-
- return result;
-}
-
-PFN_vkVoidFunction DispatchGetDeviceProcAddr(
- VkDevice device,
- const char* pName)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- PFN_vkVoidFunction result = layer_data->device_dispatch_table.GetDeviceProcAddr(device, pName);
-
- return result;
-}
-
-// Skip vkCreateDevice dispatch, manually generated
-
-// Skip vkDestroyDevice dispatch, manually generated
-
-// Skip vkEnumerateInstanceExtensionProperties dispatch, manually generated
-
-// Skip vkEnumerateDeviceExtensionProperties dispatch, manually generated
-
-// Skip vkEnumerateInstanceLayerProperties dispatch, manually generated
-
-// Skip vkEnumerateDeviceLayerProperties dispatch, manually generated
-
-void DispatchGetDeviceQueue(
- VkDevice device,
- uint32_t queueFamilyIndex,
- uint32_t queueIndex,
- VkQueue* pQueue)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- layer_data->device_dispatch_table.GetDeviceQueue(device, queueFamilyIndex, queueIndex, pQueue);
-
-}
-
-VkResult DispatchQueueSubmit(
- VkQueue queue,
- uint32_t submitCount,
- const VkSubmitInfo* pSubmits,
- VkFence fence)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.QueueSubmit(queue, submitCount, pSubmits, fence);
- safe_VkSubmitInfo *local_pSubmits = NULL;
- {
- if (pSubmits) {
- local_pSubmits = new safe_VkSubmitInfo[submitCount];
- for (uint32_t index0 = 0; index0 < submitCount; ++index0) {
- local_pSubmits[index0].initialize(&pSubmits[index0]);
- WrapPnextChainHandles(layer_data, local_pSubmits[index0].pNext);
- if (local_pSubmits[index0].pWaitSemaphores) {
- for (uint32_t index1 = 0; index1 < local_pSubmits[index0].waitSemaphoreCount; ++index1) {
- local_pSubmits[index0].pWaitSemaphores[index1] = layer_data->Unwrap(local_pSubmits[index0].pWaitSemaphores[index1]);
- }
- }
- if (local_pSubmits[index0].pSignalSemaphores) {
- for (uint32_t index1 = 0; index1 < local_pSubmits[index0].signalSemaphoreCount; ++index1) {
- local_pSubmits[index0].pSignalSemaphores[index1] = layer_data->Unwrap(local_pSubmits[index0].pSignalSemaphores[index1]);
- }
- }
- }
- }
- fence = layer_data->Unwrap(fence);
- }
- VkResult result = layer_data->device_dispatch_table.QueueSubmit(queue, submitCount, (const VkSubmitInfo*)local_pSubmits, fence);
- if (local_pSubmits) {
- delete[] local_pSubmits;
- }
- return result;
-}
-
-VkResult DispatchQueueWaitIdle(
- VkQueue queue)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
- VkResult result = layer_data->device_dispatch_table.QueueWaitIdle(queue);
-
- return result;
-}
-
-VkResult DispatchDeviceWaitIdle(
- VkDevice device)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- VkResult result = layer_data->device_dispatch_table.DeviceWaitIdle(device);
-
- return result;
-}
-
-VkResult DispatchAllocateMemory(
- VkDevice device,
- const VkMemoryAllocateInfo* pAllocateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDeviceMemory* pMemory)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.AllocateMemory(device, pAllocateInfo, pAllocator, pMemory);
- safe_VkMemoryAllocateInfo *local_pAllocateInfo = NULL;
- {
- if (pAllocateInfo) {
- local_pAllocateInfo = new safe_VkMemoryAllocateInfo(pAllocateInfo);
- WrapPnextChainHandles(layer_data, local_pAllocateInfo->pNext);
- }
- }
- VkResult result = layer_data->device_dispatch_table.AllocateMemory(device, (const VkMemoryAllocateInfo*)local_pAllocateInfo, pAllocator, pMemory);
- if (local_pAllocateInfo) {
- delete local_pAllocateInfo;
- }
- if (VK_SUCCESS == result) {
- *pMemory = layer_data->WrapNew(*pMemory);
- }
- return result;
-}
-
-void DispatchFreeMemory(
- VkDevice device,
- VkDeviceMemory memory,
- const VkAllocationCallbacks* pAllocator)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.FreeMemory(device, memory, pAllocator);
- uint64_t memory_id = reinterpret_cast<uint64_t &>(memory);
- auto iter = unique_id_mapping.pop(memory_id);
- if (iter != unique_id_mapping.end()) {
- memory = (VkDeviceMemory)iter->second;
- } else {
- memory = (VkDeviceMemory)0;
- }
- layer_data->device_dispatch_table.FreeMemory(device, memory, pAllocator);
-
-}
-
-VkResult DispatchMapMemory(
- VkDevice device,
- VkDeviceMemory memory,
- VkDeviceSize offset,
- VkDeviceSize size,
- VkMemoryMapFlags flags,
- void** ppData)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.MapMemory(device, memory, offset, size, flags, ppData);
- {
- memory = layer_data->Unwrap(memory);
- }
- VkResult result = layer_data->device_dispatch_table.MapMemory(device, memory, offset, size, flags, ppData);
-
- return result;
-}
-
-void DispatchUnmapMemory(
- VkDevice device,
- VkDeviceMemory memory)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.UnmapMemory(device, memory);
- {
- memory = layer_data->Unwrap(memory);
- }
- layer_data->device_dispatch_table.UnmapMemory(device, memory);
-
-}
-
-VkResult DispatchFlushMappedMemoryRanges(
- VkDevice device,
- uint32_t memoryRangeCount,
- const VkMappedMemoryRange* pMemoryRanges)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.FlushMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
- safe_VkMappedMemoryRange *local_pMemoryRanges = NULL;
- {
- if (pMemoryRanges) {
- local_pMemoryRanges = new safe_VkMappedMemoryRange[memoryRangeCount];
- for (uint32_t index0 = 0; index0 < memoryRangeCount; ++index0) {
- local_pMemoryRanges[index0].initialize(&pMemoryRanges[index0]);
- if (pMemoryRanges[index0].memory) {
- local_pMemoryRanges[index0].memory = layer_data->Unwrap(pMemoryRanges[index0].memory);
- }
- }
- }
- }
- VkResult result = layer_data->device_dispatch_table.FlushMappedMemoryRanges(device, memoryRangeCount, (const VkMappedMemoryRange*)local_pMemoryRanges);
- if (local_pMemoryRanges) {
- delete[] local_pMemoryRanges;
- }
- return result;
-}
-
-VkResult DispatchInvalidateMappedMemoryRanges(
- VkDevice device,
- uint32_t memoryRangeCount,
- const VkMappedMemoryRange* pMemoryRanges)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.InvalidateMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
- safe_VkMappedMemoryRange *local_pMemoryRanges = NULL;
- {
- if (pMemoryRanges) {
- local_pMemoryRanges = new safe_VkMappedMemoryRange[memoryRangeCount];
- for (uint32_t index0 = 0; index0 < memoryRangeCount; ++index0) {
- local_pMemoryRanges[index0].initialize(&pMemoryRanges[index0]);
- if (pMemoryRanges[index0].memory) {
- local_pMemoryRanges[index0].memory = layer_data->Unwrap(pMemoryRanges[index0].memory);
- }
- }
- }
- }
- VkResult result = layer_data->device_dispatch_table.InvalidateMappedMemoryRanges(device, memoryRangeCount, (const VkMappedMemoryRange*)local_pMemoryRanges);
- if (local_pMemoryRanges) {
- delete[] local_pMemoryRanges;
- }
- return result;
-}
-
-void DispatchGetDeviceMemoryCommitment(
- VkDevice device,
- VkDeviceMemory memory,
- VkDeviceSize* pCommittedMemoryInBytes)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.GetDeviceMemoryCommitment(device, memory, pCommittedMemoryInBytes);
- {
- memory = layer_data->Unwrap(memory);
- }
- layer_data->device_dispatch_table.GetDeviceMemoryCommitment(device, memory, pCommittedMemoryInBytes);
-
-}
-
-VkResult DispatchBindBufferMemory(
- VkDevice device,
- VkBuffer buffer,
- VkDeviceMemory memory,
- VkDeviceSize memoryOffset)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.BindBufferMemory(device, buffer, memory, memoryOffset);
- {
- buffer = layer_data->Unwrap(buffer);
- memory = layer_data->Unwrap(memory);
- }
- VkResult result = layer_data->device_dispatch_table.BindBufferMemory(device, buffer, memory, memoryOffset);
-
- return result;
-}
-
-VkResult DispatchBindImageMemory(
- VkDevice device,
- VkImage image,
- VkDeviceMemory memory,
- VkDeviceSize memoryOffset)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.BindImageMemory(device, image, memory, memoryOffset);
- {
- image = layer_data->Unwrap(image);
- memory = layer_data->Unwrap(memory);
- }
- VkResult result = layer_data->device_dispatch_table.BindImageMemory(device, image, memory, memoryOffset);
-
- return result;
-}
-
-void DispatchGetBufferMemoryRequirements(
- VkDevice device,
- VkBuffer buffer,
- VkMemoryRequirements* pMemoryRequirements)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.GetBufferMemoryRequirements(device, buffer, pMemoryRequirements);
- {
- buffer = layer_data->Unwrap(buffer);
- }
- layer_data->device_dispatch_table.GetBufferMemoryRequirements(device, buffer, pMemoryRequirements);
-
-}
-
-void DispatchGetImageMemoryRequirements(
- VkDevice device,
- VkImage image,
- VkMemoryRequirements* pMemoryRequirements)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.GetImageMemoryRequirements(device, image, pMemoryRequirements);
- {
- image = layer_data->Unwrap(image);
- }
- layer_data->device_dispatch_table.GetImageMemoryRequirements(device, image, pMemoryRequirements);
-
-}
-
-void DispatchGetImageSparseMemoryRequirements(
- VkDevice device,
- VkImage image,
- uint32_t* pSparseMemoryRequirementCount,
- VkSparseImageMemoryRequirements* pSparseMemoryRequirements)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.GetImageSparseMemoryRequirements(device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
- {
- image = layer_data->Unwrap(image);
- }
- layer_data->device_dispatch_table.GetImageSparseMemoryRequirements(device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
-
-}
-
-void DispatchGetPhysicalDeviceSparseImageFormatProperties(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkImageType type,
- VkSampleCountFlagBits samples,
- VkImageUsageFlags usage,
- VkImageTiling tiling,
- uint32_t* pPropertyCount,
- VkSparseImageFormatProperties* pProperties)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- layer_data->instance_dispatch_table.GetPhysicalDeviceSparseImageFormatProperties(physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties);
-
-}
-
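- // Sparse binding unwraps handles at every nesting level: wait/signal semaphores, the
- // buffers and images being bound, each backing VkDeviceMemory, and the fence.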
-VkResult DispatchQueueBindSparse(
- VkQueue queue,
- uint32_t bindInfoCount,
- const VkBindSparseInfo* pBindInfo,
- VkFence fence)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.QueueBindSparse(queue, bindInfoCount, pBindInfo, fence);
- safe_VkBindSparseInfo *local_pBindInfo = NULL;
- {
- if (pBindInfo) {
- local_pBindInfo = new safe_VkBindSparseInfo[bindInfoCount];
- for (uint32_t index0 = 0; index0 < bindInfoCount; ++index0) {
- local_pBindInfo[index0].initialize(&pBindInfo[index0]);
- if (local_pBindInfo[index0].pWaitSemaphores) {
- for (uint32_t index1 = 0; index1 < local_pBindInfo[index0].waitSemaphoreCount; ++index1) {
- local_pBindInfo[index0].pWaitSemaphores[index1] = layer_data->Unwrap(local_pBindInfo[index0].pWaitSemaphores[index1]);
- }
- }
- if (local_pBindInfo[index0].pBufferBinds) {
- for (uint32_t index1 = 0; index1 < local_pBindInfo[index0].bufferBindCount; ++index1) {
- if (pBindInfo[index0].pBufferBinds[index1].buffer) {
- local_pBindInfo[index0].pBufferBinds[index1].buffer = layer_data->Unwrap(pBindInfo[index0].pBufferBinds[index1].buffer);
- }
- if (local_pBindInfo[index0].pBufferBinds[index1].pBinds) {
- for (uint32_t index2 = 0; index2 < local_pBindInfo[index0].pBufferBinds[index1].bindCount; ++index2) {
- if (pBindInfo[index0].pBufferBinds[index1].pBinds[index2].memory) {
- local_pBindInfo[index0].pBufferBinds[index1].pBinds[index2].memory = layer_data->Unwrap(pBindInfo[index0].pBufferBinds[index1].pBinds[index2].memory);
- }
- }
- }
- }
- }
- if (local_pBindInfo[index0].pImageOpaqueBinds) {
- for (uint32_t index1 = 0; index1 < local_pBindInfo[index0].imageOpaqueBindCount; ++index1) {
- if (pBindInfo[index0].pImageOpaqueBinds[index1].image) {
- local_pBindInfo[index0].pImageOpaqueBinds[index1].image = layer_data->Unwrap(pBindInfo[index0].pImageOpaqueBinds[index1].image);
- }
- if (local_pBindInfo[index0].pImageOpaqueBinds[index1].pBinds) {
- for (uint32_t index2 = 0; index2 < local_pBindInfo[index0].pImageOpaqueBinds[index1].bindCount; ++index2) {
- if (pBindInfo[index0].pImageOpaqueBinds[index1].pBinds[index2].memory) {
- local_pBindInfo[index0].pImageOpaqueBinds[index1].pBinds[index2].memory = layer_data->Unwrap(pBindInfo[index0].pImageOpaqueBinds[index1].pBinds[index2].memory);
- }
- }
- }
- }
- }
- if (local_pBindInfo[index0].pImageBinds) {
- for (uint32_t index1 = 0; index1 < local_pBindInfo[index0].imageBindCount; ++index1) {
- if (pBindInfo[index0].pImageBinds[index1].image) {
- local_pBindInfo[index0].pImageBinds[index1].image = layer_data->Unwrap(pBindInfo[index0].pImageBinds[index1].image);
- }
- if (local_pBindInfo[index0].pImageBinds[index1].pBinds) {
- for (uint32_t index2 = 0; index2 < local_pBindInfo[index0].pImageBinds[index1].bindCount; ++index2) {
- if (pBindInfo[index0].pImageBinds[index1].pBinds[index2].memory) {
- local_pBindInfo[index0].pImageBinds[index1].pBinds[index2].memory = layer_data->Unwrap(pBindInfo[index0].pImageBinds[index1].pBinds[index2].memory);
- }
- }
- }
- }
- }
- if (local_pBindInfo[index0].pSignalSemaphores) {
- for (uint32_t index1 = 0; index1 < local_pBindInfo[index0].signalSemaphoreCount; ++index1) {
- local_pBindInfo[index0].pSignalSemaphores[index1] = layer_data->Unwrap(local_pBindInfo[index0].pSignalSemaphores[index1]);
- }
- }
- }
- }
- fence = layer_data->Unwrap(fence);
- }
- VkResult result = layer_data->device_dispatch_table.QueueBindSparse(queue, bindInfoCount, (const VkBindSparseInfo*)local_pBindInfo, fence);
- if (local_pBindInfo) {
- delete[] local_pBindInfo;
- }
- return result;
-}
-
-VkResult DispatchCreateFence(
- VkDevice device,
- const VkFenceCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFence* pFence)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CreateFence(device, pCreateInfo, pAllocator, pFence);
- VkResult result = layer_data->device_dispatch_table.CreateFence(device, pCreateInfo, pAllocator, pFence);
- if (VK_SUCCESS == result) {
- *pFence = layer_data->WrapNew(*pFence);
- }
- return result;
-}
-
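-// Object-destruction wrappers pop the unique ID from unique_id_mapping and pass the original driver handle down, or a null handle if no mapping exists.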
-void DispatchDestroyFence(
- VkDevice device,
- VkFence fence,
- const VkAllocationCallbacks* pAllocator)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.DestroyFence(device, fence, pAllocator);
- uint64_t fence_id = reinterpret_cast<uint64_t &>(fence);
- auto iter = unique_id_mapping.pop(fence_id);
- if (iter != unique_id_mapping.end()) {
- fence = (VkFence)iter->second;
- } else {
- fence = (VkFence)0;
- }
- layer_data->device_dispatch_table.DestroyFence(device, fence, pAllocator);
-
-}
-
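-// Arrays of non-dispatchable handles are unwrapped into a temporary array that is deleted once the downstream call returns.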
-VkResult DispatchResetFences(
- VkDevice device,
- uint32_t fenceCount,
- const VkFence* pFences)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.ResetFences(device, fenceCount, pFences);
- VkFence *local_pFences = NULL;
- {
- if (pFences) {
- local_pFences = new VkFence[fenceCount];
- for (uint32_t index0 = 0; index0 < fenceCount; ++index0) {
- local_pFences[index0] = layer_data->Unwrap(pFences[index0]);
- }
- }
- }
- VkResult result = layer_data->device_dispatch_table.ResetFences(device, fenceCount, (const VkFence*)local_pFences);
- if (local_pFences)
- delete[] local_pFences;
- return result;
-}
-
-VkResult DispatchGetFenceStatus(
- VkDevice device,
- VkFence fence)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.GetFenceStatus(device, fence);
- {
- fence = layer_data->Unwrap(fence);
- }
- VkResult result = layer_data->device_dispatch_table.GetFenceStatus(device, fence);
-
- return result;
-}
-
-VkResult DispatchWaitForFences(
- VkDevice device,
- uint32_t fenceCount,
- const VkFence* pFences,
- VkBool32 waitAll,
- uint64_t timeout)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.WaitForFences(device, fenceCount, pFences, waitAll, timeout);
- VkFence *local_pFences = NULL;
- {
- if (pFences) {
- local_pFences = new VkFence[fenceCount];
- for (uint32_t index0 = 0; index0 < fenceCount; ++index0) {
- local_pFences[index0] = layer_data->Unwrap(pFences[index0]);
- }
- }
- }
- VkResult result = layer_data->device_dispatch_table.WaitForFences(device, fenceCount, (const VkFence*)local_pFences, waitAll, timeout);
- if (local_pFences)
- delete[] local_pFences;
- return result;
-}
-
-VkResult DispatchCreateSemaphore(
- VkDevice device,
- const VkSemaphoreCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSemaphore* pSemaphore)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CreateSemaphore(device, pCreateInfo, pAllocator, pSemaphore);
- VkResult result = layer_data->device_dispatch_table.CreateSemaphore(device, pCreateInfo, pAllocator, pSemaphore);
- if (VK_SUCCESS == result) {
- *pSemaphore = layer_data->WrapNew(*pSemaphore);
- }
- return result;
-}
-
-void DispatchDestroySemaphore(
- VkDevice device,
- VkSemaphore semaphore,
- const VkAllocationCallbacks* pAllocator)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.DestroySemaphore(device, semaphore, pAllocator);
- uint64_t semaphore_id = reinterpret_cast<uint64_t &>(semaphore);
- auto iter = unique_id_mapping.pop(semaphore_id);
- if (iter != unique_id_mapping.end()) {
- semaphore = (VkSemaphore)iter->second;
- } else {
- semaphore = (VkSemaphore)0;
- }
- layer_data->device_dispatch_table.DestroySemaphore(device, semaphore, pAllocator);
-
-}
-
-VkResult DispatchCreateEvent(
- VkDevice device,
- const VkEventCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkEvent* pEvent)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CreateEvent(device, pCreateInfo, pAllocator, pEvent);
- VkResult result = layer_data->device_dispatch_table.CreateEvent(device, pCreateInfo, pAllocator, pEvent);
- if (VK_SUCCESS == result) {
- *pEvent = layer_data->WrapNew(*pEvent);
- }
- return result;
-}
-
-void DispatchDestroyEvent(
- VkDevice device,
- VkEvent event,
- const VkAllocationCallbacks* pAllocator)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.DestroyEvent(device, event, pAllocator);
- uint64_t event_id = reinterpret_cast<uint64_t &>(event);
- auto iter = unique_id_mapping.pop(event_id);
- if (iter != unique_id_mapping.end()) {
- event = (VkEvent)iter->second;
- } else {
- event = (VkEvent)0;
- }
- layer_data->device_dispatch_table.DestroyEvent(device, event, pAllocator);
-
-}
-
-VkResult DispatchGetEventStatus(
- VkDevice device,
- VkEvent event)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.GetEventStatus(device, event);
- {
- event = layer_data->Unwrap(event);
- }
- VkResult result = layer_data->device_dispatch_table.GetEventStatus(device, event);
-
- return result;
-}
-
-VkResult DispatchSetEvent(
- VkDevice device,
- VkEvent event)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.SetEvent(device, event);
- {
- event = layer_data->Unwrap(event);
- }
- VkResult result = layer_data->device_dispatch_table.SetEvent(device, event);
-
- return result;
-}
-
-VkResult DispatchResetEvent(
- VkDevice device,
- VkEvent event)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.ResetEvent(device, event);
- {
- event = layer_data->Unwrap(event);
- }
- VkResult result = layer_data->device_dispatch_table.ResetEvent(device, event);
-
- return result;
-}
-
-VkResult DispatchCreateQueryPool(
- VkDevice device,
- const VkQueryPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkQueryPool* pQueryPool)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CreateQueryPool(device, pCreateInfo, pAllocator, pQueryPool);
- VkResult result = layer_data->device_dispatch_table.CreateQueryPool(device, pCreateInfo, pAllocator, pQueryPool);
- if (VK_SUCCESS == result) {
- *pQueryPool = layer_data->WrapNew(*pQueryPool);
- }
- return result;
-}
-
-void DispatchDestroyQueryPool(
- VkDevice device,
- VkQueryPool queryPool,
- const VkAllocationCallbacks* pAllocator)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.DestroyQueryPool(device, queryPool, pAllocator);
- uint64_t queryPool_id = reinterpret_cast<uint64_t &>(queryPool);
- auto iter = unique_id_mapping.pop(queryPool_id);
- if (iter != unique_id_mapping.end()) {
- queryPool = (VkQueryPool)iter->second;
- } else {
- queryPool = (VkQueryPool)0;
- }
- layer_data->device_dispatch_table.DestroyQueryPool(device, queryPool, pAllocator);
-
-}
-
-VkResult DispatchGetQueryPoolResults(
- VkDevice device,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- size_t dataSize,
- void* pData,
- VkDeviceSize stride,
- VkQueryResultFlags flags)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.GetQueryPoolResults(device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags);
- {
- queryPool = layer_data->Unwrap(queryPool);
- }
- VkResult result = layer_data->device_dispatch_table.GetQueryPoolResults(device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags);
-
- return result;
-}
-
-VkResult DispatchCreateBuffer(
- VkDevice device,
- const VkBufferCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkBuffer* pBuffer)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CreateBuffer(device, pCreateInfo, pAllocator, pBuffer);
- VkResult result = layer_data->device_dispatch_table.CreateBuffer(device, pCreateInfo, pAllocator, pBuffer);
- if (VK_SUCCESS == result) {
- *pBuffer = layer_data->WrapNew(*pBuffer);
- }
- return result;
-}
-
-void DispatchDestroyBuffer(
- VkDevice device,
- VkBuffer buffer,
- const VkAllocationCallbacks* pAllocator)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.DestroyBuffer(device, buffer, pAllocator);
- uint64_t buffer_id = reinterpret_cast<uint64_t &>(buffer);
- auto iter = unique_id_mapping.pop(buffer_id);
- if (iter != unique_id_mapping.end()) {
- buffer = (VkBuffer)iter->second;
- } else {
- buffer = (VkBuffer)0;
- }
- layer_data->device_dispatch_table.DestroyBuffer(device, buffer, pAllocator);
-
-}
-
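-// Create infos that reference other objects are deep-copied into safe_* structs so handle members can be unwrapped without modifying the caller's input.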
-VkResult DispatchCreateBufferView(
- VkDevice device,
- const VkBufferViewCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkBufferView* pView)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CreateBufferView(device, pCreateInfo, pAllocator, pView);
- safe_VkBufferViewCreateInfo *local_pCreateInfo = NULL;
- {
- if (pCreateInfo) {
- local_pCreateInfo = new safe_VkBufferViewCreateInfo(pCreateInfo);
- if (pCreateInfo->buffer) {
- local_pCreateInfo->buffer = layer_data->Unwrap(pCreateInfo->buffer);
- }
- }
- }
- VkResult result = layer_data->device_dispatch_table.CreateBufferView(device, (const VkBufferViewCreateInfo*)local_pCreateInfo, pAllocator, pView);
- if (local_pCreateInfo) {
- delete local_pCreateInfo;
- }
- if (VK_SUCCESS == result) {
- *pView = layer_data->WrapNew(*pView);
- }
- return result;
-}
-
-void DispatchDestroyBufferView(
- VkDevice device,
- VkBufferView bufferView,
- const VkAllocationCallbacks* pAllocator)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.DestroyBufferView(device, bufferView, pAllocator);
- uint64_t bufferView_id = reinterpret_cast<uint64_t &>(bufferView);
- auto iter = unique_id_mapping.pop(bufferView_id);
- if (iter != unique_id_mapping.end()) {
- bufferView = (VkBufferView)iter->second;
- } else {
- bufferView = (VkBufferView)0;
- }
- layer_data->device_dispatch_table.DestroyBufferView(device, bufferView, pAllocator);
-
-}
-
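-// WrapPnextChainHandles processes the copied pNext chain so extension structs that carry handles are translated as well.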
-VkResult DispatchCreateImage(
- VkDevice device,
- const VkImageCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkImage* pImage)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CreateImage(device, pCreateInfo, pAllocator, pImage);
- safe_VkImageCreateInfo *local_pCreateInfo = NULL;
- {
- if (pCreateInfo) {
- local_pCreateInfo = new safe_VkImageCreateInfo(pCreateInfo);
- WrapPnextChainHandles(layer_data, local_pCreateInfo->pNext);
- }
- }
- VkResult result = layer_data->device_dispatch_table.CreateImage(device, (const VkImageCreateInfo*)local_pCreateInfo, pAllocator, pImage);
- if (local_pCreateInfo) {
- delete local_pCreateInfo;
- }
- if (VK_SUCCESS == result) {
- *pImage = layer_data->WrapNew(*pImage);
- }
- return result;
-}
-
-void DispatchDestroyImage(
- VkDevice device,
- VkImage image,
- const VkAllocationCallbacks* pAllocator)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.DestroyImage(device, image, pAllocator);
- uint64_t image_id = reinterpret_cast<uint64_t &>(image);
- auto iter = unique_id_mapping.pop(image_id);
- if (iter != unique_id_mapping.end()) {
- image = (VkImage)iter->second;
- } else {
- image = (VkImage)0;
- }
- layer_data->device_dispatch_table.DestroyImage(device, image, pAllocator);
-
-}
-
-void DispatchGetImageSubresourceLayout(
- VkDevice device,
- VkImage image,
- const VkImageSubresource* pSubresource,
- VkSubresourceLayout* pLayout)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.GetImageSubresourceLayout(device, image, pSubresource, pLayout);
- {
- image = layer_data->Unwrap(image);
- }
- layer_data->device_dispatch_table.GetImageSubresourceLayout(device, image, pSubresource, pLayout);
-
-}
-
-VkResult DispatchCreateImageView(
- VkDevice device,
- const VkImageViewCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkImageView* pView)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CreateImageView(device, pCreateInfo, pAllocator, pView);
- safe_VkImageViewCreateInfo *local_pCreateInfo = NULL;
- {
- if (pCreateInfo) {
- local_pCreateInfo = new safe_VkImageViewCreateInfo(pCreateInfo);
- if (pCreateInfo->image) {
- local_pCreateInfo->image = layer_data->Unwrap(pCreateInfo->image);
- }
- WrapPnextChainHandles(layer_data, local_pCreateInfo->pNext);
- }
- }
- VkResult result = layer_data->device_dispatch_table.CreateImageView(device, (const VkImageViewCreateInfo*)local_pCreateInfo, pAllocator, pView);
- if (local_pCreateInfo) {
- delete local_pCreateInfo;
- }
- if (VK_SUCCESS == result) {
- *pView = layer_data->WrapNew(*pView);
- }
- return result;
-}
-
-void DispatchDestroyImageView(
- VkDevice device,
- VkImageView imageView,
- const VkAllocationCallbacks* pAllocator)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.DestroyImageView(device, imageView, pAllocator);
- uint64_t imageView_id = reinterpret_cast<uint64_t &>(imageView);
- auto iter = unique_id_mapping.pop(imageView_id);
- if (iter != unique_id_mapping.end()) {
- imageView = (VkImageView)iter->second;
- } else {
- imageView = (VkImageView)0;
- }
- layer_data->device_dispatch_table.DestroyImageView(device, imageView, pAllocator);
-
-}
-
-VkResult DispatchCreateShaderModule(
- VkDevice device,
- const VkShaderModuleCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkShaderModule* pShaderModule)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule);
- safe_VkShaderModuleCreateInfo *local_pCreateInfo = NULL;
- {
- if (pCreateInfo) {
- local_pCreateInfo = new safe_VkShaderModuleCreateInfo(pCreateInfo);
- WrapPnextChainHandles(layer_data, local_pCreateInfo->pNext);
- }
- }
- VkResult result = layer_data->device_dispatch_table.CreateShaderModule(device, (const VkShaderModuleCreateInfo*)local_pCreateInfo, pAllocator, pShaderModule);
- if (local_pCreateInfo) {
- delete local_pCreateInfo;
- }
- if (VK_SUCCESS == result) {
- *pShaderModule = layer_data->WrapNew(*pShaderModule);
- }
- return result;
-}
-
-void DispatchDestroyShaderModule(
- VkDevice device,
- VkShaderModule shaderModule,
- const VkAllocationCallbacks* pAllocator)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.DestroyShaderModule(device, shaderModule, pAllocator);
- uint64_t shaderModule_id = reinterpret_cast<uint64_t &>(shaderModule);
- auto iter = unique_id_mapping.pop(shaderModule_id);
- if (iter != unique_id_mapping.end()) {
- shaderModule = (VkShaderModule)iter->second;
- } else {
- shaderModule = (VkShaderModule)0;
- }
- layer_data->device_dispatch_table.DestroyShaderModule(device, shaderModule, pAllocator);
-
-}
-
-VkResult DispatchCreatePipelineCache(
- VkDevice device,
- const VkPipelineCacheCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkPipelineCache* pPipelineCache)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CreatePipelineCache(device, pCreateInfo, pAllocator, pPipelineCache);
- VkResult result = layer_data->device_dispatch_table.CreatePipelineCache(device, pCreateInfo, pAllocator, pPipelineCache);
- if (VK_SUCCESS == result) {
- *pPipelineCache = layer_data->WrapNew(*pPipelineCache);
- }
- return result;
-}
-
-void DispatchDestroyPipelineCache(
- VkDevice device,
- VkPipelineCache pipelineCache,
- const VkAllocationCallbacks* pAllocator)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.DestroyPipelineCache(device, pipelineCache, pAllocator);
- uint64_t pipelineCache_id = reinterpret_cast<uint64_t &>(pipelineCache);
- auto iter = unique_id_mapping.pop(pipelineCache_id);
- if (iter != unique_id_mapping.end()) {
- pipelineCache = (VkPipelineCache)iter->second;
- } else {
- pipelineCache = (VkPipelineCache)0;
- }
- layer_data->device_dispatch_table.DestroyPipelineCache(device, pipelineCache, pAllocator);
-
-}
-
-VkResult DispatchGetPipelineCacheData(
- VkDevice device,
- VkPipelineCache pipelineCache,
- size_t* pDataSize,
- void* pData)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.GetPipelineCacheData(device, pipelineCache, pDataSize, pData);
- {
- pipelineCache = layer_data->Unwrap(pipelineCache);
- }
- VkResult result = layer_data->device_dispatch_table.GetPipelineCacheData(device, pipelineCache, pDataSize, pData);
-
- return result;
-}
-
-VkResult DispatchMergePipelineCaches(
- VkDevice device,
- VkPipelineCache dstCache,
- uint32_t srcCacheCount,
- const VkPipelineCache* pSrcCaches)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.MergePipelineCaches(device, dstCache, srcCacheCount, pSrcCaches);
- VkPipelineCache *local_pSrcCaches = NULL;
- {
- dstCache = layer_data->Unwrap(dstCache);
- if (pSrcCaches) {
- local_pSrcCaches = new VkPipelineCache[srcCacheCount];
- for (uint32_t index0 = 0; index0 < srcCacheCount; ++index0) {
- local_pSrcCaches[index0] = layer_data->Unwrap(pSrcCaches[index0]);
- }
- }
- }
- VkResult result = layer_data->device_dispatch_table.MergePipelineCaches(device, dstCache, srcCacheCount, (const VkPipelineCache*)local_pSrcCaches);
- if (local_pSrcCaches)
- delete[] local_pSrcCaches;
- return result;
-}
-
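-// Wrappers for the entry points noted below are hand-written (typically because they need extra bookkeeping) and are omitted from the generated output.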
-// Skip vkCreateGraphicsPipelines dispatch, manually generated
-
-// Skip vkCreateComputePipelines dispatch, manually generated
-
-void DispatchDestroyPipeline(
- VkDevice device,
- VkPipeline pipeline,
- const VkAllocationCallbacks* pAllocator)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.DestroyPipeline(device, pipeline, pAllocator);
- uint64_t pipeline_id = reinterpret_cast<uint64_t &>(pipeline);
- auto iter = unique_id_mapping.pop(pipeline_id);
- if (iter != unique_id_mapping.end()) {
- pipeline = (VkPipeline)iter->second;
- } else {
- pipeline = (VkPipeline)0;
- }
- layer_data->device_dispatch_table.DestroyPipeline(device, pipeline, pAllocator);
-
-}
-
-VkResult DispatchCreatePipelineLayout(
- VkDevice device,
- const VkPipelineLayoutCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkPipelineLayout* pPipelineLayout)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout);
- safe_VkPipelineLayoutCreateInfo *local_pCreateInfo = NULL;
- {
- if (pCreateInfo) {
- local_pCreateInfo = new safe_VkPipelineLayoutCreateInfo(pCreateInfo);
- if (local_pCreateInfo->pSetLayouts) {
- for (uint32_t index1 = 0; index1 < local_pCreateInfo->setLayoutCount; ++index1) {
- local_pCreateInfo->pSetLayouts[index1] = layer_data->Unwrap(local_pCreateInfo->pSetLayouts[index1]);
- }
- }
- }
- }
- VkResult result = layer_data->device_dispatch_table.CreatePipelineLayout(device, (const VkPipelineLayoutCreateInfo*)local_pCreateInfo, pAllocator, pPipelineLayout);
- if (local_pCreateInfo) {
- delete local_pCreateInfo;
- }
- if (VK_SUCCESS == result) {
- *pPipelineLayout = layer_data->WrapNew(*pPipelineLayout);
- }
- return result;
-}
-
-void DispatchDestroyPipelineLayout(
- VkDevice device,
- VkPipelineLayout pipelineLayout,
- const VkAllocationCallbacks* pAllocator)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.DestroyPipelineLayout(device, pipelineLayout, pAllocator);
- uint64_t pipelineLayout_id = reinterpret_cast<uint64_t &>(pipelineLayout);
- auto iter = unique_id_mapping.pop(pipelineLayout_id);
- if (iter != unique_id_mapping.end()) {
- pipelineLayout = (VkPipelineLayout)iter->second;
- } else {
- pipelineLayout = (VkPipelineLayout)0;
- }
- layer_data->device_dispatch_table.DestroyPipelineLayout(device, pipelineLayout, pAllocator);
-
-}
-
-VkResult DispatchCreateSampler(
- VkDevice device,
- const VkSamplerCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSampler* pSampler)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CreateSampler(device, pCreateInfo, pAllocator, pSampler);
- safe_VkSamplerCreateInfo *local_pCreateInfo = NULL;
- {
- if (pCreateInfo) {
- local_pCreateInfo = new safe_VkSamplerCreateInfo(pCreateInfo);
- WrapPnextChainHandles(layer_data, local_pCreateInfo->pNext);
- }
- }
- VkResult result = layer_data->device_dispatch_table.CreateSampler(device, (const VkSamplerCreateInfo*)local_pCreateInfo, pAllocator, pSampler);
- if (local_pCreateInfo) {
- delete local_pCreateInfo;
- }
- if (VK_SUCCESS == result) {
- *pSampler = layer_data->WrapNew(*pSampler);
- }
- return result;
-}
-
-void DispatchDestroySampler(
- VkDevice device,
- VkSampler sampler,
- const VkAllocationCallbacks* pAllocator)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.DestroySampler(device, sampler, pAllocator);
- uint64_t sampler_id = reinterpret_cast<uint64_t &>(sampler);
- auto iter = unique_id_mapping.pop(sampler_id);
- if (iter != unique_id_mapping.end()) {
- sampler = (VkSampler)iter->second;
- } else {
- sampler = (VkSampler)0;
- }
- layer_data->device_dispatch_table.DestroySampler(device, sampler, pAllocator);
-
-}
-
-VkResult DispatchCreateDescriptorSetLayout(
- VkDevice device,
- const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorSetLayout* pSetLayout)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CreateDescriptorSetLayout(device, pCreateInfo, pAllocator, pSetLayout);
- safe_VkDescriptorSetLayoutCreateInfo *local_pCreateInfo = NULL;
- {
- if (pCreateInfo) {
- local_pCreateInfo = new safe_VkDescriptorSetLayoutCreateInfo(pCreateInfo);
- if (local_pCreateInfo->pBindings) {
- for (uint32_t index1 = 0; index1 < local_pCreateInfo->bindingCount; ++index1) {
- if (local_pCreateInfo->pBindings[index1].pImmutableSamplers) {
- for (uint32_t index2 = 0; index2 < local_pCreateInfo->pBindings[index1].descriptorCount; ++index2) {
- local_pCreateInfo->pBindings[index1].pImmutableSamplers[index2] = layer_data->Unwrap(local_pCreateInfo->pBindings[index1].pImmutableSamplers[index2]);
- }
- }
- }
- }
- }
- }
- VkResult result = layer_data->device_dispatch_table.CreateDescriptorSetLayout(device, (const VkDescriptorSetLayoutCreateInfo*)local_pCreateInfo, pAllocator, pSetLayout);
- if (local_pCreateInfo) {
- delete local_pCreateInfo;
- }
- if (VK_SUCCESS == result) {
- *pSetLayout = layer_data->WrapNew(*pSetLayout);
- }
- return result;
-}
-
-void DispatchDestroyDescriptorSetLayout(
- VkDevice device,
- VkDescriptorSetLayout descriptorSetLayout,
- const VkAllocationCallbacks* pAllocator)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.DestroyDescriptorSetLayout(device, descriptorSetLayout, pAllocator);
- uint64_t descriptorSetLayout_id = reinterpret_cast<uint64_t &>(descriptorSetLayout);
- auto iter = unique_id_mapping.pop(descriptorSetLayout_id);
- if (iter != unique_id_mapping.end()) {
- descriptorSetLayout = (VkDescriptorSetLayout)iter->second;
- } else {
- descriptorSetLayout = (VkDescriptorSetLayout)0;
- }
- layer_data->device_dispatch_table.DestroyDescriptorSetLayout(device, descriptorSetLayout, pAllocator);
-
-}
-
-VkResult DispatchCreateDescriptorPool(
- VkDevice device,
- const VkDescriptorPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorPool* pDescriptorPool)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CreateDescriptorPool(device, pCreateInfo, pAllocator, pDescriptorPool);
- VkResult result = layer_data->device_dispatch_table.CreateDescriptorPool(device, pCreateInfo, pAllocator, pDescriptorPool);
- if (VK_SUCCESS == result) {
- *pDescriptorPool = layer_data->WrapNew(*pDescriptorPool);
- }
- return result;
-}
-
-// Skip vkDestroyDescriptorPool dispatch, manually generated
-
-// Skip vkResetDescriptorPool dispatch, manually generated
-
-// Skip vkAllocateDescriptorSets dispatch, manually generated
-
-// Skip vkFreeDescriptorSets dispatch, manually generated
-
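-// Descriptor updates unwrap every handle reachable from the write and copy structs: destination/source sets, samplers, image views, buffers, and texel buffer views.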
-void DispatchUpdateDescriptorSets(
- VkDevice device,
- uint32_t descriptorWriteCount,
- const VkWriteDescriptorSet* pDescriptorWrites,
- uint32_t descriptorCopyCount,
- const VkCopyDescriptorSet* pDescriptorCopies)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.UpdateDescriptorSets(device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies);
- safe_VkWriteDescriptorSet *local_pDescriptorWrites = NULL;
- safe_VkCopyDescriptorSet *local_pDescriptorCopies = NULL;
- {
- if (pDescriptorWrites) {
- local_pDescriptorWrites = new safe_VkWriteDescriptorSet[descriptorWriteCount];
- for (uint32_t index0 = 0; index0 < descriptorWriteCount; ++index0) {
- local_pDescriptorWrites[index0].initialize(&pDescriptorWrites[index0]);
- WrapPnextChainHandles(layer_data, local_pDescriptorWrites[index0].pNext);
- if (pDescriptorWrites[index0].dstSet) {
- local_pDescriptorWrites[index0].dstSet = layer_data->Unwrap(pDescriptorWrites[index0].dstSet);
- }
- if (local_pDescriptorWrites[index0].pImageInfo) {
- for (uint32_t index1 = 0; index1 < local_pDescriptorWrites[index0].descriptorCount; ++index1) {
- if (pDescriptorWrites[index0].pImageInfo[index1].sampler) {
- local_pDescriptorWrites[index0].pImageInfo[index1].sampler = layer_data->Unwrap(pDescriptorWrites[index0].pImageInfo[index1].sampler);
- }
- if (pDescriptorWrites[index0].pImageInfo[index1].imageView) {
- local_pDescriptorWrites[index0].pImageInfo[index1].imageView = layer_data->Unwrap(pDescriptorWrites[index0].pImageInfo[index1].imageView);
- }
- }
- }
- if (local_pDescriptorWrites[index0].pBufferInfo) {
- for (uint32_t index1 = 0; index1 < local_pDescriptorWrites[index0].descriptorCount; ++index1) {
- if (pDescriptorWrites[index0].pBufferInfo[index1].buffer) {
- local_pDescriptorWrites[index0].pBufferInfo[index1].buffer = layer_data->Unwrap(pDescriptorWrites[index0].pBufferInfo[index1].buffer);
- }
- }
- }
- if (local_pDescriptorWrites[index0].pTexelBufferView) {
- for (uint32_t index1 = 0; index1 < local_pDescriptorWrites[index0].descriptorCount; ++index1) {
- local_pDescriptorWrites[index0].pTexelBufferView[index1] = layer_data->Unwrap(local_pDescriptorWrites[index0].pTexelBufferView[index1]);
- }
- }
- }
- }
- if (pDescriptorCopies) {
- local_pDescriptorCopies = new safe_VkCopyDescriptorSet[descriptorCopyCount];
- for (uint32_t index0 = 0; index0 < descriptorCopyCount; ++index0) {
- local_pDescriptorCopies[index0].initialize(&pDescriptorCopies[index0]);
- if (pDescriptorCopies[index0].srcSet) {
- local_pDescriptorCopies[index0].srcSet = layer_data->Unwrap(pDescriptorCopies[index0].srcSet);
- }
- if (pDescriptorCopies[index0].dstSet) {
- local_pDescriptorCopies[index0].dstSet = layer_data->Unwrap(pDescriptorCopies[index0].dstSet);
- }
- }
- }
- }
- layer_data->device_dispatch_table.UpdateDescriptorSets(device, descriptorWriteCount, (const VkWriteDescriptorSet*)local_pDescriptorWrites, descriptorCopyCount, (const VkCopyDescriptorSet*)local_pDescriptorCopies);
- if (local_pDescriptorWrites) {
- delete[] local_pDescriptorWrites;
- }
- if (local_pDescriptorCopies) {
- delete[] local_pDescriptorCopies;
- }
-}
-
-VkResult DispatchCreateFramebuffer(
- VkDevice device,
- const VkFramebufferCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFramebuffer* pFramebuffer)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CreateFramebuffer(device, pCreateInfo, pAllocator, pFramebuffer);
- safe_VkFramebufferCreateInfo *local_pCreateInfo = NULL;
- {
- if (pCreateInfo) {
- local_pCreateInfo = new safe_VkFramebufferCreateInfo(pCreateInfo);
- if (pCreateInfo->renderPass) {
- local_pCreateInfo->renderPass = layer_data->Unwrap(pCreateInfo->renderPass);
- }
- if (local_pCreateInfo->pAttachments) {
- for (uint32_t index1 = 0; index1 < local_pCreateInfo->attachmentCount; ++index1) {
- local_pCreateInfo->pAttachments[index1] = layer_data->Unwrap(local_pCreateInfo->pAttachments[index1]);
- }
- }
- }
- }
- VkResult result = layer_data->device_dispatch_table.CreateFramebuffer(device, (const VkFramebufferCreateInfo*)local_pCreateInfo, pAllocator, pFramebuffer);
- if (local_pCreateInfo) {
- delete local_pCreateInfo;
- }
- if (VK_SUCCESS == result) {
- *pFramebuffer = layer_data->WrapNew(*pFramebuffer);
- }
- return result;
-}
-
-void DispatchDestroyFramebuffer(
- VkDevice device,
- VkFramebuffer framebuffer,
- const VkAllocationCallbacks* pAllocator)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.DestroyFramebuffer(device, framebuffer, pAllocator);
- uint64_t framebuffer_id = reinterpret_cast<uint64_t &>(framebuffer);
- auto iter = unique_id_mapping.pop(framebuffer_id);
- if (iter != unique_id_mapping.end()) {
- framebuffer = (VkFramebuffer)iter->second;
- } else {
- framebuffer = (VkFramebuffer)0;
- }
- layer_data->device_dispatch_table.DestroyFramebuffer(device, framebuffer, pAllocator);
-
-}
-
-// Skip vkCreateRenderPass dispatch, manually generated
-
-// Skip vkDestroyRenderPass dispatch, manually generated
-
-void DispatchGetRenderAreaGranularity(
- VkDevice device,
- VkRenderPass renderPass,
- VkExtent2D* pGranularity)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.GetRenderAreaGranularity(device, renderPass, pGranularity);
- {
- renderPass = layer_data->Unwrap(renderPass);
- }
- layer_data->device_dispatch_table.GetRenderAreaGranularity(device, renderPass, pGranularity);
-
-}
-
-VkResult DispatchCreateCommandPool(
- VkDevice device,
- const VkCommandPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkCommandPool* pCommandPool)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CreateCommandPool(device, pCreateInfo, pAllocator, pCommandPool);
- VkResult result = layer_data->device_dispatch_table.CreateCommandPool(device, pCreateInfo, pAllocator, pCommandPool);
- if (VK_SUCCESS == result) {
- *pCommandPool = layer_data->WrapNew(*pCommandPool);
- }
- return result;
-}
-
-void DispatchDestroyCommandPool(
- VkDevice device,
- VkCommandPool commandPool,
- const VkAllocationCallbacks* pAllocator)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.DestroyCommandPool(device, commandPool, pAllocator);
- uint64_t commandPool_id = reinterpret_cast<uint64_t &>(commandPool);
- auto iter = unique_id_mapping.pop(commandPool_id);
- if (iter != unique_id_mapping.end()) {
- commandPool = (VkCommandPool)iter->second;
- } else {
- commandPool = (VkCommandPool)0;
- }
- layer_data->device_dispatch_table.DestroyCommandPool(device, commandPool, pAllocator);
-
-}
-
-VkResult DispatchResetCommandPool(
- VkDevice device,
- VkCommandPool commandPool,
- VkCommandPoolResetFlags flags)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.ResetCommandPool(device, commandPool, flags);
- {
- commandPool = layer_data->Unwrap(commandPool);
- }
- VkResult result = layer_data->device_dispatch_table.ResetCommandPool(device, commandPool, flags);
-
- return result;
-}
-
-VkResult DispatchAllocateCommandBuffers(
- VkDevice device,
- const VkCommandBufferAllocateInfo* pAllocateInfo,
- VkCommandBuffer* pCommandBuffers)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.AllocateCommandBuffers(device, pAllocateInfo, pCommandBuffers);
- safe_VkCommandBufferAllocateInfo *local_pAllocateInfo = NULL;
- {
- if (pAllocateInfo) {
- local_pAllocateInfo = new safe_VkCommandBufferAllocateInfo(pAllocateInfo);
- if (pAllocateInfo->commandPool) {
- local_pAllocateInfo->commandPool = layer_data->Unwrap(pAllocateInfo->commandPool);
- }
- }
- }
- VkResult result = layer_data->device_dispatch_table.AllocateCommandBuffers(device, (const VkCommandBufferAllocateInfo*)local_pAllocateInfo, pCommandBuffers);
- if (local_pAllocateInfo) {
- delete local_pAllocateInfo;
- }
- return result;
-}
-
-void DispatchFreeCommandBuffers(
- VkDevice device,
- VkCommandPool commandPool,
- uint32_t commandBufferCount,
- const VkCommandBuffer* pCommandBuffers)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.FreeCommandBuffers(device, commandPool, commandBufferCount, pCommandBuffers);
- {
- commandPool = layer_data->Unwrap(commandPool);
- }
- layer_data->device_dispatch_table.FreeCommandBuffers(device, commandPool, commandBufferCount, pCommandBuffers);
-
-}
-
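-// Dispatchable handles such as command buffers are never wrapped; only the render pass and framebuffer inside the inheritance info need unwrapping here.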
-VkResult DispatchBeginCommandBuffer(
- VkCommandBuffer commandBuffer,
- const VkCommandBufferBeginInfo* pBeginInfo)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.BeginCommandBuffer(commandBuffer, pBeginInfo);
- safe_VkCommandBufferBeginInfo *local_pBeginInfo = NULL;
- {
- if (pBeginInfo) {
- local_pBeginInfo = new safe_VkCommandBufferBeginInfo(pBeginInfo);
- if (local_pBeginInfo->pInheritanceInfo) {
- if (pBeginInfo->pInheritanceInfo->renderPass) {
- local_pBeginInfo->pInheritanceInfo->renderPass = layer_data->Unwrap(pBeginInfo->pInheritanceInfo->renderPass);
- }
- if (pBeginInfo->pInheritanceInfo->framebuffer) {
- local_pBeginInfo->pInheritanceInfo->framebuffer = layer_data->Unwrap(pBeginInfo->pInheritanceInfo->framebuffer);
- }
- }
- }
- }
- VkResult result = layer_data->device_dispatch_table.BeginCommandBuffer(commandBuffer, (const VkCommandBufferBeginInfo*)local_pBeginInfo);
- if (local_pBeginInfo) {
- delete local_pBeginInfo;
- }
- return result;
-}
-
-VkResult DispatchEndCommandBuffer(
- VkCommandBuffer commandBuffer)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- VkResult result = layer_data->device_dispatch_table.EndCommandBuffer(commandBuffer);
-
- return result;
-}
-
-VkResult DispatchResetCommandBuffer(
- VkCommandBuffer commandBuffer,
- VkCommandBufferResetFlags flags)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- VkResult result = layer_data->device_dispatch_table.ResetCommandBuffer(commandBuffer, flags);
-
- return result;
-}
-
-void DispatchCmdBindPipeline(
- VkCommandBuffer commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipeline pipeline)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdBindPipeline(commandBuffer, pipelineBindPoint, pipeline);
- {
- pipeline = layer_data->Unwrap(pipeline);
- }
- layer_data->device_dispatch_table.CmdBindPipeline(commandBuffer, pipelineBindPoint, pipeline);
-
-}
-
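-// Commands whose parameters contain no non-dispatchable handles are forwarded unchanged, without a wrap_handles check.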
-void DispatchCmdSetViewport(
- VkCommandBuffer commandBuffer,
- uint32_t firstViewport,
- uint32_t viewportCount,
- const VkViewport* pViewports)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- layer_data->device_dispatch_table.CmdSetViewport(commandBuffer, firstViewport, viewportCount, pViewports);
-
-}
-
-void DispatchCmdSetScissor(
- VkCommandBuffer commandBuffer,
- uint32_t firstScissor,
- uint32_t scissorCount,
- const VkRect2D* pScissors)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- layer_data->device_dispatch_table.CmdSetScissor(commandBuffer, firstScissor, scissorCount, pScissors);
-
-}
-
-void DispatchCmdSetLineWidth(
- VkCommandBuffer commandBuffer,
- float lineWidth)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- layer_data->device_dispatch_table.CmdSetLineWidth(commandBuffer, lineWidth);
-
-}
-
-void DispatchCmdSetDepthBias(
- VkCommandBuffer commandBuffer,
- float depthBiasConstantFactor,
- float depthBiasClamp,
- float depthBiasSlopeFactor)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- layer_data->device_dispatch_table.CmdSetDepthBias(commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor);
-
-}
-
-void DispatchCmdSetBlendConstants(
- VkCommandBuffer commandBuffer,
- const float blendConstants[4])
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- layer_data->device_dispatch_table.CmdSetBlendConstants(commandBuffer, blendConstants);
-
-}
-
-void DispatchCmdSetDepthBounds(
- VkCommandBuffer commandBuffer,
- float minDepthBounds,
- float maxDepthBounds)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- layer_data->device_dispatch_table.CmdSetDepthBounds(commandBuffer, minDepthBounds, maxDepthBounds);
-
-}
-
-void DispatchCmdSetStencilCompareMask(
- VkCommandBuffer commandBuffer,
- VkStencilFaceFlags faceMask,
- uint32_t compareMask)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- layer_data->device_dispatch_table.CmdSetStencilCompareMask(commandBuffer, faceMask, compareMask);
-
-}
-
-void DispatchCmdSetStencilWriteMask(
- VkCommandBuffer commandBuffer,
- VkStencilFaceFlags faceMask,
- uint32_t writeMask)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- layer_data->device_dispatch_table.CmdSetStencilWriteMask(commandBuffer, faceMask, writeMask);
-
-}
-
-void DispatchCmdSetStencilReference(
- VkCommandBuffer commandBuffer,
- VkStencilFaceFlags faceMask,
- uint32_t reference)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- layer_data->device_dispatch_table.CmdSetStencilReference(commandBuffer, faceMask, reference);
-
-}
-
-void DispatchCmdBindDescriptorSets(
- VkCommandBuffer commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipelineLayout layout,
- uint32_t firstSet,
- uint32_t descriptorSetCount,
- const VkDescriptorSet* pDescriptorSets,
- uint32_t dynamicOffsetCount,
- const uint32_t* pDynamicOffsets)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdBindDescriptorSets(commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets);
- VkDescriptorSet *local_pDescriptorSets = NULL;
- {
- layout = layer_data->Unwrap(layout);
- if (pDescriptorSets) {
- local_pDescriptorSets = new VkDescriptorSet[descriptorSetCount];
- for (uint32_t index0 = 0; index0 < descriptorSetCount; ++index0) {
- local_pDescriptorSets[index0] = layer_data->Unwrap(pDescriptorSets[index0]);
- }
- }
- }
- layer_data->device_dispatch_table.CmdBindDescriptorSets(commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, (const VkDescriptorSet*)local_pDescriptorSets, dynamicOffsetCount, pDynamicOffsets);
- if (local_pDescriptorSets)
- delete[] local_pDescriptorSets;
-}
-
-void DispatchCmdBindIndexBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkIndexType indexType)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdBindIndexBuffer(commandBuffer, buffer, offset, indexType);
- {
- buffer = layer_data->Unwrap(buffer);
- }
- layer_data->device_dispatch_table.CmdBindIndexBuffer(commandBuffer, buffer, offset, indexType);
-
-}
-
-void DispatchCmdBindVertexBuffers(
- VkCommandBuffer commandBuffer,
- uint32_t firstBinding,
- uint32_t bindingCount,
- const VkBuffer* pBuffers,
- const VkDeviceSize* pOffsets)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdBindVertexBuffers(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets);
- VkBuffer *local_pBuffers = NULL;
- {
- if (pBuffers) {
- local_pBuffers = new VkBuffer[bindingCount];
- for (uint32_t index0 = 0; index0 < bindingCount; ++index0) {
- local_pBuffers[index0] = layer_data->Unwrap(pBuffers[index0]);
- }
- }
- }
- layer_data->device_dispatch_table.CmdBindVertexBuffers(commandBuffer, firstBinding, bindingCount, (const VkBuffer*)local_pBuffers, pOffsets);
- if (local_pBuffers)
- delete[] local_pBuffers;
-}
-
-void DispatchCmdDraw(
- VkCommandBuffer commandBuffer,
- uint32_t vertexCount,
- uint32_t instanceCount,
- uint32_t firstVertex,
- uint32_t firstInstance)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- layer_data->device_dispatch_table.CmdDraw(commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance);
-
-}
-
-void DispatchCmdDrawIndexed(
- VkCommandBuffer commandBuffer,
- uint32_t indexCount,
- uint32_t instanceCount,
- uint32_t firstIndex,
- int32_t vertexOffset,
- uint32_t firstInstance)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- layer_data->device_dispatch_table.CmdDrawIndexed(commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
-
-}
-
-void DispatchCmdDrawIndirect(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- uint32_t drawCount,
- uint32_t stride)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdDrawIndirect(commandBuffer, buffer, offset, drawCount, stride);
- {
- buffer = layer_data->Unwrap(buffer);
- }
- layer_data->device_dispatch_table.CmdDrawIndirect(commandBuffer, buffer, offset, drawCount, stride);
-
-}
-
-void DispatchCmdDrawIndexedIndirect(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- uint32_t drawCount,
- uint32_t stride)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdDrawIndexedIndirect(commandBuffer, buffer, offset, drawCount, stride);
- {
- buffer = layer_data->Unwrap(buffer);
- }
- layer_data->device_dispatch_table.CmdDrawIndexedIndirect(commandBuffer, buffer, offset, drawCount, stride);
-
-}
-
-void DispatchCmdDispatch(
- VkCommandBuffer commandBuffer,
- uint32_t groupCountX,
- uint32_t groupCountY,
- uint32_t groupCountZ)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- layer_data->device_dispatch_table.CmdDispatch(commandBuffer, groupCountX, groupCountY, groupCountZ);
-
-}
-
-void DispatchCmdDispatchIndirect(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdDispatchIndirect(commandBuffer, buffer, offset);
- {
- buffer = layer_data->Unwrap(buffer);
- }
- layer_data->device_dispatch_table.CmdDispatchIndirect(commandBuffer, buffer, offset);
-
-}
-
-void DispatchCmdCopyBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer srcBuffer,
- VkBuffer dstBuffer,
- uint32_t regionCount,
- const VkBufferCopy* pRegions)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdCopyBuffer(commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions);
- {
- srcBuffer = layer_data->Unwrap(srcBuffer);
- dstBuffer = layer_data->Unwrap(dstBuffer);
- }
- layer_data->device_dispatch_table.CmdCopyBuffer(commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions);
-
-}
-
-void DispatchCmdCopyImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkImageCopy* pRegions)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdCopyImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
- {
- srcImage = layer_data->Unwrap(srcImage);
- dstImage = layer_data->Unwrap(dstImage);
- }
- layer_data->device_dispatch_table.CmdCopyImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
-
-}
-
-void DispatchCmdBlitImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkImageBlit* pRegions,
- VkFilter filter)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdBlitImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter);
- {
- srcImage = layer_data->Unwrap(srcImage);
- dstImage = layer_data->Unwrap(dstImage);
- }
- layer_data->device_dispatch_table.CmdBlitImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter);
-
-}
-
-void DispatchCmdCopyBufferToImage(
- VkCommandBuffer commandBuffer,
- VkBuffer srcBuffer,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkBufferImageCopy* pRegions)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions);
- {
- srcBuffer = layer_data->Unwrap(srcBuffer);
- dstImage = layer_data->Unwrap(dstImage);
- }
- layer_data->device_dispatch_table.CmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions);
-
-}
-
-void DispatchCmdCopyImageToBuffer(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkBuffer dstBuffer,
- uint32_t regionCount,
- const VkBufferImageCopy* pRegions)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions);
- {
- srcImage = layer_data->Unwrap(srcImage);
- dstBuffer = layer_data->Unwrap(dstBuffer);
- }
- layer_data->device_dispatch_table.CmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions);
-
-}
-
-void DispatchCmdUpdateBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize dataSize,
- const void* pData)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdUpdateBuffer(commandBuffer, dstBuffer, dstOffset, dataSize, pData);
- {
- dstBuffer = layer_data->Unwrap(dstBuffer);
- }
- layer_data->device_dispatch_table.CmdUpdateBuffer(commandBuffer, dstBuffer, dstOffset, dataSize, pData);
-
-}
-
-void DispatchCmdFillBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize size,
- uint32_t data)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdFillBuffer(commandBuffer, dstBuffer, dstOffset, size, data);
- {
- dstBuffer = layer_data->Unwrap(dstBuffer);
- }
- layer_data->device_dispatch_table.CmdFillBuffer(commandBuffer, dstBuffer, dstOffset, size, data);
-
-}
-
-void DispatchCmdClearColorImage(
- VkCommandBuffer commandBuffer,
- VkImage image,
- VkImageLayout imageLayout,
- const VkClearColorValue* pColor,
- uint32_t rangeCount,
- const VkImageSubresourceRange* pRanges)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdClearColorImage(commandBuffer, image, imageLayout, pColor, rangeCount, pRanges);
- {
- image = layer_data->Unwrap(image);
- }
- layer_data->device_dispatch_table.CmdClearColorImage(commandBuffer, image, imageLayout, pColor, rangeCount, pRanges);
-
-}
-
-void DispatchCmdClearDepthStencilImage(
- VkCommandBuffer commandBuffer,
- VkImage image,
- VkImageLayout imageLayout,
- const VkClearDepthStencilValue* pDepthStencil,
- uint32_t rangeCount,
- const VkImageSubresourceRange* pRanges)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdClearDepthStencilImage(commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges);
- {
- image = layer_data->Unwrap(image);
- }
- layer_data->device_dispatch_table.CmdClearDepthStencilImage(commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges);
-
-}
-
-void DispatchCmdClearAttachments(
- VkCommandBuffer commandBuffer,
- uint32_t attachmentCount,
- const VkClearAttachment* pAttachments,
- uint32_t rectCount,
- const VkClearRect* pRects)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- layer_data->device_dispatch_table.CmdClearAttachments(commandBuffer, attachmentCount, pAttachments, rectCount, pRects);
-
-}
-
-void DispatchCmdResolveImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkImageResolve* pRegions)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdResolveImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
- {
- srcImage = layer_data->Unwrap(srcImage);
- dstImage = layer_data->Unwrap(dstImage);
- }
- layer_data->device_dispatch_table.CmdResolveImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
-
-}
-
-void DispatchCmdSetEvent(
- VkCommandBuffer commandBuffer,
- VkEvent event,
- VkPipelineStageFlags stageMask)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdSetEvent(commandBuffer, event, stageMask);
- {
- event = layer_data->Unwrap(event);
- }
- layer_data->device_dispatch_table.CmdSetEvent(commandBuffer, event, stageMask);
-
-}
-
-void DispatchCmdResetEvent(
- VkCommandBuffer commandBuffer,
- VkEvent event,
- VkPipelineStageFlags stageMask)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdResetEvent(commandBuffer, event, stageMask);
- {
- event = layer_data->Unwrap(event);
- }
- layer_data->device_dispatch_table.CmdResetEvent(commandBuffer, event, stageMask);
-
-}
-
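-// Buffer and image memory barriers are copied into safe_* structs so their buffer/image handles can be unwrapped; plain memory barriers pass through untouched.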
-void DispatchCmdWaitEvents(
- VkCommandBuffer commandBuffer,
- uint32_t eventCount,
- const VkEvent* pEvents,
- VkPipelineStageFlags srcStageMask,
- VkPipelineStageFlags dstStageMask,
- uint32_t memoryBarrierCount,
- const VkMemoryBarrier* pMemoryBarriers,
- uint32_t bufferMemoryBarrierCount,
- const VkBufferMemoryBarrier* pBufferMemoryBarriers,
- uint32_t imageMemoryBarrierCount,
- const VkImageMemoryBarrier* pImageMemoryBarriers)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdWaitEvents(commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
- VkEvent *local_pEvents = NULL;
- safe_VkBufferMemoryBarrier *local_pBufferMemoryBarriers = NULL;
- safe_VkImageMemoryBarrier *local_pImageMemoryBarriers = NULL;
- {
- if (pEvents) {
- local_pEvents = new VkEvent[eventCount];
- for (uint32_t index0 = 0; index0 < eventCount; ++index0) {
- local_pEvents[index0] = layer_data->Unwrap(pEvents[index0]);
- }
- }
- if (pBufferMemoryBarriers) {
- local_pBufferMemoryBarriers = new safe_VkBufferMemoryBarrier[bufferMemoryBarrierCount];
- for (uint32_t index0 = 0; index0 < bufferMemoryBarrierCount; ++index0) {
- local_pBufferMemoryBarriers[index0].initialize(&pBufferMemoryBarriers[index0]);
- if (pBufferMemoryBarriers[index0].buffer) {
- local_pBufferMemoryBarriers[index0].buffer = layer_data->Unwrap(pBufferMemoryBarriers[index0].buffer);
- }
- }
- }
- if (pImageMemoryBarriers) {
- local_pImageMemoryBarriers = new safe_VkImageMemoryBarrier[imageMemoryBarrierCount];
- for (uint32_t index0 = 0; index0 < imageMemoryBarrierCount; ++index0) {
- local_pImageMemoryBarriers[index0].initialize(&pImageMemoryBarriers[index0]);
- if (pImageMemoryBarriers[index0].image) {
- local_pImageMemoryBarriers[index0].image = layer_data->Unwrap(pImageMemoryBarriers[index0].image);
- }
- }
- }
- }
- layer_data->device_dispatch_table.CmdWaitEvents(commandBuffer, eventCount, (const VkEvent*)local_pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, (const VkBufferMemoryBarrier*)local_pBufferMemoryBarriers, imageMemoryBarrierCount, (const VkImageMemoryBarrier*)local_pImageMemoryBarriers);
- if (local_pEvents)
- delete[] local_pEvents;
- if (local_pBufferMemoryBarriers) {
- delete[] local_pBufferMemoryBarriers;
- }
- if (local_pImageMemoryBarriers) {
- delete[] local_pImageMemoryBarriers;
- }
-}
-
-void DispatchCmdPipelineBarrier(
- VkCommandBuffer commandBuffer,
- VkPipelineStageFlags srcStageMask,
- VkPipelineStageFlags dstStageMask,
- VkDependencyFlags dependencyFlags,
- uint32_t memoryBarrierCount,
- const VkMemoryBarrier* pMemoryBarriers,
- uint32_t bufferMemoryBarrierCount,
- const VkBufferMemoryBarrier* pBufferMemoryBarriers,
- uint32_t imageMemoryBarrierCount,
- const VkImageMemoryBarrier* pImageMemoryBarriers)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
- safe_VkBufferMemoryBarrier *local_pBufferMemoryBarriers = NULL;
- safe_VkImageMemoryBarrier *local_pImageMemoryBarriers = NULL;
- {
- if (pBufferMemoryBarriers) {
- local_pBufferMemoryBarriers = new safe_VkBufferMemoryBarrier[bufferMemoryBarrierCount];
- for (uint32_t index0 = 0; index0 < bufferMemoryBarrierCount; ++index0) {
- local_pBufferMemoryBarriers[index0].initialize(&pBufferMemoryBarriers[index0]);
- if (pBufferMemoryBarriers[index0].buffer) {
- local_pBufferMemoryBarriers[index0].buffer = layer_data->Unwrap(pBufferMemoryBarriers[index0].buffer);
- }
- }
- }
- if (pImageMemoryBarriers) {
- local_pImageMemoryBarriers = new safe_VkImageMemoryBarrier[imageMemoryBarrierCount];
- for (uint32_t index0 = 0; index0 < imageMemoryBarrierCount; ++index0) {
- local_pImageMemoryBarriers[index0].initialize(&pImageMemoryBarriers[index0]);
- if (pImageMemoryBarriers[index0].image) {
- local_pImageMemoryBarriers[index0].image = layer_data->Unwrap(pImageMemoryBarriers[index0].image);
- }
- }
- }
- }
- layer_data->device_dispatch_table.CmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, (const VkBufferMemoryBarrier*)local_pBufferMemoryBarriers, imageMemoryBarrierCount, (const VkImageMemoryBarrier*)local_pImageMemoryBarriers);
- if (local_pBufferMemoryBarriers) {
- delete[] local_pBufferMemoryBarriers;
- }
- if (local_pImageMemoryBarriers) {
- delete[] local_pImageMemoryBarriers;
- }
-}
-
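-// Entry points that take bare non-dispatchable handles (query pools, layouts, buffers)
-// unwrap those parameters in place; no temporary structures are needed.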
-void DispatchCmdBeginQuery(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query,
- VkQueryControlFlags flags)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdBeginQuery(commandBuffer, queryPool, query, flags);
- {
- queryPool = layer_data->Unwrap(queryPool);
- }
- layer_data->device_dispatch_table.CmdBeginQuery(commandBuffer, queryPool, query, flags);
-
-}
-
-void DispatchCmdEndQuery(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdEndQuery(commandBuffer, queryPool, query);
- {
- queryPool = layer_data->Unwrap(queryPool);
- }
- layer_data->device_dispatch_table.CmdEndQuery(commandBuffer, queryPool, query);
-
-}
-
-void DispatchCmdResetQueryPool(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdResetQueryPool(commandBuffer, queryPool, firstQuery, queryCount);
- {
- queryPool = layer_data->Unwrap(queryPool);
- }
- layer_data->device_dispatch_table.CmdResetQueryPool(commandBuffer, queryPool, firstQuery, queryCount);
-
-}
-
-void DispatchCmdWriteTimestamp(
- VkCommandBuffer commandBuffer,
- VkPipelineStageFlagBits pipelineStage,
- VkQueryPool queryPool,
- uint32_t query)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdWriteTimestamp(commandBuffer, pipelineStage, queryPool, query);
- {
- queryPool = layer_data->Unwrap(queryPool);
- }
- layer_data->device_dispatch_table.CmdWriteTimestamp(commandBuffer, pipelineStage, queryPool, query);
-
-}
-
-void DispatchCmdCopyQueryPoolResults(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize stride,
- VkQueryResultFlags flags)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdCopyQueryPoolResults(commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags);
- {
- queryPool = layer_data->Unwrap(queryPool);
- dstBuffer = layer_data->Unwrap(dstBuffer);
- }
- layer_data->device_dispatch_table.CmdCopyQueryPoolResults(commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags);
-
-}
-
-void DispatchCmdPushConstants(
- VkCommandBuffer commandBuffer,
- VkPipelineLayout layout,
- VkShaderStageFlags stageFlags,
- uint32_t offset,
- uint32_t size,
- const void* pValues)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdPushConstants(commandBuffer, layout, stageFlags, offset, size, pValues);
- {
- layout = layer_data->Unwrap(layout);
- }
- layer_data->device_dispatch_table.CmdPushConstants(commandBuffer, layout, stageFlags, offset, size, pValues);
-
-}
-
-void DispatchCmdBeginRenderPass(
- VkCommandBuffer commandBuffer,
- const VkRenderPassBeginInfo* pRenderPassBegin,
- VkSubpassContents contents)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdBeginRenderPass(commandBuffer, pRenderPassBegin, contents);
- safe_VkRenderPassBeginInfo *local_pRenderPassBegin = NULL;
- {
- if (pRenderPassBegin) {
- local_pRenderPassBegin = new safe_VkRenderPassBeginInfo(pRenderPassBegin);
- if (pRenderPassBegin->renderPass) {
- local_pRenderPassBegin->renderPass = layer_data->Unwrap(pRenderPassBegin->renderPass);
- }
- if (pRenderPassBegin->framebuffer) {
- local_pRenderPassBegin->framebuffer = layer_data->Unwrap(pRenderPassBegin->framebuffer);
- }
- WrapPnextChainHandles(layer_data, local_pRenderPassBegin->pNext);
- }
- }
- layer_data->device_dispatch_table.CmdBeginRenderPass(commandBuffer, (const VkRenderPassBeginInfo*)local_pRenderPassBegin, contents);
- if (local_pRenderPassBegin) {
- delete local_pRenderPassBegin;
- }
-}
-
-void DispatchCmdNextSubpass(
- VkCommandBuffer commandBuffer,
- VkSubpassContents contents)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- layer_data->device_dispatch_table.CmdNextSubpass(commandBuffer, contents);
-
-}
-
-void DispatchCmdEndRenderPass(
- VkCommandBuffer commandBuffer)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- layer_data->device_dispatch_table.CmdEndRenderPass(commandBuffer);
-
-}
-
-void DispatchCmdExecuteCommands(
- VkCommandBuffer commandBuffer,
- uint32_t commandBufferCount,
- const VkCommandBuffer* pCommandBuffers)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- layer_data->device_dispatch_table.CmdExecuteCommands(commandBuffer, commandBufferCount, pCommandBuffers);
-
-}
-
-// Skip vkEnumerateInstanceVersion dispatch, manually generated
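-// Entry points marked "manually generated" are dispatched by hand-written code rather
-// than by the standard generated template shown in the surrounding functions.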
-
-VkResult DispatchBindBufferMemory2(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindBufferMemoryInfo* pBindInfos)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.BindBufferMemory2(device, bindInfoCount, pBindInfos);
- safe_VkBindBufferMemoryInfo *local_pBindInfos = NULL;
- {
- if (pBindInfos) {
- local_pBindInfos = new safe_VkBindBufferMemoryInfo[bindInfoCount];
- for (uint32_t index0 = 0; index0 < bindInfoCount; ++index0) {
- local_pBindInfos[index0].initialize(&pBindInfos[index0]);
- if (pBindInfos[index0].buffer) {
- local_pBindInfos[index0].buffer = layer_data->Unwrap(pBindInfos[index0].buffer);
- }
- if (pBindInfos[index0].memory) {
- local_pBindInfos[index0].memory = layer_data->Unwrap(pBindInfos[index0].memory);
- }
- }
- }
- }
- VkResult result = layer_data->device_dispatch_table.BindBufferMemory2(device, bindInfoCount, (const VkBindBufferMemoryInfo*)local_pBindInfos);
- if (local_pBindInfos) {
- delete[] local_pBindInfos;
- }
- return result;
-}
-
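-// Handles may also live in extension structures chained through pNext;
-// WrapPnextChainHandles walks the local copy's chain so handles embedded there are
-// likewise translated before the call is forwarded.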
-VkResult DispatchBindImageMemory2(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindImageMemoryInfo* pBindInfos)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.BindImageMemory2(device, bindInfoCount, pBindInfos);
- safe_VkBindImageMemoryInfo *local_pBindInfos = NULL;
- {
- if (pBindInfos) {
- local_pBindInfos = new safe_VkBindImageMemoryInfo[bindInfoCount];
- for (uint32_t index0 = 0; index0 < bindInfoCount; ++index0) {
- local_pBindInfos[index0].initialize(&pBindInfos[index0]);
- WrapPnextChainHandles(layer_data, local_pBindInfos[index0].pNext);
- if (pBindInfos[index0].image) {
- local_pBindInfos[index0].image = layer_data->Unwrap(pBindInfos[index0].image);
- }
- if (pBindInfos[index0].memory) {
- local_pBindInfos[index0].memory = layer_data->Unwrap(pBindInfos[index0].memory);
- }
- }
- }
- }
- VkResult result = layer_data->device_dispatch_table.BindImageMemory2(device, bindInfoCount, (const VkBindImageMemoryInfo*)local_pBindInfos);
- if (local_pBindInfos) {
- delete[] local_pBindInfos;
- }
- return result;
-}
-
-void DispatchGetDeviceGroupPeerMemoryFeatures(
- VkDevice device,
- uint32_t heapIndex,
- uint32_t localDeviceIndex,
- uint32_t remoteDeviceIndex,
- VkPeerMemoryFeatureFlags* pPeerMemoryFeatures)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- layer_data->device_dispatch_table.GetDeviceGroupPeerMemoryFeatures(device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures);
-
-}
-
-void DispatchCmdSetDeviceMask(
- VkCommandBuffer commandBuffer,
- uint32_t deviceMask)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- layer_data->device_dispatch_table.CmdSetDeviceMask(commandBuffer, deviceMask);
-
-}
-
-void DispatchCmdDispatchBase(
- VkCommandBuffer commandBuffer,
- uint32_t baseGroupX,
- uint32_t baseGroupY,
- uint32_t baseGroupZ,
- uint32_t groupCountX,
- uint32_t groupCountY,
- uint32_t groupCountZ)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- layer_data->device_dispatch_table.CmdDispatchBase(commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ);
-
-}
-
-VkResult DispatchEnumeratePhysicalDeviceGroups(
- VkInstance instance,
- uint32_t* pPhysicalDeviceGroupCount,
- VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- VkResult result = layer_data->instance_dispatch_table.EnumeratePhysicalDeviceGroups(instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
-
- return result;
-}
-
-void DispatchGetImageMemoryRequirements2(
- VkDevice device,
- const VkImageMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.GetImageMemoryRequirements2(device, pInfo, pMemoryRequirements);
- safe_VkImageMemoryRequirementsInfo2 *local_pInfo = NULL;
- {
- if (pInfo) {
- local_pInfo = new safe_VkImageMemoryRequirementsInfo2(pInfo);
- if (pInfo->image) {
- local_pInfo->image = layer_data->Unwrap(pInfo->image);
- }
- }
- }
- layer_data->device_dispatch_table.GetImageMemoryRequirements2(device, (const VkImageMemoryRequirementsInfo2*)local_pInfo, pMemoryRequirements);
- if (local_pInfo) {
- delete local_pInfo;
- }
-}
-
-void DispatchGetBufferMemoryRequirements2(
- VkDevice device,
- const VkBufferMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.GetBufferMemoryRequirements2(device, pInfo, pMemoryRequirements);
- safe_VkBufferMemoryRequirementsInfo2 *local_pInfo = NULL;
- {
- if (pInfo) {
- local_pInfo = new safe_VkBufferMemoryRequirementsInfo2(pInfo);
- if (pInfo->buffer) {
- local_pInfo->buffer = layer_data->Unwrap(pInfo->buffer);
- }
- }
- }
- layer_data->device_dispatch_table.GetBufferMemoryRequirements2(device, (const VkBufferMemoryRequirementsInfo2*)local_pInfo, pMemoryRequirements);
- if (local_pInfo) {
- delete local_pInfo;
- }
-}
-
-void DispatchGetImageSparseMemoryRequirements2(
- VkDevice device,
- const VkImageSparseMemoryRequirementsInfo2* pInfo,
- uint32_t* pSparseMemoryRequirementCount,
- VkSparseImageMemoryRequirements2* pSparseMemoryRequirements)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.GetImageSparseMemoryRequirements2(device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
- safe_VkImageSparseMemoryRequirementsInfo2 *local_pInfo = NULL;
- {
- if (pInfo) {
- local_pInfo = new safe_VkImageSparseMemoryRequirementsInfo2(pInfo);
- if (pInfo->image) {
- local_pInfo->image = layer_data->Unwrap(pInfo->image);
- }
- }
- }
- layer_data->device_dispatch_table.GetImageSparseMemoryRequirements2(device, (const VkImageSparseMemoryRequirementsInfo2*)local_pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
- if (local_pInfo) {
- delete local_pInfo;
- }
-}
-
-void DispatchGetPhysicalDeviceFeatures2(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceFeatures2* pFeatures)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- layer_data->instance_dispatch_table.GetPhysicalDeviceFeatures2(physicalDevice, pFeatures);
-
-}
-
-void DispatchGetPhysicalDeviceProperties2(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceProperties2* pProperties)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- layer_data->instance_dispatch_table.GetPhysicalDeviceProperties2(physicalDevice, pProperties);
-
-}
-
-void DispatchGetPhysicalDeviceFormatProperties2(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkFormatProperties2* pFormatProperties)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- layer_data->instance_dispatch_table.GetPhysicalDeviceFormatProperties2(physicalDevice, format, pFormatProperties);
-
-}
-
-VkResult DispatchGetPhysicalDeviceImageFormatProperties2(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
- VkImageFormatProperties2* pImageFormatProperties)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- if (!wrap_handles) return layer_data->instance_dispatch_table.GetPhysicalDeviceImageFormatProperties2(physicalDevice, pImageFormatInfo, pImageFormatProperties);
- safe_VkPhysicalDeviceImageFormatInfo2 *local_pImageFormatInfo = NULL;
- {
- if (pImageFormatInfo) {
- local_pImageFormatInfo = new safe_VkPhysicalDeviceImageFormatInfo2(pImageFormatInfo);
- WrapPnextChainHandles(layer_data, local_pImageFormatInfo->pNext);
- }
- }
- VkResult result = layer_data->instance_dispatch_table.GetPhysicalDeviceImageFormatProperties2(physicalDevice, (const VkPhysicalDeviceImageFormatInfo2*)local_pImageFormatInfo, pImageFormatProperties);
- if (local_pImageFormatInfo) {
- delete local_pImageFormatInfo;
- }
- return result;
-}
-
-void DispatchGetPhysicalDeviceQueueFamilyProperties2(
- VkPhysicalDevice physicalDevice,
- uint32_t* pQueueFamilyPropertyCount,
- VkQueueFamilyProperties2* pQueueFamilyProperties)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- layer_data->instance_dispatch_table.GetPhysicalDeviceQueueFamilyProperties2(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
-
-}
-
-void DispatchGetPhysicalDeviceMemoryProperties2(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceMemoryProperties2* pMemoryProperties)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- layer_data->instance_dispatch_table.GetPhysicalDeviceMemoryProperties2(physicalDevice, pMemoryProperties);
-
-}
-
-void DispatchGetPhysicalDeviceSparseImageFormatProperties2(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo,
- uint32_t* pPropertyCount,
- VkSparseImageFormatProperties2* pProperties)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- layer_data->instance_dispatch_table.GetPhysicalDeviceSparseImageFormatProperties2(physicalDevice, pFormatInfo, pPropertyCount, pProperties);
-
-}
-
-void DispatchTrimCommandPool(
- VkDevice device,
- VkCommandPool commandPool,
- VkCommandPoolTrimFlags flags)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.TrimCommandPool(device, commandPool, flags);
- {
- commandPool = layer_data->Unwrap(commandPool);
- }
- layer_data->device_dispatch_table.TrimCommandPool(device, commandPool, flags);
-
-}
-
-void DispatchGetDeviceQueue2(
- VkDevice device,
- const VkDeviceQueueInfo2* pQueueInfo,
- VkQueue* pQueue)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- layer_data->device_dispatch_table.GetDeviceQueue2(device, pQueueInfo, pQueue);
-
-}
-
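-// Creation calls wrap the handle they return: on VK_SUCCESS the driver handle is
-// replaced with a layer-unique ID via layer_data->WrapNew() before it is handed back
-// to the caller.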
-VkResult DispatchCreateSamplerYcbcrConversion(
- VkDevice device,
- const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSamplerYcbcrConversion* pYcbcrConversion)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CreateSamplerYcbcrConversion(device, pCreateInfo, pAllocator, pYcbcrConversion);
- safe_VkSamplerYcbcrConversionCreateInfo *local_pCreateInfo = NULL;
- {
- if (pCreateInfo) {
- local_pCreateInfo = new safe_VkSamplerYcbcrConversionCreateInfo(pCreateInfo);
- WrapPnextChainHandles(layer_data, local_pCreateInfo->pNext);
- }
- }
- VkResult result = layer_data->device_dispatch_table.CreateSamplerYcbcrConversion(device, (const VkSamplerYcbcrConversionCreateInfo*)local_pCreateInfo, pAllocator, pYcbcrConversion);
- if (local_pCreateInfo) {
- delete local_pCreateInfo;
- }
- if (VK_SUCCESS == result) {
- *pYcbcrConversion = layer_data->WrapNew(*pYcbcrConversion);
- }
- return result;
-}
-
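-// Destruction calls reverse the wrapping: the unique ID is popped from
-// unique_id_mapping and the original driver handle (or a null handle if the ID is
-// unknown) is what gets passed to the driver.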
-void DispatchDestroySamplerYcbcrConversion(
- VkDevice device,
- VkSamplerYcbcrConversion ycbcrConversion,
- const VkAllocationCallbacks* pAllocator)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.DestroySamplerYcbcrConversion(device, ycbcrConversion, pAllocator);
- uint64_t ycbcrConversion_id = reinterpret_cast<uint64_t &>(ycbcrConversion);
- auto iter = unique_id_mapping.pop(ycbcrConversion_id);
- if (iter != unique_id_mapping.end()) {
- ycbcrConversion = (VkSamplerYcbcrConversion)iter->second;
- } else {
- ycbcrConversion = (VkSamplerYcbcrConversion)0;
- }
- layer_data->device_dispatch_table.DestroySamplerYcbcrConversion(device, ycbcrConversion, pAllocator);
-
-}
-
-// Skip vkCreateDescriptorUpdateTemplate dispatch, manually generated
-
-// Skip vkDestroyDescriptorUpdateTemplate dispatch, manually generated
-
-// Skip vkUpdateDescriptorSetWithTemplate dispatch, manually generated
-
-void DispatchGetPhysicalDeviceExternalBufferProperties(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo,
- VkExternalBufferProperties* pExternalBufferProperties)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- layer_data->instance_dispatch_table.GetPhysicalDeviceExternalBufferProperties(physicalDevice, pExternalBufferInfo, pExternalBufferProperties);
-
-}
-
-void DispatchGetPhysicalDeviceExternalFenceProperties(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo,
- VkExternalFenceProperties* pExternalFenceProperties)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- layer_data->instance_dispatch_table.GetPhysicalDeviceExternalFenceProperties(physicalDevice, pExternalFenceInfo, pExternalFenceProperties);
-
-}
-
-void DispatchGetPhysicalDeviceExternalSemaphoreProperties(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
- VkExternalSemaphoreProperties* pExternalSemaphoreProperties)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- layer_data->instance_dispatch_table.GetPhysicalDeviceExternalSemaphoreProperties(physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
-
-}
-
-void DispatchGetDescriptorSetLayoutSupport(
- VkDevice device,
- const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
- VkDescriptorSetLayoutSupport* pSupport)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.GetDescriptorSetLayoutSupport(device, pCreateInfo, pSupport);
- safe_VkDescriptorSetLayoutCreateInfo *local_pCreateInfo = NULL;
- {
- if (pCreateInfo) {
- local_pCreateInfo = new safe_VkDescriptorSetLayoutCreateInfo(pCreateInfo);
- if (local_pCreateInfo->pBindings) {
- for (uint32_t index1 = 0; index1 < local_pCreateInfo->bindingCount; ++index1) {
- if (local_pCreateInfo->pBindings[index1].pImmutableSamplers) {
- for (uint32_t index2 = 0; index2 < local_pCreateInfo->pBindings[index1].descriptorCount; ++index2) {
- local_pCreateInfo->pBindings[index1].pImmutableSamplers[index2] = layer_data->Unwrap(local_pCreateInfo->pBindings[index1].pImmutableSamplers[index2]);
- }
- }
- }
- }
- }
- }
- layer_data->device_dispatch_table.GetDescriptorSetLayoutSupport(device, (const VkDescriptorSetLayoutCreateInfo*)local_pCreateInfo, pSupport);
- if (local_pCreateInfo) {
- delete local_pCreateInfo;
- }
-}
-
-void DispatchDestroySurfaceKHR(
- VkInstance instance,
- VkSurfaceKHR surface,
- const VkAllocationCallbacks* pAllocator)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- if (!wrap_handles) return layer_data->instance_dispatch_table.DestroySurfaceKHR(instance, surface, pAllocator);
- uint64_t surface_id = reinterpret_cast<uint64_t &>(surface);
- auto iter = unique_id_mapping.pop(surface_id);
- if (iter != unique_id_mapping.end()) {
- surface = (VkSurfaceKHR)iter->second;
- } else {
- surface = (VkSurfaceKHR)0;
- }
- layer_data->instance_dispatch_table.DestroySurfaceKHR(instance, surface, pAllocator);
-
-}
-
-VkResult DispatchGetPhysicalDeviceSurfaceSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex,
- VkSurfaceKHR surface,
- VkBool32* pSupported)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- if (!wrap_handles) return layer_data->instance_dispatch_table.GetPhysicalDeviceSurfaceSupportKHR(physicalDevice, queueFamilyIndex, surface, pSupported);
- {
- surface = layer_data->Unwrap(surface);
- }
- VkResult result = layer_data->instance_dispatch_table.GetPhysicalDeviceSurfaceSupportKHR(physicalDevice, queueFamilyIndex, surface, pSupported);
-
- return result;
-}
-
-VkResult DispatchGetPhysicalDeviceSurfaceCapabilitiesKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- VkSurfaceCapabilitiesKHR* pSurfaceCapabilities)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- if (!wrap_handles) return layer_data->instance_dispatch_table.GetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, surface, pSurfaceCapabilities);
- {
- surface = layer_data->Unwrap(surface);
- }
- VkResult result = layer_data->instance_dispatch_table.GetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, surface, pSurfaceCapabilities);
-
- return result;
-}
-
-VkResult DispatchGetPhysicalDeviceSurfaceFormatsKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- uint32_t* pSurfaceFormatCount,
- VkSurfaceFormatKHR* pSurfaceFormats)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- if (!wrap_handles) return layer_data->instance_dispatch_table.GetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats);
- {
- surface = layer_data->Unwrap(surface);
- }
- VkResult result = layer_data->instance_dispatch_table.GetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats);
-
- return result;
-}
-
-VkResult DispatchGetPhysicalDeviceSurfacePresentModesKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- uint32_t* pPresentModeCount,
- VkPresentModeKHR* pPresentModes)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- if (!wrap_handles) return layer_data->instance_dispatch_table.GetPhysicalDeviceSurfacePresentModesKHR(physicalDevice, surface, pPresentModeCount, pPresentModes);
- {
- surface = layer_data->Unwrap(surface);
- }
- VkResult result = layer_data->instance_dispatch_table.GetPhysicalDeviceSurfacePresentModesKHR(physicalDevice, surface, pPresentModeCount, pPresentModes);
-
- return result;
-}
-
-// Skip vkCreateSwapchainKHR dispatch, manually generated
-
-// Skip vkDestroySwapchainKHR dispatch, manually generated
-
-// Skip vkGetSwapchainImagesKHR dispatch, manually generated
-
-VkResult DispatchAcquireNextImageKHR(
- VkDevice device,
- VkSwapchainKHR swapchain,
- uint64_t timeout,
- VkSemaphore semaphore,
- VkFence fence,
- uint32_t* pImageIndex)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.AcquireNextImageKHR(device, swapchain, timeout, semaphore, fence, pImageIndex);
- {
- swapchain = layer_data->Unwrap(swapchain);
- semaphore = layer_data->Unwrap(semaphore);
- fence = layer_data->Unwrap(fence);
- }
- VkResult result = layer_data->device_dispatch_table.AcquireNextImageKHR(device, swapchain, timeout, semaphore, fence, pImageIndex);
-
- return result;
-}
-
-// Skip vkQueuePresentKHR dispatch, manually generated
-
-VkResult DispatchGetDeviceGroupPresentCapabilitiesKHR(
- VkDevice device,
- VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- VkResult result = layer_data->device_dispatch_table.GetDeviceGroupPresentCapabilitiesKHR(device, pDeviceGroupPresentCapabilities);
-
- return result;
-}
-
-VkResult DispatchGetDeviceGroupSurfacePresentModesKHR(
- VkDevice device,
- VkSurfaceKHR surface,
- VkDeviceGroupPresentModeFlagsKHR* pModes)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.GetDeviceGroupSurfacePresentModesKHR(device, surface, pModes);
- {
- surface = layer_data->Unwrap(surface);
- }
- VkResult result = layer_data->device_dispatch_table.GetDeviceGroupSurfacePresentModesKHR(device, surface, pModes);
-
- return result;
-}
-
-VkResult DispatchGetPhysicalDevicePresentRectanglesKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- uint32_t* pRectCount,
- VkRect2D* pRects)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- if (!wrap_handles) return layer_data->instance_dispatch_table.GetPhysicalDevicePresentRectanglesKHR(physicalDevice, surface, pRectCount, pRects);
- {
- surface = layer_data->Unwrap(surface);
- }
- VkResult result = layer_data->instance_dispatch_table.GetPhysicalDevicePresentRectanglesKHR(physicalDevice, surface, pRectCount, pRects);
-
- return result;
-}
-
-VkResult DispatchAcquireNextImage2KHR(
- VkDevice device,
- const VkAcquireNextImageInfoKHR* pAcquireInfo,
- uint32_t* pImageIndex)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.AcquireNextImage2KHR(device, pAcquireInfo, pImageIndex);
- safe_VkAcquireNextImageInfoKHR *local_pAcquireInfo = NULL;
- {
- if (pAcquireInfo) {
- local_pAcquireInfo = new safe_VkAcquireNextImageInfoKHR(pAcquireInfo);
- if (pAcquireInfo->swapchain) {
- local_pAcquireInfo->swapchain = layer_data->Unwrap(pAcquireInfo->swapchain);
- }
- if (pAcquireInfo->semaphore) {
- local_pAcquireInfo->semaphore = layer_data->Unwrap(pAcquireInfo->semaphore);
- }
- if (pAcquireInfo->fence) {
- local_pAcquireInfo->fence = layer_data->Unwrap(pAcquireInfo->fence);
- }
- }
- }
- VkResult result = layer_data->device_dispatch_table.AcquireNextImage2KHR(device, (const VkAcquireNextImageInfoKHR*)local_pAcquireInfo, pImageIndex);
- if (local_pAcquireInfo) {
- delete local_pAcquireInfo;
- }
- return result;
-}
-
-// Skip vkGetPhysicalDeviceDisplayPropertiesKHR dispatch, manually generated
-
-// Skip vkGetPhysicalDeviceDisplayPlanePropertiesKHR dispatch, manually generated
-
-// Skip vkGetDisplayPlaneSupportedDisplaysKHR dispatch, manually generated
-
-// Skip vkGetDisplayModePropertiesKHR dispatch, manually generated
-
-VkResult DispatchCreateDisplayModeKHR(
- VkPhysicalDevice physicalDevice,
- VkDisplayKHR display,
- const VkDisplayModeCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDisplayModeKHR* pMode)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- if (!wrap_handles) return layer_data->instance_dispatch_table.CreateDisplayModeKHR(physicalDevice, display, pCreateInfo, pAllocator, pMode);
- {
- display = layer_data->Unwrap(display);
- }
- VkResult result = layer_data->instance_dispatch_table.CreateDisplayModeKHR(physicalDevice, display, pCreateInfo, pAllocator, pMode);
- if (VK_SUCCESS == result) {
- *pMode = layer_data->WrapNew(*pMode);
- }
- return result;
-}
-
-VkResult DispatchGetDisplayPlaneCapabilitiesKHR(
- VkPhysicalDevice physicalDevice,
- VkDisplayModeKHR mode,
- uint32_t planeIndex,
- VkDisplayPlaneCapabilitiesKHR* pCapabilities)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- if (!wrap_handles) return layer_data->instance_dispatch_table.GetDisplayPlaneCapabilitiesKHR(physicalDevice, mode, planeIndex, pCapabilities);
- {
- mode = layer_data->Unwrap(mode);
- }
- VkResult result = layer_data->instance_dispatch_table.GetDisplayPlaneCapabilitiesKHR(physicalDevice, mode, planeIndex, pCapabilities);
-
- return result;
-}
-
-VkResult DispatchCreateDisplayPlaneSurfaceKHR(
- VkInstance instance,
- const VkDisplaySurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- if (!wrap_handles) return layer_data->instance_dispatch_table.CreateDisplayPlaneSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
- safe_VkDisplaySurfaceCreateInfoKHR *local_pCreateInfo = NULL;
- {
- if (pCreateInfo) {
- local_pCreateInfo = new safe_VkDisplaySurfaceCreateInfoKHR(pCreateInfo);
- if (pCreateInfo->displayMode) {
- local_pCreateInfo->displayMode = layer_data->Unwrap(pCreateInfo->displayMode);
- }
- }
- }
- VkResult result = layer_data->instance_dispatch_table.CreateDisplayPlaneSurfaceKHR(instance, (const VkDisplaySurfaceCreateInfoKHR*)local_pCreateInfo, pAllocator, pSurface);
- if (local_pCreateInfo) {
- delete local_pCreateInfo;
- }
- if (VK_SUCCESS == result) {
- *pSurface = layer_data->WrapNew(*pSurface);
- }
- return result;
-}
-
-// Skip vkCreateSharedSwapchainsKHR dispatch, manually generated
-
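-// The platform-specific surface creation wrappers below (Xlib, XCB, Wayland, Android,
-// Win32) have nothing to unwrap in their create-info structures; they only wrap the
-// returned VkSurfaceKHR on success.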
-#ifdef VK_USE_PLATFORM_XLIB_KHR
-
-VkResult DispatchCreateXlibSurfaceKHR(
- VkInstance instance,
- const VkXlibSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- if (!wrap_handles) return layer_data->instance_dispatch_table.CreateXlibSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
- VkResult result = layer_data->instance_dispatch_table.CreateXlibSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
- if (VK_SUCCESS == result) {
- *pSurface = layer_data->WrapNew(*pSurface);
- }
- return result;
-}
-#endif // VK_USE_PLATFORM_XLIB_KHR
-
-#ifdef VK_USE_PLATFORM_XLIB_KHR
-
-VkBool32 DispatchGetPhysicalDeviceXlibPresentationSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex,
- Display* dpy,
- VisualID visualID)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- VkBool32 result = layer_data->instance_dispatch_table.GetPhysicalDeviceXlibPresentationSupportKHR(physicalDevice, queueFamilyIndex, dpy, visualID);
-
- return result;
-}
-#endif // VK_USE_PLATFORM_XLIB_KHR
-
-#ifdef VK_USE_PLATFORM_XCB_KHR
-
-VkResult DispatchCreateXcbSurfaceKHR(
- VkInstance instance,
- const VkXcbSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- if (!wrap_handles) return layer_data->instance_dispatch_table.CreateXcbSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
- VkResult result = layer_data->instance_dispatch_table.CreateXcbSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
- if (VK_SUCCESS == result) {
- *pSurface = layer_data->WrapNew(*pSurface);
- }
- return result;
-}
-#endif // VK_USE_PLATFORM_XCB_KHR
-
-#ifdef VK_USE_PLATFORM_XCB_KHR
-
-VkBool32 DispatchGetPhysicalDeviceXcbPresentationSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex,
- xcb_connection_t* connection,
- xcb_visualid_t visual_id)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- VkBool32 result = layer_data->instance_dispatch_table.GetPhysicalDeviceXcbPresentationSupportKHR(physicalDevice, queueFamilyIndex, connection, visual_id);
-
- return result;
-}
-#endif // VK_USE_PLATFORM_XCB_KHR
-
-#ifdef VK_USE_PLATFORM_WAYLAND_KHR
-
-VkResult DispatchCreateWaylandSurfaceKHR(
- VkInstance instance,
- const VkWaylandSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- if (!wrap_handles) return layer_data->instance_dispatch_table.CreateWaylandSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
- VkResult result = layer_data->instance_dispatch_table.CreateWaylandSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
- if (VK_SUCCESS == result) {
- *pSurface = layer_data->WrapNew(*pSurface);
- }
- return result;
-}
-#endif // VK_USE_PLATFORM_WAYLAND_KHR
-
-#ifdef VK_USE_PLATFORM_WAYLAND_KHR
-
-VkBool32 DispatchGetPhysicalDeviceWaylandPresentationSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex,
- struct wl_display* display)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- VkBool32 result = layer_data->instance_dispatch_table.GetPhysicalDeviceWaylandPresentationSupportKHR(physicalDevice, queueFamilyIndex, display);
-
- return result;
-}
-#endif // VK_USE_PLATFORM_WAYLAND_KHR
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-
-VkResult DispatchCreateAndroidSurfaceKHR(
- VkInstance instance,
- const VkAndroidSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- if (!wrap_handles) return layer_data->instance_dispatch_table.CreateAndroidSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
- VkResult result = layer_data->instance_dispatch_table.CreateAndroidSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
- if (VK_SUCCESS == result) {
- *pSurface = layer_data->WrapNew(*pSurface);
- }
- return result;
-}
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-VkResult DispatchCreateWin32SurfaceKHR(
- VkInstance instance,
- const VkWin32SurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- if (!wrap_handles) return layer_data->instance_dispatch_table.CreateWin32SurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
- VkResult result = layer_data->instance_dispatch_table.CreateWin32SurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
- if (VK_SUCCESS == result) {
- *pSurface = layer_data->WrapNew(*pSurface);
- }
- return result;
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-VkBool32 DispatchGetPhysicalDeviceWin32PresentationSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- VkBool32 result = layer_data->instance_dispatch_table.GetPhysicalDeviceWin32PresentationSupportKHR(physicalDevice, queueFamilyIndex);
-
- return result;
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-void DispatchGetPhysicalDeviceFeatures2KHR(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceFeatures2* pFeatures)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- layer_data->instance_dispatch_table.GetPhysicalDeviceFeatures2KHR(physicalDevice, pFeatures);
-
-}
-
-void DispatchGetPhysicalDeviceProperties2KHR(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceProperties2* pProperties)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- layer_data->instance_dispatch_table.GetPhysicalDeviceProperties2KHR(physicalDevice, pProperties);
-
-}
-
-void DispatchGetPhysicalDeviceFormatProperties2KHR(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkFormatProperties2* pFormatProperties)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- layer_data->instance_dispatch_table.GetPhysicalDeviceFormatProperties2KHR(physicalDevice, format, pFormatProperties);
-
-}
-
-VkResult DispatchGetPhysicalDeviceImageFormatProperties2KHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
- VkImageFormatProperties2* pImageFormatProperties)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- if (!wrap_handles) return layer_data->instance_dispatch_table.GetPhysicalDeviceImageFormatProperties2KHR(physicalDevice, pImageFormatInfo, pImageFormatProperties);
- safe_VkPhysicalDeviceImageFormatInfo2 *local_pImageFormatInfo = NULL;
- {
- if (pImageFormatInfo) {
- local_pImageFormatInfo = new safe_VkPhysicalDeviceImageFormatInfo2(pImageFormatInfo);
- WrapPnextChainHandles(layer_data, local_pImageFormatInfo->pNext);
- }
- }
- VkResult result = layer_data->instance_dispatch_table.GetPhysicalDeviceImageFormatProperties2KHR(physicalDevice, (const VkPhysicalDeviceImageFormatInfo2*)local_pImageFormatInfo, pImageFormatProperties);
- if (local_pImageFormatInfo) {
- delete local_pImageFormatInfo;
- }
- return result;
-}
-
-void DispatchGetPhysicalDeviceQueueFamilyProperties2KHR(
- VkPhysicalDevice physicalDevice,
- uint32_t* pQueueFamilyPropertyCount,
- VkQueueFamilyProperties2* pQueueFamilyProperties)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- layer_data->instance_dispatch_table.GetPhysicalDeviceQueueFamilyProperties2KHR(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
-
-}
-
-void DispatchGetPhysicalDeviceMemoryProperties2KHR(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceMemoryProperties2* pMemoryProperties)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- layer_data->instance_dispatch_table.GetPhysicalDeviceMemoryProperties2KHR(physicalDevice, pMemoryProperties);
-
-}
-
-void DispatchGetPhysicalDeviceSparseImageFormatProperties2KHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo,
- uint32_t* pPropertyCount,
- VkSparseImageFormatProperties2* pProperties)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- layer_data->instance_dispatch_table.GetPhysicalDeviceSparseImageFormatProperties2KHR(physicalDevice, pFormatInfo, pPropertyCount, pProperties);
-
-}
-
-void DispatchGetDeviceGroupPeerMemoryFeaturesKHR(
- VkDevice device,
- uint32_t heapIndex,
- uint32_t localDeviceIndex,
- uint32_t remoteDeviceIndex,
- VkPeerMemoryFeatureFlags* pPeerMemoryFeatures)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- layer_data->device_dispatch_table.GetDeviceGroupPeerMemoryFeaturesKHR(device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures);
-
-}
-
-void DispatchCmdSetDeviceMaskKHR(
- VkCommandBuffer commandBuffer,
- uint32_t deviceMask)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- layer_data->device_dispatch_table.CmdSetDeviceMaskKHR(commandBuffer, deviceMask);
-
-}
-
-void DispatchCmdDispatchBaseKHR(
- VkCommandBuffer commandBuffer,
- uint32_t baseGroupX,
- uint32_t baseGroupY,
- uint32_t baseGroupZ,
- uint32_t groupCountX,
- uint32_t groupCountY,
- uint32_t groupCountZ)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- layer_data->device_dispatch_table.CmdDispatchBaseKHR(commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ);
-
-}
-
-void DispatchTrimCommandPoolKHR(
- VkDevice device,
- VkCommandPool commandPool,
- VkCommandPoolTrimFlags flags)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.TrimCommandPoolKHR(device, commandPool, flags);
- {
- commandPool = layer_data->Unwrap(commandPool);
- }
- layer_data->device_dispatch_table.TrimCommandPoolKHR(device, commandPool, flags);
-
-}
-
-VkResult DispatchEnumeratePhysicalDeviceGroupsKHR(
- VkInstance instance,
- uint32_t* pPhysicalDeviceGroupCount,
- VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- VkResult result = layer_data->instance_dispatch_table.EnumeratePhysicalDeviceGroupsKHR(instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
-
- return result;
-}
-
-void DispatchGetPhysicalDeviceExternalBufferPropertiesKHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo,
- VkExternalBufferProperties* pExternalBufferProperties)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- layer_data->instance_dispatch_table.GetPhysicalDeviceExternalBufferPropertiesKHR(physicalDevice, pExternalBufferInfo, pExternalBufferProperties);
-
-}
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-VkResult DispatchGetMemoryWin32HandleKHR(
- VkDevice device,
- const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo,
- HANDLE* pHandle)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.GetMemoryWin32HandleKHR(device, pGetWin32HandleInfo, pHandle);
- safe_VkMemoryGetWin32HandleInfoKHR *local_pGetWin32HandleInfo = NULL;
- {
- if (pGetWin32HandleInfo) {
- local_pGetWin32HandleInfo = new safe_VkMemoryGetWin32HandleInfoKHR(pGetWin32HandleInfo);
- if (pGetWin32HandleInfo->memory) {
- local_pGetWin32HandleInfo->memory = layer_data->Unwrap(pGetWin32HandleInfo->memory);
- }
- }
- }
- VkResult result = layer_data->device_dispatch_table.GetMemoryWin32HandleKHR(device, (const VkMemoryGetWin32HandleInfoKHR*)local_pGetWin32HandleInfo, pHandle);
- if (local_pGetWin32HandleInfo) {
- delete local_pGetWin32HandleInfo;
- }
- return result;
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-VkResult DispatchGetMemoryWin32HandlePropertiesKHR(
- VkDevice device,
- VkExternalMemoryHandleTypeFlagBits handleType,
- HANDLE handle,
- VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- VkResult result = layer_data->device_dispatch_table.GetMemoryWin32HandlePropertiesKHR(device, handleType, handle, pMemoryWin32HandleProperties);
-
- return result;
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-VkResult DispatchGetMemoryFdKHR(
- VkDevice device,
- const VkMemoryGetFdInfoKHR* pGetFdInfo,
- int* pFd)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.GetMemoryFdKHR(device, pGetFdInfo, pFd);
- safe_VkMemoryGetFdInfoKHR *local_pGetFdInfo = NULL;
- {
- if (pGetFdInfo) {
- local_pGetFdInfo = new safe_VkMemoryGetFdInfoKHR(pGetFdInfo);
- if (pGetFdInfo->memory) {
- local_pGetFdInfo->memory = layer_data->Unwrap(pGetFdInfo->memory);
- }
- }
- }
- VkResult result = layer_data->device_dispatch_table.GetMemoryFdKHR(device, (const VkMemoryGetFdInfoKHR*)local_pGetFdInfo, pFd);
- if (local_pGetFdInfo) {
- delete local_pGetFdInfo;
- }
- return result;
-}
-
-VkResult DispatchGetMemoryFdPropertiesKHR(
- VkDevice device,
- VkExternalMemoryHandleTypeFlagBits handleType,
- int fd,
- VkMemoryFdPropertiesKHR* pMemoryFdProperties)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- VkResult result = layer_data->device_dispatch_table.GetMemoryFdPropertiesKHR(device, handleType, fd, pMemoryFdProperties);
-
- return result;
-}
-
-void DispatchGetPhysicalDeviceExternalSemaphorePropertiesKHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
- VkExternalSemaphoreProperties* pExternalSemaphoreProperties)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- layer_data->instance_dispatch_table.GetPhysicalDeviceExternalSemaphorePropertiesKHR(physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
-
-}
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-VkResult DispatchImportSemaphoreWin32HandleKHR(
- VkDevice device,
- const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.ImportSemaphoreWin32HandleKHR(device, pImportSemaphoreWin32HandleInfo);
- safe_VkImportSemaphoreWin32HandleInfoKHR *local_pImportSemaphoreWin32HandleInfo = NULL;
- {
- if (pImportSemaphoreWin32HandleInfo) {
- local_pImportSemaphoreWin32HandleInfo = new safe_VkImportSemaphoreWin32HandleInfoKHR(pImportSemaphoreWin32HandleInfo);
- if (pImportSemaphoreWin32HandleInfo->semaphore) {
- local_pImportSemaphoreWin32HandleInfo->semaphore = layer_data->Unwrap(pImportSemaphoreWin32HandleInfo->semaphore);
- }
- }
- }
- VkResult result = layer_data->device_dispatch_table.ImportSemaphoreWin32HandleKHR(device, (const VkImportSemaphoreWin32HandleInfoKHR*)local_pImportSemaphoreWin32HandleInfo);
- if (local_pImportSemaphoreWin32HandleInfo) {
- delete local_pImportSemaphoreWin32HandleInfo;
- }
- return result;
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-VkResult DispatchGetSemaphoreWin32HandleKHR(
- VkDevice device,
- const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo,
- HANDLE* pHandle)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.GetSemaphoreWin32HandleKHR(device, pGetWin32HandleInfo, pHandle);
- safe_VkSemaphoreGetWin32HandleInfoKHR *local_pGetWin32HandleInfo = NULL;
- {
- if (pGetWin32HandleInfo) {
- local_pGetWin32HandleInfo = new safe_VkSemaphoreGetWin32HandleInfoKHR(pGetWin32HandleInfo);
- if (pGetWin32HandleInfo->semaphore) {
- local_pGetWin32HandleInfo->semaphore = layer_data->Unwrap(pGetWin32HandleInfo->semaphore);
- }
- }
- }
- VkResult result = layer_data->device_dispatch_table.GetSemaphoreWin32HandleKHR(device, (const VkSemaphoreGetWin32HandleInfoKHR*)local_pGetWin32HandleInfo, pHandle);
- if (local_pGetWin32HandleInfo) {
- delete local_pGetWin32HandleInfo;
- }
- return result;
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-VkResult DispatchImportSemaphoreFdKHR(
- VkDevice device,
- const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.ImportSemaphoreFdKHR(device, pImportSemaphoreFdInfo);
- safe_VkImportSemaphoreFdInfoKHR *local_pImportSemaphoreFdInfo = NULL;
- {
- if (pImportSemaphoreFdInfo) {
- local_pImportSemaphoreFdInfo = new safe_VkImportSemaphoreFdInfoKHR(pImportSemaphoreFdInfo);
- if (pImportSemaphoreFdInfo->semaphore) {
- local_pImportSemaphoreFdInfo->semaphore = layer_data->Unwrap(pImportSemaphoreFdInfo->semaphore);
- }
- }
- }
- VkResult result = layer_data->device_dispatch_table.ImportSemaphoreFdKHR(device, (const VkImportSemaphoreFdInfoKHR*)local_pImportSemaphoreFdInfo);
- if (local_pImportSemaphoreFdInfo) {
- delete local_pImportSemaphoreFdInfo;
- }
- return result;
-}
-
-VkResult DispatchGetSemaphoreFdKHR(
- VkDevice device,
- const VkSemaphoreGetFdInfoKHR* pGetFdInfo,
- int* pFd)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.GetSemaphoreFdKHR(device, pGetFdInfo, pFd);
- safe_VkSemaphoreGetFdInfoKHR *local_pGetFdInfo = NULL;
- {
- if (pGetFdInfo) {
- local_pGetFdInfo = new safe_VkSemaphoreGetFdInfoKHR(pGetFdInfo);
- if (pGetFdInfo->semaphore) {
- local_pGetFdInfo->semaphore = layer_data->Unwrap(pGetFdInfo->semaphore);
- }
- }
- }
- VkResult result = layer_data->device_dispatch_table.GetSemaphoreFdKHR(device, (const VkSemaphoreGetFdInfoKHR*)local_pGetFdInfo, pFd);
- if (local_pGetFdInfo) {
- delete local_pGetFdInfo;
- }
- return result;
-}
-
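-// Descriptor writes need a deep unwrap: besides the destination set, every sampler,
-// image view, buffer, and texel buffer view referenced by the write structures is
-// translated to its driver handle before the push is forwarded.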
-void DispatchCmdPushDescriptorSetKHR(
- VkCommandBuffer commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipelineLayout layout,
- uint32_t set,
- uint32_t descriptorWriteCount,
- const VkWriteDescriptorSet* pDescriptorWrites)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdPushDescriptorSetKHR(commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites);
- safe_VkWriteDescriptorSet *local_pDescriptorWrites = NULL;
- {
- layout = layer_data->Unwrap(layout);
- if (pDescriptorWrites) {
- local_pDescriptorWrites = new safe_VkWriteDescriptorSet[descriptorWriteCount];
- for (uint32_t index0 = 0; index0 < descriptorWriteCount; ++index0) {
- local_pDescriptorWrites[index0].initialize(&pDescriptorWrites[index0]);
- WrapPnextChainHandles(layer_data, local_pDescriptorWrites[index0].pNext);
- if (pDescriptorWrites[index0].dstSet) {
- local_pDescriptorWrites[index0].dstSet = layer_data->Unwrap(pDescriptorWrites[index0].dstSet);
- }
- if (local_pDescriptorWrites[index0].pImageInfo) {
- for (uint32_t index1 = 0; index1 < local_pDescriptorWrites[index0].descriptorCount; ++index1) {
- if (pDescriptorWrites[index0].pImageInfo[index1].sampler) {
- local_pDescriptorWrites[index0].pImageInfo[index1].sampler = layer_data->Unwrap(pDescriptorWrites[index0].pImageInfo[index1].sampler);
- }
- if (pDescriptorWrites[index0].pImageInfo[index1].imageView) {
- local_pDescriptorWrites[index0].pImageInfo[index1].imageView = layer_data->Unwrap(pDescriptorWrites[index0].pImageInfo[index1].imageView);
- }
- }
- }
- if (local_pDescriptorWrites[index0].pBufferInfo) {
- for (uint32_t index1 = 0; index1 < local_pDescriptorWrites[index0].descriptorCount; ++index1) {
- if (pDescriptorWrites[index0].pBufferInfo[index1].buffer) {
- local_pDescriptorWrites[index0].pBufferInfo[index1].buffer = layer_data->Unwrap(pDescriptorWrites[index0].pBufferInfo[index1].buffer);
- }
- }
- }
- if (local_pDescriptorWrites[index0].pTexelBufferView) {
- for (uint32_t index1 = 0; index1 < local_pDescriptorWrites[index0].descriptorCount; ++index1) {
- local_pDescriptorWrites[index0].pTexelBufferView[index1] = layer_data->Unwrap(local_pDescriptorWrites[index0].pTexelBufferView[index1]);
- }
- }
- }
- }
- }
- layer_data->device_dispatch_table.CmdPushDescriptorSetKHR(commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, (const VkWriteDescriptorSet*)local_pDescriptorWrites);
- if (local_pDescriptorWrites) {
- delete[] local_pDescriptorWrites;
- }
-}
-
-// Skip vkCmdPushDescriptorSetWithTemplateKHR dispatch, manually generated
-
-// Skip vkCreateDescriptorUpdateTemplateKHR dispatch, manually generated
-
-// Skip vkDestroyDescriptorUpdateTemplateKHR dispatch, manually generated
-
-// Skip vkUpdateDescriptorSetWithTemplateKHR dispatch, manually generated
-
-// Skip vkCreateRenderPass2KHR dispatch, manually generated
-
-void DispatchCmdBeginRenderPass2KHR(
- VkCommandBuffer commandBuffer,
- const VkRenderPassBeginInfo* pRenderPassBegin,
- const VkSubpassBeginInfoKHR* pSubpassBeginInfo)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdBeginRenderPass2KHR(commandBuffer, pRenderPassBegin, pSubpassBeginInfo);
- safe_VkRenderPassBeginInfo *local_pRenderPassBegin = NULL;
- {
- if (pRenderPassBegin) {
- local_pRenderPassBegin = new safe_VkRenderPassBeginInfo(pRenderPassBegin);
- if (pRenderPassBegin->renderPass) {
- local_pRenderPassBegin->renderPass = layer_data->Unwrap(pRenderPassBegin->renderPass);
- }
- if (pRenderPassBegin->framebuffer) {
- local_pRenderPassBegin->framebuffer = layer_data->Unwrap(pRenderPassBegin->framebuffer);
- }
- WrapPnextChainHandles(layer_data, local_pRenderPassBegin->pNext);
- }
- }
- layer_data->device_dispatch_table.CmdBeginRenderPass2KHR(commandBuffer, (const VkRenderPassBeginInfo*)local_pRenderPassBegin, pSubpassBeginInfo);
- if (local_pRenderPassBegin) {
- delete local_pRenderPassBegin;
- }
-}
-
-void DispatchCmdNextSubpass2KHR(
- VkCommandBuffer commandBuffer,
- const VkSubpassBeginInfoKHR* pSubpassBeginInfo,
- const VkSubpassEndInfoKHR* pSubpassEndInfo)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- layer_data->device_dispatch_table.CmdNextSubpass2KHR(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo);
-
-}
-
-void DispatchCmdEndRenderPass2KHR(
- VkCommandBuffer commandBuffer,
- const VkSubpassEndInfoKHR* pSubpassEndInfo)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- layer_data->device_dispatch_table.CmdEndRenderPass2KHR(commandBuffer, pSubpassEndInfo);
-
-}
-
-VkResult DispatchGetSwapchainStatusKHR(
- VkDevice device,
- VkSwapchainKHR swapchain)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.GetSwapchainStatusKHR(device, swapchain);
- {
- swapchain = layer_data->Unwrap(swapchain);
- }
- VkResult result = layer_data->device_dispatch_table.GetSwapchainStatusKHR(device, swapchain);
-
- return result;
-}
-
-void DispatchGetPhysicalDeviceExternalFencePropertiesKHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo,
- VkExternalFenceProperties* pExternalFenceProperties)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- layer_data->instance_dispatch_table.GetPhysicalDeviceExternalFencePropertiesKHR(physicalDevice, pExternalFenceInfo, pExternalFenceProperties);
-
-}
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-VkResult DispatchImportFenceWin32HandleKHR(
- VkDevice device,
- const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.ImportFenceWin32HandleKHR(device, pImportFenceWin32HandleInfo);
- safe_VkImportFenceWin32HandleInfoKHR *local_pImportFenceWin32HandleInfo = NULL;
- {
- if (pImportFenceWin32HandleInfo) {
- local_pImportFenceWin32HandleInfo = new safe_VkImportFenceWin32HandleInfoKHR(pImportFenceWin32HandleInfo);
- if (pImportFenceWin32HandleInfo->fence) {
- local_pImportFenceWin32HandleInfo->fence = layer_data->Unwrap(pImportFenceWin32HandleInfo->fence);
- }
- }
- }
- VkResult result = layer_data->device_dispatch_table.ImportFenceWin32HandleKHR(device, (const VkImportFenceWin32HandleInfoKHR*)local_pImportFenceWin32HandleInfo);
- if (local_pImportFenceWin32HandleInfo) {
- delete local_pImportFenceWin32HandleInfo;
- }
- return result;
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-VkResult DispatchGetFenceWin32HandleKHR(
- VkDevice device,
- const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo,
- HANDLE* pHandle)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.GetFenceWin32HandleKHR(device, pGetWin32HandleInfo, pHandle);
- safe_VkFenceGetWin32HandleInfoKHR *local_pGetWin32HandleInfo = NULL;
- {
- if (pGetWin32HandleInfo) {
- local_pGetWin32HandleInfo = new safe_VkFenceGetWin32HandleInfoKHR(pGetWin32HandleInfo);
- if (pGetWin32HandleInfo->fence) {
- local_pGetWin32HandleInfo->fence = layer_data->Unwrap(pGetWin32HandleInfo->fence);
- }
- }
- }
- VkResult result = layer_data->device_dispatch_table.GetFenceWin32HandleKHR(device, (const VkFenceGetWin32HandleInfoKHR*)local_pGetWin32HandleInfo, pHandle);
- if (local_pGetWin32HandleInfo) {
- delete local_pGetWin32HandleInfo;
- }
- return result;
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-VkResult DispatchImportFenceFdKHR(
- VkDevice device,
- const VkImportFenceFdInfoKHR* pImportFenceFdInfo)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.ImportFenceFdKHR(device, pImportFenceFdInfo);
- safe_VkImportFenceFdInfoKHR *local_pImportFenceFdInfo = NULL;
- {
- if (pImportFenceFdInfo) {
- local_pImportFenceFdInfo = new safe_VkImportFenceFdInfoKHR(pImportFenceFdInfo);
- if (pImportFenceFdInfo->fence) {
- local_pImportFenceFdInfo->fence = layer_data->Unwrap(pImportFenceFdInfo->fence);
- }
- }
- }
- VkResult result = layer_data->device_dispatch_table.ImportFenceFdKHR(device, (const VkImportFenceFdInfoKHR*)local_pImportFenceFdInfo);
- if (local_pImportFenceFdInfo) {
- delete local_pImportFenceFdInfo;
- }
- return result;
-}
-
-VkResult DispatchGetFenceFdKHR(
- VkDevice device,
- const VkFenceGetFdInfoKHR* pGetFdInfo,
- int* pFd)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.GetFenceFdKHR(device, pGetFdInfo, pFd);
- safe_VkFenceGetFdInfoKHR *local_pGetFdInfo = NULL;
- {
- if (pGetFdInfo) {
- local_pGetFdInfo = new safe_VkFenceGetFdInfoKHR(pGetFdInfo);
- if (pGetFdInfo->fence) {
- local_pGetFdInfo->fence = layer_data->Unwrap(pGetFdInfo->fence);
- }
- }
- }
- VkResult result = layer_data->device_dispatch_table.GetFenceFdKHR(device, (const VkFenceGetFdInfoKHR*)local_pGetFdInfo, pFd);
- if (local_pGetFdInfo) {
- delete local_pGetFdInfo;
- }
- return result;
-}
-
-VkResult DispatchGetPhysicalDeviceSurfaceCapabilities2KHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
- VkSurfaceCapabilities2KHR* pSurfaceCapabilities)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- if (!wrap_handles) return layer_data->instance_dispatch_table.GetPhysicalDeviceSurfaceCapabilities2KHR(physicalDevice, pSurfaceInfo, pSurfaceCapabilities);
- safe_VkPhysicalDeviceSurfaceInfo2KHR *local_pSurfaceInfo = NULL;
- {
- if (pSurfaceInfo) {
- local_pSurfaceInfo = new safe_VkPhysicalDeviceSurfaceInfo2KHR(pSurfaceInfo);
- if (pSurfaceInfo->surface) {
- local_pSurfaceInfo->surface = layer_data->Unwrap(pSurfaceInfo->surface);
- }
- }
- }
- VkResult result = layer_data->instance_dispatch_table.GetPhysicalDeviceSurfaceCapabilities2KHR(physicalDevice, (const VkPhysicalDeviceSurfaceInfo2KHR*)local_pSurfaceInfo, pSurfaceCapabilities);
- if (local_pSurfaceInfo) {
- delete local_pSurfaceInfo;
- }
- return result;
-}
-
-VkResult DispatchGetPhysicalDeviceSurfaceFormats2KHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
- uint32_t* pSurfaceFormatCount,
- VkSurfaceFormat2KHR* pSurfaceFormats)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- if (!wrap_handles) return layer_data->instance_dispatch_table.GetPhysicalDeviceSurfaceFormats2KHR(physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats);
- safe_VkPhysicalDeviceSurfaceInfo2KHR *local_pSurfaceInfo = NULL;
- {
- if (pSurfaceInfo) {
- local_pSurfaceInfo = new safe_VkPhysicalDeviceSurfaceInfo2KHR(pSurfaceInfo);
- if (pSurfaceInfo->surface) {
- local_pSurfaceInfo->surface = layer_data->Unwrap(pSurfaceInfo->surface);
- }
- }
- }
- VkResult result = layer_data->instance_dispatch_table.GetPhysicalDeviceSurfaceFormats2KHR(physicalDevice, (const VkPhysicalDeviceSurfaceInfo2KHR*)local_pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats);
- if (local_pSurfaceInfo) {
- delete local_pSurfaceInfo;
- }
- return result;
-}
-
-// Skip vkGetPhysicalDeviceDisplayProperties2KHR dispatch, manually generated
-
-// Skip vkGetPhysicalDeviceDisplayPlaneProperties2KHR dispatch, manually generated
-
-// Skip vkGetDisplayModeProperties2KHR dispatch, manually generated
-
-VkResult DispatchGetDisplayPlaneCapabilities2KHR(
- VkPhysicalDevice physicalDevice,
- const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo,
- VkDisplayPlaneCapabilities2KHR* pCapabilities)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- if (!wrap_handles) return layer_data->instance_dispatch_table.GetDisplayPlaneCapabilities2KHR(physicalDevice, pDisplayPlaneInfo, pCapabilities);
- safe_VkDisplayPlaneInfo2KHR *local_pDisplayPlaneInfo = NULL;
- {
- if (pDisplayPlaneInfo) {
- local_pDisplayPlaneInfo = new safe_VkDisplayPlaneInfo2KHR(pDisplayPlaneInfo);
- if (pDisplayPlaneInfo->mode) {
- local_pDisplayPlaneInfo->mode = layer_data->Unwrap(pDisplayPlaneInfo->mode);
- }
- }
- }
- VkResult result = layer_data->instance_dispatch_table.GetDisplayPlaneCapabilities2KHR(physicalDevice, (const VkDisplayPlaneInfo2KHR*)local_pDisplayPlaneInfo, pCapabilities);
- if (local_pDisplayPlaneInfo) {
- delete local_pDisplayPlaneInfo;
- }
- return result;
-}
-
-void DispatchGetImageMemoryRequirements2KHR(
- VkDevice device,
- const VkImageMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.GetImageMemoryRequirements2KHR(device, pInfo, pMemoryRequirements);
- safe_VkImageMemoryRequirementsInfo2 *local_pInfo = NULL;
- {
- if (pInfo) {
- local_pInfo = new safe_VkImageMemoryRequirementsInfo2(pInfo);
- if (pInfo->image) {
- local_pInfo->image = layer_data->Unwrap(pInfo->image);
- }
- }
- }
- layer_data->device_dispatch_table.GetImageMemoryRequirements2KHR(device, (const VkImageMemoryRequirementsInfo2*)local_pInfo, pMemoryRequirements);
- if (local_pInfo) {
- delete local_pInfo;
- }
-}
-
-void DispatchGetBufferMemoryRequirements2KHR(
- VkDevice device,
- const VkBufferMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.GetBufferMemoryRequirements2KHR(device, pInfo, pMemoryRequirements);
- safe_VkBufferMemoryRequirementsInfo2 *local_pInfo = NULL;
- {
- if (pInfo) {
- local_pInfo = new safe_VkBufferMemoryRequirementsInfo2(pInfo);
- if (pInfo->buffer) {
- local_pInfo->buffer = layer_data->Unwrap(pInfo->buffer);
- }
- }
- }
- layer_data->device_dispatch_table.GetBufferMemoryRequirements2KHR(device, (const VkBufferMemoryRequirementsInfo2*)local_pInfo, pMemoryRequirements);
- if (local_pInfo) {
- delete local_pInfo;
- }
-}
-
-void DispatchGetImageSparseMemoryRequirements2KHR(
- VkDevice device,
- const VkImageSparseMemoryRequirementsInfo2* pInfo,
- uint32_t* pSparseMemoryRequirementCount,
- VkSparseImageMemoryRequirements2* pSparseMemoryRequirements)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.GetImageSparseMemoryRequirements2KHR(device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
- safe_VkImageSparseMemoryRequirementsInfo2 *local_pInfo = NULL;
- {
- if (pInfo) {
- local_pInfo = new safe_VkImageSparseMemoryRequirementsInfo2(pInfo);
- if (pInfo->image) {
- local_pInfo->image = layer_data->Unwrap(pInfo->image);
- }
- }
- }
- layer_data->device_dispatch_table.GetImageSparseMemoryRequirements2KHR(device, (const VkImageSparseMemoryRequirementsInfo2*)local_pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
- if (local_pInfo) {
- delete local_pInfo;
- }
-}
-
-VkResult DispatchCreateSamplerYcbcrConversionKHR(
- VkDevice device,
- const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSamplerYcbcrConversion* pYcbcrConversion)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CreateSamplerYcbcrConversionKHR(device, pCreateInfo, pAllocator, pYcbcrConversion);
- safe_VkSamplerYcbcrConversionCreateInfo *local_pCreateInfo = NULL;
- {
- if (pCreateInfo) {
- local_pCreateInfo = new safe_VkSamplerYcbcrConversionCreateInfo(pCreateInfo);
- WrapPnextChainHandles(layer_data, local_pCreateInfo->pNext);
- }
- }
- VkResult result = layer_data->device_dispatch_table.CreateSamplerYcbcrConversionKHR(device, (const VkSamplerYcbcrConversionCreateInfo*)local_pCreateInfo, pAllocator, pYcbcrConversion);
- if (local_pCreateInfo) {
- delete local_pCreateInfo;
- }
- if (VK_SUCCESS == result) {
- *pYcbcrConversion = layer_data->WrapNew(*pYcbcrConversion);
- }
- return result;
-}
-
-void DispatchDestroySamplerYcbcrConversionKHR(
- VkDevice device,
- VkSamplerYcbcrConversion ycbcrConversion,
- const VkAllocationCallbacks* pAllocator)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.DestroySamplerYcbcrConversionKHR(device, ycbcrConversion, pAllocator);
- uint64_t ycbcrConversion_id = reinterpret_cast<uint64_t &>(ycbcrConversion);
- auto iter = unique_id_mapping.pop(ycbcrConversion_id);
- if (iter != unique_id_mapping.end()) {
- ycbcrConversion = (VkSamplerYcbcrConversion)iter->second;
- } else {
- ycbcrConversion = (VkSamplerYcbcrConversion)0;
- }
- layer_data->device_dispatch_table.DestroySamplerYcbcrConversionKHR(device, ycbcrConversion, pAllocator);
-
-}
-
-VkResult DispatchBindBufferMemory2KHR(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindBufferMemoryInfo* pBindInfos)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.BindBufferMemory2KHR(device, bindInfoCount, pBindInfos);
- safe_VkBindBufferMemoryInfo *local_pBindInfos = NULL;
- {
- if (pBindInfos) {
- local_pBindInfos = new safe_VkBindBufferMemoryInfo[bindInfoCount];
- for (uint32_t index0 = 0; index0 < bindInfoCount; ++index0) {
- local_pBindInfos[index0].initialize(&pBindInfos[index0]);
- if (pBindInfos[index0].buffer) {
- local_pBindInfos[index0].buffer = layer_data->Unwrap(pBindInfos[index0].buffer);
- }
- if (pBindInfos[index0].memory) {
- local_pBindInfos[index0].memory = layer_data->Unwrap(pBindInfos[index0].memory);
- }
- }
- }
- }
- VkResult result = layer_data->device_dispatch_table.BindBufferMemory2KHR(device, bindInfoCount, (const VkBindBufferMemoryInfo*)local_pBindInfos);
- if (local_pBindInfos) {
- delete[] local_pBindInfos;
- }
- return result;
-}
-
-VkResult DispatchBindImageMemory2KHR(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindImageMemoryInfo* pBindInfos)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.BindImageMemory2KHR(device, bindInfoCount, pBindInfos);
- safe_VkBindImageMemoryInfo *local_pBindInfos = NULL;
- {
- if (pBindInfos) {
- local_pBindInfos = new safe_VkBindImageMemoryInfo[bindInfoCount];
- for (uint32_t index0 = 0; index0 < bindInfoCount; ++index0) {
- local_pBindInfos[index0].initialize(&pBindInfos[index0]);
- WrapPnextChainHandles(layer_data, local_pBindInfos[index0].pNext);
- if (pBindInfos[index0].image) {
- local_pBindInfos[index0].image = layer_data->Unwrap(pBindInfos[index0].image);
- }
- if (pBindInfos[index0].memory) {
- local_pBindInfos[index0].memory = layer_data->Unwrap(pBindInfos[index0].memory);
- }
- }
- }
- }
- VkResult result = layer_data->device_dispatch_table.BindImageMemory2KHR(device, bindInfoCount, (const VkBindImageMemoryInfo*)local_pBindInfos);
- if (local_pBindInfos) {
- delete[] local_pBindInfos;
- }
- return result;
-}
-
-void DispatchGetDescriptorSetLayoutSupportKHR(
- VkDevice device,
- const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
- VkDescriptorSetLayoutSupport* pSupport)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.GetDescriptorSetLayoutSupportKHR(device, pCreateInfo, pSupport);
- safe_VkDescriptorSetLayoutCreateInfo *local_pCreateInfo = NULL;
- {
- if (pCreateInfo) {
- local_pCreateInfo = new safe_VkDescriptorSetLayoutCreateInfo(pCreateInfo);
- if (local_pCreateInfo->pBindings) {
- for (uint32_t index1 = 0; index1 < local_pCreateInfo->bindingCount; ++index1) {
- if (local_pCreateInfo->pBindings[index1].pImmutableSamplers) {
- for (uint32_t index2 = 0; index2 < local_pCreateInfo->pBindings[index1].descriptorCount; ++index2) {
- local_pCreateInfo->pBindings[index1].pImmutableSamplers[index2] = layer_data->Unwrap(local_pCreateInfo->pBindings[index1].pImmutableSamplers[index2]);
- }
- }
- }
- }
- }
- }
- layer_data->device_dispatch_table.GetDescriptorSetLayoutSupportKHR(device, (const VkDescriptorSetLayoutCreateInfo*)local_pCreateInfo, pSupport);
- if (local_pCreateInfo) {
- delete local_pCreateInfo;
- }
-}
-
-void DispatchCmdDrawIndirectCountKHR(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdDrawIndirectCountKHR(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
- {
- buffer = layer_data->Unwrap(buffer);
- countBuffer = layer_data->Unwrap(countBuffer);
- }
- layer_data->device_dispatch_table.CmdDrawIndirectCountKHR(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
-
-}
-
-void DispatchCmdDrawIndexedIndirectCountKHR(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdDrawIndexedIndirectCountKHR(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
- {
- buffer = layer_data->Unwrap(buffer);
- countBuffer = layer_data->Unwrap(countBuffer);
- }
- layer_data->device_dispatch_table.CmdDrawIndexedIndirectCountKHR(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
-
-}
-
-VkResult DispatchGetPipelineExecutablePropertiesKHR(
- VkDevice device,
- const VkPipelineInfoKHR* pPipelineInfo,
- uint32_t* pExecutableCount,
- VkPipelineExecutablePropertiesKHR* pProperties)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.GetPipelineExecutablePropertiesKHR(device, pPipelineInfo, pExecutableCount, pProperties);
- safe_VkPipelineInfoKHR *local_pPipelineInfo = NULL;
- {
- if (pPipelineInfo) {
- local_pPipelineInfo = new safe_VkPipelineInfoKHR(pPipelineInfo);
- if (pPipelineInfo->pipeline) {
- local_pPipelineInfo->pipeline = layer_data->Unwrap(pPipelineInfo->pipeline);
- }
- }
- }
- VkResult result = layer_data->device_dispatch_table.GetPipelineExecutablePropertiesKHR(device, (const VkPipelineInfoKHR*)local_pPipelineInfo, pExecutableCount, pProperties);
- if (local_pPipelineInfo) {
- delete local_pPipelineInfo;
- }
- return result;
-}
-
-VkResult DispatchGetPipelineExecutableStatisticsKHR(
- VkDevice device,
- const VkPipelineExecutableInfoKHR* pExecutableInfo,
- uint32_t* pStatisticCount,
- VkPipelineExecutableStatisticKHR* pStatistics)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.GetPipelineExecutableStatisticsKHR(device, pExecutableInfo, pStatisticCount, pStatistics);
- safe_VkPipelineExecutableInfoKHR *local_pExecutableInfo = NULL;
- {
- if (pExecutableInfo) {
- local_pExecutableInfo = new safe_VkPipelineExecutableInfoKHR(pExecutableInfo);
- if (pExecutableInfo->pipeline) {
- local_pExecutableInfo->pipeline = layer_data->Unwrap(pExecutableInfo->pipeline);
- }
- }
- }
- VkResult result = layer_data->device_dispatch_table.GetPipelineExecutableStatisticsKHR(device, (const VkPipelineExecutableInfoKHR*)local_pExecutableInfo, pStatisticCount, pStatistics);
- if (local_pExecutableInfo) {
- delete local_pExecutableInfo;
- }
- return result;
-}
-
-VkResult DispatchGetPipelineExecutableInternalRepresentationsKHR(
- VkDevice device,
- const VkPipelineExecutableInfoKHR* pExecutableInfo,
- uint32_t* pInternalRepresentationCount,
- VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.GetPipelineExecutableInternalRepresentationsKHR(device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations);
- safe_VkPipelineExecutableInfoKHR *local_pExecutableInfo = NULL;
- {
- if (pExecutableInfo) {
- local_pExecutableInfo = new safe_VkPipelineExecutableInfoKHR(pExecutableInfo);
- if (pExecutableInfo->pipeline) {
- local_pExecutableInfo->pipeline = layer_data->Unwrap(pExecutableInfo->pipeline);
- }
- }
- }
- VkResult result = layer_data->device_dispatch_table.GetPipelineExecutableInternalRepresentationsKHR(device, (const VkPipelineExecutableInfoKHR*)local_pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations);
- if (local_pExecutableInfo) {
- delete local_pExecutableInfo;
- }
- return result;
-}
-
-VkResult DispatchCreateDebugReportCallbackEXT(
- VkInstance instance,
- const VkDebugReportCallbackCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDebugReportCallbackEXT* pCallback)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- if (!wrap_handles) return layer_data->instance_dispatch_table.CreateDebugReportCallbackEXT(instance, pCreateInfo, pAllocator, pCallback);
- VkResult result = layer_data->instance_dispatch_table.CreateDebugReportCallbackEXT(instance, pCreateInfo, pAllocator, pCallback);
- if (VK_SUCCESS == result) {
- *pCallback = layer_data->WrapNew(*pCallback);
- }
- return result;
-}
-
-void DispatchDestroyDebugReportCallbackEXT(
- VkInstance instance,
- VkDebugReportCallbackEXT callback,
- const VkAllocationCallbacks* pAllocator)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- if (!wrap_handles) return layer_data->instance_dispatch_table.DestroyDebugReportCallbackEXT(instance, callback, pAllocator);
- uint64_t callback_id = reinterpret_cast<uint64_t &>(callback);
- auto iter = unique_id_mapping.pop(callback_id);
- if (iter != unique_id_mapping.end()) {
- callback = (VkDebugReportCallbackEXT)iter->second;
- } else {
- callback = (VkDebugReportCallbackEXT)0;
- }
- layer_data->instance_dispatch_table.DestroyDebugReportCallbackEXT(instance, callback, pAllocator);
-
-}
-
-void DispatchDebugReportMessageEXT(
- VkInstance instance,
- VkDebugReportFlagsEXT flags,
- VkDebugReportObjectTypeEXT objectType,
- uint64_t object,
- size_t location,
- int32_t messageCode,
- const char* pLayerPrefix,
- const char* pMessage)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- layer_data->instance_dispatch_table.DebugReportMessageEXT(instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage);
-
-}
-
-// Skip vkDebugMarkerSetObjectTagEXT dispatch, manually generated
-
-// Skip vkDebugMarkerSetObjectNameEXT dispatch, manually generated
-
-void DispatchCmdDebugMarkerBeginEXT(
- VkCommandBuffer commandBuffer,
- const VkDebugMarkerMarkerInfoEXT* pMarkerInfo)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- layer_data->device_dispatch_table.CmdDebugMarkerBeginEXT(commandBuffer, pMarkerInfo);
-
-}
-
-void DispatchCmdDebugMarkerEndEXT(
- VkCommandBuffer commandBuffer)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- layer_data->device_dispatch_table.CmdDebugMarkerEndEXT(commandBuffer);
-
-}
-
-void DispatchCmdDebugMarkerInsertEXT(
- VkCommandBuffer commandBuffer,
- const VkDebugMarkerMarkerInfoEXT* pMarkerInfo)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- layer_data->device_dispatch_table.CmdDebugMarkerInsertEXT(commandBuffer, pMarkerInfo);
-
-}
-
-void DispatchCmdBindTransformFeedbackBuffersEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstBinding,
- uint32_t bindingCount,
- const VkBuffer* pBuffers,
- const VkDeviceSize* pOffsets,
- const VkDeviceSize* pSizes)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdBindTransformFeedbackBuffersEXT(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes);
- VkBuffer *local_pBuffers = NULL;
- {
- if (pBuffers) {
- local_pBuffers = new VkBuffer[bindingCount];
- for (uint32_t index0 = 0; index0 < bindingCount; ++index0) {
- local_pBuffers[index0] = layer_data->Unwrap(pBuffers[index0]);
- }
- }
- }
- layer_data->device_dispatch_table.CmdBindTransformFeedbackBuffersEXT(commandBuffer, firstBinding, bindingCount, (const VkBuffer*)local_pBuffers, pOffsets, pSizes);
- if (local_pBuffers)
- delete[] local_pBuffers;
-}
-
-void DispatchCmdBeginTransformFeedbackEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstCounterBuffer,
- uint32_t counterBufferCount,
- const VkBuffer* pCounterBuffers,
- const VkDeviceSize* pCounterBufferOffsets)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdBeginTransformFeedbackEXT(commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets);
- VkBuffer *local_pCounterBuffers = NULL;
- {
- if (pCounterBuffers) {
- local_pCounterBuffers = new VkBuffer[counterBufferCount];
- for (uint32_t index0 = 0; index0 < counterBufferCount; ++index0) {
- local_pCounterBuffers[index0] = layer_data->Unwrap(pCounterBuffers[index0]);
- }
- }
- }
- layer_data->device_dispatch_table.CmdBeginTransformFeedbackEXT(commandBuffer, firstCounterBuffer, counterBufferCount, (const VkBuffer*)local_pCounterBuffers, pCounterBufferOffsets);
- if (local_pCounterBuffers)
- delete[] local_pCounterBuffers;
-}
-
-void DispatchCmdEndTransformFeedbackEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstCounterBuffer,
- uint32_t counterBufferCount,
- const VkBuffer* pCounterBuffers,
- const VkDeviceSize* pCounterBufferOffsets)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdEndTransformFeedbackEXT(commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets);
- VkBuffer *local_pCounterBuffers = NULL;
- {
- if (pCounterBuffers) {
- local_pCounterBuffers = new VkBuffer[counterBufferCount];
- for (uint32_t index0 = 0; index0 < counterBufferCount; ++index0) {
- local_pCounterBuffers[index0] = layer_data->Unwrap(pCounterBuffers[index0]);
- }
- }
- }
- layer_data->device_dispatch_table.CmdEndTransformFeedbackEXT(commandBuffer, firstCounterBuffer, counterBufferCount, (const VkBuffer*)local_pCounterBuffers, pCounterBufferOffsets);
- if (local_pCounterBuffers)
- delete[] local_pCounterBuffers;
-}
-
-void DispatchCmdBeginQueryIndexedEXT(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query,
- VkQueryControlFlags flags,
- uint32_t index)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdBeginQueryIndexedEXT(commandBuffer, queryPool, query, flags, index);
- {
- queryPool = layer_data->Unwrap(queryPool);
- }
- layer_data->device_dispatch_table.CmdBeginQueryIndexedEXT(commandBuffer, queryPool, query, flags, index);
-
-}
-
-void DispatchCmdEndQueryIndexedEXT(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query,
- uint32_t index)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdEndQueryIndexedEXT(commandBuffer, queryPool, query, index);
- {
- queryPool = layer_data->Unwrap(queryPool);
- }
- layer_data->device_dispatch_table.CmdEndQueryIndexedEXT(commandBuffer, queryPool, query, index);
-
-}
-
-void DispatchCmdDrawIndirectByteCountEXT(
- VkCommandBuffer commandBuffer,
- uint32_t instanceCount,
- uint32_t firstInstance,
- VkBuffer counterBuffer,
- VkDeviceSize counterBufferOffset,
- uint32_t counterOffset,
- uint32_t vertexStride)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdDrawIndirectByteCountEXT(commandBuffer, instanceCount, firstInstance, counterBuffer, counterBufferOffset, counterOffset, vertexStride);
- {
- counterBuffer = layer_data->Unwrap(counterBuffer);
- }
- layer_data->device_dispatch_table.CmdDrawIndirectByteCountEXT(commandBuffer, instanceCount, firstInstance, counterBuffer, counterBufferOffset, counterOffset, vertexStride);
-
-}
-
-uint32_t DispatchGetImageViewHandleNVX(
- VkDevice device,
- const VkImageViewHandleInfoNVX* pInfo)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.GetImageViewHandleNVX(device, pInfo);
- safe_VkImageViewHandleInfoNVX *local_pInfo = NULL;
- {
- if (pInfo) {
- local_pInfo = new safe_VkImageViewHandleInfoNVX(pInfo);
- if (pInfo->imageView) {
- local_pInfo->imageView = layer_data->Unwrap(pInfo->imageView);
- }
- if (pInfo->sampler) {
- local_pInfo->sampler = layer_data->Unwrap(pInfo->sampler);
- }
- }
- }
- uint32_t result = layer_data->device_dispatch_table.GetImageViewHandleNVX(device, (const VkImageViewHandleInfoNVX*)local_pInfo);
- if (local_pInfo) {
- delete local_pInfo;
- }
- return result;
-}
-
-void DispatchCmdDrawIndirectCountAMD(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdDrawIndirectCountAMD(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
- {
- buffer = layer_data->Unwrap(buffer);
- countBuffer = layer_data->Unwrap(countBuffer);
- }
- layer_data->device_dispatch_table.CmdDrawIndirectCountAMD(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
-
-}
-
-void DispatchCmdDrawIndexedIndirectCountAMD(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdDrawIndexedIndirectCountAMD(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
- {
- buffer = layer_data->Unwrap(buffer);
- countBuffer = layer_data->Unwrap(countBuffer);
- }
- layer_data->device_dispatch_table.CmdDrawIndexedIndirectCountAMD(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
-
-}
-
-VkResult DispatchGetShaderInfoAMD(
- VkDevice device,
- VkPipeline pipeline,
- VkShaderStageFlagBits shaderStage,
- VkShaderInfoTypeAMD infoType,
- size_t* pInfoSize,
- void* pInfo)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.GetShaderInfoAMD(device, pipeline, shaderStage, infoType, pInfoSize, pInfo);
- {
- pipeline = layer_data->Unwrap(pipeline);
- }
- VkResult result = layer_data->device_dispatch_table.GetShaderInfoAMD(device, pipeline, shaderStage, infoType, pInfoSize, pInfo);
-
- return result;
-}
-
-#ifdef VK_USE_PLATFORM_GGP
-
-VkResult DispatchCreateStreamDescriptorSurfaceGGP(
- VkInstance instance,
- const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- if (!wrap_handles) return layer_data->instance_dispatch_table.CreateStreamDescriptorSurfaceGGP(instance, pCreateInfo, pAllocator, pSurface);
- VkResult result = layer_data->instance_dispatch_table.CreateStreamDescriptorSurfaceGGP(instance, pCreateInfo, pAllocator, pSurface);
- if (VK_SUCCESS == result) {
- *pSurface = layer_data->WrapNew(*pSurface);
- }
- return result;
-}
-#endif // VK_USE_PLATFORM_GGP
-
-VkResult DispatchGetPhysicalDeviceExternalImageFormatPropertiesNV(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkImageType type,
- VkImageTiling tiling,
- VkImageUsageFlags usage,
- VkImageCreateFlags flags,
- VkExternalMemoryHandleTypeFlagsNV externalHandleType,
- VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- VkResult result = layer_data->instance_dispatch_table.GetPhysicalDeviceExternalImageFormatPropertiesNV(physicalDevice, format, type, tiling, usage, flags, externalHandleType, pExternalImageFormatProperties);
-
- return result;
-}
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-VkResult DispatchGetMemoryWin32HandleNV(
- VkDevice device,
- VkDeviceMemory memory,
- VkExternalMemoryHandleTypeFlagsNV handleType,
- HANDLE* pHandle)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.GetMemoryWin32HandleNV(device, memory, handleType, pHandle);
- {
- memory = layer_data->Unwrap(memory);
- }
- VkResult result = layer_data->device_dispatch_table.GetMemoryWin32HandleNV(device, memory, handleType, pHandle);
-
- return result;
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_VI_NN
-
-VkResult DispatchCreateViSurfaceNN(
- VkInstance instance,
- const VkViSurfaceCreateInfoNN* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- if (!wrap_handles) return layer_data->instance_dispatch_table.CreateViSurfaceNN(instance, pCreateInfo, pAllocator, pSurface);
- VkResult result = layer_data->instance_dispatch_table.CreateViSurfaceNN(instance, pCreateInfo, pAllocator, pSurface);
- if (VK_SUCCESS == result) {
- *pSurface = layer_data->WrapNew(*pSurface);
- }
- return result;
-}
-#endif // VK_USE_PLATFORM_VI_NN
-
-void DispatchCmdBeginConditionalRenderingEXT(
- VkCommandBuffer commandBuffer,
- const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdBeginConditionalRenderingEXT(commandBuffer, pConditionalRenderingBegin);
- safe_VkConditionalRenderingBeginInfoEXT *local_pConditionalRenderingBegin = NULL;
- {
- if (pConditionalRenderingBegin) {
- local_pConditionalRenderingBegin = new safe_VkConditionalRenderingBeginInfoEXT(pConditionalRenderingBegin);
- if (pConditionalRenderingBegin->buffer) {
- local_pConditionalRenderingBegin->buffer = layer_data->Unwrap(pConditionalRenderingBegin->buffer);
- }
- }
- }
- layer_data->device_dispatch_table.CmdBeginConditionalRenderingEXT(commandBuffer, (const VkConditionalRenderingBeginInfoEXT*)local_pConditionalRenderingBegin);
- if (local_pConditionalRenderingBegin) {
- delete local_pConditionalRenderingBegin;
- }
-}
-
-void DispatchCmdEndConditionalRenderingEXT(
- VkCommandBuffer commandBuffer)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- layer_data->device_dispatch_table.CmdEndConditionalRenderingEXT(commandBuffer);
-
-}
-
-void DispatchCmdProcessCommandsNVX(
- VkCommandBuffer commandBuffer,
- const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdProcessCommandsNVX(commandBuffer, pProcessCommandsInfo);
- safe_VkCmdProcessCommandsInfoNVX *local_pProcessCommandsInfo = NULL;
- {
- if (pProcessCommandsInfo) {
- local_pProcessCommandsInfo = new safe_VkCmdProcessCommandsInfoNVX(pProcessCommandsInfo);
- if (pProcessCommandsInfo->objectTable) {
- local_pProcessCommandsInfo->objectTable = layer_data->Unwrap(pProcessCommandsInfo->objectTable);
- }
- if (pProcessCommandsInfo->indirectCommandsLayout) {
- local_pProcessCommandsInfo->indirectCommandsLayout = layer_data->Unwrap(pProcessCommandsInfo->indirectCommandsLayout);
- }
- if (local_pProcessCommandsInfo->pIndirectCommandsTokens) {
- for (uint32_t index1 = 0; index1 < local_pProcessCommandsInfo->indirectCommandsTokenCount; ++index1) {
- if (pProcessCommandsInfo->pIndirectCommandsTokens[index1].buffer) {
- local_pProcessCommandsInfo->pIndirectCommandsTokens[index1].buffer = layer_data->Unwrap(pProcessCommandsInfo->pIndirectCommandsTokens[index1].buffer);
- }
- }
- }
- if (pProcessCommandsInfo->sequencesCountBuffer) {
- local_pProcessCommandsInfo->sequencesCountBuffer = layer_data->Unwrap(pProcessCommandsInfo->sequencesCountBuffer);
- }
- if (pProcessCommandsInfo->sequencesIndexBuffer) {
- local_pProcessCommandsInfo->sequencesIndexBuffer = layer_data->Unwrap(pProcessCommandsInfo->sequencesIndexBuffer);
- }
- }
- }
- layer_data->device_dispatch_table.CmdProcessCommandsNVX(commandBuffer, (const VkCmdProcessCommandsInfoNVX*)local_pProcessCommandsInfo);
- if (local_pProcessCommandsInfo) {
- delete local_pProcessCommandsInfo;
- }
-}
-
-void DispatchCmdReserveSpaceForCommandsNVX(
- VkCommandBuffer commandBuffer,
- const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdReserveSpaceForCommandsNVX(commandBuffer, pReserveSpaceInfo);
- safe_VkCmdReserveSpaceForCommandsInfoNVX *local_pReserveSpaceInfo = NULL;
- {
- if (pReserveSpaceInfo) {
- local_pReserveSpaceInfo = new safe_VkCmdReserveSpaceForCommandsInfoNVX(pReserveSpaceInfo);
- if (pReserveSpaceInfo->objectTable) {
- local_pReserveSpaceInfo->objectTable = layer_data->Unwrap(pReserveSpaceInfo->objectTable);
- }
- if (pReserveSpaceInfo->indirectCommandsLayout) {
- local_pReserveSpaceInfo->indirectCommandsLayout = layer_data->Unwrap(pReserveSpaceInfo->indirectCommandsLayout);
- }
- }
- }
- layer_data->device_dispatch_table.CmdReserveSpaceForCommandsNVX(commandBuffer, (const VkCmdReserveSpaceForCommandsInfoNVX*)local_pReserveSpaceInfo);
- if (local_pReserveSpaceInfo) {
- delete local_pReserveSpaceInfo;
- }
-}
-
-VkResult DispatchCreateIndirectCommandsLayoutNVX(
- VkDevice device,
- const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CreateIndirectCommandsLayoutNVX(device, pCreateInfo, pAllocator, pIndirectCommandsLayout);
- VkResult result = layer_data->device_dispatch_table.CreateIndirectCommandsLayoutNVX(device, pCreateInfo, pAllocator, pIndirectCommandsLayout);
- if (VK_SUCCESS == result) {
- *pIndirectCommandsLayout = layer_data->WrapNew(*pIndirectCommandsLayout);
- }
- return result;
-}
-
-void DispatchDestroyIndirectCommandsLayoutNVX(
- VkDevice device,
- VkIndirectCommandsLayoutNVX indirectCommandsLayout,
- const VkAllocationCallbacks* pAllocator)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.DestroyIndirectCommandsLayoutNVX(device, indirectCommandsLayout, pAllocator);
- uint64_t indirectCommandsLayout_id = reinterpret_cast<uint64_t &>(indirectCommandsLayout);
- auto iter = unique_id_mapping.pop(indirectCommandsLayout_id);
- if (iter != unique_id_mapping.end()) {
- indirectCommandsLayout = (VkIndirectCommandsLayoutNVX)iter->second;
- } else {
- indirectCommandsLayout = (VkIndirectCommandsLayoutNVX)0;
- }
- layer_data->device_dispatch_table.DestroyIndirectCommandsLayoutNVX(device, indirectCommandsLayout, pAllocator);
-
-}
-
-VkResult DispatchCreateObjectTableNVX(
- VkDevice device,
- const VkObjectTableCreateInfoNVX* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkObjectTableNVX* pObjectTable)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CreateObjectTableNVX(device, pCreateInfo, pAllocator, pObjectTable);
- VkResult result = layer_data->device_dispatch_table.CreateObjectTableNVX(device, pCreateInfo, pAllocator, pObjectTable);
- if (VK_SUCCESS == result) {
- *pObjectTable = layer_data->WrapNew(*pObjectTable);
- }
- return result;
-}
-
-void DispatchDestroyObjectTableNVX(
- VkDevice device,
- VkObjectTableNVX objectTable,
- const VkAllocationCallbacks* pAllocator)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.DestroyObjectTableNVX(device, objectTable, pAllocator);
- uint64_t objectTable_id = reinterpret_cast<uint64_t &>(objectTable);
- auto iter = unique_id_mapping.pop(objectTable_id);
- if (iter != unique_id_mapping.end()) {
- objectTable = (VkObjectTableNVX)iter->second;
- } else {
- objectTable = (VkObjectTableNVX)0;
- }
- layer_data->device_dispatch_table.DestroyObjectTableNVX(device, objectTable, pAllocator);
-
-}
-
-VkResult DispatchRegisterObjectsNVX(
- VkDevice device,
- VkObjectTableNVX objectTable,
- uint32_t objectCount,
- const VkObjectTableEntryNVX* const* ppObjectTableEntries,
- const uint32_t* pObjectIndices)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.RegisterObjectsNVX(device, objectTable, objectCount, ppObjectTableEntries, pObjectIndices);
- {
- objectTable = layer_data->Unwrap(objectTable);
- }
- VkResult result = layer_data->device_dispatch_table.RegisterObjectsNVX(device, objectTable, objectCount, ppObjectTableEntries, pObjectIndices);
-
- return result;
-}
-
-VkResult DispatchUnregisterObjectsNVX(
- VkDevice device,
- VkObjectTableNVX objectTable,
- uint32_t objectCount,
- const VkObjectEntryTypeNVX* pObjectEntryTypes,
- const uint32_t* pObjectIndices)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.UnregisterObjectsNVX(device, objectTable, objectCount, pObjectEntryTypes, pObjectIndices);
- {
- objectTable = layer_data->Unwrap(objectTable);
- }
- VkResult result = layer_data->device_dispatch_table.UnregisterObjectsNVX(device, objectTable, objectCount, pObjectEntryTypes, pObjectIndices);
-
- return result;
-}
-
-void DispatchGetPhysicalDeviceGeneratedCommandsPropertiesNVX(
- VkPhysicalDevice physicalDevice,
- VkDeviceGeneratedCommandsFeaturesNVX* pFeatures,
- VkDeviceGeneratedCommandsLimitsNVX* pLimits)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- layer_data->instance_dispatch_table.GetPhysicalDeviceGeneratedCommandsPropertiesNVX(physicalDevice, pFeatures, pLimits);
-
-}
-
-void DispatchCmdSetViewportWScalingNV(
- VkCommandBuffer commandBuffer,
- uint32_t firstViewport,
- uint32_t viewportCount,
- const VkViewportWScalingNV* pViewportWScalings)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- layer_data->device_dispatch_table.CmdSetViewportWScalingNV(commandBuffer, firstViewport, viewportCount, pViewportWScalings);
-
-}
-
-VkResult DispatchReleaseDisplayEXT(
- VkPhysicalDevice physicalDevice,
- VkDisplayKHR display)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- if (!wrap_handles) return layer_data->instance_dispatch_table.ReleaseDisplayEXT(physicalDevice, display);
- {
- display = layer_data->Unwrap(display);
- }
- VkResult result = layer_data->instance_dispatch_table.ReleaseDisplayEXT(physicalDevice, display);
-
- return result;
-}
-
-#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
-
-VkResult DispatchAcquireXlibDisplayEXT(
- VkPhysicalDevice physicalDevice,
- Display* dpy,
- VkDisplayKHR display)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- if (!wrap_handles) return layer_data->instance_dispatch_table.AcquireXlibDisplayEXT(physicalDevice, dpy, display);
- {
- display = layer_data->Unwrap(display);
- }
- VkResult result = layer_data->instance_dispatch_table.AcquireXlibDisplayEXT(physicalDevice, dpy, display);
-
- return result;
-}
-#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
-
-#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
-
-VkResult DispatchGetRandROutputDisplayEXT(
- VkPhysicalDevice physicalDevice,
- Display* dpy,
- RROutput rrOutput,
- VkDisplayKHR* pDisplay)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- if (!wrap_handles) return layer_data->instance_dispatch_table.GetRandROutputDisplayEXT(physicalDevice, dpy, rrOutput, pDisplay);
- VkResult result = layer_data->instance_dispatch_table.GetRandROutputDisplayEXT(physicalDevice, dpy, rrOutput, pDisplay);
- if (VK_SUCCESS == result) {
- *pDisplay = layer_data->WrapNew(*pDisplay);
- }
- return result;
-}
-#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
-
-VkResult DispatchGetPhysicalDeviceSurfaceCapabilities2EXT(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- VkSurfaceCapabilities2EXT* pSurfaceCapabilities)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- if (!wrap_handles) return layer_data->instance_dispatch_table.GetPhysicalDeviceSurfaceCapabilities2EXT(physicalDevice, surface, pSurfaceCapabilities);
- {
- surface = layer_data->Unwrap(surface);
- }
- VkResult result = layer_data->instance_dispatch_table.GetPhysicalDeviceSurfaceCapabilities2EXT(physicalDevice, surface, pSurfaceCapabilities);
-
- return result;
-}
-
-VkResult DispatchDisplayPowerControlEXT(
- VkDevice device,
- VkDisplayKHR display,
- const VkDisplayPowerInfoEXT* pDisplayPowerInfo)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.DisplayPowerControlEXT(device, display, pDisplayPowerInfo);
- {
- display = layer_data->Unwrap(display);
- }
- VkResult result = layer_data->device_dispatch_table.DisplayPowerControlEXT(device, display, pDisplayPowerInfo);
-
- return result;
-}
-
-VkResult DispatchRegisterDeviceEventEXT(
- VkDevice device,
- const VkDeviceEventInfoEXT* pDeviceEventInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFence* pFence)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.RegisterDeviceEventEXT(device, pDeviceEventInfo, pAllocator, pFence);
- VkResult result = layer_data->device_dispatch_table.RegisterDeviceEventEXT(device, pDeviceEventInfo, pAllocator, pFence);
- if (VK_SUCCESS == result) {
- *pFence = layer_data->WrapNew(*pFence);
- }
- return result;
-}
-
-VkResult DispatchRegisterDisplayEventEXT(
- VkDevice device,
- VkDisplayKHR display,
- const VkDisplayEventInfoEXT* pDisplayEventInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFence* pFence)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.RegisterDisplayEventEXT(device, display, pDisplayEventInfo, pAllocator, pFence);
- {
- display = layer_data->Unwrap(display);
- }
- VkResult result = layer_data->device_dispatch_table.RegisterDisplayEventEXT(device, display, pDisplayEventInfo, pAllocator, pFence);
- if (VK_SUCCESS == result) {
- *pFence = layer_data->WrapNew(*pFence);
- }
- return result;
-}
-
-VkResult DispatchGetSwapchainCounterEXT(
- VkDevice device,
- VkSwapchainKHR swapchain,
- VkSurfaceCounterFlagBitsEXT counter,
- uint64_t* pCounterValue)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.GetSwapchainCounterEXT(device, swapchain, counter, pCounterValue);
- {
- swapchain = layer_data->Unwrap(swapchain);
- }
- VkResult result = layer_data->device_dispatch_table.GetSwapchainCounterEXT(device, swapchain, counter, pCounterValue);
-
- return result;
-}
-
-VkResult DispatchGetRefreshCycleDurationGOOGLE(
- VkDevice device,
- VkSwapchainKHR swapchain,
- VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.GetRefreshCycleDurationGOOGLE(device, swapchain, pDisplayTimingProperties);
- {
- swapchain = layer_data->Unwrap(swapchain);
- }
- VkResult result = layer_data->device_dispatch_table.GetRefreshCycleDurationGOOGLE(device, swapchain, pDisplayTimingProperties);
-
- return result;
-}
-
-VkResult DispatchGetPastPresentationTimingGOOGLE(
- VkDevice device,
- VkSwapchainKHR swapchain,
- uint32_t* pPresentationTimingCount,
- VkPastPresentationTimingGOOGLE* pPresentationTimings)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.GetPastPresentationTimingGOOGLE(device, swapchain, pPresentationTimingCount, pPresentationTimings);
- {
- swapchain = layer_data->Unwrap(swapchain);
- }
- VkResult result = layer_data->device_dispatch_table.GetPastPresentationTimingGOOGLE(device, swapchain, pPresentationTimingCount, pPresentationTimings);
-
- return result;
-}
-
-void DispatchCmdSetDiscardRectangleEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstDiscardRectangle,
- uint32_t discardRectangleCount,
- const VkRect2D* pDiscardRectangles)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- layer_data->device_dispatch_table.CmdSetDiscardRectangleEXT(commandBuffer, firstDiscardRectangle, discardRectangleCount, pDiscardRectangles);
-
-}
-
-void DispatchSetHdrMetadataEXT(
- VkDevice device,
- uint32_t swapchainCount,
- const VkSwapchainKHR* pSwapchains,
- const VkHdrMetadataEXT* pMetadata)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.SetHdrMetadataEXT(device, swapchainCount, pSwapchains, pMetadata);
- VkSwapchainKHR *local_pSwapchains = NULL;
- {
- if (pSwapchains) {
- local_pSwapchains = new VkSwapchainKHR[swapchainCount];
- for (uint32_t index0 = 0; index0 < swapchainCount; ++index0) {
- local_pSwapchains[index0] = layer_data->Unwrap(pSwapchains[index0]);
- }
- }
- }
- layer_data->device_dispatch_table.SetHdrMetadataEXT(device, swapchainCount, (const VkSwapchainKHR*)local_pSwapchains, pMetadata);
- if (local_pSwapchains)
- delete[] local_pSwapchains;
-}
-
-#ifdef VK_USE_PLATFORM_IOS_MVK
-
-VkResult DispatchCreateIOSSurfaceMVK(
- VkInstance instance,
- const VkIOSSurfaceCreateInfoMVK* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- if (!wrap_handles) return layer_data->instance_dispatch_table.CreateIOSSurfaceMVK(instance, pCreateInfo, pAllocator, pSurface);
- VkResult result = layer_data->instance_dispatch_table.CreateIOSSurfaceMVK(instance, pCreateInfo, pAllocator, pSurface);
- if (VK_SUCCESS == result) {
- *pSurface = layer_data->WrapNew(*pSurface);
- }
- return result;
-}
-#endif // VK_USE_PLATFORM_IOS_MVK
-
-#ifdef VK_USE_PLATFORM_MACOS_MVK
-
-VkResult DispatchCreateMacOSSurfaceMVK(
- VkInstance instance,
- const VkMacOSSurfaceCreateInfoMVK* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- if (!wrap_handles) return layer_data->instance_dispatch_table.CreateMacOSSurfaceMVK(instance, pCreateInfo, pAllocator, pSurface);
- VkResult result = layer_data->instance_dispatch_table.CreateMacOSSurfaceMVK(instance, pCreateInfo, pAllocator, pSurface);
- if (VK_SUCCESS == result) {
- *pSurface = layer_data->WrapNew(*pSurface);
- }
- return result;
-}
-#endif // VK_USE_PLATFORM_MACOS_MVK
-
-// Skip vkSetDebugUtilsObjectNameEXT dispatch, manually generated
-
-// Skip vkSetDebugUtilsObjectTagEXT dispatch, manually generated
-
-void DispatchQueueBeginDebugUtilsLabelEXT(
- VkQueue queue,
- const VkDebugUtilsLabelEXT* pLabelInfo)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
- layer_data->device_dispatch_table.QueueBeginDebugUtilsLabelEXT(queue, pLabelInfo);
-
-}
-
-void DispatchQueueEndDebugUtilsLabelEXT(
- VkQueue queue)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
- layer_data->device_dispatch_table.QueueEndDebugUtilsLabelEXT(queue);
-
-}
-
-void DispatchQueueInsertDebugUtilsLabelEXT(
- VkQueue queue,
- const VkDebugUtilsLabelEXT* pLabelInfo)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
- layer_data->device_dispatch_table.QueueInsertDebugUtilsLabelEXT(queue, pLabelInfo);
-
-}
-
-void DispatchCmdBeginDebugUtilsLabelEXT(
- VkCommandBuffer commandBuffer,
- const VkDebugUtilsLabelEXT* pLabelInfo)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- layer_data->device_dispatch_table.CmdBeginDebugUtilsLabelEXT(commandBuffer, pLabelInfo);
-
-}
-
-void DispatchCmdEndDebugUtilsLabelEXT(
- VkCommandBuffer commandBuffer)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- layer_data->device_dispatch_table.CmdEndDebugUtilsLabelEXT(commandBuffer);
-
-}
-
-void DispatchCmdInsertDebugUtilsLabelEXT(
- VkCommandBuffer commandBuffer,
- const VkDebugUtilsLabelEXT* pLabelInfo)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- layer_data->device_dispatch_table.CmdInsertDebugUtilsLabelEXT(commandBuffer, pLabelInfo);
-
-}
-
-VkResult DispatchCreateDebugUtilsMessengerEXT(
- VkInstance instance,
- const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDebugUtilsMessengerEXT* pMessenger)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- if (!wrap_handles) return layer_data->instance_dispatch_table.CreateDebugUtilsMessengerEXT(instance, pCreateInfo, pAllocator, pMessenger);
- VkResult result = layer_data->instance_dispatch_table.CreateDebugUtilsMessengerEXT(instance, pCreateInfo, pAllocator, pMessenger);
- if (VK_SUCCESS == result) {
- *pMessenger = layer_data->WrapNew(*pMessenger);
- }
- return result;
-}
-
-void DispatchDestroyDebugUtilsMessengerEXT(
- VkInstance instance,
- VkDebugUtilsMessengerEXT messenger,
- const VkAllocationCallbacks* pAllocator)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- if (!wrap_handles) return layer_data->instance_dispatch_table.DestroyDebugUtilsMessengerEXT(instance, messenger, pAllocator);
- uint64_t messenger_id = reinterpret_cast<uint64_t &>(messenger);
- auto iter = unique_id_mapping.pop(messenger_id);
- if (iter != unique_id_mapping.end()) {
- messenger = (VkDebugUtilsMessengerEXT)iter->second;
- } else {
- messenger = (VkDebugUtilsMessengerEXT)0;
- }
- layer_data->instance_dispatch_table.DestroyDebugUtilsMessengerEXT(instance, messenger, pAllocator);
-
-}
-
-void DispatchSubmitDebugUtilsMessageEXT(
- VkInstance instance,
- VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
- VkDebugUtilsMessageTypeFlagsEXT messageTypes,
- const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- layer_data->instance_dispatch_table.SubmitDebugUtilsMessageEXT(instance, messageSeverity, messageTypes, pCallbackData);
-
-}
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-
-VkResult DispatchGetAndroidHardwareBufferPropertiesANDROID(
- VkDevice device,
- const struct AHardwareBuffer* buffer,
- VkAndroidHardwareBufferPropertiesANDROID* pProperties)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- VkResult result = layer_data->device_dispatch_table.GetAndroidHardwareBufferPropertiesANDROID(device, buffer, pProperties);
-
- return result;
-}
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-
-VkResult DispatchGetMemoryAndroidHardwareBufferANDROID(
- VkDevice device,
- const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo,
- struct AHardwareBuffer** pBuffer)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.GetMemoryAndroidHardwareBufferANDROID(device, pInfo, pBuffer);
- safe_VkMemoryGetAndroidHardwareBufferInfoANDROID *local_pInfo = NULL;
- {
- if (pInfo) {
- local_pInfo = new safe_VkMemoryGetAndroidHardwareBufferInfoANDROID(pInfo);
- if (pInfo->memory) {
- local_pInfo->memory = layer_data->Unwrap(pInfo->memory);
- }
- }
- }
- VkResult result = layer_data->device_dispatch_table.GetMemoryAndroidHardwareBufferANDROID(device, (const VkMemoryGetAndroidHardwareBufferInfoANDROID*)local_pInfo, pBuffer);
- if (local_pInfo) {
- delete local_pInfo;
- }
- return result;
-}
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-
-void DispatchCmdSetSampleLocationsEXT(
- VkCommandBuffer commandBuffer,
- const VkSampleLocationsInfoEXT* pSampleLocationsInfo)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- layer_data->device_dispatch_table.CmdSetSampleLocationsEXT(commandBuffer, pSampleLocationsInfo);
-
-}
-
-void DispatchGetPhysicalDeviceMultisamplePropertiesEXT(
- VkPhysicalDevice physicalDevice,
- VkSampleCountFlagBits samples,
- VkMultisamplePropertiesEXT* pMultisampleProperties)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- layer_data->instance_dispatch_table.GetPhysicalDeviceMultisamplePropertiesEXT(physicalDevice, samples, pMultisampleProperties);
-
-}
-
-VkResult DispatchGetImageDrmFormatModifierPropertiesEXT(
- VkDevice device,
- VkImage image,
- VkImageDrmFormatModifierPropertiesEXT* pProperties)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.GetImageDrmFormatModifierPropertiesEXT(device, image, pProperties);
- {
- image = layer_data->Unwrap(image);
- }
- VkResult result = layer_data->device_dispatch_table.GetImageDrmFormatModifierPropertiesEXT(device, image, pProperties);
-
- return result;
-}
-
-VkResult DispatchCreateValidationCacheEXT(
- VkDevice device,
- const VkValidationCacheCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkValidationCacheEXT* pValidationCache)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CreateValidationCacheEXT(device, pCreateInfo, pAllocator, pValidationCache);
- VkResult result = layer_data->device_dispatch_table.CreateValidationCacheEXT(device, pCreateInfo, pAllocator, pValidationCache);
- if (VK_SUCCESS == result) {
- *pValidationCache = layer_data->WrapNew(*pValidationCache);
- }
- return result;
-}
-
-void DispatchDestroyValidationCacheEXT(
- VkDevice device,
- VkValidationCacheEXT validationCache,
- const VkAllocationCallbacks* pAllocator)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.DestroyValidationCacheEXT(device, validationCache, pAllocator);
- uint64_t validationCache_id = reinterpret_cast<uint64_t &>(validationCache);
- auto iter = unique_id_mapping.pop(validationCache_id);
- if (iter != unique_id_mapping.end()) {
- validationCache = (VkValidationCacheEXT)iter->second;
- } else {
- validationCache = (VkValidationCacheEXT)0;
- }
- layer_data->device_dispatch_table.DestroyValidationCacheEXT(device, validationCache, pAllocator);
-
-}
-
-VkResult DispatchMergeValidationCachesEXT(
- VkDevice device,
- VkValidationCacheEXT dstCache,
- uint32_t srcCacheCount,
- const VkValidationCacheEXT* pSrcCaches)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.MergeValidationCachesEXT(device, dstCache, srcCacheCount, pSrcCaches);
- VkValidationCacheEXT *local_pSrcCaches = NULL;
- {
- dstCache = layer_data->Unwrap(dstCache);
- if (pSrcCaches) {
- local_pSrcCaches = new VkValidationCacheEXT[srcCacheCount];
- for (uint32_t index0 = 0; index0 < srcCacheCount; ++index0) {
- local_pSrcCaches[index0] = layer_data->Unwrap(pSrcCaches[index0]);
- }
- }
- }
- VkResult result = layer_data->device_dispatch_table.MergeValidationCachesEXT(device, dstCache, srcCacheCount, (const VkValidationCacheEXT*)local_pSrcCaches);
- if (local_pSrcCaches)
- delete[] local_pSrcCaches;
- return result;
-}
-
-VkResult DispatchGetValidationCacheDataEXT(
- VkDevice device,
- VkValidationCacheEXT validationCache,
- size_t* pDataSize,
- void* pData)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.GetValidationCacheDataEXT(device, validationCache, pDataSize, pData);
- {
- validationCache = layer_data->Unwrap(validationCache);
- }
- VkResult result = layer_data->device_dispatch_table.GetValidationCacheDataEXT(device, validationCache, pDataSize, pData);
-
- return result;
-}
-
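vkGetValidationCacheDataEXT follows Vulkan's usual two-call idiom: query the size with a null data pointer, then fetch the bytes. A hedged sketch of the application side:

#include <vulkan/vulkan.h>
#include <vector>

std::vector<uint8_t> ReadValidationCache(VkDevice device, VkValidationCacheEXT cache) {
    auto pfnGetData = reinterpret_cast<PFN_vkGetValidationCacheDataEXT>(
        vkGetDeviceProcAddr(device, "vkGetValidationCacheDataEXT"));
    std::vector<uint8_t> blob;
    if (!pfnGetData) return blob;

    size_t size = 0;
    pfnGetData(device, cache, &size, nullptr);                  // first call: query the size
    blob.resize(size);
    if (size) pfnGetData(device, cache, &size, blob.data());    // second call: fetch the bytes
    return blob;   // can later be passed back as pInitialData when recreating the cache
}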
-void DispatchCmdBindShadingRateImageNV(
- VkCommandBuffer commandBuffer,
- VkImageView imageView,
- VkImageLayout imageLayout)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdBindShadingRateImageNV(commandBuffer, imageView, imageLayout);
- {
- imageView = layer_data->Unwrap(imageView);
- }
- layer_data->device_dispatch_table.CmdBindShadingRateImageNV(commandBuffer, imageView, imageLayout);
-
-}
-
-void DispatchCmdSetViewportShadingRatePaletteNV(
- VkCommandBuffer commandBuffer,
- uint32_t firstViewport,
- uint32_t viewportCount,
- const VkShadingRatePaletteNV* pShadingRatePalettes)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- layer_data->device_dispatch_table.CmdSetViewportShadingRatePaletteNV(commandBuffer, firstViewport, viewportCount, pShadingRatePalettes);
-
-}
-
-void DispatchCmdSetCoarseSampleOrderNV(
- VkCommandBuffer commandBuffer,
- VkCoarseSampleOrderTypeNV sampleOrderType,
- uint32_t customSampleOrderCount,
- const VkCoarseSampleOrderCustomNV* pCustomSampleOrders)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- layer_data->device_dispatch_table.CmdSetCoarseSampleOrderNV(commandBuffer, sampleOrderType, customSampleOrderCount, pCustomSampleOrders);
-
-}
-
-VkResult DispatchCreateAccelerationStructureNV(
- VkDevice device,
- const VkAccelerationStructureCreateInfoNV* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkAccelerationStructureNV* pAccelerationStructure)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CreateAccelerationStructureNV(device, pCreateInfo, pAllocator, pAccelerationStructure);
- safe_VkAccelerationStructureCreateInfoNV *local_pCreateInfo = NULL;
- {
- if (pCreateInfo) {
- local_pCreateInfo = new safe_VkAccelerationStructureCreateInfoNV(pCreateInfo);
- if (local_pCreateInfo->info.pGeometries) {
- for (uint32_t index2 = 0; index2 < local_pCreateInfo->info.geometryCount; ++index2) {
- if (pCreateInfo->info.pGeometries[index2].geometry.triangles.vertexData) {
- local_pCreateInfo->info.pGeometries[index2].geometry.triangles.vertexData = layer_data->Unwrap(pCreateInfo->info.pGeometries[index2].geometry.triangles.vertexData);
- }
- if (pCreateInfo->info.pGeometries[index2].geometry.triangles.indexData) {
- local_pCreateInfo->info.pGeometries[index2].geometry.triangles.indexData = layer_data->Unwrap(pCreateInfo->info.pGeometries[index2].geometry.triangles.indexData);
- }
- if (pCreateInfo->info.pGeometries[index2].geometry.triangles.transformData) {
- local_pCreateInfo->info.pGeometries[index2].geometry.triangles.transformData = layer_data->Unwrap(pCreateInfo->info.pGeometries[index2].geometry.triangles.transformData);
- }
- if (pCreateInfo->info.pGeometries[index2].geometry.aabbs.aabbData) {
- local_pCreateInfo->info.pGeometries[index2].geometry.aabbs.aabbData = layer_data->Unwrap(pCreateInfo->info.pGeometries[index2].geometry.aabbs.aabbData);
- }
- }
- }
- }
- }
- VkResult result = layer_data->device_dispatch_table.CreateAccelerationStructureNV(device, (const VkAccelerationStructureCreateInfoNV*)local_pCreateInfo, pAllocator, pAccelerationStructure);
- if (local_pCreateInfo) {
- delete local_pCreateInfo;
- }
- if (VK_SUCCESS == result) {
- *pAccelerationStructure = layer_data->WrapNew(*pAccelerationStructure);
- }
- return result;
-}
-
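The deep unwrap above has to visit every VkBuffer nested inside the geometry array. For reference, a hedged sketch of what the application-side create info for a single-triangle-mesh bottom-level structure can look like (buffers and counts are assumed inputs; error handling trimmed):

#include <vulkan/vulkan.h>

// Hedged sketch: create a bottom-level acceleration structure for one triangle mesh.
// vertexBuffer/indexBuffer are assumed to be valid VkBuffers owned by the caller.
VkAccelerationStructureNV CreateTriangleBlas(VkDevice device,
                                             VkBuffer vertexBuffer, uint32_t vertexCount,
                                             VkBuffer indexBuffer, uint32_t indexCount) {
    auto pfnCreate = reinterpret_cast<PFN_vkCreateAccelerationStructureNV>(
        vkGetDeviceProcAddr(device, "vkCreateAccelerationStructureNV"));
    if (!pfnCreate) return VK_NULL_HANDLE;

    VkGeometryNV geom = {};
    geom.sType = VK_STRUCTURE_TYPE_GEOMETRY_NV;
    geom.geometryType = VK_GEOMETRY_TYPE_TRIANGLES_NV;
    geom.geometry.triangles.sType = VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV;
    geom.geometry.triangles.vertexData = vertexBuffer;   // one of the buffers unwrapped above
    geom.geometry.triangles.vertexCount = vertexCount;
    geom.geometry.triangles.vertexStride = 3 * sizeof(float);
    geom.geometry.triangles.vertexFormat = VK_FORMAT_R32G32B32_SFLOAT;
    geom.geometry.triangles.indexData = indexBuffer;
    geom.geometry.triangles.indexCount = indexCount;
    geom.geometry.triangles.indexType = VK_INDEX_TYPE_UINT32;
    geom.geometry.aabbs.sType = VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV;

    VkAccelerationStructureCreateInfoNV ci = {};
    ci.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV;
    ci.info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV;
    ci.info.type = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV;
    ci.info.geometryCount = 1;
    ci.info.pGeometries = &geom;

    VkAccelerationStructureNV blas = VK_NULL_HANDLE;
    pfnCreate(device, &ci, nullptr, &blas);   // the handle returned to the app is wrapped by the layer
    return blas;
}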
-void DispatchDestroyAccelerationStructureNV(
- VkDevice device,
- VkAccelerationStructureNV accelerationStructure,
- const VkAllocationCallbacks* pAllocator)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.DestroyAccelerationStructureNV(device, accelerationStructure, pAllocator);
- uint64_t accelerationStructure_id = reinterpret_cast<uint64_t &>(accelerationStructure);
- auto iter = unique_id_mapping.pop(accelerationStructure_id);
- if (iter != unique_id_mapping.end()) {
- accelerationStructure = (VkAccelerationStructureNV)iter->second;
- } else {
- accelerationStructure = (VkAccelerationStructureNV)0;
- }
- layer_data->device_dispatch_table.DestroyAccelerationStructureNV(device, accelerationStructure, pAllocator);
-
-}
-
-void DispatchGetAccelerationStructureMemoryRequirementsNV(
- VkDevice device,
- const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo,
- VkMemoryRequirements2KHR* pMemoryRequirements)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.GetAccelerationStructureMemoryRequirementsNV(device, pInfo, pMemoryRequirements);
- safe_VkAccelerationStructureMemoryRequirementsInfoNV *local_pInfo = NULL;
- {
- if (pInfo) {
- local_pInfo = new safe_VkAccelerationStructureMemoryRequirementsInfoNV(pInfo);
- if (pInfo->accelerationStructure) {
- local_pInfo->accelerationStructure = layer_data->Unwrap(pInfo->accelerationStructure);
- }
- }
- }
- layer_data->device_dispatch_table.GetAccelerationStructureMemoryRequirementsNV(device, (const VkAccelerationStructureMemoryRequirementsInfoNV*)local_pInfo, pMemoryRequirements);
- if (local_pInfo) {
- delete local_pInfo;
- }
-}
-
-VkResult DispatchBindAccelerationStructureMemoryNV(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindAccelerationStructureMemoryInfoNV* pBindInfos)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.BindAccelerationStructureMemoryNV(device, bindInfoCount, pBindInfos);
- safe_VkBindAccelerationStructureMemoryInfoNV *local_pBindInfos = NULL;
- {
- if (pBindInfos) {
- local_pBindInfos = new safe_VkBindAccelerationStructureMemoryInfoNV[bindInfoCount];
- for (uint32_t index0 = 0; index0 < bindInfoCount; ++index0) {
- local_pBindInfos[index0].initialize(&pBindInfos[index0]);
- if (pBindInfos[index0].accelerationStructure) {
- local_pBindInfos[index0].accelerationStructure = layer_data->Unwrap(pBindInfos[index0].accelerationStructure);
- }
- if (pBindInfos[index0].memory) {
- local_pBindInfos[index0].memory = layer_data->Unwrap(pBindInfos[index0].memory);
- }
- }
- }
- }
- VkResult result = layer_data->device_dispatch_table.BindAccelerationStructureMemoryNV(device, bindInfoCount, (const VkBindAccelerationStructureMemoryInfoNV*)local_pBindInfos);
- if (local_pBindInfos) {
- delete[] local_pBindInfos;
- }
- return result;
-}
-
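These two entry points implement the query-allocate-bind flow that follows creation. A hedged sketch of that flow; FindMemoryType is a hypothetical helper and error handling is trimmed:

#include <vulkan/vulkan.h>

// Hedged sketch: query the object size, allocate device memory, and bind it.
extern uint32_t FindMemoryType(uint32_t typeBits, VkMemoryPropertyFlags flags);  // hypothetical helper

VkDeviceMemory BindAccelerationStructureStorage(VkDevice device, VkAccelerationStructureNV as) {
    auto pfnGetReqs = reinterpret_cast<PFN_vkGetAccelerationStructureMemoryRequirementsNV>(
        vkGetDeviceProcAddr(device, "vkGetAccelerationStructureMemoryRequirementsNV"));
    auto pfnBind = reinterpret_cast<PFN_vkBindAccelerationStructureMemoryNV>(
        vkGetDeviceProcAddr(device, "vkBindAccelerationStructureMemoryNV"));

    VkAccelerationStructureMemoryRequirementsInfoNV reqInfo = {};
    reqInfo.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
    reqInfo.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV;
    reqInfo.accelerationStructure = as;        // unwrapped by the layer before the ICD sees it

    VkMemoryRequirements2KHR reqs = {};
    reqs.sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR;
    pfnGetReqs(device, &reqInfo, &reqs);

    VkMemoryAllocateInfo alloc = {};
    alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
    alloc.allocationSize = reqs.memoryRequirements.size;
    alloc.memoryTypeIndex = FindMemoryType(reqs.memoryRequirements.memoryTypeBits,
                                           VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
    VkDeviceMemory memory = VK_NULL_HANDLE;
    vkAllocateMemory(device, &alloc, nullptr, &memory);

    VkBindAccelerationStructureMemoryInfoNV bind = {};
    bind.sType = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV;
    bind.accelerationStructure = as;
    bind.memory = memory;                      // both handles are unwrapped in the loop above
    pfnBind(device, 1, &bind);
    return memory;
}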
-void DispatchCmdBuildAccelerationStructureNV(
- VkCommandBuffer commandBuffer,
- const VkAccelerationStructureInfoNV* pInfo,
- VkBuffer instanceData,
- VkDeviceSize instanceOffset,
- VkBool32 update,
- VkAccelerationStructureNV dst,
- VkAccelerationStructureNV src,
- VkBuffer scratch,
- VkDeviceSize scratchOffset)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdBuildAccelerationStructureNV(commandBuffer, pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset);
- safe_VkAccelerationStructureInfoNV *local_pInfo = NULL;
- {
- if (pInfo) {
- local_pInfo = new safe_VkAccelerationStructureInfoNV(pInfo);
- if (local_pInfo->pGeometries) {
- for (uint32_t index1 = 0; index1 < local_pInfo->geometryCount; ++index1) {
- if (pInfo->pGeometries[index1].geometry.triangles.vertexData) {
- local_pInfo->pGeometries[index1].geometry.triangles.vertexData = layer_data->Unwrap(pInfo->pGeometries[index1].geometry.triangles.vertexData);
- }
- if (pInfo->pGeometries[index1].geometry.triangles.indexData) {
- local_pInfo->pGeometries[index1].geometry.triangles.indexData = layer_data->Unwrap(pInfo->pGeometries[index1].geometry.triangles.indexData);
- }
- if (pInfo->pGeometries[index1].geometry.triangles.transformData) {
- local_pInfo->pGeometries[index1].geometry.triangles.transformData = layer_data->Unwrap(pInfo->pGeometries[index1].geometry.triangles.transformData);
- }
- if (pInfo->pGeometries[index1].geometry.aabbs.aabbData) {
- local_pInfo->pGeometries[index1].geometry.aabbs.aabbData = layer_data->Unwrap(pInfo->pGeometries[index1].geometry.aabbs.aabbData);
- }
- }
- }
- }
- instanceData = layer_data->Unwrap(instanceData);
- dst = layer_data->Unwrap(dst);
- src = layer_data->Unwrap(src);
- scratch = layer_data->Unwrap(scratch);
- }
- layer_data->device_dispatch_table.CmdBuildAccelerationStructureNV(commandBuffer, (const VkAccelerationStructureInfoNV*)local_pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset);
- if (local_pInfo) {
- delete local_pInfo;
- }
-}
-
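Recording the build itself is a single command; the layer only has to unwrap the buffers and acceleration structures it names. A hedged sketch, assuming the scratch buffer was sized from the BUILD_SCRATCH memory-requirements query:

#include <vulkan/vulkan.h>

// Hedged sketch: record a non-updating BLAS build. 'buildInfo' is the same
// VkAccelerationStructureInfoNV geometry description used at create time.
void RecordBlasBuild(VkDevice device, VkCommandBuffer cmd,
                     const VkAccelerationStructureInfoNV& buildInfo,
                     VkAccelerationStructureNV blas, VkBuffer scratch) {
    auto pfnBuild = reinterpret_cast<PFN_vkCmdBuildAccelerationStructureNV>(
        vkGetDeviceProcAddr(device, "vkCmdBuildAccelerationStructureNV"));
    if (!pfnBuild) return;

    pfnBuild(cmd, &buildInfo,
             VK_NULL_HANDLE, 0,        // instanceData/offset: unused for bottom-level builds
             VK_FALSE,                 // update = false: full build
             blas,                     // dst
             VK_NULL_HANDLE,           // src: only used when update == VK_TRUE
             scratch, 0);              // scratch buffer and offset
}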
-void DispatchCmdCopyAccelerationStructureNV(
- VkCommandBuffer commandBuffer,
- VkAccelerationStructureNV dst,
- VkAccelerationStructureNV src,
- VkCopyAccelerationStructureModeNV mode)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdCopyAccelerationStructureNV(commandBuffer, dst, src, mode);
- {
- dst = layer_data->Unwrap(dst);
- src = layer_data->Unwrap(src);
- }
- layer_data->device_dispatch_table.CmdCopyAccelerationStructureNV(commandBuffer, dst, src, mode);
-
-}
-
-void DispatchCmdTraceRaysNV(
- VkCommandBuffer commandBuffer,
- VkBuffer raygenShaderBindingTableBuffer,
- VkDeviceSize raygenShaderBindingOffset,
- VkBuffer missShaderBindingTableBuffer,
- VkDeviceSize missShaderBindingOffset,
- VkDeviceSize missShaderBindingStride,
- VkBuffer hitShaderBindingTableBuffer,
- VkDeviceSize hitShaderBindingOffset,
- VkDeviceSize hitShaderBindingStride,
- VkBuffer callableShaderBindingTableBuffer,
- VkDeviceSize callableShaderBindingOffset,
- VkDeviceSize callableShaderBindingStride,
- uint32_t width,
- uint32_t height,
- uint32_t depth)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdTraceRaysNV(commandBuffer, raygenShaderBindingTableBuffer, raygenShaderBindingOffset, missShaderBindingTableBuffer, missShaderBindingOffset, missShaderBindingStride, hitShaderBindingTableBuffer, hitShaderBindingOffset, hitShaderBindingStride, callableShaderBindingTableBuffer, callableShaderBindingOffset, callableShaderBindingStride, width, height, depth);
- {
- raygenShaderBindingTableBuffer = layer_data->Unwrap(raygenShaderBindingTableBuffer);
- missShaderBindingTableBuffer = layer_data->Unwrap(missShaderBindingTableBuffer);
- hitShaderBindingTableBuffer = layer_data->Unwrap(hitShaderBindingTableBuffer);
- callableShaderBindingTableBuffer = layer_data->Unwrap(callableShaderBindingTableBuffer);
- }
- layer_data->device_dispatch_table.CmdTraceRaysNV(commandBuffer, raygenShaderBindingTableBuffer, raygenShaderBindingOffset, missShaderBindingTableBuffer, missShaderBindingOffset, missShaderBindingStride, hitShaderBindingTableBuffer, hitShaderBindingOffset, hitShaderBindingStride, callableShaderBindingTableBuffer, callableShaderBindingOffset, callableShaderBindingStride, width, height, depth);
-
-}
-
-VkResult DispatchCreateRayTracingPipelinesNV(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VkRayTracingPipelineCreateInfoNV* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkPipeline* pPipelines)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
- safe_VkRayTracingPipelineCreateInfoNV *local_pCreateInfos = NULL;
- {
- pipelineCache = layer_data->Unwrap(pipelineCache);
- if (pCreateInfos) {
- local_pCreateInfos = new safe_VkRayTracingPipelineCreateInfoNV[createInfoCount];
- for (uint32_t index0 = 0; index0 < createInfoCount; ++index0) {
- local_pCreateInfos[index0].initialize(&pCreateInfos[index0]);
- if (local_pCreateInfos[index0].pStages) {
- for (uint32_t index1 = 0; index1 < local_pCreateInfos[index0].stageCount; ++index1) {
- if (pCreateInfos[index0].pStages[index1].module) {
- local_pCreateInfos[index0].pStages[index1].module = layer_data->Unwrap(pCreateInfos[index0].pStages[index1].module);
- }
- }
- }
- if (pCreateInfos[index0].layout) {
- local_pCreateInfos[index0].layout = layer_data->Unwrap(pCreateInfos[index0].layout);
- }
- if (pCreateInfos[index0].basePipelineHandle) {
- local_pCreateInfos[index0].basePipelineHandle = layer_data->Unwrap(pCreateInfos[index0].basePipelineHandle);
- }
- }
- }
- }
- VkResult result = layer_data->device_dispatch_table.CreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, (const VkRayTracingPipelineCreateInfoNV*)local_pCreateInfos, pAllocator, pPipelines);
- if (local_pCreateInfos) {
- delete[] local_pCreateInfos;
- }
- if (VK_SUCCESS == result) {
- for (uint32_t index0 = 0; index0 < createInfoCount; index0++) {
- pPipelines[index0] = layer_data->WrapNew(pPipelines[index0]);
- }
- }
- return result;
-}
-
-VkResult DispatchGetRayTracingShaderGroupHandlesNV(
- VkDevice device,
- VkPipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- size_t dataSize,
- void* pData)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.GetRayTracingShaderGroupHandlesNV(device, pipeline, firstGroup, groupCount, dataSize, pData);
- {
- pipeline = layer_data->Unwrap(pipeline);
- }
- VkResult result = layer_data->device_dispatch_table.GetRayTracingShaderGroupHandlesNV(device, pipeline, firstGroup, groupCount, dataSize, pData);
-
- return result;
-}
-
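The group handles fetched here are the opaque blobs an application copies into its shader binding table. A hedged sketch; handleSize is assumed to come from VkPhysicalDeviceRayTracingPropertiesNV::shaderGroupHandleSize, queried elsewhere:

#include <vulkan/vulkan.h>
#include <vector>

// Hedged sketch: copy the opaque shader-group handles out of a ray-tracing pipeline.
std::vector<uint8_t> FetchGroupHandles(VkDevice device, VkPipeline rtPipeline,
                                       uint32_t groupCount, uint32_t handleSize) {
    auto pfnGet = reinterpret_cast<PFN_vkGetRayTracingShaderGroupHandlesNV>(
        vkGetDeviceProcAddr(device, "vkGetRayTracingShaderGroupHandlesNV"));
    std::vector<uint8_t> handles(size_t(groupCount) * handleSize);
    if (pfnGet) {
        pfnGet(device, rtPipeline, 0 /*firstGroup*/, groupCount,
               handles.size(), handles.data());
    }
    return handles;   // typically copied into the shader binding table buffer afterwards
}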
-VkResult DispatchGetAccelerationStructureHandleNV(
- VkDevice device,
- VkAccelerationStructureNV accelerationStructure,
- size_t dataSize,
- void* pData)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.GetAccelerationStructureHandleNV(device, accelerationStructure, dataSize, pData);
- {
- accelerationStructure = layer_data->Unwrap(accelerationStructure);
- }
- VkResult result = layer_data->device_dispatch_table.GetAccelerationStructureHandleNV(device, accelerationStructure, dataSize, pData);
-
- return result;
-}
-
-void DispatchCmdWriteAccelerationStructuresPropertiesNV(
- VkCommandBuffer commandBuffer,
- uint32_t accelerationStructureCount,
- const VkAccelerationStructureNV* pAccelerationStructures,
- VkQueryType queryType,
- VkQueryPool queryPool,
- uint32_t firstQuery)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdWriteAccelerationStructuresPropertiesNV(commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery);
- VkAccelerationStructureNV *local_pAccelerationStructures = NULL;
- {
- if (pAccelerationStructures) {
- local_pAccelerationStructures = new VkAccelerationStructureNV[accelerationStructureCount];
- for (uint32_t index0 = 0; index0 < accelerationStructureCount; ++index0) {
- local_pAccelerationStructures[index0] = layer_data->Unwrap(pAccelerationStructures[index0]);
- }
- }
- queryPool = layer_data->Unwrap(queryPool);
- }
- layer_data->device_dispatch_table.CmdWriteAccelerationStructuresPropertiesNV(commandBuffer, accelerationStructureCount, (const VkAccelerationStructureNV*)local_pAccelerationStructures, queryType, queryPool, firstQuery);
- if (local_pAccelerationStructures)
- delete[] local_pAccelerationStructures;
-}
-
-VkResult DispatchCompileDeferredNV(
- VkDevice device,
- VkPipeline pipeline,
- uint32_t shader)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CompileDeferredNV(device, pipeline, shader);
- {
- pipeline = layer_data->Unwrap(pipeline);
- }
- VkResult result = layer_data->device_dispatch_table.CompileDeferredNV(device, pipeline, shader);
-
- return result;
-}
-
-VkResult DispatchGetMemoryHostPointerPropertiesEXT(
- VkDevice device,
- VkExternalMemoryHandleTypeFlagBits handleType,
- const void* pHostPointer,
- VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- VkResult result = layer_data->device_dispatch_table.GetMemoryHostPointerPropertiesEXT(device, handleType, pHostPointer, pMemoryHostPointerProperties);
-
- return result;
-}
-
-void DispatchCmdWriteBufferMarkerAMD(
- VkCommandBuffer commandBuffer,
- VkPipelineStageFlagBits pipelineStage,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- uint32_t marker)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdWriteBufferMarkerAMD(commandBuffer, pipelineStage, dstBuffer, dstOffset, marker);
- {
- dstBuffer = layer_data->Unwrap(dstBuffer);
- }
- layer_data->device_dispatch_table.CmdWriteBufferMarkerAMD(commandBuffer, pipelineStage, dstBuffer, dstOffset, marker);
-
-}
-
-VkResult DispatchGetPhysicalDeviceCalibrateableTimeDomainsEXT(
- VkPhysicalDevice physicalDevice,
- uint32_t* pTimeDomainCount,
- VkTimeDomainEXT* pTimeDomains)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- VkResult result = layer_data->instance_dispatch_table.GetPhysicalDeviceCalibrateableTimeDomainsEXT(physicalDevice, pTimeDomainCount, pTimeDomains);
-
- return result;
-}
-
-VkResult DispatchGetCalibratedTimestampsEXT(
- VkDevice device,
- uint32_t timestampCount,
- const VkCalibratedTimestampInfoEXT* pTimestampInfos,
- uint64_t* pTimestamps,
- uint64_t* pMaxDeviation)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- VkResult result = layer_data->device_dispatch_table.GetCalibratedTimestampsEXT(device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation);
-
- return result;
-}
-
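vkGetCalibratedTimestampsEXT samples several time domains in one call so host and device clocks can be correlated. A hedged sketch pairing the device domain with CLOCK_MONOTONIC (both domains are assumed to have been reported as supported):

#include <vulkan/vulkan.h>

void SampleCalibratedTimestamps(VkDevice device, uint64_t* deviceTicks, uint64_t* hostNs) {
    auto pfnGet = reinterpret_cast<PFN_vkGetCalibratedTimestampsEXT>(
        vkGetDeviceProcAddr(device, "vkGetCalibratedTimestampsEXT"));
    if (!pfnGet) return;

    VkCalibratedTimestampInfoEXT infos[2] = {};
    infos[0].sType = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT;
    infos[0].timeDomain = VK_TIME_DOMAIN_DEVICE_EXT;
    infos[1].sType = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT;
    infos[1].timeDomain = VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT;

    uint64_t timestamps[2] = {};
    uint64_t maxDeviation = 0;   // worst-case skew between the samples, in nanoseconds
    pfnGet(device, 2, infos, timestamps, &maxDeviation);
    *deviceTicks = timestamps[0];
    *hostNs = timestamps[1];
}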
-void DispatchCmdDrawMeshTasksNV(
- VkCommandBuffer commandBuffer,
- uint32_t taskCount,
- uint32_t firstTask)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- layer_data->device_dispatch_table.CmdDrawMeshTasksNV(commandBuffer, taskCount, firstTask);
-
-}
-
-void DispatchCmdDrawMeshTasksIndirectNV(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- uint32_t drawCount,
- uint32_t stride)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdDrawMeshTasksIndirectNV(commandBuffer, buffer, offset, drawCount, stride);
- {
- buffer = layer_data->Unwrap(buffer);
- }
- layer_data->device_dispatch_table.CmdDrawMeshTasksIndirectNV(commandBuffer, buffer, offset, drawCount, stride);
-
-}
-
-void DispatchCmdDrawMeshTasksIndirectCountNV(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.CmdDrawMeshTasksIndirectCountNV(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
- {
- buffer = layer_data->Unwrap(buffer);
- countBuffer = layer_data->Unwrap(countBuffer);
- }
- layer_data->device_dispatch_table.CmdDrawMeshTasksIndirectCountNV(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
-
-}
-
-void DispatchCmdSetExclusiveScissorNV(
- VkCommandBuffer commandBuffer,
- uint32_t firstExclusiveScissor,
- uint32_t exclusiveScissorCount,
- const VkRect2D* pExclusiveScissors)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- layer_data->device_dispatch_table.CmdSetExclusiveScissorNV(commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors);
-
-}
-
-void DispatchCmdSetCheckpointNV(
- VkCommandBuffer commandBuffer,
- const void* pCheckpointMarker)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- layer_data->device_dispatch_table.CmdSetCheckpointNV(commandBuffer, pCheckpointMarker);
-
-}
-
-void DispatchGetQueueCheckpointDataNV(
- VkQueue queue,
- uint32_t* pCheckpointDataCount,
- VkCheckpointDataNV* pCheckpointData)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
- layer_data->device_dispatch_table.GetQueueCheckpointDataNV(queue, pCheckpointDataCount, pCheckpointData);
-
-}
-
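These two entry points belong to VK_NV_device_diagnostic_checkpoints: markers are recorded into command buffers, and after a device loss the queue reports the last markers the GPU reached. A hedged sketch of both halves:

#include <vulkan/vulkan.h>
#include <vector>

void TagAndReadCheckpoints(VkDevice device, VkCommandBuffer cmd, VkQueue queue) {
    auto pfnSet = reinterpret_cast<PFN_vkCmdSetCheckpointNV>(
        vkGetDeviceProcAddr(device, "vkCmdSetCheckpointNV"));
    auto pfnGet = reinterpret_cast<PFN_vkGetQueueCheckpointDataNV>(
        vkGetDeviceProcAddr(device, "vkGetQueueCheckpointDataNV"));
    if (!pfnSet || !pfnGet) return;

    static const char* kMarker = "after-gbuffer-pass";   // any pointer-sized token works
    pfnSet(cmd, kMarker);

    // ... submit work; if a later wait returns VK_ERROR_DEVICE_LOST, query the markers:
    uint32_t count = 0;
    pfnGet(queue, &count, nullptr);                       // first call: marker count
    std::vector<VkCheckpointDataNV> data(count, {VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV});
    pfnGet(queue, &count, data.data());                   // second call: stage + marker pairs
}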
-VkResult DispatchInitializePerformanceApiINTEL(
- VkDevice device,
- const VkInitializePerformanceApiInfoINTEL* pInitializeInfo)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- VkResult result = layer_data->device_dispatch_table.InitializePerformanceApiINTEL(device, pInitializeInfo);
-
- return result;
-}
-
-void DispatchUninitializePerformanceApiINTEL(
- VkDevice device)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- layer_data->device_dispatch_table.UninitializePerformanceApiINTEL(device);
-
-}
-
-VkResult DispatchCmdSetPerformanceMarkerINTEL(
- VkCommandBuffer commandBuffer,
- const VkPerformanceMarkerInfoINTEL* pMarkerInfo)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- VkResult result = layer_data->device_dispatch_table.CmdSetPerformanceMarkerINTEL(commandBuffer, pMarkerInfo);
-
- return result;
-}
-
-VkResult DispatchCmdSetPerformanceStreamMarkerINTEL(
- VkCommandBuffer commandBuffer,
- const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- VkResult result = layer_data->device_dispatch_table.CmdSetPerformanceStreamMarkerINTEL(commandBuffer, pMarkerInfo);
-
- return result;
-}
-
-VkResult DispatchCmdSetPerformanceOverrideINTEL(
- VkCommandBuffer commandBuffer,
- const VkPerformanceOverrideInfoINTEL* pOverrideInfo)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- VkResult result = layer_data->device_dispatch_table.CmdSetPerformanceOverrideINTEL(commandBuffer, pOverrideInfo);
-
- return result;
-}
-
-VkResult DispatchAcquirePerformanceConfigurationINTEL(
- VkDevice device,
- const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo,
- VkPerformanceConfigurationINTEL* pConfiguration)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.AcquirePerformanceConfigurationINTEL(device, pAcquireInfo, pConfiguration);
- {
- pConfiguration = layer_data->Unwrap(pConfiguration);
- }
- VkResult result = layer_data->device_dispatch_table.AcquirePerformanceConfigurationINTEL(device, pAcquireInfo, pConfiguration);
-
- return result;
-}
-
-VkResult DispatchReleasePerformanceConfigurationINTEL(
- VkDevice device,
- VkPerformanceConfigurationINTEL configuration)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.ReleasePerformanceConfigurationINTEL(device, configuration);
- {
- configuration = layer_data->Unwrap(configuration);
- }
- VkResult result = layer_data->device_dispatch_table.ReleasePerformanceConfigurationINTEL(device, configuration);
-
- return result;
-}
-
-VkResult DispatchQueueSetPerformanceConfigurationINTEL(
- VkQueue queue,
- VkPerformanceConfigurationINTEL configuration)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.QueueSetPerformanceConfigurationINTEL(queue, configuration);
- {
- configuration = layer_data->Unwrap(configuration);
- }
- VkResult result = layer_data->device_dispatch_table.QueueSetPerformanceConfigurationINTEL(queue, configuration);
-
- return result;
-}
-
-VkResult DispatchGetPerformanceParameterINTEL(
- VkDevice device,
- VkPerformanceParameterTypeINTEL parameter,
- VkPerformanceValueINTEL* pValue)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- VkResult result = layer_data->device_dispatch_table.GetPerformanceParameterINTEL(device, parameter, pValue);
-
- return result;
-}
-
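The VK_INTEL_performance_query entry points above follow an initialize, acquire-configuration, bind-to-queue sequence. A hedged sketch of that setup path (field values are illustrative; query pool creation and result readback are omitted):

#include <vulkan/vulkan.h>

bool SetUpIntelPerfQueries(VkDevice device, VkQueue queue) {
    auto pfnInit = reinterpret_cast<PFN_vkInitializePerformanceApiINTEL>(
        vkGetDeviceProcAddr(device, "vkInitializePerformanceApiINTEL"));
    auto pfnAcquire = reinterpret_cast<PFN_vkAcquirePerformanceConfigurationINTEL>(
        vkGetDeviceProcAddr(device, "vkAcquirePerformanceConfigurationINTEL"));
    auto pfnQueueSet = reinterpret_cast<PFN_vkQueueSetPerformanceConfigurationINTEL>(
        vkGetDeviceProcAddr(device, "vkQueueSetPerformanceConfigurationINTEL"));
    if (!pfnInit || !pfnAcquire || !pfnQueueSet) return false;

    VkInitializePerformanceApiInfoINTEL initInfo = {};
    initInfo.sType = VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL;
    if (pfnInit(device, &initInfo) != VK_SUCCESS) return false;

    VkPerformanceConfigurationAcquireInfoINTEL acquireInfo = {};
    acquireInfo.sType = VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL;
    acquireInfo.type = VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL;

    VkPerformanceConfigurationINTEL config = VK_NULL_HANDLE;
    if (pfnAcquire(device, &acquireInfo, &config) != VK_SUCCESS) return false;

    // Attach the configuration to the queue so subsequent submissions are profiled.
    return pfnQueueSet(queue, config) == VK_SUCCESS;
}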
-void DispatchSetLocalDimmingAMD(
- VkDevice device,
- VkSwapchainKHR swapChain,
- VkBool32 localDimmingEnable)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.SetLocalDimmingAMD(device, swapChain, localDimmingEnable);
- {
- swapChain = layer_data->Unwrap(swapChain);
- }
- layer_data->device_dispatch_table.SetLocalDimmingAMD(device, swapChain, localDimmingEnable);
-
-}
-
-#ifdef VK_USE_PLATFORM_FUCHSIA
-
-VkResult DispatchCreateImagePipeSurfaceFUCHSIA(
- VkInstance instance,
- const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- if (!wrap_handles) return layer_data->instance_dispatch_table.CreateImagePipeSurfaceFUCHSIA(instance, pCreateInfo, pAllocator, pSurface);
- VkResult result = layer_data->instance_dispatch_table.CreateImagePipeSurfaceFUCHSIA(instance, pCreateInfo, pAllocator, pSurface);
- if (VK_SUCCESS == result) {
- *pSurface = layer_data->WrapNew(*pSurface);
- }
- return result;
-}
-#endif // VK_USE_PLATFORM_FUCHSIA
-
-#ifdef VK_USE_PLATFORM_METAL_EXT
-
-VkResult DispatchCreateMetalSurfaceEXT(
- VkInstance instance,
- const VkMetalSurfaceCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- if (!wrap_handles) return layer_data->instance_dispatch_table.CreateMetalSurfaceEXT(instance, pCreateInfo, pAllocator, pSurface);
- VkResult result = layer_data->instance_dispatch_table.CreateMetalSurfaceEXT(instance, pCreateInfo, pAllocator, pSurface);
- if (VK_SUCCESS == result) {
- *pSurface = layer_data->WrapNew(*pSurface);
- }
- return result;
-}
-#endif // VK_USE_PLATFORM_METAL_EXT
-
-VkDeviceAddress DispatchGetBufferDeviceAddressEXT(
- VkDevice device,
- const VkBufferDeviceAddressInfoEXT* pInfo)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.GetBufferDeviceAddressEXT(device, pInfo);
- safe_VkBufferDeviceAddressInfoEXT *local_pInfo = NULL;
- {
- if (pInfo) {
- local_pInfo = new safe_VkBufferDeviceAddressInfoEXT(pInfo);
- if (pInfo->buffer) {
- local_pInfo->buffer = layer_data->Unwrap(pInfo->buffer);
- }
- }
- }
- VkDeviceAddress result = layer_data->device_dispatch_table.GetBufferDeviceAddressEXT(device, (const VkBufferDeviceAddressInfoEXT*)local_pInfo);
- if (local_pInfo) {
- delete local_pInfo;
- }
- return result;
-}
-
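vkGetBufferDeviceAddressEXT returns a raw GPU virtual address rather than a handle, which is why the result needs no wrapping. A hedged sketch, assuming the buffer was created with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT and the bufferDeviceAddress feature is enabled:

#include <vulkan/vulkan.h>

VkDeviceAddress QueryBufferAddress(VkDevice device, VkBuffer buffer) {
    auto pfnGet = reinterpret_cast<PFN_vkGetBufferDeviceAddressEXT>(
        vkGetDeviceProcAddr(device, "vkGetBufferDeviceAddressEXT"));
    if (!pfnGet) return 0;

    VkBufferDeviceAddressInfoEXT info = {};
    info.sType = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT;
    info.buffer = buffer;              // the wrapped handle; the layer unwraps it above
    return pfnGet(device, &info);      // raw addresses pass through untouched
}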
-VkResult DispatchGetPhysicalDeviceCooperativeMatrixPropertiesNV(
- VkPhysicalDevice physicalDevice,
- uint32_t* pPropertyCount,
- VkCooperativeMatrixPropertiesNV* pProperties)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- VkResult result = layer_data->instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(physicalDevice, pPropertyCount, pProperties);
-
- return result;
-}
-
-VkResult DispatchGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
- VkPhysicalDevice physicalDevice,
- uint32_t* pCombinationCount,
- VkFramebufferMixedSamplesCombinationNV* pCombinations)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- VkResult result = layer_data->instance_dispatch_table.GetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(physicalDevice, pCombinationCount, pCombinations);
-
- return result;
-}
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-VkResult DispatchGetPhysicalDeviceSurfacePresentModes2EXT(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
- uint32_t* pPresentModeCount,
- VkPresentModeKHR* pPresentModes)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- if (!wrap_handles) return layer_data->instance_dispatch_table.GetPhysicalDeviceSurfacePresentModes2EXT(physicalDevice, pSurfaceInfo, pPresentModeCount, pPresentModes);
- safe_VkPhysicalDeviceSurfaceInfo2KHR *local_pSurfaceInfo = NULL;
- {
- if (pSurfaceInfo) {
- local_pSurfaceInfo = new safe_VkPhysicalDeviceSurfaceInfo2KHR(pSurfaceInfo);
- if (pSurfaceInfo->surface) {
- local_pSurfaceInfo->surface = layer_data->Unwrap(pSurfaceInfo->surface);
- }
- }
- }
- VkResult result = layer_data->instance_dispatch_table.GetPhysicalDeviceSurfacePresentModes2EXT(physicalDevice, (const VkPhysicalDeviceSurfaceInfo2KHR*)local_pSurfaceInfo, pPresentModeCount, pPresentModes);
- if (local_pSurfaceInfo) {
- delete local_pSurfaceInfo;
- }
- return result;
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
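vkGetPhysicalDeviceSurfacePresentModes2EXT exists so Win32 full-screen-exclusive state can be passed on the pNext chain of the surface info. A hedged sketch of the two-call enumeration (Win32 only; the surface is deep-copied and unwrapped by the layer above):

#ifdef VK_USE_PLATFORM_WIN32_KHR
#include <vulkan/vulkan.h>
#include <vector>

std::vector<VkPresentModeKHR> QueryPresentModes2(VkInstance instance,
                                                 VkPhysicalDevice gpu, VkSurfaceKHR surface) {
    auto pfnGet = reinterpret_cast<PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT>(
        vkGetInstanceProcAddr(instance, "vkGetPhysicalDeviceSurfacePresentModes2EXT"));
    std::vector<VkPresentModeKHR> modes;
    if (!pfnGet) return modes;

    VkSurfaceFullScreenExclusiveInfoEXT fse = {};
    fse.sType = VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT;
    fse.fullScreenExclusive = VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT;

    VkPhysicalDeviceSurfaceInfo2KHR surfaceInfo = {};
    surfaceInfo.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR;
    surfaceInfo.pNext = &fse;
    surfaceInfo.surface = surface;

    uint32_t count = 0;
    pfnGet(gpu, &surfaceInfo, &count, nullptr);       // first call: count
    modes.resize(count);
    pfnGet(gpu, &surfaceInfo, &count, modes.data());  // second call: modes
    return modes;
}
#endif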
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-VkResult DispatchAcquireFullScreenExclusiveModeEXT(
- VkDevice device,
- VkSwapchainKHR swapchain)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.AcquireFullScreenExclusiveModeEXT(device, swapchain);
- {
- swapchain = layer_data->Unwrap(swapchain);
- }
- VkResult result = layer_data->device_dispatch_table.AcquireFullScreenExclusiveModeEXT(device, swapchain);
-
- return result;
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-VkResult DispatchReleaseFullScreenExclusiveModeEXT(
- VkDevice device,
- VkSwapchainKHR swapchain)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.ReleaseFullScreenExclusiveModeEXT(device, swapchain);
- {
- swapchain = layer_data->Unwrap(swapchain);
- }
- VkResult result = layer_data->device_dispatch_table.ReleaseFullScreenExclusiveModeEXT(device, swapchain);
-
- return result;
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-VkResult DispatchGetDeviceGroupSurfacePresentModes2EXT(
- VkDevice device,
- const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
- VkDeviceGroupPresentModeFlagsKHR* pModes)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.GetDeviceGroupSurfacePresentModes2EXT(device, pSurfaceInfo, pModes);
- safe_VkPhysicalDeviceSurfaceInfo2KHR *local_pSurfaceInfo = NULL;
- {
- if (pSurfaceInfo) {
- local_pSurfaceInfo = new safe_VkPhysicalDeviceSurfaceInfo2KHR(pSurfaceInfo);
- if (pSurfaceInfo->surface) {
- local_pSurfaceInfo->surface = layer_data->Unwrap(pSurfaceInfo->surface);
- }
- }
- }
- VkResult result = layer_data->device_dispatch_table.GetDeviceGroupSurfacePresentModes2EXT(device, (const VkPhysicalDeviceSurfaceInfo2KHR*)local_pSurfaceInfo, pModes);
- if (local_pSurfaceInfo) {
- delete local_pSurfaceInfo;
- }
- return result;
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-VkResult DispatchCreateHeadlessSurfaceEXT(
- VkInstance instance,
- const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
- if (!wrap_handles) return layer_data->instance_dispatch_table.CreateHeadlessSurfaceEXT(instance, pCreateInfo, pAllocator, pSurface);
- VkResult result = layer_data->instance_dispatch_table.CreateHeadlessSurfaceEXT(instance, pCreateInfo, pAllocator, pSurface);
- if (VK_SUCCESS == result) {
- *pSurface = layer_data->WrapNew(*pSurface);
- }
- return result;
-}
-
-void DispatchCmdSetLineStippleEXT(
- VkCommandBuffer commandBuffer,
- uint32_t lineStippleFactor,
- uint16_t lineStipplePattern)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- layer_data->device_dispatch_table.CmdSetLineStippleEXT(commandBuffer, lineStippleFactor, lineStipplePattern);
-
-}
-
-void DispatchResetQueryPoolEXT(
- VkDevice device,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount)
-{
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles) return layer_data->device_dispatch_table.ResetQueryPoolEXT(device, queryPool, firstQuery, queryCount);
- {
- queryPool = layer_data->Unwrap(queryPool);
- }
- layer_data->device_dispatch_table.ResetQueryPoolEXT(device, queryPool, firstQuery, queryCount);
-
-}
\ No newline at end of file
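vkResetQueryPoolEXT is the VK_EXT_host_query_reset entry point: it resets queries from the CPU without recording a command buffer. A hedged sketch, assuming the hostQueryReset feature was enabled at device creation:

#include <vulkan/vulkan.h>

void ResetPoolFromHost(VkDevice device, VkQueryPool pool, uint32_t first, uint32_t count) {
    auto pfnReset = reinterpret_cast<PFN_vkResetQueryPoolEXT>(
        vkGetDeviceProcAddr(device, "vkResetQueryPoolEXT"));
    if (pfnReset) pfnReset(device, pool, first, count);   // the pool handle is unwrapped by the layer
}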
diff --git a/layers/generated/layer_chassis_dispatch.h b/layers/generated/layer_chassis_dispatch.h
deleted file mode 100644
index 2d4664931..000000000
--- a/layers/generated/layer_chassis_dispatch.h
+++ /dev/null
@@ -1,1793 +0,0 @@
-
-
-// This file is ***GENERATED***. Do Not Edit.
-// See layer_chassis_dispatch_generator.py for modifications.
-
-/* Copyright (c) 2015-2019 The Khronos Group Inc.
- * Copyright (c) 2015-2019 Valve Corporation
- * Copyright (c) 2015-2019 LunarG, Inc.
- * Copyright (c) 2015-2019 Google Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * Author: Mark Lobodzinski <mark@lunarg.com>
- */
-#pragma once
-
-#if defined(LAYER_CHASSIS_CAN_WRAP_HANDLES)
-extern bool wrap_handles;
-#else
-extern bool wrap_handles;
-#endif
-VkResult DispatchCreateInstance(
- const VkInstanceCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkInstance* pInstance);
-void DispatchDestroyInstance(
- VkInstance instance,
- const VkAllocationCallbacks* pAllocator);
-VkResult DispatchEnumeratePhysicalDevices(
- VkInstance instance,
- uint32_t* pPhysicalDeviceCount,
- VkPhysicalDevice* pPhysicalDevices);
-void DispatchGetPhysicalDeviceFeatures(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceFeatures* pFeatures);
-void DispatchGetPhysicalDeviceFormatProperties(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkFormatProperties* pFormatProperties);
-VkResult DispatchGetPhysicalDeviceImageFormatProperties(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkImageType type,
- VkImageTiling tiling,
- VkImageUsageFlags usage,
- VkImageCreateFlags flags,
- VkImageFormatProperties* pImageFormatProperties);
-void DispatchGetPhysicalDeviceProperties(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceProperties* pProperties);
-void DispatchGetPhysicalDeviceQueueFamilyProperties(
- VkPhysicalDevice physicalDevice,
- uint32_t* pQueueFamilyPropertyCount,
- VkQueueFamilyProperties* pQueueFamilyProperties);
-void DispatchGetPhysicalDeviceMemoryProperties(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceMemoryProperties* pMemoryProperties);
-PFN_vkVoidFunction DispatchGetInstanceProcAddr(
- VkInstance instance,
- const char* pName);
-PFN_vkVoidFunction DispatchGetDeviceProcAddr(
- VkDevice device,
- const char* pName);
-VkResult DispatchCreateDevice(
- VkPhysicalDevice physicalDevice,
- const VkDeviceCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDevice* pDevice);
-void DispatchDestroyDevice(
- VkDevice device,
- const VkAllocationCallbacks* pAllocator);
-VkResult DispatchEnumerateInstanceExtensionProperties(
- const char* pLayerName,
- uint32_t* pPropertyCount,
- VkExtensionProperties* pProperties);
-VkResult DispatchEnumerateDeviceExtensionProperties(
- VkPhysicalDevice physicalDevice,
- const char* pLayerName,
- uint32_t* pPropertyCount,
- VkExtensionProperties* pProperties);
-VkResult DispatchEnumerateInstanceLayerProperties(
- uint32_t* pPropertyCount,
- VkLayerProperties* pProperties);
-VkResult DispatchEnumerateDeviceLayerProperties(
- VkPhysicalDevice physicalDevice,
- uint32_t* pPropertyCount,
- VkLayerProperties* pProperties);
-void DispatchGetDeviceQueue(
- VkDevice device,
- uint32_t queueFamilyIndex,
- uint32_t queueIndex,
- VkQueue* pQueue);
-VkResult DispatchQueueSubmit(
- VkQueue queue,
- uint32_t submitCount,
- const VkSubmitInfo* pSubmits,
- VkFence fence);
-VkResult DispatchQueueWaitIdle(
- VkQueue queue);
-VkResult DispatchDeviceWaitIdle(
- VkDevice device);
-VkResult DispatchAllocateMemory(
- VkDevice device,
- const VkMemoryAllocateInfo* pAllocateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDeviceMemory* pMemory);
-void DispatchFreeMemory(
- VkDevice device,
- VkDeviceMemory memory,
- const VkAllocationCallbacks* pAllocator);
-VkResult DispatchMapMemory(
- VkDevice device,
- VkDeviceMemory memory,
- VkDeviceSize offset,
- VkDeviceSize size,
- VkMemoryMapFlags flags,
- void** ppData);
-void DispatchUnmapMemory(
- VkDevice device,
- VkDeviceMemory memory);
-VkResult DispatchFlushMappedMemoryRanges(
- VkDevice device,
- uint32_t memoryRangeCount,
- const VkMappedMemoryRange* pMemoryRanges);
-VkResult DispatchInvalidateMappedMemoryRanges(
- VkDevice device,
- uint32_t memoryRangeCount,
- const VkMappedMemoryRange* pMemoryRanges);
-void DispatchGetDeviceMemoryCommitment(
- VkDevice device,
- VkDeviceMemory memory,
- VkDeviceSize* pCommittedMemoryInBytes);
-VkResult DispatchBindBufferMemory(
- VkDevice device,
- VkBuffer buffer,
- VkDeviceMemory memory,
- VkDeviceSize memoryOffset);
-VkResult DispatchBindImageMemory(
- VkDevice device,
- VkImage image,
- VkDeviceMemory memory,
- VkDeviceSize memoryOffset);
-void DispatchGetBufferMemoryRequirements(
- VkDevice device,
- VkBuffer buffer,
- VkMemoryRequirements* pMemoryRequirements);
-void DispatchGetImageMemoryRequirements(
- VkDevice device,
- VkImage image,
- VkMemoryRequirements* pMemoryRequirements);
-void DispatchGetImageSparseMemoryRequirements(
- VkDevice device,
- VkImage image,
- uint32_t* pSparseMemoryRequirementCount,
- VkSparseImageMemoryRequirements* pSparseMemoryRequirements);
-void DispatchGetPhysicalDeviceSparseImageFormatProperties(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkImageType type,
- VkSampleCountFlagBits samples,
- VkImageUsageFlags usage,
- VkImageTiling tiling,
- uint32_t* pPropertyCount,
- VkSparseImageFormatProperties* pProperties);
-VkResult DispatchQueueBindSparse(
- VkQueue queue,
- uint32_t bindInfoCount,
- const VkBindSparseInfo* pBindInfo,
- VkFence fence);
-VkResult DispatchCreateFence(
- VkDevice device,
- const VkFenceCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFence* pFence);
-void DispatchDestroyFence(
- VkDevice device,
- VkFence fence,
- const VkAllocationCallbacks* pAllocator);
-VkResult DispatchResetFences(
- VkDevice device,
- uint32_t fenceCount,
- const VkFence* pFences);
-VkResult DispatchGetFenceStatus(
- VkDevice device,
- VkFence fence);
-VkResult DispatchWaitForFences(
- VkDevice device,
- uint32_t fenceCount,
- const VkFence* pFences,
- VkBool32 waitAll,
- uint64_t timeout);
-VkResult DispatchCreateSemaphore(
- VkDevice device,
- const VkSemaphoreCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSemaphore* pSemaphore);
-void DispatchDestroySemaphore(
- VkDevice device,
- VkSemaphore semaphore,
- const VkAllocationCallbacks* pAllocator);
-VkResult DispatchCreateEvent(
- VkDevice device,
- const VkEventCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkEvent* pEvent);
-void DispatchDestroyEvent(
- VkDevice device,
- VkEvent event,
- const VkAllocationCallbacks* pAllocator);
-VkResult DispatchGetEventStatus(
- VkDevice device,
- VkEvent event);
-VkResult DispatchSetEvent(
- VkDevice device,
- VkEvent event);
-VkResult DispatchResetEvent(
- VkDevice device,
- VkEvent event);
-VkResult DispatchCreateQueryPool(
- VkDevice device,
- const VkQueryPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkQueryPool* pQueryPool);
-void DispatchDestroyQueryPool(
- VkDevice device,
- VkQueryPool queryPool,
- const VkAllocationCallbacks* pAllocator);
-VkResult DispatchGetQueryPoolResults(
- VkDevice device,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- size_t dataSize,
- void* pData,
- VkDeviceSize stride,
- VkQueryResultFlags flags);
-VkResult DispatchCreateBuffer(
- VkDevice device,
- const VkBufferCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkBuffer* pBuffer);
-void DispatchDestroyBuffer(
- VkDevice device,
- VkBuffer buffer,
- const VkAllocationCallbacks* pAllocator);
-VkResult DispatchCreateBufferView(
- VkDevice device,
- const VkBufferViewCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkBufferView* pView);
-void DispatchDestroyBufferView(
- VkDevice device,
- VkBufferView bufferView,
- const VkAllocationCallbacks* pAllocator);
-VkResult DispatchCreateImage(
- VkDevice device,
- const VkImageCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkImage* pImage);
-void DispatchDestroyImage(
- VkDevice device,
- VkImage image,
- const VkAllocationCallbacks* pAllocator);
-void DispatchGetImageSubresourceLayout(
- VkDevice device,
- VkImage image,
- const VkImageSubresource* pSubresource,
- VkSubresourceLayout* pLayout);
-VkResult DispatchCreateImageView(
- VkDevice device,
- const VkImageViewCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkImageView* pView);
-void DispatchDestroyImageView(
- VkDevice device,
- VkImageView imageView,
- const VkAllocationCallbacks* pAllocator);
-VkResult DispatchCreateShaderModule(
- VkDevice device,
- const VkShaderModuleCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkShaderModule* pShaderModule);
-void DispatchDestroyShaderModule(
- VkDevice device,
- VkShaderModule shaderModule,
- const VkAllocationCallbacks* pAllocator);
-VkResult DispatchCreatePipelineCache(
- VkDevice device,
- const VkPipelineCacheCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkPipelineCache* pPipelineCache);
-void DispatchDestroyPipelineCache(
- VkDevice device,
- VkPipelineCache pipelineCache,
- const VkAllocationCallbacks* pAllocator);
-VkResult DispatchGetPipelineCacheData(
- VkDevice device,
- VkPipelineCache pipelineCache,
- size_t* pDataSize,
- void* pData);
-VkResult DispatchMergePipelineCaches(
- VkDevice device,
- VkPipelineCache dstCache,
- uint32_t srcCacheCount,
- const VkPipelineCache* pSrcCaches);
-VkResult DispatchCreateGraphicsPipelines(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VkGraphicsPipelineCreateInfo* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkPipeline* pPipelines);
-VkResult DispatchCreateComputePipelines(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VkComputePipelineCreateInfo* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkPipeline* pPipelines);
-void DispatchDestroyPipeline(
- VkDevice device,
- VkPipeline pipeline,
- const VkAllocationCallbacks* pAllocator);
-VkResult DispatchCreatePipelineLayout(
- VkDevice device,
- const VkPipelineLayoutCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkPipelineLayout* pPipelineLayout);
-void DispatchDestroyPipelineLayout(
- VkDevice device,
- VkPipelineLayout pipelineLayout,
- const VkAllocationCallbacks* pAllocator);
-VkResult DispatchCreateSampler(
- VkDevice device,
- const VkSamplerCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSampler* pSampler);
-void DispatchDestroySampler(
- VkDevice device,
- VkSampler sampler,
- const VkAllocationCallbacks* pAllocator);
-VkResult DispatchCreateDescriptorSetLayout(
- VkDevice device,
- const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorSetLayout* pSetLayout);
-void DispatchDestroyDescriptorSetLayout(
- VkDevice device,
- VkDescriptorSetLayout descriptorSetLayout,
- const VkAllocationCallbacks* pAllocator);
-VkResult DispatchCreateDescriptorPool(
- VkDevice device,
- const VkDescriptorPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorPool* pDescriptorPool);
-void DispatchDestroyDescriptorPool(
- VkDevice device,
- VkDescriptorPool descriptorPool,
- const VkAllocationCallbacks* pAllocator);
-VkResult DispatchResetDescriptorPool(
- VkDevice device,
- VkDescriptorPool descriptorPool,
- VkDescriptorPoolResetFlags flags);
-VkResult DispatchAllocateDescriptorSets(
- VkDevice device,
- const VkDescriptorSetAllocateInfo* pAllocateInfo,
- VkDescriptorSet* pDescriptorSets);
-VkResult DispatchFreeDescriptorSets(
- VkDevice device,
- VkDescriptorPool descriptorPool,
- uint32_t descriptorSetCount,
- const VkDescriptorSet* pDescriptorSets);
-void DispatchUpdateDescriptorSets(
- VkDevice device,
- uint32_t descriptorWriteCount,
- const VkWriteDescriptorSet* pDescriptorWrites,
- uint32_t descriptorCopyCount,
- const VkCopyDescriptorSet* pDescriptorCopies);
-VkResult DispatchCreateFramebuffer(
- VkDevice device,
- const VkFramebufferCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFramebuffer* pFramebuffer);
-void DispatchDestroyFramebuffer(
- VkDevice device,
- VkFramebuffer framebuffer,
- const VkAllocationCallbacks* pAllocator);
-VkResult DispatchCreateRenderPass(
- VkDevice device,
- const VkRenderPassCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkRenderPass* pRenderPass);
-void DispatchDestroyRenderPass(
- VkDevice device,
- VkRenderPass renderPass,
- const VkAllocationCallbacks* pAllocator);
-void DispatchGetRenderAreaGranularity(
- VkDevice device,
- VkRenderPass renderPass,
- VkExtent2D* pGranularity);
-VkResult DispatchCreateCommandPool(
- VkDevice device,
- const VkCommandPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkCommandPool* pCommandPool);
-void DispatchDestroyCommandPool(
- VkDevice device,
- VkCommandPool commandPool,
- const VkAllocationCallbacks* pAllocator);
-VkResult DispatchResetCommandPool(
- VkDevice device,
- VkCommandPool commandPool,
- VkCommandPoolResetFlags flags);
-VkResult DispatchAllocateCommandBuffers(
- VkDevice device,
- const VkCommandBufferAllocateInfo* pAllocateInfo,
- VkCommandBuffer* pCommandBuffers);
-void DispatchFreeCommandBuffers(
- VkDevice device,
- VkCommandPool commandPool,
- uint32_t commandBufferCount,
- const VkCommandBuffer* pCommandBuffers);
-VkResult DispatchBeginCommandBuffer(
- VkCommandBuffer commandBuffer,
- const VkCommandBufferBeginInfo* pBeginInfo);
-VkResult DispatchEndCommandBuffer(
- VkCommandBuffer commandBuffer);
-VkResult DispatchResetCommandBuffer(
- VkCommandBuffer commandBuffer,
- VkCommandBufferResetFlags flags);
-void DispatchCmdBindPipeline(
- VkCommandBuffer commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipeline pipeline);
-void DispatchCmdSetViewport(
- VkCommandBuffer commandBuffer,
- uint32_t firstViewport,
- uint32_t viewportCount,
- const VkViewport* pViewports);
-void DispatchCmdSetScissor(
- VkCommandBuffer commandBuffer,
- uint32_t firstScissor,
- uint32_t scissorCount,
- const VkRect2D* pScissors);
-void DispatchCmdSetLineWidth(
- VkCommandBuffer commandBuffer,
- float lineWidth);
-void DispatchCmdSetDepthBias(
- VkCommandBuffer commandBuffer,
- float depthBiasConstantFactor,
- float depthBiasClamp,
- float depthBiasSlopeFactor);
-void DispatchCmdSetBlendConstants(
- VkCommandBuffer commandBuffer,
- const float blendConstants[4]);
-void DispatchCmdSetDepthBounds(
- VkCommandBuffer commandBuffer,
- float minDepthBounds,
- float maxDepthBounds);
-void DispatchCmdSetStencilCompareMask(
- VkCommandBuffer commandBuffer,
- VkStencilFaceFlags faceMask,
- uint32_t compareMask);
-void DispatchCmdSetStencilWriteMask(
- VkCommandBuffer commandBuffer,
- VkStencilFaceFlags faceMask,
- uint32_t writeMask);
-void DispatchCmdSetStencilReference(
- VkCommandBuffer commandBuffer,
- VkStencilFaceFlags faceMask,
- uint32_t reference);
-void DispatchCmdBindDescriptorSets(
- VkCommandBuffer commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipelineLayout layout,
- uint32_t firstSet,
- uint32_t descriptorSetCount,
- const VkDescriptorSet* pDescriptorSets,
- uint32_t dynamicOffsetCount,
- const uint32_t* pDynamicOffsets);
-void DispatchCmdBindIndexBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkIndexType indexType);
-void DispatchCmdBindVertexBuffers(
- VkCommandBuffer commandBuffer,
- uint32_t firstBinding,
- uint32_t bindingCount,
- const VkBuffer* pBuffers,
- const VkDeviceSize* pOffsets);
-void DispatchCmdDraw(
- VkCommandBuffer commandBuffer,
- uint32_t vertexCount,
- uint32_t instanceCount,
- uint32_t firstVertex,
- uint32_t firstInstance);
-void DispatchCmdDrawIndexed(
- VkCommandBuffer commandBuffer,
- uint32_t indexCount,
- uint32_t instanceCount,
- uint32_t firstIndex,
- int32_t vertexOffset,
- uint32_t firstInstance);
-void DispatchCmdDrawIndirect(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- uint32_t drawCount,
- uint32_t stride);
-void DispatchCmdDrawIndexedIndirect(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- uint32_t drawCount,
- uint32_t stride);
-void DispatchCmdDispatch(
- VkCommandBuffer commandBuffer,
- uint32_t groupCountX,
- uint32_t groupCountY,
- uint32_t groupCountZ);
-void DispatchCmdDispatchIndirect(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset);
-void DispatchCmdCopyBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer srcBuffer,
- VkBuffer dstBuffer,
- uint32_t regionCount,
- const VkBufferCopy* pRegions);
-void DispatchCmdCopyImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkImageCopy* pRegions);
-void DispatchCmdBlitImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkImageBlit* pRegions,
- VkFilter filter);
-void DispatchCmdCopyBufferToImage(
- VkCommandBuffer commandBuffer,
- VkBuffer srcBuffer,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkBufferImageCopy* pRegions);
-void DispatchCmdCopyImageToBuffer(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkBuffer dstBuffer,
- uint32_t regionCount,
- const VkBufferImageCopy* pRegions);
-void DispatchCmdUpdateBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize dataSize,
- const void* pData);
-void DispatchCmdFillBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize size,
- uint32_t data);
-void DispatchCmdClearColorImage(
- VkCommandBuffer commandBuffer,
- VkImage image,
- VkImageLayout imageLayout,
- const VkClearColorValue* pColor,
- uint32_t rangeCount,
- const VkImageSubresourceRange* pRanges);
-void DispatchCmdClearDepthStencilImage(
- VkCommandBuffer commandBuffer,
- VkImage image,
- VkImageLayout imageLayout,
- const VkClearDepthStencilValue* pDepthStencil,
- uint32_t rangeCount,
- const VkImageSubresourceRange* pRanges);
-void DispatchCmdClearAttachments(
- VkCommandBuffer commandBuffer,
- uint32_t attachmentCount,
- const VkClearAttachment* pAttachments,
- uint32_t rectCount,
- const VkClearRect* pRects);
-void DispatchCmdResolveImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkImageResolve* pRegions);
-void DispatchCmdSetEvent(
- VkCommandBuffer commandBuffer,
- VkEvent event,
- VkPipelineStageFlags stageMask);
-void DispatchCmdResetEvent(
- VkCommandBuffer commandBuffer,
- VkEvent event,
- VkPipelineStageFlags stageMask);
-void DispatchCmdWaitEvents(
- VkCommandBuffer commandBuffer,
- uint32_t eventCount,
- const VkEvent* pEvents,
- VkPipelineStageFlags srcStageMask,
- VkPipelineStageFlags dstStageMask,
- uint32_t memoryBarrierCount,
- const VkMemoryBarrier* pMemoryBarriers,
- uint32_t bufferMemoryBarrierCount,
- const VkBufferMemoryBarrier* pBufferMemoryBarriers,
- uint32_t imageMemoryBarrierCount,
- const VkImageMemoryBarrier* pImageMemoryBarriers);
-void DispatchCmdPipelineBarrier(
- VkCommandBuffer commandBuffer,
- VkPipelineStageFlags srcStageMask,
- VkPipelineStageFlags dstStageMask,
- VkDependencyFlags dependencyFlags,
- uint32_t memoryBarrierCount,
- const VkMemoryBarrier* pMemoryBarriers,
- uint32_t bufferMemoryBarrierCount,
- const VkBufferMemoryBarrier* pBufferMemoryBarriers,
- uint32_t imageMemoryBarrierCount,
- const VkImageMemoryBarrier* pImageMemoryBarriers);
-void DispatchCmdBeginQuery(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query,
- VkQueryControlFlags flags);
-void DispatchCmdEndQuery(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query);
-void DispatchCmdResetQueryPool(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount);
-void DispatchCmdWriteTimestamp(
- VkCommandBuffer commandBuffer,
- VkPipelineStageFlagBits pipelineStage,
- VkQueryPool queryPool,
- uint32_t query);
-void DispatchCmdCopyQueryPoolResults(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize stride,
- VkQueryResultFlags flags);
-void DispatchCmdPushConstants(
- VkCommandBuffer commandBuffer,
- VkPipelineLayout layout,
- VkShaderStageFlags stageFlags,
- uint32_t offset,
- uint32_t size,
- const void* pValues);
-void DispatchCmdBeginRenderPass(
- VkCommandBuffer commandBuffer,
- const VkRenderPassBeginInfo* pRenderPassBegin,
- VkSubpassContents contents);
-void DispatchCmdNextSubpass(
- VkCommandBuffer commandBuffer,
- VkSubpassContents contents);
-void DispatchCmdEndRenderPass(
- VkCommandBuffer commandBuffer);
-void DispatchCmdExecuteCommands(
- VkCommandBuffer commandBuffer,
- uint32_t commandBufferCount,
- const VkCommandBuffer* pCommandBuffers);
-VkResult DispatchEnumerateInstanceVersion(
- uint32_t* pApiVersion);
-VkResult DispatchBindBufferMemory2(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindBufferMemoryInfo* pBindInfos);
-VkResult DispatchBindImageMemory2(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindImageMemoryInfo* pBindInfos);
-void DispatchGetDeviceGroupPeerMemoryFeatures(
- VkDevice device,
- uint32_t heapIndex,
- uint32_t localDeviceIndex,
- uint32_t remoteDeviceIndex,
- VkPeerMemoryFeatureFlags* pPeerMemoryFeatures);
-void DispatchCmdSetDeviceMask(
- VkCommandBuffer commandBuffer,
- uint32_t deviceMask);
-void DispatchCmdDispatchBase(
- VkCommandBuffer commandBuffer,
- uint32_t baseGroupX,
- uint32_t baseGroupY,
- uint32_t baseGroupZ,
- uint32_t groupCountX,
- uint32_t groupCountY,
- uint32_t groupCountZ);
-VkResult DispatchEnumeratePhysicalDeviceGroups(
- VkInstance instance,
- uint32_t* pPhysicalDeviceGroupCount,
- VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties);
-void DispatchGetImageMemoryRequirements2(
- VkDevice device,
- const VkImageMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements);
-void DispatchGetBufferMemoryRequirements2(
- VkDevice device,
- const VkBufferMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements);
-void DispatchGetImageSparseMemoryRequirements2(
- VkDevice device,
- const VkImageSparseMemoryRequirementsInfo2* pInfo,
- uint32_t* pSparseMemoryRequirementCount,
- VkSparseImageMemoryRequirements2* pSparseMemoryRequirements);
-void DispatchGetPhysicalDeviceFeatures2(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceFeatures2* pFeatures);
-void DispatchGetPhysicalDeviceProperties2(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceProperties2* pProperties);
-void DispatchGetPhysicalDeviceFormatProperties2(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkFormatProperties2* pFormatProperties);
-VkResult DispatchGetPhysicalDeviceImageFormatProperties2(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
- VkImageFormatProperties2* pImageFormatProperties);
-void DispatchGetPhysicalDeviceQueueFamilyProperties2(
- VkPhysicalDevice physicalDevice,
- uint32_t* pQueueFamilyPropertyCount,
- VkQueueFamilyProperties2* pQueueFamilyProperties);
-void DispatchGetPhysicalDeviceMemoryProperties2(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceMemoryProperties2* pMemoryProperties);
-void DispatchGetPhysicalDeviceSparseImageFormatProperties2(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo,
- uint32_t* pPropertyCount,
- VkSparseImageFormatProperties2* pProperties);
-void DispatchTrimCommandPool(
- VkDevice device,
- VkCommandPool commandPool,
- VkCommandPoolTrimFlags flags);
-void DispatchGetDeviceQueue2(
- VkDevice device,
- const VkDeviceQueueInfo2* pQueueInfo,
- VkQueue* pQueue);
-VkResult DispatchCreateSamplerYcbcrConversion(
- VkDevice device,
- const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSamplerYcbcrConversion* pYcbcrConversion);
-void DispatchDestroySamplerYcbcrConversion(
- VkDevice device,
- VkSamplerYcbcrConversion ycbcrConversion,
- const VkAllocationCallbacks* pAllocator);
-VkResult DispatchCreateDescriptorUpdateTemplate(
- VkDevice device,
- const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate);
-void DispatchDestroyDescriptorUpdateTemplate(
- VkDevice device,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const VkAllocationCallbacks* pAllocator);
-void DispatchUpdateDescriptorSetWithTemplate(
- VkDevice device,
- VkDescriptorSet descriptorSet,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const void* pData);
-void DispatchGetPhysicalDeviceExternalBufferProperties(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo,
- VkExternalBufferProperties* pExternalBufferProperties);
-void DispatchGetPhysicalDeviceExternalFenceProperties(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo,
- VkExternalFenceProperties* pExternalFenceProperties);
-void DispatchGetPhysicalDeviceExternalSemaphoreProperties(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
- VkExternalSemaphoreProperties* pExternalSemaphoreProperties);
-void DispatchGetDescriptorSetLayoutSupport(
- VkDevice device,
- const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
- VkDescriptorSetLayoutSupport* pSupport);
-void DispatchDestroySurfaceKHR(
- VkInstance instance,
- VkSurfaceKHR surface,
- const VkAllocationCallbacks* pAllocator);
-VkResult DispatchGetPhysicalDeviceSurfaceSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex,
- VkSurfaceKHR surface,
- VkBool32* pSupported);
-VkResult DispatchGetPhysicalDeviceSurfaceCapabilitiesKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- VkSurfaceCapabilitiesKHR* pSurfaceCapabilities);
-VkResult DispatchGetPhysicalDeviceSurfaceFormatsKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- uint32_t* pSurfaceFormatCount,
- VkSurfaceFormatKHR* pSurfaceFormats);
-VkResult DispatchGetPhysicalDeviceSurfacePresentModesKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- uint32_t* pPresentModeCount,
- VkPresentModeKHR* pPresentModes);
-VkResult DispatchCreateSwapchainKHR(
- VkDevice device,
- const VkSwapchainCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSwapchainKHR* pSwapchain);
-void DispatchDestroySwapchainKHR(
- VkDevice device,
- VkSwapchainKHR swapchain,
- const VkAllocationCallbacks* pAllocator);
-VkResult DispatchGetSwapchainImagesKHR(
- VkDevice device,
- VkSwapchainKHR swapchain,
- uint32_t* pSwapchainImageCount,
- VkImage* pSwapchainImages);
-VkResult DispatchAcquireNextImageKHR(
- VkDevice device,
- VkSwapchainKHR swapchain,
- uint64_t timeout,
- VkSemaphore semaphore,
- VkFence fence,
- uint32_t* pImageIndex);
-VkResult DispatchQueuePresentKHR(
- VkQueue queue,
- const VkPresentInfoKHR* pPresentInfo);
-VkResult DispatchGetDeviceGroupPresentCapabilitiesKHR(
- VkDevice device,
- VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities);
-VkResult DispatchGetDeviceGroupSurfacePresentModesKHR(
- VkDevice device,
- VkSurfaceKHR surface,
- VkDeviceGroupPresentModeFlagsKHR* pModes);
-VkResult DispatchGetPhysicalDevicePresentRectanglesKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- uint32_t* pRectCount,
- VkRect2D* pRects);
-VkResult DispatchAcquireNextImage2KHR(
- VkDevice device,
- const VkAcquireNextImageInfoKHR* pAcquireInfo,
- uint32_t* pImageIndex);
-VkResult DispatchGetPhysicalDeviceDisplayPropertiesKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t* pPropertyCount,
- VkDisplayPropertiesKHR* pProperties);
-VkResult DispatchGetPhysicalDeviceDisplayPlanePropertiesKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t* pPropertyCount,
- VkDisplayPlanePropertiesKHR* pProperties);
-VkResult DispatchGetDisplayPlaneSupportedDisplaysKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t planeIndex,
- uint32_t* pDisplayCount,
- VkDisplayKHR* pDisplays);
-VkResult DispatchGetDisplayModePropertiesKHR(
- VkPhysicalDevice physicalDevice,
- VkDisplayKHR display,
- uint32_t* pPropertyCount,
- VkDisplayModePropertiesKHR* pProperties);
-VkResult DispatchCreateDisplayModeKHR(
- VkPhysicalDevice physicalDevice,
- VkDisplayKHR display,
- const VkDisplayModeCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDisplayModeKHR* pMode);
-VkResult DispatchGetDisplayPlaneCapabilitiesKHR(
- VkPhysicalDevice physicalDevice,
- VkDisplayModeKHR mode,
- uint32_t planeIndex,
- VkDisplayPlaneCapabilitiesKHR* pCapabilities);
-VkResult DispatchCreateDisplayPlaneSurfaceKHR(
- VkInstance instance,
- const VkDisplaySurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-VkResult DispatchCreateSharedSwapchainsKHR(
- VkDevice device,
- uint32_t swapchainCount,
- const VkSwapchainCreateInfoKHR* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkSwapchainKHR* pSwapchains);
-#ifdef VK_USE_PLATFORM_XLIB_KHR
-VkResult DispatchCreateXlibSurfaceKHR(
- VkInstance instance,
- const VkXlibSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-#endif // VK_USE_PLATFORM_XLIB_KHR
-#ifdef VK_USE_PLATFORM_XLIB_KHR
-VkBool32 DispatchGetPhysicalDeviceXlibPresentationSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex,
- Display* dpy,
- VisualID visualID);
-#endif // VK_USE_PLATFORM_XLIB_KHR
-#ifdef VK_USE_PLATFORM_XCB_KHR
-VkResult DispatchCreateXcbSurfaceKHR(
- VkInstance instance,
- const VkXcbSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-#endif // VK_USE_PLATFORM_XCB_KHR
-#ifdef VK_USE_PLATFORM_XCB_KHR
-VkBool32 DispatchGetPhysicalDeviceXcbPresentationSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex,
- xcb_connection_t* connection,
- xcb_visualid_t visual_id);
-#endif // VK_USE_PLATFORM_XCB_KHR
-#ifdef VK_USE_PLATFORM_WAYLAND_KHR
-VkResult DispatchCreateWaylandSurfaceKHR(
- VkInstance instance,
- const VkWaylandSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-#endif // VK_USE_PLATFORM_WAYLAND_KHR
-#ifdef VK_USE_PLATFORM_WAYLAND_KHR
-VkBool32 DispatchGetPhysicalDeviceWaylandPresentationSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex,
- struct wl_display* display);
-#endif // VK_USE_PLATFORM_WAYLAND_KHR
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-VkResult DispatchCreateAndroidSurfaceKHR(
- VkInstance instance,
- const VkAndroidSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-VkResult DispatchCreateWin32SurfaceKHR(
- VkInstance instance,
- const VkWin32SurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-#endif // VK_USE_PLATFORM_WIN32_KHR
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-VkBool32 DispatchGetPhysicalDeviceWin32PresentationSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex);
-#endif // VK_USE_PLATFORM_WIN32_KHR
-void DispatchGetPhysicalDeviceFeatures2KHR(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceFeatures2* pFeatures);
-void DispatchGetPhysicalDeviceProperties2KHR(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceProperties2* pProperties);
-void DispatchGetPhysicalDeviceFormatProperties2KHR(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkFormatProperties2* pFormatProperties);
-VkResult DispatchGetPhysicalDeviceImageFormatProperties2KHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
- VkImageFormatProperties2* pImageFormatProperties);
-void DispatchGetPhysicalDeviceQueueFamilyProperties2KHR(
- VkPhysicalDevice physicalDevice,
- uint32_t* pQueueFamilyPropertyCount,
- VkQueueFamilyProperties2* pQueueFamilyProperties);
-void DispatchGetPhysicalDeviceMemoryProperties2KHR(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceMemoryProperties2* pMemoryProperties);
-void DispatchGetPhysicalDeviceSparseImageFormatProperties2KHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo,
- uint32_t* pPropertyCount,
- VkSparseImageFormatProperties2* pProperties);
-void DispatchGetDeviceGroupPeerMemoryFeaturesKHR(
- VkDevice device,
- uint32_t heapIndex,
- uint32_t localDeviceIndex,
- uint32_t remoteDeviceIndex,
- VkPeerMemoryFeatureFlags* pPeerMemoryFeatures);
-void DispatchCmdSetDeviceMaskKHR(
- VkCommandBuffer commandBuffer,
- uint32_t deviceMask);
-void DispatchCmdDispatchBaseKHR(
- VkCommandBuffer commandBuffer,
- uint32_t baseGroupX,
- uint32_t baseGroupY,
- uint32_t baseGroupZ,
- uint32_t groupCountX,
- uint32_t groupCountY,
- uint32_t groupCountZ);
-void DispatchTrimCommandPoolKHR(
- VkDevice device,
- VkCommandPool commandPool,
- VkCommandPoolTrimFlags flags);
-VkResult DispatchEnumeratePhysicalDeviceGroupsKHR(
- VkInstance instance,
- uint32_t* pPhysicalDeviceGroupCount,
- VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties);
-void DispatchGetPhysicalDeviceExternalBufferPropertiesKHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo,
- VkExternalBufferProperties* pExternalBufferProperties);
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-VkResult DispatchGetMemoryWin32HandleKHR(
- VkDevice device,
- const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo,
- HANDLE* pHandle);
-#endif // VK_USE_PLATFORM_WIN32_KHR
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-VkResult DispatchGetMemoryWin32HandlePropertiesKHR(
- VkDevice device,
- VkExternalMemoryHandleTypeFlagBits handleType,
- HANDLE handle,
- VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties);
-#endif // VK_USE_PLATFORM_WIN32_KHR
-VkResult DispatchGetMemoryFdKHR(
- VkDevice device,
- const VkMemoryGetFdInfoKHR* pGetFdInfo,
- int* pFd);
-VkResult DispatchGetMemoryFdPropertiesKHR(
- VkDevice device,
- VkExternalMemoryHandleTypeFlagBits handleType,
- int fd,
- VkMemoryFdPropertiesKHR* pMemoryFdProperties);
-void DispatchGetPhysicalDeviceExternalSemaphorePropertiesKHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
- VkExternalSemaphoreProperties* pExternalSemaphoreProperties);
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-VkResult DispatchImportSemaphoreWin32HandleKHR(
- VkDevice device,
- const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo);
-#endif // VK_USE_PLATFORM_WIN32_KHR
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-VkResult DispatchGetSemaphoreWin32HandleKHR(
- VkDevice device,
- const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo,
- HANDLE* pHandle);
-#endif // VK_USE_PLATFORM_WIN32_KHR
-VkResult DispatchImportSemaphoreFdKHR(
- VkDevice device,
- const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo);
-VkResult DispatchGetSemaphoreFdKHR(
- VkDevice device,
- const VkSemaphoreGetFdInfoKHR* pGetFdInfo,
- int* pFd);
-void DispatchCmdPushDescriptorSetKHR(
- VkCommandBuffer commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipelineLayout layout,
- uint32_t set,
- uint32_t descriptorWriteCount,
- const VkWriteDescriptorSet* pDescriptorWrites);
-void DispatchCmdPushDescriptorSetWithTemplateKHR(
- VkCommandBuffer commandBuffer,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- VkPipelineLayout layout,
- uint32_t set,
- const void* pData);
-VkResult DispatchCreateDescriptorUpdateTemplateKHR(
- VkDevice device,
- const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate);
-void DispatchDestroyDescriptorUpdateTemplateKHR(
- VkDevice device,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const VkAllocationCallbacks* pAllocator);
-void DispatchUpdateDescriptorSetWithTemplateKHR(
- VkDevice device,
- VkDescriptorSet descriptorSet,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const void* pData);
-VkResult DispatchCreateRenderPass2KHR(
- VkDevice device,
- const VkRenderPassCreateInfo2KHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkRenderPass* pRenderPass);
-void DispatchCmdBeginRenderPass2KHR(
- VkCommandBuffer commandBuffer,
- const VkRenderPassBeginInfo* pRenderPassBegin,
- const VkSubpassBeginInfoKHR* pSubpassBeginInfo);
-void DispatchCmdNextSubpass2KHR(
- VkCommandBuffer commandBuffer,
- const VkSubpassBeginInfoKHR* pSubpassBeginInfo,
- const VkSubpassEndInfoKHR* pSubpassEndInfo);
-void DispatchCmdEndRenderPass2KHR(
- VkCommandBuffer commandBuffer,
- const VkSubpassEndInfoKHR* pSubpassEndInfo);
-VkResult DispatchGetSwapchainStatusKHR(
- VkDevice device,
- VkSwapchainKHR swapchain);
-void DispatchGetPhysicalDeviceExternalFencePropertiesKHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo,
- VkExternalFenceProperties* pExternalFenceProperties);
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-VkResult DispatchImportFenceWin32HandleKHR(
- VkDevice device,
- const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo);
-#endif // VK_USE_PLATFORM_WIN32_KHR
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-VkResult DispatchGetFenceWin32HandleKHR(
- VkDevice device,
- const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo,
- HANDLE* pHandle);
-#endif // VK_USE_PLATFORM_WIN32_KHR
-VkResult DispatchImportFenceFdKHR(
- VkDevice device,
- const VkImportFenceFdInfoKHR* pImportFenceFdInfo);
-VkResult DispatchGetFenceFdKHR(
- VkDevice device,
- const VkFenceGetFdInfoKHR* pGetFdInfo,
- int* pFd);
-VkResult DispatchGetPhysicalDeviceSurfaceCapabilities2KHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
- VkSurfaceCapabilities2KHR* pSurfaceCapabilities);
-VkResult DispatchGetPhysicalDeviceSurfaceFormats2KHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
- uint32_t* pSurfaceFormatCount,
- VkSurfaceFormat2KHR* pSurfaceFormats);
-VkResult DispatchGetPhysicalDeviceDisplayProperties2KHR(
- VkPhysicalDevice physicalDevice,
- uint32_t* pPropertyCount,
- VkDisplayProperties2KHR* pProperties);
-VkResult DispatchGetPhysicalDeviceDisplayPlaneProperties2KHR(
- VkPhysicalDevice physicalDevice,
- uint32_t* pPropertyCount,
- VkDisplayPlaneProperties2KHR* pProperties);
-VkResult DispatchGetDisplayModeProperties2KHR(
- VkPhysicalDevice physicalDevice,
- VkDisplayKHR display,
- uint32_t* pPropertyCount,
- VkDisplayModeProperties2KHR* pProperties);
-VkResult DispatchGetDisplayPlaneCapabilities2KHR(
- VkPhysicalDevice physicalDevice,
- const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo,
- VkDisplayPlaneCapabilities2KHR* pCapabilities);
-void DispatchGetImageMemoryRequirements2KHR(
- VkDevice device,
- const VkImageMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements);
-void DispatchGetBufferMemoryRequirements2KHR(
- VkDevice device,
- const VkBufferMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements);
-void DispatchGetImageSparseMemoryRequirements2KHR(
- VkDevice device,
- const VkImageSparseMemoryRequirementsInfo2* pInfo,
- uint32_t* pSparseMemoryRequirementCount,
- VkSparseImageMemoryRequirements2* pSparseMemoryRequirements);
-VkResult DispatchCreateSamplerYcbcrConversionKHR(
- VkDevice device,
- const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSamplerYcbcrConversion* pYcbcrConversion);
-void DispatchDestroySamplerYcbcrConversionKHR(
- VkDevice device,
- VkSamplerYcbcrConversion ycbcrConversion,
- const VkAllocationCallbacks* pAllocator);
-VkResult DispatchBindBufferMemory2KHR(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindBufferMemoryInfo* pBindInfos);
-VkResult DispatchBindImageMemory2KHR(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindImageMemoryInfo* pBindInfos);
-void DispatchGetDescriptorSetLayoutSupportKHR(
- VkDevice device,
- const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
- VkDescriptorSetLayoutSupport* pSupport);
-void DispatchCmdDrawIndirectCountKHR(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride);
-void DispatchCmdDrawIndexedIndirectCountKHR(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride);
-VkResult DispatchGetPipelineExecutablePropertiesKHR(
- VkDevice device,
- const VkPipelineInfoKHR* pPipelineInfo,
- uint32_t* pExecutableCount,
- VkPipelineExecutablePropertiesKHR* pProperties);
-VkResult DispatchGetPipelineExecutableStatisticsKHR(
- VkDevice device,
- const VkPipelineExecutableInfoKHR* pExecutableInfo,
- uint32_t* pStatisticCount,
- VkPipelineExecutableStatisticKHR* pStatistics);
-VkResult DispatchGetPipelineExecutableInternalRepresentationsKHR(
- VkDevice device,
- const VkPipelineExecutableInfoKHR* pExecutableInfo,
- uint32_t* pInternalRepresentationCount,
- VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations);
-VkResult DispatchCreateDebugReportCallbackEXT(
- VkInstance instance,
- const VkDebugReportCallbackCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDebugReportCallbackEXT* pCallback);
-void DispatchDestroyDebugReportCallbackEXT(
- VkInstance instance,
- VkDebugReportCallbackEXT callback,
- const VkAllocationCallbacks* pAllocator);
-void DispatchDebugReportMessageEXT(
- VkInstance instance,
- VkDebugReportFlagsEXT flags,
- VkDebugReportObjectTypeEXT objectType,
- uint64_t object,
- size_t location,
- int32_t messageCode,
- const char* pLayerPrefix,
- const char* pMessage);
-VkResult DispatchDebugMarkerSetObjectTagEXT(
- VkDevice device,
- const VkDebugMarkerObjectTagInfoEXT* pTagInfo);
-VkResult DispatchDebugMarkerSetObjectNameEXT(
- VkDevice device,
- const VkDebugMarkerObjectNameInfoEXT* pNameInfo);
-void DispatchCmdDebugMarkerBeginEXT(
- VkCommandBuffer commandBuffer,
- const VkDebugMarkerMarkerInfoEXT* pMarkerInfo);
-void DispatchCmdDebugMarkerEndEXT(
- VkCommandBuffer commandBuffer);
-void DispatchCmdDebugMarkerInsertEXT(
- VkCommandBuffer commandBuffer,
- const VkDebugMarkerMarkerInfoEXT* pMarkerInfo);
-void DispatchCmdBindTransformFeedbackBuffersEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstBinding,
- uint32_t bindingCount,
- const VkBuffer* pBuffers,
- const VkDeviceSize* pOffsets,
- const VkDeviceSize* pSizes);
-void DispatchCmdBeginTransformFeedbackEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstCounterBuffer,
- uint32_t counterBufferCount,
- const VkBuffer* pCounterBuffers,
- const VkDeviceSize* pCounterBufferOffsets);
-void DispatchCmdEndTransformFeedbackEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstCounterBuffer,
- uint32_t counterBufferCount,
- const VkBuffer* pCounterBuffers,
- const VkDeviceSize* pCounterBufferOffsets);
-void DispatchCmdBeginQueryIndexedEXT(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query,
- VkQueryControlFlags flags,
- uint32_t index);
-void DispatchCmdEndQueryIndexedEXT(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query,
- uint32_t index);
-void DispatchCmdDrawIndirectByteCountEXT(
- VkCommandBuffer commandBuffer,
- uint32_t instanceCount,
- uint32_t firstInstance,
- VkBuffer counterBuffer,
- VkDeviceSize counterBufferOffset,
- uint32_t counterOffset,
- uint32_t vertexStride);
-uint32_t DispatchGetImageViewHandleNVX(
- VkDevice device,
- const VkImageViewHandleInfoNVX* pInfo);
-void DispatchCmdDrawIndirectCountAMD(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride);
-void DispatchCmdDrawIndexedIndirectCountAMD(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride);
-VkResult DispatchGetShaderInfoAMD(
- VkDevice device,
- VkPipeline pipeline,
- VkShaderStageFlagBits shaderStage,
- VkShaderInfoTypeAMD infoType,
- size_t* pInfoSize,
- void* pInfo);
-#ifdef VK_USE_PLATFORM_GGP
-VkResult DispatchCreateStreamDescriptorSurfaceGGP(
- VkInstance instance,
- const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-#endif // VK_USE_PLATFORM_GGP
-VkResult DispatchGetPhysicalDeviceExternalImageFormatPropertiesNV(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkImageType type,
- VkImageTiling tiling,
- VkImageUsageFlags usage,
- VkImageCreateFlags flags,
- VkExternalMemoryHandleTypeFlagsNV externalHandleType,
- VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties);
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-VkResult DispatchGetMemoryWin32HandleNV(
- VkDevice device,
- VkDeviceMemory memory,
- VkExternalMemoryHandleTypeFlagsNV handleType,
- HANDLE* pHandle);
-#endif // VK_USE_PLATFORM_WIN32_KHR
-#ifdef VK_USE_PLATFORM_VI_NN
-VkResult DispatchCreateViSurfaceNN(
- VkInstance instance,
- const VkViSurfaceCreateInfoNN* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-#endif // VK_USE_PLATFORM_VI_NN
-void DispatchCmdBeginConditionalRenderingEXT(
- VkCommandBuffer commandBuffer,
- const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin);
-void DispatchCmdEndConditionalRenderingEXT(
- VkCommandBuffer commandBuffer);
-void DispatchCmdProcessCommandsNVX(
- VkCommandBuffer commandBuffer,
- const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo);
-void DispatchCmdReserveSpaceForCommandsNVX(
- VkCommandBuffer commandBuffer,
- const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo);
-VkResult DispatchCreateIndirectCommandsLayoutNVX(
- VkDevice device,
- const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout);
-void DispatchDestroyIndirectCommandsLayoutNVX(
- VkDevice device,
- VkIndirectCommandsLayoutNVX indirectCommandsLayout,
- const VkAllocationCallbacks* pAllocator);
-VkResult DispatchCreateObjectTableNVX(
- VkDevice device,
- const VkObjectTableCreateInfoNVX* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkObjectTableNVX* pObjectTable);
-void DispatchDestroyObjectTableNVX(
- VkDevice device,
- VkObjectTableNVX objectTable,
- const VkAllocationCallbacks* pAllocator);
-VkResult DispatchRegisterObjectsNVX(
- VkDevice device,
- VkObjectTableNVX objectTable,
- uint32_t objectCount,
- const VkObjectTableEntryNVX* const* ppObjectTableEntries,
- const uint32_t* pObjectIndices);
-VkResult DispatchUnregisterObjectsNVX(
- VkDevice device,
- VkObjectTableNVX objectTable,
- uint32_t objectCount,
- const VkObjectEntryTypeNVX* pObjectEntryTypes,
- const uint32_t* pObjectIndices);
-void DispatchGetPhysicalDeviceGeneratedCommandsPropertiesNVX(
- VkPhysicalDevice physicalDevice,
- VkDeviceGeneratedCommandsFeaturesNVX* pFeatures,
- VkDeviceGeneratedCommandsLimitsNVX* pLimits);
-void DispatchCmdSetViewportWScalingNV(
- VkCommandBuffer commandBuffer,
- uint32_t firstViewport,
- uint32_t viewportCount,
- const VkViewportWScalingNV* pViewportWScalings);
-VkResult DispatchReleaseDisplayEXT(
- VkPhysicalDevice physicalDevice,
- VkDisplayKHR display);
-#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
-VkResult DispatchAcquireXlibDisplayEXT(
- VkPhysicalDevice physicalDevice,
- Display* dpy,
- VkDisplayKHR display);
-#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
-#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
-VkResult DispatchGetRandROutputDisplayEXT(
- VkPhysicalDevice physicalDevice,
- Display* dpy,
- RROutput rrOutput,
- VkDisplayKHR* pDisplay);
-#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
-VkResult DispatchGetPhysicalDeviceSurfaceCapabilities2EXT(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- VkSurfaceCapabilities2EXT* pSurfaceCapabilities);
-VkResult DispatchDisplayPowerControlEXT(
- VkDevice device,
- VkDisplayKHR display,
- const VkDisplayPowerInfoEXT* pDisplayPowerInfo);
-VkResult DispatchRegisterDeviceEventEXT(
- VkDevice device,
- const VkDeviceEventInfoEXT* pDeviceEventInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFence* pFence);
-VkResult DispatchRegisterDisplayEventEXT(
- VkDevice device,
- VkDisplayKHR display,
- const VkDisplayEventInfoEXT* pDisplayEventInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFence* pFence);
-VkResult DispatchGetSwapchainCounterEXT(
- VkDevice device,
- VkSwapchainKHR swapchain,
- VkSurfaceCounterFlagBitsEXT counter,
- uint64_t* pCounterValue);
-VkResult DispatchGetRefreshCycleDurationGOOGLE(
- VkDevice device,
- VkSwapchainKHR swapchain,
- VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties);
-VkResult DispatchGetPastPresentationTimingGOOGLE(
- VkDevice device,
- VkSwapchainKHR swapchain,
- uint32_t* pPresentationTimingCount,
- VkPastPresentationTimingGOOGLE* pPresentationTimings);
-void DispatchCmdSetDiscardRectangleEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstDiscardRectangle,
- uint32_t discardRectangleCount,
- const VkRect2D* pDiscardRectangles);
-void DispatchSetHdrMetadataEXT(
- VkDevice device,
- uint32_t swapchainCount,
- const VkSwapchainKHR* pSwapchains,
- const VkHdrMetadataEXT* pMetadata);
-#ifdef VK_USE_PLATFORM_IOS_MVK
-VkResult DispatchCreateIOSSurfaceMVK(
- VkInstance instance,
- const VkIOSSurfaceCreateInfoMVK* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-#endif // VK_USE_PLATFORM_IOS_MVK
-#ifdef VK_USE_PLATFORM_MACOS_MVK
-VkResult DispatchCreateMacOSSurfaceMVK(
- VkInstance instance,
- const VkMacOSSurfaceCreateInfoMVK* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-#endif // VK_USE_PLATFORM_MACOS_MVK
-VkResult DispatchSetDebugUtilsObjectNameEXT(
- VkDevice device,
- const VkDebugUtilsObjectNameInfoEXT* pNameInfo);
-VkResult DispatchSetDebugUtilsObjectTagEXT(
- VkDevice device,
- const VkDebugUtilsObjectTagInfoEXT* pTagInfo);
-void DispatchQueueBeginDebugUtilsLabelEXT(
- VkQueue queue,
- const VkDebugUtilsLabelEXT* pLabelInfo);
-void DispatchQueueEndDebugUtilsLabelEXT(
- VkQueue queue);
-void DispatchQueueInsertDebugUtilsLabelEXT(
- VkQueue queue,
- const VkDebugUtilsLabelEXT* pLabelInfo);
-void DispatchCmdBeginDebugUtilsLabelEXT(
- VkCommandBuffer commandBuffer,
- const VkDebugUtilsLabelEXT* pLabelInfo);
-void DispatchCmdEndDebugUtilsLabelEXT(
- VkCommandBuffer commandBuffer);
-void DispatchCmdInsertDebugUtilsLabelEXT(
- VkCommandBuffer commandBuffer,
- const VkDebugUtilsLabelEXT* pLabelInfo);
-VkResult DispatchCreateDebugUtilsMessengerEXT(
- VkInstance instance,
- const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDebugUtilsMessengerEXT* pMessenger);
-void DispatchDestroyDebugUtilsMessengerEXT(
- VkInstance instance,
- VkDebugUtilsMessengerEXT messenger,
- const VkAllocationCallbacks* pAllocator);
-void DispatchSubmitDebugUtilsMessageEXT(
- VkInstance instance,
- VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
- VkDebugUtilsMessageTypeFlagsEXT messageTypes,
- const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData);
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-VkResult DispatchGetAndroidHardwareBufferPropertiesANDROID(
- VkDevice device,
- const struct AHardwareBuffer* buffer,
- VkAndroidHardwareBufferPropertiesANDROID* pProperties);
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-VkResult DispatchGetMemoryAndroidHardwareBufferANDROID(
- VkDevice device,
- const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo,
- struct AHardwareBuffer** pBuffer);
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-void DispatchCmdSetSampleLocationsEXT(
- VkCommandBuffer commandBuffer,
- const VkSampleLocationsInfoEXT* pSampleLocationsInfo);
-void DispatchGetPhysicalDeviceMultisamplePropertiesEXT(
- VkPhysicalDevice physicalDevice,
- VkSampleCountFlagBits samples,
- VkMultisamplePropertiesEXT* pMultisampleProperties);
-VkResult DispatchGetImageDrmFormatModifierPropertiesEXT(
- VkDevice device,
- VkImage image,
- VkImageDrmFormatModifierPropertiesEXT* pProperties);
-VkResult DispatchCreateValidationCacheEXT(
- VkDevice device,
- const VkValidationCacheCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkValidationCacheEXT* pValidationCache);
-void DispatchDestroyValidationCacheEXT(
- VkDevice device,
- VkValidationCacheEXT validationCache,
- const VkAllocationCallbacks* pAllocator);
-VkResult DispatchMergeValidationCachesEXT(
- VkDevice device,
- VkValidationCacheEXT dstCache,
- uint32_t srcCacheCount,
- const VkValidationCacheEXT* pSrcCaches);
-VkResult DispatchGetValidationCacheDataEXT(
- VkDevice device,
- VkValidationCacheEXT validationCache,
- size_t* pDataSize,
- void* pData);
-void DispatchCmdBindShadingRateImageNV(
- VkCommandBuffer commandBuffer,
- VkImageView imageView,
- VkImageLayout imageLayout);
-void DispatchCmdSetViewportShadingRatePaletteNV(
- VkCommandBuffer commandBuffer,
- uint32_t firstViewport,
- uint32_t viewportCount,
- const VkShadingRatePaletteNV* pShadingRatePalettes);
-void DispatchCmdSetCoarseSampleOrderNV(
- VkCommandBuffer commandBuffer,
- VkCoarseSampleOrderTypeNV sampleOrderType,
- uint32_t customSampleOrderCount,
- const VkCoarseSampleOrderCustomNV* pCustomSampleOrders);
-VkResult DispatchCreateAccelerationStructureNV(
- VkDevice device,
- const VkAccelerationStructureCreateInfoNV* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkAccelerationStructureNV* pAccelerationStructure);
-void DispatchDestroyAccelerationStructureNV(
- VkDevice device,
- VkAccelerationStructureNV accelerationStructure,
- const VkAllocationCallbacks* pAllocator);
-void DispatchGetAccelerationStructureMemoryRequirementsNV(
- VkDevice device,
- const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo,
- VkMemoryRequirements2KHR* pMemoryRequirements);
-VkResult DispatchBindAccelerationStructureMemoryNV(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindAccelerationStructureMemoryInfoNV* pBindInfos);
-void DispatchCmdBuildAccelerationStructureNV(
- VkCommandBuffer commandBuffer,
- const VkAccelerationStructureInfoNV* pInfo,
- VkBuffer instanceData,
- VkDeviceSize instanceOffset,
- VkBool32 update,
- VkAccelerationStructureNV dst,
- VkAccelerationStructureNV src,
- VkBuffer scratch,
- VkDeviceSize scratchOffset);
-void DispatchCmdCopyAccelerationStructureNV(
- VkCommandBuffer commandBuffer,
- VkAccelerationStructureNV dst,
- VkAccelerationStructureNV src,
- VkCopyAccelerationStructureModeNV mode);
-void DispatchCmdTraceRaysNV(
- VkCommandBuffer commandBuffer,
- VkBuffer raygenShaderBindingTableBuffer,
- VkDeviceSize raygenShaderBindingOffset,
- VkBuffer missShaderBindingTableBuffer,
- VkDeviceSize missShaderBindingOffset,
- VkDeviceSize missShaderBindingStride,
- VkBuffer hitShaderBindingTableBuffer,
- VkDeviceSize hitShaderBindingOffset,
- VkDeviceSize hitShaderBindingStride,
- VkBuffer callableShaderBindingTableBuffer,
- VkDeviceSize callableShaderBindingOffset,
- VkDeviceSize callableShaderBindingStride,
- uint32_t width,
- uint32_t height,
- uint32_t depth);
-VkResult DispatchCreateRayTracingPipelinesNV(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VkRayTracingPipelineCreateInfoNV* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkPipeline* pPipelines);
-VkResult DispatchGetRayTracingShaderGroupHandlesNV(
- VkDevice device,
- VkPipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- size_t dataSize,
- void* pData);
-VkResult DispatchGetAccelerationStructureHandleNV(
- VkDevice device,
- VkAccelerationStructureNV accelerationStructure,
- size_t dataSize,
- void* pData);
-void DispatchCmdWriteAccelerationStructuresPropertiesNV(
- VkCommandBuffer commandBuffer,
- uint32_t accelerationStructureCount,
- const VkAccelerationStructureNV* pAccelerationStructures,
- VkQueryType queryType,
- VkQueryPool queryPool,
- uint32_t firstQuery);
-VkResult DispatchCompileDeferredNV(
- VkDevice device,
- VkPipeline pipeline,
- uint32_t shader);
-VkResult DispatchGetMemoryHostPointerPropertiesEXT(
- VkDevice device,
- VkExternalMemoryHandleTypeFlagBits handleType,
- const void* pHostPointer,
- VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties);
-void DispatchCmdWriteBufferMarkerAMD(
- VkCommandBuffer commandBuffer,
- VkPipelineStageFlagBits pipelineStage,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- uint32_t marker);
-VkResult DispatchGetPhysicalDeviceCalibrateableTimeDomainsEXT(
- VkPhysicalDevice physicalDevice,
- uint32_t* pTimeDomainCount,
- VkTimeDomainEXT* pTimeDomains);
-VkResult DispatchGetCalibratedTimestampsEXT(
- VkDevice device,
- uint32_t timestampCount,
- const VkCalibratedTimestampInfoEXT* pTimestampInfos,
- uint64_t* pTimestamps,
- uint64_t* pMaxDeviation);
-void DispatchCmdDrawMeshTasksNV(
- VkCommandBuffer commandBuffer,
- uint32_t taskCount,
- uint32_t firstTask);
-void DispatchCmdDrawMeshTasksIndirectNV(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- uint32_t drawCount,
- uint32_t stride);
-void DispatchCmdDrawMeshTasksIndirectCountNV(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride);
-void DispatchCmdSetExclusiveScissorNV(
- VkCommandBuffer commandBuffer,
- uint32_t firstExclusiveScissor,
- uint32_t exclusiveScissorCount,
- const VkRect2D* pExclusiveScissors);
-void DispatchCmdSetCheckpointNV(
- VkCommandBuffer commandBuffer,
- const void* pCheckpointMarker);
-void DispatchGetQueueCheckpointDataNV(
- VkQueue queue,
- uint32_t* pCheckpointDataCount,
- VkCheckpointDataNV* pCheckpointData);
-VkResult DispatchInitializePerformanceApiINTEL(
- VkDevice device,
- const VkInitializePerformanceApiInfoINTEL* pInitializeInfo);
-void DispatchUninitializePerformanceApiINTEL(
- VkDevice device);
-VkResult DispatchCmdSetPerformanceMarkerINTEL(
- VkCommandBuffer commandBuffer,
- const VkPerformanceMarkerInfoINTEL* pMarkerInfo);
-VkResult DispatchCmdSetPerformanceStreamMarkerINTEL(
- VkCommandBuffer commandBuffer,
- const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo);
-VkResult DispatchCmdSetPerformanceOverrideINTEL(
- VkCommandBuffer commandBuffer,
- const VkPerformanceOverrideInfoINTEL* pOverrideInfo);
-VkResult DispatchAcquirePerformanceConfigurationINTEL(
- VkDevice device,
- const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo,
- VkPerformanceConfigurationINTEL* pConfiguration);
-VkResult DispatchReleasePerformanceConfigurationINTEL(
- VkDevice device,
- VkPerformanceConfigurationINTEL configuration);
-VkResult DispatchQueueSetPerformanceConfigurationINTEL(
- VkQueue queue,
- VkPerformanceConfigurationINTEL configuration);
-VkResult DispatchGetPerformanceParameterINTEL(
- VkDevice device,
- VkPerformanceParameterTypeINTEL parameter,
- VkPerformanceValueINTEL* pValue);
-void DispatchSetLocalDimmingAMD(
- VkDevice device,
- VkSwapchainKHR swapChain,
- VkBool32 localDimmingEnable);
-#ifdef VK_USE_PLATFORM_FUCHSIA
-VkResult DispatchCreateImagePipeSurfaceFUCHSIA(
- VkInstance instance,
- const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-#endif // VK_USE_PLATFORM_FUCHSIA
-#ifdef VK_USE_PLATFORM_METAL_EXT
-VkResult DispatchCreateMetalSurfaceEXT(
- VkInstance instance,
- const VkMetalSurfaceCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-#endif // VK_USE_PLATFORM_METAL_EXT
-VkDeviceAddress DispatchGetBufferDeviceAddressEXT(
- VkDevice device,
- const VkBufferDeviceAddressInfoEXT* pInfo);
-VkResult DispatchGetPhysicalDeviceCooperativeMatrixPropertiesNV(
- VkPhysicalDevice physicalDevice,
- uint32_t* pPropertyCount,
- VkCooperativeMatrixPropertiesNV* pProperties);
-VkResult DispatchGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
- VkPhysicalDevice physicalDevice,
- uint32_t* pCombinationCount,
- VkFramebufferMixedSamplesCombinationNV* pCombinations);
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-VkResult DispatchGetPhysicalDeviceSurfacePresentModes2EXT(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
- uint32_t* pPresentModeCount,
- VkPresentModeKHR* pPresentModes);
-#endif // VK_USE_PLATFORM_WIN32_KHR
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-VkResult DispatchAcquireFullScreenExclusiveModeEXT(
- VkDevice device,
- VkSwapchainKHR swapchain);
-#endif // VK_USE_PLATFORM_WIN32_KHR
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-VkResult DispatchReleaseFullScreenExclusiveModeEXT(
- VkDevice device,
- VkSwapchainKHR swapchain);
-#endif // VK_USE_PLATFORM_WIN32_KHR
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-VkResult DispatchGetDeviceGroupSurfacePresentModes2EXT(
- VkDevice device,
- const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
- VkDeviceGroupPresentModeFlagsKHR* pModes);
-#endif // VK_USE_PLATFORM_WIN32_KHR
-VkResult DispatchCreateHeadlessSurfaceEXT(
- VkInstance instance,
- const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-void DispatchCmdSetLineStippleEXT(
- VkCommandBuffer commandBuffer,
- uint32_t lineStippleFactor,
- uint16_t lineStipplePattern);
-void DispatchResetQueryPoolEXT(
- VkDevice device,
- VkQueryPool queryPool,
- uint32_t firstQuery,
-    uint32_t queryCount);
\ No newline at end of file
diff --git a/layers/generated/object_tracker.cpp b/layers/generated/object_tracker.cpp
deleted file mode 100644
index 3d3c0115c..000000000
--- a/layers/generated/object_tracker.cpp
+++ /dev/null
@@ -1,5145 +0,0 @@
-// *** THIS FILE IS GENERATED - DO NOT EDIT ***
-// See object_tracker_generator.py for modifications
-
-
-/***************************************************************************
- *
- * Copyright (c) 2015-2019 The Khronos Group Inc.
- * Copyright (c) 2015-2019 Valve Corporation
- * Copyright (c) 2015-2019 LunarG, Inc.
- * Copyright (c) 2015-2019 Google Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * Author: Mark Lobodzinski <mark@lunarg.com>
- * Author: Dave Houlton <daveh@lunarg.com>
- *
- ****************************************************************************/
-
-
-#include "chassis.h"
-#include "object_lifetime_validation.h"
-
-
-
-// ObjectTracker undestroyed objects validation function
-bool ObjectLifetimes::ReportUndestroyedObjects(VkDevice device, const std::string& error_code) {
- bool skip = false;
- skip |= DeviceReportUndestroyedObjects(device, kVulkanObjectTypeCommandBuffer, error_code);
- skip |= DeviceReportUndestroyedObjects(device, kVulkanObjectTypeSemaphore, error_code);
- skip |= DeviceReportUndestroyedObjects(device, kVulkanObjectTypeFence, error_code);
- skip |= DeviceReportUndestroyedObjects(device, kVulkanObjectTypeDeviceMemory, error_code);
- skip |= DeviceReportUndestroyedObjects(device, kVulkanObjectTypeBuffer, error_code);
- skip |= DeviceReportUndestroyedObjects(device, kVulkanObjectTypeImage, error_code);
- skip |= DeviceReportUndestroyedObjects(device, kVulkanObjectTypeEvent, error_code);
- skip |= DeviceReportUndestroyedObjects(device, kVulkanObjectTypeQueryPool, error_code);
- skip |= DeviceReportUndestroyedObjects(device, kVulkanObjectTypeBufferView, error_code);
- skip |= DeviceReportUndestroyedObjects(device, kVulkanObjectTypeImageView, error_code);
- skip |= DeviceReportUndestroyedObjects(device, kVulkanObjectTypeShaderModule, error_code);
- skip |= DeviceReportUndestroyedObjects(device, kVulkanObjectTypePipelineCache, error_code);
- skip |= DeviceReportUndestroyedObjects(device, kVulkanObjectTypePipelineLayout, error_code);
- skip |= DeviceReportUndestroyedObjects(device, kVulkanObjectTypeRenderPass, error_code);
- skip |= DeviceReportUndestroyedObjects(device, kVulkanObjectTypePipeline, error_code);
- skip |= DeviceReportUndestroyedObjects(device, kVulkanObjectTypeDescriptorSetLayout, error_code);
- skip |= DeviceReportUndestroyedObjects(device, kVulkanObjectTypeSampler, error_code);
- skip |= DeviceReportUndestroyedObjects(device, kVulkanObjectTypeDescriptorPool, error_code);
- skip |= DeviceReportUndestroyedObjects(device, kVulkanObjectTypeDescriptorSet, error_code);
- skip |= DeviceReportUndestroyedObjects(device, kVulkanObjectTypeFramebuffer, error_code);
- skip |= DeviceReportUndestroyedObjects(device, kVulkanObjectTypeCommandPool, error_code);
- skip |= DeviceReportUndestroyedObjects(device, kVulkanObjectTypeSamplerYcbcrConversion, error_code);
- skip |= DeviceReportUndestroyedObjects(device, kVulkanObjectTypeDescriptorUpdateTemplate, error_code);
- skip |= DeviceReportUndestroyedObjects(device, kVulkanObjectTypeSurfaceKHR, error_code);
- skip |= DeviceReportUndestroyedObjects(device, kVulkanObjectTypeSwapchainKHR, error_code);
- skip |= DeviceReportUndestroyedObjects(device, kVulkanObjectTypeDisplayKHR, error_code);
- skip |= DeviceReportUndestroyedObjects(device, kVulkanObjectTypeDisplayModeKHR, error_code);
- skip |= DeviceReportUndestroyedObjects(device, kVulkanObjectTypeDebugReportCallbackEXT, error_code);
- skip |= DeviceReportUndestroyedObjects(device, kVulkanObjectTypeObjectTableNVX, error_code);
- skip |= DeviceReportUndestroyedObjects(device, kVulkanObjectTypeIndirectCommandsLayoutNVX, error_code);
- skip |= DeviceReportUndestroyedObjects(device, kVulkanObjectTypeDebugUtilsMessengerEXT, error_code);
- skip |= DeviceReportUndestroyedObjects(device, kVulkanObjectTypeValidationCacheEXT, error_code);
- skip |= DeviceReportUndestroyedObjects(device, kVulkanObjectTypeAccelerationStructureNV, error_code);
- skip |= DeviceReportUndestroyedObjects(device, kVulkanObjectTypePerformanceConfigurationINTEL, error_code);
- return skip;
-}
-
-void ObjectLifetimes::DestroyUndestroyedObjects(VkDevice device) {
- DeviceDestroyUndestroyedObjects(device, kVulkanObjectTypeCommandBuffer);
- DeviceDestroyUndestroyedObjects(device, kVulkanObjectTypeSemaphore);
- DeviceDestroyUndestroyedObjects(device, kVulkanObjectTypeFence);
- DeviceDestroyUndestroyedObjects(device, kVulkanObjectTypeDeviceMemory);
- DeviceDestroyUndestroyedObjects(device, kVulkanObjectTypeBuffer);
- DeviceDestroyUndestroyedObjects(device, kVulkanObjectTypeImage);
- DeviceDestroyUndestroyedObjects(device, kVulkanObjectTypeEvent);
- DeviceDestroyUndestroyedObjects(device, kVulkanObjectTypeQueryPool);
- DeviceDestroyUndestroyedObjects(device, kVulkanObjectTypeBufferView);
- DeviceDestroyUndestroyedObjects(device, kVulkanObjectTypeImageView);
- DeviceDestroyUndestroyedObjects(device, kVulkanObjectTypeShaderModule);
- DeviceDestroyUndestroyedObjects(device, kVulkanObjectTypePipelineCache);
- DeviceDestroyUndestroyedObjects(device, kVulkanObjectTypePipelineLayout);
- DeviceDestroyUndestroyedObjects(device, kVulkanObjectTypeRenderPass);
- DeviceDestroyUndestroyedObjects(device, kVulkanObjectTypePipeline);
- DeviceDestroyUndestroyedObjects(device, kVulkanObjectTypeDescriptorSetLayout);
- DeviceDestroyUndestroyedObjects(device, kVulkanObjectTypeSampler);
- DeviceDestroyUndestroyedObjects(device, kVulkanObjectTypeDescriptorPool);
- DeviceDestroyUndestroyedObjects(device, kVulkanObjectTypeDescriptorSet);
- DeviceDestroyUndestroyedObjects(device, kVulkanObjectTypeFramebuffer);
- DeviceDestroyUndestroyedObjects(device, kVulkanObjectTypeCommandPool);
- DeviceDestroyUndestroyedObjects(device, kVulkanObjectTypeSamplerYcbcrConversion);
- DeviceDestroyUndestroyedObjects(device, kVulkanObjectTypeDescriptorUpdateTemplate);
- DeviceDestroyUndestroyedObjects(device, kVulkanObjectTypeSurfaceKHR);
- DeviceDestroyUndestroyedObjects(device, kVulkanObjectTypeSwapchainKHR);
- DeviceDestroyUndestroyedObjects(device, kVulkanObjectTypeDisplayKHR);
- DeviceDestroyUndestroyedObjects(device, kVulkanObjectTypeDisplayModeKHR);
- DeviceDestroyUndestroyedObjects(device, kVulkanObjectTypeDebugReportCallbackEXT);
- DeviceDestroyUndestroyedObjects(device, kVulkanObjectTypeObjectTableNVX);
- DeviceDestroyUndestroyedObjects(device, kVulkanObjectTypeIndirectCommandsLayoutNVX);
- DeviceDestroyUndestroyedObjects(device, kVulkanObjectTypeDebugUtilsMessengerEXT);
- DeviceDestroyUndestroyedObjects(device, kVulkanObjectTypeValidationCacheEXT);
- DeviceDestroyUndestroyedObjects(device, kVulkanObjectTypeAccelerationStructureNV);
- DeviceDestroyUndestroyedObjects(device, kVulkanObjectTypePerformanceConfigurationINTEL);
-}
-
-
-
-bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceFeatures(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceFeatures* pFeatures) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceFeatures-physicalDevice-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceFormatProperties(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkFormatProperties* pFormatProperties) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceFormatProperties-physicalDevice-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceImageFormatProperties(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkImageType type,
- VkImageTiling tiling,
- VkImageUsageFlags usage,
- VkImageCreateFlags flags,
- VkImageFormatProperties* pImageFormatProperties) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceImageFormatProperties-physicalDevice-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceProperties(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceProperties* pProperties) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceProperties-physicalDevice-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceMemoryProperties(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceMemoryProperties* pMemoryProperties) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceMemoryProperties-physicalDevice-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetInstanceProcAddr(
- VkInstance instance,
- const char* pName) {
- bool skip = false;
- skip |= ValidateObject(instance, instance, kVulkanObjectTypeInstance, true, "VUID-vkGetInstanceProcAddr-instance-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetDeviceProcAddr(
- VkDevice device,
- const char* pName) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetDeviceProcAddr-device-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCreateDevice(
- VkPhysicalDevice physicalDevice,
- const VkDeviceCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDevice* pDevice) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkCreateDevice-physicalDevice-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordCreateDevice(
- VkPhysicalDevice physicalDevice,
- const VkDeviceCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDevice* pDevice,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- CreateObject(physicalDevice, *pDevice, kVulkanObjectTypeDevice, pAllocator);
-
-}
-
-bool ObjectLifetimes::PreCallValidateEnumerateDeviceExtensionProperties(
- VkPhysicalDevice physicalDevice,
- const char* pLayerName,
- uint32_t* pPropertyCount,
- VkExtensionProperties* pProperties) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkEnumerateDeviceExtensionProperties-physicalDevice-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateEnumerateDeviceLayerProperties(
- VkPhysicalDevice physicalDevice,
- uint32_t* pPropertyCount,
- VkLayerProperties* pProperties) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkEnumerateDeviceLayerProperties-physicalDevice-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateQueueSubmit(
- VkQueue queue,
- uint32_t submitCount,
- const VkSubmitInfo* pSubmits,
- VkFence fence) {
- bool skip = false;
- skip |= ValidateObject(queue, queue, kVulkanObjectTypeQueue, false, "VUID-vkQueueSubmit-queue-parameter", "VUID-vkQueueSubmit-commonparent");
- if (pSubmits) {
- for (uint32_t index0 = 0; index0 < submitCount; ++index0) {
- if (pSubmits[index0].pWaitSemaphores) {
- for (uint32_t index1 = 0; index1 < pSubmits[index0].waitSemaphoreCount; ++index1) {
- skip |= ValidateObject(queue, pSubmits[index0].pWaitSemaphores[index1], kVulkanObjectTypeSemaphore, false, "VUID-VkSubmitInfo-pWaitSemaphores-parameter", "VUID-VkSubmitInfo-commonparent");
- }
- }
- if (pSubmits[index0].pCommandBuffers) {
- for (uint32_t index1 = 0; index1 < pSubmits[index0].commandBufferCount; ++index1) {
- skip |= ValidateObject(queue, pSubmits[index0].pCommandBuffers[index1], kVulkanObjectTypeCommandBuffer, false, "VUID-VkSubmitInfo-pCommandBuffers-parameter", "VUID-VkSubmitInfo-commonparent");
- }
- }
- if (pSubmits[index0].pSignalSemaphores) {
- for (uint32_t index1 = 0; index1 < pSubmits[index0].signalSemaphoreCount; ++index1) {
- skip |= ValidateObject(queue, pSubmits[index0].pSignalSemaphores[index1], kVulkanObjectTypeSemaphore, false, "VUID-VkSubmitInfo-pSignalSemaphores-parameter", "VUID-VkSubmitInfo-commonparent");
- }
- }
- }
- }
- skip |= ValidateObject(queue, fence, kVulkanObjectTypeFence, true, "VUID-vkQueueSubmit-fence-parameter", "VUID-vkQueueSubmit-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateQueueWaitIdle(
- VkQueue queue) {
- bool skip = false;
- skip |= ValidateObject(queue, queue, kVulkanObjectTypeQueue, false, "VUID-vkQueueWaitIdle-queue-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateDeviceWaitIdle(
- VkDevice device) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkDeviceWaitIdle-device-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateAllocateMemory(
- VkDevice device,
- const VkMemoryAllocateInfo* pAllocateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDeviceMemory* pMemory) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkAllocateMemory-device-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordAllocateMemory(
- VkDevice device,
- const VkMemoryAllocateInfo* pAllocateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDeviceMemory* pMemory,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- CreateObject(device, *pMemory, kVulkanObjectTypeDeviceMemory, pAllocator);
-
-}
-
-bool ObjectLifetimes::PreCallValidateFreeMemory(
- VkDevice device,
- VkDeviceMemory memory,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkFreeMemory-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, memory, kVulkanObjectTypeDeviceMemory, true, "VUID-vkFreeMemory-memory-parameter", "VUID-vkFreeMemory-memory-parent");
- skip |= ValidateDestroyObject(device, memory, kVulkanObjectTypeDeviceMemory, pAllocator, kVUIDUndefined, kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PreCallRecordFreeMemory(
- VkDevice device,
- VkDeviceMemory memory,
- const VkAllocationCallbacks* pAllocator) {
- RecordDestroyObject(device, memory, kVulkanObjectTypeDeviceMemory);
-
-}
-
-bool ObjectLifetimes::PreCallValidateMapMemory(
- VkDevice device,
- VkDeviceMemory memory,
- VkDeviceSize offset,
- VkDeviceSize size,
- VkMemoryMapFlags flags,
- void** ppData) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkMapMemory-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, memory, kVulkanObjectTypeDeviceMemory, false, "VUID-vkMapMemory-memory-parameter", "VUID-vkMapMemory-memory-parent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateUnmapMemory(
- VkDevice device,
- VkDeviceMemory memory) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkUnmapMemory-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, memory, kVulkanObjectTypeDeviceMemory, false, "VUID-vkUnmapMemory-memory-parameter", "VUID-vkUnmapMemory-memory-parent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateFlushMappedMemoryRanges(
- VkDevice device,
- uint32_t memoryRangeCount,
- const VkMappedMemoryRange* pMemoryRanges) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkFlushMappedMemoryRanges-device-parameter", kVUIDUndefined);
- if (pMemoryRanges) {
- for (uint32_t index0 = 0; index0 < memoryRangeCount; ++index0) {
- skip |= ValidateObject(device, pMemoryRanges[index0].memory, kVulkanObjectTypeDeviceMemory, false, "VUID-VkMappedMemoryRange-memory-parameter", kVUIDUndefined);
- }
- }
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateInvalidateMappedMemoryRanges(
- VkDevice device,
- uint32_t memoryRangeCount,
- const VkMappedMemoryRange* pMemoryRanges) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkInvalidateMappedMemoryRanges-device-parameter", kVUIDUndefined);
- if (pMemoryRanges) {
- for (uint32_t index0 = 0; index0 < memoryRangeCount; ++index0) {
- skip |= ValidateObject(device, pMemoryRanges[index0].memory, kVulkanObjectTypeDeviceMemory, false, "VUID-VkMappedMemoryRange-memory-parameter", kVUIDUndefined);
- }
- }
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetDeviceMemoryCommitment(
- VkDevice device,
- VkDeviceMemory memory,
- VkDeviceSize* pCommittedMemoryInBytes) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetDeviceMemoryCommitment-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, memory, kVulkanObjectTypeDeviceMemory, false, "VUID-vkGetDeviceMemoryCommitment-memory-parameter", "VUID-vkGetDeviceMemoryCommitment-memory-parent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateBindBufferMemory(
- VkDevice device,
- VkBuffer buffer,
- VkDeviceMemory memory,
- VkDeviceSize memoryOffset) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkBindBufferMemory-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, buffer, kVulkanObjectTypeBuffer, false, "VUID-vkBindBufferMemory-buffer-parameter", "VUID-vkBindBufferMemory-buffer-parent");
- skip |= ValidateObject(device, memory, kVulkanObjectTypeDeviceMemory, false, "VUID-vkBindBufferMemory-memory-parameter", "VUID-vkBindBufferMemory-memory-parent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateBindImageMemory(
- VkDevice device,
- VkImage image,
- VkDeviceMemory memory,
- VkDeviceSize memoryOffset) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkBindImageMemory-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, image, kVulkanObjectTypeImage, false, "VUID-vkBindImageMemory-image-parameter", "VUID-vkBindImageMemory-image-parent");
- skip |= ValidateObject(device, memory, kVulkanObjectTypeDeviceMemory, false, "VUID-vkBindImageMemory-memory-parameter", "VUID-vkBindImageMemory-memory-parent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetBufferMemoryRequirements(
- VkDevice device,
- VkBuffer buffer,
- VkMemoryRequirements* pMemoryRequirements) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetBufferMemoryRequirements-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, buffer, kVulkanObjectTypeBuffer, false, "VUID-vkGetBufferMemoryRequirements-buffer-parameter", "VUID-vkGetBufferMemoryRequirements-buffer-parent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetImageMemoryRequirements(
- VkDevice device,
- VkImage image,
- VkMemoryRequirements* pMemoryRequirements) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetImageMemoryRequirements-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, image, kVulkanObjectTypeImage, false, "VUID-vkGetImageMemoryRequirements-image-parameter", "VUID-vkGetImageMemoryRequirements-image-parent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetImageSparseMemoryRequirements(
- VkDevice device,
- VkImage image,
- uint32_t* pSparseMemoryRequirementCount,
- VkSparseImageMemoryRequirements* pSparseMemoryRequirements) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetImageSparseMemoryRequirements-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, image, kVulkanObjectTypeImage, false, "VUID-vkGetImageSparseMemoryRequirements-image-parameter", "VUID-vkGetImageSparseMemoryRequirements-image-parent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceSparseImageFormatProperties(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkImageType type,
- VkSampleCountFlagBits samples,
- VkImageUsageFlags usage,
- VkImageTiling tiling,
- uint32_t* pPropertyCount,
- VkSparseImageFormatProperties* pProperties) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceSparseImageFormatProperties-physicalDevice-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateQueueBindSparse(
- VkQueue queue,
- uint32_t bindInfoCount,
- const VkBindSparseInfo* pBindInfo,
- VkFence fence) {
- bool skip = false;
- skip |= ValidateObject(queue, queue, kVulkanObjectTypeQueue, false, "VUID-vkQueueBindSparse-queue-parameter", "VUID-vkQueueBindSparse-commonparent");
- if (pBindInfo) {
- for (uint32_t index0 = 0; index0 < bindInfoCount; ++index0) {
- if (pBindInfo[index0].pWaitSemaphores) {
- for (uint32_t index1 = 0; index1 < pBindInfo[index0].waitSemaphoreCount; ++index1) {
- skip |= ValidateObject(queue, pBindInfo[index0].pWaitSemaphores[index1], kVulkanObjectTypeSemaphore, false, "VUID-VkBindSparseInfo-pWaitSemaphores-parameter", "VUID-VkBindSparseInfo-commonparent");
- }
- }
- if (pBindInfo[index0].pBufferBinds) {
- for (uint32_t index1 = 0; index1 < pBindInfo[index0].bufferBindCount; ++index1) {
- skip |= ValidateObject(queue, pBindInfo[index0].pBufferBinds[index1].buffer, kVulkanObjectTypeBuffer, false, "VUID-VkSparseBufferMemoryBindInfo-buffer-parameter", kVUIDUndefined);
- if (pBindInfo[index0].pBufferBinds[index1].pBinds) {
- for (uint32_t index2 = 0; index2 < pBindInfo[index0].pBufferBinds[index1].bindCount; ++index2) {
- skip |= ValidateObject(queue, pBindInfo[index0].pBufferBinds[index1].pBinds[index2].memory, kVulkanObjectTypeDeviceMemory, true, "VUID-VkSparseMemoryBind-memory-parameter", kVUIDUndefined);
- }
- }
- }
- }
- if (pBindInfo[index0].pImageOpaqueBinds) {
- for (uint32_t index1 = 0; index1 < pBindInfo[index0].imageOpaqueBindCount; ++index1) {
- skip |= ValidateObject(queue, pBindInfo[index0].pImageOpaqueBinds[index1].image, kVulkanObjectTypeImage, false, "VUID-VkSparseImageOpaqueMemoryBindInfo-image-parameter", kVUIDUndefined);
- if (pBindInfo[index0].pImageOpaqueBinds[index1].pBinds) {
- for (uint32_t index2 = 0; index2 < pBindInfo[index0].pImageOpaqueBinds[index1].bindCount; ++index2) {
- skip |= ValidateObject(queue, pBindInfo[index0].pImageOpaqueBinds[index1].pBinds[index2].memory, kVulkanObjectTypeDeviceMemory, true, "VUID-VkSparseMemoryBind-memory-parameter", kVUIDUndefined);
- }
- }
- }
- }
- if (pBindInfo[index0].pImageBinds) {
- for (uint32_t index1 = 0; index1 < pBindInfo[index0].imageBindCount; ++index1) {
- skip |= ValidateObject(queue, pBindInfo[index0].pImageBinds[index1].image, kVulkanObjectTypeImage, false, "VUID-VkSparseImageMemoryBindInfo-image-parameter", kVUIDUndefined);
- if (pBindInfo[index0].pImageBinds[index1].pBinds) {
- for (uint32_t index2 = 0; index2 < pBindInfo[index0].pImageBinds[index1].bindCount; ++index2) {
- skip |= ValidateObject(queue, pBindInfo[index0].pImageBinds[index1].pBinds[index2].memory, kVulkanObjectTypeDeviceMemory, true, "VUID-VkSparseImageMemoryBind-memory-parameter", kVUIDUndefined);
- }
- }
- }
- }
- if (pBindInfo[index0].pSignalSemaphores) {
- for (uint32_t index1 = 0; index1 < pBindInfo[index0].signalSemaphoreCount; ++index1) {
- skip |= ValidateObject(queue, pBindInfo[index0].pSignalSemaphores[index1], kVulkanObjectTypeSemaphore, false, "VUID-VkBindSparseInfo-pSignalSemaphores-parameter", "VUID-VkBindSparseInfo-commonparent");
- }
- }
- }
- }
- skip |= ValidateObject(queue, fence, kVulkanObjectTypeFence, true, "VUID-vkQueueBindSparse-fence-parameter", "VUID-vkQueueBindSparse-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCreateFence(
- VkDevice device,
- const VkFenceCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFence* pFence) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkCreateFence-device-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordCreateFence(
- VkDevice device,
- const VkFenceCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFence* pFence,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- CreateObject(device, *pFence, kVulkanObjectTypeFence, pAllocator);
-
-}
-
-bool ObjectLifetimes::PreCallValidateDestroyFence(
- VkDevice device,
- VkFence fence,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkDestroyFence-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, fence, kVulkanObjectTypeFence, true, "VUID-vkDestroyFence-fence-parameter", "VUID-vkDestroyFence-fence-parent");
- skip |= ValidateDestroyObject(device, fence, kVulkanObjectTypeFence, pAllocator, "VUID-vkDestroyFence-fence-01121", "VUID-vkDestroyFence-fence-01122");
-
- return skip;
-}
-
-void ObjectLifetimes::PreCallRecordDestroyFence(
- VkDevice device,
- VkFence fence,
- const VkAllocationCallbacks* pAllocator) {
- RecordDestroyObject(device, fence, kVulkanObjectTypeFence);
-
-}
-
-bool ObjectLifetimes::PreCallValidateResetFences(
- VkDevice device,
- uint32_t fenceCount,
- const VkFence* pFences) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkResetFences-device-parameter", kVUIDUndefined);
- if (pFences) {
- for (uint32_t index0 = 0; index0 < fenceCount; ++index0) {
- skip |= ValidateObject(device, pFences[index0], kVulkanObjectTypeFence, false, "VUID-vkResetFences-pFences-parameter", "VUID-vkResetFences-pFences-parent");
- }
- }
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetFenceStatus(
- VkDevice device,
- VkFence fence) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetFenceStatus-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, fence, kVulkanObjectTypeFence, false, "VUID-vkGetFenceStatus-fence-parameter", "VUID-vkGetFenceStatus-fence-parent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateWaitForFences(
- VkDevice device,
- uint32_t fenceCount,
- const VkFence* pFences,
- VkBool32 waitAll,
- uint64_t timeout) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkWaitForFences-device-parameter", kVUIDUndefined);
- if (pFences) {
- for (uint32_t index0 = 0; index0 < fenceCount; ++index0) {
- skip |= ValidateObject(device, pFences[index0], kVulkanObjectTypeFence, false, "VUID-vkWaitForFences-pFences-parameter", "VUID-vkWaitForFences-pFences-parent");
- }
- }
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCreateSemaphore(
- VkDevice device,
- const VkSemaphoreCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSemaphore* pSemaphore) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkCreateSemaphore-device-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordCreateSemaphore(
- VkDevice device,
- const VkSemaphoreCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSemaphore* pSemaphore,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- CreateObject(device, *pSemaphore, kVulkanObjectTypeSemaphore, pAllocator);
-
-}
-
-bool ObjectLifetimes::PreCallValidateDestroySemaphore(
- VkDevice device,
- VkSemaphore semaphore,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkDestroySemaphore-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, semaphore, kVulkanObjectTypeSemaphore, true, "VUID-vkDestroySemaphore-semaphore-parameter", "VUID-vkDestroySemaphore-semaphore-parent");
- skip |= ValidateDestroyObject(device, semaphore, kVulkanObjectTypeSemaphore, pAllocator, "VUID-vkDestroySemaphore-semaphore-01138", "VUID-vkDestroySemaphore-semaphore-01139");
-
- return skip;
-}
-
-void ObjectLifetimes::PreCallRecordDestroySemaphore(
- VkDevice device,
- VkSemaphore semaphore,
- const VkAllocationCallbacks* pAllocator) {
- RecordDestroyObject(device, semaphore, kVulkanObjectTypeSemaphore);
-
-}
-
-bool ObjectLifetimes::PreCallValidateCreateEvent(
- VkDevice device,
- const VkEventCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkEvent* pEvent) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkCreateEvent-device-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordCreateEvent(
- VkDevice device,
- const VkEventCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkEvent* pEvent,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- CreateObject(device, *pEvent, kVulkanObjectTypeEvent, pAllocator);
-
-}
-
-bool ObjectLifetimes::PreCallValidateDestroyEvent(
- VkDevice device,
- VkEvent event,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkDestroyEvent-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, event, kVulkanObjectTypeEvent, true, "VUID-vkDestroyEvent-event-parameter", "VUID-vkDestroyEvent-event-parent");
- skip |= ValidateDestroyObject(device, event, kVulkanObjectTypeEvent, pAllocator, "VUID-vkDestroyEvent-event-01146", "VUID-vkDestroyEvent-event-01147");
-
- return skip;
-}
-
-void ObjectLifetimes::PreCallRecordDestroyEvent(
- VkDevice device,
- VkEvent event,
- const VkAllocationCallbacks* pAllocator) {
- RecordDestroyObject(device, event, kVulkanObjectTypeEvent);
-
-}
-
-bool ObjectLifetimes::PreCallValidateGetEventStatus(
- VkDevice device,
- VkEvent event) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetEventStatus-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, event, kVulkanObjectTypeEvent, false, "VUID-vkGetEventStatus-event-parameter", "VUID-vkGetEventStatus-event-parent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateSetEvent(
- VkDevice device,
- VkEvent event) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkSetEvent-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, event, kVulkanObjectTypeEvent, false, "VUID-vkSetEvent-event-parameter", "VUID-vkSetEvent-event-parent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateResetEvent(
- VkDevice device,
- VkEvent event) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkResetEvent-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, event, kVulkanObjectTypeEvent, false, "VUID-vkResetEvent-event-parameter", "VUID-vkResetEvent-event-parent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCreateQueryPool(
- VkDevice device,
- const VkQueryPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkQueryPool* pQueryPool) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkCreateQueryPool-device-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordCreateQueryPool(
- VkDevice device,
- const VkQueryPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkQueryPool* pQueryPool,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- CreateObject(device, *pQueryPool, kVulkanObjectTypeQueryPool, pAllocator);
-
-}
-
-bool ObjectLifetimes::PreCallValidateDestroyQueryPool(
- VkDevice device,
- VkQueryPool queryPool,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkDestroyQueryPool-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, queryPool, kVulkanObjectTypeQueryPool, true, "VUID-vkDestroyQueryPool-queryPool-parameter", "VUID-vkDestroyQueryPool-queryPool-parent");
- skip |= ValidateDestroyObject(device, queryPool, kVulkanObjectTypeQueryPool, pAllocator, "VUID-vkDestroyQueryPool-queryPool-00794", "VUID-vkDestroyQueryPool-queryPool-00795");
-
- return skip;
-}
-
-void ObjectLifetimes::PreCallRecordDestroyQueryPool(
- VkDevice device,
- VkQueryPool queryPool,
- const VkAllocationCallbacks* pAllocator) {
- RecordDestroyObject(device, queryPool, kVulkanObjectTypeQueryPool);
-
-}
-
-bool ObjectLifetimes::PreCallValidateGetQueryPoolResults(
- VkDevice device,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- size_t dataSize,
- void* pData,
- VkDeviceSize stride,
- VkQueryResultFlags flags) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetQueryPoolResults-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, queryPool, kVulkanObjectTypeQueryPool, false, "VUID-vkGetQueryPoolResults-queryPool-parameter", "VUID-vkGetQueryPoolResults-queryPool-parent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCreateBuffer(
- VkDevice device,
- const VkBufferCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkBuffer* pBuffer) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkCreateBuffer-device-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordCreateBuffer(
- VkDevice device,
- const VkBufferCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkBuffer* pBuffer,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- CreateObject(device, *pBuffer, kVulkanObjectTypeBuffer, pAllocator);
-
-}
-
-bool ObjectLifetimes::PreCallValidateDestroyBuffer(
- VkDevice device,
- VkBuffer buffer,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkDestroyBuffer-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, buffer, kVulkanObjectTypeBuffer, true, "VUID-vkDestroyBuffer-buffer-parameter", "VUID-vkDestroyBuffer-buffer-parent");
- skip |= ValidateDestroyObject(device, buffer, kVulkanObjectTypeBuffer, pAllocator, "VUID-vkDestroyBuffer-buffer-00923", "VUID-vkDestroyBuffer-buffer-00924");
-
- return skip;
-}
-
-void ObjectLifetimes::PreCallRecordDestroyBuffer(
- VkDevice device,
- VkBuffer buffer,
- const VkAllocationCallbacks* pAllocator) {
- RecordDestroyObject(device, buffer, kVulkanObjectTypeBuffer);
-
-}
-
-bool ObjectLifetimes::PreCallValidateCreateBufferView(
- VkDevice device,
- const VkBufferViewCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkBufferView* pView) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkCreateBufferView-device-parameter", kVUIDUndefined);
- if (pCreateInfo) {
- skip |= ValidateObject(device, pCreateInfo->buffer, kVulkanObjectTypeBuffer, false, "VUID-VkBufferViewCreateInfo-buffer-parameter", kVUIDUndefined);
- }
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordCreateBufferView(
- VkDevice device,
- const VkBufferViewCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkBufferView* pView,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- CreateObject(device, *pView, kVulkanObjectTypeBufferView, pAllocator);
-
-}
-
-bool ObjectLifetimes::PreCallValidateDestroyBufferView(
- VkDevice device,
- VkBufferView bufferView,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkDestroyBufferView-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, bufferView, kVulkanObjectTypeBufferView, true, "VUID-vkDestroyBufferView-bufferView-parameter", "VUID-vkDestroyBufferView-bufferView-parent");
- skip |= ValidateDestroyObject(device, bufferView, kVulkanObjectTypeBufferView, pAllocator, "VUID-vkDestroyBufferView-bufferView-00937", "VUID-vkDestroyBufferView-bufferView-00938");
-
- return skip;
-}
-
-void ObjectLifetimes::PreCallRecordDestroyBufferView(
- VkDevice device,
- VkBufferView bufferView,
- const VkAllocationCallbacks* pAllocator) {
- RecordDestroyObject(device, bufferView, kVulkanObjectTypeBufferView);
-
-}
-
-bool ObjectLifetimes::PreCallValidateCreateImage(
- VkDevice device,
- const VkImageCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkImage* pImage) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkCreateImage-device-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordCreateImage(
- VkDevice device,
- const VkImageCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkImage* pImage,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- CreateObject(device, *pImage, kVulkanObjectTypeImage, pAllocator);
-
-}
-
-bool ObjectLifetimes::PreCallValidateDestroyImage(
- VkDevice device,
- VkImage image,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkDestroyImage-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, image, kVulkanObjectTypeImage, true, "VUID-vkDestroyImage-image-parameter", "VUID-vkDestroyImage-image-parent");
- skip |= ValidateDestroyObject(device, image, kVulkanObjectTypeImage, pAllocator, "VUID-vkDestroyImage-image-01001", "VUID-vkDestroyImage-image-01002");
-
- return skip;
-}
-
-void ObjectLifetimes::PreCallRecordDestroyImage(
- VkDevice device,
- VkImage image,
- const VkAllocationCallbacks* pAllocator) {
- RecordDestroyObject(device, image, kVulkanObjectTypeImage);
-
-}
-
-bool ObjectLifetimes::PreCallValidateGetImageSubresourceLayout(
- VkDevice device,
- VkImage image,
- const VkImageSubresource* pSubresource,
- VkSubresourceLayout* pLayout) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetImageSubresourceLayout-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, image, kVulkanObjectTypeImage, false, "VUID-vkGetImageSubresourceLayout-image-parameter", "VUID-vkGetImageSubresourceLayout-image-parent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCreateImageView(
- VkDevice device,
- const VkImageViewCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkImageView* pView) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkCreateImageView-device-parameter", kVUIDUndefined);
- if (pCreateInfo) {
- skip |= ValidateObject(device, pCreateInfo->image, kVulkanObjectTypeImage, false, "VUID-VkImageViewCreateInfo-image-parameter", kVUIDUndefined);
- }
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordCreateImageView(
- VkDevice device,
- const VkImageViewCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkImageView* pView,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- CreateObject(device, *pView, kVulkanObjectTypeImageView, pAllocator);
-
-}
-
-bool ObjectLifetimes::PreCallValidateDestroyImageView(
- VkDevice device,
- VkImageView imageView,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkDestroyImageView-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, imageView, kVulkanObjectTypeImageView, true, "VUID-vkDestroyImageView-imageView-parameter", "VUID-vkDestroyImageView-imageView-parent");
- skip |= ValidateDestroyObject(device, imageView, kVulkanObjectTypeImageView, pAllocator, "VUID-vkDestroyImageView-imageView-01027", "VUID-vkDestroyImageView-imageView-01028");
-
- return skip;
-}
-
-void ObjectLifetimes::PreCallRecordDestroyImageView(
- VkDevice device,
- VkImageView imageView,
- const VkAllocationCallbacks* pAllocator) {
- RecordDestroyObject(device, imageView, kVulkanObjectTypeImageView);
-
-}
-
-bool ObjectLifetimes::PreCallValidateCreateShaderModule(
- VkDevice device,
- const VkShaderModuleCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkShaderModule* pShaderModule) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkCreateShaderModule-device-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordCreateShaderModule(
- VkDevice device,
- const VkShaderModuleCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkShaderModule* pShaderModule,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- CreateObject(device, *pShaderModule, kVulkanObjectTypeShaderModule, pAllocator);
-
-}
-
-bool ObjectLifetimes::PreCallValidateDestroyShaderModule(
- VkDevice device,
- VkShaderModule shaderModule,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkDestroyShaderModule-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, shaderModule, kVulkanObjectTypeShaderModule, true, "VUID-vkDestroyShaderModule-shaderModule-parameter", "VUID-vkDestroyShaderModule-shaderModule-parent");
- skip |= ValidateDestroyObject(device, shaderModule, kVulkanObjectTypeShaderModule, pAllocator, "VUID-vkDestroyShaderModule-shaderModule-01092", "VUID-vkDestroyShaderModule-shaderModule-01093");
-
- return skip;
-}
-
-void ObjectLifetimes::PreCallRecordDestroyShaderModule(
- VkDevice device,
- VkShaderModule shaderModule,
- const VkAllocationCallbacks* pAllocator) {
- RecordDestroyObject(device, shaderModule, kVulkanObjectTypeShaderModule);
-
-}
-
-bool ObjectLifetimes::PreCallValidateCreatePipelineCache(
- VkDevice device,
- const VkPipelineCacheCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkPipelineCache* pPipelineCache) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkCreatePipelineCache-device-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordCreatePipelineCache(
- VkDevice device,
- const VkPipelineCacheCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkPipelineCache* pPipelineCache,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- CreateObject(device, *pPipelineCache, kVulkanObjectTypePipelineCache, pAllocator);
-
-}
-
-bool ObjectLifetimes::PreCallValidateDestroyPipelineCache(
- VkDevice device,
- VkPipelineCache pipelineCache,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkDestroyPipelineCache-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, pipelineCache, kVulkanObjectTypePipelineCache, true, "VUID-vkDestroyPipelineCache-pipelineCache-parameter", "VUID-vkDestroyPipelineCache-pipelineCache-parent");
- skip |= ValidateDestroyObject(device, pipelineCache, kVulkanObjectTypePipelineCache, pAllocator, "VUID-vkDestroyPipelineCache-pipelineCache-00771", "VUID-vkDestroyPipelineCache-pipelineCache-00772");
-
- return skip;
-}
-
-void ObjectLifetimes::PreCallRecordDestroyPipelineCache(
- VkDevice device,
- VkPipelineCache pipelineCache,
- const VkAllocationCallbacks* pAllocator) {
- RecordDestroyObject(device, pipelineCache, kVulkanObjectTypePipelineCache);
-
-}
-
-bool ObjectLifetimes::PreCallValidateGetPipelineCacheData(
- VkDevice device,
- VkPipelineCache pipelineCache,
- size_t* pDataSize,
- void* pData) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetPipelineCacheData-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, pipelineCache, kVulkanObjectTypePipelineCache, false, "VUID-vkGetPipelineCacheData-pipelineCache-parameter", "VUID-vkGetPipelineCacheData-pipelineCache-parent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateMergePipelineCaches(
- VkDevice device,
- VkPipelineCache dstCache,
- uint32_t srcCacheCount,
- const VkPipelineCache* pSrcCaches) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkMergePipelineCaches-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, dstCache, kVulkanObjectTypePipelineCache, false, "VUID-vkMergePipelineCaches-dstCache-parameter", "VUID-vkMergePipelineCaches-dstCache-parent");
- if (pSrcCaches) {
- for (uint32_t index0 = 0; index0 < srcCacheCount; ++index0) {
- skip |= ValidateObject(device, pSrcCaches[index0], kVulkanObjectTypePipelineCache, false, "VUID-vkMergePipelineCaches-pSrcCaches-parameter", "VUID-vkMergePipelineCaches-pSrcCaches-parent");
- }
- }
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCreateGraphicsPipelines(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VkGraphicsPipelineCreateInfo* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkPipeline* pPipelines) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkCreateGraphicsPipelines-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, pipelineCache, kVulkanObjectTypePipelineCache, true, "VUID-vkCreateGraphicsPipelines-pipelineCache-parameter", "VUID-vkCreateGraphicsPipelines-pipelineCache-parent");
- if (pCreateInfos) {
- for (uint32_t index0 = 0; index0 < createInfoCount; ++index0) {
- if (pCreateInfos[index0].pStages) {
- for (uint32_t index1 = 0; index1 < pCreateInfos[index0].stageCount; ++index1) {
- skip |= ValidateObject(device, pCreateInfos[index0].pStages[index1].module, kVulkanObjectTypeShaderModule, false, "VUID-VkPipelineShaderStageCreateInfo-module-parameter", kVUIDUndefined);
- }
- }
- skip |= ValidateObject(device, pCreateInfos[index0].layout, kVulkanObjectTypePipelineLayout, false, "VUID-VkGraphicsPipelineCreateInfo-layout-parameter", "VUID-VkGraphicsPipelineCreateInfo-commonparent");
- skip |= ValidateObject(device, pCreateInfos[index0].renderPass, kVulkanObjectTypeRenderPass, false, "VUID-VkGraphicsPipelineCreateInfo-renderPass-parameter", "VUID-VkGraphicsPipelineCreateInfo-commonparent");
- skip |= ValidateObject(device, pCreateInfos[index0].basePipelineHandle, kVulkanObjectTypePipeline, true, kVUIDUndefined, "VUID-VkGraphicsPipelineCreateInfo-commonparent");
- }
- }
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordCreateGraphicsPipelines(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VkGraphicsPipelineCreateInfo* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkPipeline* pPipelines,
- VkResult result) {
- if (VK_ERROR_VALIDATION_FAILED_EXT == result) return;
- if (pPipelines) {
- for (uint32_t index = 0; index < createInfoCount; index++) {
- if (!pPipelines[index]) continue;
- CreateObject(device, pPipelines[index], kVulkanObjectTypePipeline, pAllocator);
- }
- }
-
-}
-
-bool ObjectLifetimes::PreCallValidateCreateComputePipelines(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VkComputePipelineCreateInfo* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkPipeline* pPipelines) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkCreateComputePipelines-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, pipelineCache, kVulkanObjectTypePipelineCache, true, "VUID-vkCreateComputePipelines-pipelineCache-parameter", "VUID-vkCreateComputePipelines-pipelineCache-parent");
- if (pCreateInfos) {
- for (uint32_t index0 = 0; index0 < createInfoCount; ++index0) {
- skip |= ValidateObject(device, pCreateInfos[index0].stage.module, kVulkanObjectTypeShaderModule, false, "VUID-VkPipelineShaderStageCreateInfo-module-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, pCreateInfos[index0].layout, kVulkanObjectTypePipelineLayout, false, "VUID-VkComputePipelineCreateInfo-layout-parameter", "VUID-VkComputePipelineCreateInfo-commonparent");
- skip |= ValidateObject(device, pCreateInfos[index0].basePipelineHandle, kVulkanObjectTypePipeline, true, kVUIDUndefined, "VUID-VkComputePipelineCreateInfo-commonparent");
- }
- }
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordCreateComputePipelines(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VkComputePipelineCreateInfo* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkPipeline* pPipelines,
- VkResult result) {
- if (VK_ERROR_VALIDATION_FAILED_EXT == result) return;
- if (pPipelines) {
- for (uint32_t index = 0; index < createInfoCount; index++) {
- if (!pPipelines[index]) continue;
- CreateObject(device, pPipelines[index], kVulkanObjectTypePipeline, pAllocator);
- }
- }
-
-}
-
-bool ObjectLifetimes::PreCallValidateDestroyPipeline(
- VkDevice device,
- VkPipeline pipeline,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkDestroyPipeline-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, pipeline, kVulkanObjectTypePipeline, true, "VUID-vkDestroyPipeline-pipeline-parameter", "VUID-vkDestroyPipeline-pipeline-parent");
- skip |= ValidateDestroyObject(device, pipeline, kVulkanObjectTypePipeline, pAllocator, "VUID-vkDestroyPipeline-pipeline-00766", "VUID-vkDestroyPipeline-pipeline-00767");
-
- return skip;
-}
-
-void ObjectLifetimes::PreCallRecordDestroyPipeline(
- VkDevice device,
- VkPipeline pipeline,
- const VkAllocationCallbacks* pAllocator) {
- RecordDestroyObject(device, pipeline, kVulkanObjectTypePipeline);
-
-}
-
-bool ObjectLifetimes::PreCallValidateCreatePipelineLayout(
- VkDevice device,
- const VkPipelineLayoutCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkPipelineLayout* pPipelineLayout) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkCreatePipelineLayout-device-parameter", kVUIDUndefined);
- if (pCreateInfo) {
- if (pCreateInfo->pSetLayouts) {
- for (uint32_t index1 = 0; index1 < pCreateInfo->setLayoutCount; ++index1) {
- skip |= ValidateObject(device, pCreateInfo->pSetLayouts[index1], kVulkanObjectTypeDescriptorSetLayout, false, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-parameter", kVUIDUndefined);
- }
- }
- }
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordCreatePipelineLayout(
- VkDevice device,
- const VkPipelineLayoutCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkPipelineLayout* pPipelineLayout,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- CreateObject(device, *pPipelineLayout, kVulkanObjectTypePipelineLayout, pAllocator);
-
-}
-
-bool ObjectLifetimes::PreCallValidateDestroyPipelineLayout(
- VkDevice device,
- VkPipelineLayout pipelineLayout,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkDestroyPipelineLayout-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, pipelineLayout, kVulkanObjectTypePipelineLayout, true, "VUID-vkDestroyPipelineLayout-pipelineLayout-parameter", "VUID-vkDestroyPipelineLayout-pipelineLayout-parent");
- skip |= ValidateDestroyObject(device, pipelineLayout, kVulkanObjectTypePipelineLayout, pAllocator, "VUID-vkDestroyPipelineLayout-pipelineLayout-00299", "VUID-vkDestroyPipelineLayout-pipelineLayout-00300");
-
- return skip;
-}
-
-void ObjectLifetimes::PreCallRecordDestroyPipelineLayout(
- VkDevice device,
- VkPipelineLayout pipelineLayout,
- const VkAllocationCallbacks* pAllocator) {
- RecordDestroyObject(device, pipelineLayout, kVulkanObjectTypePipelineLayout);
-
-}
-
-bool ObjectLifetimes::PreCallValidateCreateSampler(
- VkDevice device,
- const VkSamplerCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSampler* pSampler) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkCreateSampler-device-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordCreateSampler(
- VkDevice device,
- const VkSamplerCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSampler* pSampler,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- CreateObject(device, *pSampler, kVulkanObjectTypeSampler, pAllocator);
-
-}
-
-bool ObjectLifetimes::PreCallValidateDestroySampler(
- VkDevice device,
- VkSampler sampler,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkDestroySampler-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, sampler, kVulkanObjectTypeSampler, true, "VUID-vkDestroySampler-sampler-parameter", "VUID-vkDestroySampler-sampler-parent");
- skip |= ValidateDestroyObject(device, sampler, kVulkanObjectTypeSampler, pAllocator, "VUID-vkDestroySampler-sampler-01083", "VUID-vkDestroySampler-sampler-01084");
-
- return skip;
-}
-
-void ObjectLifetimes::PreCallRecordDestroySampler(
- VkDevice device,
- VkSampler sampler,
- const VkAllocationCallbacks* pAllocator) {
- RecordDestroyObject(device, sampler, kVulkanObjectTypeSampler);
-
-}
-
-bool ObjectLifetimes::PreCallValidateDestroyDescriptorSetLayout(
- VkDevice device,
- VkDescriptorSetLayout descriptorSetLayout,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkDestroyDescriptorSetLayout-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, descriptorSetLayout, kVulkanObjectTypeDescriptorSetLayout, true, "VUID-vkDestroyDescriptorSetLayout-descriptorSetLayout-parameter", "VUID-vkDestroyDescriptorSetLayout-descriptorSetLayout-parent");
- skip |= ValidateDestroyObject(device, descriptorSetLayout, kVulkanObjectTypeDescriptorSetLayout, pAllocator, "VUID-vkDestroyDescriptorSetLayout-descriptorSetLayout-00284", "VUID-vkDestroyDescriptorSetLayout-descriptorSetLayout-00285");
-
- return skip;
-}
-
-void ObjectLifetimes::PreCallRecordDestroyDescriptorSetLayout(
- VkDevice device,
- VkDescriptorSetLayout descriptorSetLayout,
- const VkAllocationCallbacks* pAllocator) {
- RecordDestroyObject(device, descriptorSetLayout, kVulkanObjectTypeDescriptorSetLayout);
-
-}
-
-bool ObjectLifetimes::PreCallValidateCreateDescriptorPool(
- VkDevice device,
- const VkDescriptorPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorPool* pDescriptorPool) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkCreateDescriptorPool-device-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordCreateDescriptorPool(
- VkDevice device,
- const VkDescriptorPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorPool* pDescriptorPool,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- CreateObject(device, *pDescriptorPool, kVulkanObjectTypeDescriptorPool, pAllocator);
-
-}
-
-bool ObjectLifetimes::PreCallValidateDestroyFramebuffer(
- VkDevice device,
- VkFramebuffer framebuffer,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkDestroyFramebuffer-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, framebuffer, kVulkanObjectTypeFramebuffer, true, "VUID-vkDestroyFramebuffer-framebuffer-parameter", "VUID-vkDestroyFramebuffer-framebuffer-parent");
- skip |= ValidateDestroyObject(device, framebuffer, kVulkanObjectTypeFramebuffer, pAllocator, "VUID-vkDestroyFramebuffer-framebuffer-00893", "VUID-vkDestroyFramebuffer-framebuffer-00894");
-
- return skip;
-}
-
-void ObjectLifetimes::PreCallRecordDestroyFramebuffer(
- VkDevice device,
- VkFramebuffer framebuffer,
- const VkAllocationCallbacks* pAllocator) {
- RecordDestroyObject(device, framebuffer, kVulkanObjectTypeFramebuffer);
-
-}
-
-bool ObjectLifetimes::PreCallValidateCreateRenderPass(
- VkDevice device,
- const VkRenderPassCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkRenderPass* pRenderPass) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkCreateRenderPass-device-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordCreateRenderPass(
- VkDevice device,
- const VkRenderPassCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkRenderPass* pRenderPass,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- CreateObject(device, *pRenderPass, kVulkanObjectTypeRenderPass, pAllocator);
-
-}
-
-bool ObjectLifetimes::PreCallValidateDestroyRenderPass(
- VkDevice device,
- VkRenderPass renderPass,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkDestroyRenderPass-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, renderPass, kVulkanObjectTypeRenderPass, true, "VUID-vkDestroyRenderPass-renderPass-parameter", "VUID-vkDestroyRenderPass-renderPass-parent");
- skip |= ValidateDestroyObject(device, renderPass, kVulkanObjectTypeRenderPass, pAllocator, "VUID-vkDestroyRenderPass-renderPass-00874", "VUID-vkDestroyRenderPass-renderPass-00875");
-
- return skip;
-}
-
-void ObjectLifetimes::PreCallRecordDestroyRenderPass(
- VkDevice device,
- VkRenderPass renderPass,
- const VkAllocationCallbacks* pAllocator) {
- RecordDestroyObject(device, renderPass, kVulkanObjectTypeRenderPass);
-
-}
-
-bool ObjectLifetimes::PreCallValidateGetRenderAreaGranularity(
- VkDevice device,
- VkRenderPass renderPass,
- VkExtent2D* pGranularity) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetRenderAreaGranularity-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, renderPass, kVulkanObjectTypeRenderPass, false, "VUID-vkGetRenderAreaGranularity-renderPass-parameter", "VUID-vkGetRenderAreaGranularity-renderPass-parent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCreateCommandPool(
- VkDevice device,
- const VkCommandPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkCommandPool* pCommandPool) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkCreateCommandPool-device-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordCreateCommandPool(
- VkDevice device,
- const VkCommandPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkCommandPool* pCommandPool,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- CreateObject(device, *pCommandPool, kVulkanObjectTypeCommandPool, pAllocator);
-
-}
-
-bool ObjectLifetimes::PreCallValidateResetCommandPool(
- VkDevice device,
- VkCommandPool commandPool,
- VkCommandPoolResetFlags flags) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkResetCommandPool-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, commandPool, kVulkanObjectTypeCommandPool, false, "VUID-vkResetCommandPool-commandPool-parameter", "VUID-vkResetCommandPool-commandPool-parent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateEndCommandBuffer(
- VkCommandBuffer commandBuffer) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkEndCommandBuffer-commandBuffer-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateResetCommandBuffer(
- VkCommandBuffer commandBuffer,
- VkCommandBufferResetFlags flags) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkResetCommandBuffer-commandBuffer-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdBindPipeline(
- VkCommandBuffer commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipeline pipeline) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdBindPipeline-commandBuffer-parameter", "VUID-vkCmdBindPipeline-commonparent");
- skip |= ValidateObject(commandBuffer, pipeline, kVulkanObjectTypePipeline, false, "VUID-vkCmdBindPipeline-pipeline-parameter", "VUID-vkCmdBindPipeline-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdSetViewport(
- VkCommandBuffer commandBuffer,
- uint32_t firstViewport,
- uint32_t viewportCount,
- const VkViewport* pViewports) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdSetViewport-commandBuffer-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdSetScissor(
- VkCommandBuffer commandBuffer,
- uint32_t firstScissor,
- uint32_t scissorCount,
- const VkRect2D* pScissors) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdSetScissor-commandBuffer-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdSetLineWidth(
- VkCommandBuffer commandBuffer,
- float lineWidth) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdSetLineWidth-commandBuffer-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdSetDepthBias(
- VkCommandBuffer commandBuffer,
- float depthBiasConstantFactor,
- float depthBiasClamp,
- float depthBiasSlopeFactor) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdSetDepthBias-commandBuffer-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdSetBlendConstants(
- VkCommandBuffer commandBuffer,
- const float blendConstants[4]) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdSetBlendConstants-commandBuffer-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdSetDepthBounds(
- VkCommandBuffer commandBuffer,
- float minDepthBounds,
- float maxDepthBounds) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdSetDepthBounds-commandBuffer-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdSetStencilCompareMask(
- VkCommandBuffer commandBuffer,
- VkStencilFaceFlags faceMask,
- uint32_t compareMask) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdSetStencilCompareMask-commandBuffer-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdSetStencilWriteMask(
- VkCommandBuffer commandBuffer,
- VkStencilFaceFlags faceMask,
- uint32_t writeMask) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdSetStencilWriteMask-commandBuffer-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdSetStencilReference(
- VkCommandBuffer commandBuffer,
- VkStencilFaceFlags faceMask,
- uint32_t reference) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdSetStencilReference-commandBuffer-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdBindDescriptorSets(
- VkCommandBuffer commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipelineLayout layout,
- uint32_t firstSet,
- uint32_t descriptorSetCount,
- const VkDescriptorSet* pDescriptorSets,
- uint32_t dynamicOffsetCount,
- const uint32_t* pDynamicOffsets) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdBindDescriptorSets-commandBuffer-parameter", "VUID-vkCmdBindDescriptorSets-commonparent");
- skip |= ValidateObject(commandBuffer, layout, kVulkanObjectTypePipelineLayout, false, "VUID-vkCmdBindDescriptorSets-layout-parameter", "VUID-vkCmdBindDescriptorSets-commonparent");
- if (pDescriptorSets) {
- for (uint32_t index0 = 0; index0 < descriptorSetCount; ++index0) {
- skip |= ValidateObject(commandBuffer, pDescriptorSets[index0], kVulkanObjectTypeDescriptorSet, false, "VUID-vkCmdBindDescriptorSets-pDescriptorSets-parameter", "VUID-vkCmdBindDescriptorSets-commonparent");
- }
- }
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdBindIndexBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkIndexType indexType) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdBindIndexBuffer-commandBuffer-parameter", "VUID-vkCmdBindIndexBuffer-commonparent");
- skip |= ValidateObject(commandBuffer, buffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdBindIndexBuffer-buffer-parameter", "VUID-vkCmdBindIndexBuffer-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdBindVertexBuffers(
- VkCommandBuffer commandBuffer,
- uint32_t firstBinding,
- uint32_t bindingCount,
- const VkBuffer* pBuffers,
- const VkDeviceSize* pOffsets) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdBindVertexBuffers-commandBuffer-parameter", "VUID-vkCmdBindVertexBuffers-commonparent");
- if (pBuffers) {
- for (uint32_t index0 = 0; index0 < bindingCount; ++index0) {
- skip |= ValidateObject(commandBuffer, pBuffers[index0], kVulkanObjectTypeBuffer, false, "VUID-vkCmdBindVertexBuffers-pBuffers-parameter", "VUID-vkCmdBindVertexBuffers-commonparent");
- }
- }
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdDraw(
- VkCommandBuffer commandBuffer,
- uint32_t vertexCount,
- uint32_t instanceCount,
- uint32_t firstVertex,
- uint32_t firstInstance) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdDraw-commandBuffer-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdDrawIndexed(
- VkCommandBuffer commandBuffer,
- uint32_t indexCount,
- uint32_t instanceCount,
- uint32_t firstIndex,
- int32_t vertexOffset,
- uint32_t firstInstance) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdDrawIndexed-commandBuffer-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdDrawIndirect(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- uint32_t drawCount,
- uint32_t stride) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdDrawIndirect-commandBuffer-parameter", "VUID-vkCmdDrawIndirect-commonparent");
- skip |= ValidateObject(commandBuffer, buffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdDrawIndirect-buffer-parameter", "VUID-vkCmdDrawIndirect-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdDrawIndexedIndirect(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- uint32_t drawCount,
- uint32_t stride) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdDrawIndexedIndirect-commandBuffer-parameter", "VUID-vkCmdDrawIndexedIndirect-commonparent");
- skip |= ValidateObject(commandBuffer, buffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdDrawIndexedIndirect-buffer-parameter", "VUID-vkCmdDrawIndexedIndirect-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdDispatch(
- VkCommandBuffer commandBuffer,
- uint32_t groupCountX,
- uint32_t groupCountY,
- uint32_t groupCountZ) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdDispatch-commandBuffer-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdDispatchIndirect(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdDispatchIndirect-commandBuffer-parameter", "VUID-vkCmdDispatchIndirect-commonparent");
- skip |= ValidateObject(commandBuffer, buffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdDispatchIndirect-buffer-parameter", "VUID-vkCmdDispatchIndirect-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdCopyBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer srcBuffer,
- VkBuffer dstBuffer,
- uint32_t regionCount,
- const VkBufferCopy* pRegions) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdCopyBuffer-commandBuffer-parameter", "VUID-vkCmdCopyBuffer-commonparent");
- skip |= ValidateObject(commandBuffer, srcBuffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdCopyBuffer-srcBuffer-parameter", "VUID-vkCmdCopyBuffer-commonparent");
- skip |= ValidateObject(commandBuffer, dstBuffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdCopyBuffer-dstBuffer-parameter", "VUID-vkCmdCopyBuffer-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdCopyImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkImageCopy* pRegions) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdCopyImage-commandBuffer-parameter", "VUID-vkCmdCopyImage-commonparent");
- skip |= ValidateObject(commandBuffer, srcImage, kVulkanObjectTypeImage, false, "VUID-vkCmdCopyImage-srcImage-parameter", "VUID-vkCmdCopyImage-commonparent");
- skip |= ValidateObject(commandBuffer, dstImage, kVulkanObjectTypeImage, false, "VUID-vkCmdCopyImage-dstImage-parameter", "VUID-vkCmdCopyImage-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdBlitImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkImageBlit* pRegions,
- VkFilter filter) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdBlitImage-commandBuffer-parameter", "VUID-vkCmdBlitImage-commonparent");
- skip |= ValidateObject(commandBuffer, srcImage, kVulkanObjectTypeImage, false, "VUID-vkCmdBlitImage-srcImage-parameter", "VUID-vkCmdBlitImage-commonparent");
- skip |= ValidateObject(commandBuffer, dstImage, kVulkanObjectTypeImage, false, "VUID-vkCmdBlitImage-dstImage-parameter", "VUID-vkCmdBlitImage-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdCopyBufferToImage(
- VkCommandBuffer commandBuffer,
- VkBuffer srcBuffer,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkBufferImageCopy* pRegions) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdCopyBufferToImage-commandBuffer-parameter", "VUID-vkCmdCopyBufferToImage-commonparent");
- skip |= ValidateObject(commandBuffer, srcBuffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdCopyBufferToImage-srcBuffer-parameter", "VUID-vkCmdCopyBufferToImage-commonparent");
- skip |= ValidateObject(commandBuffer, dstImage, kVulkanObjectTypeImage, false, "VUID-vkCmdCopyBufferToImage-dstImage-parameter", "VUID-vkCmdCopyBufferToImage-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdCopyImageToBuffer(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkBuffer dstBuffer,
- uint32_t regionCount,
- const VkBufferImageCopy* pRegions) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdCopyImageToBuffer-commandBuffer-parameter", "VUID-vkCmdCopyImageToBuffer-commonparent");
- skip |= ValidateObject(commandBuffer, srcImage, kVulkanObjectTypeImage, false, "VUID-vkCmdCopyImageToBuffer-srcImage-parameter", "VUID-vkCmdCopyImageToBuffer-commonparent");
- skip |= ValidateObject(commandBuffer, dstBuffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdCopyImageToBuffer-dstBuffer-parameter", "VUID-vkCmdCopyImageToBuffer-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdUpdateBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize dataSize,
- const void* pData) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdUpdateBuffer-commandBuffer-parameter", "VUID-vkCmdUpdateBuffer-commonparent");
- skip |= ValidateObject(commandBuffer, dstBuffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdUpdateBuffer-dstBuffer-parameter", "VUID-vkCmdUpdateBuffer-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdFillBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize size,
- uint32_t data) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdFillBuffer-commandBuffer-parameter", "VUID-vkCmdFillBuffer-commonparent");
- skip |= ValidateObject(commandBuffer, dstBuffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdFillBuffer-dstBuffer-parameter", "VUID-vkCmdFillBuffer-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdClearColorImage(
- VkCommandBuffer commandBuffer,
- VkImage image,
- VkImageLayout imageLayout,
- const VkClearColorValue* pColor,
- uint32_t rangeCount,
- const VkImageSubresourceRange* pRanges) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdClearColorImage-commandBuffer-parameter", "VUID-vkCmdClearColorImage-commonparent");
- skip |= ValidateObject(commandBuffer, image, kVulkanObjectTypeImage, false, "VUID-vkCmdClearColorImage-image-parameter", "VUID-vkCmdClearColorImage-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdClearDepthStencilImage(
- VkCommandBuffer commandBuffer,
- VkImage image,
- VkImageLayout imageLayout,
- const VkClearDepthStencilValue* pDepthStencil,
- uint32_t rangeCount,
- const VkImageSubresourceRange* pRanges) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdClearDepthStencilImage-commandBuffer-parameter", "VUID-vkCmdClearDepthStencilImage-commonparent");
- skip |= ValidateObject(commandBuffer, image, kVulkanObjectTypeImage, false, "VUID-vkCmdClearDepthStencilImage-image-parameter", "VUID-vkCmdClearDepthStencilImage-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdClearAttachments(
- VkCommandBuffer commandBuffer,
- uint32_t attachmentCount,
- const VkClearAttachment* pAttachments,
- uint32_t rectCount,
- const VkClearRect* pRects) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdClearAttachments-commandBuffer-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdResolveImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkImageResolve* pRegions) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdResolveImage-commandBuffer-parameter", "VUID-vkCmdResolveImage-commonparent");
- skip |= ValidateObject(commandBuffer, srcImage, kVulkanObjectTypeImage, false, "VUID-vkCmdResolveImage-srcImage-parameter", "VUID-vkCmdResolveImage-commonparent");
- skip |= ValidateObject(commandBuffer, dstImage, kVulkanObjectTypeImage, false, "VUID-vkCmdResolveImage-dstImage-parameter", "VUID-vkCmdResolveImage-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdSetEvent(
- VkCommandBuffer commandBuffer,
- VkEvent event,
- VkPipelineStageFlags stageMask) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdSetEvent-commandBuffer-parameter", "VUID-vkCmdSetEvent-commonparent");
- skip |= ValidateObject(commandBuffer, event, kVulkanObjectTypeEvent, false, "VUID-vkCmdSetEvent-event-parameter", "VUID-vkCmdSetEvent-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdResetEvent(
- VkCommandBuffer commandBuffer,
- VkEvent event,
- VkPipelineStageFlags stageMask) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdResetEvent-commandBuffer-parameter", "VUID-vkCmdResetEvent-commonparent");
- skip |= ValidateObject(commandBuffer, event, kVulkanObjectTypeEvent, false, "VUID-vkCmdResetEvent-event-parameter", "VUID-vkCmdResetEvent-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdWaitEvents(
- VkCommandBuffer commandBuffer,
- uint32_t eventCount,
- const VkEvent* pEvents,
- VkPipelineStageFlags srcStageMask,
- VkPipelineStageFlags dstStageMask,
- uint32_t memoryBarrierCount,
- const VkMemoryBarrier* pMemoryBarriers,
- uint32_t bufferMemoryBarrierCount,
- const VkBufferMemoryBarrier* pBufferMemoryBarriers,
- uint32_t imageMemoryBarrierCount,
- const VkImageMemoryBarrier* pImageMemoryBarriers) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdWaitEvents-commandBuffer-parameter", "VUID-vkCmdWaitEvents-commonparent");
- if (pEvents) {
- for (uint32_t index0 = 0; index0 < eventCount; ++index0) {
- skip |= ValidateObject(commandBuffer, pEvents[index0], kVulkanObjectTypeEvent, false, "VUID-vkCmdWaitEvents-pEvents-parameter", "VUID-vkCmdWaitEvents-commonparent");
- }
- }
- if (pBufferMemoryBarriers) {
- for (uint32_t index0 = 0; index0 < bufferMemoryBarrierCount; ++index0) {
- skip |= ValidateObject(commandBuffer, pBufferMemoryBarriers[index0].buffer, kVulkanObjectTypeBuffer, false, "VUID-VkBufferMemoryBarrier-buffer-parameter", kVUIDUndefined);
- }
- }
- if (pImageMemoryBarriers) {
- for (uint32_t index0 = 0; index0 < imageMemoryBarrierCount; ++index0) {
- skip |= ValidateObject(commandBuffer, pImageMemoryBarriers[index0].image, kVulkanObjectTypeImage, false, "VUID-VkImageMemoryBarrier-image-parameter", kVUIDUndefined);
- }
- }
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdPipelineBarrier(
- VkCommandBuffer commandBuffer,
- VkPipelineStageFlags srcStageMask,
- VkPipelineStageFlags dstStageMask,
- VkDependencyFlags dependencyFlags,
- uint32_t memoryBarrierCount,
- const VkMemoryBarrier* pMemoryBarriers,
- uint32_t bufferMemoryBarrierCount,
- const VkBufferMemoryBarrier* pBufferMemoryBarriers,
- uint32_t imageMemoryBarrierCount,
- const VkImageMemoryBarrier* pImageMemoryBarriers) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdPipelineBarrier-commandBuffer-parameter", kVUIDUndefined);
- if (pBufferMemoryBarriers) {
- for (uint32_t index0 = 0; index0 < bufferMemoryBarrierCount; ++index0) {
- skip |= ValidateObject(commandBuffer, pBufferMemoryBarriers[index0].buffer, kVulkanObjectTypeBuffer, false, "VUID-VkBufferMemoryBarrier-buffer-parameter", kVUIDUndefined);
- }
- }
- if (pImageMemoryBarriers) {
- for (uint32_t index0 = 0; index0 < imageMemoryBarrierCount; ++index0) {
- skip |= ValidateObject(commandBuffer, pImageMemoryBarriers[index0].image, kVulkanObjectTypeImage, false, "VUID-VkImageMemoryBarrier-image-parameter", kVUIDUndefined);
- }
- }
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdBeginQuery(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query,
- VkQueryControlFlags flags) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdBeginQuery-commandBuffer-parameter", "VUID-vkCmdBeginQuery-commonparent");
- skip |= ValidateObject(commandBuffer, queryPool, kVulkanObjectTypeQueryPool, false, "VUID-vkCmdBeginQuery-queryPool-parameter", "VUID-vkCmdBeginQuery-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdEndQuery(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdEndQuery-commandBuffer-parameter", "VUID-vkCmdEndQuery-commonparent");
- skip |= ValidateObject(commandBuffer, queryPool, kVulkanObjectTypeQueryPool, false, "VUID-vkCmdEndQuery-queryPool-parameter", "VUID-vkCmdEndQuery-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdResetQueryPool(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdResetQueryPool-commandBuffer-parameter", "VUID-vkCmdResetQueryPool-commonparent");
- skip |= ValidateObject(commandBuffer, queryPool, kVulkanObjectTypeQueryPool, false, "VUID-vkCmdResetQueryPool-queryPool-parameter", "VUID-vkCmdResetQueryPool-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdWriteTimestamp(
- VkCommandBuffer commandBuffer,
- VkPipelineStageFlagBits pipelineStage,
- VkQueryPool queryPool,
- uint32_t query) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdWriteTimestamp-commandBuffer-parameter", "VUID-vkCmdWriteTimestamp-commonparent");
- skip |= ValidateObject(commandBuffer, queryPool, kVulkanObjectTypeQueryPool, false, "VUID-vkCmdWriteTimestamp-queryPool-parameter", "VUID-vkCmdWriteTimestamp-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdCopyQueryPoolResults(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize stride,
- VkQueryResultFlags flags) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdCopyQueryPoolResults-commandBuffer-parameter", "VUID-vkCmdCopyQueryPoolResults-commonparent");
- skip |= ValidateObject(commandBuffer, queryPool, kVulkanObjectTypeQueryPool, false, "VUID-vkCmdCopyQueryPoolResults-queryPool-parameter", "VUID-vkCmdCopyQueryPoolResults-commonparent");
- skip |= ValidateObject(commandBuffer, dstBuffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdCopyQueryPoolResults-dstBuffer-parameter", "VUID-vkCmdCopyQueryPoolResults-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdPushConstants(
- VkCommandBuffer commandBuffer,
- VkPipelineLayout layout,
- VkShaderStageFlags stageFlags,
- uint32_t offset,
- uint32_t size,
- const void* pValues) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdPushConstants-commandBuffer-parameter", "VUID-vkCmdPushConstants-commonparent");
- skip |= ValidateObject(commandBuffer, layout, kVulkanObjectTypePipelineLayout, false, "VUID-vkCmdPushConstants-layout-parameter", "VUID-vkCmdPushConstants-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdBeginRenderPass(
- VkCommandBuffer commandBuffer,
- const VkRenderPassBeginInfo* pRenderPassBegin,
- VkSubpassContents contents) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdBeginRenderPass-commandBuffer-parameter", kVUIDUndefined);
- if (pRenderPassBegin) {
- skip |= ValidateObject(commandBuffer, pRenderPassBegin->renderPass, kVulkanObjectTypeRenderPass, false, "VUID-VkRenderPassBeginInfo-renderPass-parameter", "VUID-VkRenderPassBeginInfo-commonparent");
- skip |= ValidateObject(commandBuffer, pRenderPassBegin->framebuffer, kVulkanObjectTypeFramebuffer, false, "VUID-VkRenderPassBeginInfo-framebuffer-parameter", "VUID-VkRenderPassBeginInfo-commonparent");
- }
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdNextSubpass(
- VkCommandBuffer commandBuffer,
- VkSubpassContents contents) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdNextSubpass-commandBuffer-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdEndRenderPass(
- VkCommandBuffer commandBuffer) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdEndRenderPass-commandBuffer-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdExecuteCommands(
- VkCommandBuffer commandBuffer,
- uint32_t commandBufferCount,
- const VkCommandBuffer* pCommandBuffers) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdExecuteCommands-commandBuffer-parameter", "VUID-vkCmdExecuteCommands-commonparent");
- if (pCommandBuffers) {
- for (uint32_t index0 = 0; index0 < commandBufferCount; ++index0) {
- skip |= ValidateObject(commandBuffer, pCommandBuffers[index0], kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdExecuteCommands-pCommandBuffers-parameter", "VUID-vkCmdExecuteCommands-commonparent");
- }
- }
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateBindBufferMemory2(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindBufferMemoryInfo* pBindInfos) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkBindBufferMemory2-device-parameter", kVUIDUndefined);
- if (pBindInfos) {
- for (uint32_t index0 = 0; index0 < bindInfoCount; ++index0) {
- skip |= ValidateObject(device, pBindInfos[index0].buffer, kVulkanObjectTypeBuffer, false, "VUID-VkBindBufferMemoryInfo-buffer-parameter", "VUID-VkBindBufferMemoryInfo-commonparent");
- skip |= ValidateObject(device, pBindInfos[index0].memory, kVulkanObjectTypeDeviceMemory, false, "VUID-VkBindBufferMemoryInfo-memory-parameter", "VUID-VkBindBufferMemoryInfo-commonparent");
- }
- }
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateBindImageMemory2(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindImageMemoryInfo* pBindInfos) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkBindImageMemory2-device-parameter", kVUIDUndefined);
- if (pBindInfos) {
- for (uint32_t index0 = 0; index0 < bindInfoCount; ++index0) {
- skip |= ValidateObject(device, pBindInfos[index0].image, kVulkanObjectTypeImage, false, "VUID-VkBindImageMemoryInfo-image-parameter", "VUID-VkBindImageMemoryInfo-commonparent");
- skip |= ValidateObject(device, pBindInfos[index0].memory, kVulkanObjectTypeDeviceMemory, true, kVUIDUndefined, "VUID-VkBindImageMemoryInfo-commonparent");
- }
- }
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetDeviceGroupPeerMemoryFeatures(
- VkDevice device,
- uint32_t heapIndex,
- uint32_t localDeviceIndex,
- uint32_t remoteDeviceIndex,
- VkPeerMemoryFeatureFlags* pPeerMemoryFeatures) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetDeviceGroupPeerMemoryFeatures-device-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdSetDeviceMask(
- VkCommandBuffer commandBuffer,
- uint32_t deviceMask) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdSetDeviceMask-commandBuffer-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdDispatchBase(
- VkCommandBuffer commandBuffer,
- uint32_t baseGroupX,
- uint32_t baseGroupY,
- uint32_t baseGroupZ,
- uint32_t groupCountX,
- uint32_t groupCountY,
- uint32_t groupCountZ) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdDispatchBase-commandBuffer-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateEnumeratePhysicalDeviceGroups(
- VkInstance instance,
- uint32_t* pPhysicalDeviceGroupCount,
- VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties) {
- bool skip = false;
- skip |= ValidateObject(instance, instance, kVulkanObjectTypeInstance, false, "VUID-vkEnumeratePhysicalDeviceGroups-instance-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetImageMemoryRequirements2(
- VkDevice device,
- const VkImageMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetImageMemoryRequirements2-device-parameter", kVUIDUndefined);
- if (pInfo) {
- skip |= ValidateObject(device, pInfo->image, kVulkanObjectTypeImage, false, "VUID-VkImageMemoryRequirementsInfo2-image-parameter", kVUIDUndefined);
- }
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetBufferMemoryRequirements2(
- VkDevice device,
- const VkBufferMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetBufferMemoryRequirements2-device-parameter", kVUIDUndefined);
- if (pInfo) {
- skip |= ValidateObject(device, pInfo->buffer, kVulkanObjectTypeBuffer, false, "VUID-VkBufferMemoryRequirementsInfo2-buffer-parameter", kVUIDUndefined);
- }
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetImageSparseMemoryRequirements2(
- VkDevice device,
- const VkImageSparseMemoryRequirementsInfo2* pInfo,
- uint32_t* pSparseMemoryRequirementCount,
- VkSparseImageMemoryRequirements2* pSparseMemoryRequirements) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetImageSparseMemoryRequirements2-device-parameter", kVUIDUndefined);
- if (pInfo) {
- skip |= ValidateObject(device, pInfo->image, kVulkanObjectTypeImage, false, "VUID-VkImageSparseMemoryRequirementsInfo2-image-parameter", kVUIDUndefined);
- }
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceFeatures2(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceFeatures2* pFeatures) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceFeatures2-physicalDevice-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceProperties2(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceProperties2* pProperties) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceProperties2-physicalDevice-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceFormatProperties2(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkFormatProperties2* pFormatProperties) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceFormatProperties2-physicalDevice-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceImageFormatProperties2(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
- VkImageFormatProperties2* pImageFormatProperties) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceImageFormatProperties2-physicalDevice-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceMemoryProperties2(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceMemoryProperties2* pMemoryProperties) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceMemoryProperties2-physicalDevice-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceSparseImageFormatProperties2(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo,
- uint32_t* pPropertyCount,
- VkSparseImageFormatProperties2* pProperties) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceSparseImageFormatProperties2-physicalDevice-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateTrimCommandPool(
- VkDevice device,
- VkCommandPool commandPool,
- VkCommandPoolTrimFlags flags) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkTrimCommandPool-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, commandPool, kVulkanObjectTypeCommandPool, false, "VUID-vkTrimCommandPool-commandPool-parameter", "VUID-vkTrimCommandPool-commandPool-parent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCreateSamplerYcbcrConversion(
- VkDevice device,
- const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSamplerYcbcrConversion* pYcbcrConversion) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkCreateSamplerYcbcrConversion-device-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordCreateSamplerYcbcrConversion(
- VkDevice device,
- const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSamplerYcbcrConversion* pYcbcrConversion,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- CreateObject(device, *pYcbcrConversion, kVulkanObjectTypeSamplerYcbcrConversion, pAllocator);
-
-}
-
-bool ObjectLifetimes::PreCallValidateDestroySamplerYcbcrConversion(
- VkDevice device,
- VkSamplerYcbcrConversion ycbcrConversion,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkDestroySamplerYcbcrConversion-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, ycbcrConversion, kVulkanObjectTypeSamplerYcbcrConversion, true, "VUID-vkDestroySamplerYcbcrConversion-ycbcrConversion-parameter", "VUID-vkDestroySamplerYcbcrConversion-ycbcrConversion-parent");
- skip |= ValidateDestroyObject(device, ycbcrConversion, kVulkanObjectTypeSamplerYcbcrConversion, pAllocator, kVUIDUndefined, kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PreCallRecordDestroySamplerYcbcrConversion(
- VkDevice device,
- VkSamplerYcbcrConversion ycbcrConversion,
- const VkAllocationCallbacks* pAllocator) {
- RecordDestroyObject(device, ycbcrConversion, kVulkanObjectTypeSamplerYcbcrConversion);
-
-}
-
-bool ObjectLifetimes::PreCallValidateCreateDescriptorUpdateTemplate(
- VkDevice device,
- const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkCreateDescriptorUpdateTemplate-device-parameter", kVUIDUndefined);
- if (pCreateInfo) {
- skip |= ValidateObject(device, pCreateInfo->descriptorSetLayout, kVulkanObjectTypeDescriptorSetLayout, true, "VUID-VkDescriptorUpdateTemplateCreateInfo-descriptorSetLayout-parameter", "VUID-VkDescriptorUpdateTemplateCreateInfo-commonparent");
- skip |= ValidateObject(device, pCreateInfo->pipelineLayout, kVulkanObjectTypePipelineLayout, true, kVUIDUndefined, "VUID-VkDescriptorUpdateTemplateCreateInfo-commonparent");
- }
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordCreateDescriptorUpdateTemplate(
- VkDevice device,
- const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- CreateObject(device, *pDescriptorUpdateTemplate, kVulkanObjectTypeDescriptorUpdateTemplate, pAllocator);
-
-}
-
-bool ObjectLifetimes::PreCallValidateDestroyDescriptorUpdateTemplate(
- VkDevice device,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkDestroyDescriptorUpdateTemplate-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, descriptorUpdateTemplate, kVulkanObjectTypeDescriptorUpdateTemplate, true, "VUID-vkDestroyDescriptorUpdateTemplate-descriptorUpdateTemplate-parameter", "VUID-vkDestroyDescriptorUpdateTemplate-descriptorUpdateTemplate-parent");
- skip |= ValidateDestroyObject(device, descriptorUpdateTemplate, kVulkanObjectTypeDescriptorUpdateTemplate, pAllocator, "VUID-vkDestroyDescriptorUpdateTemplate-descriptorSetLayout-00356", "VUID-vkDestroyDescriptorUpdateTemplate-descriptorSetLayout-00357");
-
- return skip;
-}
-
-void ObjectLifetimes::PreCallRecordDestroyDescriptorUpdateTemplate(
- VkDevice device,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const VkAllocationCallbacks* pAllocator) {
- RecordDestroyObject(device, descriptorUpdateTemplate, kVulkanObjectTypeDescriptorUpdateTemplate);
-
-}
-
-bool ObjectLifetimes::PreCallValidateUpdateDescriptorSetWithTemplate(
- VkDevice device,
- VkDescriptorSet descriptorSet,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const void* pData) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkUpdateDescriptorSetWithTemplate-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, descriptorSet, kVulkanObjectTypeDescriptorSet, false, "VUID-vkUpdateDescriptorSetWithTemplate-descriptorSet-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, descriptorUpdateTemplate, kVulkanObjectTypeDescriptorUpdateTemplate, false, "VUID-vkUpdateDescriptorSetWithTemplate-descriptorUpdateTemplate-parameter", "VUID-vkUpdateDescriptorSetWithTemplate-descriptorUpdateTemplate-parent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceExternalBufferProperties(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo,
- VkExternalBufferProperties* pExternalBufferProperties) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceExternalBufferProperties-physicalDevice-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceExternalFenceProperties(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo,
- VkExternalFenceProperties* pExternalFenceProperties) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceExternalFenceProperties-physicalDevice-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceExternalSemaphoreProperties(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
- VkExternalSemaphoreProperties* pExternalSemaphoreProperties) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceExternalSemaphoreProperties-physicalDevice-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateDestroySurfaceKHR(
- VkInstance instance,
- VkSurfaceKHR surface,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- skip |= ValidateObject(instance, instance, kVulkanObjectTypeInstance, false, "VUID-vkDestroySurfaceKHR-instance-parameter", kVUIDUndefined);
- skip |= ValidateObject(instance, surface, kVulkanObjectTypeSurfaceKHR, true, "VUID-vkDestroySurfaceKHR-surface-parameter", "VUID-vkDestroySurfaceKHR-surface-parent");
- skip |= ValidateDestroyObject(instance, surface, kVulkanObjectTypeSurfaceKHR, pAllocator, "VUID-vkDestroySurfaceKHR-surface-01267", "VUID-vkDestroySurfaceKHR-surface-01268");
-
- return skip;
-}
-
-void ObjectLifetimes::PreCallRecordDestroySurfaceKHR(
- VkInstance instance,
- VkSurfaceKHR surface,
- const VkAllocationCallbacks* pAllocator) {
- RecordDestroyObject(instance, surface, kVulkanObjectTypeSurfaceKHR);
-
-}
-
-bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceSurfaceSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex,
- VkSurfaceKHR surface,
- VkBool32* pSupported) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceSurfaceSupportKHR-physicalDevice-parameter", "VUID-vkGetPhysicalDeviceSurfaceSupportKHR-commonparent");
- skip |= ValidateObject(physicalDevice, surface, kVulkanObjectTypeSurfaceKHR, false, "VUID-vkGetPhysicalDeviceSurfaceSupportKHR-surface-parameter", "VUID-vkGetPhysicalDeviceSurfaceSupportKHR-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceSurfaceCapabilitiesKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- VkSurfaceCapabilitiesKHR* pSurfaceCapabilities) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceSurfaceCapabilitiesKHR-physicalDevice-parameter", "VUID-vkGetPhysicalDeviceSurfaceCapabilitiesKHR-commonparent");
- skip |= ValidateObject(physicalDevice, surface, kVulkanObjectTypeSurfaceKHR, false, "VUID-vkGetPhysicalDeviceSurfaceCapabilitiesKHR-surface-parameter", "VUID-vkGetPhysicalDeviceSurfaceCapabilitiesKHR-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceSurfaceFormatsKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- uint32_t* pSurfaceFormatCount,
- VkSurfaceFormatKHR* pSurfaceFormats) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceSurfaceFormatsKHR-physicalDevice-parameter", "VUID-vkGetPhysicalDeviceSurfaceFormatsKHR-commonparent");
- skip |= ValidateObject(physicalDevice, surface, kVulkanObjectTypeSurfaceKHR, false, "VUID-vkGetPhysicalDeviceSurfaceFormatsKHR-surface-parameter", "VUID-vkGetPhysicalDeviceSurfaceFormatsKHR-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceSurfacePresentModesKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- uint32_t* pPresentModeCount,
- VkPresentModeKHR* pPresentModes) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceSurfacePresentModesKHR-physicalDevice-parameter", "VUID-vkGetPhysicalDeviceSurfacePresentModesKHR-commonparent");
- skip |= ValidateObject(physicalDevice, surface, kVulkanObjectTypeSurfaceKHR, false, "VUID-vkGetPhysicalDeviceSurfacePresentModesKHR-surface-parameter", "VUID-vkGetPhysicalDeviceSurfacePresentModesKHR-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCreateSwapchainKHR(
- VkDevice device,
- const VkSwapchainCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSwapchainKHR* pSwapchain) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkCreateSwapchainKHR-device-parameter", kVUIDUndefined);
- if (pCreateInfo) {
- skip |= ValidateObject(device, pCreateInfo->surface, kVulkanObjectTypeSurfaceKHR, false, "VUID-VkSwapchainCreateInfoKHR-surface-parameter", "VUID-VkSwapchainCreateInfoKHR-commonparent");
- skip |= ValidateObject(device, pCreateInfo->oldSwapchain, kVulkanObjectTypeSwapchainKHR, true, "VUID-VkSwapchainCreateInfoKHR-oldSwapchain-parameter", "VUID-VkSwapchainCreateInfoKHR-oldSwapchain-parent");
- }
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordCreateSwapchainKHR(
- VkDevice device,
- const VkSwapchainCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSwapchainKHR* pSwapchain,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- CreateObject(device, *pSwapchain, kVulkanObjectTypeSwapchainKHR, pAllocator);
-
-}
-
-bool ObjectLifetimes::PreCallValidateAcquireNextImageKHR(
- VkDevice device,
- VkSwapchainKHR swapchain,
- uint64_t timeout,
- VkSemaphore semaphore,
- VkFence fence,
- uint32_t* pImageIndex) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkAcquireNextImageKHR-device-parameter", "VUID-vkAcquireNextImageKHR-commonparent");
- skip |= ValidateObject(device, swapchain, kVulkanObjectTypeSwapchainKHR, false, "VUID-vkAcquireNextImageKHR-swapchain-parameter", "VUID-vkAcquireNextImageKHR-commonparent");
- skip |= ValidateObject(device, semaphore, kVulkanObjectTypeSemaphore, true, "VUID-vkAcquireNextImageKHR-semaphore-parameter", "VUID-vkAcquireNextImageKHR-semaphore-parent");
- skip |= ValidateObject(device, fence, kVulkanObjectTypeFence, true, "VUID-vkAcquireNextImageKHR-fence-parameter", "VUID-vkAcquireNextImageKHR-fence-parent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateQueuePresentKHR(
- VkQueue queue,
- const VkPresentInfoKHR* pPresentInfo) {
- bool skip = false;
- skip |= ValidateObject(queue, queue, kVulkanObjectTypeQueue, false, "VUID-vkQueuePresentKHR-queue-parameter", kVUIDUndefined);
- if (pPresentInfo) {
- if (pPresentInfo->pWaitSemaphores) {
- for (uint32_t index1 = 0; index1 < pPresentInfo->waitSemaphoreCount; ++index1) {
- skip |= ValidateObject(queue, pPresentInfo->pWaitSemaphores[index1], kVulkanObjectTypeSemaphore, false, "VUID-VkPresentInfoKHR-pWaitSemaphores-parameter", "VUID-VkPresentInfoKHR-commonparent");
- }
- }
- if (pPresentInfo->pSwapchains) {
- for (uint32_t index1 = 0; index1 < pPresentInfo->swapchainCount; ++index1) {
- skip |= ValidateObject(queue, pPresentInfo->pSwapchains[index1], kVulkanObjectTypeSwapchainKHR, false, "VUID-VkPresentInfoKHR-pSwapchains-parameter", "VUID-VkPresentInfoKHR-commonparent");
- }
- }
- }
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetDeviceGroupPresentCapabilitiesKHR(
- VkDevice device,
- VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetDeviceGroupPresentCapabilitiesKHR-device-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetDeviceGroupSurfacePresentModesKHR(
- VkDevice device,
- VkSurfaceKHR surface,
- VkDeviceGroupPresentModeFlagsKHR* pModes) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetDeviceGroupSurfacePresentModesKHR-device-parameter", "VUID-vkGetDeviceGroupSurfacePresentModesKHR-commonparent");
- skip |= ValidateObject(device, surface, kVulkanObjectTypeSurfaceKHR, false, "VUID-vkGetDeviceGroupSurfacePresentModesKHR-surface-parameter", "VUID-vkGetDeviceGroupSurfacePresentModesKHR-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetPhysicalDevicePresentRectanglesKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- uint32_t* pRectCount,
- VkRect2D* pRects) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDevicePresentRectanglesKHR-physicalDevice-parameter", "VUID-vkGetPhysicalDevicePresentRectanglesKHR-commonparent");
- skip |= ValidateObject(physicalDevice, surface, kVulkanObjectTypeSurfaceKHR, false, "VUID-vkGetPhysicalDevicePresentRectanglesKHR-surface-parameter", "VUID-vkGetPhysicalDevicePresentRectanglesKHR-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateAcquireNextImage2KHR(
- VkDevice device,
- const VkAcquireNextImageInfoKHR* pAcquireInfo,
- uint32_t* pImageIndex) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkAcquireNextImage2KHR-device-parameter", kVUIDUndefined);
- if (pAcquireInfo) {
- skip |= ValidateObject(device, pAcquireInfo->swapchain, kVulkanObjectTypeSwapchainKHR, false, "VUID-VkAcquireNextImageInfoKHR-swapchain-parameter", "VUID-VkAcquireNextImageInfoKHR-commonparent");
- skip |= ValidateObject(device, pAcquireInfo->semaphore, kVulkanObjectTypeSemaphore, true, "VUID-VkAcquireNextImageInfoKHR-semaphore-parameter", "VUID-VkAcquireNextImageInfoKHR-commonparent");
- skip |= ValidateObject(device, pAcquireInfo->fence, kVulkanObjectTypeFence, true, "VUID-VkAcquireNextImageInfoKHR-fence-parameter", "VUID-VkAcquireNextImageInfoKHR-commonparent");
- }
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceDisplayPlanePropertiesKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t* pPropertyCount,
- VkDisplayPlanePropertiesKHR* pProperties) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceDisplayPlanePropertiesKHR-physicalDevice-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetDisplayPlaneSupportedDisplaysKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t planeIndex,
- uint32_t* pDisplayCount,
- VkDisplayKHR* pDisplays) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetDisplayPlaneSupportedDisplaysKHR-physicalDevice-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordGetDisplayPlaneSupportedDisplaysKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t planeIndex,
- uint32_t* pDisplayCount,
- VkDisplayKHR* pDisplays,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- if (pDisplays) {
- for (uint32_t index = 0; index < *pDisplayCount; index++) {
- CreateObject(physicalDevice, pDisplays[index], kVulkanObjectTypeDisplayKHR, nullptr);
- }
- }
-
-}
-
-bool ObjectLifetimes::PreCallValidateCreateDisplayModeKHR(
- VkPhysicalDevice physicalDevice,
- VkDisplayKHR display,
- const VkDisplayModeCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDisplayModeKHR* pMode) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkCreateDisplayModeKHR-physicalDevice-parameter", kVUIDUndefined);
- skip |= ValidateObject(physicalDevice, display, kVulkanObjectTypeDisplayKHR, false, "VUID-vkCreateDisplayModeKHR-display-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordCreateDisplayModeKHR(
- VkPhysicalDevice physicalDevice,
- VkDisplayKHR display,
- const VkDisplayModeCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDisplayModeKHR* pMode,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- CreateObject(physicalDevice, *pMode, kVulkanObjectTypeDisplayModeKHR, pAllocator);
-
-}
-
-bool ObjectLifetimes::PreCallValidateGetDisplayPlaneCapabilitiesKHR(
- VkPhysicalDevice physicalDevice,
- VkDisplayModeKHR mode,
- uint32_t planeIndex,
- VkDisplayPlaneCapabilitiesKHR* pCapabilities) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetDisplayPlaneCapabilitiesKHR-physicalDevice-parameter", kVUIDUndefined);
- skip |= ValidateObject(physicalDevice, mode, kVulkanObjectTypeDisplayModeKHR, false, "VUID-vkGetDisplayPlaneCapabilitiesKHR-mode-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCreateDisplayPlaneSurfaceKHR(
- VkInstance instance,
- const VkDisplaySurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- bool skip = false;
- skip |= ValidateObject(instance, instance, kVulkanObjectTypeInstance, false, "VUID-vkCreateDisplayPlaneSurfaceKHR-instance-parameter", kVUIDUndefined);
- if (pCreateInfo) {
- skip |= ValidateObject(instance, pCreateInfo->displayMode, kVulkanObjectTypeDisplayModeKHR, false, "VUID-VkDisplaySurfaceCreateInfoKHR-displayMode-parameter", kVUIDUndefined);
- }
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordCreateDisplayPlaneSurfaceKHR(
- VkInstance instance,
- const VkDisplaySurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- CreateObject(instance, *pSurface, kVulkanObjectTypeSurfaceKHR, pAllocator);
-
-}
-
-bool ObjectLifetimes::PreCallValidateCreateSharedSwapchainsKHR(
- VkDevice device,
- uint32_t swapchainCount,
- const VkSwapchainCreateInfoKHR* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkSwapchainKHR* pSwapchains) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkCreateSharedSwapchainsKHR-device-parameter", kVUIDUndefined);
- if (pCreateInfos) {
- for (uint32_t index0 = 0; index0 < swapchainCount; ++index0) {
- skip |= ValidateObject(device, pCreateInfos[index0].surface, kVulkanObjectTypeSurfaceKHR, false, "VUID-VkSwapchainCreateInfoKHR-surface-parameter", "VUID-VkSwapchainCreateInfoKHR-commonparent");
- skip |= ValidateObject(device, pCreateInfos[index0].oldSwapchain, kVulkanObjectTypeSwapchainKHR, true, "VUID-VkSwapchainCreateInfoKHR-oldSwapchain-parameter", "VUID-VkSwapchainCreateInfoKHR-oldSwapchain-parent");
- }
- }
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordCreateSharedSwapchainsKHR(
- VkDevice device,
- uint32_t swapchainCount,
- const VkSwapchainCreateInfoKHR* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkSwapchainKHR* pSwapchains,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- if (pSwapchains) {
- for (uint32_t index = 0; index < swapchainCount; index++) {
- CreateObject(device, pSwapchains[index], kVulkanObjectTypeSwapchainKHR, pAllocator);
- }
- }
-
-}
-
-#ifdef VK_USE_PLATFORM_XLIB_KHR
-
-bool ObjectLifetimes::PreCallValidateCreateXlibSurfaceKHR(
- VkInstance instance,
- const VkXlibSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- bool skip = false;
- skip |= ValidateObject(instance, instance, kVulkanObjectTypeInstance, false, "VUID-vkCreateXlibSurfaceKHR-instance-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordCreateXlibSurfaceKHR(
- VkInstance instance,
- const VkXlibSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- CreateObject(instance, *pSurface, kVulkanObjectTypeSurfaceKHR, pAllocator);
-
-}
-#endif // VK_USE_PLATFORM_XLIB_KHR
-
-#ifdef VK_USE_PLATFORM_XLIB_KHR
-
-bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceXlibPresentationSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex,
- Display* dpy,
- VisualID visualID) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceXlibPresentationSupportKHR-physicalDevice-parameter", kVUIDUndefined);
-
- return skip;
-}
-#endif // VK_USE_PLATFORM_XLIB_KHR
-
-#ifdef VK_USE_PLATFORM_XCB_KHR
-
-bool ObjectLifetimes::PreCallValidateCreateXcbSurfaceKHR(
- VkInstance instance,
- const VkXcbSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- bool skip = false;
- skip |= ValidateObject(instance, instance, kVulkanObjectTypeInstance, false, "VUID-vkCreateXcbSurfaceKHR-instance-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordCreateXcbSurfaceKHR(
- VkInstance instance,
- const VkXcbSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- CreateObject(instance, *pSurface, kVulkanObjectTypeSurfaceKHR, pAllocator);
-
-}
-#endif // VK_USE_PLATFORM_XCB_KHR
-
-#ifdef VK_USE_PLATFORM_XCB_KHR
-
-bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceXcbPresentationSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex,
- xcb_connection_t* connection,
- xcb_visualid_t visual_id) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceXcbPresentationSupportKHR-physicalDevice-parameter", kVUIDUndefined);
-
- return skip;
-}
-#endif // VK_USE_PLATFORM_XCB_KHR
-
-#ifdef VK_USE_PLATFORM_WAYLAND_KHR
-
-bool ObjectLifetimes::PreCallValidateCreateWaylandSurfaceKHR(
- VkInstance instance,
- const VkWaylandSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- bool skip = false;
- skip |= ValidateObject(instance, instance, kVulkanObjectTypeInstance, false, "VUID-vkCreateWaylandSurfaceKHR-instance-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordCreateWaylandSurfaceKHR(
- VkInstance instance,
- const VkWaylandSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- CreateObject(instance, *pSurface, kVulkanObjectTypeSurfaceKHR, pAllocator);
-
-}
-#endif // VK_USE_PLATFORM_WAYLAND_KHR
-
-#ifdef VK_USE_PLATFORM_WAYLAND_KHR
-
-bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceWaylandPresentationSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex,
- struct wl_display* display) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceWaylandPresentationSupportKHR-physicalDevice-parameter", kVUIDUndefined);
-
- return skip;
-}
-#endif // VK_USE_PLATFORM_WAYLAND_KHR
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-
-bool ObjectLifetimes::PreCallValidateCreateAndroidSurfaceKHR(
- VkInstance instance,
- const VkAndroidSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- bool skip = false;
- skip |= ValidateObject(instance, instance, kVulkanObjectTypeInstance, false, "VUID-vkCreateAndroidSurfaceKHR-instance-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordCreateAndroidSurfaceKHR(
- VkInstance instance,
- const VkAndroidSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- CreateObject(instance, *pSurface, kVulkanObjectTypeSurfaceKHR, pAllocator);
-
-}
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-bool ObjectLifetimes::PreCallValidateCreateWin32SurfaceKHR(
- VkInstance instance,
- const VkWin32SurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- bool skip = false;
- skip |= ValidateObject(instance, instance, kVulkanObjectTypeInstance, false, "VUID-vkCreateWin32SurfaceKHR-instance-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordCreateWin32SurfaceKHR(
- VkInstance instance,
- const VkWin32SurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- CreateObject(instance, *pSurface, kVulkanObjectTypeSurfaceKHR, pAllocator);
-
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceWin32PresentationSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceWin32PresentationSupportKHR-physicalDevice-parameter", kVUIDUndefined);
-
- return skip;
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceFeatures2KHR(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceFeatures2* pFeatures) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceFeatures2-physicalDevice-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceProperties2KHR(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceProperties2* pProperties) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceProperties2-physicalDevice-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceFormatProperties2KHR(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkFormatProperties2* pFormatProperties) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceFormatProperties2-physicalDevice-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceImageFormatProperties2KHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
- VkImageFormatProperties2* pImageFormatProperties) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceImageFormatProperties2-physicalDevice-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceMemoryProperties2KHR(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceMemoryProperties2* pMemoryProperties) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceMemoryProperties2-physicalDevice-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceSparseImageFormatProperties2KHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo,
- uint32_t* pPropertyCount,
- VkSparseImageFormatProperties2* pProperties) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceSparseImageFormatProperties2-physicalDevice-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetDeviceGroupPeerMemoryFeaturesKHR(
- VkDevice device,
- uint32_t heapIndex,
- uint32_t localDeviceIndex,
- uint32_t remoteDeviceIndex,
- VkPeerMemoryFeatureFlags* pPeerMemoryFeatures) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetDeviceGroupPeerMemoryFeatures-device-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdSetDeviceMaskKHR(
- VkCommandBuffer commandBuffer,
- uint32_t deviceMask) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdSetDeviceMask-commandBuffer-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdDispatchBaseKHR(
- VkCommandBuffer commandBuffer,
- uint32_t baseGroupX,
- uint32_t baseGroupY,
- uint32_t baseGroupZ,
- uint32_t groupCountX,
- uint32_t groupCountY,
- uint32_t groupCountZ) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdDispatchBase-commandBuffer-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateTrimCommandPoolKHR(
- VkDevice device,
- VkCommandPool commandPool,
- VkCommandPoolTrimFlags flags) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkTrimCommandPool-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, commandPool, kVulkanObjectTypeCommandPool, false, "VUID-vkTrimCommandPool-commandPool-parameter", "VUID-vkTrimCommandPool-commandPool-parent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateEnumeratePhysicalDeviceGroupsKHR(
- VkInstance instance,
- uint32_t* pPhysicalDeviceGroupCount,
- VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties) {
- bool skip = false;
- skip |= ValidateObject(instance, instance, kVulkanObjectTypeInstance, false, "VUID-vkEnumeratePhysicalDeviceGroups-instance-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceExternalBufferPropertiesKHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo,
- VkExternalBufferProperties* pExternalBufferProperties) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceExternalBufferProperties-physicalDevice-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-bool ObjectLifetimes::PreCallValidateGetMemoryWin32HandleKHR(
- VkDevice device,
- const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo,
- HANDLE* pHandle) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetMemoryWin32HandleKHR-device-parameter", kVUIDUndefined);
- if (pGetWin32HandleInfo) {
- skip |= ValidateObject(device, pGetWin32HandleInfo->memory, kVulkanObjectTypeDeviceMemory, false, "VUID-VkMemoryGetWin32HandleInfoKHR-memory-parameter", kVUIDUndefined);
- }
-
- return skip;
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-bool ObjectLifetimes::PreCallValidateGetMemoryWin32HandlePropertiesKHR(
- VkDevice device,
- VkExternalMemoryHandleTypeFlagBits handleType,
- HANDLE handle,
- VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetMemoryWin32HandlePropertiesKHR-device-parameter", kVUIDUndefined);
-
- return skip;
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-bool ObjectLifetimes::PreCallValidateGetMemoryFdKHR(
- VkDevice device,
- const VkMemoryGetFdInfoKHR* pGetFdInfo,
- int* pFd) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetMemoryFdKHR-device-parameter", kVUIDUndefined);
- if (pGetFdInfo) {
- skip |= ValidateObject(device, pGetFdInfo->memory, kVulkanObjectTypeDeviceMemory, false, "VUID-VkMemoryGetFdInfoKHR-memory-parameter", kVUIDUndefined);
- }
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetMemoryFdPropertiesKHR(
- VkDevice device,
- VkExternalMemoryHandleTypeFlagBits handleType,
- int fd,
- VkMemoryFdPropertiesKHR* pMemoryFdProperties) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetMemoryFdPropertiesKHR-device-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceExternalSemaphorePropertiesKHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
- VkExternalSemaphoreProperties* pExternalSemaphoreProperties) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceExternalSemaphoreProperties-physicalDevice-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-bool ObjectLifetimes::PreCallValidateImportSemaphoreWin32HandleKHR(
- VkDevice device,
- const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkImportSemaphoreWin32HandleKHR-device-parameter", kVUIDUndefined);
- if (pImportSemaphoreWin32HandleInfo) {
- skip |= ValidateObject(device, pImportSemaphoreWin32HandleInfo->semaphore, kVulkanObjectTypeSemaphore, false, "VUID-VkImportSemaphoreWin32HandleInfoKHR-semaphore-parameter", kVUIDUndefined);
- }
-
- return skip;
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-bool ObjectLifetimes::PreCallValidateGetSemaphoreWin32HandleKHR(
- VkDevice device,
- const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo,
- HANDLE* pHandle) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetSemaphoreWin32HandleKHR-device-parameter", kVUIDUndefined);
- if (pGetWin32HandleInfo) {
- skip |= ValidateObject(device, pGetWin32HandleInfo->semaphore, kVulkanObjectTypeSemaphore, false, "VUID-VkSemaphoreGetWin32HandleInfoKHR-semaphore-parameter", kVUIDUndefined);
- }
-
- return skip;
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-bool ObjectLifetimes::PreCallValidateImportSemaphoreFdKHR(
- VkDevice device,
- const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkImportSemaphoreFdKHR-device-parameter", kVUIDUndefined);
- if (pImportSemaphoreFdInfo) {
- skip |= ValidateObject(device, pImportSemaphoreFdInfo->semaphore, kVulkanObjectTypeSemaphore, false, "VUID-VkImportSemaphoreFdInfoKHR-semaphore-parameter", kVUIDUndefined);
- }
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetSemaphoreFdKHR(
- VkDevice device,
- const VkSemaphoreGetFdInfoKHR* pGetFdInfo,
- int* pFd) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetSemaphoreFdKHR-device-parameter", kVUIDUndefined);
- if (pGetFdInfo) {
- skip |= ValidateObject(device, pGetFdInfo->semaphore, kVulkanObjectTypeSemaphore, false, "VUID-VkSemaphoreGetFdInfoKHR-semaphore-parameter", kVUIDUndefined);
- }
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdPushDescriptorSetWithTemplateKHR(
- VkCommandBuffer commandBuffer,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- VkPipelineLayout layout,
- uint32_t set,
- const void* pData) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdPushDescriptorSetWithTemplateKHR-commandBuffer-parameter", "VUID-vkCmdPushDescriptorSetWithTemplateKHR-commonparent");
- skip |= ValidateObject(commandBuffer, descriptorUpdateTemplate, kVulkanObjectTypeDescriptorUpdateTemplate, false, "VUID-vkCmdPushDescriptorSetWithTemplateKHR-descriptorUpdateTemplate-parameter", "VUID-vkCmdPushDescriptorSetWithTemplateKHR-commonparent");
- skip |= ValidateObject(commandBuffer, layout, kVulkanObjectTypePipelineLayout, false, "VUID-vkCmdPushDescriptorSetWithTemplateKHR-layout-parameter", "VUID-vkCmdPushDescriptorSetWithTemplateKHR-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCreateDescriptorUpdateTemplateKHR(
- VkDevice device,
- const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkCreateDescriptorUpdateTemplate-device-parameter", kVUIDUndefined);
- if (pCreateInfo) {
- skip |= ValidateObject(device, pCreateInfo->descriptorSetLayout, kVulkanObjectTypeDescriptorSetLayout, true, "VUID-VkDescriptorUpdateTemplateCreateInfo-descriptorSetLayout-parameter", "VUID-VkDescriptorUpdateTemplateCreateInfo-commonparent");
- skip |= ValidateObject(device, pCreateInfo->pipelineLayout, kVulkanObjectTypePipelineLayout, true, kVUIDUndefined, "VUID-VkDescriptorUpdateTemplateCreateInfo-commonparent");
- }
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordCreateDescriptorUpdateTemplateKHR(
- VkDevice device,
- const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- CreateObject(device, *pDescriptorUpdateTemplate, kVulkanObjectTypeDescriptorUpdateTemplate, pAllocator);
-
-}
-
-bool ObjectLifetimes::PreCallValidateDestroyDescriptorUpdateTemplateKHR(
- VkDevice device,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkDestroyDescriptorUpdateTemplate-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, descriptorUpdateTemplate, kVulkanObjectTypeDescriptorUpdateTemplate, true, "VUID-vkDestroyDescriptorUpdateTemplate-descriptorUpdateTemplate-parameter", "VUID-vkDestroyDescriptorUpdateTemplate-descriptorUpdateTemplate-parent");
- skip |= ValidateDestroyObject(device, descriptorUpdateTemplate, kVulkanObjectTypeDescriptorUpdateTemplate, pAllocator, "VUID-vkDestroyDescriptorUpdateTemplate-descriptorSetLayout-00356", "VUID-vkDestroyDescriptorUpdateTemplate-descriptorSetLayout-00357");
-
- return skip;
-}
-
-void ObjectLifetimes::PreCallRecordDestroyDescriptorUpdateTemplateKHR(
- VkDevice device,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const VkAllocationCallbacks* pAllocator) {
- RecordDestroyObject(device, descriptorUpdateTemplate, kVulkanObjectTypeDescriptorUpdateTemplate);
-
-}
-
-bool ObjectLifetimes::PreCallValidateUpdateDescriptorSetWithTemplateKHR(
- VkDevice device,
- VkDescriptorSet descriptorSet,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const void* pData) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkUpdateDescriptorSetWithTemplate-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, descriptorSet, kVulkanObjectTypeDescriptorSet, false, "VUID-vkUpdateDescriptorSetWithTemplate-descriptorSet-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, descriptorUpdateTemplate, kVulkanObjectTypeDescriptorUpdateTemplate, false, "VUID-vkUpdateDescriptorSetWithTemplate-descriptorUpdateTemplate-parameter", "VUID-vkUpdateDescriptorSetWithTemplate-descriptorUpdateTemplate-parent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCreateRenderPass2KHR(
- VkDevice device,
- const VkRenderPassCreateInfo2KHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkRenderPass* pRenderPass) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkCreateRenderPass2KHR-device-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordCreateRenderPass2KHR(
- VkDevice device,
- const VkRenderPassCreateInfo2KHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkRenderPass* pRenderPass,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- CreateObject(device, *pRenderPass, kVulkanObjectTypeRenderPass, pAllocator);
-
-}
-
-bool ObjectLifetimes::PreCallValidateCmdBeginRenderPass2KHR(
- VkCommandBuffer commandBuffer,
- const VkRenderPassBeginInfo* pRenderPassBegin,
- const VkSubpassBeginInfoKHR* pSubpassBeginInfo) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdBeginRenderPass2KHR-commandBuffer-parameter", kVUIDUndefined);
- if (pRenderPassBegin) {
- skip |= ValidateObject(commandBuffer, pRenderPassBegin->renderPass, kVulkanObjectTypeRenderPass, false, "VUID-VkRenderPassBeginInfo-renderPass-parameter", "VUID-VkRenderPassBeginInfo-commonparent");
- skip |= ValidateObject(commandBuffer, pRenderPassBegin->framebuffer, kVulkanObjectTypeFramebuffer, false, "VUID-VkRenderPassBeginInfo-framebuffer-parameter", "VUID-VkRenderPassBeginInfo-commonparent");
- }
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdNextSubpass2KHR(
- VkCommandBuffer commandBuffer,
- const VkSubpassBeginInfoKHR* pSubpassBeginInfo,
- const VkSubpassEndInfoKHR* pSubpassEndInfo) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdNextSubpass2KHR-commandBuffer-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdEndRenderPass2KHR(
- VkCommandBuffer commandBuffer,
- const VkSubpassEndInfoKHR* pSubpassEndInfo) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdEndRenderPass2KHR-commandBuffer-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetSwapchainStatusKHR(
- VkDevice device,
- VkSwapchainKHR swapchain) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetSwapchainStatusKHR-device-parameter", "VUID-vkGetSwapchainStatusKHR-commonparent");
- skip |= ValidateObject(device, swapchain, kVulkanObjectTypeSwapchainKHR, false, "VUID-vkGetSwapchainStatusKHR-swapchain-parameter", "VUID-vkGetSwapchainStatusKHR-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceExternalFencePropertiesKHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo,
- VkExternalFenceProperties* pExternalFenceProperties) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceExternalFenceProperties-physicalDevice-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-bool ObjectLifetimes::PreCallValidateImportFenceWin32HandleKHR(
- VkDevice device,
- const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkImportFenceWin32HandleKHR-device-parameter", kVUIDUndefined);
- if (pImportFenceWin32HandleInfo) {
- skip |= ValidateObject(device, pImportFenceWin32HandleInfo->fence, kVulkanObjectTypeFence, false, "VUID-VkImportFenceWin32HandleInfoKHR-fence-parameter", kVUIDUndefined);
- }
-
- return skip;
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-bool ObjectLifetimes::PreCallValidateGetFenceWin32HandleKHR(
- VkDevice device,
- const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo,
- HANDLE* pHandle) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetFenceWin32HandleKHR-device-parameter", kVUIDUndefined);
- if (pGetWin32HandleInfo) {
- skip |= ValidateObject(device, pGetWin32HandleInfo->fence, kVulkanObjectTypeFence, false, "VUID-VkFenceGetWin32HandleInfoKHR-fence-parameter", kVUIDUndefined);
- }
-
- return skip;
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-bool ObjectLifetimes::PreCallValidateImportFenceFdKHR(
- VkDevice device,
- const VkImportFenceFdInfoKHR* pImportFenceFdInfo) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkImportFenceFdKHR-device-parameter", kVUIDUndefined);
- if (pImportFenceFdInfo) {
- skip |= ValidateObject(device, pImportFenceFdInfo->fence, kVulkanObjectTypeFence, false, "VUID-VkImportFenceFdInfoKHR-fence-parameter", kVUIDUndefined);
- }
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetFenceFdKHR(
- VkDevice device,
- const VkFenceGetFdInfoKHR* pGetFdInfo,
- int* pFd) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetFenceFdKHR-device-parameter", kVUIDUndefined);
- if (pGetFdInfo) {
- skip |= ValidateObject(device, pGetFdInfo->fence, kVulkanObjectTypeFence, false, "VUID-VkFenceGetFdInfoKHR-fence-parameter", kVUIDUndefined);
- }
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceSurfaceCapabilities2KHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
- VkSurfaceCapabilities2KHR* pSurfaceCapabilities) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceSurfaceCapabilities2KHR-physicalDevice-parameter", kVUIDUndefined);
- if (pSurfaceInfo) {
- skip |= ValidateObject(physicalDevice, pSurfaceInfo->surface, kVulkanObjectTypeSurfaceKHR, false, "VUID-VkPhysicalDeviceSurfaceInfo2KHR-surface-parameter", kVUIDUndefined);
- }
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceSurfaceFormats2KHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
- uint32_t* pSurfaceFormatCount,
- VkSurfaceFormat2KHR* pSurfaceFormats) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceSurfaceFormats2KHR-physicalDevice-parameter", kVUIDUndefined);
- if (pSurfaceInfo) {
- skip |= ValidateObject(physicalDevice, pSurfaceInfo->surface, kVulkanObjectTypeSurfaceKHR, false, "VUID-VkPhysicalDeviceSurfaceInfo2KHR-surface-parameter", kVUIDUndefined);
- }
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceDisplayPlaneProperties2KHR(
- VkPhysicalDevice physicalDevice,
- uint32_t* pPropertyCount,
- VkDisplayPlaneProperties2KHR* pProperties) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceDisplayPlaneProperties2KHR-physicalDevice-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetDisplayPlaneCapabilities2KHR(
- VkPhysicalDevice physicalDevice,
- const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo,
- VkDisplayPlaneCapabilities2KHR* pCapabilities) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetDisplayPlaneCapabilities2KHR-physicalDevice-parameter", kVUIDUndefined);
- if (pDisplayPlaneInfo) {
- skip |= ValidateObject(physicalDevice, pDisplayPlaneInfo->mode, kVulkanObjectTypeDisplayModeKHR, false, "VUID-VkDisplayPlaneInfo2KHR-mode-parameter", kVUIDUndefined);
- }
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetImageMemoryRequirements2KHR(
- VkDevice device,
- const VkImageMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetImageMemoryRequirements2-device-parameter", kVUIDUndefined);
- if (pInfo) {
- skip |= ValidateObject(device, pInfo->image, kVulkanObjectTypeImage, false, "VUID-VkImageMemoryRequirementsInfo2-image-parameter", kVUIDUndefined);
- }
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetBufferMemoryRequirements2KHR(
- VkDevice device,
- const VkBufferMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetBufferMemoryRequirements2-device-parameter", kVUIDUndefined);
- if (pInfo) {
- skip |= ValidateObject(device, pInfo->buffer, kVulkanObjectTypeBuffer, false, "VUID-VkBufferMemoryRequirementsInfo2-buffer-parameter", kVUIDUndefined);
- }
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetImageSparseMemoryRequirements2KHR(
- VkDevice device,
- const VkImageSparseMemoryRequirementsInfo2* pInfo,
- uint32_t* pSparseMemoryRequirementCount,
- VkSparseImageMemoryRequirements2* pSparseMemoryRequirements) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetImageSparseMemoryRequirements2-device-parameter", kVUIDUndefined);
- if (pInfo) {
- skip |= ValidateObject(device, pInfo->image, kVulkanObjectTypeImage, false, "VUID-VkImageSparseMemoryRequirementsInfo2-image-parameter", kVUIDUndefined);
- }
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCreateSamplerYcbcrConversionKHR(
- VkDevice device,
- const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSamplerYcbcrConversion* pYcbcrConversion) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkCreateSamplerYcbcrConversion-device-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordCreateSamplerYcbcrConversionKHR(
- VkDevice device,
- const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSamplerYcbcrConversion* pYcbcrConversion,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- CreateObject(device, *pYcbcrConversion, kVulkanObjectTypeSamplerYcbcrConversion, pAllocator);
-
-}
-
-bool ObjectLifetimes::PreCallValidateDestroySamplerYcbcrConversionKHR(
- VkDevice device,
- VkSamplerYcbcrConversion ycbcrConversion,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkDestroySamplerYcbcrConversion-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, ycbcrConversion, kVulkanObjectTypeSamplerYcbcrConversion, true, "VUID-vkDestroySamplerYcbcrConversion-ycbcrConversion-parameter", "VUID-vkDestroySamplerYcbcrConversion-ycbcrConversion-parent");
- skip |= ValidateDestroyObject(device, ycbcrConversion, kVulkanObjectTypeSamplerYcbcrConversion, pAllocator, kVUIDUndefined, kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PreCallRecordDestroySamplerYcbcrConversionKHR(
- VkDevice device,
- VkSamplerYcbcrConversion ycbcrConversion,
- const VkAllocationCallbacks* pAllocator) {
- RecordDestroyObject(device, ycbcrConversion, kVulkanObjectTypeSamplerYcbcrConversion);
-
-}
-
-bool ObjectLifetimes::PreCallValidateBindBufferMemory2KHR(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindBufferMemoryInfo* pBindInfos) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkBindBufferMemory2-device-parameter", kVUIDUndefined);
- if (pBindInfos) {
- for (uint32_t index0 = 0; index0 < bindInfoCount; ++index0) {
- skip |= ValidateObject(device, pBindInfos[index0].buffer, kVulkanObjectTypeBuffer, false, "VUID-VkBindBufferMemoryInfo-buffer-parameter", "VUID-VkBindBufferMemoryInfo-commonparent");
- skip |= ValidateObject(device, pBindInfos[index0].memory, kVulkanObjectTypeDeviceMemory, false, "VUID-VkBindBufferMemoryInfo-memory-parameter", "VUID-VkBindBufferMemoryInfo-commonparent");
- }
- }
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateBindImageMemory2KHR(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindImageMemoryInfo* pBindInfos) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkBindImageMemory2-device-parameter", kVUIDUndefined);
- if (pBindInfos) {
- for (uint32_t index0 = 0; index0 < bindInfoCount; ++index0) {
- skip |= ValidateObject(device, pBindInfos[index0].image, kVulkanObjectTypeImage, false, "VUID-VkBindImageMemoryInfo-image-parameter", "VUID-VkBindImageMemoryInfo-commonparent");
- skip |= ValidateObject(device, pBindInfos[index0].memory, kVulkanObjectTypeDeviceMemory, true, kVUIDUndefined, "VUID-VkBindImageMemoryInfo-commonparent");
- }
- }
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdDrawIndirectCountKHR(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdDrawIndirectCountKHR-commandBuffer-parameter", "VUID-vkCmdDrawIndirectCountKHR-commonparent");
- skip |= ValidateObject(commandBuffer, buffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdDrawIndirectCountKHR-buffer-parameter", "VUID-vkCmdDrawIndirectCountKHR-commonparent");
- skip |= ValidateObject(commandBuffer, countBuffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdDrawIndirectCountKHR-countBuffer-parameter", "VUID-vkCmdDrawIndirectCountKHR-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdDrawIndexedIndirectCountKHR(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-parameter", "VUID-vkCmdDrawIndexedIndirectCountKHR-commonparent");
- skip |= ValidateObject(commandBuffer, buffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdDrawIndexedIndirectCountKHR-buffer-parameter", "VUID-vkCmdDrawIndexedIndirectCountKHR-commonparent");
- skip |= ValidateObject(commandBuffer, countBuffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-parameter", "VUID-vkCmdDrawIndexedIndirectCountKHR-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetPipelineExecutablePropertiesKHR(
- VkDevice device,
- const VkPipelineInfoKHR* pPipelineInfo,
- uint32_t* pExecutableCount,
- VkPipelineExecutablePropertiesKHR* pProperties) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetPipelineExecutablePropertiesKHR-device-parameter", kVUIDUndefined);
- if (pPipelineInfo) {
- skip |= ValidateObject(device, pPipelineInfo->pipeline, kVulkanObjectTypePipeline, false, "VUID-VkPipelineInfoKHR-pipeline-parameter", kVUIDUndefined);
- }
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetPipelineExecutableStatisticsKHR(
- VkDevice device,
- const VkPipelineExecutableInfoKHR* pExecutableInfo,
- uint32_t* pStatisticCount,
- VkPipelineExecutableStatisticKHR* pStatistics) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetPipelineExecutableStatisticsKHR-device-parameter", kVUIDUndefined);
- if (pExecutableInfo) {
- skip |= ValidateObject(device, pExecutableInfo->pipeline, kVulkanObjectTypePipeline, false, "VUID-VkPipelineExecutableInfoKHR-pipeline-parameter", kVUIDUndefined);
- }
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetPipelineExecutableInternalRepresentationsKHR(
- VkDevice device,
- const VkPipelineExecutableInfoKHR* pExecutableInfo,
- uint32_t* pInternalRepresentationCount,
- VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetPipelineExecutableInternalRepresentationsKHR-device-parameter", kVUIDUndefined);
- if (pExecutableInfo) {
- skip |= ValidateObject(device, pExecutableInfo->pipeline, kVulkanObjectTypePipeline, false, "VUID-VkPipelineExecutableInfoKHR-pipeline-parameter", kVUIDUndefined);
- }
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCreateDebugReportCallbackEXT(
- VkInstance instance,
- const VkDebugReportCallbackCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDebugReportCallbackEXT* pCallback) {
- bool skip = false;
- skip |= ValidateObject(instance, instance, kVulkanObjectTypeInstance, false, "VUID-vkCreateDebugReportCallbackEXT-instance-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordCreateDebugReportCallbackEXT(
- VkInstance instance,
- const VkDebugReportCallbackCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDebugReportCallbackEXT* pCallback,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- CreateObject(instance, *pCallback, kVulkanObjectTypeDebugReportCallbackEXT, pAllocator);
-
-}
-
-bool ObjectLifetimes::PreCallValidateDestroyDebugReportCallbackEXT(
- VkInstance instance,
- VkDebugReportCallbackEXT callback,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- skip |= ValidateObject(instance, instance, kVulkanObjectTypeInstance, false, "VUID-vkDestroyDebugReportCallbackEXT-instance-parameter", kVUIDUndefined);
- skip |= ValidateObject(instance, callback, kVulkanObjectTypeDebugReportCallbackEXT, false, "VUID-vkDestroyDebugReportCallbackEXT-callback-parameter", "VUID-vkDestroyDebugReportCallbackEXT-callback-parent");
- skip |= ValidateDestroyObject(instance, callback, kVulkanObjectTypeDebugReportCallbackEXT, pAllocator, kVUIDUndefined, kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PreCallRecordDestroyDebugReportCallbackEXT(
- VkInstance instance,
- VkDebugReportCallbackEXT callback,
- const VkAllocationCallbacks* pAllocator) {
- RecordDestroyObject(instance, callback, kVulkanObjectTypeDebugReportCallbackEXT);
-
-}
-
-bool ObjectLifetimes::PreCallValidateDebugReportMessageEXT(
- VkInstance instance,
- VkDebugReportFlagsEXT flags,
- VkDebugReportObjectTypeEXT objectType,
- uint64_t object,
- size_t location,
- int32_t messageCode,
- const char* pLayerPrefix,
- const char* pMessage) {
- bool skip = false;
- skip |= ValidateObject(instance, instance, kVulkanObjectTypeInstance, false, "VUID-vkDebugReportMessageEXT-instance-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateDebugMarkerSetObjectTagEXT(
- VkDevice device,
- const VkDebugMarkerObjectTagInfoEXT* pTagInfo) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkDebugMarkerSetObjectTagEXT-device-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateDebugMarkerSetObjectNameEXT(
- VkDevice device,
- const VkDebugMarkerObjectNameInfoEXT* pNameInfo) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkDebugMarkerSetObjectNameEXT-device-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdDebugMarkerBeginEXT(
- VkCommandBuffer commandBuffer,
- const VkDebugMarkerMarkerInfoEXT* pMarkerInfo) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdDebugMarkerBeginEXT-commandBuffer-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdDebugMarkerEndEXT(
- VkCommandBuffer commandBuffer) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdDebugMarkerEndEXT-commandBuffer-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdDebugMarkerInsertEXT(
- VkCommandBuffer commandBuffer,
- const VkDebugMarkerMarkerInfoEXT* pMarkerInfo) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdDebugMarkerInsertEXT-commandBuffer-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdBindTransformFeedbackBuffersEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstBinding,
- uint32_t bindingCount,
- const VkBuffer* pBuffers,
- const VkDeviceSize* pOffsets,
- const VkDeviceSize* pSizes) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdBindTransformFeedbackBuffersEXT-commandBuffer-parameter", "VUID-vkCmdBindTransformFeedbackBuffersEXT-commonparent");
- if (pBuffers) {
- for (uint32_t index0 = 0; index0 < bindingCount; ++index0) {
- skip |= ValidateObject(commandBuffer, pBuffers[index0], kVulkanObjectTypeBuffer, false, "VUID-vkCmdBindTransformFeedbackBuffersEXT-pBuffers-parameter", "VUID-vkCmdBindTransformFeedbackBuffersEXT-commonparent");
- }
- }
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdBeginTransformFeedbackEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstCounterBuffer,
- uint32_t counterBufferCount,
- const VkBuffer* pCounterBuffers,
- const VkDeviceSize* pCounterBufferOffsets) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdBeginTransformFeedbackEXT-commandBuffer-parameter", "VUID-vkCmdBeginTransformFeedbackEXT-commonparent");
- if (pCounterBuffers) {
- for (uint32_t index0 = 0; index0 < counterBufferCount; ++index0) {
- skip |= ValidateObject(commandBuffer, pCounterBuffers[index0], kVulkanObjectTypeBuffer, true, kVUIDUndefined, "VUID-vkCmdBeginTransformFeedbackEXT-commonparent");
- }
- }
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdEndTransformFeedbackEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstCounterBuffer,
- uint32_t counterBufferCount,
- const VkBuffer* pCounterBuffers,
- const VkDeviceSize* pCounterBufferOffsets) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdEndTransformFeedbackEXT-commandBuffer-parameter", "VUID-vkCmdEndTransformFeedbackEXT-commonparent");
- if (pCounterBuffers) {
- for (uint32_t index0 = 0; index0 < counterBufferCount; ++index0) {
- skip |= ValidateObject(commandBuffer, pCounterBuffers[index0], kVulkanObjectTypeBuffer, true, kVUIDUndefined, "VUID-vkCmdEndTransformFeedbackEXT-commonparent");
- }
- }
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdBeginQueryIndexedEXT(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query,
- VkQueryControlFlags flags,
- uint32_t index) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdBeginQueryIndexedEXT-commandBuffer-parameter", "VUID-vkCmdBeginQueryIndexedEXT-commonparent");
- skip |= ValidateObject(commandBuffer, queryPool, kVulkanObjectTypeQueryPool, false, "VUID-vkCmdBeginQueryIndexedEXT-queryPool-parameter", "VUID-vkCmdBeginQueryIndexedEXT-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdEndQueryIndexedEXT(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query,
- uint32_t index) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdEndQueryIndexedEXT-commandBuffer-parameter", "VUID-vkCmdEndQueryIndexedEXT-commonparent");
- skip |= ValidateObject(commandBuffer, queryPool, kVulkanObjectTypeQueryPool, false, "VUID-vkCmdEndQueryIndexedEXT-queryPool-parameter", "VUID-vkCmdEndQueryIndexedEXT-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdDrawIndirectByteCountEXT(
- VkCommandBuffer commandBuffer,
- uint32_t instanceCount,
- uint32_t firstInstance,
- VkBuffer counterBuffer,
- VkDeviceSize counterBufferOffset,
- uint32_t counterOffset,
- uint32_t vertexStride) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdDrawIndirectByteCountEXT-commandBuffer-parameter", "VUID-vkCmdDrawIndirectByteCountEXT-commonparent");
- skip |= ValidateObject(commandBuffer, counterBuffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdDrawIndirectByteCountEXT-counterBuffer-parameter", "VUID-vkCmdDrawIndirectByteCountEXT-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetImageViewHandleNVX(
- VkDevice device,
- const VkImageViewHandleInfoNVX* pInfo) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetImageViewHandleNVX-device-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdDrawIndirectCountAMD(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdDrawIndirectCountKHR-commandBuffer-parameter", "VUID-vkCmdDrawIndirectCountKHR-commonparent");
- skip |= ValidateObject(commandBuffer, buffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdDrawIndirectCountKHR-buffer-parameter", "VUID-vkCmdDrawIndirectCountKHR-commonparent");
- skip |= ValidateObject(commandBuffer, countBuffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdDrawIndirectCountKHR-countBuffer-parameter", "VUID-vkCmdDrawIndirectCountKHR-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdDrawIndexedIndirectCountAMD(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-parameter", "VUID-vkCmdDrawIndexedIndirectCountKHR-commonparent");
- skip |= ValidateObject(commandBuffer, buffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdDrawIndexedIndirectCountKHR-buffer-parameter", "VUID-vkCmdDrawIndexedIndirectCountKHR-commonparent");
- skip |= ValidateObject(commandBuffer, countBuffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-parameter", "VUID-vkCmdDrawIndexedIndirectCountKHR-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetShaderInfoAMD(
- VkDevice device,
- VkPipeline pipeline,
- VkShaderStageFlagBits shaderStage,
- VkShaderInfoTypeAMD infoType,
- size_t* pInfoSize,
- void* pInfo) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetShaderInfoAMD-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, pipeline, kVulkanObjectTypePipeline, false, "VUID-vkGetShaderInfoAMD-pipeline-parameter", "VUID-vkGetShaderInfoAMD-pipeline-parent");
-
- return skip;
-}
-
-#ifdef VK_USE_PLATFORM_GGP
-
-bool ObjectLifetimes::PreCallValidateCreateStreamDescriptorSurfaceGGP(
- VkInstance instance,
- const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- bool skip = false;
- skip |= ValidateObject(instance, instance, kVulkanObjectTypeInstance, false, "VUID-vkCreateStreamDescriptorSurfaceGGP-instance-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordCreateStreamDescriptorSurfaceGGP(
- VkInstance instance,
- const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- CreateObject(instance, *pSurface, kVulkanObjectTypeSurfaceKHR, pAllocator);
-
-}
-#endif // VK_USE_PLATFORM_GGP
-
-bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceExternalImageFormatPropertiesNV(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkImageType type,
- VkImageTiling tiling,
- VkImageUsageFlags usage,
- VkImageCreateFlags flags,
- VkExternalMemoryHandleTypeFlagsNV externalHandleType,
- VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-physicalDevice-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-bool ObjectLifetimes::PreCallValidateGetMemoryWin32HandleNV(
- VkDevice device,
- VkDeviceMemory memory,
- VkExternalMemoryHandleTypeFlagsNV handleType,
- HANDLE* pHandle) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetMemoryWin32HandleNV-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, memory, kVulkanObjectTypeDeviceMemory, false, "VUID-vkGetMemoryWin32HandleNV-memory-parameter", "VUID-vkGetMemoryWin32HandleNV-memory-parent");
-
- return skip;
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_VI_NN
-
-bool ObjectLifetimes::PreCallValidateCreateViSurfaceNN(
- VkInstance instance,
- const VkViSurfaceCreateInfoNN* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- bool skip = false;
- skip |= ValidateObject(instance, instance, kVulkanObjectTypeInstance, false, "VUID-vkCreateViSurfaceNN-instance-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordCreateViSurfaceNN(
- VkInstance instance,
- const VkViSurfaceCreateInfoNN* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- CreateObject(instance, *pSurface, kVulkanObjectTypeSurfaceKHR, pAllocator);
-
-}
-#endif // VK_USE_PLATFORM_VI_NN
-
-bool ObjectLifetimes::PreCallValidateCmdBeginConditionalRenderingEXT(
- VkCommandBuffer commandBuffer,
- const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdBeginConditionalRenderingEXT-commandBuffer-parameter", kVUIDUndefined);
- if (pConditionalRenderingBegin) {
- skip |= ValidateObject(commandBuffer, pConditionalRenderingBegin->buffer, kVulkanObjectTypeBuffer, false, "VUID-VkConditionalRenderingBeginInfoEXT-buffer-parameter", kVUIDUndefined);
- }
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdEndConditionalRenderingEXT(
- VkCommandBuffer commandBuffer) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdEndConditionalRenderingEXT-commandBuffer-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdProcessCommandsNVX(
- VkCommandBuffer commandBuffer,
- const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdProcessCommandsNVX-commandBuffer-parameter", kVUIDUndefined);
- if (pProcessCommandsInfo) {
- skip |= ValidateObject(commandBuffer, pProcessCommandsInfo->objectTable, kVulkanObjectTypeObjectTableNVX, false, "VUID-VkCmdProcessCommandsInfoNVX-objectTable-parameter", "VUID-VkCmdProcessCommandsInfoNVX-commonparent");
- skip |= ValidateObject(commandBuffer, pProcessCommandsInfo->indirectCommandsLayout, kVulkanObjectTypeIndirectCommandsLayoutNVX, false, "VUID-VkCmdProcessCommandsInfoNVX-indirectCommandsLayout-parameter", "VUID-VkCmdProcessCommandsInfoNVX-commonparent");
- if (pProcessCommandsInfo->pIndirectCommandsTokens) {
- for (uint32_t index1 = 0; index1 < pProcessCommandsInfo->indirectCommandsTokenCount; ++index1) {
- skip |= ValidateObject(commandBuffer, pProcessCommandsInfo->pIndirectCommandsTokens[index1].buffer, kVulkanObjectTypeBuffer, false, "VUID-VkIndirectCommandsTokenNVX-buffer-parameter", kVUIDUndefined);
- }
- }
- skip |= ValidateObject(commandBuffer, pProcessCommandsInfo->targetCommandBuffer, kVulkanObjectTypeCommandBuffer, true, "VUID-VkCmdProcessCommandsInfoNVX-targetCommandBuffer-parameter", "VUID-VkCmdProcessCommandsInfoNVX-commonparent");
- skip |= ValidateObject(commandBuffer, pProcessCommandsInfo->sequencesCountBuffer, kVulkanObjectTypeBuffer, true, "VUID-VkCmdProcessCommandsInfoNVX-sequencesCountBuffer-parameter", "VUID-VkCmdProcessCommandsInfoNVX-commonparent");
- skip |= ValidateObject(commandBuffer, pProcessCommandsInfo->sequencesIndexBuffer, kVulkanObjectTypeBuffer, true, "VUID-VkCmdProcessCommandsInfoNVX-sequencesIndexBuffer-parameter", "VUID-VkCmdProcessCommandsInfoNVX-commonparent");
- }
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdReserveSpaceForCommandsNVX(
- VkCommandBuffer commandBuffer,
- const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdReserveSpaceForCommandsNVX-commandBuffer-parameter", kVUIDUndefined);
- if (pReserveSpaceInfo) {
- skip |= ValidateObject(commandBuffer, pReserveSpaceInfo->objectTable, kVulkanObjectTypeObjectTableNVX, false, "VUID-VkCmdReserveSpaceForCommandsInfoNVX-objectTable-parameter", "VUID-VkCmdReserveSpaceForCommandsInfoNVX-commonparent");
- skip |= ValidateObject(commandBuffer, pReserveSpaceInfo->indirectCommandsLayout, kVulkanObjectTypeIndirectCommandsLayoutNVX, false, "VUID-VkCmdReserveSpaceForCommandsInfoNVX-indirectCommandsLayout-parameter", "VUID-VkCmdReserveSpaceForCommandsInfoNVX-commonparent");
- }
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCreateIndirectCommandsLayoutNVX(
- VkDevice device,
- const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkCreateIndirectCommandsLayoutNVX-device-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordCreateIndirectCommandsLayoutNVX(
- VkDevice device,
- const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- CreateObject(device, *pIndirectCommandsLayout, kVulkanObjectTypeIndirectCommandsLayoutNVX, pAllocator);
-
-}
-
-bool ObjectLifetimes::PreCallValidateDestroyIndirectCommandsLayoutNVX(
- VkDevice device,
- VkIndirectCommandsLayoutNVX indirectCommandsLayout,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkDestroyIndirectCommandsLayoutNVX-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, indirectCommandsLayout, kVulkanObjectTypeIndirectCommandsLayoutNVX, false, "VUID-vkDestroyIndirectCommandsLayoutNVX-indirectCommandsLayout-parameter", "VUID-vkDestroyIndirectCommandsLayoutNVX-indirectCommandsLayout-parent");
- skip |= ValidateDestroyObject(device, indirectCommandsLayout, kVulkanObjectTypeIndirectCommandsLayoutNVX, pAllocator, kVUIDUndefined, kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PreCallRecordDestroyIndirectCommandsLayoutNVX(
- VkDevice device,
- VkIndirectCommandsLayoutNVX indirectCommandsLayout,
- const VkAllocationCallbacks* pAllocator) {
- RecordDestroyObject(device, indirectCommandsLayout, kVulkanObjectTypeIndirectCommandsLayoutNVX);
-
-}
-
-bool ObjectLifetimes::PreCallValidateCreateObjectTableNVX(
- VkDevice device,
- const VkObjectTableCreateInfoNVX* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkObjectTableNVX* pObjectTable) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkCreateObjectTableNVX-device-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordCreateObjectTableNVX(
- VkDevice device,
- const VkObjectTableCreateInfoNVX* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkObjectTableNVX* pObjectTable,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- CreateObject(device, *pObjectTable, kVulkanObjectTypeObjectTableNVX, pAllocator);
-
-}
-
-bool ObjectLifetimes::PreCallValidateDestroyObjectTableNVX(
- VkDevice device,
- VkObjectTableNVX objectTable,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkDestroyObjectTableNVX-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, objectTable, kVulkanObjectTypeObjectTableNVX, false, "VUID-vkDestroyObjectTableNVX-objectTable-parameter", "VUID-vkDestroyObjectTableNVX-objectTable-parent");
- skip |= ValidateDestroyObject(device, objectTable, kVulkanObjectTypeObjectTableNVX, pAllocator, kVUIDUndefined, kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PreCallRecordDestroyObjectTableNVX(
- VkDevice device,
- VkObjectTableNVX objectTable,
- const VkAllocationCallbacks* pAllocator) {
- RecordDestroyObject(device, objectTable, kVulkanObjectTypeObjectTableNVX);
-
-}
-
-bool ObjectLifetimes::PreCallValidateRegisterObjectsNVX(
- VkDevice device,
- VkObjectTableNVX objectTable,
- uint32_t objectCount,
- const VkObjectTableEntryNVX* const* ppObjectTableEntries,
- const uint32_t* pObjectIndices) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkRegisterObjectsNVX-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, objectTable, kVulkanObjectTypeObjectTableNVX, false, "VUID-vkRegisterObjectsNVX-objectTable-parameter", "VUID-vkRegisterObjectsNVX-objectTable-parent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateUnregisterObjectsNVX(
- VkDevice device,
- VkObjectTableNVX objectTable,
- uint32_t objectCount,
- const VkObjectEntryTypeNVX* pObjectEntryTypes,
- const uint32_t* pObjectIndices) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkUnregisterObjectsNVX-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, objectTable, kVulkanObjectTypeObjectTableNVX, false, "VUID-vkUnregisterObjectsNVX-objectTable-parameter", "VUID-vkUnregisterObjectsNVX-objectTable-parent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceGeneratedCommandsPropertiesNVX(
- VkPhysicalDevice physicalDevice,
- VkDeviceGeneratedCommandsFeaturesNVX* pFeatures,
- VkDeviceGeneratedCommandsLimitsNVX* pLimits) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX-physicalDevice-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdSetViewportWScalingNV(
- VkCommandBuffer commandBuffer,
- uint32_t firstViewport,
- uint32_t viewportCount,
- const VkViewportWScalingNV* pViewportWScalings) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdSetViewportWScalingNV-commandBuffer-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateReleaseDisplayEXT(
- VkPhysicalDevice physicalDevice,
- VkDisplayKHR display) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkReleaseDisplayEXT-physicalDevice-parameter", kVUIDUndefined);
- skip |= ValidateObject(physicalDevice, display, kVulkanObjectTypeDisplayKHR, false, "VUID-vkReleaseDisplayEXT-display-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
-
-bool ObjectLifetimes::PreCallValidateAcquireXlibDisplayEXT(
- VkPhysicalDevice physicalDevice,
- Display* dpy,
- VkDisplayKHR display) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkAcquireXlibDisplayEXT-physicalDevice-parameter", kVUIDUndefined);
- skip |= ValidateObject(physicalDevice, display, kVulkanObjectTypeDisplayKHR, false, "VUID-vkAcquireXlibDisplayEXT-display-parameter", kVUIDUndefined);
-
- return skip;
-}
-#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
-
-#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
-
-bool ObjectLifetimes::PreCallValidateGetRandROutputDisplayEXT(
- VkPhysicalDevice physicalDevice,
- Display* dpy,
- RROutput rrOutput,
- VkDisplayKHR* pDisplay) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetRandROutputDisplayEXT-physicalDevice-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordGetRandROutputDisplayEXT(
- VkPhysicalDevice physicalDevice,
- Display* dpy,
- RROutput rrOutput,
- VkDisplayKHR* pDisplay,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- CreateObject(physicalDevice, *pDisplay, kVulkanObjectTypeDisplayKHR, nullptr);
-
-}
-#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
-
-bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceSurfaceCapabilities2EXT(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- VkSurfaceCapabilities2EXT* pSurfaceCapabilities) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceSurfaceCapabilities2EXT-physicalDevice-parameter", "VUID-vkGetPhysicalDeviceSurfaceCapabilities2EXT-commonparent");
- skip |= ValidateObject(physicalDevice, surface, kVulkanObjectTypeSurfaceKHR, false, "VUID-vkGetPhysicalDeviceSurfaceCapabilities2EXT-surface-parameter", "VUID-vkGetPhysicalDeviceSurfaceCapabilities2EXT-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateDisplayPowerControlEXT(
- VkDevice device,
- VkDisplayKHR display,
- const VkDisplayPowerInfoEXT* pDisplayPowerInfo) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkDisplayPowerControlEXT-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, display, kVulkanObjectTypeDisplayKHR, false, "VUID-vkDisplayPowerControlEXT-display-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateRegisterDeviceEventEXT(
- VkDevice device,
- const VkDeviceEventInfoEXT* pDeviceEventInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFence* pFence) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkRegisterDeviceEventEXT-device-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordRegisterDeviceEventEXT(
- VkDevice device,
- const VkDeviceEventInfoEXT* pDeviceEventInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFence* pFence,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- CreateObject(device, *pFence, kVulkanObjectTypeFence, pAllocator);
-
-}
-
-bool ObjectLifetimes::PreCallValidateRegisterDisplayEventEXT(
- VkDevice device,
- VkDisplayKHR display,
- const VkDisplayEventInfoEXT* pDisplayEventInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFence* pFence) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkRegisterDisplayEventEXT-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, display, kVulkanObjectTypeDisplayKHR, false, "VUID-vkRegisterDisplayEventEXT-display-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordRegisterDisplayEventEXT(
- VkDevice device,
- VkDisplayKHR display,
- const VkDisplayEventInfoEXT* pDisplayEventInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFence* pFence,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- CreateObject(device, *pFence, kVulkanObjectTypeFence, pAllocator);
-
-}
-
-bool ObjectLifetimes::PreCallValidateGetSwapchainCounterEXT(
- VkDevice device,
- VkSwapchainKHR swapchain,
- VkSurfaceCounterFlagBitsEXT counter,
- uint64_t* pCounterValue) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetSwapchainCounterEXT-device-parameter", "VUID-vkGetSwapchainCounterEXT-commonparent");
- skip |= ValidateObject(device, swapchain, kVulkanObjectTypeSwapchainKHR, false, "VUID-vkGetSwapchainCounterEXT-swapchain-parameter", "VUID-vkGetSwapchainCounterEXT-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetRefreshCycleDurationGOOGLE(
- VkDevice device,
- VkSwapchainKHR swapchain,
- VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetRefreshCycleDurationGOOGLE-device-parameter", "VUID-vkGetRefreshCycleDurationGOOGLE-commonparent");
- skip |= ValidateObject(device, swapchain, kVulkanObjectTypeSwapchainKHR, false, "VUID-vkGetRefreshCycleDurationGOOGLE-swapchain-parameter", "VUID-vkGetRefreshCycleDurationGOOGLE-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetPastPresentationTimingGOOGLE(
- VkDevice device,
- VkSwapchainKHR swapchain,
- uint32_t* pPresentationTimingCount,
- VkPastPresentationTimingGOOGLE* pPresentationTimings) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetPastPresentationTimingGOOGLE-device-parameter", "VUID-vkGetPastPresentationTimingGOOGLE-commonparent");
- skip |= ValidateObject(device, swapchain, kVulkanObjectTypeSwapchainKHR, false, "VUID-vkGetPastPresentationTimingGOOGLE-swapchain-parameter", "VUID-vkGetPastPresentationTimingGOOGLE-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdSetDiscardRectangleEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstDiscardRectangle,
- uint32_t discardRectangleCount,
- const VkRect2D* pDiscardRectangles) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdSetDiscardRectangleEXT-commandBuffer-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateSetHdrMetadataEXT(
- VkDevice device,
- uint32_t swapchainCount,
- const VkSwapchainKHR* pSwapchains,
- const VkHdrMetadataEXT* pMetadata) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkSetHdrMetadataEXT-device-parameter", "VUID-vkSetHdrMetadataEXT-commonparent");
- if (pSwapchains) {
- for (uint32_t index0 = 0; index0 < swapchainCount; ++index0) {
- skip |= ValidateObject(device, pSwapchains[index0], kVulkanObjectTypeSwapchainKHR, false, "VUID-vkSetHdrMetadataEXT-pSwapchains-parameter", "VUID-vkSetHdrMetadataEXT-commonparent");
- }
- }
-
- return skip;
-}
-
-#ifdef VK_USE_PLATFORM_IOS_MVK
-
-bool ObjectLifetimes::PreCallValidateCreateIOSSurfaceMVK(
- VkInstance instance,
- const VkIOSSurfaceCreateInfoMVK* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- bool skip = false;
- skip |= ValidateObject(instance, instance, kVulkanObjectTypeInstance, false, "VUID-vkCreateIOSSurfaceMVK-instance-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordCreateIOSSurfaceMVK(
- VkInstance instance,
- const VkIOSSurfaceCreateInfoMVK* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- CreateObject(instance, *pSurface, kVulkanObjectTypeSurfaceKHR, pAllocator);
-
-}
-#endif // VK_USE_PLATFORM_IOS_MVK
-
-#ifdef VK_USE_PLATFORM_MACOS_MVK
-
-bool ObjectLifetimes::PreCallValidateCreateMacOSSurfaceMVK(
- VkInstance instance,
- const VkMacOSSurfaceCreateInfoMVK* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- bool skip = false;
- skip |= ValidateObject(instance, instance, kVulkanObjectTypeInstance, false, "VUID-vkCreateMacOSSurfaceMVK-instance-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordCreateMacOSSurfaceMVK(
- VkInstance instance,
- const VkMacOSSurfaceCreateInfoMVK* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- CreateObject(instance, *pSurface, kVulkanObjectTypeSurfaceKHR, pAllocator);
-
-}
-#endif // VK_USE_PLATFORM_MACOS_MVK
-
-bool ObjectLifetimes::PreCallValidateSetDebugUtilsObjectNameEXT(
- VkDevice device,
- const VkDebugUtilsObjectNameInfoEXT* pNameInfo) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkSetDebugUtilsObjectNameEXT-device-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateSetDebugUtilsObjectTagEXT(
- VkDevice device,
- const VkDebugUtilsObjectTagInfoEXT* pTagInfo) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkSetDebugUtilsObjectTagEXT-device-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateQueueBeginDebugUtilsLabelEXT(
- VkQueue queue,
- const VkDebugUtilsLabelEXT* pLabelInfo) {
- bool skip = false;
- skip |= ValidateObject(queue, queue, kVulkanObjectTypeQueue, false, "VUID-vkQueueBeginDebugUtilsLabelEXT-queue-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateQueueEndDebugUtilsLabelEXT(
- VkQueue queue) {
- bool skip = false;
- skip |= ValidateObject(queue, queue, kVulkanObjectTypeQueue, false, "VUID-vkQueueEndDebugUtilsLabelEXT-queue-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateQueueInsertDebugUtilsLabelEXT(
- VkQueue queue,
- const VkDebugUtilsLabelEXT* pLabelInfo) {
- bool skip = false;
- skip |= ValidateObject(queue, queue, kVulkanObjectTypeQueue, false, "VUID-vkQueueInsertDebugUtilsLabelEXT-queue-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdBeginDebugUtilsLabelEXT(
- VkCommandBuffer commandBuffer,
- const VkDebugUtilsLabelEXT* pLabelInfo) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdBeginDebugUtilsLabelEXT-commandBuffer-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdEndDebugUtilsLabelEXT(
- VkCommandBuffer commandBuffer) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdEndDebugUtilsLabelEXT-commandBuffer-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdInsertDebugUtilsLabelEXT(
- VkCommandBuffer commandBuffer,
- const VkDebugUtilsLabelEXT* pLabelInfo) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdInsertDebugUtilsLabelEXT-commandBuffer-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCreateDebugUtilsMessengerEXT(
- VkInstance instance,
- const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDebugUtilsMessengerEXT* pMessenger) {
- bool skip = false;
- skip |= ValidateObject(instance, instance, kVulkanObjectTypeInstance, false, "VUID-vkCreateDebugUtilsMessengerEXT-instance-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordCreateDebugUtilsMessengerEXT(
- VkInstance instance,
- const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDebugUtilsMessengerEXT* pMessenger,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- CreateObject(instance, *pMessenger, kVulkanObjectTypeDebugUtilsMessengerEXT, pAllocator);
-
-}
-
-bool ObjectLifetimes::PreCallValidateDestroyDebugUtilsMessengerEXT(
- VkInstance instance,
- VkDebugUtilsMessengerEXT messenger,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- skip |= ValidateObject(instance, instance, kVulkanObjectTypeInstance, false, "VUID-vkDestroyDebugUtilsMessengerEXT-instance-parameter", kVUIDUndefined);
- skip |= ValidateObject(instance, messenger, kVulkanObjectTypeDebugUtilsMessengerEXT, false, "VUID-vkDestroyDebugUtilsMessengerEXT-messenger-parameter", "VUID-vkDestroyDebugUtilsMessengerEXT-messenger-parent");
- skip |= ValidateDestroyObject(instance, messenger, kVulkanObjectTypeDebugUtilsMessengerEXT, pAllocator, kVUIDUndefined, kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PreCallRecordDestroyDebugUtilsMessengerEXT(
- VkInstance instance,
- VkDebugUtilsMessengerEXT messenger,
- const VkAllocationCallbacks* pAllocator) {
- RecordDestroyObject(instance, messenger, kVulkanObjectTypeDebugUtilsMessengerEXT);
-
-}
-
-bool ObjectLifetimes::PreCallValidateSubmitDebugUtilsMessageEXT(
- VkInstance instance,
- VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
- VkDebugUtilsMessageTypeFlagsEXT messageTypes,
- const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData) {
- bool skip = false;
- skip |= ValidateObject(instance, instance, kVulkanObjectTypeInstance, false, "VUID-vkSubmitDebugUtilsMessageEXT-instance-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-
-bool ObjectLifetimes::PreCallValidateGetAndroidHardwareBufferPropertiesANDROID(
- VkDevice device,
- const struct AHardwareBuffer* buffer,
- VkAndroidHardwareBufferPropertiesANDROID* pProperties) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetAndroidHardwareBufferPropertiesANDROID-device-parameter", kVUIDUndefined);
-
- return skip;
-}
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-
-bool ObjectLifetimes::PreCallValidateGetMemoryAndroidHardwareBufferANDROID(
- VkDevice device,
- const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo,
- struct AHardwareBuffer** pBuffer) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetMemoryAndroidHardwareBufferANDROID-device-parameter", kVUIDUndefined);
- if (pInfo) {
- skip |= ValidateObject(device, pInfo->memory, kVulkanObjectTypeDeviceMemory, false, "VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-memory-parameter", kVUIDUndefined);
- }
-
- return skip;
-}
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-
-bool ObjectLifetimes::PreCallValidateCmdSetSampleLocationsEXT(
- VkCommandBuffer commandBuffer,
- const VkSampleLocationsInfoEXT* pSampleLocationsInfo) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdSetSampleLocationsEXT-commandBuffer-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceMultisamplePropertiesEXT(
- VkPhysicalDevice physicalDevice,
- VkSampleCountFlagBits samples,
- VkMultisamplePropertiesEXT* pMultisampleProperties) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceMultisamplePropertiesEXT-physicalDevice-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetImageDrmFormatModifierPropertiesEXT(
- VkDevice device,
- VkImage image,
- VkImageDrmFormatModifierPropertiesEXT* pProperties) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetImageDrmFormatModifierPropertiesEXT-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, image, kVulkanObjectTypeImage, false, "VUID-vkGetImageDrmFormatModifierPropertiesEXT-image-parameter", "VUID-vkGetImageDrmFormatModifierPropertiesEXT-image-parent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCreateValidationCacheEXT(
- VkDevice device,
- const VkValidationCacheCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkValidationCacheEXT* pValidationCache) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkCreateValidationCacheEXT-device-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordCreateValidationCacheEXT(
- VkDevice device,
- const VkValidationCacheCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkValidationCacheEXT* pValidationCache,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- CreateObject(device, *pValidationCache, kVulkanObjectTypeValidationCacheEXT, pAllocator);
-
-}
-
-bool ObjectLifetimes::PreCallValidateDestroyValidationCacheEXT(
- VkDevice device,
- VkValidationCacheEXT validationCache,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkDestroyValidationCacheEXT-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, validationCache, kVulkanObjectTypeValidationCacheEXT, true, "VUID-vkDestroyValidationCacheEXT-validationCache-parameter", "VUID-vkDestroyValidationCacheEXT-validationCache-parent");
- skip |= ValidateDestroyObject(device, validationCache, kVulkanObjectTypeValidationCacheEXT, pAllocator, kVUIDUndefined, kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PreCallRecordDestroyValidationCacheEXT(
- VkDevice device,
- VkValidationCacheEXT validationCache,
- const VkAllocationCallbacks* pAllocator) {
- RecordDestroyObject(device, validationCache, kVulkanObjectTypeValidationCacheEXT);
-
-}
-
-bool ObjectLifetimes::PreCallValidateMergeValidationCachesEXT(
- VkDevice device,
- VkValidationCacheEXT dstCache,
- uint32_t srcCacheCount,
- const VkValidationCacheEXT* pSrcCaches) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkMergeValidationCachesEXT-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, dstCache, kVulkanObjectTypeValidationCacheEXT, false, "VUID-vkMergeValidationCachesEXT-dstCache-parameter", "VUID-vkMergeValidationCachesEXT-dstCache-parent");
- if (pSrcCaches) {
- for (uint32_t index0 = 0; index0 < srcCacheCount; ++index0) {
- skip |= ValidateObject(device, pSrcCaches[index0], kVulkanObjectTypeValidationCacheEXT, false, "VUID-vkMergeValidationCachesEXT-pSrcCaches-parameter", "VUID-vkMergeValidationCachesEXT-pSrcCaches-parent");
- }
- }
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetValidationCacheDataEXT(
- VkDevice device,
- VkValidationCacheEXT validationCache,
- size_t* pDataSize,
- void* pData) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetValidationCacheDataEXT-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, validationCache, kVulkanObjectTypeValidationCacheEXT, false, "VUID-vkGetValidationCacheDataEXT-validationCache-parameter", "VUID-vkGetValidationCacheDataEXT-validationCache-parent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdBindShadingRateImageNV(
- VkCommandBuffer commandBuffer,
- VkImageView imageView,
- VkImageLayout imageLayout) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdBindShadingRateImageNV-commandBuffer-parameter", "VUID-vkCmdBindShadingRateImageNV-commonparent");
- skip |= ValidateObject(commandBuffer, imageView, kVulkanObjectTypeImageView, true, "VUID-vkCmdBindShadingRateImageNV-imageView-parameter", "VUID-vkCmdBindShadingRateImageNV-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdSetViewportShadingRatePaletteNV(
- VkCommandBuffer commandBuffer,
- uint32_t firstViewport,
- uint32_t viewportCount,
- const VkShadingRatePaletteNV* pShadingRatePalettes) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdSetViewportShadingRatePaletteNV-commandBuffer-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdSetCoarseSampleOrderNV(
- VkCommandBuffer commandBuffer,
- VkCoarseSampleOrderTypeNV sampleOrderType,
- uint32_t customSampleOrderCount,
- const VkCoarseSampleOrderCustomNV* pCustomSampleOrders) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdSetCoarseSampleOrderNV-commandBuffer-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCreateAccelerationStructureNV(
- VkDevice device,
- const VkAccelerationStructureCreateInfoNV* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkAccelerationStructureNV* pAccelerationStructure) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkCreateAccelerationStructureNV-device-parameter", kVUIDUndefined);
- if (pCreateInfo) {
- if (pCreateInfo->info.pGeometries) {
- for (uint32_t index2 = 0; index2 < pCreateInfo->info.geometryCount; ++index2) {
- skip |= ValidateObject(device, pCreateInfo->info.pGeometries[index2].geometry.triangles.vertexData, kVulkanObjectTypeBuffer, true, "VUID-VkGeometryTrianglesNV-vertexData-parameter", "VUID-VkGeometryTrianglesNV-commonparent");
- skip |= ValidateObject(device, pCreateInfo->info.pGeometries[index2].geometry.triangles.indexData, kVulkanObjectTypeBuffer, true, "VUID-VkGeometryTrianglesNV-indexData-parameter", "VUID-VkGeometryTrianglesNV-commonparent");
- skip |= ValidateObject(device, pCreateInfo->info.pGeometries[index2].geometry.triangles.transformData, kVulkanObjectTypeBuffer, true, "VUID-VkGeometryTrianglesNV-transformData-parameter", "VUID-VkGeometryTrianglesNV-commonparent");
- skip |= ValidateObject(device, pCreateInfo->info.pGeometries[index2].geometry.aabbs.aabbData, kVulkanObjectTypeBuffer, true, "VUID-VkGeometryAABBNV-aabbData-parameter", kVUIDUndefined);
- }
- }
- }
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordCreateAccelerationStructureNV(
- VkDevice device,
- const VkAccelerationStructureCreateInfoNV* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkAccelerationStructureNV* pAccelerationStructure,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- CreateObject(device, *pAccelerationStructure, kVulkanObjectTypeAccelerationStructureNV, pAllocator);
-
-}
-
-bool ObjectLifetimes::PreCallValidateDestroyAccelerationStructureNV(
- VkDevice device,
- VkAccelerationStructureNV accelerationStructure,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkDestroyAccelerationStructureNV-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, accelerationStructure, kVulkanObjectTypeAccelerationStructureNV, false, "VUID-vkDestroyAccelerationStructureNV-accelerationStructure-parameter", "VUID-vkDestroyAccelerationStructureNV-accelerationStructure-parent");
- skip |= ValidateDestroyObject(device, accelerationStructure, kVulkanObjectTypeAccelerationStructureNV, pAllocator, kVUIDUndefined, kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PreCallRecordDestroyAccelerationStructureNV(
- VkDevice device,
- VkAccelerationStructureNV accelerationStructure,
- const VkAllocationCallbacks* pAllocator) {
- RecordDestroyObject(device, accelerationStructure, kVulkanObjectTypeAccelerationStructureNV);
-
-}
-
-bool ObjectLifetimes::PreCallValidateGetAccelerationStructureMemoryRequirementsNV(
- VkDevice device,
- const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo,
- VkMemoryRequirements2KHR* pMemoryRequirements) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetAccelerationStructureMemoryRequirementsNV-device-parameter", kVUIDUndefined);
- if (pInfo) {
- skip |= ValidateObject(device, pInfo->accelerationStructure, kVulkanObjectTypeAccelerationStructureNV, false, "VUID-VkAccelerationStructureMemoryRequirementsInfoNV-accelerationStructure-parameter", kVUIDUndefined);
- }
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateBindAccelerationStructureMemoryNV(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindAccelerationStructureMemoryInfoNV* pBindInfos) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkBindAccelerationStructureMemoryNV-device-parameter", kVUIDUndefined);
- if (pBindInfos) {
- for (uint32_t index0 = 0; index0 < bindInfoCount; ++index0) {
- skip |= ValidateObject(device, pBindInfos[index0].accelerationStructure, kVulkanObjectTypeAccelerationStructureNV, false, "VUID-VkBindAccelerationStructureMemoryInfoNV-accelerationStructure-parameter", "VUID-VkBindAccelerationStructureMemoryInfoNV-commonparent");
- skip |= ValidateObject(device, pBindInfos[index0].memory, kVulkanObjectTypeDeviceMemory, false, "VUID-VkBindAccelerationStructureMemoryInfoNV-memory-parameter", "VUID-VkBindAccelerationStructureMemoryInfoNV-commonparent");
- }
- }
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdBuildAccelerationStructureNV(
- VkCommandBuffer commandBuffer,
- const VkAccelerationStructureInfoNV* pInfo,
- VkBuffer instanceData,
- VkDeviceSize instanceOffset,
- VkBool32 update,
- VkAccelerationStructureNV dst,
- VkAccelerationStructureNV src,
- VkBuffer scratch,
- VkDeviceSize scratchOffset) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdBuildAccelerationStructureNV-commandBuffer-parameter", "VUID-vkCmdBuildAccelerationStructureNV-commonparent");
- if (pInfo) {
- if (pInfo->pGeometries) {
- for (uint32_t index1 = 0; index1 < pInfo->geometryCount; ++index1) {
- skip |= ValidateObject(commandBuffer, pInfo->pGeometries[index1].geometry.triangles.vertexData, kVulkanObjectTypeBuffer, true, "VUID-VkGeometryTrianglesNV-vertexData-parameter", "VUID-VkGeometryTrianglesNV-commonparent");
- skip |= ValidateObject(commandBuffer, pInfo->pGeometries[index1].geometry.triangles.indexData, kVulkanObjectTypeBuffer, true, "VUID-VkGeometryTrianglesNV-indexData-parameter", "VUID-VkGeometryTrianglesNV-commonparent");
- skip |= ValidateObject(commandBuffer, pInfo->pGeometries[index1].geometry.triangles.transformData, kVulkanObjectTypeBuffer, true, "VUID-VkGeometryTrianglesNV-transformData-parameter", "VUID-VkGeometryTrianglesNV-commonparent");
- skip |= ValidateObject(commandBuffer, pInfo->pGeometries[index1].geometry.aabbs.aabbData, kVulkanObjectTypeBuffer, true, "VUID-VkGeometryAABBNV-aabbData-parameter", kVUIDUndefined);
- }
- }
- }
- skip |= ValidateObject(commandBuffer, instanceData, kVulkanObjectTypeBuffer, true, "VUID-vkCmdBuildAccelerationStructureNV-instanceData-parameter", "VUID-vkCmdBuildAccelerationStructureNV-commonparent");
- skip |= ValidateObject(commandBuffer, dst, kVulkanObjectTypeAccelerationStructureNV, false, "VUID-vkCmdBuildAccelerationStructureNV-dst-parameter", "VUID-vkCmdBuildAccelerationStructureNV-commonparent");
- skip |= ValidateObject(commandBuffer, src, kVulkanObjectTypeAccelerationStructureNV, true, "VUID-vkCmdBuildAccelerationStructureNV-src-parameter", "VUID-vkCmdBuildAccelerationStructureNV-commonparent");
- skip |= ValidateObject(commandBuffer, scratch, kVulkanObjectTypeBuffer, false, "VUID-vkCmdBuildAccelerationStructureNV-scratch-parameter", "VUID-vkCmdBuildAccelerationStructureNV-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdCopyAccelerationStructureNV(
- VkCommandBuffer commandBuffer,
- VkAccelerationStructureNV dst,
- VkAccelerationStructureNV src,
- VkCopyAccelerationStructureModeNV mode) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdCopyAccelerationStructureNV-commandBuffer-parameter", "VUID-vkCmdCopyAccelerationStructureNV-commonparent");
- skip |= ValidateObject(commandBuffer, dst, kVulkanObjectTypeAccelerationStructureNV, false, "VUID-vkCmdCopyAccelerationStructureNV-dst-parameter", "VUID-vkCmdCopyAccelerationStructureNV-commonparent");
- skip |= ValidateObject(commandBuffer, src, kVulkanObjectTypeAccelerationStructureNV, false, "VUID-vkCmdCopyAccelerationStructureNV-src-parameter", "VUID-vkCmdCopyAccelerationStructureNV-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdTraceRaysNV(
- VkCommandBuffer commandBuffer,
- VkBuffer raygenShaderBindingTableBuffer,
- VkDeviceSize raygenShaderBindingOffset,
- VkBuffer missShaderBindingTableBuffer,
- VkDeviceSize missShaderBindingOffset,
- VkDeviceSize missShaderBindingStride,
- VkBuffer hitShaderBindingTableBuffer,
- VkDeviceSize hitShaderBindingOffset,
- VkDeviceSize hitShaderBindingStride,
- VkBuffer callableShaderBindingTableBuffer,
- VkDeviceSize callableShaderBindingOffset,
- VkDeviceSize callableShaderBindingStride,
- uint32_t width,
- uint32_t height,
- uint32_t depth) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdTraceRaysNV-commandBuffer-parameter", "VUID-vkCmdTraceRaysNV-commonparent");
- skip |= ValidateObject(commandBuffer, raygenShaderBindingTableBuffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdTraceRaysNV-raygenShaderBindingTableBuffer-parameter", "VUID-vkCmdTraceRaysNV-commonparent");
- skip |= ValidateObject(commandBuffer, missShaderBindingTableBuffer, kVulkanObjectTypeBuffer, true, "VUID-vkCmdTraceRaysNV-missShaderBindingTableBuffer-parameter", "VUID-vkCmdTraceRaysNV-commonparent");
- skip |= ValidateObject(commandBuffer, hitShaderBindingTableBuffer, kVulkanObjectTypeBuffer, true, "VUID-vkCmdTraceRaysNV-hitShaderBindingTableBuffer-parameter", "VUID-vkCmdTraceRaysNV-commonparent");
- skip |= ValidateObject(commandBuffer, callableShaderBindingTableBuffer, kVulkanObjectTypeBuffer, true, "VUID-vkCmdTraceRaysNV-callableShaderBindingTableBuffer-parameter", "VUID-vkCmdTraceRaysNV-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCreateRayTracingPipelinesNV(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VkRayTracingPipelineCreateInfoNV* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkPipeline* pPipelines) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkCreateRayTracingPipelinesNV-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, pipelineCache, kVulkanObjectTypePipelineCache, true, "VUID-vkCreateRayTracingPipelinesNV-pipelineCache-parameter", "VUID-vkCreateRayTracingPipelinesNV-pipelineCache-parent");
- if (pCreateInfos) {
- for (uint32_t index0 = 0; index0 < createInfoCount; ++index0) {
- if (pCreateInfos[index0].pStages) {
- for (uint32_t index1 = 0; index1 < pCreateInfos[index0].stageCount; ++index1) {
- skip |= ValidateObject(device, pCreateInfos[index0].pStages[index1].module, kVulkanObjectTypeShaderModule, false, "VUID-VkPipelineShaderStageCreateInfo-module-parameter", kVUIDUndefined);
- }
- }
- skip |= ValidateObject(device, pCreateInfos[index0].layout, kVulkanObjectTypePipelineLayout, false, "VUID-VkRayTracingPipelineCreateInfoNV-layout-parameter", "VUID-VkRayTracingPipelineCreateInfoNV-commonparent");
- skip |= ValidateObject(device, pCreateInfos[index0].basePipelineHandle, kVulkanObjectTypePipeline, true, kVUIDUndefined, "VUID-VkRayTracingPipelineCreateInfoNV-commonparent");
- }
- }
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordCreateRayTracingPipelinesNV(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VkRayTracingPipelineCreateInfoNV* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkPipeline* pPipelines,
- VkResult result) {
- if (VK_ERROR_VALIDATION_FAILED_EXT == result) return;
- if (pPipelines) {
- for (uint32_t index = 0; index < createInfoCount; index++) {
- if (!pPipelines[index]) continue;
- CreateObject(device, pPipelines[index], kVulkanObjectTypePipeline, pAllocator);
- }
- }
-
-}
-
-bool ObjectLifetimes::PreCallValidateGetRayTracingShaderGroupHandlesNV(
- VkDevice device,
- VkPipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- size_t dataSize,
- void* pData) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetRayTracingShaderGroupHandlesNV-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, pipeline, kVulkanObjectTypePipeline, false, "VUID-vkGetRayTracingShaderGroupHandlesNV-pipeline-parameter", "VUID-vkGetRayTracingShaderGroupHandlesNV-pipeline-parent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetAccelerationStructureHandleNV(
- VkDevice device,
- VkAccelerationStructureNV accelerationStructure,
- size_t dataSize,
- void* pData) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetAccelerationStructureHandleNV-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, accelerationStructure, kVulkanObjectTypeAccelerationStructureNV, false, "VUID-vkGetAccelerationStructureHandleNV-accelerationStructure-parameter", "VUID-vkGetAccelerationStructureHandleNV-accelerationStructure-parent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdWriteAccelerationStructuresPropertiesNV(
- VkCommandBuffer commandBuffer,
- uint32_t accelerationStructureCount,
- const VkAccelerationStructureNV* pAccelerationStructures,
- VkQueryType queryType,
- VkQueryPool queryPool,
- uint32_t firstQuery) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-commandBuffer-parameter", "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-commonparent");
- if (pAccelerationStructures) {
- for (uint32_t index0 = 0; index0 < accelerationStructureCount; ++index0) {
- skip |= ValidateObject(commandBuffer, pAccelerationStructures[index0], kVulkanObjectTypeAccelerationStructureNV, false, "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-pAccelerationStructures-parameter", "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-commonparent");
- }
- }
- skip |= ValidateObject(commandBuffer, queryPool, kVulkanObjectTypeQueryPool, false, "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-queryPool-parameter", "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCompileDeferredNV(
- VkDevice device,
- VkPipeline pipeline,
- uint32_t shader) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkCompileDeferredNV-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, pipeline, kVulkanObjectTypePipeline, false, "VUID-vkCompileDeferredNV-pipeline-parameter", "VUID-vkCompileDeferredNV-pipeline-parent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetMemoryHostPointerPropertiesEXT(
- VkDevice device,
- VkExternalMemoryHandleTypeFlagBits handleType,
- const void* pHostPointer,
- VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetMemoryHostPointerPropertiesEXT-device-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdWriteBufferMarkerAMD(
- VkCommandBuffer commandBuffer,
- VkPipelineStageFlagBits pipelineStage,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- uint32_t marker) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdWriteBufferMarkerAMD-commandBuffer-parameter", "VUID-vkCmdWriteBufferMarkerAMD-commonparent");
- skip |= ValidateObject(commandBuffer, dstBuffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdWriteBufferMarkerAMD-dstBuffer-parameter", "VUID-vkCmdWriteBufferMarkerAMD-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceCalibrateableTimeDomainsEXT(
- VkPhysicalDevice physicalDevice,
- uint32_t* pTimeDomainCount,
- VkTimeDomainEXT* pTimeDomains) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceCalibrateableTimeDomainsEXT-physicalDevice-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetCalibratedTimestampsEXT(
- VkDevice device,
- uint32_t timestampCount,
- const VkCalibratedTimestampInfoEXT* pTimestampInfos,
- uint64_t* pTimestamps,
- uint64_t* pMaxDeviation) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetCalibratedTimestampsEXT-device-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdDrawMeshTasksNV(
- VkCommandBuffer commandBuffer,
- uint32_t taskCount,
- uint32_t firstTask) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdDrawMeshTasksNV-commandBuffer-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdDrawMeshTasksIndirectNV(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- uint32_t drawCount,
- uint32_t stride) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-parameter", "VUID-vkCmdDrawMeshTasksIndirectNV-commonparent");
- skip |= ValidateObject(commandBuffer, buffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdDrawMeshTasksIndirectNV-buffer-parameter", "VUID-vkCmdDrawMeshTasksIndirectNV-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdDrawMeshTasksIndirectCountNV(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-parameter", "VUID-vkCmdDrawMeshTasksIndirectCountNV-commonparent");
- skip |= ValidateObject(commandBuffer, buffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdDrawMeshTasksIndirectCountNV-buffer-parameter", "VUID-vkCmdDrawMeshTasksIndirectCountNV-commonparent");
- skip |= ValidateObject(commandBuffer, countBuffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdDrawMeshTasksIndirectCountNV-countBuffer-parameter", "VUID-vkCmdDrawMeshTasksIndirectCountNV-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdSetExclusiveScissorNV(
- VkCommandBuffer commandBuffer,
- uint32_t firstExclusiveScissor,
- uint32_t exclusiveScissorCount,
- const VkRect2D* pExclusiveScissors) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdSetExclusiveScissorNV-commandBuffer-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdSetCheckpointNV(
- VkCommandBuffer commandBuffer,
- const void* pCheckpointMarker) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdSetCheckpointNV-commandBuffer-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetQueueCheckpointDataNV(
- VkQueue queue,
- uint32_t* pCheckpointDataCount,
- VkCheckpointDataNV* pCheckpointData) {
- bool skip = false;
- skip |= ValidateObject(queue, queue, kVulkanObjectTypeQueue, false, "VUID-vkGetQueueCheckpointDataNV-queue-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateInitializePerformanceApiINTEL(
- VkDevice device,
- const VkInitializePerformanceApiInfoINTEL* pInitializeInfo) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkInitializePerformanceApiINTEL-device-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateUninitializePerformanceApiINTEL(
- VkDevice device) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkUninitializePerformanceApiINTEL-device-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdSetPerformanceMarkerINTEL(
- VkCommandBuffer commandBuffer,
- const VkPerformanceMarkerInfoINTEL* pMarkerInfo) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdSetPerformanceMarkerINTEL-commandBuffer-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdSetPerformanceStreamMarkerINTEL(
- VkCommandBuffer commandBuffer,
- const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdSetPerformanceStreamMarkerINTEL-commandBuffer-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCmdSetPerformanceOverrideINTEL(
- VkCommandBuffer commandBuffer,
- const VkPerformanceOverrideInfoINTEL* pOverrideInfo) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdSetPerformanceOverrideINTEL-commandBuffer-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetPerformanceParameterINTEL(
- VkDevice device,
- VkPerformanceParameterTypeINTEL parameter,
- VkPerformanceValueINTEL* pValue) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetPerformanceParameterINTEL-device-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateSetLocalDimmingAMD(
- VkDevice device,
- VkSwapchainKHR swapChain,
- VkBool32 localDimmingEnable) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkSetLocalDimmingAMD-device-parameter", "VUID-vkSetLocalDimmingAMD-commonparent");
- skip |= ValidateObject(device, swapChain, kVulkanObjectTypeSwapchainKHR, false, "VUID-vkSetLocalDimmingAMD-swapChain-parameter", "VUID-vkSetLocalDimmingAMD-commonparent");
-
- return skip;
-}
-
-#ifdef VK_USE_PLATFORM_FUCHSIA
-
-bool ObjectLifetimes::PreCallValidateCreateImagePipeSurfaceFUCHSIA(
- VkInstance instance,
- const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- bool skip = false;
- skip |= ValidateObject(instance, instance, kVulkanObjectTypeInstance, false, "VUID-vkCreateImagePipeSurfaceFUCHSIA-instance-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordCreateImagePipeSurfaceFUCHSIA(
- VkInstance instance,
- const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- CreateObject(instance, *pSurface, kVulkanObjectTypeSurfaceKHR, pAllocator);
-
-}
-#endif // VK_USE_PLATFORM_FUCHSIA
-
-#ifdef VK_USE_PLATFORM_METAL_EXT
-
-bool ObjectLifetimes::PreCallValidateCreateMetalSurfaceEXT(
- VkInstance instance,
- const VkMetalSurfaceCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- bool skip = false;
- skip |= ValidateObject(instance, instance, kVulkanObjectTypeInstance, false, "VUID-vkCreateMetalSurfaceEXT-instance-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordCreateMetalSurfaceEXT(
- VkInstance instance,
- const VkMetalSurfaceCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- CreateObject(instance, *pSurface, kVulkanObjectTypeSurfaceKHR, pAllocator);
-
-}
-#endif // VK_USE_PLATFORM_METAL_EXT
-
-bool ObjectLifetimes::PreCallValidateGetBufferDeviceAddressEXT(
- VkDevice device,
- const VkBufferDeviceAddressInfoEXT* pInfo) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetBufferDeviceAddressEXT-device-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceCooperativeMatrixPropertiesNV(
- VkPhysicalDevice physicalDevice,
- uint32_t* pPropertyCount,
- VkCooperativeMatrixPropertiesNV* pProperties) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceCooperativeMatrixPropertiesNV-physicalDevice-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
- VkPhysicalDevice physicalDevice,
- uint32_t* pCombinationCount,
- VkFramebufferMixedSamplesCombinationNV* pCombinations) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV-physicalDevice-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceSurfacePresentModes2EXT(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
- uint32_t* pPresentModeCount,
- VkPresentModeKHR* pPresentModes) {
- bool skip = false;
- skip |= ValidateObject(physicalDevice, physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceSurfacePresentModes2EXT-physicalDevice-parameter", kVUIDUndefined);
- if (pSurfaceInfo) {
- skip |= ValidateObject(physicalDevice, pSurfaceInfo->surface, kVulkanObjectTypeSurfaceKHR, false, "VUID-VkPhysicalDeviceSurfaceInfo2KHR-surface-parameter", kVUIDUndefined);
- }
-
- return skip;
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-bool ObjectLifetimes::PreCallValidateAcquireFullScreenExclusiveModeEXT(
- VkDevice device,
- VkSwapchainKHR swapchain) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkAcquireFullScreenExclusiveModeEXT-device-parameter", "VUID-vkAcquireFullScreenExclusiveModeEXT-commonparent");
- skip |= ValidateObject(device, swapchain, kVulkanObjectTypeSwapchainKHR, false, "VUID-vkAcquireFullScreenExclusiveModeEXT-swapchain-parameter", "VUID-vkAcquireFullScreenExclusiveModeEXT-commonparent");
-
- return skip;
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-bool ObjectLifetimes::PreCallValidateReleaseFullScreenExclusiveModeEXT(
- VkDevice device,
- VkSwapchainKHR swapchain) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, kVUIDUndefined, kVUIDUndefined);
- skip |= ValidateObject(device, swapchain, kVulkanObjectTypeSwapchainKHR, false, kVUIDUndefined, kVUIDUndefined);
-
- return skip;
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-bool ObjectLifetimes::PreCallValidateGetDeviceGroupSurfacePresentModes2EXT(
- VkDevice device,
- const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
- VkDeviceGroupPresentModeFlagsKHR* pModes) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkGetDeviceGroupSurfacePresentModes2EXT-device-parameter", kVUIDUndefined);
- if (pSurfaceInfo) {
- skip |= ValidateObject(device, pSurfaceInfo->surface, kVulkanObjectTypeSurfaceKHR, false, "VUID-VkPhysicalDeviceSurfaceInfo2KHR-surface-parameter", kVUIDUndefined);
- }
-
- return skip;
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-bool ObjectLifetimes::PreCallValidateCreateHeadlessSurfaceEXT(
- VkInstance instance,
- const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- bool skip = false;
- skip |= ValidateObject(instance, instance, kVulkanObjectTypeInstance, false, "VUID-vkCreateHeadlessSurfaceEXT-instance-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordCreateHeadlessSurfaceEXT(
- VkInstance instance,
- const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- CreateObject(instance, *pSurface, kVulkanObjectTypeSurfaceKHR, pAllocator);
-
-}
-
-bool ObjectLifetimes::PreCallValidateCmdSetLineStippleEXT(
- VkCommandBuffer commandBuffer,
- uint32_t lineStippleFactor,
- uint16_t lineStipplePattern) {
- bool skip = false;
- skip |= ValidateObject(commandBuffer, commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdSetLineStippleEXT-commandBuffer-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateResetQueryPoolEXT(
- VkDevice device,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkResetQueryPoolEXT-device-parameter", kVUIDUndefined);
- skip |= ValidateObject(device, queryPool, kVulkanObjectTypeQueryPool, false, "VUID-vkResetQueryPoolEXT-queryPool-parameter", "VUID-vkResetQueryPoolEXT-queryPool-parent");
-
- return skip;
-}
-
-
diff --git a/layers/generated/object_tracker.h b/layers/generated/object_tracker.h
deleted file mode 100644
index 4426a5e5f..000000000
--- a/layers/generated/object_tracker.h
+++ /dev/null
@@ -1,2315 +0,0 @@
-// *** THIS FILE IS GENERATED - DO NOT EDIT ***
-// See object_tracker_generator.py for modifications
-
-
-/***************************************************************************
- *
- * Copyright (c) 2015-2019 The Khronos Group Inc.
- * Copyright (c) 2015-2019 Valve Corporation
- * Copyright (c) 2015-2019 LunarG, Inc.
- * Copyright (c) 2015-2019 Google Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * Author: Mark Lobodzinski <mark@lunarg.com>
- * Author: Dave Houlton <daveh@lunarg.com>
- *
- ****************************************************************************/
-
-
-
-
-void PostCallRecordCreateInstance(
- const VkInstanceCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkInstance* pInstance,
- VkResult result);
-bool PreCallValidateDestroyInstance(
- VkInstance instance,
- const VkAllocationCallbacks* pAllocator);
-void PreCallRecordDestroyInstance(
- VkInstance instance,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateEnumeratePhysicalDevices(
- VkInstance instance,
- uint32_t* pPhysicalDeviceCount,
- VkPhysicalDevice* pPhysicalDevices);
-void PostCallRecordEnumeratePhysicalDevices(
- VkInstance instance,
- uint32_t* pPhysicalDeviceCount,
- VkPhysicalDevice* pPhysicalDevices,
- VkResult result);
-bool PreCallValidateGetPhysicalDeviceFeatures(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceFeatures* pFeatures);
-bool PreCallValidateGetPhysicalDeviceFormatProperties(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkFormatProperties* pFormatProperties);
-bool PreCallValidateGetPhysicalDeviceImageFormatProperties(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkImageType type,
- VkImageTiling tiling,
- VkImageUsageFlags usage,
- VkImageCreateFlags flags,
- VkImageFormatProperties* pImageFormatProperties);
-bool PreCallValidateGetPhysicalDeviceProperties(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceProperties* pProperties);
-bool PreCallValidateGetPhysicalDeviceQueueFamilyProperties(
- VkPhysicalDevice physicalDevice,
- uint32_t* pQueueFamilyPropertyCount,
- VkQueueFamilyProperties* pQueueFamilyProperties);
-bool PreCallValidateGetPhysicalDeviceMemoryProperties(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceMemoryProperties* pMemoryProperties);
-bool PreCallValidateGetInstanceProcAddr(
- VkInstance instance,
- const char* pName);
-bool PreCallValidateGetDeviceProcAddr(
- VkDevice device,
- const char* pName);
-bool PreCallValidateCreateDevice(
- VkPhysicalDevice physicalDevice,
- const VkDeviceCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDevice* pDevice);
-void PostCallRecordCreateDevice(
- VkPhysicalDevice physicalDevice,
- const VkDeviceCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDevice* pDevice,
- VkResult result);
-bool PreCallValidateDestroyDevice(
- VkDevice device,
- const VkAllocationCallbacks* pAllocator);
-void PreCallRecordDestroyDevice(
- VkDevice device,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateEnumerateDeviceExtensionProperties(
- VkPhysicalDevice physicalDevice,
- const char* pLayerName,
- uint32_t* pPropertyCount,
- VkExtensionProperties* pProperties);
-bool PreCallValidateEnumerateDeviceLayerProperties(
- VkPhysicalDevice physicalDevice,
- uint32_t* pPropertyCount,
- VkLayerProperties* pProperties);
-bool PreCallValidateGetDeviceQueue(
- VkDevice device,
- uint32_t queueFamilyIndex,
- uint32_t queueIndex,
- VkQueue* pQueue);
-void PostCallRecordGetDeviceQueue(
- VkDevice device,
- uint32_t queueFamilyIndex,
- uint32_t queueIndex,
- VkQueue* pQueue);
-bool PreCallValidateQueueSubmit(
- VkQueue queue,
- uint32_t submitCount,
- const VkSubmitInfo* pSubmits,
- VkFence fence);
-bool PreCallValidateQueueWaitIdle(
- VkQueue queue);
-bool PreCallValidateDeviceWaitIdle(
- VkDevice device);
-bool PreCallValidateAllocateMemory(
- VkDevice device,
- const VkMemoryAllocateInfo* pAllocateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDeviceMemory* pMemory);
-void PostCallRecordAllocateMemory(
- VkDevice device,
- const VkMemoryAllocateInfo* pAllocateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDeviceMemory* pMemory,
- VkResult result);
-bool PreCallValidateFreeMemory(
- VkDevice device,
- VkDeviceMemory memory,
- const VkAllocationCallbacks* pAllocator);
-void PreCallRecordFreeMemory(
- VkDevice device,
- VkDeviceMemory memory,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateMapMemory(
- VkDevice device,
- VkDeviceMemory memory,
- VkDeviceSize offset,
- VkDeviceSize size,
- VkMemoryMapFlags flags,
- void** ppData);
-bool PreCallValidateUnmapMemory(
- VkDevice device,
- VkDeviceMemory memory);
-bool PreCallValidateFlushMappedMemoryRanges(
- VkDevice device,
- uint32_t memoryRangeCount,
- const VkMappedMemoryRange* pMemoryRanges);
-bool PreCallValidateInvalidateMappedMemoryRanges(
- VkDevice device,
- uint32_t memoryRangeCount,
- const VkMappedMemoryRange* pMemoryRanges);
-bool PreCallValidateGetDeviceMemoryCommitment(
- VkDevice device,
- VkDeviceMemory memory,
- VkDeviceSize* pCommittedMemoryInBytes);
-bool PreCallValidateBindBufferMemory(
- VkDevice device,
- VkBuffer buffer,
- VkDeviceMemory memory,
- VkDeviceSize memoryOffset);
-bool PreCallValidateBindImageMemory(
- VkDevice device,
- VkImage image,
- VkDeviceMemory memory,
- VkDeviceSize memoryOffset);
-bool PreCallValidateGetBufferMemoryRequirements(
- VkDevice device,
- VkBuffer buffer,
- VkMemoryRequirements* pMemoryRequirements);
-bool PreCallValidateGetImageMemoryRequirements(
- VkDevice device,
- VkImage image,
- VkMemoryRequirements* pMemoryRequirements);
-bool PreCallValidateGetImageSparseMemoryRequirements(
- VkDevice device,
- VkImage image,
- uint32_t* pSparseMemoryRequirementCount,
- VkSparseImageMemoryRequirements* pSparseMemoryRequirements);
-bool PreCallValidateGetPhysicalDeviceSparseImageFormatProperties(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkImageType type,
- VkSampleCountFlagBits samples,
- VkImageUsageFlags usage,
- VkImageTiling tiling,
- uint32_t* pPropertyCount,
- VkSparseImageFormatProperties* pProperties);
-bool PreCallValidateQueueBindSparse(
- VkQueue queue,
- uint32_t bindInfoCount,
- const VkBindSparseInfo* pBindInfo,
- VkFence fence);
-bool PreCallValidateCreateFence(
- VkDevice device,
- const VkFenceCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFence* pFence);
-void PostCallRecordCreateFence(
- VkDevice device,
- const VkFenceCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFence* pFence,
- VkResult result);
-bool PreCallValidateDestroyFence(
- VkDevice device,
- VkFence fence,
- const VkAllocationCallbacks* pAllocator);
-void PreCallRecordDestroyFence(
- VkDevice device,
- VkFence fence,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateResetFences(
- VkDevice device,
- uint32_t fenceCount,
- const VkFence* pFences);
-bool PreCallValidateGetFenceStatus(
- VkDevice device,
- VkFence fence);
-bool PreCallValidateWaitForFences(
- VkDevice device,
- uint32_t fenceCount,
- const VkFence* pFences,
- VkBool32 waitAll,
- uint64_t timeout);
-bool PreCallValidateCreateSemaphore(
- VkDevice device,
- const VkSemaphoreCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSemaphore* pSemaphore);
-void PostCallRecordCreateSemaphore(
- VkDevice device,
- const VkSemaphoreCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSemaphore* pSemaphore,
- VkResult result);
-bool PreCallValidateDestroySemaphore(
- VkDevice device,
- VkSemaphore semaphore,
- const VkAllocationCallbacks* pAllocator);
-void PreCallRecordDestroySemaphore(
- VkDevice device,
- VkSemaphore semaphore,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateCreateEvent(
- VkDevice device,
- const VkEventCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkEvent* pEvent);
-void PostCallRecordCreateEvent(
- VkDevice device,
- const VkEventCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkEvent* pEvent,
- VkResult result);
-bool PreCallValidateDestroyEvent(
- VkDevice device,
- VkEvent event,
- const VkAllocationCallbacks* pAllocator);
-void PreCallRecordDestroyEvent(
- VkDevice device,
- VkEvent event,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateGetEventStatus(
- VkDevice device,
- VkEvent event);
-bool PreCallValidateSetEvent(
- VkDevice device,
- VkEvent event);
-bool PreCallValidateResetEvent(
- VkDevice device,
- VkEvent event);
-bool PreCallValidateCreateQueryPool(
- VkDevice device,
- const VkQueryPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkQueryPool* pQueryPool);
-void PostCallRecordCreateQueryPool(
- VkDevice device,
- const VkQueryPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkQueryPool* pQueryPool,
- VkResult result);
-bool PreCallValidateDestroyQueryPool(
- VkDevice device,
- VkQueryPool queryPool,
- const VkAllocationCallbacks* pAllocator);
-void PreCallRecordDestroyQueryPool(
- VkDevice device,
- VkQueryPool queryPool,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateGetQueryPoolResults(
- VkDevice device,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- size_t dataSize,
- void* pData,
- VkDeviceSize stride,
- VkQueryResultFlags flags);
-bool PreCallValidateCreateBuffer(
- VkDevice device,
- const VkBufferCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkBuffer* pBuffer);
-void PostCallRecordCreateBuffer(
- VkDevice device,
- const VkBufferCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkBuffer* pBuffer,
- VkResult result);
-bool PreCallValidateDestroyBuffer(
- VkDevice device,
- VkBuffer buffer,
- const VkAllocationCallbacks* pAllocator);
-void PreCallRecordDestroyBuffer(
- VkDevice device,
- VkBuffer buffer,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateCreateBufferView(
- VkDevice device,
- const VkBufferViewCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkBufferView* pView);
-void PostCallRecordCreateBufferView(
- VkDevice device,
- const VkBufferViewCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkBufferView* pView,
- VkResult result);
-bool PreCallValidateDestroyBufferView(
- VkDevice device,
- VkBufferView bufferView,
- const VkAllocationCallbacks* pAllocator);
-void PreCallRecordDestroyBufferView(
- VkDevice device,
- VkBufferView bufferView,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateCreateImage(
- VkDevice device,
- const VkImageCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkImage* pImage);
-void PostCallRecordCreateImage(
- VkDevice device,
- const VkImageCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkImage* pImage,
- VkResult result);
-bool PreCallValidateDestroyImage(
- VkDevice device,
- VkImage image,
- const VkAllocationCallbacks* pAllocator);
-void PreCallRecordDestroyImage(
- VkDevice device,
- VkImage image,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateGetImageSubresourceLayout(
- VkDevice device,
- VkImage image,
- const VkImageSubresource* pSubresource,
- VkSubresourceLayout* pLayout);
-bool PreCallValidateCreateImageView(
- VkDevice device,
- const VkImageViewCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkImageView* pView);
-void PostCallRecordCreateImageView(
- VkDevice device,
- const VkImageViewCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkImageView* pView,
- VkResult result);
-bool PreCallValidateDestroyImageView(
- VkDevice device,
- VkImageView imageView,
- const VkAllocationCallbacks* pAllocator);
-void PreCallRecordDestroyImageView(
- VkDevice device,
- VkImageView imageView,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateCreateShaderModule(
- VkDevice device,
- const VkShaderModuleCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkShaderModule* pShaderModule);
-void PostCallRecordCreateShaderModule(
- VkDevice device,
- const VkShaderModuleCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkShaderModule* pShaderModule,
- VkResult result);
-bool PreCallValidateDestroyShaderModule(
- VkDevice device,
- VkShaderModule shaderModule,
- const VkAllocationCallbacks* pAllocator);
-void PreCallRecordDestroyShaderModule(
- VkDevice device,
- VkShaderModule shaderModule,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateCreatePipelineCache(
- VkDevice device,
- const VkPipelineCacheCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkPipelineCache* pPipelineCache);
-void PostCallRecordCreatePipelineCache(
- VkDevice device,
- const VkPipelineCacheCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkPipelineCache* pPipelineCache,
- VkResult result);
-bool PreCallValidateDestroyPipelineCache(
- VkDevice device,
- VkPipelineCache pipelineCache,
- const VkAllocationCallbacks* pAllocator);
-void PreCallRecordDestroyPipelineCache(
- VkDevice device,
- VkPipelineCache pipelineCache,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateGetPipelineCacheData(
- VkDevice device,
- VkPipelineCache pipelineCache,
- size_t* pDataSize,
- void* pData);
-bool PreCallValidateMergePipelineCaches(
- VkDevice device,
- VkPipelineCache dstCache,
- uint32_t srcCacheCount,
- const VkPipelineCache* pSrcCaches);
-bool PreCallValidateCreateGraphicsPipelines(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VkGraphicsPipelineCreateInfo* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkPipeline* pPipelines);
-void PostCallRecordCreateGraphicsPipelines(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VkGraphicsPipelineCreateInfo* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkPipeline* pPipelines,
- VkResult result);
-bool PreCallValidateCreateComputePipelines(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VkComputePipelineCreateInfo* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkPipeline* pPipelines);
-void PostCallRecordCreateComputePipelines(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VkComputePipelineCreateInfo* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkPipeline* pPipelines,
- VkResult result);
-bool PreCallValidateDestroyPipeline(
- VkDevice device,
- VkPipeline pipeline,
- const VkAllocationCallbacks* pAllocator);
-void PreCallRecordDestroyPipeline(
- VkDevice device,
- VkPipeline pipeline,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateCreatePipelineLayout(
- VkDevice device,
- const VkPipelineLayoutCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkPipelineLayout* pPipelineLayout);
-void PostCallRecordCreatePipelineLayout(
- VkDevice device,
- const VkPipelineLayoutCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkPipelineLayout* pPipelineLayout,
- VkResult result);
-bool PreCallValidateDestroyPipelineLayout(
- VkDevice device,
- VkPipelineLayout pipelineLayout,
- const VkAllocationCallbacks* pAllocator);
-void PreCallRecordDestroyPipelineLayout(
- VkDevice device,
- VkPipelineLayout pipelineLayout,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateCreateSampler(
- VkDevice device,
- const VkSamplerCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSampler* pSampler);
-void PostCallRecordCreateSampler(
- VkDevice device,
- const VkSamplerCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSampler* pSampler,
- VkResult result);
-bool PreCallValidateDestroySampler(
- VkDevice device,
- VkSampler sampler,
- const VkAllocationCallbacks* pAllocator);
-void PreCallRecordDestroySampler(
- VkDevice device,
- VkSampler sampler,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateCreateDescriptorSetLayout(
- VkDevice device,
- const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorSetLayout* pSetLayout);
-void PostCallRecordCreateDescriptorSetLayout(
- VkDevice device,
- const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorSetLayout* pSetLayout,
- VkResult result);
-bool PreCallValidateDestroyDescriptorSetLayout(
- VkDevice device,
- VkDescriptorSetLayout descriptorSetLayout,
- const VkAllocationCallbacks* pAllocator);
-void PreCallRecordDestroyDescriptorSetLayout(
- VkDevice device,
- VkDescriptorSetLayout descriptorSetLayout,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateCreateDescriptorPool(
- VkDevice device,
- const VkDescriptorPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorPool* pDescriptorPool);
-void PostCallRecordCreateDescriptorPool(
- VkDevice device,
- const VkDescriptorPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorPool* pDescriptorPool,
- VkResult result);
-bool PreCallValidateDestroyDescriptorPool(
- VkDevice device,
- VkDescriptorPool descriptorPool,
- const VkAllocationCallbacks* pAllocator);
-void PreCallRecordDestroyDescriptorPool(
- VkDevice device,
- VkDescriptorPool descriptorPool,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateResetDescriptorPool(
- VkDevice device,
- VkDescriptorPool descriptorPool,
- VkDescriptorPoolResetFlags flags);
-bool PreCallValidateAllocateDescriptorSets(
- VkDevice device,
- const VkDescriptorSetAllocateInfo* pAllocateInfo,
- VkDescriptorSet* pDescriptorSets);
-void PostCallRecordAllocateDescriptorSets(
- VkDevice device,
- const VkDescriptorSetAllocateInfo* pAllocateInfo,
- VkDescriptorSet* pDescriptorSets,
- VkResult result);
-bool PreCallValidateFreeDescriptorSets(
- VkDevice device,
- VkDescriptorPool descriptorPool,
- uint32_t descriptorSetCount,
- const VkDescriptorSet* pDescriptorSets);
-bool PreCallValidateUpdateDescriptorSets(
- VkDevice device,
- uint32_t descriptorWriteCount,
- const VkWriteDescriptorSet* pDescriptorWrites,
- uint32_t descriptorCopyCount,
- const VkCopyDescriptorSet* pDescriptorCopies);
-bool PreCallValidateCreateFramebuffer(
- VkDevice device,
- const VkFramebufferCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFramebuffer* pFramebuffer);
-void PostCallRecordCreateFramebuffer(
- VkDevice device,
- const VkFramebufferCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFramebuffer* pFramebuffer,
- VkResult result);
-bool PreCallValidateDestroyFramebuffer(
- VkDevice device,
- VkFramebuffer framebuffer,
- const VkAllocationCallbacks* pAllocator);
-void PreCallRecordDestroyFramebuffer(
- VkDevice device,
- VkFramebuffer framebuffer,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateCreateRenderPass(
- VkDevice device,
- const VkRenderPassCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkRenderPass* pRenderPass);
-void PostCallRecordCreateRenderPass(
- VkDevice device,
- const VkRenderPassCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkRenderPass* pRenderPass,
- VkResult result);
-bool PreCallValidateDestroyRenderPass(
- VkDevice device,
- VkRenderPass renderPass,
- const VkAllocationCallbacks* pAllocator);
-void PreCallRecordDestroyRenderPass(
- VkDevice device,
- VkRenderPass renderPass,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateGetRenderAreaGranularity(
- VkDevice device,
- VkRenderPass renderPass,
- VkExtent2D* pGranularity);
-bool PreCallValidateCreateCommandPool(
- VkDevice device,
- const VkCommandPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkCommandPool* pCommandPool);
-void PostCallRecordCreateCommandPool(
- VkDevice device,
- const VkCommandPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkCommandPool* pCommandPool,
- VkResult result);
-bool PreCallValidateDestroyCommandPool(
- VkDevice device,
- VkCommandPool commandPool,
- const VkAllocationCallbacks* pAllocator);
-void PreCallRecordDestroyCommandPool(
- VkDevice device,
- VkCommandPool commandPool,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateResetCommandPool(
- VkDevice device,
- VkCommandPool commandPool,
- VkCommandPoolResetFlags flags);
-bool PreCallValidateAllocateCommandBuffers(
- VkDevice device,
- const VkCommandBufferAllocateInfo* pAllocateInfo,
- VkCommandBuffer* pCommandBuffers);
-void PostCallRecordAllocateCommandBuffers(
- VkDevice device,
- const VkCommandBufferAllocateInfo* pAllocateInfo,
- VkCommandBuffer* pCommandBuffers,
- VkResult result);
-bool PreCallValidateFreeCommandBuffers(
- VkDevice device,
- VkCommandPool commandPool,
- uint32_t commandBufferCount,
- const VkCommandBuffer* pCommandBuffers);
-bool PreCallValidateBeginCommandBuffer(
- VkCommandBuffer commandBuffer,
- const VkCommandBufferBeginInfo* pBeginInfo);
-bool PreCallValidateEndCommandBuffer(
- VkCommandBuffer commandBuffer);
-bool PreCallValidateResetCommandBuffer(
- VkCommandBuffer commandBuffer,
- VkCommandBufferResetFlags flags);
-bool PreCallValidateCmdBindPipeline(
- VkCommandBuffer commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipeline pipeline);
-bool PreCallValidateCmdSetViewport(
- VkCommandBuffer commandBuffer,
- uint32_t firstViewport,
- uint32_t viewportCount,
- const VkViewport* pViewports);
-bool PreCallValidateCmdSetScissor(
- VkCommandBuffer commandBuffer,
- uint32_t firstScissor,
- uint32_t scissorCount,
- const VkRect2D* pScissors);
-bool PreCallValidateCmdSetLineWidth(
- VkCommandBuffer commandBuffer,
- float lineWidth);
-bool PreCallValidateCmdSetDepthBias(
- VkCommandBuffer commandBuffer,
- float depthBiasConstantFactor,
- float depthBiasClamp,
- float depthBiasSlopeFactor);
-bool PreCallValidateCmdSetBlendConstants(
- VkCommandBuffer commandBuffer,
- const float blendConstants[4]);
-bool PreCallValidateCmdSetDepthBounds(
- VkCommandBuffer commandBuffer,
- float minDepthBounds,
- float maxDepthBounds);
-bool PreCallValidateCmdSetStencilCompareMask(
- VkCommandBuffer commandBuffer,
- VkStencilFaceFlags faceMask,
- uint32_t compareMask);
-bool PreCallValidateCmdSetStencilWriteMask(
- VkCommandBuffer commandBuffer,
- VkStencilFaceFlags faceMask,
- uint32_t writeMask);
-bool PreCallValidateCmdSetStencilReference(
- VkCommandBuffer commandBuffer,
- VkStencilFaceFlags faceMask,
- uint32_t reference);
-bool PreCallValidateCmdBindDescriptorSets(
- VkCommandBuffer commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipelineLayout layout,
- uint32_t firstSet,
- uint32_t descriptorSetCount,
- const VkDescriptorSet* pDescriptorSets,
- uint32_t dynamicOffsetCount,
- const uint32_t* pDynamicOffsets);
-bool PreCallValidateCmdBindIndexBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkIndexType indexType);
-bool PreCallValidateCmdBindVertexBuffers(
- VkCommandBuffer commandBuffer,
- uint32_t firstBinding,
- uint32_t bindingCount,
- const VkBuffer* pBuffers,
- const VkDeviceSize* pOffsets);
-bool PreCallValidateCmdDraw(
- VkCommandBuffer commandBuffer,
- uint32_t vertexCount,
- uint32_t instanceCount,
- uint32_t firstVertex,
- uint32_t firstInstance);
-bool PreCallValidateCmdDrawIndexed(
- VkCommandBuffer commandBuffer,
- uint32_t indexCount,
- uint32_t instanceCount,
- uint32_t firstIndex,
- int32_t vertexOffset,
- uint32_t firstInstance);
-bool PreCallValidateCmdDrawIndirect(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- uint32_t drawCount,
- uint32_t stride);
-bool PreCallValidateCmdDrawIndexedIndirect(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- uint32_t drawCount,
- uint32_t stride);
-bool PreCallValidateCmdDispatch(
- VkCommandBuffer commandBuffer,
- uint32_t groupCountX,
- uint32_t groupCountY,
- uint32_t groupCountZ);
-bool PreCallValidateCmdDispatchIndirect(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset);
-bool PreCallValidateCmdCopyBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer srcBuffer,
- VkBuffer dstBuffer,
- uint32_t regionCount,
- const VkBufferCopy* pRegions);
-bool PreCallValidateCmdCopyImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkImageCopy* pRegions);
-bool PreCallValidateCmdBlitImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkImageBlit* pRegions,
- VkFilter filter);
-bool PreCallValidateCmdCopyBufferToImage(
- VkCommandBuffer commandBuffer,
- VkBuffer srcBuffer,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkBufferImageCopy* pRegions);
-bool PreCallValidateCmdCopyImageToBuffer(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkBuffer dstBuffer,
- uint32_t regionCount,
- const VkBufferImageCopy* pRegions);
-bool PreCallValidateCmdUpdateBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize dataSize,
- const void* pData);
-bool PreCallValidateCmdFillBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize size,
- uint32_t data);
-bool PreCallValidateCmdClearColorImage(
- VkCommandBuffer commandBuffer,
- VkImage image,
- VkImageLayout imageLayout,
- const VkClearColorValue* pColor,
- uint32_t rangeCount,
- const VkImageSubresourceRange* pRanges);
-bool PreCallValidateCmdClearDepthStencilImage(
- VkCommandBuffer commandBuffer,
- VkImage image,
- VkImageLayout imageLayout,
- const VkClearDepthStencilValue* pDepthStencil,
- uint32_t rangeCount,
- const VkImageSubresourceRange* pRanges);
-bool PreCallValidateCmdClearAttachments(
- VkCommandBuffer commandBuffer,
- uint32_t attachmentCount,
- const VkClearAttachment* pAttachments,
- uint32_t rectCount,
- const VkClearRect* pRects);
-bool PreCallValidateCmdResolveImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkImageResolve* pRegions);
-bool PreCallValidateCmdSetEvent(
- VkCommandBuffer commandBuffer,
- VkEvent event,
- VkPipelineStageFlags stageMask);
-bool PreCallValidateCmdResetEvent(
- VkCommandBuffer commandBuffer,
- VkEvent event,
- VkPipelineStageFlags stageMask);
-bool PreCallValidateCmdWaitEvents(
- VkCommandBuffer commandBuffer,
- uint32_t eventCount,
- const VkEvent* pEvents,
- VkPipelineStageFlags srcStageMask,
- VkPipelineStageFlags dstStageMask,
- uint32_t memoryBarrierCount,
- const VkMemoryBarrier* pMemoryBarriers,
- uint32_t bufferMemoryBarrierCount,
- const VkBufferMemoryBarrier* pBufferMemoryBarriers,
- uint32_t imageMemoryBarrierCount,
- const VkImageMemoryBarrier* pImageMemoryBarriers);
-bool PreCallValidateCmdPipelineBarrier(
- VkCommandBuffer commandBuffer,
- VkPipelineStageFlags srcStageMask,
- VkPipelineStageFlags dstStageMask,
- VkDependencyFlags dependencyFlags,
- uint32_t memoryBarrierCount,
- const VkMemoryBarrier* pMemoryBarriers,
- uint32_t bufferMemoryBarrierCount,
- const VkBufferMemoryBarrier* pBufferMemoryBarriers,
- uint32_t imageMemoryBarrierCount,
- const VkImageMemoryBarrier* pImageMemoryBarriers);
-bool PreCallValidateCmdBeginQuery(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query,
- VkQueryControlFlags flags);
-bool PreCallValidateCmdEndQuery(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query);
-bool PreCallValidateCmdResetQueryPool(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount);
-bool PreCallValidateCmdWriteTimestamp(
- VkCommandBuffer commandBuffer,
- VkPipelineStageFlagBits pipelineStage,
- VkQueryPool queryPool,
- uint32_t query);
-bool PreCallValidateCmdCopyQueryPoolResults(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize stride,
- VkQueryResultFlags flags);
-bool PreCallValidateCmdPushConstants(
- VkCommandBuffer commandBuffer,
- VkPipelineLayout layout,
- VkShaderStageFlags stageFlags,
- uint32_t offset,
- uint32_t size,
- const void* pValues);
-bool PreCallValidateCmdBeginRenderPass(
- VkCommandBuffer commandBuffer,
- const VkRenderPassBeginInfo* pRenderPassBegin,
- VkSubpassContents contents);
-bool PreCallValidateCmdNextSubpass(
- VkCommandBuffer commandBuffer,
- VkSubpassContents contents);
-bool PreCallValidateCmdEndRenderPass(
- VkCommandBuffer commandBuffer);
-bool PreCallValidateCmdExecuteCommands(
- VkCommandBuffer commandBuffer,
- uint32_t commandBufferCount,
- const VkCommandBuffer* pCommandBuffers);
-bool PreCallValidateBindBufferMemory2(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindBufferMemoryInfo* pBindInfos);
-bool PreCallValidateBindImageMemory2(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindImageMemoryInfo* pBindInfos);
-bool PreCallValidateGetDeviceGroupPeerMemoryFeatures(
- VkDevice device,
- uint32_t heapIndex,
- uint32_t localDeviceIndex,
- uint32_t remoteDeviceIndex,
- VkPeerMemoryFeatureFlags* pPeerMemoryFeatures);
-bool PreCallValidateCmdSetDeviceMask(
- VkCommandBuffer commandBuffer,
- uint32_t deviceMask);
-bool PreCallValidateCmdDispatchBase(
- VkCommandBuffer commandBuffer,
- uint32_t baseGroupX,
- uint32_t baseGroupY,
- uint32_t baseGroupZ,
- uint32_t groupCountX,
- uint32_t groupCountY,
- uint32_t groupCountZ);
-bool PreCallValidateEnumeratePhysicalDeviceGroups(
- VkInstance instance,
- uint32_t* pPhysicalDeviceGroupCount,
- VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties);
-bool PreCallValidateGetImageMemoryRequirements2(
- VkDevice device,
- const VkImageMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements);
-bool PreCallValidateGetBufferMemoryRequirements2(
- VkDevice device,
- const VkBufferMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements);
-bool PreCallValidateGetImageSparseMemoryRequirements2(
- VkDevice device,
- const VkImageSparseMemoryRequirementsInfo2* pInfo,
- uint32_t* pSparseMemoryRequirementCount,
- VkSparseImageMemoryRequirements2* pSparseMemoryRequirements);
-bool PreCallValidateGetPhysicalDeviceFeatures2(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceFeatures2* pFeatures);
-bool PreCallValidateGetPhysicalDeviceProperties2(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceProperties2* pProperties);
-bool PreCallValidateGetPhysicalDeviceFormatProperties2(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkFormatProperties2* pFormatProperties);
-bool PreCallValidateGetPhysicalDeviceImageFormatProperties2(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
- VkImageFormatProperties2* pImageFormatProperties);
-bool PreCallValidateGetPhysicalDeviceQueueFamilyProperties2(
- VkPhysicalDevice physicalDevice,
- uint32_t* pQueueFamilyPropertyCount,
- VkQueueFamilyProperties2* pQueueFamilyProperties);
-bool PreCallValidateGetPhysicalDeviceMemoryProperties2(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceMemoryProperties2* pMemoryProperties);
-bool PreCallValidateGetPhysicalDeviceSparseImageFormatProperties2(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo,
- uint32_t* pPropertyCount,
- VkSparseImageFormatProperties2* pProperties);
-bool PreCallValidateTrimCommandPool(
- VkDevice device,
- VkCommandPool commandPool,
- VkCommandPoolTrimFlags flags);
-bool PreCallValidateGetDeviceQueue2(
- VkDevice device,
- const VkDeviceQueueInfo2* pQueueInfo,
- VkQueue* pQueue);
-void PostCallRecordGetDeviceQueue2(
- VkDevice device,
- const VkDeviceQueueInfo2* pQueueInfo,
- VkQueue* pQueue);
-bool PreCallValidateCreateSamplerYcbcrConversion(
- VkDevice device,
- const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSamplerYcbcrConversion* pYcbcrConversion);
-void PostCallRecordCreateSamplerYcbcrConversion(
- VkDevice device,
- const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSamplerYcbcrConversion* pYcbcrConversion,
- VkResult result);
-bool PreCallValidateDestroySamplerYcbcrConversion(
- VkDevice device,
- VkSamplerYcbcrConversion ycbcrConversion,
- const VkAllocationCallbacks* pAllocator);
-void PreCallRecordDestroySamplerYcbcrConversion(
- VkDevice device,
- VkSamplerYcbcrConversion ycbcrConversion,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateCreateDescriptorUpdateTemplate(
- VkDevice device,
- const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate);
-void PostCallRecordCreateDescriptorUpdateTemplate(
- VkDevice device,
- const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate,
- VkResult result);
-bool PreCallValidateDestroyDescriptorUpdateTemplate(
- VkDevice device,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const VkAllocationCallbacks* pAllocator);
-void PreCallRecordDestroyDescriptorUpdateTemplate(
- VkDevice device,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateUpdateDescriptorSetWithTemplate(
- VkDevice device,
- VkDescriptorSet descriptorSet,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const void* pData);
-bool PreCallValidateGetPhysicalDeviceExternalBufferProperties(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo,
- VkExternalBufferProperties* pExternalBufferProperties);
-bool PreCallValidateGetPhysicalDeviceExternalFenceProperties(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo,
- VkExternalFenceProperties* pExternalFenceProperties);
-bool PreCallValidateGetPhysicalDeviceExternalSemaphoreProperties(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
- VkExternalSemaphoreProperties* pExternalSemaphoreProperties);
-bool PreCallValidateGetDescriptorSetLayoutSupport(
- VkDevice device,
- const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
- VkDescriptorSetLayoutSupport* pSupport);
-bool PreCallValidateDestroySurfaceKHR(
- VkInstance instance,
- VkSurfaceKHR surface,
- const VkAllocationCallbacks* pAllocator);
-void PreCallRecordDestroySurfaceKHR(
- VkInstance instance,
- VkSurfaceKHR surface,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateGetPhysicalDeviceSurfaceSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex,
- VkSurfaceKHR surface,
- VkBool32* pSupported);
-bool PreCallValidateGetPhysicalDeviceSurfaceCapabilitiesKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- VkSurfaceCapabilitiesKHR* pSurfaceCapabilities);
-bool PreCallValidateGetPhysicalDeviceSurfaceFormatsKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- uint32_t* pSurfaceFormatCount,
- VkSurfaceFormatKHR* pSurfaceFormats);
-bool PreCallValidateGetPhysicalDeviceSurfacePresentModesKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- uint32_t* pPresentModeCount,
- VkPresentModeKHR* pPresentModes);
-bool PreCallValidateCreateSwapchainKHR(
- VkDevice device,
- const VkSwapchainCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSwapchainKHR* pSwapchain);
-void PostCallRecordCreateSwapchainKHR(
- VkDevice device,
- const VkSwapchainCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSwapchainKHR* pSwapchain,
- VkResult result);
-bool PreCallValidateDestroySwapchainKHR(
- VkDevice device,
- VkSwapchainKHR swapchain,
- const VkAllocationCallbacks* pAllocator);
-void PreCallRecordDestroySwapchainKHR(
- VkDevice device,
- VkSwapchainKHR swapchain,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateGetSwapchainImagesKHR(
- VkDevice device,
- VkSwapchainKHR swapchain,
- uint32_t* pSwapchainImageCount,
- VkImage* pSwapchainImages);
-void PostCallRecordGetSwapchainImagesKHR(
- VkDevice device,
- VkSwapchainKHR swapchain,
- uint32_t* pSwapchainImageCount,
- VkImage* pSwapchainImages,
- VkResult result);
-bool PreCallValidateAcquireNextImageKHR(
- VkDevice device,
- VkSwapchainKHR swapchain,
- uint64_t timeout,
- VkSemaphore semaphore,
- VkFence fence,
- uint32_t* pImageIndex);
-bool PreCallValidateQueuePresentKHR(
- VkQueue queue,
- const VkPresentInfoKHR* pPresentInfo);
-bool PreCallValidateGetDeviceGroupPresentCapabilitiesKHR(
- VkDevice device,
- VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities);
-bool PreCallValidateGetDeviceGroupSurfacePresentModesKHR(
- VkDevice device,
- VkSurfaceKHR surface,
- VkDeviceGroupPresentModeFlagsKHR* pModes);
-bool PreCallValidateGetPhysicalDevicePresentRectanglesKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- uint32_t* pRectCount,
- VkRect2D* pRects);
-bool PreCallValidateAcquireNextImage2KHR(
- VkDevice device,
- const VkAcquireNextImageInfoKHR* pAcquireInfo,
- uint32_t* pImageIndex);
-bool PreCallValidateGetPhysicalDeviceDisplayPropertiesKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t* pPropertyCount,
- VkDisplayPropertiesKHR* pProperties);
-bool PreCallValidateGetPhysicalDeviceDisplayPlanePropertiesKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t* pPropertyCount,
- VkDisplayPlanePropertiesKHR* pProperties);
-bool PreCallValidateGetDisplayPlaneSupportedDisplaysKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t planeIndex,
- uint32_t* pDisplayCount,
- VkDisplayKHR* pDisplays);
-void PostCallRecordGetDisplayPlaneSupportedDisplaysKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t planeIndex,
- uint32_t* pDisplayCount,
- VkDisplayKHR* pDisplays,
- VkResult result);
-bool PreCallValidateGetDisplayModePropertiesKHR(
- VkPhysicalDevice physicalDevice,
- VkDisplayKHR display,
- uint32_t* pPropertyCount,
- VkDisplayModePropertiesKHR* pProperties);
-bool PreCallValidateCreateDisplayModeKHR(
- VkPhysicalDevice physicalDevice,
- VkDisplayKHR display,
- const VkDisplayModeCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDisplayModeKHR* pMode);
-void PostCallRecordCreateDisplayModeKHR(
- VkPhysicalDevice physicalDevice,
- VkDisplayKHR display,
- const VkDisplayModeCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDisplayModeKHR* pMode,
- VkResult result);
-bool PreCallValidateGetDisplayPlaneCapabilitiesKHR(
- VkPhysicalDevice physicalDevice,
- VkDisplayModeKHR mode,
- uint32_t planeIndex,
- VkDisplayPlaneCapabilitiesKHR* pCapabilities);
-bool PreCallValidateCreateDisplayPlaneSurfaceKHR(
- VkInstance instance,
- const VkDisplaySurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-void PostCallRecordCreateDisplayPlaneSurfaceKHR(
- VkInstance instance,
- const VkDisplaySurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result);
-bool PreCallValidateCreateSharedSwapchainsKHR(
- VkDevice device,
- uint32_t swapchainCount,
- const VkSwapchainCreateInfoKHR* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkSwapchainKHR* pSwapchains);
-void PostCallRecordCreateSharedSwapchainsKHR(
- VkDevice device,
- uint32_t swapchainCount,
- const VkSwapchainCreateInfoKHR* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkSwapchainKHR* pSwapchains,
- VkResult result);
-
-#ifdef VK_USE_PLATFORM_XLIB_KHR
-bool PreCallValidateCreateXlibSurfaceKHR(
- VkInstance instance,
- const VkXlibSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-void PostCallRecordCreateXlibSurfaceKHR(
- VkInstance instance,
- const VkXlibSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result);
-#endif // VK_USE_PLATFORM_XLIB_KHR
-
-#ifdef VK_USE_PLATFORM_XLIB_KHR
-bool PreCallValidateGetPhysicalDeviceXlibPresentationSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex,
- Display* dpy,
- VisualID visualID);
-#endif // VK_USE_PLATFORM_XLIB_KHR
-
-#ifdef VK_USE_PLATFORM_XCB_KHR
-bool PreCallValidateCreateXcbSurfaceKHR(
- VkInstance instance,
- const VkXcbSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-void PostCallRecordCreateXcbSurfaceKHR(
- VkInstance instance,
- const VkXcbSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result);
-#endif // VK_USE_PLATFORM_XCB_KHR
-
-#ifdef VK_USE_PLATFORM_XCB_KHR
-bool PreCallValidateGetPhysicalDeviceXcbPresentationSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex,
- xcb_connection_t* connection,
- xcb_visualid_t visual_id);
-#endif // VK_USE_PLATFORM_XCB_KHR
-
-#ifdef VK_USE_PLATFORM_WAYLAND_KHR
-bool PreCallValidateCreateWaylandSurfaceKHR(
- VkInstance instance,
- const VkWaylandSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-void PostCallRecordCreateWaylandSurfaceKHR(
- VkInstance instance,
- const VkWaylandSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result);
-#endif // VK_USE_PLATFORM_WAYLAND_KHR
-
-#ifdef VK_USE_PLATFORM_WAYLAND_KHR
-bool PreCallValidateGetPhysicalDeviceWaylandPresentationSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex,
- struct wl_display* display);
-#endif // VK_USE_PLATFORM_WAYLAND_KHR
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-bool PreCallValidateCreateAndroidSurfaceKHR(
- VkInstance instance,
- const VkAndroidSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-void PostCallRecordCreateAndroidSurfaceKHR(
- VkInstance instance,
- const VkAndroidSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result);
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-bool PreCallValidateCreateWin32SurfaceKHR(
- VkInstance instance,
- const VkWin32SurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-void PostCallRecordCreateWin32SurfaceKHR(
- VkInstance instance,
- const VkWin32SurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result);
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-bool PreCallValidateGetPhysicalDeviceWin32PresentationSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex);
-#endif // VK_USE_PLATFORM_WIN32_KHR
-bool PreCallValidateGetPhysicalDeviceFeatures2KHR(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceFeatures2* pFeatures);
-bool PreCallValidateGetPhysicalDeviceProperties2KHR(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceProperties2* pProperties);
-bool PreCallValidateGetPhysicalDeviceFormatProperties2KHR(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkFormatProperties2* pFormatProperties);
-bool PreCallValidateGetPhysicalDeviceImageFormatProperties2KHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
- VkImageFormatProperties2* pImageFormatProperties);
-bool PreCallValidateGetPhysicalDeviceQueueFamilyProperties2KHR(
- VkPhysicalDevice physicalDevice,
- uint32_t* pQueueFamilyPropertyCount,
- VkQueueFamilyProperties2* pQueueFamilyProperties);
-bool PreCallValidateGetPhysicalDeviceMemoryProperties2KHR(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceMemoryProperties2* pMemoryProperties);
-bool PreCallValidateGetPhysicalDeviceSparseImageFormatProperties2KHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo,
- uint32_t* pPropertyCount,
- VkSparseImageFormatProperties2* pProperties);
-bool PreCallValidateGetDeviceGroupPeerMemoryFeaturesKHR(
- VkDevice device,
- uint32_t heapIndex,
- uint32_t localDeviceIndex,
- uint32_t remoteDeviceIndex,
- VkPeerMemoryFeatureFlags* pPeerMemoryFeatures);
-bool PreCallValidateCmdSetDeviceMaskKHR(
- VkCommandBuffer commandBuffer,
- uint32_t deviceMask);
-bool PreCallValidateCmdDispatchBaseKHR(
- VkCommandBuffer commandBuffer,
- uint32_t baseGroupX,
- uint32_t baseGroupY,
- uint32_t baseGroupZ,
- uint32_t groupCountX,
- uint32_t groupCountY,
- uint32_t groupCountZ);
-bool PreCallValidateTrimCommandPoolKHR(
- VkDevice device,
- VkCommandPool commandPool,
- VkCommandPoolTrimFlags flags);
-bool PreCallValidateEnumeratePhysicalDeviceGroupsKHR(
- VkInstance instance,
- uint32_t* pPhysicalDeviceGroupCount,
- VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties);
-bool PreCallValidateGetPhysicalDeviceExternalBufferPropertiesKHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo,
- VkExternalBufferProperties* pExternalBufferProperties);
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-bool PreCallValidateGetMemoryWin32HandleKHR(
- VkDevice device,
- const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo,
- HANDLE* pHandle);
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-bool PreCallValidateGetMemoryWin32HandlePropertiesKHR(
- VkDevice device,
- VkExternalMemoryHandleTypeFlagBits handleType,
- HANDLE handle,
- VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties);
-#endif // VK_USE_PLATFORM_WIN32_KHR
-bool PreCallValidateGetMemoryFdKHR(
- VkDevice device,
- const VkMemoryGetFdInfoKHR* pGetFdInfo,
- int* pFd);
-bool PreCallValidateGetMemoryFdPropertiesKHR(
- VkDevice device,
- VkExternalMemoryHandleTypeFlagBits handleType,
- int fd,
- VkMemoryFdPropertiesKHR* pMemoryFdProperties);
-bool PreCallValidateGetPhysicalDeviceExternalSemaphorePropertiesKHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
- VkExternalSemaphoreProperties* pExternalSemaphoreProperties);
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-bool PreCallValidateImportSemaphoreWin32HandleKHR(
- VkDevice device,
- const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo);
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-bool PreCallValidateGetSemaphoreWin32HandleKHR(
- VkDevice device,
- const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo,
- HANDLE* pHandle);
-#endif // VK_USE_PLATFORM_WIN32_KHR
-bool PreCallValidateImportSemaphoreFdKHR(
- VkDevice device,
- const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo);
-bool PreCallValidateGetSemaphoreFdKHR(
- VkDevice device,
- const VkSemaphoreGetFdInfoKHR* pGetFdInfo,
- int* pFd);
-bool PreCallValidateCmdPushDescriptorSetKHR(
- VkCommandBuffer commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipelineLayout layout,
- uint32_t set,
- uint32_t descriptorWriteCount,
- const VkWriteDescriptorSet* pDescriptorWrites);
-bool PreCallValidateCmdPushDescriptorSetWithTemplateKHR(
- VkCommandBuffer commandBuffer,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- VkPipelineLayout layout,
- uint32_t set,
- const void* pData);
-bool PreCallValidateCreateDescriptorUpdateTemplateKHR(
- VkDevice device,
- const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate);
-void PostCallRecordCreateDescriptorUpdateTemplateKHR(
- VkDevice device,
- const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate,
- VkResult result);
-bool PreCallValidateDestroyDescriptorUpdateTemplateKHR(
- VkDevice device,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const VkAllocationCallbacks* pAllocator);
-void PreCallRecordDestroyDescriptorUpdateTemplateKHR(
- VkDevice device,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateUpdateDescriptorSetWithTemplateKHR(
- VkDevice device,
- VkDescriptorSet descriptorSet,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const void* pData);
-bool PreCallValidateCreateRenderPass2KHR(
- VkDevice device,
- const VkRenderPassCreateInfo2KHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkRenderPass* pRenderPass);
-void PostCallRecordCreateRenderPass2KHR(
- VkDevice device,
- const VkRenderPassCreateInfo2KHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkRenderPass* pRenderPass,
- VkResult result);
-bool PreCallValidateCmdBeginRenderPass2KHR(
- VkCommandBuffer commandBuffer,
- const VkRenderPassBeginInfo* pRenderPassBegin,
- const VkSubpassBeginInfoKHR* pSubpassBeginInfo);
-bool PreCallValidateCmdNextSubpass2KHR(
- VkCommandBuffer commandBuffer,
- const VkSubpassBeginInfoKHR* pSubpassBeginInfo,
- const VkSubpassEndInfoKHR* pSubpassEndInfo);
-bool PreCallValidateCmdEndRenderPass2KHR(
- VkCommandBuffer commandBuffer,
- const VkSubpassEndInfoKHR* pSubpassEndInfo);
-bool PreCallValidateGetSwapchainStatusKHR(
- VkDevice device,
- VkSwapchainKHR swapchain);
-bool PreCallValidateGetPhysicalDeviceExternalFencePropertiesKHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo,
- VkExternalFenceProperties* pExternalFenceProperties);
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-bool PreCallValidateImportFenceWin32HandleKHR(
- VkDevice device,
- const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo);
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-bool PreCallValidateGetFenceWin32HandleKHR(
- VkDevice device,
- const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo,
- HANDLE* pHandle);
-#endif // VK_USE_PLATFORM_WIN32_KHR
-bool PreCallValidateImportFenceFdKHR(
- VkDevice device,
- const VkImportFenceFdInfoKHR* pImportFenceFdInfo);
-bool PreCallValidateGetFenceFdKHR(
- VkDevice device,
- const VkFenceGetFdInfoKHR* pGetFdInfo,
- int* pFd);
-bool PreCallValidateGetPhysicalDeviceSurfaceCapabilities2KHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
- VkSurfaceCapabilities2KHR* pSurfaceCapabilities);
-bool PreCallValidateGetPhysicalDeviceSurfaceFormats2KHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
- uint32_t* pSurfaceFormatCount,
- VkSurfaceFormat2KHR* pSurfaceFormats);
-bool PreCallValidateGetPhysicalDeviceDisplayProperties2KHR(
- VkPhysicalDevice physicalDevice,
- uint32_t* pPropertyCount,
- VkDisplayProperties2KHR* pProperties);
-bool PreCallValidateGetPhysicalDeviceDisplayPlaneProperties2KHR(
- VkPhysicalDevice physicalDevice,
- uint32_t* pPropertyCount,
- VkDisplayPlaneProperties2KHR* pProperties);
-bool PreCallValidateGetDisplayModeProperties2KHR(
- VkPhysicalDevice physicalDevice,
- VkDisplayKHR display,
- uint32_t* pPropertyCount,
- VkDisplayModeProperties2KHR* pProperties);
-bool PreCallValidateGetDisplayPlaneCapabilities2KHR(
- VkPhysicalDevice physicalDevice,
- const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo,
- VkDisplayPlaneCapabilities2KHR* pCapabilities);
-bool PreCallValidateGetImageMemoryRequirements2KHR(
- VkDevice device,
- const VkImageMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements);
-bool PreCallValidateGetBufferMemoryRequirements2KHR(
- VkDevice device,
- const VkBufferMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements);
-bool PreCallValidateGetImageSparseMemoryRequirements2KHR(
- VkDevice device,
- const VkImageSparseMemoryRequirementsInfo2* pInfo,
- uint32_t* pSparseMemoryRequirementCount,
- VkSparseImageMemoryRequirements2* pSparseMemoryRequirements);
-bool PreCallValidateCreateSamplerYcbcrConversionKHR(
- VkDevice device,
- const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSamplerYcbcrConversion* pYcbcrConversion);
-void PostCallRecordCreateSamplerYcbcrConversionKHR(
- VkDevice device,
- const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSamplerYcbcrConversion* pYcbcrConversion,
- VkResult result);
-bool PreCallValidateDestroySamplerYcbcrConversionKHR(
- VkDevice device,
- VkSamplerYcbcrConversion ycbcrConversion,
- const VkAllocationCallbacks* pAllocator);
-void PreCallRecordDestroySamplerYcbcrConversionKHR(
- VkDevice device,
- VkSamplerYcbcrConversion ycbcrConversion,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateBindBufferMemory2KHR(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindBufferMemoryInfo* pBindInfos);
-bool PreCallValidateBindImageMemory2KHR(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindImageMemoryInfo* pBindInfos);
-bool PreCallValidateGetDescriptorSetLayoutSupportKHR(
- VkDevice device,
- const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
- VkDescriptorSetLayoutSupport* pSupport);
-bool PreCallValidateCmdDrawIndirectCountKHR(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride);
-bool PreCallValidateCmdDrawIndexedIndirectCountKHR(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride);
-bool PreCallValidateGetPipelineExecutablePropertiesKHR(
- VkDevice device,
- const VkPipelineInfoKHR* pPipelineInfo,
- uint32_t* pExecutableCount,
- VkPipelineExecutablePropertiesKHR* pProperties);
-bool PreCallValidateGetPipelineExecutableStatisticsKHR(
- VkDevice device,
- const VkPipelineExecutableInfoKHR* pExecutableInfo,
- uint32_t* pStatisticCount,
- VkPipelineExecutableStatisticKHR* pStatistics);
-bool PreCallValidateGetPipelineExecutableInternalRepresentationsKHR(
- VkDevice device,
- const VkPipelineExecutableInfoKHR* pExecutableInfo,
- uint32_t* pInternalRepresentationCount,
- VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations);
-bool PreCallValidateCreateDebugReportCallbackEXT(
- VkInstance instance,
- const VkDebugReportCallbackCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDebugReportCallbackEXT* pCallback);
-void PostCallRecordCreateDebugReportCallbackEXT(
- VkInstance instance,
- const VkDebugReportCallbackCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDebugReportCallbackEXT* pCallback,
- VkResult result);
-bool PreCallValidateDestroyDebugReportCallbackEXT(
- VkInstance instance,
- VkDebugReportCallbackEXT callback,
- const VkAllocationCallbacks* pAllocator);
-void PreCallRecordDestroyDebugReportCallbackEXT(
- VkInstance instance,
- VkDebugReportCallbackEXT callback,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateDebugReportMessageEXT(
- VkInstance instance,
- VkDebugReportFlagsEXT flags,
- VkDebugReportObjectTypeEXT objectType,
- uint64_t object,
- size_t location,
- int32_t messageCode,
- const char* pLayerPrefix,
- const char* pMessage);
-bool PreCallValidateDebugMarkerSetObjectTagEXT(
- VkDevice device,
- const VkDebugMarkerObjectTagInfoEXT* pTagInfo);
-bool PreCallValidateDebugMarkerSetObjectNameEXT(
- VkDevice device,
- const VkDebugMarkerObjectNameInfoEXT* pNameInfo);
-bool PreCallValidateCmdDebugMarkerBeginEXT(
- VkCommandBuffer commandBuffer,
- const VkDebugMarkerMarkerInfoEXT* pMarkerInfo);
-bool PreCallValidateCmdDebugMarkerEndEXT(
- VkCommandBuffer commandBuffer);
-bool PreCallValidateCmdDebugMarkerInsertEXT(
- VkCommandBuffer commandBuffer,
- const VkDebugMarkerMarkerInfoEXT* pMarkerInfo);
-bool PreCallValidateCmdBindTransformFeedbackBuffersEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstBinding,
- uint32_t bindingCount,
- const VkBuffer* pBuffers,
- const VkDeviceSize* pOffsets,
- const VkDeviceSize* pSizes);
-bool PreCallValidateCmdBeginTransformFeedbackEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstCounterBuffer,
- uint32_t counterBufferCount,
- const VkBuffer* pCounterBuffers,
- const VkDeviceSize* pCounterBufferOffsets);
-bool PreCallValidateCmdEndTransformFeedbackEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstCounterBuffer,
- uint32_t counterBufferCount,
- const VkBuffer* pCounterBuffers,
- const VkDeviceSize* pCounterBufferOffsets);
-bool PreCallValidateCmdBeginQueryIndexedEXT(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query,
- VkQueryControlFlags flags,
- uint32_t index);
-bool PreCallValidateCmdEndQueryIndexedEXT(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query,
- uint32_t index);
-bool PreCallValidateCmdDrawIndirectByteCountEXT(
- VkCommandBuffer commandBuffer,
- uint32_t instanceCount,
- uint32_t firstInstance,
- VkBuffer counterBuffer,
- VkDeviceSize counterBufferOffset,
- uint32_t counterOffset,
- uint32_t vertexStride);
-bool PreCallValidateGetImageViewHandleNVX(
- VkDevice device,
- const VkImageViewHandleInfoNVX* pInfo);
-bool PreCallValidateCmdDrawIndirectCountAMD(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride);
-bool PreCallValidateCmdDrawIndexedIndirectCountAMD(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride);
-bool PreCallValidateGetShaderInfoAMD(
- VkDevice device,
- VkPipeline pipeline,
- VkShaderStageFlagBits shaderStage,
- VkShaderInfoTypeAMD infoType,
- size_t* pInfoSize,
- void* pInfo);
-
-#ifdef VK_USE_PLATFORM_GGP
-bool PreCallValidateCreateStreamDescriptorSurfaceGGP(
- VkInstance instance,
- const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-void PostCallRecordCreateStreamDescriptorSurfaceGGP(
- VkInstance instance,
- const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result);
-#endif // VK_USE_PLATFORM_GGP
-bool PreCallValidateGetPhysicalDeviceExternalImageFormatPropertiesNV(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkImageType type,
- VkImageTiling tiling,
- VkImageUsageFlags usage,
- VkImageCreateFlags flags,
- VkExternalMemoryHandleTypeFlagsNV externalHandleType,
- VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties);
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-bool PreCallValidateGetMemoryWin32HandleNV(
- VkDevice device,
- VkDeviceMemory memory,
- VkExternalMemoryHandleTypeFlagsNV handleType,
- HANDLE* pHandle);
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_VI_NN
-bool PreCallValidateCreateViSurfaceNN(
- VkInstance instance,
- const VkViSurfaceCreateInfoNN* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-void PostCallRecordCreateViSurfaceNN(
- VkInstance instance,
- const VkViSurfaceCreateInfoNN* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result);
-#endif // VK_USE_PLATFORM_VI_NN
-bool PreCallValidateCmdBeginConditionalRenderingEXT(
- VkCommandBuffer commandBuffer,
- const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin);
-bool PreCallValidateCmdEndConditionalRenderingEXT(
- VkCommandBuffer commandBuffer);
-bool PreCallValidateCmdProcessCommandsNVX(
- VkCommandBuffer commandBuffer,
- const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo);
-bool PreCallValidateCmdReserveSpaceForCommandsNVX(
- VkCommandBuffer commandBuffer,
- const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo);
-bool PreCallValidateCreateIndirectCommandsLayoutNVX(
- VkDevice device,
- const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout);
-void PostCallRecordCreateIndirectCommandsLayoutNVX(
- VkDevice device,
- const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout,
- VkResult result);
-bool PreCallValidateDestroyIndirectCommandsLayoutNVX(
- VkDevice device,
- VkIndirectCommandsLayoutNVX indirectCommandsLayout,
- const VkAllocationCallbacks* pAllocator);
-void PreCallRecordDestroyIndirectCommandsLayoutNVX(
- VkDevice device,
- VkIndirectCommandsLayoutNVX indirectCommandsLayout,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateCreateObjectTableNVX(
- VkDevice device,
- const VkObjectTableCreateInfoNVX* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkObjectTableNVX* pObjectTable);
-void PostCallRecordCreateObjectTableNVX(
- VkDevice device,
- const VkObjectTableCreateInfoNVX* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkObjectTableNVX* pObjectTable,
- VkResult result);
-bool PreCallValidateDestroyObjectTableNVX(
- VkDevice device,
- VkObjectTableNVX objectTable,
- const VkAllocationCallbacks* pAllocator);
-void PreCallRecordDestroyObjectTableNVX(
- VkDevice device,
- VkObjectTableNVX objectTable,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateRegisterObjectsNVX(
- VkDevice device,
- VkObjectTableNVX objectTable,
- uint32_t objectCount,
- const VkObjectTableEntryNVX* const* ppObjectTableEntries,
- const uint32_t* pObjectIndices);
-bool PreCallValidateUnregisterObjectsNVX(
- VkDevice device,
- VkObjectTableNVX objectTable,
- uint32_t objectCount,
- const VkObjectEntryTypeNVX* pObjectEntryTypes,
- const uint32_t* pObjectIndices);
-bool PreCallValidateGetPhysicalDeviceGeneratedCommandsPropertiesNVX(
- VkPhysicalDevice physicalDevice,
- VkDeviceGeneratedCommandsFeaturesNVX* pFeatures,
- VkDeviceGeneratedCommandsLimitsNVX* pLimits);
-bool PreCallValidateCmdSetViewportWScalingNV(
- VkCommandBuffer commandBuffer,
- uint32_t firstViewport,
- uint32_t viewportCount,
- const VkViewportWScalingNV* pViewportWScalings);
-bool PreCallValidateReleaseDisplayEXT(
- VkPhysicalDevice physicalDevice,
- VkDisplayKHR display);
-
-#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
-bool PreCallValidateAcquireXlibDisplayEXT(
- VkPhysicalDevice physicalDevice,
- Display* dpy,
- VkDisplayKHR display);
-#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
-
-#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
-bool PreCallValidateGetRandROutputDisplayEXT(
- VkPhysicalDevice physicalDevice,
- Display* dpy,
- RROutput rrOutput,
- VkDisplayKHR* pDisplay);
-void PostCallRecordGetRandROutputDisplayEXT(
- VkPhysicalDevice physicalDevice,
- Display* dpy,
- RROutput rrOutput,
- VkDisplayKHR* pDisplay,
- VkResult result);
-#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
-bool PreCallValidateGetPhysicalDeviceSurfaceCapabilities2EXT(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- VkSurfaceCapabilities2EXT* pSurfaceCapabilities);
-bool PreCallValidateDisplayPowerControlEXT(
- VkDevice device,
- VkDisplayKHR display,
- const VkDisplayPowerInfoEXT* pDisplayPowerInfo);
-bool PreCallValidateRegisterDeviceEventEXT(
- VkDevice device,
- const VkDeviceEventInfoEXT* pDeviceEventInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFence* pFence);
-void PostCallRecordRegisterDeviceEventEXT(
- VkDevice device,
- const VkDeviceEventInfoEXT* pDeviceEventInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFence* pFence,
- VkResult result);
-bool PreCallValidateRegisterDisplayEventEXT(
- VkDevice device,
- VkDisplayKHR display,
- const VkDisplayEventInfoEXT* pDisplayEventInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFence* pFence);
-void PostCallRecordRegisterDisplayEventEXT(
- VkDevice device,
- VkDisplayKHR display,
- const VkDisplayEventInfoEXT* pDisplayEventInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFence* pFence,
- VkResult result);
-bool PreCallValidateGetSwapchainCounterEXT(
- VkDevice device,
- VkSwapchainKHR swapchain,
- VkSurfaceCounterFlagBitsEXT counter,
- uint64_t* pCounterValue);
-bool PreCallValidateGetRefreshCycleDurationGOOGLE(
- VkDevice device,
- VkSwapchainKHR swapchain,
- VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties);
-bool PreCallValidateGetPastPresentationTimingGOOGLE(
- VkDevice device,
- VkSwapchainKHR swapchain,
- uint32_t* pPresentationTimingCount,
- VkPastPresentationTimingGOOGLE* pPresentationTimings);
-bool PreCallValidateCmdSetDiscardRectangleEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstDiscardRectangle,
- uint32_t discardRectangleCount,
- const VkRect2D* pDiscardRectangles);
-bool PreCallValidateSetHdrMetadataEXT(
- VkDevice device,
- uint32_t swapchainCount,
- const VkSwapchainKHR* pSwapchains,
- const VkHdrMetadataEXT* pMetadata);
-
-#ifdef VK_USE_PLATFORM_IOS_MVK
-bool PreCallValidateCreateIOSSurfaceMVK(
- VkInstance instance,
- const VkIOSSurfaceCreateInfoMVK* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-void PostCallRecordCreateIOSSurfaceMVK(
- VkInstance instance,
- const VkIOSSurfaceCreateInfoMVK* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result);
-#endif // VK_USE_PLATFORM_IOS_MVK
-
-#ifdef VK_USE_PLATFORM_MACOS_MVK
-bool PreCallValidateCreateMacOSSurfaceMVK(
- VkInstance instance,
- const VkMacOSSurfaceCreateInfoMVK* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-void PostCallRecordCreateMacOSSurfaceMVK(
- VkInstance instance,
- const VkMacOSSurfaceCreateInfoMVK* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result);
-#endif // VK_USE_PLATFORM_MACOS_MVK
-bool PreCallValidateSetDebugUtilsObjectNameEXT(
- VkDevice device,
- const VkDebugUtilsObjectNameInfoEXT* pNameInfo);
-bool PreCallValidateSetDebugUtilsObjectTagEXT(
- VkDevice device,
- const VkDebugUtilsObjectTagInfoEXT* pTagInfo);
-bool PreCallValidateQueueBeginDebugUtilsLabelEXT(
- VkQueue queue,
- const VkDebugUtilsLabelEXT* pLabelInfo);
-bool PreCallValidateQueueEndDebugUtilsLabelEXT(
- VkQueue queue);
-bool PreCallValidateQueueInsertDebugUtilsLabelEXT(
- VkQueue queue,
- const VkDebugUtilsLabelEXT* pLabelInfo);
-bool PreCallValidateCmdBeginDebugUtilsLabelEXT(
- VkCommandBuffer commandBuffer,
- const VkDebugUtilsLabelEXT* pLabelInfo);
-bool PreCallValidateCmdEndDebugUtilsLabelEXT(
- VkCommandBuffer commandBuffer);
-bool PreCallValidateCmdInsertDebugUtilsLabelEXT(
- VkCommandBuffer commandBuffer,
- const VkDebugUtilsLabelEXT* pLabelInfo);
-bool PreCallValidateCreateDebugUtilsMessengerEXT(
- VkInstance instance,
- const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDebugUtilsMessengerEXT* pMessenger);
-void PostCallRecordCreateDebugUtilsMessengerEXT(
- VkInstance instance,
- const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDebugUtilsMessengerEXT* pMessenger,
- VkResult result);
-bool PreCallValidateDestroyDebugUtilsMessengerEXT(
- VkInstance instance,
- VkDebugUtilsMessengerEXT messenger,
- const VkAllocationCallbacks* pAllocator);
-void PreCallRecordDestroyDebugUtilsMessengerEXT(
- VkInstance instance,
- VkDebugUtilsMessengerEXT messenger,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateSubmitDebugUtilsMessageEXT(
- VkInstance instance,
- VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
- VkDebugUtilsMessageTypeFlagsEXT messageTypes,
- const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData);
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-bool PreCallValidateGetAndroidHardwareBufferPropertiesANDROID(
- VkDevice device,
- const struct AHardwareBuffer* buffer,
- VkAndroidHardwareBufferPropertiesANDROID* pProperties);
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-bool PreCallValidateGetMemoryAndroidHardwareBufferANDROID(
- VkDevice device,
- const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo,
- struct AHardwareBuffer** pBuffer);
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-bool PreCallValidateCmdSetSampleLocationsEXT(
- VkCommandBuffer commandBuffer,
- const VkSampleLocationsInfoEXT* pSampleLocationsInfo);
-bool PreCallValidateGetPhysicalDeviceMultisamplePropertiesEXT(
- VkPhysicalDevice physicalDevice,
- VkSampleCountFlagBits samples,
- VkMultisamplePropertiesEXT* pMultisampleProperties);
-bool PreCallValidateGetImageDrmFormatModifierPropertiesEXT(
- VkDevice device,
- VkImage image,
- VkImageDrmFormatModifierPropertiesEXT* pProperties);
-bool PreCallValidateCreateValidationCacheEXT(
- VkDevice device,
- const VkValidationCacheCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkValidationCacheEXT* pValidationCache);
-void PostCallRecordCreateValidationCacheEXT(
- VkDevice device,
- const VkValidationCacheCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkValidationCacheEXT* pValidationCache,
- VkResult result);
-bool PreCallValidateDestroyValidationCacheEXT(
- VkDevice device,
- VkValidationCacheEXT validationCache,
- const VkAllocationCallbacks* pAllocator);
-void PreCallRecordDestroyValidationCacheEXT(
- VkDevice device,
- VkValidationCacheEXT validationCache,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateMergeValidationCachesEXT(
- VkDevice device,
- VkValidationCacheEXT dstCache,
- uint32_t srcCacheCount,
- const VkValidationCacheEXT* pSrcCaches);
-bool PreCallValidateGetValidationCacheDataEXT(
- VkDevice device,
- VkValidationCacheEXT validationCache,
- size_t* pDataSize,
- void* pData);
-bool PreCallValidateCmdBindShadingRateImageNV(
- VkCommandBuffer commandBuffer,
- VkImageView imageView,
- VkImageLayout imageLayout);
-bool PreCallValidateCmdSetViewportShadingRatePaletteNV(
- VkCommandBuffer commandBuffer,
- uint32_t firstViewport,
- uint32_t viewportCount,
- const VkShadingRatePaletteNV* pShadingRatePalettes);
-bool PreCallValidateCmdSetCoarseSampleOrderNV(
- VkCommandBuffer commandBuffer,
- VkCoarseSampleOrderTypeNV sampleOrderType,
- uint32_t customSampleOrderCount,
- const VkCoarseSampleOrderCustomNV* pCustomSampleOrders);
-bool PreCallValidateCreateAccelerationStructureNV(
- VkDevice device,
- const VkAccelerationStructureCreateInfoNV* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkAccelerationStructureNV* pAccelerationStructure);
-void PostCallRecordCreateAccelerationStructureNV(
- VkDevice device,
- const VkAccelerationStructureCreateInfoNV* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkAccelerationStructureNV* pAccelerationStructure,
- VkResult result);
-bool PreCallValidateDestroyAccelerationStructureNV(
- VkDevice device,
- VkAccelerationStructureNV accelerationStructure,
- const VkAllocationCallbacks* pAllocator);
-void PreCallRecordDestroyAccelerationStructureNV(
- VkDevice device,
- VkAccelerationStructureNV accelerationStructure,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateGetAccelerationStructureMemoryRequirementsNV(
- VkDevice device,
- const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo,
- VkMemoryRequirements2KHR* pMemoryRequirements);
-bool PreCallValidateBindAccelerationStructureMemoryNV(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindAccelerationStructureMemoryInfoNV* pBindInfos);
-bool PreCallValidateCmdBuildAccelerationStructureNV(
- VkCommandBuffer commandBuffer,
- const VkAccelerationStructureInfoNV* pInfo,
- VkBuffer instanceData,
- VkDeviceSize instanceOffset,
- VkBool32 update,
- VkAccelerationStructureNV dst,
- VkAccelerationStructureNV src,
- VkBuffer scratch,
- VkDeviceSize scratchOffset);
-bool PreCallValidateCmdCopyAccelerationStructureNV(
- VkCommandBuffer commandBuffer,
- VkAccelerationStructureNV dst,
- VkAccelerationStructureNV src,
- VkCopyAccelerationStructureModeNV mode);
-bool PreCallValidateCmdTraceRaysNV(
- VkCommandBuffer commandBuffer,
- VkBuffer raygenShaderBindingTableBuffer,
- VkDeviceSize raygenShaderBindingOffset,
- VkBuffer missShaderBindingTableBuffer,
- VkDeviceSize missShaderBindingOffset,
- VkDeviceSize missShaderBindingStride,
- VkBuffer hitShaderBindingTableBuffer,
- VkDeviceSize hitShaderBindingOffset,
- VkDeviceSize hitShaderBindingStride,
- VkBuffer callableShaderBindingTableBuffer,
- VkDeviceSize callableShaderBindingOffset,
- VkDeviceSize callableShaderBindingStride,
- uint32_t width,
- uint32_t height,
- uint32_t depth);
-bool PreCallValidateCreateRayTracingPipelinesNV(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VkRayTracingPipelineCreateInfoNV* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkPipeline* pPipelines);
-void PostCallRecordCreateRayTracingPipelinesNV(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VkRayTracingPipelineCreateInfoNV* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkPipeline* pPipelines,
- VkResult result);
-bool PreCallValidateGetRayTracingShaderGroupHandlesNV(
- VkDevice device,
- VkPipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- size_t dataSize,
- void* pData);
-bool PreCallValidateGetAccelerationStructureHandleNV(
- VkDevice device,
- VkAccelerationStructureNV accelerationStructure,
- size_t dataSize,
- void* pData);
-bool PreCallValidateCmdWriteAccelerationStructuresPropertiesNV(
- VkCommandBuffer commandBuffer,
- uint32_t accelerationStructureCount,
- const VkAccelerationStructureNV* pAccelerationStructures,
- VkQueryType queryType,
- VkQueryPool queryPool,
- uint32_t firstQuery);
-bool PreCallValidateCompileDeferredNV(
- VkDevice device,
- VkPipeline pipeline,
- uint32_t shader);
-bool PreCallValidateGetMemoryHostPointerPropertiesEXT(
- VkDevice device,
- VkExternalMemoryHandleTypeFlagBits handleType,
- const void* pHostPointer,
- VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties);
-bool PreCallValidateCmdWriteBufferMarkerAMD(
- VkCommandBuffer commandBuffer,
- VkPipelineStageFlagBits pipelineStage,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- uint32_t marker);
-bool PreCallValidateGetPhysicalDeviceCalibrateableTimeDomainsEXT(
- VkPhysicalDevice physicalDevice,
- uint32_t* pTimeDomainCount,
- VkTimeDomainEXT* pTimeDomains);
-bool PreCallValidateGetCalibratedTimestampsEXT(
- VkDevice device,
- uint32_t timestampCount,
- const VkCalibratedTimestampInfoEXT* pTimestampInfos,
- uint64_t* pTimestamps,
- uint64_t* pMaxDeviation);
-bool PreCallValidateCmdDrawMeshTasksNV(
- VkCommandBuffer commandBuffer,
- uint32_t taskCount,
- uint32_t firstTask);
-bool PreCallValidateCmdDrawMeshTasksIndirectNV(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- uint32_t drawCount,
- uint32_t stride);
-bool PreCallValidateCmdDrawMeshTasksIndirectCountNV(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride);
-bool PreCallValidateCmdSetExclusiveScissorNV(
- VkCommandBuffer commandBuffer,
- uint32_t firstExclusiveScissor,
- uint32_t exclusiveScissorCount,
- const VkRect2D* pExclusiveScissors);
-bool PreCallValidateCmdSetCheckpointNV(
- VkCommandBuffer commandBuffer,
- const void* pCheckpointMarker);
-bool PreCallValidateGetQueueCheckpointDataNV(
- VkQueue queue,
- uint32_t* pCheckpointDataCount,
- VkCheckpointDataNV* pCheckpointData);
-bool PreCallValidateInitializePerformanceApiINTEL(
- VkDevice device,
- const VkInitializePerformanceApiInfoINTEL* pInitializeInfo);
-bool PreCallValidateUninitializePerformanceApiINTEL(
- VkDevice device);
-bool PreCallValidateCmdSetPerformanceMarkerINTEL(
- VkCommandBuffer commandBuffer,
- const VkPerformanceMarkerInfoINTEL* pMarkerInfo);
-bool PreCallValidateCmdSetPerformanceStreamMarkerINTEL(
- VkCommandBuffer commandBuffer,
- const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo);
-bool PreCallValidateCmdSetPerformanceOverrideINTEL(
- VkCommandBuffer commandBuffer,
- const VkPerformanceOverrideInfoINTEL* pOverrideInfo);
-bool PreCallValidateAcquirePerformanceConfigurationINTEL(
- VkDevice device,
- const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo,
- VkPerformanceConfigurationINTEL* pConfiguration);
-bool PreCallValidateReleasePerformanceConfigurationINTEL(
- VkDevice device,
- VkPerformanceConfigurationINTEL configuration);
-bool PreCallValidateQueueSetPerformanceConfigurationINTEL(
- VkQueue queue,
- VkPerformanceConfigurationINTEL configuration);
-bool PreCallValidateGetPerformanceParameterINTEL(
- VkDevice device,
- VkPerformanceParameterTypeINTEL parameter,
- VkPerformanceValueINTEL* pValue);
-bool PreCallValidateSetLocalDimmingAMD(
- VkDevice device,
- VkSwapchainKHR swapChain,
- VkBool32 localDimmingEnable);
-
-#ifdef VK_USE_PLATFORM_FUCHSIA
-bool PreCallValidateCreateImagePipeSurfaceFUCHSIA(
- VkInstance instance,
- const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-void PostCallRecordCreateImagePipeSurfaceFUCHSIA(
- VkInstance instance,
- const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result);
-#endif // VK_USE_PLATFORM_FUCHSIA
-
-#ifdef VK_USE_PLATFORM_METAL_EXT
-bool PreCallValidateCreateMetalSurfaceEXT(
- VkInstance instance,
- const VkMetalSurfaceCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-void PostCallRecordCreateMetalSurfaceEXT(
- VkInstance instance,
- const VkMetalSurfaceCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result);
-#endif // VK_USE_PLATFORM_METAL_EXT
-bool PreCallValidateGetBufferDeviceAddressEXT(
- VkDevice device,
- const VkBufferDeviceAddressInfoEXT* pInfo);
-bool PreCallValidateGetPhysicalDeviceCooperativeMatrixPropertiesNV(
- VkPhysicalDevice physicalDevice,
- uint32_t* pPropertyCount,
- VkCooperativeMatrixPropertiesNV* pProperties);
-bool PreCallValidateGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
- VkPhysicalDevice physicalDevice,
- uint32_t* pCombinationCount,
- VkFramebufferMixedSamplesCombinationNV* pCombinations);
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-bool PreCallValidateGetPhysicalDeviceSurfacePresentModes2EXT(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
- uint32_t* pPresentModeCount,
- VkPresentModeKHR* pPresentModes);
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-bool PreCallValidateAcquireFullScreenExclusiveModeEXT(
- VkDevice device,
- VkSwapchainKHR swapchain);
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-bool PreCallValidateReleaseFullScreenExclusiveModeEXT(
- VkDevice device,
- VkSwapchainKHR swapchain);
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-bool PreCallValidateGetDeviceGroupSurfacePresentModes2EXT(
- VkDevice device,
- const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
- VkDeviceGroupPresentModeFlagsKHR* pModes);
-#endif // VK_USE_PLATFORM_WIN32_KHR
-bool PreCallValidateCreateHeadlessSurfaceEXT(
- VkInstance instance,
- const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-void PostCallRecordCreateHeadlessSurfaceEXT(
- VkInstance instance,
- const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result);
-bool PreCallValidateCmdSetLineStippleEXT(
- VkCommandBuffer commandBuffer,
- uint32_t lineStippleFactor,
- uint16_t lineStipplePattern);
-bool PreCallValidateResetQueryPoolEXT(
- VkDevice device,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount);
-
-
-void PostCallRecordDestroyInstance(VkInstance instance, const VkAllocationCallbacks *pAllocator);
-void PreCallRecordResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags);
-void PostCallRecordGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties *pQueueFamilyProperties);
-void PreCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers);
-void PreCallRecordFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet *pDescriptorSets);
-void PostCallRecordGetPhysicalDeviceQueueFamilyProperties2(VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties);
-void PostCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties);
-void PostCallRecordGetPhysicalDeviceDisplayPropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount, VkDisplayPropertiesKHR *pProperties, VkResult result);
-void PostCallRecordGetDisplayModePropertiesKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t *pPropertyCount, VkDisplayModePropertiesKHR *pProperties, VkResult result);
-void PostCallRecordGetPhysicalDeviceDisplayProperties2KHR(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount, VkDisplayProperties2KHR *pProperties, VkResult result);
-void PostCallRecordGetDisplayModeProperties2KHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t *pPropertyCount, VkDisplayModeProperties2KHR *pProperties, VkResult result);
diff --git a/layers/generated/parameter_validation.cpp b/layers/generated/parameter_validation.cpp
deleted file mode 100644
index c09045bf4..000000000
--- a/layers/generated/parameter_validation.cpp
+++ /dev/null
@@ -1,10392 +0,0 @@
-/* *** THIS FILE IS GENERATED - DO NOT EDIT! ***
- * See parameter_validation_generator.py for modifications
- *
- * Copyright (c) 2015-2019 The Khronos Group Inc.
- * Copyright (c) 2015-2019 LunarG, Inc.
- * Copyright (C) 2015-2019 Google Inc.
- * Copyright (c) 2015-2017 Valve Corporation
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * Author: Mark Lobodzinski <mark@LunarG.com>
- * Author: Dave Houlton <daveh@LunarG.com>
- */
-
-
-#include "chassis.h"
-
-#include "stateless_validation.h"
-
-const uint32_t GeneratedVulkanHeaderVersion = 121;
-
-const VkAccessFlags AllVkAccessFlagBits = VK_ACCESS_INDIRECT_COMMAND_READ_BIT|VK_ACCESS_INDEX_READ_BIT|VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT|VK_ACCESS_UNIFORM_READ_BIT|VK_ACCESS_INPUT_ATTACHMENT_READ_BIT|VK_ACCESS_SHADER_READ_BIT|VK_ACCESS_SHADER_WRITE_BIT|VK_ACCESS_COLOR_ATTACHMENT_READ_BIT|VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT|VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT|VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT|VK_ACCESS_TRANSFER_READ_BIT|VK_ACCESS_TRANSFER_WRITE_BIT|VK_ACCESS_HOST_READ_BIT|VK_ACCESS_HOST_WRITE_BIT|VK_ACCESS_MEMORY_READ_BIT|VK_ACCESS_MEMORY_WRITE_BIT|VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT|VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT|VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT|VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT|VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX|VK_ACCESS_COMMAND_PROCESS_WRITE_BIT_NVX|VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT|VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV|VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV|VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV|VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT;
-const VkAttachmentDescriptionFlags AllVkAttachmentDescriptionFlagBits = VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT;
-const VkBufferCreateFlags AllVkBufferCreateFlagBits = VK_BUFFER_CREATE_SPARSE_BINDING_BIT|VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT|VK_BUFFER_CREATE_SPARSE_ALIASED_BIT|VK_BUFFER_CREATE_PROTECTED_BIT|VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT;
-const VkBufferUsageFlags AllVkBufferUsageFlagBits = VK_BUFFER_USAGE_TRANSFER_SRC_BIT|VK_BUFFER_USAGE_TRANSFER_DST_BIT|VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT|VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT|VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT|VK_BUFFER_USAGE_STORAGE_BUFFER_BIT|VK_BUFFER_USAGE_INDEX_BUFFER_BIT|VK_BUFFER_USAGE_VERTEX_BUFFER_BIT|VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT|VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT|VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT|VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT|VK_BUFFER_USAGE_RAY_TRACING_BIT_NV|VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT;
-const VkColorComponentFlags AllVkColorComponentFlagBits = VK_COLOR_COMPONENT_R_BIT|VK_COLOR_COMPONENT_G_BIT|VK_COLOR_COMPONENT_B_BIT|VK_COLOR_COMPONENT_A_BIT;
-const VkCommandBufferResetFlags AllVkCommandBufferResetFlagBits = VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT;
-const VkCommandBufferUsageFlags AllVkCommandBufferUsageFlagBits = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT|VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT|VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
-const VkCommandPoolCreateFlags AllVkCommandPoolCreateFlagBits = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT|VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT|VK_COMMAND_POOL_CREATE_PROTECTED_BIT;
-const VkCommandPoolResetFlags AllVkCommandPoolResetFlagBits = VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT;
-const VkCullModeFlags AllVkCullModeFlagBits = VK_CULL_MODE_NONE|VK_CULL_MODE_FRONT_BIT|VK_CULL_MODE_BACK_BIT|VK_CULL_MODE_FRONT_AND_BACK;
-const VkDependencyFlags AllVkDependencyFlagBits = VK_DEPENDENCY_BY_REGION_BIT|VK_DEPENDENCY_DEVICE_GROUP_BIT|VK_DEPENDENCY_VIEW_LOCAL_BIT|VK_DEPENDENCY_VIEW_LOCAL_BIT_KHR|VK_DEPENDENCY_DEVICE_GROUP_BIT_KHR;
-const VkDescriptorPoolCreateFlags AllVkDescriptorPoolCreateFlagBits = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT|VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT;
-const VkDescriptorSetLayoutCreateFlags AllVkDescriptorSetLayoutCreateFlagBits = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR|VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT;
-const VkDeviceQueueCreateFlags AllVkDeviceQueueCreateFlagBits = VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT;
-const VkFenceCreateFlags AllVkFenceCreateFlagBits = VK_FENCE_CREATE_SIGNALED_BIT;
-const VkFormatFeatureFlags AllVkFormatFeatureFlagBits = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT|VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT|VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT|VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT|VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT|VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT|VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT|VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT|VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT|VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT|VK_FORMAT_FEATURE_BLIT_SRC_BIT|VK_FORMAT_FEATURE_BLIT_DST_BIT|VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT|VK_FORMAT_FEATURE_TRANSFER_SRC_BIT|VK_FORMAT_FEATURE_TRANSFER_DST_BIT|VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT|VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT|VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT|VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT|VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT|VK_FORMAT_FEATURE_DISJOINT_BIT|VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT|VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG|VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR|VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR|VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT|VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR|VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR|VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR|VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR|VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR|VK_FORMAT_FEATURE_DISJOINT_BIT_KHR|VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR|VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT|VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT;
-const VkFramebufferCreateFlags AllVkFramebufferCreateFlagBits = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR;
-const VkImageAspectFlags AllVkImageAspectFlagBits = VK_IMAGE_ASPECT_COLOR_BIT|VK_IMAGE_ASPECT_DEPTH_BIT|VK_IMAGE_ASPECT_STENCIL_BIT|VK_IMAGE_ASPECT_METADATA_BIT|VK_IMAGE_ASPECT_PLANE_0_BIT|VK_IMAGE_ASPECT_PLANE_1_BIT|VK_IMAGE_ASPECT_PLANE_2_BIT|VK_IMAGE_ASPECT_PLANE_0_BIT_KHR|VK_IMAGE_ASPECT_PLANE_1_BIT_KHR|VK_IMAGE_ASPECT_PLANE_2_BIT_KHR|VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT|VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT|VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT|VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT;
-const VkImageCreateFlags AllVkImageCreateFlagBits = VK_IMAGE_CREATE_SPARSE_BINDING_BIT|VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT|VK_IMAGE_CREATE_SPARSE_ALIASED_BIT|VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT|VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT|VK_IMAGE_CREATE_ALIAS_BIT|VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT|VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT|VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT|VK_IMAGE_CREATE_EXTENDED_USAGE_BIT|VK_IMAGE_CREATE_PROTECTED_BIT|VK_IMAGE_CREATE_DISJOINT_BIT|VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV|VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR|VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR|VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR|VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR|VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT|VK_IMAGE_CREATE_DISJOINT_BIT_KHR|VK_IMAGE_CREATE_ALIAS_BIT_KHR|VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT;
-const VkImageUsageFlags AllVkImageUsageFlagBits = VK_IMAGE_USAGE_TRANSFER_SRC_BIT|VK_IMAGE_USAGE_TRANSFER_DST_BIT|VK_IMAGE_USAGE_SAMPLED_BIT|VK_IMAGE_USAGE_STORAGE_BIT|VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT|VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT|VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT|VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT|VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV|VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT;
-const VkImageViewCreateFlags AllVkImageViewCreateFlagBits = VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT;
-const VkMemoryHeapFlags AllVkMemoryHeapFlagBits = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT|VK_MEMORY_HEAP_MULTI_INSTANCE_BIT|VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHR;
-const VkMemoryPropertyFlags AllVkMemoryPropertyFlagBits = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT|VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT|VK_MEMORY_PROPERTY_HOST_COHERENT_BIT|VK_MEMORY_PROPERTY_HOST_CACHED_BIT|VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT|VK_MEMORY_PROPERTY_PROTECTED_BIT|VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD|VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD;
-const VkPipelineCreateFlags AllVkPipelineCreateFlagBits = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT|VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT|VK_PIPELINE_CREATE_DERIVATIVE_BIT|VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT|VK_PIPELINE_CREATE_DISPATCH_BASE|VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR|VK_PIPELINE_CREATE_DISPATCH_BASE_KHR|VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV|VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR|VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR;
-const VkPipelineShaderStageCreateFlags AllVkPipelineShaderStageCreateFlagBits = VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT|VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT;
-const VkPipelineStageFlags AllVkPipelineStageFlagBits = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT|VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT|VK_PIPELINE_STAGE_VERTEX_INPUT_BIT|VK_PIPELINE_STAGE_VERTEX_SHADER_BIT|VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT|VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT|VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT|VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT|VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT|VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT|VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT|VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT|VK_PIPELINE_STAGE_TRANSFER_BIT|VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT|VK_PIPELINE_STAGE_HOST_BIT|VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT|VK_PIPELINE_STAGE_ALL_COMMANDS_BIT|VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT|VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT|VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX|VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV|VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV|VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV|VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV|VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV|VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT;
-const VkQueryControlFlags AllVkQueryControlFlagBits = VK_QUERY_CONTROL_PRECISE_BIT;
-const VkQueryPipelineStatisticFlags AllVkQueryPipelineStatisticFlagBits = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT|VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT|VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT|VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT|VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT|VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT|VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT|VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT|VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT|VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT|VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT;
-const VkQueryResultFlags AllVkQueryResultFlagBits = VK_QUERY_RESULT_64_BIT|VK_QUERY_RESULT_WAIT_BIT|VK_QUERY_RESULT_WITH_AVAILABILITY_BIT|VK_QUERY_RESULT_PARTIAL_BIT;
-const VkQueueFlags AllVkQueueFlagBits = VK_QUEUE_GRAPHICS_BIT|VK_QUEUE_COMPUTE_BIT|VK_QUEUE_TRANSFER_BIT|VK_QUEUE_SPARSE_BINDING_BIT|VK_QUEUE_PROTECTED_BIT;
-const VkSampleCountFlags AllVkSampleCountFlagBits = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_2_BIT|VK_SAMPLE_COUNT_4_BIT|VK_SAMPLE_COUNT_8_BIT|VK_SAMPLE_COUNT_16_BIT|VK_SAMPLE_COUNT_32_BIT|VK_SAMPLE_COUNT_64_BIT;
-const VkSamplerCreateFlags AllVkSamplerCreateFlagBits = VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT|VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT;
-const VkShaderStageFlags AllVkShaderStageFlagBits = VK_SHADER_STAGE_VERTEX_BIT|VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT|VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT|VK_SHADER_STAGE_GEOMETRY_BIT|VK_SHADER_STAGE_FRAGMENT_BIT|VK_SHADER_STAGE_COMPUTE_BIT|VK_SHADER_STAGE_ALL_GRAPHICS|VK_SHADER_STAGE_ALL|VK_SHADER_STAGE_RAYGEN_BIT_NV|VK_SHADER_STAGE_ANY_HIT_BIT_NV|VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV|VK_SHADER_STAGE_MISS_BIT_NV|VK_SHADER_STAGE_INTERSECTION_BIT_NV|VK_SHADER_STAGE_CALLABLE_BIT_NV|VK_SHADER_STAGE_TASK_BIT_NV|VK_SHADER_STAGE_MESH_BIT_NV;
-const VkSparseImageFormatFlags AllVkSparseImageFormatFlagBits = VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT|VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT|VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT;
-const VkSparseMemoryBindFlags AllVkSparseMemoryBindFlagBits = VK_SPARSE_MEMORY_BIND_METADATA_BIT;
-const VkStencilFaceFlags AllVkStencilFaceFlagBits = VK_STENCIL_FACE_FRONT_BIT|VK_STENCIL_FACE_BACK_BIT|VK_STENCIL_FACE_FRONT_AND_BACK|VK_STENCIL_FRONT_AND_BACK;
-const VkSubpassDescriptionFlags AllVkSubpassDescriptionFlagBits = VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX|VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX;
-const VkExternalFenceFeatureFlags AllVkExternalFenceFeatureFlagBits = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT|VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT|VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT_KHR|VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT_KHR;
-const VkExternalFenceHandleTypeFlags AllVkExternalFenceHandleTypeFlagBits = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT|VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT|VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT|VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT|VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR|VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR|VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR|VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR;
-const VkExternalMemoryFeatureFlags AllVkExternalMemoryFeatureFlagBits = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT|VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT|VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT|VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_KHR|VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_KHR|VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_KHR;
-const VkExternalMemoryHandleTypeFlags AllVkExternalMemoryHandleTypeFlagBits = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT|VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT|VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT|VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT|VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT|VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT|VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT|VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR|VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR|VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR|VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT_KHR|VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT_KHR|VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT_KHR|VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT_KHR|VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT|VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID|VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT|VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT;
-const VkExternalSemaphoreFeatureFlags AllVkExternalSemaphoreFeatureFlagBits = VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT|VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT|VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT_KHR|VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT_KHR;
-const VkExternalSemaphoreHandleTypeFlags AllVkExternalSemaphoreHandleTypeFlagBits = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT|VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT|VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT|VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT|VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT|VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR|VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR|VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR|VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT_KHR|VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR;
-const VkFenceImportFlags AllVkFenceImportFlagBits = VK_FENCE_IMPORT_TEMPORARY_BIT|VK_FENCE_IMPORT_TEMPORARY_BIT_KHR;
-const VkMemoryAllocateFlags AllVkMemoryAllocateFlagBits = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT|VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT_KHR;
-const VkPeerMemoryFeatureFlags AllVkPeerMemoryFeatureFlagBits = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT|VK_PEER_MEMORY_FEATURE_COPY_DST_BIT|VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT|VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT|VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT_KHR|VK_PEER_MEMORY_FEATURE_COPY_DST_BIT_KHR|VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT_KHR|VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT_KHR;
-const VkSemaphoreImportFlags AllVkSemaphoreImportFlagBits = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT|VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR;
-const VkSubgroupFeatureFlags AllVkSubgroupFeatureFlagBits = VK_SUBGROUP_FEATURE_BASIC_BIT|VK_SUBGROUP_FEATURE_VOTE_BIT|VK_SUBGROUP_FEATURE_ARITHMETIC_BIT|VK_SUBGROUP_FEATURE_BALLOT_BIT|VK_SUBGROUP_FEATURE_SHUFFLE_BIT|VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT|VK_SUBGROUP_FEATURE_CLUSTERED_BIT|VK_SUBGROUP_FEATURE_QUAD_BIT|VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV;
-const VkCompositeAlphaFlagsKHR AllVkCompositeAlphaFlagBitsKHR = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR|VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR|VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR|VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR;
-const VkSurfaceTransformFlagsKHR AllVkSurfaceTransformFlagBitsKHR = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR|VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR|VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR|VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR|VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR|VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR|VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR|VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR|VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR;
-const VkDeviceGroupPresentModeFlagsKHR AllVkDeviceGroupPresentModeFlagBitsKHR = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR|VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR|VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR|VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR;
-const VkSwapchainCreateFlagsKHR AllVkSwapchainCreateFlagBitsKHR = VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR|VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR|VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR|VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR;
-const VkDisplayPlaneAlphaFlagsKHR AllVkDisplayPlaneAlphaFlagBitsKHR = VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR|VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR|VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR|VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR;
-const VkMemoryAllocateFlagsKHR AllVkMemoryAllocateFlagBitsKHR = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT|VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT_KHR;
-const VkPeerMemoryFeatureFlagsKHR AllVkPeerMemoryFeatureFlagBitsKHR = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT|VK_PEER_MEMORY_FEATURE_COPY_DST_BIT|VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT|VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT|VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT_KHR|VK_PEER_MEMORY_FEATURE_COPY_DST_BIT_KHR|VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT_KHR|VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT_KHR;
-const VkExternalMemoryFeatureFlagsKHR AllVkExternalMemoryFeatureFlagBitsKHR = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT|VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT|VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT|VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_KHR|VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_KHR|VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_KHR;
-const VkExternalMemoryHandleTypeFlagsKHR AllVkExternalMemoryHandleTypeFlagBitsKHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT|VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT|VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT|VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT|VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT|VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT|VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT|VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR|VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR|VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR|VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT_KHR|VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT_KHR|VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT_KHR|VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT_KHR|VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT|VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID|VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT|VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT;
-const VkExternalSemaphoreFeatureFlagsKHR AllVkExternalSemaphoreFeatureFlagBitsKHR = VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT|VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT|VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT_KHR|VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT_KHR;
-const VkExternalSemaphoreHandleTypeFlagsKHR AllVkExternalSemaphoreHandleTypeFlagBitsKHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT|VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT|VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT|VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT|VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT|VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR|VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR|VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR|VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT_KHR|VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR;
-const VkSemaphoreImportFlagsKHR AllVkSemaphoreImportFlagBitsKHR = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT|VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR;
-const VkExternalFenceFeatureFlagsKHR AllVkExternalFenceFeatureFlagBitsKHR = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT|VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT|VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT_KHR|VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT_KHR;
-const VkExternalFenceHandleTypeFlagsKHR AllVkExternalFenceHandleTypeFlagBitsKHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT|VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT|VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT|VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT|VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR|VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR|VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR|VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR;
-const VkFenceImportFlagsKHR AllVkFenceImportFlagBitsKHR = VK_FENCE_IMPORT_TEMPORARY_BIT|VK_FENCE_IMPORT_TEMPORARY_BIT_KHR;
-const VkResolveModeFlagsKHR AllVkResolveModeFlagBitsKHR = VK_RESOLVE_MODE_NONE_KHR|VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR|VK_RESOLVE_MODE_AVERAGE_BIT_KHR|VK_RESOLVE_MODE_MIN_BIT_KHR|VK_RESOLVE_MODE_MAX_BIT_KHR;
-const VkDebugReportFlagsEXT AllVkDebugReportFlagBitsEXT = VK_DEBUG_REPORT_INFORMATION_BIT_EXT|VK_DEBUG_REPORT_WARNING_BIT_EXT|VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT|VK_DEBUG_REPORT_ERROR_BIT_EXT|VK_DEBUG_REPORT_DEBUG_BIT_EXT;
-const VkExternalMemoryFeatureFlagsNV AllVkExternalMemoryFeatureFlagBitsNV = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV|VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV|VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV;
-const VkExternalMemoryHandleTypeFlagsNV AllVkExternalMemoryHandleTypeFlagBitsNV = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV|VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV|VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV|VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV;
-const VkConditionalRenderingFlagsEXT AllVkConditionalRenderingFlagBitsEXT = VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT;
-const VkIndirectCommandsLayoutUsageFlagsNVX AllVkIndirectCommandsLayoutUsageFlagBitsNVX = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NVX|VK_INDIRECT_COMMANDS_LAYOUT_USAGE_SPARSE_SEQUENCES_BIT_NVX|VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EMPTY_EXECUTIONS_BIT_NVX|VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NVX;
-const VkObjectEntryUsageFlagsNVX AllVkObjectEntryUsageFlagBitsNVX = VK_OBJECT_ENTRY_USAGE_GRAPHICS_BIT_NVX|VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX;
-const VkSurfaceCounterFlagsEXT AllVkSurfaceCounterFlagBitsEXT = VK_SURFACE_COUNTER_VBLANK_EXT;
-const VkDebugUtilsMessageSeverityFlagsEXT AllVkDebugUtilsMessageSeverityFlagBitsEXT = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT|VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT|VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT|VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT;
-const VkDebugUtilsMessageTypeFlagsEXT AllVkDebugUtilsMessageTypeFlagBitsEXT = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT|VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT|VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT;
-const VkDescriptorBindingFlagsEXT AllVkDescriptorBindingFlagBitsEXT = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT|VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT|VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT|VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT;
-const VkBuildAccelerationStructureFlagsNV AllVkBuildAccelerationStructureFlagBitsNV = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV|VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV|VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV|VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV|VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_NV;
-const VkGeometryFlagsNV AllVkGeometryFlagBitsNV = VK_GEOMETRY_OPAQUE_BIT_NV|VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_NV;
-const VkGeometryInstanceFlagsNV AllVkGeometryInstanceFlagBitsNV = VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV|VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV|VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_NV|VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_NV;
-const VkPipelineCreationFeedbackFlagsEXT AllVkPipelineCreationFeedbackFlagBitsEXT = VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT_EXT|VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT_EXT|VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT_EXT;
-
-const std::vector<VkPipelineCacheHeaderVersion> AllVkPipelineCacheHeaderVersionEnums = {VK_PIPELINE_CACHE_HEADER_VERSION_ONE, };
-const std::vector<VkResult> AllVkResultEnums = {VK_SUCCESS, VK_NOT_READY, VK_TIMEOUT, VK_EVENT_SET, VK_EVENT_RESET, VK_INCOMPLETE, VK_ERROR_OUT_OF_HOST_MEMORY, VK_ERROR_OUT_OF_DEVICE_MEMORY, VK_ERROR_INITIALIZATION_FAILED, VK_ERROR_DEVICE_LOST, VK_ERROR_MEMORY_MAP_FAILED, VK_ERROR_LAYER_NOT_PRESENT, VK_ERROR_EXTENSION_NOT_PRESENT, VK_ERROR_FEATURE_NOT_PRESENT, VK_ERROR_INCOMPATIBLE_DRIVER, VK_ERROR_TOO_MANY_OBJECTS, VK_ERROR_FORMAT_NOT_SUPPORTED, VK_ERROR_FRAGMENTED_POOL, VK_ERROR_OUT_OF_POOL_MEMORY, VK_ERROR_INVALID_EXTERNAL_HANDLE, VK_ERROR_SURFACE_LOST_KHR, VK_ERROR_NATIVE_WINDOW_IN_USE_KHR, VK_SUBOPTIMAL_KHR, VK_ERROR_OUT_OF_DATE_KHR, VK_ERROR_INCOMPATIBLE_DISPLAY_KHR, VK_ERROR_VALIDATION_FAILED_EXT, VK_ERROR_INVALID_SHADER_NV, VK_ERROR_OUT_OF_POOL_MEMORY_KHR, VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR, VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT, VK_ERROR_FRAGMENTATION_EXT, VK_ERROR_NOT_PERMITTED_EXT, VK_ERROR_INVALID_DEVICE_ADDRESS_EXT, VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT, };
-const std::vector<VkSystemAllocationScope> AllVkSystemAllocationScopeEnums = {VK_SYSTEM_ALLOCATION_SCOPE_COMMAND, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT, VK_SYSTEM_ALLOCATION_SCOPE_CACHE, VK_SYSTEM_ALLOCATION_SCOPE_DEVICE, VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE, };
-const std::vector<VkInternalAllocationType> AllVkInternalAllocationTypeEnums = {VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE, };
-const std::vector<VkFormat> AllVkFormatEnums = {VK_FORMAT_UNDEFINED, VK_FORMAT_R4G4_UNORM_PACK8, VK_FORMAT_R4G4B4A4_UNORM_PACK16, VK_FORMAT_B4G4R4A4_UNORM_PACK16, VK_FORMAT_R5G6B5_UNORM_PACK16, VK_FORMAT_B5G6R5_UNORM_PACK16, VK_FORMAT_R5G5B5A1_UNORM_PACK16, VK_FORMAT_B5G5R5A1_UNORM_PACK16, VK_FORMAT_A1R5G5B5_UNORM_PACK16, VK_FORMAT_R8_UNORM, VK_FORMAT_R8_SNORM, VK_FORMAT_R8_USCALED, VK_FORMAT_R8_SSCALED, VK_FORMAT_R8_UINT, VK_FORMAT_R8_SINT, VK_FORMAT_R8_SRGB, VK_FORMAT_R8G8_UNORM, VK_FORMAT_R8G8_SNORM, VK_FORMAT_R8G8_USCALED, VK_FORMAT_R8G8_SSCALED, VK_FORMAT_R8G8_UINT, VK_FORMAT_R8G8_SINT, VK_FORMAT_R8G8_SRGB, VK_FORMAT_R8G8B8_UNORM, VK_FORMAT_R8G8B8_SNORM, VK_FORMAT_R8G8B8_USCALED, VK_FORMAT_R8G8B8_SSCALED, VK_FORMAT_R8G8B8_UINT, VK_FORMAT_R8G8B8_SINT, VK_FORMAT_R8G8B8_SRGB, VK_FORMAT_B8G8R8_UNORM, VK_FORMAT_B8G8R8_SNORM, VK_FORMAT_B8G8R8_USCALED, VK_FORMAT_B8G8R8_SSCALED, VK_FORMAT_B8G8R8_UINT, VK_FORMAT_B8G8R8_SINT, VK_FORMAT_B8G8R8_SRGB, VK_FORMAT_R8G8B8A8_UNORM, VK_FORMAT_R8G8B8A8_SNORM, VK_FORMAT_R8G8B8A8_USCALED, VK_FORMAT_R8G8B8A8_SSCALED, VK_FORMAT_R8G8B8A8_UINT, VK_FORMAT_R8G8B8A8_SINT, VK_FORMAT_R8G8B8A8_SRGB, VK_FORMAT_B8G8R8A8_UNORM, VK_FORMAT_B8G8R8A8_SNORM, VK_FORMAT_B8G8R8A8_USCALED, VK_FORMAT_B8G8R8A8_SSCALED, VK_FORMAT_B8G8R8A8_UINT, VK_FORMAT_B8G8R8A8_SINT, VK_FORMAT_B8G8R8A8_SRGB, VK_FORMAT_A8B8G8R8_UNORM_PACK32, VK_FORMAT_A8B8G8R8_SNORM_PACK32, VK_FORMAT_A8B8G8R8_USCALED_PACK32, VK_FORMAT_A8B8G8R8_SSCALED_PACK32, VK_FORMAT_A8B8G8R8_UINT_PACK32, VK_FORMAT_A8B8G8R8_SINT_PACK32, VK_FORMAT_A8B8G8R8_SRGB_PACK32, VK_FORMAT_A2R10G10B10_UNORM_PACK32, VK_FORMAT_A2R10G10B10_SNORM_PACK32, VK_FORMAT_A2R10G10B10_USCALED_PACK32, VK_FORMAT_A2R10G10B10_SSCALED_PACK32, VK_FORMAT_A2R10G10B10_UINT_PACK32, VK_FORMAT_A2R10G10B10_SINT_PACK32, VK_FORMAT_A2B10G10R10_UNORM_PACK32, VK_FORMAT_A2B10G10R10_SNORM_PACK32, VK_FORMAT_A2B10G10R10_USCALED_PACK32, VK_FORMAT_A2B10G10R10_SSCALED_PACK32, VK_FORMAT_A2B10G10R10_UINT_PACK32, VK_FORMAT_A2B10G10R10_SINT_PACK32, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_SNORM, VK_FORMAT_R16_USCALED, VK_FORMAT_R16_SSCALED, VK_FORMAT_R16_UINT, VK_FORMAT_R16_SINT, VK_FORMAT_R16_SFLOAT, VK_FORMAT_R16G16_UNORM, VK_FORMAT_R16G16_SNORM, VK_FORMAT_R16G16_USCALED, VK_FORMAT_R16G16_SSCALED, VK_FORMAT_R16G16_UINT, VK_FORMAT_R16G16_SINT, VK_FORMAT_R16G16_SFLOAT, VK_FORMAT_R16G16B16_UNORM, VK_FORMAT_R16G16B16_SNORM, VK_FORMAT_R16G16B16_USCALED, VK_FORMAT_R16G16B16_SSCALED, VK_FORMAT_R16G16B16_UINT, VK_FORMAT_R16G16B16_SINT, VK_FORMAT_R16G16B16_SFLOAT, VK_FORMAT_R16G16B16A16_UNORM, VK_FORMAT_R16G16B16A16_SNORM, VK_FORMAT_R16G16B16A16_USCALED, VK_FORMAT_R16G16B16A16_SSCALED, VK_FORMAT_R16G16B16A16_UINT, VK_FORMAT_R16G16B16A16_SINT, VK_FORMAT_R16G16B16A16_SFLOAT, VK_FORMAT_R32_UINT, VK_FORMAT_R32_SINT, VK_FORMAT_R32_SFLOAT, VK_FORMAT_R32G32_UINT, VK_FORMAT_R32G32_SINT, VK_FORMAT_R32G32_SFLOAT, VK_FORMAT_R32G32B32_UINT, VK_FORMAT_R32G32B32_SINT, VK_FORMAT_R32G32B32_SFLOAT, VK_FORMAT_R32G32B32A32_UINT, VK_FORMAT_R32G32B32A32_SINT, VK_FORMAT_R32G32B32A32_SFLOAT, VK_FORMAT_R64_UINT, VK_FORMAT_R64_SINT, VK_FORMAT_R64_SFLOAT, VK_FORMAT_R64G64_UINT, VK_FORMAT_R64G64_SINT, VK_FORMAT_R64G64_SFLOAT, VK_FORMAT_R64G64B64_UINT, VK_FORMAT_R64G64B64_SINT, VK_FORMAT_R64G64B64_SFLOAT, VK_FORMAT_R64G64B64A64_UINT, VK_FORMAT_R64G64B64A64_SINT, VK_FORMAT_R64G64B64A64_SFLOAT, VK_FORMAT_B10G11R11_UFLOAT_PACK32, VK_FORMAT_E5B9G9R9_UFLOAT_PACK32, VK_FORMAT_D16_UNORM, VK_FORMAT_X8_D24_UNORM_PACK32, VK_FORMAT_D32_SFLOAT, VK_FORMAT_S8_UINT, VK_FORMAT_D16_UNORM_S8_UINT, VK_FORMAT_D24_UNORM_S8_UINT, VK_FORMAT_D32_SFLOAT_S8_UINT, VK_FORMAT_BC1_RGB_UNORM_BLOCK, VK_FORMAT_BC1_RGB_SRGB_BLOCK, VK_FORMAT_BC1_RGBA_UNORM_BLOCK, VK_FORMAT_BC1_RGBA_SRGB_BLOCK, VK_FORMAT_BC2_UNORM_BLOCK, VK_FORMAT_BC2_SRGB_BLOCK, VK_FORMAT_BC3_UNORM_BLOCK, VK_FORMAT_BC3_SRGB_BLOCK, VK_FORMAT_BC4_UNORM_BLOCK, VK_FORMAT_BC4_SNORM_BLOCK, VK_FORMAT_BC5_UNORM_BLOCK, VK_FORMAT_BC5_SNORM_BLOCK, VK_FORMAT_BC6H_UFLOAT_BLOCK, VK_FORMAT_BC6H_SFLOAT_BLOCK, VK_FORMAT_BC7_UNORM_BLOCK, VK_FORMAT_BC7_SRGB_BLOCK, VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK, VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK, VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK, VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK, VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK, VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK, VK_FORMAT_EAC_R11_UNORM_BLOCK, VK_FORMAT_EAC_R11_SNORM_BLOCK, VK_FORMAT_EAC_R11G11_UNORM_BLOCK, VK_FORMAT_EAC_R11G11_SNORM_BLOCK, VK_FORMAT_ASTC_4x4_UNORM_BLOCK, VK_FORMAT_ASTC_4x4_SRGB_BLOCK, VK_FORMAT_ASTC_5x4_UNORM_BLOCK, VK_FORMAT_ASTC_5x4_SRGB_BLOCK, VK_FORMAT_ASTC_5x5_UNORM_BLOCK, VK_FORMAT_ASTC_5x5_SRGB_BLOCK, VK_FORMAT_ASTC_6x5_UNORM_BLOCK, VK_FORMAT_ASTC_6x5_SRGB_BLOCK, VK_FORMAT_ASTC_6x6_UNORM_BLOCK, VK_FORMAT_ASTC_6x6_SRGB_BLOCK, VK_FORMAT_ASTC_8x5_UNORM_BLOCK, VK_FORMAT_ASTC_8x5_SRGB_BLOCK, VK_FORMAT_ASTC_8x6_UNORM_BLOCK, VK_FORMAT_ASTC_8x6_SRGB_BLOCK, VK_FORMAT_ASTC_8x8_UNORM_BLOCK, VK_FORMAT_ASTC_8x8_SRGB_BLOCK, VK_FORMAT_ASTC_10x5_UNORM_BLOCK, VK_FORMAT_ASTC_10x5_SRGB_BLOCK, VK_FORMAT_ASTC_10x6_UNORM_BLOCK, VK_FORMAT_ASTC_10x6_SRGB_BLOCK, VK_FORMAT_ASTC_10x8_UNORM_BLOCK, VK_FORMAT_ASTC_10x8_SRGB_BLOCK, VK_FORMAT_ASTC_10x10_UNORM_BLOCK, VK_FORMAT_ASTC_10x10_SRGB_BLOCK, VK_FORMAT_ASTC_12x10_UNORM_BLOCK, VK_FORMAT_ASTC_12x10_SRGB_BLOCK, VK_FORMAT_ASTC_12x12_UNORM_BLOCK, VK_FORMAT_ASTC_12x12_SRGB_BLOCK, VK_FORMAT_G8B8G8R8_422_UNORM, VK_FORMAT_B8G8R8G8_422_UNORM, VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM, VK_FORMAT_G8_B8R8_2PLANE_420_UNORM, VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM, VK_FORMAT_G8_B8R8_2PLANE_422_UNORM, VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM, VK_FORMAT_R10X6_UNORM_PACK16, VK_FORMAT_R10X6G10X6_UNORM_2PACK16, VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16, VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16, VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16, VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16, VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16, VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16, VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16, VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16, VK_FORMAT_R12X4_UNORM_PACK16, VK_FORMAT_R12X4G12X4_UNORM_2PACK16, VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16, VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16, VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16, VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16, VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16, VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16, VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16, VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16, VK_FORMAT_G16B16G16R16_422_UNORM, VK_FORMAT_B16G16R16G16_422_UNORM, VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM, VK_FORMAT_G16_B16R16_2PLANE_420_UNORM, VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM, VK_FORMAT_G16_B16R16_2PLANE_422_UNORM, VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM, VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG, VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG, VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG, VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG, VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG, VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG, VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG, VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG, VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT, VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK_EXT, VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK_EXT, VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK_EXT, VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK_EXT, VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK_EXT, VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK_EXT, VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK_EXT, VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK_EXT, VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK_EXT, VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK_EXT, VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK_EXT, VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK_EXT, VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK_EXT, VK_FORMAT_G8B8G8R8_422_UNORM_KHR, VK_FORMAT_B8G8R8G8_422_UNORM_KHR, VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR, VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR, VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR, VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR, VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR, VK_FORMAT_R10X6_UNORM_PACK16_KHR, VK_FORMAT_R10X6G10X6_UNORM_2PACK16_KHR, VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR, VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR, VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR, VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16_KHR, VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16_KHR, VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16_KHR, VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16_KHR, VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16_KHR, VK_FORMAT_R12X4_UNORM_PACK16_KHR, VK_FORMAT_R12X4G12X4_UNORM_2PACK16_KHR, VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR, VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR, VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR, VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16_KHR, VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16_KHR, VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16_KHR, VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16_KHR, VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16_KHR, VK_FORMAT_G16B16G16R16_422_UNORM_KHR, VK_FORMAT_B16G16R16G16_422_UNORM_KHR, VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM_KHR, VK_FORMAT_G16_B16R16_2PLANE_420_UNORM_KHR, VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR, VK_FORMAT_G16_B16R16_2PLANE_422_UNORM_KHR, VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR, };
-const std::vector<VkImageType> AllVkImageTypeEnums = {VK_IMAGE_TYPE_1D, VK_IMAGE_TYPE_2D, VK_IMAGE_TYPE_3D, };
-const std::vector<VkImageTiling> AllVkImageTilingEnums = {VK_IMAGE_TILING_OPTIMAL, VK_IMAGE_TILING_LINEAR, VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT, };
-const std::vector<VkPhysicalDeviceType> AllVkPhysicalDeviceTypeEnums = {VK_PHYSICAL_DEVICE_TYPE_OTHER, VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU, VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU, VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU, VK_PHYSICAL_DEVICE_TYPE_CPU, };
-const std::vector<VkQueryType> AllVkQueryTypeEnums = {VK_QUERY_TYPE_OCCLUSION, VK_QUERY_TYPE_PIPELINE_STATISTICS, VK_QUERY_TYPE_TIMESTAMP, VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT, VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV, VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL, };
-const std::vector<VkSharingMode> AllVkSharingModeEnums = {VK_SHARING_MODE_EXCLUSIVE, VK_SHARING_MODE_CONCURRENT, };
-const std::vector<VkImageLayout> AllVkImageLayoutEnums = {VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_PREINITIALIZED, VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR, VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR, VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR, VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV, VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT, };
-const std::vector<VkImageViewType> AllVkImageViewTypeEnums = {VK_IMAGE_VIEW_TYPE_1D, VK_IMAGE_VIEW_TYPE_2D, VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY, VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, };
-const std::vector<VkComponentSwizzle> AllVkComponentSwizzleEnums = {VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_ZERO, VK_COMPONENT_SWIZZLE_ONE, VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_G, VK_COMPONENT_SWIZZLE_B, VK_COMPONENT_SWIZZLE_A, };
-const std::vector<VkVertexInputRate> AllVkVertexInputRateEnums = {VK_VERTEX_INPUT_RATE_VERTEX, VK_VERTEX_INPUT_RATE_INSTANCE, };
-const std::vector<VkPrimitiveTopology> AllVkPrimitiveTopologyEnums = {VK_PRIMITIVE_TOPOLOGY_POINT_LIST, VK_PRIMITIVE_TOPOLOGY_LINE_LIST, VK_PRIMITIVE_TOPOLOGY_LINE_STRIP, VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST, VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP, VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN, VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY, VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY, VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY, VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY, VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, };
-const std::vector<VkPolygonMode> AllVkPolygonModeEnums = {VK_POLYGON_MODE_FILL, VK_POLYGON_MODE_LINE, VK_POLYGON_MODE_POINT, VK_POLYGON_MODE_FILL_RECTANGLE_NV, };
-const std::vector<VkFrontFace> AllVkFrontFaceEnums = {VK_FRONT_FACE_COUNTER_CLOCKWISE, VK_FRONT_FACE_CLOCKWISE, };
-const std::vector<VkCompareOp> AllVkCompareOpEnums = {VK_COMPARE_OP_NEVER, VK_COMPARE_OP_LESS, VK_COMPARE_OP_EQUAL, VK_COMPARE_OP_LESS_OR_EQUAL, VK_COMPARE_OP_GREATER, VK_COMPARE_OP_NOT_EQUAL, VK_COMPARE_OP_GREATER_OR_EQUAL, VK_COMPARE_OP_ALWAYS, };
-const std::vector<VkStencilOp> AllVkStencilOpEnums = {VK_STENCIL_OP_KEEP, VK_STENCIL_OP_ZERO, VK_STENCIL_OP_REPLACE, VK_STENCIL_OP_INCREMENT_AND_CLAMP, VK_STENCIL_OP_DECREMENT_AND_CLAMP, VK_STENCIL_OP_INVERT, VK_STENCIL_OP_INCREMENT_AND_WRAP, VK_STENCIL_OP_DECREMENT_AND_WRAP, };
-const std::vector<VkLogicOp> AllVkLogicOpEnums = {VK_LOGIC_OP_CLEAR, VK_LOGIC_OP_AND, VK_LOGIC_OP_AND_REVERSE, VK_LOGIC_OP_COPY, VK_LOGIC_OP_AND_INVERTED, VK_LOGIC_OP_NO_OP, VK_LOGIC_OP_XOR, VK_LOGIC_OP_OR, VK_LOGIC_OP_NOR, VK_LOGIC_OP_EQUIVALENT, VK_LOGIC_OP_INVERT, VK_LOGIC_OP_OR_REVERSE, VK_LOGIC_OP_COPY_INVERTED, VK_LOGIC_OP_OR_INVERTED, VK_LOGIC_OP_NAND, VK_LOGIC_OP_SET, };
-const std::vector<VkBlendFactor> AllVkBlendFactorEnums = {VK_BLEND_FACTOR_ZERO, VK_BLEND_FACTOR_ONE, VK_BLEND_FACTOR_SRC_COLOR, VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR, VK_BLEND_FACTOR_DST_COLOR, VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR, VK_BLEND_FACTOR_SRC_ALPHA, VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA, VK_BLEND_FACTOR_DST_ALPHA, VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA, VK_BLEND_FACTOR_CONSTANT_COLOR, VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR, VK_BLEND_FACTOR_CONSTANT_ALPHA, VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA, VK_BLEND_FACTOR_SRC_ALPHA_SATURATE, VK_BLEND_FACTOR_SRC1_COLOR, VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR, VK_BLEND_FACTOR_SRC1_ALPHA, VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA, };
-const std::vector<VkBlendOp> AllVkBlendOpEnums = {VK_BLEND_OP_ADD, VK_BLEND_OP_SUBTRACT, VK_BLEND_OP_REVERSE_SUBTRACT, VK_BLEND_OP_MIN, VK_BLEND_OP_MAX, VK_BLEND_OP_ZERO_EXT, VK_BLEND_OP_SRC_EXT, VK_BLEND_OP_DST_EXT, VK_BLEND_OP_SRC_OVER_EXT, VK_BLEND_OP_DST_OVER_EXT, VK_BLEND_OP_SRC_IN_EXT, VK_BLEND_OP_DST_IN_EXT, VK_BLEND_OP_SRC_OUT_EXT, VK_BLEND_OP_DST_OUT_EXT, VK_BLEND_OP_SRC_ATOP_EXT, VK_BLEND_OP_DST_ATOP_EXT, VK_BLEND_OP_XOR_EXT, VK_BLEND_OP_MULTIPLY_EXT, VK_BLEND_OP_SCREEN_EXT, VK_BLEND_OP_OVERLAY_EXT, VK_BLEND_OP_DARKEN_EXT, VK_BLEND_OP_LIGHTEN_EXT, VK_BLEND_OP_COLORDODGE_EXT, VK_BLEND_OP_COLORBURN_EXT, VK_BLEND_OP_HARDLIGHT_EXT, VK_BLEND_OP_SOFTLIGHT_EXT, VK_BLEND_OP_DIFFERENCE_EXT, VK_BLEND_OP_EXCLUSION_EXT, VK_BLEND_OP_INVERT_EXT, VK_BLEND_OP_INVERT_RGB_EXT, VK_BLEND_OP_LINEARDODGE_EXT, VK_BLEND_OP_LINEARBURN_EXT, VK_BLEND_OP_VIVIDLIGHT_EXT, VK_BLEND_OP_LINEARLIGHT_EXT, VK_BLEND_OP_PINLIGHT_EXT, VK_BLEND_OP_HARDMIX_EXT, VK_BLEND_OP_HSL_HUE_EXT, VK_BLEND_OP_HSL_SATURATION_EXT, VK_BLEND_OP_HSL_COLOR_EXT, VK_BLEND_OP_HSL_LUMINOSITY_EXT, VK_BLEND_OP_PLUS_EXT, VK_BLEND_OP_PLUS_CLAMPED_EXT, VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT, VK_BLEND_OP_PLUS_DARKER_EXT, VK_BLEND_OP_MINUS_EXT, VK_BLEND_OP_MINUS_CLAMPED_EXT, VK_BLEND_OP_CONTRAST_EXT, VK_BLEND_OP_INVERT_OVG_EXT, VK_BLEND_OP_RED_EXT, VK_BLEND_OP_GREEN_EXT, VK_BLEND_OP_BLUE_EXT, };
-const std::vector<VkDynamicState> AllVkDynamicStateEnums = {VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR, VK_DYNAMIC_STATE_LINE_WIDTH, VK_DYNAMIC_STATE_DEPTH_BIAS, VK_DYNAMIC_STATE_BLEND_CONSTANTS, VK_DYNAMIC_STATE_DEPTH_BOUNDS, VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK, VK_DYNAMIC_STATE_STENCIL_WRITE_MASK, VK_DYNAMIC_STATE_STENCIL_REFERENCE, VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV, VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT, VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT, VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV, VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV, VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV, VK_DYNAMIC_STATE_LINE_STIPPLE_EXT, };
-const std::vector<VkFilter> AllVkFilterEnums = {VK_FILTER_NEAREST, VK_FILTER_LINEAR, VK_FILTER_CUBIC_IMG, VK_FILTER_CUBIC_EXT, };
-const std::vector<VkSamplerMipmapMode> AllVkSamplerMipmapModeEnums = {VK_SAMPLER_MIPMAP_MODE_NEAREST, VK_SAMPLER_MIPMAP_MODE_LINEAR, };
-const std::vector<VkSamplerAddressMode> AllVkSamplerAddressModeEnums = {VK_SAMPLER_ADDRESS_MODE_REPEAT, VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT, VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER, VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE, VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE_KHR, };
-const std::vector<VkBorderColor> AllVkBorderColorEnums = {VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK, VK_BORDER_COLOR_INT_TRANSPARENT_BLACK, VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK, VK_BORDER_COLOR_INT_OPAQUE_BLACK, VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE, VK_BORDER_COLOR_INT_OPAQUE_WHITE, };
-const std::vector<VkDescriptorType> AllVkDescriptorTypeEnums = {VK_DESCRIPTOR_TYPE_SAMPLER, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT, VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV, };
-const std::vector<VkAttachmentLoadOp> AllVkAttachmentLoadOpEnums = {VK_ATTACHMENT_LOAD_OP_LOAD, VK_ATTACHMENT_LOAD_OP_CLEAR, VK_ATTACHMENT_LOAD_OP_DONT_CARE, };
-const std::vector<VkAttachmentStoreOp> AllVkAttachmentStoreOpEnums = {VK_ATTACHMENT_STORE_OP_STORE, VK_ATTACHMENT_STORE_OP_DONT_CARE, };
-const std::vector<VkPipelineBindPoint> AllVkPipelineBindPointEnums = {VK_PIPELINE_BIND_POINT_GRAPHICS, VK_PIPELINE_BIND_POINT_COMPUTE, VK_PIPELINE_BIND_POINT_RAY_TRACING_NV, };
-const std::vector<VkCommandBufferLevel> AllVkCommandBufferLevelEnums = {VK_COMMAND_BUFFER_LEVEL_PRIMARY, VK_COMMAND_BUFFER_LEVEL_SECONDARY, };
-const std::vector<VkIndexType> AllVkIndexTypeEnums = {VK_INDEX_TYPE_UINT16, VK_INDEX_TYPE_UINT32, VK_INDEX_TYPE_NONE_NV, VK_INDEX_TYPE_UINT8_EXT, };
-const std::vector<VkSubpassContents> AllVkSubpassContentsEnums = {VK_SUBPASS_CONTENTS_INLINE, VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS, };
-const std::vector<VkObjectType> AllVkObjectTypeEnums = {VK_OBJECT_TYPE_UNKNOWN, VK_OBJECT_TYPE_INSTANCE, VK_OBJECT_TYPE_PHYSICAL_DEVICE, VK_OBJECT_TYPE_DEVICE, VK_OBJECT_TYPE_QUEUE, VK_OBJECT_TYPE_SEMAPHORE, VK_OBJECT_TYPE_COMMAND_BUFFER, VK_OBJECT_TYPE_FENCE, VK_OBJECT_TYPE_DEVICE_MEMORY, VK_OBJECT_TYPE_BUFFER, VK_OBJECT_TYPE_IMAGE, VK_OBJECT_TYPE_EVENT, VK_OBJECT_TYPE_QUERY_POOL, VK_OBJECT_TYPE_BUFFER_VIEW, VK_OBJECT_TYPE_IMAGE_VIEW, VK_OBJECT_TYPE_SHADER_MODULE, VK_OBJECT_TYPE_PIPELINE_CACHE, VK_OBJECT_TYPE_PIPELINE_LAYOUT, VK_OBJECT_TYPE_RENDER_PASS, VK_OBJECT_TYPE_PIPELINE, VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT, VK_OBJECT_TYPE_SAMPLER, VK_OBJECT_TYPE_DESCRIPTOR_POOL, VK_OBJECT_TYPE_DESCRIPTOR_SET, VK_OBJECT_TYPE_FRAMEBUFFER, VK_OBJECT_TYPE_COMMAND_POOL, VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION, VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE, VK_OBJECT_TYPE_SURFACE_KHR, VK_OBJECT_TYPE_SWAPCHAIN_KHR, VK_OBJECT_TYPE_DISPLAY_KHR, VK_OBJECT_TYPE_DISPLAY_MODE_KHR, VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT, VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR, VK_OBJECT_TYPE_OBJECT_TABLE_NVX, VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX, VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT, VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR, VK_OBJECT_TYPE_VALIDATION_CACHE_EXT, VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV, VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL, };
-const std::vector<VkVendorId> AllVkVendorIdEnums = {VK_VENDOR_ID_VIV, VK_VENDOR_ID_VSI, VK_VENDOR_ID_KAZAN, };
-const std::vector<VkPointClippingBehavior> AllVkPointClippingBehaviorEnums = {VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES, VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY, VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES_KHR, VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY_KHR, };
-const std::vector<VkTessellationDomainOrigin> AllVkTessellationDomainOriginEnums = {VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT, VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT, VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT_KHR, VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT_KHR, };
-const std::vector<VkSamplerYcbcrModelConversion> AllVkSamplerYcbcrModelConversionEnums = {VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY, VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY, VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709, VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601, VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020, VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY_KHR, VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY_KHR, VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709_KHR, VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601_KHR, VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020_KHR, };
-const std::vector<VkSamplerYcbcrRange> AllVkSamplerYcbcrRangeEnums = {VK_SAMPLER_YCBCR_RANGE_ITU_FULL, VK_SAMPLER_YCBCR_RANGE_ITU_NARROW, VK_SAMPLER_YCBCR_RANGE_ITU_FULL_KHR, VK_SAMPLER_YCBCR_RANGE_ITU_NARROW_KHR, };
-const std::vector<VkChromaLocation> AllVkChromaLocationEnums = {VK_CHROMA_LOCATION_COSITED_EVEN, VK_CHROMA_LOCATION_MIDPOINT, VK_CHROMA_LOCATION_COSITED_EVEN_KHR, VK_CHROMA_LOCATION_MIDPOINT_KHR, };
-const std::vector<VkDescriptorUpdateTemplateType> AllVkDescriptorUpdateTemplateTypeEnums = {VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET, VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR, VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR, VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR, };
-const std::vector<VkColorSpaceKHR> AllVkColorSpaceKHREnums = {VK_COLOR_SPACE_SRGB_NONLINEAR_KHR, VK_COLORSPACE_SRGB_NONLINEAR_KHR, VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT, VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT, VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT, VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT, VK_COLOR_SPACE_BT709_LINEAR_EXT, VK_COLOR_SPACE_BT709_NONLINEAR_EXT, VK_COLOR_SPACE_BT2020_LINEAR_EXT, VK_COLOR_SPACE_HDR10_ST2084_EXT, VK_COLOR_SPACE_DOLBYVISION_EXT, VK_COLOR_SPACE_HDR10_HLG_EXT, VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT, VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT, VK_COLOR_SPACE_PASS_THROUGH_EXT, VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT, VK_COLOR_SPACE_DCI_P3_LINEAR_EXT, VK_COLOR_SPACE_DISPLAY_NATIVE_AMD, };
-const std::vector<VkPresentModeKHR> AllVkPresentModeKHREnums = {VK_PRESENT_MODE_IMMEDIATE_KHR, VK_PRESENT_MODE_MAILBOX_KHR, VK_PRESENT_MODE_FIFO_KHR, VK_PRESENT_MODE_FIFO_RELAXED_KHR, VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR, VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR, };
-const std::vector<VkDescriptorUpdateTemplateTypeKHR> AllVkDescriptorUpdateTemplateTypeKHREnums = {VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET, VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR, VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR, VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR, };
-const std::vector<VkPointClippingBehaviorKHR> AllVkPointClippingBehaviorKHREnums = {VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES, VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY, VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES_KHR, VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY_KHR, };
-const std::vector<VkTessellationDomainOriginKHR> AllVkTessellationDomainOriginKHREnums = {VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT, VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT, VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT_KHR, VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT_KHR, };
-const std::vector<VkSamplerYcbcrModelConversionKHR> AllVkSamplerYcbcrModelConversionKHREnums = {VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY, VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY, VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709, VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601, VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020, VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY_KHR, VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY_KHR, VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709_KHR, VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601_KHR, VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020_KHR, };
-const std::vector<VkSamplerYcbcrRangeKHR> AllVkSamplerYcbcrRangeKHREnums = {VK_SAMPLER_YCBCR_RANGE_ITU_FULL, VK_SAMPLER_YCBCR_RANGE_ITU_NARROW, VK_SAMPLER_YCBCR_RANGE_ITU_FULL_KHR, VK_SAMPLER_YCBCR_RANGE_ITU_NARROW_KHR, };
-const std::vector<VkChromaLocationKHR> AllVkChromaLocationKHREnums = {VK_CHROMA_LOCATION_COSITED_EVEN, VK_CHROMA_LOCATION_MIDPOINT, VK_CHROMA_LOCATION_COSITED_EVEN_KHR, VK_CHROMA_LOCATION_MIDPOINT_KHR, };
-const std::vector<VkDriverIdKHR> AllVkDriverIdKHREnums = {VK_DRIVER_ID_AMD_PROPRIETARY_KHR, VK_DRIVER_ID_AMD_OPEN_SOURCE_KHR, VK_DRIVER_ID_MESA_RADV_KHR, VK_DRIVER_ID_NVIDIA_PROPRIETARY_KHR, VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS_KHR, VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA_KHR, VK_DRIVER_ID_IMAGINATION_PROPRIETARY_KHR, VK_DRIVER_ID_QUALCOMM_PROPRIETARY_KHR, VK_DRIVER_ID_ARM_PROPRIETARY_KHR, VK_DRIVER_ID_GOOGLE_SWIFTSHADER_KHR, VK_DRIVER_ID_GGP_PROPRIETARY_KHR, VK_DRIVER_ID_BROADCOM_PROPRIETARY_KHR, };
-const std::vector<VkShaderFloatControlsIndependenceKHR> AllVkShaderFloatControlsIndependenceKHREnums = {VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR, VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL_KHR, VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR, };
-const std::vector<VkPipelineExecutableStatisticFormatKHR> AllVkPipelineExecutableStatisticFormatKHREnums = {VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR, VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR, VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR, VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR, };
-const std::vector<VkDebugReportObjectTypeEXT> AllVkDebugReportObjectTypeEXTEnums = {VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT, };
-const std::vector<VkRasterizationOrderAMD> AllVkRasterizationOrderAMDEnums = {VK_RASTERIZATION_ORDER_STRICT_AMD, VK_RASTERIZATION_ORDER_RELAXED_AMD, };
-const std::vector<VkShaderInfoTypeAMD> AllVkShaderInfoTypeAMDEnums = {VK_SHADER_INFO_TYPE_STATISTICS_AMD, VK_SHADER_INFO_TYPE_BINARY_AMD, VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD, };
-const std::vector<VkValidationCheckEXT> AllVkValidationCheckEXTEnums = {VK_VALIDATION_CHECK_ALL_EXT, VK_VALIDATION_CHECK_SHADERS_EXT, };
-const std::vector<VkIndirectCommandsTokenTypeNVX> AllVkIndirectCommandsTokenTypeNVXEnums = {VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX, VK_INDIRECT_COMMANDS_TOKEN_TYPE_DESCRIPTOR_SET_NVX, VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NVX, VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NVX, VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NVX, VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NVX, VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NVX, VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX, };
-const std::vector<VkObjectEntryTypeNVX> AllVkObjectEntryTypeNVXEnums = {VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX, VK_OBJECT_ENTRY_TYPE_PIPELINE_NVX, VK_OBJECT_ENTRY_TYPE_INDEX_BUFFER_NVX, VK_OBJECT_ENTRY_TYPE_VERTEX_BUFFER_NVX, VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX, };
-const std::vector<VkDisplayPowerStateEXT> AllVkDisplayPowerStateEXTEnums = {VK_DISPLAY_POWER_STATE_OFF_EXT, VK_DISPLAY_POWER_STATE_SUSPEND_EXT, VK_DISPLAY_POWER_STATE_ON_EXT, };
-const std::vector<VkDeviceEventTypeEXT> AllVkDeviceEventTypeEXTEnums = {VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT, };
-const std::vector<VkDisplayEventTypeEXT> AllVkDisplayEventTypeEXTEnums = {VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT, };
-const std::vector<VkViewportCoordinateSwizzleNV> AllVkViewportCoordinateSwizzleNVEnums = {VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV, VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV, VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV, VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV, VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV, VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV, VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV, VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV, };
-const std::vector<VkDiscardRectangleModeEXT> AllVkDiscardRectangleModeEXTEnums = {VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT, VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT, };
-const std::vector<VkConservativeRasterizationModeEXT> AllVkConservativeRasterizationModeEXTEnums = {VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT, VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT, VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT, };
-const std::vector<VkSamplerReductionModeEXT> AllVkSamplerReductionModeEXTEnums = {VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT, VK_SAMPLER_REDUCTION_MODE_MIN_EXT, VK_SAMPLER_REDUCTION_MODE_MAX_EXT, };
-const std::vector<VkBlendOverlapEXT> AllVkBlendOverlapEXTEnums = {VK_BLEND_OVERLAP_UNCORRELATED_EXT, VK_BLEND_OVERLAP_DISJOINT_EXT, VK_BLEND_OVERLAP_CONJOINT_EXT, };
-const std::vector<VkCoverageModulationModeNV> AllVkCoverageModulationModeNVEnums = {VK_COVERAGE_MODULATION_MODE_NONE_NV, VK_COVERAGE_MODULATION_MODE_RGB_NV, VK_COVERAGE_MODULATION_MODE_ALPHA_NV, VK_COVERAGE_MODULATION_MODE_RGBA_NV, };
-const std::vector<VkValidationCacheHeaderVersionEXT> AllVkValidationCacheHeaderVersionEXTEnums = {VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT, };
-const std::vector<VkShadingRatePaletteEntryNV> AllVkShadingRatePaletteEntryNVEnums = {VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV, VK_SHADING_RATE_PALETTE_ENTRY_16_INVOCATIONS_PER_PIXEL_NV, VK_SHADING_RATE_PALETTE_ENTRY_8_INVOCATIONS_PER_PIXEL_NV, VK_SHADING_RATE_PALETTE_ENTRY_4_INVOCATIONS_PER_PIXEL_NV, VK_SHADING_RATE_PALETTE_ENTRY_2_INVOCATIONS_PER_PIXEL_NV, VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_PIXEL_NV, VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X1_PIXELS_NV, VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV, VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X2_PIXELS_NV, VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X2_PIXELS_NV, VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X4_PIXELS_NV, VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV, };
-const std::vector<VkCoarseSampleOrderTypeNV> AllVkCoarseSampleOrderTypeNVEnums = {VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV, VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV, VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV, VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV, };
-const std::vector<VkAccelerationStructureTypeNV> AllVkAccelerationStructureTypeNVEnums = {VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV, VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV, };
-const std::vector<VkRayTracingShaderGroupTypeNV> AllVkRayTracingShaderGroupTypeNVEnums = {VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV, VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV, VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV, };
-const std::vector<VkGeometryTypeNV> AllVkGeometryTypeNVEnums = {VK_GEOMETRY_TYPE_TRIANGLES_NV, VK_GEOMETRY_TYPE_AABBS_NV, };
-const std::vector<VkCopyAccelerationStructureModeNV> AllVkCopyAccelerationStructureModeNVEnums = {VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV, VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV, };
-const std::vector<VkAccelerationStructureMemoryRequirementsTypeNV> AllVkAccelerationStructureMemoryRequirementsTypeNVEnums = {VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV, VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV, VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV, };
-const std::vector<VkQueueGlobalPriorityEXT> AllVkQueueGlobalPriorityEXTEnums = {VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT, VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT, VK_QUEUE_GLOBAL_PRIORITY_HIGH_EXT, VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT, };
-const std::vector<VkTimeDomainEXT> AllVkTimeDomainEXTEnums = {VK_TIME_DOMAIN_DEVICE_EXT, VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT, VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_EXT, VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT, };
-const std::vector<VkMemoryOverallocationBehaviorAMD> AllVkMemoryOverallocationBehaviorAMDEnums = {VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD, VK_MEMORY_OVERALLOCATION_BEHAVIOR_ALLOWED_AMD, VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD, };
-const std::vector<VkPerformanceConfigurationTypeINTEL> AllVkPerformanceConfigurationTypeINTELEnums = {VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL, };
-const std::vector<VkQueryPoolSamplingModeINTEL> AllVkQueryPoolSamplingModeINTELEnums = {VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL, };
-const std::vector<VkPerformanceOverrideTypeINTEL> AllVkPerformanceOverrideTypeINTELEnums = {VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL, VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL, };
-const std::vector<VkPerformanceParameterTypeINTEL> AllVkPerformanceParameterTypeINTELEnums = {VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL, VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL, };
-const std::vector<VkPerformanceValueTypeINTEL> AllVkPerformanceValueTypeINTELEnums = {VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL, VK_PERFORMANCE_VALUE_TYPE_UINT64_INTEL, VK_PERFORMANCE_VALUE_TYPE_FLOAT_INTEL, VK_PERFORMANCE_VALUE_TYPE_BOOL_INTEL, VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL, };
-const std::vector<VkValidationFeatureEnableEXT> AllVkValidationFeatureEnableEXTEnums = {VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT, VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT, VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT, };
-const std::vector<VkValidationFeatureDisableEXT> AllVkValidationFeatureDisableEXTEnums = {VK_VALIDATION_FEATURE_DISABLE_ALL_EXT, VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT, VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT, VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT, VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT, VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT, VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT, };
-const std::vector<VkComponentTypeNV> AllVkComponentTypeNVEnums = {VK_COMPONENT_TYPE_FLOAT16_NV, VK_COMPONENT_TYPE_FLOAT32_NV, VK_COMPONENT_TYPE_FLOAT64_NV, VK_COMPONENT_TYPE_SINT8_NV, VK_COMPONENT_TYPE_SINT16_NV, VK_COMPONENT_TYPE_SINT32_NV, VK_COMPONENT_TYPE_SINT64_NV, VK_COMPONENT_TYPE_UINT8_NV, VK_COMPONENT_TYPE_UINT16_NV, VK_COMPONENT_TYPE_UINT32_NV, VK_COMPONENT_TYPE_UINT64_NV, };
-const std::vector<VkScopeNV> AllVkScopeNVEnums = {VK_SCOPE_DEVICE_NV, VK_SCOPE_WORKGROUP_NV, VK_SCOPE_SUBGROUP_NV, VK_SCOPE_QUEUE_FAMILY_NV, };
-const std::vector<VkCoverageReductionModeNV> AllVkCoverageReductionModeNVEnums = {VK_COVERAGE_REDUCTION_MODE_MERGE_NV, VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV, };
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-const std::vector<VkFullScreenExclusiveEXT> AllVkFullScreenExclusiveEXTEnums = {VK_FULL_SCREEN_EXCLUSIVE_DEFAULT_EXT, VK_FULL_SCREEN_EXCLUSIVE_ALLOWED_EXT, VK_FULL_SCREEN_EXCLUSIVE_DISALLOWED_EXT, VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT, };
-#endif // VK_USE_PLATFORM_WIN32_KHR
-const std::vector<VkLineRasterizationModeEXT> AllVkLineRasterizationModeEXTEnums = {VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT, VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT, VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT, VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT, };
-
-
-bool StatelessValidation::ValidatePnextStructContents(const char *api_name, const ParameterName &parameter_name, const VkBaseOutStructure* header) {
- bool skip = false;
- switch(header->sType) {
-
- // Validation code for VkPhysicalDevice16BitStorageFeatures structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES: {
- VkPhysicalDevice16BitStorageFeatures *structure = (VkPhysicalDevice16BitStorageFeatures *) header;
- skip |= validate_bool32("VkPhysicalDevice16BitStorageFeatures", "storageBuffer16BitAccess", structure->storageBuffer16BitAccess);
-
- skip |= validate_bool32("VkPhysicalDevice16BitStorageFeatures", "uniformAndStorageBuffer16BitAccess", structure->uniformAndStorageBuffer16BitAccess);
-
- skip |= validate_bool32("VkPhysicalDevice16BitStorageFeatures", "storagePushConstant16", structure->storagePushConstant16);
-
- skip |= validate_bool32("VkPhysicalDevice16BitStorageFeatures", "storageInputOutput16", structure->storageInputOutput16);
- } break;
-
- // Validation code for VkMemoryAllocateFlagsInfo structure members
- case VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO: {
- VkMemoryAllocateFlagsInfo *structure = (VkMemoryAllocateFlagsInfo *) header;
- skip |= validate_flags("VkMemoryAllocateFlagsInfo", "flags", "VkMemoryAllocateFlagBits", AllVkMemoryAllocateFlagBits, structure->flags, kOptionalFlags, "VUID-VkMemoryAllocateFlagsInfo-flags-parameter");
- } break;
-
- // Validation code for VkDeviceGroupRenderPassBeginInfo structure members
- case VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO: {
- VkDeviceGroupRenderPassBeginInfo *structure = (VkDeviceGroupRenderPassBeginInfo *) header;
- skip |= validate_array("VkDeviceGroupRenderPassBeginInfo", "deviceRenderAreaCount", "pDeviceRenderAreas", structure->deviceRenderAreaCount, &structure->pDeviceRenderAreas, false, true, kVUIDUndefined, "VUID-VkDeviceGroupRenderPassBeginInfo-pDeviceRenderAreas-parameter");
-
- if (structure->pDeviceRenderAreas != NULL)
- {
- for (uint32_t deviceRenderAreaIndex = 0; deviceRenderAreaIndex < structure->deviceRenderAreaCount; ++deviceRenderAreaIndex)
- {
- }
- }
- } break;
-
- // Validation code for VkDeviceGroupSubmitInfo structure members
- case VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO: {
- VkDeviceGroupSubmitInfo *structure = (VkDeviceGroupSubmitInfo *) header;
- skip |= validate_array("VkDeviceGroupSubmitInfo", "waitSemaphoreCount", "pWaitSemaphoreDeviceIndices", structure->waitSemaphoreCount, &structure->pWaitSemaphoreDeviceIndices, false, true, kVUIDUndefined, "VUID-VkDeviceGroupSubmitInfo-pWaitSemaphoreDeviceIndices-parameter");
-
- skip |= validate_array("VkDeviceGroupSubmitInfo", "commandBufferCount", "pCommandBufferDeviceMasks", structure->commandBufferCount, &structure->pCommandBufferDeviceMasks, false, true, kVUIDUndefined, "VUID-VkDeviceGroupSubmitInfo-pCommandBufferDeviceMasks-parameter");
-
- skip |= validate_array("VkDeviceGroupSubmitInfo", "signalSemaphoreCount", "pSignalSemaphoreDeviceIndices", structure->signalSemaphoreCount, &structure->pSignalSemaphoreDeviceIndices, false, true, kVUIDUndefined, "VUID-VkDeviceGroupSubmitInfo-pSignalSemaphoreDeviceIndices-parameter");
- } break;
-
- // Validation code for VkBindBufferMemoryDeviceGroupInfo structure members
- case VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO: {
- VkBindBufferMemoryDeviceGroupInfo *structure = (VkBindBufferMemoryDeviceGroupInfo *) header;
- skip |= validate_array("VkBindBufferMemoryDeviceGroupInfo", "deviceIndexCount", "pDeviceIndices", structure->deviceIndexCount, &structure->pDeviceIndices, false, true, kVUIDUndefined, "VUID-VkBindBufferMemoryDeviceGroupInfo-pDeviceIndices-parameter");
- } break;
-
- // Validation code for VkBindImageMemoryDeviceGroupInfo structure members
- case VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO: {
- VkBindImageMemoryDeviceGroupInfo *structure = (VkBindImageMemoryDeviceGroupInfo *) header;
- skip |= validate_array("VkBindImageMemoryDeviceGroupInfo", "deviceIndexCount", "pDeviceIndices", structure->deviceIndexCount, &structure->pDeviceIndices, false, true, kVUIDUndefined, "VUID-VkBindImageMemoryDeviceGroupInfo-pDeviceIndices-parameter");
-
- skip |= validate_array("VkBindImageMemoryDeviceGroupInfo", "splitInstanceBindRegionCount", "pSplitInstanceBindRegions", structure->splitInstanceBindRegionCount, &structure->pSplitInstanceBindRegions, false, true, kVUIDUndefined, "VUID-VkBindImageMemoryDeviceGroupInfo-pSplitInstanceBindRegions-parameter");
-
- if (structure->pSplitInstanceBindRegions != NULL)
- {
- for (uint32_t splitInstanceBindRegionIndex = 0; splitInstanceBindRegionIndex < structure->splitInstanceBindRegionCount; ++splitInstanceBindRegionIndex)
- {
- }
- }
- } break;
-
- // Validation code for VkDeviceGroupDeviceCreateInfo structure members
- case VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO: {
- VkDeviceGroupDeviceCreateInfo *structure = (VkDeviceGroupDeviceCreateInfo *) header;
- skip |= validate_array("VkDeviceGroupDeviceCreateInfo", "physicalDeviceCount", "pPhysicalDevices", structure->physicalDeviceCount, &structure->pPhysicalDevices, false, true, kVUIDUndefined, "VUID-VkDeviceGroupDeviceCreateInfo-pPhysicalDevices-parameter");
- } break;
-
- // Validation code for VkPhysicalDeviceFeatures2 structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2: {
- VkPhysicalDeviceFeatures2 *structure = (VkPhysicalDeviceFeatures2 *) header;
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.robustBufferAccess", structure->features.robustBufferAccess);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.fullDrawIndexUint32", structure->features.fullDrawIndexUint32);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.imageCubeArray", structure->features.imageCubeArray);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.independentBlend", structure->features.independentBlend);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.geometryShader", structure->features.geometryShader);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.tessellationShader", structure->features.tessellationShader);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.sampleRateShading", structure->features.sampleRateShading);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.dualSrcBlend", structure->features.dualSrcBlend);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.logicOp", structure->features.logicOp);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.multiDrawIndirect", structure->features.multiDrawIndirect);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.drawIndirectFirstInstance", structure->features.drawIndirectFirstInstance);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.depthClamp", structure->features.depthClamp);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.depthBiasClamp", structure->features.depthBiasClamp);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.fillModeNonSolid", structure->features.fillModeNonSolid);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.depthBounds", structure->features.depthBounds);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.wideLines", structure->features.wideLines);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.largePoints", structure->features.largePoints);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.alphaToOne", structure->features.alphaToOne);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.multiViewport", structure->features.multiViewport);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.samplerAnisotropy", structure->features.samplerAnisotropy);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.textureCompressionETC2", structure->features.textureCompressionETC2);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.textureCompressionASTC_LDR", structure->features.textureCompressionASTC_LDR);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.textureCompressionBC", structure->features.textureCompressionBC);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.occlusionQueryPrecise", structure->features.occlusionQueryPrecise);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.pipelineStatisticsQuery", structure->features.pipelineStatisticsQuery);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.vertexPipelineStoresAndAtomics", structure->features.vertexPipelineStoresAndAtomics);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.fragmentStoresAndAtomics", structure->features.fragmentStoresAndAtomics);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.shaderTessellationAndGeometryPointSize", structure->features.shaderTessellationAndGeometryPointSize);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.shaderImageGatherExtended", structure->features.shaderImageGatherExtended);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.shaderStorageImageExtendedFormats", structure->features.shaderStorageImageExtendedFormats);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.shaderStorageImageMultisample", structure->features.shaderStorageImageMultisample);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.shaderStorageImageReadWithoutFormat", structure->features.shaderStorageImageReadWithoutFormat);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.shaderStorageImageWriteWithoutFormat", structure->features.shaderStorageImageWriteWithoutFormat);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.shaderUniformBufferArrayDynamicIndexing", structure->features.shaderUniformBufferArrayDynamicIndexing);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.shaderSampledImageArrayDynamicIndexing", structure->features.shaderSampledImageArrayDynamicIndexing);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.shaderStorageBufferArrayDynamicIndexing", structure->features.shaderStorageBufferArrayDynamicIndexing);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.shaderStorageImageArrayDynamicIndexing", structure->features.shaderStorageImageArrayDynamicIndexing);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.shaderClipDistance", structure->features.shaderClipDistance);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.shaderCullDistance", structure->features.shaderCullDistance);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.shaderFloat64", structure->features.shaderFloat64);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.shaderInt64", structure->features.shaderInt64);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.shaderInt16", structure->features.shaderInt16);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.shaderResourceResidency", structure->features.shaderResourceResidency);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.shaderResourceMinLod", structure->features.shaderResourceMinLod);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.sparseBinding", structure->features.sparseBinding);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.sparseResidencyBuffer", structure->features.sparseResidencyBuffer);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.sparseResidencyImage2D", structure->features.sparseResidencyImage2D);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.sparseResidencyImage3D", structure->features.sparseResidencyImage3D);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.sparseResidency2Samples", structure->features.sparseResidency2Samples);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.sparseResidency4Samples", structure->features.sparseResidency4Samples);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.sparseResidency8Samples", structure->features.sparseResidency8Samples);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.sparseResidency16Samples", structure->features.sparseResidency16Samples);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.sparseResidencyAliased", structure->features.sparseResidencyAliased);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.variableMultisampleRate", structure->features.variableMultisampleRate);
-
- skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.inheritedQueries", structure->features.inheritedQueries);
- } break;
-
- // Validation code for VkRenderPassInputAttachmentAspectCreateInfo structure members
- case VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO: {
- VkRenderPassInputAttachmentAspectCreateInfo *structure = (VkRenderPassInputAttachmentAspectCreateInfo *) header;
- skip |= validate_array("VkRenderPassInputAttachmentAspectCreateInfo", "aspectReferenceCount", "pAspectReferences", structure->aspectReferenceCount, &structure->pAspectReferences, true, true, "VUID-VkRenderPassInputAttachmentAspectCreateInfo-aspectReferenceCount-arraylength", "VUID-VkRenderPassInputAttachmentAspectCreateInfo-pAspectReferences-parameter");
-
- if (structure->pAspectReferences != NULL)
- {
- for (uint32_t aspectReferenceIndex = 0; aspectReferenceIndex < structure->aspectReferenceCount; ++aspectReferenceIndex)
- {
- skip |= validate_flags("VkRenderPassInputAttachmentAspectCreateInfo", ParameterName("pAspectReferences[%i].aspectMask", ParameterName::IndexVector{ aspectReferenceIndex }), "VkImageAspectFlagBits", AllVkImageAspectFlagBits, structure->pAspectReferences[aspectReferenceIndex].aspectMask, kRequiredFlags, "VUID-VkInputAttachmentAspectReference-aspectMask-parameter", "VUID-VkInputAttachmentAspectReference-aspectMask-requiredbitmask");
- }
- }
- } break;
-
- // Validation code for VkImageViewUsageCreateInfo structure members
- case VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO: {
- VkImageViewUsageCreateInfo *structure = (VkImageViewUsageCreateInfo *) header;
- skip |= validate_flags("VkImageViewUsageCreateInfo", "usage", "VkImageUsageFlagBits", AllVkImageUsageFlagBits, structure->usage, kRequiredFlags, "VUID-VkImageViewUsageCreateInfo-usage-parameter", "VUID-VkImageViewUsageCreateInfo-usage-requiredbitmask");
- } break;
-
- // Validation code for VkPipelineTessellationDomainOriginStateCreateInfo structure members
- case VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO: {
- VkPipelineTessellationDomainOriginStateCreateInfo *structure = (VkPipelineTessellationDomainOriginStateCreateInfo *) header;
- skip |= validate_ranged_enum("VkPipelineTessellationDomainOriginStateCreateInfo", "domainOrigin", "VkTessellationDomainOrigin", AllVkTessellationDomainOriginEnums, structure->domainOrigin, "VUID-VkPipelineTessellationDomainOriginStateCreateInfo-domainOrigin-parameter");
- } break;
-
- // Validation code for VkRenderPassMultiviewCreateInfo structure members
- case VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO: {
- VkRenderPassMultiviewCreateInfo *structure = (VkRenderPassMultiviewCreateInfo *) header;
- skip |= validate_array("VkRenderPassMultiviewCreateInfo", "subpassCount", "pViewMasks", structure->subpassCount, &structure->pViewMasks, false, true, kVUIDUndefined, "VUID-VkRenderPassMultiviewCreateInfo-pViewMasks-parameter");
-
- skip |= validate_array("VkRenderPassMultiviewCreateInfo", "dependencyCount", "pViewOffsets", structure->dependencyCount, &structure->pViewOffsets, false, true, kVUIDUndefined, "VUID-VkRenderPassMultiviewCreateInfo-pViewOffsets-parameter");
-
- skip |= validate_array("VkRenderPassMultiviewCreateInfo", "correlationMaskCount", "pCorrelationMasks", structure->correlationMaskCount, &structure->pCorrelationMasks, false, true, kVUIDUndefined, "VUID-VkRenderPassMultiviewCreateInfo-pCorrelationMasks-parameter");
- } break;
-
- // Validation code for VkPhysicalDeviceMultiviewFeatures structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES: {
- VkPhysicalDeviceMultiviewFeatures *structure = (VkPhysicalDeviceMultiviewFeatures *) header;
- skip |= validate_bool32("VkPhysicalDeviceMultiviewFeatures", "multiview", structure->multiview);
-
- skip |= validate_bool32("VkPhysicalDeviceMultiviewFeatures", "multiviewGeometryShader", structure->multiviewGeometryShader);
-
- skip |= validate_bool32("VkPhysicalDeviceMultiviewFeatures", "multiviewTessellationShader", structure->multiviewTessellationShader);
- } break;
-
- // Validation code for VkPhysicalDeviceVariablePointersFeatures structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES: {
- VkPhysicalDeviceVariablePointersFeatures *structure = (VkPhysicalDeviceVariablePointersFeatures *) header;
- skip |= validate_bool32("VkPhysicalDeviceVariablePointersFeatures", "variablePointersStorageBuffer", structure->variablePointersStorageBuffer);
-
- skip |= validate_bool32("VkPhysicalDeviceVariablePointersFeatures", "variablePointers", structure->variablePointers);
- } break;
-
- // Validation code for VkPhysicalDeviceProtectedMemoryFeatures structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES: {
- VkPhysicalDeviceProtectedMemoryFeatures *structure = (VkPhysicalDeviceProtectedMemoryFeatures *) header;
- skip |= validate_bool32("VkPhysicalDeviceProtectedMemoryFeatures", "protectedMemory", structure->protectedMemory);
- } break;
-
- // Validation code for VkProtectedSubmitInfo structure members
- case VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO: {
- VkProtectedSubmitInfo *structure = (VkProtectedSubmitInfo *) header;
- skip |= validate_bool32("VkProtectedSubmitInfo", "protectedSubmit", structure->protectedSubmit);
- } break;
-
- // Validation code for VkSamplerYcbcrConversionInfo structure members
- case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO: {
- VkSamplerYcbcrConversionInfo *structure = (VkSamplerYcbcrConversionInfo *) header;
- skip |= validate_required_handle("VkSamplerYcbcrConversionInfo", "conversion", structure->conversion);
- } break;
-
- // Validation code for VkBindImagePlaneMemoryInfo structure members
- case VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO: {
- VkBindImagePlaneMemoryInfo *structure = (VkBindImagePlaneMemoryInfo *) header;
- skip |= validate_flags("VkBindImagePlaneMemoryInfo", "planeAspect", "VkImageAspectFlagBits", AllVkImageAspectFlagBits, structure->planeAspect, kRequiredSingleBit, "VUID-VkBindImagePlaneMemoryInfo-planeAspect-parameter", "VUID-VkBindImagePlaneMemoryInfo-planeAspect-parameter");
- } break;
-
- // Validation code for VkImagePlaneMemoryRequirementsInfo structure members
- case VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO: {
- VkImagePlaneMemoryRequirementsInfo *structure = (VkImagePlaneMemoryRequirementsInfo *) header;
- skip |= validate_flags("VkImagePlaneMemoryRequirementsInfo", "planeAspect", "VkImageAspectFlagBits", AllVkImageAspectFlagBits, structure->planeAspect, kRequiredSingleBit, "VUID-VkImagePlaneMemoryRequirementsInfo-planeAspect-parameter", "VUID-VkImagePlaneMemoryRequirementsInfo-planeAspect-parameter");
- } break;
-
- // Validation code for VkPhysicalDeviceSamplerYcbcrConversionFeatures structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES: {
- VkPhysicalDeviceSamplerYcbcrConversionFeatures *structure = (VkPhysicalDeviceSamplerYcbcrConversionFeatures *) header;
- skip |= validate_bool32("VkPhysicalDeviceSamplerYcbcrConversionFeatures", "samplerYcbcrConversion", structure->samplerYcbcrConversion);
- } break;
-
- // Validation code for VkPhysicalDeviceExternalImageFormatInfo structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO: {
- VkPhysicalDeviceExternalImageFormatInfo *structure = (VkPhysicalDeviceExternalImageFormatInfo *) header;
- skip |= validate_flags("VkPhysicalDeviceExternalImageFormatInfo", "handleType", "VkExternalMemoryHandleTypeFlagBits", AllVkExternalMemoryHandleTypeFlagBits, structure->handleType, kOptionalSingleBit, "VUID-VkPhysicalDeviceExternalImageFormatInfo-handleType-parameter");
- } break;
-
- // Validation code for VkExternalMemoryImageCreateInfo structure members
- case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO: {
- VkExternalMemoryImageCreateInfo *structure = (VkExternalMemoryImageCreateInfo *) header;
- skip |= validate_flags("VkExternalMemoryImageCreateInfo", "handleTypes", "VkExternalMemoryHandleTypeFlagBits", AllVkExternalMemoryHandleTypeFlagBits, structure->handleTypes, kRequiredFlags, "VUID-VkExternalMemoryImageCreateInfo-handleTypes-parameter", "VUID-VkExternalMemoryImageCreateInfo-handleTypes-requiredbitmask");
- } break;
-
- // Validation code for VkExternalMemoryBufferCreateInfo structure members
- case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO: {
- VkExternalMemoryBufferCreateInfo *structure = (VkExternalMemoryBufferCreateInfo *) header;
- skip |= validate_flags("VkExternalMemoryBufferCreateInfo", "handleTypes", "VkExternalMemoryHandleTypeFlagBits", AllVkExternalMemoryHandleTypeFlagBits, structure->handleTypes, kOptionalFlags, "VUID-VkExternalMemoryBufferCreateInfo-handleTypes-parameter");
- } break;
-
- // Validation code for VkExportMemoryAllocateInfo structure members
- case VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO: {
- VkExportMemoryAllocateInfo *structure = (VkExportMemoryAllocateInfo *) header;
- skip |= validate_flags("VkExportMemoryAllocateInfo", "handleTypes", "VkExternalMemoryHandleTypeFlagBits", AllVkExternalMemoryHandleTypeFlagBits, structure->handleTypes, kOptionalFlags, "VUID-VkExportMemoryAllocateInfo-handleTypes-parameter");
- } break;
-
- // Validation code for VkExportFenceCreateInfo structure members
- case VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO: {
- VkExportFenceCreateInfo *structure = (VkExportFenceCreateInfo *) header;
- skip |= validate_flags("VkExportFenceCreateInfo", "handleTypes", "VkExternalFenceHandleTypeFlagBits", AllVkExternalFenceHandleTypeFlagBits, structure->handleTypes, kOptionalFlags, "VUID-VkExportFenceCreateInfo-handleTypes-parameter");
- } break;
-
- // Validation code for VkExportSemaphoreCreateInfo structure members
- case VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO: {
- VkExportSemaphoreCreateInfo *structure = (VkExportSemaphoreCreateInfo *) header;
- skip |= validate_flags("VkExportSemaphoreCreateInfo", "handleTypes", "VkExternalSemaphoreHandleTypeFlagBits", AllVkExternalSemaphoreHandleTypeFlagBits, structure->handleTypes, kOptionalFlags, "VUID-VkExportSemaphoreCreateInfo-handleTypes-parameter");
- } break;
-
- // Validation code for VkPhysicalDeviceShaderDrawParametersFeatures structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES: {
- VkPhysicalDeviceShaderDrawParametersFeatures *structure = (VkPhysicalDeviceShaderDrawParametersFeatures *) header;
- skip |= validate_bool32("VkPhysicalDeviceShaderDrawParametersFeatures", "shaderDrawParameters", structure->shaderDrawParameters);
- } break;
-
- // Validation code for VkBindImageMemorySwapchainInfoKHR structure members
- case VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR: {
- VkBindImageMemorySwapchainInfoKHR *structure = (VkBindImageMemorySwapchainInfoKHR *) header;
- skip |= validate_required_handle("VkBindImageMemorySwapchainInfoKHR", "swapchain", structure->swapchain);
- } break;
-
- // Validation code for VkDeviceGroupPresentInfoKHR structure members
- case VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR: {
- VkDeviceGroupPresentInfoKHR *structure = (VkDeviceGroupPresentInfoKHR *) header;
- skip |= validate_array("VkDeviceGroupPresentInfoKHR", "swapchainCount", "pDeviceMasks", structure->swapchainCount, &structure->pDeviceMasks, false, true, kVUIDUndefined, "VUID-VkDeviceGroupPresentInfoKHR-pDeviceMasks-parameter");
-
- skip |= validate_flags("VkDeviceGroupPresentInfoKHR", "mode", "VkDeviceGroupPresentModeFlagBitsKHR", AllVkDeviceGroupPresentModeFlagBitsKHR, structure->mode, kRequiredSingleBit, "VUID-VkDeviceGroupPresentInfoKHR-mode-parameter", "VUID-VkDeviceGroupPresentInfoKHR-mode-parameter");
- } break;
-
- // Validation code for VkDeviceGroupSwapchainCreateInfoKHR structure members
- case VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR: {
- VkDeviceGroupSwapchainCreateInfoKHR *structure = (VkDeviceGroupSwapchainCreateInfoKHR *) header;
- skip |= validate_flags("VkDeviceGroupSwapchainCreateInfoKHR", "modes", "VkDeviceGroupPresentModeFlagBitsKHR", AllVkDeviceGroupPresentModeFlagBitsKHR, structure->modes, kRequiredFlags, "VUID-VkDeviceGroupSwapchainCreateInfoKHR-modes-parameter", "VUID-VkDeviceGroupSwapchainCreateInfoKHR-modes-requiredbitmask");
- } break;
-
- // Validation code for VkDisplayPresentInfoKHR structure members
- case VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR: {
- VkDisplayPresentInfoKHR *structure = (VkDisplayPresentInfoKHR *) header;
- skip |= validate_bool32("VkDisplayPresentInfoKHR", "persistent", structure->persistent);
- } break;
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- // Validation code for VkImportMemoryWin32HandleInfoKHR structure members
- case VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR: {
- VkImportMemoryWin32HandleInfoKHR *structure = (VkImportMemoryWin32HandleInfoKHR *) header;
- skip |= validate_flags("VkImportMemoryWin32HandleInfoKHR", "handleType", "VkExternalMemoryHandleTypeFlagBits", AllVkExternalMemoryHandleTypeFlagBits, structure->handleType, kOptionalSingleBit, "VUID-VkImportMemoryWin32HandleInfoKHR-handleType-parameter");
- } break;
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
- // Validation code for VkImportMemoryFdInfoKHR structure members
- case VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR: {
- VkImportMemoryFdInfoKHR *structure = (VkImportMemoryFdInfoKHR *) header;
- skip |= validate_flags("VkImportMemoryFdInfoKHR", "handleType", "VkExternalMemoryHandleTypeFlagBits", AllVkExternalMemoryHandleTypeFlagBits, structure->handleType, kOptionalSingleBit, "VUID-VkImportMemoryFdInfoKHR-handleType-parameter");
- } break;
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- // Validation code for VkWin32KeyedMutexAcquireReleaseInfoKHR structure members
- case VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR: {
- VkWin32KeyedMutexAcquireReleaseInfoKHR *structure = (VkWin32KeyedMutexAcquireReleaseInfoKHR *) header;
- skip |= validate_array("VkWin32KeyedMutexAcquireReleaseInfoKHR", "acquireCount", "pAcquireSyncs", structure->acquireCount, &structure->pAcquireSyncs, false, true, kVUIDUndefined, "VUID-VkWin32KeyedMutexAcquireReleaseInfoKHR-pAcquireSyncs-parameter");
-
- skip |= validate_array("VkWin32KeyedMutexAcquireReleaseInfoKHR", "acquireCount", "pAcquireKeys", structure->acquireCount, &structure->pAcquireKeys, false, true, kVUIDUndefined, "VUID-VkWin32KeyedMutexAcquireReleaseInfoKHR-pAcquireKeys-parameter");
-
- skip |= validate_array("VkWin32KeyedMutexAcquireReleaseInfoKHR", "acquireCount", "pAcquireTimeouts", structure->acquireCount, &structure->pAcquireTimeouts, false, true, kVUIDUndefined, "VUID-VkWin32KeyedMutexAcquireReleaseInfoKHR-pAcquireTimeouts-parameter");
-
- skip |= validate_array("VkWin32KeyedMutexAcquireReleaseInfoKHR", "releaseCount", "pReleaseSyncs", structure->releaseCount, &structure->pReleaseSyncs, false, true, kVUIDUndefined, "VUID-VkWin32KeyedMutexAcquireReleaseInfoKHR-pReleaseSyncs-parameter");
-
- skip |= validate_array("VkWin32KeyedMutexAcquireReleaseInfoKHR", "releaseCount", "pReleaseKeys", structure->releaseCount, &structure->pReleaseKeys, false, true, kVUIDUndefined, "VUID-VkWin32KeyedMutexAcquireReleaseInfoKHR-pReleaseKeys-parameter");
- } break;
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
- // Validation code for VkPhysicalDeviceShaderFloat16Int8FeaturesKHR structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR: {
- VkPhysicalDeviceShaderFloat16Int8FeaturesKHR *structure = (VkPhysicalDeviceShaderFloat16Int8FeaturesKHR *) header;
- skip |= validate_bool32("VkPhysicalDeviceShaderFloat16Int8FeaturesKHR", "shaderFloat16", structure->shaderFloat16);
-
- skip |= validate_bool32("VkPhysicalDeviceShaderFloat16Int8FeaturesKHR", "shaderInt8", structure->shaderInt8);
- } break;
-
- // Validation code for VkPresentRegionsKHR structure members
- case VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR: {
- VkPresentRegionsKHR *structure = (VkPresentRegionsKHR *) header;
- skip |= validate_array("VkPresentRegionsKHR", "swapchainCount", "pRegions", structure->swapchainCount, &structure->pRegions, true, false, "VUID-VkPresentRegionsKHR-swapchainCount-arraylength", "VUID-VkPresentRegionsKHR-pRegions-parameter");
-
- if (structure->pRegions != NULL)
- {
- for (uint32_t swapchainIndex = 0; swapchainIndex < structure->swapchainCount; ++swapchainIndex)
- {
- if (structure->pRegions[swapchainIndex].pRectangles != NULL)
- {
- for (uint32_t rectangleIndex = 0; rectangleIndex < structure->pRegions[swapchainIndex].rectangleCount; ++rectangleIndex)
- {
- }
- }
- }
- }
- } break;
-
- // Validation code for VkPhysicalDeviceImagelessFramebufferFeaturesKHR structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR: {
- VkPhysicalDeviceImagelessFramebufferFeaturesKHR *structure = (VkPhysicalDeviceImagelessFramebufferFeaturesKHR *) header;
- skip |= validate_bool32("VkPhysicalDeviceImagelessFramebufferFeaturesKHR", "imagelessFramebuffer", structure->imagelessFramebuffer);
- } break;
-
- // Validation code for VkFramebufferAttachmentsCreateInfoKHR structure members
- case VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR: {
- VkFramebufferAttachmentsCreateInfoKHR *structure = (VkFramebufferAttachmentsCreateInfoKHR *) header;
- skip |= validate_struct_type_array("VkFramebufferAttachmentsCreateInfoKHR", "attachmentImageInfoCount", "pAttachmentImageInfos", "VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR", structure->attachmentImageInfoCount, structure->pAttachmentImageInfos, VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR, false, true, "VUID-VkFramebufferAttachmentImageInfoKHR-sType-sType", "VUID-VkFramebufferAttachmentsCreateInfoKHR-pAttachmentImageInfos-parameter", kVUIDUndefined);
-
- if (structure->pAttachmentImageInfos != NULL)
- {
- for (uint32_t attachmentImageInfoIndex = 0; attachmentImageInfoIndex < structure->attachmentImageInfoCount; ++attachmentImageInfoIndex)
- {
- skip |= validate_flags("VkFramebufferAttachmentsCreateInfoKHR", ParameterName("pAttachmentImageInfos[%i].flags", ParameterName::IndexVector{ attachmentImageInfoIndex }), "VkImageCreateFlagBits", AllVkImageCreateFlagBits, structure->pAttachmentImageInfos[attachmentImageInfoIndex].flags, kOptionalFlags, "VUID-VkFramebufferAttachmentImageInfoKHR-flags-parameter");
-
- skip |= validate_flags("VkFramebufferAttachmentsCreateInfoKHR", ParameterName("pAttachmentImageInfos[%i].usage", ParameterName::IndexVector{ attachmentImageInfoIndex }), "VkImageUsageFlagBits", AllVkImageUsageFlagBits, structure->pAttachmentImageInfos[attachmentImageInfoIndex].usage, kRequiredFlags, "VUID-VkFramebufferAttachmentImageInfoKHR-usage-parameter", "VUID-VkFramebufferAttachmentImageInfoKHR-usage-requiredbitmask");
-
- skip |= validate_ranged_enum_array("VkFramebufferAttachmentsCreateInfoKHR", ParameterName("pAttachmentImageInfos[%i].viewFormatCount", ParameterName::IndexVector{ attachmentImageInfoIndex }), ParameterName("pAttachmentImageInfos[%i].pViewFormats", ParameterName::IndexVector{ attachmentImageInfoIndex }), "VkFormat", AllVkFormatEnums, structure->pAttachmentImageInfos[attachmentImageInfoIndex].viewFormatCount, structure->pAttachmentImageInfos[attachmentImageInfoIndex].pViewFormats, false, true);
- }
- }
- } break;
-
- // Validation code for VkRenderPassAttachmentBeginInfoKHR structure members
- case VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR: {
- VkRenderPassAttachmentBeginInfoKHR *structure = (VkRenderPassAttachmentBeginInfoKHR *) header;
- skip |= validate_array("VkRenderPassAttachmentBeginInfoKHR", "attachmentCount", "pAttachments", structure->attachmentCount, &structure->pAttachments, false, true, kVUIDUndefined, "VUID-VkRenderPassAttachmentBeginInfoKHR-pAttachments-parameter");
- } break;
-
- // Validation code for VkImageFormatListCreateInfoKHR structure members
- case VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR: {
- VkImageFormatListCreateInfoKHR *structure = (VkImageFormatListCreateInfoKHR *) header;
- skip |= validate_ranged_enum_array("VkImageFormatListCreateInfoKHR", "viewFormatCount", "pViewFormats", "VkFormat", AllVkFormatEnums, structure->viewFormatCount, structure->pViewFormats, false, true);
- } break;
-
- // Validation code for VkPhysicalDevice8BitStorageFeaturesKHR structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR: {
- VkPhysicalDevice8BitStorageFeaturesKHR *structure = (VkPhysicalDevice8BitStorageFeaturesKHR *) header;
- skip |= validate_bool32("VkPhysicalDevice8BitStorageFeaturesKHR", "storageBuffer8BitAccess", structure->storageBuffer8BitAccess);
-
- skip |= validate_bool32("VkPhysicalDevice8BitStorageFeaturesKHR", "uniformAndStorageBuffer8BitAccess", structure->uniformAndStorageBuffer8BitAccess);
-
- skip |= validate_bool32("VkPhysicalDevice8BitStorageFeaturesKHR", "storagePushConstant8", structure->storagePushConstant8);
- } break;
-
- // Validation code for VkPhysicalDeviceShaderAtomicInt64FeaturesKHR structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR: {
- VkPhysicalDeviceShaderAtomicInt64FeaturesKHR *structure = (VkPhysicalDeviceShaderAtomicInt64FeaturesKHR *) header;
- skip |= validate_bool32("VkPhysicalDeviceShaderAtomicInt64FeaturesKHR", "shaderBufferInt64Atomics", structure->shaderBufferInt64Atomics);
-
- skip |= validate_bool32("VkPhysicalDeviceShaderAtomicInt64FeaturesKHR", "shaderSharedInt64Atomics", structure->shaderSharedInt64Atomics);
- } break;
-
- // Validation code for VkSubpassDescriptionDepthStencilResolveKHR structure members
- case VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR: {
- VkSubpassDescriptionDepthStencilResolveKHR *structure = (VkSubpassDescriptionDepthStencilResolveKHR *) header;
- skip |= validate_flags("VkSubpassDescriptionDepthStencilResolveKHR", "depthResolveMode", "VkResolveModeFlagBitsKHR", AllVkResolveModeFlagBitsKHR, structure->depthResolveMode, kRequiredSingleBit, "VUID-VkSubpassDescriptionDepthStencilResolveKHR-depthResolveMode-parameter", "VUID-VkSubpassDescriptionDepthStencilResolveKHR-depthResolveMode-parameter");
-
- skip |= validate_flags("VkSubpassDescriptionDepthStencilResolveKHR", "stencilResolveMode", "VkResolveModeFlagBitsKHR", AllVkResolveModeFlagBitsKHR, structure->stencilResolveMode, kRequiredSingleBit, "VUID-VkSubpassDescriptionDepthStencilResolveKHR-stencilResolveMode-parameter", "VUID-VkSubpassDescriptionDepthStencilResolveKHR-stencilResolveMode-parameter");
-
- skip |= validate_struct_type("VkSubpassDescriptionDepthStencilResolveKHR", "pDepthStencilResolveAttachment", "VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR", structure->pDepthStencilResolveAttachment, VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR, false, "VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-parameter", "VUID-VkAttachmentReference2KHR-sType-sType");
-
- if (structure->pDepthStencilResolveAttachment != NULL)
- {
- skip |= validate_ranged_enum("VkSubpassDescriptionDepthStencilResolveKHR", "pDepthStencilResolveAttachment->layout", "VkImageLayout", AllVkImageLayoutEnums, structure->pDepthStencilResolveAttachment->layout, "VUID-VkAttachmentReference2KHR-layout-parameter");
- }
- } break;
-
- // Validation code for VkPhysicalDeviceVulkanMemoryModelFeaturesKHR structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR: {
- VkPhysicalDeviceVulkanMemoryModelFeaturesKHR *structure = (VkPhysicalDeviceVulkanMemoryModelFeaturesKHR *) header;
- skip |= validate_bool32("VkPhysicalDeviceVulkanMemoryModelFeaturesKHR", "vulkanMemoryModel", structure->vulkanMemoryModel);
-
- skip |= validate_bool32("VkPhysicalDeviceVulkanMemoryModelFeaturesKHR", "vulkanMemoryModelDeviceScope", structure->vulkanMemoryModelDeviceScope);
-
- skip |= validate_bool32("VkPhysicalDeviceVulkanMemoryModelFeaturesKHR", "vulkanMemoryModelAvailabilityVisibilityChains", structure->vulkanMemoryModelAvailabilityVisibilityChains);
- } break;
-
- // Validation code for VkSurfaceProtectedCapabilitiesKHR structure members
- case VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR: {
- VkSurfaceProtectedCapabilitiesKHR *structure = (VkSurfaceProtectedCapabilitiesKHR *) header;
- skip |= validate_bool32("VkSurfaceProtectedCapabilitiesKHR", "supportsProtected", structure->supportsProtected);
- } break;
-
- // Validation code for VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR: {
- VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR *structure = (VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR *) header;
- skip |= validate_bool32("VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR", "uniformBufferStandardLayout", structure->uniformBufferStandardLayout);
- } break;
-
- // Validation code for VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR: {
- VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR *structure = (VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR *) header;
- skip |= validate_bool32("VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR", "pipelineExecutableInfo", structure->pipelineExecutableInfo);
- } break;
-
- // Validation code for VkDebugReportCallbackCreateInfoEXT structure members
- case VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT: {
- VkDebugReportCallbackCreateInfoEXT *structure = (VkDebugReportCallbackCreateInfoEXT *) header;
- skip |= validate_flags("VkDebugReportCallbackCreateInfoEXT", "flags", "VkDebugReportFlagBitsEXT", AllVkDebugReportFlagBitsEXT, structure->flags, kOptionalFlags, "VUID-VkDebugReportCallbackCreateInfoEXT-flags-parameter");
-
- skip |= validate_required_pointer("VkDebugReportCallbackCreateInfoEXT", "pfnCallback", reinterpret_cast<const void*>(structure->pfnCallback), "VUID-VkDebugReportCallbackCreateInfoEXT-pfnCallback-parameter");
- } break;
-
- // Validation code for VkPipelineRasterizationStateRasterizationOrderAMD structure members
- case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD: {
- VkPipelineRasterizationStateRasterizationOrderAMD *structure = (VkPipelineRasterizationStateRasterizationOrderAMD *) header;
- skip |= validate_ranged_enum("VkPipelineRasterizationStateRasterizationOrderAMD", "rasterizationOrder", "VkRasterizationOrderAMD", AllVkRasterizationOrderAMDEnums, structure->rasterizationOrder, "VUID-VkPipelineRasterizationStateRasterizationOrderAMD-rasterizationOrder-parameter");
- } break;
-
- // Validation code for VkDedicatedAllocationImageCreateInfoNV structure members
- case VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV: {
- VkDedicatedAllocationImageCreateInfoNV *structure = (VkDedicatedAllocationImageCreateInfoNV *) header;
- skip |= validate_bool32("VkDedicatedAllocationImageCreateInfoNV", "dedicatedAllocation", structure->dedicatedAllocation);
- } break;
-
- // Validation code for VkDedicatedAllocationBufferCreateInfoNV structure members
- case VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV: {
- VkDedicatedAllocationBufferCreateInfoNV *structure = (VkDedicatedAllocationBufferCreateInfoNV *) header;
- skip |= validate_bool32("VkDedicatedAllocationBufferCreateInfoNV", "dedicatedAllocation", structure->dedicatedAllocation);
- } break;
-
- // Validation code for VkPhysicalDeviceTransformFeedbackFeaturesEXT structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT: {
- VkPhysicalDeviceTransformFeedbackFeaturesEXT *structure = (VkPhysicalDeviceTransformFeedbackFeaturesEXT *) header;
- skip |= validate_bool32("VkPhysicalDeviceTransformFeedbackFeaturesEXT", "transformFeedback", structure->transformFeedback);
-
- skip |= validate_bool32("VkPhysicalDeviceTransformFeedbackFeaturesEXT", "geometryStreams", structure->geometryStreams);
- } break;
-
- // Validation code for VkPipelineRasterizationStateStreamCreateInfoEXT structure members
- case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT: {
- VkPipelineRasterizationStateStreamCreateInfoEXT *structure = (VkPipelineRasterizationStateStreamCreateInfoEXT *) header;
- skip |= validate_reserved_flags("VkPipelineRasterizationStateStreamCreateInfoEXT", "flags", structure->flags, "VUID-VkPipelineRasterizationStateStreamCreateInfoEXT-flags-zerobitmask");
- } break;
-
- // Validation code for VkPhysicalDeviceCornerSampledImageFeaturesNV structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV: {
- VkPhysicalDeviceCornerSampledImageFeaturesNV *structure = (VkPhysicalDeviceCornerSampledImageFeaturesNV *) header;
- skip |= validate_bool32("VkPhysicalDeviceCornerSampledImageFeaturesNV", "cornerSampledImage", structure->cornerSampledImage);
- } break;
-
- // Validation code for VkExternalMemoryImageCreateInfoNV structure members
- case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV: {
- VkExternalMemoryImageCreateInfoNV *structure = (VkExternalMemoryImageCreateInfoNV *) header;
- skip |= validate_flags("VkExternalMemoryImageCreateInfoNV", "handleTypes", "VkExternalMemoryHandleTypeFlagBitsNV", AllVkExternalMemoryHandleTypeFlagBitsNV, structure->handleTypes, kOptionalFlags, "VUID-VkExternalMemoryImageCreateInfoNV-handleTypes-parameter");
- } break;
-
- // Validation code for VkExportMemoryAllocateInfoNV structure members
- case VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV: {
- VkExportMemoryAllocateInfoNV *structure = (VkExportMemoryAllocateInfoNV *) header;
- skip |= validate_flags("VkExportMemoryAllocateInfoNV", "handleTypes", "VkExternalMemoryHandleTypeFlagBitsNV", AllVkExternalMemoryHandleTypeFlagBitsNV, structure->handleTypes, kOptionalFlags, "VUID-VkExportMemoryAllocateInfoNV-handleTypes-parameter");
- } break;
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- // Validation code for VkImportMemoryWin32HandleInfoNV structure members
- case VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV: {
- VkImportMemoryWin32HandleInfoNV *structure = (VkImportMemoryWin32HandleInfoNV *) header;
- skip |= validate_flags("VkImportMemoryWin32HandleInfoNV", "handleType", "VkExternalMemoryHandleTypeFlagBitsNV", AllVkExternalMemoryHandleTypeFlagBitsNV, structure->handleType, kOptionalFlags, "VUID-VkImportMemoryWin32HandleInfoNV-handleType-parameter");
- } break;
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- // Validation code for VkWin32KeyedMutexAcquireReleaseInfoNV structure members
- case VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV: {
- VkWin32KeyedMutexAcquireReleaseInfoNV *structure = (VkWin32KeyedMutexAcquireReleaseInfoNV *) header;
- skip |= validate_array("VkWin32KeyedMutexAcquireReleaseInfoNV", "acquireCount", "pAcquireSyncs", structure->acquireCount, &structure->pAcquireSyncs, false, true, kVUIDUndefined, "VUID-VkWin32KeyedMutexAcquireReleaseInfoNV-pAcquireSyncs-parameter");
-
- skip |= validate_array("VkWin32KeyedMutexAcquireReleaseInfoNV", "acquireCount", "pAcquireKeys", structure->acquireCount, &structure->pAcquireKeys, false, true, kVUIDUndefined, "VUID-VkWin32KeyedMutexAcquireReleaseInfoNV-pAcquireKeys-parameter");
-
- skip |= validate_array("VkWin32KeyedMutexAcquireReleaseInfoNV", "acquireCount", "pAcquireTimeoutMilliseconds", structure->acquireCount, &structure->pAcquireTimeoutMilliseconds, false, true, kVUIDUndefined, "VUID-VkWin32KeyedMutexAcquireReleaseInfoNV-pAcquireTimeoutMilliseconds-parameter");
-
- skip |= validate_array("VkWin32KeyedMutexAcquireReleaseInfoNV", "releaseCount", "pReleaseSyncs", structure->releaseCount, &structure->pReleaseSyncs, false, true, kVUIDUndefined, "VUID-VkWin32KeyedMutexAcquireReleaseInfoNV-pReleaseSyncs-parameter");
-
- skip |= validate_array("VkWin32KeyedMutexAcquireReleaseInfoNV", "releaseCount", "pReleaseKeys", structure->releaseCount, &structure->pReleaseKeys, false, true, kVUIDUndefined, "VUID-VkWin32KeyedMutexAcquireReleaseInfoNV-pReleaseKeys-parameter");
- } break;
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
- // Validation code for VkValidationFlagsEXT structure members
- case VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT: {
- VkValidationFlagsEXT *structure = (VkValidationFlagsEXT *) header;
- skip |= validate_ranged_enum_array("VkValidationFlagsEXT", "disabledValidationCheckCount", "pDisabledValidationChecks", "VkValidationCheckEXT", AllVkValidationCheckEXTEnums, structure->disabledValidationCheckCount, structure->pDisabledValidationChecks, true, true);
- } break;
-
- // Validation code for VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT: {
- VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT *structure = (VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT *) header;
- skip |= validate_bool32("VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT", "textureCompressionASTC_HDR", structure->textureCompressionASTC_HDR);
- } break;
-
- // Validation code for VkImageViewASTCDecodeModeEXT structure members
- case VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT: {
- VkImageViewASTCDecodeModeEXT *structure = (VkImageViewASTCDecodeModeEXT *) header;
- skip |= validate_ranged_enum("VkImageViewASTCDecodeModeEXT", "decodeMode", "VkFormat", AllVkFormatEnums, structure->decodeMode, "VUID-VkImageViewASTCDecodeModeEXT-decodeMode-parameter");
- } break;
-
- // Validation code for VkPhysicalDeviceASTCDecodeFeaturesEXT structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT: {
- VkPhysicalDeviceASTCDecodeFeaturesEXT *structure = (VkPhysicalDeviceASTCDecodeFeaturesEXT *) header;
- skip |= validate_bool32("VkPhysicalDeviceASTCDecodeFeaturesEXT", "decodeModeSharedExponent", structure->decodeModeSharedExponent);
- } break;
-
- // Validation code for VkPhysicalDeviceConditionalRenderingFeaturesEXT structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT: {
- VkPhysicalDeviceConditionalRenderingFeaturesEXT *structure = (VkPhysicalDeviceConditionalRenderingFeaturesEXT *) header;
- skip |= validate_bool32("VkPhysicalDeviceConditionalRenderingFeaturesEXT", "conditionalRendering", structure->conditionalRendering);
-
- skip |= validate_bool32("VkPhysicalDeviceConditionalRenderingFeaturesEXT", "inheritedConditionalRendering", structure->inheritedConditionalRendering);
- } break;
-
- // Validation code for VkCommandBufferInheritanceConditionalRenderingInfoEXT structure members
- case VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT: {
- VkCommandBufferInheritanceConditionalRenderingInfoEXT *structure = (VkCommandBufferInheritanceConditionalRenderingInfoEXT *) header;
- skip |= validate_bool32("VkCommandBufferInheritanceConditionalRenderingInfoEXT", "conditionalRenderingEnable", structure->conditionalRenderingEnable);
- } break;
-
- // Validation code for VkPipelineViewportWScalingStateCreateInfoNV structure members
- case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV: {
- VkPipelineViewportWScalingStateCreateInfoNV *structure = (VkPipelineViewportWScalingStateCreateInfoNV *) header;
- skip |= validate_bool32("VkPipelineViewportWScalingStateCreateInfoNV", "viewportWScalingEnable", structure->viewportWScalingEnable);
- } break;
-
- // Validation code for VkSwapchainCounterCreateInfoEXT structure members
- case VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT: {
- VkSwapchainCounterCreateInfoEXT *structure = (VkSwapchainCounterCreateInfoEXT *) header;
- skip |= validate_flags("VkSwapchainCounterCreateInfoEXT", "surfaceCounters", "VkSurfaceCounterFlagBitsEXT", AllVkSurfaceCounterFlagBitsEXT, structure->surfaceCounters, kOptionalFlags, "VUID-VkSwapchainCounterCreateInfoEXT-surfaceCounters-parameter");
- } break;
-
- // Validation code for VkPresentTimesInfoGOOGLE structure members
- case VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE: {
- VkPresentTimesInfoGOOGLE *structure = (VkPresentTimesInfoGOOGLE *) header;
- skip |= validate_array("VkPresentTimesInfoGOOGLE", "swapchainCount", "pTimes", structure->swapchainCount, &structure->pTimes, true, false, "VUID-VkPresentTimesInfoGOOGLE-swapchainCount-arraylength", "VUID-VkPresentTimesInfoGOOGLE-pTimes-parameter");
-
- if (structure->pTimes != NULL)
- {
- for (uint32_t swapchainIndex = 0; swapchainIndex < structure->swapchainCount; ++swapchainIndex)
- {
- }
- }
- } break;
-
- // Validation code for VkPipelineViewportSwizzleStateCreateInfoNV structure members
- case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV: {
- VkPipelineViewportSwizzleStateCreateInfoNV *structure = (VkPipelineViewportSwizzleStateCreateInfoNV *) header;
- skip |= validate_reserved_flags("VkPipelineViewportSwizzleStateCreateInfoNV", "flags", structure->flags, "VUID-VkPipelineViewportSwizzleStateCreateInfoNV-flags-zerobitmask");
-
- skip |= validate_array("VkPipelineViewportSwizzleStateCreateInfoNV", "viewportCount", "pViewportSwizzles", structure->viewportCount, &structure->pViewportSwizzles, true, true, "VUID-VkPipelineViewportSwizzleStateCreateInfoNV-viewportCount-arraylength", "VUID-VkPipelineViewportSwizzleStateCreateInfoNV-pViewportSwizzles-parameter");
-
- if (structure->pViewportSwizzles != NULL)
- {
- for (uint32_t viewportIndex = 0; viewportIndex < structure->viewportCount; ++viewportIndex)
- {
- skip |= validate_ranged_enum("VkPipelineViewportSwizzleStateCreateInfoNV", ParameterName("pViewportSwizzles[%i].x", ParameterName::IndexVector{ viewportIndex }), "VkViewportCoordinateSwizzleNV", AllVkViewportCoordinateSwizzleNVEnums, structure->pViewportSwizzles[viewportIndex].x, "VUID-VkViewportSwizzleNV-x-parameter");
-
- skip |= validate_ranged_enum("VkPipelineViewportSwizzleStateCreateInfoNV", ParameterName("pViewportSwizzles[%i].y", ParameterName::IndexVector{ viewportIndex }), "VkViewportCoordinateSwizzleNV", AllVkViewportCoordinateSwizzleNVEnums, structure->pViewportSwizzles[viewportIndex].y, "VUID-VkViewportSwizzleNV-y-parameter");
-
- skip |= validate_ranged_enum("VkPipelineViewportSwizzleStateCreateInfoNV", ParameterName("pViewportSwizzles[%i].z", ParameterName::IndexVector{ viewportIndex }), "VkViewportCoordinateSwizzleNV", AllVkViewportCoordinateSwizzleNVEnums, structure->pViewportSwizzles[viewportIndex].z, "VUID-VkViewportSwizzleNV-z-parameter");
-
- skip |= validate_ranged_enum("VkPipelineViewportSwizzleStateCreateInfoNV", ParameterName("pViewportSwizzles[%i].w", ParameterName::IndexVector{ viewportIndex }), "VkViewportCoordinateSwizzleNV", AllVkViewportCoordinateSwizzleNVEnums, structure->pViewportSwizzles[viewportIndex].w, "VUID-VkViewportSwizzleNV-w-parameter");
- }
- }
- } break;
-
- // Validation code for VkPipelineDiscardRectangleStateCreateInfoEXT structure members
- case VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT: {
- VkPipelineDiscardRectangleStateCreateInfoEXT *structure = (VkPipelineDiscardRectangleStateCreateInfoEXT *) header;
- skip |= validate_reserved_flags("VkPipelineDiscardRectangleStateCreateInfoEXT", "flags", structure->flags, "VUID-VkPipelineDiscardRectangleStateCreateInfoEXT-flags-zerobitmask");
-
- skip |= validate_ranged_enum("VkPipelineDiscardRectangleStateCreateInfoEXT", "discardRectangleMode", "VkDiscardRectangleModeEXT", AllVkDiscardRectangleModeEXTEnums, structure->discardRectangleMode, "VUID-VkPipelineDiscardRectangleStateCreateInfoEXT-discardRectangleMode-parameter");
- } break;
-
- // Validation code for VkPipelineRasterizationConservativeStateCreateInfoEXT structure members
- case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT: {
- VkPipelineRasterizationConservativeStateCreateInfoEXT *structure = (VkPipelineRasterizationConservativeStateCreateInfoEXT *) header;
- skip |= validate_reserved_flags("VkPipelineRasterizationConservativeStateCreateInfoEXT", "flags", structure->flags, "VUID-VkPipelineRasterizationConservativeStateCreateInfoEXT-flags-zerobitmask");
-
- skip |= validate_ranged_enum("VkPipelineRasterizationConservativeStateCreateInfoEXT", "conservativeRasterizationMode", "VkConservativeRasterizationModeEXT", AllVkConservativeRasterizationModeEXTEnums, structure->conservativeRasterizationMode, "VUID-VkPipelineRasterizationConservativeStateCreateInfoEXT-conservativeRasterizationMode-parameter");
- } break;
-
- // Validation code for VkPhysicalDeviceDepthClipEnableFeaturesEXT structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT: {
- VkPhysicalDeviceDepthClipEnableFeaturesEXT *structure = (VkPhysicalDeviceDepthClipEnableFeaturesEXT *) header;
- skip |= validate_bool32("VkPhysicalDeviceDepthClipEnableFeaturesEXT", "depthClipEnable", structure->depthClipEnable);
- } break;
-
- // Validation code for VkPipelineRasterizationDepthClipStateCreateInfoEXT structure members
- case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT: {
- VkPipelineRasterizationDepthClipStateCreateInfoEXT *structure = (VkPipelineRasterizationDepthClipStateCreateInfoEXT *) header;
- skip |= validate_reserved_flags("VkPipelineRasterizationDepthClipStateCreateInfoEXT", "flags", structure->flags, "VUID-VkPipelineRasterizationDepthClipStateCreateInfoEXT-flags-zerobitmask");
-
- skip |= validate_bool32("VkPipelineRasterizationDepthClipStateCreateInfoEXT", "depthClipEnable", structure->depthClipEnable);
- } break;
-
- // Validation code for VkDebugUtilsMessengerCreateInfoEXT structure members
- case VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT: {
- VkDebugUtilsMessengerCreateInfoEXT *structure = (VkDebugUtilsMessengerCreateInfoEXT *) header;
- skip |= validate_reserved_flags("VkDebugUtilsMessengerCreateInfoEXT", "flags", structure->flags, "VUID-VkDebugUtilsMessengerCreateInfoEXT-flags-zerobitmask");
-
- skip |= validate_flags("VkDebugUtilsMessengerCreateInfoEXT", "messageSeverity", "VkDebugUtilsMessageSeverityFlagBitsEXT", AllVkDebugUtilsMessageSeverityFlagBitsEXT, structure->messageSeverity, kRequiredFlags, "VUID-VkDebugUtilsMessengerCreateInfoEXT-messageSeverity-parameter", "VUID-VkDebugUtilsMessengerCreateInfoEXT-messageSeverity-requiredbitmask");
-
- skip |= validate_flags("VkDebugUtilsMessengerCreateInfoEXT", "messageType", "VkDebugUtilsMessageTypeFlagBitsEXT", AllVkDebugUtilsMessageTypeFlagBitsEXT, structure->messageType, kRequiredFlags, "VUID-VkDebugUtilsMessengerCreateInfoEXT-messageType-parameter", "VUID-VkDebugUtilsMessengerCreateInfoEXT-messageType-requiredbitmask");
-
- skip |= validate_required_pointer("VkDebugUtilsMessengerCreateInfoEXT", "pfnUserCallback", reinterpret_cast<const void*>(structure->pfnUserCallback), "VUID-VkDebugUtilsMessengerCreateInfoEXT-pfnUserCallback-parameter");
- } break;
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
- // Validation code for VkImportAndroidHardwareBufferInfoANDROID structure members
- case VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID: {
- VkImportAndroidHardwareBufferInfoANDROID *structure = (VkImportAndroidHardwareBufferInfoANDROID *) header;
- skip |= validate_required_pointer("VkImportAndroidHardwareBufferInfoANDROID", "buffer", structure->buffer, "VUID-VkImportAndroidHardwareBufferInfoANDROID-buffer-parameter");
- } break;
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-
- // Validation code for VkSamplerReductionModeCreateInfoEXT structure members
- case VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT: {
- VkSamplerReductionModeCreateInfoEXT *structure = (VkSamplerReductionModeCreateInfoEXT *) header;
- skip |= validate_ranged_enum("VkSamplerReductionModeCreateInfoEXT", "reductionMode", "VkSamplerReductionModeEXT", AllVkSamplerReductionModeEXTEnums, structure->reductionMode, "VUID-VkSamplerReductionModeCreateInfoEXT-reductionMode-parameter");
- } break;
-
- // Validation code for VkPhysicalDeviceInlineUniformBlockFeaturesEXT structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT: {
- VkPhysicalDeviceInlineUniformBlockFeaturesEXT *structure = (VkPhysicalDeviceInlineUniformBlockFeaturesEXT *) header;
- skip |= validate_bool32("VkPhysicalDeviceInlineUniformBlockFeaturesEXT", "inlineUniformBlock", structure->inlineUniformBlock);
-
- skip |= validate_bool32("VkPhysicalDeviceInlineUniformBlockFeaturesEXT", "descriptorBindingInlineUniformBlockUpdateAfterBind", structure->descriptorBindingInlineUniformBlockUpdateAfterBind);
- } break;
-
- // Validation code for VkWriteDescriptorSetInlineUniformBlockEXT structure members
- case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT: {
- VkWriteDescriptorSetInlineUniformBlockEXT *structure = (VkWriteDescriptorSetInlineUniformBlockEXT *) header;
- skip |= validate_array("VkWriteDescriptorSetInlineUniformBlockEXT", "dataSize", "pData", structure->dataSize, &structure->pData, true, true, "VUID-VkWriteDescriptorSetInlineUniformBlockEXT-dataSize-arraylength", "VUID-VkWriteDescriptorSetInlineUniformBlockEXT-pData-parameter");
- } break;
-
- // Validation code for VkSampleLocationsInfoEXT structure members
- case VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT: {
- VkSampleLocationsInfoEXT *structure = (VkSampleLocationsInfoEXT *) header;
- skip |= validate_flags("VkSampleLocationsInfoEXT", "sampleLocationsPerPixel", "VkSampleCountFlagBits", AllVkSampleCountFlagBits, structure->sampleLocationsPerPixel, kOptionalSingleBit, "VUID-VkSampleLocationsInfoEXT-sampleLocationsPerPixel-parameter");
-
- skip |= validate_array("VkSampleLocationsInfoEXT", "sampleLocationsCount", "pSampleLocations", structure->sampleLocationsCount, &structure->pSampleLocations, false, true, kVUIDUndefined, "VUID-VkSampleLocationsInfoEXT-pSampleLocations-parameter");
-
- if (structure->pSampleLocations != NULL)
- {
- for (uint32_t sampleLocationsIndex = 0; sampleLocationsIndex < structure->sampleLocationsCount; ++sampleLocationsIndex)
- {
- }
- }
- } break;
-
- // Validation code for VkRenderPassSampleLocationsBeginInfoEXT structure members
- case VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT: {
- VkRenderPassSampleLocationsBeginInfoEXT *structure = (VkRenderPassSampleLocationsBeginInfoEXT *) header;
- skip |= validate_array("VkRenderPassSampleLocationsBeginInfoEXT", "attachmentInitialSampleLocationsCount", "pAttachmentInitialSampleLocations", structure->attachmentInitialSampleLocationsCount, &structure->pAttachmentInitialSampleLocations, false, true, kVUIDUndefined, "VUID-VkRenderPassSampleLocationsBeginInfoEXT-pAttachmentInitialSampleLocations-parameter");
-
- if (structure->pAttachmentInitialSampleLocations != NULL)
- {
- for (uint32_t attachmentInitialSampleLocationsIndex = 0; attachmentInitialSampleLocationsIndex < structure->attachmentInitialSampleLocationsCount; ++attachmentInitialSampleLocationsIndex)
- {
- skip |= validate_struct_type("VkRenderPassSampleLocationsBeginInfoEXT", ParameterName("pAttachmentInitialSampleLocations[%i].sampleLocationsInfo", ParameterName::IndexVector{ attachmentInitialSampleLocationsIndex }), "VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT", &(structure->pAttachmentInitialSampleLocations[attachmentInitialSampleLocationsIndex].sampleLocationsInfo), VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT, false, kVUIDUndefined, "VUID-VkSampleLocationsInfoEXT-sType-sType");
-
- skip |= validate_flags("VkRenderPassSampleLocationsBeginInfoEXT", ParameterName("pAttachmentInitialSampleLocations[%i].sampleLocationsInfo.sampleLocationsPerPixel", ParameterName::IndexVector{ attachmentInitialSampleLocationsIndex }), "VkSampleCountFlagBits", AllVkSampleCountFlagBits, structure->pAttachmentInitialSampleLocations[attachmentInitialSampleLocationsIndex].sampleLocationsInfo.sampleLocationsPerPixel, kOptionalSingleBit, "VUID-VkSampleLocationsInfoEXT-sampleLocationsPerPixel-parameter");
-
- skip |= validate_array("VkRenderPassSampleLocationsBeginInfoEXT", ParameterName("pAttachmentInitialSampleLocations[%i].sampleLocationsInfo.sampleLocationsCount", ParameterName::IndexVector{ attachmentInitialSampleLocationsIndex }), ParameterName("pAttachmentInitialSampleLocations[%i].sampleLocationsInfo.pSampleLocations", ParameterName::IndexVector{ attachmentInitialSampleLocationsIndex }), structure->pAttachmentInitialSampleLocations[attachmentInitialSampleLocationsIndex].sampleLocationsInfo.sampleLocationsCount, &structure->pAttachmentInitialSampleLocations[attachmentInitialSampleLocationsIndex].sampleLocationsInfo.pSampleLocations, false, true, kVUIDUndefined, "VUID-VkSampleLocationsInfoEXT-pSampleLocations-parameter");
-
- if (structure->pAttachmentInitialSampleLocations[attachmentInitialSampleLocationsIndex].sampleLocationsInfo.pSampleLocations != NULL)
- {
- for (uint32_t sampleLocationsIndex = 0; sampleLocationsIndex < structure->pAttachmentInitialSampleLocations[attachmentInitialSampleLocationsIndex].sampleLocationsInfo.sampleLocationsCount; ++sampleLocationsIndex)
- {
- }
- }
- }
- }
-
- skip |= validate_array("VkRenderPassSampleLocationsBeginInfoEXT", "postSubpassSampleLocationsCount", "pPostSubpassSampleLocations", structure->postSubpassSampleLocationsCount, &structure->pPostSubpassSampleLocations, false, true, kVUIDUndefined, "VUID-VkRenderPassSampleLocationsBeginInfoEXT-pPostSubpassSampleLocations-parameter");
-
- if (structure->pPostSubpassSampleLocations != NULL)
- {
- for (uint32_t postSubpassSampleLocationsIndex = 0; postSubpassSampleLocationsIndex < structure->postSubpassSampleLocationsCount; ++postSubpassSampleLocationsIndex)
- {
- skip |= validate_struct_type("VkRenderPassSampleLocationsBeginInfoEXT", ParameterName("pPostSubpassSampleLocations[%i].sampleLocationsInfo", ParameterName::IndexVector{ postSubpassSampleLocationsIndex }), "VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT", &(structure->pPostSubpassSampleLocations[postSubpassSampleLocationsIndex].sampleLocationsInfo), VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT, false, kVUIDUndefined, "VUID-VkSampleLocationsInfoEXT-sType-sType");
-
- skip |= validate_flags("VkRenderPassSampleLocationsBeginInfoEXT", ParameterName("pPostSubpassSampleLocations[%i].sampleLocationsInfo.sampleLocationsPerPixel", ParameterName::IndexVector{ postSubpassSampleLocationsIndex }), "VkSampleCountFlagBits", AllVkSampleCountFlagBits, structure->pPostSubpassSampleLocations[postSubpassSampleLocationsIndex].sampleLocationsInfo.sampleLocationsPerPixel, kOptionalSingleBit, "VUID-VkSampleLocationsInfoEXT-sampleLocationsPerPixel-parameter");
-
- skip |= validate_array("VkRenderPassSampleLocationsBeginInfoEXT", ParameterName("pPostSubpassSampleLocations[%i].sampleLocationsInfo.sampleLocationsCount", ParameterName::IndexVector{ postSubpassSampleLocationsIndex }), ParameterName("pPostSubpassSampleLocations[%i].sampleLocationsInfo.pSampleLocations", ParameterName::IndexVector{ postSubpassSampleLocationsIndex }), structure->pPostSubpassSampleLocations[postSubpassSampleLocationsIndex].sampleLocationsInfo.sampleLocationsCount, &structure->pPostSubpassSampleLocations[postSubpassSampleLocationsIndex].sampleLocationsInfo.pSampleLocations, false, true, kVUIDUndefined, "VUID-VkSampleLocationsInfoEXT-pSampleLocations-parameter");
-
- if (structure->pPostSubpassSampleLocations[postSubpassSampleLocationsIndex].sampleLocationsInfo.pSampleLocations != NULL)
- {
- for (uint32_t sampleLocationsIndex = 0; sampleLocationsIndex < structure->pPostSubpassSampleLocations[postSubpassSampleLocationsIndex].sampleLocationsInfo.sampleLocationsCount; ++sampleLocationsIndex)
- {
- }
- }
- }
- }
- } break;
-
- // Validation code for VkPipelineSampleLocationsStateCreateInfoEXT structure members
- case VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT: {
- VkPipelineSampleLocationsStateCreateInfoEXT *structure = (VkPipelineSampleLocationsStateCreateInfoEXT *) header;
- skip |= validate_bool32("VkPipelineSampleLocationsStateCreateInfoEXT", "sampleLocationsEnable", structure->sampleLocationsEnable);
-
- skip |= validate_struct_type("VkPipelineSampleLocationsStateCreateInfoEXT", "sampleLocationsInfo", "VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT", &(structure->sampleLocationsInfo), VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT, false, kVUIDUndefined, "VUID-VkSampleLocationsInfoEXT-sType-sType");
-
- skip |= validate_flags("VkPipelineSampleLocationsStateCreateInfoEXT", "sampleLocationsInfo.sampleLocationsPerPixel", "VkSampleCountFlagBits", AllVkSampleCountFlagBits, structure->sampleLocationsInfo.sampleLocationsPerPixel, kOptionalSingleBit, "VUID-VkSampleLocationsInfoEXT-sampleLocationsPerPixel-parameter");
-
- skip |= validate_array("VkPipelineSampleLocationsStateCreateInfoEXT", "sampleLocationsInfo.sampleLocationsCount", "sampleLocationsInfo.pSampleLocations", structure->sampleLocationsInfo.sampleLocationsCount, &structure->sampleLocationsInfo.pSampleLocations, false, true, kVUIDUndefined, "VUID-VkSampleLocationsInfoEXT-pSampleLocations-parameter");
-
- if (structure->sampleLocationsInfo.pSampleLocations != NULL)
- {
- for (uint32_t sampleLocationsIndex = 0; sampleLocationsIndex < structure->sampleLocationsInfo.sampleLocationsCount; ++sampleLocationsIndex)
- {
- }
- }
- } break;
-
- // Validation code for VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT: {
- VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT *structure = (VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT *) header;
- skip |= validate_bool32("VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT", "advancedBlendCoherentOperations", structure->advancedBlendCoherentOperations);
- } break;
-
- // Validation code for VkPipelineColorBlendAdvancedStateCreateInfoEXT structure members
- case VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT: {
- VkPipelineColorBlendAdvancedStateCreateInfoEXT *structure = (VkPipelineColorBlendAdvancedStateCreateInfoEXT *) header;
- skip |= validate_bool32("VkPipelineColorBlendAdvancedStateCreateInfoEXT", "srcPremultiplied", structure->srcPremultiplied);
-
- skip |= validate_bool32("VkPipelineColorBlendAdvancedStateCreateInfoEXT", "dstPremultiplied", structure->dstPremultiplied);
-
- skip |= validate_ranged_enum("VkPipelineColorBlendAdvancedStateCreateInfoEXT", "blendOverlap", "VkBlendOverlapEXT", AllVkBlendOverlapEXTEnums, structure->blendOverlap, "VUID-VkPipelineColorBlendAdvancedStateCreateInfoEXT-blendOverlap-parameter");
- } break;
-
- // Validation code for VkPipelineCoverageToColorStateCreateInfoNV structure members
- case VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV: {
- VkPipelineCoverageToColorStateCreateInfoNV *structure = (VkPipelineCoverageToColorStateCreateInfoNV *) header;
- skip |= validate_reserved_flags("VkPipelineCoverageToColorStateCreateInfoNV", "flags", structure->flags, "VUID-VkPipelineCoverageToColorStateCreateInfoNV-flags-zerobitmask");
-
- skip |= validate_bool32("VkPipelineCoverageToColorStateCreateInfoNV", "coverageToColorEnable", structure->coverageToColorEnable);
- } break;
-
- // Validation code for VkPipelineCoverageModulationStateCreateInfoNV structure members
- case VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV: {
- VkPipelineCoverageModulationStateCreateInfoNV *structure = (VkPipelineCoverageModulationStateCreateInfoNV *) header;
- skip |= validate_reserved_flags("VkPipelineCoverageModulationStateCreateInfoNV", "flags", structure->flags, "VUID-VkPipelineCoverageModulationStateCreateInfoNV-flags-zerobitmask");
-
- skip |= validate_ranged_enum("VkPipelineCoverageModulationStateCreateInfoNV", "coverageModulationMode", "VkCoverageModulationModeNV", AllVkCoverageModulationModeNVEnums, structure->coverageModulationMode, "VUID-VkPipelineCoverageModulationStateCreateInfoNV-coverageModulationMode-parameter");
-
- skip |= validate_bool32("VkPipelineCoverageModulationStateCreateInfoNV", "coverageModulationTableEnable", structure->coverageModulationTableEnable);
- } break;
-
- // Validation code for VkPhysicalDeviceShaderSMBuiltinsFeaturesNV structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV: {
- VkPhysicalDeviceShaderSMBuiltinsFeaturesNV *structure = (VkPhysicalDeviceShaderSMBuiltinsFeaturesNV *) header;
- skip |= validate_bool32("VkPhysicalDeviceShaderSMBuiltinsFeaturesNV", "shaderSMBuiltins", structure->shaderSMBuiltins);
- } break;
-
- // Validation code for VkPhysicalDeviceImageDrmFormatModifierInfoEXT structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT: {
- VkPhysicalDeviceImageDrmFormatModifierInfoEXT *structure = (VkPhysicalDeviceImageDrmFormatModifierInfoEXT *) header;
- skip |= validate_ranged_enum("VkPhysicalDeviceImageDrmFormatModifierInfoEXT", "sharingMode", "VkSharingMode", AllVkSharingModeEnums, structure->sharingMode, "VUID-VkPhysicalDeviceImageDrmFormatModifierInfoEXT-sharingMode-parameter");
- } break;
-
- // Validation code for VkImageDrmFormatModifierListCreateInfoEXT structure members
- case VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT: {
- VkImageDrmFormatModifierListCreateInfoEXT *structure = (VkImageDrmFormatModifierListCreateInfoEXT *) header;
- skip |= validate_array("VkImageDrmFormatModifierListCreateInfoEXT", "drmFormatModifierCount", "pDrmFormatModifiers", structure->drmFormatModifierCount, &structure->pDrmFormatModifiers, true, true, "VUID-VkImageDrmFormatModifierListCreateInfoEXT-drmFormatModifierCount-arraylength", "VUID-VkImageDrmFormatModifierListCreateInfoEXT-pDrmFormatModifiers-parameter");
- } break;
-
- // Validation code for VkImageDrmFormatModifierExplicitCreateInfoEXT structure members
- case VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT: {
- VkImageDrmFormatModifierExplicitCreateInfoEXT *structure = (VkImageDrmFormatModifierExplicitCreateInfoEXT *) header;
- skip |= validate_array("VkImageDrmFormatModifierExplicitCreateInfoEXT", "drmFormatModifierPlaneCount", "pPlaneLayouts", structure->drmFormatModifierPlaneCount, &structure->pPlaneLayouts, true, true, kVUIDUndefined, "VUID-VkImageDrmFormatModifierExplicitCreateInfoEXT-pPlaneLayouts-parameter");
-
- if (structure->pPlaneLayouts != NULL)
- {
- for (uint32_t drmFormatModifierPlaneIndex = 0; drmFormatModifierPlaneIndex < structure->drmFormatModifierPlaneCount; ++drmFormatModifierPlaneIndex)
- {
- }
- }
- } break;
-
- // Validation code for VkShaderModuleValidationCacheCreateInfoEXT structure members
- case VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT: {
- VkShaderModuleValidationCacheCreateInfoEXT *structure = (VkShaderModuleValidationCacheCreateInfoEXT *) header;
- skip |= validate_required_handle("VkShaderModuleValidationCacheCreateInfoEXT", "validationCache", structure->validationCache);
- } break;
-
- // Validation code for VkDescriptorSetLayoutBindingFlagsCreateInfoEXT structure members
- case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT: {
- VkDescriptorSetLayoutBindingFlagsCreateInfoEXT *structure = (VkDescriptorSetLayoutBindingFlagsCreateInfoEXT *) header;
- skip |= validate_flags_array("VkDescriptorSetLayoutBindingFlagsCreateInfoEXT", "bindingCount", "pBindingFlags", "VkDescriptorBindingFlagBitsEXT", AllVkDescriptorBindingFlagBitsEXT, structure->bindingCount, structure->pBindingFlags, false, false);
- } break;
-
- // Validation code for VkPhysicalDeviceDescriptorIndexingFeaturesEXT structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT: {
- VkPhysicalDeviceDescriptorIndexingFeaturesEXT *structure = (VkPhysicalDeviceDescriptorIndexingFeaturesEXT *) header;
- skip |= validate_bool32("VkPhysicalDeviceDescriptorIndexingFeaturesEXT", "shaderInputAttachmentArrayDynamicIndexing", structure->shaderInputAttachmentArrayDynamicIndexing);
-
- skip |= validate_bool32("VkPhysicalDeviceDescriptorIndexingFeaturesEXT", "shaderUniformTexelBufferArrayDynamicIndexing", structure->shaderUniformTexelBufferArrayDynamicIndexing);
-
- skip |= validate_bool32("VkPhysicalDeviceDescriptorIndexingFeaturesEXT", "shaderStorageTexelBufferArrayDynamicIndexing", structure->shaderStorageTexelBufferArrayDynamicIndexing);
-
- skip |= validate_bool32("VkPhysicalDeviceDescriptorIndexingFeaturesEXT", "shaderUniformBufferArrayNonUniformIndexing", structure->shaderUniformBufferArrayNonUniformIndexing);
-
- skip |= validate_bool32("VkPhysicalDeviceDescriptorIndexingFeaturesEXT", "shaderSampledImageArrayNonUniformIndexing", structure->shaderSampledImageArrayNonUniformIndexing);
-
- skip |= validate_bool32("VkPhysicalDeviceDescriptorIndexingFeaturesEXT", "shaderStorageBufferArrayNonUniformIndexing", structure->shaderStorageBufferArrayNonUniformIndexing);
-
- skip |= validate_bool32("VkPhysicalDeviceDescriptorIndexingFeaturesEXT", "shaderStorageImageArrayNonUniformIndexing", structure->shaderStorageImageArrayNonUniformIndexing);
-
- skip |= validate_bool32("VkPhysicalDeviceDescriptorIndexingFeaturesEXT", "shaderInputAttachmentArrayNonUniformIndexing", structure->shaderInputAttachmentArrayNonUniformIndexing);
-
- skip |= validate_bool32("VkPhysicalDeviceDescriptorIndexingFeaturesEXT", "shaderUniformTexelBufferArrayNonUniformIndexing", structure->shaderUniformTexelBufferArrayNonUniformIndexing);
-
- skip |= validate_bool32("VkPhysicalDeviceDescriptorIndexingFeaturesEXT", "shaderStorageTexelBufferArrayNonUniformIndexing", structure->shaderStorageTexelBufferArrayNonUniformIndexing);
-
- skip |= validate_bool32("VkPhysicalDeviceDescriptorIndexingFeaturesEXT", "descriptorBindingUniformBufferUpdateAfterBind", structure->descriptorBindingUniformBufferUpdateAfterBind);
-
- skip |= validate_bool32("VkPhysicalDeviceDescriptorIndexingFeaturesEXT", "descriptorBindingSampledImageUpdateAfterBind", structure->descriptorBindingSampledImageUpdateAfterBind);
-
- skip |= validate_bool32("VkPhysicalDeviceDescriptorIndexingFeaturesEXT", "descriptorBindingStorageImageUpdateAfterBind", structure->descriptorBindingStorageImageUpdateAfterBind);
-
- skip |= validate_bool32("VkPhysicalDeviceDescriptorIndexingFeaturesEXT", "descriptorBindingStorageBufferUpdateAfterBind", structure->descriptorBindingStorageBufferUpdateAfterBind);
-
- skip |= validate_bool32("VkPhysicalDeviceDescriptorIndexingFeaturesEXT", "descriptorBindingUniformTexelBufferUpdateAfterBind", structure->descriptorBindingUniformTexelBufferUpdateAfterBind);
-
- skip |= validate_bool32("VkPhysicalDeviceDescriptorIndexingFeaturesEXT", "descriptorBindingStorageTexelBufferUpdateAfterBind", structure->descriptorBindingStorageTexelBufferUpdateAfterBind);
-
- skip |= validate_bool32("VkPhysicalDeviceDescriptorIndexingFeaturesEXT", "descriptorBindingUpdateUnusedWhilePending", structure->descriptorBindingUpdateUnusedWhilePending);
-
- skip |= validate_bool32("VkPhysicalDeviceDescriptorIndexingFeaturesEXT", "descriptorBindingPartiallyBound", structure->descriptorBindingPartiallyBound);
-
- skip |= validate_bool32("VkPhysicalDeviceDescriptorIndexingFeaturesEXT", "descriptorBindingVariableDescriptorCount", structure->descriptorBindingVariableDescriptorCount);
-
- skip |= validate_bool32("VkPhysicalDeviceDescriptorIndexingFeaturesEXT", "runtimeDescriptorArray", structure->runtimeDescriptorArray);
- } break;
-
- // Validation code for VkDescriptorSetVariableDescriptorCountAllocateInfoEXT structure members
- case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT: {
- VkDescriptorSetVariableDescriptorCountAllocateInfoEXT *structure = (VkDescriptorSetVariableDescriptorCountAllocateInfoEXT *) header;
- skip |= validate_array("VkDescriptorSetVariableDescriptorCountAllocateInfoEXT", "descriptorSetCount", "pDescriptorCounts", structure->descriptorSetCount, &structure->pDescriptorCounts, false, true, kVUIDUndefined, "VUID-VkDescriptorSetVariableDescriptorCountAllocateInfoEXT-pDescriptorCounts-parameter");
- } break;
-
- // Validation code for VkPipelineViewportShadingRateImageStateCreateInfoNV structure members
- case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV: {
- VkPipelineViewportShadingRateImageStateCreateInfoNV *structure = (VkPipelineViewportShadingRateImageStateCreateInfoNV *) header;
- skip |= validate_bool32("VkPipelineViewportShadingRateImageStateCreateInfoNV", "shadingRateImageEnable", structure->shadingRateImageEnable);
-
- if (structure->pShadingRatePalettes != NULL)
- {
- for (uint32_t viewportIndex = 0; viewportIndex < structure->viewportCount; ++viewportIndex)
- {
- skip |= validate_ranged_enum_array("VkPipelineViewportShadingRateImageStateCreateInfoNV", ParameterName("pShadingRatePalettes[%i].shadingRatePaletteEntryCount", ParameterName::IndexVector{ viewportIndex }), ParameterName("pShadingRatePalettes[%i].pShadingRatePaletteEntries", ParameterName::IndexVector{ viewportIndex }), "VkShadingRatePaletteEntryNV", AllVkShadingRatePaletteEntryNVEnums, structure->pShadingRatePalettes[viewportIndex].shadingRatePaletteEntryCount, structure->pShadingRatePalettes[viewportIndex].pShadingRatePaletteEntries, true, true);
- }
- }
- } break;
-
- // Validation code for VkPhysicalDeviceShadingRateImageFeaturesNV structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV: {
- VkPhysicalDeviceShadingRateImageFeaturesNV *structure = (VkPhysicalDeviceShadingRateImageFeaturesNV *) header;
- skip |= validate_bool32("VkPhysicalDeviceShadingRateImageFeaturesNV", "shadingRateImage", structure->shadingRateImage);
-
- skip |= validate_bool32("VkPhysicalDeviceShadingRateImageFeaturesNV", "shadingRateCoarseSampleOrder", structure->shadingRateCoarseSampleOrder);
- } break;
-
- // Validation code for VkPipelineViewportCoarseSampleOrderStateCreateInfoNV structure members
- case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV: {
- VkPipelineViewportCoarseSampleOrderStateCreateInfoNV *structure = (VkPipelineViewportCoarseSampleOrderStateCreateInfoNV *) header;
- skip |= validate_ranged_enum("VkPipelineViewportCoarseSampleOrderStateCreateInfoNV", "sampleOrderType", "VkCoarseSampleOrderTypeNV", AllVkCoarseSampleOrderTypeNVEnums, structure->sampleOrderType, "VUID-VkPipelineViewportCoarseSampleOrderStateCreateInfoNV-sampleOrderType-parameter");
-
- skip |= validate_array("VkPipelineViewportCoarseSampleOrderStateCreateInfoNV", "customSampleOrderCount", "pCustomSampleOrders", structure->customSampleOrderCount, &structure->pCustomSampleOrders, false, true, kVUIDUndefined, "VUID-VkPipelineViewportCoarseSampleOrderStateCreateInfoNV-pCustomSampleOrders-parameter");
-
- if (structure->pCustomSampleOrders != NULL)
- {
- for (uint32_t customSampleOrderIndex = 0; customSampleOrderIndex < structure->customSampleOrderCount; ++customSampleOrderIndex)
- {
- skip |= validate_ranged_enum("VkPipelineViewportCoarseSampleOrderStateCreateInfoNV", ParameterName("pCustomSampleOrders[%i].shadingRate", ParameterName::IndexVector{ customSampleOrderIndex }), "VkShadingRatePaletteEntryNV", AllVkShadingRatePaletteEntryNVEnums, structure->pCustomSampleOrders[customSampleOrderIndex].shadingRate, "VUID-VkCoarseSampleOrderCustomNV-shadingRate-parameter");
-
- skip |= validate_array("VkPipelineViewportCoarseSampleOrderStateCreateInfoNV", ParameterName("pCustomSampleOrders[%i].sampleLocationCount", ParameterName::IndexVector{ customSampleOrderIndex }), ParameterName("pCustomSampleOrders[%i].pSampleLocations", ParameterName::IndexVector{ customSampleOrderIndex }), structure->pCustomSampleOrders[customSampleOrderIndex].sampleLocationCount, &structure->pCustomSampleOrders[customSampleOrderIndex].pSampleLocations, true, true, "VUID-VkCoarseSampleOrderCustomNV-sampleLocationCount-arraylength", "VUID-VkCoarseSampleOrderCustomNV-pSampleLocations-parameter");
-
- if (structure->pCustomSampleOrders[customSampleOrderIndex].pSampleLocations != NULL)
- {
- for (uint32_t sampleLocationIndex = 0; sampleLocationIndex < structure->pCustomSampleOrders[customSampleOrderIndex].sampleLocationCount; ++sampleLocationIndex)
- {
- }
- }
- }
- }
- } break;
-
- // Validation code for VkWriteDescriptorSetAccelerationStructureNV structure members
- case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV: {
- VkWriteDescriptorSetAccelerationStructureNV *structure = (VkWriteDescriptorSetAccelerationStructureNV *) header;
- skip |= validate_handle_array("VkWriteDescriptorSetAccelerationStructureNV", "accelerationStructureCount", "pAccelerationStructures", structure->accelerationStructureCount, structure->pAccelerationStructures, true, true);
- } break;
-
- // Validation code for VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV: {
- VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV *structure = (VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV *) header;
- skip |= validate_bool32("VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV", "representativeFragmentTest", structure->representativeFragmentTest);
- } break;
-
- // Validation code for VkPipelineRepresentativeFragmentTestStateCreateInfoNV structure members
- case VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV: {
- VkPipelineRepresentativeFragmentTestStateCreateInfoNV *structure = (VkPipelineRepresentativeFragmentTestStateCreateInfoNV *) header;
- skip |= validate_bool32("VkPipelineRepresentativeFragmentTestStateCreateInfoNV", "representativeFragmentTestEnable", structure->representativeFragmentTestEnable);
- } break;
-
- // Validation code for VkPhysicalDeviceImageViewImageFormatInfoEXT structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT: {
- VkPhysicalDeviceImageViewImageFormatInfoEXT *structure = (VkPhysicalDeviceImageViewImageFormatInfoEXT *) header;
- skip |= validate_ranged_enum("VkPhysicalDeviceImageViewImageFormatInfoEXT", "imageViewType", "VkImageViewType", AllVkImageViewTypeEnums, structure->imageViewType, "VUID-VkPhysicalDeviceImageViewImageFormatInfoEXT-imageViewType-parameter");
- } break;
-
- // Validation code for VkDeviceQueueGlobalPriorityCreateInfoEXT structure members
- case VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT: {
- VkDeviceQueueGlobalPriorityCreateInfoEXT *structure = (VkDeviceQueueGlobalPriorityCreateInfoEXT *) header;
- skip |= validate_ranged_enum("VkDeviceQueueGlobalPriorityCreateInfoEXT", "globalPriority", "VkQueueGlobalPriorityEXT", AllVkQueueGlobalPriorityEXTEnums, structure->globalPriority, "VUID-VkDeviceQueueGlobalPriorityCreateInfoEXT-globalPriority-parameter");
- } break;
-
- // Validation code for VkImportMemoryHostPointerInfoEXT structure members
- case VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT: {
- VkImportMemoryHostPointerInfoEXT *structure = (VkImportMemoryHostPointerInfoEXT *) header;
- skip |= validate_flags("VkImportMemoryHostPointerInfoEXT", "handleType", "VkExternalMemoryHandleTypeFlagBits", AllVkExternalMemoryHandleTypeFlagBits, structure->handleType, kRequiredSingleBit, "VUID-VkImportMemoryHostPointerInfoEXT-handleType-parameter", "VUID-VkImportMemoryHostPointerInfoEXT-handleType-parameter");
-
- skip |= validate_required_pointer("VkImportMemoryHostPointerInfoEXT", "pHostPointer", structure->pHostPointer, kVUIDUndefined);
- } break;
-
- // Validation code for VkPipelineCompilerControlCreateInfoAMD structure members
- case VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD: {
- VkPipelineCompilerControlCreateInfoAMD *structure = (VkPipelineCompilerControlCreateInfoAMD *) header;
- skip |= validate_reserved_flags("VkPipelineCompilerControlCreateInfoAMD", "compilerControlFlags", structure->compilerControlFlags, "VUID-VkPipelineCompilerControlCreateInfoAMD-compilerControlFlags-zerobitmask");
- } break;
-
- // Validation code for VkDeviceMemoryOverallocationCreateInfoAMD structure members
- case VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD: {
- VkDeviceMemoryOverallocationCreateInfoAMD *structure = (VkDeviceMemoryOverallocationCreateInfoAMD *) header;
- skip |= validate_ranged_enum("VkDeviceMemoryOverallocationCreateInfoAMD", "overallocationBehavior", "VkMemoryOverallocationBehaviorAMD", AllVkMemoryOverallocationBehaviorAMDEnums, structure->overallocationBehavior, "VUID-VkDeviceMemoryOverallocationCreateInfoAMD-overallocationBehavior-parameter");
- } break;
-
- // Validation code for VkPipelineVertexInputDivisorStateCreateInfoEXT structure members
- case VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT: {
- VkPipelineVertexInputDivisorStateCreateInfoEXT *structure = (VkPipelineVertexInputDivisorStateCreateInfoEXT *) header;
- skip |= validate_array("VkPipelineVertexInputDivisorStateCreateInfoEXT", "vertexBindingDivisorCount", "pVertexBindingDivisors", structure->vertexBindingDivisorCount, &structure->pVertexBindingDivisors, true, true, "VUID-VkPipelineVertexInputDivisorStateCreateInfoEXT-vertexBindingDivisorCount-arraylength", "VUID-VkPipelineVertexInputDivisorStateCreateInfoEXT-pVertexBindingDivisors-parameter");
-
- if (structure->pVertexBindingDivisors != NULL)
- {
- for (uint32_t vertexBindingDivisorIndex = 0; vertexBindingDivisorIndex < structure->vertexBindingDivisorCount; ++vertexBindingDivisorIndex)
- {
- }
- }
- } break;
-
- // Validation code for VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT: {
- VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT *structure = (VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT *) header;
- skip |= validate_bool32("VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT", "vertexAttributeInstanceRateDivisor", structure->vertexAttributeInstanceRateDivisor);
-
- skip |= validate_bool32("VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT", "vertexAttributeInstanceRateZeroDivisor", structure->vertexAttributeInstanceRateZeroDivisor);
- } break;
-
- // Validation code for VkPipelineCreationFeedbackCreateInfoEXT structure members
- case VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT: {
- VkPipelineCreationFeedbackCreateInfoEXT *structure = (VkPipelineCreationFeedbackCreateInfoEXT *) header;
- skip |= validate_required_pointer("VkPipelineCreationFeedbackCreateInfoEXT", "pPipelineCreationFeedback", structure->pPipelineCreationFeedback, "VUID-VkPipelineCreationFeedbackCreateInfoEXT-pPipelineCreationFeedback-parameter");
-
- if (structure->pPipelineCreationFeedback != NULL)
- {
- }
-
- skip |= validate_array("VkPipelineCreationFeedbackCreateInfoEXT", "pipelineStageCreationFeedbackCount", "pPipelineStageCreationFeedbacks", structure->pipelineStageCreationFeedbackCount, &structure->pPipelineStageCreationFeedbacks, true, true, "VUID-VkPipelineCreationFeedbackCreateInfoEXT-pipelineStageCreationFeedbackCount-arraylength", "VUID-VkPipelineCreationFeedbackCreateInfoEXT-pPipelineStageCreationFeedbacks-parameter");
-
- if (structure->pPipelineStageCreationFeedbacks != NULL)
- {
- for (uint32_t pipelineStageCreationFeedbackIndex = 0; pipelineStageCreationFeedbackIndex < structure->pipelineStageCreationFeedbackCount; ++pipelineStageCreationFeedbackIndex)
- {
- }
- }
- } break;
-
- // Validation code for VkPhysicalDeviceComputeShaderDerivativesFeaturesNV structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV: {
- VkPhysicalDeviceComputeShaderDerivativesFeaturesNV *structure = (VkPhysicalDeviceComputeShaderDerivativesFeaturesNV *) header;
- skip |= validate_bool32("VkPhysicalDeviceComputeShaderDerivativesFeaturesNV", "computeDerivativeGroupQuads", structure->computeDerivativeGroupQuads);
-
- skip |= validate_bool32("VkPhysicalDeviceComputeShaderDerivativesFeaturesNV", "computeDerivativeGroupLinear", structure->computeDerivativeGroupLinear);
- } break;
-
- // Validation code for VkPhysicalDeviceMeshShaderFeaturesNV structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV: {
- VkPhysicalDeviceMeshShaderFeaturesNV *structure = (VkPhysicalDeviceMeshShaderFeaturesNV *) header;
- skip |= validate_bool32("VkPhysicalDeviceMeshShaderFeaturesNV", "taskShader", structure->taskShader);
-
- skip |= validate_bool32("VkPhysicalDeviceMeshShaderFeaturesNV", "meshShader", structure->meshShader);
- } break;
-
- // Validation code for VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV: {
- VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV *structure = (VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV *) header;
- skip |= validate_bool32("VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV", "fragmentShaderBarycentric", structure->fragmentShaderBarycentric);
- } break;
-
- // Validation code for VkPhysicalDeviceShaderImageFootprintFeaturesNV structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV: {
- VkPhysicalDeviceShaderImageFootprintFeaturesNV *structure = (VkPhysicalDeviceShaderImageFootprintFeaturesNV *) header;
- skip |= validate_bool32("VkPhysicalDeviceShaderImageFootprintFeaturesNV", "imageFootprint", structure->imageFootprint);
- } break;
-
- // Validation code for VkPipelineViewportExclusiveScissorStateCreateInfoNV structure members
- case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV: {
- VkPipelineViewportExclusiveScissorStateCreateInfoNV *structure = (VkPipelineViewportExclusiveScissorStateCreateInfoNV *) header;
- if (structure->pExclusiveScissors != NULL)
- {
- for (uint32_t exclusiveScissorIndex = 0; exclusiveScissorIndex < structure->exclusiveScissorCount; ++exclusiveScissorIndex)
- {
- }
- }
- } break;
-
- // Validation code for VkPhysicalDeviceExclusiveScissorFeaturesNV structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV: {
- VkPhysicalDeviceExclusiveScissorFeaturesNV *structure = (VkPhysicalDeviceExclusiveScissorFeaturesNV *) header;
- skip |= validate_bool32("VkPhysicalDeviceExclusiveScissorFeaturesNV", "exclusiveScissor", structure->exclusiveScissor);
- } break;
-
- // Validation code for VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL: {
- VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL *structure = (VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL *) header;
- skip |= validate_bool32("VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL", "shaderIntegerFunctions2", structure->shaderIntegerFunctions2);
- } break;
-
- // Validation code for VkSwapchainDisplayNativeHdrCreateInfoAMD structure members
- case VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD: {
- VkSwapchainDisplayNativeHdrCreateInfoAMD *structure = (VkSwapchainDisplayNativeHdrCreateInfoAMD *) header;
- skip |= validate_bool32("VkSwapchainDisplayNativeHdrCreateInfoAMD", "localDimmingEnable", structure->localDimmingEnable);
- } break;
-
- // Validation code for VkRenderPassFragmentDensityMapCreateInfoEXT structure members
- case VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT: {
- VkRenderPassFragmentDensityMapCreateInfoEXT *structure = (VkRenderPassFragmentDensityMapCreateInfoEXT *) header;
- skip |= validate_ranged_enum("VkRenderPassFragmentDensityMapCreateInfoEXT", "fragmentDensityMapAttachment.layout", "VkImageLayout", AllVkImageLayoutEnums, structure->fragmentDensityMapAttachment.layout, "VUID-VkAttachmentReference-layout-parameter");
- } break;
-
- // Validation code for VkPhysicalDeviceScalarBlockLayoutFeaturesEXT structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT: {
- VkPhysicalDeviceScalarBlockLayoutFeaturesEXT *structure = (VkPhysicalDeviceScalarBlockLayoutFeaturesEXT *) header;
- skip |= validate_bool32("VkPhysicalDeviceScalarBlockLayoutFeaturesEXT", "scalarBlockLayout", structure->scalarBlockLayout);
- } break;
-
- // Validation code for VkPhysicalDeviceSubgroupSizeControlFeaturesEXT structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT: {
- VkPhysicalDeviceSubgroupSizeControlFeaturesEXT *structure = (VkPhysicalDeviceSubgroupSizeControlFeaturesEXT *) header;
- skip |= validate_bool32("VkPhysicalDeviceSubgroupSizeControlFeaturesEXT", "subgroupSizeControl", structure->subgroupSizeControl);
-
- skip |= validate_bool32("VkPhysicalDeviceSubgroupSizeControlFeaturesEXT", "computeFullSubgroups", structure->computeFullSubgroups);
- } break;
-
- // Validation code for VkPhysicalDeviceCoherentMemoryFeaturesAMD structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD: {
- VkPhysicalDeviceCoherentMemoryFeaturesAMD *structure = (VkPhysicalDeviceCoherentMemoryFeaturesAMD *) header;
- skip |= validate_bool32("VkPhysicalDeviceCoherentMemoryFeaturesAMD", "deviceCoherentMemory", structure->deviceCoherentMemory);
- } break;
-
- // Validation code for VkPhysicalDeviceMemoryPriorityFeaturesEXT structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT: {
- VkPhysicalDeviceMemoryPriorityFeaturesEXT *structure = (VkPhysicalDeviceMemoryPriorityFeaturesEXT *) header;
- skip |= validate_bool32("VkPhysicalDeviceMemoryPriorityFeaturesEXT", "memoryPriority", structure->memoryPriority);
- } break;
-
- // Validation code for VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV: {
- VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV *structure = (VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV *) header;
- skip |= validate_bool32("VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV", "dedicatedAllocationImageAliasing", structure->dedicatedAllocationImageAliasing);
- } break;
-
- // Validation code for VkPhysicalDeviceBufferDeviceAddressFeaturesEXT structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT: {
- VkPhysicalDeviceBufferDeviceAddressFeaturesEXT *structure = (VkPhysicalDeviceBufferDeviceAddressFeaturesEXT *) header;
- skip |= validate_bool32("VkPhysicalDeviceBufferDeviceAddressFeaturesEXT", "bufferDeviceAddress", structure->bufferDeviceAddress);
-
- skip |= validate_bool32("VkPhysicalDeviceBufferDeviceAddressFeaturesEXT", "bufferDeviceAddressCaptureReplay", structure->bufferDeviceAddressCaptureReplay);
-
- skip |= validate_bool32("VkPhysicalDeviceBufferDeviceAddressFeaturesEXT", "bufferDeviceAddressMultiDevice", structure->bufferDeviceAddressMultiDevice);
- } break;
-
- // Validation code for VkImageStencilUsageCreateInfoEXT structure members
- case VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT: {
- VkImageStencilUsageCreateInfoEXT *structure = (VkImageStencilUsageCreateInfoEXT *) header;
- skip |= validate_flags("VkImageStencilUsageCreateInfoEXT", "stencilUsage", "VkImageUsageFlagBits", AllVkImageUsageFlagBits, structure->stencilUsage, kRequiredFlags, "VUID-VkImageStencilUsageCreateInfoEXT-stencilUsage-parameter", "VUID-VkImageStencilUsageCreateInfoEXT-stencilUsage-requiredbitmask");
- } break;
-
- // Validation code for VkValidationFeaturesEXT structure members
- case VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT: {
- VkValidationFeaturesEXT *structure = (VkValidationFeaturesEXT *) header;
- skip |= validate_ranged_enum_array("VkValidationFeaturesEXT", "enabledValidationFeatureCount", "pEnabledValidationFeatures", "VkValidationFeatureEnableEXT", AllVkValidationFeatureEnableEXTEnums, structure->enabledValidationFeatureCount, structure->pEnabledValidationFeatures, false, true);
-
- skip |= validate_ranged_enum_array("VkValidationFeaturesEXT", "disabledValidationFeatureCount", "pDisabledValidationFeatures", "VkValidationFeatureDisableEXT", AllVkValidationFeatureDisableEXTEnums, structure->disabledValidationFeatureCount, structure->pDisabledValidationFeatures, false, true);
- } break;
-
- // Validation code for VkPhysicalDeviceCooperativeMatrixFeaturesNV structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV: {
- VkPhysicalDeviceCooperativeMatrixFeaturesNV *structure = (VkPhysicalDeviceCooperativeMatrixFeaturesNV *) header;
- skip |= validate_bool32("VkPhysicalDeviceCooperativeMatrixFeaturesNV", "cooperativeMatrix", structure->cooperativeMatrix);
-
- skip |= validate_bool32("VkPhysicalDeviceCooperativeMatrixFeaturesNV", "cooperativeMatrixRobustBufferAccess", structure->cooperativeMatrixRobustBufferAccess);
- } break;
-
- // Validation code for VkPhysicalDeviceCoverageReductionModeFeaturesNV structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV: {
- VkPhysicalDeviceCoverageReductionModeFeaturesNV *structure = (VkPhysicalDeviceCoverageReductionModeFeaturesNV *) header;
- skip |= validate_bool32("VkPhysicalDeviceCoverageReductionModeFeaturesNV", "coverageReductionMode", structure->coverageReductionMode);
- } break;
-
- // Validation code for VkPipelineCoverageReductionStateCreateInfoNV structure members
- case VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV: {
- VkPipelineCoverageReductionStateCreateInfoNV *structure = (VkPipelineCoverageReductionStateCreateInfoNV *) header;
- skip |= validate_reserved_flags("VkPipelineCoverageReductionStateCreateInfoNV", "flags", structure->flags, "VUID-VkPipelineCoverageReductionStateCreateInfoNV-flags-zerobitmask");
-
- skip |= validate_ranged_enum("VkPipelineCoverageReductionStateCreateInfoNV", "coverageReductionMode", "VkCoverageReductionModeNV", AllVkCoverageReductionModeNVEnums, structure->coverageReductionMode, "VUID-VkPipelineCoverageReductionStateCreateInfoNV-coverageReductionMode-parameter");
- } break;
-
- // Validation code for VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT: {
- VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT *structure = (VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT *) header;
- skip |= validate_bool32("VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT", "fragmentShaderSampleInterlock", structure->fragmentShaderSampleInterlock);
-
- skip |= validate_bool32("VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT", "fragmentShaderPixelInterlock", structure->fragmentShaderPixelInterlock);
-
- skip |= validate_bool32("VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT", "fragmentShaderShadingRateInterlock", structure->fragmentShaderShadingRateInterlock);
- } break;
-
- // Validation code for VkPhysicalDeviceYcbcrImageArraysFeaturesEXT structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT: {
- VkPhysicalDeviceYcbcrImageArraysFeaturesEXT *structure = (VkPhysicalDeviceYcbcrImageArraysFeaturesEXT *) header;
- skip |= validate_bool32("VkPhysicalDeviceYcbcrImageArraysFeaturesEXT", "ycbcrImageArrays", structure->ycbcrImageArrays);
- } break;
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- // Validation code for VkSurfaceFullScreenExclusiveInfoEXT structure members
- case VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT: {
- VkSurfaceFullScreenExclusiveInfoEXT *structure = (VkSurfaceFullScreenExclusiveInfoEXT *) header;
- skip |= validate_ranged_enum("VkSurfaceFullScreenExclusiveInfoEXT", "fullScreenExclusive", "VkFullScreenExclusiveEXT", AllVkFullScreenExclusiveEXTEnums, structure->fullScreenExclusive, "VUID-VkSurfaceFullScreenExclusiveInfoEXT-fullScreenExclusive-parameter");
- } break;
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- // Validation code for VkSurfaceCapabilitiesFullScreenExclusiveEXT structure members
- case VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT: {
- VkSurfaceCapabilitiesFullScreenExclusiveEXT *structure = (VkSurfaceCapabilitiesFullScreenExclusiveEXT *) header;
- skip |= validate_bool32("VkSurfaceCapabilitiesFullScreenExclusiveEXT", "fullScreenExclusiveSupported", structure->fullScreenExclusiveSupported);
- } break;
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
- // Validation code for VkPhysicalDeviceLineRasterizationFeaturesEXT structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT: {
- VkPhysicalDeviceLineRasterizationFeaturesEXT *structure = (VkPhysicalDeviceLineRasterizationFeaturesEXT *) header;
- skip |= validate_bool32("VkPhysicalDeviceLineRasterizationFeaturesEXT", "rectangularLines", structure->rectangularLines);
-
- skip |= validate_bool32("VkPhysicalDeviceLineRasterizationFeaturesEXT", "bresenhamLines", structure->bresenhamLines);
-
- skip |= validate_bool32("VkPhysicalDeviceLineRasterizationFeaturesEXT", "smoothLines", structure->smoothLines);
-
- skip |= validate_bool32("VkPhysicalDeviceLineRasterizationFeaturesEXT", "stippledRectangularLines", structure->stippledRectangularLines);
-
- skip |= validate_bool32("VkPhysicalDeviceLineRasterizationFeaturesEXT", "stippledBresenhamLines", structure->stippledBresenhamLines);
-
- skip |= validate_bool32("VkPhysicalDeviceLineRasterizationFeaturesEXT", "stippledSmoothLines", structure->stippledSmoothLines);
- } break;
-
- // Validation code for VkPipelineRasterizationLineStateCreateInfoEXT structure members
- case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT: {
- VkPipelineRasterizationLineStateCreateInfoEXT *structure = (VkPipelineRasterizationLineStateCreateInfoEXT *) header;
- skip |= validate_ranged_enum("VkPipelineRasterizationLineStateCreateInfoEXT", "lineRasterizationMode", "VkLineRasterizationModeEXT", AllVkLineRasterizationModeEXTEnums, structure->lineRasterizationMode, "VUID-VkPipelineRasterizationLineStateCreateInfoEXT-lineRasterizationMode-parameter");
-
- skip |= validate_bool32("VkPipelineRasterizationLineStateCreateInfoEXT", "stippledLineEnable", structure->stippledLineEnable);
- } break;
-
- // Validation code for VkPhysicalDeviceHostQueryResetFeaturesEXT structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT: {
- VkPhysicalDeviceHostQueryResetFeaturesEXT *structure = (VkPhysicalDeviceHostQueryResetFeaturesEXT *) header;
- skip |= validate_bool32("VkPhysicalDeviceHostQueryResetFeaturesEXT", "hostQueryReset", structure->hostQueryReset);
- } break;
-
- // Validation code for VkPhysicalDeviceIndexTypeUint8FeaturesEXT structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT: {
- VkPhysicalDeviceIndexTypeUint8FeaturesEXT *structure = (VkPhysicalDeviceIndexTypeUint8FeaturesEXT *) header;
- skip |= validate_bool32("VkPhysicalDeviceIndexTypeUint8FeaturesEXT", "indexTypeUint8", structure->indexTypeUint8);
- } break;
-
- // Validation code for VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT: {
- VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT *structure = (VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT *) header;
- skip |= validate_bool32("VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT", "shaderDemoteToHelperInvocation", structure->shaderDemoteToHelperInvocation);
- } break;
-
- // Validation code for VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT structure members
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT: {
- VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT *structure = (VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT *) header;
- skip |= validate_bool32("VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT", "texelBufferAlignment", structure->texelBufferAlignment);
- } break;
- default:
- skip = false;
- }
- return skip;
-}
-
-
-bool StatelessValidation::OutputExtensionError(const std::string &api_name, const std::string &extension_name) {
- return log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
-                   kVUID_PVError_ExtensionNotEnabled, "Attempted to call %s() but its required extension %s has not been enabled\n",
- api_name.c_str(), extension_name.c_str());
-}
-
-
-bool StatelessValidation::PreCallValidateCreateInstance(
- const VkInstanceCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkInstance* pInstance) {
- bool skip = false;
- skip |= validate_struct_type("vkCreateInstance", "pCreateInfo", "VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO, true, "VUID-vkCreateInstance-pCreateInfo-parameter", "VUID-VkInstanceCreateInfo-sType-sType");
- if (pCreateInfo != NULL)
- {
- const VkStructureType allowed_structs_VkInstanceCreateInfo[] = { VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT, VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT, VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT, VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT };
-
- skip |= validate_struct_pnext("vkCreateInstance", "pCreateInfo->pNext", "VkDebugReportCallbackCreateInfoEXT, VkDebugUtilsMessengerCreateInfoEXT, VkValidationFeaturesEXT, VkValidationFlagsEXT", pCreateInfo->pNext, ARRAY_SIZE(allowed_structs_VkInstanceCreateInfo), allowed_structs_VkInstanceCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkInstanceCreateInfo-pNext-pNext");
-
- skip |= validate_reserved_flags("vkCreateInstance", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkInstanceCreateInfo-flags-zerobitmask");
-
- skip |= validate_struct_type("vkCreateInstance", "pCreateInfo->pApplicationInfo", "VK_STRUCTURE_TYPE_APPLICATION_INFO", pCreateInfo->pApplicationInfo, VK_STRUCTURE_TYPE_APPLICATION_INFO, false, "VUID-VkInstanceCreateInfo-pApplicationInfo-parameter", "VUID-VkApplicationInfo-sType-sType");
-
- if (pCreateInfo->pApplicationInfo != NULL)
- {
- skip |= validate_struct_pnext("vkCreateInstance", "pCreateInfo->pApplicationInfo->pNext", NULL, pCreateInfo->pApplicationInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkApplicationInfo-pNext-pNext");
- }
-
- skip |= validate_string_array("vkCreateInstance", "pCreateInfo->enabledLayerCount", "pCreateInfo->ppEnabledLayerNames", pCreateInfo->enabledLayerCount, pCreateInfo->ppEnabledLayerNames, false, true, kVUIDUndefined, "VUID-VkInstanceCreateInfo-ppEnabledLayerNames-parameter");
-
- skip |= validate_string_array("vkCreateInstance", "pCreateInfo->enabledExtensionCount", "pCreateInfo->ppEnabledExtensionNames", pCreateInfo->enabledExtensionCount, pCreateInfo->ppEnabledExtensionNames, false, true, kVUIDUndefined, "VUID-VkInstanceCreateInfo-ppEnabledExtensionNames-parameter");
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreateInstance", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreateInstance", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreateInstance", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreateInstance", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreateInstance", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkCreateInstance", "pInstance", pInstance, "VUID-vkCreateInstance-pInstance-parameter");
- if (!skip) skip |= manual_PreCallValidateCreateInstance(pCreateInfo, pAllocator, pInstance);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateDestroyInstance(
- VkInstance instance,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkDestroyInstance", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkDestroyInstance", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkDestroyInstance", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkDestroyInstance", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkDestroyInstance", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateEnumeratePhysicalDevices(
- VkInstance instance,
- uint32_t* pPhysicalDeviceCount,
- VkPhysicalDevice* pPhysicalDevices) {
- bool skip = false;
- skip |= validate_array("vkEnumeratePhysicalDevices", "pPhysicalDeviceCount", "pPhysicalDevices", pPhysicalDeviceCount, &pPhysicalDevices, true, false, false, kVUIDUndefined, "VUID-vkEnumeratePhysicalDevices-pPhysicalDevices-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceFeatures(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceFeatures* pFeatures) {
- bool skip = false;
- skip |= validate_required_pointer("vkGetPhysicalDeviceFeatures", "pFeatures", pFeatures, "VUID-vkGetPhysicalDeviceFeatures-pFeatures-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceFormatProperties(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkFormatProperties* pFormatProperties) {
- bool skip = false;
- skip |= validate_ranged_enum("vkGetPhysicalDeviceFormatProperties", "format", "VkFormat", AllVkFormatEnums, format, "VUID-vkGetPhysicalDeviceFormatProperties-format-parameter");
- skip |= validate_required_pointer("vkGetPhysicalDeviceFormatProperties", "pFormatProperties", pFormatProperties, "VUID-vkGetPhysicalDeviceFormatProperties-pFormatProperties-parameter");
- if (pFormatProperties != NULL)
- {
- // No xml-driven validation
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceImageFormatProperties(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkImageType type,
- VkImageTiling tiling,
- VkImageUsageFlags usage,
- VkImageCreateFlags flags,
- VkImageFormatProperties* pImageFormatProperties) {
- bool skip = false;
- skip |= validate_ranged_enum("vkGetPhysicalDeviceImageFormatProperties", "format", "VkFormat", AllVkFormatEnums, format, "VUID-vkGetPhysicalDeviceImageFormatProperties-format-parameter");
- skip |= validate_ranged_enum("vkGetPhysicalDeviceImageFormatProperties", "type", "VkImageType", AllVkImageTypeEnums, type, "VUID-vkGetPhysicalDeviceImageFormatProperties-type-parameter");
- skip |= validate_ranged_enum("vkGetPhysicalDeviceImageFormatProperties", "tiling", "VkImageTiling", AllVkImageTilingEnums, tiling, "VUID-vkGetPhysicalDeviceImageFormatProperties-tiling-parameter");
- skip |= validate_flags("vkGetPhysicalDeviceImageFormatProperties", "usage", "VkImageUsageFlagBits", AllVkImageUsageFlagBits, usage, kRequiredFlags, "VUID-vkGetPhysicalDeviceImageFormatProperties-usage-parameter", "VUID-vkGetPhysicalDeviceImageFormatProperties-usage-requiredbitmask");
- skip |= validate_flags("vkGetPhysicalDeviceImageFormatProperties", "flags", "VkImageCreateFlagBits", AllVkImageCreateFlagBits, flags, kOptionalFlags, "VUID-vkGetPhysicalDeviceImageFormatProperties-flags-parameter");
- skip |= validate_required_pointer("vkGetPhysicalDeviceImageFormatProperties", "pImageFormatProperties", pImageFormatProperties, "VUID-vkGetPhysicalDeviceImageFormatProperties-pImageFormatProperties-parameter");
- if (pImageFormatProperties != NULL)
- {
- // No xml-driven validation
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceProperties(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceProperties* pProperties) {
- bool skip = false;
- skip |= validate_required_pointer("vkGetPhysicalDeviceProperties", "pProperties", pProperties, "VUID-vkGetPhysicalDeviceProperties-pProperties-parameter");
- if (pProperties != NULL)
- {
- // No xml-driven validation
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceQueueFamilyProperties(
- VkPhysicalDevice physicalDevice,
- uint32_t* pQueueFamilyPropertyCount,
- VkQueueFamilyProperties* pQueueFamilyProperties) {
- bool skip = false;
- skip |= validate_array("vkGetPhysicalDeviceQueueFamilyProperties", "pQueueFamilyPropertyCount", "pQueueFamilyProperties", pQueueFamilyPropertyCount, &pQueueFamilyProperties, true, false, false, kVUIDUndefined, "VUID-vkGetPhysicalDeviceQueueFamilyProperties-pQueueFamilyProperties-parameter");
- if (pQueueFamilyProperties != NULL)
- {
- for (uint32_t pQueueFamilyPropertyIndex = 0; pQueueFamilyPropertyIndex < *pQueueFamilyPropertyCount; ++pQueueFamilyPropertyIndex)
- {
- // No xml-driven validation
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceMemoryProperties(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceMemoryProperties* pMemoryProperties) {
- bool skip = false;
- skip |= validate_required_pointer("vkGetPhysicalDeviceMemoryProperties", "pMemoryProperties", pMemoryProperties, "VUID-vkGetPhysicalDeviceMemoryProperties-pMemoryProperties-parameter");
- if (pMemoryProperties != NULL)
- {
- // No xml-driven validation
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCreateDevice(
- VkPhysicalDevice physicalDevice,
- const VkDeviceCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDevice* pDevice) {
- bool skip = false;
- skip |= validate_struct_type("vkCreateDevice", "pCreateInfo", "VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO, true, "VUID-vkCreateDevice-pCreateInfo-parameter", "VUID-VkDeviceCreateInfo-sType-sType");
- if (pCreateInfo != NULL)
- {
- const VkStructureType allowed_structs_VkDeviceCreateInfo[] = { VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO, VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT };
-
- skip |= validate_struct_pnext("vkCreateDevice", "pCreateInfo->pNext", "VkDeviceGroupDeviceCreateInfo, VkDeviceMemoryOverallocationCreateInfoAMD, VkPhysicalDevice16BitStorageFeatures, VkPhysicalDevice8BitStorageFeaturesKHR, VkPhysicalDeviceASTCDecodeFeaturesEXT, VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT, VkPhysicalDeviceBufferDeviceAddressFeaturesEXT, VkPhysicalDeviceCoherentMemoryFeaturesAMD, VkPhysicalDeviceComputeShaderDerivativesFeaturesNV, VkPhysicalDeviceConditionalRenderingFeaturesEXT, VkPhysicalDeviceCooperativeMatrixFeaturesNV, VkPhysicalDeviceCornerSampledImageFeaturesNV, VkPhysicalDeviceCoverageReductionModeFeaturesNV, VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV, VkPhysicalDeviceDepthClipEnableFeaturesEXT, VkPhysicalDeviceDescriptorIndexingFeaturesEXT, VkPhysicalDeviceExclusiveScissorFeaturesNV, VkPhysicalDeviceFeatures2, VkPhysicalDeviceFragmentDensityMapFeaturesEXT, VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV, VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT, VkPhysicalDeviceHostQueryResetFeaturesEXT, VkPhysicalDeviceImagelessFramebufferFeaturesKHR, VkPhysicalDeviceIndexTypeUint8FeaturesEXT, VkPhysicalDeviceInlineUniformBlockFeaturesEXT, VkPhysicalDeviceLineRasterizationFeaturesEXT, VkPhysicalDeviceMemoryPriorityFeaturesEXT, VkPhysicalDeviceMeshShaderFeaturesNV, VkPhysicalDeviceMultiviewFeatures, VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR, VkPhysicalDeviceProtectedMemoryFeatures, VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV, VkPhysicalDeviceSamplerYcbcrConversionFeatures, VkPhysicalDeviceScalarBlockLayoutFeaturesEXT, VkPhysicalDeviceShaderAtomicInt64FeaturesKHR, VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT, VkPhysicalDeviceShaderDrawParametersFeatures, VkPhysicalDeviceShaderFloat16Int8FeaturesKHR, VkPhysicalDeviceShaderImageFootprintFeaturesNV, VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL, VkPhysicalDeviceShaderSMBuiltinsFeaturesNV, VkPhysicalDeviceShadingRateImageFeaturesNV, VkPhysicalDeviceSubgroupSizeControlFeaturesEXT, VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT, VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT, VkPhysicalDeviceTransformFeedbackFeaturesEXT, VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR, VkPhysicalDeviceVariablePointersFeatures, VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT, VkPhysicalDeviceVulkanMemoryModelFeaturesKHR, VkPhysicalDeviceYcbcrImageArraysFeaturesEXT", pCreateInfo->pNext, ARRAY_SIZE(allowed_structs_VkDeviceCreateInfo), allowed_structs_VkDeviceCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkDeviceCreateInfo-pNext-pNext");
-
- skip |= validate_reserved_flags("vkCreateDevice", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkDeviceCreateInfo-flags-zerobitmask");
-
- skip |= validate_struct_type_array("vkCreateDevice", "pCreateInfo->queueCreateInfoCount", "pCreateInfo->pQueueCreateInfos", "VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO", pCreateInfo->queueCreateInfoCount, pCreateInfo->pQueueCreateInfos, VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO, true, true, "VUID-VkDeviceQueueCreateInfo-sType-sType", "VUID-VkDeviceCreateInfo-pQueueCreateInfos-parameter", "VUID-VkDeviceCreateInfo-queueCreateInfoCount-arraylength");
-
- if (pCreateInfo->pQueueCreateInfos != NULL)
- {
- for (uint32_t queueCreateInfoIndex = 0; queueCreateInfoIndex < pCreateInfo->queueCreateInfoCount; ++queueCreateInfoIndex)
- {
- const VkStructureType allowed_structs_VkDeviceQueueCreateInfo[] = { VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT };
-
- skip |= validate_struct_pnext("vkCreateDevice", ParameterName("pCreateInfo->pQueueCreateInfos[%i].pNext", ParameterName::IndexVector{ queueCreateInfoIndex }), "VkDeviceQueueGlobalPriorityCreateInfoEXT", pCreateInfo->pQueueCreateInfos[queueCreateInfoIndex].pNext, ARRAY_SIZE(allowed_structs_VkDeviceQueueCreateInfo), allowed_structs_VkDeviceQueueCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkDeviceQueueCreateInfo-pNext-pNext");
-
- skip |= validate_flags("vkCreateDevice", ParameterName("pCreateInfo->pQueueCreateInfos[%i].flags", ParameterName::IndexVector{ queueCreateInfoIndex }), "VkDeviceQueueCreateFlagBits", AllVkDeviceQueueCreateFlagBits, pCreateInfo->pQueueCreateInfos[queueCreateInfoIndex].flags, kOptionalFlags, "VUID-VkDeviceQueueCreateInfo-flags-parameter");
-
- skip |= validate_array("vkCreateDevice", ParameterName("pCreateInfo->pQueueCreateInfos[%i].queueCount", ParameterName::IndexVector{ queueCreateInfoIndex }), ParameterName("pCreateInfo->pQueueCreateInfos[%i].pQueuePriorities", ParameterName::IndexVector{ queueCreateInfoIndex }), pCreateInfo->pQueueCreateInfos[queueCreateInfoIndex].queueCount, &pCreateInfo->pQueueCreateInfos[queueCreateInfoIndex].pQueuePriorities, true, true, "VUID-VkDeviceQueueCreateInfo-queueCount-arraylength", "VUID-VkDeviceQueueCreateInfo-pQueuePriorities-parameter");
- }
- }
-
- skip |= validate_string_array("vkCreateDevice", "pCreateInfo->enabledLayerCount", "pCreateInfo->ppEnabledLayerNames", pCreateInfo->enabledLayerCount, pCreateInfo->ppEnabledLayerNames, false, true, kVUIDUndefined, "VUID-VkDeviceCreateInfo-ppEnabledLayerNames-parameter");
-
- skip |= validate_string_array("vkCreateDevice", "pCreateInfo->enabledExtensionCount", "pCreateInfo->ppEnabledExtensionNames", pCreateInfo->enabledExtensionCount, pCreateInfo->ppEnabledExtensionNames, false, true, kVUIDUndefined, "VUID-VkDeviceCreateInfo-ppEnabledExtensionNames-parameter");
-
- if (pCreateInfo->pEnabledFeatures != NULL)
- {
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->robustBufferAccess", pCreateInfo->pEnabledFeatures->robustBufferAccess);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->fullDrawIndexUint32", pCreateInfo->pEnabledFeatures->fullDrawIndexUint32);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->imageCubeArray", pCreateInfo->pEnabledFeatures->imageCubeArray);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->independentBlend", pCreateInfo->pEnabledFeatures->independentBlend);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->geometryShader", pCreateInfo->pEnabledFeatures->geometryShader);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->tessellationShader", pCreateInfo->pEnabledFeatures->tessellationShader);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->sampleRateShading", pCreateInfo->pEnabledFeatures->sampleRateShading);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->dualSrcBlend", pCreateInfo->pEnabledFeatures->dualSrcBlend);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->logicOp", pCreateInfo->pEnabledFeatures->logicOp);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->multiDrawIndirect", pCreateInfo->pEnabledFeatures->multiDrawIndirect);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->drawIndirectFirstInstance", pCreateInfo->pEnabledFeatures->drawIndirectFirstInstance);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->depthClamp", pCreateInfo->pEnabledFeatures->depthClamp);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->depthBiasClamp", pCreateInfo->pEnabledFeatures->depthBiasClamp);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->fillModeNonSolid", pCreateInfo->pEnabledFeatures->fillModeNonSolid);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->depthBounds", pCreateInfo->pEnabledFeatures->depthBounds);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->wideLines", pCreateInfo->pEnabledFeatures->wideLines);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->largePoints", pCreateInfo->pEnabledFeatures->largePoints);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->alphaToOne", pCreateInfo->pEnabledFeatures->alphaToOne);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->multiViewport", pCreateInfo->pEnabledFeatures->multiViewport);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->samplerAnisotropy", pCreateInfo->pEnabledFeatures->samplerAnisotropy);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->textureCompressionETC2", pCreateInfo->pEnabledFeatures->textureCompressionETC2);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->textureCompressionASTC_LDR", pCreateInfo->pEnabledFeatures->textureCompressionASTC_LDR);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->textureCompressionBC", pCreateInfo->pEnabledFeatures->textureCompressionBC);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->occlusionQueryPrecise", pCreateInfo->pEnabledFeatures->occlusionQueryPrecise);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->pipelineStatisticsQuery", pCreateInfo->pEnabledFeatures->pipelineStatisticsQuery);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->vertexPipelineStoresAndAtomics", pCreateInfo->pEnabledFeatures->vertexPipelineStoresAndAtomics);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->fragmentStoresAndAtomics", pCreateInfo->pEnabledFeatures->fragmentStoresAndAtomics);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->shaderTessellationAndGeometryPointSize", pCreateInfo->pEnabledFeatures->shaderTessellationAndGeometryPointSize);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->shaderImageGatherExtended", pCreateInfo->pEnabledFeatures->shaderImageGatherExtended);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->shaderStorageImageExtendedFormats", pCreateInfo->pEnabledFeatures->shaderStorageImageExtendedFormats);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->shaderStorageImageMultisample", pCreateInfo->pEnabledFeatures->shaderStorageImageMultisample);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->shaderStorageImageReadWithoutFormat", pCreateInfo->pEnabledFeatures->shaderStorageImageReadWithoutFormat);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->shaderStorageImageWriteWithoutFormat", pCreateInfo->pEnabledFeatures->shaderStorageImageWriteWithoutFormat);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->shaderUniformBufferArrayDynamicIndexing", pCreateInfo->pEnabledFeatures->shaderUniformBufferArrayDynamicIndexing);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->shaderSampledImageArrayDynamicIndexing", pCreateInfo->pEnabledFeatures->shaderSampledImageArrayDynamicIndexing);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->shaderStorageBufferArrayDynamicIndexing", pCreateInfo->pEnabledFeatures->shaderStorageBufferArrayDynamicIndexing);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->shaderStorageImageArrayDynamicIndexing", pCreateInfo->pEnabledFeatures->shaderStorageImageArrayDynamicIndexing);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->shaderClipDistance", pCreateInfo->pEnabledFeatures->shaderClipDistance);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->shaderCullDistance", pCreateInfo->pEnabledFeatures->shaderCullDistance);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->shaderFloat64", pCreateInfo->pEnabledFeatures->shaderFloat64);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->shaderInt64", pCreateInfo->pEnabledFeatures->shaderInt64);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->shaderInt16", pCreateInfo->pEnabledFeatures->shaderInt16);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->shaderResourceResidency", pCreateInfo->pEnabledFeatures->shaderResourceResidency);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->shaderResourceMinLod", pCreateInfo->pEnabledFeatures->shaderResourceMinLod);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->sparseBinding", pCreateInfo->pEnabledFeatures->sparseBinding);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->sparseResidencyBuffer", pCreateInfo->pEnabledFeatures->sparseResidencyBuffer);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->sparseResidencyImage2D", pCreateInfo->pEnabledFeatures->sparseResidencyImage2D);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->sparseResidencyImage3D", pCreateInfo->pEnabledFeatures->sparseResidencyImage3D);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->sparseResidency2Samples", pCreateInfo->pEnabledFeatures->sparseResidency2Samples);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->sparseResidency4Samples", pCreateInfo->pEnabledFeatures->sparseResidency4Samples);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->sparseResidency8Samples", pCreateInfo->pEnabledFeatures->sparseResidency8Samples);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->sparseResidency16Samples", pCreateInfo->pEnabledFeatures->sparseResidency16Samples);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->sparseResidencyAliased", pCreateInfo->pEnabledFeatures->sparseResidencyAliased);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->variableMultisampleRate", pCreateInfo->pEnabledFeatures->variableMultisampleRate);
-
- skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->inheritedQueries", pCreateInfo->pEnabledFeatures->inheritedQueries);
- }
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreateDevice", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreateDevice", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreateDevice", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreateDevice", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreateDevice", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkCreateDevice", "pDevice", pDevice, "VUID-vkCreateDevice-pDevice-parameter");
- if (!skip) skip |= manual_PreCallValidateCreateDevice(physicalDevice, pCreateInfo, pAllocator, pDevice);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateDestroyDevice(
- VkDevice device,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkDestroyDevice", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkDestroyDevice", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkDestroyDevice", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkDestroyDevice", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkDestroyDevice", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetDeviceQueue(
- VkDevice device,
- uint32_t queueFamilyIndex,
- uint32_t queueIndex,
- VkQueue* pQueue) {
- bool skip = false;
- skip |= validate_required_pointer("vkGetDeviceQueue", "pQueue", pQueue, "VUID-vkGetDeviceQueue-pQueue-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateQueueSubmit(
- VkQueue queue,
- uint32_t submitCount,
- const VkSubmitInfo* pSubmits,
- VkFence fence) {
- bool skip = false;
- skip |= validate_struct_type_array("vkQueueSubmit", "submitCount", "pSubmits", "VK_STRUCTURE_TYPE_SUBMIT_INFO", submitCount, pSubmits, VK_STRUCTURE_TYPE_SUBMIT_INFO, false, true, "VUID-VkSubmitInfo-sType-sType", "VUID-vkQueueSubmit-pSubmits-parameter", kVUIDUndefined);
- if (pSubmits != NULL)
- {
- for (uint32_t submitIndex = 0; submitIndex < submitCount; ++submitIndex)
- {
- const VkStructureType allowed_structs_VkSubmitInfo[] = { VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR, VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO, VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO, VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR, VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV };
-
- skip |= validate_struct_pnext("vkQueueSubmit", ParameterName("pSubmits[%i].pNext", ParameterName::IndexVector{ submitIndex }), "VkD3D12FenceSubmitInfoKHR, VkDeviceGroupSubmitInfo, VkProtectedSubmitInfo, VkWin32KeyedMutexAcquireReleaseInfoKHR, VkWin32KeyedMutexAcquireReleaseInfoNV", pSubmits[submitIndex].pNext, ARRAY_SIZE(allowed_structs_VkSubmitInfo), allowed_structs_VkSubmitInfo, GeneratedVulkanHeaderVersion, "VUID-VkSubmitInfo-pNext-pNext");
-
- skip |= validate_array("vkQueueSubmit", ParameterName("pSubmits[%i].waitSemaphoreCount", ParameterName::IndexVector{ submitIndex }), ParameterName("pSubmits[%i].pWaitSemaphores", ParameterName::IndexVector{ submitIndex }), pSubmits[submitIndex].waitSemaphoreCount, &pSubmits[submitIndex].pWaitSemaphores, false, true, kVUIDUndefined, "VUID-VkSubmitInfo-pWaitSemaphores-parameter");
-
- skip |= validate_flags_array("vkQueueSubmit", ParameterName("pSubmits[%i].waitSemaphoreCount", ParameterName::IndexVector{ submitIndex }), ParameterName("pSubmits[%i].pWaitDstStageMask", ParameterName::IndexVector{ submitIndex }), "VkPipelineStageFlagBits", AllVkPipelineStageFlagBits, pSubmits[submitIndex].waitSemaphoreCount, pSubmits[submitIndex].pWaitDstStageMask, false, true);
-
- skip |= validate_array("vkQueueSubmit", ParameterName("pSubmits[%i].commandBufferCount", ParameterName::IndexVector{ submitIndex }), ParameterName("pSubmits[%i].pCommandBuffers", ParameterName::IndexVector{ submitIndex }), pSubmits[submitIndex].commandBufferCount, &pSubmits[submitIndex].pCommandBuffers, false, true, kVUIDUndefined, "VUID-VkSubmitInfo-pCommandBuffers-parameter");
-
- skip |= validate_array("vkQueueSubmit", ParameterName("pSubmits[%i].signalSemaphoreCount", ParameterName::IndexVector{ submitIndex }), ParameterName("pSubmits[%i].pSignalSemaphores", ParameterName::IndexVector{ submitIndex }), pSubmits[submitIndex].signalSemaphoreCount, &pSubmits[submitIndex].pSignalSemaphores, false, true, kVUIDUndefined, "VUID-VkSubmitInfo-pSignalSemaphores-parameter");
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateQueueWaitIdle(
- VkQueue queue) {
- bool skip = false;
- // No xml-driven validation
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateDeviceWaitIdle(
- VkDevice device) {
- bool skip = false;
- // No xml-driven validation
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateAllocateMemory(
- VkDevice device,
- const VkMemoryAllocateInfo* pAllocateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDeviceMemory* pMemory) {
- bool skip = false;
- skip |= validate_struct_type("vkAllocateMemory", "pAllocateInfo", "VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO", pAllocateInfo, VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, true, "VUID-vkAllocateMemory-pAllocateInfo-parameter", "VUID-VkMemoryAllocateInfo-sType-sType");
- if (pAllocateInfo != NULL)
- {
- const VkStructureType allowed_structs_VkMemoryAllocateInfo[] = { VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV, VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO, VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV, VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR, VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV, VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID, VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR, VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT, VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR, VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV, VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO, VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO, VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT };
-
- skip |= validate_struct_pnext("vkAllocateMemory", "pAllocateInfo->pNext", "VkDedicatedAllocationMemoryAllocateInfoNV, VkExportMemoryAllocateInfo, VkExportMemoryAllocateInfoNV, VkExportMemoryWin32HandleInfoKHR, VkExportMemoryWin32HandleInfoNV, VkImportAndroidHardwareBufferInfoANDROID, VkImportMemoryFdInfoKHR, VkImportMemoryHostPointerInfoEXT, VkImportMemoryWin32HandleInfoKHR, VkImportMemoryWin32HandleInfoNV, VkMemoryAllocateFlagsInfo, VkMemoryDedicatedAllocateInfo, VkMemoryPriorityAllocateInfoEXT", pAllocateInfo->pNext, ARRAY_SIZE(allowed_structs_VkMemoryAllocateInfo), allowed_structs_VkMemoryAllocateInfo, GeneratedVulkanHeaderVersion, "VUID-VkMemoryAllocateInfo-pNext-pNext");
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkAllocateMemory", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkAllocateMemory", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkAllocateMemory", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkAllocateMemory", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkAllocateMemory", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkAllocateMemory", "pMemory", pMemory, "VUID-vkAllocateMemory-pMemory-parameter");
- if (!skip) skip |= manual_PreCallValidateAllocateMemory(device, pAllocateInfo, pAllocator, pMemory);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateFreeMemory(
- VkDevice device,
- VkDeviceMemory memory,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkFreeMemory", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkFreeMemory", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkFreeMemory", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkFreeMemory", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkFreeMemory", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateMapMemory(
- VkDevice device,
- VkDeviceMemory memory,
- VkDeviceSize offset,
- VkDeviceSize size,
- VkMemoryMapFlags flags,
- void** ppData) {
- bool skip = false;
- skip |= validate_required_handle("vkMapMemory", "memory", memory);
- skip |= validate_reserved_flags("vkMapMemory", "flags", flags, "VUID-vkMapMemory-flags-zerobitmask");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateUnmapMemory(
- VkDevice device,
- VkDeviceMemory memory) {
- bool skip = false;
- skip |= validate_required_handle("vkUnmapMemory", "memory", memory);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateFlushMappedMemoryRanges(
- VkDevice device,
- uint32_t memoryRangeCount,
- const VkMappedMemoryRange* pMemoryRanges) {
- bool skip = false;
- skip |= validate_struct_type_array("vkFlushMappedMemoryRanges", "memoryRangeCount", "pMemoryRanges", "VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE", memoryRangeCount, pMemoryRanges, VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE, true, true, "VUID-VkMappedMemoryRange-sType-sType", "VUID-vkFlushMappedMemoryRanges-pMemoryRanges-parameter", "VUID-vkFlushMappedMemoryRanges-memoryRangeCount-arraylength");
- if (pMemoryRanges != NULL)
- {
- for (uint32_t memoryRangeIndex = 0; memoryRangeIndex < memoryRangeCount; ++memoryRangeIndex)
- {
- skip |= validate_struct_pnext("vkFlushMappedMemoryRanges", ParameterName("pMemoryRanges[%i].pNext", ParameterName::IndexVector{ memoryRangeIndex }), NULL, pMemoryRanges[memoryRangeIndex].pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkMappedMemoryRange-pNext-pNext");
-
- skip |= validate_required_handle("vkFlushMappedMemoryRanges", ParameterName("pMemoryRanges[%i].memory", ParameterName::IndexVector{ memoryRangeIndex }), pMemoryRanges[memoryRangeIndex].memory);
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateInvalidateMappedMemoryRanges(
- VkDevice device,
- uint32_t memoryRangeCount,
- const VkMappedMemoryRange* pMemoryRanges) {
- bool skip = false;
- skip |= validate_struct_type_array("vkInvalidateMappedMemoryRanges", "memoryRangeCount", "pMemoryRanges", "VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE", memoryRangeCount, pMemoryRanges, VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE, true, true, "VUID-VkMappedMemoryRange-sType-sType", "VUID-vkInvalidateMappedMemoryRanges-pMemoryRanges-parameter", "VUID-vkInvalidateMappedMemoryRanges-memoryRangeCount-arraylength");
- if (pMemoryRanges != NULL)
- {
- for (uint32_t memoryRangeIndex = 0; memoryRangeIndex < memoryRangeCount; ++memoryRangeIndex)
- {
- skip |= validate_struct_pnext("vkInvalidateMappedMemoryRanges", ParameterName("pMemoryRanges[%i].pNext", ParameterName::IndexVector{ memoryRangeIndex }), NULL, pMemoryRanges[memoryRangeIndex].pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkMappedMemoryRange-pNext-pNext");
-
- skip |= validate_required_handle("vkInvalidateMappedMemoryRanges", ParameterName("pMemoryRanges[%i].memory", ParameterName::IndexVector{ memoryRangeIndex }), pMemoryRanges[memoryRangeIndex].memory);
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetDeviceMemoryCommitment(
- VkDevice device,
- VkDeviceMemory memory,
- VkDeviceSize* pCommittedMemoryInBytes) {
- bool skip = false;
- skip |= validate_required_handle("vkGetDeviceMemoryCommitment", "memory", memory);
- skip |= validate_required_pointer("vkGetDeviceMemoryCommitment", "pCommittedMemoryInBytes", pCommittedMemoryInBytes, "VUID-vkGetDeviceMemoryCommitment-pCommittedMemoryInBytes-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateBindBufferMemory(
- VkDevice device,
- VkBuffer buffer,
- VkDeviceMemory memory,
- VkDeviceSize memoryOffset) {
- bool skip = false;
- skip |= validate_required_handle("vkBindBufferMemory", "buffer", buffer);
- skip |= validate_required_handle("vkBindBufferMemory", "memory", memory);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateBindImageMemory(
- VkDevice device,
- VkImage image,
- VkDeviceMemory memory,
- VkDeviceSize memoryOffset) {
- bool skip = false;
- skip |= validate_required_handle("vkBindImageMemory", "image", image);
- skip |= validate_required_handle("vkBindImageMemory", "memory", memory);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetBufferMemoryRequirements(
- VkDevice device,
- VkBuffer buffer,
- VkMemoryRequirements* pMemoryRequirements) {
- bool skip = false;
- skip |= validate_required_handle("vkGetBufferMemoryRequirements", "buffer", buffer);
- skip |= validate_required_pointer("vkGetBufferMemoryRequirements", "pMemoryRequirements", pMemoryRequirements, "VUID-vkGetBufferMemoryRequirements-pMemoryRequirements-parameter");
- if (pMemoryRequirements != NULL)
- {
- // No xml-driven validation
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetImageMemoryRequirements(
- VkDevice device,
- VkImage image,
- VkMemoryRequirements* pMemoryRequirements) {
- bool skip = false;
- skip |= validate_required_handle("vkGetImageMemoryRequirements", "image", image);
- skip |= validate_required_pointer("vkGetImageMemoryRequirements", "pMemoryRequirements", pMemoryRequirements, "VUID-vkGetImageMemoryRequirements-pMemoryRequirements-parameter");
- if (pMemoryRequirements != NULL)
- {
- // No xml-driven validation
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetImageSparseMemoryRequirements(
- VkDevice device,
- VkImage image,
- uint32_t* pSparseMemoryRequirementCount,
- VkSparseImageMemoryRequirements* pSparseMemoryRequirements) {
- bool skip = false;
- skip |= validate_required_handle("vkGetImageSparseMemoryRequirements", "image", image);
- skip |= validate_array("vkGetImageSparseMemoryRequirements", "pSparseMemoryRequirementCount", "pSparseMemoryRequirements", pSparseMemoryRequirementCount, &pSparseMemoryRequirements, true, false, false, kVUIDUndefined, "VUID-vkGetImageSparseMemoryRequirements-pSparseMemoryRequirements-parameter");
- if (pSparseMemoryRequirements != NULL)
- {
- for (uint32_t pSparseMemoryRequirementIndex = 0; pSparseMemoryRequirementIndex < *pSparseMemoryRequirementCount; ++pSparseMemoryRequirementIndex)
- {
- // No xml-driven validation
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceSparseImageFormatProperties(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkImageType type,
- VkSampleCountFlagBits samples,
- VkImageUsageFlags usage,
- VkImageTiling tiling,
- uint32_t* pPropertyCount,
- VkSparseImageFormatProperties* pProperties) {
- bool skip = false;
- skip |= validate_ranged_enum("vkGetPhysicalDeviceSparseImageFormatProperties", "format", "VkFormat", AllVkFormatEnums, format, "VUID-vkGetPhysicalDeviceSparseImageFormatProperties-format-parameter");
- skip |= validate_ranged_enum("vkGetPhysicalDeviceSparseImageFormatProperties", "type", "VkImageType", AllVkImageTypeEnums, type, "VUID-vkGetPhysicalDeviceSparseImageFormatProperties-type-parameter");
- skip |= validate_flags("vkGetPhysicalDeviceSparseImageFormatProperties", "samples", "VkSampleCountFlagBits", AllVkSampleCountFlagBits, samples, kRequiredSingleBit, "VUID-vkGetPhysicalDeviceSparseImageFormatProperties-samples-parameter", "VUID-vkGetPhysicalDeviceSparseImageFormatProperties-samples-parameter");
- skip |= validate_flags("vkGetPhysicalDeviceSparseImageFormatProperties", "usage", "VkImageUsageFlagBits", AllVkImageUsageFlagBits, usage, kRequiredFlags, "VUID-vkGetPhysicalDeviceSparseImageFormatProperties-usage-parameter", "VUID-vkGetPhysicalDeviceSparseImageFormatProperties-usage-requiredbitmask");
- skip |= validate_ranged_enum("vkGetPhysicalDeviceSparseImageFormatProperties", "tiling", "VkImageTiling", AllVkImageTilingEnums, tiling, "VUID-vkGetPhysicalDeviceSparseImageFormatProperties-tiling-parameter");
- skip |= validate_array("vkGetPhysicalDeviceSparseImageFormatProperties", "pPropertyCount", "pProperties", pPropertyCount, &pProperties, true, false, false, kVUIDUndefined, "VUID-vkGetPhysicalDeviceSparseImageFormatProperties-pProperties-parameter");
- if (pProperties != NULL)
- {
- for (uint32_t pPropertyIndex = 0; pPropertyIndex < *pPropertyCount; ++pPropertyIndex)
- {
- // No xml-driven validation
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateQueueBindSparse(
- VkQueue queue,
- uint32_t bindInfoCount,
- const VkBindSparseInfo* pBindInfo,
- VkFence fence) {
- bool skip = false;
- skip |= validate_struct_type_array("vkQueueBindSparse", "bindInfoCount", "pBindInfo", "VK_STRUCTURE_TYPE_BIND_SPARSE_INFO", bindInfoCount, pBindInfo, VK_STRUCTURE_TYPE_BIND_SPARSE_INFO, false, true, "VUID-VkBindSparseInfo-sType-sType", "VUID-vkQueueBindSparse-pBindInfo-parameter", kVUIDUndefined);
- if (pBindInfo != NULL)
- {
- for (uint32_t bindInfoIndex = 0; bindInfoIndex < bindInfoCount; ++bindInfoIndex)
- {
- const VkStructureType allowed_structs_VkBindSparseInfo[] = { VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO };
-
- skip |= validate_struct_pnext("vkQueueBindSparse", ParameterName("pBindInfo[%i].pNext", ParameterName::IndexVector{ bindInfoIndex }), "VkDeviceGroupBindSparseInfo", pBindInfo[bindInfoIndex].pNext, ARRAY_SIZE(allowed_structs_VkBindSparseInfo), allowed_structs_VkBindSparseInfo, GeneratedVulkanHeaderVersion, "VUID-VkBindSparseInfo-pNext-pNext");
-
- skip |= validate_array("vkQueueBindSparse", ParameterName("pBindInfo[%i].waitSemaphoreCount", ParameterName::IndexVector{ bindInfoIndex }), ParameterName("pBindInfo[%i].pWaitSemaphores", ParameterName::IndexVector{ bindInfoIndex }), pBindInfo[bindInfoIndex].waitSemaphoreCount, &pBindInfo[bindInfoIndex].pWaitSemaphores, false, true, kVUIDUndefined, "VUID-VkBindSparseInfo-pWaitSemaphores-parameter");
-
- skip |= validate_array("vkQueueBindSparse", ParameterName("pBindInfo[%i].bufferBindCount", ParameterName::IndexVector{ bindInfoIndex }), ParameterName("pBindInfo[%i].pBufferBinds", ParameterName::IndexVector{ bindInfoIndex }), pBindInfo[bindInfoIndex].bufferBindCount, &pBindInfo[bindInfoIndex].pBufferBinds, false, true, kVUIDUndefined, "VUID-VkBindSparseInfo-pBufferBinds-parameter");
-
- if (pBindInfo[bindInfoIndex].pBufferBinds != NULL)
- {
- for (uint32_t bufferBindIndex = 0; bufferBindIndex < pBindInfo[bindInfoIndex].bufferBindCount; ++bufferBindIndex)
- {
- skip |= validate_required_handle("vkQueueBindSparse", ParameterName("pBindInfo[%i].pBufferBinds[%i].buffer", ParameterName::IndexVector{ bindInfoIndex, bufferBindIndex }), pBindInfo[bindInfoIndex].pBufferBinds[bufferBindIndex].buffer);
-
- skip |= validate_array("vkQueueBindSparse", ParameterName("pBindInfo[%i].pBufferBinds[%i].bindCount", ParameterName::IndexVector{ bindInfoIndex, bufferBindIndex }), ParameterName("pBindInfo[%i].pBufferBinds[%i].pBinds", ParameterName::IndexVector{ bindInfoIndex, bufferBindIndex }), pBindInfo[bindInfoIndex].pBufferBinds[bufferBindIndex].bindCount, &pBindInfo[bindInfoIndex].pBufferBinds[bufferBindIndex].pBinds, true, true, "VUID-VkSparseBufferMemoryBindInfo-bindCount-arraylength", "VUID-VkSparseBufferMemoryBindInfo-pBinds-parameter");
-
- if (pBindInfo[bindInfoIndex].pBufferBinds[bufferBindIndex].pBinds != NULL)
- {
- for (uint32_t bindIndex = 0; bindIndex < pBindInfo[bindInfoIndex].pBufferBinds[bufferBindIndex].bindCount; ++bindIndex)
- {
- skip |= validate_flags("vkQueueBindSparse", ParameterName("pBindInfo[%i].pBufferBinds[%i].pBinds[%i].flags", ParameterName::IndexVector{ bindInfoIndex, bufferBindIndex, bindIndex }), "VkSparseMemoryBindFlagBits", AllVkSparseMemoryBindFlagBits, pBindInfo[bindInfoIndex].pBufferBinds[bufferBindIndex].pBinds[bindIndex].flags, kOptionalFlags, "VUID-VkSparseMemoryBind-flags-parameter");
- }
- }
- }
- }
-
- skip |= validate_array("vkQueueBindSparse", ParameterName("pBindInfo[%i].imageOpaqueBindCount", ParameterName::IndexVector{ bindInfoIndex }), ParameterName("pBindInfo[%i].pImageOpaqueBinds", ParameterName::IndexVector{ bindInfoIndex }), pBindInfo[bindInfoIndex].imageOpaqueBindCount, &pBindInfo[bindInfoIndex].pImageOpaqueBinds, false, true, kVUIDUndefined, "VUID-VkBindSparseInfo-pImageOpaqueBinds-parameter");
-
- if (pBindInfo[bindInfoIndex].pImageOpaqueBinds != NULL)
- {
- for (uint32_t imageOpaqueBindIndex = 0; imageOpaqueBindIndex < pBindInfo[bindInfoIndex].imageOpaqueBindCount; ++imageOpaqueBindIndex)
- {
- skip |= validate_required_handle("vkQueueBindSparse", ParameterName("pBindInfo[%i].pImageOpaqueBinds[%i].image", ParameterName::IndexVector{ bindInfoIndex, imageOpaqueBindIndex }), pBindInfo[bindInfoIndex].pImageOpaqueBinds[imageOpaqueBindIndex].image);
-
- skip |= validate_array("vkQueueBindSparse", ParameterName("pBindInfo[%i].pImageOpaqueBinds[%i].bindCount", ParameterName::IndexVector{ bindInfoIndex, imageOpaqueBindIndex }), ParameterName("pBindInfo[%i].pImageOpaqueBinds[%i].pBinds", ParameterName::IndexVector{ bindInfoIndex, imageOpaqueBindIndex }), pBindInfo[bindInfoIndex].pImageOpaqueBinds[imageOpaqueBindIndex].bindCount, &pBindInfo[bindInfoIndex].pImageOpaqueBinds[imageOpaqueBindIndex].pBinds, true, true, "VUID-VkSparseImageOpaqueMemoryBindInfo-bindCount-arraylength", "VUID-VkSparseImageOpaqueMemoryBindInfo-pBinds-parameter");
-
- if (pBindInfo[bindInfoIndex].pImageOpaqueBinds[imageOpaqueBindIndex].pBinds != NULL)
- {
- for (uint32_t bindIndex = 0; bindIndex < pBindInfo[bindInfoIndex].pImageOpaqueBinds[imageOpaqueBindIndex].bindCount; ++bindIndex)
- {
- skip |= validate_flags("vkQueueBindSparse", ParameterName("pBindInfo[%i].pImageOpaqueBinds[%i].pBinds[%i].flags", ParameterName::IndexVector{ bindInfoIndex, imageOpaqueBindIndex, bindIndex }), "VkSparseMemoryBindFlagBits", AllVkSparseMemoryBindFlagBits, pBindInfo[bindInfoIndex].pImageOpaqueBinds[imageOpaqueBindIndex].pBinds[bindIndex].flags, kOptionalFlags, "VUID-VkSparseMemoryBind-flags-parameter");
- }
- }
- }
- }
-
- skip |= validate_array("vkQueueBindSparse", ParameterName("pBindInfo[%i].imageBindCount", ParameterName::IndexVector{ bindInfoIndex }), ParameterName("pBindInfo[%i].pImageBinds", ParameterName::IndexVector{ bindInfoIndex }), pBindInfo[bindInfoIndex].imageBindCount, &pBindInfo[bindInfoIndex].pImageBinds, false, true, kVUIDUndefined, "VUID-VkBindSparseInfo-pImageBinds-parameter");
-
- if (pBindInfo[bindInfoIndex].pImageBinds != NULL)
- {
- for (uint32_t imageBindIndex = 0; imageBindIndex < pBindInfo[bindInfoIndex].imageBindCount; ++imageBindIndex)
- {
- skip |= validate_required_handle("vkQueueBindSparse", ParameterName("pBindInfo[%i].pImageBinds[%i].image", ParameterName::IndexVector{ bindInfoIndex, imageBindIndex }), pBindInfo[bindInfoIndex].pImageBinds[imageBindIndex].image);
-
- skip |= validate_array("vkQueueBindSparse", ParameterName("pBindInfo[%i].pImageBinds[%i].bindCount", ParameterName::IndexVector{ bindInfoIndex, imageBindIndex }), ParameterName("pBindInfo[%i].pImageBinds[%i].pBinds", ParameterName::IndexVector{ bindInfoIndex, imageBindIndex }), pBindInfo[bindInfoIndex].pImageBinds[imageBindIndex].bindCount, &pBindInfo[bindInfoIndex].pImageBinds[imageBindIndex].pBinds, true, true, "VUID-VkSparseImageMemoryBindInfo-bindCount-arraylength", "VUID-VkSparseImageMemoryBindInfo-pBinds-parameter");
-
- if (pBindInfo[bindInfoIndex].pImageBinds[imageBindIndex].pBinds != NULL)
- {
- for (uint32_t bindIndex = 0; bindIndex < pBindInfo[bindInfoIndex].pImageBinds[imageBindIndex].bindCount; ++bindIndex)
- {
- skip |= validate_flags("vkQueueBindSparse", ParameterName("pBindInfo[%i].pImageBinds[%i].pBinds[%i].subresource.aspectMask", ParameterName::IndexVector{ bindInfoIndex, imageBindIndex, bindIndex }), "VkImageAspectFlagBits", AllVkImageAspectFlagBits, pBindInfo[bindInfoIndex].pImageBinds[imageBindIndex].pBinds[bindIndex].subresource.aspectMask, kRequiredFlags, "VUID-VkImageSubresource-aspectMask-parameter", "VUID-VkImageSubresource-aspectMask-requiredbitmask");
-
- // No xml-driven validation
-
- // No xml-driven validation
-
- skip |= validate_flags("vkQueueBindSparse", ParameterName("pBindInfo[%i].pImageBinds[%i].pBinds[%i].flags", ParameterName::IndexVector{ bindInfoIndex, imageBindIndex, bindIndex }), "VkSparseMemoryBindFlagBits", AllVkSparseMemoryBindFlagBits, pBindInfo[bindInfoIndex].pImageBinds[imageBindIndex].pBinds[bindIndex].flags, kOptionalFlags, "VUID-VkSparseImageMemoryBind-flags-parameter");
- }
- }
- }
- }
-
- skip |= validate_array("vkQueueBindSparse", ParameterName("pBindInfo[%i].signalSemaphoreCount", ParameterName::IndexVector{ bindInfoIndex }), ParameterName("pBindInfo[%i].pSignalSemaphores", ParameterName::IndexVector{ bindInfoIndex }), pBindInfo[bindInfoIndex].signalSemaphoreCount, &pBindInfo[bindInfoIndex].pSignalSemaphores, false, true, kVUIDUndefined, "VUID-VkBindSparseInfo-pSignalSemaphores-parameter");
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCreateFence(
- VkDevice device,
- const VkFenceCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFence* pFence) {
- bool skip = false;
- skip |= validate_struct_type("vkCreateFence", "pCreateInfo", "VK_STRUCTURE_TYPE_FENCE_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, true, "VUID-vkCreateFence-pCreateInfo-parameter", "VUID-VkFenceCreateInfo-sType-sType");
- if (pCreateInfo != NULL)
- {
- const VkStructureType allowed_structs_VkFenceCreateInfo[] = { VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO, VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR };
-
- skip |= validate_struct_pnext("vkCreateFence", "pCreateInfo->pNext", "VkExportFenceCreateInfo, VkExportFenceWin32HandleInfoKHR", pCreateInfo->pNext, ARRAY_SIZE(allowed_structs_VkFenceCreateInfo), allowed_structs_VkFenceCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkFenceCreateInfo-pNext-pNext");
-
- skip |= validate_flags("vkCreateFence", "pCreateInfo->flags", "VkFenceCreateFlagBits", AllVkFenceCreateFlagBits, pCreateInfo->flags, kOptionalFlags, "VUID-VkFenceCreateInfo-flags-parameter");
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreateFence", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreateFence", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreateFence", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreateFence", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreateFence", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkCreateFence", "pFence", pFence, "VUID-vkCreateFence-pFence-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateDestroyFence(
- VkDevice device,
- VkFence fence,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkDestroyFence", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkDestroyFence", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkDestroyFence", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkDestroyFence", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkDestroyFence", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateResetFences(
- VkDevice device,
- uint32_t fenceCount,
- const VkFence* pFences) {
- bool skip = false;
- skip |= validate_handle_array("vkResetFences", "fenceCount", "pFences", fenceCount, pFences, true, true);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetFenceStatus(
- VkDevice device,
- VkFence fence) {
- bool skip = false;
- skip |= validate_required_handle("vkGetFenceStatus", "fence", fence);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateWaitForFences(
- VkDevice device,
- uint32_t fenceCount,
- const VkFence* pFences,
- VkBool32 waitAll,
- uint64_t timeout) {
- bool skip = false;
- skip |= validate_handle_array("vkWaitForFences", "fenceCount", "pFences", fenceCount, pFences, true, true);
- skip |= validate_bool32("vkWaitForFences", "waitAll", waitAll);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCreateSemaphore(
- VkDevice device,
- const VkSemaphoreCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSemaphore* pSemaphore) {
- bool skip = false;
- skip |= validate_struct_type("vkCreateSemaphore", "pCreateInfo", "VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO, true, "VUID-vkCreateSemaphore-pCreateInfo-parameter", "VUID-VkSemaphoreCreateInfo-sType-sType");
- if (pCreateInfo != NULL)
- {
- const VkStructureType allowed_structs_VkSemaphoreCreateInfo[] = { VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO, VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR };
-
- skip |= validate_struct_pnext("vkCreateSemaphore", "pCreateInfo->pNext", "VkExportSemaphoreCreateInfo, VkExportSemaphoreWin32HandleInfoKHR", pCreateInfo->pNext, ARRAY_SIZE(allowed_structs_VkSemaphoreCreateInfo), allowed_structs_VkSemaphoreCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkSemaphoreCreateInfo-pNext-pNext");
-
- skip |= validate_reserved_flags("vkCreateSemaphore", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkSemaphoreCreateInfo-flags-zerobitmask");
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreateSemaphore", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreateSemaphore", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreateSemaphore", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreateSemaphore", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreateSemaphore", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkCreateSemaphore", "pSemaphore", pSemaphore, "VUID-vkCreateSemaphore-pSemaphore-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateDestroySemaphore(
- VkDevice device,
- VkSemaphore semaphore,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkDestroySemaphore", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkDestroySemaphore", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkDestroySemaphore", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkDestroySemaphore", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkDestroySemaphore", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCreateEvent(
- VkDevice device,
- const VkEventCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkEvent* pEvent) {
- bool skip = false;
- skip |= validate_struct_type("vkCreateEvent", "pCreateInfo", "VK_STRUCTURE_TYPE_EVENT_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_EVENT_CREATE_INFO, true, "VUID-vkCreateEvent-pCreateInfo-parameter", "VUID-VkEventCreateInfo-sType-sType");
- if (pCreateInfo != NULL)
- {
- skip |= validate_struct_pnext("vkCreateEvent", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkEventCreateInfo-pNext-pNext");
-
- skip |= validate_reserved_flags("vkCreateEvent", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkEventCreateInfo-flags-zerobitmask");
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreateEvent", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreateEvent", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreateEvent", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreateEvent", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreateEvent", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkCreateEvent", "pEvent", pEvent, "VUID-vkCreateEvent-pEvent-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateDestroyEvent(
- VkDevice device,
- VkEvent event,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkDestroyEvent", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkDestroyEvent", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkDestroyEvent", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkDestroyEvent", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkDestroyEvent", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetEventStatus(
- VkDevice device,
- VkEvent event) {
- bool skip = false;
- skip |= validate_required_handle("vkGetEventStatus", "event", event);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateSetEvent(
- VkDevice device,
- VkEvent event) {
- bool skip = false;
- skip |= validate_required_handle("vkSetEvent", "event", event);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateResetEvent(
- VkDevice device,
- VkEvent event) {
- bool skip = false;
- skip |= validate_required_handle("vkResetEvent", "event", event);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCreateQueryPool(
- VkDevice device,
- const VkQueryPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkQueryPool* pQueryPool) {
- bool skip = false;
- skip |= validate_struct_type("vkCreateQueryPool", "pCreateInfo", "VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO, true, "VUID-vkCreateQueryPool-pCreateInfo-parameter", "VUID-VkQueryPoolCreateInfo-sType-sType");
- if (pCreateInfo != NULL)
- {
- skip |= validate_struct_pnext("vkCreateQueryPool", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkQueryPoolCreateInfo-pNext-pNext");
-
- skip |= validate_reserved_flags("vkCreateQueryPool", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkQueryPoolCreateInfo-flags-zerobitmask");
-
- skip |= validate_ranged_enum("vkCreateQueryPool", "pCreateInfo->queryType", "VkQueryType", AllVkQueryTypeEnums, pCreateInfo->queryType, "VUID-VkQueryPoolCreateInfo-queryType-parameter");
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreateQueryPool", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreateQueryPool", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreateQueryPool", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreateQueryPool", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreateQueryPool", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkCreateQueryPool", "pQueryPool", pQueryPool, "VUID-vkCreateQueryPool-pQueryPool-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateDestroyQueryPool(
- VkDevice device,
- VkQueryPool queryPool,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkDestroyQueryPool", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkDestroyQueryPool", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkDestroyQueryPool", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkDestroyQueryPool", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkDestroyQueryPool", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetQueryPoolResults(
- VkDevice device,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- size_t dataSize,
- void* pData,
- VkDeviceSize stride,
- VkQueryResultFlags flags) {
- bool skip = false;
- skip |= validate_required_handle("vkGetQueryPoolResults", "queryPool", queryPool);
- skip |= validate_array("vkGetQueryPoolResults", "dataSize", "pData", dataSize, &pData, true, true, "VUID-vkGetQueryPoolResults-dataSize-arraylength", "VUID-vkGetQueryPoolResults-pData-parameter");
- skip |= validate_flags("vkGetQueryPoolResults", "flags", "VkQueryResultFlagBits", AllVkQueryResultFlagBits, flags, kOptionalFlags, "VUID-vkGetQueryPoolResults-flags-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCreateBuffer(
- VkDevice device,
- const VkBufferCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkBuffer* pBuffer) {
- bool skip = false;
- skip |= validate_struct_type("vkCreateBuffer", "pCreateInfo", "VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO, true, "VUID-vkCreateBuffer-pCreateInfo-parameter", "VUID-VkBufferCreateInfo-sType-sType");
- if (pCreateInfo != NULL)
- {
- const VkStructureType allowed_structs_VkBufferCreateInfo[] = { VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT, VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV, VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO };
-
- skip |= validate_struct_pnext("vkCreateBuffer", "pCreateInfo->pNext", "VkBufferDeviceAddressCreateInfoEXT, VkDedicatedAllocationBufferCreateInfoNV, VkExternalMemoryBufferCreateInfo", pCreateInfo->pNext, ARRAY_SIZE(allowed_structs_VkBufferCreateInfo), allowed_structs_VkBufferCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkBufferCreateInfo-pNext-pNext");
-
- skip |= validate_flags("vkCreateBuffer", "pCreateInfo->flags", "VkBufferCreateFlagBits", AllVkBufferCreateFlagBits, pCreateInfo->flags, kOptionalFlags, "VUID-VkBufferCreateInfo-flags-parameter");
-
- skip |= validate_flags("vkCreateBuffer", "pCreateInfo->usage", "VkBufferUsageFlagBits", AllVkBufferUsageFlagBits, pCreateInfo->usage, kRequiredFlags, "VUID-VkBufferCreateInfo-usage-parameter", "VUID-VkBufferCreateInfo-usage-requiredbitmask");
-
- skip |= validate_ranged_enum("vkCreateBuffer", "pCreateInfo->sharingMode", "VkSharingMode", AllVkSharingModeEnums, pCreateInfo->sharingMode, "VUID-VkBufferCreateInfo-sharingMode-parameter");
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreateBuffer", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreateBuffer", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreateBuffer", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreateBuffer", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreateBuffer", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkCreateBuffer", "pBuffer", pBuffer, "VUID-vkCreateBuffer-pBuffer-parameter");
- if (!skip) skip |= manual_PreCallValidateCreateBuffer(device, pCreateInfo, pAllocator, pBuffer);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateDestroyBuffer(
- VkDevice device,
- VkBuffer buffer,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkDestroyBuffer", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkDestroyBuffer", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkDestroyBuffer", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkDestroyBuffer", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkDestroyBuffer", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCreateBufferView(
- VkDevice device,
- const VkBufferViewCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkBufferView* pView) {
- bool skip = false;
- skip |= validate_struct_type("vkCreateBufferView", "pCreateInfo", "VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO, true, "VUID-vkCreateBufferView-pCreateInfo-parameter", "VUID-VkBufferViewCreateInfo-sType-sType");
- if (pCreateInfo != NULL)
- {
- skip |= validate_struct_pnext("vkCreateBufferView", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkBufferViewCreateInfo-pNext-pNext");
-
- skip |= validate_reserved_flags("vkCreateBufferView", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkBufferViewCreateInfo-flags-zerobitmask");
-
- skip |= validate_required_handle("vkCreateBufferView", "pCreateInfo->buffer", pCreateInfo->buffer);
-
- skip |= validate_ranged_enum("vkCreateBufferView", "pCreateInfo->format", "VkFormat", AllVkFormatEnums, pCreateInfo->format, "VUID-VkBufferViewCreateInfo-format-parameter");
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreateBufferView", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreateBufferView", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreateBufferView", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreateBufferView", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreateBufferView", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkCreateBufferView", "pView", pView, "VUID-vkCreateBufferView-pView-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateDestroyBufferView(
- VkDevice device,
- VkBufferView bufferView,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkDestroyBufferView", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkDestroyBufferView", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkDestroyBufferView", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkDestroyBufferView", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkDestroyBufferView", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCreateImage(
- VkDevice device,
- const VkImageCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkImage* pImage) {
- bool skip = false;
- skip |= validate_struct_type("vkCreateImage", "pCreateInfo", "VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, true, "VUID-vkCreateImage-pCreateInfo-parameter", "VUID-VkImageCreateInfo-sType-sType");
- if (pCreateInfo != NULL)
- {
- const VkStructureType allowed_structs_VkImageCreateInfo[] = { VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV, VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID, VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO, VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV, VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT, VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT, VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR, VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT, VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR };
-
- skip |= validate_struct_pnext("vkCreateImage", "pCreateInfo->pNext", "VkDedicatedAllocationImageCreateInfoNV, VkExternalFormatANDROID, VkExternalMemoryImageCreateInfo, VkExternalMemoryImageCreateInfoNV, VkImageDrmFormatModifierExplicitCreateInfoEXT, VkImageDrmFormatModifierListCreateInfoEXT, VkImageFormatListCreateInfoKHR, VkImageStencilUsageCreateInfoEXT, VkImageSwapchainCreateInfoKHR", pCreateInfo->pNext, ARRAY_SIZE(allowed_structs_VkImageCreateInfo), allowed_structs_VkImageCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkImageCreateInfo-pNext-pNext");
-
- skip |= validate_flags("vkCreateImage", "pCreateInfo->flags", "VkImageCreateFlagBits", AllVkImageCreateFlagBits, pCreateInfo->flags, kOptionalFlags, "VUID-VkImageCreateInfo-flags-parameter");
-
- skip |= validate_ranged_enum("vkCreateImage", "pCreateInfo->imageType", "VkImageType", AllVkImageTypeEnums, pCreateInfo->imageType, "VUID-VkImageCreateInfo-imageType-parameter");
-
- skip |= validate_ranged_enum("vkCreateImage", "pCreateInfo->format", "VkFormat", AllVkFormatEnums, pCreateInfo->format, "VUID-VkImageCreateInfo-format-parameter");
-
- // No xml-driven validation
-
- skip |= validate_flags("vkCreateImage", "pCreateInfo->samples", "VkSampleCountFlagBits", AllVkSampleCountFlagBits, pCreateInfo->samples, kRequiredSingleBit, "VUID-VkImageCreateInfo-samples-parameter", "VUID-VkImageCreateInfo-samples-parameter");
-
- skip |= validate_ranged_enum("vkCreateImage", "pCreateInfo->tiling", "VkImageTiling", AllVkImageTilingEnums, pCreateInfo->tiling, "VUID-VkImageCreateInfo-tiling-parameter");
-
- skip |= validate_flags("vkCreateImage", "pCreateInfo->usage", "VkImageUsageFlagBits", AllVkImageUsageFlagBits, pCreateInfo->usage, kRequiredFlags, "VUID-VkImageCreateInfo-usage-parameter", "VUID-VkImageCreateInfo-usage-requiredbitmask");
-
- skip |= validate_ranged_enum("vkCreateImage", "pCreateInfo->sharingMode", "VkSharingMode", AllVkSharingModeEnums, pCreateInfo->sharingMode, "VUID-VkImageCreateInfo-sharingMode-parameter");
-
- skip |= validate_ranged_enum("vkCreateImage", "pCreateInfo->initialLayout", "VkImageLayout", AllVkImageLayoutEnums, pCreateInfo->initialLayout, "VUID-VkImageCreateInfo-initialLayout-parameter");
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreateImage", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreateImage", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreateImage", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreateImage", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreateImage", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkCreateImage", "pImage", pImage, "VUID-vkCreateImage-pImage-parameter");
- if (!skip) skip |= manual_PreCallValidateCreateImage(device, pCreateInfo, pAllocator, pImage);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateDestroyImage(
- VkDevice device,
- VkImage image,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkDestroyImage", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkDestroyImage", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkDestroyImage", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkDestroyImage", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkDestroyImage", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetImageSubresourceLayout(
- VkDevice device,
- VkImage image,
- const VkImageSubresource* pSubresource,
- VkSubresourceLayout* pLayout) {
- bool skip = false;
- skip |= validate_required_handle("vkGetImageSubresourceLayout", "image", image);
- skip |= validate_required_pointer("vkGetImageSubresourceLayout", "pSubresource", pSubresource, "VUID-vkGetImageSubresourceLayout-pSubresource-parameter");
- if (pSubresource != NULL)
- {
- skip |= validate_flags("vkGetImageSubresourceLayout", "pSubresource->aspectMask", "VkImageAspectFlagBits", AllVkImageAspectFlagBits, pSubresource->aspectMask, kRequiredFlags, "VUID-VkImageSubresource-aspectMask-parameter", "VUID-VkImageSubresource-aspectMask-requiredbitmask");
- }
- skip |= validate_required_pointer("vkGetImageSubresourceLayout", "pLayout", pLayout, "VUID-vkGetImageSubresourceLayout-pLayout-parameter");
- if (pLayout != NULL)
- {
- // No xml-driven validation
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCreateImageView(
- VkDevice device,
- const VkImageViewCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkImageView* pView) {
- bool skip = false;
- skip |= validate_struct_type("vkCreateImageView", "pCreateInfo", "VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO, true, "VUID-vkCreateImageView-pCreateInfo-parameter", "VUID-VkImageViewCreateInfo-sType-sType");
- if (pCreateInfo != NULL)
- {
- const VkStructureType allowed_structs_VkImageViewCreateInfo[] = { VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT, VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO, VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO };
-
- skip |= validate_struct_pnext("vkCreateImageView", "pCreateInfo->pNext", "VkImageViewASTCDecodeModeEXT, VkImageViewUsageCreateInfo, VkSamplerYcbcrConversionInfo", pCreateInfo->pNext, ARRAY_SIZE(allowed_structs_VkImageViewCreateInfo), allowed_structs_VkImageViewCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkImageViewCreateInfo-pNext-pNext");
-
- skip |= validate_flags("vkCreateImageView", "pCreateInfo->flags", "VkImageViewCreateFlagBits", AllVkImageViewCreateFlagBits, pCreateInfo->flags, kOptionalFlags, "VUID-VkImageViewCreateInfo-flags-parameter");
-
- skip |= validate_required_handle("vkCreateImageView", "pCreateInfo->image", pCreateInfo->image);
-
- skip |= validate_ranged_enum("vkCreateImageView", "pCreateInfo->viewType", "VkImageViewType", AllVkImageViewTypeEnums, pCreateInfo->viewType, "VUID-VkImageViewCreateInfo-viewType-parameter");
-
- skip |= validate_ranged_enum("vkCreateImageView", "pCreateInfo->format", "VkFormat", AllVkFormatEnums, pCreateInfo->format, "VUID-VkImageViewCreateInfo-format-parameter");
-
- skip |= validate_ranged_enum("vkCreateImageView", "pCreateInfo->components.r", "VkComponentSwizzle", AllVkComponentSwizzleEnums, pCreateInfo->components.r, "VUID-VkComponentMapping-r-parameter");
-
- skip |= validate_ranged_enum("vkCreateImageView", "pCreateInfo->components.g", "VkComponentSwizzle", AllVkComponentSwizzleEnums, pCreateInfo->components.g, "VUID-VkComponentMapping-g-parameter");
-
- skip |= validate_ranged_enum("vkCreateImageView", "pCreateInfo->components.b", "VkComponentSwizzle", AllVkComponentSwizzleEnums, pCreateInfo->components.b, "VUID-VkComponentMapping-b-parameter");
-
- skip |= validate_ranged_enum("vkCreateImageView", "pCreateInfo->components.a", "VkComponentSwizzle", AllVkComponentSwizzleEnums, pCreateInfo->components.a, "VUID-VkComponentMapping-a-parameter");
-
- skip |= validate_flags("vkCreateImageView", "pCreateInfo->subresourceRange.aspectMask", "VkImageAspectFlagBits", AllVkImageAspectFlagBits, pCreateInfo->subresourceRange.aspectMask, kRequiredFlags, "VUID-VkImageSubresourceRange-aspectMask-parameter", "VUID-VkImageSubresourceRange-aspectMask-requiredbitmask");
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreateImageView", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreateImageView", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreateImageView", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreateImageView", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreateImageView", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkCreateImageView", "pView", pView, "VUID-vkCreateImageView-pView-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateDestroyImageView(
- VkDevice device,
- VkImageView imageView,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkDestroyImageView", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkDestroyImageView", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkDestroyImageView", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkDestroyImageView", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkDestroyImageView", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCreateShaderModule(
- VkDevice device,
- const VkShaderModuleCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkShaderModule* pShaderModule) {
- bool skip = false;
- skip |= validate_struct_type("vkCreateShaderModule", "pCreateInfo", "VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO, true, "VUID-vkCreateShaderModule-pCreateInfo-parameter", "VUID-VkShaderModuleCreateInfo-sType-sType");
- if (pCreateInfo != NULL)
- {
- const VkStructureType allowed_structs_VkShaderModuleCreateInfo[] = { VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT };
-
- skip |= validate_struct_pnext("vkCreateShaderModule", "pCreateInfo->pNext", "VkShaderModuleValidationCacheCreateInfoEXT", pCreateInfo->pNext, ARRAY_SIZE(allowed_structs_VkShaderModuleCreateInfo), allowed_structs_VkShaderModuleCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkShaderModuleCreateInfo-pNext-pNext");
-
- skip |= validate_reserved_flags("vkCreateShaderModule", "pCreateInfo->flags", pCreateInfo->flags, kVUIDUndefined);
-
- skip |= validate_array("vkCreateShaderModule", "pCreateInfo->codeSize", "pCreateInfo->pCode", pCreateInfo->codeSize, &pCreateInfo->pCode, true, true, kVUIDUndefined, "VUID-VkShaderModuleCreateInfo-pCode-parameter");
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreateShaderModule", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreateShaderModule", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreateShaderModule", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreateShaderModule", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreateShaderModule", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkCreateShaderModule", "pShaderModule", pShaderModule, "VUID-vkCreateShaderModule-pShaderModule-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateDestroyShaderModule(
- VkDevice device,
- VkShaderModule shaderModule,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkDestroyShaderModule", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkDestroyShaderModule", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkDestroyShaderModule", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkDestroyShaderModule", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkDestroyShaderModule", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCreatePipelineCache(
- VkDevice device,
- const VkPipelineCacheCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkPipelineCache* pPipelineCache) {
- bool skip = false;
- skip |= validate_struct_type("vkCreatePipelineCache", "pCreateInfo", "VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO, true, "VUID-vkCreatePipelineCache-pCreateInfo-parameter", "VUID-VkPipelineCacheCreateInfo-sType-sType");
- if (pCreateInfo != NULL)
- {
- skip |= validate_struct_pnext("vkCreatePipelineCache", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkPipelineCacheCreateInfo-pNext-pNext");
-
- skip |= validate_reserved_flags("vkCreatePipelineCache", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkPipelineCacheCreateInfo-flags-zerobitmask");
-
- skip |= validate_array("vkCreatePipelineCache", "pCreateInfo->initialDataSize", "pCreateInfo->pInitialData", pCreateInfo->initialDataSize, &pCreateInfo->pInitialData, false, true, kVUIDUndefined, "VUID-VkPipelineCacheCreateInfo-pInitialData-parameter");
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreatePipelineCache", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreatePipelineCache", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreatePipelineCache", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreatePipelineCache", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreatePipelineCache", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkCreatePipelineCache", "pPipelineCache", pPipelineCache, "VUID-vkCreatePipelineCache-pPipelineCache-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateDestroyPipelineCache(
- VkDevice device,
- VkPipelineCache pipelineCache,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkDestroyPipelineCache", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkDestroyPipelineCache", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkDestroyPipelineCache", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkDestroyPipelineCache", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkDestroyPipelineCache", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetPipelineCacheData(
- VkDevice device,
- VkPipelineCache pipelineCache,
- size_t* pDataSize,
- void* pData) {
- bool skip = false;
- skip |= validate_required_handle("vkGetPipelineCacheData", "pipelineCache", pipelineCache);
- skip |= validate_array("vkGetPipelineCacheData", "pDataSize", "pData", pDataSize, &pData, true, false, false, kVUIDUndefined, "VUID-vkGetPipelineCacheData-pData-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateMergePipelineCaches(
- VkDevice device,
- VkPipelineCache dstCache,
- uint32_t srcCacheCount,
- const VkPipelineCache* pSrcCaches) {
- bool skip = false;
- skip |= validate_required_handle("vkMergePipelineCaches", "dstCache", dstCache);
- skip |= validate_handle_array("vkMergePipelineCaches", "srcCacheCount", "pSrcCaches", srcCacheCount, pSrcCaches, true, true);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCreateGraphicsPipelines(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VkGraphicsPipelineCreateInfo* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkPipeline* pPipelines) {
- bool skip = false;
- skip |= validate_struct_type_array("vkCreateGraphicsPipelines", "createInfoCount", "pCreateInfos", "VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO", createInfoCount, pCreateInfos, VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO, true, true, "VUID-VkGraphicsPipelineCreateInfo-sType-sType", "VUID-vkCreateGraphicsPipelines-pCreateInfos-parameter", "VUID-vkCreateGraphicsPipelines-createInfoCount-arraylength");
- if (pCreateInfos != NULL)
- {
- for (uint32_t createInfoIndex = 0; createInfoIndex < createInfoCount; ++createInfoIndex)
- {
- const VkStructureType allowed_structs_VkGraphicsPipelineCreateInfo[] = { VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD, VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT, VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT, VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV };
-
- skip |= validate_struct_pnext("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].pNext", ParameterName::IndexVector{ createInfoIndex }), "VkPipelineCompilerControlCreateInfoAMD, VkPipelineCreationFeedbackCreateInfoEXT, VkPipelineDiscardRectangleStateCreateInfoEXT, VkPipelineRepresentativeFragmentTestStateCreateInfoNV", pCreateInfos[createInfoIndex].pNext, ARRAY_SIZE(allowed_structs_VkGraphicsPipelineCreateInfo), allowed_structs_VkGraphicsPipelineCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkGraphicsPipelineCreateInfo-pNext-pNext");
-
- skip |= validate_flags("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].flags", ParameterName::IndexVector{ createInfoIndex }), "VkPipelineCreateFlagBits", AllVkPipelineCreateFlagBits, pCreateInfos[createInfoIndex].flags, kOptionalFlags, "VUID-VkGraphicsPipelineCreateInfo-flags-parameter");
-
- skip |= validate_struct_type_array("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].stageCount", ParameterName::IndexVector{ createInfoIndex }), ParameterName("pCreateInfos[%i].pStages", ParameterName::IndexVector{ createInfoIndex }), "VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO", pCreateInfos[createInfoIndex].stageCount, pCreateInfos[createInfoIndex].pStages, VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO, true, true, "VUID-VkPipelineShaderStageCreateInfo-sType-sType", "VUID-VkGraphicsPipelineCreateInfo-pStages-parameter", "VUID-VkGraphicsPipelineCreateInfo-stageCount-arraylength");
-
- if (pCreateInfos[createInfoIndex].pStages != NULL)
- {
- for (uint32_t stageIndex = 0; stageIndex < pCreateInfos[createInfoIndex].stageCount; ++stageIndex)
- {
- const VkStructureType allowed_structs_VkPipelineShaderStageCreateInfo[] = { VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT };
-
- skip |= validate_struct_pnext("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].pStages[%i].pNext", ParameterName::IndexVector{ createInfoIndex, stageIndex }), "VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT", pCreateInfos[createInfoIndex].pStages[stageIndex].pNext, ARRAY_SIZE(allowed_structs_VkPipelineShaderStageCreateInfo), allowed_structs_VkPipelineShaderStageCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkPipelineShaderStageCreateInfo-pNext-pNext");
-
- skip |= validate_flags("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].pStages[%i].flags", ParameterName::IndexVector{ createInfoIndex, stageIndex }), "VkPipelineShaderStageCreateFlagBits", AllVkPipelineShaderStageCreateFlagBits, pCreateInfos[createInfoIndex].pStages[stageIndex].flags, kOptionalFlags, "VUID-VkPipelineShaderStageCreateInfo-flags-parameter");
-
- skip |= validate_flags("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].pStages[%i].stage", ParameterName::IndexVector{ createInfoIndex, stageIndex }), "VkShaderStageFlagBits", AllVkShaderStageFlagBits, pCreateInfos[createInfoIndex].pStages[stageIndex].stage, kRequiredSingleBit, "VUID-VkPipelineShaderStageCreateInfo-stage-parameter", "VUID-VkPipelineShaderStageCreateInfo-stage-parameter");
-
- skip |= validate_required_handle("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].pStages[%i].module", ParameterName::IndexVector{ createInfoIndex, stageIndex }), pCreateInfos[createInfoIndex].pStages[stageIndex].module);
-
- skip |= validate_required_pointer("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].pStages[%i].pName", ParameterName::IndexVector{ createInfoIndex, stageIndex }), pCreateInfos[createInfoIndex].pStages[stageIndex].pName, "VUID-VkPipelineShaderStageCreateInfo-pName-parameter");
-
- if (pCreateInfos[createInfoIndex].pStages[stageIndex].pSpecializationInfo != NULL)
- {
- skip |= validate_array("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].pStages[%i].pSpecializationInfo->mapEntryCount", ParameterName::IndexVector{ createInfoIndex, stageIndex }), ParameterName("pCreateInfos[%i].pStages[%i].pSpecializationInfo->pMapEntries", ParameterName::IndexVector{ createInfoIndex, stageIndex }), pCreateInfos[createInfoIndex].pStages[stageIndex].pSpecializationInfo->mapEntryCount, &pCreateInfos[createInfoIndex].pStages[stageIndex].pSpecializationInfo->pMapEntries, false, true, kVUIDUndefined, "VUID-VkSpecializationInfo-pMapEntries-parameter");
-
- if (pCreateInfos[createInfoIndex].pStages[stageIndex].pSpecializationInfo->pMapEntries != NULL)
- {
- for (uint32_t mapEntryIndex = 0; mapEntryIndex < pCreateInfos[createInfoIndex].pStages[stageIndex].pSpecializationInfo->mapEntryCount; ++mapEntryIndex)
- {
- // No xml-driven validation
- }
- }
-
- skip |= validate_array("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].pStages[%i].pSpecializationInfo->dataSize", ParameterName::IndexVector{ createInfoIndex, stageIndex }), ParameterName("pCreateInfos[%i].pStages[%i].pSpecializationInfo->pData", ParameterName::IndexVector{ createInfoIndex, stageIndex }), pCreateInfos[createInfoIndex].pStages[stageIndex].pSpecializationInfo->dataSize, &pCreateInfos[createInfoIndex].pStages[stageIndex].pSpecializationInfo->pData, false, true, kVUIDUndefined, "VUID-VkSpecializationInfo-pData-parameter");
- }
- }
- }
-
- skip |= validate_struct_type("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].pRasterizationState", ParameterName::IndexVector{ createInfoIndex }), "VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO", pCreateInfos[createInfoIndex].pRasterizationState, VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO, true, "VUID-VkGraphicsPipelineCreateInfo-pRasterizationState-parameter", "VUID-VkPipelineRasterizationStateCreateInfo-sType-sType");
-
- if (pCreateInfos[createInfoIndex].pRasterizationState != NULL)
- {
- const VkStructureType allowed_structs_VkPipelineRasterizationStateCreateInfo[] = { VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT, VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT, VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT, VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD, VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT };
-
- skip |= validate_struct_pnext("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].pRasterizationState->pNext", ParameterName::IndexVector{ createInfoIndex }), "VkPipelineRasterizationConservativeStateCreateInfoEXT, VkPipelineRasterizationDepthClipStateCreateInfoEXT, VkPipelineRasterizationLineStateCreateInfoEXT, VkPipelineRasterizationStateRasterizationOrderAMD, VkPipelineRasterizationStateStreamCreateInfoEXT", pCreateInfos[createInfoIndex].pRasterizationState->pNext, ARRAY_SIZE(allowed_structs_VkPipelineRasterizationStateCreateInfo), allowed_structs_VkPipelineRasterizationStateCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkPipelineRasterizationStateCreateInfo-pNext-pNext");
-
- skip |= validate_reserved_flags("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].pRasterizationState->flags", ParameterName::IndexVector{ createInfoIndex }), pCreateInfos[createInfoIndex].pRasterizationState->flags, "VUID-VkPipelineRasterizationStateCreateInfo-flags-zerobitmask");
-
- skip |= validate_bool32("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].pRasterizationState->depthClampEnable", ParameterName::IndexVector{ createInfoIndex }), pCreateInfos[createInfoIndex].pRasterizationState->depthClampEnable);
-
- skip |= validate_bool32("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].pRasterizationState->rasterizerDiscardEnable", ParameterName::IndexVector{ createInfoIndex }), pCreateInfos[createInfoIndex].pRasterizationState->rasterizerDiscardEnable);
-
- skip |= validate_ranged_enum("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].pRasterizationState->polygonMode", ParameterName::IndexVector{ createInfoIndex }), "VkPolygonMode", AllVkPolygonModeEnums, pCreateInfos[createInfoIndex].pRasterizationState->polygonMode, "VUID-VkPipelineRasterizationStateCreateInfo-polygonMode-parameter");
-
- skip |= validate_flags("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].pRasterizationState->cullMode", ParameterName::IndexVector{ createInfoIndex }), "VkCullModeFlagBits", AllVkCullModeFlagBits, pCreateInfos[createInfoIndex].pRasterizationState->cullMode, kOptionalFlags, "VUID-VkPipelineRasterizationStateCreateInfo-cullMode-parameter");
-
- skip |= validate_ranged_enum("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].pRasterizationState->frontFace", ParameterName::IndexVector{ createInfoIndex }), "VkFrontFace", AllVkFrontFaceEnums, pCreateInfos[createInfoIndex].pRasterizationState->frontFace, "VUID-VkPipelineRasterizationStateCreateInfo-frontFace-parameter");
-
- skip |= validate_bool32("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].pRasterizationState->depthBiasEnable", ParameterName::IndexVector{ createInfoIndex }), pCreateInfos[createInfoIndex].pRasterizationState->depthBiasEnable);
- }
-
- skip |= validate_struct_type("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].pDynamicState", ParameterName::IndexVector{ createInfoIndex }), "VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO", pCreateInfos[createInfoIndex].pDynamicState, VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO, false, "VUID-VkGraphicsPipelineCreateInfo-pDynamicState-parameter", "VUID-VkPipelineDynamicStateCreateInfo-sType-sType");
-
- if (pCreateInfos[createInfoIndex].pDynamicState != NULL)
- {
- skip |= validate_struct_pnext("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].pDynamicState->pNext", ParameterName::IndexVector{ createInfoIndex }), NULL, pCreateInfos[createInfoIndex].pDynamicState->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkPipelineDynamicStateCreateInfo-pNext-pNext");
-
- skip |= validate_reserved_flags("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].pDynamicState->flags", ParameterName::IndexVector{ createInfoIndex }), pCreateInfos[createInfoIndex].pDynamicState->flags, "VUID-VkPipelineDynamicStateCreateInfo-flags-zerobitmask");
-
- skip |= validate_ranged_enum_array("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].pDynamicState->dynamicStateCount", ParameterName::IndexVector{ createInfoIndex }), ParameterName("pCreateInfos[%i].pDynamicState->pDynamicStates", ParameterName::IndexVector{ createInfoIndex }), "VkDynamicState", AllVkDynamicStateEnums, pCreateInfos[createInfoIndex].pDynamicState->dynamicStateCount, pCreateInfos[createInfoIndex].pDynamicState->pDynamicStates, false, true);
- }
-
- skip |= validate_required_handle("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].layout", ParameterName::IndexVector{ createInfoIndex }), pCreateInfos[createInfoIndex].layout);
-
- skip |= validate_required_handle("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].renderPass", ParameterName::IndexVector{ createInfoIndex }), pCreateInfos[createInfoIndex].renderPass);
- }
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreateGraphicsPipelines", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreateGraphicsPipelines", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreateGraphicsPipelines", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreateGraphicsPipelines", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreateGraphicsPipelines", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_array("vkCreateGraphicsPipelines", "createInfoCount", "pPipelines", createInfoCount, &pPipelines, true, true, "VUID-vkCreateGraphicsPipelines-createInfoCount-arraylength", "VUID-vkCreateGraphicsPipelines-pPipelines-parameter");
- if (!skip) skip |= manual_PreCallValidateCreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCreateComputePipelines(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VkComputePipelineCreateInfo* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkPipeline* pPipelines) {
- bool skip = false;
- skip |= validate_struct_type_array("vkCreateComputePipelines", "createInfoCount", "pCreateInfos", "VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO", createInfoCount, pCreateInfos, VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO, true, true, "VUID-VkComputePipelineCreateInfo-sType-sType", "VUID-vkCreateComputePipelines-pCreateInfos-parameter", "VUID-vkCreateComputePipelines-createInfoCount-arraylength");
- if (pCreateInfos != NULL)
- {
- for (uint32_t createInfoIndex = 0; createInfoIndex < createInfoCount; ++createInfoIndex)
- {
- const VkStructureType allowed_structs_VkComputePipelineCreateInfo[] = { VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD, VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT };
-
- skip |= validate_struct_pnext("vkCreateComputePipelines", ParameterName("pCreateInfos[%i].pNext", ParameterName::IndexVector{ createInfoIndex }), "VkPipelineCompilerControlCreateInfoAMD, VkPipelineCreationFeedbackCreateInfoEXT", pCreateInfos[createInfoIndex].pNext, ARRAY_SIZE(allowed_structs_VkComputePipelineCreateInfo), allowed_structs_VkComputePipelineCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkComputePipelineCreateInfo-pNext-pNext");
-
- skip |= validate_flags("vkCreateComputePipelines", ParameterName("pCreateInfos[%i].flags", ParameterName::IndexVector{ createInfoIndex }), "VkPipelineCreateFlagBits", AllVkPipelineCreateFlagBits, pCreateInfos[createInfoIndex].flags, kOptionalFlags, "VUID-VkComputePipelineCreateInfo-flags-parameter");
-
- skip |= validate_struct_type("vkCreateComputePipelines", ParameterName("pCreateInfos[%i].stage", ParameterName::IndexVector{ createInfoIndex }), "VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO", &(pCreateInfos[createInfoIndex].stage), VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO, false, kVUIDUndefined, "VUID-VkPipelineShaderStageCreateInfo-sType-sType");
-
- const VkStructureType allowed_structs_VkPipelineShaderStageCreateInfo[] = { VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT };
-
- skip |= validate_struct_pnext("vkCreateComputePipelines", ParameterName("pCreateInfos[%i].stage.pNext", ParameterName::IndexVector{ createInfoIndex }), "VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT", pCreateInfos[createInfoIndex].stage.pNext, ARRAY_SIZE(allowed_structs_VkPipelineShaderStageCreateInfo), allowed_structs_VkPipelineShaderStageCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkPipelineShaderStageCreateInfo-pNext-pNext");
-
- skip |= validate_flags("vkCreateComputePipelines", ParameterName("pCreateInfos[%i].stage.flags", ParameterName::IndexVector{ createInfoIndex }), "VkPipelineShaderStageCreateFlagBits", AllVkPipelineShaderStageCreateFlagBits, pCreateInfos[createInfoIndex].stage.flags, kOptionalFlags, "VUID-VkPipelineShaderStageCreateInfo-flags-parameter");
-
- skip |= validate_flags("vkCreateComputePipelines", ParameterName("pCreateInfos[%i].stage.stage", ParameterName::IndexVector{ createInfoIndex }), "VkShaderStageFlagBits", AllVkShaderStageFlagBits, pCreateInfos[createInfoIndex].stage.stage, kRequiredSingleBit, "VUID-VkPipelineShaderStageCreateInfo-stage-parameter", "VUID-VkPipelineShaderStageCreateInfo-stage-parameter");
-
- skip |= validate_required_handle("vkCreateComputePipelines", ParameterName("pCreateInfos[%i].stage.module", ParameterName::IndexVector{ createInfoIndex }), pCreateInfos[createInfoIndex].stage.module);
-
- skip |= validate_required_pointer("vkCreateComputePipelines", ParameterName("pCreateInfos[%i].stage.pName", ParameterName::IndexVector{ createInfoIndex }), pCreateInfos[createInfoIndex].stage.pName, "VUID-VkPipelineShaderStageCreateInfo-pName-parameter");
-
- if (pCreateInfos[createInfoIndex].stage.pSpecializationInfo != NULL)
- {
- skip |= validate_array("vkCreateComputePipelines", ParameterName("pCreateInfos[%i].stage.pSpecializationInfo->mapEntryCount", ParameterName::IndexVector{ createInfoIndex }), ParameterName("pCreateInfos[%i].stage.pSpecializationInfo->pMapEntries", ParameterName::IndexVector{ createInfoIndex }), pCreateInfos[createInfoIndex].stage.pSpecializationInfo->mapEntryCount, &pCreateInfos[createInfoIndex].stage.pSpecializationInfo->pMapEntries, false, true, kVUIDUndefined, "VUID-VkSpecializationInfo-pMapEntries-parameter");
-
- if (pCreateInfos[createInfoIndex].stage.pSpecializationInfo->pMapEntries != NULL)
- {
- for (uint32_t mapEntryIndex = 0; mapEntryIndex < pCreateInfos[createInfoIndex].stage.pSpecializationInfo->mapEntryCount; ++mapEntryIndex)
- {
- // No xml-driven validation
- }
- }
-
- skip |= validate_array("vkCreateComputePipelines", ParameterName("pCreateInfos[%i].stage.pSpecializationInfo->dataSize", ParameterName::IndexVector{ createInfoIndex }), ParameterName("pCreateInfos[%i].stage.pSpecializationInfo->pData", ParameterName::IndexVector{ createInfoIndex }), pCreateInfos[createInfoIndex].stage.pSpecializationInfo->dataSize, &pCreateInfos[createInfoIndex].stage.pSpecializationInfo->pData, false, true, kVUIDUndefined, "VUID-VkSpecializationInfo-pData-parameter");
- }
-
- skip |= validate_required_handle("vkCreateComputePipelines", ParameterName("pCreateInfos[%i].layout", ParameterName::IndexVector{ createInfoIndex }), pCreateInfos[createInfoIndex].layout);
- }
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreateComputePipelines", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreateComputePipelines", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreateComputePipelines", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreateComputePipelines", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreateComputePipelines", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_array("vkCreateComputePipelines", "createInfoCount", "pPipelines", createInfoCount, &pPipelines, true, true, "VUID-vkCreateComputePipelines-createInfoCount-arraylength", "VUID-vkCreateComputePipelines-pPipelines-parameter");
- if (!skip) skip |= manual_PreCallValidateCreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateDestroyPipeline(
- VkDevice device,
- VkPipeline pipeline,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkDestroyPipeline", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkDestroyPipeline", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkDestroyPipeline", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkDestroyPipeline", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkDestroyPipeline", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCreatePipelineLayout(
- VkDevice device,
- const VkPipelineLayoutCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkPipelineLayout* pPipelineLayout) {
- bool skip = false;
- skip |= validate_struct_type("vkCreatePipelineLayout", "pCreateInfo", "VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO, true, "VUID-vkCreatePipelineLayout-pCreateInfo-parameter", "VUID-VkPipelineLayoutCreateInfo-sType-sType");
- if (pCreateInfo != NULL)
- {
- skip |= validate_struct_pnext("vkCreatePipelineLayout", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkPipelineLayoutCreateInfo-pNext-pNext");
-
- skip |= validate_reserved_flags("vkCreatePipelineLayout", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkPipelineLayoutCreateInfo-flags-zerobitmask");
-
- skip |= validate_array("vkCreatePipelineLayout", "pCreateInfo->setLayoutCount", "pCreateInfo->pSetLayouts", pCreateInfo->setLayoutCount, &pCreateInfo->pSetLayouts, false, true, kVUIDUndefined, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-parameter");
-
- skip |= validate_array("vkCreatePipelineLayout", "pCreateInfo->pushConstantRangeCount", "pCreateInfo->pPushConstantRanges", pCreateInfo->pushConstantRangeCount, &pCreateInfo->pPushConstantRanges, false, true, kVUIDUndefined, "VUID-VkPipelineLayoutCreateInfo-pPushConstantRanges-parameter");
-
- if (pCreateInfo->pPushConstantRanges != NULL)
- {
- for (uint32_t pushConstantRangeIndex = 0; pushConstantRangeIndex < pCreateInfo->pushConstantRangeCount; ++pushConstantRangeIndex)
- {
- skip |= validate_flags("vkCreatePipelineLayout", ParameterName("pCreateInfo->pPushConstantRanges[%i].stageFlags", ParameterName::IndexVector{ pushConstantRangeIndex }), "VkShaderStageFlagBits", AllVkShaderStageFlagBits, pCreateInfo->pPushConstantRanges[pushConstantRangeIndex].stageFlags, kRequiredFlags, "VUID-VkPushConstantRange-stageFlags-parameter", "VUID-VkPushConstantRange-stageFlags-requiredbitmask");
- }
- }
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreatePipelineLayout", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreatePipelineLayout", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreatePipelineLayout", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreatePipelineLayout", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreatePipelineLayout", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkCreatePipelineLayout", "pPipelineLayout", pPipelineLayout, "VUID-vkCreatePipelineLayout-pPipelineLayout-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateDestroyPipelineLayout(
- VkDevice device,
- VkPipelineLayout pipelineLayout,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkDestroyPipelineLayout", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkDestroyPipelineLayout", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkDestroyPipelineLayout", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkDestroyPipelineLayout", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkDestroyPipelineLayout", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCreateSampler(
- VkDevice device,
- const VkSamplerCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSampler* pSampler) {
- bool skip = false;
- skip |= validate_struct_type("vkCreateSampler", "pCreateInfo", "VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO, true, "VUID-vkCreateSampler-pCreateInfo-parameter", "VUID-VkSamplerCreateInfo-sType-sType");
- if (pCreateInfo != NULL)
- {
- const VkStructureType allowed_structs_VkSamplerCreateInfo[] = { VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT, VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO };
-
- skip |= validate_struct_pnext("vkCreateSampler", "pCreateInfo->pNext", "VkSamplerReductionModeCreateInfoEXT, VkSamplerYcbcrConversionInfo", pCreateInfo->pNext, ARRAY_SIZE(allowed_structs_VkSamplerCreateInfo), allowed_structs_VkSamplerCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkSamplerCreateInfo-pNext-pNext");
-
- skip |= validate_flags("vkCreateSampler", "pCreateInfo->flags", "VkSamplerCreateFlagBits", AllVkSamplerCreateFlagBits, pCreateInfo->flags, kOptionalFlags, "VUID-VkSamplerCreateInfo-flags-parameter");
-
- skip |= validate_ranged_enum("vkCreateSampler", "pCreateInfo->magFilter", "VkFilter", AllVkFilterEnums, pCreateInfo->magFilter, "VUID-VkSamplerCreateInfo-magFilter-parameter");
-
- skip |= validate_ranged_enum("vkCreateSampler", "pCreateInfo->minFilter", "VkFilter", AllVkFilterEnums, pCreateInfo->minFilter, "VUID-VkSamplerCreateInfo-minFilter-parameter");
-
- skip |= validate_ranged_enum("vkCreateSampler", "pCreateInfo->mipmapMode", "VkSamplerMipmapMode", AllVkSamplerMipmapModeEnums, pCreateInfo->mipmapMode, "VUID-VkSamplerCreateInfo-mipmapMode-parameter");
-
- skip |= validate_ranged_enum("vkCreateSampler", "pCreateInfo->addressModeU", "VkSamplerAddressMode", AllVkSamplerAddressModeEnums, pCreateInfo->addressModeU, "VUID-VkSamplerCreateInfo-addressModeU-parameter");
-
- skip |= validate_ranged_enum("vkCreateSampler", "pCreateInfo->addressModeV", "VkSamplerAddressMode", AllVkSamplerAddressModeEnums, pCreateInfo->addressModeV, "VUID-VkSamplerCreateInfo-addressModeV-parameter");
-
- skip |= validate_ranged_enum("vkCreateSampler", "pCreateInfo->addressModeW", "VkSamplerAddressMode", AllVkSamplerAddressModeEnums, pCreateInfo->addressModeW, "VUID-VkSamplerCreateInfo-addressModeW-parameter");
-
- skip |= validate_bool32("vkCreateSampler", "pCreateInfo->anisotropyEnable", pCreateInfo->anisotropyEnable);
-
- skip |= validate_bool32("vkCreateSampler", "pCreateInfo->compareEnable", pCreateInfo->compareEnable);
-
- skip |= validate_bool32("vkCreateSampler", "pCreateInfo->unnormalizedCoordinates", pCreateInfo->unnormalizedCoordinates);
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreateSampler", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreateSampler", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreateSampler", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreateSampler", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreateSampler", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkCreateSampler", "pSampler", pSampler, "VUID-vkCreateSampler-pSampler-parameter");
- if (!skip) skip |= manual_PreCallValidateCreateSampler(device, pCreateInfo, pAllocator, pSampler);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateDestroySampler(
- VkDevice device,
- VkSampler sampler,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkDestroySampler", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkDestroySampler", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkDestroySampler", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkDestroySampler", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkDestroySampler", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCreateDescriptorSetLayout(
- VkDevice device,
- const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorSetLayout* pSetLayout) {
- bool skip = false;
- skip |= validate_struct_type("vkCreateDescriptorSetLayout", "pCreateInfo", "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, true, "VUID-vkCreateDescriptorSetLayout-pCreateInfo-parameter", "VUID-VkDescriptorSetLayoutCreateInfo-sType-sType");
- if (pCreateInfo != NULL)
- {
- const VkStructureType allowed_structs_VkDescriptorSetLayoutCreateInfo[] = { VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT };
-
- skip |= validate_struct_pnext("vkCreateDescriptorSetLayout", "pCreateInfo->pNext", "VkDescriptorSetLayoutBindingFlagsCreateInfoEXT", pCreateInfo->pNext, ARRAY_SIZE(allowed_structs_VkDescriptorSetLayoutCreateInfo), allowed_structs_VkDescriptorSetLayoutCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkDescriptorSetLayoutCreateInfo-pNext-pNext");
-
- skip |= validate_flags("vkCreateDescriptorSetLayout", "pCreateInfo->flags", "VkDescriptorSetLayoutCreateFlagBits", AllVkDescriptorSetLayoutCreateFlagBits, pCreateInfo->flags, kOptionalFlags, "VUID-VkDescriptorSetLayoutCreateInfo-flags-parameter");
-
- skip |= validate_array("vkCreateDescriptorSetLayout", "pCreateInfo->bindingCount", "pCreateInfo->pBindings", pCreateInfo->bindingCount, &pCreateInfo->pBindings, false, true, kVUIDUndefined, "VUID-VkDescriptorSetLayoutCreateInfo-pBindings-parameter");
-
- if (pCreateInfo->pBindings != NULL)
- {
- for (uint32_t bindingIndex = 0; bindingIndex < pCreateInfo->bindingCount; ++bindingIndex)
- {
- skip |= validate_ranged_enum("vkCreateDescriptorSetLayout", ParameterName("pCreateInfo->pBindings[%i].descriptorType", ParameterName::IndexVector{ bindingIndex }), "VkDescriptorType", AllVkDescriptorTypeEnums, pCreateInfo->pBindings[bindingIndex].descriptorType, "VUID-VkDescriptorSetLayoutBinding-descriptorType-parameter");
- }
- }
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreateDescriptorSetLayout", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreateDescriptorSetLayout", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreateDescriptorSetLayout", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreateDescriptorSetLayout", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreateDescriptorSetLayout", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkCreateDescriptorSetLayout", "pSetLayout", pSetLayout, "VUID-vkCreateDescriptorSetLayout-pSetLayout-parameter");
- if (!skip) skip |= manual_PreCallValidateCreateDescriptorSetLayout(device, pCreateInfo, pAllocator, pSetLayout);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateDestroyDescriptorSetLayout(
- VkDevice device,
- VkDescriptorSetLayout descriptorSetLayout,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkDestroyDescriptorSetLayout", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkDestroyDescriptorSetLayout", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkDestroyDescriptorSetLayout", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkDestroyDescriptorSetLayout", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkDestroyDescriptorSetLayout", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCreateDescriptorPool(
- VkDevice device,
- const VkDescriptorPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorPool* pDescriptorPool) {
- bool skip = false;
- skip |= validate_struct_type("vkCreateDescriptorPool", "pCreateInfo", "VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO, true, "VUID-vkCreateDescriptorPool-pCreateInfo-parameter", "VUID-VkDescriptorPoolCreateInfo-sType-sType");
- if (pCreateInfo != NULL)
- {
- const VkStructureType allowed_structs_VkDescriptorPoolCreateInfo[] = { VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT };
-
- skip |= validate_struct_pnext("vkCreateDescriptorPool", "pCreateInfo->pNext", "VkDescriptorPoolInlineUniformBlockCreateInfoEXT", pCreateInfo->pNext, ARRAY_SIZE(allowed_structs_VkDescriptorPoolCreateInfo), allowed_structs_VkDescriptorPoolCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkDescriptorPoolCreateInfo-pNext-pNext");
-
- skip |= validate_flags("vkCreateDescriptorPool", "pCreateInfo->flags", "VkDescriptorPoolCreateFlagBits", AllVkDescriptorPoolCreateFlagBits, pCreateInfo->flags, kOptionalFlags, "VUID-VkDescriptorPoolCreateInfo-flags-parameter");
-
- skip |= validate_array("vkCreateDescriptorPool", "pCreateInfo->poolSizeCount", "pCreateInfo->pPoolSizes", pCreateInfo->poolSizeCount, &pCreateInfo->pPoolSizes, true, true, "VUID-VkDescriptorPoolCreateInfo-poolSizeCount-arraylength", "VUID-VkDescriptorPoolCreateInfo-pPoolSizes-parameter");
-
- if (pCreateInfo->pPoolSizes != NULL)
- {
- for (uint32_t poolSizeIndex = 0; poolSizeIndex < pCreateInfo->poolSizeCount; ++poolSizeIndex)
- {
- skip |= validate_ranged_enum("vkCreateDescriptorPool", ParameterName("pCreateInfo->pPoolSizes[%i].type", ParameterName::IndexVector{ poolSizeIndex }), "VkDescriptorType", AllVkDescriptorTypeEnums, pCreateInfo->pPoolSizes[poolSizeIndex].type, "VUID-VkDescriptorPoolSize-type-parameter");
- }
- }
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreateDescriptorPool", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreateDescriptorPool", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreateDescriptorPool", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreateDescriptorPool", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreateDescriptorPool", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkCreateDescriptorPool", "pDescriptorPool", pDescriptorPool, "VUID-vkCreateDescriptorPool-pDescriptorPool-parameter");
- if (!skip) skip |= manual_PreCallValidateCreateDescriptorPool(device, pCreateInfo, pAllocator, pDescriptorPool);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateDestroyDescriptorPool(
- VkDevice device,
- VkDescriptorPool descriptorPool,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkDestroyDescriptorPool", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkDestroyDescriptorPool", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkDestroyDescriptorPool", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkDestroyDescriptorPool", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkDestroyDescriptorPool", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateResetDescriptorPool(
- VkDevice device,
- VkDescriptorPool descriptorPool,
- VkDescriptorPoolResetFlags flags) {
- bool skip = false;
- skip |= validate_required_handle("vkResetDescriptorPool", "descriptorPool", descriptorPool);
- skip |= validate_reserved_flags("vkResetDescriptorPool", "flags", flags, "VUID-vkResetDescriptorPool-flags-zerobitmask");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateAllocateDescriptorSets(
- VkDevice device,
- const VkDescriptorSetAllocateInfo* pAllocateInfo,
- VkDescriptorSet* pDescriptorSets) {
- bool skip = false;
- skip |= validate_struct_type("vkAllocateDescriptorSets", "pAllocateInfo", "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO", pAllocateInfo, VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, true, "VUID-vkAllocateDescriptorSets-pAllocateInfo-parameter", "VUID-VkDescriptorSetAllocateInfo-sType-sType");
- if (pAllocateInfo != NULL)
- {
- const VkStructureType allowed_structs_VkDescriptorSetAllocateInfo[] = { VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT };
-
- skip |= validate_struct_pnext("vkAllocateDescriptorSets", "pAllocateInfo->pNext", "VkDescriptorSetVariableDescriptorCountAllocateInfoEXT", pAllocateInfo->pNext, ARRAY_SIZE(allowed_structs_VkDescriptorSetAllocateInfo), allowed_structs_VkDescriptorSetAllocateInfo, GeneratedVulkanHeaderVersion, "VUID-VkDescriptorSetAllocateInfo-pNext-pNext");
-
- skip |= validate_required_handle("vkAllocateDescriptorSets", "pAllocateInfo->descriptorPool", pAllocateInfo->descriptorPool);
-
- skip |= validate_handle_array("vkAllocateDescriptorSets", "pAllocateInfo->descriptorSetCount", "pAllocateInfo->pSetLayouts", pAllocateInfo->descriptorSetCount, pAllocateInfo->pSetLayouts, true, true);
- }
- if (pAllocateInfo != NULL) {
- skip |= validate_array("vkAllocateDescriptorSets", "pAllocateInfo->descriptorSetCount", "pDescriptorSets", pAllocateInfo->descriptorSetCount, &pDescriptorSets, true, true, kVUIDUndefined, "VUID-vkAllocateDescriptorSets-pDescriptorSets-parameter");
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateFreeDescriptorSets(
- VkDevice device,
- VkDescriptorPool descriptorPool,
- uint32_t descriptorSetCount,
- const VkDescriptorSet* pDescriptorSets) {
- bool skip = false;
- skip |= validate_required_handle("vkFreeDescriptorSets", "descriptorPool", descriptorPool);
- if (!skip) skip |= manual_PreCallValidateFreeDescriptorSets(device, descriptorPool, descriptorSetCount, pDescriptorSets);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateUpdateDescriptorSets(
- VkDevice device,
- uint32_t descriptorWriteCount,
- const VkWriteDescriptorSet* pDescriptorWrites,
- uint32_t descriptorCopyCount,
- const VkCopyDescriptorSet* pDescriptorCopies) {
- bool skip = false;
- skip |= validate_struct_type_array("vkUpdateDescriptorSets", "descriptorWriteCount", "pDescriptorWrites", "VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET", descriptorWriteCount, pDescriptorWrites, VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET, false, true, "VUID-VkWriteDescriptorSet-sType-sType", "VUID-vkUpdateDescriptorSets-pDescriptorWrites-parameter", kVUIDUndefined);
- if (pDescriptorWrites != NULL)
- {
- for (uint32_t descriptorWriteIndex = 0; descriptorWriteIndex < descriptorWriteCount; ++descriptorWriteIndex)
- {
- const VkStructureType allowed_structs_VkWriteDescriptorSet[] = { VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV, VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT };
-
- skip |= validate_struct_pnext("vkUpdateDescriptorSets", ParameterName("pDescriptorWrites[%i].pNext", ParameterName::IndexVector{ descriptorWriteIndex }), "VkWriteDescriptorSetAccelerationStructureNV, VkWriteDescriptorSetInlineUniformBlockEXT", pDescriptorWrites[descriptorWriteIndex].pNext, ARRAY_SIZE(allowed_structs_VkWriteDescriptorSet), allowed_structs_VkWriteDescriptorSet, GeneratedVulkanHeaderVersion, "VUID-VkWriteDescriptorSet-pNext-pNext");
-
- skip |= validate_ranged_enum("vkUpdateDescriptorSets", ParameterName("pDescriptorWrites[%i].descriptorType", ParameterName::IndexVector{ descriptorWriteIndex }), "VkDescriptorType", AllVkDescriptorTypeEnums, pDescriptorWrites[descriptorWriteIndex].descriptorType, "VUID-VkWriteDescriptorSet-descriptorType-parameter");
- }
- }
- skip |= validate_struct_type_array("vkUpdateDescriptorSets", "descriptorCopyCount", "pDescriptorCopies", "VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET", descriptorCopyCount, pDescriptorCopies, VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET, false, true, "VUID-VkCopyDescriptorSet-sType-sType", "VUID-vkUpdateDescriptorSets-pDescriptorCopies-parameter", kVUIDUndefined);
- if (pDescriptorCopies != NULL)
- {
- for (uint32_t descriptorCopyIndex = 0; descriptorCopyIndex < descriptorCopyCount; ++descriptorCopyIndex)
- {
- skip |= validate_struct_pnext("vkUpdateDescriptorSets", ParameterName("pDescriptorCopies[%i].pNext", ParameterName::IndexVector{ descriptorCopyIndex }), NULL, pDescriptorCopies[descriptorCopyIndex].pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkCopyDescriptorSet-pNext-pNext");
-
- skip |= validate_required_handle("vkUpdateDescriptorSets", ParameterName("pDescriptorCopies[%i].srcSet", ParameterName::IndexVector{ descriptorCopyIndex }), pDescriptorCopies[descriptorCopyIndex].srcSet);
-
- skip |= validate_required_handle("vkUpdateDescriptorSets", ParameterName("pDescriptorCopies[%i].dstSet", ParameterName::IndexVector{ descriptorCopyIndex }), pDescriptorCopies[descriptorCopyIndex].dstSet);
- }
- }
- if (!skip) skip |= manual_PreCallValidateUpdateDescriptorSets(device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCreateFramebuffer(
- VkDevice device,
- const VkFramebufferCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFramebuffer* pFramebuffer) {
- bool skip = false;
- skip |= validate_struct_type("vkCreateFramebuffer", "pCreateInfo", "VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, true, "VUID-vkCreateFramebuffer-pCreateInfo-parameter", "VUID-VkFramebufferCreateInfo-sType-sType");
- if (pCreateInfo != NULL)
- {
- const VkStructureType allowed_structs_VkFramebufferCreateInfo[] = { VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR };
-
- skip |= validate_struct_pnext("vkCreateFramebuffer", "pCreateInfo->pNext", "VkFramebufferAttachmentsCreateInfoKHR", pCreateInfo->pNext, ARRAY_SIZE(allowed_structs_VkFramebufferCreateInfo), allowed_structs_VkFramebufferCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkFramebufferCreateInfo-pNext-pNext");
-
- skip |= validate_flags("vkCreateFramebuffer", "pCreateInfo->flags", "VkFramebufferCreateFlagBits", AllVkFramebufferCreateFlagBits, pCreateInfo->flags, kOptionalFlags, "VUID-VkFramebufferCreateInfo-flags-parameter");
-
- skip |= validate_required_handle("vkCreateFramebuffer", "pCreateInfo->renderPass", pCreateInfo->renderPass);
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreateFramebuffer", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreateFramebuffer", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreateFramebuffer", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreateFramebuffer", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreateFramebuffer", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkCreateFramebuffer", "pFramebuffer", pFramebuffer, "VUID-vkCreateFramebuffer-pFramebuffer-parameter");
- if (!skip) skip |= manual_PreCallValidateCreateFramebuffer(device, pCreateInfo, pAllocator, pFramebuffer);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateDestroyFramebuffer(
- VkDevice device,
- VkFramebuffer framebuffer,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkDestroyFramebuffer", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkDestroyFramebuffer", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkDestroyFramebuffer", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkDestroyFramebuffer", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkDestroyFramebuffer", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCreateRenderPass(
- VkDevice device,
- const VkRenderPassCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkRenderPass* pRenderPass) {
- bool skip = false;
- skip |= validate_struct_type("vkCreateRenderPass", "pCreateInfo", "VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, true, "VUID-vkCreateRenderPass-pCreateInfo-parameter", "VUID-VkRenderPassCreateInfo-sType-sType");
- if (pCreateInfo != NULL)
- {
- const VkStructureType allowed_structs_VkRenderPassCreateInfo[] = { VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT, VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO, VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO };
-
- skip |= validate_struct_pnext("vkCreateRenderPass", "pCreateInfo->pNext", "VkRenderPassFragmentDensityMapCreateInfoEXT, VkRenderPassInputAttachmentAspectCreateInfo, VkRenderPassMultiviewCreateInfo", pCreateInfo->pNext, ARRAY_SIZE(allowed_structs_VkRenderPassCreateInfo), allowed_structs_VkRenderPassCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkRenderPassCreateInfo-pNext-pNext");
-
- skip |= validate_array("vkCreateRenderPass", "pCreateInfo->attachmentCount", "pCreateInfo->pAttachments", pCreateInfo->attachmentCount, &pCreateInfo->pAttachments, false, true, kVUIDUndefined, "VUID-VkRenderPassCreateInfo-pAttachments-parameter");
-
- if (pCreateInfo->pAttachments != NULL)
- {
- for (uint32_t attachmentIndex = 0; attachmentIndex < pCreateInfo->attachmentCount; ++attachmentIndex)
- {
- skip |= validate_flags("vkCreateRenderPass", ParameterName("pCreateInfo->pAttachments[%i].flags", ParameterName::IndexVector{ attachmentIndex }), "VkAttachmentDescriptionFlagBits", AllVkAttachmentDescriptionFlagBits, pCreateInfo->pAttachments[attachmentIndex].flags, kOptionalFlags, "VUID-VkAttachmentDescription-flags-parameter");
-
- skip |= validate_ranged_enum("vkCreateRenderPass", ParameterName("pCreateInfo->pAttachments[%i].format", ParameterName::IndexVector{ attachmentIndex }), "VkFormat", AllVkFormatEnums, pCreateInfo->pAttachments[attachmentIndex].format, "VUID-VkAttachmentDescription-format-parameter");
-
- skip |= validate_flags("vkCreateRenderPass", ParameterName("pCreateInfo->pAttachments[%i].samples", ParameterName::IndexVector{ attachmentIndex }), "VkSampleCountFlagBits", AllVkSampleCountFlagBits, pCreateInfo->pAttachments[attachmentIndex].samples, kRequiredSingleBit, "VUID-VkAttachmentDescription-samples-parameter", "VUID-VkAttachmentDescription-samples-parameter");
-
- skip |= validate_ranged_enum("vkCreateRenderPass", ParameterName("pCreateInfo->pAttachments[%i].loadOp", ParameterName::IndexVector{ attachmentIndex }), "VkAttachmentLoadOp", AllVkAttachmentLoadOpEnums, pCreateInfo->pAttachments[attachmentIndex].loadOp, "VUID-VkAttachmentDescription-loadOp-parameter");
-
- skip |= validate_ranged_enum("vkCreateRenderPass", ParameterName("pCreateInfo->pAttachments[%i].storeOp", ParameterName::IndexVector{ attachmentIndex }), "VkAttachmentStoreOp", AllVkAttachmentStoreOpEnums, pCreateInfo->pAttachments[attachmentIndex].storeOp, "VUID-VkAttachmentDescription-storeOp-parameter");
-
- skip |= validate_ranged_enum("vkCreateRenderPass", ParameterName("pCreateInfo->pAttachments[%i].stencilLoadOp", ParameterName::IndexVector{ attachmentIndex }), "VkAttachmentLoadOp", AllVkAttachmentLoadOpEnums, pCreateInfo->pAttachments[attachmentIndex].stencilLoadOp, "VUID-VkAttachmentDescription-stencilLoadOp-parameter");
-
- skip |= validate_ranged_enum("vkCreateRenderPass", ParameterName("pCreateInfo->pAttachments[%i].stencilStoreOp", ParameterName::IndexVector{ attachmentIndex }), "VkAttachmentStoreOp", AllVkAttachmentStoreOpEnums, pCreateInfo->pAttachments[attachmentIndex].stencilStoreOp, "VUID-VkAttachmentDescription-stencilStoreOp-parameter");
-
- skip |= validate_ranged_enum("vkCreateRenderPass", ParameterName("pCreateInfo->pAttachments[%i].initialLayout", ParameterName::IndexVector{ attachmentIndex }), "VkImageLayout", AllVkImageLayoutEnums, pCreateInfo->pAttachments[attachmentIndex].initialLayout, "VUID-VkAttachmentDescription-initialLayout-parameter");
-
- skip |= validate_ranged_enum("vkCreateRenderPass", ParameterName("pCreateInfo->pAttachments[%i].finalLayout", ParameterName::IndexVector{ attachmentIndex }), "VkImageLayout", AllVkImageLayoutEnums, pCreateInfo->pAttachments[attachmentIndex].finalLayout, "VUID-VkAttachmentDescription-finalLayout-parameter");
- }
- }
-
- skip |= validate_array("vkCreateRenderPass", "pCreateInfo->subpassCount", "pCreateInfo->pSubpasses", pCreateInfo->subpassCount, &pCreateInfo->pSubpasses, true, true, "VUID-VkRenderPassCreateInfo-subpassCount-arraylength", "VUID-VkRenderPassCreateInfo-pSubpasses-parameter");
-
- if (pCreateInfo->pSubpasses != NULL)
- {
- for (uint32_t subpassIndex = 0; subpassIndex < pCreateInfo->subpassCount; ++subpassIndex)
- {
- skip |= validate_flags("vkCreateRenderPass", ParameterName("pCreateInfo->pSubpasses[%i].flags", ParameterName::IndexVector{ subpassIndex }), "VkSubpassDescriptionFlagBits", AllVkSubpassDescriptionFlagBits, pCreateInfo->pSubpasses[subpassIndex].flags, kOptionalFlags, "VUID-VkSubpassDescription-flags-parameter");
-
- skip |= validate_ranged_enum("vkCreateRenderPass", ParameterName("pCreateInfo->pSubpasses[%i].pipelineBindPoint", ParameterName::IndexVector{ subpassIndex }), "VkPipelineBindPoint", AllVkPipelineBindPointEnums, pCreateInfo->pSubpasses[subpassIndex].pipelineBindPoint, "VUID-VkSubpassDescription-pipelineBindPoint-parameter");
-
- skip |= validate_array("vkCreateRenderPass", ParameterName("pCreateInfo->pSubpasses[%i].inputAttachmentCount", ParameterName::IndexVector{ subpassIndex }), ParameterName("pCreateInfo->pSubpasses[%i].pInputAttachments", ParameterName::IndexVector{ subpassIndex }), pCreateInfo->pSubpasses[subpassIndex].inputAttachmentCount, &pCreateInfo->pSubpasses[subpassIndex].pInputAttachments, false, true, kVUIDUndefined, "VUID-VkSubpassDescription-pInputAttachments-parameter");
-
- if (pCreateInfo->pSubpasses[subpassIndex].pInputAttachments != NULL)
- {
- for (uint32_t inputAttachmentIndex = 0; inputAttachmentIndex < pCreateInfo->pSubpasses[subpassIndex].inputAttachmentCount; ++inputAttachmentIndex)
- {
- skip |= validate_ranged_enum("vkCreateRenderPass", ParameterName("pCreateInfo->pSubpasses[%i].pInputAttachments[%i].layout", ParameterName::IndexVector{ subpassIndex, inputAttachmentIndex }), "VkImageLayout", AllVkImageLayoutEnums, pCreateInfo->pSubpasses[subpassIndex].pInputAttachments[inputAttachmentIndex].layout, "VUID-VkAttachmentReference-layout-parameter");
- }
- }
-
- skip |= validate_array("vkCreateRenderPass", ParameterName("pCreateInfo->pSubpasses[%i].colorAttachmentCount", ParameterName::IndexVector{ subpassIndex }), ParameterName("pCreateInfo->pSubpasses[%i].pColorAttachments", ParameterName::IndexVector{ subpassIndex }), pCreateInfo->pSubpasses[subpassIndex].colorAttachmentCount, &pCreateInfo->pSubpasses[subpassIndex].pColorAttachments, false, true, kVUIDUndefined, "VUID-VkSubpassDescription-pColorAttachments-parameter");
-
- if (pCreateInfo->pSubpasses[subpassIndex].pColorAttachments != NULL)
- {
- for (uint32_t colorAttachmentIndex = 0; colorAttachmentIndex < pCreateInfo->pSubpasses[subpassIndex].colorAttachmentCount; ++colorAttachmentIndex)
- {
- skip |= validate_ranged_enum("vkCreateRenderPass", ParameterName("pCreateInfo->pSubpasses[%i].pColorAttachments[%i].layout", ParameterName::IndexVector{ subpassIndex, colorAttachmentIndex }), "VkImageLayout", AllVkImageLayoutEnums, pCreateInfo->pSubpasses[subpassIndex].pColorAttachments[colorAttachmentIndex].layout, "VUID-VkAttachmentReference-layout-parameter");
- }
- }
-
- if (pCreateInfo->pSubpasses[subpassIndex].pResolveAttachments != NULL)
- {
- for (uint32_t colorAttachmentIndex = 0; colorAttachmentIndex < pCreateInfo->pSubpasses[subpassIndex].colorAttachmentCount; ++colorAttachmentIndex)
- {
- skip |= validate_ranged_enum("vkCreateRenderPass", ParameterName("pCreateInfo->pSubpasses[%i].pResolveAttachments[%i].layout", ParameterName::IndexVector{ subpassIndex, colorAttachmentIndex }), "VkImageLayout", AllVkImageLayoutEnums, pCreateInfo->pSubpasses[subpassIndex].pResolveAttachments[colorAttachmentIndex].layout, "VUID-VkAttachmentReference-layout-parameter");
- }
- }
-
- if (pCreateInfo->pSubpasses[subpassIndex].pDepthStencilAttachment != NULL)
- {
- skip |= validate_ranged_enum("vkCreateRenderPass", ParameterName("pCreateInfo->pSubpasses[%i].pDepthStencilAttachment->layout", ParameterName::IndexVector{ subpassIndex }), "VkImageLayout", AllVkImageLayoutEnums, pCreateInfo->pSubpasses[subpassIndex].pDepthStencilAttachment->layout, "VUID-VkAttachmentReference-layout-parameter");
- }
-
- skip |= validate_array("vkCreateRenderPass", ParameterName("pCreateInfo->pSubpasses[%i].preserveAttachmentCount", ParameterName::IndexVector{ subpassIndex }), ParameterName("pCreateInfo->pSubpasses[%i].pPreserveAttachments", ParameterName::IndexVector{ subpassIndex }), pCreateInfo->pSubpasses[subpassIndex].preserveAttachmentCount, &pCreateInfo->pSubpasses[subpassIndex].pPreserveAttachments, false, true, kVUIDUndefined, "VUID-VkSubpassDescription-pPreserveAttachments-parameter");
- }
- }
-
- skip |= validate_array("vkCreateRenderPass", "pCreateInfo->dependencyCount", "pCreateInfo->pDependencies", pCreateInfo->dependencyCount, &pCreateInfo->pDependencies, false, true, kVUIDUndefined, "VUID-VkRenderPassCreateInfo-pDependencies-parameter");
-
- if (pCreateInfo->pDependencies != NULL)
- {
- for (uint32_t dependencyIndex = 0; dependencyIndex < pCreateInfo->dependencyCount; ++dependencyIndex)
- {
- skip |= validate_flags("vkCreateRenderPass", ParameterName("pCreateInfo->pDependencies[%i].srcStageMask", ParameterName::IndexVector{ dependencyIndex }), "VkPipelineStageFlagBits", AllVkPipelineStageFlagBits, pCreateInfo->pDependencies[dependencyIndex].srcStageMask, kRequiredFlags, "VUID-VkSubpassDependency-srcStageMask-parameter", "VUID-VkSubpassDependency-srcStageMask-requiredbitmask");
-
- skip |= validate_flags("vkCreateRenderPass", ParameterName("pCreateInfo->pDependencies[%i].dstStageMask", ParameterName::IndexVector{ dependencyIndex }), "VkPipelineStageFlagBits", AllVkPipelineStageFlagBits, pCreateInfo->pDependencies[dependencyIndex].dstStageMask, kRequiredFlags, "VUID-VkSubpassDependency-dstStageMask-parameter", "VUID-VkSubpassDependency-dstStageMask-requiredbitmask");
-
- skip |= validate_flags("vkCreateRenderPass", ParameterName("pCreateInfo->pDependencies[%i].srcAccessMask", ParameterName::IndexVector{ dependencyIndex }), "VkAccessFlagBits", AllVkAccessFlagBits, pCreateInfo->pDependencies[dependencyIndex].srcAccessMask, kOptionalFlags, "VUID-VkSubpassDependency-srcAccessMask-parameter");
-
- skip |= validate_flags("vkCreateRenderPass", ParameterName("pCreateInfo->pDependencies[%i].dstAccessMask", ParameterName::IndexVector{ dependencyIndex }), "VkAccessFlagBits", AllVkAccessFlagBits, pCreateInfo->pDependencies[dependencyIndex].dstAccessMask, kOptionalFlags, "VUID-VkSubpassDependency-dstAccessMask-parameter");
-
- skip |= validate_flags("vkCreateRenderPass", ParameterName("pCreateInfo->pDependencies[%i].dependencyFlags", ParameterName::IndexVector{ dependencyIndex }), "VkDependencyFlagBits", AllVkDependencyFlagBits, pCreateInfo->pDependencies[dependencyIndex].dependencyFlags, kOptionalFlags, "VUID-VkSubpassDependency-dependencyFlags-parameter");
- }
- }
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreateRenderPass", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreateRenderPass", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreateRenderPass", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreateRenderPass", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreateRenderPass", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkCreateRenderPass", "pRenderPass", pRenderPass, "VUID-vkCreateRenderPass-pRenderPass-parameter");
- if (!skip) skip |= manual_PreCallValidateCreateRenderPass(device, pCreateInfo, pAllocator, pRenderPass);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateDestroyRenderPass(
- VkDevice device,
- VkRenderPass renderPass,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkDestroyRenderPass", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkDestroyRenderPass", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkDestroyRenderPass", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkDestroyRenderPass", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkDestroyRenderPass", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetRenderAreaGranularity(
- VkDevice device,
- VkRenderPass renderPass,
- VkExtent2D* pGranularity) {
- bool skip = false;
- skip |= validate_required_handle("vkGetRenderAreaGranularity", "renderPass", renderPass);
- skip |= validate_required_pointer("vkGetRenderAreaGranularity", "pGranularity", pGranularity, "VUID-vkGetRenderAreaGranularity-pGranularity-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCreateCommandPool(
- VkDevice device,
- const VkCommandPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkCommandPool* pCommandPool) {
- bool skip = false;
- skip |= validate_struct_type("vkCreateCommandPool", "pCreateInfo", "VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO, true, "VUID-vkCreateCommandPool-pCreateInfo-parameter", "VUID-VkCommandPoolCreateInfo-sType-sType");
- if (pCreateInfo != NULL)
- {
- skip |= validate_struct_pnext("vkCreateCommandPool", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkCommandPoolCreateInfo-pNext-pNext");
-
- skip |= validate_flags("vkCreateCommandPool", "pCreateInfo->flags", "VkCommandPoolCreateFlagBits", AllVkCommandPoolCreateFlagBits, pCreateInfo->flags, kOptionalFlags, "VUID-VkCommandPoolCreateInfo-flags-parameter");
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreateCommandPool", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreateCommandPool", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreateCommandPool", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreateCommandPool", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreateCommandPool", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkCreateCommandPool", "pCommandPool", pCommandPool, "VUID-vkCreateCommandPool-pCommandPool-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateDestroyCommandPool(
- VkDevice device,
- VkCommandPool commandPool,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkDestroyCommandPool", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkDestroyCommandPool", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkDestroyCommandPool", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkDestroyCommandPool", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkDestroyCommandPool", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateResetCommandPool(
- VkDevice device,
- VkCommandPool commandPool,
- VkCommandPoolResetFlags flags) {
- bool skip = false;
- skip |= validate_required_handle("vkResetCommandPool", "commandPool", commandPool);
- skip |= validate_flags("vkResetCommandPool", "flags", "VkCommandPoolResetFlagBits", AllVkCommandPoolResetFlagBits, flags, kOptionalFlags, "VUID-vkResetCommandPool-flags-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateAllocateCommandBuffers(
- VkDevice device,
- const VkCommandBufferAllocateInfo* pAllocateInfo,
- VkCommandBuffer* pCommandBuffers) {
- bool skip = false;
- skip |= validate_struct_type("vkAllocateCommandBuffers", "pAllocateInfo", "VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO", pAllocateInfo, VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, true, "VUID-vkAllocateCommandBuffers-pAllocateInfo-parameter", "VUID-VkCommandBufferAllocateInfo-sType-sType");
- if (pAllocateInfo != NULL)
- {
- skip |= validate_struct_pnext("vkAllocateCommandBuffers", "pAllocateInfo->pNext", NULL, pAllocateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkCommandBufferAllocateInfo-pNext-pNext");
-
- skip |= validate_required_handle("vkAllocateCommandBuffers", "pAllocateInfo->commandPool", pAllocateInfo->commandPool);
-
- skip |= validate_ranged_enum("vkAllocateCommandBuffers", "pAllocateInfo->level", "VkCommandBufferLevel", AllVkCommandBufferLevelEnums, pAllocateInfo->level, "VUID-VkCommandBufferAllocateInfo-level-parameter");
- }
- if (pAllocateInfo != NULL) {
- skip |= validate_array("vkAllocateCommandBuffers", "pAllocateInfo->commandBufferCount", "pCommandBuffers", pAllocateInfo->commandBufferCount, &pCommandBuffers, true, true, kVUIDUndefined, "VUID-vkAllocateCommandBuffers-pCommandBuffers-parameter");
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateFreeCommandBuffers(
- VkDevice device,
- VkCommandPool commandPool,
- uint32_t commandBufferCount,
- const VkCommandBuffer* pCommandBuffers) {
- bool skip = false;
- skip |= validate_required_handle("vkFreeCommandBuffers", "commandPool", commandPool);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateBeginCommandBuffer(
- VkCommandBuffer commandBuffer,
- const VkCommandBufferBeginInfo* pBeginInfo) {
- bool skip = false;
- skip |= validate_struct_type("vkBeginCommandBuffer", "pBeginInfo", "VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO", pBeginInfo, VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, true, "VUID-vkBeginCommandBuffer-pBeginInfo-parameter", "VUID-VkCommandBufferBeginInfo-sType-sType");
- if (pBeginInfo != NULL)
- {
- const VkStructureType allowed_structs_VkCommandBufferBeginInfo[] = { VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO };
-
- skip |= validate_struct_pnext("vkBeginCommandBuffer", "pBeginInfo->pNext", "VkDeviceGroupCommandBufferBeginInfo", pBeginInfo->pNext, ARRAY_SIZE(allowed_structs_VkCommandBufferBeginInfo), allowed_structs_VkCommandBufferBeginInfo, GeneratedVulkanHeaderVersion, "VUID-VkCommandBufferBeginInfo-pNext-pNext");
-
- skip |= validate_flags("vkBeginCommandBuffer", "pBeginInfo->flags", "VkCommandBufferUsageFlagBits", AllVkCommandBufferUsageFlagBits, pBeginInfo->flags, kOptionalFlags, "VUID-VkCommandBufferBeginInfo-flags-parameter");
- }
- if (!skip) skip |= manual_PreCallValidateBeginCommandBuffer(commandBuffer, pBeginInfo);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateEndCommandBuffer(
- VkCommandBuffer commandBuffer) {
- bool skip = false;
- // No xml-driven validation
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateResetCommandBuffer(
- VkCommandBuffer commandBuffer,
- VkCommandBufferResetFlags flags) {
- bool skip = false;
- skip |= validate_flags("vkResetCommandBuffer", "flags", "VkCommandBufferResetFlagBits", AllVkCommandBufferResetFlagBits, flags, kOptionalFlags, "VUID-vkResetCommandBuffer-flags-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdBindPipeline(
- VkCommandBuffer commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipeline pipeline) {
- bool skip = false;
- skip |= validate_ranged_enum("vkCmdBindPipeline", "pipelineBindPoint", "VkPipelineBindPoint", AllVkPipelineBindPointEnums, pipelineBindPoint, "VUID-vkCmdBindPipeline-pipelineBindPoint-parameter");
- skip |= validate_required_handle("vkCmdBindPipeline", "pipeline", pipeline);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdSetViewport(
- VkCommandBuffer commandBuffer,
- uint32_t firstViewport,
- uint32_t viewportCount,
- const VkViewport* pViewports) {
- bool skip = false;
- skip |= validate_array("vkCmdSetViewport", "viewportCount", "pViewports", viewportCount, &pViewports, true, true, "VUID-vkCmdSetViewport-viewportCount-arraylength", "VUID-vkCmdSetViewport-pViewports-parameter");
- if (pViewports != NULL)
- {
- for (uint32_t viewportIndex = 0; viewportIndex < viewportCount; ++viewportIndex)
- {
- // No xml-driven validation
- }
- }
- if (!skip) skip |= manual_PreCallValidateCmdSetViewport(commandBuffer, firstViewport, viewportCount, pViewports);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdSetScissor(
- VkCommandBuffer commandBuffer,
- uint32_t firstScissor,
- uint32_t scissorCount,
- const VkRect2D* pScissors) {
- bool skip = false;
- skip |= validate_array("vkCmdSetScissor", "scissorCount", "pScissors", scissorCount, &pScissors, true, true, "VUID-vkCmdSetScissor-scissorCount-arraylength", "VUID-vkCmdSetScissor-pScissors-parameter");
- if (pScissors != NULL)
- {
- for (uint32_t scissorIndex = 0; scissorIndex < scissorCount; ++scissorIndex)
- {
- // No xml-driven validation
-
- // No xml-driven validation
- }
- }
- if (!skip) skip |= manual_PreCallValidateCmdSetScissor(commandBuffer, firstScissor, scissorCount, pScissors);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdSetLineWidth(
- VkCommandBuffer commandBuffer,
- float lineWidth) {
- bool skip = false;
- // No xml-driven validation
- if (!skip) skip |= manual_PreCallValidateCmdSetLineWidth(commandBuffer, lineWidth);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdSetDepthBias(
- VkCommandBuffer commandBuffer,
- float depthBiasConstantFactor,
- float depthBiasClamp,
- float depthBiasSlopeFactor) {
- bool skip = false;
- // No xml-driven validation
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdSetBlendConstants(
- VkCommandBuffer commandBuffer,
- const float blendConstants[4]) {
- bool skip = false;
- skip |= validate_required_pointer("vkCmdSetBlendConstants", "blendConstants", blendConstants, kVUIDUndefined);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdSetDepthBounds(
- VkCommandBuffer commandBuffer,
- float minDepthBounds,
- float maxDepthBounds) {
- bool skip = false;
- // No xml-driven validation
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdSetStencilCompareMask(
- VkCommandBuffer commandBuffer,
- VkStencilFaceFlags faceMask,
- uint32_t compareMask) {
- bool skip = false;
- skip |= validate_flags("vkCmdSetStencilCompareMask", "faceMask", "VkStencilFaceFlagBits", AllVkStencilFaceFlagBits, faceMask, kRequiredFlags, "VUID-vkCmdSetStencilCompareMask-faceMask-parameter", "VUID-vkCmdSetStencilCompareMask-faceMask-requiredbitmask");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdSetStencilWriteMask(
- VkCommandBuffer commandBuffer,
- VkStencilFaceFlags faceMask,
- uint32_t writeMask) {
- bool skip = false;
- skip |= validate_flags("vkCmdSetStencilWriteMask", "faceMask", "VkStencilFaceFlagBits", AllVkStencilFaceFlagBits, faceMask, kRequiredFlags, "VUID-vkCmdSetStencilWriteMask-faceMask-parameter", "VUID-vkCmdSetStencilWriteMask-faceMask-requiredbitmask");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdSetStencilReference(
- VkCommandBuffer commandBuffer,
- VkStencilFaceFlags faceMask,
- uint32_t reference) {
- bool skip = false;
- skip |= validate_flags("vkCmdSetStencilReference", "faceMask", "VkStencilFaceFlagBits", AllVkStencilFaceFlagBits, faceMask, kRequiredFlags, "VUID-vkCmdSetStencilReference-faceMask-parameter", "VUID-vkCmdSetStencilReference-faceMask-requiredbitmask");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdBindDescriptorSets(
- VkCommandBuffer commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipelineLayout layout,
- uint32_t firstSet,
- uint32_t descriptorSetCount,
- const VkDescriptorSet* pDescriptorSets,
- uint32_t dynamicOffsetCount,
- const uint32_t* pDynamicOffsets) {
- bool skip = false;
- skip |= validate_ranged_enum("vkCmdBindDescriptorSets", "pipelineBindPoint", "VkPipelineBindPoint", AllVkPipelineBindPointEnums, pipelineBindPoint, "VUID-vkCmdBindDescriptorSets-pipelineBindPoint-parameter");
- skip |= validate_required_handle("vkCmdBindDescriptorSets", "layout", layout);
- skip |= validate_handle_array("vkCmdBindDescriptorSets", "descriptorSetCount", "pDescriptorSets", descriptorSetCount, pDescriptorSets, true, true);
- skip |= validate_array("vkCmdBindDescriptorSets", "dynamicOffsetCount", "pDynamicOffsets", dynamicOffsetCount, &pDynamicOffsets, false, true, kVUIDUndefined, "VUID-vkCmdBindDescriptorSets-pDynamicOffsets-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdBindIndexBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkIndexType indexType) {
- bool skip = false;
- skip |= validate_required_handle("vkCmdBindIndexBuffer", "buffer", buffer);
- skip |= validate_ranged_enum("vkCmdBindIndexBuffer", "indexType", "VkIndexType", AllVkIndexTypeEnums, indexType, "VUID-vkCmdBindIndexBuffer-indexType-parameter");
- if (!skip) skip |= manual_PreCallValidateCmdBindIndexBuffer(commandBuffer, buffer, offset, indexType);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdBindVertexBuffers(
- VkCommandBuffer commandBuffer,
- uint32_t firstBinding,
- uint32_t bindingCount,
- const VkBuffer* pBuffers,
- const VkDeviceSize* pOffsets) {
- bool skip = false;
- skip |= validate_handle_array("vkCmdBindVertexBuffers", "bindingCount", "pBuffers", bindingCount, pBuffers, true, true);
- skip |= validate_array("vkCmdBindVertexBuffers", "bindingCount", "pOffsets", bindingCount, &pOffsets, true, true, "VUID-vkCmdBindVertexBuffers-bindingCount-arraylength", "VUID-vkCmdBindVertexBuffers-pOffsets-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdDraw(
- VkCommandBuffer commandBuffer,
- uint32_t vertexCount,
- uint32_t instanceCount,
- uint32_t firstVertex,
- uint32_t firstInstance) {
- bool skip = false;
- // No xml-driven validation
- if (!skip) skip |= manual_PreCallValidateCmdDraw(commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdDrawIndexed(
- VkCommandBuffer commandBuffer,
- uint32_t indexCount,
- uint32_t instanceCount,
- uint32_t firstIndex,
- int32_t vertexOffset,
- uint32_t firstInstance) {
- bool skip = false;
- // No xml-driven validation
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdDrawIndirect(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- uint32_t drawCount,
- uint32_t stride) {
- bool skip = false;
- skip |= validate_required_handle("vkCmdDrawIndirect", "buffer", buffer);
- if (!skip) skip |= manual_PreCallValidateCmdDrawIndirect(commandBuffer, buffer, offset, drawCount, stride);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdDrawIndexedIndirect(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- uint32_t drawCount,
- uint32_t stride) {
- bool skip = false;
- skip |= validate_required_handle("vkCmdDrawIndexedIndirect", "buffer", buffer);
- if (!skip) skip |= manual_PreCallValidateCmdDrawIndexedIndirect(commandBuffer, buffer, offset, drawCount, stride);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdDispatch(
- VkCommandBuffer commandBuffer,
- uint32_t groupCountX,
- uint32_t groupCountY,
- uint32_t groupCountZ) {
- bool skip = false;
- // No xml-driven validation
- if (!skip) skip |= manual_PreCallValidateCmdDispatch(commandBuffer, groupCountX, groupCountY, groupCountZ);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdDispatchIndirect(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset) {
- bool skip = false;
- skip |= validate_required_handle("vkCmdDispatchIndirect", "buffer", buffer);
- if (!skip) skip |= manual_PreCallValidateCmdDispatchIndirect(commandBuffer, buffer, offset);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdCopyBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer srcBuffer,
- VkBuffer dstBuffer,
- uint32_t regionCount,
- const VkBufferCopy* pRegions) {
- bool skip = false;
- skip |= validate_required_handle("vkCmdCopyBuffer", "srcBuffer", srcBuffer);
- skip |= validate_required_handle("vkCmdCopyBuffer", "dstBuffer", dstBuffer);
- skip |= validate_array("vkCmdCopyBuffer", "regionCount", "pRegions", regionCount, &pRegions, true, true, "VUID-vkCmdCopyBuffer-regionCount-arraylength", "VUID-vkCmdCopyBuffer-pRegions-parameter");
- if (pRegions != NULL)
- {
- for (uint32_t regionIndex = 0; regionIndex < regionCount; ++regionIndex)
- {
- // No xml-driven validation
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdCopyImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkImageCopy* pRegions) {
- bool skip = false;
- skip |= validate_required_handle("vkCmdCopyImage", "srcImage", srcImage);
- skip |= validate_ranged_enum("vkCmdCopyImage", "srcImageLayout", "VkImageLayout", AllVkImageLayoutEnums, srcImageLayout, "VUID-vkCmdCopyImage-srcImageLayout-parameter");
- skip |= validate_required_handle("vkCmdCopyImage", "dstImage", dstImage);
- skip |= validate_ranged_enum("vkCmdCopyImage", "dstImageLayout", "VkImageLayout", AllVkImageLayoutEnums, dstImageLayout, "VUID-vkCmdCopyImage-dstImageLayout-parameter");
- skip |= validate_array("vkCmdCopyImage", "regionCount", "pRegions", regionCount, &pRegions, true, true, "VUID-vkCmdCopyImage-regionCount-arraylength", "VUID-vkCmdCopyImage-pRegions-parameter");
- if (pRegions != NULL)
- {
- for (uint32_t regionIndex = 0; regionIndex < regionCount; ++regionIndex)
- {
- skip |= validate_flags("vkCmdCopyImage", ParameterName("pRegions[%i].srcSubresource.aspectMask", ParameterName::IndexVector{ regionIndex }), "VkImageAspectFlagBits", AllVkImageAspectFlagBits, pRegions[regionIndex].srcSubresource.aspectMask, kRequiredFlags, "VUID-VkImageSubresourceLayers-aspectMask-parameter", "VUID-VkImageSubresourceLayers-aspectMask-requiredbitmask");
-
- // No xml-driven validation
-
- skip |= validate_flags("vkCmdCopyImage", ParameterName("pRegions[%i].dstSubresource.aspectMask", ParameterName::IndexVector{ regionIndex }), "VkImageAspectFlagBits", AllVkImageAspectFlagBits, pRegions[regionIndex].dstSubresource.aspectMask, kRequiredFlags, "VUID-VkImageSubresourceLayers-aspectMask-parameter", "VUID-VkImageSubresourceLayers-aspectMask-requiredbitmask");
-
- // No xml-driven validation
-
- // No xml-driven validation
- }
- }
- if (!skip) skip |= manual_PreCallValidateCmdCopyImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdBlitImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkImageBlit* pRegions,
- VkFilter filter) {
- bool skip = false;
- skip |= validate_required_handle("vkCmdBlitImage", "srcImage", srcImage);
- skip |= validate_ranged_enum("vkCmdBlitImage", "srcImageLayout", "VkImageLayout", AllVkImageLayoutEnums, srcImageLayout, "VUID-vkCmdBlitImage-srcImageLayout-parameter");
- skip |= validate_required_handle("vkCmdBlitImage", "dstImage", dstImage);
- skip |= validate_ranged_enum("vkCmdBlitImage", "dstImageLayout", "VkImageLayout", AllVkImageLayoutEnums, dstImageLayout, "VUID-vkCmdBlitImage-dstImageLayout-parameter");
- skip |= validate_array("vkCmdBlitImage", "regionCount", "pRegions", regionCount, &pRegions, true, true, "VUID-vkCmdBlitImage-regionCount-arraylength", "VUID-vkCmdBlitImage-pRegions-parameter");
- if (pRegions != NULL)
- {
- for (uint32_t regionIndex = 0; regionIndex < regionCount; ++regionIndex)
- {
- skip |= validate_flags("vkCmdBlitImage", ParameterName("pRegions[%i].srcSubresource.aspectMask", ParameterName::IndexVector{ regionIndex }), "VkImageAspectFlagBits", AllVkImageAspectFlagBits, pRegions[regionIndex].srcSubresource.aspectMask, kRequiredFlags, "VUID-VkImageSubresourceLayers-aspectMask-parameter", "VUID-VkImageSubresourceLayers-aspectMask-requiredbitmask");
-
- skip |= validate_flags("vkCmdBlitImage", ParameterName("pRegions[%i].dstSubresource.aspectMask", ParameterName::IndexVector{ regionIndex }), "VkImageAspectFlagBits", AllVkImageAspectFlagBits, pRegions[regionIndex].dstSubresource.aspectMask, kRequiredFlags, "VUID-VkImageSubresourceLayers-aspectMask-parameter", "VUID-VkImageSubresourceLayers-aspectMask-requiredbitmask");
- }
- }
- skip |= validate_ranged_enum("vkCmdBlitImage", "filter", "VkFilter", AllVkFilterEnums, filter, "VUID-vkCmdBlitImage-filter-parameter");
- if (!skip) skip |= manual_PreCallValidateCmdBlitImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdCopyBufferToImage(
- VkCommandBuffer commandBuffer,
- VkBuffer srcBuffer,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkBufferImageCopy* pRegions) {
- bool skip = false;
- skip |= validate_required_handle("vkCmdCopyBufferToImage", "srcBuffer", srcBuffer);
- skip |= validate_required_handle("vkCmdCopyBufferToImage", "dstImage", dstImage);
- skip |= validate_ranged_enum("vkCmdCopyBufferToImage", "dstImageLayout", "VkImageLayout", AllVkImageLayoutEnums, dstImageLayout, "VUID-vkCmdCopyBufferToImage-dstImageLayout-parameter");
- skip |= validate_array("vkCmdCopyBufferToImage", "regionCount", "pRegions", regionCount, &pRegions, true, true, "VUID-vkCmdCopyBufferToImage-regionCount-arraylength", "VUID-vkCmdCopyBufferToImage-pRegions-parameter");
- if (pRegions != NULL)
- {
- for (uint32_t regionIndex = 0; regionIndex < regionCount; ++regionIndex)
- {
- skip |= validate_flags("vkCmdCopyBufferToImage", ParameterName("pRegions[%i].imageSubresource.aspectMask", ParameterName::IndexVector{ regionIndex }), "VkImageAspectFlagBits", AllVkImageAspectFlagBits, pRegions[regionIndex].imageSubresource.aspectMask, kRequiredFlags, "VUID-VkImageSubresourceLayers-aspectMask-parameter", "VUID-VkImageSubresourceLayers-aspectMask-requiredbitmask");
-
- // No xml-driven validation
-
- // No xml-driven validation
- }
- }
- if (!skip) skip |= manual_PreCallValidateCmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdCopyImageToBuffer(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkBuffer dstBuffer,
- uint32_t regionCount,
- const VkBufferImageCopy* pRegions) {
- bool skip = false;
- skip |= validate_required_handle("vkCmdCopyImageToBuffer", "srcImage", srcImage);
- skip |= validate_ranged_enum("vkCmdCopyImageToBuffer", "srcImageLayout", "VkImageLayout", AllVkImageLayoutEnums, srcImageLayout, "VUID-vkCmdCopyImageToBuffer-srcImageLayout-parameter");
- skip |= validate_required_handle("vkCmdCopyImageToBuffer", "dstBuffer", dstBuffer);
- skip |= validate_array("vkCmdCopyImageToBuffer", "regionCount", "pRegions", regionCount, &pRegions, true, true, "VUID-vkCmdCopyImageToBuffer-regionCount-arraylength", "VUID-vkCmdCopyImageToBuffer-pRegions-parameter");
- if (pRegions != NULL)
- {
- for (uint32_t regionIndex = 0; regionIndex < regionCount; ++regionIndex)
- {
- skip |= validate_flags("vkCmdCopyImageToBuffer", ParameterName("pRegions[%i].imageSubresource.aspectMask", ParameterName::IndexVector{ regionIndex }), "VkImageAspectFlagBits", AllVkImageAspectFlagBits, pRegions[regionIndex].imageSubresource.aspectMask, kRequiredFlags, "VUID-VkImageSubresourceLayers-aspectMask-parameter", "VUID-VkImageSubresourceLayers-aspectMask-requiredbitmask");
-
- // No xml-driven validation
-
- // No xml-driven validation
- }
- }
- if (!skip) skip |= manual_PreCallValidateCmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdUpdateBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize dataSize,
- const void* pData) {
- bool skip = false;
- skip |= validate_required_handle("vkCmdUpdateBuffer", "dstBuffer", dstBuffer);
- skip |= validate_array("vkCmdUpdateBuffer", "dataSize", "pData", dataSize, &pData, true, true, "VUID-vkCmdUpdateBuffer-dataSize-arraylength", "VUID-vkCmdUpdateBuffer-pData-parameter");
- if (!skip) skip |= manual_PreCallValidateCmdUpdateBuffer(commandBuffer, dstBuffer, dstOffset, dataSize, pData);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdFillBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize size,
- uint32_t data) {
- bool skip = false;
- skip |= validate_required_handle("vkCmdFillBuffer", "dstBuffer", dstBuffer);
- if (!skip) skip |= manual_PreCallValidateCmdFillBuffer(commandBuffer, dstBuffer, dstOffset, size, data);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdClearColorImage(
- VkCommandBuffer commandBuffer,
- VkImage image,
- VkImageLayout imageLayout,
- const VkClearColorValue* pColor,
- uint32_t rangeCount,
- const VkImageSubresourceRange* pRanges) {
- bool skip = false;
- skip |= validate_required_handle("vkCmdClearColorImage", "image", image);
- skip |= validate_ranged_enum("vkCmdClearColorImage", "imageLayout", "VkImageLayout", AllVkImageLayoutEnums, imageLayout, "VUID-vkCmdClearColorImage-imageLayout-parameter");
- skip |= validate_required_pointer("vkCmdClearColorImage", "pColor", pColor, "VUID-vkCmdClearColorImage-pColor-parameter");
- if (pColor != NULL)
- {
- // No xml-driven validation
- }
- skip |= validate_array("vkCmdClearColorImage", "rangeCount", "pRanges", rangeCount, &pRanges, true, true, "VUID-vkCmdClearColorImage-rangeCount-arraylength", "VUID-vkCmdClearColorImage-pRanges-parameter");
- if (pRanges != NULL)
- {
- for (uint32_t rangeIndex = 0; rangeIndex < rangeCount; ++rangeIndex)
- {
- skip |= validate_flags("vkCmdClearColorImage", ParameterName("pRanges[%i].aspectMask", ParameterName::IndexVector{ rangeIndex }), "VkImageAspectFlagBits", AllVkImageAspectFlagBits, pRanges[rangeIndex].aspectMask, kRequiredFlags, "VUID-VkImageSubresourceRange-aspectMask-parameter", "VUID-VkImageSubresourceRange-aspectMask-requiredbitmask");
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdClearDepthStencilImage(
- VkCommandBuffer commandBuffer,
- VkImage image,
- VkImageLayout imageLayout,
- const VkClearDepthStencilValue* pDepthStencil,
- uint32_t rangeCount,
- const VkImageSubresourceRange* pRanges) {
- bool skip = false;
- skip |= validate_required_handle("vkCmdClearDepthStencilImage", "image", image);
- skip |= validate_ranged_enum("vkCmdClearDepthStencilImage", "imageLayout", "VkImageLayout", AllVkImageLayoutEnums, imageLayout, "VUID-vkCmdClearDepthStencilImage-imageLayout-parameter");
- skip |= validate_required_pointer("vkCmdClearDepthStencilImage", "pDepthStencil", pDepthStencil, "VUID-vkCmdClearDepthStencilImage-pDepthStencil-parameter");
- if (pDepthStencil != NULL)
- {
- // No xml-driven validation
- }
- skip |= validate_array("vkCmdClearDepthStencilImage", "rangeCount", "pRanges", rangeCount, &pRanges, true, true, "VUID-vkCmdClearDepthStencilImage-rangeCount-arraylength", "VUID-vkCmdClearDepthStencilImage-pRanges-parameter");
- if (pRanges != NULL)
- {
- for (uint32_t rangeIndex = 0; rangeIndex < rangeCount; ++rangeIndex)
- {
- skip |= validate_flags("vkCmdClearDepthStencilImage", ParameterName("pRanges[%i].aspectMask", ParameterName::IndexVector{ rangeIndex }), "VkImageAspectFlagBits", AllVkImageAspectFlagBits, pRanges[rangeIndex].aspectMask, kRequiredFlags, "VUID-VkImageSubresourceRange-aspectMask-parameter", "VUID-VkImageSubresourceRange-aspectMask-requiredbitmask");
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdClearAttachments(
- VkCommandBuffer commandBuffer,
- uint32_t attachmentCount,
- const VkClearAttachment* pAttachments,
- uint32_t rectCount,
- const VkClearRect* pRects) {
- bool skip = false;
- skip |= validate_array("vkCmdClearAttachments", "attachmentCount", "pAttachments", attachmentCount, &pAttachments, true, true, "VUID-vkCmdClearAttachments-attachmentCount-arraylength", "VUID-vkCmdClearAttachments-pAttachments-parameter");
- if (pAttachments != NULL)
- {
- for (uint32_t attachmentIndex = 0; attachmentIndex < attachmentCount; ++attachmentIndex)
- {
- skip |= validate_flags("vkCmdClearAttachments", ParameterName("pAttachments[%i].aspectMask", ParameterName::IndexVector{ attachmentIndex }), "VkImageAspectFlagBits", AllVkImageAspectFlagBits, pAttachments[attachmentIndex].aspectMask, kRequiredFlags, "VUID-VkClearAttachment-aspectMask-parameter", "VUID-VkClearAttachment-aspectMask-requiredbitmask");
-
- // No xml-driven validation
-
- // No xml-driven validation
- }
- }
- skip |= validate_array("vkCmdClearAttachments", "rectCount", "pRects", rectCount, &pRects, true, true, "VUID-vkCmdClearAttachments-rectCount-arraylength", "VUID-vkCmdClearAttachments-pRects-parameter");
- if (pRects != NULL)
- {
- for (uint32_t rectIndex = 0; rectIndex < rectCount; ++rectIndex)
- {
- // No xml-driven validation
-
- // No xml-driven validation
- }
- }
- if (!skip) skip |= manual_PreCallValidateCmdClearAttachments(commandBuffer, attachmentCount, pAttachments, rectCount, pRects);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdResolveImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkImageResolve* pRegions) {
- bool skip = false;
- skip |= validate_required_handle("vkCmdResolveImage", "srcImage", srcImage);
- skip |= validate_ranged_enum("vkCmdResolveImage", "srcImageLayout", "VkImageLayout", AllVkImageLayoutEnums, srcImageLayout, "VUID-vkCmdResolveImage-srcImageLayout-parameter");
- skip |= validate_required_handle("vkCmdResolveImage", "dstImage", dstImage);
- skip |= validate_ranged_enum("vkCmdResolveImage", "dstImageLayout", "VkImageLayout", AllVkImageLayoutEnums, dstImageLayout, "VUID-vkCmdResolveImage-dstImageLayout-parameter");
- skip |= validate_array("vkCmdResolveImage", "regionCount", "pRegions", regionCount, &pRegions, true, true, "VUID-vkCmdResolveImage-regionCount-arraylength", "VUID-vkCmdResolveImage-pRegions-parameter");
- if (pRegions != NULL)
- {
- for (uint32_t regionIndex = 0; regionIndex < regionCount; ++regionIndex)
- {
- skip |= validate_flags("vkCmdResolveImage", ParameterName("pRegions[%i].srcSubresource.aspectMask", ParameterName::IndexVector{ regionIndex }), "VkImageAspectFlagBits", AllVkImageAspectFlagBits, pRegions[regionIndex].srcSubresource.aspectMask, kRequiredFlags, "VUID-VkImageSubresourceLayers-aspectMask-parameter", "VUID-VkImageSubresourceLayers-aspectMask-requiredbitmask");
-
- // No xml-driven validation
-
- skip |= validate_flags("vkCmdResolveImage", ParameterName("pRegions[%i].dstSubresource.aspectMask", ParameterName::IndexVector{ regionIndex }), "VkImageAspectFlagBits", AllVkImageAspectFlagBits, pRegions[regionIndex].dstSubresource.aspectMask, kRequiredFlags, "VUID-VkImageSubresourceLayers-aspectMask-parameter", "VUID-VkImageSubresourceLayers-aspectMask-requiredbitmask");
-
- // No xml-driven validation
-
- // No xml-driven validation
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdSetEvent(
- VkCommandBuffer commandBuffer,
- VkEvent event,
- VkPipelineStageFlags stageMask) {
- bool skip = false;
- skip |= validate_required_handle("vkCmdSetEvent", "event", event);
- skip |= validate_flags("vkCmdSetEvent", "stageMask", "VkPipelineStageFlagBits", AllVkPipelineStageFlagBits, stageMask, kRequiredFlags, "VUID-vkCmdSetEvent-stageMask-parameter", "VUID-vkCmdSetEvent-stageMask-requiredbitmask");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdResetEvent(
- VkCommandBuffer commandBuffer,
- VkEvent event,
- VkPipelineStageFlags stageMask) {
- bool skip = false;
- skip |= validate_required_handle("vkCmdResetEvent", "event", event);
- skip |= validate_flags("vkCmdResetEvent", "stageMask", "VkPipelineStageFlagBits", AllVkPipelineStageFlagBits, stageMask, kRequiredFlags, "VUID-vkCmdResetEvent-stageMask-parameter", "VUID-vkCmdResetEvent-stageMask-requiredbitmask");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdWaitEvents(
- VkCommandBuffer commandBuffer,
- uint32_t eventCount,
- const VkEvent* pEvents,
- VkPipelineStageFlags srcStageMask,
- VkPipelineStageFlags dstStageMask,
- uint32_t memoryBarrierCount,
- const VkMemoryBarrier* pMemoryBarriers,
- uint32_t bufferMemoryBarrierCount,
- const VkBufferMemoryBarrier* pBufferMemoryBarriers,
- uint32_t imageMemoryBarrierCount,
- const VkImageMemoryBarrier* pImageMemoryBarriers) {
- bool skip = false;
- skip |= validate_handle_array("vkCmdWaitEvents", "eventCount", "pEvents", eventCount, pEvents, true, true);
- skip |= validate_flags("vkCmdWaitEvents", "srcStageMask", "VkPipelineStageFlagBits", AllVkPipelineStageFlagBits, srcStageMask, kRequiredFlags, "VUID-vkCmdWaitEvents-srcStageMask-parameter", "VUID-vkCmdWaitEvents-srcStageMask-requiredbitmask");
- skip |= validate_flags("vkCmdWaitEvents", "dstStageMask", "VkPipelineStageFlagBits", AllVkPipelineStageFlagBits, dstStageMask, kRequiredFlags, "VUID-vkCmdWaitEvents-dstStageMask-parameter", "VUID-vkCmdWaitEvents-dstStageMask-requiredbitmask");
- skip |= validate_struct_type_array("vkCmdWaitEvents", "memoryBarrierCount", "pMemoryBarriers", "VK_STRUCTURE_TYPE_MEMORY_BARRIER", memoryBarrierCount, pMemoryBarriers, VK_STRUCTURE_TYPE_MEMORY_BARRIER, false, true, "VUID-VkMemoryBarrier-sType-sType", "VUID-vkCmdWaitEvents-pMemoryBarriers-parameter", kVUIDUndefined);
- if (pMemoryBarriers != NULL)
- {
- for (uint32_t memoryBarrierIndex = 0; memoryBarrierIndex < memoryBarrierCount; ++memoryBarrierIndex)
- {
- skip |= validate_struct_pnext("vkCmdWaitEvents", ParameterName("pMemoryBarriers[%i].pNext", ParameterName::IndexVector{ memoryBarrierIndex }), NULL, pMemoryBarriers[memoryBarrierIndex].pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkMemoryBarrier-pNext-pNext");
-
- skip |= validate_flags("vkCmdWaitEvents", ParameterName("pMemoryBarriers[%i].srcAccessMask", ParameterName::IndexVector{ memoryBarrierIndex }), "VkAccessFlagBits", AllVkAccessFlagBits, pMemoryBarriers[memoryBarrierIndex].srcAccessMask, kOptionalFlags, "VUID-VkMemoryBarrier-srcAccessMask-parameter");
-
- skip |= validate_flags("vkCmdWaitEvents", ParameterName("pMemoryBarriers[%i].dstAccessMask", ParameterName::IndexVector{ memoryBarrierIndex }), "VkAccessFlagBits", AllVkAccessFlagBits, pMemoryBarriers[memoryBarrierIndex].dstAccessMask, kOptionalFlags, "VUID-VkMemoryBarrier-dstAccessMask-parameter");
- }
- }
- skip |= validate_struct_type_array("vkCmdWaitEvents", "bufferMemoryBarrierCount", "pBufferMemoryBarriers", "VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER", bufferMemoryBarrierCount, pBufferMemoryBarriers, VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER, false, true, "VUID-VkBufferMemoryBarrier-sType-sType", "VUID-vkCmdWaitEvents-pBufferMemoryBarriers-parameter", kVUIDUndefined);
- if (pBufferMemoryBarriers != NULL)
- {
- for (uint32_t bufferMemoryBarrierIndex = 0; bufferMemoryBarrierIndex < bufferMemoryBarrierCount; ++bufferMemoryBarrierIndex)
- {
- skip |= validate_struct_pnext("vkCmdWaitEvents", ParameterName("pBufferMemoryBarriers[%i].pNext", ParameterName::IndexVector{ bufferMemoryBarrierIndex }), NULL, pBufferMemoryBarriers[bufferMemoryBarrierIndex].pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkBufferMemoryBarrier-pNext-pNext");
-
- skip |= validate_flags("vkCmdWaitEvents", ParameterName("pBufferMemoryBarriers[%i].srcAccessMask", ParameterName::IndexVector{ bufferMemoryBarrierIndex }), "VkAccessFlagBits", AllVkAccessFlagBits, pBufferMemoryBarriers[bufferMemoryBarrierIndex].srcAccessMask, kOptionalFlags, "VUID-VkBufferMemoryBarrier-srcAccessMask-parameter");
-
- skip |= validate_flags("vkCmdWaitEvents", ParameterName("pBufferMemoryBarriers[%i].dstAccessMask", ParameterName::IndexVector{ bufferMemoryBarrierIndex }), "VkAccessFlagBits", AllVkAccessFlagBits, pBufferMemoryBarriers[bufferMemoryBarrierIndex].dstAccessMask, kOptionalFlags, "VUID-VkBufferMemoryBarrier-dstAccessMask-parameter");
-
- skip |= validate_required_handle("vkCmdWaitEvents", ParameterName("pBufferMemoryBarriers[%i].buffer", ParameterName::IndexVector{ bufferMemoryBarrierIndex }), pBufferMemoryBarriers[bufferMemoryBarrierIndex].buffer);
- }
- }
- skip |= validate_struct_type_array("vkCmdWaitEvents", "imageMemoryBarrierCount", "pImageMemoryBarriers", "VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER", imageMemoryBarrierCount, pImageMemoryBarriers, VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, false, true, "VUID-VkImageMemoryBarrier-sType-sType", "VUID-vkCmdWaitEvents-pImageMemoryBarriers-parameter", kVUIDUndefined);
- if (pImageMemoryBarriers != NULL)
- {
- for (uint32_t imageMemoryBarrierIndex = 0; imageMemoryBarrierIndex < imageMemoryBarrierCount; ++imageMemoryBarrierIndex)
- {
- const VkStructureType allowed_structs_VkImageMemoryBarrier[] = { VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT };
-
- skip |= validate_struct_pnext("vkCmdWaitEvents", ParameterName("pImageMemoryBarriers[%i].pNext", ParameterName::IndexVector{ imageMemoryBarrierIndex }), "VkSampleLocationsInfoEXT", pImageMemoryBarriers[imageMemoryBarrierIndex].pNext, ARRAY_SIZE(allowed_structs_VkImageMemoryBarrier), allowed_structs_VkImageMemoryBarrier, GeneratedVulkanHeaderVersion, "VUID-VkImageMemoryBarrier-pNext-pNext");
-
- skip |= validate_flags("vkCmdWaitEvents", ParameterName("pImageMemoryBarriers[%i].srcAccessMask", ParameterName::IndexVector{ imageMemoryBarrierIndex }), "VkAccessFlagBits", AllVkAccessFlagBits, pImageMemoryBarriers[imageMemoryBarrierIndex].srcAccessMask, kOptionalFlags, "VUID-VkImageMemoryBarrier-srcAccessMask-parameter");
-
- skip |= validate_flags("vkCmdWaitEvents", ParameterName("pImageMemoryBarriers[%i].dstAccessMask", ParameterName::IndexVector{ imageMemoryBarrierIndex }), "VkAccessFlagBits", AllVkAccessFlagBits, pImageMemoryBarriers[imageMemoryBarrierIndex].dstAccessMask, kOptionalFlags, "VUID-VkImageMemoryBarrier-dstAccessMask-parameter");
-
- skip |= validate_ranged_enum("vkCmdWaitEvents", ParameterName("pImageMemoryBarriers[%i].oldLayout", ParameterName::IndexVector{ imageMemoryBarrierIndex }), "VkImageLayout", AllVkImageLayoutEnums, pImageMemoryBarriers[imageMemoryBarrierIndex].oldLayout, "VUID-VkImageMemoryBarrier-oldLayout-parameter");
-
- skip |= validate_ranged_enum("vkCmdWaitEvents", ParameterName("pImageMemoryBarriers[%i].newLayout", ParameterName::IndexVector{ imageMemoryBarrierIndex }), "VkImageLayout", AllVkImageLayoutEnums, pImageMemoryBarriers[imageMemoryBarrierIndex].newLayout, "VUID-VkImageMemoryBarrier-newLayout-parameter");
-
- skip |= validate_required_handle("vkCmdWaitEvents", ParameterName("pImageMemoryBarriers[%i].image", ParameterName::IndexVector{ imageMemoryBarrierIndex }), pImageMemoryBarriers[imageMemoryBarrierIndex].image);
-
- skip |= validate_flags("vkCmdWaitEvents", ParameterName("pImageMemoryBarriers[%i].subresourceRange.aspectMask", ParameterName::IndexVector{ imageMemoryBarrierIndex }), "VkImageAspectFlagBits", AllVkImageAspectFlagBits, pImageMemoryBarriers[imageMemoryBarrierIndex].subresourceRange.aspectMask, kRequiredFlags, "VUID-VkImageSubresourceRange-aspectMask-parameter", "VUID-VkImageSubresourceRange-aspectMask-requiredbitmask");
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdPipelineBarrier(
- VkCommandBuffer commandBuffer,
- VkPipelineStageFlags srcStageMask,
- VkPipelineStageFlags dstStageMask,
- VkDependencyFlags dependencyFlags,
- uint32_t memoryBarrierCount,
- const VkMemoryBarrier* pMemoryBarriers,
- uint32_t bufferMemoryBarrierCount,
- const VkBufferMemoryBarrier* pBufferMemoryBarriers,
- uint32_t imageMemoryBarrierCount,
- const VkImageMemoryBarrier* pImageMemoryBarriers) {
- bool skip = false;
- skip |= validate_flags("vkCmdPipelineBarrier", "srcStageMask", "VkPipelineStageFlagBits", AllVkPipelineStageFlagBits, srcStageMask, kRequiredFlags, "VUID-vkCmdPipelineBarrier-srcStageMask-parameter", "VUID-vkCmdPipelineBarrier-srcStageMask-requiredbitmask");
- skip |= validate_flags("vkCmdPipelineBarrier", "dstStageMask", "VkPipelineStageFlagBits", AllVkPipelineStageFlagBits, dstStageMask, kRequiredFlags, "VUID-vkCmdPipelineBarrier-dstStageMask-parameter", "VUID-vkCmdPipelineBarrier-dstStageMask-requiredbitmask");
- skip |= validate_flags("vkCmdPipelineBarrier", "dependencyFlags", "VkDependencyFlagBits", AllVkDependencyFlagBits, dependencyFlags, kOptionalFlags, "VUID-vkCmdPipelineBarrier-dependencyFlags-parameter");
- skip |= validate_struct_type_array("vkCmdPipelineBarrier", "memoryBarrierCount", "pMemoryBarriers", "VK_STRUCTURE_TYPE_MEMORY_BARRIER", memoryBarrierCount, pMemoryBarriers, VK_STRUCTURE_TYPE_MEMORY_BARRIER, false, true, "VUID-VkMemoryBarrier-sType-sType", "VUID-vkCmdPipelineBarrier-pMemoryBarriers-parameter", kVUIDUndefined);
- if (pMemoryBarriers != NULL)
- {
- for (uint32_t memoryBarrierIndex = 0; memoryBarrierIndex < memoryBarrierCount; ++memoryBarrierIndex)
- {
- skip |= validate_struct_pnext("vkCmdPipelineBarrier", ParameterName("pMemoryBarriers[%i].pNext", ParameterName::IndexVector{ memoryBarrierIndex }), NULL, pMemoryBarriers[memoryBarrierIndex].pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkMemoryBarrier-pNext-pNext");
-
- skip |= validate_flags("vkCmdPipelineBarrier", ParameterName("pMemoryBarriers[%i].srcAccessMask", ParameterName::IndexVector{ memoryBarrierIndex }), "VkAccessFlagBits", AllVkAccessFlagBits, pMemoryBarriers[memoryBarrierIndex].srcAccessMask, kOptionalFlags, "VUID-VkMemoryBarrier-srcAccessMask-parameter");
-
- skip |= validate_flags("vkCmdPipelineBarrier", ParameterName("pMemoryBarriers[%i].dstAccessMask", ParameterName::IndexVector{ memoryBarrierIndex }), "VkAccessFlagBits", AllVkAccessFlagBits, pMemoryBarriers[memoryBarrierIndex].dstAccessMask, kOptionalFlags, "VUID-VkMemoryBarrier-dstAccessMask-parameter");
- }
- }
- skip |= validate_struct_type_array("vkCmdPipelineBarrier", "bufferMemoryBarrierCount", "pBufferMemoryBarriers", "VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER", bufferMemoryBarrierCount, pBufferMemoryBarriers, VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER, false, true, "VUID-VkBufferMemoryBarrier-sType-sType", "VUID-vkCmdPipelineBarrier-pBufferMemoryBarriers-parameter", kVUIDUndefined);
- if (pBufferMemoryBarriers != NULL)
- {
- for (uint32_t bufferMemoryBarrierIndex = 0; bufferMemoryBarrierIndex < bufferMemoryBarrierCount; ++bufferMemoryBarrierIndex)
- {
- skip |= validate_struct_pnext("vkCmdPipelineBarrier", ParameterName("pBufferMemoryBarriers[%i].pNext", ParameterName::IndexVector{ bufferMemoryBarrierIndex }), NULL, pBufferMemoryBarriers[bufferMemoryBarrierIndex].pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkBufferMemoryBarrier-pNext-pNext");
-
- skip |= validate_flags("vkCmdPipelineBarrier", ParameterName("pBufferMemoryBarriers[%i].srcAccessMask", ParameterName::IndexVector{ bufferMemoryBarrierIndex }), "VkAccessFlagBits", AllVkAccessFlagBits, pBufferMemoryBarriers[bufferMemoryBarrierIndex].srcAccessMask, kOptionalFlags, "VUID-VkBufferMemoryBarrier-srcAccessMask-parameter");
-
- skip |= validate_flags("vkCmdPipelineBarrier", ParameterName("pBufferMemoryBarriers[%i].dstAccessMask", ParameterName::IndexVector{ bufferMemoryBarrierIndex }), "VkAccessFlagBits", AllVkAccessFlagBits, pBufferMemoryBarriers[bufferMemoryBarrierIndex].dstAccessMask, kOptionalFlags, "VUID-VkBufferMemoryBarrier-dstAccessMask-parameter");
-
- skip |= validate_required_handle("vkCmdPipelineBarrier", ParameterName("pBufferMemoryBarriers[%i].buffer", ParameterName::IndexVector{ bufferMemoryBarrierIndex }), pBufferMemoryBarriers[bufferMemoryBarrierIndex].buffer);
- }
- }
- skip |= validate_struct_type_array("vkCmdPipelineBarrier", "imageMemoryBarrierCount", "pImageMemoryBarriers", "VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER", imageMemoryBarrierCount, pImageMemoryBarriers, VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, false, true, "VUID-VkImageMemoryBarrier-sType-sType", "VUID-vkCmdPipelineBarrier-pImageMemoryBarriers-parameter", kVUIDUndefined);
- if (pImageMemoryBarriers != NULL)
- {
- for (uint32_t imageMemoryBarrierIndex = 0; imageMemoryBarrierIndex < imageMemoryBarrierCount; ++imageMemoryBarrierIndex)
- {
- const VkStructureType allowed_structs_VkImageMemoryBarrier[] = { VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT };
-
- skip |= validate_struct_pnext("vkCmdPipelineBarrier", ParameterName("pImageMemoryBarriers[%i].pNext", ParameterName::IndexVector{ imageMemoryBarrierIndex }), "VkSampleLocationsInfoEXT", pImageMemoryBarriers[imageMemoryBarrierIndex].pNext, ARRAY_SIZE(allowed_structs_VkImageMemoryBarrier), allowed_structs_VkImageMemoryBarrier, GeneratedVulkanHeaderVersion, "VUID-VkImageMemoryBarrier-pNext-pNext");
-
- skip |= validate_flags("vkCmdPipelineBarrier", ParameterName("pImageMemoryBarriers[%i].srcAccessMask", ParameterName::IndexVector{ imageMemoryBarrierIndex }), "VkAccessFlagBits", AllVkAccessFlagBits, pImageMemoryBarriers[imageMemoryBarrierIndex].srcAccessMask, kOptionalFlags, "VUID-VkImageMemoryBarrier-srcAccessMask-parameter");
-
- skip |= validate_flags("vkCmdPipelineBarrier", ParameterName("pImageMemoryBarriers[%i].dstAccessMask", ParameterName::IndexVector{ imageMemoryBarrierIndex }), "VkAccessFlagBits", AllVkAccessFlagBits, pImageMemoryBarriers[imageMemoryBarrierIndex].dstAccessMask, kOptionalFlags, "VUID-VkImageMemoryBarrier-dstAccessMask-parameter");
-
- skip |= validate_ranged_enum("vkCmdPipelineBarrier", ParameterName("pImageMemoryBarriers[%i].oldLayout", ParameterName::IndexVector{ imageMemoryBarrierIndex }), "VkImageLayout", AllVkImageLayoutEnums, pImageMemoryBarriers[imageMemoryBarrierIndex].oldLayout, "VUID-VkImageMemoryBarrier-oldLayout-parameter");
-
- skip |= validate_ranged_enum("vkCmdPipelineBarrier", ParameterName("pImageMemoryBarriers[%i].newLayout", ParameterName::IndexVector{ imageMemoryBarrierIndex }), "VkImageLayout", AllVkImageLayoutEnums, pImageMemoryBarriers[imageMemoryBarrierIndex].newLayout, "VUID-VkImageMemoryBarrier-newLayout-parameter");
-
- skip |= validate_required_handle("vkCmdPipelineBarrier", ParameterName("pImageMemoryBarriers[%i].image", ParameterName::IndexVector{ imageMemoryBarrierIndex }), pImageMemoryBarriers[imageMemoryBarrierIndex].image);
-
- skip |= validate_flags("vkCmdPipelineBarrier", ParameterName("pImageMemoryBarriers[%i].subresourceRange.aspectMask", ParameterName::IndexVector{ imageMemoryBarrierIndex }), "VkImageAspectFlagBits", AllVkImageAspectFlagBits, pImageMemoryBarriers[imageMemoryBarrierIndex].subresourceRange.aspectMask, kRequiredFlags, "VUID-VkImageSubresourceRange-aspectMask-parameter", "VUID-VkImageSubresourceRange-aspectMask-requiredbitmask");
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdBeginQuery(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query,
- VkQueryControlFlags flags) {
- bool skip = false;
- skip |= validate_required_handle("vkCmdBeginQuery", "queryPool", queryPool);
- skip |= validate_flags("vkCmdBeginQuery", "flags", "VkQueryControlFlagBits", AllVkQueryControlFlagBits, flags, kOptionalFlags, "VUID-vkCmdBeginQuery-flags-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdEndQuery(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query) {
- bool skip = false;
- skip |= validate_required_handle("vkCmdEndQuery", "queryPool", queryPool);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdResetQueryPool(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount) {
- bool skip = false;
- skip |= validate_required_handle("vkCmdResetQueryPool", "queryPool", queryPool);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdWriteTimestamp(
- VkCommandBuffer commandBuffer,
- VkPipelineStageFlagBits pipelineStage,
- VkQueryPool queryPool,
- uint32_t query) {
- bool skip = false;
- skip |= validate_flags("vkCmdWriteTimestamp", "pipelineStage", "VkPipelineStageFlagBits", AllVkPipelineStageFlagBits, pipelineStage, kRequiredSingleBit, "VUID-vkCmdWriteTimestamp-pipelineStage-parameter", "VUID-vkCmdWriteTimestamp-pipelineStage-parameter");
- skip |= validate_required_handle("vkCmdWriteTimestamp", "queryPool", queryPool);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdCopyQueryPoolResults(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize stride,
- VkQueryResultFlags flags) {
- bool skip = false;
- skip |= validate_required_handle("vkCmdCopyQueryPoolResults", "queryPool", queryPool);
- skip |= validate_required_handle("vkCmdCopyQueryPoolResults", "dstBuffer", dstBuffer);
- skip |= validate_flags("vkCmdCopyQueryPoolResults", "flags", "VkQueryResultFlagBits", AllVkQueryResultFlagBits, flags, kOptionalFlags, "VUID-vkCmdCopyQueryPoolResults-flags-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdPushConstants(
- VkCommandBuffer commandBuffer,
- VkPipelineLayout layout,
- VkShaderStageFlags stageFlags,
- uint32_t offset,
- uint32_t size,
- const void* pValues) {
- bool skip = false;
- skip |= validate_required_handle("vkCmdPushConstants", "layout", layout);
- skip |= validate_flags("vkCmdPushConstants", "stageFlags", "VkShaderStageFlagBits", AllVkShaderStageFlagBits, stageFlags, kRequiredFlags, "VUID-vkCmdPushConstants-stageFlags-parameter", "VUID-vkCmdPushConstants-stageFlags-requiredbitmask");
- skip |= validate_array("vkCmdPushConstants", "size", "pValues", size, &pValues, true, true, "VUID-vkCmdPushConstants-size-arraylength", "VUID-vkCmdPushConstants-pValues-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdBeginRenderPass(
- VkCommandBuffer commandBuffer,
- const VkRenderPassBeginInfo* pRenderPassBegin,
- VkSubpassContents contents) {
- bool skip = false;
- skip |= validate_struct_type("vkCmdBeginRenderPass", "pRenderPassBegin", "VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO", pRenderPassBegin, VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, true, "VUID-vkCmdBeginRenderPass-pRenderPassBegin-parameter", "VUID-VkRenderPassBeginInfo-sType-sType");
- if (pRenderPassBegin != NULL)
- {
- const VkStructureType allowed_structs_VkRenderPassBeginInfo[] = { VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO, VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR, VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT };
-
- skip |= validate_struct_pnext("vkCmdBeginRenderPass", "pRenderPassBegin->pNext", "VkDeviceGroupRenderPassBeginInfo, VkRenderPassAttachmentBeginInfoKHR, VkRenderPassSampleLocationsBeginInfoEXT", pRenderPassBegin->pNext, ARRAY_SIZE(allowed_structs_VkRenderPassBeginInfo), allowed_structs_VkRenderPassBeginInfo, GeneratedVulkanHeaderVersion, "VUID-VkRenderPassBeginInfo-pNext-pNext");
-
- skip |= validate_required_handle("vkCmdBeginRenderPass", "pRenderPassBegin->renderPass", pRenderPassBegin->renderPass);
-
- skip |= validate_required_handle("vkCmdBeginRenderPass", "pRenderPassBegin->framebuffer", pRenderPassBegin->framebuffer);
-
- // No xml-driven validation
-
- // No xml-driven validation
-
- skip |= validate_array("vkCmdBeginRenderPass", "pRenderPassBegin->clearValueCount", "pRenderPassBegin->pClearValues", pRenderPassBegin->clearValueCount, &pRenderPassBegin->pClearValues, false, true, kVUIDUndefined, "VUID-VkRenderPassBeginInfo-pClearValues-parameter");
-
- if (pRenderPassBegin->pClearValues != NULL)
- {
- for (uint32_t clearValueIndex = 0; clearValueIndex < pRenderPassBegin->clearValueCount; ++clearValueIndex)
- {
- // No xml-driven validation
-
- // No xml-driven validation
- }
- }
- }
- skip |= validate_ranged_enum("vkCmdBeginRenderPass", "contents", "VkSubpassContents", AllVkSubpassContentsEnums, contents, "VUID-vkCmdBeginRenderPass-contents-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdNextSubpass(
- VkCommandBuffer commandBuffer,
- VkSubpassContents contents) {
- bool skip = false;
- skip |= validate_ranged_enum("vkCmdNextSubpass", "contents", "VkSubpassContents", AllVkSubpassContentsEnums, contents, "VUID-vkCmdNextSubpass-contents-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdEndRenderPass(
- VkCommandBuffer commandBuffer) {
- bool skip = false;
- // No xml-driven validation
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdExecuteCommands(
- VkCommandBuffer commandBuffer,
- uint32_t commandBufferCount,
- const VkCommandBuffer* pCommandBuffers) {
- bool skip = false;
- skip |= validate_handle_array("vkCmdExecuteCommands", "commandBufferCount", "pCommandBuffers", commandBufferCount, pCommandBuffers, true, true);
- return skip;
-}
-
-
-
-bool StatelessValidation::PreCallValidateBindBufferMemory2(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindBufferMemoryInfo* pBindInfos) {
- bool skip = false;
- skip |= validate_struct_type_array("vkBindBufferMemory2", "bindInfoCount", "pBindInfos", "VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO", bindInfoCount, pBindInfos, VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO, true, true, "VUID-VkBindBufferMemoryInfo-sType-sType", "VUID-vkBindBufferMemory2-pBindInfos-parameter", "VUID-vkBindBufferMemory2-bindInfoCount-arraylength");
- if (pBindInfos != NULL)
- {
- for (uint32_t bindInfoIndex = 0; bindInfoIndex < bindInfoCount; ++bindInfoIndex)
- {
- const VkStructureType allowed_structs_VkBindBufferMemoryInfo[] = { VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO };
-
- skip |= validate_struct_pnext("vkBindBufferMemory2", ParameterName("pBindInfos[%i].pNext", ParameterName::IndexVector{ bindInfoIndex }), "VkBindBufferMemoryDeviceGroupInfo", pBindInfos[bindInfoIndex].pNext, ARRAY_SIZE(allowed_structs_VkBindBufferMemoryInfo), allowed_structs_VkBindBufferMemoryInfo, GeneratedVulkanHeaderVersion, "VUID-VkBindBufferMemoryInfo-pNext-pNext");
-
- skip |= validate_required_handle("vkBindBufferMemory2", ParameterName("pBindInfos[%i].buffer", ParameterName::IndexVector{ bindInfoIndex }), pBindInfos[bindInfoIndex].buffer);
-
- skip |= validate_required_handle("vkBindBufferMemory2", ParameterName("pBindInfos[%i].memory", ParameterName::IndexVector{ bindInfoIndex }), pBindInfos[bindInfoIndex].memory);
- }
- }
- return skip;
-}
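// A minimal sketch of what the stateless checks above expect at the call
// site: each VkBindBufferMemoryInfo carries the matching sType, at most a
// VkBindBufferMemoryDeviceGroupInfo in its pNext chain, and non-null buffer
// and memory handles. device, buffer and memory are assumed, hypothetical
// handles created elsewhere.
VkBindBufferMemoryInfo bind_info = {};
bind_info.sType = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO;
bind_info.pNext = nullptr;        // or a VkBindBufferMemoryDeviceGroupInfo
bind_info.buffer = buffer;        // required handle
bind_info.memory = memory;        // required handle
bind_info.memoryOffset = 0;
vkBindBufferMemory2(device, 1, &bind_info);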
-
-bool StatelessValidation::PreCallValidateBindImageMemory2(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindImageMemoryInfo* pBindInfos) {
- bool skip = false;
- skip |= validate_struct_type_array("vkBindImageMemory2", "bindInfoCount", "pBindInfos", "VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO", bindInfoCount, pBindInfos, VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO, true, true, "VUID-VkBindImageMemoryInfo-sType-sType", "VUID-vkBindImageMemory2-pBindInfos-parameter", "VUID-vkBindImageMemory2-bindInfoCount-arraylength");
- if (pBindInfos != NULL)
- {
- for (uint32_t bindInfoIndex = 0; bindInfoIndex < bindInfoCount; ++bindInfoIndex)
- {
- const VkStructureType allowed_structs_VkBindImageMemoryInfo[] = { VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO, VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR, VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO };
-
- skip |= validate_struct_pnext("vkBindImageMemory2", ParameterName("pBindInfos[%i].pNext", ParameterName::IndexVector{ bindInfoIndex }), "VkBindImageMemoryDeviceGroupInfo, VkBindImageMemorySwapchainInfoKHR, VkBindImagePlaneMemoryInfo", pBindInfos[bindInfoIndex].pNext, ARRAY_SIZE(allowed_structs_VkBindImageMemoryInfo), allowed_structs_VkBindImageMemoryInfo, GeneratedVulkanHeaderVersion, "VUID-VkBindImageMemoryInfo-pNext-pNext");
-
- skip |= validate_required_handle("vkBindImageMemory2", ParameterName("pBindInfos[%i].image", ParameterName::IndexVector{ bindInfoIndex }), pBindInfos[bindInfoIndex].image);
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetDeviceGroupPeerMemoryFeatures(
- VkDevice device,
- uint32_t heapIndex,
- uint32_t localDeviceIndex,
- uint32_t remoteDeviceIndex,
- VkPeerMemoryFeatureFlags* pPeerMemoryFeatures) {
- bool skip = false;
- skip |= validate_required_pointer("vkGetDeviceGroupPeerMemoryFeatures", "pPeerMemoryFeatures", pPeerMemoryFeatures, "VUID-vkGetDeviceGroupPeerMemoryFeatures-pPeerMemoryFeatures-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdSetDeviceMask(
- VkCommandBuffer commandBuffer,
- uint32_t deviceMask) {
- bool skip = false;
- // No xml-driven validation
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdDispatchBase(
- VkCommandBuffer commandBuffer,
- uint32_t baseGroupX,
- uint32_t baseGroupY,
- uint32_t baseGroupZ,
- uint32_t groupCountX,
- uint32_t groupCountY,
- uint32_t groupCountZ) {
- bool skip = false;
- // No xml-driven validation
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateEnumeratePhysicalDeviceGroups(
- VkInstance instance,
- uint32_t* pPhysicalDeviceGroupCount,
- VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties) {
- bool skip = false;
- skip |= validate_struct_type_array("vkEnumeratePhysicalDeviceGroups", "pPhysicalDeviceGroupCount", "pPhysicalDeviceGroupProperties", "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES", pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES, true, false, false, "VUID-VkPhysicalDeviceGroupProperties-sType-sType", "VUID-vkEnumeratePhysicalDeviceGroups-pPhysicalDeviceGroupProperties-parameter", kVUIDUndefined);
- if (pPhysicalDeviceGroupProperties != NULL)
- {
- for (uint32_t pPhysicalDeviceGroupIndex = 0; pPhysicalDeviceGroupIndex < *pPhysicalDeviceGroupCount; ++pPhysicalDeviceGroupIndex)
- {
- // No xml-driven validation
- }
- }
- return skip;
-}
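// The count/array pair validated above follows the usual two-call enumeration
// pattern: query the count, then fill an array whose elements already carry
// the required sType. A minimal sketch, assuming a valid VkInstance named
// instance and that <vulkan/vulkan.h> and <vector> are included.
uint32_t group_count = 0;
vkEnumeratePhysicalDeviceGroups(instance, &group_count, nullptr);
std::vector<VkPhysicalDeviceGroupProperties> groups(group_count);
for (auto& g : groups) {
    g.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES;
    g.pNext = nullptr;
}
vkEnumeratePhysicalDeviceGroups(instance, &group_count, groups.data());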
-
-bool StatelessValidation::PreCallValidateGetImageMemoryRequirements2(
- VkDevice device,
- const VkImageMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements) {
- bool skip = false;
- skip |= validate_struct_type("vkGetImageMemoryRequirements2", "pInfo", "VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2", pInfo, VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2, true, "VUID-vkGetImageMemoryRequirements2-pInfo-parameter", "VUID-VkImageMemoryRequirementsInfo2-sType-sType");
- if (pInfo != NULL)
- {
- const VkStructureType allowed_structs_VkImageMemoryRequirementsInfo2[] = { VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO };
-
- skip |= validate_struct_pnext("vkGetImageMemoryRequirements2", "pInfo->pNext", "VkImagePlaneMemoryRequirementsInfo", pInfo->pNext, ARRAY_SIZE(allowed_structs_VkImageMemoryRequirementsInfo2), allowed_structs_VkImageMemoryRequirementsInfo2, GeneratedVulkanHeaderVersion, "VUID-VkImageMemoryRequirementsInfo2-pNext-pNext");
-
- skip |= validate_required_handle("vkGetImageMemoryRequirements2", "pInfo->image", pInfo->image);
- }
- skip |= validate_struct_type("vkGetImageMemoryRequirements2", "pMemoryRequirements", "VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2", pMemoryRequirements, VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, true, "VUID-vkGetImageMemoryRequirements2-pMemoryRequirements-parameter", "VUID-VkMemoryRequirements2-sType-sType");
- if (pMemoryRequirements != NULL)
- {
- // No xml-driven validation
- }
- return skip;
-}
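// A minimal sketch of the input/output structure pairing these checks cover:
// the info struct needs its sType and a non-null image, the result struct
// needs its own sType. device and image are assumed, hypothetical handles.
VkImageMemoryRequirementsInfo2 req_info = {};
req_info.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2;
req_info.image = image;           // required handle
VkMemoryRequirements2 reqs = {};
reqs.sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2;
vkGetImageMemoryRequirements2(device, &req_info, &reqs);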
-
-bool StatelessValidation::PreCallValidateGetBufferMemoryRequirements2(
- VkDevice device,
- const VkBufferMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements) {
- bool skip = false;
- skip |= validate_struct_type("vkGetBufferMemoryRequirements2", "pInfo", "VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2", pInfo, VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2, true, "VUID-vkGetBufferMemoryRequirements2-pInfo-parameter", "VUID-VkBufferMemoryRequirementsInfo2-sType-sType");
- if (pInfo != NULL)
- {
- skip |= validate_struct_pnext("vkGetBufferMemoryRequirements2", "pInfo->pNext", NULL, pInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkBufferMemoryRequirementsInfo2-pNext-pNext");
-
- skip |= validate_required_handle("vkGetBufferMemoryRequirements2", "pInfo->buffer", pInfo->buffer);
- }
- skip |= validate_struct_type("vkGetBufferMemoryRequirements2", "pMemoryRequirements", "VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2", pMemoryRequirements, VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, true, "VUID-vkGetBufferMemoryRequirements2-pMemoryRequirements-parameter", "VUID-VkMemoryRequirements2-sType-sType");
- if (pMemoryRequirements != NULL)
- {
- // No xml-driven validation
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetImageSparseMemoryRequirements2(
- VkDevice device,
- const VkImageSparseMemoryRequirementsInfo2* pInfo,
- uint32_t* pSparseMemoryRequirementCount,
- VkSparseImageMemoryRequirements2* pSparseMemoryRequirements) {
- bool skip = false;
- skip |= validate_struct_type("vkGetImageSparseMemoryRequirements2", "pInfo", "VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2", pInfo, VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2, true, "VUID-vkGetImageSparseMemoryRequirements2-pInfo-parameter", "VUID-VkImageSparseMemoryRequirementsInfo2-sType-sType");
- if (pInfo != NULL)
- {
- skip |= validate_struct_pnext("vkGetImageSparseMemoryRequirements2", "pInfo->pNext", NULL, pInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkImageSparseMemoryRequirementsInfo2-pNext-pNext");
-
- skip |= validate_required_handle("vkGetImageSparseMemoryRequirements2", "pInfo->image", pInfo->image);
- }
- skip |= validate_struct_type_array("vkGetImageSparseMemoryRequirements2", "pSparseMemoryRequirementCount", "pSparseMemoryRequirements", "VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2", pSparseMemoryRequirementCount, pSparseMemoryRequirements, VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2, true, false, false, "VUID-VkSparseImageMemoryRequirements2-sType-sType", "VUID-vkGetImageSparseMemoryRequirements2-pSparseMemoryRequirements-parameter", kVUIDUndefined);
- if (pSparseMemoryRequirements != NULL)
- {
- for (uint32_t pSparseMemoryRequirementIndex = 0; pSparseMemoryRequirementIndex < *pSparseMemoryRequirementCount; ++pSparseMemoryRequirementIndex)
- {
- // No xml-driven validation
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceFeatures2(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceFeatures2* pFeatures) {
- bool skip = false;
- skip |= validate_struct_type("vkGetPhysicalDeviceFeatures2", "pFeatures", "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2", pFeatures, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2, true, "VUID-vkGetPhysicalDeviceFeatures2-pFeatures-parameter", "VUID-VkPhysicalDeviceFeatures2-sType-sType");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceProperties2(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceProperties2* pProperties) {
- bool skip = false;
- skip |= validate_struct_type("vkGetPhysicalDeviceProperties2", "pProperties", "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2", pProperties, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2, true, "VUID-vkGetPhysicalDeviceProperties2-pProperties-parameter", "VUID-VkPhysicalDeviceProperties2-sType-sType");
- if (pProperties != NULL)
- {
- // No xml-driven validation
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceFormatProperties2(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkFormatProperties2* pFormatProperties) {
- bool skip = false;
- skip |= validate_ranged_enum("vkGetPhysicalDeviceFormatProperties2", "format", "VkFormat", AllVkFormatEnums, format, "VUID-vkGetPhysicalDeviceFormatProperties2-format-parameter");
- skip |= validate_struct_type("vkGetPhysicalDeviceFormatProperties2", "pFormatProperties", "VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2", pFormatProperties, VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, true, "VUID-vkGetPhysicalDeviceFormatProperties2-pFormatProperties-parameter", "VUID-VkFormatProperties2-sType-sType");
- if (pFormatProperties != NULL)
- {
- // No xml-driven validation
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceImageFormatProperties2(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
- VkImageFormatProperties2* pImageFormatProperties) {
- bool skip = false;
- skip |= validate_struct_type("vkGetPhysicalDeviceImageFormatProperties2", "pImageFormatInfo", "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2", pImageFormatInfo, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2, true, "VUID-vkGetPhysicalDeviceImageFormatProperties2-pImageFormatInfo-parameter", "VUID-VkPhysicalDeviceImageFormatInfo2-sType-sType");
- if (pImageFormatInfo != NULL)
- {
- const VkStructureType allowed_structs_VkPhysicalDeviceImageFormatInfo2[] = { VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR, VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT };
-
- skip |= validate_struct_pnext("vkGetPhysicalDeviceImageFormatProperties2", "pImageFormatInfo->pNext", "VkImageFormatListCreateInfoKHR, VkImageStencilUsageCreateInfoEXT, VkPhysicalDeviceExternalImageFormatInfo, VkPhysicalDeviceImageDrmFormatModifierInfoEXT, VkPhysicalDeviceImageViewImageFormatInfoEXT", pImageFormatInfo->pNext, ARRAY_SIZE(allowed_structs_VkPhysicalDeviceImageFormatInfo2), allowed_structs_VkPhysicalDeviceImageFormatInfo2, GeneratedVulkanHeaderVersion, "VUID-VkPhysicalDeviceImageFormatInfo2-pNext-pNext");
-
- skip |= validate_ranged_enum("vkGetPhysicalDeviceImageFormatProperties2", "pImageFormatInfo->format", "VkFormat", AllVkFormatEnums, pImageFormatInfo->format, "VUID-VkPhysicalDeviceImageFormatInfo2-format-parameter");
-
- skip |= validate_ranged_enum("vkGetPhysicalDeviceImageFormatProperties2", "pImageFormatInfo->type", "VkImageType", AllVkImageTypeEnums, pImageFormatInfo->type, "VUID-VkPhysicalDeviceImageFormatInfo2-type-parameter");
-
- skip |= validate_ranged_enum("vkGetPhysicalDeviceImageFormatProperties2", "pImageFormatInfo->tiling", "VkImageTiling", AllVkImageTilingEnums, pImageFormatInfo->tiling, "VUID-VkPhysicalDeviceImageFormatInfo2-tiling-parameter");
-
- skip |= validate_flags("vkGetPhysicalDeviceImageFormatProperties2", "pImageFormatInfo->usage", "VkImageUsageFlagBits", AllVkImageUsageFlagBits, pImageFormatInfo->usage, kRequiredFlags, "VUID-VkPhysicalDeviceImageFormatInfo2-usage-parameter", "VUID-VkPhysicalDeviceImageFormatInfo2-usage-requiredbitmask");
-
- skip |= validate_flags("vkGetPhysicalDeviceImageFormatProperties2", "pImageFormatInfo->flags", "VkImageCreateFlagBits", AllVkImageCreateFlagBits, pImageFormatInfo->flags, kOptionalFlags, "VUID-VkPhysicalDeviceImageFormatInfo2-flags-parameter");
- }
- skip |= validate_struct_type("vkGetPhysicalDeviceImageFormatProperties2", "pImageFormatProperties", "VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2", pImageFormatProperties, VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2, true, "VUID-vkGetPhysicalDeviceImageFormatProperties2-pImageFormatProperties-parameter", "VUID-VkImageFormatProperties2-sType-sType");
- if (pImageFormatProperties != NULL)
- {
- // No xml-driven validation
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceQueueFamilyProperties2(
- VkPhysicalDevice physicalDevice,
- uint32_t* pQueueFamilyPropertyCount,
- VkQueueFamilyProperties2* pQueueFamilyProperties) {
- bool skip = false;
- skip |= validate_struct_type_array("vkGetPhysicalDeviceQueueFamilyProperties2", "pQueueFamilyPropertyCount", "pQueueFamilyProperties", "VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2", pQueueFamilyPropertyCount, pQueueFamilyProperties, VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2, true, false, false, "VUID-VkQueueFamilyProperties2-sType-sType", "VUID-vkGetPhysicalDeviceQueueFamilyProperties2-pQueueFamilyProperties-parameter", kVUIDUndefined);
- if (pQueueFamilyProperties != NULL)
- {
- for (uint32_t pQueueFamilyPropertyIndex = 0; pQueueFamilyPropertyIndex < *pQueueFamilyPropertyCount; ++pQueueFamilyPropertyIndex)
- {
- // No xml-driven validation
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceMemoryProperties2(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceMemoryProperties2* pMemoryProperties) {
- bool skip = false;
- skip |= validate_struct_type("vkGetPhysicalDeviceMemoryProperties2", "pMemoryProperties", "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2", pMemoryProperties, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2, true, "VUID-vkGetPhysicalDeviceMemoryProperties2-pMemoryProperties-parameter", "VUID-VkPhysicalDeviceMemoryProperties2-sType-sType");
- if (pMemoryProperties != NULL)
- {
- // No xml-driven validation
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceSparseImageFormatProperties2(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo,
- uint32_t* pPropertyCount,
- VkSparseImageFormatProperties2* pProperties) {
- bool skip = false;
- skip |= validate_struct_type("vkGetPhysicalDeviceSparseImageFormatProperties2", "pFormatInfo", "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2", pFormatInfo, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2, true, "VUID-vkGetPhysicalDeviceSparseImageFormatProperties2-pFormatInfo-parameter", "VUID-VkPhysicalDeviceSparseImageFormatInfo2-sType-sType");
- if (pFormatInfo != NULL)
- {
- skip |= validate_struct_pnext("vkGetPhysicalDeviceSparseImageFormatProperties2", "pFormatInfo->pNext", NULL, pFormatInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkPhysicalDeviceSparseImageFormatInfo2-pNext-pNext");
-
- skip |= validate_ranged_enum("vkGetPhysicalDeviceSparseImageFormatProperties2", "pFormatInfo->format", "VkFormat", AllVkFormatEnums, pFormatInfo->format, "VUID-VkPhysicalDeviceSparseImageFormatInfo2-format-parameter");
-
- skip |= validate_ranged_enum("vkGetPhysicalDeviceSparseImageFormatProperties2", "pFormatInfo->type", "VkImageType", AllVkImageTypeEnums, pFormatInfo->type, "VUID-VkPhysicalDeviceSparseImageFormatInfo2-type-parameter");
-
- skip |= validate_flags("vkGetPhysicalDeviceSparseImageFormatProperties2", "pFormatInfo->samples", "VkSampleCountFlagBits", AllVkSampleCountFlagBits, pFormatInfo->samples, kRequiredSingleBit, "VUID-VkPhysicalDeviceSparseImageFormatInfo2-samples-parameter", "VUID-VkPhysicalDeviceSparseImageFormatInfo2-samples-parameter");
-
- skip |= validate_flags("vkGetPhysicalDeviceSparseImageFormatProperties2", "pFormatInfo->usage", "VkImageUsageFlagBits", AllVkImageUsageFlagBits, pFormatInfo->usage, kRequiredFlags, "VUID-VkPhysicalDeviceSparseImageFormatInfo2-usage-parameter", "VUID-VkPhysicalDeviceSparseImageFormatInfo2-usage-requiredbitmask");
-
- skip |= validate_ranged_enum("vkGetPhysicalDeviceSparseImageFormatProperties2", "pFormatInfo->tiling", "VkImageTiling", AllVkImageTilingEnums, pFormatInfo->tiling, "VUID-VkPhysicalDeviceSparseImageFormatInfo2-tiling-parameter");
- }
- skip |= validate_struct_type_array("vkGetPhysicalDeviceSparseImageFormatProperties2", "pPropertyCount", "pProperties", "VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2", pPropertyCount, pProperties, VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2, true, false, false, "VUID-VkSparseImageFormatProperties2-sType-sType", "VUID-vkGetPhysicalDeviceSparseImageFormatProperties2-pProperties-parameter", kVUIDUndefined);
- if (pProperties != NULL)
- {
- for (uint32_t pPropertyIndex = 0; pPropertyIndex < *pPropertyCount; ++pPropertyIndex)
- {
- // No xml-driven validation
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateTrimCommandPool(
- VkDevice device,
- VkCommandPool commandPool,
- VkCommandPoolTrimFlags flags) {
- bool skip = false;
- skip |= validate_required_handle("vkTrimCommandPool", "commandPool", commandPool);
- skip |= validate_reserved_flags("vkTrimCommandPool", "flags", flags, "VUID-vkTrimCommandPool-flags-zerobitmask");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetDeviceQueue2(
- VkDevice device,
- const VkDeviceQueueInfo2* pQueueInfo,
- VkQueue* pQueue) {
- bool skip = false;
- skip |= validate_struct_type("vkGetDeviceQueue2", "pQueueInfo", "VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2", pQueueInfo, VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2, true, "VUID-vkGetDeviceQueue2-pQueueInfo-parameter", "VUID-VkDeviceQueueInfo2-sType-sType");
- if (pQueueInfo != NULL)
- {
- skip |= validate_struct_pnext("vkGetDeviceQueue2", "pQueueInfo->pNext", NULL, pQueueInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDeviceQueueInfo2-pNext-pNext");
-
- skip |= validate_flags("vkGetDeviceQueue2", "pQueueInfo->flags", "VkDeviceQueueCreateFlagBits", AllVkDeviceQueueCreateFlagBits, pQueueInfo->flags, kRequiredFlags, "VUID-VkDeviceQueueInfo2-flags-parameter", "VUID-VkDeviceQueueInfo2-flags-requiredbitmask");
- }
- skip |= validate_required_pointer("vkGetDeviceQueue2", "pQueue", pQueue, "VUID-vkGetDeviceQueue2-pQueue-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCreateSamplerYcbcrConversion(
- VkDevice device,
- const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSamplerYcbcrConversion* pYcbcrConversion) {
- bool skip = false;
- skip |= validate_struct_type("vkCreateSamplerYcbcrConversion", "pCreateInfo", "VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO, true, "VUID-vkCreateSamplerYcbcrConversion-pCreateInfo-parameter", "VUID-VkSamplerYcbcrConversionCreateInfo-sType-sType");
- if (pCreateInfo != NULL)
- {
- const VkStructureType allowed_structs_VkSamplerYcbcrConversionCreateInfo[] = { VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID };
-
- skip |= validate_struct_pnext("vkCreateSamplerYcbcrConversion", "pCreateInfo->pNext", "VkExternalFormatANDROID", pCreateInfo->pNext, ARRAY_SIZE(allowed_structs_VkSamplerYcbcrConversionCreateInfo), allowed_structs_VkSamplerYcbcrConversionCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkSamplerYcbcrConversionCreateInfo-pNext-pNext");
-
- skip |= validate_ranged_enum("vkCreateSamplerYcbcrConversion", "pCreateInfo->format", "VkFormat", AllVkFormatEnums, pCreateInfo->format, "VUID-VkSamplerYcbcrConversionCreateInfo-format-parameter");
-
- skip |= validate_ranged_enum("vkCreateSamplerYcbcrConversion", "pCreateInfo->ycbcrModel", "VkSamplerYcbcrModelConversion", AllVkSamplerYcbcrModelConversionEnums, pCreateInfo->ycbcrModel, "VUID-VkSamplerYcbcrConversionCreateInfo-ycbcrModel-parameter");
-
- skip |= validate_ranged_enum("vkCreateSamplerYcbcrConversion", "pCreateInfo->ycbcrRange", "VkSamplerYcbcrRange", AllVkSamplerYcbcrRangeEnums, pCreateInfo->ycbcrRange, "VUID-VkSamplerYcbcrConversionCreateInfo-ycbcrRange-parameter");
-
- skip |= validate_ranged_enum("vkCreateSamplerYcbcrConversion", "pCreateInfo->components.r", "VkComponentSwizzle", AllVkComponentSwizzleEnums, pCreateInfo->components.r, "VUID-VkComponentMapping-r-parameter");
-
- skip |= validate_ranged_enum("vkCreateSamplerYcbcrConversion", "pCreateInfo->components.g", "VkComponentSwizzle", AllVkComponentSwizzleEnums, pCreateInfo->components.g, "VUID-VkComponentMapping-g-parameter");
-
- skip |= validate_ranged_enum("vkCreateSamplerYcbcrConversion", "pCreateInfo->components.b", "VkComponentSwizzle", AllVkComponentSwizzleEnums, pCreateInfo->components.b, "VUID-VkComponentMapping-b-parameter");
-
- skip |= validate_ranged_enum("vkCreateSamplerYcbcrConversion", "pCreateInfo->components.a", "VkComponentSwizzle", AllVkComponentSwizzleEnums, pCreateInfo->components.a, "VUID-VkComponentMapping-a-parameter");
-
- skip |= validate_ranged_enum("vkCreateSamplerYcbcrConversion", "pCreateInfo->xChromaOffset", "VkChromaLocation", AllVkChromaLocationEnums, pCreateInfo->xChromaOffset, "VUID-VkSamplerYcbcrConversionCreateInfo-xChromaOffset-parameter");
-
- skip |= validate_ranged_enum("vkCreateSamplerYcbcrConversion", "pCreateInfo->yChromaOffset", "VkChromaLocation", AllVkChromaLocationEnums, pCreateInfo->yChromaOffset, "VUID-VkSamplerYcbcrConversionCreateInfo-yChromaOffset-parameter");
-
- skip |= validate_ranged_enum("vkCreateSamplerYcbcrConversion", "pCreateInfo->chromaFilter", "VkFilter", AllVkFilterEnums, pCreateInfo->chromaFilter, "VUID-VkSamplerYcbcrConversionCreateInfo-chromaFilter-parameter");
-
- skip |= validate_bool32("vkCreateSamplerYcbcrConversion", "pCreateInfo->forceExplicitReconstruction", pCreateInfo->forceExplicitReconstruction);
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreateSamplerYcbcrConversion", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreateSamplerYcbcrConversion", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreateSamplerYcbcrConversion", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreateSamplerYcbcrConversion", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreateSamplerYcbcrConversion", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkCreateSamplerYcbcrConversion", "pYcbcrConversion", pYcbcrConversion, "VUID-vkCreateSamplerYcbcrConversion-pYcbcrConversion-parameter");
- return skip;
-}
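// The pAllocator block repeated throughout this file enforces one rule: if a
// VkAllocationCallbacks structure is supplied, pfnAllocation, pfnReallocation
// and pfnFree must all be set, and the two internal-notification callbacks
// must be provided (or omitted) as a pair. A minimal sketch with hypothetical
// callbacks my_alloc, my_realloc and my_free.
VkAllocationCallbacks callbacks = {};
callbacks.pfnAllocation = my_alloc;         // all three required once pAllocator != NULL
callbacks.pfnReallocation = my_realloc;
callbacks.pfnFree = my_free;
callbacks.pfnInternalAllocation = nullptr;  // set both internal callbacks or neither
callbacks.pfnInternalFree = nullptr;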
-
-bool StatelessValidation::PreCallValidateDestroySamplerYcbcrConversion(
- VkDevice device,
- VkSamplerYcbcrConversion ycbcrConversion,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkDestroySamplerYcbcrConversion", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkDestroySamplerYcbcrConversion", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkDestroySamplerYcbcrConversion", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkDestroySamplerYcbcrConversion", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkDestroySamplerYcbcrConversion", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCreateDescriptorUpdateTemplate(
- VkDevice device,
- const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {
- bool skip = false;
- skip |= validate_struct_type("vkCreateDescriptorUpdateTemplate", "pCreateInfo", "VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO, true, "VUID-vkCreateDescriptorUpdateTemplate-pCreateInfo-parameter", "VUID-VkDescriptorUpdateTemplateCreateInfo-sType-sType");
- if (pCreateInfo != NULL)
- {
- skip |= validate_struct_pnext("vkCreateDescriptorUpdateTemplate", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDescriptorUpdateTemplateCreateInfo-pNext-pNext");
-
- skip |= validate_reserved_flags("vkCreateDescriptorUpdateTemplate", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkDescriptorUpdateTemplateCreateInfo-flags-zerobitmask");
-
- skip |= validate_array("vkCreateDescriptorUpdateTemplate", "pCreateInfo->descriptorUpdateEntryCount", "pCreateInfo->pDescriptorUpdateEntries", pCreateInfo->descriptorUpdateEntryCount, &pCreateInfo->pDescriptorUpdateEntries, true, true, "VUID-VkDescriptorUpdateTemplateCreateInfo-descriptorUpdateEntryCount-arraylength", "VUID-VkDescriptorUpdateTemplateCreateInfo-pDescriptorUpdateEntries-parameter");
-
- if (pCreateInfo->pDescriptorUpdateEntries != NULL)
- {
- for (uint32_t descriptorUpdateEntryIndex = 0; descriptorUpdateEntryIndex < pCreateInfo->descriptorUpdateEntryCount; ++descriptorUpdateEntryIndex)
- {
- skip |= validate_ranged_enum("vkCreateDescriptorUpdateTemplate", ParameterName("pCreateInfo->pDescriptorUpdateEntries[%i].descriptorType", ParameterName::IndexVector{ descriptorUpdateEntryIndex }), "VkDescriptorType", AllVkDescriptorTypeEnums, pCreateInfo->pDescriptorUpdateEntries[descriptorUpdateEntryIndex].descriptorType, "VUID-VkDescriptorUpdateTemplateEntry-descriptorType-parameter");
- }
- }
-
- skip |= validate_ranged_enum("vkCreateDescriptorUpdateTemplate", "pCreateInfo->templateType", "VkDescriptorUpdateTemplateType", AllVkDescriptorUpdateTemplateTypeEnums, pCreateInfo->templateType, "VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-parameter");
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreateDescriptorUpdateTemplate", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreateDescriptorUpdateTemplate", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreateDescriptorUpdateTemplate", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreateDescriptorUpdateTemplate", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreateDescriptorUpdateTemplate", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkCreateDescriptorUpdateTemplate", "pDescriptorUpdateTemplate", pDescriptorUpdateTemplate, "VUID-vkCreateDescriptorUpdateTemplate-pDescriptorUpdateTemplate-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateDestroyDescriptorUpdateTemplate(
- VkDevice device,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkDestroyDescriptorUpdateTemplate", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkDestroyDescriptorUpdateTemplate", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkDestroyDescriptorUpdateTemplate", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkDestroyDescriptorUpdateTemplate", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkDestroyDescriptorUpdateTemplate", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateUpdateDescriptorSetWithTemplate(
- VkDevice device,
- VkDescriptorSet descriptorSet,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const void* pData) {
- bool skip = false;
- skip |= validate_required_handle("vkUpdateDescriptorSetWithTemplate", "descriptorSet", descriptorSet);
- skip |= validate_required_handle("vkUpdateDescriptorSetWithTemplate", "descriptorUpdateTemplate", descriptorUpdateTemplate);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceExternalBufferProperties(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo,
- VkExternalBufferProperties* pExternalBufferProperties) {
- bool skip = false;
- skip |= validate_struct_type("vkGetPhysicalDeviceExternalBufferProperties", "pExternalBufferInfo", "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO", pExternalBufferInfo, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO, true, "VUID-vkGetPhysicalDeviceExternalBufferProperties-pExternalBufferInfo-parameter", "VUID-VkPhysicalDeviceExternalBufferInfo-sType-sType");
- if (pExternalBufferInfo != NULL)
- {
- skip |= validate_struct_pnext("vkGetPhysicalDeviceExternalBufferProperties", "pExternalBufferInfo->pNext", NULL, pExternalBufferInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkPhysicalDeviceExternalBufferInfo-pNext-pNext");
-
- skip |= validate_flags("vkGetPhysicalDeviceExternalBufferProperties", "pExternalBufferInfo->flags", "VkBufferCreateFlagBits", AllVkBufferCreateFlagBits, pExternalBufferInfo->flags, kOptionalFlags, "VUID-VkPhysicalDeviceExternalBufferInfo-flags-parameter");
-
- skip |= validate_flags("vkGetPhysicalDeviceExternalBufferProperties", "pExternalBufferInfo->usage", "VkBufferUsageFlagBits", AllVkBufferUsageFlagBits, pExternalBufferInfo->usage, kRequiredFlags, "VUID-VkPhysicalDeviceExternalBufferInfo-usage-parameter", "VUID-VkPhysicalDeviceExternalBufferInfo-usage-requiredbitmask");
-
- skip |= validate_flags("vkGetPhysicalDeviceExternalBufferProperties", "pExternalBufferInfo->handleType", "VkExternalMemoryHandleTypeFlagBits", AllVkExternalMemoryHandleTypeFlagBits, pExternalBufferInfo->handleType, kRequiredSingleBit, "VUID-VkPhysicalDeviceExternalBufferInfo-handleType-parameter", "VUID-VkPhysicalDeviceExternalBufferInfo-handleType-parameter");
- }
- skip |= validate_struct_type("vkGetPhysicalDeviceExternalBufferProperties", "pExternalBufferProperties", "VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES", pExternalBufferProperties, VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES, true, "VUID-vkGetPhysicalDeviceExternalBufferProperties-pExternalBufferProperties-parameter", "VUID-VkExternalBufferProperties-sType-sType");
- if (pExternalBufferProperties != NULL)
- {
- // No xml-driven validation
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceExternalFenceProperties(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo,
- VkExternalFenceProperties* pExternalFenceProperties) {
- bool skip = false;
- skip |= validate_struct_type("vkGetPhysicalDeviceExternalFenceProperties", "pExternalFenceInfo", "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO", pExternalFenceInfo, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO, true, "VUID-vkGetPhysicalDeviceExternalFenceProperties-pExternalFenceInfo-parameter", "VUID-VkPhysicalDeviceExternalFenceInfo-sType-sType");
- if (pExternalFenceInfo != NULL)
- {
- skip |= validate_struct_pnext("vkGetPhysicalDeviceExternalFenceProperties", "pExternalFenceInfo->pNext", NULL, pExternalFenceInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkPhysicalDeviceExternalFenceInfo-pNext-pNext");
-
- skip |= validate_flags("vkGetPhysicalDeviceExternalFenceProperties", "pExternalFenceInfo->handleType", "VkExternalFenceHandleTypeFlagBits", AllVkExternalFenceHandleTypeFlagBits, pExternalFenceInfo->handleType, kRequiredSingleBit, "VUID-VkPhysicalDeviceExternalFenceInfo-handleType-parameter", "VUID-VkPhysicalDeviceExternalFenceInfo-handleType-parameter");
- }
- skip |= validate_struct_type("vkGetPhysicalDeviceExternalFenceProperties", "pExternalFenceProperties", "VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES", pExternalFenceProperties, VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES, true, "VUID-vkGetPhysicalDeviceExternalFenceProperties-pExternalFenceProperties-parameter", "VUID-VkExternalFenceProperties-sType-sType");
- if (pExternalFenceProperties != NULL)
- {
- // No xml-driven validation
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceExternalSemaphoreProperties(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
- VkExternalSemaphoreProperties* pExternalSemaphoreProperties) {
- bool skip = false;
- skip |= validate_struct_type("vkGetPhysicalDeviceExternalSemaphoreProperties", "pExternalSemaphoreInfo", "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO", pExternalSemaphoreInfo, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO, true, "VUID-vkGetPhysicalDeviceExternalSemaphoreProperties-pExternalSemaphoreInfo-parameter", "VUID-VkPhysicalDeviceExternalSemaphoreInfo-sType-sType");
- if (pExternalSemaphoreInfo != NULL)
- {
- skip |= validate_struct_pnext("vkGetPhysicalDeviceExternalSemaphoreProperties", "pExternalSemaphoreInfo->pNext", NULL, pExternalSemaphoreInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkPhysicalDeviceExternalSemaphoreInfo-pNext-pNext");
-
- skip |= validate_flags("vkGetPhysicalDeviceExternalSemaphoreProperties", "pExternalSemaphoreInfo->handleType", "VkExternalSemaphoreHandleTypeFlagBits", AllVkExternalSemaphoreHandleTypeFlagBits, pExternalSemaphoreInfo->handleType, kRequiredSingleBit, "VUID-VkPhysicalDeviceExternalSemaphoreInfo-handleType-parameter", "VUID-VkPhysicalDeviceExternalSemaphoreInfo-handleType-parameter");
- }
- skip |= validate_struct_type("vkGetPhysicalDeviceExternalSemaphoreProperties", "pExternalSemaphoreProperties", "VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES", pExternalSemaphoreProperties, VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES, true, "VUID-vkGetPhysicalDeviceExternalSemaphoreProperties-pExternalSemaphoreProperties-parameter", "VUID-VkExternalSemaphoreProperties-sType-sType");
- if (pExternalSemaphoreProperties != NULL)
- {
- // No xml-driven validation
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetDescriptorSetLayoutSupport(
- VkDevice device,
- const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
- VkDescriptorSetLayoutSupport* pSupport) {
- bool skip = false;
- skip |= validate_struct_type("vkGetDescriptorSetLayoutSupport", "pCreateInfo", "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, true, "VUID-vkGetDescriptorSetLayoutSupport-pCreateInfo-parameter", "VUID-VkDescriptorSetLayoutCreateInfo-sType-sType");
- if (pCreateInfo != NULL)
- {
- const VkStructureType allowed_structs_VkDescriptorSetLayoutCreateInfo[] = { VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT };
-
- skip |= validate_struct_pnext("vkGetDescriptorSetLayoutSupport", "pCreateInfo->pNext", "VkDescriptorSetLayoutBindingFlagsCreateInfoEXT", pCreateInfo->pNext, ARRAY_SIZE(allowed_structs_VkDescriptorSetLayoutCreateInfo), allowed_structs_VkDescriptorSetLayoutCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkDescriptorSetLayoutCreateInfo-pNext-pNext");
-
- skip |= validate_flags("vkGetDescriptorSetLayoutSupport", "pCreateInfo->flags", "VkDescriptorSetLayoutCreateFlagBits", AllVkDescriptorSetLayoutCreateFlagBits, pCreateInfo->flags, kOptionalFlags, "VUID-VkDescriptorSetLayoutCreateInfo-flags-parameter");
-
- skip |= validate_array("vkGetDescriptorSetLayoutSupport", "pCreateInfo->bindingCount", "pCreateInfo->pBindings", pCreateInfo->bindingCount, &pCreateInfo->pBindings, false, true, kVUIDUndefined, "VUID-VkDescriptorSetLayoutCreateInfo-pBindings-parameter");
-
- if (pCreateInfo->pBindings != NULL)
- {
- for (uint32_t bindingIndex = 0; bindingIndex < pCreateInfo->bindingCount; ++bindingIndex)
- {
- skip |= validate_ranged_enum("vkGetDescriptorSetLayoutSupport", ParameterName("pCreateInfo->pBindings[%i].descriptorType", ParameterName::IndexVector{ bindingIndex }), "VkDescriptorType", AllVkDescriptorTypeEnums, pCreateInfo->pBindings[bindingIndex].descriptorType, "VUID-VkDescriptorSetLayoutBinding-descriptorType-parameter");
- }
- }
- }
- skip |= validate_struct_type("vkGetDescriptorSetLayoutSupport", "pSupport", "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT", pSupport, VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT, true, "VUID-vkGetDescriptorSetLayoutSupport-pSupport-parameter", "VUID-VkDescriptorSetLayoutSupport-sType-sType");
- if (pSupport != NULL)
- {
- // No xml-driven validation
- }
- return skip;
-}
-
-
-
-bool StatelessValidation::PreCallValidateDestroySurfaceKHR(
- VkInstance instance,
- VkSurfaceKHR surface,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkDestroySurfaceKHR", VK_KHR_SURFACE_EXTENSION_NAME);
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkDestroySurfaceKHR", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkDestroySurfaceKHR", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkDestroySurfaceKHR", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkDestroySurfaceKHR", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkDestroySurfaceKHR", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceSurfaceSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex,
- VkSurfaceKHR surface,
- VkBool32* pSupported) {
- bool skip = false;
- if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkGetPhysicalDeviceSurfaceSupportKHR", VK_KHR_SURFACE_EXTENSION_NAME);
- skip |= validate_required_handle("vkGetPhysicalDeviceSurfaceSupportKHR", "surface", surface);
- skip |= validate_required_pointer("vkGetPhysicalDeviceSurfaceSupportKHR", "pSupported", pSupported, "VUID-vkGetPhysicalDeviceSurfaceSupportKHR-pSupported-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceSurfaceCapabilitiesKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- VkSurfaceCapabilitiesKHR* pSurfaceCapabilities) {
- bool skip = false;
- if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkGetPhysicalDeviceSurfaceCapabilitiesKHR", VK_KHR_SURFACE_EXTENSION_NAME);
- skip |= validate_required_handle("vkGetPhysicalDeviceSurfaceCapabilitiesKHR", "surface", surface);
- skip |= validate_required_pointer("vkGetPhysicalDeviceSurfaceCapabilitiesKHR", "pSurfaceCapabilities", pSurfaceCapabilities, "VUID-vkGetPhysicalDeviceSurfaceCapabilitiesKHR-pSurfaceCapabilities-parameter");
- if (pSurfaceCapabilities != NULL)
- {
- // No xml-driven validation
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceSurfaceFormatsKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- uint32_t* pSurfaceFormatCount,
- VkSurfaceFormatKHR* pSurfaceFormats) {
- bool skip = false;
- if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkGetPhysicalDeviceSurfaceFormatsKHR", VK_KHR_SURFACE_EXTENSION_NAME);
- skip |= validate_required_handle("vkGetPhysicalDeviceSurfaceFormatsKHR", "surface", surface);
- skip |= validate_array("vkGetPhysicalDeviceSurfaceFormatsKHR", "pSurfaceFormatCount", "pSurfaceFormats", pSurfaceFormatCount, &pSurfaceFormats, true, false, false, kVUIDUndefined, "VUID-vkGetPhysicalDeviceSurfaceFormatsKHR-pSurfaceFormats-parameter");
- if (pSurfaceFormats != NULL)
- {
- for (uint32_t pSurfaceFormatIndex = 0; pSurfaceFormatIndex < *pSurfaceFormatCount; ++pSurfaceFormatIndex)
- {
- // No xml-driven validation
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceSurfacePresentModesKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- uint32_t* pPresentModeCount,
- VkPresentModeKHR* pPresentModes) {
- bool skip = false;
- if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkGetPhysicalDeviceSurfacePresentModesKHR", VK_KHR_SURFACE_EXTENSION_NAME);
- skip |= validate_required_handle("vkGetPhysicalDeviceSurfacePresentModesKHR", "surface", surface);
- skip |= validate_array("vkGetPhysicalDeviceSurfacePresentModesKHR", "pPresentModeCount", "pPresentModes", pPresentModeCount, &pPresentModes, true, false, false, kVUIDUndefined, "VUID-vkGetPhysicalDeviceSurfacePresentModesKHR-pPresentModes-parameter");
- return skip;
-}
-
-
-
-bool StatelessValidation::PreCallValidateCreateSwapchainKHR(
- VkDevice device,
- const VkSwapchainCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSwapchainKHR* pSwapchain) {
- bool skip = false;
- if (!device_extensions.vk_khr_surface) skip |= OutputExtensionError("vkCreateSwapchainKHR", VK_KHR_SURFACE_EXTENSION_NAME);
- if (!device_extensions.vk_khr_swapchain) skip |= OutputExtensionError("vkCreateSwapchainKHR", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
- skip |= validate_struct_type("vkCreateSwapchainKHR", "pCreateInfo", "VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR", pCreateInfo, VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR, true, "VUID-vkCreateSwapchainKHR-pCreateInfo-parameter", "VUID-VkSwapchainCreateInfoKHR-sType-sType");
- if (pCreateInfo != NULL)
- {
- const VkStructureType allowed_structs_VkSwapchainCreateInfoKHR[] = { VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR, VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR, VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT, VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT, VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT, VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD };
-
- skip |= validate_struct_pnext("vkCreateSwapchainKHR", "pCreateInfo->pNext", "VkDeviceGroupSwapchainCreateInfoKHR, VkImageFormatListCreateInfoKHR, VkSurfaceFullScreenExclusiveInfoEXT, VkSurfaceFullScreenExclusiveWin32InfoEXT, VkSwapchainCounterCreateInfoEXT, VkSwapchainDisplayNativeHdrCreateInfoAMD", pCreateInfo->pNext, ARRAY_SIZE(allowed_structs_VkSwapchainCreateInfoKHR), allowed_structs_VkSwapchainCreateInfoKHR, GeneratedVulkanHeaderVersion, "VUID-VkSwapchainCreateInfoKHR-pNext-pNext");
-
- skip |= validate_flags("vkCreateSwapchainKHR", "pCreateInfo->flags", "VkSwapchainCreateFlagBitsKHR", AllVkSwapchainCreateFlagBitsKHR, pCreateInfo->flags, kOptionalFlags, "VUID-VkSwapchainCreateInfoKHR-flags-parameter");
-
- skip |= validate_required_handle("vkCreateSwapchainKHR", "pCreateInfo->surface", pCreateInfo->surface);
-
- skip |= validate_ranged_enum("vkCreateSwapchainKHR", "pCreateInfo->imageFormat", "VkFormat", AllVkFormatEnums, pCreateInfo->imageFormat, "VUID-VkSwapchainCreateInfoKHR-imageFormat-parameter");
-
- skip |= validate_ranged_enum("vkCreateSwapchainKHR", "pCreateInfo->imageColorSpace", "VkColorSpaceKHR", AllVkColorSpaceKHREnums, pCreateInfo->imageColorSpace, "VUID-VkSwapchainCreateInfoKHR-imageColorSpace-parameter");
-
- // No xml-driven validation
-
- skip |= validate_flags("vkCreateSwapchainKHR", "pCreateInfo->imageUsage", "VkImageUsageFlagBits", AllVkImageUsageFlagBits, pCreateInfo->imageUsage, kRequiredFlags, "VUID-VkSwapchainCreateInfoKHR-imageUsage-parameter", "VUID-VkSwapchainCreateInfoKHR-imageUsage-requiredbitmask");
-
- skip |= validate_ranged_enum("vkCreateSwapchainKHR", "pCreateInfo->imageSharingMode", "VkSharingMode", AllVkSharingModeEnums, pCreateInfo->imageSharingMode, "VUID-VkSwapchainCreateInfoKHR-imageSharingMode-parameter");
-
- skip |= validate_flags("vkCreateSwapchainKHR", "pCreateInfo->preTransform", "VkSurfaceTransformFlagBitsKHR", AllVkSurfaceTransformFlagBitsKHR, pCreateInfo->preTransform, kRequiredSingleBit, "VUID-VkSwapchainCreateInfoKHR-preTransform-parameter", "VUID-VkSwapchainCreateInfoKHR-preTransform-parameter");
-
- skip |= validate_flags("vkCreateSwapchainKHR", "pCreateInfo->compositeAlpha", "VkCompositeAlphaFlagBitsKHR", AllVkCompositeAlphaFlagBitsKHR, pCreateInfo->compositeAlpha, kRequiredSingleBit, "VUID-VkSwapchainCreateInfoKHR-compositeAlpha-parameter", "VUID-VkSwapchainCreateInfoKHR-compositeAlpha-parameter");
-
- skip |= validate_ranged_enum("vkCreateSwapchainKHR", "pCreateInfo->presentMode", "VkPresentModeKHR", AllVkPresentModeKHREnums, pCreateInfo->presentMode, "VUID-VkSwapchainCreateInfoKHR-presentMode-parameter");
-
- skip |= validate_bool32("vkCreateSwapchainKHR", "pCreateInfo->clipped", pCreateInfo->clipped);
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreateSwapchainKHR", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreateSwapchainKHR", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreateSwapchainKHR", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreateSwapchainKHR", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreateSwapchainKHR", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkCreateSwapchainKHR", "pSwapchain", pSwapchain, "VUID-vkCreateSwapchainKHR-pSwapchain-parameter");
- if (!skip) skip |= manual_PreCallValidateCreateSwapchainKHR(device, pCreateInfo, pAllocator, pSwapchain);
- return skip;
-}
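// A minimal sketch of a VkSwapchainCreateInfoKHR that satisfies the stateless
// checks above. device and surface are assumed, hypothetical handles; the
// format, color space, extent and present mode shown here are placeholders
// that a real application would take from the surface queries earlier in this
// file.
VkSwapchainCreateInfoKHR sci = {};
sci.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
sci.surface = surface;                                    // required handle
sci.minImageCount = 2;
sci.imageFormat = VK_FORMAT_B8G8R8A8_UNORM;
sci.imageColorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
sci.imageExtent = {1280, 720};
sci.imageArrayLayers = 1;
sci.imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;     // must be non-zero
sci.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
sci.preTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR; // exactly one bit
sci.compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;   // exactly one bit
sci.presentMode = VK_PRESENT_MODE_FIFO_KHR;
sci.clipped = VK_TRUE;
VkSwapchainKHR swapchain = VK_NULL_HANDLE;
vkCreateSwapchainKHR(device, &sci, nullptr, &swapchain);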
-
-bool StatelessValidation::PreCallValidateDestroySwapchainKHR(
- VkDevice device,
- VkSwapchainKHR swapchain,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- if (!device_extensions.vk_khr_surface) skip |= OutputExtensionError("vkDestroySwapchainKHR", VK_KHR_SURFACE_EXTENSION_NAME);
- if (!device_extensions.vk_khr_swapchain) skip |= OutputExtensionError("vkDestroySwapchainKHR", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkDestroySwapchainKHR", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkDestroySwapchainKHR", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkDestroySwapchainKHR", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkDestroySwapchainKHR", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkDestroySwapchainKHR", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetSwapchainImagesKHR(
- VkDevice device,
- VkSwapchainKHR swapchain,
- uint32_t* pSwapchainImageCount,
- VkImage* pSwapchainImages) {
- bool skip = false;
- if (!device_extensions.vk_khr_surface) skip |= OutputExtensionError("vkGetSwapchainImagesKHR", VK_KHR_SURFACE_EXTENSION_NAME);
- if (!device_extensions.vk_khr_swapchain) skip |= OutputExtensionError("vkGetSwapchainImagesKHR", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
- skip |= validate_required_handle("vkGetSwapchainImagesKHR", "swapchain", swapchain);
- skip |= validate_array("vkGetSwapchainImagesKHR", "pSwapchainImageCount", "pSwapchainImages", pSwapchainImageCount, &pSwapchainImages, true, false, false, kVUIDUndefined, "VUID-vkGetSwapchainImagesKHR-pSwapchainImages-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateAcquireNextImageKHR(
- VkDevice device,
- VkSwapchainKHR swapchain,
- uint64_t timeout,
- VkSemaphore semaphore,
- VkFence fence,
- uint32_t* pImageIndex) {
- bool skip = false;
- if (!device_extensions.vk_khr_surface) skip |= OutputExtensionError("vkAcquireNextImageKHR", VK_KHR_SURFACE_EXTENSION_NAME);
- if (!device_extensions.vk_khr_swapchain) skip |= OutputExtensionError("vkAcquireNextImageKHR", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
- skip |= validate_required_handle("vkAcquireNextImageKHR", "swapchain", swapchain);
- skip |= validate_required_pointer("vkAcquireNextImageKHR", "pImageIndex", pImageIndex, "VUID-vkAcquireNextImageKHR-pImageIndex-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateQueuePresentKHR(
- VkQueue queue,
- const VkPresentInfoKHR* pPresentInfo) {
- bool skip = false;
- if (!device_extensions.vk_khr_surface) skip |= OutputExtensionError("vkQueuePresentKHR", VK_KHR_SURFACE_EXTENSION_NAME);
- if (!device_extensions.vk_khr_swapchain) skip |= OutputExtensionError("vkQueuePresentKHR", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
- skip |= validate_struct_type("vkQueuePresentKHR", "pPresentInfo", "VK_STRUCTURE_TYPE_PRESENT_INFO_KHR", pPresentInfo, VK_STRUCTURE_TYPE_PRESENT_INFO_KHR, true, "VUID-vkQueuePresentKHR-pPresentInfo-parameter", "VUID-VkPresentInfoKHR-sType-sType");
- if (pPresentInfo != NULL)
- {
- const VkStructureType allowed_structs_VkPresentInfoKHR[] = { VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR, VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR, VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP, VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR, VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE };
-
- skip |= validate_struct_pnext("vkQueuePresentKHR", "pPresentInfo->pNext", "VkDeviceGroupPresentInfoKHR, VkDisplayPresentInfoKHR, VkPresentFrameTokenGGP, VkPresentRegionsKHR, VkPresentTimesInfoGOOGLE", pPresentInfo->pNext, ARRAY_SIZE(allowed_structs_VkPresentInfoKHR), allowed_structs_VkPresentInfoKHR, GeneratedVulkanHeaderVersion, "VUID-VkPresentInfoKHR-pNext-pNext");
-
- skip |= validate_array("vkQueuePresentKHR", "pPresentInfo->waitSemaphoreCount", "pPresentInfo->pWaitSemaphores", pPresentInfo->waitSemaphoreCount, &pPresentInfo->pWaitSemaphores, false, true, kVUIDUndefined, "VUID-VkPresentInfoKHR-pWaitSemaphores-parameter");
-
- skip |= validate_handle_array("vkQueuePresentKHR", "pPresentInfo->swapchainCount", "pPresentInfo->pSwapchains", pPresentInfo->swapchainCount, pPresentInfo->pSwapchains, true, true);
-
- skip |= validate_array("vkQueuePresentKHR", "pPresentInfo->swapchainCount", "pPresentInfo->pImageIndices", pPresentInfo->swapchainCount, &pPresentInfo->pImageIndices, true, true, "VUID-VkPresentInfoKHR-swapchainCount-arraylength", "VUID-VkPresentInfoKHR-pImageIndices-parameter");
-
- skip |= validate_array("vkQueuePresentKHR", "pPresentInfo->swapchainCount", "pPresentInfo->pResults", pPresentInfo->swapchainCount, &pPresentInfo->pResults, true, false, "VUID-VkPresentInfoKHR-swapchainCount-arraylength", "VUID-VkPresentInfoKHR-pResults-parameter");
- }
- if (!skip) skip |= manual_PreCallValidateQueuePresentKHR(queue, pPresentInfo);
- return skip;
-}
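// A minimal sketch of a VkPresentInfoKHR matching the array-length checks
// above: pSwapchains and pImageIndices are both sized by swapchainCount, and
// pResults may be null. queue, swapchain, image_index and render_done are
// assumed, hypothetical variables.
VkPresentInfoKHR present = {};
present.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR;
present.waitSemaphoreCount = 1;
present.pWaitSemaphores = &render_done;
present.swapchainCount = 1;
present.pSwapchains = &swapchain;
present.pImageIndices = &image_index;
present.pResults = nullptr;       // optional per-swapchain results
vkQueuePresentKHR(queue, &present);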
-
-bool StatelessValidation::PreCallValidateGetDeviceGroupPresentCapabilitiesKHR(
- VkDevice device,
- VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities) {
- bool skip = false;
- if (!device_extensions.vk_khr_surface) skip |= OutputExtensionError("vkGetDeviceGroupPresentCapabilitiesKHR", VK_KHR_SURFACE_EXTENSION_NAME);
- if (!device_extensions.vk_khr_swapchain) skip |= OutputExtensionError("vkGetDeviceGroupPresentCapabilitiesKHR", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
- skip |= validate_struct_type("vkGetDeviceGroupPresentCapabilitiesKHR", "pDeviceGroupPresentCapabilities", "VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR", pDeviceGroupPresentCapabilities, VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR, true, "VUID-vkGetDeviceGroupPresentCapabilitiesKHR-pDeviceGroupPresentCapabilities-parameter", "VUID-VkDeviceGroupPresentCapabilitiesKHR-sType-sType");
- if (pDeviceGroupPresentCapabilities != NULL)
- {
- skip |= validate_struct_pnext("vkGetDeviceGroupPresentCapabilitiesKHR", "pDeviceGroupPresentCapabilities->pNext", NULL, pDeviceGroupPresentCapabilities->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDeviceGroupPresentCapabilitiesKHR-pNext-pNext");
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetDeviceGroupSurfacePresentModesKHR(
- VkDevice device,
- VkSurfaceKHR surface,
- VkDeviceGroupPresentModeFlagsKHR* pModes) {
- bool skip = false;
- if (!device_extensions.vk_khr_surface) skip |= OutputExtensionError("vkGetDeviceGroupSurfacePresentModesKHR", VK_KHR_SURFACE_EXTENSION_NAME);
- if (!device_extensions.vk_khr_swapchain) skip |= OutputExtensionError("vkGetDeviceGroupSurfacePresentModesKHR", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
- skip |= validate_required_handle("vkGetDeviceGroupSurfacePresentModesKHR", "surface", surface);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetPhysicalDevicePresentRectanglesKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- uint32_t* pRectCount,
- VkRect2D* pRects) {
- bool skip = false;
- skip |= validate_required_handle("vkGetPhysicalDevicePresentRectanglesKHR", "surface", surface);
- skip |= validate_array("vkGetPhysicalDevicePresentRectanglesKHR", "pRectCount", "pRects", pRectCount, &pRects, true, false, false, kVUIDUndefined, "VUID-vkGetPhysicalDevicePresentRectanglesKHR-pRects-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateAcquireNextImage2KHR(
- VkDevice device,
- const VkAcquireNextImageInfoKHR* pAcquireInfo,
- uint32_t* pImageIndex) {
- bool skip = false;
- if (!device_extensions.vk_khr_surface) skip |= OutputExtensionError("vkAcquireNextImage2KHR", VK_KHR_SURFACE_EXTENSION_NAME);
- if (!device_extensions.vk_khr_swapchain) skip |= OutputExtensionError("vkAcquireNextImage2KHR", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
- skip |= validate_struct_type("vkAcquireNextImage2KHR", "pAcquireInfo", "VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR", pAcquireInfo, VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR, true, "VUID-vkAcquireNextImage2KHR-pAcquireInfo-parameter", "VUID-VkAcquireNextImageInfoKHR-sType-sType");
- if (pAcquireInfo != NULL)
- {
- skip |= validate_struct_pnext("vkAcquireNextImage2KHR", "pAcquireInfo->pNext", NULL, pAcquireInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkAcquireNextImageInfoKHR-pNext-pNext");
-
- skip |= validate_required_handle("vkAcquireNextImage2KHR", "pAcquireInfo->swapchain", pAcquireInfo->swapchain);
- }
- skip |= validate_required_pointer("vkAcquireNextImage2KHR", "pImageIndex", pImageIndex, "VUID-vkAcquireNextImage2KHR-pImageIndex-parameter");
- return skip;
-}
-
-
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceDisplayPropertiesKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t* pPropertyCount,
- VkDisplayPropertiesKHR* pProperties) {
- bool skip = false;
- if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkGetPhysicalDeviceDisplayPropertiesKHR", VK_KHR_SURFACE_EXTENSION_NAME);
- if (!instance_extensions.vk_khr_display) skip |= OutputExtensionError("vkGetPhysicalDeviceDisplayPropertiesKHR", VK_KHR_DISPLAY_EXTENSION_NAME);
- skip |= validate_array("vkGetPhysicalDeviceDisplayPropertiesKHR", "pPropertyCount", "pProperties", pPropertyCount, &pProperties, true, false, false, kVUIDUndefined, "VUID-vkGetPhysicalDeviceDisplayPropertiesKHR-pProperties-parameter");
- if (pProperties != NULL)
- {
- for (uint32_t pPropertyIndex = 0; pPropertyIndex < *pPropertyCount; ++pPropertyIndex)
- {
- // No xml-driven validation
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceDisplayPlanePropertiesKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t* pPropertyCount,
- VkDisplayPlanePropertiesKHR* pProperties) {
- bool skip = false;
- if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkGetPhysicalDeviceDisplayPlanePropertiesKHR", VK_KHR_SURFACE_EXTENSION_NAME);
- if (!instance_extensions.vk_khr_display) skip |= OutputExtensionError("vkGetPhysicalDeviceDisplayPlanePropertiesKHR", VK_KHR_DISPLAY_EXTENSION_NAME);
- skip |= validate_array("vkGetPhysicalDeviceDisplayPlanePropertiesKHR", "pPropertyCount", "pProperties", pPropertyCount, &pProperties, true, false, false, kVUIDUndefined, "VUID-vkGetPhysicalDeviceDisplayPlanePropertiesKHR-pProperties-parameter");
- if (pProperties != NULL)
- {
- for (uint32_t pPropertyIndex = 0; pPropertyIndex < *pPropertyCount; ++pPropertyIndex)
- {
- // No xml-driven validation
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetDisplayPlaneSupportedDisplaysKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t planeIndex,
- uint32_t* pDisplayCount,
- VkDisplayKHR* pDisplays) {
- bool skip = false;
- if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkGetDisplayPlaneSupportedDisplaysKHR", VK_KHR_SURFACE_EXTENSION_NAME);
- if (!instance_extensions.vk_khr_display) skip |= OutputExtensionError("vkGetDisplayPlaneSupportedDisplaysKHR", VK_KHR_DISPLAY_EXTENSION_NAME);
- skip |= validate_array("vkGetDisplayPlaneSupportedDisplaysKHR", "pDisplayCount", "pDisplays", pDisplayCount, &pDisplays, true, false, false, kVUIDUndefined, "VUID-vkGetDisplayPlaneSupportedDisplaysKHR-pDisplays-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetDisplayModePropertiesKHR(
- VkPhysicalDevice physicalDevice,
- VkDisplayKHR display,
- uint32_t* pPropertyCount,
- VkDisplayModePropertiesKHR* pProperties) {
- bool skip = false;
- if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkGetDisplayModePropertiesKHR", VK_KHR_SURFACE_EXTENSION_NAME);
- if (!instance_extensions.vk_khr_display) skip |= OutputExtensionError("vkGetDisplayModePropertiesKHR", VK_KHR_DISPLAY_EXTENSION_NAME);
- skip |= validate_required_handle("vkGetDisplayModePropertiesKHR", "display", display);
- skip |= validate_array("vkGetDisplayModePropertiesKHR", "pPropertyCount", "pProperties", pPropertyCount, &pProperties, true, false, false, kVUIDUndefined, "VUID-vkGetDisplayModePropertiesKHR-pProperties-parameter");
- if (pProperties != NULL)
- {
- for (uint32_t pPropertyIndex = 0; pPropertyIndex < *pPropertyCount; ++pPropertyIndex)
- {
- // No xml-driven validation
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCreateDisplayModeKHR(
- VkPhysicalDevice physicalDevice,
- VkDisplayKHR display,
- const VkDisplayModeCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDisplayModeKHR* pMode) {
- bool skip = false;
- if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkCreateDisplayModeKHR", VK_KHR_SURFACE_EXTENSION_NAME);
- if (!instance_extensions.vk_khr_display) skip |= OutputExtensionError("vkCreateDisplayModeKHR", VK_KHR_DISPLAY_EXTENSION_NAME);
- skip |= validate_required_handle("vkCreateDisplayModeKHR", "display", display);
- skip |= validate_struct_type("vkCreateDisplayModeKHR", "pCreateInfo", "VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR", pCreateInfo, VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR, true, "VUID-vkCreateDisplayModeKHR-pCreateInfo-parameter", "VUID-VkDisplayModeCreateInfoKHR-sType-sType");
- if (pCreateInfo != NULL)
- {
- skip |= validate_struct_pnext("vkCreateDisplayModeKHR", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDisplayModeCreateInfoKHR-pNext-pNext");
-
- skip |= validate_reserved_flags("vkCreateDisplayModeKHR", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkDisplayModeCreateInfoKHR-flags-zerobitmask");
-
- // No xml-driven validation
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreateDisplayModeKHR", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreateDisplayModeKHR", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreateDisplayModeKHR", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreateDisplayModeKHR", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreateDisplayModeKHR", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkCreateDisplayModeKHR", "pMode", pMode, "VUID-vkCreateDisplayModeKHR-pMode-parameter");
- return skip;
-}
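The pAllocator block above repeats for every create call: the three base callbacks are mandatory, and the internal-allocation pair must be supplied together or left null together. A minimal sketch (not part of this commit), with hypothetical callback names and deliberately ignoring the alignment parameter that a production allocator must honor:

#include <vulkan/vulkan.h>
#include <cstdlib>

static VKAPI_ATTR void* VKAPI_CALL my_alloc(void* /*user*/, size_t size, size_t /*alignment*/,
                                            VkSystemAllocationScope /*scope*/) {
    return std::malloc(size);                 // real code must respect the requested alignment
}
static VKAPI_ATTR void* VKAPI_CALL my_realloc(void* /*user*/, void* original, size_t size,
                                              size_t /*alignment*/, VkSystemAllocationScope /*scope*/) {
    return std::realloc(original, size);
}
static VKAPI_ATTR void VKAPI_CALL my_free(void* /*user*/, void* memory) {
    std::free(memory);
}

VkAllocationCallbacks make_callbacks() {
    VkAllocationCallbacks cb{};
    cb.pUserData             = nullptr;
    cb.pfnAllocation         = my_alloc;      // required (VUID ...-00632)
    cb.pfnReallocation       = my_realloc;    // required (VUID ...-00633)
    cb.pfnFree               = my_free;       // required (VUID ...-00634)
    cb.pfnInternalAllocation = nullptr;       // optional, but only valid as a pair
    cb.pfnInternalFree       = nullptr;       // with pfnInternalAllocation
    return cb;
}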
-
-bool StatelessValidation::PreCallValidateGetDisplayPlaneCapabilitiesKHR(
- VkPhysicalDevice physicalDevice,
- VkDisplayModeKHR mode,
- uint32_t planeIndex,
- VkDisplayPlaneCapabilitiesKHR* pCapabilities) {
- bool skip = false;
- if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkGetDisplayPlaneCapabilitiesKHR", VK_KHR_SURFACE_EXTENSION_NAME);
- if (!instance_extensions.vk_khr_display) skip |= OutputExtensionError("vkGetDisplayPlaneCapabilitiesKHR", VK_KHR_DISPLAY_EXTENSION_NAME);
- skip |= validate_required_handle("vkGetDisplayPlaneCapabilitiesKHR", "mode", mode);
- skip |= validate_required_pointer("vkGetDisplayPlaneCapabilitiesKHR", "pCapabilities", pCapabilities, "VUID-vkGetDisplayPlaneCapabilitiesKHR-pCapabilities-parameter");
- if (pCapabilities != NULL)
- {
- // No xml-driven validation
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCreateDisplayPlaneSurfaceKHR(
- VkInstance instance,
- const VkDisplaySurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- bool skip = false;
- if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkCreateDisplayPlaneSurfaceKHR", VK_KHR_SURFACE_EXTENSION_NAME);
- if (!instance_extensions.vk_khr_display) skip |= OutputExtensionError("vkCreateDisplayPlaneSurfaceKHR", VK_KHR_DISPLAY_EXTENSION_NAME);
- skip |= validate_struct_type("vkCreateDisplayPlaneSurfaceKHR", "pCreateInfo", "VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR", pCreateInfo, VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR, true, "VUID-vkCreateDisplayPlaneSurfaceKHR-pCreateInfo-parameter", "VUID-VkDisplaySurfaceCreateInfoKHR-sType-sType");
- if (pCreateInfo != NULL)
- {
- skip |= validate_struct_pnext("vkCreateDisplayPlaneSurfaceKHR", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDisplaySurfaceCreateInfoKHR-pNext-pNext");
-
- skip |= validate_reserved_flags("vkCreateDisplayPlaneSurfaceKHR", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkDisplaySurfaceCreateInfoKHR-flags-zerobitmask");
-
- skip |= validate_required_handle("vkCreateDisplayPlaneSurfaceKHR", "pCreateInfo->displayMode", pCreateInfo->displayMode);
-
- skip |= validate_flags("vkCreateDisplayPlaneSurfaceKHR", "pCreateInfo->transform", "VkSurfaceTransformFlagBitsKHR", AllVkSurfaceTransformFlagBitsKHR, pCreateInfo->transform, kRequiredSingleBit, "VUID-VkDisplaySurfaceCreateInfoKHR-transform-parameter", "VUID-VkDisplaySurfaceCreateInfoKHR-transform-parameter");
-
- skip |= validate_flags("vkCreateDisplayPlaneSurfaceKHR", "pCreateInfo->alphaMode", "VkDisplayPlaneAlphaFlagBitsKHR", AllVkDisplayPlaneAlphaFlagBitsKHR, pCreateInfo->alphaMode, kRequiredSingleBit, "VUID-VkDisplaySurfaceCreateInfoKHR-alphaMode-parameter", "VUID-VkDisplaySurfaceCreateInfoKHR-alphaMode-parameter");
-
- // No xml-driven validation
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreateDisplayPlaneSurfaceKHR", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreateDisplayPlaneSurfaceKHR", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreateDisplayPlaneSurfaceKHR", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreateDisplayPlaneSurfaceKHR", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreateDisplayPlaneSurfaceKHR", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkCreateDisplayPlaneSurfaceKHR", "pSurface", pSurface, "VUID-vkCreateDisplayPlaneSurfaceKHR-pSurface-parameter");
- return skip;
-}
-
-
-
-bool StatelessValidation::PreCallValidateCreateSharedSwapchainsKHR(
- VkDevice device,
- uint32_t swapchainCount,
- const VkSwapchainCreateInfoKHR* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkSwapchainKHR* pSwapchains) {
- bool skip = false;
- if (!device_extensions.vk_khr_display) skip |= OutputExtensionError("vkCreateSharedSwapchainsKHR", VK_KHR_DISPLAY_EXTENSION_NAME);
- if (!device_extensions.vk_khr_swapchain) skip |= OutputExtensionError("vkCreateSharedSwapchainsKHR", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
- if (!device_extensions.vk_khr_display_swapchain) skip |= OutputExtensionError("vkCreateSharedSwapchainsKHR", VK_KHR_DISPLAY_SWAPCHAIN_EXTENSION_NAME);
- skip |= validate_struct_type_array("vkCreateSharedSwapchainsKHR", "swapchainCount", "pCreateInfos", "VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR", swapchainCount, pCreateInfos, VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR, true, true, "VUID-VkSwapchainCreateInfoKHR-sType-sType", "VUID-vkCreateSharedSwapchainsKHR-pCreateInfos-parameter", "VUID-vkCreateSharedSwapchainsKHR-swapchainCount-arraylength");
- if (pCreateInfos != NULL)
- {
- for (uint32_t swapchainIndex = 0; swapchainIndex < swapchainCount; ++swapchainIndex)
- {
- const VkStructureType allowed_structs_VkSwapchainCreateInfoKHR[] = { VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR, VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR, VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT, VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT, VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT, VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD };
-
- skip |= validate_struct_pnext("vkCreateSharedSwapchainsKHR", ParameterName("pCreateInfos[%i].pNext", ParameterName::IndexVector{ swapchainIndex }), "VkDeviceGroupSwapchainCreateInfoKHR, VkImageFormatListCreateInfoKHR, VkSurfaceFullScreenExclusiveInfoEXT, VkSurfaceFullScreenExclusiveWin32InfoEXT, VkSwapchainCounterCreateInfoEXT, VkSwapchainDisplayNativeHdrCreateInfoAMD", pCreateInfos[swapchainIndex].pNext, ARRAY_SIZE(allowed_structs_VkSwapchainCreateInfoKHR), allowed_structs_VkSwapchainCreateInfoKHR, GeneratedVulkanHeaderVersion, "VUID-VkSwapchainCreateInfoKHR-pNext-pNext");
-
- skip |= validate_flags("vkCreateSharedSwapchainsKHR", ParameterName("pCreateInfos[%i].flags", ParameterName::IndexVector{ swapchainIndex }), "VkSwapchainCreateFlagBitsKHR", AllVkSwapchainCreateFlagBitsKHR, pCreateInfos[swapchainIndex].flags, kOptionalFlags, "VUID-VkSwapchainCreateInfoKHR-flags-parameter");
-
- skip |= validate_required_handle("vkCreateSharedSwapchainsKHR", ParameterName("pCreateInfos[%i].surface", ParameterName::IndexVector{ swapchainIndex }), pCreateInfos[swapchainIndex].surface);
-
- skip |= validate_ranged_enum("vkCreateSharedSwapchainsKHR", ParameterName("pCreateInfos[%i].imageFormat", ParameterName::IndexVector{ swapchainIndex }), "VkFormat", AllVkFormatEnums, pCreateInfos[swapchainIndex].imageFormat, "VUID-VkSwapchainCreateInfoKHR-imageFormat-parameter");
-
- skip |= validate_ranged_enum("vkCreateSharedSwapchainsKHR", ParameterName("pCreateInfos[%i].imageColorSpace", ParameterName::IndexVector{ swapchainIndex }), "VkColorSpaceKHR", AllVkColorSpaceKHREnums, pCreateInfos[swapchainIndex].imageColorSpace, "VUID-VkSwapchainCreateInfoKHR-imageColorSpace-parameter");
-
- // No xml-driven validation
-
- skip |= validate_flags("vkCreateSharedSwapchainsKHR", ParameterName("pCreateInfos[%i].imageUsage", ParameterName::IndexVector{ swapchainIndex }), "VkImageUsageFlagBits", AllVkImageUsageFlagBits, pCreateInfos[swapchainIndex].imageUsage, kRequiredFlags, "VUID-VkSwapchainCreateInfoKHR-imageUsage-parameter", "VUID-VkSwapchainCreateInfoKHR-imageUsage-requiredbitmask");
-
- skip |= validate_ranged_enum("vkCreateSharedSwapchainsKHR", ParameterName("pCreateInfos[%i].imageSharingMode", ParameterName::IndexVector{ swapchainIndex }), "VkSharingMode", AllVkSharingModeEnums, pCreateInfos[swapchainIndex].imageSharingMode, "VUID-VkSwapchainCreateInfoKHR-imageSharingMode-parameter");
-
- skip |= validate_flags("vkCreateSharedSwapchainsKHR", ParameterName("pCreateInfos[%i].preTransform", ParameterName::IndexVector{ swapchainIndex }), "VkSurfaceTransformFlagBitsKHR", AllVkSurfaceTransformFlagBitsKHR, pCreateInfos[swapchainIndex].preTransform, kRequiredSingleBit, "VUID-VkSwapchainCreateInfoKHR-preTransform-parameter", "VUID-VkSwapchainCreateInfoKHR-preTransform-parameter");
-
- skip |= validate_flags("vkCreateSharedSwapchainsKHR", ParameterName("pCreateInfos[%i].compositeAlpha", ParameterName::IndexVector{ swapchainIndex }), "VkCompositeAlphaFlagBitsKHR", AllVkCompositeAlphaFlagBitsKHR, pCreateInfos[swapchainIndex].compositeAlpha, kRequiredSingleBit, "VUID-VkSwapchainCreateInfoKHR-compositeAlpha-parameter", "VUID-VkSwapchainCreateInfoKHR-compositeAlpha-parameter");
-
- skip |= validate_ranged_enum("vkCreateSharedSwapchainsKHR", ParameterName("pCreateInfos[%i].presentMode", ParameterName::IndexVector{ swapchainIndex }), "VkPresentModeKHR", AllVkPresentModeKHREnums, pCreateInfos[swapchainIndex].presentMode, "VUID-VkSwapchainCreateInfoKHR-presentMode-parameter");
-
- skip |= validate_bool32("vkCreateSharedSwapchainsKHR", ParameterName("pCreateInfos[%i].clipped", ParameterName::IndexVector{ swapchainIndex }), pCreateInfos[swapchainIndex].clipped);
- }
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreateSharedSwapchainsKHR", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreateSharedSwapchainsKHR", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreateSharedSwapchainsKHR", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreateSharedSwapchainsKHR", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreateSharedSwapchainsKHR", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_array("vkCreateSharedSwapchainsKHR", "swapchainCount", "pSwapchains", swapchainCount, &pSwapchains, true, true, "VUID-vkCreateSharedSwapchainsKHR-swapchainCount-arraylength", "VUID-vkCreateSharedSwapchainsKHR-pSwapchains-parameter");
- return skip;
-}
-
-
-
-#ifdef VK_USE_PLATFORM_XLIB_KHR
-
-bool StatelessValidation::PreCallValidateCreateXlibSurfaceKHR(
- VkInstance instance,
- const VkXlibSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- bool skip = false;
- if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkCreateXlibSurfaceKHR", VK_KHR_SURFACE_EXTENSION_NAME);
- if (!instance_extensions.vk_khr_xlib_surface) skip |= OutputExtensionError("vkCreateXlibSurfaceKHR", VK_KHR_XLIB_SURFACE_EXTENSION_NAME);
- skip |= validate_struct_type("vkCreateXlibSurfaceKHR", "pCreateInfo", "VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR", pCreateInfo, VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR, true, "VUID-vkCreateXlibSurfaceKHR-pCreateInfo-parameter", "VUID-VkXlibSurfaceCreateInfoKHR-sType-sType");
- if (pCreateInfo != NULL)
- {
- skip |= validate_struct_pnext("vkCreateXlibSurfaceKHR", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkXlibSurfaceCreateInfoKHR-pNext-pNext");
-
- skip |= validate_reserved_flags("vkCreateXlibSurfaceKHR", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkXlibSurfaceCreateInfoKHR-flags-zerobitmask");
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreateXlibSurfaceKHR", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreateXlibSurfaceKHR", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreateXlibSurfaceKHR", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreateXlibSurfaceKHR", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreateXlibSurfaceKHR", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkCreateXlibSurfaceKHR", "pSurface", pSurface, "VUID-vkCreateXlibSurfaceKHR-pSurface-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceXlibPresentationSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex,
- Display* dpy,
- VisualID visualID) {
- bool skip = false;
- if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkGetPhysicalDeviceXlibPresentationSupportKHR", VK_KHR_SURFACE_EXTENSION_NAME);
- if (!instance_extensions.vk_khr_xlib_surface) skip |= OutputExtensionError("vkGetPhysicalDeviceXlibPresentationSupportKHR", VK_KHR_XLIB_SURFACE_EXTENSION_NAME);
- skip |= validate_required_pointer("vkGetPhysicalDeviceXlibPresentationSupportKHR", "dpy", dpy, "VUID-vkGetPhysicalDeviceXlibPresentationSupportKHR-dpy-parameter");
- return skip;
-}
-
-#endif // VK_USE_PLATFORM_XLIB_KHR
-
-#ifdef VK_USE_PLATFORM_XCB_KHR
-
-bool StatelessValidation::PreCallValidateCreateXcbSurfaceKHR(
- VkInstance instance,
- const VkXcbSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- bool skip = false;
- if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkCreateXcbSurfaceKHR", VK_KHR_SURFACE_EXTENSION_NAME);
- if (!instance_extensions.vk_khr_xcb_surface) skip |= OutputExtensionError("vkCreateXcbSurfaceKHR", VK_KHR_XCB_SURFACE_EXTENSION_NAME);
- skip |= validate_struct_type("vkCreateXcbSurfaceKHR", "pCreateInfo", "VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR", pCreateInfo, VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR, true, "VUID-vkCreateXcbSurfaceKHR-pCreateInfo-parameter", "VUID-VkXcbSurfaceCreateInfoKHR-sType-sType");
- if (pCreateInfo != NULL)
- {
- skip |= validate_struct_pnext("vkCreateXcbSurfaceKHR", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkXcbSurfaceCreateInfoKHR-pNext-pNext");
-
- skip |= validate_reserved_flags("vkCreateXcbSurfaceKHR", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkXcbSurfaceCreateInfoKHR-flags-zerobitmask");
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreateXcbSurfaceKHR", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreateXcbSurfaceKHR", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreateXcbSurfaceKHR", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreateXcbSurfaceKHR", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreateXcbSurfaceKHR", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkCreateXcbSurfaceKHR", "pSurface", pSurface, "VUID-vkCreateXcbSurfaceKHR-pSurface-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceXcbPresentationSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex,
- xcb_connection_t* connection,
- xcb_visualid_t visual_id) {
- bool skip = false;
- if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkGetPhysicalDeviceXcbPresentationSupportKHR", VK_KHR_SURFACE_EXTENSION_NAME);
- if (!instance_extensions.vk_khr_xcb_surface) skip |= OutputExtensionError("vkGetPhysicalDeviceXcbPresentationSupportKHR", VK_KHR_XCB_SURFACE_EXTENSION_NAME);
- skip |= validate_required_pointer("vkGetPhysicalDeviceXcbPresentationSupportKHR", "connection", connection, "VUID-vkGetPhysicalDeviceXcbPresentationSupportKHR-connection-parameter");
- return skip;
-}
-
-#endif // VK_USE_PLATFORM_XCB_KHR
-
-#ifdef VK_USE_PLATFORM_WAYLAND_KHR
-
-bool StatelessValidation::PreCallValidateCreateWaylandSurfaceKHR(
- VkInstance instance,
- const VkWaylandSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- bool skip = false;
- if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkCreateWaylandSurfaceKHR", VK_KHR_SURFACE_EXTENSION_NAME);
- if (!instance_extensions.vk_khr_wayland_surface) skip |= OutputExtensionError("vkCreateWaylandSurfaceKHR", VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME);
- skip |= validate_struct_type("vkCreateWaylandSurfaceKHR", "pCreateInfo", "VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR", pCreateInfo, VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR, true, "VUID-vkCreateWaylandSurfaceKHR-pCreateInfo-parameter", "VUID-VkWaylandSurfaceCreateInfoKHR-sType-sType");
- if (pCreateInfo != NULL)
- {
- skip |= validate_struct_pnext("vkCreateWaylandSurfaceKHR", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkWaylandSurfaceCreateInfoKHR-pNext-pNext");
-
- skip |= validate_reserved_flags("vkCreateWaylandSurfaceKHR", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkWaylandSurfaceCreateInfoKHR-flags-zerobitmask");
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreateWaylandSurfaceKHR", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreateWaylandSurfaceKHR", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreateWaylandSurfaceKHR", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreateWaylandSurfaceKHR", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreateWaylandSurfaceKHR", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkCreateWaylandSurfaceKHR", "pSurface", pSurface, "VUID-vkCreateWaylandSurfaceKHR-pSurface-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceWaylandPresentationSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex,
- struct wl_display* display) {
- bool skip = false;
- if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkGetPhysicalDeviceWaylandPresentationSupportKHR", VK_KHR_SURFACE_EXTENSION_NAME);
- if (!instance_extensions.vk_khr_wayland_surface) skip |= OutputExtensionError("vkGetPhysicalDeviceWaylandPresentationSupportKHR", VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME);
- skip |= validate_required_pointer("vkGetPhysicalDeviceWaylandPresentationSupportKHR", "display", display, "VUID-vkGetPhysicalDeviceWaylandPresentationSupportKHR-display-parameter");
- return skip;
-}
-
-#endif // VK_USE_PLATFORM_WAYLAND_KHR
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-
-bool StatelessValidation::PreCallValidateCreateAndroidSurfaceKHR(
- VkInstance instance,
- const VkAndroidSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- bool skip = false;
- if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkCreateAndroidSurfaceKHR", VK_KHR_SURFACE_EXTENSION_NAME);
- if (!instance_extensions.vk_khr_android_surface) skip |= OutputExtensionError("vkCreateAndroidSurfaceKHR", VK_KHR_ANDROID_SURFACE_EXTENSION_NAME);
- skip |= validate_struct_type("vkCreateAndroidSurfaceKHR", "pCreateInfo", "VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR", pCreateInfo, VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR, true, "VUID-vkCreateAndroidSurfaceKHR-pCreateInfo-parameter", "VUID-VkAndroidSurfaceCreateInfoKHR-sType-sType");
- if (pCreateInfo != NULL)
- {
- skip |= validate_struct_pnext("vkCreateAndroidSurfaceKHR", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkAndroidSurfaceCreateInfoKHR-pNext-pNext");
-
- skip |= validate_reserved_flags("vkCreateAndroidSurfaceKHR", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkAndroidSurfaceCreateInfoKHR-flags-zerobitmask");
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreateAndroidSurfaceKHR", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreateAndroidSurfaceKHR", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreateAndroidSurfaceKHR", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreateAndroidSurfaceKHR", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreateAndroidSurfaceKHR", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkCreateAndroidSurfaceKHR", "pSurface", pSurface, "VUID-vkCreateAndroidSurfaceKHR-pSurface-parameter");
- return skip;
-}
-
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-bool StatelessValidation::PreCallValidateCreateWin32SurfaceKHR(
- VkInstance instance,
- const VkWin32SurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- bool skip = false;
- if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkCreateWin32SurfaceKHR", VK_KHR_SURFACE_EXTENSION_NAME);
- if (!instance_extensions.vk_khr_win32_surface) skip |= OutputExtensionError("vkCreateWin32SurfaceKHR", VK_KHR_WIN32_SURFACE_EXTENSION_NAME);
- skip |= validate_struct_type("vkCreateWin32SurfaceKHR", "pCreateInfo", "VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR", pCreateInfo, VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR, true, "VUID-vkCreateWin32SurfaceKHR-pCreateInfo-parameter", "VUID-VkWin32SurfaceCreateInfoKHR-sType-sType");
- if (pCreateInfo != NULL)
- {
- skip |= validate_struct_pnext("vkCreateWin32SurfaceKHR", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkWin32SurfaceCreateInfoKHR-pNext-pNext");
-
- skip |= validate_reserved_flags("vkCreateWin32SurfaceKHR", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkWin32SurfaceCreateInfoKHR-flags-zerobitmask");
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreateWin32SurfaceKHR", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreateWin32SurfaceKHR", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreateWin32SurfaceKHR", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreateWin32SurfaceKHR", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreateWin32SurfaceKHR", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkCreateWin32SurfaceKHR", "pSurface", pSurface, "VUID-vkCreateWin32SurfaceKHR-pSurface-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceWin32PresentationSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex) {
- bool skip = false;
- if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkGetPhysicalDeviceWin32PresentationSupportKHR", VK_KHR_SURFACE_EXTENSION_NAME);
- if (!instance_extensions.vk_khr_win32_surface) skip |= OutputExtensionError("vkGetPhysicalDeviceWin32PresentationSupportKHR", VK_KHR_WIN32_SURFACE_EXTENSION_NAME);
- // No xml-driven validation
- return skip;
-}
-
-#endif // VK_USE_PLATFORM_WIN32_KHR
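The surface-creation validators above are compiled per window system via the VK_USE_PLATFORM_* guards, and each accepts only the exact sType, an empty pNext chain and zero (reserved) flags. A minimal Win32-side sketch (not part of this commit; the Xlib, Xcb, Wayland and Android paths are analogous, and create_win32_surface is a hypothetical helper):

#ifdef VK_USE_PLATFORM_WIN32_KHR
#include <windows.h>
#include <vulkan/vulkan.h>

VkResult create_win32_surface(VkInstance instance, HINSTANCE hinstance, HWND hwnd,
                              VkSurfaceKHR* out_surface) {
    VkWin32SurfaceCreateInfoKHR info{};
    info.sType     = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR;  // checked by validate_struct_type
    info.pNext     = nullptr;                                          // no extension structs allowed
    info.flags     = 0;                                                // reserved, must be zero
    info.hinstance = hinstance;
    info.hwnd      = hwnd;
    return vkCreateWin32SurfaceKHR(instance, &info, /*pAllocator=*/nullptr, out_surface);
}
#endif  // VK_USE_PLATFORM_WIN32_KHR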
-
-
-
-
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceFeatures2KHR(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceFeatures2* pFeatures) {
- bool skip = false;
- if (!instance_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkGetPhysicalDeviceFeatures2KHR", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- skip |= validate_struct_type("vkGetPhysicalDeviceFeatures2KHR", "pFeatures", "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2", pFeatures, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2, true, "VUID-vkGetPhysicalDeviceFeatures2-pFeatures-parameter", "VUID-VkPhysicalDeviceFeatures2-sType-sType");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceProperties2KHR(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceProperties2* pProperties) {
- bool skip = false;
- if (!instance_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkGetPhysicalDeviceProperties2KHR", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- skip |= validate_struct_type("vkGetPhysicalDeviceProperties2KHR", "pProperties", "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2", pProperties, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2, true, "VUID-vkGetPhysicalDeviceProperties2-pProperties-parameter", "VUID-VkPhysicalDeviceProperties2-sType-sType");
- if (pProperties != NULL)
- {
- // No xml-driven validation
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceFormatProperties2KHR(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkFormatProperties2* pFormatProperties) {
- bool skip = false;
- if (!instance_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkGetPhysicalDeviceFormatProperties2KHR", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- skip |= validate_ranged_enum("vkGetPhysicalDeviceFormatProperties2KHR", "format", "VkFormat", AllVkFormatEnums, format, "VUID-vkGetPhysicalDeviceFormatProperties2-format-parameter");
- skip |= validate_struct_type("vkGetPhysicalDeviceFormatProperties2KHR", "pFormatProperties", "VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2", pFormatProperties, VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, true, "VUID-vkGetPhysicalDeviceFormatProperties2-pFormatProperties-parameter", "VUID-VkFormatProperties2-sType-sType");
- if (pFormatProperties != NULL)
- {
- // No xml-driven validation
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceImageFormatProperties2KHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
- VkImageFormatProperties2* pImageFormatProperties) {
- bool skip = false;
- if (!instance_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkGetPhysicalDeviceImageFormatProperties2KHR", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- skip |= validate_struct_type("vkGetPhysicalDeviceImageFormatProperties2KHR", "pImageFormatInfo", "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2", pImageFormatInfo, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2, true, "VUID-vkGetPhysicalDeviceImageFormatProperties2-pImageFormatInfo-parameter", "VUID-VkPhysicalDeviceImageFormatInfo2-sType-sType");
- if (pImageFormatInfo != NULL)
- {
- const VkStructureType allowed_structs_VkPhysicalDeviceImageFormatInfo2[] = { VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR, VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT };
-
- skip |= validate_struct_pnext("vkGetPhysicalDeviceImageFormatProperties2KHR", "pImageFormatInfo->pNext", "VkImageFormatListCreateInfoKHR, VkImageStencilUsageCreateInfoEXT, VkPhysicalDeviceExternalImageFormatInfo, VkPhysicalDeviceImageDrmFormatModifierInfoEXT, VkPhysicalDeviceImageViewImageFormatInfoEXT", pImageFormatInfo->pNext, ARRAY_SIZE(allowed_structs_VkPhysicalDeviceImageFormatInfo2), allowed_structs_VkPhysicalDeviceImageFormatInfo2, GeneratedVulkanHeaderVersion, "VUID-VkPhysicalDeviceImageFormatInfo2-pNext-pNext");
-
- skip |= validate_ranged_enum("vkGetPhysicalDeviceImageFormatProperties2KHR", "pImageFormatInfo->format", "VkFormat", AllVkFormatEnums, pImageFormatInfo->format, "VUID-VkPhysicalDeviceImageFormatInfo2-format-parameter");
-
- skip |= validate_ranged_enum("vkGetPhysicalDeviceImageFormatProperties2KHR", "pImageFormatInfo->type", "VkImageType", AllVkImageTypeEnums, pImageFormatInfo->type, "VUID-VkPhysicalDeviceImageFormatInfo2-type-parameter");
-
- skip |= validate_ranged_enum("vkGetPhysicalDeviceImageFormatProperties2KHR", "pImageFormatInfo->tiling", "VkImageTiling", AllVkImageTilingEnums, pImageFormatInfo->tiling, "VUID-VkPhysicalDeviceImageFormatInfo2-tiling-parameter");
-
- skip |= validate_flags("vkGetPhysicalDeviceImageFormatProperties2KHR", "pImageFormatInfo->usage", "VkImageUsageFlagBits", AllVkImageUsageFlagBits, pImageFormatInfo->usage, kRequiredFlags, "VUID-VkPhysicalDeviceImageFormatInfo2-usage-parameter", "VUID-VkPhysicalDeviceImageFormatInfo2-usage-requiredbitmask");
-
- skip |= validate_flags("vkGetPhysicalDeviceImageFormatProperties2KHR", "pImageFormatInfo->flags", "VkImageCreateFlagBits", AllVkImageCreateFlagBits, pImageFormatInfo->flags, kOptionalFlags, "VUID-VkPhysicalDeviceImageFormatInfo2-flags-parameter");
- }
- skip |= validate_struct_type("vkGetPhysicalDeviceImageFormatProperties2KHR", "pImageFormatProperties", "VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2", pImageFormatProperties, VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2, true, "VUID-vkGetPhysicalDeviceImageFormatProperties2-pImageFormatProperties-parameter", "VUID-VkImageFormatProperties2-sType-sType");
- if (pImageFormatProperties != NULL)
- {
- // No xml-driven validation
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceQueueFamilyProperties2KHR(
- VkPhysicalDevice physicalDevice,
- uint32_t* pQueueFamilyPropertyCount,
- VkQueueFamilyProperties2* pQueueFamilyProperties) {
- bool skip = false;
- if (!instance_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkGetPhysicalDeviceQueueFamilyProperties2KHR", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- skip |= validate_struct_type_array("vkGetPhysicalDeviceQueueFamilyProperties2KHR", "pQueueFamilyPropertyCount", "pQueueFamilyProperties", "VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2", pQueueFamilyPropertyCount, pQueueFamilyProperties, VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2, true, false, false, "VUID-VkQueueFamilyProperties2-sType-sType", "VUID-vkGetPhysicalDeviceQueueFamilyProperties2-pQueueFamilyProperties-parameter", kVUIDUndefined);
- if (pQueueFamilyProperties != NULL)
- {
- for (uint32_t pQueueFamilyPropertyIndex = 0; pQueueFamilyPropertyIndex < *pQueueFamilyPropertyCount; ++pQueueFamilyPropertyIndex)
- {
- // No xml-driven validation
- }
- }
- return skip;
-}
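The validate_struct_type_array call above encodes the usual two-call enumeration idiom, including the requirement that every returned element already carry the right sType. A sketch (not part of this commit) using the core Vulkan 1.1 alias; when only the KHR extension is available, the entry point would instead be fetched through vkGetInstanceProcAddr:

#include <vulkan/vulkan.h>
#include <vector>

std::vector<VkQueueFamilyProperties2> query_queue_families(VkPhysicalDevice gpu) {
    uint32_t count = 0;
    vkGetPhysicalDeviceQueueFamilyProperties2(gpu, &count, nullptr);      // first call: count only

    std::vector<VkQueueFamilyProperties2> props(count);
    for (VkQueueFamilyProperties2& p : props) {
        p.sType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2;            // each element's sType is validated
        p.pNext = nullptr;
    }
    vkGetPhysicalDeviceQueueFamilyProperties2(gpu, &count, props.data()); // second call: fill the array
    return props;
}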
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceMemoryProperties2KHR(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceMemoryProperties2* pMemoryProperties) {
- bool skip = false;
- if (!instance_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkGetPhysicalDeviceMemoryProperties2KHR", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- skip |= validate_struct_type("vkGetPhysicalDeviceMemoryProperties2KHR", "pMemoryProperties", "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2", pMemoryProperties, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2, true, "VUID-vkGetPhysicalDeviceMemoryProperties2-pMemoryProperties-parameter", "VUID-VkPhysicalDeviceMemoryProperties2-sType-sType");
- if (pMemoryProperties != NULL)
- {
- // No xml-driven validation
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceSparseImageFormatProperties2KHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo,
- uint32_t* pPropertyCount,
- VkSparseImageFormatProperties2* pProperties) {
- bool skip = false;
- if (!instance_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkGetPhysicalDeviceSparseImageFormatProperties2KHR", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- skip |= validate_struct_type("vkGetPhysicalDeviceSparseImageFormatProperties2KHR", "pFormatInfo", "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2", pFormatInfo, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2, true, "VUID-vkGetPhysicalDeviceSparseImageFormatProperties2-pFormatInfo-parameter", "VUID-VkPhysicalDeviceSparseImageFormatInfo2-sType-sType");
- if (pFormatInfo != NULL)
- {
- skip |= validate_struct_pnext("vkGetPhysicalDeviceSparseImageFormatProperties2KHR", "pFormatInfo->pNext", NULL, pFormatInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkPhysicalDeviceSparseImageFormatInfo2-pNext-pNext");
-
- skip |= validate_ranged_enum("vkGetPhysicalDeviceSparseImageFormatProperties2KHR", "pFormatInfo->format", "VkFormat", AllVkFormatEnums, pFormatInfo->format, "VUID-VkPhysicalDeviceSparseImageFormatInfo2-format-parameter");
-
- skip |= validate_ranged_enum("vkGetPhysicalDeviceSparseImageFormatProperties2KHR", "pFormatInfo->type", "VkImageType", AllVkImageTypeEnums, pFormatInfo->type, "VUID-VkPhysicalDeviceSparseImageFormatInfo2-type-parameter");
-
- skip |= validate_flags("vkGetPhysicalDeviceSparseImageFormatProperties2KHR", "pFormatInfo->samples", "VkSampleCountFlagBits", AllVkSampleCountFlagBits, pFormatInfo->samples, kRequiredSingleBit, "VUID-VkPhysicalDeviceSparseImageFormatInfo2-samples-parameter", "VUID-VkPhysicalDeviceSparseImageFormatInfo2-samples-parameter");
-
- skip |= validate_flags("vkGetPhysicalDeviceSparseImageFormatProperties2KHR", "pFormatInfo->usage", "VkImageUsageFlagBits", AllVkImageUsageFlagBits, pFormatInfo->usage, kRequiredFlags, "VUID-VkPhysicalDeviceSparseImageFormatInfo2-usage-parameter", "VUID-VkPhysicalDeviceSparseImageFormatInfo2-usage-requiredbitmask");
-
- skip |= validate_ranged_enum("vkGetPhysicalDeviceSparseImageFormatProperties2KHR", "pFormatInfo->tiling", "VkImageTiling", AllVkImageTilingEnums, pFormatInfo->tiling, "VUID-VkPhysicalDeviceSparseImageFormatInfo2-tiling-parameter");
- }
- skip |= validate_struct_type_array("vkGetPhysicalDeviceSparseImageFormatProperties2KHR", "pPropertyCount", "pProperties", "VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2", pPropertyCount, pProperties, VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2, true, false, false, "VUID-VkSparseImageFormatProperties2-sType-sType", "VUID-vkGetPhysicalDeviceSparseImageFormatProperties2-pProperties-parameter", kVUIDUndefined);
- if (pProperties != NULL)
- {
- for (uint32_t pPropertyIndex = 0; pPropertyIndex < *pPropertyCount; ++pPropertyIndex)
- {
- // No xml-driven validation
- }
- }
- return skip;
-}
-
-
-
-bool StatelessValidation::PreCallValidateGetDeviceGroupPeerMemoryFeaturesKHR(
- VkDevice device,
- uint32_t heapIndex,
- uint32_t localDeviceIndex,
- uint32_t remoteDeviceIndex,
- VkPeerMemoryFeatureFlags* pPeerMemoryFeatures) {
- bool skip = false;
- if (!device_extensions.vk_khr_device_group_creation) skip |= OutputExtensionError("vkGetDeviceGroupPeerMemoryFeaturesKHR", VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME);
- if (!device_extensions.vk_khr_device_group) skip |= OutputExtensionError("vkGetDeviceGroupPeerMemoryFeaturesKHR", VK_KHR_DEVICE_GROUP_EXTENSION_NAME);
- skip |= validate_required_pointer("vkGetDeviceGroupPeerMemoryFeaturesKHR", "pPeerMemoryFeatures", pPeerMemoryFeatures, "VUID-vkGetDeviceGroupPeerMemoryFeatures-pPeerMemoryFeatures-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdSetDeviceMaskKHR(
- VkCommandBuffer commandBuffer,
- uint32_t deviceMask) {
- bool skip = false;
- if (!device_extensions.vk_khr_device_group_creation) skip |= OutputExtensionError("vkCmdSetDeviceMaskKHR", VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME);
- if (!device_extensions.vk_khr_device_group) skip |= OutputExtensionError("vkCmdSetDeviceMaskKHR", VK_KHR_DEVICE_GROUP_EXTENSION_NAME);
- // No xml-driven validation
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdDispatchBaseKHR(
- VkCommandBuffer commandBuffer,
- uint32_t baseGroupX,
- uint32_t baseGroupY,
- uint32_t baseGroupZ,
- uint32_t groupCountX,
- uint32_t groupCountY,
- uint32_t groupCountZ) {
- bool skip = false;
- if (!device_extensions.vk_khr_device_group_creation) skip |= OutputExtensionError("vkCmdDispatchBaseKHR", VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME);
- if (!device_extensions.vk_khr_device_group) skip |= OutputExtensionError("vkCmdDispatchBaseKHR", VK_KHR_DEVICE_GROUP_EXTENSION_NAME);
- // No xml-driven validation
- if (!skip) skip |= manual_PreCallValidateCmdDispatchBaseKHR(commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ);
- return skip;
-}
-
-
-
-
-
-bool StatelessValidation::PreCallValidateTrimCommandPoolKHR(
- VkDevice device,
- VkCommandPool commandPool,
- VkCommandPoolTrimFlags flags) {
- bool skip = false;
- if (!device_extensions.vk_khr_maintenance1) skip |= OutputExtensionError("vkTrimCommandPoolKHR", VK_KHR_MAINTENANCE1_EXTENSION_NAME);
- skip |= validate_required_handle("vkTrimCommandPoolKHR", "commandPool", commandPool);
- skip |= validate_reserved_flags("vkTrimCommandPoolKHR", "flags", flags, "VUID-vkTrimCommandPool-flags-zerobitmask");
- return skip;
-}
-
-
-
-bool StatelessValidation::PreCallValidateEnumeratePhysicalDeviceGroupsKHR(
- VkInstance instance,
- uint32_t* pPhysicalDeviceGroupCount,
- VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties) {
- bool skip = false;
- if (!instance_extensions.vk_khr_device_group_creation) skip |= OutputExtensionError("vkEnumeratePhysicalDeviceGroupsKHR", VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME);
- skip |= validate_struct_type_array("vkEnumeratePhysicalDeviceGroupsKHR", "pPhysicalDeviceGroupCount", "pPhysicalDeviceGroupProperties", "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES", pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES, true, false, false, "VUID-VkPhysicalDeviceGroupProperties-sType-sType", "VUID-vkEnumeratePhysicalDeviceGroups-pPhysicalDeviceGroupProperties-parameter", kVUIDUndefined);
- if (pPhysicalDeviceGroupProperties != NULL)
- {
- for (uint32_t pPhysicalDeviceGroupIndex = 0; pPhysicalDeviceGroupIndex < *pPhysicalDeviceGroupCount; ++pPhysicalDeviceGroupIndex)
- {
- // No xml-driven validation
- }
- }
- return skip;
-}
-
-
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceExternalBufferPropertiesKHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo,
- VkExternalBufferProperties* pExternalBufferProperties) {
- bool skip = false;
- if (!instance_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkGetPhysicalDeviceExternalBufferPropertiesKHR", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (!instance_extensions.vk_khr_external_memory_capabilities) skip |= OutputExtensionError("vkGetPhysicalDeviceExternalBufferPropertiesKHR", VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME);
- skip |= validate_struct_type("vkGetPhysicalDeviceExternalBufferPropertiesKHR", "pExternalBufferInfo", "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO", pExternalBufferInfo, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO, true, "VUID-vkGetPhysicalDeviceExternalBufferProperties-pExternalBufferInfo-parameter", "VUID-VkPhysicalDeviceExternalBufferInfo-sType-sType");
- if (pExternalBufferInfo != NULL)
- {
- skip |= validate_struct_pnext("vkGetPhysicalDeviceExternalBufferPropertiesKHR", "pExternalBufferInfo->pNext", NULL, pExternalBufferInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkPhysicalDeviceExternalBufferInfo-pNext-pNext");
-
- skip |= validate_flags("vkGetPhysicalDeviceExternalBufferPropertiesKHR", "pExternalBufferInfo->flags", "VkBufferCreateFlagBits", AllVkBufferCreateFlagBits, pExternalBufferInfo->flags, kOptionalFlags, "VUID-VkPhysicalDeviceExternalBufferInfo-flags-parameter");
-
- skip |= validate_flags("vkGetPhysicalDeviceExternalBufferPropertiesKHR", "pExternalBufferInfo->usage", "VkBufferUsageFlagBits", AllVkBufferUsageFlagBits, pExternalBufferInfo->usage, kRequiredFlags, "VUID-VkPhysicalDeviceExternalBufferInfo-usage-parameter", "VUID-VkPhysicalDeviceExternalBufferInfo-usage-requiredbitmask");
-
- skip |= validate_flags("vkGetPhysicalDeviceExternalBufferPropertiesKHR", "pExternalBufferInfo->handleType", "VkExternalMemoryHandleTypeFlagBits", AllVkExternalMemoryHandleTypeFlagBits, pExternalBufferInfo->handleType, kRequiredSingleBit, "VUID-VkPhysicalDeviceExternalBufferInfo-handleType-parameter", "VUID-VkPhysicalDeviceExternalBufferInfo-handleType-parameter");
- }
- skip |= validate_struct_type("vkGetPhysicalDeviceExternalBufferPropertiesKHR", "pExternalBufferProperties", "VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES", pExternalBufferProperties, VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES, true, "VUID-vkGetPhysicalDeviceExternalBufferProperties-pExternalBufferProperties-parameter", "VUID-VkExternalBufferProperties-sType-sType");
- if (pExternalBufferProperties != NULL)
- {
- // No xml-driven validation
- }
- return skip;
-}
-
-
-
-
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-bool StatelessValidation::PreCallValidateGetMemoryWin32HandleKHR(
- VkDevice device,
- const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo,
- HANDLE* pHandle) {
- bool skip = false;
- if (!device_extensions.vk_khr_external_memory) skip |= OutputExtensionError("vkGetMemoryWin32HandleKHR", VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
- if (!device_extensions.vk_khr_external_memory_win32) skip |= OutputExtensionError("vkGetMemoryWin32HandleKHR", VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME);
- skip |= validate_struct_type("vkGetMemoryWin32HandleKHR", "pGetWin32HandleInfo", "VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR", pGetWin32HandleInfo, VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR, true, "VUID-vkGetMemoryWin32HandleKHR-pGetWin32HandleInfo-parameter", "VUID-VkMemoryGetWin32HandleInfoKHR-sType-sType");
- if (pGetWin32HandleInfo != NULL)
- {
- skip |= validate_struct_pnext("vkGetMemoryWin32HandleKHR", "pGetWin32HandleInfo->pNext", NULL, pGetWin32HandleInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkMemoryGetWin32HandleInfoKHR-pNext-pNext");
-
- skip |= validate_required_handle("vkGetMemoryWin32HandleKHR", "pGetWin32HandleInfo->memory", pGetWin32HandleInfo->memory);
-
- skip |= validate_flags("vkGetMemoryWin32HandleKHR", "pGetWin32HandleInfo->handleType", "VkExternalMemoryHandleTypeFlagBits", AllVkExternalMemoryHandleTypeFlagBits, pGetWin32HandleInfo->handleType, kRequiredSingleBit, "VUID-VkMemoryGetWin32HandleInfoKHR-handleType-parameter", "VUID-VkMemoryGetWin32HandleInfoKHR-handleType-parameter");
- }
- skip |= validate_required_pointer("vkGetMemoryWin32HandleKHR", "pHandle", pHandle, "VUID-vkGetMemoryWin32HandleKHR-pHandle-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetMemoryWin32HandlePropertiesKHR(
- VkDevice device,
- VkExternalMemoryHandleTypeFlagBits handleType,
- HANDLE handle,
- VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties) {
- bool skip = false;
- if (!device_extensions.vk_khr_external_memory) skip |= OutputExtensionError("vkGetMemoryWin32HandlePropertiesKHR", VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
- if (!device_extensions.vk_khr_external_memory_win32) skip |= OutputExtensionError("vkGetMemoryWin32HandlePropertiesKHR", VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME);
- skip |= validate_flags("vkGetMemoryWin32HandlePropertiesKHR", "handleType", "VkExternalMemoryHandleTypeFlagBits", AllVkExternalMemoryHandleTypeFlagBits, handleType, kRequiredSingleBit, "VUID-vkGetMemoryWin32HandlePropertiesKHR-handleType-parameter", "VUID-vkGetMemoryWin32HandlePropertiesKHR-handleType-parameter");
- skip |= validate_struct_type("vkGetMemoryWin32HandlePropertiesKHR", "pMemoryWin32HandleProperties", "VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR", pMemoryWin32HandleProperties, VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR, true, "VUID-vkGetMemoryWin32HandlePropertiesKHR-pMemoryWin32HandleProperties-parameter", "VUID-VkMemoryWin32HandlePropertiesKHR-sType-sType");
- if (pMemoryWin32HandleProperties != NULL)
- {
- // No xml-driven validation
- }
- return skip;
-}
-
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-bool StatelessValidation::PreCallValidateGetMemoryFdKHR(
- VkDevice device,
- const VkMemoryGetFdInfoKHR* pGetFdInfo,
- int* pFd) {
- bool skip = false;
- if (!device_extensions.vk_khr_external_memory) skip |= OutputExtensionError("vkGetMemoryFdKHR", VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
- if (!device_extensions.vk_khr_external_memory_fd) skip |= OutputExtensionError("vkGetMemoryFdKHR", VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME);
- skip |= validate_struct_type("vkGetMemoryFdKHR", "pGetFdInfo", "VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR", pGetFdInfo, VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR, true, "VUID-vkGetMemoryFdKHR-pGetFdInfo-parameter", "VUID-VkMemoryGetFdInfoKHR-sType-sType");
- if (pGetFdInfo != NULL)
- {
- skip |= validate_struct_pnext("vkGetMemoryFdKHR", "pGetFdInfo->pNext", NULL, pGetFdInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkMemoryGetFdInfoKHR-pNext-pNext");
-
- skip |= validate_required_handle("vkGetMemoryFdKHR", "pGetFdInfo->memory", pGetFdInfo->memory);
-
- skip |= validate_flags("vkGetMemoryFdKHR", "pGetFdInfo->handleType", "VkExternalMemoryHandleTypeFlagBits", AllVkExternalMemoryHandleTypeFlagBits, pGetFdInfo->handleType, kRequiredSingleBit, "VUID-VkMemoryGetFdInfoKHR-handleType-parameter", "VUID-VkMemoryGetFdInfoKHR-handleType-parameter");
- }
- skip |= validate_required_pointer("vkGetMemoryFdKHR", "pFd", pFd, "VUID-vkGetMemoryFdKHR-pFd-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetMemoryFdPropertiesKHR(
- VkDevice device,
- VkExternalMemoryHandleTypeFlagBits handleType,
- int fd,
- VkMemoryFdPropertiesKHR* pMemoryFdProperties) {
- bool skip = false;
- if (!device_extensions.vk_khr_external_memory) skip |= OutputExtensionError("vkGetMemoryFdPropertiesKHR", VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
- if (!device_extensions.vk_khr_external_memory_fd) skip |= OutputExtensionError("vkGetMemoryFdPropertiesKHR", VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME);
- skip |= validate_flags("vkGetMemoryFdPropertiesKHR", "handleType", "VkExternalMemoryHandleTypeFlagBits", AllVkExternalMemoryHandleTypeFlagBits, handleType, kRequiredSingleBit, "VUID-vkGetMemoryFdPropertiesKHR-handleType-parameter", "VUID-vkGetMemoryFdPropertiesKHR-handleType-parameter");
- skip |= validate_struct_type("vkGetMemoryFdPropertiesKHR", "pMemoryFdProperties", "VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR", pMemoryFdProperties, VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR, true, "VUID-vkGetMemoryFdPropertiesKHR-pMemoryFdProperties-parameter", "VUID-VkMemoryFdPropertiesKHR-sType-sType");
- if (pMemoryFdProperties != NULL)
- {
- // No xml-driven validation
- }
- return skip;
-}
-
-
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceExternalSemaphorePropertiesKHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
- VkExternalSemaphoreProperties* pExternalSemaphoreProperties) {
- bool skip = false;
- if (!instance_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkGetPhysicalDeviceExternalSemaphorePropertiesKHR", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (!instance_extensions.vk_khr_external_semaphore_capabilities) skip |= OutputExtensionError("vkGetPhysicalDeviceExternalSemaphorePropertiesKHR", VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME);
- skip |= validate_struct_type("vkGetPhysicalDeviceExternalSemaphorePropertiesKHR", "pExternalSemaphoreInfo", "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO", pExternalSemaphoreInfo, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO, true, "VUID-vkGetPhysicalDeviceExternalSemaphoreProperties-pExternalSemaphoreInfo-parameter", "VUID-VkPhysicalDeviceExternalSemaphoreInfo-sType-sType");
- if (pExternalSemaphoreInfo != NULL)
- {
- skip |= validate_struct_pnext("vkGetPhysicalDeviceExternalSemaphorePropertiesKHR", "pExternalSemaphoreInfo->pNext", NULL, pExternalSemaphoreInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkPhysicalDeviceExternalSemaphoreInfo-pNext-pNext");
-
- skip |= validate_flags("vkGetPhysicalDeviceExternalSemaphorePropertiesKHR", "pExternalSemaphoreInfo->handleType", "VkExternalSemaphoreHandleTypeFlagBits", AllVkExternalSemaphoreHandleTypeFlagBits, pExternalSemaphoreInfo->handleType, kRequiredSingleBit, "VUID-VkPhysicalDeviceExternalSemaphoreInfo-handleType-parameter", "VUID-VkPhysicalDeviceExternalSemaphoreInfo-handleType-parameter");
- }
- skip |= validate_struct_type("vkGetPhysicalDeviceExternalSemaphorePropertiesKHR", "pExternalSemaphoreProperties", "VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES", pExternalSemaphoreProperties, VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES, true, "VUID-vkGetPhysicalDeviceExternalSemaphoreProperties-pExternalSemaphoreProperties-parameter", "VUID-VkExternalSemaphoreProperties-sType-sType");
- if (pExternalSemaphoreProperties != NULL)
- {
- // No xml-driven validation
- }
- return skip;
-}
-
-
-
-
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-bool StatelessValidation::PreCallValidateImportSemaphoreWin32HandleKHR(
- VkDevice device,
- const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo) {
- bool skip = false;
- if (!device_extensions.vk_khr_external_semaphore) skip |= OutputExtensionError("vkImportSemaphoreWin32HandleKHR", VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME);
- if (!device_extensions.vk_khr_external_semaphore_win32) skip |= OutputExtensionError("vkImportSemaphoreWin32HandleKHR", VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME);
- skip |= validate_struct_type("vkImportSemaphoreWin32HandleKHR", "pImportSemaphoreWin32HandleInfo", "VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR", pImportSemaphoreWin32HandleInfo, VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR, true, "VUID-vkImportSemaphoreWin32HandleKHR-pImportSemaphoreWin32HandleInfo-parameter", "VUID-VkImportSemaphoreWin32HandleInfoKHR-sType-sType");
- if (pImportSemaphoreWin32HandleInfo != NULL)
- {
- skip |= validate_struct_pnext("vkImportSemaphoreWin32HandleKHR", "pImportSemaphoreWin32HandleInfo->pNext", NULL, pImportSemaphoreWin32HandleInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkImportSemaphoreWin32HandleInfoKHR-pNext-pNext");
-
- skip |= validate_required_handle("vkImportSemaphoreWin32HandleKHR", "pImportSemaphoreWin32HandleInfo->semaphore", pImportSemaphoreWin32HandleInfo->semaphore);
-
- skip |= validate_flags("vkImportSemaphoreWin32HandleKHR", "pImportSemaphoreWin32HandleInfo->flags", "VkSemaphoreImportFlagBits", AllVkSemaphoreImportFlagBits, pImportSemaphoreWin32HandleInfo->flags, kOptionalFlags, "VUID-VkImportSemaphoreWin32HandleInfoKHR-flags-parameter");
-
- skip |= validate_flags("vkImportSemaphoreWin32HandleKHR", "pImportSemaphoreWin32HandleInfo->handleType", "VkExternalSemaphoreHandleTypeFlagBits", AllVkExternalSemaphoreHandleTypeFlagBits, pImportSemaphoreWin32HandleInfo->handleType, kOptionalSingleBit, "VUID-VkImportSemaphoreWin32HandleInfoKHR-handleType-parameter");
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetSemaphoreWin32HandleKHR(
- VkDevice device,
- const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo,
- HANDLE* pHandle) {
- bool skip = false;
- if (!device_extensions.vk_khr_external_semaphore) skip |= OutputExtensionError("vkGetSemaphoreWin32HandleKHR", VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME);
- if (!device_extensions.vk_khr_external_semaphore_win32) skip |= OutputExtensionError("vkGetSemaphoreWin32HandleKHR", VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME);
- skip |= validate_struct_type("vkGetSemaphoreWin32HandleKHR", "pGetWin32HandleInfo", "VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR", pGetWin32HandleInfo, VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR, true, "VUID-vkGetSemaphoreWin32HandleKHR-pGetWin32HandleInfo-parameter", "VUID-VkSemaphoreGetWin32HandleInfoKHR-sType-sType");
- if (pGetWin32HandleInfo != NULL)
- {
- skip |= validate_struct_pnext("vkGetSemaphoreWin32HandleKHR", "pGetWin32HandleInfo->pNext", NULL, pGetWin32HandleInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkSemaphoreGetWin32HandleInfoKHR-pNext-pNext");
-
- skip |= validate_required_handle("vkGetSemaphoreWin32HandleKHR", "pGetWin32HandleInfo->semaphore", pGetWin32HandleInfo->semaphore);
-
- skip |= validate_flags("vkGetSemaphoreWin32HandleKHR", "pGetWin32HandleInfo->handleType", "VkExternalSemaphoreHandleTypeFlagBits", AllVkExternalSemaphoreHandleTypeFlagBits, pGetWin32HandleInfo->handleType, kRequiredSingleBit, "VUID-VkSemaphoreGetWin32HandleInfoKHR-handleType-parameter", "VUID-VkSemaphoreGetWin32HandleInfoKHR-handleType-parameter");
- }
- skip |= validate_required_pointer("vkGetSemaphoreWin32HandleKHR", "pHandle", pHandle, "VUID-vkGetSemaphoreWin32HandleKHR-pHandle-parameter");
- return skip;
-}
-
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-bool StatelessValidation::PreCallValidateImportSemaphoreFdKHR(
- VkDevice device,
- const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo) {
- bool skip = false;
- if (!device_extensions.vk_khr_external_semaphore) skip |= OutputExtensionError("vkImportSemaphoreFdKHR", VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME);
- if (!device_extensions.vk_khr_external_semaphore_fd) skip |= OutputExtensionError("vkImportSemaphoreFdKHR", VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME);
- skip |= validate_struct_type("vkImportSemaphoreFdKHR", "pImportSemaphoreFdInfo", "VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR", pImportSemaphoreFdInfo, VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR, true, "VUID-vkImportSemaphoreFdKHR-pImportSemaphoreFdInfo-parameter", "VUID-VkImportSemaphoreFdInfoKHR-sType-sType");
- if (pImportSemaphoreFdInfo != NULL)
- {
- skip |= validate_struct_pnext("vkImportSemaphoreFdKHR", "pImportSemaphoreFdInfo->pNext", NULL, pImportSemaphoreFdInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkImportSemaphoreFdInfoKHR-pNext-pNext");
-
- skip |= validate_required_handle("vkImportSemaphoreFdKHR", "pImportSemaphoreFdInfo->semaphore", pImportSemaphoreFdInfo->semaphore);
-
- skip |= validate_flags("vkImportSemaphoreFdKHR", "pImportSemaphoreFdInfo->flags", "VkSemaphoreImportFlagBits", AllVkSemaphoreImportFlagBits, pImportSemaphoreFdInfo->flags, kOptionalFlags, "VUID-VkImportSemaphoreFdInfoKHR-flags-parameter");
-
- skip |= validate_flags("vkImportSemaphoreFdKHR", "pImportSemaphoreFdInfo->handleType", "VkExternalSemaphoreHandleTypeFlagBits", AllVkExternalSemaphoreHandleTypeFlagBits, pImportSemaphoreFdInfo->handleType, kRequiredSingleBit, "VUID-VkImportSemaphoreFdInfoKHR-handleType-parameter", "VUID-VkImportSemaphoreFdInfoKHR-handleType-parameter");
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetSemaphoreFdKHR(
- VkDevice device,
- const VkSemaphoreGetFdInfoKHR* pGetFdInfo,
- int* pFd) {
- bool skip = false;
- if (!device_extensions.vk_khr_external_semaphore) skip |= OutputExtensionError("vkGetSemaphoreFdKHR", VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME);
- if (!device_extensions.vk_khr_external_semaphore_fd) skip |= OutputExtensionError("vkGetSemaphoreFdKHR", VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME);
- skip |= validate_struct_type("vkGetSemaphoreFdKHR", "pGetFdInfo", "VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR", pGetFdInfo, VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR, true, "VUID-vkGetSemaphoreFdKHR-pGetFdInfo-parameter", "VUID-VkSemaphoreGetFdInfoKHR-sType-sType");
- if (pGetFdInfo != NULL)
- {
- skip |= validate_struct_pnext("vkGetSemaphoreFdKHR", "pGetFdInfo->pNext", NULL, pGetFdInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkSemaphoreGetFdInfoKHR-pNext-pNext");
-
- skip |= validate_required_handle("vkGetSemaphoreFdKHR", "pGetFdInfo->semaphore", pGetFdInfo->semaphore);
-
- skip |= validate_flags("vkGetSemaphoreFdKHR", "pGetFdInfo->handleType", "VkExternalSemaphoreHandleTypeFlagBits", AllVkExternalSemaphoreHandleTypeFlagBits, pGetFdInfo->handleType, kRequiredSingleBit, "VUID-VkSemaphoreGetFdInfoKHR-handleType-parameter", "VUID-VkSemaphoreGetFdInfoKHR-handleType-parameter");
- }
- skip |= validate_required_pointer("vkGetSemaphoreFdKHR", "pFd", pFd, "VUID-vkGetSemaphoreFdKHR-pFd-parameter");
- return skip;
-}
-
-
-
-bool StatelessValidation::PreCallValidateCmdPushDescriptorSetKHR(
- VkCommandBuffer commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipelineLayout layout,
- uint32_t set,
- uint32_t descriptorWriteCount,
- const VkWriteDescriptorSet* pDescriptorWrites) {
- bool skip = false;
- if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCmdPushDescriptorSetKHR", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_khr_push_descriptor) skip |= OutputExtensionError("vkCmdPushDescriptorSetKHR", VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
- skip |= validate_ranged_enum("vkCmdPushDescriptorSetKHR", "pipelineBindPoint", "VkPipelineBindPoint", AllVkPipelineBindPointEnums, pipelineBindPoint, "VUID-vkCmdPushDescriptorSetKHR-pipelineBindPoint-parameter");
- skip |= validate_required_handle("vkCmdPushDescriptorSetKHR", "layout", layout);
- skip |= validate_struct_type_array("vkCmdPushDescriptorSetKHR", "descriptorWriteCount", "pDescriptorWrites", "VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET", descriptorWriteCount, pDescriptorWrites, VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET, true, true, "VUID-VkWriteDescriptorSet-sType-sType", "VUID-vkCmdPushDescriptorSetKHR-pDescriptorWrites-parameter", "VUID-vkCmdPushDescriptorSetKHR-descriptorWriteCount-arraylength");
- if (pDescriptorWrites != NULL)
- {
- for (uint32_t descriptorWriteIndex = 0; descriptorWriteIndex < descriptorWriteCount; ++descriptorWriteIndex)
- {
- const VkStructureType allowed_structs_VkWriteDescriptorSet[] = { VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV, VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT };
-
- skip |= validate_struct_pnext("vkCmdPushDescriptorSetKHR", ParameterName("pDescriptorWrites[%i].pNext", ParameterName::IndexVector{ descriptorWriteIndex }), "VkWriteDescriptorSetAccelerationStructureNV, VkWriteDescriptorSetInlineUniformBlockEXT", pDescriptorWrites[descriptorWriteIndex].pNext, ARRAY_SIZE(allowed_structs_VkWriteDescriptorSet), allowed_structs_VkWriteDescriptorSet, GeneratedVulkanHeaderVersion, "VUID-VkWriteDescriptorSet-pNext-pNext");
-
- skip |= validate_ranged_enum("vkCmdPushDescriptorSetKHR", ParameterName("pDescriptorWrites[%i].descriptorType", ParameterName::IndexVector{ descriptorWriteIndex }), "VkDescriptorType", AllVkDescriptorTypeEnums, pDescriptorWrites[descriptorWriteIndex].descriptorType, "VUID-VkWriteDescriptorSet-descriptorType-parameter");
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdPushDescriptorSetWithTemplateKHR(
- VkCommandBuffer commandBuffer,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- VkPipelineLayout layout,
- uint32_t set,
- const void* pData) {
- bool skip = false;
- if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCmdPushDescriptorSetWithTemplateKHR", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_khr_push_descriptor) skip |= OutputExtensionError("vkCmdPushDescriptorSetWithTemplateKHR", VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
- skip |= validate_required_handle("vkCmdPushDescriptorSetWithTemplateKHR", "descriptorUpdateTemplate", descriptorUpdateTemplate);
- skip |= validate_required_handle("vkCmdPushDescriptorSetWithTemplateKHR", "layout", layout);
- return skip;
-}
-
-
-
-
-
-
-
-
-
-bool StatelessValidation::PreCallValidateCreateDescriptorUpdateTemplateKHR(
- VkDevice device,
- const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {
- bool skip = false;
- if (!device_extensions.vk_khr_descriptor_update_template) skip |= OutputExtensionError("vkCreateDescriptorUpdateTemplateKHR", VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME);
- skip |= validate_struct_type("vkCreateDescriptorUpdateTemplateKHR", "pCreateInfo", "VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO, true, "VUID-vkCreateDescriptorUpdateTemplate-pCreateInfo-parameter", "VUID-VkDescriptorUpdateTemplateCreateInfo-sType-sType");
- if (pCreateInfo != NULL)
- {
- skip |= validate_struct_pnext("vkCreateDescriptorUpdateTemplateKHR", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDescriptorUpdateTemplateCreateInfo-pNext-pNext");
-
- skip |= validate_reserved_flags("vkCreateDescriptorUpdateTemplateKHR", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkDescriptorUpdateTemplateCreateInfo-flags-zerobitmask");
-
- skip |= validate_array("vkCreateDescriptorUpdateTemplateKHR", "pCreateInfo->descriptorUpdateEntryCount", "pCreateInfo->pDescriptorUpdateEntries", pCreateInfo->descriptorUpdateEntryCount, &pCreateInfo->pDescriptorUpdateEntries, true, true, "VUID-VkDescriptorUpdateTemplateCreateInfo-descriptorUpdateEntryCount-arraylength", "VUID-VkDescriptorUpdateTemplateCreateInfo-pDescriptorUpdateEntries-parameter");
-
- if (pCreateInfo->pDescriptorUpdateEntries != NULL)
- {
- for (uint32_t descriptorUpdateEntryIndex = 0; descriptorUpdateEntryIndex < pCreateInfo->descriptorUpdateEntryCount; ++descriptorUpdateEntryIndex)
- {
- skip |= validate_ranged_enum("vkCreateDescriptorUpdateTemplateKHR", ParameterName("pCreateInfo->pDescriptorUpdateEntries[%i].descriptorType", ParameterName::IndexVector{ descriptorUpdateEntryIndex }), "VkDescriptorType", AllVkDescriptorTypeEnums, pCreateInfo->pDescriptorUpdateEntries[descriptorUpdateEntryIndex].descriptorType, "VUID-VkDescriptorUpdateTemplateEntry-descriptorType-parameter");
- }
- }
-
- skip |= validate_ranged_enum("vkCreateDescriptorUpdateTemplateKHR", "pCreateInfo->templateType", "VkDescriptorUpdateTemplateType", AllVkDescriptorUpdateTemplateTypeEnums, pCreateInfo->templateType, "VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-parameter");
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreateDescriptorUpdateTemplateKHR", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreateDescriptorUpdateTemplateKHR", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreateDescriptorUpdateTemplateKHR", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreateDescriptorUpdateTemplateKHR", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreateDescriptorUpdateTemplateKHR", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkCreateDescriptorUpdateTemplateKHR", "pDescriptorUpdateTemplate", pDescriptorUpdateTemplate, "VUID-vkCreateDescriptorUpdateTemplate-pDescriptorUpdateTemplate-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateDestroyDescriptorUpdateTemplateKHR(
- VkDevice device,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- if (!device_extensions.vk_khr_descriptor_update_template) skip |= OutputExtensionError("vkDestroyDescriptorUpdateTemplateKHR", VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME);
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkDestroyDescriptorUpdateTemplateKHR", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkDestroyDescriptorUpdateTemplateKHR", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkDestroyDescriptorUpdateTemplateKHR", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkDestroyDescriptorUpdateTemplateKHR", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkDestroyDescriptorUpdateTemplateKHR", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateUpdateDescriptorSetWithTemplateKHR(
- VkDevice device,
- VkDescriptorSet descriptorSet,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const void* pData) {
- bool skip = false;
- if (!device_extensions.vk_khr_descriptor_update_template) skip |= OutputExtensionError("vkUpdateDescriptorSetWithTemplateKHR", VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME);
- skip |= validate_required_handle("vkUpdateDescriptorSetWithTemplateKHR", "descriptorSet", descriptorSet);
- skip |= validate_required_handle("vkUpdateDescriptorSetWithTemplateKHR", "descriptorUpdateTemplate", descriptorUpdateTemplate);
- return skip;
-}
-
-
-
-
-
-bool StatelessValidation::PreCallValidateCreateRenderPass2KHR(
- VkDevice device,
- const VkRenderPassCreateInfo2KHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkRenderPass* pRenderPass) {
- bool skip = false;
- if (!device_extensions.vk_khr_maintenance2) skip |= OutputExtensionError("vkCreateRenderPass2KHR", VK_KHR_MAINTENANCE2_EXTENSION_NAME);
- if (!device_extensions.vk_khr_multiview) skip |= OutputExtensionError("vkCreateRenderPass2KHR", VK_KHR_MULTIVIEW_EXTENSION_NAME);
- if (!device_extensions.vk_khr_create_renderpass_2) skip |= OutputExtensionError("vkCreateRenderPass2KHR", VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME);
- skip |= validate_struct_type("vkCreateRenderPass2KHR", "pCreateInfo", "VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR", pCreateInfo, VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR, true, "VUID-vkCreateRenderPass2KHR-pCreateInfo-parameter", "VUID-VkRenderPassCreateInfo2KHR-sType-sType");
- if (pCreateInfo != NULL)
- {
- skip |= validate_struct_pnext("vkCreateRenderPass2KHR", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkRenderPassCreateInfo2KHR-pNext-pNext");
-
- skip |= validate_struct_type_array("vkCreateRenderPass2KHR", "pCreateInfo->attachmentCount", "pCreateInfo->pAttachments", "VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR", pCreateInfo->attachmentCount, pCreateInfo->pAttachments, VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR, false, true, "VUID-VkAttachmentDescription2KHR-sType-sType", "VUID-VkRenderPassCreateInfo2KHR-pAttachments-parameter", kVUIDUndefined);
-
- if (pCreateInfo->pAttachments != NULL)
- {
- for (uint32_t attachmentIndex = 0; attachmentIndex < pCreateInfo->attachmentCount; ++attachmentIndex)
- {
- skip |= validate_flags("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pAttachments[%i].flags", ParameterName::IndexVector{ attachmentIndex }), "VkAttachmentDescriptionFlagBits", AllVkAttachmentDescriptionFlagBits, pCreateInfo->pAttachments[attachmentIndex].flags, kOptionalFlags, "VUID-VkAttachmentDescription2KHR-flags-parameter");
-
- skip |= validate_ranged_enum("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pAttachments[%i].format", ParameterName::IndexVector{ attachmentIndex }), "VkFormat", AllVkFormatEnums, pCreateInfo->pAttachments[attachmentIndex].format, "VUID-VkAttachmentDescription2KHR-format-parameter");
-
- skip |= validate_flags("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pAttachments[%i].samples", ParameterName::IndexVector{ attachmentIndex }), "VkSampleCountFlagBits", AllVkSampleCountFlagBits, pCreateInfo->pAttachments[attachmentIndex].samples, kRequiredSingleBit, "VUID-VkAttachmentDescription2KHR-samples-parameter", "VUID-VkAttachmentDescription2KHR-samples-parameter");
-
- skip |= validate_ranged_enum("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pAttachments[%i].loadOp", ParameterName::IndexVector{ attachmentIndex }), "VkAttachmentLoadOp", AllVkAttachmentLoadOpEnums, pCreateInfo->pAttachments[attachmentIndex].loadOp, "VUID-VkAttachmentDescription2KHR-loadOp-parameter");
-
- skip |= validate_ranged_enum("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pAttachments[%i].storeOp", ParameterName::IndexVector{ attachmentIndex }), "VkAttachmentStoreOp", AllVkAttachmentStoreOpEnums, pCreateInfo->pAttachments[attachmentIndex].storeOp, "VUID-VkAttachmentDescription2KHR-storeOp-parameter");
-
- skip |= validate_ranged_enum("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pAttachments[%i].stencilLoadOp", ParameterName::IndexVector{ attachmentIndex }), "VkAttachmentLoadOp", AllVkAttachmentLoadOpEnums, pCreateInfo->pAttachments[attachmentIndex].stencilLoadOp, "VUID-VkAttachmentDescription2KHR-stencilLoadOp-parameter");
-
- skip |= validate_ranged_enum("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pAttachments[%i].stencilStoreOp", ParameterName::IndexVector{ attachmentIndex }), "VkAttachmentStoreOp", AllVkAttachmentStoreOpEnums, pCreateInfo->pAttachments[attachmentIndex].stencilStoreOp, "VUID-VkAttachmentDescription2KHR-stencilStoreOp-parameter");
-
- skip |= validate_ranged_enum("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pAttachments[%i].initialLayout", ParameterName::IndexVector{ attachmentIndex }), "VkImageLayout", AllVkImageLayoutEnums, pCreateInfo->pAttachments[attachmentIndex].initialLayout, "VUID-VkAttachmentDescription2KHR-initialLayout-parameter");
-
- skip |= validate_ranged_enum("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pAttachments[%i].finalLayout", ParameterName::IndexVector{ attachmentIndex }), "VkImageLayout", AllVkImageLayoutEnums, pCreateInfo->pAttachments[attachmentIndex].finalLayout, "VUID-VkAttachmentDescription2KHR-finalLayout-parameter");
- }
- }
-
- skip |= validate_struct_type_array("vkCreateRenderPass2KHR", "pCreateInfo->subpassCount", "pCreateInfo->pSubpasses", "VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR", pCreateInfo->subpassCount, pCreateInfo->pSubpasses, VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR, true, true, "VUID-VkSubpassDescription2KHR-sType-sType", "VUID-VkRenderPassCreateInfo2KHR-pSubpasses-parameter", "VUID-VkRenderPassCreateInfo2KHR-subpassCount-arraylength");
-
- if (pCreateInfo->pSubpasses != NULL)
- {
- for (uint32_t subpassIndex = 0; subpassIndex < pCreateInfo->subpassCount; ++subpassIndex)
- {
- skip |= validate_flags("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pSubpasses[%i].flags", ParameterName::IndexVector{ subpassIndex }), "VkSubpassDescriptionFlagBits", AllVkSubpassDescriptionFlagBits, pCreateInfo->pSubpasses[subpassIndex].flags, kOptionalFlags, "VUID-VkSubpassDescription2KHR-flags-parameter");
-
- skip |= validate_ranged_enum("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pSubpasses[%i].pipelineBindPoint", ParameterName::IndexVector{ subpassIndex }), "VkPipelineBindPoint", AllVkPipelineBindPointEnums, pCreateInfo->pSubpasses[subpassIndex].pipelineBindPoint, "VUID-VkSubpassDescription2KHR-pipelineBindPoint-parameter");
-
- skip |= validate_struct_type_array("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pSubpasses[%i].inputAttachmentCount", ParameterName::IndexVector{ subpassIndex }), ParameterName("pCreateInfo->pSubpasses[%i].pInputAttachments", ParameterName::IndexVector{ subpassIndex }), "VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR", pCreateInfo->pSubpasses[subpassIndex].inputAttachmentCount, pCreateInfo->pSubpasses[subpassIndex].pInputAttachments, VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR, false, true, "VUID-VkAttachmentReference2KHR-sType-sType", "VUID-VkSubpassDescription2KHR-pInputAttachments-parameter", kVUIDUndefined);
-
- if (pCreateInfo->pSubpasses[subpassIndex].pInputAttachments != NULL)
- {
- for (uint32_t inputAttachmentIndex = 0; inputAttachmentIndex < pCreateInfo->pSubpasses[subpassIndex].inputAttachmentCount; ++inputAttachmentIndex)
- {
- skip |= validate_ranged_enum("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pSubpasses[%i].pInputAttachments[%i].layout", ParameterName::IndexVector{ subpassIndex, inputAttachmentIndex }), "VkImageLayout", AllVkImageLayoutEnums, pCreateInfo->pSubpasses[subpassIndex].pInputAttachments[inputAttachmentIndex].layout, "VUID-VkAttachmentReference2KHR-layout-parameter");
- }
- }
-
- skip |= validate_struct_type_array("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pSubpasses[%i].colorAttachmentCount", ParameterName::IndexVector{ subpassIndex }), ParameterName("pCreateInfo->pSubpasses[%i].pColorAttachments", ParameterName::IndexVector{ subpassIndex }), "VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR", pCreateInfo->pSubpasses[subpassIndex].colorAttachmentCount, pCreateInfo->pSubpasses[subpassIndex].pColorAttachments, VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR, false, true, "VUID-VkAttachmentReference2KHR-sType-sType", "VUID-VkSubpassDescription2KHR-pColorAttachments-parameter", kVUIDUndefined);
-
- if (pCreateInfo->pSubpasses[subpassIndex].pColorAttachments != NULL)
- {
- for (uint32_t colorAttachmentIndex = 0; colorAttachmentIndex < pCreateInfo->pSubpasses[subpassIndex].colorAttachmentCount; ++colorAttachmentIndex)
- {
- skip |= validate_ranged_enum("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pSubpasses[%i].pColorAttachments[%i].layout", ParameterName::IndexVector{ subpassIndex, colorAttachmentIndex }), "VkImageLayout", AllVkImageLayoutEnums, pCreateInfo->pSubpasses[subpassIndex].pColorAttachments[colorAttachmentIndex].layout, "VUID-VkAttachmentReference2KHR-layout-parameter");
- }
- }
-
- skip |= validate_struct_type_array("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pSubpasses[%i].colorAttachmentCount", ParameterName::IndexVector{ subpassIndex }), ParameterName("pCreateInfo->pSubpasses[%i].pResolveAttachments", ParameterName::IndexVector{ subpassIndex }), "VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR", pCreateInfo->pSubpasses[subpassIndex].colorAttachmentCount, pCreateInfo->pSubpasses[subpassIndex].pResolveAttachments, VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR, false, false, "VUID-VkAttachmentReference2KHR-sType-sType", "VUID-VkSubpassDescription2KHR-pResolveAttachments-parameter", kVUIDUndefined);
-
- if (pCreateInfo->pSubpasses[subpassIndex].pResolveAttachments != NULL)
- {
- for (uint32_t colorAttachmentIndex = 0; colorAttachmentIndex < pCreateInfo->pSubpasses[subpassIndex].colorAttachmentCount; ++colorAttachmentIndex)
- {
- skip |= validate_ranged_enum("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pSubpasses[%i].pResolveAttachments[%i].layout", ParameterName::IndexVector{ subpassIndex, colorAttachmentIndex }), "VkImageLayout", AllVkImageLayoutEnums, pCreateInfo->pSubpasses[subpassIndex].pResolveAttachments[colorAttachmentIndex].layout, "VUID-VkAttachmentReference2KHR-layout-parameter");
- }
- }
-
- skip |= validate_struct_type("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pSubpasses[%i].pDepthStencilAttachment", ParameterName::IndexVector{ subpassIndex }), "VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR", pCreateInfo->pSubpasses[subpassIndex].pDepthStencilAttachment, VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR, false, "VUID-VkSubpassDescription2KHR-pDepthStencilAttachment-parameter", "VUID-VkAttachmentReference2KHR-sType-sType");
-
- if (pCreateInfo->pSubpasses[subpassIndex].pDepthStencilAttachment != NULL)
- {
- skip |= validate_ranged_enum("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pSubpasses[%i].pDepthStencilAttachment->layout", ParameterName::IndexVector{ subpassIndex }), "VkImageLayout", AllVkImageLayoutEnums, pCreateInfo->pSubpasses[subpassIndex].pDepthStencilAttachment->layout, "VUID-VkAttachmentReference2KHR-layout-parameter");
- }
-
- skip |= validate_array("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pSubpasses[%i].preserveAttachmentCount", ParameterName::IndexVector{ subpassIndex }), ParameterName("pCreateInfo->pSubpasses[%i].pPreserveAttachments", ParameterName::IndexVector{ subpassIndex }), pCreateInfo->pSubpasses[subpassIndex].preserveAttachmentCount, &pCreateInfo->pSubpasses[subpassIndex].pPreserveAttachments, false, true, kVUIDUndefined, "VUID-VkSubpassDescription2KHR-pPreserveAttachments-parameter");
- }
- }
-
- skip |= validate_struct_type_array("vkCreateRenderPass2KHR", "pCreateInfo->dependencyCount", "pCreateInfo->pDependencies", "VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR", pCreateInfo->dependencyCount, pCreateInfo->pDependencies, VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR, false, true, "VUID-VkSubpassDependency2KHR-sType-sType", "VUID-VkRenderPassCreateInfo2KHR-pDependencies-parameter", kVUIDUndefined);
-
- if (pCreateInfo->pDependencies != NULL)
- {
- for (uint32_t dependencyIndex = 0; dependencyIndex < pCreateInfo->dependencyCount; ++dependencyIndex)
- {
- skip |= validate_flags("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pDependencies[%i].srcStageMask", ParameterName::IndexVector{ dependencyIndex }), "VkPipelineStageFlagBits", AllVkPipelineStageFlagBits, pCreateInfo->pDependencies[dependencyIndex].srcStageMask, kRequiredFlags, "VUID-VkSubpassDependency2KHR-srcStageMask-parameter", "VUID-VkSubpassDependency2KHR-srcStageMask-requiredbitmask");
-
- skip |= validate_flags("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pDependencies[%i].dstStageMask", ParameterName::IndexVector{ dependencyIndex }), "VkPipelineStageFlagBits", AllVkPipelineStageFlagBits, pCreateInfo->pDependencies[dependencyIndex].dstStageMask, kRequiredFlags, "VUID-VkSubpassDependency2KHR-dstStageMask-parameter", "VUID-VkSubpassDependency2KHR-dstStageMask-requiredbitmask");
-
- skip |= validate_flags("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pDependencies[%i].srcAccessMask", ParameterName::IndexVector{ dependencyIndex }), "VkAccessFlagBits", AllVkAccessFlagBits, pCreateInfo->pDependencies[dependencyIndex].srcAccessMask, kOptionalFlags, "VUID-VkSubpassDependency2KHR-srcAccessMask-parameter");
-
- skip |= validate_flags("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pDependencies[%i].dstAccessMask", ParameterName::IndexVector{ dependencyIndex }), "VkAccessFlagBits", AllVkAccessFlagBits, pCreateInfo->pDependencies[dependencyIndex].dstAccessMask, kOptionalFlags, "VUID-VkSubpassDependency2KHR-dstAccessMask-parameter");
-
- skip |= validate_flags("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pDependencies[%i].dependencyFlags", ParameterName::IndexVector{ dependencyIndex }), "VkDependencyFlagBits", AllVkDependencyFlagBits, pCreateInfo->pDependencies[dependencyIndex].dependencyFlags, kOptionalFlags, "VUID-VkSubpassDependency2KHR-dependencyFlags-parameter");
- }
- }
-
- skip |= validate_array("vkCreateRenderPass2KHR", "pCreateInfo->correlatedViewMaskCount", "pCreateInfo->pCorrelatedViewMasks", pCreateInfo->correlatedViewMaskCount, &pCreateInfo->pCorrelatedViewMasks, false, true, kVUIDUndefined, "VUID-VkRenderPassCreateInfo2KHR-pCorrelatedViewMasks-parameter");
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreateRenderPass2KHR", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreateRenderPass2KHR", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreateRenderPass2KHR", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreateRenderPass2KHR", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreateRenderPass2KHR", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkCreateRenderPass2KHR", "pRenderPass", pRenderPass, "VUID-vkCreateRenderPass2KHR-pRenderPass-parameter");
- if (!skip) skip |= manual_PreCallValidateCreateRenderPass2KHR(device, pCreateInfo, pAllocator, pRenderPass);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdBeginRenderPass2KHR(
- VkCommandBuffer commandBuffer,
- const VkRenderPassBeginInfo* pRenderPassBegin,
- const VkSubpassBeginInfoKHR* pSubpassBeginInfo) {
- bool skip = false;
- if (!device_extensions.vk_khr_maintenance2) skip |= OutputExtensionError("vkCmdBeginRenderPass2KHR", VK_KHR_MAINTENANCE2_EXTENSION_NAME);
- if (!device_extensions.vk_khr_multiview) skip |= OutputExtensionError("vkCmdBeginRenderPass2KHR", VK_KHR_MULTIVIEW_EXTENSION_NAME);
- if (!device_extensions.vk_khr_create_renderpass_2) skip |= OutputExtensionError("vkCmdBeginRenderPass2KHR", VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME);
- skip |= validate_struct_type("vkCmdBeginRenderPass2KHR", "pRenderPassBegin", "VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO", pRenderPassBegin, VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, true, "VUID-vkCmdBeginRenderPass2KHR-pRenderPassBegin-parameter", "VUID-VkRenderPassBeginInfo-sType-sType");
- if (pRenderPassBegin != NULL)
- {
- const VkStructureType allowed_structs_VkRenderPassBeginInfo[] = { VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO, VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR, VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT };
-
- skip |= validate_struct_pnext("vkCmdBeginRenderPass2KHR", "pRenderPassBegin->pNext", "VkDeviceGroupRenderPassBeginInfo, VkRenderPassAttachmentBeginInfoKHR, VkRenderPassSampleLocationsBeginInfoEXT", pRenderPassBegin->pNext, ARRAY_SIZE(allowed_structs_VkRenderPassBeginInfo), allowed_structs_VkRenderPassBeginInfo, GeneratedVulkanHeaderVersion, "VUID-VkRenderPassBeginInfo-pNext-pNext");
-
- skip |= validate_required_handle("vkCmdBeginRenderPass2KHR", "pRenderPassBegin->renderPass", pRenderPassBegin->renderPass);
-
- skip |= validate_required_handle("vkCmdBeginRenderPass2KHR", "pRenderPassBegin->framebuffer", pRenderPassBegin->framebuffer);
-
- // No xml-driven validation
-
- // No xml-driven validation
-
- skip |= validate_array("vkCmdBeginRenderPass2KHR", "pRenderPassBegin->clearValueCount", "pRenderPassBegin->pClearValues", pRenderPassBegin->clearValueCount, &pRenderPassBegin->pClearValues, false, true, kVUIDUndefined, "VUID-VkRenderPassBeginInfo-pClearValues-parameter");
-
- if (pRenderPassBegin->pClearValues != NULL)
- {
- for (uint32_t clearValueIndex = 0; clearValueIndex < pRenderPassBegin->clearValueCount; ++clearValueIndex)
- {
- // No xml-driven validation
-
- // No xml-driven validation
- }
- }
- }
- skip |= validate_struct_type("vkCmdBeginRenderPass2KHR", "pSubpassBeginInfo", "VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR", pSubpassBeginInfo, VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR, true, "VUID-vkCmdBeginRenderPass2KHR-pSubpassBeginInfo-parameter", "VUID-VkSubpassBeginInfoKHR-sType-sType");
- if (pSubpassBeginInfo != NULL)
- {
- skip |= validate_struct_pnext("vkCmdBeginRenderPass2KHR", "pSubpassBeginInfo->pNext", NULL, pSubpassBeginInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkSubpassBeginInfoKHR-pNext-pNext");
-
- skip |= validate_ranged_enum("vkCmdBeginRenderPass2KHR", "pSubpassBeginInfo->contents", "VkSubpassContents", AllVkSubpassContentsEnums, pSubpassBeginInfo->contents, "VUID-VkSubpassBeginInfoKHR-contents-parameter");
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdNextSubpass2KHR(
- VkCommandBuffer commandBuffer,
- const VkSubpassBeginInfoKHR* pSubpassBeginInfo,
- const VkSubpassEndInfoKHR* pSubpassEndInfo) {
- bool skip = false;
- if (!device_extensions.vk_khr_maintenance2) skip |= OutputExtensionError("vkCmdNextSubpass2KHR", VK_KHR_MAINTENANCE2_EXTENSION_NAME);
- if (!device_extensions.vk_khr_multiview) skip |= OutputExtensionError("vkCmdNextSubpass2KHR", VK_KHR_MULTIVIEW_EXTENSION_NAME);
- if (!device_extensions.vk_khr_create_renderpass_2) skip |= OutputExtensionError("vkCmdNextSubpass2KHR", VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME);
- skip |= validate_struct_type("vkCmdNextSubpass2KHR", "pSubpassBeginInfo", "VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR", pSubpassBeginInfo, VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR, true, "VUID-vkCmdNextSubpass2KHR-pSubpassBeginInfo-parameter", "VUID-VkSubpassBeginInfoKHR-sType-sType");
- if (pSubpassBeginInfo != NULL)
- {
- skip |= validate_struct_pnext("vkCmdNextSubpass2KHR", "pSubpassBeginInfo->pNext", NULL, pSubpassBeginInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkSubpassBeginInfoKHR-pNext-pNext");
-
- skip |= validate_ranged_enum("vkCmdNextSubpass2KHR", "pSubpassBeginInfo->contents", "VkSubpassContents", AllVkSubpassContentsEnums, pSubpassBeginInfo->contents, "VUID-VkSubpassBeginInfoKHR-contents-parameter");
- }
- skip |= validate_struct_type("vkCmdNextSubpass2KHR", "pSubpassEndInfo", "VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR", pSubpassEndInfo, VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR, true, "VUID-vkCmdNextSubpass2KHR-pSubpassEndInfo-parameter", "VUID-VkSubpassEndInfoKHR-sType-sType");
- if (pSubpassEndInfo != NULL)
- {
- skip |= validate_struct_pnext("vkCmdNextSubpass2KHR", "pSubpassEndInfo->pNext", NULL, pSubpassEndInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkSubpassEndInfoKHR-pNext-pNext");
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdEndRenderPass2KHR(
- VkCommandBuffer commandBuffer,
- const VkSubpassEndInfoKHR* pSubpassEndInfo) {
- bool skip = false;
- if (!device_extensions.vk_khr_maintenance2) skip |= OutputExtensionError("vkCmdEndRenderPass2KHR", VK_KHR_MAINTENANCE2_EXTENSION_NAME);
- if (!device_extensions.vk_khr_multiview) skip |= OutputExtensionError("vkCmdEndRenderPass2KHR", VK_KHR_MULTIVIEW_EXTENSION_NAME);
- if (!device_extensions.vk_khr_create_renderpass_2) skip |= OutputExtensionError("vkCmdEndRenderPass2KHR", VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME);
- skip |= validate_struct_type("vkCmdEndRenderPass2KHR", "pSubpassEndInfo", "VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR", pSubpassEndInfo, VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR, true, "VUID-vkCmdEndRenderPass2KHR-pSubpassEndInfo-parameter", "VUID-VkSubpassEndInfoKHR-sType-sType");
- if (pSubpassEndInfo != NULL)
- {
- skip |= validate_struct_pnext("vkCmdEndRenderPass2KHR", "pSubpassEndInfo->pNext", NULL, pSubpassEndInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkSubpassEndInfoKHR-pNext-pNext");
- }
- return skip;
-}
-
-
-
-bool StatelessValidation::PreCallValidateGetSwapchainStatusKHR(
- VkDevice device,
- VkSwapchainKHR swapchain) {
- bool skip = false;
- if (!device_extensions.vk_khr_get_surface_capabilities_2) skip |= OutputExtensionError("vkGetSwapchainStatusKHR", VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkGetSwapchainStatusKHR", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_khr_swapchain) skip |= OutputExtensionError("vkGetSwapchainStatusKHR", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
- if (!device_extensions.vk_khr_shared_presentable_image) skip |= OutputExtensionError("vkGetSwapchainStatusKHR", VK_KHR_SHARED_PRESENTABLE_IMAGE_EXTENSION_NAME);
- skip |= validate_required_handle("vkGetSwapchainStatusKHR", "swapchain", swapchain);
- return skip;
-}
-
-
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceExternalFencePropertiesKHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo,
- VkExternalFenceProperties* pExternalFenceProperties) {
- bool skip = false;
- if (!instance_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkGetPhysicalDeviceExternalFencePropertiesKHR", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (!instance_extensions.vk_khr_external_fence_capabilities) skip |= OutputExtensionError("vkGetPhysicalDeviceExternalFencePropertiesKHR", VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME);
- skip |= validate_struct_type("vkGetPhysicalDeviceExternalFencePropertiesKHR", "pExternalFenceInfo", "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO", pExternalFenceInfo, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO, true, "VUID-vkGetPhysicalDeviceExternalFenceProperties-pExternalFenceInfo-parameter", "VUID-VkPhysicalDeviceExternalFenceInfo-sType-sType");
- if (pExternalFenceInfo != NULL)
- {
- skip |= validate_struct_pnext("vkGetPhysicalDeviceExternalFencePropertiesKHR", "pExternalFenceInfo->pNext", NULL, pExternalFenceInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkPhysicalDeviceExternalFenceInfo-pNext-pNext");
-
- skip |= validate_flags("vkGetPhysicalDeviceExternalFencePropertiesKHR", "pExternalFenceInfo->handleType", "VkExternalFenceHandleTypeFlagBits", AllVkExternalFenceHandleTypeFlagBits, pExternalFenceInfo->handleType, kRequiredSingleBit, "VUID-VkPhysicalDeviceExternalFenceInfo-handleType-parameter", "VUID-VkPhysicalDeviceExternalFenceInfo-handleType-parameter");
- }
- skip |= validate_struct_type("vkGetPhysicalDeviceExternalFencePropertiesKHR", "pExternalFenceProperties", "VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES", pExternalFenceProperties, VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES, true, "VUID-vkGetPhysicalDeviceExternalFenceProperties-pExternalFenceProperties-parameter", "VUID-VkExternalFenceProperties-sType-sType");
- if (pExternalFenceProperties != NULL)
- {
- // No xml-driven validation
- }
- return skip;
-}
-
-
-
-
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-bool StatelessValidation::PreCallValidateImportFenceWin32HandleKHR(
- VkDevice device,
- const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo) {
- bool skip = false;
- if (!device_extensions.vk_khr_external_fence) skip |= OutputExtensionError("vkImportFenceWin32HandleKHR", VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME);
- if (!device_extensions.vk_khr_external_fence_win32) skip |= OutputExtensionError("vkImportFenceWin32HandleKHR", VK_KHR_EXTERNAL_FENCE_WIN32_EXTENSION_NAME);
- skip |= validate_struct_type("vkImportFenceWin32HandleKHR", "pImportFenceWin32HandleInfo", "VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR", pImportFenceWin32HandleInfo, VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR, true, "VUID-vkImportFenceWin32HandleKHR-pImportFenceWin32HandleInfo-parameter", "VUID-VkImportFenceWin32HandleInfoKHR-sType-sType");
- if (pImportFenceWin32HandleInfo != NULL)
- {
- skip |= validate_struct_pnext("vkImportFenceWin32HandleKHR", "pImportFenceWin32HandleInfo->pNext", NULL, pImportFenceWin32HandleInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkImportFenceWin32HandleInfoKHR-pNext-pNext");
-
- skip |= validate_required_handle("vkImportFenceWin32HandleKHR", "pImportFenceWin32HandleInfo->fence", pImportFenceWin32HandleInfo->fence);
-
- skip |= validate_flags("vkImportFenceWin32HandleKHR", "pImportFenceWin32HandleInfo->flags", "VkFenceImportFlagBits", AllVkFenceImportFlagBits, pImportFenceWin32HandleInfo->flags, kOptionalFlags, "VUID-VkImportFenceWin32HandleInfoKHR-flags-parameter");
-
- skip |= validate_flags("vkImportFenceWin32HandleKHR", "pImportFenceWin32HandleInfo->handleType", "VkExternalFenceHandleTypeFlagBits", AllVkExternalFenceHandleTypeFlagBits, pImportFenceWin32HandleInfo->handleType, kOptionalSingleBit, "VUID-VkImportFenceWin32HandleInfoKHR-handleType-parameter");
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetFenceWin32HandleKHR(
- VkDevice device,
- const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo,
- HANDLE* pHandle) {
- bool skip = false;
- if (!device_extensions.vk_khr_external_fence) skip |= OutputExtensionError("vkGetFenceWin32HandleKHR", VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME);
- if (!device_extensions.vk_khr_external_fence_win32) skip |= OutputExtensionError("vkGetFenceWin32HandleKHR", VK_KHR_EXTERNAL_FENCE_WIN32_EXTENSION_NAME);
- skip |= validate_struct_type("vkGetFenceWin32HandleKHR", "pGetWin32HandleInfo", "VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR", pGetWin32HandleInfo, VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR, true, "VUID-vkGetFenceWin32HandleKHR-pGetWin32HandleInfo-parameter", "VUID-VkFenceGetWin32HandleInfoKHR-sType-sType");
- if (pGetWin32HandleInfo != NULL)
- {
- skip |= validate_struct_pnext("vkGetFenceWin32HandleKHR", "pGetWin32HandleInfo->pNext", NULL, pGetWin32HandleInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkFenceGetWin32HandleInfoKHR-pNext-pNext");
-
- skip |= validate_required_handle("vkGetFenceWin32HandleKHR", "pGetWin32HandleInfo->fence", pGetWin32HandleInfo->fence);
-
- skip |= validate_flags("vkGetFenceWin32HandleKHR", "pGetWin32HandleInfo->handleType", "VkExternalFenceHandleTypeFlagBits", AllVkExternalFenceHandleTypeFlagBits, pGetWin32HandleInfo->handleType, kRequiredSingleBit, "VUID-VkFenceGetWin32HandleInfoKHR-handleType-parameter", "VUID-VkFenceGetWin32HandleInfoKHR-handleType-parameter");
- }
- skip |= validate_required_pointer("vkGetFenceWin32HandleKHR", "pHandle", pHandle, "VUID-vkGetFenceWin32HandleKHR-pHandle-parameter");
- return skip;
-}
-
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-bool StatelessValidation::PreCallValidateImportFenceFdKHR(
- VkDevice device,
- const VkImportFenceFdInfoKHR* pImportFenceFdInfo) {
- bool skip = false;
- if (!device_extensions.vk_khr_external_fence) skip |= OutputExtensionError("vkImportFenceFdKHR", VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME);
- if (!device_extensions.vk_khr_external_fence_fd) skip |= OutputExtensionError("vkImportFenceFdKHR", VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME);
- skip |= validate_struct_type("vkImportFenceFdKHR", "pImportFenceFdInfo", "VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR", pImportFenceFdInfo, VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR, true, "VUID-vkImportFenceFdKHR-pImportFenceFdInfo-parameter", "VUID-VkImportFenceFdInfoKHR-sType-sType");
- if (pImportFenceFdInfo != NULL)
- {
- skip |= validate_struct_pnext("vkImportFenceFdKHR", "pImportFenceFdInfo->pNext", NULL, pImportFenceFdInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkImportFenceFdInfoKHR-pNext-pNext");
-
- skip |= validate_required_handle("vkImportFenceFdKHR", "pImportFenceFdInfo->fence", pImportFenceFdInfo->fence);
-
- skip |= validate_flags("vkImportFenceFdKHR", "pImportFenceFdInfo->flags", "VkFenceImportFlagBits", AllVkFenceImportFlagBits, pImportFenceFdInfo->flags, kOptionalFlags, "VUID-VkImportFenceFdInfoKHR-flags-parameter");
-
- skip |= validate_flags("vkImportFenceFdKHR", "pImportFenceFdInfo->handleType", "VkExternalFenceHandleTypeFlagBits", AllVkExternalFenceHandleTypeFlagBits, pImportFenceFdInfo->handleType, kRequiredSingleBit, "VUID-VkImportFenceFdInfoKHR-handleType-parameter", "VUID-VkImportFenceFdInfoKHR-handleType-parameter");
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetFenceFdKHR(
- VkDevice device,
- const VkFenceGetFdInfoKHR* pGetFdInfo,
- int* pFd) {
- bool skip = false;
- if (!device_extensions.vk_khr_external_fence) skip |= OutputExtensionError("vkGetFenceFdKHR", VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME);
- if (!device_extensions.vk_khr_external_fence_fd) skip |= OutputExtensionError("vkGetFenceFdKHR", VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME);
- skip |= validate_struct_type("vkGetFenceFdKHR", "pGetFdInfo", "VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR", pGetFdInfo, VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR, true, "VUID-vkGetFenceFdKHR-pGetFdInfo-parameter", "VUID-VkFenceGetFdInfoKHR-sType-sType");
- if (pGetFdInfo != NULL)
- {
- skip |= validate_struct_pnext("vkGetFenceFdKHR", "pGetFdInfo->pNext", NULL, pGetFdInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkFenceGetFdInfoKHR-pNext-pNext");
-
- skip |= validate_required_handle("vkGetFenceFdKHR", "pGetFdInfo->fence", pGetFdInfo->fence);
-
- skip |= validate_flags("vkGetFenceFdKHR", "pGetFdInfo->handleType", "VkExternalFenceHandleTypeFlagBits", AllVkExternalFenceHandleTypeFlagBits, pGetFdInfo->handleType, kRequiredSingleBit, "VUID-VkFenceGetFdInfoKHR-handleType-parameter", "VUID-VkFenceGetFdInfoKHR-handleType-parameter");
- }
- skip |= validate_required_pointer("vkGetFenceFdKHR", "pFd", pFd, "VUID-vkGetFenceFdKHR-pFd-parameter");
- return skip;
-}
-
-
-
-
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceSurfaceCapabilities2KHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
- VkSurfaceCapabilities2KHR* pSurfaceCapabilities) {
- bool skip = false;
- if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkGetPhysicalDeviceSurfaceCapabilities2KHR", VK_KHR_SURFACE_EXTENSION_NAME);
- if (!instance_extensions.vk_khr_get_surface_capabilities_2) skip |= OutputExtensionError("vkGetPhysicalDeviceSurfaceCapabilities2KHR", VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME);
- skip |= validate_struct_type("vkGetPhysicalDeviceSurfaceCapabilities2KHR", "pSurfaceInfo", "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR", pSurfaceInfo, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR, true, "VUID-vkGetPhysicalDeviceSurfaceCapabilities2KHR-pSurfaceInfo-parameter", "VUID-VkPhysicalDeviceSurfaceInfo2KHR-sType-sType");
- if (pSurfaceInfo != NULL)
- {
- const VkStructureType allowed_structs_VkPhysicalDeviceSurfaceInfo2KHR[] = { VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT, VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT };
-
- skip |= validate_struct_pnext("vkGetPhysicalDeviceSurfaceCapabilities2KHR", "pSurfaceInfo->pNext", "VkSurfaceFullScreenExclusiveInfoEXT, VkSurfaceFullScreenExclusiveWin32InfoEXT", pSurfaceInfo->pNext, ARRAY_SIZE(allowed_structs_VkPhysicalDeviceSurfaceInfo2KHR), allowed_structs_VkPhysicalDeviceSurfaceInfo2KHR, GeneratedVulkanHeaderVersion, "VUID-VkPhysicalDeviceSurfaceInfo2KHR-pNext-pNext");
-
- skip |= validate_required_handle("vkGetPhysicalDeviceSurfaceCapabilities2KHR", "pSurfaceInfo->surface", pSurfaceInfo->surface);
- }
- skip |= validate_struct_type("vkGetPhysicalDeviceSurfaceCapabilities2KHR", "pSurfaceCapabilities", "VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR", pSurfaceCapabilities, VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR, true, "VUID-vkGetPhysicalDeviceSurfaceCapabilities2KHR-pSurfaceCapabilities-parameter", "VUID-VkSurfaceCapabilities2KHR-sType-sType");
- if (pSurfaceCapabilities != NULL)
- {
- // No xml-driven validation
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceSurfaceFormats2KHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
- uint32_t* pSurfaceFormatCount,
- VkSurfaceFormat2KHR* pSurfaceFormats) {
- bool skip = false;
- if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkGetPhysicalDeviceSurfaceFormats2KHR", VK_KHR_SURFACE_EXTENSION_NAME);
- if (!instance_extensions.vk_khr_get_surface_capabilities_2) skip |= OutputExtensionError("vkGetPhysicalDeviceSurfaceFormats2KHR", VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME);
- skip |= validate_struct_type("vkGetPhysicalDeviceSurfaceFormats2KHR", "pSurfaceInfo", "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR", pSurfaceInfo, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR, true, "VUID-vkGetPhysicalDeviceSurfaceFormats2KHR-pSurfaceInfo-parameter", "VUID-VkPhysicalDeviceSurfaceInfo2KHR-sType-sType");
- if (pSurfaceInfo != NULL)
- {
- const VkStructureType allowed_structs_VkPhysicalDeviceSurfaceInfo2KHR[] = { VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT, VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT };
-
- skip |= validate_struct_pnext("vkGetPhysicalDeviceSurfaceFormats2KHR", "pSurfaceInfo->pNext", "VkSurfaceFullScreenExclusiveInfoEXT, VkSurfaceFullScreenExclusiveWin32InfoEXT", pSurfaceInfo->pNext, ARRAY_SIZE(allowed_structs_VkPhysicalDeviceSurfaceInfo2KHR), allowed_structs_VkPhysicalDeviceSurfaceInfo2KHR, GeneratedVulkanHeaderVersion, "VUID-VkPhysicalDeviceSurfaceInfo2KHR-pNext-pNext");
-
- skip |= validate_required_handle("vkGetPhysicalDeviceSurfaceFormats2KHR", "pSurfaceInfo->surface", pSurfaceInfo->surface);
- }
- skip |= validate_struct_type_array("vkGetPhysicalDeviceSurfaceFormats2KHR", "pSurfaceFormatCount", "pSurfaceFormats", "VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR", pSurfaceFormatCount, pSurfaceFormats, VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR, true, false, false, "VUID-VkSurfaceFormat2KHR-sType-sType", "VUID-vkGetPhysicalDeviceSurfaceFormats2KHR-pSurfaceFormats-parameter", kVUIDUndefined);
- if (pSurfaceFormats != NULL)
- {
- for (uint32_t pSurfaceFormatIndex = 0; pSurfaceFormatIndex < *pSurfaceFormatCount; ++pSurfaceFormatIndex)
- {
- // No xml-driven validation
- }
- }
- return skip;
-}
-
-
-
-
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceDisplayProperties2KHR(
- VkPhysicalDevice physicalDevice,
- uint32_t* pPropertyCount,
- VkDisplayProperties2KHR* pProperties) {
- bool skip = false;
- if (!instance_extensions.vk_khr_display) skip |= OutputExtensionError("vkGetPhysicalDeviceDisplayProperties2KHR", VK_KHR_DISPLAY_EXTENSION_NAME);
- if (!instance_extensions.vk_khr_get_display_properties_2) skip |= OutputExtensionError("vkGetPhysicalDeviceDisplayProperties2KHR", VK_KHR_GET_DISPLAY_PROPERTIES_2_EXTENSION_NAME);
- skip |= validate_struct_type_array("vkGetPhysicalDeviceDisplayProperties2KHR", "pPropertyCount", "pProperties", "VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR", pPropertyCount, pProperties, VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR, true, false, false, "VUID-VkDisplayProperties2KHR-sType-sType", "VUID-vkGetPhysicalDeviceDisplayProperties2KHR-pProperties-parameter", kVUIDUndefined);
- if (pProperties != NULL)
- {
- for (uint32_t pPropertyIndex = 0; pPropertyIndex < *pPropertyCount; ++pPropertyIndex)
- {
- // No xml-driven validation
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceDisplayPlaneProperties2KHR(
- VkPhysicalDevice physicalDevice,
- uint32_t* pPropertyCount,
- VkDisplayPlaneProperties2KHR* pProperties) {
- bool skip = false;
- if (!instance_extensions.vk_khr_display) skip |= OutputExtensionError("vkGetPhysicalDeviceDisplayPlaneProperties2KHR", VK_KHR_DISPLAY_EXTENSION_NAME);
- if (!instance_extensions.vk_khr_get_display_properties_2) skip |= OutputExtensionError("vkGetPhysicalDeviceDisplayPlaneProperties2KHR", VK_KHR_GET_DISPLAY_PROPERTIES_2_EXTENSION_NAME);
- skip |= validate_struct_type_array("vkGetPhysicalDeviceDisplayPlaneProperties2KHR", "pPropertyCount", "pProperties", "VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR", pPropertyCount, pProperties, VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR, true, false, false, "VUID-VkDisplayPlaneProperties2KHR-sType-sType", "VUID-vkGetPhysicalDeviceDisplayPlaneProperties2KHR-pProperties-parameter", kVUIDUndefined);
- if (pProperties != NULL)
- {
- for (uint32_t pPropertyIndex = 0; pPropertyIndex < *pPropertyCount; ++pPropertyIndex)
- {
- // No xml-driven validation
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetDisplayModeProperties2KHR(
- VkPhysicalDevice physicalDevice,
- VkDisplayKHR display,
- uint32_t* pPropertyCount,
- VkDisplayModeProperties2KHR* pProperties) {
- bool skip = false;
- if (!instance_extensions.vk_khr_display) skip |= OutputExtensionError("vkGetDisplayModeProperties2KHR", VK_KHR_DISPLAY_EXTENSION_NAME);
- if (!instance_extensions.vk_khr_get_display_properties_2) skip |= OutputExtensionError("vkGetDisplayModeProperties2KHR", VK_KHR_GET_DISPLAY_PROPERTIES_2_EXTENSION_NAME);
- skip |= validate_required_handle("vkGetDisplayModeProperties2KHR", "display", display);
- skip |= validate_struct_type_array("vkGetDisplayModeProperties2KHR", "pPropertyCount", "pProperties", "VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR", pPropertyCount, pProperties, VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR, true, false, false, "VUID-VkDisplayModeProperties2KHR-sType-sType", "VUID-vkGetDisplayModeProperties2KHR-pProperties-parameter", kVUIDUndefined);
- if (pProperties != NULL)
- {
- for (uint32_t pPropertyIndex = 0; pPropertyIndex < *pPropertyCount; ++pPropertyIndex)
- {
- // No xml-driven validation
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetDisplayPlaneCapabilities2KHR(
- VkPhysicalDevice physicalDevice,
- const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo,
- VkDisplayPlaneCapabilities2KHR* pCapabilities) {
- bool skip = false;
- if (!instance_extensions.vk_khr_display) skip |= OutputExtensionError("vkGetDisplayPlaneCapabilities2KHR", VK_KHR_DISPLAY_EXTENSION_NAME);
- if (!instance_extensions.vk_khr_get_display_properties_2) skip |= OutputExtensionError("vkGetDisplayPlaneCapabilities2KHR", VK_KHR_GET_DISPLAY_PROPERTIES_2_EXTENSION_NAME);
- skip |= validate_struct_type("vkGetDisplayPlaneCapabilities2KHR", "pDisplayPlaneInfo", "VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR", pDisplayPlaneInfo, VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR, true, "VUID-vkGetDisplayPlaneCapabilities2KHR-pDisplayPlaneInfo-parameter", "VUID-VkDisplayPlaneInfo2KHR-sType-sType");
- if (pDisplayPlaneInfo != NULL)
- {
- skip |= validate_struct_pnext("vkGetDisplayPlaneCapabilities2KHR", "pDisplayPlaneInfo->pNext", NULL, pDisplayPlaneInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDisplayPlaneInfo2KHR-pNext-pNext");
-
- skip |= validate_required_handle("vkGetDisplayPlaneCapabilities2KHR", "pDisplayPlaneInfo->mode", pDisplayPlaneInfo->mode);
- }
- skip |= validate_struct_type("vkGetDisplayPlaneCapabilities2KHR", "pCapabilities", "VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR", pCapabilities, VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR, true, "VUID-vkGetDisplayPlaneCapabilities2KHR-pCapabilities-parameter", "VUID-VkDisplayPlaneCapabilities2KHR-sType-sType");
- if (pCapabilities != NULL)
- {
- // No xml-driven validation
- }
- return skip;
-}
-
-
-
-
-
-
-
-
-
-bool StatelessValidation::PreCallValidateGetImageMemoryRequirements2KHR(
- VkDevice device,
- const VkImageMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements) {
- bool skip = false;
- if (!device_extensions.vk_khr_get_memory_requirements_2) skip |= OutputExtensionError("vkGetImageMemoryRequirements2KHR", VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- skip |= validate_struct_type("vkGetImageMemoryRequirements2KHR", "pInfo", "VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2", pInfo, VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2, true, "VUID-vkGetImageMemoryRequirements2-pInfo-parameter", "VUID-VkImageMemoryRequirementsInfo2-sType-sType");
- if (pInfo != NULL)
- {
- const VkStructureType allowed_structs_VkImageMemoryRequirementsInfo2[] = { VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO };
-
- skip |= validate_struct_pnext("vkGetImageMemoryRequirements2KHR", "pInfo->pNext", "VkImagePlaneMemoryRequirementsInfo", pInfo->pNext, ARRAY_SIZE(allowed_structs_VkImageMemoryRequirementsInfo2), allowed_structs_VkImageMemoryRequirementsInfo2, GeneratedVulkanHeaderVersion, "VUID-VkImageMemoryRequirementsInfo2-pNext-pNext");
-
- skip |= validate_required_handle("vkGetImageMemoryRequirements2KHR", "pInfo->image", pInfo->image);
- }
- skip |= validate_struct_type("vkGetImageMemoryRequirements2KHR", "pMemoryRequirements", "VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2", pMemoryRequirements, VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, true, "VUID-vkGetImageMemoryRequirements2-pMemoryRequirements-parameter", "VUID-VkMemoryRequirements2-sType-sType");
- if (pMemoryRequirements != NULL)
- {
- // No xml-driven validation
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetBufferMemoryRequirements2KHR(
- VkDevice device,
- const VkBufferMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements) {
- bool skip = false;
- if (!device_extensions.vk_khr_get_memory_requirements_2) skip |= OutputExtensionError("vkGetBufferMemoryRequirements2KHR", VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- skip |= validate_struct_type("vkGetBufferMemoryRequirements2KHR", "pInfo", "VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2", pInfo, VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2, true, "VUID-vkGetBufferMemoryRequirements2-pInfo-parameter", "VUID-VkBufferMemoryRequirementsInfo2-sType-sType");
- if (pInfo != NULL)
- {
- skip |= validate_struct_pnext("vkGetBufferMemoryRequirements2KHR", "pInfo->pNext", NULL, pInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkBufferMemoryRequirementsInfo2-pNext-pNext");
-
- skip |= validate_required_handle("vkGetBufferMemoryRequirements2KHR", "pInfo->buffer", pInfo->buffer);
- }
- skip |= validate_struct_type("vkGetBufferMemoryRequirements2KHR", "pMemoryRequirements", "VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2", pMemoryRequirements, VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, true, "VUID-vkGetBufferMemoryRequirements2-pMemoryRequirements-parameter", "VUID-VkMemoryRequirements2-sType-sType");
- if (pMemoryRequirements != NULL)
- {
- // No xml-driven validation
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetImageSparseMemoryRequirements2KHR(
- VkDevice device,
- const VkImageSparseMemoryRequirementsInfo2* pInfo,
- uint32_t* pSparseMemoryRequirementCount,
- VkSparseImageMemoryRequirements2* pSparseMemoryRequirements) {
- bool skip = false;
- if (!device_extensions.vk_khr_get_memory_requirements_2) skip |= OutputExtensionError("vkGetImageSparseMemoryRequirements2KHR", VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- skip |= validate_struct_type("vkGetImageSparseMemoryRequirements2KHR", "pInfo", "VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2", pInfo, VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2, true, "VUID-vkGetImageSparseMemoryRequirements2-pInfo-parameter", "VUID-VkImageSparseMemoryRequirementsInfo2-sType-sType");
- if (pInfo != NULL)
- {
- skip |= validate_struct_pnext("vkGetImageSparseMemoryRequirements2KHR", "pInfo->pNext", NULL, pInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkImageSparseMemoryRequirementsInfo2-pNext-pNext");
-
- skip |= validate_required_handle("vkGetImageSparseMemoryRequirements2KHR", "pInfo->image", pInfo->image);
- }
- skip |= validate_struct_type_array("vkGetImageSparseMemoryRequirements2KHR", "pSparseMemoryRequirementCount", "pSparseMemoryRequirements", "VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2", pSparseMemoryRequirementCount, pSparseMemoryRequirements, VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2, true, false, false, "VUID-VkSparseImageMemoryRequirements2-sType-sType", "VUID-vkGetImageSparseMemoryRequirements2-pSparseMemoryRequirements-parameter", kVUIDUndefined);
- if (pSparseMemoryRequirements != NULL)
- {
- for (uint32_t pSparseMemoryRequirementIndex = 0; pSparseMemoryRequirementIndex < *pSparseMemoryRequirementCount; ++pSparseMemoryRequirementIndex)
- {
- // No xml-driven validation
- }
- }
- return skip;
-}
-
-
-
-
-
-bool StatelessValidation::PreCallValidateCreateSamplerYcbcrConversionKHR(
- VkDevice device,
- const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSamplerYcbcrConversion* pYcbcrConversion) {
- bool skip = false;
- if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCreateSamplerYcbcrConversionKHR", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_khr_get_memory_requirements_2) skip |= OutputExtensionError("vkCreateSamplerYcbcrConversionKHR", VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- if (!device_extensions.vk_khr_bind_memory_2) skip |= OutputExtensionError("vkCreateSamplerYcbcrConversionKHR", VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
- if (!device_extensions.vk_khr_maintenance1) skip |= OutputExtensionError("vkCreateSamplerYcbcrConversionKHR", VK_KHR_MAINTENANCE1_EXTENSION_NAME);
- if (!device_extensions.vk_khr_sampler_ycbcr_conversion) skip |= OutputExtensionError("vkCreateSamplerYcbcrConversionKHR", VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
- skip |= validate_struct_type("vkCreateSamplerYcbcrConversionKHR", "pCreateInfo", "VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO, true, "VUID-vkCreateSamplerYcbcrConversion-pCreateInfo-parameter", "VUID-VkSamplerYcbcrConversionCreateInfo-sType-sType");
- if (pCreateInfo != NULL)
- {
- const VkStructureType allowed_structs_VkSamplerYcbcrConversionCreateInfo[] = { VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID };
-
- skip |= validate_struct_pnext("vkCreateSamplerYcbcrConversionKHR", "pCreateInfo->pNext", "VkExternalFormatANDROID", pCreateInfo->pNext, ARRAY_SIZE(allowed_structs_VkSamplerYcbcrConversionCreateInfo), allowed_structs_VkSamplerYcbcrConversionCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkSamplerYcbcrConversionCreateInfo-pNext-pNext");
-
- skip |= validate_ranged_enum("vkCreateSamplerYcbcrConversionKHR", "pCreateInfo->format", "VkFormat", AllVkFormatEnums, pCreateInfo->format, "VUID-VkSamplerYcbcrConversionCreateInfo-format-parameter");
-
- skip |= validate_ranged_enum("vkCreateSamplerYcbcrConversionKHR", "pCreateInfo->ycbcrModel", "VkSamplerYcbcrModelConversion", AllVkSamplerYcbcrModelConversionEnums, pCreateInfo->ycbcrModel, "VUID-VkSamplerYcbcrConversionCreateInfo-ycbcrModel-parameter");
-
- skip |= validate_ranged_enum("vkCreateSamplerYcbcrConversionKHR", "pCreateInfo->ycbcrRange", "VkSamplerYcbcrRange", AllVkSamplerYcbcrRangeEnums, pCreateInfo->ycbcrRange, "VUID-VkSamplerYcbcrConversionCreateInfo-ycbcrRange-parameter");
-
- skip |= validate_ranged_enum("vkCreateSamplerYcbcrConversionKHR", "pCreateInfo->components.r", "VkComponentSwizzle", AllVkComponentSwizzleEnums, pCreateInfo->components.r, "VUID-VkComponentMapping-r-parameter");
-
- skip |= validate_ranged_enum("vkCreateSamplerYcbcrConversionKHR", "pCreateInfo->components.g", "VkComponentSwizzle", AllVkComponentSwizzleEnums, pCreateInfo->components.g, "VUID-VkComponentMapping-g-parameter");
-
- skip |= validate_ranged_enum("vkCreateSamplerYcbcrConversionKHR", "pCreateInfo->components.b", "VkComponentSwizzle", AllVkComponentSwizzleEnums, pCreateInfo->components.b, "VUID-VkComponentMapping-b-parameter");
-
- skip |= validate_ranged_enum("vkCreateSamplerYcbcrConversionKHR", "pCreateInfo->components.a", "VkComponentSwizzle", AllVkComponentSwizzleEnums, pCreateInfo->components.a, "VUID-VkComponentMapping-a-parameter");
-
- skip |= validate_ranged_enum("vkCreateSamplerYcbcrConversionKHR", "pCreateInfo->xChromaOffset", "VkChromaLocation", AllVkChromaLocationEnums, pCreateInfo->xChromaOffset, "VUID-VkSamplerYcbcrConversionCreateInfo-xChromaOffset-parameter");
-
- skip |= validate_ranged_enum("vkCreateSamplerYcbcrConversionKHR", "pCreateInfo->yChromaOffset", "VkChromaLocation", AllVkChromaLocationEnums, pCreateInfo->yChromaOffset, "VUID-VkSamplerYcbcrConversionCreateInfo-yChromaOffset-parameter");
-
- skip |= validate_ranged_enum("vkCreateSamplerYcbcrConversionKHR", "pCreateInfo->chromaFilter", "VkFilter", AllVkFilterEnums, pCreateInfo->chromaFilter, "VUID-VkSamplerYcbcrConversionCreateInfo-chromaFilter-parameter");
-
- skip |= validate_bool32("vkCreateSamplerYcbcrConversionKHR", "pCreateInfo->forceExplicitReconstruction", pCreateInfo->forceExplicitReconstruction);
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreateSamplerYcbcrConversionKHR", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreateSamplerYcbcrConversionKHR", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreateSamplerYcbcrConversionKHR", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreateSamplerYcbcrConversionKHR", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreateSamplerYcbcrConversionKHR", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkCreateSamplerYcbcrConversionKHR", "pYcbcrConversion", pYcbcrConversion, "VUID-vkCreateSamplerYcbcrConversion-pYcbcrConversion-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateDestroySamplerYcbcrConversionKHR(
- VkDevice device,
- VkSamplerYcbcrConversion ycbcrConversion,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkDestroySamplerYcbcrConversionKHR", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_khr_get_memory_requirements_2) skip |= OutputExtensionError("vkDestroySamplerYcbcrConversionKHR", VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- if (!device_extensions.vk_khr_bind_memory_2) skip |= OutputExtensionError("vkDestroySamplerYcbcrConversionKHR", VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
- if (!device_extensions.vk_khr_maintenance1) skip |= OutputExtensionError("vkDestroySamplerYcbcrConversionKHR", VK_KHR_MAINTENANCE1_EXTENSION_NAME);
- if (!device_extensions.vk_khr_sampler_ycbcr_conversion) skip |= OutputExtensionError("vkDestroySamplerYcbcrConversionKHR", VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkDestroySamplerYcbcrConversionKHR", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkDestroySamplerYcbcrConversionKHR", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkDestroySamplerYcbcrConversionKHR", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkDestroySamplerYcbcrConversionKHR", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkDestroySamplerYcbcrConversionKHR", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- return skip;
-}
-
-
-
-bool StatelessValidation::PreCallValidateBindBufferMemory2KHR(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindBufferMemoryInfo* pBindInfos) {
- bool skip = false;
- if (!device_extensions.vk_khr_bind_memory_2) skip |= OutputExtensionError("vkBindBufferMemory2KHR", VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
- skip |= validate_struct_type_array("vkBindBufferMemory2KHR", "bindInfoCount", "pBindInfos", "VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO", bindInfoCount, pBindInfos, VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO, true, true, "VUID-VkBindBufferMemoryInfo-sType-sType", "VUID-vkBindBufferMemory2-pBindInfos-parameter", "VUID-vkBindBufferMemory2-bindInfoCount-arraylength");
- if (pBindInfos != NULL)
- {
- for (uint32_t bindInfoIndex = 0; bindInfoIndex < bindInfoCount; ++bindInfoIndex)
- {
- const VkStructureType allowed_structs_VkBindBufferMemoryInfo[] = { VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO };
-
- skip |= validate_struct_pnext("vkBindBufferMemory2KHR", ParameterName("pBindInfos[%i].pNext", ParameterName::IndexVector{ bindInfoIndex }), "VkBindBufferMemoryDeviceGroupInfo", pBindInfos[bindInfoIndex].pNext, ARRAY_SIZE(allowed_structs_VkBindBufferMemoryInfo), allowed_structs_VkBindBufferMemoryInfo, GeneratedVulkanHeaderVersion, "VUID-VkBindBufferMemoryInfo-pNext-pNext");
-
- skip |= validate_required_handle("vkBindBufferMemory2KHR", ParameterName("pBindInfos[%i].buffer", ParameterName::IndexVector{ bindInfoIndex }), pBindInfos[bindInfoIndex].buffer);
-
- skip |= validate_required_handle("vkBindBufferMemory2KHR", ParameterName("pBindInfos[%i].memory", ParameterName::IndexVector{ bindInfoIndex }), pBindInfos[bindInfoIndex].memory);
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateBindImageMemory2KHR(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindImageMemoryInfo* pBindInfos) {
- bool skip = false;
- if (!device_extensions.vk_khr_bind_memory_2) skip |= OutputExtensionError("vkBindImageMemory2KHR", VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
- skip |= validate_struct_type_array("vkBindImageMemory2KHR", "bindInfoCount", "pBindInfos", "VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO", bindInfoCount, pBindInfos, VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO, true, true, "VUID-VkBindImageMemoryInfo-sType-sType", "VUID-vkBindImageMemory2-pBindInfos-parameter", "VUID-vkBindImageMemory2-bindInfoCount-arraylength");
- if (pBindInfos != NULL)
- {
- for (uint32_t bindInfoIndex = 0; bindInfoIndex < bindInfoCount; ++bindInfoIndex)
- {
- const VkStructureType allowed_structs_VkBindImageMemoryInfo[] = { VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO, VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR, VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO };
-
- skip |= validate_struct_pnext("vkBindImageMemory2KHR", ParameterName("pBindInfos[%i].pNext", ParameterName::IndexVector{ bindInfoIndex }), "VkBindImageMemoryDeviceGroupInfo, VkBindImageMemorySwapchainInfoKHR, VkBindImagePlaneMemoryInfo", pBindInfos[bindInfoIndex].pNext, ARRAY_SIZE(allowed_structs_VkBindImageMemoryInfo), allowed_structs_VkBindImageMemoryInfo, GeneratedVulkanHeaderVersion, "VUID-VkBindImageMemoryInfo-pNext-pNext");
-
- skip |= validate_required_handle("vkBindImageMemory2KHR", ParameterName("pBindInfos[%i].image", ParameterName::IndexVector{ bindInfoIndex }), pBindInfos[bindInfoIndex].image);
- }
- }
- return skip;
-}
-
-
-
-bool StatelessValidation::PreCallValidateGetDescriptorSetLayoutSupportKHR(
- VkDevice device,
- const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
- VkDescriptorSetLayoutSupport* pSupport) {
- bool skip = false;
- if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkGetDescriptorSetLayoutSupportKHR", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_khr_maintenance3) skip |= OutputExtensionError("vkGetDescriptorSetLayoutSupportKHR", VK_KHR_MAINTENANCE3_EXTENSION_NAME);
- skip |= validate_struct_type("vkGetDescriptorSetLayoutSupportKHR", "pCreateInfo", "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, true, "VUID-vkGetDescriptorSetLayoutSupport-pCreateInfo-parameter", "VUID-VkDescriptorSetLayoutCreateInfo-sType-sType");
- if (pCreateInfo != NULL)
- {
- const VkStructureType allowed_structs_VkDescriptorSetLayoutCreateInfo[] = { VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT };
-
- skip |= validate_struct_pnext("vkGetDescriptorSetLayoutSupportKHR", "pCreateInfo->pNext", "VkDescriptorSetLayoutBindingFlagsCreateInfoEXT", pCreateInfo->pNext, ARRAY_SIZE(allowed_structs_VkDescriptorSetLayoutCreateInfo), allowed_structs_VkDescriptorSetLayoutCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkDescriptorSetLayoutCreateInfo-pNext-pNext");
-
- skip |= validate_flags("vkGetDescriptorSetLayoutSupportKHR", "pCreateInfo->flags", "VkDescriptorSetLayoutCreateFlagBits", AllVkDescriptorSetLayoutCreateFlagBits, pCreateInfo->flags, kOptionalFlags, "VUID-VkDescriptorSetLayoutCreateInfo-flags-parameter");
-
- skip |= validate_array("vkGetDescriptorSetLayoutSupportKHR", "pCreateInfo->bindingCount", "pCreateInfo->pBindings", pCreateInfo->bindingCount, &pCreateInfo->pBindings, false, true, kVUIDUndefined, "VUID-VkDescriptorSetLayoutCreateInfo-pBindings-parameter");
-
- if (pCreateInfo->pBindings != NULL)
- {
- for (uint32_t bindingIndex = 0; bindingIndex < pCreateInfo->bindingCount; ++bindingIndex)
- {
- skip |= validate_ranged_enum("vkGetDescriptorSetLayoutSupportKHR", ParameterName("pCreateInfo->pBindings[%i].descriptorType", ParameterName::IndexVector{ bindingIndex }), "VkDescriptorType", AllVkDescriptorTypeEnums, pCreateInfo->pBindings[bindingIndex].descriptorType, "VUID-VkDescriptorSetLayoutBinding-descriptorType-parameter");
- }
- }
- }
- skip |= validate_struct_type("vkGetDescriptorSetLayoutSupportKHR", "pSupport", "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT", pSupport, VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT, true, "VUID-vkGetDescriptorSetLayoutSupport-pSupport-parameter", "VUID-VkDescriptorSetLayoutSupport-sType-sType");
- if (pSupport != NULL)
- {
- // No xml-driven validation
- }
- return skip;
-}
-
-
-
-bool StatelessValidation::PreCallValidateCmdDrawIndirectCountKHR(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride) {
- bool skip = false;
- if (!device_extensions.vk_khr_draw_indirect_count) skip |= OutputExtensionError("vkCmdDrawIndirectCountKHR", VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME);
- skip |= validate_required_handle("vkCmdDrawIndirectCountKHR", "buffer", buffer);
- skip |= validate_required_handle("vkCmdDrawIndirectCountKHR", "countBuffer", countBuffer);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdDrawIndexedIndirectCountKHR(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride) {
- bool skip = false;
- if (!device_extensions.vk_khr_draw_indirect_count) skip |= OutputExtensionError("vkCmdDrawIndexedIndirectCountKHR", VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME);
- skip |= validate_required_handle("vkCmdDrawIndexedIndirectCountKHR", "buffer", buffer);
- skip |= validate_required_handle("vkCmdDrawIndexedIndirectCountKHR", "countBuffer", countBuffer);
- return skip;
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-bool StatelessValidation::PreCallValidateGetPipelineExecutablePropertiesKHR(
- VkDevice device,
- const VkPipelineInfoKHR* pPipelineInfo,
- uint32_t* pExecutableCount,
- VkPipelineExecutablePropertiesKHR* pProperties) {
- bool skip = false;
- if (!device_extensions.vk_khr_pipeline_executable_properties) skip |= OutputExtensionError("vkGetPipelineExecutablePropertiesKHR", VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_EXTENSION_NAME);
- skip |= validate_struct_type("vkGetPipelineExecutablePropertiesKHR", "pPipelineInfo", "VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR", pPipelineInfo, VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR, true, "VUID-vkGetPipelineExecutablePropertiesKHR-pPipelineInfo-parameter", "VUID-VkPipelineInfoKHR-sType-sType");
- if (pPipelineInfo != NULL)
- {
- skip |= validate_struct_pnext("vkGetPipelineExecutablePropertiesKHR", "pPipelineInfo->pNext", NULL, pPipelineInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkPipelineInfoKHR-pNext-pNext");
-
- skip |= validate_required_handle("vkGetPipelineExecutablePropertiesKHR", "pPipelineInfo->pipeline", pPipelineInfo->pipeline);
- }
- skip |= validate_struct_type_array("vkGetPipelineExecutablePropertiesKHR", "pExecutableCount", "pProperties", "VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR", pExecutableCount, pProperties, VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR, true, false, false, "VUID-VkPipelineExecutablePropertiesKHR-sType-sType", "VUID-vkGetPipelineExecutablePropertiesKHR-pProperties-parameter", kVUIDUndefined);
- if (pProperties != NULL)
- {
- for (uint32_t pExecutableIndex = 0; pExecutableIndex < *pExecutableCount; ++pExecutableIndex)
- {
- // No xml-driven validation
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetPipelineExecutableStatisticsKHR(
- VkDevice device,
- const VkPipelineExecutableInfoKHR* pExecutableInfo,
- uint32_t* pStatisticCount,
- VkPipelineExecutableStatisticKHR* pStatistics) {
- bool skip = false;
- if (!device_extensions.vk_khr_pipeline_executable_properties) skip |= OutputExtensionError("vkGetPipelineExecutableStatisticsKHR", VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_EXTENSION_NAME);
- skip |= validate_struct_type("vkGetPipelineExecutableStatisticsKHR", "pExecutableInfo", "VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR", pExecutableInfo, VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR, true, "VUID-vkGetPipelineExecutableStatisticsKHR-pExecutableInfo-parameter", "VUID-VkPipelineExecutableInfoKHR-sType-sType");
- if (pExecutableInfo != NULL)
- {
- skip |= validate_struct_pnext("vkGetPipelineExecutableStatisticsKHR", "pExecutableInfo->pNext", NULL, pExecutableInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkPipelineExecutableInfoKHR-pNext-pNext");
-
- skip |= validate_required_handle("vkGetPipelineExecutableStatisticsKHR", "pExecutableInfo->pipeline", pExecutableInfo->pipeline);
- }
- skip |= validate_struct_type_array("vkGetPipelineExecutableStatisticsKHR", "pStatisticCount", "pStatistics", "VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR", pStatisticCount, pStatistics, VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR, true, false, false, "VUID-VkPipelineExecutableStatisticKHR-sType-sType", "VUID-vkGetPipelineExecutableStatisticsKHR-pStatistics-parameter", kVUIDUndefined);
- if (pStatistics != NULL)
- {
- for (uint32_t pStatisticIndex = 0; pStatisticIndex < *pStatisticCount; ++pStatisticIndex)
- {
- // No xml-driven validation
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetPipelineExecutableInternalRepresentationsKHR(
- VkDevice device,
- const VkPipelineExecutableInfoKHR* pExecutableInfo,
- uint32_t* pInternalRepresentationCount,
- VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations) {
- bool skip = false;
- if (!device_extensions.vk_khr_pipeline_executable_properties) skip |= OutputExtensionError("vkGetPipelineExecutableInternalRepresentationsKHR", VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_EXTENSION_NAME);
- skip |= validate_struct_type("vkGetPipelineExecutableInternalRepresentationsKHR", "pExecutableInfo", "VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR", pExecutableInfo, VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR, true, "VUID-vkGetPipelineExecutableInternalRepresentationsKHR-pExecutableInfo-parameter", "VUID-VkPipelineExecutableInfoKHR-sType-sType");
- if (pExecutableInfo != NULL)
- {
- skip |= validate_struct_pnext("vkGetPipelineExecutableInternalRepresentationsKHR", "pExecutableInfo->pNext", NULL, pExecutableInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkPipelineExecutableInfoKHR-pNext-pNext");
-
- skip |= validate_required_handle("vkGetPipelineExecutableInternalRepresentationsKHR", "pExecutableInfo->pipeline", pExecutableInfo->pipeline);
- }
- skip |= validate_struct_type_array("vkGetPipelineExecutableInternalRepresentationsKHR", "pInternalRepresentationCount", "pInternalRepresentations", "VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR", pInternalRepresentationCount, pInternalRepresentations, VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR, true, false, false, "VUID-VkPipelineExecutableInternalRepresentationKHR-sType-sType", "VUID-vkGetPipelineExecutableInternalRepresentationsKHR-pInternalRepresentations-parameter", kVUIDUndefined);
- return skip;
-}
-
-
-
-bool StatelessValidation::PreCallValidateCreateDebugReportCallbackEXT(
- VkInstance instance,
- const VkDebugReportCallbackCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDebugReportCallbackEXT* pCallback) {
- bool skip = false;
- if (!instance_extensions.vk_ext_debug_report) skip |= OutputExtensionError("vkCreateDebugReportCallbackEXT", VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
- skip |= validate_struct_type("vkCreateDebugReportCallbackEXT", "pCreateInfo", "VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT", pCreateInfo, VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT, true, "VUID-vkCreateDebugReportCallbackEXT-pCreateInfo-parameter", "VUID-VkDebugReportCallbackCreateInfoEXT-sType-sType");
- if (pCreateInfo != NULL)
- {
- skip |= validate_struct_pnext("vkCreateDebugReportCallbackEXT", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, kVUIDUndefined);
-
- skip |= validate_flags("vkCreateDebugReportCallbackEXT", "pCreateInfo->flags", "VkDebugReportFlagBitsEXT", AllVkDebugReportFlagBitsEXT, pCreateInfo->flags, kOptionalFlags, "VUID-VkDebugReportCallbackCreateInfoEXT-flags-parameter");
-
- skip |= validate_required_pointer("vkCreateDebugReportCallbackEXT", "pCreateInfo->pfnCallback", reinterpret_cast<const void*>(pCreateInfo->pfnCallback), "VUID-VkDebugReportCallbackCreateInfoEXT-pfnCallback-parameter");
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreateDebugReportCallbackEXT", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreateDebugReportCallbackEXT", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreateDebugReportCallbackEXT", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreateDebugReportCallbackEXT", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreateDebugReportCallbackEXT", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkCreateDebugReportCallbackEXT", "pCallback", pCallback, "VUID-vkCreateDebugReportCallbackEXT-pCallback-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateDestroyDebugReportCallbackEXT(
- VkInstance instance,
- VkDebugReportCallbackEXT callback,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- if (!instance_extensions.vk_ext_debug_report) skip |= OutputExtensionError("vkDestroyDebugReportCallbackEXT", VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
- skip |= validate_required_handle("vkDestroyDebugReportCallbackEXT", "callback", callback);
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkDestroyDebugReportCallbackEXT", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkDestroyDebugReportCallbackEXT", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkDestroyDebugReportCallbackEXT", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkDestroyDebugReportCallbackEXT", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkDestroyDebugReportCallbackEXT", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateDebugReportMessageEXT(
- VkInstance instance,
- VkDebugReportFlagsEXT flags,
- VkDebugReportObjectTypeEXT objectType,
- uint64_t object,
- size_t location,
- int32_t messageCode,
- const char* pLayerPrefix,
- const char* pMessage) {
- bool skip = false;
- if (!instance_extensions.vk_ext_debug_report) skip |= OutputExtensionError("vkDebugReportMessageEXT", VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
- skip |= validate_flags("vkDebugReportMessageEXT", "flags", "VkDebugReportFlagBitsEXT", AllVkDebugReportFlagBitsEXT, flags, kRequiredFlags, "VUID-vkDebugReportMessageEXT-flags-parameter", "VUID-vkDebugReportMessageEXT-flags-requiredbitmask");
- skip |= validate_ranged_enum("vkDebugReportMessageEXT", "objectType", "VkDebugReportObjectTypeEXT", AllVkDebugReportObjectTypeEXTEnums, objectType, "VUID-vkDebugReportMessageEXT-objectType-parameter");
- skip |= validate_required_pointer("vkDebugReportMessageEXT", "pLayerPrefix", pLayerPrefix, "VUID-vkDebugReportMessageEXT-pLayerPrefix-parameter");
- skip |= validate_required_pointer("vkDebugReportMessageEXT", "pMessage", pMessage, "VUID-vkDebugReportMessageEXT-pMessage-parameter");
- return skip;
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-bool StatelessValidation::PreCallValidateDebugMarkerSetObjectTagEXT(
- VkDevice device,
- const VkDebugMarkerObjectTagInfoEXT* pTagInfo) {
- bool skip = false;
- if (!device_extensions.vk_ext_debug_report) skip |= OutputExtensionError("vkDebugMarkerSetObjectTagEXT", VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
- if (!device_extensions.vk_ext_debug_marker) skip |= OutputExtensionError("vkDebugMarkerSetObjectTagEXT", VK_EXT_DEBUG_MARKER_EXTENSION_NAME);
- skip |= validate_struct_type("vkDebugMarkerSetObjectTagEXT", "pTagInfo", "VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT", pTagInfo, VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT, true, "VUID-vkDebugMarkerSetObjectTagEXT-pTagInfo-parameter", "VUID-VkDebugMarkerObjectTagInfoEXT-sType-sType");
- if (pTagInfo != NULL)
- {
- skip |= validate_struct_pnext("vkDebugMarkerSetObjectTagEXT", "pTagInfo->pNext", NULL, pTagInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDebugMarkerObjectTagInfoEXT-pNext-pNext");
-
- skip |= validate_ranged_enum("vkDebugMarkerSetObjectTagEXT", "pTagInfo->objectType", "VkDebugReportObjectTypeEXT", AllVkDebugReportObjectTypeEXTEnums, pTagInfo->objectType, "VUID-VkDebugMarkerObjectTagInfoEXT-objectType-parameter");
-
- skip |= validate_array("vkDebugMarkerSetObjectTagEXT", "pTagInfo->tagSize", "pTagInfo->pTag", pTagInfo->tagSize, &pTagInfo->pTag, true, true, "VUID-VkDebugMarkerObjectTagInfoEXT-tagSize-arraylength", "VUID-VkDebugMarkerObjectTagInfoEXT-pTag-parameter");
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateDebugMarkerSetObjectNameEXT(
- VkDevice device,
- const VkDebugMarkerObjectNameInfoEXT* pNameInfo) {
- bool skip = false;
- if (!device_extensions.vk_ext_debug_report) skip |= OutputExtensionError("vkDebugMarkerSetObjectNameEXT", VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
- if (!device_extensions.vk_ext_debug_marker) skip |= OutputExtensionError("vkDebugMarkerSetObjectNameEXT", VK_EXT_DEBUG_MARKER_EXTENSION_NAME);
- skip |= validate_struct_type("vkDebugMarkerSetObjectNameEXT", "pNameInfo", "VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT", pNameInfo, VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT, true, "VUID-vkDebugMarkerSetObjectNameEXT-pNameInfo-parameter", "VUID-VkDebugMarkerObjectNameInfoEXT-sType-sType");
- if (pNameInfo != NULL)
- {
- skip |= validate_struct_pnext("vkDebugMarkerSetObjectNameEXT", "pNameInfo->pNext", NULL, pNameInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDebugMarkerObjectNameInfoEXT-pNext-pNext");
-
- skip |= validate_ranged_enum("vkDebugMarkerSetObjectNameEXT", "pNameInfo->objectType", "VkDebugReportObjectTypeEXT", AllVkDebugReportObjectTypeEXTEnums, pNameInfo->objectType, "VUID-VkDebugMarkerObjectNameInfoEXT-objectType-parameter");
-
- skip |= validate_required_pointer("vkDebugMarkerSetObjectNameEXT", "pNameInfo->pObjectName", pNameInfo->pObjectName, "VUID-VkDebugMarkerObjectNameInfoEXT-pObjectName-parameter");
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdDebugMarkerBeginEXT(
- VkCommandBuffer commandBuffer,
- const VkDebugMarkerMarkerInfoEXT* pMarkerInfo) {
- bool skip = false;
- if (!device_extensions.vk_ext_debug_report) skip |= OutputExtensionError("vkCmdDebugMarkerBeginEXT", VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
- if (!device_extensions.vk_ext_debug_marker) skip |= OutputExtensionError("vkCmdDebugMarkerBeginEXT", VK_EXT_DEBUG_MARKER_EXTENSION_NAME);
- skip |= validate_struct_type("vkCmdDebugMarkerBeginEXT", "pMarkerInfo", "VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT", pMarkerInfo, VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT, true, "VUID-vkCmdDebugMarkerBeginEXT-pMarkerInfo-parameter", "VUID-VkDebugMarkerMarkerInfoEXT-sType-sType");
- if (pMarkerInfo != NULL)
- {
- skip |= validate_struct_pnext("vkCmdDebugMarkerBeginEXT", "pMarkerInfo->pNext", NULL, pMarkerInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDebugMarkerMarkerInfoEXT-pNext-pNext");
-
- skip |= validate_required_pointer("vkCmdDebugMarkerBeginEXT", "pMarkerInfo->pMarkerName", pMarkerInfo->pMarkerName, "VUID-VkDebugMarkerMarkerInfoEXT-pMarkerName-parameter");
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdDebugMarkerEndEXT(
- VkCommandBuffer commandBuffer) {
- bool skip = false;
- if (!device_extensions.vk_ext_debug_report) skip |= OutputExtensionError("vkCmdDebugMarkerEndEXT", VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
- if (!device_extensions.vk_ext_debug_marker) skip |= OutputExtensionError("vkCmdDebugMarkerEndEXT", VK_EXT_DEBUG_MARKER_EXTENSION_NAME);
- // No xml-driven validation
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdDebugMarkerInsertEXT(
- VkCommandBuffer commandBuffer,
- const VkDebugMarkerMarkerInfoEXT* pMarkerInfo) {
- bool skip = false;
- if (!device_extensions.vk_ext_debug_report) skip |= OutputExtensionError("vkCmdDebugMarkerInsertEXT", VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
- if (!device_extensions.vk_ext_debug_marker) skip |= OutputExtensionError("vkCmdDebugMarkerInsertEXT", VK_EXT_DEBUG_MARKER_EXTENSION_NAME);
- skip |= validate_struct_type("vkCmdDebugMarkerInsertEXT", "pMarkerInfo", "VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT", pMarkerInfo, VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT, true, "VUID-vkCmdDebugMarkerInsertEXT-pMarkerInfo-parameter", "VUID-VkDebugMarkerMarkerInfoEXT-sType-sType");
- if (pMarkerInfo != NULL)
- {
- skip |= validate_struct_pnext("vkCmdDebugMarkerInsertEXT", "pMarkerInfo->pNext", NULL, pMarkerInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDebugMarkerMarkerInfoEXT-pNext-pNext");
-
- skip |= validate_required_pointer("vkCmdDebugMarkerInsertEXT", "pMarkerInfo->pMarkerName", pMarkerInfo->pMarkerName, "VUID-VkDebugMarkerMarkerInfoEXT-pMarkerName-parameter");
- }
- return skip;
-}
-
-
-
-
-
-
-
-bool StatelessValidation::PreCallValidateCmdBindTransformFeedbackBuffersEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstBinding,
- uint32_t bindingCount,
- const VkBuffer* pBuffers,
- const VkDeviceSize* pOffsets,
- const VkDeviceSize* pSizes) {
- bool skip = false;
- if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCmdBindTransformFeedbackBuffersEXT", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_ext_transform_feedback) skip |= OutputExtensionError("vkCmdBindTransformFeedbackBuffersEXT", VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME);
- skip |= validate_handle_array("vkCmdBindTransformFeedbackBuffersEXT", "bindingCount", "pBuffers", bindingCount, pBuffers, true, true);
- skip |= validate_array("vkCmdBindTransformFeedbackBuffersEXT", "bindingCount", "pOffsets", bindingCount, &pOffsets, true, true, "VUID-vkCmdBindTransformFeedbackBuffersEXT-bindingCount-arraylength", "VUID-vkCmdBindTransformFeedbackBuffersEXT-pOffsets-parameter");
- skip |= validate_array("vkCmdBindTransformFeedbackBuffersEXT", "bindingCount", "pSizes", bindingCount, &pSizes, true, false, "VUID-vkCmdBindTransformFeedbackBuffersEXT-bindingCount-arraylength", "VUID-vkCmdBindTransformFeedbackBuffersEXT-pSizes-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdBeginTransformFeedbackEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstCounterBuffer,
- uint32_t counterBufferCount,
- const VkBuffer* pCounterBuffers,
- const VkDeviceSize* pCounterBufferOffsets) {
- bool skip = false;
- if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCmdBeginTransformFeedbackEXT", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_ext_transform_feedback) skip |= OutputExtensionError("vkCmdBeginTransformFeedbackEXT", VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME);
- // No xml-driven validation
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdEndTransformFeedbackEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstCounterBuffer,
- uint32_t counterBufferCount,
- const VkBuffer* pCounterBuffers,
- const VkDeviceSize* pCounterBufferOffsets) {
- bool skip = false;
- if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCmdEndTransformFeedbackEXT", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_ext_transform_feedback) skip |= OutputExtensionError("vkCmdEndTransformFeedbackEXT", VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME);
- // No xml-driven validation
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdBeginQueryIndexedEXT(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query,
- VkQueryControlFlags flags,
- uint32_t index) {
- bool skip = false;
- if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCmdBeginQueryIndexedEXT", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_ext_transform_feedback) skip |= OutputExtensionError("vkCmdBeginQueryIndexedEXT", VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME);
- skip |= validate_required_handle("vkCmdBeginQueryIndexedEXT", "queryPool", queryPool);
- skip |= validate_flags("vkCmdBeginQueryIndexedEXT", "flags", "VkQueryControlFlagBits", AllVkQueryControlFlagBits, flags, kOptionalFlags, "VUID-vkCmdBeginQueryIndexedEXT-flags-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdEndQueryIndexedEXT(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query,
- uint32_t index) {
- bool skip = false;
- if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCmdEndQueryIndexedEXT", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_ext_transform_feedback) skip |= OutputExtensionError("vkCmdEndQueryIndexedEXT", VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME);
- skip |= validate_required_handle("vkCmdEndQueryIndexedEXT", "queryPool", queryPool);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdDrawIndirectByteCountEXT(
- VkCommandBuffer commandBuffer,
- uint32_t instanceCount,
- uint32_t firstInstance,
- VkBuffer counterBuffer,
- VkDeviceSize counterBufferOffset,
- uint32_t counterOffset,
- uint32_t vertexStride) {
- bool skip = false;
- if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCmdDrawIndirectByteCountEXT", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_ext_transform_feedback) skip |= OutputExtensionError("vkCmdDrawIndirectByteCountEXT", VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME);
- skip |= validate_required_handle("vkCmdDrawIndirectByteCountEXT", "counterBuffer", counterBuffer);
- return skip;
-}
-
-
-
-bool StatelessValidation::PreCallValidateGetImageViewHandleNVX(
- VkDevice device,
- const VkImageViewHandleInfoNVX* pInfo) {
- bool skip = false;
- if (!device_extensions.vk_nvx_image_view_handle) skip |= OutputExtensionError("vkGetImageViewHandleNVX", VK_NVX_IMAGE_VIEW_HANDLE_EXTENSION_NAME);
- skip |= validate_struct_type("vkGetImageViewHandleNVX", "pInfo", "VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX", pInfo, VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX, true, "VUID-vkGetImageViewHandleNVX-pInfo-parameter", "VUID-VkImageViewHandleInfoNVX-sType-sType");
- if (pInfo != NULL)
- {
- skip |= validate_struct_pnext("vkGetImageViewHandleNVX", "pInfo->pNext", NULL, pInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkImageViewHandleInfoNVX-pNext-pNext");
-
- skip |= validate_required_handle("vkGetImageViewHandleNVX", "pInfo->imageView", pInfo->imageView);
-
- skip |= validate_ranged_enum("vkGetImageViewHandleNVX", "pInfo->descriptorType", "VkDescriptorType", AllVkDescriptorTypeEnums, pInfo->descriptorType, "VUID-VkImageViewHandleInfoNVX-descriptorType-parameter");
- }
- return skip;
-}
-
-
-
-bool StatelessValidation::PreCallValidateCmdDrawIndirectCountAMD(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride) {
- bool skip = false;
- if (!device_extensions.vk_amd_draw_indirect_count) skip |= OutputExtensionError("vkCmdDrawIndirectCountAMD", VK_AMD_DRAW_INDIRECT_COUNT_EXTENSION_NAME);
- skip |= validate_required_handle("vkCmdDrawIndirectCountAMD", "buffer", buffer);
- skip |= validate_required_handle("vkCmdDrawIndirectCountAMD", "countBuffer", countBuffer);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdDrawIndexedIndirectCountAMD(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride) {
- bool skip = false;
- if (!device_extensions.vk_amd_draw_indirect_count) skip |= OutputExtensionError("vkCmdDrawIndexedIndirectCountAMD", VK_AMD_DRAW_INDIRECT_COUNT_EXTENSION_NAME);
- skip |= validate_required_handle("vkCmdDrawIndexedIndirectCountAMD", "buffer", buffer);
- skip |= validate_required_handle("vkCmdDrawIndexedIndirectCountAMD", "countBuffer", countBuffer);
- return skip;
-}
-
-
-
-
-
-
-
-
-
-
-
-bool StatelessValidation::PreCallValidateGetShaderInfoAMD(
- VkDevice device,
- VkPipeline pipeline,
- VkShaderStageFlagBits shaderStage,
- VkShaderInfoTypeAMD infoType,
- size_t* pInfoSize,
- void* pInfo) {
- bool skip = false;
- if (!device_extensions.vk_amd_shader_info) skip |= OutputExtensionError("vkGetShaderInfoAMD", VK_AMD_SHADER_INFO_EXTENSION_NAME);
- skip |= validate_required_handle("vkGetShaderInfoAMD", "pipeline", pipeline);
- skip |= validate_flags("vkGetShaderInfoAMD", "shaderStage", "VkShaderStageFlagBits", AllVkShaderStageFlagBits, shaderStage, kRequiredSingleBit, "VUID-vkGetShaderInfoAMD-shaderStage-parameter", "VUID-vkGetShaderInfoAMD-shaderStage-parameter");
- skip |= validate_ranged_enum("vkGetShaderInfoAMD", "infoType", "VkShaderInfoTypeAMD", AllVkShaderInfoTypeAMDEnums, infoType, "VUID-vkGetShaderInfoAMD-infoType-parameter");
- skip |= validate_array("vkGetShaderInfoAMD", "pInfoSize", "pInfo", pInfoSize, &pInfo, true, false, false, kVUIDUndefined, "VUID-vkGetShaderInfoAMD-pInfo-parameter");
- return skip;
-}
-
-
-
-
-
-#ifdef VK_USE_PLATFORM_GGP
-
-bool StatelessValidation::PreCallValidateCreateStreamDescriptorSurfaceGGP(
- VkInstance instance,
- const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- bool skip = false;
- if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkCreateStreamDescriptorSurfaceGGP", VK_KHR_SURFACE_EXTENSION_NAME);
- if (!instance_extensions.vk_ggp_stream_descriptor_surface) skip |= OutputExtensionError("vkCreateStreamDescriptorSurfaceGGP", VK_GGP_STREAM_DESCRIPTOR_SURFACE_EXTENSION_NAME);
- skip |= validate_struct_type("vkCreateStreamDescriptorSurfaceGGP", "pCreateInfo", "VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP", pCreateInfo, VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP, true, "VUID-vkCreateStreamDescriptorSurfaceGGP-pCreateInfo-parameter", "VUID-VkStreamDescriptorSurfaceCreateInfoGGP-sType-sType");
- if (pCreateInfo != NULL)
- {
- skip |= validate_struct_pnext("vkCreateStreamDescriptorSurfaceGGP", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkStreamDescriptorSurfaceCreateInfoGGP-pNext-pNext");
-
- skip |= validate_reserved_flags("vkCreateStreamDescriptorSurfaceGGP", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkStreamDescriptorSurfaceCreateInfoGGP-flags-zerobitmask");
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreateStreamDescriptorSurfaceGGP", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreateStreamDescriptorSurfaceGGP", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreateStreamDescriptorSurfaceGGP", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreateStreamDescriptorSurfaceGGP", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreateStreamDescriptorSurfaceGGP", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkCreateStreamDescriptorSurfaceGGP", "pSurface", pSurface, "VUID-vkCreateStreamDescriptorSurfaceGGP-pSurface-parameter");
- return skip;
-}
-
-#endif // VK_USE_PLATFORM_GGP
-
-
-
-
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceExternalImageFormatPropertiesNV(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkImageType type,
- VkImageTiling tiling,
- VkImageUsageFlags usage,
- VkImageCreateFlags flags,
- VkExternalMemoryHandleTypeFlagsNV externalHandleType,
- VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties) {
- bool skip = false;
- if (!instance_extensions.vk_nv_external_memory_capabilities) skip |= OutputExtensionError("vkGetPhysicalDeviceExternalImageFormatPropertiesNV", VK_NV_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME);
- skip |= validate_ranged_enum("vkGetPhysicalDeviceExternalImageFormatPropertiesNV", "format", "VkFormat", AllVkFormatEnums, format, "VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-format-parameter");
- skip |= validate_ranged_enum("vkGetPhysicalDeviceExternalImageFormatPropertiesNV", "type", "VkImageType", AllVkImageTypeEnums, type, "VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-type-parameter");
- skip |= validate_ranged_enum("vkGetPhysicalDeviceExternalImageFormatPropertiesNV", "tiling", "VkImageTiling", AllVkImageTilingEnums, tiling, "VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-tiling-parameter");
- skip |= validate_flags("vkGetPhysicalDeviceExternalImageFormatPropertiesNV", "usage", "VkImageUsageFlagBits", AllVkImageUsageFlagBits, usage, kRequiredFlags, "VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-usage-parameter", "VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-usage-requiredbitmask");
- skip |= validate_flags("vkGetPhysicalDeviceExternalImageFormatPropertiesNV", "flags", "VkImageCreateFlagBits", AllVkImageCreateFlagBits, flags, kOptionalFlags, "VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-flags-parameter");
- skip |= validate_flags("vkGetPhysicalDeviceExternalImageFormatPropertiesNV", "externalHandleType", "VkExternalMemoryHandleTypeFlagBitsNV", AllVkExternalMemoryHandleTypeFlagBitsNV, externalHandleType, kOptionalFlags, "VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-externalHandleType-parameter");
- skip |= validate_required_pointer("vkGetPhysicalDeviceExternalImageFormatPropertiesNV", "pExternalImageFormatProperties", pExternalImageFormatProperties, "VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-pExternalImageFormatProperties-parameter");
- if (pExternalImageFormatProperties != NULL)
- {
- // No xml-driven validation
- }
- return skip;
-}
-
-
-
-
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-bool StatelessValidation::PreCallValidateGetMemoryWin32HandleNV(
- VkDevice device,
- VkDeviceMemory memory,
- VkExternalMemoryHandleTypeFlagsNV handleType,
- HANDLE* pHandle) {
- bool skip = false;
- if (!device_extensions.vk_nv_external_memory) skip |= OutputExtensionError("vkGetMemoryWin32HandleNV", VK_NV_EXTERNAL_MEMORY_EXTENSION_NAME);
- if (!device_extensions.vk_nv_external_memory_win32) skip |= OutputExtensionError("vkGetMemoryWin32HandleNV", VK_NV_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME);
- skip |= validate_required_handle("vkGetMemoryWin32HandleNV", "memory", memory);
- skip |= validate_flags("vkGetMemoryWin32HandleNV", "handleType", "VkExternalMemoryHandleTypeFlagBitsNV", AllVkExternalMemoryHandleTypeFlagBitsNV, handleType, kRequiredFlags, "VUID-vkGetMemoryWin32HandleNV-handleType-parameter", "VUID-vkGetMemoryWin32HandleNV-handleType-requiredbitmask");
- skip |= validate_required_pointer("vkGetMemoryWin32HandleNV", "pHandle", pHandle, "VUID-vkGetMemoryWin32HandleNV-pHandle-parameter");
- return skip;
-}
-
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-
-
-#ifdef VK_USE_PLATFORM_VI_NN
-
-bool StatelessValidation::PreCallValidateCreateViSurfaceNN(
- VkInstance instance,
- const VkViSurfaceCreateInfoNN* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- bool skip = false;
- if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkCreateViSurfaceNN", VK_KHR_SURFACE_EXTENSION_NAME);
- if (!instance_extensions.vk_nn_vi_surface) skip |= OutputExtensionError("vkCreateViSurfaceNN", VK_NN_VI_SURFACE_EXTENSION_NAME);
- skip |= validate_struct_type("vkCreateViSurfaceNN", "pCreateInfo", "VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN", pCreateInfo, VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN, true, "VUID-vkCreateViSurfaceNN-pCreateInfo-parameter", "VUID-VkViSurfaceCreateInfoNN-sType-sType");
- if (pCreateInfo != NULL)
- {
- skip |= validate_struct_pnext("vkCreateViSurfaceNN", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkViSurfaceCreateInfoNN-pNext-pNext");
-
- skip |= validate_reserved_flags("vkCreateViSurfaceNN", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkViSurfaceCreateInfoNN-flags-zerobitmask");
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreateViSurfaceNN", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreateViSurfaceNN", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreateViSurfaceNN", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreateViSurfaceNN", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreateViSurfaceNN", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkCreateViSurfaceNN", "pSurface", pSurface, "VUID-vkCreateViSurfaceNN-pSurface-parameter");
- return skip;
-}
-
-#endif // VK_USE_PLATFORM_VI_NN
-
-
-
-
-
-
-
-
-
-bool StatelessValidation::PreCallValidateCmdBeginConditionalRenderingEXT(
- VkCommandBuffer commandBuffer,
- const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin) {
- bool skip = false;
- if (!device_extensions.vk_ext_conditional_rendering) skip |= OutputExtensionError("vkCmdBeginConditionalRenderingEXT", VK_EXT_CONDITIONAL_RENDERING_EXTENSION_NAME);
- skip |= validate_struct_type("vkCmdBeginConditionalRenderingEXT", "pConditionalRenderingBegin", "VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT", pConditionalRenderingBegin, VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT, true, "VUID-vkCmdBeginConditionalRenderingEXT-pConditionalRenderingBegin-parameter", "VUID-VkConditionalRenderingBeginInfoEXT-sType-sType");
- if (pConditionalRenderingBegin != NULL)
- {
- skip |= validate_struct_pnext("vkCmdBeginConditionalRenderingEXT", "pConditionalRenderingBegin->pNext", NULL, pConditionalRenderingBegin->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkConditionalRenderingBeginInfoEXT-pNext-pNext");
-
- skip |= validate_required_handle("vkCmdBeginConditionalRenderingEXT", "pConditionalRenderingBegin->buffer", pConditionalRenderingBegin->buffer);
-
- skip |= validate_flags("vkCmdBeginConditionalRenderingEXT", "pConditionalRenderingBegin->flags", "VkConditionalRenderingFlagBitsEXT", AllVkConditionalRenderingFlagBitsEXT, pConditionalRenderingBegin->flags, kOptionalFlags, "VUID-VkConditionalRenderingBeginInfoEXT-flags-parameter");
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdEndConditionalRenderingEXT(
- VkCommandBuffer commandBuffer) {
- bool skip = false;
- if (!device_extensions.vk_ext_conditional_rendering) skip |= OutputExtensionError("vkCmdEndConditionalRenderingEXT", VK_EXT_CONDITIONAL_RENDERING_EXTENSION_NAME);
- // No xml-driven validation
- return skip;
-}
-
-
-
-bool StatelessValidation::PreCallValidateCmdProcessCommandsNVX(
- VkCommandBuffer commandBuffer,
- const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo) {
- bool skip = false;
- if (!device_extensions.vk_nvx_device_generated_commands) skip |= OutputExtensionError("vkCmdProcessCommandsNVX", VK_NVX_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME);
- skip |= validate_struct_type("vkCmdProcessCommandsNVX", "pProcessCommandsInfo", "VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX", pProcessCommandsInfo, VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX, true, "VUID-vkCmdProcessCommandsNVX-pProcessCommandsInfo-parameter", "VUID-VkCmdProcessCommandsInfoNVX-sType-sType");
- if (pProcessCommandsInfo != NULL)
- {
- skip |= validate_struct_pnext("vkCmdProcessCommandsNVX", "pProcessCommandsInfo->pNext", NULL, pProcessCommandsInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkCmdProcessCommandsInfoNVX-pNext-pNext");
-
- skip |= validate_required_handle("vkCmdProcessCommandsNVX", "pProcessCommandsInfo->objectTable", pProcessCommandsInfo->objectTable);
-
- skip |= validate_required_handle("vkCmdProcessCommandsNVX", "pProcessCommandsInfo->indirectCommandsLayout", pProcessCommandsInfo->indirectCommandsLayout);
-
- skip |= validate_array("vkCmdProcessCommandsNVX", "pProcessCommandsInfo->indirectCommandsTokenCount", "pProcessCommandsInfo->pIndirectCommandsTokens", pProcessCommandsInfo->indirectCommandsTokenCount, &pProcessCommandsInfo->pIndirectCommandsTokens, true, true, "VUID-VkCmdProcessCommandsInfoNVX-indirectCommandsTokenCount-arraylength", "VUID-VkCmdProcessCommandsInfoNVX-pIndirectCommandsTokens-parameter");
-
- if (pProcessCommandsInfo->pIndirectCommandsTokens != NULL)
- {
- for (uint32_t indirectCommandsTokenIndex = 0; indirectCommandsTokenIndex < pProcessCommandsInfo->indirectCommandsTokenCount; ++indirectCommandsTokenIndex)
- {
- skip |= validate_ranged_enum("vkCmdProcessCommandsNVX", ParameterName("pProcessCommandsInfo->pIndirectCommandsTokens[%i].tokenType", ParameterName::IndexVector{ indirectCommandsTokenIndex }), "VkIndirectCommandsTokenTypeNVX", AllVkIndirectCommandsTokenTypeNVXEnums, pProcessCommandsInfo->pIndirectCommandsTokens[indirectCommandsTokenIndex].tokenType, "VUID-VkIndirectCommandsTokenNVX-tokenType-parameter");
-
- skip |= validate_required_handle("vkCmdProcessCommandsNVX", ParameterName("pProcessCommandsInfo->pIndirectCommandsTokens[%i].buffer", ParameterName::IndexVector{ indirectCommandsTokenIndex }), pProcessCommandsInfo->pIndirectCommandsTokens[indirectCommandsTokenIndex].buffer);
- }
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdReserveSpaceForCommandsNVX(
- VkCommandBuffer commandBuffer,
- const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo) {
- bool skip = false;
- if (!device_extensions.vk_nvx_device_generated_commands) skip |= OutputExtensionError("vkCmdReserveSpaceForCommandsNVX", VK_NVX_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME);
- skip |= validate_struct_type("vkCmdReserveSpaceForCommandsNVX", "pReserveSpaceInfo", "VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX", pReserveSpaceInfo, VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX, true, "VUID-vkCmdReserveSpaceForCommandsNVX-pReserveSpaceInfo-parameter", "VUID-VkCmdReserveSpaceForCommandsInfoNVX-sType-sType");
- if (pReserveSpaceInfo != NULL)
- {
- skip |= validate_struct_pnext("vkCmdReserveSpaceForCommandsNVX", "pReserveSpaceInfo->pNext", NULL, pReserveSpaceInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkCmdReserveSpaceForCommandsInfoNVX-pNext-pNext");
-
- skip |= validate_required_handle("vkCmdReserveSpaceForCommandsNVX", "pReserveSpaceInfo->objectTable", pReserveSpaceInfo->objectTable);
-
- skip |= validate_required_handle("vkCmdReserveSpaceForCommandsNVX", "pReserveSpaceInfo->indirectCommandsLayout", pReserveSpaceInfo->indirectCommandsLayout);
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCreateIndirectCommandsLayoutNVX(
- VkDevice device,
- const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout) {
- bool skip = false;
- if (!device_extensions.vk_nvx_device_generated_commands) skip |= OutputExtensionError("vkCreateIndirectCommandsLayoutNVX", VK_NVX_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME);
- skip |= validate_struct_type("vkCreateIndirectCommandsLayoutNVX", "pCreateInfo", "VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX", pCreateInfo, VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX, true, "VUID-vkCreateIndirectCommandsLayoutNVX-pCreateInfo-parameter", "VUID-VkIndirectCommandsLayoutCreateInfoNVX-sType-sType");
- if (pCreateInfo != NULL)
- {
- skip |= validate_struct_pnext("vkCreateIndirectCommandsLayoutNVX", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkIndirectCommandsLayoutCreateInfoNVX-pNext-pNext");
-
- skip |= validate_ranged_enum("vkCreateIndirectCommandsLayoutNVX", "pCreateInfo->pipelineBindPoint", "VkPipelineBindPoint", AllVkPipelineBindPointEnums, pCreateInfo->pipelineBindPoint, "VUID-VkIndirectCommandsLayoutCreateInfoNVX-pipelineBindPoint-parameter");
-
- skip |= validate_flags("vkCreateIndirectCommandsLayoutNVX", "pCreateInfo->flags", "VkIndirectCommandsLayoutUsageFlagBitsNVX", AllVkIndirectCommandsLayoutUsageFlagBitsNVX, pCreateInfo->flags, kRequiredFlags, "VUID-VkIndirectCommandsLayoutCreateInfoNVX-flags-parameter", "VUID-VkIndirectCommandsLayoutCreateInfoNVX-flags-requiredbitmask");
-
- skip |= validate_array("vkCreateIndirectCommandsLayoutNVX", "pCreateInfo->tokenCount", "pCreateInfo->pTokens", pCreateInfo->tokenCount, &pCreateInfo->pTokens, true, true, "VUID-VkIndirectCommandsLayoutCreateInfoNVX-tokenCount-arraylength", "VUID-VkIndirectCommandsLayoutCreateInfoNVX-pTokens-parameter");
-
- if (pCreateInfo->pTokens != NULL)
- {
- for (uint32_t tokenIndex = 0; tokenIndex < pCreateInfo->tokenCount; ++tokenIndex)
- {
- skip |= validate_ranged_enum("vkCreateIndirectCommandsLayoutNVX", ParameterName("pCreateInfo->pTokens[%i].tokenType", ParameterName::IndexVector{ tokenIndex }), "VkIndirectCommandsTokenTypeNVX", AllVkIndirectCommandsTokenTypeNVXEnums, pCreateInfo->pTokens[tokenIndex].tokenType, "VUID-VkIndirectCommandsLayoutTokenNVX-tokenType-parameter");
- }
- }
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreateIndirectCommandsLayoutNVX", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreateIndirectCommandsLayoutNVX", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreateIndirectCommandsLayoutNVX", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreateIndirectCommandsLayoutNVX", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreateIndirectCommandsLayoutNVX", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkCreateIndirectCommandsLayoutNVX", "pIndirectCommandsLayout", pIndirectCommandsLayout, "VUID-vkCreateIndirectCommandsLayoutNVX-pIndirectCommandsLayout-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateDestroyIndirectCommandsLayoutNVX(
- VkDevice device,
- VkIndirectCommandsLayoutNVX indirectCommandsLayout,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- if (!device_extensions.vk_nvx_device_generated_commands) skip |= OutputExtensionError("vkDestroyIndirectCommandsLayoutNVX", VK_NVX_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME);
- skip |= validate_required_handle("vkDestroyIndirectCommandsLayoutNVX", "indirectCommandsLayout", indirectCommandsLayout);
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkDestroyIndirectCommandsLayoutNVX", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkDestroyIndirectCommandsLayoutNVX", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkDestroyIndirectCommandsLayoutNVX", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkDestroyIndirectCommandsLayoutNVX", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkDestroyIndirectCommandsLayoutNVX", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCreateObjectTableNVX(
- VkDevice device,
- const VkObjectTableCreateInfoNVX* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkObjectTableNVX* pObjectTable) {
- bool skip = false;
- if (!device_extensions.vk_nvx_device_generated_commands) skip |= OutputExtensionError("vkCreateObjectTableNVX", VK_NVX_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME);
- skip |= validate_struct_type("vkCreateObjectTableNVX", "pCreateInfo", "VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX", pCreateInfo, VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX, true, "VUID-vkCreateObjectTableNVX-pCreateInfo-parameter", "VUID-VkObjectTableCreateInfoNVX-sType-sType");
- if (pCreateInfo != NULL)
- {
- skip |= validate_struct_pnext("vkCreateObjectTableNVX", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkObjectTableCreateInfoNVX-pNext-pNext");
-
- skip |= validate_ranged_enum_array("vkCreateObjectTableNVX", "pCreateInfo->objectCount", "pCreateInfo->pObjectEntryTypes", "VkObjectEntryTypeNVX", AllVkObjectEntryTypeNVXEnums, pCreateInfo->objectCount, pCreateInfo->pObjectEntryTypes, true, true);
-
- skip |= validate_array("vkCreateObjectTableNVX", "pCreateInfo->objectCount", "pCreateInfo->pObjectEntryCounts", pCreateInfo->objectCount, &pCreateInfo->pObjectEntryCounts, true, true, "VUID-VkObjectTableCreateInfoNVX-objectCount-arraylength", "VUID-VkObjectTableCreateInfoNVX-pObjectEntryCounts-parameter");
-
- skip |= validate_flags_array("vkCreateObjectTableNVX", "pCreateInfo->objectCount", "pCreateInfo->pObjectEntryUsageFlags", "VkObjectEntryUsageFlagBitsNVX", AllVkObjectEntryUsageFlagBitsNVX, pCreateInfo->objectCount, pCreateInfo->pObjectEntryUsageFlags, true, true);
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreateObjectTableNVX", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreateObjectTableNVX", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreateObjectTableNVX", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreateObjectTableNVX", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreateObjectTableNVX", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkCreateObjectTableNVX", "pObjectTable", pObjectTable, "VUID-vkCreateObjectTableNVX-pObjectTable-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateDestroyObjectTableNVX(
- VkDevice device,
- VkObjectTableNVX objectTable,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- if (!device_extensions.vk_nvx_device_generated_commands) skip |= OutputExtensionError("vkDestroyObjectTableNVX", VK_NVX_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME);
- skip |= validate_required_handle("vkDestroyObjectTableNVX", "objectTable", objectTable);
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkDestroyObjectTableNVX", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkDestroyObjectTableNVX", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkDestroyObjectTableNVX", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkDestroyObjectTableNVX", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkDestroyObjectTableNVX", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateRegisterObjectsNVX(
- VkDevice device,
- VkObjectTableNVX objectTable,
- uint32_t objectCount,
- const VkObjectTableEntryNVX* const* ppObjectTableEntries,
- const uint32_t* pObjectIndices) {
- bool skip = false;
- if (!device_extensions.vk_nvx_device_generated_commands) skip |= OutputExtensionError("vkRegisterObjectsNVX", VK_NVX_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME);
- skip |= validate_required_handle("vkRegisterObjectsNVX", "objectTable", objectTable);
- skip |= validate_array("vkRegisterObjectsNVX", "objectCount", "ppObjectTableEntries", objectCount, &ppObjectTableEntries, true, true, "VUID-vkRegisterObjectsNVX-objectCount-arraylength", "VUID-vkRegisterObjectsNVX-ppObjectTableEntries-parameter");
- if (ppObjectTableEntries != NULL)
- {
- for (uint32_t objectIndex = 0; objectIndex < objectCount; ++objectIndex)
- {
- skip |= validate_ranged_enum("vkRegisterObjectsNVX", ParameterName("ppObjectTableEntries[%i]->type", ParameterName::IndexVector{ objectIndex }), "VkObjectEntryTypeNVX", AllVkObjectEntryTypeNVXEnums, ppObjectTableEntries[objectIndex]->type, "VUID-VkObjectTableEntryNVX-type-parameter");
-
- skip |= validate_flags("vkRegisterObjectsNVX", ParameterName("ppObjectTableEntries[%i]->flags", ParameterName::IndexVector{ objectIndex }), "VkObjectEntryUsageFlagBitsNVX", AllVkObjectEntryUsageFlagBitsNVX, ppObjectTableEntries[objectIndex]->flags, kRequiredFlags, "VUID-VkObjectTableEntryNVX-flags-parameter", "VUID-VkObjectTableEntryNVX-flags-requiredbitmask");
- }
- }
- skip |= validate_array("vkRegisterObjectsNVX", "objectCount", "pObjectIndices", objectCount, &pObjectIndices, true, true, "VUID-vkRegisterObjectsNVX-objectCount-arraylength", "VUID-vkRegisterObjectsNVX-pObjectIndices-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateUnregisterObjectsNVX(
- VkDevice device,
- VkObjectTableNVX objectTable,
- uint32_t objectCount,
- const VkObjectEntryTypeNVX* pObjectEntryTypes,
- const uint32_t* pObjectIndices) {
- bool skip = false;
- if (!device_extensions.vk_nvx_device_generated_commands) skip |= OutputExtensionError("vkUnregisterObjectsNVX", VK_NVX_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME);
- skip |= validate_required_handle("vkUnregisterObjectsNVX", "objectTable", objectTable);
- skip |= validate_ranged_enum_array("vkUnregisterObjectsNVX", "objectCount", "pObjectEntryTypes", "VkObjectEntryTypeNVX", AllVkObjectEntryTypeNVXEnums, objectCount, pObjectEntryTypes, true, true);
- skip |= validate_array("vkUnregisterObjectsNVX", "objectCount", "pObjectIndices", objectCount, &pObjectIndices, true, true, "VUID-vkUnregisterObjectsNVX-objectCount-arraylength", "VUID-vkUnregisterObjectsNVX-pObjectIndices-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceGeneratedCommandsPropertiesNVX(
- VkPhysicalDevice physicalDevice,
- VkDeviceGeneratedCommandsFeaturesNVX* pFeatures,
- VkDeviceGeneratedCommandsLimitsNVX* pLimits) {
- bool skip = false;
- skip |= validate_struct_type("vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX", "pFeatures", "VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX", pFeatures, VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX, true, "VUID-vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX-pFeatures-parameter", "VUID-VkDeviceGeneratedCommandsFeaturesNVX-sType-sType");
- skip |= validate_struct_type("vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX", "pLimits", "VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX", pLimits, VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX, true, "VUID-vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX-pLimits-parameter", "VUID-VkDeviceGeneratedCommandsLimitsNVX-sType-sType");
- return skip;
-}
-
-
-
-bool StatelessValidation::PreCallValidateCmdSetViewportWScalingNV(
- VkCommandBuffer commandBuffer,
- uint32_t firstViewport,
- uint32_t viewportCount,
- const VkViewportWScalingNV* pViewportWScalings) {
- bool skip = false;
- if (!device_extensions.vk_nv_clip_space_w_scaling) skip |= OutputExtensionError("vkCmdSetViewportWScalingNV", VK_NV_CLIP_SPACE_W_SCALING_EXTENSION_NAME);
- skip |= validate_array("vkCmdSetViewportWScalingNV", "viewportCount", "pViewportWScalings", viewportCount, &pViewportWScalings, true, true, "VUID-vkCmdSetViewportWScalingNV-viewportCount-arraylength", "VUID-vkCmdSetViewportWScalingNV-pViewportWScalings-parameter");
- if (pViewportWScalings != NULL)
- {
- for (uint32_t viewportIndex = 0; viewportIndex < viewportCount; ++viewportIndex)
- {
- // No xml-driven validation
- }
- }
- return skip;
-}
-
-
-
-bool StatelessValidation::PreCallValidateReleaseDisplayEXT(
- VkPhysicalDevice physicalDevice,
- VkDisplayKHR display) {
- bool skip = false;
- if (!instance_extensions.vk_khr_display) skip |= OutputExtensionError("vkReleaseDisplayEXT", VK_KHR_DISPLAY_EXTENSION_NAME);
- if (!instance_extensions.vk_ext_direct_mode_display) skip |= OutputExtensionError("vkReleaseDisplayEXT", VK_EXT_DIRECT_MODE_DISPLAY_EXTENSION_NAME);
- skip |= validate_required_handle("vkReleaseDisplayEXT", "display", display);
- return skip;
-}
-
-
-
-#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
-
-bool StatelessValidation::PreCallValidateAcquireXlibDisplayEXT(
- VkPhysicalDevice physicalDevice,
- Display* dpy,
- VkDisplayKHR display) {
- bool skip = false;
- if (!instance_extensions.vk_ext_direct_mode_display) skip |= OutputExtensionError("vkAcquireXlibDisplayEXT", VK_EXT_DIRECT_MODE_DISPLAY_EXTENSION_NAME);
- if (!instance_extensions.vk_ext_acquire_xlib_display) skip |= OutputExtensionError("vkAcquireXlibDisplayEXT", VK_EXT_ACQUIRE_XLIB_DISPLAY_EXTENSION_NAME);
- skip |= validate_required_pointer("vkAcquireXlibDisplayEXT", "dpy", dpy, "VUID-vkAcquireXlibDisplayEXT-dpy-parameter");
- skip |= validate_required_handle("vkAcquireXlibDisplayEXT", "display", display);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetRandROutputDisplayEXT(
- VkPhysicalDevice physicalDevice,
- Display* dpy,
- RROutput rrOutput,
- VkDisplayKHR* pDisplay) {
- bool skip = false;
- if (!instance_extensions.vk_ext_direct_mode_display) skip |= OutputExtensionError("vkGetRandROutputDisplayEXT", VK_EXT_DIRECT_MODE_DISPLAY_EXTENSION_NAME);
- if (!instance_extensions.vk_ext_acquire_xlib_display) skip |= OutputExtensionError("vkGetRandROutputDisplayEXT", VK_EXT_ACQUIRE_XLIB_DISPLAY_EXTENSION_NAME);
- skip |= validate_required_pointer("vkGetRandROutputDisplayEXT", "dpy", dpy, "VUID-vkGetRandROutputDisplayEXT-dpy-parameter");
- skip |= validate_required_pointer("vkGetRandROutputDisplayEXT", "pDisplay", pDisplay, "VUID-vkGetRandROutputDisplayEXT-pDisplay-parameter");
- return skip;
-}
-
-#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceSurfaceCapabilities2EXT(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- VkSurfaceCapabilities2EXT* pSurfaceCapabilities) {
- bool skip = false;
- if (!instance_extensions.vk_khr_display) skip |= OutputExtensionError("vkGetPhysicalDeviceSurfaceCapabilities2EXT", VK_KHR_DISPLAY_EXTENSION_NAME);
- if (!instance_extensions.vk_ext_display_surface_counter) skip |= OutputExtensionError("vkGetPhysicalDeviceSurfaceCapabilities2EXT", VK_EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME);
- skip |= validate_required_handle("vkGetPhysicalDeviceSurfaceCapabilities2EXT", "surface", surface);
- skip |= validate_struct_type("vkGetPhysicalDeviceSurfaceCapabilities2EXT", "pSurfaceCapabilities", "VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT", pSurfaceCapabilities, VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT, true, "VUID-vkGetPhysicalDeviceSurfaceCapabilities2EXT-pSurfaceCapabilities-parameter", "VUID-VkSurfaceCapabilities2EXT-sType-sType");
- if (pSurfaceCapabilities != NULL)
- {
- // No xml-driven validation
- }
- return skip;
-}
-
-
-
-bool StatelessValidation::PreCallValidateDisplayPowerControlEXT(
- VkDevice device,
- VkDisplayKHR display,
- const VkDisplayPowerInfoEXT* pDisplayPowerInfo) {
- bool skip = false;
- if (!device_extensions.vk_khr_swapchain) skip |= OutputExtensionError("vkDisplayPowerControlEXT", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
- if (!device_extensions.vk_ext_display_surface_counter) skip |= OutputExtensionError("vkDisplayPowerControlEXT", VK_EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME);
- if (!device_extensions.vk_ext_display_control) skip |= OutputExtensionError("vkDisplayPowerControlEXT", VK_EXT_DISPLAY_CONTROL_EXTENSION_NAME);
- skip |= validate_required_handle("vkDisplayPowerControlEXT", "display", display);
- skip |= validate_struct_type("vkDisplayPowerControlEXT", "pDisplayPowerInfo", "VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT", pDisplayPowerInfo, VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT, true, "VUID-vkDisplayPowerControlEXT-pDisplayPowerInfo-parameter", "VUID-VkDisplayPowerInfoEXT-sType-sType");
- if (pDisplayPowerInfo != NULL)
- {
- skip |= validate_struct_pnext("vkDisplayPowerControlEXT", "pDisplayPowerInfo->pNext", NULL, pDisplayPowerInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDisplayPowerInfoEXT-pNext-pNext");
-
- skip |= validate_ranged_enum("vkDisplayPowerControlEXT", "pDisplayPowerInfo->powerState", "VkDisplayPowerStateEXT", AllVkDisplayPowerStateEXTEnums, pDisplayPowerInfo->powerState, "VUID-VkDisplayPowerInfoEXT-powerState-parameter");
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateRegisterDeviceEventEXT(
- VkDevice device,
- const VkDeviceEventInfoEXT* pDeviceEventInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFence* pFence) {
- bool skip = false;
- if (!device_extensions.vk_khr_swapchain) skip |= OutputExtensionError("vkRegisterDeviceEventEXT", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
- if (!device_extensions.vk_ext_display_surface_counter) skip |= OutputExtensionError("vkRegisterDeviceEventEXT", VK_EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME);
- if (!device_extensions.vk_ext_display_control) skip |= OutputExtensionError("vkRegisterDeviceEventEXT", VK_EXT_DISPLAY_CONTROL_EXTENSION_NAME);
- skip |= validate_struct_type("vkRegisterDeviceEventEXT", "pDeviceEventInfo", "VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT", pDeviceEventInfo, VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT, true, "VUID-vkRegisterDeviceEventEXT-pDeviceEventInfo-parameter", "VUID-VkDeviceEventInfoEXT-sType-sType");
- if (pDeviceEventInfo != NULL)
- {
- skip |= validate_struct_pnext("vkRegisterDeviceEventEXT", "pDeviceEventInfo->pNext", NULL, pDeviceEventInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDeviceEventInfoEXT-pNext-pNext");
-
- skip |= validate_ranged_enum("vkRegisterDeviceEventEXT", "pDeviceEventInfo->deviceEvent", "VkDeviceEventTypeEXT", AllVkDeviceEventTypeEXTEnums, pDeviceEventInfo->deviceEvent, "VUID-VkDeviceEventInfoEXT-deviceEvent-parameter");
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkRegisterDeviceEventEXT", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkRegisterDeviceEventEXT", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkRegisterDeviceEventEXT", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkRegisterDeviceEventEXT", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkRegisterDeviceEventEXT", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkRegisterDeviceEventEXT", "pFence", pFence, "VUID-vkRegisterDeviceEventEXT-pFence-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateRegisterDisplayEventEXT(
- VkDevice device,
- VkDisplayKHR display,
- const VkDisplayEventInfoEXT* pDisplayEventInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFence* pFence) {
- bool skip = false;
- if (!device_extensions.vk_khr_swapchain) skip |= OutputExtensionError("vkRegisterDisplayEventEXT", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
- if (!device_extensions.vk_ext_display_surface_counter) skip |= OutputExtensionError("vkRegisterDisplayEventEXT", VK_EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME);
- if (!device_extensions.vk_ext_display_control) skip |= OutputExtensionError("vkRegisterDisplayEventEXT", VK_EXT_DISPLAY_CONTROL_EXTENSION_NAME);
- skip |= validate_required_handle("vkRegisterDisplayEventEXT", "display", display);
- skip |= validate_struct_type("vkRegisterDisplayEventEXT", "pDisplayEventInfo", "VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT", pDisplayEventInfo, VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT, true, "VUID-vkRegisterDisplayEventEXT-pDisplayEventInfo-parameter", "VUID-VkDisplayEventInfoEXT-sType-sType");
- if (pDisplayEventInfo != NULL)
- {
- skip |= validate_struct_pnext("vkRegisterDisplayEventEXT", "pDisplayEventInfo->pNext", NULL, pDisplayEventInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDisplayEventInfoEXT-pNext-pNext");
-
- skip |= validate_ranged_enum("vkRegisterDisplayEventEXT", "pDisplayEventInfo->displayEvent", "VkDisplayEventTypeEXT", AllVkDisplayEventTypeEXTEnums, pDisplayEventInfo->displayEvent, "VUID-VkDisplayEventInfoEXT-displayEvent-parameter");
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkRegisterDisplayEventEXT", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkRegisterDisplayEventEXT", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkRegisterDisplayEventEXT", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkRegisterDisplayEventEXT", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkRegisterDisplayEventEXT", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkRegisterDisplayEventEXT", "pFence", pFence, "VUID-vkRegisterDisplayEventEXT-pFence-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetSwapchainCounterEXT(
- VkDevice device,
- VkSwapchainKHR swapchain,
- VkSurfaceCounterFlagBitsEXT counter,
- uint64_t* pCounterValue) {
- bool skip = false;
- if (!device_extensions.vk_khr_swapchain) skip |= OutputExtensionError("vkGetSwapchainCounterEXT", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
- if (!device_extensions.vk_ext_display_surface_counter) skip |= OutputExtensionError("vkGetSwapchainCounterEXT", VK_EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME);
- if (!device_extensions.vk_ext_display_control) skip |= OutputExtensionError("vkGetSwapchainCounterEXT", VK_EXT_DISPLAY_CONTROL_EXTENSION_NAME);
- skip |= validate_required_handle("vkGetSwapchainCounterEXT", "swapchain", swapchain);
- skip |= validate_flags("vkGetSwapchainCounterEXT", "counter", "VkSurfaceCounterFlagBitsEXT", AllVkSurfaceCounterFlagBitsEXT, counter, kRequiredSingleBit, "VUID-vkGetSwapchainCounterEXT-counter-parameter", "VUID-vkGetSwapchainCounterEXT-counter-parameter");
- skip |= validate_required_pointer("vkGetSwapchainCounterEXT", "pCounterValue", pCounterValue, "VUID-vkGetSwapchainCounterEXT-pCounterValue-parameter");
- return skip;
-}
-
-
-
-bool StatelessValidation::PreCallValidateGetRefreshCycleDurationGOOGLE(
- VkDevice device,
- VkSwapchainKHR swapchain,
- VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties) {
- bool skip = false;
- if (!device_extensions.vk_khr_swapchain) skip |= OutputExtensionError("vkGetRefreshCycleDurationGOOGLE", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
- if (!device_extensions.vk_google_display_timing) skip |= OutputExtensionError("vkGetRefreshCycleDurationGOOGLE", VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME);
- skip |= validate_required_handle("vkGetRefreshCycleDurationGOOGLE", "swapchain", swapchain);
- skip |= validate_required_pointer("vkGetRefreshCycleDurationGOOGLE", "pDisplayTimingProperties", pDisplayTimingProperties, "VUID-vkGetRefreshCycleDurationGOOGLE-pDisplayTimingProperties-parameter");
- if (pDisplayTimingProperties != NULL)
- {
- // No xml-driven validation
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetPastPresentationTimingGOOGLE(
- VkDevice device,
- VkSwapchainKHR swapchain,
- uint32_t* pPresentationTimingCount,
- VkPastPresentationTimingGOOGLE* pPresentationTimings) {
- bool skip = false;
- if (!device_extensions.vk_khr_swapchain) skip |= OutputExtensionError("vkGetPastPresentationTimingGOOGLE", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
- if (!device_extensions.vk_google_display_timing) skip |= OutputExtensionError("vkGetPastPresentationTimingGOOGLE", VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME);
- skip |= validate_required_handle("vkGetPastPresentationTimingGOOGLE", "swapchain", swapchain);
- skip |= validate_array("vkGetPastPresentationTimingGOOGLE", "pPresentationTimingCount", "pPresentationTimings", pPresentationTimingCount, &pPresentationTimings, true, false, false, kVUIDUndefined, "VUID-vkGetPastPresentationTimingGOOGLE-pPresentationTimings-parameter");
- if (pPresentationTimings != NULL)
- {
- for (uint32_t pPresentationTimingIndex = 0; pPresentationTimingIndex < *pPresentationTimingCount; ++pPresentationTimingIndex)
- {
- // No xml-driven validation
- }
- }
- return skip;
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-bool StatelessValidation::PreCallValidateCmdSetDiscardRectangleEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstDiscardRectangle,
- uint32_t discardRectangleCount,
- const VkRect2D* pDiscardRectangles) {
- bool skip = false;
- if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCmdSetDiscardRectangleEXT", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_ext_discard_rectangles) skip |= OutputExtensionError("vkCmdSetDiscardRectangleEXT", VK_EXT_DISCARD_RECTANGLES_EXTENSION_NAME);
- skip |= validate_array("vkCmdSetDiscardRectangleEXT", "discardRectangleCount", "pDiscardRectangles", discardRectangleCount, &pDiscardRectangles, true, true, "VUID-vkCmdSetDiscardRectangleEXT-discardRectangleCount-arraylength", "VUID-vkCmdSetDiscardRectangleEXT-pDiscardRectangles-parameter");
- if (pDiscardRectangles != NULL)
- {
- for (uint32_t discardRectangleIndex = 0; discardRectangleIndex < discardRectangleCount; ++discardRectangleIndex)
- {
- // No xml-driven validation
-
- // No xml-driven validation
- }
- }
- return skip;
-}
-
-
-
-
-
-
-
-
-
-bool StatelessValidation::PreCallValidateSetHdrMetadataEXT(
- VkDevice device,
- uint32_t swapchainCount,
- const VkSwapchainKHR* pSwapchains,
- const VkHdrMetadataEXT* pMetadata) {
- bool skip = false;
- if (!device_extensions.vk_khr_swapchain) skip |= OutputExtensionError("vkSetHdrMetadataEXT", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
- if (!device_extensions.vk_ext_hdr_metadata) skip |= OutputExtensionError("vkSetHdrMetadataEXT", VK_EXT_HDR_METADATA_EXTENSION_NAME);
- skip |= validate_handle_array("vkSetHdrMetadataEXT", "swapchainCount", "pSwapchains", swapchainCount, pSwapchains, true, true);
- skip |= validate_struct_type_array("vkSetHdrMetadataEXT", "swapchainCount", "pMetadata", "VK_STRUCTURE_TYPE_HDR_METADATA_EXT", swapchainCount, pMetadata, VK_STRUCTURE_TYPE_HDR_METADATA_EXT, true, true, "VUID-VkHdrMetadataEXT-sType-sType", "VUID-vkSetHdrMetadataEXT-pMetadata-parameter", "VUID-vkSetHdrMetadataEXT-swapchainCount-arraylength");
- if (pMetadata != NULL)
- {
- for (uint32_t swapchainIndex = 0; swapchainIndex < swapchainCount; ++swapchainIndex)
- {
- skip |= validate_struct_pnext("vkSetHdrMetadataEXT", ParameterName("pMetadata[%i].pNext", ParameterName::IndexVector{ swapchainIndex }), NULL, pMetadata[swapchainIndex].pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkHdrMetadataEXT-pNext-pNext");
- }
- }
- return skip;
-}
-
-
-
-#ifdef VK_USE_PLATFORM_IOS_MVK
-
-bool StatelessValidation::PreCallValidateCreateIOSSurfaceMVK(
- VkInstance instance,
- const VkIOSSurfaceCreateInfoMVK* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- bool skip = false;
- if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkCreateIOSSurfaceMVK", VK_KHR_SURFACE_EXTENSION_NAME);
- if (!instance_extensions.vk_mvk_ios_surface) skip |= OutputExtensionError("vkCreateIOSSurfaceMVK", VK_MVK_IOS_SURFACE_EXTENSION_NAME);
- skip |= validate_struct_type("vkCreateIOSSurfaceMVK", "pCreateInfo", "VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK", pCreateInfo, VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK, true, "VUID-vkCreateIOSSurfaceMVK-pCreateInfo-parameter", "VUID-VkIOSSurfaceCreateInfoMVK-sType-sType");
- if (pCreateInfo != NULL)
- {
- skip |= validate_struct_pnext("vkCreateIOSSurfaceMVK", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkIOSSurfaceCreateInfoMVK-pNext-pNext");
-
- skip |= validate_reserved_flags("vkCreateIOSSurfaceMVK", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkIOSSurfaceCreateInfoMVK-flags-zerobitmask");
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreateIOSSurfaceMVK", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreateIOSSurfaceMVK", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreateIOSSurfaceMVK", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreateIOSSurfaceMVK", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreateIOSSurfaceMVK", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkCreateIOSSurfaceMVK", "pSurface", pSurface, "VUID-vkCreateIOSSurfaceMVK-pSurface-parameter");
- return skip;
-}
-
-#endif // VK_USE_PLATFORM_IOS_MVK
-
-#ifdef VK_USE_PLATFORM_MACOS_MVK
-
-bool StatelessValidation::PreCallValidateCreateMacOSSurfaceMVK(
- VkInstance instance,
- const VkMacOSSurfaceCreateInfoMVK* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- bool skip = false;
- if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkCreateMacOSSurfaceMVK", VK_KHR_SURFACE_EXTENSION_NAME);
- if (!instance_extensions.vk_mvk_macos_surface) skip |= OutputExtensionError("vkCreateMacOSSurfaceMVK", VK_MVK_MACOS_SURFACE_EXTENSION_NAME);
- skip |= validate_struct_type("vkCreateMacOSSurfaceMVK", "pCreateInfo", "VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK", pCreateInfo, VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK, true, "VUID-vkCreateMacOSSurfaceMVK-pCreateInfo-parameter", "VUID-VkMacOSSurfaceCreateInfoMVK-sType-sType");
- if (pCreateInfo != NULL)
- {
- skip |= validate_struct_pnext("vkCreateMacOSSurfaceMVK", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkMacOSSurfaceCreateInfoMVK-pNext-pNext");
-
- skip |= validate_reserved_flags("vkCreateMacOSSurfaceMVK", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkMacOSSurfaceCreateInfoMVK-flags-zerobitmask");
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreateMacOSSurfaceMVK", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreateMacOSSurfaceMVK", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreateMacOSSurfaceMVK", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreateMacOSSurfaceMVK", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreateMacOSSurfaceMVK", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkCreateMacOSSurfaceMVK", "pSurface", pSurface, "VUID-vkCreateMacOSSurfaceMVK-pSurface-parameter");
- return skip;
-}
-
-#endif // VK_USE_PLATFORM_MACOS_MVK
-
-
-
-
-
-bool StatelessValidation::PreCallValidateSetDebugUtilsObjectNameEXT(
- VkDevice device,
- const VkDebugUtilsObjectNameInfoEXT* pNameInfo) {
- bool skip = false;
- if (!device_extensions.vk_ext_debug_utils) skip |= OutputExtensionError("vkSetDebugUtilsObjectNameEXT", VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
- skip |= validate_struct_type("vkSetDebugUtilsObjectNameEXT", "pNameInfo", "VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT", pNameInfo, VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT, true, "VUID-vkSetDebugUtilsObjectNameEXT-pNameInfo-parameter", "VUID-VkDebugUtilsObjectNameInfoEXT-sType-sType");
- if (pNameInfo != NULL)
- {
- skip |= validate_struct_pnext("vkSetDebugUtilsObjectNameEXT", "pNameInfo->pNext", NULL, pNameInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDebugUtilsObjectNameInfoEXT-pNext-pNext");
-
- skip |= validate_ranged_enum("vkSetDebugUtilsObjectNameEXT", "pNameInfo->objectType", "VkObjectType", AllVkObjectTypeEnums, pNameInfo->objectType, "VUID-VkDebugUtilsObjectNameInfoEXT-objectType-parameter");
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateSetDebugUtilsObjectTagEXT(
- VkDevice device,
- const VkDebugUtilsObjectTagInfoEXT* pTagInfo) {
- bool skip = false;
- if (!device_extensions.vk_ext_debug_utils) skip |= OutputExtensionError("vkSetDebugUtilsObjectTagEXT", VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
- skip |= validate_struct_type("vkSetDebugUtilsObjectTagEXT", "pTagInfo", "VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT", pTagInfo, VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT, true, "VUID-vkSetDebugUtilsObjectTagEXT-pTagInfo-parameter", "VUID-VkDebugUtilsObjectTagInfoEXT-sType-sType");
- if (pTagInfo != NULL)
- {
- skip |= validate_struct_pnext("vkSetDebugUtilsObjectTagEXT", "pTagInfo->pNext", NULL, pTagInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDebugUtilsObjectTagInfoEXT-pNext-pNext");
-
- skip |= validate_ranged_enum("vkSetDebugUtilsObjectTagEXT", "pTagInfo->objectType", "VkObjectType", AllVkObjectTypeEnums, pTagInfo->objectType, "VUID-VkDebugUtilsObjectTagInfoEXT-objectType-parameter");
-
- skip |= validate_array("vkSetDebugUtilsObjectTagEXT", "pTagInfo->tagSize", "pTagInfo->pTag", pTagInfo->tagSize, &pTagInfo->pTag, true, true, "VUID-VkDebugUtilsObjectTagInfoEXT-tagSize-arraylength", "VUID-VkDebugUtilsObjectTagInfoEXT-pTag-parameter");
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateQueueBeginDebugUtilsLabelEXT(
- VkQueue queue,
- const VkDebugUtilsLabelEXT* pLabelInfo) {
- bool skip = false;
- if (!device_extensions.vk_ext_debug_utils) skip |= OutputExtensionError("vkQueueBeginDebugUtilsLabelEXT", VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
- skip |= validate_struct_type("vkQueueBeginDebugUtilsLabelEXT", "pLabelInfo", "VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT", pLabelInfo, VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT, true, "VUID-vkQueueBeginDebugUtilsLabelEXT-pLabelInfo-parameter", "VUID-VkDebugUtilsLabelEXT-sType-sType");
- if (pLabelInfo != NULL)
- {
- skip |= validate_struct_pnext("vkQueueBeginDebugUtilsLabelEXT", "pLabelInfo->pNext", NULL, pLabelInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDebugUtilsLabelEXT-pNext-pNext");
-
- skip |= validate_required_pointer("vkQueueBeginDebugUtilsLabelEXT", "pLabelInfo->pLabelName", pLabelInfo->pLabelName, "VUID-VkDebugUtilsLabelEXT-pLabelName-parameter");
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateQueueEndDebugUtilsLabelEXT(
- VkQueue queue) {
- bool skip = false;
- if (!device_extensions.vk_ext_debug_utils) skip |= OutputExtensionError("vkQueueEndDebugUtilsLabelEXT", VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
- // No xml-driven validation
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateQueueInsertDebugUtilsLabelEXT(
- VkQueue queue,
- const VkDebugUtilsLabelEXT* pLabelInfo) {
- bool skip = false;
- if (!device_extensions.vk_ext_debug_utils) skip |= OutputExtensionError("vkQueueInsertDebugUtilsLabelEXT", VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
- skip |= validate_struct_type("vkQueueInsertDebugUtilsLabelEXT", "pLabelInfo", "VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT", pLabelInfo, VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT, true, "VUID-vkQueueInsertDebugUtilsLabelEXT-pLabelInfo-parameter", "VUID-VkDebugUtilsLabelEXT-sType-sType");
- if (pLabelInfo != NULL)
- {
- skip |= validate_struct_pnext("vkQueueInsertDebugUtilsLabelEXT", "pLabelInfo->pNext", NULL, pLabelInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDebugUtilsLabelEXT-pNext-pNext");
-
- skip |= validate_required_pointer("vkQueueInsertDebugUtilsLabelEXT", "pLabelInfo->pLabelName", pLabelInfo->pLabelName, "VUID-VkDebugUtilsLabelEXT-pLabelName-parameter");
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdBeginDebugUtilsLabelEXT(
- VkCommandBuffer commandBuffer,
- const VkDebugUtilsLabelEXT* pLabelInfo) {
- bool skip = false;
- if (!device_extensions.vk_ext_debug_utils) skip |= OutputExtensionError("vkCmdBeginDebugUtilsLabelEXT", VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
- skip |= validate_struct_type("vkCmdBeginDebugUtilsLabelEXT", "pLabelInfo", "VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT", pLabelInfo, VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT, true, "VUID-vkCmdBeginDebugUtilsLabelEXT-pLabelInfo-parameter", "VUID-VkDebugUtilsLabelEXT-sType-sType");
- if (pLabelInfo != NULL)
- {
- skip |= validate_struct_pnext("vkCmdBeginDebugUtilsLabelEXT", "pLabelInfo->pNext", NULL, pLabelInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDebugUtilsLabelEXT-pNext-pNext");
-
- skip |= validate_required_pointer("vkCmdBeginDebugUtilsLabelEXT", "pLabelInfo->pLabelName", pLabelInfo->pLabelName, "VUID-VkDebugUtilsLabelEXT-pLabelName-parameter");
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdEndDebugUtilsLabelEXT(
- VkCommandBuffer commandBuffer) {
- bool skip = false;
- if (!device_extensions.vk_ext_debug_utils) skip |= OutputExtensionError("vkCmdEndDebugUtilsLabelEXT", VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
- // No xml-driven validation
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdInsertDebugUtilsLabelEXT(
- VkCommandBuffer commandBuffer,
- const VkDebugUtilsLabelEXT* pLabelInfo) {
- bool skip = false;
- if (!device_extensions.vk_ext_debug_utils) skip |= OutputExtensionError("vkCmdInsertDebugUtilsLabelEXT", VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
- skip |= validate_struct_type("vkCmdInsertDebugUtilsLabelEXT", "pLabelInfo", "VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT", pLabelInfo, VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT, true, "VUID-vkCmdInsertDebugUtilsLabelEXT-pLabelInfo-parameter", "VUID-VkDebugUtilsLabelEXT-sType-sType");
- if (pLabelInfo != NULL)
- {
- skip |= validate_struct_pnext("vkCmdInsertDebugUtilsLabelEXT", "pLabelInfo->pNext", NULL, pLabelInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDebugUtilsLabelEXT-pNext-pNext");
-
- skip |= validate_required_pointer("vkCmdInsertDebugUtilsLabelEXT", "pLabelInfo->pLabelName", pLabelInfo->pLabelName, "VUID-VkDebugUtilsLabelEXT-pLabelName-parameter");
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCreateDebugUtilsMessengerEXT(
- VkInstance instance,
- const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDebugUtilsMessengerEXT* pMessenger) {
- bool skip = false;
- if (!instance_extensions.vk_ext_debug_utils) skip |= OutputExtensionError("vkCreateDebugUtilsMessengerEXT", VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
- skip |= validate_struct_type("vkCreateDebugUtilsMessengerEXT", "pCreateInfo", "VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT", pCreateInfo, VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT, true, "VUID-vkCreateDebugUtilsMessengerEXT-pCreateInfo-parameter", "VUID-VkDebugUtilsMessengerCreateInfoEXT-sType-sType");
- if (pCreateInfo != NULL)
- {
- skip |= validate_struct_pnext("vkCreateDebugUtilsMessengerEXT", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, kVUIDUndefined);
-
- skip |= validate_reserved_flags("vkCreateDebugUtilsMessengerEXT", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkDebugUtilsMessengerCreateInfoEXT-flags-zerobitmask");
-
- skip |= validate_flags("vkCreateDebugUtilsMessengerEXT", "pCreateInfo->messageSeverity", "VkDebugUtilsMessageSeverityFlagBitsEXT", AllVkDebugUtilsMessageSeverityFlagBitsEXT, pCreateInfo->messageSeverity, kRequiredFlags, "VUID-VkDebugUtilsMessengerCreateInfoEXT-messageSeverity-parameter", "VUID-VkDebugUtilsMessengerCreateInfoEXT-messageSeverity-requiredbitmask");
-
- skip |= validate_flags("vkCreateDebugUtilsMessengerEXT", "pCreateInfo->messageType", "VkDebugUtilsMessageTypeFlagBitsEXT", AllVkDebugUtilsMessageTypeFlagBitsEXT, pCreateInfo->messageType, kRequiredFlags, "VUID-VkDebugUtilsMessengerCreateInfoEXT-messageType-parameter", "VUID-VkDebugUtilsMessengerCreateInfoEXT-messageType-requiredbitmask");
-
- skip |= validate_required_pointer("vkCreateDebugUtilsMessengerEXT", "pCreateInfo->pfnUserCallback", reinterpret_cast<const void*>(pCreateInfo->pfnUserCallback), "VUID-VkDebugUtilsMessengerCreateInfoEXT-pfnUserCallback-parameter");
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreateDebugUtilsMessengerEXT", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreateDebugUtilsMessengerEXT", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreateDebugUtilsMessengerEXT", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreateDebugUtilsMessengerEXT", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreateDebugUtilsMessengerEXT", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkCreateDebugUtilsMessengerEXT", "pMessenger", pMessenger, "VUID-vkCreateDebugUtilsMessengerEXT-pMessenger-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateDestroyDebugUtilsMessengerEXT(
- VkInstance instance,
- VkDebugUtilsMessengerEXT messenger,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- if (!instance_extensions.vk_ext_debug_utils) skip |= OutputExtensionError("vkDestroyDebugUtilsMessengerEXT", VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
- skip |= validate_required_handle("vkDestroyDebugUtilsMessengerEXT", "messenger", messenger);
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkDestroyDebugUtilsMessengerEXT", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkDestroyDebugUtilsMessengerEXT", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkDestroyDebugUtilsMessengerEXT", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkDestroyDebugUtilsMessengerEXT", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkDestroyDebugUtilsMessengerEXT", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateSubmitDebugUtilsMessageEXT(
- VkInstance instance,
- VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
- VkDebugUtilsMessageTypeFlagsEXT messageTypes,
- const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData) {
- bool skip = false;
- if (!instance_extensions.vk_ext_debug_utils) skip |= OutputExtensionError("vkSubmitDebugUtilsMessageEXT", VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
- skip |= validate_flags("vkSubmitDebugUtilsMessageEXT", "messageSeverity", "VkDebugUtilsMessageSeverityFlagBitsEXT", AllVkDebugUtilsMessageSeverityFlagBitsEXT, messageSeverity, kRequiredSingleBit, "VUID-vkSubmitDebugUtilsMessageEXT-messageSeverity-parameter", "VUID-vkSubmitDebugUtilsMessageEXT-messageSeverity-parameter");
- skip |= validate_flags("vkSubmitDebugUtilsMessageEXT", "messageTypes", "VkDebugUtilsMessageTypeFlagBitsEXT", AllVkDebugUtilsMessageTypeFlagBitsEXT, messageTypes, kRequiredFlags, "VUID-vkSubmitDebugUtilsMessageEXT-messageTypes-parameter", "VUID-vkSubmitDebugUtilsMessageEXT-messageTypes-requiredbitmask");
- skip |= validate_struct_type("vkSubmitDebugUtilsMessageEXT", "pCallbackData", "VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT", pCallbackData, VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT, true, "VUID-vkSubmitDebugUtilsMessageEXT-pCallbackData-parameter", "VUID-VkDebugUtilsMessengerCallbackDataEXT-sType-sType");
- if (pCallbackData != NULL)
- {
- skip |= validate_struct_pnext("vkSubmitDebugUtilsMessageEXT", "pCallbackData->pNext", NULL, pCallbackData->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDebugUtilsMessengerCallbackDataEXT-pNext-pNext");
-
- skip |= validate_reserved_flags("vkSubmitDebugUtilsMessageEXT", "pCallbackData->flags", pCallbackData->flags, "VUID-VkDebugUtilsMessengerCallbackDataEXT-flags-zerobitmask");
-
- skip |= validate_required_pointer("vkSubmitDebugUtilsMessageEXT", "pCallbackData->pMessage", pCallbackData->pMessage, "VUID-VkDebugUtilsMessengerCallbackDataEXT-pMessage-parameter");
-
- skip |= validate_struct_type_array("vkSubmitDebugUtilsMessageEXT", "pCallbackData->queueLabelCount", "pCallbackData->pQueueLabels", "VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT", pCallbackData->queueLabelCount, pCallbackData->pQueueLabels, VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT, false, true, "VUID-VkDebugUtilsLabelEXT-sType-sType", "VUID-VkDebugUtilsMessengerCallbackDataEXT-pQueueLabels-parameter", kVUIDUndefined);
-
- if (pCallbackData->pQueueLabels != NULL)
- {
- for (uint32_t queueLabelIndex = 0; queueLabelIndex < pCallbackData->queueLabelCount; ++queueLabelIndex)
- {
- skip |= validate_struct_pnext("vkSubmitDebugUtilsMessageEXT", ParameterName("pCallbackData->pQueueLabels[%i].pNext", ParameterName::IndexVector{ queueLabelIndex }), NULL, pCallbackData->pQueueLabels[queueLabelIndex].pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDebugUtilsLabelEXT-pNext-pNext");
-
- skip |= validate_required_pointer("vkSubmitDebugUtilsMessageEXT", ParameterName("pCallbackData->pQueueLabels[%i].pLabelName", ParameterName::IndexVector{ queueLabelIndex }), pCallbackData->pQueueLabels[queueLabelIndex].pLabelName, "VUID-VkDebugUtilsLabelEXT-pLabelName-parameter");
- }
- }
-
- skip |= validate_struct_type_array("vkSubmitDebugUtilsMessageEXT", "pCallbackData->cmdBufLabelCount", "pCallbackData->pCmdBufLabels", "VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT", pCallbackData->cmdBufLabelCount, pCallbackData->pCmdBufLabels, VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT, false, true, "VUID-VkDebugUtilsLabelEXT-sType-sType", "VUID-VkDebugUtilsMessengerCallbackDataEXT-pCmdBufLabels-parameter", kVUIDUndefined);
-
- if (pCallbackData->pCmdBufLabels != NULL)
- {
- for (uint32_t cmdBufLabelIndex = 0; cmdBufLabelIndex < pCallbackData->cmdBufLabelCount; ++cmdBufLabelIndex)
- {
- skip |= validate_struct_pnext("vkSubmitDebugUtilsMessageEXT", ParameterName("pCallbackData->pCmdBufLabels[%i].pNext", ParameterName::IndexVector{ cmdBufLabelIndex }), NULL, pCallbackData->pCmdBufLabels[cmdBufLabelIndex].pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDebugUtilsLabelEXT-pNext-pNext");
-
- skip |= validate_required_pointer("vkSubmitDebugUtilsMessageEXT", ParameterName("pCallbackData->pCmdBufLabels[%i].pLabelName", ParameterName::IndexVector{ cmdBufLabelIndex }), pCallbackData->pCmdBufLabels[cmdBufLabelIndex].pLabelName, "VUID-VkDebugUtilsLabelEXT-pLabelName-parameter");
- }
- }
-
- skip |= validate_struct_type_array("vkSubmitDebugUtilsMessageEXT", "pCallbackData->objectCount", "pCallbackData->pObjects", "VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT", pCallbackData->objectCount, pCallbackData->pObjects, VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT, false, true, "VUID-VkDebugUtilsObjectNameInfoEXT-sType-sType", "VUID-VkDebugUtilsMessengerCallbackDataEXT-pObjects-parameter", kVUIDUndefined);
-
- if (pCallbackData->pObjects != NULL)
- {
- for (uint32_t objectIndex = 0; objectIndex < pCallbackData->objectCount; ++objectIndex)
- {
- skip |= validate_struct_pnext("vkSubmitDebugUtilsMessageEXT", ParameterName("pCallbackData->pObjects[%i].pNext", ParameterName::IndexVector{ objectIndex }), NULL, pCallbackData->pObjects[objectIndex].pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDebugUtilsObjectNameInfoEXT-pNext-pNext");
-
- skip |= validate_ranged_enum("vkSubmitDebugUtilsMessageEXT", ParameterName("pCallbackData->pObjects[%i].objectType", ParameterName::IndexVector{ objectIndex }), "VkObjectType", AllVkObjectTypeEnums, pCallbackData->pObjects[objectIndex].objectType, "VUID-VkDebugUtilsObjectNameInfoEXT-objectType-parameter");
- }
- }
- }
- return skip;
-}
-
-
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-
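-// The AHardwareBuffer interop entry points are only compiled for VK_USE_PLATFORM_ANDROID_KHR builds;
-// each begins by reporting any missing device extension before checking its own parameters.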
-bool StatelessValidation::PreCallValidateGetAndroidHardwareBufferPropertiesANDROID(
- VkDevice device,
- const struct AHardwareBuffer* buffer,
- VkAndroidHardwareBufferPropertiesANDROID* pProperties) {
- bool skip = false;
- if (!device_extensions.vk_ext_queue_family_foreign) skip |= OutputExtensionError("vkGetAndroidHardwareBufferPropertiesANDROID", VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME);
- if (!device_extensions.vk_khr_external_memory) skip |= OutputExtensionError("vkGetAndroidHardwareBufferPropertiesANDROID", VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
- if (!device_extensions.vk_khr_sampler_ycbcr_conversion) skip |= OutputExtensionError("vkGetAndroidHardwareBufferPropertiesANDROID", VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
- if (!device_extensions.vk_android_external_memory_android_hardware_buffer) skip |= OutputExtensionError("vkGetAndroidHardwareBufferPropertiesANDROID", VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
- skip |= validate_required_pointer("vkGetAndroidHardwareBufferPropertiesANDROID", "buffer", buffer, "VUID-vkGetAndroidHardwareBufferPropertiesANDROID-buffer-parameter");
- skip |= validate_struct_type("vkGetAndroidHardwareBufferPropertiesANDROID", "pProperties", "VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID", pProperties, VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID, true, "VUID-vkGetAndroidHardwareBufferPropertiesANDROID-pProperties-parameter", "VUID-VkAndroidHardwareBufferPropertiesANDROID-sType-sType");
- if (pProperties != NULL)
- {
- // No xml-driven validation
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetMemoryAndroidHardwareBufferANDROID(
- VkDevice device,
- const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo,
- struct AHardwareBuffer** pBuffer) {
- bool skip = false;
- if (!device_extensions.vk_ext_queue_family_foreign) skip |= OutputExtensionError("vkGetMemoryAndroidHardwareBufferANDROID", VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME);
- if (!device_extensions.vk_khr_external_memory) skip |= OutputExtensionError("vkGetMemoryAndroidHardwareBufferANDROID", VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
- if (!device_extensions.vk_khr_sampler_ycbcr_conversion) skip |= OutputExtensionError("vkGetMemoryAndroidHardwareBufferANDROID", VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
- if (!device_extensions.vk_android_external_memory_android_hardware_buffer) skip |= OutputExtensionError("vkGetMemoryAndroidHardwareBufferANDROID", VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
- skip |= validate_struct_type("vkGetMemoryAndroidHardwareBufferANDROID", "pInfo", "VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID", pInfo, VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID, true, "VUID-vkGetMemoryAndroidHardwareBufferANDROID-pInfo-parameter", "VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-sType-sType");
- if (pInfo != NULL)
- {
- skip |= validate_struct_pnext("vkGetMemoryAndroidHardwareBufferANDROID", "pInfo->pNext", NULL, pInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-pNext-pNext");
-
- skip |= validate_required_handle("vkGetMemoryAndroidHardwareBufferANDROID", "pInfo->memory", pInfo->memory);
- }
- skip |= validate_required_pointer("vkGetMemoryAndroidHardwareBufferANDROID", "pBuffer", pBuffer, "VUID-vkGetMemoryAndroidHardwareBufferANDROID-pBuffer-parameter");
- return skip;
-}
-
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-
-
-
-
-
-
-
-
-
-
-
-
-
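-// vkCmdSetSampleLocationsEXT: pSampleLocationsInfo is checked for the expected sType, an empty pNext
-// chain, a sampleLocationsPerPixel value restricted to at most one valid VkSampleCountFlagBits bit,
-// and a pSampleLocations array consistent with sampleLocationsCount.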
-bool StatelessValidation::PreCallValidateCmdSetSampleLocationsEXT(
- VkCommandBuffer commandBuffer,
- const VkSampleLocationsInfoEXT* pSampleLocationsInfo) {
- bool skip = false;
- if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCmdSetSampleLocationsEXT", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_ext_sample_locations) skip |= OutputExtensionError("vkCmdSetSampleLocationsEXT", VK_EXT_SAMPLE_LOCATIONS_EXTENSION_NAME);
- skip |= validate_struct_type("vkCmdSetSampleLocationsEXT", "pSampleLocationsInfo", "VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT", pSampleLocationsInfo, VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT, true, "VUID-vkCmdSetSampleLocationsEXT-pSampleLocationsInfo-parameter", "VUID-VkSampleLocationsInfoEXT-sType-sType");
- if (pSampleLocationsInfo != NULL)
- {
- skip |= validate_struct_pnext("vkCmdSetSampleLocationsEXT", "pSampleLocationsInfo->pNext", NULL, pSampleLocationsInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, kVUIDUndefined);
-
- skip |= validate_flags("vkCmdSetSampleLocationsEXT", "pSampleLocationsInfo->sampleLocationsPerPixel", "VkSampleCountFlagBits", AllVkSampleCountFlagBits, pSampleLocationsInfo->sampleLocationsPerPixel, kOptionalSingleBit, "VUID-VkSampleLocationsInfoEXT-sampleLocationsPerPixel-parameter");
-
- // No xml-driven validation
-
- skip |= validate_array("vkCmdSetSampleLocationsEXT", "pSampleLocationsInfo->sampleLocationsCount", "pSampleLocationsInfo->pSampleLocations", pSampleLocationsInfo->sampleLocationsCount, &pSampleLocationsInfo->pSampleLocations, false, true, kVUIDUndefined, "VUID-VkSampleLocationsInfoEXT-pSampleLocations-parameter");
-
- if (pSampleLocationsInfo->pSampleLocations != NULL)
- {
- for (uint32_t sampleLocationsIndex = 0; sampleLocationsIndex < pSampleLocationsInfo->sampleLocationsCount; ++sampleLocationsIndex)
- {
- // No xml-driven validation
- }
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceMultisamplePropertiesEXT(
- VkPhysicalDevice physicalDevice,
- VkSampleCountFlagBits samples,
- VkMultisamplePropertiesEXT* pMultisampleProperties) {
- bool skip = false;
- skip |= validate_flags("vkGetPhysicalDeviceMultisamplePropertiesEXT", "samples", "VkSampleCountFlagBits", AllVkSampleCountFlagBits, samples, kRequiredSingleBit, "VUID-vkGetPhysicalDeviceMultisamplePropertiesEXT-samples-parameter", "VUID-vkGetPhysicalDeviceMultisamplePropertiesEXT-samples-parameter");
- skip |= validate_struct_type("vkGetPhysicalDeviceMultisamplePropertiesEXT", "pMultisampleProperties", "VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT", pMultisampleProperties, VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT, true, "VUID-vkGetPhysicalDeviceMultisamplePropertiesEXT-pMultisampleProperties-parameter", "VUID-VkMultisamplePropertiesEXT-sType-sType");
- if (pMultisampleProperties != NULL)
- {
- // No xml-driven validation
- }
- return skip;
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
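-// vkGetImageDrmFormatModifierPropertiesEXT: all five dependency extensions must be enabled, the image
-// handle must be valid, and pProperties must use VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT.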
-bool StatelessValidation::PreCallValidateGetImageDrmFormatModifierPropertiesEXT(
- VkDevice device,
- VkImage image,
- VkImageDrmFormatModifierPropertiesEXT* pProperties) {
- bool skip = false;
- if (!device_extensions.vk_khr_sampler_ycbcr_conversion) skip |= OutputExtensionError("vkGetImageDrmFormatModifierPropertiesEXT", VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
- if (!device_extensions.vk_khr_image_format_list) skip |= OutputExtensionError("vkGetImageDrmFormatModifierPropertiesEXT", VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME);
- if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkGetImageDrmFormatModifierPropertiesEXT", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_khr_bind_memory_2) skip |= OutputExtensionError("vkGetImageDrmFormatModifierPropertiesEXT", VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
- if (!device_extensions.vk_ext_image_drm_format_modifier) skip |= OutputExtensionError("vkGetImageDrmFormatModifierPropertiesEXT", VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME);
- skip |= validate_required_handle("vkGetImageDrmFormatModifierPropertiesEXT", "image", image);
- skip |= validate_struct_type("vkGetImageDrmFormatModifierPropertiesEXT", "pProperties", "VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT", pProperties, VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT, true, "VUID-vkGetImageDrmFormatModifierPropertiesEXT-pProperties-parameter", "VUID-VkImageDrmFormatModifierPropertiesEXT-sType-sType");
- if (pProperties != NULL)
- {
- // No xml-driven validation
- }
- return skip;
-}
-
-
-
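-// VK_EXT_validation_cache entry points: each verifies the extension is enabled; pCreateInfo->flags is
-// reserved (must be zero) and pInitialData is validated as an array sized by initialDataSize.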
-bool StatelessValidation::PreCallValidateCreateValidationCacheEXT(
- VkDevice device,
- const VkValidationCacheCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkValidationCacheEXT* pValidationCache) {
- bool skip = false;
- if (!device_extensions.vk_ext_validation_cache) skip |= OutputExtensionError("vkCreateValidationCacheEXT", VK_EXT_VALIDATION_CACHE_EXTENSION_NAME);
- skip |= validate_struct_type("vkCreateValidationCacheEXT", "pCreateInfo", "VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT", pCreateInfo, VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT, true, "VUID-vkCreateValidationCacheEXT-pCreateInfo-parameter", "VUID-VkValidationCacheCreateInfoEXT-sType-sType");
- if (pCreateInfo != NULL)
- {
- skip |= validate_struct_pnext("vkCreateValidationCacheEXT", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkValidationCacheCreateInfoEXT-pNext-pNext");
-
- skip |= validate_reserved_flags("vkCreateValidationCacheEXT", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkValidationCacheCreateInfoEXT-flags-zerobitmask");
-
- skip |= validate_array("vkCreateValidationCacheEXT", "pCreateInfo->initialDataSize", "pCreateInfo->pInitialData", pCreateInfo->initialDataSize, &pCreateInfo->pInitialData, false, true, kVUIDUndefined, "VUID-VkValidationCacheCreateInfoEXT-pInitialData-parameter");
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreateValidationCacheEXT", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreateValidationCacheEXT", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreateValidationCacheEXT", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreateValidationCacheEXT", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreateValidationCacheEXT", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkCreateValidationCacheEXT", "pValidationCache", pValidationCache, "VUID-vkCreateValidationCacheEXT-pValidationCache-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateDestroyValidationCacheEXT(
- VkDevice device,
- VkValidationCacheEXT validationCache,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- if (!device_extensions.vk_ext_validation_cache) skip |= OutputExtensionError("vkDestroyValidationCacheEXT", VK_EXT_VALIDATION_CACHE_EXTENSION_NAME);
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkDestroyValidationCacheEXT", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkDestroyValidationCacheEXT", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkDestroyValidationCacheEXT", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkDestroyValidationCacheEXT", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkDestroyValidationCacheEXT", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateMergeValidationCachesEXT(
- VkDevice device,
- VkValidationCacheEXT dstCache,
- uint32_t srcCacheCount,
- const VkValidationCacheEXT* pSrcCaches) {
- bool skip = false;
- if (!device_extensions.vk_ext_validation_cache) skip |= OutputExtensionError("vkMergeValidationCachesEXT", VK_EXT_VALIDATION_CACHE_EXTENSION_NAME);
- skip |= validate_required_handle("vkMergeValidationCachesEXT", "dstCache", dstCache);
- skip |= validate_handle_array("vkMergeValidationCachesEXT", "srcCacheCount", "pSrcCaches", srcCacheCount, pSrcCaches, true, true);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetValidationCacheDataEXT(
- VkDevice device,
- VkValidationCacheEXT validationCache,
- size_t* pDataSize,
- void* pData) {
- bool skip = false;
- if (!device_extensions.vk_ext_validation_cache) skip |= OutputExtensionError("vkGetValidationCacheDataEXT", VK_EXT_VALIDATION_CACHE_EXTENSION_NAME);
- skip |= validate_required_handle("vkGetValidationCacheDataEXT", "validationCache", validationCache);
- skip |= validate_array("vkGetValidationCacheDataEXT", "pDataSize", "pData", pDataSize, &pData, true, false, false, kVUIDUndefined, "VUID-vkGetValidationCacheDataEXT-pData-parameter");
- return skip;
-}
-
-
-
-
-
-
-
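-// VK_NV_shading_rate_image commands: imageLayout, the per-viewport shading-rate palette entries and
-// the custom coarse sample orders are range-checked here; the palette and sample-order commands also
-// defer to manual_PreCallValidate* helpers for additional checks.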
-bool StatelessValidation::PreCallValidateCmdBindShadingRateImageNV(
- VkCommandBuffer commandBuffer,
- VkImageView imageView,
- VkImageLayout imageLayout) {
- bool skip = false;
- if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCmdBindShadingRateImageNV", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_nv_shading_rate_image) skip |= OutputExtensionError("vkCmdBindShadingRateImageNV", VK_NV_SHADING_RATE_IMAGE_EXTENSION_NAME);
- skip |= validate_ranged_enum("vkCmdBindShadingRateImageNV", "imageLayout", "VkImageLayout", AllVkImageLayoutEnums, imageLayout, "VUID-vkCmdBindShadingRateImageNV-imageLayout-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdSetViewportShadingRatePaletteNV(
- VkCommandBuffer commandBuffer,
- uint32_t firstViewport,
- uint32_t viewportCount,
- const VkShadingRatePaletteNV* pShadingRatePalettes) {
- bool skip = false;
- if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCmdSetViewportShadingRatePaletteNV", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_nv_shading_rate_image) skip |= OutputExtensionError("vkCmdSetViewportShadingRatePaletteNV", VK_NV_SHADING_RATE_IMAGE_EXTENSION_NAME);
- skip |= validate_array("vkCmdSetViewportShadingRatePaletteNV", "viewportCount", "pShadingRatePalettes", viewportCount, &pShadingRatePalettes, true, true, "VUID-vkCmdSetViewportShadingRatePaletteNV-viewportCount-arraylength", "VUID-vkCmdSetViewportShadingRatePaletteNV-pShadingRatePalettes-parameter");
- if (pShadingRatePalettes != NULL)
- {
- for (uint32_t viewportIndex = 0; viewportIndex < viewportCount; ++viewportIndex)
- {
- skip |= validate_ranged_enum_array("vkCmdSetViewportShadingRatePaletteNV", ParameterName("pShadingRatePalettes[%i].shadingRatePaletteEntryCount", ParameterName::IndexVector{ viewportIndex }), ParameterName("pShadingRatePalettes[%i].pShadingRatePaletteEntries", ParameterName::IndexVector{ viewportIndex }), "VkShadingRatePaletteEntryNV", AllVkShadingRatePaletteEntryNVEnums, pShadingRatePalettes[viewportIndex].shadingRatePaletteEntryCount, pShadingRatePalettes[viewportIndex].pShadingRatePaletteEntries, true, true);
- }
- }
- if (!skip) skip |= manual_PreCallValidateCmdSetViewportShadingRatePaletteNV(commandBuffer, firstViewport, viewportCount, pShadingRatePalettes);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdSetCoarseSampleOrderNV(
- VkCommandBuffer commandBuffer,
- VkCoarseSampleOrderTypeNV sampleOrderType,
- uint32_t customSampleOrderCount,
- const VkCoarseSampleOrderCustomNV* pCustomSampleOrders) {
- bool skip = false;
- if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCmdSetCoarseSampleOrderNV", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_nv_shading_rate_image) skip |= OutputExtensionError("vkCmdSetCoarseSampleOrderNV", VK_NV_SHADING_RATE_IMAGE_EXTENSION_NAME);
- skip |= validate_ranged_enum("vkCmdSetCoarseSampleOrderNV", "sampleOrderType", "VkCoarseSampleOrderTypeNV", AllVkCoarseSampleOrderTypeNVEnums, sampleOrderType, "VUID-vkCmdSetCoarseSampleOrderNV-sampleOrderType-parameter");
- skip |= validate_array("vkCmdSetCoarseSampleOrderNV", "customSampleOrderCount", "pCustomSampleOrders", customSampleOrderCount, &pCustomSampleOrders, false, true, kVUIDUndefined, "VUID-vkCmdSetCoarseSampleOrderNV-pCustomSampleOrders-parameter");
- if (pCustomSampleOrders != NULL)
- {
- for (uint32_t customSampleOrderIndex = 0; customSampleOrderIndex < customSampleOrderCount; ++customSampleOrderIndex)
- {
- skip |= validate_ranged_enum("vkCmdSetCoarseSampleOrderNV", ParameterName("pCustomSampleOrders[%i].shadingRate", ParameterName::IndexVector{ customSampleOrderIndex }), "VkShadingRatePaletteEntryNV", AllVkShadingRatePaletteEntryNVEnums, pCustomSampleOrders[customSampleOrderIndex].shadingRate, "VUID-VkCoarseSampleOrderCustomNV-shadingRate-parameter");
-
- skip |= validate_array("vkCmdSetCoarseSampleOrderNV", ParameterName("pCustomSampleOrders[%i].sampleLocationCount", ParameterName::IndexVector{ customSampleOrderIndex }), ParameterName("pCustomSampleOrders[%i].pSampleLocations", ParameterName::IndexVector{ customSampleOrderIndex }), pCustomSampleOrders[customSampleOrderIndex].sampleLocationCount, &pCustomSampleOrders[customSampleOrderIndex].pSampleLocations, true, true, "VUID-VkCoarseSampleOrderCustomNV-sampleLocationCount-arraylength", "VUID-VkCoarseSampleOrderCustomNV-pSampleLocations-parameter");
-
- if (pCustomSampleOrders[customSampleOrderIndex].pSampleLocations != NULL)
- {
- for (uint32_t sampleLocationIndex = 0; sampleLocationIndex < pCustomSampleOrders[customSampleOrderIndex].sampleLocationCount; ++sampleLocationIndex)
- {
- // No xml-driven validation
- }
- }
- }
- }
- if (!skip) skip |= manual_PreCallValidateCmdSetCoarseSampleOrderNV(commandBuffer, sampleOrderType, customSampleOrderCount, pCustomSampleOrders);
- return skip;
-}
-
-
-
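-// VK_NV_ray_tracing entry points: each requires VK_KHR_get_memory_requirements2,
-// VK_KHR_get_physical_device_properties2 and VK_NV_ray_tracing, then walks the acceleration-structure
-// and pipeline create-info structures (including every VkGeometryNV element) before deferring to the
-// corresponding manual_PreCallValidate* routine where one exists.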
-bool StatelessValidation::PreCallValidateCreateAccelerationStructureNV(
- VkDevice device,
- const VkAccelerationStructureCreateInfoNV* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkAccelerationStructureNV* pAccelerationStructure) {
- bool skip = false;
- if (!device_extensions.vk_khr_get_memory_requirements_2) skip |= OutputExtensionError("vkCreateAccelerationStructureNV", VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCreateAccelerationStructureNV", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_nv_ray_tracing) skip |= OutputExtensionError("vkCreateAccelerationStructureNV", VK_NV_RAY_TRACING_EXTENSION_NAME);
- skip |= validate_struct_type("vkCreateAccelerationStructureNV", "pCreateInfo", "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV", pCreateInfo, VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV, true, "VUID-vkCreateAccelerationStructureNV-pCreateInfo-parameter", "VUID-VkAccelerationStructureCreateInfoNV-sType-sType");
- if (pCreateInfo != NULL)
- {
- skip |= validate_struct_pnext("vkCreateAccelerationStructureNV", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkAccelerationStructureCreateInfoNV-pNext-pNext");
-
- skip |= validate_struct_type("vkCreateAccelerationStructureNV", "pCreateInfo->info", "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV", &(pCreateInfo->info), VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV, false, kVUIDUndefined, "VUID-VkAccelerationStructureInfoNV-sType-sType");
-
- skip |= validate_struct_pnext("vkCreateAccelerationStructureNV", "pCreateInfo->info.pNext", NULL, pCreateInfo->info.pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkAccelerationStructureInfoNV-pNext-pNext");
-
- skip |= validate_ranged_enum("vkCreateAccelerationStructureNV", "pCreateInfo->info.type", "VkAccelerationStructureTypeNV", AllVkAccelerationStructureTypeNVEnums, pCreateInfo->info.type, "VUID-VkAccelerationStructureInfoNV-type-parameter");
-
- skip |= validate_flags("vkCreateAccelerationStructureNV", "pCreateInfo->info.flags", "VkBuildAccelerationStructureFlagBitsNV", AllVkBuildAccelerationStructureFlagBitsNV, pCreateInfo->info.flags, kOptionalFlags, "VUID-VkAccelerationStructureInfoNV-flags-parameter");
-
- skip |= validate_struct_type_array("vkCreateAccelerationStructureNV", "pCreateInfo->info.geometryCount", "pCreateInfo->info.pGeometries", "VK_STRUCTURE_TYPE_GEOMETRY_NV", pCreateInfo->info.geometryCount, pCreateInfo->info.pGeometries, VK_STRUCTURE_TYPE_GEOMETRY_NV, false, true, "VUID-VkGeometryNV-sType-sType", "VUID-VkAccelerationStructureInfoNV-pGeometries-parameter", kVUIDUndefined);
-
- if (pCreateInfo->info.pGeometries != NULL)
- {
- for (uint32_t geometryIndex = 0; geometryIndex < pCreateInfo->info.geometryCount; ++geometryIndex)
- {
- skip |= validate_struct_pnext("vkCreateAccelerationStructureNV", ParameterName("pCreateInfo->info.pGeometries[%i].pNext", ParameterName::IndexVector{ geometryIndex }), NULL, pCreateInfo->info.pGeometries[geometryIndex].pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkGeometryNV-pNext-pNext");
-
- skip |= validate_ranged_enum("vkCreateAccelerationStructureNV", ParameterName("pCreateInfo->info.pGeometries[%i].geometryType", ParameterName::IndexVector{ geometryIndex }), "VkGeometryTypeNV", AllVkGeometryTypeNVEnums, pCreateInfo->info.pGeometries[geometryIndex].geometryType, "VUID-VkGeometryNV-geometryType-parameter");
-
- skip |= validate_struct_type("vkCreateAccelerationStructureNV", ParameterName("pCreateInfo->info.pGeometries[%i].geometry.triangles", ParameterName::IndexVector{ geometryIndex }), "VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV", &(pCreateInfo->info.pGeometries[geometryIndex].geometry.triangles), VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV, false, kVUIDUndefined, "VUID-VkGeometryTrianglesNV-sType-sType");
-
- skip |= validate_struct_pnext("vkCreateAccelerationStructureNV", ParameterName("pCreateInfo->info.pGeometries[%i].geometry.triangles.pNext", ParameterName::IndexVector{ geometryIndex }), NULL, pCreateInfo->info.pGeometries[geometryIndex].geometry.triangles.pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkGeometryTrianglesNV-pNext-pNext");
-
- skip |= validate_ranged_enum("vkCreateAccelerationStructureNV", ParameterName("pCreateInfo->info.pGeometries[%i].geometry.triangles.vertexFormat", ParameterName::IndexVector{ geometryIndex }), "VkFormat", AllVkFormatEnums, pCreateInfo->info.pGeometries[geometryIndex].geometry.triangles.vertexFormat, "VUID-VkGeometryTrianglesNV-vertexFormat-parameter");
-
- skip |= validate_ranged_enum("vkCreateAccelerationStructureNV", ParameterName("pCreateInfo->info.pGeometries[%i].geometry.triangles.indexType", ParameterName::IndexVector{ geometryIndex }), "VkIndexType", AllVkIndexTypeEnums, pCreateInfo->info.pGeometries[geometryIndex].geometry.triangles.indexType, "VUID-VkGeometryTrianglesNV-indexType-parameter");
-
- skip |= validate_struct_type("vkCreateAccelerationStructureNV", ParameterName("pCreateInfo->info.pGeometries[%i].geometry.aabbs", ParameterName::IndexVector{ geometryIndex }), "VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV", &(pCreateInfo->info.pGeometries[geometryIndex].geometry.aabbs), VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV, false, kVUIDUndefined, "VUID-VkGeometryAABBNV-sType-sType");
-
- skip |= validate_struct_pnext("vkCreateAccelerationStructureNV", ParameterName("pCreateInfo->info.pGeometries[%i].geometry.aabbs.pNext", ParameterName::IndexVector{ geometryIndex }), NULL, pCreateInfo->info.pGeometries[geometryIndex].geometry.aabbs.pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkGeometryAABBNV-pNext-pNext");
-
- skip |= validate_flags("vkCreateAccelerationStructureNV", ParameterName("pCreateInfo->info.pGeometries[%i].flags", ParameterName::IndexVector{ geometryIndex }), "VkGeometryFlagBitsNV", AllVkGeometryFlagBitsNV, pCreateInfo->info.pGeometries[geometryIndex].flags, kOptionalFlags, "VUID-VkGeometryNV-flags-parameter");
- }
- }
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreateAccelerationStructureNV", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreateAccelerationStructureNV", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreateAccelerationStructureNV", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreateAccelerationStructureNV", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreateAccelerationStructureNV", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkCreateAccelerationStructureNV", "pAccelerationStructure", pAccelerationStructure, "VUID-vkCreateAccelerationStructureNV-pAccelerationStructure-parameter");
- if (!skip) skip |= manual_PreCallValidateCreateAccelerationStructureNV(device, pCreateInfo, pAllocator, pAccelerationStructure);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateDestroyAccelerationStructureNV(
- VkDevice device,
- VkAccelerationStructureNV accelerationStructure,
- const VkAllocationCallbacks* pAllocator) {
- bool skip = false;
- if (!device_extensions.vk_khr_get_memory_requirements_2) skip |= OutputExtensionError("vkDestroyAccelerationStructureNV", VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkDestroyAccelerationStructureNV", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_nv_ray_tracing) skip |= OutputExtensionError("vkDestroyAccelerationStructureNV", VK_NV_RAY_TRACING_EXTENSION_NAME);
- skip |= validate_required_handle("vkDestroyAccelerationStructureNV", "accelerationStructure", accelerationStructure);
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkDestroyAccelerationStructureNV", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkDestroyAccelerationStructureNV", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkDestroyAccelerationStructureNV", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkDestroyAccelerationStructureNV", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkDestroyAccelerationStructureNV", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetAccelerationStructureMemoryRequirementsNV(
- VkDevice device,
- const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo,
- VkMemoryRequirements2KHR* pMemoryRequirements) {
- bool skip = false;
- if (!device_extensions.vk_khr_get_memory_requirements_2) skip |= OutputExtensionError("vkGetAccelerationStructureMemoryRequirementsNV", VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkGetAccelerationStructureMemoryRequirementsNV", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_nv_ray_tracing) skip |= OutputExtensionError("vkGetAccelerationStructureMemoryRequirementsNV", VK_NV_RAY_TRACING_EXTENSION_NAME);
- skip |= validate_struct_type("vkGetAccelerationStructureMemoryRequirementsNV", "pInfo", "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV", pInfo, VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV, true, "VUID-vkGetAccelerationStructureMemoryRequirementsNV-pInfo-parameter", "VUID-VkAccelerationStructureMemoryRequirementsInfoNV-sType-sType");
- if (pInfo != NULL)
- {
- skip |= validate_struct_pnext("vkGetAccelerationStructureMemoryRequirementsNV", "pInfo->pNext", NULL, pInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkAccelerationStructureMemoryRequirementsInfoNV-pNext-pNext");
-
- skip |= validate_ranged_enum("vkGetAccelerationStructureMemoryRequirementsNV", "pInfo->type", "VkAccelerationStructureMemoryRequirementsTypeNV", AllVkAccelerationStructureMemoryRequirementsTypeNVEnums, pInfo->type, "VUID-VkAccelerationStructureMemoryRequirementsInfoNV-type-parameter");
-
- skip |= validate_required_handle("vkGetAccelerationStructureMemoryRequirementsNV", "pInfo->accelerationStructure", pInfo->accelerationStructure);
- }
- skip |= validate_required_pointer("vkGetAccelerationStructureMemoryRequirementsNV", "pMemoryRequirements", pMemoryRequirements, "VUID-vkGetAccelerationStructureMemoryRequirementsNV-pMemoryRequirements-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateBindAccelerationStructureMemoryNV(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindAccelerationStructureMemoryInfoNV* pBindInfos) {
- bool skip = false;
- if (!device_extensions.vk_khr_get_memory_requirements_2) skip |= OutputExtensionError("vkBindAccelerationStructureMemoryNV", VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkBindAccelerationStructureMemoryNV", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_nv_ray_tracing) skip |= OutputExtensionError("vkBindAccelerationStructureMemoryNV", VK_NV_RAY_TRACING_EXTENSION_NAME);
- skip |= validate_struct_type_array("vkBindAccelerationStructureMemoryNV", "bindInfoCount", "pBindInfos", "VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV", bindInfoCount, pBindInfos, VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV, true, true, "VUID-VkBindAccelerationStructureMemoryInfoNV-sType-sType", "VUID-vkBindAccelerationStructureMemoryNV-pBindInfos-parameter", "VUID-vkBindAccelerationStructureMemoryNV-bindInfoCount-arraylength");
- if (pBindInfos != NULL)
- {
- for (uint32_t bindInfoIndex = 0; bindInfoIndex < bindInfoCount; ++bindInfoIndex)
- {
- skip |= validate_struct_pnext("vkBindAccelerationStructureMemoryNV", ParameterName("pBindInfos[%i].pNext", ParameterName::IndexVector{ bindInfoIndex }), NULL, pBindInfos[bindInfoIndex].pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkBindAccelerationStructureMemoryInfoNV-pNext-pNext");
-
- skip |= validate_required_handle("vkBindAccelerationStructureMemoryNV", ParameterName("pBindInfos[%i].accelerationStructure", ParameterName::IndexVector{ bindInfoIndex }), pBindInfos[bindInfoIndex].accelerationStructure);
-
- skip |= validate_required_handle("vkBindAccelerationStructureMemoryNV", ParameterName("pBindInfos[%i].memory", ParameterName::IndexVector{ bindInfoIndex }), pBindInfos[bindInfoIndex].memory);
-
- skip |= validate_array("vkBindAccelerationStructureMemoryNV", ParameterName("pBindInfos[%i].deviceIndexCount", ParameterName::IndexVector{ bindInfoIndex }), ParameterName("pBindInfos[%i].pDeviceIndices", ParameterName::IndexVector{ bindInfoIndex }), pBindInfos[bindInfoIndex].deviceIndexCount, &pBindInfos[bindInfoIndex].pDeviceIndices, false, true, kVUIDUndefined, "VUID-VkBindAccelerationStructureMemoryInfoNV-pDeviceIndices-parameter");
- }
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdBuildAccelerationStructureNV(
- VkCommandBuffer commandBuffer,
- const VkAccelerationStructureInfoNV* pInfo,
- VkBuffer instanceData,
- VkDeviceSize instanceOffset,
- VkBool32 update,
- VkAccelerationStructureNV dst,
- VkAccelerationStructureNV src,
- VkBuffer scratch,
- VkDeviceSize scratchOffset) {
- bool skip = false;
- if (!device_extensions.vk_khr_get_memory_requirements_2) skip |= OutputExtensionError("vkCmdBuildAccelerationStructureNV", VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCmdBuildAccelerationStructureNV", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_nv_ray_tracing) skip |= OutputExtensionError("vkCmdBuildAccelerationStructureNV", VK_NV_RAY_TRACING_EXTENSION_NAME);
- skip |= validate_struct_type("vkCmdBuildAccelerationStructureNV", "pInfo", "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV", pInfo, VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV, true, "VUID-vkCmdBuildAccelerationStructureNV-pInfo-parameter", "VUID-VkAccelerationStructureInfoNV-sType-sType");
- if (pInfo != NULL)
- {
- skip |= validate_struct_pnext("vkCmdBuildAccelerationStructureNV", "pInfo->pNext", NULL, pInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkAccelerationStructureInfoNV-pNext-pNext");
-
- skip |= validate_ranged_enum("vkCmdBuildAccelerationStructureNV", "pInfo->type", "VkAccelerationStructureTypeNV", AllVkAccelerationStructureTypeNVEnums, pInfo->type, "VUID-VkAccelerationStructureInfoNV-type-parameter");
-
- skip |= validate_flags("vkCmdBuildAccelerationStructureNV", "pInfo->flags", "VkBuildAccelerationStructureFlagBitsNV", AllVkBuildAccelerationStructureFlagBitsNV, pInfo->flags, kOptionalFlags, "VUID-VkAccelerationStructureInfoNV-flags-parameter");
-
- skip |= validate_struct_type_array("vkCmdBuildAccelerationStructureNV", "pInfo->geometryCount", "pInfo->pGeometries", "VK_STRUCTURE_TYPE_GEOMETRY_NV", pInfo->geometryCount, pInfo->pGeometries, VK_STRUCTURE_TYPE_GEOMETRY_NV, false, true, "VUID-VkGeometryNV-sType-sType", "VUID-VkAccelerationStructureInfoNV-pGeometries-parameter", kVUIDUndefined);
-
- if (pInfo->pGeometries != NULL)
- {
- for (uint32_t geometryIndex = 0; geometryIndex < pInfo->geometryCount; ++geometryIndex)
- {
- skip |= validate_struct_pnext("vkCmdBuildAccelerationStructureNV", ParameterName("pInfo->pGeometries[%i].pNext", ParameterName::IndexVector{ geometryIndex }), NULL, pInfo->pGeometries[geometryIndex].pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkGeometryNV-pNext-pNext");
-
- skip |= validate_ranged_enum("vkCmdBuildAccelerationStructureNV", ParameterName("pInfo->pGeometries[%i].geometryType", ParameterName::IndexVector{ geometryIndex }), "VkGeometryTypeNV", AllVkGeometryTypeNVEnums, pInfo->pGeometries[geometryIndex].geometryType, "VUID-VkGeometryNV-geometryType-parameter");
-
- skip |= validate_struct_type("vkCmdBuildAccelerationStructureNV", ParameterName("pInfo->pGeometries[%i].geometry.triangles", ParameterName::IndexVector{ geometryIndex }), "VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV", &(pInfo->pGeometries[geometryIndex].geometry.triangles), VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV, false, kVUIDUndefined, "VUID-VkGeometryTrianglesNV-sType-sType");
-
- skip |= validate_struct_pnext("vkCmdBuildAccelerationStructureNV", ParameterName("pInfo->pGeometries[%i].geometry.triangles.pNext", ParameterName::IndexVector{ geometryIndex }), NULL, pInfo->pGeometries[geometryIndex].geometry.triangles.pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkGeometryTrianglesNV-pNext-pNext");
-
- skip |= validate_ranged_enum("vkCmdBuildAccelerationStructureNV", ParameterName("pInfo->pGeometries[%i].geometry.triangles.vertexFormat", ParameterName::IndexVector{ geometryIndex }), "VkFormat", AllVkFormatEnums, pInfo->pGeometries[geometryIndex].geometry.triangles.vertexFormat, "VUID-VkGeometryTrianglesNV-vertexFormat-parameter");
-
- skip |= validate_ranged_enum("vkCmdBuildAccelerationStructureNV", ParameterName("pInfo->pGeometries[%i].geometry.triangles.indexType", ParameterName::IndexVector{ geometryIndex }), "VkIndexType", AllVkIndexTypeEnums, pInfo->pGeometries[geometryIndex].geometry.triangles.indexType, "VUID-VkGeometryTrianglesNV-indexType-parameter");
-
- skip |= validate_struct_type("vkCmdBuildAccelerationStructureNV", ParameterName("pInfo->pGeometries[%i].geometry.aabbs", ParameterName::IndexVector{ geometryIndex }), "VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV", &(pInfo->pGeometries[geometryIndex].geometry.aabbs), VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV, false, kVUIDUndefined, "VUID-VkGeometryAABBNV-sType-sType");
-
- skip |= validate_struct_pnext("vkCmdBuildAccelerationStructureNV", ParameterName("pInfo->pGeometries[%i].geometry.aabbs.pNext", ParameterName::IndexVector{ geometryIndex }), NULL, pInfo->pGeometries[geometryIndex].geometry.aabbs.pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkGeometryAABBNV-pNext-pNext");
-
- skip |= validate_flags("vkCmdBuildAccelerationStructureNV", ParameterName("pInfo->pGeometries[%i].flags", ParameterName::IndexVector{ geometryIndex }), "VkGeometryFlagBitsNV", AllVkGeometryFlagBitsNV, pInfo->pGeometries[geometryIndex].flags, kOptionalFlags, "VUID-VkGeometryNV-flags-parameter");
- }
- }
- }
- skip |= validate_bool32("vkCmdBuildAccelerationStructureNV", "update", update);
- skip |= validate_required_handle("vkCmdBuildAccelerationStructureNV", "dst", dst);
- skip |= validate_required_handle("vkCmdBuildAccelerationStructureNV", "scratch", scratch);
- if (!skip) skip |= manual_PreCallValidateCmdBuildAccelerationStructureNV(commandBuffer, pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdCopyAccelerationStructureNV(
- VkCommandBuffer commandBuffer,
- VkAccelerationStructureNV dst,
- VkAccelerationStructureNV src,
- VkCopyAccelerationStructureModeNV mode) {
- bool skip = false;
- if (!device_extensions.vk_khr_get_memory_requirements_2) skip |= OutputExtensionError("vkCmdCopyAccelerationStructureNV", VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCmdCopyAccelerationStructureNV", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_nv_ray_tracing) skip |= OutputExtensionError("vkCmdCopyAccelerationStructureNV", VK_NV_RAY_TRACING_EXTENSION_NAME);
- skip |= validate_required_handle("vkCmdCopyAccelerationStructureNV", "dst", dst);
- skip |= validate_required_handle("vkCmdCopyAccelerationStructureNV", "src", src);
- skip |= validate_ranged_enum("vkCmdCopyAccelerationStructureNV", "mode", "VkCopyAccelerationStructureModeNV", AllVkCopyAccelerationStructureModeNVEnums, mode, "VUID-vkCmdCopyAccelerationStructureNV-mode-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdTraceRaysNV(
- VkCommandBuffer commandBuffer,
- VkBuffer raygenShaderBindingTableBuffer,
- VkDeviceSize raygenShaderBindingOffset,
- VkBuffer missShaderBindingTableBuffer,
- VkDeviceSize missShaderBindingOffset,
- VkDeviceSize missShaderBindingStride,
- VkBuffer hitShaderBindingTableBuffer,
- VkDeviceSize hitShaderBindingOffset,
- VkDeviceSize hitShaderBindingStride,
- VkBuffer callableShaderBindingTableBuffer,
- VkDeviceSize callableShaderBindingOffset,
- VkDeviceSize callableShaderBindingStride,
- uint32_t width,
- uint32_t height,
- uint32_t depth) {
- bool skip = false;
- if (!device_extensions.vk_khr_get_memory_requirements_2) skip |= OutputExtensionError("vkCmdTraceRaysNV", VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCmdTraceRaysNV", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_nv_ray_tracing) skip |= OutputExtensionError("vkCmdTraceRaysNV", VK_NV_RAY_TRACING_EXTENSION_NAME);
- skip |= validate_required_handle("vkCmdTraceRaysNV", "raygenShaderBindingTableBuffer", raygenShaderBindingTableBuffer);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCreateRayTracingPipelinesNV(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VkRayTracingPipelineCreateInfoNV* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkPipeline* pPipelines) {
- bool skip = false;
- if (!device_extensions.vk_khr_get_memory_requirements_2) skip |= OutputExtensionError("vkCreateRayTracingPipelinesNV", VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCreateRayTracingPipelinesNV", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_nv_ray_tracing) skip |= OutputExtensionError("vkCreateRayTracingPipelinesNV", VK_NV_RAY_TRACING_EXTENSION_NAME);
- skip |= validate_struct_type_array("vkCreateRayTracingPipelinesNV", "createInfoCount", "pCreateInfos", "VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV", createInfoCount, pCreateInfos, VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV, true, true, "VUID-VkRayTracingPipelineCreateInfoNV-sType-sType", "VUID-vkCreateRayTracingPipelinesNV-pCreateInfos-parameter", "VUID-vkCreateRayTracingPipelinesNV-createInfoCount-arraylength");
- if (pCreateInfos != NULL)
- {
- for (uint32_t createInfoIndex = 0; createInfoIndex < createInfoCount; ++createInfoIndex)
- {
- const VkStructureType allowed_structs_VkRayTracingPipelineCreateInfoNV[] = { VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT };
-
- skip |= validate_struct_pnext("vkCreateRayTracingPipelinesNV", ParameterName("pCreateInfos[%i].pNext", ParameterName::IndexVector{ createInfoIndex }), "VkPipelineCreationFeedbackCreateInfoEXT", pCreateInfos[createInfoIndex].pNext, ARRAY_SIZE(allowed_structs_VkRayTracingPipelineCreateInfoNV), allowed_structs_VkRayTracingPipelineCreateInfoNV, GeneratedVulkanHeaderVersion, "VUID-VkRayTracingPipelineCreateInfoNV-pNext-pNext");
-
- skip |= validate_flags("vkCreateRayTracingPipelinesNV", ParameterName("pCreateInfos[%i].flags", ParameterName::IndexVector{ createInfoIndex }), "VkPipelineCreateFlagBits", AllVkPipelineCreateFlagBits, pCreateInfos[createInfoIndex].flags, kOptionalFlags, "VUID-VkRayTracingPipelineCreateInfoNV-flags-parameter");
-
- skip |= validate_struct_type_array("vkCreateRayTracingPipelinesNV", ParameterName("pCreateInfos[%i].stageCount", ParameterName::IndexVector{ createInfoIndex }), ParameterName("pCreateInfos[%i].pStages", ParameterName::IndexVector{ createInfoIndex }), "VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO", pCreateInfos[createInfoIndex].stageCount, pCreateInfos[createInfoIndex].pStages, VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO, true, true, "VUID-VkPipelineShaderStageCreateInfo-sType-sType", "VUID-VkRayTracingPipelineCreateInfoNV-pStages-parameter", "VUID-VkRayTracingPipelineCreateInfoNV-stageCount-arraylength");
-
- if (pCreateInfos[createInfoIndex].pStages != NULL)
- {
- for (uint32_t stageIndex = 0; stageIndex < pCreateInfos[createInfoIndex].stageCount; ++stageIndex)
- {
- const VkStructureType allowed_structs_VkPipelineShaderStageCreateInfo[] = { VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT };
-
- skip |= validate_struct_pnext("vkCreateRayTracingPipelinesNV", ParameterName("pCreateInfos[%i].pStages[%i].pNext", ParameterName::IndexVector{ createInfoIndex, stageIndex }), "VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT", pCreateInfos[createInfoIndex].pStages[stageIndex].pNext, ARRAY_SIZE(allowed_structs_VkPipelineShaderStageCreateInfo), allowed_structs_VkPipelineShaderStageCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkPipelineShaderStageCreateInfo-pNext-pNext");
-
- skip |= validate_flags("vkCreateRayTracingPipelinesNV", ParameterName("pCreateInfos[%i].pStages[%i].flags", ParameterName::IndexVector{ createInfoIndex, stageIndex }), "VkPipelineShaderStageCreateFlagBits", AllVkPipelineShaderStageCreateFlagBits, pCreateInfos[createInfoIndex].pStages[stageIndex].flags, kOptionalFlags, "VUID-VkPipelineShaderStageCreateInfo-flags-parameter");
-
- skip |= validate_flags("vkCreateRayTracingPipelinesNV", ParameterName("pCreateInfos[%i].pStages[%i].stage", ParameterName::IndexVector{ createInfoIndex, stageIndex }), "VkShaderStageFlagBits", AllVkShaderStageFlagBits, pCreateInfos[createInfoIndex].pStages[stageIndex].stage, kRequiredSingleBit, "VUID-VkPipelineShaderStageCreateInfo-stage-parameter", "VUID-VkPipelineShaderStageCreateInfo-stage-parameter");
-
- skip |= validate_required_handle("vkCreateRayTracingPipelinesNV", ParameterName("pCreateInfos[%i].pStages[%i].module", ParameterName::IndexVector{ createInfoIndex, stageIndex }), pCreateInfos[createInfoIndex].pStages[stageIndex].module);
-
- skip |= validate_required_pointer("vkCreateRayTracingPipelinesNV", ParameterName("pCreateInfos[%i].pStages[%i].pName", ParameterName::IndexVector{ createInfoIndex, stageIndex }), pCreateInfos[createInfoIndex].pStages[stageIndex].pName, "VUID-VkPipelineShaderStageCreateInfo-pName-parameter");
-
- if (pCreateInfos[createInfoIndex].pStages[stageIndex].pSpecializationInfo != NULL)
- {
- skip |= validate_array("vkCreateRayTracingPipelinesNV", ParameterName("pCreateInfos[%i].pStages[%i].pSpecializationInfo->mapEntryCount", ParameterName::IndexVector{ createInfoIndex, stageIndex }), ParameterName("pCreateInfos[%i].pStages[%i].pSpecializationInfo->pMapEntries", ParameterName::IndexVector{ createInfoIndex, stageIndex }), pCreateInfos[createInfoIndex].pStages[stageIndex].pSpecializationInfo->mapEntryCount, &pCreateInfos[createInfoIndex].pStages[stageIndex].pSpecializationInfo->pMapEntries, false, true, kVUIDUndefined, "VUID-VkSpecializationInfo-pMapEntries-parameter");
-
- if (pCreateInfos[createInfoIndex].pStages[stageIndex].pSpecializationInfo->pMapEntries != NULL)
- {
- for (uint32_t mapEntryIndex = 0; mapEntryIndex < pCreateInfos[createInfoIndex].pStages[stageIndex].pSpecializationInfo->mapEntryCount; ++mapEntryIndex)
- {
- // No xml-driven validation
- }
- }
-
- skip |= validate_array("vkCreateRayTracingPipelinesNV", ParameterName("pCreateInfos[%i].pStages[%i].pSpecializationInfo->dataSize", ParameterName::IndexVector{ createInfoIndex, stageIndex }), ParameterName("pCreateInfos[%i].pStages[%i].pSpecializationInfo->pData", ParameterName::IndexVector{ createInfoIndex, stageIndex }), pCreateInfos[createInfoIndex].pStages[stageIndex].pSpecializationInfo->dataSize, &pCreateInfos[createInfoIndex].pStages[stageIndex].pSpecializationInfo->pData, false, true, kVUIDUndefined, "VUID-VkSpecializationInfo-pData-parameter");
- }
- }
- }
-
- skip |= validate_struct_type_array("vkCreateRayTracingPipelinesNV", ParameterName("pCreateInfos[%i].groupCount", ParameterName::IndexVector{ createInfoIndex }), ParameterName("pCreateInfos[%i].pGroups", ParameterName::IndexVector{ createInfoIndex }), "VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV", pCreateInfos[createInfoIndex].groupCount, pCreateInfos[createInfoIndex].pGroups, VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV, true, true, "VUID-VkRayTracingShaderGroupCreateInfoNV-sType-sType", "VUID-VkRayTracingPipelineCreateInfoNV-pGroups-parameter", "VUID-VkRayTracingPipelineCreateInfoNV-groupCount-arraylength");
-
- if (pCreateInfos[createInfoIndex].pGroups != NULL)
- {
- for (uint32_t groupIndex = 0; groupIndex < pCreateInfos[createInfoIndex].groupCount; ++groupIndex)
- {
- skip |= validate_struct_pnext("vkCreateRayTracingPipelinesNV", ParameterName("pCreateInfos[%i].pGroups[%i].pNext", ParameterName::IndexVector{ createInfoIndex, groupIndex }), NULL, pCreateInfos[createInfoIndex].pGroups[groupIndex].pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkRayTracingShaderGroupCreateInfoNV-pNext-pNext");
-
- skip |= validate_ranged_enum("vkCreateRayTracingPipelinesNV", ParameterName("pCreateInfos[%i].pGroups[%i].type", ParameterName::IndexVector{ createInfoIndex, groupIndex }), "VkRayTracingShaderGroupTypeNV", AllVkRayTracingShaderGroupTypeNVEnums, pCreateInfos[createInfoIndex].pGroups[groupIndex].type, "VUID-VkRayTracingShaderGroupCreateInfoNV-type-parameter");
- }
- }
-
- skip |= validate_required_handle("vkCreateRayTracingPipelinesNV", ParameterName("pCreateInfos[%i].layout", ParameterName::IndexVector{ createInfoIndex }), pCreateInfos[createInfoIndex].layout);
- }
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreateRayTracingPipelinesNV", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreateRayTracingPipelinesNV", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreateRayTracingPipelinesNV", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreateRayTracingPipelinesNV", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreateRayTracingPipelinesNV", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_array("vkCreateRayTracingPipelinesNV", "createInfoCount", "pPipelines", createInfoCount, &pPipelines, true, true, "VUID-vkCreateRayTracingPipelinesNV-createInfoCount-arraylength", "VUID-vkCreateRayTracingPipelinesNV-pPipelines-parameter");
- if (!skip) skip |= manual_PreCallValidateCreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetRayTracingShaderGroupHandlesNV(
- VkDevice device,
- VkPipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- size_t dataSize,
- void* pData) {
- bool skip = false;
- if (!device_extensions.vk_khr_get_memory_requirements_2) skip |= OutputExtensionError("vkGetRayTracingShaderGroupHandlesNV", VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkGetRayTracingShaderGroupHandlesNV", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_nv_ray_tracing) skip |= OutputExtensionError("vkGetRayTracingShaderGroupHandlesNV", VK_NV_RAY_TRACING_EXTENSION_NAME);
- skip |= validate_required_handle("vkGetRayTracingShaderGroupHandlesNV", "pipeline", pipeline);
- skip |= validate_array("vkGetRayTracingShaderGroupHandlesNV", "dataSize", "pData", dataSize, &pData, true, true, "VUID-vkGetRayTracingShaderGroupHandlesNV-dataSize-arraylength", "VUID-vkGetRayTracingShaderGroupHandlesNV-pData-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetAccelerationStructureHandleNV(
- VkDevice device,
- VkAccelerationStructureNV accelerationStructure,
- size_t dataSize,
- void* pData) {
- bool skip = false;
- if (!device_extensions.vk_khr_get_memory_requirements_2) skip |= OutputExtensionError("vkGetAccelerationStructureHandleNV", VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkGetAccelerationStructureHandleNV", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_nv_ray_tracing) skip |= OutputExtensionError("vkGetAccelerationStructureHandleNV", VK_NV_RAY_TRACING_EXTENSION_NAME);
- skip |= validate_required_handle("vkGetAccelerationStructureHandleNV", "accelerationStructure", accelerationStructure);
- skip |= validate_array("vkGetAccelerationStructureHandleNV", "dataSize", "pData", dataSize, &pData, true, true, "VUID-vkGetAccelerationStructureHandleNV-dataSize-arraylength", "VUID-vkGetAccelerationStructureHandleNV-pData-parameter");
- if (!skip) skip |= manual_PreCallValidateGetAccelerationStructureHandleNV(device, accelerationStructure, dataSize, pData);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdWriteAccelerationStructuresPropertiesNV(
- VkCommandBuffer commandBuffer,
- uint32_t accelerationStructureCount,
- const VkAccelerationStructureNV* pAccelerationStructures,
- VkQueryType queryType,
- VkQueryPool queryPool,
- uint32_t firstQuery) {
- bool skip = false;
- if (!device_extensions.vk_khr_get_memory_requirements_2) skip |= OutputExtensionError("vkCmdWriteAccelerationStructuresPropertiesNV", VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCmdWriteAccelerationStructuresPropertiesNV", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_nv_ray_tracing) skip |= OutputExtensionError("vkCmdWriteAccelerationStructuresPropertiesNV", VK_NV_RAY_TRACING_EXTENSION_NAME);
- skip |= validate_handle_array("vkCmdWriteAccelerationStructuresPropertiesNV", "accelerationStructureCount", "pAccelerationStructures", accelerationStructureCount, pAccelerationStructures, true, true);
- skip |= validate_ranged_enum("vkCmdWriteAccelerationStructuresPropertiesNV", "queryType", "VkQueryType", AllVkQueryTypeEnums, queryType, "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-queryType-parameter");
- skip |= validate_required_handle("vkCmdWriteAccelerationStructuresPropertiesNV", "queryPool", queryPool);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCompileDeferredNV(
- VkDevice device,
- VkPipeline pipeline,
- uint32_t shader) {
- bool skip = false;
- if (!device_extensions.vk_khr_get_memory_requirements_2) skip |= OutputExtensionError("vkCompileDeferredNV", VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCompileDeferredNV", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_nv_ray_tracing) skip |= OutputExtensionError("vkCompileDeferredNV", VK_NV_RAY_TRACING_EXTENSION_NAME);
- skip |= validate_required_handle("vkCompileDeferredNV", "pipeline", pipeline);
- return skip;
-}
-
-
-
-
-
-
-
-
-
-bool StatelessValidation::PreCallValidateGetMemoryHostPointerPropertiesEXT(
- VkDevice device,
- VkExternalMemoryHandleTypeFlagBits handleType,
- const void* pHostPointer,
- VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties) {
- bool skip = false;
- if (!device_extensions.vk_khr_external_memory) skip |= OutputExtensionError("vkGetMemoryHostPointerPropertiesEXT", VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
- if (!device_extensions.vk_ext_external_memory_host) skip |= OutputExtensionError("vkGetMemoryHostPointerPropertiesEXT", VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME);
- skip |= validate_flags("vkGetMemoryHostPointerPropertiesEXT", "handleType", "VkExternalMemoryHandleTypeFlagBits", AllVkExternalMemoryHandleTypeFlagBits, handleType, kRequiredSingleBit, "VUID-vkGetMemoryHostPointerPropertiesEXT-handleType-parameter", "VUID-vkGetMemoryHostPointerPropertiesEXT-handleType-parameter");
- skip |= validate_required_pointer("vkGetMemoryHostPointerPropertiesEXT", "pHostPointer", pHostPointer, kVUIDUndefined);
- skip |= validate_struct_type("vkGetMemoryHostPointerPropertiesEXT", "pMemoryHostPointerProperties", "VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT", pMemoryHostPointerProperties, VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT, true, "VUID-vkGetMemoryHostPointerPropertiesEXT-pMemoryHostPointerProperties-parameter", "VUID-VkMemoryHostPointerPropertiesEXT-sType-sType");
- if (pMemoryHostPointerProperties != NULL)
- {
- // No xml-driven validation
- }
- return skip;
-}
-
-
-
-bool StatelessValidation::PreCallValidateCmdWriteBufferMarkerAMD(
- VkCommandBuffer commandBuffer,
- VkPipelineStageFlagBits pipelineStage,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- uint32_t marker) {
- bool skip = false;
- if (!device_extensions.vk_amd_buffer_marker) skip |= OutputExtensionError("vkCmdWriteBufferMarkerAMD", VK_AMD_BUFFER_MARKER_EXTENSION_NAME);
- skip |= validate_flags("vkCmdWriteBufferMarkerAMD", "pipelineStage", "VkPipelineStageFlagBits", AllVkPipelineStageFlagBits, pipelineStage, kRequiredSingleBit, "VUID-vkCmdWriteBufferMarkerAMD-pipelineStage-parameter", "VUID-vkCmdWriteBufferMarkerAMD-pipelineStage-parameter");
- skip |= validate_required_handle("vkCmdWriteBufferMarkerAMD", "dstBuffer", dstBuffer);
- return skip;
-}
-
-
-
-
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceCalibrateableTimeDomainsEXT(
- VkPhysicalDevice physicalDevice,
- uint32_t* pTimeDomainCount,
- VkTimeDomainEXT* pTimeDomains) {
- bool skip = false;
- skip |= validate_array("vkGetPhysicalDeviceCalibrateableTimeDomainsEXT", "pTimeDomainCount", "pTimeDomains", pTimeDomainCount, &pTimeDomains, true, false, false, kVUIDUndefined, "VUID-vkGetPhysicalDeviceCalibrateableTimeDomainsEXT-pTimeDomains-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetCalibratedTimestampsEXT(
- VkDevice device,
- uint32_t timestampCount,
- const VkCalibratedTimestampInfoEXT* pTimestampInfos,
- uint64_t* pTimestamps,
- uint64_t* pMaxDeviation) {
- bool skip = false;
- if (!device_extensions.vk_ext_calibrated_timestamps) skip |= OutputExtensionError("vkGetCalibratedTimestampsEXT", VK_EXT_CALIBRATED_TIMESTAMPS_EXTENSION_NAME);
- skip |= validate_struct_type_array("vkGetCalibratedTimestampsEXT", "timestampCount", "pTimestampInfos", "VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT", timestampCount, pTimestampInfos, VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT, true, true, "VUID-VkCalibratedTimestampInfoEXT-sType-sType", "VUID-vkGetCalibratedTimestampsEXT-pTimestampInfos-parameter", "VUID-vkGetCalibratedTimestampsEXT-timestampCount-arraylength");
- if (pTimestampInfos != NULL)
- {
- for (uint32_t timestampIndex = 0; timestampIndex < timestampCount; ++timestampIndex)
- {
- skip |= validate_struct_pnext("vkGetCalibratedTimestampsEXT", ParameterName("pTimestampInfos[%i].pNext", ParameterName::IndexVector{ timestampIndex }), NULL, pTimestampInfos[timestampIndex].pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkCalibratedTimestampInfoEXT-pNext-pNext");
-
- skip |= validate_ranged_enum("vkGetCalibratedTimestampsEXT", ParameterName("pTimestampInfos[%i].timeDomain", ParameterName::IndexVector{ timestampIndex }), "VkTimeDomainEXT", AllVkTimeDomainEXTEnums, pTimestampInfos[timestampIndex].timeDomain, "VUID-VkCalibratedTimestampInfoEXT-timeDomain-parameter");
- }
- }
- skip |= validate_array("vkGetCalibratedTimestampsEXT", "timestampCount", "pTimestamps", timestampCount, &pTimestamps, true, true, "VUID-vkGetCalibratedTimestampsEXT-timestampCount-arraylength", "VUID-vkGetCalibratedTimestampsEXT-pTimestamps-parameter");
- skip |= validate_required_pointer("vkGetCalibratedTimestampsEXT", "pMaxDeviation", pMaxDeviation, "VUID-vkGetCalibratedTimestampsEXT-pMaxDeviation-parameter");
- return skip;
-}
-
-
-
-
-
-
-
-
-
-#ifdef VK_USE_PLATFORM_GGP
-
-#endif // VK_USE_PLATFORM_GGP
-
-
-
-
-
-
-
-bool StatelessValidation::PreCallValidateCmdDrawMeshTasksNV(
- VkCommandBuffer commandBuffer,
- uint32_t taskCount,
- uint32_t firstTask) {
- bool skip = false;
- if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCmdDrawMeshTasksNV", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_nv_mesh_shader) skip |= OutputExtensionError("vkCmdDrawMeshTasksNV", VK_NV_MESH_SHADER_EXTENSION_NAME);
- // No xml-driven validation
- if (!skip) skip |= manual_PreCallValidateCmdDrawMeshTasksNV(commandBuffer, taskCount, firstTask);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdDrawMeshTasksIndirectNV(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- uint32_t drawCount,
- uint32_t stride) {
- bool skip = false;
- if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCmdDrawMeshTasksIndirectNV", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_nv_mesh_shader) skip |= OutputExtensionError("vkCmdDrawMeshTasksIndirectNV", VK_NV_MESH_SHADER_EXTENSION_NAME);
- skip |= validate_required_handle("vkCmdDrawMeshTasksIndirectNV", "buffer", buffer);
- if (!skip) skip |= manual_PreCallValidateCmdDrawMeshTasksIndirectNV(commandBuffer, buffer, offset, drawCount, stride);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdDrawMeshTasksIndirectCountNV(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride) {
- bool skip = false;
- if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCmdDrawMeshTasksIndirectCountNV", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_nv_mesh_shader) skip |= OutputExtensionError("vkCmdDrawMeshTasksIndirectCountNV", VK_NV_MESH_SHADER_EXTENSION_NAME);
- skip |= validate_required_handle("vkCmdDrawMeshTasksIndirectCountNV", "buffer", buffer);
- skip |= validate_required_handle("vkCmdDrawMeshTasksIndirectCountNV", "countBuffer", countBuffer);
- if (!skip) skip |= manual_PreCallValidateCmdDrawMeshTasksIndirectCountNV(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
- return skip;
-}
-
-
-
-
-
-
-
-bool StatelessValidation::PreCallValidateCmdSetExclusiveScissorNV(
- VkCommandBuffer commandBuffer,
- uint32_t firstExclusiveScissor,
- uint32_t exclusiveScissorCount,
- const VkRect2D* pExclusiveScissors) {
- bool skip = false;
- if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCmdSetExclusiveScissorNV", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_nv_scissor_exclusive) skip |= OutputExtensionError("vkCmdSetExclusiveScissorNV", VK_NV_SCISSOR_EXCLUSIVE_EXTENSION_NAME);
- skip |= validate_array("vkCmdSetExclusiveScissorNV", "exclusiveScissorCount", "pExclusiveScissors", exclusiveScissorCount, &pExclusiveScissors, true, true, "VUID-vkCmdSetExclusiveScissorNV-exclusiveScissorCount-arraylength", "VUID-vkCmdSetExclusiveScissorNV-pExclusiveScissors-parameter");
- if (pExclusiveScissors != NULL)
- {
- for (uint32_t exclusiveScissorIndex = 0; exclusiveScissorIndex < exclusiveScissorCount; ++exclusiveScissorIndex)
- {
- // No xml-driven validation
-
- // No xml-driven validation
- }
- }
- if (!skip) skip |= manual_PreCallValidateCmdSetExclusiveScissorNV(commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors);
- return skip;
-}
-
-
-
-bool StatelessValidation::PreCallValidateCmdSetCheckpointNV(
- VkCommandBuffer commandBuffer,
- const void* pCheckpointMarker) {
- bool skip = false;
- if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCmdSetCheckpointNV", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_nv_device_diagnostic_checkpoints) skip |= OutputExtensionError("vkCmdSetCheckpointNV", VK_NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_EXTENSION_NAME);
- // No xml-driven validation
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetQueueCheckpointDataNV(
- VkQueue queue,
- uint32_t* pCheckpointDataCount,
- VkCheckpointDataNV* pCheckpointData) {
- bool skip = false;
- if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkGetQueueCheckpointDataNV", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_nv_device_diagnostic_checkpoints) skip |= OutputExtensionError("vkGetQueueCheckpointDataNV", VK_NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_EXTENSION_NAME);
- skip |= validate_struct_type_array("vkGetQueueCheckpointDataNV", "pCheckpointDataCount", "pCheckpointData", "VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV", pCheckpointDataCount, pCheckpointData, VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV, true, false, false, "VUID-VkCheckpointDataNV-sType-sType", "VUID-vkGetQueueCheckpointDataNV-pCheckpointData-parameter", kVUIDUndefined);
- if (pCheckpointData != NULL)
- {
- for (uint32_t pCheckpointDataIndex = 0; pCheckpointDataIndex < *pCheckpointDataCount; ++pCheckpointDataIndex)
- {
- // No xml-driven validation
- }
- }
- return skip;
-}
-
-
-
-
-
-bool StatelessValidation::PreCallValidateInitializePerformanceApiINTEL(
- VkDevice device,
- const VkInitializePerformanceApiInfoINTEL* pInitializeInfo) {
- bool skip = false;
- if (!device_extensions.vk_intel_performance_query) skip |= OutputExtensionError("vkInitializePerformanceApiINTEL", VK_INTEL_PERFORMANCE_QUERY_EXTENSION_NAME);
- skip |= validate_struct_type("vkInitializePerformanceApiINTEL", "pInitializeInfo", "VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL", pInitializeInfo, VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL, true, "VUID-vkInitializePerformanceApiINTEL-pInitializeInfo-parameter", "VUID-VkInitializePerformanceApiInfoINTEL-sType-sType");
- if (pInitializeInfo != NULL)
- {
- skip |= validate_struct_pnext("vkInitializePerformanceApiINTEL", "pInitializeInfo->pNext", NULL, pInitializeInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkInitializePerformanceApiInfoINTEL-pNext-pNext");
-
- skip |= validate_required_pointer("vkInitializePerformanceApiINTEL", "pInitializeInfo->pUserData", pInitializeInfo->pUserData, kVUIDUndefined);
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateUninitializePerformanceApiINTEL(
- VkDevice device) {
- bool skip = false;
- if (!device_extensions.vk_intel_performance_query) skip |= OutputExtensionError("vkUninitializePerformanceApiINTEL", VK_INTEL_PERFORMANCE_QUERY_EXTENSION_NAME);
- // No xml-driven validation
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdSetPerformanceMarkerINTEL(
- VkCommandBuffer commandBuffer,
- const VkPerformanceMarkerInfoINTEL* pMarkerInfo) {
- bool skip = false;
- if (!device_extensions.vk_intel_performance_query) skip |= OutputExtensionError("vkCmdSetPerformanceMarkerINTEL", VK_INTEL_PERFORMANCE_QUERY_EXTENSION_NAME);
- skip |= validate_struct_type("vkCmdSetPerformanceMarkerINTEL", "pMarkerInfo", "VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL", pMarkerInfo, VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL, true, "VUID-vkCmdSetPerformanceMarkerINTEL-pMarkerInfo-parameter", "VUID-VkPerformanceMarkerInfoINTEL-sType-sType");
- if (pMarkerInfo != NULL)
- {
- skip |= validate_struct_pnext("vkCmdSetPerformanceMarkerINTEL", "pMarkerInfo->pNext", NULL, pMarkerInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkPerformanceMarkerInfoINTEL-pNext-pNext");
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdSetPerformanceStreamMarkerINTEL(
- VkCommandBuffer commandBuffer,
- const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo) {
- bool skip = false;
- if (!device_extensions.vk_intel_performance_query) skip |= OutputExtensionError("vkCmdSetPerformanceStreamMarkerINTEL", VK_INTEL_PERFORMANCE_QUERY_EXTENSION_NAME);
- skip |= validate_struct_type("vkCmdSetPerformanceStreamMarkerINTEL", "pMarkerInfo", "VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL", pMarkerInfo, VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL, true, "VUID-vkCmdSetPerformanceStreamMarkerINTEL-pMarkerInfo-parameter", "VUID-VkPerformanceStreamMarkerInfoINTEL-sType-sType");
- if (pMarkerInfo != NULL)
- {
- skip |= validate_struct_pnext("vkCmdSetPerformanceStreamMarkerINTEL", "pMarkerInfo->pNext", NULL, pMarkerInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkPerformanceStreamMarkerInfoINTEL-pNext-pNext");
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateCmdSetPerformanceOverrideINTEL(
- VkCommandBuffer commandBuffer,
- const VkPerformanceOverrideInfoINTEL* pOverrideInfo) {
- bool skip = false;
- if (!device_extensions.vk_intel_performance_query) skip |= OutputExtensionError("vkCmdSetPerformanceOverrideINTEL", VK_INTEL_PERFORMANCE_QUERY_EXTENSION_NAME);
- skip |= validate_struct_type("vkCmdSetPerformanceOverrideINTEL", "pOverrideInfo", "VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL", pOverrideInfo, VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL, true, "VUID-vkCmdSetPerformanceOverrideINTEL-pOverrideInfo-parameter", "VUID-VkPerformanceOverrideInfoINTEL-sType-sType");
- if (pOverrideInfo != NULL)
- {
- skip |= validate_struct_pnext("vkCmdSetPerformanceOverrideINTEL", "pOverrideInfo->pNext", NULL, pOverrideInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkPerformanceOverrideInfoINTEL-pNext-pNext");
-
- skip |= validate_ranged_enum("vkCmdSetPerformanceOverrideINTEL", "pOverrideInfo->type", "VkPerformanceOverrideTypeINTEL", AllVkPerformanceOverrideTypeINTELEnums, pOverrideInfo->type, "VUID-VkPerformanceOverrideInfoINTEL-type-parameter");
-
- skip |= validate_bool32("vkCmdSetPerformanceOverrideINTEL", "pOverrideInfo->enable", pOverrideInfo->enable);
- }
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateAcquirePerformanceConfigurationINTEL(
- VkDevice device,
- const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo,
- VkPerformanceConfigurationINTEL* pConfiguration) {
- bool skip = false;
- if (!device_extensions.vk_intel_performance_query) skip |= OutputExtensionError("vkAcquirePerformanceConfigurationINTEL", VK_INTEL_PERFORMANCE_QUERY_EXTENSION_NAME);
- skip |= validate_struct_type("vkAcquirePerformanceConfigurationINTEL", "pAcquireInfo", "VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL", pAcquireInfo, VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL, true, "VUID-vkAcquirePerformanceConfigurationINTEL-pAcquireInfo-parameter", "VUID-VkPerformanceConfigurationAcquireInfoINTEL-sType-sType");
- if (pAcquireInfo != NULL)
- {
- skip |= validate_struct_pnext("vkAcquirePerformanceConfigurationINTEL", "pAcquireInfo->pNext", NULL, pAcquireInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkPerformanceConfigurationAcquireInfoINTEL-pNext-pNext");
-
- skip |= validate_ranged_enum("vkAcquirePerformanceConfigurationINTEL", "pAcquireInfo->type", "VkPerformanceConfigurationTypeINTEL", AllVkPerformanceConfigurationTypeINTELEnums, pAcquireInfo->type, "VUID-VkPerformanceConfigurationAcquireInfoINTEL-type-parameter");
- }
- skip |= validate_required_pointer("vkAcquirePerformanceConfigurationINTEL", "pConfiguration", pConfiguration, "VUID-vkAcquirePerformanceConfigurationINTEL-pConfiguration-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateReleasePerformanceConfigurationINTEL(
- VkDevice device,
- VkPerformanceConfigurationINTEL configuration) {
- bool skip = false;
- if (!device_extensions.vk_intel_performance_query) skip |= OutputExtensionError("vkReleasePerformanceConfigurationINTEL", VK_INTEL_PERFORMANCE_QUERY_EXTENSION_NAME);
- skip |= validate_required_handle("vkReleasePerformanceConfigurationINTEL", "configuration", configuration);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateQueueSetPerformanceConfigurationINTEL(
- VkQueue queue,
- VkPerformanceConfigurationINTEL configuration) {
- bool skip = false;
- if (!device_extensions.vk_intel_performance_query) skip |= OutputExtensionError("vkQueueSetPerformanceConfigurationINTEL", VK_INTEL_PERFORMANCE_QUERY_EXTENSION_NAME);
- skip |= validate_required_handle("vkQueueSetPerformanceConfigurationINTEL", "configuration", configuration);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateGetPerformanceParameterINTEL(
- VkDevice device,
- VkPerformanceParameterTypeINTEL parameter,
- VkPerformanceValueINTEL* pValue) {
- bool skip = false;
- if (!device_extensions.vk_intel_performance_query) skip |= OutputExtensionError("vkGetPerformanceParameterINTEL", VK_INTEL_PERFORMANCE_QUERY_EXTENSION_NAME);
- skip |= validate_ranged_enum("vkGetPerformanceParameterINTEL", "parameter", "VkPerformanceParameterTypeINTEL", AllVkPerformanceParameterTypeINTELEnums, parameter, "VUID-vkGetPerformanceParameterINTEL-parameter-parameter");
- skip |= validate_required_pointer("vkGetPerformanceParameterINTEL", "pValue", pValue, "VUID-vkGetPerformanceParameterINTEL-pValue-parameter");
- return skip;
-}
-
-
-
-
-
-bool StatelessValidation::PreCallValidateSetLocalDimmingAMD(
- VkDevice device,
- VkSwapchainKHR swapChain,
- VkBool32 localDimmingEnable) {
- bool skip = false;
- if (!device_extensions.vk_khr_swapchain) skip |= OutputExtensionError("vkSetLocalDimmingAMD", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
- if (!device_extensions.vk_khr_get_surface_capabilities_2) skip |= OutputExtensionError("vkSetLocalDimmingAMD", VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkSetLocalDimmingAMD", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_amd_display_native_hdr) skip |= OutputExtensionError("vkSetLocalDimmingAMD", VK_AMD_DISPLAY_NATIVE_HDR_EXTENSION_NAME);
- skip |= validate_required_handle("vkSetLocalDimmingAMD", "swapChain", swapChain);
- skip |= validate_bool32("vkSetLocalDimmingAMD", "localDimmingEnable", localDimmingEnable);
- return skip;
-}
-
-
-
-#ifdef VK_USE_PLATFORM_FUCHSIA
-
-bool StatelessValidation::PreCallValidateCreateImagePipeSurfaceFUCHSIA(
- VkInstance instance,
- const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- bool skip = false;
- if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkCreateImagePipeSurfaceFUCHSIA", VK_KHR_SURFACE_EXTENSION_NAME);
- if (!instance_extensions.vk_fuchsia_imagepipe_surface) skip |= OutputExtensionError("vkCreateImagePipeSurfaceFUCHSIA", VK_FUCHSIA_IMAGEPIPE_SURFACE_EXTENSION_NAME);
- skip |= validate_struct_type("vkCreateImagePipeSurfaceFUCHSIA", "pCreateInfo", "VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA", pCreateInfo, VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA, true, "VUID-vkCreateImagePipeSurfaceFUCHSIA-pCreateInfo-parameter", "VUID-VkImagePipeSurfaceCreateInfoFUCHSIA-sType-sType");
- if (pCreateInfo != NULL)
- {
- skip |= validate_struct_pnext("vkCreateImagePipeSurfaceFUCHSIA", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkImagePipeSurfaceCreateInfoFUCHSIA-pNext-pNext");
-
- skip |= validate_reserved_flags("vkCreateImagePipeSurfaceFUCHSIA", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkImagePipeSurfaceCreateInfoFUCHSIA-flags-zerobitmask");
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreateImagePipeSurfaceFUCHSIA", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreateImagePipeSurfaceFUCHSIA", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreateImagePipeSurfaceFUCHSIA", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreateImagePipeSurfaceFUCHSIA", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreateImagePipeSurfaceFUCHSIA", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkCreateImagePipeSurfaceFUCHSIA", "pSurface", pSurface, "VUID-vkCreateImagePipeSurfaceFUCHSIA-pSurface-parameter");
- return skip;
-}
-
-#endif // VK_USE_PLATFORM_FUCHSIA
-
-#ifdef VK_USE_PLATFORM_METAL_EXT
-
-bool StatelessValidation::PreCallValidateCreateMetalSurfaceEXT(
- VkInstance instance,
- const VkMetalSurfaceCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- bool skip = false;
- if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkCreateMetalSurfaceEXT", VK_KHR_SURFACE_EXTENSION_NAME);
- if (!instance_extensions.vk_ext_metal_surface) skip |= OutputExtensionError("vkCreateMetalSurfaceEXT", VK_EXT_METAL_SURFACE_EXTENSION_NAME);
- skip |= validate_struct_type("vkCreateMetalSurfaceEXT", "pCreateInfo", "VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT", pCreateInfo, VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT, true, "VUID-vkCreateMetalSurfaceEXT-pCreateInfo-parameter", "VUID-VkMetalSurfaceCreateInfoEXT-sType-sType");
- if (pCreateInfo != NULL)
- {
- skip |= validate_struct_pnext("vkCreateMetalSurfaceEXT", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkMetalSurfaceCreateInfoEXT-pNext-pNext");
-
- skip |= validate_reserved_flags("vkCreateMetalSurfaceEXT", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkMetalSurfaceCreateInfoEXT-flags-zerobitmask");
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreateMetalSurfaceEXT", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreateMetalSurfaceEXT", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreateMetalSurfaceEXT", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreateMetalSurfaceEXT", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreateMetalSurfaceEXT", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkCreateMetalSurfaceEXT", "pSurface", pSurface, "VUID-vkCreateMetalSurfaceEXT-pSurface-parameter");
- return skip;
-}
-
-#endif // VK_USE_PLATFORM_METAL_EXT
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-bool StatelessValidation::PreCallValidateGetBufferDeviceAddressEXT(
- VkDevice device,
- const VkBufferDeviceAddressInfoEXT* pInfo) {
- bool skip = false;
- if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkGetBufferDeviceAddressEXT", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_ext_buffer_device_address) skip |= OutputExtensionError("vkGetBufferDeviceAddressEXT", VK_EXT_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME);
- skip |= validate_struct_type("vkGetBufferDeviceAddressEXT", "pInfo", "VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT", pInfo, VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT, true, "VUID-vkGetBufferDeviceAddressEXT-pInfo-parameter", "VUID-VkBufferDeviceAddressInfoEXT-sType-sType");
- if (pInfo != NULL)
- {
- skip |= validate_struct_pnext("vkGetBufferDeviceAddressEXT", "pInfo->pNext", NULL, pInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkBufferDeviceAddressInfoEXT-pNext-pNext");
-
- skip |= validate_required_handle("vkGetBufferDeviceAddressEXT", "pInfo->buffer", pInfo->buffer);
- }
- return skip;
-}
-
-
-
-
-
-
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceCooperativeMatrixPropertiesNV(
- VkPhysicalDevice physicalDevice,
- uint32_t* pPropertyCount,
- VkCooperativeMatrixPropertiesNV* pProperties) {
- bool skip = false;
- skip |= validate_struct_type_array("vkGetPhysicalDeviceCooperativeMatrixPropertiesNV", "pPropertyCount", "pProperties", "VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV", pPropertyCount, pProperties, VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV, true, false, false, "VUID-VkCooperativeMatrixPropertiesNV-sType-sType", "VUID-vkGetPhysicalDeviceCooperativeMatrixPropertiesNV-pProperties-parameter", kVUIDUndefined);
- return skip;
-}
-
-
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
- VkPhysicalDevice physicalDevice,
- uint32_t* pCombinationCount,
- VkFramebufferMixedSamplesCombinationNV* pCombinations) {
- bool skip = false;
- skip |= validate_struct_type_array("vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV", "pCombinationCount", "pCombinations", "VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV", pCombinationCount, pCombinations, VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV, true, false, false, "VUID-VkFramebufferMixedSamplesCombinationNV-sType-sType", "VUID-vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV-pCombinations-parameter", kVUIDUndefined);
- if (pCombinations != NULL)
- {
- for (uint32_t pCombinationIndex = 0; pCombinationIndex < *pCombinationCount; ++pCombinationIndex)
- {
- // No xml-driven validation
- }
- }
- return skip;
-}
-
-
-
-
-
-
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-bool StatelessValidation::PreCallValidateGetPhysicalDeviceSurfacePresentModes2EXT(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
- uint32_t* pPresentModeCount,
- VkPresentModeKHR* pPresentModes) {
- bool skip = false;
- skip |= validate_struct_type("vkGetPhysicalDeviceSurfacePresentModes2EXT", "pSurfaceInfo", "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR", pSurfaceInfo, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR, true, "VUID-vkGetPhysicalDeviceSurfacePresentModes2EXT-pSurfaceInfo-parameter", "VUID-VkPhysicalDeviceSurfaceInfo2KHR-sType-sType");
- if (pSurfaceInfo != NULL)
- {
- const VkStructureType allowed_structs_VkPhysicalDeviceSurfaceInfo2KHR[] = { VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT, VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT };
-
- skip |= validate_struct_pnext("vkGetPhysicalDeviceSurfacePresentModes2EXT", "pSurfaceInfo->pNext", "VkSurfaceFullScreenExclusiveInfoEXT, VkSurfaceFullScreenExclusiveWin32InfoEXT", pSurfaceInfo->pNext, ARRAY_SIZE(allowed_structs_VkPhysicalDeviceSurfaceInfo2KHR), allowed_structs_VkPhysicalDeviceSurfaceInfo2KHR, GeneratedVulkanHeaderVersion, "VUID-VkPhysicalDeviceSurfaceInfo2KHR-pNext-pNext");
-
- skip |= validate_required_handle("vkGetPhysicalDeviceSurfacePresentModes2EXT", "pSurfaceInfo->surface", pSurfaceInfo->surface);
- }
- skip |= validate_array("vkGetPhysicalDeviceSurfacePresentModes2EXT", "pPresentModeCount", "pPresentModes", pPresentModeCount, &pPresentModes, true, false, false, kVUIDUndefined, "VUID-vkGetPhysicalDeviceSurfacePresentModes2EXT-pPresentModes-parameter");
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateAcquireFullScreenExclusiveModeEXT(
- VkDevice device,
- VkSwapchainKHR swapchain) {
- bool skip = false;
- if (!device_extensions.vk_khr_swapchain) skip |= OutputExtensionError("vkAcquireFullScreenExclusiveModeEXT", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
- if (!device_extensions.vk_khr_get_surface_capabilities_2) skip |= OutputExtensionError("vkAcquireFullScreenExclusiveModeEXT", VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_khr_surface) skip |= OutputExtensionError("vkAcquireFullScreenExclusiveModeEXT", VK_KHR_SURFACE_EXTENSION_NAME);
- if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkAcquireFullScreenExclusiveModeEXT", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_ext_full_screen_exclusive) skip |= OutputExtensionError("vkAcquireFullScreenExclusiveModeEXT", VK_EXT_FULL_SCREEN_EXCLUSIVE_EXTENSION_NAME);
- skip |= validate_required_handle("vkAcquireFullScreenExclusiveModeEXT", "swapchain", swapchain);
- return skip;
-}
-
-bool StatelessValidation::PreCallValidateReleaseFullScreenExclusiveModeEXT(
- VkDevice device,
- VkSwapchainKHR swapchain) {
- bool skip = false;
- if (!device_extensions.vk_khr_swapchain) skip |= OutputExtensionError("vkReleaseFullScreenExclusiveModeEXT", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
- if (!device_extensions.vk_khr_get_surface_capabilities_2) skip |= OutputExtensionError("vkReleaseFullScreenExclusiveModeEXT", VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_khr_surface) skip |= OutputExtensionError("vkReleaseFullScreenExclusiveModeEXT", VK_KHR_SURFACE_EXTENSION_NAME);
- if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkReleaseFullScreenExclusiveModeEXT", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_ext_full_screen_exclusive) skip |= OutputExtensionError("vkReleaseFullScreenExclusiveModeEXT", VK_EXT_FULL_SCREEN_EXCLUSIVE_EXTENSION_NAME);
- skip |= validate_required_handle("vkReleaseFullScreenExclusiveModeEXT", "swapchain", swapchain);
- return skip;
-}
-
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-bool StatelessValidation::PreCallValidateCreateHeadlessSurfaceEXT(
- VkInstance instance,
- const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- bool skip = false;
- if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkCreateHeadlessSurfaceEXT", VK_KHR_SURFACE_EXTENSION_NAME);
- if (!instance_extensions.vk_ext_headless_surface) skip |= OutputExtensionError("vkCreateHeadlessSurfaceEXT", VK_EXT_HEADLESS_SURFACE_EXTENSION_NAME);
- skip |= validate_struct_type("vkCreateHeadlessSurfaceEXT", "pCreateInfo", "VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT", pCreateInfo, VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT, true, "VUID-vkCreateHeadlessSurfaceEXT-pCreateInfo-parameter", "VUID-VkHeadlessSurfaceCreateInfoEXT-sType-sType");
- if (pCreateInfo != NULL)
- {
- skip |= validate_struct_pnext("vkCreateHeadlessSurfaceEXT", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkHeadlessSurfaceCreateInfoEXT-pNext-pNext");
-
- skip |= validate_reserved_flags("vkCreateHeadlessSurfaceEXT", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkHeadlessSurfaceCreateInfoEXT-flags-zerobitmask");
- }
- if (pAllocator != NULL)
- {
- skip |= validate_required_pointer("vkCreateHeadlessSurfaceEXT", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
-
- skip |= validate_required_pointer("vkCreateHeadlessSurfaceEXT", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
-
- skip |= validate_required_pointer("vkCreateHeadlessSurfaceEXT", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
-
- if (pAllocator->pfnInternalAllocation != NULL)
- {
- skip |= validate_required_pointer("vkCreateHeadlessSurfaceEXT", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
-
- if (pAllocator->pfnInternalFree != NULL)
- {
- skip |= validate_required_pointer("vkCreateHeadlessSurfaceEXT", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
-
- }
- }
- skip |= validate_required_pointer("vkCreateHeadlessSurfaceEXT", "pSurface", pSurface, "VUID-vkCreateHeadlessSurfaceEXT-pSurface-parameter");
- return skip;
-}
-
-
-
-bool StatelessValidation::PreCallValidateCmdSetLineStippleEXT(
- VkCommandBuffer commandBuffer,
- uint32_t lineStippleFactor,
- uint16_t lineStipplePattern) {
- bool skip = false;
- if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCmdSetLineStippleEXT", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_ext_line_rasterization) skip |= OutputExtensionError("vkCmdSetLineStippleEXT", VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME);
- // No xml-driven validation
- if (!skip) skip |= manual_PreCallValidateCmdSetLineStippleEXT(commandBuffer, lineStippleFactor, lineStipplePattern);
- return skip;
-}
-
-
-
-bool StatelessValidation::PreCallValidateResetQueryPoolEXT(
- VkDevice device,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount) {
- bool skip = false;
- if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkResetQueryPoolEXT", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_ext_host_query_reset) skip |= OutputExtensionError("vkResetQueryPoolEXT", VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME);
- skip |= validate_required_handle("vkResetQueryPoolEXT", "queryPool", queryPool);
- return skip;
-}
-
-
-
-
-
-
-
-
-
-
diff --git a/layers/generated/parameter_validation.h b/layers/generated/parameter_validation.h
deleted file mode 100644
index e2ceb5e17..000000000
--- a/layers/generated/parameter_validation.h
+++ /dev/null
@@ -1,1757 +0,0 @@
-/* *** THIS FILE IS GENERATED - DO NOT EDIT! ***
- * See parameter_validation_generator.py for modifications
- *
- * Copyright (c) 2015-2019 The Khronos Group Inc.
- * Copyright (c) 2015-2017 Valve Corporation
- * Copyright (c) 2015-2019 LunarG, Inc.
- * Copyright (C) 2015-2019 Google Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * Author: Mark Lobodzinski <mark@LunarG.com>
- * Author: Dave Houlton <daveh@LunarG.com>
- */
-
-
-bool PreCallValidateCreateInstance(
- const VkInstanceCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkInstance* pInstance);
-bool PreCallValidateDestroyInstance(
- VkInstance instance,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateEnumeratePhysicalDevices(
- VkInstance instance,
- uint32_t* pPhysicalDeviceCount,
- VkPhysicalDevice* pPhysicalDevices);
-bool PreCallValidateGetPhysicalDeviceFeatures(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceFeatures* pFeatures);
-bool PreCallValidateGetPhysicalDeviceFormatProperties(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkFormatProperties* pFormatProperties);
-bool PreCallValidateGetPhysicalDeviceImageFormatProperties(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkImageType type,
- VkImageTiling tiling,
- VkImageUsageFlags usage,
- VkImageCreateFlags flags,
- VkImageFormatProperties* pImageFormatProperties);
-bool PreCallValidateGetPhysicalDeviceProperties(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceProperties* pProperties);
-bool PreCallValidateGetPhysicalDeviceQueueFamilyProperties(
- VkPhysicalDevice physicalDevice,
- uint32_t* pQueueFamilyPropertyCount,
- VkQueueFamilyProperties* pQueueFamilyProperties);
-bool PreCallValidateGetPhysicalDeviceMemoryProperties(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceMemoryProperties* pMemoryProperties);
-bool PreCallValidateCreateDevice(
- VkPhysicalDevice physicalDevice,
- const VkDeviceCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDevice* pDevice);
-bool PreCallValidateDestroyDevice(
- VkDevice device,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateGetDeviceQueue(
- VkDevice device,
- uint32_t queueFamilyIndex,
- uint32_t queueIndex,
- VkQueue* pQueue);
-bool PreCallValidateQueueSubmit(
- VkQueue queue,
- uint32_t submitCount,
- const VkSubmitInfo* pSubmits,
- VkFence fence);
-bool PreCallValidateQueueWaitIdle(
- VkQueue queue);
-bool PreCallValidateDeviceWaitIdle(
- VkDevice device);
-bool PreCallValidateAllocateMemory(
- VkDevice device,
- const VkMemoryAllocateInfo* pAllocateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDeviceMemory* pMemory);
-bool PreCallValidateFreeMemory(
- VkDevice device,
- VkDeviceMemory memory,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateMapMemory(
- VkDevice device,
- VkDeviceMemory memory,
- VkDeviceSize offset,
- VkDeviceSize size,
- VkMemoryMapFlags flags,
- void** ppData);
-bool PreCallValidateUnmapMemory(
- VkDevice device,
- VkDeviceMemory memory);
-bool PreCallValidateFlushMappedMemoryRanges(
- VkDevice device,
- uint32_t memoryRangeCount,
- const VkMappedMemoryRange* pMemoryRanges);
-bool PreCallValidateInvalidateMappedMemoryRanges(
- VkDevice device,
- uint32_t memoryRangeCount,
- const VkMappedMemoryRange* pMemoryRanges);
-bool PreCallValidateGetDeviceMemoryCommitment(
- VkDevice device,
- VkDeviceMemory memory,
- VkDeviceSize* pCommittedMemoryInBytes);
-bool PreCallValidateBindBufferMemory(
- VkDevice device,
- VkBuffer buffer,
- VkDeviceMemory memory,
- VkDeviceSize memoryOffset);
-bool PreCallValidateBindImageMemory(
- VkDevice device,
- VkImage image,
- VkDeviceMemory memory,
- VkDeviceSize memoryOffset);
-bool PreCallValidateGetBufferMemoryRequirements(
- VkDevice device,
- VkBuffer buffer,
- VkMemoryRequirements* pMemoryRequirements);
-bool PreCallValidateGetImageMemoryRequirements(
- VkDevice device,
- VkImage image,
- VkMemoryRequirements* pMemoryRequirements);
-bool PreCallValidateGetImageSparseMemoryRequirements(
- VkDevice device,
- VkImage image,
- uint32_t* pSparseMemoryRequirementCount,
- VkSparseImageMemoryRequirements* pSparseMemoryRequirements);
-bool PreCallValidateGetPhysicalDeviceSparseImageFormatProperties(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkImageType type,
- VkSampleCountFlagBits samples,
- VkImageUsageFlags usage,
- VkImageTiling tiling,
- uint32_t* pPropertyCount,
- VkSparseImageFormatProperties* pProperties);
-bool PreCallValidateQueueBindSparse(
- VkQueue queue,
- uint32_t bindInfoCount,
- const VkBindSparseInfo* pBindInfo,
- VkFence fence);
-bool PreCallValidateCreateFence(
- VkDevice device,
- const VkFenceCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFence* pFence);
-bool PreCallValidateDestroyFence(
- VkDevice device,
- VkFence fence,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateResetFences(
- VkDevice device,
- uint32_t fenceCount,
- const VkFence* pFences);
-bool PreCallValidateGetFenceStatus(
- VkDevice device,
- VkFence fence);
-bool PreCallValidateWaitForFences(
- VkDevice device,
- uint32_t fenceCount,
- const VkFence* pFences,
- VkBool32 waitAll,
- uint64_t timeout);
-bool PreCallValidateCreateSemaphore(
- VkDevice device,
- const VkSemaphoreCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSemaphore* pSemaphore);
-bool PreCallValidateDestroySemaphore(
- VkDevice device,
- VkSemaphore semaphore,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateCreateEvent(
- VkDevice device,
- const VkEventCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkEvent* pEvent);
-bool PreCallValidateDestroyEvent(
- VkDevice device,
- VkEvent event,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateGetEventStatus(
- VkDevice device,
- VkEvent event);
-bool PreCallValidateSetEvent(
- VkDevice device,
- VkEvent event);
-bool PreCallValidateResetEvent(
- VkDevice device,
- VkEvent event);
-bool PreCallValidateCreateQueryPool(
- VkDevice device,
- const VkQueryPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkQueryPool* pQueryPool);
-bool PreCallValidateDestroyQueryPool(
- VkDevice device,
- VkQueryPool queryPool,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateGetQueryPoolResults(
- VkDevice device,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- size_t dataSize,
- void* pData,
- VkDeviceSize stride,
- VkQueryResultFlags flags);
-bool PreCallValidateCreateBuffer(
- VkDevice device,
- const VkBufferCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkBuffer* pBuffer);
-bool PreCallValidateDestroyBuffer(
- VkDevice device,
- VkBuffer buffer,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateCreateBufferView(
- VkDevice device,
- const VkBufferViewCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkBufferView* pView);
-bool PreCallValidateDestroyBufferView(
- VkDevice device,
- VkBufferView bufferView,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateCreateImage(
- VkDevice device,
- const VkImageCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkImage* pImage);
-bool PreCallValidateDestroyImage(
- VkDevice device,
- VkImage image,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateGetImageSubresourceLayout(
- VkDevice device,
- VkImage image,
- const VkImageSubresource* pSubresource,
- VkSubresourceLayout* pLayout);
-bool PreCallValidateCreateImageView(
- VkDevice device,
- const VkImageViewCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkImageView* pView);
-bool PreCallValidateDestroyImageView(
- VkDevice device,
- VkImageView imageView,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateCreateShaderModule(
- VkDevice device,
- const VkShaderModuleCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkShaderModule* pShaderModule);
-bool PreCallValidateDestroyShaderModule(
- VkDevice device,
- VkShaderModule shaderModule,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateCreatePipelineCache(
- VkDevice device,
- const VkPipelineCacheCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkPipelineCache* pPipelineCache);
-bool PreCallValidateDestroyPipelineCache(
- VkDevice device,
- VkPipelineCache pipelineCache,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateGetPipelineCacheData(
- VkDevice device,
- VkPipelineCache pipelineCache,
- size_t* pDataSize,
- void* pData);
-bool PreCallValidateMergePipelineCaches(
- VkDevice device,
- VkPipelineCache dstCache,
- uint32_t srcCacheCount,
- const VkPipelineCache* pSrcCaches);
-bool PreCallValidateCreateGraphicsPipelines(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VkGraphicsPipelineCreateInfo* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkPipeline* pPipelines);
-bool PreCallValidateCreateComputePipelines(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VkComputePipelineCreateInfo* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkPipeline* pPipelines);
-bool PreCallValidateDestroyPipeline(
- VkDevice device,
- VkPipeline pipeline,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateCreatePipelineLayout(
- VkDevice device,
- const VkPipelineLayoutCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkPipelineLayout* pPipelineLayout);
-bool PreCallValidateDestroyPipelineLayout(
- VkDevice device,
- VkPipelineLayout pipelineLayout,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateCreateSampler(
- VkDevice device,
- const VkSamplerCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSampler* pSampler);
-bool PreCallValidateDestroySampler(
- VkDevice device,
- VkSampler sampler,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateCreateDescriptorSetLayout(
- VkDevice device,
- const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorSetLayout* pSetLayout);
-bool PreCallValidateDestroyDescriptorSetLayout(
- VkDevice device,
- VkDescriptorSetLayout descriptorSetLayout,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateCreateDescriptorPool(
- VkDevice device,
- const VkDescriptorPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorPool* pDescriptorPool);
-bool PreCallValidateDestroyDescriptorPool(
- VkDevice device,
- VkDescriptorPool descriptorPool,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateResetDescriptorPool(
- VkDevice device,
- VkDescriptorPool descriptorPool,
- VkDescriptorPoolResetFlags flags);
-bool PreCallValidateAllocateDescriptorSets(
- VkDevice device,
- const VkDescriptorSetAllocateInfo* pAllocateInfo,
- VkDescriptorSet* pDescriptorSets);
-bool PreCallValidateFreeDescriptorSets(
- VkDevice device,
- VkDescriptorPool descriptorPool,
- uint32_t descriptorSetCount,
- const VkDescriptorSet* pDescriptorSets);
-bool PreCallValidateUpdateDescriptorSets(
- VkDevice device,
- uint32_t descriptorWriteCount,
- const VkWriteDescriptorSet* pDescriptorWrites,
- uint32_t descriptorCopyCount,
- const VkCopyDescriptorSet* pDescriptorCopies);
-bool PreCallValidateCreateFramebuffer(
- VkDevice device,
- const VkFramebufferCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFramebuffer* pFramebuffer);
-bool PreCallValidateDestroyFramebuffer(
- VkDevice device,
- VkFramebuffer framebuffer,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateCreateRenderPass(
- VkDevice device,
- const VkRenderPassCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkRenderPass* pRenderPass);
-bool PreCallValidateDestroyRenderPass(
- VkDevice device,
- VkRenderPass renderPass,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateGetRenderAreaGranularity(
- VkDevice device,
- VkRenderPass renderPass,
- VkExtent2D* pGranularity);
-bool PreCallValidateCreateCommandPool(
- VkDevice device,
- const VkCommandPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkCommandPool* pCommandPool);
-bool PreCallValidateDestroyCommandPool(
- VkDevice device,
- VkCommandPool commandPool,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateResetCommandPool(
- VkDevice device,
- VkCommandPool commandPool,
- VkCommandPoolResetFlags flags);
-bool PreCallValidateAllocateCommandBuffers(
- VkDevice device,
- const VkCommandBufferAllocateInfo* pAllocateInfo,
- VkCommandBuffer* pCommandBuffers);
-bool PreCallValidateFreeCommandBuffers(
- VkDevice device,
- VkCommandPool commandPool,
- uint32_t commandBufferCount,
- const VkCommandBuffer* pCommandBuffers);
-bool PreCallValidateBeginCommandBuffer(
- VkCommandBuffer commandBuffer,
- const VkCommandBufferBeginInfo* pBeginInfo);
-bool PreCallValidateEndCommandBuffer(
- VkCommandBuffer commandBuffer);
-bool PreCallValidateResetCommandBuffer(
- VkCommandBuffer commandBuffer,
- VkCommandBufferResetFlags flags);
-bool PreCallValidateCmdBindPipeline(
- VkCommandBuffer commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipeline pipeline);
-bool PreCallValidateCmdSetViewport(
- VkCommandBuffer commandBuffer,
- uint32_t firstViewport,
- uint32_t viewportCount,
- const VkViewport* pViewports);
-bool PreCallValidateCmdSetScissor(
- VkCommandBuffer commandBuffer,
- uint32_t firstScissor,
- uint32_t scissorCount,
- const VkRect2D* pScissors);
-bool PreCallValidateCmdSetLineWidth(
- VkCommandBuffer commandBuffer,
- float lineWidth);
-bool PreCallValidateCmdSetDepthBias(
- VkCommandBuffer commandBuffer,
- float depthBiasConstantFactor,
- float depthBiasClamp,
- float depthBiasSlopeFactor);
-bool PreCallValidateCmdSetBlendConstants(
- VkCommandBuffer commandBuffer,
- const float blendConstants[4]);
-bool PreCallValidateCmdSetDepthBounds(
- VkCommandBuffer commandBuffer,
- float minDepthBounds,
- float maxDepthBounds);
-bool PreCallValidateCmdSetStencilCompareMask(
- VkCommandBuffer commandBuffer,
- VkStencilFaceFlags faceMask,
- uint32_t compareMask);
-bool PreCallValidateCmdSetStencilWriteMask(
- VkCommandBuffer commandBuffer,
- VkStencilFaceFlags faceMask,
- uint32_t writeMask);
-bool PreCallValidateCmdSetStencilReference(
- VkCommandBuffer commandBuffer,
- VkStencilFaceFlags faceMask,
- uint32_t reference);
-bool PreCallValidateCmdBindDescriptorSets(
- VkCommandBuffer commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipelineLayout layout,
- uint32_t firstSet,
- uint32_t descriptorSetCount,
- const VkDescriptorSet* pDescriptorSets,
- uint32_t dynamicOffsetCount,
- const uint32_t* pDynamicOffsets);
-bool PreCallValidateCmdBindIndexBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkIndexType indexType);
-bool PreCallValidateCmdBindVertexBuffers(
- VkCommandBuffer commandBuffer,
- uint32_t firstBinding,
- uint32_t bindingCount,
- const VkBuffer* pBuffers,
- const VkDeviceSize* pOffsets);
-bool PreCallValidateCmdDraw(
- VkCommandBuffer commandBuffer,
- uint32_t vertexCount,
- uint32_t instanceCount,
- uint32_t firstVertex,
- uint32_t firstInstance);
-bool PreCallValidateCmdDrawIndexed(
- VkCommandBuffer commandBuffer,
- uint32_t indexCount,
- uint32_t instanceCount,
- uint32_t firstIndex,
- int32_t vertexOffset,
- uint32_t firstInstance);
-bool PreCallValidateCmdDrawIndirect(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- uint32_t drawCount,
- uint32_t stride);
-bool PreCallValidateCmdDrawIndexedIndirect(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- uint32_t drawCount,
- uint32_t stride);
-bool PreCallValidateCmdDispatch(
- VkCommandBuffer commandBuffer,
- uint32_t groupCountX,
- uint32_t groupCountY,
- uint32_t groupCountZ);
-bool PreCallValidateCmdDispatchIndirect(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset);
-bool PreCallValidateCmdCopyBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer srcBuffer,
- VkBuffer dstBuffer,
- uint32_t regionCount,
- const VkBufferCopy* pRegions);
-bool PreCallValidateCmdCopyImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkImageCopy* pRegions);
-bool PreCallValidateCmdBlitImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkImageBlit* pRegions,
- VkFilter filter);
-bool PreCallValidateCmdCopyBufferToImage(
- VkCommandBuffer commandBuffer,
- VkBuffer srcBuffer,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkBufferImageCopy* pRegions);
-bool PreCallValidateCmdCopyImageToBuffer(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkBuffer dstBuffer,
- uint32_t regionCount,
- const VkBufferImageCopy* pRegions);
-bool PreCallValidateCmdUpdateBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize dataSize,
- const void* pData);
-bool PreCallValidateCmdFillBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize size,
- uint32_t data);
-bool PreCallValidateCmdClearColorImage(
- VkCommandBuffer commandBuffer,
- VkImage image,
- VkImageLayout imageLayout,
- const VkClearColorValue* pColor,
- uint32_t rangeCount,
- const VkImageSubresourceRange* pRanges);
-bool PreCallValidateCmdClearDepthStencilImage(
- VkCommandBuffer commandBuffer,
- VkImage image,
- VkImageLayout imageLayout,
- const VkClearDepthStencilValue* pDepthStencil,
- uint32_t rangeCount,
- const VkImageSubresourceRange* pRanges);
-bool PreCallValidateCmdClearAttachments(
- VkCommandBuffer commandBuffer,
- uint32_t attachmentCount,
- const VkClearAttachment* pAttachments,
- uint32_t rectCount,
- const VkClearRect* pRects);
-bool PreCallValidateCmdResolveImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkImageResolve* pRegions);
-bool PreCallValidateCmdSetEvent(
- VkCommandBuffer commandBuffer,
- VkEvent event,
- VkPipelineStageFlags stageMask);
-bool PreCallValidateCmdResetEvent(
- VkCommandBuffer commandBuffer,
- VkEvent event,
- VkPipelineStageFlags stageMask);
-bool PreCallValidateCmdWaitEvents(
- VkCommandBuffer commandBuffer,
- uint32_t eventCount,
- const VkEvent* pEvents,
- VkPipelineStageFlags srcStageMask,
- VkPipelineStageFlags dstStageMask,
- uint32_t memoryBarrierCount,
- const VkMemoryBarrier* pMemoryBarriers,
- uint32_t bufferMemoryBarrierCount,
- const VkBufferMemoryBarrier* pBufferMemoryBarriers,
- uint32_t imageMemoryBarrierCount,
- const VkImageMemoryBarrier* pImageMemoryBarriers);
-bool PreCallValidateCmdPipelineBarrier(
- VkCommandBuffer commandBuffer,
- VkPipelineStageFlags srcStageMask,
- VkPipelineStageFlags dstStageMask,
- VkDependencyFlags dependencyFlags,
- uint32_t memoryBarrierCount,
- const VkMemoryBarrier* pMemoryBarriers,
- uint32_t bufferMemoryBarrierCount,
- const VkBufferMemoryBarrier* pBufferMemoryBarriers,
- uint32_t imageMemoryBarrierCount,
- const VkImageMemoryBarrier* pImageMemoryBarriers);
-bool PreCallValidateCmdBeginQuery(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query,
- VkQueryControlFlags flags);
-bool PreCallValidateCmdEndQuery(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query);
-bool PreCallValidateCmdResetQueryPool(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount);
-bool PreCallValidateCmdWriteTimestamp(
- VkCommandBuffer commandBuffer,
- VkPipelineStageFlagBits pipelineStage,
- VkQueryPool queryPool,
- uint32_t query);
-bool PreCallValidateCmdCopyQueryPoolResults(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize stride,
- VkQueryResultFlags flags);
-bool PreCallValidateCmdPushConstants(
- VkCommandBuffer commandBuffer,
- VkPipelineLayout layout,
- VkShaderStageFlags stageFlags,
- uint32_t offset,
- uint32_t size,
- const void* pValues);
-bool PreCallValidateCmdBeginRenderPass(
- VkCommandBuffer commandBuffer,
- const VkRenderPassBeginInfo* pRenderPassBegin,
- VkSubpassContents contents);
-bool PreCallValidateCmdNextSubpass(
- VkCommandBuffer commandBuffer,
- VkSubpassContents contents);
-bool PreCallValidateCmdEndRenderPass(
- VkCommandBuffer commandBuffer);
-bool PreCallValidateCmdExecuteCommands(
- VkCommandBuffer commandBuffer,
- uint32_t commandBufferCount,
- const VkCommandBuffer* pCommandBuffers);
-bool PreCallValidateBindBufferMemory2(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindBufferMemoryInfo* pBindInfos);
-bool PreCallValidateBindImageMemory2(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindImageMemoryInfo* pBindInfos);
-bool PreCallValidateGetDeviceGroupPeerMemoryFeatures(
- VkDevice device,
- uint32_t heapIndex,
- uint32_t localDeviceIndex,
- uint32_t remoteDeviceIndex,
- VkPeerMemoryFeatureFlags* pPeerMemoryFeatures);
-bool PreCallValidateCmdSetDeviceMask(
- VkCommandBuffer commandBuffer,
- uint32_t deviceMask);
-bool PreCallValidateCmdDispatchBase(
- VkCommandBuffer commandBuffer,
- uint32_t baseGroupX,
- uint32_t baseGroupY,
- uint32_t baseGroupZ,
- uint32_t groupCountX,
- uint32_t groupCountY,
- uint32_t groupCountZ);
-bool PreCallValidateEnumeratePhysicalDeviceGroups(
- VkInstance instance,
- uint32_t* pPhysicalDeviceGroupCount,
- VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties);
-bool PreCallValidateGetImageMemoryRequirements2(
- VkDevice device,
- const VkImageMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements);
-bool PreCallValidateGetBufferMemoryRequirements2(
- VkDevice device,
- const VkBufferMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements);
-bool PreCallValidateGetImageSparseMemoryRequirements2(
- VkDevice device,
- const VkImageSparseMemoryRequirementsInfo2* pInfo,
- uint32_t* pSparseMemoryRequirementCount,
- VkSparseImageMemoryRequirements2* pSparseMemoryRequirements);
-bool PreCallValidateGetPhysicalDeviceFeatures2(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceFeatures2* pFeatures);
-bool PreCallValidateGetPhysicalDeviceProperties2(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceProperties2* pProperties);
-bool PreCallValidateGetPhysicalDeviceFormatProperties2(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkFormatProperties2* pFormatProperties);
-bool PreCallValidateGetPhysicalDeviceImageFormatProperties2(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
- VkImageFormatProperties2* pImageFormatProperties);
-bool PreCallValidateGetPhysicalDeviceQueueFamilyProperties2(
- VkPhysicalDevice physicalDevice,
- uint32_t* pQueueFamilyPropertyCount,
- VkQueueFamilyProperties2* pQueueFamilyProperties);
-bool PreCallValidateGetPhysicalDeviceMemoryProperties2(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceMemoryProperties2* pMemoryProperties);
-bool PreCallValidateGetPhysicalDeviceSparseImageFormatProperties2(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo,
- uint32_t* pPropertyCount,
- VkSparseImageFormatProperties2* pProperties);
-bool PreCallValidateTrimCommandPool(
- VkDevice device,
- VkCommandPool commandPool,
- VkCommandPoolTrimFlags flags);
-bool PreCallValidateGetDeviceQueue2(
- VkDevice device,
- const VkDeviceQueueInfo2* pQueueInfo,
- VkQueue* pQueue);
-bool PreCallValidateCreateSamplerYcbcrConversion(
- VkDevice device,
- const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSamplerYcbcrConversion* pYcbcrConversion);
-bool PreCallValidateDestroySamplerYcbcrConversion(
- VkDevice device,
- VkSamplerYcbcrConversion ycbcrConversion,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateCreateDescriptorUpdateTemplate(
- VkDevice device,
- const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate);
-bool PreCallValidateDestroyDescriptorUpdateTemplate(
- VkDevice device,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateUpdateDescriptorSetWithTemplate(
- VkDevice device,
- VkDescriptorSet descriptorSet,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const void* pData);
-bool PreCallValidateGetPhysicalDeviceExternalBufferProperties(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo,
- VkExternalBufferProperties* pExternalBufferProperties);
-bool PreCallValidateGetPhysicalDeviceExternalFenceProperties(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo,
- VkExternalFenceProperties* pExternalFenceProperties);
-bool PreCallValidateGetPhysicalDeviceExternalSemaphoreProperties(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
- VkExternalSemaphoreProperties* pExternalSemaphoreProperties);
-bool PreCallValidateGetDescriptorSetLayoutSupport(
- VkDevice device,
- const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
- VkDescriptorSetLayoutSupport* pSupport);
-bool PreCallValidateDestroySurfaceKHR(
- VkInstance instance,
- VkSurfaceKHR surface,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateGetPhysicalDeviceSurfaceSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex,
- VkSurfaceKHR surface,
- VkBool32* pSupported);
-bool PreCallValidateGetPhysicalDeviceSurfaceCapabilitiesKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- VkSurfaceCapabilitiesKHR* pSurfaceCapabilities);
-bool PreCallValidateGetPhysicalDeviceSurfaceFormatsKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- uint32_t* pSurfaceFormatCount,
- VkSurfaceFormatKHR* pSurfaceFormats);
-bool PreCallValidateGetPhysicalDeviceSurfacePresentModesKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- uint32_t* pPresentModeCount,
- VkPresentModeKHR* pPresentModes);
-bool PreCallValidateCreateSwapchainKHR(
- VkDevice device,
- const VkSwapchainCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSwapchainKHR* pSwapchain);
-bool PreCallValidateDestroySwapchainKHR(
- VkDevice device,
- VkSwapchainKHR swapchain,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateGetSwapchainImagesKHR(
- VkDevice device,
- VkSwapchainKHR swapchain,
- uint32_t* pSwapchainImageCount,
- VkImage* pSwapchainImages);
-bool PreCallValidateAcquireNextImageKHR(
- VkDevice device,
- VkSwapchainKHR swapchain,
- uint64_t timeout,
- VkSemaphore semaphore,
- VkFence fence,
- uint32_t* pImageIndex);
-bool PreCallValidateQueuePresentKHR(
- VkQueue queue,
- const VkPresentInfoKHR* pPresentInfo);
-bool PreCallValidateGetDeviceGroupPresentCapabilitiesKHR(
- VkDevice device,
- VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities);
-bool PreCallValidateGetDeviceGroupSurfacePresentModesKHR(
- VkDevice device,
- VkSurfaceKHR surface,
- VkDeviceGroupPresentModeFlagsKHR* pModes);
-bool PreCallValidateGetPhysicalDevicePresentRectanglesKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- uint32_t* pRectCount,
- VkRect2D* pRects);
-bool PreCallValidateAcquireNextImage2KHR(
- VkDevice device,
- const VkAcquireNextImageInfoKHR* pAcquireInfo,
- uint32_t* pImageIndex);
-bool PreCallValidateGetPhysicalDeviceDisplayPropertiesKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t* pPropertyCount,
- VkDisplayPropertiesKHR* pProperties);
-bool PreCallValidateGetPhysicalDeviceDisplayPlanePropertiesKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t* pPropertyCount,
- VkDisplayPlanePropertiesKHR* pProperties);
-bool PreCallValidateGetDisplayPlaneSupportedDisplaysKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t planeIndex,
- uint32_t* pDisplayCount,
- VkDisplayKHR* pDisplays);
-bool PreCallValidateGetDisplayModePropertiesKHR(
- VkPhysicalDevice physicalDevice,
- VkDisplayKHR display,
- uint32_t* pPropertyCount,
- VkDisplayModePropertiesKHR* pProperties);
-bool PreCallValidateCreateDisplayModeKHR(
- VkPhysicalDevice physicalDevice,
- VkDisplayKHR display,
- const VkDisplayModeCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDisplayModeKHR* pMode);
-bool PreCallValidateGetDisplayPlaneCapabilitiesKHR(
- VkPhysicalDevice physicalDevice,
- VkDisplayModeKHR mode,
- uint32_t planeIndex,
- VkDisplayPlaneCapabilitiesKHR* pCapabilities);
-bool PreCallValidateCreateDisplayPlaneSurfaceKHR(
- VkInstance instance,
- const VkDisplaySurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-bool PreCallValidateCreateSharedSwapchainsKHR(
- VkDevice device,
- uint32_t swapchainCount,
- const VkSwapchainCreateInfoKHR* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkSwapchainKHR* pSwapchains);
-#ifdef VK_USE_PLATFORM_XLIB_KHR
-bool PreCallValidateCreateXlibSurfaceKHR(
- VkInstance instance,
- const VkXlibSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-#endif
-#ifdef VK_USE_PLATFORM_XLIB_KHR
-bool PreCallValidateGetPhysicalDeviceXlibPresentationSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex,
- Display* dpy,
- VisualID visualID);
-#endif
-#ifdef VK_USE_PLATFORM_XCB_KHR
-bool PreCallValidateCreateXcbSurfaceKHR(
- VkInstance instance,
- const VkXcbSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-#endif
-#ifdef VK_USE_PLATFORM_XCB_KHR
-bool PreCallValidateGetPhysicalDeviceXcbPresentationSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex,
- xcb_connection_t* connection,
- xcb_visualid_t visual_id);
-#endif
-#ifdef VK_USE_PLATFORM_WAYLAND_KHR
-bool PreCallValidateCreateWaylandSurfaceKHR(
- VkInstance instance,
- const VkWaylandSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-#endif
-#ifdef VK_USE_PLATFORM_WAYLAND_KHR
-bool PreCallValidateGetPhysicalDeviceWaylandPresentationSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex,
- struct wl_display* display);
-#endif
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-bool PreCallValidateCreateAndroidSurfaceKHR(
- VkInstance instance,
- const VkAndroidSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-#endif
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-bool PreCallValidateCreateWin32SurfaceKHR(
- VkInstance instance,
- const VkWin32SurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-#endif
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-bool PreCallValidateGetPhysicalDeviceWin32PresentationSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex);
-#endif
-bool PreCallValidateGetPhysicalDeviceFeatures2KHR(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceFeatures2* pFeatures);
-bool PreCallValidateGetPhysicalDeviceProperties2KHR(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceProperties2* pProperties);
-bool PreCallValidateGetPhysicalDeviceFormatProperties2KHR(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkFormatProperties2* pFormatProperties);
-bool PreCallValidateGetPhysicalDeviceImageFormatProperties2KHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
- VkImageFormatProperties2* pImageFormatProperties);
-bool PreCallValidateGetPhysicalDeviceQueueFamilyProperties2KHR(
- VkPhysicalDevice physicalDevice,
- uint32_t* pQueueFamilyPropertyCount,
- VkQueueFamilyProperties2* pQueueFamilyProperties);
-bool PreCallValidateGetPhysicalDeviceMemoryProperties2KHR(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceMemoryProperties2* pMemoryProperties);
-bool PreCallValidateGetPhysicalDeviceSparseImageFormatProperties2KHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo,
- uint32_t* pPropertyCount,
- VkSparseImageFormatProperties2* pProperties);
-bool PreCallValidateGetDeviceGroupPeerMemoryFeaturesKHR(
- VkDevice device,
- uint32_t heapIndex,
- uint32_t localDeviceIndex,
- uint32_t remoteDeviceIndex,
- VkPeerMemoryFeatureFlags* pPeerMemoryFeatures);
-bool PreCallValidateCmdSetDeviceMaskKHR(
- VkCommandBuffer commandBuffer,
- uint32_t deviceMask);
-bool PreCallValidateCmdDispatchBaseKHR(
- VkCommandBuffer commandBuffer,
- uint32_t baseGroupX,
- uint32_t baseGroupY,
- uint32_t baseGroupZ,
- uint32_t groupCountX,
- uint32_t groupCountY,
- uint32_t groupCountZ);
-bool PreCallValidateTrimCommandPoolKHR(
- VkDevice device,
- VkCommandPool commandPool,
- VkCommandPoolTrimFlags flags);
-bool PreCallValidateEnumeratePhysicalDeviceGroupsKHR(
- VkInstance instance,
- uint32_t* pPhysicalDeviceGroupCount,
- VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties);
-bool PreCallValidateGetPhysicalDeviceExternalBufferPropertiesKHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo,
- VkExternalBufferProperties* pExternalBufferProperties);
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-bool PreCallValidateGetMemoryWin32HandleKHR(
- VkDevice device,
- const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo,
- HANDLE* pHandle);
-#endif
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-bool PreCallValidateGetMemoryWin32HandlePropertiesKHR(
- VkDevice device,
- VkExternalMemoryHandleTypeFlagBits handleType,
- HANDLE handle,
- VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties);
-#endif
-bool PreCallValidateGetMemoryFdKHR(
- VkDevice device,
- const VkMemoryGetFdInfoKHR* pGetFdInfo,
- int* pFd);
-bool PreCallValidateGetMemoryFdPropertiesKHR(
- VkDevice device,
- VkExternalMemoryHandleTypeFlagBits handleType,
- int fd,
- VkMemoryFdPropertiesKHR* pMemoryFdProperties);
-bool PreCallValidateGetPhysicalDeviceExternalSemaphorePropertiesKHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
- VkExternalSemaphoreProperties* pExternalSemaphoreProperties);
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-bool PreCallValidateImportSemaphoreWin32HandleKHR(
- VkDevice device,
- const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo);
-#endif
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-bool PreCallValidateGetSemaphoreWin32HandleKHR(
- VkDevice device,
- const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo,
- HANDLE* pHandle);
-#endif
-bool PreCallValidateImportSemaphoreFdKHR(
- VkDevice device,
- const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo);
-bool PreCallValidateGetSemaphoreFdKHR(
- VkDevice device,
- const VkSemaphoreGetFdInfoKHR* pGetFdInfo,
- int* pFd);
-bool PreCallValidateCmdPushDescriptorSetKHR(
- VkCommandBuffer commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipelineLayout layout,
- uint32_t set,
- uint32_t descriptorWriteCount,
- const VkWriteDescriptorSet* pDescriptorWrites);
-bool PreCallValidateCmdPushDescriptorSetWithTemplateKHR(
- VkCommandBuffer commandBuffer,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- VkPipelineLayout layout,
- uint32_t set,
- const void* pData);
-bool PreCallValidateCreateDescriptorUpdateTemplateKHR(
- VkDevice device,
- const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate);
-bool PreCallValidateDestroyDescriptorUpdateTemplateKHR(
- VkDevice device,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateUpdateDescriptorSetWithTemplateKHR(
- VkDevice device,
- VkDescriptorSet descriptorSet,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const void* pData);
-bool PreCallValidateCreateRenderPass2KHR(
- VkDevice device,
- const VkRenderPassCreateInfo2KHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkRenderPass* pRenderPass);
-bool PreCallValidateCmdBeginRenderPass2KHR(
- VkCommandBuffer commandBuffer,
- const VkRenderPassBeginInfo* pRenderPassBegin,
- const VkSubpassBeginInfoKHR* pSubpassBeginInfo);
-bool PreCallValidateCmdNextSubpass2KHR(
- VkCommandBuffer commandBuffer,
- const VkSubpassBeginInfoKHR* pSubpassBeginInfo,
- const VkSubpassEndInfoKHR* pSubpassEndInfo);
-bool PreCallValidateCmdEndRenderPass2KHR(
- VkCommandBuffer commandBuffer,
- const VkSubpassEndInfoKHR* pSubpassEndInfo);
-bool PreCallValidateGetSwapchainStatusKHR(
- VkDevice device,
- VkSwapchainKHR swapchain);
-bool PreCallValidateGetPhysicalDeviceExternalFencePropertiesKHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo,
- VkExternalFenceProperties* pExternalFenceProperties);
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-bool PreCallValidateImportFenceWin32HandleKHR(
- VkDevice device,
- const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo);
-#endif
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-bool PreCallValidateGetFenceWin32HandleKHR(
- VkDevice device,
- const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo,
- HANDLE* pHandle);
-#endif
-bool PreCallValidateImportFenceFdKHR(
- VkDevice device,
- const VkImportFenceFdInfoKHR* pImportFenceFdInfo);
-bool PreCallValidateGetFenceFdKHR(
- VkDevice device,
- const VkFenceGetFdInfoKHR* pGetFdInfo,
- int* pFd);
-bool PreCallValidateGetPhysicalDeviceSurfaceCapabilities2KHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
- VkSurfaceCapabilities2KHR* pSurfaceCapabilities);
-bool PreCallValidateGetPhysicalDeviceSurfaceFormats2KHR(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
- uint32_t* pSurfaceFormatCount,
- VkSurfaceFormat2KHR* pSurfaceFormats);
-bool PreCallValidateGetPhysicalDeviceDisplayProperties2KHR(
- VkPhysicalDevice physicalDevice,
- uint32_t* pPropertyCount,
- VkDisplayProperties2KHR* pProperties);
-bool PreCallValidateGetPhysicalDeviceDisplayPlaneProperties2KHR(
- VkPhysicalDevice physicalDevice,
- uint32_t* pPropertyCount,
- VkDisplayPlaneProperties2KHR* pProperties);
-bool PreCallValidateGetDisplayModeProperties2KHR(
- VkPhysicalDevice physicalDevice,
- VkDisplayKHR display,
- uint32_t* pPropertyCount,
- VkDisplayModeProperties2KHR* pProperties);
-bool PreCallValidateGetDisplayPlaneCapabilities2KHR(
- VkPhysicalDevice physicalDevice,
- const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo,
- VkDisplayPlaneCapabilities2KHR* pCapabilities);
-bool PreCallValidateGetImageMemoryRequirements2KHR(
- VkDevice device,
- const VkImageMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements);
-bool PreCallValidateGetBufferMemoryRequirements2KHR(
- VkDevice device,
- const VkBufferMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements);
-bool PreCallValidateGetImageSparseMemoryRequirements2KHR(
- VkDevice device,
- const VkImageSparseMemoryRequirementsInfo2* pInfo,
- uint32_t* pSparseMemoryRequirementCount,
- VkSparseImageMemoryRequirements2* pSparseMemoryRequirements);
-bool PreCallValidateCreateSamplerYcbcrConversionKHR(
- VkDevice device,
- const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSamplerYcbcrConversion* pYcbcrConversion);
-bool PreCallValidateDestroySamplerYcbcrConversionKHR(
- VkDevice device,
- VkSamplerYcbcrConversion ycbcrConversion,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateBindBufferMemory2KHR(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindBufferMemoryInfo* pBindInfos);
-bool PreCallValidateBindImageMemory2KHR(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindImageMemoryInfo* pBindInfos);
-bool PreCallValidateGetDescriptorSetLayoutSupportKHR(
- VkDevice device,
- const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
- VkDescriptorSetLayoutSupport* pSupport);
-bool PreCallValidateCmdDrawIndirectCountKHR(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride);
-bool PreCallValidateCmdDrawIndexedIndirectCountKHR(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride);
-bool PreCallValidateGetPipelineExecutablePropertiesKHR(
- VkDevice device,
- const VkPipelineInfoKHR* pPipelineInfo,
- uint32_t* pExecutableCount,
- VkPipelineExecutablePropertiesKHR* pProperties);
-bool PreCallValidateGetPipelineExecutableStatisticsKHR(
- VkDevice device,
- const VkPipelineExecutableInfoKHR* pExecutableInfo,
- uint32_t* pStatisticCount,
- VkPipelineExecutableStatisticKHR* pStatistics);
-bool PreCallValidateGetPipelineExecutableInternalRepresentationsKHR(
- VkDevice device,
- const VkPipelineExecutableInfoKHR* pExecutableInfo,
- uint32_t* pInternalRepresentationCount,
- VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations);
-bool PreCallValidateCreateDebugReportCallbackEXT(
- VkInstance instance,
- const VkDebugReportCallbackCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDebugReportCallbackEXT* pCallback);
-bool PreCallValidateDestroyDebugReportCallbackEXT(
- VkInstance instance,
- VkDebugReportCallbackEXT callback,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateDebugReportMessageEXT(
- VkInstance instance,
- VkDebugReportFlagsEXT flags,
- VkDebugReportObjectTypeEXT objectType,
- uint64_t object,
- size_t location,
- int32_t messageCode,
- const char* pLayerPrefix,
- const char* pMessage);
-bool PreCallValidateDebugMarkerSetObjectTagEXT(
- VkDevice device,
- const VkDebugMarkerObjectTagInfoEXT* pTagInfo);
-bool PreCallValidateDebugMarkerSetObjectNameEXT(
- VkDevice device,
- const VkDebugMarkerObjectNameInfoEXT* pNameInfo);
-bool PreCallValidateCmdDebugMarkerBeginEXT(
- VkCommandBuffer commandBuffer,
- const VkDebugMarkerMarkerInfoEXT* pMarkerInfo);
-bool PreCallValidateCmdDebugMarkerEndEXT(
- VkCommandBuffer commandBuffer);
-bool PreCallValidateCmdDebugMarkerInsertEXT(
- VkCommandBuffer commandBuffer,
- const VkDebugMarkerMarkerInfoEXT* pMarkerInfo);
-bool PreCallValidateCmdBindTransformFeedbackBuffersEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstBinding,
- uint32_t bindingCount,
- const VkBuffer* pBuffers,
- const VkDeviceSize* pOffsets,
- const VkDeviceSize* pSizes);
-bool PreCallValidateCmdBeginTransformFeedbackEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstCounterBuffer,
- uint32_t counterBufferCount,
- const VkBuffer* pCounterBuffers,
- const VkDeviceSize* pCounterBufferOffsets);
-bool PreCallValidateCmdEndTransformFeedbackEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstCounterBuffer,
- uint32_t counterBufferCount,
- const VkBuffer* pCounterBuffers,
- const VkDeviceSize* pCounterBufferOffsets);
-bool PreCallValidateCmdBeginQueryIndexedEXT(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query,
- VkQueryControlFlags flags,
- uint32_t index);
-bool PreCallValidateCmdEndQueryIndexedEXT(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query,
- uint32_t index);
-bool PreCallValidateCmdDrawIndirectByteCountEXT(
- VkCommandBuffer commandBuffer,
- uint32_t instanceCount,
- uint32_t firstInstance,
- VkBuffer counterBuffer,
- VkDeviceSize counterBufferOffset,
- uint32_t counterOffset,
- uint32_t vertexStride);
-bool PreCallValidateGetImageViewHandleNVX(
- VkDevice device,
- const VkImageViewHandleInfoNVX* pInfo);
-bool PreCallValidateCmdDrawIndirectCountAMD(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride);
-bool PreCallValidateCmdDrawIndexedIndirectCountAMD(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride);
-bool PreCallValidateGetShaderInfoAMD(
- VkDevice device,
- VkPipeline pipeline,
- VkShaderStageFlagBits shaderStage,
- VkShaderInfoTypeAMD infoType,
- size_t* pInfoSize,
- void* pInfo);
-#ifdef VK_USE_PLATFORM_GGP
-bool PreCallValidateCreateStreamDescriptorSurfaceGGP(
- VkInstance instance,
- const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-#endif
-bool PreCallValidateGetPhysicalDeviceExternalImageFormatPropertiesNV(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkImageType type,
- VkImageTiling tiling,
- VkImageUsageFlags usage,
- VkImageCreateFlags flags,
- VkExternalMemoryHandleTypeFlagsNV externalHandleType,
- VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties);
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-bool PreCallValidateGetMemoryWin32HandleNV(
- VkDevice device,
- VkDeviceMemory memory,
- VkExternalMemoryHandleTypeFlagsNV handleType,
- HANDLE* pHandle);
-#endif
-#ifdef VK_USE_PLATFORM_VI_NN
-bool PreCallValidateCreateViSurfaceNN(
- VkInstance instance,
- const VkViSurfaceCreateInfoNN* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-#endif
-bool PreCallValidateCmdBeginConditionalRenderingEXT(
- VkCommandBuffer commandBuffer,
- const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin);
-bool PreCallValidateCmdEndConditionalRenderingEXT(
- VkCommandBuffer commandBuffer);
-bool PreCallValidateCmdProcessCommandsNVX(
- VkCommandBuffer commandBuffer,
- const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo);
-bool PreCallValidateCmdReserveSpaceForCommandsNVX(
- VkCommandBuffer commandBuffer,
- const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo);
-bool PreCallValidateCreateIndirectCommandsLayoutNVX(
- VkDevice device,
- const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout);
-bool PreCallValidateDestroyIndirectCommandsLayoutNVX(
- VkDevice device,
- VkIndirectCommandsLayoutNVX indirectCommandsLayout,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateCreateObjectTableNVX(
- VkDevice device,
- const VkObjectTableCreateInfoNVX* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkObjectTableNVX* pObjectTable);
-bool PreCallValidateDestroyObjectTableNVX(
- VkDevice device,
- VkObjectTableNVX objectTable,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateRegisterObjectsNVX(
- VkDevice device,
- VkObjectTableNVX objectTable,
- uint32_t objectCount,
- const VkObjectTableEntryNVX* const* ppObjectTableEntries,
- const uint32_t* pObjectIndices);
-bool PreCallValidateUnregisterObjectsNVX(
- VkDevice device,
- VkObjectTableNVX objectTable,
- uint32_t objectCount,
- const VkObjectEntryTypeNVX* pObjectEntryTypes,
- const uint32_t* pObjectIndices);
-bool PreCallValidateGetPhysicalDeviceGeneratedCommandsPropertiesNVX(
- VkPhysicalDevice physicalDevice,
- VkDeviceGeneratedCommandsFeaturesNVX* pFeatures,
- VkDeviceGeneratedCommandsLimitsNVX* pLimits);
-bool PreCallValidateCmdSetViewportWScalingNV(
- VkCommandBuffer commandBuffer,
- uint32_t firstViewport,
- uint32_t viewportCount,
- const VkViewportWScalingNV* pViewportWScalings);
-bool PreCallValidateReleaseDisplayEXT(
- VkPhysicalDevice physicalDevice,
- VkDisplayKHR display);
-#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
-bool PreCallValidateAcquireXlibDisplayEXT(
- VkPhysicalDevice physicalDevice,
- Display* dpy,
- VkDisplayKHR display);
-#endif
-#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
-bool PreCallValidateGetRandROutputDisplayEXT(
- VkPhysicalDevice physicalDevice,
- Display* dpy,
- RROutput rrOutput,
- VkDisplayKHR* pDisplay);
-#endif
-bool PreCallValidateGetPhysicalDeviceSurfaceCapabilities2EXT(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- VkSurfaceCapabilities2EXT* pSurfaceCapabilities);
-bool PreCallValidateDisplayPowerControlEXT(
- VkDevice device,
- VkDisplayKHR display,
- const VkDisplayPowerInfoEXT* pDisplayPowerInfo);
-bool PreCallValidateRegisterDeviceEventEXT(
- VkDevice device,
- const VkDeviceEventInfoEXT* pDeviceEventInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFence* pFence);
-bool PreCallValidateRegisterDisplayEventEXT(
- VkDevice device,
- VkDisplayKHR display,
- const VkDisplayEventInfoEXT* pDisplayEventInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFence* pFence);
-bool PreCallValidateGetSwapchainCounterEXT(
- VkDevice device,
- VkSwapchainKHR swapchain,
- VkSurfaceCounterFlagBitsEXT counter,
- uint64_t* pCounterValue);
-bool PreCallValidateGetRefreshCycleDurationGOOGLE(
- VkDevice device,
- VkSwapchainKHR swapchain,
- VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties);
-bool PreCallValidateGetPastPresentationTimingGOOGLE(
- VkDevice device,
- VkSwapchainKHR swapchain,
- uint32_t* pPresentationTimingCount,
- VkPastPresentationTimingGOOGLE* pPresentationTimings);
-bool PreCallValidateCmdSetDiscardRectangleEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstDiscardRectangle,
- uint32_t discardRectangleCount,
- const VkRect2D* pDiscardRectangles);
-bool PreCallValidateSetHdrMetadataEXT(
- VkDevice device,
- uint32_t swapchainCount,
- const VkSwapchainKHR* pSwapchains,
- const VkHdrMetadataEXT* pMetadata);
-#ifdef VK_USE_PLATFORM_IOS_MVK
-bool PreCallValidateCreateIOSSurfaceMVK(
- VkInstance instance,
- const VkIOSSurfaceCreateInfoMVK* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-#endif
-#ifdef VK_USE_PLATFORM_MACOS_MVK
-bool PreCallValidateCreateMacOSSurfaceMVK(
- VkInstance instance,
- const VkMacOSSurfaceCreateInfoMVK* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-#endif
-bool PreCallValidateSetDebugUtilsObjectNameEXT(
- VkDevice device,
- const VkDebugUtilsObjectNameInfoEXT* pNameInfo);
-bool PreCallValidateSetDebugUtilsObjectTagEXT(
- VkDevice device,
- const VkDebugUtilsObjectTagInfoEXT* pTagInfo);
-bool PreCallValidateQueueBeginDebugUtilsLabelEXT(
- VkQueue queue,
- const VkDebugUtilsLabelEXT* pLabelInfo);
-bool PreCallValidateQueueEndDebugUtilsLabelEXT(
- VkQueue queue);
-bool PreCallValidateQueueInsertDebugUtilsLabelEXT(
- VkQueue queue,
- const VkDebugUtilsLabelEXT* pLabelInfo);
-bool PreCallValidateCmdBeginDebugUtilsLabelEXT(
- VkCommandBuffer commandBuffer,
- const VkDebugUtilsLabelEXT* pLabelInfo);
-bool PreCallValidateCmdEndDebugUtilsLabelEXT(
- VkCommandBuffer commandBuffer);
-bool PreCallValidateCmdInsertDebugUtilsLabelEXT(
- VkCommandBuffer commandBuffer,
- const VkDebugUtilsLabelEXT* pLabelInfo);
-bool PreCallValidateCreateDebugUtilsMessengerEXT(
- VkInstance instance,
- const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDebugUtilsMessengerEXT* pMessenger);
-bool PreCallValidateDestroyDebugUtilsMessengerEXT(
- VkInstance instance,
- VkDebugUtilsMessengerEXT messenger,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateSubmitDebugUtilsMessageEXT(
- VkInstance instance,
- VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
- VkDebugUtilsMessageTypeFlagsEXT messageTypes,
- const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData);
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-bool PreCallValidateGetAndroidHardwareBufferPropertiesANDROID(
- VkDevice device,
- const struct AHardwareBuffer* buffer,
- VkAndroidHardwareBufferPropertiesANDROID* pProperties);
-#endif
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-bool PreCallValidateGetMemoryAndroidHardwareBufferANDROID(
- VkDevice device,
- const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo,
- struct AHardwareBuffer** pBuffer);
-#endif
-bool PreCallValidateCmdSetSampleLocationsEXT(
- VkCommandBuffer commandBuffer,
- const VkSampleLocationsInfoEXT* pSampleLocationsInfo);
-bool PreCallValidateGetPhysicalDeviceMultisamplePropertiesEXT(
- VkPhysicalDevice physicalDevice,
- VkSampleCountFlagBits samples,
- VkMultisamplePropertiesEXT* pMultisampleProperties);
-bool PreCallValidateGetImageDrmFormatModifierPropertiesEXT(
- VkDevice device,
- VkImage image,
- VkImageDrmFormatModifierPropertiesEXT* pProperties);
-bool PreCallValidateCreateValidationCacheEXT(
- VkDevice device,
- const VkValidationCacheCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkValidationCacheEXT* pValidationCache);
-bool PreCallValidateDestroyValidationCacheEXT(
- VkDevice device,
- VkValidationCacheEXT validationCache,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateMergeValidationCachesEXT(
- VkDevice device,
- VkValidationCacheEXT dstCache,
- uint32_t srcCacheCount,
- const VkValidationCacheEXT* pSrcCaches);
-bool PreCallValidateGetValidationCacheDataEXT(
- VkDevice device,
- VkValidationCacheEXT validationCache,
- size_t* pDataSize,
- void* pData);
-bool PreCallValidateCmdBindShadingRateImageNV(
- VkCommandBuffer commandBuffer,
- VkImageView imageView,
- VkImageLayout imageLayout);
-bool PreCallValidateCmdSetViewportShadingRatePaletteNV(
- VkCommandBuffer commandBuffer,
- uint32_t firstViewport,
- uint32_t viewportCount,
- const VkShadingRatePaletteNV* pShadingRatePalettes);
-bool PreCallValidateCmdSetCoarseSampleOrderNV(
- VkCommandBuffer commandBuffer,
- VkCoarseSampleOrderTypeNV sampleOrderType,
- uint32_t customSampleOrderCount,
- const VkCoarseSampleOrderCustomNV* pCustomSampleOrders);
-bool PreCallValidateCreateAccelerationStructureNV(
- VkDevice device,
- const VkAccelerationStructureCreateInfoNV* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkAccelerationStructureNV* pAccelerationStructure);
-bool PreCallValidateDestroyAccelerationStructureNV(
- VkDevice device,
- VkAccelerationStructureNV accelerationStructure,
- const VkAllocationCallbacks* pAllocator);
-bool PreCallValidateGetAccelerationStructureMemoryRequirementsNV(
- VkDevice device,
- const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo,
- VkMemoryRequirements2KHR* pMemoryRequirements);
-bool PreCallValidateBindAccelerationStructureMemoryNV(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindAccelerationStructureMemoryInfoNV* pBindInfos);
-bool PreCallValidateCmdBuildAccelerationStructureNV(
- VkCommandBuffer commandBuffer,
- const VkAccelerationStructureInfoNV* pInfo,
- VkBuffer instanceData,
- VkDeviceSize instanceOffset,
- VkBool32 update,
- VkAccelerationStructureNV dst,
- VkAccelerationStructureNV src,
- VkBuffer scratch,
- VkDeviceSize scratchOffset);
-bool PreCallValidateCmdCopyAccelerationStructureNV(
- VkCommandBuffer commandBuffer,
- VkAccelerationStructureNV dst,
- VkAccelerationStructureNV src,
- VkCopyAccelerationStructureModeNV mode);
-bool PreCallValidateCmdTraceRaysNV(
- VkCommandBuffer commandBuffer,
- VkBuffer raygenShaderBindingTableBuffer,
- VkDeviceSize raygenShaderBindingOffset,
- VkBuffer missShaderBindingTableBuffer,
- VkDeviceSize missShaderBindingOffset,
- VkDeviceSize missShaderBindingStride,
- VkBuffer hitShaderBindingTableBuffer,
- VkDeviceSize hitShaderBindingOffset,
- VkDeviceSize hitShaderBindingStride,
- VkBuffer callableShaderBindingTableBuffer,
- VkDeviceSize callableShaderBindingOffset,
- VkDeviceSize callableShaderBindingStride,
- uint32_t width,
- uint32_t height,
- uint32_t depth);
-bool PreCallValidateCreateRayTracingPipelinesNV(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VkRayTracingPipelineCreateInfoNV* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkPipeline* pPipelines);
-bool PreCallValidateGetRayTracingShaderGroupHandlesNV(
- VkDevice device,
- VkPipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- size_t dataSize,
- void* pData);
-bool PreCallValidateGetAccelerationStructureHandleNV(
- VkDevice device,
- VkAccelerationStructureNV accelerationStructure,
- size_t dataSize,
- void* pData);
-bool PreCallValidateCmdWriteAccelerationStructuresPropertiesNV(
- VkCommandBuffer commandBuffer,
- uint32_t accelerationStructureCount,
- const VkAccelerationStructureNV* pAccelerationStructures,
- VkQueryType queryType,
- VkQueryPool queryPool,
- uint32_t firstQuery);
-bool PreCallValidateCompileDeferredNV(
- VkDevice device,
- VkPipeline pipeline,
- uint32_t shader);
-bool PreCallValidateGetMemoryHostPointerPropertiesEXT(
- VkDevice device,
- VkExternalMemoryHandleTypeFlagBits handleType,
- const void* pHostPointer,
- VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties);
-bool PreCallValidateCmdWriteBufferMarkerAMD(
- VkCommandBuffer commandBuffer,
- VkPipelineStageFlagBits pipelineStage,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- uint32_t marker);
-bool PreCallValidateGetPhysicalDeviceCalibrateableTimeDomainsEXT(
- VkPhysicalDevice physicalDevice,
- uint32_t* pTimeDomainCount,
- VkTimeDomainEXT* pTimeDomains);
-bool PreCallValidateGetCalibratedTimestampsEXT(
- VkDevice device,
- uint32_t timestampCount,
- const VkCalibratedTimestampInfoEXT* pTimestampInfos,
- uint64_t* pTimestamps,
- uint64_t* pMaxDeviation);
-bool PreCallValidateCmdDrawMeshTasksNV(
- VkCommandBuffer commandBuffer,
- uint32_t taskCount,
- uint32_t firstTask);
-bool PreCallValidateCmdDrawMeshTasksIndirectNV(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- uint32_t drawCount,
- uint32_t stride);
-bool PreCallValidateCmdDrawMeshTasksIndirectCountNV(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride);
-bool PreCallValidateCmdSetExclusiveScissorNV(
- VkCommandBuffer commandBuffer,
- uint32_t firstExclusiveScissor,
- uint32_t exclusiveScissorCount,
- const VkRect2D* pExclusiveScissors);
-bool PreCallValidateCmdSetCheckpointNV(
- VkCommandBuffer commandBuffer,
- const void* pCheckpointMarker);
-bool PreCallValidateGetQueueCheckpointDataNV(
- VkQueue queue,
- uint32_t* pCheckpointDataCount,
- VkCheckpointDataNV* pCheckpointData);
-bool PreCallValidateInitializePerformanceApiINTEL(
- VkDevice device,
- const VkInitializePerformanceApiInfoINTEL* pInitializeInfo);
-bool PreCallValidateUninitializePerformanceApiINTEL(
- VkDevice device);
-bool PreCallValidateCmdSetPerformanceMarkerINTEL(
- VkCommandBuffer commandBuffer,
- const VkPerformanceMarkerInfoINTEL* pMarkerInfo);
-bool PreCallValidateCmdSetPerformanceStreamMarkerINTEL(
- VkCommandBuffer commandBuffer,
- const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo);
-bool PreCallValidateCmdSetPerformanceOverrideINTEL(
- VkCommandBuffer commandBuffer,
- const VkPerformanceOverrideInfoINTEL* pOverrideInfo);
-bool PreCallValidateAcquirePerformanceConfigurationINTEL(
- VkDevice device,
- const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo,
- VkPerformanceConfigurationINTEL* pConfiguration);
-bool PreCallValidateReleasePerformanceConfigurationINTEL(
- VkDevice device,
- VkPerformanceConfigurationINTEL configuration);
-bool PreCallValidateQueueSetPerformanceConfigurationINTEL(
- VkQueue queue,
- VkPerformanceConfigurationINTEL configuration);
-bool PreCallValidateGetPerformanceParameterINTEL(
- VkDevice device,
- VkPerformanceParameterTypeINTEL parameter,
- VkPerformanceValueINTEL* pValue);
-bool PreCallValidateSetLocalDimmingAMD(
- VkDevice device,
- VkSwapchainKHR swapChain,
- VkBool32 localDimmingEnable);
-#ifdef VK_USE_PLATFORM_FUCHSIA
-bool PreCallValidateCreateImagePipeSurfaceFUCHSIA(
- VkInstance instance,
- const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-#endif
-#ifdef VK_USE_PLATFORM_METAL_EXT
-bool PreCallValidateCreateMetalSurfaceEXT(
- VkInstance instance,
- const VkMetalSurfaceCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-#endif
-bool PreCallValidateGetBufferDeviceAddressEXT(
- VkDevice device,
- const VkBufferDeviceAddressInfoEXT* pInfo);
-bool PreCallValidateGetPhysicalDeviceCooperativeMatrixPropertiesNV(
- VkPhysicalDevice physicalDevice,
- uint32_t* pPropertyCount,
- VkCooperativeMatrixPropertiesNV* pProperties);
-bool PreCallValidateGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
- VkPhysicalDevice physicalDevice,
- uint32_t* pCombinationCount,
- VkFramebufferMixedSamplesCombinationNV* pCombinations);
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-bool PreCallValidateGetPhysicalDeviceSurfacePresentModes2EXT(
- VkPhysicalDevice physicalDevice,
- const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
- uint32_t* pPresentModeCount,
- VkPresentModeKHR* pPresentModes);
-#endif
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-bool PreCallValidateAcquireFullScreenExclusiveModeEXT(
- VkDevice device,
- VkSwapchainKHR swapchain);
-#endif
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-bool PreCallValidateReleaseFullScreenExclusiveModeEXT(
- VkDevice device,
- VkSwapchainKHR swapchain);
-#endif
-bool PreCallValidateCreateHeadlessSurfaceEXT(
- VkInstance instance,
- const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-bool PreCallValidateCmdSetLineStippleEXT(
- VkCommandBuffer commandBuffer,
- uint32_t lineStippleFactor,
- uint16_t lineStipplePattern);
-bool PreCallValidateResetQueryPoolEXT(
- VkDevice device,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount);
diff --git a/layers/generated/spirv_tools_commit_id.h b/layers/generated/spirv_tools_commit_id.h
deleted file mode 100644
index 7404bf415..000000000
--- a/layers/generated/spirv_tools_commit_id.h
+++ /dev/null
@@ -1,29 +0,0 @@
-// *** THIS FILE IS GENERATED - DO NOT EDIT ***
-// See external_revision_generator.py for modifications
-
-/***************************************************************************
- *
- * Copyright (c) 2015-2019 The Khronos Group Inc.
- * Copyright (c) 2015-2019 Valve Corporation
- * Copyright (c) 2015-2019 LunarG, Inc.
- * Copyright (c) 2015-2019 Google Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * Author: Chris Forbes <chrisforbes@google.com>
- * Author: Cort Stratton <cort@google.com>
- *
- ****************************************************************************/
-#pragma once
-
-#define SPIRV_TOOLS_COMMIT_ID "333d1c95792692205472c457d7bec915a94c8000"
diff --git a/layers/generated/thread_safety.cpp b/layers/generated/thread_safety.cpp
deleted file mode 100644
index e33f0060d..000000000
--- a/layers/generated/thread_safety.cpp
+++ /dev/null
@@ -1,5849 +0,0 @@
-
-// This file is ***GENERATED***. Do Not Edit.
-// See thread_safety_generator.py for modifications.
-
-/* Copyright (c) 2015-2019 The Khronos Group Inc.
- * Copyright (c) 2015-2019 Valve Corporation
- * Copyright (c) 2015-2019 LunarG, Inc.
- * Copyright (c) 2015-2019 Google Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * Author: Mark Lobodzinski <mark@lunarg.com>
- */
-#include "chassis.h"
-#include "thread_safety.h"
-
-
-void ThreadSafety::PreCallRecordAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pAllocateInfo,
- VkCommandBuffer *pCommandBuffers) {
- StartReadObject(device);
- StartWriteObject(pAllocateInfo->commandPool);
-}
-
-void ThreadSafety::PostCallRecordAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pAllocateInfo,
- VkCommandBuffer *pCommandBuffers, VkResult result) {
- FinishReadObject(device);
- FinishWriteObject(pAllocateInfo->commandPool);
-
- // Record mapping from command buffer to command pool
- if(pCommandBuffers) {
- for (uint32_t index = 0; index < pAllocateInfo->commandBufferCount; index++) {
- auto &bucket = GetBucket(pCommandBuffers[index]);
- std::lock_guard<std::mutex> lock(bucket.command_pool_lock);
- bucket.command_pool_map[pCommandBuffers[index]] = pAllocateInfo->commandPool;
- }
- }
-}
-
-void ThreadSafety::PreCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
- VkDescriptorSet *pDescriptorSets) {
- StartReadObject(device);
- StartWriteObject(pAllocateInfo->descriptorPool);
- // Host access to pAllocateInfo::descriptorPool must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
- VkDescriptorSet *pDescriptorSets, VkResult result) {
- FinishReadObject(device);
- FinishWriteObject(pAllocateInfo->descriptorPool);
- // Host access to pAllocateInfo::descriptorPool must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount,
- const VkCommandBuffer *pCommandBuffers) {
- const bool lockCommandPool = false; // pool is already directly locked
- StartReadObject(device);
- StartWriteObject(commandPool);
- if(pCommandBuffers) {
- // Even though we're immediately "finishing" below, we still are testing for concurrency with any call in process
- // so this isn't a no-op
- for (uint32_t index = 0; index < commandBufferCount; index++) {
- StartWriteObject(pCommandBuffers[index], lockCommandPool);
- }
- // The driver may immediately reuse command buffers in another thread.
- // These updates need to be done before calling down to the driver.
- for (uint32_t index = 0; index < commandBufferCount; index++) {
- FinishWriteObject(pCommandBuffers[index], lockCommandPool);
- }
- // Holding the lock for the shortest time while we update the map
- for (uint32_t index = 0; index < commandBufferCount; index++) {
- auto &bucket = GetBucket(pCommandBuffers[index]);
- std::lock_guard<std::mutex> lock(bucket.command_pool_lock);
- bucket.command_pool_map.erase(pCommandBuffers[index]);
- }
- }
-}
-
-void ThreadSafety::PostCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount,
- const VkCommandBuffer *pCommandBuffers) {
- FinishReadObject(device);
- FinishWriteObject(commandPool);
-}
-
-void ThreadSafety::PreCallRecordResetCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags) {
- StartReadObject(device);
- StartWriteObject(commandPool);
- // Check for any uses of non-externally sync'd command buffers (for example from vkCmdExecuteCommands)
- c_VkCommandPoolContents.StartWrite(commandPool);
- // Host access to commandPool must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordResetCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags, VkResult result) {
- FinishReadObject(device);
- FinishWriteObject(commandPool);
- c_VkCommandPoolContents.FinishWrite(commandPool);
- // Host access to commandPool must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordDestroyCommandPool(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks *pAllocator) {
- StartReadObject(device);
- StartWriteObject(commandPool);
- // Check for any uses of non-externally sync'd command buffers (for example from vkCmdExecuteCommands)
- c_VkCommandPoolContents.StartWrite(commandPool);
- // Host access to commandPool must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordDestroyCommandPool(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks *pAllocator) {
- FinishReadObject(device);
- FinishWriteObject(commandPool);
- c_VkCommandPoolContents.FinishWrite(commandPool);
-}
-
-// GetSwapchainImages can return a non-zero count with a NULL pSwapchainImages pointer. Let's avoid crashes by ignoring
-// pSwapchainImages.
-void ThreadSafety::PreCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t *pSwapchainImageCount,
- VkImage *pSwapchainImages) {
- StartReadObject(device);
- StartReadObject(swapchain);
-}
-
-void ThreadSafety::PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t *pSwapchainImageCount,
- VkImage *pSwapchainImages, VkResult result) {
- FinishReadObject(device);
- FinishReadObject(swapchain);
-}
-
-
-
-void ThreadSafety::PreCallRecordDestroyInstance(
- VkInstance instance,
- const VkAllocationCallbacks* pAllocator) {
- StartWriteObject(instance);
- // Host access to instance must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordDestroyInstance(
- VkInstance instance,
- const VkAllocationCallbacks* pAllocator) {
- FinishWriteObject(instance);
- // Host access to instance must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordEnumeratePhysicalDevices(
- VkInstance instance,
- uint32_t* pPhysicalDeviceCount,
- VkPhysicalDevice* pPhysicalDevices) {
- StartReadObject(instance);
-}
-
-void ThreadSafety::PostCallRecordEnumeratePhysicalDevices(
- VkInstance instance,
- uint32_t* pPhysicalDeviceCount,
- VkPhysicalDevice* pPhysicalDevices,
- VkResult result) {
- FinishReadObject(instance);
-}
-
-void ThreadSafety::PreCallRecordGetInstanceProcAddr(
- VkInstance instance,
- const char* pName) {
- StartReadObject(instance);
-}
-
-void ThreadSafety::PostCallRecordGetInstanceProcAddr(
- VkInstance instance,
- const char* pName) {
- FinishReadObject(instance);
-}
-
-void ThreadSafety::PreCallRecordGetDeviceProcAddr(
- VkDevice device,
- const char* pName) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordGetDeviceProcAddr(
- VkDevice device,
- const char* pName) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordDestroyDevice(
- VkDevice device,
- const VkAllocationCallbacks* pAllocator) {
- StartWriteObject(device);
- // Host access to device must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordDestroyDevice(
- VkDevice device,
- const VkAllocationCallbacks* pAllocator) {
- FinishWriteObject(device);
- // Host access to device must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordGetDeviceQueue(
- VkDevice device,
- uint32_t queueFamilyIndex,
- uint32_t queueIndex,
- VkQueue* pQueue) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordGetDeviceQueue(
- VkDevice device,
- uint32_t queueFamilyIndex,
- uint32_t queueIndex,
- VkQueue* pQueue) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordQueueSubmit(
- VkQueue queue,
- uint32_t submitCount,
- const VkSubmitInfo* pSubmits,
- VkFence fence) {
- StartWriteObject(queue);
- StartWriteObject(fence);
- // Host access to queue must be externally synchronized
- // Host access to fence must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordQueueSubmit(
- VkQueue queue,
- uint32_t submitCount,
- const VkSubmitInfo* pSubmits,
- VkFence fence,
- VkResult result) {
- FinishWriteObject(queue);
- FinishWriteObject(fence);
- // Host access to queue must be externally synchronized
- // Host access to fence must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordQueueWaitIdle(
- VkQueue queue) {
- StartWriteObject(queue);
- // Host access to queue must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordQueueWaitIdle(
- VkQueue queue,
- VkResult result) {
- FinishWriteObject(queue);
- // Host access to queue must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordDeviceWaitIdle(
- VkDevice device) {
- StartReadObject(device);
- // all sname:VkQueue objects created from pname:device must be externally synchronized between host accesses
-}
-
-void ThreadSafety::PostCallRecordDeviceWaitIdle(
- VkDevice device,
- VkResult result) {
- FinishReadObject(device);
- // all sname:VkQueue objects created from pname:device must be externally synchronized between host accesses
-}
-
-void ThreadSafety::PreCallRecordAllocateMemory(
- VkDevice device,
- const VkMemoryAllocateInfo* pAllocateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDeviceMemory* pMemory) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordAllocateMemory(
- VkDevice device,
- const VkMemoryAllocateInfo* pAllocateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDeviceMemory* pMemory,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordFreeMemory(
- VkDevice device,
- VkDeviceMemory memory,
- const VkAllocationCallbacks* pAllocator) {
- StartReadObject(device);
- StartWriteObject(memory);
- // Host access to memory must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordFreeMemory(
- VkDevice device,
- VkDeviceMemory memory,
- const VkAllocationCallbacks* pAllocator) {
- FinishReadObject(device);
- FinishWriteObject(memory);
- // Host access to memory must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordMapMemory(
- VkDevice device,
- VkDeviceMemory memory,
- VkDeviceSize offset,
- VkDeviceSize size,
- VkMemoryMapFlags flags,
- void** ppData) {
- StartReadObject(device);
- StartWriteObject(memory);
- // Host access to memory must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordMapMemory(
- VkDevice device,
- VkDeviceMemory memory,
- VkDeviceSize offset,
- VkDeviceSize size,
- VkMemoryMapFlags flags,
- void** ppData,
- VkResult result) {
- FinishReadObject(device);
- FinishWriteObject(memory);
- // Host access to memory must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordUnmapMemory(
- VkDevice device,
- VkDeviceMemory memory) {
- StartReadObject(device);
- StartWriteObject(memory);
- // Host access to memory must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordUnmapMemory(
- VkDevice device,
- VkDeviceMemory memory) {
- FinishReadObject(device);
- FinishWriteObject(memory);
- // Host access to memory must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordFlushMappedMemoryRanges(
- VkDevice device,
- uint32_t memoryRangeCount,
- const VkMappedMemoryRange* pMemoryRanges) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordFlushMappedMemoryRanges(
- VkDevice device,
- uint32_t memoryRangeCount,
- const VkMappedMemoryRange* pMemoryRanges,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordInvalidateMappedMemoryRanges(
- VkDevice device,
- uint32_t memoryRangeCount,
- const VkMappedMemoryRange* pMemoryRanges) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordInvalidateMappedMemoryRanges(
- VkDevice device,
- uint32_t memoryRangeCount,
- const VkMappedMemoryRange* pMemoryRanges,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordGetDeviceMemoryCommitment(
- VkDevice device,
- VkDeviceMemory memory,
- VkDeviceSize* pCommittedMemoryInBytes) {
- StartReadObject(device);
- StartReadObject(memory);
-}
-
-void ThreadSafety::PostCallRecordGetDeviceMemoryCommitment(
- VkDevice device,
- VkDeviceMemory memory,
- VkDeviceSize* pCommittedMemoryInBytes) {
- FinishReadObject(device);
- FinishReadObject(memory);
-}
-
-void ThreadSafety::PreCallRecordBindBufferMemory(
- VkDevice device,
- VkBuffer buffer,
- VkDeviceMemory memory,
- VkDeviceSize memoryOffset) {
- StartReadObject(device);
- StartWriteObject(buffer);
- StartReadObject(memory);
- // Host access to buffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordBindBufferMemory(
- VkDevice device,
- VkBuffer buffer,
- VkDeviceMemory memory,
- VkDeviceSize memoryOffset,
- VkResult result) {
- FinishReadObject(device);
- FinishWriteObject(buffer);
- FinishReadObject(memory);
- // Host access to buffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordBindImageMemory(
- VkDevice device,
- VkImage image,
- VkDeviceMemory memory,
- VkDeviceSize memoryOffset) {
- StartReadObject(device);
- StartWriteObject(image);
- StartReadObject(memory);
- // Host access to image must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordBindImageMemory(
- VkDevice device,
- VkImage image,
- VkDeviceMemory memory,
- VkDeviceSize memoryOffset,
- VkResult result) {
- FinishReadObject(device);
- FinishWriteObject(image);
- FinishReadObject(memory);
- // Host access to image must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordGetBufferMemoryRequirements(
- VkDevice device,
- VkBuffer buffer,
- VkMemoryRequirements* pMemoryRequirements) {
- StartReadObject(device);
- StartReadObject(buffer);
-}
-
-void ThreadSafety::PostCallRecordGetBufferMemoryRequirements(
- VkDevice device,
- VkBuffer buffer,
- VkMemoryRequirements* pMemoryRequirements) {
- FinishReadObject(device);
- FinishReadObject(buffer);
-}
-
-void ThreadSafety::PreCallRecordGetImageMemoryRequirements(
- VkDevice device,
- VkImage image,
- VkMemoryRequirements* pMemoryRequirements) {
- StartReadObject(device);
- StartReadObject(image);
-}
-
-void ThreadSafety::PostCallRecordGetImageMemoryRequirements(
- VkDevice device,
- VkImage image,
- VkMemoryRequirements* pMemoryRequirements) {
- FinishReadObject(device);
- FinishReadObject(image);
-}
-
-void ThreadSafety::PreCallRecordGetImageSparseMemoryRequirements(
- VkDevice device,
- VkImage image,
- uint32_t* pSparseMemoryRequirementCount,
- VkSparseImageMemoryRequirements* pSparseMemoryRequirements) {
- StartReadObject(device);
- StartReadObject(image);
-}
-
-void ThreadSafety::PostCallRecordGetImageSparseMemoryRequirements(
- VkDevice device,
- VkImage image,
- uint32_t* pSparseMemoryRequirementCount,
- VkSparseImageMemoryRequirements* pSparseMemoryRequirements) {
- FinishReadObject(device);
- FinishReadObject(image);
-}
-
-void ThreadSafety::PreCallRecordQueueBindSparse(
- VkQueue queue,
- uint32_t bindInfoCount,
- const VkBindSparseInfo* pBindInfo,
- VkFence fence) {
- StartWriteObject(queue);
- if (pBindInfo) {
- for (uint32_t index=0; index < bindInfoCount; index++) {
- if (pBindInfo[index].pBufferBinds) {
- for (uint32_t index2=0; index2 < pBindInfo[index].bufferBindCount; index2++) {
- StartWriteObject(pBindInfo[index].pBufferBinds[index2].buffer);
- }
- }
- if (pBindInfo[index].pImageOpaqueBinds) {
- for (uint32_t index2=0; index2 < pBindInfo[index].imageOpaqueBindCount; index2++) {
- StartWriteObject(pBindInfo[index].pImageOpaqueBinds[index2].image);
- }
- }
- if (pBindInfo[index].pImageBinds) {
- for (uint32_t index2=0; index2 < pBindInfo[index].imageBindCount; index2++) {
- StartWriteObject(pBindInfo[index].pImageBinds[index2].image);
- }
- }
- }
- }
- StartWriteObject(fence);
- // Host access to queue must be externally synchronized
- // Host access to pBindInfo[].pBufferBinds[].buffer,pBindInfo[].pImageOpaqueBinds[].image,pBindInfo[].pImageBinds[].image must be externally synchronized
- // Host access to fence must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordQueueBindSparse(
- VkQueue queue,
- uint32_t bindInfoCount,
- const VkBindSparseInfo* pBindInfo,
- VkFence fence,
- VkResult result) {
- FinishWriteObject(queue);
- if (pBindInfo) {
- for (uint32_t index=0; index < bindInfoCount; index++) {
- if (pBindInfo[index].pBufferBinds) {
- for (uint32_t index2=0; index2 < pBindInfo[index].bufferBindCount; index2++) {
- FinishWriteObject(pBindInfo[index].pBufferBinds[index2].buffer);
- }
- }
- if (pBindInfo[index].pImageOpaqueBinds) {
- for (uint32_t index2=0; index2 < pBindInfo[index].imageOpaqueBindCount; index2++) {
- FinishWriteObject(pBindInfo[index].pImageOpaqueBinds[index2].image);
- }
- }
- if (pBindInfo[index].pImageBinds) {
- for (uint32_t index2=0; index2 < pBindInfo[index].imageBindCount; index2++) {
- FinishWriteObject(pBindInfo[index].pImageBinds[index2].image);
- }
- }
- }
- }
- FinishWriteObject(fence);
- // Host access to queue must be externally synchronized
- // Host access to pBindInfo[].pBufferBinds[].buffer,pBindInfo[].pImageOpaqueBinds[].image,pBindInfo[].pImageBinds[].image must be externally synchronized
- // Host access to fence must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCreateFence(
- VkDevice device,
- const VkFenceCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFence* pFence) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordCreateFence(
- VkDevice device,
- const VkFenceCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFence* pFence,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordDestroyFence(
- VkDevice device,
- VkFence fence,
- const VkAllocationCallbacks* pAllocator) {
- StartReadObject(device);
- StartWriteObject(fence);
- // Host access to fence must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordDestroyFence(
- VkDevice device,
- VkFence fence,
- const VkAllocationCallbacks* pAllocator) {
- FinishReadObject(device);
- FinishWriteObject(fence);
- // Host access to fence must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordResetFences(
- VkDevice device,
- uint32_t fenceCount,
- const VkFence* pFences) {
- StartReadObject(device);
- if (pFences) {
- for (uint32_t index=0; index < fenceCount; index++) {
- StartWriteObject(pFences[index]);
- }
- }
- // Host access to each member of pFences must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordResetFences(
- VkDevice device,
- uint32_t fenceCount,
- const VkFence* pFences,
- VkResult result) {
- FinishReadObject(device);
- if (pFences) {
- for (uint32_t index=0; index < fenceCount; index++) {
- FinishWriteObject(pFences[index]);
- }
- }
- // Host access to each member of pFences must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordGetFenceStatus(
- VkDevice device,
- VkFence fence) {
- StartReadObject(device);
- StartReadObject(fence);
-}
-
-void ThreadSafety::PostCallRecordGetFenceStatus(
- VkDevice device,
- VkFence fence,
- VkResult result) {
- FinishReadObject(device);
- FinishReadObject(fence);
-}
-
-void ThreadSafety::PreCallRecordWaitForFences(
- VkDevice device,
- uint32_t fenceCount,
- const VkFence* pFences,
- VkBool32 waitAll,
- uint64_t timeout) {
- StartReadObject(device);
- if (pFences) {
- for (uint32_t index = 0; index < fenceCount; index++) {
- StartReadObject(pFences[index]);
- }
- }
-}
-
-void ThreadSafety::PostCallRecordWaitForFences(
- VkDevice device,
- uint32_t fenceCount,
- const VkFence* pFences,
- VkBool32 waitAll,
- uint64_t timeout,
- VkResult result) {
- FinishReadObject(device);
- if (pFences) {
- for (uint32_t index = 0; index < fenceCount; index++) {
- FinishReadObject(pFences[index]);
- }
- }
-}
-
-void ThreadSafety::PreCallRecordCreateSemaphore(
- VkDevice device,
- const VkSemaphoreCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSemaphore* pSemaphore) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordCreateSemaphore(
- VkDevice device,
- const VkSemaphoreCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSemaphore* pSemaphore,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordDestroySemaphore(
- VkDevice device,
- VkSemaphore semaphore,
- const VkAllocationCallbacks* pAllocator) {
- StartReadObject(device);
- StartWriteObject(semaphore);
- // Host access to semaphore must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordDestroySemaphore(
- VkDevice device,
- VkSemaphore semaphore,
- const VkAllocationCallbacks* pAllocator) {
- FinishReadObject(device);
- FinishWriteObject(semaphore);
- // Host access to semaphore must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCreateEvent(
- VkDevice device,
- const VkEventCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkEvent* pEvent) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordCreateEvent(
- VkDevice device,
- const VkEventCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkEvent* pEvent,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordDestroyEvent(
- VkDevice device,
- VkEvent event,
- const VkAllocationCallbacks* pAllocator) {
- StartReadObject(device);
- StartWriteObject(event);
- // Host access to event must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordDestroyEvent(
- VkDevice device,
- VkEvent event,
- const VkAllocationCallbacks* pAllocator) {
- FinishReadObject(device);
- FinishWriteObject(event);
- // Host access to event must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordGetEventStatus(
- VkDevice device,
- VkEvent event) {
- StartReadObject(device);
- StartReadObject(event);
-}
-
-void ThreadSafety::PostCallRecordGetEventStatus(
- VkDevice device,
- VkEvent event,
- VkResult result) {
- FinishReadObject(device);
- FinishReadObject(event);
-}
-
-void ThreadSafety::PreCallRecordSetEvent(
- VkDevice device,
- VkEvent event) {
- StartReadObject(device);
- StartWriteObject(event);
- // Host access to event must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordSetEvent(
- VkDevice device,
- VkEvent event,
- VkResult result) {
- FinishReadObject(device);
- FinishWriteObject(event);
- // Host access to event must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordResetEvent(
- VkDevice device,
- VkEvent event) {
- StartReadObject(device);
- StartWriteObject(event);
- // Host access to event must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordResetEvent(
- VkDevice device,
- VkEvent event,
- VkResult result) {
- FinishReadObject(device);
- FinishWriteObject(event);
- // Host access to event must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCreateQueryPool(
- VkDevice device,
- const VkQueryPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkQueryPool* pQueryPool) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordCreateQueryPool(
- VkDevice device,
- const VkQueryPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkQueryPool* pQueryPool,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordDestroyQueryPool(
- VkDevice device,
- VkQueryPool queryPool,
- const VkAllocationCallbacks* pAllocator) {
- StartReadObject(device);
- StartWriteObject(queryPool);
- // Host access to queryPool must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordDestroyQueryPool(
- VkDevice device,
- VkQueryPool queryPool,
- const VkAllocationCallbacks* pAllocator) {
- FinishReadObject(device);
- FinishWriteObject(queryPool);
- // Host access to queryPool must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordGetQueryPoolResults(
- VkDevice device,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- size_t dataSize,
- void* pData,
- VkDeviceSize stride,
- VkQueryResultFlags flags) {
- StartReadObject(device);
- StartReadObject(queryPool);
-}
-
-void ThreadSafety::PostCallRecordGetQueryPoolResults(
- VkDevice device,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- size_t dataSize,
- void* pData,
- VkDeviceSize stride,
- VkQueryResultFlags flags,
- VkResult result) {
- FinishReadObject(device);
- FinishReadObject(queryPool);
-}
-
-void ThreadSafety::PreCallRecordCreateBuffer(
- VkDevice device,
- const VkBufferCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkBuffer* pBuffer) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordCreateBuffer(
- VkDevice device,
- const VkBufferCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkBuffer* pBuffer,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordDestroyBuffer(
- VkDevice device,
- VkBuffer buffer,
- const VkAllocationCallbacks* pAllocator) {
- StartReadObject(device);
- StartWriteObject(buffer);
- // Host access to buffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordDestroyBuffer(
- VkDevice device,
- VkBuffer buffer,
- const VkAllocationCallbacks* pAllocator) {
- FinishReadObject(device);
- FinishWriteObject(buffer);
- // Host access to buffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCreateBufferView(
- VkDevice device,
- const VkBufferViewCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkBufferView* pView) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordCreateBufferView(
- VkDevice device,
- const VkBufferViewCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkBufferView* pView,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordDestroyBufferView(
- VkDevice device,
- VkBufferView bufferView,
- const VkAllocationCallbacks* pAllocator) {
- StartReadObject(device);
- StartWriteObject(bufferView);
- // Host access to bufferView must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordDestroyBufferView(
- VkDevice device,
- VkBufferView bufferView,
- const VkAllocationCallbacks* pAllocator) {
- FinishReadObject(device);
- FinishWriteObject(bufferView);
- // Host access to bufferView must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCreateImage(
- VkDevice device,
- const VkImageCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkImage* pImage) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordCreateImage(
- VkDevice device,
- const VkImageCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkImage* pImage,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordDestroyImage(
- VkDevice device,
- VkImage image,
- const VkAllocationCallbacks* pAllocator) {
- StartReadObject(device);
- StartWriteObject(image);
- // Host access to image must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordDestroyImage(
- VkDevice device,
- VkImage image,
- const VkAllocationCallbacks* pAllocator) {
- FinishReadObject(device);
- FinishWriteObject(image);
- // Host access to image must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordGetImageSubresourceLayout(
- VkDevice device,
- VkImage image,
- const VkImageSubresource* pSubresource,
- VkSubresourceLayout* pLayout) {
- StartReadObject(device);
- StartReadObject(image);
-}
-
-void ThreadSafety::PostCallRecordGetImageSubresourceLayout(
- VkDevice device,
- VkImage image,
- const VkImageSubresource* pSubresource,
- VkSubresourceLayout* pLayout) {
- FinishReadObject(device);
- FinishReadObject(image);
-}
-
-void ThreadSafety::PreCallRecordCreateImageView(
- VkDevice device,
- const VkImageViewCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkImageView* pView) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordCreateImageView(
- VkDevice device,
- const VkImageViewCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkImageView* pView,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordDestroyImageView(
- VkDevice device,
- VkImageView imageView,
- const VkAllocationCallbacks* pAllocator) {
- StartReadObject(device);
- StartWriteObject(imageView);
- // Host access to imageView must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordDestroyImageView(
- VkDevice device,
- VkImageView imageView,
- const VkAllocationCallbacks* pAllocator) {
- FinishReadObject(device);
- FinishWriteObject(imageView);
- // Host access to imageView must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCreateShaderModule(
- VkDevice device,
- const VkShaderModuleCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkShaderModule* pShaderModule) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordCreateShaderModule(
- VkDevice device,
- const VkShaderModuleCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkShaderModule* pShaderModule,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordDestroyShaderModule(
- VkDevice device,
- VkShaderModule shaderModule,
- const VkAllocationCallbacks* pAllocator) {
- StartReadObject(device);
- StartWriteObject(shaderModule);
- // Host access to shaderModule must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordDestroyShaderModule(
- VkDevice device,
- VkShaderModule shaderModule,
- const VkAllocationCallbacks* pAllocator) {
- FinishReadObject(device);
- FinishWriteObject(shaderModule);
- // Host access to shaderModule must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCreatePipelineCache(
- VkDevice device,
- const VkPipelineCacheCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkPipelineCache* pPipelineCache) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordCreatePipelineCache(
- VkDevice device,
- const VkPipelineCacheCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkPipelineCache* pPipelineCache,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordDestroyPipelineCache(
- VkDevice device,
- VkPipelineCache pipelineCache,
- const VkAllocationCallbacks* pAllocator) {
- StartReadObject(device);
- StartWriteObject(pipelineCache);
- // Host access to pipelineCache must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordDestroyPipelineCache(
- VkDevice device,
- VkPipelineCache pipelineCache,
- const VkAllocationCallbacks* pAllocator) {
- FinishReadObject(device);
- FinishWriteObject(pipelineCache);
- // Host access to pipelineCache must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordGetPipelineCacheData(
- VkDevice device,
- VkPipelineCache pipelineCache,
- size_t* pDataSize,
- void* pData) {
- StartReadObject(device);
- StartReadObject(pipelineCache);
-}
-
-void ThreadSafety::PostCallRecordGetPipelineCacheData(
- VkDevice device,
- VkPipelineCache pipelineCache,
- size_t* pDataSize,
- void* pData,
- VkResult result) {
- FinishReadObject(device);
- FinishReadObject(pipelineCache);
-}
-
-void ThreadSafety::PreCallRecordMergePipelineCaches(
- VkDevice device,
- VkPipelineCache dstCache,
- uint32_t srcCacheCount,
- const VkPipelineCache* pSrcCaches) {
- StartReadObject(device);
- StartWriteObject(dstCache);
- if (pSrcCaches) {
- for (uint32_t index = 0; index < srcCacheCount; index++) {
- StartReadObject(pSrcCaches[index]);
- }
- }
- // Host access to dstCache must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordMergePipelineCaches(
- VkDevice device,
- VkPipelineCache dstCache,
- uint32_t srcCacheCount,
- const VkPipelineCache* pSrcCaches,
- VkResult result) {
- FinishReadObject(device);
- FinishWriteObject(dstCache);
- if (pSrcCaches) {
- for (uint32_t index = 0; index < srcCacheCount; index++) {
- FinishReadObject(pSrcCaches[index]);
- }
- }
- // Host access to dstCache must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCreateGraphicsPipelines(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VkGraphicsPipelineCreateInfo* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkPipeline* pPipelines) {
- StartReadObject(device);
- StartReadObject(pipelineCache);
-}
-
-void ThreadSafety::PostCallRecordCreateGraphicsPipelines(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VkGraphicsPipelineCreateInfo* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkPipeline* pPipelines,
- VkResult result) {
- FinishReadObject(device);
- FinishReadObject(pipelineCache);
-}
-
-void ThreadSafety::PreCallRecordCreateComputePipelines(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VkComputePipelineCreateInfo* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkPipeline* pPipelines) {
- StartReadObject(device);
- StartReadObject(pipelineCache);
-}
-
-void ThreadSafety::PostCallRecordCreateComputePipelines(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VkComputePipelineCreateInfo* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkPipeline* pPipelines,
- VkResult result) {
- FinishReadObject(device);
- FinishReadObject(pipelineCache);
-}
-
-void ThreadSafety::PreCallRecordDestroyPipeline(
- VkDevice device,
- VkPipeline pipeline,
- const VkAllocationCallbacks* pAllocator) {
- StartReadObject(device);
- StartWriteObject(pipeline);
- // Host access to pipeline must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordDestroyPipeline(
- VkDevice device,
- VkPipeline pipeline,
- const VkAllocationCallbacks* pAllocator) {
- FinishReadObject(device);
- FinishWriteObject(pipeline);
- // Host access to pipeline must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCreatePipelineLayout(
- VkDevice device,
- const VkPipelineLayoutCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkPipelineLayout* pPipelineLayout) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordCreatePipelineLayout(
- VkDevice device,
- const VkPipelineLayoutCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkPipelineLayout* pPipelineLayout,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordDestroyPipelineLayout(
- VkDevice device,
- VkPipelineLayout pipelineLayout,
- const VkAllocationCallbacks* pAllocator) {
- StartReadObject(device);
- StartWriteObject(pipelineLayout);
- // Host access to pipelineLayout must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordDestroyPipelineLayout(
- VkDevice device,
- VkPipelineLayout pipelineLayout,
- const VkAllocationCallbacks* pAllocator) {
- FinishReadObject(device);
- FinishWriteObject(pipelineLayout);
- // Host access to pipelineLayout must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCreateSampler(
- VkDevice device,
- const VkSamplerCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSampler* pSampler) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordCreateSampler(
- VkDevice device,
- const VkSamplerCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSampler* pSampler,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordDestroySampler(
- VkDevice device,
- VkSampler sampler,
- const VkAllocationCallbacks* pAllocator) {
- StartReadObject(device);
- StartWriteObject(sampler);
- // Host access to sampler must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordDestroySampler(
- VkDevice device,
- VkSampler sampler,
- const VkAllocationCallbacks* pAllocator) {
- FinishReadObject(device);
- FinishWriteObject(sampler);
- // Host access to sampler must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCreateDescriptorSetLayout(
- VkDevice device,
- const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorSetLayout* pSetLayout) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordCreateDescriptorSetLayout(
- VkDevice device,
- const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorSetLayout* pSetLayout,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordDestroyDescriptorSetLayout(
- VkDevice device,
- VkDescriptorSetLayout descriptorSetLayout,
- const VkAllocationCallbacks* pAllocator) {
- StartReadObject(device);
- StartWriteObject(descriptorSetLayout);
- // Host access to descriptorSetLayout must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordDestroyDescriptorSetLayout(
- VkDevice device,
- VkDescriptorSetLayout descriptorSetLayout,
- const VkAllocationCallbacks* pAllocator) {
- FinishReadObject(device);
- FinishWriteObject(descriptorSetLayout);
- // Host access to descriptorSetLayout must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCreateDescriptorPool(
- VkDevice device,
- const VkDescriptorPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorPool* pDescriptorPool) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordCreateDescriptorPool(
- VkDevice device,
- const VkDescriptorPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorPool* pDescriptorPool,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordDestroyDescriptorPool(
- VkDevice device,
- VkDescriptorPool descriptorPool,
- const VkAllocationCallbacks* pAllocator) {
- StartReadObject(device);
- StartWriteObject(descriptorPool);
- // Host access to descriptorPool must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordDestroyDescriptorPool(
- VkDevice device,
- VkDescriptorPool descriptorPool,
- const VkAllocationCallbacks* pAllocator) {
- FinishReadObject(device);
- FinishWriteObject(descriptorPool);
- // Host access to descriptorPool must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordResetDescriptorPool(
- VkDevice device,
- VkDescriptorPool descriptorPool,
- VkDescriptorPoolResetFlags flags) {
- StartReadObject(device);
- StartWriteObject(descriptorPool);
- // Host access to descriptorPool must be externally synchronized
- // any sname:VkDescriptorSet objects allocated from pname:descriptorPool must be externally synchronized between host accesses
-}
-
-void ThreadSafety::PostCallRecordResetDescriptorPool(
- VkDevice device,
- VkDescriptorPool descriptorPool,
- VkDescriptorPoolResetFlags flags,
- VkResult result) {
- FinishReadObject(device);
- FinishWriteObject(descriptorPool);
- // Host access to descriptorPool must be externally synchronized
- // any sname:VkDescriptorSet objects allocated from pname:descriptorPool must be externally synchronized between host accesses
-}
-
-void ThreadSafety::PreCallRecordFreeDescriptorSets(
- VkDevice device,
- VkDescriptorPool descriptorPool,
- uint32_t descriptorSetCount,
- const VkDescriptorSet* pDescriptorSets) {
- StartReadObject(device);
- StartWriteObject(descriptorPool);
- if (pDescriptorSets) {
- for (uint32_t index=0; index < descriptorSetCount; index++) {
- StartWriteObject(pDescriptorSets[index]);
- }
- }
- // Host access to descriptorPool must be externally synchronized
- // Host access to each member of pDescriptorSets must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordFreeDescriptorSets(
- VkDevice device,
- VkDescriptorPool descriptorPool,
- uint32_t descriptorSetCount,
- const VkDescriptorSet* pDescriptorSets,
- VkResult result) {
- FinishReadObject(device);
- FinishWriteObject(descriptorPool);
- if (pDescriptorSets) {
- for (uint32_t index=0; index < descriptorSetCount; index++) {
- FinishWriteObject(pDescriptorSets[index]);
- }
- }
- // Host access to descriptorPool must be externally synchronized
- // Host access to each member of pDescriptorSets must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordUpdateDescriptorSets(
- VkDevice device,
- uint32_t descriptorWriteCount,
- const VkWriteDescriptorSet* pDescriptorWrites,
- uint32_t descriptorCopyCount,
- const VkCopyDescriptorSet* pDescriptorCopies) {
- StartReadObject(device);
- if (pDescriptorWrites) {
- for (uint32_t index=0; index < descriptorWriteCount; index++) {
- StartWriteObject(pDescriptorWrites[index].dstSet);
- }
- }
- if (pDescriptorCopies) {
- for (uint32_t index=0; index < descriptorCopyCount; index++) {
- StartWriteObject(pDescriptorCopies[index].dstSet);
- }
- }
- // Host access to pDescriptorWrites[].dstSet must be externally synchronized
- // Host access to pDescriptorCopies[].dstSet must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordUpdateDescriptorSets(
- VkDevice device,
- uint32_t descriptorWriteCount,
- const VkWriteDescriptorSet* pDescriptorWrites,
- uint32_t descriptorCopyCount,
- const VkCopyDescriptorSet* pDescriptorCopies) {
- FinishReadObject(device);
- if (pDescriptorWrites) {
- for (uint32_t index=0; index < descriptorWriteCount; index++) {
- FinishWriteObject(pDescriptorWrites[index].dstSet);
- }
- }
- if (pDescriptorCopies) {
- for (uint32_t index=0; index < descriptorCopyCount; index++) {
- FinishWriteObject(pDescriptorCopies[index].dstSet);
- }
- }
- // Host access to pDescriptorWrites[].dstSet must be externally synchronized
- // Host access to pDescriptorCopies[].dstSet must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCreateFramebuffer(
- VkDevice device,
- const VkFramebufferCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFramebuffer* pFramebuffer) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordCreateFramebuffer(
- VkDevice device,
- const VkFramebufferCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFramebuffer* pFramebuffer,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordDestroyFramebuffer(
- VkDevice device,
- VkFramebuffer framebuffer,
- const VkAllocationCallbacks* pAllocator) {
- StartReadObject(device);
- StartWriteObject(framebuffer);
- // Host access to framebuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordDestroyFramebuffer(
- VkDevice device,
- VkFramebuffer framebuffer,
- const VkAllocationCallbacks* pAllocator) {
- FinishReadObject(device);
- FinishWriteObject(framebuffer);
- // Host access to framebuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCreateRenderPass(
- VkDevice device,
- const VkRenderPassCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkRenderPass* pRenderPass) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordCreateRenderPass(
- VkDevice device,
- const VkRenderPassCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkRenderPass* pRenderPass,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordDestroyRenderPass(
- VkDevice device,
- VkRenderPass renderPass,
- const VkAllocationCallbacks* pAllocator) {
- StartReadObject(device);
- StartWriteObject(renderPass);
- // Host access to renderPass must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordDestroyRenderPass(
- VkDevice device,
- VkRenderPass renderPass,
- const VkAllocationCallbacks* pAllocator) {
- FinishReadObject(device);
- FinishWriteObject(renderPass);
- // Host access to renderPass must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordGetRenderAreaGranularity(
- VkDevice device,
- VkRenderPass renderPass,
- VkExtent2D* pGranularity) {
- StartReadObject(device);
- StartReadObject(renderPass);
-}
-
-void ThreadSafety::PostCallRecordGetRenderAreaGranularity(
- VkDevice device,
- VkRenderPass renderPass,
- VkExtent2D* pGranularity) {
- FinishReadObject(device);
- FinishReadObject(renderPass);
-}
-
-void ThreadSafety::PreCallRecordCreateCommandPool(
- VkDevice device,
- const VkCommandPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkCommandPool* pCommandPool) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordCreateCommandPool(
- VkDevice device,
- const VkCommandPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkCommandPool* pCommandPool,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordBeginCommandBuffer(
- VkCommandBuffer commandBuffer,
- const VkCommandBufferBeginInfo* pBeginInfo) {
- StartWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
- // the sname:VkCommandPool that pname:commandBuffer was allocated from must be externally synchronized between host accesses
-}
-
-void ThreadSafety::PostCallRecordBeginCommandBuffer(
- VkCommandBuffer commandBuffer,
- const VkCommandBufferBeginInfo* pBeginInfo,
- VkResult result) {
- FinishWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
- // the sname:VkCommandPool that pname:commandBuffer was allocated from must be externally synchronized between host accesses
-}
-
-void ThreadSafety::PreCallRecordEndCommandBuffer(
- VkCommandBuffer commandBuffer) {
- StartWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
- // the sname:VkCommandPool that pname:commandBuffer was allocated from must be externally synchronized between host accesses
-}
-
-void ThreadSafety::PostCallRecordEndCommandBuffer(
- VkCommandBuffer commandBuffer,
- VkResult result) {
- FinishWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
- // the sname:VkCommandPool that pname:commandBuffer was allocated from must be externally synchronized between host accesses
-}
-
-void ThreadSafety::PreCallRecordResetCommandBuffer(
- VkCommandBuffer commandBuffer,
- VkCommandBufferResetFlags flags) {
- StartWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordResetCommandBuffer(
- VkCommandBuffer commandBuffer,
- VkCommandBufferResetFlags flags,
- VkResult result) {
- FinishWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdBindPipeline(
- VkCommandBuffer commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipeline pipeline) {
- StartWriteObject(commandBuffer);
- StartReadObject(pipeline);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdBindPipeline(
- VkCommandBuffer commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipeline pipeline) {
- FinishWriteObject(commandBuffer);
- FinishReadObject(pipeline);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdSetViewport(
- VkCommandBuffer commandBuffer,
- uint32_t firstViewport,
- uint32_t viewportCount,
- const VkViewport* pViewports) {
- StartWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdSetViewport(
- VkCommandBuffer commandBuffer,
- uint32_t firstViewport,
- uint32_t viewportCount,
- const VkViewport* pViewports) {
- FinishWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdSetScissor(
- VkCommandBuffer commandBuffer,
- uint32_t firstScissor,
- uint32_t scissorCount,
- const VkRect2D* pScissors) {
- StartWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdSetScissor(
- VkCommandBuffer commandBuffer,
- uint32_t firstScissor,
- uint32_t scissorCount,
- const VkRect2D* pScissors) {
- FinishWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdSetLineWidth(
- VkCommandBuffer commandBuffer,
- float lineWidth) {
- StartWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdSetLineWidth(
- VkCommandBuffer commandBuffer,
- float lineWidth) {
- FinishWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdSetDepthBias(
- VkCommandBuffer commandBuffer,
- float depthBiasConstantFactor,
- float depthBiasClamp,
- float depthBiasSlopeFactor) {
- StartWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdSetDepthBias(
- VkCommandBuffer commandBuffer,
- float depthBiasConstantFactor,
- float depthBiasClamp,
- float depthBiasSlopeFactor) {
- FinishWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdSetBlendConstants(
- VkCommandBuffer commandBuffer,
- const float blendConstants[4]) {
- StartWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdSetBlendConstants(
- VkCommandBuffer commandBuffer,
- const float blendConstants[4]) {
- FinishWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdSetDepthBounds(
- VkCommandBuffer commandBuffer,
- float minDepthBounds,
- float maxDepthBounds) {
- StartWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdSetDepthBounds(
- VkCommandBuffer commandBuffer,
- float minDepthBounds,
- float maxDepthBounds) {
- FinishWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdSetStencilCompareMask(
- VkCommandBuffer commandBuffer,
- VkStencilFaceFlags faceMask,
- uint32_t compareMask) {
- StartWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdSetStencilCompareMask(
- VkCommandBuffer commandBuffer,
- VkStencilFaceFlags faceMask,
- uint32_t compareMask) {
- FinishWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdSetStencilWriteMask(
- VkCommandBuffer commandBuffer,
- VkStencilFaceFlags faceMask,
- uint32_t writeMask) {
- StartWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdSetStencilWriteMask(
- VkCommandBuffer commandBuffer,
- VkStencilFaceFlags faceMask,
- uint32_t writeMask) {
- FinishWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdSetStencilReference(
- VkCommandBuffer commandBuffer,
- VkStencilFaceFlags faceMask,
- uint32_t reference) {
- StartWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdSetStencilReference(
- VkCommandBuffer commandBuffer,
- VkStencilFaceFlags faceMask,
- uint32_t reference) {
- FinishWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdBindDescriptorSets(
- VkCommandBuffer commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipelineLayout layout,
- uint32_t firstSet,
- uint32_t descriptorSetCount,
- const VkDescriptorSet* pDescriptorSets,
- uint32_t dynamicOffsetCount,
- const uint32_t* pDynamicOffsets) {
- StartWriteObject(commandBuffer);
- StartReadObject(layout);
- if (pDescriptorSets) {
- for (uint32_t index = 0; index < descriptorSetCount; index++) {
- StartReadObject(pDescriptorSets[index]);
- }
- }
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdBindDescriptorSets(
- VkCommandBuffer commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipelineLayout layout,
- uint32_t firstSet,
- uint32_t descriptorSetCount,
- const VkDescriptorSet* pDescriptorSets,
- uint32_t dynamicOffsetCount,
- const uint32_t* pDynamicOffsets) {
- FinishWriteObject(commandBuffer);
- FinishReadObject(layout);
- if (pDescriptorSets) {
- for (uint32_t index = 0; index < descriptorSetCount; index++) {
- FinishReadObject(pDescriptorSets[index]);
- }
- }
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdBindIndexBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkIndexType indexType) {
- StartWriteObject(commandBuffer);
- StartReadObject(buffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdBindIndexBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkIndexType indexType) {
- FinishWriteObject(commandBuffer);
- FinishReadObject(buffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdBindVertexBuffers(
- VkCommandBuffer commandBuffer,
- uint32_t firstBinding,
- uint32_t bindingCount,
- const VkBuffer* pBuffers,
- const VkDeviceSize* pOffsets) {
- StartWriteObject(commandBuffer);
- if (pBuffers) {
- for (uint32_t index = 0; index < bindingCount; index++) {
- StartReadObject(pBuffers[index]);
- }
- }
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdBindVertexBuffers(
- VkCommandBuffer commandBuffer,
- uint32_t firstBinding,
- uint32_t bindingCount,
- const VkBuffer* pBuffers,
- const VkDeviceSize* pOffsets) {
- FinishWriteObject(commandBuffer);
- if (pBuffers) {
- for (uint32_t index = 0; index < bindingCount; index++) {
- FinishReadObject(pBuffers[index]);
- }
- }
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdDraw(
- VkCommandBuffer commandBuffer,
- uint32_t vertexCount,
- uint32_t instanceCount,
- uint32_t firstVertex,
- uint32_t firstInstance) {
- StartWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdDraw(
- VkCommandBuffer commandBuffer,
- uint32_t vertexCount,
- uint32_t instanceCount,
- uint32_t firstVertex,
- uint32_t firstInstance) {
- FinishWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdDrawIndexed(
- VkCommandBuffer commandBuffer,
- uint32_t indexCount,
- uint32_t instanceCount,
- uint32_t firstIndex,
- int32_t vertexOffset,
- uint32_t firstInstance) {
- StartWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdDrawIndexed(
- VkCommandBuffer commandBuffer,
- uint32_t indexCount,
- uint32_t instanceCount,
- uint32_t firstIndex,
- int32_t vertexOffset,
- uint32_t firstInstance) {
- FinishWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdDrawIndirect(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- uint32_t drawCount,
- uint32_t stride) {
- StartWriteObject(commandBuffer);
- StartReadObject(buffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdDrawIndirect(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- uint32_t drawCount,
- uint32_t stride) {
- FinishWriteObject(commandBuffer);
- FinishReadObject(buffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdDrawIndexedIndirect(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- uint32_t drawCount,
- uint32_t stride) {
- StartWriteObject(commandBuffer);
- StartReadObject(buffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdDrawIndexedIndirect(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- uint32_t drawCount,
- uint32_t stride) {
- FinishWriteObject(commandBuffer);
- FinishReadObject(buffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdDispatch(
- VkCommandBuffer commandBuffer,
- uint32_t groupCountX,
- uint32_t groupCountY,
- uint32_t groupCountZ) {
- StartWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdDispatch(
- VkCommandBuffer commandBuffer,
- uint32_t groupCountX,
- uint32_t groupCountY,
- uint32_t groupCountZ) {
- FinishWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdDispatchIndirect(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset) {
- StartWriteObject(commandBuffer);
- StartReadObject(buffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdDispatchIndirect(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset) {
- FinishWriteObject(commandBuffer);
- FinishReadObject(buffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdCopyBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer srcBuffer,
- VkBuffer dstBuffer,
- uint32_t regionCount,
- const VkBufferCopy* pRegions) {
- StartWriteObject(commandBuffer);
- StartReadObject(srcBuffer);
- StartReadObject(dstBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdCopyBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer srcBuffer,
- VkBuffer dstBuffer,
- uint32_t regionCount,
- const VkBufferCopy* pRegions) {
- FinishWriteObject(commandBuffer);
- FinishReadObject(srcBuffer);
- FinishReadObject(dstBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdCopyImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkImageCopy* pRegions) {
- StartWriteObject(commandBuffer);
- StartReadObject(srcImage);
- StartReadObject(dstImage);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdCopyImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkImageCopy* pRegions) {
- FinishWriteObject(commandBuffer);
- FinishReadObject(srcImage);
- FinishReadObject(dstImage);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdBlitImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkImageBlit* pRegions,
- VkFilter filter) {
- StartWriteObject(commandBuffer);
- StartReadObject(srcImage);
- StartReadObject(dstImage);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdBlitImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkImageBlit* pRegions,
- VkFilter filter) {
- FinishWriteObject(commandBuffer);
- FinishReadObject(srcImage);
- FinishReadObject(dstImage);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdCopyBufferToImage(
- VkCommandBuffer commandBuffer,
- VkBuffer srcBuffer,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkBufferImageCopy* pRegions) {
- StartWriteObject(commandBuffer);
- StartReadObject(srcBuffer);
- StartReadObject(dstImage);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdCopyBufferToImage(
- VkCommandBuffer commandBuffer,
- VkBuffer srcBuffer,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkBufferImageCopy* pRegions) {
- FinishWriteObject(commandBuffer);
- FinishReadObject(srcBuffer);
- FinishReadObject(dstImage);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdCopyImageToBuffer(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkBuffer dstBuffer,
- uint32_t regionCount,
- const VkBufferImageCopy* pRegions) {
- StartWriteObject(commandBuffer);
- StartReadObject(srcImage);
- StartReadObject(dstBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdCopyImageToBuffer(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkBuffer dstBuffer,
- uint32_t regionCount,
- const VkBufferImageCopy* pRegions) {
- FinishWriteObject(commandBuffer);
- FinishReadObject(srcImage);
- FinishReadObject(dstBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdUpdateBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize dataSize,
- const void* pData) {
- StartWriteObject(commandBuffer);
- StartReadObject(dstBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdUpdateBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize dataSize,
- const void* pData) {
- FinishWriteObject(commandBuffer);
- FinishReadObject(dstBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdFillBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize size,
- uint32_t data) {
- StartWriteObject(commandBuffer);
- StartReadObject(dstBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdFillBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize size,
- uint32_t data) {
- FinishWriteObject(commandBuffer);
- FinishReadObject(dstBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdClearColorImage(
- VkCommandBuffer commandBuffer,
- VkImage image,
- VkImageLayout imageLayout,
- const VkClearColorValue* pColor,
- uint32_t rangeCount,
- const VkImageSubresourceRange* pRanges) {
- StartWriteObject(commandBuffer);
- StartReadObject(image);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdClearColorImage(
- VkCommandBuffer commandBuffer,
- VkImage image,
- VkImageLayout imageLayout,
- const VkClearColorValue* pColor,
- uint32_t rangeCount,
- const VkImageSubresourceRange* pRanges) {
- FinishWriteObject(commandBuffer);
- FinishReadObject(image);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdClearDepthStencilImage(
- VkCommandBuffer commandBuffer,
- VkImage image,
- VkImageLayout imageLayout,
- const VkClearDepthStencilValue* pDepthStencil,
- uint32_t rangeCount,
- const VkImageSubresourceRange* pRanges) {
- StartWriteObject(commandBuffer);
- StartReadObject(image);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdClearDepthStencilImage(
- VkCommandBuffer commandBuffer,
- VkImage image,
- VkImageLayout imageLayout,
- const VkClearDepthStencilValue* pDepthStencil,
- uint32_t rangeCount,
- const VkImageSubresourceRange* pRanges) {
- FinishWriteObject(commandBuffer);
- FinishReadObject(image);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdClearAttachments(
- VkCommandBuffer commandBuffer,
- uint32_t attachmentCount,
- const VkClearAttachment* pAttachments,
- uint32_t rectCount,
- const VkClearRect* pRects) {
- StartWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdClearAttachments(
- VkCommandBuffer commandBuffer,
- uint32_t attachmentCount,
- const VkClearAttachment* pAttachments,
- uint32_t rectCount,
- const VkClearRect* pRects) {
- FinishWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdResolveImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkImageResolve* pRegions) {
- StartWriteObject(commandBuffer);
- StartReadObject(srcImage);
- StartReadObject(dstImage);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdResolveImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkImageResolve* pRegions) {
- FinishWriteObject(commandBuffer);
- FinishReadObject(srcImage);
- FinishReadObject(dstImage);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdSetEvent(
- VkCommandBuffer commandBuffer,
- VkEvent event,
- VkPipelineStageFlags stageMask) {
- StartWriteObject(commandBuffer);
- StartReadObject(event);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdSetEvent(
- VkCommandBuffer commandBuffer,
- VkEvent event,
- VkPipelineStageFlags stageMask) {
- FinishWriteObject(commandBuffer);
- FinishReadObject(event);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdResetEvent(
- VkCommandBuffer commandBuffer,
- VkEvent event,
- VkPipelineStageFlags stageMask) {
- StartWriteObject(commandBuffer);
- StartReadObject(event);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdResetEvent(
- VkCommandBuffer commandBuffer,
- VkEvent event,
- VkPipelineStageFlags stageMask) {
- FinishWriteObject(commandBuffer);
- FinishReadObject(event);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdWaitEvents(
- VkCommandBuffer commandBuffer,
- uint32_t eventCount,
- const VkEvent* pEvents,
- VkPipelineStageFlags srcStageMask,
- VkPipelineStageFlags dstStageMask,
- uint32_t memoryBarrierCount,
- const VkMemoryBarrier* pMemoryBarriers,
- uint32_t bufferMemoryBarrierCount,
- const VkBufferMemoryBarrier* pBufferMemoryBarriers,
- uint32_t imageMemoryBarrierCount,
- const VkImageMemoryBarrier* pImageMemoryBarriers) {
- StartWriteObject(commandBuffer);
- if (pEvents) {
- for (uint32_t index = 0; index < eventCount; index++) {
- StartReadObject(pEvents[index]);
- }
- }
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdWaitEvents(
- VkCommandBuffer commandBuffer,
- uint32_t eventCount,
- const VkEvent* pEvents,
- VkPipelineStageFlags srcStageMask,
- VkPipelineStageFlags dstStageMask,
- uint32_t memoryBarrierCount,
- const VkMemoryBarrier* pMemoryBarriers,
- uint32_t bufferMemoryBarrierCount,
- const VkBufferMemoryBarrier* pBufferMemoryBarriers,
- uint32_t imageMemoryBarrierCount,
- const VkImageMemoryBarrier* pImageMemoryBarriers) {
- FinishWriteObject(commandBuffer);
- if (pEvents) {
- for (uint32_t index = 0; index < eventCount; index++) {
- FinishReadObject(pEvents[index]);
- }
- }
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdPipelineBarrier(
- VkCommandBuffer commandBuffer,
- VkPipelineStageFlags srcStageMask,
- VkPipelineStageFlags dstStageMask,
- VkDependencyFlags dependencyFlags,
- uint32_t memoryBarrierCount,
- const VkMemoryBarrier* pMemoryBarriers,
- uint32_t bufferMemoryBarrierCount,
- const VkBufferMemoryBarrier* pBufferMemoryBarriers,
- uint32_t imageMemoryBarrierCount,
- const VkImageMemoryBarrier* pImageMemoryBarriers) {
- StartWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdPipelineBarrier(
- VkCommandBuffer commandBuffer,
- VkPipelineStageFlags srcStageMask,
- VkPipelineStageFlags dstStageMask,
- VkDependencyFlags dependencyFlags,
- uint32_t memoryBarrierCount,
- const VkMemoryBarrier* pMemoryBarriers,
- uint32_t bufferMemoryBarrierCount,
- const VkBufferMemoryBarrier* pBufferMemoryBarriers,
- uint32_t imageMemoryBarrierCount,
- const VkImageMemoryBarrier* pImageMemoryBarriers) {
- FinishWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdBeginQuery(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query,
- VkQueryControlFlags flags) {
- StartWriteObject(commandBuffer);
- StartReadObject(queryPool);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdBeginQuery(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query,
- VkQueryControlFlags flags) {
- FinishWriteObject(commandBuffer);
- FinishReadObject(queryPool);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdEndQuery(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query) {
- StartWriteObject(commandBuffer);
- StartReadObject(queryPool);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdEndQuery(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query) {
- FinishWriteObject(commandBuffer);
- FinishReadObject(queryPool);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdResetQueryPool(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount) {
- StartWriteObject(commandBuffer);
- StartReadObject(queryPool);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdResetQueryPool(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount) {
- FinishWriteObject(commandBuffer);
- FinishReadObject(queryPool);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdWriteTimestamp(
- VkCommandBuffer commandBuffer,
- VkPipelineStageFlagBits pipelineStage,
- VkQueryPool queryPool,
- uint32_t query) {
- StartWriteObject(commandBuffer);
- StartReadObject(queryPool);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdWriteTimestamp(
- VkCommandBuffer commandBuffer,
- VkPipelineStageFlagBits pipelineStage,
- VkQueryPool queryPool,
- uint32_t query) {
- FinishWriteObject(commandBuffer);
- FinishReadObject(queryPool);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdCopyQueryPoolResults(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize stride,
- VkQueryResultFlags flags) {
- StartWriteObject(commandBuffer);
- StartReadObject(queryPool);
- StartReadObject(dstBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdCopyQueryPoolResults(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize stride,
- VkQueryResultFlags flags) {
- FinishWriteObject(commandBuffer);
- FinishReadObject(queryPool);
- FinishReadObject(dstBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdPushConstants(
- VkCommandBuffer commandBuffer,
- VkPipelineLayout layout,
- VkShaderStageFlags stageFlags,
- uint32_t offset,
- uint32_t size,
- const void* pValues) {
- StartWriteObject(commandBuffer);
- StartReadObject(layout);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdPushConstants(
- VkCommandBuffer commandBuffer,
- VkPipelineLayout layout,
- VkShaderStageFlags stageFlags,
- uint32_t offset,
- uint32_t size,
- const void* pValues) {
- FinishWriteObject(commandBuffer);
- FinishReadObject(layout);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdBeginRenderPass(
- VkCommandBuffer commandBuffer,
- const VkRenderPassBeginInfo* pRenderPassBegin,
- VkSubpassContents contents) {
- StartWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdBeginRenderPass(
- VkCommandBuffer commandBuffer,
- const VkRenderPassBeginInfo* pRenderPassBegin,
- VkSubpassContents contents) {
- FinishWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdNextSubpass(
- VkCommandBuffer commandBuffer,
- VkSubpassContents contents) {
- StartWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdNextSubpass(
- VkCommandBuffer commandBuffer,
- VkSubpassContents contents) {
- FinishWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdEndRenderPass(
- VkCommandBuffer commandBuffer) {
- StartWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdEndRenderPass(
- VkCommandBuffer commandBuffer) {
- FinishWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdExecuteCommands(
- VkCommandBuffer commandBuffer,
- uint32_t commandBufferCount,
- const VkCommandBuffer* pCommandBuffers) {
- StartWriteObject(commandBuffer);
- if (pCommandBuffers) {
- for (uint32_t index = 0; index < commandBufferCount; index++) {
- StartReadObject(pCommandBuffers[index]);
- }
- }
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdExecuteCommands(
- VkCommandBuffer commandBuffer,
- uint32_t commandBufferCount,
- const VkCommandBuffer* pCommandBuffers) {
- FinishWriteObject(commandBuffer);
- if (pCommandBuffers) {
- for (uint32_t index = 0; index < commandBufferCount; index++) {
- FinishReadObject(pCommandBuffers[index]);
- }
- }
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordBindBufferMemory2(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindBufferMemoryInfo* pBindInfos) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordBindBufferMemory2(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindBufferMemoryInfo* pBindInfos,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordBindImageMemory2(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindImageMemoryInfo* pBindInfos) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordBindImageMemory2(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindImageMemoryInfo* pBindInfos,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordGetDeviceGroupPeerMemoryFeatures(
- VkDevice device,
- uint32_t heapIndex,
- uint32_t localDeviceIndex,
- uint32_t remoteDeviceIndex,
- VkPeerMemoryFeatureFlags* pPeerMemoryFeatures) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordGetDeviceGroupPeerMemoryFeatures(
- VkDevice device,
- uint32_t heapIndex,
- uint32_t localDeviceIndex,
- uint32_t remoteDeviceIndex,
- VkPeerMemoryFeatureFlags* pPeerMemoryFeatures) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordCmdSetDeviceMask(
- VkCommandBuffer commandBuffer,
- uint32_t deviceMask) {
- StartWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdSetDeviceMask(
- VkCommandBuffer commandBuffer,
- uint32_t deviceMask) {
- FinishWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdDispatchBase(
- VkCommandBuffer commandBuffer,
- uint32_t baseGroupX,
- uint32_t baseGroupY,
- uint32_t baseGroupZ,
- uint32_t groupCountX,
- uint32_t groupCountY,
- uint32_t groupCountZ) {
- StartWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdDispatchBase(
- VkCommandBuffer commandBuffer,
- uint32_t baseGroupX,
- uint32_t baseGroupY,
- uint32_t baseGroupZ,
- uint32_t groupCountX,
- uint32_t groupCountY,
- uint32_t groupCountZ) {
- FinishWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordEnumeratePhysicalDeviceGroups(
- VkInstance instance,
- uint32_t* pPhysicalDeviceGroupCount,
- VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties) {
- StartReadObject(instance);
-}
-
-void ThreadSafety::PostCallRecordEnumeratePhysicalDeviceGroups(
- VkInstance instance,
- uint32_t* pPhysicalDeviceGroupCount,
- VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties,
- VkResult result) {
- FinishReadObject(instance);
-}
-
-void ThreadSafety::PreCallRecordGetImageMemoryRequirements2(
- VkDevice device,
- const VkImageMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordGetImageMemoryRequirements2(
- VkDevice device,
- const VkImageMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordGetBufferMemoryRequirements2(
- VkDevice device,
- const VkBufferMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordGetBufferMemoryRequirements2(
- VkDevice device,
- const VkBufferMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordGetImageSparseMemoryRequirements2(
- VkDevice device,
- const VkImageSparseMemoryRequirementsInfo2* pInfo,
- uint32_t* pSparseMemoryRequirementCount,
- VkSparseImageMemoryRequirements2* pSparseMemoryRequirements) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordGetImageSparseMemoryRequirements2(
- VkDevice device,
- const VkImageSparseMemoryRequirementsInfo2* pInfo,
- uint32_t* pSparseMemoryRequirementCount,
- VkSparseImageMemoryRequirements2* pSparseMemoryRequirements) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordTrimCommandPool(
- VkDevice device,
- VkCommandPool commandPool,
- VkCommandPoolTrimFlags flags) {
- StartReadObject(device);
- StartWriteObject(commandPool);
- // Host access to commandPool must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordTrimCommandPool(
- VkDevice device,
- VkCommandPool commandPool,
- VkCommandPoolTrimFlags flags) {
- FinishReadObject(device);
- FinishWriteObject(commandPool);
- // Host access to commandPool must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordGetDeviceQueue2(
- VkDevice device,
- const VkDeviceQueueInfo2* pQueueInfo,
- VkQueue* pQueue) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordGetDeviceQueue2(
- VkDevice device,
- const VkDeviceQueueInfo2* pQueueInfo,
- VkQueue* pQueue) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordCreateSamplerYcbcrConversion(
- VkDevice device,
- const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSamplerYcbcrConversion* pYcbcrConversion) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordCreateSamplerYcbcrConversion(
- VkDevice device,
- const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSamplerYcbcrConversion* pYcbcrConversion,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordDestroySamplerYcbcrConversion(
- VkDevice device,
- VkSamplerYcbcrConversion ycbcrConversion,
- const VkAllocationCallbacks* pAllocator) {
- StartReadObject(device);
- StartWriteObject(ycbcrConversion);
- // Host access to ycbcrConversion must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordDestroySamplerYcbcrConversion(
- VkDevice device,
- VkSamplerYcbcrConversion ycbcrConversion,
- const VkAllocationCallbacks* pAllocator) {
- FinishReadObject(device);
- FinishWriteObject(ycbcrConversion);
- // Host access to ycbcrConversion must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCreateDescriptorUpdateTemplate(
- VkDevice device,
- const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordCreateDescriptorUpdateTemplate(
- VkDevice device,
- const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordDestroyDescriptorUpdateTemplate(
- VkDevice device,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const VkAllocationCallbacks* pAllocator) {
- StartReadObject(device);
- StartWriteObject(descriptorUpdateTemplate);
- // Host access to descriptorUpdateTemplate must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordDestroyDescriptorUpdateTemplate(
- VkDevice device,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const VkAllocationCallbacks* pAllocator) {
- FinishReadObject(device);
- FinishWriteObject(descriptorUpdateTemplate);
- // Host access to descriptorUpdateTemplate must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordUpdateDescriptorSetWithTemplate(
- VkDevice device,
- VkDescriptorSet descriptorSet,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const void* pData) {
- StartReadObject(device);
- StartWriteObject(descriptorSet);
- StartReadObject(descriptorUpdateTemplate);
- // Host access to descriptorSet must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordUpdateDescriptorSetWithTemplate(
- VkDevice device,
- VkDescriptorSet descriptorSet,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const void* pData) {
- FinishReadObject(device);
- FinishWriteObject(descriptorSet);
- FinishReadObject(descriptorUpdateTemplate);
- // Host access to descriptorSet must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordGetDescriptorSetLayoutSupport(
- VkDevice device,
- const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
- VkDescriptorSetLayoutSupport* pSupport) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordGetDescriptorSetLayoutSupport(
- VkDevice device,
- const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
- VkDescriptorSetLayoutSupport* pSupport) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordDestroySurfaceKHR(
- VkInstance instance,
- VkSurfaceKHR surface,
- const VkAllocationCallbacks* pAllocator) {
- StartReadObject(instance);
- StartWriteObject(surface);
- // Host access to surface must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordDestroySurfaceKHR(
- VkInstance instance,
- VkSurfaceKHR surface,
- const VkAllocationCallbacks* pAllocator) {
- FinishReadObject(instance);
- FinishWriteObject(surface);
- // Host access to surface must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordGetPhysicalDeviceSurfaceSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex,
- VkSurfaceKHR surface,
- VkBool32* pSupported) {
- StartReadObject(surface);
-}
-
-void ThreadSafety::PostCallRecordGetPhysicalDeviceSurfaceSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex,
- VkSurfaceKHR surface,
- VkBool32* pSupported,
- VkResult result) {
- FinishReadObject(surface);
-}
-
-void ThreadSafety::PreCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- VkSurfaceCapabilitiesKHR* pSurfaceCapabilities) {
- StartReadObject(surface);
-}
-
-void ThreadSafety::PostCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- VkSurfaceCapabilitiesKHR* pSurfaceCapabilities,
- VkResult result) {
- FinishReadObject(surface);
-}
-
-void ThreadSafety::PreCallRecordGetPhysicalDeviceSurfaceFormatsKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- uint32_t* pSurfaceFormatCount,
- VkSurfaceFormatKHR* pSurfaceFormats) {
- StartReadObject(surface);
-}
-
-void ThreadSafety::PostCallRecordGetPhysicalDeviceSurfaceFormatsKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- uint32_t* pSurfaceFormatCount,
- VkSurfaceFormatKHR* pSurfaceFormats,
- VkResult result) {
- FinishReadObject(surface);
-}
-
-void ThreadSafety::PreCallRecordGetPhysicalDeviceSurfacePresentModesKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- uint32_t* pPresentModeCount,
- VkPresentModeKHR* pPresentModes) {
- StartReadObject(surface);
-}
-
-void ThreadSafety::PostCallRecordGetPhysicalDeviceSurfacePresentModesKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- uint32_t* pPresentModeCount,
- VkPresentModeKHR* pPresentModes,
- VkResult result) {
- FinishReadObject(surface);
-}
-
-void ThreadSafety::PreCallRecordCreateSwapchainKHR(
- VkDevice device,
- const VkSwapchainCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSwapchainKHR* pSwapchain) {
- StartReadObject(device);
- StartWriteObject(pCreateInfo->surface);
- StartWriteObject(pCreateInfo->oldSwapchain);
- // Host access to pCreateInfo.surface,pCreateInfo.oldSwapchain must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCreateSwapchainKHR(
- VkDevice device,
- const VkSwapchainCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSwapchainKHR* pSwapchain,
- VkResult result) {
- FinishReadObject(device);
- FinishWriteObject(pCreateInfo->surface);
- FinishWriteObject(pCreateInfo->oldSwapchain);
- // Host access to pCreateInfo.surface,pCreateInfo.oldSwapchain must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordDestroySwapchainKHR(
- VkDevice device,
- VkSwapchainKHR swapchain,
- const VkAllocationCallbacks* pAllocator) {
- StartReadObject(device);
- StartWriteObject(swapchain);
- // Host access to swapchain must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordDestroySwapchainKHR(
- VkDevice device,
- VkSwapchainKHR swapchain,
- const VkAllocationCallbacks* pAllocator) {
- FinishReadObject(device);
- FinishWriteObject(swapchain);
- // Host access to swapchain must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordAcquireNextImageKHR(
- VkDevice device,
- VkSwapchainKHR swapchain,
- uint64_t timeout,
- VkSemaphore semaphore,
- VkFence fence,
- uint32_t* pImageIndex) {
- StartReadObject(device);
- StartWriteObject(swapchain);
- StartWriteObject(semaphore);
- StartWriteObject(fence);
- // Host access to swapchain must be externally synchronized
- // Host access to semaphore must be externally synchronized
- // Host access to fence must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordAcquireNextImageKHR(
- VkDevice device,
- VkSwapchainKHR swapchain,
- uint64_t timeout,
- VkSemaphore semaphore,
- VkFence fence,
- uint32_t* pImageIndex,
- VkResult result) {
- FinishReadObject(device);
- FinishWriteObject(swapchain);
- FinishWriteObject(semaphore);
- FinishWriteObject(fence);
- // Host access to swapchain must be externally synchronized
- // Host access to semaphore must be externally synchronized
- // Host access to fence must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordGetDeviceGroupPresentCapabilitiesKHR(
- VkDevice device,
- VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordGetDeviceGroupPresentCapabilitiesKHR(
- VkDevice device,
- VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordGetDeviceGroupSurfacePresentModesKHR(
- VkDevice device,
- VkSurfaceKHR surface,
- VkDeviceGroupPresentModeFlagsKHR* pModes) {
- StartReadObject(device);
- StartWriteObject(surface);
- // Host access to surface must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordGetDeviceGroupSurfacePresentModesKHR(
- VkDevice device,
- VkSurfaceKHR surface,
- VkDeviceGroupPresentModeFlagsKHR* pModes,
- VkResult result) {
- FinishReadObject(device);
- FinishWriteObject(surface);
- // Host access to surface must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordGetPhysicalDevicePresentRectanglesKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- uint32_t* pRectCount,
- VkRect2D* pRects) {
- StartWriteObject(surface);
- // Host access to surface must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordGetPhysicalDevicePresentRectanglesKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- uint32_t* pRectCount,
- VkRect2D* pRects,
- VkResult result) {
- FinishWriteObject(surface);
- // Host access to surface must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordAcquireNextImage2KHR(
- VkDevice device,
- const VkAcquireNextImageInfoKHR* pAcquireInfo,
- uint32_t* pImageIndex) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordAcquireNextImage2KHR(
- VkDevice device,
- const VkAcquireNextImageInfoKHR* pAcquireInfo,
- uint32_t* pImageIndex,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordGetDisplayPlaneSupportedDisplaysKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t planeIndex,
- uint32_t* pDisplayCount,
- VkDisplayKHR* pDisplays) {
- if (pDisplays) {
- for (uint32_t index = 0; index < *pDisplayCount; index++) {
- StartReadObject(pDisplays[index]);
- }
- }
-}
-
-void ThreadSafety::PostCallRecordGetDisplayPlaneSupportedDisplaysKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t planeIndex,
- uint32_t* pDisplayCount,
- VkDisplayKHR* pDisplays,
- VkResult result) {
- if (pDisplays) {
- for (uint32_t index = 0; index < *pDisplayCount; index++) {
- FinishReadObject(pDisplays[index]);
- }
- }
-}
-
-void ThreadSafety::PreCallRecordGetDisplayModePropertiesKHR(
- VkPhysicalDevice physicalDevice,
- VkDisplayKHR display,
- uint32_t* pPropertyCount,
- VkDisplayModePropertiesKHR* pProperties) {
- StartReadObject(display);
-}
-
-void ThreadSafety::PostCallRecordGetDisplayModePropertiesKHR(
- VkPhysicalDevice physicalDevice,
- VkDisplayKHR display,
- uint32_t* pPropertyCount,
- VkDisplayModePropertiesKHR* pProperties,
- VkResult result) {
- FinishReadObject(display);
-}
-
-void ThreadSafety::PreCallRecordCreateDisplayModeKHR(
- VkPhysicalDevice physicalDevice,
- VkDisplayKHR display,
- const VkDisplayModeCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDisplayModeKHR* pMode) {
- StartWriteObject(display);
- // Host access to display must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCreateDisplayModeKHR(
- VkPhysicalDevice physicalDevice,
- VkDisplayKHR display,
- const VkDisplayModeCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDisplayModeKHR* pMode,
- VkResult result) {
- FinishWriteObject(display);
- // Host access to display must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordGetDisplayPlaneCapabilitiesKHR(
- VkPhysicalDevice physicalDevice,
- VkDisplayModeKHR mode,
- uint32_t planeIndex,
- VkDisplayPlaneCapabilitiesKHR* pCapabilities) {
- StartWriteObject(mode);
- // Host access to mode must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordGetDisplayPlaneCapabilitiesKHR(
- VkPhysicalDevice physicalDevice,
- VkDisplayModeKHR mode,
- uint32_t planeIndex,
- VkDisplayPlaneCapabilitiesKHR* pCapabilities,
- VkResult result) {
- FinishWriteObject(mode);
- // Host access to mode must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCreateDisplayPlaneSurfaceKHR(
- VkInstance instance,
- const VkDisplaySurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- StartReadObject(instance);
-}
-
-void ThreadSafety::PostCallRecordCreateDisplayPlaneSurfaceKHR(
- VkInstance instance,
- const VkDisplaySurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result) {
- FinishReadObject(instance);
-}
-
-void ThreadSafety::PreCallRecordCreateSharedSwapchainsKHR(
- VkDevice device,
- uint32_t swapchainCount,
- const VkSwapchainCreateInfoKHR* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkSwapchainKHR* pSwapchains) {
- StartReadObject(device);
- if (pCreateInfos) {
- for (uint32_t index=0; index < swapchainCount; index++) {
- StartWriteObject(pCreateInfos[index].surface);
- StartWriteObject(pCreateInfos[index].oldSwapchain);
- }
- }
- if (pSwapchains) {
- for (uint32_t index = 0; index < swapchainCount; index++) {
- StartReadObject(pSwapchains[index]);
- }
- }
- // Host access to pCreateInfos[].surface,pCreateInfos[].oldSwapchain must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCreateSharedSwapchainsKHR(
- VkDevice device,
- uint32_t swapchainCount,
- const VkSwapchainCreateInfoKHR* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkSwapchainKHR* pSwapchains,
- VkResult result) {
- FinishReadObject(device);
- if (pCreateInfos) {
- for (uint32_t index=0; index < swapchainCount; index++) {
- FinishWriteObject(pCreateInfos[index].surface);
- FinishWriteObject(pCreateInfos[index].oldSwapchain);
- }
- }
- if (pSwapchains) {
- for (uint32_t index = 0; index < swapchainCount; index++) {
- FinishReadObject(pSwapchains[index]);
- }
- }
- // Host access to pCreateInfos[].surface,pCreateInfos[].oldSwapchain must be externally synchronized
-}
-
-#ifdef VK_USE_PLATFORM_XLIB_KHR
-
-void ThreadSafety::PreCallRecordCreateXlibSurfaceKHR(
- VkInstance instance,
- const VkXlibSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- StartReadObject(instance);
-}
-
-void ThreadSafety::PostCallRecordCreateXlibSurfaceKHR(
- VkInstance instance,
- const VkXlibSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result) {
- FinishReadObject(instance);
-}
-#endif // VK_USE_PLATFORM_XLIB_KHR
-
-#ifdef VK_USE_PLATFORM_XCB_KHR
-
-void ThreadSafety::PreCallRecordCreateXcbSurfaceKHR(
- VkInstance instance,
- const VkXcbSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- StartReadObject(instance);
-}
-
-void ThreadSafety::PostCallRecordCreateXcbSurfaceKHR(
- VkInstance instance,
- const VkXcbSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result) {
- FinishReadObject(instance);
-}
-#endif // VK_USE_PLATFORM_XCB_KHR
-
-#ifdef VK_USE_PLATFORM_WAYLAND_KHR
-
-void ThreadSafety::PreCallRecordCreateWaylandSurfaceKHR(
- VkInstance instance,
- const VkWaylandSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- StartReadObject(instance);
-}
-
-void ThreadSafety::PostCallRecordCreateWaylandSurfaceKHR(
- VkInstance instance,
- const VkWaylandSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result) {
- FinishReadObject(instance);
-}
-#endif // VK_USE_PLATFORM_WAYLAND_KHR
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-
-void ThreadSafety::PreCallRecordCreateAndroidSurfaceKHR(
- VkInstance instance,
- const VkAndroidSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- StartReadObject(instance);
-}
-
-void ThreadSafety::PostCallRecordCreateAndroidSurfaceKHR(
- VkInstance instance,
- const VkAndroidSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result) {
- FinishReadObject(instance);
-}
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-void ThreadSafety::PreCallRecordCreateWin32SurfaceKHR(
- VkInstance instance,
- const VkWin32SurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- StartReadObject(instance);
-}
-
-void ThreadSafety::PostCallRecordCreateWin32SurfaceKHR(
- VkInstance instance,
- const VkWin32SurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result) {
- FinishReadObject(instance);
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-void ThreadSafety::PreCallRecordGetDeviceGroupPeerMemoryFeaturesKHR(
- VkDevice device,
- uint32_t heapIndex,
- uint32_t localDeviceIndex,
- uint32_t remoteDeviceIndex,
- VkPeerMemoryFeatureFlags* pPeerMemoryFeatures) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordGetDeviceGroupPeerMemoryFeaturesKHR(
- VkDevice device,
- uint32_t heapIndex,
- uint32_t localDeviceIndex,
- uint32_t remoteDeviceIndex,
- VkPeerMemoryFeatureFlags* pPeerMemoryFeatures) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordCmdSetDeviceMaskKHR(
- VkCommandBuffer commandBuffer,
- uint32_t deviceMask) {
- StartWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdSetDeviceMaskKHR(
- VkCommandBuffer commandBuffer,
- uint32_t deviceMask) {
- FinishWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdDispatchBaseKHR(
- VkCommandBuffer commandBuffer,
- uint32_t baseGroupX,
- uint32_t baseGroupY,
- uint32_t baseGroupZ,
- uint32_t groupCountX,
- uint32_t groupCountY,
- uint32_t groupCountZ) {
- StartWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdDispatchBaseKHR(
- VkCommandBuffer commandBuffer,
- uint32_t baseGroupX,
- uint32_t baseGroupY,
- uint32_t baseGroupZ,
- uint32_t groupCountX,
- uint32_t groupCountY,
- uint32_t groupCountZ) {
- FinishWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordTrimCommandPoolKHR(
- VkDevice device,
- VkCommandPool commandPool,
- VkCommandPoolTrimFlags flags) {
- StartReadObject(device);
- StartWriteObject(commandPool);
- // Host access to commandPool must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordTrimCommandPoolKHR(
- VkDevice device,
- VkCommandPool commandPool,
- VkCommandPoolTrimFlags flags) {
- FinishReadObject(device);
- FinishWriteObject(commandPool);
- // Host access to commandPool must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordEnumeratePhysicalDeviceGroupsKHR(
- VkInstance instance,
- uint32_t* pPhysicalDeviceGroupCount,
- VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties) {
- StartReadObject(instance);
-}
-
-void ThreadSafety::PostCallRecordEnumeratePhysicalDeviceGroupsKHR(
- VkInstance instance,
- uint32_t* pPhysicalDeviceGroupCount,
- VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties,
- VkResult result) {
- FinishReadObject(instance);
-}
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-void ThreadSafety::PreCallRecordGetMemoryWin32HandleKHR(
- VkDevice device,
- const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo,
- HANDLE* pHandle) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordGetMemoryWin32HandleKHR(
- VkDevice device,
- const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo,
- HANDLE* pHandle,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordGetMemoryWin32HandlePropertiesKHR(
- VkDevice device,
- VkExternalMemoryHandleTypeFlagBits handleType,
- HANDLE handle,
- VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordGetMemoryWin32HandlePropertiesKHR(
- VkDevice device,
- VkExternalMemoryHandleTypeFlagBits handleType,
- HANDLE handle,
- VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties,
- VkResult result) {
- FinishReadObject(device);
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-void ThreadSafety::PreCallRecordGetMemoryFdKHR(
- VkDevice device,
- const VkMemoryGetFdInfoKHR* pGetFdInfo,
- int* pFd) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordGetMemoryFdKHR(
- VkDevice device,
- const VkMemoryGetFdInfoKHR* pGetFdInfo,
- int* pFd,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordGetMemoryFdPropertiesKHR(
- VkDevice device,
- VkExternalMemoryHandleTypeFlagBits handleType,
- int fd,
- VkMemoryFdPropertiesKHR* pMemoryFdProperties) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordGetMemoryFdPropertiesKHR(
- VkDevice device,
- VkExternalMemoryHandleTypeFlagBits handleType,
- int fd,
- VkMemoryFdPropertiesKHR* pMemoryFdProperties,
- VkResult result) {
- FinishReadObject(device);
-}
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-void ThreadSafety::PreCallRecordImportSemaphoreWin32HandleKHR(
- VkDevice device,
- const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordImportSemaphoreWin32HandleKHR(
- VkDevice device,
- const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordGetSemaphoreWin32HandleKHR(
- VkDevice device,
- const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo,
- HANDLE* pHandle) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordGetSemaphoreWin32HandleKHR(
- VkDevice device,
- const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo,
- HANDLE* pHandle,
- VkResult result) {
- FinishReadObject(device);
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-void ThreadSafety::PreCallRecordImportSemaphoreFdKHR(
- VkDevice device,
- const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordImportSemaphoreFdKHR(
- VkDevice device,
- const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordGetSemaphoreFdKHR(
- VkDevice device,
- const VkSemaphoreGetFdInfoKHR* pGetFdInfo,
- int* pFd) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordGetSemaphoreFdKHR(
- VkDevice device,
- const VkSemaphoreGetFdInfoKHR* pGetFdInfo,
- int* pFd,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordCmdPushDescriptorSetKHR(
- VkCommandBuffer commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipelineLayout layout,
- uint32_t set,
- uint32_t descriptorWriteCount,
- const VkWriteDescriptorSet* pDescriptorWrites) {
- StartWriteObject(commandBuffer);
- StartReadObject(layout);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdPushDescriptorSetKHR(
- VkCommandBuffer commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipelineLayout layout,
- uint32_t set,
- uint32_t descriptorWriteCount,
- const VkWriteDescriptorSet* pDescriptorWrites) {
- FinishWriteObject(commandBuffer);
- FinishReadObject(layout);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdPushDescriptorSetWithTemplateKHR(
- VkCommandBuffer commandBuffer,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- VkPipelineLayout layout,
- uint32_t set,
- const void* pData) {
- StartWriteObject(commandBuffer);
- StartReadObject(descriptorUpdateTemplate);
- StartReadObject(layout);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdPushDescriptorSetWithTemplateKHR(
- VkCommandBuffer commandBuffer,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- VkPipelineLayout layout,
- uint32_t set,
- const void* pData) {
- FinishWriteObject(commandBuffer);
- FinishReadObject(descriptorUpdateTemplate);
- FinishReadObject(layout);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCreateDescriptorUpdateTemplateKHR(
- VkDevice device,
- const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordCreateDescriptorUpdateTemplateKHR(
- VkDevice device,
- const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordDestroyDescriptorUpdateTemplateKHR(
- VkDevice device,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const VkAllocationCallbacks* pAllocator) {
- StartReadObject(device);
- StartWriteObject(descriptorUpdateTemplate);
- // Host access to descriptorUpdateTemplate must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordDestroyDescriptorUpdateTemplateKHR(
- VkDevice device,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const VkAllocationCallbacks* pAllocator) {
- FinishReadObject(device);
- FinishWriteObject(descriptorUpdateTemplate);
- // Host access to descriptorUpdateTemplate must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordUpdateDescriptorSetWithTemplateKHR(
- VkDevice device,
- VkDescriptorSet descriptorSet,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const void* pData) {
- StartReadObject(device);
- StartWriteObject(descriptorSet);
- StartReadObject(descriptorUpdateTemplate);
- // Host access to descriptorSet must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordUpdateDescriptorSetWithTemplateKHR(
- VkDevice device,
- VkDescriptorSet descriptorSet,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const void* pData) {
- FinishReadObject(device);
- FinishWriteObject(descriptorSet);
- FinishReadObject(descriptorUpdateTemplate);
- // Host access to descriptorSet must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCreateRenderPass2KHR(
- VkDevice device,
- const VkRenderPassCreateInfo2KHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkRenderPass* pRenderPass) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordCreateRenderPass2KHR(
- VkDevice device,
- const VkRenderPassCreateInfo2KHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkRenderPass* pRenderPass,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordCmdBeginRenderPass2KHR(
- VkCommandBuffer commandBuffer,
- const VkRenderPassBeginInfo* pRenderPassBegin,
- const VkSubpassBeginInfoKHR* pSubpassBeginInfo) {
- StartWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdBeginRenderPass2KHR(
- VkCommandBuffer commandBuffer,
- const VkRenderPassBeginInfo* pRenderPassBegin,
- const VkSubpassBeginInfoKHR* pSubpassBeginInfo) {
- FinishWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdNextSubpass2KHR(
- VkCommandBuffer commandBuffer,
- const VkSubpassBeginInfoKHR* pSubpassBeginInfo,
- const VkSubpassEndInfoKHR* pSubpassEndInfo) {
- StartWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdNextSubpass2KHR(
- VkCommandBuffer commandBuffer,
- const VkSubpassBeginInfoKHR* pSubpassBeginInfo,
- const VkSubpassEndInfoKHR* pSubpassEndInfo) {
- FinishWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdEndRenderPass2KHR(
- VkCommandBuffer commandBuffer,
- const VkSubpassEndInfoKHR* pSubpassEndInfo) {
- StartWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdEndRenderPass2KHR(
- VkCommandBuffer commandBuffer,
- const VkSubpassEndInfoKHR* pSubpassEndInfo) {
- FinishWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordGetSwapchainStatusKHR(
- VkDevice device,
- VkSwapchainKHR swapchain) {
- StartReadObject(device);
- StartWriteObject(swapchain);
- // Host access to swapchain must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordGetSwapchainStatusKHR(
- VkDevice device,
- VkSwapchainKHR swapchain,
- VkResult result) {
- FinishReadObject(device);
- FinishWriteObject(swapchain);
- // Host access to swapchain must be externally synchronized
-}
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-void ThreadSafety::PreCallRecordImportFenceWin32HandleKHR(
- VkDevice device,
- const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordImportFenceWin32HandleKHR(
- VkDevice device,
- const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordGetFenceWin32HandleKHR(
- VkDevice device,
- const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo,
- HANDLE* pHandle) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordGetFenceWin32HandleKHR(
- VkDevice device,
- const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo,
- HANDLE* pHandle,
- VkResult result) {
- FinishReadObject(device);
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-void ThreadSafety::PreCallRecordImportFenceFdKHR(
- VkDevice device,
- const VkImportFenceFdInfoKHR* pImportFenceFdInfo) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordImportFenceFdKHR(
- VkDevice device,
- const VkImportFenceFdInfoKHR* pImportFenceFdInfo,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordGetFenceFdKHR(
- VkDevice device,
- const VkFenceGetFdInfoKHR* pGetFdInfo,
- int* pFd) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordGetFenceFdKHR(
- VkDevice device,
- const VkFenceGetFdInfoKHR* pGetFdInfo,
- int* pFd,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordGetDisplayModeProperties2KHR(
- VkPhysicalDevice physicalDevice,
- VkDisplayKHR display,
- uint32_t* pPropertyCount,
- VkDisplayModeProperties2KHR* pProperties) {
- StartReadObject(display);
-}
-
-void ThreadSafety::PostCallRecordGetDisplayModeProperties2KHR(
- VkPhysicalDevice physicalDevice,
- VkDisplayKHR display,
- uint32_t* pPropertyCount,
- VkDisplayModeProperties2KHR* pProperties,
- VkResult result) {
- FinishReadObject(display);
-}
-
-void ThreadSafety::PreCallRecordGetImageMemoryRequirements2KHR(
- VkDevice device,
- const VkImageMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordGetImageMemoryRequirements2KHR(
- VkDevice device,
- const VkImageMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordGetBufferMemoryRequirements2KHR(
- VkDevice device,
- const VkBufferMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordGetBufferMemoryRequirements2KHR(
- VkDevice device,
- const VkBufferMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordGetImageSparseMemoryRequirements2KHR(
- VkDevice device,
- const VkImageSparseMemoryRequirementsInfo2* pInfo,
- uint32_t* pSparseMemoryRequirementCount,
- VkSparseImageMemoryRequirements2* pSparseMemoryRequirements) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordGetImageSparseMemoryRequirements2KHR(
- VkDevice device,
- const VkImageSparseMemoryRequirementsInfo2* pInfo,
- uint32_t* pSparseMemoryRequirementCount,
- VkSparseImageMemoryRequirements2* pSparseMemoryRequirements) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordCreateSamplerYcbcrConversionKHR(
- VkDevice device,
- const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSamplerYcbcrConversion* pYcbcrConversion) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordCreateSamplerYcbcrConversionKHR(
- VkDevice device,
- const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSamplerYcbcrConversion* pYcbcrConversion,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordDestroySamplerYcbcrConversionKHR(
- VkDevice device,
- VkSamplerYcbcrConversion ycbcrConversion,
- const VkAllocationCallbacks* pAllocator) {
- StartReadObject(device);
- StartWriteObject(ycbcrConversion);
- // Host access to ycbcrConversion must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordDestroySamplerYcbcrConversionKHR(
- VkDevice device,
- VkSamplerYcbcrConversion ycbcrConversion,
- const VkAllocationCallbacks* pAllocator) {
- FinishReadObject(device);
- FinishWriteObject(ycbcrConversion);
- // Host access to ycbcrConversion must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordBindBufferMemory2KHR(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindBufferMemoryInfo* pBindInfos) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordBindBufferMemory2KHR(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindBufferMemoryInfo* pBindInfos,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordBindImageMemory2KHR(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindImageMemoryInfo* pBindInfos) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordBindImageMemory2KHR(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindImageMemoryInfo* pBindInfos,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordGetDescriptorSetLayoutSupportKHR(
- VkDevice device,
- const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
- VkDescriptorSetLayoutSupport* pSupport) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordGetDescriptorSetLayoutSupportKHR(
- VkDevice device,
- const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
- VkDescriptorSetLayoutSupport* pSupport) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordCmdDrawIndirectCountKHR(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride) {
- StartWriteObject(commandBuffer);
- StartReadObject(buffer);
- StartReadObject(countBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdDrawIndirectCountKHR(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride) {
- FinishWriteObject(commandBuffer);
- FinishReadObject(buffer);
- FinishReadObject(countBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdDrawIndexedIndirectCountKHR(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride) {
- StartWriteObject(commandBuffer);
- StartReadObject(buffer);
- StartReadObject(countBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdDrawIndexedIndirectCountKHR(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride) {
- FinishWriteObject(commandBuffer);
- FinishReadObject(buffer);
- FinishReadObject(countBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordGetPipelineExecutablePropertiesKHR(
- VkDevice device,
- const VkPipelineInfoKHR* pPipelineInfo,
- uint32_t* pExecutableCount,
- VkPipelineExecutablePropertiesKHR* pProperties) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordGetPipelineExecutablePropertiesKHR(
- VkDevice device,
- const VkPipelineInfoKHR* pPipelineInfo,
- uint32_t* pExecutableCount,
- VkPipelineExecutablePropertiesKHR* pProperties,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordGetPipelineExecutableStatisticsKHR(
- VkDevice device,
- const VkPipelineExecutableInfoKHR* pExecutableInfo,
- uint32_t* pStatisticCount,
- VkPipelineExecutableStatisticKHR* pStatistics) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordGetPipelineExecutableStatisticsKHR(
- VkDevice device,
- const VkPipelineExecutableInfoKHR* pExecutableInfo,
- uint32_t* pStatisticCount,
- VkPipelineExecutableStatisticKHR* pStatistics,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordGetPipelineExecutableInternalRepresentationsKHR(
- VkDevice device,
- const VkPipelineExecutableInfoKHR* pExecutableInfo,
- uint32_t* pInternalRepresentationCount,
- VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordGetPipelineExecutableInternalRepresentationsKHR(
- VkDevice device,
- const VkPipelineExecutableInfoKHR* pExecutableInfo,
- uint32_t* pInternalRepresentationCount,
- VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordCreateDebugReportCallbackEXT(
- VkInstance instance,
- const VkDebugReportCallbackCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDebugReportCallbackEXT* pCallback) {
- StartReadObject(instance);
-}
-
-void ThreadSafety::PostCallRecordCreateDebugReportCallbackEXT(
- VkInstance instance,
- const VkDebugReportCallbackCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDebugReportCallbackEXT* pCallback,
- VkResult result) {
- FinishReadObject(instance);
-}
-
-void ThreadSafety::PreCallRecordDestroyDebugReportCallbackEXT(
- VkInstance instance,
- VkDebugReportCallbackEXT callback,
- const VkAllocationCallbacks* pAllocator) {
- StartReadObject(instance);
- StartWriteObject(callback);
- // Host access to callback must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordDestroyDebugReportCallbackEXT(
- VkInstance instance,
- VkDebugReportCallbackEXT callback,
- const VkAllocationCallbacks* pAllocator) {
- FinishReadObject(instance);
- FinishWriteObject(callback);
- // Host access to callback must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordDebugReportMessageEXT(
- VkInstance instance,
- VkDebugReportFlagsEXT flags,
- VkDebugReportObjectTypeEXT objectType,
- uint64_t object,
- size_t location,
- int32_t messageCode,
- const char* pLayerPrefix,
- const char* pMessage) {
- StartReadObject(instance);
-}
-
-void ThreadSafety::PostCallRecordDebugReportMessageEXT(
- VkInstance instance,
- VkDebugReportFlagsEXT flags,
- VkDebugReportObjectTypeEXT objectType,
- uint64_t object,
- size_t location,
- int32_t messageCode,
- const char* pLayerPrefix,
- const char* pMessage) {
- FinishReadObject(instance);
-}
-// TODO - not wrapping EXT function vkDebugMarkerSetObjectTagEXT
-// TODO - not wrapping EXT function vkDebugMarkerSetObjectNameEXT
-// TODO - not wrapping EXT function vkCmdDebugMarkerBeginEXT
-// TODO - not wrapping EXT function vkCmdDebugMarkerEndEXT
-// TODO - not wrapping EXT function vkCmdDebugMarkerInsertEXT
-
-void ThreadSafety::PreCallRecordCmdBindTransformFeedbackBuffersEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstBinding,
- uint32_t bindingCount,
- const VkBuffer* pBuffers,
- const VkDeviceSize* pOffsets,
- const VkDeviceSize* pSizes) {
- StartWriteObject(commandBuffer);
- if (pBuffers) {
- for (uint32_t index = 0; index < bindingCount; index++) {
- StartReadObject(pBuffers[index]);
- }
- }
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdBindTransformFeedbackBuffersEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstBinding,
- uint32_t bindingCount,
- const VkBuffer* pBuffers,
- const VkDeviceSize* pOffsets,
- const VkDeviceSize* pSizes) {
- FinishWriteObject(commandBuffer);
- if (pBuffers) {
- for (uint32_t index = 0; index < bindingCount; index++) {
- FinishReadObject(pBuffers[index]);
- }
- }
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdBeginTransformFeedbackEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstCounterBuffer,
- uint32_t counterBufferCount,
- const VkBuffer* pCounterBuffers,
- const VkDeviceSize* pCounterBufferOffsets) {
- StartWriteObject(commandBuffer);
- if (pCounterBuffers) {
- for (uint32_t index = 0; index < counterBufferCount; index++) {
- StartReadObject(pCounterBuffers[index]);
- }
- }
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdBeginTransformFeedbackEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstCounterBuffer,
- uint32_t counterBufferCount,
- const VkBuffer* pCounterBuffers,
- const VkDeviceSize* pCounterBufferOffsets) {
- FinishWriteObject(commandBuffer);
- if (pCounterBuffers) {
- for (uint32_t index = 0; index < counterBufferCount; index++) {
- FinishReadObject(pCounterBuffers[index]);
- }
- }
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdEndTransformFeedbackEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstCounterBuffer,
- uint32_t counterBufferCount,
- const VkBuffer* pCounterBuffers,
- const VkDeviceSize* pCounterBufferOffsets) {
- StartWriteObject(commandBuffer);
- if (pCounterBuffers) {
- for (uint32_t index = 0; index < counterBufferCount; index++) {
- StartReadObject(pCounterBuffers[index]);
- }
- }
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdEndTransformFeedbackEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstCounterBuffer,
- uint32_t counterBufferCount,
- const VkBuffer* pCounterBuffers,
- const VkDeviceSize* pCounterBufferOffsets) {
- FinishWriteObject(commandBuffer);
- if (pCounterBuffers) {
- for (uint32_t index = 0; index < counterBufferCount; index++) {
- FinishReadObject(pCounterBuffers[index]);
- }
- }
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdBeginQueryIndexedEXT(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query,
- VkQueryControlFlags flags,
- uint32_t index) {
- StartWriteObject(commandBuffer);
- StartReadObject(queryPool);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdBeginQueryIndexedEXT(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query,
- VkQueryControlFlags flags,
- uint32_t index) {
- FinishWriteObject(commandBuffer);
- FinishReadObject(queryPool);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdEndQueryIndexedEXT(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query,
- uint32_t index) {
- StartWriteObject(commandBuffer);
- StartReadObject(queryPool);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdEndQueryIndexedEXT(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query,
- uint32_t index) {
- FinishWriteObject(commandBuffer);
- FinishReadObject(queryPool);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdDrawIndirectByteCountEXT(
- VkCommandBuffer commandBuffer,
- uint32_t instanceCount,
- uint32_t firstInstance,
- VkBuffer counterBuffer,
- VkDeviceSize counterBufferOffset,
- uint32_t counterOffset,
- uint32_t vertexStride) {
- StartWriteObject(commandBuffer);
- StartReadObject(counterBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdDrawIndirectByteCountEXT(
- VkCommandBuffer commandBuffer,
- uint32_t instanceCount,
- uint32_t firstInstance,
- VkBuffer counterBuffer,
- VkDeviceSize counterBufferOffset,
- uint32_t counterOffset,
- uint32_t vertexStride) {
- FinishWriteObject(commandBuffer);
- FinishReadObject(counterBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordGetImageViewHandleNVX(
- VkDevice device,
- const VkImageViewHandleInfoNVX* pInfo) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordGetImageViewHandleNVX(
- VkDevice device,
- const VkImageViewHandleInfoNVX* pInfo) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordCmdDrawIndirectCountAMD(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride) {
- StartWriteObject(commandBuffer);
- StartReadObject(buffer);
- StartReadObject(countBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdDrawIndirectCountAMD(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride) {
- FinishWriteObject(commandBuffer);
- FinishReadObject(buffer);
- FinishReadObject(countBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdDrawIndexedIndirectCountAMD(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride) {
- StartWriteObject(commandBuffer);
- StartReadObject(buffer);
- StartReadObject(countBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdDrawIndexedIndirectCountAMD(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride) {
- FinishWriteObject(commandBuffer);
- FinishReadObject(buffer);
- FinishReadObject(countBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordGetShaderInfoAMD(
- VkDevice device,
- VkPipeline pipeline,
- VkShaderStageFlagBits shaderStage,
- VkShaderInfoTypeAMD infoType,
- size_t* pInfoSize,
- void* pInfo) {
- StartReadObject(device);
- StartReadObject(pipeline);
-}
-
-void ThreadSafety::PostCallRecordGetShaderInfoAMD(
- VkDevice device,
- VkPipeline pipeline,
- VkShaderStageFlagBits shaderStage,
- VkShaderInfoTypeAMD infoType,
- size_t* pInfoSize,
- void* pInfo,
- VkResult result) {
- FinishReadObject(device);
- FinishReadObject(pipeline);
-}
-
-#ifdef VK_USE_PLATFORM_GGP
-
-void ThreadSafety::PreCallRecordCreateStreamDescriptorSurfaceGGP(
- VkInstance instance,
- const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- StartReadObject(instance);
-}
-
-void ThreadSafety::PostCallRecordCreateStreamDescriptorSurfaceGGP(
- VkInstance instance,
- const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result) {
- FinishReadObject(instance);
-}
-#endif // VK_USE_PLATFORM_GGP
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-void ThreadSafety::PreCallRecordGetMemoryWin32HandleNV(
- VkDevice device,
- VkDeviceMemory memory,
- VkExternalMemoryHandleTypeFlagsNV handleType,
- HANDLE* pHandle) {
- StartReadObject(device);
- StartReadObject(memory);
-}
-
-void ThreadSafety::PostCallRecordGetMemoryWin32HandleNV(
- VkDevice device,
- VkDeviceMemory memory,
- VkExternalMemoryHandleTypeFlagsNV handleType,
- HANDLE* pHandle,
- VkResult result) {
- FinishReadObject(device);
- FinishReadObject(memory);
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_VI_NN
-
-void ThreadSafety::PreCallRecordCreateViSurfaceNN(
- VkInstance instance,
- const VkViSurfaceCreateInfoNN* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- StartReadObject(instance);
-}
-
-void ThreadSafety::PostCallRecordCreateViSurfaceNN(
- VkInstance instance,
- const VkViSurfaceCreateInfoNN* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result) {
- FinishReadObject(instance);
-}
-#endif // VK_USE_PLATFORM_VI_NN
-
-void ThreadSafety::PreCallRecordCmdBeginConditionalRenderingEXT(
- VkCommandBuffer commandBuffer,
- const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin) {
- StartWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdBeginConditionalRenderingEXT(
- VkCommandBuffer commandBuffer,
- const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin) {
- FinishWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdEndConditionalRenderingEXT(
- VkCommandBuffer commandBuffer) {
- StartWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdEndConditionalRenderingEXT(
- VkCommandBuffer commandBuffer) {
- FinishWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdProcessCommandsNVX(
- VkCommandBuffer commandBuffer,
- const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo) {
- StartWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdProcessCommandsNVX(
- VkCommandBuffer commandBuffer,
- const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo) {
- FinishWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdReserveSpaceForCommandsNVX(
- VkCommandBuffer commandBuffer,
- const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo) {
- StartWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdReserveSpaceForCommandsNVX(
- VkCommandBuffer commandBuffer,
- const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo) {
- FinishWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCreateIndirectCommandsLayoutNVX(
- VkDevice device,
- const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordCreateIndirectCommandsLayoutNVX(
- VkDevice device,
- const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordDestroyIndirectCommandsLayoutNVX(
- VkDevice device,
- VkIndirectCommandsLayoutNVX indirectCommandsLayout,
- const VkAllocationCallbacks* pAllocator) {
- StartReadObject(device);
- StartReadObject(indirectCommandsLayout);
-}
-
-void ThreadSafety::PostCallRecordDestroyIndirectCommandsLayoutNVX(
- VkDevice device,
- VkIndirectCommandsLayoutNVX indirectCommandsLayout,
- const VkAllocationCallbacks* pAllocator) {
- FinishReadObject(device);
- FinishReadObject(indirectCommandsLayout);
-}
-
-void ThreadSafety::PreCallRecordCreateObjectTableNVX(
- VkDevice device,
- const VkObjectTableCreateInfoNVX* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkObjectTableNVX* pObjectTable) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordCreateObjectTableNVX(
- VkDevice device,
- const VkObjectTableCreateInfoNVX* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkObjectTableNVX* pObjectTable,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordDestroyObjectTableNVX(
- VkDevice device,
- VkObjectTableNVX objectTable,
- const VkAllocationCallbacks* pAllocator) {
- StartReadObject(device);
- StartWriteObject(objectTable);
- // Host access to objectTable must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordDestroyObjectTableNVX(
- VkDevice device,
- VkObjectTableNVX objectTable,
- const VkAllocationCallbacks* pAllocator) {
- FinishReadObject(device);
- FinishWriteObject(objectTable);
- // Host access to objectTable must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordRegisterObjectsNVX(
- VkDevice device,
- VkObjectTableNVX objectTable,
- uint32_t objectCount,
- const VkObjectTableEntryNVX* const* ppObjectTableEntries,
- const uint32_t* pObjectIndices) {
- StartReadObject(device);
- StartWriteObject(objectTable);
- // Host access to objectTable must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordRegisterObjectsNVX(
- VkDevice device,
- VkObjectTableNVX objectTable,
- uint32_t objectCount,
- const VkObjectTableEntryNVX* const* ppObjectTableEntries,
- const uint32_t* pObjectIndices,
- VkResult result) {
- FinishReadObject(device);
- FinishWriteObject(objectTable);
- // Host access to objectTable must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordUnregisterObjectsNVX(
- VkDevice device,
- VkObjectTableNVX objectTable,
- uint32_t objectCount,
- const VkObjectEntryTypeNVX* pObjectEntryTypes,
- const uint32_t* pObjectIndices) {
- StartReadObject(device);
- StartWriteObject(objectTable);
- // Host access to objectTable must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordUnregisterObjectsNVX(
- VkDevice device,
- VkObjectTableNVX objectTable,
- uint32_t objectCount,
- const VkObjectEntryTypeNVX* pObjectEntryTypes,
- const uint32_t* pObjectIndices,
- VkResult result) {
- FinishReadObject(device);
- FinishWriteObject(objectTable);
- // Host access to objectTable must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdSetViewportWScalingNV(
- VkCommandBuffer commandBuffer,
- uint32_t firstViewport,
- uint32_t viewportCount,
- const VkViewportWScalingNV* pViewportWScalings) {
- StartWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdSetViewportWScalingNV(
- VkCommandBuffer commandBuffer,
- uint32_t firstViewport,
- uint32_t viewportCount,
- const VkViewportWScalingNV* pViewportWScalings) {
- FinishWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordReleaseDisplayEXT(
- VkPhysicalDevice physicalDevice,
- VkDisplayKHR display) {
- StartReadObject(display);
-}
-
-void ThreadSafety::PostCallRecordReleaseDisplayEXT(
- VkPhysicalDevice physicalDevice,
- VkDisplayKHR display,
- VkResult result) {
- FinishReadObject(display);
-}
-
-#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
-
-void ThreadSafety::PreCallRecordAcquireXlibDisplayEXT(
- VkPhysicalDevice physicalDevice,
- Display* dpy,
- VkDisplayKHR display) {
- StartReadObject(display);
-}
-
-void ThreadSafety::PostCallRecordAcquireXlibDisplayEXT(
- VkPhysicalDevice physicalDevice,
- Display* dpy,
- VkDisplayKHR display,
- VkResult result) {
- FinishReadObject(display);
-}
-#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
-
-void ThreadSafety::PreCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- VkSurfaceCapabilities2EXT* pSurfaceCapabilities) {
- StartReadObject(surface);
-}
-
-void ThreadSafety::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- VkSurfaceCapabilities2EXT* pSurfaceCapabilities,
- VkResult result) {
- FinishReadObject(surface);
-}
-
-void ThreadSafety::PreCallRecordDisplayPowerControlEXT(
- VkDevice device,
- VkDisplayKHR display,
- const VkDisplayPowerInfoEXT* pDisplayPowerInfo) {
- StartReadObject(device);
- StartReadObject(display);
-}
-
-void ThreadSafety::PostCallRecordDisplayPowerControlEXT(
- VkDevice device,
- VkDisplayKHR display,
- const VkDisplayPowerInfoEXT* pDisplayPowerInfo,
- VkResult result) {
- FinishReadObject(device);
- FinishReadObject(display);
-}
-
-void ThreadSafety::PreCallRecordRegisterDeviceEventEXT(
- VkDevice device,
- const VkDeviceEventInfoEXT* pDeviceEventInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFence* pFence) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordRegisterDeviceEventEXT(
- VkDevice device,
- const VkDeviceEventInfoEXT* pDeviceEventInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFence* pFence,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordRegisterDisplayEventEXT(
- VkDevice device,
- VkDisplayKHR display,
- const VkDisplayEventInfoEXT* pDisplayEventInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFence* pFence) {
- StartReadObject(device);
- StartReadObject(display);
-}
-
-void ThreadSafety::PostCallRecordRegisterDisplayEventEXT(
- VkDevice device,
- VkDisplayKHR display,
- const VkDisplayEventInfoEXT* pDisplayEventInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFence* pFence,
- VkResult result) {
- FinishReadObject(device);
- FinishReadObject(display);
-}
-
-void ThreadSafety::PreCallRecordGetSwapchainCounterEXT(
- VkDevice device,
- VkSwapchainKHR swapchain,
- VkSurfaceCounterFlagBitsEXT counter,
- uint64_t* pCounterValue) {
- StartReadObject(device);
- StartReadObject(swapchain);
-}
-
-void ThreadSafety::PostCallRecordGetSwapchainCounterEXT(
- VkDevice device,
- VkSwapchainKHR swapchain,
- VkSurfaceCounterFlagBitsEXT counter,
- uint64_t* pCounterValue,
- VkResult result) {
- FinishReadObject(device);
- FinishReadObject(swapchain);
-}
-
-void ThreadSafety::PreCallRecordGetRefreshCycleDurationGOOGLE(
- VkDevice device,
- VkSwapchainKHR swapchain,
- VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties) {
- StartReadObject(device);
- StartWriteObject(swapchain);
- // Host access to swapchain must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordGetRefreshCycleDurationGOOGLE(
- VkDevice device,
- VkSwapchainKHR swapchain,
- VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties,
- VkResult result) {
- FinishReadObject(device);
- FinishWriteObject(swapchain);
- // Host access to swapchain must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordGetPastPresentationTimingGOOGLE(
- VkDevice device,
- VkSwapchainKHR swapchain,
- uint32_t* pPresentationTimingCount,
- VkPastPresentationTimingGOOGLE* pPresentationTimings) {
- StartReadObject(device);
- StartWriteObject(swapchain);
- // Host access to swapchain must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordGetPastPresentationTimingGOOGLE(
- VkDevice device,
- VkSwapchainKHR swapchain,
- uint32_t* pPresentationTimingCount,
- VkPastPresentationTimingGOOGLE* pPresentationTimings,
- VkResult result) {
- FinishReadObject(device);
- FinishWriteObject(swapchain);
- // Host access to swapchain must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdSetDiscardRectangleEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstDiscardRectangle,
- uint32_t discardRectangleCount,
- const VkRect2D* pDiscardRectangles) {
- StartWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdSetDiscardRectangleEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstDiscardRectangle,
- uint32_t discardRectangleCount,
- const VkRect2D* pDiscardRectangles) {
- FinishWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordSetHdrMetadataEXT(
- VkDevice device,
- uint32_t swapchainCount,
- const VkSwapchainKHR* pSwapchains,
- const VkHdrMetadataEXT* pMetadata) {
- StartReadObject(device);
- if (pSwapchains) {
- for (uint32_t index = 0; index < swapchainCount; index++) {
- StartReadObject(pSwapchains[index]);
- }
- }
-}
-
-void ThreadSafety::PostCallRecordSetHdrMetadataEXT(
- VkDevice device,
- uint32_t swapchainCount,
- const VkSwapchainKHR* pSwapchains,
- const VkHdrMetadataEXT* pMetadata) {
- FinishReadObject(device);
- if (pSwapchains) {
- for (uint32_t index = 0; index < swapchainCount; index++) {
- FinishReadObject(pSwapchains[index]);
- }
- }
-}
-
-#ifdef VK_USE_PLATFORM_IOS_MVK
-
-void ThreadSafety::PreCallRecordCreateIOSSurfaceMVK(
- VkInstance instance,
- const VkIOSSurfaceCreateInfoMVK* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- StartReadObject(instance);
-}
-
-void ThreadSafety::PostCallRecordCreateIOSSurfaceMVK(
- VkInstance instance,
- const VkIOSSurfaceCreateInfoMVK* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result) {
- FinishReadObject(instance);
-}
-#endif // VK_USE_PLATFORM_IOS_MVK
-
-#ifdef VK_USE_PLATFORM_MACOS_MVK
-
-void ThreadSafety::PreCallRecordCreateMacOSSurfaceMVK(
- VkInstance instance,
- const VkMacOSSurfaceCreateInfoMVK* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- StartReadObject(instance);
-}
-
-void ThreadSafety::PostCallRecordCreateMacOSSurfaceMVK(
- VkInstance instance,
- const VkMacOSSurfaceCreateInfoMVK* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result) {
- FinishReadObject(instance);
-}
-#endif // VK_USE_PLATFORM_MACOS_MVK
-// TODO - not wrapping EXT function vkSetDebugUtilsObjectNameEXT
-// TODO - not wrapping EXT function vkSetDebugUtilsObjectTagEXT
-
-void ThreadSafety::PreCallRecordQueueBeginDebugUtilsLabelEXT(
- VkQueue queue,
- const VkDebugUtilsLabelEXT* pLabelInfo) {
- StartReadObject(queue);
-}
-
-void ThreadSafety::PostCallRecordQueueBeginDebugUtilsLabelEXT(
- VkQueue queue,
- const VkDebugUtilsLabelEXT* pLabelInfo) {
- FinishReadObject(queue);
-}
-
-void ThreadSafety::PreCallRecordQueueEndDebugUtilsLabelEXT(
- VkQueue queue) {
- StartReadObject(queue);
-}
-
-void ThreadSafety::PostCallRecordQueueEndDebugUtilsLabelEXT(
- VkQueue queue) {
- FinishReadObject(queue);
-}
-
-void ThreadSafety::PreCallRecordQueueInsertDebugUtilsLabelEXT(
- VkQueue queue,
- const VkDebugUtilsLabelEXT* pLabelInfo) {
- StartReadObject(queue);
-}
-
-void ThreadSafety::PostCallRecordQueueInsertDebugUtilsLabelEXT(
- VkQueue queue,
- const VkDebugUtilsLabelEXT* pLabelInfo) {
- FinishReadObject(queue);
-}
-
-void ThreadSafety::PreCallRecordCmdBeginDebugUtilsLabelEXT(
- VkCommandBuffer commandBuffer,
- const VkDebugUtilsLabelEXT* pLabelInfo) {
- StartReadObject(commandBuffer);
-}
-
-void ThreadSafety::PostCallRecordCmdBeginDebugUtilsLabelEXT(
- VkCommandBuffer commandBuffer,
- const VkDebugUtilsLabelEXT* pLabelInfo) {
- FinishReadObject(commandBuffer);
-}
-
-void ThreadSafety::PreCallRecordCmdEndDebugUtilsLabelEXT(
- VkCommandBuffer commandBuffer) {
- StartReadObject(commandBuffer);
-}
-
-void ThreadSafety::PostCallRecordCmdEndDebugUtilsLabelEXT(
- VkCommandBuffer commandBuffer) {
- FinishReadObject(commandBuffer);
-}
-
-void ThreadSafety::PreCallRecordCmdInsertDebugUtilsLabelEXT(
- VkCommandBuffer commandBuffer,
- const VkDebugUtilsLabelEXT* pLabelInfo) {
- StartReadObject(commandBuffer);
-}
-
-void ThreadSafety::PostCallRecordCmdInsertDebugUtilsLabelEXT(
- VkCommandBuffer commandBuffer,
- const VkDebugUtilsLabelEXT* pLabelInfo) {
- FinishReadObject(commandBuffer);
-}
-
-void ThreadSafety::PreCallRecordCreateDebugUtilsMessengerEXT(
- VkInstance instance,
- const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDebugUtilsMessengerEXT* pMessenger) {
- StartReadObject(instance);
-}
-
-void ThreadSafety::PostCallRecordCreateDebugUtilsMessengerEXT(
- VkInstance instance,
- const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDebugUtilsMessengerEXT* pMessenger,
- VkResult result) {
- FinishReadObject(instance);
-}
-
-void ThreadSafety::PreCallRecordDestroyDebugUtilsMessengerEXT(
- VkInstance instance,
- VkDebugUtilsMessengerEXT messenger,
- const VkAllocationCallbacks* pAllocator) {
- StartReadObject(instance);
- StartWriteObject(messenger);
- // Host access to messenger must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordDestroyDebugUtilsMessengerEXT(
- VkInstance instance,
- VkDebugUtilsMessengerEXT messenger,
- const VkAllocationCallbacks* pAllocator) {
- FinishReadObject(instance);
- FinishWriteObject(messenger);
- // Host access to messenger must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordSubmitDebugUtilsMessageEXT(
- VkInstance instance,
- VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
- VkDebugUtilsMessageTypeFlagsEXT messageTypes,
- const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData) {
- StartReadObject(instance);
-}
-
-void ThreadSafety::PostCallRecordSubmitDebugUtilsMessageEXT(
- VkInstance instance,
- VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
- VkDebugUtilsMessageTypeFlagsEXT messageTypes,
- const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData) {
- FinishReadObject(instance);
-}
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-
-void ThreadSafety::PreCallRecordGetAndroidHardwareBufferPropertiesANDROID(
- VkDevice device,
- const struct AHardwareBuffer* buffer,
- VkAndroidHardwareBufferPropertiesANDROID* pProperties) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordGetAndroidHardwareBufferPropertiesANDROID(
- VkDevice device,
- const struct AHardwareBuffer* buffer,
- VkAndroidHardwareBufferPropertiesANDROID* pProperties,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordGetMemoryAndroidHardwareBufferANDROID(
- VkDevice device,
- const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo,
- struct AHardwareBuffer** pBuffer) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordGetMemoryAndroidHardwareBufferANDROID(
- VkDevice device,
- const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo,
- struct AHardwareBuffer** pBuffer,
- VkResult result) {
- FinishReadObject(device);
-}
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-
-void ThreadSafety::PreCallRecordCmdSetSampleLocationsEXT(
- VkCommandBuffer commandBuffer,
- const VkSampleLocationsInfoEXT* pSampleLocationsInfo) {
- StartWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdSetSampleLocationsEXT(
- VkCommandBuffer commandBuffer,
- const VkSampleLocationsInfoEXT* pSampleLocationsInfo) {
- FinishWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordGetImageDrmFormatModifierPropertiesEXT(
- VkDevice device,
- VkImage image,
- VkImageDrmFormatModifierPropertiesEXT* pProperties) {
- StartReadObject(device);
- StartReadObject(image);
-}
-
-void ThreadSafety::PostCallRecordGetImageDrmFormatModifierPropertiesEXT(
- VkDevice device,
- VkImage image,
- VkImageDrmFormatModifierPropertiesEXT* pProperties,
- VkResult result) {
- FinishReadObject(device);
- FinishReadObject(image);
-}
-
-void ThreadSafety::PreCallRecordCreateValidationCacheEXT(
- VkDevice device,
- const VkValidationCacheCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkValidationCacheEXT* pValidationCache) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordCreateValidationCacheEXT(
- VkDevice device,
- const VkValidationCacheCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkValidationCacheEXT* pValidationCache,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordDestroyValidationCacheEXT(
- VkDevice device,
- VkValidationCacheEXT validationCache,
- const VkAllocationCallbacks* pAllocator) {
- StartReadObject(device);
- StartWriteObject(validationCache);
- // Host access to validationCache must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordDestroyValidationCacheEXT(
- VkDevice device,
- VkValidationCacheEXT validationCache,
- const VkAllocationCallbacks* pAllocator) {
- FinishReadObject(device);
- FinishWriteObject(validationCache);
- // Host access to validationCache must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordMergeValidationCachesEXT(
- VkDevice device,
- VkValidationCacheEXT dstCache,
- uint32_t srcCacheCount,
- const VkValidationCacheEXT* pSrcCaches) {
- StartReadObject(device);
- StartWriteObject(dstCache);
- if (pSrcCaches) {
- for (uint32_t index = 0; index < srcCacheCount; index++) {
- StartReadObject(pSrcCaches[index]);
- }
- }
- // Host access to dstCache must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordMergeValidationCachesEXT(
- VkDevice device,
- VkValidationCacheEXT dstCache,
- uint32_t srcCacheCount,
- const VkValidationCacheEXT* pSrcCaches,
- VkResult result) {
- FinishReadObject(device);
- FinishWriteObject(dstCache);
- if (pSrcCaches) {
- for (uint32_t index = 0; index < srcCacheCount; index++) {
- FinishReadObject(pSrcCaches[index]);
- }
- }
- // Host access to dstCache must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordGetValidationCacheDataEXT(
- VkDevice device,
- VkValidationCacheEXT validationCache,
- size_t* pDataSize,
- void* pData) {
- StartReadObject(device);
- StartReadObject(validationCache);
-}
-
-void ThreadSafety::PostCallRecordGetValidationCacheDataEXT(
- VkDevice device,
- VkValidationCacheEXT validationCache,
- size_t* pDataSize,
- void* pData,
- VkResult result) {
- FinishReadObject(device);
- FinishReadObject(validationCache);
-}
-
-void ThreadSafety::PreCallRecordCmdBindShadingRateImageNV(
- VkCommandBuffer commandBuffer,
- VkImageView imageView,
- VkImageLayout imageLayout) {
- StartWriteObject(commandBuffer);
- StartReadObject(imageView);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdBindShadingRateImageNV(
- VkCommandBuffer commandBuffer,
- VkImageView imageView,
- VkImageLayout imageLayout) {
- FinishWriteObject(commandBuffer);
- FinishReadObject(imageView);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdSetViewportShadingRatePaletteNV(
- VkCommandBuffer commandBuffer,
- uint32_t firstViewport,
- uint32_t viewportCount,
- const VkShadingRatePaletteNV* pShadingRatePalettes) {
- StartWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdSetViewportShadingRatePaletteNV(
- VkCommandBuffer commandBuffer,
- uint32_t firstViewport,
- uint32_t viewportCount,
- const VkShadingRatePaletteNV* pShadingRatePalettes) {
- FinishWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdSetCoarseSampleOrderNV(
- VkCommandBuffer commandBuffer,
- VkCoarseSampleOrderTypeNV sampleOrderType,
- uint32_t customSampleOrderCount,
- const VkCoarseSampleOrderCustomNV* pCustomSampleOrders) {
- StartWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdSetCoarseSampleOrderNV(
- VkCommandBuffer commandBuffer,
- VkCoarseSampleOrderTypeNV sampleOrderType,
- uint32_t customSampleOrderCount,
- const VkCoarseSampleOrderCustomNV* pCustomSampleOrders) {
- FinishWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCreateAccelerationStructureNV(
- VkDevice device,
- const VkAccelerationStructureCreateInfoNV* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkAccelerationStructureNV* pAccelerationStructure) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordCreateAccelerationStructureNV(
- VkDevice device,
- const VkAccelerationStructureCreateInfoNV* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkAccelerationStructureNV* pAccelerationStructure,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordDestroyAccelerationStructureNV(
- VkDevice device,
- VkAccelerationStructureNV accelerationStructure,
- const VkAllocationCallbacks* pAllocator) {
- StartReadObject(device);
- StartReadObject(accelerationStructure);
-}
-
-void ThreadSafety::PostCallRecordDestroyAccelerationStructureNV(
- VkDevice device,
- VkAccelerationStructureNV accelerationStructure,
- const VkAllocationCallbacks* pAllocator) {
- FinishReadObject(device);
- FinishReadObject(accelerationStructure);
-}
-
-void ThreadSafety::PreCallRecordGetAccelerationStructureMemoryRequirementsNV(
- VkDevice device,
- const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo,
- VkMemoryRequirements2KHR* pMemoryRequirements) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordGetAccelerationStructureMemoryRequirementsNV(
- VkDevice device,
- const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo,
- VkMemoryRequirements2KHR* pMemoryRequirements) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordBindAccelerationStructureMemoryNV(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindAccelerationStructureMemoryInfoNV* pBindInfos) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordBindAccelerationStructureMemoryNV(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindAccelerationStructureMemoryInfoNV* pBindInfos,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordCmdBuildAccelerationStructureNV(
- VkCommandBuffer commandBuffer,
- const VkAccelerationStructureInfoNV* pInfo,
- VkBuffer instanceData,
- VkDeviceSize instanceOffset,
- VkBool32 update,
- VkAccelerationStructureNV dst,
- VkAccelerationStructureNV src,
- VkBuffer scratch,
- VkDeviceSize scratchOffset) {
- StartReadObject(commandBuffer);
- StartReadObject(instanceData);
- StartReadObject(dst);
- StartReadObject(src);
- StartReadObject(scratch);
-}
-
-void ThreadSafety::PostCallRecordCmdBuildAccelerationStructureNV(
- VkCommandBuffer commandBuffer,
- const VkAccelerationStructureInfoNV* pInfo,
- VkBuffer instanceData,
- VkDeviceSize instanceOffset,
- VkBool32 update,
- VkAccelerationStructureNV dst,
- VkAccelerationStructureNV src,
- VkBuffer scratch,
- VkDeviceSize scratchOffset) {
- FinishReadObject(commandBuffer);
- FinishReadObject(instanceData);
- FinishReadObject(dst);
- FinishReadObject(src);
- FinishReadObject(scratch);
-}
-
-void ThreadSafety::PreCallRecordCmdCopyAccelerationStructureNV(
- VkCommandBuffer commandBuffer,
- VkAccelerationStructureNV dst,
- VkAccelerationStructureNV src,
- VkCopyAccelerationStructureModeNV mode) {
- StartReadObject(commandBuffer);
- StartReadObject(dst);
- StartReadObject(src);
-}
-
-void ThreadSafety::PostCallRecordCmdCopyAccelerationStructureNV(
- VkCommandBuffer commandBuffer,
- VkAccelerationStructureNV dst,
- VkAccelerationStructureNV src,
- VkCopyAccelerationStructureModeNV mode) {
- FinishReadObject(commandBuffer);
- FinishReadObject(dst);
- FinishReadObject(src);
-}
-
-void ThreadSafety::PreCallRecordCmdTraceRaysNV(
- VkCommandBuffer commandBuffer,
- VkBuffer raygenShaderBindingTableBuffer,
- VkDeviceSize raygenShaderBindingOffset,
- VkBuffer missShaderBindingTableBuffer,
- VkDeviceSize missShaderBindingOffset,
- VkDeviceSize missShaderBindingStride,
- VkBuffer hitShaderBindingTableBuffer,
- VkDeviceSize hitShaderBindingOffset,
- VkDeviceSize hitShaderBindingStride,
- VkBuffer callableShaderBindingTableBuffer,
- VkDeviceSize callableShaderBindingOffset,
- VkDeviceSize callableShaderBindingStride,
- uint32_t width,
- uint32_t height,
- uint32_t depth) {
- StartReadObject(commandBuffer);
- StartReadObject(raygenShaderBindingTableBuffer);
- StartReadObject(missShaderBindingTableBuffer);
- StartReadObject(hitShaderBindingTableBuffer);
- StartReadObject(callableShaderBindingTableBuffer);
-}
-
-void ThreadSafety::PostCallRecordCmdTraceRaysNV(
- VkCommandBuffer commandBuffer,
- VkBuffer raygenShaderBindingTableBuffer,
- VkDeviceSize raygenShaderBindingOffset,
- VkBuffer missShaderBindingTableBuffer,
- VkDeviceSize missShaderBindingOffset,
- VkDeviceSize missShaderBindingStride,
- VkBuffer hitShaderBindingTableBuffer,
- VkDeviceSize hitShaderBindingOffset,
- VkDeviceSize hitShaderBindingStride,
- VkBuffer callableShaderBindingTableBuffer,
- VkDeviceSize callableShaderBindingOffset,
- VkDeviceSize callableShaderBindingStride,
- uint32_t width,
- uint32_t height,
- uint32_t depth) {
- FinishReadObject(commandBuffer);
- FinishReadObject(raygenShaderBindingTableBuffer);
- FinishReadObject(missShaderBindingTableBuffer);
- FinishReadObject(hitShaderBindingTableBuffer);
- FinishReadObject(callableShaderBindingTableBuffer);
-}
-
-void ThreadSafety::PreCallRecordCreateRayTracingPipelinesNV(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VkRayTracingPipelineCreateInfoNV* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkPipeline* pPipelines) {
- StartReadObject(device);
- StartReadObject(pipelineCache);
-}
-
-void ThreadSafety::PostCallRecordCreateRayTracingPipelinesNV(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VkRayTracingPipelineCreateInfoNV* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkPipeline* pPipelines,
- VkResult result) {
- FinishReadObject(device);
- FinishReadObject(pipelineCache);
-}
-
-void ThreadSafety::PreCallRecordGetRayTracingShaderGroupHandlesNV(
- VkDevice device,
- VkPipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- size_t dataSize,
- void* pData) {
- StartReadObject(device);
- StartReadObject(pipeline);
-}
-
-void ThreadSafety::PostCallRecordGetRayTracingShaderGroupHandlesNV(
- VkDevice device,
- VkPipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- size_t dataSize,
- void* pData,
- VkResult result) {
- FinishReadObject(device);
- FinishReadObject(pipeline);
-}
-
-void ThreadSafety::PreCallRecordGetAccelerationStructureHandleNV(
- VkDevice device,
- VkAccelerationStructureNV accelerationStructure,
- size_t dataSize,
- void* pData) {
- StartReadObject(device);
- StartReadObject(accelerationStructure);
-}
-
-void ThreadSafety::PostCallRecordGetAccelerationStructureHandleNV(
- VkDevice device,
- VkAccelerationStructureNV accelerationStructure,
- size_t dataSize,
- void* pData,
- VkResult result) {
- FinishReadObject(device);
- FinishReadObject(accelerationStructure);
-}
-
-void ThreadSafety::PreCallRecordCmdWriteAccelerationStructuresPropertiesNV(
- VkCommandBuffer commandBuffer,
- uint32_t accelerationStructureCount,
- const VkAccelerationStructureNV* pAccelerationStructures,
- VkQueryType queryType,
- VkQueryPool queryPool,
- uint32_t firstQuery) {
- StartReadObject(commandBuffer);
- if (pAccelerationStructures) {
- for (uint32_t index = 0; index < accelerationStructureCount; index++) {
- StartReadObject(pAccelerationStructures[index]);
- }
- }
- StartReadObject(queryPool);
-}
-
-void ThreadSafety::PostCallRecordCmdWriteAccelerationStructuresPropertiesNV(
- VkCommandBuffer commandBuffer,
- uint32_t accelerationStructureCount,
- const VkAccelerationStructureNV* pAccelerationStructures,
- VkQueryType queryType,
- VkQueryPool queryPool,
- uint32_t firstQuery) {
- FinishReadObject(commandBuffer);
- if (pAccelerationStructures) {
- for (uint32_t index = 0; index < accelerationStructureCount; index++) {
- FinishReadObject(pAccelerationStructures[index]);
- }
- }
- FinishReadObject(queryPool);
-}
-
-void ThreadSafety::PreCallRecordCompileDeferredNV(
- VkDevice device,
- VkPipeline pipeline,
- uint32_t shader) {
- StartReadObject(device);
- StartReadObject(pipeline);
-}
-
-void ThreadSafety::PostCallRecordCompileDeferredNV(
- VkDevice device,
- VkPipeline pipeline,
- uint32_t shader,
- VkResult result) {
- FinishReadObject(device);
- FinishReadObject(pipeline);
-}
-
-void ThreadSafety::PreCallRecordGetMemoryHostPointerPropertiesEXT(
- VkDevice device,
- VkExternalMemoryHandleTypeFlagBits handleType,
- const void* pHostPointer,
- VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordGetMemoryHostPointerPropertiesEXT(
- VkDevice device,
- VkExternalMemoryHandleTypeFlagBits handleType,
- const void* pHostPointer,
- VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordCmdWriteBufferMarkerAMD(
- VkCommandBuffer commandBuffer,
- VkPipelineStageFlagBits pipelineStage,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- uint32_t marker) {
- StartWriteObject(commandBuffer);
- StartReadObject(dstBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdWriteBufferMarkerAMD(
- VkCommandBuffer commandBuffer,
- VkPipelineStageFlagBits pipelineStage,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- uint32_t marker) {
- FinishWriteObject(commandBuffer);
- FinishReadObject(dstBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordGetCalibratedTimestampsEXT(
- VkDevice device,
- uint32_t timestampCount,
- const VkCalibratedTimestampInfoEXT* pTimestampInfos,
- uint64_t* pTimestamps,
- uint64_t* pMaxDeviation) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordGetCalibratedTimestampsEXT(
- VkDevice device,
- uint32_t timestampCount,
- const VkCalibratedTimestampInfoEXT* pTimestampInfos,
- uint64_t* pTimestamps,
- uint64_t* pMaxDeviation,
- VkResult result) {
- FinishReadObject(device);
-}
-
-#ifdef VK_USE_PLATFORM_GGP
-#endif // VK_USE_PLATFORM_GGP
-
-void ThreadSafety::PreCallRecordCmdDrawMeshTasksNV(
- VkCommandBuffer commandBuffer,
- uint32_t taskCount,
- uint32_t firstTask) {
- StartWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdDrawMeshTasksNV(
- VkCommandBuffer commandBuffer,
- uint32_t taskCount,
- uint32_t firstTask) {
- FinishWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdDrawMeshTasksIndirectNV(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- uint32_t drawCount,
- uint32_t stride) {
- StartWriteObject(commandBuffer);
- StartReadObject(buffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdDrawMeshTasksIndirectNV(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- uint32_t drawCount,
- uint32_t stride) {
- FinishWriteObject(commandBuffer);
- FinishReadObject(buffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdDrawMeshTasksIndirectCountNV(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride) {
- StartWriteObject(commandBuffer);
- StartReadObject(buffer);
- StartReadObject(countBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdDrawMeshTasksIndirectCountNV(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride) {
- FinishWriteObject(commandBuffer);
- FinishReadObject(buffer);
- FinishReadObject(countBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdSetExclusiveScissorNV(
- VkCommandBuffer commandBuffer,
- uint32_t firstExclusiveScissor,
- uint32_t exclusiveScissorCount,
- const VkRect2D* pExclusiveScissors) {
- StartWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdSetExclusiveScissorNV(
- VkCommandBuffer commandBuffer,
- uint32_t firstExclusiveScissor,
- uint32_t exclusiveScissorCount,
- const VkRect2D* pExclusiveScissors) {
- FinishWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordCmdSetCheckpointNV(
- VkCommandBuffer commandBuffer,
- const void* pCheckpointMarker) {
- StartReadObject(commandBuffer);
-}
-
-void ThreadSafety::PostCallRecordCmdSetCheckpointNV(
- VkCommandBuffer commandBuffer,
- const void* pCheckpointMarker) {
- FinishReadObject(commandBuffer);
-}
-
-void ThreadSafety::PreCallRecordGetQueueCheckpointDataNV(
- VkQueue queue,
- uint32_t* pCheckpointDataCount,
- VkCheckpointDataNV* pCheckpointData) {
- StartReadObject(queue);
-}
-
-void ThreadSafety::PostCallRecordGetQueueCheckpointDataNV(
- VkQueue queue,
- uint32_t* pCheckpointDataCount,
- VkCheckpointDataNV* pCheckpointData) {
- FinishReadObject(queue);
-}
-
-void ThreadSafety::PreCallRecordInitializePerformanceApiINTEL(
- VkDevice device,
- const VkInitializePerformanceApiInfoINTEL* pInitializeInfo) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordInitializePerformanceApiINTEL(
- VkDevice device,
- const VkInitializePerformanceApiInfoINTEL* pInitializeInfo,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordUninitializePerformanceApiINTEL(
- VkDevice device) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordUninitializePerformanceApiINTEL(
- VkDevice device) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordCmdSetPerformanceMarkerINTEL(
- VkCommandBuffer commandBuffer,
- const VkPerformanceMarkerInfoINTEL* pMarkerInfo) {
- StartReadObject(commandBuffer);
-}
-
-void ThreadSafety::PostCallRecordCmdSetPerformanceMarkerINTEL(
- VkCommandBuffer commandBuffer,
- const VkPerformanceMarkerInfoINTEL* pMarkerInfo,
- VkResult result) {
- FinishReadObject(commandBuffer);
-}
-
-void ThreadSafety::PreCallRecordCmdSetPerformanceStreamMarkerINTEL(
- VkCommandBuffer commandBuffer,
- const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo) {
- StartReadObject(commandBuffer);
-}
-
-void ThreadSafety::PostCallRecordCmdSetPerformanceStreamMarkerINTEL(
- VkCommandBuffer commandBuffer,
- const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo,
- VkResult result) {
- FinishReadObject(commandBuffer);
-}
-
-void ThreadSafety::PreCallRecordCmdSetPerformanceOverrideINTEL(
- VkCommandBuffer commandBuffer,
- const VkPerformanceOverrideInfoINTEL* pOverrideInfo) {
- StartReadObject(commandBuffer);
-}
-
-void ThreadSafety::PostCallRecordCmdSetPerformanceOverrideINTEL(
- VkCommandBuffer commandBuffer,
- const VkPerformanceOverrideInfoINTEL* pOverrideInfo,
- VkResult result) {
- FinishReadObject(commandBuffer);
-}
-
-void ThreadSafety::PreCallRecordAcquirePerformanceConfigurationINTEL(
- VkDevice device,
- const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo,
- VkPerformanceConfigurationINTEL* pConfiguration) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordAcquirePerformanceConfigurationINTEL(
- VkDevice device,
- const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo,
- VkPerformanceConfigurationINTEL* pConfiguration,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordReleasePerformanceConfigurationINTEL(
- VkDevice device,
- VkPerformanceConfigurationINTEL configuration) {
- StartReadObject(device);
- StartReadObject(configuration);
-}
-
-void ThreadSafety::PostCallRecordReleasePerformanceConfigurationINTEL(
- VkDevice device,
- VkPerformanceConfigurationINTEL configuration,
- VkResult result) {
- FinishReadObject(device);
- FinishReadObject(configuration);
-}
-
-void ThreadSafety::PreCallRecordQueueSetPerformanceConfigurationINTEL(
- VkQueue queue,
- VkPerformanceConfigurationINTEL configuration) {
- StartReadObject(queue);
- StartReadObject(configuration);
-}
-
-void ThreadSafety::PostCallRecordQueueSetPerformanceConfigurationINTEL(
- VkQueue queue,
- VkPerformanceConfigurationINTEL configuration,
- VkResult result) {
- FinishReadObject(queue);
- FinishReadObject(configuration);
-}
-
-void ThreadSafety::PreCallRecordGetPerformanceParameterINTEL(
- VkDevice device,
- VkPerformanceParameterTypeINTEL parameter,
- VkPerformanceValueINTEL* pValue) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordGetPerformanceParameterINTEL(
- VkDevice device,
- VkPerformanceParameterTypeINTEL parameter,
- VkPerformanceValueINTEL* pValue,
- VkResult result) {
- FinishReadObject(device);
-}
-
-void ThreadSafety::PreCallRecordSetLocalDimmingAMD(
- VkDevice device,
- VkSwapchainKHR swapChain,
- VkBool32 localDimmingEnable) {
- StartReadObject(device);
- StartReadObject(swapChain);
-}
-
-void ThreadSafety::PostCallRecordSetLocalDimmingAMD(
- VkDevice device,
- VkSwapchainKHR swapChain,
- VkBool32 localDimmingEnable) {
- FinishReadObject(device);
- FinishReadObject(swapChain);
-}
-
-#ifdef VK_USE_PLATFORM_FUCHSIA
-
-void ThreadSafety::PreCallRecordCreateImagePipeSurfaceFUCHSIA(
- VkInstance instance,
- const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- StartReadObject(instance);
-}
-
-void ThreadSafety::PostCallRecordCreateImagePipeSurfaceFUCHSIA(
- VkInstance instance,
- const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result) {
- FinishReadObject(instance);
-}
-#endif // VK_USE_PLATFORM_FUCHSIA
-
-#ifdef VK_USE_PLATFORM_METAL_EXT
-
-void ThreadSafety::PreCallRecordCreateMetalSurfaceEXT(
- VkInstance instance,
- const VkMetalSurfaceCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- StartReadObject(instance);
-}
-
-void ThreadSafety::PostCallRecordCreateMetalSurfaceEXT(
- VkInstance instance,
- const VkMetalSurfaceCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result) {
- FinishReadObject(instance);
-}
-#endif // VK_USE_PLATFORM_METAL_EXT
-
-void ThreadSafety::PreCallRecordGetBufferDeviceAddressEXT(
- VkDevice device,
- const VkBufferDeviceAddressInfoEXT* pInfo) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordGetBufferDeviceAddressEXT(
- VkDevice device,
- const VkBufferDeviceAddressInfoEXT* pInfo) {
- FinishReadObject(device);
-}
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-void ThreadSafety::PreCallRecordAcquireFullScreenExclusiveModeEXT(
- VkDevice device,
- VkSwapchainKHR swapchain) {
- StartReadObject(device);
- StartReadObject(swapchain);
-}
-
-void ThreadSafety::PostCallRecordAcquireFullScreenExclusiveModeEXT(
- VkDevice device,
- VkSwapchainKHR swapchain,
- VkResult result) {
- FinishReadObject(device);
- FinishReadObject(swapchain);
-}
-
-void ThreadSafety::PreCallRecordReleaseFullScreenExclusiveModeEXT(
- VkDevice device,
- VkSwapchainKHR swapchain) {
- StartReadObject(device);
- StartReadObject(swapchain);
-}
-
-void ThreadSafety::PostCallRecordReleaseFullScreenExclusiveModeEXT(
- VkDevice device,
- VkSwapchainKHR swapchain,
- VkResult result) {
- FinishReadObject(device);
- FinishReadObject(swapchain);
-}
-
-void ThreadSafety::PreCallRecordGetDeviceGroupSurfacePresentModes2EXT(
- VkDevice device,
- const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
- VkDeviceGroupPresentModeFlagsKHR* pModes) {
- StartReadObject(device);
-}
-
-void ThreadSafety::PostCallRecordGetDeviceGroupSurfacePresentModes2EXT(
- VkDevice device,
- const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
- VkDeviceGroupPresentModeFlagsKHR* pModes,
- VkResult result) {
- FinishReadObject(device);
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-void ThreadSafety::PreCallRecordCreateHeadlessSurfaceEXT(
- VkInstance instance,
- const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface) {
- StartReadObject(instance);
-}
-
-void ThreadSafety::PostCallRecordCreateHeadlessSurfaceEXT(
- VkInstance instance,
- const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result) {
- FinishReadObject(instance);
-}
-
-void ThreadSafety::PreCallRecordCmdSetLineStippleEXT(
- VkCommandBuffer commandBuffer,
- uint32_t lineStippleFactor,
- uint16_t lineStipplePattern) {
- StartWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PostCallRecordCmdSetLineStippleEXT(
- VkCommandBuffer commandBuffer,
- uint32_t lineStippleFactor,
- uint16_t lineStipplePattern) {
- FinishWriteObject(commandBuffer);
- // Host access to commandBuffer must be externally synchronized
-}
-
-void ThreadSafety::PreCallRecordResetQueryPoolEXT(
- VkDevice device,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount) {
- StartReadObject(device);
- StartReadObject(queryPool);
-}
-
-void ThreadSafety::PostCallRecordResetQueryPoolEXT(
- VkDevice device,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount) {
- FinishReadObject(device);
- FinishReadObject(queryPool);
-}
diff --git a/layers/generated/thread_safety.h b/layers/generated/thread_safety.h
deleted file mode 100644
index a55fc704a..000000000
--- a/layers/generated/thread_safety.h
+++ /dev/null
@@ -1,4250 +0,0 @@
-
-// This file is ***GENERATED***. Do Not Edit.
-// See thread_safety_generator.py for modifications.
-
-/* Copyright (c) 2015-2019 The Khronos Group Inc.
- * Copyright (c) 2015-2019 Valve Corporation
- * Copyright (c) 2015-2019 LunarG, Inc.
- * Copyright (c) 2015-2019 Google Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * Author: Mark Lobodzinski <mark@lunarg.com>
- */
-
-#pragma once
-
-#include <chrono>
-#include <thread>
-#include <mutex>
-#include <vector>
-#include <unordered_set>
-#include <string>
-
-VK_DEFINE_NON_DISPATCHABLE_HANDLE(DISTINCT_NONDISPATCHABLE_PHONY_HANDLE)
-// The following line must match the vulkan_core.h condition guarding VK_DEFINE_NON_DISPATCHABLE_HANDLE
-#if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__)) || defined(_M_X64) || defined(__ia64) || defined(_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
-// If pointers are 64-bit, then there can be separate counters for each
-// NONDISPATCHABLE_HANDLE type. Otherwise they are all typedefs of uint64_t.
-#define DISTINCT_NONDISPATCHABLE_HANDLES
-// Make sure we catch any disagreement between us and the vulkan definition
-static_assert(std::is_pointer<DISTINCT_NONDISPATCHABLE_PHONY_HANDLE>::value,
- "Mismatched non-dispatchable handle handle, expected pointer type.");
-#else
-// Make sure we catch any disagreement between us and the vulkan definition
-static_assert(std::is_same<uint64_t, DISTINCT_NONDISPATCHABLE_PHONY_HANDLE>::value,
- "Mismatched non-dispatchable handle handle, expected uint64_t.");
-#endif
-
-// Suppress unused warning on Linux
-#if defined(__GNUC__)
-#define DECORATE_UNUSED __attribute__((unused))
-#else
-#define DECORATE_UNUSED
-#endif
-
-// clang-format off
-static const char DECORATE_UNUSED *kVUID_Threading_Info = "UNASSIGNED-Threading-Info";
-static const char DECORATE_UNUSED *kVUID_Threading_MultipleThreads = "UNASSIGNED-Threading-MultipleThreads";
-static const char DECORATE_UNUSED *kVUID_Threading_SingleThreadReuse = "UNASSIGNED-Threading-SingleThreadReuse";
-// clang-format on
-
-#undef DECORATE_UNUSED
-
-struct object_use_data {
- loader_platform_thread_id thread;
- int reader_count;
- int writer_count;
-};
-
-// This is a wrapper around unordered_map that optimizes for the common case
-// of only containing a single element. The "first" element's use is stored
-// inline in the class and doesn't require hashing or memory (de)allocation.
-// TODO: Consider generalizing this from one element to N elements (where N
-// is a template parameter).
-template <typename Key, typename T>
-class small_unordered_map {
-
- bool first_data_allocated;
- Key first_data_key;
- T first_data;
-
- std::unordered_map<Key, T> uses;
-
-public:
- small_unordered_map() : first_data_allocated(false) {}
-
- bool contains(const Key& object) const {
- if (first_data_allocated && object == first_data_key) {
- return true;
- // check size() first to avoid hashing object unnecessarily.
- } else if (uses.size() == 0) {
- return false;
- } else {
- return uses.find(object) != uses.end();
- }
- }
-
- T& operator[](const Key& object) {
- if (first_data_allocated && first_data_key == object) {
- return first_data;
- } else if (!first_data_allocated && uses.size() == 0) {
- first_data_allocated = true;
- first_data_key = object;
- return first_data;
- } else {
- return uses[object];
- }
- }
-
- typename std::unordered_map<Key, T>::size_type erase(const Key& object) {
- if (first_data_allocated && first_data_key == object) {
- first_data_allocated = false;
- return 1;
- } else {
- return uses.erase(object);
- }
- }
-};
-
-#define THREAD_SAFETY_BUCKETS_LOG2 6
-#define THREAD_SAFETY_BUCKETS (1 << THREAD_SAFETY_BUCKETS_LOG2)
-
-template <typename T> inline uint32_t ThreadSafetyHashObject(T object)
-{
- uint64_t u64 = (uint64_t)(uintptr_t)object;
- uint32_t hash = (uint32_t)(u64 >> 32) + (uint32_t)u64;
- hash ^= (hash >> THREAD_SAFETY_BUCKETS_LOG2) ^ (hash >> (2*THREAD_SAFETY_BUCKETS_LOG2));
- hash &= (THREAD_SAFETY_BUCKETS-1);
- return hash;
-}
-
-template <typename T>
-class counter {
-public:
- const char *typeName;
- VkDebugReportObjectTypeEXT objectType;
- debug_report_data **report_data;
-
- // Per-bucket locking, to reduce contention.
- struct CounterBucket {
- small_unordered_map<T, object_use_data> uses;
- std::mutex counter_lock;
- };
-
- CounterBucket buckets[THREAD_SAFETY_BUCKETS];
- CounterBucket &GetBucket(T object)
- {
- return buckets[ThreadSafetyHashObject(object)];
- }
-
- void StartWrite(T object) {
- if (object == VK_NULL_HANDLE) {
- return;
- }
- auto &bucket = GetBucket(object);
- bool skip = false;
- loader_platform_thread_id tid = loader_platform_get_thread_id();
- std::unique_lock<std::mutex> lock(bucket.counter_lock);
- if (!bucket.uses.contains(object)) {
- // There is no current use of the object. Record writer thread.
- struct object_use_data *use_data = &bucket.uses[object];
- use_data->reader_count = 0;
- use_data->writer_count = 1;
- use_data->thread = tid;
- } else {
- struct object_use_data *use_data = &bucket.uses[object];
- if (use_data->reader_count == 0) {
- // There are no readers. Two writers just collided.
- if (use_data->thread != tid) {
- skip |= log_msg(*report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, objectType, (uint64_t)(object),
- kVUID_Threading_MultipleThreads,
- "THREADING ERROR : object of type %s is simultaneously used in "
- "thread 0x%" PRIx64 " and thread 0x%" PRIx64,
- typeName, (uint64_t)use_data->thread, (uint64_t)tid);
- if (skip) {
- WaitForObjectIdle(bucket, object, lock);
- // There is now no current use of the object. Record writer thread.
- struct object_use_data *new_use_data = &bucket.uses[object];
- new_use_data->thread = tid;
- new_use_data->reader_count = 0;
- new_use_data->writer_count = 1;
- } else {
- // Continue with an unsafe use of the object.
- use_data->thread = tid;
- use_data->writer_count += 1;
- }
- } else {
- // This is either safe multiple use in one call, or recursive use.
- // There is no way to make recursion safe. Just forge ahead.
- use_data->writer_count += 1;
- }
- } else {
- // There are readers. This writer collided with them.
- if (use_data->thread != tid) {
- skip |= log_msg(*report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, objectType, (uint64_t)(object),
- kVUID_Threading_MultipleThreads,
- "THREADING ERROR : object of type %s is simultaneously used in "
- "thread 0x%" PRIx64 " and thread 0x%" PRIx64,
- typeName, (uint64_t)use_data->thread, (uint64_t)tid);
- if (skip) {
- WaitForObjectIdle(bucket, object, lock);
- // There is now no current use of the object. Record writer thread.
- struct object_use_data *new_use_data = &bucket.uses[object];
- new_use_data->thread = tid;
- new_use_data->reader_count = 0;
- new_use_data->writer_count = 1;
- } else {
- // Continue with an unsafe use of the object.
- use_data->thread = tid;
- use_data->writer_count += 1;
- }
- } else {
- // This is either safe multiple use in one call, or recursive use.
- // There is no way to make recursion safe. Just forge ahead.
- use_data->writer_count += 1;
- }
- }
- }
- }
-
- void FinishWrite(T object) {
- if (object == VK_NULL_HANDLE) {
- return;
- }
- auto &bucket = GetBucket(object);
- // Object is no longer in use
- std::unique_lock<std::mutex> lock(bucket.counter_lock);
- struct object_use_data *use_data = &bucket.uses[object];
- use_data->writer_count -= 1;
- if ((use_data->reader_count == 0) && (use_data->writer_count == 0)) {
- bucket.uses.erase(object);
- }
- }
-
- void StartRead(T object) {
- if (object == VK_NULL_HANDLE) {
- return;
- }
- auto &bucket = GetBucket(object);
- bool skip = false;
- loader_platform_thread_id tid = loader_platform_get_thread_id();
- std::unique_lock<std::mutex> lock(bucket.counter_lock);
- if (!bucket.uses.contains(object)) {
- // There is no current use of the object. Record reader count
- struct object_use_data *use_data = &bucket.uses[object];
- use_data->reader_count = 1;
- use_data->writer_count = 0;
- use_data->thread = tid;
- } else if (bucket.uses[object].writer_count > 0 && bucket.uses[object].thread != tid) {
- // There is a writer of the object.
- skip |= log_msg(*report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, objectType, (uint64_t)(object),
- kVUID_Threading_MultipleThreads,
- "THREADING ERROR : object of type %s is simultaneously used in "
- "thread 0x%" PRIx64 " and thread 0x%" PRIx64,
- typeName, (uint64_t)bucket.uses[object].thread, (uint64_t)tid);
- if (skip) {
- WaitForObjectIdle(bucket, object, lock);
- // There is no current use of the object. Record reader count
- struct object_use_data *use_data = &bucket.uses[object];
- use_data->reader_count = 1;
- use_data->writer_count = 0;
- use_data->thread = tid;
- } else {
- bucket.uses[object].reader_count += 1;
- }
- } else {
- // There are other readers of the object. Increase reader count
- bucket.uses[object].reader_count += 1;
- }
- }
- void FinishRead(T object) {
- if (object == VK_NULL_HANDLE) {
- return;
- }
- auto &bucket = GetBucket(object);
- std::unique_lock<std::mutex> lock(bucket.counter_lock);
- struct object_use_data *use_data = &bucket.uses[object];
- use_data->reader_count -= 1;
- if ((use_data->reader_count == 0) && (use_data->writer_count == 0)) {
- bucket.uses.erase(object);
- }
- }
- counter(const char *name = "", VkDebugReportObjectTypeEXT type = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, debug_report_data **rep_data = nullptr) {
- typeName = name;
- objectType = type;
- report_data = rep_data;
- }
-
-private:
- void WaitForObjectIdle(CounterBucket &bucket, T object, std::unique_lock<std::mutex> &lock) {
- // Wait for thread-safe access to object instead of skipping call.
- // Don't use condition_variable to wait because it should be extremely
- // rare to have collisions, but signaling would be very frequent.
- while (bucket.uses.contains(object)) {
- lock.unlock();
- std::this_thread::sleep_for(std::chrono::microseconds(1));
- lock.lock();
- }
- }
-};
-
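A counter<T> instance brackets every use of an externally synchronized handle with Start/Finish calls; a hedged sketch of that pattern (illustrative, assuming this header's own dependencies such as the Vulkan headers and debug_report_data are available where it is compiled):

    // Hypothetical bracketing of an externally synchronized VkFence.
    void counter_example(debug_report_data **report_data, VkFence fence) {
        counter<VkFence> fences("VkFence", VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, report_data);
        fences.StartWrite(fence);   // records this thread as the sole writer, or reports a collision
        // ... the intercepted Vulkan call that writes the fence would run here ...
        fences.FinishWrite(fence);  // drops the use record once reader/writer counts reach zero
    }
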
-
-
-class ThreadSafety : public ValidationObject {
-public:
-
- // Override chassis read/write locks for this validation object
- // This override takes a deferred lock, i.e. it is not acquired.
- std::unique_lock<std::mutex> write_lock() {
- return std::unique_lock<std::mutex>(validation_object_mutex, std::defer_lock);
- }
-
- // Per-bucket locking, to reduce contention.
- struct CommandBufferBucket {
- std::mutex command_pool_lock;
- small_unordered_map<VkCommandBuffer, VkCommandPool> command_pool_map;
- };
-
- CommandBufferBucket buckets[THREAD_SAFETY_BUCKETS];
- CommandBufferBucket &GetBucket(VkCommandBuffer object)
- {
- return buckets[ThreadSafetyHashObject(object)];
- }
-
- counter<VkCommandBuffer> c_VkCommandBuffer;
- counter<VkDevice> c_VkDevice;
- counter<VkInstance> c_VkInstance;
- counter<VkQueue> c_VkQueue;
-#ifdef DISTINCT_NONDISPATCHABLE_HANDLES
-
- // Special entry to allow tracking of command pool Reset and Destroy
- counter<VkCommandPool> c_VkCommandPoolContents;
- counter<VkAccelerationStructureNV> c_VkAccelerationStructureNV;
- counter<VkBuffer> c_VkBuffer;
- counter<VkBufferView> c_VkBufferView;
- counter<VkCommandPool> c_VkCommandPool;
- counter<VkDebugReportCallbackEXT> c_VkDebugReportCallbackEXT;
- counter<VkDebugUtilsMessengerEXT> c_VkDebugUtilsMessengerEXT;
- counter<VkDescriptorPool> c_VkDescriptorPool;
- counter<VkDescriptorSet> c_VkDescriptorSet;
- counter<VkDescriptorSetLayout> c_VkDescriptorSetLayout;
- counter<VkDescriptorUpdateTemplate> c_VkDescriptorUpdateTemplate;
- counter<VkDeviceMemory> c_VkDeviceMemory;
- counter<VkDisplayKHR> c_VkDisplayKHR;
- counter<VkDisplayModeKHR> c_VkDisplayModeKHR;
- counter<VkEvent> c_VkEvent;
- counter<VkFence> c_VkFence;
- counter<VkFramebuffer> c_VkFramebuffer;
- counter<VkImage> c_VkImage;
- counter<VkImageView> c_VkImageView;
- counter<VkIndirectCommandsLayoutNVX> c_VkIndirectCommandsLayoutNVX;
- counter<VkObjectTableNVX> c_VkObjectTableNVX;
- counter<VkPerformanceConfigurationINTEL> c_VkPerformanceConfigurationINTEL;
- counter<VkPipeline> c_VkPipeline;
- counter<VkPipelineCache> c_VkPipelineCache;
- counter<VkPipelineLayout> c_VkPipelineLayout;
- counter<VkQueryPool> c_VkQueryPool;
- counter<VkRenderPass> c_VkRenderPass;
- counter<VkSampler> c_VkSampler;
- counter<VkSamplerYcbcrConversion> c_VkSamplerYcbcrConversion;
- counter<VkSemaphore> c_VkSemaphore;
- counter<VkShaderModule> c_VkShaderModule;
- counter<VkSurfaceKHR> c_VkSurfaceKHR;
- counter<VkSwapchainKHR> c_VkSwapchainKHR;
- counter<VkValidationCacheEXT> c_VkValidationCacheEXT;
-
-
-#else // DISTINCT_NONDISPATCHABLE_HANDLES
- // Special entry to allow tracking of command pool Reset and Destroy
- counter<uint64_t> c_VkCommandPoolContents;
-
- counter<uint64_t> c_uint64_t;
-#endif // DISTINCT_NONDISPATCHABLE_HANDLES
-
- ThreadSafety()
- : c_VkCommandBuffer("VkCommandBuffer", VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, &report_data),
- c_VkDevice("VkDevice", VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, &report_data),
- c_VkInstance("VkInstance", VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, &report_data),
- c_VkQueue("VkQueue", VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, &report_data),
- c_VkCommandPoolContents("VkCommandPool", VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, &report_data),
-
-#ifdef DISTINCT_NONDISPATCHABLE_HANDLES
- c_VkAccelerationStructureNV("VkAccelerationStructureNV", VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT, &report_data),
- c_VkBuffer("VkBuffer", VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, &report_data),
- c_VkBufferView("VkBufferView", VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT, &report_data),
- c_VkCommandPool("VkCommandPool", VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, &report_data),
- c_VkDebugReportCallbackEXT("VkDebugReportCallbackEXT", VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT, &report_data),
- c_VkDebugUtilsMessengerEXT("VkDebugUtilsMessengerEXT", VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, &report_data),
- c_VkDescriptorPool("VkDescriptorPool", VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, &report_data),
- c_VkDescriptorSet("VkDescriptorSet", VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, &report_data),
- c_VkDescriptorSetLayout("VkDescriptorSetLayout", VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, &report_data),
- c_VkDescriptorUpdateTemplate("VkDescriptorUpdateTemplate", VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT, &report_data),
- c_VkDeviceMemory("VkDeviceMemory", VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, &report_data),
- c_VkDisplayKHR("VkDisplayKHR", VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT, &report_data),
- c_VkDisplayModeKHR("VkDisplayModeKHR", VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT, &report_data),
- c_VkEvent("VkEvent", VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, &report_data),
- c_VkFence("VkFence", VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, &report_data),
- c_VkFramebuffer("VkFramebuffer", VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, &report_data),
- c_VkImage("VkImage", VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, &report_data),
- c_VkImageView("VkImageView", VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, &report_data),
- c_VkIndirectCommandsLayoutNVX("VkIndirectCommandsLayoutNVX", VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT, &report_data),
- c_VkObjectTableNVX("VkObjectTableNVX", VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT, &report_data),
- c_VkPerformanceConfigurationINTEL("VkPerformanceConfigurationINTEL", VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, &report_data),
- c_VkPipeline("VkPipeline", VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, &report_data),
- c_VkPipelineCache("VkPipelineCache", VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, &report_data),
- c_VkPipelineLayout("VkPipelineLayout", VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, &report_data),
- c_VkQueryPool("VkQueryPool", VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, &report_data),
- c_VkRenderPass("VkRenderPass", VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, &report_data),
- c_VkSampler("VkSampler", VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, &report_data),
- c_VkSamplerYcbcrConversion("VkSamplerYcbcrConversion", VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, &report_data),
- c_VkSemaphore("VkSemaphore", VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, &report_data),
- c_VkShaderModule("VkShaderModule", VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, &report_data),
- c_VkSurfaceKHR("VkSurfaceKHR", VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, &report_data),
- c_VkSwapchainKHR("VkSwapchainKHR", VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, &report_data),
- c_VkValidationCacheEXT("VkValidationCacheEXT", VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT, &report_data)
-
-
-#else // DISTINCT_NONDISPATCHABLE_HANDLES
- c_uint64_t("NON_DISPATCHABLE_HANDLE", VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, &report_data)
-#endif // DISTINCT_NONDISPATCHABLE_HANDLES
- {};
-
-#define WRAPPER(type) void StartWriteObject(type object) { c_##type.StartWrite(object); } void FinishWriteObject(type object) { c_##type.FinishWrite(object); } void StartReadObject(type object) { c_##type.StartRead(object); } void FinishReadObject(type object) { c_##type.FinishRead(object); }
-
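Each WRAPPER(type) use below expands to four one-line forwarders onto the matching counter member; a hand-expanded form for one handle type, roughly what the preprocessor produces inside the class (shown for illustration, not literal generated source):

    // Approximate expansion of WRAPPER(VkFence):
    void StartWriteObject(VkFence object) { c_VkFence.StartWrite(object); }
    void FinishWriteObject(VkFence object) { c_VkFence.FinishWrite(object); }
    void StartReadObject(VkFence object) { c_VkFence.StartRead(object); }
    void FinishReadObject(VkFence object) { c_VkFence.FinishRead(object); }
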
-WRAPPER(VkDevice)
-WRAPPER(VkInstance)
-WRAPPER(VkQueue)
-#ifdef DISTINCT_NONDISPATCHABLE_HANDLES
-WRAPPER(VkAccelerationStructureNV)
-WRAPPER(VkBuffer)
-WRAPPER(VkBufferView)
-WRAPPER(VkCommandPool)
-WRAPPER(VkDebugReportCallbackEXT)
-WRAPPER(VkDebugUtilsMessengerEXT)
-WRAPPER(VkDescriptorPool)
-WRAPPER(VkDescriptorSet)
-WRAPPER(VkDescriptorSetLayout)
-WRAPPER(VkDescriptorUpdateTemplate)
-WRAPPER(VkDeviceMemory)
-WRAPPER(VkDisplayKHR)
-WRAPPER(VkDisplayModeKHR)
-WRAPPER(VkEvent)
-WRAPPER(VkFence)
-WRAPPER(VkFramebuffer)
-WRAPPER(VkImage)
-WRAPPER(VkImageView)
-WRAPPER(VkIndirectCommandsLayoutNVX)
-WRAPPER(VkObjectTableNVX)
-WRAPPER(VkPerformanceConfigurationINTEL)
-WRAPPER(VkPipeline)
-WRAPPER(VkPipelineCache)
-WRAPPER(VkPipelineLayout)
-WRAPPER(VkQueryPool)
-WRAPPER(VkRenderPass)
-WRAPPER(VkSampler)
-WRAPPER(VkSamplerYcbcrConversion)
-WRAPPER(VkSemaphore)
-WRAPPER(VkShaderModule)
-WRAPPER(VkSurfaceKHR)
-WRAPPER(VkSwapchainKHR)
-WRAPPER(VkValidationCacheEXT)
-
-
-#else // DISTINCT_NONDISPATCHABLE_HANDLES
-WRAPPER(uint64_t)
-#endif // DISTINCT_NONDISPATCHABLE_HANDLES
-
- // VkCommandBuffer needs a check for implicit use of the command pool
- void StartWriteObject(VkCommandBuffer object, bool lockPool = true) {
- if (lockPool) {
- auto &bucket = GetBucket(object);
- std::unique_lock<std::mutex> lock(bucket.command_pool_lock);
- VkCommandPool pool = bucket.command_pool_map[object];
- lock.unlock();
- StartWriteObject(pool);
- }
- c_VkCommandBuffer.StartWrite(object);
- }
- void FinishWriteObject(VkCommandBuffer object, bool lockPool = true) {
- c_VkCommandBuffer.FinishWrite(object);
- if (lockPool) {
- auto &bucket = GetBucket(object);
- std::unique_lock<std::mutex> lock(bucket.command_pool_lock);
- VkCommandPool pool = bucket.command_pool_map[object];
- lock.unlock();
- FinishWriteObject(pool);
- }
- }
- void StartReadObject(VkCommandBuffer object) {
- auto &bucket = GetBucket(object);
- std::unique_lock<std::mutex> lock(bucket.command_pool_lock);
- VkCommandPool pool = bucket.command_pool_map[object];
- lock.unlock();
- // We set up a read guard against the "Contents" counter to catch conflict vs. vkResetCommandPool and vkDestroyCommandPool
- // while *not* establishing a read guard against the command pool counter itself to avoid false positives for
- // non-externally sync'd command buffers
- c_VkCommandPoolContents.StartRead(pool);
- c_VkCommandBuffer.StartRead(object);
- }
- void FinishReadObject(VkCommandBuffer object) {
- auto &bucket = GetBucket(object);
- c_VkCommandBuffer.FinishRead(object);
- std::unique_lock<std::mutex> lock(bucket.command_pool_lock);
- VkCommandPool pool = bucket.command_pool_map[object];
- lock.unlock();
- c_VkCommandPoolContents.FinishRead(pool);
- }
-
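The declarations below cover every intercepted entry point; the matching definitions live in the companion generated thread_safety.cpp. A hedged sketch of the likely shape of one pair (illustrative, not the literal generated body):

    // vkDestroyFence: the fence is externally synchronized, so it gets write
    // tracking, while the device only needs read tracking.
    void ThreadSafety::PreCallRecordDestroyFence(VkDevice device, VkFence fence,
                                                 const VkAllocationCallbacks *pAllocator) {
        StartReadObject(device);
        StartWriteObject(fence);
    }

    void ThreadSafety::PostCallRecordDestroyFence(VkDevice device, VkFence fence,
                                                  const VkAllocationCallbacks *pAllocator) {
        FinishReadObject(device);
        FinishWriteObject(fence);
    }
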
-void PreCallRecordDestroyInstance(
- VkInstance instance,
- const VkAllocationCallbacks* pAllocator);
-
-void PostCallRecordDestroyInstance(
- VkInstance instance,
- const VkAllocationCallbacks* pAllocator);
-
-void PreCallRecordEnumeratePhysicalDevices(
- VkInstance instance,
- uint32_t* pPhysicalDeviceCount,
- VkPhysicalDevice* pPhysicalDevices);
-
-void PostCallRecordEnumeratePhysicalDevices(
- VkInstance instance,
- uint32_t* pPhysicalDeviceCount,
- VkPhysicalDevice* pPhysicalDevices,
- VkResult result);
-
-void PreCallRecordGetInstanceProcAddr(
- VkInstance instance,
- const char* pName);
-
-void PostCallRecordGetInstanceProcAddr(
- VkInstance instance,
- const char* pName);
-
-void PreCallRecordGetDeviceProcAddr(
- VkDevice device,
- const char* pName);
-
-void PostCallRecordGetDeviceProcAddr(
- VkDevice device,
- const char* pName);
-
-void PreCallRecordDestroyDevice(
- VkDevice device,
- const VkAllocationCallbacks* pAllocator);
-
-void PostCallRecordDestroyDevice(
- VkDevice device,
- const VkAllocationCallbacks* pAllocator);
-
-void PreCallRecordGetDeviceQueue(
- VkDevice device,
- uint32_t queueFamilyIndex,
- uint32_t queueIndex,
- VkQueue* pQueue);
-
-void PostCallRecordGetDeviceQueue(
- VkDevice device,
- uint32_t queueFamilyIndex,
- uint32_t queueIndex,
- VkQueue* pQueue);
-
-void PreCallRecordQueueSubmit(
- VkQueue queue,
- uint32_t submitCount,
- const VkSubmitInfo* pSubmits,
- VkFence fence);
-
-void PostCallRecordQueueSubmit(
- VkQueue queue,
- uint32_t submitCount,
- const VkSubmitInfo* pSubmits,
- VkFence fence,
- VkResult result);
-
-void PreCallRecordQueueWaitIdle(
- VkQueue queue);
-
-void PostCallRecordQueueWaitIdle(
- VkQueue queue,
- VkResult result);
-
-void PreCallRecordDeviceWaitIdle(
- VkDevice device);
-
-void PostCallRecordDeviceWaitIdle(
- VkDevice device,
- VkResult result);
-
-void PreCallRecordAllocateMemory(
- VkDevice device,
- const VkMemoryAllocateInfo* pAllocateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDeviceMemory* pMemory);
-
-void PostCallRecordAllocateMemory(
- VkDevice device,
- const VkMemoryAllocateInfo* pAllocateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDeviceMemory* pMemory,
- VkResult result);
-
-void PreCallRecordFreeMemory(
- VkDevice device,
- VkDeviceMemory memory,
- const VkAllocationCallbacks* pAllocator);
-
-void PostCallRecordFreeMemory(
- VkDevice device,
- VkDeviceMemory memory,
- const VkAllocationCallbacks* pAllocator);
-
-void PreCallRecordMapMemory(
- VkDevice device,
- VkDeviceMemory memory,
- VkDeviceSize offset,
- VkDeviceSize size,
- VkMemoryMapFlags flags,
- void** ppData);
-
-void PostCallRecordMapMemory(
- VkDevice device,
- VkDeviceMemory memory,
- VkDeviceSize offset,
- VkDeviceSize size,
- VkMemoryMapFlags flags,
- void** ppData,
- VkResult result);
-
-void PreCallRecordUnmapMemory(
- VkDevice device,
- VkDeviceMemory memory);
-
-void PostCallRecordUnmapMemory(
- VkDevice device,
- VkDeviceMemory memory);
-
-void PreCallRecordFlushMappedMemoryRanges(
- VkDevice device,
- uint32_t memoryRangeCount,
- const VkMappedMemoryRange* pMemoryRanges);
-
-void PostCallRecordFlushMappedMemoryRanges(
- VkDevice device,
- uint32_t memoryRangeCount,
- const VkMappedMemoryRange* pMemoryRanges,
- VkResult result);
-
-void PreCallRecordInvalidateMappedMemoryRanges(
- VkDevice device,
- uint32_t memoryRangeCount,
- const VkMappedMemoryRange* pMemoryRanges);
-
-void PostCallRecordInvalidateMappedMemoryRanges(
- VkDevice device,
- uint32_t memoryRangeCount,
- const VkMappedMemoryRange* pMemoryRanges,
- VkResult result);
-
-void PreCallRecordGetDeviceMemoryCommitment(
- VkDevice device,
- VkDeviceMemory memory,
- VkDeviceSize* pCommittedMemoryInBytes);
-
-void PostCallRecordGetDeviceMemoryCommitment(
- VkDevice device,
- VkDeviceMemory memory,
- VkDeviceSize* pCommittedMemoryInBytes);
-
-void PreCallRecordBindBufferMemory(
- VkDevice device,
- VkBuffer buffer,
- VkDeviceMemory memory,
- VkDeviceSize memoryOffset);
-
-void PostCallRecordBindBufferMemory(
- VkDevice device,
- VkBuffer buffer,
- VkDeviceMemory memory,
- VkDeviceSize memoryOffset,
- VkResult result);
-
-void PreCallRecordBindImageMemory(
- VkDevice device,
- VkImage image,
- VkDeviceMemory memory,
- VkDeviceSize memoryOffset);
-
-void PostCallRecordBindImageMemory(
- VkDevice device,
- VkImage image,
- VkDeviceMemory memory,
- VkDeviceSize memoryOffset,
- VkResult result);
-
-void PreCallRecordGetBufferMemoryRequirements(
- VkDevice device,
- VkBuffer buffer,
- VkMemoryRequirements* pMemoryRequirements);
-
-void PostCallRecordGetBufferMemoryRequirements(
- VkDevice device,
- VkBuffer buffer,
- VkMemoryRequirements* pMemoryRequirements);
-
-void PreCallRecordGetImageMemoryRequirements(
- VkDevice device,
- VkImage image,
- VkMemoryRequirements* pMemoryRequirements);
-
-void PostCallRecordGetImageMemoryRequirements(
- VkDevice device,
- VkImage image,
- VkMemoryRequirements* pMemoryRequirements);
-
-void PreCallRecordGetImageSparseMemoryRequirements(
- VkDevice device,
- VkImage image,
- uint32_t* pSparseMemoryRequirementCount,
- VkSparseImageMemoryRequirements* pSparseMemoryRequirements);
-
-void PostCallRecordGetImageSparseMemoryRequirements(
- VkDevice device,
- VkImage image,
- uint32_t* pSparseMemoryRequirementCount,
- VkSparseImageMemoryRequirements* pSparseMemoryRequirements);
-
-void PreCallRecordQueueBindSparse(
- VkQueue queue,
- uint32_t bindInfoCount,
- const VkBindSparseInfo* pBindInfo,
- VkFence fence);
-
-void PostCallRecordQueueBindSparse(
- VkQueue queue,
- uint32_t bindInfoCount,
- const VkBindSparseInfo* pBindInfo,
- VkFence fence,
- VkResult result);
-
-void PreCallRecordCreateFence(
- VkDevice device,
- const VkFenceCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFence* pFence);
-
-void PostCallRecordCreateFence(
- VkDevice device,
- const VkFenceCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFence* pFence,
- VkResult result);
-
-void PreCallRecordDestroyFence(
- VkDevice device,
- VkFence fence,
- const VkAllocationCallbacks* pAllocator);
-
-void PostCallRecordDestroyFence(
- VkDevice device,
- VkFence fence,
- const VkAllocationCallbacks* pAllocator);
-
-void PreCallRecordResetFences(
- VkDevice device,
- uint32_t fenceCount,
- const VkFence* pFences);
-
-void PostCallRecordResetFences(
- VkDevice device,
- uint32_t fenceCount,
- const VkFence* pFences,
- VkResult result);
-
-void PreCallRecordGetFenceStatus(
- VkDevice device,
- VkFence fence);
-
-void PostCallRecordGetFenceStatus(
- VkDevice device,
- VkFence fence,
- VkResult result);
-
-void PreCallRecordWaitForFences(
- VkDevice device,
- uint32_t fenceCount,
- const VkFence* pFences,
- VkBool32 waitAll,
- uint64_t timeout);
-
-void PostCallRecordWaitForFences(
- VkDevice device,
- uint32_t fenceCount,
- const VkFence* pFences,
- VkBool32 waitAll,
- uint64_t timeout,
- VkResult result);
-
-void PreCallRecordCreateSemaphore(
- VkDevice device,
- const VkSemaphoreCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSemaphore* pSemaphore);
-
-void PostCallRecordCreateSemaphore(
- VkDevice device,
- const VkSemaphoreCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSemaphore* pSemaphore,
- VkResult result);
-
-void PreCallRecordDestroySemaphore(
- VkDevice device,
- VkSemaphore semaphore,
- const VkAllocationCallbacks* pAllocator);
-
-void PostCallRecordDestroySemaphore(
- VkDevice device,
- VkSemaphore semaphore,
- const VkAllocationCallbacks* pAllocator);
-
-void PreCallRecordCreateEvent(
- VkDevice device,
- const VkEventCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkEvent* pEvent);
-
-void PostCallRecordCreateEvent(
- VkDevice device,
- const VkEventCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkEvent* pEvent,
- VkResult result);
-
-void PreCallRecordDestroyEvent(
- VkDevice device,
- VkEvent event,
- const VkAllocationCallbacks* pAllocator);
-
-void PostCallRecordDestroyEvent(
- VkDevice device,
- VkEvent event,
- const VkAllocationCallbacks* pAllocator);
-
-void PreCallRecordGetEventStatus(
- VkDevice device,
- VkEvent event);
-
-void PostCallRecordGetEventStatus(
- VkDevice device,
- VkEvent event,
- VkResult result);
-
-void PreCallRecordSetEvent(
- VkDevice device,
- VkEvent event);
-
-void PostCallRecordSetEvent(
- VkDevice device,
- VkEvent event,
- VkResult result);
-
-void PreCallRecordResetEvent(
- VkDevice device,
- VkEvent event);
-
-void PostCallRecordResetEvent(
- VkDevice device,
- VkEvent event,
- VkResult result);
-
-void PreCallRecordCreateQueryPool(
- VkDevice device,
- const VkQueryPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkQueryPool* pQueryPool);
-
-void PostCallRecordCreateQueryPool(
- VkDevice device,
- const VkQueryPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkQueryPool* pQueryPool,
- VkResult result);
-
-void PreCallRecordDestroyQueryPool(
- VkDevice device,
- VkQueryPool queryPool,
- const VkAllocationCallbacks* pAllocator);
-
-void PostCallRecordDestroyQueryPool(
- VkDevice device,
- VkQueryPool queryPool,
- const VkAllocationCallbacks* pAllocator);
-
-void PreCallRecordGetQueryPoolResults(
- VkDevice device,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- size_t dataSize,
- void* pData,
- VkDeviceSize stride,
- VkQueryResultFlags flags);
-
-void PostCallRecordGetQueryPoolResults(
- VkDevice device,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- size_t dataSize,
- void* pData,
- VkDeviceSize stride,
- VkQueryResultFlags flags,
- VkResult result);
-
-void PreCallRecordCreateBuffer(
- VkDevice device,
- const VkBufferCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkBuffer* pBuffer);
-
-void PostCallRecordCreateBuffer(
- VkDevice device,
- const VkBufferCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkBuffer* pBuffer,
- VkResult result);
-
-void PreCallRecordDestroyBuffer(
- VkDevice device,
- VkBuffer buffer,
- const VkAllocationCallbacks* pAllocator);
-
-void PostCallRecordDestroyBuffer(
- VkDevice device,
- VkBuffer buffer,
- const VkAllocationCallbacks* pAllocator);
-
-void PreCallRecordCreateBufferView(
- VkDevice device,
- const VkBufferViewCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkBufferView* pView);
-
-void PostCallRecordCreateBufferView(
- VkDevice device,
- const VkBufferViewCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkBufferView* pView,
- VkResult result);
-
-void PreCallRecordDestroyBufferView(
- VkDevice device,
- VkBufferView bufferView,
- const VkAllocationCallbacks* pAllocator);
-
-void PostCallRecordDestroyBufferView(
- VkDevice device,
- VkBufferView bufferView,
- const VkAllocationCallbacks* pAllocator);
-
-void PreCallRecordCreateImage(
- VkDevice device,
- const VkImageCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkImage* pImage);
-
-void PostCallRecordCreateImage(
- VkDevice device,
- const VkImageCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkImage* pImage,
- VkResult result);
-
-void PreCallRecordDestroyImage(
- VkDevice device,
- VkImage image,
- const VkAllocationCallbacks* pAllocator);
-
-void PostCallRecordDestroyImage(
- VkDevice device,
- VkImage image,
- const VkAllocationCallbacks* pAllocator);
-
-void PreCallRecordGetImageSubresourceLayout(
- VkDevice device,
- VkImage image,
- const VkImageSubresource* pSubresource,
- VkSubresourceLayout* pLayout);
-
-void PostCallRecordGetImageSubresourceLayout(
- VkDevice device,
- VkImage image,
- const VkImageSubresource* pSubresource,
- VkSubresourceLayout* pLayout);
-
-void PreCallRecordCreateImageView(
- VkDevice device,
- const VkImageViewCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkImageView* pView);
-
-void PostCallRecordCreateImageView(
- VkDevice device,
- const VkImageViewCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkImageView* pView,
- VkResult result);
-
-void PreCallRecordDestroyImageView(
- VkDevice device,
- VkImageView imageView,
- const VkAllocationCallbacks* pAllocator);
-
-void PostCallRecordDestroyImageView(
- VkDevice device,
- VkImageView imageView,
- const VkAllocationCallbacks* pAllocator);
-
-void PreCallRecordCreateShaderModule(
- VkDevice device,
- const VkShaderModuleCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkShaderModule* pShaderModule);
-
-void PostCallRecordCreateShaderModule(
- VkDevice device,
- const VkShaderModuleCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkShaderModule* pShaderModule,
- VkResult result);
-
-void PreCallRecordDestroyShaderModule(
- VkDevice device,
- VkShaderModule shaderModule,
- const VkAllocationCallbacks* pAllocator);
-
-void PostCallRecordDestroyShaderModule(
- VkDevice device,
- VkShaderModule shaderModule,
- const VkAllocationCallbacks* pAllocator);
-
-void PreCallRecordCreatePipelineCache(
- VkDevice device,
- const VkPipelineCacheCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkPipelineCache* pPipelineCache);
-
-void PostCallRecordCreatePipelineCache(
- VkDevice device,
- const VkPipelineCacheCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkPipelineCache* pPipelineCache,
- VkResult result);
-
-void PreCallRecordDestroyPipelineCache(
- VkDevice device,
- VkPipelineCache pipelineCache,
- const VkAllocationCallbacks* pAllocator);
-
-void PostCallRecordDestroyPipelineCache(
- VkDevice device,
- VkPipelineCache pipelineCache,
- const VkAllocationCallbacks* pAllocator);
-
-void PreCallRecordGetPipelineCacheData(
- VkDevice device,
- VkPipelineCache pipelineCache,
- size_t* pDataSize,
- void* pData);
-
-void PostCallRecordGetPipelineCacheData(
- VkDevice device,
- VkPipelineCache pipelineCache,
- size_t* pDataSize,
- void* pData,
- VkResult result);
-
-void PreCallRecordMergePipelineCaches(
- VkDevice device,
- VkPipelineCache dstCache,
- uint32_t srcCacheCount,
- const VkPipelineCache* pSrcCaches);
-
-void PostCallRecordMergePipelineCaches(
- VkDevice device,
- VkPipelineCache dstCache,
- uint32_t srcCacheCount,
- const VkPipelineCache* pSrcCaches,
- VkResult result);
-
-void PreCallRecordCreateGraphicsPipelines(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VkGraphicsPipelineCreateInfo* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkPipeline* pPipelines);
-
-void PostCallRecordCreateGraphicsPipelines(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VkGraphicsPipelineCreateInfo* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkPipeline* pPipelines,
- VkResult result);
-
-void PreCallRecordCreateComputePipelines(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VkComputePipelineCreateInfo* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkPipeline* pPipelines);
-
-void PostCallRecordCreateComputePipelines(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VkComputePipelineCreateInfo* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkPipeline* pPipelines,
- VkResult result);
-
-void PreCallRecordDestroyPipeline(
- VkDevice device,
- VkPipeline pipeline,
- const VkAllocationCallbacks* pAllocator);
-
-void PostCallRecordDestroyPipeline(
- VkDevice device,
- VkPipeline pipeline,
- const VkAllocationCallbacks* pAllocator);
-
-void PreCallRecordCreatePipelineLayout(
- VkDevice device,
- const VkPipelineLayoutCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkPipelineLayout* pPipelineLayout);
-
-void PostCallRecordCreatePipelineLayout(
- VkDevice device,
- const VkPipelineLayoutCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkPipelineLayout* pPipelineLayout,
- VkResult result);
-
-void PreCallRecordDestroyPipelineLayout(
- VkDevice device,
- VkPipelineLayout pipelineLayout,
- const VkAllocationCallbacks* pAllocator);
-
-void PostCallRecordDestroyPipelineLayout(
- VkDevice device,
- VkPipelineLayout pipelineLayout,
- const VkAllocationCallbacks* pAllocator);
-
-void PreCallRecordCreateSampler(
- VkDevice device,
- const VkSamplerCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSampler* pSampler);
-
-void PostCallRecordCreateSampler(
- VkDevice device,
- const VkSamplerCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSampler* pSampler,
- VkResult result);
-
-void PreCallRecordDestroySampler(
- VkDevice device,
- VkSampler sampler,
- const VkAllocationCallbacks* pAllocator);
-
-void PostCallRecordDestroySampler(
- VkDevice device,
- VkSampler sampler,
- const VkAllocationCallbacks* pAllocator);
-
-void PreCallRecordCreateDescriptorSetLayout(
- VkDevice device,
- const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorSetLayout* pSetLayout);
-
-void PostCallRecordCreateDescriptorSetLayout(
- VkDevice device,
- const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorSetLayout* pSetLayout,
- VkResult result);
-
-void PreCallRecordDestroyDescriptorSetLayout(
- VkDevice device,
- VkDescriptorSetLayout descriptorSetLayout,
- const VkAllocationCallbacks* pAllocator);
-
-void PostCallRecordDestroyDescriptorSetLayout(
- VkDevice device,
- VkDescriptorSetLayout descriptorSetLayout,
- const VkAllocationCallbacks* pAllocator);
-
-void PreCallRecordCreateDescriptorPool(
- VkDevice device,
- const VkDescriptorPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorPool* pDescriptorPool);
-
-void PostCallRecordCreateDescriptorPool(
- VkDevice device,
- const VkDescriptorPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorPool* pDescriptorPool,
- VkResult result);
-
-void PreCallRecordDestroyDescriptorPool(
- VkDevice device,
- VkDescriptorPool descriptorPool,
- const VkAllocationCallbacks* pAllocator);
-
-void PostCallRecordDestroyDescriptorPool(
- VkDevice device,
- VkDescriptorPool descriptorPool,
- const VkAllocationCallbacks* pAllocator);
-
-void PreCallRecordResetDescriptorPool(
- VkDevice device,
- VkDescriptorPool descriptorPool,
- VkDescriptorPoolResetFlags flags);
-
-void PostCallRecordResetDescriptorPool(
- VkDevice device,
- VkDescriptorPool descriptorPool,
- VkDescriptorPoolResetFlags flags,
- VkResult result);
-
-void PreCallRecordAllocateDescriptorSets(
- VkDevice device,
- const VkDescriptorSetAllocateInfo* pAllocateInfo,
- VkDescriptorSet* pDescriptorSets);
-
-void PostCallRecordAllocateDescriptorSets(
- VkDevice device,
- const VkDescriptorSetAllocateInfo* pAllocateInfo,
- VkDescriptorSet* pDescriptorSets,
- VkResult result);
-
-void PreCallRecordFreeDescriptorSets(
- VkDevice device,
- VkDescriptorPool descriptorPool,
- uint32_t descriptorSetCount,
- const VkDescriptorSet* pDescriptorSets);
-
-void PostCallRecordFreeDescriptorSets(
- VkDevice device,
- VkDescriptorPool descriptorPool,
- uint32_t descriptorSetCount,
- const VkDescriptorSet* pDescriptorSets,
- VkResult result);
-
-void PreCallRecordUpdateDescriptorSets(
- VkDevice device,
- uint32_t descriptorWriteCount,
- const VkWriteDescriptorSet* pDescriptorWrites,
- uint32_t descriptorCopyCount,
- const VkCopyDescriptorSet* pDescriptorCopies);
-
-void PostCallRecordUpdateDescriptorSets(
- VkDevice device,
- uint32_t descriptorWriteCount,
- const VkWriteDescriptorSet* pDescriptorWrites,
- uint32_t descriptorCopyCount,
- const VkCopyDescriptorSet* pDescriptorCopies);
-
-void PreCallRecordCreateFramebuffer(
- VkDevice device,
- const VkFramebufferCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFramebuffer* pFramebuffer);
-
-void PostCallRecordCreateFramebuffer(
- VkDevice device,
- const VkFramebufferCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFramebuffer* pFramebuffer,
- VkResult result);
-
-void PreCallRecordDestroyFramebuffer(
- VkDevice device,
- VkFramebuffer framebuffer,
- const VkAllocationCallbacks* pAllocator);
-
-void PostCallRecordDestroyFramebuffer(
- VkDevice device,
- VkFramebuffer framebuffer,
- const VkAllocationCallbacks* pAllocator);
-
-void PreCallRecordCreateRenderPass(
- VkDevice device,
- const VkRenderPassCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkRenderPass* pRenderPass);
-
-void PostCallRecordCreateRenderPass(
- VkDevice device,
- const VkRenderPassCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkRenderPass* pRenderPass,
- VkResult result);
-
-void PreCallRecordDestroyRenderPass(
- VkDevice device,
- VkRenderPass renderPass,
- const VkAllocationCallbacks* pAllocator);
-
-void PostCallRecordDestroyRenderPass(
- VkDevice device,
- VkRenderPass renderPass,
- const VkAllocationCallbacks* pAllocator);
-
-void PreCallRecordGetRenderAreaGranularity(
- VkDevice device,
- VkRenderPass renderPass,
- VkExtent2D* pGranularity);
-
-void PostCallRecordGetRenderAreaGranularity(
- VkDevice device,
- VkRenderPass renderPass,
- VkExtent2D* pGranularity);
-
-void PreCallRecordCreateCommandPool(
- VkDevice device,
- const VkCommandPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkCommandPool* pCommandPool);
-
-void PostCallRecordCreateCommandPool(
- VkDevice device,
- const VkCommandPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkCommandPool* pCommandPool,
- VkResult result);
-
-void PreCallRecordDestroyCommandPool(
- VkDevice device,
- VkCommandPool commandPool,
- const VkAllocationCallbacks* pAllocator);
-
-void PostCallRecordDestroyCommandPool(
- VkDevice device,
- VkCommandPool commandPool,
- const VkAllocationCallbacks* pAllocator);
-
-void PreCallRecordResetCommandPool(
- VkDevice device,
- VkCommandPool commandPool,
- VkCommandPoolResetFlags flags);
-
-void PostCallRecordResetCommandPool(
- VkDevice device,
- VkCommandPool commandPool,
- VkCommandPoolResetFlags flags,
- VkResult result);
-
-void PreCallRecordAllocateCommandBuffers(
- VkDevice device,
- const VkCommandBufferAllocateInfo* pAllocateInfo,
- VkCommandBuffer* pCommandBuffers);
-
-void PostCallRecordAllocateCommandBuffers(
- VkDevice device,
- const VkCommandBufferAllocateInfo* pAllocateInfo,
- VkCommandBuffer* pCommandBuffers,
- VkResult result);
-
-void PreCallRecordFreeCommandBuffers(
- VkDevice device,
- VkCommandPool commandPool,
- uint32_t commandBufferCount,
- const VkCommandBuffer* pCommandBuffers);
-
-void PostCallRecordFreeCommandBuffers(
- VkDevice device,
- VkCommandPool commandPool,
- uint32_t commandBufferCount,
- const VkCommandBuffer* pCommandBuffers);
-
-void PreCallRecordBeginCommandBuffer(
- VkCommandBuffer commandBuffer,
- const VkCommandBufferBeginInfo* pBeginInfo);
-
-void PostCallRecordBeginCommandBuffer(
- VkCommandBuffer commandBuffer,
- const VkCommandBufferBeginInfo* pBeginInfo,
- VkResult result);
-
-void PreCallRecordEndCommandBuffer(
- VkCommandBuffer commandBuffer);
-
-void PostCallRecordEndCommandBuffer(
- VkCommandBuffer commandBuffer,
- VkResult result);
-
-void PreCallRecordResetCommandBuffer(
- VkCommandBuffer commandBuffer,
- VkCommandBufferResetFlags flags);
-
-void PostCallRecordResetCommandBuffer(
- VkCommandBuffer commandBuffer,
- VkCommandBufferResetFlags flags,
- VkResult result);
-
-void PreCallRecordCmdBindPipeline(
- VkCommandBuffer commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipeline pipeline);
-
-void PostCallRecordCmdBindPipeline(
- VkCommandBuffer commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipeline pipeline);
-
-void PreCallRecordCmdSetViewport(
- VkCommandBuffer commandBuffer,
- uint32_t firstViewport,
- uint32_t viewportCount,
- const VkViewport* pViewports);
-
-void PostCallRecordCmdSetViewport(
- VkCommandBuffer commandBuffer,
- uint32_t firstViewport,
- uint32_t viewportCount,
- const VkViewport* pViewports);
-
-void PreCallRecordCmdSetScissor(
- VkCommandBuffer commandBuffer,
- uint32_t firstScissor,
- uint32_t scissorCount,
- const VkRect2D* pScissors);
-
-void PostCallRecordCmdSetScissor(
- VkCommandBuffer commandBuffer,
- uint32_t firstScissor,
- uint32_t scissorCount,
- const VkRect2D* pScissors);
-
-void PreCallRecordCmdSetLineWidth(
- VkCommandBuffer commandBuffer,
- float lineWidth);
-
-void PostCallRecordCmdSetLineWidth(
- VkCommandBuffer commandBuffer,
- float lineWidth);
-
-void PreCallRecordCmdSetDepthBias(
- VkCommandBuffer commandBuffer,
- float depthBiasConstantFactor,
- float depthBiasClamp,
- float depthBiasSlopeFactor);
-
-void PostCallRecordCmdSetDepthBias(
- VkCommandBuffer commandBuffer,
- float depthBiasConstantFactor,
- float depthBiasClamp,
- float depthBiasSlopeFactor);
-
-void PreCallRecordCmdSetBlendConstants(
- VkCommandBuffer commandBuffer,
- const float blendConstants[4]);
-
-void PostCallRecordCmdSetBlendConstants(
- VkCommandBuffer commandBuffer,
- const float blendConstants[4]);
-
-void PreCallRecordCmdSetDepthBounds(
- VkCommandBuffer commandBuffer,
- float minDepthBounds,
- float maxDepthBounds);
-
-void PostCallRecordCmdSetDepthBounds(
- VkCommandBuffer commandBuffer,
- float minDepthBounds,
- float maxDepthBounds);
-
-void PreCallRecordCmdSetStencilCompareMask(
- VkCommandBuffer commandBuffer,
- VkStencilFaceFlags faceMask,
- uint32_t compareMask);
-
-void PostCallRecordCmdSetStencilCompareMask(
- VkCommandBuffer commandBuffer,
- VkStencilFaceFlags faceMask,
- uint32_t compareMask);
-
-void PreCallRecordCmdSetStencilWriteMask(
- VkCommandBuffer commandBuffer,
- VkStencilFaceFlags faceMask,
- uint32_t writeMask);
-
-void PostCallRecordCmdSetStencilWriteMask(
- VkCommandBuffer commandBuffer,
- VkStencilFaceFlags faceMask,
- uint32_t writeMask);
-
-void PreCallRecordCmdSetStencilReference(
- VkCommandBuffer commandBuffer,
- VkStencilFaceFlags faceMask,
- uint32_t reference);
-
-void PostCallRecordCmdSetStencilReference(
- VkCommandBuffer commandBuffer,
- VkStencilFaceFlags faceMask,
- uint32_t reference);
-
-void PreCallRecordCmdBindDescriptorSets(
- VkCommandBuffer commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipelineLayout layout,
- uint32_t firstSet,
- uint32_t descriptorSetCount,
- const VkDescriptorSet* pDescriptorSets,
- uint32_t dynamicOffsetCount,
- const uint32_t* pDynamicOffsets);
-
-void PostCallRecordCmdBindDescriptorSets(
- VkCommandBuffer commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipelineLayout layout,
- uint32_t firstSet,
- uint32_t descriptorSetCount,
- const VkDescriptorSet* pDescriptorSets,
- uint32_t dynamicOffsetCount,
- const uint32_t* pDynamicOffsets);
-
-void PreCallRecordCmdBindIndexBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkIndexType indexType);
-
-void PostCallRecordCmdBindIndexBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkIndexType indexType);
-
-void PreCallRecordCmdBindVertexBuffers(
- VkCommandBuffer commandBuffer,
- uint32_t firstBinding,
- uint32_t bindingCount,
- const VkBuffer* pBuffers,
- const VkDeviceSize* pOffsets);
-
-void PostCallRecordCmdBindVertexBuffers(
- VkCommandBuffer commandBuffer,
- uint32_t firstBinding,
- uint32_t bindingCount,
- const VkBuffer* pBuffers,
- const VkDeviceSize* pOffsets);
-
-void PreCallRecordCmdDraw(
- VkCommandBuffer commandBuffer,
- uint32_t vertexCount,
- uint32_t instanceCount,
- uint32_t firstVertex,
- uint32_t firstInstance);
-
-void PostCallRecordCmdDraw(
- VkCommandBuffer commandBuffer,
- uint32_t vertexCount,
- uint32_t instanceCount,
- uint32_t firstVertex,
- uint32_t firstInstance);
-
-void PreCallRecordCmdDrawIndexed(
- VkCommandBuffer commandBuffer,
- uint32_t indexCount,
- uint32_t instanceCount,
- uint32_t firstIndex,
- int32_t vertexOffset,
- uint32_t firstInstance);
-
-void PostCallRecordCmdDrawIndexed(
- VkCommandBuffer commandBuffer,
- uint32_t indexCount,
- uint32_t instanceCount,
- uint32_t firstIndex,
- int32_t vertexOffset,
- uint32_t firstInstance);
-
-void PreCallRecordCmdDrawIndirect(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- uint32_t drawCount,
- uint32_t stride);
-
-void PostCallRecordCmdDrawIndirect(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- uint32_t drawCount,
- uint32_t stride);
-
-void PreCallRecordCmdDrawIndexedIndirect(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- uint32_t drawCount,
- uint32_t stride);
-
-void PostCallRecordCmdDrawIndexedIndirect(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- uint32_t drawCount,
- uint32_t stride);
-
-void PreCallRecordCmdDispatch(
- VkCommandBuffer commandBuffer,
- uint32_t groupCountX,
- uint32_t groupCountY,
- uint32_t groupCountZ);
-
-void PostCallRecordCmdDispatch(
- VkCommandBuffer commandBuffer,
- uint32_t groupCountX,
- uint32_t groupCountY,
- uint32_t groupCountZ);
-
-void PreCallRecordCmdDispatchIndirect(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset);
-
-void PostCallRecordCmdDispatchIndirect(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset);
-
-void PreCallRecordCmdCopyBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer srcBuffer,
- VkBuffer dstBuffer,
- uint32_t regionCount,
- const VkBufferCopy* pRegions);
-
-void PostCallRecordCmdCopyBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer srcBuffer,
- VkBuffer dstBuffer,
- uint32_t regionCount,
- const VkBufferCopy* pRegions);
-
-void PreCallRecordCmdCopyImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkImageCopy* pRegions);
-
-void PostCallRecordCmdCopyImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkImageCopy* pRegions);
-
-void PreCallRecordCmdBlitImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkImageBlit* pRegions,
- VkFilter filter);
-
-void PostCallRecordCmdBlitImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkImageBlit* pRegions,
- VkFilter filter);
-
-void PreCallRecordCmdCopyBufferToImage(
- VkCommandBuffer commandBuffer,
- VkBuffer srcBuffer,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkBufferImageCopy* pRegions);
-
-void PostCallRecordCmdCopyBufferToImage(
- VkCommandBuffer commandBuffer,
- VkBuffer srcBuffer,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkBufferImageCopy* pRegions);
-
-void PreCallRecordCmdCopyImageToBuffer(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkBuffer dstBuffer,
- uint32_t regionCount,
- const VkBufferImageCopy* pRegions);
-
-void PostCallRecordCmdCopyImageToBuffer(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkBuffer dstBuffer,
- uint32_t regionCount,
- const VkBufferImageCopy* pRegions);
-
-void PreCallRecordCmdUpdateBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize dataSize,
- const void* pData);
-
-void PostCallRecordCmdUpdateBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize dataSize,
- const void* pData);
-
-void PreCallRecordCmdFillBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize size,
- uint32_t data);
-
-void PostCallRecordCmdFillBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize size,
- uint32_t data);
-
-void PreCallRecordCmdClearColorImage(
- VkCommandBuffer commandBuffer,
- VkImage image,
- VkImageLayout imageLayout,
- const VkClearColorValue* pColor,
- uint32_t rangeCount,
- const VkImageSubresourceRange* pRanges);
-
-void PostCallRecordCmdClearColorImage(
- VkCommandBuffer commandBuffer,
- VkImage image,
- VkImageLayout imageLayout,
- const VkClearColorValue* pColor,
- uint32_t rangeCount,
- const VkImageSubresourceRange* pRanges);
-
-void PreCallRecordCmdClearDepthStencilImage(
- VkCommandBuffer commandBuffer,
- VkImage image,
- VkImageLayout imageLayout,
- const VkClearDepthStencilValue* pDepthStencil,
- uint32_t rangeCount,
- const VkImageSubresourceRange* pRanges);
-
-void PostCallRecordCmdClearDepthStencilImage(
- VkCommandBuffer commandBuffer,
- VkImage image,
- VkImageLayout imageLayout,
- const VkClearDepthStencilValue* pDepthStencil,
- uint32_t rangeCount,
- const VkImageSubresourceRange* pRanges);
-
-void PreCallRecordCmdClearAttachments(
- VkCommandBuffer commandBuffer,
- uint32_t attachmentCount,
- const VkClearAttachment* pAttachments,
- uint32_t rectCount,
- const VkClearRect* pRects);
-
-void PostCallRecordCmdClearAttachments(
- VkCommandBuffer commandBuffer,
- uint32_t attachmentCount,
- const VkClearAttachment* pAttachments,
- uint32_t rectCount,
- const VkClearRect* pRects);
-
-void PreCallRecordCmdResolveImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkImageResolve* pRegions);
-
-void PostCallRecordCmdResolveImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkImageResolve* pRegions);
-
-void PreCallRecordCmdSetEvent(
- VkCommandBuffer commandBuffer,
- VkEvent event,
- VkPipelineStageFlags stageMask);
-
-void PostCallRecordCmdSetEvent(
- VkCommandBuffer commandBuffer,
- VkEvent event,
- VkPipelineStageFlags stageMask);
-
-void PreCallRecordCmdResetEvent(
- VkCommandBuffer commandBuffer,
- VkEvent event,
- VkPipelineStageFlags stageMask);
-
-void PostCallRecordCmdResetEvent(
- VkCommandBuffer commandBuffer,
- VkEvent event,
- VkPipelineStageFlags stageMask);
-
-void PreCallRecordCmdWaitEvents(
- VkCommandBuffer commandBuffer,
- uint32_t eventCount,
- const VkEvent* pEvents,
- VkPipelineStageFlags srcStageMask,
- VkPipelineStageFlags dstStageMask,
- uint32_t memoryBarrierCount,
- const VkMemoryBarrier* pMemoryBarriers,
- uint32_t bufferMemoryBarrierCount,
- const VkBufferMemoryBarrier* pBufferMemoryBarriers,
- uint32_t imageMemoryBarrierCount,
- const VkImageMemoryBarrier* pImageMemoryBarriers);
-
-void PostCallRecordCmdWaitEvents(
- VkCommandBuffer commandBuffer,
- uint32_t eventCount,
- const VkEvent* pEvents,
- VkPipelineStageFlags srcStageMask,
- VkPipelineStageFlags dstStageMask,
- uint32_t memoryBarrierCount,
- const VkMemoryBarrier* pMemoryBarriers,
- uint32_t bufferMemoryBarrierCount,
- const VkBufferMemoryBarrier* pBufferMemoryBarriers,
- uint32_t imageMemoryBarrierCount,
- const VkImageMemoryBarrier* pImageMemoryBarriers);
-
-void PreCallRecordCmdPipelineBarrier(
- VkCommandBuffer commandBuffer,
- VkPipelineStageFlags srcStageMask,
- VkPipelineStageFlags dstStageMask,
- VkDependencyFlags dependencyFlags,
- uint32_t memoryBarrierCount,
- const VkMemoryBarrier* pMemoryBarriers,
- uint32_t bufferMemoryBarrierCount,
- const VkBufferMemoryBarrier* pBufferMemoryBarriers,
- uint32_t imageMemoryBarrierCount,
- const VkImageMemoryBarrier* pImageMemoryBarriers);
-
-void PostCallRecordCmdPipelineBarrier(
- VkCommandBuffer commandBuffer,
- VkPipelineStageFlags srcStageMask,
- VkPipelineStageFlags dstStageMask,
- VkDependencyFlags dependencyFlags,
- uint32_t memoryBarrierCount,
- const VkMemoryBarrier* pMemoryBarriers,
- uint32_t bufferMemoryBarrierCount,
- const VkBufferMemoryBarrier* pBufferMemoryBarriers,
- uint32_t imageMemoryBarrierCount,
- const VkImageMemoryBarrier* pImageMemoryBarriers);
-
-void PreCallRecordCmdBeginQuery(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query,
- VkQueryControlFlags flags);
-
-void PostCallRecordCmdBeginQuery(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query,
- VkQueryControlFlags flags);
-
-void PreCallRecordCmdEndQuery(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query);
-
-void PostCallRecordCmdEndQuery(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query);
-
-void PreCallRecordCmdResetQueryPool(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount);
-
-void PostCallRecordCmdResetQueryPool(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount);
-
-void PreCallRecordCmdWriteTimestamp(
- VkCommandBuffer commandBuffer,
- VkPipelineStageFlagBits pipelineStage,
- VkQueryPool queryPool,
- uint32_t query);
-
-void PostCallRecordCmdWriteTimestamp(
- VkCommandBuffer commandBuffer,
- VkPipelineStageFlagBits pipelineStage,
- VkQueryPool queryPool,
- uint32_t query);
-
-void PreCallRecordCmdCopyQueryPoolResults(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize stride,
- VkQueryResultFlags flags);
-
-void PostCallRecordCmdCopyQueryPoolResults(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize stride,
- VkQueryResultFlags flags);
-
-void PreCallRecordCmdPushConstants(
- VkCommandBuffer commandBuffer,
- VkPipelineLayout layout,
- VkShaderStageFlags stageFlags,
- uint32_t offset,
- uint32_t size,
- const void* pValues);
-
-void PostCallRecordCmdPushConstants(
- VkCommandBuffer commandBuffer,
- VkPipelineLayout layout,
- VkShaderStageFlags stageFlags,
- uint32_t offset,
- uint32_t size,
- const void* pValues);
-
-void PreCallRecordCmdBeginRenderPass(
- VkCommandBuffer commandBuffer,
- const VkRenderPassBeginInfo* pRenderPassBegin,
- VkSubpassContents contents);
-
-void PostCallRecordCmdBeginRenderPass(
- VkCommandBuffer commandBuffer,
- const VkRenderPassBeginInfo* pRenderPassBegin,
- VkSubpassContents contents);
-
-void PreCallRecordCmdNextSubpass(
- VkCommandBuffer commandBuffer,
- VkSubpassContents contents);
-
-void PostCallRecordCmdNextSubpass(
- VkCommandBuffer commandBuffer,
- VkSubpassContents contents);
-
-void PreCallRecordCmdEndRenderPass(
- VkCommandBuffer commandBuffer);
-
-void PostCallRecordCmdEndRenderPass(
- VkCommandBuffer commandBuffer);
-
-void PreCallRecordCmdExecuteCommands(
- VkCommandBuffer commandBuffer,
- uint32_t commandBufferCount,
- const VkCommandBuffer* pCommandBuffers);
-
-void PostCallRecordCmdExecuteCommands(
- VkCommandBuffer commandBuffer,
- uint32_t commandBufferCount,
- const VkCommandBuffer* pCommandBuffers);
-
-void PreCallRecordBindBufferMemory2(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindBufferMemoryInfo* pBindInfos);
-
-void PostCallRecordBindBufferMemory2(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindBufferMemoryInfo* pBindInfos,
- VkResult result);
-
-void PreCallRecordBindImageMemory2(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindImageMemoryInfo* pBindInfos);
-
-void PostCallRecordBindImageMemory2(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindImageMemoryInfo* pBindInfos,
- VkResult result);
-
-void PreCallRecordGetDeviceGroupPeerMemoryFeatures(
- VkDevice device,
- uint32_t heapIndex,
- uint32_t localDeviceIndex,
- uint32_t remoteDeviceIndex,
- VkPeerMemoryFeatureFlags* pPeerMemoryFeatures);
-
-void PostCallRecordGetDeviceGroupPeerMemoryFeatures(
- VkDevice device,
- uint32_t heapIndex,
- uint32_t localDeviceIndex,
- uint32_t remoteDeviceIndex,
- VkPeerMemoryFeatureFlags* pPeerMemoryFeatures);
-
-void PreCallRecordCmdSetDeviceMask(
- VkCommandBuffer commandBuffer,
- uint32_t deviceMask);
-
-void PostCallRecordCmdSetDeviceMask(
- VkCommandBuffer commandBuffer,
- uint32_t deviceMask);
-
-void PreCallRecordCmdDispatchBase(
- VkCommandBuffer commandBuffer,
- uint32_t baseGroupX,
- uint32_t baseGroupY,
- uint32_t baseGroupZ,
- uint32_t groupCountX,
- uint32_t groupCountY,
- uint32_t groupCountZ);
-
-void PostCallRecordCmdDispatchBase(
- VkCommandBuffer commandBuffer,
- uint32_t baseGroupX,
- uint32_t baseGroupY,
- uint32_t baseGroupZ,
- uint32_t groupCountX,
- uint32_t groupCountY,
- uint32_t groupCountZ);
-
-void PreCallRecordEnumeratePhysicalDeviceGroups(
- VkInstance instance,
- uint32_t* pPhysicalDeviceGroupCount,
- VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties);
-
-void PostCallRecordEnumeratePhysicalDeviceGroups(
- VkInstance instance,
- uint32_t* pPhysicalDeviceGroupCount,
- VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties,
- VkResult result);
-
-void PreCallRecordGetImageMemoryRequirements2(
- VkDevice device,
- const VkImageMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements);
-
-void PostCallRecordGetImageMemoryRequirements2(
- VkDevice device,
- const VkImageMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements);
-
-void PreCallRecordGetBufferMemoryRequirements2(
- VkDevice device,
- const VkBufferMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements);
-
-void PostCallRecordGetBufferMemoryRequirements2(
- VkDevice device,
- const VkBufferMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements);
-
-void PreCallRecordGetImageSparseMemoryRequirements2(
- VkDevice device,
- const VkImageSparseMemoryRequirementsInfo2* pInfo,
- uint32_t* pSparseMemoryRequirementCount,
- VkSparseImageMemoryRequirements2* pSparseMemoryRequirements);
-
-void PostCallRecordGetImageSparseMemoryRequirements2(
- VkDevice device,
- const VkImageSparseMemoryRequirementsInfo2* pInfo,
- uint32_t* pSparseMemoryRequirementCount,
- VkSparseImageMemoryRequirements2* pSparseMemoryRequirements);
-
-void PreCallRecordTrimCommandPool(
- VkDevice device,
- VkCommandPool commandPool,
- VkCommandPoolTrimFlags flags);
-
-void PostCallRecordTrimCommandPool(
- VkDevice device,
- VkCommandPool commandPool,
- VkCommandPoolTrimFlags flags);
-
-void PreCallRecordGetDeviceQueue2(
- VkDevice device,
- const VkDeviceQueueInfo2* pQueueInfo,
- VkQueue* pQueue);
-
-void PostCallRecordGetDeviceQueue2(
- VkDevice device,
- const VkDeviceQueueInfo2* pQueueInfo,
- VkQueue* pQueue);
-
-void PreCallRecordCreateSamplerYcbcrConversion(
- VkDevice device,
- const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSamplerYcbcrConversion* pYcbcrConversion);
-
-void PostCallRecordCreateSamplerYcbcrConversion(
- VkDevice device,
- const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSamplerYcbcrConversion* pYcbcrConversion,
- VkResult result);
-
-void PreCallRecordDestroySamplerYcbcrConversion(
- VkDevice device,
- VkSamplerYcbcrConversion ycbcrConversion,
- const VkAllocationCallbacks* pAllocator);
-
-void PostCallRecordDestroySamplerYcbcrConversion(
- VkDevice device,
- VkSamplerYcbcrConversion ycbcrConversion,
- const VkAllocationCallbacks* pAllocator);
-
-void PreCallRecordCreateDescriptorUpdateTemplate(
- VkDevice device,
- const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate);
-
-void PostCallRecordCreateDescriptorUpdateTemplate(
- VkDevice device,
- const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate,
- VkResult result);
-
-void PreCallRecordDestroyDescriptorUpdateTemplate(
- VkDevice device,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const VkAllocationCallbacks* pAllocator);
-
-void PostCallRecordDestroyDescriptorUpdateTemplate(
- VkDevice device,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const VkAllocationCallbacks* pAllocator);
-
-void PreCallRecordUpdateDescriptorSetWithTemplate(
- VkDevice device,
- VkDescriptorSet descriptorSet,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const void* pData);
-
-void PostCallRecordUpdateDescriptorSetWithTemplate(
- VkDevice device,
- VkDescriptorSet descriptorSet,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const void* pData);
-
-void PreCallRecordGetDescriptorSetLayoutSupport(
- VkDevice device,
- const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
- VkDescriptorSetLayoutSupport* pSupport);
-
-void PostCallRecordGetDescriptorSetLayoutSupport(
- VkDevice device,
- const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
- VkDescriptorSetLayoutSupport* pSupport);
-
-void PreCallRecordDestroySurfaceKHR(
- VkInstance instance,
- VkSurfaceKHR surface,
- const VkAllocationCallbacks* pAllocator);
-
-void PostCallRecordDestroySurfaceKHR(
- VkInstance instance,
- VkSurfaceKHR surface,
- const VkAllocationCallbacks* pAllocator);
-
-void PreCallRecordGetPhysicalDeviceSurfaceSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex,
- VkSurfaceKHR surface,
- VkBool32* pSupported);
-
-void PostCallRecordGetPhysicalDeviceSurfaceSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex,
- VkSurfaceKHR surface,
- VkBool32* pSupported,
- VkResult result);
-
-void PreCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- VkSurfaceCapabilitiesKHR* pSurfaceCapabilities);
-
-void PostCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- VkSurfaceCapabilitiesKHR* pSurfaceCapabilities,
- VkResult result);
-
-void PreCallRecordGetPhysicalDeviceSurfaceFormatsKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- uint32_t* pSurfaceFormatCount,
- VkSurfaceFormatKHR* pSurfaceFormats);
-
-void PostCallRecordGetPhysicalDeviceSurfaceFormatsKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- uint32_t* pSurfaceFormatCount,
- VkSurfaceFormatKHR* pSurfaceFormats,
- VkResult result);
-
-void PreCallRecordGetPhysicalDeviceSurfacePresentModesKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- uint32_t* pPresentModeCount,
- VkPresentModeKHR* pPresentModes);
-
-void PostCallRecordGetPhysicalDeviceSurfacePresentModesKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- uint32_t* pPresentModeCount,
- VkPresentModeKHR* pPresentModes,
- VkResult result);
-
-void PreCallRecordCreateSwapchainKHR(
- VkDevice device,
- const VkSwapchainCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSwapchainKHR* pSwapchain);
-
-void PostCallRecordCreateSwapchainKHR(
- VkDevice device,
- const VkSwapchainCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSwapchainKHR* pSwapchain,
- VkResult result);
-
-void PreCallRecordDestroySwapchainKHR(
- VkDevice device,
- VkSwapchainKHR swapchain,
- const VkAllocationCallbacks* pAllocator);
-
-void PostCallRecordDestroySwapchainKHR(
- VkDevice device,
- VkSwapchainKHR swapchain,
- const VkAllocationCallbacks* pAllocator);
-
-void PreCallRecordGetSwapchainImagesKHR(
- VkDevice device,
- VkSwapchainKHR swapchain,
- uint32_t* pSwapchainImageCount,
- VkImage* pSwapchainImages);
-
-void PostCallRecordGetSwapchainImagesKHR(
- VkDevice device,
- VkSwapchainKHR swapchain,
- uint32_t* pSwapchainImageCount,
- VkImage* pSwapchainImages,
- VkResult result);
-
-void PreCallRecordAcquireNextImageKHR(
- VkDevice device,
- VkSwapchainKHR swapchain,
- uint64_t timeout,
- VkSemaphore semaphore,
- VkFence fence,
- uint32_t* pImageIndex);
-
-void PostCallRecordAcquireNextImageKHR(
- VkDevice device,
- VkSwapchainKHR swapchain,
- uint64_t timeout,
- VkSemaphore semaphore,
- VkFence fence,
- uint32_t* pImageIndex,
- VkResult result);
-
-void PreCallRecordGetDeviceGroupPresentCapabilitiesKHR(
- VkDevice device,
- VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities);
-
-void PostCallRecordGetDeviceGroupPresentCapabilitiesKHR(
- VkDevice device,
- VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities,
- VkResult result);
-
-void PreCallRecordGetDeviceGroupSurfacePresentModesKHR(
- VkDevice device,
- VkSurfaceKHR surface,
- VkDeviceGroupPresentModeFlagsKHR* pModes);
-
-void PostCallRecordGetDeviceGroupSurfacePresentModesKHR(
- VkDevice device,
- VkSurfaceKHR surface,
- VkDeviceGroupPresentModeFlagsKHR* pModes,
- VkResult result);
-
-void PreCallRecordGetPhysicalDevicePresentRectanglesKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- uint32_t* pRectCount,
- VkRect2D* pRects);
-
-void PostCallRecordGetPhysicalDevicePresentRectanglesKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- uint32_t* pRectCount,
- VkRect2D* pRects,
- VkResult result);
-
-void PreCallRecordAcquireNextImage2KHR(
- VkDevice device,
- const VkAcquireNextImageInfoKHR* pAcquireInfo,
- uint32_t* pImageIndex);
-
-void PostCallRecordAcquireNextImage2KHR(
- VkDevice device,
- const VkAcquireNextImageInfoKHR* pAcquireInfo,
- uint32_t* pImageIndex,
- VkResult result);
-
-void PreCallRecordGetDisplayPlaneSupportedDisplaysKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t planeIndex,
- uint32_t* pDisplayCount,
- VkDisplayKHR* pDisplays);
-
-void PostCallRecordGetDisplayPlaneSupportedDisplaysKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t planeIndex,
- uint32_t* pDisplayCount,
- VkDisplayKHR* pDisplays,
- VkResult result);
-
-void PreCallRecordGetDisplayModePropertiesKHR(
- VkPhysicalDevice physicalDevice,
- VkDisplayKHR display,
- uint32_t* pPropertyCount,
- VkDisplayModePropertiesKHR* pProperties);
-
-void PostCallRecordGetDisplayModePropertiesKHR(
- VkPhysicalDevice physicalDevice,
- VkDisplayKHR display,
- uint32_t* pPropertyCount,
- VkDisplayModePropertiesKHR* pProperties,
- VkResult result);
-
-void PreCallRecordCreateDisplayModeKHR(
- VkPhysicalDevice physicalDevice,
- VkDisplayKHR display,
- const VkDisplayModeCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDisplayModeKHR* pMode);
-
-void PostCallRecordCreateDisplayModeKHR(
- VkPhysicalDevice physicalDevice,
- VkDisplayKHR display,
- const VkDisplayModeCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDisplayModeKHR* pMode,
- VkResult result);
-
-void PreCallRecordGetDisplayPlaneCapabilitiesKHR(
- VkPhysicalDevice physicalDevice,
- VkDisplayModeKHR mode,
- uint32_t planeIndex,
- VkDisplayPlaneCapabilitiesKHR* pCapabilities);
-
-void PostCallRecordGetDisplayPlaneCapabilitiesKHR(
- VkPhysicalDevice physicalDevice,
- VkDisplayModeKHR mode,
- uint32_t planeIndex,
- VkDisplayPlaneCapabilitiesKHR* pCapabilities,
- VkResult result);
-
-void PreCallRecordCreateDisplayPlaneSurfaceKHR(
- VkInstance instance,
- const VkDisplaySurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-
-void PostCallRecordCreateDisplayPlaneSurfaceKHR(
- VkInstance instance,
- const VkDisplaySurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result);
-
-void PreCallRecordCreateSharedSwapchainsKHR(
- VkDevice device,
- uint32_t swapchainCount,
- const VkSwapchainCreateInfoKHR* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkSwapchainKHR* pSwapchains);
-
-void PostCallRecordCreateSharedSwapchainsKHR(
- VkDevice device,
- uint32_t swapchainCount,
- const VkSwapchainCreateInfoKHR* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkSwapchainKHR* pSwapchains,
- VkResult result);
-
-#ifdef VK_USE_PLATFORM_XLIB_KHR
-
-void PreCallRecordCreateXlibSurfaceKHR(
- VkInstance instance,
- const VkXlibSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-
-void PostCallRecordCreateXlibSurfaceKHR(
- VkInstance instance,
- const VkXlibSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result);
-#endif // VK_USE_PLATFORM_XLIB_KHR
-
-#ifdef VK_USE_PLATFORM_XCB_KHR
-
-void PreCallRecordCreateXcbSurfaceKHR(
- VkInstance instance,
- const VkXcbSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-
-void PostCallRecordCreateXcbSurfaceKHR(
- VkInstance instance,
- const VkXcbSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result);
-#endif // VK_USE_PLATFORM_XCB_KHR
-
-#ifdef VK_USE_PLATFORM_WAYLAND_KHR
-
-void PreCallRecordCreateWaylandSurfaceKHR(
- VkInstance instance,
- const VkWaylandSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-
-void PostCallRecordCreateWaylandSurfaceKHR(
- VkInstance instance,
- const VkWaylandSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result);
-#endif // VK_USE_PLATFORM_WAYLAND_KHR
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-
-void PreCallRecordCreateAndroidSurfaceKHR(
- VkInstance instance,
- const VkAndroidSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-
-void PostCallRecordCreateAndroidSurfaceKHR(
- VkInstance instance,
- const VkAndroidSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result);
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-void PreCallRecordCreateWin32SurfaceKHR(
- VkInstance instance,
- const VkWin32SurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-
-void PostCallRecordCreateWin32SurfaceKHR(
- VkInstance instance,
- const VkWin32SurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result);
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-void PreCallRecordGetDeviceGroupPeerMemoryFeaturesKHR(
- VkDevice device,
- uint32_t heapIndex,
- uint32_t localDeviceIndex,
- uint32_t remoteDeviceIndex,
- VkPeerMemoryFeatureFlags* pPeerMemoryFeatures);
-
-void PostCallRecordGetDeviceGroupPeerMemoryFeaturesKHR(
- VkDevice device,
- uint32_t heapIndex,
- uint32_t localDeviceIndex,
- uint32_t remoteDeviceIndex,
- VkPeerMemoryFeatureFlags* pPeerMemoryFeatures);
-
-void PreCallRecordCmdSetDeviceMaskKHR(
- VkCommandBuffer commandBuffer,
- uint32_t deviceMask);
-
-void PostCallRecordCmdSetDeviceMaskKHR(
- VkCommandBuffer commandBuffer,
- uint32_t deviceMask);
-
-void PreCallRecordCmdDispatchBaseKHR(
- VkCommandBuffer commandBuffer,
- uint32_t baseGroupX,
- uint32_t baseGroupY,
- uint32_t baseGroupZ,
- uint32_t groupCountX,
- uint32_t groupCountY,
- uint32_t groupCountZ);
-
-void PostCallRecordCmdDispatchBaseKHR(
- VkCommandBuffer commandBuffer,
- uint32_t baseGroupX,
- uint32_t baseGroupY,
- uint32_t baseGroupZ,
- uint32_t groupCountX,
- uint32_t groupCountY,
- uint32_t groupCountZ);
-
-void PreCallRecordTrimCommandPoolKHR(
- VkDevice device,
- VkCommandPool commandPool,
- VkCommandPoolTrimFlags flags);
-
-void PostCallRecordTrimCommandPoolKHR(
- VkDevice device,
- VkCommandPool commandPool,
- VkCommandPoolTrimFlags flags);
-
-void PreCallRecordEnumeratePhysicalDeviceGroupsKHR(
- VkInstance instance,
- uint32_t* pPhysicalDeviceGroupCount,
- VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties);
-
-void PostCallRecordEnumeratePhysicalDeviceGroupsKHR(
- VkInstance instance,
- uint32_t* pPhysicalDeviceGroupCount,
- VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties,
- VkResult result);
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-void PreCallRecordGetMemoryWin32HandleKHR(
- VkDevice device,
- const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo,
- HANDLE* pHandle);
-
-void PostCallRecordGetMemoryWin32HandleKHR(
- VkDevice device,
- const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo,
- HANDLE* pHandle,
- VkResult result);
-
-void PreCallRecordGetMemoryWin32HandlePropertiesKHR(
- VkDevice device,
- VkExternalMemoryHandleTypeFlagBits handleType,
- HANDLE handle,
- VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties);
-
-void PostCallRecordGetMemoryWin32HandlePropertiesKHR(
- VkDevice device,
- VkExternalMemoryHandleTypeFlagBits handleType,
- HANDLE handle,
- VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties,
- VkResult result);
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-void PreCallRecordGetMemoryFdKHR(
- VkDevice device,
- const VkMemoryGetFdInfoKHR* pGetFdInfo,
- int* pFd);
-
-void PostCallRecordGetMemoryFdKHR(
- VkDevice device,
- const VkMemoryGetFdInfoKHR* pGetFdInfo,
- int* pFd,
- VkResult result);
-
-void PreCallRecordGetMemoryFdPropertiesKHR(
- VkDevice device,
- VkExternalMemoryHandleTypeFlagBits handleType,
- int fd,
- VkMemoryFdPropertiesKHR* pMemoryFdProperties);
-
-void PostCallRecordGetMemoryFdPropertiesKHR(
- VkDevice device,
- VkExternalMemoryHandleTypeFlagBits handleType,
- int fd,
- VkMemoryFdPropertiesKHR* pMemoryFdProperties,
- VkResult result);
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-void PreCallRecordImportSemaphoreWin32HandleKHR(
- VkDevice device,
- const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo);
-
-void PostCallRecordImportSemaphoreWin32HandleKHR(
- VkDevice device,
- const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo,
- VkResult result);
-
-void PreCallRecordGetSemaphoreWin32HandleKHR(
- VkDevice device,
- const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo,
- HANDLE* pHandle);
-
-void PostCallRecordGetSemaphoreWin32HandleKHR(
- VkDevice device,
- const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo,
- HANDLE* pHandle,
- VkResult result);
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-void PreCallRecordImportSemaphoreFdKHR(
- VkDevice device,
- const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo);
-
-void PostCallRecordImportSemaphoreFdKHR(
- VkDevice device,
- const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo,
- VkResult result);
-
-void PreCallRecordGetSemaphoreFdKHR(
- VkDevice device,
- const VkSemaphoreGetFdInfoKHR* pGetFdInfo,
- int* pFd);
-
-void PostCallRecordGetSemaphoreFdKHR(
- VkDevice device,
- const VkSemaphoreGetFdInfoKHR* pGetFdInfo,
- int* pFd,
- VkResult result);
-
-void PreCallRecordCmdPushDescriptorSetKHR(
- VkCommandBuffer commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipelineLayout layout,
- uint32_t set,
- uint32_t descriptorWriteCount,
- const VkWriteDescriptorSet* pDescriptorWrites);
-
-void PostCallRecordCmdPushDescriptorSetKHR(
- VkCommandBuffer commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipelineLayout layout,
- uint32_t set,
- uint32_t descriptorWriteCount,
- const VkWriteDescriptorSet* pDescriptorWrites);
-
-void PreCallRecordCmdPushDescriptorSetWithTemplateKHR(
- VkCommandBuffer commandBuffer,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- VkPipelineLayout layout,
- uint32_t set,
- const void* pData);
-
-void PostCallRecordCmdPushDescriptorSetWithTemplateKHR(
- VkCommandBuffer commandBuffer,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- VkPipelineLayout layout,
- uint32_t set,
- const void* pData);
-
-void PreCallRecordCreateDescriptorUpdateTemplateKHR(
- VkDevice device,
- const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate);
-
-void PostCallRecordCreateDescriptorUpdateTemplateKHR(
- VkDevice device,
- const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate,
- VkResult result);
-
-void PreCallRecordDestroyDescriptorUpdateTemplateKHR(
- VkDevice device,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const VkAllocationCallbacks* pAllocator);
-
-void PostCallRecordDestroyDescriptorUpdateTemplateKHR(
- VkDevice device,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const VkAllocationCallbacks* pAllocator);
-
-void PreCallRecordUpdateDescriptorSetWithTemplateKHR(
- VkDevice device,
- VkDescriptorSet descriptorSet,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const void* pData);
-
-void PostCallRecordUpdateDescriptorSetWithTemplateKHR(
- VkDevice device,
- VkDescriptorSet descriptorSet,
- VkDescriptorUpdateTemplate descriptorUpdateTemplate,
- const void* pData);
-
-void PreCallRecordCreateRenderPass2KHR(
- VkDevice device,
- const VkRenderPassCreateInfo2KHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkRenderPass* pRenderPass);
-
-void PostCallRecordCreateRenderPass2KHR(
- VkDevice device,
- const VkRenderPassCreateInfo2KHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkRenderPass* pRenderPass,
- VkResult result);
-
-void PreCallRecordCmdBeginRenderPass2KHR(
- VkCommandBuffer commandBuffer,
- const VkRenderPassBeginInfo* pRenderPassBegin,
- const VkSubpassBeginInfoKHR* pSubpassBeginInfo);
-
-void PostCallRecordCmdBeginRenderPass2KHR(
- VkCommandBuffer commandBuffer,
- const VkRenderPassBeginInfo* pRenderPassBegin,
- const VkSubpassBeginInfoKHR* pSubpassBeginInfo);
-
-void PreCallRecordCmdNextSubpass2KHR(
- VkCommandBuffer commandBuffer,
- const VkSubpassBeginInfoKHR* pSubpassBeginInfo,
- const VkSubpassEndInfoKHR* pSubpassEndInfo);
-
-void PostCallRecordCmdNextSubpass2KHR(
- VkCommandBuffer commandBuffer,
- const VkSubpassBeginInfoKHR* pSubpassBeginInfo,
- const VkSubpassEndInfoKHR* pSubpassEndInfo);
-
-void PreCallRecordCmdEndRenderPass2KHR(
- VkCommandBuffer commandBuffer,
- const VkSubpassEndInfoKHR* pSubpassEndInfo);
-
-void PostCallRecordCmdEndRenderPass2KHR(
- VkCommandBuffer commandBuffer,
- const VkSubpassEndInfoKHR* pSubpassEndInfo);
-
-void PreCallRecordGetSwapchainStatusKHR(
- VkDevice device,
- VkSwapchainKHR swapchain);
-
-void PostCallRecordGetSwapchainStatusKHR(
- VkDevice device,
- VkSwapchainKHR swapchain,
- VkResult result);
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-void PreCallRecordImportFenceWin32HandleKHR(
- VkDevice device,
- const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo);
-
-void PostCallRecordImportFenceWin32HandleKHR(
- VkDevice device,
- const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo,
- VkResult result);
-
-void PreCallRecordGetFenceWin32HandleKHR(
- VkDevice device,
- const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo,
- HANDLE* pHandle);
-
-void PostCallRecordGetFenceWin32HandleKHR(
- VkDevice device,
- const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo,
- HANDLE* pHandle,
- VkResult result);
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-void PreCallRecordImportFenceFdKHR(
- VkDevice device,
- const VkImportFenceFdInfoKHR* pImportFenceFdInfo);
-
-void PostCallRecordImportFenceFdKHR(
- VkDevice device,
- const VkImportFenceFdInfoKHR* pImportFenceFdInfo,
- VkResult result);
-
-void PreCallRecordGetFenceFdKHR(
- VkDevice device,
- const VkFenceGetFdInfoKHR* pGetFdInfo,
- int* pFd);
-
-void PostCallRecordGetFenceFdKHR(
- VkDevice device,
- const VkFenceGetFdInfoKHR* pGetFdInfo,
- int* pFd,
- VkResult result);
-
-void PreCallRecordGetDisplayModeProperties2KHR(
- VkPhysicalDevice physicalDevice,
- VkDisplayKHR display,
- uint32_t* pPropertyCount,
- VkDisplayModeProperties2KHR* pProperties);
-
-void PostCallRecordGetDisplayModeProperties2KHR(
- VkPhysicalDevice physicalDevice,
- VkDisplayKHR display,
- uint32_t* pPropertyCount,
- VkDisplayModeProperties2KHR* pProperties,
- VkResult result);
-
-void PreCallRecordGetImageMemoryRequirements2KHR(
- VkDevice device,
- const VkImageMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements);
-
-void PostCallRecordGetImageMemoryRequirements2KHR(
- VkDevice device,
- const VkImageMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements);
-
-void PreCallRecordGetBufferMemoryRequirements2KHR(
- VkDevice device,
- const VkBufferMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements);
-
-void PostCallRecordGetBufferMemoryRequirements2KHR(
- VkDevice device,
- const VkBufferMemoryRequirementsInfo2* pInfo,
- VkMemoryRequirements2* pMemoryRequirements);
-
-void PreCallRecordGetImageSparseMemoryRequirements2KHR(
- VkDevice device,
- const VkImageSparseMemoryRequirementsInfo2* pInfo,
- uint32_t* pSparseMemoryRequirementCount,
- VkSparseImageMemoryRequirements2* pSparseMemoryRequirements);
-
-void PostCallRecordGetImageSparseMemoryRequirements2KHR(
- VkDevice device,
- const VkImageSparseMemoryRequirementsInfo2* pInfo,
- uint32_t* pSparseMemoryRequirementCount,
- VkSparseImageMemoryRequirements2* pSparseMemoryRequirements);
-
-void PreCallRecordCreateSamplerYcbcrConversionKHR(
- VkDevice device,
- const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSamplerYcbcrConversion* pYcbcrConversion);
-
-void PostCallRecordCreateSamplerYcbcrConversionKHR(
- VkDevice device,
- const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSamplerYcbcrConversion* pYcbcrConversion,
- VkResult result);
-
-void PreCallRecordDestroySamplerYcbcrConversionKHR(
- VkDevice device,
- VkSamplerYcbcrConversion ycbcrConversion,
- const VkAllocationCallbacks* pAllocator);
-
-void PostCallRecordDestroySamplerYcbcrConversionKHR(
- VkDevice device,
- VkSamplerYcbcrConversion ycbcrConversion,
- const VkAllocationCallbacks* pAllocator);
-
-void PreCallRecordBindBufferMemory2KHR(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindBufferMemoryInfo* pBindInfos);
-
-void PostCallRecordBindBufferMemory2KHR(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindBufferMemoryInfo* pBindInfos,
- VkResult result);
-
-void PreCallRecordBindImageMemory2KHR(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindImageMemoryInfo* pBindInfos);
-
-void PostCallRecordBindImageMemory2KHR(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindImageMemoryInfo* pBindInfos,
- VkResult result);
-
-void PreCallRecordGetDescriptorSetLayoutSupportKHR(
- VkDevice device,
- const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
- VkDescriptorSetLayoutSupport* pSupport);
-
-void PostCallRecordGetDescriptorSetLayoutSupportKHR(
- VkDevice device,
- const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
- VkDescriptorSetLayoutSupport* pSupport);
-
-void PreCallRecordCmdDrawIndirectCountKHR(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride);
-
-void PostCallRecordCmdDrawIndirectCountKHR(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride);
-
-void PreCallRecordCmdDrawIndexedIndirectCountKHR(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride);
-
-void PostCallRecordCmdDrawIndexedIndirectCountKHR(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride);
-
-void PreCallRecordGetPipelineExecutablePropertiesKHR(
- VkDevice device,
- const VkPipelineInfoKHR* pPipelineInfo,
- uint32_t* pExecutableCount,
- VkPipelineExecutablePropertiesKHR* pProperties);
-
-void PostCallRecordGetPipelineExecutablePropertiesKHR(
- VkDevice device,
- const VkPipelineInfoKHR* pPipelineInfo,
- uint32_t* pExecutableCount,
- VkPipelineExecutablePropertiesKHR* pProperties,
- VkResult result);
-
-void PreCallRecordGetPipelineExecutableStatisticsKHR(
- VkDevice device,
- const VkPipelineExecutableInfoKHR* pExecutableInfo,
- uint32_t* pStatisticCount,
- VkPipelineExecutableStatisticKHR* pStatistics);
-
-void PostCallRecordGetPipelineExecutableStatisticsKHR(
- VkDevice device,
- const VkPipelineExecutableInfoKHR* pExecutableInfo,
- uint32_t* pStatisticCount,
- VkPipelineExecutableStatisticKHR* pStatistics,
- VkResult result);
-
-void PreCallRecordGetPipelineExecutableInternalRepresentationsKHR(
- VkDevice device,
- const VkPipelineExecutableInfoKHR* pExecutableInfo,
- uint32_t* pInternalRepresentationCount,
- VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations);
-
-void PostCallRecordGetPipelineExecutableInternalRepresentationsKHR(
- VkDevice device,
- const VkPipelineExecutableInfoKHR* pExecutableInfo,
- uint32_t* pInternalRepresentationCount,
- VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations,
- VkResult result);
-
-void PreCallRecordCreateDebugReportCallbackEXT(
- VkInstance instance,
- const VkDebugReportCallbackCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDebugReportCallbackEXT* pCallback);
-
-void PostCallRecordCreateDebugReportCallbackEXT(
- VkInstance instance,
- const VkDebugReportCallbackCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDebugReportCallbackEXT* pCallback,
- VkResult result);
-
-void PreCallRecordDestroyDebugReportCallbackEXT(
- VkInstance instance,
- VkDebugReportCallbackEXT callback,
- const VkAllocationCallbacks* pAllocator);
-
-void PostCallRecordDestroyDebugReportCallbackEXT(
- VkInstance instance,
- VkDebugReportCallbackEXT callback,
- const VkAllocationCallbacks* pAllocator);
-
-void PreCallRecordDebugReportMessageEXT(
- VkInstance instance,
- VkDebugReportFlagsEXT flags,
- VkDebugReportObjectTypeEXT objectType,
- uint64_t object,
- size_t location,
- int32_t messageCode,
- const char* pLayerPrefix,
- const char* pMessage);
-
-void PostCallRecordDebugReportMessageEXT(
- VkInstance instance,
- VkDebugReportFlagsEXT flags,
- VkDebugReportObjectTypeEXT objectType,
- uint64_t object,
- size_t location,
- int32_t messageCode,
- const char* pLayerPrefix,
- const char* pMessage);
-// TODO - not wrapping EXT function vkDebugMarkerSetObjectTagEXT
-// TODO - not wrapping EXT function vkDebugMarkerSetObjectNameEXT
-// TODO - not wrapping EXT function vkCmdDebugMarkerBeginEXT
-// TODO - not wrapping EXT function vkCmdDebugMarkerEndEXT
-// TODO - not wrapping EXT function vkCmdDebugMarkerInsertEXT
-
-void PreCallRecordCmdBindTransformFeedbackBuffersEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstBinding,
- uint32_t bindingCount,
- const VkBuffer* pBuffers,
- const VkDeviceSize* pOffsets,
- const VkDeviceSize* pSizes);
-
-void PostCallRecordCmdBindTransformFeedbackBuffersEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstBinding,
- uint32_t bindingCount,
- const VkBuffer* pBuffers,
- const VkDeviceSize* pOffsets,
- const VkDeviceSize* pSizes);
-
-void PreCallRecordCmdBeginTransformFeedbackEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstCounterBuffer,
- uint32_t counterBufferCount,
- const VkBuffer* pCounterBuffers,
- const VkDeviceSize* pCounterBufferOffsets);
-
-void PostCallRecordCmdBeginTransformFeedbackEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstCounterBuffer,
- uint32_t counterBufferCount,
- const VkBuffer* pCounterBuffers,
- const VkDeviceSize* pCounterBufferOffsets);
-
-void PreCallRecordCmdEndTransformFeedbackEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstCounterBuffer,
- uint32_t counterBufferCount,
- const VkBuffer* pCounterBuffers,
- const VkDeviceSize* pCounterBufferOffsets);
-
-void PostCallRecordCmdEndTransformFeedbackEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstCounterBuffer,
- uint32_t counterBufferCount,
- const VkBuffer* pCounterBuffers,
- const VkDeviceSize* pCounterBufferOffsets);
-
-void PreCallRecordCmdBeginQueryIndexedEXT(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query,
- VkQueryControlFlags flags,
- uint32_t index);
-
-void PostCallRecordCmdBeginQueryIndexedEXT(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query,
- VkQueryControlFlags flags,
- uint32_t index);
-
-void PreCallRecordCmdEndQueryIndexedEXT(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query,
- uint32_t index);
-
-void PostCallRecordCmdEndQueryIndexedEXT(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t query,
- uint32_t index);
-
-void PreCallRecordCmdDrawIndirectByteCountEXT(
- VkCommandBuffer commandBuffer,
- uint32_t instanceCount,
- uint32_t firstInstance,
- VkBuffer counterBuffer,
- VkDeviceSize counterBufferOffset,
- uint32_t counterOffset,
- uint32_t vertexStride);
-
-void PostCallRecordCmdDrawIndirectByteCountEXT(
- VkCommandBuffer commandBuffer,
- uint32_t instanceCount,
- uint32_t firstInstance,
- VkBuffer counterBuffer,
- VkDeviceSize counterBufferOffset,
- uint32_t counterOffset,
- uint32_t vertexStride);
-
-void PreCallRecordGetImageViewHandleNVX(
- VkDevice device,
- const VkImageViewHandleInfoNVX* pInfo);
-
-void PostCallRecordGetImageViewHandleNVX(
- VkDevice device,
- const VkImageViewHandleInfoNVX* pInfo);
-
-void PreCallRecordCmdDrawIndirectCountAMD(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride);
-
-void PostCallRecordCmdDrawIndirectCountAMD(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride);
-
-void PreCallRecordCmdDrawIndexedIndirectCountAMD(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride);
-
-void PostCallRecordCmdDrawIndexedIndirectCountAMD(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride);
-
-void PreCallRecordGetShaderInfoAMD(
- VkDevice device,
- VkPipeline pipeline,
- VkShaderStageFlagBits shaderStage,
- VkShaderInfoTypeAMD infoType,
- size_t* pInfoSize,
- void* pInfo);
-
-void PostCallRecordGetShaderInfoAMD(
- VkDevice device,
- VkPipeline pipeline,
- VkShaderStageFlagBits shaderStage,
- VkShaderInfoTypeAMD infoType,
- size_t* pInfoSize,
- void* pInfo,
- VkResult result);
-
-#ifdef VK_USE_PLATFORM_GGP
-
-void PreCallRecordCreateStreamDescriptorSurfaceGGP(
- VkInstance instance,
- const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-
-void PostCallRecordCreateStreamDescriptorSurfaceGGP(
- VkInstance instance,
- const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result);
-#endif // VK_USE_PLATFORM_GGP
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-void PreCallRecordGetMemoryWin32HandleNV(
- VkDevice device,
- VkDeviceMemory memory,
- VkExternalMemoryHandleTypeFlagsNV handleType,
- HANDLE* pHandle);
-
-void PostCallRecordGetMemoryWin32HandleNV(
- VkDevice device,
- VkDeviceMemory memory,
- VkExternalMemoryHandleTypeFlagsNV handleType,
- HANDLE* pHandle,
- VkResult result);
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_VI_NN
-
-void PreCallRecordCreateViSurfaceNN(
- VkInstance instance,
- const VkViSurfaceCreateInfoNN* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-
-void PostCallRecordCreateViSurfaceNN(
- VkInstance instance,
- const VkViSurfaceCreateInfoNN* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result);
-#endif // VK_USE_PLATFORM_VI_NN
-
-void PreCallRecordCmdBeginConditionalRenderingEXT(
- VkCommandBuffer commandBuffer,
- const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin);
-
-void PostCallRecordCmdBeginConditionalRenderingEXT(
- VkCommandBuffer commandBuffer,
- const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin);
-
-void PreCallRecordCmdEndConditionalRenderingEXT(
- VkCommandBuffer commandBuffer);
-
-void PostCallRecordCmdEndConditionalRenderingEXT(
- VkCommandBuffer commandBuffer);
-
-void PreCallRecordCmdProcessCommandsNVX(
- VkCommandBuffer commandBuffer,
- const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo);
-
-void PostCallRecordCmdProcessCommandsNVX(
- VkCommandBuffer commandBuffer,
- const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo);
-
-void PreCallRecordCmdReserveSpaceForCommandsNVX(
- VkCommandBuffer commandBuffer,
- const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo);
-
-void PostCallRecordCmdReserveSpaceForCommandsNVX(
- VkCommandBuffer commandBuffer,
- const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo);
-
-void PreCallRecordCreateIndirectCommandsLayoutNVX(
- VkDevice device,
- const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout);
-
-void PostCallRecordCreateIndirectCommandsLayoutNVX(
- VkDevice device,
- const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout,
- VkResult result);
-
-void PreCallRecordDestroyIndirectCommandsLayoutNVX(
- VkDevice device,
- VkIndirectCommandsLayoutNVX indirectCommandsLayout,
- const VkAllocationCallbacks* pAllocator);
-
-void PostCallRecordDestroyIndirectCommandsLayoutNVX(
- VkDevice device,
- VkIndirectCommandsLayoutNVX indirectCommandsLayout,
- const VkAllocationCallbacks* pAllocator);
-
-void PreCallRecordCreateObjectTableNVX(
- VkDevice device,
- const VkObjectTableCreateInfoNVX* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkObjectTableNVX* pObjectTable);
-
-void PostCallRecordCreateObjectTableNVX(
- VkDevice device,
- const VkObjectTableCreateInfoNVX* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkObjectTableNVX* pObjectTable,
- VkResult result);
-
-void PreCallRecordDestroyObjectTableNVX(
- VkDevice device,
- VkObjectTableNVX objectTable,
- const VkAllocationCallbacks* pAllocator);
-
-void PostCallRecordDestroyObjectTableNVX(
- VkDevice device,
- VkObjectTableNVX objectTable,
- const VkAllocationCallbacks* pAllocator);
-
-void PreCallRecordRegisterObjectsNVX(
- VkDevice device,
- VkObjectTableNVX objectTable,
- uint32_t objectCount,
- const VkObjectTableEntryNVX* const* ppObjectTableEntries,
- const uint32_t* pObjectIndices);
-
-void PostCallRecordRegisterObjectsNVX(
- VkDevice device,
- VkObjectTableNVX objectTable,
- uint32_t objectCount,
- const VkObjectTableEntryNVX* const* ppObjectTableEntries,
- const uint32_t* pObjectIndices,
- VkResult result);
-
-void PreCallRecordUnregisterObjectsNVX(
- VkDevice device,
- VkObjectTableNVX objectTable,
- uint32_t objectCount,
- const VkObjectEntryTypeNVX* pObjectEntryTypes,
- const uint32_t* pObjectIndices);
-
-void PostCallRecordUnregisterObjectsNVX(
- VkDevice device,
- VkObjectTableNVX objectTable,
- uint32_t objectCount,
- const VkObjectEntryTypeNVX* pObjectEntryTypes,
- const uint32_t* pObjectIndices,
- VkResult result);
-
-void PreCallRecordCmdSetViewportWScalingNV(
- VkCommandBuffer commandBuffer,
- uint32_t firstViewport,
- uint32_t viewportCount,
- const VkViewportWScalingNV* pViewportWScalings);
-
-void PostCallRecordCmdSetViewportWScalingNV(
- VkCommandBuffer commandBuffer,
- uint32_t firstViewport,
- uint32_t viewportCount,
- const VkViewportWScalingNV* pViewportWScalings);
-
-void PreCallRecordReleaseDisplayEXT(
- VkPhysicalDevice physicalDevice,
- VkDisplayKHR display);
-
-void PostCallRecordReleaseDisplayEXT(
- VkPhysicalDevice physicalDevice,
- VkDisplayKHR display,
- VkResult result);
-
-#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
-
-void PreCallRecordAcquireXlibDisplayEXT(
- VkPhysicalDevice physicalDevice,
- Display* dpy,
- VkDisplayKHR display);
-
-void PostCallRecordAcquireXlibDisplayEXT(
- VkPhysicalDevice physicalDevice,
- Display* dpy,
- VkDisplayKHR display,
- VkResult result);
-#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
-
-void PreCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- VkSurfaceCapabilities2EXT* pSurfaceCapabilities);
-
-void PostCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- VkSurfaceCapabilities2EXT* pSurfaceCapabilities,
- VkResult result);
-
-void PreCallRecordDisplayPowerControlEXT(
- VkDevice device,
- VkDisplayKHR display,
- const VkDisplayPowerInfoEXT* pDisplayPowerInfo);
-
-void PostCallRecordDisplayPowerControlEXT(
- VkDevice device,
- VkDisplayKHR display,
- const VkDisplayPowerInfoEXT* pDisplayPowerInfo,
- VkResult result);
-
-void PreCallRecordRegisterDeviceEventEXT(
- VkDevice device,
- const VkDeviceEventInfoEXT* pDeviceEventInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFence* pFence);
-
-void PostCallRecordRegisterDeviceEventEXT(
- VkDevice device,
- const VkDeviceEventInfoEXT* pDeviceEventInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFence* pFence,
- VkResult result);
-
-void PreCallRecordRegisterDisplayEventEXT(
- VkDevice device,
- VkDisplayKHR display,
- const VkDisplayEventInfoEXT* pDisplayEventInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFence* pFence);
-
-void PostCallRecordRegisterDisplayEventEXT(
- VkDevice device,
- VkDisplayKHR display,
- const VkDisplayEventInfoEXT* pDisplayEventInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFence* pFence,
- VkResult result);
-
-void PreCallRecordGetSwapchainCounterEXT(
- VkDevice device,
- VkSwapchainKHR swapchain,
- VkSurfaceCounterFlagBitsEXT counter,
- uint64_t* pCounterValue);
-
-void PostCallRecordGetSwapchainCounterEXT(
- VkDevice device,
- VkSwapchainKHR swapchain,
- VkSurfaceCounterFlagBitsEXT counter,
- uint64_t* pCounterValue,
- VkResult result);
-
-void PreCallRecordGetRefreshCycleDurationGOOGLE(
- VkDevice device,
- VkSwapchainKHR swapchain,
- VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties);
-
-void PostCallRecordGetRefreshCycleDurationGOOGLE(
- VkDevice device,
- VkSwapchainKHR swapchain,
- VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties,
- VkResult result);
-
-void PreCallRecordGetPastPresentationTimingGOOGLE(
- VkDevice device,
- VkSwapchainKHR swapchain,
- uint32_t* pPresentationTimingCount,
- VkPastPresentationTimingGOOGLE* pPresentationTimings);
-
-void PostCallRecordGetPastPresentationTimingGOOGLE(
- VkDevice device,
- VkSwapchainKHR swapchain,
- uint32_t* pPresentationTimingCount,
- VkPastPresentationTimingGOOGLE* pPresentationTimings,
- VkResult result);
-
-void PreCallRecordCmdSetDiscardRectangleEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstDiscardRectangle,
- uint32_t discardRectangleCount,
- const VkRect2D* pDiscardRectangles);
-
-void PostCallRecordCmdSetDiscardRectangleEXT(
- VkCommandBuffer commandBuffer,
- uint32_t firstDiscardRectangle,
- uint32_t discardRectangleCount,
- const VkRect2D* pDiscardRectangles);
-
-void PreCallRecordSetHdrMetadataEXT(
- VkDevice device,
- uint32_t swapchainCount,
- const VkSwapchainKHR* pSwapchains,
- const VkHdrMetadataEXT* pMetadata);
-
-void PostCallRecordSetHdrMetadataEXT(
- VkDevice device,
- uint32_t swapchainCount,
- const VkSwapchainKHR* pSwapchains,
- const VkHdrMetadataEXT* pMetadata);
-
-#ifdef VK_USE_PLATFORM_IOS_MVK
-
-void PreCallRecordCreateIOSSurfaceMVK(
- VkInstance instance,
- const VkIOSSurfaceCreateInfoMVK* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-
-void PostCallRecordCreateIOSSurfaceMVK(
- VkInstance instance,
- const VkIOSSurfaceCreateInfoMVK* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result);
-#endif // VK_USE_PLATFORM_IOS_MVK
-
-#ifdef VK_USE_PLATFORM_MACOS_MVK
-
-void PreCallRecordCreateMacOSSurfaceMVK(
- VkInstance instance,
- const VkMacOSSurfaceCreateInfoMVK* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-
-void PostCallRecordCreateMacOSSurfaceMVK(
- VkInstance instance,
- const VkMacOSSurfaceCreateInfoMVK* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result);
-#endif // VK_USE_PLATFORM_MACOS_MVK
-// TODO - not wrapping EXT function vkSetDebugUtilsObjectNameEXT
-// TODO - not wrapping EXT function vkSetDebugUtilsObjectTagEXT
-
-void PreCallRecordQueueBeginDebugUtilsLabelEXT(
- VkQueue queue,
- const VkDebugUtilsLabelEXT* pLabelInfo);
-
-void PostCallRecordQueueBeginDebugUtilsLabelEXT(
- VkQueue queue,
- const VkDebugUtilsLabelEXT* pLabelInfo);
-
-void PreCallRecordQueueEndDebugUtilsLabelEXT(
- VkQueue queue);
-
-void PostCallRecordQueueEndDebugUtilsLabelEXT(
- VkQueue queue);
-
-void PreCallRecordQueueInsertDebugUtilsLabelEXT(
- VkQueue queue,
- const VkDebugUtilsLabelEXT* pLabelInfo);
-
-void PostCallRecordQueueInsertDebugUtilsLabelEXT(
- VkQueue queue,
- const VkDebugUtilsLabelEXT* pLabelInfo);
-
-void PreCallRecordCmdBeginDebugUtilsLabelEXT(
- VkCommandBuffer commandBuffer,
- const VkDebugUtilsLabelEXT* pLabelInfo);
-
-void PostCallRecordCmdBeginDebugUtilsLabelEXT(
- VkCommandBuffer commandBuffer,
- const VkDebugUtilsLabelEXT* pLabelInfo);
-
-void PreCallRecordCmdEndDebugUtilsLabelEXT(
- VkCommandBuffer commandBuffer);
-
-void PostCallRecordCmdEndDebugUtilsLabelEXT(
- VkCommandBuffer commandBuffer);
-
-void PreCallRecordCmdInsertDebugUtilsLabelEXT(
- VkCommandBuffer commandBuffer,
- const VkDebugUtilsLabelEXT* pLabelInfo);
-
-void PostCallRecordCmdInsertDebugUtilsLabelEXT(
- VkCommandBuffer commandBuffer,
- const VkDebugUtilsLabelEXT* pLabelInfo);
-
-void PreCallRecordCreateDebugUtilsMessengerEXT(
- VkInstance instance,
- const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDebugUtilsMessengerEXT* pMessenger);
-
-void PostCallRecordCreateDebugUtilsMessengerEXT(
- VkInstance instance,
- const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDebugUtilsMessengerEXT* pMessenger,
- VkResult result);
-
-void PreCallRecordDestroyDebugUtilsMessengerEXT(
- VkInstance instance,
- VkDebugUtilsMessengerEXT messenger,
- const VkAllocationCallbacks* pAllocator);
-
-void PostCallRecordDestroyDebugUtilsMessengerEXT(
- VkInstance instance,
- VkDebugUtilsMessengerEXT messenger,
- const VkAllocationCallbacks* pAllocator);
-
-void PreCallRecordSubmitDebugUtilsMessageEXT(
- VkInstance instance,
- VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
- VkDebugUtilsMessageTypeFlagsEXT messageTypes,
- const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData);
-
-void PostCallRecordSubmitDebugUtilsMessageEXT(
- VkInstance instance,
- VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
- VkDebugUtilsMessageTypeFlagsEXT messageTypes,
- const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData);
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-
-void PreCallRecordGetAndroidHardwareBufferPropertiesANDROID(
- VkDevice device,
- const struct AHardwareBuffer* buffer,
- VkAndroidHardwareBufferPropertiesANDROID* pProperties);
-
-void PostCallRecordGetAndroidHardwareBufferPropertiesANDROID(
- VkDevice device,
- const struct AHardwareBuffer* buffer,
- VkAndroidHardwareBufferPropertiesANDROID* pProperties,
- VkResult result);
-
-void PreCallRecordGetMemoryAndroidHardwareBufferANDROID(
- VkDevice device,
- const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo,
- struct AHardwareBuffer** pBuffer);
-
-void PostCallRecordGetMemoryAndroidHardwareBufferANDROID(
- VkDevice device,
- const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo,
- struct AHardwareBuffer** pBuffer,
- VkResult result);
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-
-void PreCallRecordCmdSetSampleLocationsEXT(
- VkCommandBuffer commandBuffer,
- const VkSampleLocationsInfoEXT* pSampleLocationsInfo);
-
-void PostCallRecordCmdSetSampleLocationsEXT(
- VkCommandBuffer commandBuffer,
- const VkSampleLocationsInfoEXT* pSampleLocationsInfo);
-
-void PreCallRecordGetImageDrmFormatModifierPropertiesEXT(
- VkDevice device,
- VkImage image,
- VkImageDrmFormatModifierPropertiesEXT* pProperties);
-
-void PostCallRecordGetImageDrmFormatModifierPropertiesEXT(
- VkDevice device,
- VkImage image,
- VkImageDrmFormatModifierPropertiesEXT* pProperties,
- VkResult result);
-
-void PreCallRecordCreateValidationCacheEXT(
- VkDevice device,
- const VkValidationCacheCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkValidationCacheEXT* pValidationCache);
-
-void PostCallRecordCreateValidationCacheEXT(
- VkDevice device,
- const VkValidationCacheCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkValidationCacheEXT* pValidationCache,
- VkResult result);
-
-void PreCallRecordDestroyValidationCacheEXT(
- VkDevice device,
- VkValidationCacheEXT validationCache,
- const VkAllocationCallbacks* pAllocator);
-
-void PostCallRecordDestroyValidationCacheEXT(
- VkDevice device,
- VkValidationCacheEXT validationCache,
- const VkAllocationCallbacks* pAllocator);
-
-void PreCallRecordMergeValidationCachesEXT(
- VkDevice device,
- VkValidationCacheEXT dstCache,
- uint32_t srcCacheCount,
- const VkValidationCacheEXT* pSrcCaches);
-
-void PostCallRecordMergeValidationCachesEXT(
- VkDevice device,
- VkValidationCacheEXT dstCache,
- uint32_t srcCacheCount,
- const VkValidationCacheEXT* pSrcCaches,
- VkResult result);
-
-void PreCallRecordGetValidationCacheDataEXT(
- VkDevice device,
- VkValidationCacheEXT validationCache,
- size_t* pDataSize,
- void* pData);
-
-void PostCallRecordGetValidationCacheDataEXT(
- VkDevice device,
- VkValidationCacheEXT validationCache,
- size_t* pDataSize,
- void* pData,
- VkResult result);
-
-void PreCallRecordCmdBindShadingRateImageNV(
- VkCommandBuffer commandBuffer,
- VkImageView imageView,
- VkImageLayout imageLayout);
-
-void PostCallRecordCmdBindShadingRateImageNV(
- VkCommandBuffer commandBuffer,
- VkImageView imageView,
- VkImageLayout imageLayout);
-
-void PreCallRecordCmdSetViewportShadingRatePaletteNV(
- VkCommandBuffer commandBuffer,
- uint32_t firstViewport,
- uint32_t viewportCount,
- const VkShadingRatePaletteNV* pShadingRatePalettes);
-
-void PostCallRecordCmdSetViewportShadingRatePaletteNV(
- VkCommandBuffer commandBuffer,
- uint32_t firstViewport,
- uint32_t viewportCount,
- const VkShadingRatePaletteNV* pShadingRatePalettes);
-
-void PreCallRecordCmdSetCoarseSampleOrderNV(
- VkCommandBuffer commandBuffer,
- VkCoarseSampleOrderTypeNV sampleOrderType,
- uint32_t customSampleOrderCount,
- const VkCoarseSampleOrderCustomNV* pCustomSampleOrders);
-
-void PostCallRecordCmdSetCoarseSampleOrderNV(
- VkCommandBuffer commandBuffer,
- VkCoarseSampleOrderTypeNV sampleOrderType,
- uint32_t customSampleOrderCount,
- const VkCoarseSampleOrderCustomNV* pCustomSampleOrders);
-
-void PreCallRecordCreateAccelerationStructureNV(
- VkDevice device,
- const VkAccelerationStructureCreateInfoNV* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkAccelerationStructureNV* pAccelerationStructure);
-
-void PostCallRecordCreateAccelerationStructureNV(
- VkDevice device,
- const VkAccelerationStructureCreateInfoNV* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkAccelerationStructureNV* pAccelerationStructure,
- VkResult result);
-
-void PreCallRecordDestroyAccelerationStructureNV(
- VkDevice device,
- VkAccelerationStructureNV accelerationStructure,
- const VkAllocationCallbacks* pAllocator);
-
-void PostCallRecordDestroyAccelerationStructureNV(
- VkDevice device,
- VkAccelerationStructureNV accelerationStructure,
- const VkAllocationCallbacks* pAllocator);
-
-void PreCallRecordGetAccelerationStructureMemoryRequirementsNV(
- VkDevice device,
- const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo,
- VkMemoryRequirements2KHR* pMemoryRequirements);
-
-void PostCallRecordGetAccelerationStructureMemoryRequirementsNV(
- VkDevice device,
- const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo,
- VkMemoryRequirements2KHR* pMemoryRequirements);
-
-void PreCallRecordBindAccelerationStructureMemoryNV(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindAccelerationStructureMemoryInfoNV* pBindInfos);
-
-void PostCallRecordBindAccelerationStructureMemoryNV(
- VkDevice device,
- uint32_t bindInfoCount,
- const VkBindAccelerationStructureMemoryInfoNV* pBindInfos,
- VkResult result);
-
-void PreCallRecordCmdBuildAccelerationStructureNV(
- VkCommandBuffer commandBuffer,
- const VkAccelerationStructureInfoNV* pInfo,
- VkBuffer instanceData,
- VkDeviceSize instanceOffset,
- VkBool32 update,
- VkAccelerationStructureNV dst,
- VkAccelerationStructureNV src,
- VkBuffer scratch,
- VkDeviceSize scratchOffset);
-
-void PostCallRecordCmdBuildAccelerationStructureNV(
- VkCommandBuffer commandBuffer,
- const VkAccelerationStructureInfoNV* pInfo,
- VkBuffer instanceData,
- VkDeviceSize instanceOffset,
- VkBool32 update,
- VkAccelerationStructureNV dst,
- VkAccelerationStructureNV src,
- VkBuffer scratch,
- VkDeviceSize scratchOffset);
-
-void PreCallRecordCmdCopyAccelerationStructureNV(
- VkCommandBuffer commandBuffer,
- VkAccelerationStructureNV dst,
- VkAccelerationStructureNV src,
- VkCopyAccelerationStructureModeNV mode);
-
-void PostCallRecordCmdCopyAccelerationStructureNV(
- VkCommandBuffer commandBuffer,
- VkAccelerationStructureNV dst,
- VkAccelerationStructureNV src,
- VkCopyAccelerationStructureModeNV mode);
-
-void PreCallRecordCmdTraceRaysNV(
- VkCommandBuffer commandBuffer,
- VkBuffer raygenShaderBindingTableBuffer,
- VkDeviceSize raygenShaderBindingOffset,
- VkBuffer missShaderBindingTableBuffer,
- VkDeviceSize missShaderBindingOffset,
- VkDeviceSize missShaderBindingStride,
- VkBuffer hitShaderBindingTableBuffer,
- VkDeviceSize hitShaderBindingOffset,
- VkDeviceSize hitShaderBindingStride,
- VkBuffer callableShaderBindingTableBuffer,
- VkDeviceSize callableShaderBindingOffset,
- VkDeviceSize callableShaderBindingStride,
- uint32_t width,
- uint32_t height,
- uint32_t depth);
-
-void PostCallRecordCmdTraceRaysNV(
- VkCommandBuffer commandBuffer,
- VkBuffer raygenShaderBindingTableBuffer,
- VkDeviceSize raygenShaderBindingOffset,
- VkBuffer missShaderBindingTableBuffer,
- VkDeviceSize missShaderBindingOffset,
- VkDeviceSize missShaderBindingStride,
- VkBuffer hitShaderBindingTableBuffer,
- VkDeviceSize hitShaderBindingOffset,
- VkDeviceSize hitShaderBindingStride,
- VkBuffer callableShaderBindingTableBuffer,
- VkDeviceSize callableShaderBindingOffset,
- VkDeviceSize callableShaderBindingStride,
- uint32_t width,
- uint32_t height,
- uint32_t depth);
-
-void PreCallRecordCreateRayTracingPipelinesNV(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VkRayTracingPipelineCreateInfoNV* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkPipeline* pPipelines);
-
-void PostCallRecordCreateRayTracingPipelinesNV(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VkRayTracingPipelineCreateInfoNV* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkPipeline* pPipelines,
- VkResult result);
-
-void PreCallRecordGetRayTracingShaderGroupHandlesNV(
- VkDevice device,
- VkPipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- size_t dataSize,
- void* pData);
-
-void PostCallRecordGetRayTracingShaderGroupHandlesNV(
- VkDevice device,
- VkPipeline pipeline,
- uint32_t firstGroup,
- uint32_t groupCount,
- size_t dataSize,
- void* pData,
- VkResult result);
-
-void PreCallRecordGetAccelerationStructureHandleNV(
- VkDevice device,
- VkAccelerationStructureNV accelerationStructure,
- size_t dataSize,
- void* pData);
-
-void PostCallRecordGetAccelerationStructureHandleNV(
- VkDevice device,
- VkAccelerationStructureNV accelerationStructure,
- size_t dataSize,
- void* pData,
- VkResult result);
-
-void PreCallRecordCmdWriteAccelerationStructuresPropertiesNV(
- VkCommandBuffer commandBuffer,
- uint32_t accelerationStructureCount,
- const VkAccelerationStructureNV* pAccelerationStructures,
- VkQueryType queryType,
- VkQueryPool queryPool,
- uint32_t firstQuery);
-
-void PostCallRecordCmdWriteAccelerationStructuresPropertiesNV(
- VkCommandBuffer commandBuffer,
- uint32_t accelerationStructureCount,
- const VkAccelerationStructureNV* pAccelerationStructures,
- VkQueryType queryType,
- VkQueryPool queryPool,
- uint32_t firstQuery);
-
-void PreCallRecordCompileDeferredNV(
- VkDevice device,
- VkPipeline pipeline,
- uint32_t shader);
-
-void PostCallRecordCompileDeferredNV(
- VkDevice device,
- VkPipeline pipeline,
- uint32_t shader,
- VkResult result);
-
-void PreCallRecordGetMemoryHostPointerPropertiesEXT(
- VkDevice device,
- VkExternalMemoryHandleTypeFlagBits handleType,
- const void* pHostPointer,
- VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties);
-
-void PostCallRecordGetMemoryHostPointerPropertiesEXT(
- VkDevice device,
- VkExternalMemoryHandleTypeFlagBits handleType,
- const void* pHostPointer,
- VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties,
- VkResult result);
-
-void PreCallRecordCmdWriteBufferMarkerAMD(
- VkCommandBuffer commandBuffer,
- VkPipelineStageFlagBits pipelineStage,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- uint32_t marker);
-
-void PostCallRecordCmdWriteBufferMarkerAMD(
- VkCommandBuffer commandBuffer,
- VkPipelineStageFlagBits pipelineStage,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- uint32_t marker);
-
-void PreCallRecordGetCalibratedTimestampsEXT(
- VkDevice device,
- uint32_t timestampCount,
- const VkCalibratedTimestampInfoEXT* pTimestampInfos,
- uint64_t* pTimestamps,
- uint64_t* pMaxDeviation);
-
-void PostCallRecordGetCalibratedTimestampsEXT(
- VkDevice device,
- uint32_t timestampCount,
- const VkCalibratedTimestampInfoEXT* pTimestampInfos,
- uint64_t* pTimestamps,
- uint64_t* pMaxDeviation,
- VkResult result);
-
-#ifdef VK_USE_PLATFORM_GGP
-#endif // VK_USE_PLATFORM_GGP
-
-void PreCallRecordCmdDrawMeshTasksNV(
- VkCommandBuffer commandBuffer,
- uint32_t taskCount,
- uint32_t firstTask);
-
-void PostCallRecordCmdDrawMeshTasksNV(
- VkCommandBuffer commandBuffer,
- uint32_t taskCount,
- uint32_t firstTask);
-
-void PreCallRecordCmdDrawMeshTasksIndirectNV(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- uint32_t drawCount,
- uint32_t stride);
-
-void PostCallRecordCmdDrawMeshTasksIndirectNV(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- uint32_t drawCount,
- uint32_t stride);
-
-void PreCallRecordCmdDrawMeshTasksIndirectCountNV(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride);
-
-void PostCallRecordCmdDrawMeshTasksIndirectCountNV(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkBuffer countBuffer,
- VkDeviceSize countBufferOffset,
- uint32_t maxDrawCount,
- uint32_t stride);
-
-void PreCallRecordCmdSetExclusiveScissorNV(
- VkCommandBuffer commandBuffer,
- uint32_t firstExclusiveScissor,
- uint32_t exclusiveScissorCount,
- const VkRect2D* pExclusiveScissors);
-
-void PostCallRecordCmdSetExclusiveScissorNV(
- VkCommandBuffer commandBuffer,
- uint32_t firstExclusiveScissor,
- uint32_t exclusiveScissorCount,
- const VkRect2D* pExclusiveScissors);
-
-void PreCallRecordCmdSetCheckpointNV(
- VkCommandBuffer commandBuffer,
- const void* pCheckpointMarker);
-
-void PostCallRecordCmdSetCheckpointNV(
- VkCommandBuffer commandBuffer,
- const void* pCheckpointMarker);
-
-void PreCallRecordGetQueueCheckpointDataNV(
- VkQueue queue,
- uint32_t* pCheckpointDataCount,
- VkCheckpointDataNV* pCheckpointData);
-
-void PostCallRecordGetQueueCheckpointDataNV(
- VkQueue queue,
- uint32_t* pCheckpointDataCount,
- VkCheckpointDataNV* pCheckpointData);
-
-void PreCallRecordInitializePerformanceApiINTEL(
- VkDevice device,
- const VkInitializePerformanceApiInfoINTEL* pInitializeInfo);
-
-void PostCallRecordInitializePerformanceApiINTEL(
- VkDevice device,
- const VkInitializePerformanceApiInfoINTEL* pInitializeInfo,
- VkResult result);
-
-void PreCallRecordUninitializePerformanceApiINTEL(
- VkDevice device);
-
-void PostCallRecordUninitializePerformanceApiINTEL(
- VkDevice device);
-
-void PreCallRecordCmdSetPerformanceMarkerINTEL(
- VkCommandBuffer commandBuffer,
- const VkPerformanceMarkerInfoINTEL* pMarkerInfo);
-
-void PostCallRecordCmdSetPerformanceMarkerINTEL(
- VkCommandBuffer commandBuffer,
- const VkPerformanceMarkerInfoINTEL* pMarkerInfo,
- VkResult result);
-
-void PreCallRecordCmdSetPerformanceStreamMarkerINTEL(
- VkCommandBuffer commandBuffer,
- const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo);
-
-void PostCallRecordCmdSetPerformanceStreamMarkerINTEL(
- VkCommandBuffer commandBuffer,
- const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo,
- VkResult result);
-
-void PreCallRecordCmdSetPerformanceOverrideINTEL(
- VkCommandBuffer commandBuffer,
- const VkPerformanceOverrideInfoINTEL* pOverrideInfo);
-
-void PostCallRecordCmdSetPerformanceOverrideINTEL(
- VkCommandBuffer commandBuffer,
- const VkPerformanceOverrideInfoINTEL* pOverrideInfo,
- VkResult result);
-
-void PreCallRecordAcquirePerformanceConfigurationINTEL(
- VkDevice device,
- const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo,
- VkPerformanceConfigurationINTEL* pConfiguration);
-
-void PostCallRecordAcquirePerformanceConfigurationINTEL(
- VkDevice device,
- const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo,
- VkPerformanceConfigurationINTEL* pConfiguration,
- VkResult result);
-
-void PreCallRecordReleasePerformanceConfigurationINTEL(
- VkDevice device,
- VkPerformanceConfigurationINTEL configuration);
-
-void PostCallRecordReleasePerformanceConfigurationINTEL(
- VkDevice device,
- VkPerformanceConfigurationINTEL configuration,
- VkResult result);
-
-void PreCallRecordQueueSetPerformanceConfigurationINTEL(
- VkQueue queue,
- VkPerformanceConfigurationINTEL configuration);
-
-void PostCallRecordQueueSetPerformanceConfigurationINTEL(
- VkQueue queue,
- VkPerformanceConfigurationINTEL configuration,
- VkResult result);
-
-void PreCallRecordGetPerformanceParameterINTEL(
- VkDevice device,
- VkPerformanceParameterTypeINTEL parameter,
- VkPerformanceValueINTEL* pValue);
-
-void PostCallRecordGetPerformanceParameterINTEL(
- VkDevice device,
- VkPerformanceParameterTypeINTEL parameter,
- VkPerformanceValueINTEL* pValue,
- VkResult result);
-
-void PreCallRecordSetLocalDimmingAMD(
- VkDevice device,
- VkSwapchainKHR swapChain,
- VkBool32 localDimmingEnable);
-
-void PostCallRecordSetLocalDimmingAMD(
- VkDevice device,
- VkSwapchainKHR swapChain,
- VkBool32 localDimmingEnable);
-
-#ifdef VK_USE_PLATFORM_FUCHSIA
-
-void PreCallRecordCreateImagePipeSurfaceFUCHSIA(
- VkInstance instance,
- const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-
-void PostCallRecordCreateImagePipeSurfaceFUCHSIA(
- VkInstance instance,
- const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result);
-#endif // VK_USE_PLATFORM_FUCHSIA
-
-#ifdef VK_USE_PLATFORM_METAL_EXT
-
-void PreCallRecordCreateMetalSurfaceEXT(
- VkInstance instance,
- const VkMetalSurfaceCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-
-void PostCallRecordCreateMetalSurfaceEXT(
- VkInstance instance,
- const VkMetalSurfaceCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result);
-#endif // VK_USE_PLATFORM_METAL_EXT
-
-void PreCallRecordGetBufferDeviceAddressEXT(
- VkDevice device,
- const VkBufferDeviceAddressInfoEXT* pInfo);
-
-void PostCallRecordGetBufferDeviceAddressEXT(
- VkDevice device,
- const VkBufferDeviceAddressInfoEXT* pInfo);
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-void PreCallRecordAcquireFullScreenExclusiveModeEXT(
- VkDevice device,
- VkSwapchainKHR swapchain);
-
-void PostCallRecordAcquireFullScreenExclusiveModeEXT(
- VkDevice device,
- VkSwapchainKHR swapchain,
- VkResult result);
-
-void PreCallRecordReleaseFullScreenExclusiveModeEXT(
- VkDevice device,
- VkSwapchainKHR swapchain);
-
-void PostCallRecordReleaseFullScreenExclusiveModeEXT(
- VkDevice device,
- VkSwapchainKHR swapchain,
- VkResult result);
-
-void PreCallRecordGetDeviceGroupSurfacePresentModes2EXT(
- VkDevice device,
- const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
- VkDeviceGroupPresentModeFlagsKHR* pModes);
-
-void PostCallRecordGetDeviceGroupSurfacePresentModes2EXT(
- VkDevice device,
- const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
- VkDeviceGroupPresentModeFlagsKHR* pModes,
- VkResult result);
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-void PreCallRecordCreateHeadlessSurfaceEXT(
- VkInstance instance,
- const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface);
-
-void PostCallRecordCreateHeadlessSurfaceEXT(
- VkInstance instance,
- const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface,
- VkResult result);
-
-void PreCallRecordCmdSetLineStippleEXT(
- VkCommandBuffer commandBuffer,
- uint32_t lineStippleFactor,
- uint16_t lineStipplePattern);
-
-void PostCallRecordCmdSetLineStippleEXT(
- VkCommandBuffer commandBuffer,
- uint32_t lineStippleFactor,
- uint16_t lineStipplePattern);
-
-void PreCallRecordResetQueryPoolEXT(
- VkDevice device,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount);
-
-void PostCallRecordResetQueryPoolEXT(
- VkDevice device,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount);
-};
diff --git a/layers/generated/vk_dispatch_table_helper.h b/layers/generated/vk_dispatch_table_helper.h
deleted file mode 100644
index fca72736d..000000000
--- a/layers/generated/vk_dispatch_table_helper.h
+++ /dev/null
@@ -1,981 +0,0 @@
-#pragma once
-// *** THIS FILE IS GENERATED - DO NOT EDIT ***
-// See dispatch_helper_generator.py for modifications
-
-/*
- * Copyright (c) 2015-2019 The Khronos Group Inc.
- * Copyright (c) 2015-2019 Valve Corporation
- * Copyright (c) 2015-2019 LunarG, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
- * Author: Jon Ashburn <jon@lunarg.com>
- * Author: Mark Lobodzinski <mark@lunarg.com>
- */
-
-#include <vulkan/vulkan.h>
-#include <vulkan/vk_layer.h>
-#include <cstring>
-#include <string>
-#include <unordered_set>
-#include <unordered_map>
-#include "vk_layer_dispatch_table.h"
-#include "vk_extension_helper.h"
-
-static VKAPI_ATTR VkResult VKAPI_CALL StubBindBufferMemory2(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos) { return VK_SUCCESS; };
-static VKAPI_ATTR VkResult VKAPI_CALL StubBindImageMemory2(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos) { return VK_SUCCESS; };
-static VKAPI_ATTR void VKAPI_CALL StubGetDeviceGroupPeerMemoryFeatures(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures) { };
-static VKAPI_ATTR void VKAPI_CALL StubCmdSetDeviceMask(VkCommandBuffer commandBuffer, uint32_t deviceMask) { };
-static VKAPI_ATTR void VKAPI_CALL StubCmdDispatchBase(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ) { };
-static VKAPI_ATTR void VKAPI_CALL StubGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements) { };
-static VKAPI_ATTR void VKAPI_CALL StubGetBufferMemoryRequirements2(VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements) { };
-static VKAPI_ATTR void VKAPI_CALL StubGetImageSparseMemoryRequirements2(VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements) { };
-static VKAPI_ATTR void VKAPI_CALL StubTrimCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags) { };
-static VKAPI_ATTR void VKAPI_CALL StubGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2* pQueueInfo, VkQueue* pQueue) { };
-static VKAPI_ATTR VkResult VKAPI_CALL StubCreateSamplerYcbcrConversion(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion) { return VK_SUCCESS; };
-static VKAPI_ATTR void VKAPI_CALL StubDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator) { };
-static VKAPI_ATTR VkResult VKAPI_CALL StubCreateDescriptorUpdateTemplate(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) { return VK_SUCCESS; };
-static VKAPI_ATTR void VKAPI_CALL StubDestroyDescriptorUpdateTemplate(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator) { };
-static VKAPI_ATTR void VKAPI_CALL StubUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData) { };
-static VKAPI_ATTR void VKAPI_CALL StubGetDescriptorSetLayoutSupport(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport) { };
-static VKAPI_ATTR VkResult VKAPI_CALL StubCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain) { return VK_SUCCESS; };
-static VKAPI_ATTR void VKAPI_CALL StubDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator) { };
-static VKAPI_ATTR VkResult VKAPI_CALL StubGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages) { return VK_SUCCESS; };
-static VKAPI_ATTR VkResult VKAPI_CALL StubAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex) { return VK_SUCCESS; };
-static VKAPI_ATTR VkResult VKAPI_CALL StubQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR* pPresentInfo) { return VK_SUCCESS; };
-static VKAPI_ATTR VkResult VKAPI_CALL StubGetDeviceGroupPresentCapabilitiesKHR(VkDevice device, VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities) { return VK_SUCCESS; };
-static VKAPI_ATTR VkResult VKAPI_CALL StubGetDeviceGroupSurfacePresentModesKHR(VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR* pModes) { return VK_SUCCESS; };
-static VKAPI_ATTR VkResult VKAPI_CALL StubAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex) { return VK_SUCCESS; };
-static VKAPI_ATTR VkResult VKAPI_CALL StubCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains) { return VK_SUCCESS; };
-static VKAPI_ATTR void VKAPI_CALL StubGetDeviceGroupPeerMemoryFeaturesKHR(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures) { };
-static VKAPI_ATTR void VKAPI_CALL StubCmdSetDeviceMaskKHR(VkCommandBuffer commandBuffer, uint32_t deviceMask) { };
-static VKAPI_ATTR void VKAPI_CALL StubCmdDispatchBaseKHR(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ) { };
-static VKAPI_ATTR void VKAPI_CALL StubTrimCommandPoolKHR(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags) { };
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-static VKAPI_ATTR VkResult VKAPI_CALL StubGetMemoryWin32HandleKHR(VkDevice device, const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle) { return VK_SUCCESS; };
-#endif // VK_USE_PLATFORM_WIN32_KHR
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-static VKAPI_ATTR VkResult VKAPI_CALL StubGetMemoryWin32HandlePropertiesKHR(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties) { return VK_SUCCESS; };
-#endif // VK_USE_PLATFORM_WIN32_KHR
-static VKAPI_ATTR VkResult VKAPI_CALL StubGetMemoryFdKHR(VkDevice device, const VkMemoryGetFdInfoKHR* pGetFdInfo, int* pFd) { return VK_SUCCESS; };
-static VKAPI_ATTR VkResult VKAPI_CALL StubGetMemoryFdPropertiesKHR(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, int fd, VkMemoryFdPropertiesKHR* pMemoryFdProperties) { return VK_SUCCESS; };
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-static VKAPI_ATTR VkResult VKAPI_CALL StubImportSemaphoreWin32HandleKHR(VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo) { return VK_SUCCESS; };
-#endif // VK_USE_PLATFORM_WIN32_KHR
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-static VKAPI_ATTR VkResult VKAPI_CALL StubGetSemaphoreWin32HandleKHR(VkDevice device, const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle) { return VK_SUCCESS; };
-#endif // VK_USE_PLATFORM_WIN32_KHR
-static VKAPI_ATTR VkResult VKAPI_CALL StubImportSemaphoreFdKHR(VkDevice device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo) { return VK_SUCCESS; };
-static VKAPI_ATTR VkResult VKAPI_CALL StubGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd) { return VK_SUCCESS; };
-static VKAPI_ATTR void VKAPI_CALL StubCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites) { };
-static VKAPI_ATTR void VKAPI_CALL StubCmdPushDescriptorSetWithTemplateKHR(VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData) { };
-static VKAPI_ATTR VkResult VKAPI_CALL StubCreateDescriptorUpdateTemplateKHR(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) { return VK_SUCCESS; };
-static VKAPI_ATTR void VKAPI_CALL StubDestroyDescriptorUpdateTemplateKHR(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator) { };
-static VKAPI_ATTR void VKAPI_CALL StubUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData) { };
-static VKAPI_ATTR VkResult VKAPI_CALL StubCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass) { return VK_SUCCESS; };
-static VKAPI_ATTR void VKAPI_CALL StubCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfoKHR* pSubpassBeginInfo) { };
-static VKAPI_ATTR void VKAPI_CALL StubCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR* pSubpassBeginInfo, const VkSubpassEndInfoKHR* pSubpassEndInfo) { };
-static VKAPI_ATTR void VKAPI_CALL StubCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR* pSubpassEndInfo) { };
-static VKAPI_ATTR VkResult VKAPI_CALL StubGetSwapchainStatusKHR(VkDevice device, VkSwapchainKHR swapchain) { return VK_SUCCESS; };
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-static VKAPI_ATTR VkResult VKAPI_CALL StubImportFenceWin32HandleKHR(VkDevice device, const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo) { return VK_SUCCESS; };
-#endif // VK_USE_PLATFORM_WIN32_KHR
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-static VKAPI_ATTR VkResult VKAPI_CALL StubGetFenceWin32HandleKHR(VkDevice device, const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle) { return VK_SUCCESS; };
-#endif // VK_USE_PLATFORM_WIN32_KHR
-static VKAPI_ATTR VkResult VKAPI_CALL StubImportFenceFdKHR(VkDevice device, const VkImportFenceFdInfoKHR* pImportFenceFdInfo) { return VK_SUCCESS; };
-static VKAPI_ATTR VkResult VKAPI_CALL StubGetFenceFdKHR(VkDevice device, const VkFenceGetFdInfoKHR* pGetFdInfo, int* pFd) { return VK_SUCCESS; };
-static VKAPI_ATTR void VKAPI_CALL StubGetImageMemoryRequirements2KHR(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements) { };
-static VKAPI_ATTR void VKAPI_CALL StubGetBufferMemoryRequirements2KHR(VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements) { };
-static VKAPI_ATTR void VKAPI_CALL StubGetImageSparseMemoryRequirements2KHR(VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements) { };
-static VKAPI_ATTR VkResult VKAPI_CALL StubCreateSamplerYcbcrConversionKHR(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion) { return VK_SUCCESS; };
-static VKAPI_ATTR void VKAPI_CALL StubDestroySamplerYcbcrConversionKHR(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator) { };
-static VKAPI_ATTR VkResult VKAPI_CALL StubBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos) { return VK_SUCCESS; };
-static VKAPI_ATTR VkResult VKAPI_CALL StubBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos) { return VK_SUCCESS; };
-static VKAPI_ATTR void VKAPI_CALL StubGetDescriptorSetLayoutSupportKHR(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport) { };
-static VKAPI_ATTR void VKAPI_CALL StubCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) { };
-static VKAPI_ATTR void VKAPI_CALL StubCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) { };
-static VKAPI_ATTR VkResult VKAPI_CALL StubGetPipelineExecutablePropertiesKHR(VkDevice device, const VkPipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VkPipelineExecutablePropertiesKHR* pProperties) { return VK_SUCCESS; };
-static VKAPI_ATTR VkResult VKAPI_CALL StubGetPipelineExecutableStatisticsKHR(VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VkPipelineExecutableStatisticKHR* pStatistics) { return VK_SUCCESS; };
-static VKAPI_ATTR VkResult VKAPI_CALL StubGetPipelineExecutableInternalRepresentationsKHR(VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations) { return VK_SUCCESS; };
-static VKAPI_ATTR VkResult VKAPI_CALL StubDebugMarkerSetObjectTagEXT(VkDevice device, const VkDebugMarkerObjectTagInfoEXT* pTagInfo) { return VK_SUCCESS; };
-static VKAPI_ATTR VkResult VKAPI_CALL StubDebugMarkerSetObjectNameEXT(VkDevice device, const VkDebugMarkerObjectNameInfoEXT* pNameInfo) { return VK_SUCCESS; };
-static VKAPI_ATTR void VKAPI_CALL StubCmdDebugMarkerBeginEXT(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo) { };
-static VKAPI_ATTR void VKAPI_CALL StubCmdDebugMarkerEndEXT(VkCommandBuffer commandBuffer) { };
-static VKAPI_ATTR void VKAPI_CALL StubCmdDebugMarkerInsertEXT(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo) { };
-static VKAPI_ATTR void VKAPI_CALL StubCmdBindTransformFeedbackBuffersEXT(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes) { };
-static VKAPI_ATTR void VKAPI_CALL StubCmdBeginTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets) { };
-static VKAPI_ATTR void VKAPI_CALL StubCmdEndTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets) { };
-static VKAPI_ATTR void VKAPI_CALL StubCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index) { };
-static VKAPI_ATTR void VKAPI_CALL StubCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index) { };
-static VKAPI_ATTR void VKAPI_CALL StubCmdDrawIndirectByteCountEXT(VkCommandBuffer commandBuffer, uint32_t instanceCount, uint32_t firstInstance, VkBuffer counterBuffer, VkDeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride) { };
-static VKAPI_ATTR uint32_t VKAPI_CALL StubGetImageViewHandleNVX(VkDevice device, const VkImageViewHandleInfoNVX* pInfo) { return 0; };
-static VKAPI_ATTR void VKAPI_CALL StubCmdDrawIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) { };
-static VKAPI_ATTR void VKAPI_CALL StubCmdDrawIndexedIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) { };
-static VKAPI_ATTR VkResult VKAPI_CALL StubGetShaderInfoAMD(VkDevice device, VkPipeline pipeline, VkShaderStageFlagBits shaderStage, VkShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo) { return VK_SUCCESS; };
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-static VKAPI_ATTR VkResult VKAPI_CALL StubGetMemoryWin32HandleNV(VkDevice device, VkDeviceMemory memory, VkExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle) { return VK_SUCCESS; };
-#endif // VK_USE_PLATFORM_WIN32_KHR
-static VKAPI_ATTR void VKAPI_CALL StubCmdBeginConditionalRenderingEXT(VkCommandBuffer commandBuffer, const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin) { };
-static VKAPI_ATTR void VKAPI_CALL StubCmdEndConditionalRenderingEXT(VkCommandBuffer commandBuffer) { };
-static VKAPI_ATTR void VKAPI_CALL StubCmdProcessCommandsNVX(VkCommandBuffer commandBuffer, const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo) { };
-static VKAPI_ATTR void VKAPI_CALL StubCmdReserveSpaceForCommandsNVX(VkCommandBuffer commandBuffer, const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo) { };
-static VKAPI_ATTR VkResult VKAPI_CALL StubCreateIndirectCommandsLayoutNVX(VkDevice device, const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout) { return VK_SUCCESS; };
-static VKAPI_ATTR void VKAPI_CALL StubDestroyIndirectCommandsLayoutNVX(VkDevice device, VkIndirectCommandsLayoutNVX indirectCommandsLayout, const VkAllocationCallbacks* pAllocator) { };
-static VKAPI_ATTR VkResult VKAPI_CALL StubCreateObjectTableNVX(VkDevice device, const VkObjectTableCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkObjectTableNVX* pObjectTable) { return VK_SUCCESS; };
-static VKAPI_ATTR void VKAPI_CALL StubDestroyObjectTableNVX(VkDevice device, VkObjectTableNVX objectTable, const VkAllocationCallbacks* pAllocator) { };
-static VKAPI_ATTR VkResult VKAPI_CALL StubRegisterObjectsNVX(VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices) { return VK_SUCCESS; };
-static VKAPI_ATTR VkResult VKAPI_CALL StubUnregisterObjectsNVX(VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices) { return VK_SUCCESS; };
-static VKAPI_ATTR void VKAPI_CALL StubCmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportWScalingNV* pViewportWScalings) { };
-static VKAPI_ATTR VkResult VKAPI_CALL StubDisplayPowerControlEXT(VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT* pDisplayPowerInfo) { return VK_SUCCESS; };
-static VKAPI_ATTR VkResult VKAPI_CALL StubRegisterDeviceEventEXT(VkDevice device, const VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence) { return VK_SUCCESS; };
-static VKAPI_ATTR VkResult VKAPI_CALL StubRegisterDisplayEventEXT(VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT* pDisplayEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence) { return VK_SUCCESS; };
-static VKAPI_ATTR VkResult VKAPI_CALL StubGetSwapchainCounterEXT(VkDevice device, VkSwapchainKHR swapchain, VkSurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue) { return VK_SUCCESS; };
-static VKAPI_ATTR VkResult VKAPI_CALL StubGetRefreshCycleDurationGOOGLE(VkDevice device, VkSwapchainKHR swapchain, VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties) { return VK_SUCCESS; };
-static VKAPI_ATTR VkResult VKAPI_CALL StubGetPastPresentationTimingGOOGLE(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VkPastPresentationTimingGOOGLE* pPresentationTimings) { return VK_SUCCESS; };
-static VKAPI_ATTR void VKAPI_CALL StubCmdSetDiscardRectangleEXT(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VkRect2D* pDiscardRectangles) { };
-static VKAPI_ATTR void VKAPI_CALL StubSetHdrMetadataEXT(VkDevice device, uint32_t swapchainCount, const VkSwapchainKHR* pSwapchains, const VkHdrMetadataEXT* pMetadata) { };
-static VKAPI_ATTR VkResult VKAPI_CALL StubSetDebugUtilsObjectNameEXT(VkDevice device, const VkDebugUtilsObjectNameInfoEXT* pNameInfo) { return VK_SUCCESS; };
-static VKAPI_ATTR VkResult VKAPI_CALL StubSetDebugUtilsObjectTagEXT(VkDevice device, const VkDebugUtilsObjectTagInfoEXT* pTagInfo) { return VK_SUCCESS; };
-static VKAPI_ATTR void VKAPI_CALL StubQueueBeginDebugUtilsLabelEXT(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo) { };
-static VKAPI_ATTR void VKAPI_CALL StubQueueEndDebugUtilsLabelEXT(VkQueue queue) { };
-static VKAPI_ATTR void VKAPI_CALL StubQueueInsertDebugUtilsLabelEXT(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo) { };
-static VKAPI_ATTR void VKAPI_CALL StubCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo) { };
-static VKAPI_ATTR void VKAPI_CALL StubCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer) { };
-static VKAPI_ATTR void VKAPI_CALL StubCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo) { };
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-static VKAPI_ATTR VkResult VKAPI_CALL StubGetAndroidHardwareBufferPropertiesANDROID(VkDevice device, const struct AHardwareBuffer* buffer, VkAndroidHardwareBufferPropertiesANDROID* pProperties) { return VK_SUCCESS; };
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-static VKAPI_ATTR VkResult VKAPI_CALL StubGetMemoryAndroidHardwareBufferANDROID(VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer) { return VK_SUCCESS; };
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-static VKAPI_ATTR void VKAPI_CALL StubCmdSetSampleLocationsEXT(VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT* pSampleLocationsInfo) { };
-static VKAPI_ATTR VkResult VKAPI_CALL StubGetImageDrmFormatModifierPropertiesEXT(VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT* pProperties) { return VK_SUCCESS; };
-static VKAPI_ATTR VkResult VKAPI_CALL StubCreateValidationCacheEXT(VkDevice device, const VkValidationCacheCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkValidationCacheEXT* pValidationCache) { return VK_SUCCESS; };
-static VKAPI_ATTR void VKAPI_CALL StubDestroyValidationCacheEXT(VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks* pAllocator) { };
-static VKAPI_ATTR VkResult VKAPI_CALL StubMergeValidationCachesEXT(VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount, const VkValidationCacheEXT* pSrcCaches) { return VK_SUCCESS; };
-static VKAPI_ATTR VkResult VKAPI_CALL StubGetValidationCacheDataEXT(VkDevice device, VkValidationCacheEXT validationCache, size_t* pDataSize, void* pData) { return VK_SUCCESS; };
-static VKAPI_ATTR void VKAPI_CALL StubCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout) { };
-static VKAPI_ATTR void VKAPI_CALL StubCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkShadingRatePaletteNV* pShadingRatePalettes) { };
-static VKAPI_ATTR void VKAPI_CALL StubCmdSetCoarseSampleOrderNV(VkCommandBuffer commandBuffer, VkCoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VkCoarseSampleOrderCustomNV* pCustomSampleOrders) { };
-static VKAPI_ATTR VkResult VKAPI_CALL StubCreateAccelerationStructureNV(VkDevice device, const VkAccelerationStructureCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureNV* pAccelerationStructure) { return VK_SUCCESS; };
-static VKAPI_ATTR void VKAPI_CALL StubDestroyAccelerationStructureNV(VkDevice device, VkAccelerationStructureNV accelerationStructure, const VkAllocationCallbacks* pAllocator) { };
-static VKAPI_ATTR void VKAPI_CALL StubGetAccelerationStructureMemoryRequirementsNV(VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements) { };
-static VKAPI_ATTR VkResult VKAPI_CALL StubBindAccelerationStructureMemoryNV(VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV* pBindInfos) { return VK_SUCCESS; };
-static VKAPI_ATTR void VKAPI_CALL StubCmdBuildAccelerationStructureNV(VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset) { };
-static VKAPI_ATTR void VKAPI_CALL StubCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkCopyAccelerationStructureModeNV mode) { };
-static VKAPI_ATTR void VKAPI_CALL StubCmdTraceRaysNV(VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer, VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer, VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride, VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset, VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer, VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth) { };
-static VKAPI_ATTR VkResult VKAPI_CALL StubCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines) { return VK_SUCCESS; };
-static VKAPI_ATTR VkResult VKAPI_CALL StubGetRayTracingShaderGroupHandlesNV(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData) { return VK_SUCCESS; };
-static VKAPI_ATTR VkResult VKAPI_CALL StubGetAccelerationStructureHandleNV(VkDevice device, VkAccelerationStructureNV accelerationStructure, size_t dataSize, void* pData) { return VK_SUCCESS; };
-static VKAPI_ATTR void VKAPI_CALL StubCmdWriteAccelerationStructuresPropertiesNV(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureNV* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery) { };
-static VKAPI_ATTR VkResult VKAPI_CALL StubCompileDeferredNV(VkDevice device, VkPipeline pipeline, uint32_t shader) { return VK_SUCCESS; };
-static VKAPI_ATTR VkResult VKAPI_CALL StubGetMemoryHostPointerPropertiesEXT(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties) { return VK_SUCCESS; };
-static VKAPI_ATTR void VKAPI_CALL StubCmdWriteBufferMarkerAMD(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker) { };
-static VKAPI_ATTR VkResult VKAPI_CALL StubGetCalibratedTimestampsEXT(VkDevice device, uint32_t timestampCount, const VkCalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation) { return VK_SUCCESS; };
-static VKAPI_ATTR void VKAPI_CALL StubCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask) { };
-static VKAPI_ATTR void VKAPI_CALL StubCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride) { };
-static VKAPI_ATTR void VKAPI_CALL StubCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) { };
-static VKAPI_ATTR void VKAPI_CALL StubCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VkRect2D* pExclusiveScissors) { };
-static VKAPI_ATTR void VKAPI_CALL StubCmdSetCheckpointNV(VkCommandBuffer commandBuffer, const void* pCheckpointMarker) { };
-static VKAPI_ATTR void VKAPI_CALL StubGetQueueCheckpointDataNV(VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointDataNV* pCheckpointData) { };
-static VKAPI_ATTR VkResult VKAPI_CALL StubInitializePerformanceApiINTEL(VkDevice device, const VkInitializePerformanceApiInfoINTEL* pInitializeInfo) { return VK_SUCCESS; };
-static VKAPI_ATTR void VKAPI_CALL StubUninitializePerformanceApiINTEL(VkDevice device) { };
-static VKAPI_ATTR VkResult VKAPI_CALL StubCmdSetPerformanceMarkerINTEL(VkCommandBuffer commandBuffer, const VkPerformanceMarkerInfoINTEL* pMarkerInfo) { return VK_SUCCESS; };
-static VKAPI_ATTR VkResult VKAPI_CALL StubCmdSetPerformanceStreamMarkerINTEL(VkCommandBuffer commandBuffer, const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo) { return VK_SUCCESS; };
-static VKAPI_ATTR VkResult VKAPI_CALL StubCmdSetPerformanceOverrideINTEL(VkCommandBuffer commandBuffer, const VkPerformanceOverrideInfoINTEL* pOverrideInfo) { return VK_SUCCESS; };
-static VKAPI_ATTR VkResult VKAPI_CALL StubAcquirePerformanceConfigurationINTEL(VkDevice device, const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VkPerformanceConfigurationINTEL* pConfiguration) { return VK_SUCCESS; };
-static VKAPI_ATTR VkResult VKAPI_CALL StubReleasePerformanceConfigurationINTEL(VkDevice device, VkPerformanceConfigurationINTEL configuration) { return VK_SUCCESS; };
-static VKAPI_ATTR VkResult VKAPI_CALL StubQueueSetPerformanceConfigurationINTEL(VkQueue queue, VkPerformanceConfigurationINTEL configuration) { return VK_SUCCESS; };
-static VKAPI_ATTR VkResult VKAPI_CALL StubGetPerformanceParameterINTEL(VkDevice device, VkPerformanceParameterTypeINTEL parameter, VkPerformanceValueINTEL* pValue) { return VK_SUCCESS; };
-static VKAPI_ATTR void VKAPI_CALL StubSetLocalDimmingAMD(VkDevice device, VkSwapchainKHR swapChain, VkBool32 localDimmingEnable) { };
-static VKAPI_ATTR VkDeviceAddress VKAPI_CALL StubGetBufferDeviceAddressEXT(VkDevice device, const VkBufferDeviceAddressInfoEXT* pInfo) { return 0; };
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-static VKAPI_ATTR VkResult VKAPI_CALL StubAcquireFullScreenExclusiveModeEXT(VkDevice device, VkSwapchainKHR swapchain) { return VK_SUCCESS; };
-#endif // VK_USE_PLATFORM_WIN32_KHR
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-static VKAPI_ATTR VkResult VKAPI_CALL StubReleaseFullScreenExclusiveModeEXT(VkDevice device, VkSwapchainKHR swapchain) { return VK_SUCCESS; };
-#endif // VK_USE_PLATFORM_WIN32_KHR
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-static VKAPI_ATTR VkResult VKAPI_CALL StubGetDeviceGroupSurfacePresentModes2EXT(VkDevice device, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkDeviceGroupPresentModeFlagsKHR* pModes) { return VK_SUCCESS; };
-#endif // VK_USE_PLATFORM_WIN32_KHR
-static VKAPI_ATTR void VKAPI_CALL StubCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern) { };
-static VKAPI_ATTR void VKAPI_CALL StubResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount) { };
-
-
-
-const std::unordered_map<std::string, std::string> api_extension_map {
- {"vkBindBufferMemory2", "VK_VERSION_1_1"},
- {"vkBindImageMemory2", "VK_VERSION_1_1"},
- {"vkGetDeviceGroupPeerMemoryFeatures", "VK_VERSION_1_1"},
- {"vkCmdSetDeviceMask", "VK_VERSION_1_1"},
- {"vkCmdDispatchBase", "VK_VERSION_1_1"},
- {"vkGetImageMemoryRequirements2", "VK_VERSION_1_1"},
- {"vkGetBufferMemoryRequirements2", "VK_VERSION_1_1"},
- {"vkGetImageSparseMemoryRequirements2", "VK_VERSION_1_1"},
- {"vkTrimCommandPool", "VK_VERSION_1_1"},
- {"vkGetDeviceQueue2", "VK_VERSION_1_1"},
- {"vkCreateSamplerYcbcrConversion", "VK_VERSION_1_1"},
- {"vkDestroySamplerYcbcrConversion", "VK_VERSION_1_1"},
- {"vkCreateDescriptorUpdateTemplate", "VK_VERSION_1_1"},
- {"vkDestroyDescriptorUpdateTemplate", "VK_VERSION_1_1"},
- {"vkUpdateDescriptorSetWithTemplate", "VK_VERSION_1_1"},
- {"vkGetDescriptorSetLayoutSupport", "VK_VERSION_1_1"},
- {"vkCreateSwapchainKHR", "VK_KHR_swapchain"},
- {"vkDestroySwapchainKHR", "VK_KHR_swapchain"},
- {"vkGetSwapchainImagesKHR", "VK_KHR_swapchain"},
- {"vkAcquireNextImageKHR", "VK_KHR_swapchain"},
- {"vkQueuePresentKHR", "VK_KHR_swapchain"},
- {"vkGetDeviceGroupPresentCapabilitiesKHR", "VK_KHR_swapchain"},
- {"vkGetDeviceGroupSurfacePresentModesKHR", "VK_KHR_swapchain"},
- {"vkAcquireNextImage2KHR", "VK_KHR_swapchain"},
- {"vkCreateSharedSwapchainsKHR", "VK_KHR_display_swapchain"},
- {"vkGetDeviceGroupPeerMemoryFeaturesKHR", "VK_KHR_device_group"},
- {"vkCmdSetDeviceMaskKHR", "VK_KHR_device_group"},
- {"vkCmdDispatchBaseKHR", "VK_KHR_device_group"},
- {"vkTrimCommandPoolKHR", "VK_KHR_maintenance1"},
- {"vkGetMemoryWin32HandleKHR", "VK_KHR_external_memory_win32"},
- {"vkGetMemoryWin32HandlePropertiesKHR", "VK_KHR_external_memory_win32"},
- {"vkGetMemoryFdKHR", "VK_KHR_external_memory_fd"},
- {"vkGetMemoryFdPropertiesKHR", "VK_KHR_external_memory_fd"},
- {"vkImportSemaphoreWin32HandleKHR", "VK_KHR_external_semaphore_win32"},
- {"vkGetSemaphoreWin32HandleKHR", "VK_KHR_external_semaphore_win32"},
- {"vkImportSemaphoreFdKHR", "VK_KHR_external_semaphore_fd"},
- {"vkGetSemaphoreFdKHR", "VK_KHR_external_semaphore_fd"},
- {"vkCmdPushDescriptorSetKHR", "VK_KHR_push_descriptor"},
- {"vkCmdPushDescriptorSetWithTemplateKHR", "VK_KHR_push_descriptor"},
- {"vkCreateDescriptorUpdateTemplateKHR", "VK_KHR_descriptor_update_template"},
- {"vkDestroyDescriptorUpdateTemplateKHR", "VK_KHR_descriptor_update_template"},
- {"vkUpdateDescriptorSetWithTemplateKHR", "VK_KHR_descriptor_update_template"},
- {"vkCreateRenderPass2KHR", "VK_KHR_create_renderpass2"},
- {"vkCmdBeginRenderPass2KHR", "VK_KHR_create_renderpass2"},
- {"vkCmdNextSubpass2KHR", "VK_KHR_create_renderpass2"},
- {"vkCmdEndRenderPass2KHR", "VK_KHR_create_renderpass2"},
- {"vkGetSwapchainStatusKHR", "VK_KHR_shared_presentable_image"},
- {"vkImportFenceWin32HandleKHR", "VK_KHR_external_fence_win32"},
- {"vkGetFenceWin32HandleKHR", "VK_KHR_external_fence_win32"},
- {"vkImportFenceFdKHR", "VK_KHR_external_fence_fd"},
- {"vkGetFenceFdKHR", "VK_KHR_external_fence_fd"},
- {"vkGetImageMemoryRequirements2KHR", "VK_KHR_get_memory_requirements2"},
- {"vkGetBufferMemoryRequirements2KHR", "VK_KHR_get_memory_requirements2"},
- {"vkGetImageSparseMemoryRequirements2KHR", "VK_KHR_get_memory_requirements2"},
- {"vkCreateSamplerYcbcrConversionKHR", "VK_KHR_sampler_ycbcr_conversion"},
- {"vkDestroySamplerYcbcrConversionKHR", "VK_KHR_sampler_ycbcr_conversion"},
- {"vkBindBufferMemory2KHR", "VK_KHR_bind_memory2"},
- {"vkBindImageMemory2KHR", "VK_KHR_bind_memory2"},
- {"vkGetDescriptorSetLayoutSupportKHR", "VK_KHR_maintenance3"},
- {"vkCmdDrawIndirectCountKHR", "VK_KHR_draw_indirect_count"},
- {"vkCmdDrawIndexedIndirectCountKHR", "VK_KHR_draw_indirect_count"},
- {"vkGetPipelineExecutablePropertiesKHR", "VK_KHR_pipeline_executable_properties"},
- {"vkGetPipelineExecutableStatisticsKHR", "VK_KHR_pipeline_executable_properties"},
- {"vkGetPipelineExecutableInternalRepresentationsKHR", "VK_KHR_pipeline_executable_properties"},
- {"vkDebugMarkerSetObjectTagEXT", "VK_EXT_debug_marker"},
- {"vkDebugMarkerSetObjectNameEXT", "VK_EXT_debug_marker"},
- {"vkCmdDebugMarkerBeginEXT", "VK_EXT_debug_marker"},
- {"vkCmdDebugMarkerEndEXT", "VK_EXT_debug_marker"},
- {"vkCmdDebugMarkerInsertEXT", "VK_EXT_debug_marker"},
- {"vkCmdBindTransformFeedbackBuffersEXT", "VK_EXT_transform_feedback"},
- {"vkCmdBeginTransformFeedbackEXT", "VK_EXT_transform_feedback"},
- {"vkCmdEndTransformFeedbackEXT", "VK_EXT_transform_feedback"},
- {"vkCmdBeginQueryIndexedEXT", "VK_EXT_transform_feedback"},
- {"vkCmdEndQueryIndexedEXT", "VK_EXT_transform_feedback"},
- {"vkCmdDrawIndirectByteCountEXT", "VK_EXT_transform_feedback"},
- {"vkGetImageViewHandleNVX", "VK_NVX_image_view_handle"},
- {"vkCmdDrawIndirectCountAMD", "VK_AMD_draw_indirect_count"},
- {"vkCmdDrawIndexedIndirectCountAMD", "VK_AMD_draw_indirect_count"},
- {"vkGetShaderInfoAMD", "VK_AMD_shader_info"},
- {"vkGetMemoryWin32HandleNV", "VK_NV_external_memory_win32"},
- {"vkCmdBeginConditionalRenderingEXT", "VK_EXT_conditional_rendering"},
- {"vkCmdEndConditionalRenderingEXT", "VK_EXT_conditional_rendering"},
- {"vkCmdProcessCommandsNVX", "VK_NVX_device_generated_commands"},
- {"vkCmdReserveSpaceForCommandsNVX", "VK_NVX_device_generated_commands"},
- {"vkCreateIndirectCommandsLayoutNVX", "VK_NVX_device_generated_commands"},
- {"vkDestroyIndirectCommandsLayoutNVX", "VK_NVX_device_generated_commands"},
- {"vkCreateObjectTableNVX", "VK_NVX_device_generated_commands"},
- {"vkDestroyObjectTableNVX", "VK_NVX_device_generated_commands"},
- {"vkRegisterObjectsNVX", "VK_NVX_device_generated_commands"},
- {"vkUnregisterObjectsNVX", "VK_NVX_device_generated_commands"},
- {"vkCmdSetViewportWScalingNV", "VK_NV_clip_space_w_scaling"},
- {"vkDisplayPowerControlEXT", "VK_EXT_display_control"},
- {"vkRegisterDeviceEventEXT", "VK_EXT_display_control"},
- {"vkRegisterDisplayEventEXT", "VK_EXT_display_control"},
- {"vkGetSwapchainCounterEXT", "VK_EXT_display_control"},
- {"vkGetRefreshCycleDurationGOOGLE", "VK_GOOGLE_display_timing"},
- {"vkGetPastPresentationTimingGOOGLE", "VK_GOOGLE_display_timing"},
- {"vkCmdSetDiscardRectangleEXT", "VK_EXT_discard_rectangles"},
- {"vkSetHdrMetadataEXT", "VK_EXT_hdr_metadata"},
- {"vkSetDebugUtilsObjectNameEXT", "VK_EXT_debug_utils"},
- {"vkSetDebugUtilsObjectTagEXT", "VK_EXT_debug_utils"},
- {"vkQueueBeginDebugUtilsLabelEXT", "VK_EXT_debug_utils"},
- {"vkQueueEndDebugUtilsLabelEXT", "VK_EXT_debug_utils"},
- {"vkQueueInsertDebugUtilsLabelEXT", "VK_EXT_debug_utils"},
- {"vkCmdBeginDebugUtilsLabelEXT", "VK_EXT_debug_utils"},
- {"vkCmdEndDebugUtilsLabelEXT", "VK_EXT_debug_utils"},
- {"vkCmdInsertDebugUtilsLabelEXT", "VK_EXT_debug_utils"},
- {"vkGetAndroidHardwareBufferPropertiesANDROID", "VK_ANDROID_external_memory_android_hardware_buffer"},
- {"vkGetMemoryAndroidHardwareBufferANDROID", "VK_ANDROID_external_memory_android_hardware_buffer"},
- {"vkCmdSetSampleLocationsEXT", "VK_EXT_sample_locations"},
- {"vkGetImageDrmFormatModifierPropertiesEXT", "VK_EXT_image_drm_format_modifier"},
- {"vkCreateValidationCacheEXT", "VK_EXT_validation_cache"},
- {"vkDestroyValidationCacheEXT", "VK_EXT_validation_cache"},
- {"vkMergeValidationCachesEXT", "VK_EXT_validation_cache"},
- {"vkGetValidationCacheDataEXT", "VK_EXT_validation_cache"},
- {"vkCmdBindShadingRateImageNV", "VK_NV_shading_rate_image"},
- {"vkCmdSetViewportShadingRatePaletteNV", "VK_NV_shading_rate_image"},
- {"vkCmdSetCoarseSampleOrderNV", "VK_NV_shading_rate_image"},
- {"vkCreateAccelerationStructureNV", "VK_NV_ray_tracing"},
- {"vkDestroyAccelerationStructureNV", "VK_NV_ray_tracing"},
- {"vkGetAccelerationStructureMemoryRequirementsNV", "VK_NV_ray_tracing"},
- {"vkBindAccelerationStructureMemoryNV", "VK_NV_ray_tracing"},
- {"vkCmdBuildAccelerationStructureNV", "VK_NV_ray_tracing"},
- {"vkCmdCopyAccelerationStructureNV", "VK_NV_ray_tracing"},
- {"vkCmdTraceRaysNV", "VK_NV_ray_tracing"},
- {"vkCreateRayTracingPipelinesNV", "VK_NV_ray_tracing"},
- {"vkGetRayTracingShaderGroupHandlesNV", "VK_NV_ray_tracing"},
- {"vkGetAccelerationStructureHandleNV", "VK_NV_ray_tracing"},
- {"vkCmdWriteAccelerationStructuresPropertiesNV", "VK_NV_ray_tracing"},
- {"vkCompileDeferredNV", "VK_NV_ray_tracing"},
- {"vkGetMemoryHostPointerPropertiesEXT", "VK_EXT_external_memory_host"},
- {"vkCmdWriteBufferMarkerAMD", "VK_AMD_buffer_marker"},
- {"vkGetCalibratedTimestampsEXT", "VK_EXT_calibrated_timestamps"},
- {"vkCmdDrawMeshTasksNV", "VK_NV_mesh_shader"},
- {"vkCmdDrawMeshTasksIndirectNV", "VK_NV_mesh_shader"},
- {"vkCmdDrawMeshTasksIndirectCountNV", "VK_NV_mesh_shader"},
- {"vkCmdSetExclusiveScissorNV", "VK_NV_scissor_exclusive"},
- {"vkCmdSetCheckpointNV", "VK_NV_device_diagnostic_checkpoints"},
- {"vkGetQueueCheckpointDataNV", "VK_NV_device_diagnostic_checkpoints"},
- {"vkInitializePerformanceApiINTEL", "VK_INTEL_performance_query"},
- {"vkUninitializePerformanceApiINTEL", "VK_INTEL_performance_query"},
- {"vkCmdSetPerformanceMarkerINTEL", "VK_INTEL_performance_query"},
- {"vkCmdSetPerformanceStreamMarkerINTEL", "VK_INTEL_performance_query"},
- {"vkCmdSetPerformanceOverrideINTEL", "VK_INTEL_performance_query"},
- {"vkAcquirePerformanceConfigurationINTEL", "VK_INTEL_performance_query"},
- {"vkReleasePerformanceConfigurationINTEL", "VK_INTEL_performance_query"},
- {"vkQueueSetPerformanceConfigurationINTEL", "VK_INTEL_performance_query"},
- {"vkGetPerformanceParameterINTEL", "VK_INTEL_performance_query"},
- {"vkSetLocalDimmingAMD", "VK_AMD_display_native_hdr"},
- {"vkGetBufferDeviceAddressEXT", "VK_EXT_buffer_device_address"},
- {"vkAcquireFullScreenExclusiveModeEXT", "VK_EXT_full_screen_exclusive"},
- {"vkReleaseFullScreenExclusiveModeEXT", "VK_EXT_full_screen_exclusive"},
- {"vkGetDeviceGroupSurfacePresentModes2EXT", "VK_EXT_full_screen_exclusive"},
- {"vkCmdSetLineStippleEXT", "VK_EXT_line_rasterization"},
- {"vkResetQueryPoolEXT", "VK_EXT_host_query_reset"},
-};
-
-// Using the code-generated api_extension_map above (API name -> parent extension name), this function will:
-// o Determine if the API has an associated parent extension
-// o If it does, determine whether that extension was enabled in the passed-in DeviceExtensions info
-// Return true if the API has no parent extension, or if its parent extension is enabled; otherwise return false
-static inline bool ApiParentExtensionEnabled(const std::string api_name, const DeviceExtensions *device_extension_info) {
- auto has_ext = api_extension_map.find(api_name);
- // Is this API part of an extension or feature group?
- if (has_ext != api_extension_map.end()) {
- // Was the extension for this API enabled in the CreateDevice call?
- auto info = device_extension_info->get_info(has_ext->second.c_str());
- if ((!info.state) || (device_extension_info->*(info.state) != true)) {
- return false;
- }
- }
- return true;
-}
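For illustration, the following is a minimal sketch of how a layer intercept might use api_extension_map via ApiParentExtensionEnabled to gate an extension entry point. This is an editor's example, not part of the generated header being removed: the wrapper name and the device_extensions/table arguments are assumed to come from the layer's per-device state.

    // Hypothetical wrapper: forward vkTrimCommandPoolKHR only if its parent extension is enabled.
    static void SafeTrimCommandPoolKHR(VkDevice device, VkCommandPool commandPool,
                                       VkCommandPoolTrimFlags flags,
                                       const DeviceExtensions *device_extensions,
                                       const VkLayerDispatchTable *table) {
        // api_extension_map ties "vkTrimCommandPoolKHR" to VK_KHR_maintenance1, so the
        // call is forwarded only when that extension was enabled on this device.
        if (!ApiParentExtensionEnabled("vkTrimCommandPoolKHR", device_extensions)) {
            return;  // parent extension not enabled; skip the downstream call
        }
        table->TrimCommandPoolKHR(device, commandPool, flags);
    }

Core (non-extension) entry points are not in the map, so the check passes for them unconditionally, per the generated logic above.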
-
-
-
-static inline void layer_init_device_dispatch_table(VkDevice device, VkLayerDispatchTable *table, PFN_vkGetDeviceProcAddr gpa) {
- memset(table, 0, sizeof(*table));
- // Device function pointers
- table->GetDeviceProcAddr = gpa;
- table->DestroyDevice = (PFN_vkDestroyDevice) gpa(device, "vkDestroyDevice");
- table->GetDeviceQueue = (PFN_vkGetDeviceQueue) gpa(device, "vkGetDeviceQueue");
- table->QueueSubmit = (PFN_vkQueueSubmit) gpa(device, "vkQueueSubmit");
- table->QueueWaitIdle = (PFN_vkQueueWaitIdle) gpa(device, "vkQueueWaitIdle");
- table->DeviceWaitIdle = (PFN_vkDeviceWaitIdle) gpa(device, "vkDeviceWaitIdle");
- table->AllocateMemory = (PFN_vkAllocateMemory) gpa(device, "vkAllocateMemory");
- table->FreeMemory = (PFN_vkFreeMemory) gpa(device, "vkFreeMemory");
- table->MapMemory = (PFN_vkMapMemory) gpa(device, "vkMapMemory");
- table->UnmapMemory = (PFN_vkUnmapMemory) gpa(device, "vkUnmapMemory");
- table->FlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges) gpa(device, "vkFlushMappedMemoryRanges");
- table->InvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges) gpa(device, "vkInvalidateMappedMemoryRanges");
- table->GetDeviceMemoryCommitment = (PFN_vkGetDeviceMemoryCommitment) gpa(device, "vkGetDeviceMemoryCommitment");
- table->BindBufferMemory = (PFN_vkBindBufferMemory) gpa(device, "vkBindBufferMemory");
- table->BindImageMemory = (PFN_vkBindImageMemory) gpa(device, "vkBindImageMemory");
- table->GetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements) gpa(device, "vkGetBufferMemoryRequirements");
- table->GetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements) gpa(device, "vkGetImageMemoryRequirements");
- table->GetImageSparseMemoryRequirements = (PFN_vkGetImageSparseMemoryRequirements) gpa(device, "vkGetImageSparseMemoryRequirements");
- table->QueueBindSparse = (PFN_vkQueueBindSparse) gpa(device, "vkQueueBindSparse");
- table->CreateFence = (PFN_vkCreateFence) gpa(device, "vkCreateFence");
- table->DestroyFence = (PFN_vkDestroyFence) gpa(device, "vkDestroyFence");
- table->ResetFences = (PFN_vkResetFences) gpa(device, "vkResetFences");
- table->GetFenceStatus = (PFN_vkGetFenceStatus) gpa(device, "vkGetFenceStatus");
- table->WaitForFences = (PFN_vkWaitForFences) gpa(device, "vkWaitForFences");
- table->CreateSemaphore = (PFN_vkCreateSemaphore) gpa(device, "vkCreateSemaphore");
- table->DestroySemaphore = (PFN_vkDestroySemaphore) gpa(device, "vkDestroySemaphore");
- table->CreateEvent = (PFN_vkCreateEvent) gpa(device, "vkCreateEvent");
- table->DestroyEvent = (PFN_vkDestroyEvent) gpa(device, "vkDestroyEvent");
- table->GetEventStatus = (PFN_vkGetEventStatus) gpa(device, "vkGetEventStatus");
- table->SetEvent = (PFN_vkSetEvent) gpa(device, "vkSetEvent");
- table->ResetEvent = (PFN_vkResetEvent) gpa(device, "vkResetEvent");
- table->CreateQueryPool = (PFN_vkCreateQueryPool) gpa(device, "vkCreateQueryPool");
- table->DestroyQueryPool = (PFN_vkDestroyQueryPool) gpa(device, "vkDestroyQueryPool");
- table->GetQueryPoolResults = (PFN_vkGetQueryPoolResults) gpa(device, "vkGetQueryPoolResults");
- table->CreateBuffer = (PFN_vkCreateBuffer) gpa(device, "vkCreateBuffer");
- table->DestroyBuffer = (PFN_vkDestroyBuffer) gpa(device, "vkDestroyBuffer");
- table->CreateBufferView = (PFN_vkCreateBufferView) gpa(device, "vkCreateBufferView");
- table->DestroyBufferView = (PFN_vkDestroyBufferView) gpa(device, "vkDestroyBufferView");
- table->CreateImage = (PFN_vkCreateImage) gpa(device, "vkCreateImage");
- table->DestroyImage = (PFN_vkDestroyImage) gpa(device, "vkDestroyImage");
- table->GetImageSubresourceLayout = (PFN_vkGetImageSubresourceLayout) gpa(device, "vkGetImageSubresourceLayout");
- table->CreateImageView = (PFN_vkCreateImageView) gpa(device, "vkCreateImageView");
- table->DestroyImageView = (PFN_vkDestroyImageView) gpa(device, "vkDestroyImageView");
- table->CreateShaderModule = (PFN_vkCreateShaderModule) gpa(device, "vkCreateShaderModule");
- table->DestroyShaderModule = (PFN_vkDestroyShaderModule) gpa(device, "vkDestroyShaderModule");
- table->CreatePipelineCache = (PFN_vkCreatePipelineCache) gpa(device, "vkCreatePipelineCache");
- table->DestroyPipelineCache = (PFN_vkDestroyPipelineCache) gpa(device, "vkDestroyPipelineCache");
- table->GetPipelineCacheData = (PFN_vkGetPipelineCacheData) gpa(device, "vkGetPipelineCacheData");
- table->MergePipelineCaches = (PFN_vkMergePipelineCaches) gpa(device, "vkMergePipelineCaches");
- table->CreateGraphicsPipelines = (PFN_vkCreateGraphicsPipelines) gpa(device, "vkCreateGraphicsPipelines");
- table->CreateComputePipelines = (PFN_vkCreateComputePipelines) gpa(device, "vkCreateComputePipelines");
- table->DestroyPipeline = (PFN_vkDestroyPipeline) gpa(device, "vkDestroyPipeline");
- table->CreatePipelineLayout = (PFN_vkCreatePipelineLayout) gpa(device, "vkCreatePipelineLayout");
- table->DestroyPipelineLayout = (PFN_vkDestroyPipelineLayout) gpa(device, "vkDestroyPipelineLayout");
- table->CreateSampler = (PFN_vkCreateSampler) gpa(device, "vkCreateSampler");
- table->DestroySampler = (PFN_vkDestroySampler) gpa(device, "vkDestroySampler");
- table->CreateDescriptorSetLayout = (PFN_vkCreateDescriptorSetLayout) gpa(device, "vkCreateDescriptorSetLayout");
- table->DestroyDescriptorSetLayout = (PFN_vkDestroyDescriptorSetLayout) gpa(device, "vkDestroyDescriptorSetLayout");
- table->CreateDescriptorPool = (PFN_vkCreateDescriptorPool) gpa(device, "vkCreateDescriptorPool");
- table->DestroyDescriptorPool = (PFN_vkDestroyDescriptorPool) gpa(device, "vkDestroyDescriptorPool");
- table->ResetDescriptorPool = (PFN_vkResetDescriptorPool) gpa(device, "vkResetDescriptorPool");
- table->AllocateDescriptorSets = (PFN_vkAllocateDescriptorSets) gpa(device, "vkAllocateDescriptorSets");
- table->FreeDescriptorSets = (PFN_vkFreeDescriptorSets) gpa(device, "vkFreeDescriptorSets");
- table->UpdateDescriptorSets = (PFN_vkUpdateDescriptorSets) gpa(device, "vkUpdateDescriptorSets");
- table->CreateFramebuffer = (PFN_vkCreateFramebuffer) gpa(device, "vkCreateFramebuffer");
- table->DestroyFramebuffer = (PFN_vkDestroyFramebuffer) gpa(device, "vkDestroyFramebuffer");
- table->CreateRenderPass = (PFN_vkCreateRenderPass) gpa(device, "vkCreateRenderPass");
- table->DestroyRenderPass = (PFN_vkDestroyRenderPass) gpa(device, "vkDestroyRenderPass");
- table->GetRenderAreaGranularity = (PFN_vkGetRenderAreaGranularity) gpa(device, "vkGetRenderAreaGranularity");
- table->CreateCommandPool = (PFN_vkCreateCommandPool) gpa(device, "vkCreateCommandPool");
- table->DestroyCommandPool = (PFN_vkDestroyCommandPool) gpa(device, "vkDestroyCommandPool");
- table->ResetCommandPool = (PFN_vkResetCommandPool) gpa(device, "vkResetCommandPool");
- table->AllocateCommandBuffers = (PFN_vkAllocateCommandBuffers) gpa(device, "vkAllocateCommandBuffers");
- table->FreeCommandBuffers = (PFN_vkFreeCommandBuffers) gpa(device, "vkFreeCommandBuffers");
- table->BeginCommandBuffer = (PFN_vkBeginCommandBuffer) gpa(device, "vkBeginCommandBuffer");
- table->EndCommandBuffer = (PFN_vkEndCommandBuffer) gpa(device, "vkEndCommandBuffer");
- table->ResetCommandBuffer = (PFN_vkResetCommandBuffer) gpa(device, "vkResetCommandBuffer");
- table->CmdBindPipeline = (PFN_vkCmdBindPipeline) gpa(device, "vkCmdBindPipeline");
- table->CmdSetViewport = (PFN_vkCmdSetViewport) gpa(device, "vkCmdSetViewport");
- table->CmdSetScissor = (PFN_vkCmdSetScissor) gpa(device, "vkCmdSetScissor");
- table->CmdSetLineWidth = (PFN_vkCmdSetLineWidth) gpa(device, "vkCmdSetLineWidth");
- table->CmdSetDepthBias = (PFN_vkCmdSetDepthBias) gpa(device, "vkCmdSetDepthBias");
- table->CmdSetBlendConstants = (PFN_vkCmdSetBlendConstants) gpa(device, "vkCmdSetBlendConstants");
- table->CmdSetDepthBounds = (PFN_vkCmdSetDepthBounds) gpa(device, "vkCmdSetDepthBounds");
- table->CmdSetStencilCompareMask = (PFN_vkCmdSetStencilCompareMask) gpa(device, "vkCmdSetStencilCompareMask");
- table->CmdSetStencilWriteMask = (PFN_vkCmdSetStencilWriteMask) gpa(device, "vkCmdSetStencilWriteMask");
- table->CmdSetStencilReference = (PFN_vkCmdSetStencilReference) gpa(device, "vkCmdSetStencilReference");
- table->CmdBindDescriptorSets = (PFN_vkCmdBindDescriptorSets) gpa(device, "vkCmdBindDescriptorSets");
- table->CmdBindIndexBuffer = (PFN_vkCmdBindIndexBuffer) gpa(device, "vkCmdBindIndexBuffer");
- table->CmdBindVertexBuffers = (PFN_vkCmdBindVertexBuffers) gpa(device, "vkCmdBindVertexBuffers");
- table->CmdDraw = (PFN_vkCmdDraw) gpa(device, "vkCmdDraw");
- table->CmdDrawIndexed = (PFN_vkCmdDrawIndexed) gpa(device, "vkCmdDrawIndexed");
- table->CmdDrawIndirect = (PFN_vkCmdDrawIndirect) gpa(device, "vkCmdDrawIndirect");
- table->CmdDrawIndexedIndirect = (PFN_vkCmdDrawIndexedIndirect) gpa(device, "vkCmdDrawIndexedIndirect");
- table->CmdDispatch = (PFN_vkCmdDispatch) gpa(device, "vkCmdDispatch");
- table->CmdDispatchIndirect = (PFN_vkCmdDispatchIndirect) gpa(device, "vkCmdDispatchIndirect");
- table->CmdCopyBuffer = (PFN_vkCmdCopyBuffer) gpa(device, "vkCmdCopyBuffer");
- table->CmdCopyImage = (PFN_vkCmdCopyImage) gpa(device, "vkCmdCopyImage");
- table->CmdBlitImage = (PFN_vkCmdBlitImage) gpa(device, "vkCmdBlitImage");
- table->CmdCopyBufferToImage = (PFN_vkCmdCopyBufferToImage) gpa(device, "vkCmdCopyBufferToImage");
- table->CmdCopyImageToBuffer = (PFN_vkCmdCopyImageToBuffer) gpa(device, "vkCmdCopyImageToBuffer");
- table->CmdUpdateBuffer = (PFN_vkCmdUpdateBuffer) gpa(device, "vkCmdUpdateBuffer");
- table->CmdFillBuffer = (PFN_vkCmdFillBuffer) gpa(device, "vkCmdFillBuffer");
- table->CmdClearColorImage = (PFN_vkCmdClearColorImage) gpa(device, "vkCmdClearColorImage");
- table->CmdClearDepthStencilImage = (PFN_vkCmdClearDepthStencilImage) gpa(device, "vkCmdClearDepthStencilImage");
- table->CmdClearAttachments = (PFN_vkCmdClearAttachments) gpa(device, "vkCmdClearAttachments");
- table->CmdResolveImage = (PFN_vkCmdResolveImage) gpa(device, "vkCmdResolveImage");
- table->CmdSetEvent = (PFN_vkCmdSetEvent) gpa(device, "vkCmdSetEvent");
- table->CmdResetEvent = (PFN_vkCmdResetEvent) gpa(device, "vkCmdResetEvent");
- table->CmdWaitEvents = (PFN_vkCmdWaitEvents) gpa(device, "vkCmdWaitEvents");
- table->CmdPipelineBarrier = (PFN_vkCmdPipelineBarrier) gpa(device, "vkCmdPipelineBarrier");
- table->CmdBeginQuery = (PFN_vkCmdBeginQuery) gpa(device, "vkCmdBeginQuery");
- table->CmdEndQuery = (PFN_vkCmdEndQuery) gpa(device, "vkCmdEndQuery");
- table->CmdResetQueryPool = (PFN_vkCmdResetQueryPool) gpa(device, "vkCmdResetQueryPool");
- table->CmdWriteTimestamp = (PFN_vkCmdWriteTimestamp) gpa(device, "vkCmdWriteTimestamp");
- table->CmdCopyQueryPoolResults = (PFN_vkCmdCopyQueryPoolResults) gpa(device, "vkCmdCopyQueryPoolResults");
- table->CmdPushConstants = (PFN_vkCmdPushConstants) gpa(device, "vkCmdPushConstants");
- table->CmdBeginRenderPass = (PFN_vkCmdBeginRenderPass) gpa(device, "vkCmdBeginRenderPass");
- table->CmdNextSubpass = (PFN_vkCmdNextSubpass) gpa(device, "vkCmdNextSubpass");
- table->CmdEndRenderPass = (PFN_vkCmdEndRenderPass) gpa(device, "vkCmdEndRenderPass");
- table->CmdExecuteCommands = (PFN_vkCmdExecuteCommands) gpa(device, "vkCmdExecuteCommands");
- table->BindBufferMemory2 = (PFN_vkBindBufferMemory2) gpa(device, "vkBindBufferMemory2");
- if (table->BindBufferMemory2 == nullptr) { table->BindBufferMemory2 = (PFN_vkBindBufferMemory2)StubBindBufferMemory2; }
- table->BindImageMemory2 = (PFN_vkBindImageMemory2) gpa(device, "vkBindImageMemory2");
- if (table->BindImageMemory2 == nullptr) { table->BindImageMemory2 = (PFN_vkBindImageMemory2)StubBindImageMemory2; }
- table->GetDeviceGroupPeerMemoryFeatures = (PFN_vkGetDeviceGroupPeerMemoryFeatures) gpa(device, "vkGetDeviceGroupPeerMemoryFeatures");
- if (table->GetDeviceGroupPeerMemoryFeatures == nullptr) { table->GetDeviceGroupPeerMemoryFeatures = (PFN_vkGetDeviceGroupPeerMemoryFeatures)StubGetDeviceGroupPeerMemoryFeatures; }
- table->CmdSetDeviceMask = (PFN_vkCmdSetDeviceMask) gpa(device, "vkCmdSetDeviceMask");
- if (table->CmdSetDeviceMask == nullptr) { table->CmdSetDeviceMask = (PFN_vkCmdSetDeviceMask)StubCmdSetDeviceMask; }
- table->CmdDispatchBase = (PFN_vkCmdDispatchBase) gpa(device, "vkCmdDispatchBase");
- if (table->CmdDispatchBase == nullptr) { table->CmdDispatchBase = (PFN_vkCmdDispatchBase)StubCmdDispatchBase; }
- table->GetImageMemoryRequirements2 = (PFN_vkGetImageMemoryRequirements2) gpa(device, "vkGetImageMemoryRequirements2");
- if (table->GetImageMemoryRequirements2 == nullptr) { table->GetImageMemoryRequirements2 = (PFN_vkGetImageMemoryRequirements2)StubGetImageMemoryRequirements2; }
- table->GetBufferMemoryRequirements2 = (PFN_vkGetBufferMemoryRequirements2) gpa(device, "vkGetBufferMemoryRequirements2");
- if (table->GetBufferMemoryRequirements2 == nullptr) { table->GetBufferMemoryRequirements2 = (PFN_vkGetBufferMemoryRequirements2)StubGetBufferMemoryRequirements2; }
- table->GetImageSparseMemoryRequirements2 = (PFN_vkGetImageSparseMemoryRequirements2) gpa(device, "vkGetImageSparseMemoryRequirements2");
- if (table->GetImageSparseMemoryRequirements2 == nullptr) { table->GetImageSparseMemoryRequirements2 = (PFN_vkGetImageSparseMemoryRequirements2)StubGetImageSparseMemoryRequirements2; }
- table->TrimCommandPool = (PFN_vkTrimCommandPool) gpa(device, "vkTrimCommandPool");
- if (table->TrimCommandPool == nullptr) { table->TrimCommandPool = (PFN_vkTrimCommandPool)StubTrimCommandPool; }
- table->GetDeviceQueue2 = (PFN_vkGetDeviceQueue2) gpa(device, "vkGetDeviceQueue2");
- if (table->GetDeviceQueue2 == nullptr) { table->GetDeviceQueue2 = (PFN_vkGetDeviceQueue2)StubGetDeviceQueue2; }
- table->CreateSamplerYcbcrConversion = (PFN_vkCreateSamplerYcbcrConversion) gpa(device, "vkCreateSamplerYcbcrConversion");
- if (table->CreateSamplerYcbcrConversion == nullptr) { table->CreateSamplerYcbcrConversion = (PFN_vkCreateSamplerYcbcrConversion)StubCreateSamplerYcbcrConversion; }
- table->DestroySamplerYcbcrConversion = (PFN_vkDestroySamplerYcbcrConversion) gpa(device, "vkDestroySamplerYcbcrConversion");
- if (table->DestroySamplerYcbcrConversion == nullptr) { table->DestroySamplerYcbcrConversion = (PFN_vkDestroySamplerYcbcrConversion)StubDestroySamplerYcbcrConversion; }
- table->CreateDescriptorUpdateTemplate = (PFN_vkCreateDescriptorUpdateTemplate) gpa(device, "vkCreateDescriptorUpdateTemplate");
- if (table->CreateDescriptorUpdateTemplate == nullptr) { table->CreateDescriptorUpdateTemplate = (PFN_vkCreateDescriptorUpdateTemplate)StubCreateDescriptorUpdateTemplate; }
- table->DestroyDescriptorUpdateTemplate = (PFN_vkDestroyDescriptorUpdateTemplate) gpa(device, "vkDestroyDescriptorUpdateTemplate");
- if (table->DestroyDescriptorUpdateTemplate == nullptr) { table->DestroyDescriptorUpdateTemplate = (PFN_vkDestroyDescriptorUpdateTemplate)StubDestroyDescriptorUpdateTemplate; }
- table->UpdateDescriptorSetWithTemplate = (PFN_vkUpdateDescriptorSetWithTemplate) gpa(device, "vkUpdateDescriptorSetWithTemplate");
- if (table->UpdateDescriptorSetWithTemplate == nullptr) { table->UpdateDescriptorSetWithTemplate = (PFN_vkUpdateDescriptorSetWithTemplate)StubUpdateDescriptorSetWithTemplate; }
- table->GetDescriptorSetLayoutSupport = (PFN_vkGetDescriptorSetLayoutSupport) gpa(device, "vkGetDescriptorSetLayoutSupport");
- if (table->GetDescriptorSetLayoutSupport == nullptr) { table->GetDescriptorSetLayoutSupport = (PFN_vkGetDescriptorSetLayoutSupport)StubGetDescriptorSetLayoutSupport; }
- table->CreateSwapchainKHR = (PFN_vkCreateSwapchainKHR) gpa(device, "vkCreateSwapchainKHR");
- if (table->CreateSwapchainKHR == nullptr) { table->CreateSwapchainKHR = (PFN_vkCreateSwapchainKHR)StubCreateSwapchainKHR; }
- table->DestroySwapchainKHR = (PFN_vkDestroySwapchainKHR) gpa(device, "vkDestroySwapchainKHR");
- if (table->DestroySwapchainKHR == nullptr) { table->DestroySwapchainKHR = (PFN_vkDestroySwapchainKHR)StubDestroySwapchainKHR; }
- table->GetSwapchainImagesKHR = (PFN_vkGetSwapchainImagesKHR) gpa(device, "vkGetSwapchainImagesKHR");
- if (table->GetSwapchainImagesKHR == nullptr) { table->GetSwapchainImagesKHR = (PFN_vkGetSwapchainImagesKHR)StubGetSwapchainImagesKHR; }
- table->AcquireNextImageKHR = (PFN_vkAcquireNextImageKHR) gpa(device, "vkAcquireNextImageKHR");
- if (table->AcquireNextImageKHR == nullptr) { table->AcquireNextImageKHR = (PFN_vkAcquireNextImageKHR)StubAcquireNextImageKHR; }
- table->QueuePresentKHR = (PFN_vkQueuePresentKHR) gpa(device, "vkQueuePresentKHR");
- if (table->QueuePresentKHR == nullptr) { table->QueuePresentKHR = (PFN_vkQueuePresentKHR)StubQueuePresentKHR; }
- table->GetDeviceGroupPresentCapabilitiesKHR = (PFN_vkGetDeviceGroupPresentCapabilitiesKHR) gpa(device, "vkGetDeviceGroupPresentCapabilitiesKHR");
- if (table->GetDeviceGroupPresentCapabilitiesKHR == nullptr) { table->GetDeviceGroupPresentCapabilitiesKHR = (PFN_vkGetDeviceGroupPresentCapabilitiesKHR)StubGetDeviceGroupPresentCapabilitiesKHR; }
- table->GetDeviceGroupSurfacePresentModesKHR = (PFN_vkGetDeviceGroupSurfacePresentModesKHR) gpa(device, "vkGetDeviceGroupSurfacePresentModesKHR");
- if (table->GetDeviceGroupSurfacePresentModesKHR == nullptr) { table->GetDeviceGroupSurfacePresentModesKHR = (PFN_vkGetDeviceGroupSurfacePresentModesKHR)StubGetDeviceGroupSurfacePresentModesKHR; }
- table->AcquireNextImage2KHR = (PFN_vkAcquireNextImage2KHR) gpa(device, "vkAcquireNextImage2KHR");
- if (table->AcquireNextImage2KHR == nullptr) { table->AcquireNextImage2KHR = (PFN_vkAcquireNextImage2KHR)StubAcquireNextImage2KHR; }
- table->CreateSharedSwapchainsKHR = (PFN_vkCreateSharedSwapchainsKHR) gpa(device, "vkCreateSharedSwapchainsKHR");
- if (table->CreateSharedSwapchainsKHR == nullptr) { table->CreateSharedSwapchainsKHR = (PFN_vkCreateSharedSwapchainsKHR)StubCreateSharedSwapchainsKHR; }
- table->GetDeviceGroupPeerMemoryFeaturesKHR = (PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR) gpa(device, "vkGetDeviceGroupPeerMemoryFeaturesKHR");
- if (table->GetDeviceGroupPeerMemoryFeaturesKHR == nullptr) { table->GetDeviceGroupPeerMemoryFeaturesKHR = (PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR)StubGetDeviceGroupPeerMemoryFeaturesKHR; }
- table->CmdSetDeviceMaskKHR = (PFN_vkCmdSetDeviceMaskKHR) gpa(device, "vkCmdSetDeviceMaskKHR");
- if (table->CmdSetDeviceMaskKHR == nullptr) { table->CmdSetDeviceMaskKHR = (PFN_vkCmdSetDeviceMaskKHR)StubCmdSetDeviceMaskKHR; }
- table->CmdDispatchBaseKHR = (PFN_vkCmdDispatchBaseKHR) gpa(device, "vkCmdDispatchBaseKHR");
- if (table->CmdDispatchBaseKHR == nullptr) { table->CmdDispatchBaseKHR = (PFN_vkCmdDispatchBaseKHR)StubCmdDispatchBaseKHR; }
- table->TrimCommandPoolKHR = (PFN_vkTrimCommandPoolKHR) gpa(device, "vkTrimCommandPoolKHR");
- if (table->TrimCommandPoolKHR == nullptr) { table->TrimCommandPoolKHR = (PFN_vkTrimCommandPoolKHR)StubTrimCommandPoolKHR; }
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- table->GetMemoryWin32HandleKHR = (PFN_vkGetMemoryWin32HandleKHR) gpa(device, "vkGetMemoryWin32HandleKHR");
- if (table->GetMemoryWin32HandleKHR == nullptr) { table->GetMemoryWin32HandleKHR = (PFN_vkGetMemoryWin32HandleKHR)StubGetMemoryWin32HandleKHR; }
-#endif // VK_USE_PLATFORM_WIN32_KHR
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- table->GetMemoryWin32HandlePropertiesKHR = (PFN_vkGetMemoryWin32HandlePropertiesKHR) gpa(device, "vkGetMemoryWin32HandlePropertiesKHR");
- if (table->GetMemoryWin32HandlePropertiesKHR == nullptr) { table->GetMemoryWin32HandlePropertiesKHR = (PFN_vkGetMemoryWin32HandlePropertiesKHR)StubGetMemoryWin32HandlePropertiesKHR; }
-#endif // VK_USE_PLATFORM_WIN32_KHR
- table->GetMemoryFdKHR = (PFN_vkGetMemoryFdKHR) gpa(device, "vkGetMemoryFdKHR");
- if (table->GetMemoryFdKHR == nullptr) { table->GetMemoryFdKHR = (PFN_vkGetMemoryFdKHR)StubGetMemoryFdKHR; }
- table->GetMemoryFdPropertiesKHR = (PFN_vkGetMemoryFdPropertiesKHR) gpa(device, "vkGetMemoryFdPropertiesKHR");
- if (table->GetMemoryFdPropertiesKHR == nullptr) { table->GetMemoryFdPropertiesKHR = (PFN_vkGetMemoryFdPropertiesKHR)StubGetMemoryFdPropertiesKHR; }
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- table->ImportSemaphoreWin32HandleKHR = (PFN_vkImportSemaphoreWin32HandleKHR) gpa(device, "vkImportSemaphoreWin32HandleKHR");
- if (table->ImportSemaphoreWin32HandleKHR == nullptr) { table->ImportSemaphoreWin32HandleKHR = (PFN_vkImportSemaphoreWin32HandleKHR)StubImportSemaphoreWin32HandleKHR; }
-#endif // VK_USE_PLATFORM_WIN32_KHR
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- table->GetSemaphoreWin32HandleKHR = (PFN_vkGetSemaphoreWin32HandleKHR) gpa(device, "vkGetSemaphoreWin32HandleKHR");
- if (table->GetSemaphoreWin32HandleKHR == nullptr) { table->GetSemaphoreWin32HandleKHR = (PFN_vkGetSemaphoreWin32HandleKHR)StubGetSemaphoreWin32HandleKHR; }
-#endif // VK_USE_PLATFORM_WIN32_KHR
- table->ImportSemaphoreFdKHR = (PFN_vkImportSemaphoreFdKHR) gpa(device, "vkImportSemaphoreFdKHR");
- if (table->ImportSemaphoreFdKHR == nullptr) { table->ImportSemaphoreFdKHR = (PFN_vkImportSemaphoreFdKHR)StubImportSemaphoreFdKHR; }
- table->GetSemaphoreFdKHR = (PFN_vkGetSemaphoreFdKHR) gpa(device, "vkGetSemaphoreFdKHR");
- if (table->GetSemaphoreFdKHR == nullptr) { table->GetSemaphoreFdKHR = (PFN_vkGetSemaphoreFdKHR)StubGetSemaphoreFdKHR; }
- table->CmdPushDescriptorSetKHR = (PFN_vkCmdPushDescriptorSetKHR) gpa(device, "vkCmdPushDescriptorSetKHR");
- if (table->CmdPushDescriptorSetKHR == nullptr) { table->CmdPushDescriptorSetKHR = (PFN_vkCmdPushDescriptorSetKHR)StubCmdPushDescriptorSetKHR; }
- table->CmdPushDescriptorSetWithTemplateKHR = (PFN_vkCmdPushDescriptorSetWithTemplateKHR) gpa(device, "vkCmdPushDescriptorSetWithTemplateKHR");
- if (table->CmdPushDescriptorSetWithTemplateKHR == nullptr) { table->CmdPushDescriptorSetWithTemplateKHR = (PFN_vkCmdPushDescriptorSetWithTemplateKHR)StubCmdPushDescriptorSetWithTemplateKHR; }
- table->CreateDescriptorUpdateTemplateKHR = (PFN_vkCreateDescriptorUpdateTemplateKHR) gpa(device, "vkCreateDescriptorUpdateTemplateKHR");
- if (table->CreateDescriptorUpdateTemplateKHR == nullptr) { table->CreateDescriptorUpdateTemplateKHR = (PFN_vkCreateDescriptorUpdateTemplateKHR)StubCreateDescriptorUpdateTemplateKHR; }
- table->DestroyDescriptorUpdateTemplateKHR = (PFN_vkDestroyDescriptorUpdateTemplateKHR) gpa(device, "vkDestroyDescriptorUpdateTemplateKHR");
- if (table->DestroyDescriptorUpdateTemplateKHR == nullptr) { table->DestroyDescriptorUpdateTemplateKHR = (PFN_vkDestroyDescriptorUpdateTemplateKHR)StubDestroyDescriptorUpdateTemplateKHR; }
- table->UpdateDescriptorSetWithTemplateKHR = (PFN_vkUpdateDescriptorSetWithTemplateKHR) gpa(device, "vkUpdateDescriptorSetWithTemplateKHR");
- if (table->UpdateDescriptorSetWithTemplateKHR == nullptr) { table->UpdateDescriptorSetWithTemplateKHR = (PFN_vkUpdateDescriptorSetWithTemplateKHR)StubUpdateDescriptorSetWithTemplateKHR; }
- table->CreateRenderPass2KHR = (PFN_vkCreateRenderPass2KHR) gpa(device, "vkCreateRenderPass2KHR");
- if (table->CreateRenderPass2KHR == nullptr) { table->CreateRenderPass2KHR = (PFN_vkCreateRenderPass2KHR)StubCreateRenderPass2KHR; }
- table->CmdBeginRenderPass2KHR = (PFN_vkCmdBeginRenderPass2KHR) gpa(device, "vkCmdBeginRenderPass2KHR");
- if (table->CmdBeginRenderPass2KHR == nullptr) { table->CmdBeginRenderPass2KHR = (PFN_vkCmdBeginRenderPass2KHR)StubCmdBeginRenderPass2KHR; }
- table->CmdNextSubpass2KHR = (PFN_vkCmdNextSubpass2KHR) gpa(device, "vkCmdNextSubpass2KHR");
- if (table->CmdNextSubpass2KHR == nullptr) { table->CmdNextSubpass2KHR = (PFN_vkCmdNextSubpass2KHR)StubCmdNextSubpass2KHR; }
- table->CmdEndRenderPass2KHR = (PFN_vkCmdEndRenderPass2KHR) gpa(device, "vkCmdEndRenderPass2KHR");
- if (table->CmdEndRenderPass2KHR == nullptr) { table->CmdEndRenderPass2KHR = (PFN_vkCmdEndRenderPass2KHR)StubCmdEndRenderPass2KHR; }
- table->GetSwapchainStatusKHR = (PFN_vkGetSwapchainStatusKHR) gpa(device, "vkGetSwapchainStatusKHR");
- if (table->GetSwapchainStatusKHR == nullptr) { table->GetSwapchainStatusKHR = (PFN_vkGetSwapchainStatusKHR)StubGetSwapchainStatusKHR; }
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- table->ImportFenceWin32HandleKHR = (PFN_vkImportFenceWin32HandleKHR) gpa(device, "vkImportFenceWin32HandleKHR");
- if (table->ImportFenceWin32HandleKHR == nullptr) { table->ImportFenceWin32HandleKHR = (PFN_vkImportFenceWin32HandleKHR)StubImportFenceWin32HandleKHR; }
-#endif // VK_USE_PLATFORM_WIN32_KHR
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- table->GetFenceWin32HandleKHR = (PFN_vkGetFenceWin32HandleKHR) gpa(device, "vkGetFenceWin32HandleKHR");
- if (table->GetFenceWin32HandleKHR == nullptr) { table->GetFenceWin32HandleKHR = (PFN_vkGetFenceWin32HandleKHR)StubGetFenceWin32HandleKHR; }
-#endif // VK_USE_PLATFORM_WIN32_KHR
- table->ImportFenceFdKHR = (PFN_vkImportFenceFdKHR) gpa(device, "vkImportFenceFdKHR");
- if (table->ImportFenceFdKHR == nullptr) { table->ImportFenceFdKHR = (PFN_vkImportFenceFdKHR)StubImportFenceFdKHR; }
- table->GetFenceFdKHR = (PFN_vkGetFenceFdKHR) gpa(device, "vkGetFenceFdKHR");
- if (table->GetFenceFdKHR == nullptr) { table->GetFenceFdKHR = (PFN_vkGetFenceFdKHR)StubGetFenceFdKHR; }
- table->GetImageMemoryRequirements2KHR = (PFN_vkGetImageMemoryRequirements2KHR) gpa(device, "vkGetImageMemoryRequirements2KHR");
- if (table->GetImageMemoryRequirements2KHR == nullptr) { table->GetImageMemoryRequirements2KHR = (PFN_vkGetImageMemoryRequirements2KHR)StubGetImageMemoryRequirements2KHR; }
- table->GetBufferMemoryRequirements2KHR = (PFN_vkGetBufferMemoryRequirements2KHR) gpa(device, "vkGetBufferMemoryRequirements2KHR");
- if (table->GetBufferMemoryRequirements2KHR == nullptr) { table->GetBufferMemoryRequirements2KHR = (PFN_vkGetBufferMemoryRequirements2KHR)StubGetBufferMemoryRequirements2KHR; }
- table->GetImageSparseMemoryRequirements2KHR = (PFN_vkGetImageSparseMemoryRequirements2KHR) gpa(device, "vkGetImageSparseMemoryRequirements2KHR");
- if (table->GetImageSparseMemoryRequirements2KHR == nullptr) { table->GetImageSparseMemoryRequirements2KHR = (PFN_vkGetImageSparseMemoryRequirements2KHR)StubGetImageSparseMemoryRequirements2KHR; }
- table->CreateSamplerYcbcrConversionKHR = (PFN_vkCreateSamplerYcbcrConversionKHR) gpa(device, "vkCreateSamplerYcbcrConversionKHR");
- if (table->CreateSamplerYcbcrConversionKHR == nullptr) { table->CreateSamplerYcbcrConversionKHR = (PFN_vkCreateSamplerYcbcrConversionKHR)StubCreateSamplerYcbcrConversionKHR; }
- table->DestroySamplerYcbcrConversionKHR = (PFN_vkDestroySamplerYcbcrConversionKHR) gpa(device, "vkDestroySamplerYcbcrConversionKHR");
- if (table->DestroySamplerYcbcrConversionKHR == nullptr) { table->DestroySamplerYcbcrConversionKHR = (PFN_vkDestroySamplerYcbcrConversionKHR)StubDestroySamplerYcbcrConversionKHR; }
- table->BindBufferMemory2KHR = (PFN_vkBindBufferMemory2KHR) gpa(device, "vkBindBufferMemory2KHR");
- if (table->BindBufferMemory2KHR == nullptr) { table->BindBufferMemory2KHR = (PFN_vkBindBufferMemory2KHR)StubBindBufferMemory2KHR; }
- table->BindImageMemory2KHR = (PFN_vkBindImageMemory2KHR) gpa(device, "vkBindImageMemory2KHR");
- if (table->BindImageMemory2KHR == nullptr) { table->BindImageMemory2KHR = (PFN_vkBindImageMemory2KHR)StubBindImageMemory2KHR; }
- table->GetDescriptorSetLayoutSupportKHR = (PFN_vkGetDescriptorSetLayoutSupportKHR) gpa(device, "vkGetDescriptorSetLayoutSupportKHR");
- if (table->GetDescriptorSetLayoutSupportKHR == nullptr) { table->GetDescriptorSetLayoutSupportKHR = (PFN_vkGetDescriptorSetLayoutSupportKHR)StubGetDescriptorSetLayoutSupportKHR; }
- table->CmdDrawIndirectCountKHR = (PFN_vkCmdDrawIndirectCountKHR) gpa(device, "vkCmdDrawIndirectCountKHR");
- if (table->CmdDrawIndirectCountKHR == nullptr) { table->CmdDrawIndirectCountKHR = (PFN_vkCmdDrawIndirectCountKHR)StubCmdDrawIndirectCountKHR; }
- table->CmdDrawIndexedIndirectCountKHR = (PFN_vkCmdDrawIndexedIndirectCountKHR) gpa(device, "vkCmdDrawIndexedIndirectCountKHR");
- if (table->CmdDrawIndexedIndirectCountKHR == nullptr) { table->CmdDrawIndexedIndirectCountKHR = (PFN_vkCmdDrawIndexedIndirectCountKHR)StubCmdDrawIndexedIndirectCountKHR; }
- table->GetPipelineExecutablePropertiesKHR = (PFN_vkGetPipelineExecutablePropertiesKHR) gpa(device, "vkGetPipelineExecutablePropertiesKHR");
- if (table->GetPipelineExecutablePropertiesKHR == nullptr) { table->GetPipelineExecutablePropertiesKHR = (PFN_vkGetPipelineExecutablePropertiesKHR)StubGetPipelineExecutablePropertiesKHR; }
- table->GetPipelineExecutableStatisticsKHR = (PFN_vkGetPipelineExecutableStatisticsKHR) gpa(device, "vkGetPipelineExecutableStatisticsKHR");
- if (table->GetPipelineExecutableStatisticsKHR == nullptr) { table->GetPipelineExecutableStatisticsKHR = (PFN_vkGetPipelineExecutableStatisticsKHR)StubGetPipelineExecutableStatisticsKHR; }
- table->GetPipelineExecutableInternalRepresentationsKHR = (PFN_vkGetPipelineExecutableInternalRepresentationsKHR) gpa(device, "vkGetPipelineExecutableInternalRepresentationsKHR");
- if (table->GetPipelineExecutableInternalRepresentationsKHR == nullptr) { table->GetPipelineExecutableInternalRepresentationsKHR = (PFN_vkGetPipelineExecutableInternalRepresentationsKHR)StubGetPipelineExecutableInternalRepresentationsKHR; }
- table->DebugMarkerSetObjectTagEXT = (PFN_vkDebugMarkerSetObjectTagEXT) gpa(device, "vkDebugMarkerSetObjectTagEXT");
- if (table->DebugMarkerSetObjectTagEXT == nullptr) { table->DebugMarkerSetObjectTagEXT = (PFN_vkDebugMarkerSetObjectTagEXT)StubDebugMarkerSetObjectTagEXT; }
- table->DebugMarkerSetObjectNameEXT = (PFN_vkDebugMarkerSetObjectNameEXT) gpa(device, "vkDebugMarkerSetObjectNameEXT");
- if (table->DebugMarkerSetObjectNameEXT == nullptr) { table->DebugMarkerSetObjectNameEXT = (PFN_vkDebugMarkerSetObjectNameEXT)StubDebugMarkerSetObjectNameEXT; }
- table->CmdDebugMarkerBeginEXT = (PFN_vkCmdDebugMarkerBeginEXT) gpa(device, "vkCmdDebugMarkerBeginEXT");
- if (table->CmdDebugMarkerBeginEXT == nullptr) { table->CmdDebugMarkerBeginEXT = (PFN_vkCmdDebugMarkerBeginEXT)StubCmdDebugMarkerBeginEXT; }
- table->CmdDebugMarkerEndEXT = (PFN_vkCmdDebugMarkerEndEXT) gpa(device, "vkCmdDebugMarkerEndEXT");
- if (table->CmdDebugMarkerEndEXT == nullptr) { table->CmdDebugMarkerEndEXT = (PFN_vkCmdDebugMarkerEndEXT)StubCmdDebugMarkerEndEXT; }
- table->CmdDebugMarkerInsertEXT = (PFN_vkCmdDebugMarkerInsertEXT) gpa(device, "vkCmdDebugMarkerInsertEXT");
- if (table->CmdDebugMarkerInsertEXT == nullptr) { table->CmdDebugMarkerInsertEXT = (PFN_vkCmdDebugMarkerInsertEXT)StubCmdDebugMarkerInsertEXT; }
- table->CmdBindTransformFeedbackBuffersEXT = (PFN_vkCmdBindTransformFeedbackBuffersEXT) gpa(device, "vkCmdBindTransformFeedbackBuffersEXT");
- if (table->CmdBindTransformFeedbackBuffersEXT == nullptr) { table->CmdBindTransformFeedbackBuffersEXT = (PFN_vkCmdBindTransformFeedbackBuffersEXT)StubCmdBindTransformFeedbackBuffersEXT; }
- table->CmdBeginTransformFeedbackEXT = (PFN_vkCmdBeginTransformFeedbackEXT) gpa(device, "vkCmdBeginTransformFeedbackEXT");
- if (table->CmdBeginTransformFeedbackEXT == nullptr) { table->CmdBeginTransformFeedbackEXT = (PFN_vkCmdBeginTransformFeedbackEXT)StubCmdBeginTransformFeedbackEXT; }
- table->CmdEndTransformFeedbackEXT = (PFN_vkCmdEndTransformFeedbackEXT) gpa(device, "vkCmdEndTransformFeedbackEXT");
- if (table->CmdEndTransformFeedbackEXT == nullptr) { table->CmdEndTransformFeedbackEXT = (PFN_vkCmdEndTransformFeedbackEXT)StubCmdEndTransformFeedbackEXT; }
- table->CmdBeginQueryIndexedEXT = (PFN_vkCmdBeginQueryIndexedEXT) gpa(device, "vkCmdBeginQueryIndexedEXT");
- if (table->CmdBeginQueryIndexedEXT == nullptr) { table->CmdBeginQueryIndexedEXT = (PFN_vkCmdBeginQueryIndexedEXT)StubCmdBeginQueryIndexedEXT; }
- table->CmdEndQueryIndexedEXT = (PFN_vkCmdEndQueryIndexedEXT) gpa(device, "vkCmdEndQueryIndexedEXT");
- if (table->CmdEndQueryIndexedEXT == nullptr) { table->CmdEndQueryIndexedEXT = (PFN_vkCmdEndQueryIndexedEXT)StubCmdEndQueryIndexedEXT; }
- table->CmdDrawIndirectByteCountEXT = (PFN_vkCmdDrawIndirectByteCountEXT) gpa(device, "vkCmdDrawIndirectByteCountEXT");
- if (table->CmdDrawIndirectByteCountEXT == nullptr) { table->CmdDrawIndirectByteCountEXT = (PFN_vkCmdDrawIndirectByteCountEXT)StubCmdDrawIndirectByteCountEXT; }
- table->GetImageViewHandleNVX = (PFN_vkGetImageViewHandleNVX) gpa(device, "vkGetImageViewHandleNVX");
- if (table->GetImageViewHandleNVX == nullptr) { table->GetImageViewHandleNVX = (PFN_vkGetImageViewHandleNVX)StubGetImageViewHandleNVX; }
- table->CmdDrawIndirectCountAMD = (PFN_vkCmdDrawIndirectCountAMD) gpa(device, "vkCmdDrawIndirectCountAMD");
- if (table->CmdDrawIndirectCountAMD == nullptr) { table->CmdDrawIndirectCountAMD = (PFN_vkCmdDrawIndirectCountAMD)StubCmdDrawIndirectCountAMD; }
- table->CmdDrawIndexedIndirectCountAMD = (PFN_vkCmdDrawIndexedIndirectCountAMD) gpa(device, "vkCmdDrawIndexedIndirectCountAMD");
- if (table->CmdDrawIndexedIndirectCountAMD == nullptr) { table->CmdDrawIndexedIndirectCountAMD = (PFN_vkCmdDrawIndexedIndirectCountAMD)StubCmdDrawIndexedIndirectCountAMD; }
- table->GetShaderInfoAMD = (PFN_vkGetShaderInfoAMD) gpa(device, "vkGetShaderInfoAMD");
- if (table->GetShaderInfoAMD == nullptr) { table->GetShaderInfoAMD = (PFN_vkGetShaderInfoAMD)StubGetShaderInfoAMD; }
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- table->GetMemoryWin32HandleNV = (PFN_vkGetMemoryWin32HandleNV) gpa(device, "vkGetMemoryWin32HandleNV");
- if (table->GetMemoryWin32HandleNV == nullptr) { table->GetMemoryWin32HandleNV = (PFN_vkGetMemoryWin32HandleNV)StubGetMemoryWin32HandleNV; }
-#endif // VK_USE_PLATFORM_WIN32_KHR
- table->CmdBeginConditionalRenderingEXT = (PFN_vkCmdBeginConditionalRenderingEXT) gpa(device, "vkCmdBeginConditionalRenderingEXT");
- if (table->CmdBeginConditionalRenderingEXT == nullptr) { table->CmdBeginConditionalRenderingEXT = (PFN_vkCmdBeginConditionalRenderingEXT)StubCmdBeginConditionalRenderingEXT; }
- table->CmdEndConditionalRenderingEXT = (PFN_vkCmdEndConditionalRenderingEXT) gpa(device, "vkCmdEndConditionalRenderingEXT");
- if (table->CmdEndConditionalRenderingEXT == nullptr) { table->CmdEndConditionalRenderingEXT = (PFN_vkCmdEndConditionalRenderingEXT)StubCmdEndConditionalRenderingEXT; }
- table->CmdProcessCommandsNVX = (PFN_vkCmdProcessCommandsNVX) gpa(device, "vkCmdProcessCommandsNVX");
- if (table->CmdProcessCommandsNVX == nullptr) { table->CmdProcessCommandsNVX = (PFN_vkCmdProcessCommandsNVX)StubCmdProcessCommandsNVX; }
- table->CmdReserveSpaceForCommandsNVX = (PFN_vkCmdReserveSpaceForCommandsNVX) gpa(device, "vkCmdReserveSpaceForCommandsNVX");
- if (table->CmdReserveSpaceForCommandsNVX == nullptr) { table->CmdReserveSpaceForCommandsNVX = (PFN_vkCmdReserveSpaceForCommandsNVX)StubCmdReserveSpaceForCommandsNVX; }
- table->CreateIndirectCommandsLayoutNVX = (PFN_vkCreateIndirectCommandsLayoutNVX) gpa(device, "vkCreateIndirectCommandsLayoutNVX");
- if (table->CreateIndirectCommandsLayoutNVX == nullptr) { table->CreateIndirectCommandsLayoutNVX = (PFN_vkCreateIndirectCommandsLayoutNVX)StubCreateIndirectCommandsLayoutNVX; }
- table->DestroyIndirectCommandsLayoutNVX = (PFN_vkDestroyIndirectCommandsLayoutNVX) gpa(device, "vkDestroyIndirectCommandsLayoutNVX");
- if (table->DestroyIndirectCommandsLayoutNVX == nullptr) { table->DestroyIndirectCommandsLayoutNVX = (PFN_vkDestroyIndirectCommandsLayoutNVX)StubDestroyIndirectCommandsLayoutNVX; }
- table->CreateObjectTableNVX = (PFN_vkCreateObjectTableNVX) gpa(device, "vkCreateObjectTableNVX");
- if (table->CreateObjectTableNVX == nullptr) { table->CreateObjectTableNVX = (PFN_vkCreateObjectTableNVX)StubCreateObjectTableNVX; }
- table->DestroyObjectTableNVX = (PFN_vkDestroyObjectTableNVX) gpa(device, "vkDestroyObjectTableNVX");
- if (table->DestroyObjectTableNVX == nullptr) { table->DestroyObjectTableNVX = (PFN_vkDestroyObjectTableNVX)StubDestroyObjectTableNVX; }
- table->RegisterObjectsNVX = (PFN_vkRegisterObjectsNVX) gpa(device, "vkRegisterObjectsNVX");
- if (table->RegisterObjectsNVX == nullptr) { table->RegisterObjectsNVX = (PFN_vkRegisterObjectsNVX)StubRegisterObjectsNVX; }
- table->UnregisterObjectsNVX = (PFN_vkUnregisterObjectsNVX) gpa(device, "vkUnregisterObjectsNVX");
- if (table->UnregisterObjectsNVX == nullptr) { table->UnregisterObjectsNVX = (PFN_vkUnregisterObjectsNVX)StubUnregisterObjectsNVX; }
- table->CmdSetViewportWScalingNV = (PFN_vkCmdSetViewportWScalingNV) gpa(device, "vkCmdSetViewportWScalingNV");
- if (table->CmdSetViewportWScalingNV == nullptr) { table->CmdSetViewportWScalingNV = (PFN_vkCmdSetViewportWScalingNV)StubCmdSetViewportWScalingNV; }
- table->DisplayPowerControlEXT = (PFN_vkDisplayPowerControlEXT) gpa(device, "vkDisplayPowerControlEXT");
- if (table->DisplayPowerControlEXT == nullptr) { table->DisplayPowerControlEXT = (PFN_vkDisplayPowerControlEXT)StubDisplayPowerControlEXT; }
- table->RegisterDeviceEventEXT = (PFN_vkRegisterDeviceEventEXT) gpa(device, "vkRegisterDeviceEventEXT");
- if (table->RegisterDeviceEventEXT == nullptr) { table->RegisterDeviceEventEXT = (PFN_vkRegisterDeviceEventEXT)StubRegisterDeviceEventEXT; }
- table->RegisterDisplayEventEXT = (PFN_vkRegisterDisplayEventEXT) gpa(device, "vkRegisterDisplayEventEXT");
- if (table->RegisterDisplayEventEXT == nullptr) { table->RegisterDisplayEventEXT = (PFN_vkRegisterDisplayEventEXT)StubRegisterDisplayEventEXT; }
- table->GetSwapchainCounterEXT = (PFN_vkGetSwapchainCounterEXT) gpa(device, "vkGetSwapchainCounterEXT");
- if (table->GetSwapchainCounterEXT == nullptr) { table->GetSwapchainCounterEXT = (PFN_vkGetSwapchainCounterEXT)StubGetSwapchainCounterEXT; }
- table->GetRefreshCycleDurationGOOGLE = (PFN_vkGetRefreshCycleDurationGOOGLE) gpa(device, "vkGetRefreshCycleDurationGOOGLE");
- if (table->GetRefreshCycleDurationGOOGLE == nullptr) { table->GetRefreshCycleDurationGOOGLE = (PFN_vkGetRefreshCycleDurationGOOGLE)StubGetRefreshCycleDurationGOOGLE; }
- table->GetPastPresentationTimingGOOGLE = (PFN_vkGetPastPresentationTimingGOOGLE) gpa(device, "vkGetPastPresentationTimingGOOGLE");
- if (table->GetPastPresentationTimingGOOGLE == nullptr) { table->GetPastPresentationTimingGOOGLE = (PFN_vkGetPastPresentationTimingGOOGLE)StubGetPastPresentationTimingGOOGLE; }
- table->CmdSetDiscardRectangleEXT = (PFN_vkCmdSetDiscardRectangleEXT) gpa(device, "vkCmdSetDiscardRectangleEXT");
- if (table->CmdSetDiscardRectangleEXT == nullptr) { table->CmdSetDiscardRectangleEXT = (PFN_vkCmdSetDiscardRectangleEXT)StubCmdSetDiscardRectangleEXT; }
- table->SetHdrMetadataEXT = (PFN_vkSetHdrMetadataEXT) gpa(device, "vkSetHdrMetadataEXT");
- if (table->SetHdrMetadataEXT == nullptr) { table->SetHdrMetadataEXT = (PFN_vkSetHdrMetadataEXT)StubSetHdrMetadataEXT; }
- table->SetDebugUtilsObjectNameEXT = (PFN_vkSetDebugUtilsObjectNameEXT) gpa(device, "vkSetDebugUtilsObjectNameEXT");
- if (table->SetDebugUtilsObjectNameEXT == nullptr) { table->SetDebugUtilsObjectNameEXT = (PFN_vkSetDebugUtilsObjectNameEXT)StubSetDebugUtilsObjectNameEXT; }
- table->SetDebugUtilsObjectTagEXT = (PFN_vkSetDebugUtilsObjectTagEXT) gpa(device, "vkSetDebugUtilsObjectTagEXT");
- if (table->SetDebugUtilsObjectTagEXT == nullptr) { table->SetDebugUtilsObjectTagEXT = (PFN_vkSetDebugUtilsObjectTagEXT)StubSetDebugUtilsObjectTagEXT; }
- table->QueueBeginDebugUtilsLabelEXT = (PFN_vkQueueBeginDebugUtilsLabelEXT) gpa(device, "vkQueueBeginDebugUtilsLabelEXT");
- if (table->QueueBeginDebugUtilsLabelEXT == nullptr) { table->QueueBeginDebugUtilsLabelEXT = (PFN_vkQueueBeginDebugUtilsLabelEXT)StubQueueBeginDebugUtilsLabelEXT; }
- table->QueueEndDebugUtilsLabelEXT = (PFN_vkQueueEndDebugUtilsLabelEXT) gpa(device, "vkQueueEndDebugUtilsLabelEXT");
- if (table->QueueEndDebugUtilsLabelEXT == nullptr) { table->QueueEndDebugUtilsLabelEXT = (PFN_vkQueueEndDebugUtilsLabelEXT)StubQueueEndDebugUtilsLabelEXT; }
- table->QueueInsertDebugUtilsLabelEXT = (PFN_vkQueueInsertDebugUtilsLabelEXT) gpa(device, "vkQueueInsertDebugUtilsLabelEXT");
- if (table->QueueInsertDebugUtilsLabelEXT == nullptr) { table->QueueInsertDebugUtilsLabelEXT = (PFN_vkQueueInsertDebugUtilsLabelEXT)StubQueueInsertDebugUtilsLabelEXT; }
- table->CmdBeginDebugUtilsLabelEXT = (PFN_vkCmdBeginDebugUtilsLabelEXT) gpa(device, "vkCmdBeginDebugUtilsLabelEXT");
- if (table->CmdBeginDebugUtilsLabelEXT == nullptr) { table->CmdBeginDebugUtilsLabelEXT = (PFN_vkCmdBeginDebugUtilsLabelEXT)StubCmdBeginDebugUtilsLabelEXT; }
- table->CmdEndDebugUtilsLabelEXT = (PFN_vkCmdEndDebugUtilsLabelEXT) gpa(device, "vkCmdEndDebugUtilsLabelEXT");
- if (table->CmdEndDebugUtilsLabelEXT == nullptr) { table->CmdEndDebugUtilsLabelEXT = (PFN_vkCmdEndDebugUtilsLabelEXT)StubCmdEndDebugUtilsLabelEXT; }
- table->CmdInsertDebugUtilsLabelEXT = (PFN_vkCmdInsertDebugUtilsLabelEXT) gpa(device, "vkCmdInsertDebugUtilsLabelEXT");
- if (table->CmdInsertDebugUtilsLabelEXT == nullptr) { table->CmdInsertDebugUtilsLabelEXT = (PFN_vkCmdInsertDebugUtilsLabelEXT)StubCmdInsertDebugUtilsLabelEXT; }
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
- table->GetAndroidHardwareBufferPropertiesANDROID = (PFN_vkGetAndroidHardwareBufferPropertiesANDROID) gpa(device, "vkGetAndroidHardwareBufferPropertiesANDROID");
- if (table->GetAndroidHardwareBufferPropertiesANDROID == nullptr) { table->GetAndroidHardwareBufferPropertiesANDROID = (PFN_vkGetAndroidHardwareBufferPropertiesANDROID)StubGetAndroidHardwareBufferPropertiesANDROID; }
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
- table->GetMemoryAndroidHardwareBufferANDROID = (PFN_vkGetMemoryAndroidHardwareBufferANDROID) gpa(device, "vkGetMemoryAndroidHardwareBufferANDROID");
- if (table->GetMemoryAndroidHardwareBufferANDROID == nullptr) { table->GetMemoryAndroidHardwareBufferANDROID = (PFN_vkGetMemoryAndroidHardwareBufferANDROID)StubGetMemoryAndroidHardwareBufferANDROID; }
-#endif // VK_USE_PLATFORM_ANDROID_KHR
- table->CmdSetSampleLocationsEXT = (PFN_vkCmdSetSampleLocationsEXT) gpa(device, "vkCmdSetSampleLocationsEXT");
- if (table->CmdSetSampleLocationsEXT == nullptr) { table->CmdSetSampleLocationsEXT = (PFN_vkCmdSetSampleLocationsEXT)StubCmdSetSampleLocationsEXT; }
- table->GetImageDrmFormatModifierPropertiesEXT = (PFN_vkGetImageDrmFormatModifierPropertiesEXT) gpa(device, "vkGetImageDrmFormatModifierPropertiesEXT");
- if (table->GetImageDrmFormatModifierPropertiesEXT == nullptr) { table->GetImageDrmFormatModifierPropertiesEXT = (PFN_vkGetImageDrmFormatModifierPropertiesEXT)StubGetImageDrmFormatModifierPropertiesEXT; }
- table->CreateValidationCacheEXT = (PFN_vkCreateValidationCacheEXT) gpa(device, "vkCreateValidationCacheEXT");
- if (table->CreateValidationCacheEXT == nullptr) { table->CreateValidationCacheEXT = (PFN_vkCreateValidationCacheEXT)StubCreateValidationCacheEXT; }
- table->DestroyValidationCacheEXT = (PFN_vkDestroyValidationCacheEXT) gpa(device, "vkDestroyValidationCacheEXT");
- if (table->DestroyValidationCacheEXT == nullptr) { table->DestroyValidationCacheEXT = (PFN_vkDestroyValidationCacheEXT)StubDestroyValidationCacheEXT; }
- table->MergeValidationCachesEXT = (PFN_vkMergeValidationCachesEXT) gpa(device, "vkMergeValidationCachesEXT");
- if (table->MergeValidationCachesEXT == nullptr) { table->MergeValidationCachesEXT = (PFN_vkMergeValidationCachesEXT)StubMergeValidationCachesEXT; }
- table->GetValidationCacheDataEXT = (PFN_vkGetValidationCacheDataEXT) gpa(device, "vkGetValidationCacheDataEXT");
- if (table->GetValidationCacheDataEXT == nullptr) { table->GetValidationCacheDataEXT = (PFN_vkGetValidationCacheDataEXT)StubGetValidationCacheDataEXT; }
- table->CmdBindShadingRateImageNV = (PFN_vkCmdBindShadingRateImageNV) gpa(device, "vkCmdBindShadingRateImageNV");
- if (table->CmdBindShadingRateImageNV == nullptr) { table->CmdBindShadingRateImageNV = (PFN_vkCmdBindShadingRateImageNV)StubCmdBindShadingRateImageNV; }
- table->CmdSetViewportShadingRatePaletteNV = (PFN_vkCmdSetViewportShadingRatePaletteNV) gpa(device, "vkCmdSetViewportShadingRatePaletteNV");
- if (table->CmdSetViewportShadingRatePaletteNV == nullptr) { table->CmdSetViewportShadingRatePaletteNV = (PFN_vkCmdSetViewportShadingRatePaletteNV)StubCmdSetViewportShadingRatePaletteNV; }
- table->CmdSetCoarseSampleOrderNV = (PFN_vkCmdSetCoarseSampleOrderNV) gpa(device, "vkCmdSetCoarseSampleOrderNV");
- if (table->CmdSetCoarseSampleOrderNV == nullptr) { table->CmdSetCoarseSampleOrderNV = (PFN_vkCmdSetCoarseSampleOrderNV)StubCmdSetCoarseSampleOrderNV; }
- table->CreateAccelerationStructureNV = (PFN_vkCreateAccelerationStructureNV) gpa(device, "vkCreateAccelerationStructureNV");
- if (table->CreateAccelerationStructureNV == nullptr) { table->CreateAccelerationStructureNV = (PFN_vkCreateAccelerationStructureNV)StubCreateAccelerationStructureNV; }
- table->DestroyAccelerationStructureNV = (PFN_vkDestroyAccelerationStructureNV) gpa(device, "vkDestroyAccelerationStructureNV");
- if (table->DestroyAccelerationStructureNV == nullptr) { table->DestroyAccelerationStructureNV = (PFN_vkDestroyAccelerationStructureNV)StubDestroyAccelerationStructureNV; }
- table->GetAccelerationStructureMemoryRequirementsNV = (PFN_vkGetAccelerationStructureMemoryRequirementsNV) gpa(device, "vkGetAccelerationStructureMemoryRequirementsNV");
- if (table->GetAccelerationStructureMemoryRequirementsNV == nullptr) { table->GetAccelerationStructureMemoryRequirementsNV = (PFN_vkGetAccelerationStructureMemoryRequirementsNV)StubGetAccelerationStructureMemoryRequirementsNV; }
- table->BindAccelerationStructureMemoryNV = (PFN_vkBindAccelerationStructureMemoryNV) gpa(device, "vkBindAccelerationStructureMemoryNV");
- if (table->BindAccelerationStructureMemoryNV == nullptr) { table->BindAccelerationStructureMemoryNV = (PFN_vkBindAccelerationStructureMemoryNV)StubBindAccelerationStructureMemoryNV; }
- table->CmdBuildAccelerationStructureNV = (PFN_vkCmdBuildAccelerationStructureNV) gpa(device, "vkCmdBuildAccelerationStructureNV");
- if (table->CmdBuildAccelerationStructureNV == nullptr) { table->CmdBuildAccelerationStructureNV = (PFN_vkCmdBuildAccelerationStructureNV)StubCmdBuildAccelerationStructureNV; }
- table->CmdCopyAccelerationStructureNV = (PFN_vkCmdCopyAccelerationStructureNV) gpa(device, "vkCmdCopyAccelerationStructureNV");
- if (table->CmdCopyAccelerationStructureNV == nullptr) { table->CmdCopyAccelerationStructureNV = (PFN_vkCmdCopyAccelerationStructureNV)StubCmdCopyAccelerationStructureNV; }
- table->CmdTraceRaysNV = (PFN_vkCmdTraceRaysNV) gpa(device, "vkCmdTraceRaysNV");
- if (table->CmdTraceRaysNV == nullptr) { table->CmdTraceRaysNV = (PFN_vkCmdTraceRaysNV)StubCmdTraceRaysNV; }
- table->CreateRayTracingPipelinesNV = (PFN_vkCreateRayTracingPipelinesNV) gpa(device, "vkCreateRayTracingPipelinesNV");
- if (table->CreateRayTracingPipelinesNV == nullptr) { table->CreateRayTracingPipelinesNV = (PFN_vkCreateRayTracingPipelinesNV)StubCreateRayTracingPipelinesNV; }
- table->GetRayTracingShaderGroupHandlesNV = (PFN_vkGetRayTracingShaderGroupHandlesNV) gpa(device, "vkGetRayTracingShaderGroupHandlesNV");
- if (table->GetRayTracingShaderGroupHandlesNV == nullptr) { table->GetRayTracingShaderGroupHandlesNV = (PFN_vkGetRayTracingShaderGroupHandlesNV)StubGetRayTracingShaderGroupHandlesNV; }
- table->GetAccelerationStructureHandleNV = (PFN_vkGetAccelerationStructureHandleNV) gpa(device, "vkGetAccelerationStructureHandleNV");
- if (table->GetAccelerationStructureHandleNV == nullptr) { table->GetAccelerationStructureHandleNV = (PFN_vkGetAccelerationStructureHandleNV)StubGetAccelerationStructureHandleNV; }
- table->CmdWriteAccelerationStructuresPropertiesNV = (PFN_vkCmdWriteAccelerationStructuresPropertiesNV) gpa(device, "vkCmdWriteAccelerationStructuresPropertiesNV");
- if (table->CmdWriteAccelerationStructuresPropertiesNV == nullptr) { table->CmdWriteAccelerationStructuresPropertiesNV = (PFN_vkCmdWriteAccelerationStructuresPropertiesNV)StubCmdWriteAccelerationStructuresPropertiesNV; }
- table->CompileDeferredNV = (PFN_vkCompileDeferredNV) gpa(device, "vkCompileDeferredNV");
- if (table->CompileDeferredNV == nullptr) { table->CompileDeferredNV = (PFN_vkCompileDeferredNV)StubCompileDeferredNV; }
- table->GetMemoryHostPointerPropertiesEXT = (PFN_vkGetMemoryHostPointerPropertiesEXT) gpa(device, "vkGetMemoryHostPointerPropertiesEXT");
- if (table->GetMemoryHostPointerPropertiesEXT == nullptr) { table->GetMemoryHostPointerPropertiesEXT = (PFN_vkGetMemoryHostPointerPropertiesEXT)StubGetMemoryHostPointerPropertiesEXT; }
- table->CmdWriteBufferMarkerAMD = (PFN_vkCmdWriteBufferMarkerAMD) gpa(device, "vkCmdWriteBufferMarkerAMD");
- if (table->CmdWriteBufferMarkerAMD == nullptr) { table->CmdWriteBufferMarkerAMD = (PFN_vkCmdWriteBufferMarkerAMD)StubCmdWriteBufferMarkerAMD; }
- table->GetCalibratedTimestampsEXT = (PFN_vkGetCalibratedTimestampsEXT) gpa(device, "vkGetCalibratedTimestampsEXT");
- if (table->GetCalibratedTimestampsEXT == nullptr) { table->GetCalibratedTimestampsEXT = (PFN_vkGetCalibratedTimestampsEXT)StubGetCalibratedTimestampsEXT; }
- table->CmdDrawMeshTasksNV = (PFN_vkCmdDrawMeshTasksNV) gpa(device, "vkCmdDrawMeshTasksNV");
- if (table->CmdDrawMeshTasksNV == nullptr) { table->CmdDrawMeshTasksNV = (PFN_vkCmdDrawMeshTasksNV)StubCmdDrawMeshTasksNV; }
- table->CmdDrawMeshTasksIndirectNV = (PFN_vkCmdDrawMeshTasksIndirectNV) gpa(device, "vkCmdDrawMeshTasksIndirectNV");
- if (table->CmdDrawMeshTasksIndirectNV == nullptr) { table->CmdDrawMeshTasksIndirectNV = (PFN_vkCmdDrawMeshTasksIndirectNV)StubCmdDrawMeshTasksIndirectNV; }
- table->CmdDrawMeshTasksIndirectCountNV = (PFN_vkCmdDrawMeshTasksIndirectCountNV) gpa(device, "vkCmdDrawMeshTasksIndirectCountNV");
- if (table->CmdDrawMeshTasksIndirectCountNV == nullptr) { table->CmdDrawMeshTasksIndirectCountNV = (PFN_vkCmdDrawMeshTasksIndirectCountNV)StubCmdDrawMeshTasksIndirectCountNV; }
- table->CmdSetExclusiveScissorNV = (PFN_vkCmdSetExclusiveScissorNV) gpa(device, "vkCmdSetExclusiveScissorNV");
- if (table->CmdSetExclusiveScissorNV == nullptr) { table->CmdSetExclusiveScissorNV = (PFN_vkCmdSetExclusiveScissorNV)StubCmdSetExclusiveScissorNV; }
- table->CmdSetCheckpointNV = (PFN_vkCmdSetCheckpointNV) gpa(device, "vkCmdSetCheckpointNV");
- if (table->CmdSetCheckpointNV == nullptr) { table->CmdSetCheckpointNV = (PFN_vkCmdSetCheckpointNV)StubCmdSetCheckpointNV; }
- table->GetQueueCheckpointDataNV = (PFN_vkGetQueueCheckpointDataNV) gpa(device, "vkGetQueueCheckpointDataNV");
- if (table->GetQueueCheckpointDataNV == nullptr) { table->GetQueueCheckpointDataNV = (PFN_vkGetQueueCheckpointDataNV)StubGetQueueCheckpointDataNV; }
- table->InitializePerformanceApiINTEL = (PFN_vkInitializePerformanceApiINTEL) gpa(device, "vkInitializePerformanceApiINTEL");
- if (table->InitializePerformanceApiINTEL == nullptr) { table->InitializePerformanceApiINTEL = (PFN_vkInitializePerformanceApiINTEL)StubInitializePerformanceApiINTEL; }
- table->UninitializePerformanceApiINTEL = (PFN_vkUninitializePerformanceApiINTEL) gpa(device, "vkUninitializePerformanceApiINTEL");
- if (table->UninitializePerformanceApiINTEL == nullptr) { table->UninitializePerformanceApiINTEL = (PFN_vkUninitializePerformanceApiINTEL)StubUninitializePerformanceApiINTEL; }
- table->CmdSetPerformanceMarkerINTEL = (PFN_vkCmdSetPerformanceMarkerINTEL) gpa(device, "vkCmdSetPerformanceMarkerINTEL");
- if (table->CmdSetPerformanceMarkerINTEL == nullptr) { table->CmdSetPerformanceMarkerINTEL = (PFN_vkCmdSetPerformanceMarkerINTEL)StubCmdSetPerformanceMarkerINTEL; }
- table->CmdSetPerformanceStreamMarkerINTEL = (PFN_vkCmdSetPerformanceStreamMarkerINTEL) gpa(device, "vkCmdSetPerformanceStreamMarkerINTEL");
- if (table->CmdSetPerformanceStreamMarkerINTEL == nullptr) { table->CmdSetPerformanceStreamMarkerINTEL = (PFN_vkCmdSetPerformanceStreamMarkerINTEL)StubCmdSetPerformanceStreamMarkerINTEL; }
- table->CmdSetPerformanceOverrideINTEL = (PFN_vkCmdSetPerformanceOverrideINTEL) gpa(device, "vkCmdSetPerformanceOverrideINTEL");
- if (table->CmdSetPerformanceOverrideINTEL == nullptr) { table->CmdSetPerformanceOverrideINTEL = (PFN_vkCmdSetPerformanceOverrideINTEL)StubCmdSetPerformanceOverrideINTEL; }
- table->AcquirePerformanceConfigurationINTEL = (PFN_vkAcquirePerformanceConfigurationINTEL) gpa(device, "vkAcquirePerformanceConfigurationINTEL");
- if (table->AcquirePerformanceConfigurationINTEL == nullptr) { table->AcquirePerformanceConfigurationINTEL = (PFN_vkAcquirePerformanceConfigurationINTEL)StubAcquirePerformanceConfigurationINTEL; }
- table->ReleasePerformanceConfigurationINTEL = (PFN_vkReleasePerformanceConfigurationINTEL) gpa(device, "vkReleasePerformanceConfigurationINTEL");
- if (table->ReleasePerformanceConfigurationINTEL == nullptr) { table->ReleasePerformanceConfigurationINTEL = (PFN_vkReleasePerformanceConfigurationINTEL)StubReleasePerformanceConfigurationINTEL; }
- table->QueueSetPerformanceConfigurationINTEL = (PFN_vkQueueSetPerformanceConfigurationINTEL) gpa(device, "vkQueueSetPerformanceConfigurationINTEL");
- if (table->QueueSetPerformanceConfigurationINTEL == nullptr) { table->QueueSetPerformanceConfigurationINTEL = (PFN_vkQueueSetPerformanceConfigurationINTEL)StubQueueSetPerformanceConfigurationINTEL; }
- table->GetPerformanceParameterINTEL = (PFN_vkGetPerformanceParameterINTEL) gpa(device, "vkGetPerformanceParameterINTEL");
- if (table->GetPerformanceParameterINTEL == nullptr) { table->GetPerformanceParameterINTEL = (PFN_vkGetPerformanceParameterINTEL)StubGetPerformanceParameterINTEL; }
- table->SetLocalDimmingAMD = (PFN_vkSetLocalDimmingAMD) gpa(device, "vkSetLocalDimmingAMD");
- if (table->SetLocalDimmingAMD == nullptr) { table->SetLocalDimmingAMD = (PFN_vkSetLocalDimmingAMD)StubSetLocalDimmingAMD; }
- table->GetBufferDeviceAddressEXT = (PFN_vkGetBufferDeviceAddressEXT) gpa(device, "vkGetBufferDeviceAddressEXT");
- if (table->GetBufferDeviceAddressEXT == nullptr) { table->GetBufferDeviceAddressEXT = (PFN_vkGetBufferDeviceAddressEXT)StubGetBufferDeviceAddressEXT; }
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- table->AcquireFullScreenExclusiveModeEXT = (PFN_vkAcquireFullScreenExclusiveModeEXT) gpa(device, "vkAcquireFullScreenExclusiveModeEXT");
- if (table->AcquireFullScreenExclusiveModeEXT == nullptr) { table->AcquireFullScreenExclusiveModeEXT = (PFN_vkAcquireFullScreenExclusiveModeEXT)StubAcquireFullScreenExclusiveModeEXT; }
-#endif // VK_USE_PLATFORM_WIN32_KHR
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- table->ReleaseFullScreenExclusiveModeEXT = (PFN_vkReleaseFullScreenExclusiveModeEXT) gpa(device, "vkReleaseFullScreenExclusiveModeEXT");
- if (table->ReleaseFullScreenExclusiveModeEXT == nullptr) { table->ReleaseFullScreenExclusiveModeEXT = (PFN_vkReleaseFullScreenExclusiveModeEXT)StubReleaseFullScreenExclusiveModeEXT; }
-#endif // VK_USE_PLATFORM_WIN32_KHR
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- table->GetDeviceGroupSurfacePresentModes2EXT = (PFN_vkGetDeviceGroupSurfacePresentModes2EXT) gpa(device, "vkGetDeviceGroupSurfacePresentModes2EXT");
- if (table->GetDeviceGroupSurfacePresentModes2EXT == nullptr) { table->GetDeviceGroupSurfacePresentModes2EXT = (PFN_vkGetDeviceGroupSurfacePresentModes2EXT)StubGetDeviceGroupSurfacePresentModes2EXT; }
-#endif // VK_USE_PLATFORM_WIN32_KHR
- table->CmdSetLineStippleEXT = (PFN_vkCmdSetLineStippleEXT) gpa(device, "vkCmdSetLineStippleEXT");
- if (table->CmdSetLineStippleEXT == nullptr) { table->CmdSetLineStippleEXT = (PFN_vkCmdSetLineStippleEXT)StubCmdSetLineStippleEXT; }
- table->ResetQueryPoolEXT = (PFN_vkResetQueryPoolEXT) gpa(device, "vkResetQueryPoolEXT");
- if (table->ResetQueryPoolEXT == nullptr) { table->ResetQueryPoolEXT = (PFN_vkResetQueryPoolEXT)StubResetQueryPoolEXT; }
-}
-
-
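// Note on the helper removed above (sketch, not part of this change): it resolves
// every core and extension device entry point through the next layer's
// vkGetDeviceProcAddr, and entry points from Vulkan 1.1 or extensions fall back to
// the generated Stub* functions when the driver does not export them, so table
// entries for optional functionality are safe to call unconditionally. A minimal
// usage sketch follows; VkLayerDispatchTable and the next_gdpa parameter obtained
// during vkCreateDevice are assumptions based on the standard layer chassis, not
// taken from this diff.
//
#include "vk_dispatch_table_helper.h"  // generated header declaring layer_init_device_dispatch_table

// Hypothetical illustration of the intended call pattern from a layer's
// device-creation hook.
static void InitDeviceTableSketch(VkDevice device, PFN_vkGetDeviceProcAddr next_gdpa) {
    VkLayerDispatchTable table;
    // One call fills the whole table; extension entry points the driver lacks are
    // replaced by the matching Stub* rather than being left null.
    layer_init_device_dispatch_table(device, &table, next_gdpa);
    // Core entries such as table.DestroyDevice, and extension entries such as
    // table.TrimCommandPoolKHR, can now be invoked through the table.
}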
-static inline void layer_init_instance_dispatch_table(VkInstance instance, VkLayerInstanceDispatchTable *table, PFN_vkGetInstanceProcAddr gpa) {
- memset(table, 0, sizeof(*table));
- // Instance function pointers
- table->DestroyInstance = (PFN_vkDestroyInstance) gpa(instance, "vkDestroyInstance");
- table->EnumeratePhysicalDevices = (PFN_vkEnumeratePhysicalDevices) gpa(instance, "vkEnumeratePhysicalDevices");
- table->GetPhysicalDeviceFeatures = (PFN_vkGetPhysicalDeviceFeatures) gpa(instance, "vkGetPhysicalDeviceFeatures");
- table->GetPhysicalDeviceFormatProperties = (PFN_vkGetPhysicalDeviceFormatProperties) gpa(instance, "vkGetPhysicalDeviceFormatProperties");
- table->GetPhysicalDeviceImageFormatProperties = (PFN_vkGetPhysicalDeviceImageFormatProperties) gpa(instance, "vkGetPhysicalDeviceImageFormatProperties");
- table->GetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties) gpa(instance, "vkGetPhysicalDeviceProperties");
- table->GetPhysicalDeviceQueueFamilyProperties = (PFN_vkGetPhysicalDeviceQueueFamilyProperties) gpa(instance, "vkGetPhysicalDeviceQueueFamilyProperties");
- table->GetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties) gpa(instance, "vkGetPhysicalDeviceMemoryProperties");
- table->GetInstanceProcAddr = gpa;
- table->EnumerateDeviceExtensionProperties = (PFN_vkEnumerateDeviceExtensionProperties) gpa(instance, "vkEnumerateDeviceExtensionProperties");
- table->EnumerateDeviceLayerProperties = (PFN_vkEnumerateDeviceLayerProperties) gpa(instance, "vkEnumerateDeviceLayerProperties");
- table->GetPhysicalDeviceSparseImageFormatProperties = (PFN_vkGetPhysicalDeviceSparseImageFormatProperties) gpa(instance, "vkGetPhysicalDeviceSparseImageFormatProperties");
- table->EnumeratePhysicalDeviceGroups = (PFN_vkEnumeratePhysicalDeviceGroups) gpa(instance, "vkEnumeratePhysicalDeviceGroups");
- table->GetPhysicalDeviceFeatures2 = (PFN_vkGetPhysicalDeviceFeatures2) gpa(instance, "vkGetPhysicalDeviceFeatures2");
- table->GetPhysicalDeviceProperties2 = (PFN_vkGetPhysicalDeviceProperties2) gpa(instance, "vkGetPhysicalDeviceProperties2");
- table->GetPhysicalDeviceFormatProperties2 = (PFN_vkGetPhysicalDeviceFormatProperties2) gpa(instance, "vkGetPhysicalDeviceFormatProperties2");
- table->GetPhysicalDeviceImageFormatProperties2 = (PFN_vkGetPhysicalDeviceImageFormatProperties2) gpa(instance, "vkGetPhysicalDeviceImageFormatProperties2");
- table->GetPhysicalDeviceQueueFamilyProperties2 = (PFN_vkGetPhysicalDeviceQueueFamilyProperties2) gpa(instance, "vkGetPhysicalDeviceQueueFamilyProperties2");
- table->GetPhysicalDeviceMemoryProperties2 = (PFN_vkGetPhysicalDeviceMemoryProperties2) gpa(instance, "vkGetPhysicalDeviceMemoryProperties2");
- table->GetPhysicalDeviceSparseImageFormatProperties2 = (PFN_vkGetPhysicalDeviceSparseImageFormatProperties2) gpa(instance, "vkGetPhysicalDeviceSparseImageFormatProperties2");
- table->GetPhysicalDeviceExternalBufferProperties = (PFN_vkGetPhysicalDeviceExternalBufferProperties) gpa(instance, "vkGetPhysicalDeviceExternalBufferProperties");
- table->GetPhysicalDeviceExternalFenceProperties = (PFN_vkGetPhysicalDeviceExternalFenceProperties) gpa(instance, "vkGetPhysicalDeviceExternalFenceProperties");
- table->GetPhysicalDeviceExternalSemaphoreProperties = (PFN_vkGetPhysicalDeviceExternalSemaphoreProperties) gpa(instance, "vkGetPhysicalDeviceExternalSemaphoreProperties");
- table->DestroySurfaceKHR = (PFN_vkDestroySurfaceKHR) gpa(instance, "vkDestroySurfaceKHR");
- table->GetPhysicalDeviceSurfaceSupportKHR = (PFN_vkGetPhysicalDeviceSurfaceSupportKHR) gpa(instance, "vkGetPhysicalDeviceSurfaceSupportKHR");
- table->GetPhysicalDeviceSurfaceCapabilitiesKHR = (PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR) gpa(instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR");
- table->GetPhysicalDeviceSurfaceFormatsKHR = (PFN_vkGetPhysicalDeviceSurfaceFormatsKHR) gpa(instance, "vkGetPhysicalDeviceSurfaceFormatsKHR");
- table->GetPhysicalDeviceSurfacePresentModesKHR = (PFN_vkGetPhysicalDeviceSurfacePresentModesKHR) gpa(instance, "vkGetPhysicalDeviceSurfacePresentModesKHR");
- table->GetPhysicalDevicePresentRectanglesKHR = (PFN_vkGetPhysicalDevicePresentRectanglesKHR) gpa(instance, "vkGetPhysicalDevicePresentRectanglesKHR");
- table->GetPhysicalDeviceDisplayPropertiesKHR = (PFN_vkGetPhysicalDeviceDisplayPropertiesKHR) gpa(instance, "vkGetPhysicalDeviceDisplayPropertiesKHR");
- table->GetPhysicalDeviceDisplayPlanePropertiesKHR = (PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR) gpa(instance, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR");
- table->GetDisplayPlaneSupportedDisplaysKHR = (PFN_vkGetDisplayPlaneSupportedDisplaysKHR) gpa(instance, "vkGetDisplayPlaneSupportedDisplaysKHR");
- table->GetDisplayModePropertiesKHR = (PFN_vkGetDisplayModePropertiesKHR) gpa(instance, "vkGetDisplayModePropertiesKHR");
- table->CreateDisplayModeKHR = (PFN_vkCreateDisplayModeKHR) gpa(instance, "vkCreateDisplayModeKHR");
- table->GetDisplayPlaneCapabilitiesKHR = (PFN_vkGetDisplayPlaneCapabilitiesKHR) gpa(instance, "vkGetDisplayPlaneCapabilitiesKHR");
- table->CreateDisplayPlaneSurfaceKHR = (PFN_vkCreateDisplayPlaneSurfaceKHR) gpa(instance, "vkCreateDisplayPlaneSurfaceKHR");
-#ifdef VK_USE_PLATFORM_XLIB_KHR
- table->CreateXlibSurfaceKHR = (PFN_vkCreateXlibSurfaceKHR) gpa(instance, "vkCreateXlibSurfaceKHR");
-#endif // VK_USE_PLATFORM_XLIB_KHR
-#ifdef VK_USE_PLATFORM_XLIB_KHR
- table->GetPhysicalDeviceXlibPresentationSupportKHR = (PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR) gpa(instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR");
-#endif // VK_USE_PLATFORM_XLIB_KHR
-#ifdef VK_USE_PLATFORM_XCB_KHR
- table->CreateXcbSurfaceKHR = (PFN_vkCreateXcbSurfaceKHR) gpa(instance, "vkCreateXcbSurfaceKHR");
-#endif // VK_USE_PLATFORM_XCB_KHR
-#ifdef VK_USE_PLATFORM_XCB_KHR
- table->GetPhysicalDeviceXcbPresentationSupportKHR = (PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR) gpa(instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR");
-#endif // VK_USE_PLATFORM_XCB_KHR
-#ifdef VK_USE_PLATFORM_WAYLAND_KHR
- table->CreateWaylandSurfaceKHR = (PFN_vkCreateWaylandSurfaceKHR) gpa(instance, "vkCreateWaylandSurfaceKHR");
-#endif // VK_USE_PLATFORM_WAYLAND_KHR
-#ifdef VK_USE_PLATFORM_WAYLAND_KHR
- table->GetPhysicalDeviceWaylandPresentationSupportKHR = (PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR) gpa(instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR");
-#endif // VK_USE_PLATFORM_WAYLAND_KHR
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
- table->CreateAndroidSurfaceKHR = (PFN_vkCreateAndroidSurfaceKHR) gpa(instance, "vkCreateAndroidSurfaceKHR");
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- table->CreateWin32SurfaceKHR = (PFN_vkCreateWin32SurfaceKHR) gpa(instance, "vkCreateWin32SurfaceKHR");
-#endif // VK_USE_PLATFORM_WIN32_KHR
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- table->GetPhysicalDeviceWin32PresentationSupportKHR = (PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR) gpa(instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR");
-#endif // VK_USE_PLATFORM_WIN32_KHR
- table->GetPhysicalDeviceFeatures2KHR = (PFN_vkGetPhysicalDeviceFeatures2KHR) gpa(instance, "vkGetPhysicalDeviceFeatures2KHR");
- table->GetPhysicalDeviceProperties2KHR = (PFN_vkGetPhysicalDeviceProperties2KHR) gpa(instance, "vkGetPhysicalDeviceProperties2KHR");
- table->GetPhysicalDeviceFormatProperties2KHR = (PFN_vkGetPhysicalDeviceFormatProperties2KHR) gpa(instance, "vkGetPhysicalDeviceFormatProperties2KHR");
- table->GetPhysicalDeviceImageFormatProperties2KHR = (PFN_vkGetPhysicalDeviceImageFormatProperties2KHR) gpa(instance, "vkGetPhysicalDeviceImageFormatProperties2KHR");
- table->GetPhysicalDeviceQueueFamilyProperties2KHR = (PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR) gpa(instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR");
- table->GetPhysicalDeviceMemoryProperties2KHR = (PFN_vkGetPhysicalDeviceMemoryProperties2KHR) gpa(instance, "vkGetPhysicalDeviceMemoryProperties2KHR");
- table->GetPhysicalDeviceSparseImageFormatProperties2KHR = (PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR) gpa(instance, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR");
- table->EnumeratePhysicalDeviceGroupsKHR = (PFN_vkEnumeratePhysicalDeviceGroupsKHR) gpa(instance, "vkEnumeratePhysicalDeviceGroupsKHR");
- table->GetPhysicalDeviceExternalBufferPropertiesKHR = (PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR) gpa(instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR");
- table->GetPhysicalDeviceExternalSemaphorePropertiesKHR = (PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR) gpa(instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR");
- table->GetPhysicalDeviceExternalFencePropertiesKHR = (PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR) gpa(instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR");
- table->GetPhysicalDeviceSurfaceCapabilities2KHR = (PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR) gpa(instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR");
- table->GetPhysicalDeviceSurfaceFormats2KHR = (PFN_vkGetPhysicalDeviceSurfaceFormats2KHR) gpa(instance, "vkGetPhysicalDeviceSurfaceFormats2KHR");
- table->GetPhysicalDeviceDisplayProperties2KHR = (PFN_vkGetPhysicalDeviceDisplayProperties2KHR) gpa(instance, "vkGetPhysicalDeviceDisplayProperties2KHR");
- table->GetPhysicalDeviceDisplayPlaneProperties2KHR = (PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR) gpa(instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR");
- table->GetDisplayModeProperties2KHR = (PFN_vkGetDisplayModeProperties2KHR) gpa(instance, "vkGetDisplayModeProperties2KHR");
- table->GetDisplayPlaneCapabilities2KHR = (PFN_vkGetDisplayPlaneCapabilities2KHR) gpa(instance, "vkGetDisplayPlaneCapabilities2KHR");
- table->CreateDebugReportCallbackEXT = (PFN_vkCreateDebugReportCallbackEXT) gpa(instance, "vkCreateDebugReportCallbackEXT");
- table->DestroyDebugReportCallbackEXT = (PFN_vkDestroyDebugReportCallbackEXT) gpa(instance, "vkDestroyDebugReportCallbackEXT");
- table->DebugReportMessageEXT = (PFN_vkDebugReportMessageEXT) gpa(instance, "vkDebugReportMessageEXT");
-#ifdef VK_USE_PLATFORM_GGP
- table->CreateStreamDescriptorSurfaceGGP = (PFN_vkCreateStreamDescriptorSurfaceGGP) gpa(instance, "vkCreateStreamDescriptorSurfaceGGP");
-#endif // VK_USE_PLATFORM_GGP
- table->GetPhysicalDeviceExternalImageFormatPropertiesNV = (PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV) gpa(instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV");
-#ifdef VK_USE_PLATFORM_VI_NN
- table->CreateViSurfaceNN = (PFN_vkCreateViSurfaceNN) gpa(instance, "vkCreateViSurfaceNN");
-#endif // VK_USE_PLATFORM_VI_NN
- table->GetPhysicalDeviceGeneratedCommandsPropertiesNVX = (PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX) gpa(instance, "vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX");
- table->ReleaseDisplayEXT = (PFN_vkReleaseDisplayEXT) gpa(instance, "vkReleaseDisplayEXT");
-#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
- table->AcquireXlibDisplayEXT = (PFN_vkAcquireXlibDisplayEXT) gpa(instance, "vkAcquireXlibDisplayEXT");
-#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
-#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
- table->GetRandROutputDisplayEXT = (PFN_vkGetRandROutputDisplayEXT) gpa(instance, "vkGetRandROutputDisplayEXT");
-#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
- table->GetPhysicalDeviceSurfaceCapabilities2EXT = (PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT) gpa(instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT");
-#ifdef VK_USE_PLATFORM_IOS_MVK
- table->CreateIOSSurfaceMVK = (PFN_vkCreateIOSSurfaceMVK) gpa(instance, "vkCreateIOSSurfaceMVK");
-#endif // VK_USE_PLATFORM_IOS_MVK
-#ifdef VK_USE_PLATFORM_MACOS_MVK
- table->CreateMacOSSurfaceMVK = (PFN_vkCreateMacOSSurfaceMVK) gpa(instance, "vkCreateMacOSSurfaceMVK");
-#endif // VK_USE_PLATFORM_MACOS_MVK
- table->CreateDebugUtilsMessengerEXT = (PFN_vkCreateDebugUtilsMessengerEXT) gpa(instance, "vkCreateDebugUtilsMessengerEXT");
- table->DestroyDebugUtilsMessengerEXT = (PFN_vkDestroyDebugUtilsMessengerEXT) gpa(instance, "vkDestroyDebugUtilsMessengerEXT");
- table->SubmitDebugUtilsMessageEXT = (PFN_vkSubmitDebugUtilsMessageEXT) gpa(instance, "vkSubmitDebugUtilsMessageEXT");
- table->GetPhysicalDeviceMultisamplePropertiesEXT = (PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT) gpa(instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT");
- table->GetPhysicalDeviceCalibrateableTimeDomainsEXT = (PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT) gpa(instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT");
-#ifdef VK_USE_PLATFORM_FUCHSIA
- table->CreateImagePipeSurfaceFUCHSIA = (PFN_vkCreateImagePipeSurfaceFUCHSIA) gpa(instance, "vkCreateImagePipeSurfaceFUCHSIA");
-#endif // VK_USE_PLATFORM_FUCHSIA
-#ifdef VK_USE_PLATFORM_METAL_EXT
- table->CreateMetalSurfaceEXT = (PFN_vkCreateMetalSurfaceEXT) gpa(instance, "vkCreateMetalSurfaceEXT");
-#endif // VK_USE_PLATFORM_METAL_EXT
- table->GetPhysicalDeviceCooperativeMatrixPropertiesNV = (PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV) gpa(instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV");
- table->GetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = (PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV) gpa(instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV");
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- table->GetPhysicalDeviceSurfacePresentModes2EXT = (PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT) gpa(instance, "vkGetPhysicalDeviceSurfacePresentModes2EXT");
-#endif // VK_USE_PLATFORM_WIN32_KHR
- table->CreateHeadlessSurfaceEXT = (PFN_vkCreateHeadlessSurfaceEXT) gpa(instance, "vkCreateHeadlessSurfaceEXT");
-}
diff --git a/layers/generated/vk_enum_string_helper.h b/layers/generated/vk_enum_string_helper.h
deleted file mode 100644
index e73822802..000000000
--- a/layers/generated/vk_enum_string_helper.h
+++ /dev/null
@@ -1,5994 +0,0 @@
-// *** THIS FILE IS GENERATED - DO NOT EDIT ***
-// See helper_file_generator.py for modifications
-
-
-/***************************************************************************
- *
- * Copyright (c) 2015-2019 The Khronos Group Inc.
- * Copyright (c) 2015-2019 Valve Corporation
- * Copyright (c) 2015-2019 LunarG, Inc.
- * Copyright (c) 2015-2019 Google Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * Author: Mark Lobodzinski <mark@lunarg.com>
- * Author: Courtney Goeltzenleuchter <courtneygo@google.com>
- * Author: Tobin Ehlis <tobine@google.com>
- * Author: Chris Forbes <chrisforbes@google.com>
- * Author: John Zulauf <jzulauf@lunarg.com>
- *
- ****************************************************************************/
-
-
-#pragma once
-#ifdef _WIN32
-#pragma warning( disable : 4065 )
-#endif
-
-#include <string>
-#include <vulkan/vulkan.h>
-
-
-static inline const char* string_VkPipelineCacheHeaderVersion(VkPipelineCacheHeaderVersion input_value)
-{
- switch ((VkPipelineCacheHeaderVersion)input_value)
- {
- case VK_PIPELINE_CACHE_HEADER_VERSION_ONE:
- return "VK_PIPELINE_CACHE_HEADER_VERSION_ONE";
- default:
- return "Unhandled VkPipelineCacheHeaderVersion";
- }
-}
-
-static inline const char* string_VkResult(VkResult input_value)
-{
- switch ((VkResult)input_value)
- {
- case VK_ERROR_DEVICE_LOST:
- return "VK_ERROR_DEVICE_LOST";
- case VK_ERROR_EXTENSION_NOT_PRESENT:
- return "VK_ERROR_EXTENSION_NOT_PRESENT";
- case VK_ERROR_FEATURE_NOT_PRESENT:
- return "VK_ERROR_FEATURE_NOT_PRESENT";
- case VK_ERROR_FORMAT_NOT_SUPPORTED:
- return "VK_ERROR_FORMAT_NOT_SUPPORTED";
- case VK_ERROR_FRAGMENTATION_EXT:
- return "VK_ERROR_FRAGMENTATION_EXT";
- case VK_ERROR_FRAGMENTED_POOL:
- return "VK_ERROR_FRAGMENTED_POOL";
- case VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT:
- return "VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT";
- case VK_ERROR_INCOMPATIBLE_DISPLAY_KHR:
- return "VK_ERROR_INCOMPATIBLE_DISPLAY_KHR";
- case VK_ERROR_INCOMPATIBLE_DRIVER:
- return "VK_ERROR_INCOMPATIBLE_DRIVER";
- case VK_ERROR_INITIALIZATION_FAILED:
- return "VK_ERROR_INITIALIZATION_FAILED";
- case VK_ERROR_INVALID_DEVICE_ADDRESS_EXT:
- return "VK_ERROR_INVALID_DEVICE_ADDRESS_EXT";
- case VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT:
- return "VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT";
- case VK_ERROR_INVALID_EXTERNAL_HANDLE:
- return "VK_ERROR_INVALID_EXTERNAL_HANDLE";
- case VK_ERROR_INVALID_SHADER_NV:
- return "VK_ERROR_INVALID_SHADER_NV";
- case VK_ERROR_LAYER_NOT_PRESENT:
- return "VK_ERROR_LAYER_NOT_PRESENT";
- case VK_ERROR_MEMORY_MAP_FAILED:
- return "VK_ERROR_MEMORY_MAP_FAILED";
- case VK_ERROR_NATIVE_WINDOW_IN_USE_KHR:
- return "VK_ERROR_NATIVE_WINDOW_IN_USE_KHR";
- case VK_ERROR_NOT_PERMITTED_EXT:
- return "VK_ERROR_NOT_PERMITTED_EXT";
- case VK_ERROR_OUT_OF_DATE_KHR:
- return "VK_ERROR_OUT_OF_DATE_KHR";
- case VK_ERROR_OUT_OF_DEVICE_MEMORY:
- return "VK_ERROR_OUT_OF_DEVICE_MEMORY";
- case VK_ERROR_OUT_OF_HOST_MEMORY:
- return "VK_ERROR_OUT_OF_HOST_MEMORY";
- case VK_ERROR_OUT_OF_POOL_MEMORY:
- return "VK_ERROR_OUT_OF_POOL_MEMORY";
- case VK_ERROR_SURFACE_LOST_KHR:
- return "VK_ERROR_SURFACE_LOST_KHR";
- case VK_ERROR_TOO_MANY_OBJECTS:
- return "VK_ERROR_TOO_MANY_OBJECTS";
- case VK_ERROR_VALIDATION_FAILED_EXT:
- return "VK_ERROR_VALIDATION_FAILED_EXT";
- case VK_EVENT_RESET:
- return "VK_EVENT_RESET";
- case VK_EVENT_SET:
- return "VK_EVENT_SET";
- case VK_INCOMPLETE:
- return "VK_INCOMPLETE";
- case VK_NOT_READY:
- return "VK_NOT_READY";
- case VK_SUBOPTIMAL_KHR:
- return "VK_SUBOPTIMAL_KHR";
- case VK_SUCCESS:
- return "VK_SUCCESS";
- case VK_TIMEOUT:
- return "VK_TIMEOUT";
- default:
- return "Unhandled VkResult";
- }
-}
-
-static inline const char* string_VkStructureType(VkStructureType input_value)
-{
- switch ((VkStructureType)input_value)
- {
- case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV:
- return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV";
- case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV:
- return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV";
- case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV:
- return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV";
- case VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR:
- return "VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR";
- case VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID:
- return "VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID";
- case VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID:
- return "VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID";
- case VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID:
- return "VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID";
- case VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR:
- return "VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR";
- case VK_STRUCTURE_TYPE_APPLICATION_INFO:
- return "VK_STRUCTURE_TYPE_APPLICATION_INFO";
- case VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR:
- return "VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR";
- case VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR:
- return "VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR";
- case VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV:
- return "VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV";
- case VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO:
- return "VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO";
- case VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO:
- return "VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO";
- case VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO:
- return "VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO";
- case VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO:
- return "VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO";
- case VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR:
- return "VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR";
- case VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO:
- return "VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO";
- case VK_STRUCTURE_TYPE_BIND_SPARSE_INFO:
- return "VK_STRUCTURE_TYPE_BIND_SPARSE_INFO";
- case VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO";
- case VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT:
- return "VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT";
- case VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT:
- return "VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT";
- case VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER:
- return "VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER";
- case VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2:
- return "VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2";
- case VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO";
- case VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT:
- return "VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT";
- case VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV:
- return "VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV";
- case VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX:
- return "VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX";
- case VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX:
- return "VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX";
- case VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO:
- return "VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO";
- case VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO:
- return "VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO";
- case VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT:
- return "VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT";
- case VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO:
- return "VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO";
- case VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO";
- case VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT:
- return "VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT";
- case VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV:
- return "VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV";
- case VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET:
- return "VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET";
- case VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR:
- return "VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR";
- case VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT:
- return "VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT";
- case VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT:
- return "VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT";
- case VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT:
- return "VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT";
- case VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT:
- return "VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT";
- case VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT:
- return "VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT";
- case VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT:
- return "VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT";
- case VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT:
- return "VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT";
- case VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT:
- return "VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT";
- case VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT:
- return "VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT";
- case VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV:
- return "VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV";
- case VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV:
- return "VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV";
- case VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV:
- return "VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV";
- case VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO";
- case VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT:
- return "VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT";
- case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO:
- return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO";
- case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT:
- return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT";
- case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO";
- case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT:
- return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT";
- case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT:
- return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT";
- case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT:
- return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT";
- case VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT:
- return "VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT";
- case VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX:
- return "VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX";
- case VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX:
- return "VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX";
- case VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO:
- return "VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO";
- case VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO:
- return "VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO";
- case VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR:
- return "VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR";
- case VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR:
- return "VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR";
- case VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO:
- return "VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO";
- case VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO:
- return "VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO";
- case VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR:
- return "VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR";
- case VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD:
- return "VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD";
- case VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT:
- return "VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT";
- case VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2:
- return "VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2";
- case VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT:
- return "VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT";
- case VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR:
- return "VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR";
- case VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR:
- return "VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR";
- case VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD:
- return "VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD";
- case VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR:
- return "VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR";
- case VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR:
- return "VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR";
- case VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR:
- return "VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR";
- case VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT:
- return "VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT";
- case VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR:
- return "VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR";
- case VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR:
- return "VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR";
- case VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR:
- return "VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR";
- case VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT:
- return "VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT";
- case VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT:
- return "VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT";
- case VK_STRUCTURE_TYPE_EVENT_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_EVENT_CREATE_INFO";
- case VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR:
- return "VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR";
- case VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO:
- return "VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO";
- case VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV:
- return "VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV";
- case VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR:
- return "VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR";
- case VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV:
- return "VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV";
- case VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR:
- return "VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR";
- case VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES:
- return "VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES";
- case VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES:
- return "VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES";
- case VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID:
- return "VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID";
- case VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES:
- return "VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES";
- case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO";
- case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV:
- return "VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV";
- case VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES:
- return "VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES";
- case VK_STRUCTURE_TYPE_FENCE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_FENCE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR:
- return "VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR";
- case VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR:
- return "VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR";
- case VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT:
- return "VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT";
- case VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2:
- return "VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2";
- case VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR:
- return "VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR";
- case VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR:
- return "VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR";
- case VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO";
- case VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV:
- return "VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV";
- case VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV:
- return "VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV";
- case VK_STRUCTURE_TYPE_GEOMETRY_NV:
- return "VK_STRUCTURE_TYPE_GEOMETRY_NV";
- case VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV:
- return "VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV";
- case VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_HDR_METADATA_EXT:
- return "VK_STRUCTURE_TYPE_HDR_METADATA_EXT";
- case VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT:
- return "VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT";
- case VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA:
- return "VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA";
- case VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT:
- return "VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT";
- case VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT:
- return "VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT";
- case VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT:
- return "VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT";
- case VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR:
- return "VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR";
- case VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2:
- return "VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2";
- case VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER:
- return "VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER";
- case VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2:
- return "VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2";
- case VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO:
- return "VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO";
- case VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2:
- return "VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2";
- case VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT:
- return "VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT";
- case VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR:
- return "VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR";
- case VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT:
- return "VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT";
- case VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO";
- case VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX:
- return "VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX";
- case VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID:
- return "VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID";
- case VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR:
- return "VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR";
- case VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR:
- return "VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR";
- case VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR:
- return "VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR";
- case VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT:
- return "VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT";
- case VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR:
- return "VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR";
- case VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV:
- return "VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV";
- case VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR:
- return "VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR";
- case VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR:
- return "VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR";
- case VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX:
- return "VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX";
- case VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL:
- return "VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL";
- case VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK:
- return "VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK";
- case VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK:
- return "VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK";
- case VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE:
- return "VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE";
- case VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO:
- return "VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO";
- case VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO:
- return "VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO";
- case VK_STRUCTURE_TYPE_MEMORY_BARRIER:
- return "VK_STRUCTURE_TYPE_MEMORY_BARRIER";
- case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO:
- return "VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO";
- case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS:
- return "VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS";
- case VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR:
- return "VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR";
- case VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID:
- return "VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID";
- case VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR:
- return "VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR";
- case VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR:
- return "VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR";
- case VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT:
- return "VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT";
- case VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT:
- return "VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT";
- case VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2:
- return "VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2";
- case VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR:
- return "VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR";
- case VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT:
- return "VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT";
- case VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT:
- return "VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT";
- case VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX:
- return "VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX";
- case VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL:
- return "VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL";
- case VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL:
- return "VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL";
- case VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL:
- return "VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL";
- case VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL:
- return "VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT";
- case VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT:
- return "VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT";
- case VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD:
- return "VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD";
- case VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV:
- return "VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV";
- case VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV:
- return "VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV";
- case VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV:
- return "VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV";
- case VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT:
- return "VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT";
- case VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT:
- return "VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT";
- case VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR:
- return "VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR";
- case VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR:
- return "VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR";
- case VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR:
- return "VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR";
- case VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR:
- return "VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR";
- case VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR:
- return "VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR";
- case VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO";
- case VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT:
- return "VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT";
- case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT:
- return "VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT";
- case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT:
- return "VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT";
- case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD:
- return "VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD";
- case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT:
- return "VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT";
- case VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV:
- return "VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV";
- case VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT:
- return "VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT";
- case VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT:
- return "VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT";
- case VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT:
- return "VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT";
- case VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV:
- return "VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV";
- case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV:
- return "VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV";
- case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV:
- return "VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV";
- case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV:
- return "VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV";
- case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV:
- return "VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV";
- case VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP:
- return "VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP";
- case VK_STRUCTURE_TYPE_PRESENT_INFO_KHR:
- return "VK_STRUCTURE_TYPE_PRESENT_INFO_KHR";
- case VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR:
- return "VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR";
- case VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE:
- return "VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE";
- case VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO:
- return "VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO";
- case VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO";
- case VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL:
- return "VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL";
- case VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV:
- return "VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV";
- case VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2:
- return "VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2";
- case VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV:
- return "VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV";
- case VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV:
- return "VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV";
- case VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR:
- return "VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR";
- case VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO:
- return "VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO";
- case VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO";
- case VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR:
- return "VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR";
- case VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT:
- return "VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT";
- case VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO";
- case VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO";
- case VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT:
- return "VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT";
- case VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO";
- case VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT:
- return "VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT";
- case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO";
- case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES:
- return "VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES";
- case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO:
- return "VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO";
- case VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT:
- return "VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT";
- case VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR:
- return "VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR";
- case VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR:
- return "VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR";
- case VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT:
- return "VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT";
- case VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR:
- return "VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR";
- case VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2:
- return "VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2";
- case VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2:
- return "VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2";
- case VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP:
- return "VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP";
- case VK_STRUCTURE_TYPE_SUBMIT_INFO:
- return "VK_STRUCTURE_TYPE_SUBMIT_INFO";
- case VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR:
- return "VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR";
- case VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR:
- return "VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR";
- case VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR:
- return "VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR";
- case VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR:
- return "VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR";
- case VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR:
- return "VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR";
- case VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT:
- return "VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT";
- case VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR:
- return "VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR";
- case VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT:
- return "VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT";
- case VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR:
- return "VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR";
- case VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT:
- return "VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT";
- case VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT:
- return "VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT";
- case VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR:
- return "VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR";
- case VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT:
- return "VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT";
- case VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR:
- return "VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR";
- case VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD:
- return "VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD";
- case VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD:
- return "VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD";
- case VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT:
- return "VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT";
- case VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT:
- return "VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT";
- case VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT:
- return "VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT";
- case VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN:
- return "VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN";
- case VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR:
- return "VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR";
- case VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR:
- return "VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR";
- case VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV:
- return "VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV";
- case VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR:
- return "VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR";
- case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET:
- return "VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET";
- case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV:
- return "VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV";
- case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT:
- return "VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT";
- case VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR:
- return "VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR";
- case VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR:
- return "VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR";
- default:
- return "Unhandled VkStructureType";
- }
-}
-
-static inline const char* string_VkSystemAllocationScope(VkSystemAllocationScope input_value)
-{
- switch ((VkSystemAllocationScope)input_value)
- {
- case VK_SYSTEM_ALLOCATION_SCOPE_CACHE:
- return "VK_SYSTEM_ALLOCATION_SCOPE_CACHE";
- case VK_SYSTEM_ALLOCATION_SCOPE_COMMAND:
- return "VK_SYSTEM_ALLOCATION_SCOPE_COMMAND";
- case VK_SYSTEM_ALLOCATION_SCOPE_DEVICE:
- return "VK_SYSTEM_ALLOCATION_SCOPE_DEVICE";
- case VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE:
- return "VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE";
- case VK_SYSTEM_ALLOCATION_SCOPE_OBJECT:
- return "VK_SYSTEM_ALLOCATION_SCOPE_OBJECT";
- default:
- return "Unhandled VkSystemAllocationScope";
- }
-}
-
-static inline const char* string_VkInternalAllocationType(VkInternalAllocationType input_value)
-{
- switch ((VkInternalAllocationType)input_value)
- {
- case VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE:
- return "VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE";
- default:
- return "Unhandled VkInternalAllocationType";
- }
-}
-
-static inline const char* string_VkFormat(VkFormat input_value)
-{
- switch ((VkFormat)input_value)
- {
- case VK_FORMAT_A1R5G5B5_UNORM_PACK16:
- return "VK_FORMAT_A1R5G5B5_UNORM_PACK16";
- case VK_FORMAT_A2B10G10R10_SINT_PACK32:
- return "VK_FORMAT_A2B10G10R10_SINT_PACK32";
- case VK_FORMAT_A2B10G10R10_SNORM_PACK32:
- return "VK_FORMAT_A2B10G10R10_SNORM_PACK32";
- case VK_FORMAT_A2B10G10R10_SSCALED_PACK32:
- return "VK_FORMAT_A2B10G10R10_SSCALED_PACK32";
- case VK_FORMAT_A2B10G10R10_UINT_PACK32:
- return "VK_FORMAT_A2B10G10R10_UINT_PACK32";
- case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
- return "VK_FORMAT_A2B10G10R10_UNORM_PACK32";
- case VK_FORMAT_A2B10G10R10_USCALED_PACK32:
- return "VK_FORMAT_A2B10G10R10_USCALED_PACK32";
- case VK_FORMAT_A2R10G10B10_SINT_PACK32:
- return "VK_FORMAT_A2R10G10B10_SINT_PACK32";
- case VK_FORMAT_A2R10G10B10_SNORM_PACK32:
- return "VK_FORMAT_A2R10G10B10_SNORM_PACK32";
- case VK_FORMAT_A2R10G10B10_SSCALED_PACK32:
- return "VK_FORMAT_A2R10G10B10_SSCALED_PACK32";
- case VK_FORMAT_A2R10G10B10_UINT_PACK32:
- return "VK_FORMAT_A2R10G10B10_UINT_PACK32";
- case VK_FORMAT_A2R10G10B10_UNORM_PACK32:
- return "VK_FORMAT_A2R10G10B10_UNORM_PACK32";
- case VK_FORMAT_A2R10G10B10_USCALED_PACK32:
- return "VK_FORMAT_A2R10G10B10_USCALED_PACK32";
- case VK_FORMAT_A8B8G8R8_SINT_PACK32:
- return "VK_FORMAT_A8B8G8R8_SINT_PACK32";
- case VK_FORMAT_A8B8G8R8_SNORM_PACK32:
- return "VK_FORMAT_A8B8G8R8_SNORM_PACK32";
- case VK_FORMAT_A8B8G8R8_SRGB_PACK32:
- return "VK_FORMAT_A8B8G8R8_SRGB_PACK32";
- case VK_FORMAT_A8B8G8R8_SSCALED_PACK32:
- return "VK_FORMAT_A8B8G8R8_SSCALED_PACK32";
- case VK_FORMAT_A8B8G8R8_UINT_PACK32:
- return "VK_FORMAT_A8B8G8R8_UINT_PACK32";
- case VK_FORMAT_A8B8G8R8_UNORM_PACK32:
- return "VK_FORMAT_A8B8G8R8_UNORM_PACK32";
- case VK_FORMAT_A8B8G8R8_USCALED_PACK32:
- return "VK_FORMAT_A8B8G8R8_USCALED_PACK32";
- case VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK_EXT:
- return "VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK_EXT";
- case VK_FORMAT_ASTC_10x10_SRGB_BLOCK:
- return "VK_FORMAT_ASTC_10x10_SRGB_BLOCK";
- case VK_FORMAT_ASTC_10x10_UNORM_BLOCK:
- return "VK_FORMAT_ASTC_10x10_UNORM_BLOCK";
- case VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK_EXT:
- return "VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK_EXT";
- case VK_FORMAT_ASTC_10x5_SRGB_BLOCK:
- return "VK_FORMAT_ASTC_10x5_SRGB_BLOCK";
- case VK_FORMAT_ASTC_10x5_UNORM_BLOCK:
- return "VK_FORMAT_ASTC_10x5_UNORM_BLOCK";
- case VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK_EXT:
- return "VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK_EXT";
- case VK_FORMAT_ASTC_10x6_SRGB_BLOCK:
- return "VK_FORMAT_ASTC_10x6_SRGB_BLOCK";
- case VK_FORMAT_ASTC_10x6_UNORM_BLOCK:
- return "VK_FORMAT_ASTC_10x6_UNORM_BLOCK";
- case VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK_EXT:
- return "VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK_EXT";
- case VK_FORMAT_ASTC_10x8_SRGB_BLOCK:
- return "VK_FORMAT_ASTC_10x8_SRGB_BLOCK";
- case VK_FORMAT_ASTC_10x8_UNORM_BLOCK:
- return "VK_FORMAT_ASTC_10x8_UNORM_BLOCK";
- case VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK_EXT:
- return "VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK_EXT";
- case VK_FORMAT_ASTC_12x10_SRGB_BLOCK:
- return "VK_FORMAT_ASTC_12x10_SRGB_BLOCK";
- case VK_FORMAT_ASTC_12x10_UNORM_BLOCK:
- return "VK_FORMAT_ASTC_12x10_UNORM_BLOCK";
- case VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK_EXT:
- return "VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK_EXT";
- case VK_FORMAT_ASTC_12x12_SRGB_BLOCK:
- return "VK_FORMAT_ASTC_12x12_SRGB_BLOCK";
- case VK_FORMAT_ASTC_12x12_UNORM_BLOCK:
- return "VK_FORMAT_ASTC_12x12_UNORM_BLOCK";
- case VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT:
- return "VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT";
- case VK_FORMAT_ASTC_4x4_SRGB_BLOCK:
- return "VK_FORMAT_ASTC_4x4_SRGB_BLOCK";
- case VK_FORMAT_ASTC_4x4_UNORM_BLOCK:
- return "VK_FORMAT_ASTC_4x4_UNORM_BLOCK";
- case VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK_EXT:
- return "VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK_EXT";
- case VK_FORMAT_ASTC_5x4_SRGB_BLOCK:
- return "VK_FORMAT_ASTC_5x4_SRGB_BLOCK";
- case VK_FORMAT_ASTC_5x4_UNORM_BLOCK:
- return "VK_FORMAT_ASTC_5x4_UNORM_BLOCK";
- case VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK_EXT:
- return "VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK_EXT";
- case VK_FORMAT_ASTC_5x5_SRGB_BLOCK:
- return "VK_FORMAT_ASTC_5x5_SRGB_BLOCK";
- case VK_FORMAT_ASTC_5x5_UNORM_BLOCK:
- return "VK_FORMAT_ASTC_5x5_UNORM_BLOCK";
- case VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK_EXT:
- return "VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK_EXT";
- case VK_FORMAT_ASTC_6x5_SRGB_BLOCK:
- return "VK_FORMAT_ASTC_6x5_SRGB_BLOCK";
- case VK_FORMAT_ASTC_6x5_UNORM_BLOCK:
- return "VK_FORMAT_ASTC_6x5_UNORM_BLOCK";
- case VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK_EXT:
- return "VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK_EXT";
- case VK_FORMAT_ASTC_6x6_SRGB_BLOCK:
- return "VK_FORMAT_ASTC_6x6_SRGB_BLOCK";
- case VK_FORMAT_ASTC_6x6_UNORM_BLOCK:
- return "VK_FORMAT_ASTC_6x6_UNORM_BLOCK";
- case VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK_EXT:
- return "VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK_EXT";
- case VK_FORMAT_ASTC_8x5_SRGB_BLOCK:
- return "VK_FORMAT_ASTC_8x5_SRGB_BLOCK";
- case VK_FORMAT_ASTC_8x5_UNORM_BLOCK:
- return "VK_FORMAT_ASTC_8x5_UNORM_BLOCK";
- case VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK_EXT:
- return "VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK_EXT";
- case VK_FORMAT_ASTC_8x6_SRGB_BLOCK:
- return "VK_FORMAT_ASTC_8x6_SRGB_BLOCK";
- case VK_FORMAT_ASTC_8x6_UNORM_BLOCK:
- return "VK_FORMAT_ASTC_8x6_UNORM_BLOCK";
- case VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK_EXT:
- return "VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK_EXT";
- case VK_FORMAT_ASTC_8x8_SRGB_BLOCK:
- return "VK_FORMAT_ASTC_8x8_SRGB_BLOCK";
- case VK_FORMAT_ASTC_8x8_UNORM_BLOCK:
- return "VK_FORMAT_ASTC_8x8_UNORM_BLOCK";
- case VK_FORMAT_B10G11R11_UFLOAT_PACK32:
- return "VK_FORMAT_B10G11R11_UFLOAT_PACK32";
- case VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16:
- return "VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16";
- case VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16:
- return "VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16";
- case VK_FORMAT_B16G16R16G16_422_UNORM:
- return "VK_FORMAT_B16G16R16G16_422_UNORM";
- case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
- return "VK_FORMAT_B4G4R4A4_UNORM_PACK16";
- case VK_FORMAT_B5G5R5A1_UNORM_PACK16:
- return "VK_FORMAT_B5G5R5A1_UNORM_PACK16";
- case VK_FORMAT_B5G6R5_UNORM_PACK16:
- return "VK_FORMAT_B5G6R5_UNORM_PACK16";
- case VK_FORMAT_B8G8R8A8_SINT:
- return "VK_FORMAT_B8G8R8A8_SINT";
- case VK_FORMAT_B8G8R8A8_SNORM:
- return "VK_FORMAT_B8G8R8A8_SNORM";
- case VK_FORMAT_B8G8R8A8_SRGB:
- return "VK_FORMAT_B8G8R8A8_SRGB";
- case VK_FORMAT_B8G8R8A8_SSCALED:
- return "VK_FORMAT_B8G8R8A8_SSCALED";
- case VK_FORMAT_B8G8R8A8_UINT:
- return "VK_FORMAT_B8G8R8A8_UINT";
- case VK_FORMAT_B8G8R8A8_UNORM:
- return "VK_FORMAT_B8G8R8A8_UNORM";
- case VK_FORMAT_B8G8R8A8_USCALED:
- return "VK_FORMAT_B8G8R8A8_USCALED";
- case VK_FORMAT_B8G8R8G8_422_UNORM:
- return "VK_FORMAT_B8G8R8G8_422_UNORM";
- case VK_FORMAT_B8G8R8_SINT:
- return "VK_FORMAT_B8G8R8_SINT";
- case VK_FORMAT_B8G8R8_SNORM:
- return "VK_FORMAT_B8G8R8_SNORM";
- case VK_FORMAT_B8G8R8_SRGB:
- return "VK_FORMAT_B8G8R8_SRGB";
- case VK_FORMAT_B8G8R8_SSCALED:
- return "VK_FORMAT_B8G8R8_SSCALED";
- case VK_FORMAT_B8G8R8_UINT:
- return "VK_FORMAT_B8G8R8_UINT";
- case VK_FORMAT_B8G8R8_UNORM:
- return "VK_FORMAT_B8G8R8_UNORM";
- case VK_FORMAT_B8G8R8_USCALED:
- return "VK_FORMAT_B8G8R8_USCALED";
- case VK_FORMAT_BC1_RGBA_SRGB_BLOCK:
- return "VK_FORMAT_BC1_RGBA_SRGB_BLOCK";
- case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:
- return "VK_FORMAT_BC1_RGBA_UNORM_BLOCK";
- case VK_FORMAT_BC1_RGB_SRGB_BLOCK:
- return "VK_FORMAT_BC1_RGB_SRGB_BLOCK";
- case VK_FORMAT_BC1_RGB_UNORM_BLOCK:
- return "VK_FORMAT_BC1_RGB_UNORM_BLOCK";
- case VK_FORMAT_BC2_SRGB_BLOCK:
- return "VK_FORMAT_BC2_SRGB_BLOCK";
- case VK_FORMAT_BC2_UNORM_BLOCK:
- return "VK_FORMAT_BC2_UNORM_BLOCK";
- case VK_FORMAT_BC3_SRGB_BLOCK:
- return "VK_FORMAT_BC3_SRGB_BLOCK";
- case VK_FORMAT_BC3_UNORM_BLOCK:
- return "VK_FORMAT_BC3_UNORM_BLOCK";
- case VK_FORMAT_BC4_SNORM_BLOCK:
- return "VK_FORMAT_BC4_SNORM_BLOCK";
- case VK_FORMAT_BC4_UNORM_BLOCK:
- return "VK_FORMAT_BC4_UNORM_BLOCK";
- case VK_FORMAT_BC5_SNORM_BLOCK:
- return "VK_FORMAT_BC5_SNORM_BLOCK";
- case VK_FORMAT_BC5_UNORM_BLOCK:
- return "VK_FORMAT_BC5_UNORM_BLOCK";
- case VK_FORMAT_BC6H_SFLOAT_BLOCK:
- return "VK_FORMAT_BC6H_SFLOAT_BLOCK";
- case VK_FORMAT_BC6H_UFLOAT_BLOCK:
- return "VK_FORMAT_BC6H_UFLOAT_BLOCK";
- case VK_FORMAT_BC7_SRGB_BLOCK:
- return "VK_FORMAT_BC7_SRGB_BLOCK";
- case VK_FORMAT_BC7_UNORM_BLOCK:
- return "VK_FORMAT_BC7_UNORM_BLOCK";
- case VK_FORMAT_D16_UNORM:
- return "VK_FORMAT_D16_UNORM";
- case VK_FORMAT_D16_UNORM_S8_UINT:
- return "VK_FORMAT_D16_UNORM_S8_UINT";
- case VK_FORMAT_D24_UNORM_S8_UINT:
- return "VK_FORMAT_D24_UNORM_S8_UINT";
- case VK_FORMAT_D32_SFLOAT:
- return "VK_FORMAT_D32_SFLOAT";
- case VK_FORMAT_D32_SFLOAT_S8_UINT:
- return "VK_FORMAT_D32_SFLOAT_S8_UINT";
- case VK_FORMAT_E5B9G9R9_UFLOAT_PACK32:
- return "VK_FORMAT_E5B9G9R9_UFLOAT_PACK32";
- case VK_FORMAT_EAC_R11G11_SNORM_BLOCK:
- return "VK_FORMAT_EAC_R11G11_SNORM_BLOCK";
- case VK_FORMAT_EAC_R11G11_UNORM_BLOCK:
- return "VK_FORMAT_EAC_R11G11_UNORM_BLOCK";
- case VK_FORMAT_EAC_R11_SNORM_BLOCK:
- return "VK_FORMAT_EAC_R11_SNORM_BLOCK";
- case VK_FORMAT_EAC_R11_UNORM_BLOCK:
- return "VK_FORMAT_EAC_R11_UNORM_BLOCK";
- case VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:
- return "VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK";
- case VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK:
- return "VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK";
- case VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK:
- return "VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK";
- case VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK:
- return "VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK";
- case VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:
- return "VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK";
- case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
- return "VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK";
- case VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16:
- return "VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16";
- case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16:
- return "VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16";
- case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16:
- return "VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16";
- case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16:
- return "VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16";
- case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16:
- return "VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16";
- case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16:
- return "VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16";
- case VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16:
- return "VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16";
- case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16:
- return "VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16";
- case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16:
- return "VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16";
- case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16:
- return "VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16";
- case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16:
- return "VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16";
- case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16:
- return "VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16";
- case VK_FORMAT_G16B16G16R16_422_UNORM:
- return "VK_FORMAT_G16B16G16R16_422_UNORM";
- case VK_FORMAT_G16_B16R16_2PLANE_420_UNORM:
- return "VK_FORMAT_G16_B16R16_2PLANE_420_UNORM";
- case VK_FORMAT_G16_B16R16_2PLANE_422_UNORM:
- return "VK_FORMAT_G16_B16R16_2PLANE_422_UNORM";
- case VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM:
- return "VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM";
- case VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM:
- return "VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM";
- case VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM:
- return "VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM";
- case VK_FORMAT_G8B8G8R8_422_UNORM:
- return "VK_FORMAT_G8B8G8R8_422_UNORM";
- case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM:
- return "VK_FORMAT_G8_B8R8_2PLANE_420_UNORM";
- case VK_FORMAT_G8_B8R8_2PLANE_422_UNORM:
- return "VK_FORMAT_G8_B8R8_2PLANE_422_UNORM";
- case VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM:
- return "VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM";
- case VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM:
- return "VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM";
- case VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM:
- return "VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM";
- case VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG:
- return "VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG";
- case VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG:
- return "VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG";
- case VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG:
- return "VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG";
- case VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG:
- return "VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG";
- case VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG:
- return "VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG";
- case VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG:
- return "VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG";
- case VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG:
- return "VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG";
- case VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG:
- return "VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG";
- case VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16:
- return "VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16";
- case VK_FORMAT_R10X6G10X6_UNORM_2PACK16:
- return "VK_FORMAT_R10X6G10X6_UNORM_2PACK16";
- case VK_FORMAT_R10X6_UNORM_PACK16:
- return "VK_FORMAT_R10X6_UNORM_PACK16";
- case VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16:
- return "VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16";
- case VK_FORMAT_R12X4G12X4_UNORM_2PACK16:
- return "VK_FORMAT_R12X4G12X4_UNORM_2PACK16";
- case VK_FORMAT_R12X4_UNORM_PACK16:
- return "VK_FORMAT_R12X4_UNORM_PACK16";
- case VK_FORMAT_R16G16B16A16_SFLOAT:
- return "VK_FORMAT_R16G16B16A16_SFLOAT";
- case VK_FORMAT_R16G16B16A16_SINT:
- return "VK_FORMAT_R16G16B16A16_SINT";
- case VK_FORMAT_R16G16B16A16_SNORM:
- return "VK_FORMAT_R16G16B16A16_SNORM";
- case VK_FORMAT_R16G16B16A16_SSCALED:
- return "VK_FORMAT_R16G16B16A16_SSCALED";
- case VK_FORMAT_R16G16B16A16_UINT:
- return "VK_FORMAT_R16G16B16A16_UINT";
- case VK_FORMAT_R16G16B16A16_UNORM:
- return "VK_FORMAT_R16G16B16A16_UNORM";
- case VK_FORMAT_R16G16B16A16_USCALED:
- return "VK_FORMAT_R16G16B16A16_USCALED";
- case VK_FORMAT_R16G16B16_SFLOAT:
- return "VK_FORMAT_R16G16B16_SFLOAT";
- case VK_FORMAT_R16G16B16_SINT:
- return "VK_FORMAT_R16G16B16_SINT";
- case VK_FORMAT_R16G16B16_SNORM:
- return "VK_FORMAT_R16G16B16_SNORM";
- case VK_FORMAT_R16G16B16_SSCALED:
- return "VK_FORMAT_R16G16B16_SSCALED";
- case VK_FORMAT_R16G16B16_UINT:
- return "VK_FORMAT_R16G16B16_UINT";
- case VK_FORMAT_R16G16B16_UNORM:
- return "VK_FORMAT_R16G16B16_UNORM";
- case VK_FORMAT_R16G16B16_USCALED:
- return "VK_FORMAT_R16G16B16_USCALED";
- case VK_FORMAT_R16G16_SFLOAT:
- return "VK_FORMAT_R16G16_SFLOAT";
- case VK_FORMAT_R16G16_SINT:
- return "VK_FORMAT_R16G16_SINT";
- case VK_FORMAT_R16G16_SNORM:
- return "VK_FORMAT_R16G16_SNORM";
- case VK_FORMAT_R16G16_SSCALED:
- return "VK_FORMAT_R16G16_SSCALED";
- case VK_FORMAT_R16G16_UINT:
- return "VK_FORMAT_R16G16_UINT";
- case VK_FORMAT_R16G16_UNORM:
- return "VK_FORMAT_R16G16_UNORM";
- case VK_FORMAT_R16G16_USCALED:
- return "VK_FORMAT_R16G16_USCALED";
- case VK_FORMAT_R16_SFLOAT:
- return "VK_FORMAT_R16_SFLOAT";
- case VK_FORMAT_R16_SINT:
- return "VK_FORMAT_R16_SINT";
- case VK_FORMAT_R16_SNORM:
- return "VK_FORMAT_R16_SNORM";
- case VK_FORMAT_R16_SSCALED:
- return "VK_FORMAT_R16_SSCALED";
- case VK_FORMAT_R16_UINT:
- return "VK_FORMAT_R16_UINT";
- case VK_FORMAT_R16_UNORM:
- return "VK_FORMAT_R16_UNORM";
- case VK_FORMAT_R16_USCALED:
- return "VK_FORMAT_R16_USCALED";
- case VK_FORMAT_R32G32B32A32_SFLOAT:
- return "VK_FORMAT_R32G32B32A32_SFLOAT";
- case VK_FORMAT_R32G32B32A32_SINT:
- return "VK_FORMAT_R32G32B32A32_SINT";
- case VK_FORMAT_R32G32B32A32_UINT:
- return "VK_FORMAT_R32G32B32A32_UINT";
- case VK_FORMAT_R32G32B32_SFLOAT:
- return "VK_FORMAT_R32G32B32_SFLOAT";
- case VK_FORMAT_R32G32B32_SINT:
- return "VK_FORMAT_R32G32B32_SINT";
- case VK_FORMAT_R32G32B32_UINT:
- return "VK_FORMAT_R32G32B32_UINT";
- case VK_FORMAT_R32G32_SFLOAT:
- return "VK_FORMAT_R32G32_SFLOAT";
- case VK_FORMAT_R32G32_SINT:
- return "VK_FORMAT_R32G32_SINT";
- case VK_FORMAT_R32G32_UINT:
- return "VK_FORMAT_R32G32_UINT";
- case VK_FORMAT_R32_SFLOAT:
- return "VK_FORMAT_R32_SFLOAT";
- case VK_FORMAT_R32_SINT:
- return "VK_FORMAT_R32_SINT";
- case VK_FORMAT_R32_UINT:
- return "VK_FORMAT_R32_UINT";
- case VK_FORMAT_R4G4B4A4_UNORM_PACK16:
- return "VK_FORMAT_R4G4B4A4_UNORM_PACK16";
- case VK_FORMAT_R4G4_UNORM_PACK8:
- return "VK_FORMAT_R4G4_UNORM_PACK8";
- case VK_FORMAT_R5G5B5A1_UNORM_PACK16:
- return "VK_FORMAT_R5G5B5A1_UNORM_PACK16";
- case VK_FORMAT_R5G6B5_UNORM_PACK16:
- return "VK_FORMAT_R5G6B5_UNORM_PACK16";
- case VK_FORMAT_R64G64B64A64_SFLOAT:
- return "VK_FORMAT_R64G64B64A64_SFLOAT";
- case VK_FORMAT_R64G64B64A64_SINT:
- return "VK_FORMAT_R64G64B64A64_SINT";
- case VK_FORMAT_R64G64B64A64_UINT:
- return "VK_FORMAT_R64G64B64A64_UINT";
- case VK_FORMAT_R64G64B64_SFLOAT:
- return "VK_FORMAT_R64G64B64_SFLOAT";
- case VK_FORMAT_R64G64B64_SINT:
- return "VK_FORMAT_R64G64B64_SINT";
- case VK_FORMAT_R64G64B64_UINT:
- return "VK_FORMAT_R64G64B64_UINT";
- case VK_FORMAT_R64G64_SFLOAT:
- return "VK_FORMAT_R64G64_SFLOAT";
- case VK_FORMAT_R64G64_SINT:
- return "VK_FORMAT_R64G64_SINT";
- case VK_FORMAT_R64G64_UINT:
- return "VK_FORMAT_R64G64_UINT";
- case VK_FORMAT_R64_SFLOAT:
- return "VK_FORMAT_R64_SFLOAT";
- case VK_FORMAT_R64_SINT:
- return "VK_FORMAT_R64_SINT";
- case VK_FORMAT_R64_UINT:
- return "VK_FORMAT_R64_UINT";
- case VK_FORMAT_R8G8B8A8_SINT:
- return "VK_FORMAT_R8G8B8A8_SINT";
- case VK_FORMAT_R8G8B8A8_SNORM:
- return "VK_FORMAT_R8G8B8A8_SNORM";
- case VK_FORMAT_R8G8B8A8_SRGB:
- return "VK_FORMAT_R8G8B8A8_SRGB";
- case VK_FORMAT_R8G8B8A8_SSCALED:
- return "VK_FORMAT_R8G8B8A8_SSCALED";
- case VK_FORMAT_R8G8B8A8_UINT:
- return "VK_FORMAT_R8G8B8A8_UINT";
- case VK_FORMAT_R8G8B8A8_UNORM:
- return "VK_FORMAT_R8G8B8A8_UNORM";
- case VK_FORMAT_R8G8B8A8_USCALED:
- return "VK_FORMAT_R8G8B8A8_USCALED";
- case VK_FORMAT_R8G8B8_SINT:
- return "VK_FORMAT_R8G8B8_SINT";
- case VK_FORMAT_R8G8B8_SNORM:
- return "VK_FORMAT_R8G8B8_SNORM";
- case VK_FORMAT_R8G8B8_SRGB:
- return "VK_FORMAT_R8G8B8_SRGB";
- case VK_FORMAT_R8G8B8_SSCALED:
- return "VK_FORMAT_R8G8B8_SSCALED";
- case VK_FORMAT_R8G8B8_UINT:
- return "VK_FORMAT_R8G8B8_UINT";
- case VK_FORMAT_R8G8B8_UNORM:
- return "VK_FORMAT_R8G8B8_UNORM";
- case VK_FORMAT_R8G8B8_USCALED:
- return "VK_FORMAT_R8G8B8_USCALED";
- case VK_FORMAT_R8G8_SINT:
- return "VK_FORMAT_R8G8_SINT";
- case VK_FORMAT_R8G8_SNORM:
- return "VK_FORMAT_R8G8_SNORM";
- case VK_FORMAT_R8G8_SRGB:
- return "VK_FORMAT_R8G8_SRGB";
- case VK_FORMAT_R8G8_SSCALED:
- return "VK_FORMAT_R8G8_SSCALED";
- case VK_FORMAT_R8G8_UINT:
- return "VK_FORMAT_R8G8_UINT";
- case VK_FORMAT_R8G8_UNORM:
- return "VK_FORMAT_R8G8_UNORM";
- case VK_FORMAT_R8G8_USCALED:
- return "VK_FORMAT_R8G8_USCALED";
- case VK_FORMAT_R8_SINT:
- return "VK_FORMAT_R8_SINT";
- case VK_FORMAT_R8_SNORM:
- return "VK_FORMAT_R8_SNORM";
- case VK_FORMAT_R8_SRGB:
- return "VK_FORMAT_R8_SRGB";
- case VK_FORMAT_R8_SSCALED:
- return "VK_FORMAT_R8_SSCALED";
- case VK_FORMAT_R8_UINT:
- return "VK_FORMAT_R8_UINT";
- case VK_FORMAT_R8_UNORM:
- return "VK_FORMAT_R8_UNORM";
- case VK_FORMAT_R8_USCALED:
- return "VK_FORMAT_R8_USCALED";
- case VK_FORMAT_S8_UINT:
- return "VK_FORMAT_S8_UINT";
- case VK_FORMAT_UNDEFINED:
- return "VK_FORMAT_UNDEFINED";
- case VK_FORMAT_X8_D24_UNORM_PACK32:
- return "VK_FORMAT_X8_D24_UNORM_PACK32";
- default:
- return "Unhandled VkFormat";
- }
-}
-
-static inline const char* string_VkFormatFeatureFlagBits(VkFormatFeatureFlagBits input_value)
-{
- switch ((VkFormatFeatureFlagBits)input_value)
- {
- case VK_FORMAT_FEATURE_BLIT_DST_BIT:
- return "VK_FORMAT_FEATURE_BLIT_DST_BIT";
- case VK_FORMAT_FEATURE_BLIT_SRC_BIT:
- return "VK_FORMAT_FEATURE_BLIT_SRC_BIT";
- case VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT:
- return "VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT";
- case VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT:
- return "VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT";
- case VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT:
- return "VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT";
- case VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT:
- return "VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT";
- case VK_FORMAT_FEATURE_DISJOINT_BIT:
- return "VK_FORMAT_FEATURE_DISJOINT_BIT";
- case VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT:
- return "VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT";
- case VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT:
- return "VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT";
- case VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT:
- return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT";
- case VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG:
- return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG";
- case VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT:
- return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT";
- case VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT:
- return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT";
- case VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT:
- return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT";
- case VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT:
- return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT";
- case VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT:
- return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT";
- case VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT:
- return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT";
- case VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT:
- return "VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT";
- case VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT:
- return "VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT";
- case VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT:
- return "VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT";
- case VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT:
- return "VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT";
- case VK_FORMAT_FEATURE_TRANSFER_DST_BIT:
- return "VK_FORMAT_FEATURE_TRANSFER_DST_BIT";
- case VK_FORMAT_FEATURE_TRANSFER_SRC_BIT:
- return "VK_FORMAT_FEATURE_TRANSFER_SRC_BIT";
- case VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT:
- return "VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT";
- case VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT:
- return "VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT";
- default:
- return "Unhandled VkFormatFeatureFlagBits";
- }
-}
-
-static inline std::string string_VkFormatFeatureFlags(VkFormatFeatureFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkFormatFeatureFlagBits(static_cast<VkFormatFeatureFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkFormatFeatureFlagBits(static_cast<VkFormatFeatureFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkImageType(VkImageType input_value)
-{
- switch ((VkImageType)input_value)
- {
- case VK_IMAGE_TYPE_1D:
- return "VK_IMAGE_TYPE_1D";
- case VK_IMAGE_TYPE_2D:
- return "VK_IMAGE_TYPE_2D";
- case VK_IMAGE_TYPE_3D:
- return "VK_IMAGE_TYPE_3D";
- default:
- return "Unhandled VkImageType";
- }
-}
-
-static inline const char* string_VkImageTiling(VkImageTiling input_value)
-{
- switch ((VkImageTiling)input_value)
- {
- case VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT:
- return "VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT";
- case VK_IMAGE_TILING_LINEAR:
- return "VK_IMAGE_TILING_LINEAR";
- case VK_IMAGE_TILING_OPTIMAL:
- return "VK_IMAGE_TILING_OPTIMAL";
- default:
- return "Unhandled VkImageTiling";
- }
-}
-
-static inline const char* string_VkImageUsageFlagBits(VkImageUsageFlagBits input_value)
-{
- switch ((VkImageUsageFlagBits)input_value)
- {
- case VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT:
- return "VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT";
- case VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT:
- return "VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT";
- case VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT:
- return "VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT";
- case VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT:
- return "VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT";
- case VK_IMAGE_USAGE_SAMPLED_BIT:
- return "VK_IMAGE_USAGE_SAMPLED_BIT";
- case VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV:
- return "VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV";
- case VK_IMAGE_USAGE_STORAGE_BIT:
- return "VK_IMAGE_USAGE_STORAGE_BIT";
- case VK_IMAGE_USAGE_TRANSFER_DST_BIT:
- return "VK_IMAGE_USAGE_TRANSFER_DST_BIT";
- case VK_IMAGE_USAGE_TRANSFER_SRC_BIT:
- return "VK_IMAGE_USAGE_TRANSFER_SRC_BIT";
- case VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT:
- return "VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT";
- default:
- return "Unhandled VkImageUsageFlagBits";
- }
-}
-
-static inline std::string string_VkImageUsageFlags(VkImageUsageFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkImageUsageFlagBits(static_cast<VkImageUsageFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkImageUsageFlagBits(static_cast<VkImageUsageFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkImageCreateFlagBits(VkImageCreateFlagBits input_value)
-{
- switch ((VkImageCreateFlagBits)input_value)
- {
- case VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT:
- return "VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT";
- case VK_IMAGE_CREATE_ALIAS_BIT:
- return "VK_IMAGE_CREATE_ALIAS_BIT";
- case VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT:
- return "VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT";
- case VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV:
- return "VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV";
- case VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT:
- return "VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT";
- case VK_IMAGE_CREATE_DISJOINT_BIT:
- return "VK_IMAGE_CREATE_DISJOINT_BIT";
- case VK_IMAGE_CREATE_EXTENDED_USAGE_BIT:
- return "VK_IMAGE_CREATE_EXTENDED_USAGE_BIT";
- case VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT:
- return "VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT";
- case VK_IMAGE_CREATE_PROTECTED_BIT:
- return "VK_IMAGE_CREATE_PROTECTED_BIT";
- case VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT:
- return "VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT";
- case VK_IMAGE_CREATE_SPARSE_ALIASED_BIT:
- return "VK_IMAGE_CREATE_SPARSE_ALIASED_BIT";
- case VK_IMAGE_CREATE_SPARSE_BINDING_BIT:
- return "VK_IMAGE_CREATE_SPARSE_BINDING_BIT";
- case VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT:
- return "VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT";
- case VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT:
- return "VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT";
- case VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT:
- return "VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT";
- default:
- return "Unhandled VkImageCreateFlagBits";
- }
-}
-
-static inline std::string string_VkImageCreateFlags(VkImageCreateFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkImageCreateFlagBits(static_cast<VkImageCreateFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkImageCreateFlagBits(static_cast<VkImageCreateFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkSampleCountFlagBits(VkSampleCountFlagBits input_value)
-{
- switch ((VkSampleCountFlagBits)input_value)
- {
- case VK_SAMPLE_COUNT_16_BIT:
- return "VK_SAMPLE_COUNT_16_BIT";
- case VK_SAMPLE_COUNT_1_BIT:
- return "VK_SAMPLE_COUNT_1_BIT";
- case VK_SAMPLE_COUNT_2_BIT:
- return "VK_SAMPLE_COUNT_2_BIT";
- case VK_SAMPLE_COUNT_32_BIT:
- return "VK_SAMPLE_COUNT_32_BIT";
- case VK_SAMPLE_COUNT_4_BIT:
- return "VK_SAMPLE_COUNT_4_BIT";
- case VK_SAMPLE_COUNT_64_BIT:
- return "VK_SAMPLE_COUNT_64_BIT";
- case VK_SAMPLE_COUNT_8_BIT:
- return "VK_SAMPLE_COUNT_8_BIT";
- default:
- return "Unhandled VkSampleCountFlagBits";
- }
-}
-
-static inline std::string string_VkSampleCountFlags(VkSampleCountFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkSampleCountFlagBits(static_cast<VkSampleCountFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkSampleCountFlagBits(static_cast<VkSampleCountFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkPhysicalDeviceType(VkPhysicalDeviceType input_value)
-{
- switch ((VkPhysicalDeviceType)input_value)
- {
- case VK_PHYSICAL_DEVICE_TYPE_CPU:
- return "VK_PHYSICAL_DEVICE_TYPE_CPU";
- case VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU:
- return "VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU";
- case VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU:
- return "VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU";
- case VK_PHYSICAL_DEVICE_TYPE_OTHER:
- return "VK_PHYSICAL_DEVICE_TYPE_OTHER";
- case VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU:
- return "VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU";
- default:
- return "Unhandled VkPhysicalDeviceType";
- }
-}
-
-static inline const char* string_VkQueueFlagBits(VkQueueFlagBits input_value)
-{
- switch ((VkQueueFlagBits)input_value)
- {
- case VK_QUEUE_COMPUTE_BIT:
- return "VK_QUEUE_COMPUTE_BIT";
- case VK_QUEUE_GRAPHICS_BIT:
- return "VK_QUEUE_GRAPHICS_BIT";
- case VK_QUEUE_PROTECTED_BIT:
- return "VK_QUEUE_PROTECTED_BIT";
- case VK_QUEUE_SPARSE_BINDING_BIT:
- return "VK_QUEUE_SPARSE_BINDING_BIT";
- case VK_QUEUE_TRANSFER_BIT:
- return "VK_QUEUE_TRANSFER_BIT";
- default:
- return "Unhandled VkQueueFlagBits";
- }
-}
-
-static inline std::string string_VkQueueFlags(VkQueueFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkQueueFlagBits(static_cast<VkQueueFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkQueueFlagBits(static_cast<VkQueueFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkMemoryPropertyFlagBits(VkMemoryPropertyFlagBits input_value)
-{
- switch ((VkMemoryPropertyFlagBits)input_value)
- {
- case VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD:
- return "VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD";
- case VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT:
- return "VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT";
- case VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD:
- return "VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD";
- case VK_MEMORY_PROPERTY_HOST_CACHED_BIT:
- return "VK_MEMORY_PROPERTY_HOST_CACHED_BIT";
- case VK_MEMORY_PROPERTY_HOST_COHERENT_BIT:
- return "VK_MEMORY_PROPERTY_HOST_COHERENT_BIT";
- case VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT:
- return "VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT";
- case VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT:
- return "VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT";
- case VK_MEMORY_PROPERTY_PROTECTED_BIT:
- return "VK_MEMORY_PROPERTY_PROTECTED_BIT";
- default:
- return "Unhandled VkMemoryPropertyFlagBits";
- }
-}
-
-static inline std::string string_VkMemoryPropertyFlags(VkMemoryPropertyFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkMemoryPropertyFlagBits(static_cast<VkMemoryPropertyFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkMemoryPropertyFlagBits(static_cast<VkMemoryPropertyFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkMemoryHeapFlagBits(VkMemoryHeapFlagBits input_value)
-{
- switch ((VkMemoryHeapFlagBits)input_value)
- {
- case VK_MEMORY_HEAP_DEVICE_LOCAL_BIT:
- return "VK_MEMORY_HEAP_DEVICE_LOCAL_BIT";
- case VK_MEMORY_HEAP_MULTI_INSTANCE_BIT:
- return "VK_MEMORY_HEAP_MULTI_INSTANCE_BIT";
- default:
- return "Unhandled VkMemoryHeapFlagBits";
- }
-}
-
-static inline std::string string_VkMemoryHeapFlags(VkMemoryHeapFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkMemoryHeapFlagBits(static_cast<VkMemoryHeapFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkMemoryHeapFlagBits(static_cast<VkMemoryHeapFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkDeviceQueueCreateFlagBits(VkDeviceQueueCreateFlagBits input_value)
-{
- switch ((VkDeviceQueueCreateFlagBits)input_value)
- {
- case VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT:
- return "VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT";
- default:
- return "Unhandled VkDeviceQueueCreateFlagBits";
- }
-}
-
-static inline std::string string_VkDeviceQueueCreateFlags(VkDeviceQueueCreateFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkDeviceQueueCreateFlagBits(static_cast<VkDeviceQueueCreateFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkDeviceQueueCreateFlagBits(static_cast<VkDeviceQueueCreateFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkPipelineStageFlagBits(VkPipelineStageFlagBits input_value)
-{
- switch ((VkPipelineStageFlagBits)input_value)
- {
- case VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV:
- return "VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV";
- case VK_PIPELINE_STAGE_ALL_COMMANDS_BIT:
- return "VK_PIPELINE_STAGE_ALL_COMMANDS_BIT";
- case VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT:
- return "VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT";
- case VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT:
- return "VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT";
- case VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT:
- return "VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT";
- case VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX:
- return "VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX";
- case VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT:
- return "VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT";
- case VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT:
- return "VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT";
- case VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT:
- return "VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT";
- case VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT:
- return "VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT";
- case VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT:
- return "VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT";
- case VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT:
- return "VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT";
- case VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT:
- return "VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT";
- case VK_PIPELINE_STAGE_HOST_BIT:
- return "VK_PIPELINE_STAGE_HOST_BIT";
- case VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT:
- return "VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT";
- case VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV:
- return "VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV";
- case VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV:
- return "VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV";
- case VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV:
- return "VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV";
- case VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV:
- return "VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV";
- case VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT:
- return "VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT";
- case VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT:
- return "VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT";
- case VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT:
- return "VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT";
- case VK_PIPELINE_STAGE_TRANSFER_BIT:
- return "VK_PIPELINE_STAGE_TRANSFER_BIT";
- case VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT:
- return "VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT";
- case VK_PIPELINE_STAGE_VERTEX_INPUT_BIT:
- return "VK_PIPELINE_STAGE_VERTEX_INPUT_BIT";
- case VK_PIPELINE_STAGE_VERTEX_SHADER_BIT:
- return "VK_PIPELINE_STAGE_VERTEX_SHADER_BIT";
- default:
- return "Unhandled VkPipelineStageFlagBits";
- }
-}
-
-static inline std::string string_VkPipelineStageFlags(VkPipelineStageFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkPipelineStageFlagBits(static_cast<VkPipelineStageFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkPipelineStageFlagBits(static_cast<VkPipelineStageFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkImageAspectFlagBits(VkImageAspectFlagBits input_value)
-{
- switch ((VkImageAspectFlagBits)input_value)
- {
- case VK_IMAGE_ASPECT_COLOR_BIT:
- return "VK_IMAGE_ASPECT_COLOR_BIT";
- case VK_IMAGE_ASPECT_DEPTH_BIT:
- return "VK_IMAGE_ASPECT_DEPTH_BIT";
- case VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT:
- return "VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT";
- case VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT:
- return "VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT";
- case VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT:
- return "VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT";
- case VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT:
- return "VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT";
- case VK_IMAGE_ASPECT_METADATA_BIT:
- return "VK_IMAGE_ASPECT_METADATA_BIT";
- case VK_IMAGE_ASPECT_PLANE_0_BIT:
- return "VK_IMAGE_ASPECT_PLANE_0_BIT";
- case VK_IMAGE_ASPECT_PLANE_1_BIT:
- return "VK_IMAGE_ASPECT_PLANE_1_BIT";
- case VK_IMAGE_ASPECT_PLANE_2_BIT:
- return "VK_IMAGE_ASPECT_PLANE_2_BIT";
- case VK_IMAGE_ASPECT_STENCIL_BIT:
- return "VK_IMAGE_ASPECT_STENCIL_BIT";
- default:
- return "Unhandled VkImageAspectFlagBits";
- }
-}
-
-static inline std::string string_VkImageAspectFlags(VkImageAspectFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkImageAspectFlagBits(static_cast<VkImageAspectFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkImageAspectFlagBits(static_cast<VkImageAspectFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkSparseImageFormatFlagBits(VkSparseImageFormatFlagBits input_value)
-{
- switch ((VkSparseImageFormatFlagBits)input_value)
- {
- case VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT:
- return "VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT";
- case VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT:
- return "VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT";
- case VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT:
- return "VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT";
- default:
- return "Unhandled VkSparseImageFormatFlagBits";
- }
-}
-
-static inline std::string string_VkSparseImageFormatFlags(VkSparseImageFormatFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkSparseImageFormatFlagBits(static_cast<VkSparseImageFormatFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkSparseImageFormatFlagBits(static_cast<VkSparseImageFormatFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkSparseMemoryBindFlagBits(VkSparseMemoryBindFlagBits input_value)
-{
- switch ((VkSparseMemoryBindFlagBits)input_value)
- {
- case VK_SPARSE_MEMORY_BIND_METADATA_BIT:
- return "VK_SPARSE_MEMORY_BIND_METADATA_BIT";
- default:
- return "Unhandled VkSparseMemoryBindFlagBits";
- }
-}
-
-static inline std::string string_VkSparseMemoryBindFlags(VkSparseMemoryBindFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkSparseMemoryBindFlagBits(static_cast<VkSparseMemoryBindFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkSparseMemoryBindFlagBits(static_cast<VkSparseMemoryBindFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkFenceCreateFlagBits(VkFenceCreateFlagBits input_value)
-{
- switch ((VkFenceCreateFlagBits)input_value)
- {
- case VK_FENCE_CREATE_SIGNALED_BIT:
- return "VK_FENCE_CREATE_SIGNALED_BIT";
- default:
- return "Unhandled VkFenceCreateFlagBits";
- }
-}
-
-static inline std::string string_VkFenceCreateFlags(VkFenceCreateFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkFenceCreateFlagBits(static_cast<VkFenceCreateFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkFenceCreateFlagBits(static_cast<VkFenceCreateFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkQueryType(VkQueryType input_value)
-{
- switch ((VkQueryType)input_value)
- {
- case VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV:
- return "VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV";
- case VK_QUERY_TYPE_OCCLUSION:
- return "VK_QUERY_TYPE_OCCLUSION";
- case VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL:
- return "VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL";
- case VK_QUERY_TYPE_PIPELINE_STATISTICS:
- return "VK_QUERY_TYPE_PIPELINE_STATISTICS";
- case VK_QUERY_TYPE_TIMESTAMP:
- return "VK_QUERY_TYPE_TIMESTAMP";
- case VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT:
- return "VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT";
- default:
- return "Unhandled VkQueryType";
- }
-}
-
-static inline const char* string_VkQueryPipelineStatisticFlagBits(VkQueryPipelineStatisticFlagBits input_value)
-{
- switch ((VkQueryPipelineStatisticFlagBits)input_value)
- {
- case VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT:
- return "VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT";
- case VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT:
- return "VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT";
- case VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT:
- return "VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT";
- case VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT:
- return "VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT";
- case VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT:
- return "VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT";
- case VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT:
- return "VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT";
- case VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT:
- return "VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT";
- case VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT:
- return "VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT";
- case VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT:
- return "VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT";
- case VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT:
- return "VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT";
- case VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT:
- return "VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT";
- default:
- return "Unhandled VkQueryPipelineStatisticFlagBits";
- }
-}
-
-static inline std::string string_VkQueryPipelineStatisticFlags(VkQueryPipelineStatisticFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkQueryPipelineStatisticFlagBits(static_cast<VkQueryPipelineStatisticFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkQueryPipelineStatisticFlagBits(static_cast<VkQueryPipelineStatisticFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkQueryResultFlagBits(VkQueryResultFlagBits input_value)
-{
- switch ((VkQueryResultFlagBits)input_value)
- {
- case VK_QUERY_RESULT_64_BIT:
- return "VK_QUERY_RESULT_64_BIT";
- case VK_QUERY_RESULT_PARTIAL_BIT:
- return "VK_QUERY_RESULT_PARTIAL_BIT";
- case VK_QUERY_RESULT_WAIT_BIT:
- return "VK_QUERY_RESULT_WAIT_BIT";
- case VK_QUERY_RESULT_WITH_AVAILABILITY_BIT:
- return "VK_QUERY_RESULT_WITH_AVAILABILITY_BIT";
- default:
- return "Unhandled VkQueryResultFlagBits";
- }
-}
-
-static inline std::string string_VkQueryResultFlags(VkQueryResultFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkQueryResultFlagBits(static_cast<VkQueryResultFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkQueryResultFlagBits(static_cast<VkQueryResultFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkBufferCreateFlagBits(VkBufferCreateFlagBits input_value)
-{
- switch ((VkBufferCreateFlagBits)input_value)
- {
- case VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT:
- return "VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT";
- case VK_BUFFER_CREATE_PROTECTED_BIT:
- return "VK_BUFFER_CREATE_PROTECTED_BIT";
- case VK_BUFFER_CREATE_SPARSE_ALIASED_BIT:
- return "VK_BUFFER_CREATE_SPARSE_ALIASED_BIT";
- case VK_BUFFER_CREATE_SPARSE_BINDING_BIT:
- return "VK_BUFFER_CREATE_SPARSE_BINDING_BIT";
- case VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT:
- return "VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT";
- default:
- return "Unhandled VkBufferCreateFlagBits";
- }
-}
-
-static inline std::string string_VkBufferCreateFlags(VkBufferCreateFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkBufferCreateFlagBits(static_cast<VkBufferCreateFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkBufferCreateFlagBits(static_cast<VkBufferCreateFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkBufferUsageFlagBits(VkBufferUsageFlagBits input_value)
-{
- switch ((VkBufferUsageFlagBits)input_value)
- {
- case VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT:
- return "VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT";
- case VK_BUFFER_USAGE_INDEX_BUFFER_BIT:
- return "VK_BUFFER_USAGE_INDEX_BUFFER_BIT";
- case VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT:
- return "VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT";
- case VK_BUFFER_USAGE_RAY_TRACING_BIT_NV:
- return "VK_BUFFER_USAGE_RAY_TRACING_BIT_NV";
- case VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT:
- return "VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT";
- case VK_BUFFER_USAGE_STORAGE_BUFFER_BIT:
- return "VK_BUFFER_USAGE_STORAGE_BUFFER_BIT";
- case VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT:
- return "VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT";
- case VK_BUFFER_USAGE_TRANSFER_DST_BIT:
- return "VK_BUFFER_USAGE_TRANSFER_DST_BIT";
- case VK_BUFFER_USAGE_TRANSFER_SRC_BIT:
- return "VK_BUFFER_USAGE_TRANSFER_SRC_BIT";
- case VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT:
- return "VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT";
- case VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT:
- return "VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT";
- case VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT:
- return "VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT";
- case VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT:
- return "VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT";
- case VK_BUFFER_USAGE_VERTEX_BUFFER_BIT:
- return "VK_BUFFER_USAGE_VERTEX_BUFFER_BIT";
- default:
- return "Unhandled VkBufferUsageFlagBits";
- }
-}
-
-static inline std::string string_VkBufferUsageFlags(VkBufferUsageFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkBufferUsageFlagBits(static_cast<VkBufferUsageFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkBufferUsageFlagBits(static_cast<VkBufferUsageFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkSharingMode(VkSharingMode input_value)
-{
- switch ((VkSharingMode)input_value)
- {
- case VK_SHARING_MODE_CONCURRENT:
- return "VK_SHARING_MODE_CONCURRENT";
- case VK_SHARING_MODE_EXCLUSIVE:
- return "VK_SHARING_MODE_EXCLUSIVE";
- default:
- return "Unhandled VkSharingMode";
- }
-}
-
-static inline const char* string_VkImageLayout(VkImageLayout input_value)
-{
- switch ((VkImageLayout)input_value)
- {
- case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
- return "VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL";
- case VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL:
- return "VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL";
- case VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL:
- return "VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL";
- case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
- return "VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL";
- case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL:
- return "VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL";
- case VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT:
- return "VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT";
- case VK_IMAGE_LAYOUT_GENERAL:
- return "VK_IMAGE_LAYOUT_GENERAL";
- case VK_IMAGE_LAYOUT_PREINITIALIZED:
- return "VK_IMAGE_LAYOUT_PREINITIALIZED";
- case VK_IMAGE_LAYOUT_PRESENT_SRC_KHR:
- return "VK_IMAGE_LAYOUT_PRESENT_SRC_KHR";
- case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
- return "VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL";
- case VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV:
- return "VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV";
- case VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR:
- return "VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR";
- case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:
- return "VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL";
- case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:
- return "VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL";
- case VK_IMAGE_LAYOUT_UNDEFINED:
- return "VK_IMAGE_LAYOUT_UNDEFINED";
- default:
- return "Unhandled VkImageLayout";
- }
-}
-
-static inline const char* string_VkImageViewCreateFlagBits(VkImageViewCreateFlagBits input_value)
-{
- switch ((VkImageViewCreateFlagBits)input_value)
- {
- case VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT:
- return "VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT";
- default:
- return "Unhandled VkImageViewCreateFlagBits";
- }
-}
-
-static inline std::string string_VkImageViewCreateFlags(VkImageViewCreateFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkImageViewCreateFlagBits(static_cast<VkImageViewCreateFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkImageViewCreateFlagBits(static_cast<VkImageViewCreateFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkImageViewType(VkImageViewType input_value)
-{
- switch ((VkImageViewType)input_value)
- {
- case VK_IMAGE_VIEW_TYPE_1D:
- return "VK_IMAGE_VIEW_TYPE_1D";
- case VK_IMAGE_VIEW_TYPE_1D_ARRAY:
- return "VK_IMAGE_VIEW_TYPE_1D_ARRAY";
- case VK_IMAGE_VIEW_TYPE_2D:
- return "VK_IMAGE_VIEW_TYPE_2D";
- case VK_IMAGE_VIEW_TYPE_2D_ARRAY:
- return "VK_IMAGE_VIEW_TYPE_2D_ARRAY";
- case VK_IMAGE_VIEW_TYPE_3D:
- return "VK_IMAGE_VIEW_TYPE_3D";
- case VK_IMAGE_VIEW_TYPE_CUBE:
- return "VK_IMAGE_VIEW_TYPE_CUBE";
- case VK_IMAGE_VIEW_TYPE_CUBE_ARRAY:
- return "VK_IMAGE_VIEW_TYPE_CUBE_ARRAY";
- default:
- return "Unhandled VkImageViewType";
- }
-}
-
-static inline const char* string_VkComponentSwizzle(VkComponentSwizzle input_value)
-{
- switch ((VkComponentSwizzle)input_value)
- {
- case VK_COMPONENT_SWIZZLE_A:
- return "VK_COMPONENT_SWIZZLE_A";
- case VK_COMPONENT_SWIZZLE_B:
- return "VK_COMPONENT_SWIZZLE_B";
- case VK_COMPONENT_SWIZZLE_G:
- return "VK_COMPONENT_SWIZZLE_G";
- case VK_COMPONENT_SWIZZLE_IDENTITY:
- return "VK_COMPONENT_SWIZZLE_IDENTITY";
- case VK_COMPONENT_SWIZZLE_ONE:
- return "VK_COMPONENT_SWIZZLE_ONE";
- case VK_COMPONENT_SWIZZLE_R:
- return "VK_COMPONENT_SWIZZLE_R";
- case VK_COMPONENT_SWIZZLE_ZERO:
- return "VK_COMPONENT_SWIZZLE_ZERO";
- default:
- return "Unhandled VkComponentSwizzle";
- }
-}
-
-static inline const char* string_VkPipelineCreateFlagBits(VkPipelineCreateFlagBits input_value)
-{
- switch ((VkPipelineCreateFlagBits)input_value)
- {
- case VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT:
- return "VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT";
- case VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR:
- return "VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR";
- case VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR:
- return "VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR";
- case VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV:
- return "VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV";
- case VK_PIPELINE_CREATE_DERIVATIVE_BIT:
- return "VK_PIPELINE_CREATE_DERIVATIVE_BIT";
- case VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT:
- return "VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT";
- case VK_PIPELINE_CREATE_DISPATCH_BASE:
- return "VK_PIPELINE_CREATE_DISPATCH_BASE";
- case VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT:
- return "VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT";
- default:
- return "Unhandled VkPipelineCreateFlagBits";
- }
-}
-
-static inline std::string string_VkPipelineCreateFlags(VkPipelineCreateFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkPipelineCreateFlagBits(static_cast<VkPipelineCreateFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkPipelineCreateFlagBits(static_cast<VkPipelineCreateFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkPipelineShaderStageCreateFlagBits(VkPipelineShaderStageCreateFlagBits input_value)
-{
- switch ((VkPipelineShaderStageCreateFlagBits)input_value)
- {
- case VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT:
- return "VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT";
- case VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT:
- return "VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT";
- default:
- return "Unhandled VkPipelineShaderStageCreateFlagBits";
- }
-}
-
-static inline std::string string_VkPipelineShaderStageCreateFlags(VkPipelineShaderStageCreateFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkPipelineShaderStageCreateFlagBits(static_cast<VkPipelineShaderStageCreateFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkPipelineShaderStageCreateFlagBits(static_cast<VkPipelineShaderStageCreateFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkShaderStageFlagBits(VkShaderStageFlagBits input_value)
-{
- switch ((VkShaderStageFlagBits)input_value)
- {
- case VK_SHADER_STAGE_ALL:
- return "VK_SHADER_STAGE_ALL";
- case VK_SHADER_STAGE_ALL_GRAPHICS:
- return "VK_SHADER_STAGE_ALL_GRAPHICS";
- case VK_SHADER_STAGE_ANY_HIT_BIT_NV:
- return "VK_SHADER_STAGE_ANY_HIT_BIT_NV";
- case VK_SHADER_STAGE_CALLABLE_BIT_NV:
- return "VK_SHADER_STAGE_CALLABLE_BIT_NV";
- case VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV:
- return "VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV";
- case VK_SHADER_STAGE_COMPUTE_BIT:
- return "VK_SHADER_STAGE_COMPUTE_BIT";
- case VK_SHADER_STAGE_FRAGMENT_BIT:
- return "VK_SHADER_STAGE_FRAGMENT_BIT";
- case VK_SHADER_STAGE_GEOMETRY_BIT:
- return "VK_SHADER_STAGE_GEOMETRY_BIT";
- case VK_SHADER_STAGE_INTERSECTION_BIT_NV:
- return "VK_SHADER_STAGE_INTERSECTION_BIT_NV";
- case VK_SHADER_STAGE_MESH_BIT_NV:
- return "VK_SHADER_STAGE_MESH_BIT_NV";
- case VK_SHADER_STAGE_MISS_BIT_NV:
- return "VK_SHADER_STAGE_MISS_BIT_NV";
- case VK_SHADER_STAGE_RAYGEN_BIT_NV:
- return "VK_SHADER_STAGE_RAYGEN_BIT_NV";
- case VK_SHADER_STAGE_TASK_BIT_NV:
- return "VK_SHADER_STAGE_TASK_BIT_NV";
- case VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT:
- return "VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT";
- case VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT:
- return "VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT";
- case VK_SHADER_STAGE_VERTEX_BIT:
- return "VK_SHADER_STAGE_VERTEX_BIT";
- default:
- return "Unhandled VkShaderStageFlagBits";
- }
-}
-
-static inline std::string string_VkShaderStageFlags(VkShaderStageFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkShaderStageFlagBits(static_cast<VkShaderStageFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkShaderStageFlagBits(static_cast<VkShaderStageFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkVertexInputRate(VkVertexInputRate input_value)
-{
- switch ((VkVertexInputRate)input_value)
- {
- case VK_VERTEX_INPUT_RATE_INSTANCE:
- return "VK_VERTEX_INPUT_RATE_INSTANCE";
- case VK_VERTEX_INPUT_RATE_VERTEX:
- return "VK_VERTEX_INPUT_RATE_VERTEX";
- default:
- return "Unhandled VkVertexInputRate";
- }
-}
-
-static inline const char* string_VkPrimitiveTopology(VkPrimitiveTopology input_value)
-{
- switch ((VkPrimitiveTopology)input_value)
- {
- case VK_PRIMITIVE_TOPOLOGY_LINE_LIST:
- return "VK_PRIMITIVE_TOPOLOGY_LINE_LIST";
- case VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY:
- return "VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY";
- case VK_PRIMITIVE_TOPOLOGY_LINE_STRIP:
- return "VK_PRIMITIVE_TOPOLOGY_LINE_STRIP";
- case VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY:
- return "VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY";
- case VK_PRIMITIVE_TOPOLOGY_PATCH_LIST:
- return "VK_PRIMITIVE_TOPOLOGY_PATCH_LIST";
- case VK_PRIMITIVE_TOPOLOGY_POINT_LIST:
- return "VK_PRIMITIVE_TOPOLOGY_POINT_LIST";
- case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN:
- return "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN";
- case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST:
- return "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST";
- case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY:
- return "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY";
- case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP:
- return "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP";
- case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY:
- return "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY";
- default:
- return "Unhandled VkPrimitiveTopology";
- }
-}
-
-static inline const char* string_VkPolygonMode(VkPolygonMode input_value)
-{
- switch ((VkPolygonMode)input_value)
- {
- case VK_POLYGON_MODE_FILL:
- return "VK_POLYGON_MODE_FILL";
- case VK_POLYGON_MODE_FILL_RECTANGLE_NV:
- return "VK_POLYGON_MODE_FILL_RECTANGLE_NV";
- case VK_POLYGON_MODE_LINE:
- return "VK_POLYGON_MODE_LINE";
- case VK_POLYGON_MODE_POINT:
- return "VK_POLYGON_MODE_POINT";
- default:
- return "Unhandled VkPolygonMode";
- }
-}
-
-static inline const char* string_VkCullModeFlagBits(VkCullModeFlagBits input_value)
-{
- switch ((VkCullModeFlagBits)input_value)
- {
- case VK_CULL_MODE_BACK_BIT:
- return "VK_CULL_MODE_BACK_BIT";
- case VK_CULL_MODE_FRONT_AND_BACK:
- return "VK_CULL_MODE_FRONT_AND_BACK";
- case VK_CULL_MODE_FRONT_BIT:
- return "VK_CULL_MODE_FRONT_BIT";
- case VK_CULL_MODE_NONE:
- return "VK_CULL_MODE_NONE";
- default:
- return "Unhandled VkCullModeFlagBits";
- }
-}
-
-static inline std::string string_VkCullModeFlags(VkCullModeFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkCullModeFlagBits(static_cast<VkCullModeFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkCullModeFlagBits(static_cast<VkCullModeFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkFrontFace(VkFrontFace input_value)
-{
- switch ((VkFrontFace)input_value)
- {
- case VK_FRONT_FACE_CLOCKWISE:
- return "VK_FRONT_FACE_CLOCKWISE";
- case VK_FRONT_FACE_COUNTER_CLOCKWISE:
- return "VK_FRONT_FACE_COUNTER_CLOCKWISE";
- default:
- return "Unhandled VkFrontFace";
- }
-}
-
-static inline const char* string_VkCompareOp(VkCompareOp input_value)
-{
- switch ((VkCompareOp)input_value)
- {
- case VK_COMPARE_OP_ALWAYS:
- return "VK_COMPARE_OP_ALWAYS";
- case VK_COMPARE_OP_EQUAL:
- return "VK_COMPARE_OP_EQUAL";
- case VK_COMPARE_OP_GREATER:
- return "VK_COMPARE_OP_GREATER";
- case VK_COMPARE_OP_GREATER_OR_EQUAL:
- return "VK_COMPARE_OP_GREATER_OR_EQUAL";
- case VK_COMPARE_OP_LESS:
- return "VK_COMPARE_OP_LESS";
- case VK_COMPARE_OP_LESS_OR_EQUAL:
- return "VK_COMPARE_OP_LESS_OR_EQUAL";
- case VK_COMPARE_OP_NEVER:
- return "VK_COMPARE_OP_NEVER";
- case VK_COMPARE_OP_NOT_EQUAL:
- return "VK_COMPARE_OP_NOT_EQUAL";
- default:
- return "Unhandled VkCompareOp";
- }
-}
-
-static inline const char* string_VkStencilOp(VkStencilOp input_value)
-{
- switch ((VkStencilOp)input_value)
- {
- case VK_STENCIL_OP_DECREMENT_AND_CLAMP:
- return "VK_STENCIL_OP_DECREMENT_AND_CLAMP";
- case VK_STENCIL_OP_DECREMENT_AND_WRAP:
- return "VK_STENCIL_OP_DECREMENT_AND_WRAP";
- case VK_STENCIL_OP_INCREMENT_AND_CLAMP:
- return "VK_STENCIL_OP_INCREMENT_AND_CLAMP";
- case VK_STENCIL_OP_INCREMENT_AND_WRAP:
- return "VK_STENCIL_OP_INCREMENT_AND_WRAP";
- case VK_STENCIL_OP_INVERT:
- return "VK_STENCIL_OP_INVERT";
- case VK_STENCIL_OP_KEEP:
- return "VK_STENCIL_OP_KEEP";
- case VK_STENCIL_OP_REPLACE:
- return "VK_STENCIL_OP_REPLACE";
- case VK_STENCIL_OP_ZERO:
- return "VK_STENCIL_OP_ZERO";
- default:
- return "Unhandled VkStencilOp";
- }
-}
-
-static inline const char* string_VkLogicOp(VkLogicOp input_value)
-{
- switch ((VkLogicOp)input_value)
- {
- case VK_LOGIC_OP_AND:
- return "VK_LOGIC_OP_AND";
- case VK_LOGIC_OP_AND_INVERTED:
- return "VK_LOGIC_OP_AND_INVERTED";
- case VK_LOGIC_OP_AND_REVERSE:
- return "VK_LOGIC_OP_AND_REVERSE";
- case VK_LOGIC_OP_CLEAR:
- return "VK_LOGIC_OP_CLEAR";
- case VK_LOGIC_OP_COPY:
- return "VK_LOGIC_OP_COPY";
- case VK_LOGIC_OP_COPY_INVERTED:
- return "VK_LOGIC_OP_COPY_INVERTED";
- case VK_LOGIC_OP_EQUIVALENT:
- return "VK_LOGIC_OP_EQUIVALENT";
- case VK_LOGIC_OP_INVERT:
- return "VK_LOGIC_OP_INVERT";
- case VK_LOGIC_OP_NAND:
- return "VK_LOGIC_OP_NAND";
- case VK_LOGIC_OP_NOR:
- return "VK_LOGIC_OP_NOR";
- case VK_LOGIC_OP_NO_OP:
- return "VK_LOGIC_OP_NO_OP";
- case VK_LOGIC_OP_OR:
- return "VK_LOGIC_OP_OR";
- case VK_LOGIC_OP_OR_INVERTED:
- return "VK_LOGIC_OP_OR_INVERTED";
- case VK_LOGIC_OP_OR_REVERSE:
- return "VK_LOGIC_OP_OR_REVERSE";
- case VK_LOGIC_OP_SET:
- return "VK_LOGIC_OP_SET";
- case VK_LOGIC_OP_XOR:
- return "VK_LOGIC_OP_XOR";
- default:
- return "Unhandled VkLogicOp";
- }
-}
-
-static inline const char* string_VkBlendFactor(VkBlendFactor input_value)
-{
- switch ((VkBlendFactor)input_value)
- {
- case VK_BLEND_FACTOR_CONSTANT_ALPHA:
- return "VK_BLEND_FACTOR_CONSTANT_ALPHA";
- case VK_BLEND_FACTOR_CONSTANT_COLOR:
- return "VK_BLEND_FACTOR_CONSTANT_COLOR";
- case VK_BLEND_FACTOR_DST_ALPHA:
- return "VK_BLEND_FACTOR_DST_ALPHA";
- case VK_BLEND_FACTOR_DST_COLOR:
- return "VK_BLEND_FACTOR_DST_COLOR";
- case VK_BLEND_FACTOR_ONE:
- return "VK_BLEND_FACTOR_ONE";
- case VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA:
- return "VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA";
- case VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR:
- return "VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR";
- case VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA:
- return "VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA";
- case VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR:
- return "VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR";
- case VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA:
- return "VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA";
- case VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR:
- return "VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR";
- case VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA:
- return "VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA";
- case VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR:
- return "VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR";
- case VK_BLEND_FACTOR_SRC1_ALPHA:
- return "VK_BLEND_FACTOR_SRC1_ALPHA";
- case VK_BLEND_FACTOR_SRC1_COLOR:
- return "VK_BLEND_FACTOR_SRC1_COLOR";
- case VK_BLEND_FACTOR_SRC_ALPHA:
- return "VK_BLEND_FACTOR_SRC_ALPHA";
- case VK_BLEND_FACTOR_SRC_ALPHA_SATURATE:
- return "VK_BLEND_FACTOR_SRC_ALPHA_SATURATE";
- case VK_BLEND_FACTOR_SRC_COLOR:
- return "VK_BLEND_FACTOR_SRC_COLOR";
- case VK_BLEND_FACTOR_ZERO:
- return "VK_BLEND_FACTOR_ZERO";
- default:
- return "Unhandled VkBlendFactor";
- }
-}
-
-static inline const char* string_VkBlendOp(VkBlendOp input_value)
-{
- switch ((VkBlendOp)input_value)
- {
- case VK_BLEND_OP_ADD:
- return "VK_BLEND_OP_ADD";
- case VK_BLEND_OP_BLUE_EXT:
- return "VK_BLEND_OP_BLUE_EXT";
- case VK_BLEND_OP_COLORBURN_EXT:
- return "VK_BLEND_OP_COLORBURN_EXT";
- case VK_BLEND_OP_COLORDODGE_EXT:
- return "VK_BLEND_OP_COLORDODGE_EXT";
- case VK_BLEND_OP_CONTRAST_EXT:
- return "VK_BLEND_OP_CONTRAST_EXT";
- case VK_BLEND_OP_DARKEN_EXT:
- return "VK_BLEND_OP_DARKEN_EXT";
- case VK_BLEND_OP_DIFFERENCE_EXT:
- return "VK_BLEND_OP_DIFFERENCE_EXT";
- case VK_BLEND_OP_DST_ATOP_EXT:
- return "VK_BLEND_OP_DST_ATOP_EXT";
- case VK_BLEND_OP_DST_EXT:
- return "VK_BLEND_OP_DST_EXT";
- case VK_BLEND_OP_DST_IN_EXT:
- return "VK_BLEND_OP_DST_IN_EXT";
- case VK_BLEND_OP_DST_OUT_EXT:
- return "VK_BLEND_OP_DST_OUT_EXT";
- case VK_BLEND_OP_DST_OVER_EXT:
- return "VK_BLEND_OP_DST_OVER_EXT";
- case VK_BLEND_OP_EXCLUSION_EXT:
- return "VK_BLEND_OP_EXCLUSION_EXT";
- case VK_BLEND_OP_GREEN_EXT:
- return "VK_BLEND_OP_GREEN_EXT";
- case VK_BLEND_OP_HARDLIGHT_EXT:
- return "VK_BLEND_OP_HARDLIGHT_EXT";
- case VK_BLEND_OP_HARDMIX_EXT:
- return "VK_BLEND_OP_HARDMIX_EXT";
- case VK_BLEND_OP_HSL_COLOR_EXT:
- return "VK_BLEND_OP_HSL_COLOR_EXT";
- case VK_BLEND_OP_HSL_HUE_EXT:
- return "VK_BLEND_OP_HSL_HUE_EXT";
- case VK_BLEND_OP_HSL_LUMINOSITY_EXT:
- return "VK_BLEND_OP_HSL_LUMINOSITY_EXT";
- case VK_BLEND_OP_HSL_SATURATION_EXT:
- return "VK_BLEND_OP_HSL_SATURATION_EXT";
- case VK_BLEND_OP_INVERT_EXT:
- return "VK_BLEND_OP_INVERT_EXT";
- case VK_BLEND_OP_INVERT_OVG_EXT:
- return "VK_BLEND_OP_INVERT_OVG_EXT";
- case VK_BLEND_OP_INVERT_RGB_EXT:
- return "VK_BLEND_OP_INVERT_RGB_EXT";
- case VK_BLEND_OP_LIGHTEN_EXT:
- return "VK_BLEND_OP_LIGHTEN_EXT";
- case VK_BLEND_OP_LINEARBURN_EXT:
- return "VK_BLEND_OP_LINEARBURN_EXT";
- case VK_BLEND_OP_LINEARDODGE_EXT:
- return "VK_BLEND_OP_LINEARDODGE_EXT";
- case VK_BLEND_OP_LINEARLIGHT_EXT:
- return "VK_BLEND_OP_LINEARLIGHT_EXT";
- case VK_BLEND_OP_MAX:
- return "VK_BLEND_OP_MAX";
- case VK_BLEND_OP_MIN:
- return "VK_BLEND_OP_MIN";
- case VK_BLEND_OP_MINUS_CLAMPED_EXT:
- return "VK_BLEND_OP_MINUS_CLAMPED_EXT";
- case VK_BLEND_OP_MINUS_EXT:
- return "VK_BLEND_OP_MINUS_EXT";
- case VK_BLEND_OP_MULTIPLY_EXT:
- return "VK_BLEND_OP_MULTIPLY_EXT";
- case VK_BLEND_OP_OVERLAY_EXT:
- return "VK_BLEND_OP_OVERLAY_EXT";
- case VK_BLEND_OP_PINLIGHT_EXT:
- return "VK_BLEND_OP_PINLIGHT_EXT";
- case VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT:
- return "VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT";
- case VK_BLEND_OP_PLUS_CLAMPED_EXT:
- return "VK_BLEND_OP_PLUS_CLAMPED_EXT";
- case VK_BLEND_OP_PLUS_DARKER_EXT:
- return "VK_BLEND_OP_PLUS_DARKER_EXT";
- case VK_BLEND_OP_PLUS_EXT:
- return "VK_BLEND_OP_PLUS_EXT";
- case VK_BLEND_OP_RED_EXT:
- return "VK_BLEND_OP_RED_EXT";
- case VK_BLEND_OP_REVERSE_SUBTRACT:
- return "VK_BLEND_OP_REVERSE_SUBTRACT";
- case VK_BLEND_OP_SCREEN_EXT:
- return "VK_BLEND_OP_SCREEN_EXT";
- case VK_BLEND_OP_SOFTLIGHT_EXT:
- return "VK_BLEND_OP_SOFTLIGHT_EXT";
- case VK_BLEND_OP_SRC_ATOP_EXT:
- return "VK_BLEND_OP_SRC_ATOP_EXT";
- case VK_BLEND_OP_SRC_EXT:
- return "VK_BLEND_OP_SRC_EXT";
- case VK_BLEND_OP_SRC_IN_EXT:
- return "VK_BLEND_OP_SRC_IN_EXT";
- case VK_BLEND_OP_SRC_OUT_EXT:
- return "VK_BLEND_OP_SRC_OUT_EXT";
- case VK_BLEND_OP_SRC_OVER_EXT:
- return "VK_BLEND_OP_SRC_OVER_EXT";
- case VK_BLEND_OP_SUBTRACT:
- return "VK_BLEND_OP_SUBTRACT";
- case VK_BLEND_OP_VIVIDLIGHT_EXT:
- return "VK_BLEND_OP_VIVIDLIGHT_EXT";
- case VK_BLEND_OP_XOR_EXT:
- return "VK_BLEND_OP_XOR_EXT";
- case VK_BLEND_OP_ZERO_EXT:
- return "VK_BLEND_OP_ZERO_EXT";
- default:
- return "Unhandled VkBlendOp";
- }
-}
-
-static inline const char* string_VkColorComponentFlagBits(VkColorComponentFlagBits input_value)
-{
- switch ((VkColorComponentFlagBits)input_value)
- {
- case VK_COLOR_COMPONENT_A_BIT:
- return "VK_COLOR_COMPONENT_A_BIT";
- case VK_COLOR_COMPONENT_B_BIT:
- return "VK_COLOR_COMPONENT_B_BIT";
- case VK_COLOR_COMPONENT_G_BIT:
- return "VK_COLOR_COMPONENT_G_BIT";
- case VK_COLOR_COMPONENT_R_BIT:
- return "VK_COLOR_COMPONENT_R_BIT";
- default:
- return "Unhandled VkColorComponentFlagBits";
- }
-}
-
-static inline std::string string_VkColorComponentFlags(VkColorComponentFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkColorComponentFlagBits(static_cast<VkColorComponentFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkColorComponentFlagBits(static_cast<VkColorComponentFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkDynamicState(VkDynamicState input_value)
-{
- switch ((VkDynamicState)input_value)
- {
- case VK_DYNAMIC_STATE_BLEND_CONSTANTS:
- return "VK_DYNAMIC_STATE_BLEND_CONSTANTS";
- case VK_DYNAMIC_STATE_DEPTH_BIAS:
- return "VK_DYNAMIC_STATE_DEPTH_BIAS";
- case VK_DYNAMIC_STATE_DEPTH_BOUNDS:
- return "VK_DYNAMIC_STATE_DEPTH_BOUNDS";
- case VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT:
- return "VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT";
- case VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV:
- return "VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV";
- case VK_DYNAMIC_STATE_LINE_STIPPLE_EXT:
- return "VK_DYNAMIC_STATE_LINE_STIPPLE_EXT";
- case VK_DYNAMIC_STATE_LINE_WIDTH:
- return "VK_DYNAMIC_STATE_LINE_WIDTH";
- case VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT:
- return "VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT";
- case VK_DYNAMIC_STATE_SCISSOR:
- return "VK_DYNAMIC_STATE_SCISSOR";
- case VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK:
- return "VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK";
- case VK_DYNAMIC_STATE_STENCIL_REFERENCE:
- return "VK_DYNAMIC_STATE_STENCIL_REFERENCE";
- case VK_DYNAMIC_STATE_STENCIL_WRITE_MASK:
- return "VK_DYNAMIC_STATE_STENCIL_WRITE_MASK";
- case VK_DYNAMIC_STATE_VIEWPORT:
- return "VK_DYNAMIC_STATE_VIEWPORT";
- case VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV:
- return "VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV";
- case VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV:
- return "VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV";
- case VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV:
- return "VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV";
- default:
- return "Unhandled VkDynamicState";
- }
-}
-
-static inline const char* string_VkSamplerCreateFlagBits(VkSamplerCreateFlagBits input_value)
-{
- switch ((VkSamplerCreateFlagBits)input_value)
- {
- case VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT:
- return "VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT";
- case VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT:
- return "VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT";
- default:
- return "Unhandled VkSamplerCreateFlagBits";
- }
-}
-
-static inline std::string string_VkSamplerCreateFlags(VkSamplerCreateFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkSamplerCreateFlagBits(static_cast<VkSamplerCreateFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkSamplerCreateFlagBits(static_cast<VkSamplerCreateFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkFilter(VkFilter input_value)
-{
- switch ((VkFilter)input_value)
- {
- case VK_FILTER_CUBIC_IMG:
- return "VK_FILTER_CUBIC_IMG";
- case VK_FILTER_LINEAR:
- return "VK_FILTER_LINEAR";
- case VK_FILTER_NEAREST:
- return "VK_FILTER_NEAREST";
- default:
- return "Unhandled VkFilter";
- }
-}
-
-static inline const char* string_VkSamplerMipmapMode(VkSamplerMipmapMode input_value)
-{
- switch ((VkSamplerMipmapMode)input_value)
- {
- case VK_SAMPLER_MIPMAP_MODE_LINEAR:
- return "VK_SAMPLER_MIPMAP_MODE_LINEAR";
- case VK_SAMPLER_MIPMAP_MODE_NEAREST:
- return "VK_SAMPLER_MIPMAP_MODE_NEAREST";
- default:
- return "Unhandled VkSamplerMipmapMode";
- }
-}
-
-static inline const char* string_VkSamplerAddressMode(VkSamplerAddressMode input_value)
-{
- switch ((VkSamplerAddressMode)input_value)
- {
- case VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER:
- return "VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER";
- case VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE:
- return "VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE";
- case VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT:
- return "VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT";
- case VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE:
- return "VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE";
- case VK_SAMPLER_ADDRESS_MODE_REPEAT:
- return "VK_SAMPLER_ADDRESS_MODE_REPEAT";
- default:
- return "Unhandled VkSamplerAddressMode";
- }
-}
-
-static inline const char* string_VkBorderColor(VkBorderColor input_value)
-{
- switch ((VkBorderColor)input_value)
- {
- case VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK:
- return "VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK";
- case VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE:
- return "VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE";
- case VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK:
- return "VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK";
- case VK_BORDER_COLOR_INT_OPAQUE_BLACK:
- return "VK_BORDER_COLOR_INT_OPAQUE_BLACK";
- case VK_BORDER_COLOR_INT_OPAQUE_WHITE:
- return "VK_BORDER_COLOR_INT_OPAQUE_WHITE";
- case VK_BORDER_COLOR_INT_TRANSPARENT_BLACK:
- return "VK_BORDER_COLOR_INT_TRANSPARENT_BLACK";
- default:
- return "Unhandled VkBorderColor";
- }
-}
-
-static inline const char* string_VkDescriptorSetLayoutCreateFlagBits(VkDescriptorSetLayoutCreateFlagBits input_value)
-{
- switch ((VkDescriptorSetLayoutCreateFlagBits)input_value)
- {
- case VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR:
- return "VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR";
- case VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT:
- return "VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT";
- default:
- return "Unhandled VkDescriptorSetLayoutCreateFlagBits";
- }
-}
-
-static inline std::string string_VkDescriptorSetLayoutCreateFlags(VkDescriptorSetLayoutCreateFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkDescriptorSetLayoutCreateFlagBits(static_cast<VkDescriptorSetLayoutCreateFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkDescriptorSetLayoutCreateFlagBits(static_cast<VkDescriptorSetLayoutCreateFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkDescriptorType(VkDescriptorType input_value)
-{
- switch ((VkDescriptorType)input_value)
- {
- case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV:
- return "VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV";
- case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
- return "VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER";
- case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT:
- return "VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT";
- case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
- return "VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT";
- case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
- return "VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE";
- case VK_DESCRIPTOR_TYPE_SAMPLER:
- return "VK_DESCRIPTOR_TYPE_SAMPLER";
- case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
- return "VK_DESCRIPTOR_TYPE_STORAGE_BUFFER";
- case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
- return "VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC";
- case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
- return "VK_DESCRIPTOR_TYPE_STORAGE_IMAGE";
- case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
- return "VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER";
- case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
- return "VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER";
- case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
- return "VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC";
- case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
- return "VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER";
- default:
- return "Unhandled VkDescriptorType";
- }
-}
-
-static inline const char* string_VkDescriptorPoolCreateFlagBits(VkDescriptorPoolCreateFlagBits input_value)
-{
- switch ((VkDescriptorPoolCreateFlagBits)input_value)
- {
- case VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT:
- return "VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT";
- case VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT:
- return "VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT";
- default:
- return "Unhandled VkDescriptorPoolCreateFlagBits";
- }
-}
-
-static inline std::string string_VkDescriptorPoolCreateFlags(VkDescriptorPoolCreateFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkDescriptorPoolCreateFlagBits(static_cast<VkDescriptorPoolCreateFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkDescriptorPoolCreateFlagBits(static_cast<VkDescriptorPoolCreateFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkFramebufferCreateFlagBits(VkFramebufferCreateFlagBits input_value)
-{
- switch ((VkFramebufferCreateFlagBits)input_value)
- {
- case VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR:
- return "VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR";
- default:
- return "Unhandled VkFramebufferCreateFlagBits";
- }
-}
-
-static inline std::string string_VkFramebufferCreateFlags(VkFramebufferCreateFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkFramebufferCreateFlagBits(static_cast<VkFramebufferCreateFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkFramebufferCreateFlagBits(static_cast<VkFramebufferCreateFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkAttachmentDescriptionFlagBits(VkAttachmentDescriptionFlagBits input_value)
-{
- switch ((VkAttachmentDescriptionFlagBits)input_value)
- {
- case VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT:
- return "VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT";
- default:
- return "Unhandled VkAttachmentDescriptionFlagBits";
- }
-}
-
-static inline std::string string_VkAttachmentDescriptionFlags(VkAttachmentDescriptionFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkAttachmentDescriptionFlagBits(static_cast<VkAttachmentDescriptionFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkAttachmentDescriptionFlagBits(static_cast<VkAttachmentDescriptionFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkAttachmentLoadOp(VkAttachmentLoadOp input_value)
-{
- switch ((VkAttachmentLoadOp)input_value)
- {
- case VK_ATTACHMENT_LOAD_OP_CLEAR:
- return "VK_ATTACHMENT_LOAD_OP_CLEAR";
- case VK_ATTACHMENT_LOAD_OP_DONT_CARE:
- return "VK_ATTACHMENT_LOAD_OP_DONT_CARE";
- case VK_ATTACHMENT_LOAD_OP_LOAD:
- return "VK_ATTACHMENT_LOAD_OP_LOAD";
- default:
- return "Unhandled VkAttachmentLoadOp";
- }
-}
-
-static inline const char* string_VkAttachmentStoreOp(VkAttachmentStoreOp input_value)
-{
- switch ((VkAttachmentStoreOp)input_value)
- {
- case VK_ATTACHMENT_STORE_OP_DONT_CARE:
- return "VK_ATTACHMENT_STORE_OP_DONT_CARE";
- case VK_ATTACHMENT_STORE_OP_STORE:
- return "VK_ATTACHMENT_STORE_OP_STORE";
- default:
- return "Unhandled VkAttachmentStoreOp";
- }
-}
-
-static inline const char* string_VkSubpassDescriptionFlagBits(VkSubpassDescriptionFlagBits input_value)
-{
- switch ((VkSubpassDescriptionFlagBits)input_value)
- {
- case VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX:
- return "VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX";
- case VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX:
- return "VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX";
- default:
- return "Unhandled VkSubpassDescriptionFlagBits";
- }
-}
-
-static inline std::string string_VkSubpassDescriptionFlags(VkSubpassDescriptionFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkSubpassDescriptionFlagBits(static_cast<VkSubpassDescriptionFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkSubpassDescriptionFlagBits(static_cast<VkSubpassDescriptionFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkPipelineBindPoint(VkPipelineBindPoint input_value)
-{
- switch ((VkPipelineBindPoint)input_value)
- {
- case VK_PIPELINE_BIND_POINT_COMPUTE:
- return "VK_PIPELINE_BIND_POINT_COMPUTE";
- case VK_PIPELINE_BIND_POINT_GRAPHICS:
- return "VK_PIPELINE_BIND_POINT_GRAPHICS";
- case VK_PIPELINE_BIND_POINT_RAY_TRACING_NV:
- return "VK_PIPELINE_BIND_POINT_RAY_TRACING_NV";
- default:
- return "Unhandled VkPipelineBindPoint";
- }
-}
-
-static inline const char* string_VkAccessFlagBits(VkAccessFlagBits input_value)
-{
- switch ((VkAccessFlagBits)input_value)
- {
- case VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV:
- return "VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV";
- case VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV:
- return "VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV";
- case VK_ACCESS_COLOR_ATTACHMENT_READ_BIT:
- return "VK_ACCESS_COLOR_ATTACHMENT_READ_BIT";
- case VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT:
- return "VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT";
- case VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT:
- return "VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT";
- case VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX:
- return "VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX";
- case VK_ACCESS_COMMAND_PROCESS_WRITE_BIT_NVX:
- return "VK_ACCESS_COMMAND_PROCESS_WRITE_BIT_NVX";
- case VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT:
- return "VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT";
- case VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT:
- return "VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT";
- case VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT:
- return "VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT";
- case VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT:
- return "VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT";
- case VK_ACCESS_HOST_READ_BIT:
- return "VK_ACCESS_HOST_READ_BIT";
- case VK_ACCESS_HOST_WRITE_BIT:
- return "VK_ACCESS_HOST_WRITE_BIT";
- case VK_ACCESS_INDEX_READ_BIT:
- return "VK_ACCESS_INDEX_READ_BIT";
- case VK_ACCESS_INDIRECT_COMMAND_READ_BIT:
- return "VK_ACCESS_INDIRECT_COMMAND_READ_BIT";
- case VK_ACCESS_INPUT_ATTACHMENT_READ_BIT:
- return "VK_ACCESS_INPUT_ATTACHMENT_READ_BIT";
- case VK_ACCESS_MEMORY_READ_BIT:
- return "VK_ACCESS_MEMORY_READ_BIT";
- case VK_ACCESS_MEMORY_WRITE_BIT:
- return "VK_ACCESS_MEMORY_WRITE_BIT";
- case VK_ACCESS_SHADER_READ_BIT:
- return "VK_ACCESS_SHADER_READ_BIT";
- case VK_ACCESS_SHADER_WRITE_BIT:
- return "VK_ACCESS_SHADER_WRITE_BIT";
- case VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV:
- return "VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV";
- case VK_ACCESS_TRANSFER_READ_BIT:
- return "VK_ACCESS_TRANSFER_READ_BIT";
- case VK_ACCESS_TRANSFER_WRITE_BIT:
- return "VK_ACCESS_TRANSFER_WRITE_BIT";
- case VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT:
- return "VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT";
- case VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT:
- return "VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT";
- case VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT:
- return "VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT";
- case VK_ACCESS_UNIFORM_READ_BIT:
- return "VK_ACCESS_UNIFORM_READ_BIT";
- case VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT:
- return "VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT";
- default:
- return "Unhandled VkAccessFlagBits";
- }
-}
-
-static inline std::string string_VkAccessFlags(VkAccessFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkAccessFlagBits(static_cast<VkAccessFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkAccessFlagBits(static_cast<VkAccessFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkDependencyFlagBits(VkDependencyFlagBits input_value)
-{
- switch ((VkDependencyFlagBits)input_value)
- {
- case VK_DEPENDENCY_BY_REGION_BIT:
- return "VK_DEPENDENCY_BY_REGION_BIT";
- case VK_DEPENDENCY_DEVICE_GROUP_BIT:
- return "VK_DEPENDENCY_DEVICE_GROUP_BIT";
- case VK_DEPENDENCY_VIEW_LOCAL_BIT:
- return "VK_DEPENDENCY_VIEW_LOCAL_BIT";
- default:
- return "Unhandled VkDependencyFlagBits";
- }
-}
-
-static inline std::string string_VkDependencyFlags(VkDependencyFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkDependencyFlagBits(static_cast<VkDependencyFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkDependencyFlagBits(static_cast<VkDependencyFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkCommandPoolCreateFlagBits(VkCommandPoolCreateFlagBits input_value)
-{
- switch ((VkCommandPoolCreateFlagBits)input_value)
- {
- case VK_COMMAND_POOL_CREATE_PROTECTED_BIT:
- return "VK_COMMAND_POOL_CREATE_PROTECTED_BIT";
- case VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT:
- return "VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT";
- case VK_COMMAND_POOL_CREATE_TRANSIENT_BIT:
- return "VK_COMMAND_POOL_CREATE_TRANSIENT_BIT";
- default:
- return "Unhandled VkCommandPoolCreateFlagBits";
- }
-}
-
-static inline std::string string_VkCommandPoolCreateFlags(VkCommandPoolCreateFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkCommandPoolCreateFlagBits(static_cast<VkCommandPoolCreateFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkCommandPoolCreateFlagBits(static_cast<VkCommandPoolCreateFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkCommandPoolResetFlagBits(VkCommandPoolResetFlagBits input_value)
-{
- switch ((VkCommandPoolResetFlagBits)input_value)
- {
- case VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT:
- return "VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT";
- default:
- return "Unhandled VkCommandPoolResetFlagBits";
- }
-}
-
-static inline std::string string_VkCommandPoolResetFlags(VkCommandPoolResetFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkCommandPoolResetFlagBits(static_cast<VkCommandPoolResetFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkCommandPoolResetFlagBits(static_cast<VkCommandPoolResetFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkCommandBufferLevel(VkCommandBufferLevel input_value)
-{
- switch ((VkCommandBufferLevel)input_value)
- {
- case VK_COMMAND_BUFFER_LEVEL_PRIMARY:
- return "VK_COMMAND_BUFFER_LEVEL_PRIMARY";
- case VK_COMMAND_BUFFER_LEVEL_SECONDARY:
- return "VK_COMMAND_BUFFER_LEVEL_SECONDARY";
- default:
- return "Unhandled VkCommandBufferLevel";
- }
-}
-
-static inline const char* string_VkCommandBufferUsageFlagBits(VkCommandBufferUsageFlagBits input_value)
-{
- switch ((VkCommandBufferUsageFlagBits)input_value)
- {
- case VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT:
- return "VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT";
- case VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT:
- return "VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT";
- case VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT:
- return "VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT";
- default:
- return "Unhandled VkCommandBufferUsageFlagBits";
- }
-}
-
-static inline std::string string_VkCommandBufferUsageFlags(VkCommandBufferUsageFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkCommandBufferUsageFlagBits(static_cast<VkCommandBufferUsageFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkCommandBufferUsageFlagBits(static_cast<VkCommandBufferUsageFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkQueryControlFlagBits(VkQueryControlFlagBits input_value)
-{
- switch ((VkQueryControlFlagBits)input_value)
- {
- case VK_QUERY_CONTROL_PRECISE_BIT:
- return "VK_QUERY_CONTROL_PRECISE_BIT";
- default:
- return "Unhandled VkQueryControlFlagBits";
- }
-}
-
-static inline std::string string_VkQueryControlFlags(VkQueryControlFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkQueryControlFlagBits(static_cast<VkQueryControlFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkQueryControlFlagBits(static_cast<VkQueryControlFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkCommandBufferResetFlagBits(VkCommandBufferResetFlagBits input_value)
-{
- switch ((VkCommandBufferResetFlagBits)input_value)
- {
- case VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT:
- return "VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT";
- default:
- return "Unhandled VkCommandBufferResetFlagBits";
- }
-}
-
-static inline std::string string_VkCommandBufferResetFlags(VkCommandBufferResetFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkCommandBufferResetFlagBits(static_cast<VkCommandBufferResetFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkCommandBufferResetFlagBits(static_cast<VkCommandBufferResetFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkStencilFaceFlagBits(VkStencilFaceFlagBits input_value)
-{
- switch ((VkStencilFaceFlagBits)input_value)
- {
- case VK_STENCIL_FACE_BACK_BIT:
- return "VK_STENCIL_FACE_BACK_BIT";
- case VK_STENCIL_FACE_FRONT_AND_BACK:
- return "VK_STENCIL_FACE_FRONT_AND_BACK";
- case VK_STENCIL_FACE_FRONT_BIT:
- return "VK_STENCIL_FACE_FRONT_BIT";
- default:
- return "Unhandled VkStencilFaceFlagBits";
- }
-}
-
-static inline std::string string_VkStencilFaceFlags(VkStencilFaceFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkStencilFaceFlagBits(static_cast<VkStencilFaceFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkStencilFaceFlagBits(static_cast<VkStencilFaceFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkIndexType(VkIndexType input_value)
-{
- switch ((VkIndexType)input_value)
- {
- case VK_INDEX_TYPE_NONE_NV:
- return "VK_INDEX_TYPE_NONE_NV";
- case VK_INDEX_TYPE_UINT16:
- return "VK_INDEX_TYPE_UINT16";
- case VK_INDEX_TYPE_UINT32:
- return "VK_INDEX_TYPE_UINT32";
- case VK_INDEX_TYPE_UINT8_EXT:
- return "VK_INDEX_TYPE_UINT8_EXT";
- default:
- return "Unhandled VkIndexType";
- }
-}
-
-static inline const char* string_VkSubpassContents(VkSubpassContents input_value)
-{
- switch ((VkSubpassContents)input_value)
- {
- case VK_SUBPASS_CONTENTS_INLINE:
- return "VK_SUBPASS_CONTENTS_INLINE";
- case VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS:
- return "VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS";
- default:
- return "Unhandled VkSubpassContents";
- }
-}
-
-static inline const char* string_VkObjectType(VkObjectType input_value)
-{
- switch ((VkObjectType)input_value)
- {
- case VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV:
- return "VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV";
- case VK_OBJECT_TYPE_BUFFER:
- return "VK_OBJECT_TYPE_BUFFER";
- case VK_OBJECT_TYPE_BUFFER_VIEW:
- return "VK_OBJECT_TYPE_BUFFER_VIEW";
- case VK_OBJECT_TYPE_COMMAND_BUFFER:
- return "VK_OBJECT_TYPE_COMMAND_BUFFER";
- case VK_OBJECT_TYPE_COMMAND_POOL:
- return "VK_OBJECT_TYPE_COMMAND_POOL";
- case VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT:
- return "VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT";
- case VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT:
- return "VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT";
- case VK_OBJECT_TYPE_DESCRIPTOR_POOL:
- return "VK_OBJECT_TYPE_DESCRIPTOR_POOL";
- case VK_OBJECT_TYPE_DESCRIPTOR_SET:
- return "VK_OBJECT_TYPE_DESCRIPTOR_SET";
- case VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT:
- return "VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT";
- case VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE:
- return "VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE";
- case VK_OBJECT_TYPE_DEVICE:
- return "VK_OBJECT_TYPE_DEVICE";
- case VK_OBJECT_TYPE_DEVICE_MEMORY:
- return "VK_OBJECT_TYPE_DEVICE_MEMORY";
- case VK_OBJECT_TYPE_DISPLAY_KHR:
- return "VK_OBJECT_TYPE_DISPLAY_KHR";
- case VK_OBJECT_TYPE_DISPLAY_MODE_KHR:
- return "VK_OBJECT_TYPE_DISPLAY_MODE_KHR";
- case VK_OBJECT_TYPE_EVENT:
- return "VK_OBJECT_TYPE_EVENT";
- case VK_OBJECT_TYPE_FENCE:
- return "VK_OBJECT_TYPE_FENCE";
- case VK_OBJECT_TYPE_FRAMEBUFFER:
- return "VK_OBJECT_TYPE_FRAMEBUFFER";
- case VK_OBJECT_TYPE_IMAGE:
- return "VK_OBJECT_TYPE_IMAGE";
- case VK_OBJECT_TYPE_IMAGE_VIEW:
- return "VK_OBJECT_TYPE_IMAGE_VIEW";
- case VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX:
- return "VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX";
- case VK_OBJECT_TYPE_INSTANCE:
- return "VK_OBJECT_TYPE_INSTANCE";
- case VK_OBJECT_TYPE_OBJECT_TABLE_NVX:
- return "VK_OBJECT_TYPE_OBJECT_TABLE_NVX";
- case VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL:
- return "VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL";
- case VK_OBJECT_TYPE_PHYSICAL_DEVICE:
- return "VK_OBJECT_TYPE_PHYSICAL_DEVICE";
- case VK_OBJECT_TYPE_PIPELINE:
- return "VK_OBJECT_TYPE_PIPELINE";
- case VK_OBJECT_TYPE_PIPELINE_CACHE:
- return "VK_OBJECT_TYPE_PIPELINE_CACHE";
- case VK_OBJECT_TYPE_PIPELINE_LAYOUT:
- return "VK_OBJECT_TYPE_PIPELINE_LAYOUT";
- case VK_OBJECT_TYPE_QUERY_POOL:
- return "VK_OBJECT_TYPE_QUERY_POOL";
- case VK_OBJECT_TYPE_QUEUE:
- return "VK_OBJECT_TYPE_QUEUE";
- case VK_OBJECT_TYPE_RENDER_PASS:
- return "VK_OBJECT_TYPE_RENDER_PASS";
- case VK_OBJECT_TYPE_SAMPLER:
- return "VK_OBJECT_TYPE_SAMPLER";
- case VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION:
- return "VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION";
- case VK_OBJECT_TYPE_SEMAPHORE:
- return "VK_OBJECT_TYPE_SEMAPHORE";
- case VK_OBJECT_TYPE_SHADER_MODULE:
- return "VK_OBJECT_TYPE_SHADER_MODULE";
- case VK_OBJECT_TYPE_SURFACE_KHR:
- return "VK_OBJECT_TYPE_SURFACE_KHR";
- case VK_OBJECT_TYPE_SWAPCHAIN_KHR:
- return "VK_OBJECT_TYPE_SWAPCHAIN_KHR";
- case VK_OBJECT_TYPE_UNKNOWN:
- return "VK_OBJECT_TYPE_UNKNOWN";
- case VK_OBJECT_TYPE_VALIDATION_CACHE_EXT:
- return "VK_OBJECT_TYPE_VALIDATION_CACHE_EXT";
- default:
- return "Unhandled VkObjectType";
- }
-}
-
-static inline const char* string_VkVendorId(VkVendorId input_value)
-{
- switch ((VkVendorId)input_value)
- {
- case VK_VENDOR_ID_KAZAN:
- return "VK_VENDOR_ID_KAZAN";
- case VK_VENDOR_ID_VIV:
- return "VK_VENDOR_ID_VIV";
- case VK_VENDOR_ID_VSI:
- return "VK_VENDOR_ID_VSI";
- default:
- return "Unhandled VkVendorId";
- }
-}
-
-static inline const char* string_VkSubgroupFeatureFlagBits(VkSubgroupFeatureFlagBits input_value)
-{
- switch ((VkSubgroupFeatureFlagBits)input_value)
- {
- case VK_SUBGROUP_FEATURE_ARITHMETIC_BIT:
- return "VK_SUBGROUP_FEATURE_ARITHMETIC_BIT";
- case VK_SUBGROUP_FEATURE_BALLOT_BIT:
- return "VK_SUBGROUP_FEATURE_BALLOT_BIT";
- case VK_SUBGROUP_FEATURE_BASIC_BIT:
- return "VK_SUBGROUP_FEATURE_BASIC_BIT";
- case VK_SUBGROUP_FEATURE_CLUSTERED_BIT:
- return "VK_SUBGROUP_FEATURE_CLUSTERED_BIT";
- case VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV:
- return "VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV";
- case VK_SUBGROUP_FEATURE_QUAD_BIT:
- return "VK_SUBGROUP_FEATURE_QUAD_BIT";
- case VK_SUBGROUP_FEATURE_SHUFFLE_BIT:
- return "VK_SUBGROUP_FEATURE_SHUFFLE_BIT";
- case VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT:
- return "VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT";
- case VK_SUBGROUP_FEATURE_VOTE_BIT:
- return "VK_SUBGROUP_FEATURE_VOTE_BIT";
- default:
- return "Unhandled VkSubgroupFeatureFlagBits";
- }
-}
-
-static inline std::string string_VkSubgroupFeatureFlags(VkSubgroupFeatureFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkSubgroupFeatureFlagBits(static_cast<VkSubgroupFeatureFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkSubgroupFeatureFlagBits(static_cast<VkSubgroupFeatureFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkPeerMemoryFeatureFlagBits(VkPeerMemoryFeatureFlagBits input_value)
-{
- switch ((VkPeerMemoryFeatureFlagBits)input_value)
- {
- case VK_PEER_MEMORY_FEATURE_COPY_DST_BIT:
- return "VK_PEER_MEMORY_FEATURE_COPY_DST_BIT";
- case VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT:
- return "VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT";
- case VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT:
- return "VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT";
- case VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT:
- return "VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT";
- default:
- return "Unhandled VkPeerMemoryFeatureFlagBits";
- }
-}
-
-static inline std::string string_VkPeerMemoryFeatureFlags(VkPeerMemoryFeatureFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkPeerMemoryFeatureFlagBits(static_cast<VkPeerMemoryFeatureFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkPeerMemoryFeatureFlagBits(static_cast<VkPeerMemoryFeatureFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkMemoryAllocateFlagBits(VkMemoryAllocateFlagBits input_value)
-{
- switch ((VkMemoryAllocateFlagBits)input_value)
- {
- case VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT:
- return "VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT";
- default:
- return "Unhandled VkMemoryAllocateFlagBits";
- }
-}
-
-static inline std::string string_VkMemoryAllocateFlags(VkMemoryAllocateFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkMemoryAllocateFlagBits(static_cast<VkMemoryAllocateFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkMemoryAllocateFlagBits(static_cast<VkMemoryAllocateFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkPointClippingBehavior(VkPointClippingBehavior input_value)
-{
- switch ((VkPointClippingBehavior)input_value)
- {
- case VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES:
- return "VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES";
- case VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY:
- return "VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY";
- default:
- return "Unhandled VkPointClippingBehavior";
- }
-}
-
-static inline const char* string_VkTessellationDomainOrigin(VkTessellationDomainOrigin input_value)
-{
- switch ((VkTessellationDomainOrigin)input_value)
- {
- case VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT:
- return "VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT";
- case VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT:
- return "VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT";
- default:
- return "Unhandled VkTessellationDomainOrigin";
- }
-}
-
-static inline const char* string_VkSamplerYcbcrModelConversion(VkSamplerYcbcrModelConversion input_value)
-{
- switch ((VkSamplerYcbcrModelConversion)input_value)
- {
- case VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY:
- return "VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY";
- case VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020:
- return "VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020";
- case VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601:
- return "VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601";
- case VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709:
- return "VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709";
- case VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY:
- return "VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY";
- default:
- return "Unhandled VkSamplerYcbcrModelConversion";
- }
-}
-
-static inline const char* string_VkSamplerYcbcrRange(VkSamplerYcbcrRange input_value)
-{
- switch ((VkSamplerYcbcrRange)input_value)
- {
- case VK_SAMPLER_YCBCR_RANGE_ITU_FULL:
- return "VK_SAMPLER_YCBCR_RANGE_ITU_FULL";
- case VK_SAMPLER_YCBCR_RANGE_ITU_NARROW:
- return "VK_SAMPLER_YCBCR_RANGE_ITU_NARROW";
- default:
- return "Unhandled VkSamplerYcbcrRange";
- }
-}
-
-static inline const char* string_VkChromaLocation(VkChromaLocation input_value)
-{
- switch ((VkChromaLocation)input_value)
- {
- case VK_CHROMA_LOCATION_COSITED_EVEN:
- return "VK_CHROMA_LOCATION_COSITED_EVEN";
- case VK_CHROMA_LOCATION_MIDPOINT:
- return "VK_CHROMA_LOCATION_MIDPOINT";
- default:
- return "Unhandled VkChromaLocation";
- }
-}
-
-static inline const char* string_VkDescriptorUpdateTemplateType(VkDescriptorUpdateTemplateType input_value)
-{
- switch ((VkDescriptorUpdateTemplateType)input_value)
- {
- case VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET:
- return "VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET";
- case VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR:
- return "VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR";
- default:
- return "Unhandled VkDescriptorUpdateTemplateType";
- }
-}
-
-static inline const char* string_VkExternalMemoryHandleTypeFlagBits(VkExternalMemoryHandleTypeFlagBits input_value)
-{
- switch ((VkExternalMemoryHandleTypeFlagBits)input_value)
- {
- case VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID:
- return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID";
- case VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT:
- return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT";
- case VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT:
- return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT";
- case VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT:
- return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT";
- case VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT:
- return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT";
- case VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT:
- return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT";
- case VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT:
- return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT";
- case VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT:
- return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT";
- case VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT:
- return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT";
- case VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT:
- return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT";
- case VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT:
- return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT";
- default:
- return "Unhandled VkExternalMemoryHandleTypeFlagBits";
- }
-}
-
-static inline std::string string_VkExternalMemoryHandleTypeFlags(VkExternalMemoryHandleTypeFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkExternalMemoryHandleTypeFlagBits(static_cast<VkExternalMemoryHandleTypeFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkExternalMemoryHandleTypeFlagBits(static_cast<VkExternalMemoryHandleTypeFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkExternalMemoryFeatureFlagBits(VkExternalMemoryFeatureFlagBits input_value)
-{
- switch ((VkExternalMemoryFeatureFlagBits)input_value)
- {
- case VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT:
- return "VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT";
- case VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT:
- return "VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT";
- case VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT:
- return "VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT";
- default:
- return "Unhandled VkExternalMemoryFeatureFlagBits";
- }
-}
-
-static inline std::string string_VkExternalMemoryFeatureFlags(VkExternalMemoryFeatureFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkExternalMemoryFeatureFlagBits(static_cast<VkExternalMemoryFeatureFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkExternalMemoryFeatureFlagBits(static_cast<VkExternalMemoryFeatureFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkExternalFenceHandleTypeFlagBits(VkExternalFenceHandleTypeFlagBits input_value)
-{
- switch ((VkExternalFenceHandleTypeFlagBits)input_value)
- {
- case VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT:
- return "VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT";
- case VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT:
- return "VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT";
- case VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT:
- return "VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT";
- case VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT:
- return "VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT";
- default:
- return "Unhandled VkExternalFenceHandleTypeFlagBits";
- }
-}
-
-static inline std::string string_VkExternalFenceHandleTypeFlags(VkExternalFenceHandleTypeFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkExternalFenceHandleTypeFlagBits(static_cast<VkExternalFenceHandleTypeFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkExternalFenceHandleTypeFlagBits(static_cast<VkExternalFenceHandleTypeFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkExternalFenceFeatureFlagBits(VkExternalFenceFeatureFlagBits input_value)
-{
- switch ((VkExternalFenceFeatureFlagBits)input_value)
- {
- case VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT:
- return "VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT";
- case VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT:
- return "VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT";
- default:
- return "Unhandled VkExternalFenceFeatureFlagBits";
- }
-}
-
-static inline std::string string_VkExternalFenceFeatureFlags(VkExternalFenceFeatureFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkExternalFenceFeatureFlagBits(static_cast<VkExternalFenceFeatureFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkExternalFenceFeatureFlagBits(static_cast<VkExternalFenceFeatureFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkFenceImportFlagBits(VkFenceImportFlagBits input_value)
-{
- switch ((VkFenceImportFlagBits)input_value)
- {
- case VK_FENCE_IMPORT_TEMPORARY_BIT:
- return "VK_FENCE_IMPORT_TEMPORARY_BIT";
- default:
- return "Unhandled VkFenceImportFlagBits";
- }
-}
-
-static inline std::string string_VkFenceImportFlags(VkFenceImportFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkFenceImportFlagBits(static_cast<VkFenceImportFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkFenceImportFlagBits(static_cast<VkFenceImportFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkSemaphoreImportFlagBits(VkSemaphoreImportFlagBits input_value)
-{
- switch ((VkSemaphoreImportFlagBits)input_value)
- {
- case VK_SEMAPHORE_IMPORT_TEMPORARY_BIT:
- return "VK_SEMAPHORE_IMPORT_TEMPORARY_BIT";
- default:
- return "Unhandled VkSemaphoreImportFlagBits";
- }
-}
-
-static inline std::string string_VkSemaphoreImportFlags(VkSemaphoreImportFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkSemaphoreImportFlagBits(static_cast<VkSemaphoreImportFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkSemaphoreImportFlagBits(static_cast<VkSemaphoreImportFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkExternalSemaphoreHandleTypeFlagBits(VkExternalSemaphoreHandleTypeFlagBits input_value)
-{
- switch ((VkExternalSemaphoreHandleTypeFlagBits)input_value)
- {
- case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT:
- return "VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT";
- case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT:
- return "VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT";
- case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT:
- return "VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT";
- case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT:
- return "VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT";
- case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT:
- return "VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT";
- default:
- return "Unhandled VkExternalSemaphoreHandleTypeFlagBits";
- }
-}
-
-static inline std::string string_VkExternalSemaphoreHandleTypeFlags(VkExternalSemaphoreHandleTypeFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkExternalSemaphoreHandleTypeFlagBits(static_cast<VkExternalSemaphoreHandleTypeFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkExternalSemaphoreHandleTypeFlagBits(static_cast<VkExternalSemaphoreHandleTypeFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkExternalSemaphoreFeatureFlagBits(VkExternalSemaphoreFeatureFlagBits input_value)
-{
- switch ((VkExternalSemaphoreFeatureFlagBits)input_value)
- {
- case VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT:
- return "VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT";
- case VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT:
- return "VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT";
- default:
- return "Unhandled VkExternalSemaphoreFeatureFlagBits";
- }
-}
-
-static inline std::string string_VkExternalSemaphoreFeatureFlags(VkExternalSemaphoreFeatureFlags input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkExternalSemaphoreFeatureFlagBits(static_cast<VkExternalSemaphoreFeatureFlagBits>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkExternalSemaphoreFeatureFlagBits(static_cast<VkExternalSemaphoreFeatureFlagBits>(0)));
- return ret;
-}
-
-static inline const char* string_VkSurfaceTransformFlagBitsKHR(VkSurfaceTransformFlagBitsKHR input_value)
-{
- switch ((VkSurfaceTransformFlagBitsKHR)input_value)
- {
- case VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR:
- return "VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR";
- case VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR:
- return "VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR";
- case VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR:
- return "VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR";
- case VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR:
- return "VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR";
- case VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR:
- return "VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR";
- case VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR:
- return "VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR";
- case VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR:
- return "VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR";
- case VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR:
- return "VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR";
- case VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR:
- return "VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR";
- default:
- return "Unhandled VkSurfaceTransformFlagBitsKHR";
- }
-}
-
-static inline std::string string_VkSurfaceTransformFlagsKHR(VkSurfaceTransformFlagsKHR input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkSurfaceTransformFlagBitsKHR(static_cast<VkSurfaceTransformFlagBitsKHR>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkSurfaceTransformFlagBitsKHR(static_cast<VkSurfaceTransformFlagBitsKHR>(0)));
- return ret;
-}
-
-static inline const char* string_VkCompositeAlphaFlagBitsKHR(VkCompositeAlphaFlagBitsKHR input_value)
-{
- switch ((VkCompositeAlphaFlagBitsKHR)input_value)
- {
- case VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR:
- return "VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR";
- case VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR:
- return "VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR";
- case VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR:
- return "VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR";
- case VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR:
- return "VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR";
- default:
- return "Unhandled VkCompositeAlphaFlagBitsKHR";
- }
-}
-
-static inline std::string string_VkCompositeAlphaFlagsKHR(VkCompositeAlphaFlagsKHR input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkCompositeAlphaFlagBitsKHR(static_cast<VkCompositeAlphaFlagBitsKHR>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkCompositeAlphaFlagBitsKHR(static_cast<VkCompositeAlphaFlagBitsKHR>(0)));
- return ret;
-}
-
-static inline const char* string_VkColorSpaceKHR(VkColorSpaceKHR input_value)
-{
- switch ((VkColorSpaceKHR)input_value)
- {
- case VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT:
- return "VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT";
- case VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT:
- return "VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT";
- case VK_COLOR_SPACE_BT2020_LINEAR_EXT:
- return "VK_COLOR_SPACE_BT2020_LINEAR_EXT";
- case VK_COLOR_SPACE_BT709_LINEAR_EXT:
- return "VK_COLOR_SPACE_BT709_LINEAR_EXT";
- case VK_COLOR_SPACE_BT709_NONLINEAR_EXT:
- return "VK_COLOR_SPACE_BT709_NONLINEAR_EXT";
- case VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT:
- return "VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT";
- case VK_COLOR_SPACE_DISPLAY_NATIVE_AMD:
- return "VK_COLOR_SPACE_DISPLAY_NATIVE_AMD";
- case VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT:
- return "VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT";
- case VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT:
- return "VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT";
- case VK_COLOR_SPACE_DOLBYVISION_EXT:
- return "VK_COLOR_SPACE_DOLBYVISION_EXT";
- case VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT:
- return "VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT";
- case VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT:
- return "VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT";
- case VK_COLOR_SPACE_HDR10_HLG_EXT:
- return "VK_COLOR_SPACE_HDR10_HLG_EXT";
- case VK_COLOR_SPACE_HDR10_ST2084_EXT:
- return "VK_COLOR_SPACE_HDR10_ST2084_EXT";
- case VK_COLOR_SPACE_PASS_THROUGH_EXT:
- return "VK_COLOR_SPACE_PASS_THROUGH_EXT";
- case VK_COLOR_SPACE_SRGB_NONLINEAR_KHR:
- return "VK_COLOR_SPACE_SRGB_NONLINEAR_KHR";
- default:
- return "Unhandled VkColorSpaceKHR";
- }
-}
-
-static inline const char* string_VkPresentModeKHR(VkPresentModeKHR input_value)
-{
- switch ((VkPresentModeKHR)input_value)
- {
- case VK_PRESENT_MODE_FIFO_KHR:
- return "VK_PRESENT_MODE_FIFO_KHR";
- case VK_PRESENT_MODE_FIFO_RELAXED_KHR:
- return "VK_PRESENT_MODE_FIFO_RELAXED_KHR";
- case VK_PRESENT_MODE_IMMEDIATE_KHR:
- return "VK_PRESENT_MODE_IMMEDIATE_KHR";
- case VK_PRESENT_MODE_MAILBOX_KHR:
- return "VK_PRESENT_MODE_MAILBOX_KHR";
- case VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR:
- return "VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR";
- case VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR:
- return "VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR";
- default:
- return "Unhandled VkPresentModeKHR";
- }
-}
-
-static inline const char* string_VkSwapchainCreateFlagBitsKHR(VkSwapchainCreateFlagBitsKHR input_value)
-{
- switch ((VkSwapchainCreateFlagBitsKHR)input_value)
- {
- case VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR:
- return "VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR";
- case VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR:
- return "VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR";
- case VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR:
- return "VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR";
- default:
- return "Unhandled VkSwapchainCreateFlagBitsKHR";
- }
-}
-
-static inline std::string string_VkSwapchainCreateFlagsKHR(VkSwapchainCreateFlagsKHR input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkSwapchainCreateFlagBitsKHR(static_cast<VkSwapchainCreateFlagBitsKHR>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkSwapchainCreateFlagBitsKHR(static_cast<VkSwapchainCreateFlagBitsKHR>(0)));
- return ret;
-}
-
-static inline const char* string_VkDeviceGroupPresentModeFlagBitsKHR(VkDeviceGroupPresentModeFlagBitsKHR input_value)
-{
- switch ((VkDeviceGroupPresentModeFlagBitsKHR)input_value)
- {
- case VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR:
- return "VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR";
- case VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR:
- return "VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR";
- case VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR:
- return "VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR";
- case VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR:
- return "VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR";
- default:
- return "Unhandled VkDeviceGroupPresentModeFlagBitsKHR";
- }
-}
-
-static inline std::string string_VkDeviceGroupPresentModeFlagsKHR(VkDeviceGroupPresentModeFlagsKHR input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkDeviceGroupPresentModeFlagBitsKHR(static_cast<VkDeviceGroupPresentModeFlagBitsKHR>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkDeviceGroupPresentModeFlagBitsKHR(static_cast<VkDeviceGroupPresentModeFlagBitsKHR>(0)));
- return ret;
-}
-
-static inline const char* string_VkDisplayPlaneAlphaFlagBitsKHR(VkDisplayPlaneAlphaFlagBitsKHR input_value)
-{
- switch ((VkDisplayPlaneAlphaFlagBitsKHR)input_value)
- {
- case VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR:
- return "VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR";
- case VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR:
- return "VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR";
- case VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR:
- return "VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR";
- case VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR:
- return "VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR";
- default:
- return "Unhandled VkDisplayPlaneAlphaFlagBitsKHR";
- }
-}
-
-static inline std::string string_VkDisplayPlaneAlphaFlagsKHR(VkDisplayPlaneAlphaFlagsKHR input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkDisplayPlaneAlphaFlagBitsKHR(static_cast<VkDisplayPlaneAlphaFlagBitsKHR>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkDisplayPlaneAlphaFlagBitsKHR(static_cast<VkDisplayPlaneAlphaFlagBitsKHR>(0)));
- return ret;
-}
-
-static inline const char* string_VkPeerMemoryFeatureFlagBitsKHR(VkPeerMemoryFeatureFlagBitsKHR input_value)
-{
- switch ((VkPeerMemoryFeatureFlagBitsKHR)input_value)
- {
- case VK_PEER_MEMORY_FEATURE_COPY_DST_BIT:
- return "VK_PEER_MEMORY_FEATURE_COPY_DST_BIT";
- case VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT:
- return "VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT";
- case VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT:
- return "VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT";
- case VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT:
- return "VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT";
- default:
- return "Unhandled VkPeerMemoryFeatureFlagBitsKHR";
- }
-}
-
-static inline std::string string_VkPeerMemoryFeatureFlagsKHR(VkPeerMemoryFeatureFlagsKHR input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkPeerMemoryFeatureFlagBitsKHR(static_cast<VkPeerMemoryFeatureFlagBitsKHR>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkPeerMemoryFeatureFlagBitsKHR(static_cast<VkPeerMemoryFeatureFlagBitsKHR>(0)));
- return ret;
-}
-
-static inline const char* string_VkMemoryAllocateFlagBitsKHR(VkMemoryAllocateFlagBitsKHR input_value)
-{
- switch ((VkMemoryAllocateFlagBitsKHR)input_value)
- {
- case VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT:
- return "VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT";
- default:
- return "Unhandled VkMemoryAllocateFlagBitsKHR";
- }
-}
-
-static inline std::string string_VkMemoryAllocateFlagsKHR(VkMemoryAllocateFlagsKHR input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkMemoryAllocateFlagBitsKHR(static_cast<VkMemoryAllocateFlagBitsKHR>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkMemoryAllocateFlagBitsKHR(static_cast<VkMemoryAllocateFlagBitsKHR>(0)));
- return ret;
-}
-
-static inline const char* string_VkExternalMemoryHandleTypeFlagBitsKHR(VkExternalMemoryHandleTypeFlagBitsKHR input_value)
-{
- switch ((VkExternalMemoryHandleTypeFlagBitsKHR)input_value)
- {
- case VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID:
- return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID";
- case VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT:
- return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT";
- case VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT:
- return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT";
- case VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT:
- return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT";
- case VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT:
- return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT";
- case VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT:
- return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT";
- case VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT:
- return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT";
- case VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT:
- return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT";
- case VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT:
- return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT";
- case VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT:
- return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT";
- case VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT:
- return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT";
- default:
- return "Unhandled VkExternalMemoryHandleTypeFlagBitsKHR";
- }
-}
-
-static inline std::string string_VkExternalMemoryHandleTypeFlagsKHR(VkExternalMemoryHandleTypeFlagsKHR input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkExternalMemoryHandleTypeFlagBitsKHR(static_cast<VkExternalMemoryHandleTypeFlagBitsKHR>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkExternalMemoryHandleTypeFlagBitsKHR(static_cast<VkExternalMemoryHandleTypeFlagBitsKHR>(0)));
- return ret;
-}
-
-static inline const char* string_VkExternalMemoryFeatureFlagBitsKHR(VkExternalMemoryFeatureFlagBitsKHR input_value)
-{
- switch ((VkExternalMemoryFeatureFlagBitsKHR)input_value)
- {
- case VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT:
- return "VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT";
- case VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT:
- return "VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT";
- case VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT:
- return "VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT";
- default:
- return "Unhandled VkExternalMemoryFeatureFlagBitsKHR";
- }
-}
-
-static inline std::string string_VkExternalMemoryFeatureFlagsKHR(VkExternalMemoryFeatureFlagsKHR input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkExternalMemoryFeatureFlagBitsKHR(static_cast<VkExternalMemoryFeatureFlagBitsKHR>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkExternalMemoryFeatureFlagBitsKHR(static_cast<VkExternalMemoryFeatureFlagBitsKHR>(0)));
- return ret;
-}
-
-static inline const char* string_VkExternalSemaphoreHandleTypeFlagBitsKHR(VkExternalSemaphoreHandleTypeFlagBitsKHR input_value)
-{
- switch ((VkExternalSemaphoreHandleTypeFlagBitsKHR)input_value)
- {
- case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT:
- return "VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT";
- case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT:
- return "VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT";
- case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT:
- return "VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT";
- case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT:
- return "VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT";
- case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT:
- return "VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT";
- default:
- return "Unhandled VkExternalSemaphoreHandleTypeFlagBitsKHR";
- }
-}
-
-static inline std::string string_VkExternalSemaphoreHandleTypeFlagsKHR(VkExternalSemaphoreHandleTypeFlagsKHR input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkExternalSemaphoreHandleTypeFlagBitsKHR(static_cast<VkExternalSemaphoreHandleTypeFlagBitsKHR>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkExternalSemaphoreHandleTypeFlagBitsKHR(static_cast<VkExternalSemaphoreHandleTypeFlagBitsKHR>(0)));
- return ret;
-}
-
-static inline const char* string_VkExternalSemaphoreFeatureFlagBitsKHR(VkExternalSemaphoreFeatureFlagBitsKHR input_value)
-{
- switch ((VkExternalSemaphoreFeatureFlagBitsKHR)input_value)
- {
- case VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT:
- return "VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT";
- case VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT:
- return "VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT";
- default:
- return "Unhandled VkExternalSemaphoreFeatureFlagBitsKHR";
- }
-}
-
-static inline std::string string_VkExternalSemaphoreFeatureFlagsKHR(VkExternalSemaphoreFeatureFlagsKHR input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkExternalSemaphoreFeatureFlagBitsKHR(static_cast<VkExternalSemaphoreFeatureFlagBitsKHR>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkExternalSemaphoreFeatureFlagBitsKHR(static_cast<VkExternalSemaphoreFeatureFlagBitsKHR>(0)));
- return ret;
-}
-
-static inline const char* string_VkSemaphoreImportFlagBitsKHR(VkSemaphoreImportFlagBitsKHR input_value)
-{
- switch ((VkSemaphoreImportFlagBitsKHR)input_value)
- {
- case VK_SEMAPHORE_IMPORT_TEMPORARY_BIT:
- return "VK_SEMAPHORE_IMPORT_TEMPORARY_BIT";
- default:
- return "Unhandled VkSemaphoreImportFlagBitsKHR";
- }
-}
-
-static inline std::string string_VkSemaphoreImportFlagsKHR(VkSemaphoreImportFlagsKHR input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkSemaphoreImportFlagBitsKHR(static_cast<VkSemaphoreImportFlagBitsKHR>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkSemaphoreImportFlagBitsKHR(static_cast<VkSemaphoreImportFlagBitsKHR>(0)));
- return ret;
-}
-
-static inline const char* string_VkDescriptorUpdateTemplateTypeKHR(VkDescriptorUpdateTemplateTypeKHR input_value)
-{
- switch ((VkDescriptorUpdateTemplateTypeKHR)input_value)
- {
- case VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET:
- return "VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET";
- case VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR:
- return "VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR";
- default:
- return "Unhandled VkDescriptorUpdateTemplateTypeKHR";
- }
-}
-
-static inline const char* string_VkExternalFenceHandleTypeFlagBitsKHR(VkExternalFenceHandleTypeFlagBitsKHR input_value)
-{
- switch ((VkExternalFenceHandleTypeFlagBitsKHR)input_value)
- {
- case VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT:
- return "VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT";
- case VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT:
- return "VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT";
- case VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT:
- return "VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT";
- case VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT:
- return "VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT";
- default:
- return "Unhandled VkExternalFenceHandleTypeFlagBitsKHR";
- }
-}
-
-static inline std::string string_VkExternalFenceHandleTypeFlagsKHR(VkExternalFenceHandleTypeFlagsKHR input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkExternalFenceHandleTypeFlagBitsKHR(static_cast<VkExternalFenceHandleTypeFlagBitsKHR>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkExternalFenceHandleTypeFlagBitsKHR(static_cast<VkExternalFenceHandleTypeFlagBitsKHR>(0)));
- return ret;
-}
-
-static inline const char* string_VkExternalFenceFeatureFlagBitsKHR(VkExternalFenceFeatureFlagBitsKHR input_value)
-{
- switch ((VkExternalFenceFeatureFlagBitsKHR)input_value)
- {
- case VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT:
- return "VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT";
- case VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT:
- return "VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT";
- default:
- return "Unhandled VkExternalFenceFeatureFlagBitsKHR";
- }
-}
-
-static inline std::string string_VkExternalFenceFeatureFlagsKHR(VkExternalFenceFeatureFlagsKHR input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkExternalFenceFeatureFlagBitsKHR(static_cast<VkExternalFenceFeatureFlagBitsKHR>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkExternalFenceFeatureFlagBitsKHR(static_cast<VkExternalFenceFeatureFlagBitsKHR>(0)));
- return ret;
-}
-
-static inline const char* string_VkFenceImportFlagBitsKHR(VkFenceImportFlagBitsKHR input_value)
-{
- switch ((VkFenceImportFlagBitsKHR)input_value)
- {
- case VK_FENCE_IMPORT_TEMPORARY_BIT:
- return "VK_FENCE_IMPORT_TEMPORARY_BIT";
- default:
- return "Unhandled VkFenceImportFlagBitsKHR";
- }
-}
-
-static inline std::string string_VkFenceImportFlagsKHR(VkFenceImportFlagsKHR input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkFenceImportFlagBitsKHR(static_cast<VkFenceImportFlagBitsKHR>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkFenceImportFlagBitsKHR(static_cast<VkFenceImportFlagBitsKHR>(0)));
- return ret;
-}
-
-static inline const char* string_VkPointClippingBehaviorKHR(VkPointClippingBehaviorKHR input_value)
-{
- switch ((VkPointClippingBehaviorKHR)input_value)
- {
- case VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES:
- return "VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES";
- case VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY:
- return "VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY";
- default:
- return "Unhandled VkPointClippingBehaviorKHR";
- }
-}
-
-static inline const char* string_VkTessellationDomainOriginKHR(VkTessellationDomainOriginKHR input_value)
-{
- switch ((VkTessellationDomainOriginKHR)input_value)
- {
- case VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT:
- return "VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT";
- case VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT:
- return "VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT";
- default:
- return "Unhandled VkTessellationDomainOriginKHR";
- }
-}
-
-static inline const char* string_VkSamplerYcbcrModelConversionKHR(VkSamplerYcbcrModelConversionKHR input_value)
-{
- switch ((VkSamplerYcbcrModelConversionKHR)input_value)
- {
- case VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY:
- return "VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY";
- case VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020:
- return "VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020";
- case VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601:
- return "VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601";
- case VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709:
- return "VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709";
- case VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY:
- return "VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY";
- default:
- return "Unhandled VkSamplerYcbcrModelConversionKHR";
- }
-}
-
-static inline const char* string_VkSamplerYcbcrRangeKHR(VkSamplerYcbcrRangeKHR input_value)
-{
- switch ((VkSamplerYcbcrRangeKHR)input_value)
- {
- case VK_SAMPLER_YCBCR_RANGE_ITU_FULL:
- return "VK_SAMPLER_YCBCR_RANGE_ITU_FULL";
- case VK_SAMPLER_YCBCR_RANGE_ITU_NARROW:
- return "VK_SAMPLER_YCBCR_RANGE_ITU_NARROW";
- default:
- return "Unhandled VkSamplerYcbcrRangeKHR";
- }
-}
-
-static inline const char* string_VkChromaLocationKHR(VkChromaLocationKHR input_value)
-{
- switch ((VkChromaLocationKHR)input_value)
- {
- case VK_CHROMA_LOCATION_COSITED_EVEN:
- return "VK_CHROMA_LOCATION_COSITED_EVEN";
- case VK_CHROMA_LOCATION_MIDPOINT:
- return "VK_CHROMA_LOCATION_MIDPOINT";
- default:
- return "Unhandled VkChromaLocationKHR";
- }
-}
-
-static inline const char* string_VkDriverIdKHR(VkDriverIdKHR input_value)
-{
- switch ((VkDriverIdKHR)input_value)
- {
- case VK_DRIVER_ID_AMD_OPEN_SOURCE_KHR:
- return "VK_DRIVER_ID_AMD_OPEN_SOURCE_KHR";
- case VK_DRIVER_ID_AMD_PROPRIETARY_KHR:
- return "VK_DRIVER_ID_AMD_PROPRIETARY_KHR";
- case VK_DRIVER_ID_ARM_PROPRIETARY_KHR:
- return "VK_DRIVER_ID_ARM_PROPRIETARY_KHR";
- case VK_DRIVER_ID_BROADCOM_PROPRIETARY_KHR:
- return "VK_DRIVER_ID_BROADCOM_PROPRIETARY_KHR";
- case VK_DRIVER_ID_GGP_PROPRIETARY_KHR:
- return "VK_DRIVER_ID_GGP_PROPRIETARY_KHR";
- case VK_DRIVER_ID_GOOGLE_SWIFTSHADER_KHR:
- return "VK_DRIVER_ID_GOOGLE_SWIFTSHADER_KHR";
- case VK_DRIVER_ID_IMAGINATION_PROPRIETARY_KHR:
- return "VK_DRIVER_ID_IMAGINATION_PROPRIETARY_KHR";
- case VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA_KHR:
- return "VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA_KHR";
- case VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS_KHR:
- return "VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS_KHR";
- case VK_DRIVER_ID_MESA_RADV_KHR:
- return "VK_DRIVER_ID_MESA_RADV_KHR";
- case VK_DRIVER_ID_NVIDIA_PROPRIETARY_KHR:
- return "VK_DRIVER_ID_NVIDIA_PROPRIETARY_KHR";
- case VK_DRIVER_ID_QUALCOMM_PROPRIETARY_KHR:
- return "VK_DRIVER_ID_QUALCOMM_PROPRIETARY_KHR";
- default:
- return "Unhandled VkDriverIdKHR";
- }
-}
-
-static inline const char* string_VkShaderFloatControlsIndependenceKHR(VkShaderFloatControlsIndependenceKHR input_value)
-{
- switch ((VkShaderFloatControlsIndependenceKHR)input_value)
- {
- case VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR:
- return "VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR";
- case VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL_KHR:
- return "VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL_KHR";
- case VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR:
- return "VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR";
- default:
- return "Unhandled VkShaderFloatControlsIndependenceKHR";
- }
-}
-
-static inline const char* string_VkResolveModeFlagBitsKHR(VkResolveModeFlagBitsKHR input_value)
-{
- switch ((VkResolveModeFlagBitsKHR)input_value)
- {
- case VK_RESOLVE_MODE_AVERAGE_BIT_KHR:
- return "VK_RESOLVE_MODE_AVERAGE_BIT_KHR";
- case VK_RESOLVE_MODE_MAX_BIT_KHR:
- return "VK_RESOLVE_MODE_MAX_BIT_KHR";
- case VK_RESOLVE_MODE_MIN_BIT_KHR:
- return "VK_RESOLVE_MODE_MIN_BIT_KHR";
- case VK_RESOLVE_MODE_NONE_KHR:
- return "VK_RESOLVE_MODE_NONE_KHR";
- case VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR:
- return "VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR";
- default:
- return "Unhandled VkResolveModeFlagBitsKHR";
- }
-}
-
-static inline std::string string_VkResolveModeFlagsKHR(VkResolveModeFlagsKHR input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkResolveModeFlagBitsKHR(static_cast<VkResolveModeFlagBitsKHR>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkResolveModeFlagBitsKHR(static_cast<VkResolveModeFlagBitsKHR>(0)));
- return ret;
-}
-
-static inline const char* string_VkPipelineExecutableStatisticFormatKHR(VkPipelineExecutableStatisticFormatKHR input_value)
-{
- switch ((VkPipelineExecutableStatisticFormatKHR)input_value)
- {
- case VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR:
- return "VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR";
- case VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR:
- return "VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR";
- case VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR:
- return "VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR";
- case VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR:
- return "VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR";
- default:
- return "Unhandled VkPipelineExecutableStatisticFormatKHR";
- }
-}
-
-static inline const char* string_VkDebugReportObjectTypeEXT(VkDebugReportObjectTypeEXT input_value)
-{
- switch ((VkDebugReportObjectTypeEXT)input_value)
- {
- case VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT";
- default:
- return "Unhandled VkDebugReportObjectTypeEXT";
- }
-}
-
-static inline const char* string_VkDebugReportFlagBitsEXT(VkDebugReportFlagBitsEXT input_value)
-{
- switch ((VkDebugReportFlagBitsEXT)input_value)
- {
- case VK_DEBUG_REPORT_DEBUG_BIT_EXT:
- return "VK_DEBUG_REPORT_DEBUG_BIT_EXT";
- case VK_DEBUG_REPORT_ERROR_BIT_EXT:
- return "VK_DEBUG_REPORT_ERROR_BIT_EXT";
- case VK_DEBUG_REPORT_INFORMATION_BIT_EXT:
- return "VK_DEBUG_REPORT_INFORMATION_BIT_EXT";
- case VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT:
- return "VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT";
- case VK_DEBUG_REPORT_WARNING_BIT_EXT:
- return "VK_DEBUG_REPORT_WARNING_BIT_EXT";
- default:
- return "Unhandled VkDebugReportFlagBitsEXT";
- }
-}
-
-static inline std::string string_VkDebugReportFlagsEXT(VkDebugReportFlagsEXT input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkDebugReportFlagBitsEXT(static_cast<VkDebugReportFlagBitsEXT>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkDebugReportFlagBitsEXT(static_cast<VkDebugReportFlagBitsEXT>(0)));
- return ret;
-}
-
-static inline const char* string_VkRasterizationOrderAMD(VkRasterizationOrderAMD input_value)
-{
- switch ((VkRasterizationOrderAMD)input_value)
- {
- case VK_RASTERIZATION_ORDER_RELAXED_AMD:
- return "VK_RASTERIZATION_ORDER_RELAXED_AMD";
- case VK_RASTERIZATION_ORDER_STRICT_AMD:
- return "VK_RASTERIZATION_ORDER_STRICT_AMD";
- default:
- return "Unhandled VkRasterizationOrderAMD";
- }
-}
-
-static inline const char* string_VkShaderInfoTypeAMD(VkShaderInfoTypeAMD input_value)
-{
- switch ((VkShaderInfoTypeAMD)input_value)
- {
- case VK_SHADER_INFO_TYPE_BINARY_AMD:
- return "VK_SHADER_INFO_TYPE_BINARY_AMD";
- case VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD:
- return "VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD";
- case VK_SHADER_INFO_TYPE_STATISTICS_AMD:
- return "VK_SHADER_INFO_TYPE_STATISTICS_AMD";
- default:
- return "Unhandled VkShaderInfoTypeAMD";
- }
-}
-
-static inline const char* string_VkExternalMemoryHandleTypeFlagBitsNV(VkExternalMemoryHandleTypeFlagBitsNV input_value)
-{
- switch ((VkExternalMemoryHandleTypeFlagBitsNV)input_value)
- {
- case VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV:
- return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV";
- case VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV:
- return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV";
- case VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV:
- return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV";
- case VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV:
- return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV";
- default:
- return "Unhandled VkExternalMemoryHandleTypeFlagBitsNV";
- }
-}
-
-static inline std::string string_VkExternalMemoryHandleTypeFlagsNV(VkExternalMemoryHandleTypeFlagsNV input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkExternalMemoryHandleTypeFlagBitsNV(static_cast<VkExternalMemoryHandleTypeFlagBitsNV>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkExternalMemoryHandleTypeFlagBitsNV(static_cast<VkExternalMemoryHandleTypeFlagBitsNV>(0)));
- return ret;
-}
-
-static inline const char* string_VkExternalMemoryFeatureFlagBitsNV(VkExternalMemoryFeatureFlagBitsNV input_value)
-{
- switch ((VkExternalMemoryFeatureFlagBitsNV)input_value)
- {
- case VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV:
- return "VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV";
- case VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV:
- return "VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV";
- case VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV:
- return "VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV";
- default:
- return "Unhandled VkExternalMemoryFeatureFlagBitsNV";
- }
-}
-
-static inline std::string string_VkExternalMemoryFeatureFlagsNV(VkExternalMemoryFeatureFlagsNV input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkExternalMemoryFeatureFlagBitsNV(static_cast<VkExternalMemoryFeatureFlagBitsNV>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkExternalMemoryFeatureFlagBitsNV(static_cast<VkExternalMemoryFeatureFlagBitsNV>(0)));
- return ret;
-}
-
-static inline const char* string_VkValidationCheckEXT(VkValidationCheckEXT input_value)
-{
- switch ((VkValidationCheckEXT)input_value)
- {
- case VK_VALIDATION_CHECK_ALL_EXT:
- return "VK_VALIDATION_CHECK_ALL_EXT";
- case VK_VALIDATION_CHECK_SHADERS_EXT:
- return "VK_VALIDATION_CHECK_SHADERS_EXT";
- default:
- return "Unhandled VkValidationCheckEXT";
- }
-}
-
-static inline const char* string_VkConditionalRenderingFlagBitsEXT(VkConditionalRenderingFlagBitsEXT input_value)
-{
- switch ((VkConditionalRenderingFlagBitsEXT)input_value)
- {
- case VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT:
- return "VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT";
- default:
- return "Unhandled VkConditionalRenderingFlagBitsEXT";
- }
-}
-
-static inline std::string string_VkConditionalRenderingFlagsEXT(VkConditionalRenderingFlagsEXT input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkConditionalRenderingFlagBitsEXT(static_cast<VkConditionalRenderingFlagBitsEXT>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkConditionalRenderingFlagBitsEXT(static_cast<VkConditionalRenderingFlagBitsEXT>(0)));
- return ret;
-}
-
-static inline const char* string_VkIndirectCommandsLayoutUsageFlagBitsNVX(VkIndirectCommandsLayoutUsageFlagBitsNVX input_value)
-{
- switch ((VkIndirectCommandsLayoutUsageFlagBitsNVX)input_value)
- {
- case VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EMPTY_EXECUTIONS_BIT_NVX:
- return "VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EMPTY_EXECUTIONS_BIT_NVX";
- case VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NVX:
- return "VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NVX";
- case VK_INDIRECT_COMMANDS_LAYOUT_USAGE_SPARSE_SEQUENCES_BIT_NVX:
- return "VK_INDIRECT_COMMANDS_LAYOUT_USAGE_SPARSE_SEQUENCES_BIT_NVX";
- case VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NVX:
- return "VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NVX";
- default:
- return "Unhandled VkIndirectCommandsLayoutUsageFlagBitsNVX";
- }
-}
-
-static inline std::string string_VkIndirectCommandsLayoutUsageFlagsNVX(VkIndirectCommandsLayoutUsageFlagsNVX input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkIndirectCommandsLayoutUsageFlagBitsNVX(static_cast<VkIndirectCommandsLayoutUsageFlagBitsNVX>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkIndirectCommandsLayoutUsageFlagBitsNVX(static_cast<VkIndirectCommandsLayoutUsageFlagBitsNVX>(0)));
- return ret;
-}
-
-static inline const char* string_VkObjectEntryUsageFlagBitsNVX(VkObjectEntryUsageFlagBitsNVX input_value)
-{
- switch ((VkObjectEntryUsageFlagBitsNVX)input_value)
- {
- case VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX:
- return "VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX";
- case VK_OBJECT_ENTRY_USAGE_GRAPHICS_BIT_NVX:
- return "VK_OBJECT_ENTRY_USAGE_GRAPHICS_BIT_NVX";
- default:
- return "Unhandled VkObjectEntryUsageFlagBitsNVX";
- }
-}
-
-static inline std::string string_VkObjectEntryUsageFlagsNVX(VkObjectEntryUsageFlagsNVX input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkObjectEntryUsageFlagBitsNVX(static_cast<VkObjectEntryUsageFlagBitsNVX>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkObjectEntryUsageFlagBitsNVX(static_cast<VkObjectEntryUsageFlagBitsNVX>(0)));
- return ret;
-}
-
-static inline const char* string_VkIndirectCommandsTokenTypeNVX(VkIndirectCommandsTokenTypeNVX input_value)
-{
- switch ((VkIndirectCommandsTokenTypeNVX)input_value)
- {
- case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DESCRIPTOR_SET_NVX:
- return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DESCRIPTOR_SET_NVX";
- case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX:
- return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX";
- case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NVX:
- return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NVX";
- case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NVX:
- return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NVX";
- case VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NVX:
- return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NVX";
- case VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX:
- return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX";
- case VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NVX:
- return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NVX";
- case VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NVX:
- return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NVX";
- default:
- return "Unhandled VkIndirectCommandsTokenTypeNVX";
- }
-}
-
-static inline const char* string_VkObjectEntryTypeNVX(VkObjectEntryTypeNVX input_value)
-{
- switch ((VkObjectEntryTypeNVX)input_value)
- {
- case VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX:
- return "VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX";
- case VK_OBJECT_ENTRY_TYPE_INDEX_BUFFER_NVX:
- return "VK_OBJECT_ENTRY_TYPE_INDEX_BUFFER_NVX";
- case VK_OBJECT_ENTRY_TYPE_PIPELINE_NVX:
- return "VK_OBJECT_ENTRY_TYPE_PIPELINE_NVX";
- case VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX:
- return "VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX";
- case VK_OBJECT_ENTRY_TYPE_VERTEX_BUFFER_NVX:
- return "VK_OBJECT_ENTRY_TYPE_VERTEX_BUFFER_NVX";
- default:
- return "Unhandled VkObjectEntryTypeNVX";
- }
-}
-
-static inline const char* string_VkSurfaceCounterFlagBitsEXT(VkSurfaceCounterFlagBitsEXT input_value)
-{
- switch ((VkSurfaceCounterFlagBitsEXT)input_value)
- {
- case VK_SURFACE_COUNTER_VBLANK_EXT:
- return "VK_SURFACE_COUNTER_VBLANK_EXT";
- default:
- return "Unhandled VkSurfaceCounterFlagBitsEXT";
- }
-}
-
-static inline std::string string_VkSurfaceCounterFlagsEXT(VkSurfaceCounterFlagsEXT input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkSurfaceCounterFlagBitsEXT(static_cast<VkSurfaceCounterFlagBitsEXT>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkSurfaceCounterFlagBitsEXT(static_cast<VkSurfaceCounterFlagBitsEXT>(0)));
- return ret;
-}
-
-static inline const char* string_VkDisplayPowerStateEXT(VkDisplayPowerStateEXT input_value)
-{
- switch ((VkDisplayPowerStateEXT)input_value)
- {
- case VK_DISPLAY_POWER_STATE_OFF_EXT:
- return "VK_DISPLAY_POWER_STATE_OFF_EXT";
- case VK_DISPLAY_POWER_STATE_ON_EXT:
- return "VK_DISPLAY_POWER_STATE_ON_EXT";
- case VK_DISPLAY_POWER_STATE_SUSPEND_EXT:
- return "VK_DISPLAY_POWER_STATE_SUSPEND_EXT";
- default:
- return "Unhandled VkDisplayPowerStateEXT";
- }
-}
-
-static inline const char* string_VkDeviceEventTypeEXT(VkDeviceEventTypeEXT input_value)
-{
- switch ((VkDeviceEventTypeEXT)input_value)
- {
- case VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT:
- return "VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT";
- default:
- return "Unhandled VkDeviceEventTypeEXT";
- }
-}
-
-static inline const char* string_VkDisplayEventTypeEXT(VkDisplayEventTypeEXT input_value)
-{
- switch ((VkDisplayEventTypeEXT)input_value)
- {
- case VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT:
- return "VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT";
- default:
- return "Unhandled VkDisplayEventTypeEXT";
- }
-}
-
-static inline const char* string_VkViewportCoordinateSwizzleNV(VkViewportCoordinateSwizzleNV input_value)
-{
- switch ((VkViewportCoordinateSwizzleNV)input_value)
- {
- case VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV:
- return "VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV";
- case VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV:
- return "VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV";
- case VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV:
- return "VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV";
- case VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV:
- return "VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV";
- case VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV:
- return "VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV";
- case VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV:
- return "VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV";
- case VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV:
- return "VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV";
- case VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV:
- return "VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV";
- default:
- return "Unhandled VkViewportCoordinateSwizzleNV";
- }
-}
-
-static inline const char* string_VkDiscardRectangleModeEXT(VkDiscardRectangleModeEXT input_value)
-{
- switch ((VkDiscardRectangleModeEXT)input_value)
- {
- case VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT:
- return "VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT";
- case VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT:
- return "VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT";
- default:
- return "Unhandled VkDiscardRectangleModeEXT";
- }
-}
-
-static inline const char* string_VkConservativeRasterizationModeEXT(VkConservativeRasterizationModeEXT input_value)
-{
- switch ((VkConservativeRasterizationModeEXT)input_value)
- {
- case VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT:
- return "VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT";
- case VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT:
- return "VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT";
- case VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT:
- return "VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT";
- default:
- return "Unhandled VkConservativeRasterizationModeEXT";
- }
-}
-
-static inline const char* string_VkDebugUtilsMessageSeverityFlagBitsEXT(VkDebugUtilsMessageSeverityFlagBitsEXT input_value)
-{
- switch ((VkDebugUtilsMessageSeverityFlagBitsEXT)input_value)
- {
- case VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT:
- return "VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT";
- case VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT:
- return "VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT";
- case VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT:
- return "VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT";
- case VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT:
- return "VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT";
- default:
- return "Unhandled VkDebugUtilsMessageSeverityFlagBitsEXT";
- }
-}
-
-static inline std::string string_VkDebugUtilsMessageSeverityFlagsEXT(VkDebugUtilsMessageSeverityFlagsEXT input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkDebugUtilsMessageSeverityFlagBitsEXT(static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkDebugUtilsMessageSeverityFlagBitsEXT(static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>(0)));
- return ret;
-}
-
-static inline const char* string_VkDebugUtilsMessageTypeFlagBitsEXT(VkDebugUtilsMessageTypeFlagBitsEXT input_value)
-{
- switch ((VkDebugUtilsMessageTypeFlagBitsEXT)input_value)
- {
- case VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT:
- return "VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT";
- case VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT:
- return "VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT";
- case VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT:
- return "VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT";
- default:
- return "Unhandled VkDebugUtilsMessageTypeFlagBitsEXT";
- }
-}
-
-static inline std::string string_VkDebugUtilsMessageTypeFlagsEXT(VkDebugUtilsMessageTypeFlagsEXT input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkDebugUtilsMessageTypeFlagBitsEXT(static_cast<VkDebugUtilsMessageTypeFlagBitsEXT>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkDebugUtilsMessageTypeFlagBitsEXT(static_cast<VkDebugUtilsMessageTypeFlagBitsEXT>(0)));
- return ret;
-}
-
-static inline const char* string_VkSamplerReductionModeEXT(VkSamplerReductionModeEXT input_value)
-{
- switch ((VkSamplerReductionModeEXT)input_value)
- {
- case VK_SAMPLER_REDUCTION_MODE_MAX_EXT:
- return "VK_SAMPLER_REDUCTION_MODE_MAX_EXT";
- case VK_SAMPLER_REDUCTION_MODE_MIN_EXT:
- return "VK_SAMPLER_REDUCTION_MODE_MIN_EXT";
- case VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT:
- return "VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT";
- default:
- return "Unhandled VkSamplerReductionModeEXT";
- }
-}
-
-static inline const char* string_VkBlendOverlapEXT(VkBlendOverlapEXT input_value)
-{
- switch ((VkBlendOverlapEXT)input_value)
- {
- case VK_BLEND_OVERLAP_CONJOINT_EXT:
- return "VK_BLEND_OVERLAP_CONJOINT_EXT";
- case VK_BLEND_OVERLAP_DISJOINT_EXT:
- return "VK_BLEND_OVERLAP_DISJOINT_EXT";
- case VK_BLEND_OVERLAP_UNCORRELATED_EXT:
- return "VK_BLEND_OVERLAP_UNCORRELATED_EXT";
- default:
- return "Unhandled VkBlendOverlapEXT";
- }
-}
-
-static inline const char* string_VkCoverageModulationModeNV(VkCoverageModulationModeNV input_value)
-{
- switch ((VkCoverageModulationModeNV)input_value)
- {
- case VK_COVERAGE_MODULATION_MODE_ALPHA_NV:
- return "VK_COVERAGE_MODULATION_MODE_ALPHA_NV";
- case VK_COVERAGE_MODULATION_MODE_NONE_NV:
- return "VK_COVERAGE_MODULATION_MODE_NONE_NV";
- case VK_COVERAGE_MODULATION_MODE_RGBA_NV:
- return "VK_COVERAGE_MODULATION_MODE_RGBA_NV";
- case VK_COVERAGE_MODULATION_MODE_RGB_NV:
- return "VK_COVERAGE_MODULATION_MODE_RGB_NV";
- default:
- return "Unhandled VkCoverageModulationModeNV";
- }
-}
-
-static inline const char* string_VkValidationCacheHeaderVersionEXT(VkValidationCacheHeaderVersionEXT input_value)
-{
- switch ((VkValidationCacheHeaderVersionEXT)input_value)
- {
- case VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT:
- return "VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT";
- default:
- return "Unhandled VkValidationCacheHeaderVersionEXT";
- }
-}
-
-static inline const char* string_VkDescriptorBindingFlagBitsEXT(VkDescriptorBindingFlagBitsEXT input_value)
-{
- switch ((VkDescriptorBindingFlagBitsEXT)input_value)
- {
- case VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT:
- return "VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT";
- case VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT:
- return "VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT";
- case VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT:
- return "VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT";
- case VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT:
- return "VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT";
- default:
- return "Unhandled VkDescriptorBindingFlagBitsEXT";
- }
-}
-
-static inline std::string string_VkDescriptorBindingFlagsEXT(VkDescriptorBindingFlagsEXT input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkDescriptorBindingFlagBitsEXT(static_cast<VkDescriptorBindingFlagBitsEXT>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkDescriptorBindingFlagBitsEXT(static_cast<VkDescriptorBindingFlagBitsEXT>(0)));
- return ret;
-}
-
-static inline const char* string_VkShadingRatePaletteEntryNV(VkShadingRatePaletteEntryNV input_value)
-{
- switch ((VkShadingRatePaletteEntryNV)input_value)
- {
- case VK_SHADING_RATE_PALETTE_ENTRY_16_INVOCATIONS_PER_PIXEL_NV:
- return "VK_SHADING_RATE_PALETTE_ENTRY_16_INVOCATIONS_PER_PIXEL_NV";
- case VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV:
- return "VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV";
- case VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X1_PIXELS_NV:
- return "VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X1_PIXELS_NV";
- case VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X2_PIXELS_NV:
- return "VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X2_PIXELS_NV";
- case VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X4_PIXELS_NV:
- return "VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X4_PIXELS_NV";
- case VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X2_PIXELS_NV:
- return "VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X2_PIXELS_NV";
- case VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV:
- return "VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV";
- case VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_PIXEL_NV:
- return "VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_PIXEL_NV";
- case VK_SHADING_RATE_PALETTE_ENTRY_2_INVOCATIONS_PER_PIXEL_NV:
- return "VK_SHADING_RATE_PALETTE_ENTRY_2_INVOCATIONS_PER_PIXEL_NV";
- case VK_SHADING_RATE_PALETTE_ENTRY_4_INVOCATIONS_PER_PIXEL_NV:
- return "VK_SHADING_RATE_PALETTE_ENTRY_4_INVOCATIONS_PER_PIXEL_NV";
- case VK_SHADING_RATE_PALETTE_ENTRY_8_INVOCATIONS_PER_PIXEL_NV:
- return "VK_SHADING_RATE_PALETTE_ENTRY_8_INVOCATIONS_PER_PIXEL_NV";
- case VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV:
- return "VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV";
- default:
- return "Unhandled VkShadingRatePaletteEntryNV";
- }
-}
-
-static inline const char* string_VkCoarseSampleOrderTypeNV(VkCoarseSampleOrderTypeNV input_value)
-{
- switch ((VkCoarseSampleOrderTypeNV)input_value)
- {
- case VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV:
- return "VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV";
- case VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV:
- return "VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV";
- case VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV:
- return "VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV";
- case VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV:
- return "VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV";
- default:
- return "Unhandled VkCoarseSampleOrderTypeNV";
- }
-}
-
-static inline const char* string_VkAccelerationStructureTypeNV(VkAccelerationStructureTypeNV input_value)
-{
- switch ((VkAccelerationStructureTypeNV)input_value)
- {
- case VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV:
- return "VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV";
- case VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV:
- return "VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV";
- default:
- return "Unhandled VkAccelerationStructureTypeNV";
- }
-}
-
-static inline const char* string_VkRayTracingShaderGroupTypeNV(VkRayTracingShaderGroupTypeNV input_value)
-{
- switch ((VkRayTracingShaderGroupTypeNV)input_value)
- {
- case VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV:
- return "VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV";
- case VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV:
- return "VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV";
- case VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV:
- return "VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV";
- default:
- return "Unhandled VkRayTracingShaderGroupTypeNV";
- }
-}
-
-static inline const char* string_VkGeometryTypeNV(VkGeometryTypeNV input_value)
-{
- switch ((VkGeometryTypeNV)input_value)
- {
- case VK_GEOMETRY_TYPE_AABBS_NV:
- return "VK_GEOMETRY_TYPE_AABBS_NV";
- case VK_GEOMETRY_TYPE_TRIANGLES_NV:
- return "VK_GEOMETRY_TYPE_TRIANGLES_NV";
- default:
- return "Unhandled VkGeometryTypeNV";
- }
-}
-
-static inline const char* string_VkGeometryFlagBitsNV(VkGeometryFlagBitsNV input_value)
-{
- switch ((VkGeometryFlagBitsNV)input_value)
- {
- case VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_NV:
- return "VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_NV";
- case VK_GEOMETRY_OPAQUE_BIT_NV:
- return "VK_GEOMETRY_OPAQUE_BIT_NV";
- default:
- return "Unhandled VkGeometryFlagBitsNV";
- }
-}
-
-static inline std::string string_VkGeometryFlagsNV(VkGeometryFlagsNV input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkGeometryFlagBitsNV(static_cast<VkGeometryFlagBitsNV>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkGeometryFlagBitsNV(static_cast<VkGeometryFlagBitsNV>(0)));
- return ret;
-}
-
-static inline const char* string_VkGeometryInstanceFlagBitsNV(VkGeometryInstanceFlagBitsNV input_value)
-{
- switch ((VkGeometryInstanceFlagBitsNV)input_value)
- {
- case VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_NV:
- return "VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_NV";
- case VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_NV:
- return "VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_NV";
- case VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV:
- return "VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV";
- case VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV:
- return "VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV";
- default:
- return "Unhandled VkGeometryInstanceFlagBitsNV";
- }
-}
-
-static inline std::string string_VkGeometryInstanceFlagsNV(VkGeometryInstanceFlagsNV input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkGeometryInstanceFlagBitsNV(static_cast<VkGeometryInstanceFlagBitsNV>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkGeometryInstanceFlagBitsNV(static_cast<VkGeometryInstanceFlagBitsNV>(0)));
- return ret;
-}
-
-static inline const char* string_VkBuildAccelerationStructureFlagBitsNV(VkBuildAccelerationStructureFlagBitsNV input_value)
-{
- switch ((VkBuildAccelerationStructureFlagBitsNV)input_value)
- {
- case VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV:
- return "VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV";
- case VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV:
- return "VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV";
- case VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_NV:
- return "VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_NV";
- case VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV:
- return "VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV";
- case VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV:
- return "VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV";
- default:
- return "Unhandled VkBuildAccelerationStructureFlagBitsNV";
- }
-}
-
-static inline std::string string_VkBuildAccelerationStructureFlagsNV(VkBuildAccelerationStructureFlagsNV input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkBuildAccelerationStructureFlagBitsNV(static_cast<VkBuildAccelerationStructureFlagBitsNV>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkBuildAccelerationStructureFlagBitsNV(static_cast<VkBuildAccelerationStructureFlagBitsNV>(0)));
- return ret;
-}
-
-static inline const char* string_VkCopyAccelerationStructureModeNV(VkCopyAccelerationStructureModeNV input_value)
-{
- switch ((VkCopyAccelerationStructureModeNV)input_value)
- {
- case VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV:
- return "VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV";
- case VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV:
- return "VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV";
- default:
- return "Unhandled VkCopyAccelerationStructureModeNV";
- }
-}
-
-static inline const char* string_VkAccelerationStructureMemoryRequirementsTypeNV(VkAccelerationStructureMemoryRequirementsTypeNV input_value)
-{
- switch ((VkAccelerationStructureMemoryRequirementsTypeNV)input_value)
- {
- case VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV:
- return "VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV";
- case VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV:
- return "VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV";
- case VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV:
- return "VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV";
- default:
- return "Unhandled VkAccelerationStructureMemoryRequirementsTypeNV";
- }
-}
-
-static inline const char* string_VkQueueGlobalPriorityEXT(VkQueueGlobalPriorityEXT input_value)
-{
- switch ((VkQueueGlobalPriorityEXT)input_value)
- {
- case VK_QUEUE_GLOBAL_PRIORITY_HIGH_EXT:
- return "VK_QUEUE_GLOBAL_PRIORITY_HIGH_EXT";
- case VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT:
- return "VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT";
- case VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT:
- return "VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT";
- case VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT:
- return "VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT";
- default:
- return "Unhandled VkQueueGlobalPriorityEXT";
- }
-}
-
-static inline const char* string_VkTimeDomainEXT(VkTimeDomainEXT input_value)
-{
- switch ((VkTimeDomainEXT)input_value)
- {
- case VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT:
- return "VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT";
- case VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_EXT:
- return "VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_EXT";
- case VK_TIME_DOMAIN_DEVICE_EXT:
- return "VK_TIME_DOMAIN_DEVICE_EXT";
- case VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT:
- return "VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT";
- default:
- return "Unhandled VkTimeDomainEXT";
- }
-}
-
-static inline const char* string_VkMemoryOverallocationBehaviorAMD(VkMemoryOverallocationBehaviorAMD input_value)
-{
- switch ((VkMemoryOverallocationBehaviorAMD)input_value)
- {
- case VK_MEMORY_OVERALLOCATION_BEHAVIOR_ALLOWED_AMD:
- return "VK_MEMORY_OVERALLOCATION_BEHAVIOR_ALLOWED_AMD";
- case VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD:
- return "VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD";
- case VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD:
- return "VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD";
- default:
- return "Unhandled VkMemoryOverallocationBehaviorAMD";
- }
-}
-
-static inline const char* string_VkPipelineCreationFeedbackFlagBitsEXT(VkPipelineCreationFeedbackFlagBitsEXT input_value)
-{
- switch ((VkPipelineCreationFeedbackFlagBitsEXT)input_value)
- {
- case VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT_EXT:
- return "VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT_EXT";
- case VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT_EXT:
- return "VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT_EXT";
- case VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT_EXT:
- return "VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT_EXT";
- default:
- return "Unhandled VkPipelineCreationFeedbackFlagBitsEXT";
- }
-}
-
-static inline std::string string_VkPipelineCreationFeedbackFlagsEXT(VkPipelineCreationFeedbackFlagsEXT input_value)
-{
- std::string ret;
- int index = 0;
- while(input_value) {
- if (input_value & 1) {
- if( !ret.empty()) ret.append("|");
- ret.append(string_VkPipelineCreationFeedbackFlagBitsEXT(static_cast<VkPipelineCreationFeedbackFlagBitsEXT>(1 << index)));
- }
- ++index;
- input_value >>= 1;
- }
- if( ret.empty()) ret.append(string_VkPipelineCreationFeedbackFlagBitsEXT(static_cast<VkPipelineCreationFeedbackFlagBitsEXT>(0)));
- return ret;
-}
-
-static inline const char* string_VkPerformanceConfigurationTypeINTEL(VkPerformanceConfigurationTypeINTEL input_value)
-{
- switch ((VkPerformanceConfigurationTypeINTEL)input_value)
- {
- case VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL:
- return "VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL";
- default:
- return "Unhandled VkPerformanceConfigurationTypeINTEL";
- }
-}
-
-static inline const char* string_VkQueryPoolSamplingModeINTEL(VkQueryPoolSamplingModeINTEL input_value)
-{
- switch ((VkQueryPoolSamplingModeINTEL)input_value)
- {
- case VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL:
- return "VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL";
- default:
- return "Unhandled VkQueryPoolSamplingModeINTEL";
- }
-}
-
-static inline const char* string_VkPerformanceOverrideTypeINTEL(VkPerformanceOverrideTypeINTEL input_value)
-{
- switch ((VkPerformanceOverrideTypeINTEL)input_value)
- {
- case VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL:
- return "VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL";
- case VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL:
- return "VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL";
- default:
- return "Unhandled VkPerformanceOverrideTypeINTEL";
- }
-}
-
-static inline const char* string_VkPerformanceParameterTypeINTEL(VkPerformanceParameterTypeINTEL input_value)
-{
- switch ((VkPerformanceParameterTypeINTEL)input_value)
- {
- case VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL:
- return "VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL";
- case VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL:
- return "VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL";
- default:
- return "Unhandled VkPerformanceParameterTypeINTEL";
- }
-}
-
-static inline const char* string_VkPerformanceValueTypeINTEL(VkPerformanceValueTypeINTEL input_value)
-{
- switch ((VkPerformanceValueTypeINTEL)input_value)
- {
- case VK_PERFORMANCE_VALUE_TYPE_BOOL_INTEL:
- return "VK_PERFORMANCE_VALUE_TYPE_BOOL_INTEL";
- case VK_PERFORMANCE_VALUE_TYPE_FLOAT_INTEL:
- return "VK_PERFORMANCE_VALUE_TYPE_FLOAT_INTEL";
- case VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL:
- return "VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL";
- case VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL:
- return "VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL";
- case VK_PERFORMANCE_VALUE_TYPE_UINT64_INTEL:
- return "VK_PERFORMANCE_VALUE_TYPE_UINT64_INTEL";
- default:
- return "Unhandled VkPerformanceValueTypeINTEL";
- }
-}
-
-static inline const char* string_VkValidationFeatureEnableEXT(VkValidationFeatureEnableEXT input_value)
-{
- switch ((VkValidationFeatureEnableEXT)input_value)
- {
- case VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT:
- return "VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT";
- case VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT:
- return "VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT";
- case VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT:
- return "VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT";
- default:
- return "Unhandled VkValidationFeatureEnableEXT";
- }
-}
-
-static inline const char* string_VkValidationFeatureDisableEXT(VkValidationFeatureDisableEXT input_value)
-{
- switch ((VkValidationFeatureDisableEXT)input_value)
- {
- case VK_VALIDATION_FEATURE_DISABLE_ALL_EXT:
- return "VK_VALIDATION_FEATURE_DISABLE_ALL_EXT";
- case VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT:
- return "VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT";
- case VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT:
- return "VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT";
- case VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT:
- return "VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT";
- case VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT:
- return "VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT";
- case VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT:
- return "VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT";
- case VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT:
- return "VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT";
- default:
- return "Unhandled VkValidationFeatureDisableEXT";
- }
-}
-
-static inline const char* string_VkComponentTypeNV(VkComponentTypeNV input_value)
-{
- switch ((VkComponentTypeNV)input_value)
- {
- case VK_COMPONENT_TYPE_FLOAT16_NV:
- return "VK_COMPONENT_TYPE_FLOAT16_NV";
- case VK_COMPONENT_TYPE_FLOAT32_NV:
- return "VK_COMPONENT_TYPE_FLOAT32_NV";
- case VK_COMPONENT_TYPE_FLOAT64_NV:
- return "VK_COMPONENT_TYPE_FLOAT64_NV";
- case VK_COMPONENT_TYPE_SINT16_NV:
- return "VK_COMPONENT_TYPE_SINT16_NV";
- case VK_COMPONENT_TYPE_SINT32_NV:
- return "VK_COMPONENT_TYPE_SINT32_NV";
- case VK_COMPONENT_TYPE_SINT64_NV:
- return "VK_COMPONENT_TYPE_SINT64_NV";
- case VK_COMPONENT_TYPE_SINT8_NV:
- return "VK_COMPONENT_TYPE_SINT8_NV";
- case VK_COMPONENT_TYPE_UINT16_NV:
- return "VK_COMPONENT_TYPE_UINT16_NV";
- case VK_COMPONENT_TYPE_UINT32_NV:
- return "VK_COMPONENT_TYPE_UINT32_NV";
- case VK_COMPONENT_TYPE_UINT64_NV:
- return "VK_COMPONENT_TYPE_UINT64_NV";
- case VK_COMPONENT_TYPE_UINT8_NV:
- return "VK_COMPONENT_TYPE_UINT8_NV";
- default:
- return "Unhandled VkComponentTypeNV";
- }
-}
-
-static inline const char* string_VkScopeNV(VkScopeNV input_value)
-{
- switch ((VkScopeNV)input_value)
- {
- case VK_SCOPE_DEVICE_NV:
- return "VK_SCOPE_DEVICE_NV";
- case VK_SCOPE_QUEUE_FAMILY_NV:
- return "VK_SCOPE_QUEUE_FAMILY_NV";
- case VK_SCOPE_SUBGROUP_NV:
- return "VK_SCOPE_SUBGROUP_NV";
- case VK_SCOPE_WORKGROUP_NV:
- return "VK_SCOPE_WORKGROUP_NV";
- default:
- return "Unhandled VkScopeNV";
- }
-}
-
-static inline const char* string_VkCoverageReductionModeNV(VkCoverageReductionModeNV input_value)
-{
- switch ((VkCoverageReductionModeNV)input_value)
- {
- case VK_COVERAGE_REDUCTION_MODE_MERGE_NV:
- return "VK_COVERAGE_REDUCTION_MODE_MERGE_NV";
- case VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV:
- return "VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV";
- default:
- return "Unhandled VkCoverageReductionModeNV";
- }
-}
-
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-static inline const char* string_VkFullScreenExclusiveEXT(VkFullScreenExclusiveEXT input_value)
-{
- switch ((VkFullScreenExclusiveEXT)input_value)
- {
- case VK_FULL_SCREEN_EXCLUSIVE_ALLOWED_EXT:
- return "VK_FULL_SCREEN_EXCLUSIVE_ALLOWED_EXT";
- case VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT:
- return "VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT";
- case VK_FULL_SCREEN_EXCLUSIVE_DEFAULT_EXT:
- return "VK_FULL_SCREEN_EXCLUSIVE_DEFAULT_EXT";
- case VK_FULL_SCREEN_EXCLUSIVE_DISALLOWED_EXT:
- return "VK_FULL_SCREEN_EXCLUSIVE_DISALLOWED_EXT";
- default:
- return "Unhandled VkFullScreenExclusiveEXT";
- }
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-static inline const char* string_VkLineRasterizationModeEXT(VkLineRasterizationModeEXT input_value)
-{
- switch ((VkLineRasterizationModeEXT)input_value)
- {
- case VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT:
- return "VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT";
- case VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT:
- return "VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT";
- case VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT:
- return "VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT";
- case VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT:
- return "VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT";
- default:
- return "Unhandled VkLineRasterizationModeEXT";
- }
-}
-
-static inline const char * GetPhysDevFeatureString(uint32_t index) {
- const char * IndexToPhysDevFeatureString[] = {
- "robustBufferAccess",
- "fullDrawIndexUint32",
- "imageCubeArray",
- "independentBlend",
- "geometryShader",
- "tessellationShader",
- "sampleRateShading",
- "dualSrcBlend",
- "logicOp",
- "multiDrawIndirect",
- "drawIndirectFirstInstance",
- "depthClamp",
- "depthBiasClamp",
- "fillModeNonSolid",
- "depthBounds",
- "wideLines",
- "largePoints",
- "alphaToOne",
- "multiViewport",
- "samplerAnisotropy",
- "textureCompressionETC2",
- "textureCompressionASTC_LDR",
- "textureCompressionBC",
- "occlusionQueryPrecise",
- "pipelineStatisticsQuery",
- "vertexPipelineStoresAndAtomics",
- "fragmentStoresAndAtomics",
- "shaderTessellationAndGeometryPointSize",
- "shaderImageGatherExtended",
- "shaderStorageImageExtendedFormats",
- "shaderStorageImageMultisample",
- "shaderStorageImageReadWithoutFormat",
- "shaderStorageImageWriteWithoutFormat",
- "shaderUniformBufferArrayDynamicIndexing",
- "shaderSampledImageArrayDynamicIndexing",
- "shaderStorageBufferArrayDynamicIndexing",
- "shaderStorageImageArrayDynamicIndexing",
- "shaderClipDistance",
- "shaderCullDistance",
- "shaderFloat64",
- "shaderInt64",
- "shaderInt16",
- "shaderResourceResidency",
- "shaderResourceMinLod",
- "sparseBinding",
- "sparseResidencyBuffer",
- "sparseResidencyImage2D",
- "sparseResidencyImage3D",
- "sparseResidency2Samples",
- "sparseResidency4Samples",
- "sparseResidency8Samples",
- "sparseResidency16Samples",
- "sparseResidencyAliased",
- "variableMultisampleRate",
- "inheritedQueries",
- };
-
- return IndexToPhysDevFeatureString[index];
-}
diff --git a/layers/generated/vk_extension_helper.h b/layers/generated/vk_extension_helper.h
deleted file mode 100644
index 2047e6058..000000000
--- a/layers/generated/vk_extension_helper.h
+++ /dev/null
@@ -1,969 +0,0 @@
-// *** THIS FILE IS GENERATED - DO NOT EDIT ***
-// See helper_file_generator.py for modifications
-
-
-/***************************************************************************
- *
- * Copyright (c) 2015-2019 The Khronos Group Inc.
- * Copyright (c) 2015-2019 Valve Corporation
- * Copyright (c) 2015-2019 LunarG, Inc.
- * Copyright (c) 2015-2019 Google Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * Author: Mark Lobodzinski <mark@lunarg.com>
- * Author: Courtney Goeltzenleuchter <courtneygo@google.com>
- * Author: Tobin Ehlis <tobine@google.com>
- * Author: Chris Forbes <chrisforbes@google.com>
- * Author: John Zulauf<jzulauf@lunarg.com>
- *
- ****************************************************************************/
-
-
-#ifndef VK_EXTENSION_HELPER_H_
-#define VK_EXTENSION_HELPER_H_
-#include <unordered_set>
-#include <string>
-#include <unordered_map>
-#include <utility>
-#include <set>
-#include <vector>
-
-#include <vulkan/vulkan.h>
-
-struct InstanceExtensions {
- bool vk_feature_version_1_1{false};
- bool vk_ext_acquire_xlib_display{false};
- bool vk_ext_debug_report{false};
- bool vk_ext_debug_utils{false};
- bool vk_ext_direct_mode_display{false};
- bool vk_ext_display_surface_counter{false};
- bool vk_ext_headless_surface{false};
- bool vk_ext_metal_surface{false};
- bool vk_ext_swapchain_color_space{false};
- bool vk_ext_validation_features{false};
- bool vk_ext_validation_flags{false};
- bool vk_fuchsia_imagepipe_surface{false};
- bool vk_ggp_stream_descriptor_surface{false};
- bool vk_khr_android_surface{false};
- bool vk_khr_device_group_creation{false};
- bool vk_khr_display{false};
- bool vk_khr_external_fence_capabilities{false};
- bool vk_khr_external_memory_capabilities{false};
- bool vk_khr_external_semaphore_capabilities{false};
- bool vk_khr_get_display_properties_2{false};
- bool vk_khr_get_physical_device_properties_2{false};
- bool vk_khr_get_surface_capabilities_2{false};
- bool vk_khr_surface{false};
- bool vk_khr_surface_protected_capabilities{false};
- bool vk_khr_wayland_surface{false};
- bool vk_khr_win32_surface{false};
- bool vk_khr_xcb_surface{false};
- bool vk_khr_xlib_surface{false};
- bool vk_mvk_ios_surface{false};
- bool vk_mvk_macos_surface{false};
- bool vk_nn_vi_surface{false};
- bool vk_nv_external_memory_capabilities{false};
-
- struct InstanceReq {
- const bool InstanceExtensions::* enabled;
- const char *name;
- };
- typedef std::vector<InstanceReq> InstanceReqVec;
- struct InstanceInfo {
- InstanceInfo(bool InstanceExtensions::* state_, const InstanceReqVec requires_): state(state_), requires(requires_) {}
- bool InstanceExtensions::* state;
- InstanceReqVec requires;
- };
-
- typedef std::unordered_map<std::string,InstanceInfo> InstanceInfoMap;
- static const InstanceInfo &get_info(const char *name) {
- static const InstanceInfoMap info_map = {
- std::make_pair("VK_VERSION_1_1", InstanceInfo(&InstanceExtensions::vk_feature_version_1_1, {})),
-#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
- std::make_pair(VK_EXT_ACQUIRE_XLIB_DISPLAY_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_ext_acquire_xlib_display, {{
- {&InstanceExtensions::vk_ext_direct_mode_display, VK_EXT_DIRECT_MODE_DISPLAY_EXTENSION_NAME}}})),
-#endif
- std::make_pair(VK_EXT_DEBUG_REPORT_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_ext_debug_report, {})),
- std::make_pair(VK_EXT_DEBUG_UTILS_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_ext_debug_utils, {})),
- std::make_pair(VK_EXT_DIRECT_MODE_DISPLAY_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_ext_direct_mode_display, {{
- {&InstanceExtensions::vk_khr_display, VK_KHR_DISPLAY_EXTENSION_NAME}}})),
- std::make_pair(VK_EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_ext_display_surface_counter, {{
- {&InstanceExtensions::vk_khr_display, VK_KHR_DISPLAY_EXTENSION_NAME}}})),
- std::make_pair(VK_EXT_HEADLESS_SURFACE_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_ext_headless_surface, {{
- {&InstanceExtensions::vk_khr_surface, VK_KHR_SURFACE_EXTENSION_NAME}}})),
-#ifdef VK_USE_PLATFORM_METAL_EXT
- std::make_pair(VK_EXT_METAL_SURFACE_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_ext_metal_surface, {{
- {&InstanceExtensions::vk_khr_surface, VK_KHR_SURFACE_EXTENSION_NAME}}})),
-#endif
- std::make_pair(VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_ext_swapchain_color_space, {{
- {&InstanceExtensions::vk_khr_surface, VK_KHR_SURFACE_EXTENSION_NAME}}})),
- std::make_pair(VK_EXT_VALIDATION_FEATURES_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_ext_validation_features, {})),
- std::make_pair(VK_EXT_VALIDATION_FLAGS_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_ext_validation_flags, {})),
-#ifdef VK_USE_PLATFORM_FUCHSIA
- std::make_pair(VK_FUCHSIA_IMAGEPIPE_SURFACE_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_fuchsia_imagepipe_surface, {{
- {&InstanceExtensions::vk_khr_surface, VK_KHR_SURFACE_EXTENSION_NAME}}})),
-#endif
-#ifdef VK_USE_PLATFORM_GGP
- std::make_pair(VK_GGP_STREAM_DESCRIPTOR_SURFACE_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_ggp_stream_descriptor_surface, {{
- {&InstanceExtensions::vk_khr_surface, VK_KHR_SURFACE_EXTENSION_NAME}}})),
-#endif
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
- std::make_pair(VK_KHR_ANDROID_SURFACE_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_khr_android_surface, {{
- {&InstanceExtensions::vk_khr_surface, VK_KHR_SURFACE_EXTENSION_NAME}}})),
-#endif
- std::make_pair(VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_khr_device_group_creation, {})),
- std::make_pair(VK_KHR_DISPLAY_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_khr_display, {{
- {&InstanceExtensions::vk_khr_surface, VK_KHR_SURFACE_EXTENSION_NAME}}})),
- std::make_pair(VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_khr_external_fence_capabilities, {{
- {&InstanceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
- std::make_pair(VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_khr_external_memory_capabilities, {{
- {&InstanceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
- std::make_pair(VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_khr_external_semaphore_capabilities, {{
- {&InstanceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
- std::make_pair(VK_KHR_GET_DISPLAY_PROPERTIES_2_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_khr_get_display_properties_2, {{
- {&InstanceExtensions::vk_khr_display, VK_KHR_DISPLAY_EXTENSION_NAME}}})),
- std::make_pair(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_khr_get_physical_device_properties_2, {})),
- std::make_pair(VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_khr_get_surface_capabilities_2, {{
- {&InstanceExtensions::vk_khr_surface, VK_KHR_SURFACE_EXTENSION_NAME}}})),
- std::make_pair(VK_KHR_SURFACE_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_khr_surface, {})),
- std::make_pair(VK_KHR_SURFACE_PROTECTED_CAPABILITIES_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_khr_surface_protected_capabilities, {{
- {&InstanceExtensions::vk_khr_get_surface_capabilities_2, VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME}}})),
-#ifdef VK_USE_PLATFORM_WAYLAND_KHR
- std::make_pair(VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_khr_wayland_surface, {{
- {&InstanceExtensions::vk_khr_surface, VK_KHR_SURFACE_EXTENSION_NAME}}})),
-#endif
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- std::make_pair(VK_KHR_WIN32_SURFACE_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_khr_win32_surface, {{
- {&InstanceExtensions::vk_khr_surface, VK_KHR_SURFACE_EXTENSION_NAME}}})),
-#endif
-#ifdef VK_USE_PLATFORM_XCB_KHR
- std::make_pair(VK_KHR_XCB_SURFACE_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_khr_xcb_surface, {{
- {&InstanceExtensions::vk_khr_surface, VK_KHR_SURFACE_EXTENSION_NAME}}})),
-#endif
-#ifdef VK_USE_PLATFORM_XLIB_KHR
- std::make_pair(VK_KHR_XLIB_SURFACE_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_khr_xlib_surface, {{
- {&InstanceExtensions::vk_khr_surface, VK_KHR_SURFACE_EXTENSION_NAME}}})),
-#endif
-#ifdef VK_USE_PLATFORM_IOS_MVK
- std::make_pair(VK_MVK_IOS_SURFACE_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_mvk_ios_surface, {{
- {&InstanceExtensions::vk_khr_surface, VK_KHR_SURFACE_EXTENSION_NAME}}})),
-#endif
-#ifdef VK_USE_PLATFORM_MACOS_MVK
- std::make_pair(VK_MVK_MACOS_SURFACE_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_mvk_macos_surface, {{
- {&InstanceExtensions::vk_khr_surface, VK_KHR_SURFACE_EXTENSION_NAME}}})),
-#endif
-#ifdef VK_USE_PLATFORM_VI_NN
- std::make_pair(VK_NN_VI_SURFACE_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_nn_vi_surface, {{
- {&InstanceExtensions::vk_khr_surface, VK_KHR_SURFACE_EXTENSION_NAME}}})),
-#endif
- std::make_pair(VK_NV_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_nv_external_memory_capabilities, {})),
- };
-
- static const InstanceInfo empty_info {nullptr, InstanceReqVec()};
- InstanceInfoMap::const_iterator info = info_map.find(name);
- if ( info != info_map.cend()) {
- return info->second;
- }
- return empty_info;
- }
-
- uint32_t NormalizeApiVersion(uint32_t specified_version) {
- uint32_t api_version = (specified_version < VK_API_VERSION_1_1) ? VK_API_VERSION_1_0 : VK_API_VERSION_1_1;
- return api_version;
- }
-
- uint32_t InitFromInstanceCreateInfo(uint32_t requested_api_version, const VkInstanceCreateInfo *pCreateInfo) {
-
- static const std::vector<const char *> V_1_1_promoted_instance_apis = {
- VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME,
- VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME,
- VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME,
- VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME,
- VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
- "VK_VERSION_1_1",
- };
-
- // Initialize struct data, robust to invalid pCreateInfo
- if (pCreateInfo->ppEnabledExtensionNames) {
- for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
- if (!pCreateInfo->ppEnabledExtensionNames[i]) continue;
- auto info = get_info(pCreateInfo->ppEnabledExtensionNames[i]);
- if(info.state) this->*(info.state) = true;
- }
- }
- uint32_t api_version = NormalizeApiVersion(requested_api_version);
- if (api_version >= VK_API_VERSION_1_1) {
- for (auto promoted_ext : V_1_1_promoted_instance_apis) {
- auto info = get_info(promoted_ext);
- assert(info.state);
- if (info.state) this->*(info.state) = true;
- }
- }
- return api_version;
- }
-};
-
-static const std::set<std::string> kInstanceExtensionNames = {
-#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
- VK_EXT_ACQUIRE_XLIB_DISPLAY_EXTENSION_NAME,
-#endif
- VK_EXT_DEBUG_REPORT_EXTENSION_NAME,
- VK_EXT_DEBUG_UTILS_EXTENSION_NAME,
- VK_EXT_DIRECT_MODE_DISPLAY_EXTENSION_NAME,
- VK_EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME,
- VK_EXT_HEADLESS_SURFACE_EXTENSION_NAME,
-#ifdef VK_USE_PLATFORM_METAL_EXT
- VK_EXT_METAL_SURFACE_EXTENSION_NAME,
-#endif
- VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME,
- VK_EXT_VALIDATION_FEATURES_EXTENSION_NAME,
- VK_EXT_VALIDATION_FLAGS_EXTENSION_NAME,
-#ifdef VK_USE_PLATFORM_FUCHSIA
- VK_FUCHSIA_IMAGEPIPE_SURFACE_EXTENSION_NAME,
-#endif
-#ifdef VK_USE_PLATFORM_GGP
- VK_GGP_STREAM_DESCRIPTOR_SURFACE_EXTENSION_NAME,
-#endif
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
- VK_KHR_ANDROID_SURFACE_EXTENSION_NAME,
-#endif
- VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME,
- VK_KHR_DISPLAY_EXTENSION_NAME,
- VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME,
- VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME,
- VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME,
- VK_KHR_GET_DISPLAY_PROPERTIES_2_EXTENSION_NAME,
- VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
- VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME,
- VK_KHR_SURFACE_EXTENSION_NAME,
- VK_KHR_SURFACE_PROTECTED_CAPABILITIES_EXTENSION_NAME,
-#ifdef VK_USE_PLATFORM_WAYLAND_KHR
- VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME,
-#endif
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- VK_KHR_WIN32_SURFACE_EXTENSION_NAME,
-#endif
-#ifdef VK_USE_PLATFORM_XCB_KHR
- VK_KHR_XCB_SURFACE_EXTENSION_NAME,
-#endif
-#ifdef VK_USE_PLATFORM_XLIB_KHR
- VK_KHR_XLIB_SURFACE_EXTENSION_NAME,
-#endif
-#ifdef VK_USE_PLATFORM_IOS_MVK
- VK_MVK_IOS_SURFACE_EXTENSION_NAME,
-#endif
-#ifdef VK_USE_PLATFORM_MACOS_MVK
- VK_MVK_MACOS_SURFACE_EXTENSION_NAME,
-#endif
-#ifdef VK_USE_PLATFORM_VI_NN
- VK_NN_VI_SURFACE_EXTENSION_NAME,
-#endif
- VK_NV_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME,
-};
-
-struct DeviceExtensions : public InstanceExtensions {
- bool vk_feature_version_1_1{false};
- bool vk_amd_buffer_marker{false};
- bool vk_amd_device_coherent_memory{false};
- bool vk_amd_display_native_hdr{false};
- bool vk_amd_draw_indirect_count{false};
- bool vk_amd_gcn_shader{false};
- bool vk_amd_gpu_shader_half_float{false};
- bool vk_amd_gpu_shader_int16{false};
- bool vk_amd_memory_overallocation_behavior{false};
- bool vk_amd_mixed_attachment_samples{false};
- bool vk_amd_negative_viewport_height{false};
- bool vk_amd_pipeline_compiler_control{false};
- bool vk_amd_rasterization_order{false};
- bool vk_amd_shader_ballot{false};
- bool vk_amd_shader_core_properties{false};
- bool vk_amd_shader_core_properties_2{false};
- bool vk_amd_shader_explicit_vertex_parameter{false};
- bool vk_amd_shader_fragment_mask{false};
- bool vk_amd_shader_image_load_store_lod{false};
- bool vk_amd_shader_info{false};
- bool vk_amd_shader_trinary_minmax{false};
- bool vk_amd_texture_gather_bias_lod{false};
- bool vk_android_external_memory_android_hardware_buffer{false};
- bool vk_ext_astc_decode_mode{false};
- bool vk_ext_blend_operation_advanced{false};
- bool vk_ext_buffer_device_address{false};
- bool vk_ext_calibrated_timestamps{false};
- bool vk_ext_conditional_rendering{false};
- bool vk_ext_conservative_rasterization{false};
- bool vk_ext_debug_marker{false};
- bool vk_ext_depth_clip_enable{false};
- bool vk_ext_depth_range_unrestricted{false};
- bool vk_ext_descriptor_indexing{false};
- bool vk_ext_discard_rectangles{false};
- bool vk_ext_display_control{false};
- bool vk_ext_external_memory_dma_buf{false};
- bool vk_ext_external_memory_host{false};
- bool vk_ext_filter_cubic{false};
- bool vk_ext_fragment_density_map{false};
- bool vk_ext_fragment_shader_interlock{false};
- bool vk_ext_full_screen_exclusive{false};
- bool vk_ext_global_priority{false};
- bool vk_ext_hdr_metadata{false};
- bool vk_ext_host_query_reset{false};
- bool vk_ext_image_drm_format_modifier{false};
- bool vk_ext_index_type_uint8{false};
- bool vk_ext_inline_uniform_block{false};
- bool vk_ext_line_rasterization{false};
- bool vk_ext_memory_budget{false};
- bool vk_ext_memory_priority{false};
- bool vk_ext_pci_bus_info{false};
- bool vk_ext_pipeline_creation_feedback{false};
- bool vk_ext_post_depth_coverage{false};
- bool vk_ext_queue_family_foreign{false};
- bool vk_ext_sample_locations{false};
- bool vk_ext_sampler_filter_minmax{false};
- bool vk_ext_scalar_block_layout{false};
- bool vk_ext_separate_stencil_usage{false};
- bool vk_ext_shader_demote_to_helper_invocation{false};
- bool vk_ext_shader_stencil_export{false};
- bool vk_ext_shader_subgroup_ballot{false};
- bool vk_ext_shader_subgroup_vote{false};
- bool vk_ext_shader_viewport_index_layer{false};
- bool vk_ext_subgroup_size_control{false};
- bool vk_ext_texel_buffer_alignment{false};
- bool vk_ext_texture_compression_astc_hdr{false};
- bool vk_ext_transform_feedback{false};
- bool vk_ext_validation_cache{false};
- bool vk_ext_vertex_attribute_divisor{false};
- bool vk_ext_ycbcr_image_arrays{false};
- bool vk_ggp_frame_token{false};
- bool vk_google_decorate_string{false};
- bool vk_google_display_timing{false};
- bool vk_google_hlsl_functionality1{false};
- bool vk_img_filter_cubic{false};
- bool vk_img_format_pvrtc{false};
- bool vk_intel_performance_query{false};
- bool vk_intel_shader_integer_functions_2{false};
- bool vk_khr_16bit_storage{false};
- bool vk_khr_8bit_storage{false};
- bool vk_khr_bind_memory_2{false};
- bool vk_khr_create_renderpass_2{false};
- bool vk_khr_dedicated_allocation{false};
- bool vk_khr_depth_stencil_resolve{false};
- bool vk_khr_descriptor_update_template{false};
- bool vk_khr_device_group{false};
- bool vk_khr_display_swapchain{false};
- bool vk_khr_draw_indirect_count{false};
- bool vk_khr_driver_properties{false};
- bool vk_khr_external_fence{false};
- bool vk_khr_external_fence_fd{false};
- bool vk_khr_external_fence_win32{false};
- bool vk_khr_external_memory{false};
- bool vk_khr_external_memory_fd{false};
- bool vk_khr_external_memory_win32{false};
- bool vk_khr_external_semaphore{false};
- bool vk_khr_external_semaphore_fd{false};
- bool vk_khr_external_semaphore_win32{false};
- bool vk_khr_get_memory_requirements_2{false};
- bool vk_khr_image_format_list{false};
- bool vk_khr_imageless_framebuffer{false};
- bool vk_khr_incremental_present{false};
- bool vk_khr_maintenance1{false};
- bool vk_khr_maintenance2{false};
- bool vk_khr_maintenance3{false};
- bool vk_khr_multiview{false};
- bool vk_khr_pipeline_executable_properties{false};
- bool vk_khr_push_descriptor{false};
- bool vk_khr_relaxed_block_layout{false};
- bool vk_khr_sampler_mirror_clamp_to_edge{false};
- bool vk_khr_sampler_ycbcr_conversion{false};
- bool vk_khr_shader_atomic_int64{false};
- bool vk_khr_shader_draw_parameters{false};
- bool vk_khr_shader_float16_int8{false};
- bool vk_khr_shader_float_controls{false};
- bool vk_khr_shared_presentable_image{false};
- bool vk_khr_storage_buffer_storage_class{false};
- bool vk_khr_swapchain{false};
- bool vk_khr_swapchain_mutable_format{false};
- bool vk_khr_uniform_buffer_standard_layout{false};
- bool vk_khr_variable_pointers{false};
- bool vk_khr_vulkan_memory_model{false};
- bool vk_khr_win32_keyed_mutex{false};
- bool vk_nvx_device_generated_commands{false};
- bool vk_nvx_image_view_handle{false};
- bool vk_nvx_multiview_per_view_attributes{false};
- bool vk_nv_clip_space_w_scaling{false};
- bool vk_nv_compute_shader_derivatives{false};
- bool vk_nv_cooperative_matrix{false};
- bool vk_nv_corner_sampled_image{false};
- bool vk_nv_coverage_reduction_mode{false};
- bool vk_nv_dedicated_allocation{false};
- bool vk_nv_dedicated_allocation_image_aliasing{false};
- bool vk_nv_device_diagnostic_checkpoints{false};
- bool vk_nv_external_memory{false};
- bool vk_nv_external_memory_win32{false};
- bool vk_nv_fill_rectangle{false};
- bool vk_nv_fragment_coverage_to_color{false};
- bool vk_nv_fragment_shader_barycentric{false};
- bool vk_nv_framebuffer_mixed_samples{false};
- bool vk_nv_geometry_shader_passthrough{false};
- bool vk_nv_glsl_shader{false};
- bool vk_nv_mesh_shader{false};
- bool vk_nv_ray_tracing{false};
- bool vk_nv_representative_fragment_test{false};
- bool vk_nv_sample_mask_override_coverage{false};
- bool vk_nv_scissor_exclusive{false};
- bool vk_nv_shader_image_footprint{false};
- bool vk_nv_shader_sm_builtins{false};
- bool vk_nv_shader_subgroup_partitioned{false};
- bool vk_nv_shading_rate_image{false};
- bool vk_nv_viewport_array2{false};
- bool vk_nv_viewport_swizzle{false};
- bool vk_nv_win32_keyed_mutex{false};
-
- struct DeviceReq {
- const bool DeviceExtensions::* enabled;
- const char *name;
- };
- typedef std::vector<DeviceReq> DeviceReqVec;
- struct DeviceInfo {
- DeviceInfo(bool DeviceExtensions::* state_, const DeviceReqVec requires_): state(state_), requires(requires_) {}
- bool DeviceExtensions::* state;
- DeviceReqVec requires;
- };
-
- typedef std::unordered_map<std::string,DeviceInfo> DeviceInfoMap;
- static const DeviceInfo &get_info(const char *name) {
- static const DeviceInfoMap info_map = {
- std::make_pair("VK_VERSION_1_1", DeviceInfo(&DeviceExtensions::vk_feature_version_1_1, {})),
- std::make_pair(VK_AMD_BUFFER_MARKER_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_amd_buffer_marker, {})),
- std::make_pair(VK_AMD_DEVICE_COHERENT_MEMORY_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_amd_device_coherent_memory, {})),
- std::make_pair(VK_AMD_DISPLAY_NATIVE_HDR_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_amd_display_native_hdr, {{
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME},
- {&DeviceExtensions::vk_khr_get_surface_capabilities_2, VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME},
- {&DeviceExtensions::vk_khr_swapchain, VK_KHR_SWAPCHAIN_EXTENSION_NAME}}})),
- std::make_pair(VK_AMD_DRAW_INDIRECT_COUNT_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_amd_draw_indirect_count, {})),
- std::make_pair(VK_AMD_GCN_SHADER_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_amd_gcn_shader, {})),
- std::make_pair(VK_AMD_GPU_SHADER_HALF_FLOAT_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_amd_gpu_shader_half_float, {})),
- std::make_pair(VK_AMD_GPU_SHADER_INT16_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_amd_gpu_shader_int16, {})),
- std::make_pair(VK_AMD_MEMORY_OVERALLOCATION_BEHAVIOR_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_amd_memory_overallocation_behavior, {})),
- std::make_pair(VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_amd_mixed_attachment_samples, {})),
- std::make_pair(VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_amd_negative_viewport_height, {})),
- std::make_pair(VK_AMD_PIPELINE_COMPILER_CONTROL_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_amd_pipeline_compiler_control, {})),
- std::make_pair(VK_AMD_RASTERIZATION_ORDER_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_amd_rasterization_order, {})),
- std::make_pair(VK_AMD_SHADER_BALLOT_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_amd_shader_ballot, {})),
- std::make_pair(VK_AMD_SHADER_CORE_PROPERTIES_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_amd_shader_core_properties, {{
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
- std::make_pair(VK_AMD_SHADER_CORE_PROPERTIES_2_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_amd_shader_core_properties_2, {{
- {&DeviceExtensions::vk_amd_shader_core_properties, VK_AMD_SHADER_CORE_PROPERTIES_EXTENSION_NAME}}})),
- std::make_pair(VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_amd_shader_explicit_vertex_parameter, {})),
- std::make_pair(VK_AMD_SHADER_FRAGMENT_MASK_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_amd_shader_fragment_mask, {})),
- std::make_pair(VK_AMD_SHADER_IMAGE_LOAD_STORE_LOD_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_amd_shader_image_load_store_lod, {})),
- std::make_pair(VK_AMD_SHADER_INFO_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_amd_shader_info, {})),
- std::make_pair(VK_AMD_SHADER_TRINARY_MINMAX_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_amd_shader_trinary_minmax, {})),
- std::make_pair(VK_AMD_TEXTURE_GATHER_BIAS_LOD_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_amd_texture_gather_bias_lod, {{
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
- std::make_pair(VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_android_external_memory_android_hardware_buffer, {{
- {&DeviceExtensions::vk_khr_sampler_ycbcr_conversion, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME},
- {&DeviceExtensions::vk_khr_external_memory, VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME},
- {&DeviceExtensions::vk_ext_queue_family_foreign, VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME}}})),
-#endif
- std::make_pair(VK_EXT_ASTC_DECODE_MODE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_astc_decode_mode, {{
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
- std::make_pair(VK_EXT_BLEND_OPERATION_ADVANCED_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_blend_operation_advanced, {})),
- std::make_pair(VK_EXT_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_buffer_device_address, {{
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
- std::make_pair(VK_EXT_CALIBRATED_TIMESTAMPS_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_calibrated_timestamps, {})),
- std::make_pair(VK_EXT_CONDITIONAL_RENDERING_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_conditional_rendering, {})),
- std::make_pair(VK_EXT_CONSERVATIVE_RASTERIZATION_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_conservative_rasterization, {{
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
- std::make_pair(VK_EXT_DEBUG_MARKER_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_debug_marker, {{
- {&DeviceExtensions::vk_ext_debug_report, VK_EXT_DEBUG_REPORT_EXTENSION_NAME}}})),
- std::make_pair(VK_EXT_DEPTH_CLIP_ENABLE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_depth_clip_enable, {})),
- std::make_pair(VK_EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_depth_range_unrestricted, {})),
- std::make_pair(VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_descriptor_indexing, {{
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME},
- {&DeviceExtensions::vk_khr_maintenance3, VK_KHR_MAINTENANCE3_EXTENSION_NAME}}})),
- std::make_pair(VK_EXT_DISCARD_RECTANGLES_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_discard_rectangles, {{
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
- std::make_pair(VK_EXT_DISPLAY_CONTROL_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_display_control, {{
- {&DeviceExtensions::vk_ext_display_surface_counter, VK_EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME},
- {&DeviceExtensions::vk_khr_swapchain, VK_KHR_SWAPCHAIN_EXTENSION_NAME}}})),
- std::make_pair(VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_external_memory_dma_buf, {{
- {&DeviceExtensions::vk_khr_external_memory_fd, VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME}}})),
- std::make_pair(VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_external_memory_host, {{
- {&DeviceExtensions::vk_khr_external_memory, VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME}}})),
- std::make_pair(VK_EXT_FILTER_CUBIC_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_filter_cubic, {{
- {&DeviceExtensions::vk_img_filter_cubic, VK_IMG_FILTER_CUBIC_EXTENSION_NAME}}})),
- std::make_pair(VK_EXT_FRAGMENT_DENSITY_MAP_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_fragment_density_map, {{
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
- std::make_pair(VK_EXT_FRAGMENT_SHADER_INTERLOCK_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_fragment_shader_interlock, {{
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- std::make_pair(VK_EXT_FULL_SCREEN_EXCLUSIVE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_full_screen_exclusive, {{
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME},
- {&DeviceExtensions::vk_khr_surface, VK_KHR_SURFACE_EXTENSION_NAME},
- {&DeviceExtensions::vk_khr_get_surface_capabilities_2, VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME},
- {&DeviceExtensions::vk_khr_swapchain, VK_KHR_SWAPCHAIN_EXTENSION_NAME}}})),
-#endif
- std::make_pair(VK_EXT_GLOBAL_PRIORITY_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_global_priority, {})),
- std::make_pair(VK_EXT_HDR_METADATA_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_hdr_metadata, {{
- {&DeviceExtensions::vk_khr_swapchain, VK_KHR_SWAPCHAIN_EXTENSION_NAME}}})),
- std::make_pair(VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_host_query_reset, {{
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
- std::make_pair(VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_image_drm_format_modifier, {{
- {&DeviceExtensions::vk_khr_bind_memory_2, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME},
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME},
- {&DeviceExtensions::vk_khr_image_format_list, VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME},
- {&DeviceExtensions::vk_khr_sampler_ycbcr_conversion, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME}}})),
- std::make_pair(VK_EXT_INDEX_TYPE_UINT8_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_index_type_uint8, {})),
- std::make_pair(VK_EXT_INLINE_UNIFORM_BLOCK_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_inline_uniform_block, {{
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME},
- {&DeviceExtensions::vk_khr_maintenance1, VK_KHR_MAINTENANCE1_EXTENSION_NAME}}})),
- std::make_pair(VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_line_rasterization, {{
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
- std::make_pair(VK_EXT_MEMORY_BUDGET_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_memory_budget, {{
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
- std::make_pair(VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_memory_priority, {{
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
- std::make_pair(VK_EXT_PCI_BUS_INFO_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_pci_bus_info, {{
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
- std::make_pair(VK_EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_pipeline_creation_feedback, {})),
- std::make_pair(VK_EXT_POST_DEPTH_COVERAGE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_post_depth_coverage, {})),
- std::make_pair(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_queue_family_foreign, {{
- {&DeviceExtensions::vk_khr_external_memory, VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME}}})),
- std::make_pair(VK_EXT_SAMPLE_LOCATIONS_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_sample_locations, {{
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
- std::make_pair(VK_EXT_SAMPLER_FILTER_MINMAX_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_sampler_filter_minmax, {{
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
- std::make_pair(VK_EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_scalar_block_layout, {{
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
- std::make_pair(VK_EXT_SEPARATE_STENCIL_USAGE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_separate_stencil_usage, {})),
- std::make_pair(VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_shader_demote_to_helper_invocation, {{
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
- std::make_pair(VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_shader_stencil_export, {})),
- std::make_pair(VK_EXT_SHADER_SUBGROUP_BALLOT_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_shader_subgroup_ballot, {})),
- std::make_pair(VK_EXT_SHADER_SUBGROUP_VOTE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_shader_subgroup_vote, {})),
- std::make_pair(VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_shader_viewport_index_layer, {})),
- std::make_pair(VK_EXT_SUBGROUP_SIZE_CONTROL_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_subgroup_size_control, {})),
- std::make_pair(VK_EXT_TEXEL_BUFFER_ALIGNMENT_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_texel_buffer_alignment, {{
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
- std::make_pair(VK_EXT_TEXTURE_COMPRESSION_ASTC_HDR_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_texture_compression_astc_hdr, {{
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
- std::make_pair(VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_transform_feedback, {{
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
- std::make_pair(VK_EXT_VALIDATION_CACHE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_validation_cache, {})),
- std::make_pair(VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_vertex_attribute_divisor, {{
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
- std::make_pair(VK_EXT_YCBCR_IMAGE_ARRAYS_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_ycbcr_image_arrays, {{
- {&DeviceExtensions::vk_khr_sampler_ycbcr_conversion, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME}}})),
-#ifdef VK_USE_PLATFORM_GGP
- std::make_pair(VK_GGP_FRAME_TOKEN_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ggp_frame_token, {{
- {&DeviceExtensions::vk_khr_swapchain, VK_KHR_SWAPCHAIN_EXTENSION_NAME},
- {&DeviceExtensions::vk_ggp_stream_descriptor_surface, VK_GGP_STREAM_DESCRIPTOR_SURFACE_EXTENSION_NAME}}})),
-#endif
- std::make_pair(VK_GOOGLE_DECORATE_STRING_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_google_decorate_string, {})),
- std::make_pair(VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_google_display_timing, {{
- {&DeviceExtensions::vk_khr_swapchain, VK_KHR_SWAPCHAIN_EXTENSION_NAME}}})),
- std::make_pair(VK_GOOGLE_HLSL_FUNCTIONALITY1_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_google_hlsl_functionality1, {})),
- std::make_pair(VK_IMG_FILTER_CUBIC_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_img_filter_cubic, {})),
- std::make_pair(VK_IMG_FORMAT_PVRTC_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_img_format_pvrtc, {})),
- std::make_pair(VK_INTEL_PERFORMANCE_QUERY_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_intel_performance_query, {})),
- std::make_pair(VK_INTEL_SHADER_INTEGER_FUNCTIONS_2_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_intel_shader_integer_functions_2, {{
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
- std::make_pair(VK_KHR_16BIT_STORAGE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_16bit_storage, {{
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME},
- {&DeviceExtensions::vk_khr_storage_buffer_storage_class, VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME}}})),
- std::make_pair(VK_KHR_8BIT_STORAGE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_8bit_storage, {{
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME},
- {&DeviceExtensions::vk_khr_storage_buffer_storage_class, VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME}}})),
- std::make_pair(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_bind_memory_2, {})),
- std::make_pair(VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_create_renderpass_2, {{
- {&DeviceExtensions::vk_khr_multiview, VK_KHR_MULTIVIEW_EXTENSION_NAME},
- {&DeviceExtensions::vk_khr_maintenance2, VK_KHR_MAINTENANCE2_EXTENSION_NAME}}})),
- std::make_pair(VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_dedicated_allocation, {{
- {&DeviceExtensions::vk_khr_get_memory_requirements_2, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME}}})),
- std::make_pair(VK_KHR_DEPTH_STENCIL_RESOLVE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_depth_stencil_resolve, {{
- {&DeviceExtensions::vk_khr_create_renderpass_2, VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME}}})),
- std::make_pair(VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_descriptor_update_template, {})),
- std::make_pair(VK_KHR_DEVICE_GROUP_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_device_group, {{
- {&DeviceExtensions::vk_khr_device_group_creation, VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME}}})),
- std::make_pair(VK_KHR_DISPLAY_SWAPCHAIN_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_display_swapchain, {{
- {&DeviceExtensions::vk_khr_swapchain, VK_KHR_SWAPCHAIN_EXTENSION_NAME},
- {&DeviceExtensions::vk_khr_display, VK_KHR_DISPLAY_EXTENSION_NAME}}})),
- std::make_pair(VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_draw_indirect_count, {})),
- std::make_pair(VK_KHR_DRIVER_PROPERTIES_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_driver_properties, {{
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
- std::make_pair(VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_external_fence, {{
- {&DeviceExtensions::vk_khr_external_fence_capabilities, VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME}}})),
- std::make_pair(VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_external_fence_fd, {{
- {&DeviceExtensions::vk_khr_external_fence, VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME}}})),
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- std::make_pair(VK_KHR_EXTERNAL_FENCE_WIN32_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_external_fence_win32, {{
- {&DeviceExtensions::vk_khr_external_fence, VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME}}})),
-#endif
- std::make_pair(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_external_memory, {{
- {&DeviceExtensions::vk_khr_external_memory_capabilities, VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME}}})),
- std::make_pair(VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_external_memory_fd, {{
- {&DeviceExtensions::vk_khr_external_memory, VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME}}})),
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- std::make_pair(VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_external_memory_win32, {{
- {&DeviceExtensions::vk_khr_external_memory, VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME}}})),
-#endif
- std::make_pair(VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_external_semaphore, {{
- {&DeviceExtensions::vk_khr_external_semaphore_capabilities, VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME}}})),
- std::make_pair(VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_external_semaphore_fd, {{
- {&DeviceExtensions::vk_khr_external_semaphore, VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME}}})),
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- std::make_pair(VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_external_semaphore_win32, {{
- {&DeviceExtensions::vk_khr_external_semaphore, VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME}}})),
-#endif
- std::make_pair(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_get_memory_requirements_2, {})),
- std::make_pair(VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_image_format_list, {})),
- std::make_pair(VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_imageless_framebuffer, {{
- {&DeviceExtensions::vk_khr_maintenance2, VK_KHR_MAINTENANCE2_EXTENSION_NAME},
- {&DeviceExtensions::vk_khr_image_format_list, VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME}}})),
- std::make_pair(VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_incremental_present, {{
- {&DeviceExtensions::vk_khr_swapchain, VK_KHR_SWAPCHAIN_EXTENSION_NAME}}})),
- std::make_pair(VK_KHR_MAINTENANCE1_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_maintenance1, {})),
- std::make_pair(VK_KHR_MAINTENANCE2_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_maintenance2, {})),
- std::make_pair(VK_KHR_MAINTENANCE3_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_maintenance3, {{
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
- std::make_pair(VK_KHR_MULTIVIEW_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_multiview, {{
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
- std::make_pair(VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_pipeline_executable_properties, {})),
- std::make_pair(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_push_descriptor, {{
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
- std::make_pair(VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_relaxed_block_layout, {})),
- std::make_pair(VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_sampler_mirror_clamp_to_edge, {})),
- std::make_pair(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_sampler_ycbcr_conversion, {{
- {&DeviceExtensions::vk_khr_maintenance1, VK_KHR_MAINTENANCE1_EXTENSION_NAME},
- {&DeviceExtensions::vk_khr_bind_memory_2, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME},
- {&DeviceExtensions::vk_khr_get_memory_requirements_2, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME},
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
- std::make_pair(VK_KHR_SHADER_ATOMIC_INT64_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_shader_atomic_int64, {{
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
- std::make_pair(VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_shader_draw_parameters, {})),
- std::make_pair(VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_shader_float16_int8, {{
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
- std::make_pair(VK_KHR_SHADER_FLOAT_CONTROLS_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_shader_float_controls, {{
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
- std::make_pair(VK_KHR_SHARED_PRESENTABLE_IMAGE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_shared_presentable_image, {{
- {&DeviceExtensions::vk_khr_swapchain, VK_KHR_SWAPCHAIN_EXTENSION_NAME},
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME},
- {&DeviceExtensions::vk_khr_get_surface_capabilities_2, VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME}}})),
- std::make_pair(VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_storage_buffer_storage_class, {})),
- std::make_pair(VK_KHR_SWAPCHAIN_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_swapchain, {{
- {&DeviceExtensions::vk_khr_surface, VK_KHR_SURFACE_EXTENSION_NAME}}})),
- std::make_pair(VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_swapchain_mutable_format, {{
- {&DeviceExtensions::vk_khr_swapchain, VK_KHR_SWAPCHAIN_EXTENSION_NAME},
- {&DeviceExtensions::vk_khr_maintenance2, VK_KHR_MAINTENANCE2_EXTENSION_NAME},
- {&DeviceExtensions::vk_khr_image_format_list, VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME}}})),
- std::make_pair(VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_uniform_buffer_standard_layout, {{
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
- std::make_pair(VK_KHR_VARIABLE_POINTERS_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_variable_pointers, {{
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME},
- {&DeviceExtensions::vk_khr_storage_buffer_storage_class, VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME}}})),
- std::make_pair(VK_KHR_VULKAN_MEMORY_MODEL_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_vulkan_memory_model, {})),
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- std::make_pair(VK_KHR_WIN32_KEYED_MUTEX_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_win32_keyed_mutex, {{
- {&DeviceExtensions::vk_khr_external_memory_win32, VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME}}})),
-#endif
- std::make_pair(VK_NVX_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nvx_device_generated_commands, {})),
- std::make_pair(VK_NVX_IMAGE_VIEW_HANDLE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nvx_image_view_handle, {})),
- std::make_pair(VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nvx_multiview_per_view_attributes, {{
- {&DeviceExtensions::vk_khr_multiview, VK_KHR_MULTIVIEW_EXTENSION_NAME}}})),
- std::make_pair(VK_NV_CLIP_SPACE_W_SCALING_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_clip_space_w_scaling, {})),
- std::make_pair(VK_NV_COMPUTE_SHADER_DERIVATIVES_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_compute_shader_derivatives, {{
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
- std::make_pair(VK_NV_COOPERATIVE_MATRIX_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_cooperative_matrix, {{
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
- std::make_pair(VK_NV_CORNER_SAMPLED_IMAGE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_corner_sampled_image, {{
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
- std::make_pair(VK_NV_COVERAGE_REDUCTION_MODE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_coverage_reduction_mode, {{
- {&DeviceExtensions::vk_nv_framebuffer_mixed_samples, VK_NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME}}})),
- std::make_pair(VK_NV_DEDICATED_ALLOCATION_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_dedicated_allocation, {})),
- std::make_pair(VK_NV_DEDICATED_ALLOCATION_IMAGE_ALIASING_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_dedicated_allocation_image_aliasing, {{
- {&DeviceExtensions::vk_khr_dedicated_allocation, VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME}}})),
- std::make_pair(VK_NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_device_diagnostic_checkpoints, {{
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
- std::make_pair(VK_NV_EXTERNAL_MEMORY_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_external_memory, {{
- {&DeviceExtensions::vk_nv_external_memory_capabilities, VK_NV_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME}}})),
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- std::make_pair(VK_NV_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_external_memory_win32, {{
- {&DeviceExtensions::vk_nv_external_memory, VK_NV_EXTERNAL_MEMORY_EXTENSION_NAME}}})),
-#endif
- std::make_pair(VK_NV_FILL_RECTANGLE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_fill_rectangle, {})),
- std::make_pair(VK_NV_FRAGMENT_COVERAGE_TO_COLOR_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_fragment_coverage_to_color, {})),
- std::make_pair(VK_NV_FRAGMENT_SHADER_BARYCENTRIC_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_fragment_shader_barycentric, {{
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
- std::make_pair(VK_NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_framebuffer_mixed_samples, {})),
- std::make_pair(VK_NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_geometry_shader_passthrough, {})),
- std::make_pair(VK_NV_GLSL_SHADER_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_glsl_shader, {})),
- std::make_pair(VK_NV_MESH_SHADER_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_mesh_shader, {{
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
- std::make_pair(VK_NV_RAY_TRACING_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_ray_tracing, {{
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME},
- {&DeviceExtensions::vk_khr_get_memory_requirements_2, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME}}})),
- std::make_pair(VK_NV_REPRESENTATIVE_FRAGMENT_TEST_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_representative_fragment_test, {})),
- std::make_pair(VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_sample_mask_override_coverage, {})),
- std::make_pair(VK_NV_SCISSOR_EXCLUSIVE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_scissor_exclusive, {{
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
- std::make_pair(VK_NV_SHADER_IMAGE_FOOTPRINT_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_shader_image_footprint, {{
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
- std::make_pair(VK_NV_SHADER_SM_BUILTINS_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_shader_sm_builtins, {})),
- std::make_pair(VK_NV_SHADER_SUBGROUP_PARTITIONED_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_shader_subgroup_partitioned, {})),
- std::make_pair(VK_NV_SHADING_RATE_IMAGE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_shading_rate_image, {{
- {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
- std::make_pair(VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_viewport_array2, {})),
- std::make_pair(VK_NV_VIEWPORT_SWIZZLE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_viewport_swizzle, {})),
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- std::make_pair(VK_NV_WIN32_KEYED_MUTEX_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_win32_keyed_mutex, {{
- {&DeviceExtensions::vk_nv_external_memory_win32, VK_NV_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME}}})),
-#endif
- };
-
- static const DeviceInfo empty_info {nullptr, DeviceReqVec()};
- DeviceInfoMap::const_iterator info = info_map.find(name);
- if ( info != info_map.cend()) {
- return info->second;
- }
- return empty_info;
- }
-
- DeviceExtensions() = default;
- DeviceExtensions(const InstanceExtensions& instance_ext) : InstanceExtensions(instance_ext) {}
-
- uint32_t InitFromDeviceCreateInfo(const InstanceExtensions *instance_extensions, uint32_t requested_api_version,
- const VkDeviceCreateInfo *pCreateInfo) {
-        // Initialize: reset *this* to defaults, then copy the base class (instance extension) fields from the input.
- assert(instance_extensions);
- *this = DeviceExtensions(*instance_extensions);
-
-
- static const std::vector<const char *> V_1_1_promoted_device_apis = {
- VK_KHR_16BIT_STORAGE_EXTENSION_NAME,
- VK_KHR_BIND_MEMORY_2_EXTENSION_NAME,
- VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME,
- VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME,
- VK_KHR_DEVICE_GROUP_EXTENSION_NAME,
- VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME,
- VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME,
- VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME,
- VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME,
- VK_KHR_MAINTENANCE1_EXTENSION_NAME,
- VK_KHR_MAINTENANCE2_EXTENSION_NAME,
- VK_KHR_MAINTENANCE3_EXTENSION_NAME,
- VK_KHR_MULTIVIEW_EXTENSION_NAME,
- VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME,
- VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME,
- VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME,
- VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME,
- VK_KHR_VARIABLE_POINTERS_EXTENSION_NAME,
- "VK_VERSION_1_1",
- };
-
- // Initialize struct data, robust to invalid pCreateInfo
- if (pCreateInfo->ppEnabledExtensionNames) {
- for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
- if (!pCreateInfo->ppEnabledExtensionNames[i]) continue;
- auto info = get_info(pCreateInfo->ppEnabledExtensionNames[i]);
- if(info.state) this->*(info.state) = true;
- }
- }
- uint32_t api_version = NormalizeApiVersion(requested_api_version);
- if (api_version >= VK_API_VERSION_1_1) {
- for (auto promoted_ext : V_1_1_promoted_device_apis) {
- auto info = get_info(promoted_ext);
- assert(info.state);
- if (info.state) this->*(info.state) = true;
- }
- }
- return api_version;
- }
-};
-
-static const std::set<std::string> kDeviceExtensionNames = {
- VK_AMD_BUFFER_MARKER_EXTENSION_NAME,
- VK_AMD_DEVICE_COHERENT_MEMORY_EXTENSION_NAME,
- VK_AMD_DISPLAY_NATIVE_HDR_EXTENSION_NAME,
- VK_AMD_DRAW_INDIRECT_COUNT_EXTENSION_NAME,
- VK_AMD_GCN_SHADER_EXTENSION_NAME,
- VK_AMD_GPU_SHADER_HALF_FLOAT_EXTENSION_NAME,
- VK_AMD_GPU_SHADER_INT16_EXTENSION_NAME,
- VK_AMD_MEMORY_OVERALLOCATION_BEHAVIOR_EXTENSION_NAME,
- VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME,
- VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_EXTENSION_NAME,
- VK_AMD_PIPELINE_COMPILER_CONTROL_EXTENSION_NAME,
- VK_AMD_RASTERIZATION_ORDER_EXTENSION_NAME,
- VK_AMD_SHADER_BALLOT_EXTENSION_NAME,
- VK_AMD_SHADER_CORE_PROPERTIES_EXTENSION_NAME,
- VK_AMD_SHADER_CORE_PROPERTIES_2_EXTENSION_NAME,
- VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_EXTENSION_NAME,
- VK_AMD_SHADER_FRAGMENT_MASK_EXTENSION_NAME,
- VK_AMD_SHADER_IMAGE_LOAD_STORE_LOD_EXTENSION_NAME,
- VK_AMD_SHADER_INFO_EXTENSION_NAME,
- VK_AMD_SHADER_TRINARY_MINMAX_EXTENSION_NAME,
- VK_AMD_TEXTURE_GATHER_BIAS_LOD_EXTENSION_NAME,
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
- VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME,
-#endif
- VK_EXT_ASTC_DECODE_MODE_EXTENSION_NAME,
- VK_EXT_BLEND_OPERATION_ADVANCED_EXTENSION_NAME,
- VK_EXT_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME,
- VK_EXT_CALIBRATED_TIMESTAMPS_EXTENSION_NAME,
- VK_EXT_CONDITIONAL_RENDERING_EXTENSION_NAME,
- VK_EXT_CONSERVATIVE_RASTERIZATION_EXTENSION_NAME,
- VK_EXT_DEBUG_MARKER_EXTENSION_NAME,
- VK_EXT_DEPTH_CLIP_ENABLE_EXTENSION_NAME,
- VK_EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME,
- VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME,
- VK_EXT_DISCARD_RECTANGLES_EXTENSION_NAME,
- VK_EXT_DISPLAY_CONTROL_EXTENSION_NAME,
- VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME,
- VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME,
- VK_EXT_FILTER_CUBIC_EXTENSION_NAME,
- VK_EXT_FRAGMENT_DENSITY_MAP_EXTENSION_NAME,
- VK_EXT_FRAGMENT_SHADER_INTERLOCK_EXTENSION_NAME,
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- VK_EXT_FULL_SCREEN_EXCLUSIVE_EXTENSION_NAME,
-#endif
- VK_EXT_GLOBAL_PRIORITY_EXTENSION_NAME,
- VK_EXT_HDR_METADATA_EXTENSION_NAME,
- VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME,
- VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME,
- VK_EXT_INDEX_TYPE_UINT8_EXTENSION_NAME,
- VK_EXT_INLINE_UNIFORM_BLOCK_EXTENSION_NAME,
- VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME,
- VK_EXT_MEMORY_BUDGET_EXTENSION_NAME,
- VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME,
- VK_EXT_PCI_BUS_INFO_EXTENSION_NAME,
- VK_EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME,
- VK_EXT_POST_DEPTH_COVERAGE_EXTENSION_NAME,
- VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME,
- VK_EXT_SAMPLE_LOCATIONS_EXTENSION_NAME,
- VK_EXT_SAMPLER_FILTER_MINMAX_EXTENSION_NAME,
- VK_EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME,
- VK_EXT_SEPARATE_STENCIL_USAGE_EXTENSION_NAME,
- VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_EXTENSION_NAME,
- VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME,
- VK_EXT_SHADER_SUBGROUP_BALLOT_EXTENSION_NAME,
- VK_EXT_SHADER_SUBGROUP_VOTE_EXTENSION_NAME,
- VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME,
- VK_EXT_SUBGROUP_SIZE_CONTROL_EXTENSION_NAME,
- VK_EXT_TEXEL_BUFFER_ALIGNMENT_EXTENSION_NAME,
- VK_EXT_TEXTURE_COMPRESSION_ASTC_HDR_EXTENSION_NAME,
- VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME,
- VK_EXT_VALIDATION_CACHE_EXTENSION_NAME,
- VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME,
- VK_EXT_YCBCR_IMAGE_ARRAYS_EXTENSION_NAME,
-#ifdef VK_USE_PLATFORM_GGP
- VK_GGP_FRAME_TOKEN_EXTENSION_NAME,
-#endif
- VK_GOOGLE_DECORATE_STRING_EXTENSION_NAME,
- VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME,
- VK_GOOGLE_HLSL_FUNCTIONALITY1_EXTENSION_NAME,
- VK_IMG_FILTER_CUBIC_EXTENSION_NAME,
- VK_IMG_FORMAT_PVRTC_EXTENSION_NAME,
- VK_INTEL_PERFORMANCE_QUERY_EXTENSION_NAME,
- VK_INTEL_SHADER_INTEGER_FUNCTIONS_2_EXTENSION_NAME,
- VK_KHR_16BIT_STORAGE_EXTENSION_NAME,
- VK_KHR_8BIT_STORAGE_EXTENSION_NAME,
- VK_KHR_BIND_MEMORY_2_EXTENSION_NAME,
- VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME,
- VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME,
- VK_KHR_DEPTH_STENCIL_RESOLVE_EXTENSION_NAME,
- VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME,
- VK_KHR_DEVICE_GROUP_EXTENSION_NAME,
- VK_KHR_DISPLAY_SWAPCHAIN_EXTENSION_NAME,
- VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME,
- VK_KHR_DRIVER_PROPERTIES_EXTENSION_NAME,
- VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME,
- VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME,
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- VK_KHR_EXTERNAL_FENCE_WIN32_EXTENSION_NAME,
-#endif
- VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME,
- VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME,
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME,
-#endif
- VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME,
- VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME,
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME,
-#endif
- VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME,
- VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME,
- VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME,
- VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME,
- VK_KHR_MAINTENANCE1_EXTENSION_NAME,
- VK_KHR_MAINTENANCE2_EXTENSION_NAME,
- VK_KHR_MAINTENANCE3_EXTENSION_NAME,
- VK_KHR_MULTIVIEW_EXTENSION_NAME,
- VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_EXTENSION_NAME,
- VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME,
- VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME,
- VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME,
- VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME,
- VK_KHR_SHADER_ATOMIC_INT64_EXTENSION_NAME,
- VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME,
- VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME,
- VK_KHR_SHADER_FLOAT_CONTROLS_EXTENSION_NAME,
- VK_KHR_SHARED_PRESENTABLE_IMAGE_EXTENSION_NAME,
- VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME,
- VK_KHR_SWAPCHAIN_EXTENSION_NAME,
- VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_EXTENSION_NAME,
- VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_EXTENSION_NAME,
- VK_KHR_VARIABLE_POINTERS_EXTENSION_NAME,
- VK_KHR_VULKAN_MEMORY_MODEL_EXTENSION_NAME,
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- VK_KHR_WIN32_KEYED_MUTEX_EXTENSION_NAME,
-#endif
- VK_NVX_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME,
- VK_NVX_IMAGE_VIEW_HANDLE_EXTENSION_NAME,
- VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME,
- VK_NV_CLIP_SPACE_W_SCALING_EXTENSION_NAME,
- VK_NV_COMPUTE_SHADER_DERIVATIVES_EXTENSION_NAME,
- VK_NV_COOPERATIVE_MATRIX_EXTENSION_NAME,
- VK_NV_CORNER_SAMPLED_IMAGE_EXTENSION_NAME,
- VK_NV_COVERAGE_REDUCTION_MODE_EXTENSION_NAME,
- VK_NV_DEDICATED_ALLOCATION_EXTENSION_NAME,
- VK_NV_DEDICATED_ALLOCATION_IMAGE_ALIASING_EXTENSION_NAME,
- VK_NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_EXTENSION_NAME,
- VK_NV_EXTERNAL_MEMORY_EXTENSION_NAME,
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- VK_NV_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME,
-#endif
- VK_NV_FILL_RECTANGLE_EXTENSION_NAME,
- VK_NV_FRAGMENT_COVERAGE_TO_COLOR_EXTENSION_NAME,
- VK_NV_FRAGMENT_SHADER_BARYCENTRIC_EXTENSION_NAME,
- VK_NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME,
- VK_NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME,
- VK_NV_GLSL_SHADER_EXTENSION_NAME,
- VK_NV_MESH_SHADER_EXTENSION_NAME,
- VK_NV_RAY_TRACING_EXTENSION_NAME,
- VK_NV_REPRESENTATIVE_FRAGMENT_TEST_EXTENSION_NAME,
- VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_EXTENSION_NAME,
- VK_NV_SCISSOR_EXCLUSIVE_EXTENSION_NAME,
- VK_NV_SHADER_IMAGE_FOOTPRINT_EXTENSION_NAME,
- VK_NV_SHADER_SM_BUILTINS_EXTENSION_NAME,
- VK_NV_SHADER_SUBGROUP_PARTITIONED_EXTENSION_NAME,
- VK_NV_SHADING_RATE_IMAGE_EXTENSION_NAME,
- VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME,
- VK_NV_VIEWPORT_SWIZZLE_EXTENSION_NAME,
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- VK_NV_WIN32_KEYED_MUTEX_EXTENSION_NAME,
-#endif
-};
-
-
-#endif // VK_EXTENSION_HELPER_H_
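
The DeviceExtensions helper deleted above maps each extension name to a pointer-to-member enable flag plus the list of extensions it depends on; InitFromDeviceCreateInfo sets the flag for every requested name and for the names promoted to core in Vulkan 1.1, and other checks can walk the requirement lists to report missing dependencies. Below is a minimal standalone sketch of that lookup pattern; the names Extensions, ExtInfo and kInfoMap are illustrative only and do not appear in the generated header.

#include <cstdio>
#include <map>
#include <string>
#include <vector>

struct Extensions {
    bool khr_surface = false;     // stand-in flags; the real struct has one bool per extension
    bool khr_swapchain = false;
};

struct ExtInfo {
    bool Extensions::*state;                  // which flag to set when the extension is enabled
    std::vector<const char *> requirements;   // extensions that must also be enabled
};

static const std::map<std::string, ExtInfo> kInfoMap = {
    {"VK_KHR_surface",   {&Extensions::khr_surface, {}}},
    {"VK_KHR_swapchain", {&Extensions::khr_swapchain, {"VK_KHR_surface"}}},
};

int main() {
    Extensions exts;
    const char *requested[] = {"VK_KHR_swapchain"};   // VK_KHR_surface deliberately missing
    for (const char *name : requested) {
        auto it = kInfoMap.find(name);
        if (it == kInfoMap.end()) continue;
        exts.*(it->second.state) = true;              // mirrors: this->*(info.state) = true;
        for (const char *req : it->second.requirements) {
            auto req_it = kInfoMap.find(req);
            if (req_it != kInfoMap.end() && !(exts.*(req_it->second.state))) {
                std::printf("%s requires %s, which was not enabled\n", name, req);
            }
        }
    }
    return 0;
}
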
diff --git a/layers/generated/vk_layer_dispatch_table.h b/layers/generated/vk_layer_dispatch_table.h
deleted file mode 100644
index 3aed0c5eb..000000000
--- a/layers/generated/vk_layer_dispatch_table.h
+++ /dev/null
@@ -1,646 +0,0 @@
-// *** THIS FILE IS GENERATED - DO NOT EDIT ***
-// See layer_dispatch_table_generator.py for modifications
-
-/*
- * Copyright (c) 2015-2019 The Khronos Group Inc.
- * Copyright (c) 2015-2019 Valve Corporation
- * Copyright (c) 2015-2019 LunarG, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * Author: Mark Lobodzinski <mark@lunarg.com>
- * Author: Mark Young <marky@lunarg.com>
- */
-
-#pragma once
-
-typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_GetPhysicalDeviceProcAddr)(VkInstance instance, const char* pName);
-
-// Instance function pointer dispatch table
-typedef struct VkLayerInstanceDispatchTable_ {
- // Manually add in GetPhysicalDeviceProcAddr entry
- PFN_GetPhysicalDeviceProcAddr GetPhysicalDeviceProcAddr;
-
- // ---- Core 1_0 commands
- PFN_vkCreateInstance CreateInstance;
- PFN_vkDestroyInstance DestroyInstance;
- PFN_vkEnumeratePhysicalDevices EnumeratePhysicalDevices;
- PFN_vkGetPhysicalDeviceFeatures GetPhysicalDeviceFeatures;
- PFN_vkGetPhysicalDeviceFormatProperties GetPhysicalDeviceFormatProperties;
- PFN_vkGetPhysicalDeviceImageFormatProperties GetPhysicalDeviceImageFormatProperties;
- PFN_vkGetPhysicalDeviceProperties GetPhysicalDeviceProperties;
- PFN_vkGetPhysicalDeviceQueueFamilyProperties GetPhysicalDeviceQueueFamilyProperties;
- PFN_vkGetPhysicalDeviceMemoryProperties GetPhysicalDeviceMemoryProperties;
- PFN_vkGetInstanceProcAddr GetInstanceProcAddr;
- PFN_vkCreateDevice CreateDevice;
- PFN_vkEnumerateInstanceExtensionProperties EnumerateInstanceExtensionProperties;
- PFN_vkEnumerateDeviceExtensionProperties EnumerateDeviceExtensionProperties;
- PFN_vkEnumerateInstanceLayerProperties EnumerateInstanceLayerProperties;
- PFN_vkEnumerateDeviceLayerProperties EnumerateDeviceLayerProperties;
- PFN_vkGetPhysicalDeviceSparseImageFormatProperties GetPhysicalDeviceSparseImageFormatProperties;
-
- // ---- Core 1_1 commands
- PFN_vkEnumerateInstanceVersion EnumerateInstanceVersion;
- PFN_vkEnumeratePhysicalDeviceGroups EnumeratePhysicalDeviceGroups;
- PFN_vkGetPhysicalDeviceFeatures2 GetPhysicalDeviceFeatures2;
- PFN_vkGetPhysicalDeviceProperties2 GetPhysicalDeviceProperties2;
- PFN_vkGetPhysicalDeviceFormatProperties2 GetPhysicalDeviceFormatProperties2;
- PFN_vkGetPhysicalDeviceImageFormatProperties2 GetPhysicalDeviceImageFormatProperties2;
- PFN_vkGetPhysicalDeviceQueueFamilyProperties2 GetPhysicalDeviceQueueFamilyProperties2;
- PFN_vkGetPhysicalDeviceMemoryProperties2 GetPhysicalDeviceMemoryProperties2;
- PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 GetPhysicalDeviceSparseImageFormatProperties2;
- PFN_vkGetPhysicalDeviceExternalBufferProperties GetPhysicalDeviceExternalBufferProperties;
- PFN_vkGetPhysicalDeviceExternalFenceProperties GetPhysicalDeviceExternalFenceProperties;
- PFN_vkGetPhysicalDeviceExternalSemaphoreProperties GetPhysicalDeviceExternalSemaphoreProperties;
-
- // ---- VK_KHR_surface extension commands
- PFN_vkDestroySurfaceKHR DestroySurfaceKHR;
- PFN_vkGetPhysicalDeviceSurfaceSupportKHR GetPhysicalDeviceSurfaceSupportKHR;
- PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR GetPhysicalDeviceSurfaceCapabilitiesKHR;
- PFN_vkGetPhysicalDeviceSurfaceFormatsKHR GetPhysicalDeviceSurfaceFormatsKHR;
- PFN_vkGetPhysicalDeviceSurfacePresentModesKHR GetPhysicalDeviceSurfacePresentModesKHR;
-
- // ---- VK_KHR_swapchain extension commands
- PFN_vkGetPhysicalDevicePresentRectanglesKHR GetPhysicalDevicePresentRectanglesKHR;
-
- // ---- VK_KHR_display extension commands
- PFN_vkGetPhysicalDeviceDisplayPropertiesKHR GetPhysicalDeviceDisplayPropertiesKHR;
- PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR GetPhysicalDeviceDisplayPlanePropertiesKHR;
- PFN_vkGetDisplayPlaneSupportedDisplaysKHR GetDisplayPlaneSupportedDisplaysKHR;
- PFN_vkGetDisplayModePropertiesKHR GetDisplayModePropertiesKHR;
- PFN_vkCreateDisplayModeKHR CreateDisplayModeKHR;
- PFN_vkGetDisplayPlaneCapabilitiesKHR GetDisplayPlaneCapabilitiesKHR;
- PFN_vkCreateDisplayPlaneSurfaceKHR CreateDisplayPlaneSurfaceKHR;
-
- // ---- VK_KHR_xlib_surface extension commands
-#ifdef VK_USE_PLATFORM_XLIB_KHR
- PFN_vkCreateXlibSurfaceKHR CreateXlibSurfaceKHR;
-#endif // VK_USE_PLATFORM_XLIB_KHR
-#ifdef VK_USE_PLATFORM_XLIB_KHR
- PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR GetPhysicalDeviceXlibPresentationSupportKHR;
-#endif // VK_USE_PLATFORM_XLIB_KHR
-
- // ---- VK_KHR_xcb_surface extension commands
-#ifdef VK_USE_PLATFORM_XCB_KHR
- PFN_vkCreateXcbSurfaceKHR CreateXcbSurfaceKHR;
-#endif // VK_USE_PLATFORM_XCB_KHR
-#ifdef VK_USE_PLATFORM_XCB_KHR
- PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR GetPhysicalDeviceXcbPresentationSupportKHR;
-#endif // VK_USE_PLATFORM_XCB_KHR
-
- // ---- VK_KHR_wayland_surface extension commands
-#ifdef VK_USE_PLATFORM_WAYLAND_KHR
- PFN_vkCreateWaylandSurfaceKHR CreateWaylandSurfaceKHR;
-#endif // VK_USE_PLATFORM_WAYLAND_KHR
-#ifdef VK_USE_PLATFORM_WAYLAND_KHR
- PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR GetPhysicalDeviceWaylandPresentationSupportKHR;
-#endif // VK_USE_PLATFORM_WAYLAND_KHR
-
- // ---- VK_KHR_android_surface extension commands
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
- PFN_vkCreateAndroidSurfaceKHR CreateAndroidSurfaceKHR;
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-
- // ---- VK_KHR_win32_surface extension commands
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- PFN_vkCreateWin32SurfaceKHR CreateWin32SurfaceKHR;
-#endif // VK_USE_PLATFORM_WIN32_KHR
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR GetPhysicalDeviceWin32PresentationSupportKHR;
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
- // ---- VK_KHR_get_physical_device_properties2 extension commands
- PFN_vkGetPhysicalDeviceFeatures2KHR GetPhysicalDeviceFeatures2KHR;
- PFN_vkGetPhysicalDeviceProperties2KHR GetPhysicalDeviceProperties2KHR;
- PFN_vkGetPhysicalDeviceFormatProperties2KHR GetPhysicalDeviceFormatProperties2KHR;
- PFN_vkGetPhysicalDeviceImageFormatProperties2KHR GetPhysicalDeviceImageFormatProperties2KHR;
- PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR GetPhysicalDeviceQueueFamilyProperties2KHR;
- PFN_vkGetPhysicalDeviceMemoryProperties2KHR GetPhysicalDeviceMemoryProperties2KHR;
- PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR GetPhysicalDeviceSparseImageFormatProperties2KHR;
-
- // ---- VK_KHR_device_group_creation extension commands
- PFN_vkEnumeratePhysicalDeviceGroupsKHR EnumeratePhysicalDeviceGroupsKHR;
-
- // ---- VK_KHR_external_memory_capabilities extension commands
- PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR GetPhysicalDeviceExternalBufferPropertiesKHR;
-
- // ---- VK_KHR_external_semaphore_capabilities extension commands
- PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR GetPhysicalDeviceExternalSemaphorePropertiesKHR;
-
- // ---- VK_KHR_external_fence_capabilities extension commands
- PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR GetPhysicalDeviceExternalFencePropertiesKHR;
-
- // ---- VK_KHR_get_surface_capabilities2 extension commands
- PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR GetPhysicalDeviceSurfaceCapabilities2KHR;
- PFN_vkGetPhysicalDeviceSurfaceFormats2KHR GetPhysicalDeviceSurfaceFormats2KHR;
-
- // ---- VK_KHR_get_display_properties2 extension commands
- PFN_vkGetPhysicalDeviceDisplayProperties2KHR GetPhysicalDeviceDisplayProperties2KHR;
- PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR GetPhysicalDeviceDisplayPlaneProperties2KHR;
- PFN_vkGetDisplayModeProperties2KHR GetDisplayModeProperties2KHR;
- PFN_vkGetDisplayPlaneCapabilities2KHR GetDisplayPlaneCapabilities2KHR;
-
- // ---- VK_EXT_debug_report extension commands
- PFN_vkCreateDebugReportCallbackEXT CreateDebugReportCallbackEXT;
- PFN_vkDestroyDebugReportCallbackEXT DestroyDebugReportCallbackEXT;
- PFN_vkDebugReportMessageEXT DebugReportMessageEXT;
-
- // ---- VK_GGP_stream_descriptor_surface extension commands
-#ifdef VK_USE_PLATFORM_GGP
- PFN_vkCreateStreamDescriptorSurfaceGGP CreateStreamDescriptorSurfaceGGP;
-#endif // VK_USE_PLATFORM_GGP
-
- // ---- VK_NV_external_memory_capabilities extension commands
- PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV GetPhysicalDeviceExternalImageFormatPropertiesNV;
-
- // ---- VK_NN_vi_surface extension commands
-#ifdef VK_USE_PLATFORM_VI_NN
- PFN_vkCreateViSurfaceNN CreateViSurfaceNN;
-#endif // VK_USE_PLATFORM_VI_NN
-
- // ---- VK_NVX_device_generated_commands extension commands
- PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX GetPhysicalDeviceGeneratedCommandsPropertiesNVX;
-
- // ---- VK_EXT_direct_mode_display extension commands
- PFN_vkReleaseDisplayEXT ReleaseDisplayEXT;
-
- // ---- VK_EXT_acquire_xlib_display extension commands
-#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
- PFN_vkAcquireXlibDisplayEXT AcquireXlibDisplayEXT;
-#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
-#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
- PFN_vkGetRandROutputDisplayEXT GetRandROutputDisplayEXT;
-#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
-
- // ---- VK_EXT_display_surface_counter extension commands
- PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT GetPhysicalDeviceSurfaceCapabilities2EXT;
-
- // ---- VK_MVK_ios_surface extension commands
-#ifdef VK_USE_PLATFORM_IOS_MVK
- PFN_vkCreateIOSSurfaceMVK CreateIOSSurfaceMVK;
-#endif // VK_USE_PLATFORM_IOS_MVK
-
- // ---- VK_MVK_macos_surface extension commands
-#ifdef VK_USE_PLATFORM_MACOS_MVK
- PFN_vkCreateMacOSSurfaceMVK CreateMacOSSurfaceMVK;
-#endif // VK_USE_PLATFORM_MACOS_MVK
-
- // ---- VK_EXT_debug_utils extension commands
- PFN_vkCreateDebugUtilsMessengerEXT CreateDebugUtilsMessengerEXT;
- PFN_vkDestroyDebugUtilsMessengerEXT DestroyDebugUtilsMessengerEXT;
- PFN_vkSubmitDebugUtilsMessageEXT SubmitDebugUtilsMessageEXT;
-
- // ---- VK_EXT_sample_locations extension commands
- PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT GetPhysicalDeviceMultisamplePropertiesEXT;
-
- // ---- VK_EXT_calibrated_timestamps extension commands
- PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT GetPhysicalDeviceCalibrateableTimeDomainsEXT;
-
- // ---- VK_FUCHSIA_imagepipe_surface extension commands
-#ifdef VK_USE_PLATFORM_FUCHSIA
- PFN_vkCreateImagePipeSurfaceFUCHSIA CreateImagePipeSurfaceFUCHSIA;
-#endif // VK_USE_PLATFORM_FUCHSIA
-
- // ---- VK_EXT_metal_surface extension commands
-#ifdef VK_USE_PLATFORM_METAL_EXT
- PFN_vkCreateMetalSurfaceEXT CreateMetalSurfaceEXT;
-#endif // VK_USE_PLATFORM_METAL_EXT
-
- // ---- VK_NV_cooperative_matrix extension commands
- PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV GetPhysicalDeviceCooperativeMatrixPropertiesNV;
-
- // ---- VK_NV_coverage_reduction_mode extension commands
- PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV GetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV;
-
- // ---- VK_EXT_full_screen_exclusive extension commands
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT GetPhysicalDeviceSurfacePresentModes2EXT;
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
- // ---- VK_EXT_headless_surface extension commands
- PFN_vkCreateHeadlessSurfaceEXT CreateHeadlessSurfaceEXT;
-} VkLayerInstanceDispatchTable;
-
-// Device function pointer dispatch table
-typedef struct VkLayerDispatchTable_ {
-
- // ---- Core 1_0 commands
- PFN_vkGetDeviceProcAddr GetDeviceProcAddr;
- PFN_vkDestroyDevice DestroyDevice;
- PFN_vkGetDeviceQueue GetDeviceQueue;
- PFN_vkQueueSubmit QueueSubmit;
- PFN_vkQueueWaitIdle QueueWaitIdle;
- PFN_vkDeviceWaitIdle DeviceWaitIdle;
- PFN_vkAllocateMemory AllocateMemory;
- PFN_vkFreeMemory FreeMemory;
- PFN_vkMapMemory MapMemory;
- PFN_vkUnmapMemory UnmapMemory;
- PFN_vkFlushMappedMemoryRanges FlushMappedMemoryRanges;
- PFN_vkInvalidateMappedMemoryRanges InvalidateMappedMemoryRanges;
- PFN_vkGetDeviceMemoryCommitment GetDeviceMemoryCommitment;
- PFN_vkBindBufferMemory BindBufferMemory;
- PFN_vkBindImageMemory BindImageMemory;
- PFN_vkGetBufferMemoryRequirements GetBufferMemoryRequirements;
- PFN_vkGetImageMemoryRequirements GetImageMemoryRequirements;
- PFN_vkGetImageSparseMemoryRequirements GetImageSparseMemoryRequirements;
- PFN_vkQueueBindSparse QueueBindSparse;
- PFN_vkCreateFence CreateFence;
- PFN_vkDestroyFence DestroyFence;
- PFN_vkResetFences ResetFences;
- PFN_vkGetFenceStatus GetFenceStatus;
- PFN_vkWaitForFences WaitForFences;
- PFN_vkCreateSemaphore CreateSemaphore;
- PFN_vkDestroySemaphore DestroySemaphore;
- PFN_vkCreateEvent CreateEvent;
- PFN_vkDestroyEvent DestroyEvent;
- PFN_vkGetEventStatus GetEventStatus;
- PFN_vkSetEvent SetEvent;
- PFN_vkResetEvent ResetEvent;
- PFN_vkCreateQueryPool CreateQueryPool;
- PFN_vkDestroyQueryPool DestroyQueryPool;
- PFN_vkGetQueryPoolResults GetQueryPoolResults;
- PFN_vkCreateBuffer CreateBuffer;
- PFN_vkDestroyBuffer DestroyBuffer;
- PFN_vkCreateBufferView CreateBufferView;
- PFN_vkDestroyBufferView DestroyBufferView;
- PFN_vkCreateImage CreateImage;
- PFN_vkDestroyImage DestroyImage;
- PFN_vkGetImageSubresourceLayout GetImageSubresourceLayout;
- PFN_vkCreateImageView CreateImageView;
- PFN_vkDestroyImageView DestroyImageView;
- PFN_vkCreateShaderModule CreateShaderModule;
- PFN_vkDestroyShaderModule DestroyShaderModule;
- PFN_vkCreatePipelineCache CreatePipelineCache;
- PFN_vkDestroyPipelineCache DestroyPipelineCache;
- PFN_vkGetPipelineCacheData GetPipelineCacheData;
- PFN_vkMergePipelineCaches MergePipelineCaches;
- PFN_vkCreateGraphicsPipelines CreateGraphicsPipelines;
- PFN_vkCreateComputePipelines CreateComputePipelines;
- PFN_vkDestroyPipeline DestroyPipeline;
- PFN_vkCreatePipelineLayout CreatePipelineLayout;
- PFN_vkDestroyPipelineLayout DestroyPipelineLayout;
- PFN_vkCreateSampler CreateSampler;
- PFN_vkDestroySampler DestroySampler;
- PFN_vkCreateDescriptorSetLayout CreateDescriptorSetLayout;
- PFN_vkDestroyDescriptorSetLayout DestroyDescriptorSetLayout;
- PFN_vkCreateDescriptorPool CreateDescriptorPool;
- PFN_vkDestroyDescriptorPool DestroyDescriptorPool;
- PFN_vkResetDescriptorPool ResetDescriptorPool;
- PFN_vkAllocateDescriptorSets AllocateDescriptorSets;
- PFN_vkFreeDescriptorSets FreeDescriptorSets;
- PFN_vkUpdateDescriptorSets UpdateDescriptorSets;
- PFN_vkCreateFramebuffer CreateFramebuffer;
- PFN_vkDestroyFramebuffer DestroyFramebuffer;
- PFN_vkCreateRenderPass CreateRenderPass;
- PFN_vkDestroyRenderPass DestroyRenderPass;
- PFN_vkGetRenderAreaGranularity GetRenderAreaGranularity;
- PFN_vkCreateCommandPool CreateCommandPool;
- PFN_vkDestroyCommandPool DestroyCommandPool;
- PFN_vkResetCommandPool ResetCommandPool;
- PFN_vkAllocateCommandBuffers AllocateCommandBuffers;
- PFN_vkFreeCommandBuffers FreeCommandBuffers;
- PFN_vkBeginCommandBuffer BeginCommandBuffer;
- PFN_vkEndCommandBuffer EndCommandBuffer;
- PFN_vkResetCommandBuffer ResetCommandBuffer;
- PFN_vkCmdBindPipeline CmdBindPipeline;
- PFN_vkCmdSetViewport CmdSetViewport;
- PFN_vkCmdSetScissor CmdSetScissor;
- PFN_vkCmdSetLineWidth CmdSetLineWidth;
- PFN_vkCmdSetDepthBias CmdSetDepthBias;
- PFN_vkCmdSetBlendConstants CmdSetBlendConstants;
- PFN_vkCmdSetDepthBounds CmdSetDepthBounds;
- PFN_vkCmdSetStencilCompareMask CmdSetStencilCompareMask;
- PFN_vkCmdSetStencilWriteMask CmdSetStencilWriteMask;
- PFN_vkCmdSetStencilReference CmdSetStencilReference;
- PFN_vkCmdBindDescriptorSets CmdBindDescriptorSets;
- PFN_vkCmdBindIndexBuffer CmdBindIndexBuffer;
- PFN_vkCmdBindVertexBuffers CmdBindVertexBuffers;
- PFN_vkCmdDraw CmdDraw;
- PFN_vkCmdDrawIndexed CmdDrawIndexed;
- PFN_vkCmdDrawIndirect CmdDrawIndirect;
- PFN_vkCmdDrawIndexedIndirect CmdDrawIndexedIndirect;
- PFN_vkCmdDispatch CmdDispatch;
- PFN_vkCmdDispatchIndirect CmdDispatchIndirect;
- PFN_vkCmdCopyBuffer CmdCopyBuffer;
- PFN_vkCmdCopyImage CmdCopyImage;
- PFN_vkCmdBlitImage CmdBlitImage;
- PFN_vkCmdCopyBufferToImage CmdCopyBufferToImage;
- PFN_vkCmdCopyImageToBuffer CmdCopyImageToBuffer;
- PFN_vkCmdUpdateBuffer CmdUpdateBuffer;
- PFN_vkCmdFillBuffer CmdFillBuffer;
- PFN_vkCmdClearColorImage CmdClearColorImage;
- PFN_vkCmdClearDepthStencilImage CmdClearDepthStencilImage;
- PFN_vkCmdClearAttachments CmdClearAttachments;
- PFN_vkCmdResolveImage CmdResolveImage;
- PFN_vkCmdSetEvent CmdSetEvent;
- PFN_vkCmdResetEvent CmdResetEvent;
- PFN_vkCmdWaitEvents CmdWaitEvents;
- PFN_vkCmdPipelineBarrier CmdPipelineBarrier;
- PFN_vkCmdBeginQuery CmdBeginQuery;
- PFN_vkCmdEndQuery CmdEndQuery;
- PFN_vkCmdResetQueryPool CmdResetQueryPool;
- PFN_vkCmdWriteTimestamp CmdWriteTimestamp;
- PFN_vkCmdCopyQueryPoolResults CmdCopyQueryPoolResults;
- PFN_vkCmdPushConstants CmdPushConstants;
- PFN_vkCmdBeginRenderPass CmdBeginRenderPass;
- PFN_vkCmdNextSubpass CmdNextSubpass;
- PFN_vkCmdEndRenderPass CmdEndRenderPass;
- PFN_vkCmdExecuteCommands CmdExecuteCommands;
-
- // ---- Core 1_1 commands
- PFN_vkBindBufferMemory2 BindBufferMemory2;
- PFN_vkBindImageMemory2 BindImageMemory2;
- PFN_vkGetDeviceGroupPeerMemoryFeatures GetDeviceGroupPeerMemoryFeatures;
- PFN_vkCmdSetDeviceMask CmdSetDeviceMask;
- PFN_vkCmdDispatchBase CmdDispatchBase;
- PFN_vkGetImageMemoryRequirements2 GetImageMemoryRequirements2;
- PFN_vkGetBufferMemoryRequirements2 GetBufferMemoryRequirements2;
- PFN_vkGetImageSparseMemoryRequirements2 GetImageSparseMemoryRequirements2;
- PFN_vkTrimCommandPool TrimCommandPool;
- PFN_vkGetDeviceQueue2 GetDeviceQueue2;
- PFN_vkCreateSamplerYcbcrConversion CreateSamplerYcbcrConversion;
- PFN_vkDestroySamplerYcbcrConversion DestroySamplerYcbcrConversion;
- PFN_vkCreateDescriptorUpdateTemplate CreateDescriptorUpdateTemplate;
- PFN_vkDestroyDescriptorUpdateTemplate DestroyDescriptorUpdateTemplate;
- PFN_vkUpdateDescriptorSetWithTemplate UpdateDescriptorSetWithTemplate;
- PFN_vkGetDescriptorSetLayoutSupport GetDescriptorSetLayoutSupport;
-
- // ---- VK_KHR_swapchain extension commands
- PFN_vkCreateSwapchainKHR CreateSwapchainKHR;
- PFN_vkDestroySwapchainKHR DestroySwapchainKHR;
- PFN_vkGetSwapchainImagesKHR GetSwapchainImagesKHR;
- PFN_vkAcquireNextImageKHR AcquireNextImageKHR;
- PFN_vkQueuePresentKHR QueuePresentKHR;
- PFN_vkGetDeviceGroupPresentCapabilitiesKHR GetDeviceGroupPresentCapabilitiesKHR;
- PFN_vkGetDeviceGroupSurfacePresentModesKHR GetDeviceGroupSurfacePresentModesKHR;
- PFN_vkAcquireNextImage2KHR AcquireNextImage2KHR;
-
- // ---- VK_KHR_display_swapchain extension commands
- PFN_vkCreateSharedSwapchainsKHR CreateSharedSwapchainsKHR;
-
- // ---- VK_KHR_device_group extension commands
- PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR GetDeviceGroupPeerMemoryFeaturesKHR;
- PFN_vkCmdSetDeviceMaskKHR CmdSetDeviceMaskKHR;
- PFN_vkCmdDispatchBaseKHR CmdDispatchBaseKHR;
-
- // ---- VK_KHR_maintenance1 extension commands
- PFN_vkTrimCommandPoolKHR TrimCommandPoolKHR;
-
- // ---- VK_KHR_external_memory_win32 extension commands
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- PFN_vkGetMemoryWin32HandleKHR GetMemoryWin32HandleKHR;
-#endif // VK_USE_PLATFORM_WIN32_KHR
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- PFN_vkGetMemoryWin32HandlePropertiesKHR GetMemoryWin32HandlePropertiesKHR;
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
- // ---- VK_KHR_external_memory_fd extension commands
- PFN_vkGetMemoryFdKHR GetMemoryFdKHR;
- PFN_vkGetMemoryFdPropertiesKHR GetMemoryFdPropertiesKHR;
-
- // ---- VK_KHR_external_semaphore_win32 extension commands
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- PFN_vkImportSemaphoreWin32HandleKHR ImportSemaphoreWin32HandleKHR;
-#endif // VK_USE_PLATFORM_WIN32_KHR
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- PFN_vkGetSemaphoreWin32HandleKHR GetSemaphoreWin32HandleKHR;
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
- // ---- VK_KHR_external_semaphore_fd extension commands
- PFN_vkImportSemaphoreFdKHR ImportSemaphoreFdKHR;
- PFN_vkGetSemaphoreFdKHR GetSemaphoreFdKHR;
-
- // ---- VK_KHR_push_descriptor extension commands
- PFN_vkCmdPushDescriptorSetKHR CmdPushDescriptorSetKHR;
- PFN_vkCmdPushDescriptorSetWithTemplateKHR CmdPushDescriptorSetWithTemplateKHR;
-
- // ---- VK_KHR_descriptor_update_template extension commands
- PFN_vkCreateDescriptorUpdateTemplateKHR CreateDescriptorUpdateTemplateKHR;
- PFN_vkDestroyDescriptorUpdateTemplateKHR DestroyDescriptorUpdateTemplateKHR;
- PFN_vkUpdateDescriptorSetWithTemplateKHR UpdateDescriptorSetWithTemplateKHR;
-
- // ---- VK_KHR_create_renderpass2 extension commands
- PFN_vkCreateRenderPass2KHR CreateRenderPass2KHR;
- PFN_vkCmdBeginRenderPass2KHR CmdBeginRenderPass2KHR;
- PFN_vkCmdNextSubpass2KHR CmdNextSubpass2KHR;
- PFN_vkCmdEndRenderPass2KHR CmdEndRenderPass2KHR;
-
- // ---- VK_KHR_shared_presentable_image extension commands
- PFN_vkGetSwapchainStatusKHR GetSwapchainStatusKHR;
-
- // ---- VK_KHR_external_fence_win32 extension commands
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- PFN_vkImportFenceWin32HandleKHR ImportFenceWin32HandleKHR;
-#endif // VK_USE_PLATFORM_WIN32_KHR
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- PFN_vkGetFenceWin32HandleKHR GetFenceWin32HandleKHR;
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
- // ---- VK_KHR_external_fence_fd extension commands
- PFN_vkImportFenceFdKHR ImportFenceFdKHR;
- PFN_vkGetFenceFdKHR GetFenceFdKHR;
-
- // ---- VK_KHR_get_memory_requirements2 extension commands
- PFN_vkGetImageMemoryRequirements2KHR GetImageMemoryRequirements2KHR;
- PFN_vkGetBufferMemoryRequirements2KHR GetBufferMemoryRequirements2KHR;
- PFN_vkGetImageSparseMemoryRequirements2KHR GetImageSparseMemoryRequirements2KHR;
-
- // ---- VK_KHR_sampler_ycbcr_conversion extension commands
- PFN_vkCreateSamplerYcbcrConversionKHR CreateSamplerYcbcrConversionKHR;
- PFN_vkDestroySamplerYcbcrConversionKHR DestroySamplerYcbcrConversionKHR;
-
- // ---- VK_KHR_bind_memory2 extension commands
- PFN_vkBindBufferMemory2KHR BindBufferMemory2KHR;
- PFN_vkBindImageMemory2KHR BindImageMemory2KHR;
-
- // ---- VK_KHR_maintenance3 extension commands
- PFN_vkGetDescriptorSetLayoutSupportKHR GetDescriptorSetLayoutSupportKHR;
-
- // ---- VK_KHR_draw_indirect_count extension commands
- PFN_vkCmdDrawIndirectCountKHR CmdDrawIndirectCountKHR;
- PFN_vkCmdDrawIndexedIndirectCountKHR CmdDrawIndexedIndirectCountKHR;
-
- // ---- VK_KHR_pipeline_executable_properties extension commands
- PFN_vkGetPipelineExecutablePropertiesKHR GetPipelineExecutablePropertiesKHR;
- PFN_vkGetPipelineExecutableStatisticsKHR GetPipelineExecutableStatisticsKHR;
- PFN_vkGetPipelineExecutableInternalRepresentationsKHR GetPipelineExecutableInternalRepresentationsKHR;
-
- // ---- VK_EXT_debug_marker extension commands
- PFN_vkDebugMarkerSetObjectTagEXT DebugMarkerSetObjectTagEXT;
- PFN_vkDebugMarkerSetObjectNameEXT DebugMarkerSetObjectNameEXT;
- PFN_vkCmdDebugMarkerBeginEXT CmdDebugMarkerBeginEXT;
- PFN_vkCmdDebugMarkerEndEXT CmdDebugMarkerEndEXT;
- PFN_vkCmdDebugMarkerInsertEXT CmdDebugMarkerInsertEXT;
-
- // ---- VK_EXT_transform_feedback extension commands
- PFN_vkCmdBindTransformFeedbackBuffersEXT CmdBindTransformFeedbackBuffersEXT;
- PFN_vkCmdBeginTransformFeedbackEXT CmdBeginTransformFeedbackEXT;
- PFN_vkCmdEndTransformFeedbackEXT CmdEndTransformFeedbackEXT;
- PFN_vkCmdBeginQueryIndexedEXT CmdBeginQueryIndexedEXT;
- PFN_vkCmdEndQueryIndexedEXT CmdEndQueryIndexedEXT;
- PFN_vkCmdDrawIndirectByteCountEXT CmdDrawIndirectByteCountEXT;
-
- // ---- VK_NVX_image_view_handle extension commands
- PFN_vkGetImageViewHandleNVX GetImageViewHandleNVX;
-
- // ---- VK_AMD_draw_indirect_count extension commands
- PFN_vkCmdDrawIndirectCountAMD CmdDrawIndirectCountAMD;
- PFN_vkCmdDrawIndexedIndirectCountAMD CmdDrawIndexedIndirectCountAMD;
-
- // ---- VK_AMD_shader_info extension commands
- PFN_vkGetShaderInfoAMD GetShaderInfoAMD;
-
- // ---- VK_NV_external_memory_win32 extension commands
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- PFN_vkGetMemoryWin32HandleNV GetMemoryWin32HandleNV;
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
- // ---- VK_EXT_conditional_rendering extension commands
- PFN_vkCmdBeginConditionalRenderingEXT CmdBeginConditionalRenderingEXT;
- PFN_vkCmdEndConditionalRenderingEXT CmdEndConditionalRenderingEXT;
-
- // ---- VK_NVX_device_generated_commands extension commands
- PFN_vkCmdProcessCommandsNVX CmdProcessCommandsNVX;
- PFN_vkCmdReserveSpaceForCommandsNVX CmdReserveSpaceForCommandsNVX;
- PFN_vkCreateIndirectCommandsLayoutNVX CreateIndirectCommandsLayoutNVX;
- PFN_vkDestroyIndirectCommandsLayoutNVX DestroyIndirectCommandsLayoutNVX;
- PFN_vkCreateObjectTableNVX CreateObjectTableNVX;
- PFN_vkDestroyObjectTableNVX DestroyObjectTableNVX;
- PFN_vkRegisterObjectsNVX RegisterObjectsNVX;
- PFN_vkUnregisterObjectsNVX UnregisterObjectsNVX;
-
- // ---- VK_NV_clip_space_w_scaling extension commands
- PFN_vkCmdSetViewportWScalingNV CmdSetViewportWScalingNV;
-
- // ---- VK_EXT_display_control extension commands
- PFN_vkDisplayPowerControlEXT DisplayPowerControlEXT;
- PFN_vkRegisterDeviceEventEXT RegisterDeviceEventEXT;
- PFN_vkRegisterDisplayEventEXT RegisterDisplayEventEXT;
- PFN_vkGetSwapchainCounterEXT GetSwapchainCounterEXT;
-
- // ---- VK_GOOGLE_display_timing extension commands
- PFN_vkGetRefreshCycleDurationGOOGLE GetRefreshCycleDurationGOOGLE;
- PFN_vkGetPastPresentationTimingGOOGLE GetPastPresentationTimingGOOGLE;
-
- // ---- VK_EXT_discard_rectangles extension commands
- PFN_vkCmdSetDiscardRectangleEXT CmdSetDiscardRectangleEXT;
-
- // ---- VK_EXT_hdr_metadata extension commands
- PFN_vkSetHdrMetadataEXT SetHdrMetadataEXT;
-
- // ---- VK_EXT_debug_utils extension commands
- PFN_vkSetDebugUtilsObjectNameEXT SetDebugUtilsObjectNameEXT;
- PFN_vkSetDebugUtilsObjectTagEXT SetDebugUtilsObjectTagEXT;
- PFN_vkQueueBeginDebugUtilsLabelEXT QueueBeginDebugUtilsLabelEXT;
- PFN_vkQueueEndDebugUtilsLabelEXT QueueEndDebugUtilsLabelEXT;
- PFN_vkQueueInsertDebugUtilsLabelEXT QueueInsertDebugUtilsLabelEXT;
- PFN_vkCmdBeginDebugUtilsLabelEXT CmdBeginDebugUtilsLabelEXT;
- PFN_vkCmdEndDebugUtilsLabelEXT CmdEndDebugUtilsLabelEXT;
- PFN_vkCmdInsertDebugUtilsLabelEXT CmdInsertDebugUtilsLabelEXT;
-
- // ---- VK_ANDROID_external_memory_android_hardware_buffer extension commands
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
- PFN_vkGetAndroidHardwareBufferPropertiesANDROID GetAndroidHardwareBufferPropertiesANDROID;
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
- PFN_vkGetMemoryAndroidHardwareBufferANDROID GetMemoryAndroidHardwareBufferANDROID;
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-
- // ---- VK_EXT_sample_locations extension commands
- PFN_vkCmdSetSampleLocationsEXT CmdSetSampleLocationsEXT;
-
- // ---- VK_EXT_image_drm_format_modifier extension commands
- PFN_vkGetImageDrmFormatModifierPropertiesEXT GetImageDrmFormatModifierPropertiesEXT;
-
- // ---- VK_EXT_validation_cache extension commands
- PFN_vkCreateValidationCacheEXT CreateValidationCacheEXT;
- PFN_vkDestroyValidationCacheEXT DestroyValidationCacheEXT;
- PFN_vkMergeValidationCachesEXT MergeValidationCachesEXT;
- PFN_vkGetValidationCacheDataEXT GetValidationCacheDataEXT;
-
- // ---- VK_NV_shading_rate_image extension commands
- PFN_vkCmdBindShadingRateImageNV CmdBindShadingRateImageNV;
- PFN_vkCmdSetViewportShadingRatePaletteNV CmdSetViewportShadingRatePaletteNV;
- PFN_vkCmdSetCoarseSampleOrderNV CmdSetCoarseSampleOrderNV;
-
- // ---- VK_NV_ray_tracing extension commands
- PFN_vkCreateAccelerationStructureNV CreateAccelerationStructureNV;
- PFN_vkDestroyAccelerationStructureNV DestroyAccelerationStructureNV;
- PFN_vkGetAccelerationStructureMemoryRequirementsNV GetAccelerationStructureMemoryRequirementsNV;
- PFN_vkBindAccelerationStructureMemoryNV BindAccelerationStructureMemoryNV;
- PFN_vkCmdBuildAccelerationStructureNV CmdBuildAccelerationStructureNV;
- PFN_vkCmdCopyAccelerationStructureNV CmdCopyAccelerationStructureNV;
- PFN_vkCmdTraceRaysNV CmdTraceRaysNV;
- PFN_vkCreateRayTracingPipelinesNV CreateRayTracingPipelinesNV;
- PFN_vkGetRayTracingShaderGroupHandlesNV GetRayTracingShaderGroupHandlesNV;
- PFN_vkGetAccelerationStructureHandleNV GetAccelerationStructureHandleNV;
- PFN_vkCmdWriteAccelerationStructuresPropertiesNV CmdWriteAccelerationStructuresPropertiesNV;
- PFN_vkCompileDeferredNV CompileDeferredNV;
-
- // ---- VK_EXT_external_memory_host extension commands
- PFN_vkGetMemoryHostPointerPropertiesEXT GetMemoryHostPointerPropertiesEXT;
-
- // ---- VK_AMD_buffer_marker extension commands
- PFN_vkCmdWriteBufferMarkerAMD CmdWriteBufferMarkerAMD;
-
- // ---- VK_EXT_calibrated_timestamps extension commands
- PFN_vkGetCalibratedTimestampsEXT GetCalibratedTimestampsEXT;
-
- // ---- VK_NV_mesh_shader extension commands
- PFN_vkCmdDrawMeshTasksNV CmdDrawMeshTasksNV;
- PFN_vkCmdDrawMeshTasksIndirectNV CmdDrawMeshTasksIndirectNV;
- PFN_vkCmdDrawMeshTasksIndirectCountNV CmdDrawMeshTasksIndirectCountNV;
-
- // ---- VK_NV_scissor_exclusive extension commands
- PFN_vkCmdSetExclusiveScissorNV CmdSetExclusiveScissorNV;
-
- // ---- VK_NV_device_diagnostic_checkpoints extension commands
- PFN_vkCmdSetCheckpointNV CmdSetCheckpointNV;
- PFN_vkGetQueueCheckpointDataNV GetQueueCheckpointDataNV;
-
- // ---- VK_INTEL_performance_query extension commands
- PFN_vkInitializePerformanceApiINTEL InitializePerformanceApiINTEL;
- PFN_vkUninitializePerformanceApiINTEL UninitializePerformanceApiINTEL;
- PFN_vkCmdSetPerformanceMarkerINTEL CmdSetPerformanceMarkerINTEL;
- PFN_vkCmdSetPerformanceStreamMarkerINTEL CmdSetPerformanceStreamMarkerINTEL;
- PFN_vkCmdSetPerformanceOverrideINTEL CmdSetPerformanceOverrideINTEL;
- PFN_vkAcquirePerformanceConfigurationINTEL AcquirePerformanceConfigurationINTEL;
- PFN_vkReleasePerformanceConfigurationINTEL ReleasePerformanceConfigurationINTEL;
- PFN_vkQueueSetPerformanceConfigurationINTEL QueueSetPerformanceConfigurationINTEL;
- PFN_vkGetPerformanceParameterINTEL GetPerformanceParameterINTEL;
-
- // ---- VK_AMD_display_native_hdr extension commands
- PFN_vkSetLocalDimmingAMD SetLocalDimmingAMD;
-
- // ---- VK_EXT_buffer_device_address extension commands
- PFN_vkGetBufferDeviceAddressEXT GetBufferDeviceAddressEXT;
-
- // ---- VK_EXT_full_screen_exclusive extension commands
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- PFN_vkAcquireFullScreenExclusiveModeEXT AcquireFullScreenExclusiveModeEXT;
-#endif // VK_USE_PLATFORM_WIN32_KHR
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- PFN_vkReleaseFullScreenExclusiveModeEXT ReleaseFullScreenExclusiveModeEXT;
-#endif // VK_USE_PLATFORM_WIN32_KHR
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- PFN_vkGetDeviceGroupSurfacePresentModes2EXT GetDeviceGroupSurfacePresentModes2EXT;
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
- // ---- VK_EXT_line_rasterization extension commands
- PFN_vkCmdSetLineStippleEXT CmdSetLineStippleEXT;
-
- // ---- VK_EXT_host_query_reset extension commands
- PFN_vkResetQueryPoolEXT ResetQueryPoolEXT;
-} VkLayerDispatchTable;
-
-
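
The instance and device dispatch tables deleted above hold one function pointer per Vulkan entry point. A layer fills its table once, at instance or device creation, by resolving each name through the next link in the chain, and then routes every intercepted call through the table. A small sketch of the device-side pattern follows; MiniDispatchTable and InitMiniDispatchTable are illustrative names, and gdpa stands for the GetDeviceProcAddr of the next layer or driver, which a layer obtains from its VkLayerDeviceCreateInfo chain during vkCreateDevice.

#include <vulkan/vulkan.h>

struct MiniDispatchTable {
    PFN_vkGetDeviceProcAddr GetDeviceProcAddr = nullptr;
    PFN_vkQueueSubmit       QueueSubmit       = nullptr;
    PFN_vkDeviceWaitIdle    DeviceWaitIdle    = nullptr;
};

// Resolve each entry point once through the next layer's GetDeviceProcAddr.
static void InitMiniDispatchTable(VkDevice device, PFN_vkGetDeviceProcAddr gdpa,
                                  MiniDispatchTable *table) {
    table->GetDeviceProcAddr = gdpa;
    table->QueueSubmit =
        reinterpret_cast<PFN_vkQueueSubmit>(gdpa(device, "vkQueueSubmit"));
    table->DeviceWaitIdle =
        reinterpret_cast<PFN_vkDeviceWaitIdle>(gdpa(device, "vkDeviceWaitIdle"));
}
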
diff --git a/layers/generated/vk_object_types.h b/layers/generated/vk_object_types.h
deleted file mode 100644
index 6ff6ed99f..000000000
--- a/layers/generated/vk_object_types.h
+++ /dev/null
@@ -1,767 +0,0 @@
-// *** THIS FILE IS GENERATED - DO NOT EDIT ***
-// See helper_file_generator.py for modifications
-
-
-/***************************************************************************
- *
- * Copyright (c) 2015-2019 The Khronos Group Inc.
- * Copyright (c) 2015-2019 Valve Corporation
- * Copyright (c) 2015-2019 LunarG, Inc.
- * Copyright (c) 2015-2019 Google Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * Author: Mark Lobodzinski <mark@lunarg.com>
- * Author: Courtney Goeltzenleuchter <courtneygo@google.com>
- * Author: Tobin Ehlis <tobine@google.com>
- * Author: Chris Forbes <chrisforbes@google.com>
- * Author: John Zulauf <jzulauf@lunarg.com>
- *
- ****************************************************************************/
-
-
-#pragma once
-
-#include "cast_utils.h"
-
-// Object Type enum for validation layer internal object handling
-typedef enum VulkanObjectType {
- kVulkanObjectTypeUnknown = 0,
- kVulkanObjectTypeInstance = 1,
- kVulkanObjectTypePhysicalDevice = 2,
- kVulkanObjectTypeDevice = 3,
- kVulkanObjectTypeQueue = 4,
- kVulkanObjectTypeSemaphore = 5,
- kVulkanObjectTypeCommandBuffer = 6,
- kVulkanObjectTypeFence = 7,
- kVulkanObjectTypeDeviceMemory = 8,
- kVulkanObjectTypeBuffer = 9,
- kVulkanObjectTypeImage = 10,
- kVulkanObjectTypeEvent = 11,
- kVulkanObjectTypeQueryPool = 12,
- kVulkanObjectTypeBufferView = 13,
- kVulkanObjectTypeImageView = 14,
- kVulkanObjectTypeShaderModule = 15,
- kVulkanObjectTypePipelineCache = 16,
- kVulkanObjectTypePipelineLayout = 17,
- kVulkanObjectTypeRenderPass = 18,
- kVulkanObjectTypePipeline = 19,
- kVulkanObjectTypeDescriptorSetLayout = 20,
- kVulkanObjectTypeSampler = 21,
- kVulkanObjectTypeDescriptorPool = 22,
- kVulkanObjectTypeDescriptorSet = 23,
- kVulkanObjectTypeFramebuffer = 24,
- kVulkanObjectTypeCommandPool = 25,
- kVulkanObjectTypeSamplerYcbcrConversion = 26,
- kVulkanObjectTypeDescriptorUpdateTemplate = 27,
- kVulkanObjectTypeSurfaceKHR = 28,
- kVulkanObjectTypeSwapchainKHR = 29,
- kVulkanObjectTypeDisplayKHR = 30,
- kVulkanObjectTypeDisplayModeKHR = 31,
- kVulkanObjectTypeDebugReportCallbackEXT = 32,
- kVulkanObjectTypeObjectTableNVX = 33,
- kVulkanObjectTypeIndirectCommandsLayoutNVX = 34,
- kVulkanObjectTypeDebugUtilsMessengerEXT = 35,
- kVulkanObjectTypeValidationCacheEXT = 36,
- kVulkanObjectTypeAccelerationStructureNV = 37,
- kVulkanObjectTypePerformanceConfigurationINTEL = 38,
- kVulkanObjectTypeMax = 39,
-    // Aliases for backwards compatibility of "promoted" types
- kVulkanObjectTypeDescriptorUpdateTemplateKHR = kVulkanObjectTypeDescriptorUpdateTemplate,
- kVulkanObjectTypeSamplerYcbcrConversionKHR = kVulkanObjectTypeSamplerYcbcrConversion,
-} VulkanObjectType;
-
-// Array of object name strings for OBJECT_TYPE enum conversion
-static const char * const object_string[kVulkanObjectTypeMax] = {
- "VkNonDispatchableHandle",
- "VkInstance",
- "VkPhysicalDevice",
- "VkDevice",
- "VkQueue",
- "VkSemaphore",
- "VkCommandBuffer",
- "VkFence",
- "VkDeviceMemory",
- "VkBuffer",
- "VkImage",
- "VkEvent",
- "VkQueryPool",
- "VkBufferView",
- "VkImageView",
- "VkShaderModule",
- "VkPipelineCache",
- "VkPipelineLayout",
- "VkRenderPass",
- "VkPipeline",
- "VkDescriptorSetLayout",
- "VkSampler",
- "VkDescriptorPool",
- "VkDescriptorSet",
- "VkFramebuffer",
- "VkCommandPool",
- "VkSamplerYcbcrConversion",
- "VkDescriptorUpdateTemplate",
- "VkSurfaceKHR",
- "VkSwapchainKHR",
- "VkDisplayKHR",
- "VkDisplayModeKHR",
- "VkDebugReportCallbackEXT",
- "VkObjectTableNVX",
- "VkIndirectCommandsLayoutNVX",
- "VkDebugUtilsMessengerEXT",
- "VkValidationCacheEXT",
- "VkAccelerationStructureNV",
- "VkPerformanceConfigurationINTEL",
-};
-
-// Helper array to get Vulkan VK_EXT_debug_report object type enum from the internal layers version
-const VkDebugReportObjectTypeEXT get_debug_report_enum[] = {
- VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, // kVulkanObjectTypeUnknown
- VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, // kVulkanObjectTypeInstance
- VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, // kVulkanObjectTypePhysicalDevice
- VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, // kVulkanObjectTypeDevice
- VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, // kVulkanObjectTypeQueue
- VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, // kVulkanObjectTypeSemaphore
- VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, // kVulkanObjectTypeCommandBuffer
- VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, // kVulkanObjectTypeFence
- VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, // kVulkanObjectTypeDeviceMemory
- VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, // kVulkanObjectTypeBuffer
- VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, // kVulkanObjectTypeImage
- VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, // kVulkanObjectTypeEvent
- VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, // kVulkanObjectTypeQueryPool
- VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT, // kVulkanObjectTypeBufferView
- VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, // kVulkanObjectTypeImageView
- VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, // kVulkanObjectTypeShaderModule
- VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, // kVulkanObjectTypePipelineCache
- VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, // kVulkanObjectTypePipelineLayout
- VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, // kVulkanObjectTypeRenderPass
- VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, // kVulkanObjectTypePipeline
- VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, // kVulkanObjectTypeDescriptorSetLayout
- VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, // kVulkanObjectTypeSampler
- VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, // kVulkanObjectTypeDescriptorPool
- VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, // kVulkanObjectTypeDescriptorSet
- VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, // kVulkanObjectTypeFramebuffer
- VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, // kVulkanObjectTypeCommandPool
- VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, // kVulkanObjectTypeSamplerYcbcrConversion
- VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT, // kVulkanObjectTypeDescriptorUpdateTemplate
- VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, // kVulkanObjectTypeSurfaceKHR
- VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, // kVulkanObjectTypeSwapchainKHR
- VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT, // kVulkanObjectTypeDisplayKHR
- VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT, // kVulkanObjectTypeDisplayModeKHR
- VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT, // kVulkanObjectTypeDebugReportCallbackEXT
- VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT, // kVulkanObjectTypeObjectTableNVX
- VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT, // kVulkanObjectTypeIndirectCommandsLayoutNVX
- VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, // kVulkanObjectTypeDebugUtilsMessengerEXT
- VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT, // kVulkanObjectTypeValidationCacheEXT
- VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT, // kVulkanObjectTypeAccelerationStructureNV
- VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, // kVulkanObjectTypePerformanceConfigurationINTEL
-};
-
-// Helper array to get the official Vulkan VkObjectType enum from the internal layers' object type
-const VkObjectType get_object_type_enum[] = {
- VK_OBJECT_TYPE_UNKNOWN, // kVulkanObjectTypeUnknown
- VK_OBJECT_TYPE_INSTANCE, // kVulkanObjectTypeInstance
- VK_OBJECT_TYPE_PHYSICAL_DEVICE, // kVulkanObjectTypePhysicalDevice
- VK_OBJECT_TYPE_DEVICE, // kVulkanObjectTypeDevice
- VK_OBJECT_TYPE_QUEUE, // kVulkanObjectTypeQueue
- VK_OBJECT_TYPE_SEMAPHORE, // kVulkanObjectTypeSemaphore
- VK_OBJECT_TYPE_COMMAND_BUFFER, // kVulkanObjectTypeCommandBuffer
- VK_OBJECT_TYPE_FENCE, // kVulkanObjectTypeFence
- VK_OBJECT_TYPE_DEVICE_MEMORY, // kVulkanObjectTypeDeviceMemory
- VK_OBJECT_TYPE_BUFFER, // kVulkanObjectTypeBuffer
- VK_OBJECT_TYPE_IMAGE, // kVulkanObjectTypeImage
- VK_OBJECT_TYPE_EVENT, // kVulkanObjectTypeEvent
- VK_OBJECT_TYPE_QUERY_POOL, // kVulkanObjectTypeQueryPool
- VK_OBJECT_TYPE_BUFFER_VIEW, // kVulkanObjectTypeBufferView
- VK_OBJECT_TYPE_IMAGE_VIEW, // kVulkanObjectTypeImageView
- VK_OBJECT_TYPE_SHADER_MODULE, // kVulkanObjectTypeShaderModule
- VK_OBJECT_TYPE_PIPELINE_CACHE, // kVulkanObjectTypePipelineCache
- VK_OBJECT_TYPE_PIPELINE_LAYOUT, // kVulkanObjectTypePipelineLayout
- VK_OBJECT_TYPE_RENDER_PASS, // kVulkanObjectTypeRenderPass
- VK_OBJECT_TYPE_PIPELINE, // kVulkanObjectTypePipeline
- VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT, // kVulkanObjectTypeDescriptorSetLayout
- VK_OBJECT_TYPE_SAMPLER, // kVulkanObjectTypeSampler
- VK_OBJECT_TYPE_DESCRIPTOR_POOL, // kVulkanObjectTypeDescriptorPool
- VK_OBJECT_TYPE_DESCRIPTOR_SET, // kVulkanObjectTypeDescriptorSet
- VK_OBJECT_TYPE_FRAMEBUFFER, // kVulkanObjectTypeFramebuffer
- VK_OBJECT_TYPE_COMMAND_POOL, // kVulkanObjectTypeCommandPool
- VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION, // kVulkanObjectTypeSamplerYcbcrConversion
- VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE, // kVulkanObjectTypeDescriptorUpdateTemplate
- VK_OBJECT_TYPE_SURFACE_KHR, // kVulkanObjectTypeSurfaceKHR
- VK_OBJECT_TYPE_SWAPCHAIN_KHR, // kVulkanObjectTypeSwapchainKHR
- VK_OBJECT_TYPE_DISPLAY_KHR, // kVulkanObjectTypeDisplayKHR
- VK_OBJECT_TYPE_DISPLAY_MODE_KHR, // kVulkanObjectTypeDisplayModeKHR
- VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT, // kVulkanObjectTypeDebugReportCallbackEXT
- VK_OBJECT_TYPE_OBJECT_TABLE_NVX, // kVulkanObjectTypeObjectTableNVX
- VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX, // kVulkanObjectTypeIndirectCommandsLayoutNVX
- VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT, // kVulkanObjectTypeDebugUtilsMessengerEXT
- VK_OBJECT_TYPE_VALIDATION_CACHE_EXT, // kVulkanObjectTypeValidationCacheEXT
- VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV, // kVulkanObjectTypeAccelerationStructureNV
- VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL, // kVulkanObjectTypePerformanceConfigurationINTEL
-};
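Together these two tables translate the internal object type into both public enums: the legacy VK_EXT_debug_report enum and the core VkObjectType. Types with no debug-report counterpart (VkDebugUtilsMessengerEXT, VkPerformanceConfigurationINTEL) intentionally map to VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT. A small sketch of looking both up at once (GetPublicTypes is illustrative, not part of the generated code):

struct PublicTypePair {
    VkDebugReportObjectTypeEXT debug_report;
    VkObjectType core;
};
static PublicTypePair GetPublicTypes(VulkanObjectType type) {
    // Indices follow the internal enum order used by both tables above.
    return {get_debug_report_enum[type], get_object_type_enum[type]};
}
// GetPublicTypes(kVulkanObjectTypeImage) -> {VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, VK_OBJECT_TYPE_IMAGE}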
-
-static inline VkObjectType convertDebugReportObjectToCoreObject(VkDebugReportObjectTypeEXT debug_report_obj) {
- switch (debug_report_obj) {
- case VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT: return VK_OBJECT_TYPE_UNKNOWN;
- case VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT: return VK_OBJECT_TYPE_INSTANCE;
- case VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT: return VK_OBJECT_TYPE_PHYSICAL_DEVICE;
- case VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT: return VK_OBJECT_TYPE_DEVICE;
- case VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT: return VK_OBJECT_TYPE_QUEUE;
- case VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT: return VK_OBJECT_TYPE_SEMAPHORE;
- case VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT: return VK_OBJECT_TYPE_COMMAND_BUFFER;
- case VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT: return VK_OBJECT_TYPE_FENCE;
- case VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT: return VK_OBJECT_TYPE_DEVICE_MEMORY;
- case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT: return VK_OBJECT_TYPE_BUFFER;
- case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT: return VK_OBJECT_TYPE_IMAGE;
- case VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT: return VK_OBJECT_TYPE_EVENT;
- case VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT: return VK_OBJECT_TYPE_QUERY_POOL;
- case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT: return VK_OBJECT_TYPE_BUFFER_VIEW;
- case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT: return VK_OBJECT_TYPE_IMAGE_VIEW;
- case VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT: return VK_OBJECT_TYPE_SHADER_MODULE;
- case VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT: return VK_OBJECT_TYPE_PIPELINE_CACHE;
- case VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT: return VK_OBJECT_TYPE_PIPELINE_LAYOUT;
- case VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT: return VK_OBJECT_TYPE_RENDER_PASS;
- case VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT: return VK_OBJECT_TYPE_PIPELINE;
- case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT: return VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT;
- case VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT: return VK_OBJECT_TYPE_SAMPLER;
- case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT: return VK_OBJECT_TYPE_DESCRIPTOR_POOL;
- case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT: return VK_OBJECT_TYPE_DESCRIPTOR_SET;
- case VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT: return VK_OBJECT_TYPE_FRAMEBUFFER;
- case VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT: return VK_OBJECT_TYPE_COMMAND_POOL;
- case VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT: return VK_OBJECT_TYPE_SURFACE_KHR;
- case VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT: return VK_OBJECT_TYPE_SWAPCHAIN_KHR;
- case VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT: return VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT;
- case VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT: return VK_OBJECT_TYPE_DISPLAY_KHR;
- case VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT: return VK_OBJECT_TYPE_DISPLAY_MODE_KHR;
- case VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT: return VK_OBJECT_TYPE_OBJECT_TABLE_NVX;
- case VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT: return VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX;
- case VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT: return VK_OBJECT_TYPE_VALIDATION_CACHE_EXT;
- case VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT: return VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION;
- case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT: return VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE;
- case VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT: return VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV;
- default: return VK_OBJECT_TYPE_UNKNOWN;
- }
-}
-
-static inline VkDebugReportObjectTypeEXT convertCoreObjectToDebugReportObject(VkObjectType core_report_obj) {
- switch (core_report_obj) {
- case VK_OBJECT_TYPE_UNKNOWN: return VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT;
- case VK_OBJECT_TYPE_INSTANCE: return VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT;
- case VK_OBJECT_TYPE_PHYSICAL_DEVICE: return VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT;
- case VK_OBJECT_TYPE_DEVICE: return VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT;
- case VK_OBJECT_TYPE_QUEUE: return VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT;
- case VK_OBJECT_TYPE_SEMAPHORE: return VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT;
- case VK_OBJECT_TYPE_COMMAND_BUFFER: return VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT;
- case VK_OBJECT_TYPE_FENCE: return VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT;
- case VK_OBJECT_TYPE_DEVICE_MEMORY: return VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT;
- case VK_OBJECT_TYPE_BUFFER: return VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT;
- case VK_OBJECT_TYPE_IMAGE: return VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT;
- case VK_OBJECT_TYPE_EVENT: return VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT;
- case VK_OBJECT_TYPE_QUERY_POOL: return VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT;
- case VK_OBJECT_TYPE_BUFFER_VIEW: return VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT;
- case VK_OBJECT_TYPE_IMAGE_VIEW: return VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT;
- case VK_OBJECT_TYPE_SHADER_MODULE: return VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT;
- case VK_OBJECT_TYPE_PIPELINE_CACHE: return VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT;
- case VK_OBJECT_TYPE_PIPELINE_LAYOUT: return VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT;
- case VK_OBJECT_TYPE_RENDER_PASS: return VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT;
- case VK_OBJECT_TYPE_PIPELINE: return VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT;
- case VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT: return VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT;
- case VK_OBJECT_TYPE_SAMPLER: return VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT;
- case VK_OBJECT_TYPE_DESCRIPTOR_POOL: return VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT;
- case VK_OBJECT_TYPE_DESCRIPTOR_SET: return VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT;
- case VK_OBJECT_TYPE_FRAMEBUFFER: return VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT;
- case VK_OBJECT_TYPE_COMMAND_POOL: return VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT;
- case VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION: return VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT;
- case VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE: return VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT;
- case VK_OBJECT_TYPE_SURFACE_KHR: return VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT;
- case VK_OBJECT_TYPE_SWAPCHAIN_KHR: return VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT;
- case VK_OBJECT_TYPE_DISPLAY_KHR: return VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT;
- case VK_OBJECT_TYPE_DISPLAY_MODE_KHR: return VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT;
- case VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT: return VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT;
- case VK_OBJECT_TYPE_OBJECT_TABLE_NVX: return VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT;
- case VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX: return VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT;
- case VK_OBJECT_TYPE_VALIDATION_CACHE_EXT: return VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT;
- case VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV: return VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT;
- default: return VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT;
- }
-}
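These two switch-based converters translate directly between the core VkObjectType and the debug-report enum, falling back to the UNKNOWN value for anything the target enum cannot represent. For example (illustrative only):

// Lossless for types present in both enums:
VkObjectType core = convertDebugReportObjectToCoreObject(VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT);
// core == VK_OBJECT_TYPE_BUFFER
// Collapses to UNKNOWN where no debug-report equivalent exists:
VkDebugReportObjectTypeEXT dr = convertCoreObjectToDebugReportObject(VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT);
// dr == VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT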
-
-// Traits objects that statically map each Vk<handleType> to its corresponding object-type enums
-template <typename VkType> struct VkHandleInfo {};
-template <VulkanObjectType id> struct VulkanObjectTypeInfo {};
-
-// The following line must match the vulkan_core.h condition guarding VK_DEFINE_NON_DISPATCHABLE_HANDLE
-#if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__)) || defined(_M_X64) || defined(__ia64) || defined(_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
-#define TYPESAFE_NONDISPATCHABLE_HANDLES
-#else
-VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkNonDispatchableHandle)
-
-template <> struct VkHandleInfo<VkNonDispatchableHandle> {
- static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeUnknown;
- static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT;
- static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_UNKNOWN;
- static const char* Typename() {
- return "VkNonDispatchableHandle";
- }
-};
-template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeUnknown> {
- typedef VkNonDispatchableHandle Type;
-};
-
-#endif // VK_DEFINE_HANDLE logic duplication
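On 64-bit targets each non-dispatchable handle is a distinct pointer type, so VkHandleInfo can be specialized per handle and checked at compile time; on 32-bit targets all non-dispatchable handles are the same uint64_t typedef, which is why only the fallback VkNonDispatchableHandle specialization exists there. A sketch of a generic helper built on the traits (LogHandleType is hypothetical; it assumes <cstdio> and this header are included):

template <typename HandleT>
void LogHandleType() {
    // Both members resolve at compile time from the VkHandleInfo specialization.
    printf("%s maps to VkObjectType %d\n", VkHandleInfo<HandleT>::Typename(),
           static_cast<int>(VkHandleInfo<HandleT>::kVkObjectType));
}
// LogHandleType<VkDevice>() prints: VkDevice maps to VkObjectType 3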
-template <> struct VkHandleInfo<VkCommandBuffer> {
- static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeCommandBuffer;
- static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT;
- static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_COMMAND_BUFFER;
- static const char* Typename() {
- return "VkCommandBuffer";
- }
-};
-template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeCommandBuffer> {
- typedef VkCommandBuffer Type;
-};
-template <> struct VkHandleInfo<VkDevice> {
- static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeDevice;
- static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT;
- static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_DEVICE;
- static const char* Typename() {
- return "VkDevice";
- }
-};
-template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeDevice> {
- typedef VkDevice Type;
-};
-template <> struct VkHandleInfo<VkInstance> {
- static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeInstance;
- static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT;
- static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_INSTANCE;
- static const char* Typename() {
- return "VkInstance";
- }
-};
-template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeInstance> {
- typedef VkInstance Type;
-};
-template <> struct VkHandleInfo<VkPhysicalDevice> {
- static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypePhysicalDevice;
- static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT;
- static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_PHYSICAL_DEVICE;
- static const char* Typename() {
- return "VkPhysicalDevice";
- }
-};
-template <> struct VulkanObjectTypeInfo<kVulkanObjectTypePhysicalDevice> {
- typedef VkPhysicalDevice Type;
-};
-template <> struct VkHandleInfo<VkQueue> {
- static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeQueue;
- static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT;
- static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_QUEUE;
- static const char* Typename() {
- return "VkQueue";
- }
-};
-template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeQueue> {
- typedef VkQueue Type;
-};
-#ifdef TYPESAFE_NONDISPATCHABLE_HANDLES
-template <> struct VkHandleInfo<VkAccelerationStructureNV> {
- static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeAccelerationStructureNV;
- static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT;
- static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV;
- static const char* Typename() {
- return "VkAccelerationStructureNV";
- }
-};
-template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeAccelerationStructureNV> {
- typedef VkAccelerationStructureNV Type;
-};
-template <> struct VkHandleInfo<VkBuffer> {
- static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeBuffer;
- static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT;
- static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_BUFFER;
- static const char* Typename() {
- return "VkBuffer";
- }
-};
-template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeBuffer> {
- typedef VkBuffer Type;
-};
-template <> struct VkHandleInfo<VkBufferView> {
- static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeBufferView;
- static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT;
- static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_BUFFER_VIEW;
- static const char* Typename() {
- return "VkBufferView";
- }
-};
-template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeBufferView> {
- typedef VkBufferView Type;
-};
-template <> struct VkHandleInfo<VkCommandPool> {
- static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeCommandPool;
- static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT;
- static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_COMMAND_POOL;
- static const char* Typename() {
- return "VkCommandPool";
- }
-};
-template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeCommandPool> {
- typedef VkCommandPool Type;
-};
-template <> struct VkHandleInfo<VkDebugReportCallbackEXT> {
- static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeDebugReportCallbackEXT;
- static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT;
- static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT;
- static const char* Typename() {
- return "VkDebugReportCallbackEXT";
- }
-};
-template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeDebugReportCallbackEXT> {
- typedef VkDebugReportCallbackEXT Type;
-};
-template <> struct VkHandleInfo<VkDebugUtilsMessengerEXT> {
- static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeDebugUtilsMessengerEXT;
- static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT;
- static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT;
- static const char* Typename() {
- return "VkDebugUtilsMessengerEXT";
- }
-};
-template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeDebugUtilsMessengerEXT> {
- typedef VkDebugUtilsMessengerEXT Type;
-};
-template <> struct VkHandleInfo<VkDescriptorPool> {
- static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeDescriptorPool;
- static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT;
- static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_DESCRIPTOR_POOL;
- static const char* Typename() {
- return "VkDescriptorPool";
- }
-};
-template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeDescriptorPool> {
- typedef VkDescriptorPool Type;
-};
-template <> struct VkHandleInfo<VkDescriptorSet> {
- static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeDescriptorSet;
- static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT;
- static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_DESCRIPTOR_SET;
- static const char* Typename() {
- return "VkDescriptorSet";
- }
-};
-template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeDescriptorSet> {
- typedef VkDescriptorSet Type;
-};
-template <> struct VkHandleInfo<VkDescriptorSetLayout> {
- static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeDescriptorSetLayout;
- static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT;
- static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT;
- static const char* Typename() {
- return "VkDescriptorSetLayout";
- }
-};
-template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeDescriptorSetLayout> {
- typedef VkDescriptorSetLayout Type;
-};
-template <> struct VkHandleInfo<VkDescriptorUpdateTemplate> {
- static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeDescriptorUpdateTemplate;
- static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT;
- static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE;
- static const char* Typename() {
- return "VkDescriptorUpdateTemplate";
- }
-};
-template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeDescriptorUpdateTemplate> {
- typedef VkDescriptorUpdateTemplate Type;
-};
-template <> struct VkHandleInfo<VkDeviceMemory> {
- static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeDeviceMemory;
- static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT;
- static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_DEVICE_MEMORY;
- static const char* Typename() {
- return "VkDeviceMemory";
- }
-};
-template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeDeviceMemory> {
- typedef VkDeviceMemory Type;
-};
-template <> struct VkHandleInfo<VkDisplayKHR> {
- static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeDisplayKHR;
- static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT;
- static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_DISPLAY_KHR;
- static const char* Typename() {
- return "VkDisplayKHR";
- }
-};
-template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeDisplayKHR> {
- typedef VkDisplayKHR Type;
-};
-template <> struct VkHandleInfo<VkDisplayModeKHR> {
- static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeDisplayModeKHR;
- static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT;
- static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_DISPLAY_MODE_KHR;
- static const char* Typename() {
- return "VkDisplayModeKHR";
- }
-};
-template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeDisplayModeKHR> {
- typedef VkDisplayModeKHR Type;
-};
-template <> struct VkHandleInfo<VkEvent> {
- static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeEvent;
- static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT;
- static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_EVENT;
- static const char* Typename() {
- return "VkEvent";
- }
-};
-template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeEvent> {
- typedef VkEvent Type;
-};
-template <> struct VkHandleInfo<VkFence> {
- static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeFence;
- static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT;
- static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_FENCE;
- static const char* Typename() {
- return "VkFence";
- }
-};
-template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeFence> {
- typedef VkFence Type;
-};
-template <> struct VkHandleInfo<VkFramebuffer> {
- static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeFramebuffer;
- static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT;
- static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_FRAMEBUFFER;
- static const char* Typename() {
- return "VkFramebuffer";
- }
-};
-template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeFramebuffer> {
- typedef VkFramebuffer Type;
-};
-template <> struct VkHandleInfo<VkImage> {
- static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeImage;
- static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT;
- static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_IMAGE;
- static const char* Typename() {
- return "VkImage";
- }
-};
-template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeImage> {
- typedef VkImage Type;
-};
-template <> struct VkHandleInfo<VkImageView> {
- static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeImageView;
- static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT;
- static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_IMAGE_VIEW;
- static const char* Typename() {
- return "VkImageView";
- }
-};
-template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeImageView> {
- typedef VkImageView Type;
-};
-template <> struct VkHandleInfo<VkIndirectCommandsLayoutNVX> {
- static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeIndirectCommandsLayoutNVX;
- static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT;
- static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX;
- static const char* Typename() {
- return "VkIndirectCommandsLayoutNVX";
- }
-};
-template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeIndirectCommandsLayoutNVX> {
- typedef VkIndirectCommandsLayoutNVX Type;
-};
-template <> struct VkHandleInfo<VkObjectTableNVX> {
- static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeObjectTableNVX;
- static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT;
- static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_OBJECT_TABLE_NVX;
- static const char* Typename() {
- return "VkObjectTableNVX";
- }
-};
-template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeObjectTableNVX> {
- typedef VkObjectTableNVX Type;
-};
-template <> struct VkHandleInfo<VkPerformanceConfigurationINTEL> {
- static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypePerformanceConfigurationINTEL;
- static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT;
- static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL;
- static const char* Typename() {
- return "VkPerformanceConfigurationINTEL";
- }
-};
-template <> struct VulkanObjectTypeInfo<kVulkanObjectTypePerformanceConfigurationINTEL> {
- typedef VkPerformanceConfigurationINTEL Type;
-};
-template <> struct VkHandleInfo<VkPipeline> {
- static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypePipeline;
- static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT;
- static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_PIPELINE;
- static const char* Typename() {
- return "VkPipeline";
- }
-};
-template <> struct VulkanObjectTypeInfo<kVulkanObjectTypePipeline> {
- typedef VkPipeline Type;
-};
-template <> struct VkHandleInfo<VkPipelineCache> {
- static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypePipelineCache;
- static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT;
- static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_PIPELINE_CACHE;
- static const char* Typename() {
- return "VkPipelineCache";
- }
-};
-template <> struct VulkanObjectTypeInfo<kVulkanObjectTypePipelineCache> {
- typedef VkPipelineCache Type;
-};
-template <> struct VkHandleInfo<VkPipelineLayout> {
- static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypePipelineLayout;
- static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT;
- static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_PIPELINE_LAYOUT;
- static const char* Typename() {
- return "VkPipelineLayout";
- }
-};
-template <> struct VulkanObjectTypeInfo<kVulkanObjectTypePipelineLayout> {
- typedef VkPipelineLayout Type;
-};
-template <> struct VkHandleInfo<VkQueryPool> {
- static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeQueryPool;
- static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT;
- static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_QUERY_POOL;
- static const char* Typename() {
- return "VkQueryPool";
- }
-};
-template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeQueryPool> {
- typedef VkQueryPool Type;
-};
-template <> struct VkHandleInfo<VkRenderPass> {
- static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeRenderPass;
- static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT;
- static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_RENDER_PASS;
- static const char* Typename() {
- return "VkRenderPass";
- }
-};
-template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeRenderPass> {
- typedef VkRenderPass Type;
-};
-template <> struct VkHandleInfo<VkSampler> {
- static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeSampler;
- static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT;
- static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_SAMPLER;
- static const char* Typename() {
- return "VkSampler";
- }
-};
-template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeSampler> {
- typedef VkSampler Type;
-};
-template <> struct VkHandleInfo<VkSamplerYcbcrConversion> {
- static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeSamplerYcbcrConversion;
- static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT;
- static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION;
- static const char* Typename() {
- return "VkSamplerYcbcrConversion";
- }
-};
-template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeSamplerYcbcrConversion> {
- typedef VkSamplerYcbcrConversion Type;
-};
-template <> struct VkHandleInfo<VkSemaphore> {
- static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeSemaphore;
- static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT;
- static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_SEMAPHORE;
- static const char* Typename() {
- return "VkSemaphore";
- }
-};
-template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeSemaphore> {
- typedef VkSemaphore Type;
-};
-template <> struct VkHandleInfo<VkShaderModule> {
- static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeShaderModule;
- static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT;
- static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_SHADER_MODULE;
- static const char* Typename() {
- return "VkShaderModule";
- }
-};
-template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeShaderModule> {
- typedef VkShaderModule Type;
-};
-template <> struct VkHandleInfo<VkSurfaceKHR> {
- static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeSurfaceKHR;
- static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT;
- static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_SURFACE_KHR;
- static const char* Typename() {
- return "VkSurfaceKHR";
- }
-};
-template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeSurfaceKHR> {
- typedef VkSurfaceKHR Type;
-};
-template <> struct VkHandleInfo<VkSwapchainKHR> {
- static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeSwapchainKHR;
- static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT;
- static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_SWAPCHAIN_KHR;
- static const char* Typename() {
- return "VkSwapchainKHR";
- }
-};
-template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeSwapchainKHR> {
- typedef VkSwapchainKHR Type;
-};
-template <> struct VkHandleInfo<VkValidationCacheEXT> {
- static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeValidationCacheEXT;
- static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT;
- static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_VALIDATION_CACHE_EXT;
- static const char* Typename() {
- return "VkValidationCacheEXT";
- }
-};
-template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeValidationCacheEXT> {
- typedef VkValidationCacheEXT Type;
-};
-#endif // TYPESAFE_NONDISPATCHABLE_HANDLES
-struct VulkanTypedHandle {
- uint64_t handle;
- VulkanObjectType type;
- template <typename Handle>
- VulkanTypedHandle(Handle handle_, VulkanObjectType type_) :
- handle(CastToUint64(handle_)),
- type(type_) {
-#ifdef TYPESAFE_NONDISPATCHABLE_HANDLES
-        // On 32-bit builds it is not always safe to check traits against the type,
-        // because all non-dispatchable handles share the same type-id (and thus the same traits);
-        // on 64-bit builds we can validate that the passed type matches the passed handle.
- assert(type == VkHandleInfo<Handle>::kVulkanObjectType);
-#endif // TYPESAFE_NONDISPATCHABLE_HANDLES
- }
- template <typename Handle>
- Handle Cast() const {
-#ifdef TYPESAFE_NONDISPATCHABLE_HANDLES
- assert(type == VkHandleInfo<Handle>::kVulkanObjectType);
-#endif // TYPESAFE_NONDISPATCHABLE_HANDLES
- return CastFromUint64<Handle>(handle);
- }
- VulkanTypedHandle() :
- handle(VK_NULL_HANDLE),
- type(kVulkanObjectTypeUnknown) {}
-};
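VulkanTypedHandle type-erases any handle into a uint64_t plus the internal type enum; on 64-bit builds both the constructor and Cast<>() assert that the enum matches the handle's traits. An illustrative round trip (assuming the CastToUint64/CastFromUint64 helpers from cast_utils.h are in scope, as this header requires):

VkBuffer buffer = VK_NULL_HANDLE;                           // stand-in for a real handle
VulkanTypedHandle typed(buffer, kVulkanObjectTypeBuffer);   // packs the handle, asserts the type on 64-bit
VkBuffer restored = typed.Cast<VkBuffer>();                 // asserts again, then unpacks the same value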
-
diff --git a/layers/generated/vk_safe_struct.cpp b/layers/generated/vk_safe_struct.cpp
deleted file mode 100644
index 3ec4f4626..000000000
--- a/layers/generated/vk_safe_struct.cpp
+++ /dev/null
@@ -1,29684 +0,0 @@
-// *** THIS FILE IS GENERATED - DO NOT EDIT ***
-// See helper_file_generator.py for modifications
-
-
-/***************************************************************************
- *
- * Copyright (c) 2015-2019 The Khronos Group Inc.
- * Copyright (c) 2015-2019 Valve Corporation
- * Copyright (c) 2015-2019 LunarG, Inc.
- * Copyright (c) 2015-2019 Google Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * Author: Mark Lobodzinski <mark@lunarg.com>
- * Author: Courtney Goeltzenleuchter <courtneygo@google.com>
- * Author: Tobin Ehlis <tobine@google.com>
- * Author: Chris Forbes <chrisforbes@google.com>
- * Author: John Zulauf <jzulauf@lunarg.com>
- *
- ****************************************************************************/
-
-
-#include "vk_safe_struct.h"
-
-#include <string.h>
-#include <cassert>
-#include <cstring>
-
-#include <vulkan/vk_layer.h>
-
-
-safe_VkApplicationInfo::safe_VkApplicationInfo(const VkApplicationInfo* in_struct) :
- sType(in_struct->sType),
- applicationVersion(in_struct->applicationVersion),
- engineVersion(in_struct->engineVersion),
- apiVersion(in_struct->apiVersion)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- pApplicationName = SafeStringCopy(in_struct->pApplicationName);
- pEngineName = SafeStringCopy(in_struct->pEngineName);
-}
-
-safe_VkApplicationInfo::safe_VkApplicationInfo() :
- pNext(nullptr),
- pApplicationName(nullptr),
- pEngineName(nullptr)
-{}
-
-safe_VkApplicationInfo::safe_VkApplicationInfo(const safe_VkApplicationInfo& src)
-{
- sType = src.sType;
- applicationVersion = src.applicationVersion;
- engineVersion = src.engineVersion;
- apiVersion = src.apiVersion;
- pNext = SafePnextCopy(src.pNext);
- pApplicationName = SafeStringCopy(src.pApplicationName);
- pEngineName = SafeStringCopy(src.pEngineName);
-}
-
-safe_VkApplicationInfo& safe_VkApplicationInfo::operator=(const safe_VkApplicationInfo& src)
-{
- if (&src == this) return *this;
-
- if (pApplicationName) delete [] pApplicationName;
- if (pEngineName) delete [] pEngineName;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- applicationVersion = src.applicationVersion;
- engineVersion = src.engineVersion;
- apiVersion = src.apiVersion;
- pNext = SafePnextCopy(src.pNext);
- pApplicationName = SafeStringCopy(src.pApplicationName);
- pEngineName = SafeStringCopy(src.pEngineName);
-
- return *this;
-}
-
-safe_VkApplicationInfo::~safe_VkApplicationInfo()
-{
- if (pApplicationName) delete [] pApplicationName;
- if (pEngineName) delete [] pEngineName;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkApplicationInfo::initialize(const VkApplicationInfo* in_struct)
-{
- sType = in_struct->sType;
- applicationVersion = in_struct->applicationVersion;
- engineVersion = in_struct->engineVersion;
- apiVersion = in_struct->apiVersion;
- pNext = SafePnextCopy(in_struct->pNext);
- pApplicationName = SafeStringCopy(in_struct->pApplicationName);
- pEngineName = SafeStringCopy(in_struct->pEngineName);
-}
-
-void safe_VkApplicationInfo::initialize(const safe_VkApplicationInfo* src)
-{
- sType = src->sType;
- applicationVersion = src->applicationVersion;
- engineVersion = src->engineVersion;
- apiVersion = src->apiVersion;
- pNext = SafePnextCopy(src->pNext);
- pApplicationName = SafeStringCopy(src->pApplicationName);
- pEngineName = SafeStringCopy(src->pEngineName);
-}
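Each safe_* wrapper deep-copies the pointer members of the corresponding Vulkan struct, here the two name strings and the pNext chain, so the copy remains valid after the caller's struct goes away. A sketch of the intended use inside a layer (assuming vk_safe_struct.h is included):

VkApplicationInfo app_info = {};
app_info.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
app_info.pApplicationName = "triangle";        // caller-owned string
safe_VkApplicationInfo saved(&app_info);       // copies the strings and the pNext chain
// `saved` stays valid even after app_info and its strings are destroyed.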
-
-safe_VkInstanceCreateInfo::safe_VkInstanceCreateInfo(const VkInstanceCreateInfo* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- pApplicationInfo(nullptr),
- enabledLayerCount(in_struct->enabledLayerCount),
- enabledExtensionCount(in_struct->enabledExtensionCount)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- char **tmp_ppEnabledLayerNames = new char *[in_struct->enabledLayerCount];
- for (uint32_t i = 0; i < enabledLayerCount; ++i) {
- tmp_ppEnabledLayerNames[i] = SafeStringCopy(in_struct->ppEnabledLayerNames[i]);
- }
- ppEnabledLayerNames = tmp_ppEnabledLayerNames;
- char **tmp_ppEnabledExtensionNames = new char *[in_struct->enabledExtensionCount];
- for (uint32_t i = 0; i < enabledExtensionCount; ++i) {
- tmp_ppEnabledExtensionNames[i] = SafeStringCopy(in_struct->ppEnabledExtensionNames[i]);
- }
- ppEnabledExtensionNames = tmp_ppEnabledExtensionNames;
- if (in_struct->pApplicationInfo)
- pApplicationInfo = new safe_VkApplicationInfo(in_struct->pApplicationInfo);
-}
-
-safe_VkInstanceCreateInfo::safe_VkInstanceCreateInfo() :
- pNext(nullptr),
- pApplicationInfo(nullptr),
- ppEnabledLayerNames(nullptr),
- ppEnabledExtensionNames(nullptr)
-{}
-
-safe_VkInstanceCreateInfo::safe_VkInstanceCreateInfo(const safe_VkInstanceCreateInfo& src)
-{
- sType = src.sType;
- flags = src.flags;
- pApplicationInfo = nullptr;
- enabledLayerCount = src.enabledLayerCount;
- enabledExtensionCount = src.enabledExtensionCount;
- pNext = SafePnextCopy(src.pNext);
- char **tmp_ppEnabledLayerNames = new char *[src.enabledLayerCount];
- for (uint32_t i = 0; i < enabledLayerCount; ++i) {
- tmp_ppEnabledLayerNames[i] = SafeStringCopy(src.ppEnabledLayerNames[i]);
- }
- ppEnabledLayerNames = tmp_ppEnabledLayerNames;
- char **tmp_ppEnabledExtensionNames = new char *[src.enabledExtensionCount];
- for (uint32_t i = 0; i < enabledExtensionCount; ++i) {
- tmp_ppEnabledExtensionNames[i] = SafeStringCopy(src.ppEnabledExtensionNames[i]);
- }
- ppEnabledExtensionNames = tmp_ppEnabledExtensionNames;
- if (src.pApplicationInfo)
- pApplicationInfo = new safe_VkApplicationInfo(*src.pApplicationInfo);
-}
-
-safe_VkInstanceCreateInfo& safe_VkInstanceCreateInfo::operator=(const safe_VkInstanceCreateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pApplicationInfo)
- delete pApplicationInfo;
- if (ppEnabledLayerNames) {
- for (uint32_t i = 0; i < enabledLayerCount; ++i) {
- delete [] ppEnabledLayerNames[i];
- }
- delete [] ppEnabledLayerNames;
- }
- if (ppEnabledExtensionNames) {
- for (uint32_t i = 0; i < enabledExtensionCount; ++i) {
- delete [] ppEnabledExtensionNames[i];
- }
- delete [] ppEnabledExtensionNames;
- }
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- pApplicationInfo = nullptr;
- enabledLayerCount = src.enabledLayerCount;
- enabledExtensionCount = src.enabledExtensionCount;
- pNext = SafePnextCopy(src.pNext);
- char **tmp_ppEnabledLayerNames = new char *[src.enabledLayerCount];
- for (uint32_t i = 0; i < enabledLayerCount; ++i) {
- tmp_ppEnabledLayerNames[i] = SafeStringCopy(src.ppEnabledLayerNames[i]);
- }
- ppEnabledLayerNames = tmp_ppEnabledLayerNames;
- char **tmp_ppEnabledExtensionNames = new char *[src.enabledExtensionCount];
- for (uint32_t i = 0; i < enabledExtensionCount; ++i) {
- tmp_ppEnabledExtensionNames[i] = SafeStringCopy(src.ppEnabledExtensionNames[i]);
- }
- ppEnabledExtensionNames = tmp_ppEnabledExtensionNames;
- if (src.pApplicationInfo)
- pApplicationInfo = new safe_VkApplicationInfo(*src.pApplicationInfo);
-
- return *this;
-}
-
-safe_VkInstanceCreateInfo::~safe_VkInstanceCreateInfo()
-{
- if (pApplicationInfo)
- delete pApplicationInfo;
- if (ppEnabledLayerNames) {
- for (uint32_t i = 0; i < enabledLayerCount; ++i) {
- delete [] ppEnabledLayerNames[i];
- }
- delete [] ppEnabledLayerNames;
- }
- if (ppEnabledExtensionNames) {
- for (uint32_t i = 0; i < enabledExtensionCount; ++i) {
- delete [] ppEnabledExtensionNames[i];
- }
- delete [] ppEnabledExtensionNames;
- }
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkInstanceCreateInfo::initialize(const VkInstanceCreateInfo* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- pApplicationInfo = nullptr;
- enabledLayerCount = in_struct->enabledLayerCount;
- enabledExtensionCount = in_struct->enabledExtensionCount;
- pNext = SafePnextCopy(in_struct->pNext);
- char **tmp_ppEnabledLayerNames = new char *[in_struct->enabledLayerCount];
- for (uint32_t i = 0; i < enabledLayerCount; ++i) {
- tmp_ppEnabledLayerNames[i] = SafeStringCopy(in_struct->ppEnabledLayerNames[i]);
- }
- ppEnabledLayerNames = tmp_ppEnabledLayerNames;
- char **tmp_ppEnabledExtensionNames = new char *[in_struct->enabledExtensionCount];
- for (uint32_t i = 0; i < enabledExtensionCount; ++i) {
- tmp_ppEnabledExtensionNames[i] = SafeStringCopy(in_struct->ppEnabledExtensionNames[i]);
- }
- ppEnabledExtensionNames = tmp_ppEnabledExtensionNames;
- if (in_struct->pApplicationInfo)
- pApplicationInfo = new safe_VkApplicationInfo(in_struct->pApplicationInfo);
-}
-
-void safe_VkInstanceCreateInfo::initialize(const safe_VkInstanceCreateInfo* src)
-{
- sType = src->sType;
- flags = src->flags;
- pApplicationInfo = nullptr;
- enabledLayerCount = src->enabledLayerCount;
- enabledExtensionCount = src->enabledExtensionCount;
- pNext = SafePnextCopy(src->pNext);
- char **tmp_ppEnabledLayerNames = new char *[src->enabledLayerCount];
- for (uint32_t i = 0; i < enabledLayerCount; ++i) {
- tmp_ppEnabledLayerNames[i] = SafeStringCopy(src->ppEnabledLayerNames[i]);
- }
- ppEnabledLayerNames = tmp_ppEnabledLayerNames;
- char **tmp_ppEnabledExtensionNames = new char *[src->enabledExtensionCount];
- for (uint32_t i = 0; i < enabledExtensionCount; ++i) {
- tmp_ppEnabledExtensionNames[i] = SafeStringCopy(src->ppEnabledExtensionNames[i]);
- }
- ppEnabledExtensionNames = tmp_ppEnabledExtensionNames;
- if (src->pApplicationInfo)
- pApplicationInfo = new safe_VkApplicationInfo(*src->pApplicationInfo);
-}
-
-safe_VkAllocationCallbacks::safe_VkAllocationCallbacks(const VkAllocationCallbacks* in_struct) :
- pUserData(in_struct->pUserData),
- pfnAllocation(in_struct->pfnAllocation),
- pfnReallocation(in_struct->pfnReallocation),
- pfnFree(in_struct->pfnFree),
- pfnInternalAllocation(in_struct->pfnInternalAllocation),
- pfnInternalFree(in_struct->pfnInternalFree)
-{
-}
-
-safe_VkAllocationCallbacks::safe_VkAllocationCallbacks() :
- pUserData(nullptr)
-{}
-
-safe_VkAllocationCallbacks::safe_VkAllocationCallbacks(const safe_VkAllocationCallbacks& src)
-{
- pUserData = src.pUserData;
- pfnAllocation = src.pfnAllocation;
- pfnReallocation = src.pfnReallocation;
- pfnFree = src.pfnFree;
- pfnInternalAllocation = src.pfnInternalAllocation;
- pfnInternalFree = src.pfnInternalFree;
-}
-
-safe_VkAllocationCallbacks& safe_VkAllocationCallbacks::operator=(const safe_VkAllocationCallbacks& src)
-{
- if (&src == this) return *this;
-
-
- pUserData = src.pUserData;
- pfnAllocation = src.pfnAllocation;
- pfnReallocation = src.pfnReallocation;
- pfnFree = src.pfnFree;
- pfnInternalAllocation = src.pfnInternalAllocation;
- pfnInternalFree = src.pfnInternalFree;
-
- return *this;
-}
-
-safe_VkAllocationCallbacks::~safe_VkAllocationCallbacks()
-{
-}
-
-void safe_VkAllocationCallbacks::initialize(const VkAllocationCallbacks* in_struct)
-{
- pUserData = in_struct->pUserData;
- pfnAllocation = in_struct->pfnAllocation;
- pfnReallocation = in_struct->pfnReallocation;
- pfnFree = in_struct->pfnFree;
- pfnInternalAllocation = in_struct->pfnInternalAllocation;
- pfnInternalFree = in_struct->pfnInternalFree;
-}
-
-void safe_VkAllocationCallbacks::initialize(const safe_VkAllocationCallbacks* src)
-{
- pUserData = src->pUserData;
- pfnAllocation = src->pfnAllocation;
- pfnReallocation = src->pfnReallocation;
- pfnFree = src->pfnFree;
- pfnInternalAllocation = src->pfnInternalAllocation;
- pfnInternalFree = src->pfnInternalFree;
-}
-
-safe_VkDeviceQueueCreateInfo::safe_VkDeviceQueueCreateInfo(const VkDeviceQueueCreateInfo* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- queueFamilyIndex(in_struct->queueFamilyIndex),
- queueCount(in_struct->queueCount),
- pQueuePriorities(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pQueuePriorities) {
- pQueuePriorities = new float[in_struct->queueCount];
- memcpy ((void *)pQueuePriorities, (void *)in_struct->pQueuePriorities, sizeof(float)*in_struct->queueCount);
- }
-}
-
-safe_VkDeviceQueueCreateInfo::safe_VkDeviceQueueCreateInfo() :
- pNext(nullptr),
- pQueuePriorities(nullptr)
-{}
-
-safe_VkDeviceQueueCreateInfo::safe_VkDeviceQueueCreateInfo(const safe_VkDeviceQueueCreateInfo& src)
-{
- sType = src.sType;
- flags = src.flags;
- queueFamilyIndex = src.queueFamilyIndex;
- queueCount = src.queueCount;
- pQueuePriorities = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pQueuePriorities) {
- pQueuePriorities = new float[src.queueCount];
- memcpy ((void *)pQueuePriorities, (void *)src.pQueuePriorities, sizeof(float)*src.queueCount);
- }
-}
-
-safe_VkDeviceQueueCreateInfo& safe_VkDeviceQueueCreateInfo::operator=(const safe_VkDeviceQueueCreateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pQueuePriorities)
- delete[] pQueuePriorities;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- queueFamilyIndex = src.queueFamilyIndex;
- queueCount = src.queueCount;
- pQueuePriorities = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pQueuePriorities) {
- pQueuePriorities = new float[src.queueCount];
- memcpy ((void *)pQueuePriorities, (void *)src.pQueuePriorities, sizeof(float)*src.queueCount);
- }
-
- return *this;
-}
-
-safe_VkDeviceQueueCreateInfo::~safe_VkDeviceQueueCreateInfo()
-{
- if (pQueuePriorities)
- delete[] pQueuePriorities;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDeviceQueueCreateInfo::initialize(const VkDeviceQueueCreateInfo* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- queueFamilyIndex = in_struct->queueFamilyIndex;
- queueCount = in_struct->queueCount;
- pQueuePriorities = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pQueuePriorities) {
- pQueuePriorities = new float[in_struct->queueCount];
- memcpy ((void *)pQueuePriorities, (void *)in_struct->pQueuePriorities, sizeof(float)*in_struct->queueCount);
- }
-}
-
-void safe_VkDeviceQueueCreateInfo::initialize(const safe_VkDeviceQueueCreateInfo* src)
-{
- sType = src->sType;
- flags = src->flags;
- queueFamilyIndex = src->queueFamilyIndex;
- queueCount = src->queueCount;
- pQueuePriorities = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (src->pQueuePriorities) {
- pQueuePriorities = new float[src->queueCount];
- memcpy ((void *)pQueuePriorities, (void *)src->pQueuePriorities, sizeof(float)*src->queueCount);
- }
-}
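For plain-data arrays such as pQueuePriorities the generated copy allocates with new[] and fills it with memcpy, sized by the struct's own count. Illustrative use:

float priorities[2] = {1.0f, 0.5f};
VkDeviceQueueCreateInfo qci = {};
qci.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
qci.queueCount = 2;
qci.pQueuePriorities = priorities;             // caller-owned array
safe_VkDeviceQueueCreateInfo safe_qci(&qci);   // allocates and copies its own float[2]
// safe_qci.pQueuePriorities now points at layer-owned memory.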
-
-safe_VkDeviceCreateInfo::safe_VkDeviceCreateInfo(const VkDeviceCreateInfo* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- queueCreateInfoCount(in_struct->queueCreateInfoCount),
- pQueueCreateInfos(nullptr),
- enabledLayerCount(in_struct->enabledLayerCount),
- enabledExtensionCount(in_struct->enabledExtensionCount),
- pEnabledFeatures(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- char **tmp_ppEnabledLayerNames = new char *[in_struct->enabledLayerCount];
- for (uint32_t i = 0; i < enabledLayerCount; ++i) {
- tmp_ppEnabledLayerNames[i] = SafeStringCopy(in_struct->ppEnabledLayerNames[i]);
- }
- ppEnabledLayerNames = tmp_ppEnabledLayerNames;
- char **tmp_ppEnabledExtensionNames = new char *[in_struct->enabledExtensionCount];
- for (uint32_t i = 0; i < enabledExtensionCount; ++i) {
- tmp_ppEnabledExtensionNames[i] = SafeStringCopy(in_struct->ppEnabledExtensionNames[i]);
- }
- ppEnabledExtensionNames = tmp_ppEnabledExtensionNames;
- if (queueCreateInfoCount && in_struct->pQueueCreateInfos) {
- pQueueCreateInfos = new safe_VkDeviceQueueCreateInfo[queueCreateInfoCount];
- for (uint32_t i = 0; i < queueCreateInfoCount; ++i) {
- pQueueCreateInfos[i].initialize(&in_struct->pQueueCreateInfos[i]);
- }
- }
- if (in_struct->pEnabledFeatures) {
- pEnabledFeatures = new VkPhysicalDeviceFeatures(*in_struct->pEnabledFeatures);
- }
-}
-
-safe_VkDeviceCreateInfo::safe_VkDeviceCreateInfo() :
- pNext(nullptr),
- pQueueCreateInfos(nullptr),
- ppEnabledLayerNames(nullptr),
- ppEnabledExtensionNames(nullptr),
- pEnabledFeatures(nullptr)
-{}
-
-safe_VkDeviceCreateInfo::safe_VkDeviceCreateInfo(const safe_VkDeviceCreateInfo& src)
-{
- sType = src.sType;
- flags = src.flags;
- queueCreateInfoCount = src.queueCreateInfoCount;
- pQueueCreateInfos = nullptr;
- enabledLayerCount = src.enabledLayerCount;
- enabledExtensionCount = src.enabledExtensionCount;
- pEnabledFeatures = nullptr;
- pNext = SafePnextCopy(src.pNext);
- char **tmp_ppEnabledLayerNames = new char *[src.enabledLayerCount];
- for (uint32_t i = 0; i < enabledLayerCount; ++i) {
- tmp_ppEnabledLayerNames[i] = SafeStringCopy(src.ppEnabledLayerNames[i]);
- }
- ppEnabledLayerNames = tmp_ppEnabledLayerNames;
- char **tmp_ppEnabledExtensionNames = new char *[src.enabledExtensionCount];
- for (uint32_t i = 0; i < enabledExtensionCount; ++i) {
- tmp_ppEnabledExtensionNames[i] = SafeStringCopy(src.ppEnabledExtensionNames[i]);
- }
- ppEnabledExtensionNames = tmp_ppEnabledExtensionNames;
- if (queueCreateInfoCount && src.pQueueCreateInfos) {
- pQueueCreateInfos = new safe_VkDeviceQueueCreateInfo[queueCreateInfoCount];
- for (uint32_t i = 0; i < queueCreateInfoCount; ++i) {
- pQueueCreateInfos[i].initialize(&src.pQueueCreateInfos[i]);
- }
- }
- if (src.pEnabledFeatures) {
- pEnabledFeatures = new VkPhysicalDeviceFeatures(*src.pEnabledFeatures);
- }
-}
-
-safe_VkDeviceCreateInfo& safe_VkDeviceCreateInfo::operator=(const safe_VkDeviceCreateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pQueueCreateInfos)
- delete[] pQueueCreateInfos;
- if (ppEnabledLayerNames) {
- for (uint32_t i = 0; i < enabledLayerCount; ++i) {
- delete [] ppEnabledLayerNames[i];
- }
- delete [] ppEnabledLayerNames;
- }
- if (ppEnabledExtensionNames) {
- for (uint32_t i = 0; i < enabledExtensionCount; ++i) {
- delete [] ppEnabledExtensionNames[i];
- }
- delete [] ppEnabledExtensionNames;
- }
- if (pEnabledFeatures)
- delete pEnabledFeatures;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- queueCreateInfoCount = src.queueCreateInfoCount;
- pQueueCreateInfos = nullptr;
- enabledLayerCount = src.enabledLayerCount;
- enabledExtensionCount = src.enabledExtensionCount;
- pEnabledFeatures = nullptr;
- pNext = SafePnextCopy(src.pNext);
- char **tmp_ppEnabledLayerNames = new char *[src.enabledLayerCount];
- for (uint32_t i = 0; i < enabledLayerCount; ++i) {
- tmp_ppEnabledLayerNames[i] = SafeStringCopy(src.ppEnabledLayerNames[i]);
- }
- ppEnabledLayerNames = tmp_ppEnabledLayerNames;
- char **tmp_ppEnabledExtensionNames = new char *[src.enabledExtensionCount];
- for (uint32_t i = 0; i < enabledExtensionCount; ++i) {
- tmp_ppEnabledExtensionNames[i] = SafeStringCopy(src.ppEnabledExtensionNames[i]);
- }
- ppEnabledExtensionNames = tmp_ppEnabledExtensionNames;
- if (queueCreateInfoCount && src.pQueueCreateInfos) {
- pQueueCreateInfos = new safe_VkDeviceQueueCreateInfo[queueCreateInfoCount];
- for (uint32_t i = 0; i < queueCreateInfoCount; ++i) {
- pQueueCreateInfos[i].initialize(&src.pQueueCreateInfos[i]);
- }
- }
- if (src.pEnabledFeatures) {
- pEnabledFeatures = new VkPhysicalDeviceFeatures(*src.pEnabledFeatures);
- }
-
- return *this;
-}
-
-safe_VkDeviceCreateInfo::~safe_VkDeviceCreateInfo()
-{
- if (pQueueCreateInfos)
- delete[] pQueueCreateInfos;
- if (ppEnabledLayerNames) {
- for (uint32_t i = 0; i < enabledLayerCount; ++i) {
- delete [] ppEnabledLayerNames[i];
- }
- delete [] ppEnabledLayerNames;
- }
- if (ppEnabledExtensionNames) {
- for (uint32_t i = 0; i < enabledExtensionCount; ++i) {
- delete [] ppEnabledExtensionNames[i];
- }
- delete [] ppEnabledExtensionNames;
- }
- if (pEnabledFeatures)
- delete pEnabledFeatures;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDeviceCreateInfo::initialize(const VkDeviceCreateInfo* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- queueCreateInfoCount = in_struct->queueCreateInfoCount;
- pQueueCreateInfos = nullptr;
- enabledLayerCount = in_struct->enabledLayerCount;
- enabledExtensionCount = in_struct->enabledExtensionCount;
- pEnabledFeatures = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- char **tmp_ppEnabledLayerNames = new char *[in_struct->enabledLayerCount];
- for (uint32_t i = 0; i < enabledLayerCount; ++i) {
- tmp_ppEnabledLayerNames[i] = SafeStringCopy(in_struct->ppEnabledLayerNames[i]);
- }
- ppEnabledLayerNames = tmp_ppEnabledLayerNames;
- char **tmp_ppEnabledExtensionNames = new char *[in_struct->enabledExtensionCount];
- for (uint32_t i = 0; i < enabledExtensionCount; ++i) {
- tmp_ppEnabledExtensionNames[i] = SafeStringCopy(in_struct->ppEnabledExtensionNames[i]);
- }
- ppEnabledExtensionNames = tmp_ppEnabledExtensionNames;
- if (queueCreateInfoCount && in_struct->pQueueCreateInfos) {
- pQueueCreateInfos = new safe_VkDeviceQueueCreateInfo[queueCreateInfoCount];
- for (uint32_t i = 0; i < queueCreateInfoCount; ++i) {
- pQueueCreateInfos[i].initialize(&in_struct->pQueueCreateInfos[i]);
- }
- }
- if (in_struct->pEnabledFeatures) {
- pEnabledFeatures = new VkPhysicalDeviceFeatures(*in_struct->pEnabledFeatures);
- }
-}
-
-void safe_VkDeviceCreateInfo::initialize(const safe_VkDeviceCreateInfo* src)
-{
- sType = src->sType;
- flags = src->flags;
- queueCreateInfoCount = src->queueCreateInfoCount;
- pQueueCreateInfos = nullptr;
- enabledLayerCount = src->enabledLayerCount;
- enabledExtensionCount = src->enabledExtensionCount;
- pEnabledFeatures = nullptr;
- pNext = SafePnextCopy(src->pNext);
- char **tmp_ppEnabledLayerNames = new char *[src->enabledLayerCount];
- for (uint32_t i = 0; i < enabledLayerCount; ++i) {
- tmp_ppEnabledLayerNames[i] = SafeStringCopy(src->ppEnabledLayerNames[i]);
- }
- ppEnabledLayerNames = tmp_ppEnabledLayerNames;
- char **tmp_ppEnabledExtensionNames = new char *[src->enabledExtensionCount];
- for (uint32_t i = 0; i < enabledExtensionCount; ++i) {
- tmp_ppEnabledExtensionNames[i] = SafeStringCopy(src->ppEnabledExtensionNames[i]);
- }
- ppEnabledExtensionNames = tmp_ppEnabledExtensionNames;
- if (queueCreateInfoCount && src->pQueueCreateInfos) {
- pQueueCreateInfos = new safe_VkDeviceQueueCreateInfo[queueCreateInfoCount];
- for (uint32_t i = 0; i < queueCreateInfoCount; ++i) {
- pQueueCreateInfos[i].initialize(&src->pQueueCreateInfos[i]);
- }
- }
- if (src->pEnabledFeatures) {
- pEnabledFeatures = new VkPhysicalDeviceFeatures(*src->pEnabledFeatures);
- }
-}
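safe_VkDeviceCreateInfo nests the pattern: the layer and extension name arrays are copied string by string, pQueueCreateInfos becomes an array of safe_VkDeviceQueueCreateInfo built via initialize(), and pEnabledFeatures is cloned, so every pointer reachable from the struct ends up layer-owned. A compact sketch of what that guarantees for a caller (illustrative):

VkDeviceQueueCreateInfo qci = {};              // filled in by the application
VkDeviceCreateInfo dci = {};
dci.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
dci.queueCreateInfoCount = 1;
dci.pQueueCreateInfos = &qci;
safe_VkDeviceCreateInfo safe_dci(&dci);        // deep-copies qci and every nested pointer
// The layer can keep safe_dci for the lifetime of the VkDevice it creates.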
-
-safe_VkSubmitInfo::safe_VkSubmitInfo(const VkSubmitInfo* in_struct) :
- sType(in_struct->sType),
- waitSemaphoreCount(in_struct->waitSemaphoreCount),
- pWaitSemaphores(nullptr),
- pWaitDstStageMask(nullptr),
- commandBufferCount(in_struct->commandBufferCount),
- pCommandBuffers(nullptr),
- signalSemaphoreCount(in_struct->signalSemaphoreCount),
- pSignalSemaphores(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (waitSemaphoreCount && in_struct->pWaitSemaphores) {
- pWaitSemaphores = new VkSemaphore[waitSemaphoreCount];
- for (uint32_t i = 0; i < waitSemaphoreCount; ++i) {
- pWaitSemaphores[i] = in_struct->pWaitSemaphores[i];
- }
- }
- if (in_struct->pWaitDstStageMask) {
- pWaitDstStageMask = new VkPipelineStageFlags[in_struct->waitSemaphoreCount];
- memcpy ((void *)pWaitDstStageMask, (void *)in_struct->pWaitDstStageMask, sizeof(VkPipelineStageFlags)*in_struct->waitSemaphoreCount);
- }
- if (in_struct->pCommandBuffers) {
- pCommandBuffers = new VkCommandBuffer[in_struct->commandBufferCount];
- memcpy ((void *)pCommandBuffers, (void *)in_struct->pCommandBuffers, sizeof(VkCommandBuffer)*in_struct->commandBufferCount);
- }
- if (signalSemaphoreCount && in_struct->pSignalSemaphores) {
- pSignalSemaphores = new VkSemaphore[signalSemaphoreCount];
- for (uint32_t i = 0; i < signalSemaphoreCount; ++i) {
- pSignalSemaphores[i] = in_struct->pSignalSemaphores[i];
- }
- }
-}
-
-safe_VkSubmitInfo::safe_VkSubmitInfo() :
- pNext(nullptr),
- pWaitSemaphores(nullptr),
- pWaitDstStageMask(nullptr),
- pCommandBuffers(nullptr),
- pSignalSemaphores(nullptr)
-{}
-
-safe_VkSubmitInfo::safe_VkSubmitInfo(const safe_VkSubmitInfo& src)
-{
- sType = src.sType;
- waitSemaphoreCount = src.waitSemaphoreCount;
- pWaitSemaphores = nullptr;
- pWaitDstStageMask = nullptr;
- commandBufferCount = src.commandBufferCount;
- pCommandBuffers = nullptr;
- signalSemaphoreCount = src.signalSemaphoreCount;
- pSignalSemaphores = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (waitSemaphoreCount && src.pWaitSemaphores) {
- pWaitSemaphores = new VkSemaphore[waitSemaphoreCount];
- for (uint32_t i = 0; i < waitSemaphoreCount; ++i) {
- pWaitSemaphores[i] = src.pWaitSemaphores[i];
- }
- }
- if (src.pWaitDstStageMask) {
- pWaitDstStageMask = new VkPipelineStageFlags[src.waitSemaphoreCount];
- memcpy ((void *)pWaitDstStageMask, (void *)src.pWaitDstStageMask, sizeof(VkPipelineStageFlags)*src.waitSemaphoreCount);
- }
- if (src.pCommandBuffers) {
- pCommandBuffers = new VkCommandBuffer[src.commandBufferCount];
- memcpy ((void *)pCommandBuffers, (void *)src.pCommandBuffers, sizeof(VkCommandBuffer)*src.commandBufferCount);
- }
- if (signalSemaphoreCount && src.pSignalSemaphores) {
- pSignalSemaphores = new VkSemaphore[signalSemaphoreCount];
- for (uint32_t i = 0; i < signalSemaphoreCount; ++i) {
- pSignalSemaphores[i] = src.pSignalSemaphores[i];
- }
- }
-}
-
-safe_VkSubmitInfo& safe_VkSubmitInfo::operator=(const safe_VkSubmitInfo& src)
-{
- if (&src == this) return *this;
-
- if (pWaitSemaphores)
- delete[] pWaitSemaphores;
- if (pWaitDstStageMask)
- delete[] pWaitDstStageMask;
- if (pCommandBuffers)
- delete[] pCommandBuffers;
- if (pSignalSemaphores)
- delete[] pSignalSemaphores;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- waitSemaphoreCount = src.waitSemaphoreCount;
- pWaitSemaphores = nullptr;
- pWaitDstStageMask = nullptr;
- commandBufferCount = src.commandBufferCount;
- pCommandBuffers = nullptr;
- signalSemaphoreCount = src.signalSemaphoreCount;
- pSignalSemaphores = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (waitSemaphoreCount && src.pWaitSemaphores) {
- pWaitSemaphores = new VkSemaphore[waitSemaphoreCount];
- for (uint32_t i = 0; i < waitSemaphoreCount; ++i) {
- pWaitSemaphores[i] = src.pWaitSemaphores[i];
- }
- }
- if (src.pWaitDstStageMask) {
- pWaitDstStageMask = new VkPipelineStageFlags[src.waitSemaphoreCount];
- memcpy ((void *)pWaitDstStageMask, (void *)src.pWaitDstStageMask, sizeof(VkPipelineStageFlags)*src.waitSemaphoreCount);
- }
- if (src.pCommandBuffers) {
- pCommandBuffers = new VkCommandBuffer[src.commandBufferCount];
- memcpy ((void *)pCommandBuffers, (void *)src.pCommandBuffers, sizeof(VkCommandBuffer)*src.commandBufferCount);
- }
- if (signalSemaphoreCount && src.pSignalSemaphores) {
- pSignalSemaphores = new VkSemaphore[signalSemaphoreCount];
- for (uint32_t i = 0; i < signalSemaphoreCount; ++i) {
- pSignalSemaphores[i] = src.pSignalSemaphores[i];
- }
- }
-
- return *this;
-}
-
-safe_VkSubmitInfo::~safe_VkSubmitInfo()
-{
- if (pWaitSemaphores)
- delete[] pWaitSemaphores;
- if (pWaitDstStageMask)
- delete[] pWaitDstStageMask;
- if (pCommandBuffers)
- delete[] pCommandBuffers;
- if (pSignalSemaphores)
- delete[] pSignalSemaphores;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkSubmitInfo::initialize(const VkSubmitInfo* in_struct)
-{
- sType = in_struct->sType;
- waitSemaphoreCount = in_struct->waitSemaphoreCount;
- pWaitSemaphores = nullptr;
- pWaitDstStageMask = nullptr;
- commandBufferCount = in_struct->commandBufferCount;
- pCommandBuffers = nullptr;
- signalSemaphoreCount = in_struct->signalSemaphoreCount;
- pSignalSemaphores = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (waitSemaphoreCount && in_struct->pWaitSemaphores) {
- pWaitSemaphores = new VkSemaphore[waitSemaphoreCount];
- for (uint32_t i = 0; i < waitSemaphoreCount; ++i) {
- pWaitSemaphores[i] = in_struct->pWaitSemaphores[i];
- }
- }
- if (in_struct->pWaitDstStageMask) {
- pWaitDstStageMask = new VkPipelineStageFlags[in_struct->waitSemaphoreCount];
- memcpy ((void *)pWaitDstStageMask, (void *)in_struct->pWaitDstStageMask, sizeof(VkPipelineStageFlags)*in_struct->waitSemaphoreCount);
- }
- if (in_struct->pCommandBuffers) {
- pCommandBuffers = new VkCommandBuffer[in_struct->commandBufferCount];
- memcpy ((void *)pCommandBuffers, (void *)in_struct->pCommandBuffers, sizeof(VkCommandBuffer)*in_struct->commandBufferCount);
- }
- if (signalSemaphoreCount && in_struct->pSignalSemaphores) {
- pSignalSemaphores = new VkSemaphore[signalSemaphoreCount];
- for (uint32_t i = 0; i < signalSemaphoreCount; ++i) {
- pSignalSemaphores[i] = in_struct->pSignalSemaphores[i];
- }
- }
-}
-
-void safe_VkSubmitInfo::initialize(const safe_VkSubmitInfo* src)
-{
- sType = src->sType;
- waitSemaphoreCount = src->waitSemaphoreCount;
- pWaitSemaphores = nullptr;
- pWaitDstStageMask = nullptr;
- commandBufferCount = src->commandBufferCount;
- pCommandBuffers = nullptr;
- signalSemaphoreCount = src->signalSemaphoreCount;
- pSignalSemaphores = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (waitSemaphoreCount && src->pWaitSemaphores) {
- pWaitSemaphores = new VkSemaphore[waitSemaphoreCount];
- for (uint32_t i = 0; i < waitSemaphoreCount; ++i) {
- pWaitSemaphores[i] = src->pWaitSemaphores[i];
- }
- }
- if (src->pWaitDstStageMask) {
- pWaitDstStageMask = new VkPipelineStageFlags[src->waitSemaphoreCount];
- memcpy ((void *)pWaitDstStageMask, (void *)src->pWaitDstStageMask, sizeof(VkPipelineStageFlags)*src->waitSemaphoreCount);
- }
- if (src->pCommandBuffers) {
- pCommandBuffers = new VkCommandBuffer[src->commandBufferCount];
- memcpy ((void *)pCommandBuffers, (void *)src->pCommandBuffers, sizeof(VkCommandBuffer)*src->commandBufferCount);
- }
- if (signalSemaphoreCount && src->pSignalSemaphores) {
- pSignalSemaphores = new VkSemaphore[signalSemaphoreCount];
- for (uint32_t i = 0; i < signalSemaphoreCount; ++i) {
- pSignalSemaphores[i] = src->pSignalSemaphores[i];
- }
- }
-}
-
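A note on the pattern above: safe_VkSubmitInfo shows the deep-copy scheme used throughout this file — counted handle arrays are cloned element by element or with memcpy, the pNext chain is duplicated with SafePnextCopy and released with FreePnextChain, and operator= checks for self-assignment before freeing its old allocations. A minimal usage sketch follows, assuming the declarations live in a header named vk_safe_struct.h; the function and variable names are hypothetical, not part of the generated source.

#include <cstdint>
#include <vector>
#include "vk_safe_struct.h"  // assumed header name; declares safe_VkSubmitInfo

// Hypothetical layer helper: snapshot each VkSubmitInfo so its semaphore and
// command-buffer arrays (and pNext chain) remain valid after the call returns.
static void RetainSubmits(uint32_t submitCount, const VkSubmitInfo* pSubmits,
                          std::vector<safe_VkSubmitInfo>& retained) {
    retained.reserve(retained.size() + submitCount);
    for (uint32_t i = 0; i < submitCount; ++i) {
        retained.emplace_back(&pSubmits[i]);  // deep copy via the constructor above
    }
}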
-safe_VkMemoryAllocateInfo::safe_VkMemoryAllocateInfo(const VkMemoryAllocateInfo* in_struct) :
- sType(in_struct->sType),
- allocationSize(in_struct->allocationSize),
- memoryTypeIndex(in_struct->memoryTypeIndex)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkMemoryAllocateInfo::safe_VkMemoryAllocateInfo() :
- pNext(nullptr)
-{}
-
-safe_VkMemoryAllocateInfo::safe_VkMemoryAllocateInfo(const safe_VkMemoryAllocateInfo& src)
-{
- sType = src.sType;
- allocationSize = src.allocationSize;
- memoryTypeIndex = src.memoryTypeIndex;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkMemoryAllocateInfo& safe_VkMemoryAllocateInfo::operator=(const safe_VkMemoryAllocateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- allocationSize = src.allocationSize;
- memoryTypeIndex = src.memoryTypeIndex;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkMemoryAllocateInfo::~safe_VkMemoryAllocateInfo()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkMemoryAllocateInfo::initialize(const VkMemoryAllocateInfo* in_struct)
-{
- sType = in_struct->sType;
- allocationSize = in_struct->allocationSize;
- memoryTypeIndex = in_struct->memoryTypeIndex;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkMemoryAllocateInfo::initialize(const safe_VkMemoryAllocateInfo* src)
-{
- sType = src->sType;
- allocationSize = src->allocationSize;
- memoryTypeIndex = src->memoryTypeIndex;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkMappedMemoryRange::safe_VkMappedMemoryRange(const VkMappedMemoryRange* in_struct) :
- sType(in_struct->sType),
- memory(in_struct->memory),
- offset(in_struct->offset),
- size(in_struct->size)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkMappedMemoryRange::safe_VkMappedMemoryRange() :
- pNext(nullptr)
-{}
-
-safe_VkMappedMemoryRange::safe_VkMappedMemoryRange(const safe_VkMappedMemoryRange& src)
-{
- sType = src.sType;
- memory = src.memory;
- offset = src.offset;
- size = src.size;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkMappedMemoryRange& safe_VkMappedMemoryRange::operator=(const safe_VkMappedMemoryRange& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- memory = src.memory;
- offset = src.offset;
- size = src.size;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkMappedMemoryRange::~safe_VkMappedMemoryRange()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkMappedMemoryRange::initialize(const VkMappedMemoryRange* in_struct)
-{
- sType = in_struct->sType;
- memory = in_struct->memory;
- offset = in_struct->offset;
- size = in_struct->size;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkMappedMemoryRange::initialize(const safe_VkMappedMemoryRange* src)
-{
- sType = src->sType;
- memory = src->memory;
- offset = src->offset;
- size = src->size;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkSparseBufferMemoryBindInfo::safe_VkSparseBufferMemoryBindInfo(const VkSparseBufferMemoryBindInfo* in_struct) :
- buffer(in_struct->buffer),
- bindCount(in_struct->bindCount),
- pBinds(nullptr)
-{
- if (bindCount && in_struct->pBinds) {
- pBinds = new VkSparseMemoryBind[bindCount];
- for (uint32_t i = 0; i < bindCount; ++i) {
- pBinds[i] = in_struct->pBinds[i];
- }
- }
-}
-
-safe_VkSparseBufferMemoryBindInfo::safe_VkSparseBufferMemoryBindInfo() :
- pBinds(nullptr)
-{}
-
-safe_VkSparseBufferMemoryBindInfo::safe_VkSparseBufferMemoryBindInfo(const safe_VkSparseBufferMemoryBindInfo& src)
-{
- buffer = src.buffer;
- bindCount = src.bindCount;
- pBinds = nullptr;
- if (bindCount && src.pBinds) {
- pBinds = new VkSparseMemoryBind[bindCount];
- for (uint32_t i = 0; i < bindCount; ++i) {
- pBinds[i] = src.pBinds[i];
- }
- }
-}
-
-safe_VkSparseBufferMemoryBindInfo& safe_VkSparseBufferMemoryBindInfo::operator=(const safe_VkSparseBufferMemoryBindInfo& src)
-{
- if (&src == this) return *this;
-
- if (pBinds)
- delete[] pBinds;
-
- buffer = src.buffer;
- bindCount = src.bindCount;
- pBinds = nullptr;
- if (bindCount && src.pBinds) {
- pBinds = new VkSparseMemoryBind[bindCount];
- for (uint32_t i = 0; i < bindCount; ++i) {
- pBinds[i] = src.pBinds[i];
- }
- }
-
- return *this;
-}
-
-safe_VkSparseBufferMemoryBindInfo::~safe_VkSparseBufferMemoryBindInfo()
-{
- if (pBinds)
- delete[] pBinds;
-}
-
-void safe_VkSparseBufferMemoryBindInfo::initialize(const VkSparseBufferMemoryBindInfo* in_struct)
-{
- buffer = in_struct->buffer;
- bindCount = in_struct->bindCount;
- pBinds = nullptr;
- if (bindCount && in_struct->pBinds) {
- pBinds = new VkSparseMemoryBind[bindCount];
- for (uint32_t i = 0; i < bindCount; ++i) {
- pBinds[i] = in_struct->pBinds[i];
- }
- }
-}
-
-void safe_VkSparseBufferMemoryBindInfo::initialize(const safe_VkSparseBufferMemoryBindInfo* src)
-{
- buffer = src->buffer;
- bindCount = src->bindCount;
- pBinds = nullptr;
- if (bindCount && src->pBinds) {
- pBinds = new VkSparseMemoryBind[bindCount];
- for (uint32_t i = 0; i < bindCount; ++i) {
- pBinds[i] = src->pBinds[i];
- }
- }
-}
-
-safe_VkSparseImageOpaqueMemoryBindInfo::safe_VkSparseImageOpaqueMemoryBindInfo(const VkSparseImageOpaqueMemoryBindInfo* in_struct) :
- image(in_struct->image),
- bindCount(in_struct->bindCount),
- pBinds(nullptr)
-{
- if (bindCount && in_struct->pBinds) {
- pBinds = new VkSparseMemoryBind[bindCount];
- for (uint32_t i = 0; i < bindCount; ++i) {
- pBinds[i] = in_struct->pBinds[i];
- }
- }
-}
-
-safe_VkSparseImageOpaqueMemoryBindInfo::safe_VkSparseImageOpaqueMemoryBindInfo() :
- pBinds(nullptr)
-{}
-
-safe_VkSparseImageOpaqueMemoryBindInfo::safe_VkSparseImageOpaqueMemoryBindInfo(const safe_VkSparseImageOpaqueMemoryBindInfo& src)
-{
- image = src.image;
- bindCount = src.bindCount;
- pBinds = nullptr;
- if (bindCount && src.pBinds) {
- pBinds = new VkSparseMemoryBind[bindCount];
- for (uint32_t i = 0; i < bindCount; ++i) {
- pBinds[i] = src.pBinds[i];
- }
- }
-}
-
-safe_VkSparseImageOpaqueMemoryBindInfo& safe_VkSparseImageOpaqueMemoryBindInfo::operator=(const safe_VkSparseImageOpaqueMemoryBindInfo& src)
-{
- if (&src == this) return *this;
-
- if (pBinds)
- delete[] pBinds;
-
- image = src.image;
- bindCount = src.bindCount;
- pBinds = nullptr;
- if (bindCount && src.pBinds) {
- pBinds = new VkSparseMemoryBind[bindCount];
- for (uint32_t i = 0; i < bindCount; ++i) {
- pBinds[i] = src.pBinds[i];
- }
- }
-
- return *this;
-}
-
-safe_VkSparseImageOpaqueMemoryBindInfo::~safe_VkSparseImageOpaqueMemoryBindInfo()
-{
- if (pBinds)
- delete[] pBinds;
-}
-
-void safe_VkSparseImageOpaqueMemoryBindInfo::initialize(const VkSparseImageOpaqueMemoryBindInfo* in_struct)
-{
- image = in_struct->image;
- bindCount = in_struct->bindCount;
- pBinds = nullptr;
- if (bindCount && in_struct->pBinds) {
- pBinds = new VkSparseMemoryBind[bindCount];
- for (uint32_t i = 0; i < bindCount; ++i) {
- pBinds[i] = in_struct->pBinds[i];
- }
- }
-}
-
-void safe_VkSparseImageOpaqueMemoryBindInfo::initialize(const safe_VkSparseImageOpaqueMemoryBindInfo* src)
-{
- image = src->image;
- bindCount = src->bindCount;
- pBinds = nullptr;
- if (bindCount && src->pBinds) {
- pBinds = new VkSparseMemoryBind[bindCount];
- for (uint32_t i = 0; i < bindCount; ++i) {
- pBinds[i] = src->pBinds[i];
- }
- }
-}
-
-safe_VkSparseImageMemoryBindInfo::safe_VkSparseImageMemoryBindInfo(const VkSparseImageMemoryBindInfo* in_struct) :
- image(in_struct->image),
- bindCount(in_struct->bindCount),
- pBinds(nullptr)
-{
- if (bindCount && in_struct->pBinds) {
- pBinds = new VkSparseImageMemoryBind[bindCount];
- for (uint32_t i = 0; i < bindCount; ++i) {
- pBinds[i] = in_struct->pBinds[i];
- }
- }
-}
-
-safe_VkSparseImageMemoryBindInfo::safe_VkSparseImageMemoryBindInfo() :
- pBinds(nullptr)
-{}
-
-safe_VkSparseImageMemoryBindInfo::safe_VkSparseImageMemoryBindInfo(const safe_VkSparseImageMemoryBindInfo& src)
-{
- image = src.image;
- bindCount = src.bindCount;
- pBinds = nullptr;
- if (bindCount && src.pBinds) {
- pBinds = new VkSparseImageMemoryBind[bindCount];
- for (uint32_t i = 0; i < bindCount; ++i) {
- pBinds[i] = src.pBinds[i];
- }
- }
-}
-
-safe_VkSparseImageMemoryBindInfo& safe_VkSparseImageMemoryBindInfo::operator=(const safe_VkSparseImageMemoryBindInfo& src)
-{
- if (&src == this) return *this;
-
- if (pBinds)
- delete[] pBinds;
-
- image = src.image;
- bindCount = src.bindCount;
- pBinds = nullptr;
- if (bindCount && src.pBinds) {
- pBinds = new VkSparseImageMemoryBind[bindCount];
- for (uint32_t i = 0; i < bindCount; ++i) {
- pBinds[i] = src.pBinds[i];
- }
- }
-
- return *this;
-}
-
-safe_VkSparseImageMemoryBindInfo::~safe_VkSparseImageMemoryBindInfo()
-{
- if (pBinds)
- delete[] pBinds;
-}
-
-void safe_VkSparseImageMemoryBindInfo::initialize(const VkSparseImageMemoryBindInfo* in_struct)
-{
- image = in_struct->image;
- bindCount = in_struct->bindCount;
- pBinds = nullptr;
- if (bindCount && in_struct->pBinds) {
- pBinds = new VkSparseImageMemoryBind[bindCount];
- for (uint32_t i = 0; i < bindCount; ++i) {
- pBinds[i] = in_struct->pBinds[i];
- }
- }
-}
-
-void safe_VkSparseImageMemoryBindInfo::initialize(const safe_VkSparseImageMemoryBindInfo* src)
-{
- image = src->image;
- bindCount = src->bindCount;
- pBinds = nullptr;
- if (bindCount && src->pBinds) {
- pBinds = new VkSparseImageMemoryBind[bindCount];
- for (uint32_t i = 0; i < bindCount; ++i) {
- pBinds[i] = src->pBinds[i];
- }
- }
-}
-
-safe_VkBindSparseInfo::safe_VkBindSparseInfo(const VkBindSparseInfo* in_struct) :
- sType(in_struct->sType),
- waitSemaphoreCount(in_struct->waitSemaphoreCount),
- pWaitSemaphores(nullptr),
- bufferBindCount(in_struct->bufferBindCount),
- pBufferBinds(nullptr),
- imageOpaqueBindCount(in_struct->imageOpaqueBindCount),
- pImageOpaqueBinds(nullptr),
- imageBindCount(in_struct->imageBindCount),
- pImageBinds(nullptr),
- signalSemaphoreCount(in_struct->signalSemaphoreCount),
- pSignalSemaphores(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (waitSemaphoreCount && in_struct->pWaitSemaphores) {
- pWaitSemaphores = new VkSemaphore[waitSemaphoreCount];
- for (uint32_t i = 0; i < waitSemaphoreCount; ++i) {
- pWaitSemaphores[i] = in_struct->pWaitSemaphores[i];
- }
- }
- if (bufferBindCount && in_struct->pBufferBinds) {
- pBufferBinds = new safe_VkSparseBufferMemoryBindInfo[bufferBindCount];
- for (uint32_t i = 0; i < bufferBindCount; ++i) {
- pBufferBinds[i].initialize(&in_struct->pBufferBinds[i]);
- }
- }
- if (imageOpaqueBindCount && in_struct->pImageOpaqueBinds) {
- pImageOpaqueBinds = new safe_VkSparseImageOpaqueMemoryBindInfo[imageOpaqueBindCount];
- for (uint32_t i = 0; i < imageOpaqueBindCount; ++i) {
- pImageOpaqueBinds[i].initialize(&in_struct->pImageOpaqueBinds[i]);
- }
- }
- if (imageBindCount && in_struct->pImageBinds) {
- pImageBinds = new safe_VkSparseImageMemoryBindInfo[imageBindCount];
- for (uint32_t i = 0; i < imageBindCount; ++i) {
- pImageBinds[i].initialize(&in_struct->pImageBinds[i]);
- }
- }
- if (signalSemaphoreCount && in_struct->pSignalSemaphores) {
- pSignalSemaphores = new VkSemaphore[signalSemaphoreCount];
- for (uint32_t i = 0; i < signalSemaphoreCount; ++i) {
- pSignalSemaphores[i] = in_struct->pSignalSemaphores[i];
- }
- }
-}
-
-safe_VkBindSparseInfo::safe_VkBindSparseInfo() :
- pNext(nullptr),
- pWaitSemaphores(nullptr),
- pBufferBinds(nullptr),
- pImageOpaqueBinds(nullptr),
- pImageBinds(nullptr),
- pSignalSemaphores(nullptr)
-{}
-
-safe_VkBindSparseInfo::safe_VkBindSparseInfo(const safe_VkBindSparseInfo& src)
-{
- sType = src.sType;
- waitSemaphoreCount = src.waitSemaphoreCount;
- pWaitSemaphores = nullptr;
- bufferBindCount = src.bufferBindCount;
- pBufferBinds = nullptr;
- imageOpaqueBindCount = src.imageOpaqueBindCount;
- pImageOpaqueBinds = nullptr;
- imageBindCount = src.imageBindCount;
- pImageBinds = nullptr;
- signalSemaphoreCount = src.signalSemaphoreCount;
- pSignalSemaphores = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (waitSemaphoreCount && src.pWaitSemaphores) {
- pWaitSemaphores = new VkSemaphore[waitSemaphoreCount];
- for (uint32_t i = 0; i < waitSemaphoreCount; ++i) {
- pWaitSemaphores[i] = src.pWaitSemaphores[i];
- }
- }
- if (bufferBindCount && src.pBufferBinds) {
- pBufferBinds = new safe_VkSparseBufferMemoryBindInfo[bufferBindCount];
- for (uint32_t i = 0; i < bufferBindCount; ++i) {
- pBufferBinds[i].initialize(&src.pBufferBinds[i]);
- }
- }
- if (imageOpaqueBindCount && src.pImageOpaqueBinds) {
- pImageOpaqueBinds = new safe_VkSparseImageOpaqueMemoryBindInfo[imageOpaqueBindCount];
- for (uint32_t i = 0; i < imageOpaqueBindCount; ++i) {
- pImageOpaqueBinds[i].initialize(&src.pImageOpaqueBinds[i]);
- }
- }
- if (imageBindCount && src.pImageBinds) {
- pImageBinds = new safe_VkSparseImageMemoryBindInfo[imageBindCount];
- for (uint32_t i = 0; i < imageBindCount; ++i) {
- pImageBinds[i].initialize(&src.pImageBinds[i]);
- }
- }
- if (signalSemaphoreCount && src.pSignalSemaphores) {
- pSignalSemaphores = new VkSemaphore[signalSemaphoreCount];
- for (uint32_t i = 0; i < signalSemaphoreCount; ++i) {
- pSignalSemaphores[i] = src.pSignalSemaphores[i];
- }
- }
-}
-
-safe_VkBindSparseInfo& safe_VkBindSparseInfo::operator=(const safe_VkBindSparseInfo& src)
-{
- if (&src == this) return *this;
-
- if (pWaitSemaphores)
- delete[] pWaitSemaphores;
- if (pBufferBinds)
- delete[] pBufferBinds;
- if (pImageOpaqueBinds)
- delete[] pImageOpaqueBinds;
- if (pImageBinds)
- delete[] pImageBinds;
- if (pSignalSemaphores)
- delete[] pSignalSemaphores;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- waitSemaphoreCount = src.waitSemaphoreCount;
- pWaitSemaphores = nullptr;
- bufferBindCount = src.bufferBindCount;
- pBufferBinds = nullptr;
- imageOpaqueBindCount = src.imageOpaqueBindCount;
- pImageOpaqueBinds = nullptr;
- imageBindCount = src.imageBindCount;
- pImageBinds = nullptr;
- signalSemaphoreCount = src.signalSemaphoreCount;
- pSignalSemaphores = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (waitSemaphoreCount && src.pWaitSemaphores) {
- pWaitSemaphores = new VkSemaphore[waitSemaphoreCount];
- for (uint32_t i = 0; i < waitSemaphoreCount; ++i) {
- pWaitSemaphores[i] = src.pWaitSemaphores[i];
- }
- }
- if (bufferBindCount && src.pBufferBinds) {
- pBufferBinds = new safe_VkSparseBufferMemoryBindInfo[bufferBindCount];
- for (uint32_t i = 0; i < bufferBindCount; ++i) {
- pBufferBinds[i].initialize(&src.pBufferBinds[i]);
- }
- }
- if (imageOpaqueBindCount && src.pImageOpaqueBinds) {
- pImageOpaqueBinds = new safe_VkSparseImageOpaqueMemoryBindInfo[imageOpaqueBindCount];
- for (uint32_t i = 0; i < imageOpaqueBindCount; ++i) {
- pImageOpaqueBinds[i].initialize(&src.pImageOpaqueBinds[i]);
- }
- }
- if (imageBindCount && src.pImageBinds) {
- pImageBinds = new safe_VkSparseImageMemoryBindInfo[imageBindCount];
- for (uint32_t i = 0; i < imageBindCount; ++i) {
- pImageBinds[i].initialize(&src.pImageBinds[i]);
- }
- }
- if (signalSemaphoreCount && src.pSignalSemaphores) {
- pSignalSemaphores = new VkSemaphore[signalSemaphoreCount];
- for (uint32_t i = 0; i < signalSemaphoreCount; ++i) {
- pSignalSemaphores[i] = src.pSignalSemaphores[i];
- }
- }
-
- return *this;
-}
-
-safe_VkBindSparseInfo::~safe_VkBindSparseInfo()
-{
- if (pWaitSemaphores)
- delete[] pWaitSemaphores;
- if (pBufferBinds)
- delete[] pBufferBinds;
- if (pImageOpaqueBinds)
- delete[] pImageOpaqueBinds;
- if (pImageBinds)
- delete[] pImageBinds;
- if (pSignalSemaphores)
- delete[] pSignalSemaphores;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkBindSparseInfo::initialize(const VkBindSparseInfo* in_struct)
-{
- sType = in_struct->sType;
- waitSemaphoreCount = in_struct->waitSemaphoreCount;
- pWaitSemaphores = nullptr;
- bufferBindCount = in_struct->bufferBindCount;
- pBufferBinds = nullptr;
- imageOpaqueBindCount = in_struct->imageOpaqueBindCount;
- pImageOpaqueBinds = nullptr;
- imageBindCount = in_struct->imageBindCount;
- pImageBinds = nullptr;
- signalSemaphoreCount = in_struct->signalSemaphoreCount;
- pSignalSemaphores = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (waitSemaphoreCount && in_struct->pWaitSemaphores) {
- pWaitSemaphores = new VkSemaphore[waitSemaphoreCount];
- for (uint32_t i = 0; i < waitSemaphoreCount; ++i) {
- pWaitSemaphores[i] = in_struct->pWaitSemaphores[i];
- }
- }
- if (bufferBindCount && in_struct->pBufferBinds) {
- pBufferBinds = new safe_VkSparseBufferMemoryBindInfo[bufferBindCount];
- for (uint32_t i = 0; i < bufferBindCount; ++i) {
- pBufferBinds[i].initialize(&in_struct->pBufferBinds[i]);
- }
- }
- if (imageOpaqueBindCount && in_struct->pImageOpaqueBinds) {
- pImageOpaqueBinds = new safe_VkSparseImageOpaqueMemoryBindInfo[imageOpaqueBindCount];
- for (uint32_t i = 0; i < imageOpaqueBindCount; ++i) {
- pImageOpaqueBinds[i].initialize(&in_struct->pImageOpaqueBinds[i]);
- }
- }
- if (imageBindCount && in_struct->pImageBinds) {
- pImageBinds = new safe_VkSparseImageMemoryBindInfo[imageBindCount];
- for (uint32_t i = 0; i < imageBindCount; ++i) {
- pImageBinds[i].initialize(&in_struct->pImageBinds[i]);
- }
- }
- if (signalSemaphoreCount && in_struct->pSignalSemaphores) {
- pSignalSemaphores = new VkSemaphore[signalSemaphoreCount];
- for (uint32_t i = 0; i < signalSemaphoreCount; ++i) {
- pSignalSemaphores[i] = in_struct->pSignalSemaphores[i];
- }
- }
-}
-
-void safe_VkBindSparseInfo::initialize(const safe_VkBindSparseInfo* src)
-{
- sType = src->sType;
- waitSemaphoreCount = src->waitSemaphoreCount;
- pWaitSemaphores = nullptr;
- bufferBindCount = src->bufferBindCount;
- pBufferBinds = nullptr;
- imageOpaqueBindCount = src->imageOpaqueBindCount;
- pImageOpaqueBinds = nullptr;
- imageBindCount = src->imageBindCount;
- pImageBinds = nullptr;
- signalSemaphoreCount = src->signalSemaphoreCount;
- pSignalSemaphores = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (waitSemaphoreCount && src->pWaitSemaphores) {
- pWaitSemaphores = new VkSemaphore[waitSemaphoreCount];
- for (uint32_t i = 0; i < waitSemaphoreCount; ++i) {
- pWaitSemaphores[i] = src->pWaitSemaphores[i];
- }
- }
- if (bufferBindCount && src->pBufferBinds) {
- pBufferBinds = new safe_VkSparseBufferMemoryBindInfo[bufferBindCount];
- for (uint32_t i = 0; i < bufferBindCount; ++i) {
- pBufferBinds[i].initialize(&src->pBufferBinds[i]);
- }
- }
- if (imageOpaqueBindCount && src->pImageOpaqueBinds) {
- pImageOpaqueBinds = new safe_VkSparseImageOpaqueMemoryBindInfo[imageOpaqueBindCount];
- for (uint32_t i = 0; i < imageOpaqueBindCount; ++i) {
- pImageOpaqueBinds[i].initialize(&src->pImageOpaqueBinds[i]);
- }
- }
- if (imageBindCount && src->pImageBinds) {
- pImageBinds = new safe_VkSparseImageMemoryBindInfo[imageBindCount];
- for (uint32_t i = 0; i < imageBindCount; ++i) {
- pImageBinds[i].initialize(&src->pImageBinds[i]);
- }
- }
- if (signalSemaphoreCount && src->pSignalSemaphores) {
- pSignalSemaphores = new VkSemaphore[signalSemaphoreCount];
- for (uint32_t i = 0; i < signalSemaphoreCount; ++i) {
- pSignalSemaphores[i] = src->pSignalSemaphores[i];
- }
- }
-}
-
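A note on the pattern above: safe_VkBindSparseInfo mixes two copy strategies — plain handle arrays (pWaitSemaphores, pSignalSemaphores) are copied directly, while the nested bind-info arrays are allocated as safe_* wrappers that are default-constructed and then filled with initialize(). As the implementations show, initialize() overwrites the owned pointers without freeing them, so it is only safe on freshly constructed elements; a populated object should instead be reassigned through operator=, which frees first. A minimal sketch of that two-phase pattern, with a hypothetical helper name and an assumed header name:

#include <cstdint>
#include "vk_safe_struct.h"  // assumed header name

// Hypothetical helper mirroring the pBufferBinds loop above: allocate
// default-constructed wrappers, then call initialize() on each exactly once.
static safe_VkSparseBufferMemoryBindInfo* CopyBufferBinds(const VkBindSparseInfo& src) {
    if (!src.bufferBindCount || !src.pBufferBinds) return nullptr;
    auto* binds = new safe_VkSparseBufferMemoryBindInfo[src.bufferBindCount];
    for (uint32_t i = 0; i < src.bufferBindCount; ++i) {
        binds[i].initialize(&src.pBufferBinds[i]);  // fresh element: nothing to leak
    }
    return binds;  // caller releases with delete[]
}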
-safe_VkFenceCreateInfo::safe_VkFenceCreateInfo(const VkFenceCreateInfo* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkFenceCreateInfo::safe_VkFenceCreateInfo() :
- pNext(nullptr)
-{}
-
-safe_VkFenceCreateInfo::safe_VkFenceCreateInfo(const safe_VkFenceCreateInfo& src)
-{
- sType = src.sType;
- flags = src.flags;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkFenceCreateInfo& safe_VkFenceCreateInfo::operator=(const safe_VkFenceCreateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkFenceCreateInfo::~safe_VkFenceCreateInfo()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkFenceCreateInfo::initialize(const VkFenceCreateInfo* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkFenceCreateInfo::initialize(const safe_VkFenceCreateInfo* src)
-{
- sType = src->sType;
- flags = src->flags;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkSemaphoreCreateInfo::safe_VkSemaphoreCreateInfo(const VkSemaphoreCreateInfo* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkSemaphoreCreateInfo::safe_VkSemaphoreCreateInfo() :
- pNext(nullptr)
-{}
-
-safe_VkSemaphoreCreateInfo::safe_VkSemaphoreCreateInfo(const safe_VkSemaphoreCreateInfo& src)
-{
- sType = src.sType;
- flags = src.flags;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkSemaphoreCreateInfo& safe_VkSemaphoreCreateInfo::operator=(const safe_VkSemaphoreCreateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkSemaphoreCreateInfo::~safe_VkSemaphoreCreateInfo()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkSemaphoreCreateInfo::initialize(const VkSemaphoreCreateInfo* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkSemaphoreCreateInfo::initialize(const safe_VkSemaphoreCreateInfo* src)
-{
- sType = src->sType;
- flags = src->flags;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkEventCreateInfo::safe_VkEventCreateInfo(const VkEventCreateInfo* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkEventCreateInfo::safe_VkEventCreateInfo() :
- pNext(nullptr)
-{}
-
-safe_VkEventCreateInfo::safe_VkEventCreateInfo(const safe_VkEventCreateInfo& src)
-{
- sType = src.sType;
- flags = src.flags;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkEventCreateInfo& safe_VkEventCreateInfo::operator=(const safe_VkEventCreateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkEventCreateInfo::~safe_VkEventCreateInfo()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkEventCreateInfo::initialize(const VkEventCreateInfo* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkEventCreateInfo::initialize(const safe_VkEventCreateInfo* src)
-{
- sType = src->sType;
- flags = src->flags;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkQueryPoolCreateInfo::safe_VkQueryPoolCreateInfo(const VkQueryPoolCreateInfo* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- queryType(in_struct->queryType),
- queryCount(in_struct->queryCount),
- pipelineStatistics(in_struct->pipelineStatistics)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkQueryPoolCreateInfo::safe_VkQueryPoolCreateInfo() :
- pNext(nullptr)
-{}
-
-safe_VkQueryPoolCreateInfo::safe_VkQueryPoolCreateInfo(const safe_VkQueryPoolCreateInfo& src)
-{
- sType = src.sType;
- flags = src.flags;
- queryType = src.queryType;
- queryCount = src.queryCount;
- pipelineStatistics = src.pipelineStatistics;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkQueryPoolCreateInfo& safe_VkQueryPoolCreateInfo::operator=(const safe_VkQueryPoolCreateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- queryType = src.queryType;
- queryCount = src.queryCount;
- pipelineStatistics = src.pipelineStatistics;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkQueryPoolCreateInfo::~safe_VkQueryPoolCreateInfo()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkQueryPoolCreateInfo::initialize(const VkQueryPoolCreateInfo* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- queryType = in_struct->queryType;
- queryCount = in_struct->queryCount;
- pipelineStatistics = in_struct->pipelineStatistics;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkQueryPoolCreateInfo::initialize(const safe_VkQueryPoolCreateInfo* src)
-{
- sType = src->sType;
- flags = src->flags;
- queryType = src->queryType;
- queryCount = src->queryCount;
- pipelineStatistics = src->pipelineStatistics;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkBufferCreateInfo::safe_VkBufferCreateInfo(const VkBufferCreateInfo* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- size(in_struct->size),
- usage(in_struct->usage),
- sharingMode(in_struct->sharingMode),
- queueFamilyIndexCount(in_struct->queueFamilyIndexCount),
- pQueueFamilyIndices(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pQueueFamilyIndices) {
- pQueueFamilyIndices = new uint32_t[in_struct->queueFamilyIndexCount];
- memcpy ((void *)pQueueFamilyIndices, (void *)in_struct->pQueueFamilyIndices, sizeof(uint32_t)*in_struct->queueFamilyIndexCount);
- }
-}
-
-safe_VkBufferCreateInfo::safe_VkBufferCreateInfo() :
- pNext(nullptr),
- pQueueFamilyIndices(nullptr)
-{}
-
-safe_VkBufferCreateInfo::safe_VkBufferCreateInfo(const safe_VkBufferCreateInfo& src)
-{
- sType = src.sType;
- flags = src.flags;
- size = src.size;
- usage = src.usage;
- sharingMode = src.sharingMode;
- queueFamilyIndexCount = src.queueFamilyIndexCount;
- pQueueFamilyIndices = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pQueueFamilyIndices) {
- pQueueFamilyIndices = new uint32_t[src.queueFamilyIndexCount];
- memcpy ((void *)pQueueFamilyIndices, (void *)src.pQueueFamilyIndices, sizeof(uint32_t)*src.queueFamilyIndexCount);
- }
-}
-
-safe_VkBufferCreateInfo& safe_VkBufferCreateInfo::operator=(const safe_VkBufferCreateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pQueueFamilyIndices)
- delete[] pQueueFamilyIndices;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- size = src.size;
- usage = src.usage;
- sharingMode = src.sharingMode;
- queueFamilyIndexCount = src.queueFamilyIndexCount;
- pQueueFamilyIndices = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pQueueFamilyIndices) {
- pQueueFamilyIndices = new uint32_t[src.queueFamilyIndexCount];
- memcpy ((void *)pQueueFamilyIndices, (void *)src.pQueueFamilyIndices, sizeof(uint32_t)*src.queueFamilyIndexCount);
- }
-
- return *this;
-}
-
-safe_VkBufferCreateInfo::~safe_VkBufferCreateInfo()
-{
- if (pQueueFamilyIndices)
- delete[] pQueueFamilyIndices;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkBufferCreateInfo::initialize(const VkBufferCreateInfo* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- size = in_struct->size;
- usage = in_struct->usage;
- sharingMode = in_struct->sharingMode;
- queueFamilyIndexCount = in_struct->queueFamilyIndexCount;
- pQueueFamilyIndices = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pQueueFamilyIndices) {
- pQueueFamilyIndices = new uint32_t[in_struct->queueFamilyIndexCount];
- memcpy ((void *)pQueueFamilyIndices, (void *)in_struct->pQueueFamilyIndices, sizeof(uint32_t)*in_struct->queueFamilyIndexCount);
- }
-}
-
-void safe_VkBufferCreateInfo::initialize(const safe_VkBufferCreateInfo* src)
-{
- sType = src->sType;
- flags = src->flags;
- size = src->size;
- usage = src->usage;
- sharingMode = src->sharingMode;
- queueFamilyIndexCount = src->queueFamilyIndexCount;
- pQueueFamilyIndices = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (src->pQueueFamilyIndices) {
- pQueueFamilyIndices = new uint32_t[src->queueFamilyIndexCount];
- memcpy ((void *)pQueueFamilyIndices, (void *)src->pQueueFamilyIndices, sizeof(uint32_t)*src->queueFamilyIndexCount);
- }
-}
-
-safe_VkBufferViewCreateInfo::safe_VkBufferViewCreateInfo(const VkBufferViewCreateInfo* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- buffer(in_struct->buffer),
- format(in_struct->format),
- offset(in_struct->offset),
- range(in_struct->range)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkBufferViewCreateInfo::safe_VkBufferViewCreateInfo() :
- pNext(nullptr)
-{}
-
-safe_VkBufferViewCreateInfo::safe_VkBufferViewCreateInfo(const safe_VkBufferViewCreateInfo& src)
-{
- sType = src.sType;
- flags = src.flags;
- buffer = src.buffer;
- format = src.format;
- offset = src.offset;
- range = src.range;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkBufferViewCreateInfo& safe_VkBufferViewCreateInfo::operator=(const safe_VkBufferViewCreateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- buffer = src.buffer;
- format = src.format;
- offset = src.offset;
- range = src.range;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkBufferViewCreateInfo::~safe_VkBufferViewCreateInfo()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkBufferViewCreateInfo::initialize(const VkBufferViewCreateInfo* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- buffer = in_struct->buffer;
- format = in_struct->format;
- offset = in_struct->offset;
- range = in_struct->range;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkBufferViewCreateInfo::initialize(const safe_VkBufferViewCreateInfo* src)
-{
- sType = src->sType;
- flags = src->flags;
- buffer = src->buffer;
- format = src->format;
- offset = src->offset;
- range = src->range;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkImageCreateInfo::safe_VkImageCreateInfo(const VkImageCreateInfo* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- imageType(in_struct->imageType),
- format(in_struct->format),
- extent(in_struct->extent),
- mipLevels(in_struct->mipLevels),
- arrayLayers(in_struct->arrayLayers),
- samples(in_struct->samples),
- tiling(in_struct->tiling),
- usage(in_struct->usage),
- sharingMode(in_struct->sharingMode),
- queueFamilyIndexCount(in_struct->queueFamilyIndexCount),
- pQueueFamilyIndices(nullptr),
- initialLayout(in_struct->initialLayout)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pQueueFamilyIndices) {
- pQueueFamilyIndices = new uint32_t[in_struct->queueFamilyIndexCount];
- memcpy ((void *)pQueueFamilyIndices, (void *)in_struct->pQueueFamilyIndices, sizeof(uint32_t)*in_struct->queueFamilyIndexCount);
- }
-}
-
-safe_VkImageCreateInfo::safe_VkImageCreateInfo() :
- pNext(nullptr),
- pQueueFamilyIndices(nullptr)
-{}
-
-safe_VkImageCreateInfo::safe_VkImageCreateInfo(const safe_VkImageCreateInfo& src)
-{
- sType = src.sType;
- flags = src.flags;
- imageType = src.imageType;
- format = src.format;
- extent = src.extent;
- mipLevels = src.mipLevels;
- arrayLayers = src.arrayLayers;
- samples = src.samples;
- tiling = src.tiling;
- usage = src.usage;
- sharingMode = src.sharingMode;
- queueFamilyIndexCount = src.queueFamilyIndexCount;
- pQueueFamilyIndices = nullptr;
- initialLayout = src.initialLayout;
- pNext = SafePnextCopy(src.pNext);
- if (src.pQueueFamilyIndices) {
- pQueueFamilyIndices = new uint32_t[src.queueFamilyIndexCount];
- memcpy ((void *)pQueueFamilyIndices, (void *)src.pQueueFamilyIndices, sizeof(uint32_t)*src.queueFamilyIndexCount);
- }
-}
-
-safe_VkImageCreateInfo& safe_VkImageCreateInfo::operator=(const safe_VkImageCreateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pQueueFamilyIndices)
- delete[] pQueueFamilyIndices;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- imageType = src.imageType;
- format = src.format;
- extent = src.extent;
- mipLevels = src.mipLevels;
- arrayLayers = src.arrayLayers;
- samples = src.samples;
- tiling = src.tiling;
- usage = src.usage;
- sharingMode = src.sharingMode;
- queueFamilyIndexCount = src.queueFamilyIndexCount;
- pQueueFamilyIndices = nullptr;
- initialLayout = src.initialLayout;
- pNext = SafePnextCopy(src.pNext);
- if (src.pQueueFamilyIndices) {
- pQueueFamilyIndices = new uint32_t[src.queueFamilyIndexCount];
- memcpy ((void *)pQueueFamilyIndices, (void *)src.pQueueFamilyIndices, sizeof(uint32_t)*src.queueFamilyIndexCount);
- }
-
- return *this;
-}
-
-safe_VkImageCreateInfo::~safe_VkImageCreateInfo()
-{
- if (pQueueFamilyIndices)
- delete[] pQueueFamilyIndices;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkImageCreateInfo::initialize(const VkImageCreateInfo* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- imageType = in_struct->imageType;
- format = in_struct->format;
- extent = in_struct->extent;
- mipLevels = in_struct->mipLevels;
- arrayLayers = in_struct->arrayLayers;
- samples = in_struct->samples;
- tiling = in_struct->tiling;
- usage = in_struct->usage;
- sharingMode = in_struct->sharingMode;
- queueFamilyIndexCount = in_struct->queueFamilyIndexCount;
- pQueueFamilyIndices = nullptr;
- initialLayout = in_struct->initialLayout;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pQueueFamilyIndices) {
- pQueueFamilyIndices = new uint32_t[in_struct->queueFamilyIndexCount];
- memcpy ((void *)pQueueFamilyIndices, (void *)in_struct->pQueueFamilyIndices, sizeof(uint32_t)*in_struct->queueFamilyIndexCount);
- }
-}
-
-void safe_VkImageCreateInfo::initialize(const safe_VkImageCreateInfo* src)
-{
- sType = src->sType;
- flags = src->flags;
- imageType = src->imageType;
- format = src->format;
- extent = src->extent;
- mipLevels = src->mipLevels;
- arrayLayers = src->arrayLayers;
- samples = src->samples;
- tiling = src->tiling;
- usage = src->usage;
- sharingMode = src->sharingMode;
- queueFamilyIndexCount = src->queueFamilyIndexCount;
- pQueueFamilyIndices = nullptr;
- initialLayout = src->initialLayout;
- pNext = SafePnextCopy(src->pNext);
- if (src->pQueueFamilyIndices) {
- pQueueFamilyIndices = new uint32_t[src->queueFamilyIndexCount];
- memcpy ((void *)pQueueFamilyIndices, (void *)src->pQueueFamilyIndices, sizeof(uint32_t)*src->queueFamilyIndexCount);
- }
-}
-
-safe_VkImageViewCreateInfo::safe_VkImageViewCreateInfo(const VkImageViewCreateInfo* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- image(in_struct->image),
- viewType(in_struct->viewType),
- format(in_struct->format),
- components(in_struct->components),
- subresourceRange(in_struct->subresourceRange)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkImageViewCreateInfo::safe_VkImageViewCreateInfo() :
- pNext(nullptr)
-{}
-
-safe_VkImageViewCreateInfo::safe_VkImageViewCreateInfo(const safe_VkImageViewCreateInfo& src)
-{
- sType = src.sType;
- flags = src.flags;
- image = src.image;
- viewType = src.viewType;
- format = src.format;
- components = src.components;
- subresourceRange = src.subresourceRange;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkImageViewCreateInfo& safe_VkImageViewCreateInfo::operator=(const safe_VkImageViewCreateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- image = src.image;
- viewType = src.viewType;
- format = src.format;
- components = src.components;
- subresourceRange = src.subresourceRange;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkImageViewCreateInfo::~safe_VkImageViewCreateInfo()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkImageViewCreateInfo::initialize(const VkImageViewCreateInfo* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- image = in_struct->image;
- viewType = in_struct->viewType;
- format = in_struct->format;
- components = in_struct->components;
- subresourceRange = in_struct->subresourceRange;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkImageViewCreateInfo::initialize(const safe_VkImageViewCreateInfo* src)
-{
- sType = src->sType;
- flags = src->flags;
- image = src->image;
- viewType = src->viewType;
- format = src->format;
- components = src->components;
- subresourceRange = src->subresourceRange;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkShaderModuleCreateInfo::safe_VkShaderModuleCreateInfo(const VkShaderModuleCreateInfo* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- codeSize(in_struct->codeSize),
- pCode(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pCode) {
- pCode = reinterpret_cast<uint32_t *>(new uint8_t[codeSize]);
- memcpy((void *)pCode, (void *)in_struct->pCode, codeSize);
- }
-}
-
-safe_VkShaderModuleCreateInfo::safe_VkShaderModuleCreateInfo() :
- pNext(nullptr),
- pCode(nullptr)
-{}
-
-safe_VkShaderModuleCreateInfo::safe_VkShaderModuleCreateInfo(const safe_VkShaderModuleCreateInfo& src)
-{
- sType = src.sType;
- flags = src.flags;
- codeSize = src.codeSize;
- pCode = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pCode) {
- pCode = reinterpret_cast<uint32_t *>(new uint8_t[codeSize]);
- memcpy((void *)pCode, (void *)src.pCode, codeSize);
- }
-}
-
-safe_VkShaderModuleCreateInfo& safe_VkShaderModuleCreateInfo::operator=(const safe_VkShaderModuleCreateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pCode)
- delete[] reinterpret_cast<const uint8_t *>(pCode);
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- codeSize = src.codeSize;
- pCode = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pCode) {
- pCode = reinterpret_cast<uint32_t *>(new uint8_t[codeSize]);
- memcpy((void *)pCode, (void *)src.pCode, codeSize);
- }
-
- return *this;
-}
-
-safe_VkShaderModuleCreateInfo::~safe_VkShaderModuleCreateInfo()
-{
- if (pCode)
- delete[] reinterpret_cast<const uint8_t *>(pCode);
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkShaderModuleCreateInfo::initialize(const VkShaderModuleCreateInfo* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- codeSize = in_struct->codeSize;
- pCode = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pCode) {
- pCode = reinterpret_cast<uint32_t *>(new uint8_t[codeSize]);
- memcpy((void *)pCode, (void *)in_struct->pCode, codeSize);
- }
-}
-
-void safe_VkShaderModuleCreateInfo::initialize(const safe_VkShaderModuleCreateInfo* src)
-{
- sType = src->sType;
- flags = src->flags;
- codeSize = src->codeSize;
- pCode = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (src->pCode) {
- pCode = reinterpret_cast<uint32_t *>(new uint8_t[codeSize]);
- memcpy((void *)pCode, (void *)src->pCode, codeSize);
- }
-}
-
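A note on the pattern above: in safe_VkShaderModuleCreateInfo, codeSize is a byte count while pCode points at 32-bit SPIR-V words, which is why the copy allocates a uint8_t buffer of codeSize bytes and reinterprets it as uint32_t*. A minimal sketch of building a safe copy from a SPIR-V word vector, with a hypothetical helper name and an assumed header name:

#include <cstdint>
#include <vector>
#include "vk_safe_struct.h"  // assumed header name

// Hypothetical helper: wrap a SPIR-V blob in a safe create-info copy.
static safe_VkShaderModuleCreateInfo WrapSpirv(const std::vector<uint32_t>& spirv) {
    VkShaderModuleCreateInfo ci = {};
    ci.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
    ci.codeSize = spirv.size() * sizeof(uint32_t);  // bytes, not words
    ci.pCode = spirv.data();
    return safe_VkShaderModuleCreateInfo(&ci);      // copies the blob, so 'spirv' may be freed
}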
-safe_VkPipelineCacheCreateInfo::safe_VkPipelineCacheCreateInfo(const VkPipelineCacheCreateInfo* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- initialDataSize(in_struct->initialDataSize),
- pInitialData(in_struct->pInitialData)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPipelineCacheCreateInfo::safe_VkPipelineCacheCreateInfo() :
- pNext(nullptr),
- pInitialData(nullptr)
-{}
-
-safe_VkPipelineCacheCreateInfo::safe_VkPipelineCacheCreateInfo(const safe_VkPipelineCacheCreateInfo& src)
-{
- sType = src.sType;
- flags = src.flags;
- initialDataSize = src.initialDataSize;
- pInitialData = src.pInitialData;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPipelineCacheCreateInfo& safe_VkPipelineCacheCreateInfo::operator=(const safe_VkPipelineCacheCreateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- initialDataSize = src.initialDataSize;
- pInitialData = src.pInitialData;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPipelineCacheCreateInfo::~safe_VkPipelineCacheCreateInfo()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPipelineCacheCreateInfo::initialize(const VkPipelineCacheCreateInfo* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- initialDataSize = in_struct->initialDataSize;
- pInitialData = in_struct->pInitialData;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPipelineCacheCreateInfo::initialize(const safe_VkPipelineCacheCreateInfo* src)
-{
- sType = src->sType;
- flags = src->flags;
- initialDataSize = src->initialDataSize;
- pInitialData = src->pInitialData;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkSpecializationInfo::safe_VkSpecializationInfo(const VkSpecializationInfo* in_struct) :
- mapEntryCount(in_struct->mapEntryCount),
- pMapEntries(nullptr),
- dataSize(in_struct->dataSize),
- pData(in_struct->pData)
-{
- if (in_struct->pMapEntries) {
- pMapEntries = new VkSpecializationMapEntry[in_struct->mapEntryCount];
- memcpy ((void *)pMapEntries, (void *)in_struct->pMapEntries, sizeof(VkSpecializationMapEntry)*in_struct->mapEntryCount);
- }
-}
-
-safe_VkSpecializationInfo::safe_VkSpecializationInfo() :
- pMapEntries(nullptr),
- pData(nullptr)
-{}
-
-safe_VkSpecializationInfo::safe_VkSpecializationInfo(const safe_VkSpecializationInfo& src)
-{
- mapEntryCount = src.mapEntryCount;
- pMapEntries = nullptr;
- dataSize = src.dataSize;
- pData = src.pData;
- if (src.pMapEntries) {
- pMapEntries = new VkSpecializationMapEntry[src.mapEntryCount];
- memcpy ((void *)pMapEntries, (void *)src.pMapEntries, sizeof(VkSpecializationMapEntry)*src.mapEntryCount);
- }
-}
-
-safe_VkSpecializationInfo& safe_VkSpecializationInfo::operator=(const safe_VkSpecializationInfo& src)
-{
- if (&src == this) return *this;
-
- if (pMapEntries)
- delete[] pMapEntries;
-
- mapEntryCount = src.mapEntryCount;
- pMapEntries = nullptr;
- dataSize = src.dataSize;
- pData = src.pData;
- if (src.pMapEntries) {
- pMapEntries = new VkSpecializationMapEntry[src.mapEntryCount];
- memcpy ((void *)pMapEntries, (void *)src.pMapEntries, sizeof(VkSpecializationMapEntry)*src.mapEntryCount);
- }
-
- return *this;
-}
-
-safe_VkSpecializationInfo::~safe_VkSpecializationInfo()
-{
- if (pMapEntries)
- delete[] pMapEntries;
-}
-
-void safe_VkSpecializationInfo::initialize(const VkSpecializationInfo* in_struct)
-{
- mapEntryCount = in_struct->mapEntryCount;
- pMapEntries = nullptr;
- dataSize = in_struct->dataSize;
- pData = in_struct->pData;
- if (in_struct->pMapEntries) {
- pMapEntries = new VkSpecializationMapEntry[in_struct->mapEntryCount];
- memcpy ((void *)pMapEntries, (void *)in_struct->pMapEntries, sizeof(VkSpecializationMapEntry)*in_struct->mapEntryCount);
- }
-}
-
-void safe_VkSpecializationInfo::initialize(const safe_VkSpecializationInfo* src)
-{
- mapEntryCount = src->mapEntryCount;
- pMapEntries = nullptr;
- dataSize = src->dataSize;
- pData = src->pData;
- if (src->pMapEntries) {
- pMapEntries = new VkSpecializationMapEntry[src->mapEntryCount];
- memcpy ((void *)pMapEntries, (void *)src->pMapEntries, sizeof(VkSpecializationMapEntry)*src->mapEntryCount);
- }
-}
-
-safe_VkPipelineShaderStageCreateInfo::safe_VkPipelineShaderStageCreateInfo(const VkPipelineShaderStageCreateInfo* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- stage(in_struct->stage),
- module(in_struct->module),
- pSpecializationInfo(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- pName = SafeStringCopy(in_struct->pName);
- if (in_struct->pSpecializationInfo)
- pSpecializationInfo = new safe_VkSpecializationInfo(in_struct->pSpecializationInfo);
-}
-
-safe_VkPipelineShaderStageCreateInfo::safe_VkPipelineShaderStageCreateInfo() :
- pNext(nullptr),
- pName(nullptr),
- pSpecializationInfo(nullptr)
-{}
-
-safe_VkPipelineShaderStageCreateInfo::safe_VkPipelineShaderStageCreateInfo(const safe_VkPipelineShaderStageCreateInfo& src)
-{
- sType = src.sType;
- flags = src.flags;
- stage = src.stage;
- module = src.module;
- pSpecializationInfo = nullptr;
- pNext = SafePnextCopy(src.pNext);
- pName = SafeStringCopy(src.pName);
- if (src.pSpecializationInfo)
- pSpecializationInfo = new safe_VkSpecializationInfo(*src.pSpecializationInfo);
-}
-
-safe_VkPipelineShaderStageCreateInfo& safe_VkPipelineShaderStageCreateInfo::operator=(const safe_VkPipelineShaderStageCreateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pName) delete [] pName;
- if (pSpecializationInfo)
- delete pSpecializationInfo;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- stage = src.stage;
- module = src.module;
- pSpecializationInfo = nullptr;
- pNext = SafePnextCopy(src.pNext);
- pName = SafeStringCopy(src.pName);
- if (src.pSpecializationInfo)
- pSpecializationInfo = new safe_VkSpecializationInfo(*src.pSpecializationInfo);
-
- return *this;
-}
-
-safe_VkPipelineShaderStageCreateInfo::~safe_VkPipelineShaderStageCreateInfo()
-{
- if (pName) delete [] pName;
- if (pSpecializationInfo)
- delete pSpecializationInfo;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPipelineShaderStageCreateInfo::initialize(const VkPipelineShaderStageCreateInfo* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- stage = in_struct->stage;
- module = in_struct->module;
- pSpecializationInfo = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- pName = SafeStringCopy(in_struct->pName);
- if (in_struct->pSpecializationInfo)
- pSpecializationInfo = new safe_VkSpecializationInfo(in_struct->pSpecializationInfo);
-}
-
-void safe_VkPipelineShaderStageCreateInfo::initialize(const safe_VkPipelineShaderStageCreateInfo* src)
-{
- sType = src->sType;
- flags = src->flags;
- stage = src->stage;
- module = src->module;
- pSpecializationInfo = nullptr;
- pNext = SafePnextCopy(src->pNext);
- pName = SafeStringCopy(src->pName);
- if (src->pSpecializationInfo)
- pSpecializationInfo = new safe_VkSpecializationInfo(*src->pSpecializationInfo);
-}
-
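A note on the pattern above: safe_VkPipelineShaderStageCreateInfo owns two allocations beyond the pNext chain — the entry-point name, duplicated with SafeStringCopy, and an optional safe_VkSpecializationInfo allocated with new and released with delete. In practice this means the caller's string and specialization data need not outlive the copy. A minimal sketch, with a hypothetical helper name and an assumed header name:

#include <string>
#include "vk_safe_struct.h"  // assumed header name

// Hypothetical helper: the returned copy keeps its own "main" string, so the
// temporary built here may safely go out of scope.
static safe_VkPipelineShaderStageCreateInfo MakeStage(VkShaderStageFlagBits stage,
                                                      VkShaderModule module) {
    std::string entry = "main";
    VkPipelineShaderStageCreateInfo ci = {};
    ci.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
    ci.stage = stage;
    ci.module = module;
    ci.pName = entry.c_str();                        // duplicated by SafeStringCopy
    return safe_VkPipelineShaderStageCreateInfo(&ci);
}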
-safe_VkPipelineVertexInputStateCreateInfo::safe_VkPipelineVertexInputStateCreateInfo(const VkPipelineVertexInputStateCreateInfo* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- vertexBindingDescriptionCount(in_struct->vertexBindingDescriptionCount),
- pVertexBindingDescriptions(nullptr),
- vertexAttributeDescriptionCount(in_struct->vertexAttributeDescriptionCount),
- pVertexAttributeDescriptions(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pVertexBindingDescriptions) {
- pVertexBindingDescriptions = new VkVertexInputBindingDescription[in_struct->vertexBindingDescriptionCount];
- memcpy ((void *)pVertexBindingDescriptions, (void *)in_struct->pVertexBindingDescriptions, sizeof(VkVertexInputBindingDescription)*in_struct->vertexBindingDescriptionCount);
- }
- if (in_struct->pVertexAttributeDescriptions) {
- pVertexAttributeDescriptions = new VkVertexInputAttributeDescription[in_struct->vertexAttributeDescriptionCount];
- memcpy ((void *)pVertexAttributeDescriptions, (void *)in_struct->pVertexAttributeDescriptions, sizeof(VkVertexInputAttributeDescription)*in_struct->vertexAttributeDescriptionCount);
- }
-}
-
-safe_VkPipelineVertexInputStateCreateInfo::safe_VkPipelineVertexInputStateCreateInfo() :
- pNext(nullptr),
- pVertexBindingDescriptions(nullptr),
- pVertexAttributeDescriptions(nullptr)
-{}
-
-safe_VkPipelineVertexInputStateCreateInfo::safe_VkPipelineVertexInputStateCreateInfo(const safe_VkPipelineVertexInputStateCreateInfo& src)
-{
- sType = src.sType;
- flags = src.flags;
- vertexBindingDescriptionCount = src.vertexBindingDescriptionCount;
- pVertexBindingDescriptions = nullptr;
- vertexAttributeDescriptionCount = src.vertexAttributeDescriptionCount;
- pVertexAttributeDescriptions = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pVertexBindingDescriptions) {
- pVertexBindingDescriptions = new VkVertexInputBindingDescription[src.vertexBindingDescriptionCount];
- memcpy ((void *)pVertexBindingDescriptions, (void *)src.pVertexBindingDescriptions, sizeof(VkVertexInputBindingDescription)*src.vertexBindingDescriptionCount);
- }
- if (src.pVertexAttributeDescriptions) {
- pVertexAttributeDescriptions = new VkVertexInputAttributeDescription[src.vertexAttributeDescriptionCount];
- memcpy ((void *)pVertexAttributeDescriptions, (void *)src.pVertexAttributeDescriptions, sizeof(VkVertexInputAttributeDescription)*src.vertexAttributeDescriptionCount);
- }
-}
-
-safe_VkPipelineVertexInputStateCreateInfo& safe_VkPipelineVertexInputStateCreateInfo::operator=(const safe_VkPipelineVertexInputStateCreateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pVertexBindingDescriptions)
- delete[] pVertexBindingDescriptions;
- if (pVertexAttributeDescriptions)
- delete[] pVertexAttributeDescriptions;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- vertexBindingDescriptionCount = src.vertexBindingDescriptionCount;
- pVertexBindingDescriptions = nullptr;
- vertexAttributeDescriptionCount = src.vertexAttributeDescriptionCount;
- pVertexAttributeDescriptions = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pVertexBindingDescriptions) {
- pVertexBindingDescriptions = new VkVertexInputBindingDescription[src.vertexBindingDescriptionCount];
- memcpy ((void *)pVertexBindingDescriptions, (void *)src.pVertexBindingDescriptions, sizeof(VkVertexInputBindingDescription)*src.vertexBindingDescriptionCount);
- }
- if (src.pVertexAttributeDescriptions) {
- pVertexAttributeDescriptions = new VkVertexInputAttributeDescription[src.vertexAttributeDescriptionCount];
- memcpy ((void *)pVertexAttributeDescriptions, (void *)src.pVertexAttributeDescriptions, sizeof(VkVertexInputAttributeDescription)*src.vertexAttributeDescriptionCount);
- }
-
- return *this;
-}
-
-safe_VkPipelineVertexInputStateCreateInfo::~safe_VkPipelineVertexInputStateCreateInfo()
-{
- if (pVertexBindingDescriptions)
- delete[] pVertexBindingDescriptions;
- if (pVertexAttributeDescriptions)
- delete[] pVertexAttributeDescriptions;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPipelineVertexInputStateCreateInfo::initialize(const VkPipelineVertexInputStateCreateInfo* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- vertexBindingDescriptionCount = in_struct->vertexBindingDescriptionCount;
- pVertexBindingDescriptions = nullptr;
- vertexAttributeDescriptionCount = in_struct->vertexAttributeDescriptionCount;
- pVertexAttributeDescriptions = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pVertexBindingDescriptions) {
- pVertexBindingDescriptions = new VkVertexInputBindingDescription[in_struct->vertexBindingDescriptionCount];
- memcpy ((void *)pVertexBindingDescriptions, (void *)in_struct->pVertexBindingDescriptions, sizeof(VkVertexInputBindingDescription)*in_struct->vertexBindingDescriptionCount);
- }
- if (in_struct->pVertexAttributeDescriptions) {
- pVertexAttributeDescriptions = new VkVertexInputAttributeDescription[in_struct->vertexAttributeDescriptionCount];
- memcpy ((void *)pVertexAttributeDescriptions, (void *)in_struct->pVertexAttributeDescriptions, sizeof(VkVertexInputAttributeDescription)*in_struct->vertexAttributeDescriptionCount);
- }
-}
-
-void safe_VkPipelineVertexInputStateCreateInfo::initialize(const safe_VkPipelineVertexInputStateCreateInfo* src)
-{
- sType = src->sType;
- flags = src->flags;
- vertexBindingDescriptionCount = src->vertexBindingDescriptionCount;
- pVertexBindingDescriptions = nullptr;
- vertexAttributeDescriptionCount = src->vertexAttributeDescriptionCount;
- pVertexAttributeDescriptions = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (src->pVertexBindingDescriptions) {
- pVertexBindingDescriptions = new VkVertexInputBindingDescription[src->vertexBindingDescriptionCount];
- memcpy ((void *)pVertexBindingDescriptions, (void *)src->pVertexBindingDescriptions, sizeof(VkVertexInputBindingDescription)*src->vertexBindingDescriptionCount);
- }
- if (src->pVertexAttributeDescriptions) {
- pVertexAttributeDescriptions = new VkVertexInputAttributeDescription[src->vertexAttributeDescriptionCount];
- memcpy ((void *)pVertexAttributeDescriptions, (void *)src->pVertexAttributeDescriptions, sizeof(VkVertexInputAttributeDescription)*src->vertexAttributeDescriptionCount);
- }
-}
-
-safe_VkPipelineInputAssemblyStateCreateInfo::safe_VkPipelineInputAssemblyStateCreateInfo(const VkPipelineInputAssemblyStateCreateInfo* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- topology(in_struct->topology),
- primitiveRestartEnable(in_struct->primitiveRestartEnable)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPipelineInputAssemblyStateCreateInfo::safe_VkPipelineInputAssemblyStateCreateInfo() :
- pNext(nullptr)
-{}
-
-safe_VkPipelineInputAssemblyStateCreateInfo::safe_VkPipelineInputAssemblyStateCreateInfo(const safe_VkPipelineInputAssemblyStateCreateInfo& src)
-{
- sType = src.sType;
- flags = src.flags;
- topology = src.topology;
- primitiveRestartEnable = src.primitiveRestartEnable;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPipelineInputAssemblyStateCreateInfo& safe_VkPipelineInputAssemblyStateCreateInfo::operator=(const safe_VkPipelineInputAssemblyStateCreateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- topology = src.topology;
- primitiveRestartEnable = src.primitiveRestartEnable;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPipelineInputAssemblyStateCreateInfo::~safe_VkPipelineInputAssemblyStateCreateInfo()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPipelineInputAssemblyStateCreateInfo::initialize(const VkPipelineInputAssemblyStateCreateInfo* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- topology = in_struct->topology;
- primitiveRestartEnable = in_struct->primitiveRestartEnable;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPipelineInputAssemblyStateCreateInfo::initialize(const safe_VkPipelineInputAssemblyStateCreateInfo* src)
-{
- sType = src->sType;
- flags = src->flags;
- topology = src->topology;
- primitiveRestartEnable = src->primitiveRestartEnable;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPipelineTessellationStateCreateInfo::safe_VkPipelineTessellationStateCreateInfo(const VkPipelineTessellationStateCreateInfo* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- patchControlPoints(in_struct->patchControlPoints)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPipelineTessellationStateCreateInfo::safe_VkPipelineTessellationStateCreateInfo() :
- pNext(nullptr)
-{}
-
-safe_VkPipelineTessellationStateCreateInfo::safe_VkPipelineTessellationStateCreateInfo(const safe_VkPipelineTessellationStateCreateInfo& src)
-{
- sType = src.sType;
- flags = src.flags;
- patchControlPoints = src.patchControlPoints;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPipelineTessellationStateCreateInfo& safe_VkPipelineTessellationStateCreateInfo::operator=(const safe_VkPipelineTessellationStateCreateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- patchControlPoints = src.patchControlPoints;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPipelineTessellationStateCreateInfo::~safe_VkPipelineTessellationStateCreateInfo()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPipelineTessellationStateCreateInfo::initialize(const VkPipelineTessellationStateCreateInfo* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- patchControlPoints = in_struct->patchControlPoints;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPipelineTessellationStateCreateInfo::initialize(const safe_VkPipelineTessellationStateCreateInfo* src)
-{
- sType = src->sType;
- flags = src->flags;
- patchControlPoints = src->patchControlPoints;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPipelineViewportStateCreateInfo::safe_VkPipelineViewportStateCreateInfo(const VkPipelineViewportStateCreateInfo* in_struct, const bool is_dynamic_viewports, const bool is_dynamic_scissors) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- viewportCount(in_struct->viewportCount),
- pViewports(nullptr),
- scissorCount(in_struct->scissorCount),
- pScissors(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pViewports && !is_dynamic_viewports) {
- pViewports = new VkViewport[in_struct->viewportCount];
- memcpy ((void *)pViewports, (void *)in_struct->pViewports, sizeof(VkViewport)*in_struct->viewportCount);
- }
- else
- pViewports = NULL;
- if (in_struct->pScissors && !is_dynamic_scissors) {
- pScissors = new VkRect2D[in_struct->scissorCount];
- memcpy ((void *)pScissors, (void *)in_struct->pScissors, sizeof(VkRect2D)*in_struct->scissorCount);
- }
- else
- pScissors = NULL;
-}
-
-safe_VkPipelineViewportStateCreateInfo::safe_VkPipelineViewportStateCreateInfo() :
- pNext(nullptr),
- pViewports(nullptr),
- pScissors(nullptr)
-{}
-
-safe_VkPipelineViewportStateCreateInfo::safe_VkPipelineViewportStateCreateInfo(const safe_VkPipelineViewportStateCreateInfo& src)
-{
- sType = src.sType;
- flags = src.flags;
- viewportCount = src.viewportCount;
- pViewports = nullptr;
- scissorCount = src.scissorCount;
- pScissors = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pViewports) {
- pViewports = new VkViewport[src.viewportCount];
- memcpy ((void *)pViewports, (void *)src.pViewports, sizeof(VkViewport)*src.viewportCount);
- }
- else
- pViewports = NULL;
- if (src.pScissors) {
- pScissors = new VkRect2D[src.scissorCount];
- memcpy ((void *)pScissors, (void *)src.pScissors, sizeof(VkRect2D)*src.scissorCount);
- }
- else
- pScissors = NULL;
-}
-
-safe_VkPipelineViewportStateCreateInfo& safe_VkPipelineViewportStateCreateInfo::operator=(const safe_VkPipelineViewportStateCreateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pViewports)
- delete[] pViewports;
- if (pScissors)
- delete[] pScissors;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- viewportCount = src.viewportCount;
- pViewports = nullptr;
- scissorCount = src.scissorCount;
- pScissors = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pViewports) {
- pViewports = new VkViewport[src.viewportCount];
- memcpy ((void *)pViewports, (void *)src.pViewports, sizeof(VkViewport)*src.viewportCount);
- }
- else
- pViewports = NULL;
- if (src.pScissors) {
- pScissors = new VkRect2D[src.scissorCount];
- memcpy ((void *)pScissors, (void *)src.pScissors, sizeof(VkRect2D)*src.scissorCount);
- }
- else
- pScissors = NULL;
-
- return *this;
-}
-
-safe_VkPipelineViewportStateCreateInfo::~safe_VkPipelineViewportStateCreateInfo()
-{
- if (pViewports)
- delete[] pViewports;
- if (pScissors)
- delete[] pScissors;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPipelineViewportStateCreateInfo::initialize(const VkPipelineViewportStateCreateInfo* in_struct, const bool is_dynamic_viewports, const bool is_dynamic_scissors)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- viewportCount = in_struct->viewportCount;
- pViewports = nullptr;
- scissorCount = in_struct->scissorCount;
- pScissors = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pViewports && !is_dynamic_viewports) {
- pViewports = new VkViewport[in_struct->viewportCount];
- memcpy ((void *)pViewports, (void *)in_struct->pViewports, sizeof(VkViewport)*in_struct->viewportCount);
- }
- else
- pViewports = NULL;
- if (in_struct->pScissors && !is_dynamic_scissors) {
- pScissors = new VkRect2D[in_struct->scissorCount];
- memcpy ((void *)pScissors, (void *)in_struct->pScissors, sizeof(VkRect2D)*in_struct->scissorCount);
- }
- else
- pScissors = NULL;
-}
-
-void safe_VkPipelineViewportStateCreateInfo::initialize(const safe_VkPipelineViewportStateCreateInfo* src)
-{
- sType = src->sType;
- flags = src->flags;
- viewportCount = src->viewportCount;
- pViewports = nullptr;
- scissorCount = src->scissorCount;
- pScissors = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (src->pViewports) {
- pViewports = new VkViewport[src->viewportCount];
- memcpy ((void *)pViewports, (void *)src->pViewports, sizeof(VkViewport)*src->viewportCount);
- }
- else
- pViewports = NULL;
- if (src->pScissors) {
- pScissors = new VkRect2D[src->scissorCount];
- memcpy ((void *)pScissors, (void *)src->pScissors, sizeof(VkRect2D)*src->scissorCount);
- }
- else
- pScissors = NULL;
-}
-
-safe_VkPipelineRasterizationStateCreateInfo::safe_VkPipelineRasterizationStateCreateInfo(const VkPipelineRasterizationStateCreateInfo* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- depthClampEnable(in_struct->depthClampEnable),
- rasterizerDiscardEnable(in_struct->rasterizerDiscardEnable),
- polygonMode(in_struct->polygonMode),
- cullMode(in_struct->cullMode),
- frontFace(in_struct->frontFace),
- depthBiasEnable(in_struct->depthBiasEnable),
- depthBiasConstantFactor(in_struct->depthBiasConstantFactor),
- depthBiasClamp(in_struct->depthBiasClamp),
- depthBiasSlopeFactor(in_struct->depthBiasSlopeFactor),
- lineWidth(in_struct->lineWidth)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPipelineRasterizationStateCreateInfo::safe_VkPipelineRasterizationStateCreateInfo() :
- pNext(nullptr)
-{}
-
-safe_VkPipelineRasterizationStateCreateInfo::safe_VkPipelineRasterizationStateCreateInfo(const safe_VkPipelineRasterizationStateCreateInfo& src)
-{
- sType = src.sType;
- flags = src.flags;
- depthClampEnable = src.depthClampEnable;
- rasterizerDiscardEnable = src.rasterizerDiscardEnable;
- polygonMode = src.polygonMode;
- cullMode = src.cullMode;
- frontFace = src.frontFace;
- depthBiasEnable = src.depthBiasEnable;
- depthBiasConstantFactor = src.depthBiasConstantFactor;
- depthBiasClamp = src.depthBiasClamp;
- depthBiasSlopeFactor = src.depthBiasSlopeFactor;
- lineWidth = src.lineWidth;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPipelineRasterizationStateCreateInfo& safe_VkPipelineRasterizationStateCreateInfo::operator=(const safe_VkPipelineRasterizationStateCreateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- depthClampEnable = src.depthClampEnable;
- rasterizerDiscardEnable = src.rasterizerDiscardEnable;
- polygonMode = src.polygonMode;
- cullMode = src.cullMode;
- frontFace = src.frontFace;
- depthBiasEnable = src.depthBiasEnable;
- depthBiasConstantFactor = src.depthBiasConstantFactor;
- depthBiasClamp = src.depthBiasClamp;
- depthBiasSlopeFactor = src.depthBiasSlopeFactor;
- lineWidth = src.lineWidth;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPipelineRasterizationStateCreateInfo::~safe_VkPipelineRasterizationStateCreateInfo()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPipelineRasterizationStateCreateInfo::initialize(const VkPipelineRasterizationStateCreateInfo* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- depthClampEnable = in_struct->depthClampEnable;
- rasterizerDiscardEnable = in_struct->rasterizerDiscardEnable;
- polygonMode = in_struct->polygonMode;
- cullMode = in_struct->cullMode;
- frontFace = in_struct->frontFace;
- depthBiasEnable = in_struct->depthBiasEnable;
- depthBiasConstantFactor = in_struct->depthBiasConstantFactor;
- depthBiasClamp = in_struct->depthBiasClamp;
- depthBiasSlopeFactor = in_struct->depthBiasSlopeFactor;
- lineWidth = in_struct->lineWidth;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPipelineRasterizationStateCreateInfo::initialize(const safe_VkPipelineRasterizationStateCreateInfo* src)
-{
- sType = src->sType;
- flags = src->flags;
- depthClampEnable = src->depthClampEnable;
- rasterizerDiscardEnable = src->rasterizerDiscardEnable;
- polygonMode = src->polygonMode;
- cullMode = src->cullMode;
- frontFace = src->frontFace;
- depthBiasEnable = src->depthBiasEnable;
- depthBiasConstantFactor = src->depthBiasConstantFactor;
- depthBiasClamp = src->depthBiasClamp;
- depthBiasSlopeFactor = src->depthBiasSlopeFactor;
- lineWidth = src->lineWidth;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPipelineMultisampleStateCreateInfo::safe_VkPipelineMultisampleStateCreateInfo(const VkPipelineMultisampleStateCreateInfo* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- rasterizationSamples(in_struct->rasterizationSamples),
- sampleShadingEnable(in_struct->sampleShadingEnable),
- minSampleShading(in_struct->minSampleShading),
- pSampleMask(nullptr),
- alphaToCoverageEnable(in_struct->alphaToCoverageEnable),
- alphaToOneEnable(in_struct->alphaToOneEnable)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pSampleMask) {
- pSampleMask = new VkSampleMask(*in_struct->pSampleMask);
- }
-}
-
-safe_VkPipelineMultisampleStateCreateInfo::safe_VkPipelineMultisampleStateCreateInfo() :
- pNext(nullptr),
- pSampleMask(nullptr)
-{}
-
-safe_VkPipelineMultisampleStateCreateInfo::safe_VkPipelineMultisampleStateCreateInfo(const safe_VkPipelineMultisampleStateCreateInfo& src)
-{
- sType = src.sType;
- flags = src.flags;
- rasterizationSamples = src.rasterizationSamples;
- sampleShadingEnable = src.sampleShadingEnable;
- minSampleShading = src.minSampleShading;
- pSampleMask = nullptr;
- alphaToCoverageEnable = src.alphaToCoverageEnable;
- alphaToOneEnable = src.alphaToOneEnable;
- pNext = SafePnextCopy(src.pNext);
- if (src.pSampleMask) {
- pSampleMask = new VkSampleMask(*src.pSampleMask);
- }
-}
-
-safe_VkPipelineMultisampleStateCreateInfo& safe_VkPipelineMultisampleStateCreateInfo::operator=(const safe_VkPipelineMultisampleStateCreateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pSampleMask)
- delete pSampleMask;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- rasterizationSamples = src.rasterizationSamples;
- sampleShadingEnable = src.sampleShadingEnable;
- minSampleShading = src.minSampleShading;
- pSampleMask = nullptr;
- alphaToCoverageEnable = src.alphaToCoverageEnable;
- alphaToOneEnable = src.alphaToOneEnable;
- pNext = SafePnextCopy(src.pNext);
- if (src.pSampleMask) {
- pSampleMask = new VkSampleMask(*src.pSampleMask);
- }
-
- return *this;
-}
-
-safe_VkPipelineMultisampleStateCreateInfo::~safe_VkPipelineMultisampleStateCreateInfo()
-{
- if (pSampleMask)
- delete pSampleMask;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPipelineMultisampleStateCreateInfo::initialize(const VkPipelineMultisampleStateCreateInfo* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- rasterizationSamples = in_struct->rasterizationSamples;
- sampleShadingEnable = in_struct->sampleShadingEnable;
- minSampleShading = in_struct->minSampleShading;
- pSampleMask = nullptr;
- alphaToCoverageEnable = in_struct->alphaToCoverageEnable;
- alphaToOneEnable = in_struct->alphaToOneEnable;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pSampleMask) {
- pSampleMask = new VkSampleMask(*in_struct->pSampleMask);
- }
-}
-
-void safe_VkPipelineMultisampleStateCreateInfo::initialize(const safe_VkPipelineMultisampleStateCreateInfo* src)
-{
- sType = src->sType;
- flags = src->flags;
- rasterizationSamples = src->rasterizationSamples;
- sampleShadingEnable = src->sampleShadingEnable;
- minSampleShading = src->minSampleShading;
- pSampleMask = nullptr;
- alphaToCoverageEnable = src->alphaToCoverageEnable;
- alphaToOneEnable = src->alphaToOneEnable;
- pNext = SafePnextCopy(src->pNext);
- if (src->pSampleMask) {
- pSampleMask = new VkSampleMask(*src->pSampleMask);
- }
-}
-
-safe_VkPipelineDepthStencilStateCreateInfo::safe_VkPipelineDepthStencilStateCreateInfo(const VkPipelineDepthStencilStateCreateInfo* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- depthTestEnable(in_struct->depthTestEnable),
- depthWriteEnable(in_struct->depthWriteEnable),
- depthCompareOp(in_struct->depthCompareOp),
- depthBoundsTestEnable(in_struct->depthBoundsTestEnable),
- stencilTestEnable(in_struct->stencilTestEnable),
- front(in_struct->front),
- back(in_struct->back),
- minDepthBounds(in_struct->minDepthBounds),
- maxDepthBounds(in_struct->maxDepthBounds)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPipelineDepthStencilStateCreateInfo::safe_VkPipelineDepthStencilStateCreateInfo() :
- pNext(nullptr)
-{}
-
-safe_VkPipelineDepthStencilStateCreateInfo::safe_VkPipelineDepthStencilStateCreateInfo(const safe_VkPipelineDepthStencilStateCreateInfo& src)
-{
- sType = src.sType;
- flags = src.flags;
- depthTestEnable = src.depthTestEnable;
- depthWriteEnable = src.depthWriteEnable;
- depthCompareOp = src.depthCompareOp;
- depthBoundsTestEnable = src.depthBoundsTestEnable;
- stencilTestEnable = src.stencilTestEnable;
- front = src.front;
- back = src.back;
- minDepthBounds = src.minDepthBounds;
- maxDepthBounds = src.maxDepthBounds;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPipelineDepthStencilStateCreateInfo& safe_VkPipelineDepthStencilStateCreateInfo::operator=(const safe_VkPipelineDepthStencilStateCreateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- depthTestEnable = src.depthTestEnable;
- depthWriteEnable = src.depthWriteEnable;
- depthCompareOp = src.depthCompareOp;
- depthBoundsTestEnable = src.depthBoundsTestEnable;
- stencilTestEnable = src.stencilTestEnable;
- front = src.front;
- back = src.back;
- minDepthBounds = src.minDepthBounds;
- maxDepthBounds = src.maxDepthBounds;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPipelineDepthStencilStateCreateInfo::~safe_VkPipelineDepthStencilStateCreateInfo()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPipelineDepthStencilStateCreateInfo::initialize(const VkPipelineDepthStencilStateCreateInfo* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- depthTestEnable = in_struct->depthTestEnable;
- depthWriteEnable = in_struct->depthWriteEnable;
- depthCompareOp = in_struct->depthCompareOp;
- depthBoundsTestEnable = in_struct->depthBoundsTestEnable;
- stencilTestEnable = in_struct->stencilTestEnable;
- front = in_struct->front;
- back = in_struct->back;
- minDepthBounds = in_struct->minDepthBounds;
- maxDepthBounds = in_struct->maxDepthBounds;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPipelineDepthStencilStateCreateInfo::initialize(const safe_VkPipelineDepthStencilStateCreateInfo* src)
-{
- sType = src->sType;
- flags = src->flags;
- depthTestEnable = src->depthTestEnable;
- depthWriteEnable = src->depthWriteEnable;
- depthCompareOp = src->depthCompareOp;
- depthBoundsTestEnable = src->depthBoundsTestEnable;
- stencilTestEnable = src->stencilTestEnable;
- front = src->front;
- back = src->back;
- minDepthBounds = src->minDepthBounds;
- maxDepthBounds = src->maxDepthBounds;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPipelineColorBlendStateCreateInfo::safe_VkPipelineColorBlendStateCreateInfo(const VkPipelineColorBlendStateCreateInfo* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- logicOpEnable(in_struct->logicOpEnable),
- logicOp(in_struct->logicOp),
- attachmentCount(in_struct->attachmentCount),
- pAttachments(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pAttachments) {
- pAttachments = new VkPipelineColorBlendAttachmentState[in_struct->attachmentCount];
- memcpy ((void *)pAttachments, (void *)in_struct->pAttachments, sizeof(VkPipelineColorBlendAttachmentState)*in_struct->attachmentCount);
- }
- for (uint32_t i = 0; i < 4; ++i) {
- blendConstants[i] = in_struct->blendConstants[i];
- }
-}
-
-safe_VkPipelineColorBlendStateCreateInfo::safe_VkPipelineColorBlendStateCreateInfo() :
- pNext(nullptr),
- pAttachments(nullptr)
-{}
-
-safe_VkPipelineColorBlendStateCreateInfo::safe_VkPipelineColorBlendStateCreateInfo(const safe_VkPipelineColorBlendStateCreateInfo& src)
-{
- sType = src.sType;
- flags = src.flags;
- logicOpEnable = src.logicOpEnable;
- logicOp = src.logicOp;
- attachmentCount = src.attachmentCount;
- pAttachments = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pAttachments) {
- pAttachments = new VkPipelineColorBlendAttachmentState[src.attachmentCount];
- memcpy ((void *)pAttachments, (void *)src.pAttachments, sizeof(VkPipelineColorBlendAttachmentState)*src.attachmentCount);
- }
- for (uint32_t i = 0; i < 4; ++i) {
- blendConstants[i] = src.blendConstants[i];
- }
-}
-
-safe_VkPipelineColorBlendStateCreateInfo& safe_VkPipelineColorBlendStateCreateInfo::operator=(const safe_VkPipelineColorBlendStateCreateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pAttachments)
- delete[] pAttachments;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- logicOpEnable = src.logicOpEnable;
- logicOp = src.logicOp;
- attachmentCount = src.attachmentCount;
- pAttachments = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pAttachments) {
- pAttachments = new VkPipelineColorBlendAttachmentState[src.attachmentCount];
- memcpy ((void *)pAttachments, (void *)src.pAttachments, sizeof(VkPipelineColorBlendAttachmentState)*src.attachmentCount);
- }
- for (uint32_t i = 0; i < 4; ++i) {
- blendConstants[i] = src.blendConstants[i];
- }
-
- return *this;
-}
-
-safe_VkPipelineColorBlendStateCreateInfo::~safe_VkPipelineColorBlendStateCreateInfo()
-{
- if (pAttachments)
- delete[] pAttachments;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPipelineColorBlendStateCreateInfo::initialize(const VkPipelineColorBlendStateCreateInfo* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- logicOpEnable = in_struct->logicOpEnable;
- logicOp = in_struct->logicOp;
- attachmentCount = in_struct->attachmentCount;
- pAttachments = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pAttachments) {
- pAttachments = new VkPipelineColorBlendAttachmentState[in_struct->attachmentCount];
- memcpy ((void *)pAttachments, (void *)in_struct->pAttachments, sizeof(VkPipelineColorBlendAttachmentState)*in_struct->attachmentCount);
- }
- for (uint32_t i = 0; i < 4; ++i) {
- blendConstants[i] = in_struct->blendConstants[i];
- }
-}
-
-void safe_VkPipelineColorBlendStateCreateInfo::initialize(const safe_VkPipelineColorBlendStateCreateInfo* src)
-{
- sType = src->sType;
- flags = src->flags;
- logicOpEnable = src->logicOpEnable;
- logicOp = src->logicOp;
- attachmentCount = src->attachmentCount;
- pAttachments = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (src->pAttachments) {
- pAttachments = new VkPipelineColorBlendAttachmentState[src->attachmentCount];
- memcpy ((void *)pAttachments, (void *)src->pAttachments, sizeof(VkPipelineColorBlendAttachmentState)*src->attachmentCount);
- }
- for (uint32_t i = 0; i < 4; ++i) {
- blendConstants[i] = src->blendConstants[i];
- }
-}
-
-safe_VkPipelineDynamicStateCreateInfo::safe_VkPipelineDynamicStateCreateInfo(const VkPipelineDynamicStateCreateInfo* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- dynamicStateCount(in_struct->dynamicStateCount),
- pDynamicStates(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pDynamicStates) {
- pDynamicStates = new VkDynamicState[in_struct->dynamicStateCount];
- memcpy ((void *)pDynamicStates, (void *)in_struct->pDynamicStates, sizeof(VkDynamicState)*in_struct->dynamicStateCount);
- }
-}
-
-safe_VkPipelineDynamicStateCreateInfo::safe_VkPipelineDynamicStateCreateInfo() :
- pNext(nullptr),
- pDynamicStates(nullptr)
-{}
-
-safe_VkPipelineDynamicStateCreateInfo::safe_VkPipelineDynamicStateCreateInfo(const safe_VkPipelineDynamicStateCreateInfo& src)
-{
- sType = src.sType;
- flags = src.flags;
- dynamicStateCount = src.dynamicStateCount;
- pDynamicStates = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pDynamicStates) {
- pDynamicStates = new VkDynamicState[src.dynamicStateCount];
- memcpy ((void *)pDynamicStates, (void *)src.pDynamicStates, sizeof(VkDynamicState)*src.dynamicStateCount);
- }
-}
-
-safe_VkPipelineDynamicStateCreateInfo& safe_VkPipelineDynamicStateCreateInfo::operator=(const safe_VkPipelineDynamicStateCreateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pDynamicStates)
- delete[] pDynamicStates;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- dynamicStateCount = src.dynamicStateCount;
- pDynamicStates = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pDynamicStates) {
- pDynamicStates = new VkDynamicState[src.dynamicStateCount];
- memcpy ((void *)pDynamicStates, (void *)src.pDynamicStates, sizeof(VkDynamicState)*src.dynamicStateCount);
- }
-
- return *this;
-}
-
-safe_VkPipelineDynamicStateCreateInfo::~safe_VkPipelineDynamicStateCreateInfo()
-{
- if (pDynamicStates)
- delete[] pDynamicStates;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPipelineDynamicStateCreateInfo::initialize(const VkPipelineDynamicStateCreateInfo* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- dynamicStateCount = in_struct->dynamicStateCount;
- pDynamicStates = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pDynamicStates) {
- pDynamicStates = new VkDynamicState[in_struct->dynamicStateCount];
- memcpy ((void *)pDynamicStates, (void *)in_struct->pDynamicStates, sizeof(VkDynamicState)*in_struct->dynamicStateCount);
- }
-}
-
-void safe_VkPipelineDynamicStateCreateInfo::initialize(const safe_VkPipelineDynamicStateCreateInfo* src)
-{
- sType = src->sType;
- flags = src->flags;
- dynamicStateCount = src->dynamicStateCount;
- pDynamicStates = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (src->pDynamicStates) {
- pDynamicStates = new VkDynamicState[src->dynamicStateCount];
- memcpy ((void *)pDynamicStates, (void *)src->pDynamicStates, sizeof(VkDynamicState)*src->dynamicStateCount);
- }
-}
-
-safe_VkGraphicsPipelineCreateInfo::safe_VkGraphicsPipelineCreateInfo(const VkGraphicsPipelineCreateInfo* in_struct, const bool uses_color_attachment, const bool uses_depthstencil_attachment) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- stageCount(in_struct->stageCount),
- pStages(nullptr),
- pVertexInputState(nullptr),
- pInputAssemblyState(nullptr),
- pTessellationState(nullptr),
- pViewportState(nullptr),
- pRasterizationState(nullptr),
- pMultisampleState(nullptr),
- pDepthStencilState(nullptr),
- pColorBlendState(nullptr),
- pDynamicState(nullptr),
- layout(in_struct->layout),
- renderPass(in_struct->renderPass),
- subpass(in_struct->subpass),
- basePipelineHandle(in_struct->basePipelineHandle),
- basePipelineIndex(in_struct->basePipelineIndex)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (stageCount && in_struct->pStages) {
- pStages = new safe_VkPipelineShaderStageCreateInfo[stageCount];
- for (uint32_t i = 0; i < stageCount; ++i) {
- pStages[i].initialize(&in_struct->pStages[i]);
- }
- }
- if (in_struct->pVertexInputState)
- pVertexInputState = new safe_VkPipelineVertexInputStateCreateInfo(in_struct->pVertexInputState);
- else
- pVertexInputState = NULL;
- if (in_struct->pInputAssemblyState)
- pInputAssemblyState = new safe_VkPipelineInputAssemblyStateCreateInfo(in_struct->pInputAssemblyState);
- else
- pInputAssemblyState = NULL;
- bool has_tessellation_stage = false;
- if (stageCount && pStages)
- for (uint32_t i = 0; i < stageCount && !has_tessellation_stage; ++i)
- if (pStages[i].stage == VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT || pStages[i].stage == VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT)
- has_tessellation_stage = true;
- if (in_struct->pTessellationState && has_tessellation_stage)
- pTessellationState = new safe_VkPipelineTessellationStateCreateInfo(in_struct->pTessellationState);
- else
- pTessellationState = NULL; // original pTessellationState pointer ignored
- bool has_rasterization = in_struct->pRasterizationState ? !in_struct->pRasterizationState->rasterizerDiscardEnable : false;
- if (in_struct->pViewportState && has_rasterization) {
- bool is_dynamic_viewports = false;
- bool is_dynamic_scissors = false;
- if (in_struct->pDynamicState && in_struct->pDynamicState->pDynamicStates) {
- for (uint32_t i = 0; i < in_struct->pDynamicState->dynamicStateCount && !is_dynamic_viewports; ++i)
- if (in_struct->pDynamicState->pDynamicStates[i] == VK_DYNAMIC_STATE_VIEWPORT)
- is_dynamic_viewports = true;
- for (uint32_t i = 0; i < in_struct->pDynamicState->dynamicStateCount && !is_dynamic_scissors; ++i)
- if (in_struct->pDynamicState->pDynamicStates[i] == VK_DYNAMIC_STATE_SCISSOR)
- is_dynamic_scissors = true;
- }
- pViewportState = new safe_VkPipelineViewportStateCreateInfo(in_struct->pViewportState, is_dynamic_viewports, is_dynamic_scissors);
- } else
- pViewportState = NULL; // original pViewportState pointer ignored
- if (in_struct->pRasterizationState)
- pRasterizationState = new safe_VkPipelineRasterizationStateCreateInfo(in_struct->pRasterizationState);
- else
- pRasterizationState = NULL;
- if (in_struct->pMultisampleState && has_rasterization)
- pMultisampleState = new safe_VkPipelineMultisampleStateCreateInfo(in_struct->pMultisampleState);
- else
- pMultisampleState = NULL; // original pMultisampleState pointer ignored
- // needs a tracked subpass state uses_depthstencil_attachment
- if (in_struct->pDepthStencilState && has_rasterization && uses_depthstencil_attachment)
- pDepthStencilState = new safe_VkPipelineDepthStencilStateCreateInfo(in_struct->pDepthStencilState);
- else
- pDepthStencilState = NULL; // original pDepthStencilState pointer ignored
- // needs a tracked subpass state uses_color_attachment
- if (in_struct->pColorBlendState && has_rasterization && uses_color_attachment)
- pColorBlendState = new safe_VkPipelineColorBlendStateCreateInfo(in_struct->pColorBlendState);
- else
- pColorBlendState = NULL; // original pColorBlendState pointer ignored
- if (in_struct->pDynamicState)
- pDynamicState = new safe_VkPipelineDynamicStateCreateInfo(in_struct->pDynamicState);
- else
- pDynamicState = NULL;
-}
-
-safe_VkGraphicsPipelineCreateInfo::safe_VkGraphicsPipelineCreateInfo() :
- pNext(nullptr),
- pStages(nullptr),
- pVertexInputState(nullptr),
- pInputAssemblyState(nullptr),
- pTessellationState(nullptr),
- pViewportState(nullptr),
- pRasterizationState(nullptr),
- pMultisampleState(nullptr),
- pDepthStencilState(nullptr),
- pColorBlendState(nullptr),
- pDynamicState(nullptr)
-{}
-
-safe_VkGraphicsPipelineCreateInfo::safe_VkGraphicsPipelineCreateInfo(const safe_VkGraphicsPipelineCreateInfo& src)
-{
- sType = src.sType;
- flags = src.flags;
- stageCount = src.stageCount;
- pStages = nullptr;
- pVertexInputState = nullptr;
- pInputAssemblyState = nullptr;
- pTessellationState = nullptr;
- pViewportState = nullptr;
- pRasterizationState = nullptr;
- pMultisampleState = nullptr;
- pDepthStencilState = nullptr;
- pColorBlendState = nullptr;
- pDynamicState = nullptr;
- layout = src.layout;
- renderPass = src.renderPass;
- subpass = src.subpass;
- basePipelineHandle = src.basePipelineHandle;
- basePipelineIndex = src.basePipelineIndex;
- pNext = SafePnextCopy(src.pNext);
- if (stageCount && src.pStages) {
- pStages = new safe_VkPipelineShaderStageCreateInfo[stageCount];
- for (uint32_t i = 0; i < stageCount; ++i) {
- pStages[i].initialize(&src.pStages[i]);
- }
- }
- if (src.pVertexInputState)
- pVertexInputState = new safe_VkPipelineVertexInputStateCreateInfo(*src.pVertexInputState);
- else
- pVertexInputState = NULL;
- if (src.pInputAssemblyState)
- pInputAssemblyState = new safe_VkPipelineInputAssemblyStateCreateInfo(*src.pInputAssemblyState);
- else
- pInputAssemblyState = NULL;
- bool has_tessellation_stage = false;
- if (stageCount && pStages)
- for (uint32_t i = 0; i < stageCount && !has_tessellation_stage; ++i)
- if (pStages[i].stage == VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT || pStages[i].stage == VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT)
- has_tessellation_stage = true;
- if (src.pTessellationState && has_tessellation_stage)
- pTessellationState = new safe_VkPipelineTessellationStateCreateInfo(*src.pTessellationState);
- else
- pTessellationState = NULL; // original pTessellationState pointer ignored
- bool has_rasterization = src.pRasterizationState ? !src.pRasterizationState->rasterizerDiscardEnable : false;
- if (src.pViewportState && has_rasterization) {
- pViewportState = new safe_VkPipelineViewportStateCreateInfo(*src.pViewportState);
- } else
- pViewportState = NULL; // original pViewportState pointer ignored
- if (src.pRasterizationState)
- pRasterizationState = new safe_VkPipelineRasterizationStateCreateInfo(*src.pRasterizationState);
- else
- pRasterizationState = NULL;
- if (src.pMultisampleState && has_rasterization)
- pMultisampleState = new safe_VkPipelineMultisampleStateCreateInfo(*src.pMultisampleState);
- else
- pMultisampleState = NULL; // original pMultisampleState pointer ignored
- if (src.pDepthStencilState && has_rasterization)
- pDepthStencilState = new safe_VkPipelineDepthStencilStateCreateInfo(*src.pDepthStencilState);
- else
- pDepthStencilState = NULL; // original pDepthStencilState pointer ignored
- if (src.pColorBlendState && has_rasterization)
- pColorBlendState = new safe_VkPipelineColorBlendStateCreateInfo(*src.pColorBlendState);
- else
- pColorBlendState = NULL; // original pColorBlendState pointer ignored
- if (src.pDynamicState)
- pDynamicState = new safe_VkPipelineDynamicStateCreateInfo(*src.pDynamicState);
- else
- pDynamicState = NULL;
-}
-
-safe_VkGraphicsPipelineCreateInfo& safe_VkGraphicsPipelineCreateInfo::operator=(const safe_VkGraphicsPipelineCreateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pStages)
- delete[] pStages;
- if (pVertexInputState)
- delete pVertexInputState;
- if (pInputAssemblyState)
- delete pInputAssemblyState;
- if (pTessellationState)
- delete pTessellationState;
- if (pViewportState)
- delete pViewportState;
- if (pRasterizationState)
- delete pRasterizationState;
- if (pMultisampleState)
- delete pMultisampleState;
- if (pDepthStencilState)
- delete pDepthStencilState;
- if (pColorBlendState)
- delete pColorBlendState;
- if (pDynamicState)
- delete pDynamicState;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- stageCount = src.stageCount;
- pStages = nullptr;
- pVertexInputState = nullptr;
- pInputAssemblyState = nullptr;
- pTessellationState = nullptr;
- pViewportState = nullptr;
- pRasterizationState = nullptr;
- pMultisampleState = nullptr;
- pDepthStencilState = nullptr;
- pColorBlendState = nullptr;
- pDynamicState = nullptr;
- layout = src.layout;
- renderPass = src.renderPass;
- subpass = src.subpass;
- basePipelineHandle = src.basePipelineHandle;
- basePipelineIndex = src.basePipelineIndex;
- pNext = SafePnextCopy(src.pNext);
- if (stageCount && src.pStages) {
- pStages = new safe_VkPipelineShaderStageCreateInfo[stageCount];
- for (uint32_t i = 0; i < stageCount; ++i) {
- pStages[i].initialize(&src.pStages[i]);
- }
- }
- if (src.pVertexInputState)
- pVertexInputState = new safe_VkPipelineVertexInputStateCreateInfo(*src.pVertexInputState);
- else
- pVertexInputState = NULL;
- if (src.pInputAssemblyState)
- pInputAssemblyState = new safe_VkPipelineInputAssemblyStateCreateInfo(*src.pInputAssemblyState);
- else
- pInputAssemblyState = NULL;
- bool has_tessellation_stage = false;
- if (stageCount && pStages)
- for (uint32_t i = 0; i < stageCount && !has_tessellation_stage; ++i)
- if (pStages[i].stage == VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT || pStages[i].stage == VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT)
- has_tessellation_stage = true;
- if (src.pTessellationState && has_tessellation_stage)
- pTessellationState = new safe_VkPipelineTessellationStateCreateInfo(*src.pTessellationState);
- else
- pTessellationState = NULL; // original pTessellationState pointer ignored
- bool has_rasterization = src.pRasterizationState ? !src.pRasterizationState->rasterizerDiscardEnable : false;
- if (src.pViewportState && has_rasterization) {
- pViewportState = new safe_VkPipelineViewportStateCreateInfo(*src.pViewportState);
- } else
- pViewportState = NULL; // original pViewportState pointer ignored
- if (src.pRasterizationState)
- pRasterizationState = new safe_VkPipelineRasterizationStateCreateInfo(*src.pRasterizationState);
- else
- pRasterizationState = NULL;
- if (src.pMultisampleState && has_rasterization)
- pMultisampleState = new safe_VkPipelineMultisampleStateCreateInfo(*src.pMultisampleState);
- else
- pMultisampleState = NULL; // original pMultisampleState pointer ignored
- if (src.pDepthStencilState && has_rasterization)
- pDepthStencilState = new safe_VkPipelineDepthStencilStateCreateInfo(*src.pDepthStencilState);
- else
- pDepthStencilState = NULL; // original pDepthStencilState pointer ignored
- if (src.pColorBlendState && has_rasterization)
- pColorBlendState = new safe_VkPipelineColorBlendStateCreateInfo(*src.pColorBlendState);
- else
- pColorBlendState = NULL; // original pColorBlendState pointer ignored
- if (src.pDynamicState)
- pDynamicState = new safe_VkPipelineDynamicStateCreateInfo(*src.pDynamicState);
- else
- pDynamicState = NULL;
-
- return *this;
-}
-
-safe_VkGraphicsPipelineCreateInfo::~safe_VkGraphicsPipelineCreateInfo()
-{
- if (pStages)
- delete[] pStages;
- if (pVertexInputState)
- delete pVertexInputState;
- if (pInputAssemblyState)
- delete pInputAssemblyState;
- if (pTessellationState)
- delete pTessellationState;
- if (pViewportState)
- delete pViewportState;
- if (pRasterizationState)
- delete pRasterizationState;
- if (pMultisampleState)
- delete pMultisampleState;
- if (pDepthStencilState)
- delete pDepthStencilState;
- if (pColorBlendState)
- delete pColorBlendState;
- if (pDynamicState)
- delete pDynamicState;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkGraphicsPipelineCreateInfo::initialize(const VkGraphicsPipelineCreateInfo* in_struct, const bool uses_color_attachment, const bool uses_depthstencil_attachment)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- stageCount = in_struct->stageCount;
- pStages = nullptr;
- pVertexInputState = nullptr;
- pInputAssemblyState = nullptr;
- pTessellationState = nullptr;
- pViewportState = nullptr;
- pRasterizationState = nullptr;
- pMultisampleState = nullptr;
- pDepthStencilState = nullptr;
- pColorBlendState = nullptr;
- pDynamicState = nullptr;
- layout = in_struct->layout;
- renderPass = in_struct->renderPass;
- subpass = in_struct->subpass;
- basePipelineHandle = in_struct->basePipelineHandle;
- basePipelineIndex = in_struct->basePipelineIndex;
- pNext = SafePnextCopy(in_struct->pNext);
- if (stageCount && in_struct->pStages) {
- pStages = new safe_VkPipelineShaderStageCreateInfo[stageCount];
- for (uint32_t i = 0; i < stageCount; ++i) {
- pStages[i].initialize(&in_struct->pStages[i]);
- }
- }
- if (in_struct->pVertexInputState)
- pVertexInputState = new safe_VkPipelineVertexInputStateCreateInfo(in_struct->pVertexInputState);
- else
- pVertexInputState = NULL;
- if (in_struct->pInputAssemblyState)
- pInputAssemblyState = new safe_VkPipelineInputAssemblyStateCreateInfo(in_struct->pInputAssemblyState);
- else
- pInputAssemblyState = NULL;
- bool has_tessellation_stage = false;
- if (stageCount && pStages)
- for (uint32_t i = 0; i < stageCount && !has_tessellation_stage; ++i)
- if (pStages[i].stage == VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT || pStages[i].stage == VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT)
- has_tessellation_stage = true;
- if (in_struct->pTessellationState && has_tessellation_stage)
- pTessellationState = new safe_VkPipelineTessellationStateCreateInfo(in_struct->pTessellationState);
- else
- pTessellationState = NULL; // original pTessellationState pointer ignored
- bool has_rasterization = in_struct->pRasterizationState ? !in_struct->pRasterizationState->rasterizerDiscardEnable : false;
- if (in_struct->pViewportState && has_rasterization) {
- bool is_dynamic_viewports = false;
- bool is_dynamic_scissors = false;
- if (in_struct->pDynamicState && in_struct->pDynamicState->pDynamicStates) {
- for (uint32_t i = 0; i < in_struct->pDynamicState->dynamicStateCount && !is_dynamic_viewports; ++i)
- if (in_struct->pDynamicState->pDynamicStates[i] == VK_DYNAMIC_STATE_VIEWPORT)
- is_dynamic_viewports = true;
- for (uint32_t i = 0; i < in_struct->pDynamicState->dynamicStateCount && !is_dynamic_scissors; ++i)
- if (in_struct->pDynamicState->pDynamicStates[i] == VK_DYNAMIC_STATE_SCISSOR)
- is_dynamic_scissors = true;
- }
- pViewportState = new safe_VkPipelineViewportStateCreateInfo(in_struct->pViewportState, is_dynamic_viewports, is_dynamic_scissors);
- } else
- pViewportState = NULL; // original pViewportState pointer ignored
- if (in_struct->pRasterizationState)
- pRasterizationState = new safe_VkPipelineRasterizationStateCreateInfo(in_struct->pRasterizationState);
- else
- pRasterizationState = NULL;
- if (in_struct->pMultisampleState && has_rasterization)
- pMultisampleState = new safe_VkPipelineMultisampleStateCreateInfo(in_struct->pMultisampleState);
- else
- pMultisampleState = NULL; // original pMultisampleState pointer ignored
- // needs a tracked subpass state uses_depthstencil_attachment
- if (in_struct->pDepthStencilState && has_rasterization && uses_depthstencil_attachment)
- pDepthStencilState = new safe_VkPipelineDepthStencilStateCreateInfo(in_struct->pDepthStencilState);
- else
- pDepthStencilState = NULL; // original pDepthStencilState pointer ignored
- // needs a tracked subpass state uses_color_attachment
- if (in_struct->pColorBlendState && has_rasterization && uses_color_attachment)
- pColorBlendState = new safe_VkPipelineColorBlendStateCreateInfo(in_struct->pColorBlendState);
- else
- pColorBlendState = NULL; // original pColorBlendState pointer ignored
- if (in_struct->pDynamicState)
- pDynamicState = new safe_VkPipelineDynamicStateCreateInfo(in_struct->pDynamicState);
- else
- pDynamicState = NULL;
-}
-
-void safe_VkGraphicsPipelineCreateInfo::initialize(const safe_VkGraphicsPipelineCreateInfo* src)
-{
- sType = src->sType;
- flags = src->flags;
- stageCount = src->stageCount;
- pStages = nullptr;
- pVertexInputState = nullptr;
- pInputAssemblyState = nullptr;
- pTessellationState = nullptr;
- pViewportState = nullptr;
- pRasterizationState = nullptr;
- pMultisampleState = nullptr;
- pDepthStencilState = nullptr;
- pColorBlendState = nullptr;
- pDynamicState = nullptr;
- layout = src->layout;
- renderPass = src->renderPass;
- subpass = src->subpass;
- basePipelineHandle = src->basePipelineHandle;
- basePipelineIndex = src->basePipelineIndex;
- pNext = SafePnextCopy(src->pNext);
- if (stageCount && src->pStages) {
- pStages = new safe_VkPipelineShaderStageCreateInfo[stageCount];
- for (uint32_t i = 0; i < stageCount; ++i) {
- pStages[i].initialize(&src->pStages[i]);
- }
- }
- if (src->pVertexInputState)
- pVertexInputState = new safe_VkPipelineVertexInputStateCreateInfo(*src->pVertexInputState);
- else
- pVertexInputState = NULL;
- if (src->pInputAssemblyState)
- pInputAssemblyState = new safe_VkPipelineInputAssemblyStateCreateInfo(*src->pInputAssemblyState);
- else
- pInputAssemblyState = NULL;
- bool has_tessellation_stage = false;
- if (stageCount && pStages)
- for (uint32_t i = 0; i < stageCount && !has_tessellation_stage; ++i)
- if (pStages[i].stage == VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT || pStages[i].stage == VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT)
- has_tessellation_stage = true;
- if (src->pTessellationState && has_tessellation_stage)
- pTessellationState = new safe_VkPipelineTessellationStateCreateInfo(*src->pTessellationState);
- else
- pTessellationState = NULL; // original pTessellationState pointer ignored
- bool has_rasterization = src->pRasterizationState ? !src->pRasterizationState->rasterizerDiscardEnable : false;
- if (src->pViewportState && has_rasterization) {
- pViewportState = new safe_VkPipelineViewportStateCreateInfo(*src->pViewportState);
- } else
- pViewportState = NULL; // original pViewportState pointer ignored
- if (src->pRasterizationState)
- pRasterizationState = new safe_VkPipelineRasterizationStateCreateInfo(*src->pRasterizationState);
- else
- pRasterizationState = NULL;
- if (src->pMultisampleState && has_rasterization)
- pMultisampleState = new safe_VkPipelineMultisampleStateCreateInfo(*src->pMultisampleState);
- else
- pMultisampleState = NULL; // original pMultisampleState pointer ignored
- if (src->pDepthStencilState && has_rasterization)
- pDepthStencilState = new safe_VkPipelineDepthStencilStateCreateInfo(*src->pDepthStencilState);
- else
- pDepthStencilState = NULL; // original pDepthStencilState pointer ignored
- if (src->pColorBlendState && has_rasterization)
- pColorBlendState = new safe_VkPipelineColorBlendStateCreateInfo(*src->pColorBlendState);
- else
- pColorBlendState = NULL; // original pColorBlendState pointer ignored
- if (src->pDynamicState)
- pDynamicState = new safe_VkPipelineDynamicStateCreateInfo(*src->pDynamicState);
- else
- pDynamicState = NULL;
-}
-
-safe_VkComputePipelineCreateInfo::safe_VkComputePipelineCreateInfo(const VkComputePipelineCreateInfo* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- stage(&in_struct->stage),
- layout(in_struct->layout),
- basePipelineHandle(in_struct->basePipelineHandle),
- basePipelineIndex(in_struct->basePipelineIndex)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkComputePipelineCreateInfo::safe_VkComputePipelineCreateInfo() :
- pNext(nullptr)
-{}
-
-safe_VkComputePipelineCreateInfo::safe_VkComputePipelineCreateInfo(const safe_VkComputePipelineCreateInfo& src)
-{
- sType = src.sType;
- flags = src.flags;
- stage.initialize(&src.stage);
- layout = src.layout;
- basePipelineHandle = src.basePipelineHandle;
- basePipelineIndex = src.basePipelineIndex;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkComputePipelineCreateInfo& safe_VkComputePipelineCreateInfo::operator=(const safe_VkComputePipelineCreateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- stage.initialize(&src.stage);
- layout = src.layout;
- basePipelineHandle = src.basePipelineHandle;
- basePipelineIndex = src.basePipelineIndex;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkComputePipelineCreateInfo::~safe_VkComputePipelineCreateInfo()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkComputePipelineCreateInfo::initialize(const VkComputePipelineCreateInfo* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- stage.initialize(&in_struct->stage);
- layout = in_struct->layout;
- basePipelineHandle = in_struct->basePipelineHandle;
- basePipelineIndex = in_struct->basePipelineIndex;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkComputePipelineCreateInfo::initialize(const safe_VkComputePipelineCreateInfo* src)
-{
- sType = src->sType;
- flags = src->flags;
- stage.initialize(&src->stage);
- layout = src->layout;
- basePipelineHandle = src->basePipelineHandle;
- basePipelineIndex = src->basePipelineIndex;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPipelineLayoutCreateInfo::safe_VkPipelineLayoutCreateInfo(const VkPipelineLayoutCreateInfo* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- setLayoutCount(in_struct->setLayoutCount),
- pSetLayouts(nullptr),
- pushConstantRangeCount(in_struct->pushConstantRangeCount),
- pPushConstantRanges(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (setLayoutCount && in_struct->pSetLayouts) {
- pSetLayouts = new VkDescriptorSetLayout[setLayoutCount];
- for (uint32_t i = 0; i < setLayoutCount; ++i) {
- pSetLayouts[i] = in_struct->pSetLayouts[i];
- }
- }
- if (in_struct->pPushConstantRanges) {
- pPushConstantRanges = new VkPushConstantRange[in_struct->pushConstantRangeCount];
- memcpy ((void *)pPushConstantRanges, (void *)in_struct->pPushConstantRanges, sizeof(VkPushConstantRange)*in_struct->pushConstantRangeCount);
- }
-}
-
-safe_VkPipelineLayoutCreateInfo::safe_VkPipelineLayoutCreateInfo() :
- pNext(nullptr),
- pSetLayouts(nullptr),
- pPushConstantRanges(nullptr)
-{}
-
-safe_VkPipelineLayoutCreateInfo::safe_VkPipelineLayoutCreateInfo(const safe_VkPipelineLayoutCreateInfo& src)
-{
- sType = src.sType;
- flags = src.flags;
- setLayoutCount = src.setLayoutCount;
- pSetLayouts = nullptr;
- pushConstantRangeCount = src.pushConstantRangeCount;
- pPushConstantRanges = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (setLayoutCount && src.pSetLayouts) {
- pSetLayouts = new VkDescriptorSetLayout[setLayoutCount];
- for (uint32_t i = 0; i < setLayoutCount; ++i) {
- pSetLayouts[i] = src.pSetLayouts[i];
- }
- }
- if (src.pPushConstantRanges) {
- pPushConstantRanges = new VkPushConstantRange[src.pushConstantRangeCount];
- memcpy ((void *)pPushConstantRanges, (void *)src.pPushConstantRanges, sizeof(VkPushConstantRange)*src.pushConstantRangeCount);
- }
-}
-
-safe_VkPipelineLayoutCreateInfo& safe_VkPipelineLayoutCreateInfo::operator=(const safe_VkPipelineLayoutCreateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pSetLayouts)
- delete[] pSetLayouts;
- if (pPushConstantRanges)
- delete[] pPushConstantRanges;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- setLayoutCount = src.setLayoutCount;
- pSetLayouts = nullptr;
- pushConstantRangeCount = src.pushConstantRangeCount;
- pPushConstantRanges = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (setLayoutCount && src.pSetLayouts) {
- pSetLayouts = new VkDescriptorSetLayout[setLayoutCount];
- for (uint32_t i = 0; i < setLayoutCount; ++i) {
- pSetLayouts[i] = src.pSetLayouts[i];
- }
- }
- if (src.pPushConstantRanges) {
- pPushConstantRanges = new VkPushConstantRange[src.pushConstantRangeCount];
- memcpy ((void *)pPushConstantRanges, (void *)src.pPushConstantRanges, sizeof(VkPushConstantRange)*src.pushConstantRangeCount);
- }
-
- return *this;
-}
-
-safe_VkPipelineLayoutCreateInfo::~safe_VkPipelineLayoutCreateInfo()
-{
- if (pSetLayouts)
- delete[] pSetLayouts;
- if (pPushConstantRanges)
- delete[] pPushConstantRanges;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPipelineLayoutCreateInfo::initialize(const VkPipelineLayoutCreateInfo* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- setLayoutCount = in_struct->setLayoutCount;
- pSetLayouts = nullptr;
- pushConstantRangeCount = in_struct->pushConstantRangeCount;
- pPushConstantRanges = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (setLayoutCount && in_struct->pSetLayouts) {
- pSetLayouts = new VkDescriptorSetLayout[setLayoutCount];
- for (uint32_t i = 0; i < setLayoutCount; ++i) {
- pSetLayouts[i] = in_struct->pSetLayouts[i];
- }
- }
- if (in_struct->pPushConstantRanges) {
- pPushConstantRanges = new VkPushConstantRange[in_struct->pushConstantRangeCount];
- memcpy ((void *)pPushConstantRanges, (void *)in_struct->pPushConstantRanges, sizeof(VkPushConstantRange)*in_struct->pushConstantRangeCount);
- }
-}
-
-void safe_VkPipelineLayoutCreateInfo::initialize(const safe_VkPipelineLayoutCreateInfo* src)
-{
- sType = src->sType;
- flags = src->flags;
- setLayoutCount = src->setLayoutCount;
- pSetLayouts = nullptr;
- pushConstantRangeCount = src->pushConstantRangeCount;
- pPushConstantRanges = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (setLayoutCount && src->pSetLayouts) {
- pSetLayouts = new VkDescriptorSetLayout[setLayoutCount];
- for (uint32_t i = 0; i < setLayoutCount; ++i) {
- pSetLayouts[i] = src->pSetLayouts[i];
- }
- }
- if (src->pPushConstantRanges) {
- pPushConstantRanges = new VkPushConstantRange[src->pushConstantRangeCount];
- memcpy ((void *)pPushConstantRanges, (void *)src->pPushConstantRanges, sizeof(VkPushConstantRange)*src->pushConstantRangeCount);
- }
-}
-
-safe_VkSamplerCreateInfo::safe_VkSamplerCreateInfo(const VkSamplerCreateInfo* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- magFilter(in_struct->magFilter),
- minFilter(in_struct->minFilter),
- mipmapMode(in_struct->mipmapMode),
- addressModeU(in_struct->addressModeU),
- addressModeV(in_struct->addressModeV),
- addressModeW(in_struct->addressModeW),
- mipLodBias(in_struct->mipLodBias),
- anisotropyEnable(in_struct->anisotropyEnable),
- maxAnisotropy(in_struct->maxAnisotropy),
- compareEnable(in_struct->compareEnable),
- compareOp(in_struct->compareOp),
- minLod(in_struct->minLod),
- maxLod(in_struct->maxLod),
- borderColor(in_struct->borderColor),
- unnormalizedCoordinates(in_struct->unnormalizedCoordinates)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkSamplerCreateInfo::safe_VkSamplerCreateInfo() :
- pNext(nullptr)
-{}
-
-safe_VkSamplerCreateInfo::safe_VkSamplerCreateInfo(const safe_VkSamplerCreateInfo& src)
-{
- sType = src.sType;
- flags = src.flags;
- magFilter = src.magFilter;
- minFilter = src.minFilter;
- mipmapMode = src.mipmapMode;
- addressModeU = src.addressModeU;
- addressModeV = src.addressModeV;
- addressModeW = src.addressModeW;
- mipLodBias = src.mipLodBias;
- anisotropyEnable = src.anisotropyEnable;
- maxAnisotropy = src.maxAnisotropy;
- compareEnable = src.compareEnable;
- compareOp = src.compareOp;
- minLod = src.minLod;
- maxLod = src.maxLod;
- borderColor = src.borderColor;
- unnormalizedCoordinates = src.unnormalizedCoordinates;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkSamplerCreateInfo& safe_VkSamplerCreateInfo::operator=(const safe_VkSamplerCreateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- magFilter = src.magFilter;
- minFilter = src.minFilter;
- mipmapMode = src.mipmapMode;
- addressModeU = src.addressModeU;
- addressModeV = src.addressModeV;
- addressModeW = src.addressModeW;
- mipLodBias = src.mipLodBias;
- anisotropyEnable = src.anisotropyEnable;
- maxAnisotropy = src.maxAnisotropy;
- compareEnable = src.compareEnable;
- compareOp = src.compareOp;
- minLod = src.minLod;
- maxLod = src.maxLod;
- borderColor = src.borderColor;
- unnormalizedCoordinates = src.unnormalizedCoordinates;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkSamplerCreateInfo::~safe_VkSamplerCreateInfo()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkSamplerCreateInfo::initialize(const VkSamplerCreateInfo* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- magFilter = in_struct->magFilter;
- minFilter = in_struct->minFilter;
- mipmapMode = in_struct->mipmapMode;
- addressModeU = in_struct->addressModeU;
- addressModeV = in_struct->addressModeV;
- addressModeW = in_struct->addressModeW;
- mipLodBias = in_struct->mipLodBias;
- anisotropyEnable = in_struct->anisotropyEnable;
- maxAnisotropy = in_struct->maxAnisotropy;
- compareEnable = in_struct->compareEnable;
- compareOp = in_struct->compareOp;
- minLod = in_struct->minLod;
- maxLod = in_struct->maxLod;
- borderColor = in_struct->borderColor;
- unnormalizedCoordinates = in_struct->unnormalizedCoordinates;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkSamplerCreateInfo::initialize(const safe_VkSamplerCreateInfo* src)
-{
- sType = src->sType;
- flags = src->flags;
- magFilter = src->magFilter;
- minFilter = src->minFilter;
- mipmapMode = src->mipmapMode;
- addressModeU = src->addressModeU;
- addressModeV = src->addressModeV;
- addressModeW = src->addressModeW;
- mipLodBias = src->mipLodBias;
- anisotropyEnable = src->anisotropyEnable;
- maxAnisotropy = src->maxAnisotropy;
- compareEnable = src->compareEnable;
- compareOp = src->compareOp;
- minLod = src->minLod;
- maxLod = src->maxLod;
- borderColor = src->borderColor;
- unnormalizedCoordinates = src->unnormalizedCoordinates;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkDescriptorSetLayoutBinding::safe_VkDescriptorSetLayoutBinding(const VkDescriptorSetLayoutBinding* in_struct) :
- binding(in_struct->binding),
- descriptorType(in_struct->descriptorType),
- descriptorCount(in_struct->descriptorCount),
- stageFlags(in_struct->stageFlags),
- pImmutableSamplers(nullptr)
-{
- const bool sampler_type = in_struct->descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER || in_struct->descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
- if (descriptorCount && in_struct->pImmutableSamplers && sampler_type) {
- pImmutableSamplers = new VkSampler[descriptorCount];
- for (uint32_t i = 0; i < descriptorCount; ++i) {
- pImmutableSamplers[i] = in_struct->pImmutableSamplers[i];
- }
- }
-}
-
-safe_VkDescriptorSetLayoutBinding::safe_VkDescriptorSetLayoutBinding() :
- pImmutableSamplers(nullptr)
-{}
-
-safe_VkDescriptorSetLayoutBinding::safe_VkDescriptorSetLayoutBinding(const safe_VkDescriptorSetLayoutBinding& src)
-{
- binding = src.binding;
- descriptorType = src.descriptorType;
- descriptorCount = src.descriptorCount;
- stageFlags = src.stageFlags;
- pImmutableSamplers = nullptr;
- const bool sampler_type = src.descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER || src.descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
- if (descriptorCount && src.pImmutableSamplers && sampler_type) {
- pImmutableSamplers = new VkSampler[descriptorCount];
- for (uint32_t i = 0; i < descriptorCount; ++i) {
- pImmutableSamplers[i] = src.pImmutableSamplers[i];
- }
- }
-}
-
-safe_VkDescriptorSetLayoutBinding& safe_VkDescriptorSetLayoutBinding::operator=(const safe_VkDescriptorSetLayoutBinding& src)
-{
- if (&src == this) return *this;
-
- if (pImmutableSamplers)
- delete[] pImmutableSamplers;
-
- binding = src.binding;
- descriptorType = src.descriptorType;
- descriptorCount = src.descriptorCount;
- stageFlags = src.stageFlags;
- pImmutableSamplers = nullptr;
- const bool sampler_type = src.descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER || src.descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
- if (descriptorCount && src.pImmutableSamplers && sampler_type) {
- pImmutableSamplers = new VkSampler[descriptorCount];
- for (uint32_t i = 0; i < descriptorCount; ++i) {
- pImmutableSamplers[i] = src.pImmutableSamplers[i];
- }
- }
-
- return *this;
-}
-
-safe_VkDescriptorSetLayoutBinding::~safe_VkDescriptorSetLayoutBinding()
-{
- if (pImmutableSamplers)
- delete[] pImmutableSamplers;
-}
-
-void safe_VkDescriptorSetLayoutBinding::initialize(const VkDescriptorSetLayoutBinding* in_struct)
-{
- binding = in_struct->binding;
- descriptorType = in_struct->descriptorType;
- descriptorCount = in_struct->descriptorCount;
- stageFlags = in_struct->stageFlags;
- pImmutableSamplers = nullptr;
- const bool sampler_type = in_struct->descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER || in_struct->descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
- if (descriptorCount && in_struct->pImmutableSamplers && sampler_type) {
- pImmutableSamplers = new VkSampler[descriptorCount];
- for (uint32_t i = 0; i < descriptorCount; ++i) {
- pImmutableSamplers[i] = in_struct->pImmutableSamplers[i];
- }
- }
-}
-
-void safe_VkDescriptorSetLayoutBinding::initialize(const safe_VkDescriptorSetLayoutBinding* src)
-{
- binding = src->binding;
- descriptorType = src->descriptorType;
- descriptorCount = src->descriptorCount;
- stageFlags = src->stageFlags;
- pImmutableSamplers = nullptr;
- const bool sampler_type = src->descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER || src->descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
- if (descriptorCount && src->pImmutableSamplers && sampler_type) {
- pImmutableSamplers = new VkSampler[descriptorCount];
- for (uint32_t i = 0; i < descriptorCount; ++i) {
- pImmutableSamplers[i] = src->pImmutableSamplers[i];
- }
- }
-}
-
-safe_VkDescriptorSetLayoutCreateInfo::safe_VkDescriptorSetLayoutCreateInfo(const VkDescriptorSetLayoutCreateInfo* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- bindingCount(in_struct->bindingCount),
- pBindings(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (bindingCount && in_struct->pBindings) {
- pBindings = new safe_VkDescriptorSetLayoutBinding[bindingCount];
- for (uint32_t i = 0; i < bindingCount; ++i) {
- pBindings[i].initialize(&in_struct->pBindings[i]);
- }
- }
-}
-
-safe_VkDescriptorSetLayoutCreateInfo::safe_VkDescriptorSetLayoutCreateInfo() :
- pNext(nullptr),
- pBindings(nullptr)
-{}
-
-safe_VkDescriptorSetLayoutCreateInfo::safe_VkDescriptorSetLayoutCreateInfo(const safe_VkDescriptorSetLayoutCreateInfo& src)
-{
- sType = src.sType;
- flags = src.flags;
- bindingCount = src.bindingCount;
- pBindings = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (bindingCount && src.pBindings) {
- pBindings = new safe_VkDescriptorSetLayoutBinding[bindingCount];
- for (uint32_t i = 0; i < bindingCount; ++i) {
- pBindings[i].initialize(&src.pBindings[i]);
- }
- }
-}
-
-safe_VkDescriptorSetLayoutCreateInfo& safe_VkDescriptorSetLayoutCreateInfo::operator=(const safe_VkDescriptorSetLayoutCreateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pBindings)
- delete[] pBindings;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- bindingCount = src.bindingCount;
- pBindings = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (bindingCount && src.pBindings) {
- pBindings = new safe_VkDescriptorSetLayoutBinding[bindingCount];
- for (uint32_t i = 0; i < bindingCount; ++i) {
- pBindings[i].initialize(&src.pBindings[i]);
- }
- }
-
- return *this;
-}
-
-safe_VkDescriptorSetLayoutCreateInfo::~safe_VkDescriptorSetLayoutCreateInfo()
-{
- if (pBindings)
- delete[] pBindings;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDescriptorSetLayoutCreateInfo::initialize(const VkDescriptorSetLayoutCreateInfo* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- bindingCount = in_struct->bindingCount;
- pBindings = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (bindingCount && in_struct->pBindings) {
- pBindings = new safe_VkDescriptorSetLayoutBinding[bindingCount];
- for (uint32_t i = 0; i < bindingCount; ++i) {
- pBindings[i].initialize(&in_struct->pBindings[i]);
- }
- }
-}
-
-void safe_VkDescriptorSetLayoutCreateInfo::initialize(const safe_VkDescriptorSetLayoutCreateInfo* src)
-{
- sType = src->sType;
- flags = src->flags;
- bindingCount = src->bindingCount;
- pBindings = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (bindingCount && src->pBindings) {
- pBindings = new safe_VkDescriptorSetLayoutBinding[bindingCount];
- for (uint32_t i = 0; i < bindingCount; ++i) {
- pBindings[i].initialize(&src->pBindings[i]);
- }
- }
-}
-
-safe_VkDescriptorPoolCreateInfo::safe_VkDescriptorPoolCreateInfo(const VkDescriptorPoolCreateInfo* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- maxSets(in_struct->maxSets),
- poolSizeCount(in_struct->poolSizeCount),
- pPoolSizes(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pPoolSizes) {
- pPoolSizes = new VkDescriptorPoolSize[in_struct->poolSizeCount];
- memcpy ((void *)pPoolSizes, (void *)in_struct->pPoolSizes, sizeof(VkDescriptorPoolSize)*in_struct->poolSizeCount);
- }
-}
-
-safe_VkDescriptorPoolCreateInfo::safe_VkDescriptorPoolCreateInfo() :
- pNext(nullptr),
- pPoolSizes(nullptr)
-{}
-
-safe_VkDescriptorPoolCreateInfo::safe_VkDescriptorPoolCreateInfo(const safe_VkDescriptorPoolCreateInfo& src)
-{
- sType = src.sType;
- flags = src.flags;
- maxSets = src.maxSets;
- poolSizeCount = src.poolSizeCount;
- pPoolSizes = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pPoolSizes) {
- pPoolSizes = new VkDescriptorPoolSize[src.poolSizeCount];
- memcpy ((void *)pPoolSizes, (void *)src.pPoolSizes, sizeof(VkDescriptorPoolSize)*src.poolSizeCount);
- }
-}
-
-safe_VkDescriptorPoolCreateInfo& safe_VkDescriptorPoolCreateInfo::operator=(const safe_VkDescriptorPoolCreateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pPoolSizes)
- delete[] pPoolSizes;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- maxSets = src.maxSets;
- poolSizeCount = src.poolSizeCount;
- pPoolSizes = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pPoolSizes) {
- pPoolSizes = new VkDescriptorPoolSize[src.poolSizeCount];
- memcpy ((void *)pPoolSizes, (void *)src.pPoolSizes, sizeof(VkDescriptorPoolSize)*src.poolSizeCount);
- }
-
- return *this;
-}
-
-safe_VkDescriptorPoolCreateInfo::~safe_VkDescriptorPoolCreateInfo()
-{
- if (pPoolSizes)
- delete[] pPoolSizes;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDescriptorPoolCreateInfo::initialize(const VkDescriptorPoolCreateInfo* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- maxSets = in_struct->maxSets;
- poolSizeCount = in_struct->poolSizeCount;
- pPoolSizes = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pPoolSizes) {
- pPoolSizes = new VkDescriptorPoolSize[in_struct->poolSizeCount];
- memcpy ((void *)pPoolSizes, (void *)in_struct->pPoolSizes, sizeof(VkDescriptorPoolSize)*in_struct->poolSizeCount);
- }
-}
-
-void safe_VkDescriptorPoolCreateInfo::initialize(const safe_VkDescriptorPoolCreateInfo* src)
-{
- sType = src->sType;
- flags = src->flags;
- maxSets = src->maxSets;
- poolSizeCount = src->poolSizeCount;
- pPoolSizes = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (src->pPoolSizes) {
- pPoolSizes = new VkDescriptorPoolSize[src->poolSizeCount];
- memcpy ((void *)pPoolSizes, (void *)src->pPoolSizes, sizeof(VkDescriptorPoolSize)*src->poolSizeCount);
- }
-}
-
-safe_VkDescriptorSetAllocateInfo::safe_VkDescriptorSetAllocateInfo(const VkDescriptorSetAllocateInfo* in_struct) :
- sType(in_struct->sType),
- descriptorPool(in_struct->descriptorPool),
- descriptorSetCount(in_struct->descriptorSetCount),
- pSetLayouts(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (descriptorSetCount && in_struct->pSetLayouts) {
- pSetLayouts = new VkDescriptorSetLayout[descriptorSetCount];
- for (uint32_t i = 0; i < descriptorSetCount; ++i) {
- pSetLayouts[i] = in_struct->pSetLayouts[i];
- }
- }
-}
-
-safe_VkDescriptorSetAllocateInfo::safe_VkDescriptorSetAllocateInfo() :
- pNext(nullptr),
- pSetLayouts(nullptr)
-{}
-
-safe_VkDescriptorSetAllocateInfo::safe_VkDescriptorSetAllocateInfo(const safe_VkDescriptorSetAllocateInfo& src)
-{
- sType = src.sType;
- descriptorPool = src.descriptorPool;
- descriptorSetCount = src.descriptorSetCount;
- pSetLayouts = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (descriptorSetCount && src.pSetLayouts) {
- pSetLayouts = new VkDescriptorSetLayout[descriptorSetCount];
- for (uint32_t i = 0; i < descriptorSetCount; ++i) {
- pSetLayouts[i] = src.pSetLayouts[i];
- }
- }
-}
-
-safe_VkDescriptorSetAllocateInfo& safe_VkDescriptorSetAllocateInfo::operator=(const safe_VkDescriptorSetAllocateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pSetLayouts)
- delete[] pSetLayouts;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- descriptorPool = src.descriptorPool;
- descriptorSetCount = src.descriptorSetCount;
- pSetLayouts = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (descriptorSetCount && src.pSetLayouts) {
- pSetLayouts = new VkDescriptorSetLayout[descriptorSetCount];
- for (uint32_t i = 0; i < descriptorSetCount; ++i) {
- pSetLayouts[i] = src.pSetLayouts[i];
- }
- }
-
- return *this;
-}
-
-safe_VkDescriptorSetAllocateInfo::~safe_VkDescriptorSetAllocateInfo()
-{
- if (pSetLayouts)
- delete[] pSetLayouts;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDescriptorSetAllocateInfo::initialize(const VkDescriptorSetAllocateInfo* in_struct)
-{
- sType = in_struct->sType;
- descriptorPool = in_struct->descriptorPool;
- descriptorSetCount = in_struct->descriptorSetCount;
- pSetLayouts = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (descriptorSetCount && in_struct->pSetLayouts) {
- pSetLayouts = new VkDescriptorSetLayout[descriptorSetCount];
- for (uint32_t i = 0; i < descriptorSetCount; ++i) {
- pSetLayouts[i] = in_struct->pSetLayouts[i];
- }
- }
-}
-
-void safe_VkDescriptorSetAllocateInfo::initialize(const safe_VkDescriptorSetAllocateInfo* src)
-{
- sType = src->sType;
- descriptorPool = src->descriptorPool;
- descriptorSetCount = src->descriptorSetCount;
- pSetLayouts = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (descriptorSetCount && src->pSetLayouts) {
- pSetLayouts = new VkDescriptorSetLayout[descriptorSetCount];
- for (uint32_t i = 0; i < descriptorSetCount; ++i) {
- pSetLayouts[i] = src->pSetLayouts[i];
- }
- }
-}
-
-safe_VkWriteDescriptorSet::safe_VkWriteDescriptorSet(const VkWriteDescriptorSet* in_struct) :
- sType(in_struct->sType),
- dstSet(in_struct->dstSet),
- dstBinding(in_struct->dstBinding),
- dstArrayElement(in_struct->dstArrayElement),
- descriptorCount(in_struct->descriptorCount),
- descriptorType(in_struct->descriptorType),
- pImageInfo(nullptr),
- pBufferInfo(nullptr),
- pTexelBufferView(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- switch (descriptorType) {
- case VK_DESCRIPTOR_TYPE_SAMPLER:
- case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
- case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
- case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
- case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
- if (descriptorCount && in_struct->pImageInfo) {
- pImageInfo = new VkDescriptorImageInfo[descriptorCount];
- for (uint32_t i = 0; i < descriptorCount; ++i) {
- pImageInfo[i] = in_struct->pImageInfo[i];
- }
- }
- break;
- case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
- case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
- case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
- case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
- if (descriptorCount && in_struct->pBufferInfo) {
- pBufferInfo = new VkDescriptorBufferInfo[descriptorCount];
- for (uint32_t i = 0; i < descriptorCount; ++i) {
- pBufferInfo[i] = in_struct->pBufferInfo[i];
- }
- }
- break;
- case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
- case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
- if (descriptorCount && in_struct->pTexelBufferView) {
- pTexelBufferView = new VkBufferView[descriptorCount];
- for (uint32_t i = 0; i < descriptorCount; ++i) {
- pTexelBufferView[i] = in_struct->pTexelBufferView[i];
- }
- }
- break;
- default:
- break;
- }
-}
-
-safe_VkWriteDescriptorSet::safe_VkWriteDescriptorSet() :
- pNext(nullptr),
- pImageInfo(nullptr),
- pBufferInfo(nullptr),
- pTexelBufferView(nullptr)
-{}
-
-safe_VkWriteDescriptorSet::safe_VkWriteDescriptorSet(const safe_VkWriteDescriptorSet& src)
-{
- sType = src.sType;
- dstSet = src.dstSet;
- dstBinding = src.dstBinding;
- dstArrayElement = src.dstArrayElement;
- descriptorCount = src.descriptorCount;
- descriptorType = src.descriptorType;
- pImageInfo = nullptr;
- pBufferInfo = nullptr;
- pTexelBufferView = nullptr;
- pNext = SafePnextCopy(src.pNext);
- switch (descriptorType) {
- case VK_DESCRIPTOR_TYPE_SAMPLER:
- case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
- case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
- case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
- case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
- if (descriptorCount && src.pImageInfo) {
- pImageInfo = new VkDescriptorImageInfo[descriptorCount];
- for (uint32_t i = 0; i < descriptorCount; ++i) {
- pImageInfo[i] = src.pImageInfo[i];
- }
- }
- break;
- case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
- case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
- case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
- case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
- if (descriptorCount && src.pBufferInfo) {
- pBufferInfo = new VkDescriptorBufferInfo[descriptorCount];
- for (uint32_t i = 0; i < descriptorCount; ++i) {
- pBufferInfo[i] = src.pBufferInfo[i];
- }
- }
- break;
- case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
- case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
- if (descriptorCount && src.pTexelBufferView) {
- pTexelBufferView = new VkBufferView[descriptorCount];
- for (uint32_t i = 0; i < descriptorCount; ++i) {
- pTexelBufferView[i] = src.pTexelBufferView[i];
- }
- }
- break;
- default:
- break;
- }
-}
-
-safe_VkWriteDescriptorSet& safe_VkWriteDescriptorSet::operator=(const safe_VkWriteDescriptorSet& src)
-{
- if (&src == this) return *this;
-
- if (pImageInfo)
- delete[] pImageInfo;
- if (pBufferInfo)
- delete[] pBufferInfo;
- if (pTexelBufferView)
- delete[] pTexelBufferView;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- dstSet = src.dstSet;
- dstBinding = src.dstBinding;
- dstArrayElement = src.dstArrayElement;
- descriptorCount = src.descriptorCount;
- descriptorType = src.descriptorType;
- pImageInfo = nullptr;
- pBufferInfo = nullptr;
- pTexelBufferView = nullptr;
- pNext = SafePnextCopy(src.pNext);
- switch (descriptorType) {
- case VK_DESCRIPTOR_TYPE_SAMPLER:
- case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
- case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
- case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
- case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
- if (descriptorCount && src.pImageInfo) {
- pImageInfo = new VkDescriptorImageInfo[descriptorCount];
- for (uint32_t i = 0; i < descriptorCount; ++i) {
- pImageInfo[i] = src.pImageInfo[i];
- }
- }
- break;
- case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
- case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
- case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
- case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
- if (descriptorCount && src.pBufferInfo) {
- pBufferInfo = new VkDescriptorBufferInfo[descriptorCount];
- for (uint32_t i = 0; i < descriptorCount; ++i) {
- pBufferInfo[i] = src.pBufferInfo[i];
- }
- }
- break;
- case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
- case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
- if (descriptorCount && src.pTexelBufferView) {
- pTexelBufferView = new VkBufferView[descriptorCount];
- for (uint32_t i = 0; i < descriptorCount; ++i) {
- pTexelBufferView[i] = src.pTexelBufferView[i];
- }
- }
- break;
- default:
- break;
- }
-
- return *this;
-}
-
-safe_VkWriteDescriptorSet::~safe_VkWriteDescriptorSet()
-{
- if (pImageInfo)
- delete[] pImageInfo;
- if (pBufferInfo)
- delete[] pBufferInfo;
- if (pTexelBufferView)
- delete[] pTexelBufferView;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkWriteDescriptorSet::initialize(const VkWriteDescriptorSet* in_struct)
-{
- sType = in_struct->sType;
- dstSet = in_struct->dstSet;
- dstBinding = in_struct->dstBinding;
- dstArrayElement = in_struct->dstArrayElement;
- descriptorCount = in_struct->descriptorCount;
- descriptorType = in_struct->descriptorType;
- pImageInfo = nullptr;
- pBufferInfo = nullptr;
- pTexelBufferView = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- switch (descriptorType) {
- case VK_DESCRIPTOR_TYPE_SAMPLER:
- case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
- case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
- case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
- case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
- if (descriptorCount && in_struct->pImageInfo) {
- pImageInfo = new VkDescriptorImageInfo[descriptorCount];
- for (uint32_t i = 0; i < descriptorCount; ++i) {
- pImageInfo[i] = in_struct->pImageInfo[i];
- }
- }
- break;
- case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
- case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
- case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
- case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
- if (descriptorCount && in_struct->pBufferInfo) {
- pBufferInfo = new VkDescriptorBufferInfo[descriptorCount];
- for (uint32_t i = 0; i < descriptorCount; ++i) {
- pBufferInfo[i] = in_struct->pBufferInfo[i];
- }
- }
- break;
- case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
- case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
- if (descriptorCount && in_struct->pTexelBufferView) {
- pTexelBufferView = new VkBufferView[descriptorCount];
- for (uint32_t i = 0; i < descriptorCount; ++i) {
- pTexelBufferView[i] = in_struct->pTexelBufferView[i];
- }
- }
- break;
- default:
- break;
- }
-}
-
-void safe_VkWriteDescriptorSet::initialize(const safe_VkWriteDescriptorSet* src)
-{
- sType = src->sType;
- dstSet = src->dstSet;
- dstBinding = src->dstBinding;
- dstArrayElement = src->dstArrayElement;
- descriptorCount = src->descriptorCount;
- descriptorType = src->descriptorType;
- pImageInfo = nullptr;
- pBufferInfo = nullptr;
- pTexelBufferView = nullptr;
- pNext = SafePnextCopy(src->pNext);
- switch (descriptorType) {
- case VK_DESCRIPTOR_TYPE_SAMPLER:
- case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
- case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
- case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
- case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
- if (descriptorCount && src->pImageInfo) {
- pImageInfo = new VkDescriptorImageInfo[descriptorCount];
- for (uint32_t i = 0; i < descriptorCount; ++i) {
- pImageInfo[i] = src->pImageInfo[i];
- }
- }
- break;
- case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
- case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
- case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
- case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
- if (descriptorCount && src->pBufferInfo) {
- pBufferInfo = new VkDescriptorBufferInfo[descriptorCount];
- for (uint32_t i = 0; i < descriptorCount; ++i) {
- pBufferInfo[i] = src->pBufferInfo[i];
- }
- }
- break;
- case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
- case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
- if (descriptorCount && src->pTexelBufferView) {
- pTexelBufferView = new VkBufferView[descriptorCount];
- for (uint32_t i = 0; i < descriptorCount; ++i) {
- pTexelBufferView[i] = src->pTexelBufferView[i];
- }
- }
- break;
- default:
- break;
- }
-}
-
-safe_VkCopyDescriptorSet::safe_VkCopyDescriptorSet(const VkCopyDescriptorSet* in_struct) :
- sType(in_struct->sType),
- srcSet(in_struct->srcSet),
- srcBinding(in_struct->srcBinding),
- srcArrayElement(in_struct->srcArrayElement),
- dstSet(in_struct->dstSet),
- dstBinding(in_struct->dstBinding),
- dstArrayElement(in_struct->dstArrayElement),
- descriptorCount(in_struct->descriptorCount)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkCopyDescriptorSet::safe_VkCopyDescriptorSet() :
- pNext(nullptr)
-{}
-
-safe_VkCopyDescriptorSet::safe_VkCopyDescriptorSet(const safe_VkCopyDescriptorSet& src)
-{
- sType = src.sType;
- srcSet = src.srcSet;
- srcBinding = src.srcBinding;
- srcArrayElement = src.srcArrayElement;
- dstSet = src.dstSet;
- dstBinding = src.dstBinding;
- dstArrayElement = src.dstArrayElement;
- descriptorCount = src.descriptorCount;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkCopyDescriptorSet& safe_VkCopyDescriptorSet::operator=(const safe_VkCopyDescriptorSet& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- srcSet = src.srcSet;
- srcBinding = src.srcBinding;
- srcArrayElement = src.srcArrayElement;
- dstSet = src.dstSet;
- dstBinding = src.dstBinding;
- dstArrayElement = src.dstArrayElement;
- descriptorCount = src.descriptorCount;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkCopyDescriptorSet::~safe_VkCopyDescriptorSet()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkCopyDescriptorSet::initialize(const VkCopyDescriptorSet* in_struct)
-{
- sType = in_struct->sType;
- srcSet = in_struct->srcSet;
- srcBinding = in_struct->srcBinding;
- srcArrayElement = in_struct->srcArrayElement;
- dstSet = in_struct->dstSet;
- dstBinding = in_struct->dstBinding;
- dstArrayElement = in_struct->dstArrayElement;
- descriptorCount = in_struct->descriptorCount;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkCopyDescriptorSet::initialize(const safe_VkCopyDescriptorSet* src)
-{
- sType = src->sType;
- srcSet = src->srcSet;
- srcBinding = src->srcBinding;
- srcArrayElement = src->srcArrayElement;
- dstSet = src->dstSet;
- dstBinding = src->dstBinding;
- dstArrayElement = src->dstArrayElement;
- descriptorCount = src->descriptorCount;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkFramebufferCreateInfo::safe_VkFramebufferCreateInfo(const VkFramebufferCreateInfo* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- renderPass(in_struct->renderPass),
- attachmentCount(in_struct->attachmentCount),
- pAttachments(nullptr),
- width(in_struct->width),
- height(in_struct->height),
- layers(in_struct->layers)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (attachmentCount && in_struct->pAttachments) {
- pAttachments = new VkImageView[attachmentCount];
- for (uint32_t i = 0; i < attachmentCount; ++i) {
- pAttachments[i] = in_struct->pAttachments[i];
- }
- }
-}
-
-safe_VkFramebufferCreateInfo::safe_VkFramebufferCreateInfo() :
- pNext(nullptr),
- pAttachments(nullptr)
-{}
-
-safe_VkFramebufferCreateInfo::safe_VkFramebufferCreateInfo(const safe_VkFramebufferCreateInfo& src)
-{
- sType = src.sType;
- flags = src.flags;
- renderPass = src.renderPass;
- attachmentCount = src.attachmentCount;
- pAttachments = nullptr;
- width = src.width;
- height = src.height;
- layers = src.layers;
- pNext = SafePnextCopy(src.pNext);
- if (attachmentCount && src.pAttachments) {
- pAttachments = new VkImageView[attachmentCount];
- for (uint32_t i = 0; i < attachmentCount; ++i) {
- pAttachments[i] = src.pAttachments[i];
- }
- }
-}
-
-safe_VkFramebufferCreateInfo& safe_VkFramebufferCreateInfo::operator=(const safe_VkFramebufferCreateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pAttachments)
- delete[] pAttachments;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- renderPass = src.renderPass;
- attachmentCount = src.attachmentCount;
- pAttachments = nullptr;
- width = src.width;
- height = src.height;
- layers = src.layers;
- pNext = SafePnextCopy(src.pNext);
- if (attachmentCount && src.pAttachments) {
- pAttachments = new VkImageView[attachmentCount];
- for (uint32_t i = 0; i < attachmentCount; ++i) {
- pAttachments[i] = src.pAttachments[i];
- }
- }
-
- return *this;
-}
-
-safe_VkFramebufferCreateInfo::~safe_VkFramebufferCreateInfo()
-{
- if (pAttachments)
- delete[] pAttachments;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkFramebufferCreateInfo::initialize(const VkFramebufferCreateInfo* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- renderPass = in_struct->renderPass;
- attachmentCount = in_struct->attachmentCount;
- pAttachments = nullptr;
- width = in_struct->width;
- height = in_struct->height;
- layers = in_struct->layers;
- pNext = SafePnextCopy(in_struct->pNext);
- if (attachmentCount && in_struct->pAttachments) {
- pAttachments = new VkImageView[attachmentCount];
- for (uint32_t i = 0; i < attachmentCount; ++i) {
- pAttachments[i] = in_struct->pAttachments[i];
- }
- }
-}
-
-void safe_VkFramebufferCreateInfo::initialize(const safe_VkFramebufferCreateInfo* src)
-{
- sType = src->sType;
- flags = src->flags;
- renderPass = src->renderPass;
- attachmentCount = src->attachmentCount;
- pAttachments = nullptr;
- width = src->width;
- height = src->height;
- layers = src->layers;
- pNext = SafePnextCopy(src->pNext);
- if (attachmentCount && src->pAttachments) {
- pAttachments = new VkImageView[attachmentCount];
- for (uint32_t i = 0; i < attachmentCount; ++i) {
- pAttachments[i] = src->pAttachments[i];
- }
- }
-}
-
-safe_VkSubpassDescription::safe_VkSubpassDescription(const VkSubpassDescription* in_struct) :
- flags(in_struct->flags),
- pipelineBindPoint(in_struct->pipelineBindPoint),
- inputAttachmentCount(in_struct->inputAttachmentCount),
- pInputAttachments(nullptr),
- colorAttachmentCount(in_struct->colorAttachmentCount),
- pColorAttachments(nullptr),
- pResolveAttachments(nullptr),
- pDepthStencilAttachment(nullptr),
- preserveAttachmentCount(in_struct->preserveAttachmentCount),
- pPreserveAttachments(nullptr)
-{
- if (in_struct->pInputAttachments) {
- pInputAttachments = new VkAttachmentReference[in_struct->inputAttachmentCount];
- memcpy ((void *)pInputAttachments, (void *)in_struct->pInputAttachments, sizeof(VkAttachmentReference)*in_struct->inputAttachmentCount);
- }
- if (in_struct->pColorAttachments) {
- pColorAttachments = new VkAttachmentReference[in_struct->colorAttachmentCount];
- memcpy ((void *)pColorAttachments, (void *)in_struct->pColorAttachments, sizeof(VkAttachmentReference)*in_struct->colorAttachmentCount);
- }
- if (in_struct->pResolveAttachments) {
- pResolveAttachments = new VkAttachmentReference[in_struct->colorAttachmentCount];
- memcpy ((void *)pResolveAttachments, (void *)in_struct->pResolveAttachments, sizeof(VkAttachmentReference)*in_struct->colorAttachmentCount);
- }
- if (in_struct->pDepthStencilAttachment) {
- pDepthStencilAttachment = new VkAttachmentReference(*in_struct->pDepthStencilAttachment);
- }
- if (in_struct->pPreserveAttachments) {
- pPreserveAttachments = new uint32_t[in_struct->preserveAttachmentCount];
- memcpy ((void *)pPreserveAttachments, (void *)in_struct->pPreserveAttachments, sizeof(uint32_t)*in_struct->preserveAttachmentCount);
- }
-}
-
-safe_VkSubpassDescription::safe_VkSubpassDescription() :
- pInputAttachments(nullptr),
- pColorAttachments(nullptr),
- pResolveAttachments(nullptr),
- pDepthStencilAttachment(nullptr),
- pPreserveAttachments(nullptr)
-{}
-
-safe_VkSubpassDescription::safe_VkSubpassDescription(const safe_VkSubpassDescription& src)
-{
- flags = src.flags;
- pipelineBindPoint = src.pipelineBindPoint;
- inputAttachmentCount = src.inputAttachmentCount;
- pInputAttachments = nullptr;
- colorAttachmentCount = src.colorAttachmentCount;
- pColorAttachments = nullptr;
- pResolveAttachments = nullptr;
- pDepthStencilAttachment = nullptr;
- preserveAttachmentCount = src.preserveAttachmentCount;
- pPreserveAttachments = nullptr;
- if (src.pInputAttachments) {
- pInputAttachments = new VkAttachmentReference[src.inputAttachmentCount];
- memcpy ((void *)pInputAttachments, (void *)src.pInputAttachments, sizeof(VkAttachmentReference)*src.inputAttachmentCount);
- }
- if (src.pColorAttachments) {
- pColorAttachments = new VkAttachmentReference[src.colorAttachmentCount];
- memcpy ((void *)pColorAttachments, (void *)src.pColorAttachments, sizeof(VkAttachmentReference)*src.colorAttachmentCount);
- }
- if (src.pResolveAttachments) {
- pResolveAttachments = new VkAttachmentReference[src.colorAttachmentCount];
- memcpy ((void *)pResolveAttachments, (void *)src.pResolveAttachments, sizeof(VkAttachmentReference)*src.colorAttachmentCount);
- }
- if (src.pDepthStencilAttachment) {
- pDepthStencilAttachment = new VkAttachmentReference(*src.pDepthStencilAttachment);
- }
- if (src.pPreserveAttachments) {
- pPreserveAttachments = new uint32_t[src.preserveAttachmentCount];
- memcpy ((void *)pPreserveAttachments, (void *)src.pPreserveAttachments, sizeof(uint32_t)*src.preserveAttachmentCount);
- }
-}
-
-safe_VkSubpassDescription& safe_VkSubpassDescription::operator=(const safe_VkSubpassDescription& src)
-{
- if (&src == this) return *this;
-
- if (pInputAttachments)
- delete[] pInputAttachments;
- if (pColorAttachments)
- delete[] pColorAttachments;
- if (pResolveAttachments)
- delete[] pResolveAttachments;
- if (pDepthStencilAttachment)
- delete pDepthStencilAttachment;
- if (pPreserveAttachments)
- delete[] pPreserveAttachments;
-
- flags = src.flags;
- pipelineBindPoint = src.pipelineBindPoint;
- inputAttachmentCount = src.inputAttachmentCount;
- pInputAttachments = nullptr;
- colorAttachmentCount = src.colorAttachmentCount;
- pColorAttachments = nullptr;
- pResolveAttachments = nullptr;
- pDepthStencilAttachment = nullptr;
- preserveAttachmentCount = src.preserveAttachmentCount;
- pPreserveAttachments = nullptr;
- if (src.pInputAttachments) {
- pInputAttachments = new VkAttachmentReference[src.inputAttachmentCount];
- memcpy ((void *)pInputAttachments, (void *)src.pInputAttachments, sizeof(VkAttachmentReference)*src.inputAttachmentCount);
- }
- if (src.pColorAttachments) {
- pColorAttachments = new VkAttachmentReference[src.colorAttachmentCount];
- memcpy ((void *)pColorAttachments, (void *)src.pColorAttachments, sizeof(VkAttachmentReference)*src.colorAttachmentCount);
- }
- if (src.pResolveAttachments) {
- pResolveAttachments = new VkAttachmentReference[src.colorAttachmentCount];
- memcpy ((void *)pResolveAttachments, (void *)src.pResolveAttachments, sizeof(VkAttachmentReference)*src.colorAttachmentCount);
- }
- if (src.pDepthStencilAttachment) {
- pDepthStencilAttachment = new VkAttachmentReference(*src.pDepthStencilAttachment);
- }
- if (src.pPreserveAttachments) {
- pPreserveAttachments = new uint32_t[src.preserveAttachmentCount];
- memcpy ((void *)pPreserveAttachments, (void *)src.pPreserveAttachments, sizeof(uint32_t)*src.preserveAttachmentCount);
- }
-
- return *this;
-}
-
-safe_VkSubpassDescription::~safe_VkSubpassDescription()
-{
- if (pInputAttachments)
- delete[] pInputAttachments;
- if (pColorAttachments)
- delete[] pColorAttachments;
- if (pResolveAttachments)
- delete[] pResolveAttachments;
- if (pDepthStencilAttachment)
- delete pDepthStencilAttachment;
- if (pPreserveAttachments)
- delete[] pPreserveAttachments;
-}
-
-void safe_VkSubpassDescription::initialize(const VkSubpassDescription* in_struct)
-{
- flags = in_struct->flags;
- pipelineBindPoint = in_struct->pipelineBindPoint;
- inputAttachmentCount = in_struct->inputAttachmentCount;
- pInputAttachments = nullptr;
- colorAttachmentCount = in_struct->colorAttachmentCount;
- pColorAttachments = nullptr;
- pResolveAttachments = nullptr;
- pDepthStencilAttachment = nullptr;
- preserveAttachmentCount = in_struct->preserveAttachmentCount;
- pPreserveAttachments = nullptr;
- if (in_struct->pInputAttachments) {
- pInputAttachments = new VkAttachmentReference[in_struct->inputAttachmentCount];
- memcpy ((void *)pInputAttachments, (void *)in_struct->pInputAttachments, sizeof(VkAttachmentReference)*in_struct->inputAttachmentCount);
- }
- if (in_struct->pColorAttachments) {
- pColorAttachments = new VkAttachmentReference[in_struct->colorAttachmentCount];
- memcpy ((void *)pColorAttachments, (void *)in_struct->pColorAttachments, sizeof(VkAttachmentReference)*in_struct->colorAttachmentCount);
- }
- if (in_struct->pResolveAttachments) {
- pResolveAttachments = new VkAttachmentReference[in_struct->colorAttachmentCount];
- memcpy ((void *)pResolveAttachments, (void *)in_struct->pResolveAttachments, sizeof(VkAttachmentReference)*in_struct->colorAttachmentCount);
- }
- if (in_struct->pDepthStencilAttachment) {
- pDepthStencilAttachment = new VkAttachmentReference(*in_struct->pDepthStencilAttachment);
- }
- if (in_struct->pPreserveAttachments) {
- pPreserveAttachments = new uint32_t[in_struct->preserveAttachmentCount];
- memcpy ((void *)pPreserveAttachments, (void *)in_struct->pPreserveAttachments, sizeof(uint32_t)*in_struct->preserveAttachmentCount);
- }
-}
-
-void safe_VkSubpassDescription::initialize(const safe_VkSubpassDescription* src)
-{
- flags = src->flags;
- pipelineBindPoint = src->pipelineBindPoint;
- inputAttachmentCount = src->inputAttachmentCount;
- pInputAttachments = nullptr;
- colorAttachmentCount = src->colorAttachmentCount;
- pColorAttachments = nullptr;
- pResolveAttachments = nullptr;
- pDepthStencilAttachment = nullptr;
- preserveAttachmentCount = src->preserveAttachmentCount;
- pPreserveAttachments = nullptr;
- if (src->pInputAttachments) {
- pInputAttachments = new VkAttachmentReference[src->inputAttachmentCount];
- memcpy ((void *)pInputAttachments, (void *)src->pInputAttachments, sizeof(VkAttachmentReference)*src->inputAttachmentCount);
- }
- if (src->pColorAttachments) {
- pColorAttachments = new VkAttachmentReference[src->colorAttachmentCount];
- memcpy ((void *)pColorAttachments, (void *)src->pColorAttachments, sizeof(VkAttachmentReference)*src->colorAttachmentCount);
- }
- if (src->pResolveAttachments) {
- pResolveAttachments = new VkAttachmentReference[src->colorAttachmentCount];
- memcpy ((void *)pResolveAttachments, (void *)src->pResolveAttachments, sizeof(VkAttachmentReference)*src->colorAttachmentCount);
- }
- if (src->pDepthStencilAttachment) {
- pDepthStencilAttachment = new VkAttachmentReference(*src->pDepthStencilAttachment);
- }
- if (src->pPreserveAttachments) {
- pPreserveAttachments = new uint32_t[src->preserveAttachmentCount];
- memcpy ((void *)pPreserveAttachments, (void *)src->pPreserveAttachments, sizeof(uint32_t)*src->preserveAttachmentCount);
- }
-}
-
-safe_VkRenderPassCreateInfo::safe_VkRenderPassCreateInfo(const VkRenderPassCreateInfo* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- attachmentCount(in_struct->attachmentCount),
- pAttachments(nullptr),
- subpassCount(in_struct->subpassCount),
- pSubpasses(nullptr),
- dependencyCount(in_struct->dependencyCount),
- pDependencies(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pAttachments) {
- pAttachments = new VkAttachmentDescription[in_struct->attachmentCount];
- memcpy ((void *)pAttachments, (void *)in_struct->pAttachments, sizeof(VkAttachmentDescription)*in_struct->attachmentCount);
- }
- if (subpassCount && in_struct->pSubpasses) {
- pSubpasses = new safe_VkSubpassDescription[subpassCount];
- for (uint32_t i = 0; i < subpassCount; ++i) {
- pSubpasses[i].initialize(&in_struct->pSubpasses[i]);
- }
- }
- if (in_struct->pDependencies) {
- pDependencies = new VkSubpassDependency[in_struct->dependencyCount];
- memcpy ((void *)pDependencies, (void *)in_struct->pDependencies, sizeof(VkSubpassDependency)*in_struct->dependencyCount);
- }
-}
-
-safe_VkRenderPassCreateInfo::safe_VkRenderPassCreateInfo() :
- pNext(nullptr),
- pAttachments(nullptr),
- pSubpasses(nullptr),
- pDependencies(nullptr)
-{}
-
-safe_VkRenderPassCreateInfo::safe_VkRenderPassCreateInfo(const safe_VkRenderPassCreateInfo& src)
-{
- sType = src.sType;
- flags = src.flags;
- attachmentCount = src.attachmentCount;
- pAttachments = nullptr;
- subpassCount = src.subpassCount;
- pSubpasses = nullptr;
- dependencyCount = src.dependencyCount;
- pDependencies = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pAttachments) {
- pAttachments = new VkAttachmentDescription[src.attachmentCount];
- memcpy ((void *)pAttachments, (void *)src.pAttachments, sizeof(VkAttachmentDescription)*src.attachmentCount);
- }
- if (subpassCount && src.pSubpasses) {
- pSubpasses = new safe_VkSubpassDescription[subpassCount];
- for (uint32_t i = 0; i < subpassCount; ++i) {
- pSubpasses[i].initialize(&src.pSubpasses[i]);
- }
- }
- if (src.pDependencies) {
- pDependencies = new VkSubpassDependency[src.dependencyCount];
- memcpy ((void *)pDependencies, (void *)src.pDependencies, sizeof(VkSubpassDependency)*src.dependencyCount);
- }
-}
-
-safe_VkRenderPassCreateInfo& safe_VkRenderPassCreateInfo::operator=(const safe_VkRenderPassCreateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pAttachments)
- delete[] pAttachments;
- if (pSubpasses)
- delete[] pSubpasses;
- if (pDependencies)
- delete[] pDependencies;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- attachmentCount = src.attachmentCount;
- pAttachments = nullptr;
- subpassCount = src.subpassCount;
- pSubpasses = nullptr;
- dependencyCount = src.dependencyCount;
- pDependencies = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pAttachments) {
- pAttachments = new VkAttachmentDescription[src.attachmentCount];
- memcpy ((void *)pAttachments, (void *)src.pAttachments, sizeof(VkAttachmentDescription)*src.attachmentCount);
- }
- if (subpassCount && src.pSubpasses) {
- pSubpasses = new safe_VkSubpassDescription[subpassCount];
- for (uint32_t i = 0; i < subpassCount; ++i) {
- pSubpasses[i].initialize(&src.pSubpasses[i]);
- }
- }
- if (src.pDependencies) {
- pDependencies = new VkSubpassDependency[src.dependencyCount];
- memcpy ((void *)pDependencies, (void *)src.pDependencies, sizeof(VkSubpassDependency)*src.dependencyCount);
- }
-
- return *this;
-}
-
-safe_VkRenderPassCreateInfo::~safe_VkRenderPassCreateInfo()
-{
- if (pAttachments)
- delete[] pAttachments;
- if (pSubpasses)
- delete[] pSubpasses;
- if (pDependencies)
- delete[] pDependencies;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkRenderPassCreateInfo::initialize(const VkRenderPassCreateInfo* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- attachmentCount = in_struct->attachmentCount;
- pAttachments = nullptr;
- subpassCount = in_struct->subpassCount;
- pSubpasses = nullptr;
- dependencyCount = in_struct->dependencyCount;
- pDependencies = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pAttachments) {
- pAttachments = new VkAttachmentDescription[in_struct->attachmentCount];
- memcpy ((void *)pAttachments, (void *)in_struct->pAttachments, sizeof(VkAttachmentDescription)*in_struct->attachmentCount);
- }
- if (subpassCount && in_struct->pSubpasses) {
- pSubpasses = new safe_VkSubpassDescription[subpassCount];
- for (uint32_t i = 0; i < subpassCount; ++i) {
- pSubpasses[i].initialize(&in_struct->pSubpasses[i]);
- }
- }
- if (in_struct->pDependencies) {
- pDependencies = new VkSubpassDependency[in_struct->dependencyCount];
- memcpy ((void *)pDependencies, (void *)in_struct->pDependencies, sizeof(VkSubpassDependency)*in_struct->dependencyCount);
- }
-}
-
-void safe_VkRenderPassCreateInfo::initialize(const safe_VkRenderPassCreateInfo* src)
-{
- sType = src->sType;
- flags = src->flags;
- attachmentCount = src->attachmentCount;
- pAttachments = nullptr;
- subpassCount = src->subpassCount;
- pSubpasses = nullptr;
- dependencyCount = src->dependencyCount;
- pDependencies = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (src->pAttachments) {
- pAttachments = new VkAttachmentDescription[src->attachmentCount];
- memcpy ((void *)pAttachments, (void *)src->pAttachments, sizeof(VkAttachmentDescription)*src->attachmentCount);
- }
- if (subpassCount && src->pSubpasses) {
- pSubpasses = new safe_VkSubpassDescription[subpassCount];
- for (uint32_t i = 0; i < subpassCount; ++i) {
- pSubpasses[i].initialize(&src->pSubpasses[i]);
- }
- }
- if (src->pDependencies) {
- pDependencies = new VkSubpassDependency[src->dependencyCount];
- memcpy ((void *)pDependencies, (void *)src->pDependencies, sizeof(VkSubpassDependency)*src->dependencyCount);
- }
-}
-
-safe_VkCommandPoolCreateInfo::safe_VkCommandPoolCreateInfo(const VkCommandPoolCreateInfo* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- queueFamilyIndex(in_struct->queueFamilyIndex)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkCommandPoolCreateInfo::safe_VkCommandPoolCreateInfo() :
- pNext(nullptr)
-{}
-
-safe_VkCommandPoolCreateInfo::safe_VkCommandPoolCreateInfo(const safe_VkCommandPoolCreateInfo& src)
-{
- sType = src.sType;
- flags = src.flags;
- queueFamilyIndex = src.queueFamilyIndex;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkCommandPoolCreateInfo& safe_VkCommandPoolCreateInfo::operator=(const safe_VkCommandPoolCreateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- queueFamilyIndex = src.queueFamilyIndex;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkCommandPoolCreateInfo::~safe_VkCommandPoolCreateInfo()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkCommandPoolCreateInfo::initialize(const VkCommandPoolCreateInfo* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- queueFamilyIndex = in_struct->queueFamilyIndex;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkCommandPoolCreateInfo::initialize(const safe_VkCommandPoolCreateInfo* src)
-{
- sType = src->sType;
- flags = src->flags;
- queueFamilyIndex = src->queueFamilyIndex;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkCommandBufferAllocateInfo::safe_VkCommandBufferAllocateInfo(const VkCommandBufferAllocateInfo* in_struct) :
- sType(in_struct->sType),
- commandPool(in_struct->commandPool),
- level(in_struct->level),
- commandBufferCount(in_struct->commandBufferCount)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkCommandBufferAllocateInfo::safe_VkCommandBufferAllocateInfo() :
- pNext(nullptr)
-{}
-
-safe_VkCommandBufferAllocateInfo::safe_VkCommandBufferAllocateInfo(const safe_VkCommandBufferAllocateInfo& src)
-{
- sType = src.sType;
- commandPool = src.commandPool;
- level = src.level;
- commandBufferCount = src.commandBufferCount;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkCommandBufferAllocateInfo& safe_VkCommandBufferAllocateInfo::operator=(const safe_VkCommandBufferAllocateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- commandPool = src.commandPool;
- level = src.level;
- commandBufferCount = src.commandBufferCount;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkCommandBufferAllocateInfo::~safe_VkCommandBufferAllocateInfo()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkCommandBufferAllocateInfo::initialize(const VkCommandBufferAllocateInfo* in_struct)
-{
- sType = in_struct->sType;
- commandPool = in_struct->commandPool;
- level = in_struct->level;
- commandBufferCount = in_struct->commandBufferCount;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkCommandBufferAllocateInfo::initialize(const safe_VkCommandBufferAllocateInfo* src)
-{
- sType = src->sType;
- commandPool = src->commandPool;
- level = src->level;
- commandBufferCount = src->commandBufferCount;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkCommandBufferInheritanceInfo::safe_VkCommandBufferInheritanceInfo(const VkCommandBufferInheritanceInfo* in_struct) :
- sType(in_struct->sType),
- renderPass(in_struct->renderPass),
- subpass(in_struct->subpass),
- framebuffer(in_struct->framebuffer),
- occlusionQueryEnable(in_struct->occlusionQueryEnable),
- queryFlags(in_struct->queryFlags),
- pipelineStatistics(in_struct->pipelineStatistics)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkCommandBufferInheritanceInfo::safe_VkCommandBufferInheritanceInfo() :
- pNext(nullptr)
-{}
-
-safe_VkCommandBufferInheritanceInfo::safe_VkCommandBufferInheritanceInfo(const safe_VkCommandBufferInheritanceInfo& src)
-{
- sType = src.sType;
- renderPass = src.renderPass;
- subpass = src.subpass;
- framebuffer = src.framebuffer;
- occlusionQueryEnable = src.occlusionQueryEnable;
- queryFlags = src.queryFlags;
- pipelineStatistics = src.pipelineStatistics;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkCommandBufferInheritanceInfo& safe_VkCommandBufferInheritanceInfo::operator=(const safe_VkCommandBufferInheritanceInfo& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- renderPass = src.renderPass;
- subpass = src.subpass;
- framebuffer = src.framebuffer;
- occlusionQueryEnable = src.occlusionQueryEnable;
- queryFlags = src.queryFlags;
- pipelineStatistics = src.pipelineStatistics;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkCommandBufferInheritanceInfo::~safe_VkCommandBufferInheritanceInfo()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkCommandBufferInheritanceInfo::initialize(const VkCommandBufferInheritanceInfo* in_struct)
-{
- sType = in_struct->sType;
- renderPass = in_struct->renderPass;
- subpass = in_struct->subpass;
- framebuffer = in_struct->framebuffer;
- occlusionQueryEnable = in_struct->occlusionQueryEnable;
- queryFlags = in_struct->queryFlags;
- pipelineStatistics = in_struct->pipelineStatistics;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkCommandBufferInheritanceInfo::initialize(const safe_VkCommandBufferInheritanceInfo* src)
-{
- sType = src->sType;
- renderPass = src->renderPass;
- subpass = src->subpass;
- framebuffer = src->framebuffer;
- occlusionQueryEnable = src->occlusionQueryEnable;
- queryFlags = src->queryFlags;
- pipelineStatistics = src->pipelineStatistics;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkCommandBufferBeginInfo::safe_VkCommandBufferBeginInfo(const VkCommandBufferBeginInfo* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- pInheritanceInfo(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pInheritanceInfo)
- pInheritanceInfo = new safe_VkCommandBufferInheritanceInfo(in_struct->pInheritanceInfo);
-}
-
-safe_VkCommandBufferBeginInfo::safe_VkCommandBufferBeginInfo() :
- pNext(nullptr),
- pInheritanceInfo(nullptr)
-{}
-
-safe_VkCommandBufferBeginInfo::safe_VkCommandBufferBeginInfo(const safe_VkCommandBufferBeginInfo& src)
-{
- sType = src.sType;
- flags = src.flags;
- pInheritanceInfo = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pInheritanceInfo)
- pInheritanceInfo = new safe_VkCommandBufferInheritanceInfo(*src.pInheritanceInfo);
-}
-
-safe_VkCommandBufferBeginInfo& safe_VkCommandBufferBeginInfo::operator=(const safe_VkCommandBufferBeginInfo& src)
-{
- if (&src == this) return *this;
-
- if (pInheritanceInfo)
- delete pInheritanceInfo;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- pInheritanceInfo = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pInheritanceInfo)
- pInheritanceInfo = new safe_VkCommandBufferInheritanceInfo(*src.pInheritanceInfo);
-
- return *this;
-}
-
-safe_VkCommandBufferBeginInfo::~safe_VkCommandBufferBeginInfo()
-{
- if (pInheritanceInfo)
- delete pInheritanceInfo;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkCommandBufferBeginInfo::initialize(const VkCommandBufferBeginInfo* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- pInheritanceInfo = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pInheritanceInfo)
- pInheritanceInfo = new safe_VkCommandBufferInheritanceInfo(in_struct->pInheritanceInfo);
-}
-
-void safe_VkCommandBufferBeginInfo::initialize(const safe_VkCommandBufferBeginInfo* src)
-{
- sType = src->sType;
- flags = src->flags;
- pInheritanceInfo = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (src->pInheritanceInfo)
- pInheritanceInfo = new safe_VkCommandBufferInheritanceInfo(*src->pInheritanceInfo);
-}
-
-safe_VkMemoryBarrier::safe_VkMemoryBarrier(const VkMemoryBarrier* in_struct) :
- sType(in_struct->sType),
- srcAccessMask(in_struct->srcAccessMask),
- dstAccessMask(in_struct->dstAccessMask)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkMemoryBarrier::safe_VkMemoryBarrier() :
- pNext(nullptr)
-{}
-
-safe_VkMemoryBarrier::safe_VkMemoryBarrier(const safe_VkMemoryBarrier& src)
-{
- sType = src.sType;
- srcAccessMask = src.srcAccessMask;
- dstAccessMask = src.dstAccessMask;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkMemoryBarrier& safe_VkMemoryBarrier::operator=(const safe_VkMemoryBarrier& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- srcAccessMask = src.srcAccessMask;
- dstAccessMask = src.dstAccessMask;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkMemoryBarrier::~safe_VkMemoryBarrier()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkMemoryBarrier::initialize(const VkMemoryBarrier* in_struct)
-{
- sType = in_struct->sType;
- srcAccessMask = in_struct->srcAccessMask;
- dstAccessMask = in_struct->dstAccessMask;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkMemoryBarrier::initialize(const safe_VkMemoryBarrier* src)
-{
- sType = src->sType;
- srcAccessMask = src->srcAccessMask;
- dstAccessMask = src->dstAccessMask;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkBufferMemoryBarrier::safe_VkBufferMemoryBarrier(const VkBufferMemoryBarrier* in_struct) :
- sType(in_struct->sType),
- srcAccessMask(in_struct->srcAccessMask),
- dstAccessMask(in_struct->dstAccessMask),
- srcQueueFamilyIndex(in_struct->srcQueueFamilyIndex),
- dstQueueFamilyIndex(in_struct->dstQueueFamilyIndex),
- buffer(in_struct->buffer),
- offset(in_struct->offset),
- size(in_struct->size)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkBufferMemoryBarrier::safe_VkBufferMemoryBarrier() :
- pNext(nullptr)
-{}
-
-safe_VkBufferMemoryBarrier::safe_VkBufferMemoryBarrier(const safe_VkBufferMemoryBarrier& src)
-{
- sType = src.sType;
- srcAccessMask = src.srcAccessMask;
- dstAccessMask = src.dstAccessMask;
- srcQueueFamilyIndex = src.srcQueueFamilyIndex;
- dstQueueFamilyIndex = src.dstQueueFamilyIndex;
- buffer = src.buffer;
- offset = src.offset;
- size = src.size;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkBufferMemoryBarrier& safe_VkBufferMemoryBarrier::operator=(const safe_VkBufferMemoryBarrier& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- srcAccessMask = src.srcAccessMask;
- dstAccessMask = src.dstAccessMask;
- srcQueueFamilyIndex = src.srcQueueFamilyIndex;
- dstQueueFamilyIndex = src.dstQueueFamilyIndex;
- buffer = src.buffer;
- offset = src.offset;
- size = src.size;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkBufferMemoryBarrier::~safe_VkBufferMemoryBarrier()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkBufferMemoryBarrier::initialize(const VkBufferMemoryBarrier* in_struct)
-{
- sType = in_struct->sType;
- srcAccessMask = in_struct->srcAccessMask;
- dstAccessMask = in_struct->dstAccessMask;
- srcQueueFamilyIndex = in_struct->srcQueueFamilyIndex;
- dstQueueFamilyIndex = in_struct->dstQueueFamilyIndex;
- buffer = in_struct->buffer;
- offset = in_struct->offset;
- size = in_struct->size;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkBufferMemoryBarrier::initialize(const safe_VkBufferMemoryBarrier* src)
-{
- sType = src->sType;
- srcAccessMask = src->srcAccessMask;
- dstAccessMask = src->dstAccessMask;
- srcQueueFamilyIndex = src->srcQueueFamilyIndex;
- dstQueueFamilyIndex = src->dstQueueFamilyIndex;
- buffer = src->buffer;
- offset = src->offset;
- size = src->size;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkImageMemoryBarrier::safe_VkImageMemoryBarrier(const VkImageMemoryBarrier* in_struct) :
- sType(in_struct->sType),
- srcAccessMask(in_struct->srcAccessMask),
- dstAccessMask(in_struct->dstAccessMask),
- oldLayout(in_struct->oldLayout),
- newLayout(in_struct->newLayout),
- srcQueueFamilyIndex(in_struct->srcQueueFamilyIndex),
- dstQueueFamilyIndex(in_struct->dstQueueFamilyIndex),
- image(in_struct->image),
- subresourceRange(in_struct->subresourceRange)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkImageMemoryBarrier::safe_VkImageMemoryBarrier() :
- pNext(nullptr)
-{}
-
-safe_VkImageMemoryBarrier::safe_VkImageMemoryBarrier(const safe_VkImageMemoryBarrier& src)
-{
- sType = src.sType;
- srcAccessMask = src.srcAccessMask;
- dstAccessMask = src.dstAccessMask;
- oldLayout = src.oldLayout;
- newLayout = src.newLayout;
- srcQueueFamilyIndex = src.srcQueueFamilyIndex;
- dstQueueFamilyIndex = src.dstQueueFamilyIndex;
- image = src.image;
- subresourceRange = src.subresourceRange;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkImageMemoryBarrier& safe_VkImageMemoryBarrier::operator=(const safe_VkImageMemoryBarrier& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- srcAccessMask = src.srcAccessMask;
- dstAccessMask = src.dstAccessMask;
- oldLayout = src.oldLayout;
- newLayout = src.newLayout;
- srcQueueFamilyIndex = src.srcQueueFamilyIndex;
- dstQueueFamilyIndex = src.dstQueueFamilyIndex;
- image = src.image;
- subresourceRange = src.subresourceRange;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkImageMemoryBarrier::~safe_VkImageMemoryBarrier()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkImageMemoryBarrier::initialize(const VkImageMemoryBarrier* in_struct)
-{
- sType = in_struct->sType;
- srcAccessMask = in_struct->srcAccessMask;
- dstAccessMask = in_struct->dstAccessMask;
- oldLayout = in_struct->oldLayout;
- newLayout = in_struct->newLayout;
- srcQueueFamilyIndex = in_struct->srcQueueFamilyIndex;
- dstQueueFamilyIndex = in_struct->dstQueueFamilyIndex;
- image = in_struct->image;
- subresourceRange = in_struct->subresourceRange;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkImageMemoryBarrier::initialize(const safe_VkImageMemoryBarrier* src)
-{
- sType = src->sType;
- srcAccessMask = src->srcAccessMask;
- dstAccessMask = src->dstAccessMask;
- oldLayout = src->oldLayout;
- newLayout = src->newLayout;
- srcQueueFamilyIndex = src->srcQueueFamilyIndex;
- dstQueueFamilyIndex = src->dstQueueFamilyIndex;
- image = src->image;
- subresourceRange = src->subresourceRange;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkRenderPassBeginInfo::safe_VkRenderPassBeginInfo(const VkRenderPassBeginInfo* in_struct) :
- sType(in_struct->sType),
- renderPass(in_struct->renderPass),
- framebuffer(in_struct->framebuffer),
- renderArea(in_struct->renderArea),
- clearValueCount(in_struct->clearValueCount),
- pClearValues(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pClearValues) {
- pClearValues = new VkClearValue[in_struct->clearValueCount];
- memcpy ((void *)pClearValues, (void *)in_struct->pClearValues, sizeof(VkClearValue)*in_struct->clearValueCount);
- }
-}
-
-safe_VkRenderPassBeginInfo::safe_VkRenderPassBeginInfo() :
- pNext(nullptr),
- pClearValues(nullptr)
-{}
-
-safe_VkRenderPassBeginInfo::safe_VkRenderPassBeginInfo(const safe_VkRenderPassBeginInfo& src)
-{
- sType = src.sType;
- renderPass = src.renderPass;
- framebuffer = src.framebuffer;
- renderArea = src.renderArea;
- clearValueCount = src.clearValueCount;
- pClearValues = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pClearValues) {
- pClearValues = new VkClearValue[src.clearValueCount];
- memcpy ((void *)pClearValues, (void *)src.pClearValues, sizeof(VkClearValue)*src.clearValueCount);
- }
-}
-
-safe_VkRenderPassBeginInfo& safe_VkRenderPassBeginInfo::operator=(const safe_VkRenderPassBeginInfo& src)
-{
- if (&src == this) return *this;
-
- if (pClearValues)
- delete[] pClearValues;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- renderPass = src.renderPass;
- framebuffer = src.framebuffer;
- renderArea = src.renderArea;
- clearValueCount = src.clearValueCount;
- pClearValues = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pClearValues) {
- pClearValues = new VkClearValue[src.clearValueCount];
- memcpy ((void *)pClearValues, (void *)src.pClearValues, sizeof(VkClearValue)*src.clearValueCount);
- }
-
- return *this;
-}
-
-safe_VkRenderPassBeginInfo::~safe_VkRenderPassBeginInfo()
-{
- if (pClearValues)
- delete[] pClearValues;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkRenderPassBeginInfo::initialize(const VkRenderPassBeginInfo* in_struct)
-{
- sType = in_struct->sType;
- renderPass = in_struct->renderPass;
- framebuffer = in_struct->framebuffer;
- renderArea = in_struct->renderArea;
- clearValueCount = in_struct->clearValueCount;
- pClearValues = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pClearValues) {
- pClearValues = new VkClearValue[in_struct->clearValueCount];
- memcpy ((void *)pClearValues, (void *)in_struct->pClearValues, sizeof(VkClearValue)*in_struct->clearValueCount);
- }
-}
-
-void safe_VkRenderPassBeginInfo::initialize(const safe_VkRenderPassBeginInfo* src)
-{
- sType = src->sType;
- renderPass = src->renderPass;
- framebuffer = src->framebuffer;
- renderArea = src->renderArea;
- clearValueCount = src->clearValueCount;
- pClearValues = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (src->pClearValues) {
- pClearValues = new VkClearValue[src->clearValueCount];
- memcpy ((void *)pClearValues, (void *)src->pClearValues, sizeof(VkClearValue)*src->clearValueCount);
- }
-}
-
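A minimal usage sketch of the wrapper above, assuming "vk_safe_struct.h" is on the include path and the caller's handles are valid; CaptureRenderPassBegin is a hypothetical helper name, not part of the layer API. The point is that the constructor clones pClearValues and the pNext chain, so the copy stays valid after the application's stack data goes away.

#include <vulkan/vulkan.h>
#include "vk_safe_struct.h"

// Hypothetical helper: snapshot the begin info at record time so it can be
// inspected later. The deep copy owns its own clear-value array and pNext chain.
safe_VkRenderPassBeginInfo CaptureRenderPassBegin(const VkRenderPassBeginInfo *pRenderPassBegin) {
    return safe_VkRenderPassBeginInfo(pRenderPassBegin);
}
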
-safe_VkPhysicalDeviceSubgroupProperties::safe_VkPhysicalDeviceSubgroupProperties(const VkPhysicalDeviceSubgroupProperties* in_struct) :
- sType(in_struct->sType),
- subgroupSize(in_struct->subgroupSize),
- supportedStages(in_struct->supportedStages),
- supportedOperations(in_struct->supportedOperations),
- quadOperationsInAllStages(in_struct->quadOperationsInAllStages)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceSubgroupProperties::safe_VkPhysicalDeviceSubgroupProperties() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceSubgroupProperties::safe_VkPhysicalDeviceSubgroupProperties(const safe_VkPhysicalDeviceSubgroupProperties& src)
-{
- sType = src.sType;
- subgroupSize = src.subgroupSize;
- supportedStages = src.supportedStages;
- supportedOperations = src.supportedOperations;
- quadOperationsInAllStages = src.quadOperationsInAllStages;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceSubgroupProperties& safe_VkPhysicalDeviceSubgroupProperties::operator=(const safe_VkPhysicalDeviceSubgroupProperties& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- subgroupSize = src.subgroupSize;
- supportedStages = src.supportedStages;
- supportedOperations = src.supportedOperations;
- quadOperationsInAllStages = src.quadOperationsInAllStages;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceSubgroupProperties::~safe_VkPhysicalDeviceSubgroupProperties()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceSubgroupProperties::initialize(const VkPhysicalDeviceSubgroupProperties* in_struct)
-{
- sType = in_struct->sType;
- subgroupSize = in_struct->subgroupSize;
- supportedStages = in_struct->supportedStages;
- supportedOperations = in_struct->supportedOperations;
- quadOperationsInAllStages = in_struct->quadOperationsInAllStages;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceSubgroupProperties::initialize(const safe_VkPhysicalDeviceSubgroupProperties* src)
-{
- sType = src->sType;
- subgroupSize = src->subgroupSize;
- supportedStages = src->supportedStages;
- supportedOperations = src->supportedOperations;
- quadOperationsInAllStages = src->quadOperationsInAllStages;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkBindBufferMemoryInfo::safe_VkBindBufferMemoryInfo(const VkBindBufferMemoryInfo* in_struct) :
- sType(in_struct->sType),
- buffer(in_struct->buffer),
- memory(in_struct->memory),
- memoryOffset(in_struct->memoryOffset)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkBindBufferMemoryInfo::safe_VkBindBufferMemoryInfo() :
- pNext(nullptr)
-{}
-
-safe_VkBindBufferMemoryInfo::safe_VkBindBufferMemoryInfo(const safe_VkBindBufferMemoryInfo& src)
-{
- sType = src.sType;
- buffer = src.buffer;
- memory = src.memory;
- memoryOffset = src.memoryOffset;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkBindBufferMemoryInfo& safe_VkBindBufferMemoryInfo::operator=(const safe_VkBindBufferMemoryInfo& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- buffer = src.buffer;
- memory = src.memory;
- memoryOffset = src.memoryOffset;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkBindBufferMemoryInfo::~safe_VkBindBufferMemoryInfo()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkBindBufferMemoryInfo::initialize(const VkBindBufferMemoryInfo* in_struct)
-{
- sType = in_struct->sType;
- buffer = in_struct->buffer;
- memory = in_struct->memory;
- memoryOffset = in_struct->memoryOffset;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkBindBufferMemoryInfo::initialize(const safe_VkBindBufferMemoryInfo* src)
-{
- sType = src->sType;
- buffer = src->buffer;
- memory = src->memory;
- memoryOffset = src->memoryOffset;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkBindImageMemoryInfo::safe_VkBindImageMemoryInfo(const VkBindImageMemoryInfo* in_struct) :
- sType(in_struct->sType),
- image(in_struct->image),
- memory(in_struct->memory),
- memoryOffset(in_struct->memoryOffset)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkBindImageMemoryInfo::safe_VkBindImageMemoryInfo() :
- pNext(nullptr)
-{}
-
-safe_VkBindImageMemoryInfo::safe_VkBindImageMemoryInfo(const safe_VkBindImageMemoryInfo& src)
-{
- sType = src.sType;
- image = src.image;
- memory = src.memory;
- memoryOffset = src.memoryOffset;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkBindImageMemoryInfo& safe_VkBindImageMemoryInfo::operator=(const safe_VkBindImageMemoryInfo& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- image = src.image;
- memory = src.memory;
- memoryOffset = src.memoryOffset;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkBindImageMemoryInfo::~safe_VkBindImageMemoryInfo()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkBindImageMemoryInfo::initialize(const VkBindImageMemoryInfo* in_struct)
-{
- sType = in_struct->sType;
- image = in_struct->image;
- memory = in_struct->memory;
- memoryOffset = in_struct->memoryOffset;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkBindImageMemoryInfo::initialize(const safe_VkBindImageMemoryInfo* src)
-{
- sType = src->sType;
- image = src->image;
- memory = src->memory;
- memoryOffset = src->memoryOffset;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDevice16BitStorageFeatures::safe_VkPhysicalDevice16BitStorageFeatures(const VkPhysicalDevice16BitStorageFeatures* in_struct) :
- sType(in_struct->sType),
- storageBuffer16BitAccess(in_struct->storageBuffer16BitAccess),
- uniformAndStorageBuffer16BitAccess(in_struct->uniformAndStorageBuffer16BitAccess),
- storagePushConstant16(in_struct->storagePushConstant16),
- storageInputOutput16(in_struct->storageInputOutput16)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDevice16BitStorageFeatures::safe_VkPhysicalDevice16BitStorageFeatures() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDevice16BitStorageFeatures::safe_VkPhysicalDevice16BitStorageFeatures(const safe_VkPhysicalDevice16BitStorageFeatures& src)
-{
- sType = src.sType;
- storageBuffer16BitAccess = src.storageBuffer16BitAccess;
- uniformAndStorageBuffer16BitAccess = src.uniformAndStorageBuffer16BitAccess;
- storagePushConstant16 = src.storagePushConstant16;
- storageInputOutput16 = src.storageInputOutput16;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDevice16BitStorageFeatures& safe_VkPhysicalDevice16BitStorageFeatures::operator=(const safe_VkPhysicalDevice16BitStorageFeatures& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- storageBuffer16BitAccess = src.storageBuffer16BitAccess;
- uniformAndStorageBuffer16BitAccess = src.uniformAndStorageBuffer16BitAccess;
- storagePushConstant16 = src.storagePushConstant16;
- storageInputOutput16 = src.storageInputOutput16;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDevice16BitStorageFeatures::~safe_VkPhysicalDevice16BitStorageFeatures()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDevice16BitStorageFeatures::initialize(const VkPhysicalDevice16BitStorageFeatures* in_struct)
-{
- sType = in_struct->sType;
- storageBuffer16BitAccess = in_struct->storageBuffer16BitAccess;
- uniformAndStorageBuffer16BitAccess = in_struct->uniformAndStorageBuffer16BitAccess;
- storagePushConstant16 = in_struct->storagePushConstant16;
- storageInputOutput16 = in_struct->storageInputOutput16;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDevice16BitStorageFeatures::initialize(const safe_VkPhysicalDevice16BitStorageFeatures* src)
-{
- sType = src->sType;
- storageBuffer16BitAccess = src->storageBuffer16BitAccess;
- uniformAndStorageBuffer16BitAccess = src->uniformAndStorageBuffer16BitAccess;
- storagePushConstant16 = src->storagePushConstant16;
- storageInputOutput16 = src->storageInputOutput16;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkMemoryDedicatedRequirements::safe_VkMemoryDedicatedRequirements(const VkMemoryDedicatedRequirements* in_struct) :
- sType(in_struct->sType),
- prefersDedicatedAllocation(in_struct->prefersDedicatedAllocation),
- requiresDedicatedAllocation(in_struct->requiresDedicatedAllocation)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkMemoryDedicatedRequirements::safe_VkMemoryDedicatedRequirements() :
- pNext(nullptr)
-{}
-
-safe_VkMemoryDedicatedRequirements::safe_VkMemoryDedicatedRequirements(const safe_VkMemoryDedicatedRequirements& src)
-{
- sType = src.sType;
- prefersDedicatedAllocation = src.prefersDedicatedAllocation;
- requiresDedicatedAllocation = src.requiresDedicatedAllocation;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkMemoryDedicatedRequirements& safe_VkMemoryDedicatedRequirements::operator=(const safe_VkMemoryDedicatedRequirements& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- prefersDedicatedAllocation = src.prefersDedicatedAllocation;
- requiresDedicatedAllocation = src.requiresDedicatedAllocation;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkMemoryDedicatedRequirements::~safe_VkMemoryDedicatedRequirements()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkMemoryDedicatedRequirements::initialize(const VkMemoryDedicatedRequirements* in_struct)
-{
- sType = in_struct->sType;
- prefersDedicatedAllocation = in_struct->prefersDedicatedAllocation;
- requiresDedicatedAllocation = in_struct->requiresDedicatedAllocation;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkMemoryDedicatedRequirements::initialize(const safe_VkMemoryDedicatedRequirements* src)
-{
- sType = src->sType;
- prefersDedicatedAllocation = src->prefersDedicatedAllocation;
- requiresDedicatedAllocation = src->requiresDedicatedAllocation;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkMemoryDedicatedAllocateInfo::safe_VkMemoryDedicatedAllocateInfo(const VkMemoryDedicatedAllocateInfo* in_struct) :
- sType(in_struct->sType),
- image(in_struct->image),
- buffer(in_struct->buffer)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkMemoryDedicatedAllocateInfo::safe_VkMemoryDedicatedAllocateInfo() :
- pNext(nullptr)
-{}
-
-safe_VkMemoryDedicatedAllocateInfo::safe_VkMemoryDedicatedAllocateInfo(const safe_VkMemoryDedicatedAllocateInfo& src)
-{
- sType = src.sType;
- image = src.image;
- buffer = src.buffer;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkMemoryDedicatedAllocateInfo& safe_VkMemoryDedicatedAllocateInfo::operator=(const safe_VkMemoryDedicatedAllocateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- image = src.image;
- buffer = src.buffer;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkMemoryDedicatedAllocateInfo::~safe_VkMemoryDedicatedAllocateInfo()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkMemoryDedicatedAllocateInfo::initialize(const VkMemoryDedicatedAllocateInfo* in_struct)
-{
- sType = in_struct->sType;
- image = in_struct->image;
- buffer = in_struct->buffer;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkMemoryDedicatedAllocateInfo::initialize(const safe_VkMemoryDedicatedAllocateInfo* src)
-{
- sType = src->sType;
- image = src->image;
- buffer = src->buffer;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkMemoryAllocateFlagsInfo::safe_VkMemoryAllocateFlagsInfo(const VkMemoryAllocateFlagsInfo* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- deviceMask(in_struct->deviceMask)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkMemoryAllocateFlagsInfo::safe_VkMemoryAllocateFlagsInfo() :
- pNext(nullptr)
-{}
-
-safe_VkMemoryAllocateFlagsInfo::safe_VkMemoryAllocateFlagsInfo(const safe_VkMemoryAllocateFlagsInfo& src)
-{
- sType = src.sType;
- flags = src.flags;
- deviceMask = src.deviceMask;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkMemoryAllocateFlagsInfo& safe_VkMemoryAllocateFlagsInfo::operator=(const safe_VkMemoryAllocateFlagsInfo& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- deviceMask = src.deviceMask;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkMemoryAllocateFlagsInfo::~safe_VkMemoryAllocateFlagsInfo()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkMemoryAllocateFlagsInfo::initialize(const VkMemoryAllocateFlagsInfo* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- deviceMask = in_struct->deviceMask;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkMemoryAllocateFlagsInfo::initialize(const safe_VkMemoryAllocateFlagsInfo* src)
-{
- sType = src->sType;
- flags = src->flags;
- deviceMask = src->deviceMask;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkDeviceGroupRenderPassBeginInfo::safe_VkDeviceGroupRenderPassBeginInfo(const VkDeviceGroupRenderPassBeginInfo* in_struct) :
- sType(in_struct->sType),
- deviceMask(in_struct->deviceMask),
- deviceRenderAreaCount(in_struct->deviceRenderAreaCount),
- pDeviceRenderAreas(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pDeviceRenderAreas) {
- pDeviceRenderAreas = new VkRect2D[in_struct->deviceRenderAreaCount];
- memcpy ((void *)pDeviceRenderAreas, (void *)in_struct->pDeviceRenderAreas, sizeof(VkRect2D)*in_struct->deviceRenderAreaCount);
- }
-}
-
-safe_VkDeviceGroupRenderPassBeginInfo::safe_VkDeviceGroupRenderPassBeginInfo() :
- pNext(nullptr),
- pDeviceRenderAreas(nullptr)
-{}
-
-safe_VkDeviceGroupRenderPassBeginInfo::safe_VkDeviceGroupRenderPassBeginInfo(const safe_VkDeviceGroupRenderPassBeginInfo& src)
-{
- sType = src.sType;
- deviceMask = src.deviceMask;
- deviceRenderAreaCount = src.deviceRenderAreaCount;
- pDeviceRenderAreas = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pDeviceRenderAreas) {
- pDeviceRenderAreas = new VkRect2D[src.deviceRenderAreaCount];
- memcpy ((void *)pDeviceRenderAreas, (void *)src.pDeviceRenderAreas, sizeof(VkRect2D)*src.deviceRenderAreaCount);
- }
-}
-
-safe_VkDeviceGroupRenderPassBeginInfo& safe_VkDeviceGroupRenderPassBeginInfo::operator=(const safe_VkDeviceGroupRenderPassBeginInfo& src)
-{
- if (&src == this) return *this;
-
- if (pDeviceRenderAreas)
- delete[] pDeviceRenderAreas;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- deviceMask = src.deviceMask;
- deviceRenderAreaCount = src.deviceRenderAreaCount;
- pDeviceRenderAreas = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pDeviceRenderAreas) {
- pDeviceRenderAreas = new VkRect2D[src.deviceRenderAreaCount];
- memcpy ((void *)pDeviceRenderAreas, (void *)src.pDeviceRenderAreas, sizeof(VkRect2D)*src.deviceRenderAreaCount);
- }
-
- return *this;
-}
-
-safe_VkDeviceGroupRenderPassBeginInfo::~safe_VkDeviceGroupRenderPassBeginInfo()
-{
- if (pDeviceRenderAreas)
- delete[] pDeviceRenderAreas;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDeviceGroupRenderPassBeginInfo::initialize(const VkDeviceGroupRenderPassBeginInfo* in_struct)
-{
- sType = in_struct->sType;
- deviceMask = in_struct->deviceMask;
- deviceRenderAreaCount = in_struct->deviceRenderAreaCount;
- pDeviceRenderAreas = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pDeviceRenderAreas) {
- pDeviceRenderAreas = new VkRect2D[in_struct->deviceRenderAreaCount];
- memcpy ((void *)pDeviceRenderAreas, (void *)in_struct->pDeviceRenderAreas, sizeof(VkRect2D)*in_struct->deviceRenderAreaCount);
- }
-}
-
-void safe_VkDeviceGroupRenderPassBeginInfo::initialize(const safe_VkDeviceGroupRenderPassBeginInfo* src)
-{
- sType = src->sType;
- deviceMask = src->deviceMask;
- deviceRenderAreaCount = src->deviceRenderAreaCount;
- pDeviceRenderAreas = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (src->pDeviceRenderAreas) {
- pDeviceRenderAreas = new VkRect2D[src->deviceRenderAreaCount];
- memcpy ((void *)pDeviceRenderAreas, (void *)src->pDeviceRenderAreas, sizeof(VkRect2D)*src->deviceRenderAreaCount);
- }
-}
-
-safe_VkDeviceGroupCommandBufferBeginInfo::safe_VkDeviceGroupCommandBufferBeginInfo(const VkDeviceGroupCommandBufferBeginInfo* in_struct) :
- sType(in_struct->sType),
- deviceMask(in_struct->deviceMask)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkDeviceGroupCommandBufferBeginInfo::safe_VkDeviceGroupCommandBufferBeginInfo() :
- pNext(nullptr)
-{}
-
-safe_VkDeviceGroupCommandBufferBeginInfo::safe_VkDeviceGroupCommandBufferBeginInfo(const safe_VkDeviceGroupCommandBufferBeginInfo& src)
-{
- sType = src.sType;
- deviceMask = src.deviceMask;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkDeviceGroupCommandBufferBeginInfo& safe_VkDeviceGroupCommandBufferBeginInfo::operator=(const safe_VkDeviceGroupCommandBufferBeginInfo& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- deviceMask = src.deviceMask;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkDeviceGroupCommandBufferBeginInfo::~safe_VkDeviceGroupCommandBufferBeginInfo()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDeviceGroupCommandBufferBeginInfo::initialize(const VkDeviceGroupCommandBufferBeginInfo* in_struct)
-{
- sType = in_struct->sType;
- deviceMask = in_struct->deviceMask;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkDeviceGroupCommandBufferBeginInfo::initialize(const safe_VkDeviceGroupCommandBufferBeginInfo* src)
-{
- sType = src->sType;
- deviceMask = src->deviceMask;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkDeviceGroupSubmitInfo::safe_VkDeviceGroupSubmitInfo(const VkDeviceGroupSubmitInfo* in_struct) :
- sType(in_struct->sType),
- waitSemaphoreCount(in_struct->waitSemaphoreCount),
- pWaitSemaphoreDeviceIndices(nullptr),
- commandBufferCount(in_struct->commandBufferCount),
- pCommandBufferDeviceMasks(nullptr),
- signalSemaphoreCount(in_struct->signalSemaphoreCount),
- pSignalSemaphoreDeviceIndices(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pWaitSemaphoreDeviceIndices) {
- pWaitSemaphoreDeviceIndices = new uint32_t[in_struct->waitSemaphoreCount];
- memcpy ((void *)pWaitSemaphoreDeviceIndices, (void *)in_struct->pWaitSemaphoreDeviceIndices, sizeof(uint32_t)*in_struct->waitSemaphoreCount);
- }
- if (in_struct->pCommandBufferDeviceMasks) {
- pCommandBufferDeviceMasks = new uint32_t[in_struct->commandBufferCount];
- memcpy ((void *)pCommandBufferDeviceMasks, (void *)in_struct->pCommandBufferDeviceMasks, sizeof(uint32_t)*in_struct->commandBufferCount);
- }
- if (in_struct->pSignalSemaphoreDeviceIndices) {
- pSignalSemaphoreDeviceIndices = new uint32_t[in_struct->signalSemaphoreCount];
- memcpy ((void *)pSignalSemaphoreDeviceIndices, (void *)in_struct->pSignalSemaphoreDeviceIndices, sizeof(uint32_t)*in_struct->signalSemaphoreCount);
- }
-}
-
-safe_VkDeviceGroupSubmitInfo::safe_VkDeviceGroupSubmitInfo() :
- pNext(nullptr),
- pWaitSemaphoreDeviceIndices(nullptr),
- pCommandBufferDeviceMasks(nullptr),
- pSignalSemaphoreDeviceIndices(nullptr)
-{}
-
-safe_VkDeviceGroupSubmitInfo::safe_VkDeviceGroupSubmitInfo(const safe_VkDeviceGroupSubmitInfo& src)
-{
- sType = src.sType;
- waitSemaphoreCount = src.waitSemaphoreCount;
- pWaitSemaphoreDeviceIndices = nullptr;
- commandBufferCount = src.commandBufferCount;
- pCommandBufferDeviceMasks = nullptr;
- signalSemaphoreCount = src.signalSemaphoreCount;
- pSignalSemaphoreDeviceIndices = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pWaitSemaphoreDeviceIndices) {
- pWaitSemaphoreDeviceIndices = new uint32_t[src.waitSemaphoreCount];
- memcpy ((void *)pWaitSemaphoreDeviceIndices, (void *)src.pWaitSemaphoreDeviceIndices, sizeof(uint32_t)*src.waitSemaphoreCount);
- }
- if (src.pCommandBufferDeviceMasks) {
- pCommandBufferDeviceMasks = new uint32_t[src.commandBufferCount];
- memcpy ((void *)pCommandBufferDeviceMasks, (void *)src.pCommandBufferDeviceMasks, sizeof(uint32_t)*src.commandBufferCount);
- }
- if (src.pSignalSemaphoreDeviceIndices) {
- pSignalSemaphoreDeviceIndices = new uint32_t[src.signalSemaphoreCount];
- memcpy ((void *)pSignalSemaphoreDeviceIndices, (void *)src.pSignalSemaphoreDeviceIndices, sizeof(uint32_t)*src.signalSemaphoreCount);
- }
-}
-
-safe_VkDeviceGroupSubmitInfo& safe_VkDeviceGroupSubmitInfo::operator=(const safe_VkDeviceGroupSubmitInfo& src)
-{
- if (&src == this) return *this;
-
- if (pWaitSemaphoreDeviceIndices)
- delete[] pWaitSemaphoreDeviceIndices;
- if (pCommandBufferDeviceMasks)
- delete[] pCommandBufferDeviceMasks;
- if (pSignalSemaphoreDeviceIndices)
- delete[] pSignalSemaphoreDeviceIndices;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- waitSemaphoreCount = src.waitSemaphoreCount;
- pWaitSemaphoreDeviceIndices = nullptr;
- commandBufferCount = src.commandBufferCount;
- pCommandBufferDeviceMasks = nullptr;
- signalSemaphoreCount = src.signalSemaphoreCount;
- pSignalSemaphoreDeviceIndices = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pWaitSemaphoreDeviceIndices) {
- pWaitSemaphoreDeviceIndices = new uint32_t[src.waitSemaphoreCount];
- memcpy ((void *)pWaitSemaphoreDeviceIndices, (void *)src.pWaitSemaphoreDeviceIndices, sizeof(uint32_t)*src.waitSemaphoreCount);
- }
- if (src.pCommandBufferDeviceMasks) {
- pCommandBufferDeviceMasks = new uint32_t[src.commandBufferCount];
- memcpy ((void *)pCommandBufferDeviceMasks, (void *)src.pCommandBufferDeviceMasks, sizeof(uint32_t)*src.commandBufferCount);
- }
- if (src.pSignalSemaphoreDeviceIndices) {
- pSignalSemaphoreDeviceIndices = new uint32_t[src.signalSemaphoreCount];
- memcpy ((void *)pSignalSemaphoreDeviceIndices, (void *)src.pSignalSemaphoreDeviceIndices, sizeof(uint32_t)*src.signalSemaphoreCount);
- }
-
- return *this;
-}
-
-safe_VkDeviceGroupSubmitInfo::~safe_VkDeviceGroupSubmitInfo()
-{
- if (pWaitSemaphoreDeviceIndices)
- delete[] pWaitSemaphoreDeviceIndices;
- if (pCommandBufferDeviceMasks)
- delete[] pCommandBufferDeviceMasks;
- if (pSignalSemaphoreDeviceIndices)
- delete[] pSignalSemaphoreDeviceIndices;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDeviceGroupSubmitInfo::initialize(const VkDeviceGroupSubmitInfo* in_struct)
-{
- sType = in_struct->sType;
- waitSemaphoreCount = in_struct->waitSemaphoreCount;
- pWaitSemaphoreDeviceIndices = nullptr;
- commandBufferCount = in_struct->commandBufferCount;
- pCommandBufferDeviceMasks = nullptr;
- signalSemaphoreCount = in_struct->signalSemaphoreCount;
- pSignalSemaphoreDeviceIndices = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pWaitSemaphoreDeviceIndices) {
- pWaitSemaphoreDeviceIndices = new uint32_t[in_struct->waitSemaphoreCount];
- memcpy ((void *)pWaitSemaphoreDeviceIndices, (void *)in_struct->pWaitSemaphoreDeviceIndices, sizeof(uint32_t)*in_struct->waitSemaphoreCount);
- }
- if (in_struct->pCommandBufferDeviceMasks) {
- pCommandBufferDeviceMasks = new uint32_t[in_struct->commandBufferCount];
- memcpy ((void *)pCommandBufferDeviceMasks, (void *)in_struct->pCommandBufferDeviceMasks, sizeof(uint32_t)*in_struct->commandBufferCount);
- }
- if (in_struct->pSignalSemaphoreDeviceIndices) {
- pSignalSemaphoreDeviceIndices = new uint32_t[in_struct->signalSemaphoreCount];
- memcpy ((void *)pSignalSemaphoreDeviceIndices, (void *)in_struct->pSignalSemaphoreDeviceIndices, sizeof(uint32_t)*in_struct->signalSemaphoreCount);
- }
-}
-
-void safe_VkDeviceGroupSubmitInfo::initialize(const safe_VkDeviceGroupSubmitInfo* src)
-{
- sType = src->sType;
- waitSemaphoreCount = src->waitSemaphoreCount;
- pWaitSemaphoreDeviceIndices = nullptr;
- commandBufferCount = src->commandBufferCount;
- pCommandBufferDeviceMasks = nullptr;
- signalSemaphoreCount = src->signalSemaphoreCount;
- pSignalSemaphoreDeviceIndices = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (src->pWaitSemaphoreDeviceIndices) {
- pWaitSemaphoreDeviceIndices = new uint32_t[src->waitSemaphoreCount];
- memcpy ((void *)pWaitSemaphoreDeviceIndices, (void *)src->pWaitSemaphoreDeviceIndices, sizeof(uint32_t)*src->waitSemaphoreCount);
- }
- if (src->pCommandBufferDeviceMasks) {
- pCommandBufferDeviceMasks = new uint32_t[src->commandBufferCount];
- memcpy ((void *)pCommandBufferDeviceMasks, (void *)src->pCommandBufferDeviceMasks, sizeof(uint32_t)*src->commandBufferCount);
- }
- if (src->pSignalSemaphoreDeviceIndices) {
- pSignalSemaphoreDeviceIndices = new uint32_t[src->signalSemaphoreCount];
- memcpy ((void *)pSignalSemaphoreDeviceIndices, (void *)src->pSignalSemaphoreDeviceIndices, sizeof(uint32_t)*src->signalSemaphoreCount);
- }
-}
-
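A small sketch of the copy-assignment path for the struct above, again assuming "vk_safe_struct.h" is available; OverwriteGroupSubmitInfo is a hypothetical name. Assignment first releases the three arrays the destination already owns, then deep-copies the source's device-index/mask arrays and pNext chain.

#include <vulkan/vulkan.h>
#include "vk_safe_struct.h"

// Hypothetical helper: replace whatever dst currently holds with a deep copy
// of the caller's most recent VkDeviceGroupSubmitInfo.
void OverwriteGroupSubmitInfo(safe_VkDeviceGroupSubmitInfo &dst, const VkDeviceGroupSubmitInfo *latest) {
    safe_VkDeviceGroupSubmitInfo incoming(latest);  // deep copy of the caller's struct
    dst = incoming;  // operator= frees dst's old arrays/chain before copying
}
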
-safe_VkDeviceGroupBindSparseInfo::safe_VkDeviceGroupBindSparseInfo(const VkDeviceGroupBindSparseInfo* in_struct) :
- sType(in_struct->sType),
- resourceDeviceIndex(in_struct->resourceDeviceIndex),
- memoryDeviceIndex(in_struct->memoryDeviceIndex)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkDeviceGroupBindSparseInfo::safe_VkDeviceGroupBindSparseInfo() :
- pNext(nullptr)
-{}
-
-safe_VkDeviceGroupBindSparseInfo::safe_VkDeviceGroupBindSparseInfo(const safe_VkDeviceGroupBindSparseInfo& src)
-{
- sType = src.sType;
- resourceDeviceIndex = src.resourceDeviceIndex;
- memoryDeviceIndex = src.memoryDeviceIndex;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkDeviceGroupBindSparseInfo& safe_VkDeviceGroupBindSparseInfo::operator=(const safe_VkDeviceGroupBindSparseInfo& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- resourceDeviceIndex = src.resourceDeviceIndex;
- memoryDeviceIndex = src.memoryDeviceIndex;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkDeviceGroupBindSparseInfo::~safe_VkDeviceGroupBindSparseInfo()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDeviceGroupBindSparseInfo::initialize(const VkDeviceGroupBindSparseInfo* in_struct)
-{
- sType = in_struct->sType;
- resourceDeviceIndex = in_struct->resourceDeviceIndex;
- memoryDeviceIndex = in_struct->memoryDeviceIndex;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkDeviceGroupBindSparseInfo::initialize(const safe_VkDeviceGroupBindSparseInfo* src)
-{
- sType = src->sType;
- resourceDeviceIndex = src->resourceDeviceIndex;
- memoryDeviceIndex = src->memoryDeviceIndex;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkBindBufferMemoryDeviceGroupInfo::safe_VkBindBufferMemoryDeviceGroupInfo(const VkBindBufferMemoryDeviceGroupInfo* in_struct) :
- sType(in_struct->sType),
- deviceIndexCount(in_struct->deviceIndexCount),
- pDeviceIndices(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pDeviceIndices) {
- pDeviceIndices = new uint32_t[in_struct->deviceIndexCount];
- memcpy ((void *)pDeviceIndices, (void *)in_struct->pDeviceIndices, sizeof(uint32_t)*in_struct->deviceIndexCount);
- }
-}
-
-safe_VkBindBufferMemoryDeviceGroupInfo::safe_VkBindBufferMemoryDeviceGroupInfo() :
- pNext(nullptr),
- pDeviceIndices(nullptr)
-{}
-
-safe_VkBindBufferMemoryDeviceGroupInfo::safe_VkBindBufferMemoryDeviceGroupInfo(const safe_VkBindBufferMemoryDeviceGroupInfo& src)
-{
- sType = src.sType;
- deviceIndexCount = src.deviceIndexCount;
- pDeviceIndices = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pDeviceIndices) {
- pDeviceIndices = new uint32_t[src.deviceIndexCount];
- memcpy ((void *)pDeviceIndices, (void *)src.pDeviceIndices, sizeof(uint32_t)*src.deviceIndexCount);
- }
-}
-
-safe_VkBindBufferMemoryDeviceGroupInfo& safe_VkBindBufferMemoryDeviceGroupInfo::operator=(const safe_VkBindBufferMemoryDeviceGroupInfo& src)
-{
- if (&src == this) return *this;
-
- if (pDeviceIndices)
- delete[] pDeviceIndices;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- deviceIndexCount = src.deviceIndexCount;
- pDeviceIndices = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pDeviceIndices) {
- pDeviceIndices = new uint32_t[src.deviceIndexCount];
- memcpy ((void *)pDeviceIndices, (void *)src.pDeviceIndices, sizeof(uint32_t)*src.deviceIndexCount);
- }
-
- return *this;
-}
-
-safe_VkBindBufferMemoryDeviceGroupInfo::~safe_VkBindBufferMemoryDeviceGroupInfo()
-{
- if (pDeviceIndices)
- delete[] pDeviceIndices;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkBindBufferMemoryDeviceGroupInfo::initialize(const VkBindBufferMemoryDeviceGroupInfo* in_struct)
-{
- sType = in_struct->sType;
- deviceIndexCount = in_struct->deviceIndexCount;
- pDeviceIndices = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pDeviceIndices) {
- pDeviceIndices = new uint32_t[in_struct->deviceIndexCount];
- memcpy ((void *)pDeviceIndices, (void *)in_struct->pDeviceIndices, sizeof(uint32_t)*in_struct->deviceIndexCount);
- }
-}
-
-void safe_VkBindBufferMemoryDeviceGroupInfo::initialize(const safe_VkBindBufferMemoryDeviceGroupInfo* src)
-{
- sType = src->sType;
- deviceIndexCount = src->deviceIndexCount;
- pDeviceIndices = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (src->pDeviceIndices) {
- pDeviceIndices = new uint32_t[src->deviceIndexCount];
- memcpy ((void *)pDeviceIndices, (void *)src->pDeviceIndices, sizeof(uint32_t)*src->deviceIndexCount);
- }
-}
-
-safe_VkBindImageMemoryDeviceGroupInfo::safe_VkBindImageMemoryDeviceGroupInfo(const VkBindImageMemoryDeviceGroupInfo* in_struct) :
- sType(in_struct->sType),
- deviceIndexCount(in_struct->deviceIndexCount),
- pDeviceIndices(nullptr),
- splitInstanceBindRegionCount(in_struct->splitInstanceBindRegionCount),
- pSplitInstanceBindRegions(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pDeviceIndices) {
- pDeviceIndices = new uint32_t[in_struct->deviceIndexCount];
- memcpy ((void *)pDeviceIndices, (void *)in_struct->pDeviceIndices, sizeof(uint32_t)*in_struct->deviceIndexCount);
- }
- if (in_struct->pSplitInstanceBindRegions) {
- pSplitInstanceBindRegions = new VkRect2D[in_struct->splitInstanceBindRegionCount];
- memcpy ((void *)pSplitInstanceBindRegions, (void *)in_struct->pSplitInstanceBindRegions, sizeof(VkRect2D)*in_struct->splitInstanceBindRegionCount);
- }
-}
-
-safe_VkBindImageMemoryDeviceGroupInfo::safe_VkBindImageMemoryDeviceGroupInfo() :
- pNext(nullptr),
- pDeviceIndices(nullptr),
- pSplitInstanceBindRegions(nullptr)
-{}
-
-safe_VkBindImageMemoryDeviceGroupInfo::safe_VkBindImageMemoryDeviceGroupInfo(const safe_VkBindImageMemoryDeviceGroupInfo& src)
-{
- sType = src.sType;
- deviceIndexCount = src.deviceIndexCount;
- pDeviceIndices = nullptr;
- splitInstanceBindRegionCount = src.splitInstanceBindRegionCount;
- pSplitInstanceBindRegions = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pDeviceIndices) {
- pDeviceIndices = new uint32_t[src.deviceIndexCount];
- memcpy ((void *)pDeviceIndices, (void *)src.pDeviceIndices, sizeof(uint32_t)*src.deviceIndexCount);
- }
- if (src.pSplitInstanceBindRegions) {
- pSplitInstanceBindRegions = new VkRect2D[src.splitInstanceBindRegionCount];
- memcpy ((void *)pSplitInstanceBindRegions, (void *)src.pSplitInstanceBindRegions, sizeof(VkRect2D)*src.splitInstanceBindRegionCount);
- }
-}
-
-safe_VkBindImageMemoryDeviceGroupInfo& safe_VkBindImageMemoryDeviceGroupInfo::operator=(const safe_VkBindImageMemoryDeviceGroupInfo& src)
-{
- if (&src == this) return *this;
-
- if (pDeviceIndices)
- delete[] pDeviceIndices;
- if (pSplitInstanceBindRegions)
- delete[] pSplitInstanceBindRegions;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- deviceIndexCount = src.deviceIndexCount;
- pDeviceIndices = nullptr;
- splitInstanceBindRegionCount = src.splitInstanceBindRegionCount;
- pSplitInstanceBindRegions = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pDeviceIndices) {
- pDeviceIndices = new uint32_t[src.deviceIndexCount];
- memcpy ((void *)pDeviceIndices, (void *)src.pDeviceIndices, sizeof(uint32_t)*src.deviceIndexCount);
- }
- if (src.pSplitInstanceBindRegions) {
- pSplitInstanceBindRegions = new VkRect2D[src.splitInstanceBindRegionCount];
- memcpy ((void *)pSplitInstanceBindRegions, (void *)src.pSplitInstanceBindRegions, sizeof(VkRect2D)*src.splitInstanceBindRegionCount);
- }
-
- return *this;
-}
-
-safe_VkBindImageMemoryDeviceGroupInfo::~safe_VkBindImageMemoryDeviceGroupInfo()
-{
- if (pDeviceIndices)
- delete[] pDeviceIndices;
- if (pSplitInstanceBindRegions)
- delete[] pSplitInstanceBindRegions;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkBindImageMemoryDeviceGroupInfo::initialize(const VkBindImageMemoryDeviceGroupInfo* in_struct)
-{
- sType = in_struct->sType;
- deviceIndexCount = in_struct->deviceIndexCount;
- pDeviceIndices = nullptr;
- splitInstanceBindRegionCount = in_struct->splitInstanceBindRegionCount;
- pSplitInstanceBindRegions = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pDeviceIndices) {
- pDeviceIndices = new uint32_t[in_struct->deviceIndexCount];
- memcpy ((void *)pDeviceIndices, (void *)in_struct->pDeviceIndices, sizeof(uint32_t)*in_struct->deviceIndexCount);
- }
- if (in_struct->pSplitInstanceBindRegions) {
- pSplitInstanceBindRegions = new VkRect2D[in_struct->splitInstanceBindRegionCount];
- memcpy ((void *)pSplitInstanceBindRegions, (void *)in_struct->pSplitInstanceBindRegions, sizeof(VkRect2D)*in_struct->splitInstanceBindRegionCount);
- }
-}
-
-void safe_VkBindImageMemoryDeviceGroupInfo::initialize(const safe_VkBindImageMemoryDeviceGroupInfo* src)
-{
- sType = src->sType;
- deviceIndexCount = src->deviceIndexCount;
- pDeviceIndices = nullptr;
- splitInstanceBindRegionCount = src->splitInstanceBindRegionCount;
- pSplitInstanceBindRegions = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (src->pDeviceIndices) {
- pDeviceIndices = new uint32_t[src->deviceIndexCount];
- memcpy ((void *)pDeviceIndices, (void *)src->pDeviceIndices, sizeof(uint32_t)*src->deviceIndexCount);
- }
- if (src->pSplitInstanceBindRegions) {
- pSplitInstanceBindRegions = new VkRect2D[src->splitInstanceBindRegionCount];
- memcpy ((void *)pSplitInstanceBindRegions, (void *)src->pSplitInstanceBindRegions, sizeof(VkRect2D)*src->splitInstanceBindRegionCount);
- }
-}
-
-safe_VkPhysicalDeviceGroupProperties::safe_VkPhysicalDeviceGroupProperties(const VkPhysicalDeviceGroupProperties* in_struct) :
- sType(in_struct->sType),
- physicalDeviceCount(in_struct->physicalDeviceCount),
- subsetAllocation(in_struct->subsetAllocation)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- for (uint32_t i = 0; i < VK_MAX_DEVICE_GROUP_SIZE; ++i) {
- physicalDevices[i] = in_struct->physicalDevices[i];
- }
-}
-
-safe_VkPhysicalDeviceGroupProperties::safe_VkPhysicalDeviceGroupProperties() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceGroupProperties::safe_VkPhysicalDeviceGroupProperties(const safe_VkPhysicalDeviceGroupProperties& src)
-{
- sType = src.sType;
- physicalDeviceCount = src.physicalDeviceCount;
- subsetAllocation = src.subsetAllocation;
- pNext = SafePnextCopy(src.pNext);
- for (uint32_t i = 0; i < VK_MAX_DEVICE_GROUP_SIZE; ++i) {
- physicalDevices[i] = src.physicalDevices[i];
- }
-}
-
-safe_VkPhysicalDeviceGroupProperties& safe_VkPhysicalDeviceGroupProperties::operator=(const safe_VkPhysicalDeviceGroupProperties& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- physicalDeviceCount = src.physicalDeviceCount;
- subsetAllocation = src.subsetAllocation;
- pNext = SafePnextCopy(src.pNext);
- for (uint32_t i = 0; i < VK_MAX_DEVICE_GROUP_SIZE; ++i) {
- physicalDevices[i] = src.physicalDevices[i];
- }
-
- return *this;
-}
-
-safe_VkPhysicalDeviceGroupProperties::~safe_VkPhysicalDeviceGroupProperties()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceGroupProperties::initialize(const VkPhysicalDeviceGroupProperties* in_struct)
-{
- sType = in_struct->sType;
- physicalDeviceCount = in_struct->physicalDeviceCount;
- subsetAllocation = in_struct->subsetAllocation;
- pNext = SafePnextCopy(in_struct->pNext);
- for (uint32_t i = 0; i < VK_MAX_DEVICE_GROUP_SIZE; ++i) {
- physicalDevices[i] = in_struct->physicalDevices[i];
- }
-}
-
-void safe_VkPhysicalDeviceGroupProperties::initialize(const safe_VkPhysicalDeviceGroupProperties* src)
-{
- sType = src->sType;
- physicalDeviceCount = src->physicalDeviceCount;
- subsetAllocation = src->subsetAllocation;
- pNext = SafePnextCopy(src->pNext);
- for (uint32_t i = 0; i < VK_MAX_DEVICE_GROUP_SIZE; ++i) {
- physicalDevices[i] = src->physicalDevices[i];
- }
-}
-
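A sketch of how the wrapper above might be used to snapshot device-group enumeration results, assuming a valid VkInstance on a Vulkan 1.1 loader; SnapshotDeviceGroups is a hypothetical name and error handling is omitted. Each wrapped element copies the fixed-size physicalDevices array and clones any pNext chain.

#include <vector>
#include <vulkan/vulkan.h>
#include "vk_safe_struct.h"

// Hypothetical helper: query the device groups, then wrap each result so the
// snapshot stays valid independent of the temporary query buffer.
std::vector<safe_VkPhysicalDeviceGroupProperties> SnapshotDeviceGroups(VkInstance instance) {
    uint32_t count = 0;
    vkEnumeratePhysicalDeviceGroups(instance, &count, nullptr);
    std::vector<VkPhysicalDeviceGroupProperties> groups(count);
    for (auto &g : groups) {
        g.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES;
        g.pNext = nullptr;
    }
    vkEnumeratePhysicalDeviceGroups(instance, &count, groups.data());
    std::vector<safe_VkPhysicalDeviceGroupProperties> snapshot;
    snapshot.reserve(count);
    for (const auto &g : groups) snapshot.emplace_back(&g);
    return snapshot;
}
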
-safe_VkDeviceGroupDeviceCreateInfo::safe_VkDeviceGroupDeviceCreateInfo(const VkDeviceGroupDeviceCreateInfo* in_struct) :
- sType(in_struct->sType),
- physicalDeviceCount(in_struct->physicalDeviceCount),
- pPhysicalDevices(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pPhysicalDevices) {
- pPhysicalDevices = new VkPhysicalDevice[in_struct->physicalDeviceCount];
- memcpy ((void *)pPhysicalDevices, (void *)in_struct->pPhysicalDevices, sizeof(VkPhysicalDevice)*in_struct->physicalDeviceCount);
- }
-}
-
-safe_VkDeviceGroupDeviceCreateInfo::safe_VkDeviceGroupDeviceCreateInfo() :
- pNext(nullptr),
- pPhysicalDevices(nullptr)
-{}
-
-safe_VkDeviceGroupDeviceCreateInfo::safe_VkDeviceGroupDeviceCreateInfo(const safe_VkDeviceGroupDeviceCreateInfo& src)
-{
- sType = src.sType;
- physicalDeviceCount = src.physicalDeviceCount;
- pPhysicalDevices = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pPhysicalDevices) {
- pPhysicalDevices = new VkPhysicalDevice[src.physicalDeviceCount];
- memcpy ((void *)pPhysicalDevices, (void *)src.pPhysicalDevices, sizeof(VkPhysicalDevice)*src.physicalDeviceCount);
- }
-}
-
-safe_VkDeviceGroupDeviceCreateInfo& safe_VkDeviceGroupDeviceCreateInfo::operator=(const safe_VkDeviceGroupDeviceCreateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pPhysicalDevices)
- delete[] pPhysicalDevices;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- physicalDeviceCount = src.physicalDeviceCount;
- pPhysicalDevices = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pPhysicalDevices) {
- pPhysicalDevices = new VkPhysicalDevice[src.physicalDeviceCount];
- memcpy ((void *)pPhysicalDevices, (void *)src.pPhysicalDevices, sizeof(VkPhysicalDevice)*src.physicalDeviceCount);
- }
-
- return *this;
-}
-
-safe_VkDeviceGroupDeviceCreateInfo::~safe_VkDeviceGroupDeviceCreateInfo()
-{
- if (pPhysicalDevices)
- delete[] pPhysicalDevices;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDeviceGroupDeviceCreateInfo::initialize(const VkDeviceGroupDeviceCreateInfo* in_struct)
-{
- sType = in_struct->sType;
- physicalDeviceCount = in_struct->physicalDeviceCount;
- pPhysicalDevices = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pPhysicalDevices) {
- pPhysicalDevices = new VkPhysicalDevice[in_struct->physicalDeviceCount];
- memcpy ((void *)pPhysicalDevices, (void *)in_struct->pPhysicalDevices, sizeof(VkPhysicalDevice)*in_struct->physicalDeviceCount);
- }
-}
-
-void safe_VkDeviceGroupDeviceCreateInfo::initialize(const safe_VkDeviceGroupDeviceCreateInfo* src)
-{
- sType = src->sType;
- physicalDeviceCount = src->physicalDeviceCount;
- pPhysicalDevices = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (src->pPhysicalDevices) {
- pPhysicalDevices = new VkPhysicalDevice[src->physicalDeviceCount];
- memcpy ((void *)pPhysicalDevices, (void *)src->pPhysicalDevices, sizeof(VkPhysicalDevice)*src->physicalDeviceCount);
- }
-}
-
-safe_VkBufferMemoryRequirementsInfo2::safe_VkBufferMemoryRequirementsInfo2(const VkBufferMemoryRequirementsInfo2* in_struct) :
- sType(in_struct->sType),
- buffer(in_struct->buffer)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkBufferMemoryRequirementsInfo2::safe_VkBufferMemoryRequirementsInfo2() :
- pNext(nullptr)
-{}
-
-safe_VkBufferMemoryRequirementsInfo2::safe_VkBufferMemoryRequirementsInfo2(const safe_VkBufferMemoryRequirementsInfo2& src)
-{
- sType = src.sType;
- buffer = src.buffer;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkBufferMemoryRequirementsInfo2& safe_VkBufferMemoryRequirementsInfo2::operator=(const safe_VkBufferMemoryRequirementsInfo2& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- buffer = src.buffer;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkBufferMemoryRequirementsInfo2::~safe_VkBufferMemoryRequirementsInfo2()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkBufferMemoryRequirementsInfo2::initialize(const VkBufferMemoryRequirementsInfo2* in_struct)
-{
- sType = in_struct->sType;
- buffer = in_struct->buffer;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkBufferMemoryRequirementsInfo2::initialize(const safe_VkBufferMemoryRequirementsInfo2* src)
-{
- sType = src->sType;
- buffer = src->buffer;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkImageMemoryRequirementsInfo2::safe_VkImageMemoryRequirementsInfo2(const VkImageMemoryRequirementsInfo2* in_struct) :
- sType(in_struct->sType),
- image(in_struct->image)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkImageMemoryRequirementsInfo2::safe_VkImageMemoryRequirementsInfo2() :
- pNext(nullptr)
-{}
-
-safe_VkImageMemoryRequirementsInfo2::safe_VkImageMemoryRequirementsInfo2(const safe_VkImageMemoryRequirementsInfo2& src)
-{
- sType = src.sType;
- image = src.image;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkImageMemoryRequirementsInfo2& safe_VkImageMemoryRequirementsInfo2::operator=(const safe_VkImageMemoryRequirementsInfo2& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- image = src.image;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkImageMemoryRequirementsInfo2::~safe_VkImageMemoryRequirementsInfo2()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkImageMemoryRequirementsInfo2::initialize(const VkImageMemoryRequirementsInfo2* in_struct)
-{
- sType = in_struct->sType;
- image = in_struct->image;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkImageMemoryRequirementsInfo2::initialize(const safe_VkImageMemoryRequirementsInfo2* src)
-{
- sType = src->sType;
- image = src->image;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkImageSparseMemoryRequirementsInfo2::safe_VkImageSparseMemoryRequirementsInfo2(const VkImageSparseMemoryRequirementsInfo2* in_struct) :
- sType(in_struct->sType),
- image(in_struct->image)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkImageSparseMemoryRequirementsInfo2::safe_VkImageSparseMemoryRequirementsInfo2() :
- pNext(nullptr)
-{}
-
-safe_VkImageSparseMemoryRequirementsInfo2::safe_VkImageSparseMemoryRequirementsInfo2(const safe_VkImageSparseMemoryRequirementsInfo2& src)
-{
- sType = src.sType;
- image = src.image;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkImageSparseMemoryRequirementsInfo2& safe_VkImageSparseMemoryRequirementsInfo2::operator=(const safe_VkImageSparseMemoryRequirementsInfo2& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- image = src.image;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkImageSparseMemoryRequirementsInfo2::~safe_VkImageSparseMemoryRequirementsInfo2()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkImageSparseMemoryRequirementsInfo2::initialize(const VkImageSparseMemoryRequirementsInfo2* in_struct)
-{
- sType = in_struct->sType;
- image = in_struct->image;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkImageSparseMemoryRequirementsInfo2::initialize(const safe_VkImageSparseMemoryRequirementsInfo2* src)
-{
- sType = src->sType;
- image = src->image;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkMemoryRequirements2::safe_VkMemoryRequirements2(const VkMemoryRequirements2* in_struct) :
- sType(in_struct->sType),
- memoryRequirements(in_struct->memoryRequirements)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkMemoryRequirements2::safe_VkMemoryRequirements2() :
- pNext(nullptr)
-{}
-
-safe_VkMemoryRequirements2::safe_VkMemoryRequirements2(const safe_VkMemoryRequirements2& src)
-{
- sType = src.sType;
- memoryRequirements = src.memoryRequirements;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkMemoryRequirements2& safe_VkMemoryRequirements2::operator=(const safe_VkMemoryRequirements2& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- memoryRequirements = src.memoryRequirements;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkMemoryRequirements2::~safe_VkMemoryRequirements2()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkMemoryRequirements2::initialize(const VkMemoryRequirements2* in_struct)
-{
- sType = in_struct->sType;
- memoryRequirements = in_struct->memoryRequirements;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkMemoryRequirements2::initialize(const safe_VkMemoryRequirements2* src)
-{
- sType = src->sType;
- memoryRequirements = src->memoryRequirements;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkSparseImageMemoryRequirements2::safe_VkSparseImageMemoryRequirements2(const VkSparseImageMemoryRequirements2* in_struct) :
- sType(in_struct->sType),
- memoryRequirements(in_struct->memoryRequirements)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkSparseImageMemoryRequirements2::safe_VkSparseImageMemoryRequirements2() :
- pNext(nullptr)
-{}
-
-safe_VkSparseImageMemoryRequirements2::safe_VkSparseImageMemoryRequirements2(const safe_VkSparseImageMemoryRequirements2& src)
-{
- sType = src.sType;
- memoryRequirements = src.memoryRequirements;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkSparseImageMemoryRequirements2& safe_VkSparseImageMemoryRequirements2::operator=(const safe_VkSparseImageMemoryRequirements2& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- memoryRequirements = src.memoryRequirements;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkSparseImageMemoryRequirements2::~safe_VkSparseImageMemoryRequirements2()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkSparseImageMemoryRequirements2::initialize(const VkSparseImageMemoryRequirements2* in_struct)
-{
- sType = in_struct->sType;
- memoryRequirements = in_struct->memoryRequirements;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkSparseImageMemoryRequirements2::initialize(const safe_VkSparseImageMemoryRequirements2* src)
-{
- sType = src->sType;
- memoryRequirements = src->memoryRequirements;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceFeatures2::safe_VkPhysicalDeviceFeatures2(const VkPhysicalDeviceFeatures2* in_struct) :
- sType(in_struct->sType),
- features(in_struct->features)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceFeatures2::safe_VkPhysicalDeviceFeatures2() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceFeatures2::safe_VkPhysicalDeviceFeatures2(const safe_VkPhysicalDeviceFeatures2& src)
-{
- sType = src.sType;
- features = src.features;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceFeatures2& safe_VkPhysicalDeviceFeatures2::operator=(const safe_VkPhysicalDeviceFeatures2& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- features = src.features;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceFeatures2::~safe_VkPhysicalDeviceFeatures2()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceFeatures2::initialize(const VkPhysicalDeviceFeatures2* in_struct)
-{
- sType = in_struct->sType;
- features = in_struct->features;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceFeatures2::initialize(const safe_VkPhysicalDeviceFeatures2* src)
-{
- sType = src->sType;
- features = src->features;
- pNext = SafePnextCopy(src->pNext);
-}
-
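A cautionary sketch for the initialize() overloads above, assuming "vk_safe_struct.h"; RecordFeatureQuery is a hypothetical name. Unlike operator=, initialize() does not free a pNext chain the object may already own, so it is meant for freshly constructed (or default-constructed) wrappers.

#include <vulkan/vulkan.h>
#include "vk_safe_struct.h"

// Hypothetical helper: fill a default-constructed wrapper from the application's
// VkPhysicalDeviceFeatures2. Reusing an already-populated wrapper here would
// leak its previous pNext chain, since initialize() only copies.
void RecordFeatureQuery(safe_VkPhysicalDeviceFeatures2 &dst, const VkPhysicalDeviceFeatures2 *src) {
    dst.initialize(src);
}
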
-safe_VkPhysicalDeviceProperties2::safe_VkPhysicalDeviceProperties2(const VkPhysicalDeviceProperties2* in_struct) :
- sType(in_struct->sType),
- properties(in_struct->properties)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceProperties2::safe_VkPhysicalDeviceProperties2() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceProperties2::safe_VkPhysicalDeviceProperties2(const safe_VkPhysicalDeviceProperties2& src)
-{
- sType = src.sType;
- properties = src.properties;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceProperties2& safe_VkPhysicalDeviceProperties2::operator=(const safe_VkPhysicalDeviceProperties2& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- properties = src.properties;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceProperties2::~safe_VkPhysicalDeviceProperties2()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceProperties2::initialize(const VkPhysicalDeviceProperties2* in_struct)
-{
- sType = in_struct->sType;
- properties = in_struct->properties;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceProperties2::initialize(const safe_VkPhysicalDeviceProperties2* src)
-{
- sType = src->sType;
- properties = src->properties;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkFormatProperties2::safe_VkFormatProperties2(const VkFormatProperties2* in_struct) :
- sType(in_struct->sType),
- formatProperties(in_struct->formatProperties)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkFormatProperties2::safe_VkFormatProperties2() :
- pNext(nullptr)
-{}
-
-safe_VkFormatProperties2::safe_VkFormatProperties2(const safe_VkFormatProperties2& src)
-{
- sType = src.sType;
- formatProperties = src.formatProperties;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkFormatProperties2& safe_VkFormatProperties2::operator=(const safe_VkFormatProperties2& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- formatProperties = src.formatProperties;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkFormatProperties2::~safe_VkFormatProperties2()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkFormatProperties2::initialize(const VkFormatProperties2* in_struct)
-{
- sType = in_struct->sType;
- formatProperties = in_struct->formatProperties;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkFormatProperties2::initialize(const safe_VkFormatProperties2* src)
-{
- sType = src->sType;
- formatProperties = src->formatProperties;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkImageFormatProperties2::safe_VkImageFormatProperties2(const VkImageFormatProperties2* in_struct) :
- sType(in_struct->sType),
- imageFormatProperties(in_struct->imageFormatProperties)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkImageFormatProperties2::safe_VkImageFormatProperties2() :
- pNext(nullptr)
-{}
-
-safe_VkImageFormatProperties2::safe_VkImageFormatProperties2(const safe_VkImageFormatProperties2& src)
-{
- sType = src.sType;
- imageFormatProperties = src.imageFormatProperties;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkImageFormatProperties2& safe_VkImageFormatProperties2::operator=(const safe_VkImageFormatProperties2& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- imageFormatProperties = src.imageFormatProperties;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkImageFormatProperties2::~safe_VkImageFormatProperties2()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkImageFormatProperties2::initialize(const VkImageFormatProperties2* in_struct)
-{
- sType = in_struct->sType;
- imageFormatProperties = in_struct->imageFormatProperties;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkImageFormatProperties2::initialize(const safe_VkImageFormatProperties2* src)
-{
- sType = src->sType;
- imageFormatProperties = src->imageFormatProperties;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceImageFormatInfo2::safe_VkPhysicalDeviceImageFormatInfo2(const VkPhysicalDeviceImageFormatInfo2* in_struct) :
- sType(in_struct->sType),
- format(in_struct->format),
- type(in_struct->type),
- tiling(in_struct->tiling),
- usage(in_struct->usage),
- flags(in_struct->flags)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceImageFormatInfo2::safe_VkPhysicalDeviceImageFormatInfo2() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceImageFormatInfo2::safe_VkPhysicalDeviceImageFormatInfo2(const safe_VkPhysicalDeviceImageFormatInfo2& src)
-{
- sType = src.sType;
- format = src.format;
- type = src.type;
- tiling = src.tiling;
- usage = src.usage;
- flags = src.flags;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceImageFormatInfo2& safe_VkPhysicalDeviceImageFormatInfo2::operator=(const safe_VkPhysicalDeviceImageFormatInfo2& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- format = src.format;
- type = src.type;
- tiling = src.tiling;
- usage = src.usage;
- flags = src.flags;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceImageFormatInfo2::~safe_VkPhysicalDeviceImageFormatInfo2()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceImageFormatInfo2::initialize(const VkPhysicalDeviceImageFormatInfo2* in_struct)
-{
- sType = in_struct->sType;
- format = in_struct->format;
- type = in_struct->type;
- tiling = in_struct->tiling;
- usage = in_struct->usage;
- flags = in_struct->flags;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceImageFormatInfo2::initialize(const safe_VkPhysicalDeviceImageFormatInfo2* src)
-{
- sType = src->sType;
- format = src->format;
- type = src->type;
- tiling = src->tiling;
- usage = src->usage;
- flags = src->flags;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkQueueFamilyProperties2::safe_VkQueueFamilyProperties2(const VkQueueFamilyProperties2* in_struct) :
- sType(in_struct->sType),
- queueFamilyProperties(in_struct->queueFamilyProperties)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkQueueFamilyProperties2::safe_VkQueueFamilyProperties2() :
- pNext(nullptr)
-{}
-
-safe_VkQueueFamilyProperties2::safe_VkQueueFamilyProperties2(const safe_VkQueueFamilyProperties2& src)
-{
- sType = src.sType;
- queueFamilyProperties = src.queueFamilyProperties;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkQueueFamilyProperties2& safe_VkQueueFamilyProperties2::operator=(const safe_VkQueueFamilyProperties2& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- queueFamilyProperties = src.queueFamilyProperties;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkQueueFamilyProperties2::~safe_VkQueueFamilyProperties2()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkQueueFamilyProperties2::initialize(const VkQueueFamilyProperties2* in_struct)
-{
- sType = in_struct->sType;
- queueFamilyProperties = in_struct->queueFamilyProperties;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkQueueFamilyProperties2::initialize(const safe_VkQueueFamilyProperties2* src)
-{
- sType = src->sType;
- queueFamilyProperties = src->queueFamilyProperties;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceMemoryProperties2::safe_VkPhysicalDeviceMemoryProperties2(const VkPhysicalDeviceMemoryProperties2* in_struct) :
- sType(in_struct->sType),
- memoryProperties(in_struct->memoryProperties)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceMemoryProperties2::safe_VkPhysicalDeviceMemoryProperties2() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceMemoryProperties2::safe_VkPhysicalDeviceMemoryProperties2(const safe_VkPhysicalDeviceMemoryProperties2& src)
-{
- sType = src.sType;
- memoryProperties = src.memoryProperties;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceMemoryProperties2& safe_VkPhysicalDeviceMemoryProperties2::operator=(const safe_VkPhysicalDeviceMemoryProperties2& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- memoryProperties = src.memoryProperties;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceMemoryProperties2::~safe_VkPhysicalDeviceMemoryProperties2()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceMemoryProperties2::initialize(const VkPhysicalDeviceMemoryProperties2* in_struct)
-{
- sType = in_struct->sType;
- memoryProperties = in_struct->memoryProperties;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceMemoryProperties2::initialize(const safe_VkPhysicalDeviceMemoryProperties2* src)
-{
- sType = src->sType;
- memoryProperties = src->memoryProperties;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkSparseImageFormatProperties2::safe_VkSparseImageFormatProperties2(const VkSparseImageFormatProperties2* in_struct) :
- sType(in_struct->sType),
- properties(in_struct->properties)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkSparseImageFormatProperties2::safe_VkSparseImageFormatProperties2() :
- pNext(nullptr)
-{}
-
-safe_VkSparseImageFormatProperties2::safe_VkSparseImageFormatProperties2(const safe_VkSparseImageFormatProperties2& src)
-{
- sType = src.sType;
- properties = src.properties;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkSparseImageFormatProperties2& safe_VkSparseImageFormatProperties2::operator=(const safe_VkSparseImageFormatProperties2& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- properties = src.properties;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkSparseImageFormatProperties2::~safe_VkSparseImageFormatProperties2()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkSparseImageFormatProperties2::initialize(const VkSparseImageFormatProperties2* in_struct)
-{
- sType = in_struct->sType;
- properties = in_struct->properties;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkSparseImageFormatProperties2::initialize(const safe_VkSparseImageFormatProperties2* src)
-{
- sType = src->sType;
- properties = src->properties;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceSparseImageFormatInfo2::safe_VkPhysicalDeviceSparseImageFormatInfo2(const VkPhysicalDeviceSparseImageFormatInfo2* in_struct) :
- sType(in_struct->sType),
- format(in_struct->format),
- type(in_struct->type),
- samples(in_struct->samples),
- usage(in_struct->usage),
- tiling(in_struct->tiling)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceSparseImageFormatInfo2::safe_VkPhysicalDeviceSparseImageFormatInfo2() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceSparseImageFormatInfo2::safe_VkPhysicalDeviceSparseImageFormatInfo2(const safe_VkPhysicalDeviceSparseImageFormatInfo2& src)
-{
- sType = src.sType;
- format = src.format;
- type = src.type;
- samples = src.samples;
- usage = src.usage;
- tiling = src.tiling;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceSparseImageFormatInfo2& safe_VkPhysicalDeviceSparseImageFormatInfo2::operator=(const safe_VkPhysicalDeviceSparseImageFormatInfo2& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- format = src.format;
- type = src.type;
- samples = src.samples;
- usage = src.usage;
- tiling = src.tiling;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceSparseImageFormatInfo2::~safe_VkPhysicalDeviceSparseImageFormatInfo2()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceSparseImageFormatInfo2::initialize(const VkPhysicalDeviceSparseImageFormatInfo2* in_struct)
-{
- sType = in_struct->sType;
- format = in_struct->format;
- type = in_struct->type;
- samples = in_struct->samples;
- usage = in_struct->usage;
- tiling = in_struct->tiling;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceSparseImageFormatInfo2::initialize(const safe_VkPhysicalDeviceSparseImageFormatInfo2* src)
-{
- sType = src->sType;
- format = src->format;
- type = src->type;
- samples = src->samples;
- usage = src->usage;
- tiling = src->tiling;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDevicePointClippingProperties::safe_VkPhysicalDevicePointClippingProperties(const VkPhysicalDevicePointClippingProperties* in_struct) :
- sType(in_struct->sType),
- pointClippingBehavior(in_struct->pointClippingBehavior)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDevicePointClippingProperties::safe_VkPhysicalDevicePointClippingProperties() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDevicePointClippingProperties::safe_VkPhysicalDevicePointClippingProperties(const safe_VkPhysicalDevicePointClippingProperties& src)
-{
- sType = src.sType;
- pointClippingBehavior = src.pointClippingBehavior;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDevicePointClippingProperties& safe_VkPhysicalDevicePointClippingProperties::operator=(const safe_VkPhysicalDevicePointClippingProperties& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- pointClippingBehavior = src.pointClippingBehavior;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDevicePointClippingProperties::~safe_VkPhysicalDevicePointClippingProperties()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDevicePointClippingProperties::initialize(const VkPhysicalDevicePointClippingProperties* in_struct)
-{
- sType = in_struct->sType;
- pointClippingBehavior = in_struct->pointClippingBehavior;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDevicePointClippingProperties::initialize(const safe_VkPhysicalDevicePointClippingProperties* src)
-{
- sType = src->sType;
- pointClippingBehavior = src->pointClippingBehavior;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkRenderPassInputAttachmentAspectCreateInfo::safe_VkRenderPassInputAttachmentAspectCreateInfo(const VkRenderPassInputAttachmentAspectCreateInfo* in_struct) :
- sType(in_struct->sType),
- aspectReferenceCount(in_struct->aspectReferenceCount),
- pAspectReferences(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pAspectReferences) {
- pAspectReferences = new VkInputAttachmentAspectReference[in_struct->aspectReferenceCount];
- memcpy ((void *)pAspectReferences, (void *)in_struct->pAspectReferences, sizeof(VkInputAttachmentAspectReference)*in_struct->aspectReferenceCount);
- }
-}
-
-safe_VkRenderPassInputAttachmentAspectCreateInfo::safe_VkRenderPassInputAttachmentAspectCreateInfo() :
- pNext(nullptr),
- pAspectReferences(nullptr)
-{}
-
-safe_VkRenderPassInputAttachmentAspectCreateInfo::safe_VkRenderPassInputAttachmentAspectCreateInfo(const safe_VkRenderPassInputAttachmentAspectCreateInfo& src)
-{
- sType = src.sType;
- aspectReferenceCount = src.aspectReferenceCount;
- pAspectReferences = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pAspectReferences) {
- pAspectReferences = new VkInputAttachmentAspectReference[src.aspectReferenceCount];
- memcpy ((void *)pAspectReferences, (void *)src.pAspectReferences, sizeof(VkInputAttachmentAspectReference)*src.aspectReferenceCount);
- }
-}
-
-safe_VkRenderPassInputAttachmentAspectCreateInfo& safe_VkRenderPassInputAttachmentAspectCreateInfo::operator=(const safe_VkRenderPassInputAttachmentAspectCreateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pAspectReferences)
- delete[] pAspectReferences;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- aspectReferenceCount = src.aspectReferenceCount;
- pAspectReferences = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pAspectReferences) {
- pAspectReferences = new VkInputAttachmentAspectReference[src.aspectReferenceCount];
- memcpy ((void *)pAspectReferences, (void *)src.pAspectReferences, sizeof(VkInputAttachmentAspectReference)*src.aspectReferenceCount);
- }
-
- return *this;
-}
-
-safe_VkRenderPassInputAttachmentAspectCreateInfo::~safe_VkRenderPassInputAttachmentAspectCreateInfo()
-{
- if (pAspectReferences)
- delete[] pAspectReferences;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkRenderPassInputAttachmentAspectCreateInfo::initialize(const VkRenderPassInputAttachmentAspectCreateInfo* in_struct)
-{
- sType = in_struct->sType;
- aspectReferenceCount = in_struct->aspectReferenceCount;
- pAspectReferences = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pAspectReferences) {
- pAspectReferences = new VkInputAttachmentAspectReference[in_struct->aspectReferenceCount];
- memcpy ((void *)pAspectReferences, (void *)in_struct->pAspectReferences, sizeof(VkInputAttachmentAspectReference)*in_struct->aspectReferenceCount);
- }
-}
-
-void safe_VkRenderPassInputAttachmentAspectCreateInfo::initialize(const safe_VkRenderPassInputAttachmentAspectCreateInfo* src)
-{
- sType = src->sType;
- aspectReferenceCount = src->aspectReferenceCount;
- pAspectReferences = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (src->pAspectReferences) {
- pAspectReferences = new VkInputAttachmentAspectReference[src->aspectReferenceCount];
- memcpy ((void *)pAspectReferences, (void *)src->pAspectReferences, sizeof(VkInputAttachmentAspectReference)*src->aspectReferenceCount);
- }
-}
-
-safe_VkImageViewUsageCreateInfo::safe_VkImageViewUsageCreateInfo(const VkImageViewUsageCreateInfo* in_struct) :
- sType(in_struct->sType),
- usage(in_struct->usage)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkImageViewUsageCreateInfo::safe_VkImageViewUsageCreateInfo() :
- pNext(nullptr)
-{}
-
-safe_VkImageViewUsageCreateInfo::safe_VkImageViewUsageCreateInfo(const safe_VkImageViewUsageCreateInfo& src)
-{
- sType = src.sType;
- usage = src.usage;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkImageViewUsageCreateInfo& safe_VkImageViewUsageCreateInfo::operator=(const safe_VkImageViewUsageCreateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- usage = src.usage;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkImageViewUsageCreateInfo::~safe_VkImageViewUsageCreateInfo()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkImageViewUsageCreateInfo::initialize(const VkImageViewUsageCreateInfo* in_struct)
-{
- sType = in_struct->sType;
- usage = in_struct->usage;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkImageViewUsageCreateInfo::initialize(const safe_VkImageViewUsageCreateInfo* src)
-{
- sType = src->sType;
- usage = src->usage;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPipelineTessellationDomainOriginStateCreateInfo::safe_VkPipelineTessellationDomainOriginStateCreateInfo(const VkPipelineTessellationDomainOriginStateCreateInfo* in_struct) :
- sType(in_struct->sType),
- domainOrigin(in_struct->domainOrigin)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPipelineTessellationDomainOriginStateCreateInfo::safe_VkPipelineTessellationDomainOriginStateCreateInfo() :
- pNext(nullptr)
-{}
-
-safe_VkPipelineTessellationDomainOriginStateCreateInfo::safe_VkPipelineTessellationDomainOriginStateCreateInfo(const safe_VkPipelineTessellationDomainOriginStateCreateInfo& src)
-{
- sType = src.sType;
- domainOrigin = src.domainOrigin;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPipelineTessellationDomainOriginStateCreateInfo& safe_VkPipelineTessellationDomainOriginStateCreateInfo::operator=(const safe_VkPipelineTessellationDomainOriginStateCreateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- domainOrigin = src.domainOrigin;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPipelineTessellationDomainOriginStateCreateInfo::~safe_VkPipelineTessellationDomainOriginStateCreateInfo()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPipelineTessellationDomainOriginStateCreateInfo::initialize(const VkPipelineTessellationDomainOriginStateCreateInfo* in_struct)
-{
- sType = in_struct->sType;
- domainOrigin = in_struct->domainOrigin;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPipelineTessellationDomainOriginStateCreateInfo::initialize(const safe_VkPipelineTessellationDomainOriginStateCreateInfo* src)
-{
- sType = src->sType;
- domainOrigin = src->domainOrigin;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkRenderPassMultiviewCreateInfo::safe_VkRenderPassMultiviewCreateInfo(const VkRenderPassMultiviewCreateInfo* in_struct) :
- sType(in_struct->sType),
- subpassCount(in_struct->subpassCount),
- pViewMasks(nullptr),
- dependencyCount(in_struct->dependencyCount),
- pViewOffsets(nullptr),
- correlationMaskCount(in_struct->correlationMaskCount),
- pCorrelationMasks(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pViewMasks) {
- pViewMasks = new uint32_t[in_struct->subpassCount];
- memcpy ((void *)pViewMasks, (void *)in_struct->pViewMasks, sizeof(uint32_t)*in_struct->subpassCount);
- }
- if (in_struct->pViewOffsets) {
- pViewOffsets = new int32_t[in_struct->dependencyCount];
- memcpy ((void *)pViewOffsets, (void *)in_struct->pViewOffsets, sizeof(int32_t)*in_struct->dependencyCount);
- }
- if (in_struct->pCorrelationMasks) {
- pCorrelationMasks = new uint32_t[in_struct->correlationMaskCount];
- memcpy ((void *)pCorrelationMasks, (void *)in_struct->pCorrelationMasks, sizeof(uint32_t)*in_struct->correlationMaskCount);
- }
-}
-
-safe_VkRenderPassMultiviewCreateInfo::safe_VkRenderPassMultiviewCreateInfo() :
- pNext(nullptr),
- pViewMasks(nullptr),
- pViewOffsets(nullptr),
- pCorrelationMasks(nullptr)
-{}
-
-safe_VkRenderPassMultiviewCreateInfo::safe_VkRenderPassMultiviewCreateInfo(const safe_VkRenderPassMultiviewCreateInfo& src)
-{
- sType = src.sType;
- subpassCount = src.subpassCount;
- pViewMasks = nullptr;
- dependencyCount = src.dependencyCount;
- pViewOffsets = nullptr;
- correlationMaskCount = src.correlationMaskCount;
- pCorrelationMasks = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pViewMasks) {
- pViewMasks = new uint32_t[src.subpassCount];
- memcpy ((void *)pViewMasks, (void *)src.pViewMasks, sizeof(uint32_t)*src.subpassCount);
- }
- if (src.pViewOffsets) {
- pViewOffsets = new int32_t[src.dependencyCount];
- memcpy ((void *)pViewOffsets, (void *)src.pViewOffsets, sizeof(int32_t)*src.dependencyCount);
- }
- if (src.pCorrelationMasks) {
- pCorrelationMasks = new uint32_t[src.correlationMaskCount];
- memcpy ((void *)pCorrelationMasks, (void *)src.pCorrelationMasks, sizeof(uint32_t)*src.correlationMaskCount);
- }
-}
-
-safe_VkRenderPassMultiviewCreateInfo& safe_VkRenderPassMultiviewCreateInfo::operator=(const safe_VkRenderPassMultiviewCreateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pViewMasks)
- delete[] pViewMasks;
- if (pViewOffsets)
- delete[] pViewOffsets;
- if (pCorrelationMasks)
- delete[] pCorrelationMasks;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- subpassCount = src.subpassCount;
- pViewMasks = nullptr;
- dependencyCount = src.dependencyCount;
- pViewOffsets = nullptr;
- correlationMaskCount = src.correlationMaskCount;
- pCorrelationMasks = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pViewMasks) {
- pViewMasks = new uint32_t[src.subpassCount];
- memcpy ((void *)pViewMasks, (void *)src.pViewMasks, sizeof(uint32_t)*src.subpassCount);
- }
- if (src.pViewOffsets) {
- pViewOffsets = new int32_t[src.dependencyCount];
- memcpy ((void *)pViewOffsets, (void *)src.pViewOffsets, sizeof(int32_t)*src.dependencyCount);
- }
- if (src.pCorrelationMasks) {
- pCorrelationMasks = new uint32_t[src.correlationMaskCount];
- memcpy ((void *)pCorrelationMasks, (void *)src.pCorrelationMasks, sizeof(uint32_t)*src.correlationMaskCount);
- }
-
- return *this;
-}
-
-safe_VkRenderPassMultiviewCreateInfo::~safe_VkRenderPassMultiviewCreateInfo()
-{
- if (pViewMasks)
- delete[] pViewMasks;
- if (pViewOffsets)
- delete[] pViewOffsets;
- if (pCorrelationMasks)
- delete[] pCorrelationMasks;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkRenderPassMultiviewCreateInfo::initialize(const VkRenderPassMultiviewCreateInfo* in_struct)
-{
- sType = in_struct->sType;
- subpassCount = in_struct->subpassCount;
- pViewMasks = nullptr;
- dependencyCount = in_struct->dependencyCount;
- pViewOffsets = nullptr;
- correlationMaskCount = in_struct->correlationMaskCount;
- pCorrelationMasks = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pViewMasks) {
- pViewMasks = new uint32_t[in_struct->subpassCount];
- memcpy ((void *)pViewMasks, (void *)in_struct->pViewMasks, sizeof(uint32_t)*in_struct->subpassCount);
- }
- if (in_struct->pViewOffsets) {
- pViewOffsets = new int32_t[in_struct->dependencyCount];
- memcpy ((void *)pViewOffsets, (void *)in_struct->pViewOffsets, sizeof(int32_t)*in_struct->dependencyCount);
- }
- if (in_struct->pCorrelationMasks) {
- pCorrelationMasks = new uint32_t[in_struct->correlationMaskCount];
- memcpy ((void *)pCorrelationMasks, (void *)in_struct->pCorrelationMasks, sizeof(uint32_t)*in_struct->correlationMaskCount);
- }
-}
-
-void safe_VkRenderPassMultiviewCreateInfo::initialize(const safe_VkRenderPassMultiviewCreateInfo* src)
-{
- sType = src->sType;
- subpassCount = src->subpassCount;
- pViewMasks = nullptr;
- dependencyCount = src->dependencyCount;
- pViewOffsets = nullptr;
- correlationMaskCount = src->correlationMaskCount;
- pCorrelationMasks = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (src->pViewMasks) {
- pViewMasks = new uint32_t[src->subpassCount];
- memcpy ((void *)pViewMasks, (void *)src->pViewMasks, sizeof(uint32_t)*src->subpassCount);
- }
- if (src->pViewOffsets) {
- pViewOffsets = new int32_t[src->dependencyCount];
- memcpy ((void *)pViewOffsets, (void *)src->pViewOffsets, sizeof(int32_t)*src->dependencyCount);
- }
- if (src->pCorrelationMasks) {
- pCorrelationMasks = new uint32_t[src->correlationMaskCount];
- memcpy ((void *)pCorrelationMasks, (void *)src->pCorrelationMasks, sizeof(uint32_t)*src->correlationMaskCount);
- }
-}
-
-safe_VkPhysicalDeviceMultiviewFeatures::safe_VkPhysicalDeviceMultiviewFeatures(const VkPhysicalDeviceMultiviewFeatures* in_struct) :
- sType(in_struct->sType),
- multiview(in_struct->multiview),
- multiviewGeometryShader(in_struct->multiviewGeometryShader),
- multiviewTessellationShader(in_struct->multiviewTessellationShader)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceMultiviewFeatures::safe_VkPhysicalDeviceMultiviewFeatures() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceMultiviewFeatures::safe_VkPhysicalDeviceMultiviewFeatures(const safe_VkPhysicalDeviceMultiviewFeatures& src)
-{
- sType = src.sType;
- multiview = src.multiview;
- multiviewGeometryShader = src.multiviewGeometryShader;
- multiviewTessellationShader = src.multiviewTessellationShader;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceMultiviewFeatures& safe_VkPhysicalDeviceMultiviewFeatures::operator=(const safe_VkPhysicalDeviceMultiviewFeatures& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- multiview = src.multiview;
- multiviewGeometryShader = src.multiviewGeometryShader;
- multiviewTessellationShader = src.multiviewTessellationShader;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceMultiviewFeatures::~safe_VkPhysicalDeviceMultiviewFeatures()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceMultiviewFeatures::initialize(const VkPhysicalDeviceMultiviewFeatures* in_struct)
-{
- sType = in_struct->sType;
- multiview = in_struct->multiview;
- multiviewGeometryShader = in_struct->multiviewGeometryShader;
- multiviewTessellationShader = in_struct->multiviewTessellationShader;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceMultiviewFeatures::initialize(const safe_VkPhysicalDeviceMultiviewFeatures* src)
-{
- sType = src->sType;
- multiview = src->multiview;
- multiviewGeometryShader = src->multiviewGeometryShader;
- multiviewTessellationShader = src->multiviewTessellationShader;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceMultiviewProperties::safe_VkPhysicalDeviceMultiviewProperties(const VkPhysicalDeviceMultiviewProperties* in_struct) :
- sType(in_struct->sType),
- maxMultiviewViewCount(in_struct->maxMultiviewViewCount),
- maxMultiviewInstanceIndex(in_struct->maxMultiviewInstanceIndex)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceMultiviewProperties::safe_VkPhysicalDeviceMultiviewProperties() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceMultiviewProperties::safe_VkPhysicalDeviceMultiviewProperties(const safe_VkPhysicalDeviceMultiviewProperties& src)
-{
- sType = src.sType;
- maxMultiviewViewCount = src.maxMultiviewViewCount;
- maxMultiviewInstanceIndex = src.maxMultiviewInstanceIndex;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceMultiviewProperties& safe_VkPhysicalDeviceMultiviewProperties::operator=(const safe_VkPhysicalDeviceMultiviewProperties& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- maxMultiviewViewCount = src.maxMultiviewViewCount;
- maxMultiviewInstanceIndex = src.maxMultiviewInstanceIndex;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceMultiviewProperties::~safe_VkPhysicalDeviceMultiviewProperties()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceMultiviewProperties::initialize(const VkPhysicalDeviceMultiviewProperties* in_struct)
-{
- sType = in_struct->sType;
- maxMultiviewViewCount = in_struct->maxMultiviewViewCount;
- maxMultiviewInstanceIndex = in_struct->maxMultiviewInstanceIndex;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceMultiviewProperties::initialize(const safe_VkPhysicalDeviceMultiviewProperties* src)
-{
- sType = src->sType;
- maxMultiviewViewCount = src->maxMultiviewViewCount;
- maxMultiviewInstanceIndex = src->maxMultiviewInstanceIndex;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceVariablePointersFeatures::safe_VkPhysicalDeviceVariablePointersFeatures(const VkPhysicalDeviceVariablePointersFeatures* in_struct) :
- sType(in_struct->sType),
- variablePointersStorageBuffer(in_struct->variablePointersStorageBuffer),
- variablePointers(in_struct->variablePointers)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceVariablePointersFeatures::safe_VkPhysicalDeviceVariablePointersFeatures() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceVariablePointersFeatures::safe_VkPhysicalDeviceVariablePointersFeatures(const safe_VkPhysicalDeviceVariablePointersFeatures& src)
-{
- sType = src.sType;
- variablePointersStorageBuffer = src.variablePointersStorageBuffer;
- variablePointers = src.variablePointers;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceVariablePointersFeatures& safe_VkPhysicalDeviceVariablePointersFeatures::operator=(const safe_VkPhysicalDeviceVariablePointersFeatures& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- variablePointersStorageBuffer = src.variablePointersStorageBuffer;
- variablePointers = src.variablePointers;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceVariablePointersFeatures::~safe_VkPhysicalDeviceVariablePointersFeatures()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceVariablePointersFeatures::initialize(const VkPhysicalDeviceVariablePointersFeatures* in_struct)
-{
- sType = in_struct->sType;
- variablePointersStorageBuffer = in_struct->variablePointersStorageBuffer;
- variablePointers = in_struct->variablePointers;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceVariablePointersFeatures::initialize(const safe_VkPhysicalDeviceVariablePointersFeatures* src)
-{
- sType = src->sType;
- variablePointersStorageBuffer = src->variablePointersStorageBuffer;
- variablePointers = src->variablePointers;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceProtectedMemoryFeatures::safe_VkPhysicalDeviceProtectedMemoryFeatures(const VkPhysicalDeviceProtectedMemoryFeatures* in_struct) :
- sType(in_struct->sType),
- protectedMemory(in_struct->protectedMemory)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceProtectedMemoryFeatures::safe_VkPhysicalDeviceProtectedMemoryFeatures() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceProtectedMemoryFeatures::safe_VkPhysicalDeviceProtectedMemoryFeatures(const safe_VkPhysicalDeviceProtectedMemoryFeatures& src)
-{
- sType = src.sType;
- protectedMemory = src.protectedMemory;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceProtectedMemoryFeatures& safe_VkPhysicalDeviceProtectedMemoryFeatures::operator=(const safe_VkPhysicalDeviceProtectedMemoryFeatures& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- protectedMemory = src.protectedMemory;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceProtectedMemoryFeatures::~safe_VkPhysicalDeviceProtectedMemoryFeatures()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceProtectedMemoryFeatures::initialize(const VkPhysicalDeviceProtectedMemoryFeatures* in_struct)
-{
- sType = in_struct->sType;
- protectedMemory = in_struct->protectedMemory;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceProtectedMemoryFeatures::initialize(const safe_VkPhysicalDeviceProtectedMemoryFeatures* src)
-{
- sType = src->sType;
- protectedMemory = src->protectedMemory;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceProtectedMemoryProperties::safe_VkPhysicalDeviceProtectedMemoryProperties(const VkPhysicalDeviceProtectedMemoryProperties* in_struct) :
- sType(in_struct->sType),
- protectedNoFault(in_struct->protectedNoFault)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceProtectedMemoryProperties::safe_VkPhysicalDeviceProtectedMemoryProperties() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceProtectedMemoryProperties::safe_VkPhysicalDeviceProtectedMemoryProperties(const safe_VkPhysicalDeviceProtectedMemoryProperties& src)
-{
- sType = src.sType;
- protectedNoFault = src.protectedNoFault;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceProtectedMemoryProperties& safe_VkPhysicalDeviceProtectedMemoryProperties::operator=(const safe_VkPhysicalDeviceProtectedMemoryProperties& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- protectedNoFault = src.protectedNoFault;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceProtectedMemoryProperties::~safe_VkPhysicalDeviceProtectedMemoryProperties()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceProtectedMemoryProperties::initialize(const VkPhysicalDeviceProtectedMemoryProperties* in_struct)
-{
- sType = in_struct->sType;
- protectedNoFault = in_struct->protectedNoFault;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceProtectedMemoryProperties::initialize(const safe_VkPhysicalDeviceProtectedMemoryProperties* src)
-{
- sType = src->sType;
- protectedNoFault = src->protectedNoFault;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkDeviceQueueInfo2::safe_VkDeviceQueueInfo2(const VkDeviceQueueInfo2* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- queueFamilyIndex(in_struct->queueFamilyIndex),
- queueIndex(in_struct->queueIndex)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkDeviceQueueInfo2::safe_VkDeviceQueueInfo2() :
- pNext(nullptr)
-{}
-
-safe_VkDeviceQueueInfo2::safe_VkDeviceQueueInfo2(const safe_VkDeviceQueueInfo2& src)
-{
- sType = src.sType;
- flags = src.flags;
- queueFamilyIndex = src.queueFamilyIndex;
- queueIndex = src.queueIndex;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkDeviceQueueInfo2& safe_VkDeviceQueueInfo2::operator=(const safe_VkDeviceQueueInfo2& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- queueFamilyIndex = src.queueFamilyIndex;
- queueIndex = src.queueIndex;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkDeviceQueueInfo2::~safe_VkDeviceQueueInfo2()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDeviceQueueInfo2::initialize(const VkDeviceQueueInfo2* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- queueFamilyIndex = in_struct->queueFamilyIndex;
- queueIndex = in_struct->queueIndex;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkDeviceQueueInfo2::initialize(const safe_VkDeviceQueueInfo2* src)
-{
- sType = src->sType;
- flags = src->flags;
- queueFamilyIndex = src->queueFamilyIndex;
- queueIndex = src->queueIndex;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkProtectedSubmitInfo::safe_VkProtectedSubmitInfo(const VkProtectedSubmitInfo* in_struct) :
- sType(in_struct->sType),
- protectedSubmit(in_struct->protectedSubmit)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkProtectedSubmitInfo::safe_VkProtectedSubmitInfo() :
- pNext(nullptr)
-{}
-
-safe_VkProtectedSubmitInfo::safe_VkProtectedSubmitInfo(const safe_VkProtectedSubmitInfo& src)
-{
- sType = src.sType;
- protectedSubmit = src.protectedSubmit;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkProtectedSubmitInfo& safe_VkProtectedSubmitInfo::operator=(const safe_VkProtectedSubmitInfo& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- protectedSubmit = src.protectedSubmit;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkProtectedSubmitInfo::~safe_VkProtectedSubmitInfo()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkProtectedSubmitInfo::initialize(const VkProtectedSubmitInfo* in_struct)
-{
- sType = in_struct->sType;
- protectedSubmit = in_struct->protectedSubmit;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkProtectedSubmitInfo::initialize(const safe_VkProtectedSubmitInfo* src)
-{
- sType = src->sType;
- protectedSubmit = src->protectedSubmit;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkSamplerYcbcrConversionCreateInfo::safe_VkSamplerYcbcrConversionCreateInfo(const VkSamplerYcbcrConversionCreateInfo* in_struct) :
- sType(in_struct->sType),
- format(in_struct->format),
- ycbcrModel(in_struct->ycbcrModel),
- ycbcrRange(in_struct->ycbcrRange),
- components(in_struct->components),
- xChromaOffset(in_struct->xChromaOffset),
- yChromaOffset(in_struct->yChromaOffset),
- chromaFilter(in_struct->chromaFilter),
- forceExplicitReconstruction(in_struct->forceExplicitReconstruction)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkSamplerYcbcrConversionCreateInfo::safe_VkSamplerYcbcrConversionCreateInfo() :
- pNext(nullptr)
-{}
-
-safe_VkSamplerYcbcrConversionCreateInfo::safe_VkSamplerYcbcrConversionCreateInfo(const safe_VkSamplerYcbcrConversionCreateInfo& src)
-{
- sType = src.sType;
- format = src.format;
- ycbcrModel = src.ycbcrModel;
- ycbcrRange = src.ycbcrRange;
- components = src.components;
- xChromaOffset = src.xChromaOffset;
- yChromaOffset = src.yChromaOffset;
- chromaFilter = src.chromaFilter;
- forceExplicitReconstruction = src.forceExplicitReconstruction;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkSamplerYcbcrConversionCreateInfo& safe_VkSamplerYcbcrConversionCreateInfo::operator=(const safe_VkSamplerYcbcrConversionCreateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- format = src.format;
- ycbcrModel = src.ycbcrModel;
- ycbcrRange = src.ycbcrRange;
- components = src.components;
- xChromaOffset = src.xChromaOffset;
- yChromaOffset = src.yChromaOffset;
- chromaFilter = src.chromaFilter;
- forceExplicitReconstruction = src.forceExplicitReconstruction;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkSamplerYcbcrConversionCreateInfo::~safe_VkSamplerYcbcrConversionCreateInfo()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkSamplerYcbcrConversionCreateInfo::initialize(const VkSamplerYcbcrConversionCreateInfo* in_struct)
-{
- sType = in_struct->sType;
- format = in_struct->format;
- ycbcrModel = in_struct->ycbcrModel;
- ycbcrRange = in_struct->ycbcrRange;
- components = in_struct->components;
- xChromaOffset = in_struct->xChromaOffset;
- yChromaOffset = in_struct->yChromaOffset;
- chromaFilter = in_struct->chromaFilter;
- forceExplicitReconstruction = in_struct->forceExplicitReconstruction;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkSamplerYcbcrConversionCreateInfo::initialize(const safe_VkSamplerYcbcrConversionCreateInfo* src)
-{
- sType = src->sType;
- format = src->format;
- ycbcrModel = src->ycbcrModel;
- ycbcrRange = src->ycbcrRange;
- components = src->components;
- xChromaOffset = src->xChromaOffset;
- yChromaOffset = src->yChromaOffset;
- chromaFilter = src->chromaFilter;
- forceExplicitReconstruction = src->forceExplicitReconstruction;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkSamplerYcbcrConversionInfo::safe_VkSamplerYcbcrConversionInfo(const VkSamplerYcbcrConversionInfo* in_struct) :
- sType(in_struct->sType),
- conversion(in_struct->conversion)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkSamplerYcbcrConversionInfo::safe_VkSamplerYcbcrConversionInfo() :
- pNext(nullptr)
-{}
-
-safe_VkSamplerYcbcrConversionInfo::safe_VkSamplerYcbcrConversionInfo(const safe_VkSamplerYcbcrConversionInfo& src)
-{
- sType = src.sType;
- conversion = src.conversion;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkSamplerYcbcrConversionInfo& safe_VkSamplerYcbcrConversionInfo::operator=(const safe_VkSamplerYcbcrConversionInfo& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- conversion = src.conversion;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkSamplerYcbcrConversionInfo::~safe_VkSamplerYcbcrConversionInfo()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkSamplerYcbcrConversionInfo::initialize(const VkSamplerYcbcrConversionInfo* in_struct)
-{
- sType = in_struct->sType;
- conversion = in_struct->conversion;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkSamplerYcbcrConversionInfo::initialize(const safe_VkSamplerYcbcrConversionInfo* src)
-{
- sType = src->sType;
- conversion = src->conversion;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkBindImagePlaneMemoryInfo::safe_VkBindImagePlaneMemoryInfo(const VkBindImagePlaneMemoryInfo* in_struct) :
- sType(in_struct->sType),
- planeAspect(in_struct->planeAspect)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkBindImagePlaneMemoryInfo::safe_VkBindImagePlaneMemoryInfo() :
- pNext(nullptr)
-{}
-
-safe_VkBindImagePlaneMemoryInfo::safe_VkBindImagePlaneMemoryInfo(const safe_VkBindImagePlaneMemoryInfo& src)
-{
- sType = src.sType;
- planeAspect = src.planeAspect;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkBindImagePlaneMemoryInfo& safe_VkBindImagePlaneMemoryInfo::operator=(const safe_VkBindImagePlaneMemoryInfo& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- planeAspect = src.planeAspect;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkBindImagePlaneMemoryInfo::~safe_VkBindImagePlaneMemoryInfo()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkBindImagePlaneMemoryInfo::initialize(const VkBindImagePlaneMemoryInfo* in_struct)
-{
- sType = in_struct->sType;
- planeAspect = in_struct->planeAspect;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkBindImagePlaneMemoryInfo::initialize(const safe_VkBindImagePlaneMemoryInfo* src)
-{
- sType = src->sType;
- planeAspect = src->planeAspect;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkImagePlaneMemoryRequirementsInfo::safe_VkImagePlaneMemoryRequirementsInfo(const VkImagePlaneMemoryRequirementsInfo* in_struct) :
- sType(in_struct->sType),
- planeAspect(in_struct->planeAspect)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkImagePlaneMemoryRequirementsInfo::safe_VkImagePlaneMemoryRequirementsInfo() :
- pNext(nullptr)
-{}
-
-safe_VkImagePlaneMemoryRequirementsInfo::safe_VkImagePlaneMemoryRequirementsInfo(const safe_VkImagePlaneMemoryRequirementsInfo& src)
-{
- sType = src.sType;
- planeAspect = src.planeAspect;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkImagePlaneMemoryRequirementsInfo& safe_VkImagePlaneMemoryRequirementsInfo::operator=(const safe_VkImagePlaneMemoryRequirementsInfo& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- planeAspect = src.planeAspect;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkImagePlaneMemoryRequirementsInfo::~safe_VkImagePlaneMemoryRequirementsInfo()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkImagePlaneMemoryRequirementsInfo::initialize(const VkImagePlaneMemoryRequirementsInfo* in_struct)
-{
- sType = in_struct->sType;
- planeAspect = in_struct->planeAspect;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkImagePlaneMemoryRequirementsInfo::initialize(const safe_VkImagePlaneMemoryRequirementsInfo* src)
-{
- sType = src->sType;
- planeAspect = src->planeAspect;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures::safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures(const VkPhysicalDeviceSamplerYcbcrConversionFeatures* in_struct) :
- sType(in_struct->sType),
- samplerYcbcrConversion(in_struct->samplerYcbcrConversion)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures::safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures::safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures(const safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures& src)
-{
- sType = src.sType;
- samplerYcbcrConversion = src.samplerYcbcrConversion;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures& safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures::operator=(const safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- samplerYcbcrConversion = src.samplerYcbcrConversion;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures::~safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures::initialize(const VkPhysicalDeviceSamplerYcbcrConversionFeatures* in_struct)
-{
- sType = in_struct->sType;
- samplerYcbcrConversion = in_struct->samplerYcbcrConversion;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures::initialize(const safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures* src)
-{
- sType = src->sType;
- samplerYcbcrConversion = src->samplerYcbcrConversion;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkSamplerYcbcrConversionImageFormatProperties::safe_VkSamplerYcbcrConversionImageFormatProperties(const VkSamplerYcbcrConversionImageFormatProperties* in_struct) :
- sType(in_struct->sType),
- combinedImageSamplerDescriptorCount(in_struct->combinedImageSamplerDescriptorCount)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkSamplerYcbcrConversionImageFormatProperties::safe_VkSamplerYcbcrConversionImageFormatProperties() :
- pNext(nullptr)
-{}
-
-safe_VkSamplerYcbcrConversionImageFormatProperties::safe_VkSamplerYcbcrConversionImageFormatProperties(const safe_VkSamplerYcbcrConversionImageFormatProperties& src)
-{
- sType = src.sType;
- combinedImageSamplerDescriptorCount = src.combinedImageSamplerDescriptorCount;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkSamplerYcbcrConversionImageFormatProperties& safe_VkSamplerYcbcrConversionImageFormatProperties::operator=(const safe_VkSamplerYcbcrConversionImageFormatProperties& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- combinedImageSamplerDescriptorCount = src.combinedImageSamplerDescriptorCount;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkSamplerYcbcrConversionImageFormatProperties::~safe_VkSamplerYcbcrConversionImageFormatProperties()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkSamplerYcbcrConversionImageFormatProperties::initialize(const VkSamplerYcbcrConversionImageFormatProperties* in_struct)
-{
- sType = in_struct->sType;
- combinedImageSamplerDescriptorCount = in_struct->combinedImageSamplerDescriptorCount;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkSamplerYcbcrConversionImageFormatProperties::initialize(const safe_VkSamplerYcbcrConversionImageFormatProperties* src)
-{
- sType = src->sType;
- combinedImageSamplerDescriptorCount = src->combinedImageSamplerDescriptorCount;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkDescriptorUpdateTemplateCreateInfo::safe_VkDescriptorUpdateTemplateCreateInfo(const VkDescriptorUpdateTemplateCreateInfo* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- descriptorUpdateEntryCount(in_struct->descriptorUpdateEntryCount),
- pDescriptorUpdateEntries(nullptr),
- templateType(in_struct->templateType),
- descriptorSetLayout(in_struct->descriptorSetLayout),
- pipelineBindPoint(in_struct->pipelineBindPoint),
- pipelineLayout(in_struct->pipelineLayout),
- set(in_struct->set)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pDescriptorUpdateEntries) {
- pDescriptorUpdateEntries = new VkDescriptorUpdateTemplateEntry[in_struct->descriptorUpdateEntryCount];
- memcpy ((void *)pDescriptorUpdateEntries, (void *)in_struct->pDescriptorUpdateEntries, sizeof(VkDescriptorUpdateTemplateEntry)*in_struct->descriptorUpdateEntryCount);
- }
-}
-
-safe_VkDescriptorUpdateTemplateCreateInfo::safe_VkDescriptorUpdateTemplateCreateInfo() :
- pNext(nullptr),
- pDescriptorUpdateEntries(nullptr)
-{}
-
-safe_VkDescriptorUpdateTemplateCreateInfo::safe_VkDescriptorUpdateTemplateCreateInfo(const safe_VkDescriptorUpdateTemplateCreateInfo& src)
-{
- sType = src.sType;
- flags = src.flags;
- descriptorUpdateEntryCount = src.descriptorUpdateEntryCount;
- pDescriptorUpdateEntries = nullptr;
- templateType = src.templateType;
- descriptorSetLayout = src.descriptorSetLayout;
- pipelineBindPoint = src.pipelineBindPoint;
- pipelineLayout = src.pipelineLayout;
- set = src.set;
- pNext = SafePnextCopy(src.pNext);
- if (src.pDescriptorUpdateEntries) {
- pDescriptorUpdateEntries = new VkDescriptorUpdateTemplateEntry[src.descriptorUpdateEntryCount];
- memcpy ((void *)pDescriptorUpdateEntries, (void *)src.pDescriptorUpdateEntries, sizeof(VkDescriptorUpdateTemplateEntry)*src.descriptorUpdateEntryCount);
- }
-}
-
-safe_VkDescriptorUpdateTemplateCreateInfo& safe_VkDescriptorUpdateTemplateCreateInfo::operator=(const safe_VkDescriptorUpdateTemplateCreateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pDescriptorUpdateEntries)
- delete[] pDescriptorUpdateEntries;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- descriptorUpdateEntryCount = src.descriptorUpdateEntryCount;
- pDescriptorUpdateEntries = nullptr;
- templateType = src.templateType;
- descriptorSetLayout = src.descriptorSetLayout;
- pipelineBindPoint = src.pipelineBindPoint;
- pipelineLayout = src.pipelineLayout;
- set = src.set;
- pNext = SafePnextCopy(src.pNext);
- if (src.pDescriptorUpdateEntries) {
- pDescriptorUpdateEntries = new VkDescriptorUpdateTemplateEntry[src.descriptorUpdateEntryCount];
- memcpy ((void *)pDescriptorUpdateEntries, (void *)src.pDescriptorUpdateEntries, sizeof(VkDescriptorUpdateTemplateEntry)*src.descriptorUpdateEntryCount);
- }
-
- return *this;
-}
-
-safe_VkDescriptorUpdateTemplateCreateInfo::~safe_VkDescriptorUpdateTemplateCreateInfo()
-{
- if (pDescriptorUpdateEntries)
- delete[] pDescriptorUpdateEntries;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDescriptorUpdateTemplateCreateInfo::initialize(const VkDescriptorUpdateTemplateCreateInfo* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- descriptorUpdateEntryCount = in_struct->descriptorUpdateEntryCount;
- pDescriptorUpdateEntries = nullptr;
- templateType = in_struct->templateType;
- descriptorSetLayout = in_struct->descriptorSetLayout;
- pipelineBindPoint = in_struct->pipelineBindPoint;
- pipelineLayout = in_struct->pipelineLayout;
- set = in_struct->set;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pDescriptorUpdateEntries) {
- pDescriptorUpdateEntries = new VkDescriptorUpdateTemplateEntry[in_struct->descriptorUpdateEntryCount];
- memcpy ((void *)pDescriptorUpdateEntries, (void *)in_struct->pDescriptorUpdateEntries, sizeof(VkDescriptorUpdateTemplateEntry)*in_struct->descriptorUpdateEntryCount);
- }
-}
-
-void safe_VkDescriptorUpdateTemplateCreateInfo::initialize(const safe_VkDescriptorUpdateTemplateCreateInfo* src)
-{
- sType = src->sType;
- flags = src->flags;
- descriptorUpdateEntryCount = src->descriptorUpdateEntryCount;
- pDescriptorUpdateEntries = nullptr;
- templateType = src->templateType;
- descriptorSetLayout = src->descriptorSetLayout;
- pipelineBindPoint = src->pipelineBindPoint;
- pipelineLayout = src->pipelineLayout;
- set = src->set;
- pNext = SafePnextCopy(src->pNext);
- if (src->pDescriptorUpdateEntries) {
- pDescriptorUpdateEntries = new VkDescriptorUpdateTemplateEntry[src->descriptorUpdateEntryCount];
- memcpy ((void *)pDescriptorUpdateEntries, (void *)src->pDescriptorUpdateEntries, sizeof(VkDescriptorUpdateTemplateEntry)*src->descriptorUpdateEntryCount);
- }
-}
-
-safe_VkPhysicalDeviceExternalImageFormatInfo::safe_VkPhysicalDeviceExternalImageFormatInfo(const VkPhysicalDeviceExternalImageFormatInfo* in_struct) :
- sType(in_struct->sType),
- handleType(in_struct->handleType)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceExternalImageFormatInfo::safe_VkPhysicalDeviceExternalImageFormatInfo() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceExternalImageFormatInfo::safe_VkPhysicalDeviceExternalImageFormatInfo(const safe_VkPhysicalDeviceExternalImageFormatInfo& src)
-{
- sType = src.sType;
- handleType = src.handleType;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceExternalImageFormatInfo& safe_VkPhysicalDeviceExternalImageFormatInfo::operator=(const safe_VkPhysicalDeviceExternalImageFormatInfo& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- handleType = src.handleType;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceExternalImageFormatInfo::~safe_VkPhysicalDeviceExternalImageFormatInfo()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceExternalImageFormatInfo::initialize(const VkPhysicalDeviceExternalImageFormatInfo* in_struct)
-{
- sType = in_struct->sType;
- handleType = in_struct->handleType;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceExternalImageFormatInfo::initialize(const safe_VkPhysicalDeviceExternalImageFormatInfo* src)
-{
- sType = src->sType;
- handleType = src->handleType;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkExternalImageFormatProperties::safe_VkExternalImageFormatProperties(const VkExternalImageFormatProperties* in_struct) :
- sType(in_struct->sType),
- externalMemoryProperties(in_struct->externalMemoryProperties)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkExternalImageFormatProperties::safe_VkExternalImageFormatProperties() :
- pNext(nullptr)
-{}
-
-safe_VkExternalImageFormatProperties::safe_VkExternalImageFormatProperties(const safe_VkExternalImageFormatProperties& src)
-{
- sType = src.sType;
- externalMemoryProperties = src.externalMemoryProperties;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkExternalImageFormatProperties& safe_VkExternalImageFormatProperties::operator=(const safe_VkExternalImageFormatProperties& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- externalMemoryProperties = src.externalMemoryProperties;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkExternalImageFormatProperties::~safe_VkExternalImageFormatProperties()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkExternalImageFormatProperties::initialize(const VkExternalImageFormatProperties* in_struct)
-{
- sType = in_struct->sType;
- externalMemoryProperties = in_struct->externalMemoryProperties;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkExternalImageFormatProperties::initialize(const safe_VkExternalImageFormatProperties* src)
-{
- sType = src->sType;
- externalMemoryProperties = src->externalMemoryProperties;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceExternalBufferInfo::safe_VkPhysicalDeviceExternalBufferInfo(const VkPhysicalDeviceExternalBufferInfo* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- usage(in_struct->usage),
- handleType(in_struct->handleType)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceExternalBufferInfo::safe_VkPhysicalDeviceExternalBufferInfo() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceExternalBufferInfo::safe_VkPhysicalDeviceExternalBufferInfo(const safe_VkPhysicalDeviceExternalBufferInfo& src)
-{
- sType = src.sType;
- flags = src.flags;
- usage = src.usage;
- handleType = src.handleType;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceExternalBufferInfo& safe_VkPhysicalDeviceExternalBufferInfo::operator=(const safe_VkPhysicalDeviceExternalBufferInfo& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- usage = src.usage;
- handleType = src.handleType;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceExternalBufferInfo::~safe_VkPhysicalDeviceExternalBufferInfo()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceExternalBufferInfo::initialize(const VkPhysicalDeviceExternalBufferInfo* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- usage = in_struct->usage;
- handleType = in_struct->handleType;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceExternalBufferInfo::initialize(const safe_VkPhysicalDeviceExternalBufferInfo* src)
-{
- sType = src->sType;
- flags = src->flags;
- usage = src->usage;
- handleType = src->handleType;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkExternalBufferProperties::safe_VkExternalBufferProperties(const VkExternalBufferProperties* in_struct) :
- sType(in_struct->sType),
- externalMemoryProperties(in_struct->externalMemoryProperties)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkExternalBufferProperties::safe_VkExternalBufferProperties() :
- pNext(nullptr)
-{}
-
-safe_VkExternalBufferProperties::safe_VkExternalBufferProperties(const safe_VkExternalBufferProperties& src)
-{
- sType = src.sType;
- externalMemoryProperties = src.externalMemoryProperties;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkExternalBufferProperties& safe_VkExternalBufferProperties::operator=(const safe_VkExternalBufferProperties& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- externalMemoryProperties = src.externalMemoryProperties;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkExternalBufferProperties::~safe_VkExternalBufferProperties()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkExternalBufferProperties::initialize(const VkExternalBufferProperties* in_struct)
-{
- sType = in_struct->sType;
- externalMemoryProperties = in_struct->externalMemoryProperties;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkExternalBufferProperties::initialize(const safe_VkExternalBufferProperties* src)
-{
- sType = src->sType;
- externalMemoryProperties = src->externalMemoryProperties;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceIDProperties::safe_VkPhysicalDeviceIDProperties(const VkPhysicalDeviceIDProperties* in_struct) :
- sType(in_struct->sType),
- deviceNodeMask(in_struct->deviceNodeMask),
- deviceLUIDValid(in_struct->deviceLUIDValid)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- for (uint32_t i = 0; i < VK_UUID_SIZE; ++i) {
- deviceUUID[i] = in_struct->deviceUUID[i];
- }
- for (uint32_t i = 0; i < VK_UUID_SIZE; ++i) {
- driverUUID[i] = in_struct->driverUUID[i];
- }
- for (uint32_t i = 0; i < VK_LUID_SIZE; ++i) {
- deviceLUID[i] = in_struct->deviceLUID[i];
- }
-}
-
-safe_VkPhysicalDeviceIDProperties::safe_VkPhysicalDeviceIDProperties() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceIDProperties::safe_VkPhysicalDeviceIDProperties(const safe_VkPhysicalDeviceIDProperties& src)
-{
- sType = src.sType;
- deviceNodeMask = src.deviceNodeMask;
- deviceLUIDValid = src.deviceLUIDValid;
- pNext = SafePnextCopy(src.pNext);
- for (uint32_t i = 0; i < VK_UUID_SIZE; ++i) {
- deviceUUID[i] = src.deviceUUID[i];
- }
- for (uint32_t i = 0; i < VK_UUID_SIZE; ++i) {
- driverUUID[i] = src.driverUUID[i];
- }
- for (uint32_t i = 0; i < VK_LUID_SIZE; ++i) {
- deviceLUID[i] = src.deviceLUID[i];
- }
-}
-
-safe_VkPhysicalDeviceIDProperties& safe_VkPhysicalDeviceIDProperties::operator=(const safe_VkPhysicalDeviceIDProperties& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- deviceNodeMask = src.deviceNodeMask;
- deviceLUIDValid = src.deviceLUIDValid;
- pNext = SafePnextCopy(src.pNext);
- for (uint32_t i = 0; i < VK_UUID_SIZE; ++i) {
- deviceUUID[i] = src.deviceUUID[i];
- }
- for (uint32_t i = 0; i < VK_UUID_SIZE; ++i) {
- driverUUID[i] = src.driverUUID[i];
- }
- for (uint32_t i = 0; i < VK_LUID_SIZE; ++i) {
- deviceLUID[i] = src.deviceLUID[i];
- }
-
- return *this;
-}
-
-safe_VkPhysicalDeviceIDProperties::~safe_VkPhysicalDeviceIDProperties()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceIDProperties::initialize(const VkPhysicalDeviceIDProperties* in_struct)
-{
- sType = in_struct->sType;
- deviceNodeMask = in_struct->deviceNodeMask;
- deviceLUIDValid = in_struct->deviceLUIDValid;
- pNext = SafePnextCopy(in_struct->pNext);
- for (uint32_t i = 0; i < VK_UUID_SIZE; ++i) {
- deviceUUID[i] = in_struct->deviceUUID[i];
- }
- for (uint32_t i = 0; i < VK_UUID_SIZE; ++i) {
- driverUUID[i] = in_struct->driverUUID[i];
- }
- for (uint32_t i = 0; i < VK_LUID_SIZE; ++i) {
- deviceLUID[i] = in_struct->deviceLUID[i];
- }
-}
-
-void safe_VkPhysicalDeviceIDProperties::initialize(const safe_VkPhysicalDeviceIDProperties* src)
-{
- sType = src->sType;
- deviceNodeMask = src->deviceNodeMask;
- deviceLUIDValid = src->deviceLUIDValid;
- pNext = SafePnextCopy(src->pNext);
- for (uint32_t i = 0; i < VK_UUID_SIZE; ++i) {
- deviceUUID[i] = src->deviceUUID[i];
- }
- for (uint32_t i = 0; i < VK_UUID_SIZE; ++i) {
- driverUUID[i] = src->driverUUID[i];
- }
- for (uint32_t i = 0; i < VK_LUID_SIZE; ++i) {
- deviceLUID[i] = src->deviceLUID[i];
- }
-}
-
-safe_VkExternalMemoryImageCreateInfo::safe_VkExternalMemoryImageCreateInfo(const VkExternalMemoryImageCreateInfo* in_struct) :
- sType(in_struct->sType),
- handleTypes(in_struct->handleTypes)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkExternalMemoryImageCreateInfo::safe_VkExternalMemoryImageCreateInfo() :
- pNext(nullptr)
-{}
-
-safe_VkExternalMemoryImageCreateInfo::safe_VkExternalMemoryImageCreateInfo(const safe_VkExternalMemoryImageCreateInfo& src)
-{
- sType = src.sType;
- handleTypes = src.handleTypes;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkExternalMemoryImageCreateInfo& safe_VkExternalMemoryImageCreateInfo::operator=(const safe_VkExternalMemoryImageCreateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- handleTypes = src.handleTypes;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkExternalMemoryImageCreateInfo::~safe_VkExternalMemoryImageCreateInfo()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkExternalMemoryImageCreateInfo::initialize(const VkExternalMemoryImageCreateInfo* in_struct)
-{
- sType = in_struct->sType;
- handleTypes = in_struct->handleTypes;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkExternalMemoryImageCreateInfo::initialize(const safe_VkExternalMemoryImageCreateInfo* src)
-{
- sType = src->sType;
- handleTypes = src->handleTypes;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkExternalMemoryBufferCreateInfo::safe_VkExternalMemoryBufferCreateInfo(const VkExternalMemoryBufferCreateInfo* in_struct) :
- sType(in_struct->sType),
- handleTypes(in_struct->handleTypes)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkExternalMemoryBufferCreateInfo::safe_VkExternalMemoryBufferCreateInfo() :
- pNext(nullptr)
-{}
-
-safe_VkExternalMemoryBufferCreateInfo::safe_VkExternalMemoryBufferCreateInfo(const safe_VkExternalMemoryBufferCreateInfo& src)
-{
- sType = src.sType;
- handleTypes = src.handleTypes;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkExternalMemoryBufferCreateInfo& safe_VkExternalMemoryBufferCreateInfo::operator=(const safe_VkExternalMemoryBufferCreateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- handleTypes = src.handleTypes;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkExternalMemoryBufferCreateInfo::~safe_VkExternalMemoryBufferCreateInfo()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkExternalMemoryBufferCreateInfo::initialize(const VkExternalMemoryBufferCreateInfo* in_struct)
-{
- sType = in_struct->sType;
- handleTypes = in_struct->handleTypes;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkExternalMemoryBufferCreateInfo::initialize(const safe_VkExternalMemoryBufferCreateInfo* src)
-{
- sType = src->sType;
- handleTypes = src->handleTypes;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkExportMemoryAllocateInfo::safe_VkExportMemoryAllocateInfo(const VkExportMemoryAllocateInfo* in_struct) :
- sType(in_struct->sType),
- handleTypes(in_struct->handleTypes)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkExportMemoryAllocateInfo::safe_VkExportMemoryAllocateInfo() :
- pNext(nullptr)
-{}
-
-safe_VkExportMemoryAllocateInfo::safe_VkExportMemoryAllocateInfo(const safe_VkExportMemoryAllocateInfo& src)
-{
- sType = src.sType;
- handleTypes = src.handleTypes;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkExportMemoryAllocateInfo& safe_VkExportMemoryAllocateInfo::operator=(const safe_VkExportMemoryAllocateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- handleTypes = src.handleTypes;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkExportMemoryAllocateInfo::~safe_VkExportMemoryAllocateInfo()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkExportMemoryAllocateInfo::initialize(const VkExportMemoryAllocateInfo* in_struct)
-{
- sType = in_struct->sType;
- handleTypes = in_struct->handleTypes;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkExportMemoryAllocateInfo::initialize(const safe_VkExportMemoryAllocateInfo* src)
-{
- sType = src->sType;
- handleTypes = src->handleTypes;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceExternalFenceInfo::safe_VkPhysicalDeviceExternalFenceInfo(const VkPhysicalDeviceExternalFenceInfo* in_struct) :
- sType(in_struct->sType),
- handleType(in_struct->handleType)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceExternalFenceInfo::safe_VkPhysicalDeviceExternalFenceInfo() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceExternalFenceInfo::safe_VkPhysicalDeviceExternalFenceInfo(const safe_VkPhysicalDeviceExternalFenceInfo& src)
-{
- sType = src.sType;
- handleType = src.handleType;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceExternalFenceInfo& safe_VkPhysicalDeviceExternalFenceInfo::operator=(const safe_VkPhysicalDeviceExternalFenceInfo& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- handleType = src.handleType;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceExternalFenceInfo::~safe_VkPhysicalDeviceExternalFenceInfo()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceExternalFenceInfo::initialize(const VkPhysicalDeviceExternalFenceInfo* in_struct)
-{
- sType = in_struct->sType;
- handleType = in_struct->handleType;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceExternalFenceInfo::initialize(const safe_VkPhysicalDeviceExternalFenceInfo* src)
-{
- sType = src->sType;
- handleType = src->handleType;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkExternalFenceProperties::safe_VkExternalFenceProperties(const VkExternalFenceProperties* in_struct) :
- sType(in_struct->sType),
- exportFromImportedHandleTypes(in_struct->exportFromImportedHandleTypes),
- compatibleHandleTypes(in_struct->compatibleHandleTypes),
- externalFenceFeatures(in_struct->externalFenceFeatures)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkExternalFenceProperties::safe_VkExternalFenceProperties() :
- pNext(nullptr)
-{}
-
-safe_VkExternalFenceProperties::safe_VkExternalFenceProperties(const safe_VkExternalFenceProperties& src)
-{
- sType = src.sType;
- exportFromImportedHandleTypes = src.exportFromImportedHandleTypes;
- compatibleHandleTypes = src.compatibleHandleTypes;
- externalFenceFeatures = src.externalFenceFeatures;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkExternalFenceProperties& safe_VkExternalFenceProperties::operator=(const safe_VkExternalFenceProperties& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- exportFromImportedHandleTypes = src.exportFromImportedHandleTypes;
- compatibleHandleTypes = src.compatibleHandleTypes;
- externalFenceFeatures = src.externalFenceFeatures;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkExternalFenceProperties::~safe_VkExternalFenceProperties()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkExternalFenceProperties::initialize(const VkExternalFenceProperties* in_struct)
-{
- sType = in_struct->sType;
- exportFromImportedHandleTypes = in_struct->exportFromImportedHandleTypes;
- compatibleHandleTypes = in_struct->compatibleHandleTypes;
- externalFenceFeatures = in_struct->externalFenceFeatures;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkExternalFenceProperties::initialize(const safe_VkExternalFenceProperties* src)
-{
- sType = src->sType;
- exportFromImportedHandleTypes = src->exportFromImportedHandleTypes;
- compatibleHandleTypes = src->compatibleHandleTypes;
- externalFenceFeatures = src->externalFenceFeatures;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkExportFenceCreateInfo::safe_VkExportFenceCreateInfo(const VkExportFenceCreateInfo* in_struct) :
- sType(in_struct->sType),
- handleTypes(in_struct->handleTypes)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkExportFenceCreateInfo::safe_VkExportFenceCreateInfo() :
- pNext(nullptr)
-{}
-
-safe_VkExportFenceCreateInfo::safe_VkExportFenceCreateInfo(const safe_VkExportFenceCreateInfo& src)
-{
- sType = src.sType;
- handleTypes = src.handleTypes;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkExportFenceCreateInfo& safe_VkExportFenceCreateInfo::operator=(const safe_VkExportFenceCreateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- handleTypes = src.handleTypes;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkExportFenceCreateInfo::~safe_VkExportFenceCreateInfo()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkExportFenceCreateInfo::initialize(const VkExportFenceCreateInfo* in_struct)
-{
- sType = in_struct->sType;
- handleTypes = in_struct->handleTypes;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkExportFenceCreateInfo::initialize(const safe_VkExportFenceCreateInfo* src)
-{
- sType = src->sType;
- handleTypes = src->handleTypes;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkExportSemaphoreCreateInfo::safe_VkExportSemaphoreCreateInfo(const VkExportSemaphoreCreateInfo* in_struct) :
- sType(in_struct->sType),
- handleTypes(in_struct->handleTypes)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkExportSemaphoreCreateInfo::safe_VkExportSemaphoreCreateInfo() :
- pNext(nullptr)
-{}
-
-safe_VkExportSemaphoreCreateInfo::safe_VkExportSemaphoreCreateInfo(const safe_VkExportSemaphoreCreateInfo& src)
-{
- sType = src.sType;
- handleTypes = src.handleTypes;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkExportSemaphoreCreateInfo& safe_VkExportSemaphoreCreateInfo::operator=(const safe_VkExportSemaphoreCreateInfo& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- handleTypes = src.handleTypes;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkExportSemaphoreCreateInfo::~safe_VkExportSemaphoreCreateInfo()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkExportSemaphoreCreateInfo::initialize(const VkExportSemaphoreCreateInfo* in_struct)
-{
- sType = in_struct->sType;
- handleTypes = in_struct->handleTypes;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkExportSemaphoreCreateInfo::initialize(const safe_VkExportSemaphoreCreateInfo* src)
-{
- sType = src->sType;
- handleTypes = src->handleTypes;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceExternalSemaphoreInfo::safe_VkPhysicalDeviceExternalSemaphoreInfo(const VkPhysicalDeviceExternalSemaphoreInfo* in_struct) :
- sType(in_struct->sType),
- handleType(in_struct->handleType)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceExternalSemaphoreInfo::safe_VkPhysicalDeviceExternalSemaphoreInfo() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceExternalSemaphoreInfo::safe_VkPhysicalDeviceExternalSemaphoreInfo(const safe_VkPhysicalDeviceExternalSemaphoreInfo& src)
-{
- sType = src.sType;
- handleType = src.handleType;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceExternalSemaphoreInfo& safe_VkPhysicalDeviceExternalSemaphoreInfo::operator=(const safe_VkPhysicalDeviceExternalSemaphoreInfo& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- handleType = src.handleType;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceExternalSemaphoreInfo::~safe_VkPhysicalDeviceExternalSemaphoreInfo()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceExternalSemaphoreInfo::initialize(const VkPhysicalDeviceExternalSemaphoreInfo* in_struct)
-{
- sType = in_struct->sType;
- handleType = in_struct->handleType;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceExternalSemaphoreInfo::initialize(const safe_VkPhysicalDeviceExternalSemaphoreInfo* src)
-{
- sType = src->sType;
- handleType = src->handleType;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkExternalSemaphoreProperties::safe_VkExternalSemaphoreProperties(const VkExternalSemaphoreProperties* in_struct) :
- sType(in_struct->sType),
- exportFromImportedHandleTypes(in_struct->exportFromImportedHandleTypes),
- compatibleHandleTypes(in_struct->compatibleHandleTypes),
- externalSemaphoreFeatures(in_struct->externalSemaphoreFeatures)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkExternalSemaphoreProperties::safe_VkExternalSemaphoreProperties() :
- pNext(nullptr)
-{}
-
-safe_VkExternalSemaphoreProperties::safe_VkExternalSemaphoreProperties(const safe_VkExternalSemaphoreProperties& src)
-{
- sType = src.sType;
- exportFromImportedHandleTypes = src.exportFromImportedHandleTypes;
- compatibleHandleTypes = src.compatibleHandleTypes;
- externalSemaphoreFeatures = src.externalSemaphoreFeatures;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkExternalSemaphoreProperties& safe_VkExternalSemaphoreProperties::operator=(const safe_VkExternalSemaphoreProperties& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- exportFromImportedHandleTypes = src.exportFromImportedHandleTypes;
- compatibleHandleTypes = src.compatibleHandleTypes;
- externalSemaphoreFeatures = src.externalSemaphoreFeatures;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkExternalSemaphoreProperties::~safe_VkExternalSemaphoreProperties()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkExternalSemaphoreProperties::initialize(const VkExternalSemaphoreProperties* in_struct)
-{
- sType = in_struct->sType;
- exportFromImportedHandleTypes = in_struct->exportFromImportedHandleTypes;
- compatibleHandleTypes = in_struct->compatibleHandleTypes;
- externalSemaphoreFeatures = in_struct->externalSemaphoreFeatures;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkExternalSemaphoreProperties::initialize(const safe_VkExternalSemaphoreProperties* src)
-{
- sType = src->sType;
- exportFromImportedHandleTypes = src->exportFromImportedHandleTypes;
- compatibleHandleTypes = src->compatibleHandleTypes;
- externalSemaphoreFeatures = src->externalSemaphoreFeatures;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceMaintenance3Properties::safe_VkPhysicalDeviceMaintenance3Properties(const VkPhysicalDeviceMaintenance3Properties* in_struct) :
- sType(in_struct->sType),
- maxPerSetDescriptors(in_struct->maxPerSetDescriptors),
- maxMemoryAllocationSize(in_struct->maxMemoryAllocationSize)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceMaintenance3Properties::safe_VkPhysicalDeviceMaintenance3Properties() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceMaintenance3Properties::safe_VkPhysicalDeviceMaintenance3Properties(const safe_VkPhysicalDeviceMaintenance3Properties& src)
-{
- sType = src.sType;
- maxPerSetDescriptors = src.maxPerSetDescriptors;
- maxMemoryAllocationSize = src.maxMemoryAllocationSize;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceMaintenance3Properties& safe_VkPhysicalDeviceMaintenance3Properties::operator=(const safe_VkPhysicalDeviceMaintenance3Properties& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- maxPerSetDescriptors = src.maxPerSetDescriptors;
- maxMemoryAllocationSize = src.maxMemoryAllocationSize;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceMaintenance3Properties::~safe_VkPhysicalDeviceMaintenance3Properties()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceMaintenance3Properties::initialize(const VkPhysicalDeviceMaintenance3Properties* in_struct)
-{
- sType = in_struct->sType;
- maxPerSetDescriptors = in_struct->maxPerSetDescriptors;
- maxMemoryAllocationSize = in_struct->maxMemoryAllocationSize;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceMaintenance3Properties::initialize(const safe_VkPhysicalDeviceMaintenance3Properties* src)
-{
- sType = src->sType;
- maxPerSetDescriptors = src->maxPerSetDescriptors;
- maxMemoryAllocationSize = src->maxMemoryAllocationSize;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkDescriptorSetLayoutSupport::safe_VkDescriptorSetLayoutSupport(const VkDescriptorSetLayoutSupport* in_struct) :
- sType(in_struct->sType),
- supported(in_struct->supported)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkDescriptorSetLayoutSupport::safe_VkDescriptorSetLayoutSupport() :
- pNext(nullptr)
-{}
-
-safe_VkDescriptorSetLayoutSupport::safe_VkDescriptorSetLayoutSupport(const safe_VkDescriptorSetLayoutSupport& src)
-{
- sType = src.sType;
- supported = src.supported;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkDescriptorSetLayoutSupport& safe_VkDescriptorSetLayoutSupport::operator=(const safe_VkDescriptorSetLayoutSupport& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- supported = src.supported;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkDescriptorSetLayoutSupport::~safe_VkDescriptorSetLayoutSupport()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDescriptorSetLayoutSupport::initialize(const VkDescriptorSetLayoutSupport* in_struct)
-{
- sType = in_struct->sType;
- supported = in_struct->supported;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkDescriptorSetLayoutSupport::initialize(const safe_VkDescriptorSetLayoutSupport* src)
-{
- sType = src->sType;
- supported = src->supported;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceShaderDrawParametersFeatures::safe_VkPhysicalDeviceShaderDrawParametersFeatures(const VkPhysicalDeviceShaderDrawParametersFeatures* in_struct) :
- sType(in_struct->sType),
- shaderDrawParameters(in_struct->shaderDrawParameters)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceShaderDrawParametersFeatures::safe_VkPhysicalDeviceShaderDrawParametersFeatures() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceShaderDrawParametersFeatures::safe_VkPhysicalDeviceShaderDrawParametersFeatures(const safe_VkPhysicalDeviceShaderDrawParametersFeatures& src)
-{
- sType = src.sType;
- shaderDrawParameters = src.shaderDrawParameters;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceShaderDrawParametersFeatures& safe_VkPhysicalDeviceShaderDrawParametersFeatures::operator=(const safe_VkPhysicalDeviceShaderDrawParametersFeatures& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- shaderDrawParameters = src.shaderDrawParameters;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceShaderDrawParametersFeatures::~safe_VkPhysicalDeviceShaderDrawParametersFeatures()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceShaderDrawParametersFeatures::initialize(const VkPhysicalDeviceShaderDrawParametersFeatures* in_struct)
-{
- sType = in_struct->sType;
- shaderDrawParameters = in_struct->shaderDrawParameters;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceShaderDrawParametersFeatures::initialize(const safe_VkPhysicalDeviceShaderDrawParametersFeatures* src)
-{
- sType = src->sType;
- shaderDrawParameters = src->shaderDrawParameters;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkSwapchainCreateInfoKHR::safe_VkSwapchainCreateInfoKHR(const VkSwapchainCreateInfoKHR* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- surface(in_struct->surface),
- minImageCount(in_struct->minImageCount),
- imageFormat(in_struct->imageFormat),
- imageColorSpace(in_struct->imageColorSpace),
- imageExtent(in_struct->imageExtent),
- imageArrayLayers(in_struct->imageArrayLayers),
- imageUsage(in_struct->imageUsage),
- imageSharingMode(in_struct->imageSharingMode),
- queueFamilyIndexCount(in_struct->queueFamilyIndexCount),
- pQueueFamilyIndices(nullptr),
- preTransform(in_struct->preTransform),
- compositeAlpha(in_struct->compositeAlpha),
- presentMode(in_struct->presentMode),
- clipped(in_struct->clipped),
- oldSwapchain(in_struct->oldSwapchain)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pQueueFamilyIndices) {
- pQueueFamilyIndices = new uint32_t[in_struct->queueFamilyIndexCount];
- memcpy ((void *)pQueueFamilyIndices, (void *)in_struct->pQueueFamilyIndices, sizeof(uint32_t)*in_struct->queueFamilyIndexCount);
- }
-}
-
-safe_VkSwapchainCreateInfoKHR::safe_VkSwapchainCreateInfoKHR() :
- pNext(nullptr),
- pQueueFamilyIndices(nullptr)
-{}
-
-safe_VkSwapchainCreateInfoKHR::safe_VkSwapchainCreateInfoKHR(const safe_VkSwapchainCreateInfoKHR& src)
-{
- sType = src.sType;
- flags = src.flags;
- surface = src.surface;
- minImageCount = src.minImageCount;
- imageFormat = src.imageFormat;
- imageColorSpace = src.imageColorSpace;
- imageExtent = src.imageExtent;
- imageArrayLayers = src.imageArrayLayers;
- imageUsage = src.imageUsage;
- imageSharingMode = src.imageSharingMode;
- queueFamilyIndexCount = src.queueFamilyIndexCount;
- pQueueFamilyIndices = nullptr;
- preTransform = src.preTransform;
- compositeAlpha = src.compositeAlpha;
- presentMode = src.presentMode;
- clipped = src.clipped;
- oldSwapchain = src.oldSwapchain;
- pNext = SafePnextCopy(src.pNext);
- if (src.pQueueFamilyIndices) {
- pQueueFamilyIndices = new uint32_t[src.queueFamilyIndexCount];
- memcpy ((void *)pQueueFamilyIndices, (void *)src.pQueueFamilyIndices, sizeof(uint32_t)*src.queueFamilyIndexCount);
- }
-}
-
-safe_VkSwapchainCreateInfoKHR& safe_VkSwapchainCreateInfoKHR::operator=(const safe_VkSwapchainCreateInfoKHR& src)
-{
- if (&src == this) return *this;
-
- if (pQueueFamilyIndices)
- delete[] pQueueFamilyIndices;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- surface = src.surface;
- minImageCount = src.minImageCount;
- imageFormat = src.imageFormat;
- imageColorSpace = src.imageColorSpace;
- imageExtent = src.imageExtent;
- imageArrayLayers = src.imageArrayLayers;
- imageUsage = src.imageUsage;
- imageSharingMode = src.imageSharingMode;
- queueFamilyIndexCount = src.queueFamilyIndexCount;
- pQueueFamilyIndices = nullptr;
- preTransform = src.preTransform;
- compositeAlpha = src.compositeAlpha;
- presentMode = src.presentMode;
- clipped = src.clipped;
- oldSwapchain = src.oldSwapchain;
- pNext = SafePnextCopy(src.pNext);
- if (src.pQueueFamilyIndices) {
- pQueueFamilyIndices = new uint32_t[src.queueFamilyIndexCount];
- memcpy ((void *)pQueueFamilyIndices, (void *)src.pQueueFamilyIndices, sizeof(uint32_t)*src.queueFamilyIndexCount);
- }
-
- return *this;
-}
-
-safe_VkSwapchainCreateInfoKHR::~safe_VkSwapchainCreateInfoKHR()
-{
- if (pQueueFamilyIndices)
- delete[] pQueueFamilyIndices;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkSwapchainCreateInfoKHR::initialize(const VkSwapchainCreateInfoKHR* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- surface = in_struct->surface;
- minImageCount = in_struct->minImageCount;
- imageFormat = in_struct->imageFormat;
- imageColorSpace = in_struct->imageColorSpace;
- imageExtent = in_struct->imageExtent;
- imageArrayLayers = in_struct->imageArrayLayers;
- imageUsage = in_struct->imageUsage;
- imageSharingMode = in_struct->imageSharingMode;
- queueFamilyIndexCount = in_struct->queueFamilyIndexCount;
- pQueueFamilyIndices = nullptr;
- preTransform = in_struct->preTransform;
- compositeAlpha = in_struct->compositeAlpha;
- presentMode = in_struct->presentMode;
- clipped = in_struct->clipped;
- oldSwapchain = in_struct->oldSwapchain;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pQueueFamilyIndices) {
- pQueueFamilyIndices = new uint32_t[in_struct->queueFamilyIndexCount];
- memcpy ((void *)pQueueFamilyIndices, (void *)in_struct->pQueueFamilyIndices, sizeof(uint32_t)*in_struct->queueFamilyIndexCount);
- }
-}
-
-void safe_VkSwapchainCreateInfoKHR::initialize(const safe_VkSwapchainCreateInfoKHR* src)
-{
- sType = src->sType;
- flags = src->flags;
- surface = src->surface;
- minImageCount = src->minImageCount;
- imageFormat = src->imageFormat;
- imageColorSpace = src->imageColorSpace;
- imageExtent = src->imageExtent;
- imageArrayLayers = src->imageArrayLayers;
- imageUsage = src->imageUsage;
- imageSharingMode = src->imageSharingMode;
- queueFamilyIndexCount = src->queueFamilyIndexCount;
- pQueueFamilyIndices = nullptr;
- preTransform = src->preTransform;
- compositeAlpha = src->compositeAlpha;
- presentMode = src->presentMode;
- clipped = src->clipped;
- oldSwapchain = src->oldSwapchain;
- pNext = SafePnextCopy(src->pNext);
- if (src->pQueueFamilyIndices) {
- pQueueFamilyIndices = new uint32_t[src->queueFamilyIndexCount];
- memcpy ((void *)pQueueFamilyIndices, (void *)src->pQueueFamilyIndices, sizeof(uint32_t)*src->queueFamilyIndexCount);
- }
-}
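
(Editorial sketch, not part of the removed file: the safe_VkSwapchainCreateInfoKHR wrapper deleted above exists so a layer can keep its own deep copy of an application-provided create info. A minimal usage sketch follows, assuming the declarations from layers/generated/vk_safe_struct.h are in scope; the helper name RecordSwapchainCreateInfo and the state-map comment are hypothetical.)

// Hedged usage sketch -- illustrative only, not code from this commit.
#include <memory>
#include "vk_safe_struct.h"  // declares safe_VkSwapchainCreateInfoKHR (assumed include path)

void RecordSwapchainCreateInfo(const VkSwapchainCreateInfoKHR* pCreateInfo) {
    // The constructor shown above deep-copies the pNext chain and the
    // pQueueFamilyIndices array, so this snapshot stays valid even after the
    // application's input structures go out of scope.
    auto snapshot = std::make_unique<safe_VkSwapchainCreateInfoKHR>(pCreateInfo);
    // ... hypothetically store 'snapshot' in the layer's swapchain state tracking ...
    // Its destructor later delete[]s pQueueFamilyIndices and frees the copied pNext chain.
}
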
-
-safe_VkPresentInfoKHR::safe_VkPresentInfoKHR(const VkPresentInfoKHR* in_struct) :
- sType(in_struct->sType),
- waitSemaphoreCount(in_struct->waitSemaphoreCount),
- pWaitSemaphores(nullptr),
- swapchainCount(in_struct->swapchainCount),
- pSwapchains(nullptr),
- pImageIndices(nullptr),
- pResults(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (waitSemaphoreCount && in_struct->pWaitSemaphores) {
- pWaitSemaphores = new VkSemaphore[waitSemaphoreCount];
- for (uint32_t i = 0; i < waitSemaphoreCount; ++i) {
- pWaitSemaphores[i] = in_struct->pWaitSemaphores[i];
- }
- }
- if (swapchainCount && in_struct->pSwapchains) {
- pSwapchains = new VkSwapchainKHR[swapchainCount];
- for (uint32_t i = 0; i < swapchainCount; ++i) {
- pSwapchains[i] = in_struct->pSwapchains[i];
- }
- }
- if (in_struct->pImageIndices) {
- pImageIndices = new uint32_t[in_struct->swapchainCount];
- memcpy ((void *)pImageIndices, (void *)in_struct->pImageIndices, sizeof(uint32_t)*in_struct->swapchainCount);
- }
- if (in_struct->pResults) {
- pResults = new VkResult[in_struct->swapchainCount];
- memcpy ((void *)pResults, (void *)in_struct->pResults, sizeof(VkResult)*in_struct->swapchainCount);
- }
-}
-
-safe_VkPresentInfoKHR::safe_VkPresentInfoKHR() :
- pNext(nullptr),
- pWaitSemaphores(nullptr),
- pSwapchains(nullptr),
- pImageIndices(nullptr),
- pResults(nullptr)
-{}
-
-safe_VkPresentInfoKHR::safe_VkPresentInfoKHR(const safe_VkPresentInfoKHR& src)
-{
- sType = src.sType;
- waitSemaphoreCount = src.waitSemaphoreCount;
- pWaitSemaphores = nullptr;
- swapchainCount = src.swapchainCount;
- pSwapchains = nullptr;
- pImageIndices = nullptr;
- pResults = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (waitSemaphoreCount && src.pWaitSemaphores) {
- pWaitSemaphores = new VkSemaphore[waitSemaphoreCount];
- for (uint32_t i = 0; i < waitSemaphoreCount; ++i) {
- pWaitSemaphores[i] = src.pWaitSemaphores[i];
- }
- }
- if (swapchainCount && src.pSwapchains) {
- pSwapchains = new VkSwapchainKHR[swapchainCount];
- for (uint32_t i = 0; i < swapchainCount; ++i) {
- pSwapchains[i] = src.pSwapchains[i];
- }
- }
- if (src.pImageIndices) {
- pImageIndices = new uint32_t[src.swapchainCount];
- memcpy ((void *)pImageIndices, (void *)src.pImageIndices, sizeof(uint32_t)*src.swapchainCount);
- }
- if (src.pResults) {
- pResults = new VkResult[src.swapchainCount];
- memcpy ((void *)pResults, (void *)src.pResults, sizeof(VkResult)*src.swapchainCount);
- }
-}
-
-safe_VkPresentInfoKHR& safe_VkPresentInfoKHR::operator=(const safe_VkPresentInfoKHR& src)
-{
- if (&src == this) return *this;
-
- if (pWaitSemaphores)
- delete[] pWaitSemaphores;
- if (pSwapchains)
- delete[] pSwapchains;
- if (pImageIndices)
- delete[] pImageIndices;
- if (pResults)
- delete[] pResults;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- waitSemaphoreCount = src.waitSemaphoreCount;
- pWaitSemaphores = nullptr;
- swapchainCount = src.swapchainCount;
- pSwapchains = nullptr;
- pImageIndices = nullptr;
- pResults = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (waitSemaphoreCount && src.pWaitSemaphores) {
- pWaitSemaphores = new VkSemaphore[waitSemaphoreCount];
- for (uint32_t i = 0; i < waitSemaphoreCount; ++i) {
- pWaitSemaphores[i] = src.pWaitSemaphores[i];
- }
- }
- if (swapchainCount && src.pSwapchains) {
- pSwapchains = new VkSwapchainKHR[swapchainCount];
- for (uint32_t i = 0; i < swapchainCount; ++i) {
- pSwapchains[i] = src.pSwapchains[i];
- }
- }
- if (src.pImageIndices) {
- pImageIndices = new uint32_t[src.swapchainCount];
- memcpy ((void *)pImageIndices, (void *)src.pImageIndices, sizeof(uint32_t)*src.swapchainCount);
- }
- if (src.pResults) {
- pResults = new VkResult[src.swapchainCount];
- memcpy ((void *)pResults, (void *)src.pResults, sizeof(VkResult)*src.swapchainCount);
- }
-
- return *this;
-}
-
-safe_VkPresentInfoKHR::~safe_VkPresentInfoKHR()
-{
- if (pWaitSemaphores)
- delete[] pWaitSemaphores;
- if (pSwapchains)
- delete[] pSwapchains;
- if (pImageIndices)
- delete[] pImageIndices;
- if (pResults)
- delete[] pResults;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPresentInfoKHR::initialize(const VkPresentInfoKHR* in_struct)
-{
- sType = in_struct->sType;
- waitSemaphoreCount = in_struct->waitSemaphoreCount;
- pWaitSemaphores = nullptr;
- swapchainCount = in_struct->swapchainCount;
- pSwapchains = nullptr;
- pImageIndices = nullptr;
- pResults = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (waitSemaphoreCount && in_struct->pWaitSemaphores) {
- pWaitSemaphores = new VkSemaphore[waitSemaphoreCount];
- for (uint32_t i = 0; i < waitSemaphoreCount; ++i) {
- pWaitSemaphores[i] = in_struct->pWaitSemaphores[i];
- }
- }
- if (swapchainCount && in_struct->pSwapchains) {
- pSwapchains = new VkSwapchainKHR[swapchainCount];
- for (uint32_t i = 0; i < swapchainCount; ++i) {
- pSwapchains[i] = in_struct->pSwapchains[i];
- }
- }
- if (in_struct->pImageIndices) {
- pImageIndices = new uint32_t[in_struct->swapchainCount];
- memcpy ((void *)pImageIndices, (void *)in_struct->pImageIndices, sizeof(uint32_t)*in_struct->swapchainCount);
- }
- if (in_struct->pResults) {
- pResults = new VkResult[in_struct->swapchainCount];
- memcpy ((void *)pResults, (void *)in_struct->pResults, sizeof(VkResult)*in_struct->swapchainCount);
- }
-}
-
-void safe_VkPresentInfoKHR::initialize(const safe_VkPresentInfoKHR* src)
-{
- sType = src->sType;
- waitSemaphoreCount = src->waitSemaphoreCount;
- pWaitSemaphores = nullptr;
- swapchainCount = src->swapchainCount;
- pSwapchains = nullptr;
- pImageIndices = nullptr;
- pResults = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (waitSemaphoreCount && src->pWaitSemaphores) {
- pWaitSemaphores = new VkSemaphore[waitSemaphoreCount];
- for (uint32_t i = 0; i < waitSemaphoreCount; ++i) {
- pWaitSemaphores[i] = src->pWaitSemaphores[i];
- }
- }
- if (swapchainCount && src->pSwapchains) {
- pSwapchains = new VkSwapchainKHR[swapchainCount];
- for (uint32_t i = 0; i < swapchainCount; ++i) {
- pSwapchains[i] = src->pSwapchains[i];
- }
- }
- if (src->pImageIndices) {
- pImageIndices = new uint32_t[src->swapchainCount];
- memcpy ((void *)pImageIndices, (void *)src->pImageIndices, sizeof(uint32_t)*src->swapchainCount);
- }
- if (src->pResults) {
- pResults = new VkResult[src->swapchainCount];
- memcpy ((void *)pResults, (void *)src->pResults, sizeof(VkResult)*src->swapchainCount);
- }
-}
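
(Editorial sketch, not part of the removed file: safe_VkPresentInfoKHR deep-copies every counted array in VkPresentInfoKHR, so the wrapper can safely outlive the call that supplied the raw struct. The function below and its history parameter are hypothetical illustrations of that ownership model.)

// Hedged usage sketch -- illustrative only.
#include <vector>
#include "vk_safe_struct.h"  // declares safe_VkPresentInfoKHR (assumed include path)

void SnapshotPresentRequest(const VkPresentInfoKHR* pPresentInfo,
                            std::vector<safe_VkPresentInfoKHR>& history) {
    // Constructing from the raw struct copies the semaphore, swapchain,
    // image-index and result arrays as well as the pNext chain.
    history.emplace_back(pPresentInfo);
    // Each element owns its new[]-allocated arrays; the destructor shown above
    // delete[]s them and calls FreePnextChain, so the stored snapshots remain
    // valid after the original VkPresentInfoKHR has been freed by the app.
}
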
-
-safe_VkImageSwapchainCreateInfoKHR::safe_VkImageSwapchainCreateInfoKHR(const VkImageSwapchainCreateInfoKHR* in_struct) :
- sType(in_struct->sType),
- swapchain(in_struct->swapchain)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkImageSwapchainCreateInfoKHR::safe_VkImageSwapchainCreateInfoKHR() :
- pNext(nullptr)
-{}
-
-safe_VkImageSwapchainCreateInfoKHR::safe_VkImageSwapchainCreateInfoKHR(const safe_VkImageSwapchainCreateInfoKHR& src)
-{
- sType = src.sType;
- swapchain = src.swapchain;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkImageSwapchainCreateInfoKHR& safe_VkImageSwapchainCreateInfoKHR::operator=(const safe_VkImageSwapchainCreateInfoKHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- swapchain = src.swapchain;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkImageSwapchainCreateInfoKHR::~safe_VkImageSwapchainCreateInfoKHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkImageSwapchainCreateInfoKHR::initialize(const VkImageSwapchainCreateInfoKHR* in_struct)
-{
- sType = in_struct->sType;
- swapchain = in_struct->swapchain;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkImageSwapchainCreateInfoKHR::initialize(const safe_VkImageSwapchainCreateInfoKHR* src)
-{
- sType = src->sType;
- swapchain = src->swapchain;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkBindImageMemorySwapchainInfoKHR::safe_VkBindImageMemorySwapchainInfoKHR(const VkBindImageMemorySwapchainInfoKHR* in_struct) :
- sType(in_struct->sType),
- swapchain(in_struct->swapchain),
- imageIndex(in_struct->imageIndex)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkBindImageMemorySwapchainInfoKHR::safe_VkBindImageMemorySwapchainInfoKHR() :
- pNext(nullptr)
-{}
-
-safe_VkBindImageMemorySwapchainInfoKHR::safe_VkBindImageMemorySwapchainInfoKHR(const safe_VkBindImageMemorySwapchainInfoKHR& src)
-{
- sType = src.sType;
- swapchain = src.swapchain;
- imageIndex = src.imageIndex;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkBindImageMemorySwapchainInfoKHR& safe_VkBindImageMemorySwapchainInfoKHR::operator=(const safe_VkBindImageMemorySwapchainInfoKHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- swapchain = src.swapchain;
- imageIndex = src.imageIndex;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkBindImageMemorySwapchainInfoKHR::~safe_VkBindImageMemorySwapchainInfoKHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkBindImageMemorySwapchainInfoKHR::initialize(const VkBindImageMemorySwapchainInfoKHR* in_struct)
-{
- sType = in_struct->sType;
- swapchain = in_struct->swapchain;
- imageIndex = in_struct->imageIndex;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkBindImageMemorySwapchainInfoKHR::initialize(const safe_VkBindImageMemorySwapchainInfoKHR* src)
-{
- sType = src->sType;
- swapchain = src->swapchain;
- imageIndex = src->imageIndex;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkAcquireNextImageInfoKHR::safe_VkAcquireNextImageInfoKHR(const VkAcquireNextImageInfoKHR* in_struct) :
- sType(in_struct->sType),
- swapchain(in_struct->swapchain),
- timeout(in_struct->timeout),
- semaphore(in_struct->semaphore),
- fence(in_struct->fence),
- deviceMask(in_struct->deviceMask)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkAcquireNextImageInfoKHR::safe_VkAcquireNextImageInfoKHR() :
- pNext(nullptr)
-{}
-
-safe_VkAcquireNextImageInfoKHR::safe_VkAcquireNextImageInfoKHR(const safe_VkAcquireNextImageInfoKHR& src)
-{
- sType = src.sType;
- swapchain = src.swapchain;
- timeout = src.timeout;
- semaphore = src.semaphore;
- fence = src.fence;
- deviceMask = src.deviceMask;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkAcquireNextImageInfoKHR& safe_VkAcquireNextImageInfoKHR::operator=(const safe_VkAcquireNextImageInfoKHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- swapchain = src.swapchain;
- timeout = src.timeout;
- semaphore = src.semaphore;
- fence = src.fence;
- deviceMask = src.deviceMask;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkAcquireNextImageInfoKHR::~safe_VkAcquireNextImageInfoKHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkAcquireNextImageInfoKHR::initialize(const VkAcquireNextImageInfoKHR* in_struct)
-{
- sType = in_struct->sType;
- swapchain = in_struct->swapchain;
- timeout = in_struct->timeout;
- semaphore = in_struct->semaphore;
- fence = in_struct->fence;
- deviceMask = in_struct->deviceMask;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkAcquireNextImageInfoKHR::initialize(const safe_VkAcquireNextImageInfoKHR* src)
-{
- sType = src->sType;
- swapchain = src->swapchain;
- timeout = src->timeout;
- semaphore = src->semaphore;
- fence = src->fence;
- deviceMask = src->deviceMask;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkDeviceGroupPresentCapabilitiesKHR::safe_VkDeviceGroupPresentCapabilitiesKHR(const VkDeviceGroupPresentCapabilitiesKHR* in_struct) :
- sType(in_struct->sType),
- modes(in_struct->modes)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- for (uint32_t i = 0; i < VK_MAX_DEVICE_GROUP_SIZE; ++i) {
- presentMask[i] = in_struct->presentMask[i];
- }
-}
-
-safe_VkDeviceGroupPresentCapabilitiesKHR::safe_VkDeviceGroupPresentCapabilitiesKHR() :
- pNext(nullptr)
-{}
-
-safe_VkDeviceGroupPresentCapabilitiesKHR::safe_VkDeviceGroupPresentCapabilitiesKHR(const safe_VkDeviceGroupPresentCapabilitiesKHR& src)
-{
- sType = src.sType;
- modes = src.modes;
- pNext = SafePnextCopy(src.pNext);
- for (uint32_t i = 0; i < VK_MAX_DEVICE_GROUP_SIZE; ++i) {
- presentMask[i] = src.presentMask[i];
- }
-}
-
-safe_VkDeviceGroupPresentCapabilitiesKHR& safe_VkDeviceGroupPresentCapabilitiesKHR::operator=(const safe_VkDeviceGroupPresentCapabilitiesKHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- modes = src.modes;
- pNext = SafePnextCopy(src.pNext);
- for (uint32_t i = 0; i < VK_MAX_DEVICE_GROUP_SIZE; ++i) {
- presentMask[i] = src.presentMask[i];
- }
-
- return *this;
-}
-
-safe_VkDeviceGroupPresentCapabilitiesKHR::~safe_VkDeviceGroupPresentCapabilitiesKHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDeviceGroupPresentCapabilitiesKHR::initialize(const VkDeviceGroupPresentCapabilitiesKHR* in_struct)
-{
- sType = in_struct->sType;
- modes = in_struct->modes;
- pNext = SafePnextCopy(in_struct->pNext);
- for (uint32_t i = 0; i < VK_MAX_DEVICE_GROUP_SIZE; ++i) {
- presentMask[i] = in_struct->presentMask[i];
- }
-}
-
-void safe_VkDeviceGroupPresentCapabilitiesKHR::initialize(const safe_VkDeviceGroupPresentCapabilitiesKHR* src)
-{
- sType = src->sType;
- modes = src->modes;
- pNext = SafePnextCopy(src->pNext);
- for (uint32_t i = 0; i < VK_MAX_DEVICE_GROUP_SIZE; ++i) {
- presentMask[i] = src->presentMask[i];
- }
-}
-
-safe_VkDeviceGroupPresentInfoKHR::safe_VkDeviceGroupPresentInfoKHR(const VkDeviceGroupPresentInfoKHR* in_struct) :
- sType(in_struct->sType),
- swapchainCount(in_struct->swapchainCount),
- pDeviceMasks(nullptr),
- mode(in_struct->mode)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pDeviceMasks) {
- pDeviceMasks = new uint32_t[in_struct->swapchainCount];
- memcpy ((void *)pDeviceMasks, (void *)in_struct->pDeviceMasks, sizeof(uint32_t)*in_struct->swapchainCount);
- }
-}
-
-safe_VkDeviceGroupPresentInfoKHR::safe_VkDeviceGroupPresentInfoKHR() :
- pNext(nullptr),
- pDeviceMasks(nullptr)
-{}
-
-safe_VkDeviceGroupPresentInfoKHR::safe_VkDeviceGroupPresentInfoKHR(const safe_VkDeviceGroupPresentInfoKHR& src)
-{
- sType = src.sType;
- swapchainCount = src.swapchainCount;
- pDeviceMasks = nullptr;
- mode = src.mode;
- pNext = SafePnextCopy(src.pNext);
- if (src.pDeviceMasks) {
- pDeviceMasks = new uint32_t[src.swapchainCount];
- memcpy ((void *)pDeviceMasks, (void *)src.pDeviceMasks, sizeof(uint32_t)*src.swapchainCount);
- }
-}
-
-safe_VkDeviceGroupPresentInfoKHR& safe_VkDeviceGroupPresentInfoKHR::operator=(const safe_VkDeviceGroupPresentInfoKHR& src)
-{
- if (&src == this) return *this;
-
- if (pDeviceMasks)
- delete[] pDeviceMasks;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- swapchainCount = src.swapchainCount;
- pDeviceMasks = nullptr;
- mode = src.mode;
- pNext = SafePnextCopy(src.pNext);
- if (src.pDeviceMasks) {
- pDeviceMasks = new uint32_t[src.swapchainCount];
- memcpy ((void *)pDeviceMasks, (void *)src.pDeviceMasks, sizeof(uint32_t)*src.swapchainCount);
- }
-
- return *this;
-}
-
-safe_VkDeviceGroupPresentInfoKHR::~safe_VkDeviceGroupPresentInfoKHR()
-{
- if (pDeviceMasks)
- delete[] pDeviceMasks;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDeviceGroupPresentInfoKHR::initialize(const VkDeviceGroupPresentInfoKHR* in_struct)
-{
- sType = in_struct->sType;
- swapchainCount = in_struct->swapchainCount;
- pDeviceMasks = nullptr;
- mode = in_struct->mode;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pDeviceMasks) {
- pDeviceMasks = new uint32_t[in_struct->swapchainCount];
- memcpy ((void *)pDeviceMasks, (void *)in_struct->pDeviceMasks, sizeof(uint32_t)*in_struct->swapchainCount);
- }
-}
-
-void safe_VkDeviceGroupPresentInfoKHR::initialize(const safe_VkDeviceGroupPresentInfoKHR* src)
-{
- sType = src->sType;
- swapchainCount = src->swapchainCount;
- pDeviceMasks = nullptr;
- mode = src->mode;
- pNext = SafePnextCopy(src->pNext);
- if (src->pDeviceMasks) {
- pDeviceMasks = new uint32_t[src->swapchainCount];
- memcpy ((void *)pDeviceMasks, (void *)src->pDeviceMasks, sizeof(uint32_t)*src->swapchainCount);
- }
-}
-
-safe_VkDeviceGroupSwapchainCreateInfoKHR::safe_VkDeviceGroupSwapchainCreateInfoKHR(const VkDeviceGroupSwapchainCreateInfoKHR* in_struct) :
- sType(in_struct->sType),
- modes(in_struct->modes)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkDeviceGroupSwapchainCreateInfoKHR::safe_VkDeviceGroupSwapchainCreateInfoKHR() :
- pNext(nullptr)
-{}
-
-safe_VkDeviceGroupSwapchainCreateInfoKHR::safe_VkDeviceGroupSwapchainCreateInfoKHR(const safe_VkDeviceGroupSwapchainCreateInfoKHR& src)
-{
- sType = src.sType;
- modes = src.modes;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkDeviceGroupSwapchainCreateInfoKHR& safe_VkDeviceGroupSwapchainCreateInfoKHR::operator=(const safe_VkDeviceGroupSwapchainCreateInfoKHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- modes = src.modes;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkDeviceGroupSwapchainCreateInfoKHR::~safe_VkDeviceGroupSwapchainCreateInfoKHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDeviceGroupSwapchainCreateInfoKHR::initialize(const VkDeviceGroupSwapchainCreateInfoKHR* in_struct)
-{
- sType = in_struct->sType;
- modes = in_struct->modes;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkDeviceGroupSwapchainCreateInfoKHR::initialize(const safe_VkDeviceGroupSwapchainCreateInfoKHR* src)
-{
- sType = src->sType;
- modes = src->modes;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkDisplayPropertiesKHR::safe_VkDisplayPropertiesKHR(const VkDisplayPropertiesKHR* in_struct) :
- display(in_struct->display),
- physicalDimensions(in_struct->physicalDimensions),
- physicalResolution(in_struct->physicalResolution),
- supportedTransforms(in_struct->supportedTransforms),
- planeReorderPossible(in_struct->planeReorderPossible),
- persistentContent(in_struct->persistentContent)
-{
- displayName = SafeStringCopy(in_struct->displayName);
-}
-
-safe_VkDisplayPropertiesKHR::safe_VkDisplayPropertiesKHR() :
- displayName(nullptr)
-{}
-
-safe_VkDisplayPropertiesKHR::safe_VkDisplayPropertiesKHR(const safe_VkDisplayPropertiesKHR& src)
-{
- display = src.display;
- physicalDimensions = src.physicalDimensions;
- physicalResolution = src.physicalResolution;
- supportedTransforms = src.supportedTransforms;
- planeReorderPossible = src.planeReorderPossible;
- persistentContent = src.persistentContent;
- displayName = SafeStringCopy(src.displayName);
-}
-
-safe_VkDisplayPropertiesKHR& safe_VkDisplayPropertiesKHR::operator=(const safe_VkDisplayPropertiesKHR& src)
-{
- if (&src == this) return *this;
-
- if (displayName) delete [] displayName;
-
- display = src.display;
- physicalDimensions = src.physicalDimensions;
- physicalResolution = src.physicalResolution;
- supportedTransforms = src.supportedTransforms;
- planeReorderPossible = src.planeReorderPossible;
- persistentContent = src.persistentContent;
- displayName = SafeStringCopy(src.displayName);
-
- return *this;
-}
-
-safe_VkDisplayPropertiesKHR::~safe_VkDisplayPropertiesKHR()
-{
- if (displayName) delete [] displayName;
-}
-
-void safe_VkDisplayPropertiesKHR::initialize(const VkDisplayPropertiesKHR* in_struct)
-{
- display = in_struct->display;
- physicalDimensions = in_struct->physicalDimensions;
- physicalResolution = in_struct->physicalResolution;
- supportedTransforms = in_struct->supportedTransforms;
- planeReorderPossible = in_struct->planeReorderPossible;
- persistentContent = in_struct->persistentContent;
- displayName = SafeStringCopy(in_struct->displayName);
-}
-
-void safe_VkDisplayPropertiesKHR::initialize(const safe_VkDisplayPropertiesKHR* src)
-{
- display = src->display;
- physicalDimensions = src->physicalDimensions;
- physicalResolution = src->physicalResolution;
- supportedTransforms = src->supportedTransforms;
- planeReorderPossible = src->planeReorderPossible;
- persistentContent = src->persistentContent;
- displayName = SafeStringCopy(src->displayName);
-}
-
-safe_VkDisplayModeCreateInfoKHR::safe_VkDisplayModeCreateInfoKHR(const VkDisplayModeCreateInfoKHR* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- parameters(in_struct->parameters)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkDisplayModeCreateInfoKHR::safe_VkDisplayModeCreateInfoKHR() :
- pNext(nullptr)
-{}
-
-safe_VkDisplayModeCreateInfoKHR::safe_VkDisplayModeCreateInfoKHR(const safe_VkDisplayModeCreateInfoKHR& src)
-{
- sType = src.sType;
- flags = src.flags;
- parameters = src.parameters;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkDisplayModeCreateInfoKHR& safe_VkDisplayModeCreateInfoKHR::operator=(const safe_VkDisplayModeCreateInfoKHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- parameters = src.parameters;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkDisplayModeCreateInfoKHR::~safe_VkDisplayModeCreateInfoKHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDisplayModeCreateInfoKHR::initialize(const VkDisplayModeCreateInfoKHR* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- parameters = in_struct->parameters;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkDisplayModeCreateInfoKHR::initialize(const safe_VkDisplayModeCreateInfoKHR* src)
-{
- sType = src->sType;
- flags = src->flags;
- parameters = src->parameters;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkDisplaySurfaceCreateInfoKHR::safe_VkDisplaySurfaceCreateInfoKHR(const VkDisplaySurfaceCreateInfoKHR* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- displayMode(in_struct->displayMode),
- planeIndex(in_struct->planeIndex),
- planeStackIndex(in_struct->planeStackIndex),
- transform(in_struct->transform),
- globalAlpha(in_struct->globalAlpha),
- alphaMode(in_struct->alphaMode),
- imageExtent(in_struct->imageExtent)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkDisplaySurfaceCreateInfoKHR::safe_VkDisplaySurfaceCreateInfoKHR() :
- pNext(nullptr)
-{}
-
-safe_VkDisplaySurfaceCreateInfoKHR::safe_VkDisplaySurfaceCreateInfoKHR(const safe_VkDisplaySurfaceCreateInfoKHR& src)
-{
- sType = src.sType;
- flags = src.flags;
- displayMode = src.displayMode;
- planeIndex = src.planeIndex;
- planeStackIndex = src.planeStackIndex;
- transform = src.transform;
- globalAlpha = src.globalAlpha;
- alphaMode = src.alphaMode;
- imageExtent = src.imageExtent;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkDisplaySurfaceCreateInfoKHR& safe_VkDisplaySurfaceCreateInfoKHR::operator=(const safe_VkDisplaySurfaceCreateInfoKHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- displayMode = src.displayMode;
- planeIndex = src.planeIndex;
- planeStackIndex = src.planeStackIndex;
- transform = src.transform;
- globalAlpha = src.globalAlpha;
- alphaMode = src.alphaMode;
- imageExtent = src.imageExtent;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkDisplaySurfaceCreateInfoKHR::~safe_VkDisplaySurfaceCreateInfoKHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDisplaySurfaceCreateInfoKHR::initialize(const VkDisplaySurfaceCreateInfoKHR* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- displayMode = in_struct->displayMode;
- planeIndex = in_struct->planeIndex;
- planeStackIndex = in_struct->planeStackIndex;
- transform = in_struct->transform;
- globalAlpha = in_struct->globalAlpha;
- alphaMode = in_struct->alphaMode;
- imageExtent = in_struct->imageExtent;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkDisplaySurfaceCreateInfoKHR::initialize(const safe_VkDisplaySurfaceCreateInfoKHR* src)
-{
- sType = src->sType;
- flags = src->flags;
- displayMode = src->displayMode;
- planeIndex = src->planeIndex;
- planeStackIndex = src->planeStackIndex;
- transform = src->transform;
- globalAlpha = src->globalAlpha;
- alphaMode = src->alphaMode;
- imageExtent = src->imageExtent;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkDisplayPresentInfoKHR::safe_VkDisplayPresentInfoKHR(const VkDisplayPresentInfoKHR* in_struct) :
- sType(in_struct->sType),
- srcRect(in_struct->srcRect),
- dstRect(in_struct->dstRect),
- persistent(in_struct->persistent)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkDisplayPresentInfoKHR::safe_VkDisplayPresentInfoKHR() :
- pNext(nullptr)
-{}
-
-safe_VkDisplayPresentInfoKHR::safe_VkDisplayPresentInfoKHR(const safe_VkDisplayPresentInfoKHR& src)
-{
- sType = src.sType;
- srcRect = src.srcRect;
- dstRect = src.dstRect;
- persistent = src.persistent;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkDisplayPresentInfoKHR& safe_VkDisplayPresentInfoKHR::operator=(const safe_VkDisplayPresentInfoKHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- srcRect = src.srcRect;
- dstRect = src.dstRect;
- persistent = src.persistent;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkDisplayPresentInfoKHR::~safe_VkDisplayPresentInfoKHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDisplayPresentInfoKHR::initialize(const VkDisplayPresentInfoKHR* in_struct)
-{
- sType = in_struct->sType;
- srcRect = in_struct->srcRect;
- dstRect = in_struct->dstRect;
- persistent = in_struct->persistent;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkDisplayPresentInfoKHR::initialize(const safe_VkDisplayPresentInfoKHR* src)
-{
- sType = src->sType;
- srcRect = src->srcRect;
- dstRect = src->dstRect;
- persistent = src->persistent;
- pNext = SafePnextCopy(src->pNext);
-}
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-
-safe_VkImportMemoryWin32HandleInfoKHR::safe_VkImportMemoryWin32HandleInfoKHR(const VkImportMemoryWin32HandleInfoKHR* in_struct) :
- sType(in_struct->sType),
- handleType(in_struct->handleType),
- handle(in_struct->handle),
- name(in_struct->name)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkImportMemoryWin32HandleInfoKHR::safe_VkImportMemoryWin32HandleInfoKHR() :
- pNext(nullptr)
-{}
-
-safe_VkImportMemoryWin32HandleInfoKHR::safe_VkImportMemoryWin32HandleInfoKHR(const safe_VkImportMemoryWin32HandleInfoKHR& src)
-{
- sType = src.sType;
- handleType = src.handleType;
- handle = src.handle;
- name = src.name;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkImportMemoryWin32HandleInfoKHR& safe_VkImportMemoryWin32HandleInfoKHR::operator=(const safe_VkImportMemoryWin32HandleInfoKHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- handleType = src.handleType;
- handle = src.handle;
- name = src.name;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkImportMemoryWin32HandleInfoKHR::~safe_VkImportMemoryWin32HandleInfoKHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkImportMemoryWin32HandleInfoKHR::initialize(const VkImportMemoryWin32HandleInfoKHR* in_struct)
-{
- sType = in_struct->sType;
- handleType = in_struct->handleType;
- handle = in_struct->handle;
- name = in_struct->name;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkImportMemoryWin32HandleInfoKHR::initialize(const safe_VkImportMemoryWin32HandleInfoKHR* src)
-{
- sType = src->sType;
- handleType = src->handleType;
- handle = src->handle;
- name = src->name;
- pNext = SafePnextCopy(src->pNext);
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-
-safe_VkExportMemoryWin32HandleInfoKHR::safe_VkExportMemoryWin32HandleInfoKHR(const VkExportMemoryWin32HandleInfoKHR* in_struct) :
- sType(in_struct->sType),
- pAttributes(nullptr),
- dwAccess(in_struct->dwAccess),
- name(in_struct->name)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pAttributes) {
- pAttributes = new SECURITY_ATTRIBUTES(*in_struct->pAttributes);
- }
-}
-
-safe_VkExportMemoryWin32HandleInfoKHR::safe_VkExportMemoryWin32HandleInfoKHR() :
- pNext(nullptr),
- pAttributes(nullptr)
-{}
-
-safe_VkExportMemoryWin32HandleInfoKHR::safe_VkExportMemoryWin32HandleInfoKHR(const safe_VkExportMemoryWin32HandleInfoKHR& src)
-{
- sType = src.sType;
- pAttributes = nullptr;
- dwAccess = src.dwAccess;
- name = src.name;
- pNext = SafePnextCopy(src.pNext);
- if (src.pAttributes) {
- pAttributes = new SECURITY_ATTRIBUTES(*src.pAttributes);
- }
-}
-
-safe_VkExportMemoryWin32HandleInfoKHR& safe_VkExportMemoryWin32HandleInfoKHR::operator=(const safe_VkExportMemoryWin32HandleInfoKHR& src)
-{
- if (&src == this) return *this;
-
- if (pAttributes)
- delete pAttributes;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- pAttributes = nullptr;
- dwAccess = src.dwAccess;
- name = src.name;
- pNext = SafePnextCopy(src.pNext);
- if (src.pAttributes) {
- pAttributes = new SECURITY_ATTRIBUTES(*src.pAttributes);
- }
-
- return *this;
-}
-
-safe_VkExportMemoryWin32HandleInfoKHR::~safe_VkExportMemoryWin32HandleInfoKHR()
-{
- if (pAttributes)
- delete pAttributes;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkExportMemoryWin32HandleInfoKHR::initialize(const VkExportMemoryWin32HandleInfoKHR* in_struct)
-{
- sType = in_struct->sType;
- pAttributes = nullptr;
- dwAccess = in_struct->dwAccess;
- name = in_struct->name;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pAttributes) {
- pAttributes = new SECURITY_ATTRIBUTES(*in_struct->pAttributes);
- }
-}
-
-void safe_VkExportMemoryWin32HandleInfoKHR::initialize(const safe_VkExportMemoryWin32HandleInfoKHR* src)
-{
- sType = src->sType;
- pAttributes = nullptr;
- dwAccess = src->dwAccess;
- name = src->name;
- pNext = SafePnextCopy(src->pNext);
- if (src->pAttributes) {
- pAttributes = new SECURITY_ATTRIBUTES(*src->pAttributes);
- }
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-
-safe_VkMemoryWin32HandlePropertiesKHR::safe_VkMemoryWin32HandlePropertiesKHR(const VkMemoryWin32HandlePropertiesKHR* in_struct) :
- sType(in_struct->sType),
- memoryTypeBits(in_struct->memoryTypeBits)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkMemoryWin32HandlePropertiesKHR::safe_VkMemoryWin32HandlePropertiesKHR() :
- pNext(nullptr)
-{}
-
-safe_VkMemoryWin32HandlePropertiesKHR::safe_VkMemoryWin32HandlePropertiesKHR(const safe_VkMemoryWin32HandlePropertiesKHR& src)
-{
- sType = src.sType;
- memoryTypeBits = src.memoryTypeBits;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkMemoryWin32HandlePropertiesKHR& safe_VkMemoryWin32HandlePropertiesKHR::operator=(const safe_VkMemoryWin32HandlePropertiesKHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- memoryTypeBits = src.memoryTypeBits;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkMemoryWin32HandlePropertiesKHR::~safe_VkMemoryWin32HandlePropertiesKHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkMemoryWin32HandlePropertiesKHR::initialize(const VkMemoryWin32HandlePropertiesKHR* in_struct)
-{
- sType = in_struct->sType;
- memoryTypeBits = in_struct->memoryTypeBits;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkMemoryWin32HandlePropertiesKHR::initialize(const safe_VkMemoryWin32HandlePropertiesKHR* src)
-{
- sType = src->sType;
- memoryTypeBits = src->memoryTypeBits;
- pNext = SafePnextCopy(src->pNext);
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-
-safe_VkMemoryGetWin32HandleInfoKHR::safe_VkMemoryGetWin32HandleInfoKHR(const VkMemoryGetWin32HandleInfoKHR* in_struct) :
- sType(in_struct->sType),
- memory(in_struct->memory),
- handleType(in_struct->handleType)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkMemoryGetWin32HandleInfoKHR::safe_VkMemoryGetWin32HandleInfoKHR() :
- pNext(nullptr)
-{}
-
-safe_VkMemoryGetWin32HandleInfoKHR::safe_VkMemoryGetWin32HandleInfoKHR(const safe_VkMemoryGetWin32HandleInfoKHR& src)
-{
- sType = src.sType;
- memory = src.memory;
- handleType = src.handleType;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkMemoryGetWin32HandleInfoKHR& safe_VkMemoryGetWin32HandleInfoKHR::operator=(const safe_VkMemoryGetWin32HandleInfoKHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- memory = src.memory;
- handleType = src.handleType;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkMemoryGetWin32HandleInfoKHR::~safe_VkMemoryGetWin32HandleInfoKHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkMemoryGetWin32HandleInfoKHR::initialize(const VkMemoryGetWin32HandleInfoKHR* in_struct)
-{
- sType = in_struct->sType;
- memory = in_struct->memory;
- handleType = in_struct->handleType;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkMemoryGetWin32HandleInfoKHR::initialize(const safe_VkMemoryGetWin32HandleInfoKHR* src)
-{
- sType = src->sType;
- memory = src->memory;
- handleType = src->handleType;
- pNext = SafePnextCopy(src->pNext);
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-
-safe_VkImportMemoryFdInfoKHR::safe_VkImportMemoryFdInfoKHR(const VkImportMemoryFdInfoKHR* in_struct) :
- sType(in_struct->sType),
- handleType(in_struct->handleType),
- fd(in_struct->fd)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkImportMemoryFdInfoKHR::safe_VkImportMemoryFdInfoKHR() :
- pNext(nullptr)
-{}
-
-safe_VkImportMemoryFdInfoKHR::safe_VkImportMemoryFdInfoKHR(const safe_VkImportMemoryFdInfoKHR& src)
-{
- sType = src.sType;
- handleType = src.handleType;
- fd = src.fd;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkImportMemoryFdInfoKHR& safe_VkImportMemoryFdInfoKHR::operator=(const safe_VkImportMemoryFdInfoKHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- handleType = src.handleType;
- fd = src.fd;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkImportMemoryFdInfoKHR::~safe_VkImportMemoryFdInfoKHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkImportMemoryFdInfoKHR::initialize(const VkImportMemoryFdInfoKHR* in_struct)
-{
- sType = in_struct->sType;
- handleType = in_struct->handleType;
- fd = in_struct->fd;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkImportMemoryFdInfoKHR::initialize(const safe_VkImportMemoryFdInfoKHR* src)
-{
- sType = src->sType;
- handleType = src->handleType;
- fd = src->fd;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkMemoryFdPropertiesKHR::safe_VkMemoryFdPropertiesKHR(const VkMemoryFdPropertiesKHR* in_struct) :
- sType(in_struct->sType),
- memoryTypeBits(in_struct->memoryTypeBits)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkMemoryFdPropertiesKHR::safe_VkMemoryFdPropertiesKHR() :
- pNext(nullptr)
-{}
-
-safe_VkMemoryFdPropertiesKHR::safe_VkMemoryFdPropertiesKHR(const safe_VkMemoryFdPropertiesKHR& src)
-{
- sType = src.sType;
- memoryTypeBits = src.memoryTypeBits;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkMemoryFdPropertiesKHR& safe_VkMemoryFdPropertiesKHR::operator=(const safe_VkMemoryFdPropertiesKHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- memoryTypeBits = src.memoryTypeBits;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkMemoryFdPropertiesKHR::~safe_VkMemoryFdPropertiesKHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkMemoryFdPropertiesKHR::initialize(const VkMemoryFdPropertiesKHR* in_struct)
-{
- sType = in_struct->sType;
- memoryTypeBits = in_struct->memoryTypeBits;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkMemoryFdPropertiesKHR::initialize(const safe_VkMemoryFdPropertiesKHR* src)
-{
- sType = src->sType;
- memoryTypeBits = src->memoryTypeBits;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkMemoryGetFdInfoKHR::safe_VkMemoryGetFdInfoKHR(const VkMemoryGetFdInfoKHR* in_struct) :
- sType(in_struct->sType),
- memory(in_struct->memory),
- handleType(in_struct->handleType)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkMemoryGetFdInfoKHR::safe_VkMemoryGetFdInfoKHR() :
- pNext(nullptr)
-{}
-
-safe_VkMemoryGetFdInfoKHR::safe_VkMemoryGetFdInfoKHR(const safe_VkMemoryGetFdInfoKHR& src)
-{
- sType = src.sType;
- memory = src.memory;
- handleType = src.handleType;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkMemoryGetFdInfoKHR& safe_VkMemoryGetFdInfoKHR::operator=(const safe_VkMemoryGetFdInfoKHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- memory = src.memory;
- handleType = src.handleType;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkMemoryGetFdInfoKHR::~safe_VkMemoryGetFdInfoKHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkMemoryGetFdInfoKHR::initialize(const VkMemoryGetFdInfoKHR* in_struct)
-{
- sType = in_struct->sType;
- memory = in_struct->memory;
- handleType = in_struct->handleType;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkMemoryGetFdInfoKHR::initialize(const safe_VkMemoryGetFdInfoKHR* src)
-{
- sType = src->sType;
- memory = src->memory;
- handleType = src->handleType;
- pNext = SafePnextCopy(src->pNext);
-}
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-
-safe_VkWin32KeyedMutexAcquireReleaseInfoKHR::safe_VkWin32KeyedMutexAcquireReleaseInfoKHR(const VkWin32KeyedMutexAcquireReleaseInfoKHR* in_struct) :
- sType(in_struct->sType),
- acquireCount(in_struct->acquireCount),
- pAcquireSyncs(nullptr),
- pAcquireKeys(nullptr),
- pAcquireTimeouts(nullptr),
- releaseCount(in_struct->releaseCount),
- pReleaseSyncs(nullptr),
- pReleaseKeys(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (acquireCount && in_struct->pAcquireSyncs) {
- pAcquireSyncs = new VkDeviceMemory[acquireCount];
- for (uint32_t i = 0; i < acquireCount; ++i) {
- pAcquireSyncs[i] = in_struct->pAcquireSyncs[i];
- }
- }
- if (in_struct->pAcquireKeys) {
- pAcquireKeys = new uint64_t[in_struct->acquireCount];
- memcpy ((void *)pAcquireKeys, (void *)in_struct->pAcquireKeys, sizeof(uint64_t)*in_struct->acquireCount);
- }
- if (in_struct->pAcquireTimeouts) {
- pAcquireTimeouts = new uint32_t[in_struct->acquireCount];
- memcpy ((void *)pAcquireTimeouts, (void *)in_struct->pAcquireTimeouts, sizeof(uint32_t)*in_struct->acquireCount);
- }
- if (releaseCount && in_struct->pReleaseSyncs) {
- pReleaseSyncs = new VkDeviceMemory[releaseCount];
- for (uint32_t i = 0; i < releaseCount; ++i) {
- pReleaseSyncs[i] = in_struct->pReleaseSyncs[i];
- }
- }
- if (in_struct->pReleaseKeys) {
- pReleaseKeys = new uint64_t[in_struct->releaseCount];
- memcpy ((void *)pReleaseKeys, (void *)in_struct->pReleaseKeys, sizeof(uint64_t)*in_struct->releaseCount);
- }
-}
-
-safe_VkWin32KeyedMutexAcquireReleaseInfoKHR::safe_VkWin32KeyedMutexAcquireReleaseInfoKHR() :
- pNext(nullptr),
- pAcquireSyncs(nullptr),
- pAcquireKeys(nullptr),
- pAcquireTimeouts(nullptr),
- pReleaseSyncs(nullptr),
- pReleaseKeys(nullptr)
-{}
-
-safe_VkWin32KeyedMutexAcquireReleaseInfoKHR::safe_VkWin32KeyedMutexAcquireReleaseInfoKHR(const safe_VkWin32KeyedMutexAcquireReleaseInfoKHR& src)
-{
- sType = src.sType;
- acquireCount = src.acquireCount;
- pAcquireSyncs = nullptr;
- pAcquireKeys = nullptr;
- pAcquireTimeouts = nullptr;
- releaseCount = src.releaseCount;
- pReleaseSyncs = nullptr;
- pReleaseKeys = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (acquireCount && src.pAcquireSyncs) {
- pAcquireSyncs = new VkDeviceMemory[acquireCount];
- for (uint32_t i = 0; i < acquireCount; ++i) {
- pAcquireSyncs[i] = src.pAcquireSyncs[i];
- }
- }
- if (src.pAcquireKeys) {
- pAcquireKeys = new uint64_t[src.acquireCount];
- memcpy ((void *)pAcquireKeys, (void *)src.pAcquireKeys, sizeof(uint64_t)*src.acquireCount);
- }
- if (src.pAcquireTimeouts) {
- pAcquireTimeouts = new uint32_t[src.acquireCount];
- memcpy ((void *)pAcquireTimeouts, (void *)src.pAcquireTimeouts, sizeof(uint32_t)*src.acquireCount);
- }
- if (releaseCount && src.pReleaseSyncs) {
- pReleaseSyncs = new VkDeviceMemory[releaseCount];
- for (uint32_t i = 0; i < releaseCount; ++i) {
- pReleaseSyncs[i] = src.pReleaseSyncs[i];
- }
- }
- if (src.pReleaseKeys) {
- pReleaseKeys = new uint64_t[src.releaseCount];
- memcpy ((void *)pReleaseKeys, (void *)src.pReleaseKeys, sizeof(uint64_t)*src.releaseCount);
- }
-}
-
-safe_VkWin32KeyedMutexAcquireReleaseInfoKHR& safe_VkWin32KeyedMutexAcquireReleaseInfoKHR::operator=(const safe_VkWin32KeyedMutexAcquireReleaseInfoKHR& src)
-{
- if (&src == this) return *this;
-
- if (pAcquireSyncs)
- delete[] pAcquireSyncs;
- if (pAcquireKeys)
- delete[] pAcquireKeys;
- if (pAcquireTimeouts)
- delete[] pAcquireTimeouts;
- if (pReleaseSyncs)
- delete[] pReleaseSyncs;
- if (pReleaseKeys)
- delete[] pReleaseKeys;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- acquireCount = src.acquireCount;
- pAcquireSyncs = nullptr;
- pAcquireKeys = nullptr;
- pAcquireTimeouts = nullptr;
- releaseCount = src.releaseCount;
- pReleaseSyncs = nullptr;
- pReleaseKeys = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (acquireCount && src.pAcquireSyncs) {
- pAcquireSyncs = new VkDeviceMemory[acquireCount];
- for (uint32_t i = 0; i < acquireCount; ++i) {
- pAcquireSyncs[i] = src.pAcquireSyncs[i];
- }
- }
- if (src.pAcquireKeys) {
- pAcquireKeys = new uint64_t[src.acquireCount];
- memcpy ((void *)pAcquireKeys, (void *)src.pAcquireKeys, sizeof(uint64_t)*src.acquireCount);
- }
- if (src.pAcquireTimeouts) {
- pAcquireTimeouts = new uint32_t[src.acquireCount];
- memcpy ((void *)pAcquireTimeouts, (void *)src.pAcquireTimeouts, sizeof(uint32_t)*src.acquireCount);
- }
- if (releaseCount && src.pReleaseSyncs) {
- pReleaseSyncs = new VkDeviceMemory[releaseCount];
- for (uint32_t i = 0; i < releaseCount; ++i) {
- pReleaseSyncs[i] = src.pReleaseSyncs[i];
- }
- }
- if (src.pReleaseKeys) {
- pReleaseKeys = new uint64_t[src.releaseCount];
- memcpy ((void *)pReleaseKeys, (void *)src.pReleaseKeys, sizeof(uint64_t)*src.releaseCount);
- }
-
- return *this;
-}
-
-safe_VkWin32KeyedMutexAcquireReleaseInfoKHR::~safe_VkWin32KeyedMutexAcquireReleaseInfoKHR()
-{
- if (pAcquireSyncs)
- delete[] pAcquireSyncs;
- if (pAcquireKeys)
- delete[] pAcquireKeys;
- if (pAcquireTimeouts)
- delete[] pAcquireTimeouts;
- if (pReleaseSyncs)
- delete[] pReleaseSyncs;
- if (pReleaseKeys)
- delete[] pReleaseKeys;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkWin32KeyedMutexAcquireReleaseInfoKHR::initialize(const VkWin32KeyedMutexAcquireReleaseInfoKHR* in_struct)
-{
- sType = in_struct->sType;
- acquireCount = in_struct->acquireCount;
- pAcquireSyncs = nullptr;
- pAcquireKeys = nullptr;
- pAcquireTimeouts = nullptr;
- releaseCount = in_struct->releaseCount;
- pReleaseSyncs = nullptr;
- pReleaseKeys = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (acquireCount && in_struct->pAcquireSyncs) {
- pAcquireSyncs = new VkDeviceMemory[acquireCount];
- for (uint32_t i = 0; i < acquireCount; ++i) {
- pAcquireSyncs[i] = in_struct->pAcquireSyncs[i];
- }
- }
- if (in_struct->pAcquireKeys) {
- pAcquireKeys = new uint64_t[in_struct->acquireCount];
- memcpy ((void *)pAcquireKeys, (void *)in_struct->pAcquireKeys, sizeof(uint64_t)*in_struct->acquireCount);
- }
- if (in_struct->pAcquireTimeouts) {
- pAcquireTimeouts = new uint32_t[in_struct->acquireCount];
- memcpy ((void *)pAcquireTimeouts, (void *)in_struct->pAcquireTimeouts, sizeof(uint32_t)*in_struct->acquireCount);
- }
- if (releaseCount && in_struct->pReleaseSyncs) {
- pReleaseSyncs = new VkDeviceMemory[releaseCount];
- for (uint32_t i = 0; i < releaseCount; ++i) {
- pReleaseSyncs[i] = in_struct->pReleaseSyncs[i];
- }
- }
- if (in_struct->pReleaseKeys) {
- pReleaseKeys = new uint64_t[in_struct->releaseCount];
- memcpy ((void *)pReleaseKeys, (void *)in_struct->pReleaseKeys, sizeof(uint64_t)*in_struct->releaseCount);
- }
-}
-
-void safe_VkWin32KeyedMutexAcquireReleaseInfoKHR::initialize(const safe_VkWin32KeyedMutexAcquireReleaseInfoKHR* src)
-{
- sType = src->sType;
- acquireCount = src->acquireCount;
- pAcquireSyncs = nullptr;
- pAcquireKeys = nullptr;
- pAcquireTimeouts = nullptr;
- releaseCount = src->releaseCount;
- pReleaseSyncs = nullptr;
- pReleaseKeys = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (acquireCount && src->pAcquireSyncs) {
- pAcquireSyncs = new VkDeviceMemory[acquireCount];
- for (uint32_t i = 0; i < acquireCount; ++i) {
- pAcquireSyncs[i] = src->pAcquireSyncs[i];
- }
- }
- if (src->pAcquireKeys) {
- pAcquireKeys = new uint64_t[src->acquireCount];
- memcpy ((void *)pAcquireKeys, (void *)src->pAcquireKeys, sizeof(uint64_t)*src->acquireCount);
- }
- if (src->pAcquireTimeouts) {
- pAcquireTimeouts = new uint32_t[src->acquireCount];
- memcpy ((void *)pAcquireTimeouts, (void *)src->pAcquireTimeouts, sizeof(uint32_t)*src->acquireCount);
- }
- if (releaseCount && src->pReleaseSyncs) {
- pReleaseSyncs = new VkDeviceMemory[releaseCount];
- for (uint32_t i = 0; i < releaseCount; ++i) {
- pReleaseSyncs[i] = src->pReleaseSyncs[i];
- }
- }
- if (src->pReleaseKeys) {
- pReleaseKeys = new uint64_t[src->releaseCount];
- memcpy ((void *)pReleaseKeys, (void *)src->pReleaseKeys, sizeof(uint64_t)*src->releaseCount);
- }
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-
-safe_VkImportSemaphoreWin32HandleInfoKHR::safe_VkImportSemaphoreWin32HandleInfoKHR(const VkImportSemaphoreWin32HandleInfoKHR* in_struct) :
- sType(in_struct->sType),
- semaphore(in_struct->semaphore),
- flags(in_struct->flags),
- handleType(in_struct->handleType),
- handle(in_struct->handle),
- name(in_struct->name)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkImportSemaphoreWin32HandleInfoKHR::safe_VkImportSemaphoreWin32HandleInfoKHR() :
- pNext(nullptr)
-{}
-
-safe_VkImportSemaphoreWin32HandleInfoKHR::safe_VkImportSemaphoreWin32HandleInfoKHR(const safe_VkImportSemaphoreWin32HandleInfoKHR& src)
-{
- sType = src.sType;
- semaphore = src.semaphore;
- flags = src.flags;
- handleType = src.handleType;
- handle = src.handle;
- name = src.name;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkImportSemaphoreWin32HandleInfoKHR& safe_VkImportSemaphoreWin32HandleInfoKHR::operator=(const safe_VkImportSemaphoreWin32HandleInfoKHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- semaphore = src.semaphore;
- flags = src.flags;
- handleType = src.handleType;
- handle = src.handle;
- name = src.name;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkImportSemaphoreWin32HandleInfoKHR::~safe_VkImportSemaphoreWin32HandleInfoKHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkImportSemaphoreWin32HandleInfoKHR::initialize(const VkImportSemaphoreWin32HandleInfoKHR* in_struct)
-{
- sType = in_struct->sType;
- semaphore = in_struct->semaphore;
- flags = in_struct->flags;
- handleType = in_struct->handleType;
- handle = in_struct->handle;
- name = in_struct->name;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkImportSemaphoreWin32HandleInfoKHR::initialize(const safe_VkImportSemaphoreWin32HandleInfoKHR* src)
-{
- sType = src->sType;
- semaphore = src->semaphore;
- flags = src->flags;
- handleType = src->handleType;
- handle = src->handle;
- name = src->name;
- pNext = SafePnextCopy(src->pNext);
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-
-safe_VkExportSemaphoreWin32HandleInfoKHR::safe_VkExportSemaphoreWin32HandleInfoKHR(const VkExportSemaphoreWin32HandleInfoKHR* in_struct) :
- sType(in_struct->sType),
- pAttributes(nullptr),
- dwAccess(in_struct->dwAccess),
- name(in_struct->name)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pAttributes) {
- pAttributes = new SECURITY_ATTRIBUTES(*in_struct->pAttributes);
- }
-}
-
-safe_VkExportSemaphoreWin32HandleInfoKHR::safe_VkExportSemaphoreWin32HandleInfoKHR() :
- pNext(nullptr),
- pAttributes(nullptr)
-{}
-
-safe_VkExportSemaphoreWin32HandleInfoKHR::safe_VkExportSemaphoreWin32HandleInfoKHR(const safe_VkExportSemaphoreWin32HandleInfoKHR& src)
-{
- sType = src.sType;
- pAttributes = nullptr;
- dwAccess = src.dwAccess;
- name = src.name;
- pNext = SafePnextCopy(src.pNext);
- if (src.pAttributes) {
- pAttributes = new SECURITY_ATTRIBUTES(*src.pAttributes);
- }
-}
-
-safe_VkExportSemaphoreWin32HandleInfoKHR& safe_VkExportSemaphoreWin32HandleInfoKHR::operator=(const safe_VkExportSemaphoreWin32HandleInfoKHR& src)
-{
- if (&src == this) return *this;
-
- if (pAttributes)
- delete pAttributes;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- pAttributes = nullptr;
- dwAccess = src.dwAccess;
- name = src.name;
- pNext = SafePnextCopy(src.pNext);
- if (src.pAttributes) {
- pAttributes = new SECURITY_ATTRIBUTES(*src.pAttributes);
- }
-
- return *this;
-}
-
-safe_VkExportSemaphoreWin32HandleInfoKHR::~safe_VkExportSemaphoreWin32HandleInfoKHR()
-{
- if (pAttributes)
- delete pAttributes;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkExportSemaphoreWin32HandleInfoKHR::initialize(const VkExportSemaphoreWin32HandleInfoKHR* in_struct)
-{
- sType = in_struct->sType;
- pAttributes = nullptr;
- dwAccess = in_struct->dwAccess;
- name = in_struct->name;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pAttributes) {
- pAttributes = new SECURITY_ATTRIBUTES(*in_struct->pAttributes);
- }
-}
-
-void safe_VkExportSemaphoreWin32HandleInfoKHR::initialize(const safe_VkExportSemaphoreWin32HandleInfoKHR* src)
-{
- sType = src->sType;
- pAttributes = nullptr;
- dwAccess = src->dwAccess;
- name = src->name;
- pNext = SafePnextCopy(src->pNext);
- if (src->pAttributes) {
- pAttributes = new SECURITY_ATTRIBUTES(*src->pAttributes);
- }
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-
-safe_VkD3D12FenceSubmitInfoKHR::safe_VkD3D12FenceSubmitInfoKHR(const VkD3D12FenceSubmitInfoKHR* in_struct) :
- sType(in_struct->sType),
- waitSemaphoreValuesCount(in_struct->waitSemaphoreValuesCount),
- pWaitSemaphoreValues(nullptr),
- signalSemaphoreValuesCount(in_struct->signalSemaphoreValuesCount),
- pSignalSemaphoreValues(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pWaitSemaphoreValues) {
- pWaitSemaphoreValues = new uint64_t[in_struct->waitSemaphoreValuesCount];
- memcpy ((void *)pWaitSemaphoreValues, (void *)in_struct->pWaitSemaphoreValues, sizeof(uint64_t)*in_struct->waitSemaphoreValuesCount);
- }
- if (in_struct->pSignalSemaphoreValues) {
- pSignalSemaphoreValues = new uint64_t[in_struct->signalSemaphoreValuesCount];
- memcpy ((void *)pSignalSemaphoreValues, (void *)in_struct->pSignalSemaphoreValues, sizeof(uint64_t)*in_struct->signalSemaphoreValuesCount);
- }
-}
-
-safe_VkD3D12FenceSubmitInfoKHR::safe_VkD3D12FenceSubmitInfoKHR() :
- pNext(nullptr),
- pWaitSemaphoreValues(nullptr),
- pSignalSemaphoreValues(nullptr)
-{}
-
-safe_VkD3D12FenceSubmitInfoKHR::safe_VkD3D12FenceSubmitInfoKHR(const safe_VkD3D12FenceSubmitInfoKHR& src)
-{
- sType = src.sType;
- waitSemaphoreValuesCount = src.waitSemaphoreValuesCount;
- pWaitSemaphoreValues = nullptr;
- signalSemaphoreValuesCount = src.signalSemaphoreValuesCount;
- pSignalSemaphoreValues = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pWaitSemaphoreValues) {
- pWaitSemaphoreValues = new uint64_t[src.waitSemaphoreValuesCount];
- memcpy ((void *)pWaitSemaphoreValues, (void *)src.pWaitSemaphoreValues, sizeof(uint64_t)*src.waitSemaphoreValuesCount);
- }
- if (src.pSignalSemaphoreValues) {
- pSignalSemaphoreValues = new uint64_t[src.signalSemaphoreValuesCount];
- memcpy ((void *)pSignalSemaphoreValues, (void *)src.pSignalSemaphoreValues, sizeof(uint64_t)*src.signalSemaphoreValuesCount);
- }
-}
-
-safe_VkD3D12FenceSubmitInfoKHR& safe_VkD3D12FenceSubmitInfoKHR::operator=(const safe_VkD3D12FenceSubmitInfoKHR& src)
-{
- if (&src == this) return *this;
-
- if (pWaitSemaphoreValues)
- delete[] pWaitSemaphoreValues;
- if (pSignalSemaphoreValues)
- delete[] pSignalSemaphoreValues;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- waitSemaphoreValuesCount = src.waitSemaphoreValuesCount;
- pWaitSemaphoreValues = nullptr;
- signalSemaphoreValuesCount = src.signalSemaphoreValuesCount;
- pSignalSemaphoreValues = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pWaitSemaphoreValues) {
- pWaitSemaphoreValues = new uint64_t[src.waitSemaphoreValuesCount];
- memcpy ((void *)pWaitSemaphoreValues, (void *)src.pWaitSemaphoreValues, sizeof(uint64_t)*src.waitSemaphoreValuesCount);
- }
- if (src.pSignalSemaphoreValues) {
- pSignalSemaphoreValues = new uint64_t[src.signalSemaphoreValuesCount];
- memcpy ((void *)pSignalSemaphoreValues, (void *)src.pSignalSemaphoreValues, sizeof(uint64_t)*src.signalSemaphoreValuesCount);
- }
-
- return *this;
-}
-
-safe_VkD3D12FenceSubmitInfoKHR::~safe_VkD3D12FenceSubmitInfoKHR()
-{
- if (pWaitSemaphoreValues)
- delete[] pWaitSemaphoreValues;
- if (pSignalSemaphoreValues)
- delete[] pSignalSemaphoreValues;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkD3D12FenceSubmitInfoKHR::initialize(const VkD3D12FenceSubmitInfoKHR* in_struct)
-{
- sType = in_struct->sType;
- waitSemaphoreValuesCount = in_struct->waitSemaphoreValuesCount;
- pWaitSemaphoreValues = nullptr;
- signalSemaphoreValuesCount = in_struct->signalSemaphoreValuesCount;
- pSignalSemaphoreValues = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pWaitSemaphoreValues) {
- pWaitSemaphoreValues = new uint64_t[in_struct->waitSemaphoreValuesCount];
- memcpy ((void *)pWaitSemaphoreValues, (void *)in_struct->pWaitSemaphoreValues, sizeof(uint64_t)*in_struct->waitSemaphoreValuesCount);
- }
- if (in_struct->pSignalSemaphoreValues) {
- pSignalSemaphoreValues = new uint64_t[in_struct->signalSemaphoreValuesCount];
- memcpy ((void *)pSignalSemaphoreValues, (void *)in_struct->pSignalSemaphoreValues, sizeof(uint64_t)*in_struct->signalSemaphoreValuesCount);
- }
-}
-
-void safe_VkD3D12FenceSubmitInfoKHR::initialize(const safe_VkD3D12FenceSubmitInfoKHR* src)
-{
- sType = src->sType;
- waitSemaphoreValuesCount = src->waitSemaphoreValuesCount;
- pWaitSemaphoreValues = nullptr;
- signalSemaphoreValuesCount = src->signalSemaphoreValuesCount;
- pSignalSemaphoreValues = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (src->pWaitSemaphoreValues) {
- pWaitSemaphoreValues = new uint64_t[src->waitSemaphoreValuesCount];
- memcpy ((void *)pWaitSemaphoreValues, (void *)src->pWaitSemaphoreValues, sizeof(uint64_t)*src->waitSemaphoreValuesCount);
- }
- if (src->pSignalSemaphoreValues) {
- pSignalSemaphoreValues = new uint64_t[src->signalSemaphoreValuesCount];
- memcpy ((void *)pSignalSemaphoreValues, (void *)src->pSignalSemaphoreValues, sizeof(uint64_t)*src->signalSemaphoreValuesCount);
- }
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-
-safe_VkSemaphoreGetWin32HandleInfoKHR::safe_VkSemaphoreGetWin32HandleInfoKHR(const VkSemaphoreGetWin32HandleInfoKHR* in_struct) :
- sType(in_struct->sType),
- semaphore(in_struct->semaphore),
- handleType(in_struct->handleType)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkSemaphoreGetWin32HandleInfoKHR::safe_VkSemaphoreGetWin32HandleInfoKHR() :
- pNext(nullptr)
-{}
-
-safe_VkSemaphoreGetWin32HandleInfoKHR::safe_VkSemaphoreGetWin32HandleInfoKHR(const safe_VkSemaphoreGetWin32HandleInfoKHR& src)
-{
- sType = src.sType;
- semaphore = src.semaphore;
- handleType = src.handleType;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkSemaphoreGetWin32HandleInfoKHR& safe_VkSemaphoreGetWin32HandleInfoKHR::operator=(const safe_VkSemaphoreGetWin32HandleInfoKHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- semaphore = src.semaphore;
- handleType = src.handleType;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkSemaphoreGetWin32HandleInfoKHR::~safe_VkSemaphoreGetWin32HandleInfoKHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkSemaphoreGetWin32HandleInfoKHR::initialize(const VkSemaphoreGetWin32HandleInfoKHR* in_struct)
-{
- sType = in_struct->sType;
- semaphore = in_struct->semaphore;
- handleType = in_struct->handleType;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkSemaphoreGetWin32HandleInfoKHR::initialize(const safe_VkSemaphoreGetWin32HandleInfoKHR* src)
-{
- sType = src->sType;
- semaphore = src->semaphore;
- handleType = src->handleType;
- pNext = SafePnextCopy(src->pNext);
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-
-safe_VkImportSemaphoreFdInfoKHR::safe_VkImportSemaphoreFdInfoKHR(const VkImportSemaphoreFdInfoKHR* in_struct) :
- sType(in_struct->sType),
- semaphore(in_struct->semaphore),
- flags(in_struct->flags),
- handleType(in_struct->handleType),
- fd(in_struct->fd)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkImportSemaphoreFdInfoKHR::safe_VkImportSemaphoreFdInfoKHR() :
- pNext(nullptr)
-{}
-
-safe_VkImportSemaphoreFdInfoKHR::safe_VkImportSemaphoreFdInfoKHR(const safe_VkImportSemaphoreFdInfoKHR& src)
-{
- sType = src.sType;
- semaphore = src.semaphore;
- flags = src.flags;
- handleType = src.handleType;
- fd = src.fd;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkImportSemaphoreFdInfoKHR& safe_VkImportSemaphoreFdInfoKHR::operator=(const safe_VkImportSemaphoreFdInfoKHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- semaphore = src.semaphore;
- flags = src.flags;
- handleType = src.handleType;
- fd = src.fd;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkImportSemaphoreFdInfoKHR::~safe_VkImportSemaphoreFdInfoKHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkImportSemaphoreFdInfoKHR::initialize(const VkImportSemaphoreFdInfoKHR* in_struct)
-{
- sType = in_struct->sType;
- semaphore = in_struct->semaphore;
- flags = in_struct->flags;
- handleType = in_struct->handleType;
- fd = in_struct->fd;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkImportSemaphoreFdInfoKHR::initialize(const safe_VkImportSemaphoreFdInfoKHR* src)
-{
- sType = src->sType;
- semaphore = src->semaphore;
- flags = src->flags;
- handleType = src->handleType;
- fd = src->fd;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkSemaphoreGetFdInfoKHR::safe_VkSemaphoreGetFdInfoKHR(const VkSemaphoreGetFdInfoKHR* in_struct) :
- sType(in_struct->sType),
- semaphore(in_struct->semaphore),
- handleType(in_struct->handleType)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkSemaphoreGetFdInfoKHR::safe_VkSemaphoreGetFdInfoKHR() :
- pNext(nullptr)
-{}
-
-safe_VkSemaphoreGetFdInfoKHR::safe_VkSemaphoreGetFdInfoKHR(const safe_VkSemaphoreGetFdInfoKHR& src)
-{
- sType = src.sType;
- semaphore = src.semaphore;
- handleType = src.handleType;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkSemaphoreGetFdInfoKHR& safe_VkSemaphoreGetFdInfoKHR::operator=(const safe_VkSemaphoreGetFdInfoKHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- semaphore = src.semaphore;
- handleType = src.handleType;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkSemaphoreGetFdInfoKHR::~safe_VkSemaphoreGetFdInfoKHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkSemaphoreGetFdInfoKHR::initialize(const VkSemaphoreGetFdInfoKHR* in_struct)
-{
- sType = in_struct->sType;
- semaphore = in_struct->semaphore;
- handleType = in_struct->handleType;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkSemaphoreGetFdInfoKHR::initialize(const safe_VkSemaphoreGetFdInfoKHR* src)
-{
- sType = src->sType;
- semaphore = src->semaphore;
- handleType = src->handleType;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDevicePushDescriptorPropertiesKHR::safe_VkPhysicalDevicePushDescriptorPropertiesKHR(const VkPhysicalDevicePushDescriptorPropertiesKHR* in_struct) :
- sType(in_struct->sType),
- maxPushDescriptors(in_struct->maxPushDescriptors)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDevicePushDescriptorPropertiesKHR::safe_VkPhysicalDevicePushDescriptorPropertiesKHR() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDevicePushDescriptorPropertiesKHR::safe_VkPhysicalDevicePushDescriptorPropertiesKHR(const safe_VkPhysicalDevicePushDescriptorPropertiesKHR& src)
-{
- sType = src.sType;
- maxPushDescriptors = src.maxPushDescriptors;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDevicePushDescriptorPropertiesKHR& safe_VkPhysicalDevicePushDescriptorPropertiesKHR::operator=(const safe_VkPhysicalDevicePushDescriptorPropertiesKHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- maxPushDescriptors = src.maxPushDescriptors;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDevicePushDescriptorPropertiesKHR::~safe_VkPhysicalDevicePushDescriptorPropertiesKHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDevicePushDescriptorPropertiesKHR::initialize(const VkPhysicalDevicePushDescriptorPropertiesKHR* in_struct)
-{
- sType = in_struct->sType;
- maxPushDescriptors = in_struct->maxPushDescriptors;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDevicePushDescriptorPropertiesKHR::initialize(const safe_VkPhysicalDevicePushDescriptorPropertiesKHR* src)
-{
- sType = src->sType;
- maxPushDescriptors = src->maxPushDescriptors;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR::safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR(const VkPhysicalDeviceShaderFloat16Int8FeaturesKHR* in_struct) :
- sType(in_struct->sType),
- shaderFloat16(in_struct->shaderFloat16),
- shaderInt8(in_struct->shaderInt8)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR::safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR::safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR(const safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR& src)
-{
- sType = src.sType;
- shaderFloat16 = src.shaderFloat16;
- shaderInt8 = src.shaderInt8;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR& safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR::operator=(const safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- shaderFloat16 = src.shaderFloat16;
- shaderInt8 = src.shaderInt8;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR::~safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR::initialize(const VkPhysicalDeviceShaderFloat16Int8FeaturesKHR* in_struct)
-{
- sType = in_struct->sType;
- shaderFloat16 = in_struct->shaderFloat16;
- shaderInt8 = in_struct->shaderInt8;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR::initialize(const safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR* src)
-{
- sType = src->sType;
- shaderFloat16 = src->shaderFloat16;
- shaderInt8 = src->shaderInt8;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPresentRegionKHR::safe_VkPresentRegionKHR(const VkPresentRegionKHR* in_struct) :
- rectangleCount(in_struct->rectangleCount),
- pRectangles(nullptr)
-{
- if (in_struct->pRectangles) {
- pRectangles = new VkRectLayerKHR[in_struct->rectangleCount];
- memcpy ((void *)pRectangles, (void *)in_struct->pRectangles, sizeof(VkRectLayerKHR)*in_struct->rectangleCount);
- }
-}
-
-safe_VkPresentRegionKHR::safe_VkPresentRegionKHR() :
- pRectangles(nullptr)
-{}
-
-safe_VkPresentRegionKHR::safe_VkPresentRegionKHR(const safe_VkPresentRegionKHR& src)
-{
- rectangleCount = src.rectangleCount;
- pRectangles = nullptr;
- if (src.pRectangles) {
- pRectangles = new VkRectLayerKHR[src.rectangleCount];
- memcpy ((void *)pRectangles, (void *)src.pRectangles, sizeof(VkRectLayerKHR)*src.rectangleCount);
- }
-}
-
-safe_VkPresentRegionKHR& safe_VkPresentRegionKHR::operator=(const safe_VkPresentRegionKHR& src)
-{
- if (&src == this) return *this;
-
- if (pRectangles)
- delete[] pRectangles;
-
- rectangleCount = src.rectangleCount;
- pRectangles = nullptr;
- if (src.pRectangles) {
- pRectangles = new VkRectLayerKHR[src.rectangleCount];
- memcpy ((void *)pRectangles, (void *)src.pRectangles, sizeof(VkRectLayerKHR)*src.rectangleCount);
- }
-
- return *this;
-}
-
-safe_VkPresentRegionKHR::~safe_VkPresentRegionKHR()
-{
- if (pRectangles)
- delete[] pRectangles;
-}
-
-void safe_VkPresentRegionKHR::initialize(const VkPresentRegionKHR* in_struct)
-{
- rectangleCount = in_struct->rectangleCount;
- pRectangles = nullptr;
- if (in_struct->pRectangles) {
- pRectangles = new VkRectLayerKHR[in_struct->rectangleCount];
- memcpy ((void *)pRectangles, (void *)in_struct->pRectangles, sizeof(VkRectLayerKHR)*in_struct->rectangleCount);
- }
-}
-
-void safe_VkPresentRegionKHR::initialize(const safe_VkPresentRegionKHR* src)
-{
- rectangleCount = src->rectangleCount;
- pRectangles = nullptr;
- if (src->pRectangles) {
- pRectangles = new VkRectLayerKHR[src->rectangleCount];
- memcpy ((void *)pRectangles, (void *)src->pRectangles, sizeof(VkRectLayerKHR)*src->rectangleCount);
- }
-}
-
-safe_VkPresentRegionsKHR::safe_VkPresentRegionsKHR(const VkPresentRegionsKHR* in_struct) :
- sType(in_struct->sType),
- swapchainCount(in_struct->swapchainCount),
- pRegions(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (swapchainCount && in_struct->pRegions) {
- pRegions = new safe_VkPresentRegionKHR[swapchainCount];
- for (uint32_t i = 0; i < swapchainCount; ++i) {
- pRegions[i].initialize(&in_struct->pRegions[i]);
- }
- }
-}
-
-safe_VkPresentRegionsKHR::safe_VkPresentRegionsKHR() :
- pNext(nullptr),
- pRegions(nullptr)
-{}
-
-safe_VkPresentRegionsKHR::safe_VkPresentRegionsKHR(const safe_VkPresentRegionsKHR& src)
-{
- sType = src.sType;
- swapchainCount = src.swapchainCount;
- pRegions = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (swapchainCount && src.pRegions) {
- pRegions = new safe_VkPresentRegionKHR[swapchainCount];
- for (uint32_t i = 0; i < swapchainCount; ++i) {
- pRegions[i].initialize(&src.pRegions[i]);
- }
- }
-}
-
-safe_VkPresentRegionsKHR& safe_VkPresentRegionsKHR::operator=(const safe_VkPresentRegionsKHR& src)
-{
- if (&src == this) return *this;
-
- if (pRegions)
- delete[] pRegions;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- swapchainCount = src.swapchainCount;
- pRegions = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (swapchainCount && src.pRegions) {
- pRegions = new safe_VkPresentRegionKHR[swapchainCount];
- for (uint32_t i = 0; i < swapchainCount; ++i) {
- pRegions[i].initialize(&src.pRegions[i]);
- }
- }
-
- return *this;
-}
-
-safe_VkPresentRegionsKHR::~safe_VkPresentRegionsKHR()
-{
- if (pRegions)
- delete[] pRegions;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPresentRegionsKHR::initialize(const VkPresentRegionsKHR* in_struct)
-{
- sType = in_struct->sType;
- swapchainCount = in_struct->swapchainCount;
- pRegions = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (swapchainCount && in_struct->pRegions) {
- pRegions = new safe_VkPresentRegionKHR[swapchainCount];
- for (uint32_t i = 0; i < swapchainCount; ++i) {
- pRegions[i].initialize(&in_struct->pRegions[i]);
- }
- }
-}
-
-void safe_VkPresentRegionsKHR::initialize(const safe_VkPresentRegionsKHR* src)
-{
- sType = src->sType;
- swapchainCount = src->swapchainCount;
- pRegions = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (swapchainCount && src->pRegions) {
- pRegions = new safe_VkPresentRegionKHR[swapchainCount];
- for (uint32_t i = 0; i < swapchainCount; ++i) {
- pRegions[i].initialize(&src->pRegions[i]);
- }
- }
-}
-
-safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR::safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR(const VkPhysicalDeviceImagelessFramebufferFeaturesKHR* in_struct) :
- sType(in_struct->sType),
- imagelessFramebuffer(in_struct->imagelessFramebuffer)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR::safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR::safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR(const safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR& src)
-{
- sType = src.sType;
- imagelessFramebuffer = src.imagelessFramebuffer;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR& safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR::operator=(const safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- imagelessFramebuffer = src.imagelessFramebuffer;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR::~safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR::initialize(const VkPhysicalDeviceImagelessFramebufferFeaturesKHR* in_struct)
-{
- sType = in_struct->sType;
- imagelessFramebuffer = in_struct->imagelessFramebuffer;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR::initialize(const safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR* src)
-{
- sType = src->sType;
- imagelessFramebuffer = src->imagelessFramebuffer;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkFramebufferAttachmentImageInfoKHR::safe_VkFramebufferAttachmentImageInfoKHR(const VkFramebufferAttachmentImageInfoKHR* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- usage(in_struct->usage),
- width(in_struct->width),
- height(in_struct->height),
- layerCount(in_struct->layerCount),
- viewFormatCount(in_struct->viewFormatCount),
- pViewFormats(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pViewFormats) {
- pViewFormats = new VkFormat[in_struct->viewFormatCount];
- memcpy ((void *)pViewFormats, (void *)in_struct->pViewFormats, sizeof(VkFormat)*in_struct->viewFormatCount);
- }
-}
-
-safe_VkFramebufferAttachmentImageInfoKHR::safe_VkFramebufferAttachmentImageInfoKHR() :
- pNext(nullptr),
- pViewFormats(nullptr)
-{}
-
-safe_VkFramebufferAttachmentImageInfoKHR::safe_VkFramebufferAttachmentImageInfoKHR(const safe_VkFramebufferAttachmentImageInfoKHR& src)
-{
- sType = src.sType;
- flags = src.flags;
- usage = src.usage;
- width = src.width;
- height = src.height;
- layerCount = src.layerCount;
- viewFormatCount = src.viewFormatCount;
- pViewFormats = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pViewFormats) {
- pViewFormats = new VkFormat[src.viewFormatCount];
- memcpy ((void *)pViewFormats, (void *)src.pViewFormats, sizeof(VkFormat)*src.viewFormatCount);
- }
-}
-
-safe_VkFramebufferAttachmentImageInfoKHR& safe_VkFramebufferAttachmentImageInfoKHR::operator=(const safe_VkFramebufferAttachmentImageInfoKHR& src)
-{
- if (&src == this) return *this;
-
- if (pViewFormats)
- delete[] pViewFormats;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- usage = src.usage;
- width = src.width;
- height = src.height;
- layerCount = src.layerCount;
- viewFormatCount = src.viewFormatCount;
- pViewFormats = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pViewFormats) {
- pViewFormats = new VkFormat[src.viewFormatCount];
- memcpy ((void *)pViewFormats, (void *)src.pViewFormats, sizeof(VkFormat)*src.viewFormatCount);
- }
-
- return *this;
-}
-
-safe_VkFramebufferAttachmentImageInfoKHR::~safe_VkFramebufferAttachmentImageInfoKHR()
-{
- if (pViewFormats)
- delete[] pViewFormats;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkFramebufferAttachmentImageInfoKHR::initialize(const VkFramebufferAttachmentImageInfoKHR* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- usage = in_struct->usage;
- width = in_struct->width;
- height = in_struct->height;
- layerCount = in_struct->layerCount;
- viewFormatCount = in_struct->viewFormatCount;
- pViewFormats = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pViewFormats) {
- pViewFormats = new VkFormat[in_struct->viewFormatCount];
- memcpy ((void *)pViewFormats, (void *)in_struct->pViewFormats, sizeof(VkFormat)*in_struct->viewFormatCount);
- }
-}
-
-void safe_VkFramebufferAttachmentImageInfoKHR::initialize(const safe_VkFramebufferAttachmentImageInfoKHR* src)
-{
- sType = src->sType;
- flags = src->flags;
- usage = src->usage;
- width = src->width;
- height = src->height;
- layerCount = src->layerCount;
- viewFormatCount = src->viewFormatCount;
- pViewFormats = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (src->pViewFormats) {
- pViewFormats = new VkFormat[src->viewFormatCount];
- memcpy ((void *)pViewFormats, (void *)src->pViewFormats, sizeof(VkFormat)*src->viewFormatCount);
- }
-}
-
-safe_VkFramebufferAttachmentsCreateInfoKHR::safe_VkFramebufferAttachmentsCreateInfoKHR(const VkFramebufferAttachmentsCreateInfoKHR* in_struct) :
- sType(in_struct->sType),
- attachmentImageInfoCount(in_struct->attachmentImageInfoCount),
- pAttachmentImageInfos(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (attachmentImageInfoCount && in_struct->pAttachmentImageInfos) {
- pAttachmentImageInfos = new safe_VkFramebufferAttachmentImageInfoKHR[attachmentImageInfoCount];
- for (uint32_t i = 0; i < attachmentImageInfoCount; ++i) {
- pAttachmentImageInfos[i].initialize(&in_struct->pAttachmentImageInfos[i]);
- }
- }
-}
-
-safe_VkFramebufferAttachmentsCreateInfoKHR::safe_VkFramebufferAttachmentsCreateInfoKHR() :
- pNext(nullptr),
- pAttachmentImageInfos(nullptr)
-{}
-
-safe_VkFramebufferAttachmentsCreateInfoKHR::safe_VkFramebufferAttachmentsCreateInfoKHR(const safe_VkFramebufferAttachmentsCreateInfoKHR& src)
-{
- sType = src.sType;
- attachmentImageInfoCount = src.attachmentImageInfoCount;
- pAttachmentImageInfos = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (attachmentImageInfoCount && src.pAttachmentImageInfos) {
- pAttachmentImageInfos = new safe_VkFramebufferAttachmentImageInfoKHR[attachmentImageInfoCount];
- for (uint32_t i = 0; i < attachmentImageInfoCount; ++i) {
- pAttachmentImageInfos[i].initialize(&src.pAttachmentImageInfos[i]);
- }
- }
-}
-
-safe_VkFramebufferAttachmentsCreateInfoKHR& safe_VkFramebufferAttachmentsCreateInfoKHR::operator=(const safe_VkFramebufferAttachmentsCreateInfoKHR& src)
-{
- if (&src == this) return *this;
-
- if (pAttachmentImageInfos)
- delete[] pAttachmentImageInfos;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- attachmentImageInfoCount = src.attachmentImageInfoCount;
- pAttachmentImageInfos = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (attachmentImageInfoCount && src.pAttachmentImageInfos) {
- pAttachmentImageInfos = new safe_VkFramebufferAttachmentImageInfoKHR[attachmentImageInfoCount];
- for (uint32_t i = 0; i < attachmentImageInfoCount; ++i) {
- pAttachmentImageInfos[i].initialize(&src.pAttachmentImageInfos[i]);
- }
- }
-
- return *this;
-}
-
-safe_VkFramebufferAttachmentsCreateInfoKHR::~safe_VkFramebufferAttachmentsCreateInfoKHR()
-{
- if (pAttachmentImageInfos)
- delete[] pAttachmentImageInfos;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkFramebufferAttachmentsCreateInfoKHR::initialize(const VkFramebufferAttachmentsCreateInfoKHR* in_struct)
-{
- sType = in_struct->sType;
- attachmentImageInfoCount = in_struct->attachmentImageInfoCount;
- pAttachmentImageInfos = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (attachmentImageInfoCount && in_struct->pAttachmentImageInfos) {
- pAttachmentImageInfos = new safe_VkFramebufferAttachmentImageInfoKHR[attachmentImageInfoCount];
- for (uint32_t i = 0; i < attachmentImageInfoCount; ++i) {
- pAttachmentImageInfos[i].initialize(&in_struct->pAttachmentImageInfos[i]);
- }
- }
-}
-
-void safe_VkFramebufferAttachmentsCreateInfoKHR::initialize(const safe_VkFramebufferAttachmentsCreateInfoKHR* src)
-{
- sType = src->sType;
- attachmentImageInfoCount = src->attachmentImageInfoCount;
- pAttachmentImageInfos = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (attachmentImageInfoCount && src->pAttachmentImageInfos) {
- pAttachmentImageInfos = new safe_VkFramebufferAttachmentImageInfoKHR[attachmentImageInfoCount];
- for (uint32_t i = 0; i < attachmentImageInfoCount; ++i) {
- pAttachmentImageInfos[i].initialize(&src->pAttachmentImageInfos[i]);
- }
- }
-}
-
-safe_VkRenderPassAttachmentBeginInfoKHR::safe_VkRenderPassAttachmentBeginInfoKHR(const VkRenderPassAttachmentBeginInfoKHR* in_struct) :
- sType(in_struct->sType),
- attachmentCount(in_struct->attachmentCount),
- pAttachments(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (attachmentCount && in_struct->pAttachments) {
- pAttachments = new VkImageView[attachmentCount];
- for (uint32_t i = 0; i < attachmentCount; ++i) {
- pAttachments[i] = in_struct->pAttachments[i];
- }
- }
-}
-
-safe_VkRenderPassAttachmentBeginInfoKHR::safe_VkRenderPassAttachmentBeginInfoKHR() :
- pNext(nullptr),
- pAttachments(nullptr)
-{}
-
-safe_VkRenderPassAttachmentBeginInfoKHR::safe_VkRenderPassAttachmentBeginInfoKHR(const safe_VkRenderPassAttachmentBeginInfoKHR& src)
-{
- sType = src.sType;
- attachmentCount = src.attachmentCount;
- pAttachments = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (attachmentCount && src.pAttachments) {
- pAttachments = new VkImageView[attachmentCount];
- for (uint32_t i = 0; i < attachmentCount; ++i) {
- pAttachments[i] = src.pAttachments[i];
- }
- }
-}
-
-safe_VkRenderPassAttachmentBeginInfoKHR& safe_VkRenderPassAttachmentBeginInfoKHR::operator=(const safe_VkRenderPassAttachmentBeginInfoKHR& src)
-{
- if (&src == this) return *this;
-
- if (pAttachments)
- delete[] pAttachments;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- attachmentCount = src.attachmentCount;
- pAttachments = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (attachmentCount && src.pAttachments) {
- pAttachments = new VkImageView[attachmentCount];
- for (uint32_t i = 0; i < attachmentCount; ++i) {
- pAttachments[i] = src.pAttachments[i];
- }
- }
-
- return *this;
-}
-
-safe_VkRenderPassAttachmentBeginInfoKHR::~safe_VkRenderPassAttachmentBeginInfoKHR()
-{
- if (pAttachments)
- delete[] pAttachments;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkRenderPassAttachmentBeginInfoKHR::initialize(const VkRenderPassAttachmentBeginInfoKHR* in_struct)
-{
- sType = in_struct->sType;
- attachmentCount = in_struct->attachmentCount;
- pAttachments = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (attachmentCount && in_struct->pAttachments) {
- pAttachments = new VkImageView[attachmentCount];
- for (uint32_t i = 0; i < attachmentCount; ++i) {
- pAttachments[i] = in_struct->pAttachments[i];
- }
- }
-}
-
-void safe_VkRenderPassAttachmentBeginInfoKHR::initialize(const safe_VkRenderPassAttachmentBeginInfoKHR* src)
-{
- sType = src->sType;
- attachmentCount = src->attachmentCount;
- pAttachments = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (attachmentCount && src->pAttachments) {
- pAttachments = new VkImageView[attachmentCount];
- for (uint32_t i = 0; i < attachmentCount; ++i) {
- pAttachments[i] = src->pAttachments[i];
- }
- }
-}
-
-safe_VkAttachmentDescription2KHR::safe_VkAttachmentDescription2KHR(const VkAttachmentDescription2KHR* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- format(in_struct->format),
- samples(in_struct->samples),
- loadOp(in_struct->loadOp),
- storeOp(in_struct->storeOp),
- stencilLoadOp(in_struct->stencilLoadOp),
- stencilStoreOp(in_struct->stencilStoreOp),
- initialLayout(in_struct->initialLayout),
- finalLayout(in_struct->finalLayout)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkAttachmentDescription2KHR::safe_VkAttachmentDescription2KHR() :
- pNext(nullptr)
-{}
-
-safe_VkAttachmentDescription2KHR::safe_VkAttachmentDescription2KHR(const safe_VkAttachmentDescription2KHR& src)
-{
- sType = src.sType;
- flags = src.flags;
- format = src.format;
- samples = src.samples;
- loadOp = src.loadOp;
- storeOp = src.storeOp;
- stencilLoadOp = src.stencilLoadOp;
- stencilStoreOp = src.stencilStoreOp;
- initialLayout = src.initialLayout;
- finalLayout = src.finalLayout;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkAttachmentDescription2KHR& safe_VkAttachmentDescription2KHR::operator=(const safe_VkAttachmentDescription2KHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- format = src.format;
- samples = src.samples;
- loadOp = src.loadOp;
- storeOp = src.storeOp;
- stencilLoadOp = src.stencilLoadOp;
- stencilStoreOp = src.stencilStoreOp;
- initialLayout = src.initialLayout;
- finalLayout = src.finalLayout;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkAttachmentDescription2KHR::~safe_VkAttachmentDescription2KHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkAttachmentDescription2KHR::initialize(const VkAttachmentDescription2KHR* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- format = in_struct->format;
- samples = in_struct->samples;
- loadOp = in_struct->loadOp;
- storeOp = in_struct->storeOp;
- stencilLoadOp = in_struct->stencilLoadOp;
- stencilStoreOp = in_struct->stencilStoreOp;
- initialLayout = in_struct->initialLayout;
- finalLayout = in_struct->finalLayout;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkAttachmentDescription2KHR::initialize(const safe_VkAttachmentDescription2KHR* src)
-{
- sType = src->sType;
- flags = src->flags;
- format = src->format;
- samples = src->samples;
- loadOp = src->loadOp;
- storeOp = src->storeOp;
- stencilLoadOp = src->stencilLoadOp;
- stencilStoreOp = src->stencilStoreOp;
- initialLayout = src->initialLayout;
- finalLayout = src->finalLayout;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkAttachmentReference2KHR::safe_VkAttachmentReference2KHR(const VkAttachmentReference2KHR* in_struct) :
- sType(in_struct->sType),
- attachment(in_struct->attachment),
- layout(in_struct->layout),
- aspectMask(in_struct->aspectMask)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkAttachmentReference2KHR::safe_VkAttachmentReference2KHR() :
- pNext(nullptr)
-{}
-
-safe_VkAttachmentReference2KHR::safe_VkAttachmentReference2KHR(const safe_VkAttachmentReference2KHR& src)
-{
- sType = src.sType;
- attachment = src.attachment;
- layout = src.layout;
- aspectMask = src.aspectMask;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkAttachmentReference2KHR& safe_VkAttachmentReference2KHR::operator=(const safe_VkAttachmentReference2KHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- attachment = src.attachment;
- layout = src.layout;
- aspectMask = src.aspectMask;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkAttachmentReference2KHR::~safe_VkAttachmentReference2KHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkAttachmentReference2KHR::initialize(const VkAttachmentReference2KHR* in_struct)
-{
- sType = in_struct->sType;
- attachment = in_struct->attachment;
- layout = in_struct->layout;
- aspectMask = in_struct->aspectMask;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkAttachmentReference2KHR::initialize(const safe_VkAttachmentReference2KHR* src)
-{
- sType = src->sType;
- attachment = src->attachment;
- layout = src->layout;
- aspectMask = src->aspectMask;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkSubpassDescription2KHR::safe_VkSubpassDescription2KHR(const VkSubpassDescription2KHR* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- pipelineBindPoint(in_struct->pipelineBindPoint),
- viewMask(in_struct->viewMask),
- inputAttachmentCount(in_struct->inputAttachmentCount),
- pInputAttachments(nullptr),
- colorAttachmentCount(in_struct->colorAttachmentCount),
- pColorAttachments(nullptr),
- pResolveAttachments(nullptr),
- pDepthStencilAttachment(nullptr),
- preserveAttachmentCount(in_struct->preserveAttachmentCount),
- pPreserveAttachments(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (inputAttachmentCount && in_struct->pInputAttachments) {
- pInputAttachments = new safe_VkAttachmentReference2KHR[inputAttachmentCount];
- for (uint32_t i = 0; i < inputAttachmentCount; ++i) {
- pInputAttachments[i].initialize(&in_struct->pInputAttachments[i]);
- }
- }
- if (colorAttachmentCount && in_struct->pColorAttachments) {
- pColorAttachments = new safe_VkAttachmentReference2KHR[colorAttachmentCount];
- for (uint32_t i = 0; i < colorAttachmentCount; ++i) {
- pColorAttachments[i].initialize(&in_struct->pColorAttachments[i]);
- }
- }
- if (colorAttachmentCount && in_struct->pResolveAttachments) {
- pResolveAttachments = new safe_VkAttachmentReference2KHR[colorAttachmentCount];
- for (uint32_t i = 0; i < colorAttachmentCount; ++i) {
- pResolveAttachments[i].initialize(&in_struct->pResolveAttachments[i]);
- }
- }
- if (in_struct->pDepthStencilAttachment)
- pDepthStencilAttachment = new safe_VkAttachmentReference2KHR(in_struct->pDepthStencilAttachment);
- if (in_struct->pPreserveAttachments) {
- pPreserveAttachments = new uint32_t[in_struct->preserveAttachmentCount];
- memcpy ((void *)pPreserveAttachments, (void *)in_struct->pPreserveAttachments, sizeof(uint32_t)*in_struct->preserveAttachmentCount);
- }
-}
-
-safe_VkSubpassDescription2KHR::safe_VkSubpassDescription2KHR() :
- pNext(nullptr),
- pInputAttachments(nullptr),
- pColorAttachments(nullptr),
- pResolveAttachments(nullptr),
- pDepthStencilAttachment(nullptr),
- pPreserveAttachments(nullptr)
-{}
-
-safe_VkSubpassDescription2KHR::safe_VkSubpassDescription2KHR(const safe_VkSubpassDescription2KHR& src)
-{
- sType = src.sType;
- flags = src.flags;
- pipelineBindPoint = src.pipelineBindPoint;
- viewMask = src.viewMask;
- inputAttachmentCount = src.inputAttachmentCount;
- pInputAttachments = nullptr;
- colorAttachmentCount = src.colorAttachmentCount;
- pColorAttachments = nullptr;
- pResolveAttachments = nullptr;
- pDepthStencilAttachment = nullptr;
- preserveAttachmentCount = src.preserveAttachmentCount;
- pPreserveAttachments = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (inputAttachmentCount && src.pInputAttachments) {
- pInputAttachments = new safe_VkAttachmentReference2KHR[inputAttachmentCount];
- for (uint32_t i = 0; i < inputAttachmentCount; ++i) {
- pInputAttachments[i].initialize(&src.pInputAttachments[i]);
- }
- }
- if (colorAttachmentCount && src.pColorAttachments) {
- pColorAttachments = new safe_VkAttachmentReference2KHR[colorAttachmentCount];
- for (uint32_t i = 0; i < colorAttachmentCount; ++i) {
- pColorAttachments[i].initialize(&src.pColorAttachments[i]);
- }
- }
- if (colorAttachmentCount && src.pResolveAttachments) {
- pResolveAttachments = new safe_VkAttachmentReference2KHR[colorAttachmentCount];
- for (uint32_t i = 0; i < colorAttachmentCount; ++i) {
- pResolveAttachments[i].initialize(&src.pResolveAttachments[i]);
- }
- }
- if (src.pDepthStencilAttachment)
- pDepthStencilAttachment = new safe_VkAttachmentReference2KHR(*src.pDepthStencilAttachment);
- if (src.pPreserveAttachments) {
- pPreserveAttachments = new uint32_t[src.preserveAttachmentCount];
- memcpy ((void *)pPreserveAttachments, (void *)src.pPreserveAttachments, sizeof(uint32_t)*src.preserveAttachmentCount);
- }
-}
-
-safe_VkSubpassDescription2KHR& safe_VkSubpassDescription2KHR::operator=(const safe_VkSubpassDescription2KHR& src)
-{
- if (&src == this) return *this;
-
- if (pInputAttachments)
- delete[] pInputAttachments;
- if (pColorAttachments)
- delete[] pColorAttachments;
- if (pResolveAttachments)
- delete[] pResolveAttachments;
- if (pDepthStencilAttachment)
- delete pDepthStencilAttachment;
- if (pPreserveAttachments)
- delete[] pPreserveAttachments;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- pipelineBindPoint = src.pipelineBindPoint;
- viewMask = src.viewMask;
- inputAttachmentCount = src.inputAttachmentCount;
- pInputAttachments = nullptr;
- colorAttachmentCount = src.colorAttachmentCount;
- pColorAttachments = nullptr;
- pResolveAttachments = nullptr;
- pDepthStencilAttachment = nullptr;
- preserveAttachmentCount = src.preserveAttachmentCount;
- pPreserveAttachments = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (inputAttachmentCount && src.pInputAttachments) {
- pInputAttachments = new safe_VkAttachmentReference2KHR[inputAttachmentCount];
- for (uint32_t i = 0; i < inputAttachmentCount; ++i) {
- pInputAttachments[i].initialize(&src.pInputAttachments[i]);
- }
- }
- if (colorAttachmentCount && src.pColorAttachments) {
- pColorAttachments = new safe_VkAttachmentReference2KHR[colorAttachmentCount];
- for (uint32_t i = 0; i < colorAttachmentCount; ++i) {
- pColorAttachments[i].initialize(&src.pColorAttachments[i]);
- }
- }
- if (colorAttachmentCount && src.pResolveAttachments) {
- pResolveAttachments = new safe_VkAttachmentReference2KHR[colorAttachmentCount];
- for (uint32_t i = 0; i < colorAttachmentCount; ++i) {
- pResolveAttachments[i].initialize(&src.pResolveAttachments[i]);
- }
- }
- if (src.pDepthStencilAttachment)
- pDepthStencilAttachment = new safe_VkAttachmentReference2KHR(*src.pDepthStencilAttachment);
- if (src.pPreserveAttachments) {
- pPreserveAttachments = new uint32_t[src.preserveAttachmentCount];
- memcpy ((void *)pPreserveAttachments, (void *)src.pPreserveAttachments, sizeof(uint32_t)*src.preserveAttachmentCount);
- }
-
- return *this;
-}
-
-safe_VkSubpassDescription2KHR::~safe_VkSubpassDescription2KHR()
-{
- if (pInputAttachments)
- delete[] pInputAttachments;
- if (pColorAttachments)
- delete[] pColorAttachments;
- if (pResolveAttachments)
- delete[] pResolveAttachments;
- if (pDepthStencilAttachment)
- delete pDepthStencilAttachment;
- if (pPreserveAttachments)
- delete[] pPreserveAttachments;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkSubpassDescription2KHR::initialize(const VkSubpassDescription2KHR* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- pipelineBindPoint = in_struct->pipelineBindPoint;
- viewMask = in_struct->viewMask;
- inputAttachmentCount = in_struct->inputAttachmentCount;
- pInputAttachments = nullptr;
- colorAttachmentCount = in_struct->colorAttachmentCount;
- pColorAttachments = nullptr;
- pResolveAttachments = nullptr;
- pDepthStencilAttachment = nullptr;
- preserveAttachmentCount = in_struct->preserveAttachmentCount;
- pPreserveAttachments = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (inputAttachmentCount && in_struct->pInputAttachments) {
- pInputAttachments = new safe_VkAttachmentReference2KHR[inputAttachmentCount];
- for (uint32_t i = 0; i < inputAttachmentCount; ++i) {
- pInputAttachments[i].initialize(&in_struct->pInputAttachments[i]);
- }
- }
- if (colorAttachmentCount && in_struct->pColorAttachments) {
- pColorAttachments = new safe_VkAttachmentReference2KHR[colorAttachmentCount];
- for (uint32_t i = 0; i < colorAttachmentCount; ++i) {
- pColorAttachments[i].initialize(&in_struct->pColorAttachments[i]);
- }
- }
- if (colorAttachmentCount && in_struct->pResolveAttachments) {
- pResolveAttachments = new safe_VkAttachmentReference2KHR[colorAttachmentCount];
- for (uint32_t i = 0; i < colorAttachmentCount; ++i) {
- pResolveAttachments[i].initialize(&in_struct->pResolveAttachments[i]);
- }
- }
- if (in_struct->pDepthStencilAttachment)
- pDepthStencilAttachment = new safe_VkAttachmentReference2KHR(in_struct->pDepthStencilAttachment);
- if (in_struct->pPreserveAttachments) {
- pPreserveAttachments = new uint32_t[in_struct->preserveAttachmentCount];
- memcpy ((void *)pPreserveAttachments, (void *)in_struct->pPreserveAttachments, sizeof(uint32_t)*in_struct->preserveAttachmentCount);
- }
-}
-
-void safe_VkSubpassDescription2KHR::initialize(const safe_VkSubpassDescription2KHR* src)
-{
- sType = src->sType;
- flags = src->flags;
- pipelineBindPoint = src->pipelineBindPoint;
- viewMask = src->viewMask;
- inputAttachmentCount = src->inputAttachmentCount;
- pInputAttachments = nullptr;
- colorAttachmentCount = src->colorAttachmentCount;
- pColorAttachments = nullptr;
- pResolveAttachments = nullptr;
- pDepthStencilAttachment = nullptr;
- preserveAttachmentCount = src->preserveAttachmentCount;
- pPreserveAttachments = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (inputAttachmentCount && src->pInputAttachments) {
- pInputAttachments = new safe_VkAttachmentReference2KHR[inputAttachmentCount];
- for (uint32_t i = 0; i < inputAttachmentCount; ++i) {
- pInputAttachments[i].initialize(&src->pInputAttachments[i]);
- }
- }
- if (colorAttachmentCount && src->pColorAttachments) {
- pColorAttachments = new safe_VkAttachmentReference2KHR[colorAttachmentCount];
- for (uint32_t i = 0; i < colorAttachmentCount; ++i) {
- pColorAttachments[i].initialize(&src->pColorAttachments[i]);
- }
- }
- if (colorAttachmentCount && src->pResolveAttachments) {
- pResolveAttachments = new safe_VkAttachmentReference2KHR[colorAttachmentCount];
- for (uint32_t i = 0; i < colorAttachmentCount; ++i) {
- pResolveAttachments[i].initialize(&src->pResolveAttachments[i]);
- }
- }
- if (src->pDepthStencilAttachment)
- pDepthStencilAttachment = new safe_VkAttachmentReference2KHR(*src->pDepthStencilAttachment);
- if (src->pPreserveAttachments) {
- pPreserveAttachments = new uint32_t[src->preserveAttachmentCount];
- memcpy ((void *)pPreserveAttachments, (void *)src->pPreserveAttachments, sizeof(uint32_t)*src->preserveAttachmentCount);
- }
-}
-
-safe_VkSubpassDependency2KHR::safe_VkSubpassDependency2KHR(const VkSubpassDependency2KHR* in_struct) :
- sType(in_struct->sType),
- srcSubpass(in_struct->srcSubpass),
- dstSubpass(in_struct->dstSubpass),
- srcStageMask(in_struct->srcStageMask),
- dstStageMask(in_struct->dstStageMask),
- srcAccessMask(in_struct->srcAccessMask),
- dstAccessMask(in_struct->dstAccessMask),
- dependencyFlags(in_struct->dependencyFlags),
- viewOffset(in_struct->viewOffset)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkSubpassDependency2KHR::safe_VkSubpassDependency2KHR() :
- pNext(nullptr)
-{}
-
-safe_VkSubpassDependency2KHR::safe_VkSubpassDependency2KHR(const safe_VkSubpassDependency2KHR& src)
-{
- sType = src.sType;
- srcSubpass = src.srcSubpass;
- dstSubpass = src.dstSubpass;
- srcStageMask = src.srcStageMask;
- dstStageMask = src.dstStageMask;
- srcAccessMask = src.srcAccessMask;
- dstAccessMask = src.dstAccessMask;
- dependencyFlags = src.dependencyFlags;
- viewOffset = src.viewOffset;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkSubpassDependency2KHR& safe_VkSubpassDependency2KHR::operator=(const safe_VkSubpassDependency2KHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- srcSubpass = src.srcSubpass;
- dstSubpass = src.dstSubpass;
- srcStageMask = src.srcStageMask;
- dstStageMask = src.dstStageMask;
- srcAccessMask = src.srcAccessMask;
- dstAccessMask = src.dstAccessMask;
- dependencyFlags = src.dependencyFlags;
- viewOffset = src.viewOffset;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkSubpassDependency2KHR::~safe_VkSubpassDependency2KHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkSubpassDependency2KHR::initialize(const VkSubpassDependency2KHR* in_struct)
-{
- sType = in_struct->sType;
- srcSubpass = in_struct->srcSubpass;
- dstSubpass = in_struct->dstSubpass;
- srcStageMask = in_struct->srcStageMask;
- dstStageMask = in_struct->dstStageMask;
- srcAccessMask = in_struct->srcAccessMask;
- dstAccessMask = in_struct->dstAccessMask;
- dependencyFlags = in_struct->dependencyFlags;
- viewOffset = in_struct->viewOffset;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkSubpassDependency2KHR::initialize(const safe_VkSubpassDependency2KHR* src)
-{
- sType = src->sType;
- srcSubpass = src->srcSubpass;
- dstSubpass = src->dstSubpass;
- srcStageMask = src->srcStageMask;
- dstStageMask = src->dstStageMask;
- srcAccessMask = src->srcAccessMask;
- dstAccessMask = src->dstAccessMask;
- dependencyFlags = src->dependencyFlags;
- viewOffset = src->viewOffset;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkRenderPassCreateInfo2KHR::safe_VkRenderPassCreateInfo2KHR(const VkRenderPassCreateInfo2KHR* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- attachmentCount(in_struct->attachmentCount),
- pAttachments(nullptr),
- subpassCount(in_struct->subpassCount),
- pSubpasses(nullptr),
- dependencyCount(in_struct->dependencyCount),
- pDependencies(nullptr),
- correlatedViewMaskCount(in_struct->correlatedViewMaskCount),
- pCorrelatedViewMasks(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (attachmentCount && in_struct->pAttachments) {
- pAttachments = new safe_VkAttachmentDescription2KHR[attachmentCount];
- for (uint32_t i = 0; i < attachmentCount; ++i) {
- pAttachments[i].initialize(&in_struct->pAttachments[i]);
- }
- }
- if (subpassCount && in_struct->pSubpasses) {
- pSubpasses = new safe_VkSubpassDescription2KHR[subpassCount];
- for (uint32_t i = 0; i < subpassCount; ++i) {
- pSubpasses[i].initialize(&in_struct->pSubpasses[i]);
- }
- }
- if (dependencyCount && in_struct->pDependencies) {
- pDependencies = new safe_VkSubpassDependency2KHR[dependencyCount];
- for (uint32_t i = 0; i < dependencyCount; ++i) {
- pDependencies[i].initialize(&in_struct->pDependencies[i]);
- }
- }
- if (in_struct->pCorrelatedViewMasks) {
- pCorrelatedViewMasks = new uint32_t[in_struct->correlatedViewMaskCount];
- memcpy ((void *)pCorrelatedViewMasks, (void *)in_struct->pCorrelatedViewMasks, sizeof(uint32_t)*in_struct->correlatedViewMaskCount);
- }
-}
-
-safe_VkRenderPassCreateInfo2KHR::safe_VkRenderPassCreateInfo2KHR() :
- pNext(nullptr),
- pAttachments(nullptr),
- pSubpasses(nullptr),
- pDependencies(nullptr),
- pCorrelatedViewMasks(nullptr)
-{}
-
-safe_VkRenderPassCreateInfo2KHR::safe_VkRenderPassCreateInfo2KHR(const safe_VkRenderPassCreateInfo2KHR& src)
-{
- sType = src.sType;
- flags = src.flags;
- attachmentCount = src.attachmentCount;
- pAttachments = nullptr;
- subpassCount = src.subpassCount;
- pSubpasses = nullptr;
- dependencyCount = src.dependencyCount;
- pDependencies = nullptr;
- correlatedViewMaskCount = src.correlatedViewMaskCount;
- pCorrelatedViewMasks = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (attachmentCount && src.pAttachments) {
- pAttachments = new safe_VkAttachmentDescription2KHR[attachmentCount];
- for (uint32_t i = 0; i < attachmentCount; ++i) {
- pAttachments[i].initialize(&src.pAttachments[i]);
- }
- }
- if (subpassCount && src.pSubpasses) {
- pSubpasses = new safe_VkSubpassDescription2KHR[subpassCount];
- for (uint32_t i = 0; i < subpassCount; ++i) {
- pSubpasses[i].initialize(&src.pSubpasses[i]);
- }
- }
- if (dependencyCount && src.pDependencies) {
- pDependencies = new safe_VkSubpassDependency2KHR[dependencyCount];
- for (uint32_t i = 0; i < dependencyCount; ++i) {
- pDependencies[i].initialize(&src.pDependencies[i]);
- }
- }
- if (src.pCorrelatedViewMasks) {
- pCorrelatedViewMasks = new uint32_t[src.correlatedViewMaskCount];
- memcpy ((void *)pCorrelatedViewMasks, (void *)src.pCorrelatedViewMasks, sizeof(uint32_t)*src.correlatedViewMaskCount);
- }
-}
-
-safe_VkRenderPassCreateInfo2KHR& safe_VkRenderPassCreateInfo2KHR::operator=(const safe_VkRenderPassCreateInfo2KHR& src)
-{
- if (&src == this) return *this;
-
- if (pAttachments)
- delete[] pAttachments;
- if (pSubpasses)
- delete[] pSubpasses;
- if (pDependencies)
- delete[] pDependencies;
- if (pCorrelatedViewMasks)
- delete[] pCorrelatedViewMasks;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- attachmentCount = src.attachmentCount;
- pAttachments = nullptr;
- subpassCount = src.subpassCount;
- pSubpasses = nullptr;
- dependencyCount = src.dependencyCount;
- pDependencies = nullptr;
- correlatedViewMaskCount = src.correlatedViewMaskCount;
- pCorrelatedViewMasks = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (attachmentCount && src.pAttachments) {
- pAttachments = new safe_VkAttachmentDescription2KHR[attachmentCount];
- for (uint32_t i = 0; i < attachmentCount; ++i) {
- pAttachments[i].initialize(&src.pAttachments[i]);
- }
- }
- if (subpassCount && src.pSubpasses) {
- pSubpasses = new safe_VkSubpassDescription2KHR[subpassCount];
- for (uint32_t i = 0; i < subpassCount; ++i) {
- pSubpasses[i].initialize(&src.pSubpasses[i]);
- }
- }
- if (dependencyCount && src.pDependencies) {
- pDependencies = new safe_VkSubpassDependency2KHR[dependencyCount];
- for (uint32_t i = 0; i < dependencyCount; ++i) {
- pDependencies[i].initialize(&src.pDependencies[i]);
- }
- }
- if (src.pCorrelatedViewMasks) {
- pCorrelatedViewMasks = new uint32_t[src.correlatedViewMaskCount];
- memcpy ((void *)pCorrelatedViewMasks, (void *)src.pCorrelatedViewMasks, sizeof(uint32_t)*src.correlatedViewMaskCount);
- }
-
- return *this;
-}
-
-safe_VkRenderPassCreateInfo2KHR::~safe_VkRenderPassCreateInfo2KHR()
-{
- if (pAttachments)
- delete[] pAttachments;
- if (pSubpasses)
- delete[] pSubpasses;
- if (pDependencies)
- delete[] pDependencies;
- if (pCorrelatedViewMasks)
- delete[] pCorrelatedViewMasks;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkRenderPassCreateInfo2KHR::initialize(const VkRenderPassCreateInfo2KHR* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- attachmentCount = in_struct->attachmentCount;
- pAttachments = nullptr;
- subpassCount = in_struct->subpassCount;
- pSubpasses = nullptr;
- dependencyCount = in_struct->dependencyCount;
- pDependencies = nullptr;
- correlatedViewMaskCount = in_struct->correlatedViewMaskCount;
- pCorrelatedViewMasks = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (attachmentCount && in_struct->pAttachments) {
- pAttachments = new safe_VkAttachmentDescription2KHR[attachmentCount];
- for (uint32_t i = 0; i < attachmentCount; ++i) {
- pAttachments[i].initialize(&in_struct->pAttachments[i]);
- }
- }
- if (subpassCount && in_struct->pSubpasses) {
- pSubpasses = new safe_VkSubpassDescription2KHR[subpassCount];
- for (uint32_t i = 0; i < subpassCount; ++i) {
- pSubpasses[i].initialize(&in_struct->pSubpasses[i]);
- }
- }
- if (dependencyCount && in_struct->pDependencies) {
- pDependencies = new safe_VkSubpassDependency2KHR[dependencyCount];
- for (uint32_t i = 0; i < dependencyCount; ++i) {
- pDependencies[i].initialize(&in_struct->pDependencies[i]);
- }
- }
- if (in_struct->pCorrelatedViewMasks) {
- pCorrelatedViewMasks = new uint32_t[in_struct->correlatedViewMaskCount];
- memcpy ((void *)pCorrelatedViewMasks, (void *)in_struct->pCorrelatedViewMasks, sizeof(uint32_t)*in_struct->correlatedViewMaskCount);
- }
-}
-
-void safe_VkRenderPassCreateInfo2KHR::initialize(const safe_VkRenderPassCreateInfo2KHR* src)
-{
- sType = src->sType;
- flags = src->flags;
- attachmentCount = src->attachmentCount;
- pAttachments = nullptr;
- subpassCount = src->subpassCount;
- pSubpasses = nullptr;
- dependencyCount = src->dependencyCount;
- pDependencies = nullptr;
- correlatedViewMaskCount = src->correlatedViewMaskCount;
- pCorrelatedViewMasks = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (attachmentCount && src->pAttachments) {
- pAttachments = new safe_VkAttachmentDescription2KHR[attachmentCount];
- for (uint32_t i = 0; i < attachmentCount; ++i) {
- pAttachments[i].initialize(&src->pAttachments[i]);
- }
- }
- if (subpassCount && src->pSubpasses) {
- pSubpasses = new safe_VkSubpassDescription2KHR[subpassCount];
- for (uint32_t i = 0; i < subpassCount; ++i) {
- pSubpasses[i].initialize(&src->pSubpasses[i]);
- }
- }
- if (dependencyCount && src->pDependencies) {
- pDependencies = new safe_VkSubpassDependency2KHR[dependencyCount];
- for (uint32_t i = 0; i < dependencyCount; ++i) {
- pDependencies[i].initialize(&src->pDependencies[i]);
- }
- }
- if (src->pCorrelatedViewMasks) {
- pCorrelatedViewMasks = new uint32_t[src->correlatedViewMaskCount];
- memcpy ((void *)pCorrelatedViewMasks, (void *)src->pCorrelatedViewMasks, sizeof(uint32_t)*src->correlatedViewMaskCount);
- }
-}
-
-safe_VkSubpassBeginInfoKHR::safe_VkSubpassBeginInfoKHR(const VkSubpassBeginInfoKHR* in_struct) :
- sType(in_struct->sType),
- contents(in_struct->contents)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkSubpassBeginInfoKHR::safe_VkSubpassBeginInfoKHR() :
- pNext(nullptr)
-{}
-
-safe_VkSubpassBeginInfoKHR::safe_VkSubpassBeginInfoKHR(const safe_VkSubpassBeginInfoKHR& src)
-{
- sType = src.sType;
- contents = src.contents;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkSubpassBeginInfoKHR& safe_VkSubpassBeginInfoKHR::operator=(const safe_VkSubpassBeginInfoKHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- contents = src.contents;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkSubpassBeginInfoKHR::~safe_VkSubpassBeginInfoKHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkSubpassBeginInfoKHR::initialize(const VkSubpassBeginInfoKHR* in_struct)
-{
- sType = in_struct->sType;
- contents = in_struct->contents;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkSubpassBeginInfoKHR::initialize(const safe_VkSubpassBeginInfoKHR* src)
-{
- sType = src->sType;
- contents = src->contents;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkSubpassEndInfoKHR::safe_VkSubpassEndInfoKHR(const VkSubpassEndInfoKHR* in_struct) :
- sType(in_struct->sType)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkSubpassEndInfoKHR::safe_VkSubpassEndInfoKHR() :
- pNext(nullptr)
-{}
-
-safe_VkSubpassEndInfoKHR::safe_VkSubpassEndInfoKHR(const safe_VkSubpassEndInfoKHR& src)
-{
- sType = src.sType;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkSubpassEndInfoKHR& safe_VkSubpassEndInfoKHR::operator=(const safe_VkSubpassEndInfoKHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkSubpassEndInfoKHR::~safe_VkSubpassEndInfoKHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkSubpassEndInfoKHR::initialize(const VkSubpassEndInfoKHR* in_struct)
-{
- sType = in_struct->sType;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkSubpassEndInfoKHR::initialize(const safe_VkSubpassEndInfoKHR* src)
-{
- sType = src->sType;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkSharedPresentSurfaceCapabilitiesKHR::safe_VkSharedPresentSurfaceCapabilitiesKHR(const VkSharedPresentSurfaceCapabilitiesKHR* in_struct) :
- sType(in_struct->sType),
- sharedPresentSupportedUsageFlags(in_struct->sharedPresentSupportedUsageFlags)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkSharedPresentSurfaceCapabilitiesKHR::safe_VkSharedPresentSurfaceCapabilitiesKHR() :
- pNext(nullptr)
-{}
-
-safe_VkSharedPresentSurfaceCapabilitiesKHR::safe_VkSharedPresentSurfaceCapabilitiesKHR(const safe_VkSharedPresentSurfaceCapabilitiesKHR& src)
-{
- sType = src.sType;
- sharedPresentSupportedUsageFlags = src.sharedPresentSupportedUsageFlags;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkSharedPresentSurfaceCapabilitiesKHR& safe_VkSharedPresentSurfaceCapabilitiesKHR::operator=(const safe_VkSharedPresentSurfaceCapabilitiesKHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- sharedPresentSupportedUsageFlags = src.sharedPresentSupportedUsageFlags;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkSharedPresentSurfaceCapabilitiesKHR::~safe_VkSharedPresentSurfaceCapabilitiesKHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkSharedPresentSurfaceCapabilitiesKHR::initialize(const VkSharedPresentSurfaceCapabilitiesKHR* in_struct)
-{
- sType = in_struct->sType;
- sharedPresentSupportedUsageFlags = in_struct->sharedPresentSupportedUsageFlags;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkSharedPresentSurfaceCapabilitiesKHR::initialize(const safe_VkSharedPresentSurfaceCapabilitiesKHR* src)
-{
- sType = src->sType;
- sharedPresentSupportedUsageFlags = src->sharedPresentSupportedUsageFlags;
- pNext = SafePnextCopy(src->pNext);
-}
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-
-safe_VkImportFenceWin32HandleInfoKHR::safe_VkImportFenceWin32HandleInfoKHR(const VkImportFenceWin32HandleInfoKHR* in_struct) :
- sType(in_struct->sType),
- fence(in_struct->fence),
- flags(in_struct->flags),
- handleType(in_struct->handleType),
- handle(in_struct->handle),
- name(in_struct->name)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkImportFenceWin32HandleInfoKHR::safe_VkImportFenceWin32HandleInfoKHR() :
- pNext(nullptr)
-{}
-
-safe_VkImportFenceWin32HandleInfoKHR::safe_VkImportFenceWin32HandleInfoKHR(const safe_VkImportFenceWin32HandleInfoKHR& src)
-{
- sType = src.sType;
- fence = src.fence;
- flags = src.flags;
- handleType = src.handleType;
- handle = src.handle;
- name = src.name;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkImportFenceWin32HandleInfoKHR& safe_VkImportFenceWin32HandleInfoKHR::operator=(const safe_VkImportFenceWin32HandleInfoKHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- fence = src.fence;
- flags = src.flags;
- handleType = src.handleType;
- handle = src.handle;
- name = src.name;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkImportFenceWin32HandleInfoKHR::~safe_VkImportFenceWin32HandleInfoKHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkImportFenceWin32HandleInfoKHR::initialize(const VkImportFenceWin32HandleInfoKHR* in_struct)
-{
- sType = in_struct->sType;
- fence = in_struct->fence;
- flags = in_struct->flags;
- handleType = in_struct->handleType;
- handle = in_struct->handle;
- name = in_struct->name;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkImportFenceWin32HandleInfoKHR::initialize(const safe_VkImportFenceWin32HandleInfoKHR* src)
-{
- sType = src->sType;
- fence = src->fence;
- flags = src->flags;
- handleType = src->handleType;
- handle = src->handle;
- name = src->name;
- pNext = SafePnextCopy(src->pNext);
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-
-safe_VkExportFenceWin32HandleInfoKHR::safe_VkExportFenceWin32HandleInfoKHR(const VkExportFenceWin32HandleInfoKHR* in_struct) :
- sType(in_struct->sType),
- pAttributes(nullptr),
- dwAccess(in_struct->dwAccess),
- name(in_struct->name)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pAttributes) {
- pAttributes = new SECURITY_ATTRIBUTES(*in_struct->pAttributes);
- }
-}
-
-safe_VkExportFenceWin32HandleInfoKHR::safe_VkExportFenceWin32HandleInfoKHR() :
- pNext(nullptr),
- pAttributes(nullptr)
-{}
-
-safe_VkExportFenceWin32HandleInfoKHR::safe_VkExportFenceWin32HandleInfoKHR(const safe_VkExportFenceWin32HandleInfoKHR& src)
-{
- sType = src.sType;
- pAttributes = nullptr;
- dwAccess = src.dwAccess;
- name = src.name;
- pNext = SafePnextCopy(src.pNext);
- if (src.pAttributes) {
- pAttributes = new SECURITY_ATTRIBUTES(*src.pAttributes);
- }
-}
-
-safe_VkExportFenceWin32HandleInfoKHR& safe_VkExportFenceWin32HandleInfoKHR::operator=(const safe_VkExportFenceWin32HandleInfoKHR& src)
-{
- if (&src == this) return *this;
-
- if (pAttributes)
- delete pAttributes;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- pAttributes = nullptr;
- dwAccess = src.dwAccess;
- name = src.name;
- pNext = SafePnextCopy(src.pNext);
- if (src.pAttributes) {
- pAttributes = new SECURITY_ATTRIBUTES(*src.pAttributes);
- }
-
- return *this;
-}
-
-safe_VkExportFenceWin32HandleInfoKHR::~safe_VkExportFenceWin32HandleInfoKHR()
-{
- if (pAttributes)
- delete pAttributes;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkExportFenceWin32HandleInfoKHR::initialize(const VkExportFenceWin32HandleInfoKHR* in_struct)
-{
- sType = in_struct->sType;
- pAttributes = nullptr;
- dwAccess = in_struct->dwAccess;
- name = in_struct->name;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pAttributes) {
- pAttributes = new SECURITY_ATTRIBUTES(*in_struct->pAttributes);
- }
-}
-
-void safe_VkExportFenceWin32HandleInfoKHR::initialize(const safe_VkExportFenceWin32HandleInfoKHR* src)
-{
- sType = src->sType;
- pAttributes = nullptr;
- dwAccess = src->dwAccess;
- name = src->name;
- pNext = SafePnextCopy(src->pNext);
- if (src->pAttributes) {
- pAttributes = new SECURITY_ATTRIBUTES(*src->pAttributes);
- }
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-
-safe_VkFenceGetWin32HandleInfoKHR::safe_VkFenceGetWin32HandleInfoKHR(const VkFenceGetWin32HandleInfoKHR* in_struct) :
- sType(in_struct->sType),
- fence(in_struct->fence),
- handleType(in_struct->handleType)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkFenceGetWin32HandleInfoKHR::safe_VkFenceGetWin32HandleInfoKHR() :
- pNext(nullptr)
-{}
-
-safe_VkFenceGetWin32HandleInfoKHR::safe_VkFenceGetWin32HandleInfoKHR(const safe_VkFenceGetWin32HandleInfoKHR& src)
-{
- sType = src.sType;
- fence = src.fence;
- handleType = src.handleType;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkFenceGetWin32HandleInfoKHR& safe_VkFenceGetWin32HandleInfoKHR::operator=(const safe_VkFenceGetWin32HandleInfoKHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- fence = src.fence;
- handleType = src.handleType;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkFenceGetWin32HandleInfoKHR::~safe_VkFenceGetWin32HandleInfoKHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkFenceGetWin32HandleInfoKHR::initialize(const VkFenceGetWin32HandleInfoKHR* in_struct)
-{
- sType = in_struct->sType;
- fence = in_struct->fence;
- handleType = in_struct->handleType;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkFenceGetWin32HandleInfoKHR::initialize(const safe_VkFenceGetWin32HandleInfoKHR* src)
-{
- sType = src->sType;
- fence = src->fence;
- handleType = src->handleType;
- pNext = SafePnextCopy(src->pNext);
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-
-safe_VkImportFenceFdInfoKHR::safe_VkImportFenceFdInfoKHR(const VkImportFenceFdInfoKHR* in_struct) :
- sType(in_struct->sType),
- fence(in_struct->fence),
- flags(in_struct->flags),
- handleType(in_struct->handleType),
- fd(in_struct->fd)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkImportFenceFdInfoKHR::safe_VkImportFenceFdInfoKHR() :
- pNext(nullptr)
-{}
-
-safe_VkImportFenceFdInfoKHR::safe_VkImportFenceFdInfoKHR(const safe_VkImportFenceFdInfoKHR& src)
-{
- sType = src.sType;
- fence = src.fence;
- flags = src.flags;
- handleType = src.handleType;
- fd = src.fd;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkImportFenceFdInfoKHR& safe_VkImportFenceFdInfoKHR::operator=(const safe_VkImportFenceFdInfoKHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- fence = src.fence;
- flags = src.flags;
- handleType = src.handleType;
- fd = src.fd;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkImportFenceFdInfoKHR::~safe_VkImportFenceFdInfoKHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkImportFenceFdInfoKHR::initialize(const VkImportFenceFdInfoKHR* in_struct)
-{
- sType = in_struct->sType;
- fence = in_struct->fence;
- flags = in_struct->flags;
- handleType = in_struct->handleType;
- fd = in_struct->fd;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkImportFenceFdInfoKHR::initialize(const safe_VkImportFenceFdInfoKHR* src)
-{
- sType = src->sType;
- fence = src->fence;
- flags = src->flags;
- handleType = src->handleType;
- fd = src->fd;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkFenceGetFdInfoKHR::safe_VkFenceGetFdInfoKHR(const VkFenceGetFdInfoKHR* in_struct) :
- sType(in_struct->sType),
- fence(in_struct->fence),
- handleType(in_struct->handleType)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkFenceGetFdInfoKHR::safe_VkFenceGetFdInfoKHR() :
- pNext(nullptr)
-{}
-
-safe_VkFenceGetFdInfoKHR::safe_VkFenceGetFdInfoKHR(const safe_VkFenceGetFdInfoKHR& src)
-{
- sType = src.sType;
- fence = src.fence;
- handleType = src.handleType;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkFenceGetFdInfoKHR& safe_VkFenceGetFdInfoKHR::operator=(const safe_VkFenceGetFdInfoKHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- fence = src.fence;
- handleType = src.handleType;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkFenceGetFdInfoKHR::~safe_VkFenceGetFdInfoKHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkFenceGetFdInfoKHR::initialize(const VkFenceGetFdInfoKHR* in_struct)
-{
- sType = in_struct->sType;
- fence = in_struct->fence;
- handleType = in_struct->handleType;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkFenceGetFdInfoKHR::initialize(const safe_VkFenceGetFdInfoKHR* src)
-{
- sType = src->sType;
- fence = src->fence;
- handleType = src->handleType;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceSurfaceInfo2KHR::safe_VkPhysicalDeviceSurfaceInfo2KHR(const VkPhysicalDeviceSurfaceInfo2KHR* in_struct) :
- sType(in_struct->sType),
- surface(in_struct->surface)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceSurfaceInfo2KHR::safe_VkPhysicalDeviceSurfaceInfo2KHR() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceSurfaceInfo2KHR::safe_VkPhysicalDeviceSurfaceInfo2KHR(const safe_VkPhysicalDeviceSurfaceInfo2KHR& src)
-{
- sType = src.sType;
- surface = src.surface;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceSurfaceInfo2KHR& safe_VkPhysicalDeviceSurfaceInfo2KHR::operator=(const safe_VkPhysicalDeviceSurfaceInfo2KHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- surface = src.surface;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceSurfaceInfo2KHR::~safe_VkPhysicalDeviceSurfaceInfo2KHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceSurfaceInfo2KHR::initialize(const VkPhysicalDeviceSurfaceInfo2KHR* in_struct)
-{
- sType = in_struct->sType;
- surface = in_struct->surface;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceSurfaceInfo2KHR::initialize(const safe_VkPhysicalDeviceSurfaceInfo2KHR* src)
-{
- sType = src->sType;
- surface = src->surface;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkSurfaceCapabilities2KHR::safe_VkSurfaceCapabilities2KHR(const VkSurfaceCapabilities2KHR* in_struct) :
- sType(in_struct->sType),
- surfaceCapabilities(in_struct->surfaceCapabilities)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkSurfaceCapabilities2KHR::safe_VkSurfaceCapabilities2KHR() :
- pNext(nullptr)
-{}
-
-safe_VkSurfaceCapabilities2KHR::safe_VkSurfaceCapabilities2KHR(const safe_VkSurfaceCapabilities2KHR& src)
-{
- sType = src.sType;
- surfaceCapabilities = src.surfaceCapabilities;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkSurfaceCapabilities2KHR& safe_VkSurfaceCapabilities2KHR::operator=(const safe_VkSurfaceCapabilities2KHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- surfaceCapabilities = src.surfaceCapabilities;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkSurfaceCapabilities2KHR::~safe_VkSurfaceCapabilities2KHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkSurfaceCapabilities2KHR::initialize(const VkSurfaceCapabilities2KHR* in_struct)
-{
- sType = in_struct->sType;
- surfaceCapabilities = in_struct->surfaceCapabilities;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkSurfaceCapabilities2KHR::initialize(const safe_VkSurfaceCapabilities2KHR* src)
-{
- sType = src->sType;
- surfaceCapabilities = src->surfaceCapabilities;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkSurfaceFormat2KHR::safe_VkSurfaceFormat2KHR(const VkSurfaceFormat2KHR* in_struct) :
- sType(in_struct->sType),
- surfaceFormat(in_struct->surfaceFormat)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkSurfaceFormat2KHR::safe_VkSurfaceFormat2KHR() :
- pNext(nullptr)
-{}
-
-safe_VkSurfaceFormat2KHR::safe_VkSurfaceFormat2KHR(const safe_VkSurfaceFormat2KHR& src)
-{
- sType = src.sType;
- surfaceFormat = src.surfaceFormat;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkSurfaceFormat2KHR& safe_VkSurfaceFormat2KHR::operator=(const safe_VkSurfaceFormat2KHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- surfaceFormat = src.surfaceFormat;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkSurfaceFormat2KHR::~safe_VkSurfaceFormat2KHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkSurfaceFormat2KHR::initialize(const VkSurfaceFormat2KHR* in_struct)
-{
- sType = in_struct->sType;
- surfaceFormat = in_struct->surfaceFormat;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkSurfaceFormat2KHR::initialize(const safe_VkSurfaceFormat2KHR* src)
-{
- sType = src->sType;
- surfaceFormat = src->surfaceFormat;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkDisplayProperties2KHR::safe_VkDisplayProperties2KHR(const VkDisplayProperties2KHR* in_struct) :
- sType(in_struct->sType),
- displayProperties(&in_struct->displayProperties)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkDisplayProperties2KHR::safe_VkDisplayProperties2KHR() :
- pNext(nullptr)
-{}
-
-safe_VkDisplayProperties2KHR::safe_VkDisplayProperties2KHR(const safe_VkDisplayProperties2KHR& src)
-{
- sType = src.sType;
- displayProperties.initialize(&src.displayProperties);
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkDisplayProperties2KHR& safe_VkDisplayProperties2KHR::operator=(const safe_VkDisplayProperties2KHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- displayProperties.initialize(&src.displayProperties);
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkDisplayProperties2KHR::~safe_VkDisplayProperties2KHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDisplayProperties2KHR::initialize(const VkDisplayProperties2KHR* in_struct)
-{
- sType = in_struct->sType;
- displayProperties.initialize(&in_struct->displayProperties);
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkDisplayProperties2KHR::initialize(const safe_VkDisplayProperties2KHR* src)
-{
- sType = src->sType;
- displayProperties.initialize(&src->displayProperties);
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkDisplayPlaneProperties2KHR::safe_VkDisplayPlaneProperties2KHR(const VkDisplayPlaneProperties2KHR* in_struct) :
- sType(in_struct->sType),
- displayPlaneProperties(in_struct->displayPlaneProperties)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkDisplayPlaneProperties2KHR::safe_VkDisplayPlaneProperties2KHR() :
- pNext(nullptr)
-{}
-
-safe_VkDisplayPlaneProperties2KHR::safe_VkDisplayPlaneProperties2KHR(const safe_VkDisplayPlaneProperties2KHR& src)
-{
- sType = src.sType;
- displayPlaneProperties = src.displayPlaneProperties;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkDisplayPlaneProperties2KHR& safe_VkDisplayPlaneProperties2KHR::operator=(const safe_VkDisplayPlaneProperties2KHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- displayPlaneProperties = src.displayPlaneProperties;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkDisplayPlaneProperties2KHR::~safe_VkDisplayPlaneProperties2KHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDisplayPlaneProperties2KHR::initialize(const VkDisplayPlaneProperties2KHR* in_struct)
-{
- sType = in_struct->sType;
- displayPlaneProperties = in_struct->displayPlaneProperties;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkDisplayPlaneProperties2KHR::initialize(const safe_VkDisplayPlaneProperties2KHR* src)
-{
- sType = src->sType;
- displayPlaneProperties = src->displayPlaneProperties;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkDisplayModeProperties2KHR::safe_VkDisplayModeProperties2KHR(const VkDisplayModeProperties2KHR* in_struct) :
- sType(in_struct->sType),
- displayModeProperties(in_struct->displayModeProperties)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkDisplayModeProperties2KHR::safe_VkDisplayModeProperties2KHR() :
- pNext(nullptr)
-{}
-
-safe_VkDisplayModeProperties2KHR::safe_VkDisplayModeProperties2KHR(const safe_VkDisplayModeProperties2KHR& src)
-{
- sType = src.sType;
- displayModeProperties = src.displayModeProperties;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkDisplayModeProperties2KHR& safe_VkDisplayModeProperties2KHR::operator=(const safe_VkDisplayModeProperties2KHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- displayModeProperties = src.displayModeProperties;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkDisplayModeProperties2KHR::~safe_VkDisplayModeProperties2KHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDisplayModeProperties2KHR::initialize(const VkDisplayModeProperties2KHR* in_struct)
-{
- sType = in_struct->sType;
- displayModeProperties = in_struct->displayModeProperties;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkDisplayModeProperties2KHR::initialize(const safe_VkDisplayModeProperties2KHR* src)
-{
- sType = src->sType;
- displayModeProperties = src->displayModeProperties;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkDisplayPlaneInfo2KHR::safe_VkDisplayPlaneInfo2KHR(const VkDisplayPlaneInfo2KHR* in_struct) :
- sType(in_struct->sType),
- mode(in_struct->mode),
- planeIndex(in_struct->planeIndex)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkDisplayPlaneInfo2KHR::safe_VkDisplayPlaneInfo2KHR() :
- pNext(nullptr)
-{}
-
-safe_VkDisplayPlaneInfo2KHR::safe_VkDisplayPlaneInfo2KHR(const safe_VkDisplayPlaneInfo2KHR& src)
-{
- sType = src.sType;
- mode = src.mode;
- planeIndex = src.planeIndex;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkDisplayPlaneInfo2KHR& safe_VkDisplayPlaneInfo2KHR::operator=(const safe_VkDisplayPlaneInfo2KHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- mode = src.mode;
- planeIndex = src.planeIndex;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkDisplayPlaneInfo2KHR::~safe_VkDisplayPlaneInfo2KHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDisplayPlaneInfo2KHR::initialize(const VkDisplayPlaneInfo2KHR* in_struct)
-{
- sType = in_struct->sType;
- mode = in_struct->mode;
- planeIndex = in_struct->planeIndex;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkDisplayPlaneInfo2KHR::initialize(const safe_VkDisplayPlaneInfo2KHR* src)
-{
- sType = src->sType;
- mode = src->mode;
- planeIndex = src->planeIndex;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkDisplayPlaneCapabilities2KHR::safe_VkDisplayPlaneCapabilities2KHR(const VkDisplayPlaneCapabilities2KHR* in_struct) :
- sType(in_struct->sType),
- capabilities(in_struct->capabilities)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkDisplayPlaneCapabilities2KHR::safe_VkDisplayPlaneCapabilities2KHR() :
- pNext(nullptr)
-{}
-
-safe_VkDisplayPlaneCapabilities2KHR::safe_VkDisplayPlaneCapabilities2KHR(const safe_VkDisplayPlaneCapabilities2KHR& src)
-{
- sType = src.sType;
- capabilities = src.capabilities;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkDisplayPlaneCapabilities2KHR& safe_VkDisplayPlaneCapabilities2KHR::operator=(const safe_VkDisplayPlaneCapabilities2KHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- capabilities = src.capabilities;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkDisplayPlaneCapabilities2KHR::~safe_VkDisplayPlaneCapabilities2KHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDisplayPlaneCapabilities2KHR::initialize(const VkDisplayPlaneCapabilities2KHR* in_struct)
-{
- sType = in_struct->sType;
- capabilities = in_struct->capabilities;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkDisplayPlaneCapabilities2KHR::initialize(const safe_VkDisplayPlaneCapabilities2KHR* src)
-{
- sType = src->sType;
- capabilities = src->capabilities;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkImageFormatListCreateInfoKHR::safe_VkImageFormatListCreateInfoKHR(const VkImageFormatListCreateInfoKHR* in_struct) :
- sType(in_struct->sType),
- viewFormatCount(in_struct->viewFormatCount),
- pViewFormats(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pViewFormats) {
- pViewFormats = new VkFormat[in_struct->viewFormatCount];
- memcpy ((void *)pViewFormats, (void *)in_struct->pViewFormats, sizeof(VkFormat)*in_struct->viewFormatCount);
- }
-}
-
-safe_VkImageFormatListCreateInfoKHR::safe_VkImageFormatListCreateInfoKHR() :
- pNext(nullptr),
- pViewFormats(nullptr)
-{}
-
-safe_VkImageFormatListCreateInfoKHR::safe_VkImageFormatListCreateInfoKHR(const safe_VkImageFormatListCreateInfoKHR& src)
-{
- sType = src.sType;
- viewFormatCount = src.viewFormatCount;
- pViewFormats = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pViewFormats) {
- pViewFormats = new VkFormat[src.viewFormatCount];
- memcpy ((void *)pViewFormats, (void *)src.pViewFormats, sizeof(VkFormat)*src.viewFormatCount);
- }
-}
-
-safe_VkImageFormatListCreateInfoKHR& safe_VkImageFormatListCreateInfoKHR::operator=(const safe_VkImageFormatListCreateInfoKHR& src)
-{
- if (&src == this) return *this;
-
- if (pViewFormats)
- delete[] pViewFormats;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- viewFormatCount = src.viewFormatCount;
- pViewFormats = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pViewFormats) {
- pViewFormats = new VkFormat[src.viewFormatCount];
- memcpy ((void *)pViewFormats, (void *)src.pViewFormats, sizeof(VkFormat)*src.viewFormatCount);
- }
-
- return *this;
-}
-
-safe_VkImageFormatListCreateInfoKHR::~safe_VkImageFormatListCreateInfoKHR()
-{
- if (pViewFormats)
- delete[] pViewFormats;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkImageFormatListCreateInfoKHR::initialize(const VkImageFormatListCreateInfoKHR* in_struct)
-{
- sType = in_struct->sType;
- viewFormatCount = in_struct->viewFormatCount;
- pViewFormats = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pViewFormats) {
- pViewFormats = new VkFormat[in_struct->viewFormatCount];
- memcpy ((void *)pViewFormats, (void *)in_struct->pViewFormats, sizeof(VkFormat)*in_struct->viewFormatCount);
- }
-}
-
-void safe_VkImageFormatListCreateInfoKHR::initialize(const safe_VkImageFormatListCreateInfoKHR* src)
-{
- sType = src->sType;
- viewFormatCount = src->viewFormatCount;
- pViewFormats = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (src->pViewFormats) {
- pViewFormats = new VkFormat[src->viewFormatCount];
- memcpy ((void *)pViewFormats, (void *)src->pViewFormats, sizeof(VkFormat)*src->viewFormatCount);
- }
-}
-
-safe_VkPhysicalDevice8BitStorageFeaturesKHR::safe_VkPhysicalDevice8BitStorageFeaturesKHR(const VkPhysicalDevice8BitStorageFeaturesKHR* in_struct) :
- sType(in_struct->sType),
- storageBuffer8BitAccess(in_struct->storageBuffer8BitAccess),
- uniformAndStorageBuffer8BitAccess(in_struct->uniformAndStorageBuffer8BitAccess),
- storagePushConstant8(in_struct->storagePushConstant8)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDevice8BitStorageFeaturesKHR::safe_VkPhysicalDevice8BitStorageFeaturesKHR() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDevice8BitStorageFeaturesKHR::safe_VkPhysicalDevice8BitStorageFeaturesKHR(const safe_VkPhysicalDevice8BitStorageFeaturesKHR& src)
-{
- sType = src.sType;
- storageBuffer8BitAccess = src.storageBuffer8BitAccess;
- uniformAndStorageBuffer8BitAccess = src.uniformAndStorageBuffer8BitAccess;
- storagePushConstant8 = src.storagePushConstant8;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDevice8BitStorageFeaturesKHR& safe_VkPhysicalDevice8BitStorageFeaturesKHR::operator=(const safe_VkPhysicalDevice8BitStorageFeaturesKHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- storageBuffer8BitAccess = src.storageBuffer8BitAccess;
- uniformAndStorageBuffer8BitAccess = src.uniformAndStorageBuffer8BitAccess;
- storagePushConstant8 = src.storagePushConstant8;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDevice8BitStorageFeaturesKHR::~safe_VkPhysicalDevice8BitStorageFeaturesKHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDevice8BitStorageFeaturesKHR::initialize(const VkPhysicalDevice8BitStorageFeaturesKHR* in_struct)
-{
- sType = in_struct->sType;
- storageBuffer8BitAccess = in_struct->storageBuffer8BitAccess;
- uniformAndStorageBuffer8BitAccess = in_struct->uniformAndStorageBuffer8BitAccess;
- storagePushConstant8 = in_struct->storagePushConstant8;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDevice8BitStorageFeaturesKHR::initialize(const safe_VkPhysicalDevice8BitStorageFeaturesKHR* src)
-{
- sType = src->sType;
- storageBuffer8BitAccess = src->storageBuffer8BitAccess;
- uniformAndStorageBuffer8BitAccess = src->uniformAndStorageBuffer8BitAccess;
- storagePushConstant8 = src->storagePushConstant8;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR::safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR(const VkPhysicalDeviceShaderAtomicInt64FeaturesKHR* in_struct) :
- sType(in_struct->sType),
- shaderBufferInt64Atomics(in_struct->shaderBufferInt64Atomics),
- shaderSharedInt64Atomics(in_struct->shaderSharedInt64Atomics)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR::safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR::safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR(const safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR& src)
-{
- sType = src.sType;
- shaderBufferInt64Atomics = src.shaderBufferInt64Atomics;
- shaderSharedInt64Atomics = src.shaderSharedInt64Atomics;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR& safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR::operator=(const safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- shaderBufferInt64Atomics = src.shaderBufferInt64Atomics;
- shaderSharedInt64Atomics = src.shaderSharedInt64Atomics;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR::~safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR::initialize(const VkPhysicalDeviceShaderAtomicInt64FeaturesKHR* in_struct)
-{
- sType = in_struct->sType;
- shaderBufferInt64Atomics = in_struct->shaderBufferInt64Atomics;
- shaderSharedInt64Atomics = in_struct->shaderSharedInt64Atomics;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR::initialize(const safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR* src)
-{
- sType = src->sType;
- shaderBufferInt64Atomics = src->shaderBufferInt64Atomics;
- shaderSharedInt64Atomics = src->shaderSharedInt64Atomics;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceDriverPropertiesKHR::safe_VkPhysicalDeviceDriverPropertiesKHR(const VkPhysicalDeviceDriverPropertiesKHR* in_struct) :
- sType(in_struct->sType),
- driverID(in_struct->driverID),
- conformanceVersion(in_struct->conformanceVersion)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- for (uint32_t i = 0; i < VK_MAX_DRIVER_NAME_SIZE_KHR; ++i) {
- driverName[i] = in_struct->driverName[i];
- }
- for (uint32_t i = 0; i < VK_MAX_DRIVER_INFO_SIZE_KHR; ++i) {
- driverInfo[i] = in_struct->driverInfo[i];
- }
-}
-
-safe_VkPhysicalDeviceDriverPropertiesKHR::safe_VkPhysicalDeviceDriverPropertiesKHR() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceDriverPropertiesKHR::safe_VkPhysicalDeviceDriverPropertiesKHR(const safe_VkPhysicalDeviceDriverPropertiesKHR& src)
-{
- sType = src.sType;
- driverID = src.driverID;
- conformanceVersion = src.conformanceVersion;
- pNext = SafePnextCopy(src.pNext);
- for (uint32_t i = 0; i < VK_MAX_DRIVER_NAME_SIZE_KHR; ++i) {
- driverName[i] = src.driverName[i];
- }
- for (uint32_t i = 0; i < VK_MAX_DRIVER_INFO_SIZE_KHR; ++i) {
- driverInfo[i] = src.driverInfo[i];
- }
-}
-
-safe_VkPhysicalDeviceDriverPropertiesKHR& safe_VkPhysicalDeviceDriverPropertiesKHR::operator=(const safe_VkPhysicalDeviceDriverPropertiesKHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- driverID = src.driverID;
- conformanceVersion = src.conformanceVersion;
- pNext = SafePnextCopy(src.pNext);
- for (uint32_t i = 0; i < VK_MAX_DRIVER_NAME_SIZE_KHR; ++i) {
- driverName[i] = src.driverName[i];
- }
- for (uint32_t i = 0; i < VK_MAX_DRIVER_INFO_SIZE_KHR; ++i) {
- driverInfo[i] = src.driverInfo[i];
- }
-
- return *this;
-}
-
-safe_VkPhysicalDeviceDriverPropertiesKHR::~safe_VkPhysicalDeviceDriverPropertiesKHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceDriverPropertiesKHR::initialize(const VkPhysicalDeviceDriverPropertiesKHR* in_struct)
-{
- sType = in_struct->sType;
- driverID = in_struct->driverID;
- conformanceVersion = in_struct->conformanceVersion;
- pNext = SafePnextCopy(in_struct->pNext);
- for (uint32_t i = 0; i < VK_MAX_DRIVER_NAME_SIZE_KHR; ++i) {
- driverName[i] = in_struct->driverName[i];
- }
- for (uint32_t i = 0; i < VK_MAX_DRIVER_INFO_SIZE_KHR; ++i) {
- driverInfo[i] = in_struct->driverInfo[i];
- }
-}
-
-void safe_VkPhysicalDeviceDriverPropertiesKHR::initialize(const safe_VkPhysicalDeviceDriverPropertiesKHR* src)
-{
- sType = src->sType;
- driverID = src->driverID;
- conformanceVersion = src->conformanceVersion;
- pNext = SafePnextCopy(src->pNext);
- for (uint32_t i = 0; i < VK_MAX_DRIVER_NAME_SIZE_KHR; ++i) {
- driverName[i] = src->driverName[i];
- }
- for (uint32_t i = 0; i < VK_MAX_DRIVER_INFO_SIZE_KHR; ++i) {
- driverInfo[i] = src->driverInfo[i];
- }
-}
-
-safe_VkPhysicalDeviceFloatControlsPropertiesKHR::safe_VkPhysicalDeviceFloatControlsPropertiesKHR(const VkPhysicalDeviceFloatControlsPropertiesKHR* in_struct) :
- sType(in_struct->sType),
- denormBehaviorIndependence(in_struct->denormBehaviorIndependence),
- roundingModeIndependence(in_struct->roundingModeIndependence),
- shaderSignedZeroInfNanPreserveFloat16(in_struct->shaderSignedZeroInfNanPreserveFloat16),
- shaderSignedZeroInfNanPreserveFloat32(in_struct->shaderSignedZeroInfNanPreserveFloat32),
- shaderSignedZeroInfNanPreserveFloat64(in_struct->shaderSignedZeroInfNanPreserveFloat64),
- shaderDenormPreserveFloat16(in_struct->shaderDenormPreserveFloat16),
- shaderDenormPreserveFloat32(in_struct->shaderDenormPreserveFloat32),
- shaderDenormPreserveFloat64(in_struct->shaderDenormPreserveFloat64),
- shaderDenormFlushToZeroFloat16(in_struct->shaderDenormFlushToZeroFloat16),
- shaderDenormFlushToZeroFloat32(in_struct->shaderDenormFlushToZeroFloat32),
- shaderDenormFlushToZeroFloat64(in_struct->shaderDenormFlushToZeroFloat64),
- shaderRoundingModeRTEFloat16(in_struct->shaderRoundingModeRTEFloat16),
- shaderRoundingModeRTEFloat32(in_struct->shaderRoundingModeRTEFloat32),
- shaderRoundingModeRTEFloat64(in_struct->shaderRoundingModeRTEFloat64),
- shaderRoundingModeRTZFloat16(in_struct->shaderRoundingModeRTZFloat16),
- shaderRoundingModeRTZFloat32(in_struct->shaderRoundingModeRTZFloat32),
- shaderRoundingModeRTZFloat64(in_struct->shaderRoundingModeRTZFloat64)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceFloatControlsPropertiesKHR::safe_VkPhysicalDeviceFloatControlsPropertiesKHR() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceFloatControlsPropertiesKHR::safe_VkPhysicalDeviceFloatControlsPropertiesKHR(const safe_VkPhysicalDeviceFloatControlsPropertiesKHR& src)
-{
- sType = src.sType;
- denormBehaviorIndependence = src.denormBehaviorIndependence;
- roundingModeIndependence = src.roundingModeIndependence;
- shaderSignedZeroInfNanPreserveFloat16 = src.shaderSignedZeroInfNanPreserveFloat16;
- shaderSignedZeroInfNanPreserveFloat32 = src.shaderSignedZeroInfNanPreserveFloat32;
- shaderSignedZeroInfNanPreserveFloat64 = src.shaderSignedZeroInfNanPreserveFloat64;
- shaderDenormPreserveFloat16 = src.shaderDenormPreserveFloat16;
- shaderDenormPreserveFloat32 = src.shaderDenormPreserveFloat32;
- shaderDenormPreserveFloat64 = src.shaderDenormPreserveFloat64;
- shaderDenormFlushToZeroFloat16 = src.shaderDenormFlushToZeroFloat16;
- shaderDenormFlushToZeroFloat32 = src.shaderDenormFlushToZeroFloat32;
- shaderDenormFlushToZeroFloat64 = src.shaderDenormFlushToZeroFloat64;
- shaderRoundingModeRTEFloat16 = src.shaderRoundingModeRTEFloat16;
- shaderRoundingModeRTEFloat32 = src.shaderRoundingModeRTEFloat32;
- shaderRoundingModeRTEFloat64 = src.shaderRoundingModeRTEFloat64;
- shaderRoundingModeRTZFloat16 = src.shaderRoundingModeRTZFloat16;
- shaderRoundingModeRTZFloat32 = src.shaderRoundingModeRTZFloat32;
- shaderRoundingModeRTZFloat64 = src.shaderRoundingModeRTZFloat64;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceFloatControlsPropertiesKHR& safe_VkPhysicalDeviceFloatControlsPropertiesKHR::operator=(const safe_VkPhysicalDeviceFloatControlsPropertiesKHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- denormBehaviorIndependence = src.denormBehaviorIndependence;
- roundingModeIndependence = src.roundingModeIndependence;
- shaderSignedZeroInfNanPreserveFloat16 = src.shaderSignedZeroInfNanPreserveFloat16;
- shaderSignedZeroInfNanPreserveFloat32 = src.shaderSignedZeroInfNanPreserveFloat32;
- shaderSignedZeroInfNanPreserveFloat64 = src.shaderSignedZeroInfNanPreserveFloat64;
- shaderDenormPreserveFloat16 = src.shaderDenormPreserveFloat16;
- shaderDenormPreserveFloat32 = src.shaderDenormPreserveFloat32;
- shaderDenormPreserveFloat64 = src.shaderDenormPreserveFloat64;
- shaderDenormFlushToZeroFloat16 = src.shaderDenormFlushToZeroFloat16;
- shaderDenormFlushToZeroFloat32 = src.shaderDenormFlushToZeroFloat32;
- shaderDenormFlushToZeroFloat64 = src.shaderDenormFlushToZeroFloat64;
- shaderRoundingModeRTEFloat16 = src.shaderRoundingModeRTEFloat16;
- shaderRoundingModeRTEFloat32 = src.shaderRoundingModeRTEFloat32;
- shaderRoundingModeRTEFloat64 = src.shaderRoundingModeRTEFloat64;
- shaderRoundingModeRTZFloat16 = src.shaderRoundingModeRTZFloat16;
- shaderRoundingModeRTZFloat32 = src.shaderRoundingModeRTZFloat32;
- shaderRoundingModeRTZFloat64 = src.shaderRoundingModeRTZFloat64;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceFloatControlsPropertiesKHR::~safe_VkPhysicalDeviceFloatControlsPropertiesKHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceFloatControlsPropertiesKHR::initialize(const VkPhysicalDeviceFloatControlsPropertiesKHR* in_struct)
-{
- sType = in_struct->sType;
- denormBehaviorIndependence = in_struct->denormBehaviorIndependence;
- roundingModeIndependence = in_struct->roundingModeIndependence;
- shaderSignedZeroInfNanPreserveFloat16 = in_struct->shaderSignedZeroInfNanPreserveFloat16;
- shaderSignedZeroInfNanPreserveFloat32 = in_struct->shaderSignedZeroInfNanPreserveFloat32;
- shaderSignedZeroInfNanPreserveFloat64 = in_struct->shaderSignedZeroInfNanPreserveFloat64;
- shaderDenormPreserveFloat16 = in_struct->shaderDenormPreserveFloat16;
- shaderDenormPreserveFloat32 = in_struct->shaderDenormPreserveFloat32;
- shaderDenormPreserveFloat64 = in_struct->shaderDenormPreserveFloat64;
- shaderDenormFlushToZeroFloat16 = in_struct->shaderDenormFlushToZeroFloat16;
- shaderDenormFlushToZeroFloat32 = in_struct->shaderDenormFlushToZeroFloat32;
- shaderDenormFlushToZeroFloat64 = in_struct->shaderDenormFlushToZeroFloat64;
- shaderRoundingModeRTEFloat16 = in_struct->shaderRoundingModeRTEFloat16;
- shaderRoundingModeRTEFloat32 = in_struct->shaderRoundingModeRTEFloat32;
- shaderRoundingModeRTEFloat64 = in_struct->shaderRoundingModeRTEFloat64;
- shaderRoundingModeRTZFloat16 = in_struct->shaderRoundingModeRTZFloat16;
- shaderRoundingModeRTZFloat32 = in_struct->shaderRoundingModeRTZFloat32;
- shaderRoundingModeRTZFloat64 = in_struct->shaderRoundingModeRTZFloat64;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceFloatControlsPropertiesKHR::initialize(const safe_VkPhysicalDeviceFloatControlsPropertiesKHR* src)
-{
- sType = src->sType;
- denormBehaviorIndependence = src->denormBehaviorIndependence;
- roundingModeIndependence = src->roundingModeIndependence;
- shaderSignedZeroInfNanPreserveFloat16 = src->shaderSignedZeroInfNanPreserveFloat16;
- shaderSignedZeroInfNanPreserveFloat32 = src->shaderSignedZeroInfNanPreserveFloat32;
- shaderSignedZeroInfNanPreserveFloat64 = src->shaderSignedZeroInfNanPreserveFloat64;
- shaderDenormPreserveFloat16 = src->shaderDenormPreserveFloat16;
- shaderDenormPreserveFloat32 = src->shaderDenormPreserveFloat32;
- shaderDenormPreserveFloat64 = src->shaderDenormPreserveFloat64;
- shaderDenormFlushToZeroFloat16 = src->shaderDenormFlushToZeroFloat16;
- shaderDenormFlushToZeroFloat32 = src->shaderDenormFlushToZeroFloat32;
- shaderDenormFlushToZeroFloat64 = src->shaderDenormFlushToZeroFloat64;
- shaderRoundingModeRTEFloat16 = src->shaderRoundingModeRTEFloat16;
- shaderRoundingModeRTEFloat32 = src->shaderRoundingModeRTEFloat32;
- shaderRoundingModeRTEFloat64 = src->shaderRoundingModeRTEFloat64;
- shaderRoundingModeRTZFloat16 = src->shaderRoundingModeRTZFloat16;
- shaderRoundingModeRTZFloat32 = src->shaderRoundingModeRTZFloat32;
- shaderRoundingModeRTZFloat64 = src->shaderRoundingModeRTZFloat64;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkSubpassDescriptionDepthStencilResolveKHR::safe_VkSubpassDescriptionDepthStencilResolveKHR(const VkSubpassDescriptionDepthStencilResolveKHR* in_struct) :
- sType(in_struct->sType),
- depthResolveMode(in_struct->depthResolveMode),
- stencilResolveMode(in_struct->stencilResolveMode),
- pDepthStencilResolveAttachment(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pDepthStencilResolveAttachment)
- pDepthStencilResolveAttachment = new safe_VkAttachmentReference2KHR(in_struct->pDepthStencilResolveAttachment);
-}
-
-safe_VkSubpassDescriptionDepthStencilResolveKHR::safe_VkSubpassDescriptionDepthStencilResolveKHR() :
- pNext(nullptr),
- pDepthStencilResolveAttachment(nullptr)
-{}
-
-safe_VkSubpassDescriptionDepthStencilResolveKHR::safe_VkSubpassDescriptionDepthStencilResolveKHR(const safe_VkSubpassDescriptionDepthStencilResolveKHR& src)
-{
- sType = src.sType;
- depthResolveMode = src.depthResolveMode;
- stencilResolveMode = src.stencilResolveMode;
- pDepthStencilResolveAttachment = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pDepthStencilResolveAttachment)
- pDepthStencilResolveAttachment = new safe_VkAttachmentReference2KHR(*src.pDepthStencilResolveAttachment);
-}
-
-safe_VkSubpassDescriptionDepthStencilResolveKHR& safe_VkSubpassDescriptionDepthStencilResolveKHR::operator=(const safe_VkSubpassDescriptionDepthStencilResolveKHR& src)
-{
- if (&src == this) return *this;
-
- if (pDepthStencilResolveAttachment)
- delete pDepthStencilResolveAttachment;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- depthResolveMode = src.depthResolveMode;
- stencilResolveMode = src.stencilResolveMode;
- pDepthStencilResolveAttachment = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pDepthStencilResolveAttachment)
- pDepthStencilResolveAttachment = new safe_VkAttachmentReference2KHR(*src.pDepthStencilResolveAttachment);
-
- return *this;
-}
-
-safe_VkSubpassDescriptionDepthStencilResolveKHR::~safe_VkSubpassDescriptionDepthStencilResolveKHR()
-{
- if (pDepthStencilResolveAttachment)
- delete pDepthStencilResolveAttachment;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkSubpassDescriptionDepthStencilResolveKHR::initialize(const VkSubpassDescriptionDepthStencilResolveKHR* in_struct)
-{
- sType = in_struct->sType;
- depthResolveMode = in_struct->depthResolveMode;
- stencilResolveMode = in_struct->stencilResolveMode;
- pDepthStencilResolveAttachment = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pDepthStencilResolveAttachment)
- pDepthStencilResolveAttachment = new safe_VkAttachmentReference2KHR(in_struct->pDepthStencilResolveAttachment);
-}
-
-void safe_VkSubpassDescriptionDepthStencilResolveKHR::initialize(const safe_VkSubpassDescriptionDepthStencilResolveKHR* src)
-{
- sType = src->sType;
- depthResolveMode = src->depthResolveMode;
- stencilResolveMode = src->stencilResolveMode;
- pDepthStencilResolveAttachment = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (src->pDepthStencilResolveAttachment)
- pDepthStencilResolveAttachment = new safe_VkAttachmentReference2KHR(*src->pDepthStencilResolveAttachment);
-}
-
-safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR::safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR(const VkPhysicalDeviceDepthStencilResolvePropertiesKHR* in_struct) :
- sType(in_struct->sType),
- supportedDepthResolveModes(in_struct->supportedDepthResolveModes),
- supportedStencilResolveModes(in_struct->supportedStencilResolveModes),
- independentResolveNone(in_struct->independentResolveNone),
- independentResolve(in_struct->independentResolve)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR::safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR::safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR(const safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR& src)
-{
- sType = src.sType;
- supportedDepthResolveModes = src.supportedDepthResolveModes;
- supportedStencilResolveModes = src.supportedStencilResolveModes;
- independentResolveNone = src.independentResolveNone;
- independentResolve = src.independentResolve;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR& safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR::operator=(const safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- supportedDepthResolveModes = src.supportedDepthResolveModes;
- supportedStencilResolveModes = src.supportedStencilResolveModes;
- independentResolveNone = src.independentResolveNone;
- independentResolve = src.independentResolve;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR::~safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR::initialize(const VkPhysicalDeviceDepthStencilResolvePropertiesKHR* in_struct)
-{
- sType = in_struct->sType;
- supportedDepthResolveModes = in_struct->supportedDepthResolveModes;
- supportedStencilResolveModes = in_struct->supportedStencilResolveModes;
- independentResolveNone = in_struct->independentResolveNone;
- independentResolve = in_struct->independentResolve;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR::initialize(const safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR* src)
-{
- sType = src->sType;
- supportedDepthResolveModes = src->supportedDepthResolveModes;
- supportedStencilResolveModes = src->supportedStencilResolveModes;
- independentResolveNone = src->independentResolveNone;
- independentResolve = src->independentResolve;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR::safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR(const VkPhysicalDeviceVulkanMemoryModelFeaturesKHR* in_struct) :
- sType(in_struct->sType),
- vulkanMemoryModel(in_struct->vulkanMemoryModel),
- vulkanMemoryModelDeviceScope(in_struct->vulkanMemoryModelDeviceScope),
- vulkanMemoryModelAvailabilityVisibilityChains(in_struct->vulkanMemoryModelAvailabilityVisibilityChains)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR::safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR::safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR(const safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR& src)
-{
- sType = src.sType;
- vulkanMemoryModel = src.vulkanMemoryModel;
- vulkanMemoryModelDeviceScope = src.vulkanMemoryModelDeviceScope;
- vulkanMemoryModelAvailabilityVisibilityChains = src.vulkanMemoryModelAvailabilityVisibilityChains;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR& safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR::operator=(const safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- vulkanMemoryModel = src.vulkanMemoryModel;
- vulkanMemoryModelDeviceScope = src.vulkanMemoryModelDeviceScope;
- vulkanMemoryModelAvailabilityVisibilityChains = src.vulkanMemoryModelAvailabilityVisibilityChains;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR::~safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR::initialize(const VkPhysicalDeviceVulkanMemoryModelFeaturesKHR* in_struct)
-{
- sType = in_struct->sType;
- vulkanMemoryModel = in_struct->vulkanMemoryModel;
- vulkanMemoryModelDeviceScope = in_struct->vulkanMemoryModelDeviceScope;
- vulkanMemoryModelAvailabilityVisibilityChains = in_struct->vulkanMemoryModelAvailabilityVisibilityChains;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR::initialize(const safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR* src)
-{
- sType = src->sType;
- vulkanMemoryModel = src->vulkanMemoryModel;
- vulkanMemoryModelDeviceScope = src->vulkanMemoryModelDeviceScope;
- vulkanMemoryModelAvailabilityVisibilityChains = src->vulkanMemoryModelAvailabilityVisibilityChains;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkSurfaceProtectedCapabilitiesKHR::safe_VkSurfaceProtectedCapabilitiesKHR(const VkSurfaceProtectedCapabilitiesKHR* in_struct) :
- sType(in_struct->sType),
- supportsProtected(in_struct->supportsProtected)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkSurfaceProtectedCapabilitiesKHR::safe_VkSurfaceProtectedCapabilitiesKHR() :
- pNext(nullptr)
-{}
-
-safe_VkSurfaceProtectedCapabilitiesKHR::safe_VkSurfaceProtectedCapabilitiesKHR(const safe_VkSurfaceProtectedCapabilitiesKHR& src)
-{
- sType = src.sType;
- supportsProtected = src.supportsProtected;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkSurfaceProtectedCapabilitiesKHR& safe_VkSurfaceProtectedCapabilitiesKHR::operator=(const safe_VkSurfaceProtectedCapabilitiesKHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- supportsProtected = src.supportsProtected;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkSurfaceProtectedCapabilitiesKHR::~safe_VkSurfaceProtectedCapabilitiesKHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkSurfaceProtectedCapabilitiesKHR::initialize(const VkSurfaceProtectedCapabilitiesKHR* in_struct)
-{
- sType = in_struct->sType;
- supportsProtected = in_struct->supportsProtected;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkSurfaceProtectedCapabilitiesKHR::initialize(const safe_VkSurfaceProtectedCapabilitiesKHR* src)
-{
- sType = src->sType;
- supportsProtected = src->supportsProtected;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR::safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR(const VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR* in_struct) :
- sType(in_struct->sType),
- uniformBufferStandardLayout(in_struct->uniformBufferStandardLayout)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR::safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR::safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR(const safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR& src)
-{
- sType = src.sType;
- uniformBufferStandardLayout = src.uniformBufferStandardLayout;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR& safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR::operator=(const safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- uniformBufferStandardLayout = src.uniformBufferStandardLayout;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR::~safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR::initialize(const VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR* in_struct)
-{
- sType = in_struct->sType;
- uniformBufferStandardLayout = in_struct->uniformBufferStandardLayout;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR::initialize(const safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR* src)
-{
- sType = src->sType;
- uniformBufferStandardLayout = src->uniformBufferStandardLayout;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR::safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR(const VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR* in_struct) :
- sType(in_struct->sType),
- pipelineExecutableInfo(in_struct->pipelineExecutableInfo)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR::safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR::safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR(const safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR& src)
-{
- sType = src.sType;
- pipelineExecutableInfo = src.pipelineExecutableInfo;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR& safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR::operator=(const safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- pipelineExecutableInfo = src.pipelineExecutableInfo;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR::~safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR::initialize(const VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR* in_struct)
-{
- sType = in_struct->sType;
- pipelineExecutableInfo = in_struct->pipelineExecutableInfo;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR::initialize(const safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR* src)
-{
- sType = src->sType;
- pipelineExecutableInfo = src->pipelineExecutableInfo;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPipelineInfoKHR::safe_VkPipelineInfoKHR(const VkPipelineInfoKHR* in_struct) :
- sType(in_struct->sType),
- pipeline(in_struct->pipeline)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPipelineInfoKHR::safe_VkPipelineInfoKHR() :
- pNext(nullptr)
-{}
-
-safe_VkPipelineInfoKHR::safe_VkPipelineInfoKHR(const safe_VkPipelineInfoKHR& src)
-{
- sType = src.sType;
- pipeline = src.pipeline;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPipelineInfoKHR& safe_VkPipelineInfoKHR::operator=(const safe_VkPipelineInfoKHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- pipeline = src.pipeline;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPipelineInfoKHR::~safe_VkPipelineInfoKHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPipelineInfoKHR::initialize(const VkPipelineInfoKHR* in_struct)
-{
- sType = in_struct->sType;
- pipeline = in_struct->pipeline;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPipelineInfoKHR::initialize(const safe_VkPipelineInfoKHR* src)
-{
- sType = src->sType;
- pipeline = src->pipeline;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPipelineExecutablePropertiesKHR::safe_VkPipelineExecutablePropertiesKHR(const VkPipelineExecutablePropertiesKHR* in_struct) :
- sType(in_struct->sType),
- stages(in_struct->stages),
- subgroupSize(in_struct->subgroupSize)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
- name[i] = in_struct->name[i];
- }
- for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
- description[i] = in_struct->description[i];
- }
-}
-
-safe_VkPipelineExecutablePropertiesKHR::safe_VkPipelineExecutablePropertiesKHR() :
- pNext(nullptr)
-{}
-
-safe_VkPipelineExecutablePropertiesKHR::safe_VkPipelineExecutablePropertiesKHR(const safe_VkPipelineExecutablePropertiesKHR& src)
-{
- sType = src.sType;
- stages = src.stages;
- subgroupSize = src.subgroupSize;
- pNext = SafePnextCopy(src.pNext);
- for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
- name[i] = src.name[i];
- }
- for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
- description[i] = src.description[i];
- }
-}
-
-safe_VkPipelineExecutablePropertiesKHR& safe_VkPipelineExecutablePropertiesKHR::operator=(const safe_VkPipelineExecutablePropertiesKHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- stages = src.stages;
- subgroupSize = src.subgroupSize;
- pNext = SafePnextCopy(src.pNext);
- for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
- name[i] = src.name[i];
- }
- for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
- description[i] = src.description[i];
- }
-
- return *this;
-}
-
-safe_VkPipelineExecutablePropertiesKHR::~safe_VkPipelineExecutablePropertiesKHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPipelineExecutablePropertiesKHR::initialize(const VkPipelineExecutablePropertiesKHR* in_struct)
-{
- sType = in_struct->sType;
- stages = in_struct->stages;
- subgroupSize = in_struct->subgroupSize;
- pNext = SafePnextCopy(in_struct->pNext);
- for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
- name[i] = in_struct->name[i];
- }
- for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
- description[i] = in_struct->description[i];
- }
-}
-
-void safe_VkPipelineExecutablePropertiesKHR::initialize(const safe_VkPipelineExecutablePropertiesKHR* src)
-{
- sType = src->sType;
- stages = src->stages;
- subgroupSize = src->subgroupSize;
- pNext = SafePnextCopy(src->pNext);
- for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
- name[i] = src->name[i];
- }
- for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
- description[i] = src->description[i];
- }
-}
-
-safe_VkPipelineExecutableInfoKHR::safe_VkPipelineExecutableInfoKHR(const VkPipelineExecutableInfoKHR* in_struct) :
- sType(in_struct->sType),
- pipeline(in_struct->pipeline),
- executableIndex(in_struct->executableIndex)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPipelineExecutableInfoKHR::safe_VkPipelineExecutableInfoKHR() :
- pNext(nullptr)
-{}
-
-safe_VkPipelineExecutableInfoKHR::safe_VkPipelineExecutableInfoKHR(const safe_VkPipelineExecutableInfoKHR& src)
-{
- sType = src.sType;
- pipeline = src.pipeline;
- executableIndex = src.executableIndex;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPipelineExecutableInfoKHR& safe_VkPipelineExecutableInfoKHR::operator=(const safe_VkPipelineExecutableInfoKHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- pipeline = src.pipeline;
- executableIndex = src.executableIndex;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPipelineExecutableInfoKHR::~safe_VkPipelineExecutableInfoKHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPipelineExecutableInfoKHR::initialize(const VkPipelineExecutableInfoKHR* in_struct)
-{
- sType = in_struct->sType;
- pipeline = in_struct->pipeline;
- executableIndex = in_struct->executableIndex;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPipelineExecutableInfoKHR::initialize(const safe_VkPipelineExecutableInfoKHR* src)
-{
- sType = src->sType;
- pipeline = src->pipeline;
- executableIndex = src->executableIndex;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPipelineExecutableStatisticKHR::safe_VkPipelineExecutableStatisticKHR(const VkPipelineExecutableStatisticKHR* in_struct) :
- sType(in_struct->sType),
- format(in_struct->format),
- value(in_struct->value)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
- name[i] = in_struct->name[i];
- }
- for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
- description[i] = in_struct->description[i];
- }
-}
-
-safe_VkPipelineExecutableStatisticKHR::safe_VkPipelineExecutableStatisticKHR() :
- pNext(nullptr)
-{}
-
-safe_VkPipelineExecutableStatisticKHR::safe_VkPipelineExecutableStatisticKHR(const safe_VkPipelineExecutableStatisticKHR& src)
-{
- sType = src.sType;
- format = src.format;
- value = src.value;
- pNext = SafePnextCopy(src.pNext);
- for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
- name[i] = src.name[i];
- }
- for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
- description[i] = src.description[i];
- }
-}
-
-safe_VkPipelineExecutableStatisticKHR& safe_VkPipelineExecutableStatisticKHR::operator=(const safe_VkPipelineExecutableStatisticKHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- format = src.format;
- value = src.value;
- pNext = SafePnextCopy(src.pNext);
- for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
- name[i] = src.name[i];
- }
- for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
- description[i] = src.description[i];
- }
-
- return *this;
-}
-
-safe_VkPipelineExecutableStatisticKHR::~safe_VkPipelineExecutableStatisticKHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPipelineExecutableStatisticKHR::initialize(const VkPipelineExecutableStatisticKHR* in_struct)
-{
- sType = in_struct->sType;
- format = in_struct->format;
- value = in_struct->value;
- pNext = SafePnextCopy(in_struct->pNext);
- for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
- name[i] = in_struct->name[i];
- }
- for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
- description[i] = in_struct->description[i];
- }
-}
-
-void safe_VkPipelineExecutableStatisticKHR::initialize(const safe_VkPipelineExecutableStatisticKHR* src)
-{
- sType = src->sType;
- format = src->format;
- value = src->value;
- pNext = SafePnextCopy(src->pNext);
- for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
- name[i] = src->name[i];
- }
- for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
- description[i] = src->description[i];
- }
-}
-
-safe_VkPipelineExecutableInternalRepresentationKHR::safe_VkPipelineExecutableInternalRepresentationKHR(const VkPipelineExecutableInternalRepresentationKHR* in_struct) :
- sType(in_struct->sType),
- isText(in_struct->isText),
- dataSize(in_struct->dataSize),
- pData(in_struct->pData)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
- name[i] = in_struct->name[i];
- }
- for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
- description[i] = in_struct->description[i];
- }
-}
-
-safe_VkPipelineExecutableInternalRepresentationKHR::safe_VkPipelineExecutableInternalRepresentationKHR() :
- pNext(nullptr),
- pData(nullptr)
-{}
-
-safe_VkPipelineExecutableInternalRepresentationKHR::safe_VkPipelineExecutableInternalRepresentationKHR(const safe_VkPipelineExecutableInternalRepresentationKHR& src)
-{
- sType = src.sType;
- isText = src.isText;
- dataSize = src.dataSize;
- pData = src.pData;
- pNext = SafePnextCopy(src.pNext);
- for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
- name[i] = src.name[i];
- }
- for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
- description[i] = src.description[i];
- }
-}
-
-safe_VkPipelineExecutableInternalRepresentationKHR& safe_VkPipelineExecutableInternalRepresentationKHR::operator=(const safe_VkPipelineExecutableInternalRepresentationKHR& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- isText = src.isText;
- dataSize = src.dataSize;
- pData = src.pData;
- pNext = SafePnextCopy(src.pNext);
- for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
- name[i] = src.name[i];
- }
- for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
- description[i] = src.description[i];
- }
-
- return *this;
-}
-
-safe_VkPipelineExecutableInternalRepresentationKHR::~safe_VkPipelineExecutableInternalRepresentationKHR()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPipelineExecutableInternalRepresentationKHR::initialize(const VkPipelineExecutableInternalRepresentationKHR* in_struct)
-{
- sType = in_struct->sType;
- isText = in_struct->isText;
- dataSize = in_struct->dataSize;
- pData = in_struct->pData;
- pNext = SafePnextCopy(in_struct->pNext);
- for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
- name[i] = in_struct->name[i];
- }
- for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
- description[i] = in_struct->description[i];
- }
-}
-
-void safe_VkPipelineExecutableInternalRepresentationKHR::initialize(const safe_VkPipelineExecutableInternalRepresentationKHR* src)
-{
- sType = src->sType;
- isText = src->isText;
- dataSize = src->dataSize;
- pData = src->pData;
- pNext = SafePnextCopy(src->pNext);
- for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
- name[i] = src->name[i];
- }
- for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
- description[i] = src->description[i];
- }
-}
-
-safe_VkDebugReportCallbackCreateInfoEXT::safe_VkDebugReportCallbackCreateInfoEXT(const VkDebugReportCallbackCreateInfoEXT* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- pfnCallback(in_struct->pfnCallback),
- pUserData(in_struct->pUserData)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkDebugReportCallbackCreateInfoEXT::safe_VkDebugReportCallbackCreateInfoEXT() :
- pNext(nullptr),
- pUserData(nullptr)
-{}
-
-safe_VkDebugReportCallbackCreateInfoEXT::safe_VkDebugReportCallbackCreateInfoEXT(const safe_VkDebugReportCallbackCreateInfoEXT& src)
-{
- sType = src.sType;
- flags = src.flags;
- pfnCallback = src.pfnCallback;
- pUserData = src.pUserData;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkDebugReportCallbackCreateInfoEXT& safe_VkDebugReportCallbackCreateInfoEXT::operator=(const safe_VkDebugReportCallbackCreateInfoEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- pfnCallback = src.pfnCallback;
- pUserData = src.pUserData;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkDebugReportCallbackCreateInfoEXT::~safe_VkDebugReportCallbackCreateInfoEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDebugReportCallbackCreateInfoEXT::initialize(const VkDebugReportCallbackCreateInfoEXT* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- pfnCallback = in_struct->pfnCallback;
- pUserData = in_struct->pUserData;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkDebugReportCallbackCreateInfoEXT::initialize(const safe_VkDebugReportCallbackCreateInfoEXT* src)
-{
- sType = src->sType;
- flags = src->flags;
- pfnCallback = src->pfnCallback;
- pUserData = src->pUserData;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPipelineRasterizationStateRasterizationOrderAMD::safe_VkPipelineRasterizationStateRasterizationOrderAMD(const VkPipelineRasterizationStateRasterizationOrderAMD* in_struct) :
- sType(in_struct->sType),
- rasterizationOrder(in_struct->rasterizationOrder)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPipelineRasterizationStateRasterizationOrderAMD::safe_VkPipelineRasterizationStateRasterizationOrderAMD() :
- pNext(nullptr)
-{}
-
-safe_VkPipelineRasterizationStateRasterizationOrderAMD::safe_VkPipelineRasterizationStateRasterizationOrderAMD(const safe_VkPipelineRasterizationStateRasterizationOrderAMD& src)
-{
- sType = src.sType;
- rasterizationOrder = src.rasterizationOrder;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPipelineRasterizationStateRasterizationOrderAMD& safe_VkPipelineRasterizationStateRasterizationOrderAMD::operator=(const safe_VkPipelineRasterizationStateRasterizationOrderAMD& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- rasterizationOrder = src.rasterizationOrder;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPipelineRasterizationStateRasterizationOrderAMD::~safe_VkPipelineRasterizationStateRasterizationOrderAMD()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPipelineRasterizationStateRasterizationOrderAMD::initialize(const VkPipelineRasterizationStateRasterizationOrderAMD* in_struct)
-{
- sType = in_struct->sType;
- rasterizationOrder = in_struct->rasterizationOrder;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPipelineRasterizationStateRasterizationOrderAMD::initialize(const safe_VkPipelineRasterizationStateRasterizationOrderAMD* src)
-{
- sType = src->sType;
- rasterizationOrder = src->rasterizationOrder;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkDebugMarkerObjectNameInfoEXT::safe_VkDebugMarkerObjectNameInfoEXT(const VkDebugMarkerObjectNameInfoEXT* in_struct) :
- sType(in_struct->sType),
- objectType(in_struct->objectType),
- object(in_struct->object)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- pObjectName = SafeStringCopy(in_struct->pObjectName);
-}
-
-safe_VkDebugMarkerObjectNameInfoEXT::safe_VkDebugMarkerObjectNameInfoEXT() :
- pNext(nullptr),
- pObjectName(nullptr)
-{}
-
-safe_VkDebugMarkerObjectNameInfoEXT::safe_VkDebugMarkerObjectNameInfoEXT(const safe_VkDebugMarkerObjectNameInfoEXT& src)
-{
- sType = src.sType;
- objectType = src.objectType;
- object = src.object;
- pNext = SafePnextCopy(src.pNext);
- pObjectName = SafeStringCopy(src.pObjectName);
-}
-
-safe_VkDebugMarkerObjectNameInfoEXT& safe_VkDebugMarkerObjectNameInfoEXT::operator=(const safe_VkDebugMarkerObjectNameInfoEXT& src)
-{
- if (&src == this) return *this;
-
- if (pObjectName) delete [] pObjectName;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- objectType = src.objectType;
- object = src.object;
- pNext = SafePnextCopy(src.pNext);
- pObjectName = SafeStringCopy(src.pObjectName);
-
- return *this;
-}
-
-safe_VkDebugMarkerObjectNameInfoEXT::~safe_VkDebugMarkerObjectNameInfoEXT()
-{
- if (pObjectName) delete [] pObjectName;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDebugMarkerObjectNameInfoEXT::initialize(const VkDebugMarkerObjectNameInfoEXT* in_struct)
-{
- sType = in_struct->sType;
- objectType = in_struct->objectType;
- object = in_struct->object;
- pNext = SafePnextCopy(in_struct->pNext);
- pObjectName = SafeStringCopy(in_struct->pObjectName);
-}
-
-void safe_VkDebugMarkerObjectNameInfoEXT::initialize(const safe_VkDebugMarkerObjectNameInfoEXT* src)
-{
- sType = src->sType;
- objectType = src->objectType;
- object = src->object;
- pNext = SafePnextCopy(src->pNext);
- pObjectName = SafeStringCopy(src->pObjectName);
-}
-
-safe_VkDebugMarkerObjectTagInfoEXT::safe_VkDebugMarkerObjectTagInfoEXT(const VkDebugMarkerObjectTagInfoEXT* in_struct) :
- sType(in_struct->sType),
- objectType(in_struct->objectType),
- object(in_struct->object),
- tagName(in_struct->tagName),
- tagSize(in_struct->tagSize),
- pTag(in_struct->pTag)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkDebugMarkerObjectTagInfoEXT::safe_VkDebugMarkerObjectTagInfoEXT() :
- pNext(nullptr),
- pTag(nullptr)
-{}
-
-safe_VkDebugMarkerObjectTagInfoEXT::safe_VkDebugMarkerObjectTagInfoEXT(const safe_VkDebugMarkerObjectTagInfoEXT& src)
-{
- sType = src.sType;
- objectType = src.objectType;
- object = src.object;
- tagName = src.tagName;
- tagSize = src.tagSize;
- pTag = src.pTag;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkDebugMarkerObjectTagInfoEXT& safe_VkDebugMarkerObjectTagInfoEXT::operator=(const safe_VkDebugMarkerObjectTagInfoEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- objectType = src.objectType;
- object = src.object;
- tagName = src.tagName;
- tagSize = src.tagSize;
- pTag = src.pTag;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkDebugMarkerObjectTagInfoEXT::~safe_VkDebugMarkerObjectTagInfoEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDebugMarkerObjectTagInfoEXT::initialize(const VkDebugMarkerObjectTagInfoEXT* in_struct)
-{
- sType = in_struct->sType;
- objectType = in_struct->objectType;
- object = in_struct->object;
- tagName = in_struct->tagName;
- tagSize = in_struct->tagSize;
- pTag = in_struct->pTag;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkDebugMarkerObjectTagInfoEXT::initialize(const safe_VkDebugMarkerObjectTagInfoEXT* src)
-{
- sType = src->sType;
- objectType = src->objectType;
- object = src->object;
- tagName = src->tagName;
- tagSize = src->tagSize;
- pTag = src->pTag;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkDebugMarkerMarkerInfoEXT::safe_VkDebugMarkerMarkerInfoEXT(const VkDebugMarkerMarkerInfoEXT* in_struct) :
- sType(in_struct->sType)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- pMarkerName = SafeStringCopy(in_struct->pMarkerName);
- for (uint32_t i = 0; i < 4; ++i) {
- color[i] = in_struct->color[i];
- }
-}
-
-safe_VkDebugMarkerMarkerInfoEXT::safe_VkDebugMarkerMarkerInfoEXT() :
- pNext(nullptr),
- pMarkerName(nullptr)
-{}
-
-safe_VkDebugMarkerMarkerInfoEXT::safe_VkDebugMarkerMarkerInfoEXT(const safe_VkDebugMarkerMarkerInfoEXT& src)
-{
- sType = src.sType;
- pNext = SafePnextCopy(src.pNext);
- pMarkerName = SafeStringCopy(src.pMarkerName);
- for (uint32_t i = 0; i < 4; ++i) {
- color[i] = src.color[i];
- }
-}
-
-safe_VkDebugMarkerMarkerInfoEXT& safe_VkDebugMarkerMarkerInfoEXT::operator=(const safe_VkDebugMarkerMarkerInfoEXT& src)
-{
- if (&src == this) return *this;
-
- if (pMarkerName) delete [] pMarkerName;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- pNext = SafePnextCopy(src.pNext);
- pMarkerName = SafeStringCopy(src.pMarkerName);
- for (uint32_t i = 0; i < 4; ++i) {
- color[i] = src.color[i];
- }
-
- return *this;
-}
-
-safe_VkDebugMarkerMarkerInfoEXT::~safe_VkDebugMarkerMarkerInfoEXT()
-{
- if (pMarkerName) delete [] pMarkerName;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDebugMarkerMarkerInfoEXT::initialize(const VkDebugMarkerMarkerInfoEXT* in_struct)
-{
- sType = in_struct->sType;
- pNext = SafePnextCopy(in_struct->pNext);
- pMarkerName = SafeStringCopy(in_struct->pMarkerName);
- for (uint32_t i = 0; i < 4; ++i) {
- color[i] = in_struct->color[i];
- }
-}
-
-void safe_VkDebugMarkerMarkerInfoEXT::initialize(const safe_VkDebugMarkerMarkerInfoEXT* src)
-{
- sType = src->sType;
- pNext = SafePnextCopy(src->pNext);
- pMarkerName = SafeStringCopy(src->pMarkerName);
- for (uint32_t i = 0; i < 4; ++i) {
- color[i] = src->color[i];
- }
-}
-
-safe_VkDedicatedAllocationImageCreateInfoNV::safe_VkDedicatedAllocationImageCreateInfoNV(const VkDedicatedAllocationImageCreateInfoNV* in_struct) :
- sType(in_struct->sType),
- dedicatedAllocation(in_struct->dedicatedAllocation)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkDedicatedAllocationImageCreateInfoNV::safe_VkDedicatedAllocationImageCreateInfoNV() :
- pNext(nullptr)
-{}
-
-safe_VkDedicatedAllocationImageCreateInfoNV::safe_VkDedicatedAllocationImageCreateInfoNV(const safe_VkDedicatedAllocationImageCreateInfoNV& src)
-{
- sType = src.sType;
- dedicatedAllocation = src.dedicatedAllocation;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkDedicatedAllocationImageCreateInfoNV& safe_VkDedicatedAllocationImageCreateInfoNV::operator=(const safe_VkDedicatedAllocationImageCreateInfoNV& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- dedicatedAllocation = src.dedicatedAllocation;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkDedicatedAllocationImageCreateInfoNV::~safe_VkDedicatedAllocationImageCreateInfoNV()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDedicatedAllocationImageCreateInfoNV::initialize(const VkDedicatedAllocationImageCreateInfoNV* in_struct)
-{
- sType = in_struct->sType;
- dedicatedAllocation = in_struct->dedicatedAllocation;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkDedicatedAllocationImageCreateInfoNV::initialize(const safe_VkDedicatedAllocationImageCreateInfoNV* src)
-{
- sType = src->sType;
- dedicatedAllocation = src->dedicatedAllocation;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkDedicatedAllocationBufferCreateInfoNV::safe_VkDedicatedAllocationBufferCreateInfoNV(const VkDedicatedAllocationBufferCreateInfoNV* in_struct) :
- sType(in_struct->sType),
- dedicatedAllocation(in_struct->dedicatedAllocation)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkDedicatedAllocationBufferCreateInfoNV::safe_VkDedicatedAllocationBufferCreateInfoNV() :
- pNext(nullptr)
-{}
-
-safe_VkDedicatedAllocationBufferCreateInfoNV::safe_VkDedicatedAllocationBufferCreateInfoNV(const safe_VkDedicatedAllocationBufferCreateInfoNV& src)
-{
- sType = src.sType;
- dedicatedAllocation = src.dedicatedAllocation;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkDedicatedAllocationBufferCreateInfoNV& safe_VkDedicatedAllocationBufferCreateInfoNV::operator=(const safe_VkDedicatedAllocationBufferCreateInfoNV& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- dedicatedAllocation = src.dedicatedAllocation;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkDedicatedAllocationBufferCreateInfoNV::~safe_VkDedicatedAllocationBufferCreateInfoNV()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDedicatedAllocationBufferCreateInfoNV::initialize(const VkDedicatedAllocationBufferCreateInfoNV* in_struct)
-{
- sType = in_struct->sType;
- dedicatedAllocation = in_struct->dedicatedAllocation;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkDedicatedAllocationBufferCreateInfoNV::initialize(const safe_VkDedicatedAllocationBufferCreateInfoNV* src)
-{
- sType = src->sType;
- dedicatedAllocation = src->dedicatedAllocation;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkDedicatedAllocationMemoryAllocateInfoNV::safe_VkDedicatedAllocationMemoryAllocateInfoNV(const VkDedicatedAllocationMemoryAllocateInfoNV* in_struct) :
- sType(in_struct->sType),
- image(in_struct->image),
- buffer(in_struct->buffer)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkDedicatedAllocationMemoryAllocateInfoNV::safe_VkDedicatedAllocationMemoryAllocateInfoNV() :
- pNext(nullptr)
-{}
-
-safe_VkDedicatedAllocationMemoryAllocateInfoNV::safe_VkDedicatedAllocationMemoryAllocateInfoNV(const safe_VkDedicatedAllocationMemoryAllocateInfoNV& src)
-{
- sType = src.sType;
- image = src.image;
- buffer = src.buffer;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkDedicatedAllocationMemoryAllocateInfoNV& safe_VkDedicatedAllocationMemoryAllocateInfoNV::operator=(const safe_VkDedicatedAllocationMemoryAllocateInfoNV& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- image = src.image;
- buffer = src.buffer;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkDedicatedAllocationMemoryAllocateInfoNV::~safe_VkDedicatedAllocationMemoryAllocateInfoNV()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDedicatedAllocationMemoryAllocateInfoNV::initialize(const VkDedicatedAllocationMemoryAllocateInfoNV* in_struct)
-{
- sType = in_struct->sType;
- image = in_struct->image;
- buffer = in_struct->buffer;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkDedicatedAllocationMemoryAllocateInfoNV::initialize(const safe_VkDedicatedAllocationMemoryAllocateInfoNV* src)
-{
- sType = src->sType;
- image = src->image;
- buffer = src->buffer;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT::safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT(const VkPhysicalDeviceTransformFeedbackFeaturesEXT* in_struct) :
- sType(in_struct->sType),
- transformFeedback(in_struct->transformFeedback),
- geometryStreams(in_struct->geometryStreams)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT::safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT::safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT(const safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT& src)
-{
- sType = src.sType;
- transformFeedback = src.transformFeedback;
- geometryStreams = src.geometryStreams;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT& safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT::operator=(const safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- transformFeedback = src.transformFeedback;
- geometryStreams = src.geometryStreams;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT::~safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT::initialize(const VkPhysicalDeviceTransformFeedbackFeaturesEXT* in_struct)
-{
- sType = in_struct->sType;
- transformFeedback = in_struct->transformFeedback;
- geometryStreams = in_struct->geometryStreams;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT::initialize(const safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT* src)
-{
- sType = src->sType;
- transformFeedback = src->transformFeedback;
- geometryStreams = src->geometryStreams;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT::safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT(const VkPhysicalDeviceTransformFeedbackPropertiesEXT* in_struct) :
- sType(in_struct->sType),
- maxTransformFeedbackStreams(in_struct->maxTransformFeedbackStreams),
- maxTransformFeedbackBuffers(in_struct->maxTransformFeedbackBuffers),
- maxTransformFeedbackBufferSize(in_struct->maxTransformFeedbackBufferSize),
- maxTransformFeedbackStreamDataSize(in_struct->maxTransformFeedbackStreamDataSize),
- maxTransformFeedbackBufferDataSize(in_struct->maxTransformFeedbackBufferDataSize),
- maxTransformFeedbackBufferDataStride(in_struct->maxTransformFeedbackBufferDataStride),
- transformFeedbackQueries(in_struct->transformFeedbackQueries),
- transformFeedbackStreamsLinesTriangles(in_struct->transformFeedbackStreamsLinesTriangles),
- transformFeedbackRasterizationStreamSelect(in_struct->transformFeedbackRasterizationStreamSelect),
- transformFeedbackDraw(in_struct->transformFeedbackDraw)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT::safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT::safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT(const safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT& src)
-{
- sType = src.sType;
- maxTransformFeedbackStreams = src.maxTransformFeedbackStreams;
- maxTransformFeedbackBuffers = src.maxTransformFeedbackBuffers;
- maxTransformFeedbackBufferSize = src.maxTransformFeedbackBufferSize;
- maxTransformFeedbackStreamDataSize = src.maxTransformFeedbackStreamDataSize;
- maxTransformFeedbackBufferDataSize = src.maxTransformFeedbackBufferDataSize;
- maxTransformFeedbackBufferDataStride = src.maxTransformFeedbackBufferDataStride;
- transformFeedbackQueries = src.transformFeedbackQueries;
- transformFeedbackStreamsLinesTriangles = src.transformFeedbackStreamsLinesTriangles;
- transformFeedbackRasterizationStreamSelect = src.transformFeedbackRasterizationStreamSelect;
- transformFeedbackDraw = src.transformFeedbackDraw;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT& safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT::operator=(const safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- maxTransformFeedbackStreams = src.maxTransformFeedbackStreams;
- maxTransformFeedbackBuffers = src.maxTransformFeedbackBuffers;
- maxTransformFeedbackBufferSize = src.maxTransformFeedbackBufferSize;
- maxTransformFeedbackStreamDataSize = src.maxTransformFeedbackStreamDataSize;
- maxTransformFeedbackBufferDataSize = src.maxTransformFeedbackBufferDataSize;
- maxTransformFeedbackBufferDataStride = src.maxTransformFeedbackBufferDataStride;
- transformFeedbackQueries = src.transformFeedbackQueries;
- transformFeedbackStreamsLinesTriangles = src.transformFeedbackStreamsLinesTriangles;
- transformFeedbackRasterizationStreamSelect = src.transformFeedbackRasterizationStreamSelect;
- transformFeedbackDraw = src.transformFeedbackDraw;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT::~safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT::initialize(const VkPhysicalDeviceTransformFeedbackPropertiesEXT* in_struct)
-{
- sType = in_struct->sType;
- maxTransformFeedbackStreams = in_struct->maxTransformFeedbackStreams;
- maxTransformFeedbackBuffers = in_struct->maxTransformFeedbackBuffers;
- maxTransformFeedbackBufferSize = in_struct->maxTransformFeedbackBufferSize;
- maxTransformFeedbackStreamDataSize = in_struct->maxTransformFeedbackStreamDataSize;
- maxTransformFeedbackBufferDataSize = in_struct->maxTransformFeedbackBufferDataSize;
- maxTransformFeedbackBufferDataStride = in_struct->maxTransformFeedbackBufferDataStride;
- transformFeedbackQueries = in_struct->transformFeedbackQueries;
- transformFeedbackStreamsLinesTriangles = in_struct->transformFeedbackStreamsLinesTriangles;
- transformFeedbackRasterizationStreamSelect = in_struct->transformFeedbackRasterizationStreamSelect;
- transformFeedbackDraw = in_struct->transformFeedbackDraw;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT::initialize(const safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT* src)
-{
- sType = src->sType;
- maxTransformFeedbackStreams = src->maxTransformFeedbackStreams;
- maxTransformFeedbackBuffers = src->maxTransformFeedbackBuffers;
- maxTransformFeedbackBufferSize = src->maxTransformFeedbackBufferSize;
- maxTransformFeedbackStreamDataSize = src->maxTransformFeedbackStreamDataSize;
- maxTransformFeedbackBufferDataSize = src->maxTransformFeedbackBufferDataSize;
- maxTransformFeedbackBufferDataStride = src->maxTransformFeedbackBufferDataStride;
- transformFeedbackQueries = src->transformFeedbackQueries;
- transformFeedbackStreamsLinesTriangles = src->transformFeedbackStreamsLinesTriangles;
- transformFeedbackRasterizationStreamSelect = src->transformFeedbackRasterizationStreamSelect;
- transformFeedbackDraw = src->transformFeedbackDraw;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPipelineRasterizationStateStreamCreateInfoEXT::safe_VkPipelineRasterizationStateStreamCreateInfoEXT(const VkPipelineRasterizationStateStreamCreateInfoEXT* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- rasterizationStream(in_struct->rasterizationStream)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPipelineRasterizationStateStreamCreateInfoEXT::safe_VkPipelineRasterizationStateStreamCreateInfoEXT() :
- pNext(nullptr)
-{}
-
-safe_VkPipelineRasterizationStateStreamCreateInfoEXT::safe_VkPipelineRasterizationStateStreamCreateInfoEXT(const safe_VkPipelineRasterizationStateStreamCreateInfoEXT& src)
-{
- sType = src.sType;
- flags = src.flags;
- rasterizationStream = src.rasterizationStream;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPipelineRasterizationStateStreamCreateInfoEXT& safe_VkPipelineRasterizationStateStreamCreateInfoEXT::operator=(const safe_VkPipelineRasterizationStateStreamCreateInfoEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- rasterizationStream = src.rasterizationStream;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPipelineRasterizationStateStreamCreateInfoEXT::~safe_VkPipelineRasterizationStateStreamCreateInfoEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPipelineRasterizationStateStreamCreateInfoEXT::initialize(const VkPipelineRasterizationStateStreamCreateInfoEXT* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- rasterizationStream = in_struct->rasterizationStream;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPipelineRasterizationStateStreamCreateInfoEXT::initialize(const safe_VkPipelineRasterizationStateStreamCreateInfoEXT* src)
-{
- sType = src->sType;
- flags = src->flags;
- rasterizationStream = src->rasterizationStream;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkImageViewHandleInfoNVX::safe_VkImageViewHandleInfoNVX(const VkImageViewHandleInfoNVX* in_struct) :
- sType(in_struct->sType),
- imageView(in_struct->imageView),
- descriptorType(in_struct->descriptorType),
- sampler(in_struct->sampler)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkImageViewHandleInfoNVX::safe_VkImageViewHandleInfoNVX() :
- pNext(nullptr)
-{}
-
-safe_VkImageViewHandleInfoNVX::safe_VkImageViewHandleInfoNVX(const safe_VkImageViewHandleInfoNVX& src)
-{
- sType = src.sType;
- imageView = src.imageView;
- descriptorType = src.descriptorType;
- sampler = src.sampler;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkImageViewHandleInfoNVX& safe_VkImageViewHandleInfoNVX::operator=(const safe_VkImageViewHandleInfoNVX& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- imageView = src.imageView;
- descriptorType = src.descriptorType;
- sampler = src.sampler;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkImageViewHandleInfoNVX::~safe_VkImageViewHandleInfoNVX()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkImageViewHandleInfoNVX::initialize(const VkImageViewHandleInfoNVX* in_struct)
-{
- sType = in_struct->sType;
- imageView = in_struct->imageView;
- descriptorType = in_struct->descriptorType;
- sampler = in_struct->sampler;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkImageViewHandleInfoNVX::initialize(const safe_VkImageViewHandleInfoNVX* src)
-{
- sType = src->sType;
- imageView = src->imageView;
- descriptorType = src->descriptorType;
- sampler = src->sampler;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkTextureLODGatherFormatPropertiesAMD::safe_VkTextureLODGatherFormatPropertiesAMD(const VkTextureLODGatherFormatPropertiesAMD* in_struct) :
- sType(in_struct->sType),
- supportsTextureGatherLODBiasAMD(in_struct->supportsTextureGatherLODBiasAMD)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkTextureLODGatherFormatPropertiesAMD::safe_VkTextureLODGatherFormatPropertiesAMD() :
- pNext(nullptr)
-{}
-
-safe_VkTextureLODGatherFormatPropertiesAMD::safe_VkTextureLODGatherFormatPropertiesAMD(const safe_VkTextureLODGatherFormatPropertiesAMD& src)
-{
- sType = src.sType;
- supportsTextureGatherLODBiasAMD = src.supportsTextureGatherLODBiasAMD;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkTextureLODGatherFormatPropertiesAMD& safe_VkTextureLODGatherFormatPropertiesAMD::operator=(const safe_VkTextureLODGatherFormatPropertiesAMD& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- supportsTextureGatherLODBiasAMD = src.supportsTextureGatherLODBiasAMD;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkTextureLODGatherFormatPropertiesAMD::~safe_VkTextureLODGatherFormatPropertiesAMD()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkTextureLODGatherFormatPropertiesAMD::initialize(const VkTextureLODGatherFormatPropertiesAMD* in_struct)
-{
- sType = in_struct->sType;
- supportsTextureGatherLODBiasAMD = in_struct->supportsTextureGatherLODBiasAMD;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkTextureLODGatherFormatPropertiesAMD::initialize(const safe_VkTextureLODGatherFormatPropertiesAMD* src)
-{
- sType = src->sType;
- supportsTextureGatherLODBiasAMD = src->supportsTextureGatherLODBiasAMD;
- pNext = SafePnextCopy(src->pNext);
-}
-#ifdef VK_USE_PLATFORM_GGP
-
-
-safe_VkStreamDescriptorSurfaceCreateInfoGGP::safe_VkStreamDescriptorSurfaceCreateInfoGGP(const VkStreamDescriptorSurfaceCreateInfoGGP* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- streamDescriptor(in_struct->streamDescriptor)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkStreamDescriptorSurfaceCreateInfoGGP::safe_VkStreamDescriptorSurfaceCreateInfoGGP() :
- pNext(nullptr)
-{}
-
-safe_VkStreamDescriptorSurfaceCreateInfoGGP::safe_VkStreamDescriptorSurfaceCreateInfoGGP(const safe_VkStreamDescriptorSurfaceCreateInfoGGP& src)
-{
- sType = src.sType;
- flags = src.flags;
- streamDescriptor = src.streamDescriptor;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkStreamDescriptorSurfaceCreateInfoGGP& safe_VkStreamDescriptorSurfaceCreateInfoGGP::operator=(const safe_VkStreamDescriptorSurfaceCreateInfoGGP& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- streamDescriptor = src.streamDescriptor;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkStreamDescriptorSurfaceCreateInfoGGP::~safe_VkStreamDescriptorSurfaceCreateInfoGGP()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkStreamDescriptorSurfaceCreateInfoGGP::initialize(const VkStreamDescriptorSurfaceCreateInfoGGP* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- streamDescriptor = in_struct->streamDescriptor;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkStreamDescriptorSurfaceCreateInfoGGP::initialize(const safe_VkStreamDescriptorSurfaceCreateInfoGGP* src)
-{
- sType = src->sType;
- flags = src->flags;
- streamDescriptor = src->streamDescriptor;
- pNext = SafePnextCopy(src->pNext);
-}
-#endif // VK_USE_PLATFORM_GGP
-
-
-safe_VkPhysicalDeviceCornerSampledImageFeaturesNV::safe_VkPhysicalDeviceCornerSampledImageFeaturesNV(const VkPhysicalDeviceCornerSampledImageFeaturesNV* in_struct) :
- sType(in_struct->sType),
- cornerSampledImage(in_struct->cornerSampledImage)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceCornerSampledImageFeaturesNV::safe_VkPhysicalDeviceCornerSampledImageFeaturesNV() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceCornerSampledImageFeaturesNV::safe_VkPhysicalDeviceCornerSampledImageFeaturesNV(const safe_VkPhysicalDeviceCornerSampledImageFeaturesNV& src)
-{
- sType = src.sType;
- cornerSampledImage = src.cornerSampledImage;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceCornerSampledImageFeaturesNV& safe_VkPhysicalDeviceCornerSampledImageFeaturesNV::operator=(const safe_VkPhysicalDeviceCornerSampledImageFeaturesNV& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- cornerSampledImage = src.cornerSampledImage;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceCornerSampledImageFeaturesNV::~safe_VkPhysicalDeviceCornerSampledImageFeaturesNV()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceCornerSampledImageFeaturesNV::initialize(const VkPhysicalDeviceCornerSampledImageFeaturesNV* in_struct)
-{
- sType = in_struct->sType;
- cornerSampledImage = in_struct->cornerSampledImage;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceCornerSampledImageFeaturesNV::initialize(const safe_VkPhysicalDeviceCornerSampledImageFeaturesNV* src)
-{
- sType = src->sType;
- cornerSampledImage = src->cornerSampledImage;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkExternalMemoryImageCreateInfoNV::safe_VkExternalMemoryImageCreateInfoNV(const VkExternalMemoryImageCreateInfoNV* in_struct) :
- sType(in_struct->sType),
- handleTypes(in_struct->handleTypes)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkExternalMemoryImageCreateInfoNV::safe_VkExternalMemoryImageCreateInfoNV() :
- pNext(nullptr)
-{}
-
-safe_VkExternalMemoryImageCreateInfoNV::safe_VkExternalMemoryImageCreateInfoNV(const safe_VkExternalMemoryImageCreateInfoNV& src)
-{
- sType = src.sType;
- handleTypes = src.handleTypes;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkExternalMemoryImageCreateInfoNV& safe_VkExternalMemoryImageCreateInfoNV::operator=(const safe_VkExternalMemoryImageCreateInfoNV& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- handleTypes = src.handleTypes;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkExternalMemoryImageCreateInfoNV::~safe_VkExternalMemoryImageCreateInfoNV()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkExternalMemoryImageCreateInfoNV::initialize(const VkExternalMemoryImageCreateInfoNV* in_struct)
-{
- sType = in_struct->sType;
- handleTypes = in_struct->handleTypes;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkExternalMemoryImageCreateInfoNV::initialize(const safe_VkExternalMemoryImageCreateInfoNV* src)
-{
- sType = src->sType;
- handleTypes = src->handleTypes;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkExportMemoryAllocateInfoNV::safe_VkExportMemoryAllocateInfoNV(const VkExportMemoryAllocateInfoNV* in_struct) :
- sType(in_struct->sType),
- handleTypes(in_struct->handleTypes)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkExportMemoryAllocateInfoNV::safe_VkExportMemoryAllocateInfoNV() :
- pNext(nullptr)
-{}
-
-safe_VkExportMemoryAllocateInfoNV::safe_VkExportMemoryAllocateInfoNV(const safe_VkExportMemoryAllocateInfoNV& src)
-{
- sType = src.sType;
- handleTypes = src.handleTypes;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkExportMemoryAllocateInfoNV& safe_VkExportMemoryAllocateInfoNV::operator=(const safe_VkExportMemoryAllocateInfoNV& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- handleTypes = src.handleTypes;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkExportMemoryAllocateInfoNV::~safe_VkExportMemoryAllocateInfoNV()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkExportMemoryAllocateInfoNV::initialize(const VkExportMemoryAllocateInfoNV* in_struct)
-{
- sType = in_struct->sType;
- handleTypes = in_struct->handleTypes;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkExportMemoryAllocateInfoNV::initialize(const safe_VkExportMemoryAllocateInfoNV* src)
-{
- sType = src->sType;
- handleTypes = src->handleTypes;
- pNext = SafePnextCopy(src->pNext);
-}
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-
-safe_VkImportMemoryWin32HandleInfoNV::safe_VkImportMemoryWin32HandleInfoNV(const VkImportMemoryWin32HandleInfoNV* in_struct) :
- sType(in_struct->sType),
- handleType(in_struct->handleType),
- handle(in_struct->handle)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkImportMemoryWin32HandleInfoNV::safe_VkImportMemoryWin32HandleInfoNV() :
- pNext(nullptr)
-{}
-
-safe_VkImportMemoryWin32HandleInfoNV::safe_VkImportMemoryWin32HandleInfoNV(const safe_VkImportMemoryWin32HandleInfoNV& src)
-{
- sType = src.sType;
- handleType = src.handleType;
- handle = src.handle;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkImportMemoryWin32HandleInfoNV& safe_VkImportMemoryWin32HandleInfoNV::operator=(const safe_VkImportMemoryWin32HandleInfoNV& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- handleType = src.handleType;
- handle = src.handle;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkImportMemoryWin32HandleInfoNV::~safe_VkImportMemoryWin32HandleInfoNV()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkImportMemoryWin32HandleInfoNV::initialize(const VkImportMemoryWin32HandleInfoNV* in_struct)
-{
- sType = in_struct->sType;
- handleType = in_struct->handleType;
- handle = in_struct->handle;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkImportMemoryWin32HandleInfoNV::initialize(const safe_VkImportMemoryWin32HandleInfoNV* src)
-{
- sType = src->sType;
- handleType = src->handleType;
- handle = src->handle;
- pNext = SafePnextCopy(src->pNext);
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-
-safe_VkExportMemoryWin32HandleInfoNV::safe_VkExportMemoryWin32HandleInfoNV(const VkExportMemoryWin32HandleInfoNV* in_struct) :
- sType(in_struct->sType),
- pAttributes(nullptr),
- dwAccess(in_struct->dwAccess)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pAttributes) {
- pAttributes = new SECURITY_ATTRIBUTES(*in_struct->pAttributes);
- }
-}
-
-safe_VkExportMemoryWin32HandleInfoNV::safe_VkExportMemoryWin32HandleInfoNV() :
- pNext(nullptr),
- pAttributes(nullptr)
-{}
-
-safe_VkExportMemoryWin32HandleInfoNV::safe_VkExportMemoryWin32HandleInfoNV(const safe_VkExportMemoryWin32HandleInfoNV& src)
-{
- sType = src.sType;
- pAttributes = nullptr;
- dwAccess = src.dwAccess;
- pNext = SafePnextCopy(src.pNext);
- if (src.pAttributes) {
- pAttributes = new SECURITY_ATTRIBUTES(*src.pAttributes);
- }
-}
-
-safe_VkExportMemoryWin32HandleInfoNV& safe_VkExportMemoryWin32HandleInfoNV::operator=(const safe_VkExportMemoryWin32HandleInfoNV& src)
-{
- if (&src == this) return *this;
-
- if (pAttributes)
- delete pAttributes;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- pAttributes = nullptr;
- dwAccess = src.dwAccess;
- pNext = SafePnextCopy(src.pNext);
- if (src.pAttributes) {
- pAttributes = new SECURITY_ATTRIBUTES(*src.pAttributes);
- }
-
- return *this;
-}
-
-safe_VkExportMemoryWin32HandleInfoNV::~safe_VkExportMemoryWin32HandleInfoNV()
-{
- if (pAttributes)
- delete pAttributes;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkExportMemoryWin32HandleInfoNV::initialize(const VkExportMemoryWin32HandleInfoNV* in_struct)
-{
- sType = in_struct->sType;
- pAttributes = nullptr;
- dwAccess = in_struct->dwAccess;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pAttributes) {
- pAttributes = new SECURITY_ATTRIBUTES(*in_struct->pAttributes);
- }
-}
-
-void safe_VkExportMemoryWin32HandleInfoNV::initialize(const safe_VkExportMemoryWin32HandleInfoNV* src)
-{
- sType = src->sType;
- pAttributes = nullptr;
- dwAccess = src->dwAccess;
- pNext = SafePnextCopy(src->pNext);
- if (src->pAttributes) {
- pAttributes = new SECURITY_ATTRIBUTES(*src->pAttributes);
- }
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-
-safe_VkWin32KeyedMutexAcquireReleaseInfoNV::safe_VkWin32KeyedMutexAcquireReleaseInfoNV(const VkWin32KeyedMutexAcquireReleaseInfoNV* in_struct) :
- sType(in_struct->sType),
- acquireCount(in_struct->acquireCount),
- pAcquireSyncs(nullptr),
- pAcquireKeys(nullptr),
- pAcquireTimeoutMilliseconds(nullptr),
- releaseCount(in_struct->releaseCount),
- pReleaseSyncs(nullptr),
- pReleaseKeys(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (acquireCount && in_struct->pAcquireSyncs) {
- pAcquireSyncs = new VkDeviceMemory[acquireCount];
- for (uint32_t i = 0; i < acquireCount; ++i) {
- pAcquireSyncs[i] = in_struct->pAcquireSyncs[i];
- }
- }
- if (in_struct->pAcquireKeys) {
- pAcquireKeys = new uint64_t[in_struct->acquireCount];
- memcpy ((void *)pAcquireKeys, (void *)in_struct->pAcquireKeys, sizeof(uint64_t)*in_struct->acquireCount);
- }
- if (in_struct->pAcquireTimeoutMilliseconds) {
- pAcquireTimeoutMilliseconds = new uint32_t[in_struct->acquireCount];
- memcpy ((void *)pAcquireTimeoutMilliseconds, (void *)in_struct->pAcquireTimeoutMilliseconds, sizeof(uint32_t)*in_struct->acquireCount);
- }
- if (releaseCount && in_struct->pReleaseSyncs) {
- pReleaseSyncs = new VkDeviceMemory[releaseCount];
- for (uint32_t i = 0; i < releaseCount; ++i) {
- pReleaseSyncs[i] = in_struct->pReleaseSyncs[i];
- }
- }
- if (in_struct->pReleaseKeys) {
- pReleaseKeys = new uint64_t[in_struct->releaseCount];
- memcpy ((void *)pReleaseKeys, (void *)in_struct->pReleaseKeys, sizeof(uint64_t)*in_struct->releaseCount);
- }
-}
-
-safe_VkWin32KeyedMutexAcquireReleaseInfoNV::safe_VkWin32KeyedMutexAcquireReleaseInfoNV() :
- pNext(nullptr),
- pAcquireSyncs(nullptr),
- pAcquireKeys(nullptr),
- pAcquireTimeoutMilliseconds(nullptr),
- pReleaseSyncs(nullptr),
- pReleaseKeys(nullptr)
-{}
-
-safe_VkWin32KeyedMutexAcquireReleaseInfoNV::safe_VkWin32KeyedMutexAcquireReleaseInfoNV(const safe_VkWin32KeyedMutexAcquireReleaseInfoNV& src)
-{
- sType = src.sType;
- acquireCount = src.acquireCount;
- pAcquireSyncs = nullptr;
- pAcquireKeys = nullptr;
- pAcquireTimeoutMilliseconds = nullptr;
- releaseCount = src.releaseCount;
- pReleaseSyncs = nullptr;
- pReleaseKeys = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (acquireCount && src.pAcquireSyncs) {
- pAcquireSyncs = new VkDeviceMemory[acquireCount];
- for (uint32_t i = 0; i < acquireCount; ++i) {
- pAcquireSyncs[i] = src.pAcquireSyncs[i];
- }
- }
- if (src.pAcquireKeys) {
- pAcquireKeys = new uint64_t[src.acquireCount];
- memcpy ((void *)pAcquireKeys, (void *)src.pAcquireKeys, sizeof(uint64_t)*src.acquireCount);
- }
- if (src.pAcquireTimeoutMilliseconds) {
- pAcquireTimeoutMilliseconds = new uint32_t[src.acquireCount];
- memcpy ((void *)pAcquireTimeoutMilliseconds, (void *)src.pAcquireTimeoutMilliseconds, sizeof(uint32_t)*src.acquireCount);
- }
- if (releaseCount && src.pReleaseSyncs) {
- pReleaseSyncs = new VkDeviceMemory[releaseCount];
- for (uint32_t i = 0; i < releaseCount; ++i) {
- pReleaseSyncs[i] = src.pReleaseSyncs[i];
- }
- }
- if (src.pReleaseKeys) {
- pReleaseKeys = new uint64_t[src.releaseCount];
- memcpy ((void *)pReleaseKeys, (void *)src.pReleaseKeys, sizeof(uint64_t)*src.releaseCount);
- }
-}
-
-safe_VkWin32KeyedMutexAcquireReleaseInfoNV& safe_VkWin32KeyedMutexAcquireReleaseInfoNV::operator=(const safe_VkWin32KeyedMutexAcquireReleaseInfoNV& src)
-{
- if (&src == this) return *this;
-
- if (pAcquireSyncs)
- delete[] pAcquireSyncs;
- if (pAcquireKeys)
- delete[] pAcquireKeys;
- if (pAcquireTimeoutMilliseconds)
- delete[] pAcquireTimeoutMilliseconds;
- if (pReleaseSyncs)
- delete[] pReleaseSyncs;
- if (pReleaseKeys)
- delete[] pReleaseKeys;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- acquireCount = src.acquireCount;
- pAcquireSyncs = nullptr;
- pAcquireKeys = nullptr;
- pAcquireTimeoutMilliseconds = nullptr;
- releaseCount = src.releaseCount;
- pReleaseSyncs = nullptr;
- pReleaseKeys = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (acquireCount && src.pAcquireSyncs) {
- pAcquireSyncs = new VkDeviceMemory[acquireCount];
- for (uint32_t i = 0; i < acquireCount; ++i) {
- pAcquireSyncs[i] = src.pAcquireSyncs[i];
- }
- }
- if (src.pAcquireKeys) {
- pAcquireKeys = new uint64_t[src.acquireCount];
- memcpy ((void *)pAcquireKeys, (void *)src.pAcquireKeys, sizeof(uint64_t)*src.acquireCount);
- }
- if (src.pAcquireTimeoutMilliseconds) {
- pAcquireTimeoutMilliseconds = new uint32_t[src.acquireCount];
- memcpy ((void *)pAcquireTimeoutMilliseconds, (void *)src.pAcquireTimeoutMilliseconds, sizeof(uint32_t)*src.acquireCount);
- }
- if (releaseCount && src.pReleaseSyncs) {
- pReleaseSyncs = new VkDeviceMemory[releaseCount];
- for (uint32_t i = 0; i < releaseCount; ++i) {
- pReleaseSyncs[i] = src.pReleaseSyncs[i];
- }
- }
- if (src.pReleaseKeys) {
- pReleaseKeys = new uint64_t[src.releaseCount];
- memcpy ((void *)pReleaseKeys, (void *)src.pReleaseKeys, sizeof(uint64_t)*src.releaseCount);
- }
-
- return *this;
-}
-
-safe_VkWin32KeyedMutexAcquireReleaseInfoNV::~safe_VkWin32KeyedMutexAcquireReleaseInfoNV()
-{
- if (pAcquireSyncs)
- delete[] pAcquireSyncs;
- if (pAcquireKeys)
- delete[] pAcquireKeys;
- if (pAcquireTimeoutMilliseconds)
- delete[] pAcquireTimeoutMilliseconds;
- if (pReleaseSyncs)
- delete[] pReleaseSyncs;
- if (pReleaseKeys)
- delete[] pReleaseKeys;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkWin32KeyedMutexAcquireReleaseInfoNV::initialize(const VkWin32KeyedMutexAcquireReleaseInfoNV* in_struct)
-{
- sType = in_struct->sType;
- acquireCount = in_struct->acquireCount;
- pAcquireSyncs = nullptr;
- pAcquireKeys = nullptr;
- pAcquireTimeoutMilliseconds = nullptr;
- releaseCount = in_struct->releaseCount;
- pReleaseSyncs = nullptr;
- pReleaseKeys = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (acquireCount && in_struct->pAcquireSyncs) {
- pAcquireSyncs = new VkDeviceMemory[acquireCount];
- for (uint32_t i = 0; i < acquireCount; ++i) {
- pAcquireSyncs[i] = in_struct->pAcquireSyncs[i];
- }
- }
- if (in_struct->pAcquireKeys) {
- pAcquireKeys = new uint64_t[in_struct->acquireCount];
- memcpy ((void *)pAcquireKeys, (void *)in_struct->pAcquireKeys, sizeof(uint64_t)*in_struct->acquireCount);
- }
- if (in_struct->pAcquireTimeoutMilliseconds) {
- pAcquireTimeoutMilliseconds = new uint32_t[in_struct->acquireCount];
- memcpy ((void *)pAcquireTimeoutMilliseconds, (void *)in_struct->pAcquireTimeoutMilliseconds, sizeof(uint32_t)*in_struct->acquireCount);
- }
- if (releaseCount && in_struct->pReleaseSyncs) {
- pReleaseSyncs = new VkDeviceMemory[releaseCount];
- for (uint32_t i = 0; i < releaseCount; ++i) {
- pReleaseSyncs[i] = in_struct->pReleaseSyncs[i];
- }
- }
- if (in_struct->pReleaseKeys) {
- pReleaseKeys = new uint64_t[in_struct->releaseCount];
- memcpy ((void *)pReleaseKeys, (void *)in_struct->pReleaseKeys, sizeof(uint64_t)*in_struct->releaseCount);
- }
-}
-
-void safe_VkWin32KeyedMutexAcquireReleaseInfoNV::initialize(const safe_VkWin32KeyedMutexAcquireReleaseInfoNV* src)
-{
- sType = src->sType;
- acquireCount = src->acquireCount;
- pAcquireSyncs = nullptr;
- pAcquireKeys = nullptr;
- pAcquireTimeoutMilliseconds = nullptr;
- releaseCount = src->releaseCount;
- pReleaseSyncs = nullptr;
- pReleaseKeys = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (acquireCount && src->pAcquireSyncs) {
- pAcquireSyncs = new VkDeviceMemory[acquireCount];
- for (uint32_t i = 0; i < acquireCount; ++i) {
- pAcquireSyncs[i] = src->pAcquireSyncs[i];
- }
- }
- if (src->pAcquireKeys) {
- pAcquireKeys = new uint64_t[src->acquireCount];
- memcpy ((void *)pAcquireKeys, (void *)src->pAcquireKeys, sizeof(uint64_t)*src->acquireCount);
- }
- if (src->pAcquireTimeoutMilliseconds) {
- pAcquireTimeoutMilliseconds = new uint32_t[src->acquireCount];
- memcpy ((void *)pAcquireTimeoutMilliseconds, (void *)src->pAcquireTimeoutMilliseconds, sizeof(uint32_t)*src->acquireCount);
- }
- if (releaseCount && src->pReleaseSyncs) {
- pReleaseSyncs = new VkDeviceMemory[releaseCount];
- for (uint32_t i = 0; i < releaseCount; ++i) {
- pReleaseSyncs[i] = src->pReleaseSyncs[i];
- }
- }
- if (src->pReleaseKeys) {
- pReleaseKeys = new uint64_t[src->releaseCount];
- memcpy ((void *)pReleaseKeys, (void *)src->pReleaseKeys, sizeof(uint64_t)*src->releaseCount);
- }
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-
-safe_VkValidationFlagsEXT::safe_VkValidationFlagsEXT(const VkValidationFlagsEXT* in_struct) :
- sType(in_struct->sType),
- disabledValidationCheckCount(in_struct->disabledValidationCheckCount),
- pDisabledValidationChecks(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pDisabledValidationChecks) {
- pDisabledValidationChecks = new VkValidationCheckEXT[in_struct->disabledValidationCheckCount];
- memcpy ((void *)pDisabledValidationChecks, (void *)in_struct->pDisabledValidationChecks, sizeof(VkValidationCheckEXT)*in_struct->disabledValidationCheckCount);
- }
-}
-
-safe_VkValidationFlagsEXT::safe_VkValidationFlagsEXT() :
- pNext(nullptr),
- pDisabledValidationChecks(nullptr)
-{}
-
-safe_VkValidationFlagsEXT::safe_VkValidationFlagsEXT(const safe_VkValidationFlagsEXT& src)
-{
- sType = src.sType;
- disabledValidationCheckCount = src.disabledValidationCheckCount;
- pDisabledValidationChecks = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pDisabledValidationChecks) {
- pDisabledValidationChecks = new VkValidationCheckEXT[src.disabledValidationCheckCount];
- memcpy ((void *)pDisabledValidationChecks, (void *)src.pDisabledValidationChecks, sizeof(VkValidationCheckEXT)*src.disabledValidationCheckCount);
- }
-}
-
-safe_VkValidationFlagsEXT& safe_VkValidationFlagsEXT::operator=(const safe_VkValidationFlagsEXT& src)
-{
- if (&src == this) return *this;
-
- if (pDisabledValidationChecks)
- delete[] pDisabledValidationChecks;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- disabledValidationCheckCount = src.disabledValidationCheckCount;
- pDisabledValidationChecks = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pDisabledValidationChecks) {
- pDisabledValidationChecks = new VkValidationCheckEXT[src.disabledValidationCheckCount];
- memcpy ((void *)pDisabledValidationChecks, (void *)src.pDisabledValidationChecks, sizeof(VkValidationCheckEXT)*src.disabledValidationCheckCount);
- }
-
- return *this;
-}
-
-safe_VkValidationFlagsEXT::~safe_VkValidationFlagsEXT()
-{
- if (pDisabledValidationChecks)
- delete[] pDisabledValidationChecks;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkValidationFlagsEXT::initialize(const VkValidationFlagsEXT* in_struct)
-{
- sType = in_struct->sType;
- disabledValidationCheckCount = in_struct->disabledValidationCheckCount;
- pDisabledValidationChecks = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pDisabledValidationChecks) {
- pDisabledValidationChecks = new VkValidationCheckEXT[in_struct->disabledValidationCheckCount];
- memcpy ((void *)pDisabledValidationChecks, (void *)in_struct->pDisabledValidationChecks, sizeof(VkValidationCheckEXT)*in_struct->disabledValidationCheckCount);
- }
-}
-
-void safe_VkValidationFlagsEXT::initialize(const safe_VkValidationFlagsEXT* src)
-{
- sType = src->sType;
- disabledValidationCheckCount = src->disabledValidationCheckCount;
- pDisabledValidationChecks = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (src->pDisabledValidationChecks) {
- pDisabledValidationChecks = new VkValidationCheckEXT[src->disabledValidationCheckCount];
- memcpy ((void *)pDisabledValidationChecks, (void *)src->pDisabledValidationChecks, sizeof(VkValidationCheckEXT)*src->disabledValidationCheckCount);
- }
-}
-#ifdef VK_USE_PLATFORM_VI_NN
-
-
-safe_VkViSurfaceCreateInfoNN::safe_VkViSurfaceCreateInfoNN(const VkViSurfaceCreateInfoNN* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- window(in_struct->window)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkViSurfaceCreateInfoNN::safe_VkViSurfaceCreateInfoNN() :
- pNext(nullptr),
- window(nullptr)
-{}
-
-safe_VkViSurfaceCreateInfoNN::safe_VkViSurfaceCreateInfoNN(const safe_VkViSurfaceCreateInfoNN& src)
-{
- sType = src.sType;
- flags = src.flags;
- window = src.window;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkViSurfaceCreateInfoNN& safe_VkViSurfaceCreateInfoNN::operator=(const safe_VkViSurfaceCreateInfoNN& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- window = src.window;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkViSurfaceCreateInfoNN::~safe_VkViSurfaceCreateInfoNN()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkViSurfaceCreateInfoNN::initialize(const VkViSurfaceCreateInfoNN* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- window = in_struct->window;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkViSurfaceCreateInfoNN::initialize(const safe_VkViSurfaceCreateInfoNN* src)
-{
- sType = src->sType;
- flags = src->flags;
- window = src->window;
- pNext = SafePnextCopy(src->pNext);
-}
-#endif // VK_USE_PLATFORM_VI_NN
-
-
-safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT::safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT(const VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT* in_struct) :
- sType(in_struct->sType),
- textureCompressionASTC_HDR(in_struct->textureCompressionASTC_HDR)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT::safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT::safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT(const safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT& src)
-{
- sType = src.sType;
- textureCompressionASTC_HDR = src.textureCompressionASTC_HDR;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT& safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT::operator=(const safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- textureCompressionASTC_HDR = src.textureCompressionASTC_HDR;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT::~safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT::initialize(const VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT* in_struct)
-{
- sType = in_struct->sType;
- textureCompressionASTC_HDR = in_struct->textureCompressionASTC_HDR;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT::initialize(const safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT* src)
-{
- sType = src->sType;
- textureCompressionASTC_HDR = src->textureCompressionASTC_HDR;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkImageViewASTCDecodeModeEXT::safe_VkImageViewASTCDecodeModeEXT(const VkImageViewASTCDecodeModeEXT* in_struct) :
- sType(in_struct->sType),
- decodeMode(in_struct->decodeMode)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkImageViewASTCDecodeModeEXT::safe_VkImageViewASTCDecodeModeEXT() :
- pNext(nullptr)
-{}
-
-safe_VkImageViewASTCDecodeModeEXT::safe_VkImageViewASTCDecodeModeEXT(const safe_VkImageViewASTCDecodeModeEXT& src)
-{
- sType = src.sType;
- decodeMode = src.decodeMode;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkImageViewASTCDecodeModeEXT& safe_VkImageViewASTCDecodeModeEXT::operator=(const safe_VkImageViewASTCDecodeModeEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- decodeMode = src.decodeMode;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkImageViewASTCDecodeModeEXT::~safe_VkImageViewASTCDecodeModeEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkImageViewASTCDecodeModeEXT::initialize(const VkImageViewASTCDecodeModeEXT* in_struct)
-{
- sType = in_struct->sType;
- decodeMode = in_struct->decodeMode;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkImageViewASTCDecodeModeEXT::initialize(const safe_VkImageViewASTCDecodeModeEXT* src)
-{
- sType = src->sType;
- decodeMode = src->decodeMode;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceASTCDecodeFeaturesEXT::safe_VkPhysicalDeviceASTCDecodeFeaturesEXT(const VkPhysicalDeviceASTCDecodeFeaturesEXT* in_struct) :
- sType(in_struct->sType),
- decodeModeSharedExponent(in_struct->decodeModeSharedExponent)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceASTCDecodeFeaturesEXT::safe_VkPhysicalDeviceASTCDecodeFeaturesEXT() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceASTCDecodeFeaturesEXT::safe_VkPhysicalDeviceASTCDecodeFeaturesEXT(const safe_VkPhysicalDeviceASTCDecodeFeaturesEXT& src)
-{
- sType = src.sType;
- decodeModeSharedExponent = src.decodeModeSharedExponent;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceASTCDecodeFeaturesEXT& safe_VkPhysicalDeviceASTCDecodeFeaturesEXT::operator=(const safe_VkPhysicalDeviceASTCDecodeFeaturesEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- decodeModeSharedExponent = src.decodeModeSharedExponent;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceASTCDecodeFeaturesEXT::~safe_VkPhysicalDeviceASTCDecodeFeaturesEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceASTCDecodeFeaturesEXT::initialize(const VkPhysicalDeviceASTCDecodeFeaturesEXT* in_struct)
-{
- sType = in_struct->sType;
- decodeModeSharedExponent = in_struct->decodeModeSharedExponent;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceASTCDecodeFeaturesEXT::initialize(const safe_VkPhysicalDeviceASTCDecodeFeaturesEXT* src)
-{
- sType = src->sType;
- decodeModeSharedExponent = src->decodeModeSharedExponent;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkConditionalRenderingBeginInfoEXT::safe_VkConditionalRenderingBeginInfoEXT(const VkConditionalRenderingBeginInfoEXT* in_struct) :
- sType(in_struct->sType),
- buffer(in_struct->buffer),
- offset(in_struct->offset),
- flags(in_struct->flags)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkConditionalRenderingBeginInfoEXT::safe_VkConditionalRenderingBeginInfoEXT() :
- pNext(nullptr)
-{}
-
-safe_VkConditionalRenderingBeginInfoEXT::safe_VkConditionalRenderingBeginInfoEXT(const safe_VkConditionalRenderingBeginInfoEXT& src)
-{
- sType = src.sType;
- buffer = src.buffer;
- offset = src.offset;
- flags = src.flags;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkConditionalRenderingBeginInfoEXT& safe_VkConditionalRenderingBeginInfoEXT::operator=(const safe_VkConditionalRenderingBeginInfoEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- buffer = src.buffer;
- offset = src.offset;
- flags = src.flags;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkConditionalRenderingBeginInfoEXT::~safe_VkConditionalRenderingBeginInfoEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkConditionalRenderingBeginInfoEXT::initialize(const VkConditionalRenderingBeginInfoEXT* in_struct)
-{
- sType = in_struct->sType;
- buffer = in_struct->buffer;
- offset = in_struct->offset;
- flags = in_struct->flags;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkConditionalRenderingBeginInfoEXT::initialize(const safe_VkConditionalRenderingBeginInfoEXT* src)
-{
- sType = src->sType;
- buffer = src->buffer;
- offset = src->offset;
- flags = src->flags;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT::safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT(const VkPhysicalDeviceConditionalRenderingFeaturesEXT* in_struct) :
- sType(in_struct->sType),
- conditionalRendering(in_struct->conditionalRendering),
- inheritedConditionalRendering(in_struct->inheritedConditionalRendering)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT::safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT::safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT(const safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT& src)
-{
- sType = src.sType;
- conditionalRendering = src.conditionalRendering;
- inheritedConditionalRendering = src.inheritedConditionalRendering;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT& safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT::operator=(const safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- conditionalRendering = src.conditionalRendering;
- inheritedConditionalRendering = src.inheritedConditionalRendering;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT::~safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT::initialize(const VkPhysicalDeviceConditionalRenderingFeaturesEXT* in_struct)
-{
- sType = in_struct->sType;
- conditionalRendering = in_struct->conditionalRendering;
- inheritedConditionalRendering = in_struct->inheritedConditionalRendering;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT::initialize(const safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT* src)
-{
- sType = src->sType;
- conditionalRendering = src->conditionalRendering;
- inheritedConditionalRendering = src->inheritedConditionalRendering;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT::safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT(const VkCommandBufferInheritanceConditionalRenderingInfoEXT* in_struct) :
- sType(in_struct->sType),
- conditionalRenderingEnable(in_struct->conditionalRenderingEnable)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT::safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT() :
- pNext(nullptr)
-{}
-
-safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT::safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT(const safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT& src)
-{
- sType = src.sType;
- conditionalRenderingEnable = src.conditionalRenderingEnable;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT& safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT::operator=(const safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- conditionalRenderingEnable = src.conditionalRenderingEnable;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT::~safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT::initialize(const VkCommandBufferInheritanceConditionalRenderingInfoEXT* in_struct)
-{
- sType = in_struct->sType;
- conditionalRenderingEnable = in_struct->conditionalRenderingEnable;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT::initialize(const safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT* src)
-{
- sType = src->sType;
- conditionalRenderingEnable = src->conditionalRenderingEnable;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkDeviceGeneratedCommandsFeaturesNVX::safe_VkDeviceGeneratedCommandsFeaturesNVX(const VkDeviceGeneratedCommandsFeaturesNVX* in_struct) :
- sType(in_struct->sType),
- computeBindingPointSupport(in_struct->computeBindingPointSupport)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkDeviceGeneratedCommandsFeaturesNVX::safe_VkDeviceGeneratedCommandsFeaturesNVX() :
- pNext(nullptr)
-{}
-
-safe_VkDeviceGeneratedCommandsFeaturesNVX::safe_VkDeviceGeneratedCommandsFeaturesNVX(const safe_VkDeviceGeneratedCommandsFeaturesNVX& src)
-{
- sType = src.sType;
- computeBindingPointSupport = src.computeBindingPointSupport;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkDeviceGeneratedCommandsFeaturesNVX& safe_VkDeviceGeneratedCommandsFeaturesNVX::operator=(const safe_VkDeviceGeneratedCommandsFeaturesNVX& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- computeBindingPointSupport = src.computeBindingPointSupport;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkDeviceGeneratedCommandsFeaturesNVX::~safe_VkDeviceGeneratedCommandsFeaturesNVX()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDeviceGeneratedCommandsFeaturesNVX::initialize(const VkDeviceGeneratedCommandsFeaturesNVX* in_struct)
-{
- sType = in_struct->sType;
- computeBindingPointSupport = in_struct->computeBindingPointSupport;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkDeviceGeneratedCommandsFeaturesNVX::initialize(const safe_VkDeviceGeneratedCommandsFeaturesNVX* src)
-{
- sType = src->sType;
- computeBindingPointSupport = src->computeBindingPointSupport;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkDeviceGeneratedCommandsLimitsNVX::safe_VkDeviceGeneratedCommandsLimitsNVX(const VkDeviceGeneratedCommandsLimitsNVX* in_struct) :
- sType(in_struct->sType),
- maxIndirectCommandsLayoutTokenCount(in_struct->maxIndirectCommandsLayoutTokenCount),
- maxObjectEntryCounts(in_struct->maxObjectEntryCounts),
- minSequenceCountBufferOffsetAlignment(in_struct->minSequenceCountBufferOffsetAlignment),
- minSequenceIndexBufferOffsetAlignment(in_struct->minSequenceIndexBufferOffsetAlignment),
- minCommandsTokenBufferOffsetAlignment(in_struct->minCommandsTokenBufferOffsetAlignment)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkDeviceGeneratedCommandsLimitsNVX::safe_VkDeviceGeneratedCommandsLimitsNVX() :
- pNext(nullptr)
-{}
-
-safe_VkDeviceGeneratedCommandsLimitsNVX::safe_VkDeviceGeneratedCommandsLimitsNVX(const safe_VkDeviceGeneratedCommandsLimitsNVX& src)
-{
- sType = src.sType;
- maxIndirectCommandsLayoutTokenCount = src.maxIndirectCommandsLayoutTokenCount;
- maxObjectEntryCounts = src.maxObjectEntryCounts;
- minSequenceCountBufferOffsetAlignment = src.minSequenceCountBufferOffsetAlignment;
- minSequenceIndexBufferOffsetAlignment = src.minSequenceIndexBufferOffsetAlignment;
- minCommandsTokenBufferOffsetAlignment = src.minCommandsTokenBufferOffsetAlignment;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkDeviceGeneratedCommandsLimitsNVX& safe_VkDeviceGeneratedCommandsLimitsNVX::operator=(const safe_VkDeviceGeneratedCommandsLimitsNVX& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- maxIndirectCommandsLayoutTokenCount = src.maxIndirectCommandsLayoutTokenCount;
- maxObjectEntryCounts = src.maxObjectEntryCounts;
- minSequenceCountBufferOffsetAlignment = src.minSequenceCountBufferOffsetAlignment;
- minSequenceIndexBufferOffsetAlignment = src.minSequenceIndexBufferOffsetAlignment;
- minCommandsTokenBufferOffsetAlignment = src.minCommandsTokenBufferOffsetAlignment;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkDeviceGeneratedCommandsLimitsNVX::~safe_VkDeviceGeneratedCommandsLimitsNVX()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDeviceGeneratedCommandsLimitsNVX::initialize(const VkDeviceGeneratedCommandsLimitsNVX* in_struct)
-{
- sType = in_struct->sType;
- maxIndirectCommandsLayoutTokenCount = in_struct->maxIndirectCommandsLayoutTokenCount;
- maxObjectEntryCounts = in_struct->maxObjectEntryCounts;
- minSequenceCountBufferOffsetAlignment = in_struct->minSequenceCountBufferOffsetAlignment;
- minSequenceIndexBufferOffsetAlignment = in_struct->minSequenceIndexBufferOffsetAlignment;
- minCommandsTokenBufferOffsetAlignment = in_struct->minCommandsTokenBufferOffsetAlignment;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkDeviceGeneratedCommandsLimitsNVX::initialize(const safe_VkDeviceGeneratedCommandsLimitsNVX* src)
-{
- sType = src->sType;
- maxIndirectCommandsLayoutTokenCount = src->maxIndirectCommandsLayoutTokenCount;
- maxObjectEntryCounts = src->maxObjectEntryCounts;
- minSequenceCountBufferOffsetAlignment = src->minSequenceCountBufferOffsetAlignment;
- minSequenceIndexBufferOffsetAlignment = src->minSequenceIndexBufferOffsetAlignment;
- minCommandsTokenBufferOffsetAlignment = src->minCommandsTokenBufferOffsetAlignment;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkIndirectCommandsLayoutCreateInfoNVX::safe_VkIndirectCommandsLayoutCreateInfoNVX(const VkIndirectCommandsLayoutCreateInfoNVX* in_struct) :
- sType(in_struct->sType),
- pipelineBindPoint(in_struct->pipelineBindPoint),
- flags(in_struct->flags),
- tokenCount(in_struct->tokenCount),
- pTokens(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pTokens) {
- pTokens = new VkIndirectCommandsLayoutTokenNVX[in_struct->tokenCount];
- memcpy ((void *)pTokens, (void *)in_struct->pTokens, sizeof(VkIndirectCommandsLayoutTokenNVX)*in_struct->tokenCount);
- }
-}
-
-safe_VkIndirectCommandsLayoutCreateInfoNVX::safe_VkIndirectCommandsLayoutCreateInfoNVX() :
- pNext(nullptr),
- pTokens(nullptr)
-{}
-
-safe_VkIndirectCommandsLayoutCreateInfoNVX::safe_VkIndirectCommandsLayoutCreateInfoNVX(const safe_VkIndirectCommandsLayoutCreateInfoNVX& src)
-{
- sType = src.sType;
- pipelineBindPoint = src.pipelineBindPoint;
- flags = src.flags;
- tokenCount = src.tokenCount;
- pTokens = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pTokens) {
- pTokens = new VkIndirectCommandsLayoutTokenNVX[src.tokenCount];
- memcpy ((void *)pTokens, (void *)src.pTokens, sizeof(VkIndirectCommandsLayoutTokenNVX)*src.tokenCount);
- }
-}
-
-safe_VkIndirectCommandsLayoutCreateInfoNVX& safe_VkIndirectCommandsLayoutCreateInfoNVX::operator=(const safe_VkIndirectCommandsLayoutCreateInfoNVX& src)
-{
- if (&src == this) return *this;
-
- if (pTokens)
- delete[] pTokens;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- pipelineBindPoint = src.pipelineBindPoint;
- flags = src.flags;
- tokenCount = src.tokenCount;
- pTokens = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pTokens) {
- pTokens = new VkIndirectCommandsLayoutTokenNVX[src.tokenCount];
- memcpy ((void *)pTokens, (void *)src.pTokens, sizeof(VkIndirectCommandsLayoutTokenNVX)*src.tokenCount);
- }
-
- return *this;
-}
-
-safe_VkIndirectCommandsLayoutCreateInfoNVX::~safe_VkIndirectCommandsLayoutCreateInfoNVX()
-{
- if (pTokens)
- delete[] pTokens;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkIndirectCommandsLayoutCreateInfoNVX::initialize(const VkIndirectCommandsLayoutCreateInfoNVX* in_struct)
-{
- sType = in_struct->sType;
- pipelineBindPoint = in_struct->pipelineBindPoint;
- flags = in_struct->flags;
- tokenCount = in_struct->tokenCount;
- pTokens = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pTokens) {
- pTokens = new VkIndirectCommandsLayoutTokenNVX[in_struct->tokenCount];
- memcpy ((void *)pTokens, (void *)in_struct->pTokens, sizeof(VkIndirectCommandsLayoutTokenNVX)*in_struct->tokenCount);
- }
-}
-
-void safe_VkIndirectCommandsLayoutCreateInfoNVX::initialize(const safe_VkIndirectCommandsLayoutCreateInfoNVX* src)
-{
- sType = src->sType;
- pipelineBindPoint = src->pipelineBindPoint;
- flags = src->flags;
- tokenCount = src->tokenCount;
- pTokens = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (src->pTokens) {
- pTokens = new VkIndirectCommandsLayoutTokenNVX[src->tokenCount];
- memcpy ((void *)pTokens, (void *)src->pTokens, sizeof(VkIndirectCommandsLayoutTokenNVX)*src->tokenCount);
- }
-}
-
-safe_VkCmdProcessCommandsInfoNVX::safe_VkCmdProcessCommandsInfoNVX(const VkCmdProcessCommandsInfoNVX* in_struct) :
- sType(in_struct->sType),
- objectTable(in_struct->objectTable),
- indirectCommandsLayout(in_struct->indirectCommandsLayout),
- indirectCommandsTokenCount(in_struct->indirectCommandsTokenCount),
- pIndirectCommandsTokens(nullptr),
- maxSequencesCount(in_struct->maxSequencesCount),
- targetCommandBuffer(in_struct->targetCommandBuffer),
- sequencesCountBuffer(in_struct->sequencesCountBuffer),
- sequencesCountOffset(in_struct->sequencesCountOffset),
- sequencesIndexBuffer(in_struct->sequencesIndexBuffer),
- sequencesIndexOffset(in_struct->sequencesIndexOffset)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (indirectCommandsTokenCount && in_struct->pIndirectCommandsTokens) {
- pIndirectCommandsTokens = new VkIndirectCommandsTokenNVX[indirectCommandsTokenCount];
- for (uint32_t i = 0; i < indirectCommandsTokenCount; ++i) {
- pIndirectCommandsTokens[i] = in_struct->pIndirectCommandsTokens[i];
- }
- }
-}
-
-safe_VkCmdProcessCommandsInfoNVX::safe_VkCmdProcessCommandsInfoNVX() :
- pNext(nullptr),
- pIndirectCommandsTokens(nullptr)
-{}
-
-safe_VkCmdProcessCommandsInfoNVX::safe_VkCmdProcessCommandsInfoNVX(const safe_VkCmdProcessCommandsInfoNVX& src)
-{
- sType = src.sType;
- objectTable = src.objectTable;
- indirectCommandsLayout = src.indirectCommandsLayout;
- indirectCommandsTokenCount = src.indirectCommandsTokenCount;
- pIndirectCommandsTokens = nullptr;
- maxSequencesCount = src.maxSequencesCount;
- targetCommandBuffer = src.targetCommandBuffer;
- sequencesCountBuffer = src.sequencesCountBuffer;
- sequencesCountOffset = src.sequencesCountOffset;
- sequencesIndexBuffer = src.sequencesIndexBuffer;
- sequencesIndexOffset = src.sequencesIndexOffset;
- pNext = SafePnextCopy(src.pNext);
- if (indirectCommandsTokenCount && src.pIndirectCommandsTokens) {
- pIndirectCommandsTokens = new VkIndirectCommandsTokenNVX[indirectCommandsTokenCount];
- for (uint32_t i = 0; i < indirectCommandsTokenCount; ++i) {
- pIndirectCommandsTokens[i] = src.pIndirectCommandsTokens[i];
- }
- }
-}
-
-safe_VkCmdProcessCommandsInfoNVX& safe_VkCmdProcessCommandsInfoNVX::operator=(const safe_VkCmdProcessCommandsInfoNVX& src)
-{
- if (&src == this) return *this;
-
- if (pIndirectCommandsTokens)
- delete[] pIndirectCommandsTokens;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- objectTable = src.objectTable;
- indirectCommandsLayout = src.indirectCommandsLayout;
- indirectCommandsTokenCount = src.indirectCommandsTokenCount;
- pIndirectCommandsTokens = nullptr;
- maxSequencesCount = src.maxSequencesCount;
- targetCommandBuffer = src.targetCommandBuffer;
- sequencesCountBuffer = src.sequencesCountBuffer;
- sequencesCountOffset = src.sequencesCountOffset;
- sequencesIndexBuffer = src.sequencesIndexBuffer;
- sequencesIndexOffset = src.sequencesIndexOffset;
- pNext = SafePnextCopy(src.pNext);
- if (indirectCommandsTokenCount && src.pIndirectCommandsTokens) {
- pIndirectCommandsTokens = new VkIndirectCommandsTokenNVX[indirectCommandsTokenCount];
- for (uint32_t i = 0; i < indirectCommandsTokenCount; ++i) {
- pIndirectCommandsTokens[i] = src.pIndirectCommandsTokens[i];
- }
- }
-
- return *this;
-}
-
-safe_VkCmdProcessCommandsInfoNVX::~safe_VkCmdProcessCommandsInfoNVX()
-{
- if (pIndirectCommandsTokens)
- delete[] pIndirectCommandsTokens;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkCmdProcessCommandsInfoNVX::initialize(const VkCmdProcessCommandsInfoNVX* in_struct)
-{
- sType = in_struct->sType;
- objectTable = in_struct->objectTable;
- indirectCommandsLayout = in_struct->indirectCommandsLayout;
- indirectCommandsTokenCount = in_struct->indirectCommandsTokenCount;
- pIndirectCommandsTokens = nullptr;
- maxSequencesCount = in_struct->maxSequencesCount;
- targetCommandBuffer = in_struct->targetCommandBuffer;
- sequencesCountBuffer = in_struct->sequencesCountBuffer;
- sequencesCountOffset = in_struct->sequencesCountOffset;
- sequencesIndexBuffer = in_struct->sequencesIndexBuffer;
- sequencesIndexOffset = in_struct->sequencesIndexOffset;
- pNext = SafePnextCopy(in_struct->pNext);
- if (indirectCommandsTokenCount && in_struct->pIndirectCommandsTokens) {
- pIndirectCommandsTokens = new VkIndirectCommandsTokenNVX[indirectCommandsTokenCount];
- for (uint32_t i = 0; i < indirectCommandsTokenCount; ++i) {
- pIndirectCommandsTokens[i] = in_struct->pIndirectCommandsTokens[i];
- }
- }
-}
-
-void safe_VkCmdProcessCommandsInfoNVX::initialize(const safe_VkCmdProcessCommandsInfoNVX* src)
-{
- sType = src->sType;
- objectTable = src->objectTable;
- indirectCommandsLayout = src->indirectCommandsLayout;
- indirectCommandsTokenCount = src->indirectCommandsTokenCount;
- pIndirectCommandsTokens = nullptr;
- maxSequencesCount = src->maxSequencesCount;
- targetCommandBuffer = src->targetCommandBuffer;
- sequencesCountBuffer = src->sequencesCountBuffer;
- sequencesCountOffset = src->sequencesCountOffset;
- sequencesIndexBuffer = src->sequencesIndexBuffer;
- sequencesIndexOffset = src->sequencesIndexOffset;
- pNext = SafePnextCopy(src->pNext);
- if (indirectCommandsTokenCount && src->pIndirectCommandsTokens) {
- pIndirectCommandsTokens = new VkIndirectCommandsTokenNVX[indirectCommandsTokenCount];
- for (uint32_t i = 0; i < indirectCommandsTokenCount; ++i) {
- pIndirectCommandsTokens[i] = src->pIndirectCommandsTokens[i];
- }
- }
-}
-
-safe_VkCmdReserveSpaceForCommandsInfoNVX::safe_VkCmdReserveSpaceForCommandsInfoNVX(const VkCmdReserveSpaceForCommandsInfoNVX* in_struct) :
- sType(in_struct->sType),
- objectTable(in_struct->objectTable),
- indirectCommandsLayout(in_struct->indirectCommandsLayout),
- maxSequencesCount(in_struct->maxSequencesCount)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkCmdReserveSpaceForCommandsInfoNVX::safe_VkCmdReserveSpaceForCommandsInfoNVX() :
- pNext(nullptr)
-{}
-
-safe_VkCmdReserveSpaceForCommandsInfoNVX::safe_VkCmdReserveSpaceForCommandsInfoNVX(const safe_VkCmdReserveSpaceForCommandsInfoNVX& src)
-{
- sType = src.sType;
- objectTable = src.objectTable;
- indirectCommandsLayout = src.indirectCommandsLayout;
- maxSequencesCount = src.maxSequencesCount;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkCmdReserveSpaceForCommandsInfoNVX& safe_VkCmdReserveSpaceForCommandsInfoNVX::operator=(const safe_VkCmdReserveSpaceForCommandsInfoNVX& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- objectTable = src.objectTable;
- indirectCommandsLayout = src.indirectCommandsLayout;
- maxSequencesCount = src.maxSequencesCount;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkCmdReserveSpaceForCommandsInfoNVX::~safe_VkCmdReserveSpaceForCommandsInfoNVX()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkCmdReserveSpaceForCommandsInfoNVX::initialize(const VkCmdReserveSpaceForCommandsInfoNVX* in_struct)
-{
- sType = in_struct->sType;
- objectTable = in_struct->objectTable;
- indirectCommandsLayout = in_struct->indirectCommandsLayout;
- maxSequencesCount = in_struct->maxSequencesCount;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkCmdReserveSpaceForCommandsInfoNVX::initialize(const safe_VkCmdReserveSpaceForCommandsInfoNVX* src)
-{
- sType = src->sType;
- objectTable = src->objectTable;
- indirectCommandsLayout = src->indirectCommandsLayout;
- maxSequencesCount = src->maxSequencesCount;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkObjectTableCreateInfoNVX::safe_VkObjectTableCreateInfoNVX(const VkObjectTableCreateInfoNVX* in_struct) :
- sType(in_struct->sType),
- objectCount(in_struct->objectCount),
- pObjectEntryTypes(nullptr),
- pObjectEntryCounts(nullptr),
- pObjectEntryUsageFlags(nullptr),
- maxUniformBuffersPerDescriptor(in_struct->maxUniformBuffersPerDescriptor),
- maxStorageBuffersPerDescriptor(in_struct->maxStorageBuffersPerDescriptor),
- maxStorageImagesPerDescriptor(in_struct->maxStorageImagesPerDescriptor),
- maxSampledImagesPerDescriptor(in_struct->maxSampledImagesPerDescriptor),
- maxPipelineLayouts(in_struct->maxPipelineLayouts)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pObjectEntryTypes) {
- pObjectEntryTypes = new VkObjectEntryTypeNVX[in_struct->objectCount];
- memcpy ((void *)pObjectEntryTypes, (void *)in_struct->pObjectEntryTypes, sizeof(VkObjectEntryTypeNVX)*in_struct->objectCount);
- }
- if (in_struct->pObjectEntryCounts) {
- pObjectEntryCounts = new uint32_t[in_struct->objectCount];
- memcpy ((void *)pObjectEntryCounts, (void *)in_struct->pObjectEntryCounts, sizeof(uint32_t)*in_struct->objectCount);
- }
- if (in_struct->pObjectEntryUsageFlags) {
- pObjectEntryUsageFlags = new VkObjectEntryUsageFlagsNVX[in_struct->objectCount];
- memcpy ((void *)pObjectEntryUsageFlags, (void *)in_struct->pObjectEntryUsageFlags, sizeof(VkObjectEntryUsageFlagsNVX)*in_struct->objectCount);
- }
-}
-
-safe_VkObjectTableCreateInfoNVX::safe_VkObjectTableCreateInfoNVX() :
- pNext(nullptr),
- pObjectEntryTypes(nullptr),
- pObjectEntryCounts(nullptr),
- pObjectEntryUsageFlags(nullptr)
-{}
-
-safe_VkObjectTableCreateInfoNVX::safe_VkObjectTableCreateInfoNVX(const safe_VkObjectTableCreateInfoNVX& src)
-{
- sType = src.sType;
- objectCount = src.objectCount;
- pObjectEntryTypes = nullptr;
- pObjectEntryCounts = nullptr;
- pObjectEntryUsageFlags = nullptr;
- maxUniformBuffersPerDescriptor = src.maxUniformBuffersPerDescriptor;
- maxStorageBuffersPerDescriptor = src.maxStorageBuffersPerDescriptor;
- maxStorageImagesPerDescriptor = src.maxStorageImagesPerDescriptor;
- maxSampledImagesPerDescriptor = src.maxSampledImagesPerDescriptor;
- maxPipelineLayouts = src.maxPipelineLayouts;
- pNext = SafePnextCopy(src.pNext);
- if (src.pObjectEntryTypes) {
- pObjectEntryTypes = new VkObjectEntryTypeNVX[src.objectCount];
- memcpy ((void *)pObjectEntryTypes, (void *)src.pObjectEntryTypes, sizeof(VkObjectEntryTypeNVX)*src.objectCount);
- }
- if (src.pObjectEntryCounts) {
- pObjectEntryCounts = new uint32_t[src.objectCount];
- memcpy ((void *)pObjectEntryCounts, (void *)src.pObjectEntryCounts, sizeof(uint32_t)*src.objectCount);
- }
- if (src.pObjectEntryUsageFlags) {
- pObjectEntryUsageFlags = new VkObjectEntryUsageFlagsNVX[src.objectCount];
- memcpy ((void *)pObjectEntryUsageFlags, (void *)src.pObjectEntryUsageFlags, sizeof(VkObjectEntryUsageFlagsNVX)*src.objectCount);
- }
-}
-
-safe_VkObjectTableCreateInfoNVX& safe_VkObjectTableCreateInfoNVX::operator=(const safe_VkObjectTableCreateInfoNVX& src)
-{
- if (&src == this) return *this;
-
- if (pObjectEntryTypes)
- delete[] pObjectEntryTypes;
- if (pObjectEntryCounts)
- delete[] pObjectEntryCounts;
- if (pObjectEntryUsageFlags)
- delete[] pObjectEntryUsageFlags;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- objectCount = src.objectCount;
- pObjectEntryTypes = nullptr;
- pObjectEntryCounts = nullptr;
- pObjectEntryUsageFlags = nullptr;
- maxUniformBuffersPerDescriptor = src.maxUniformBuffersPerDescriptor;
- maxStorageBuffersPerDescriptor = src.maxStorageBuffersPerDescriptor;
- maxStorageImagesPerDescriptor = src.maxStorageImagesPerDescriptor;
- maxSampledImagesPerDescriptor = src.maxSampledImagesPerDescriptor;
- maxPipelineLayouts = src.maxPipelineLayouts;
- pNext = SafePnextCopy(src.pNext);
- if (src.pObjectEntryTypes) {
- pObjectEntryTypes = new VkObjectEntryTypeNVX[src.objectCount];
- memcpy ((void *)pObjectEntryTypes, (void *)src.pObjectEntryTypes, sizeof(VkObjectEntryTypeNVX)*src.objectCount);
- }
- if (src.pObjectEntryCounts) {
- pObjectEntryCounts = new uint32_t[src.objectCount];
- memcpy ((void *)pObjectEntryCounts, (void *)src.pObjectEntryCounts, sizeof(uint32_t)*src.objectCount);
- }
- if (src.pObjectEntryUsageFlags) {
- pObjectEntryUsageFlags = new VkObjectEntryUsageFlagsNVX[src.objectCount];
- memcpy ((void *)pObjectEntryUsageFlags, (void *)src.pObjectEntryUsageFlags, sizeof(VkObjectEntryUsageFlagsNVX)*src.objectCount);
- }
-
- return *this;
-}
-
-safe_VkObjectTableCreateInfoNVX::~safe_VkObjectTableCreateInfoNVX()
-{
- if (pObjectEntryTypes)
- delete[] pObjectEntryTypes;
- if (pObjectEntryCounts)
- delete[] pObjectEntryCounts;
- if (pObjectEntryUsageFlags)
- delete[] pObjectEntryUsageFlags;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkObjectTableCreateInfoNVX::initialize(const VkObjectTableCreateInfoNVX* in_struct)
-{
- sType = in_struct->sType;
- objectCount = in_struct->objectCount;
- pObjectEntryTypes = nullptr;
- pObjectEntryCounts = nullptr;
- pObjectEntryUsageFlags = nullptr;
- maxUniformBuffersPerDescriptor = in_struct->maxUniformBuffersPerDescriptor;
- maxStorageBuffersPerDescriptor = in_struct->maxStorageBuffersPerDescriptor;
- maxStorageImagesPerDescriptor = in_struct->maxStorageImagesPerDescriptor;
- maxSampledImagesPerDescriptor = in_struct->maxSampledImagesPerDescriptor;
- maxPipelineLayouts = in_struct->maxPipelineLayouts;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pObjectEntryTypes) {
- pObjectEntryTypes = new VkObjectEntryTypeNVX[in_struct->objectCount];
- memcpy ((void *)pObjectEntryTypes, (void *)in_struct->pObjectEntryTypes, sizeof(VkObjectEntryTypeNVX)*in_struct->objectCount);
- }
- if (in_struct->pObjectEntryCounts) {
- pObjectEntryCounts = new uint32_t[in_struct->objectCount];
- memcpy ((void *)pObjectEntryCounts, (void *)in_struct->pObjectEntryCounts, sizeof(uint32_t)*in_struct->objectCount);
- }
- if (in_struct->pObjectEntryUsageFlags) {
- pObjectEntryUsageFlags = new VkObjectEntryUsageFlagsNVX[in_struct->objectCount];
- memcpy ((void *)pObjectEntryUsageFlags, (void *)in_struct->pObjectEntryUsageFlags, sizeof(VkObjectEntryUsageFlagsNVX)*in_struct->objectCount);
- }
-}
-
-void safe_VkObjectTableCreateInfoNVX::initialize(const safe_VkObjectTableCreateInfoNVX* src)
-{
- sType = src->sType;
- objectCount = src->objectCount;
- pObjectEntryTypes = nullptr;
- pObjectEntryCounts = nullptr;
- pObjectEntryUsageFlags = nullptr;
- maxUniformBuffersPerDescriptor = src->maxUniformBuffersPerDescriptor;
- maxStorageBuffersPerDescriptor = src->maxStorageBuffersPerDescriptor;
- maxStorageImagesPerDescriptor = src->maxStorageImagesPerDescriptor;
- maxSampledImagesPerDescriptor = src->maxSampledImagesPerDescriptor;
- maxPipelineLayouts = src->maxPipelineLayouts;
- pNext = SafePnextCopy(src->pNext);
- if (src->pObjectEntryTypes) {
- pObjectEntryTypes = new VkObjectEntryTypeNVX[src->objectCount];
- memcpy ((void *)pObjectEntryTypes, (void *)src->pObjectEntryTypes, sizeof(VkObjectEntryTypeNVX)*src->objectCount);
- }
- if (src->pObjectEntryCounts) {
- pObjectEntryCounts = new uint32_t[src->objectCount];
- memcpy ((void *)pObjectEntryCounts, (void *)src->pObjectEntryCounts, sizeof(uint32_t)*src->objectCount);
- }
- if (src->pObjectEntryUsageFlags) {
- pObjectEntryUsageFlags = new VkObjectEntryUsageFlagsNVX[src->objectCount];
- memcpy ((void *)pObjectEntryUsageFlags, (void *)src->pObjectEntryUsageFlags, sizeof(VkObjectEntryUsageFlagsNVX)*src->objectCount);
- }
-}
-
-safe_VkPipelineViewportWScalingStateCreateInfoNV::safe_VkPipelineViewportWScalingStateCreateInfoNV(const VkPipelineViewportWScalingStateCreateInfoNV* in_struct) :
- sType(in_struct->sType),
- viewportWScalingEnable(in_struct->viewportWScalingEnable),
- viewportCount(in_struct->viewportCount),
- pViewportWScalings(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pViewportWScalings) {
- pViewportWScalings = new VkViewportWScalingNV[in_struct->viewportCount];
- memcpy ((void *)pViewportWScalings, (void *)in_struct->pViewportWScalings, sizeof(VkViewportWScalingNV)*in_struct->viewportCount);
- }
-}
-
-safe_VkPipelineViewportWScalingStateCreateInfoNV::safe_VkPipelineViewportWScalingStateCreateInfoNV() :
- pNext(nullptr),
- pViewportWScalings(nullptr)
-{}
-
-safe_VkPipelineViewportWScalingStateCreateInfoNV::safe_VkPipelineViewportWScalingStateCreateInfoNV(const safe_VkPipelineViewportWScalingStateCreateInfoNV& src)
-{
- sType = src.sType;
- viewportWScalingEnable = src.viewportWScalingEnable;
- viewportCount = src.viewportCount;
- pViewportWScalings = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pViewportWScalings) {
- pViewportWScalings = new VkViewportWScalingNV[src.viewportCount];
- memcpy ((void *)pViewportWScalings, (void *)src.pViewportWScalings, sizeof(VkViewportWScalingNV)*src.viewportCount);
- }
-}
-
-safe_VkPipelineViewportWScalingStateCreateInfoNV& safe_VkPipelineViewportWScalingStateCreateInfoNV::operator=(const safe_VkPipelineViewportWScalingStateCreateInfoNV& src)
-{
- if (&src == this) return *this;
-
- if (pViewportWScalings)
- delete[] pViewportWScalings;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- viewportWScalingEnable = src.viewportWScalingEnable;
- viewportCount = src.viewportCount;
- pViewportWScalings = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pViewportWScalings) {
- pViewportWScalings = new VkViewportWScalingNV[src.viewportCount];
- memcpy ((void *)pViewportWScalings, (void *)src.pViewportWScalings, sizeof(VkViewportWScalingNV)*src.viewportCount);
- }
-
- return *this;
-}
-
-safe_VkPipelineViewportWScalingStateCreateInfoNV::~safe_VkPipelineViewportWScalingStateCreateInfoNV()
-{
- if (pViewportWScalings)
- delete[] pViewportWScalings;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPipelineViewportWScalingStateCreateInfoNV::initialize(const VkPipelineViewportWScalingStateCreateInfoNV* in_struct)
-{
- sType = in_struct->sType;
- viewportWScalingEnable = in_struct->viewportWScalingEnable;
- viewportCount = in_struct->viewportCount;
- pViewportWScalings = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pViewportWScalings) {
- pViewportWScalings = new VkViewportWScalingNV[in_struct->viewportCount];
- memcpy ((void *)pViewportWScalings, (void *)in_struct->pViewportWScalings, sizeof(VkViewportWScalingNV)*in_struct->viewportCount);
- }
-}
-
-void safe_VkPipelineViewportWScalingStateCreateInfoNV::initialize(const safe_VkPipelineViewportWScalingStateCreateInfoNV* src)
-{
- sType = src->sType;
- viewportWScalingEnable = src->viewportWScalingEnable;
- viewportCount = src->viewportCount;
- pViewportWScalings = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (src->pViewportWScalings) {
- pViewportWScalings = new VkViewportWScalingNV[src->viewportCount];
- memcpy ((void *)pViewportWScalings, (void *)src->pViewportWScalings, sizeof(VkViewportWScalingNV)*src->viewportCount);
- }
-}
-
-safe_VkSurfaceCapabilities2EXT::safe_VkSurfaceCapabilities2EXT(const VkSurfaceCapabilities2EXT* in_struct) :
- sType(in_struct->sType),
- minImageCount(in_struct->minImageCount),
- maxImageCount(in_struct->maxImageCount),
- currentExtent(in_struct->currentExtent),
- minImageExtent(in_struct->minImageExtent),
- maxImageExtent(in_struct->maxImageExtent),
- maxImageArrayLayers(in_struct->maxImageArrayLayers),
- supportedTransforms(in_struct->supportedTransforms),
- currentTransform(in_struct->currentTransform),
- supportedCompositeAlpha(in_struct->supportedCompositeAlpha),
- supportedUsageFlags(in_struct->supportedUsageFlags),
- supportedSurfaceCounters(in_struct->supportedSurfaceCounters)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkSurfaceCapabilities2EXT::safe_VkSurfaceCapabilities2EXT() :
- pNext(nullptr)
-{}
-
-safe_VkSurfaceCapabilities2EXT::safe_VkSurfaceCapabilities2EXT(const safe_VkSurfaceCapabilities2EXT& src)
-{
- sType = src.sType;
- minImageCount = src.minImageCount;
- maxImageCount = src.maxImageCount;
- currentExtent = src.currentExtent;
- minImageExtent = src.minImageExtent;
- maxImageExtent = src.maxImageExtent;
- maxImageArrayLayers = src.maxImageArrayLayers;
- supportedTransforms = src.supportedTransforms;
- currentTransform = src.currentTransform;
- supportedCompositeAlpha = src.supportedCompositeAlpha;
- supportedUsageFlags = src.supportedUsageFlags;
- supportedSurfaceCounters = src.supportedSurfaceCounters;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkSurfaceCapabilities2EXT& safe_VkSurfaceCapabilities2EXT::operator=(const safe_VkSurfaceCapabilities2EXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- minImageCount = src.minImageCount;
- maxImageCount = src.maxImageCount;
- currentExtent = src.currentExtent;
- minImageExtent = src.minImageExtent;
- maxImageExtent = src.maxImageExtent;
- maxImageArrayLayers = src.maxImageArrayLayers;
- supportedTransforms = src.supportedTransforms;
- currentTransform = src.currentTransform;
- supportedCompositeAlpha = src.supportedCompositeAlpha;
- supportedUsageFlags = src.supportedUsageFlags;
- supportedSurfaceCounters = src.supportedSurfaceCounters;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkSurfaceCapabilities2EXT::~safe_VkSurfaceCapabilities2EXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkSurfaceCapabilities2EXT::initialize(const VkSurfaceCapabilities2EXT* in_struct)
-{
- sType = in_struct->sType;
- minImageCount = in_struct->minImageCount;
- maxImageCount = in_struct->maxImageCount;
- currentExtent = in_struct->currentExtent;
- minImageExtent = in_struct->minImageExtent;
- maxImageExtent = in_struct->maxImageExtent;
- maxImageArrayLayers = in_struct->maxImageArrayLayers;
- supportedTransforms = in_struct->supportedTransforms;
- currentTransform = in_struct->currentTransform;
- supportedCompositeAlpha = in_struct->supportedCompositeAlpha;
- supportedUsageFlags = in_struct->supportedUsageFlags;
- supportedSurfaceCounters = in_struct->supportedSurfaceCounters;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkSurfaceCapabilities2EXT::initialize(const safe_VkSurfaceCapabilities2EXT* src)
-{
- sType = src->sType;
- minImageCount = src->minImageCount;
- maxImageCount = src->maxImageCount;
- currentExtent = src->currentExtent;
- minImageExtent = src->minImageExtent;
- maxImageExtent = src->maxImageExtent;
- maxImageArrayLayers = src->maxImageArrayLayers;
- supportedTransforms = src->supportedTransforms;
- currentTransform = src->currentTransform;
- supportedCompositeAlpha = src->supportedCompositeAlpha;
- supportedUsageFlags = src->supportedUsageFlags;
- supportedSurfaceCounters = src->supportedSurfaceCounters;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkDisplayPowerInfoEXT::safe_VkDisplayPowerInfoEXT(const VkDisplayPowerInfoEXT* in_struct) :
- sType(in_struct->sType),
- powerState(in_struct->powerState)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkDisplayPowerInfoEXT::safe_VkDisplayPowerInfoEXT() :
- pNext(nullptr)
-{}
-
-safe_VkDisplayPowerInfoEXT::safe_VkDisplayPowerInfoEXT(const safe_VkDisplayPowerInfoEXT& src)
-{
- sType = src.sType;
- powerState = src.powerState;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkDisplayPowerInfoEXT& safe_VkDisplayPowerInfoEXT::operator=(const safe_VkDisplayPowerInfoEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- powerState = src.powerState;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkDisplayPowerInfoEXT::~safe_VkDisplayPowerInfoEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDisplayPowerInfoEXT::initialize(const VkDisplayPowerInfoEXT* in_struct)
-{
- sType = in_struct->sType;
- powerState = in_struct->powerState;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkDisplayPowerInfoEXT::initialize(const safe_VkDisplayPowerInfoEXT* src)
-{
- sType = src->sType;
- powerState = src->powerState;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkDeviceEventInfoEXT::safe_VkDeviceEventInfoEXT(const VkDeviceEventInfoEXT* in_struct) :
- sType(in_struct->sType),
- deviceEvent(in_struct->deviceEvent)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkDeviceEventInfoEXT::safe_VkDeviceEventInfoEXT() :
- pNext(nullptr)
-{}
-
-safe_VkDeviceEventInfoEXT::safe_VkDeviceEventInfoEXT(const safe_VkDeviceEventInfoEXT& src)
-{
- sType = src.sType;
- deviceEvent = src.deviceEvent;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkDeviceEventInfoEXT& safe_VkDeviceEventInfoEXT::operator=(const safe_VkDeviceEventInfoEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- deviceEvent = src.deviceEvent;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkDeviceEventInfoEXT::~safe_VkDeviceEventInfoEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDeviceEventInfoEXT::initialize(const VkDeviceEventInfoEXT* in_struct)
-{
- sType = in_struct->sType;
- deviceEvent = in_struct->deviceEvent;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkDeviceEventInfoEXT::initialize(const safe_VkDeviceEventInfoEXT* src)
-{
- sType = src->sType;
- deviceEvent = src->deviceEvent;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkDisplayEventInfoEXT::safe_VkDisplayEventInfoEXT(const VkDisplayEventInfoEXT* in_struct) :
- sType(in_struct->sType),
- displayEvent(in_struct->displayEvent)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkDisplayEventInfoEXT::safe_VkDisplayEventInfoEXT() :
- pNext(nullptr)
-{}
-
-safe_VkDisplayEventInfoEXT::safe_VkDisplayEventInfoEXT(const safe_VkDisplayEventInfoEXT& src)
-{
- sType = src.sType;
- displayEvent = src.displayEvent;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkDisplayEventInfoEXT& safe_VkDisplayEventInfoEXT::operator=(const safe_VkDisplayEventInfoEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- displayEvent = src.displayEvent;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkDisplayEventInfoEXT::~safe_VkDisplayEventInfoEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDisplayEventInfoEXT::initialize(const VkDisplayEventInfoEXT* in_struct)
-{
- sType = in_struct->sType;
- displayEvent = in_struct->displayEvent;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkDisplayEventInfoEXT::initialize(const safe_VkDisplayEventInfoEXT* src)
-{
- sType = src->sType;
- displayEvent = src->displayEvent;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkSwapchainCounterCreateInfoEXT::safe_VkSwapchainCounterCreateInfoEXT(const VkSwapchainCounterCreateInfoEXT* in_struct) :
- sType(in_struct->sType),
- surfaceCounters(in_struct->surfaceCounters)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkSwapchainCounterCreateInfoEXT::safe_VkSwapchainCounterCreateInfoEXT() :
- pNext(nullptr)
-{}
-
-safe_VkSwapchainCounterCreateInfoEXT::safe_VkSwapchainCounterCreateInfoEXT(const safe_VkSwapchainCounterCreateInfoEXT& src)
-{
- sType = src.sType;
- surfaceCounters = src.surfaceCounters;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkSwapchainCounterCreateInfoEXT& safe_VkSwapchainCounterCreateInfoEXT::operator=(const safe_VkSwapchainCounterCreateInfoEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- surfaceCounters = src.surfaceCounters;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkSwapchainCounterCreateInfoEXT::~safe_VkSwapchainCounterCreateInfoEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkSwapchainCounterCreateInfoEXT::initialize(const VkSwapchainCounterCreateInfoEXT* in_struct)
-{
- sType = in_struct->sType;
- surfaceCounters = in_struct->surfaceCounters;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkSwapchainCounterCreateInfoEXT::initialize(const safe_VkSwapchainCounterCreateInfoEXT* src)
-{
- sType = src->sType;
- surfaceCounters = src->surfaceCounters;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPresentTimesInfoGOOGLE::safe_VkPresentTimesInfoGOOGLE(const VkPresentTimesInfoGOOGLE* in_struct) :
- sType(in_struct->sType),
- swapchainCount(in_struct->swapchainCount),
- pTimes(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pTimes) {
- pTimes = new VkPresentTimeGOOGLE[in_struct->swapchainCount];
- memcpy ((void *)pTimes, (void *)in_struct->pTimes, sizeof(VkPresentTimeGOOGLE)*in_struct->swapchainCount);
- }
-}
-
-safe_VkPresentTimesInfoGOOGLE::safe_VkPresentTimesInfoGOOGLE() :
- pNext(nullptr),
- pTimes(nullptr)
-{}
-
-safe_VkPresentTimesInfoGOOGLE::safe_VkPresentTimesInfoGOOGLE(const safe_VkPresentTimesInfoGOOGLE& src)
-{
- sType = src.sType;
- swapchainCount = src.swapchainCount;
- pTimes = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pTimes) {
- pTimes = new VkPresentTimeGOOGLE[src.swapchainCount];
- memcpy ((void *)pTimes, (void *)src.pTimes, sizeof(VkPresentTimeGOOGLE)*src.swapchainCount);
- }
-}
-
-safe_VkPresentTimesInfoGOOGLE& safe_VkPresentTimesInfoGOOGLE::operator=(const safe_VkPresentTimesInfoGOOGLE& src)
-{
- if (&src == this) return *this;
-
- if (pTimes)
- delete[] pTimes;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- swapchainCount = src.swapchainCount;
- pTimes = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pTimes) {
- pTimes = new VkPresentTimeGOOGLE[src.swapchainCount];
- memcpy ((void *)pTimes, (void *)src.pTimes, sizeof(VkPresentTimeGOOGLE)*src.swapchainCount);
- }
-
- return *this;
-}
-
-safe_VkPresentTimesInfoGOOGLE::~safe_VkPresentTimesInfoGOOGLE()
-{
- if (pTimes)
- delete[] pTimes;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPresentTimesInfoGOOGLE::initialize(const VkPresentTimesInfoGOOGLE* in_struct)
-{
- sType = in_struct->sType;
- swapchainCount = in_struct->swapchainCount;
- pTimes = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pTimes) {
- pTimes = new VkPresentTimeGOOGLE[in_struct->swapchainCount];
- memcpy ((void *)pTimes, (void *)in_struct->pTimes, sizeof(VkPresentTimeGOOGLE)*in_struct->swapchainCount);
- }
-}
-
-void safe_VkPresentTimesInfoGOOGLE::initialize(const safe_VkPresentTimesInfoGOOGLE* src)
-{
- sType = src->sType;
- swapchainCount = src->swapchainCount;
- pTimes = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (src->pTimes) {
- pTimes = new VkPresentTimeGOOGLE[src->swapchainCount];
- memcpy ((void *)pTimes, (void *)src->pTimes, sizeof(VkPresentTimeGOOGLE)*src->swapchainCount);
- }
-}
-
-safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX::safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX(const VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX* in_struct) :
- sType(in_struct->sType),
- perViewPositionAllComponents(in_struct->perViewPositionAllComponents)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX::safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX::safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX(const safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX& src)
-{
- sType = src.sType;
- perViewPositionAllComponents = src.perViewPositionAllComponents;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX& safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX::operator=(const safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- perViewPositionAllComponents = src.perViewPositionAllComponents;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX::~safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX::initialize(const VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX* in_struct)
-{
- sType = in_struct->sType;
- perViewPositionAllComponents = in_struct->perViewPositionAllComponents;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX::initialize(const safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX* src)
-{
- sType = src->sType;
- perViewPositionAllComponents = src->perViewPositionAllComponents;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPipelineViewportSwizzleStateCreateInfoNV::safe_VkPipelineViewportSwizzleStateCreateInfoNV(const VkPipelineViewportSwizzleStateCreateInfoNV* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- viewportCount(in_struct->viewportCount),
- pViewportSwizzles(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pViewportSwizzles) {
- pViewportSwizzles = new VkViewportSwizzleNV[in_struct->viewportCount];
- memcpy ((void *)pViewportSwizzles, (void *)in_struct->pViewportSwizzles, sizeof(VkViewportSwizzleNV)*in_struct->viewportCount);
- }
-}
-
-safe_VkPipelineViewportSwizzleStateCreateInfoNV::safe_VkPipelineViewportSwizzleStateCreateInfoNV() :
- pNext(nullptr),
- pViewportSwizzles(nullptr)
-{}
-
-safe_VkPipelineViewportSwizzleStateCreateInfoNV::safe_VkPipelineViewportSwizzleStateCreateInfoNV(const safe_VkPipelineViewportSwizzleStateCreateInfoNV& src)
-{
- sType = src.sType;
- flags = src.flags;
- viewportCount = src.viewportCount;
- pViewportSwizzles = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pViewportSwizzles) {
- pViewportSwizzles = new VkViewportSwizzleNV[src.viewportCount];
- memcpy ((void *)pViewportSwizzles, (void *)src.pViewportSwizzles, sizeof(VkViewportSwizzleNV)*src.viewportCount);
- }
-}
-
-safe_VkPipelineViewportSwizzleStateCreateInfoNV& safe_VkPipelineViewportSwizzleStateCreateInfoNV::operator=(const safe_VkPipelineViewportSwizzleStateCreateInfoNV& src)
-{
- if (&src == this) return *this;
-
- if (pViewportSwizzles)
- delete[] pViewportSwizzles;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- viewportCount = src.viewportCount;
- pViewportSwizzles = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pViewportSwizzles) {
- pViewportSwizzles = new VkViewportSwizzleNV[src.viewportCount];
- memcpy ((void *)pViewportSwizzles, (void *)src.pViewportSwizzles, sizeof(VkViewportSwizzleNV)*src.viewportCount);
- }
-
- return *this;
-}
-
-safe_VkPipelineViewportSwizzleStateCreateInfoNV::~safe_VkPipelineViewportSwizzleStateCreateInfoNV()
-{
- if (pViewportSwizzles)
- delete[] pViewportSwizzles;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPipelineViewportSwizzleStateCreateInfoNV::initialize(const VkPipelineViewportSwizzleStateCreateInfoNV* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- viewportCount = in_struct->viewportCount;
- pViewportSwizzles = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pViewportSwizzles) {
- pViewportSwizzles = new VkViewportSwizzleNV[in_struct->viewportCount];
- memcpy ((void *)pViewportSwizzles, (void *)in_struct->pViewportSwizzles, sizeof(VkViewportSwizzleNV)*in_struct->viewportCount);
- }
-}
-
-void safe_VkPipelineViewportSwizzleStateCreateInfoNV::initialize(const safe_VkPipelineViewportSwizzleStateCreateInfoNV* src)
-{
- sType = src->sType;
- flags = src->flags;
- viewportCount = src->viewportCount;
- pViewportSwizzles = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (src->pViewportSwizzles) {
- pViewportSwizzles = new VkViewportSwizzleNV[src->viewportCount];
- memcpy ((void *)pViewportSwizzles, (void *)src->pViewportSwizzles, sizeof(VkViewportSwizzleNV)*src->viewportCount);
- }
-}
-
-safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT::safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT(const VkPhysicalDeviceDiscardRectanglePropertiesEXT* in_struct) :
- sType(in_struct->sType),
- maxDiscardRectangles(in_struct->maxDiscardRectangles)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT::safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT::safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT(const safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT& src)
-{
- sType = src.sType;
- maxDiscardRectangles = src.maxDiscardRectangles;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT& safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT::operator=(const safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- maxDiscardRectangles = src.maxDiscardRectangles;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT::~safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT::initialize(const VkPhysicalDeviceDiscardRectanglePropertiesEXT* in_struct)
-{
- sType = in_struct->sType;
- maxDiscardRectangles = in_struct->maxDiscardRectangles;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT::initialize(const safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT* src)
-{
- sType = src->sType;
- maxDiscardRectangles = src->maxDiscardRectangles;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPipelineDiscardRectangleStateCreateInfoEXT::safe_VkPipelineDiscardRectangleStateCreateInfoEXT(const VkPipelineDiscardRectangleStateCreateInfoEXT* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- discardRectangleMode(in_struct->discardRectangleMode),
- discardRectangleCount(in_struct->discardRectangleCount),
- pDiscardRectangles(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pDiscardRectangles) {
- pDiscardRectangles = new VkRect2D[in_struct->discardRectangleCount];
- memcpy ((void *)pDiscardRectangles, (void *)in_struct->pDiscardRectangles, sizeof(VkRect2D)*in_struct->discardRectangleCount);
- }
-}
-
-safe_VkPipelineDiscardRectangleStateCreateInfoEXT::safe_VkPipelineDiscardRectangleStateCreateInfoEXT() :
- pNext(nullptr),
- pDiscardRectangles(nullptr)
-{}
-
-safe_VkPipelineDiscardRectangleStateCreateInfoEXT::safe_VkPipelineDiscardRectangleStateCreateInfoEXT(const safe_VkPipelineDiscardRectangleStateCreateInfoEXT& src)
-{
- sType = src.sType;
- flags = src.flags;
- discardRectangleMode = src.discardRectangleMode;
- discardRectangleCount = src.discardRectangleCount;
- pDiscardRectangles = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pDiscardRectangles) {
- pDiscardRectangles = new VkRect2D[src.discardRectangleCount];
- memcpy ((void *)pDiscardRectangles, (void *)src.pDiscardRectangles, sizeof(VkRect2D)*src.discardRectangleCount);
- }
-}
-
-safe_VkPipelineDiscardRectangleStateCreateInfoEXT& safe_VkPipelineDiscardRectangleStateCreateInfoEXT::operator=(const safe_VkPipelineDiscardRectangleStateCreateInfoEXT& src)
-{
- if (&src == this) return *this;
-
- if (pDiscardRectangles)
- delete[] pDiscardRectangles;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- discardRectangleMode = src.discardRectangleMode;
- discardRectangleCount = src.discardRectangleCount;
- pDiscardRectangles = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pDiscardRectangles) {
- pDiscardRectangles = new VkRect2D[src.discardRectangleCount];
- memcpy ((void *)pDiscardRectangles, (void *)src.pDiscardRectangles, sizeof(VkRect2D)*src.discardRectangleCount);
- }
-
- return *this;
-}
-
-safe_VkPipelineDiscardRectangleStateCreateInfoEXT::~safe_VkPipelineDiscardRectangleStateCreateInfoEXT()
-{
- if (pDiscardRectangles)
- delete[] pDiscardRectangles;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPipelineDiscardRectangleStateCreateInfoEXT::initialize(const VkPipelineDiscardRectangleStateCreateInfoEXT* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- discardRectangleMode = in_struct->discardRectangleMode;
- discardRectangleCount = in_struct->discardRectangleCount;
- pDiscardRectangles = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pDiscardRectangles) {
- pDiscardRectangles = new VkRect2D[in_struct->discardRectangleCount];
- memcpy ((void *)pDiscardRectangles, (void *)in_struct->pDiscardRectangles, sizeof(VkRect2D)*in_struct->discardRectangleCount);
- }
-}
-
-void safe_VkPipelineDiscardRectangleStateCreateInfoEXT::initialize(const safe_VkPipelineDiscardRectangleStateCreateInfoEXT* src)
-{
- sType = src->sType;
- flags = src->flags;
- discardRectangleMode = src->discardRectangleMode;
- discardRectangleCount = src->discardRectangleCount;
- pDiscardRectangles = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (src->pDiscardRectangles) {
- pDiscardRectangles = new VkRect2D[src->discardRectangleCount];
- memcpy ((void *)pDiscardRectangles, (void *)src->pDiscardRectangles, sizeof(VkRect2D)*src->discardRectangleCount);
- }
-}
-
-safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT::safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT(const VkPhysicalDeviceConservativeRasterizationPropertiesEXT* in_struct) :
- sType(in_struct->sType),
- primitiveOverestimationSize(in_struct->primitiveOverestimationSize),
- maxExtraPrimitiveOverestimationSize(in_struct->maxExtraPrimitiveOverestimationSize),
- extraPrimitiveOverestimationSizeGranularity(in_struct->extraPrimitiveOverestimationSizeGranularity),
- primitiveUnderestimation(in_struct->primitiveUnderestimation),
- conservativePointAndLineRasterization(in_struct->conservativePointAndLineRasterization),
- degenerateTrianglesRasterized(in_struct->degenerateTrianglesRasterized),
- degenerateLinesRasterized(in_struct->degenerateLinesRasterized),
- fullyCoveredFragmentShaderInputVariable(in_struct->fullyCoveredFragmentShaderInputVariable),
- conservativeRasterizationPostDepthCoverage(in_struct->conservativeRasterizationPostDepthCoverage)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT::safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT::safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT(const safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT& src)
-{
- sType = src.sType;
- primitiveOverestimationSize = src.primitiveOverestimationSize;
- maxExtraPrimitiveOverestimationSize = src.maxExtraPrimitiveOverestimationSize;
- extraPrimitiveOverestimationSizeGranularity = src.extraPrimitiveOverestimationSizeGranularity;
- primitiveUnderestimation = src.primitiveUnderestimation;
- conservativePointAndLineRasterization = src.conservativePointAndLineRasterization;
- degenerateTrianglesRasterized = src.degenerateTrianglesRasterized;
- degenerateLinesRasterized = src.degenerateLinesRasterized;
- fullyCoveredFragmentShaderInputVariable = src.fullyCoveredFragmentShaderInputVariable;
- conservativeRasterizationPostDepthCoverage = src.conservativeRasterizationPostDepthCoverage;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT& safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT::operator=(const safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- primitiveOverestimationSize = src.primitiveOverestimationSize;
- maxExtraPrimitiveOverestimationSize = src.maxExtraPrimitiveOverestimationSize;
- extraPrimitiveOverestimationSizeGranularity = src.extraPrimitiveOverestimationSizeGranularity;
- primitiveUnderestimation = src.primitiveUnderestimation;
- conservativePointAndLineRasterization = src.conservativePointAndLineRasterization;
- degenerateTrianglesRasterized = src.degenerateTrianglesRasterized;
- degenerateLinesRasterized = src.degenerateLinesRasterized;
- fullyCoveredFragmentShaderInputVariable = src.fullyCoveredFragmentShaderInputVariable;
- conservativeRasterizationPostDepthCoverage = src.conservativeRasterizationPostDepthCoverage;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT::~safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT::initialize(const VkPhysicalDeviceConservativeRasterizationPropertiesEXT* in_struct)
-{
- sType = in_struct->sType;
- primitiveOverestimationSize = in_struct->primitiveOverestimationSize;
- maxExtraPrimitiveOverestimationSize = in_struct->maxExtraPrimitiveOverestimationSize;
- extraPrimitiveOverestimationSizeGranularity = in_struct->extraPrimitiveOverestimationSizeGranularity;
- primitiveUnderestimation = in_struct->primitiveUnderestimation;
- conservativePointAndLineRasterization = in_struct->conservativePointAndLineRasterization;
- degenerateTrianglesRasterized = in_struct->degenerateTrianglesRasterized;
- degenerateLinesRasterized = in_struct->degenerateLinesRasterized;
- fullyCoveredFragmentShaderInputVariable = in_struct->fullyCoveredFragmentShaderInputVariable;
- conservativeRasterizationPostDepthCoverage = in_struct->conservativeRasterizationPostDepthCoverage;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT::initialize(const safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT* src)
-{
- sType = src->sType;
- primitiveOverestimationSize = src->primitiveOverestimationSize;
- maxExtraPrimitiveOverestimationSize = src->maxExtraPrimitiveOverestimationSize;
- extraPrimitiveOverestimationSizeGranularity = src->extraPrimitiveOverestimationSizeGranularity;
- primitiveUnderestimation = src->primitiveUnderestimation;
- conservativePointAndLineRasterization = src->conservativePointAndLineRasterization;
- degenerateTrianglesRasterized = src->degenerateTrianglesRasterized;
- degenerateLinesRasterized = src->degenerateLinesRasterized;
- fullyCoveredFragmentShaderInputVariable = src->fullyCoveredFragmentShaderInputVariable;
- conservativeRasterizationPostDepthCoverage = src->conservativeRasterizationPostDepthCoverage;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPipelineRasterizationConservativeStateCreateInfoEXT::safe_VkPipelineRasterizationConservativeStateCreateInfoEXT(const VkPipelineRasterizationConservativeStateCreateInfoEXT* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- conservativeRasterizationMode(in_struct->conservativeRasterizationMode),
- extraPrimitiveOverestimationSize(in_struct->extraPrimitiveOverestimationSize)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPipelineRasterizationConservativeStateCreateInfoEXT::safe_VkPipelineRasterizationConservativeStateCreateInfoEXT() :
- pNext(nullptr)
-{}
-
-safe_VkPipelineRasterizationConservativeStateCreateInfoEXT::safe_VkPipelineRasterizationConservativeStateCreateInfoEXT(const safe_VkPipelineRasterizationConservativeStateCreateInfoEXT& src)
-{
- sType = src.sType;
- flags = src.flags;
- conservativeRasterizationMode = src.conservativeRasterizationMode;
- extraPrimitiveOverestimationSize = src.extraPrimitiveOverestimationSize;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPipelineRasterizationConservativeStateCreateInfoEXT& safe_VkPipelineRasterizationConservativeStateCreateInfoEXT::operator=(const safe_VkPipelineRasterizationConservativeStateCreateInfoEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- conservativeRasterizationMode = src.conservativeRasterizationMode;
- extraPrimitiveOverestimationSize = src.extraPrimitiveOverestimationSize;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPipelineRasterizationConservativeStateCreateInfoEXT::~safe_VkPipelineRasterizationConservativeStateCreateInfoEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPipelineRasterizationConservativeStateCreateInfoEXT::initialize(const VkPipelineRasterizationConservativeStateCreateInfoEXT* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- conservativeRasterizationMode = in_struct->conservativeRasterizationMode;
- extraPrimitiveOverestimationSize = in_struct->extraPrimitiveOverestimationSize;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPipelineRasterizationConservativeStateCreateInfoEXT::initialize(const safe_VkPipelineRasterizationConservativeStateCreateInfoEXT* src)
-{
- sType = src->sType;
- flags = src->flags;
- conservativeRasterizationMode = src->conservativeRasterizationMode;
- extraPrimitiveOverestimationSize = src->extraPrimitiveOverestimationSize;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT::safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT(const VkPhysicalDeviceDepthClipEnableFeaturesEXT* in_struct) :
- sType(in_struct->sType),
- depthClipEnable(in_struct->depthClipEnable)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT::safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT::safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT(const safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT& src)
-{
- sType = src.sType;
- depthClipEnable = src.depthClipEnable;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT& safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT::operator=(const safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- depthClipEnable = src.depthClipEnable;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT::~safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT::initialize(const VkPhysicalDeviceDepthClipEnableFeaturesEXT* in_struct)
-{
- sType = in_struct->sType;
- depthClipEnable = in_struct->depthClipEnable;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT::initialize(const safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT* src)
-{
- sType = src->sType;
- depthClipEnable = src->depthClipEnable;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT::safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT(const VkPipelineRasterizationDepthClipStateCreateInfoEXT* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- depthClipEnable(in_struct->depthClipEnable)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT::safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT() :
- pNext(nullptr)
-{}
-
-safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT::safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT(const safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT& src)
-{
- sType = src.sType;
- flags = src.flags;
- depthClipEnable = src.depthClipEnable;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT& safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT::operator=(const safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- depthClipEnable = src.depthClipEnable;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT::~safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT::initialize(const VkPipelineRasterizationDepthClipStateCreateInfoEXT* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- depthClipEnable = in_struct->depthClipEnable;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT::initialize(const safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT* src)
-{
- sType = src->sType;
- flags = src->flags;
- depthClipEnable = src->depthClipEnable;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkHdrMetadataEXT::safe_VkHdrMetadataEXT(const VkHdrMetadataEXT* in_struct) :
- sType(in_struct->sType),
- displayPrimaryRed(in_struct->displayPrimaryRed),
- displayPrimaryGreen(in_struct->displayPrimaryGreen),
- displayPrimaryBlue(in_struct->displayPrimaryBlue),
- whitePoint(in_struct->whitePoint),
- maxLuminance(in_struct->maxLuminance),
- minLuminance(in_struct->minLuminance),
- maxContentLightLevel(in_struct->maxContentLightLevel),
- maxFrameAverageLightLevel(in_struct->maxFrameAverageLightLevel)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkHdrMetadataEXT::safe_VkHdrMetadataEXT() :
- pNext(nullptr)
-{}
-
-safe_VkHdrMetadataEXT::safe_VkHdrMetadataEXT(const safe_VkHdrMetadataEXT& src)
-{
- sType = src.sType;
- displayPrimaryRed = src.displayPrimaryRed;
- displayPrimaryGreen = src.displayPrimaryGreen;
- displayPrimaryBlue = src.displayPrimaryBlue;
- whitePoint = src.whitePoint;
- maxLuminance = src.maxLuminance;
- minLuminance = src.minLuminance;
- maxContentLightLevel = src.maxContentLightLevel;
- maxFrameAverageLightLevel = src.maxFrameAverageLightLevel;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkHdrMetadataEXT& safe_VkHdrMetadataEXT::operator=(const safe_VkHdrMetadataEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- displayPrimaryRed = src.displayPrimaryRed;
- displayPrimaryGreen = src.displayPrimaryGreen;
- displayPrimaryBlue = src.displayPrimaryBlue;
- whitePoint = src.whitePoint;
- maxLuminance = src.maxLuminance;
- minLuminance = src.minLuminance;
- maxContentLightLevel = src.maxContentLightLevel;
- maxFrameAverageLightLevel = src.maxFrameAverageLightLevel;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkHdrMetadataEXT::~safe_VkHdrMetadataEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkHdrMetadataEXT::initialize(const VkHdrMetadataEXT* in_struct)
-{
- sType = in_struct->sType;
- displayPrimaryRed = in_struct->displayPrimaryRed;
- displayPrimaryGreen = in_struct->displayPrimaryGreen;
- displayPrimaryBlue = in_struct->displayPrimaryBlue;
- whitePoint = in_struct->whitePoint;
- maxLuminance = in_struct->maxLuminance;
- minLuminance = in_struct->minLuminance;
- maxContentLightLevel = in_struct->maxContentLightLevel;
- maxFrameAverageLightLevel = in_struct->maxFrameAverageLightLevel;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkHdrMetadataEXT::initialize(const safe_VkHdrMetadataEXT* src)
-{
- sType = src->sType;
- displayPrimaryRed = src->displayPrimaryRed;
- displayPrimaryGreen = src->displayPrimaryGreen;
- displayPrimaryBlue = src->displayPrimaryBlue;
- whitePoint = src->whitePoint;
- maxLuminance = src->maxLuminance;
- minLuminance = src->minLuminance;
- maxContentLightLevel = src->maxContentLightLevel;
- maxFrameAverageLightLevel = src->maxFrameAverageLightLevel;
- pNext = SafePnextCopy(src->pNext);
-}
-#ifdef VK_USE_PLATFORM_IOS_MVK
-
-
-safe_VkIOSSurfaceCreateInfoMVK::safe_VkIOSSurfaceCreateInfoMVK(const VkIOSSurfaceCreateInfoMVK* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- pView(in_struct->pView)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkIOSSurfaceCreateInfoMVK::safe_VkIOSSurfaceCreateInfoMVK() :
- pNext(nullptr),
- pView(nullptr)
-{}
-
-safe_VkIOSSurfaceCreateInfoMVK::safe_VkIOSSurfaceCreateInfoMVK(const safe_VkIOSSurfaceCreateInfoMVK& src)
-{
- sType = src.sType;
- flags = src.flags;
- pView = src.pView;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkIOSSurfaceCreateInfoMVK& safe_VkIOSSurfaceCreateInfoMVK::operator=(const safe_VkIOSSurfaceCreateInfoMVK& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- pView = src.pView;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkIOSSurfaceCreateInfoMVK::~safe_VkIOSSurfaceCreateInfoMVK()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkIOSSurfaceCreateInfoMVK::initialize(const VkIOSSurfaceCreateInfoMVK* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- pView = in_struct->pView;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkIOSSurfaceCreateInfoMVK::initialize(const safe_VkIOSSurfaceCreateInfoMVK* src)
-{
- sType = src->sType;
- flags = src->flags;
- pView = src->pView;
- pNext = SafePnextCopy(src->pNext);
-}
-#endif // VK_USE_PLATFORM_IOS_MVK
-
-#ifdef VK_USE_PLATFORM_MACOS_MVK
-
-
-safe_VkMacOSSurfaceCreateInfoMVK::safe_VkMacOSSurfaceCreateInfoMVK(const VkMacOSSurfaceCreateInfoMVK* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- pView(in_struct->pView)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkMacOSSurfaceCreateInfoMVK::safe_VkMacOSSurfaceCreateInfoMVK() :
- pNext(nullptr),
- pView(nullptr)
-{}
-
-safe_VkMacOSSurfaceCreateInfoMVK::safe_VkMacOSSurfaceCreateInfoMVK(const safe_VkMacOSSurfaceCreateInfoMVK& src)
-{
- sType = src.sType;
- flags = src.flags;
- pView = src.pView;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkMacOSSurfaceCreateInfoMVK& safe_VkMacOSSurfaceCreateInfoMVK::operator=(const safe_VkMacOSSurfaceCreateInfoMVK& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- pView = src.pView;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkMacOSSurfaceCreateInfoMVK::~safe_VkMacOSSurfaceCreateInfoMVK()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkMacOSSurfaceCreateInfoMVK::initialize(const VkMacOSSurfaceCreateInfoMVK* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- pView = in_struct->pView;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkMacOSSurfaceCreateInfoMVK::initialize(const safe_VkMacOSSurfaceCreateInfoMVK* src)
-{
- sType = src->sType;
- flags = src->flags;
- pView = src->pView;
- pNext = SafePnextCopy(src->pNext);
-}
-#endif // VK_USE_PLATFORM_MACOS_MVK
-
-
-safe_VkDebugUtilsObjectNameInfoEXT::safe_VkDebugUtilsObjectNameInfoEXT(const VkDebugUtilsObjectNameInfoEXT* in_struct) :
- sType(in_struct->sType),
- objectType(in_struct->objectType),
- objectHandle(in_struct->objectHandle)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- pObjectName = SafeStringCopy(in_struct->pObjectName);
-}
-
-safe_VkDebugUtilsObjectNameInfoEXT::safe_VkDebugUtilsObjectNameInfoEXT() :
- pNext(nullptr),
- pObjectName(nullptr)
-{}
-
-safe_VkDebugUtilsObjectNameInfoEXT::safe_VkDebugUtilsObjectNameInfoEXT(const safe_VkDebugUtilsObjectNameInfoEXT& src)
-{
- sType = src.sType;
- objectType = src.objectType;
- objectHandle = src.objectHandle;
- pNext = SafePnextCopy(src.pNext);
- pObjectName = SafeStringCopy(src.pObjectName);
-}
-
-safe_VkDebugUtilsObjectNameInfoEXT& safe_VkDebugUtilsObjectNameInfoEXT::operator=(const safe_VkDebugUtilsObjectNameInfoEXT& src)
-{
- if (&src == this) return *this;
-
- if (pObjectName) delete [] pObjectName;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- objectType = src.objectType;
- objectHandle = src.objectHandle;
- pNext = SafePnextCopy(src.pNext);
- pObjectName = SafeStringCopy(src.pObjectName);
-
- return *this;
-}
-
-safe_VkDebugUtilsObjectNameInfoEXT::~safe_VkDebugUtilsObjectNameInfoEXT()
-{
- if (pObjectName) delete [] pObjectName;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDebugUtilsObjectNameInfoEXT::initialize(const VkDebugUtilsObjectNameInfoEXT* in_struct)
-{
- sType = in_struct->sType;
- objectType = in_struct->objectType;
- objectHandle = in_struct->objectHandle;
- pNext = SafePnextCopy(in_struct->pNext);
- pObjectName = SafeStringCopy(in_struct->pObjectName);
-}
-
-void safe_VkDebugUtilsObjectNameInfoEXT::initialize(const safe_VkDebugUtilsObjectNameInfoEXT* src)
-{
- sType = src->sType;
- objectType = src->objectType;
- objectHandle = src->objectHandle;
- pNext = SafePnextCopy(src->pNext);
- pObjectName = SafeStringCopy(src->pObjectName);
-}
-
-safe_VkDebugUtilsObjectTagInfoEXT::safe_VkDebugUtilsObjectTagInfoEXT(const VkDebugUtilsObjectTagInfoEXT* in_struct) :
- sType(in_struct->sType),
- objectType(in_struct->objectType),
- objectHandle(in_struct->objectHandle),
- tagName(in_struct->tagName),
- tagSize(in_struct->tagSize),
- pTag(in_struct->pTag)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkDebugUtilsObjectTagInfoEXT::safe_VkDebugUtilsObjectTagInfoEXT() :
- pNext(nullptr),
- pTag(nullptr)
-{}
-
-safe_VkDebugUtilsObjectTagInfoEXT::safe_VkDebugUtilsObjectTagInfoEXT(const safe_VkDebugUtilsObjectTagInfoEXT& src)
-{
- sType = src.sType;
- objectType = src.objectType;
- objectHandle = src.objectHandle;
- tagName = src.tagName;
- tagSize = src.tagSize;
- pTag = src.pTag;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkDebugUtilsObjectTagInfoEXT& safe_VkDebugUtilsObjectTagInfoEXT::operator=(const safe_VkDebugUtilsObjectTagInfoEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- objectType = src.objectType;
- objectHandle = src.objectHandle;
- tagName = src.tagName;
- tagSize = src.tagSize;
- pTag = src.pTag;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkDebugUtilsObjectTagInfoEXT::~safe_VkDebugUtilsObjectTagInfoEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDebugUtilsObjectTagInfoEXT::initialize(const VkDebugUtilsObjectTagInfoEXT* in_struct)
-{
- sType = in_struct->sType;
- objectType = in_struct->objectType;
- objectHandle = in_struct->objectHandle;
- tagName = in_struct->tagName;
- tagSize = in_struct->tagSize;
- pTag = in_struct->pTag;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkDebugUtilsObjectTagInfoEXT::initialize(const safe_VkDebugUtilsObjectTagInfoEXT* src)
-{
- sType = src->sType;
- objectType = src->objectType;
- objectHandle = src->objectHandle;
- tagName = src->tagName;
- tagSize = src->tagSize;
- pTag = src->pTag;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkDebugUtilsLabelEXT::safe_VkDebugUtilsLabelEXT(const VkDebugUtilsLabelEXT* in_struct) :
- sType(in_struct->sType)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- pLabelName = SafeStringCopy(in_struct->pLabelName);
- for (uint32_t i = 0; i < 4; ++i) {
- color[i] = in_struct->color[i];
- }
-}
-
-safe_VkDebugUtilsLabelEXT::safe_VkDebugUtilsLabelEXT() :
- pNext(nullptr),
- pLabelName(nullptr)
-{}
-
-safe_VkDebugUtilsLabelEXT::safe_VkDebugUtilsLabelEXT(const safe_VkDebugUtilsLabelEXT& src)
-{
- sType = src.sType;
- pNext = SafePnextCopy(src.pNext);
- pLabelName = SafeStringCopy(src.pLabelName);
- for (uint32_t i = 0; i < 4; ++i) {
- color[i] = src.color[i];
- }
-}
-
-safe_VkDebugUtilsLabelEXT& safe_VkDebugUtilsLabelEXT::operator=(const safe_VkDebugUtilsLabelEXT& src)
-{
- if (&src == this) return *this;
-
- if (pLabelName) delete [] pLabelName;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- pNext = SafePnextCopy(src.pNext);
- pLabelName = SafeStringCopy(src.pLabelName);
- for (uint32_t i = 0; i < 4; ++i) {
- color[i] = src.color[i];
- }
-
- return *this;
-}
-
-safe_VkDebugUtilsLabelEXT::~safe_VkDebugUtilsLabelEXT()
-{
- if (pLabelName) delete [] pLabelName;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDebugUtilsLabelEXT::initialize(const VkDebugUtilsLabelEXT* in_struct)
-{
- sType = in_struct->sType;
- pNext = SafePnextCopy(in_struct->pNext);
- pLabelName = SafeStringCopy(in_struct->pLabelName);
- for (uint32_t i = 0; i < 4; ++i) {
- color[i] = in_struct->color[i];
- }
-}
-
-void safe_VkDebugUtilsLabelEXT::initialize(const safe_VkDebugUtilsLabelEXT* src)
-{
- sType = src->sType;
- pNext = SafePnextCopy(src->pNext);
- pLabelName = SafeStringCopy(src->pLabelName);
- for (uint32_t i = 0; i < 4; ++i) {
- color[i] = src->color[i];
- }
-}
-
-safe_VkDebugUtilsMessengerCallbackDataEXT::safe_VkDebugUtilsMessengerCallbackDataEXT(const VkDebugUtilsMessengerCallbackDataEXT* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- messageIdNumber(in_struct->messageIdNumber),
- queueLabelCount(in_struct->queueLabelCount),
- pQueueLabels(nullptr),
- cmdBufLabelCount(in_struct->cmdBufLabelCount),
- pCmdBufLabels(nullptr),
- objectCount(in_struct->objectCount),
- pObjects(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- pMessageIdName = SafeStringCopy(in_struct->pMessageIdName);
- pMessage = SafeStringCopy(in_struct->pMessage);
- if (queueLabelCount && in_struct->pQueueLabels) {
- pQueueLabels = new safe_VkDebugUtilsLabelEXT[queueLabelCount];
- for (uint32_t i = 0; i < queueLabelCount; ++i) {
- pQueueLabels[i].initialize(&in_struct->pQueueLabels[i]);
- }
- }
- if (cmdBufLabelCount && in_struct->pCmdBufLabels) {
- pCmdBufLabels = new safe_VkDebugUtilsLabelEXT[cmdBufLabelCount];
- for (uint32_t i = 0; i < cmdBufLabelCount; ++i) {
- pCmdBufLabels[i].initialize(&in_struct->pCmdBufLabels[i]);
- }
- }
- if (objectCount && in_struct->pObjects) {
- pObjects = new safe_VkDebugUtilsObjectNameInfoEXT[objectCount];
- for (uint32_t i = 0; i < objectCount; ++i) {
- pObjects[i].initialize(&in_struct->pObjects[i]);
- }
- }
-}
-
-safe_VkDebugUtilsMessengerCallbackDataEXT::safe_VkDebugUtilsMessengerCallbackDataEXT() :
- pNext(nullptr),
- pMessageIdName(nullptr),
- pMessage(nullptr),
- pQueueLabels(nullptr),
- pCmdBufLabels(nullptr),
- pObjects(nullptr)
-{}
-
-safe_VkDebugUtilsMessengerCallbackDataEXT::safe_VkDebugUtilsMessengerCallbackDataEXT(const safe_VkDebugUtilsMessengerCallbackDataEXT& src)
-{
- sType = src.sType;
- flags = src.flags;
- messageIdNumber = src.messageIdNumber;
- queueLabelCount = src.queueLabelCount;
- pQueueLabels = nullptr;
- cmdBufLabelCount = src.cmdBufLabelCount;
- pCmdBufLabels = nullptr;
- objectCount = src.objectCount;
- pObjects = nullptr;
- pNext = SafePnextCopy(src.pNext);
- pMessageIdName = SafeStringCopy(src.pMessageIdName);
- pMessage = SafeStringCopy(src.pMessage);
- if (queueLabelCount && src.pQueueLabels) {
- pQueueLabels = new safe_VkDebugUtilsLabelEXT[queueLabelCount];
- for (uint32_t i = 0; i < queueLabelCount; ++i) {
- pQueueLabels[i].initialize(&src.pQueueLabels[i]);
- }
- }
- if (cmdBufLabelCount && src.pCmdBufLabels) {
- pCmdBufLabels = new safe_VkDebugUtilsLabelEXT[cmdBufLabelCount];
- for (uint32_t i = 0; i < cmdBufLabelCount; ++i) {
- pCmdBufLabels[i].initialize(&src.pCmdBufLabels[i]);
- }
- }
- if (objectCount && src.pObjects) {
- pObjects = new safe_VkDebugUtilsObjectNameInfoEXT[objectCount];
- for (uint32_t i = 0; i < objectCount; ++i) {
- pObjects[i].initialize(&src.pObjects[i]);
- }
- }
-}
-
-safe_VkDebugUtilsMessengerCallbackDataEXT& safe_VkDebugUtilsMessengerCallbackDataEXT::operator=(const safe_VkDebugUtilsMessengerCallbackDataEXT& src)
-{
- if (&src == this) return *this;
-
- if (pMessageIdName) delete [] pMessageIdName;
- if (pMessage) delete [] pMessage;
- if (pQueueLabels)
- delete[] pQueueLabels;
- if (pCmdBufLabels)
- delete[] pCmdBufLabels;
- if (pObjects)
- delete[] pObjects;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- messageIdNumber = src.messageIdNumber;
- queueLabelCount = src.queueLabelCount;
- pQueueLabels = nullptr;
- cmdBufLabelCount = src.cmdBufLabelCount;
- pCmdBufLabels = nullptr;
- objectCount = src.objectCount;
- pObjects = nullptr;
- pNext = SafePnextCopy(src.pNext);
- pMessageIdName = SafeStringCopy(src.pMessageIdName);
- pMessage = SafeStringCopy(src.pMessage);
- if (queueLabelCount && src.pQueueLabels) {
- pQueueLabels = new safe_VkDebugUtilsLabelEXT[queueLabelCount];
- for (uint32_t i = 0; i < queueLabelCount; ++i) {
- pQueueLabels[i].initialize(&src.pQueueLabels[i]);
- }
- }
- if (cmdBufLabelCount && src.pCmdBufLabels) {
- pCmdBufLabels = new safe_VkDebugUtilsLabelEXT[cmdBufLabelCount];
- for (uint32_t i = 0; i < cmdBufLabelCount; ++i) {
- pCmdBufLabels[i].initialize(&src.pCmdBufLabels[i]);
- }
- }
- if (objectCount && src.pObjects) {
- pObjects = new safe_VkDebugUtilsObjectNameInfoEXT[objectCount];
- for (uint32_t i = 0; i < objectCount; ++i) {
- pObjects[i].initialize(&src.pObjects[i]);
- }
- }
-
- return *this;
-}
-
-safe_VkDebugUtilsMessengerCallbackDataEXT::~safe_VkDebugUtilsMessengerCallbackDataEXT()
-{
- if (pMessageIdName) delete [] pMessageIdName;
- if (pMessage) delete [] pMessage;
- if (pQueueLabels)
- delete[] pQueueLabels;
- if (pCmdBufLabels)
- delete[] pCmdBufLabels;
- if (pObjects)
- delete[] pObjects;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDebugUtilsMessengerCallbackDataEXT::initialize(const VkDebugUtilsMessengerCallbackDataEXT* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- messageIdNumber = in_struct->messageIdNumber;
- queueLabelCount = in_struct->queueLabelCount;
- pQueueLabels = nullptr;
- cmdBufLabelCount = in_struct->cmdBufLabelCount;
- pCmdBufLabels = nullptr;
- objectCount = in_struct->objectCount;
- pObjects = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- pMessageIdName = SafeStringCopy(in_struct->pMessageIdName);
- pMessage = SafeStringCopy(in_struct->pMessage);
- if (queueLabelCount && in_struct->pQueueLabels) {
- pQueueLabels = new safe_VkDebugUtilsLabelEXT[queueLabelCount];
- for (uint32_t i = 0; i < queueLabelCount; ++i) {
- pQueueLabels[i].initialize(&in_struct->pQueueLabels[i]);
- }
- }
- if (cmdBufLabelCount && in_struct->pCmdBufLabels) {
- pCmdBufLabels = new safe_VkDebugUtilsLabelEXT[cmdBufLabelCount];
- for (uint32_t i = 0; i < cmdBufLabelCount; ++i) {
- pCmdBufLabels[i].initialize(&in_struct->pCmdBufLabels[i]);
- }
- }
- if (objectCount && in_struct->pObjects) {
- pObjects = new safe_VkDebugUtilsObjectNameInfoEXT[objectCount];
- for (uint32_t i = 0; i < objectCount; ++i) {
- pObjects[i].initialize(&in_struct->pObjects[i]);
- }
- }
-}
-
-void safe_VkDebugUtilsMessengerCallbackDataEXT::initialize(const safe_VkDebugUtilsMessengerCallbackDataEXT* src)
-{
- sType = src->sType;
- flags = src->flags;
- messageIdNumber = src->messageIdNumber;
- queueLabelCount = src->queueLabelCount;
- pQueueLabels = nullptr;
- cmdBufLabelCount = src->cmdBufLabelCount;
- pCmdBufLabels = nullptr;
- objectCount = src->objectCount;
- pObjects = nullptr;
- pNext = SafePnextCopy(src->pNext);
- pMessageIdName = SafeStringCopy(src->pMessageIdName);
- pMessage = SafeStringCopy(src->pMessage);
- if (queueLabelCount && src->pQueueLabels) {
- pQueueLabels = new safe_VkDebugUtilsLabelEXT[queueLabelCount];
- for (uint32_t i = 0; i < queueLabelCount; ++i) {
- pQueueLabels[i].initialize(&src->pQueueLabels[i]);
- }
- }
- if (cmdBufLabelCount && src->pCmdBufLabels) {
- pCmdBufLabels = new safe_VkDebugUtilsLabelEXT[cmdBufLabelCount];
- for (uint32_t i = 0; i < cmdBufLabelCount; ++i) {
- pCmdBufLabels[i].initialize(&src->pCmdBufLabels[i]);
- }
- }
- if (objectCount && src->pObjects) {
- pObjects = new safe_VkDebugUtilsObjectNameInfoEXT[objectCount];
- for (uint32_t i = 0; i < objectCount; ++i) {
- pObjects[i].initialize(&src->pObjects[i]);
- }
- }
-}
-
-safe_VkDebugUtilsMessengerCreateInfoEXT::safe_VkDebugUtilsMessengerCreateInfoEXT(const VkDebugUtilsMessengerCreateInfoEXT* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- messageSeverity(in_struct->messageSeverity),
- messageType(in_struct->messageType),
- pfnUserCallback(in_struct->pfnUserCallback),
- pUserData(in_struct->pUserData)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkDebugUtilsMessengerCreateInfoEXT::safe_VkDebugUtilsMessengerCreateInfoEXT() :
- pNext(nullptr),
- pUserData(nullptr)
-{}
-
-safe_VkDebugUtilsMessengerCreateInfoEXT::safe_VkDebugUtilsMessengerCreateInfoEXT(const safe_VkDebugUtilsMessengerCreateInfoEXT& src)
-{
- sType = src.sType;
- flags = src.flags;
- messageSeverity = src.messageSeverity;
- messageType = src.messageType;
- pfnUserCallback = src.pfnUserCallback;
- pUserData = src.pUserData;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkDebugUtilsMessengerCreateInfoEXT& safe_VkDebugUtilsMessengerCreateInfoEXT::operator=(const safe_VkDebugUtilsMessengerCreateInfoEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- messageSeverity = src.messageSeverity;
- messageType = src.messageType;
- pfnUserCallback = src.pfnUserCallback;
- pUserData = src.pUserData;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkDebugUtilsMessengerCreateInfoEXT::~safe_VkDebugUtilsMessengerCreateInfoEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDebugUtilsMessengerCreateInfoEXT::initialize(const VkDebugUtilsMessengerCreateInfoEXT* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- messageSeverity = in_struct->messageSeverity;
- messageType = in_struct->messageType;
- pfnUserCallback = in_struct->pfnUserCallback;
- pUserData = in_struct->pUserData;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkDebugUtilsMessengerCreateInfoEXT::initialize(const safe_VkDebugUtilsMessengerCreateInfoEXT* src)
-{
- sType = src->sType;
- flags = src->flags;
- messageSeverity = src->messageSeverity;
- messageType = src->messageType;
- pfnUserCallback = src->pfnUserCallback;
- pUserData = src->pUserData;
- pNext = SafePnextCopy(src->pNext);
-}
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-
-
-safe_VkAndroidHardwareBufferUsageANDROID::safe_VkAndroidHardwareBufferUsageANDROID(const VkAndroidHardwareBufferUsageANDROID* in_struct) :
- sType(in_struct->sType),
- androidHardwareBufferUsage(in_struct->androidHardwareBufferUsage)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkAndroidHardwareBufferUsageANDROID::safe_VkAndroidHardwareBufferUsageANDROID() :
- pNext(nullptr)
-{}
-
-safe_VkAndroidHardwareBufferUsageANDROID::safe_VkAndroidHardwareBufferUsageANDROID(const safe_VkAndroidHardwareBufferUsageANDROID& src)
-{
- sType = src.sType;
- androidHardwareBufferUsage = src.androidHardwareBufferUsage;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkAndroidHardwareBufferUsageANDROID& safe_VkAndroidHardwareBufferUsageANDROID::operator=(const safe_VkAndroidHardwareBufferUsageANDROID& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- androidHardwareBufferUsage = src.androidHardwareBufferUsage;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkAndroidHardwareBufferUsageANDROID::~safe_VkAndroidHardwareBufferUsageANDROID()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkAndroidHardwareBufferUsageANDROID::initialize(const VkAndroidHardwareBufferUsageANDROID* in_struct)
-{
- sType = in_struct->sType;
- androidHardwareBufferUsage = in_struct->androidHardwareBufferUsage;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkAndroidHardwareBufferUsageANDROID::initialize(const safe_VkAndroidHardwareBufferUsageANDROID* src)
-{
- sType = src->sType;
- androidHardwareBufferUsage = src->androidHardwareBufferUsage;
- pNext = SafePnextCopy(src->pNext);
-}
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-
-
-safe_VkAndroidHardwareBufferPropertiesANDROID::safe_VkAndroidHardwareBufferPropertiesANDROID(const VkAndroidHardwareBufferPropertiesANDROID* in_struct) :
- sType(in_struct->sType),
- allocationSize(in_struct->allocationSize),
- memoryTypeBits(in_struct->memoryTypeBits)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkAndroidHardwareBufferPropertiesANDROID::safe_VkAndroidHardwareBufferPropertiesANDROID() :
- pNext(nullptr)
-{}
-
-safe_VkAndroidHardwareBufferPropertiesANDROID::safe_VkAndroidHardwareBufferPropertiesANDROID(const safe_VkAndroidHardwareBufferPropertiesANDROID& src)
-{
- sType = src.sType;
- allocationSize = src.allocationSize;
- memoryTypeBits = src.memoryTypeBits;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkAndroidHardwareBufferPropertiesANDROID& safe_VkAndroidHardwareBufferPropertiesANDROID::operator=(const safe_VkAndroidHardwareBufferPropertiesANDROID& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- allocationSize = src.allocationSize;
- memoryTypeBits = src.memoryTypeBits;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkAndroidHardwareBufferPropertiesANDROID::~safe_VkAndroidHardwareBufferPropertiesANDROID()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkAndroidHardwareBufferPropertiesANDROID::initialize(const VkAndroidHardwareBufferPropertiesANDROID* in_struct)
-{
- sType = in_struct->sType;
- allocationSize = in_struct->allocationSize;
- memoryTypeBits = in_struct->memoryTypeBits;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkAndroidHardwareBufferPropertiesANDROID::initialize(const safe_VkAndroidHardwareBufferPropertiesANDROID* src)
-{
- sType = src->sType;
- allocationSize = src->allocationSize;
- memoryTypeBits = src->memoryTypeBits;
- pNext = SafePnextCopy(src->pNext);
-}
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-
-
-safe_VkAndroidHardwareBufferFormatPropertiesANDROID::safe_VkAndroidHardwareBufferFormatPropertiesANDROID(const VkAndroidHardwareBufferFormatPropertiesANDROID* in_struct) :
- sType(in_struct->sType),
- format(in_struct->format),
- externalFormat(in_struct->externalFormat),
- formatFeatures(in_struct->formatFeatures),
- samplerYcbcrConversionComponents(in_struct->samplerYcbcrConversionComponents),
- suggestedYcbcrModel(in_struct->suggestedYcbcrModel),
- suggestedYcbcrRange(in_struct->suggestedYcbcrRange),
- suggestedXChromaOffset(in_struct->suggestedXChromaOffset),
- suggestedYChromaOffset(in_struct->suggestedYChromaOffset)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkAndroidHardwareBufferFormatPropertiesANDROID::safe_VkAndroidHardwareBufferFormatPropertiesANDROID() :
- pNext(nullptr)
-{}
-
-safe_VkAndroidHardwareBufferFormatPropertiesANDROID::safe_VkAndroidHardwareBufferFormatPropertiesANDROID(const safe_VkAndroidHardwareBufferFormatPropertiesANDROID& src)
-{
- sType = src.sType;
- format = src.format;
- externalFormat = src.externalFormat;
- formatFeatures = src.formatFeatures;
- samplerYcbcrConversionComponents = src.samplerYcbcrConversionComponents;
- suggestedYcbcrModel = src.suggestedYcbcrModel;
- suggestedYcbcrRange = src.suggestedYcbcrRange;
- suggestedXChromaOffset = src.suggestedXChromaOffset;
- suggestedYChromaOffset = src.suggestedYChromaOffset;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkAndroidHardwareBufferFormatPropertiesANDROID& safe_VkAndroidHardwareBufferFormatPropertiesANDROID::operator=(const safe_VkAndroidHardwareBufferFormatPropertiesANDROID& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- format = src.format;
- externalFormat = src.externalFormat;
- formatFeatures = src.formatFeatures;
- samplerYcbcrConversionComponents = src.samplerYcbcrConversionComponents;
- suggestedYcbcrModel = src.suggestedYcbcrModel;
- suggestedYcbcrRange = src.suggestedYcbcrRange;
- suggestedXChromaOffset = src.suggestedXChromaOffset;
- suggestedYChromaOffset = src.suggestedYChromaOffset;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkAndroidHardwareBufferFormatPropertiesANDROID::~safe_VkAndroidHardwareBufferFormatPropertiesANDROID()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkAndroidHardwareBufferFormatPropertiesANDROID::initialize(const VkAndroidHardwareBufferFormatPropertiesANDROID* in_struct)
-{
- sType = in_struct->sType;
- format = in_struct->format;
- externalFormat = in_struct->externalFormat;
- formatFeatures = in_struct->formatFeatures;
- samplerYcbcrConversionComponents = in_struct->samplerYcbcrConversionComponents;
- suggestedYcbcrModel = in_struct->suggestedYcbcrModel;
- suggestedYcbcrRange = in_struct->suggestedYcbcrRange;
- suggestedXChromaOffset = in_struct->suggestedXChromaOffset;
- suggestedYChromaOffset = in_struct->suggestedYChromaOffset;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkAndroidHardwareBufferFormatPropertiesANDROID::initialize(const safe_VkAndroidHardwareBufferFormatPropertiesANDROID* src)
-{
- sType = src->sType;
- format = src->format;
- externalFormat = src->externalFormat;
- formatFeatures = src->formatFeatures;
- samplerYcbcrConversionComponents = src->samplerYcbcrConversionComponents;
- suggestedYcbcrModel = src->suggestedYcbcrModel;
- suggestedYcbcrRange = src->suggestedYcbcrRange;
- suggestedXChromaOffset = src->suggestedXChromaOffset;
- suggestedYChromaOffset = src->suggestedYChromaOffset;
- pNext = SafePnextCopy(src->pNext);
-}
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-
-
-safe_VkImportAndroidHardwareBufferInfoANDROID::safe_VkImportAndroidHardwareBufferInfoANDROID(const VkImportAndroidHardwareBufferInfoANDROID* in_struct) :
- sType(in_struct->sType),
- buffer(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- buffer = in_struct->buffer;
-}
-
-safe_VkImportAndroidHardwareBufferInfoANDROID::safe_VkImportAndroidHardwareBufferInfoANDROID() :
- pNext(nullptr),
- buffer(nullptr)
-{}
-
-safe_VkImportAndroidHardwareBufferInfoANDROID::safe_VkImportAndroidHardwareBufferInfoANDROID(const safe_VkImportAndroidHardwareBufferInfoANDROID& src)
-{
- sType = src.sType;
- pNext = SafePnextCopy(src.pNext);
- buffer = src.buffer;
-}
-
-safe_VkImportAndroidHardwareBufferInfoANDROID& safe_VkImportAndroidHardwareBufferInfoANDROID::operator=(const safe_VkImportAndroidHardwareBufferInfoANDROID& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- pNext = SafePnextCopy(src.pNext);
- buffer = src.buffer;
-
- return *this;
-}
-
-safe_VkImportAndroidHardwareBufferInfoANDROID::~safe_VkImportAndroidHardwareBufferInfoANDROID()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkImportAndroidHardwareBufferInfoANDROID::initialize(const VkImportAndroidHardwareBufferInfoANDROID* in_struct)
-{
- sType = in_struct->sType;
- pNext = SafePnextCopy(in_struct->pNext);
- buffer = in_struct->buffer;
-}
-
-void safe_VkImportAndroidHardwareBufferInfoANDROID::initialize(const safe_VkImportAndroidHardwareBufferInfoANDROID* src)
-{
- sType = src->sType;
- pNext = SafePnextCopy(src->pNext);
- buffer = src->buffer;
-}
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-
-
-safe_VkMemoryGetAndroidHardwareBufferInfoANDROID::safe_VkMemoryGetAndroidHardwareBufferInfoANDROID(const VkMemoryGetAndroidHardwareBufferInfoANDROID* in_struct) :
- sType(in_struct->sType),
- memory(in_struct->memory)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkMemoryGetAndroidHardwareBufferInfoANDROID::safe_VkMemoryGetAndroidHardwareBufferInfoANDROID() :
- pNext(nullptr)
-{}
-
-safe_VkMemoryGetAndroidHardwareBufferInfoANDROID::safe_VkMemoryGetAndroidHardwareBufferInfoANDROID(const safe_VkMemoryGetAndroidHardwareBufferInfoANDROID& src)
-{
- sType = src.sType;
- memory = src.memory;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkMemoryGetAndroidHardwareBufferInfoANDROID& safe_VkMemoryGetAndroidHardwareBufferInfoANDROID::operator=(const safe_VkMemoryGetAndroidHardwareBufferInfoANDROID& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- memory = src.memory;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkMemoryGetAndroidHardwareBufferInfoANDROID::~safe_VkMemoryGetAndroidHardwareBufferInfoANDROID()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkMemoryGetAndroidHardwareBufferInfoANDROID::initialize(const VkMemoryGetAndroidHardwareBufferInfoANDROID* in_struct)
-{
- sType = in_struct->sType;
- memory = in_struct->memory;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkMemoryGetAndroidHardwareBufferInfoANDROID::initialize(const safe_VkMemoryGetAndroidHardwareBufferInfoANDROID* src)
-{
- sType = src->sType;
- memory = src->memory;
- pNext = SafePnextCopy(src->pNext);
-}
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-
-
-safe_VkExternalFormatANDROID::safe_VkExternalFormatANDROID(const VkExternalFormatANDROID* in_struct) :
- sType(in_struct->sType),
- externalFormat(in_struct->externalFormat)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkExternalFormatANDROID::safe_VkExternalFormatANDROID() :
- pNext(nullptr)
-{}
-
-safe_VkExternalFormatANDROID::safe_VkExternalFormatANDROID(const safe_VkExternalFormatANDROID& src)
-{
- sType = src.sType;
- externalFormat = src.externalFormat;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkExternalFormatANDROID& safe_VkExternalFormatANDROID::operator=(const safe_VkExternalFormatANDROID& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- externalFormat = src.externalFormat;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkExternalFormatANDROID::~safe_VkExternalFormatANDROID()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkExternalFormatANDROID::initialize(const VkExternalFormatANDROID* in_struct)
-{
- sType = in_struct->sType;
- externalFormat = in_struct->externalFormat;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkExternalFormatANDROID::initialize(const safe_VkExternalFormatANDROID* src)
-{
- sType = src->sType;
- externalFormat = src->externalFormat;
- pNext = SafePnextCopy(src->pNext);
-}
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-
-
-safe_VkSamplerReductionModeCreateInfoEXT::safe_VkSamplerReductionModeCreateInfoEXT(const VkSamplerReductionModeCreateInfoEXT* in_struct) :
- sType(in_struct->sType),
- reductionMode(in_struct->reductionMode)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkSamplerReductionModeCreateInfoEXT::safe_VkSamplerReductionModeCreateInfoEXT() :
- pNext(nullptr)
-{}
-
-safe_VkSamplerReductionModeCreateInfoEXT::safe_VkSamplerReductionModeCreateInfoEXT(const safe_VkSamplerReductionModeCreateInfoEXT& src)
-{
- sType = src.sType;
- reductionMode = src.reductionMode;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkSamplerReductionModeCreateInfoEXT& safe_VkSamplerReductionModeCreateInfoEXT::operator=(const safe_VkSamplerReductionModeCreateInfoEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- reductionMode = src.reductionMode;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkSamplerReductionModeCreateInfoEXT::~safe_VkSamplerReductionModeCreateInfoEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkSamplerReductionModeCreateInfoEXT::initialize(const VkSamplerReductionModeCreateInfoEXT* in_struct)
-{
- sType = in_struct->sType;
- reductionMode = in_struct->reductionMode;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkSamplerReductionModeCreateInfoEXT::initialize(const safe_VkSamplerReductionModeCreateInfoEXT* src)
-{
- sType = src->sType;
- reductionMode = src->reductionMode;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT::safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT(const VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT* in_struct) :
- sType(in_struct->sType),
- filterMinmaxSingleComponentFormats(in_struct->filterMinmaxSingleComponentFormats),
- filterMinmaxImageComponentMapping(in_struct->filterMinmaxImageComponentMapping)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT::safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT::safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT(const safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT& src)
-{
- sType = src.sType;
- filterMinmaxSingleComponentFormats = src.filterMinmaxSingleComponentFormats;
- filterMinmaxImageComponentMapping = src.filterMinmaxImageComponentMapping;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT& safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT::operator=(const safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- filterMinmaxSingleComponentFormats = src.filterMinmaxSingleComponentFormats;
- filterMinmaxImageComponentMapping = src.filterMinmaxImageComponentMapping;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT::~safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT::initialize(const VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT* in_struct)
-{
- sType = in_struct->sType;
- filterMinmaxSingleComponentFormats = in_struct->filterMinmaxSingleComponentFormats;
- filterMinmaxImageComponentMapping = in_struct->filterMinmaxImageComponentMapping;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT::initialize(const safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT* src)
-{
- sType = src->sType;
- filterMinmaxSingleComponentFormats = src->filterMinmaxSingleComponentFormats;
- filterMinmaxImageComponentMapping = src->filterMinmaxImageComponentMapping;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT::safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT(const VkPhysicalDeviceInlineUniformBlockFeaturesEXT* in_struct) :
- sType(in_struct->sType),
- inlineUniformBlock(in_struct->inlineUniformBlock),
- descriptorBindingInlineUniformBlockUpdateAfterBind(in_struct->descriptorBindingInlineUniformBlockUpdateAfterBind)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT::safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT::safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT(const safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT& src)
-{
- sType = src.sType;
- inlineUniformBlock = src.inlineUniformBlock;
- descriptorBindingInlineUniformBlockUpdateAfterBind = src.descriptorBindingInlineUniformBlockUpdateAfterBind;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT& safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT::operator=(const safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- inlineUniformBlock = src.inlineUniformBlock;
- descriptorBindingInlineUniformBlockUpdateAfterBind = src.descriptorBindingInlineUniformBlockUpdateAfterBind;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT::~safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT::initialize(const VkPhysicalDeviceInlineUniformBlockFeaturesEXT* in_struct)
-{
- sType = in_struct->sType;
- inlineUniformBlock = in_struct->inlineUniformBlock;
- descriptorBindingInlineUniformBlockUpdateAfterBind = in_struct->descriptorBindingInlineUniformBlockUpdateAfterBind;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT::initialize(const safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT* src)
-{
- sType = src->sType;
- inlineUniformBlock = src->inlineUniformBlock;
- descriptorBindingInlineUniformBlockUpdateAfterBind = src->descriptorBindingInlineUniformBlockUpdateAfterBind;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT::safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT(const VkPhysicalDeviceInlineUniformBlockPropertiesEXT* in_struct) :
- sType(in_struct->sType),
- maxInlineUniformBlockSize(in_struct->maxInlineUniformBlockSize),
- maxPerStageDescriptorInlineUniformBlocks(in_struct->maxPerStageDescriptorInlineUniformBlocks),
- maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks(in_struct->maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks),
- maxDescriptorSetInlineUniformBlocks(in_struct->maxDescriptorSetInlineUniformBlocks),
- maxDescriptorSetUpdateAfterBindInlineUniformBlocks(in_struct->maxDescriptorSetUpdateAfterBindInlineUniformBlocks)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT::safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT::safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT(const safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT& src)
-{
- sType = src.sType;
- maxInlineUniformBlockSize = src.maxInlineUniformBlockSize;
- maxPerStageDescriptorInlineUniformBlocks = src.maxPerStageDescriptorInlineUniformBlocks;
- maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = src.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks;
- maxDescriptorSetInlineUniformBlocks = src.maxDescriptorSetInlineUniformBlocks;
- maxDescriptorSetUpdateAfterBindInlineUniformBlocks = src.maxDescriptorSetUpdateAfterBindInlineUniformBlocks;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT& safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT::operator=(const safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- maxInlineUniformBlockSize = src.maxInlineUniformBlockSize;
- maxPerStageDescriptorInlineUniformBlocks = src.maxPerStageDescriptorInlineUniformBlocks;
- maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = src.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks;
- maxDescriptorSetInlineUniformBlocks = src.maxDescriptorSetInlineUniformBlocks;
- maxDescriptorSetUpdateAfterBindInlineUniformBlocks = src.maxDescriptorSetUpdateAfterBindInlineUniformBlocks;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT::~safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT::initialize(const VkPhysicalDeviceInlineUniformBlockPropertiesEXT* in_struct)
-{
- sType = in_struct->sType;
- maxInlineUniformBlockSize = in_struct->maxInlineUniformBlockSize;
- maxPerStageDescriptorInlineUniformBlocks = in_struct->maxPerStageDescriptorInlineUniformBlocks;
- maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = in_struct->maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks;
- maxDescriptorSetInlineUniformBlocks = in_struct->maxDescriptorSetInlineUniformBlocks;
- maxDescriptorSetUpdateAfterBindInlineUniformBlocks = in_struct->maxDescriptorSetUpdateAfterBindInlineUniformBlocks;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT::initialize(const safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT* src)
-{
- sType = src->sType;
- maxInlineUniformBlockSize = src->maxInlineUniformBlockSize;
- maxPerStageDescriptorInlineUniformBlocks = src->maxPerStageDescriptorInlineUniformBlocks;
- maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = src->maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks;
- maxDescriptorSetInlineUniformBlocks = src->maxDescriptorSetInlineUniformBlocks;
- maxDescriptorSetUpdateAfterBindInlineUniformBlocks = src->maxDescriptorSetUpdateAfterBindInlineUniformBlocks;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkWriteDescriptorSetInlineUniformBlockEXT::safe_VkWriteDescriptorSetInlineUniformBlockEXT(const VkWriteDescriptorSetInlineUniformBlockEXT* in_struct) :
- sType(in_struct->sType),
- dataSize(in_struct->dataSize),
- pData(in_struct->pData)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkWriteDescriptorSetInlineUniformBlockEXT::safe_VkWriteDescriptorSetInlineUniformBlockEXT() :
- pNext(nullptr),
- pData(nullptr)
-{}
-
-safe_VkWriteDescriptorSetInlineUniformBlockEXT::safe_VkWriteDescriptorSetInlineUniformBlockEXT(const safe_VkWriteDescriptorSetInlineUniformBlockEXT& src)
-{
- sType = src.sType;
- dataSize = src.dataSize;
- pData = src.pData;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkWriteDescriptorSetInlineUniformBlockEXT& safe_VkWriteDescriptorSetInlineUniformBlockEXT::operator=(const safe_VkWriteDescriptorSetInlineUniformBlockEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- dataSize = src.dataSize;
- pData = src.pData;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkWriteDescriptorSetInlineUniformBlockEXT::~safe_VkWriteDescriptorSetInlineUniformBlockEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkWriteDescriptorSetInlineUniformBlockEXT::initialize(const VkWriteDescriptorSetInlineUniformBlockEXT* in_struct)
-{
- sType = in_struct->sType;
- dataSize = in_struct->dataSize;
- pData = in_struct->pData;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkWriteDescriptorSetInlineUniformBlockEXT::initialize(const safe_VkWriteDescriptorSetInlineUniformBlockEXT* src)
-{
- sType = src->sType;
- dataSize = src->dataSize;
- pData = src->pData;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT::safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT(const VkDescriptorPoolInlineUniformBlockCreateInfoEXT* in_struct) :
- sType(in_struct->sType),
- maxInlineUniformBlockBindings(in_struct->maxInlineUniformBlockBindings)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT::safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT() :
- pNext(nullptr)
-{}
-
-safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT::safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT(const safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT& src)
-{
- sType = src.sType;
- maxInlineUniformBlockBindings = src.maxInlineUniformBlockBindings;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT& safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT::operator=(const safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- maxInlineUniformBlockBindings = src.maxInlineUniformBlockBindings;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT::~safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT::initialize(const VkDescriptorPoolInlineUniformBlockCreateInfoEXT* in_struct)
-{
- sType = in_struct->sType;
- maxInlineUniformBlockBindings = in_struct->maxInlineUniformBlockBindings;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT::initialize(const safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT* src)
-{
- sType = src->sType;
- maxInlineUniformBlockBindings = src->maxInlineUniformBlockBindings;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkSampleLocationsInfoEXT::safe_VkSampleLocationsInfoEXT(const VkSampleLocationsInfoEXT* in_struct) :
- sType(in_struct->sType),
- sampleLocationsPerPixel(in_struct->sampleLocationsPerPixel),
- sampleLocationGridSize(in_struct->sampleLocationGridSize),
- sampleLocationsCount(in_struct->sampleLocationsCount),
- pSampleLocations(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pSampleLocations) {
- pSampleLocations = new VkSampleLocationEXT[in_struct->sampleLocationsCount];
- memcpy ((void *)pSampleLocations, (void *)in_struct->pSampleLocations, sizeof(VkSampleLocationEXT)*in_struct->sampleLocationsCount);
- }
-}
-
-safe_VkSampleLocationsInfoEXT::safe_VkSampleLocationsInfoEXT() :
- pNext(nullptr),
- pSampleLocations(nullptr)
-{}
-
-safe_VkSampleLocationsInfoEXT::safe_VkSampleLocationsInfoEXT(const safe_VkSampleLocationsInfoEXT& src)
-{
- sType = src.sType;
- sampleLocationsPerPixel = src.sampleLocationsPerPixel;
- sampleLocationGridSize = src.sampleLocationGridSize;
- sampleLocationsCount = src.sampleLocationsCount;
- pSampleLocations = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pSampleLocations) {
- pSampleLocations = new VkSampleLocationEXT[src.sampleLocationsCount];
- memcpy ((void *)pSampleLocations, (void *)src.pSampleLocations, sizeof(VkSampleLocationEXT)*src.sampleLocationsCount);
- }
-}
-
-safe_VkSampleLocationsInfoEXT& safe_VkSampleLocationsInfoEXT::operator=(const safe_VkSampleLocationsInfoEXT& src)
-{
- if (&src == this) return *this;
-
- if (pSampleLocations)
- delete[] pSampleLocations;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- sampleLocationsPerPixel = src.sampleLocationsPerPixel;
- sampleLocationGridSize = src.sampleLocationGridSize;
- sampleLocationsCount = src.sampleLocationsCount;
- pSampleLocations = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pSampleLocations) {
- pSampleLocations = new VkSampleLocationEXT[src.sampleLocationsCount];
- memcpy ((void *)pSampleLocations, (void *)src.pSampleLocations, sizeof(VkSampleLocationEXT)*src.sampleLocationsCount);
- }
-
- return *this;
-}
-
-safe_VkSampleLocationsInfoEXT::~safe_VkSampleLocationsInfoEXT()
-{
- if (pSampleLocations)
- delete[] pSampleLocations;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkSampleLocationsInfoEXT::initialize(const VkSampleLocationsInfoEXT* in_struct)
-{
- sType = in_struct->sType;
- sampleLocationsPerPixel = in_struct->sampleLocationsPerPixel;
- sampleLocationGridSize = in_struct->sampleLocationGridSize;
- sampleLocationsCount = in_struct->sampleLocationsCount;
- pSampleLocations = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pSampleLocations) {
- pSampleLocations = new VkSampleLocationEXT[in_struct->sampleLocationsCount];
- memcpy ((void *)pSampleLocations, (void *)in_struct->pSampleLocations, sizeof(VkSampleLocationEXT)*in_struct->sampleLocationsCount);
- }
-}
-
-void safe_VkSampleLocationsInfoEXT::initialize(const safe_VkSampleLocationsInfoEXT* src)
-{
- sType = src->sType;
- sampleLocationsPerPixel = src->sampleLocationsPerPixel;
- sampleLocationGridSize = src->sampleLocationGridSize;
- sampleLocationsCount = src->sampleLocationsCount;
- pSampleLocations = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (src->pSampleLocations) {
- pSampleLocations = new VkSampleLocationEXT[src->sampleLocationsCount];
- memcpy ((void *)pSampleLocations, (void *)src->pSampleLocations, sizeof(VkSampleLocationEXT)*src->sampleLocationsCount);
- }
-}
-
-safe_VkRenderPassSampleLocationsBeginInfoEXT::safe_VkRenderPassSampleLocationsBeginInfoEXT(const VkRenderPassSampleLocationsBeginInfoEXT* in_struct) :
- sType(in_struct->sType),
- attachmentInitialSampleLocationsCount(in_struct->attachmentInitialSampleLocationsCount),
- pAttachmentInitialSampleLocations(nullptr),
- postSubpassSampleLocationsCount(in_struct->postSubpassSampleLocationsCount),
- pPostSubpassSampleLocations(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pAttachmentInitialSampleLocations) {
- pAttachmentInitialSampleLocations = new VkAttachmentSampleLocationsEXT[in_struct->attachmentInitialSampleLocationsCount];
- memcpy ((void *)pAttachmentInitialSampleLocations, (void *)in_struct->pAttachmentInitialSampleLocations, sizeof(VkAttachmentSampleLocationsEXT)*in_struct->attachmentInitialSampleLocationsCount);
- }
- if (in_struct->pPostSubpassSampleLocations) {
- pPostSubpassSampleLocations = new VkSubpassSampleLocationsEXT[in_struct->postSubpassSampleLocationsCount];
- memcpy ((void *)pPostSubpassSampleLocations, (void *)in_struct->pPostSubpassSampleLocations, sizeof(VkSubpassSampleLocationsEXT)*in_struct->postSubpassSampleLocationsCount);
- }
-}
-
-safe_VkRenderPassSampleLocationsBeginInfoEXT::safe_VkRenderPassSampleLocationsBeginInfoEXT() :
- pNext(nullptr),
- pAttachmentInitialSampleLocations(nullptr),
- pPostSubpassSampleLocations(nullptr)
-{}
-
-safe_VkRenderPassSampleLocationsBeginInfoEXT::safe_VkRenderPassSampleLocationsBeginInfoEXT(const safe_VkRenderPassSampleLocationsBeginInfoEXT& src)
-{
- sType = src.sType;
- attachmentInitialSampleLocationsCount = src.attachmentInitialSampleLocationsCount;
- pAttachmentInitialSampleLocations = nullptr;
- postSubpassSampleLocationsCount = src.postSubpassSampleLocationsCount;
- pPostSubpassSampleLocations = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pAttachmentInitialSampleLocations) {
- pAttachmentInitialSampleLocations = new VkAttachmentSampleLocationsEXT[src.attachmentInitialSampleLocationsCount];
- memcpy ((void *)pAttachmentInitialSampleLocations, (void *)src.pAttachmentInitialSampleLocations, sizeof(VkAttachmentSampleLocationsEXT)*src.attachmentInitialSampleLocationsCount);
- }
- if (src.pPostSubpassSampleLocations) {
- pPostSubpassSampleLocations = new VkSubpassSampleLocationsEXT[src.postSubpassSampleLocationsCount];
- memcpy ((void *)pPostSubpassSampleLocations, (void *)src.pPostSubpassSampleLocations, sizeof(VkSubpassSampleLocationsEXT)*src.postSubpassSampleLocationsCount);
- }
-}
-
-safe_VkRenderPassSampleLocationsBeginInfoEXT& safe_VkRenderPassSampleLocationsBeginInfoEXT::operator=(const safe_VkRenderPassSampleLocationsBeginInfoEXT& src)
-{
- if (&src == this) return *this;
-
- if (pAttachmentInitialSampleLocations)
- delete[] pAttachmentInitialSampleLocations;
- if (pPostSubpassSampleLocations)
- delete[] pPostSubpassSampleLocations;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- attachmentInitialSampleLocationsCount = src.attachmentInitialSampleLocationsCount;
- pAttachmentInitialSampleLocations = nullptr;
- postSubpassSampleLocationsCount = src.postSubpassSampleLocationsCount;
- pPostSubpassSampleLocations = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pAttachmentInitialSampleLocations) {
- pAttachmentInitialSampleLocations = new VkAttachmentSampleLocationsEXT[src.attachmentInitialSampleLocationsCount];
- memcpy ((void *)pAttachmentInitialSampleLocations, (void *)src.pAttachmentInitialSampleLocations, sizeof(VkAttachmentSampleLocationsEXT)*src.attachmentInitialSampleLocationsCount);
- }
- if (src.pPostSubpassSampleLocations) {
- pPostSubpassSampleLocations = new VkSubpassSampleLocationsEXT[src.postSubpassSampleLocationsCount];
- memcpy ((void *)pPostSubpassSampleLocations, (void *)src.pPostSubpassSampleLocations, sizeof(VkSubpassSampleLocationsEXT)*src.postSubpassSampleLocationsCount);
- }
-
- return *this;
-}
-
-safe_VkRenderPassSampleLocationsBeginInfoEXT::~safe_VkRenderPassSampleLocationsBeginInfoEXT()
-{
- if (pAttachmentInitialSampleLocations)
- delete[] pAttachmentInitialSampleLocations;
- if (pPostSubpassSampleLocations)
- delete[] pPostSubpassSampleLocations;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkRenderPassSampleLocationsBeginInfoEXT::initialize(const VkRenderPassSampleLocationsBeginInfoEXT* in_struct)
-{
- sType = in_struct->sType;
- attachmentInitialSampleLocationsCount = in_struct->attachmentInitialSampleLocationsCount;
- pAttachmentInitialSampleLocations = nullptr;
- postSubpassSampleLocationsCount = in_struct->postSubpassSampleLocationsCount;
- pPostSubpassSampleLocations = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pAttachmentInitialSampleLocations) {
- pAttachmentInitialSampleLocations = new VkAttachmentSampleLocationsEXT[in_struct->attachmentInitialSampleLocationsCount];
- memcpy ((void *)pAttachmentInitialSampleLocations, (void *)in_struct->pAttachmentInitialSampleLocations, sizeof(VkAttachmentSampleLocationsEXT)*in_struct->attachmentInitialSampleLocationsCount);
- }
- if (in_struct->pPostSubpassSampleLocations) {
- pPostSubpassSampleLocations = new VkSubpassSampleLocationsEXT[in_struct->postSubpassSampleLocationsCount];
- memcpy ((void *)pPostSubpassSampleLocations, (void *)in_struct->pPostSubpassSampleLocations, sizeof(VkSubpassSampleLocationsEXT)*in_struct->postSubpassSampleLocationsCount);
- }
-}
-
-void safe_VkRenderPassSampleLocationsBeginInfoEXT::initialize(const safe_VkRenderPassSampleLocationsBeginInfoEXT* src)
-{
- sType = src->sType;
- attachmentInitialSampleLocationsCount = src->attachmentInitialSampleLocationsCount;
- pAttachmentInitialSampleLocations = nullptr;
- postSubpassSampleLocationsCount = src->postSubpassSampleLocationsCount;
- pPostSubpassSampleLocations = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (src->pAttachmentInitialSampleLocations) {
- pAttachmentInitialSampleLocations = new VkAttachmentSampleLocationsEXT[src->attachmentInitialSampleLocationsCount];
- memcpy ((void *)pAttachmentInitialSampleLocations, (void *)src->pAttachmentInitialSampleLocations, sizeof(VkAttachmentSampleLocationsEXT)*src->attachmentInitialSampleLocationsCount);
- }
- if (src->pPostSubpassSampleLocations) {
- pPostSubpassSampleLocations = new VkSubpassSampleLocationsEXT[src->postSubpassSampleLocationsCount];
- memcpy ((void *)pPostSubpassSampleLocations, (void *)src->pPostSubpassSampleLocations, sizeof(VkSubpassSampleLocationsEXT)*src->postSubpassSampleLocationsCount);
- }
-}
-
-safe_VkPipelineSampleLocationsStateCreateInfoEXT::safe_VkPipelineSampleLocationsStateCreateInfoEXT(const VkPipelineSampleLocationsStateCreateInfoEXT* in_struct) :
- sType(in_struct->sType),
- sampleLocationsEnable(in_struct->sampleLocationsEnable),
- sampleLocationsInfo(&in_struct->sampleLocationsInfo)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPipelineSampleLocationsStateCreateInfoEXT::safe_VkPipelineSampleLocationsStateCreateInfoEXT() :
- pNext(nullptr)
-{}
-
-safe_VkPipelineSampleLocationsStateCreateInfoEXT::safe_VkPipelineSampleLocationsStateCreateInfoEXT(const safe_VkPipelineSampleLocationsStateCreateInfoEXT& src)
-{
- sType = src.sType;
- sampleLocationsEnable = src.sampleLocationsEnable;
- sampleLocationsInfo.initialize(&src.sampleLocationsInfo);
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPipelineSampleLocationsStateCreateInfoEXT& safe_VkPipelineSampleLocationsStateCreateInfoEXT::operator=(const safe_VkPipelineSampleLocationsStateCreateInfoEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- sampleLocationsEnable = src.sampleLocationsEnable;
- sampleLocationsInfo.initialize(&src.sampleLocationsInfo);
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPipelineSampleLocationsStateCreateInfoEXT::~safe_VkPipelineSampleLocationsStateCreateInfoEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPipelineSampleLocationsStateCreateInfoEXT::initialize(const VkPipelineSampleLocationsStateCreateInfoEXT* in_struct)
-{
- sType = in_struct->sType;
- sampleLocationsEnable = in_struct->sampleLocationsEnable;
- sampleLocationsInfo.initialize(&in_struct->sampleLocationsInfo);
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPipelineSampleLocationsStateCreateInfoEXT::initialize(const safe_VkPipelineSampleLocationsStateCreateInfoEXT* src)
-{
- sType = src->sType;
- sampleLocationsEnable = src->sampleLocationsEnable;
- sampleLocationsInfo.initialize(&src->sampleLocationsInfo);
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceSampleLocationsPropertiesEXT::safe_VkPhysicalDeviceSampleLocationsPropertiesEXT(const VkPhysicalDeviceSampleLocationsPropertiesEXT* in_struct) :
- sType(in_struct->sType),
- sampleLocationSampleCounts(in_struct->sampleLocationSampleCounts),
- maxSampleLocationGridSize(in_struct->maxSampleLocationGridSize),
- sampleLocationSubPixelBits(in_struct->sampleLocationSubPixelBits),
- variableSampleLocations(in_struct->variableSampleLocations)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- for (uint32_t i = 0; i < 2; ++i) {
- sampleLocationCoordinateRange[i] = in_struct->sampleLocationCoordinateRange[i];
- }
-}
-
-safe_VkPhysicalDeviceSampleLocationsPropertiesEXT::safe_VkPhysicalDeviceSampleLocationsPropertiesEXT() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceSampleLocationsPropertiesEXT::safe_VkPhysicalDeviceSampleLocationsPropertiesEXT(const safe_VkPhysicalDeviceSampleLocationsPropertiesEXT& src)
-{
- sType = src.sType;
- sampleLocationSampleCounts = src.sampleLocationSampleCounts;
- maxSampleLocationGridSize = src.maxSampleLocationGridSize;
- sampleLocationSubPixelBits = src.sampleLocationSubPixelBits;
- variableSampleLocations = src.variableSampleLocations;
- pNext = SafePnextCopy(src.pNext);
- for (uint32_t i = 0; i < 2; ++i) {
- sampleLocationCoordinateRange[i] = src.sampleLocationCoordinateRange[i];
- }
-}
-
-safe_VkPhysicalDeviceSampleLocationsPropertiesEXT& safe_VkPhysicalDeviceSampleLocationsPropertiesEXT::operator=(const safe_VkPhysicalDeviceSampleLocationsPropertiesEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- sampleLocationSampleCounts = src.sampleLocationSampleCounts;
- maxSampleLocationGridSize = src.maxSampleLocationGridSize;
- sampleLocationSubPixelBits = src.sampleLocationSubPixelBits;
- variableSampleLocations = src.variableSampleLocations;
- pNext = SafePnextCopy(src.pNext);
- for (uint32_t i = 0; i < 2; ++i) {
- sampleLocationCoordinateRange[i] = src.sampleLocationCoordinateRange[i];
- }
-
- return *this;
-}
-
-safe_VkPhysicalDeviceSampleLocationsPropertiesEXT::~safe_VkPhysicalDeviceSampleLocationsPropertiesEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceSampleLocationsPropertiesEXT::initialize(const VkPhysicalDeviceSampleLocationsPropertiesEXT* in_struct)
-{
- sType = in_struct->sType;
- sampleLocationSampleCounts = in_struct->sampleLocationSampleCounts;
- maxSampleLocationGridSize = in_struct->maxSampleLocationGridSize;
- sampleLocationSubPixelBits = in_struct->sampleLocationSubPixelBits;
- variableSampleLocations = in_struct->variableSampleLocations;
- pNext = SafePnextCopy(in_struct->pNext);
- for (uint32_t i = 0; i < 2; ++i) {
- sampleLocationCoordinateRange[i] = in_struct->sampleLocationCoordinateRange[i];
- }
-}
-
-void safe_VkPhysicalDeviceSampleLocationsPropertiesEXT::initialize(const safe_VkPhysicalDeviceSampleLocationsPropertiesEXT* src)
-{
- sType = src->sType;
- sampleLocationSampleCounts = src->sampleLocationSampleCounts;
- maxSampleLocationGridSize = src->maxSampleLocationGridSize;
- sampleLocationSubPixelBits = src->sampleLocationSubPixelBits;
- variableSampleLocations = src->variableSampleLocations;
- pNext = SafePnextCopy(src->pNext);
- for (uint32_t i = 0; i < 2; ++i) {
- sampleLocationCoordinateRange[i] = src->sampleLocationCoordinateRange[i];
- }
-}
-
-safe_VkMultisamplePropertiesEXT::safe_VkMultisamplePropertiesEXT(const VkMultisamplePropertiesEXT* in_struct) :
- sType(in_struct->sType),
- maxSampleLocationGridSize(in_struct->maxSampleLocationGridSize)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkMultisamplePropertiesEXT::safe_VkMultisamplePropertiesEXT() :
- pNext(nullptr)
-{}
-
-safe_VkMultisamplePropertiesEXT::safe_VkMultisamplePropertiesEXT(const safe_VkMultisamplePropertiesEXT& src)
-{
- sType = src.sType;
- maxSampleLocationGridSize = src.maxSampleLocationGridSize;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkMultisamplePropertiesEXT& safe_VkMultisamplePropertiesEXT::operator=(const safe_VkMultisamplePropertiesEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- maxSampleLocationGridSize = src.maxSampleLocationGridSize;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkMultisamplePropertiesEXT::~safe_VkMultisamplePropertiesEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkMultisamplePropertiesEXT::initialize(const VkMultisamplePropertiesEXT* in_struct)
-{
- sType = in_struct->sType;
- maxSampleLocationGridSize = in_struct->maxSampleLocationGridSize;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkMultisamplePropertiesEXT::initialize(const safe_VkMultisamplePropertiesEXT* src)
-{
- sType = src->sType;
- maxSampleLocationGridSize = src->maxSampleLocationGridSize;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT::safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT(const VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT* in_struct) :
- sType(in_struct->sType),
- advancedBlendCoherentOperations(in_struct->advancedBlendCoherentOperations)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT::safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT::safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT(const safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT& src)
-{
- sType = src.sType;
- advancedBlendCoherentOperations = src.advancedBlendCoherentOperations;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT& safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT::operator=(const safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- advancedBlendCoherentOperations = src.advancedBlendCoherentOperations;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT::~safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT::initialize(const VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT* in_struct)
-{
- sType = in_struct->sType;
- advancedBlendCoherentOperations = in_struct->advancedBlendCoherentOperations;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT::initialize(const safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT* src)
-{
- sType = src->sType;
- advancedBlendCoherentOperations = src->advancedBlendCoherentOperations;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT::safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT(const VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT* in_struct) :
- sType(in_struct->sType),
- advancedBlendMaxColorAttachments(in_struct->advancedBlendMaxColorAttachments),
- advancedBlendIndependentBlend(in_struct->advancedBlendIndependentBlend),
- advancedBlendNonPremultipliedSrcColor(in_struct->advancedBlendNonPremultipliedSrcColor),
- advancedBlendNonPremultipliedDstColor(in_struct->advancedBlendNonPremultipliedDstColor),
- advancedBlendCorrelatedOverlap(in_struct->advancedBlendCorrelatedOverlap),
- advancedBlendAllOperations(in_struct->advancedBlendAllOperations)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT::safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT::safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT(const safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT& src)
-{
- sType = src.sType;
- advancedBlendMaxColorAttachments = src.advancedBlendMaxColorAttachments;
- advancedBlendIndependentBlend = src.advancedBlendIndependentBlend;
- advancedBlendNonPremultipliedSrcColor = src.advancedBlendNonPremultipliedSrcColor;
- advancedBlendNonPremultipliedDstColor = src.advancedBlendNonPremultipliedDstColor;
- advancedBlendCorrelatedOverlap = src.advancedBlendCorrelatedOverlap;
- advancedBlendAllOperations = src.advancedBlendAllOperations;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT& safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT::operator=(const safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- advancedBlendMaxColorAttachments = src.advancedBlendMaxColorAttachments;
- advancedBlendIndependentBlend = src.advancedBlendIndependentBlend;
- advancedBlendNonPremultipliedSrcColor = src.advancedBlendNonPremultipliedSrcColor;
- advancedBlendNonPremultipliedDstColor = src.advancedBlendNonPremultipliedDstColor;
- advancedBlendCorrelatedOverlap = src.advancedBlendCorrelatedOverlap;
- advancedBlendAllOperations = src.advancedBlendAllOperations;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT::~safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT::initialize(const VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT* in_struct)
-{
- sType = in_struct->sType;
- advancedBlendMaxColorAttachments = in_struct->advancedBlendMaxColorAttachments;
- advancedBlendIndependentBlend = in_struct->advancedBlendIndependentBlend;
- advancedBlendNonPremultipliedSrcColor = in_struct->advancedBlendNonPremultipliedSrcColor;
- advancedBlendNonPremultipliedDstColor = in_struct->advancedBlendNonPremultipliedDstColor;
- advancedBlendCorrelatedOverlap = in_struct->advancedBlendCorrelatedOverlap;
- advancedBlendAllOperations = in_struct->advancedBlendAllOperations;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT::initialize(const safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT* src)
-{
- sType = src->sType;
- advancedBlendMaxColorAttachments = src->advancedBlendMaxColorAttachments;
- advancedBlendIndependentBlend = src->advancedBlendIndependentBlend;
- advancedBlendNonPremultipliedSrcColor = src->advancedBlendNonPremultipliedSrcColor;
- advancedBlendNonPremultipliedDstColor = src->advancedBlendNonPremultipliedDstColor;
- advancedBlendCorrelatedOverlap = src->advancedBlendCorrelatedOverlap;
- advancedBlendAllOperations = src->advancedBlendAllOperations;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT::safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT(const VkPipelineColorBlendAdvancedStateCreateInfoEXT* in_struct) :
- sType(in_struct->sType),
- srcPremultiplied(in_struct->srcPremultiplied),
- dstPremultiplied(in_struct->dstPremultiplied),
- blendOverlap(in_struct->blendOverlap)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT::safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT() :
- pNext(nullptr)
-{}
-
-safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT::safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT(const safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT& src)
-{
- sType = src.sType;
- srcPremultiplied = src.srcPremultiplied;
- dstPremultiplied = src.dstPremultiplied;
- blendOverlap = src.blendOverlap;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT& safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT::operator=(const safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- srcPremultiplied = src.srcPremultiplied;
- dstPremultiplied = src.dstPremultiplied;
- blendOverlap = src.blendOverlap;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT::~safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT::initialize(const VkPipelineColorBlendAdvancedStateCreateInfoEXT* in_struct)
-{
- sType = in_struct->sType;
- srcPremultiplied = in_struct->srcPremultiplied;
- dstPremultiplied = in_struct->dstPremultiplied;
- blendOverlap = in_struct->blendOverlap;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT::initialize(const safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT* src)
-{
- sType = src->sType;
- srcPremultiplied = src->srcPremultiplied;
- dstPremultiplied = src->dstPremultiplied;
- blendOverlap = src->blendOverlap;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPipelineCoverageToColorStateCreateInfoNV::safe_VkPipelineCoverageToColorStateCreateInfoNV(const VkPipelineCoverageToColorStateCreateInfoNV* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- coverageToColorEnable(in_struct->coverageToColorEnable),
- coverageToColorLocation(in_struct->coverageToColorLocation)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPipelineCoverageToColorStateCreateInfoNV::safe_VkPipelineCoverageToColorStateCreateInfoNV() :
- pNext(nullptr)
-{}
-
-safe_VkPipelineCoverageToColorStateCreateInfoNV::safe_VkPipelineCoverageToColorStateCreateInfoNV(const safe_VkPipelineCoverageToColorStateCreateInfoNV& src)
-{
- sType = src.sType;
- flags = src.flags;
- coverageToColorEnable = src.coverageToColorEnable;
- coverageToColorLocation = src.coverageToColorLocation;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPipelineCoverageToColorStateCreateInfoNV& safe_VkPipelineCoverageToColorStateCreateInfoNV::operator=(const safe_VkPipelineCoverageToColorStateCreateInfoNV& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- coverageToColorEnable = src.coverageToColorEnable;
- coverageToColorLocation = src.coverageToColorLocation;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPipelineCoverageToColorStateCreateInfoNV::~safe_VkPipelineCoverageToColorStateCreateInfoNV()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPipelineCoverageToColorStateCreateInfoNV::initialize(const VkPipelineCoverageToColorStateCreateInfoNV* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- coverageToColorEnable = in_struct->coverageToColorEnable;
- coverageToColorLocation = in_struct->coverageToColorLocation;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPipelineCoverageToColorStateCreateInfoNV::initialize(const safe_VkPipelineCoverageToColorStateCreateInfoNV* src)
-{
- sType = src->sType;
- flags = src->flags;
- coverageToColorEnable = src->coverageToColorEnable;
- coverageToColorLocation = src->coverageToColorLocation;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPipelineCoverageModulationStateCreateInfoNV::safe_VkPipelineCoverageModulationStateCreateInfoNV(const VkPipelineCoverageModulationStateCreateInfoNV* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- coverageModulationMode(in_struct->coverageModulationMode),
- coverageModulationTableEnable(in_struct->coverageModulationTableEnable),
- coverageModulationTableCount(in_struct->coverageModulationTableCount),
- pCoverageModulationTable(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pCoverageModulationTable) {
- pCoverageModulationTable = new float[in_struct->coverageModulationTableCount];
- memcpy ((void *)pCoverageModulationTable, (void *)in_struct->pCoverageModulationTable, sizeof(float)*in_struct->coverageModulationTableCount);
- }
-}
-
-safe_VkPipelineCoverageModulationStateCreateInfoNV::safe_VkPipelineCoverageModulationStateCreateInfoNV() :
- pNext(nullptr),
- pCoverageModulationTable(nullptr)
-{}
-
-safe_VkPipelineCoverageModulationStateCreateInfoNV::safe_VkPipelineCoverageModulationStateCreateInfoNV(const safe_VkPipelineCoverageModulationStateCreateInfoNV& src)
-{
- sType = src.sType;
- flags = src.flags;
- coverageModulationMode = src.coverageModulationMode;
- coverageModulationTableEnable = src.coverageModulationTableEnable;
- coverageModulationTableCount = src.coverageModulationTableCount;
- pCoverageModulationTable = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pCoverageModulationTable) {
- pCoverageModulationTable = new float[src.coverageModulationTableCount];
- memcpy ((void *)pCoverageModulationTable, (void *)src.pCoverageModulationTable, sizeof(float)*src.coverageModulationTableCount);
- }
-}
-
-safe_VkPipelineCoverageModulationStateCreateInfoNV& safe_VkPipelineCoverageModulationStateCreateInfoNV::operator=(const safe_VkPipelineCoverageModulationStateCreateInfoNV& src)
-{
- if (&src == this) return *this;
-
- if (pCoverageModulationTable)
- delete[] pCoverageModulationTable;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- coverageModulationMode = src.coverageModulationMode;
- coverageModulationTableEnable = src.coverageModulationTableEnable;
- coverageModulationTableCount = src.coverageModulationTableCount;
- pCoverageModulationTable = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pCoverageModulationTable) {
- pCoverageModulationTable = new float[src.coverageModulationTableCount];
- memcpy ((void *)pCoverageModulationTable, (void *)src.pCoverageModulationTable, sizeof(float)*src.coverageModulationTableCount);
- }
-
- return *this;
-}
-
-safe_VkPipelineCoverageModulationStateCreateInfoNV::~safe_VkPipelineCoverageModulationStateCreateInfoNV()
-{
- if (pCoverageModulationTable)
- delete[] pCoverageModulationTable;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPipelineCoverageModulationStateCreateInfoNV::initialize(const VkPipelineCoverageModulationStateCreateInfoNV* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- coverageModulationMode = in_struct->coverageModulationMode;
- coverageModulationTableEnable = in_struct->coverageModulationTableEnable;
- coverageModulationTableCount = in_struct->coverageModulationTableCount;
- pCoverageModulationTable = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pCoverageModulationTable) {
- pCoverageModulationTable = new float[in_struct->coverageModulationTableCount];
- memcpy ((void *)pCoverageModulationTable, (void *)in_struct->pCoverageModulationTable, sizeof(float)*in_struct->coverageModulationTableCount);
- }
-}
-
-void safe_VkPipelineCoverageModulationStateCreateInfoNV::initialize(const safe_VkPipelineCoverageModulationStateCreateInfoNV* src)
-{
- sType = src->sType;
- flags = src->flags;
- coverageModulationMode = src->coverageModulationMode;
- coverageModulationTableEnable = src->coverageModulationTableEnable;
- coverageModulationTableCount = src->coverageModulationTableCount;
- pCoverageModulationTable = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (src->pCoverageModulationTable) {
- pCoverageModulationTable = new float[src->coverageModulationTableCount];
- memcpy ((void *)pCoverageModulationTable, (void *)src->pCoverageModulationTable, sizeof(float)*src->coverageModulationTableCount);
- }
-}
-
-safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV::safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV(const VkPhysicalDeviceShaderSMBuiltinsPropertiesNV* in_struct) :
- sType(in_struct->sType),
- shaderSMCount(in_struct->shaderSMCount),
- shaderWarpsPerSM(in_struct->shaderWarpsPerSM)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV::safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV::safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV(const safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV& src)
-{
- sType = src.sType;
- shaderSMCount = src.shaderSMCount;
- shaderWarpsPerSM = src.shaderWarpsPerSM;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV& safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV::operator=(const safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- shaderSMCount = src.shaderSMCount;
- shaderWarpsPerSM = src.shaderWarpsPerSM;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV::~safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV::initialize(const VkPhysicalDeviceShaderSMBuiltinsPropertiesNV* in_struct)
-{
- sType = in_struct->sType;
- shaderSMCount = in_struct->shaderSMCount;
- shaderWarpsPerSM = in_struct->shaderWarpsPerSM;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV::initialize(const safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV* src)
-{
- sType = src->sType;
- shaderSMCount = src->shaderSMCount;
- shaderWarpsPerSM = src->shaderWarpsPerSM;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV::safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV(const VkPhysicalDeviceShaderSMBuiltinsFeaturesNV* in_struct) :
- sType(in_struct->sType),
- shaderSMBuiltins(in_struct->shaderSMBuiltins)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV::safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV::safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV(const safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV& src)
-{
- sType = src.sType;
- shaderSMBuiltins = src.shaderSMBuiltins;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV& safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV::operator=(const safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- shaderSMBuiltins = src.shaderSMBuiltins;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV::~safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV::initialize(const VkPhysicalDeviceShaderSMBuiltinsFeaturesNV* in_struct)
-{
- sType = in_struct->sType;
- shaderSMBuiltins = in_struct->shaderSMBuiltins;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV::initialize(const safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV* src)
-{
- sType = src->sType;
- shaderSMBuiltins = src->shaderSMBuiltins;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkDrmFormatModifierPropertiesListEXT::safe_VkDrmFormatModifierPropertiesListEXT(const VkDrmFormatModifierPropertiesListEXT* in_struct) :
- sType(in_struct->sType),
- drmFormatModifierCount(in_struct->drmFormatModifierCount),
- pDrmFormatModifierProperties(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pDrmFormatModifierProperties) {
- pDrmFormatModifierProperties = new VkDrmFormatModifierPropertiesEXT[in_struct->drmFormatModifierCount];
- memcpy ((void *)pDrmFormatModifierProperties, (void *)in_struct->pDrmFormatModifierProperties, sizeof(VkDrmFormatModifierPropertiesEXT)*in_struct->drmFormatModifierCount);
- }
-}
-
-safe_VkDrmFormatModifierPropertiesListEXT::safe_VkDrmFormatModifierPropertiesListEXT() :
- pNext(nullptr),
- pDrmFormatModifierProperties(nullptr)
-{}
-
-safe_VkDrmFormatModifierPropertiesListEXT::safe_VkDrmFormatModifierPropertiesListEXT(const safe_VkDrmFormatModifierPropertiesListEXT& src)
-{
- sType = src.sType;
- drmFormatModifierCount = src.drmFormatModifierCount;
- pDrmFormatModifierProperties = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pDrmFormatModifierProperties) {
- pDrmFormatModifierProperties = new VkDrmFormatModifierPropertiesEXT[src.drmFormatModifierCount];
- memcpy ((void *)pDrmFormatModifierProperties, (void *)src.pDrmFormatModifierProperties, sizeof(VkDrmFormatModifierPropertiesEXT)*src.drmFormatModifierCount);
- }
-}
-
-safe_VkDrmFormatModifierPropertiesListEXT& safe_VkDrmFormatModifierPropertiesListEXT::operator=(const safe_VkDrmFormatModifierPropertiesListEXT& src)
-{
- if (&src == this) return *this;
-
- if (pDrmFormatModifierProperties)
- delete[] pDrmFormatModifierProperties;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- drmFormatModifierCount = src.drmFormatModifierCount;
- pDrmFormatModifierProperties = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pDrmFormatModifierProperties) {
- pDrmFormatModifierProperties = new VkDrmFormatModifierPropertiesEXT[src.drmFormatModifierCount];
- memcpy ((void *)pDrmFormatModifierProperties, (void *)src.pDrmFormatModifierProperties, sizeof(VkDrmFormatModifierPropertiesEXT)*src.drmFormatModifierCount);
- }
-
- return *this;
-}
-
-safe_VkDrmFormatModifierPropertiesListEXT::~safe_VkDrmFormatModifierPropertiesListEXT()
-{
- if (pDrmFormatModifierProperties)
- delete[] pDrmFormatModifierProperties;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDrmFormatModifierPropertiesListEXT::initialize(const VkDrmFormatModifierPropertiesListEXT* in_struct)
-{
- sType = in_struct->sType;
- drmFormatModifierCount = in_struct->drmFormatModifierCount;
- pDrmFormatModifierProperties = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pDrmFormatModifierProperties) {
- pDrmFormatModifierProperties = new VkDrmFormatModifierPropertiesEXT[in_struct->drmFormatModifierCount];
- memcpy ((void *)pDrmFormatModifierProperties, (void *)in_struct->pDrmFormatModifierProperties, sizeof(VkDrmFormatModifierPropertiesEXT)*in_struct->drmFormatModifierCount);
- }
-}
-
-void safe_VkDrmFormatModifierPropertiesListEXT::initialize(const safe_VkDrmFormatModifierPropertiesListEXT* src)
-{
- sType = src->sType;
- drmFormatModifierCount = src->drmFormatModifierCount;
- pDrmFormatModifierProperties = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (src->pDrmFormatModifierProperties) {
- pDrmFormatModifierProperties = new VkDrmFormatModifierPropertiesEXT[src->drmFormatModifierCount];
- memcpy ((void *)pDrmFormatModifierProperties, (void *)src->pDrmFormatModifierProperties, sizeof(VkDrmFormatModifierPropertiesEXT)*src->drmFormatModifierCount);
- }
-}
-
-safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT::safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT(const VkPhysicalDeviceImageDrmFormatModifierInfoEXT* in_struct) :
- sType(in_struct->sType),
- drmFormatModifier(in_struct->drmFormatModifier),
- sharingMode(in_struct->sharingMode),
- queueFamilyIndexCount(in_struct->queueFamilyIndexCount),
- pQueueFamilyIndices(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pQueueFamilyIndices) {
- pQueueFamilyIndices = new uint32_t[in_struct->queueFamilyIndexCount];
- memcpy ((void *)pQueueFamilyIndices, (void *)in_struct->pQueueFamilyIndices, sizeof(uint32_t)*in_struct->queueFamilyIndexCount);
- }
-}
-
-safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT::safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT() :
- pNext(nullptr),
- pQueueFamilyIndices(nullptr)
-{}
-
-safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT::safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT(const safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT& src)
-{
- sType = src.sType;
- drmFormatModifier = src.drmFormatModifier;
- sharingMode = src.sharingMode;
- queueFamilyIndexCount = src.queueFamilyIndexCount;
- pQueueFamilyIndices = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pQueueFamilyIndices) {
- pQueueFamilyIndices = new uint32_t[src.queueFamilyIndexCount];
- memcpy ((void *)pQueueFamilyIndices, (void *)src.pQueueFamilyIndices, sizeof(uint32_t)*src.queueFamilyIndexCount);
- }
-}
-
-safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT& safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT::operator=(const safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT& src)
-{
- if (&src == this) return *this;
-
- if (pQueueFamilyIndices)
- delete[] pQueueFamilyIndices;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- drmFormatModifier = src.drmFormatModifier;
- sharingMode = src.sharingMode;
- queueFamilyIndexCount = src.queueFamilyIndexCount;
- pQueueFamilyIndices = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pQueueFamilyIndices) {
- pQueueFamilyIndices = new uint32_t[src.queueFamilyIndexCount];
- memcpy ((void *)pQueueFamilyIndices, (void *)src.pQueueFamilyIndices, sizeof(uint32_t)*src.queueFamilyIndexCount);
- }
-
- return *this;
-}
-
-safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT::~safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT()
-{
- if (pQueueFamilyIndices)
- delete[] pQueueFamilyIndices;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT::initialize(const VkPhysicalDeviceImageDrmFormatModifierInfoEXT* in_struct)
-{
- sType = in_struct->sType;
- drmFormatModifier = in_struct->drmFormatModifier;
- sharingMode = in_struct->sharingMode;
- queueFamilyIndexCount = in_struct->queueFamilyIndexCount;
- pQueueFamilyIndices = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pQueueFamilyIndices) {
- pQueueFamilyIndices = new uint32_t[in_struct->queueFamilyIndexCount];
- memcpy ((void *)pQueueFamilyIndices, (void *)in_struct->pQueueFamilyIndices, sizeof(uint32_t)*in_struct->queueFamilyIndexCount);
- }
-}
-
-void safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT::initialize(const safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT* src)
-{
- sType = src->sType;
- drmFormatModifier = src->drmFormatModifier;
- sharingMode = src->sharingMode;
- queueFamilyIndexCount = src->queueFamilyIndexCount;
- pQueueFamilyIndices = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (src->pQueueFamilyIndices) {
- pQueueFamilyIndices = new uint32_t[src->queueFamilyIndexCount];
- memcpy ((void *)pQueueFamilyIndices, (void *)src->pQueueFamilyIndices, sizeof(uint32_t)*src->queueFamilyIndexCount);
- }
-}
-
-safe_VkImageDrmFormatModifierListCreateInfoEXT::safe_VkImageDrmFormatModifierListCreateInfoEXT(const VkImageDrmFormatModifierListCreateInfoEXT* in_struct) :
- sType(in_struct->sType),
- drmFormatModifierCount(in_struct->drmFormatModifierCount),
- pDrmFormatModifiers(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pDrmFormatModifiers) {
- pDrmFormatModifiers = new uint64_t[in_struct->drmFormatModifierCount];
- memcpy ((void *)pDrmFormatModifiers, (void *)in_struct->pDrmFormatModifiers, sizeof(uint64_t)*in_struct->drmFormatModifierCount);
- }
-}
-
-safe_VkImageDrmFormatModifierListCreateInfoEXT::safe_VkImageDrmFormatModifierListCreateInfoEXT() :
- pNext(nullptr),
- pDrmFormatModifiers(nullptr)
-{}
-
-safe_VkImageDrmFormatModifierListCreateInfoEXT::safe_VkImageDrmFormatModifierListCreateInfoEXT(const safe_VkImageDrmFormatModifierListCreateInfoEXT& src)
-{
- sType = src.sType;
- drmFormatModifierCount = src.drmFormatModifierCount;
- pDrmFormatModifiers = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pDrmFormatModifiers) {
- pDrmFormatModifiers = new uint64_t[src.drmFormatModifierCount];
- memcpy ((void *)pDrmFormatModifiers, (void *)src.pDrmFormatModifiers, sizeof(uint64_t)*src.drmFormatModifierCount);
- }
-}
-
-safe_VkImageDrmFormatModifierListCreateInfoEXT& safe_VkImageDrmFormatModifierListCreateInfoEXT::operator=(const safe_VkImageDrmFormatModifierListCreateInfoEXT& src)
-{
- if (&src == this) return *this;
-
- if (pDrmFormatModifiers)
- delete[] pDrmFormatModifiers;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- drmFormatModifierCount = src.drmFormatModifierCount;
- pDrmFormatModifiers = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pDrmFormatModifiers) {
- pDrmFormatModifiers = new uint64_t[src.drmFormatModifierCount];
- memcpy ((void *)pDrmFormatModifiers, (void *)src.pDrmFormatModifiers, sizeof(uint64_t)*src.drmFormatModifierCount);
- }
-
- return *this;
-}
-
-safe_VkImageDrmFormatModifierListCreateInfoEXT::~safe_VkImageDrmFormatModifierListCreateInfoEXT()
-{
- if (pDrmFormatModifiers)
- delete[] pDrmFormatModifiers;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkImageDrmFormatModifierListCreateInfoEXT::initialize(const VkImageDrmFormatModifierListCreateInfoEXT* in_struct)
-{
- sType = in_struct->sType;
- drmFormatModifierCount = in_struct->drmFormatModifierCount;
- pDrmFormatModifiers = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pDrmFormatModifiers) {
- pDrmFormatModifiers = new uint64_t[in_struct->drmFormatModifierCount];
- memcpy ((void *)pDrmFormatModifiers, (void *)in_struct->pDrmFormatModifiers, sizeof(uint64_t)*in_struct->drmFormatModifierCount);
- }
-}
-
-void safe_VkImageDrmFormatModifierListCreateInfoEXT::initialize(const safe_VkImageDrmFormatModifierListCreateInfoEXT* src)
-{
- sType = src->sType;
- drmFormatModifierCount = src->drmFormatModifierCount;
- pDrmFormatModifiers = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (src->pDrmFormatModifiers) {
- pDrmFormatModifiers = new uint64_t[src->drmFormatModifierCount];
- memcpy ((void *)pDrmFormatModifiers, (void *)src->pDrmFormatModifiers, sizeof(uint64_t)*src->drmFormatModifierCount);
- }
-}
-
-safe_VkImageDrmFormatModifierExplicitCreateInfoEXT::safe_VkImageDrmFormatModifierExplicitCreateInfoEXT(const VkImageDrmFormatModifierExplicitCreateInfoEXT* in_struct) :
- sType(in_struct->sType),
- drmFormatModifier(in_struct->drmFormatModifier),
- drmFormatModifierPlaneCount(in_struct->drmFormatModifierPlaneCount),
- pPlaneLayouts(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pPlaneLayouts) {
- pPlaneLayouts = new VkSubresourceLayout[in_struct->drmFormatModifierPlaneCount];
- memcpy ((void *)pPlaneLayouts, (void *)in_struct->pPlaneLayouts, sizeof(VkSubresourceLayout)*in_struct->drmFormatModifierPlaneCount);
- }
-}
-
-safe_VkImageDrmFormatModifierExplicitCreateInfoEXT::safe_VkImageDrmFormatModifierExplicitCreateInfoEXT() :
- pNext(nullptr),
- pPlaneLayouts(nullptr)
-{}
-
-safe_VkImageDrmFormatModifierExplicitCreateInfoEXT::safe_VkImageDrmFormatModifierExplicitCreateInfoEXT(const safe_VkImageDrmFormatModifierExplicitCreateInfoEXT& src)
-{
- sType = src.sType;
- drmFormatModifier = src.drmFormatModifier;
- drmFormatModifierPlaneCount = src.drmFormatModifierPlaneCount;
- pPlaneLayouts = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pPlaneLayouts) {
- pPlaneLayouts = new VkSubresourceLayout[src.drmFormatModifierPlaneCount];
- memcpy ((void *)pPlaneLayouts, (void *)src.pPlaneLayouts, sizeof(VkSubresourceLayout)*src.drmFormatModifierPlaneCount);
- }
-}
-
-safe_VkImageDrmFormatModifierExplicitCreateInfoEXT& safe_VkImageDrmFormatModifierExplicitCreateInfoEXT::operator=(const safe_VkImageDrmFormatModifierExplicitCreateInfoEXT& src)
-{
- if (&src == this) return *this;
-
- if (pPlaneLayouts)
- delete[] pPlaneLayouts;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- drmFormatModifier = src.drmFormatModifier;
- drmFormatModifierPlaneCount = src.drmFormatModifierPlaneCount;
- pPlaneLayouts = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pPlaneLayouts) {
- pPlaneLayouts = new VkSubresourceLayout[src.drmFormatModifierPlaneCount];
- memcpy ((void *)pPlaneLayouts, (void *)src.pPlaneLayouts, sizeof(VkSubresourceLayout)*src.drmFormatModifierPlaneCount);
- }
-
- return *this;
-}
-
-safe_VkImageDrmFormatModifierExplicitCreateInfoEXT::~safe_VkImageDrmFormatModifierExplicitCreateInfoEXT()
-{
- if (pPlaneLayouts)
- delete[] pPlaneLayouts;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkImageDrmFormatModifierExplicitCreateInfoEXT::initialize(const VkImageDrmFormatModifierExplicitCreateInfoEXT* in_struct)
-{
- sType = in_struct->sType;
- drmFormatModifier = in_struct->drmFormatModifier;
- drmFormatModifierPlaneCount = in_struct->drmFormatModifierPlaneCount;
- pPlaneLayouts = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pPlaneLayouts) {
- pPlaneLayouts = new VkSubresourceLayout[in_struct->drmFormatModifierPlaneCount];
- memcpy ((void *)pPlaneLayouts, (void *)in_struct->pPlaneLayouts, sizeof(VkSubresourceLayout)*in_struct->drmFormatModifierPlaneCount);
- }
-}
-
-void safe_VkImageDrmFormatModifierExplicitCreateInfoEXT::initialize(const safe_VkImageDrmFormatModifierExplicitCreateInfoEXT* src)
-{
- sType = src->sType;
- drmFormatModifier = src->drmFormatModifier;
- drmFormatModifierPlaneCount = src->drmFormatModifierPlaneCount;
- pPlaneLayouts = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (src->pPlaneLayouts) {
- pPlaneLayouts = new VkSubresourceLayout[src->drmFormatModifierPlaneCount];
- memcpy ((void *)pPlaneLayouts, (void *)src->pPlaneLayouts, sizeof(VkSubresourceLayout)*src->drmFormatModifierPlaneCount);
- }
-}
-
-safe_VkImageDrmFormatModifierPropertiesEXT::safe_VkImageDrmFormatModifierPropertiesEXT(const VkImageDrmFormatModifierPropertiesEXT* in_struct) :
- sType(in_struct->sType),
- drmFormatModifier(in_struct->drmFormatModifier)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkImageDrmFormatModifierPropertiesEXT::safe_VkImageDrmFormatModifierPropertiesEXT() :
- pNext(nullptr)
-{}
-
-safe_VkImageDrmFormatModifierPropertiesEXT::safe_VkImageDrmFormatModifierPropertiesEXT(const safe_VkImageDrmFormatModifierPropertiesEXT& src)
-{
- sType = src.sType;
- drmFormatModifier = src.drmFormatModifier;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkImageDrmFormatModifierPropertiesEXT& safe_VkImageDrmFormatModifierPropertiesEXT::operator=(const safe_VkImageDrmFormatModifierPropertiesEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- drmFormatModifier = src.drmFormatModifier;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkImageDrmFormatModifierPropertiesEXT::~safe_VkImageDrmFormatModifierPropertiesEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkImageDrmFormatModifierPropertiesEXT::initialize(const VkImageDrmFormatModifierPropertiesEXT* in_struct)
-{
- sType = in_struct->sType;
- drmFormatModifier = in_struct->drmFormatModifier;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkImageDrmFormatModifierPropertiesEXT::initialize(const safe_VkImageDrmFormatModifierPropertiesEXT* src)
-{
- sType = src->sType;
- drmFormatModifier = src->drmFormatModifier;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkValidationCacheCreateInfoEXT::safe_VkValidationCacheCreateInfoEXT(const VkValidationCacheCreateInfoEXT* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- initialDataSize(in_struct->initialDataSize),
- pInitialData(in_struct->pInitialData)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkValidationCacheCreateInfoEXT::safe_VkValidationCacheCreateInfoEXT() :
- pNext(nullptr),
- pInitialData(nullptr)
-{}
-
-safe_VkValidationCacheCreateInfoEXT::safe_VkValidationCacheCreateInfoEXT(const safe_VkValidationCacheCreateInfoEXT& src)
-{
- sType = src.sType;
- flags = src.flags;
- initialDataSize = src.initialDataSize;
- pInitialData = src.pInitialData;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkValidationCacheCreateInfoEXT& safe_VkValidationCacheCreateInfoEXT::operator=(const safe_VkValidationCacheCreateInfoEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- initialDataSize = src.initialDataSize;
- pInitialData = src.pInitialData;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkValidationCacheCreateInfoEXT::~safe_VkValidationCacheCreateInfoEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkValidationCacheCreateInfoEXT::initialize(const VkValidationCacheCreateInfoEXT* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- initialDataSize = in_struct->initialDataSize;
- pInitialData = in_struct->pInitialData;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkValidationCacheCreateInfoEXT::initialize(const safe_VkValidationCacheCreateInfoEXT* src)
-{
- sType = src->sType;
- flags = src->flags;
- initialDataSize = src->initialDataSize;
- pInitialData = src->pInitialData;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkShaderModuleValidationCacheCreateInfoEXT::safe_VkShaderModuleValidationCacheCreateInfoEXT(const VkShaderModuleValidationCacheCreateInfoEXT* in_struct) :
- sType(in_struct->sType),
- validationCache(in_struct->validationCache)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkShaderModuleValidationCacheCreateInfoEXT::safe_VkShaderModuleValidationCacheCreateInfoEXT() :
- pNext(nullptr)
-{}
-
-safe_VkShaderModuleValidationCacheCreateInfoEXT::safe_VkShaderModuleValidationCacheCreateInfoEXT(const safe_VkShaderModuleValidationCacheCreateInfoEXT& src)
-{
- sType = src.sType;
- validationCache = src.validationCache;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkShaderModuleValidationCacheCreateInfoEXT& safe_VkShaderModuleValidationCacheCreateInfoEXT::operator=(const safe_VkShaderModuleValidationCacheCreateInfoEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- validationCache = src.validationCache;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkShaderModuleValidationCacheCreateInfoEXT::~safe_VkShaderModuleValidationCacheCreateInfoEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkShaderModuleValidationCacheCreateInfoEXT::initialize(const VkShaderModuleValidationCacheCreateInfoEXT* in_struct)
-{
- sType = in_struct->sType;
- validationCache = in_struct->validationCache;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkShaderModuleValidationCacheCreateInfoEXT::initialize(const safe_VkShaderModuleValidationCacheCreateInfoEXT* src)
-{
- sType = src->sType;
- validationCache = src->validationCache;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT::safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT(const VkDescriptorSetLayoutBindingFlagsCreateInfoEXT* in_struct) :
- sType(in_struct->sType),
- bindingCount(in_struct->bindingCount),
- pBindingFlags(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pBindingFlags) {
- pBindingFlags = new VkDescriptorBindingFlagsEXT[in_struct->bindingCount];
- memcpy ((void *)pBindingFlags, (void *)in_struct->pBindingFlags, sizeof(VkDescriptorBindingFlagsEXT)*in_struct->bindingCount);
- }
-}
-
-safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT::safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT() :
- pNext(nullptr),
- pBindingFlags(nullptr)
-{}
-
-safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT::safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT(const safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT& src)
-{
- sType = src.sType;
- bindingCount = src.bindingCount;
- pBindingFlags = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pBindingFlags) {
- pBindingFlags = new VkDescriptorBindingFlagsEXT[src.bindingCount];
- memcpy ((void *)pBindingFlags, (void *)src.pBindingFlags, sizeof(VkDescriptorBindingFlagsEXT)*src.bindingCount);
- }
-}
-
-safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT& safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT::operator=(const safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT& src)
-{
- if (&src == this) return *this;
-
- if (pBindingFlags)
- delete[] pBindingFlags;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- bindingCount = src.bindingCount;
- pBindingFlags = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pBindingFlags) {
- pBindingFlags = new VkDescriptorBindingFlagsEXT[src.bindingCount];
- memcpy ((void *)pBindingFlags, (void *)src.pBindingFlags, sizeof(VkDescriptorBindingFlagsEXT)*src.bindingCount);
- }
-
- return *this;
-}
-
-safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT::~safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT()
-{
- if (pBindingFlags)
- delete[] pBindingFlags;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT::initialize(const VkDescriptorSetLayoutBindingFlagsCreateInfoEXT* in_struct)
-{
- sType = in_struct->sType;
- bindingCount = in_struct->bindingCount;
- pBindingFlags = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pBindingFlags) {
- pBindingFlags = new VkDescriptorBindingFlagsEXT[in_struct->bindingCount];
- memcpy ((void *)pBindingFlags, (void *)in_struct->pBindingFlags, sizeof(VkDescriptorBindingFlagsEXT)*in_struct->bindingCount);
- }
-}
-
-void safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT::initialize(const safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT* src)
-{
- sType = src->sType;
- bindingCount = src->bindingCount;
- pBindingFlags = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (src->pBindingFlags) {
- pBindingFlags = new VkDescriptorBindingFlagsEXT[src->bindingCount];
- memcpy ((void *)pBindingFlags, (void *)src->pBindingFlags, sizeof(VkDescriptorBindingFlagsEXT)*src->bindingCount);
- }
-}
-
-safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT::safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT(const VkPhysicalDeviceDescriptorIndexingFeaturesEXT* in_struct) :
- sType(in_struct->sType),
- shaderInputAttachmentArrayDynamicIndexing(in_struct->shaderInputAttachmentArrayDynamicIndexing),
- shaderUniformTexelBufferArrayDynamicIndexing(in_struct->shaderUniformTexelBufferArrayDynamicIndexing),
- shaderStorageTexelBufferArrayDynamicIndexing(in_struct->shaderStorageTexelBufferArrayDynamicIndexing),
- shaderUniformBufferArrayNonUniformIndexing(in_struct->shaderUniformBufferArrayNonUniformIndexing),
- shaderSampledImageArrayNonUniformIndexing(in_struct->shaderSampledImageArrayNonUniformIndexing),
- shaderStorageBufferArrayNonUniformIndexing(in_struct->shaderStorageBufferArrayNonUniformIndexing),
- shaderStorageImageArrayNonUniformIndexing(in_struct->shaderStorageImageArrayNonUniformIndexing),
- shaderInputAttachmentArrayNonUniformIndexing(in_struct->shaderInputAttachmentArrayNonUniformIndexing),
- shaderUniformTexelBufferArrayNonUniformIndexing(in_struct->shaderUniformTexelBufferArrayNonUniformIndexing),
- shaderStorageTexelBufferArrayNonUniformIndexing(in_struct->shaderStorageTexelBufferArrayNonUniformIndexing),
- descriptorBindingUniformBufferUpdateAfterBind(in_struct->descriptorBindingUniformBufferUpdateAfterBind),
- descriptorBindingSampledImageUpdateAfterBind(in_struct->descriptorBindingSampledImageUpdateAfterBind),
- descriptorBindingStorageImageUpdateAfterBind(in_struct->descriptorBindingStorageImageUpdateAfterBind),
- descriptorBindingStorageBufferUpdateAfterBind(in_struct->descriptorBindingStorageBufferUpdateAfterBind),
- descriptorBindingUniformTexelBufferUpdateAfterBind(in_struct->descriptorBindingUniformTexelBufferUpdateAfterBind),
- descriptorBindingStorageTexelBufferUpdateAfterBind(in_struct->descriptorBindingStorageTexelBufferUpdateAfterBind),
- descriptorBindingUpdateUnusedWhilePending(in_struct->descriptorBindingUpdateUnusedWhilePending),
- descriptorBindingPartiallyBound(in_struct->descriptorBindingPartiallyBound),
- descriptorBindingVariableDescriptorCount(in_struct->descriptorBindingVariableDescriptorCount),
- runtimeDescriptorArray(in_struct->runtimeDescriptorArray)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT::safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT::safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT(const safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT& src)
-{
- sType = src.sType;
- shaderInputAttachmentArrayDynamicIndexing = src.shaderInputAttachmentArrayDynamicIndexing;
- shaderUniformTexelBufferArrayDynamicIndexing = src.shaderUniformTexelBufferArrayDynamicIndexing;
- shaderStorageTexelBufferArrayDynamicIndexing = src.shaderStorageTexelBufferArrayDynamicIndexing;
- shaderUniformBufferArrayNonUniformIndexing = src.shaderUniformBufferArrayNonUniformIndexing;
- shaderSampledImageArrayNonUniformIndexing = src.shaderSampledImageArrayNonUniformIndexing;
- shaderStorageBufferArrayNonUniformIndexing = src.shaderStorageBufferArrayNonUniformIndexing;
- shaderStorageImageArrayNonUniformIndexing = src.shaderStorageImageArrayNonUniformIndexing;
- shaderInputAttachmentArrayNonUniformIndexing = src.shaderInputAttachmentArrayNonUniformIndexing;
- shaderUniformTexelBufferArrayNonUniformIndexing = src.shaderUniformTexelBufferArrayNonUniformIndexing;
- shaderStorageTexelBufferArrayNonUniformIndexing = src.shaderStorageTexelBufferArrayNonUniformIndexing;
- descriptorBindingUniformBufferUpdateAfterBind = src.descriptorBindingUniformBufferUpdateAfterBind;
- descriptorBindingSampledImageUpdateAfterBind = src.descriptorBindingSampledImageUpdateAfterBind;
- descriptorBindingStorageImageUpdateAfterBind = src.descriptorBindingStorageImageUpdateAfterBind;
- descriptorBindingStorageBufferUpdateAfterBind = src.descriptorBindingStorageBufferUpdateAfterBind;
- descriptorBindingUniformTexelBufferUpdateAfterBind = src.descriptorBindingUniformTexelBufferUpdateAfterBind;
- descriptorBindingStorageTexelBufferUpdateAfterBind = src.descriptorBindingStorageTexelBufferUpdateAfterBind;
- descriptorBindingUpdateUnusedWhilePending = src.descriptorBindingUpdateUnusedWhilePending;
- descriptorBindingPartiallyBound = src.descriptorBindingPartiallyBound;
- descriptorBindingVariableDescriptorCount = src.descriptorBindingVariableDescriptorCount;
- runtimeDescriptorArray = src.runtimeDescriptorArray;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT& safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT::operator=(const safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- shaderInputAttachmentArrayDynamicIndexing = src.shaderInputAttachmentArrayDynamicIndexing;
- shaderUniformTexelBufferArrayDynamicIndexing = src.shaderUniformTexelBufferArrayDynamicIndexing;
- shaderStorageTexelBufferArrayDynamicIndexing = src.shaderStorageTexelBufferArrayDynamicIndexing;
- shaderUniformBufferArrayNonUniformIndexing = src.shaderUniformBufferArrayNonUniformIndexing;
- shaderSampledImageArrayNonUniformIndexing = src.shaderSampledImageArrayNonUniformIndexing;
- shaderStorageBufferArrayNonUniformIndexing = src.shaderStorageBufferArrayNonUniformIndexing;
- shaderStorageImageArrayNonUniformIndexing = src.shaderStorageImageArrayNonUniformIndexing;
- shaderInputAttachmentArrayNonUniformIndexing = src.shaderInputAttachmentArrayNonUniformIndexing;
- shaderUniformTexelBufferArrayNonUniformIndexing = src.shaderUniformTexelBufferArrayNonUniformIndexing;
- shaderStorageTexelBufferArrayNonUniformIndexing = src.shaderStorageTexelBufferArrayNonUniformIndexing;
- descriptorBindingUniformBufferUpdateAfterBind = src.descriptorBindingUniformBufferUpdateAfterBind;
- descriptorBindingSampledImageUpdateAfterBind = src.descriptorBindingSampledImageUpdateAfterBind;
- descriptorBindingStorageImageUpdateAfterBind = src.descriptorBindingStorageImageUpdateAfterBind;
- descriptorBindingStorageBufferUpdateAfterBind = src.descriptorBindingStorageBufferUpdateAfterBind;
- descriptorBindingUniformTexelBufferUpdateAfterBind = src.descriptorBindingUniformTexelBufferUpdateAfterBind;
- descriptorBindingStorageTexelBufferUpdateAfterBind = src.descriptorBindingStorageTexelBufferUpdateAfterBind;
- descriptorBindingUpdateUnusedWhilePending = src.descriptorBindingUpdateUnusedWhilePending;
- descriptorBindingPartiallyBound = src.descriptorBindingPartiallyBound;
- descriptorBindingVariableDescriptorCount = src.descriptorBindingVariableDescriptorCount;
- runtimeDescriptorArray = src.runtimeDescriptorArray;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT::~safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT::initialize(const VkPhysicalDeviceDescriptorIndexingFeaturesEXT* in_struct)
-{
- sType = in_struct->sType;
- shaderInputAttachmentArrayDynamicIndexing = in_struct->shaderInputAttachmentArrayDynamicIndexing;
- shaderUniformTexelBufferArrayDynamicIndexing = in_struct->shaderUniformTexelBufferArrayDynamicIndexing;
- shaderStorageTexelBufferArrayDynamicIndexing = in_struct->shaderStorageTexelBufferArrayDynamicIndexing;
- shaderUniformBufferArrayNonUniformIndexing = in_struct->shaderUniformBufferArrayNonUniformIndexing;
- shaderSampledImageArrayNonUniformIndexing = in_struct->shaderSampledImageArrayNonUniformIndexing;
- shaderStorageBufferArrayNonUniformIndexing = in_struct->shaderStorageBufferArrayNonUniformIndexing;
- shaderStorageImageArrayNonUniformIndexing = in_struct->shaderStorageImageArrayNonUniformIndexing;
- shaderInputAttachmentArrayNonUniformIndexing = in_struct->shaderInputAttachmentArrayNonUniformIndexing;
- shaderUniformTexelBufferArrayNonUniformIndexing = in_struct->shaderUniformTexelBufferArrayNonUniformIndexing;
- shaderStorageTexelBufferArrayNonUniformIndexing = in_struct->shaderStorageTexelBufferArrayNonUniformIndexing;
- descriptorBindingUniformBufferUpdateAfterBind = in_struct->descriptorBindingUniformBufferUpdateAfterBind;
- descriptorBindingSampledImageUpdateAfterBind = in_struct->descriptorBindingSampledImageUpdateAfterBind;
- descriptorBindingStorageImageUpdateAfterBind = in_struct->descriptorBindingStorageImageUpdateAfterBind;
- descriptorBindingStorageBufferUpdateAfterBind = in_struct->descriptorBindingStorageBufferUpdateAfterBind;
- descriptorBindingUniformTexelBufferUpdateAfterBind = in_struct->descriptorBindingUniformTexelBufferUpdateAfterBind;
- descriptorBindingStorageTexelBufferUpdateAfterBind = in_struct->descriptorBindingStorageTexelBufferUpdateAfterBind;
- descriptorBindingUpdateUnusedWhilePending = in_struct->descriptorBindingUpdateUnusedWhilePending;
- descriptorBindingPartiallyBound = in_struct->descriptorBindingPartiallyBound;
- descriptorBindingVariableDescriptorCount = in_struct->descriptorBindingVariableDescriptorCount;
- runtimeDescriptorArray = in_struct->runtimeDescriptorArray;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT::initialize(const safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT* src)
-{
- sType = src->sType;
- shaderInputAttachmentArrayDynamicIndexing = src->shaderInputAttachmentArrayDynamicIndexing;
- shaderUniformTexelBufferArrayDynamicIndexing = src->shaderUniformTexelBufferArrayDynamicIndexing;
- shaderStorageTexelBufferArrayDynamicIndexing = src->shaderStorageTexelBufferArrayDynamicIndexing;
- shaderUniformBufferArrayNonUniformIndexing = src->shaderUniformBufferArrayNonUniformIndexing;
- shaderSampledImageArrayNonUniformIndexing = src->shaderSampledImageArrayNonUniformIndexing;
- shaderStorageBufferArrayNonUniformIndexing = src->shaderStorageBufferArrayNonUniformIndexing;
- shaderStorageImageArrayNonUniformIndexing = src->shaderStorageImageArrayNonUniformIndexing;
- shaderInputAttachmentArrayNonUniformIndexing = src->shaderInputAttachmentArrayNonUniformIndexing;
- shaderUniformTexelBufferArrayNonUniformIndexing = src->shaderUniformTexelBufferArrayNonUniformIndexing;
- shaderStorageTexelBufferArrayNonUniformIndexing = src->shaderStorageTexelBufferArrayNonUniformIndexing;
- descriptorBindingUniformBufferUpdateAfterBind = src->descriptorBindingUniformBufferUpdateAfterBind;
- descriptorBindingSampledImageUpdateAfterBind = src->descriptorBindingSampledImageUpdateAfterBind;
- descriptorBindingStorageImageUpdateAfterBind = src->descriptorBindingStorageImageUpdateAfterBind;
- descriptorBindingStorageBufferUpdateAfterBind = src->descriptorBindingStorageBufferUpdateAfterBind;
- descriptorBindingUniformTexelBufferUpdateAfterBind = src->descriptorBindingUniformTexelBufferUpdateAfterBind;
- descriptorBindingStorageTexelBufferUpdateAfterBind = src->descriptorBindingStorageTexelBufferUpdateAfterBind;
- descriptorBindingUpdateUnusedWhilePending = src->descriptorBindingUpdateUnusedWhilePending;
- descriptorBindingPartiallyBound = src->descriptorBindingPartiallyBound;
- descriptorBindingVariableDescriptorCount = src->descriptorBindingVariableDescriptorCount;
- runtimeDescriptorArray = src->runtimeDescriptorArray;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT::safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT(const VkPhysicalDeviceDescriptorIndexingPropertiesEXT* in_struct) :
- sType(in_struct->sType),
- maxUpdateAfterBindDescriptorsInAllPools(in_struct->maxUpdateAfterBindDescriptorsInAllPools),
- shaderUniformBufferArrayNonUniformIndexingNative(in_struct->shaderUniformBufferArrayNonUniformIndexingNative),
- shaderSampledImageArrayNonUniformIndexingNative(in_struct->shaderSampledImageArrayNonUniformIndexingNative),
- shaderStorageBufferArrayNonUniformIndexingNative(in_struct->shaderStorageBufferArrayNonUniformIndexingNative),
- shaderStorageImageArrayNonUniformIndexingNative(in_struct->shaderStorageImageArrayNonUniformIndexingNative),
- shaderInputAttachmentArrayNonUniformIndexingNative(in_struct->shaderInputAttachmentArrayNonUniformIndexingNative),
- robustBufferAccessUpdateAfterBind(in_struct->robustBufferAccessUpdateAfterBind),
- quadDivergentImplicitLod(in_struct->quadDivergentImplicitLod),
- maxPerStageDescriptorUpdateAfterBindSamplers(in_struct->maxPerStageDescriptorUpdateAfterBindSamplers),
- maxPerStageDescriptorUpdateAfterBindUniformBuffers(in_struct->maxPerStageDescriptorUpdateAfterBindUniformBuffers),
- maxPerStageDescriptorUpdateAfterBindStorageBuffers(in_struct->maxPerStageDescriptorUpdateAfterBindStorageBuffers),
- maxPerStageDescriptorUpdateAfterBindSampledImages(in_struct->maxPerStageDescriptorUpdateAfterBindSampledImages),
- maxPerStageDescriptorUpdateAfterBindStorageImages(in_struct->maxPerStageDescriptorUpdateAfterBindStorageImages),
- maxPerStageDescriptorUpdateAfterBindInputAttachments(in_struct->maxPerStageDescriptorUpdateAfterBindInputAttachments),
- maxPerStageUpdateAfterBindResources(in_struct->maxPerStageUpdateAfterBindResources),
- maxDescriptorSetUpdateAfterBindSamplers(in_struct->maxDescriptorSetUpdateAfterBindSamplers),
- maxDescriptorSetUpdateAfterBindUniformBuffers(in_struct->maxDescriptorSetUpdateAfterBindUniformBuffers),
- maxDescriptorSetUpdateAfterBindUniformBuffersDynamic(in_struct->maxDescriptorSetUpdateAfterBindUniformBuffersDynamic),
- maxDescriptorSetUpdateAfterBindStorageBuffers(in_struct->maxDescriptorSetUpdateAfterBindStorageBuffers),
- maxDescriptorSetUpdateAfterBindStorageBuffersDynamic(in_struct->maxDescriptorSetUpdateAfterBindStorageBuffersDynamic),
- maxDescriptorSetUpdateAfterBindSampledImages(in_struct->maxDescriptorSetUpdateAfterBindSampledImages),
- maxDescriptorSetUpdateAfterBindStorageImages(in_struct->maxDescriptorSetUpdateAfterBindStorageImages),
- maxDescriptorSetUpdateAfterBindInputAttachments(in_struct->maxDescriptorSetUpdateAfterBindInputAttachments)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT::safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT::safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT(const safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT& src)
-{
- sType = src.sType;
- maxUpdateAfterBindDescriptorsInAllPools = src.maxUpdateAfterBindDescriptorsInAllPools;
- shaderUniformBufferArrayNonUniformIndexingNative = src.shaderUniformBufferArrayNonUniformIndexingNative;
- shaderSampledImageArrayNonUniformIndexingNative = src.shaderSampledImageArrayNonUniformIndexingNative;
- shaderStorageBufferArrayNonUniformIndexingNative = src.shaderStorageBufferArrayNonUniformIndexingNative;
- shaderStorageImageArrayNonUniformIndexingNative = src.shaderStorageImageArrayNonUniformIndexingNative;
- shaderInputAttachmentArrayNonUniformIndexingNative = src.shaderInputAttachmentArrayNonUniformIndexingNative;
- robustBufferAccessUpdateAfterBind = src.robustBufferAccessUpdateAfterBind;
- quadDivergentImplicitLod = src.quadDivergentImplicitLod;
- maxPerStageDescriptorUpdateAfterBindSamplers = src.maxPerStageDescriptorUpdateAfterBindSamplers;
- maxPerStageDescriptorUpdateAfterBindUniformBuffers = src.maxPerStageDescriptorUpdateAfterBindUniformBuffers;
- maxPerStageDescriptorUpdateAfterBindStorageBuffers = src.maxPerStageDescriptorUpdateAfterBindStorageBuffers;
- maxPerStageDescriptorUpdateAfterBindSampledImages = src.maxPerStageDescriptorUpdateAfterBindSampledImages;
- maxPerStageDescriptorUpdateAfterBindStorageImages = src.maxPerStageDescriptorUpdateAfterBindStorageImages;
- maxPerStageDescriptorUpdateAfterBindInputAttachments = src.maxPerStageDescriptorUpdateAfterBindInputAttachments;
- maxPerStageUpdateAfterBindResources = src.maxPerStageUpdateAfterBindResources;
- maxDescriptorSetUpdateAfterBindSamplers = src.maxDescriptorSetUpdateAfterBindSamplers;
- maxDescriptorSetUpdateAfterBindUniformBuffers = src.maxDescriptorSetUpdateAfterBindUniformBuffers;
- maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = src.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic;
- maxDescriptorSetUpdateAfterBindStorageBuffers = src.maxDescriptorSetUpdateAfterBindStorageBuffers;
- maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = src.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic;
- maxDescriptorSetUpdateAfterBindSampledImages = src.maxDescriptorSetUpdateAfterBindSampledImages;
- maxDescriptorSetUpdateAfterBindStorageImages = src.maxDescriptorSetUpdateAfterBindStorageImages;
- maxDescriptorSetUpdateAfterBindInputAttachments = src.maxDescriptorSetUpdateAfterBindInputAttachments;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT& safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT::operator=(const safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- maxUpdateAfterBindDescriptorsInAllPools = src.maxUpdateAfterBindDescriptorsInAllPools;
- shaderUniformBufferArrayNonUniformIndexingNative = src.shaderUniformBufferArrayNonUniformIndexingNative;
- shaderSampledImageArrayNonUniformIndexingNative = src.shaderSampledImageArrayNonUniformIndexingNative;
- shaderStorageBufferArrayNonUniformIndexingNative = src.shaderStorageBufferArrayNonUniformIndexingNative;
- shaderStorageImageArrayNonUniformIndexingNative = src.shaderStorageImageArrayNonUniformIndexingNative;
- shaderInputAttachmentArrayNonUniformIndexingNative = src.shaderInputAttachmentArrayNonUniformIndexingNative;
- robustBufferAccessUpdateAfterBind = src.robustBufferAccessUpdateAfterBind;
- quadDivergentImplicitLod = src.quadDivergentImplicitLod;
- maxPerStageDescriptorUpdateAfterBindSamplers = src.maxPerStageDescriptorUpdateAfterBindSamplers;
- maxPerStageDescriptorUpdateAfterBindUniformBuffers = src.maxPerStageDescriptorUpdateAfterBindUniformBuffers;
- maxPerStageDescriptorUpdateAfterBindStorageBuffers = src.maxPerStageDescriptorUpdateAfterBindStorageBuffers;
- maxPerStageDescriptorUpdateAfterBindSampledImages = src.maxPerStageDescriptorUpdateAfterBindSampledImages;
- maxPerStageDescriptorUpdateAfterBindStorageImages = src.maxPerStageDescriptorUpdateAfterBindStorageImages;
- maxPerStageDescriptorUpdateAfterBindInputAttachments = src.maxPerStageDescriptorUpdateAfterBindInputAttachments;
- maxPerStageUpdateAfterBindResources = src.maxPerStageUpdateAfterBindResources;
- maxDescriptorSetUpdateAfterBindSamplers = src.maxDescriptorSetUpdateAfterBindSamplers;
- maxDescriptorSetUpdateAfterBindUniformBuffers = src.maxDescriptorSetUpdateAfterBindUniformBuffers;
- maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = src.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic;
- maxDescriptorSetUpdateAfterBindStorageBuffers = src.maxDescriptorSetUpdateAfterBindStorageBuffers;
- maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = src.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic;
- maxDescriptorSetUpdateAfterBindSampledImages = src.maxDescriptorSetUpdateAfterBindSampledImages;
- maxDescriptorSetUpdateAfterBindStorageImages = src.maxDescriptorSetUpdateAfterBindStorageImages;
- maxDescriptorSetUpdateAfterBindInputAttachments = src.maxDescriptorSetUpdateAfterBindInputAttachments;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT::~safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT::initialize(const VkPhysicalDeviceDescriptorIndexingPropertiesEXT* in_struct)
-{
- sType = in_struct->sType;
- maxUpdateAfterBindDescriptorsInAllPools = in_struct->maxUpdateAfterBindDescriptorsInAllPools;
- shaderUniformBufferArrayNonUniformIndexingNative = in_struct->shaderUniformBufferArrayNonUniformIndexingNative;
- shaderSampledImageArrayNonUniformIndexingNative = in_struct->shaderSampledImageArrayNonUniformIndexingNative;
- shaderStorageBufferArrayNonUniformIndexingNative = in_struct->shaderStorageBufferArrayNonUniformIndexingNative;
- shaderStorageImageArrayNonUniformIndexingNative = in_struct->shaderStorageImageArrayNonUniformIndexingNative;
- shaderInputAttachmentArrayNonUniformIndexingNative = in_struct->shaderInputAttachmentArrayNonUniformIndexingNative;
- robustBufferAccessUpdateAfterBind = in_struct->robustBufferAccessUpdateAfterBind;
- quadDivergentImplicitLod = in_struct->quadDivergentImplicitLod;
- maxPerStageDescriptorUpdateAfterBindSamplers = in_struct->maxPerStageDescriptorUpdateAfterBindSamplers;
- maxPerStageDescriptorUpdateAfterBindUniformBuffers = in_struct->maxPerStageDescriptorUpdateAfterBindUniformBuffers;
- maxPerStageDescriptorUpdateAfterBindStorageBuffers = in_struct->maxPerStageDescriptorUpdateAfterBindStorageBuffers;
- maxPerStageDescriptorUpdateAfterBindSampledImages = in_struct->maxPerStageDescriptorUpdateAfterBindSampledImages;
- maxPerStageDescriptorUpdateAfterBindStorageImages = in_struct->maxPerStageDescriptorUpdateAfterBindStorageImages;
- maxPerStageDescriptorUpdateAfterBindInputAttachments = in_struct->maxPerStageDescriptorUpdateAfterBindInputAttachments;
- maxPerStageUpdateAfterBindResources = in_struct->maxPerStageUpdateAfterBindResources;
- maxDescriptorSetUpdateAfterBindSamplers = in_struct->maxDescriptorSetUpdateAfterBindSamplers;
- maxDescriptorSetUpdateAfterBindUniformBuffers = in_struct->maxDescriptorSetUpdateAfterBindUniformBuffers;
- maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = in_struct->maxDescriptorSetUpdateAfterBindUniformBuffersDynamic;
- maxDescriptorSetUpdateAfterBindStorageBuffers = in_struct->maxDescriptorSetUpdateAfterBindStorageBuffers;
- maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = in_struct->maxDescriptorSetUpdateAfterBindStorageBuffersDynamic;
- maxDescriptorSetUpdateAfterBindSampledImages = in_struct->maxDescriptorSetUpdateAfterBindSampledImages;
- maxDescriptorSetUpdateAfterBindStorageImages = in_struct->maxDescriptorSetUpdateAfterBindStorageImages;
- maxDescriptorSetUpdateAfterBindInputAttachments = in_struct->maxDescriptorSetUpdateAfterBindInputAttachments;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT::initialize(const safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT* src)
-{
- sType = src->sType;
- maxUpdateAfterBindDescriptorsInAllPools = src->maxUpdateAfterBindDescriptorsInAllPools;
- shaderUniformBufferArrayNonUniformIndexingNative = src->shaderUniformBufferArrayNonUniformIndexingNative;
- shaderSampledImageArrayNonUniformIndexingNative = src->shaderSampledImageArrayNonUniformIndexingNative;
- shaderStorageBufferArrayNonUniformIndexingNative = src->shaderStorageBufferArrayNonUniformIndexingNative;
- shaderStorageImageArrayNonUniformIndexingNative = src->shaderStorageImageArrayNonUniformIndexingNative;
- shaderInputAttachmentArrayNonUniformIndexingNative = src->shaderInputAttachmentArrayNonUniformIndexingNative;
- robustBufferAccessUpdateAfterBind = src->robustBufferAccessUpdateAfterBind;
- quadDivergentImplicitLod = src->quadDivergentImplicitLod;
- maxPerStageDescriptorUpdateAfterBindSamplers = src->maxPerStageDescriptorUpdateAfterBindSamplers;
- maxPerStageDescriptorUpdateAfterBindUniformBuffers = src->maxPerStageDescriptorUpdateAfterBindUniformBuffers;
- maxPerStageDescriptorUpdateAfterBindStorageBuffers = src->maxPerStageDescriptorUpdateAfterBindStorageBuffers;
- maxPerStageDescriptorUpdateAfterBindSampledImages = src->maxPerStageDescriptorUpdateAfterBindSampledImages;
- maxPerStageDescriptorUpdateAfterBindStorageImages = src->maxPerStageDescriptorUpdateAfterBindStorageImages;
- maxPerStageDescriptorUpdateAfterBindInputAttachments = src->maxPerStageDescriptorUpdateAfterBindInputAttachments;
- maxPerStageUpdateAfterBindResources = src->maxPerStageUpdateAfterBindResources;
- maxDescriptorSetUpdateAfterBindSamplers = src->maxDescriptorSetUpdateAfterBindSamplers;
- maxDescriptorSetUpdateAfterBindUniformBuffers = src->maxDescriptorSetUpdateAfterBindUniformBuffers;
- maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = src->maxDescriptorSetUpdateAfterBindUniformBuffersDynamic;
- maxDescriptorSetUpdateAfterBindStorageBuffers = src->maxDescriptorSetUpdateAfterBindStorageBuffers;
- maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = src->maxDescriptorSetUpdateAfterBindStorageBuffersDynamic;
- maxDescriptorSetUpdateAfterBindSampledImages = src->maxDescriptorSetUpdateAfterBindSampledImages;
- maxDescriptorSetUpdateAfterBindStorageImages = src->maxDescriptorSetUpdateAfterBindStorageImages;
- maxDescriptorSetUpdateAfterBindInputAttachments = src->maxDescriptorSetUpdateAfterBindInputAttachments;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT::safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT(const VkDescriptorSetVariableDescriptorCountAllocateInfoEXT* in_struct) :
- sType(in_struct->sType),
- descriptorSetCount(in_struct->descriptorSetCount),
- pDescriptorCounts(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pDescriptorCounts) {
- pDescriptorCounts = new uint32_t[in_struct->descriptorSetCount];
- memcpy ((void *)pDescriptorCounts, (void *)in_struct->pDescriptorCounts, sizeof(uint32_t)*in_struct->descriptorSetCount);
- }
-}
-
-safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT::safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT() :
- pNext(nullptr),
- pDescriptorCounts(nullptr)
-{}
-
-safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT::safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT(const safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT& src)
-{
- sType = src.sType;
- descriptorSetCount = src.descriptorSetCount;
- pDescriptorCounts = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pDescriptorCounts) {
- pDescriptorCounts = new uint32_t[src.descriptorSetCount];
- memcpy ((void *)pDescriptorCounts, (void *)src.pDescriptorCounts, sizeof(uint32_t)*src.descriptorSetCount);
- }
-}
-
-safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT& safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT::operator=(const safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT& src)
-{
- if (&src == this) return *this;
-
- if (pDescriptorCounts)
- delete[] pDescriptorCounts;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- descriptorSetCount = src.descriptorSetCount;
- pDescriptorCounts = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pDescriptorCounts) {
- pDescriptorCounts = new uint32_t[src.descriptorSetCount];
- memcpy ((void *)pDescriptorCounts, (void *)src.pDescriptorCounts, sizeof(uint32_t)*src.descriptorSetCount);
- }
-
- return *this;
-}
-
-safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT::~safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT()
-{
- if (pDescriptorCounts)
- delete[] pDescriptorCounts;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT::initialize(const VkDescriptorSetVariableDescriptorCountAllocateInfoEXT* in_struct)
-{
- sType = in_struct->sType;
- descriptorSetCount = in_struct->descriptorSetCount;
- pDescriptorCounts = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pDescriptorCounts) {
- pDescriptorCounts = new uint32_t[in_struct->descriptorSetCount];
- memcpy ((void *)pDescriptorCounts, (void *)in_struct->pDescriptorCounts, sizeof(uint32_t)*in_struct->descriptorSetCount);
- }
-}
-
-void safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT::initialize(const safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT* src)
-{
- sType = src->sType;
- descriptorSetCount = src->descriptorSetCount;
- pDescriptorCounts = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (src->pDescriptorCounts) {
- pDescriptorCounts = new uint32_t[src->descriptorSetCount];
- memcpy ((void *)pDescriptorCounts, (void *)src->pDescriptorCounts, sizeof(uint32_t)*src->descriptorSetCount);
- }
-}
-
-safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT::safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT(const VkDescriptorSetVariableDescriptorCountLayoutSupportEXT* in_struct) :
- sType(in_struct->sType),
- maxVariableDescriptorCount(in_struct->maxVariableDescriptorCount)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT::safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT() :
- pNext(nullptr)
-{}
-
-safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT::safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT(const safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT& src)
-{
- sType = src.sType;
- maxVariableDescriptorCount = src.maxVariableDescriptorCount;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT& safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT::operator=(const safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- maxVariableDescriptorCount = src.maxVariableDescriptorCount;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT::~safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT::initialize(const VkDescriptorSetVariableDescriptorCountLayoutSupportEXT* in_struct)
-{
- sType = in_struct->sType;
- maxVariableDescriptorCount = in_struct->maxVariableDescriptorCount;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT::initialize(const safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT* src)
-{
- sType = src->sType;
- maxVariableDescriptorCount = src->maxVariableDescriptorCount;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkShadingRatePaletteNV::safe_VkShadingRatePaletteNV(const VkShadingRatePaletteNV* in_struct) :
- shadingRatePaletteEntryCount(in_struct->shadingRatePaletteEntryCount),
- pShadingRatePaletteEntries(nullptr)
-{
- if (in_struct->pShadingRatePaletteEntries) {
- pShadingRatePaletteEntries = new VkShadingRatePaletteEntryNV[in_struct->shadingRatePaletteEntryCount];
- memcpy ((void *)pShadingRatePaletteEntries, (void *)in_struct->pShadingRatePaletteEntries, sizeof(VkShadingRatePaletteEntryNV)*in_struct->shadingRatePaletteEntryCount);
- }
-}
-
-safe_VkShadingRatePaletteNV::safe_VkShadingRatePaletteNV() :
- pShadingRatePaletteEntries(nullptr)
-{}
-
-safe_VkShadingRatePaletteNV::safe_VkShadingRatePaletteNV(const safe_VkShadingRatePaletteNV& src)
-{
- shadingRatePaletteEntryCount = src.shadingRatePaletteEntryCount;
- pShadingRatePaletteEntries = nullptr;
- if (src.pShadingRatePaletteEntries) {
- pShadingRatePaletteEntries = new VkShadingRatePaletteEntryNV[src.shadingRatePaletteEntryCount];
- memcpy ((void *)pShadingRatePaletteEntries, (void *)src.pShadingRatePaletteEntries, sizeof(VkShadingRatePaletteEntryNV)*src.shadingRatePaletteEntryCount);
- }
-}
-
-safe_VkShadingRatePaletteNV& safe_VkShadingRatePaletteNV::operator=(const safe_VkShadingRatePaletteNV& src)
-{
- if (&src == this) return *this;
-
- if (pShadingRatePaletteEntries)
- delete[] pShadingRatePaletteEntries;
-
- shadingRatePaletteEntryCount = src.shadingRatePaletteEntryCount;
- pShadingRatePaletteEntries = nullptr;
- if (src.pShadingRatePaletteEntries) {
- pShadingRatePaletteEntries = new VkShadingRatePaletteEntryNV[src.shadingRatePaletteEntryCount];
- memcpy ((void *)pShadingRatePaletteEntries, (void *)src.pShadingRatePaletteEntries, sizeof(VkShadingRatePaletteEntryNV)*src.shadingRatePaletteEntryCount);
- }
-
- return *this;
-}
-
-safe_VkShadingRatePaletteNV::~safe_VkShadingRatePaletteNV()
-{
- if (pShadingRatePaletteEntries)
- delete[] pShadingRatePaletteEntries;
-}
-
-void safe_VkShadingRatePaletteNV::initialize(const VkShadingRatePaletteNV* in_struct)
-{
- shadingRatePaletteEntryCount = in_struct->shadingRatePaletteEntryCount;
- pShadingRatePaletteEntries = nullptr;
- if (in_struct->pShadingRatePaletteEntries) {
- pShadingRatePaletteEntries = new VkShadingRatePaletteEntryNV[in_struct->shadingRatePaletteEntryCount];
- memcpy ((void *)pShadingRatePaletteEntries, (void *)in_struct->pShadingRatePaletteEntries, sizeof(VkShadingRatePaletteEntryNV)*in_struct->shadingRatePaletteEntryCount);
- }
-}
-
-void safe_VkShadingRatePaletteNV::initialize(const safe_VkShadingRatePaletteNV* src)
-{
- shadingRatePaletteEntryCount = src->shadingRatePaletteEntryCount;
- pShadingRatePaletteEntries = nullptr;
- if (src->pShadingRatePaletteEntries) {
- pShadingRatePaletteEntries = new VkShadingRatePaletteEntryNV[src->shadingRatePaletteEntryCount];
- memcpy ((void *)pShadingRatePaletteEntries, (void *)src->pShadingRatePaletteEntries, sizeof(VkShadingRatePaletteEntryNV)*src->shadingRatePaletteEntryCount);
- }
-}
-
-safe_VkPipelineViewportShadingRateImageStateCreateInfoNV::safe_VkPipelineViewportShadingRateImageStateCreateInfoNV(const VkPipelineViewportShadingRateImageStateCreateInfoNV* in_struct) :
- sType(in_struct->sType),
- shadingRateImageEnable(in_struct->shadingRateImageEnable),
- viewportCount(in_struct->viewportCount),
- pShadingRatePalettes(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (viewportCount && in_struct->pShadingRatePalettes) {
- pShadingRatePalettes = new safe_VkShadingRatePaletteNV[viewportCount];
- for (uint32_t i = 0; i < viewportCount; ++i) {
- pShadingRatePalettes[i].initialize(&in_struct->pShadingRatePalettes[i]);
- }
- }
-}
-
-safe_VkPipelineViewportShadingRateImageStateCreateInfoNV::safe_VkPipelineViewportShadingRateImageStateCreateInfoNV() :
- pNext(nullptr),
- pShadingRatePalettes(nullptr)
-{}
-
-safe_VkPipelineViewportShadingRateImageStateCreateInfoNV::safe_VkPipelineViewportShadingRateImageStateCreateInfoNV(const safe_VkPipelineViewportShadingRateImageStateCreateInfoNV& src)
-{
- sType = src.sType;
- shadingRateImageEnable = src.shadingRateImageEnable;
- viewportCount = src.viewportCount;
- pShadingRatePalettes = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (viewportCount && src.pShadingRatePalettes) {
- pShadingRatePalettes = new safe_VkShadingRatePaletteNV[viewportCount];
- for (uint32_t i = 0; i < viewportCount; ++i) {
- pShadingRatePalettes[i].initialize(&src.pShadingRatePalettes[i]);
- }
- }
-}
-
-safe_VkPipelineViewportShadingRateImageStateCreateInfoNV& safe_VkPipelineViewportShadingRateImageStateCreateInfoNV::operator=(const safe_VkPipelineViewportShadingRateImageStateCreateInfoNV& src)
-{
- if (&src == this) return *this;
-
- if (pShadingRatePalettes)
- delete[] pShadingRatePalettes;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- shadingRateImageEnable = src.shadingRateImageEnable;
- viewportCount = src.viewportCount;
- pShadingRatePalettes = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (viewportCount && src.pShadingRatePalettes) {
- pShadingRatePalettes = new safe_VkShadingRatePaletteNV[viewportCount];
- for (uint32_t i = 0; i < viewportCount; ++i) {
- pShadingRatePalettes[i].initialize(&src.pShadingRatePalettes[i]);
- }
- }
-
- return *this;
-}
-
-safe_VkPipelineViewportShadingRateImageStateCreateInfoNV::~safe_VkPipelineViewportShadingRateImageStateCreateInfoNV()
-{
- if (pShadingRatePalettes)
- delete[] pShadingRatePalettes;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPipelineViewportShadingRateImageStateCreateInfoNV::initialize(const VkPipelineViewportShadingRateImageStateCreateInfoNV* in_struct)
-{
- sType = in_struct->sType;
- shadingRateImageEnable = in_struct->shadingRateImageEnable;
- viewportCount = in_struct->viewportCount;
- pShadingRatePalettes = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (viewportCount && in_struct->pShadingRatePalettes) {
- pShadingRatePalettes = new safe_VkShadingRatePaletteNV[viewportCount];
- for (uint32_t i = 0; i < viewportCount; ++i) {
- pShadingRatePalettes[i].initialize(&in_struct->pShadingRatePalettes[i]);
- }
- }
-}
-
-void safe_VkPipelineViewportShadingRateImageStateCreateInfoNV::initialize(const safe_VkPipelineViewportShadingRateImageStateCreateInfoNV* src)
-{
- sType = src->sType;
- shadingRateImageEnable = src->shadingRateImageEnable;
- viewportCount = src->viewportCount;
- pShadingRatePalettes = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (viewportCount && src->pShadingRatePalettes) {
- pShadingRatePalettes = new safe_VkShadingRatePaletteNV[viewportCount];
- for (uint32_t i = 0; i < viewportCount; ++i) {
- pShadingRatePalettes[i].initialize(&src->pShadingRatePalettes[i]);
- }
- }
-}
-
-safe_VkPhysicalDeviceShadingRateImageFeaturesNV::safe_VkPhysicalDeviceShadingRateImageFeaturesNV(const VkPhysicalDeviceShadingRateImageFeaturesNV* in_struct) :
- sType(in_struct->sType),
- shadingRateImage(in_struct->shadingRateImage),
- shadingRateCoarseSampleOrder(in_struct->shadingRateCoarseSampleOrder)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceShadingRateImageFeaturesNV::safe_VkPhysicalDeviceShadingRateImageFeaturesNV() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceShadingRateImageFeaturesNV::safe_VkPhysicalDeviceShadingRateImageFeaturesNV(const safe_VkPhysicalDeviceShadingRateImageFeaturesNV& src)
-{
- sType = src.sType;
- shadingRateImage = src.shadingRateImage;
- shadingRateCoarseSampleOrder = src.shadingRateCoarseSampleOrder;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceShadingRateImageFeaturesNV& safe_VkPhysicalDeviceShadingRateImageFeaturesNV::operator=(const safe_VkPhysicalDeviceShadingRateImageFeaturesNV& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- shadingRateImage = src.shadingRateImage;
- shadingRateCoarseSampleOrder = src.shadingRateCoarseSampleOrder;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceShadingRateImageFeaturesNV::~safe_VkPhysicalDeviceShadingRateImageFeaturesNV()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceShadingRateImageFeaturesNV::initialize(const VkPhysicalDeviceShadingRateImageFeaturesNV* in_struct)
-{
- sType = in_struct->sType;
- shadingRateImage = in_struct->shadingRateImage;
- shadingRateCoarseSampleOrder = in_struct->shadingRateCoarseSampleOrder;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceShadingRateImageFeaturesNV::initialize(const safe_VkPhysicalDeviceShadingRateImageFeaturesNV* src)
-{
- sType = src->sType;
- shadingRateImage = src->shadingRateImage;
- shadingRateCoarseSampleOrder = src->shadingRateCoarseSampleOrder;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceShadingRateImagePropertiesNV::safe_VkPhysicalDeviceShadingRateImagePropertiesNV(const VkPhysicalDeviceShadingRateImagePropertiesNV* in_struct) :
- sType(in_struct->sType),
- shadingRateTexelSize(in_struct->shadingRateTexelSize),
- shadingRatePaletteSize(in_struct->shadingRatePaletteSize),
- shadingRateMaxCoarseSamples(in_struct->shadingRateMaxCoarseSamples)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceShadingRateImagePropertiesNV::safe_VkPhysicalDeviceShadingRateImagePropertiesNV() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceShadingRateImagePropertiesNV::safe_VkPhysicalDeviceShadingRateImagePropertiesNV(const safe_VkPhysicalDeviceShadingRateImagePropertiesNV& src)
-{
- sType = src.sType;
- shadingRateTexelSize = src.shadingRateTexelSize;
- shadingRatePaletteSize = src.shadingRatePaletteSize;
- shadingRateMaxCoarseSamples = src.shadingRateMaxCoarseSamples;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceShadingRateImagePropertiesNV& safe_VkPhysicalDeviceShadingRateImagePropertiesNV::operator=(const safe_VkPhysicalDeviceShadingRateImagePropertiesNV& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- shadingRateTexelSize = src.shadingRateTexelSize;
- shadingRatePaletteSize = src.shadingRatePaletteSize;
- shadingRateMaxCoarseSamples = src.shadingRateMaxCoarseSamples;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceShadingRateImagePropertiesNV::~safe_VkPhysicalDeviceShadingRateImagePropertiesNV()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceShadingRateImagePropertiesNV::initialize(const VkPhysicalDeviceShadingRateImagePropertiesNV* in_struct)
-{
- sType = in_struct->sType;
- shadingRateTexelSize = in_struct->shadingRateTexelSize;
- shadingRatePaletteSize = in_struct->shadingRatePaletteSize;
- shadingRateMaxCoarseSamples = in_struct->shadingRateMaxCoarseSamples;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceShadingRateImagePropertiesNV::initialize(const safe_VkPhysicalDeviceShadingRateImagePropertiesNV* src)
-{
- sType = src->sType;
- shadingRateTexelSize = src->shadingRateTexelSize;
- shadingRatePaletteSize = src->shadingRatePaletteSize;
- shadingRateMaxCoarseSamples = src->shadingRateMaxCoarseSamples;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkCoarseSampleOrderCustomNV::safe_VkCoarseSampleOrderCustomNV(const VkCoarseSampleOrderCustomNV* in_struct) :
- shadingRate(in_struct->shadingRate),
- sampleCount(in_struct->sampleCount),
- sampleLocationCount(in_struct->sampleLocationCount),
- pSampleLocations(nullptr)
-{
- if (in_struct->pSampleLocations) {
- pSampleLocations = new VkCoarseSampleLocationNV[in_struct->sampleLocationCount];
- memcpy ((void *)pSampleLocations, (void *)in_struct->pSampleLocations, sizeof(VkCoarseSampleLocationNV)*in_struct->sampleLocationCount);
- }
-}
-
-safe_VkCoarseSampleOrderCustomNV::safe_VkCoarseSampleOrderCustomNV() :
- pSampleLocations(nullptr)
-{}
-
-safe_VkCoarseSampleOrderCustomNV::safe_VkCoarseSampleOrderCustomNV(const safe_VkCoarseSampleOrderCustomNV& src)
-{
- shadingRate = src.shadingRate;
- sampleCount = src.sampleCount;
- sampleLocationCount = src.sampleLocationCount;
- pSampleLocations = nullptr;
- if (src.pSampleLocations) {
- pSampleLocations = new VkCoarseSampleLocationNV[src.sampleLocationCount];
- memcpy ((void *)pSampleLocations, (void *)src.pSampleLocations, sizeof(VkCoarseSampleLocationNV)*src.sampleLocationCount);
- }
-}
-
-safe_VkCoarseSampleOrderCustomNV& safe_VkCoarseSampleOrderCustomNV::operator=(const safe_VkCoarseSampleOrderCustomNV& src)
-{
- if (&src == this) return *this;
-
- if (pSampleLocations)
- delete[] pSampleLocations;
-
- shadingRate = src.shadingRate;
- sampleCount = src.sampleCount;
- sampleLocationCount = src.sampleLocationCount;
- pSampleLocations = nullptr;
- if (src.pSampleLocations) {
- pSampleLocations = new VkCoarseSampleLocationNV[src.sampleLocationCount];
- memcpy ((void *)pSampleLocations, (void *)src.pSampleLocations, sizeof(VkCoarseSampleLocationNV)*src.sampleLocationCount);
- }
-
- return *this;
-}
-
-safe_VkCoarseSampleOrderCustomNV::~safe_VkCoarseSampleOrderCustomNV()
-{
- if (pSampleLocations)
- delete[] pSampleLocations;
-}
-
-void safe_VkCoarseSampleOrderCustomNV::initialize(const VkCoarseSampleOrderCustomNV* in_struct)
-{
- shadingRate = in_struct->shadingRate;
- sampleCount = in_struct->sampleCount;
- sampleLocationCount = in_struct->sampleLocationCount;
- pSampleLocations = nullptr;
- if (in_struct->pSampleLocations) {
- pSampleLocations = new VkCoarseSampleLocationNV[in_struct->sampleLocationCount];
- memcpy ((void *)pSampleLocations, (void *)in_struct->pSampleLocations, sizeof(VkCoarseSampleLocationNV)*in_struct->sampleLocationCount);
- }
-}
-
-void safe_VkCoarseSampleOrderCustomNV::initialize(const safe_VkCoarseSampleOrderCustomNV* src)
-{
- shadingRate = src->shadingRate;
- sampleCount = src->sampleCount;
- sampleLocationCount = src->sampleLocationCount;
- pSampleLocations = nullptr;
- if (src->pSampleLocations) {
- pSampleLocations = new VkCoarseSampleLocationNV[src->sampleLocationCount];
- memcpy ((void *)pSampleLocations, (void *)src->pSampleLocations, sizeof(VkCoarseSampleLocationNV)*src->sampleLocationCount);
- }
-}
-
-safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV::safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV(const VkPipelineViewportCoarseSampleOrderStateCreateInfoNV* in_struct) :
- sType(in_struct->sType),
- sampleOrderType(in_struct->sampleOrderType),
- customSampleOrderCount(in_struct->customSampleOrderCount),
- pCustomSampleOrders(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (customSampleOrderCount && in_struct->pCustomSampleOrders) {
- pCustomSampleOrders = new safe_VkCoarseSampleOrderCustomNV[customSampleOrderCount];
- for (uint32_t i = 0; i < customSampleOrderCount; ++i) {
- pCustomSampleOrders[i].initialize(&in_struct->pCustomSampleOrders[i]);
- }
- }
-}
-
-safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV::safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV() :
- pNext(nullptr),
- pCustomSampleOrders(nullptr)
-{}
-
-safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV::safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV(const safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV& src)
-{
- sType = src.sType;
- sampleOrderType = src.sampleOrderType;
- customSampleOrderCount = src.customSampleOrderCount;
- pCustomSampleOrders = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (customSampleOrderCount && src.pCustomSampleOrders) {
- pCustomSampleOrders = new safe_VkCoarseSampleOrderCustomNV[customSampleOrderCount];
- for (uint32_t i = 0; i < customSampleOrderCount; ++i) {
- pCustomSampleOrders[i].initialize(&src.pCustomSampleOrders[i]);
- }
- }
-}
-
-safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV& safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV::operator=(const safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV& src)
-{
- if (&src == this) return *this;
-
- if (pCustomSampleOrders)
- delete[] pCustomSampleOrders;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- sampleOrderType = src.sampleOrderType;
- customSampleOrderCount = src.customSampleOrderCount;
- pCustomSampleOrders = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (customSampleOrderCount && src.pCustomSampleOrders) {
- pCustomSampleOrders = new safe_VkCoarseSampleOrderCustomNV[customSampleOrderCount];
- for (uint32_t i = 0; i < customSampleOrderCount; ++i) {
- pCustomSampleOrders[i].initialize(&src.pCustomSampleOrders[i]);
- }
- }
-
- return *this;
-}
-
-safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV::~safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV()
-{
- if (pCustomSampleOrders)
- delete[] pCustomSampleOrders;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV::initialize(const VkPipelineViewportCoarseSampleOrderStateCreateInfoNV* in_struct)
-{
- sType = in_struct->sType;
- sampleOrderType = in_struct->sampleOrderType;
- customSampleOrderCount = in_struct->customSampleOrderCount;
- pCustomSampleOrders = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (customSampleOrderCount && in_struct->pCustomSampleOrders) {
- pCustomSampleOrders = new safe_VkCoarseSampleOrderCustomNV[customSampleOrderCount];
- for (uint32_t i = 0; i < customSampleOrderCount; ++i) {
- pCustomSampleOrders[i].initialize(&in_struct->pCustomSampleOrders[i]);
- }
- }
-}
-
-void safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV::initialize(const safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV* src)
-{
- sType = src->sType;
- sampleOrderType = src->sampleOrderType;
- customSampleOrderCount = src->customSampleOrderCount;
- pCustomSampleOrders = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (customSampleOrderCount && src->pCustomSampleOrders) {
- pCustomSampleOrders = new safe_VkCoarseSampleOrderCustomNV[customSampleOrderCount];
- for (uint32_t i = 0; i < customSampleOrderCount; ++i) {
- pCustomSampleOrders[i].initialize(&src->pCustomSampleOrders[i]);
- }
- }
-}
-
-safe_VkRayTracingShaderGroupCreateInfoNV::safe_VkRayTracingShaderGroupCreateInfoNV(const VkRayTracingShaderGroupCreateInfoNV* in_struct) :
- sType(in_struct->sType),
- type(in_struct->type),
- generalShader(in_struct->generalShader),
- closestHitShader(in_struct->closestHitShader),
- anyHitShader(in_struct->anyHitShader),
- intersectionShader(in_struct->intersectionShader)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkRayTracingShaderGroupCreateInfoNV::safe_VkRayTracingShaderGroupCreateInfoNV() :
- pNext(nullptr)
-{}
-
-safe_VkRayTracingShaderGroupCreateInfoNV::safe_VkRayTracingShaderGroupCreateInfoNV(const safe_VkRayTracingShaderGroupCreateInfoNV& src)
-{
- sType = src.sType;
- type = src.type;
- generalShader = src.generalShader;
- closestHitShader = src.closestHitShader;
- anyHitShader = src.anyHitShader;
- intersectionShader = src.intersectionShader;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkRayTracingShaderGroupCreateInfoNV& safe_VkRayTracingShaderGroupCreateInfoNV::operator=(const safe_VkRayTracingShaderGroupCreateInfoNV& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- type = src.type;
- generalShader = src.generalShader;
- closestHitShader = src.closestHitShader;
- anyHitShader = src.anyHitShader;
- intersectionShader = src.intersectionShader;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkRayTracingShaderGroupCreateInfoNV::~safe_VkRayTracingShaderGroupCreateInfoNV()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkRayTracingShaderGroupCreateInfoNV::initialize(const VkRayTracingShaderGroupCreateInfoNV* in_struct)
-{
- sType = in_struct->sType;
- type = in_struct->type;
- generalShader = in_struct->generalShader;
- closestHitShader = in_struct->closestHitShader;
- anyHitShader = in_struct->anyHitShader;
- intersectionShader = in_struct->intersectionShader;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkRayTracingShaderGroupCreateInfoNV::initialize(const safe_VkRayTracingShaderGroupCreateInfoNV* src)
-{
- sType = src->sType;
- type = src->type;
- generalShader = src->generalShader;
- closestHitShader = src->closestHitShader;
- anyHitShader = src->anyHitShader;
- intersectionShader = src->intersectionShader;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkRayTracingPipelineCreateInfoNV::safe_VkRayTracingPipelineCreateInfoNV(const VkRayTracingPipelineCreateInfoNV* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- stageCount(in_struct->stageCount),
- pStages(nullptr),
- groupCount(in_struct->groupCount),
- pGroups(nullptr),
- maxRecursionDepth(in_struct->maxRecursionDepth),
- layout(in_struct->layout),
- basePipelineHandle(in_struct->basePipelineHandle),
- basePipelineIndex(in_struct->basePipelineIndex)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (stageCount && in_struct->pStages) {
- pStages = new safe_VkPipelineShaderStageCreateInfo[stageCount];
- for (uint32_t i = 0; i < stageCount; ++i) {
- pStages[i].initialize(&in_struct->pStages[i]);
- }
- }
- if (groupCount && in_struct->pGroups) {
- pGroups = new safe_VkRayTracingShaderGroupCreateInfoNV[groupCount];
- for (uint32_t i = 0; i < groupCount; ++i) {
- pGroups[i].initialize(&in_struct->pGroups[i]);
- }
- }
-}
-
-safe_VkRayTracingPipelineCreateInfoNV::safe_VkRayTracingPipelineCreateInfoNV() :
- pNext(nullptr),
- pStages(nullptr),
- pGroups(nullptr)
-{}
-
-safe_VkRayTracingPipelineCreateInfoNV::safe_VkRayTracingPipelineCreateInfoNV(const safe_VkRayTracingPipelineCreateInfoNV& src)
-{
- sType = src.sType;
- flags = src.flags;
- stageCount = src.stageCount;
- pStages = nullptr;
- groupCount = src.groupCount;
- pGroups = nullptr;
- maxRecursionDepth = src.maxRecursionDepth;
- layout = src.layout;
- basePipelineHandle = src.basePipelineHandle;
- basePipelineIndex = src.basePipelineIndex;
- pNext = SafePnextCopy(src.pNext);
- if (stageCount && src.pStages) {
- pStages = new safe_VkPipelineShaderStageCreateInfo[stageCount];
- for (uint32_t i = 0; i < stageCount; ++i) {
- pStages[i].initialize(&src.pStages[i]);
- }
- }
- if (groupCount && src.pGroups) {
- pGroups = new safe_VkRayTracingShaderGroupCreateInfoNV[groupCount];
- for (uint32_t i = 0; i < groupCount; ++i) {
- pGroups[i].initialize(&src.pGroups[i]);
- }
- }
-}
-
-safe_VkRayTracingPipelineCreateInfoNV& safe_VkRayTracingPipelineCreateInfoNV::operator=(const safe_VkRayTracingPipelineCreateInfoNV& src)
-{
- if (&src == this) return *this;
-
- if (pStages)
- delete[] pStages;
- if (pGroups)
- delete[] pGroups;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- stageCount = src.stageCount;
- pStages = nullptr;
- groupCount = src.groupCount;
- pGroups = nullptr;
- maxRecursionDepth = src.maxRecursionDepth;
- layout = src.layout;
- basePipelineHandle = src.basePipelineHandle;
- basePipelineIndex = src.basePipelineIndex;
- pNext = SafePnextCopy(src.pNext);
- if (stageCount && src.pStages) {
- pStages = new safe_VkPipelineShaderStageCreateInfo[stageCount];
- for (uint32_t i = 0; i < stageCount; ++i) {
- pStages[i].initialize(&src.pStages[i]);
- }
- }
- if (groupCount && src.pGroups) {
- pGroups = new safe_VkRayTracingShaderGroupCreateInfoNV[groupCount];
- for (uint32_t i = 0; i < groupCount; ++i) {
- pGroups[i].initialize(&src.pGroups[i]);
- }
- }
-
- return *this;
-}
-
-safe_VkRayTracingPipelineCreateInfoNV::~safe_VkRayTracingPipelineCreateInfoNV()
-{
- if (pStages)
- delete[] pStages;
- if (pGroups)
- delete[] pGroups;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkRayTracingPipelineCreateInfoNV::initialize(const VkRayTracingPipelineCreateInfoNV* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- stageCount = in_struct->stageCount;
- pStages = nullptr;
- groupCount = in_struct->groupCount;
- pGroups = nullptr;
- maxRecursionDepth = in_struct->maxRecursionDepth;
- layout = in_struct->layout;
- basePipelineHandle = in_struct->basePipelineHandle;
- basePipelineIndex = in_struct->basePipelineIndex;
- pNext = SafePnextCopy(in_struct->pNext);
- if (stageCount && in_struct->pStages) {
- pStages = new safe_VkPipelineShaderStageCreateInfo[stageCount];
- for (uint32_t i = 0; i < stageCount; ++i) {
- pStages[i].initialize(&in_struct->pStages[i]);
- }
- }
- if (groupCount && in_struct->pGroups) {
- pGroups = new safe_VkRayTracingShaderGroupCreateInfoNV[groupCount];
- for (uint32_t i = 0; i < groupCount; ++i) {
- pGroups[i].initialize(&in_struct->pGroups[i]);
- }
- }
-}
-
-void safe_VkRayTracingPipelineCreateInfoNV::initialize(const safe_VkRayTracingPipelineCreateInfoNV* src)
-{
- sType = src->sType;
- flags = src->flags;
- stageCount = src->stageCount;
- pStages = nullptr;
- groupCount = src->groupCount;
- pGroups = nullptr;
- maxRecursionDepth = src->maxRecursionDepth;
- layout = src->layout;
- basePipelineHandle = src->basePipelineHandle;
- basePipelineIndex = src->basePipelineIndex;
- pNext = SafePnextCopy(src->pNext);
- if (stageCount && src->pStages) {
- pStages = new safe_VkPipelineShaderStageCreateInfo[stageCount];
- for (uint32_t i = 0; i < stageCount; ++i) {
- pStages[i].initialize(&src->pStages[i]);
- }
- }
- if (groupCount && src->pGroups) {
- pGroups = new safe_VkRayTracingShaderGroupCreateInfoNV[groupCount];
- for (uint32_t i = 0; i < groupCount; ++i) {
- pGroups[i].initialize(&src->pGroups[i]);
- }
- }
-}
-
-safe_VkGeometryTrianglesNV::safe_VkGeometryTrianglesNV(const VkGeometryTrianglesNV* in_struct) :
- sType(in_struct->sType),
- vertexData(in_struct->vertexData),
- vertexOffset(in_struct->vertexOffset),
- vertexCount(in_struct->vertexCount),
- vertexStride(in_struct->vertexStride),
- vertexFormat(in_struct->vertexFormat),
- indexData(in_struct->indexData),
- indexOffset(in_struct->indexOffset),
- indexCount(in_struct->indexCount),
- indexType(in_struct->indexType),
- transformData(in_struct->transformData),
- transformOffset(in_struct->transformOffset)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkGeometryTrianglesNV::safe_VkGeometryTrianglesNV() :
- pNext(nullptr)
-{}
-
-safe_VkGeometryTrianglesNV::safe_VkGeometryTrianglesNV(const safe_VkGeometryTrianglesNV& src)
-{
- sType = src.sType;
- vertexData = src.vertexData;
- vertexOffset = src.vertexOffset;
- vertexCount = src.vertexCount;
- vertexStride = src.vertexStride;
- vertexFormat = src.vertexFormat;
- indexData = src.indexData;
- indexOffset = src.indexOffset;
- indexCount = src.indexCount;
- indexType = src.indexType;
- transformData = src.transformData;
- transformOffset = src.transformOffset;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkGeometryTrianglesNV& safe_VkGeometryTrianglesNV::operator=(const safe_VkGeometryTrianglesNV& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- vertexData = src.vertexData;
- vertexOffset = src.vertexOffset;
- vertexCount = src.vertexCount;
- vertexStride = src.vertexStride;
- vertexFormat = src.vertexFormat;
- indexData = src.indexData;
- indexOffset = src.indexOffset;
- indexCount = src.indexCount;
- indexType = src.indexType;
- transformData = src.transformData;
- transformOffset = src.transformOffset;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkGeometryTrianglesNV::~safe_VkGeometryTrianglesNV()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkGeometryTrianglesNV::initialize(const VkGeometryTrianglesNV* in_struct)
-{
- sType = in_struct->sType;
- vertexData = in_struct->vertexData;
- vertexOffset = in_struct->vertexOffset;
- vertexCount = in_struct->vertexCount;
- vertexStride = in_struct->vertexStride;
- vertexFormat = in_struct->vertexFormat;
- indexData = in_struct->indexData;
- indexOffset = in_struct->indexOffset;
- indexCount = in_struct->indexCount;
- indexType = in_struct->indexType;
- transformData = in_struct->transformData;
- transformOffset = in_struct->transformOffset;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkGeometryTrianglesNV::initialize(const safe_VkGeometryTrianglesNV* src)
-{
- sType = src->sType;
- vertexData = src->vertexData;
- vertexOffset = src->vertexOffset;
- vertexCount = src->vertexCount;
- vertexStride = src->vertexStride;
- vertexFormat = src->vertexFormat;
- indexData = src->indexData;
- indexOffset = src->indexOffset;
- indexCount = src->indexCount;
- indexType = src->indexType;
- transformData = src->transformData;
- transformOffset = src->transformOffset;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkGeometryAABBNV::safe_VkGeometryAABBNV(const VkGeometryAABBNV* in_struct) :
- sType(in_struct->sType),
- aabbData(in_struct->aabbData),
- numAABBs(in_struct->numAABBs),
- stride(in_struct->stride),
- offset(in_struct->offset)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkGeometryAABBNV::safe_VkGeometryAABBNV() :
- pNext(nullptr)
-{}
-
-safe_VkGeometryAABBNV::safe_VkGeometryAABBNV(const safe_VkGeometryAABBNV& src)
-{
- sType = src.sType;
- aabbData = src.aabbData;
- numAABBs = src.numAABBs;
- stride = src.stride;
- offset = src.offset;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkGeometryAABBNV& safe_VkGeometryAABBNV::operator=(const safe_VkGeometryAABBNV& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- aabbData = src.aabbData;
- numAABBs = src.numAABBs;
- stride = src.stride;
- offset = src.offset;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkGeometryAABBNV::~safe_VkGeometryAABBNV()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkGeometryAABBNV::initialize(const VkGeometryAABBNV* in_struct)
-{
- sType = in_struct->sType;
- aabbData = in_struct->aabbData;
- numAABBs = in_struct->numAABBs;
- stride = in_struct->stride;
- offset = in_struct->offset;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkGeometryAABBNV::initialize(const safe_VkGeometryAABBNV* src)
-{
- sType = src->sType;
- aabbData = src->aabbData;
- numAABBs = src->numAABBs;
- stride = src->stride;
- offset = src->offset;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkGeometryNV::safe_VkGeometryNV(const VkGeometryNV* in_struct) :
- sType(in_struct->sType),
- geometryType(in_struct->geometryType),
- geometry(in_struct->geometry),
- flags(in_struct->flags)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkGeometryNV::safe_VkGeometryNV() :
- pNext(nullptr)
-{}
-
-safe_VkGeometryNV::safe_VkGeometryNV(const safe_VkGeometryNV& src)
-{
- sType = src.sType;
- geometryType = src.geometryType;
- geometry = src.geometry;
- flags = src.flags;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkGeometryNV& safe_VkGeometryNV::operator=(const safe_VkGeometryNV& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- geometryType = src.geometryType;
- geometry = src.geometry;
- flags = src.flags;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkGeometryNV::~safe_VkGeometryNV()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkGeometryNV::initialize(const VkGeometryNV* in_struct)
-{
- sType = in_struct->sType;
- geometryType = in_struct->geometryType;
- geometry = in_struct->geometry;
- flags = in_struct->flags;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkGeometryNV::initialize(const safe_VkGeometryNV* src)
-{
- sType = src->sType;
- geometryType = src->geometryType;
- geometry = src->geometry;
- flags = src->flags;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkAccelerationStructureInfoNV::safe_VkAccelerationStructureInfoNV(const VkAccelerationStructureInfoNV* in_struct) :
- sType(in_struct->sType),
- type(in_struct->type),
- flags(in_struct->flags),
- instanceCount(in_struct->instanceCount),
- geometryCount(in_struct->geometryCount),
- pGeometries(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (geometryCount && in_struct->pGeometries) {
- pGeometries = new safe_VkGeometryNV[geometryCount];
- for (uint32_t i = 0; i < geometryCount; ++i) {
- pGeometries[i].initialize(&in_struct->pGeometries[i]);
- }
- }
-}
-
-safe_VkAccelerationStructureInfoNV::safe_VkAccelerationStructureInfoNV() :
- pNext(nullptr),
- pGeometries(nullptr)
-{}
-
-safe_VkAccelerationStructureInfoNV::safe_VkAccelerationStructureInfoNV(const safe_VkAccelerationStructureInfoNV& src)
-{
- sType = src.sType;
- type = src.type;
- flags = src.flags;
- instanceCount = src.instanceCount;
- geometryCount = src.geometryCount;
- pGeometries = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (geometryCount && src.pGeometries) {
- pGeometries = new safe_VkGeometryNV[geometryCount];
- for (uint32_t i = 0; i < geometryCount; ++i) {
- pGeometries[i].initialize(&src.pGeometries[i]);
- }
- }
-}
-
-safe_VkAccelerationStructureInfoNV& safe_VkAccelerationStructureInfoNV::operator=(const safe_VkAccelerationStructureInfoNV& src)
-{
- if (&src == this) return *this;
-
- if (pGeometries)
- delete[] pGeometries;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- type = src.type;
- flags = src.flags;
- instanceCount = src.instanceCount;
- geometryCount = src.geometryCount;
- pGeometries = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (geometryCount && src.pGeometries) {
- pGeometries = new safe_VkGeometryNV[geometryCount];
- for (uint32_t i = 0; i < geometryCount; ++i) {
- pGeometries[i].initialize(&src.pGeometries[i]);
- }
- }
-
- return *this;
-}
-
-safe_VkAccelerationStructureInfoNV::~safe_VkAccelerationStructureInfoNV()
-{
- if (pGeometries)
- delete[] pGeometries;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkAccelerationStructureInfoNV::initialize(const VkAccelerationStructureInfoNV* in_struct)
-{
- sType = in_struct->sType;
- type = in_struct->type;
- flags = in_struct->flags;
- instanceCount = in_struct->instanceCount;
- geometryCount = in_struct->geometryCount;
- pGeometries = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (geometryCount && in_struct->pGeometries) {
- pGeometries = new safe_VkGeometryNV[geometryCount];
- for (uint32_t i = 0; i < geometryCount; ++i) {
- pGeometries[i].initialize(&in_struct->pGeometries[i]);
- }
- }
-}
-
-void safe_VkAccelerationStructureInfoNV::initialize(const safe_VkAccelerationStructureInfoNV* src)
-{
- sType = src->sType;
- type = src->type;
- flags = src->flags;
- instanceCount = src->instanceCount;
- geometryCount = src->geometryCount;
- pGeometries = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (geometryCount && src->pGeometries) {
- pGeometries = new safe_VkGeometryNV[geometryCount];
- for (uint32_t i = 0; i < geometryCount; ++i) {
- pGeometries[i].initialize(&src->pGeometries[i]);
- }
- }
-}
-
-safe_VkAccelerationStructureCreateInfoNV::safe_VkAccelerationStructureCreateInfoNV(const VkAccelerationStructureCreateInfoNV* in_struct) :
- sType(in_struct->sType),
- compactedSize(in_struct->compactedSize),
- info(&in_struct->info)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkAccelerationStructureCreateInfoNV::safe_VkAccelerationStructureCreateInfoNV() :
- pNext(nullptr)
-{}
-
-safe_VkAccelerationStructureCreateInfoNV::safe_VkAccelerationStructureCreateInfoNV(const safe_VkAccelerationStructureCreateInfoNV& src)
-{
- sType = src.sType;
- compactedSize = src.compactedSize;
- info.initialize(&src.info);
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkAccelerationStructureCreateInfoNV& safe_VkAccelerationStructureCreateInfoNV::operator=(const safe_VkAccelerationStructureCreateInfoNV& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- compactedSize = src.compactedSize;
- info.initialize(&src.info);
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkAccelerationStructureCreateInfoNV::~safe_VkAccelerationStructureCreateInfoNV()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkAccelerationStructureCreateInfoNV::initialize(const VkAccelerationStructureCreateInfoNV* in_struct)
-{
- sType = in_struct->sType;
- compactedSize = in_struct->compactedSize;
- info.initialize(&in_struct->info);
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkAccelerationStructureCreateInfoNV::initialize(const safe_VkAccelerationStructureCreateInfoNV* src)
-{
- sType = src->sType;
- compactedSize = src->compactedSize;
- info.initialize(&src->info);
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkBindAccelerationStructureMemoryInfoNV::safe_VkBindAccelerationStructureMemoryInfoNV(const VkBindAccelerationStructureMemoryInfoNV* in_struct) :
- sType(in_struct->sType),
- accelerationStructure(in_struct->accelerationStructure),
- memory(in_struct->memory),
- memoryOffset(in_struct->memoryOffset),
- deviceIndexCount(in_struct->deviceIndexCount),
- pDeviceIndices(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pDeviceIndices) {
- pDeviceIndices = new uint32_t[in_struct->deviceIndexCount];
- memcpy ((void *)pDeviceIndices, (void *)in_struct->pDeviceIndices, sizeof(uint32_t)*in_struct->deviceIndexCount);
- }
-}
-
-safe_VkBindAccelerationStructureMemoryInfoNV::safe_VkBindAccelerationStructureMemoryInfoNV() :
- pNext(nullptr),
- pDeviceIndices(nullptr)
-{}
-
-safe_VkBindAccelerationStructureMemoryInfoNV::safe_VkBindAccelerationStructureMemoryInfoNV(const safe_VkBindAccelerationStructureMemoryInfoNV& src)
-{
- sType = src.sType;
- accelerationStructure = src.accelerationStructure;
- memory = src.memory;
- memoryOffset = src.memoryOffset;
- deviceIndexCount = src.deviceIndexCount;
- pDeviceIndices = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pDeviceIndices) {
- pDeviceIndices = new uint32_t[src.deviceIndexCount];
- memcpy ((void *)pDeviceIndices, (void *)src.pDeviceIndices, sizeof(uint32_t)*src.deviceIndexCount);
- }
-}
-
-safe_VkBindAccelerationStructureMemoryInfoNV& safe_VkBindAccelerationStructureMemoryInfoNV::operator=(const safe_VkBindAccelerationStructureMemoryInfoNV& src)
-{
- if (&src == this) return *this;
-
- if (pDeviceIndices)
- delete[] pDeviceIndices;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- accelerationStructure = src.accelerationStructure;
- memory = src.memory;
- memoryOffset = src.memoryOffset;
- deviceIndexCount = src.deviceIndexCount;
- pDeviceIndices = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pDeviceIndices) {
- pDeviceIndices = new uint32_t[src.deviceIndexCount];
- memcpy ((void *)pDeviceIndices, (void *)src.pDeviceIndices, sizeof(uint32_t)*src.deviceIndexCount);
- }
-
- return *this;
-}
-
-safe_VkBindAccelerationStructureMemoryInfoNV::~safe_VkBindAccelerationStructureMemoryInfoNV()
-{
- if (pDeviceIndices)
- delete[] pDeviceIndices;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkBindAccelerationStructureMemoryInfoNV::initialize(const VkBindAccelerationStructureMemoryInfoNV* in_struct)
-{
- sType = in_struct->sType;
- accelerationStructure = in_struct->accelerationStructure;
- memory = in_struct->memory;
- memoryOffset = in_struct->memoryOffset;
- deviceIndexCount = in_struct->deviceIndexCount;
- pDeviceIndices = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pDeviceIndices) {
- pDeviceIndices = new uint32_t[in_struct->deviceIndexCount];
- memcpy ((void *)pDeviceIndices, (void *)in_struct->pDeviceIndices, sizeof(uint32_t)*in_struct->deviceIndexCount);
- }
-}
-
-void safe_VkBindAccelerationStructureMemoryInfoNV::initialize(const safe_VkBindAccelerationStructureMemoryInfoNV* src)
-{
- sType = src->sType;
- accelerationStructure = src->accelerationStructure;
- memory = src->memory;
- memoryOffset = src->memoryOffset;
- deviceIndexCount = src->deviceIndexCount;
- pDeviceIndices = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (src->pDeviceIndices) {
- pDeviceIndices = new uint32_t[src->deviceIndexCount];
- memcpy ((void *)pDeviceIndices, (void *)src->pDeviceIndices, sizeof(uint32_t)*src->deviceIndexCount);
- }
-}
-
-safe_VkWriteDescriptorSetAccelerationStructureNV::safe_VkWriteDescriptorSetAccelerationStructureNV(const VkWriteDescriptorSetAccelerationStructureNV* in_struct) :
- sType(in_struct->sType),
- accelerationStructureCount(in_struct->accelerationStructureCount),
- pAccelerationStructures(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (accelerationStructureCount && in_struct->pAccelerationStructures) {
- pAccelerationStructures = new VkAccelerationStructureNV[accelerationStructureCount];
- for (uint32_t i = 0; i < accelerationStructureCount; ++i) {
- pAccelerationStructures[i] = in_struct->pAccelerationStructures[i];
- }
- }
-}
-
-safe_VkWriteDescriptorSetAccelerationStructureNV::safe_VkWriteDescriptorSetAccelerationStructureNV() :
- pNext(nullptr),
- pAccelerationStructures(nullptr)
-{}
-
-safe_VkWriteDescriptorSetAccelerationStructureNV::safe_VkWriteDescriptorSetAccelerationStructureNV(const safe_VkWriteDescriptorSetAccelerationStructureNV& src)
-{
- sType = src.sType;
- accelerationStructureCount = src.accelerationStructureCount;
- pAccelerationStructures = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (accelerationStructureCount && src.pAccelerationStructures) {
- pAccelerationStructures = new VkAccelerationStructureNV[accelerationStructureCount];
- for (uint32_t i = 0; i < accelerationStructureCount; ++i) {
- pAccelerationStructures[i] = src.pAccelerationStructures[i];
- }
- }
-}
-
-safe_VkWriteDescriptorSetAccelerationStructureNV& safe_VkWriteDescriptorSetAccelerationStructureNV::operator=(const safe_VkWriteDescriptorSetAccelerationStructureNV& src)
-{
- if (&src == this) return *this;
-
- if (pAccelerationStructures)
- delete[] pAccelerationStructures;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- accelerationStructureCount = src.accelerationStructureCount;
- pAccelerationStructures = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (accelerationStructureCount && src.pAccelerationStructures) {
- pAccelerationStructures = new VkAccelerationStructureNV[accelerationStructureCount];
- for (uint32_t i = 0; i < accelerationStructureCount; ++i) {
- pAccelerationStructures[i] = src.pAccelerationStructures[i];
- }
- }
-
- return *this;
-}
-
-safe_VkWriteDescriptorSetAccelerationStructureNV::~safe_VkWriteDescriptorSetAccelerationStructureNV()
-{
- if (pAccelerationStructures)
- delete[] pAccelerationStructures;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkWriteDescriptorSetAccelerationStructureNV::initialize(const VkWriteDescriptorSetAccelerationStructureNV* in_struct)
-{
- sType = in_struct->sType;
- accelerationStructureCount = in_struct->accelerationStructureCount;
- pAccelerationStructures = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (accelerationStructureCount && in_struct->pAccelerationStructures) {
- pAccelerationStructures = new VkAccelerationStructureNV[accelerationStructureCount];
- for (uint32_t i = 0; i < accelerationStructureCount; ++i) {
- pAccelerationStructures[i] = in_struct->pAccelerationStructures[i];
- }
- }
-}
-
-void safe_VkWriteDescriptorSetAccelerationStructureNV::initialize(const safe_VkWriteDescriptorSetAccelerationStructureNV* src)
-{
- sType = src->sType;
- accelerationStructureCount = src->accelerationStructureCount;
- pAccelerationStructures = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (accelerationStructureCount && src->pAccelerationStructures) {
- pAccelerationStructures = new VkAccelerationStructureNV[accelerationStructureCount];
- for (uint32_t i = 0; i < accelerationStructureCount; ++i) {
- pAccelerationStructures[i] = src->pAccelerationStructures[i];
- }
- }
-}
-
-safe_VkAccelerationStructureMemoryRequirementsInfoNV::safe_VkAccelerationStructureMemoryRequirementsInfoNV(const VkAccelerationStructureMemoryRequirementsInfoNV* in_struct) :
- sType(in_struct->sType),
- type(in_struct->type),
- accelerationStructure(in_struct->accelerationStructure)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkAccelerationStructureMemoryRequirementsInfoNV::safe_VkAccelerationStructureMemoryRequirementsInfoNV() :
- pNext(nullptr)
-{}
-
-safe_VkAccelerationStructureMemoryRequirementsInfoNV::safe_VkAccelerationStructureMemoryRequirementsInfoNV(const safe_VkAccelerationStructureMemoryRequirementsInfoNV& src)
-{
- sType = src.sType;
- type = src.type;
- accelerationStructure = src.accelerationStructure;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkAccelerationStructureMemoryRequirementsInfoNV& safe_VkAccelerationStructureMemoryRequirementsInfoNV::operator=(const safe_VkAccelerationStructureMemoryRequirementsInfoNV& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- type = src.type;
- accelerationStructure = src.accelerationStructure;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkAccelerationStructureMemoryRequirementsInfoNV::~safe_VkAccelerationStructureMemoryRequirementsInfoNV()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkAccelerationStructureMemoryRequirementsInfoNV::initialize(const VkAccelerationStructureMemoryRequirementsInfoNV* in_struct)
-{
- sType = in_struct->sType;
- type = in_struct->type;
- accelerationStructure = in_struct->accelerationStructure;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkAccelerationStructureMemoryRequirementsInfoNV::initialize(const safe_VkAccelerationStructureMemoryRequirementsInfoNV* src)
-{
- sType = src->sType;
- type = src->type;
- accelerationStructure = src->accelerationStructure;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceRayTracingPropertiesNV::safe_VkPhysicalDeviceRayTracingPropertiesNV(const VkPhysicalDeviceRayTracingPropertiesNV* in_struct) :
- sType(in_struct->sType),
- shaderGroupHandleSize(in_struct->shaderGroupHandleSize),
- maxRecursionDepth(in_struct->maxRecursionDepth),
- maxShaderGroupStride(in_struct->maxShaderGroupStride),
- shaderGroupBaseAlignment(in_struct->shaderGroupBaseAlignment),
- maxGeometryCount(in_struct->maxGeometryCount),
- maxInstanceCount(in_struct->maxInstanceCount),
- maxTriangleCount(in_struct->maxTriangleCount),
- maxDescriptorSetAccelerationStructures(in_struct->maxDescriptorSetAccelerationStructures)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceRayTracingPropertiesNV::safe_VkPhysicalDeviceRayTracingPropertiesNV() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceRayTracingPropertiesNV::safe_VkPhysicalDeviceRayTracingPropertiesNV(const safe_VkPhysicalDeviceRayTracingPropertiesNV& src)
-{
- sType = src.sType;
- shaderGroupHandleSize = src.shaderGroupHandleSize;
- maxRecursionDepth = src.maxRecursionDepth;
- maxShaderGroupStride = src.maxShaderGroupStride;
- shaderGroupBaseAlignment = src.shaderGroupBaseAlignment;
- maxGeometryCount = src.maxGeometryCount;
- maxInstanceCount = src.maxInstanceCount;
- maxTriangleCount = src.maxTriangleCount;
- maxDescriptorSetAccelerationStructures = src.maxDescriptorSetAccelerationStructures;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceRayTracingPropertiesNV& safe_VkPhysicalDeviceRayTracingPropertiesNV::operator=(const safe_VkPhysicalDeviceRayTracingPropertiesNV& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- shaderGroupHandleSize = src.shaderGroupHandleSize;
- maxRecursionDepth = src.maxRecursionDepth;
- maxShaderGroupStride = src.maxShaderGroupStride;
- shaderGroupBaseAlignment = src.shaderGroupBaseAlignment;
- maxGeometryCount = src.maxGeometryCount;
- maxInstanceCount = src.maxInstanceCount;
- maxTriangleCount = src.maxTriangleCount;
- maxDescriptorSetAccelerationStructures = src.maxDescriptorSetAccelerationStructures;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceRayTracingPropertiesNV::~safe_VkPhysicalDeviceRayTracingPropertiesNV()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceRayTracingPropertiesNV::initialize(const VkPhysicalDeviceRayTracingPropertiesNV* in_struct)
-{
- sType = in_struct->sType;
- shaderGroupHandleSize = in_struct->shaderGroupHandleSize;
- maxRecursionDepth = in_struct->maxRecursionDepth;
- maxShaderGroupStride = in_struct->maxShaderGroupStride;
- shaderGroupBaseAlignment = in_struct->shaderGroupBaseAlignment;
- maxGeometryCount = in_struct->maxGeometryCount;
- maxInstanceCount = in_struct->maxInstanceCount;
- maxTriangleCount = in_struct->maxTriangleCount;
- maxDescriptorSetAccelerationStructures = in_struct->maxDescriptorSetAccelerationStructures;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceRayTracingPropertiesNV::initialize(const safe_VkPhysicalDeviceRayTracingPropertiesNV* src)
-{
- sType = src->sType;
- shaderGroupHandleSize = src->shaderGroupHandleSize;
- maxRecursionDepth = src->maxRecursionDepth;
- maxShaderGroupStride = src->maxShaderGroupStride;
- shaderGroupBaseAlignment = src->shaderGroupBaseAlignment;
- maxGeometryCount = src->maxGeometryCount;
- maxInstanceCount = src->maxInstanceCount;
- maxTriangleCount = src->maxTriangleCount;
- maxDescriptorSetAccelerationStructures = src->maxDescriptorSetAccelerationStructures;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV::safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV(const VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV* in_struct) :
- sType(in_struct->sType),
- representativeFragmentTest(in_struct->representativeFragmentTest)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV::safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV::safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV(const safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV& src)
-{
- sType = src.sType;
- representativeFragmentTest = src.representativeFragmentTest;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV& safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV::operator=(const safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- representativeFragmentTest = src.representativeFragmentTest;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV::~safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV::initialize(const VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV* in_struct)
-{
- sType = in_struct->sType;
- representativeFragmentTest = in_struct->representativeFragmentTest;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV::initialize(const safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV* src)
-{
- sType = src->sType;
- representativeFragmentTest = src->representativeFragmentTest;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV::safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV(const VkPipelineRepresentativeFragmentTestStateCreateInfoNV* in_struct) :
- sType(in_struct->sType),
- representativeFragmentTestEnable(in_struct->representativeFragmentTestEnable)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV::safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV() :
- pNext(nullptr)
-{}
-
-safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV::safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV(const safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV& src)
-{
- sType = src.sType;
- representativeFragmentTestEnable = src.representativeFragmentTestEnable;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV& safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV::operator=(const safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- representativeFragmentTestEnable = src.representativeFragmentTestEnable;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV::~safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV::initialize(const VkPipelineRepresentativeFragmentTestStateCreateInfoNV* in_struct)
-{
- sType = in_struct->sType;
- representativeFragmentTestEnable = in_struct->representativeFragmentTestEnable;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV::initialize(const safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV* src)
-{
- sType = src->sType;
- representativeFragmentTestEnable = src->representativeFragmentTestEnable;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceImageViewImageFormatInfoEXT::safe_VkPhysicalDeviceImageViewImageFormatInfoEXT(const VkPhysicalDeviceImageViewImageFormatInfoEXT* in_struct) :
- sType(in_struct->sType),
- imageViewType(in_struct->imageViewType)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceImageViewImageFormatInfoEXT::safe_VkPhysicalDeviceImageViewImageFormatInfoEXT() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceImageViewImageFormatInfoEXT::safe_VkPhysicalDeviceImageViewImageFormatInfoEXT(const safe_VkPhysicalDeviceImageViewImageFormatInfoEXT& src)
-{
- sType = src.sType;
- imageViewType = src.imageViewType;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceImageViewImageFormatInfoEXT& safe_VkPhysicalDeviceImageViewImageFormatInfoEXT::operator=(const safe_VkPhysicalDeviceImageViewImageFormatInfoEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- imageViewType = src.imageViewType;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceImageViewImageFormatInfoEXT::~safe_VkPhysicalDeviceImageViewImageFormatInfoEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceImageViewImageFormatInfoEXT::initialize(const VkPhysicalDeviceImageViewImageFormatInfoEXT* in_struct)
-{
- sType = in_struct->sType;
- imageViewType = in_struct->imageViewType;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceImageViewImageFormatInfoEXT::initialize(const safe_VkPhysicalDeviceImageViewImageFormatInfoEXT* src)
-{
- sType = src->sType;
- imageViewType = src->imageViewType;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkFilterCubicImageViewImageFormatPropertiesEXT::safe_VkFilterCubicImageViewImageFormatPropertiesEXT(const VkFilterCubicImageViewImageFormatPropertiesEXT* in_struct) :
- sType(in_struct->sType),
- filterCubic(in_struct->filterCubic),
- filterCubicMinmax(in_struct->filterCubicMinmax)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkFilterCubicImageViewImageFormatPropertiesEXT::safe_VkFilterCubicImageViewImageFormatPropertiesEXT() :
- pNext(nullptr)
-{}
-
-safe_VkFilterCubicImageViewImageFormatPropertiesEXT::safe_VkFilterCubicImageViewImageFormatPropertiesEXT(const safe_VkFilterCubicImageViewImageFormatPropertiesEXT& src)
-{
- sType = src.sType;
- filterCubic = src.filterCubic;
- filterCubicMinmax = src.filterCubicMinmax;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkFilterCubicImageViewImageFormatPropertiesEXT& safe_VkFilterCubicImageViewImageFormatPropertiesEXT::operator=(const safe_VkFilterCubicImageViewImageFormatPropertiesEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- filterCubic = src.filterCubic;
- filterCubicMinmax = src.filterCubicMinmax;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkFilterCubicImageViewImageFormatPropertiesEXT::~safe_VkFilterCubicImageViewImageFormatPropertiesEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkFilterCubicImageViewImageFormatPropertiesEXT::initialize(const VkFilterCubicImageViewImageFormatPropertiesEXT* in_struct)
-{
- sType = in_struct->sType;
- filterCubic = in_struct->filterCubic;
- filterCubicMinmax = in_struct->filterCubicMinmax;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkFilterCubicImageViewImageFormatPropertiesEXT::initialize(const safe_VkFilterCubicImageViewImageFormatPropertiesEXT* src)
-{
- sType = src->sType;
- filterCubic = src->filterCubic;
- filterCubicMinmax = src->filterCubicMinmax;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkDeviceQueueGlobalPriorityCreateInfoEXT::safe_VkDeviceQueueGlobalPriorityCreateInfoEXT(const VkDeviceQueueGlobalPriorityCreateInfoEXT* in_struct) :
- sType(in_struct->sType),
- globalPriority(in_struct->globalPriority)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkDeviceQueueGlobalPriorityCreateInfoEXT::safe_VkDeviceQueueGlobalPriorityCreateInfoEXT() :
- pNext(nullptr)
-{}
-
-safe_VkDeviceQueueGlobalPriorityCreateInfoEXT::safe_VkDeviceQueueGlobalPriorityCreateInfoEXT(const safe_VkDeviceQueueGlobalPriorityCreateInfoEXT& src)
-{
- sType = src.sType;
- globalPriority = src.globalPriority;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkDeviceQueueGlobalPriorityCreateInfoEXT& safe_VkDeviceQueueGlobalPriorityCreateInfoEXT::operator=(const safe_VkDeviceQueueGlobalPriorityCreateInfoEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- globalPriority = src.globalPriority;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkDeviceQueueGlobalPriorityCreateInfoEXT::~safe_VkDeviceQueueGlobalPriorityCreateInfoEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDeviceQueueGlobalPriorityCreateInfoEXT::initialize(const VkDeviceQueueGlobalPriorityCreateInfoEXT* in_struct)
-{
- sType = in_struct->sType;
- globalPriority = in_struct->globalPriority;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkDeviceQueueGlobalPriorityCreateInfoEXT::initialize(const safe_VkDeviceQueueGlobalPriorityCreateInfoEXT* src)
-{
- sType = src->sType;
- globalPriority = src->globalPriority;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkImportMemoryHostPointerInfoEXT::safe_VkImportMemoryHostPointerInfoEXT(const VkImportMemoryHostPointerInfoEXT* in_struct) :
- sType(in_struct->sType),
- handleType(in_struct->handleType),
- pHostPointer(in_struct->pHostPointer)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkImportMemoryHostPointerInfoEXT::safe_VkImportMemoryHostPointerInfoEXT() :
- pNext(nullptr),
- pHostPointer(nullptr)
-{}
-
-safe_VkImportMemoryHostPointerInfoEXT::safe_VkImportMemoryHostPointerInfoEXT(const safe_VkImportMemoryHostPointerInfoEXT& src)
-{
- sType = src.sType;
- handleType = src.handleType;
- pHostPointer = src.pHostPointer;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkImportMemoryHostPointerInfoEXT& safe_VkImportMemoryHostPointerInfoEXT::operator=(const safe_VkImportMemoryHostPointerInfoEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- handleType = src.handleType;
- pHostPointer = src.pHostPointer;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkImportMemoryHostPointerInfoEXT::~safe_VkImportMemoryHostPointerInfoEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkImportMemoryHostPointerInfoEXT::initialize(const VkImportMemoryHostPointerInfoEXT* in_struct)
-{
- sType = in_struct->sType;
- handleType = in_struct->handleType;
- pHostPointer = in_struct->pHostPointer;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkImportMemoryHostPointerInfoEXT::initialize(const safe_VkImportMemoryHostPointerInfoEXT* src)
-{
- sType = src->sType;
- handleType = src->handleType;
- pHostPointer = src->pHostPointer;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkMemoryHostPointerPropertiesEXT::safe_VkMemoryHostPointerPropertiesEXT(const VkMemoryHostPointerPropertiesEXT* in_struct) :
- sType(in_struct->sType),
- memoryTypeBits(in_struct->memoryTypeBits)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkMemoryHostPointerPropertiesEXT::safe_VkMemoryHostPointerPropertiesEXT() :
- pNext(nullptr)
-{}
-
-safe_VkMemoryHostPointerPropertiesEXT::safe_VkMemoryHostPointerPropertiesEXT(const safe_VkMemoryHostPointerPropertiesEXT& src)
-{
- sType = src.sType;
- memoryTypeBits = src.memoryTypeBits;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkMemoryHostPointerPropertiesEXT& safe_VkMemoryHostPointerPropertiesEXT::operator=(const safe_VkMemoryHostPointerPropertiesEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- memoryTypeBits = src.memoryTypeBits;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkMemoryHostPointerPropertiesEXT::~safe_VkMemoryHostPointerPropertiesEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkMemoryHostPointerPropertiesEXT::initialize(const VkMemoryHostPointerPropertiesEXT* in_struct)
-{
- sType = in_struct->sType;
- memoryTypeBits = in_struct->memoryTypeBits;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkMemoryHostPointerPropertiesEXT::initialize(const safe_VkMemoryHostPointerPropertiesEXT* src)
-{
- sType = src->sType;
- memoryTypeBits = src->memoryTypeBits;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT::safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT(const VkPhysicalDeviceExternalMemoryHostPropertiesEXT* in_struct) :
- sType(in_struct->sType),
- minImportedHostPointerAlignment(in_struct->minImportedHostPointerAlignment)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT::safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT::safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT(const safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT& src)
-{
- sType = src.sType;
- minImportedHostPointerAlignment = src.minImportedHostPointerAlignment;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT& safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT::operator=(const safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- minImportedHostPointerAlignment = src.minImportedHostPointerAlignment;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT::~safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT::initialize(const VkPhysicalDeviceExternalMemoryHostPropertiesEXT* in_struct)
-{
- sType = in_struct->sType;
- minImportedHostPointerAlignment = in_struct->minImportedHostPointerAlignment;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT::initialize(const safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT* src)
-{
- sType = src->sType;
- minImportedHostPointerAlignment = src->minImportedHostPointerAlignment;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPipelineCompilerControlCreateInfoAMD::safe_VkPipelineCompilerControlCreateInfoAMD(const VkPipelineCompilerControlCreateInfoAMD* in_struct) :
- sType(in_struct->sType),
- compilerControlFlags(in_struct->compilerControlFlags)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPipelineCompilerControlCreateInfoAMD::safe_VkPipelineCompilerControlCreateInfoAMD() :
- pNext(nullptr)
-{}
-
-safe_VkPipelineCompilerControlCreateInfoAMD::safe_VkPipelineCompilerControlCreateInfoAMD(const safe_VkPipelineCompilerControlCreateInfoAMD& src)
-{
- sType = src.sType;
- compilerControlFlags = src.compilerControlFlags;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPipelineCompilerControlCreateInfoAMD& safe_VkPipelineCompilerControlCreateInfoAMD::operator=(const safe_VkPipelineCompilerControlCreateInfoAMD& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- compilerControlFlags = src.compilerControlFlags;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPipelineCompilerControlCreateInfoAMD::~safe_VkPipelineCompilerControlCreateInfoAMD()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPipelineCompilerControlCreateInfoAMD::initialize(const VkPipelineCompilerControlCreateInfoAMD* in_struct)
-{
- sType = in_struct->sType;
- compilerControlFlags = in_struct->compilerControlFlags;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPipelineCompilerControlCreateInfoAMD::initialize(const safe_VkPipelineCompilerControlCreateInfoAMD* src)
-{
- sType = src->sType;
- compilerControlFlags = src->compilerControlFlags;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkCalibratedTimestampInfoEXT::safe_VkCalibratedTimestampInfoEXT(const VkCalibratedTimestampInfoEXT* in_struct) :
- sType(in_struct->sType),
- timeDomain(in_struct->timeDomain)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkCalibratedTimestampInfoEXT::safe_VkCalibratedTimestampInfoEXT() :
- pNext(nullptr)
-{}
-
-safe_VkCalibratedTimestampInfoEXT::safe_VkCalibratedTimestampInfoEXT(const safe_VkCalibratedTimestampInfoEXT& src)
-{
- sType = src.sType;
- timeDomain = src.timeDomain;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkCalibratedTimestampInfoEXT& safe_VkCalibratedTimestampInfoEXT::operator=(const safe_VkCalibratedTimestampInfoEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- timeDomain = src.timeDomain;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkCalibratedTimestampInfoEXT::~safe_VkCalibratedTimestampInfoEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkCalibratedTimestampInfoEXT::initialize(const VkCalibratedTimestampInfoEXT* in_struct)
-{
- sType = in_struct->sType;
- timeDomain = in_struct->timeDomain;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkCalibratedTimestampInfoEXT::initialize(const safe_VkCalibratedTimestampInfoEXT* src)
-{
- sType = src->sType;
- timeDomain = src->timeDomain;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceShaderCorePropertiesAMD::safe_VkPhysicalDeviceShaderCorePropertiesAMD(const VkPhysicalDeviceShaderCorePropertiesAMD* in_struct) :
- sType(in_struct->sType),
- shaderEngineCount(in_struct->shaderEngineCount),
- shaderArraysPerEngineCount(in_struct->shaderArraysPerEngineCount),
- computeUnitsPerShaderArray(in_struct->computeUnitsPerShaderArray),
- simdPerComputeUnit(in_struct->simdPerComputeUnit),
- wavefrontsPerSimd(in_struct->wavefrontsPerSimd),
- wavefrontSize(in_struct->wavefrontSize),
- sgprsPerSimd(in_struct->sgprsPerSimd),
- minSgprAllocation(in_struct->minSgprAllocation),
- maxSgprAllocation(in_struct->maxSgprAllocation),
- sgprAllocationGranularity(in_struct->sgprAllocationGranularity),
- vgprsPerSimd(in_struct->vgprsPerSimd),
- minVgprAllocation(in_struct->minVgprAllocation),
- maxVgprAllocation(in_struct->maxVgprAllocation),
- vgprAllocationGranularity(in_struct->vgprAllocationGranularity)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceShaderCorePropertiesAMD::safe_VkPhysicalDeviceShaderCorePropertiesAMD() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceShaderCorePropertiesAMD::safe_VkPhysicalDeviceShaderCorePropertiesAMD(const safe_VkPhysicalDeviceShaderCorePropertiesAMD& src)
-{
- sType = src.sType;
- shaderEngineCount = src.shaderEngineCount;
- shaderArraysPerEngineCount = src.shaderArraysPerEngineCount;
- computeUnitsPerShaderArray = src.computeUnitsPerShaderArray;
- simdPerComputeUnit = src.simdPerComputeUnit;
- wavefrontsPerSimd = src.wavefrontsPerSimd;
- wavefrontSize = src.wavefrontSize;
- sgprsPerSimd = src.sgprsPerSimd;
- minSgprAllocation = src.minSgprAllocation;
- maxSgprAllocation = src.maxSgprAllocation;
- sgprAllocationGranularity = src.sgprAllocationGranularity;
- vgprsPerSimd = src.vgprsPerSimd;
- minVgprAllocation = src.minVgprAllocation;
- maxVgprAllocation = src.maxVgprAllocation;
- vgprAllocationGranularity = src.vgprAllocationGranularity;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceShaderCorePropertiesAMD& safe_VkPhysicalDeviceShaderCorePropertiesAMD::operator=(const safe_VkPhysicalDeviceShaderCorePropertiesAMD& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- shaderEngineCount = src.shaderEngineCount;
- shaderArraysPerEngineCount = src.shaderArraysPerEngineCount;
- computeUnitsPerShaderArray = src.computeUnitsPerShaderArray;
- simdPerComputeUnit = src.simdPerComputeUnit;
- wavefrontsPerSimd = src.wavefrontsPerSimd;
- wavefrontSize = src.wavefrontSize;
- sgprsPerSimd = src.sgprsPerSimd;
- minSgprAllocation = src.minSgprAllocation;
- maxSgprAllocation = src.maxSgprAllocation;
- sgprAllocationGranularity = src.sgprAllocationGranularity;
- vgprsPerSimd = src.vgprsPerSimd;
- minVgprAllocation = src.minVgprAllocation;
- maxVgprAllocation = src.maxVgprAllocation;
- vgprAllocationGranularity = src.vgprAllocationGranularity;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceShaderCorePropertiesAMD::~safe_VkPhysicalDeviceShaderCorePropertiesAMD()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceShaderCorePropertiesAMD::initialize(const VkPhysicalDeviceShaderCorePropertiesAMD* in_struct)
-{
- sType = in_struct->sType;
- shaderEngineCount = in_struct->shaderEngineCount;
- shaderArraysPerEngineCount = in_struct->shaderArraysPerEngineCount;
- computeUnitsPerShaderArray = in_struct->computeUnitsPerShaderArray;
- simdPerComputeUnit = in_struct->simdPerComputeUnit;
- wavefrontsPerSimd = in_struct->wavefrontsPerSimd;
- wavefrontSize = in_struct->wavefrontSize;
- sgprsPerSimd = in_struct->sgprsPerSimd;
- minSgprAllocation = in_struct->minSgprAllocation;
- maxSgprAllocation = in_struct->maxSgprAllocation;
- sgprAllocationGranularity = in_struct->sgprAllocationGranularity;
- vgprsPerSimd = in_struct->vgprsPerSimd;
- minVgprAllocation = in_struct->minVgprAllocation;
- maxVgprAllocation = in_struct->maxVgprAllocation;
- vgprAllocationGranularity = in_struct->vgprAllocationGranularity;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceShaderCorePropertiesAMD::initialize(const safe_VkPhysicalDeviceShaderCorePropertiesAMD* src)
-{
- sType = src->sType;
- shaderEngineCount = src->shaderEngineCount;
- shaderArraysPerEngineCount = src->shaderArraysPerEngineCount;
- computeUnitsPerShaderArray = src->computeUnitsPerShaderArray;
- simdPerComputeUnit = src->simdPerComputeUnit;
- wavefrontsPerSimd = src->wavefrontsPerSimd;
- wavefrontSize = src->wavefrontSize;
- sgprsPerSimd = src->sgprsPerSimd;
- minSgprAllocation = src->minSgprAllocation;
- maxSgprAllocation = src->maxSgprAllocation;
- sgprAllocationGranularity = src->sgprAllocationGranularity;
- vgprsPerSimd = src->vgprsPerSimd;
- minVgprAllocation = src->minVgprAllocation;
- maxVgprAllocation = src->maxVgprAllocation;
- vgprAllocationGranularity = src->vgprAllocationGranularity;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkDeviceMemoryOverallocationCreateInfoAMD::safe_VkDeviceMemoryOverallocationCreateInfoAMD(const VkDeviceMemoryOverallocationCreateInfoAMD* in_struct) :
- sType(in_struct->sType),
- overallocationBehavior(in_struct->overallocationBehavior)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkDeviceMemoryOverallocationCreateInfoAMD::safe_VkDeviceMemoryOverallocationCreateInfoAMD() :
- pNext(nullptr)
-{}
-
-safe_VkDeviceMemoryOverallocationCreateInfoAMD::safe_VkDeviceMemoryOverallocationCreateInfoAMD(const safe_VkDeviceMemoryOverallocationCreateInfoAMD& src)
-{
- sType = src.sType;
- overallocationBehavior = src.overallocationBehavior;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkDeviceMemoryOverallocationCreateInfoAMD& safe_VkDeviceMemoryOverallocationCreateInfoAMD::operator=(const safe_VkDeviceMemoryOverallocationCreateInfoAMD& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- overallocationBehavior = src.overallocationBehavior;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkDeviceMemoryOverallocationCreateInfoAMD::~safe_VkDeviceMemoryOverallocationCreateInfoAMD()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDeviceMemoryOverallocationCreateInfoAMD::initialize(const VkDeviceMemoryOverallocationCreateInfoAMD* in_struct)
-{
- sType = in_struct->sType;
- overallocationBehavior = in_struct->overallocationBehavior;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkDeviceMemoryOverallocationCreateInfoAMD::initialize(const safe_VkDeviceMemoryOverallocationCreateInfoAMD* src)
-{
- sType = src->sType;
- overallocationBehavior = src->overallocationBehavior;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT::safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT(const VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT* in_struct) :
- sType(in_struct->sType),
- maxVertexAttribDivisor(in_struct->maxVertexAttribDivisor)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT::safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT::safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT(const safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT& src)
-{
- sType = src.sType;
- maxVertexAttribDivisor = src.maxVertexAttribDivisor;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT& safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT::operator=(const safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- maxVertexAttribDivisor = src.maxVertexAttribDivisor;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT::~safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT::initialize(const VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT* in_struct)
-{
- sType = in_struct->sType;
- maxVertexAttribDivisor = in_struct->maxVertexAttribDivisor;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT::initialize(const safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT* src)
-{
- sType = src->sType;
- maxVertexAttribDivisor = src->maxVertexAttribDivisor;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPipelineVertexInputDivisorStateCreateInfoEXT::safe_VkPipelineVertexInputDivisorStateCreateInfoEXT(const VkPipelineVertexInputDivisorStateCreateInfoEXT* in_struct) :
- sType(in_struct->sType),
- vertexBindingDivisorCount(in_struct->vertexBindingDivisorCount),
- pVertexBindingDivisors(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pVertexBindingDivisors) {
- pVertexBindingDivisors = new VkVertexInputBindingDivisorDescriptionEXT[in_struct->vertexBindingDivisorCount];
- memcpy ((void *)pVertexBindingDivisors, (void *)in_struct->pVertexBindingDivisors, sizeof(VkVertexInputBindingDivisorDescriptionEXT)*in_struct->vertexBindingDivisorCount);
- }
-}
-
-safe_VkPipelineVertexInputDivisorStateCreateInfoEXT::safe_VkPipelineVertexInputDivisorStateCreateInfoEXT() :
- pNext(nullptr),
- pVertexBindingDivisors(nullptr)
-{}
-
-safe_VkPipelineVertexInputDivisorStateCreateInfoEXT::safe_VkPipelineVertexInputDivisorStateCreateInfoEXT(const safe_VkPipelineVertexInputDivisorStateCreateInfoEXT& src)
-{
- sType = src.sType;
- vertexBindingDivisorCount = src.vertexBindingDivisorCount;
- pVertexBindingDivisors = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pVertexBindingDivisors) {
- pVertexBindingDivisors = new VkVertexInputBindingDivisorDescriptionEXT[src.vertexBindingDivisorCount];
- memcpy ((void *)pVertexBindingDivisors, (void *)src.pVertexBindingDivisors, sizeof(VkVertexInputBindingDivisorDescriptionEXT)*src.vertexBindingDivisorCount);
- }
-}
-
-safe_VkPipelineVertexInputDivisorStateCreateInfoEXT& safe_VkPipelineVertexInputDivisorStateCreateInfoEXT::operator=(const safe_VkPipelineVertexInputDivisorStateCreateInfoEXT& src)
-{
- if (&src == this) return *this;
-
- if (pVertexBindingDivisors)
- delete[] pVertexBindingDivisors;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- vertexBindingDivisorCount = src.vertexBindingDivisorCount;
- pVertexBindingDivisors = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pVertexBindingDivisors) {
- pVertexBindingDivisors = new VkVertexInputBindingDivisorDescriptionEXT[src.vertexBindingDivisorCount];
- memcpy ((void *)pVertexBindingDivisors, (void *)src.pVertexBindingDivisors, sizeof(VkVertexInputBindingDivisorDescriptionEXT)*src.vertexBindingDivisorCount);
- }
-
- return *this;
-}
-
-safe_VkPipelineVertexInputDivisorStateCreateInfoEXT::~safe_VkPipelineVertexInputDivisorStateCreateInfoEXT()
-{
- if (pVertexBindingDivisors)
- delete[] pVertexBindingDivisors;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPipelineVertexInputDivisorStateCreateInfoEXT::initialize(const VkPipelineVertexInputDivisorStateCreateInfoEXT* in_struct)
-{
- sType = in_struct->sType;
- vertexBindingDivisorCount = in_struct->vertexBindingDivisorCount;
- pVertexBindingDivisors = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pVertexBindingDivisors) {
- pVertexBindingDivisors = new VkVertexInputBindingDivisorDescriptionEXT[in_struct->vertexBindingDivisorCount];
- memcpy ((void *)pVertexBindingDivisors, (void *)in_struct->pVertexBindingDivisors, sizeof(VkVertexInputBindingDivisorDescriptionEXT)*in_struct->vertexBindingDivisorCount);
- }
-}
-
-void safe_VkPipelineVertexInputDivisorStateCreateInfoEXT::initialize(const safe_VkPipelineVertexInputDivisorStateCreateInfoEXT* src)
-{
- sType = src->sType;
- vertexBindingDivisorCount = src->vertexBindingDivisorCount;
- pVertexBindingDivisors = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (src->pVertexBindingDivisors) {
- pVertexBindingDivisors = new VkVertexInputBindingDivisorDescriptionEXT[src->vertexBindingDivisorCount];
- memcpy ((void *)pVertexBindingDivisors, (void *)src->pVertexBindingDivisors, sizeof(VkVertexInputBindingDivisorDescriptionEXT)*src->vertexBindingDivisorCount);
- }
-}
-
-safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT::safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT(const VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT* in_struct) :
- sType(in_struct->sType),
- vertexAttributeInstanceRateDivisor(in_struct->vertexAttributeInstanceRateDivisor),
- vertexAttributeInstanceRateZeroDivisor(in_struct->vertexAttributeInstanceRateZeroDivisor)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT::safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT::safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT(const safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT& src)
-{
- sType = src.sType;
- vertexAttributeInstanceRateDivisor = src.vertexAttributeInstanceRateDivisor;
- vertexAttributeInstanceRateZeroDivisor = src.vertexAttributeInstanceRateZeroDivisor;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT& safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT::operator=(const safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- vertexAttributeInstanceRateDivisor = src.vertexAttributeInstanceRateDivisor;
- vertexAttributeInstanceRateZeroDivisor = src.vertexAttributeInstanceRateZeroDivisor;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT::~safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT::initialize(const VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT* in_struct)
-{
- sType = in_struct->sType;
- vertexAttributeInstanceRateDivisor = in_struct->vertexAttributeInstanceRateDivisor;
- vertexAttributeInstanceRateZeroDivisor = in_struct->vertexAttributeInstanceRateZeroDivisor;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT::initialize(const safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT* src)
-{
- sType = src->sType;
- vertexAttributeInstanceRateDivisor = src->vertexAttributeInstanceRateDivisor;
- vertexAttributeInstanceRateZeroDivisor = src->vertexAttributeInstanceRateZeroDivisor;
- pNext = SafePnextCopy(src->pNext);
-}
-#ifdef VK_USE_PLATFORM_GGP
-
-
-safe_VkPresentFrameTokenGGP::safe_VkPresentFrameTokenGGP(const VkPresentFrameTokenGGP* in_struct) :
- sType(in_struct->sType),
- frameToken(in_struct->frameToken)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPresentFrameTokenGGP::safe_VkPresentFrameTokenGGP() :
- pNext(nullptr)
-{}
-
-safe_VkPresentFrameTokenGGP::safe_VkPresentFrameTokenGGP(const safe_VkPresentFrameTokenGGP& src)
-{
- sType = src.sType;
- frameToken = src.frameToken;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPresentFrameTokenGGP& safe_VkPresentFrameTokenGGP::operator=(const safe_VkPresentFrameTokenGGP& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- frameToken = src.frameToken;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPresentFrameTokenGGP::~safe_VkPresentFrameTokenGGP()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPresentFrameTokenGGP::initialize(const VkPresentFrameTokenGGP* in_struct)
-{
- sType = in_struct->sType;
- frameToken = in_struct->frameToken;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPresentFrameTokenGGP::initialize(const safe_VkPresentFrameTokenGGP* src)
-{
- sType = src->sType;
- frameToken = src->frameToken;
- pNext = SafePnextCopy(src->pNext);
-}
-#endif // VK_USE_PLATFORM_GGP
-
-
-safe_VkPipelineCreationFeedbackCreateInfoEXT::safe_VkPipelineCreationFeedbackCreateInfoEXT(const VkPipelineCreationFeedbackCreateInfoEXT* in_struct) :
- sType(in_struct->sType),
- pPipelineCreationFeedback(nullptr),
- pipelineStageCreationFeedbackCount(in_struct->pipelineStageCreationFeedbackCount),
- pPipelineStageCreationFeedbacks(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pPipelineCreationFeedback) {
- pPipelineCreationFeedback = new VkPipelineCreationFeedbackEXT(*in_struct->pPipelineCreationFeedback);
- }
- if (in_struct->pPipelineStageCreationFeedbacks) {
- pPipelineStageCreationFeedbacks = new VkPipelineCreationFeedbackEXT[in_struct->pipelineStageCreationFeedbackCount];
- memcpy ((void *)pPipelineStageCreationFeedbacks, (void *)in_struct->pPipelineStageCreationFeedbacks, sizeof(VkPipelineCreationFeedbackEXT)*in_struct->pipelineStageCreationFeedbackCount);
- }
-}
-
-safe_VkPipelineCreationFeedbackCreateInfoEXT::safe_VkPipelineCreationFeedbackCreateInfoEXT() :
- pNext(nullptr),
- pPipelineCreationFeedback(nullptr),
- pPipelineStageCreationFeedbacks(nullptr)
-{}
-
-safe_VkPipelineCreationFeedbackCreateInfoEXT::safe_VkPipelineCreationFeedbackCreateInfoEXT(const safe_VkPipelineCreationFeedbackCreateInfoEXT& src)
-{
- sType = src.sType;
- pPipelineCreationFeedback = nullptr;
- pipelineStageCreationFeedbackCount = src.pipelineStageCreationFeedbackCount;
- pPipelineStageCreationFeedbacks = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pPipelineCreationFeedback) {
- pPipelineCreationFeedback = new VkPipelineCreationFeedbackEXT(*src.pPipelineCreationFeedback);
- }
- if (src.pPipelineStageCreationFeedbacks) {
- pPipelineStageCreationFeedbacks = new VkPipelineCreationFeedbackEXT[src.pipelineStageCreationFeedbackCount];
- memcpy ((void *)pPipelineStageCreationFeedbacks, (void *)src.pPipelineStageCreationFeedbacks, sizeof(VkPipelineCreationFeedbackEXT)*src.pipelineStageCreationFeedbackCount);
- }
-}
-
-safe_VkPipelineCreationFeedbackCreateInfoEXT& safe_VkPipelineCreationFeedbackCreateInfoEXT::operator=(const safe_VkPipelineCreationFeedbackCreateInfoEXT& src)
-{
- if (&src == this) return *this;
-
- if (pPipelineCreationFeedback)
- delete pPipelineCreationFeedback;
- if (pPipelineStageCreationFeedbacks)
- delete[] pPipelineStageCreationFeedbacks;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- pPipelineCreationFeedback = nullptr;
- pipelineStageCreationFeedbackCount = src.pipelineStageCreationFeedbackCount;
- pPipelineStageCreationFeedbacks = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pPipelineCreationFeedback) {
- pPipelineCreationFeedback = new VkPipelineCreationFeedbackEXT(*src.pPipelineCreationFeedback);
- }
- if (src.pPipelineStageCreationFeedbacks) {
- pPipelineStageCreationFeedbacks = new VkPipelineCreationFeedbackEXT[src.pipelineStageCreationFeedbackCount];
- memcpy ((void *)pPipelineStageCreationFeedbacks, (void *)src.pPipelineStageCreationFeedbacks, sizeof(VkPipelineCreationFeedbackEXT)*src.pipelineStageCreationFeedbackCount);
- }
-
- return *this;
-}
-
-safe_VkPipelineCreationFeedbackCreateInfoEXT::~safe_VkPipelineCreationFeedbackCreateInfoEXT()
-{
- if (pPipelineCreationFeedback)
- delete pPipelineCreationFeedback;
- if (pPipelineStageCreationFeedbacks)
- delete[] pPipelineStageCreationFeedbacks;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPipelineCreationFeedbackCreateInfoEXT::initialize(const VkPipelineCreationFeedbackCreateInfoEXT* in_struct)
-{
- sType = in_struct->sType;
- pPipelineCreationFeedback = nullptr;
- pipelineStageCreationFeedbackCount = in_struct->pipelineStageCreationFeedbackCount;
- pPipelineStageCreationFeedbacks = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pPipelineCreationFeedback) {
- pPipelineCreationFeedback = new VkPipelineCreationFeedbackEXT(*in_struct->pPipelineCreationFeedback);
- }
- if (in_struct->pPipelineStageCreationFeedbacks) {
- pPipelineStageCreationFeedbacks = new VkPipelineCreationFeedbackEXT[in_struct->pipelineStageCreationFeedbackCount];
- memcpy ((void *)pPipelineStageCreationFeedbacks, (void *)in_struct->pPipelineStageCreationFeedbacks, sizeof(VkPipelineCreationFeedbackEXT)*in_struct->pipelineStageCreationFeedbackCount);
- }
-}
-
-void safe_VkPipelineCreationFeedbackCreateInfoEXT::initialize(const safe_VkPipelineCreationFeedbackCreateInfoEXT* src)
-{
- sType = src->sType;
- pPipelineCreationFeedback = nullptr;
- pipelineStageCreationFeedbackCount = src->pipelineStageCreationFeedbackCount;
- pPipelineStageCreationFeedbacks = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (src->pPipelineCreationFeedback) {
- pPipelineCreationFeedback = new VkPipelineCreationFeedbackEXT(*src->pPipelineCreationFeedback);
- }
- if (src->pPipelineStageCreationFeedbacks) {
- pPipelineStageCreationFeedbacks = new VkPipelineCreationFeedbackEXT[src->pipelineStageCreationFeedbackCount];
- memcpy ((void *)pPipelineStageCreationFeedbacks, (void *)src->pPipelineStageCreationFeedbacks, sizeof(VkPipelineCreationFeedbackEXT)*src->pipelineStageCreationFeedbackCount);
- }
-}
-
-safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV::safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV(const VkPhysicalDeviceComputeShaderDerivativesFeaturesNV* in_struct) :
- sType(in_struct->sType),
- computeDerivativeGroupQuads(in_struct->computeDerivativeGroupQuads),
- computeDerivativeGroupLinear(in_struct->computeDerivativeGroupLinear)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV::safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV::safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV(const safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV& src)
-{
- sType = src.sType;
- computeDerivativeGroupQuads = src.computeDerivativeGroupQuads;
- computeDerivativeGroupLinear = src.computeDerivativeGroupLinear;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV& safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV::operator=(const safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- computeDerivativeGroupQuads = src.computeDerivativeGroupQuads;
- computeDerivativeGroupLinear = src.computeDerivativeGroupLinear;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV::~safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV::initialize(const VkPhysicalDeviceComputeShaderDerivativesFeaturesNV* in_struct)
-{
- sType = in_struct->sType;
- computeDerivativeGroupQuads = in_struct->computeDerivativeGroupQuads;
- computeDerivativeGroupLinear = in_struct->computeDerivativeGroupLinear;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV::initialize(const safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV* src)
-{
- sType = src->sType;
- computeDerivativeGroupQuads = src->computeDerivativeGroupQuads;
- computeDerivativeGroupLinear = src->computeDerivativeGroupLinear;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceMeshShaderFeaturesNV::safe_VkPhysicalDeviceMeshShaderFeaturesNV(const VkPhysicalDeviceMeshShaderFeaturesNV* in_struct) :
- sType(in_struct->sType),
- taskShader(in_struct->taskShader),
- meshShader(in_struct->meshShader)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceMeshShaderFeaturesNV::safe_VkPhysicalDeviceMeshShaderFeaturesNV() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceMeshShaderFeaturesNV::safe_VkPhysicalDeviceMeshShaderFeaturesNV(const safe_VkPhysicalDeviceMeshShaderFeaturesNV& src)
-{
- sType = src.sType;
- taskShader = src.taskShader;
- meshShader = src.meshShader;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceMeshShaderFeaturesNV& safe_VkPhysicalDeviceMeshShaderFeaturesNV::operator=(const safe_VkPhysicalDeviceMeshShaderFeaturesNV& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- taskShader = src.taskShader;
- meshShader = src.meshShader;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceMeshShaderFeaturesNV::~safe_VkPhysicalDeviceMeshShaderFeaturesNV()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceMeshShaderFeaturesNV::initialize(const VkPhysicalDeviceMeshShaderFeaturesNV* in_struct)
-{
- sType = in_struct->sType;
- taskShader = in_struct->taskShader;
- meshShader = in_struct->meshShader;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceMeshShaderFeaturesNV::initialize(const safe_VkPhysicalDeviceMeshShaderFeaturesNV* src)
-{
- sType = src->sType;
- taskShader = src->taskShader;
- meshShader = src->meshShader;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceMeshShaderPropertiesNV::safe_VkPhysicalDeviceMeshShaderPropertiesNV(const VkPhysicalDeviceMeshShaderPropertiesNV* in_struct) :
- sType(in_struct->sType),
- maxDrawMeshTasksCount(in_struct->maxDrawMeshTasksCount),
- maxTaskWorkGroupInvocations(in_struct->maxTaskWorkGroupInvocations),
- maxTaskTotalMemorySize(in_struct->maxTaskTotalMemorySize),
- maxTaskOutputCount(in_struct->maxTaskOutputCount),
- maxMeshWorkGroupInvocations(in_struct->maxMeshWorkGroupInvocations),
- maxMeshTotalMemorySize(in_struct->maxMeshTotalMemorySize),
- maxMeshOutputVertices(in_struct->maxMeshOutputVertices),
- maxMeshOutputPrimitives(in_struct->maxMeshOutputPrimitives),
- maxMeshMultiviewViewCount(in_struct->maxMeshMultiviewViewCount),
- meshOutputPerVertexGranularity(in_struct->meshOutputPerVertexGranularity),
- meshOutputPerPrimitiveGranularity(in_struct->meshOutputPerPrimitiveGranularity)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- for (uint32_t i = 0; i < 3; ++i) {
- maxTaskWorkGroupSize[i] = in_struct->maxTaskWorkGroupSize[i];
- }
- for (uint32_t i = 0; i < 3; ++i) {
- maxMeshWorkGroupSize[i] = in_struct->maxMeshWorkGroupSize[i];
- }
-}
-
-safe_VkPhysicalDeviceMeshShaderPropertiesNV::safe_VkPhysicalDeviceMeshShaderPropertiesNV() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceMeshShaderPropertiesNV::safe_VkPhysicalDeviceMeshShaderPropertiesNV(const safe_VkPhysicalDeviceMeshShaderPropertiesNV& src)
-{
- sType = src.sType;
- maxDrawMeshTasksCount = src.maxDrawMeshTasksCount;
- maxTaskWorkGroupInvocations = src.maxTaskWorkGroupInvocations;
- maxTaskTotalMemorySize = src.maxTaskTotalMemorySize;
- maxTaskOutputCount = src.maxTaskOutputCount;
- maxMeshWorkGroupInvocations = src.maxMeshWorkGroupInvocations;
- maxMeshTotalMemorySize = src.maxMeshTotalMemorySize;
- maxMeshOutputVertices = src.maxMeshOutputVertices;
- maxMeshOutputPrimitives = src.maxMeshOutputPrimitives;
- maxMeshMultiviewViewCount = src.maxMeshMultiviewViewCount;
- meshOutputPerVertexGranularity = src.meshOutputPerVertexGranularity;
- meshOutputPerPrimitiveGranularity = src.meshOutputPerPrimitiveGranularity;
- pNext = SafePnextCopy(src.pNext);
- for (uint32_t i = 0; i < 3; ++i) {
- maxTaskWorkGroupSize[i] = src.maxTaskWorkGroupSize[i];
- }
- for (uint32_t i = 0; i < 3; ++i) {
- maxMeshWorkGroupSize[i] = src.maxMeshWorkGroupSize[i];
- }
-}
-
-safe_VkPhysicalDeviceMeshShaderPropertiesNV& safe_VkPhysicalDeviceMeshShaderPropertiesNV::operator=(const safe_VkPhysicalDeviceMeshShaderPropertiesNV& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- maxDrawMeshTasksCount = src.maxDrawMeshTasksCount;
- maxTaskWorkGroupInvocations = src.maxTaskWorkGroupInvocations;
- maxTaskTotalMemorySize = src.maxTaskTotalMemorySize;
- maxTaskOutputCount = src.maxTaskOutputCount;
- maxMeshWorkGroupInvocations = src.maxMeshWorkGroupInvocations;
- maxMeshTotalMemorySize = src.maxMeshTotalMemorySize;
- maxMeshOutputVertices = src.maxMeshOutputVertices;
- maxMeshOutputPrimitives = src.maxMeshOutputPrimitives;
- maxMeshMultiviewViewCount = src.maxMeshMultiviewViewCount;
- meshOutputPerVertexGranularity = src.meshOutputPerVertexGranularity;
- meshOutputPerPrimitiveGranularity = src.meshOutputPerPrimitiveGranularity;
- pNext = SafePnextCopy(src.pNext);
- for (uint32_t i = 0; i < 3; ++i) {
- maxTaskWorkGroupSize[i] = src.maxTaskWorkGroupSize[i];
- }
- for (uint32_t i = 0; i < 3; ++i) {
- maxMeshWorkGroupSize[i] = src.maxMeshWorkGroupSize[i];
- }
-
- return *this;
-}
-
-safe_VkPhysicalDeviceMeshShaderPropertiesNV::~safe_VkPhysicalDeviceMeshShaderPropertiesNV()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceMeshShaderPropertiesNV::initialize(const VkPhysicalDeviceMeshShaderPropertiesNV* in_struct)
-{
- sType = in_struct->sType;
- maxDrawMeshTasksCount = in_struct->maxDrawMeshTasksCount;
- maxTaskWorkGroupInvocations = in_struct->maxTaskWorkGroupInvocations;
- maxTaskTotalMemorySize = in_struct->maxTaskTotalMemorySize;
- maxTaskOutputCount = in_struct->maxTaskOutputCount;
- maxMeshWorkGroupInvocations = in_struct->maxMeshWorkGroupInvocations;
- maxMeshTotalMemorySize = in_struct->maxMeshTotalMemorySize;
- maxMeshOutputVertices = in_struct->maxMeshOutputVertices;
- maxMeshOutputPrimitives = in_struct->maxMeshOutputPrimitives;
- maxMeshMultiviewViewCount = in_struct->maxMeshMultiviewViewCount;
- meshOutputPerVertexGranularity = in_struct->meshOutputPerVertexGranularity;
- meshOutputPerPrimitiveGranularity = in_struct->meshOutputPerPrimitiveGranularity;
- pNext = SafePnextCopy(in_struct->pNext);
- for (uint32_t i = 0; i < 3; ++i) {
- maxTaskWorkGroupSize[i] = in_struct->maxTaskWorkGroupSize[i];
- }
- for (uint32_t i = 0; i < 3; ++i) {
- maxMeshWorkGroupSize[i] = in_struct->maxMeshWorkGroupSize[i];
- }
-}
-
-void safe_VkPhysicalDeviceMeshShaderPropertiesNV::initialize(const safe_VkPhysicalDeviceMeshShaderPropertiesNV* src)
-{
- sType = src->sType;
- maxDrawMeshTasksCount = src->maxDrawMeshTasksCount;
- maxTaskWorkGroupInvocations = src->maxTaskWorkGroupInvocations;
- maxTaskTotalMemorySize = src->maxTaskTotalMemorySize;
- maxTaskOutputCount = src->maxTaskOutputCount;
- maxMeshWorkGroupInvocations = src->maxMeshWorkGroupInvocations;
- maxMeshTotalMemorySize = src->maxMeshTotalMemorySize;
- maxMeshOutputVertices = src->maxMeshOutputVertices;
- maxMeshOutputPrimitives = src->maxMeshOutputPrimitives;
- maxMeshMultiviewViewCount = src->maxMeshMultiviewViewCount;
- meshOutputPerVertexGranularity = src->meshOutputPerVertexGranularity;
- meshOutputPerPrimitiveGranularity = src->meshOutputPerPrimitiveGranularity;
- pNext = SafePnextCopy(src->pNext);
- for (uint32_t i = 0; i < 3; ++i) {
- maxTaskWorkGroupSize[i] = src->maxTaskWorkGroupSize[i];
- }
- for (uint32_t i = 0; i < 3; ++i) {
- maxMeshWorkGroupSize[i] = src->maxMeshWorkGroupSize[i];
- }
-}
-
-safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV::safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV(const VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV* in_struct) :
- sType(in_struct->sType),
- fragmentShaderBarycentric(in_struct->fragmentShaderBarycentric)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV::safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV::safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV(const safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV& src)
-{
- sType = src.sType;
- fragmentShaderBarycentric = src.fragmentShaderBarycentric;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV& safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV::operator=(const safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- fragmentShaderBarycentric = src.fragmentShaderBarycentric;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV::~safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV::initialize(const VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV* in_struct)
-{
- sType = in_struct->sType;
- fragmentShaderBarycentric = in_struct->fragmentShaderBarycentric;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV::initialize(const safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV* src)
-{
- sType = src->sType;
- fragmentShaderBarycentric = src->fragmentShaderBarycentric;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV::safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV(const VkPhysicalDeviceShaderImageFootprintFeaturesNV* in_struct) :
- sType(in_struct->sType),
- imageFootprint(in_struct->imageFootprint)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV::safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV::safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV(const safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV& src)
-{
- sType = src.sType;
- imageFootprint = src.imageFootprint;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV& safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV::operator=(const safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- imageFootprint = src.imageFootprint;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV::~safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV::initialize(const VkPhysicalDeviceShaderImageFootprintFeaturesNV* in_struct)
-{
- sType = in_struct->sType;
- imageFootprint = in_struct->imageFootprint;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV::initialize(const safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV* src)
-{
- sType = src->sType;
- imageFootprint = src->imageFootprint;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV::safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV(const VkPipelineViewportExclusiveScissorStateCreateInfoNV* in_struct) :
- sType(in_struct->sType),
- exclusiveScissorCount(in_struct->exclusiveScissorCount),
- pExclusiveScissors(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pExclusiveScissors) {
- pExclusiveScissors = new VkRect2D[in_struct->exclusiveScissorCount];
- memcpy ((void *)pExclusiveScissors, (void *)in_struct->pExclusiveScissors, sizeof(VkRect2D)*in_struct->exclusiveScissorCount);
- }
-}
-
-safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV::safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV() :
- pNext(nullptr),
- pExclusiveScissors(nullptr)
-{}
-
-safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV::safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV(const safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV& src)
-{
- sType = src.sType;
- exclusiveScissorCount = src.exclusiveScissorCount;
- pExclusiveScissors = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pExclusiveScissors) {
- pExclusiveScissors = new VkRect2D[src.exclusiveScissorCount];
- memcpy ((void *)pExclusiveScissors, (void *)src.pExclusiveScissors, sizeof(VkRect2D)*src.exclusiveScissorCount);
- }
-}
-
-safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV& safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV::operator=(const safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV& src)
-{
- if (&src == this) return *this;
-
- if (pExclusiveScissors)
- delete[] pExclusiveScissors;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- exclusiveScissorCount = src.exclusiveScissorCount;
- pExclusiveScissors = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pExclusiveScissors) {
- pExclusiveScissors = new VkRect2D[src.exclusiveScissorCount];
- memcpy ((void *)pExclusiveScissors, (void *)src.pExclusiveScissors, sizeof(VkRect2D)*src.exclusiveScissorCount);
- }
-
- return *this;
-}
-
-safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV::~safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV()
-{
- if (pExclusiveScissors)
- delete[] pExclusiveScissors;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV::initialize(const VkPipelineViewportExclusiveScissorStateCreateInfoNV* in_struct)
-{
- sType = in_struct->sType;
- exclusiveScissorCount = in_struct->exclusiveScissorCount;
- pExclusiveScissors = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pExclusiveScissors) {
- pExclusiveScissors = new VkRect2D[in_struct->exclusiveScissorCount];
- memcpy ((void *)pExclusiveScissors, (void *)in_struct->pExclusiveScissors, sizeof(VkRect2D)*in_struct->exclusiveScissorCount);
- }
-}
-
-void safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV::initialize(const safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV* src)
-{
- sType = src->sType;
- exclusiveScissorCount = src->exclusiveScissorCount;
- pExclusiveScissors = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (src->pExclusiveScissors) {
- pExclusiveScissors = new VkRect2D[src->exclusiveScissorCount];
- memcpy ((void *)pExclusiveScissors, (void *)src->pExclusiveScissors, sizeof(VkRect2D)*src->exclusiveScissorCount);
- }
-}
-
-safe_VkPhysicalDeviceExclusiveScissorFeaturesNV::safe_VkPhysicalDeviceExclusiveScissorFeaturesNV(const VkPhysicalDeviceExclusiveScissorFeaturesNV* in_struct) :
- sType(in_struct->sType),
- exclusiveScissor(in_struct->exclusiveScissor)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceExclusiveScissorFeaturesNV::safe_VkPhysicalDeviceExclusiveScissorFeaturesNV() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceExclusiveScissorFeaturesNV::safe_VkPhysicalDeviceExclusiveScissorFeaturesNV(const safe_VkPhysicalDeviceExclusiveScissorFeaturesNV& src)
-{
- sType = src.sType;
- exclusiveScissor = src.exclusiveScissor;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceExclusiveScissorFeaturesNV& safe_VkPhysicalDeviceExclusiveScissorFeaturesNV::operator=(const safe_VkPhysicalDeviceExclusiveScissorFeaturesNV& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- exclusiveScissor = src.exclusiveScissor;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceExclusiveScissorFeaturesNV::~safe_VkPhysicalDeviceExclusiveScissorFeaturesNV()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceExclusiveScissorFeaturesNV::initialize(const VkPhysicalDeviceExclusiveScissorFeaturesNV* in_struct)
-{
- sType = in_struct->sType;
- exclusiveScissor = in_struct->exclusiveScissor;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceExclusiveScissorFeaturesNV::initialize(const safe_VkPhysicalDeviceExclusiveScissorFeaturesNV* src)
-{
- sType = src->sType;
- exclusiveScissor = src->exclusiveScissor;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkQueueFamilyCheckpointPropertiesNV::safe_VkQueueFamilyCheckpointPropertiesNV(const VkQueueFamilyCheckpointPropertiesNV* in_struct) :
- sType(in_struct->sType),
- checkpointExecutionStageMask(in_struct->checkpointExecutionStageMask)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkQueueFamilyCheckpointPropertiesNV::safe_VkQueueFamilyCheckpointPropertiesNV() :
- pNext(nullptr)
-{}
-
-safe_VkQueueFamilyCheckpointPropertiesNV::safe_VkQueueFamilyCheckpointPropertiesNV(const safe_VkQueueFamilyCheckpointPropertiesNV& src)
-{
- sType = src.sType;
- checkpointExecutionStageMask = src.checkpointExecutionStageMask;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkQueueFamilyCheckpointPropertiesNV& safe_VkQueueFamilyCheckpointPropertiesNV::operator=(const safe_VkQueueFamilyCheckpointPropertiesNV& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- checkpointExecutionStageMask = src.checkpointExecutionStageMask;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkQueueFamilyCheckpointPropertiesNV::~safe_VkQueueFamilyCheckpointPropertiesNV()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkQueueFamilyCheckpointPropertiesNV::initialize(const VkQueueFamilyCheckpointPropertiesNV* in_struct)
-{
- sType = in_struct->sType;
- checkpointExecutionStageMask = in_struct->checkpointExecutionStageMask;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkQueueFamilyCheckpointPropertiesNV::initialize(const safe_VkQueueFamilyCheckpointPropertiesNV* src)
-{
- sType = src->sType;
- checkpointExecutionStageMask = src->checkpointExecutionStageMask;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkCheckpointDataNV::safe_VkCheckpointDataNV(const VkCheckpointDataNV* in_struct) :
- sType(in_struct->sType),
- stage(in_struct->stage),
- pCheckpointMarker(in_struct->pCheckpointMarker)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkCheckpointDataNV::safe_VkCheckpointDataNV() :
- pNext(nullptr),
- pCheckpointMarker(nullptr)
-{}
-
-safe_VkCheckpointDataNV::safe_VkCheckpointDataNV(const safe_VkCheckpointDataNV& src)
-{
- sType = src.sType;
- stage = src.stage;
- pCheckpointMarker = src.pCheckpointMarker;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkCheckpointDataNV& safe_VkCheckpointDataNV::operator=(const safe_VkCheckpointDataNV& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- stage = src.stage;
- pCheckpointMarker = src.pCheckpointMarker;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkCheckpointDataNV::~safe_VkCheckpointDataNV()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkCheckpointDataNV::initialize(const VkCheckpointDataNV* in_struct)
-{
- sType = in_struct->sType;
- stage = in_struct->stage;
- pCheckpointMarker = in_struct->pCheckpointMarker;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkCheckpointDataNV::initialize(const safe_VkCheckpointDataNV* src)
-{
- sType = src->sType;
- stage = src->stage;
- pCheckpointMarker = src->pCheckpointMarker;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL::safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL(const VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL* in_struct) :
- sType(in_struct->sType),
- shaderIntegerFunctions2(in_struct->shaderIntegerFunctions2)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL::safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL::safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL(const safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL& src)
-{
- sType = src.sType;
- shaderIntegerFunctions2 = src.shaderIntegerFunctions2;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL& safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL::operator=(const safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- shaderIntegerFunctions2 = src.shaderIntegerFunctions2;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL::~safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL::initialize(const VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL* in_struct)
-{
- sType = in_struct->sType;
- shaderIntegerFunctions2 = in_struct->shaderIntegerFunctions2;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL::initialize(const safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL* src)
-{
- sType = src->sType;
- shaderIntegerFunctions2 = src->shaderIntegerFunctions2;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPerformanceValueDataINTEL::safe_VkPerformanceValueDataINTEL(const VkPerformanceValueDataINTEL* in_struct) :
- value32(in_struct->value32),
- value64(in_struct->value64),
- valueFloat(in_struct->valueFloat),
- valueBool(in_struct->valueBool)
-{
- valueString = SafeStringCopy(in_struct->valueString);
-}
-
-safe_VkPerformanceValueDataINTEL::safe_VkPerformanceValueDataINTEL() :
- valueString(nullptr)
-{}
-
-safe_VkPerformanceValueDataINTEL::safe_VkPerformanceValueDataINTEL(const safe_VkPerformanceValueDataINTEL& src)
-{
- value32 = src.value32;
- value64 = src.value64;
- valueFloat = src.valueFloat;
- valueBool = src.valueBool;
- valueString = SafeStringCopy(src.valueString);
-}
-
-safe_VkPerformanceValueDataINTEL& safe_VkPerformanceValueDataINTEL::operator=(const safe_VkPerformanceValueDataINTEL& src)
-{
- if (&src == this) return *this;
-
- if (valueString) delete [] valueString;
-
- value32 = src.value32;
- value64 = src.value64;
- valueFloat = src.valueFloat;
- valueBool = src.valueBool;
- valueString = SafeStringCopy(src.valueString);
-
- return *this;
-}
-
-safe_VkPerformanceValueDataINTEL::~safe_VkPerformanceValueDataINTEL()
-{
- if (valueString) delete [] valueString;
-}
-
-void safe_VkPerformanceValueDataINTEL::initialize(const VkPerformanceValueDataINTEL* in_struct)
-{
- value32 = in_struct->value32;
- value64 = in_struct->value64;
- valueFloat = in_struct->valueFloat;
- valueBool = in_struct->valueBool;
- valueString = SafeStringCopy(in_struct->valueString);
-}
-
-void safe_VkPerformanceValueDataINTEL::initialize(const safe_VkPerformanceValueDataINTEL* src)
-{
- value32 = src->value32;
- value64 = src->value64;
- valueFloat = src->valueFloat;
- valueBool = src->valueBool;
- valueString = SafeStringCopy(src->valueString);
-}
-
-safe_VkInitializePerformanceApiInfoINTEL::safe_VkInitializePerformanceApiInfoINTEL(const VkInitializePerformanceApiInfoINTEL* in_struct) :
- sType(in_struct->sType),
- pUserData(in_struct->pUserData)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkInitializePerformanceApiInfoINTEL::safe_VkInitializePerformanceApiInfoINTEL() :
- pNext(nullptr),
- pUserData(nullptr)
-{}
-
-safe_VkInitializePerformanceApiInfoINTEL::safe_VkInitializePerformanceApiInfoINTEL(const safe_VkInitializePerformanceApiInfoINTEL& src)
-{
- sType = src.sType;
- pUserData = src.pUserData;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkInitializePerformanceApiInfoINTEL& safe_VkInitializePerformanceApiInfoINTEL::operator=(const safe_VkInitializePerformanceApiInfoINTEL& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- pUserData = src.pUserData;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkInitializePerformanceApiInfoINTEL::~safe_VkInitializePerformanceApiInfoINTEL()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkInitializePerformanceApiInfoINTEL::initialize(const VkInitializePerformanceApiInfoINTEL* in_struct)
-{
- sType = in_struct->sType;
- pUserData = in_struct->pUserData;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkInitializePerformanceApiInfoINTEL::initialize(const safe_VkInitializePerformanceApiInfoINTEL* src)
-{
- sType = src->sType;
- pUserData = src->pUserData;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkQueryPoolCreateInfoINTEL::safe_VkQueryPoolCreateInfoINTEL(const VkQueryPoolCreateInfoINTEL* in_struct) :
- sType(in_struct->sType),
- performanceCountersSampling(in_struct->performanceCountersSampling)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkQueryPoolCreateInfoINTEL::safe_VkQueryPoolCreateInfoINTEL() :
- pNext(nullptr)
-{}
-
-safe_VkQueryPoolCreateInfoINTEL::safe_VkQueryPoolCreateInfoINTEL(const safe_VkQueryPoolCreateInfoINTEL& src)
-{
- sType = src.sType;
- performanceCountersSampling = src.performanceCountersSampling;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkQueryPoolCreateInfoINTEL& safe_VkQueryPoolCreateInfoINTEL::operator=(const safe_VkQueryPoolCreateInfoINTEL& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- performanceCountersSampling = src.performanceCountersSampling;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkQueryPoolCreateInfoINTEL::~safe_VkQueryPoolCreateInfoINTEL()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkQueryPoolCreateInfoINTEL::initialize(const VkQueryPoolCreateInfoINTEL* in_struct)
-{
- sType = in_struct->sType;
- performanceCountersSampling = in_struct->performanceCountersSampling;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkQueryPoolCreateInfoINTEL::initialize(const safe_VkQueryPoolCreateInfoINTEL* src)
-{
- sType = src->sType;
- performanceCountersSampling = src->performanceCountersSampling;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPerformanceMarkerInfoINTEL::safe_VkPerformanceMarkerInfoINTEL(const VkPerformanceMarkerInfoINTEL* in_struct) :
- sType(in_struct->sType),
- marker(in_struct->marker)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPerformanceMarkerInfoINTEL::safe_VkPerformanceMarkerInfoINTEL() :
- pNext(nullptr)
-{}
-
-safe_VkPerformanceMarkerInfoINTEL::safe_VkPerformanceMarkerInfoINTEL(const safe_VkPerformanceMarkerInfoINTEL& src)
-{
- sType = src.sType;
- marker = src.marker;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPerformanceMarkerInfoINTEL& safe_VkPerformanceMarkerInfoINTEL::operator=(const safe_VkPerformanceMarkerInfoINTEL& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- marker = src.marker;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPerformanceMarkerInfoINTEL::~safe_VkPerformanceMarkerInfoINTEL()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPerformanceMarkerInfoINTEL::initialize(const VkPerformanceMarkerInfoINTEL* in_struct)
-{
- sType = in_struct->sType;
- marker = in_struct->marker;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPerformanceMarkerInfoINTEL::initialize(const safe_VkPerformanceMarkerInfoINTEL* src)
-{
- sType = src->sType;
- marker = src->marker;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPerformanceStreamMarkerInfoINTEL::safe_VkPerformanceStreamMarkerInfoINTEL(const VkPerformanceStreamMarkerInfoINTEL* in_struct) :
- sType(in_struct->sType),
- marker(in_struct->marker)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPerformanceStreamMarkerInfoINTEL::safe_VkPerformanceStreamMarkerInfoINTEL() :
- pNext(nullptr)
-{}
-
-safe_VkPerformanceStreamMarkerInfoINTEL::safe_VkPerformanceStreamMarkerInfoINTEL(const safe_VkPerformanceStreamMarkerInfoINTEL& src)
-{
- sType = src.sType;
- marker = src.marker;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPerformanceStreamMarkerInfoINTEL& safe_VkPerformanceStreamMarkerInfoINTEL::operator=(const safe_VkPerformanceStreamMarkerInfoINTEL& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- marker = src.marker;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPerformanceStreamMarkerInfoINTEL::~safe_VkPerformanceStreamMarkerInfoINTEL()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPerformanceStreamMarkerInfoINTEL::initialize(const VkPerformanceStreamMarkerInfoINTEL* in_struct)
-{
- sType = in_struct->sType;
- marker = in_struct->marker;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPerformanceStreamMarkerInfoINTEL::initialize(const safe_VkPerformanceStreamMarkerInfoINTEL* src)
-{
- sType = src->sType;
- marker = src->marker;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPerformanceOverrideInfoINTEL::safe_VkPerformanceOverrideInfoINTEL(const VkPerformanceOverrideInfoINTEL* in_struct) :
- sType(in_struct->sType),
- type(in_struct->type),
- enable(in_struct->enable),
- parameter(in_struct->parameter)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPerformanceOverrideInfoINTEL::safe_VkPerformanceOverrideInfoINTEL() :
- pNext(nullptr)
-{}
-
-safe_VkPerformanceOverrideInfoINTEL::safe_VkPerformanceOverrideInfoINTEL(const safe_VkPerformanceOverrideInfoINTEL& src)
-{
- sType = src.sType;
- type = src.type;
- enable = src.enable;
- parameter = src.parameter;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPerformanceOverrideInfoINTEL& safe_VkPerformanceOverrideInfoINTEL::operator=(const safe_VkPerformanceOverrideInfoINTEL& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- type = src.type;
- enable = src.enable;
- parameter = src.parameter;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPerformanceOverrideInfoINTEL::~safe_VkPerformanceOverrideInfoINTEL()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPerformanceOverrideInfoINTEL::initialize(const VkPerformanceOverrideInfoINTEL* in_struct)
-{
- sType = in_struct->sType;
- type = in_struct->type;
- enable = in_struct->enable;
- parameter = in_struct->parameter;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPerformanceOverrideInfoINTEL::initialize(const safe_VkPerformanceOverrideInfoINTEL* src)
-{
- sType = src->sType;
- type = src->type;
- enable = src->enable;
- parameter = src->parameter;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPerformanceConfigurationAcquireInfoINTEL::safe_VkPerformanceConfigurationAcquireInfoINTEL(const VkPerformanceConfigurationAcquireInfoINTEL* in_struct) :
- sType(in_struct->sType),
- type(in_struct->type)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPerformanceConfigurationAcquireInfoINTEL::safe_VkPerformanceConfigurationAcquireInfoINTEL() :
- pNext(nullptr)
-{}
-
-safe_VkPerformanceConfigurationAcquireInfoINTEL::safe_VkPerformanceConfigurationAcquireInfoINTEL(const safe_VkPerformanceConfigurationAcquireInfoINTEL& src)
-{
- sType = src.sType;
- type = src.type;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPerformanceConfigurationAcquireInfoINTEL& safe_VkPerformanceConfigurationAcquireInfoINTEL::operator=(const safe_VkPerformanceConfigurationAcquireInfoINTEL& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- type = src.type;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPerformanceConfigurationAcquireInfoINTEL::~safe_VkPerformanceConfigurationAcquireInfoINTEL()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPerformanceConfigurationAcquireInfoINTEL::initialize(const VkPerformanceConfigurationAcquireInfoINTEL* in_struct)
-{
- sType = in_struct->sType;
- type = in_struct->type;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPerformanceConfigurationAcquireInfoINTEL::initialize(const safe_VkPerformanceConfigurationAcquireInfoINTEL* src)
-{
- sType = src->sType;
- type = src->type;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDevicePCIBusInfoPropertiesEXT::safe_VkPhysicalDevicePCIBusInfoPropertiesEXT(const VkPhysicalDevicePCIBusInfoPropertiesEXT* in_struct) :
- sType(in_struct->sType),
- pciDomain(in_struct->pciDomain),
- pciBus(in_struct->pciBus),
- pciDevice(in_struct->pciDevice),
- pciFunction(in_struct->pciFunction)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDevicePCIBusInfoPropertiesEXT::safe_VkPhysicalDevicePCIBusInfoPropertiesEXT() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDevicePCIBusInfoPropertiesEXT::safe_VkPhysicalDevicePCIBusInfoPropertiesEXT(const safe_VkPhysicalDevicePCIBusInfoPropertiesEXT& src)
-{
- sType = src.sType;
- pciDomain = src.pciDomain;
- pciBus = src.pciBus;
- pciDevice = src.pciDevice;
- pciFunction = src.pciFunction;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDevicePCIBusInfoPropertiesEXT& safe_VkPhysicalDevicePCIBusInfoPropertiesEXT::operator=(const safe_VkPhysicalDevicePCIBusInfoPropertiesEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- pciDomain = src.pciDomain;
- pciBus = src.pciBus;
- pciDevice = src.pciDevice;
- pciFunction = src.pciFunction;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDevicePCIBusInfoPropertiesEXT::~safe_VkPhysicalDevicePCIBusInfoPropertiesEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDevicePCIBusInfoPropertiesEXT::initialize(const VkPhysicalDevicePCIBusInfoPropertiesEXT* in_struct)
-{
- sType = in_struct->sType;
- pciDomain = in_struct->pciDomain;
- pciBus = in_struct->pciBus;
- pciDevice = in_struct->pciDevice;
- pciFunction = in_struct->pciFunction;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDevicePCIBusInfoPropertiesEXT::initialize(const safe_VkPhysicalDevicePCIBusInfoPropertiesEXT* src)
-{
- sType = src->sType;
- pciDomain = src->pciDomain;
- pciBus = src->pciBus;
- pciDevice = src->pciDevice;
- pciFunction = src->pciFunction;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD::safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD(const VkDisplayNativeHdrSurfaceCapabilitiesAMD* in_struct) :
- sType(in_struct->sType),
- localDimmingSupport(in_struct->localDimmingSupport)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD::safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD() :
- pNext(nullptr)
-{}
-
-safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD::safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD(const safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD& src)
-{
- sType = src.sType;
- localDimmingSupport = src.localDimmingSupport;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD& safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD::operator=(const safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- localDimmingSupport = src.localDimmingSupport;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD::~safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD::initialize(const VkDisplayNativeHdrSurfaceCapabilitiesAMD* in_struct)
-{
- sType = in_struct->sType;
- localDimmingSupport = in_struct->localDimmingSupport;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD::initialize(const safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD* src)
-{
- sType = src->sType;
- localDimmingSupport = src->localDimmingSupport;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkSwapchainDisplayNativeHdrCreateInfoAMD::safe_VkSwapchainDisplayNativeHdrCreateInfoAMD(const VkSwapchainDisplayNativeHdrCreateInfoAMD* in_struct) :
- sType(in_struct->sType),
- localDimmingEnable(in_struct->localDimmingEnable)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkSwapchainDisplayNativeHdrCreateInfoAMD::safe_VkSwapchainDisplayNativeHdrCreateInfoAMD() :
- pNext(nullptr)
-{}
-
-safe_VkSwapchainDisplayNativeHdrCreateInfoAMD::safe_VkSwapchainDisplayNativeHdrCreateInfoAMD(const safe_VkSwapchainDisplayNativeHdrCreateInfoAMD& src)
-{
- sType = src.sType;
- localDimmingEnable = src.localDimmingEnable;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkSwapchainDisplayNativeHdrCreateInfoAMD& safe_VkSwapchainDisplayNativeHdrCreateInfoAMD::operator=(const safe_VkSwapchainDisplayNativeHdrCreateInfoAMD& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- localDimmingEnable = src.localDimmingEnable;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkSwapchainDisplayNativeHdrCreateInfoAMD::~safe_VkSwapchainDisplayNativeHdrCreateInfoAMD()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkSwapchainDisplayNativeHdrCreateInfoAMD::initialize(const VkSwapchainDisplayNativeHdrCreateInfoAMD* in_struct)
-{
- sType = in_struct->sType;
- localDimmingEnable = in_struct->localDimmingEnable;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkSwapchainDisplayNativeHdrCreateInfoAMD::initialize(const safe_VkSwapchainDisplayNativeHdrCreateInfoAMD* src)
-{
- sType = src->sType;
- localDimmingEnable = src->localDimmingEnable;
- pNext = SafePnextCopy(src->pNext);
-}
-#ifdef VK_USE_PLATFORM_FUCHSIA
-
-
-safe_VkImagePipeSurfaceCreateInfoFUCHSIA::safe_VkImagePipeSurfaceCreateInfoFUCHSIA(const VkImagePipeSurfaceCreateInfoFUCHSIA* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- imagePipeHandle(in_struct->imagePipeHandle)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkImagePipeSurfaceCreateInfoFUCHSIA::safe_VkImagePipeSurfaceCreateInfoFUCHSIA() :
- pNext(nullptr)
-{}
-
-safe_VkImagePipeSurfaceCreateInfoFUCHSIA::safe_VkImagePipeSurfaceCreateInfoFUCHSIA(const safe_VkImagePipeSurfaceCreateInfoFUCHSIA& src)
-{
- sType = src.sType;
- flags = src.flags;
- imagePipeHandle = src.imagePipeHandle;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkImagePipeSurfaceCreateInfoFUCHSIA& safe_VkImagePipeSurfaceCreateInfoFUCHSIA::operator=(const safe_VkImagePipeSurfaceCreateInfoFUCHSIA& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- imagePipeHandle = src.imagePipeHandle;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkImagePipeSurfaceCreateInfoFUCHSIA::~safe_VkImagePipeSurfaceCreateInfoFUCHSIA()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkImagePipeSurfaceCreateInfoFUCHSIA::initialize(const VkImagePipeSurfaceCreateInfoFUCHSIA* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- imagePipeHandle = in_struct->imagePipeHandle;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkImagePipeSurfaceCreateInfoFUCHSIA::initialize(const safe_VkImagePipeSurfaceCreateInfoFUCHSIA* src)
-{
- sType = src->sType;
- flags = src->flags;
- imagePipeHandle = src->imagePipeHandle;
- pNext = SafePnextCopy(src->pNext);
-}
-#endif // VK_USE_PLATFORM_FUCHSIA
-
-#ifdef VK_USE_PLATFORM_METAL_EXT
-
-
-safe_VkMetalSurfaceCreateInfoEXT::safe_VkMetalSurfaceCreateInfoEXT(const VkMetalSurfaceCreateInfoEXT* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- pLayer(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pLayer) {
- pLayer = new CAMetalLayer(*in_struct->pLayer);
- }
-}
-
-safe_VkMetalSurfaceCreateInfoEXT::safe_VkMetalSurfaceCreateInfoEXT() :
- pNext(nullptr),
- pLayer(nullptr)
-{}
-
-safe_VkMetalSurfaceCreateInfoEXT::safe_VkMetalSurfaceCreateInfoEXT(const safe_VkMetalSurfaceCreateInfoEXT& src)
-{
- sType = src.sType;
- flags = src.flags;
- pLayer = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pLayer) {
- pLayer = new CAMetalLayer(*src.pLayer);
- }
-}
-
-safe_VkMetalSurfaceCreateInfoEXT& safe_VkMetalSurfaceCreateInfoEXT::operator=(const safe_VkMetalSurfaceCreateInfoEXT& src)
-{
- if (&src == this) return *this;
-
- if (pLayer)
- delete pLayer;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- pLayer = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pLayer) {
- pLayer = new CAMetalLayer(*src.pLayer);
- }
-
- return *this;
-}
-
-safe_VkMetalSurfaceCreateInfoEXT::~safe_VkMetalSurfaceCreateInfoEXT()
-{
- if (pLayer)
- delete pLayer;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkMetalSurfaceCreateInfoEXT::initialize(const VkMetalSurfaceCreateInfoEXT* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- pLayer = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pLayer) {
- pLayer = new CAMetalLayer(*in_struct->pLayer);
- }
-}
-
-void safe_VkMetalSurfaceCreateInfoEXT::initialize(const safe_VkMetalSurfaceCreateInfoEXT* src)
-{
- sType = src->sType;
- flags = src->flags;
- pLayer = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (src->pLayer) {
- pLayer = new CAMetalLayer(*src->pLayer);
- }
-}
-#endif // VK_USE_PLATFORM_METAL_EXT
-
-
-safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT::safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT(const VkPhysicalDeviceFragmentDensityMapFeaturesEXT* in_struct) :
- sType(in_struct->sType),
- fragmentDensityMap(in_struct->fragmentDensityMap),
- fragmentDensityMapDynamic(in_struct->fragmentDensityMapDynamic),
- fragmentDensityMapNonSubsampledImages(in_struct->fragmentDensityMapNonSubsampledImages)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT::safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT::safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT(const safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT& src)
-{
- sType = src.sType;
- fragmentDensityMap = src.fragmentDensityMap;
- fragmentDensityMapDynamic = src.fragmentDensityMapDynamic;
- fragmentDensityMapNonSubsampledImages = src.fragmentDensityMapNonSubsampledImages;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT& safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT::operator=(const safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- fragmentDensityMap = src.fragmentDensityMap;
- fragmentDensityMapDynamic = src.fragmentDensityMapDynamic;
- fragmentDensityMapNonSubsampledImages = src.fragmentDensityMapNonSubsampledImages;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT::~safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT::initialize(const VkPhysicalDeviceFragmentDensityMapFeaturesEXT* in_struct)
-{
- sType = in_struct->sType;
- fragmentDensityMap = in_struct->fragmentDensityMap;
- fragmentDensityMapDynamic = in_struct->fragmentDensityMapDynamic;
- fragmentDensityMapNonSubsampledImages = in_struct->fragmentDensityMapNonSubsampledImages;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT::initialize(const safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT* src)
-{
- sType = src->sType;
- fragmentDensityMap = src->fragmentDensityMap;
- fragmentDensityMapDynamic = src->fragmentDensityMapDynamic;
- fragmentDensityMapNonSubsampledImages = src->fragmentDensityMapNonSubsampledImages;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT::safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT(const VkPhysicalDeviceFragmentDensityMapPropertiesEXT* in_struct) :
- sType(in_struct->sType),
- minFragmentDensityTexelSize(in_struct->minFragmentDensityTexelSize),
- maxFragmentDensityTexelSize(in_struct->maxFragmentDensityTexelSize),
- fragmentDensityInvocations(in_struct->fragmentDensityInvocations)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT::safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT::safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT(const safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT& src)
-{
- sType = src.sType;
- minFragmentDensityTexelSize = src.minFragmentDensityTexelSize;
- maxFragmentDensityTexelSize = src.maxFragmentDensityTexelSize;
- fragmentDensityInvocations = src.fragmentDensityInvocations;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT& safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT::operator=(const safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- minFragmentDensityTexelSize = src.minFragmentDensityTexelSize;
- maxFragmentDensityTexelSize = src.maxFragmentDensityTexelSize;
- fragmentDensityInvocations = src.fragmentDensityInvocations;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT::~safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT::initialize(const VkPhysicalDeviceFragmentDensityMapPropertiesEXT* in_struct)
-{
- sType = in_struct->sType;
- minFragmentDensityTexelSize = in_struct->minFragmentDensityTexelSize;
- maxFragmentDensityTexelSize = in_struct->maxFragmentDensityTexelSize;
- fragmentDensityInvocations = in_struct->fragmentDensityInvocations;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT::initialize(const safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT* src)
-{
- sType = src->sType;
- minFragmentDensityTexelSize = src->minFragmentDensityTexelSize;
- maxFragmentDensityTexelSize = src->maxFragmentDensityTexelSize;
- fragmentDensityInvocations = src->fragmentDensityInvocations;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkRenderPassFragmentDensityMapCreateInfoEXT::safe_VkRenderPassFragmentDensityMapCreateInfoEXT(const VkRenderPassFragmentDensityMapCreateInfoEXT* in_struct) :
- sType(in_struct->sType),
- fragmentDensityMapAttachment(in_struct->fragmentDensityMapAttachment)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkRenderPassFragmentDensityMapCreateInfoEXT::safe_VkRenderPassFragmentDensityMapCreateInfoEXT() :
- pNext(nullptr)
-{}
-
-safe_VkRenderPassFragmentDensityMapCreateInfoEXT::safe_VkRenderPassFragmentDensityMapCreateInfoEXT(const safe_VkRenderPassFragmentDensityMapCreateInfoEXT& src)
-{
- sType = src.sType;
- fragmentDensityMapAttachment = src.fragmentDensityMapAttachment;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkRenderPassFragmentDensityMapCreateInfoEXT& safe_VkRenderPassFragmentDensityMapCreateInfoEXT::operator=(const safe_VkRenderPassFragmentDensityMapCreateInfoEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- fragmentDensityMapAttachment = src.fragmentDensityMapAttachment;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkRenderPassFragmentDensityMapCreateInfoEXT::~safe_VkRenderPassFragmentDensityMapCreateInfoEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkRenderPassFragmentDensityMapCreateInfoEXT::initialize(const VkRenderPassFragmentDensityMapCreateInfoEXT* in_struct)
-{
- sType = in_struct->sType;
- fragmentDensityMapAttachment = in_struct->fragmentDensityMapAttachment;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkRenderPassFragmentDensityMapCreateInfoEXT::initialize(const safe_VkRenderPassFragmentDensityMapCreateInfoEXT* src)
-{
- sType = src->sType;
- fragmentDensityMapAttachment = src->fragmentDensityMapAttachment;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT::safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT(const VkPhysicalDeviceScalarBlockLayoutFeaturesEXT* in_struct) :
- sType(in_struct->sType),
- scalarBlockLayout(in_struct->scalarBlockLayout)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT::safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT::safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT(const safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT& src)
-{
- sType = src.sType;
- scalarBlockLayout = src.scalarBlockLayout;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT& safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT::operator=(const safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- scalarBlockLayout = src.scalarBlockLayout;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT::~safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT::initialize(const VkPhysicalDeviceScalarBlockLayoutFeaturesEXT* in_struct)
-{
- sType = in_struct->sType;
- scalarBlockLayout = in_struct->scalarBlockLayout;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT::initialize(const safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT* src)
-{
- sType = src->sType;
- scalarBlockLayout = src->scalarBlockLayout;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT::safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT(const VkPhysicalDeviceSubgroupSizeControlFeaturesEXT* in_struct) :
- sType(in_struct->sType),
- subgroupSizeControl(in_struct->subgroupSizeControl),
- computeFullSubgroups(in_struct->computeFullSubgroups)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT::safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT::safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT(const safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT& src)
-{
- sType = src.sType;
- subgroupSizeControl = src.subgroupSizeControl;
- computeFullSubgroups = src.computeFullSubgroups;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT& safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT::operator=(const safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- subgroupSizeControl = src.subgroupSizeControl;
- computeFullSubgroups = src.computeFullSubgroups;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT::~safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT::initialize(const VkPhysicalDeviceSubgroupSizeControlFeaturesEXT* in_struct)
-{
- sType = in_struct->sType;
- subgroupSizeControl = in_struct->subgroupSizeControl;
- computeFullSubgroups = in_struct->computeFullSubgroups;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT::initialize(const safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT* src)
-{
- sType = src->sType;
- subgroupSizeControl = src->subgroupSizeControl;
- computeFullSubgroups = src->computeFullSubgroups;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT::safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT(const VkPhysicalDeviceSubgroupSizeControlPropertiesEXT* in_struct) :
- sType(in_struct->sType),
- minSubgroupSize(in_struct->minSubgroupSize),
- maxSubgroupSize(in_struct->maxSubgroupSize),
- maxComputeWorkgroupSubgroups(in_struct->maxComputeWorkgroupSubgroups),
- requiredSubgroupSizeStages(in_struct->requiredSubgroupSizeStages)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT::safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT::safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT(const safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT& src)
-{
- sType = src.sType;
- minSubgroupSize = src.minSubgroupSize;
- maxSubgroupSize = src.maxSubgroupSize;
- maxComputeWorkgroupSubgroups = src.maxComputeWorkgroupSubgroups;
- requiredSubgroupSizeStages = src.requiredSubgroupSizeStages;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT& safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT::operator=(const safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- minSubgroupSize = src.minSubgroupSize;
- maxSubgroupSize = src.maxSubgroupSize;
- maxComputeWorkgroupSubgroups = src.maxComputeWorkgroupSubgroups;
- requiredSubgroupSizeStages = src.requiredSubgroupSizeStages;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT::~safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT::initialize(const VkPhysicalDeviceSubgroupSizeControlPropertiesEXT* in_struct)
-{
- sType = in_struct->sType;
- minSubgroupSize = in_struct->minSubgroupSize;
- maxSubgroupSize = in_struct->maxSubgroupSize;
- maxComputeWorkgroupSubgroups = in_struct->maxComputeWorkgroupSubgroups;
- requiredSubgroupSizeStages = in_struct->requiredSubgroupSizeStages;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT::initialize(const safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT* src)
-{
- sType = src->sType;
- minSubgroupSize = src->minSubgroupSize;
- maxSubgroupSize = src->maxSubgroupSize;
- maxComputeWorkgroupSubgroups = src->maxComputeWorkgroupSubgroups;
- requiredSubgroupSizeStages = src->requiredSubgroupSizeStages;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT::safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT(const VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT* in_struct) :
- sType(in_struct->sType),
- requiredSubgroupSize(in_struct->requiredSubgroupSize)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT::safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT() :
- pNext(nullptr)
-{}
-
-safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT::safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT(const safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT& src)
-{
- sType = src.sType;
- requiredSubgroupSize = src.requiredSubgroupSize;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT& safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT::operator=(const safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- requiredSubgroupSize = src.requiredSubgroupSize;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT::~safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT::initialize(const VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT* in_struct)
-{
- sType = in_struct->sType;
- requiredSubgroupSize = in_struct->requiredSubgroupSize;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT::initialize(const safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT* src)
-{
- sType = src->sType;
- requiredSubgroupSize = src->requiredSubgroupSize;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceShaderCoreProperties2AMD::safe_VkPhysicalDeviceShaderCoreProperties2AMD(const VkPhysicalDeviceShaderCoreProperties2AMD* in_struct) :
- sType(in_struct->sType),
- shaderCoreFeatures(in_struct->shaderCoreFeatures),
- activeComputeUnitCount(in_struct->activeComputeUnitCount)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceShaderCoreProperties2AMD::safe_VkPhysicalDeviceShaderCoreProperties2AMD() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceShaderCoreProperties2AMD::safe_VkPhysicalDeviceShaderCoreProperties2AMD(const safe_VkPhysicalDeviceShaderCoreProperties2AMD& src)
-{
- sType = src.sType;
- shaderCoreFeatures = src.shaderCoreFeatures;
- activeComputeUnitCount = src.activeComputeUnitCount;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceShaderCoreProperties2AMD& safe_VkPhysicalDeviceShaderCoreProperties2AMD::operator=(const safe_VkPhysicalDeviceShaderCoreProperties2AMD& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- shaderCoreFeatures = src.shaderCoreFeatures;
- activeComputeUnitCount = src.activeComputeUnitCount;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceShaderCoreProperties2AMD::~safe_VkPhysicalDeviceShaderCoreProperties2AMD()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceShaderCoreProperties2AMD::initialize(const VkPhysicalDeviceShaderCoreProperties2AMD* in_struct)
-{
- sType = in_struct->sType;
- shaderCoreFeatures = in_struct->shaderCoreFeatures;
- activeComputeUnitCount = in_struct->activeComputeUnitCount;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceShaderCoreProperties2AMD::initialize(const safe_VkPhysicalDeviceShaderCoreProperties2AMD* src)
-{
- sType = src->sType;
- shaderCoreFeatures = src->shaderCoreFeatures;
- activeComputeUnitCount = src->activeComputeUnitCount;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD::safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD(const VkPhysicalDeviceCoherentMemoryFeaturesAMD* in_struct) :
- sType(in_struct->sType),
- deviceCoherentMemory(in_struct->deviceCoherentMemory)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD::safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD::safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD(const safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD& src)
-{
- sType = src.sType;
- deviceCoherentMemory = src.deviceCoherentMemory;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD& safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD::operator=(const safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- deviceCoherentMemory = src.deviceCoherentMemory;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD::~safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD::initialize(const VkPhysicalDeviceCoherentMemoryFeaturesAMD* in_struct)
-{
- sType = in_struct->sType;
- deviceCoherentMemory = in_struct->deviceCoherentMemory;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD::initialize(const safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD* src)
-{
- sType = src->sType;
- deviceCoherentMemory = src->deviceCoherentMemory;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT::safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT(const VkPhysicalDeviceMemoryBudgetPropertiesEXT* in_struct) :
- sType(in_struct->sType)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- for (uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i) {
- heapBudget[i] = in_struct->heapBudget[i];
- }
- for (uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i) {
- heapUsage[i] = in_struct->heapUsage[i];
- }
-}
-
-safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT::safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT::safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT(const safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT& src)
-{
- sType = src.sType;
- pNext = SafePnextCopy(src.pNext);
- for (uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i) {
- heapBudget[i] = src.heapBudget[i];
- }
- for (uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i) {
- heapUsage[i] = src.heapUsage[i];
- }
-}
-
-safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT& safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT::operator=(const safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- pNext = SafePnextCopy(src.pNext);
- for (uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i) {
- heapBudget[i] = src.heapBudget[i];
- }
- for (uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i) {
- heapUsage[i] = src.heapUsage[i];
- }
-
- return *this;
-}
-
-safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT::~safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT::initialize(const VkPhysicalDeviceMemoryBudgetPropertiesEXT* in_struct)
-{
- sType = in_struct->sType;
- pNext = SafePnextCopy(in_struct->pNext);
- for (uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i) {
- heapBudget[i] = in_struct->heapBudget[i];
- }
- for (uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i) {
- heapUsage[i] = in_struct->heapUsage[i];
- }
-}
-
-void safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT::initialize(const safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT* src)
-{
- sType = src->sType;
- pNext = SafePnextCopy(src->pNext);
- for (uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i) {
- heapBudget[i] = src->heapBudget[i];
- }
- for (uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i) {
- heapUsage[i] = src->heapUsage[i];
- }
-}
-
-safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT::safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT(const VkPhysicalDeviceMemoryPriorityFeaturesEXT* in_struct) :
- sType(in_struct->sType),
- memoryPriority(in_struct->memoryPriority)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT::safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT::safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT(const safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT& src)
-{
- sType = src.sType;
- memoryPriority = src.memoryPriority;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT& safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT::operator=(const safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- memoryPriority = src.memoryPriority;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT::~safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT::initialize(const VkPhysicalDeviceMemoryPriorityFeaturesEXT* in_struct)
-{
- sType = in_struct->sType;
- memoryPriority = in_struct->memoryPriority;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT::initialize(const safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT* src)
-{
- sType = src->sType;
- memoryPriority = src->memoryPriority;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkMemoryPriorityAllocateInfoEXT::safe_VkMemoryPriorityAllocateInfoEXT(const VkMemoryPriorityAllocateInfoEXT* in_struct) :
- sType(in_struct->sType),
- priority(in_struct->priority)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkMemoryPriorityAllocateInfoEXT::safe_VkMemoryPriorityAllocateInfoEXT() :
- pNext(nullptr)
-{}
-
-safe_VkMemoryPriorityAllocateInfoEXT::safe_VkMemoryPriorityAllocateInfoEXT(const safe_VkMemoryPriorityAllocateInfoEXT& src)
-{
- sType = src.sType;
- priority = src.priority;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkMemoryPriorityAllocateInfoEXT& safe_VkMemoryPriorityAllocateInfoEXT::operator=(const safe_VkMemoryPriorityAllocateInfoEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- priority = src.priority;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkMemoryPriorityAllocateInfoEXT::~safe_VkMemoryPriorityAllocateInfoEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkMemoryPriorityAllocateInfoEXT::initialize(const VkMemoryPriorityAllocateInfoEXT* in_struct)
-{
- sType = in_struct->sType;
- priority = in_struct->priority;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkMemoryPriorityAllocateInfoEXT::initialize(const safe_VkMemoryPriorityAllocateInfoEXT* src)
-{
- sType = src->sType;
- priority = src->priority;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV::safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV(const VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV* in_struct) :
- sType(in_struct->sType),
- dedicatedAllocationImageAliasing(in_struct->dedicatedAllocationImageAliasing)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV::safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV::safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV(const safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV& src)
-{
- sType = src.sType;
- dedicatedAllocationImageAliasing = src.dedicatedAllocationImageAliasing;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV& safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV::operator=(const safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- dedicatedAllocationImageAliasing = src.dedicatedAllocationImageAliasing;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV::~safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV::initialize(const VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV* in_struct)
-{
- sType = in_struct->sType;
- dedicatedAllocationImageAliasing = in_struct->dedicatedAllocationImageAliasing;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV::initialize(const safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV* src)
-{
- sType = src->sType;
- dedicatedAllocationImageAliasing = src->dedicatedAllocationImageAliasing;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT::safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT(const VkPhysicalDeviceBufferDeviceAddressFeaturesEXT* in_struct) :
- sType(in_struct->sType),
- bufferDeviceAddress(in_struct->bufferDeviceAddress),
- bufferDeviceAddressCaptureReplay(in_struct->bufferDeviceAddressCaptureReplay),
- bufferDeviceAddressMultiDevice(in_struct->bufferDeviceAddressMultiDevice)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT::safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT::safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT(const safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT& src)
-{
- sType = src.sType;
- bufferDeviceAddress = src.bufferDeviceAddress;
- bufferDeviceAddressCaptureReplay = src.bufferDeviceAddressCaptureReplay;
- bufferDeviceAddressMultiDevice = src.bufferDeviceAddressMultiDevice;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT& safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT::operator=(const safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- bufferDeviceAddress = src.bufferDeviceAddress;
- bufferDeviceAddressCaptureReplay = src.bufferDeviceAddressCaptureReplay;
- bufferDeviceAddressMultiDevice = src.bufferDeviceAddressMultiDevice;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT::~safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT::initialize(const VkPhysicalDeviceBufferDeviceAddressFeaturesEXT* in_struct)
-{
- sType = in_struct->sType;
- bufferDeviceAddress = in_struct->bufferDeviceAddress;
- bufferDeviceAddressCaptureReplay = in_struct->bufferDeviceAddressCaptureReplay;
- bufferDeviceAddressMultiDevice = in_struct->bufferDeviceAddressMultiDevice;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT::initialize(const safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT* src)
-{
- sType = src->sType;
- bufferDeviceAddress = src->bufferDeviceAddress;
- bufferDeviceAddressCaptureReplay = src->bufferDeviceAddressCaptureReplay;
- bufferDeviceAddressMultiDevice = src->bufferDeviceAddressMultiDevice;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkBufferDeviceAddressInfoEXT::safe_VkBufferDeviceAddressInfoEXT(const VkBufferDeviceAddressInfoEXT* in_struct) :
- sType(in_struct->sType),
- buffer(in_struct->buffer)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkBufferDeviceAddressInfoEXT::safe_VkBufferDeviceAddressInfoEXT() :
- pNext(nullptr)
-{}
-
-safe_VkBufferDeviceAddressInfoEXT::safe_VkBufferDeviceAddressInfoEXT(const safe_VkBufferDeviceAddressInfoEXT& src)
-{
- sType = src.sType;
- buffer = src.buffer;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkBufferDeviceAddressInfoEXT& safe_VkBufferDeviceAddressInfoEXT::operator=(const safe_VkBufferDeviceAddressInfoEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- buffer = src.buffer;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkBufferDeviceAddressInfoEXT::~safe_VkBufferDeviceAddressInfoEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkBufferDeviceAddressInfoEXT::initialize(const VkBufferDeviceAddressInfoEXT* in_struct)
-{
- sType = in_struct->sType;
- buffer = in_struct->buffer;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkBufferDeviceAddressInfoEXT::initialize(const safe_VkBufferDeviceAddressInfoEXT* src)
-{
- sType = src->sType;
- buffer = src->buffer;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkBufferDeviceAddressCreateInfoEXT::safe_VkBufferDeviceAddressCreateInfoEXT(const VkBufferDeviceAddressCreateInfoEXT* in_struct) :
- sType(in_struct->sType),
- deviceAddress(in_struct->deviceAddress)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkBufferDeviceAddressCreateInfoEXT::safe_VkBufferDeviceAddressCreateInfoEXT() :
- pNext(nullptr)
-{}
-
-safe_VkBufferDeviceAddressCreateInfoEXT::safe_VkBufferDeviceAddressCreateInfoEXT(const safe_VkBufferDeviceAddressCreateInfoEXT& src)
-{
- sType = src.sType;
- deviceAddress = src.deviceAddress;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkBufferDeviceAddressCreateInfoEXT& safe_VkBufferDeviceAddressCreateInfoEXT::operator=(const safe_VkBufferDeviceAddressCreateInfoEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- deviceAddress = src.deviceAddress;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkBufferDeviceAddressCreateInfoEXT::~safe_VkBufferDeviceAddressCreateInfoEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkBufferDeviceAddressCreateInfoEXT::initialize(const VkBufferDeviceAddressCreateInfoEXT* in_struct)
-{
- sType = in_struct->sType;
- deviceAddress = in_struct->deviceAddress;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkBufferDeviceAddressCreateInfoEXT::initialize(const safe_VkBufferDeviceAddressCreateInfoEXT* src)
-{
- sType = src->sType;
- deviceAddress = src->deviceAddress;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkImageStencilUsageCreateInfoEXT::safe_VkImageStencilUsageCreateInfoEXT(const VkImageStencilUsageCreateInfoEXT* in_struct) :
- sType(in_struct->sType),
- stencilUsage(in_struct->stencilUsage)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkImageStencilUsageCreateInfoEXT::safe_VkImageStencilUsageCreateInfoEXT() :
- pNext(nullptr)
-{}
-
-safe_VkImageStencilUsageCreateInfoEXT::safe_VkImageStencilUsageCreateInfoEXT(const safe_VkImageStencilUsageCreateInfoEXT& src)
-{
- sType = src.sType;
- stencilUsage = src.stencilUsage;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkImageStencilUsageCreateInfoEXT& safe_VkImageStencilUsageCreateInfoEXT::operator=(const safe_VkImageStencilUsageCreateInfoEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- stencilUsage = src.stencilUsage;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkImageStencilUsageCreateInfoEXT::~safe_VkImageStencilUsageCreateInfoEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkImageStencilUsageCreateInfoEXT::initialize(const VkImageStencilUsageCreateInfoEXT* in_struct)
-{
- sType = in_struct->sType;
- stencilUsage = in_struct->stencilUsage;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkImageStencilUsageCreateInfoEXT::initialize(const safe_VkImageStencilUsageCreateInfoEXT* src)
-{
- sType = src->sType;
- stencilUsage = src->stencilUsage;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkValidationFeaturesEXT::safe_VkValidationFeaturesEXT(const VkValidationFeaturesEXT* in_struct) :
- sType(in_struct->sType),
- enabledValidationFeatureCount(in_struct->enabledValidationFeatureCount),
- pEnabledValidationFeatures(nullptr),
- disabledValidationFeatureCount(in_struct->disabledValidationFeatureCount),
- pDisabledValidationFeatures(nullptr)
-{
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pEnabledValidationFeatures) {
- pEnabledValidationFeatures = new VkValidationFeatureEnableEXT[in_struct->enabledValidationFeatureCount];
- memcpy ((void *)pEnabledValidationFeatures, (void *)in_struct->pEnabledValidationFeatures, sizeof(VkValidationFeatureEnableEXT)*in_struct->enabledValidationFeatureCount);
- }
- if (in_struct->pDisabledValidationFeatures) {
- pDisabledValidationFeatures = new VkValidationFeatureDisableEXT[in_struct->disabledValidationFeatureCount];
- memcpy ((void *)pDisabledValidationFeatures, (void *)in_struct->pDisabledValidationFeatures, sizeof(VkValidationFeatureDisableEXT)*in_struct->disabledValidationFeatureCount);
- }
-}
-
-safe_VkValidationFeaturesEXT::safe_VkValidationFeaturesEXT() :
- pNext(nullptr),
- pEnabledValidationFeatures(nullptr),
- pDisabledValidationFeatures(nullptr)
-{}
-
-safe_VkValidationFeaturesEXT::safe_VkValidationFeaturesEXT(const safe_VkValidationFeaturesEXT& src)
-{
- sType = src.sType;
- enabledValidationFeatureCount = src.enabledValidationFeatureCount;
- pEnabledValidationFeatures = nullptr;
- disabledValidationFeatureCount = src.disabledValidationFeatureCount;
- pDisabledValidationFeatures = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pEnabledValidationFeatures) {
- pEnabledValidationFeatures = new VkValidationFeatureEnableEXT[src.enabledValidationFeatureCount];
- memcpy ((void *)pEnabledValidationFeatures, (void *)src.pEnabledValidationFeatures, sizeof(VkValidationFeatureEnableEXT)*src.enabledValidationFeatureCount);
- }
- if (src.pDisabledValidationFeatures) {
- pDisabledValidationFeatures = new VkValidationFeatureDisableEXT[src.disabledValidationFeatureCount];
- memcpy ((void *)pDisabledValidationFeatures, (void *)src.pDisabledValidationFeatures, sizeof(VkValidationFeatureDisableEXT)*src.disabledValidationFeatureCount);
- }
-}
-
-safe_VkValidationFeaturesEXT& safe_VkValidationFeaturesEXT::operator=(const safe_VkValidationFeaturesEXT& src)
-{
- if (&src == this) return *this;
-
- if (pEnabledValidationFeatures)
- delete[] pEnabledValidationFeatures;
- if (pDisabledValidationFeatures)
- delete[] pDisabledValidationFeatures;
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- enabledValidationFeatureCount = src.enabledValidationFeatureCount;
- pEnabledValidationFeatures = nullptr;
- disabledValidationFeatureCount = src.disabledValidationFeatureCount;
- pDisabledValidationFeatures = nullptr;
- pNext = SafePnextCopy(src.pNext);
- if (src.pEnabledValidationFeatures) {
- pEnabledValidationFeatures = new VkValidationFeatureEnableEXT[src.enabledValidationFeatureCount];
- memcpy ((void *)pEnabledValidationFeatures, (void *)src.pEnabledValidationFeatures, sizeof(VkValidationFeatureEnableEXT)*src.enabledValidationFeatureCount);
- }
- if (src.pDisabledValidationFeatures) {
- pDisabledValidationFeatures = new VkValidationFeatureDisableEXT[src.disabledValidationFeatureCount];
- memcpy ((void *)pDisabledValidationFeatures, (void *)src.pDisabledValidationFeatures, sizeof(VkValidationFeatureDisableEXT)*src.disabledValidationFeatureCount);
- }
-
- return *this;
-}
-
-safe_VkValidationFeaturesEXT::~safe_VkValidationFeaturesEXT()
-{
- if (pEnabledValidationFeatures)
- delete[] pEnabledValidationFeatures;
- if (pDisabledValidationFeatures)
- delete[] pDisabledValidationFeatures;
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkValidationFeaturesEXT::initialize(const VkValidationFeaturesEXT* in_struct)
-{
- sType = in_struct->sType;
- enabledValidationFeatureCount = in_struct->enabledValidationFeatureCount;
- pEnabledValidationFeatures = nullptr;
- disabledValidationFeatureCount = in_struct->disabledValidationFeatureCount;
- pDisabledValidationFeatures = nullptr;
- pNext = SafePnextCopy(in_struct->pNext);
- if (in_struct->pEnabledValidationFeatures) {
- pEnabledValidationFeatures = new VkValidationFeatureEnableEXT[in_struct->enabledValidationFeatureCount];
- memcpy ((void *)pEnabledValidationFeatures, (void *)in_struct->pEnabledValidationFeatures, sizeof(VkValidationFeatureEnableEXT)*in_struct->enabledValidationFeatureCount);
- }
- if (in_struct->pDisabledValidationFeatures) {
- pDisabledValidationFeatures = new VkValidationFeatureDisableEXT[in_struct->disabledValidationFeatureCount];
- memcpy ((void *)pDisabledValidationFeatures, (void *)in_struct->pDisabledValidationFeatures, sizeof(VkValidationFeatureDisableEXT)*in_struct->disabledValidationFeatureCount);
- }
-}
-
-void safe_VkValidationFeaturesEXT::initialize(const safe_VkValidationFeaturesEXT* src)
-{
- sType = src->sType;
- enabledValidationFeatureCount = src->enabledValidationFeatureCount;
- pEnabledValidationFeatures = nullptr;
- disabledValidationFeatureCount = src->disabledValidationFeatureCount;
- pDisabledValidationFeatures = nullptr;
- pNext = SafePnextCopy(src->pNext);
- if (src->pEnabledValidationFeatures) {
- pEnabledValidationFeatures = new VkValidationFeatureEnableEXT[src->enabledValidationFeatureCount];
- memcpy ((void *)pEnabledValidationFeatures, (void *)src->pEnabledValidationFeatures, sizeof(VkValidationFeatureEnableEXT)*src->enabledValidationFeatureCount);
- }
- if (src->pDisabledValidationFeatures) {
- pDisabledValidationFeatures = new VkValidationFeatureDisableEXT[src->disabledValidationFeatureCount];
- memcpy ((void *)pDisabledValidationFeatures, (void *)src->pDisabledValidationFeatures, sizeof(VkValidationFeatureDisableEXT)*src->disabledValidationFeatureCount);
- }
-}
-
-safe_VkCooperativeMatrixPropertiesNV::safe_VkCooperativeMatrixPropertiesNV(const VkCooperativeMatrixPropertiesNV* in_struct) :
- sType(in_struct->sType),
- MSize(in_struct->MSize),
- NSize(in_struct->NSize),
- KSize(in_struct->KSize),
- AType(in_struct->AType),
- BType(in_struct->BType),
- CType(in_struct->CType),
- DType(in_struct->DType),
- scope(in_struct->scope)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkCooperativeMatrixPropertiesNV::safe_VkCooperativeMatrixPropertiesNV() :
- pNext(nullptr)
-{}
-
-safe_VkCooperativeMatrixPropertiesNV::safe_VkCooperativeMatrixPropertiesNV(const safe_VkCooperativeMatrixPropertiesNV& src)
-{
- sType = src.sType;
- MSize = src.MSize;
- NSize = src.NSize;
- KSize = src.KSize;
- AType = src.AType;
- BType = src.BType;
- CType = src.CType;
- DType = src.DType;
- scope = src.scope;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkCooperativeMatrixPropertiesNV& safe_VkCooperativeMatrixPropertiesNV::operator=(const safe_VkCooperativeMatrixPropertiesNV& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- MSize = src.MSize;
- NSize = src.NSize;
- KSize = src.KSize;
- AType = src.AType;
- BType = src.BType;
- CType = src.CType;
- DType = src.DType;
- scope = src.scope;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkCooperativeMatrixPropertiesNV::~safe_VkCooperativeMatrixPropertiesNV()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkCooperativeMatrixPropertiesNV::initialize(const VkCooperativeMatrixPropertiesNV* in_struct)
-{
- sType = in_struct->sType;
- MSize = in_struct->MSize;
- NSize = in_struct->NSize;
- KSize = in_struct->KSize;
- AType = in_struct->AType;
- BType = in_struct->BType;
- CType = in_struct->CType;
- DType = in_struct->DType;
- scope = in_struct->scope;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkCooperativeMatrixPropertiesNV::initialize(const safe_VkCooperativeMatrixPropertiesNV* src)
-{
- sType = src->sType;
- MSize = src->MSize;
- NSize = src->NSize;
- KSize = src->KSize;
- AType = src->AType;
- BType = src->BType;
- CType = src->CType;
- DType = src->DType;
- scope = src->scope;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV::safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV(const VkPhysicalDeviceCooperativeMatrixFeaturesNV* in_struct) :
- sType(in_struct->sType),
- cooperativeMatrix(in_struct->cooperativeMatrix),
- cooperativeMatrixRobustBufferAccess(in_struct->cooperativeMatrixRobustBufferAccess)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV::safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV::safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV(const safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV& src)
-{
- sType = src.sType;
- cooperativeMatrix = src.cooperativeMatrix;
- cooperativeMatrixRobustBufferAccess = src.cooperativeMatrixRobustBufferAccess;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV& safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV::operator=(const safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- cooperativeMatrix = src.cooperativeMatrix;
- cooperativeMatrixRobustBufferAccess = src.cooperativeMatrixRobustBufferAccess;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV::~safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV::initialize(const VkPhysicalDeviceCooperativeMatrixFeaturesNV* in_struct)
-{
- sType = in_struct->sType;
- cooperativeMatrix = in_struct->cooperativeMatrix;
- cooperativeMatrixRobustBufferAccess = in_struct->cooperativeMatrixRobustBufferAccess;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV::initialize(const safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV* src)
-{
- sType = src->sType;
- cooperativeMatrix = src->cooperativeMatrix;
- cooperativeMatrixRobustBufferAccess = src->cooperativeMatrixRobustBufferAccess;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV::safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV(const VkPhysicalDeviceCooperativeMatrixPropertiesNV* in_struct) :
- sType(in_struct->sType),
- cooperativeMatrixSupportedStages(in_struct->cooperativeMatrixSupportedStages)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV::safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV::safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV(const safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV& src)
-{
- sType = src.sType;
- cooperativeMatrixSupportedStages = src.cooperativeMatrixSupportedStages;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV& safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV::operator=(const safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- cooperativeMatrixSupportedStages = src.cooperativeMatrixSupportedStages;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV::~safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV::initialize(const VkPhysicalDeviceCooperativeMatrixPropertiesNV* in_struct)
-{
- sType = in_struct->sType;
- cooperativeMatrixSupportedStages = in_struct->cooperativeMatrixSupportedStages;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV::initialize(const safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV* src)
-{
- sType = src->sType;
- cooperativeMatrixSupportedStages = src->cooperativeMatrixSupportedStages;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV::safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV(const VkPhysicalDeviceCoverageReductionModeFeaturesNV* in_struct) :
- sType(in_struct->sType),
- coverageReductionMode(in_struct->coverageReductionMode)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV::safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV::safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV(const safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV& src)
-{
- sType = src.sType;
- coverageReductionMode = src.coverageReductionMode;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV& safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV::operator=(const safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- coverageReductionMode = src.coverageReductionMode;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV::~safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV::initialize(const VkPhysicalDeviceCoverageReductionModeFeaturesNV* in_struct)
-{
- sType = in_struct->sType;
- coverageReductionMode = in_struct->coverageReductionMode;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV::initialize(const safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV* src)
-{
- sType = src->sType;
- coverageReductionMode = src->coverageReductionMode;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPipelineCoverageReductionStateCreateInfoNV::safe_VkPipelineCoverageReductionStateCreateInfoNV(const VkPipelineCoverageReductionStateCreateInfoNV* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags),
- coverageReductionMode(in_struct->coverageReductionMode)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPipelineCoverageReductionStateCreateInfoNV::safe_VkPipelineCoverageReductionStateCreateInfoNV() :
- pNext(nullptr)
-{}
-
-safe_VkPipelineCoverageReductionStateCreateInfoNV::safe_VkPipelineCoverageReductionStateCreateInfoNV(const safe_VkPipelineCoverageReductionStateCreateInfoNV& src)
-{
- sType = src.sType;
- flags = src.flags;
- coverageReductionMode = src.coverageReductionMode;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPipelineCoverageReductionStateCreateInfoNV& safe_VkPipelineCoverageReductionStateCreateInfoNV::operator=(const safe_VkPipelineCoverageReductionStateCreateInfoNV& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- coverageReductionMode = src.coverageReductionMode;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPipelineCoverageReductionStateCreateInfoNV::~safe_VkPipelineCoverageReductionStateCreateInfoNV()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPipelineCoverageReductionStateCreateInfoNV::initialize(const VkPipelineCoverageReductionStateCreateInfoNV* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- coverageReductionMode = in_struct->coverageReductionMode;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPipelineCoverageReductionStateCreateInfoNV::initialize(const safe_VkPipelineCoverageReductionStateCreateInfoNV* src)
-{
- sType = src->sType;
- flags = src->flags;
- coverageReductionMode = src->coverageReductionMode;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkFramebufferMixedSamplesCombinationNV::safe_VkFramebufferMixedSamplesCombinationNV(const VkFramebufferMixedSamplesCombinationNV* in_struct) :
- sType(in_struct->sType),
- coverageReductionMode(in_struct->coverageReductionMode),
- rasterizationSamples(in_struct->rasterizationSamples),
- depthStencilSamples(in_struct->depthStencilSamples),
- colorSamples(in_struct->colorSamples)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkFramebufferMixedSamplesCombinationNV::safe_VkFramebufferMixedSamplesCombinationNV() :
- pNext(nullptr)
-{}
-
-safe_VkFramebufferMixedSamplesCombinationNV::safe_VkFramebufferMixedSamplesCombinationNV(const safe_VkFramebufferMixedSamplesCombinationNV& src)
-{
- sType = src.sType;
- coverageReductionMode = src.coverageReductionMode;
- rasterizationSamples = src.rasterizationSamples;
- depthStencilSamples = src.depthStencilSamples;
- colorSamples = src.colorSamples;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkFramebufferMixedSamplesCombinationNV& safe_VkFramebufferMixedSamplesCombinationNV::operator=(const safe_VkFramebufferMixedSamplesCombinationNV& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- coverageReductionMode = src.coverageReductionMode;
- rasterizationSamples = src.rasterizationSamples;
- depthStencilSamples = src.depthStencilSamples;
- colorSamples = src.colorSamples;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkFramebufferMixedSamplesCombinationNV::~safe_VkFramebufferMixedSamplesCombinationNV()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkFramebufferMixedSamplesCombinationNV::initialize(const VkFramebufferMixedSamplesCombinationNV* in_struct)
-{
- sType = in_struct->sType;
- coverageReductionMode = in_struct->coverageReductionMode;
- rasterizationSamples = in_struct->rasterizationSamples;
- depthStencilSamples = in_struct->depthStencilSamples;
- colorSamples = in_struct->colorSamples;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkFramebufferMixedSamplesCombinationNV::initialize(const safe_VkFramebufferMixedSamplesCombinationNV* src)
-{
- sType = src->sType;
- coverageReductionMode = src->coverageReductionMode;
- rasterizationSamples = src->rasterizationSamples;
- depthStencilSamples = src->depthStencilSamples;
- colorSamples = src->colorSamples;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT::safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT(const VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT* in_struct) :
- sType(in_struct->sType),
- fragmentShaderSampleInterlock(in_struct->fragmentShaderSampleInterlock),
- fragmentShaderPixelInterlock(in_struct->fragmentShaderPixelInterlock),
- fragmentShaderShadingRateInterlock(in_struct->fragmentShaderShadingRateInterlock)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT::safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT::safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT(const safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT& src)
-{
- sType = src.sType;
- fragmentShaderSampleInterlock = src.fragmentShaderSampleInterlock;
- fragmentShaderPixelInterlock = src.fragmentShaderPixelInterlock;
- fragmentShaderShadingRateInterlock = src.fragmentShaderShadingRateInterlock;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT& safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT::operator=(const safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- fragmentShaderSampleInterlock = src.fragmentShaderSampleInterlock;
- fragmentShaderPixelInterlock = src.fragmentShaderPixelInterlock;
- fragmentShaderShadingRateInterlock = src.fragmentShaderShadingRateInterlock;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT::~safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT::initialize(const VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT* in_struct)
-{
- sType = in_struct->sType;
- fragmentShaderSampleInterlock = in_struct->fragmentShaderSampleInterlock;
- fragmentShaderPixelInterlock = in_struct->fragmentShaderPixelInterlock;
- fragmentShaderShadingRateInterlock = in_struct->fragmentShaderShadingRateInterlock;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT::initialize(const safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT* src)
-{
- sType = src->sType;
- fragmentShaderSampleInterlock = src->fragmentShaderSampleInterlock;
- fragmentShaderPixelInterlock = src->fragmentShaderPixelInterlock;
- fragmentShaderShadingRateInterlock = src->fragmentShaderShadingRateInterlock;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT::safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT(const VkPhysicalDeviceYcbcrImageArraysFeaturesEXT* in_struct) :
- sType(in_struct->sType),
- ycbcrImageArrays(in_struct->ycbcrImageArrays)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT::safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT::safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT(const safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT& src)
-{
- sType = src.sType;
- ycbcrImageArrays = src.ycbcrImageArrays;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT& safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT::operator=(const safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- ycbcrImageArrays = src.ycbcrImageArrays;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT::~safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT::initialize(const VkPhysicalDeviceYcbcrImageArraysFeaturesEXT* in_struct)
-{
- sType = in_struct->sType;
- ycbcrImageArrays = in_struct->ycbcrImageArrays;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT::initialize(const safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT* src)
-{
- sType = src->sType;
- ycbcrImageArrays = src->ycbcrImageArrays;
- pNext = SafePnextCopy(src->pNext);
-}
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-
-safe_VkSurfaceFullScreenExclusiveInfoEXT::safe_VkSurfaceFullScreenExclusiveInfoEXT(const VkSurfaceFullScreenExclusiveInfoEXT* in_struct) :
- sType(in_struct->sType),
- fullScreenExclusive(in_struct->fullScreenExclusive)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkSurfaceFullScreenExclusiveInfoEXT::safe_VkSurfaceFullScreenExclusiveInfoEXT() :
- pNext(nullptr)
-{}
-
-safe_VkSurfaceFullScreenExclusiveInfoEXT::safe_VkSurfaceFullScreenExclusiveInfoEXT(const safe_VkSurfaceFullScreenExclusiveInfoEXT& src)
-{
- sType = src.sType;
- fullScreenExclusive = src.fullScreenExclusive;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkSurfaceFullScreenExclusiveInfoEXT& safe_VkSurfaceFullScreenExclusiveInfoEXT::operator=(const safe_VkSurfaceFullScreenExclusiveInfoEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- fullScreenExclusive = src.fullScreenExclusive;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkSurfaceFullScreenExclusiveInfoEXT::~safe_VkSurfaceFullScreenExclusiveInfoEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkSurfaceFullScreenExclusiveInfoEXT::initialize(const VkSurfaceFullScreenExclusiveInfoEXT* in_struct)
-{
- sType = in_struct->sType;
- fullScreenExclusive = in_struct->fullScreenExclusive;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkSurfaceFullScreenExclusiveInfoEXT::initialize(const safe_VkSurfaceFullScreenExclusiveInfoEXT* src)
-{
- sType = src->sType;
- fullScreenExclusive = src->fullScreenExclusive;
- pNext = SafePnextCopy(src->pNext);
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-
-safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT::safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT(const VkSurfaceCapabilitiesFullScreenExclusiveEXT* in_struct) :
- sType(in_struct->sType),
- fullScreenExclusiveSupported(in_struct->fullScreenExclusiveSupported)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT::safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT() :
- pNext(nullptr)
-{}
-
-safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT::safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT(const safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT& src)
-{
- sType = src.sType;
- fullScreenExclusiveSupported = src.fullScreenExclusiveSupported;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT& safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT::operator=(const safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- fullScreenExclusiveSupported = src.fullScreenExclusiveSupported;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT::~safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT::initialize(const VkSurfaceCapabilitiesFullScreenExclusiveEXT* in_struct)
-{
- sType = in_struct->sType;
- fullScreenExclusiveSupported = in_struct->fullScreenExclusiveSupported;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT::initialize(const safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT* src)
-{
- sType = src->sType;
- fullScreenExclusiveSupported = src->fullScreenExclusiveSupported;
- pNext = SafePnextCopy(src->pNext);
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-
-
-safe_VkSurfaceFullScreenExclusiveWin32InfoEXT::safe_VkSurfaceFullScreenExclusiveWin32InfoEXT(const VkSurfaceFullScreenExclusiveWin32InfoEXT* in_struct) :
- sType(in_struct->sType),
- hmonitor(in_struct->hmonitor)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkSurfaceFullScreenExclusiveWin32InfoEXT::safe_VkSurfaceFullScreenExclusiveWin32InfoEXT() :
- pNext(nullptr)
-{}
-
-safe_VkSurfaceFullScreenExclusiveWin32InfoEXT::safe_VkSurfaceFullScreenExclusiveWin32InfoEXT(const safe_VkSurfaceFullScreenExclusiveWin32InfoEXT& src)
-{
- sType = src.sType;
- hmonitor = src.hmonitor;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkSurfaceFullScreenExclusiveWin32InfoEXT& safe_VkSurfaceFullScreenExclusiveWin32InfoEXT::operator=(const safe_VkSurfaceFullScreenExclusiveWin32InfoEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- hmonitor = src.hmonitor;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkSurfaceFullScreenExclusiveWin32InfoEXT::~safe_VkSurfaceFullScreenExclusiveWin32InfoEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkSurfaceFullScreenExclusiveWin32InfoEXT::initialize(const VkSurfaceFullScreenExclusiveWin32InfoEXT* in_struct)
-{
- sType = in_struct->sType;
- hmonitor = in_struct->hmonitor;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkSurfaceFullScreenExclusiveWin32InfoEXT::initialize(const safe_VkSurfaceFullScreenExclusiveWin32InfoEXT* src)
-{
- sType = src->sType;
- hmonitor = src->hmonitor;
- pNext = SafePnextCopy(src->pNext);
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-
-safe_VkHeadlessSurfaceCreateInfoEXT::safe_VkHeadlessSurfaceCreateInfoEXT(const VkHeadlessSurfaceCreateInfoEXT* in_struct) :
- sType(in_struct->sType),
- flags(in_struct->flags)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkHeadlessSurfaceCreateInfoEXT::safe_VkHeadlessSurfaceCreateInfoEXT() :
- pNext(nullptr)
-{}
-
-safe_VkHeadlessSurfaceCreateInfoEXT::safe_VkHeadlessSurfaceCreateInfoEXT(const safe_VkHeadlessSurfaceCreateInfoEXT& src)
-{
- sType = src.sType;
- flags = src.flags;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkHeadlessSurfaceCreateInfoEXT& safe_VkHeadlessSurfaceCreateInfoEXT::operator=(const safe_VkHeadlessSurfaceCreateInfoEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- flags = src.flags;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkHeadlessSurfaceCreateInfoEXT::~safe_VkHeadlessSurfaceCreateInfoEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkHeadlessSurfaceCreateInfoEXT::initialize(const VkHeadlessSurfaceCreateInfoEXT* in_struct)
-{
- sType = in_struct->sType;
- flags = in_struct->flags;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkHeadlessSurfaceCreateInfoEXT::initialize(const safe_VkHeadlessSurfaceCreateInfoEXT* src)
-{
- sType = src->sType;
- flags = src->flags;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceLineRasterizationFeaturesEXT::safe_VkPhysicalDeviceLineRasterizationFeaturesEXT(const VkPhysicalDeviceLineRasterizationFeaturesEXT* in_struct) :
- sType(in_struct->sType),
- rectangularLines(in_struct->rectangularLines),
- bresenhamLines(in_struct->bresenhamLines),
- smoothLines(in_struct->smoothLines),
- stippledRectangularLines(in_struct->stippledRectangularLines),
- stippledBresenhamLines(in_struct->stippledBresenhamLines),
- stippledSmoothLines(in_struct->stippledSmoothLines)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceLineRasterizationFeaturesEXT::safe_VkPhysicalDeviceLineRasterizationFeaturesEXT() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceLineRasterizationFeaturesEXT::safe_VkPhysicalDeviceLineRasterizationFeaturesEXT(const safe_VkPhysicalDeviceLineRasterizationFeaturesEXT& src)
-{
- sType = src.sType;
- rectangularLines = src.rectangularLines;
- bresenhamLines = src.bresenhamLines;
- smoothLines = src.smoothLines;
- stippledRectangularLines = src.stippledRectangularLines;
- stippledBresenhamLines = src.stippledBresenhamLines;
- stippledSmoothLines = src.stippledSmoothLines;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceLineRasterizationFeaturesEXT& safe_VkPhysicalDeviceLineRasterizationFeaturesEXT::operator=(const safe_VkPhysicalDeviceLineRasterizationFeaturesEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- rectangularLines = src.rectangularLines;
- bresenhamLines = src.bresenhamLines;
- smoothLines = src.smoothLines;
- stippledRectangularLines = src.stippledRectangularLines;
- stippledBresenhamLines = src.stippledBresenhamLines;
- stippledSmoothLines = src.stippledSmoothLines;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceLineRasterizationFeaturesEXT::~safe_VkPhysicalDeviceLineRasterizationFeaturesEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceLineRasterizationFeaturesEXT::initialize(const VkPhysicalDeviceLineRasterizationFeaturesEXT* in_struct)
-{
- sType = in_struct->sType;
- rectangularLines = in_struct->rectangularLines;
- bresenhamLines = in_struct->bresenhamLines;
- smoothLines = in_struct->smoothLines;
- stippledRectangularLines = in_struct->stippledRectangularLines;
- stippledBresenhamLines = in_struct->stippledBresenhamLines;
- stippledSmoothLines = in_struct->stippledSmoothLines;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceLineRasterizationFeaturesEXT::initialize(const safe_VkPhysicalDeviceLineRasterizationFeaturesEXT* src)
-{
- sType = src->sType;
- rectangularLines = src->rectangularLines;
- bresenhamLines = src->bresenhamLines;
- smoothLines = src->smoothLines;
- stippledRectangularLines = src->stippledRectangularLines;
- stippledBresenhamLines = src->stippledBresenhamLines;
- stippledSmoothLines = src->stippledSmoothLines;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceLineRasterizationPropertiesEXT::safe_VkPhysicalDeviceLineRasterizationPropertiesEXT(const VkPhysicalDeviceLineRasterizationPropertiesEXT* in_struct) :
- sType(in_struct->sType),
- lineSubPixelPrecisionBits(in_struct->lineSubPixelPrecisionBits)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceLineRasterizationPropertiesEXT::safe_VkPhysicalDeviceLineRasterizationPropertiesEXT() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceLineRasterizationPropertiesEXT::safe_VkPhysicalDeviceLineRasterizationPropertiesEXT(const safe_VkPhysicalDeviceLineRasterizationPropertiesEXT& src)
-{
- sType = src.sType;
- lineSubPixelPrecisionBits = src.lineSubPixelPrecisionBits;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceLineRasterizationPropertiesEXT& safe_VkPhysicalDeviceLineRasterizationPropertiesEXT::operator=(const safe_VkPhysicalDeviceLineRasterizationPropertiesEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- lineSubPixelPrecisionBits = src.lineSubPixelPrecisionBits;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceLineRasterizationPropertiesEXT::~safe_VkPhysicalDeviceLineRasterizationPropertiesEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceLineRasterizationPropertiesEXT::initialize(const VkPhysicalDeviceLineRasterizationPropertiesEXT* in_struct)
-{
- sType = in_struct->sType;
- lineSubPixelPrecisionBits = in_struct->lineSubPixelPrecisionBits;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceLineRasterizationPropertiesEXT::initialize(const safe_VkPhysicalDeviceLineRasterizationPropertiesEXT* src)
-{
- sType = src->sType;
- lineSubPixelPrecisionBits = src->lineSubPixelPrecisionBits;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPipelineRasterizationLineStateCreateInfoEXT::safe_VkPipelineRasterizationLineStateCreateInfoEXT(const VkPipelineRasterizationLineStateCreateInfoEXT* in_struct) :
- sType(in_struct->sType),
- lineRasterizationMode(in_struct->lineRasterizationMode),
- stippledLineEnable(in_struct->stippledLineEnable),
- lineStippleFactor(in_struct->lineStippleFactor),
- lineStipplePattern(in_struct->lineStipplePattern)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPipelineRasterizationLineStateCreateInfoEXT::safe_VkPipelineRasterizationLineStateCreateInfoEXT() :
- pNext(nullptr)
-{}
-
-safe_VkPipelineRasterizationLineStateCreateInfoEXT::safe_VkPipelineRasterizationLineStateCreateInfoEXT(const safe_VkPipelineRasterizationLineStateCreateInfoEXT& src)
-{
- sType = src.sType;
- lineRasterizationMode = src.lineRasterizationMode;
- stippledLineEnable = src.stippledLineEnable;
- lineStippleFactor = src.lineStippleFactor;
- lineStipplePattern = src.lineStipplePattern;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPipelineRasterizationLineStateCreateInfoEXT& safe_VkPipelineRasterizationLineStateCreateInfoEXT::operator=(const safe_VkPipelineRasterizationLineStateCreateInfoEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- lineRasterizationMode = src.lineRasterizationMode;
- stippledLineEnable = src.stippledLineEnable;
- lineStippleFactor = src.lineStippleFactor;
- lineStipplePattern = src.lineStipplePattern;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPipelineRasterizationLineStateCreateInfoEXT::~safe_VkPipelineRasterizationLineStateCreateInfoEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPipelineRasterizationLineStateCreateInfoEXT::initialize(const VkPipelineRasterizationLineStateCreateInfoEXT* in_struct)
-{
- sType = in_struct->sType;
- lineRasterizationMode = in_struct->lineRasterizationMode;
- stippledLineEnable = in_struct->stippledLineEnable;
- lineStippleFactor = in_struct->lineStippleFactor;
- lineStipplePattern = in_struct->lineStipplePattern;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPipelineRasterizationLineStateCreateInfoEXT::initialize(const safe_VkPipelineRasterizationLineStateCreateInfoEXT* src)
-{
- sType = src->sType;
- lineRasterizationMode = src->lineRasterizationMode;
- stippledLineEnable = src->stippledLineEnable;
- lineStippleFactor = src->lineStippleFactor;
- lineStipplePattern = src->lineStipplePattern;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceHostQueryResetFeaturesEXT::safe_VkPhysicalDeviceHostQueryResetFeaturesEXT(const VkPhysicalDeviceHostQueryResetFeaturesEXT* in_struct) :
- sType(in_struct->sType),
- hostQueryReset(in_struct->hostQueryReset)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceHostQueryResetFeaturesEXT::safe_VkPhysicalDeviceHostQueryResetFeaturesEXT() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceHostQueryResetFeaturesEXT::safe_VkPhysicalDeviceHostQueryResetFeaturesEXT(const safe_VkPhysicalDeviceHostQueryResetFeaturesEXT& src)
-{
- sType = src.sType;
- hostQueryReset = src.hostQueryReset;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceHostQueryResetFeaturesEXT& safe_VkPhysicalDeviceHostQueryResetFeaturesEXT::operator=(const safe_VkPhysicalDeviceHostQueryResetFeaturesEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- hostQueryReset = src.hostQueryReset;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceHostQueryResetFeaturesEXT::~safe_VkPhysicalDeviceHostQueryResetFeaturesEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceHostQueryResetFeaturesEXT::initialize(const VkPhysicalDeviceHostQueryResetFeaturesEXT* in_struct)
-{
- sType = in_struct->sType;
- hostQueryReset = in_struct->hostQueryReset;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceHostQueryResetFeaturesEXT::initialize(const safe_VkPhysicalDeviceHostQueryResetFeaturesEXT* src)
-{
- sType = src->sType;
- hostQueryReset = src->hostQueryReset;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT::safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT(const VkPhysicalDeviceIndexTypeUint8FeaturesEXT* in_struct) :
- sType(in_struct->sType),
- indexTypeUint8(in_struct->indexTypeUint8)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT::safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT::safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT(const safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT& src)
-{
- sType = src.sType;
- indexTypeUint8 = src.indexTypeUint8;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT& safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT::operator=(const safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- indexTypeUint8 = src.indexTypeUint8;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT::~safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT::initialize(const VkPhysicalDeviceIndexTypeUint8FeaturesEXT* in_struct)
-{
- sType = in_struct->sType;
- indexTypeUint8 = in_struct->indexTypeUint8;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT::initialize(const safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT* src)
-{
- sType = src->sType;
- indexTypeUint8 = src->indexTypeUint8;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT::safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT(const VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT* in_struct) :
- sType(in_struct->sType),
- shaderDemoteToHelperInvocation(in_struct->shaderDemoteToHelperInvocation)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT::safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT::safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT(const safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT& src)
-{
- sType = src.sType;
- shaderDemoteToHelperInvocation = src.shaderDemoteToHelperInvocation;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT& safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT::operator=(const safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- shaderDemoteToHelperInvocation = src.shaderDemoteToHelperInvocation;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT::~safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT::initialize(const VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT* in_struct)
-{
- sType = in_struct->sType;
- shaderDemoteToHelperInvocation = in_struct->shaderDemoteToHelperInvocation;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT::initialize(const safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT* src)
-{
- sType = src->sType;
- shaderDemoteToHelperInvocation = src->shaderDemoteToHelperInvocation;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT::safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT(const VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT* in_struct) :
- sType(in_struct->sType),
- texelBufferAlignment(in_struct->texelBufferAlignment)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT::safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT::safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT(const safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT& src)
-{
- sType = src.sType;
- texelBufferAlignment = src.texelBufferAlignment;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT& safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT::operator=(const safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- texelBufferAlignment = src.texelBufferAlignment;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT::~safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT::initialize(const VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT* in_struct)
-{
- sType = in_struct->sType;
- texelBufferAlignment = in_struct->texelBufferAlignment;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT::initialize(const safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT* src)
-{
- sType = src->sType;
- texelBufferAlignment = src->texelBufferAlignment;
- pNext = SafePnextCopy(src->pNext);
-}
-
-safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT::safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT(const VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT* in_struct) :
- sType(in_struct->sType),
- storageTexelBufferOffsetAlignmentBytes(in_struct->storageTexelBufferOffsetAlignmentBytes),
- storageTexelBufferOffsetSingleTexelAlignment(in_struct->storageTexelBufferOffsetSingleTexelAlignment),
- uniformTexelBufferOffsetAlignmentBytes(in_struct->uniformTexelBufferOffsetAlignmentBytes),
- uniformTexelBufferOffsetSingleTexelAlignment(in_struct->uniformTexelBufferOffsetSingleTexelAlignment)
-{
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT::safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT() :
- pNext(nullptr)
-{}
-
-safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT::safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT(const safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT& src)
-{
- sType = src.sType;
- storageTexelBufferOffsetAlignmentBytes = src.storageTexelBufferOffsetAlignmentBytes;
- storageTexelBufferOffsetSingleTexelAlignment = src.storageTexelBufferOffsetSingleTexelAlignment;
- uniformTexelBufferOffsetAlignmentBytes = src.uniformTexelBufferOffsetAlignmentBytes;
- uniformTexelBufferOffsetSingleTexelAlignment = src.uniformTexelBufferOffsetSingleTexelAlignment;
- pNext = SafePnextCopy(src.pNext);
-}
-
-safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT& safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT::operator=(const safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT& src)
-{
- if (&src == this) return *this;
-
- if (pNext)
- FreePnextChain(pNext);
-
- sType = src.sType;
- storageTexelBufferOffsetAlignmentBytes = src.storageTexelBufferOffsetAlignmentBytes;
- storageTexelBufferOffsetSingleTexelAlignment = src.storageTexelBufferOffsetSingleTexelAlignment;
- uniformTexelBufferOffsetAlignmentBytes = src.uniformTexelBufferOffsetAlignmentBytes;
- uniformTexelBufferOffsetSingleTexelAlignment = src.uniformTexelBufferOffsetSingleTexelAlignment;
- pNext = SafePnextCopy(src.pNext);
-
- return *this;
-}
-
-safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT::~safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT()
-{
- if (pNext)
- FreePnextChain(pNext);
-}
-
-void safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT::initialize(const VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT* in_struct)
-{
- sType = in_struct->sType;
- storageTexelBufferOffsetAlignmentBytes = in_struct->storageTexelBufferOffsetAlignmentBytes;
- storageTexelBufferOffsetSingleTexelAlignment = in_struct->storageTexelBufferOffsetSingleTexelAlignment;
- uniformTexelBufferOffsetAlignmentBytes = in_struct->uniformTexelBufferOffsetAlignmentBytes;
- uniformTexelBufferOffsetSingleTexelAlignment = in_struct->uniformTexelBufferOffsetSingleTexelAlignment;
- pNext = SafePnextCopy(in_struct->pNext);
-}
-
-void safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT::initialize(const safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT* src)
-{
- sType = src->sType;
- storageTexelBufferOffsetAlignmentBytes = src->storageTexelBufferOffsetAlignmentBytes;
- storageTexelBufferOffsetSingleTexelAlignment = src->storageTexelBufferOffsetSingleTexelAlignment;
- uniformTexelBufferOffsetAlignmentBytes = src->uniformTexelBufferOffsetAlignmentBytes;
- uniformTexelBufferOffsetSingleTexelAlignment = src->uniformTexelBufferOffsetSingleTexelAlignment;
- pNext = SafePnextCopy(src->pNext);
-}
-
-char *SafeStringCopy(const char *in_string) {
- if (nullptr == in_string) return nullptr;
- char* dest = new char[std::strlen(in_string) + 1];
- return std::strcpy(dest, in_string);
-}
-
-void *SafePnextCopy(const void *pNext) {
- if (!pNext) return nullptr;
-
- void *safe_pNext;
- const VkBaseOutStructure *header = reinterpret_cast<const VkBaseOutStructure *>(pNext);
-
- switch (header->sType) {
- // Special-case Loader Instance Struct passed to/from layer in pNext chain
- case VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO: {
- VkLayerInstanceCreateInfo *struct_copy = new VkLayerInstanceCreateInfo;
- // TODO: Uses original VkLayerInstanceLink* chain, which should be okay for our uses
- memcpy(struct_copy, pNext, sizeof(VkLayerInstanceCreateInfo));
- struct_copy->pNext = SafePnextCopy(header->pNext);
- safe_pNext = struct_copy;
- break;
- }
- // Special-case Loader Device Struct passed to/from layer in pNext chain
- case VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO: {
- VkLayerDeviceCreateInfo *struct_copy = new VkLayerDeviceCreateInfo;
- // TODO: Uses original VkLayerDeviceLink*, which should be okay for our uses
- memcpy(struct_copy, pNext, sizeof(VkLayerDeviceCreateInfo));
- struct_copy->pNext = SafePnextCopy(header->pNext);
- safe_pNext = struct_copy;
- break;
- }
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES:
- safe_pNext = new safe_VkPhysicalDeviceSubgroupProperties(reinterpret_cast<const VkPhysicalDeviceSubgroupProperties *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES:
- safe_pNext = new safe_VkPhysicalDevice16BitStorageFeatures(reinterpret_cast<const VkPhysicalDevice16BitStorageFeatures *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS:
- safe_pNext = new safe_VkMemoryDedicatedRequirements(reinterpret_cast<const VkMemoryDedicatedRequirements *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO:
- safe_pNext = new safe_VkMemoryDedicatedAllocateInfo(reinterpret_cast<const VkMemoryDedicatedAllocateInfo *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO:
- safe_pNext = new safe_VkMemoryAllocateFlagsInfo(reinterpret_cast<const VkMemoryAllocateFlagsInfo *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO:
- safe_pNext = new safe_VkDeviceGroupRenderPassBeginInfo(reinterpret_cast<const VkDeviceGroupRenderPassBeginInfo *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO:
- safe_pNext = new safe_VkDeviceGroupCommandBufferBeginInfo(reinterpret_cast<const VkDeviceGroupCommandBufferBeginInfo *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO:
- safe_pNext = new safe_VkDeviceGroupSubmitInfo(reinterpret_cast<const VkDeviceGroupSubmitInfo *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO:
- safe_pNext = new safe_VkDeviceGroupBindSparseInfo(reinterpret_cast<const VkDeviceGroupBindSparseInfo *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO:
- safe_pNext = new safe_VkBindBufferMemoryDeviceGroupInfo(reinterpret_cast<const VkBindBufferMemoryDeviceGroupInfo *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO:
- safe_pNext = new safe_VkBindImageMemoryDeviceGroupInfo(reinterpret_cast<const VkBindImageMemoryDeviceGroupInfo *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO:
- safe_pNext = new safe_VkDeviceGroupDeviceCreateInfo(reinterpret_cast<const VkDeviceGroupDeviceCreateInfo *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2:
- safe_pNext = new safe_VkPhysicalDeviceFeatures2(reinterpret_cast<const VkPhysicalDeviceFeatures2 *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES:
- safe_pNext = new safe_VkPhysicalDevicePointClippingProperties(reinterpret_cast<const VkPhysicalDevicePointClippingProperties *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO:
- safe_pNext = new safe_VkRenderPassInputAttachmentAspectCreateInfo(reinterpret_cast<const VkRenderPassInputAttachmentAspectCreateInfo *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO:
- safe_pNext = new safe_VkImageViewUsageCreateInfo(reinterpret_cast<const VkImageViewUsageCreateInfo *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO:
- safe_pNext = new safe_VkPipelineTessellationDomainOriginStateCreateInfo(reinterpret_cast<const VkPipelineTessellationDomainOriginStateCreateInfo *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO:
- safe_pNext = new safe_VkRenderPassMultiviewCreateInfo(reinterpret_cast<const VkRenderPassMultiviewCreateInfo *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES:
- safe_pNext = new safe_VkPhysicalDeviceMultiviewFeatures(reinterpret_cast<const VkPhysicalDeviceMultiviewFeatures *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES:
- safe_pNext = new safe_VkPhysicalDeviceMultiviewProperties(reinterpret_cast<const VkPhysicalDeviceMultiviewProperties *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES:
- safe_pNext = new safe_VkPhysicalDeviceVariablePointersFeatures(reinterpret_cast<const VkPhysicalDeviceVariablePointersFeatures *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES:
- safe_pNext = new safe_VkPhysicalDeviceProtectedMemoryFeatures(reinterpret_cast<const VkPhysicalDeviceProtectedMemoryFeatures *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES:
- safe_pNext = new safe_VkPhysicalDeviceProtectedMemoryProperties(reinterpret_cast<const VkPhysicalDeviceProtectedMemoryProperties *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO:
- safe_pNext = new safe_VkProtectedSubmitInfo(reinterpret_cast<const VkProtectedSubmitInfo *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO:
- safe_pNext = new safe_VkSamplerYcbcrConversionInfo(reinterpret_cast<const VkSamplerYcbcrConversionInfo *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO:
- safe_pNext = new safe_VkBindImagePlaneMemoryInfo(reinterpret_cast<const VkBindImagePlaneMemoryInfo *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO:
- safe_pNext = new safe_VkImagePlaneMemoryRequirementsInfo(reinterpret_cast<const VkImagePlaneMemoryRequirementsInfo *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES:
- safe_pNext = new safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures(reinterpret_cast<const VkPhysicalDeviceSamplerYcbcrConversionFeatures *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES:
- safe_pNext = new safe_VkSamplerYcbcrConversionImageFormatProperties(reinterpret_cast<const VkSamplerYcbcrConversionImageFormatProperties *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO:
- safe_pNext = new safe_VkPhysicalDeviceExternalImageFormatInfo(reinterpret_cast<const VkPhysicalDeviceExternalImageFormatInfo *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES:
- safe_pNext = new safe_VkExternalImageFormatProperties(reinterpret_cast<const VkExternalImageFormatProperties *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES:
- safe_pNext = new safe_VkPhysicalDeviceIDProperties(reinterpret_cast<const VkPhysicalDeviceIDProperties *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO:
- safe_pNext = new safe_VkExternalMemoryImageCreateInfo(reinterpret_cast<const VkExternalMemoryImageCreateInfo *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO:
- safe_pNext = new safe_VkExternalMemoryBufferCreateInfo(reinterpret_cast<const VkExternalMemoryBufferCreateInfo *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO:
- safe_pNext = new safe_VkExportMemoryAllocateInfo(reinterpret_cast<const VkExportMemoryAllocateInfo *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO:
- safe_pNext = new safe_VkExportFenceCreateInfo(reinterpret_cast<const VkExportFenceCreateInfo *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO:
- safe_pNext = new safe_VkExportSemaphoreCreateInfo(reinterpret_cast<const VkExportSemaphoreCreateInfo *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES:
- safe_pNext = new safe_VkPhysicalDeviceMaintenance3Properties(reinterpret_cast<const VkPhysicalDeviceMaintenance3Properties *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES:
- safe_pNext = new safe_VkPhysicalDeviceShaderDrawParametersFeatures(reinterpret_cast<const VkPhysicalDeviceShaderDrawParametersFeatures *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR:
- safe_pNext = new safe_VkImageSwapchainCreateInfoKHR(reinterpret_cast<const VkImageSwapchainCreateInfoKHR *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR:
- safe_pNext = new safe_VkBindImageMemorySwapchainInfoKHR(reinterpret_cast<const VkBindImageMemorySwapchainInfoKHR *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR:
- safe_pNext = new safe_VkDeviceGroupPresentInfoKHR(reinterpret_cast<const VkDeviceGroupPresentInfoKHR *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR:
- safe_pNext = new safe_VkDeviceGroupSwapchainCreateInfoKHR(reinterpret_cast<const VkDeviceGroupSwapchainCreateInfoKHR *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR:
- safe_pNext = new safe_VkDisplayPresentInfoKHR(reinterpret_cast<const VkDisplayPresentInfoKHR *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR:
- safe_pNext = new safe_VkImportMemoryFdInfoKHR(reinterpret_cast<const VkImportMemoryFdInfoKHR *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR:
- safe_pNext = new safe_VkPhysicalDevicePushDescriptorPropertiesKHR(reinterpret_cast<const VkPhysicalDevicePushDescriptorPropertiesKHR *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR:
- safe_pNext = new safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR(reinterpret_cast<const VkPhysicalDeviceShaderFloat16Int8FeaturesKHR *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR:
- safe_pNext = new safe_VkPresentRegionsKHR(reinterpret_cast<const VkPresentRegionsKHR *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR:
- safe_pNext = new safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR(reinterpret_cast<const VkPhysicalDeviceImagelessFramebufferFeaturesKHR *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR:
- safe_pNext = new safe_VkFramebufferAttachmentsCreateInfoKHR(reinterpret_cast<const VkFramebufferAttachmentsCreateInfoKHR *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR:
- safe_pNext = new safe_VkRenderPassAttachmentBeginInfoKHR(reinterpret_cast<const VkRenderPassAttachmentBeginInfoKHR *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR:
- safe_pNext = new safe_VkSharedPresentSurfaceCapabilitiesKHR(reinterpret_cast<const VkSharedPresentSurfaceCapabilitiesKHR *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR:
- safe_pNext = new safe_VkImageFormatListCreateInfoKHR(reinterpret_cast<const VkImageFormatListCreateInfoKHR *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR:
- safe_pNext = new safe_VkPhysicalDevice8BitStorageFeaturesKHR(reinterpret_cast<const VkPhysicalDevice8BitStorageFeaturesKHR *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR:
- safe_pNext = new safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR(reinterpret_cast<const VkPhysicalDeviceShaderAtomicInt64FeaturesKHR *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR:
- safe_pNext = new safe_VkPhysicalDeviceDriverPropertiesKHR(reinterpret_cast<const VkPhysicalDeviceDriverPropertiesKHR *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR:
- safe_pNext = new safe_VkPhysicalDeviceFloatControlsPropertiesKHR(reinterpret_cast<const VkPhysicalDeviceFloatControlsPropertiesKHR *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR:
- safe_pNext = new safe_VkSubpassDescriptionDepthStencilResolveKHR(reinterpret_cast<const VkSubpassDescriptionDepthStencilResolveKHR *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR:
- safe_pNext = new safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR(reinterpret_cast<const VkPhysicalDeviceDepthStencilResolvePropertiesKHR *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR:
- safe_pNext = new safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR(reinterpret_cast<const VkPhysicalDeviceVulkanMemoryModelFeaturesKHR *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR:
- safe_pNext = new safe_VkSurfaceProtectedCapabilitiesKHR(reinterpret_cast<const VkSurfaceProtectedCapabilitiesKHR *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR:
- safe_pNext = new safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR(reinterpret_cast<const VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR:
- safe_pNext = new safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR(reinterpret_cast<const VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT:
- safe_pNext = new safe_VkDebugReportCallbackCreateInfoEXT(reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD:
- safe_pNext = new safe_VkPipelineRasterizationStateRasterizationOrderAMD(reinterpret_cast<const VkPipelineRasterizationStateRasterizationOrderAMD *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV:
- safe_pNext = new safe_VkDedicatedAllocationImageCreateInfoNV(reinterpret_cast<const VkDedicatedAllocationImageCreateInfoNV *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV:
- safe_pNext = new safe_VkDedicatedAllocationBufferCreateInfoNV(reinterpret_cast<const VkDedicatedAllocationBufferCreateInfoNV *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV:
- safe_pNext = new safe_VkDedicatedAllocationMemoryAllocateInfoNV(reinterpret_cast<const VkDedicatedAllocationMemoryAllocateInfoNV *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT:
- safe_pNext = new safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT(reinterpret_cast<const VkPhysicalDeviceTransformFeedbackFeaturesEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT:
- safe_pNext = new safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT(reinterpret_cast<const VkPhysicalDeviceTransformFeedbackPropertiesEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT:
- safe_pNext = new safe_VkPipelineRasterizationStateStreamCreateInfoEXT(reinterpret_cast<const VkPipelineRasterizationStateStreamCreateInfoEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD:
- safe_pNext = new safe_VkTextureLODGatherFormatPropertiesAMD(reinterpret_cast<const VkTextureLODGatherFormatPropertiesAMD *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV:
- safe_pNext = new safe_VkPhysicalDeviceCornerSampledImageFeaturesNV(reinterpret_cast<const VkPhysicalDeviceCornerSampledImageFeaturesNV *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV:
- safe_pNext = new safe_VkExternalMemoryImageCreateInfoNV(reinterpret_cast<const VkExternalMemoryImageCreateInfoNV *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV:
- safe_pNext = new safe_VkExportMemoryAllocateInfoNV(reinterpret_cast<const VkExportMemoryAllocateInfoNV *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT:
- safe_pNext = new safe_VkValidationFlagsEXT(reinterpret_cast<const VkValidationFlagsEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT:
- safe_pNext = new safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT(reinterpret_cast<const VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT:
- safe_pNext = new safe_VkImageViewASTCDecodeModeEXT(reinterpret_cast<const VkImageViewASTCDecodeModeEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT:
- safe_pNext = new safe_VkPhysicalDeviceASTCDecodeFeaturesEXT(reinterpret_cast<const VkPhysicalDeviceASTCDecodeFeaturesEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT:
- safe_pNext = new safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT(reinterpret_cast<const VkPhysicalDeviceConditionalRenderingFeaturesEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT:
- safe_pNext = new safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT(reinterpret_cast<const VkCommandBufferInheritanceConditionalRenderingInfoEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV:
- safe_pNext = new safe_VkPipelineViewportWScalingStateCreateInfoNV(reinterpret_cast<const VkPipelineViewportWScalingStateCreateInfoNV *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT:
- safe_pNext = new safe_VkSwapchainCounterCreateInfoEXT(reinterpret_cast<const VkSwapchainCounterCreateInfoEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE:
- safe_pNext = new safe_VkPresentTimesInfoGOOGLE(reinterpret_cast<const VkPresentTimesInfoGOOGLE *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX:
- safe_pNext = new safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX(reinterpret_cast<const VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV:
- safe_pNext = new safe_VkPipelineViewportSwizzleStateCreateInfoNV(reinterpret_cast<const VkPipelineViewportSwizzleStateCreateInfoNV *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT:
- safe_pNext = new safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT(reinterpret_cast<const VkPhysicalDeviceDiscardRectanglePropertiesEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT:
- safe_pNext = new safe_VkPipelineDiscardRectangleStateCreateInfoEXT(reinterpret_cast<const VkPipelineDiscardRectangleStateCreateInfoEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT:
- safe_pNext = new safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT(reinterpret_cast<const VkPhysicalDeviceConservativeRasterizationPropertiesEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT:
- safe_pNext = new safe_VkPipelineRasterizationConservativeStateCreateInfoEXT(reinterpret_cast<const VkPipelineRasterizationConservativeStateCreateInfoEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT:
- safe_pNext = new safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT(reinterpret_cast<const VkPhysicalDeviceDepthClipEnableFeaturesEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT:
- safe_pNext = new safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT(reinterpret_cast<const VkPipelineRasterizationDepthClipStateCreateInfoEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT:
- safe_pNext = new safe_VkDebugUtilsMessengerCreateInfoEXT(reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT:
- safe_pNext = new safe_VkSamplerReductionModeCreateInfoEXT(reinterpret_cast<const VkSamplerReductionModeCreateInfoEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT:
- safe_pNext = new safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT(reinterpret_cast<const VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT:
- safe_pNext = new safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT(reinterpret_cast<const VkPhysicalDeviceInlineUniformBlockFeaturesEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT:
- safe_pNext = new safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT(reinterpret_cast<const VkPhysicalDeviceInlineUniformBlockPropertiesEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT:
- safe_pNext = new safe_VkWriteDescriptorSetInlineUniformBlockEXT(reinterpret_cast<const VkWriteDescriptorSetInlineUniformBlockEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT:
- safe_pNext = new safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT(reinterpret_cast<const VkDescriptorPoolInlineUniformBlockCreateInfoEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT:
- safe_pNext = new safe_VkSampleLocationsInfoEXT(reinterpret_cast<const VkSampleLocationsInfoEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT:
- safe_pNext = new safe_VkRenderPassSampleLocationsBeginInfoEXT(reinterpret_cast<const VkRenderPassSampleLocationsBeginInfoEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT:
- safe_pNext = new safe_VkPipelineSampleLocationsStateCreateInfoEXT(reinterpret_cast<const VkPipelineSampleLocationsStateCreateInfoEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT:
- safe_pNext = new safe_VkPhysicalDeviceSampleLocationsPropertiesEXT(reinterpret_cast<const VkPhysicalDeviceSampleLocationsPropertiesEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT:
- safe_pNext = new safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT(reinterpret_cast<const VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT:
- safe_pNext = new safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT(reinterpret_cast<const VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT:
- safe_pNext = new safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT(reinterpret_cast<const VkPipelineColorBlendAdvancedStateCreateInfoEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV:
- safe_pNext = new safe_VkPipelineCoverageToColorStateCreateInfoNV(reinterpret_cast<const VkPipelineCoverageToColorStateCreateInfoNV *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV:
- safe_pNext = new safe_VkPipelineCoverageModulationStateCreateInfoNV(reinterpret_cast<const VkPipelineCoverageModulationStateCreateInfoNV *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV:
- safe_pNext = new safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV(reinterpret_cast<const VkPhysicalDeviceShaderSMBuiltinsPropertiesNV *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV:
- safe_pNext = new safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV(reinterpret_cast<const VkPhysicalDeviceShaderSMBuiltinsFeaturesNV *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT:
- safe_pNext = new safe_VkDrmFormatModifierPropertiesListEXT(reinterpret_cast<const VkDrmFormatModifierPropertiesListEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT:
- safe_pNext = new safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT(reinterpret_cast<const VkPhysicalDeviceImageDrmFormatModifierInfoEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT:
- safe_pNext = new safe_VkImageDrmFormatModifierListCreateInfoEXT(reinterpret_cast<const VkImageDrmFormatModifierListCreateInfoEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT:
- safe_pNext = new safe_VkImageDrmFormatModifierExplicitCreateInfoEXT(reinterpret_cast<const VkImageDrmFormatModifierExplicitCreateInfoEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT:
- safe_pNext = new safe_VkShaderModuleValidationCacheCreateInfoEXT(reinterpret_cast<const VkShaderModuleValidationCacheCreateInfoEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT:
- safe_pNext = new safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT(reinterpret_cast<const VkDescriptorSetLayoutBindingFlagsCreateInfoEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT:
- safe_pNext = new safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT(reinterpret_cast<const VkPhysicalDeviceDescriptorIndexingFeaturesEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT:
- safe_pNext = new safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT(reinterpret_cast<const VkPhysicalDeviceDescriptorIndexingPropertiesEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT:
- safe_pNext = new safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT(reinterpret_cast<const VkDescriptorSetVariableDescriptorCountAllocateInfoEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT:
- safe_pNext = new safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT(reinterpret_cast<const VkDescriptorSetVariableDescriptorCountLayoutSupportEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV:
- safe_pNext = new safe_VkPipelineViewportShadingRateImageStateCreateInfoNV(reinterpret_cast<const VkPipelineViewportShadingRateImageStateCreateInfoNV *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV:
- safe_pNext = new safe_VkPhysicalDeviceShadingRateImageFeaturesNV(reinterpret_cast<const VkPhysicalDeviceShadingRateImageFeaturesNV *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV:
- safe_pNext = new safe_VkPhysicalDeviceShadingRateImagePropertiesNV(reinterpret_cast<const VkPhysicalDeviceShadingRateImagePropertiesNV *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV:
- safe_pNext = new safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV(reinterpret_cast<const VkPipelineViewportCoarseSampleOrderStateCreateInfoNV *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV:
- safe_pNext = new safe_VkWriteDescriptorSetAccelerationStructureNV(reinterpret_cast<const VkWriteDescriptorSetAccelerationStructureNV *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV:
- safe_pNext = new safe_VkPhysicalDeviceRayTracingPropertiesNV(reinterpret_cast<const VkPhysicalDeviceRayTracingPropertiesNV *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV:
- safe_pNext = new safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV(reinterpret_cast<const VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV:
- safe_pNext = new safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV(reinterpret_cast<const VkPipelineRepresentativeFragmentTestStateCreateInfoNV *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT:
- safe_pNext = new safe_VkPhysicalDeviceImageViewImageFormatInfoEXT(reinterpret_cast<const VkPhysicalDeviceImageViewImageFormatInfoEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT:
- safe_pNext = new safe_VkFilterCubicImageViewImageFormatPropertiesEXT(reinterpret_cast<const VkFilterCubicImageViewImageFormatPropertiesEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT:
- safe_pNext = new safe_VkDeviceQueueGlobalPriorityCreateInfoEXT(reinterpret_cast<const VkDeviceQueueGlobalPriorityCreateInfoEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT:
- safe_pNext = new safe_VkImportMemoryHostPointerInfoEXT(reinterpret_cast<const VkImportMemoryHostPointerInfoEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT:
- safe_pNext = new safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT(reinterpret_cast<const VkPhysicalDeviceExternalMemoryHostPropertiesEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD:
- safe_pNext = new safe_VkPipelineCompilerControlCreateInfoAMD(reinterpret_cast<const VkPipelineCompilerControlCreateInfoAMD *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD:
- safe_pNext = new safe_VkPhysicalDeviceShaderCorePropertiesAMD(reinterpret_cast<const VkPhysicalDeviceShaderCorePropertiesAMD *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD:
- safe_pNext = new safe_VkDeviceMemoryOverallocationCreateInfoAMD(reinterpret_cast<const VkDeviceMemoryOverallocationCreateInfoAMD *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT:
- safe_pNext = new safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT(reinterpret_cast<const VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT:
- safe_pNext = new safe_VkPipelineVertexInputDivisorStateCreateInfoEXT(reinterpret_cast<const VkPipelineVertexInputDivisorStateCreateInfoEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT:
- safe_pNext = new safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT(reinterpret_cast<const VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT:
- safe_pNext = new safe_VkPipelineCreationFeedbackCreateInfoEXT(reinterpret_cast<const VkPipelineCreationFeedbackCreateInfoEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV:
- safe_pNext = new safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV(reinterpret_cast<const VkPhysicalDeviceComputeShaderDerivativesFeaturesNV *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV:
- safe_pNext = new safe_VkPhysicalDeviceMeshShaderFeaturesNV(reinterpret_cast<const VkPhysicalDeviceMeshShaderFeaturesNV *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV:
- safe_pNext = new safe_VkPhysicalDeviceMeshShaderPropertiesNV(reinterpret_cast<const VkPhysicalDeviceMeshShaderPropertiesNV *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV:
- safe_pNext = new safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV(reinterpret_cast<const VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV:
- safe_pNext = new safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV(reinterpret_cast<const VkPhysicalDeviceShaderImageFootprintFeaturesNV *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV:
- safe_pNext = new safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV(reinterpret_cast<const VkPipelineViewportExclusiveScissorStateCreateInfoNV *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV:
- safe_pNext = new safe_VkPhysicalDeviceExclusiveScissorFeaturesNV(reinterpret_cast<const VkPhysicalDeviceExclusiveScissorFeaturesNV *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV:
- safe_pNext = new safe_VkQueueFamilyCheckpointPropertiesNV(reinterpret_cast<const VkQueueFamilyCheckpointPropertiesNV *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL:
- safe_pNext = new safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL(reinterpret_cast<const VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT:
- safe_pNext = new safe_VkPhysicalDevicePCIBusInfoPropertiesEXT(reinterpret_cast<const VkPhysicalDevicePCIBusInfoPropertiesEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD:
- safe_pNext = new safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD(reinterpret_cast<const VkDisplayNativeHdrSurfaceCapabilitiesAMD *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD:
- safe_pNext = new safe_VkSwapchainDisplayNativeHdrCreateInfoAMD(reinterpret_cast<const VkSwapchainDisplayNativeHdrCreateInfoAMD *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT:
- safe_pNext = new safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT(reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapFeaturesEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT:
- safe_pNext = new safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT(reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapPropertiesEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT:
- safe_pNext = new safe_VkRenderPassFragmentDensityMapCreateInfoEXT(reinterpret_cast<const VkRenderPassFragmentDensityMapCreateInfoEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT:
- safe_pNext = new safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT(reinterpret_cast<const VkPhysicalDeviceScalarBlockLayoutFeaturesEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT:
- safe_pNext = new safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT(reinterpret_cast<const VkPhysicalDeviceSubgroupSizeControlFeaturesEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT:
- safe_pNext = new safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT(reinterpret_cast<const VkPhysicalDeviceSubgroupSizeControlPropertiesEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT:
- safe_pNext = new safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT(reinterpret_cast<const VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD:
- safe_pNext = new safe_VkPhysicalDeviceShaderCoreProperties2AMD(reinterpret_cast<const VkPhysicalDeviceShaderCoreProperties2AMD *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD:
- safe_pNext = new safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD(reinterpret_cast<const VkPhysicalDeviceCoherentMemoryFeaturesAMD *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT:
- safe_pNext = new safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT(reinterpret_cast<const VkPhysicalDeviceMemoryBudgetPropertiesEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT:
- safe_pNext = new safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT(reinterpret_cast<const VkPhysicalDeviceMemoryPriorityFeaturesEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT:
- safe_pNext = new safe_VkMemoryPriorityAllocateInfoEXT(reinterpret_cast<const VkMemoryPriorityAllocateInfoEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV:
- safe_pNext = new safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV(reinterpret_cast<const VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT:
- safe_pNext = new safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT(reinterpret_cast<const VkPhysicalDeviceBufferDeviceAddressFeaturesEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT:
- safe_pNext = new safe_VkBufferDeviceAddressCreateInfoEXT(reinterpret_cast<const VkBufferDeviceAddressCreateInfoEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT:
- safe_pNext = new safe_VkImageStencilUsageCreateInfoEXT(reinterpret_cast<const VkImageStencilUsageCreateInfoEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT:
- safe_pNext = new safe_VkValidationFeaturesEXT(reinterpret_cast<const VkValidationFeaturesEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV:
- safe_pNext = new safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV(reinterpret_cast<const VkPhysicalDeviceCooperativeMatrixFeaturesNV *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV:
- safe_pNext = new safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV(reinterpret_cast<const VkPhysicalDeviceCooperativeMatrixPropertiesNV *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV:
- safe_pNext = new safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV(reinterpret_cast<const VkPhysicalDeviceCoverageReductionModeFeaturesNV *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV:
- safe_pNext = new safe_VkPipelineCoverageReductionStateCreateInfoNV(reinterpret_cast<const VkPipelineCoverageReductionStateCreateInfoNV *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT:
- safe_pNext = new safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT(reinterpret_cast<const VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT:
- safe_pNext = new safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT(reinterpret_cast<const VkPhysicalDeviceYcbcrImageArraysFeaturesEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT:
- safe_pNext = new safe_VkPhysicalDeviceLineRasterizationFeaturesEXT(reinterpret_cast<const VkPhysicalDeviceLineRasterizationFeaturesEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT:
- safe_pNext = new safe_VkPhysicalDeviceLineRasterizationPropertiesEXT(reinterpret_cast<const VkPhysicalDeviceLineRasterizationPropertiesEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT:
- safe_pNext = new safe_VkPipelineRasterizationLineStateCreateInfoEXT(reinterpret_cast<const VkPipelineRasterizationLineStateCreateInfoEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT:
- safe_pNext = new safe_VkPhysicalDeviceHostQueryResetFeaturesEXT(reinterpret_cast<const VkPhysicalDeviceHostQueryResetFeaturesEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT:
- safe_pNext = new safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT(reinterpret_cast<const VkPhysicalDeviceIndexTypeUint8FeaturesEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT:
- safe_pNext = new safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT(reinterpret_cast<const VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT:
- safe_pNext = new safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT(reinterpret_cast<const VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT:
- safe_pNext = new safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT(reinterpret_cast<const VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT *>(pNext));
- break;
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
- case VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID:
- safe_pNext = new safe_VkAndroidHardwareBufferUsageANDROID(reinterpret_cast<const VkAndroidHardwareBufferUsageANDROID *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID:
- safe_pNext = new safe_VkAndroidHardwareBufferFormatPropertiesANDROID(reinterpret_cast<const VkAndroidHardwareBufferFormatPropertiesANDROID *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID:
- safe_pNext = new safe_VkImportAndroidHardwareBufferInfoANDROID(reinterpret_cast<const VkImportAndroidHardwareBufferInfoANDROID *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID:
- safe_pNext = new safe_VkExternalFormatANDROID(reinterpret_cast<const VkExternalFormatANDROID *>(pNext));
- break;
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-#ifdef VK_USE_PLATFORM_GGP
- case VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP:
- safe_pNext = new safe_VkPresentFrameTokenGGP(reinterpret_cast<const VkPresentFrameTokenGGP *>(pNext));
- break;
-#endif // VK_USE_PLATFORM_GGP
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- case VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR:
- safe_pNext = new safe_VkImportMemoryWin32HandleInfoKHR(reinterpret_cast<const VkImportMemoryWin32HandleInfoKHR *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR:
- safe_pNext = new safe_VkExportMemoryWin32HandleInfoKHR(reinterpret_cast<const VkExportMemoryWin32HandleInfoKHR *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR:
- safe_pNext = new safe_VkWin32KeyedMutexAcquireReleaseInfoKHR(reinterpret_cast<const VkWin32KeyedMutexAcquireReleaseInfoKHR *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR:
- safe_pNext = new safe_VkExportSemaphoreWin32HandleInfoKHR(reinterpret_cast<const VkExportSemaphoreWin32HandleInfoKHR *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR:
- safe_pNext = new safe_VkD3D12FenceSubmitInfoKHR(reinterpret_cast<const VkD3D12FenceSubmitInfoKHR *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR:
- safe_pNext = new safe_VkExportFenceWin32HandleInfoKHR(reinterpret_cast<const VkExportFenceWin32HandleInfoKHR *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV:
- safe_pNext = new safe_VkImportMemoryWin32HandleInfoNV(reinterpret_cast<const VkImportMemoryWin32HandleInfoNV *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV:
- safe_pNext = new safe_VkExportMemoryWin32HandleInfoNV(reinterpret_cast<const VkExportMemoryWin32HandleInfoNV *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV:
- safe_pNext = new safe_VkWin32KeyedMutexAcquireReleaseInfoNV(reinterpret_cast<const VkWin32KeyedMutexAcquireReleaseInfoNV *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT:
- safe_pNext = new safe_VkSurfaceFullScreenExclusiveInfoEXT(reinterpret_cast<const VkSurfaceFullScreenExclusiveInfoEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT:
- safe_pNext = new safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT(reinterpret_cast<const VkSurfaceCapabilitiesFullScreenExclusiveEXT *>(pNext));
- break;
- case VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT:
- safe_pNext = new safe_VkSurfaceFullScreenExclusiveWin32InfoEXT(reinterpret_cast<const VkSurfaceFullScreenExclusiveWin32InfoEXT *>(pNext));
- break;
-#endif // VK_USE_PLATFORM_WIN32_KHR
- default: // Encountered an unknown sType -- skip (do not copy) this entry in the chain
- safe_pNext = SafePnextCopy(header->pNext);
- break;
- }
-
- return safe_pNext;
-}
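
[Editor's note, not part of the diff] For context on how this helper is typically used: SafePnextCopy deep-copies every recognized structure in an application-supplied pNext chain by dispatching on sType, and the resulting chain of safe_* nodes is later released with FreePnextChain (shown next). A minimal usage sketch follows; the declarations come from vk_safe_struct.h further down in this change, while the wrapper function itself is hypothetical.

    #include <vulkan/vulkan.h>

    // Declarations from vk_safe_struct.h (see the header hunk below).
    void *SafePnextCopy(const void *pNext);
    void FreePnextChain(const void *pNext);

    // Hypothetical helper: snapshot the pNext chain of a device-create call so a
    // layer can inspect it after the down-chain call returns.
    void InspectDeviceCreateChain(const VkDeviceCreateInfo *pCreateInfo) {
        // Deep-copies each recognized extension structure; an unknown sType is
        // skipped, but the remainder of the chain is still copied.
        void *chain_copy = SafePnextCopy(pCreateInfo->pNext);

        // ... examine or record the copied safe_* structures here ...

        // Every node allocated by SafePnextCopy must be released exactly once.
        FreePnextChain(chain_copy);
    }
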
-
-void FreePnextChain(const void *pNext) {
- if (!pNext) return;
-
- auto header = reinterpret_cast<const VkBaseOutStructure *>(pNext);
-
- switch (header->sType) {
- // Special-case Loader Instance Struct passed to/from layer in pNext chain
- case VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO:
- FreePnextChain(header->pNext);
- delete reinterpret_cast<const VkLayerInstanceCreateInfo *>(pNext);
- break;
- // Special-case Loader Device Struct passed to/from layer in pNext chain
- case VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO:
- FreePnextChain(header->pNext);
- delete reinterpret_cast<const VkLayerDeviceCreateInfo *>(pNext);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES:
- delete reinterpret_cast<const safe_VkPhysicalDeviceSubgroupProperties *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES:
- delete reinterpret_cast<const safe_VkPhysicalDevice16BitStorageFeatures *>(header);
- break;
- case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS:
- delete reinterpret_cast<const safe_VkMemoryDedicatedRequirements *>(header);
- break;
- case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO:
- delete reinterpret_cast<const safe_VkMemoryDedicatedAllocateInfo *>(header);
- break;
- case VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO:
- delete reinterpret_cast<const safe_VkMemoryAllocateFlagsInfo *>(header);
- break;
- case VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO:
- delete reinterpret_cast<const safe_VkDeviceGroupRenderPassBeginInfo *>(header);
- break;
- case VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO:
- delete reinterpret_cast<const safe_VkDeviceGroupCommandBufferBeginInfo *>(header);
- break;
- case VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO:
- delete reinterpret_cast<const safe_VkDeviceGroupSubmitInfo *>(header);
- break;
- case VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO:
- delete reinterpret_cast<const safe_VkDeviceGroupBindSparseInfo *>(header);
- break;
- case VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO:
- delete reinterpret_cast<const safe_VkBindBufferMemoryDeviceGroupInfo *>(header);
- break;
- case VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO:
- delete reinterpret_cast<const safe_VkBindImageMemoryDeviceGroupInfo *>(header);
- break;
- case VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO:
- delete reinterpret_cast<const safe_VkDeviceGroupDeviceCreateInfo *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2:
- delete reinterpret_cast<const safe_VkPhysicalDeviceFeatures2 *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES:
- delete reinterpret_cast<const safe_VkPhysicalDevicePointClippingProperties *>(header);
- break;
- case VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO:
- delete reinterpret_cast<const safe_VkRenderPassInputAttachmentAspectCreateInfo *>(header);
- break;
- case VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO:
- delete reinterpret_cast<const safe_VkImageViewUsageCreateInfo *>(header);
- break;
- case VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO:
- delete reinterpret_cast<const safe_VkPipelineTessellationDomainOriginStateCreateInfo *>(header);
- break;
- case VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO:
- delete reinterpret_cast<const safe_VkRenderPassMultiviewCreateInfo *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES:
- delete reinterpret_cast<const safe_VkPhysicalDeviceMultiviewFeatures *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES:
- delete reinterpret_cast<const safe_VkPhysicalDeviceMultiviewProperties *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES:
- delete reinterpret_cast<const safe_VkPhysicalDeviceVariablePointersFeatures *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES:
- delete reinterpret_cast<const safe_VkPhysicalDeviceProtectedMemoryFeatures *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES:
- delete reinterpret_cast<const safe_VkPhysicalDeviceProtectedMemoryProperties *>(header);
- break;
- case VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO:
- delete reinterpret_cast<const safe_VkProtectedSubmitInfo *>(header);
- break;
- case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO:
- delete reinterpret_cast<const safe_VkSamplerYcbcrConversionInfo *>(header);
- break;
- case VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO:
- delete reinterpret_cast<const safe_VkBindImagePlaneMemoryInfo *>(header);
- break;
- case VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO:
- delete reinterpret_cast<const safe_VkImagePlaneMemoryRequirementsInfo *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES:
- delete reinterpret_cast<const safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures *>(header);
- break;
- case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES:
- delete reinterpret_cast<const safe_VkSamplerYcbcrConversionImageFormatProperties *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO:
- delete reinterpret_cast<const safe_VkPhysicalDeviceExternalImageFormatInfo *>(header);
- break;
- case VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES:
- delete reinterpret_cast<const safe_VkExternalImageFormatProperties *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES:
- delete reinterpret_cast<const safe_VkPhysicalDeviceIDProperties *>(header);
- break;
- case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO:
- delete reinterpret_cast<const safe_VkExternalMemoryImageCreateInfo *>(header);
- break;
- case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO:
- delete reinterpret_cast<const safe_VkExternalMemoryBufferCreateInfo *>(header);
- break;
- case VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO:
- delete reinterpret_cast<const safe_VkExportMemoryAllocateInfo *>(header);
- break;
- case VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO:
- delete reinterpret_cast<const safe_VkExportFenceCreateInfo *>(header);
- break;
- case VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO:
- delete reinterpret_cast<const safe_VkExportSemaphoreCreateInfo *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES:
- delete reinterpret_cast<const safe_VkPhysicalDeviceMaintenance3Properties *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES:
- delete reinterpret_cast<const safe_VkPhysicalDeviceShaderDrawParametersFeatures *>(header);
- break;
- case VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR:
- delete reinterpret_cast<const safe_VkImageSwapchainCreateInfoKHR *>(header);
- break;
- case VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR:
- delete reinterpret_cast<const safe_VkBindImageMemorySwapchainInfoKHR *>(header);
- break;
- case VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR:
- delete reinterpret_cast<const safe_VkDeviceGroupPresentInfoKHR *>(header);
- break;
- case VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR:
- delete reinterpret_cast<const safe_VkDeviceGroupSwapchainCreateInfoKHR *>(header);
- break;
- case VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR:
- delete reinterpret_cast<const safe_VkDisplayPresentInfoKHR *>(header);
- break;
- case VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR:
- delete reinterpret_cast<const safe_VkImportMemoryFdInfoKHR *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR:
- delete reinterpret_cast<const safe_VkPhysicalDevicePushDescriptorPropertiesKHR *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR:
- delete reinterpret_cast<const safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR *>(header);
- break;
- case VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR:
- delete reinterpret_cast<const safe_VkPresentRegionsKHR *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR:
- delete reinterpret_cast<const safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR *>(header);
- break;
- case VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR:
- delete reinterpret_cast<const safe_VkFramebufferAttachmentsCreateInfoKHR *>(header);
- break;
- case VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR:
- delete reinterpret_cast<const safe_VkRenderPassAttachmentBeginInfoKHR *>(header);
- break;
- case VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR:
- delete reinterpret_cast<const safe_VkSharedPresentSurfaceCapabilitiesKHR *>(header);
- break;
- case VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR:
- delete reinterpret_cast<const safe_VkImageFormatListCreateInfoKHR *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR:
- delete reinterpret_cast<const safe_VkPhysicalDevice8BitStorageFeaturesKHR *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR:
- delete reinterpret_cast<const safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR:
- delete reinterpret_cast<const safe_VkPhysicalDeviceDriverPropertiesKHR *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR:
- delete reinterpret_cast<const safe_VkPhysicalDeviceFloatControlsPropertiesKHR *>(header);
- break;
- case VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR:
- delete reinterpret_cast<const safe_VkSubpassDescriptionDepthStencilResolveKHR *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR:
- delete reinterpret_cast<const safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR:
- delete reinterpret_cast<const safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR *>(header);
- break;
- case VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR:
- delete reinterpret_cast<const safe_VkSurfaceProtectedCapabilitiesKHR *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR:
- delete reinterpret_cast<const safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR:
- delete reinterpret_cast<const safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR *>(header);
- break;
- case VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT:
- delete reinterpret_cast<const safe_VkDebugReportCallbackCreateInfoEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD:
- delete reinterpret_cast<const safe_VkPipelineRasterizationStateRasterizationOrderAMD *>(header);
- break;
- case VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV:
- delete reinterpret_cast<const safe_VkDedicatedAllocationImageCreateInfoNV *>(header);
- break;
- case VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV:
- delete reinterpret_cast<const safe_VkDedicatedAllocationBufferCreateInfoNV *>(header);
- break;
- case VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV:
- delete reinterpret_cast<const safe_VkDedicatedAllocationMemoryAllocateInfoNV *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT:
- delete reinterpret_cast<const safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT:
- delete reinterpret_cast<const safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT:
- delete reinterpret_cast<const safe_VkPipelineRasterizationStateStreamCreateInfoEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD:
- delete reinterpret_cast<const safe_VkTextureLODGatherFormatPropertiesAMD *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV:
- delete reinterpret_cast<const safe_VkPhysicalDeviceCornerSampledImageFeaturesNV *>(header);
- break;
- case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV:
- delete reinterpret_cast<const safe_VkExternalMemoryImageCreateInfoNV *>(header);
- break;
- case VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV:
- delete reinterpret_cast<const safe_VkExportMemoryAllocateInfoNV *>(header);
- break;
- case VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT:
- delete reinterpret_cast<const safe_VkValidationFlagsEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT:
- delete reinterpret_cast<const safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT:
- delete reinterpret_cast<const safe_VkImageViewASTCDecodeModeEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT:
- delete reinterpret_cast<const safe_VkPhysicalDeviceASTCDecodeFeaturesEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT:
- delete reinterpret_cast<const safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT:
- delete reinterpret_cast<const safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV:
- delete reinterpret_cast<const safe_VkPipelineViewportWScalingStateCreateInfoNV *>(header);
- break;
- case VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT:
- delete reinterpret_cast<const safe_VkSwapchainCounterCreateInfoEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE:
- delete reinterpret_cast<const safe_VkPresentTimesInfoGOOGLE *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX:
- delete reinterpret_cast<const safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX *>(header);
- break;
- case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV:
- delete reinterpret_cast<const safe_VkPipelineViewportSwizzleStateCreateInfoNV *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT:
- delete reinterpret_cast<const safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT:
- delete reinterpret_cast<const safe_VkPipelineDiscardRectangleStateCreateInfoEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT:
- delete reinterpret_cast<const safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT:
- delete reinterpret_cast<const safe_VkPipelineRasterizationConservativeStateCreateInfoEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT:
- delete reinterpret_cast<const safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT:
- delete reinterpret_cast<const safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT:
- delete reinterpret_cast<const safe_VkDebugUtilsMessengerCreateInfoEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT:
- delete reinterpret_cast<const safe_VkSamplerReductionModeCreateInfoEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT:
- delete reinterpret_cast<const safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT:
- delete reinterpret_cast<const safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT:
- delete reinterpret_cast<const safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT:
- delete reinterpret_cast<const safe_VkWriteDescriptorSetInlineUniformBlockEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT:
- delete reinterpret_cast<const safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT:
- delete reinterpret_cast<const safe_VkSampleLocationsInfoEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT:
- delete reinterpret_cast<const safe_VkRenderPassSampleLocationsBeginInfoEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT:
- delete reinterpret_cast<const safe_VkPipelineSampleLocationsStateCreateInfoEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT:
- delete reinterpret_cast<const safe_VkPhysicalDeviceSampleLocationsPropertiesEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT:
- delete reinterpret_cast<const safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT:
- delete reinterpret_cast<const safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT:
- delete reinterpret_cast<const safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV:
- delete reinterpret_cast<const safe_VkPipelineCoverageToColorStateCreateInfoNV *>(header);
- break;
- case VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV:
- delete reinterpret_cast<const safe_VkPipelineCoverageModulationStateCreateInfoNV *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV:
- delete reinterpret_cast<const safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV:
- delete reinterpret_cast<const safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV *>(header);
- break;
- case VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT:
- delete reinterpret_cast<const safe_VkDrmFormatModifierPropertiesListEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT:
- delete reinterpret_cast<const safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT:
- delete reinterpret_cast<const safe_VkImageDrmFormatModifierListCreateInfoEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT:
- delete reinterpret_cast<const safe_VkImageDrmFormatModifierExplicitCreateInfoEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT:
- delete reinterpret_cast<const safe_VkShaderModuleValidationCacheCreateInfoEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT:
- delete reinterpret_cast<const safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT:
- delete reinterpret_cast<const safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT:
- delete reinterpret_cast<const safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT:
- delete reinterpret_cast<const safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT:
- delete reinterpret_cast<const safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV:
- delete reinterpret_cast<const safe_VkPipelineViewportShadingRateImageStateCreateInfoNV *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV:
- delete reinterpret_cast<const safe_VkPhysicalDeviceShadingRateImageFeaturesNV *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV:
- delete reinterpret_cast<const safe_VkPhysicalDeviceShadingRateImagePropertiesNV *>(header);
- break;
- case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV:
- delete reinterpret_cast<const safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV *>(header);
- break;
- case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV:
- delete reinterpret_cast<const safe_VkWriteDescriptorSetAccelerationStructureNV *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV:
- delete reinterpret_cast<const safe_VkPhysicalDeviceRayTracingPropertiesNV *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV:
- delete reinterpret_cast<const safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV *>(header);
- break;
- case VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV:
- delete reinterpret_cast<const safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT:
- delete reinterpret_cast<const safe_VkPhysicalDeviceImageViewImageFormatInfoEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT:
- delete reinterpret_cast<const safe_VkFilterCubicImageViewImageFormatPropertiesEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT:
- delete reinterpret_cast<const safe_VkDeviceQueueGlobalPriorityCreateInfoEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT:
- delete reinterpret_cast<const safe_VkImportMemoryHostPointerInfoEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT:
- delete reinterpret_cast<const safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD:
- delete reinterpret_cast<const safe_VkPipelineCompilerControlCreateInfoAMD *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD:
- delete reinterpret_cast<const safe_VkPhysicalDeviceShaderCorePropertiesAMD *>(header);
- break;
- case VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD:
- delete reinterpret_cast<const safe_VkDeviceMemoryOverallocationCreateInfoAMD *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT:
- delete reinterpret_cast<const safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT:
- delete reinterpret_cast<const safe_VkPipelineVertexInputDivisorStateCreateInfoEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT:
- delete reinterpret_cast<const safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT:
- delete reinterpret_cast<const safe_VkPipelineCreationFeedbackCreateInfoEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV:
- delete reinterpret_cast<const safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV:
- delete reinterpret_cast<const safe_VkPhysicalDeviceMeshShaderFeaturesNV *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV:
- delete reinterpret_cast<const safe_VkPhysicalDeviceMeshShaderPropertiesNV *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV:
- delete reinterpret_cast<const safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV:
- delete reinterpret_cast<const safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV *>(header);
- break;
- case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV:
- delete reinterpret_cast<const safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV:
- delete reinterpret_cast<const safe_VkPhysicalDeviceExclusiveScissorFeaturesNV *>(header);
- break;
- case VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV:
- delete reinterpret_cast<const safe_VkQueueFamilyCheckpointPropertiesNV *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL:
- delete reinterpret_cast<const safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT:
- delete reinterpret_cast<const safe_VkPhysicalDevicePCIBusInfoPropertiesEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD:
- delete reinterpret_cast<const safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD *>(header);
- break;
- case VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD:
- delete reinterpret_cast<const safe_VkSwapchainDisplayNativeHdrCreateInfoAMD *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT:
- delete reinterpret_cast<const safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT:
- delete reinterpret_cast<const safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT:
- delete reinterpret_cast<const safe_VkRenderPassFragmentDensityMapCreateInfoEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT:
- delete reinterpret_cast<const safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT:
- delete reinterpret_cast<const safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT:
- delete reinterpret_cast<const safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT:
- delete reinterpret_cast<const safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD:
- delete reinterpret_cast<const safe_VkPhysicalDeviceShaderCoreProperties2AMD *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD:
- delete reinterpret_cast<const safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT:
- delete reinterpret_cast<const safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT:
- delete reinterpret_cast<const safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT:
- delete reinterpret_cast<const safe_VkMemoryPriorityAllocateInfoEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV:
- delete reinterpret_cast<const safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT:
- delete reinterpret_cast<const safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT:
- delete reinterpret_cast<const safe_VkBufferDeviceAddressCreateInfoEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT:
- delete reinterpret_cast<const safe_VkImageStencilUsageCreateInfoEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT:
- delete reinterpret_cast<const safe_VkValidationFeaturesEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV:
- delete reinterpret_cast<const safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV:
- delete reinterpret_cast<const safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV:
- delete reinterpret_cast<const safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV *>(header);
- break;
- case VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV:
- delete reinterpret_cast<const safe_VkPipelineCoverageReductionStateCreateInfoNV *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT:
- delete reinterpret_cast<const safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT:
- delete reinterpret_cast<const safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT:
- delete reinterpret_cast<const safe_VkPhysicalDeviceLineRasterizationFeaturesEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT:
- delete reinterpret_cast<const safe_VkPhysicalDeviceLineRasterizationPropertiesEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT:
- delete reinterpret_cast<const safe_VkPipelineRasterizationLineStateCreateInfoEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT:
- delete reinterpret_cast<const safe_VkPhysicalDeviceHostQueryResetFeaturesEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT:
- delete reinterpret_cast<const safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT:
- delete reinterpret_cast<const safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT:
- delete reinterpret_cast<const safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT:
- delete reinterpret_cast<const safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT *>(header);
- break;
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
- case VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID:
- delete reinterpret_cast<const safe_VkAndroidHardwareBufferUsageANDROID *>(header);
- break;
- case VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID:
- delete reinterpret_cast<const safe_VkAndroidHardwareBufferFormatPropertiesANDROID *>(header);
- break;
- case VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID:
- delete reinterpret_cast<const safe_VkImportAndroidHardwareBufferInfoANDROID *>(header);
- break;
- case VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID:
- delete reinterpret_cast<const safe_VkExternalFormatANDROID *>(header);
- break;
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-#ifdef VK_USE_PLATFORM_GGP
- case VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP:
- delete reinterpret_cast<const safe_VkPresentFrameTokenGGP *>(header);
- break;
-#endif // VK_USE_PLATFORM_GGP
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- case VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR:
- delete reinterpret_cast<const safe_VkImportMemoryWin32HandleInfoKHR *>(header);
- break;
- case VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR:
- delete reinterpret_cast<const safe_VkExportMemoryWin32HandleInfoKHR *>(header);
- break;
- case VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR:
- delete reinterpret_cast<const safe_VkWin32KeyedMutexAcquireReleaseInfoKHR *>(header);
- break;
- case VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR:
- delete reinterpret_cast<const safe_VkExportSemaphoreWin32HandleInfoKHR *>(header);
- break;
- case VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR:
- delete reinterpret_cast<const safe_VkD3D12FenceSubmitInfoKHR *>(header);
- break;
- case VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR:
- delete reinterpret_cast<const safe_VkExportFenceWin32HandleInfoKHR *>(header);
- break;
- case VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV:
- delete reinterpret_cast<const safe_VkImportMemoryWin32HandleInfoNV *>(header);
- break;
- case VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV:
- delete reinterpret_cast<const safe_VkExportMemoryWin32HandleInfoNV *>(header);
- break;
- case VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV:
- delete reinterpret_cast<const safe_VkWin32KeyedMutexAcquireReleaseInfoNV *>(header);
- break;
- case VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT:
- delete reinterpret_cast<const safe_VkSurfaceFullScreenExclusiveInfoEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT:
- delete reinterpret_cast<const safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT *>(header);
- break;
- case VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT:
- delete reinterpret_cast<const safe_VkSurfaceFullScreenExclusiveWin32InfoEXT *>(header);
- break;
-#endif // VK_USE_PLATFORM_WIN32_KHR
- default: // Encountered an unknown sType -- panic, there should be none such in safe chain
- assert(false);
- FreePnextChain(header->pNext);
- break;
- }
-}
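
The hunk above ends with the generated FreePnextChain dispatcher, which walks a copied pNext chain and deletes each safe_* wrapper by switching on sType (asserting on any sType it does not recognize); the header removed below declares its counterpart SafePnextCopy. As a rough illustration only (not part of the diff, and using a made-up caller), a copy/free round trip with these two helpers would look roughly like this:

    // Illustrative sketch, assuming the SafePnextCopy/FreePnextChain
    // declarations from the removed vk_safe_struct.h are in scope.
    #include <vulkan/vulkan.h>

    void *SafePnextCopy(const void *pNext);
    void FreePnextChain(const void *pNext);

    void snapshot_pnext_example(const VkBufferCreateInfo *create_info) {
        // Deep-copy the caller's pNext chain so it can be kept past the API call.
        void *copied_chain = SafePnextCopy(create_info->pNext);

        // ... inspect or retain copied_chain while recording layer state ...

        // Every SafePnextCopy must be balanced by FreePnextChain, which is the
        // sType-switch destructor shown in the hunk above.
        FreePnextChain(copied_chain);
    }
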
diff --git a/layers/generated/vk_safe_struct.h b/layers/generated/vk_safe_struct.h
deleted file mode 100644
index ca7c83e88..000000000
--- a/layers/generated/vk_safe_struct.h
+++ /dev/null
@@ -1,6665 +0,0 @@
-// *** THIS FILE IS GENERATED - DO NOT EDIT ***
-// See helper_file_generator.py for modifications
-
-
-/***************************************************************************
- *
- * Copyright (c) 2015-2019 The Khronos Group Inc.
- * Copyright (c) 2015-2019 Valve Corporation
- * Copyright (c) 2015-2019 LunarG, Inc.
- * Copyright (c) 2015-2019 Google Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * Author: Mark Lobodzinski <mark@lunarg.com>
- * Author: Courtney Goeltzenleuchter <courtneygo@google.com>
- * Author: Tobin Ehlis <tobine@google.com>
- * Author: Chris Forbes <chrisforbes@google.com>
- * Author: John Zulauf<jzulauf@lunarg.com>
- *
- ****************************************************************************/
-
-
-#pragma once
-#include <vulkan/vulkan.h>
-
-void *SafePnextCopy(const void *pNext);
-void FreePnextChain(const void *pNext);
-char *SafeStringCopy(const char *in_string);
-
-
-struct safe_VkApplicationInfo {
- VkStructureType sType;
- const void* pNext;
- const char* pApplicationName;
- uint32_t applicationVersion;
- const char* pEngineName;
- uint32_t engineVersion;
- uint32_t apiVersion;
- safe_VkApplicationInfo(const VkApplicationInfo* in_struct);
- safe_VkApplicationInfo(const safe_VkApplicationInfo& src);
- safe_VkApplicationInfo& operator=(const safe_VkApplicationInfo& src);
- safe_VkApplicationInfo();
- ~safe_VkApplicationInfo();
- void initialize(const VkApplicationInfo* in_struct);
- void initialize(const safe_VkApplicationInfo* src);
- VkApplicationInfo *ptr() { return reinterpret_cast<VkApplicationInfo *>(this); }
- VkApplicationInfo const *ptr() const { return reinterpret_cast<VkApplicationInfo const *>(this); }
-};
-
-struct safe_VkInstanceCreateInfo {
- VkStructureType sType;
- const void* pNext;
- VkInstanceCreateFlags flags;
- safe_VkApplicationInfo* pApplicationInfo;
- uint32_t enabledLayerCount;
- const char* const* ppEnabledLayerNames;
- uint32_t enabledExtensionCount;
- const char* const* ppEnabledExtensionNames;
- safe_VkInstanceCreateInfo(const VkInstanceCreateInfo* in_struct);
- safe_VkInstanceCreateInfo(const safe_VkInstanceCreateInfo& src);
- safe_VkInstanceCreateInfo& operator=(const safe_VkInstanceCreateInfo& src);
- safe_VkInstanceCreateInfo();
- ~safe_VkInstanceCreateInfo();
- void initialize(const VkInstanceCreateInfo* in_struct);
- void initialize(const safe_VkInstanceCreateInfo* src);
- VkInstanceCreateInfo *ptr() { return reinterpret_cast<VkInstanceCreateInfo *>(this); }
- VkInstanceCreateInfo const *ptr() const { return reinterpret_cast<VkInstanceCreateInfo const *>(this); }
-};
-
-struct safe_VkAllocationCallbacks {
- void* pUserData;
- PFN_vkAllocationFunction pfnAllocation;
- PFN_vkReallocationFunction pfnReallocation;
- PFN_vkFreeFunction pfnFree;
- PFN_vkInternalAllocationNotification pfnInternalAllocation;
- PFN_vkInternalFreeNotification pfnInternalFree;
- safe_VkAllocationCallbacks(const VkAllocationCallbacks* in_struct);
- safe_VkAllocationCallbacks(const safe_VkAllocationCallbacks& src);
- safe_VkAllocationCallbacks& operator=(const safe_VkAllocationCallbacks& src);
- safe_VkAllocationCallbacks();
- ~safe_VkAllocationCallbacks();
- void initialize(const VkAllocationCallbacks* in_struct);
- void initialize(const safe_VkAllocationCallbacks* src);
- VkAllocationCallbacks *ptr() { return reinterpret_cast<VkAllocationCallbacks *>(this); }
- VkAllocationCallbacks const *ptr() const { return reinterpret_cast<VkAllocationCallbacks const *>(this); }
-};
-
-struct safe_VkDeviceQueueCreateInfo {
- VkStructureType sType;
- const void* pNext;
- VkDeviceQueueCreateFlags flags;
- uint32_t queueFamilyIndex;
- uint32_t queueCount;
- const float* pQueuePriorities;
- safe_VkDeviceQueueCreateInfo(const VkDeviceQueueCreateInfo* in_struct);
- safe_VkDeviceQueueCreateInfo(const safe_VkDeviceQueueCreateInfo& src);
- safe_VkDeviceQueueCreateInfo& operator=(const safe_VkDeviceQueueCreateInfo& src);
- safe_VkDeviceQueueCreateInfo();
- ~safe_VkDeviceQueueCreateInfo();
- void initialize(const VkDeviceQueueCreateInfo* in_struct);
- void initialize(const safe_VkDeviceQueueCreateInfo* src);
- VkDeviceQueueCreateInfo *ptr() { return reinterpret_cast<VkDeviceQueueCreateInfo *>(this); }
- VkDeviceQueueCreateInfo const *ptr() const { return reinterpret_cast<VkDeviceQueueCreateInfo const *>(this); }
-};
-
-struct safe_VkDeviceCreateInfo {
- VkStructureType sType;
- const void* pNext;
- VkDeviceCreateFlags flags;
- uint32_t queueCreateInfoCount;
- safe_VkDeviceQueueCreateInfo* pQueueCreateInfos;
- uint32_t enabledLayerCount;
- const char* const* ppEnabledLayerNames;
- uint32_t enabledExtensionCount;
- const char* const* ppEnabledExtensionNames;
- const VkPhysicalDeviceFeatures* pEnabledFeatures;
- safe_VkDeviceCreateInfo(const VkDeviceCreateInfo* in_struct);
- safe_VkDeviceCreateInfo(const safe_VkDeviceCreateInfo& src);
- safe_VkDeviceCreateInfo& operator=(const safe_VkDeviceCreateInfo& src);
- safe_VkDeviceCreateInfo();
- ~safe_VkDeviceCreateInfo();
- void initialize(const VkDeviceCreateInfo* in_struct);
- void initialize(const safe_VkDeviceCreateInfo* src);
- VkDeviceCreateInfo *ptr() { return reinterpret_cast<VkDeviceCreateInfo *>(this); }
- VkDeviceCreateInfo const *ptr() const { return reinterpret_cast<VkDeviceCreateInfo const *>(this); }
-};
-
-struct safe_VkSubmitInfo {
- VkStructureType sType;
- const void* pNext;
- uint32_t waitSemaphoreCount;
- VkSemaphore* pWaitSemaphores;
- const VkPipelineStageFlags* pWaitDstStageMask;
- uint32_t commandBufferCount;
- VkCommandBuffer* pCommandBuffers;
- uint32_t signalSemaphoreCount;
- VkSemaphore* pSignalSemaphores;
- safe_VkSubmitInfo(const VkSubmitInfo* in_struct);
- safe_VkSubmitInfo(const safe_VkSubmitInfo& src);
- safe_VkSubmitInfo& operator=(const safe_VkSubmitInfo& src);
- safe_VkSubmitInfo();
- ~safe_VkSubmitInfo();
- void initialize(const VkSubmitInfo* in_struct);
- void initialize(const safe_VkSubmitInfo* src);
- VkSubmitInfo *ptr() { return reinterpret_cast<VkSubmitInfo *>(this); }
- VkSubmitInfo const *ptr() const { return reinterpret_cast<VkSubmitInfo const *>(this); }
-};
-
-struct safe_VkMemoryAllocateInfo {
- VkStructureType sType;
- const void* pNext;
- VkDeviceSize allocationSize;
- uint32_t memoryTypeIndex;
- safe_VkMemoryAllocateInfo(const VkMemoryAllocateInfo* in_struct);
- safe_VkMemoryAllocateInfo(const safe_VkMemoryAllocateInfo& src);
- safe_VkMemoryAllocateInfo& operator=(const safe_VkMemoryAllocateInfo& src);
- safe_VkMemoryAllocateInfo();
- ~safe_VkMemoryAllocateInfo();
- void initialize(const VkMemoryAllocateInfo* in_struct);
- void initialize(const safe_VkMemoryAllocateInfo* src);
- VkMemoryAllocateInfo *ptr() { return reinterpret_cast<VkMemoryAllocateInfo *>(this); }
- VkMemoryAllocateInfo const *ptr() const { return reinterpret_cast<VkMemoryAllocateInfo const *>(this); }
-};
-
-struct safe_VkMappedMemoryRange {
- VkStructureType sType;
- const void* pNext;
- VkDeviceMemory memory;
- VkDeviceSize offset;
- VkDeviceSize size;
- safe_VkMappedMemoryRange(const VkMappedMemoryRange* in_struct);
- safe_VkMappedMemoryRange(const safe_VkMappedMemoryRange& src);
- safe_VkMappedMemoryRange& operator=(const safe_VkMappedMemoryRange& src);
- safe_VkMappedMemoryRange();
- ~safe_VkMappedMemoryRange();
- void initialize(const VkMappedMemoryRange* in_struct);
- void initialize(const safe_VkMappedMemoryRange* src);
- VkMappedMemoryRange *ptr() { return reinterpret_cast<VkMappedMemoryRange *>(this); }
- VkMappedMemoryRange const *ptr() const { return reinterpret_cast<VkMappedMemoryRange const *>(this); }
-};
-
-struct safe_VkSparseBufferMemoryBindInfo {
- VkBuffer buffer;
- uint32_t bindCount;
- VkSparseMemoryBind* pBinds;
- safe_VkSparseBufferMemoryBindInfo(const VkSparseBufferMemoryBindInfo* in_struct);
- safe_VkSparseBufferMemoryBindInfo(const safe_VkSparseBufferMemoryBindInfo& src);
- safe_VkSparseBufferMemoryBindInfo& operator=(const safe_VkSparseBufferMemoryBindInfo& src);
- safe_VkSparseBufferMemoryBindInfo();
- ~safe_VkSparseBufferMemoryBindInfo();
- void initialize(const VkSparseBufferMemoryBindInfo* in_struct);
- void initialize(const safe_VkSparseBufferMemoryBindInfo* src);
- VkSparseBufferMemoryBindInfo *ptr() { return reinterpret_cast<VkSparseBufferMemoryBindInfo *>(this); }
- VkSparseBufferMemoryBindInfo const *ptr() const { return reinterpret_cast<VkSparseBufferMemoryBindInfo const *>(this); }
-};
-
-struct safe_VkSparseImageOpaqueMemoryBindInfo {
- VkImage image;
- uint32_t bindCount;
- VkSparseMemoryBind* pBinds;
- safe_VkSparseImageOpaqueMemoryBindInfo(const VkSparseImageOpaqueMemoryBindInfo* in_struct);
- safe_VkSparseImageOpaqueMemoryBindInfo(const safe_VkSparseImageOpaqueMemoryBindInfo& src);
- safe_VkSparseImageOpaqueMemoryBindInfo& operator=(const safe_VkSparseImageOpaqueMemoryBindInfo& src);
- safe_VkSparseImageOpaqueMemoryBindInfo();
- ~safe_VkSparseImageOpaqueMemoryBindInfo();
- void initialize(const VkSparseImageOpaqueMemoryBindInfo* in_struct);
- void initialize(const safe_VkSparseImageOpaqueMemoryBindInfo* src);
- VkSparseImageOpaqueMemoryBindInfo *ptr() { return reinterpret_cast<VkSparseImageOpaqueMemoryBindInfo *>(this); }
- VkSparseImageOpaqueMemoryBindInfo const *ptr() const { return reinterpret_cast<VkSparseImageOpaqueMemoryBindInfo const *>(this); }
-};
-
-struct safe_VkSparseImageMemoryBindInfo {
- VkImage image;
- uint32_t bindCount;
- VkSparseImageMemoryBind* pBinds;
- safe_VkSparseImageMemoryBindInfo(const VkSparseImageMemoryBindInfo* in_struct);
- safe_VkSparseImageMemoryBindInfo(const safe_VkSparseImageMemoryBindInfo& src);
- safe_VkSparseImageMemoryBindInfo& operator=(const safe_VkSparseImageMemoryBindInfo& src);
- safe_VkSparseImageMemoryBindInfo();
- ~safe_VkSparseImageMemoryBindInfo();
- void initialize(const VkSparseImageMemoryBindInfo* in_struct);
- void initialize(const safe_VkSparseImageMemoryBindInfo* src);
- VkSparseImageMemoryBindInfo *ptr() { return reinterpret_cast<VkSparseImageMemoryBindInfo *>(this); }
- VkSparseImageMemoryBindInfo const *ptr() const { return reinterpret_cast<VkSparseImageMemoryBindInfo const *>(this); }
-};
-
-struct safe_VkBindSparseInfo {
- VkStructureType sType;
- const void* pNext;
- uint32_t waitSemaphoreCount;
- VkSemaphore* pWaitSemaphores;
- uint32_t bufferBindCount;
- safe_VkSparseBufferMemoryBindInfo* pBufferBinds;
- uint32_t imageOpaqueBindCount;
- safe_VkSparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds;
- uint32_t imageBindCount;
- safe_VkSparseImageMemoryBindInfo* pImageBinds;
- uint32_t signalSemaphoreCount;
- VkSemaphore* pSignalSemaphores;
- safe_VkBindSparseInfo(const VkBindSparseInfo* in_struct);
- safe_VkBindSparseInfo(const safe_VkBindSparseInfo& src);
- safe_VkBindSparseInfo& operator=(const safe_VkBindSparseInfo& src);
- safe_VkBindSparseInfo();
- ~safe_VkBindSparseInfo();
- void initialize(const VkBindSparseInfo* in_struct);
- void initialize(const safe_VkBindSparseInfo* src);
- VkBindSparseInfo *ptr() { return reinterpret_cast<VkBindSparseInfo *>(this); }
- VkBindSparseInfo const *ptr() const { return reinterpret_cast<VkBindSparseInfo const *>(this); }
-};
-
-struct safe_VkFenceCreateInfo {
- VkStructureType sType;
- const void* pNext;
- VkFenceCreateFlags flags;
- safe_VkFenceCreateInfo(const VkFenceCreateInfo* in_struct);
- safe_VkFenceCreateInfo(const safe_VkFenceCreateInfo& src);
- safe_VkFenceCreateInfo& operator=(const safe_VkFenceCreateInfo& src);
- safe_VkFenceCreateInfo();
- ~safe_VkFenceCreateInfo();
- void initialize(const VkFenceCreateInfo* in_struct);
- void initialize(const safe_VkFenceCreateInfo* src);
- VkFenceCreateInfo *ptr() { return reinterpret_cast<VkFenceCreateInfo *>(this); }
- VkFenceCreateInfo const *ptr() const { return reinterpret_cast<VkFenceCreateInfo const *>(this); }
-};
-
-struct safe_VkSemaphoreCreateInfo {
- VkStructureType sType;
- const void* pNext;
- VkSemaphoreCreateFlags flags;
- safe_VkSemaphoreCreateInfo(const VkSemaphoreCreateInfo* in_struct);
- safe_VkSemaphoreCreateInfo(const safe_VkSemaphoreCreateInfo& src);
- safe_VkSemaphoreCreateInfo& operator=(const safe_VkSemaphoreCreateInfo& src);
- safe_VkSemaphoreCreateInfo();
- ~safe_VkSemaphoreCreateInfo();
- void initialize(const VkSemaphoreCreateInfo* in_struct);
- void initialize(const safe_VkSemaphoreCreateInfo* src);
- VkSemaphoreCreateInfo *ptr() { return reinterpret_cast<VkSemaphoreCreateInfo *>(this); }
- VkSemaphoreCreateInfo const *ptr() const { return reinterpret_cast<VkSemaphoreCreateInfo const *>(this); }
-};
-
-struct safe_VkEventCreateInfo {
- VkStructureType sType;
- const void* pNext;
- VkEventCreateFlags flags;
- safe_VkEventCreateInfo(const VkEventCreateInfo* in_struct);
- safe_VkEventCreateInfo(const safe_VkEventCreateInfo& src);
- safe_VkEventCreateInfo& operator=(const safe_VkEventCreateInfo& src);
- safe_VkEventCreateInfo();
- ~safe_VkEventCreateInfo();
- void initialize(const VkEventCreateInfo* in_struct);
- void initialize(const safe_VkEventCreateInfo* src);
- VkEventCreateInfo *ptr() { return reinterpret_cast<VkEventCreateInfo *>(this); }
- VkEventCreateInfo const *ptr() const { return reinterpret_cast<VkEventCreateInfo const *>(this); }
-};
-
-struct safe_VkQueryPoolCreateInfo {
- VkStructureType sType;
- const void* pNext;
- VkQueryPoolCreateFlags flags;
- VkQueryType queryType;
- uint32_t queryCount;
- VkQueryPipelineStatisticFlags pipelineStatistics;
- safe_VkQueryPoolCreateInfo(const VkQueryPoolCreateInfo* in_struct);
- safe_VkQueryPoolCreateInfo(const safe_VkQueryPoolCreateInfo& src);
- safe_VkQueryPoolCreateInfo& operator=(const safe_VkQueryPoolCreateInfo& src);
- safe_VkQueryPoolCreateInfo();
- ~safe_VkQueryPoolCreateInfo();
- void initialize(const VkQueryPoolCreateInfo* in_struct);
- void initialize(const safe_VkQueryPoolCreateInfo* src);
- VkQueryPoolCreateInfo *ptr() { return reinterpret_cast<VkQueryPoolCreateInfo *>(this); }
- VkQueryPoolCreateInfo const *ptr() const { return reinterpret_cast<VkQueryPoolCreateInfo const *>(this); }
-};
-
-struct safe_VkBufferCreateInfo {
- VkStructureType sType;
- const void* pNext;
- VkBufferCreateFlags flags;
- VkDeviceSize size;
- VkBufferUsageFlags usage;
- VkSharingMode sharingMode;
- uint32_t queueFamilyIndexCount;
- const uint32_t* pQueueFamilyIndices;
- safe_VkBufferCreateInfo(const VkBufferCreateInfo* in_struct);
- safe_VkBufferCreateInfo(const safe_VkBufferCreateInfo& src);
- safe_VkBufferCreateInfo& operator=(const safe_VkBufferCreateInfo& src);
- safe_VkBufferCreateInfo();
- ~safe_VkBufferCreateInfo();
- void initialize(const VkBufferCreateInfo* in_struct);
- void initialize(const safe_VkBufferCreateInfo* src);
- VkBufferCreateInfo *ptr() { return reinterpret_cast<VkBufferCreateInfo *>(this); }
- VkBufferCreateInfo const *ptr() const { return reinterpret_cast<VkBufferCreateInfo const *>(this); }
-};
-
-struct safe_VkBufferViewCreateInfo {
- VkStructureType sType;
- const void* pNext;
- VkBufferViewCreateFlags flags;
- VkBuffer buffer;
- VkFormat format;
- VkDeviceSize offset;
- VkDeviceSize range;
- safe_VkBufferViewCreateInfo(const VkBufferViewCreateInfo* in_struct);
- safe_VkBufferViewCreateInfo(const safe_VkBufferViewCreateInfo& src);
- safe_VkBufferViewCreateInfo& operator=(const safe_VkBufferViewCreateInfo& src);
- safe_VkBufferViewCreateInfo();
- ~safe_VkBufferViewCreateInfo();
- void initialize(const VkBufferViewCreateInfo* in_struct);
- void initialize(const safe_VkBufferViewCreateInfo* src);
- VkBufferViewCreateInfo *ptr() { return reinterpret_cast<VkBufferViewCreateInfo *>(this); }
- VkBufferViewCreateInfo const *ptr() const { return reinterpret_cast<VkBufferViewCreateInfo const *>(this); }
-};
-
-struct safe_VkImageCreateInfo {
- VkStructureType sType;
- const void* pNext;
- VkImageCreateFlags flags;
- VkImageType imageType;
- VkFormat format;
- VkExtent3D extent;
- uint32_t mipLevels;
- uint32_t arrayLayers;
- VkSampleCountFlagBits samples;
- VkImageTiling tiling;
- VkImageUsageFlags usage;
- VkSharingMode sharingMode;
- uint32_t queueFamilyIndexCount;
- const uint32_t* pQueueFamilyIndices;
- VkImageLayout initialLayout;
- safe_VkImageCreateInfo(const VkImageCreateInfo* in_struct);
- safe_VkImageCreateInfo(const safe_VkImageCreateInfo& src);
- safe_VkImageCreateInfo& operator=(const safe_VkImageCreateInfo& src);
- safe_VkImageCreateInfo();
- ~safe_VkImageCreateInfo();
- void initialize(const VkImageCreateInfo* in_struct);
- void initialize(const safe_VkImageCreateInfo* src);
- VkImageCreateInfo *ptr() { return reinterpret_cast<VkImageCreateInfo *>(this); }
- VkImageCreateInfo const *ptr() const { return reinterpret_cast<VkImageCreateInfo const *>(this); }
-};
-
-struct safe_VkImageViewCreateInfo {
- VkStructureType sType;
- const void* pNext;
- VkImageViewCreateFlags flags;
- VkImage image;
- VkImageViewType viewType;
- VkFormat format;
- VkComponentMapping components;
- VkImageSubresourceRange subresourceRange;
- safe_VkImageViewCreateInfo(const VkImageViewCreateInfo* in_struct);
- safe_VkImageViewCreateInfo(const safe_VkImageViewCreateInfo& src);
- safe_VkImageViewCreateInfo& operator=(const safe_VkImageViewCreateInfo& src);
- safe_VkImageViewCreateInfo();
- ~safe_VkImageViewCreateInfo();
- void initialize(const VkImageViewCreateInfo* in_struct);
- void initialize(const safe_VkImageViewCreateInfo* src);
- VkImageViewCreateInfo *ptr() { return reinterpret_cast<VkImageViewCreateInfo *>(this); }
- VkImageViewCreateInfo const *ptr() const { return reinterpret_cast<VkImageViewCreateInfo const *>(this); }
-};
-
-struct safe_VkShaderModuleCreateInfo {
- VkStructureType sType;
- const void* pNext;
- VkShaderModuleCreateFlags flags;
- size_t codeSize;
- const uint32_t* pCode;
- safe_VkShaderModuleCreateInfo(const VkShaderModuleCreateInfo* in_struct);
- safe_VkShaderModuleCreateInfo(const safe_VkShaderModuleCreateInfo& src);
- safe_VkShaderModuleCreateInfo& operator=(const safe_VkShaderModuleCreateInfo& src);
- safe_VkShaderModuleCreateInfo();
- ~safe_VkShaderModuleCreateInfo();
- void initialize(const VkShaderModuleCreateInfo* in_struct);
- void initialize(const safe_VkShaderModuleCreateInfo* src);
- VkShaderModuleCreateInfo *ptr() { return reinterpret_cast<VkShaderModuleCreateInfo *>(this); }
- VkShaderModuleCreateInfo const *ptr() const { return reinterpret_cast<VkShaderModuleCreateInfo const *>(this); }
-};
-
-struct safe_VkPipelineCacheCreateInfo {
- VkStructureType sType;
- const void* pNext;
- VkPipelineCacheCreateFlags flags;
- size_t initialDataSize;
- const void* pInitialData;
- safe_VkPipelineCacheCreateInfo(const VkPipelineCacheCreateInfo* in_struct);
- safe_VkPipelineCacheCreateInfo(const safe_VkPipelineCacheCreateInfo& src);
- safe_VkPipelineCacheCreateInfo& operator=(const safe_VkPipelineCacheCreateInfo& src);
- safe_VkPipelineCacheCreateInfo();
- ~safe_VkPipelineCacheCreateInfo();
- void initialize(const VkPipelineCacheCreateInfo* in_struct);
- void initialize(const safe_VkPipelineCacheCreateInfo* src);
- VkPipelineCacheCreateInfo *ptr() { return reinterpret_cast<VkPipelineCacheCreateInfo *>(this); }
- VkPipelineCacheCreateInfo const *ptr() const { return reinterpret_cast<VkPipelineCacheCreateInfo const *>(this); }
-};
-
-struct safe_VkSpecializationInfo {
- uint32_t mapEntryCount;
- const VkSpecializationMapEntry* pMapEntries;
- size_t dataSize;
- const void* pData;
- safe_VkSpecializationInfo(const VkSpecializationInfo* in_struct);
- safe_VkSpecializationInfo(const safe_VkSpecializationInfo& src);
- safe_VkSpecializationInfo& operator=(const safe_VkSpecializationInfo& src);
- safe_VkSpecializationInfo();
- ~safe_VkSpecializationInfo();
- void initialize(const VkSpecializationInfo* in_struct);
- void initialize(const safe_VkSpecializationInfo* src);
- VkSpecializationInfo *ptr() { return reinterpret_cast<VkSpecializationInfo *>(this); }
- VkSpecializationInfo const *ptr() const { return reinterpret_cast<VkSpecializationInfo const *>(this); }
-};
-
-struct safe_VkPipelineShaderStageCreateInfo {
- VkStructureType sType;
- const void* pNext;
- VkPipelineShaderStageCreateFlags flags;
- VkShaderStageFlagBits stage;
- VkShaderModule module;
- const char* pName;
- safe_VkSpecializationInfo* pSpecializationInfo;
- safe_VkPipelineShaderStageCreateInfo(const VkPipelineShaderStageCreateInfo* in_struct);
- safe_VkPipelineShaderStageCreateInfo(const safe_VkPipelineShaderStageCreateInfo& src);
- safe_VkPipelineShaderStageCreateInfo& operator=(const safe_VkPipelineShaderStageCreateInfo& src);
- safe_VkPipelineShaderStageCreateInfo();
- ~safe_VkPipelineShaderStageCreateInfo();
- void initialize(const VkPipelineShaderStageCreateInfo* in_struct);
- void initialize(const safe_VkPipelineShaderStageCreateInfo* src);
- VkPipelineShaderStageCreateInfo *ptr() { return reinterpret_cast<VkPipelineShaderStageCreateInfo *>(this); }
- VkPipelineShaderStageCreateInfo const *ptr() const { return reinterpret_cast<VkPipelineShaderStageCreateInfo const *>(this); }
-};
-
-struct safe_VkPipelineVertexInputStateCreateInfo {
- VkStructureType sType;
- const void* pNext;
- VkPipelineVertexInputStateCreateFlags flags;
- uint32_t vertexBindingDescriptionCount;
- const VkVertexInputBindingDescription* pVertexBindingDescriptions;
- uint32_t vertexAttributeDescriptionCount;
- const VkVertexInputAttributeDescription* pVertexAttributeDescriptions;
- safe_VkPipelineVertexInputStateCreateInfo(const VkPipelineVertexInputStateCreateInfo* in_struct);
- safe_VkPipelineVertexInputStateCreateInfo(const safe_VkPipelineVertexInputStateCreateInfo& src);
- safe_VkPipelineVertexInputStateCreateInfo& operator=(const safe_VkPipelineVertexInputStateCreateInfo& src);
- safe_VkPipelineVertexInputStateCreateInfo();
- ~safe_VkPipelineVertexInputStateCreateInfo();
- void initialize(const VkPipelineVertexInputStateCreateInfo* in_struct);
- void initialize(const safe_VkPipelineVertexInputStateCreateInfo* src);
- VkPipelineVertexInputStateCreateInfo *ptr() { return reinterpret_cast<VkPipelineVertexInputStateCreateInfo *>(this); }
- VkPipelineVertexInputStateCreateInfo const *ptr() const { return reinterpret_cast<VkPipelineVertexInputStateCreateInfo const *>(this); }
-};
-
-struct safe_VkPipelineInputAssemblyStateCreateInfo {
- VkStructureType sType;
- const void* pNext;
- VkPipelineInputAssemblyStateCreateFlags flags;
- VkPrimitiveTopology topology;
- VkBool32 primitiveRestartEnable;
- safe_VkPipelineInputAssemblyStateCreateInfo(const VkPipelineInputAssemblyStateCreateInfo* in_struct);
- safe_VkPipelineInputAssemblyStateCreateInfo(const safe_VkPipelineInputAssemblyStateCreateInfo& src);
- safe_VkPipelineInputAssemblyStateCreateInfo& operator=(const safe_VkPipelineInputAssemblyStateCreateInfo& src);
- safe_VkPipelineInputAssemblyStateCreateInfo();
- ~safe_VkPipelineInputAssemblyStateCreateInfo();
- void initialize(const VkPipelineInputAssemblyStateCreateInfo* in_struct);
- void initialize(const safe_VkPipelineInputAssemblyStateCreateInfo* src);
- VkPipelineInputAssemblyStateCreateInfo *ptr() { return reinterpret_cast<VkPipelineInputAssemblyStateCreateInfo *>(this); }
- VkPipelineInputAssemblyStateCreateInfo const *ptr() const { return reinterpret_cast<VkPipelineInputAssemblyStateCreateInfo const *>(this); }
-};
-
-struct safe_VkPipelineTessellationStateCreateInfo {
- VkStructureType sType;
- const void* pNext;
- VkPipelineTessellationStateCreateFlags flags;
- uint32_t patchControlPoints;
- safe_VkPipelineTessellationStateCreateInfo(const VkPipelineTessellationStateCreateInfo* in_struct);
- safe_VkPipelineTessellationStateCreateInfo(const safe_VkPipelineTessellationStateCreateInfo& src);
- safe_VkPipelineTessellationStateCreateInfo& operator=(const safe_VkPipelineTessellationStateCreateInfo& src);
- safe_VkPipelineTessellationStateCreateInfo();
- ~safe_VkPipelineTessellationStateCreateInfo();
- void initialize(const VkPipelineTessellationStateCreateInfo* in_struct);
- void initialize(const safe_VkPipelineTessellationStateCreateInfo* src);
- VkPipelineTessellationStateCreateInfo *ptr() { return reinterpret_cast<VkPipelineTessellationStateCreateInfo *>(this); }
- VkPipelineTessellationStateCreateInfo const *ptr() const { return reinterpret_cast<VkPipelineTessellationStateCreateInfo const *>(this); }
-};
-
-struct safe_VkPipelineViewportStateCreateInfo {
- VkStructureType sType;
- const void* pNext;
- VkPipelineViewportStateCreateFlags flags;
- uint32_t viewportCount;
- const VkViewport* pViewports;
- uint32_t scissorCount;
- const VkRect2D* pScissors;
- safe_VkPipelineViewportStateCreateInfo(const VkPipelineViewportStateCreateInfo* in_struct, const bool is_dynamic_viewports, const bool is_dynamic_scissors);
- safe_VkPipelineViewportStateCreateInfo(const safe_VkPipelineViewportStateCreateInfo& src);
- safe_VkPipelineViewportStateCreateInfo& operator=(const safe_VkPipelineViewportStateCreateInfo& src);
- safe_VkPipelineViewportStateCreateInfo();
- ~safe_VkPipelineViewportStateCreateInfo();
- void initialize(const VkPipelineViewportStateCreateInfo* in_struct, const bool is_dynamic_viewports, const bool is_dynamic_scissors);
- void initialize(const safe_VkPipelineViewportStateCreateInfo* src);
- VkPipelineViewportStateCreateInfo *ptr() { return reinterpret_cast<VkPipelineViewportStateCreateInfo *>(this); }
- VkPipelineViewportStateCreateInfo const *ptr() const { return reinterpret_cast<VkPipelineViewportStateCreateInfo const *>(this); }
-};
-
-struct safe_VkPipelineRasterizationStateCreateInfo {
- VkStructureType sType;
- const void* pNext;
- VkPipelineRasterizationStateCreateFlags flags;
- VkBool32 depthClampEnable;
- VkBool32 rasterizerDiscardEnable;
- VkPolygonMode polygonMode;
- VkCullModeFlags cullMode;
- VkFrontFace frontFace;
- VkBool32 depthBiasEnable;
- float depthBiasConstantFactor;
- float depthBiasClamp;
- float depthBiasSlopeFactor;
- float lineWidth;
- safe_VkPipelineRasterizationStateCreateInfo(const VkPipelineRasterizationStateCreateInfo* in_struct);
- safe_VkPipelineRasterizationStateCreateInfo(const safe_VkPipelineRasterizationStateCreateInfo& src);
- safe_VkPipelineRasterizationStateCreateInfo& operator=(const safe_VkPipelineRasterizationStateCreateInfo& src);
- safe_VkPipelineRasterizationStateCreateInfo();
- ~safe_VkPipelineRasterizationStateCreateInfo();
- void initialize(const VkPipelineRasterizationStateCreateInfo* in_struct);
- void initialize(const safe_VkPipelineRasterizationStateCreateInfo* src);
- VkPipelineRasterizationStateCreateInfo *ptr() { return reinterpret_cast<VkPipelineRasterizationStateCreateInfo *>(this); }
- VkPipelineRasterizationStateCreateInfo const *ptr() const { return reinterpret_cast<VkPipelineRasterizationStateCreateInfo const *>(this); }
-};
-
-struct safe_VkPipelineMultisampleStateCreateInfo {
- VkStructureType sType;
- const void* pNext;
- VkPipelineMultisampleStateCreateFlags flags;
- VkSampleCountFlagBits rasterizationSamples;
- VkBool32 sampleShadingEnable;
- float minSampleShading;
- const VkSampleMask* pSampleMask;
- VkBool32 alphaToCoverageEnable;
- VkBool32 alphaToOneEnable;
- safe_VkPipelineMultisampleStateCreateInfo(const VkPipelineMultisampleStateCreateInfo* in_struct);
- safe_VkPipelineMultisampleStateCreateInfo(const safe_VkPipelineMultisampleStateCreateInfo& src);
- safe_VkPipelineMultisampleStateCreateInfo& operator=(const safe_VkPipelineMultisampleStateCreateInfo& src);
- safe_VkPipelineMultisampleStateCreateInfo();
- ~safe_VkPipelineMultisampleStateCreateInfo();
- void initialize(const VkPipelineMultisampleStateCreateInfo* in_struct);
- void initialize(const safe_VkPipelineMultisampleStateCreateInfo* src);
- VkPipelineMultisampleStateCreateInfo *ptr() { return reinterpret_cast<VkPipelineMultisampleStateCreateInfo *>(this); }
- VkPipelineMultisampleStateCreateInfo const *ptr() const { return reinterpret_cast<VkPipelineMultisampleStateCreateInfo const *>(this); }
-};
-
-struct safe_VkPipelineDepthStencilStateCreateInfo {
- VkStructureType sType;
- const void* pNext;
- VkPipelineDepthStencilStateCreateFlags flags;
- VkBool32 depthTestEnable;
- VkBool32 depthWriteEnable;
- VkCompareOp depthCompareOp;
- VkBool32 depthBoundsTestEnable;
- VkBool32 stencilTestEnable;
- VkStencilOpState front;
- VkStencilOpState back;
- float minDepthBounds;
- float maxDepthBounds;
- safe_VkPipelineDepthStencilStateCreateInfo(const VkPipelineDepthStencilStateCreateInfo* in_struct);
- safe_VkPipelineDepthStencilStateCreateInfo(const safe_VkPipelineDepthStencilStateCreateInfo& src);
- safe_VkPipelineDepthStencilStateCreateInfo& operator=(const safe_VkPipelineDepthStencilStateCreateInfo& src);
- safe_VkPipelineDepthStencilStateCreateInfo();
- ~safe_VkPipelineDepthStencilStateCreateInfo();
- void initialize(const VkPipelineDepthStencilStateCreateInfo* in_struct);
- void initialize(const safe_VkPipelineDepthStencilStateCreateInfo* src);
- VkPipelineDepthStencilStateCreateInfo *ptr() { return reinterpret_cast<VkPipelineDepthStencilStateCreateInfo *>(this); }
- VkPipelineDepthStencilStateCreateInfo const *ptr() const { return reinterpret_cast<VkPipelineDepthStencilStateCreateInfo const *>(this); }
-};
-
-struct safe_VkPipelineColorBlendStateCreateInfo {
- VkStructureType sType;
- const void* pNext;
- VkPipelineColorBlendStateCreateFlags flags;
- VkBool32 logicOpEnable;
- VkLogicOp logicOp;
- uint32_t attachmentCount;
- const VkPipelineColorBlendAttachmentState* pAttachments;
- float blendConstants[4];
- safe_VkPipelineColorBlendStateCreateInfo(const VkPipelineColorBlendStateCreateInfo* in_struct);
- safe_VkPipelineColorBlendStateCreateInfo(const safe_VkPipelineColorBlendStateCreateInfo& src);
- safe_VkPipelineColorBlendStateCreateInfo& operator=(const safe_VkPipelineColorBlendStateCreateInfo& src);
- safe_VkPipelineColorBlendStateCreateInfo();
- ~safe_VkPipelineColorBlendStateCreateInfo();
- void initialize(const VkPipelineColorBlendStateCreateInfo* in_struct);
- void initialize(const safe_VkPipelineColorBlendStateCreateInfo* src);
- VkPipelineColorBlendStateCreateInfo *ptr() { return reinterpret_cast<VkPipelineColorBlendStateCreateInfo *>(this); }
- VkPipelineColorBlendStateCreateInfo const *ptr() const { return reinterpret_cast<VkPipelineColorBlendStateCreateInfo const *>(this); }
-};
-
-struct safe_VkPipelineDynamicStateCreateInfo {
- VkStructureType sType;
- const void* pNext;
- VkPipelineDynamicStateCreateFlags flags;
- uint32_t dynamicStateCount;
- const VkDynamicState* pDynamicStates;
- safe_VkPipelineDynamicStateCreateInfo(const VkPipelineDynamicStateCreateInfo* in_struct);
- safe_VkPipelineDynamicStateCreateInfo(const safe_VkPipelineDynamicStateCreateInfo& src);
- safe_VkPipelineDynamicStateCreateInfo& operator=(const safe_VkPipelineDynamicStateCreateInfo& src);
- safe_VkPipelineDynamicStateCreateInfo();
- ~safe_VkPipelineDynamicStateCreateInfo();
- void initialize(const VkPipelineDynamicStateCreateInfo* in_struct);
- void initialize(const safe_VkPipelineDynamicStateCreateInfo* src);
- VkPipelineDynamicStateCreateInfo *ptr() { return reinterpret_cast<VkPipelineDynamicStateCreateInfo *>(this); }
- VkPipelineDynamicStateCreateInfo const *ptr() const { return reinterpret_cast<VkPipelineDynamicStateCreateInfo const *>(this); }
-};
-
-struct safe_VkGraphicsPipelineCreateInfo {
- VkStructureType sType;
- const void* pNext;
- VkPipelineCreateFlags flags;
- uint32_t stageCount;
- safe_VkPipelineShaderStageCreateInfo* pStages;
- safe_VkPipelineVertexInputStateCreateInfo* pVertexInputState;
- safe_VkPipelineInputAssemblyStateCreateInfo* pInputAssemblyState;
- safe_VkPipelineTessellationStateCreateInfo* pTessellationState;
- safe_VkPipelineViewportStateCreateInfo* pViewportState;
- safe_VkPipelineRasterizationStateCreateInfo* pRasterizationState;
- safe_VkPipelineMultisampleStateCreateInfo* pMultisampleState;
- safe_VkPipelineDepthStencilStateCreateInfo* pDepthStencilState;
- safe_VkPipelineColorBlendStateCreateInfo* pColorBlendState;
- safe_VkPipelineDynamicStateCreateInfo* pDynamicState;
- VkPipelineLayout layout;
- VkRenderPass renderPass;
- uint32_t subpass;
- VkPipeline basePipelineHandle;
- int32_t basePipelineIndex;
- safe_VkGraphicsPipelineCreateInfo(const VkGraphicsPipelineCreateInfo* in_struct, const bool uses_color_attachment, const bool uses_depthstencil_attachment);
- safe_VkGraphicsPipelineCreateInfo(const safe_VkGraphicsPipelineCreateInfo& src);
- safe_VkGraphicsPipelineCreateInfo& operator=(const safe_VkGraphicsPipelineCreateInfo& src);
- safe_VkGraphicsPipelineCreateInfo();
- ~safe_VkGraphicsPipelineCreateInfo();
- void initialize(const VkGraphicsPipelineCreateInfo* in_struct, const bool uses_color_attachment, const bool uses_depthstencil_attachment);
- void initialize(const safe_VkGraphicsPipelineCreateInfo* src);
- VkGraphicsPipelineCreateInfo *ptr() { return reinterpret_cast<VkGraphicsPipelineCreateInfo *>(this); }
- VkGraphicsPipelineCreateInfo const *ptr() const { return reinterpret_cast<VkGraphicsPipelineCreateInfo const *>(this); }
-};
-
-struct safe_VkComputePipelineCreateInfo {
- VkStructureType sType;
- const void* pNext;
- VkPipelineCreateFlags flags;
- safe_VkPipelineShaderStageCreateInfo stage;
- VkPipelineLayout layout;
- VkPipeline basePipelineHandle;
- int32_t basePipelineIndex;
- safe_VkComputePipelineCreateInfo(const VkComputePipelineCreateInfo* in_struct);
- safe_VkComputePipelineCreateInfo(const safe_VkComputePipelineCreateInfo& src);
- safe_VkComputePipelineCreateInfo& operator=(const safe_VkComputePipelineCreateInfo& src);
- safe_VkComputePipelineCreateInfo();
- ~safe_VkComputePipelineCreateInfo();
- void initialize(const VkComputePipelineCreateInfo* in_struct);
- void initialize(const safe_VkComputePipelineCreateInfo* src);
- VkComputePipelineCreateInfo *ptr() { return reinterpret_cast<VkComputePipelineCreateInfo *>(this); }
- VkComputePipelineCreateInfo const *ptr() const { return reinterpret_cast<VkComputePipelineCreateInfo const *>(this); }
-};
-
-struct safe_VkPipelineLayoutCreateInfo {
- VkStructureType sType;
- const void* pNext;
- VkPipelineLayoutCreateFlags flags;
- uint32_t setLayoutCount;
- VkDescriptorSetLayout* pSetLayouts;
- uint32_t pushConstantRangeCount;
- const VkPushConstantRange* pPushConstantRanges;
- safe_VkPipelineLayoutCreateInfo(const VkPipelineLayoutCreateInfo* in_struct);
- safe_VkPipelineLayoutCreateInfo(const safe_VkPipelineLayoutCreateInfo& src);
- safe_VkPipelineLayoutCreateInfo& operator=(const safe_VkPipelineLayoutCreateInfo& src);
- safe_VkPipelineLayoutCreateInfo();
- ~safe_VkPipelineLayoutCreateInfo();
- void initialize(const VkPipelineLayoutCreateInfo* in_struct);
- void initialize(const safe_VkPipelineLayoutCreateInfo* src);
- VkPipelineLayoutCreateInfo *ptr() { return reinterpret_cast<VkPipelineLayoutCreateInfo *>(this); }
- VkPipelineLayoutCreateInfo const *ptr() const { return reinterpret_cast<VkPipelineLayoutCreateInfo const *>(this); }
-};
-
-struct safe_VkSamplerCreateInfo {
- VkStructureType sType;
- const void* pNext;
- VkSamplerCreateFlags flags;
- VkFilter magFilter;
- VkFilter minFilter;
- VkSamplerMipmapMode mipmapMode;
- VkSamplerAddressMode addressModeU;
- VkSamplerAddressMode addressModeV;
- VkSamplerAddressMode addressModeW;
- float mipLodBias;
- VkBool32 anisotropyEnable;
- float maxAnisotropy;
- VkBool32 compareEnable;
- VkCompareOp compareOp;
- float minLod;
- float maxLod;
- VkBorderColor borderColor;
- VkBool32 unnormalizedCoordinates;
- safe_VkSamplerCreateInfo(const VkSamplerCreateInfo* in_struct);
- safe_VkSamplerCreateInfo(const safe_VkSamplerCreateInfo& src);
- safe_VkSamplerCreateInfo& operator=(const safe_VkSamplerCreateInfo& src);
- safe_VkSamplerCreateInfo();
- ~safe_VkSamplerCreateInfo();
- void initialize(const VkSamplerCreateInfo* in_struct);
- void initialize(const safe_VkSamplerCreateInfo* src);
- VkSamplerCreateInfo *ptr() { return reinterpret_cast<VkSamplerCreateInfo *>(this); }
- VkSamplerCreateInfo const *ptr() const { return reinterpret_cast<VkSamplerCreateInfo const *>(this); }
-};
-
-struct safe_VkDescriptorSetLayoutBinding {
- uint32_t binding;
- VkDescriptorType descriptorType;
- uint32_t descriptorCount;
- VkShaderStageFlags stageFlags;
- VkSampler* pImmutableSamplers;
- safe_VkDescriptorSetLayoutBinding(const VkDescriptorSetLayoutBinding* in_struct);
- safe_VkDescriptorSetLayoutBinding(const safe_VkDescriptorSetLayoutBinding& src);
- safe_VkDescriptorSetLayoutBinding& operator=(const safe_VkDescriptorSetLayoutBinding& src);
- safe_VkDescriptorSetLayoutBinding();
- ~safe_VkDescriptorSetLayoutBinding();
- void initialize(const VkDescriptorSetLayoutBinding* in_struct);
- void initialize(const safe_VkDescriptorSetLayoutBinding* src);
- VkDescriptorSetLayoutBinding *ptr() { return reinterpret_cast<VkDescriptorSetLayoutBinding *>(this); }
- VkDescriptorSetLayoutBinding const *ptr() const { return reinterpret_cast<VkDescriptorSetLayoutBinding const *>(this); }
-};
-
-struct safe_VkDescriptorSetLayoutCreateInfo {
- VkStructureType sType;
- const void* pNext;
- VkDescriptorSetLayoutCreateFlags flags;
- uint32_t bindingCount;
- safe_VkDescriptorSetLayoutBinding* pBindings;
- safe_VkDescriptorSetLayoutCreateInfo(const VkDescriptorSetLayoutCreateInfo* in_struct);
- safe_VkDescriptorSetLayoutCreateInfo(const safe_VkDescriptorSetLayoutCreateInfo& src);
- safe_VkDescriptorSetLayoutCreateInfo& operator=(const safe_VkDescriptorSetLayoutCreateInfo& src);
- safe_VkDescriptorSetLayoutCreateInfo();
- ~safe_VkDescriptorSetLayoutCreateInfo();
- void initialize(const VkDescriptorSetLayoutCreateInfo* in_struct);
- void initialize(const safe_VkDescriptorSetLayoutCreateInfo* src);
- VkDescriptorSetLayoutCreateInfo *ptr() { return reinterpret_cast<VkDescriptorSetLayoutCreateInfo *>(this); }
- VkDescriptorSetLayoutCreateInfo const *ptr() const { return reinterpret_cast<VkDescriptorSetLayoutCreateInfo const *>(this); }
-};
-
-struct safe_VkDescriptorPoolCreateInfo {
- VkStructureType sType;
- const void* pNext;
- VkDescriptorPoolCreateFlags flags;
- uint32_t maxSets;
- uint32_t poolSizeCount;
- const VkDescriptorPoolSize* pPoolSizes;
- safe_VkDescriptorPoolCreateInfo(const VkDescriptorPoolCreateInfo* in_struct);
- safe_VkDescriptorPoolCreateInfo(const safe_VkDescriptorPoolCreateInfo& src);
- safe_VkDescriptorPoolCreateInfo& operator=(const safe_VkDescriptorPoolCreateInfo& src);
- safe_VkDescriptorPoolCreateInfo();
- ~safe_VkDescriptorPoolCreateInfo();
- void initialize(const VkDescriptorPoolCreateInfo* in_struct);
- void initialize(const safe_VkDescriptorPoolCreateInfo* src);
- VkDescriptorPoolCreateInfo *ptr() { return reinterpret_cast<VkDescriptorPoolCreateInfo *>(this); }
- VkDescriptorPoolCreateInfo const *ptr() const { return reinterpret_cast<VkDescriptorPoolCreateInfo const *>(this); }
-};
-
-struct safe_VkDescriptorSetAllocateInfo {
- VkStructureType sType;
- const void* pNext;
- VkDescriptorPool descriptorPool;
- uint32_t descriptorSetCount;
- VkDescriptorSetLayout* pSetLayouts;
- safe_VkDescriptorSetAllocateInfo(const VkDescriptorSetAllocateInfo* in_struct);
- safe_VkDescriptorSetAllocateInfo(const safe_VkDescriptorSetAllocateInfo& src);
- safe_VkDescriptorSetAllocateInfo& operator=(const safe_VkDescriptorSetAllocateInfo& src);
- safe_VkDescriptorSetAllocateInfo();
- ~safe_VkDescriptorSetAllocateInfo();
- void initialize(const VkDescriptorSetAllocateInfo* in_struct);
- void initialize(const safe_VkDescriptorSetAllocateInfo* src);
- VkDescriptorSetAllocateInfo *ptr() { return reinterpret_cast<VkDescriptorSetAllocateInfo *>(this); }
- VkDescriptorSetAllocateInfo const *ptr() const { return reinterpret_cast<VkDescriptorSetAllocateInfo const *>(this); }
-};
-
-struct safe_VkWriteDescriptorSet {
- VkStructureType sType;
- const void* pNext;
- VkDescriptorSet dstSet;
- uint32_t dstBinding;
- uint32_t dstArrayElement;
- uint32_t descriptorCount;
- VkDescriptorType descriptorType;
- VkDescriptorImageInfo* pImageInfo;
- VkDescriptorBufferInfo* pBufferInfo;
- VkBufferView* pTexelBufferView;
- safe_VkWriteDescriptorSet(const VkWriteDescriptorSet* in_struct);
- safe_VkWriteDescriptorSet(const safe_VkWriteDescriptorSet& src);
- safe_VkWriteDescriptorSet& operator=(const safe_VkWriteDescriptorSet& src);
- safe_VkWriteDescriptorSet();
- ~safe_VkWriteDescriptorSet();
- void initialize(const VkWriteDescriptorSet* in_struct);
- void initialize(const safe_VkWriteDescriptorSet* src);
- VkWriteDescriptorSet *ptr() { return reinterpret_cast<VkWriteDescriptorSet *>(this); }
- VkWriteDescriptorSet const *ptr() const { return reinterpret_cast<VkWriteDescriptorSet const *>(this); }
-};
-
-struct safe_VkCopyDescriptorSet {
- VkStructureType sType;
- const void* pNext;
- VkDescriptorSet srcSet;
- uint32_t srcBinding;
- uint32_t srcArrayElement;
- VkDescriptorSet dstSet;
- uint32_t dstBinding;
- uint32_t dstArrayElement;
- uint32_t descriptorCount;
- safe_VkCopyDescriptorSet(const VkCopyDescriptorSet* in_struct);
- safe_VkCopyDescriptorSet(const safe_VkCopyDescriptorSet& src);
- safe_VkCopyDescriptorSet& operator=(const safe_VkCopyDescriptorSet& src);
- safe_VkCopyDescriptorSet();
- ~safe_VkCopyDescriptorSet();
- void initialize(const VkCopyDescriptorSet* in_struct);
- void initialize(const safe_VkCopyDescriptorSet* src);
- VkCopyDescriptorSet *ptr() { return reinterpret_cast<VkCopyDescriptorSet *>(this); }
- VkCopyDescriptorSet const *ptr() const { return reinterpret_cast<VkCopyDescriptorSet const *>(this); }
-};
-
-struct safe_VkFramebufferCreateInfo {
- VkStructureType sType;
- const void* pNext;
- VkFramebufferCreateFlags flags;
- VkRenderPass renderPass;
- uint32_t attachmentCount;
- VkImageView* pAttachments;
- uint32_t width;
- uint32_t height;
- uint32_t layers;
- safe_VkFramebufferCreateInfo(const VkFramebufferCreateInfo* in_struct);
- safe_VkFramebufferCreateInfo(const safe_VkFramebufferCreateInfo& src);
- safe_VkFramebufferCreateInfo& operator=(const safe_VkFramebufferCreateInfo& src);
- safe_VkFramebufferCreateInfo();
- ~safe_VkFramebufferCreateInfo();
- void initialize(const VkFramebufferCreateInfo* in_struct);
- void initialize(const safe_VkFramebufferCreateInfo* src);
- VkFramebufferCreateInfo *ptr() { return reinterpret_cast<VkFramebufferCreateInfo *>(this); }
- VkFramebufferCreateInfo const *ptr() const { return reinterpret_cast<VkFramebufferCreateInfo const *>(this); }
-};
-
-struct safe_VkSubpassDescription {
- VkSubpassDescriptionFlags flags;
- VkPipelineBindPoint pipelineBindPoint;
- uint32_t inputAttachmentCount;
- const VkAttachmentReference* pInputAttachments;
- uint32_t colorAttachmentCount;
- const VkAttachmentReference* pColorAttachments;
- const VkAttachmentReference* pResolveAttachments;
- const VkAttachmentReference* pDepthStencilAttachment;
- uint32_t preserveAttachmentCount;
- const uint32_t* pPreserveAttachments;
- safe_VkSubpassDescription(const VkSubpassDescription* in_struct);
- safe_VkSubpassDescription(const safe_VkSubpassDescription& src);
- safe_VkSubpassDescription& operator=(const safe_VkSubpassDescription& src);
- safe_VkSubpassDescription();
- ~safe_VkSubpassDescription();
- void initialize(const VkSubpassDescription* in_struct);
- void initialize(const safe_VkSubpassDescription* src);
- VkSubpassDescription *ptr() { return reinterpret_cast<VkSubpassDescription *>(this); }
- VkSubpassDescription const *ptr() const { return reinterpret_cast<VkSubpassDescription const *>(this); }
-};
-
-struct safe_VkRenderPassCreateInfo {
- VkStructureType sType;
- const void* pNext;
- VkRenderPassCreateFlags flags;
- uint32_t attachmentCount;
- const VkAttachmentDescription* pAttachments;
- uint32_t subpassCount;
- safe_VkSubpassDescription* pSubpasses;
- uint32_t dependencyCount;
- const VkSubpassDependency* pDependencies;
- safe_VkRenderPassCreateInfo(const VkRenderPassCreateInfo* in_struct);
- safe_VkRenderPassCreateInfo(const safe_VkRenderPassCreateInfo& src);
- safe_VkRenderPassCreateInfo& operator=(const safe_VkRenderPassCreateInfo& src);
- safe_VkRenderPassCreateInfo();
- ~safe_VkRenderPassCreateInfo();
- void initialize(const VkRenderPassCreateInfo* in_struct);
- void initialize(const safe_VkRenderPassCreateInfo* src);
- VkRenderPassCreateInfo *ptr() { return reinterpret_cast<VkRenderPassCreateInfo *>(this); }
- VkRenderPassCreateInfo const *ptr() const { return reinterpret_cast<VkRenderPassCreateInfo const *>(this); }
-};
-
-struct safe_VkCommandPoolCreateInfo {
- VkStructureType sType;
- const void* pNext;
- VkCommandPoolCreateFlags flags;
- uint32_t queueFamilyIndex;
- safe_VkCommandPoolCreateInfo(const VkCommandPoolCreateInfo* in_struct);
- safe_VkCommandPoolCreateInfo(const safe_VkCommandPoolCreateInfo& src);
- safe_VkCommandPoolCreateInfo& operator=(const safe_VkCommandPoolCreateInfo& src);
- safe_VkCommandPoolCreateInfo();
- ~safe_VkCommandPoolCreateInfo();
- void initialize(const VkCommandPoolCreateInfo* in_struct);
- void initialize(const safe_VkCommandPoolCreateInfo* src);
- VkCommandPoolCreateInfo *ptr() { return reinterpret_cast<VkCommandPoolCreateInfo *>(this); }
- VkCommandPoolCreateInfo const *ptr() const { return reinterpret_cast<VkCommandPoolCreateInfo const *>(this); }
-};
-
-struct safe_VkCommandBufferAllocateInfo {
- VkStructureType sType;
- const void* pNext;
- VkCommandPool commandPool;
- VkCommandBufferLevel level;
- uint32_t commandBufferCount;
- safe_VkCommandBufferAllocateInfo(const VkCommandBufferAllocateInfo* in_struct);
- safe_VkCommandBufferAllocateInfo(const safe_VkCommandBufferAllocateInfo& src);
- safe_VkCommandBufferAllocateInfo& operator=(const safe_VkCommandBufferAllocateInfo& src);
- safe_VkCommandBufferAllocateInfo();
- ~safe_VkCommandBufferAllocateInfo();
- void initialize(const VkCommandBufferAllocateInfo* in_struct);
- void initialize(const safe_VkCommandBufferAllocateInfo* src);
- VkCommandBufferAllocateInfo *ptr() { return reinterpret_cast<VkCommandBufferAllocateInfo *>(this); }
- VkCommandBufferAllocateInfo const *ptr() const { return reinterpret_cast<VkCommandBufferAllocateInfo const *>(this); }
-};
-
-struct safe_VkCommandBufferInheritanceInfo {
- VkStructureType sType;
- const void* pNext;
- VkRenderPass renderPass;
- uint32_t subpass;
- VkFramebuffer framebuffer;
- VkBool32 occlusionQueryEnable;
- VkQueryControlFlags queryFlags;
- VkQueryPipelineStatisticFlags pipelineStatistics;
- safe_VkCommandBufferInheritanceInfo(const VkCommandBufferInheritanceInfo* in_struct);
- safe_VkCommandBufferInheritanceInfo(const safe_VkCommandBufferInheritanceInfo& src);
- safe_VkCommandBufferInheritanceInfo& operator=(const safe_VkCommandBufferInheritanceInfo& src);
- safe_VkCommandBufferInheritanceInfo();
- ~safe_VkCommandBufferInheritanceInfo();
- void initialize(const VkCommandBufferInheritanceInfo* in_struct);
- void initialize(const safe_VkCommandBufferInheritanceInfo* src);
- VkCommandBufferInheritanceInfo *ptr() { return reinterpret_cast<VkCommandBufferInheritanceInfo *>(this); }
- VkCommandBufferInheritanceInfo const *ptr() const { return reinterpret_cast<VkCommandBufferInheritanceInfo const *>(this); }
-};
-
-struct safe_VkCommandBufferBeginInfo {
- VkStructureType sType;
- const void* pNext;
- VkCommandBufferUsageFlags flags;
- safe_VkCommandBufferInheritanceInfo* pInheritanceInfo;
- safe_VkCommandBufferBeginInfo(const VkCommandBufferBeginInfo* in_struct);
- safe_VkCommandBufferBeginInfo(const safe_VkCommandBufferBeginInfo& src);
- safe_VkCommandBufferBeginInfo& operator=(const safe_VkCommandBufferBeginInfo& src);
- safe_VkCommandBufferBeginInfo();
- ~safe_VkCommandBufferBeginInfo();
- void initialize(const VkCommandBufferBeginInfo* in_struct);
- void initialize(const safe_VkCommandBufferBeginInfo* src);
- VkCommandBufferBeginInfo *ptr() { return reinterpret_cast<VkCommandBufferBeginInfo *>(this); }
- VkCommandBufferBeginInfo const *ptr() const { return reinterpret_cast<VkCommandBufferBeginInfo const *>(this); }
-};
-
-struct safe_VkMemoryBarrier {
- VkStructureType sType;
- const void* pNext;
- VkAccessFlags srcAccessMask;
- VkAccessFlags dstAccessMask;
- safe_VkMemoryBarrier(const VkMemoryBarrier* in_struct);
- safe_VkMemoryBarrier(const safe_VkMemoryBarrier& src);
- safe_VkMemoryBarrier& operator=(const safe_VkMemoryBarrier& src);
- safe_VkMemoryBarrier();
- ~safe_VkMemoryBarrier();
- void initialize(const VkMemoryBarrier* in_struct);
- void initialize(const safe_VkMemoryBarrier* src);
- VkMemoryBarrier *ptr() { return reinterpret_cast<VkMemoryBarrier *>(this); }
- VkMemoryBarrier const *ptr() const { return reinterpret_cast<VkMemoryBarrier const *>(this); }
-};
-
-struct safe_VkBufferMemoryBarrier {
- VkStructureType sType;
- const void* pNext;
- VkAccessFlags srcAccessMask;
- VkAccessFlags dstAccessMask;
- uint32_t srcQueueFamilyIndex;
- uint32_t dstQueueFamilyIndex;
- VkBuffer buffer;
- VkDeviceSize offset;
- VkDeviceSize size;
- safe_VkBufferMemoryBarrier(const VkBufferMemoryBarrier* in_struct);
- safe_VkBufferMemoryBarrier(const safe_VkBufferMemoryBarrier& src);
- safe_VkBufferMemoryBarrier& operator=(const safe_VkBufferMemoryBarrier& src);
- safe_VkBufferMemoryBarrier();
- ~safe_VkBufferMemoryBarrier();
- void initialize(const VkBufferMemoryBarrier* in_struct);
- void initialize(const safe_VkBufferMemoryBarrier* src);
- VkBufferMemoryBarrier *ptr() { return reinterpret_cast<VkBufferMemoryBarrier *>(this); }
- VkBufferMemoryBarrier const *ptr() const { return reinterpret_cast<VkBufferMemoryBarrier const *>(this); }
-};
-
-struct safe_VkImageMemoryBarrier {
- VkStructureType sType;
- const void* pNext;
- VkAccessFlags srcAccessMask;
- VkAccessFlags dstAccessMask;
- VkImageLayout oldLayout;
- VkImageLayout newLayout;
- uint32_t srcQueueFamilyIndex;
- uint32_t dstQueueFamilyIndex;
- VkImage image;
- VkImageSubresourceRange subresourceRange;
- safe_VkImageMemoryBarrier(const VkImageMemoryBarrier* in_struct);
- safe_VkImageMemoryBarrier(const safe_VkImageMemoryBarrier& src);
- safe_VkImageMemoryBarrier& operator=(const safe_VkImageMemoryBarrier& src);
- safe_VkImageMemoryBarrier();
- ~safe_VkImageMemoryBarrier();
- void initialize(const VkImageMemoryBarrier* in_struct);
- void initialize(const safe_VkImageMemoryBarrier* src);
- VkImageMemoryBarrier *ptr() { return reinterpret_cast<VkImageMemoryBarrier *>(this); }
- VkImageMemoryBarrier const *ptr() const { return reinterpret_cast<VkImageMemoryBarrier const *>(this); }
-};
-
-struct safe_VkRenderPassBeginInfo {
- VkStructureType sType;
- const void* pNext;
- VkRenderPass renderPass;
- VkFramebuffer framebuffer;
- VkRect2D renderArea;
- uint32_t clearValueCount;
- const VkClearValue* pClearValues;
- safe_VkRenderPassBeginInfo(const VkRenderPassBeginInfo* in_struct);
- safe_VkRenderPassBeginInfo(const safe_VkRenderPassBeginInfo& src);
- safe_VkRenderPassBeginInfo& operator=(const safe_VkRenderPassBeginInfo& src);
- safe_VkRenderPassBeginInfo();
- ~safe_VkRenderPassBeginInfo();
- void initialize(const VkRenderPassBeginInfo* in_struct);
- void initialize(const safe_VkRenderPassBeginInfo* src);
- VkRenderPassBeginInfo *ptr() { return reinterpret_cast<VkRenderPassBeginInfo *>(this); }
- VkRenderPassBeginInfo const *ptr() const { return reinterpret_cast<VkRenderPassBeginInfo const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceSubgroupProperties {
- VkStructureType sType;
- void* pNext;
- uint32_t subgroupSize;
- VkShaderStageFlags supportedStages;
- VkSubgroupFeatureFlags supportedOperations;
- VkBool32 quadOperationsInAllStages;
- safe_VkPhysicalDeviceSubgroupProperties(const VkPhysicalDeviceSubgroupProperties* in_struct);
- safe_VkPhysicalDeviceSubgroupProperties(const safe_VkPhysicalDeviceSubgroupProperties& src);
- safe_VkPhysicalDeviceSubgroupProperties& operator=(const safe_VkPhysicalDeviceSubgroupProperties& src);
- safe_VkPhysicalDeviceSubgroupProperties();
- ~safe_VkPhysicalDeviceSubgroupProperties();
- void initialize(const VkPhysicalDeviceSubgroupProperties* in_struct);
- void initialize(const safe_VkPhysicalDeviceSubgroupProperties* src);
- VkPhysicalDeviceSubgroupProperties *ptr() { return reinterpret_cast<VkPhysicalDeviceSubgroupProperties *>(this); }
- VkPhysicalDeviceSubgroupProperties const *ptr() const { return reinterpret_cast<VkPhysicalDeviceSubgroupProperties const *>(this); }
-};
-
-struct safe_VkBindBufferMemoryInfo {
- VkStructureType sType;
- const void* pNext;
- VkBuffer buffer;
- VkDeviceMemory memory;
- VkDeviceSize memoryOffset;
- safe_VkBindBufferMemoryInfo(const VkBindBufferMemoryInfo* in_struct);
- safe_VkBindBufferMemoryInfo(const safe_VkBindBufferMemoryInfo& src);
- safe_VkBindBufferMemoryInfo& operator=(const safe_VkBindBufferMemoryInfo& src);
- safe_VkBindBufferMemoryInfo();
- ~safe_VkBindBufferMemoryInfo();
- void initialize(const VkBindBufferMemoryInfo* in_struct);
- void initialize(const safe_VkBindBufferMemoryInfo* src);
- VkBindBufferMemoryInfo *ptr() { return reinterpret_cast<VkBindBufferMemoryInfo *>(this); }
- VkBindBufferMemoryInfo const *ptr() const { return reinterpret_cast<VkBindBufferMemoryInfo const *>(this); }
-};
-
-struct safe_VkBindImageMemoryInfo {
- VkStructureType sType;
- const void* pNext;
- VkImage image;
- VkDeviceMemory memory;
- VkDeviceSize memoryOffset;
- safe_VkBindImageMemoryInfo(const VkBindImageMemoryInfo* in_struct);
- safe_VkBindImageMemoryInfo(const safe_VkBindImageMemoryInfo& src);
- safe_VkBindImageMemoryInfo& operator=(const safe_VkBindImageMemoryInfo& src);
- safe_VkBindImageMemoryInfo();
- ~safe_VkBindImageMemoryInfo();
- void initialize(const VkBindImageMemoryInfo* in_struct);
- void initialize(const safe_VkBindImageMemoryInfo* src);
- VkBindImageMemoryInfo *ptr() { return reinterpret_cast<VkBindImageMemoryInfo *>(this); }
- VkBindImageMemoryInfo const *ptr() const { return reinterpret_cast<VkBindImageMemoryInfo const *>(this); }
-};
-
-struct safe_VkPhysicalDevice16BitStorageFeatures {
- VkStructureType sType;
- void* pNext;
- VkBool32 storageBuffer16BitAccess;
- VkBool32 uniformAndStorageBuffer16BitAccess;
- VkBool32 storagePushConstant16;
- VkBool32 storageInputOutput16;
- safe_VkPhysicalDevice16BitStorageFeatures(const VkPhysicalDevice16BitStorageFeatures* in_struct);
- safe_VkPhysicalDevice16BitStorageFeatures(const safe_VkPhysicalDevice16BitStorageFeatures& src);
- safe_VkPhysicalDevice16BitStorageFeatures& operator=(const safe_VkPhysicalDevice16BitStorageFeatures& src);
- safe_VkPhysicalDevice16BitStorageFeatures();
- ~safe_VkPhysicalDevice16BitStorageFeatures();
- void initialize(const VkPhysicalDevice16BitStorageFeatures* in_struct);
- void initialize(const safe_VkPhysicalDevice16BitStorageFeatures* src);
- VkPhysicalDevice16BitStorageFeatures *ptr() { return reinterpret_cast<VkPhysicalDevice16BitStorageFeatures *>(this); }
- VkPhysicalDevice16BitStorageFeatures const *ptr() const { return reinterpret_cast<VkPhysicalDevice16BitStorageFeatures const *>(this); }
-};
-
-struct safe_VkMemoryDedicatedRequirements {
- VkStructureType sType;
- void* pNext;
- VkBool32 prefersDedicatedAllocation;
- VkBool32 requiresDedicatedAllocation;
- safe_VkMemoryDedicatedRequirements(const VkMemoryDedicatedRequirements* in_struct);
- safe_VkMemoryDedicatedRequirements(const safe_VkMemoryDedicatedRequirements& src);
- safe_VkMemoryDedicatedRequirements& operator=(const safe_VkMemoryDedicatedRequirements& src);
- safe_VkMemoryDedicatedRequirements();
- ~safe_VkMemoryDedicatedRequirements();
- void initialize(const VkMemoryDedicatedRequirements* in_struct);
- void initialize(const safe_VkMemoryDedicatedRequirements* src);
- VkMemoryDedicatedRequirements *ptr() { return reinterpret_cast<VkMemoryDedicatedRequirements *>(this); }
- VkMemoryDedicatedRequirements const *ptr() const { return reinterpret_cast<VkMemoryDedicatedRequirements const *>(this); }
-};
-
-struct safe_VkMemoryDedicatedAllocateInfo {
- VkStructureType sType;
- const void* pNext;
- VkImage image;
- VkBuffer buffer;
- safe_VkMemoryDedicatedAllocateInfo(const VkMemoryDedicatedAllocateInfo* in_struct);
- safe_VkMemoryDedicatedAllocateInfo(const safe_VkMemoryDedicatedAllocateInfo& src);
- safe_VkMemoryDedicatedAllocateInfo& operator=(const safe_VkMemoryDedicatedAllocateInfo& src);
- safe_VkMemoryDedicatedAllocateInfo();
- ~safe_VkMemoryDedicatedAllocateInfo();
- void initialize(const VkMemoryDedicatedAllocateInfo* in_struct);
- void initialize(const safe_VkMemoryDedicatedAllocateInfo* src);
- VkMemoryDedicatedAllocateInfo *ptr() { return reinterpret_cast<VkMemoryDedicatedAllocateInfo *>(this); }
- VkMemoryDedicatedAllocateInfo const *ptr() const { return reinterpret_cast<VkMemoryDedicatedAllocateInfo const *>(this); }
-};
-
-struct safe_VkMemoryAllocateFlagsInfo {
- VkStructureType sType;
- const void* pNext;
- VkMemoryAllocateFlags flags;
- uint32_t deviceMask;
- safe_VkMemoryAllocateFlagsInfo(const VkMemoryAllocateFlagsInfo* in_struct);
- safe_VkMemoryAllocateFlagsInfo(const safe_VkMemoryAllocateFlagsInfo& src);
- safe_VkMemoryAllocateFlagsInfo& operator=(const safe_VkMemoryAllocateFlagsInfo& src);
- safe_VkMemoryAllocateFlagsInfo();
- ~safe_VkMemoryAllocateFlagsInfo();
- void initialize(const VkMemoryAllocateFlagsInfo* in_struct);
- void initialize(const safe_VkMemoryAllocateFlagsInfo* src);
- VkMemoryAllocateFlagsInfo *ptr() { return reinterpret_cast<VkMemoryAllocateFlagsInfo *>(this); }
- VkMemoryAllocateFlagsInfo const *ptr() const { return reinterpret_cast<VkMemoryAllocateFlagsInfo const *>(this); }
-};
-
-struct safe_VkDeviceGroupRenderPassBeginInfo {
- VkStructureType sType;
- const void* pNext;
- uint32_t deviceMask;
- uint32_t deviceRenderAreaCount;
- const VkRect2D* pDeviceRenderAreas;
- safe_VkDeviceGroupRenderPassBeginInfo(const VkDeviceGroupRenderPassBeginInfo* in_struct);
- safe_VkDeviceGroupRenderPassBeginInfo(const safe_VkDeviceGroupRenderPassBeginInfo& src);
- safe_VkDeviceGroupRenderPassBeginInfo& operator=(const safe_VkDeviceGroupRenderPassBeginInfo& src);
- safe_VkDeviceGroupRenderPassBeginInfo();
- ~safe_VkDeviceGroupRenderPassBeginInfo();
- void initialize(const VkDeviceGroupRenderPassBeginInfo* in_struct);
- void initialize(const safe_VkDeviceGroupRenderPassBeginInfo* src);
- VkDeviceGroupRenderPassBeginInfo *ptr() { return reinterpret_cast<VkDeviceGroupRenderPassBeginInfo *>(this); }
- VkDeviceGroupRenderPassBeginInfo const *ptr() const { return reinterpret_cast<VkDeviceGroupRenderPassBeginInfo const *>(this); }
-};
-
-struct safe_VkDeviceGroupCommandBufferBeginInfo {
- VkStructureType sType;
- const void* pNext;
- uint32_t deviceMask;
- safe_VkDeviceGroupCommandBufferBeginInfo(const VkDeviceGroupCommandBufferBeginInfo* in_struct);
- safe_VkDeviceGroupCommandBufferBeginInfo(const safe_VkDeviceGroupCommandBufferBeginInfo& src);
- safe_VkDeviceGroupCommandBufferBeginInfo& operator=(const safe_VkDeviceGroupCommandBufferBeginInfo& src);
- safe_VkDeviceGroupCommandBufferBeginInfo();
- ~safe_VkDeviceGroupCommandBufferBeginInfo();
- void initialize(const VkDeviceGroupCommandBufferBeginInfo* in_struct);
- void initialize(const safe_VkDeviceGroupCommandBufferBeginInfo* src);
- VkDeviceGroupCommandBufferBeginInfo *ptr() { return reinterpret_cast<VkDeviceGroupCommandBufferBeginInfo *>(this); }
- VkDeviceGroupCommandBufferBeginInfo const *ptr() const { return reinterpret_cast<VkDeviceGroupCommandBufferBeginInfo const *>(this); }
-};
-
-struct safe_VkDeviceGroupSubmitInfo {
- VkStructureType sType;
- const void* pNext;
- uint32_t waitSemaphoreCount;
- const uint32_t* pWaitSemaphoreDeviceIndices;
- uint32_t commandBufferCount;
- const uint32_t* pCommandBufferDeviceMasks;
- uint32_t signalSemaphoreCount;
- const uint32_t* pSignalSemaphoreDeviceIndices;
- safe_VkDeviceGroupSubmitInfo(const VkDeviceGroupSubmitInfo* in_struct);
- safe_VkDeviceGroupSubmitInfo(const safe_VkDeviceGroupSubmitInfo& src);
- safe_VkDeviceGroupSubmitInfo& operator=(const safe_VkDeviceGroupSubmitInfo& src);
- safe_VkDeviceGroupSubmitInfo();
- ~safe_VkDeviceGroupSubmitInfo();
- void initialize(const VkDeviceGroupSubmitInfo* in_struct);
- void initialize(const safe_VkDeviceGroupSubmitInfo* src);
- VkDeviceGroupSubmitInfo *ptr() { return reinterpret_cast<VkDeviceGroupSubmitInfo *>(this); }
- VkDeviceGroupSubmitInfo const *ptr() const { return reinterpret_cast<VkDeviceGroupSubmitInfo const *>(this); }
-};
-
-struct safe_VkDeviceGroupBindSparseInfo {
- VkStructureType sType;
- const void* pNext;
- uint32_t resourceDeviceIndex;
- uint32_t memoryDeviceIndex;
- safe_VkDeviceGroupBindSparseInfo(const VkDeviceGroupBindSparseInfo* in_struct);
- safe_VkDeviceGroupBindSparseInfo(const safe_VkDeviceGroupBindSparseInfo& src);
- safe_VkDeviceGroupBindSparseInfo& operator=(const safe_VkDeviceGroupBindSparseInfo& src);
- safe_VkDeviceGroupBindSparseInfo();
- ~safe_VkDeviceGroupBindSparseInfo();
- void initialize(const VkDeviceGroupBindSparseInfo* in_struct);
- void initialize(const safe_VkDeviceGroupBindSparseInfo* src);
- VkDeviceGroupBindSparseInfo *ptr() { return reinterpret_cast<VkDeviceGroupBindSparseInfo *>(this); }
- VkDeviceGroupBindSparseInfo const *ptr() const { return reinterpret_cast<VkDeviceGroupBindSparseInfo const *>(this); }
-};
-
-struct safe_VkBindBufferMemoryDeviceGroupInfo {
- VkStructureType sType;
- const void* pNext;
- uint32_t deviceIndexCount;
- const uint32_t* pDeviceIndices;
- safe_VkBindBufferMemoryDeviceGroupInfo(const VkBindBufferMemoryDeviceGroupInfo* in_struct);
- safe_VkBindBufferMemoryDeviceGroupInfo(const safe_VkBindBufferMemoryDeviceGroupInfo& src);
- safe_VkBindBufferMemoryDeviceGroupInfo& operator=(const safe_VkBindBufferMemoryDeviceGroupInfo& src);
- safe_VkBindBufferMemoryDeviceGroupInfo();
- ~safe_VkBindBufferMemoryDeviceGroupInfo();
- void initialize(const VkBindBufferMemoryDeviceGroupInfo* in_struct);
- void initialize(const safe_VkBindBufferMemoryDeviceGroupInfo* src);
- VkBindBufferMemoryDeviceGroupInfo *ptr() { return reinterpret_cast<VkBindBufferMemoryDeviceGroupInfo *>(this); }
- VkBindBufferMemoryDeviceGroupInfo const *ptr() const { return reinterpret_cast<VkBindBufferMemoryDeviceGroupInfo const *>(this); }
-};
-
-struct safe_VkBindImageMemoryDeviceGroupInfo {
- VkStructureType sType;
- const void* pNext;
- uint32_t deviceIndexCount;
- const uint32_t* pDeviceIndices;
- uint32_t splitInstanceBindRegionCount;
- const VkRect2D* pSplitInstanceBindRegions;
- safe_VkBindImageMemoryDeviceGroupInfo(const VkBindImageMemoryDeviceGroupInfo* in_struct);
- safe_VkBindImageMemoryDeviceGroupInfo(const safe_VkBindImageMemoryDeviceGroupInfo& src);
- safe_VkBindImageMemoryDeviceGroupInfo& operator=(const safe_VkBindImageMemoryDeviceGroupInfo& src);
- safe_VkBindImageMemoryDeviceGroupInfo();
- ~safe_VkBindImageMemoryDeviceGroupInfo();
- void initialize(const VkBindImageMemoryDeviceGroupInfo* in_struct);
- void initialize(const safe_VkBindImageMemoryDeviceGroupInfo* src);
- VkBindImageMemoryDeviceGroupInfo *ptr() { return reinterpret_cast<VkBindImageMemoryDeviceGroupInfo *>(this); }
- VkBindImageMemoryDeviceGroupInfo const *ptr() const { return reinterpret_cast<VkBindImageMemoryDeviceGroupInfo const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceGroupProperties {
- VkStructureType sType;
- void* pNext;
- uint32_t physicalDeviceCount;
- VkPhysicalDevice physicalDevices[VK_MAX_DEVICE_GROUP_SIZE];
- VkBool32 subsetAllocation;
- safe_VkPhysicalDeviceGroupProperties(const VkPhysicalDeviceGroupProperties* in_struct);
- safe_VkPhysicalDeviceGroupProperties(const safe_VkPhysicalDeviceGroupProperties& src);
- safe_VkPhysicalDeviceGroupProperties& operator=(const safe_VkPhysicalDeviceGroupProperties& src);
- safe_VkPhysicalDeviceGroupProperties();
- ~safe_VkPhysicalDeviceGroupProperties();
- void initialize(const VkPhysicalDeviceGroupProperties* in_struct);
- void initialize(const safe_VkPhysicalDeviceGroupProperties* src);
- VkPhysicalDeviceGroupProperties *ptr() { return reinterpret_cast<VkPhysicalDeviceGroupProperties *>(this); }
- VkPhysicalDeviceGroupProperties const *ptr() const { return reinterpret_cast<VkPhysicalDeviceGroupProperties const *>(this); }
-};
-
-struct safe_VkDeviceGroupDeviceCreateInfo {
- VkStructureType sType;
- const void* pNext;
- uint32_t physicalDeviceCount;
- VkPhysicalDevice* pPhysicalDevices;
- safe_VkDeviceGroupDeviceCreateInfo(const VkDeviceGroupDeviceCreateInfo* in_struct);
- safe_VkDeviceGroupDeviceCreateInfo(const safe_VkDeviceGroupDeviceCreateInfo& src);
- safe_VkDeviceGroupDeviceCreateInfo& operator=(const safe_VkDeviceGroupDeviceCreateInfo& src);
- safe_VkDeviceGroupDeviceCreateInfo();
- ~safe_VkDeviceGroupDeviceCreateInfo();
- void initialize(const VkDeviceGroupDeviceCreateInfo* in_struct);
- void initialize(const safe_VkDeviceGroupDeviceCreateInfo* src);
- VkDeviceGroupDeviceCreateInfo *ptr() { return reinterpret_cast<VkDeviceGroupDeviceCreateInfo *>(this); }
- VkDeviceGroupDeviceCreateInfo const *ptr() const { return reinterpret_cast<VkDeviceGroupDeviceCreateInfo const *>(this); }
-};
-
-struct safe_VkBufferMemoryRequirementsInfo2 {
- VkStructureType sType;
- const void* pNext;
- VkBuffer buffer;
- safe_VkBufferMemoryRequirementsInfo2(const VkBufferMemoryRequirementsInfo2* in_struct);
- safe_VkBufferMemoryRequirementsInfo2(const safe_VkBufferMemoryRequirementsInfo2& src);
- safe_VkBufferMemoryRequirementsInfo2& operator=(const safe_VkBufferMemoryRequirementsInfo2& src);
- safe_VkBufferMemoryRequirementsInfo2();
- ~safe_VkBufferMemoryRequirementsInfo2();
- void initialize(const VkBufferMemoryRequirementsInfo2* in_struct);
- void initialize(const safe_VkBufferMemoryRequirementsInfo2* src);
- VkBufferMemoryRequirementsInfo2 *ptr() { return reinterpret_cast<VkBufferMemoryRequirementsInfo2 *>(this); }
- VkBufferMemoryRequirementsInfo2 const *ptr() const { return reinterpret_cast<VkBufferMemoryRequirementsInfo2 const *>(this); }
-};
-
-struct safe_VkImageMemoryRequirementsInfo2 {
- VkStructureType sType;
- const void* pNext;
- VkImage image;
- safe_VkImageMemoryRequirementsInfo2(const VkImageMemoryRequirementsInfo2* in_struct);
- safe_VkImageMemoryRequirementsInfo2(const safe_VkImageMemoryRequirementsInfo2& src);
- safe_VkImageMemoryRequirementsInfo2& operator=(const safe_VkImageMemoryRequirementsInfo2& src);
- safe_VkImageMemoryRequirementsInfo2();
- ~safe_VkImageMemoryRequirementsInfo2();
- void initialize(const VkImageMemoryRequirementsInfo2* in_struct);
- void initialize(const safe_VkImageMemoryRequirementsInfo2* src);
- VkImageMemoryRequirementsInfo2 *ptr() { return reinterpret_cast<VkImageMemoryRequirementsInfo2 *>(this); }
- VkImageMemoryRequirementsInfo2 const *ptr() const { return reinterpret_cast<VkImageMemoryRequirementsInfo2 const *>(this); }
-};
-
-struct safe_VkImageSparseMemoryRequirementsInfo2 {
- VkStructureType sType;
- const void* pNext;
- VkImage image;
- safe_VkImageSparseMemoryRequirementsInfo2(const VkImageSparseMemoryRequirementsInfo2* in_struct);
- safe_VkImageSparseMemoryRequirementsInfo2(const safe_VkImageSparseMemoryRequirementsInfo2& src);
- safe_VkImageSparseMemoryRequirementsInfo2& operator=(const safe_VkImageSparseMemoryRequirementsInfo2& src);
- safe_VkImageSparseMemoryRequirementsInfo2();
- ~safe_VkImageSparseMemoryRequirementsInfo2();
- void initialize(const VkImageSparseMemoryRequirementsInfo2* in_struct);
- void initialize(const safe_VkImageSparseMemoryRequirementsInfo2* src);
- VkImageSparseMemoryRequirementsInfo2 *ptr() { return reinterpret_cast<VkImageSparseMemoryRequirementsInfo2 *>(this); }
- VkImageSparseMemoryRequirementsInfo2 const *ptr() const { return reinterpret_cast<VkImageSparseMemoryRequirementsInfo2 const *>(this); }
-};
-
-struct safe_VkMemoryRequirements2 {
- VkStructureType sType;
- void* pNext;
- VkMemoryRequirements memoryRequirements;
- safe_VkMemoryRequirements2(const VkMemoryRequirements2* in_struct);
- safe_VkMemoryRequirements2(const safe_VkMemoryRequirements2& src);
- safe_VkMemoryRequirements2& operator=(const safe_VkMemoryRequirements2& src);
- safe_VkMemoryRequirements2();
- ~safe_VkMemoryRequirements2();
- void initialize(const VkMemoryRequirements2* in_struct);
- void initialize(const safe_VkMemoryRequirements2* src);
- VkMemoryRequirements2 *ptr() { return reinterpret_cast<VkMemoryRequirements2 *>(this); }
- VkMemoryRequirements2 const *ptr() const { return reinterpret_cast<VkMemoryRequirements2 const *>(this); }
-};
-
-struct safe_VkSparseImageMemoryRequirements2 {
- VkStructureType sType;
- void* pNext;
- VkSparseImageMemoryRequirements memoryRequirements;
- safe_VkSparseImageMemoryRequirements2(const VkSparseImageMemoryRequirements2* in_struct);
- safe_VkSparseImageMemoryRequirements2(const safe_VkSparseImageMemoryRequirements2& src);
- safe_VkSparseImageMemoryRequirements2& operator=(const safe_VkSparseImageMemoryRequirements2& src);
- safe_VkSparseImageMemoryRequirements2();
- ~safe_VkSparseImageMemoryRequirements2();
- void initialize(const VkSparseImageMemoryRequirements2* in_struct);
- void initialize(const safe_VkSparseImageMemoryRequirements2* src);
- VkSparseImageMemoryRequirements2 *ptr() { return reinterpret_cast<VkSparseImageMemoryRequirements2 *>(this); }
- VkSparseImageMemoryRequirements2 const *ptr() const { return reinterpret_cast<VkSparseImageMemoryRequirements2 const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceFeatures2 {
- VkStructureType sType;
- void* pNext;
- VkPhysicalDeviceFeatures features;
- safe_VkPhysicalDeviceFeatures2(const VkPhysicalDeviceFeatures2* in_struct);
- safe_VkPhysicalDeviceFeatures2(const safe_VkPhysicalDeviceFeatures2& src);
- safe_VkPhysicalDeviceFeatures2& operator=(const safe_VkPhysicalDeviceFeatures2& src);
- safe_VkPhysicalDeviceFeatures2();
- ~safe_VkPhysicalDeviceFeatures2();
- void initialize(const VkPhysicalDeviceFeatures2* in_struct);
- void initialize(const safe_VkPhysicalDeviceFeatures2* src);
- VkPhysicalDeviceFeatures2 *ptr() { return reinterpret_cast<VkPhysicalDeviceFeatures2 *>(this); }
- VkPhysicalDeviceFeatures2 const *ptr() const { return reinterpret_cast<VkPhysicalDeviceFeatures2 const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceProperties2 {
- VkStructureType sType;
- void* pNext;
- VkPhysicalDeviceProperties properties;
- safe_VkPhysicalDeviceProperties2(const VkPhysicalDeviceProperties2* in_struct);
- safe_VkPhysicalDeviceProperties2(const safe_VkPhysicalDeviceProperties2& src);
- safe_VkPhysicalDeviceProperties2& operator=(const safe_VkPhysicalDeviceProperties2& src);
- safe_VkPhysicalDeviceProperties2();
- ~safe_VkPhysicalDeviceProperties2();
- void initialize(const VkPhysicalDeviceProperties2* in_struct);
- void initialize(const safe_VkPhysicalDeviceProperties2* src);
- VkPhysicalDeviceProperties2 *ptr() { return reinterpret_cast<VkPhysicalDeviceProperties2 *>(this); }
- VkPhysicalDeviceProperties2 const *ptr() const { return reinterpret_cast<VkPhysicalDeviceProperties2 const *>(this); }
-};
-
-struct safe_VkFormatProperties2 {
- VkStructureType sType;
- void* pNext;
- VkFormatProperties formatProperties;
- safe_VkFormatProperties2(const VkFormatProperties2* in_struct);
- safe_VkFormatProperties2(const safe_VkFormatProperties2& src);
- safe_VkFormatProperties2& operator=(const safe_VkFormatProperties2& src);
- safe_VkFormatProperties2();
- ~safe_VkFormatProperties2();
- void initialize(const VkFormatProperties2* in_struct);
- void initialize(const safe_VkFormatProperties2* src);
- VkFormatProperties2 *ptr() { return reinterpret_cast<VkFormatProperties2 *>(this); }
- VkFormatProperties2 const *ptr() const { return reinterpret_cast<VkFormatProperties2 const *>(this); }
-};
-
-struct safe_VkImageFormatProperties2 {
- VkStructureType sType;
- void* pNext;
- VkImageFormatProperties imageFormatProperties;
- safe_VkImageFormatProperties2(const VkImageFormatProperties2* in_struct);
- safe_VkImageFormatProperties2(const safe_VkImageFormatProperties2& src);
- safe_VkImageFormatProperties2& operator=(const safe_VkImageFormatProperties2& src);
- safe_VkImageFormatProperties2();
- ~safe_VkImageFormatProperties2();
- void initialize(const VkImageFormatProperties2* in_struct);
- void initialize(const safe_VkImageFormatProperties2* src);
- VkImageFormatProperties2 *ptr() { return reinterpret_cast<VkImageFormatProperties2 *>(this); }
- VkImageFormatProperties2 const *ptr() const { return reinterpret_cast<VkImageFormatProperties2 const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceImageFormatInfo2 {
- VkStructureType sType;
- const void* pNext;
- VkFormat format;
- VkImageType type;
- VkImageTiling tiling;
- VkImageUsageFlags usage;
- VkImageCreateFlags flags;
- safe_VkPhysicalDeviceImageFormatInfo2(const VkPhysicalDeviceImageFormatInfo2* in_struct);
- safe_VkPhysicalDeviceImageFormatInfo2(const safe_VkPhysicalDeviceImageFormatInfo2& src);
- safe_VkPhysicalDeviceImageFormatInfo2& operator=(const safe_VkPhysicalDeviceImageFormatInfo2& src);
- safe_VkPhysicalDeviceImageFormatInfo2();
- ~safe_VkPhysicalDeviceImageFormatInfo2();
- void initialize(const VkPhysicalDeviceImageFormatInfo2* in_struct);
- void initialize(const safe_VkPhysicalDeviceImageFormatInfo2* src);
- VkPhysicalDeviceImageFormatInfo2 *ptr() { return reinterpret_cast<VkPhysicalDeviceImageFormatInfo2 *>(this); }
- VkPhysicalDeviceImageFormatInfo2 const *ptr() const { return reinterpret_cast<VkPhysicalDeviceImageFormatInfo2 const *>(this); }
-};
-
-struct safe_VkQueueFamilyProperties2 {
- VkStructureType sType;
- void* pNext;
- VkQueueFamilyProperties queueFamilyProperties;
- safe_VkQueueFamilyProperties2(const VkQueueFamilyProperties2* in_struct);
- safe_VkQueueFamilyProperties2(const safe_VkQueueFamilyProperties2& src);
- safe_VkQueueFamilyProperties2& operator=(const safe_VkQueueFamilyProperties2& src);
- safe_VkQueueFamilyProperties2();
- ~safe_VkQueueFamilyProperties2();
- void initialize(const VkQueueFamilyProperties2* in_struct);
- void initialize(const safe_VkQueueFamilyProperties2* src);
- VkQueueFamilyProperties2 *ptr() { return reinterpret_cast<VkQueueFamilyProperties2 *>(this); }
- VkQueueFamilyProperties2 const *ptr() const { return reinterpret_cast<VkQueueFamilyProperties2 const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceMemoryProperties2 {
- VkStructureType sType;
- void* pNext;
- VkPhysicalDeviceMemoryProperties memoryProperties;
- safe_VkPhysicalDeviceMemoryProperties2(const VkPhysicalDeviceMemoryProperties2* in_struct);
- safe_VkPhysicalDeviceMemoryProperties2(const safe_VkPhysicalDeviceMemoryProperties2& src);
- safe_VkPhysicalDeviceMemoryProperties2& operator=(const safe_VkPhysicalDeviceMemoryProperties2& src);
- safe_VkPhysicalDeviceMemoryProperties2();
- ~safe_VkPhysicalDeviceMemoryProperties2();
- void initialize(const VkPhysicalDeviceMemoryProperties2* in_struct);
- void initialize(const safe_VkPhysicalDeviceMemoryProperties2* src);
- VkPhysicalDeviceMemoryProperties2 *ptr() { return reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>(this); }
- VkPhysicalDeviceMemoryProperties2 const *ptr() const { return reinterpret_cast<VkPhysicalDeviceMemoryProperties2 const *>(this); }
-};
-
-struct safe_VkSparseImageFormatProperties2 {
- VkStructureType sType;
- void* pNext;
- VkSparseImageFormatProperties properties;
- safe_VkSparseImageFormatProperties2(const VkSparseImageFormatProperties2* in_struct);
- safe_VkSparseImageFormatProperties2(const safe_VkSparseImageFormatProperties2& src);
- safe_VkSparseImageFormatProperties2& operator=(const safe_VkSparseImageFormatProperties2& src);
- safe_VkSparseImageFormatProperties2();
- ~safe_VkSparseImageFormatProperties2();
- void initialize(const VkSparseImageFormatProperties2* in_struct);
- void initialize(const safe_VkSparseImageFormatProperties2* src);
- VkSparseImageFormatProperties2 *ptr() { return reinterpret_cast<VkSparseImageFormatProperties2 *>(this); }
- VkSparseImageFormatProperties2 const *ptr() const { return reinterpret_cast<VkSparseImageFormatProperties2 const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceSparseImageFormatInfo2 {
- VkStructureType sType;
- const void* pNext;
- VkFormat format;
- VkImageType type;
- VkSampleCountFlagBits samples;
- VkImageUsageFlags usage;
- VkImageTiling tiling;
- safe_VkPhysicalDeviceSparseImageFormatInfo2(const VkPhysicalDeviceSparseImageFormatInfo2* in_struct);
- safe_VkPhysicalDeviceSparseImageFormatInfo2(const safe_VkPhysicalDeviceSparseImageFormatInfo2& src);
- safe_VkPhysicalDeviceSparseImageFormatInfo2& operator=(const safe_VkPhysicalDeviceSparseImageFormatInfo2& src);
- safe_VkPhysicalDeviceSparseImageFormatInfo2();
- ~safe_VkPhysicalDeviceSparseImageFormatInfo2();
- void initialize(const VkPhysicalDeviceSparseImageFormatInfo2* in_struct);
- void initialize(const safe_VkPhysicalDeviceSparseImageFormatInfo2* src);
- VkPhysicalDeviceSparseImageFormatInfo2 *ptr() { return reinterpret_cast<VkPhysicalDeviceSparseImageFormatInfo2 *>(this); }
- VkPhysicalDeviceSparseImageFormatInfo2 const *ptr() const { return reinterpret_cast<VkPhysicalDeviceSparseImageFormatInfo2 const *>(this); }
-};
-
-struct safe_VkPhysicalDevicePointClippingProperties {
- VkStructureType sType;
- void* pNext;
- VkPointClippingBehavior pointClippingBehavior;
- safe_VkPhysicalDevicePointClippingProperties(const VkPhysicalDevicePointClippingProperties* in_struct);
- safe_VkPhysicalDevicePointClippingProperties(const safe_VkPhysicalDevicePointClippingProperties& src);
- safe_VkPhysicalDevicePointClippingProperties& operator=(const safe_VkPhysicalDevicePointClippingProperties& src);
- safe_VkPhysicalDevicePointClippingProperties();
- ~safe_VkPhysicalDevicePointClippingProperties();
- void initialize(const VkPhysicalDevicePointClippingProperties* in_struct);
- void initialize(const safe_VkPhysicalDevicePointClippingProperties* src);
- VkPhysicalDevicePointClippingProperties *ptr() { return reinterpret_cast<VkPhysicalDevicePointClippingProperties *>(this); }
- VkPhysicalDevicePointClippingProperties const *ptr() const { return reinterpret_cast<VkPhysicalDevicePointClippingProperties const *>(this); }
-};
-
-struct safe_VkRenderPassInputAttachmentAspectCreateInfo {
- VkStructureType sType;
- const void* pNext;
- uint32_t aspectReferenceCount;
- const VkInputAttachmentAspectReference* pAspectReferences;
- safe_VkRenderPassInputAttachmentAspectCreateInfo(const VkRenderPassInputAttachmentAspectCreateInfo* in_struct);
- safe_VkRenderPassInputAttachmentAspectCreateInfo(const safe_VkRenderPassInputAttachmentAspectCreateInfo& src);
- safe_VkRenderPassInputAttachmentAspectCreateInfo& operator=(const safe_VkRenderPassInputAttachmentAspectCreateInfo& src);
- safe_VkRenderPassInputAttachmentAspectCreateInfo();
- ~safe_VkRenderPassInputAttachmentAspectCreateInfo();
- void initialize(const VkRenderPassInputAttachmentAspectCreateInfo* in_struct);
- void initialize(const safe_VkRenderPassInputAttachmentAspectCreateInfo* src);
- VkRenderPassInputAttachmentAspectCreateInfo *ptr() { return reinterpret_cast<VkRenderPassInputAttachmentAspectCreateInfo *>(this); }
- VkRenderPassInputAttachmentAspectCreateInfo const *ptr() const { return reinterpret_cast<VkRenderPassInputAttachmentAspectCreateInfo const *>(this); }
-};
-
-struct safe_VkImageViewUsageCreateInfo {
- VkStructureType sType;
- const void* pNext;
- VkImageUsageFlags usage;
- safe_VkImageViewUsageCreateInfo(const VkImageViewUsageCreateInfo* in_struct);
- safe_VkImageViewUsageCreateInfo(const safe_VkImageViewUsageCreateInfo& src);
- safe_VkImageViewUsageCreateInfo& operator=(const safe_VkImageViewUsageCreateInfo& src);
- safe_VkImageViewUsageCreateInfo();
- ~safe_VkImageViewUsageCreateInfo();
- void initialize(const VkImageViewUsageCreateInfo* in_struct);
- void initialize(const safe_VkImageViewUsageCreateInfo* src);
- VkImageViewUsageCreateInfo *ptr() { return reinterpret_cast<VkImageViewUsageCreateInfo *>(this); }
- VkImageViewUsageCreateInfo const *ptr() const { return reinterpret_cast<VkImageViewUsageCreateInfo const *>(this); }
-};
-
-struct safe_VkPipelineTessellationDomainOriginStateCreateInfo {
- VkStructureType sType;
- const void* pNext;
- VkTessellationDomainOrigin domainOrigin;
- safe_VkPipelineTessellationDomainOriginStateCreateInfo(const VkPipelineTessellationDomainOriginStateCreateInfo* in_struct);
- safe_VkPipelineTessellationDomainOriginStateCreateInfo(const safe_VkPipelineTessellationDomainOriginStateCreateInfo& src);
- safe_VkPipelineTessellationDomainOriginStateCreateInfo& operator=(const safe_VkPipelineTessellationDomainOriginStateCreateInfo& src);
- safe_VkPipelineTessellationDomainOriginStateCreateInfo();
- ~safe_VkPipelineTessellationDomainOriginStateCreateInfo();
- void initialize(const VkPipelineTessellationDomainOriginStateCreateInfo* in_struct);
- void initialize(const safe_VkPipelineTessellationDomainOriginStateCreateInfo* src);
- VkPipelineTessellationDomainOriginStateCreateInfo *ptr() { return reinterpret_cast<VkPipelineTessellationDomainOriginStateCreateInfo *>(this); }
- VkPipelineTessellationDomainOriginStateCreateInfo const *ptr() const { return reinterpret_cast<VkPipelineTessellationDomainOriginStateCreateInfo const *>(this); }
-};
-
-struct safe_VkRenderPassMultiviewCreateInfo {
- VkStructureType sType;
- const void* pNext;
- uint32_t subpassCount;
- const uint32_t* pViewMasks;
- uint32_t dependencyCount;
- const int32_t* pViewOffsets;
- uint32_t correlationMaskCount;
- const uint32_t* pCorrelationMasks;
- safe_VkRenderPassMultiviewCreateInfo(const VkRenderPassMultiviewCreateInfo* in_struct);
- safe_VkRenderPassMultiviewCreateInfo(const safe_VkRenderPassMultiviewCreateInfo& src);
- safe_VkRenderPassMultiviewCreateInfo& operator=(const safe_VkRenderPassMultiviewCreateInfo& src);
- safe_VkRenderPassMultiviewCreateInfo();
- ~safe_VkRenderPassMultiviewCreateInfo();
- void initialize(const VkRenderPassMultiviewCreateInfo* in_struct);
- void initialize(const safe_VkRenderPassMultiviewCreateInfo* src);
- VkRenderPassMultiviewCreateInfo *ptr() { return reinterpret_cast<VkRenderPassMultiviewCreateInfo *>(this); }
- VkRenderPassMultiviewCreateInfo const *ptr() const { return reinterpret_cast<VkRenderPassMultiviewCreateInfo const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceMultiviewFeatures {
- VkStructureType sType;
- void* pNext;
- VkBool32 multiview;
- VkBool32 multiviewGeometryShader;
- VkBool32 multiviewTessellationShader;
- safe_VkPhysicalDeviceMultiviewFeatures(const VkPhysicalDeviceMultiviewFeatures* in_struct);
- safe_VkPhysicalDeviceMultiviewFeatures(const safe_VkPhysicalDeviceMultiviewFeatures& src);
- safe_VkPhysicalDeviceMultiviewFeatures& operator=(const safe_VkPhysicalDeviceMultiviewFeatures& src);
- safe_VkPhysicalDeviceMultiviewFeatures();
- ~safe_VkPhysicalDeviceMultiviewFeatures();
- void initialize(const VkPhysicalDeviceMultiviewFeatures* in_struct);
- void initialize(const safe_VkPhysicalDeviceMultiviewFeatures* src);
- VkPhysicalDeviceMultiviewFeatures *ptr() { return reinterpret_cast<VkPhysicalDeviceMultiviewFeatures *>(this); }
- VkPhysicalDeviceMultiviewFeatures const *ptr() const { return reinterpret_cast<VkPhysicalDeviceMultiviewFeatures const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceMultiviewProperties {
- VkStructureType sType;
- void* pNext;
- uint32_t maxMultiviewViewCount;
- uint32_t maxMultiviewInstanceIndex;
- safe_VkPhysicalDeviceMultiviewProperties(const VkPhysicalDeviceMultiviewProperties* in_struct);
- safe_VkPhysicalDeviceMultiviewProperties(const safe_VkPhysicalDeviceMultiviewProperties& src);
- safe_VkPhysicalDeviceMultiviewProperties& operator=(const safe_VkPhysicalDeviceMultiviewProperties& src);
- safe_VkPhysicalDeviceMultiviewProperties();
- ~safe_VkPhysicalDeviceMultiviewProperties();
- void initialize(const VkPhysicalDeviceMultiviewProperties* in_struct);
- void initialize(const safe_VkPhysicalDeviceMultiviewProperties* src);
- VkPhysicalDeviceMultiviewProperties *ptr() { return reinterpret_cast<VkPhysicalDeviceMultiviewProperties *>(this); }
- VkPhysicalDeviceMultiviewProperties const *ptr() const { return reinterpret_cast<VkPhysicalDeviceMultiviewProperties const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceVariablePointersFeatures {
- VkStructureType sType;
- void* pNext;
- VkBool32 variablePointersStorageBuffer;
- VkBool32 variablePointers;
- safe_VkPhysicalDeviceVariablePointersFeatures(const VkPhysicalDeviceVariablePointersFeatures* in_struct);
- safe_VkPhysicalDeviceVariablePointersFeatures(const safe_VkPhysicalDeviceVariablePointersFeatures& src);
- safe_VkPhysicalDeviceVariablePointersFeatures& operator=(const safe_VkPhysicalDeviceVariablePointersFeatures& src);
- safe_VkPhysicalDeviceVariablePointersFeatures();
- ~safe_VkPhysicalDeviceVariablePointersFeatures();
- void initialize(const VkPhysicalDeviceVariablePointersFeatures* in_struct);
- void initialize(const safe_VkPhysicalDeviceVariablePointersFeatures* src);
- VkPhysicalDeviceVariablePointersFeatures *ptr() { return reinterpret_cast<VkPhysicalDeviceVariablePointersFeatures *>(this); }
- VkPhysicalDeviceVariablePointersFeatures const *ptr() const { return reinterpret_cast<VkPhysicalDeviceVariablePointersFeatures const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceProtectedMemoryFeatures {
- VkStructureType sType;
- void* pNext;
- VkBool32 protectedMemory;
- safe_VkPhysicalDeviceProtectedMemoryFeatures(const VkPhysicalDeviceProtectedMemoryFeatures* in_struct);
- safe_VkPhysicalDeviceProtectedMemoryFeatures(const safe_VkPhysicalDeviceProtectedMemoryFeatures& src);
- safe_VkPhysicalDeviceProtectedMemoryFeatures& operator=(const safe_VkPhysicalDeviceProtectedMemoryFeatures& src);
- safe_VkPhysicalDeviceProtectedMemoryFeatures();
- ~safe_VkPhysicalDeviceProtectedMemoryFeatures();
- void initialize(const VkPhysicalDeviceProtectedMemoryFeatures* in_struct);
- void initialize(const safe_VkPhysicalDeviceProtectedMemoryFeatures* src);
- VkPhysicalDeviceProtectedMemoryFeatures *ptr() { return reinterpret_cast<VkPhysicalDeviceProtectedMemoryFeatures *>(this); }
- VkPhysicalDeviceProtectedMemoryFeatures const *ptr() const { return reinterpret_cast<VkPhysicalDeviceProtectedMemoryFeatures const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceProtectedMemoryProperties {
- VkStructureType sType;
- void* pNext;
- VkBool32 protectedNoFault;
- safe_VkPhysicalDeviceProtectedMemoryProperties(const VkPhysicalDeviceProtectedMemoryProperties* in_struct);
- safe_VkPhysicalDeviceProtectedMemoryProperties(const safe_VkPhysicalDeviceProtectedMemoryProperties& src);
- safe_VkPhysicalDeviceProtectedMemoryProperties& operator=(const safe_VkPhysicalDeviceProtectedMemoryProperties& src);
- safe_VkPhysicalDeviceProtectedMemoryProperties();
- ~safe_VkPhysicalDeviceProtectedMemoryProperties();
- void initialize(const VkPhysicalDeviceProtectedMemoryProperties* in_struct);
- void initialize(const safe_VkPhysicalDeviceProtectedMemoryProperties* src);
- VkPhysicalDeviceProtectedMemoryProperties *ptr() { return reinterpret_cast<VkPhysicalDeviceProtectedMemoryProperties *>(this); }
- VkPhysicalDeviceProtectedMemoryProperties const *ptr() const { return reinterpret_cast<VkPhysicalDeviceProtectedMemoryProperties const *>(this); }
-};
-
-struct safe_VkDeviceQueueInfo2 {
- VkStructureType sType;
- const void* pNext;
- VkDeviceQueueCreateFlags flags;
- uint32_t queueFamilyIndex;
- uint32_t queueIndex;
- safe_VkDeviceQueueInfo2(const VkDeviceQueueInfo2* in_struct);
- safe_VkDeviceQueueInfo2(const safe_VkDeviceQueueInfo2& src);
- safe_VkDeviceQueueInfo2& operator=(const safe_VkDeviceQueueInfo2& src);
- safe_VkDeviceQueueInfo2();
- ~safe_VkDeviceQueueInfo2();
- void initialize(const VkDeviceQueueInfo2* in_struct);
- void initialize(const safe_VkDeviceQueueInfo2* src);
- VkDeviceQueueInfo2 *ptr() { return reinterpret_cast<VkDeviceQueueInfo2 *>(this); }
- VkDeviceQueueInfo2 const *ptr() const { return reinterpret_cast<VkDeviceQueueInfo2 const *>(this); }
-};
-
-struct safe_VkProtectedSubmitInfo {
- VkStructureType sType;
- const void* pNext;
- VkBool32 protectedSubmit;
- safe_VkProtectedSubmitInfo(const VkProtectedSubmitInfo* in_struct);
- safe_VkProtectedSubmitInfo(const safe_VkProtectedSubmitInfo& src);
- safe_VkProtectedSubmitInfo& operator=(const safe_VkProtectedSubmitInfo& src);
- safe_VkProtectedSubmitInfo();
- ~safe_VkProtectedSubmitInfo();
- void initialize(const VkProtectedSubmitInfo* in_struct);
- void initialize(const safe_VkProtectedSubmitInfo* src);
- VkProtectedSubmitInfo *ptr() { return reinterpret_cast<VkProtectedSubmitInfo *>(this); }
- VkProtectedSubmitInfo const *ptr() const { return reinterpret_cast<VkProtectedSubmitInfo const *>(this); }
-};
-
-struct safe_VkSamplerYcbcrConversionCreateInfo {
- VkStructureType sType;
- const void* pNext;
- VkFormat format;
- VkSamplerYcbcrModelConversion ycbcrModel;
- VkSamplerYcbcrRange ycbcrRange;
- VkComponentMapping components;
- VkChromaLocation xChromaOffset;
- VkChromaLocation yChromaOffset;
- VkFilter chromaFilter;
- VkBool32 forceExplicitReconstruction;
- safe_VkSamplerYcbcrConversionCreateInfo(const VkSamplerYcbcrConversionCreateInfo* in_struct);
- safe_VkSamplerYcbcrConversionCreateInfo(const safe_VkSamplerYcbcrConversionCreateInfo& src);
- safe_VkSamplerYcbcrConversionCreateInfo& operator=(const safe_VkSamplerYcbcrConversionCreateInfo& src);
- safe_VkSamplerYcbcrConversionCreateInfo();
- ~safe_VkSamplerYcbcrConversionCreateInfo();
- void initialize(const VkSamplerYcbcrConversionCreateInfo* in_struct);
- void initialize(const safe_VkSamplerYcbcrConversionCreateInfo* src);
- VkSamplerYcbcrConversionCreateInfo *ptr() { return reinterpret_cast<VkSamplerYcbcrConversionCreateInfo *>(this); }
- VkSamplerYcbcrConversionCreateInfo const *ptr() const { return reinterpret_cast<VkSamplerYcbcrConversionCreateInfo const *>(this); }
-};
-
-struct safe_VkSamplerYcbcrConversionInfo {
- VkStructureType sType;
- const void* pNext;
- VkSamplerYcbcrConversion conversion;
- safe_VkSamplerYcbcrConversionInfo(const VkSamplerYcbcrConversionInfo* in_struct);
- safe_VkSamplerYcbcrConversionInfo(const safe_VkSamplerYcbcrConversionInfo& src);
- safe_VkSamplerYcbcrConversionInfo& operator=(const safe_VkSamplerYcbcrConversionInfo& src);
- safe_VkSamplerYcbcrConversionInfo();
- ~safe_VkSamplerYcbcrConversionInfo();
- void initialize(const VkSamplerYcbcrConversionInfo* in_struct);
- void initialize(const safe_VkSamplerYcbcrConversionInfo* src);
- VkSamplerYcbcrConversionInfo *ptr() { return reinterpret_cast<VkSamplerYcbcrConversionInfo *>(this); }
- VkSamplerYcbcrConversionInfo const *ptr() const { return reinterpret_cast<VkSamplerYcbcrConversionInfo const *>(this); }
-};
-
-struct safe_VkBindImagePlaneMemoryInfo {
- VkStructureType sType;
- const void* pNext;
- VkImageAspectFlagBits planeAspect;
- safe_VkBindImagePlaneMemoryInfo(const VkBindImagePlaneMemoryInfo* in_struct);
- safe_VkBindImagePlaneMemoryInfo(const safe_VkBindImagePlaneMemoryInfo& src);
- safe_VkBindImagePlaneMemoryInfo& operator=(const safe_VkBindImagePlaneMemoryInfo& src);
- safe_VkBindImagePlaneMemoryInfo();
- ~safe_VkBindImagePlaneMemoryInfo();
- void initialize(const VkBindImagePlaneMemoryInfo* in_struct);
- void initialize(const safe_VkBindImagePlaneMemoryInfo* src);
- VkBindImagePlaneMemoryInfo *ptr() { return reinterpret_cast<VkBindImagePlaneMemoryInfo *>(this); }
- VkBindImagePlaneMemoryInfo const *ptr() const { return reinterpret_cast<VkBindImagePlaneMemoryInfo const *>(this); }
-};
-
-struct safe_VkImagePlaneMemoryRequirementsInfo {
- VkStructureType sType;
- const void* pNext;
- VkImageAspectFlagBits planeAspect;
- safe_VkImagePlaneMemoryRequirementsInfo(const VkImagePlaneMemoryRequirementsInfo* in_struct);
- safe_VkImagePlaneMemoryRequirementsInfo(const safe_VkImagePlaneMemoryRequirementsInfo& src);
- safe_VkImagePlaneMemoryRequirementsInfo& operator=(const safe_VkImagePlaneMemoryRequirementsInfo& src);
- safe_VkImagePlaneMemoryRequirementsInfo();
- ~safe_VkImagePlaneMemoryRequirementsInfo();
- void initialize(const VkImagePlaneMemoryRequirementsInfo* in_struct);
- void initialize(const safe_VkImagePlaneMemoryRequirementsInfo* src);
- VkImagePlaneMemoryRequirementsInfo *ptr() { return reinterpret_cast<VkImagePlaneMemoryRequirementsInfo *>(this); }
- VkImagePlaneMemoryRequirementsInfo const *ptr() const { return reinterpret_cast<VkImagePlaneMemoryRequirementsInfo const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures {
- VkStructureType sType;
- void* pNext;
- VkBool32 samplerYcbcrConversion;
- safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures(const VkPhysicalDeviceSamplerYcbcrConversionFeatures* in_struct);
- safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures(const safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures& src);
- safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures& operator=(const safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures& src);
- safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures();
- ~safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures();
- void initialize(const VkPhysicalDeviceSamplerYcbcrConversionFeatures* in_struct);
- void initialize(const safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures* src);
- VkPhysicalDeviceSamplerYcbcrConversionFeatures *ptr() { return reinterpret_cast<VkPhysicalDeviceSamplerYcbcrConversionFeatures *>(this); }
- VkPhysicalDeviceSamplerYcbcrConversionFeatures const *ptr() const { return reinterpret_cast<VkPhysicalDeviceSamplerYcbcrConversionFeatures const *>(this); }
-};
-
-struct safe_VkSamplerYcbcrConversionImageFormatProperties {
- VkStructureType sType;
- void* pNext;
- uint32_t combinedImageSamplerDescriptorCount;
- safe_VkSamplerYcbcrConversionImageFormatProperties(const VkSamplerYcbcrConversionImageFormatProperties* in_struct);
- safe_VkSamplerYcbcrConversionImageFormatProperties(const safe_VkSamplerYcbcrConversionImageFormatProperties& src);
- safe_VkSamplerYcbcrConversionImageFormatProperties& operator=(const safe_VkSamplerYcbcrConversionImageFormatProperties& src);
- safe_VkSamplerYcbcrConversionImageFormatProperties();
- ~safe_VkSamplerYcbcrConversionImageFormatProperties();
- void initialize(const VkSamplerYcbcrConversionImageFormatProperties* in_struct);
- void initialize(const safe_VkSamplerYcbcrConversionImageFormatProperties* src);
- VkSamplerYcbcrConversionImageFormatProperties *ptr() { return reinterpret_cast<VkSamplerYcbcrConversionImageFormatProperties *>(this); }
- VkSamplerYcbcrConversionImageFormatProperties const *ptr() const { return reinterpret_cast<VkSamplerYcbcrConversionImageFormatProperties const *>(this); }
-};
-
-struct safe_VkDescriptorUpdateTemplateCreateInfo {
- VkStructureType sType;
- const void* pNext;
- VkDescriptorUpdateTemplateCreateFlags flags;
- uint32_t descriptorUpdateEntryCount;
- const VkDescriptorUpdateTemplateEntry* pDescriptorUpdateEntries;
- VkDescriptorUpdateTemplateType templateType;
- VkDescriptorSetLayout descriptorSetLayout;
- VkPipelineBindPoint pipelineBindPoint;
- VkPipelineLayout pipelineLayout;
- uint32_t set;
- safe_VkDescriptorUpdateTemplateCreateInfo(const VkDescriptorUpdateTemplateCreateInfo* in_struct);
- safe_VkDescriptorUpdateTemplateCreateInfo(const safe_VkDescriptorUpdateTemplateCreateInfo& src);
- safe_VkDescriptorUpdateTemplateCreateInfo& operator=(const safe_VkDescriptorUpdateTemplateCreateInfo& src);
- safe_VkDescriptorUpdateTemplateCreateInfo();
- ~safe_VkDescriptorUpdateTemplateCreateInfo();
- void initialize(const VkDescriptorUpdateTemplateCreateInfo* in_struct);
- void initialize(const safe_VkDescriptorUpdateTemplateCreateInfo* src);
- VkDescriptorUpdateTemplateCreateInfo *ptr() { return reinterpret_cast<VkDescriptorUpdateTemplateCreateInfo *>(this); }
- VkDescriptorUpdateTemplateCreateInfo const *ptr() const { return reinterpret_cast<VkDescriptorUpdateTemplateCreateInfo const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceExternalImageFormatInfo {
- VkStructureType sType;
- const void* pNext;
- VkExternalMemoryHandleTypeFlagBits handleType;
- safe_VkPhysicalDeviceExternalImageFormatInfo(const VkPhysicalDeviceExternalImageFormatInfo* in_struct);
- safe_VkPhysicalDeviceExternalImageFormatInfo(const safe_VkPhysicalDeviceExternalImageFormatInfo& src);
- safe_VkPhysicalDeviceExternalImageFormatInfo& operator=(const safe_VkPhysicalDeviceExternalImageFormatInfo& src);
- safe_VkPhysicalDeviceExternalImageFormatInfo();
- ~safe_VkPhysicalDeviceExternalImageFormatInfo();
- void initialize(const VkPhysicalDeviceExternalImageFormatInfo* in_struct);
- void initialize(const safe_VkPhysicalDeviceExternalImageFormatInfo* src);
- VkPhysicalDeviceExternalImageFormatInfo *ptr() { return reinterpret_cast<VkPhysicalDeviceExternalImageFormatInfo *>(this); }
- VkPhysicalDeviceExternalImageFormatInfo const *ptr() const { return reinterpret_cast<VkPhysicalDeviceExternalImageFormatInfo const *>(this); }
-};
-
-struct safe_VkExternalImageFormatProperties {
- VkStructureType sType;
- void* pNext;
- VkExternalMemoryProperties externalMemoryProperties;
- safe_VkExternalImageFormatProperties(const VkExternalImageFormatProperties* in_struct);
- safe_VkExternalImageFormatProperties(const safe_VkExternalImageFormatProperties& src);
- safe_VkExternalImageFormatProperties& operator=(const safe_VkExternalImageFormatProperties& src);
- safe_VkExternalImageFormatProperties();
- ~safe_VkExternalImageFormatProperties();
- void initialize(const VkExternalImageFormatProperties* in_struct);
- void initialize(const safe_VkExternalImageFormatProperties* src);
- VkExternalImageFormatProperties *ptr() { return reinterpret_cast<VkExternalImageFormatProperties *>(this); }
- VkExternalImageFormatProperties const *ptr() const { return reinterpret_cast<VkExternalImageFormatProperties const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceExternalBufferInfo {
- VkStructureType sType;
- const void* pNext;
- VkBufferCreateFlags flags;
- VkBufferUsageFlags usage;
- VkExternalMemoryHandleTypeFlagBits handleType;
- safe_VkPhysicalDeviceExternalBufferInfo(const VkPhysicalDeviceExternalBufferInfo* in_struct);
- safe_VkPhysicalDeviceExternalBufferInfo(const safe_VkPhysicalDeviceExternalBufferInfo& src);
- safe_VkPhysicalDeviceExternalBufferInfo& operator=(const safe_VkPhysicalDeviceExternalBufferInfo& src);
- safe_VkPhysicalDeviceExternalBufferInfo();
- ~safe_VkPhysicalDeviceExternalBufferInfo();
- void initialize(const VkPhysicalDeviceExternalBufferInfo* in_struct);
- void initialize(const safe_VkPhysicalDeviceExternalBufferInfo* src);
- VkPhysicalDeviceExternalBufferInfo *ptr() { return reinterpret_cast<VkPhysicalDeviceExternalBufferInfo *>(this); }
- VkPhysicalDeviceExternalBufferInfo const *ptr() const { return reinterpret_cast<VkPhysicalDeviceExternalBufferInfo const *>(this); }
-};
-
-struct safe_VkExternalBufferProperties {
- VkStructureType sType;
- void* pNext;
- VkExternalMemoryProperties externalMemoryProperties;
- safe_VkExternalBufferProperties(const VkExternalBufferProperties* in_struct);
- safe_VkExternalBufferProperties(const safe_VkExternalBufferProperties& src);
- safe_VkExternalBufferProperties& operator=(const safe_VkExternalBufferProperties& src);
- safe_VkExternalBufferProperties();
- ~safe_VkExternalBufferProperties();
- void initialize(const VkExternalBufferProperties* in_struct);
- void initialize(const safe_VkExternalBufferProperties* src);
- VkExternalBufferProperties *ptr() { return reinterpret_cast<VkExternalBufferProperties *>(this); }
- VkExternalBufferProperties const *ptr() const { return reinterpret_cast<VkExternalBufferProperties const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceIDProperties {
- VkStructureType sType;
- void* pNext;
- uint8_t deviceUUID[VK_UUID_SIZE];
- uint8_t driverUUID[VK_UUID_SIZE];
- uint8_t deviceLUID[VK_LUID_SIZE];
- uint32_t deviceNodeMask;
- VkBool32 deviceLUIDValid;
- safe_VkPhysicalDeviceIDProperties(const VkPhysicalDeviceIDProperties* in_struct);
- safe_VkPhysicalDeviceIDProperties(const safe_VkPhysicalDeviceIDProperties& src);
- safe_VkPhysicalDeviceIDProperties& operator=(const safe_VkPhysicalDeviceIDProperties& src);
- safe_VkPhysicalDeviceIDProperties();
- ~safe_VkPhysicalDeviceIDProperties();
- void initialize(const VkPhysicalDeviceIDProperties* in_struct);
- void initialize(const safe_VkPhysicalDeviceIDProperties* src);
- VkPhysicalDeviceIDProperties *ptr() { return reinterpret_cast<VkPhysicalDeviceIDProperties *>(this); }
- VkPhysicalDeviceIDProperties const *ptr() const { return reinterpret_cast<VkPhysicalDeviceIDProperties const *>(this); }
-};
-
-struct safe_VkExternalMemoryImageCreateInfo {
- VkStructureType sType;
- const void* pNext;
- VkExternalMemoryHandleTypeFlags handleTypes;
- safe_VkExternalMemoryImageCreateInfo(const VkExternalMemoryImageCreateInfo* in_struct);
- safe_VkExternalMemoryImageCreateInfo(const safe_VkExternalMemoryImageCreateInfo& src);
- safe_VkExternalMemoryImageCreateInfo& operator=(const safe_VkExternalMemoryImageCreateInfo& src);
- safe_VkExternalMemoryImageCreateInfo();
- ~safe_VkExternalMemoryImageCreateInfo();
- void initialize(const VkExternalMemoryImageCreateInfo* in_struct);
- void initialize(const safe_VkExternalMemoryImageCreateInfo* src);
- VkExternalMemoryImageCreateInfo *ptr() { return reinterpret_cast<VkExternalMemoryImageCreateInfo *>(this); }
- VkExternalMemoryImageCreateInfo const *ptr() const { return reinterpret_cast<VkExternalMemoryImageCreateInfo const *>(this); }
-};
-
-struct safe_VkExternalMemoryBufferCreateInfo {
- VkStructureType sType;
- const void* pNext;
- VkExternalMemoryHandleTypeFlags handleTypes;
- safe_VkExternalMemoryBufferCreateInfo(const VkExternalMemoryBufferCreateInfo* in_struct);
- safe_VkExternalMemoryBufferCreateInfo(const safe_VkExternalMemoryBufferCreateInfo& src);
- safe_VkExternalMemoryBufferCreateInfo& operator=(const safe_VkExternalMemoryBufferCreateInfo& src);
- safe_VkExternalMemoryBufferCreateInfo();
- ~safe_VkExternalMemoryBufferCreateInfo();
- void initialize(const VkExternalMemoryBufferCreateInfo* in_struct);
- void initialize(const safe_VkExternalMemoryBufferCreateInfo* src);
- VkExternalMemoryBufferCreateInfo *ptr() { return reinterpret_cast<VkExternalMemoryBufferCreateInfo *>(this); }
- VkExternalMemoryBufferCreateInfo const *ptr() const { return reinterpret_cast<VkExternalMemoryBufferCreateInfo const *>(this); }
-};
-
-struct safe_VkExportMemoryAllocateInfo {
- VkStructureType sType;
- const void* pNext;
- VkExternalMemoryHandleTypeFlags handleTypes;
- safe_VkExportMemoryAllocateInfo(const VkExportMemoryAllocateInfo* in_struct);
- safe_VkExportMemoryAllocateInfo(const safe_VkExportMemoryAllocateInfo& src);
- safe_VkExportMemoryAllocateInfo& operator=(const safe_VkExportMemoryAllocateInfo& src);
- safe_VkExportMemoryAllocateInfo();
- ~safe_VkExportMemoryAllocateInfo();
- void initialize(const VkExportMemoryAllocateInfo* in_struct);
- void initialize(const safe_VkExportMemoryAllocateInfo* src);
- VkExportMemoryAllocateInfo *ptr() { return reinterpret_cast<VkExportMemoryAllocateInfo *>(this); }
- VkExportMemoryAllocateInfo const *ptr() const { return reinterpret_cast<VkExportMemoryAllocateInfo const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceExternalFenceInfo {
- VkStructureType sType;
- const void* pNext;
- VkExternalFenceHandleTypeFlagBits handleType;
- safe_VkPhysicalDeviceExternalFenceInfo(const VkPhysicalDeviceExternalFenceInfo* in_struct);
- safe_VkPhysicalDeviceExternalFenceInfo(const safe_VkPhysicalDeviceExternalFenceInfo& src);
- safe_VkPhysicalDeviceExternalFenceInfo& operator=(const safe_VkPhysicalDeviceExternalFenceInfo& src);
- safe_VkPhysicalDeviceExternalFenceInfo();
- ~safe_VkPhysicalDeviceExternalFenceInfo();
- void initialize(const VkPhysicalDeviceExternalFenceInfo* in_struct);
- void initialize(const safe_VkPhysicalDeviceExternalFenceInfo* src);
- VkPhysicalDeviceExternalFenceInfo *ptr() { return reinterpret_cast<VkPhysicalDeviceExternalFenceInfo *>(this); }
- VkPhysicalDeviceExternalFenceInfo const *ptr() const { return reinterpret_cast<VkPhysicalDeviceExternalFenceInfo const *>(this); }
-};
-
-struct safe_VkExternalFenceProperties {
- VkStructureType sType;
- void* pNext;
- VkExternalFenceHandleTypeFlags exportFromImportedHandleTypes;
- VkExternalFenceHandleTypeFlags compatibleHandleTypes;
- VkExternalFenceFeatureFlags externalFenceFeatures;
- safe_VkExternalFenceProperties(const VkExternalFenceProperties* in_struct);
- safe_VkExternalFenceProperties(const safe_VkExternalFenceProperties& src);
- safe_VkExternalFenceProperties& operator=(const safe_VkExternalFenceProperties& src);
- safe_VkExternalFenceProperties();
- ~safe_VkExternalFenceProperties();
- void initialize(const VkExternalFenceProperties* in_struct);
- void initialize(const safe_VkExternalFenceProperties* src);
- VkExternalFenceProperties *ptr() { return reinterpret_cast<VkExternalFenceProperties *>(this); }
- VkExternalFenceProperties const *ptr() const { return reinterpret_cast<VkExternalFenceProperties const *>(this); }
-};
-
-struct safe_VkExportFenceCreateInfo {
- VkStructureType sType;
- const void* pNext;
- VkExternalFenceHandleTypeFlags handleTypes;
- safe_VkExportFenceCreateInfo(const VkExportFenceCreateInfo* in_struct);
- safe_VkExportFenceCreateInfo(const safe_VkExportFenceCreateInfo& src);
- safe_VkExportFenceCreateInfo& operator=(const safe_VkExportFenceCreateInfo& src);
- safe_VkExportFenceCreateInfo();
- ~safe_VkExportFenceCreateInfo();
- void initialize(const VkExportFenceCreateInfo* in_struct);
- void initialize(const safe_VkExportFenceCreateInfo* src);
- VkExportFenceCreateInfo *ptr() { return reinterpret_cast<VkExportFenceCreateInfo *>(this); }
- VkExportFenceCreateInfo const *ptr() const { return reinterpret_cast<VkExportFenceCreateInfo const *>(this); }
-};
-
-struct safe_VkExportSemaphoreCreateInfo {
- VkStructureType sType;
- const void* pNext;
- VkExternalSemaphoreHandleTypeFlags handleTypes;
- safe_VkExportSemaphoreCreateInfo(const VkExportSemaphoreCreateInfo* in_struct);
- safe_VkExportSemaphoreCreateInfo(const safe_VkExportSemaphoreCreateInfo& src);
- safe_VkExportSemaphoreCreateInfo& operator=(const safe_VkExportSemaphoreCreateInfo& src);
- safe_VkExportSemaphoreCreateInfo();
- ~safe_VkExportSemaphoreCreateInfo();
- void initialize(const VkExportSemaphoreCreateInfo* in_struct);
- void initialize(const safe_VkExportSemaphoreCreateInfo* src);
- VkExportSemaphoreCreateInfo *ptr() { return reinterpret_cast<VkExportSemaphoreCreateInfo *>(this); }
- VkExportSemaphoreCreateInfo const *ptr() const { return reinterpret_cast<VkExportSemaphoreCreateInfo const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceExternalSemaphoreInfo {
- VkStructureType sType;
- const void* pNext;
- VkExternalSemaphoreHandleTypeFlagBits handleType;
- safe_VkPhysicalDeviceExternalSemaphoreInfo(const VkPhysicalDeviceExternalSemaphoreInfo* in_struct);
- safe_VkPhysicalDeviceExternalSemaphoreInfo(const safe_VkPhysicalDeviceExternalSemaphoreInfo& src);
- safe_VkPhysicalDeviceExternalSemaphoreInfo& operator=(const safe_VkPhysicalDeviceExternalSemaphoreInfo& src);
- safe_VkPhysicalDeviceExternalSemaphoreInfo();
- ~safe_VkPhysicalDeviceExternalSemaphoreInfo();
- void initialize(const VkPhysicalDeviceExternalSemaphoreInfo* in_struct);
- void initialize(const safe_VkPhysicalDeviceExternalSemaphoreInfo* src);
- VkPhysicalDeviceExternalSemaphoreInfo *ptr() { return reinterpret_cast<VkPhysicalDeviceExternalSemaphoreInfo *>(this); }
- VkPhysicalDeviceExternalSemaphoreInfo const *ptr() const { return reinterpret_cast<VkPhysicalDeviceExternalSemaphoreInfo const *>(this); }
-};
-
-struct safe_VkExternalSemaphoreProperties {
- VkStructureType sType;
- void* pNext;
- VkExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes;
- VkExternalSemaphoreHandleTypeFlags compatibleHandleTypes;
- VkExternalSemaphoreFeatureFlags externalSemaphoreFeatures;
- safe_VkExternalSemaphoreProperties(const VkExternalSemaphoreProperties* in_struct);
- safe_VkExternalSemaphoreProperties(const safe_VkExternalSemaphoreProperties& src);
- safe_VkExternalSemaphoreProperties& operator=(const safe_VkExternalSemaphoreProperties& src);
- safe_VkExternalSemaphoreProperties();
- ~safe_VkExternalSemaphoreProperties();
- void initialize(const VkExternalSemaphoreProperties* in_struct);
- void initialize(const safe_VkExternalSemaphoreProperties* src);
- VkExternalSemaphoreProperties *ptr() { return reinterpret_cast<VkExternalSemaphoreProperties *>(this); }
- VkExternalSemaphoreProperties const *ptr() const { return reinterpret_cast<VkExternalSemaphoreProperties const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceMaintenance3Properties {
- VkStructureType sType;
- void* pNext;
- uint32_t maxPerSetDescriptors;
- VkDeviceSize maxMemoryAllocationSize;
- safe_VkPhysicalDeviceMaintenance3Properties(const VkPhysicalDeviceMaintenance3Properties* in_struct);
- safe_VkPhysicalDeviceMaintenance3Properties(const safe_VkPhysicalDeviceMaintenance3Properties& src);
- safe_VkPhysicalDeviceMaintenance3Properties& operator=(const safe_VkPhysicalDeviceMaintenance3Properties& src);
- safe_VkPhysicalDeviceMaintenance3Properties();
- ~safe_VkPhysicalDeviceMaintenance3Properties();
- void initialize(const VkPhysicalDeviceMaintenance3Properties* in_struct);
- void initialize(const safe_VkPhysicalDeviceMaintenance3Properties* src);
- VkPhysicalDeviceMaintenance3Properties *ptr() { return reinterpret_cast<VkPhysicalDeviceMaintenance3Properties *>(this); }
- VkPhysicalDeviceMaintenance3Properties const *ptr() const { return reinterpret_cast<VkPhysicalDeviceMaintenance3Properties const *>(this); }
-};
-
-struct safe_VkDescriptorSetLayoutSupport {
- VkStructureType sType;
- void* pNext;
- VkBool32 supported;
- safe_VkDescriptorSetLayoutSupport(const VkDescriptorSetLayoutSupport* in_struct);
- safe_VkDescriptorSetLayoutSupport(const safe_VkDescriptorSetLayoutSupport& src);
- safe_VkDescriptorSetLayoutSupport& operator=(const safe_VkDescriptorSetLayoutSupport& src);
- safe_VkDescriptorSetLayoutSupport();
- ~safe_VkDescriptorSetLayoutSupport();
- void initialize(const VkDescriptorSetLayoutSupport* in_struct);
- void initialize(const safe_VkDescriptorSetLayoutSupport* src);
- VkDescriptorSetLayoutSupport *ptr() { return reinterpret_cast<VkDescriptorSetLayoutSupport *>(this); }
- VkDescriptorSetLayoutSupport const *ptr() const { return reinterpret_cast<VkDescriptorSetLayoutSupport const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceShaderDrawParametersFeatures {
- VkStructureType sType;
- void* pNext;
- VkBool32 shaderDrawParameters;
- safe_VkPhysicalDeviceShaderDrawParametersFeatures(const VkPhysicalDeviceShaderDrawParametersFeatures* in_struct);
- safe_VkPhysicalDeviceShaderDrawParametersFeatures(const safe_VkPhysicalDeviceShaderDrawParametersFeatures& src);
- safe_VkPhysicalDeviceShaderDrawParametersFeatures& operator=(const safe_VkPhysicalDeviceShaderDrawParametersFeatures& src);
- safe_VkPhysicalDeviceShaderDrawParametersFeatures();
- ~safe_VkPhysicalDeviceShaderDrawParametersFeatures();
- void initialize(const VkPhysicalDeviceShaderDrawParametersFeatures* in_struct);
- void initialize(const safe_VkPhysicalDeviceShaderDrawParametersFeatures* src);
- VkPhysicalDeviceShaderDrawParametersFeatures *ptr() { return reinterpret_cast<VkPhysicalDeviceShaderDrawParametersFeatures *>(this); }
- VkPhysicalDeviceShaderDrawParametersFeatures const *ptr() const { return reinterpret_cast<VkPhysicalDeviceShaderDrawParametersFeatures const *>(this); }
-};
-
-struct safe_VkSwapchainCreateInfoKHR {
- VkStructureType sType;
- const void* pNext;
- VkSwapchainCreateFlagsKHR flags;
- VkSurfaceKHR surface;
- uint32_t minImageCount;
- VkFormat imageFormat;
- VkColorSpaceKHR imageColorSpace;
- VkExtent2D imageExtent;
- uint32_t imageArrayLayers;
- VkImageUsageFlags imageUsage;
- VkSharingMode imageSharingMode;
- uint32_t queueFamilyIndexCount;
- const uint32_t* pQueueFamilyIndices;
- VkSurfaceTransformFlagBitsKHR preTransform;
- VkCompositeAlphaFlagBitsKHR compositeAlpha;
- VkPresentModeKHR presentMode;
- VkBool32 clipped;
- VkSwapchainKHR oldSwapchain;
- safe_VkSwapchainCreateInfoKHR(const VkSwapchainCreateInfoKHR* in_struct);
- safe_VkSwapchainCreateInfoKHR(const safe_VkSwapchainCreateInfoKHR& src);
- safe_VkSwapchainCreateInfoKHR& operator=(const safe_VkSwapchainCreateInfoKHR& src);
- safe_VkSwapchainCreateInfoKHR();
- ~safe_VkSwapchainCreateInfoKHR();
- void initialize(const VkSwapchainCreateInfoKHR* in_struct);
- void initialize(const safe_VkSwapchainCreateInfoKHR* src);
- VkSwapchainCreateInfoKHR *ptr() { return reinterpret_cast<VkSwapchainCreateInfoKHR *>(this); }
- VkSwapchainCreateInfoKHR const *ptr() const { return reinterpret_cast<VkSwapchainCreateInfoKHR const *>(this); }
-};
-
-struct safe_VkPresentInfoKHR {
- VkStructureType sType;
- const void* pNext;
- uint32_t waitSemaphoreCount;
- VkSemaphore* pWaitSemaphores;
- uint32_t swapchainCount;
- VkSwapchainKHR* pSwapchains;
- const uint32_t* pImageIndices;
- VkResult* pResults;
- safe_VkPresentInfoKHR(const VkPresentInfoKHR* in_struct);
- safe_VkPresentInfoKHR(const safe_VkPresentInfoKHR& src);
- safe_VkPresentInfoKHR& operator=(const safe_VkPresentInfoKHR& src);
- safe_VkPresentInfoKHR();
- ~safe_VkPresentInfoKHR();
- void initialize(const VkPresentInfoKHR* in_struct);
- void initialize(const safe_VkPresentInfoKHR* src);
- VkPresentInfoKHR *ptr() { return reinterpret_cast<VkPresentInfoKHR *>(this); }
- VkPresentInfoKHR const *ptr() const { return reinterpret_cast<VkPresentInfoKHR const *>(this); }
-};
-
-struct safe_VkImageSwapchainCreateInfoKHR {
- VkStructureType sType;
- const void* pNext;
- VkSwapchainKHR swapchain;
- safe_VkImageSwapchainCreateInfoKHR(const VkImageSwapchainCreateInfoKHR* in_struct);
- safe_VkImageSwapchainCreateInfoKHR(const safe_VkImageSwapchainCreateInfoKHR& src);
- safe_VkImageSwapchainCreateInfoKHR& operator=(const safe_VkImageSwapchainCreateInfoKHR& src);
- safe_VkImageSwapchainCreateInfoKHR();
- ~safe_VkImageSwapchainCreateInfoKHR();
- void initialize(const VkImageSwapchainCreateInfoKHR* in_struct);
- void initialize(const safe_VkImageSwapchainCreateInfoKHR* src);
- VkImageSwapchainCreateInfoKHR *ptr() { return reinterpret_cast<VkImageSwapchainCreateInfoKHR *>(this); }
- VkImageSwapchainCreateInfoKHR const *ptr() const { return reinterpret_cast<VkImageSwapchainCreateInfoKHR const *>(this); }
-};
-
-struct safe_VkBindImageMemorySwapchainInfoKHR {
- VkStructureType sType;
- const void* pNext;
- VkSwapchainKHR swapchain;
- uint32_t imageIndex;
- safe_VkBindImageMemorySwapchainInfoKHR(const VkBindImageMemorySwapchainInfoKHR* in_struct);
- safe_VkBindImageMemorySwapchainInfoKHR(const safe_VkBindImageMemorySwapchainInfoKHR& src);
- safe_VkBindImageMemorySwapchainInfoKHR& operator=(const safe_VkBindImageMemorySwapchainInfoKHR& src);
- safe_VkBindImageMemorySwapchainInfoKHR();
- ~safe_VkBindImageMemorySwapchainInfoKHR();
- void initialize(const VkBindImageMemorySwapchainInfoKHR* in_struct);
- void initialize(const safe_VkBindImageMemorySwapchainInfoKHR* src);
- VkBindImageMemorySwapchainInfoKHR *ptr() { return reinterpret_cast<VkBindImageMemorySwapchainInfoKHR *>(this); }
- VkBindImageMemorySwapchainInfoKHR const *ptr() const { return reinterpret_cast<VkBindImageMemorySwapchainInfoKHR const *>(this); }
-};
-
-struct safe_VkAcquireNextImageInfoKHR {
- VkStructureType sType;
- const void* pNext;
- VkSwapchainKHR swapchain;
- uint64_t timeout;
- VkSemaphore semaphore;
- VkFence fence;
- uint32_t deviceMask;
- safe_VkAcquireNextImageInfoKHR(const VkAcquireNextImageInfoKHR* in_struct);
- safe_VkAcquireNextImageInfoKHR(const safe_VkAcquireNextImageInfoKHR& src);
- safe_VkAcquireNextImageInfoKHR& operator=(const safe_VkAcquireNextImageInfoKHR& src);
- safe_VkAcquireNextImageInfoKHR();
- ~safe_VkAcquireNextImageInfoKHR();
- void initialize(const VkAcquireNextImageInfoKHR* in_struct);
- void initialize(const safe_VkAcquireNextImageInfoKHR* src);
- VkAcquireNextImageInfoKHR *ptr() { return reinterpret_cast<VkAcquireNextImageInfoKHR *>(this); }
- VkAcquireNextImageInfoKHR const *ptr() const { return reinterpret_cast<VkAcquireNextImageInfoKHR const *>(this); }
-};
-
-struct safe_VkDeviceGroupPresentCapabilitiesKHR {
- VkStructureType sType;
- const void* pNext;
- uint32_t presentMask[VK_MAX_DEVICE_GROUP_SIZE];
- VkDeviceGroupPresentModeFlagsKHR modes;
- safe_VkDeviceGroupPresentCapabilitiesKHR(const VkDeviceGroupPresentCapabilitiesKHR* in_struct);
- safe_VkDeviceGroupPresentCapabilitiesKHR(const safe_VkDeviceGroupPresentCapabilitiesKHR& src);
- safe_VkDeviceGroupPresentCapabilitiesKHR& operator=(const safe_VkDeviceGroupPresentCapabilitiesKHR& src);
- safe_VkDeviceGroupPresentCapabilitiesKHR();
- ~safe_VkDeviceGroupPresentCapabilitiesKHR();
- void initialize(const VkDeviceGroupPresentCapabilitiesKHR* in_struct);
- void initialize(const safe_VkDeviceGroupPresentCapabilitiesKHR* src);
- VkDeviceGroupPresentCapabilitiesKHR *ptr() { return reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>(this); }
- VkDeviceGroupPresentCapabilitiesKHR const *ptr() const { return reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR const *>(this); }
-};
-
-struct safe_VkDeviceGroupPresentInfoKHR {
- VkStructureType sType;
- const void* pNext;
- uint32_t swapchainCount;
- const uint32_t* pDeviceMasks;
- VkDeviceGroupPresentModeFlagBitsKHR mode;
- safe_VkDeviceGroupPresentInfoKHR(const VkDeviceGroupPresentInfoKHR* in_struct);
- safe_VkDeviceGroupPresentInfoKHR(const safe_VkDeviceGroupPresentInfoKHR& src);
- safe_VkDeviceGroupPresentInfoKHR& operator=(const safe_VkDeviceGroupPresentInfoKHR& src);
- safe_VkDeviceGroupPresentInfoKHR();
- ~safe_VkDeviceGroupPresentInfoKHR();
- void initialize(const VkDeviceGroupPresentInfoKHR* in_struct);
- void initialize(const safe_VkDeviceGroupPresentInfoKHR* src);
- VkDeviceGroupPresentInfoKHR *ptr() { return reinterpret_cast<VkDeviceGroupPresentInfoKHR *>(this); }
- VkDeviceGroupPresentInfoKHR const *ptr() const { return reinterpret_cast<VkDeviceGroupPresentInfoKHR const *>(this); }
-};
-
-struct safe_VkDeviceGroupSwapchainCreateInfoKHR {
- VkStructureType sType;
- const void* pNext;
- VkDeviceGroupPresentModeFlagsKHR modes;
- safe_VkDeviceGroupSwapchainCreateInfoKHR(const VkDeviceGroupSwapchainCreateInfoKHR* in_struct);
- safe_VkDeviceGroupSwapchainCreateInfoKHR(const safe_VkDeviceGroupSwapchainCreateInfoKHR& src);
- safe_VkDeviceGroupSwapchainCreateInfoKHR& operator=(const safe_VkDeviceGroupSwapchainCreateInfoKHR& src);
- safe_VkDeviceGroupSwapchainCreateInfoKHR();
- ~safe_VkDeviceGroupSwapchainCreateInfoKHR();
- void initialize(const VkDeviceGroupSwapchainCreateInfoKHR* in_struct);
- void initialize(const safe_VkDeviceGroupSwapchainCreateInfoKHR* src);
- VkDeviceGroupSwapchainCreateInfoKHR *ptr() { return reinterpret_cast<VkDeviceGroupSwapchainCreateInfoKHR *>(this); }
- VkDeviceGroupSwapchainCreateInfoKHR const *ptr() const { return reinterpret_cast<VkDeviceGroupSwapchainCreateInfoKHR const *>(this); }
-};
-
-struct safe_VkDisplayPropertiesKHR {
- VkDisplayKHR display;
- const char* displayName;
- VkExtent2D physicalDimensions;
- VkExtent2D physicalResolution;
- VkSurfaceTransformFlagsKHR supportedTransforms;
- VkBool32 planeReorderPossible;
- VkBool32 persistentContent;
- safe_VkDisplayPropertiesKHR(const VkDisplayPropertiesKHR* in_struct);
- safe_VkDisplayPropertiesKHR(const safe_VkDisplayPropertiesKHR& src);
- safe_VkDisplayPropertiesKHR& operator=(const safe_VkDisplayPropertiesKHR& src);
- safe_VkDisplayPropertiesKHR();
- ~safe_VkDisplayPropertiesKHR();
- void initialize(const VkDisplayPropertiesKHR* in_struct);
- void initialize(const safe_VkDisplayPropertiesKHR* src);
- VkDisplayPropertiesKHR *ptr() { return reinterpret_cast<VkDisplayPropertiesKHR *>(this); }
- VkDisplayPropertiesKHR const *ptr() const { return reinterpret_cast<VkDisplayPropertiesKHR const *>(this); }
-};
-
-struct safe_VkDisplayModeCreateInfoKHR {
- VkStructureType sType;
- const void* pNext;
- VkDisplayModeCreateFlagsKHR flags;
- VkDisplayModeParametersKHR parameters;
- safe_VkDisplayModeCreateInfoKHR(const VkDisplayModeCreateInfoKHR* in_struct);
- safe_VkDisplayModeCreateInfoKHR(const safe_VkDisplayModeCreateInfoKHR& src);
- safe_VkDisplayModeCreateInfoKHR& operator=(const safe_VkDisplayModeCreateInfoKHR& src);
- safe_VkDisplayModeCreateInfoKHR();
- ~safe_VkDisplayModeCreateInfoKHR();
- void initialize(const VkDisplayModeCreateInfoKHR* in_struct);
- void initialize(const safe_VkDisplayModeCreateInfoKHR* src);
- VkDisplayModeCreateInfoKHR *ptr() { return reinterpret_cast<VkDisplayModeCreateInfoKHR *>(this); }
- VkDisplayModeCreateInfoKHR const *ptr() const { return reinterpret_cast<VkDisplayModeCreateInfoKHR const *>(this); }
-};
-
-struct safe_VkDisplaySurfaceCreateInfoKHR {
- VkStructureType sType;
- const void* pNext;
- VkDisplaySurfaceCreateFlagsKHR flags;
- VkDisplayModeKHR displayMode;
- uint32_t planeIndex;
- uint32_t planeStackIndex;
- VkSurfaceTransformFlagBitsKHR transform;
- float globalAlpha;
- VkDisplayPlaneAlphaFlagBitsKHR alphaMode;
- VkExtent2D imageExtent;
- safe_VkDisplaySurfaceCreateInfoKHR(const VkDisplaySurfaceCreateInfoKHR* in_struct);
- safe_VkDisplaySurfaceCreateInfoKHR(const safe_VkDisplaySurfaceCreateInfoKHR& src);
- safe_VkDisplaySurfaceCreateInfoKHR& operator=(const safe_VkDisplaySurfaceCreateInfoKHR& src);
- safe_VkDisplaySurfaceCreateInfoKHR();
- ~safe_VkDisplaySurfaceCreateInfoKHR();
- void initialize(const VkDisplaySurfaceCreateInfoKHR* in_struct);
- void initialize(const safe_VkDisplaySurfaceCreateInfoKHR* src);
- VkDisplaySurfaceCreateInfoKHR *ptr() { return reinterpret_cast<VkDisplaySurfaceCreateInfoKHR *>(this); }
- VkDisplaySurfaceCreateInfoKHR const *ptr() const { return reinterpret_cast<VkDisplaySurfaceCreateInfoKHR const *>(this); }
-};
-
-struct safe_VkDisplayPresentInfoKHR {
- VkStructureType sType;
- const void* pNext;
- VkRect2D srcRect;
- VkRect2D dstRect;
- VkBool32 persistent;
- safe_VkDisplayPresentInfoKHR(const VkDisplayPresentInfoKHR* in_struct);
- safe_VkDisplayPresentInfoKHR(const safe_VkDisplayPresentInfoKHR& src);
- safe_VkDisplayPresentInfoKHR& operator=(const safe_VkDisplayPresentInfoKHR& src);
- safe_VkDisplayPresentInfoKHR();
- ~safe_VkDisplayPresentInfoKHR();
- void initialize(const VkDisplayPresentInfoKHR* in_struct);
- void initialize(const safe_VkDisplayPresentInfoKHR* src);
- VkDisplayPresentInfoKHR *ptr() { return reinterpret_cast<VkDisplayPresentInfoKHR *>(this); }
- VkDisplayPresentInfoKHR const *ptr() const { return reinterpret_cast<VkDisplayPresentInfoKHR const *>(this); }
-};
-
-#ifdef VK_USE_PLATFORM_XLIB_KHR
-struct safe_VkXlibSurfaceCreateInfoKHR {
- VkStructureType sType;
- const void* pNext;
- VkXlibSurfaceCreateFlagsKHR flags;
- Display* dpy;
- Window window;
- safe_VkXlibSurfaceCreateInfoKHR(const VkXlibSurfaceCreateInfoKHR* in_struct);
- safe_VkXlibSurfaceCreateInfoKHR(const safe_VkXlibSurfaceCreateInfoKHR& src);
- safe_VkXlibSurfaceCreateInfoKHR& operator=(const safe_VkXlibSurfaceCreateInfoKHR& src);
- safe_VkXlibSurfaceCreateInfoKHR();
- ~safe_VkXlibSurfaceCreateInfoKHR();
- void initialize(const VkXlibSurfaceCreateInfoKHR* in_struct);
- void initialize(const safe_VkXlibSurfaceCreateInfoKHR* src);
- VkXlibSurfaceCreateInfoKHR *ptr() { return reinterpret_cast<VkXlibSurfaceCreateInfoKHR *>(this); }
- VkXlibSurfaceCreateInfoKHR const *ptr() const { return reinterpret_cast<VkXlibSurfaceCreateInfoKHR const *>(this); }
-};
-#endif // VK_USE_PLATFORM_XLIB_KHR
-
-#ifdef VK_USE_PLATFORM_XCB_KHR
-struct safe_VkXcbSurfaceCreateInfoKHR {
- VkStructureType sType;
- const void* pNext;
- VkXcbSurfaceCreateFlagsKHR flags;
- xcb_connection_t* connection;
- xcb_window_t window;
- safe_VkXcbSurfaceCreateInfoKHR(const VkXcbSurfaceCreateInfoKHR* in_struct);
- safe_VkXcbSurfaceCreateInfoKHR(const safe_VkXcbSurfaceCreateInfoKHR& src);
- safe_VkXcbSurfaceCreateInfoKHR& operator=(const safe_VkXcbSurfaceCreateInfoKHR& src);
- safe_VkXcbSurfaceCreateInfoKHR();
- ~safe_VkXcbSurfaceCreateInfoKHR();
- void initialize(const VkXcbSurfaceCreateInfoKHR* in_struct);
- void initialize(const safe_VkXcbSurfaceCreateInfoKHR* src);
- VkXcbSurfaceCreateInfoKHR *ptr() { return reinterpret_cast<VkXcbSurfaceCreateInfoKHR *>(this); }
- VkXcbSurfaceCreateInfoKHR const *ptr() const { return reinterpret_cast<VkXcbSurfaceCreateInfoKHR const *>(this); }
-};
-#endif // VK_USE_PLATFORM_XCB_KHR
-
-#ifdef VK_USE_PLATFORM_WAYLAND_KHR
-struct safe_VkWaylandSurfaceCreateInfoKHR {
- VkStructureType sType;
- const void* pNext;
- VkWaylandSurfaceCreateFlagsKHR flags;
- struct wl_display* display;
- struct wl_surface* surface;
- safe_VkWaylandSurfaceCreateInfoKHR(const VkWaylandSurfaceCreateInfoKHR* in_struct);
- safe_VkWaylandSurfaceCreateInfoKHR(const safe_VkWaylandSurfaceCreateInfoKHR& src);
- safe_VkWaylandSurfaceCreateInfoKHR& operator=(const safe_VkWaylandSurfaceCreateInfoKHR& src);
- safe_VkWaylandSurfaceCreateInfoKHR();
- ~safe_VkWaylandSurfaceCreateInfoKHR();
- void initialize(const VkWaylandSurfaceCreateInfoKHR* in_struct);
- void initialize(const safe_VkWaylandSurfaceCreateInfoKHR* src);
- VkWaylandSurfaceCreateInfoKHR *ptr() { return reinterpret_cast<VkWaylandSurfaceCreateInfoKHR *>(this); }
- VkWaylandSurfaceCreateInfoKHR const *ptr() const { return reinterpret_cast<VkWaylandSurfaceCreateInfoKHR const *>(this); }
-};
-#endif // VK_USE_PLATFORM_WAYLAND_KHR
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-struct safe_VkAndroidSurfaceCreateInfoKHR {
- VkStructureType sType;
- const void* pNext;
- VkAndroidSurfaceCreateFlagsKHR flags;
- struct ANativeWindow* window;
- safe_VkAndroidSurfaceCreateInfoKHR(const VkAndroidSurfaceCreateInfoKHR* in_struct);
- safe_VkAndroidSurfaceCreateInfoKHR(const safe_VkAndroidSurfaceCreateInfoKHR& src);
- safe_VkAndroidSurfaceCreateInfoKHR& operator=(const safe_VkAndroidSurfaceCreateInfoKHR& src);
- safe_VkAndroidSurfaceCreateInfoKHR();
- ~safe_VkAndroidSurfaceCreateInfoKHR();
- void initialize(const VkAndroidSurfaceCreateInfoKHR* in_struct);
- void initialize(const safe_VkAndroidSurfaceCreateInfoKHR* src);
- VkAndroidSurfaceCreateInfoKHR *ptr() { return reinterpret_cast<VkAndroidSurfaceCreateInfoKHR *>(this); }
- VkAndroidSurfaceCreateInfoKHR const *ptr() const { return reinterpret_cast<VkAndroidSurfaceCreateInfoKHR const *>(this); }
-};
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-struct safe_VkWin32SurfaceCreateInfoKHR {
- VkStructureType sType;
- const void* pNext;
- VkWin32SurfaceCreateFlagsKHR flags;
- HINSTANCE hinstance;
- HWND hwnd;
- safe_VkWin32SurfaceCreateInfoKHR(const VkWin32SurfaceCreateInfoKHR* in_struct);
- safe_VkWin32SurfaceCreateInfoKHR(const safe_VkWin32SurfaceCreateInfoKHR& src);
- safe_VkWin32SurfaceCreateInfoKHR& operator=(const safe_VkWin32SurfaceCreateInfoKHR& src);
- safe_VkWin32SurfaceCreateInfoKHR();
- ~safe_VkWin32SurfaceCreateInfoKHR();
- void initialize(const VkWin32SurfaceCreateInfoKHR* in_struct);
- void initialize(const safe_VkWin32SurfaceCreateInfoKHR* src);
- VkWin32SurfaceCreateInfoKHR *ptr() { return reinterpret_cast<VkWin32SurfaceCreateInfoKHR *>(this); }
- VkWin32SurfaceCreateInfoKHR const *ptr() const { return reinterpret_cast<VkWin32SurfaceCreateInfoKHR const *>(this); }
-};
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-struct safe_VkImportMemoryWin32HandleInfoKHR {
- VkStructureType sType;
- const void* pNext;
- VkExternalMemoryHandleTypeFlagBits handleType;
- HANDLE handle;
- LPCWSTR name;
- safe_VkImportMemoryWin32HandleInfoKHR(const VkImportMemoryWin32HandleInfoKHR* in_struct);
- safe_VkImportMemoryWin32HandleInfoKHR(const safe_VkImportMemoryWin32HandleInfoKHR& src);
- safe_VkImportMemoryWin32HandleInfoKHR& operator=(const safe_VkImportMemoryWin32HandleInfoKHR& src);
- safe_VkImportMemoryWin32HandleInfoKHR();
- ~safe_VkImportMemoryWin32HandleInfoKHR();
- void initialize(const VkImportMemoryWin32HandleInfoKHR* in_struct);
- void initialize(const safe_VkImportMemoryWin32HandleInfoKHR* src);
- VkImportMemoryWin32HandleInfoKHR *ptr() { return reinterpret_cast<VkImportMemoryWin32HandleInfoKHR *>(this); }
- VkImportMemoryWin32HandleInfoKHR const *ptr() const { return reinterpret_cast<VkImportMemoryWin32HandleInfoKHR const *>(this); }
-};
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-struct safe_VkExportMemoryWin32HandleInfoKHR {
- VkStructureType sType;
- const void* pNext;
- const SECURITY_ATTRIBUTES* pAttributes;
- DWORD dwAccess;
- LPCWSTR name;
- safe_VkExportMemoryWin32HandleInfoKHR(const VkExportMemoryWin32HandleInfoKHR* in_struct);
- safe_VkExportMemoryWin32HandleInfoKHR(const safe_VkExportMemoryWin32HandleInfoKHR& src);
- safe_VkExportMemoryWin32HandleInfoKHR& operator=(const safe_VkExportMemoryWin32HandleInfoKHR& src);
- safe_VkExportMemoryWin32HandleInfoKHR();
- ~safe_VkExportMemoryWin32HandleInfoKHR();
- void initialize(const VkExportMemoryWin32HandleInfoKHR* in_struct);
- void initialize(const safe_VkExportMemoryWin32HandleInfoKHR* src);
- VkExportMemoryWin32HandleInfoKHR *ptr() { return reinterpret_cast<VkExportMemoryWin32HandleInfoKHR *>(this); }
- VkExportMemoryWin32HandleInfoKHR const *ptr() const { return reinterpret_cast<VkExportMemoryWin32HandleInfoKHR const *>(this); }
-};
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-struct safe_VkMemoryWin32HandlePropertiesKHR {
- VkStructureType sType;
- void* pNext;
- uint32_t memoryTypeBits;
- safe_VkMemoryWin32HandlePropertiesKHR(const VkMemoryWin32HandlePropertiesKHR* in_struct);
- safe_VkMemoryWin32HandlePropertiesKHR(const safe_VkMemoryWin32HandlePropertiesKHR& src);
- safe_VkMemoryWin32HandlePropertiesKHR& operator=(const safe_VkMemoryWin32HandlePropertiesKHR& src);
- safe_VkMemoryWin32HandlePropertiesKHR();
- ~safe_VkMemoryWin32HandlePropertiesKHR();
- void initialize(const VkMemoryWin32HandlePropertiesKHR* in_struct);
- void initialize(const safe_VkMemoryWin32HandlePropertiesKHR* src);
- VkMemoryWin32HandlePropertiesKHR *ptr() { return reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>(this); }
- VkMemoryWin32HandlePropertiesKHR const *ptr() const { return reinterpret_cast<VkMemoryWin32HandlePropertiesKHR const *>(this); }
-};
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-struct safe_VkMemoryGetWin32HandleInfoKHR {
- VkStructureType sType;
- const void* pNext;
- VkDeviceMemory memory;
- VkExternalMemoryHandleTypeFlagBits handleType;
- safe_VkMemoryGetWin32HandleInfoKHR(const VkMemoryGetWin32HandleInfoKHR* in_struct);
- safe_VkMemoryGetWin32HandleInfoKHR(const safe_VkMemoryGetWin32HandleInfoKHR& src);
- safe_VkMemoryGetWin32HandleInfoKHR& operator=(const safe_VkMemoryGetWin32HandleInfoKHR& src);
- safe_VkMemoryGetWin32HandleInfoKHR();
- ~safe_VkMemoryGetWin32HandleInfoKHR();
- void initialize(const VkMemoryGetWin32HandleInfoKHR* in_struct);
- void initialize(const safe_VkMemoryGetWin32HandleInfoKHR* src);
- VkMemoryGetWin32HandleInfoKHR *ptr() { return reinterpret_cast<VkMemoryGetWin32HandleInfoKHR *>(this); }
- VkMemoryGetWin32HandleInfoKHR const *ptr() const { return reinterpret_cast<VkMemoryGetWin32HandleInfoKHR const *>(this); }
-};
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-struct safe_VkImportMemoryFdInfoKHR {
- VkStructureType sType;
- const void* pNext;
- VkExternalMemoryHandleTypeFlagBits handleType;
- int fd;
- safe_VkImportMemoryFdInfoKHR(const VkImportMemoryFdInfoKHR* in_struct);
- safe_VkImportMemoryFdInfoKHR(const safe_VkImportMemoryFdInfoKHR& src);
- safe_VkImportMemoryFdInfoKHR& operator=(const safe_VkImportMemoryFdInfoKHR& src);
- safe_VkImportMemoryFdInfoKHR();
- ~safe_VkImportMemoryFdInfoKHR();
- void initialize(const VkImportMemoryFdInfoKHR* in_struct);
- void initialize(const safe_VkImportMemoryFdInfoKHR* src);
- VkImportMemoryFdInfoKHR *ptr() { return reinterpret_cast<VkImportMemoryFdInfoKHR *>(this); }
- VkImportMemoryFdInfoKHR const *ptr() const { return reinterpret_cast<VkImportMemoryFdInfoKHR const *>(this); }
-};
-
-struct safe_VkMemoryFdPropertiesKHR {
- VkStructureType sType;
- void* pNext;
- uint32_t memoryTypeBits;
- safe_VkMemoryFdPropertiesKHR(const VkMemoryFdPropertiesKHR* in_struct);
- safe_VkMemoryFdPropertiesKHR(const safe_VkMemoryFdPropertiesKHR& src);
- safe_VkMemoryFdPropertiesKHR& operator=(const safe_VkMemoryFdPropertiesKHR& src);
- safe_VkMemoryFdPropertiesKHR();
- ~safe_VkMemoryFdPropertiesKHR();
- void initialize(const VkMemoryFdPropertiesKHR* in_struct);
- void initialize(const safe_VkMemoryFdPropertiesKHR* src);
- VkMemoryFdPropertiesKHR *ptr() { return reinterpret_cast<VkMemoryFdPropertiesKHR *>(this); }
- VkMemoryFdPropertiesKHR const *ptr() const { return reinterpret_cast<VkMemoryFdPropertiesKHR const *>(this); }
-};
-
-struct safe_VkMemoryGetFdInfoKHR {
- VkStructureType sType;
- const void* pNext;
- VkDeviceMemory memory;
- VkExternalMemoryHandleTypeFlagBits handleType;
- safe_VkMemoryGetFdInfoKHR(const VkMemoryGetFdInfoKHR* in_struct);
- safe_VkMemoryGetFdInfoKHR(const safe_VkMemoryGetFdInfoKHR& src);
- safe_VkMemoryGetFdInfoKHR& operator=(const safe_VkMemoryGetFdInfoKHR& src);
- safe_VkMemoryGetFdInfoKHR();
- ~safe_VkMemoryGetFdInfoKHR();
- void initialize(const VkMemoryGetFdInfoKHR* in_struct);
- void initialize(const safe_VkMemoryGetFdInfoKHR* src);
- VkMemoryGetFdInfoKHR *ptr() { return reinterpret_cast<VkMemoryGetFdInfoKHR *>(this); }
- VkMemoryGetFdInfoKHR const *ptr() const { return reinterpret_cast<VkMemoryGetFdInfoKHR const *>(this); }
-};
-
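The default constructor plus initialize() pair declared on these wrappers suggests a second usage pattern: a member of a longer-lived tracking object can be default-constructed and populated later. A hedged sketch follows; the ExternalMemoryRecord type and CaptureGetFdInfo function are invented for illustration, only the safe_VkMemoryGetFdInfoKHR member comes from the declarations above.

#include <vulkan/vulkan.h>
#include "vk_safe_struct.h"

// Hypothetical tracking record owning a deep copy of the last get-fd request.
struct ExternalMemoryRecord {
    safe_VkMemoryGetFdInfoKHR get_fd_info;  // default-constructed, filled later
};

static void CaptureGetFdInfo(ExternalMemoryRecord& rec, const VkMemoryGetFdInfoKHR* pGetFdInfo) {
    // initialize() is assumed to repopulate the wrapper from the raw struct,
    // mirroring what the converting constructor does.
    rec.get_fd_info.initialize(pGetFdInfo);
}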
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-struct safe_VkWin32KeyedMutexAcquireReleaseInfoKHR {
- VkStructureType sType;
- const void* pNext;
- uint32_t acquireCount;
- VkDeviceMemory* pAcquireSyncs;
- const uint64_t* pAcquireKeys;
- const uint32_t* pAcquireTimeouts;
- uint32_t releaseCount;
- VkDeviceMemory* pReleaseSyncs;
- const uint64_t* pReleaseKeys;
- safe_VkWin32KeyedMutexAcquireReleaseInfoKHR(const VkWin32KeyedMutexAcquireReleaseInfoKHR* in_struct);
- safe_VkWin32KeyedMutexAcquireReleaseInfoKHR(const safe_VkWin32KeyedMutexAcquireReleaseInfoKHR& src);
- safe_VkWin32KeyedMutexAcquireReleaseInfoKHR& operator=(const safe_VkWin32KeyedMutexAcquireReleaseInfoKHR& src);
- safe_VkWin32KeyedMutexAcquireReleaseInfoKHR();
- ~safe_VkWin32KeyedMutexAcquireReleaseInfoKHR();
- void initialize(const VkWin32KeyedMutexAcquireReleaseInfoKHR* in_struct);
- void initialize(const safe_VkWin32KeyedMutexAcquireReleaseInfoKHR* src);
- VkWin32KeyedMutexAcquireReleaseInfoKHR *ptr() { return reinterpret_cast<VkWin32KeyedMutexAcquireReleaseInfoKHR *>(this); }
- VkWin32KeyedMutexAcquireReleaseInfoKHR const *ptr() const { return reinterpret_cast<VkWin32KeyedMutexAcquireReleaseInfoKHR const *>(this); }
-};
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-struct safe_VkImportSemaphoreWin32HandleInfoKHR {
- VkStructureType sType;
- const void* pNext;
- VkSemaphore semaphore;
- VkSemaphoreImportFlags flags;
- VkExternalSemaphoreHandleTypeFlagBits handleType;
- HANDLE handle;
- LPCWSTR name;
- safe_VkImportSemaphoreWin32HandleInfoKHR(const VkImportSemaphoreWin32HandleInfoKHR* in_struct);
- safe_VkImportSemaphoreWin32HandleInfoKHR(const safe_VkImportSemaphoreWin32HandleInfoKHR& src);
- safe_VkImportSemaphoreWin32HandleInfoKHR& operator=(const safe_VkImportSemaphoreWin32HandleInfoKHR& src);
- safe_VkImportSemaphoreWin32HandleInfoKHR();
- ~safe_VkImportSemaphoreWin32HandleInfoKHR();
- void initialize(const VkImportSemaphoreWin32HandleInfoKHR* in_struct);
- void initialize(const safe_VkImportSemaphoreWin32HandleInfoKHR* src);
- VkImportSemaphoreWin32HandleInfoKHR *ptr() { return reinterpret_cast<VkImportSemaphoreWin32HandleInfoKHR *>(this); }
- VkImportSemaphoreWin32HandleInfoKHR const *ptr() const { return reinterpret_cast<VkImportSemaphoreWin32HandleInfoKHR const *>(this); }
-};
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-struct safe_VkExportSemaphoreWin32HandleInfoKHR {
- VkStructureType sType;
- const void* pNext;
- const SECURITY_ATTRIBUTES* pAttributes;
- DWORD dwAccess;
- LPCWSTR name;
- safe_VkExportSemaphoreWin32HandleInfoKHR(const VkExportSemaphoreWin32HandleInfoKHR* in_struct);
- safe_VkExportSemaphoreWin32HandleInfoKHR(const safe_VkExportSemaphoreWin32HandleInfoKHR& src);
- safe_VkExportSemaphoreWin32HandleInfoKHR& operator=(const safe_VkExportSemaphoreWin32HandleInfoKHR& src);
- safe_VkExportSemaphoreWin32HandleInfoKHR();
- ~safe_VkExportSemaphoreWin32HandleInfoKHR();
- void initialize(const VkExportSemaphoreWin32HandleInfoKHR* in_struct);
- void initialize(const safe_VkExportSemaphoreWin32HandleInfoKHR* src);
- VkExportSemaphoreWin32HandleInfoKHR *ptr() { return reinterpret_cast<VkExportSemaphoreWin32HandleInfoKHR *>(this); }
- VkExportSemaphoreWin32HandleInfoKHR const *ptr() const { return reinterpret_cast<VkExportSemaphoreWin32HandleInfoKHR const *>(this); }
-};
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-struct safe_VkD3D12FenceSubmitInfoKHR {
- VkStructureType sType;
- const void* pNext;
- uint32_t waitSemaphoreValuesCount;
- const uint64_t* pWaitSemaphoreValues;
- uint32_t signalSemaphoreValuesCount;
- const uint64_t* pSignalSemaphoreValues;
- safe_VkD3D12FenceSubmitInfoKHR(const VkD3D12FenceSubmitInfoKHR* in_struct);
- safe_VkD3D12FenceSubmitInfoKHR(const safe_VkD3D12FenceSubmitInfoKHR& src);
- safe_VkD3D12FenceSubmitInfoKHR& operator=(const safe_VkD3D12FenceSubmitInfoKHR& src);
- safe_VkD3D12FenceSubmitInfoKHR();
- ~safe_VkD3D12FenceSubmitInfoKHR();
- void initialize(const VkD3D12FenceSubmitInfoKHR* in_struct);
- void initialize(const safe_VkD3D12FenceSubmitInfoKHR* src);
- VkD3D12FenceSubmitInfoKHR *ptr() { return reinterpret_cast<VkD3D12FenceSubmitInfoKHR *>(this); }
- VkD3D12FenceSubmitInfoKHR const *ptr() const { return reinterpret_cast<VkD3D12FenceSubmitInfoKHR const *>(this); }
-};
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-struct safe_VkSemaphoreGetWin32HandleInfoKHR {
- VkStructureType sType;
- const void* pNext;
- VkSemaphore semaphore;
- VkExternalSemaphoreHandleTypeFlagBits handleType;
- safe_VkSemaphoreGetWin32HandleInfoKHR(const VkSemaphoreGetWin32HandleInfoKHR* in_struct);
- safe_VkSemaphoreGetWin32HandleInfoKHR(const safe_VkSemaphoreGetWin32HandleInfoKHR& src);
- safe_VkSemaphoreGetWin32HandleInfoKHR& operator=(const safe_VkSemaphoreGetWin32HandleInfoKHR& src);
- safe_VkSemaphoreGetWin32HandleInfoKHR();
- ~safe_VkSemaphoreGetWin32HandleInfoKHR();
- void initialize(const VkSemaphoreGetWin32HandleInfoKHR* in_struct);
- void initialize(const safe_VkSemaphoreGetWin32HandleInfoKHR* src);
- VkSemaphoreGetWin32HandleInfoKHR *ptr() { return reinterpret_cast<VkSemaphoreGetWin32HandleInfoKHR *>(this); }
- VkSemaphoreGetWin32HandleInfoKHR const *ptr() const { return reinterpret_cast<VkSemaphoreGetWin32HandleInfoKHR const *>(this); }
-};
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-struct safe_VkImportSemaphoreFdInfoKHR {
- VkStructureType sType;
- const void* pNext;
- VkSemaphore semaphore;
- VkSemaphoreImportFlags flags;
- VkExternalSemaphoreHandleTypeFlagBits handleType;
- int fd;
- safe_VkImportSemaphoreFdInfoKHR(const VkImportSemaphoreFdInfoKHR* in_struct);
- safe_VkImportSemaphoreFdInfoKHR(const safe_VkImportSemaphoreFdInfoKHR& src);
- safe_VkImportSemaphoreFdInfoKHR& operator=(const safe_VkImportSemaphoreFdInfoKHR& src);
- safe_VkImportSemaphoreFdInfoKHR();
- ~safe_VkImportSemaphoreFdInfoKHR();
- void initialize(const VkImportSemaphoreFdInfoKHR* in_struct);
- void initialize(const safe_VkImportSemaphoreFdInfoKHR* src);
- VkImportSemaphoreFdInfoKHR *ptr() { return reinterpret_cast<VkImportSemaphoreFdInfoKHR *>(this); }
- VkImportSemaphoreFdInfoKHR const *ptr() const { return reinterpret_cast<VkImportSemaphoreFdInfoKHR const *>(this); }
-};
-
-struct safe_VkSemaphoreGetFdInfoKHR {
- VkStructureType sType;
- const void* pNext;
- VkSemaphore semaphore;
- VkExternalSemaphoreHandleTypeFlagBits handleType;
- safe_VkSemaphoreGetFdInfoKHR(const VkSemaphoreGetFdInfoKHR* in_struct);
- safe_VkSemaphoreGetFdInfoKHR(const safe_VkSemaphoreGetFdInfoKHR& src);
- safe_VkSemaphoreGetFdInfoKHR& operator=(const safe_VkSemaphoreGetFdInfoKHR& src);
- safe_VkSemaphoreGetFdInfoKHR();
- ~safe_VkSemaphoreGetFdInfoKHR();
- void initialize(const VkSemaphoreGetFdInfoKHR* in_struct);
- void initialize(const safe_VkSemaphoreGetFdInfoKHR* src);
- VkSemaphoreGetFdInfoKHR *ptr() { return reinterpret_cast<VkSemaphoreGetFdInfoKHR *>(this); }
- VkSemaphoreGetFdInfoKHR const *ptr() const { return reinterpret_cast<VkSemaphoreGetFdInfoKHR const *>(this); }
-};
-
-struct safe_VkPhysicalDevicePushDescriptorPropertiesKHR {
- VkStructureType sType;
- void* pNext;
- uint32_t maxPushDescriptors;
- safe_VkPhysicalDevicePushDescriptorPropertiesKHR(const VkPhysicalDevicePushDescriptorPropertiesKHR* in_struct);
- safe_VkPhysicalDevicePushDescriptorPropertiesKHR(const safe_VkPhysicalDevicePushDescriptorPropertiesKHR& src);
- safe_VkPhysicalDevicePushDescriptorPropertiesKHR& operator=(const safe_VkPhysicalDevicePushDescriptorPropertiesKHR& src);
- safe_VkPhysicalDevicePushDescriptorPropertiesKHR();
- ~safe_VkPhysicalDevicePushDescriptorPropertiesKHR();
- void initialize(const VkPhysicalDevicePushDescriptorPropertiesKHR* in_struct);
- void initialize(const safe_VkPhysicalDevicePushDescriptorPropertiesKHR* src);
- VkPhysicalDevicePushDescriptorPropertiesKHR *ptr() { return reinterpret_cast<VkPhysicalDevicePushDescriptorPropertiesKHR *>(this); }
- VkPhysicalDevicePushDescriptorPropertiesKHR const *ptr() const { return reinterpret_cast<VkPhysicalDevicePushDescriptorPropertiesKHR const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR {
- VkStructureType sType;
- void* pNext;
- VkBool32 shaderFloat16;
- VkBool32 shaderInt8;
- safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR(const VkPhysicalDeviceShaderFloat16Int8FeaturesKHR* in_struct);
- safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR(const safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR& src);
- safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR& operator=(const safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR& src);
- safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR();
- ~safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR();
- void initialize(const VkPhysicalDeviceShaderFloat16Int8FeaturesKHR* in_struct);
- void initialize(const safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR* src);
- VkPhysicalDeviceShaderFloat16Int8FeaturesKHR *ptr() { return reinterpret_cast<VkPhysicalDeviceShaderFloat16Int8FeaturesKHR *>(this); }
- VkPhysicalDeviceShaderFloat16Int8FeaturesKHR const *ptr() const { return reinterpret_cast<VkPhysicalDeviceShaderFloat16Int8FeaturesKHR const *>(this); }
-};
-
-struct safe_VkPresentRegionKHR {
- uint32_t rectangleCount;
- const VkRectLayerKHR* pRectangles;
- safe_VkPresentRegionKHR(const VkPresentRegionKHR* in_struct);
- safe_VkPresentRegionKHR(const safe_VkPresentRegionKHR& src);
- safe_VkPresentRegionKHR& operator=(const safe_VkPresentRegionKHR& src);
- safe_VkPresentRegionKHR();
- ~safe_VkPresentRegionKHR();
- void initialize(const VkPresentRegionKHR* in_struct);
- void initialize(const safe_VkPresentRegionKHR* src);
- VkPresentRegionKHR *ptr() { return reinterpret_cast<VkPresentRegionKHR *>(this); }
- VkPresentRegionKHR const *ptr() const { return reinterpret_cast<VkPresentRegionKHR const *>(this); }
-};
-
-struct safe_VkPresentRegionsKHR {
- VkStructureType sType;
- const void* pNext;
- uint32_t swapchainCount;
- safe_VkPresentRegionKHR* pRegions;
- safe_VkPresentRegionsKHR(const VkPresentRegionsKHR* in_struct);
- safe_VkPresentRegionsKHR(const safe_VkPresentRegionsKHR& src);
- safe_VkPresentRegionsKHR& operator=(const safe_VkPresentRegionsKHR& src);
- safe_VkPresentRegionsKHR();
- ~safe_VkPresentRegionsKHR();
- void initialize(const VkPresentRegionsKHR* in_struct);
- void initialize(const safe_VkPresentRegionsKHR* src);
- VkPresentRegionsKHR *ptr() { return reinterpret_cast<VkPresentRegionsKHR *>(this); }
- VkPresentRegionsKHR const *ptr() const { return reinterpret_cast<VkPresentRegionsKHR const *>(this); }
-};
-
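Note that pRegions above is declared as an array of the safe wrapper type, so each element carries its own copy of pRectangles. A short, hypothetical sketch of walking such a deep-copied structure (the CountPresentRectangles helper is invented for illustration):

#include <cstdint>
#include <vulkan/vulkan.h>
#include "vk_safe_struct.h"

// Hypothetical helper: total up the dirty rectangles across all swapchains in
// a captured present-regions struct.
static uint32_t CountPresentRectangles(const safe_VkPresentRegionsKHR& regions) {
    uint32_t total = 0;
    if (regions.pRegions == nullptr) return total;
    for (uint32_t i = 0; i < regions.swapchainCount; ++i) {
        total += regions.pRegions[i].rectangleCount;
    }
    return total;
}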
-struct safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR {
- VkStructureType sType;
- void* pNext;
- VkBool32 imagelessFramebuffer;
- safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR(const VkPhysicalDeviceImagelessFramebufferFeaturesKHR* in_struct);
- safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR(const safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR& src);
- safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR& operator=(const safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR& src);
- safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR();
- ~safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR();
- void initialize(const VkPhysicalDeviceImagelessFramebufferFeaturesKHR* in_struct);
- void initialize(const safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR* src);
- VkPhysicalDeviceImagelessFramebufferFeaturesKHR *ptr() { return reinterpret_cast<VkPhysicalDeviceImagelessFramebufferFeaturesKHR *>(this); }
- VkPhysicalDeviceImagelessFramebufferFeaturesKHR const *ptr() const { return reinterpret_cast<VkPhysicalDeviceImagelessFramebufferFeaturesKHR const *>(this); }
-};
-
-struct safe_VkFramebufferAttachmentImageInfoKHR {
- VkStructureType sType;
- const void* pNext;
- VkImageCreateFlags flags;
- VkImageUsageFlags usage;
- uint32_t width;
- uint32_t height;
- uint32_t layerCount;
- uint32_t viewFormatCount;
- const VkFormat* pViewFormats;
- safe_VkFramebufferAttachmentImageInfoKHR(const VkFramebufferAttachmentImageInfoKHR* in_struct);
- safe_VkFramebufferAttachmentImageInfoKHR(const safe_VkFramebufferAttachmentImageInfoKHR& src);
- safe_VkFramebufferAttachmentImageInfoKHR& operator=(const safe_VkFramebufferAttachmentImageInfoKHR& src);
- safe_VkFramebufferAttachmentImageInfoKHR();
- ~safe_VkFramebufferAttachmentImageInfoKHR();
- void initialize(const VkFramebufferAttachmentImageInfoKHR* in_struct);
- void initialize(const safe_VkFramebufferAttachmentImageInfoKHR* src);
- VkFramebufferAttachmentImageInfoKHR *ptr() { return reinterpret_cast<VkFramebufferAttachmentImageInfoKHR *>(this); }
- VkFramebufferAttachmentImageInfoKHR const *ptr() const { return reinterpret_cast<VkFramebufferAttachmentImageInfoKHR const *>(this); }
-};
-
-struct safe_VkFramebufferAttachmentsCreateInfoKHR {
- VkStructureType sType;
- const void* pNext;
- uint32_t attachmentImageInfoCount;
- safe_VkFramebufferAttachmentImageInfoKHR* pAttachmentImageInfos;
- safe_VkFramebufferAttachmentsCreateInfoKHR(const VkFramebufferAttachmentsCreateInfoKHR* in_struct);
- safe_VkFramebufferAttachmentsCreateInfoKHR(const safe_VkFramebufferAttachmentsCreateInfoKHR& src);
- safe_VkFramebufferAttachmentsCreateInfoKHR& operator=(const safe_VkFramebufferAttachmentsCreateInfoKHR& src);
- safe_VkFramebufferAttachmentsCreateInfoKHR();
- ~safe_VkFramebufferAttachmentsCreateInfoKHR();
- void initialize(const VkFramebufferAttachmentsCreateInfoKHR* in_struct);
- void initialize(const safe_VkFramebufferAttachmentsCreateInfoKHR* src);
- VkFramebufferAttachmentsCreateInfoKHR *ptr() { return reinterpret_cast<VkFramebufferAttachmentsCreateInfoKHR *>(this); }
- VkFramebufferAttachmentsCreateInfoKHR const *ptr() const { return reinterpret_cast<VkFramebufferAttachmentsCreateInfoKHR const *>(this); }
-};
-
-struct safe_VkRenderPassAttachmentBeginInfoKHR {
- VkStructureType sType;
- const void* pNext;
- uint32_t attachmentCount;
- VkImageView* pAttachments;
- safe_VkRenderPassAttachmentBeginInfoKHR(const VkRenderPassAttachmentBeginInfoKHR* in_struct);
- safe_VkRenderPassAttachmentBeginInfoKHR(const safe_VkRenderPassAttachmentBeginInfoKHR& src);
- safe_VkRenderPassAttachmentBeginInfoKHR& operator=(const safe_VkRenderPassAttachmentBeginInfoKHR& src);
- safe_VkRenderPassAttachmentBeginInfoKHR();
- ~safe_VkRenderPassAttachmentBeginInfoKHR();
- void initialize(const VkRenderPassAttachmentBeginInfoKHR* in_struct);
- void initialize(const safe_VkRenderPassAttachmentBeginInfoKHR* src);
- VkRenderPassAttachmentBeginInfoKHR *ptr() { return reinterpret_cast<VkRenderPassAttachmentBeginInfoKHR *>(this); }
- VkRenderPassAttachmentBeginInfoKHR const *ptr() const { return reinterpret_cast<VkRenderPassAttachmentBeginInfoKHR const *>(this); }
-};
-
-struct safe_VkAttachmentDescription2KHR {
- VkStructureType sType;
- const void* pNext;
- VkAttachmentDescriptionFlags flags;
- VkFormat format;
- VkSampleCountFlagBits samples;
- VkAttachmentLoadOp loadOp;
- VkAttachmentStoreOp storeOp;
- VkAttachmentLoadOp stencilLoadOp;
- VkAttachmentStoreOp stencilStoreOp;
- VkImageLayout initialLayout;
- VkImageLayout finalLayout;
- safe_VkAttachmentDescription2KHR(const VkAttachmentDescription2KHR* in_struct);
- safe_VkAttachmentDescription2KHR(const safe_VkAttachmentDescription2KHR& src);
- safe_VkAttachmentDescription2KHR& operator=(const safe_VkAttachmentDescription2KHR& src);
- safe_VkAttachmentDescription2KHR();
- ~safe_VkAttachmentDescription2KHR();
- void initialize(const VkAttachmentDescription2KHR* in_struct);
- void initialize(const safe_VkAttachmentDescription2KHR* src);
- VkAttachmentDescription2KHR *ptr() { return reinterpret_cast<VkAttachmentDescription2KHR *>(this); }
- VkAttachmentDescription2KHR const *ptr() const { return reinterpret_cast<VkAttachmentDescription2KHR const *>(this); }
-};
-
-struct safe_VkAttachmentReference2KHR {
- VkStructureType sType;
- const void* pNext;
- uint32_t attachment;
- VkImageLayout layout;
- VkImageAspectFlags aspectMask;
- safe_VkAttachmentReference2KHR(const VkAttachmentReference2KHR* in_struct);
- safe_VkAttachmentReference2KHR(const safe_VkAttachmentReference2KHR& src);
- safe_VkAttachmentReference2KHR& operator=(const safe_VkAttachmentReference2KHR& src);
- safe_VkAttachmentReference2KHR();
- ~safe_VkAttachmentReference2KHR();
- void initialize(const VkAttachmentReference2KHR* in_struct);
- void initialize(const safe_VkAttachmentReference2KHR* src);
- VkAttachmentReference2KHR *ptr() { return reinterpret_cast<VkAttachmentReference2KHR *>(this); }
- VkAttachmentReference2KHR const *ptr() const { return reinterpret_cast<VkAttachmentReference2KHR const *>(this); }
-};
-
-struct safe_VkSubpassDescription2KHR {
- VkStructureType sType;
- const void* pNext;
- VkSubpassDescriptionFlags flags;
- VkPipelineBindPoint pipelineBindPoint;
- uint32_t viewMask;
- uint32_t inputAttachmentCount;
- safe_VkAttachmentReference2KHR* pInputAttachments;
- uint32_t colorAttachmentCount;
- safe_VkAttachmentReference2KHR* pColorAttachments;
- safe_VkAttachmentReference2KHR* pResolveAttachments;
- safe_VkAttachmentReference2KHR* pDepthStencilAttachment;
- uint32_t preserveAttachmentCount;
- const uint32_t* pPreserveAttachments;
- safe_VkSubpassDescription2KHR(const VkSubpassDescription2KHR* in_struct);
- safe_VkSubpassDescription2KHR(const safe_VkSubpassDescription2KHR& src);
- safe_VkSubpassDescription2KHR& operator=(const safe_VkSubpassDescription2KHR& src);
- safe_VkSubpassDescription2KHR();
- ~safe_VkSubpassDescription2KHR();
- void initialize(const VkSubpassDescription2KHR* in_struct);
- void initialize(const safe_VkSubpassDescription2KHR* src);
- VkSubpassDescription2KHR *ptr() { return reinterpret_cast<VkSubpassDescription2KHR *>(this); }
- VkSubpassDescription2KHR const *ptr() const { return reinterpret_cast<VkSubpassDescription2KHR const *>(this); }
-};
-
-struct safe_VkSubpassDependency2KHR {
- VkStructureType sType;
- const void* pNext;
- uint32_t srcSubpass;
- uint32_t dstSubpass;
- VkPipelineStageFlags srcStageMask;
- VkPipelineStageFlags dstStageMask;
- VkAccessFlags srcAccessMask;
- VkAccessFlags dstAccessMask;
- VkDependencyFlags dependencyFlags;
- int32_t viewOffset;
- safe_VkSubpassDependency2KHR(const VkSubpassDependency2KHR* in_struct);
- safe_VkSubpassDependency2KHR(const safe_VkSubpassDependency2KHR& src);
- safe_VkSubpassDependency2KHR& operator=(const safe_VkSubpassDependency2KHR& src);
- safe_VkSubpassDependency2KHR();
- ~safe_VkSubpassDependency2KHR();
- void initialize(const VkSubpassDependency2KHR* in_struct);
- void initialize(const safe_VkSubpassDependency2KHR* src);
- VkSubpassDependency2KHR *ptr() { return reinterpret_cast<VkSubpassDependency2KHR *>(this); }
- VkSubpassDependency2KHR const *ptr() const { return reinterpret_cast<VkSubpassDependency2KHR const *>(this); }
-};
-
-struct safe_VkRenderPassCreateInfo2KHR {
- VkStructureType sType;
- const void* pNext;
- VkRenderPassCreateFlags flags;
- uint32_t attachmentCount;
- safe_VkAttachmentDescription2KHR* pAttachments;
- uint32_t subpassCount;
- safe_VkSubpassDescription2KHR* pSubpasses;
- uint32_t dependencyCount;
- safe_VkSubpassDependency2KHR* pDependencies;
- uint32_t correlatedViewMaskCount;
- const uint32_t* pCorrelatedViewMasks;
- safe_VkRenderPassCreateInfo2KHR(const VkRenderPassCreateInfo2KHR* in_struct);
- safe_VkRenderPassCreateInfo2KHR(const safe_VkRenderPassCreateInfo2KHR& src);
- safe_VkRenderPassCreateInfo2KHR& operator=(const safe_VkRenderPassCreateInfo2KHR& src);
- safe_VkRenderPassCreateInfo2KHR();
- ~safe_VkRenderPassCreateInfo2KHR();
- void initialize(const VkRenderPassCreateInfo2KHR* in_struct);
- void initialize(const safe_VkRenderPassCreateInfo2KHR* src);
- VkRenderPassCreateInfo2KHR *ptr() { return reinterpret_cast<VkRenderPassCreateInfo2KHR *>(this); }
- VkRenderPassCreateInfo2KHR const *ptr() const { return reinterpret_cast<VkRenderPassCreateInfo2KHR const *>(this); }
-};
-
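Because pAttachments, pSubpasses, and pDependencies above are themselves arrays of safe wrappers, a copied safe_VkRenderPassCreateInfo2KHR owns the entire create-info tree. A hedged sketch of querying such a copy (the AnySubpassHasDepthStencil function is invented for illustration):

#include <vulkan/vulkan.h>
#include "vk_safe_struct.h"

// Hypothetical query over a deep-copied render pass description: each
// safe_VkSubpassDescription2KHR element is assumed to own its own
// pDepthStencilAttachment copy, per the declarations above.
static bool AnySubpassHasDepthStencil(const safe_VkRenderPassCreateInfo2KHR& rp) {
    for (uint32_t i = 0; i < rp.subpassCount; ++i) {
        if (rp.pSubpasses[i].pDepthStencilAttachment != nullptr) {
            return true;
        }
    }
    return false;
}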
-struct safe_VkSubpassBeginInfoKHR {
- VkStructureType sType;
- const void* pNext;
- VkSubpassContents contents;
- safe_VkSubpassBeginInfoKHR(const VkSubpassBeginInfoKHR* in_struct);
- safe_VkSubpassBeginInfoKHR(const safe_VkSubpassBeginInfoKHR& src);
- safe_VkSubpassBeginInfoKHR& operator=(const safe_VkSubpassBeginInfoKHR& src);
- safe_VkSubpassBeginInfoKHR();
- ~safe_VkSubpassBeginInfoKHR();
- void initialize(const VkSubpassBeginInfoKHR* in_struct);
- void initialize(const safe_VkSubpassBeginInfoKHR* src);
- VkSubpassBeginInfoKHR *ptr() { return reinterpret_cast<VkSubpassBeginInfoKHR *>(this); }
- VkSubpassBeginInfoKHR const *ptr() const { return reinterpret_cast<VkSubpassBeginInfoKHR const *>(this); }
-};
-
-struct safe_VkSubpassEndInfoKHR {
- VkStructureType sType;
- const void* pNext;
- safe_VkSubpassEndInfoKHR(const VkSubpassEndInfoKHR* in_struct);
- safe_VkSubpassEndInfoKHR(const safe_VkSubpassEndInfoKHR& src);
- safe_VkSubpassEndInfoKHR& operator=(const safe_VkSubpassEndInfoKHR& src);
- safe_VkSubpassEndInfoKHR();
- ~safe_VkSubpassEndInfoKHR();
- void initialize(const VkSubpassEndInfoKHR* in_struct);
- void initialize(const safe_VkSubpassEndInfoKHR* src);
- VkSubpassEndInfoKHR *ptr() { return reinterpret_cast<VkSubpassEndInfoKHR *>(this); }
- VkSubpassEndInfoKHR const *ptr() const { return reinterpret_cast<VkSubpassEndInfoKHR const *>(this); }
-};
-
-struct safe_VkSharedPresentSurfaceCapabilitiesKHR {
- VkStructureType sType;
- void* pNext;
- VkImageUsageFlags sharedPresentSupportedUsageFlags;
- safe_VkSharedPresentSurfaceCapabilitiesKHR(const VkSharedPresentSurfaceCapabilitiesKHR* in_struct);
- safe_VkSharedPresentSurfaceCapabilitiesKHR(const safe_VkSharedPresentSurfaceCapabilitiesKHR& src);
- safe_VkSharedPresentSurfaceCapabilitiesKHR& operator=(const safe_VkSharedPresentSurfaceCapabilitiesKHR& src);
- safe_VkSharedPresentSurfaceCapabilitiesKHR();
- ~safe_VkSharedPresentSurfaceCapabilitiesKHR();
- void initialize(const VkSharedPresentSurfaceCapabilitiesKHR* in_struct);
- void initialize(const safe_VkSharedPresentSurfaceCapabilitiesKHR* src);
- VkSharedPresentSurfaceCapabilitiesKHR *ptr() { return reinterpret_cast<VkSharedPresentSurfaceCapabilitiesKHR *>(this); }
- VkSharedPresentSurfaceCapabilitiesKHR const *ptr() const { return reinterpret_cast<VkSharedPresentSurfaceCapabilitiesKHR const *>(this); }
-};
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-struct safe_VkImportFenceWin32HandleInfoKHR {
- VkStructureType sType;
- const void* pNext;
- VkFence fence;
- VkFenceImportFlags flags;
- VkExternalFenceHandleTypeFlagBits handleType;
- HANDLE handle;
- LPCWSTR name;
- safe_VkImportFenceWin32HandleInfoKHR(const VkImportFenceWin32HandleInfoKHR* in_struct);
- safe_VkImportFenceWin32HandleInfoKHR(const safe_VkImportFenceWin32HandleInfoKHR& src);
- safe_VkImportFenceWin32HandleInfoKHR& operator=(const safe_VkImportFenceWin32HandleInfoKHR& src);
- safe_VkImportFenceWin32HandleInfoKHR();
- ~safe_VkImportFenceWin32HandleInfoKHR();
- void initialize(const VkImportFenceWin32HandleInfoKHR* in_struct);
- void initialize(const safe_VkImportFenceWin32HandleInfoKHR* src);
- VkImportFenceWin32HandleInfoKHR *ptr() { return reinterpret_cast<VkImportFenceWin32HandleInfoKHR *>(this); }
- VkImportFenceWin32HandleInfoKHR const *ptr() const { return reinterpret_cast<VkImportFenceWin32HandleInfoKHR const *>(this); }
-};
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-struct safe_VkExportFenceWin32HandleInfoKHR {
- VkStructureType sType;
- const void* pNext;
- const SECURITY_ATTRIBUTES* pAttributes;
- DWORD dwAccess;
- LPCWSTR name;
- safe_VkExportFenceWin32HandleInfoKHR(const VkExportFenceWin32HandleInfoKHR* in_struct);
- safe_VkExportFenceWin32HandleInfoKHR(const safe_VkExportFenceWin32HandleInfoKHR& src);
- safe_VkExportFenceWin32HandleInfoKHR& operator=(const safe_VkExportFenceWin32HandleInfoKHR& src);
- safe_VkExportFenceWin32HandleInfoKHR();
- ~safe_VkExportFenceWin32HandleInfoKHR();
- void initialize(const VkExportFenceWin32HandleInfoKHR* in_struct);
- void initialize(const safe_VkExportFenceWin32HandleInfoKHR* src);
- VkExportFenceWin32HandleInfoKHR *ptr() { return reinterpret_cast<VkExportFenceWin32HandleInfoKHR *>(this); }
- VkExportFenceWin32HandleInfoKHR const *ptr() const { return reinterpret_cast<VkExportFenceWin32HandleInfoKHR const *>(this); }
-};
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-struct safe_VkFenceGetWin32HandleInfoKHR {
- VkStructureType sType;
- const void* pNext;
- VkFence fence;
- VkExternalFenceHandleTypeFlagBits handleType;
- safe_VkFenceGetWin32HandleInfoKHR(const VkFenceGetWin32HandleInfoKHR* in_struct);
- safe_VkFenceGetWin32HandleInfoKHR(const safe_VkFenceGetWin32HandleInfoKHR& src);
- safe_VkFenceGetWin32HandleInfoKHR& operator=(const safe_VkFenceGetWin32HandleInfoKHR& src);
- safe_VkFenceGetWin32HandleInfoKHR();
- ~safe_VkFenceGetWin32HandleInfoKHR();
- void initialize(const VkFenceGetWin32HandleInfoKHR* in_struct);
- void initialize(const safe_VkFenceGetWin32HandleInfoKHR* src);
- VkFenceGetWin32HandleInfoKHR *ptr() { return reinterpret_cast<VkFenceGetWin32HandleInfoKHR *>(this); }
- VkFenceGetWin32HandleInfoKHR const *ptr() const { return reinterpret_cast<VkFenceGetWin32HandleInfoKHR const *>(this); }
-};
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-struct safe_VkImportFenceFdInfoKHR {
- VkStructureType sType;
- const void* pNext;
- VkFence fence;
- VkFenceImportFlags flags;
- VkExternalFenceHandleTypeFlagBits handleType;
- int fd;
- safe_VkImportFenceFdInfoKHR(const VkImportFenceFdInfoKHR* in_struct);
- safe_VkImportFenceFdInfoKHR(const safe_VkImportFenceFdInfoKHR& src);
- safe_VkImportFenceFdInfoKHR& operator=(const safe_VkImportFenceFdInfoKHR& src);
- safe_VkImportFenceFdInfoKHR();
- ~safe_VkImportFenceFdInfoKHR();
- void initialize(const VkImportFenceFdInfoKHR* in_struct);
- void initialize(const safe_VkImportFenceFdInfoKHR* src);
- VkImportFenceFdInfoKHR *ptr() { return reinterpret_cast<VkImportFenceFdInfoKHR *>(this); }
- VkImportFenceFdInfoKHR const *ptr() const { return reinterpret_cast<VkImportFenceFdInfoKHR const *>(this); }
-};
-
-struct safe_VkFenceGetFdInfoKHR {
- VkStructureType sType;
- const void* pNext;
- VkFence fence;
- VkExternalFenceHandleTypeFlagBits handleType;
- safe_VkFenceGetFdInfoKHR(const VkFenceGetFdInfoKHR* in_struct);
- safe_VkFenceGetFdInfoKHR(const safe_VkFenceGetFdInfoKHR& src);
- safe_VkFenceGetFdInfoKHR& operator=(const safe_VkFenceGetFdInfoKHR& src);
- safe_VkFenceGetFdInfoKHR();
- ~safe_VkFenceGetFdInfoKHR();
- void initialize(const VkFenceGetFdInfoKHR* in_struct);
- void initialize(const safe_VkFenceGetFdInfoKHR* src);
- VkFenceGetFdInfoKHR *ptr() { return reinterpret_cast<VkFenceGetFdInfoKHR *>(this); }
- VkFenceGetFdInfoKHR const *ptr() const { return reinterpret_cast<VkFenceGetFdInfoKHR const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceSurfaceInfo2KHR {
- VkStructureType sType;
- const void* pNext;
- VkSurfaceKHR surface;
- safe_VkPhysicalDeviceSurfaceInfo2KHR(const VkPhysicalDeviceSurfaceInfo2KHR* in_struct);
- safe_VkPhysicalDeviceSurfaceInfo2KHR(const safe_VkPhysicalDeviceSurfaceInfo2KHR& src);
- safe_VkPhysicalDeviceSurfaceInfo2KHR& operator=(const safe_VkPhysicalDeviceSurfaceInfo2KHR& src);
- safe_VkPhysicalDeviceSurfaceInfo2KHR();
- ~safe_VkPhysicalDeviceSurfaceInfo2KHR();
- void initialize(const VkPhysicalDeviceSurfaceInfo2KHR* in_struct);
- void initialize(const safe_VkPhysicalDeviceSurfaceInfo2KHR* src);
- VkPhysicalDeviceSurfaceInfo2KHR *ptr() { return reinterpret_cast<VkPhysicalDeviceSurfaceInfo2KHR *>(this); }
- VkPhysicalDeviceSurfaceInfo2KHR const *ptr() const { return reinterpret_cast<VkPhysicalDeviceSurfaceInfo2KHR const *>(this); }
-};
-
-struct safe_VkSurfaceCapabilities2KHR {
- VkStructureType sType;
- void* pNext;
- VkSurfaceCapabilitiesKHR surfaceCapabilities;
- safe_VkSurfaceCapabilities2KHR(const VkSurfaceCapabilities2KHR* in_struct);
- safe_VkSurfaceCapabilities2KHR(const safe_VkSurfaceCapabilities2KHR& src);
- safe_VkSurfaceCapabilities2KHR& operator=(const safe_VkSurfaceCapabilities2KHR& src);
- safe_VkSurfaceCapabilities2KHR();
- ~safe_VkSurfaceCapabilities2KHR();
- void initialize(const VkSurfaceCapabilities2KHR* in_struct);
- void initialize(const safe_VkSurfaceCapabilities2KHR* src);
- VkSurfaceCapabilities2KHR *ptr() { return reinterpret_cast<VkSurfaceCapabilities2KHR *>(this); }
- VkSurfaceCapabilities2KHR const *ptr() const { return reinterpret_cast<VkSurfaceCapabilities2KHR const *>(this); }
-};
-
-struct safe_VkSurfaceFormat2KHR {
- VkStructureType sType;
- void* pNext;
- VkSurfaceFormatKHR surfaceFormat;
- safe_VkSurfaceFormat2KHR(const VkSurfaceFormat2KHR* in_struct);
- safe_VkSurfaceFormat2KHR(const safe_VkSurfaceFormat2KHR& src);
- safe_VkSurfaceFormat2KHR& operator=(const safe_VkSurfaceFormat2KHR& src);
- safe_VkSurfaceFormat2KHR();
- ~safe_VkSurfaceFormat2KHR();
- void initialize(const VkSurfaceFormat2KHR* in_struct);
- void initialize(const safe_VkSurfaceFormat2KHR* src);
- VkSurfaceFormat2KHR *ptr() { return reinterpret_cast<VkSurfaceFormat2KHR *>(this); }
- VkSurfaceFormat2KHR const *ptr() const { return reinterpret_cast<VkSurfaceFormat2KHR const *>(this); }
-};
-
-struct safe_VkDisplayProperties2KHR {
- VkStructureType sType;
- void* pNext;
- safe_VkDisplayPropertiesKHR displayProperties;
- safe_VkDisplayProperties2KHR(const VkDisplayProperties2KHR* in_struct);
- safe_VkDisplayProperties2KHR(const safe_VkDisplayProperties2KHR& src);
- safe_VkDisplayProperties2KHR& operator=(const safe_VkDisplayProperties2KHR& src);
- safe_VkDisplayProperties2KHR();
- ~safe_VkDisplayProperties2KHR();
- void initialize(const VkDisplayProperties2KHR* in_struct);
- void initialize(const safe_VkDisplayProperties2KHR* src);
- VkDisplayProperties2KHR *ptr() { return reinterpret_cast<VkDisplayProperties2KHR *>(this); }
- VkDisplayProperties2KHR const *ptr() const { return reinterpret_cast<VkDisplayProperties2KHR const *>(this); }
-};
-
-struct safe_VkDisplayPlaneProperties2KHR {
- VkStructureType sType;
- void* pNext;
- VkDisplayPlanePropertiesKHR displayPlaneProperties;
- safe_VkDisplayPlaneProperties2KHR(const VkDisplayPlaneProperties2KHR* in_struct);
- safe_VkDisplayPlaneProperties2KHR(const safe_VkDisplayPlaneProperties2KHR& src);
- safe_VkDisplayPlaneProperties2KHR& operator=(const safe_VkDisplayPlaneProperties2KHR& src);
- safe_VkDisplayPlaneProperties2KHR();
- ~safe_VkDisplayPlaneProperties2KHR();
- void initialize(const VkDisplayPlaneProperties2KHR* in_struct);
- void initialize(const safe_VkDisplayPlaneProperties2KHR* src);
- VkDisplayPlaneProperties2KHR *ptr() { return reinterpret_cast<VkDisplayPlaneProperties2KHR *>(this); }
- VkDisplayPlaneProperties2KHR const *ptr() const { return reinterpret_cast<VkDisplayPlaneProperties2KHR const *>(this); }
-};
-
-struct safe_VkDisplayModeProperties2KHR {
- VkStructureType sType;
- void* pNext;
- VkDisplayModePropertiesKHR displayModeProperties;
- safe_VkDisplayModeProperties2KHR(const VkDisplayModeProperties2KHR* in_struct);
- safe_VkDisplayModeProperties2KHR(const safe_VkDisplayModeProperties2KHR& src);
- safe_VkDisplayModeProperties2KHR& operator=(const safe_VkDisplayModeProperties2KHR& src);
- safe_VkDisplayModeProperties2KHR();
- ~safe_VkDisplayModeProperties2KHR();
- void initialize(const VkDisplayModeProperties2KHR* in_struct);
- void initialize(const safe_VkDisplayModeProperties2KHR* src);
- VkDisplayModeProperties2KHR *ptr() { return reinterpret_cast<VkDisplayModeProperties2KHR *>(this); }
- VkDisplayModeProperties2KHR const *ptr() const { return reinterpret_cast<VkDisplayModeProperties2KHR const *>(this); }
-};
-
-struct safe_VkDisplayPlaneInfo2KHR {
- VkStructureType sType;
- const void* pNext;
- VkDisplayModeKHR mode;
- uint32_t planeIndex;
- safe_VkDisplayPlaneInfo2KHR(const VkDisplayPlaneInfo2KHR* in_struct);
- safe_VkDisplayPlaneInfo2KHR(const safe_VkDisplayPlaneInfo2KHR& src);
- safe_VkDisplayPlaneInfo2KHR& operator=(const safe_VkDisplayPlaneInfo2KHR& src);
- safe_VkDisplayPlaneInfo2KHR();
- ~safe_VkDisplayPlaneInfo2KHR();
- void initialize(const VkDisplayPlaneInfo2KHR* in_struct);
- void initialize(const safe_VkDisplayPlaneInfo2KHR* src);
- VkDisplayPlaneInfo2KHR *ptr() { return reinterpret_cast<VkDisplayPlaneInfo2KHR *>(this); }
- VkDisplayPlaneInfo2KHR const *ptr() const { return reinterpret_cast<VkDisplayPlaneInfo2KHR const *>(this); }
-};
-
-struct safe_VkDisplayPlaneCapabilities2KHR {
- VkStructureType sType;
- void* pNext;
- VkDisplayPlaneCapabilitiesKHR capabilities;
- safe_VkDisplayPlaneCapabilities2KHR(const VkDisplayPlaneCapabilities2KHR* in_struct);
- safe_VkDisplayPlaneCapabilities2KHR(const safe_VkDisplayPlaneCapabilities2KHR& src);
- safe_VkDisplayPlaneCapabilities2KHR& operator=(const safe_VkDisplayPlaneCapabilities2KHR& src);
- safe_VkDisplayPlaneCapabilities2KHR();
- ~safe_VkDisplayPlaneCapabilities2KHR();
- void initialize(const VkDisplayPlaneCapabilities2KHR* in_struct);
- void initialize(const safe_VkDisplayPlaneCapabilities2KHR* src);
- VkDisplayPlaneCapabilities2KHR *ptr() { return reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>(this); }
- VkDisplayPlaneCapabilities2KHR const *ptr() const { return reinterpret_cast<VkDisplayPlaneCapabilities2KHR const *>(this); }
-};
-
-struct safe_VkImageFormatListCreateInfoKHR {
- VkStructureType sType;
- const void* pNext;
- uint32_t viewFormatCount;
- const VkFormat* pViewFormats;
- safe_VkImageFormatListCreateInfoKHR(const VkImageFormatListCreateInfoKHR* in_struct);
- safe_VkImageFormatListCreateInfoKHR(const safe_VkImageFormatListCreateInfoKHR& src);
- safe_VkImageFormatListCreateInfoKHR& operator=(const safe_VkImageFormatListCreateInfoKHR& src);
- safe_VkImageFormatListCreateInfoKHR();
- ~safe_VkImageFormatListCreateInfoKHR();
- void initialize(const VkImageFormatListCreateInfoKHR* in_struct);
- void initialize(const safe_VkImageFormatListCreateInfoKHR* src);
- VkImageFormatListCreateInfoKHR *ptr() { return reinterpret_cast<VkImageFormatListCreateInfoKHR *>(this); }
- VkImageFormatListCreateInfoKHR const *ptr() const { return reinterpret_cast<VkImageFormatListCreateInfoKHR const *>(this); }
-};
-
-struct safe_VkPhysicalDevice8BitStorageFeaturesKHR {
- VkStructureType sType;
- void* pNext;
- VkBool32 storageBuffer8BitAccess;
- VkBool32 uniformAndStorageBuffer8BitAccess;
- VkBool32 storagePushConstant8;
- safe_VkPhysicalDevice8BitStorageFeaturesKHR(const VkPhysicalDevice8BitStorageFeaturesKHR* in_struct);
- safe_VkPhysicalDevice8BitStorageFeaturesKHR(const safe_VkPhysicalDevice8BitStorageFeaturesKHR& src);
- safe_VkPhysicalDevice8BitStorageFeaturesKHR& operator=(const safe_VkPhysicalDevice8BitStorageFeaturesKHR& src);
- safe_VkPhysicalDevice8BitStorageFeaturesKHR();
- ~safe_VkPhysicalDevice8BitStorageFeaturesKHR();
- void initialize(const VkPhysicalDevice8BitStorageFeaturesKHR* in_struct);
- void initialize(const safe_VkPhysicalDevice8BitStorageFeaturesKHR* src);
- VkPhysicalDevice8BitStorageFeaturesKHR *ptr() { return reinterpret_cast<VkPhysicalDevice8BitStorageFeaturesKHR *>(this); }
- VkPhysicalDevice8BitStorageFeaturesKHR const *ptr() const { return reinterpret_cast<VkPhysicalDevice8BitStorageFeaturesKHR const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR {
- VkStructureType sType;
- void* pNext;
- VkBool32 shaderBufferInt64Atomics;
- VkBool32 shaderSharedInt64Atomics;
- safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR(const VkPhysicalDeviceShaderAtomicInt64FeaturesKHR* in_struct);
- safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR(const safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR& src);
- safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR& operator=(const safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR& src);
- safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR();
- ~safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR();
- void initialize(const VkPhysicalDeviceShaderAtomicInt64FeaturesKHR* in_struct);
- void initialize(const safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR* src);
- VkPhysicalDeviceShaderAtomicInt64FeaturesKHR *ptr() { return reinterpret_cast<VkPhysicalDeviceShaderAtomicInt64FeaturesKHR *>(this); }
- VkPhysicalDeviceShaderAtomicInt64FeaturesKHR const *ptr() const { return reinterpret_cast<VkPhysicalDeviceShaderAtomicInt64FeaturesKHR const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceDriverPropertiesKHR {
- VkStructureType sType;
- void* pNext;
- VkDriverIdKHR driverID;
- char driverName[VK_MAX_DRIVER_NAME_SIZE_KHR];
- char driverInfo[VK_MAX_DRIVER_INFO_SIZE_KHR];
- VkConformanceVersionKHR conformanceVersion;
- safe_VkPhysicalDeviceDriverPropertiesKHR(const VkPhysicalDeviceDriverPropertiesKHR* in_struct);
- safe_VkPhysicalDeviceDriverPropertiesKHR(const safe_VkPhysicalDeviceDriverPropertiesKHR& src);
- safe_VkPhysicalDeviceDriverPropertiesKHR& operator=(const safe_VkPhysicalDeviceDriverPropertiesKHR& src);
- safe_VkPhysicalDeviceDriverPropertiesKHR();
- ~safe_VkPhysicalDeviceDriverPropertiesKHR();
- void initialize(const VkPhysicalDeviceDriverPropertiesKHR* in_struct);
- void initialize(const safe_VkPhysicalDeviceDriverPropertiesKHR* src);
- VkPhysicalDeviceDriverPropertiesKHR *ptr() { return reinterpret_cast<VkPhysicalDeviceDriverPropertiesKHR *>(this); }
- VkPhysicalDeviceDriverPropertiesKHR const *ptr() const { return reinterpret_cast<VkPhysicalDeviceDriverPropertiesKHR const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceFloatControlsPropertiesKHR {
- VkStructureType sType;
- void* pNext;
- VkShaderFloatControlsIndependenceKHR denormBehaviorIndependence;
- VkShaderFloatControlsIndependenceKHR roundingModeIndependence;
- VkBool32 shaderSignedZeroInfNanPreserveFloat16;
- VkBool32 shaderSignedZeroInfNanPreserveFloat32;
- VkBool32 shaderSignedZeroInfNanPreserveFloat64;
- VkBool32 shaderDenormPreserveFloat16;
- VkBool32 shaderDenormPreserveFloat32;
- VkBool32 shaderDenormPreserveFloat64;
- VkBool32 shaderDenormFlushToZeroFloat16;
- VkBool32 shaderDenormFlushToZeroFloat32;
- VkBool32 shaderDenormFlushToZeroFloat64;
- VkBool32 shaderRoundingModeRTEFloat16;
- VkBool32 shaderRoundingModeRTEFloat32;
- VkBool32 shaderRoundingModeRTEFloat64;
- VkBool32 shaderRoundingModeRTZFloat16;
- VkBool32 shaderRoundingModeRTZFloat32;
- VkBool32 shaderRoundingModeRTZFloat64;
- safe_VkPhysicalDeviceFloatControlsPropertiesKHR(const VkPhysicalDeviceFloatControlsPropertiesKHR* in_struct);
- safe_VkPhysicalDeviceFloatControlsPropertiesKHR(const safe_VkPhysicalDeviceFloatControlsPropertiesKHR& src);
- safe_VkPhysicalDeviceFloatControlsPropertiesKHR& operator=(const safe_VkPhysicalDeviceFloatControlsPropertiesKHR& src);
- safe_VkPhysicalDeviceFloatControlsPropertiesKHR();
- ~safe_VkPhysicalDeviceFloatControlsPropertiesKHR();
- void initialize(const VkPhysicalDeviceFloatControlsPropertiesKHR* in_struct);
- void initialize(const safe_VkPhysicalDeviceFloatControlsPropertiesKHR* src);
- VkPhysicalDeviceFloatControlsPropertiesKHR *ptr() { return reinterpret_cast<VkPhysicalDeviceFloatControlsPropertiesKHR *>(this); }
- VkPhysicalDeviceFloatControlsPropertiesKHR const *ptr() const { return reinterpret_cast<VkPhysicalDeviceFloatControlsPropertiesKHR const *>(this); }
-};
-
-struct safe_VkSubpassDescriptionDepthStencilResolveKHR {
- VkStructureType sType;
- const void* pNext;
- VkResolveModeFlagBitsKHR depthResolveMode;
- VkResolveModeFlagBitsKHR stencilResolveMode;
- safe_VkAttachmentReference2KHR* pDepthStencilResolveAttachment;
- safe_VkSubpassDescriptionDepthStencilResolveKHR(const VkSubpassDescriptionDepthStencilResolveKHR* in_struct);
- safe_VkSubpassDescriptionDepthStencilResolveKHR(const safe_VkSubpassDescriptionDepthStencilResolveKHR& src);
- safe_VkSubpassDescriptionDepthStencilResolveKHR& operator=(const safe_VkSubpassDescriptionDepthStencilResolveKHR& src);
- safe_VkSubpassDescriptionDepthStencilResolveKHR();
- ~safe_VkSubpassDescriptionDepthStencilResolveKHR();
- void initialize(const VkSubpassDescriptionDepthStencilResolveKHR* in_struct);
- void initialize(const safe_VkSubpassDescriptionDepthStencilResolveKHR* src);
- VkSubpassDescriptionDepthStencilResolveKHR *ptr() { return reinterpret_cast<VkSubpassDescriptionDepthStencilResolveKHR *>(this); }
- VkSubpassDescriptionDepthStencilResolveKHR const *ptr() const { return reinterpret_cast<VkSubpassDescriptionDepthStencilResolveKHR const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR {
- VkStructureType sType;
- void* pNext;
- VkResolveModeFlagsKHR supportedDepthResolveModes;
- VkResolveModeFlagsKHR supportedStencilResolveModes;
- VkBool32 independentResolveNone;
- VkBool32 independentResolve;
- safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR(const VkPhysicalDeviceDepthStencilResolvePropertiesKHR* in_struct);
- safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR(const safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR& src);
- safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR& operator=(const safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR& src);
- safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR();
- ~safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR();
- void initialize(const VkPhysicalDeviceDepthStencilResolvePropertiesKHR* in_struct);
- void initialize(const safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR* src);
- VkPhysicalDeviceDepthStencilResolvePropertiesKHR *ptr() { return reinterpret_cast<VkPhysicalDeviceDepthStencilResolvePropertiesKHR *>(this); }
- VkPhysicalDeviceDepthStencilResolvePropertiesKHR const *ptr() const { return reinterpret_cast<VkPhysicalDeviceDepthStencilResolvePropertiesKHR const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR {
- VkStructureType sType;
- void* pNext;
- VkBool32 vulkanMemoryModel;
- VkBool32 vulkanMemoryModelDeviceScope;
- VkBool32 vulkanMemoryModelAvailabilityVisibilityChains;
- safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR(const VkPhysicalDeviceVulkanMemoryModelFeaturesKHR* in_struct);
- safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR(const safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR& src);
- safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR& operator=(const safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR& src);
- safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR();
- ~safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR();
- void initialize(const VkPhysicalDeviceVulkanMemoryModelFeaturesKHR* in_struct);
- void initialize(const safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR* src);
- VkPhysicalDeviceVulkanMemoryModelFeaturesKHR *ptr() { return reinterpret_cast<VkPhysicalDeviceVulkanMemoryModelFeaturesKHR *>(this); }
- VkPhysicalDeviceVulkanMemoryModelFeaturesKHR const *ptr() const { return reinterpret_cast<VkPhysicalDeviceVulkanMemoryModelFeaturesKHR const *>(this); }
-};
-
-struct safe_VkSurfaceProtectedCapabilitiesKHR {
- VkStructureType sType;
- const void* pNext;
- VkBool32 supportsProtected;
- safe_VkSurfaceProtectedCapabilitiesKHR(const VkSurfaceProtectedCapabilitiesKHR* in_struct);
- safe_VkSurfaceProtectedCapabilitiesKHR(const safe_VkSurfaceProtectedCapabilitiesKHR& src);
- safe_VkSurfaceProtectedCapabilitiesKHR& operator=(const safe_VkSurfaceProtectedCapabilitiesKHR& src);
- safe_VkSurfaceProtectedCapabilitiesKHR();
- ~safe_VkSurfaceProtectedCapabilitiesKHR();
- void initialize(const VkSurfaceProtectedCapabilitiesKHR* in_struct);
- void initialize(const safe_VkSurfaceProtectedCapabilitiesKHR* src);
- VkSurfaceProtectedCapabilitiesKHR *ptr() { return reinterpret_cast<VkSurfaceProtectedCapabilitiesKHR *>(this); }
- VkSurfaceProtectedCapabilitiesKHR const *ptr() const { return reinterpret_cast<VkSurfaceProtectedCapabilitiesKHR const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR {
- VkStructureType sType;
- void* pNext;
- VkBool32 uniformBufferStandardLayout;
- safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR(const VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR* in_struct);
- safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR(const safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR& src);
- safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR& operator=(const safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR& src);
- safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR();
- ~safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR();
- void initialize(const VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR* in_struct);
- void initialize(const safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR* src);
- VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR *ptr() { return reinterpret_cast<VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR *>(this); }
- VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR const *ptr() const { return reinterpret_cast<VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR const *>(this); }
-};
-
-struct safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR {
- VkStructureType sType;
- void* pNext;
- VkBool32 pipelineExecutableInfo;
- safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR(const VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR* in_struct);
- safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR(const safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR& src);
- safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR& operator=(const safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR& src);
- safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR();
- ~safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR();
- void initialize(const VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR* in_struct);
- void initialize(const safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR* src);
- VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR *ptr() { return reinterpret_cast<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR *>(this); }
- VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const *ptr() const { return reinterpret_cast<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const *>(this); }
-};
-
-struct safe_VkPipelineInfoKHR {
- VkStructureType sType;
- const void* pNext;
- VkPipeline pipeline;
- safe_VkPipelineInfoKHR(const VkPipelineInfoKHR* in_struct);
- safe_VkPipelineInfoKHR(const safe_VkPipelineInfoKHR& src);
- safe_VkPipelineInfoKHR& operator=(const safe_VkPipelineInfoKHR& src);
- safe_VkPipelineInfoKHR();
- ~safe_VkPipelineInfoKHR();
- void initialize(const VkPipelineInfoKHR* in_struct);
- void initialize(const safe_VkPipelineInfoKHR* src);
- VkPipelineInfoKHR *ptr() { return reinterpret_cast<VkPipelineInfoKHR *>(this); }
- VkPipelineInfoKHR const *ptr() const { return reinterpret_cast<VkPipelineInfoKHR const *>(this); }
-};
-
-struct safe_VkPipelineExecutablePropertiesKHR {
- VkStructureType sType;
- void* pNext;
- VkShaderStageFlags stages;
- char name[VK_MAX_DESCRIPTION_SIZE];
- char description[VK_MAX_DESCRIPTION_SIZE];
- uint32_t subgroupSize;
- safe_VkPipelineExecutablePropertiesKHR(const VkPipelineExecutablePropertiesKHR* in_struct);
- safe_VkPipelineExecutablePropertiesKHR(const safe_VkPipelineExecutablePropertiesKHR& src);
- safe_VkPipelineExecutablePropertiesKHR& operator=(const safe_VkPipelineExecutablePropertiesKHR& src);
- safe_VkPipelineExecutablePropertiesKHR();
- ~safe_VkPipelineExecutablePropertiesKHR();
- void initialize(const VkPipelineExecutablePropertiesKHR* in_struct);
- void initialize(const safe_VkPipelineExecutablePropertiesKHR* src);
- VkPipelineExecutablePropertiesKHR *ptr() { return reinterpret_cast<VkPipelineExecutablePropertiesKHR *>(this); }
- VkPipelineExecutablePropertiesKHR const *ptr() const { return reinterpret_cast<VkPipelineExecutablePropertiesKHR const *>(this); }
-};
-
-struct safe_VkPipelineExecutableInfoKHR {
- VkStructureType sType;
- const void* pNext;
- VkPipeline pipeline;
- uint32_t executableIndex;
- safe_VkPipelineExecutableInfoKHR(const VkPipelineExecutableInfoKHR* in_struct);
- safe_VkPipelineExecutableInfoKHR(const safe_VkPipelineExecutableInfoKHR& src);
- safe_VkPipelineExecutableInfoKHR& operator=(const safe_VkPipelineExecutableInfoKHR& src);
- safe_VkPipelineExecutableInfoKHR();
- ~safe_VkPipelineExecutableInfoKHR();
- void initialize(const VkPipelineExecutableInfoKHR* in_struct);
- void initialize(const safe_VkPipelineExecutableInfoKHR* src);
- VkPipelineExecutableInfoKHR *ptr() { return reinterpret_cast<VkPipelineExecutableInfoKHR *>(this); }
- VkPipelineExecutableInfoKHR const *ptr() const { return reinterpret_cast<VkPipelineExecutableInfoKHR const *>(this); }
-};
-
-struct safe_VkPipelineExecutableStatisticKHR {
- VkStructureType sType;
- void* pNext;
- char name[VK_MAX_DESCRIPTION_SIZE];
- char description[VK_MAX_DESCRIPTION_SIZE];
- VkPipelineExecutableStatisticFormatKHR format;
- VkPipelineExecutableStatisticValueKHR value;
- safe_VkPipelineExecutableStatisticKHR(const VkPipelineExecutableStatisticKHR* in_struct);
- safe_VkPipelineExecutableStatisticKHR(const safe_VkPipelineExecutableStatisticKHR& src);
- safe_VkPipelineExecutableStatisticKHR& operator=(const safe_VkPipelineExecutableStatisticKHR& src);
- safe_VkPipelineExecutableStatisticKHR();
- ~safe_VkPipelineExecutableStatisticKHR();
- void initialize(const VkPipelineExecutableStatisticKHR* in_struct);
- void initialize(const safe_VkPipelineExecutableStatisticKHR* src);
- VkPipelineExecutableStatisticKHR *ptr() { return reinterpret_cast<VkPipelineExecutableStatisticKHR *>(this); }
- VkPipelineExecutableStatisticKHR const *ptr() const { return reinterpret_cast<VkPipelineExecutableStatisticKHR const *>(this); }
-};
-
-struct safe_VkPipelineExecutableInternalRepresentationKHR {
- VkStructureType sType;
- void* pNext;
- char name[VK_MAX_DESCRIPTION_SIZE];
- char description[VK_MAX_DESCRIPTION_SIZE];
- VkBool32 isText;
- size_t dataSize;
- void* pData;
- safe_VkPipelineExecutableInternalRepresentationKHR(const VkPipelineExecutableInternalRepresentationKHR* in_struct);
- safe_VkPipelineExecutableInternalRepresentationKHR(const safe_VkPipelineExecutableInternalRepresentationKHR& src);
- safe_VkPipelineExecutableInternalRepresentationKHR& operator=(const safe_VkPipelineExecutableInternalRepresentationKHR& src);
- safe_VkPipelineExecutableInternalRepresentationKHR();
- ~safe_VkPipelineExecutableInternalRepresentationKHR();
- void initialize(const VkPipelineExecutableInternalRepresentationKHR* in_struct);
- void initialize(const safe_VkPipelineExecutableInternalRepresentationKHR* src);
- VkPipelineExecutableInternalRepresentationKHR *ptr() { return reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>(this); }
- VkPipelineExecutableInternalRepresentationKHR const *ptr() const { return reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR const *>(this); }
-};
-
-struct safe_VkDebugReportCallbackCreateInfoEXT {
- VkStructureType sType;
- const void* pNext;
- VkDebugReportFlagsEXT flags;
- PFN_vkDebugReportCallbackEXT pfnCallback;
- void* pUserData;
- safe_VkDebugReportCallbackCreateInfoEXT(const VkDebugReportCallbackCreateInfoEXT* in_struct);
- safe_VkDebugReportCallbackCreateInfoEXT(const safe_VkDebugReportCallbackCreateInfoEXT& src);
- safe_VkDebugReportCallbackCreateInfoEXT& operator=(const safe_VkDebugReportCallbackCreateInfoEXT& src);
- safe_VkDebugReportCallbackCreateInfoEXT();
- ~safe_VkDebugReportCallbackCreateInfoEXT();
- void initialize(const VkDebugReportCallbackCreateInfoEXT* in_struct);
- void initialize(const safe_VkDebugReportCallbackCreateInfoEXT* src);
- VkDebugReportCallbackCreateInfoEXT *ptr() { return reinterpret_cast<VkDebugReportCallbackCreateInfoEXT *>(this); }
- VkDebugReportCallbackCreateInfoEXT const *ptr() const { return reinterpret_cast<VkDebugReportCallbackCreateInfoEXT const *>(this); }
-};
-
-struct safe_VkPipelineRasterizationStateRasterizationOrderAMD {
- VkStructureType sType;
- const void* pNext;
- VkRasterizationOrderAMD rasterizationOrder;
- safe_VkPipelineRasterizationStateRasterizationOrderAMD(const VkPipelineRasterizationStateRasterizationOrderAMD* in_struct);
- safe_VkPipelineRasterizationStateRasterizationOrderAMD(const safe_VkPipelineRasterizationStateRasterizationOrderAMD& src);
- safe_VkPipelineRasterizationStateRasterizationOrderAMD& operator=(const safe_VkPipelineRasterizationStateRasterizationOrderAMD& src);
- safe_VkPipelineRasterizationStateRasterizationOrderAMD();
- ~safe_VkPipelineRasterizationStateRasterizationOrderAMD();
- void initialize(const VkPipelineRasterizationStateRasterizationOrderAMD* in_struct);
- void initialize(const safe_VkPipelineRasterizationStateRasterizationOrderAMD* src);
- VkPipelineRasterizationStateRasterizationOrderAMD *ptr() { return reinterpret_cast<VkPipelineRasterizationStateRasterizationOrderAMD *>(this); }
- VkPipelineRasterizationStateRasterizationOrderAMD const *ptr() const { return reinterpret_cast<VkPipelineRasterizationStateRasterizationOrderAMD const *>(this); }
-};
-
-struct safe_VkDebugMarkerObjectNameInfoEXT {
- VkStructureType sType;
- const void* pNext;
- VkDebugReportObjectTypeEXT objectType;
- uint64_t object;
- const char* pObjectName;
- safe_VkDebugMarkerObjectNameInfoEXT(const VkDebugMarkerObjectNameInfoEXT* in_struct);
- safe_VkDebugMarkerObjectNameInfoEXT(const safe_VkDebugMarkerObjectNameInfoEXT& src);
- safe_VkDebugMarkerObjectNameInfoEXT& operator=(const safe_VkDebugMarkerObjectNameInfoEXT& src);
- safe_VkDebugMarkerObjectNameInfoEXT();
- ~safe_VkDebugMarkerObjectNameInfoEXT();
- void initialize(const VkDebugMarkerObjectNameInfoEXT* in_struct);
- void initialize(const safe_VkDebugMarkerObjectNameInfoEXT* src);
- VkDebugMarkerObjectNameInfoEXT *ptr() { return reinterpret_cast<VkDebugMarkerObjectNameInfoEXT *>(this); }
- VkDebugMarkerObjectNameInfoEXT const *ptr() const { return reinterpret_cast<VkDebugMarkerObjectNameInfoEXT const *>(this); }
-};
-
-struct safe_VkDebugMarkerObjectTagInfoEXT {
- VkStructureType sType;
- const void* pNext;
- VkDebugReportObjectTypeEXT objectType;
- uint64_t object;
- uint64_t tagName;
- size_t tagSize;
- const void* pTag;
- safe_VkDebugMarkerObjectTagInfoEXT(const VkDebugMarkerObjectTagInfoEXT* in_struct);
- safe_VkDebugMarkerObjectTagInfoEXT(const safe_VkDebugMarkerObjectTagInfoEXT& src);
- safe_VkDebugMarkerObjectTagInfoEXT& operator=(const safe_VkDebugMarkerObjectTagInfoEXT& src);
- safe_VkDebugMarkerObjectTagInfoEXT();
- ~safe_VkDebugMarkerObjectTagInfoEXT();
- void initialize(const VkDebugMarkerObjectTagInfoEXT* in_struct);
- void initialize(const safe_VkDebugMarkerObjectTagInfoEXT* src);
- VkDebugMarkerObjectTagInfoEXT *ptr() { return reinterpret_cast<VkDebugMarkerObjectTagInfoEXT *>(this); }
- VkDebugMarkerObjectTagInfoEXT const *ptr() const { return reinterpret_cast<VkDebugMarkerObjectTagInfoEXT const *>(this); }
-};
-
-struct safe_VkDebugMarkerMarkerInfoEXT {
- VkStructureType sType;
- const void* pNext;
- const char* pMarkerName;
- float color[4];
- safe_VkDebugMarkerMarkerInfoEXT(const VkDebugMarkerMarkerInfoEXT* in_struct);
- safe_VkDebugMarkerMarkerInfoEXT(const safe_VkDebugMarkerMarkerInfoEXT& src);
- safe_VkDebugMarkerMarkerInfoEXT& operator=(const safe_VkDebugMarkerMarkerInfoEXT& src);
- safe_VkDebugMarkerMarkerInfoEXT();
- ~safe_VkDebugMarkerMarkerInfoEXT();
- void initialize(const VkDebugMarkerMarkerInfoEXT* in_struct);
- void initialize(const safe_VkDebugMarkerMarkerInfoEXT* src);
- VkDebugMarkerMarkerInfoEXT *ptr() { return reinterpret_cast<VkDebugMarkerMarkerInfoEXT *>(this); }
- VkDebugMarkerMarkerInfoEXT const *ptr() const { return reinterpret_cast<VkDebugMarkerMarkerInfoEXT const *>(this); }
-};
-
-struct safe_VkDedicatedAllocationImageCreateInfoNV {
- VkStructureType sType;
- const void* pNext;
- VkBool32 dedicatedAllocation;
- safe_VkDedicatedAllocationImageCreateInfoNV(const VkDedicatedAllocationImageCreateInfoNV* in_struct);
- safe_VkDedicatedAllocationImageCreateInfoNV(const safe_VkDedicatedAllocationImageCreateInfoNV& src);
- safe_VkDedicatedAllocationImageCreateInfoNV& operator=(const safe_VkDedicatedAllocationImageCreateInfoNV& src);
- safe_VkDedicatedAllocationImageCreateInfoNV();
- ~safe_VkDedicatedAllocationImageCreateInfoNV();
- void initialize(const VkDedicatedAllocationImageCreateInfoNV* in_struct);
- void initialize(const safe_VkDedicatedAllocationImageCreateInfoNV* src);
- VkDedicatedAllocationImageCreateInfoNV *ptr() { return reinterpret_cast<VkDedicatedAllocationImageCreateInfoNV *>(this); }
- VkDedicatedAllocationImageCreateInfoNV const *ptr() const { return reinterpret_cast<VkDedicatedAllocationImageCreateInfoNV const *>(this); }
-};
-
-struct safe_VkDedicatedAllocationBufferCreateInfoNV {
- VkStructureType sType;
- const void* pNext;
- VkBool32 dedicatedAllocation;
- safe_VkDedicatedAllocationBufferCreateInfoNV(const VkDedicatedAllocationBufferCreateInfoNV* in_struct);
- safe_VkDedicatedAllocationBufferCreateInfoNV(const safe_VkDedicatedAllocationBufferCreateInfoNV& src);
- safe_VkDedicatedAllocationBufferCreateInfoNV& operator=(const safe_VkDedicatedAllocationBufferCreateInfoNV& src);
- safe_VkDedicatedAllocationBufferCreateInfoNV();
- ~safe_VkDedicatedAllocationBufferCreateInfoNV();
- void initialize(const VkDedicatedAllocationBufferCreateInfoNV* in_struct);
- void initialize(const safe_VkDedicatedAllocationBufferCreateInfoNV* src);
- VkDedicatedAllocationBufferCreateInfoNV *ptr() { return reinterpret_cast<VkDedicatedAllocationBufferCreateInfoNV *>(this); }
- VkDedicatedAllocationBufferCreateInfoNV const *ptr() const { return reinterpret_cast<VkDedicatedAllocationBufferCreateInfoNV const *>(this); }
-};
-
-struct safe_VkDedicatedAllocationMemoryAllocateInfoNV {
- VkStructureType sType;
- const void* pNext;
- VkImage image;
- VkBuffer buffer;
- safe_VkDedicatedAllocationMemoryAllocateInfoNV(const VkDedicatedAllocationMemoryAllocateInfoNV* in_struct);
- safe_VkDedicatedAllocationMemoryAllocateInfoNV(const safe_VkDedicatedAllocationMemoryAllocateInfoNV& src);
- safe_VkDedicatedAllocationMemoryAllocateInfoNV& operator=(const safe_VkDedicatedAllocationMemoryAllocateInfoNV& src);
- safe_VkDedicatedAllocationMemoryAllocateInfoNV();
- ~safe_VkDedicatedAllocationMemoryAllocateInfoNV();
- void initialize(const VkDedicatedAllocationMemoryAllocateInfoNV* in_struct);
- void initialize(const safe_VkDedicatedAllocationMemoryAllocateInfoNV* src);
- VkDedicatedAllocationMemoryAllocateInfoNV *ptr() { return reinterpret_cast<VkDedicatedAllocationMemoryAllocateInfoNV *>(this); }
- VkDedicatedAllocationMemoryAllocateInfoNV const *ptr() const { return reinterpret_cast<VkDedicatedAllocationMemoryAllocateInfoNV const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT {
- VkStructureType sType;
- void* pNext;
- VkBool32 transformFeedback;
- VkBool32 geometryStreams;
- safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT(const VkPhysicalDeviceTransformFeedbackFeaturesEXT* in_struct);
- safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT(const safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT& src);
- safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT& operator=(const safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT& src);
- safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT();
- ~safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT();
- void initialize(const VkPhysicalDeviceTransformFeedbackFeaturesEXT* in_struct);
- void initialize(const safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT* src);
- VkPhysicalDeviceTransformFeedbackFeaturesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceTransformFeedbackFeaturesEXT *>(this); }
- VkPhysicalDeviceTransformFeedbackFeaturesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceTransformFeedbackFeaturesEXT const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT {
- VkStructureType sType;
- void* pNext;
- uint32_t maxTransformFeedbackStreams;
- uint32_t maxTransformFeedbackBuffers;
- VkDeviceSize maxTransformFeedbackBufferSize;
- uint32_t maxTransformFeedbackStreamDataSize;
- uint32_t maxTransformFeedbackBufferDataSize;
- uint32_t maxTransformFeedbackBufferDataStride;
- VkBool32 transformFeedbackQueries;
- VkBool32 transformFeedbackStreamsLinesTriangles;
- VkBool32 transformFeedbackRasterizationStreamSelect;
- VkBool32 transformFeedbackDraw;
- safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT(const VkPhysicalDeviceTransformFeedbackPropertiesEXT* in_struct);
- safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT(const safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT& src);
- safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT& operator=(const safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT& src);
- safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT();
- ~safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT();
- void initialize(const VkPhysicalDeviceTransformFeedbackPropertiesEXT* in_struct);
- void initialize(const safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT* src);
- VkPhysicalDeviceTransformFeedbackPropertiesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceTransformFeedbackPropertiesEXT *>(this); }
- VkPhysicalDeviceTransformFeedbackPropertiesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceTransformFeedbackPropertiesEXT const *>(this); }
-};
-
-struct safe_VkPipelineRasterizationStateStreamCreateInfoEXT {
- VkStructureType sType;
- const void* pNext;
- VkPipelineRasterizationStateStreamCreateFlagsEXT flags;
- uint32_t rasterizationStream;
- safe_VkPipelineRasterizationStateStreamCreateInfoEXT(const VkPipelineRasterizationStateStreamCreateInfoEXT* in_struct);
- safe_VkPipelineRasterizationStateStreamCreateInfoEXT(const safe_VkPipelineRasterizationStateStreamCreateInfoEXT& src);
- safe_VkPipelineRasterizationStateStreamCreateInfoEXT& operator=(const safe_VkPipelineRasterizationStateStreamCreateInfoEXT& src);
- safe_VkPipelineRasterizationStateStreamCreateInfoEXT();
- ~safe_VkPipelineRasterizationStateStreamCreateInfoEXT();
- void initialize(const VkPipelineRasterizationStateStreamCreateInfoEXT* in_struct);
- void initialize(const safe_VkPipelineRasterizationStateStreamCreateInfoEXT* src);
- VkPipelineRasterizationStateStreamCreateInfoEXT *ptr() { return reinterpret_cast<VkPipelineRasterizationStateStreamCreateInfoEXT *>(this); }
- VkPipelineRasterizationStateStreamCreateInfoEXT const *ptr() const { return reinterpret_cast<VkPipelineRasterizationStateStreamCreateInfoEXT const *>(this); }
-};
-
-struct safe_VkImageViewHandleInfoNVX {
- VkStructureType sType;
- const void* pNext;
- VkImageView imageView;
- VkDescriptorType descriptorType;
- VkSampler sampler;
- safe_VkImageViewHandleInfoNVX(const VkImageViewHandleInfoNVX* in_struct);
- safe_VkImageViewHandleInfoNVX(const safe_VkImageViewHandleInfoNVX& src);
- safe_VkImageViewHandleInfoNVX& operator=(const safe_VkImageViewHandleInfoNVX& src);
- safe_VkImageViewHandleInfoNVX();
- ~safe_VkImageViewHandleInfoNVX();
- void initialize(const VkImageViewHandleInfoNVX* in_struct);
- void initialize(const safe_VkImageViewHandleInfoNVX* src);
- VkImageViewHandleInfoNVX *ptr() { return reinterpret_cast<VkImageViewHandleInfoNVX *>(this); }
- VkImageViewHandleInfoNVX const *ptr() const { return reinterpret_cast<VkImageViewHandleInfoNVX const *>(this); }
-};
-
-struct safe_VkTextureLODGatherFormatPropertiesAMD {
- VkStructureType sType;
- void* pNext;
- VkBool32 supportsTextureGatherLODBiasAMD;
- safe_VkTextureLODGatherFormatPropertiesAMD(const VkTextureLODGatherFormatPropertiesAMD* in_struct);
- safe_VkTextureLODGatherFormatPropertiesAMD(const safe_VkTextureLODGatherFormatPropertiesAMD& src);
- safe_VkTextureLODGatherFormatPropertiesAMD& operator=(const safe_VkTextureLODGatherFormatPropertiesAMD& src);
- safe_VkTextureLODGatherFormatPropertiesAMD();
- ~safe_VkTextureLODGatherFormatPropertiesAMD();
- void initialize(const VkTextureLODGatherFormatPropertiesAMD* in_struct);
- void initialize(const safe_VkTextureLODGatherFormatPropertiesAMD* src);
- VkTextureLODGatherFormatPropertiesAMD *ptr() { return reinterpret_cast<VkTextureLODGatherFormatPropertiesAMD *>(this); }
- VkTextureLODGatherFormatPropertiesAMD const *ptr() const { return reinterpret_cast<VkTextureLODGatherFormatPropertiesAMD const *>(this); }
-};
-
-#ifdef VK_USE_PLATFORM_GGP
-struct safe_VkStreamDescriptorSurfaceCreateInfoGGP {
- VkStructureType sType;
- const void* pNext;
- VkStreamDescriptorSurfaceCreateFlagsGGP flags;
- GgpStreamDescriptor streamDescriptor;
- safe_VkStreamDescriptorSurfaceCreateInfoGGP(const VkStreamDescriptorSurfaceCreateInfoGGP* in_struct);
- safe_VkStreamDescriptorSurfaceCreateInfoGGP(const safe_VkStreamDescriptorSurfaceCreateInfoGGP& src);
- safe_VkStreamDescriptorSurfaceCreateInfoGGP& operator=(const safe_VkStreamDescriptorSurfaceCreateInfoGGP& src);
- safe_VkStreamDescriptorSurfaceCreateInfoGGP();
- ~safe_VkStreamDescriptorSurfaceCreateInfoGGP();
- void initialize(const VkStreamDescriptorSurfaceCreateInfoGGP* in_struct);
- void initialize(const safe_VkStreamDescriptorSurfaceCreateInfoGGP* src);
- VkStreamDescriptorSurfaceCreateInfoGGP *ptr() { return reinterpret_cast<VkStreamDescriptorSurfaceCreateInfoGGP *>(this); }
- VkStreamDescriptorSurfaceCreateInfoGGP const *ptr() const { return reinterpret_cast<VkStreamDescriptorSurfaceCreateInfoGGP const *>(this); }
-};
-#endif // VK_USE_PLATFORM_GGP
-
-struct safe_VkPhysicalDeviceCornerSampledImageFeaturesNV {
- VkStructureType sType;
- void* pNext;
- VkBool32 cornerSampledImage;
- safe_VkPhysicalDeviceCornerSampledImageFeaturesNV(const VkPhysicalDeviceCornerSampledImageFeaturesNV* in_struct);
- safe_VkPhysicalDeviceCornerSampledImageFeaturesNV(const safe_VkPhysicalDeviceCornerSampledImageFeaturesNV& src);
- safe_VkPhysicalDeviceCornerSampledImageFeaturesNV& operator=(const safe_VkPhysicalDeviceCornerSampledImageFeaturesNV& src);
- safe_VkPhysicalDeviceCornerSampledImageFeaturesNV();
- ~safe_VkPhysicalDeviceCornerSampledImageFeaturesNV();
- void initialize(const VkPhysicalDeviceCornerSampledImageFeaturesNV* in_struct);
- void initialize(const safe_VkPhysicalDeviceCornerSampledImageFeaturesNV* src);
- VkPhysicalDeviceCornerSampledImageFeaturesNV *ptr() { return reinterpret_cast<VkPhysicalDeviceCornerSampledImageFeaturesNV *>(this); }
- VkPhysicalDeviceCornerSampledImageFeaturesNV const *ptr() const { return reinterpret_cast<VkPhysicalDeviceCornerSampledImageFeaturesNV const *>(this); }
-};
-
-struct safe_VkExternalMemoryImageCreateInfoNV {
- VkStructureType sType;
- const void* pNext;
- VkExternalMemoryHandleTypeFlagsNV handleTypes;
- safe_VkExternalMemoryImageCreateInfoNV(const VkExternalMemoryImageCreateInfoNV* in_struct);
- safe_VkExternalMemoryImageCreateInfoNV(const safe_VkExternalMemoryImageCreateInfoNV& src);
- safe_VkExternalMemoryImageCreateInfoNV& operator=(const safe_VkExternalMemoryImageCreateInfoNV& src);
- safe_VkExternalMemoryImageCreateInfoNV();
- ~safe_VkExternalMemoryImageCreateInfoNV();
- void initialize(const VkExternalMemoryImageCreateInfoNV* in_struct);
- void initialize(const safe_VkExternalMemoryImageCreateInfoNV* src);
- VkExternalMemoryImageCreateInfoNV *ptr() { return reinterpret_cast<VkExternalMemoryImageCreateInfoNV *>(this); }
- VkExternalMemoryImageCreateInfoNV const *ptr() const { return reinterpret_cast<VkExternalMemoryImageCreateInfoNV const *>(this); }
-};
-
-struct safe_VkExportMemoryAllocateInfoNV {
- VkStructureType sType;
- const void* pNext;
- VkExternalMemoryHandleTypeFlagsNV handleTypes;
- safe_VkExportMemoryAllocateInfoNV(const VkExportMemoryAllocateInfoNV* in_struct);
- safe_VkExportMemoryAllocateInfoNV(const safe_VkExportMemoryAllocateInfoNV& src);
- safe_VkExportMemoryAllocateInfoNV& operator=(const safe_VkExportMemoryAllocateInfoNV& src);
- safe_VkExportMemoryAllocateInfoNV();
- ~safe_VkExportMemoryAllocateInfoNV();
- void initialize(const VkExportMemoryAllocateInfoNV* in_struct);
- void initialize(const safe_VkExportMemoryAllocateInfoNV* src);
- VkExportMemoryAllocateInfoNV *ptr() { return reinterpret_cast<VkExportMemoryAllocateInfoNV *>(this); }
- VkExportMemoryAllocateInfoNV const *ptr() const { return reinterpret_cast<VkExportMemoryAllocateInfoNV const *>(this); }
-};
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-struct safe_VkImportMemoryWin32HandleInfoNV {
- VkStructureType sType;
- const void* pNext;
- VkExternalMemoryHandleTypeFlagsNV handleType;
- HANDLE handle;
- safe_VkImportMemoryWin32HandleInfoNV(const VkImportMemoryWin32HandleInfoNV* in_struct);
- safe_VkImportMemoryWin32HandleInfoNV(const safe_VkImportMemoryWin32HandleInfoNV& src);
- safe_VkImportMemoryWin32HandleInfoNV& operator=(const safe_VkImportMemoryWin32HandleInfoNV& src);
- safe_VkImportMemoryWin32HandleInfoNV();
- ~safe_VkImportMemoryWin32HandleInfoNV();
- void initialize(const VkImportMemoryWin32HandleInfoNV* in_struct);
- void initialize(const safe_VkImportMemoryWin32HandleInfoNV* src);
- VkImportMemoryWin32HandleInfoNV *ptr() { return reinterpret_cast<VkImportMemoryWin32HandleInfoNV *>(this); }
- VkImportMemoryWin32HandleInfoNV const *ptr() const { return reinterpret_cast<VkImportMemoryWin32HandleInfoNV const *>(this); }
-};
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-struct safe_VkExportMemoryWin32HandleInfoNV {
- VkStructureType sType;
- const void* pNext;
- const SECURITY_ATTRIBUTES* pAttributes;
- DWORD dwAccess;
- safe_VkExportMemoryWin32HandleInfoNV(const VkExportMemoryWin32HandleInfoNV* in_struct);
- safe_VkExportMemoryWin32HandleInfoNV(const safe_VkExportMemoryWin32HandleInfoNV& src);
- safe_VkExportMemoryWin32HandleInfoNV& operator=(const safe_VkExportMemoryWin32HandleInfoNV& src);
- safe_VkExportMemoryWin32HandleInfoNV();
- ~safe_VkExportMemoryWin32HandleInfoNV();
- void initialize(const VkExportMemoryWin32HandleInfoNV* in_struct);
- void initialize(const safe_VkExportMemoryWin32HandleInfoNV* src);
- VkExportMemoryWin32HandleInfoNV *ptr() { return reinterpret_cast<VkExportMemoryWin32HandleInfoNV *>(this); }
- VkExportMemoryWin32HandleInfoNV const *ptr() const { return reinterpret_cast<VkExportMemoryWin32HandleInfoNV const *>(this); }
-};
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-struct safe_VkWin32KeyedMutexAcquireReleaseInfoNV {
- VkStructureType sType;
- const void* pNext;
- uint32_t acquireCount;
- VkDeviceMemory* pAcquireSyncs;
- const uint64_t* pAcquireKeys;
- const uint32_t* pAcquireTimeoutMilliseconds;
- uint32_t releaseCount;
- VkDeviceMemory* pReleaseSyncs;
- const uint64_t* pReleaseKeys;
- safe_VkWin32KeyedMutexAcquireReleaseInfoNV(const VkWin32KeyedMutexAcquireReleaseInfoNV* in_struct);
- safe_VkWin32KeyedMutexAcquireReleaseInfoNV(const safe_VkWin32KeyedMutexAcquireReleaseInfoNV& src);
- safe_VkWin32KeyedMutexAcquireReleaseInfoNV& operator=(const safe_VkWin32KeyedMutexAcquireReleaseInfoNV& src);
- safe_VkWin32KeyedMutexAcquireReleaseInfoNV();
- ~safe_VkWin32KeyedMutexAcquireReleaseInfoNV();
- void initialize(const VkWin32KeyedMutexAcquireReleaseInfoNV* in_struct);
- void initialize(const safe_VkWin32KeyedMutexAcquireReleaseInfoNV* src);
- VkWin32KeyedMutexAcquireReleaseInfoNV *ptr() { return reinterpret_cast<VkWin32KeyedMutexAcquireReleaseInfoNV *>(this); }
- VkWin32KeyedMutexAcquireReleaseInfoNV const *ptr() const { return reinterpret_cast<VkWin32KeyedMutexAcquireReleaseInfoNV const *>(this); }
-};
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-struct safe_VkValidationFlagsEXT {
- VkStructureType sType;
- const void* pNext;
- uint32_t disabledValidationCheckCount;
- const VkValidationCheckEXT* pDisabledValidationChecks;
- safe_VkValidationFlagsEXT(const VkValidationFlagsEXT* in_struct);
- safe_VkValidationFlagsEXT(const safe_VkValidationFlagsEXT& src);
- safe_VkValidationFlagsEXT& operator=(const safe_VkValidationFlagsEXT& src);
- safe_VkValidationFlagsEXT();
- ~safe_VkValidationFlagsEXT();
- void initialize(const VkValidationFlagsEXT* in_struct);
- void initialize(const safe_VkValidationFlagsEXT* src);
- VkValidationFlagsEXT *ptr() { return reinterpret_cast<VkValidationFlagsEXT *>(this); }
- VkValidationFlagsEXT const *ptr() const { return reinterpret_cast<VkValidationFlagsEXT const *>(this); }
-};
-
-#ifdef VK_USE_PLATFORM_VI_NN
-struct safe_VkViSurfaceCreateInfoNN {
- VkStructureType sType;
- const void* pNext;
- VkViSurfaceCreateFlagsNN flags;
- void* window;
- safe_VkViSurfaceCreateInfoNN(const VkViSurfaceCreateInfoNN* in_struct);
- safe_VkViSurfaceCreateInfoNN(const safe_VkViSurfaceCreateInfoNN& src);
- safe_VkViSurfaceCreateInfoNN& operator=(const safe_VkViSurfaceCreateInfoNN& src);
- safe_VkViSurfaceCreateInfoNN();
- ~safe_VkViSurfaceCreateInfoNN();
- void initialize(const VkViSurfaceCreateInfoNN* in_struct);
- void initialize(const safe_VkViSurfaceCreateInfoNN* src);
- VkViSurfaceCreateInfoNN *ptr() { return reinterpret_cast<VkViSurfaceCreateInfoNN *>(this); }
- VkViSurfaceCreateInfoNN const *ptr() const { return reinterpret_cast<VkViSurfaceCreateInfoNN const *>(this); }
-};
-#endif // VK_USE_PLATFORM_VI_NN
-
-struct safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT {
- VkStructureType sType;
- const void* pNext;
- VkBool32 textureCompressionASTC_HDR;
- safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT(const VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT* in_struct);
- safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT(const safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT& src);
- safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT& operator=(const safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT& src);
- safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT();
- ~safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT();
- void initialize(const VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT* in_struct);
- void initialize(const safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT* src);
- VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT *>(this); }
- VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const *>(this); }
-};
-
-struct safe_VkImageViewASTCDecodeModeEXT {
- VkStructureType sType;
- const void* pNext;
- VkFormat decodeMode;
- safe_VkImageViewASTCDecodeModeEXT(const VkImageViewASTCDecodeModeEXT* in_struct);
- safe_VkImageViewASTCDecodeModeEXT(const safe_VkImageViewASTCDecodeModeEXT& src);
- safe_VkImageViewASTCDecodeModeEXT& operator=(const safe_VkImageViewASTCDecodeModeEXT& src);
- safe_VkImageViewASTCDecodeModeEXT();
- ~safe_VkImageViewASTCDecodeModeEXT();
- void initialize(const VkImageViewASTCDecodeModeEXT* in_struct);
- void initialize(const safe_VkImageViewASTCDecodeModeEXT* src);
- VkImageViewASTCDecodeModeEXT *ptr() { return reinterpret_cast<VkImageViewASTCDecodeModeEXT *>(this); }
- VkImageViewASTCDecodeModeEXT const *ptr() const { return reinterpret_cast<VkImageViewASTCDecodeModeEXT const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceASTCDecodeFeaturesEXT {
- VkStructureType sType;
- void* pNext;
- VkBool32 decodeModeSharedExponent;
- safe_VkPhysicalDeviceASTCDecodeFeaturesEXT(const VkPhysicalDeviceASTCDecodeFeaturesEXT* in_struct);
- safe_VkPhysicalDeviceASTCDecodeFeaturesEXT(const safe_VkPhysicalDeviceASTCDecodeFeaturesEXT& src);
- safe_VkPhysicalDeviceASTCDecodeFeaturesEXT& operator=(const safe_VkPhysicalDeviceASTCDecodeFeaturesEXT& src);
- safe_VkPhysicalDeviceASTCDecodeFeaturesEXT();
- ~safe_VkPhysicalDeviceASTCDecodeFeaturesEXT();
- void initialize(const VkPhysicalDeviceASTCDecodeFeaturesEXT* in_struct);
- void initialize(const safe_VkPhysicalDeviceASTCDecodeFeaturesEXT* src);
- VkPhysicalDeviceASTCDecodeFeaturesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceASTCDecodeFeaturesEXT *>(this); }
- VkPhysicalDeviceASTCDecodeFeaturesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceASTCDecodeFeaturesEXT const *>(this); }
-};
-
-struct safe_VkConditionalRenderingBeginInfoEXT {
- VkStructureType sType;
- const void* pNext;
- VkBuffer buffer;
- VkDeviceSize offset;
- VkConditionalRenderingFlagsEXT flags;
- safe_VkConditionalRenderingBeginInfoEXT(const VkConditionalRenderingBeginInfoEXT* in_struct);
- safe_VkConditionalRenderingBeginInfoEXT(const safe_VkConditionalRenderingBeginInfoEXT& src);
- safe_VkConditionalRenderingBeginInfoEXT& operator=(const safe_VkConditionalRenderingBeginInfoEXT& src);
- safe_VkConditionalRenderingBeginInfoEXT();
- ~safe_VkConditionalRenderingBeginInfoEXT();
- void initialize(const VkConditionalRenderingBeginInfoEXT* in_struct);
- void initialize(const safe_VkConditionalRenderingBeginInfoEXT* src);
- VkConditionalRenderingBeginInfoEXT *ptr() { return reinterpret_cast<VkConditionalRenderingBeginInfoEXT *>(this); }
- VkConditionalRenderingBeginInfoEXT const *ptr() const { return reinterpret_cast<VkConditionalRenderingBeginInfoEXT const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT {
- VkStructureType sType;
- void* pNext;
- VkBool32 conditionalRendering;
- VkBool32 inheritedConditionalRendering;
- safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT(const VkPhysicalDeviceConditionalRenderingFeaturesEXT* in_struct);
- safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT(const safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT& src);
- safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT& operator=(const safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT& src);
- safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT();
- ~safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT();
- void initialize(const VkPhysicalDeviceConditionalRenderingFeaturesEXT* in_struct);
- void initialize(const safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT* src);
- VkPhysicalDeviceConditionalRenderingFeaturesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceConditionalRenderingFeaturesEXT *>(this); }
- VkPhysicalDeviceConditionalRenderingFeaturesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceConditionalRenderingFeaturesEXT const *>(this); }
-};
-
-struct safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT {
- VkStructureType sType;
- const void* pNext;
- VkBool32 conditionalRenderingEnable;
- safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT(const VkCommandBufferInheritanceConditionalRenderingInfoEXT* in_struct);
- safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT(const safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT& src);
- safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT& operator=(const safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT& src);
- safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT();
- ~safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT();
- void initialize(const VkCommandBufferInheritanceConditionalRenderingInfoEXT* in_struct);
- void initialize(const safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT* src);
- VkCommandBufferInheritanceConditionalRenderingInfoEXT *ptr() { return reinterpret_cast<VkCommandBufferInheritanceConditionalRenderingInfoEXT *>(this); }
- VkCommandBufferInheritanceConditionalRenderingInfoEXT const *ptr() const { return reinterpret_cast<VkCommandBufferInheritanceConditionalRenderingInfoEXT const *>(this); }
-};
-
-struct safe_VkDeviceGeneratedCommandsFeaturesNVX {
- VkStructureType sType;
- const void* pNext;
- VkBool32 computeBindingPointSupport;
- safe_VkDeviceGeneratedCommandsFeaturesNVX(const VkDeviceGeneratedCommandsFeaturesNVX* in_struct);
- safe_VkDeviceGeneratedCommandsFeaturesNVX(const safe_VkDeviceGeneratedCommandsFeaturesNVX& src);
- safe_VkDeviceGeneratedCommandsFeaturesNVX& operator=(const safe_VkDeviceGeneratedCommandsFeaturesNVX& src);
- safe_VkDeviceGeneratedCommandsFeaturesNVX();
- ~safe_VkDeviceGeneratedCommandsFeaturesNVX();
- void initialize(const VkDeviceGeneratedCommandsFeaturesNVX* in_struct);
- void initialize(const safe_VkDeviceGeneratedCommandsFeaturesNVX* src);
- VkDeviceGeneratedCommandsFeaturesNVX *ptr() { return reinterpret_cast<VkDeviceGeneratedCommandsFeaturesNVX *>(this); }
- VkDeviceGeneratedCommandsFeaturesNVX const *ptr() const { return reinterpret_cast<VkDeviceGeneratedCommandsFeaturesNVX const *>(this); }
-};
-
-struct safe_VkDeviceGeneratedCommandsLimitsNVX {
- VkStructureType sType;
- const void* pNext;
- uint32_t maxIndirectCommandsLayoutTokenCount;
- uint32_t maxObjectEntryCounts;
- uint32_t minSequenceCountBufferOffsetAlignment;
- uint32_t minSequenceIndexBufferOffsetAlignment;
- uint32_t minCommandsTokenBufferOffsetAlignment;
- safe_VkDeviceGeneratedCommandsLimitsNVX(const VkDeviceGeneratedCommandsLimitsNVX* in_struct);
- safe_VkDeviceGeneratedCommandsLimitsNVX(const safe_VkDeviceGeneratedCommandsLimitsNVX& src);
- safe_VkDeviceGeneratedCommandsLimitsNVX& operator=(const safe_VkDeviceGeneratedCommandsLimitsNVX& src);
- safe_VkDeviceGeneratedCommandsLimitsNVX();
- ~safe_VkDeviceGeneratedCommandsLimitsNVX();
- void initialize(const VkDeviceGeneratedCommandsLimitsNVX* in_struct);
- void initialize(const safe_VkDeviceGeneratedCommandsLimitsNVX* src);
- VkDeviceGeneratedCommandsLimitsNVX *ptr() { return reinterpret_cast<VkDeviceGeneratedCommandsLimitsNVX *>(this); }
- VkDeviceGeneratedCommandsLimitsNVX const *ptr() const { return reinterpret_cast<VkDeviceGeneratedCommandsLimitsNVX const *>(this); }
-};
-
-struct safe_VkIndirectCommandsLayoutCreateInfoNVX {
- VkStructureType sType;
- const void* pNext;
- VkPipelineBindPoint pipelineBindPoint;
- VkIndirectCommandsLayoutUsageFlagsNVX flags;
- uint32_t tokenCount;
- const VkIndirectCommandsLayoutTokenNVX* pTokens;
- safe_VkIndirectCommandsLayoutCreateInfoNVX(const VkIndirectCommandsLayoutCreateInfoNVX* in_struct);
- safe_VkIndirectCommandsLayoutCreateInfoNVX(const safe_VkIndirectCommandsLayoutCreateInfoNVX& src);
- safe_VkIndirectCommandsLayoutCreateInfoNVX& operator=(const safe_VkIndirectCommandsLayoutCreateInfoNVX& src);
- safe_VkIndirectCommandsLayoutCreateInfoNVX();
- ~safe_VkIndirectCommandsLayoutCreateInfoNVX();
- void initialize(const VkIndirectCommandsLayoutCreateInfoNVX* in_struct);
- void initialize(const safe_VkIndirectCommandsLayoutCreateInfoNVX* src);
- VkIndirectCommandsLayoutCreateInfoNVX *ptr() { return reinterpret_cast<VkIndirectCommandsLayoutCreateInfoNVX *>(this); }
- VkIndirectCommandsLayoutCreateInfoNVX const *ptr() const { return reinterpret_cast<VkIndirectCommandsLayoutCreateInfoNVX const *>(this); }
-};
-
-struct safe_VkCmdProcessCommandsInfoNVX {
- VkStructureType sType;
- const void* pNext;
- VkObjectTableNVX objectTable;
- VkIndirectCommandsLayoutNVX indirectCommandsLayout;
- uint32_t indirectCommandsTokenCount;
- VkIndirectCommandsTokenNVX* pIndirectCommandsTokens;
- uint32_t maxSequencesCount;
- VkCommandBuffer targetCommandBuffer;
- VkBuffer sequencesCountBuffer;
- VkDeviceSize sequencesCountOffset;
- VkBuffer sequencesIndexBuffer;
- VkDeviceSize sequencesIndexOffset;
- safe_VkCmdProcessCommandsInfoNVX(const VkCmdProcessCommandsInfoNVX* in_struct);
- safe_VkCmdProcessCommandsInfoNVX(const safe_VkCmdProcessCommandsInfoNVX& src);
- safe_VkCmdProcessCommandsInfoNVX& operator=(const safe_VkCmdProcessCommandsInfoNVX& src);
- safe_VkCmdProcessCommandsInfoNVX();
- ~safe_VkCmdProcessCommandsInfoNVX();
- void initialize(const VkCmdProcessCommandsInfoNVX* in_struct);
- void initialize(const safe_VkCmdProcessCommandsInfoNVX* src);
- VkCmdProcessCommandsInfoNVX *ptr() { return reinterpret_cast<VkCmdProcessCommandsInfoNVX *>(this); }
- VkCmdProcessCommandsInfoNVX const *ptr() const { return reinterpret_cast<VkCmdProcessCommandsInfoNVX const *>(this); }
-};
-
-struct safe_VkCmdReserveSpaceForCommandsInfoNVX {
- VkStructureType sType;
- const void* pNext;
- VkObjectTableNVX objectTable;
- VkIndirectCommandsLayoutNVX indirectCommandsLayout;
- uint32_t maxSequencesCount;
- safe_VkCmdReserveSpaceForCommandsInfoNVX(const VkCmdReserveSpaceForCommandsInfoNVX* in_struct);
- safe_VkCmdReserveSpaceForCommandsInfoNVX(const safe_VkCmdReserveSpaceForCommandsInfoNVX& src);
- safe_VkCmdReserveSpaceForCommandsInfoNVX& operator=(const safe_VkCmdReserveSpaceForCommandsInfoNVX& src);
- safe_VkCmdReserveSpaceForCommandsInfoNVX();
- ~safe_VkCmdReserveSpaceForCommandsInfoNVX();
- void initialize(const VkCmdReserveSpaceForCommandsInfoNVX* in_struct);
- void initialize(const safe_VkCmdReserveSpaceForCommandsInfoNVX* src);
- VkCmdReserveSpaceForCommandsInfoNVX *ptr() { return reinterpret_cast<VkCmdReserveSpaceForCommandsInfoNVX *>(this); }
- VkCmdReserveSpaceForCommandsInfoNVX const *ptr() const { return reinterpret_cast<VkCmdReserveSpaceForCommandsInfoNVX const *>(this); }
-};
-
-struct safe_VkObjectTableCreateInfoNVX {
- VkStructureType sType;
- const void* pNext;
- uint32_t objectCount;
- const VkObjectEntryTypeNVX* pObjectEntryTypes;
- const uint32_t* pObjectEntryCounts;
- const VkObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags;
- uint32_t maxUniformBuffersPerDescriptor;
- uint32_t maxStorageBuffersPerDescriptor;
- uint32_t maxStorageImagesPerDescriptor;
- uint32_t maxSampledImagesPerDescriptor;
- uint32_t maxPipelineLayouts;
- safe_VkObjectTableCreateInfoNVX(const VkObjectTableCreateInfoNVX* in_struct);
- safe_VkObjectTableCreateInfoNVX(const safe_VkObjectTableCreateInfoNVX& src);
- safe_VkObjectTableCreateInfoNVX& operator=(const safe_VkObjectTableCreateInfoNVX& src);
- safe_VkObjectTableCreateInfoNVX();
- ~safe_VkObjectTableCreateInfoNVX();
- void initialize(const VkObjectTableCreateInfoNVX* in_struct);
- void initialize(const safe_VkObjectTableCreateInfoNVX* src);
- VkObjectTableCreateInfoNVX *ptr() { return reinterpret_cast<VkObjectTableCreateInfoNVX *>(this); }
- VkObjectTableCreateInfoNVX const *ptr() const { return reinterpret_cast<VkObjectTableCreateInfoNVX const *>(this); }
-};
-
-struct safe_VkPipelineViewportWScalingStateCreateInfoNV {
- VkStructureType sType;
- const void* pNext;
- VkBool32 viewportWScalingEnable;
- uint32_t viewportCount;
- const VkViewportWScalingNV* pViewportWScalings;
- safe_VkPipelineViewportWScalingStateCreateInfoNV(const VkPipelineViewportWScalingStateCreateInfoNV* in_struct);
- safe_VkPipelineViewportWScalingStateCreateInfoNV(const safe_VkPipelineViewportWScalingStateCreateInfoNV& src);
- safe_VkPipelineViewportWScalingStateCreateInfoNV& operator=(const safe_VkPipelineViewportWScalingStateCreateInfoNV& src);
- safe_VkPipelineViewportWScalingStateCreateInfoNV();
- ~safe_VkPipelineViewportWScalingStateCreateInfoNV();
- void initialize(const VkPipelineViewportWScalingStateCreateInfoNV* in_struct);
- void initialize(const safe_VkPipelineViewportWScalingStateCreateInfoNV* src);
- VkPipelineViewportWScalingStateCreateInfoNV *ptr() { return reinterpret_cast<VkPipelineViewportWScalingStateCreateInfoNV *>(this); }
- VkPipelineViewportWScalingStateCreateInfoNV const *ptr() const { return reinterpret_cast<VkPipelineViewportWScalingStateCreateInfoNV const *>(this); }
-};
-
-struct safe_VkSurfaceCapabilities2EXT {
- VkStructureType sType;
- void* pNext;
- uint32_t minImageCount;
- uint32_t maxImageCount;
- VkExtent2D currentExtent;
- VkExtent2D minImageExtent;
- VkExtent2D maxImageExtent;
- uint32_t maxImageArrayLayers;
- VkSurfaceTransformFlagsKHR supportedTransforms;
- VkSurfaceTransformFlagBitsKHR currentTransform;
- VkCompositeAlphaFlagsKHR supportedCompositeAlpha;
- VkImageUsageFlags supportedUsageFlags;
- VkSurfaceCounterFlagsEXT supportedSurfaceCounters;
- safe_VkSurfaceCapabilities2EXT(const VkSurfaceCapabilities2EXT* in_struct);
- safe_VkSurfaceCapabilities2EXT(const safe_VkSurfaceCapabilities2EXT& src);
- safe_VkSurfaceCapabilities2EXT& operator=(const safe_VkSurfaceCapabilities2EXT& src);
- safe_VkSurfaceCapabilities2EXT();
- ~safe_VkSurfaceCapabilities2EXT();
- void initialize(const VkSurfaceCapabilities2EXT* in_struct);
- void initialize(const safe_VkSurfaceCapabilities2EXT* src);
- VkSurfaceCapabilities2EXT *ptr() { return reinterpret_cast<VkSurfaceCapabilities2EXT *>(this); }
- VkSurfaceCapabilities2EXT const *ptr() const { return reinterpret_cast<VkSurfaceCapabilities2EXT const *>(this); }
-};
-
-struct safe_VkDisplayPowerInfoEXT {
- VkStructureType sType;
- const void* pNext;
- VkDisplayPowerStateEXT powerState;
- safe_VkDisplayPowerInfoEXT(const VkDisplayPowerInfoEXT* in_struct);
- safe_VkDisplayPowerInfoEXT(const safe_VkDisplayPowerInfoEXT& src);
- safe_VkDisplayPowerInfoEXT& operator=(const safe_VkDisplayPowerInfoEXT& src);
- safe_VkDisplayPowerInfoEXT();
- ~safe_VkDisplayPowerInfoEXT();
- void initialize(const VkDisplayPowerInfoEXT* in_struct);
- void initialize(const safe_VkDisplayPowerInfoEXT* src);
- VkDisplayPowerInfoEXT *ptr() { return reinterpret_cast<VkDisplayPowerInfoEXT *>(this); }
- VkDisplayPowerInfoEXT const *ptr() const { return reinterpret_cast<VkDisplayPowerInfoEXT const *>(this); }
-};
-
-struct safe_VkDeviceEventInfoEXT {
- VkStructureType sType;
- const void* pNext;
- VkDeviceEventTypeEXT deviceEvent;
- safe_VkDeviceEventInfoEXT(const VkDeviceEventInfoEXT* in_struct);
- safe_VkDeviceEventInfoEXT(const safe_VkDeviceEventInfoEXT& src);
- safe_VkDeviceEventInfoEXT& operator=(const safe_VkDeviceEventInfoEXT& src);
- safe_VkDeviceEventInfoEXT();
- ~safe_VkDeviceEventInfoEXT();
- void initialize(const VkDeviceEventInfoEXT* in_struct);
- void initialize(const safe_VkDeviceEventInfoEXT* src);
- VkDeviceEventInfoEXT *ptr() { return reinterpret_cast<VkDeviceEventInfoEXT *>(this); }
- VkDeviceEventInfoEXT const *ptr() const { return reinterpret_cast<VkDeviceEventInfoEXT const *>(this); }
-};
-
-struct safe_VkDisplayEventInfoEXT {
- VkStructureType sType;
- const void* pNext;
- VkDisplayEventTypeEXT displayEvent;
- safe_VkDisplayEventInfoEXT(const VkDisplayEventInfoEXT* in_struct);
- safe_VkDisplayEventInfoEXT(const safe_VkDisplayEventInfoEXT& src);
- safe_VkDisplayEventInfoEXT& operator=(const safe_VkDisplayEventInfoEXT& src);
- safe_VkDisplayEventInfoEXT();
- ~safe_VkDisplayEventInfoEXT();
- void initialize(const VkDisplayEventInfoEXT* in_struct);
- void initialize(const safe_VkDisplayEventInfoEXT* src);
- VkDisplayEventInfoEXT *ptr() { return reinterpret_cast<VkDisplayEventInfoEXT *>(this); }
- VkDisplayEventInfoEXT const *ptr() const { return reinterpret_cast<VkDisplayEventInfoEXT const *>(this); }
-};
-
-struct safe_VkSwapchainCounterCreateInfoEXT {
- VkStructureType sType;
- const void* pNext;
- VkSurfaceCounterFlagsEXT surfaceCounters;
- safe_VkSwapchainCounterCreateInfoEXT(const VkSwapchainCounterCreateInfoEXT* in_struct);
- safe_VkSwapchainCounterCreateInfoEXT(const safe_VkSwapchainCounterCreateInfoEXT& src);
- safe_VkSwapchainCounterCreateInfoEXT& operator=(const safe_VkSwapchainCounterCreateInfoEXT& src);
- safe_VkSwapchainCounterCreateInfoEXT();
- ~safe_VkSwapchainCounterCreateInfoEXT();
- void initialize(const VkSwapchainCounterCreateInfoEXT* in_struct);
- void initialize(const safe_VkSwapchainCounterCreateInfoEXT* src);
- VkSwapchainCounterCreateInfoEXT *ptr() { return reinterpret_cast<VkSwapchainCounterCreateInfoEXT *>(this); }
- VkSwapchainCounterCreateInfoEXT const *ptr() const { return reinterpret_cast<VkSwapchainCounterCreateInfoEXT const *>(this); }
-};
-
-struct safe_VkPresentTimesInfoGOOGLE {
- VkStructureType sType;
- const void* pNext;
- uint32_t swapchainCount;
- const VkPresentTimeGOOGLE* pTimes;
- safe_VkPresentTimesInfoGOOGLE(const VkPresentTimesInfoGOOGLE* in_struct);
- safe_VkPresentTimesInfoGOOGLE(const safe_VkPresentTimesInfoGOOGLE& src);
- safe_VkPresentTimesInfoGOOGLE& operator=(const safe_VkPresentTimesInfoGOOGLE& src);
- safe_VkPresentTimesInfoGOOGLE();
- ~safe_VkPresentTimesInfoGOOGLE();
- void initialize(const VkPresentTimesInfoGOOGLE* in_struct);
- void initialize(const safe_VkPresentTimesInfoGOOGLE* src);
- VkPresentTimesInfoGOOGLE *ptr() { return reinterpret_cast<VkPresentTimesInfoGOOGLE *>(this); }
- VkPresentTimesInfoGOOGLE const *ptr() const { return reinterpret_cast<VkPresentTimesInfoGOOGLE const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX {
- VkStructureType sType;
- void* pNext;
- VkBool32 perViewPositionAllComponents;
- safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX(const VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX* in_struct);
- safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX(const safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX& src);
- safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX& operator=(const safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX& src);
- safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX();
- ~safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX();
- void initialize(const VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX* in_struct);
- void initialize(const safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX* src);
- VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX *ptr() { return reinterpret_cast<VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX *>(this); }
- VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const *ptr() const { return reinterpret_cast<VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const *>(this); }
-};
-
-struct safe_VkPipelineViewportSwizzleStateCreateInfoNV {
- VkStructureType sType;
- const void* pNext;
- VkPipelineViewportSwizzleStateCreateFlagsNV flags;
- uint32_t viewportCount;
- const VkViewportSwizzleNV* pViewportSwizzles;
- safe_VkPipelineViewportSwizzleStateCreateInfoNV(const VkPipelineViewportSwizzleStateCreateInfoNV* in_struct);
- safe_VkPipelineViewportSwizzleStateCreateInfoNV(const safe_VkPipelineViewportSwizzleStateCreateInfoNV& src);
- safe_VkPipelineViewportSwizzleStateCreateInfoNV& operator=(const safe_VkPipelineViewportSwizzleStateCreateInfoNV& src);
- safe_VkPipelineViewportSwizzleStateCreateInfoNV();
- ~safe_VkPipelineViewportSwizzleStateCreateInfoNV();
- void initialize(const VkPipelineViewportSwizzleStateCreateInfoNV* in_struct);
- void initialize(const safe_VkPipelineViewportSwizzleStateCreateInfoNV* src);
- VkPipelineViewportSwizzleStateCreateInfoNV *ptr() { return reinterpret_cast<VkPipelineViewportSwizzleStateCreateInfoNV *>(this); }
- VkPipelineViewportSwizzleStateCreateInfoNV const *ptr() const { return reinterpret_cast<VkPipelineViewportSwizzleStateCreateInfoNV const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT {
- VkStructureType sType;
- void* pNext;
- uint32_t maxDiscardRectangles;
- safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT(const VkPhysicalDeviceDiscardRectanglePropertiesEXT* in_struct);
- safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT(const safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT& src);
- safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT& operator=(const safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT& src);
- safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT();
- ~safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT();
- void initialize(const VkPhysicalDeviceDiscardRectanglePropertiesEXT* in_struct);
- void initialize(const safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT* src);
- VkPhysicalDeviceDiscardRectanglePropertiesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceDiscardRectanglePropertiesEXT *>(this); }
- VkPhysicalDeviceDiscardRectanglePropertiesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceDiscardRectanglePropertiesEXT const *>(this); }
-};
-
-struct safe_VkPipelineDiscardRectangleStateCreateInfoEXT {
- VkStructureType sType;
- const void* pNext;
- VkPipelineDiscardRectangleStateCreateFlagsEXT flags;
- VkDiscardRectangleModeEXT discardRectangleMode;
- uint32_t discardRectangleCount;
- const VkRect2D* pDiscardRectangles;
- safe_VkPipelineDiscardRectangleStateCreateInfoEXT(const VkPipelineDiscardRectangleStateCreateInfoEXT* in_struct);
- safe_VkPipelineDiscardRectangleStateCreateInfoEXT(const safe_VkPipelineDiscardRectangleStateCreateInfoEXT& src);
- safe_VkPipelineDiscardRectangleStateCreateInfoEXT& operator=(const safe_VkPipelineDiscardRectangleStateCreateInfoEXT& src);
- safe_VkPipelineDiscardRectangleStateCreateInfoEXT();
- ~safe_VkPipelineDiscardRectangleStateCreateInfoEXT();
- void initialize(const VkPipelineDiscardRectangleStateCreateInfoEXT* in_struct);
- void initialize(const safe_VkPipelineDiscardRectangleStateCreateInfoEXT* src);
- VkPipelineDiscardRectangleStateCreateInfoEXT *ptr() { return reinterpret_cast<VkPipelineDiscardRectangleStateCreateInfoEXT *>(this); }
- VkPipelineDiscardRectangleStateCreateInfoEXT const *ptr() const { return reinterpret_cast<VkPipelineDiscardRectangleStateCreateInfoEXT const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT {
- VkStructureType sType;
- void* pNext;
- float primitiveOverestimationSize;
- float maxExtraPrimitiveOverestimationSize;
- float extraPrimitiveOverestimationSizeGranularity;
- VkBool32 primitiveUnderestimation;
- VkBool32 conservativePointAndLineRasterization;
- VkBool32 degenerateTrianglesRasterized;
- VkBool32 degenerateLinesRasterized;
- VkBool32 fullyCoveredFragmentShaderInputVariable;
- VkBool32 conservativeRasterizationPostDepthCoverage;
- safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT(const VkPhysicalDeviceConservativeRasterizationPropertiesEXT* in_struct);
- safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT(const safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT& src);
- safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT& operator=(const safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT& src);
- safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT();
- ~safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT();
- void initialize(const VkPhysicalDeviceConservativeRasterizationPropertiesEXT* in_struct);
- void initialize(const safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT* src);
- VkPhysicalDeviceConservativeRasterizationPropertiesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceConservativeRasterizationPropertiesEXT *>(this); }
- VkPhysicalDeviceConservativeRasterizationPropertiesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceConservativeRasterizationPropertiesEXT const *>(this); }
-};
-
-struct safe_VkPipelineRasterizationConservativeStateCreateInfoEXT {
- VkStructureType sType;
- const void* pNext;
- VkPipelineRasterizationConservativeStateCreateFlagsEXT flags;
- VkConservativeRasterizationModeEXT conservativeRasterizationMode;
- float extraPrimitiveOverestimationSize;
- safe_VkPipelineRasterizationConservativeStateCreateInfoEXT(const VkPipelineRasterizationConservativeStateCreateInfoEXT* in_struct);
- safe_VkPipelineRasterizationConservativeStateCreateInfoEXT(const safe_VkPipelineRasterizationConservativeStateCreateInfoEXT& src);
- safe_VkPipelineRasterizationConservativeStateCreateInfoEXT& operator=(const safe_VkPipelineRasterizationConservativeStateCreateInfoEXT& src);
- safe_VkPipelineRasterizationConservativeStateCreateInfoEXT();
- ~safe_VkPipelineRasterizationConservativeStateCreateInfoEXT();
- void initialize(const VkPipelineRasterizationConservativeStateCreateInfoEXT* in_struct);
- void initialize(const safe_VkPipelineRasterizationConservativeStateCreateInfoEXT* src);
- VkPipelineRasterizationConservativeStateCreateInfoEXT *ptr() { return reinterpret_cast<VkPipelineRasterizationConservativeStateCreateInfoEXT *>(this); }
- VkPipelineRasterizationConservativeStateCreateInfoEXT const *ptr() const { return reinterpret_cast<VkPipelineRasterizationConservativeStateCreateInfoEXT const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT {
- VkStructureType sType;
- void* pNext;
- VkBool32 depthClipEnable;
- safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT(const VkPhysicalDeviceDepthClipEnableFeaturesEXT* in_struct);
- safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT(const safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT& src);
- safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT& operator=(const safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT& src);
- safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT();
- ~safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT();
- void initialize(const VkPhysicalDeviceDepthClipEnableFeaturesEXT* in_struct);
- void initialize(const safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT* src);
- VkPhysicalDeviceDepthClipEnableFeaturesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceDepthClipEnableFeaturesEXT *>(this); }
- VkPhysicalDeviceDepthClipEnableFeaturesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceDepthClipEnableFeaturesEXT const *>(this); }
-};
-
-struct safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT {
- VkStructureType sType;
- const void* pNext;
- VkPipelineRasterizationDepthClipStateCreateFlagsEXT flags;
- VkBool32 depthClipEnable;
- safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT(const VkPipelineRasterizationDepthClipStateCreateInfoEXT* in_struct);
- safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT(const safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT& src);
- safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT& operator=(const safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT& src);
- safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT();
- ~safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT();
- void initialize(const VkPipelineRasterizationDepthClipStateCreateInfoEXT* in_struct);
- void initialize(const safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT* src);
- VkPipelineRasterizationDepthClipStateCreateInfoEXT *ptr() { return reinterpret_cast<VkPipelineRasterizationDepthClipStateCreateInfoEXT *>(this); }
- VkPipelineRasterizationDepthClipStateCreateInfoEXT const *ptr() const { return reinterpret_cast<VkPipelineRasterizationDepthClipStateCreateInfoEXT const *>(this); }
-};
-
-struct safe_VkHdrMetadataEXT {
- VkStructureType sType;
- const void* pNext;
- VkXYColorEXT displayPrimaryRed;
- VkXYColorEXT displayPrimaryGreen;
- VkXYColorEXT displayPrimaryBlue;
- VkXYColorEXT whitePoint;
- float maxLuminance;
- float minLuminance;
- float maxContentLightLevel;
- float maxFrameAverageLightLevel;
- safe_VkHdrMetadataEXT(const VkHdrMetadataEXT* in_struct);
- safe_VkHdrMetadataEXT(const safe_VkHdrMetadataEXT& src);
- safe_VkHdrMetadataEXT& operator=(const safe_VkHdrMetadataEXT& src);
- safe_VkHdrMetadataEXT();
- ~safe_VkHdrMetadataEXT();
- void initialize(const VkHdrMetadataEXT* in_struct);
- void initialize(const safe_VkHdrMetadataEXT* src);
- VkHdrMetadataEXT *ptr() { return reinterpret_cast<VkHdrMetadataEXT *>(this); }
- VkHdrMetadataEXT const *ptr() const { return reinterpret_cast<VkHdrMetadataEXT const *>(this); }
-};
-
-#ifdef VK_USE_PLATFORM_IOS_MVK
-struct safe_VkIOSSurfaceCreateInfoMVK {
- VkStructureType sType;
- const void* pNext;
- VkIOSSurfaceCreateFlagsMVK flags;
- const void* pView;
- safe_VkIOSSurfaceCreateInfoMVK(const VkIOSSurfaceCreateInfoMVK* in_struct);
- safe_VkIOSSurfaceCreateInfoMVK(const safe_VkIOSSurfaceCreateInfoMVK& src);
- safe_VkIOSSurfaceCreateInfoMVK& operator=(const safe_VkIOSSurfaceCreateInfoMVK& src);
- safe_VkIOSSurfaceCreateInfoMVK();
- ~safe_VkIOSSurfaceCreateInfoMVK();
- void initialize(const VkIOSSurfaceCreateInfoMVK* in_struct);
- void initialize(const safe_VkIOSSurfaceCreateInfoMVK* src);
- VkIOSSurfaceCreateInfoMVK *ptr() { return reinterpret_cast<VkIOSSurfaceCreateInfoMVK *>(this); }
- VkIOSSurfaceCreateInfoMVK const *ptr() const { return reinterpret_cast<VkIOSSurfaceCreateInfoMVK const *>(this); }
-};
-#endif // VK_USE_PLATFORM_IOS_MVK
-
-#ifdef VK_USE_PLATFORM_MACOS_MVK
-struct safe_VkMacOSSurfaceCreateInfoMVK {
- VkStructureType sType;
- const void* pNext;
- VkMacOSSurfaceCreateFlagsMVK flags;
- const void* pView;
- safe_VkMacOSSurfaceCreateInfoMVK(const VkMacOSSurfaceCreateInfoMVK* in_struct);
- safe_VkMacOSSurfaceCreateInfoMVK(const safe_VkMacOSSurfaceCreateInfoMVK& src);
- safe_VkMacOSSurfaceCreateInfoMVK& operator=(const safe_VkMacOSSurfaceCreateInfoMVK& src);
- safe_VkMacOSSurfaceCreateInfoMVK();
- ~safe_VkMacOSSurfaceCreateInfoMVK();
- void initialize(const VkMacOSSurfaceCreateInfoMVK* in_struct);
- void initialize(const safe_VkMacOSSurfaceCreateInfoMVK* src);
- VkMacOSSurfaceCreateInfoMVK *ptr() { return reinterpret_cast<VkMacOSSurfaceCreateInfoMVK *>(this); }
- VkMacOSSurfaceCreateInfoMVK const *ptr() const { return reinterpret_cast<VkMacOSSurfaceCreateInfoMVK const *>(this); }
-};
-#endif // VK_USE_PLATFORM_MACOS_MVK
-
-struct safe_VkDebugUtilsObjectNameInfoEXT {
- VkStructureType sType;
- const void* pNext;
- VkObjectType objectType;
- uint64_t objectHandle;
- const char* pObjectName;
- safe_VkDebugUtilsObjectNameInfoEXT(const VkDebugUtilsObjectNameInfoEXT* in_struct);
- safe_VkDebugUtilsObjectNameInfoEXT(const safe_VkDebugUtilsObjectNameInfoEXT& src);
- safe_VkDebugUtilsObjectNameInfoEXT& operator=(const safe_VkDebugUtilsObjectNameInfoEXT& src);
- safe_VkDebugUtilsObjectNameInfoEXT();
- ~safe_VkDebugUtilsObjectNameInfoEXT();
- void initialize(const VkDebugUtilsObjectNameInfoEXT* in_struct);
- void initialize(const safe_VkDebugUtilsObjectNameInfoEXT* src);
- VkDebugUtilsObjectNameInfoEXT *ptr() { return reinterpret_cast<VkDebugUtilsObjectNameInfoEXT *>(this); }
- VkDebugUtilsObjectNameInfoEXT const *ptr() const { return reinterpret_cast<VkDebugUtilsObjectNameInfoEXT const *>(this); }
-};
-
-struct safe_VkDebugUtilsObjectTagInfoEXT {
- VkStructureType sType;
- const void* pNext;
- VkObjectType objectType;
- uint64_t objectHandle;
- uint64_t tagName;
- size_t tagSize;
- const void* pTag;
- safe_VkDebugUtilsObjectTagInfoEXT(const VkDebugUtilsObjectTagInfoEXT* in_struct);
- safe_VkDebugUtilsObjectTagInfoEXT(const safe_VkDebugUtilsObjectTagInfoEXT& src);
- safe_VkDebugUtilsObjectTagInfoEXT& operator=(const safe_VkDebugUtilsObjectTagInfoEXT& src);
- safe_VkDebugUtilsObjectTagInfoEXT();
- ~safe_VkDebugUtilsObjectTagInfoEXT();
- void initialize(const VkDebugUtilsObjectTagInfoEXT* in_struct);
- void initialize(const safe_VkDebugUtilsObjectTagInfoEXT* src);
- VkDebugUtilsObjectTagInfoEXT *ptr() { return reinterpret_cast<VkDebugUtilsObjectTagInfoEXT *>(this); }
- VkDebugUtilsObjectTagInfoEXT const *ptr() const { return reinterpret_cast<VkDebugUtilsObjectTagInfoEXT const *>(this); }
-};
-
-struct safe_VkDebugUtilsLabelEXT {
- VkStructureType sType;
- const void* pNext;
- const char* pLabelName;
- float color[4];
- safe_VkDebugUtilsLabelEXT(const VkDebugUtilsLabelEXT* in_struct);
- safe_VkDebugUtilsLabelEXT(const safe_VkDebugUtilsLabelEXT& src);
- safe_VkDebugUtilsLabelEXT& operator=(const safe_VkDebugUtilsLabelEXT& src);
- safe_VkDebugUtilsLabelEXT();
- ~safe_VkDebugUtilsLabelEXT();
- void initialize(const VkDebugUtilsLabelEXT* in_struct);
- void initialize(const safe_VkDebugUtilsLabelEXT* src);
- VkDebugUtilsLabelEXT *ptr() { return reinterpret_cast<VkDebugUtilsLabelEXT *>(this); }
- VkDebugUtilsLabelEXT const *ptr() const { return reinterpret_cast<VkDebugUtilsLabelEXT const *>(this); }
-};
-
-struct safe_VkDebugUtilsMessengerCallbackDataEXT {
- VkStructureType sType;
- const void* pNext;
- VkDebugUtilsMessengerCallbackDataFlagsEXT flags;
- const char* pMessageIdName;
- int32_t messageIdNumber;
- const char* pMessage;
- uint32_t queueLabelCount;
- safe_VkDebugUtilsLabelEXT* pQueueLabels;
- uint32_t cmdBufLabelCount;
- safe_VkDebugUtilsLabelEXT* pCmdBufLabels;
- uint32_t objectCount;
- safe_VkDebugUtilsObjectNameInfoEXT* pObjects;
- safe_VkDebugUtilsMessengerCallbackDataEXT(const VkDebugUtilsMessengerCallbackDataEXT* in_struct);
- safe_VkDebugUtilsMessengerCallbackDataEXT(const safe_VkDebugUtilsMessengerCallbackDataEXT& src);
- safe_VkDebugUtilsMessengerCallbackDataEXT& operator=(const safe_VkDebugUtilsMessengerCallbackDataEXT& src);
- safe_VkDebugUtilsMessengerCallbackDataEXT();
- ~safe_VkDebugUtilsMessengerCallbackDataEXT();
- void initialize(const VkDebugUtilsMessengerCallbackDataEXT* in_struct);
- void initialize(const safe_VkDebugUtilsMessengerCallbackDataEXT* src);
- VkDebugUtilsMessengerCallbackDataEXT *ptr() { return reinterpret_cast<VkDebugUtilsMessengerCallbackDataEXT *>(this); }
- VkDebugUtilsMessengerCallbackDataEXT const *ptr() const { return reinterpret_cast<VkDebugUtilsMessengerCallbackDataEXT const *>(this); }
-};
-
-struct safe_VkDebugUtilsMessengerCreateInfoEXT {
- VkStructureType sType;
- const void* pNext;
- VkDebugUtilsMessengerCreateFlagsEXT flags;
- VkDebugUtilsMessageSeverityFlagsEXT messageSeverity;
- VkDebugUtilsMessageTypeFlagsEXT messageType;
- PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback;
- void* pUserData;
- safe_VkDebugUtilsMessengerCreateInfoEXT(const VkDebugUtilsMessengerCreateInfoEXT* in_struct);
- safe_VkDebugUtilsMessengerCreateInfoEXT(const safe_VkDebugUtilsMessengerCreateInfoEXT& src);
- safe_VkDebugUtilsMessengerCreateInfoEXT& operator=(const safe_VkDebugUtilsMessengerCreateInfoEXT& src);
- safe_VkDebugUtilsMessengerCreateInfoEXT();
- ~safe_VkDebugUtilsMessengerCreateInfoEXT();
- void initialize(const VkDebugUtilsMessengerCreateInfoEXT* in_struct);
- void initialize(const safe_VkDebugUtilsMessengerCreateInfoEXT* src);
- VkDebugUtilsMessengerCreateInfoEXT *ptr() { return reinterpret_cast<VkDebugUtilsMessengerCreateInfoEXT *>(this); }
- VkDebugUtilsMessengerCreateInfoEXT const *ptr() const { return reinterpret_cast<VkDebugUtilsMessengerCreateInfoEXT const *>(this); }
-};
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-struct safe_VkAndroidHardwareBufferUsageANDROID {
- VkStructureType sType;
- void* pNext;
- uint64_t androidHardwareBufferUsage;
- safe_VkAndroidHardwareBufferUsageANDROID(const VkAndroidHardwareBufferUsageANDROID* in_struct);
- safe_VkAndroidHardwareBufferUsageANDROID(const safe_VkAndroidHardwareBufferUsageANDROID& src);
- safe_VkAndroidHardwareBufferUsageANDROID& operator=(const safe_VkAndroidHardwareBufferUsageANDROID& src);
- safe_VkAndroidHardwareBufferUsageANDROID();
- ~safe_VkAndroidHardwareBufferUsageANDROID();
- void initialize(const VkAndroidHardwareBufferUsageANDROID* in_struct);
- void initialize(const safe_VkAndroidHardwareBufferUsageANDROID* src);
- VkAndroidHardwareBufferUsageANDROID *ptr() { return reinterpret_cast<VkAndroidHardwareBufferUsageANDROID *>(this); }
- VkAndroidHardwareBufferUsageANDROID const *ptr() const { return reinterpret_cast<VkAndroidHardwareBufferUsageANDROID const *>(this); }
-};
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-struct safe_VkAndroidHardwareBufferPropertiesANDROID {
- VkStructureType sType;
- void* pNext;
- VkDeviceSize allocationSize;
- uint32_t memoryTypeBits;
- safe_VkAndroidHardwareBufferPropertiesANDROID(const VkAndroidHardwareBufferPropertiesANDROID* in_struct);
- safe_VkAndroidHardwareBufferPropertiesANDROID(const safe_VkAndroidHardwareBufferPropertiesANDROID& src);
- safe_VkAndroidHardwareBufferPropertiesANDROID& operator=(const safe_VkAndroidHardwareBufferPropertiesANDROID& src);
- safe_VkAndroidHardwareBufferPropertiesANDROID();
- ~safe_VkAndroidHardwareBufferPropertiesANDROID();
- void initialize(const VkAndroidHardwareBufferPropertiesANDROID* in_struct);
- void initialize(const safe_VkAndroidHardwareBufferPropertiesANDROID* src);
- VkAndroidHardwareBufferPropertiesANDROID *ptr() { return reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>(this); }
- VkAndroidHardwareBufferPropertiesANDROID const *ptr() const { return reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID const *>(this); }
-};
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-struct safe_VkAndroidHardwareBufferFormatPropertiesANDROID {
- VkStructureType sType;
- void* pNext;
- VkFormat format;
- uint64_t externalFormat;
- VkFormatFeatureFlags formatFeatures;
- VkComponentMapping samplerYcbcrConversionComponents;
- VkSamplerYcbcrModelConversion suggestedYcbcrModel;
- VkSamplerYcbcrRange suggestedYcbcrRange;
- VkChromaLocation suggestedXChromaOffset;
- VkChromaLocation suggestedYChromaOffset;
- safe_VkAndroidHardwareBufferFormatPropertiesANDROID(const VkAndroidHardwareBufferFormatPropertiesANDROID* in_struct);
- safe_VkAndroidHardwareBufferFormatPropertiesANDROID(const safe_VkAndroidHardwareBufferFormatPropertiesANDROID& src);
- safe_VkAndroidHardwareBufferFormatPropertiesANDROID& operator=(const safe_VkAndroidHardwareBufferFormatPropertiesANDROID& src);
- safe_VkAndroidHardwareBufferFormatPropertiesANDROID();
- ~safe_VkAndroidHardwareBufferFormatPropertiesANDROID();
- void initialize(const VkAndroidHardwareBufferFormatPropertiesANDROID* in_struct);
- void initialize(const safe_VkAndroidHardwareBufferFormatPropertiesANDROID* src);
- VkAndroidHardwareBufferFormatPropertiesANDROID *ptr() { return reinterpret_cast<VkAndroidHardwareBufferFormatPropertiesANDROID *>(this); }
- VkAndroidHardwareBufferFormatPropertiesANDROID const *ptr() const { return reinterpret_cast<VkAndroidHardwareBufferFormatPropertiesANDROID const *>(this); }
-};
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-struct safe_VkImportAndroidHardwareBufferInfoANDROID {
- VkStructureType sType;
- const void* pNext;
- struct AHardwareBuffer* buffer;
- safe_VkImportAndroidHardwareBufferInfoANDROID(const VkImportAndroidHardwareBufferInfoANDROID* in_struct);
- safe_VkImportAndroidHardwareBufferInfoANDROID(const safe_VkImportAndroidHardwareBufferInfoANDROID& src);
- safe_VkImportAndroidHardwareBufferInfoANDROID& operator=(const safe_VkImportAndroidHardwareBufferInfoANDROID& src);
- safe_VkImportAndroidHardwareBufferInfoANDROID();
- ~safe_VkImportAndroidHardwareBufferInfoANDROID();
- void initialize(const VkImportAndroidHardwareBufferInfoANDROID* in_struct);
- void initialize(const safe_VkImportAndroidHardwareBufferInfoANDROID* src);
- VkImportAndroidHardwareBufferInfoANDROID *ptr() { return reinterpret_cast<VkImportAndroidHardwareBufferInfoANDROID *>(this); }
- VkImportAndroidHardwareBufferInfoANDROID const *ptr() const { return reinterpret_cast<VkImportAndroidHardwareBufferInfoANDROID const *>(this); }
-};
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-struct safe_VkMemoryGetAndroidHardwareBufferInfoANDROID {
- VkStructureType sType;
- const void* pNext;
- VkDeviceMemory memory;
- safe_VkMemoryGetAndroidHardwareBufferInfoANDROID(const VkMemoryGetAndroidHardwareBufferInfoANDROID* in_struct);
- safe_VkMemoryGetAndroidHardwareBufferInfoANDROID(const safe_VkMemoryGetAndroidHardwareBufferInfoANDROID& src);
- safe_VkMemoryGetAndroidHardwareBufferInfoANDROID& operator=(const safe_VkMemoryGetAndroidHardwareBufferInfoANDROID& src);
- safe_VkMemoryGetAndroidHardwareBufferInfoANDROID();
- ~safe_VkMemoryGetAndroidHardwareBufferInfoANDROID();
- void initialize(const VkMemoryGetAndroidHardwareBufferInfoANDROID* in_struct);
- void initialize(const safe_VkMemoryGetAndroidHardwareBufferInfoANDROID* src);
- VkMemoryGetAndroidHardwareBufferInfoANDROID *ptr() { return reinterpret_cast<VkMemoryGetAndroidHardwareBufferInfoANDROID *>(this); }
- VkMemoryGetAndroidHardwareBufferInfoANDROID const *ptr() const { return reinterpret_cast<VkMemoryGetAndroidHardwareBufferInfoANDROID const *>(this); }
-};
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-struct safe_VkExternalFormatANDROID {
- VkStructureType sType;
- void* pNext;
- uint64_t externalFormat;
- safe_VkExternalFormatANDROID(const VkExternalFormatANDROID* in_struct);
- safe_VkExternalFormatANDROID(const safe_VkExternalFormatANDROID& src);
- safe_VkExternalFormatANDROID& operator=(const safe_VkExternalFormatANDROID& src);
- safe_VkExternalFormatANDROID();
- ~safe_VkExternalFormatANDROID();
- void initialize(const VkExternalFormatANDROID* in_struct);
- void initialize(const safe_VkExternalFormatANDROID* src);
- VkExternalFormatANDROID *ptr() { return reinterpret_cast<VkExternalFormatANDROID *>(this); }
- VkExternalFormatANDROID const *ptr() const { return reinterpret_cast<VkExternalFormatANDROID const *>(this); }
-};
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-
-struct safe_VkSamplerReductionModeCreateInfoEXT {
- VkStructureType sType;
- const void* pNext;
- VkSamplerReductionModeEXT reductionMode;
- safe_VkSamplerReductionModeCreateInfoEXT(const VkSamplerReductionModeCreateInfoEXT* in_struct);
- safe_VkSamplerReductionModeCreateInfoEXT(const safe_VkSamplerReductionModeCreateInfoEXT& src);
- safe_VkSamplerReductionModeCreateInfoEXT& operator=(const safe_VkSamplerReductionModeCreateInfoEXT& src);
- safe_VkSamplerReductionModeCreateInfoEXT();
- ~safe_VkSamplerReductionModeCreateInfoEXT();
- void initialize(const VkSamplerReductionModeCreateInfoEXT* in_struct);
- void initialize(const safe_VkSamplerReductionModeCreateInfoEXT* src);
- VkSamplerReductionModeCreateInfoEXT *ptr() { return reinterpret_cast<VkSamplerReductionModeCreateInfoEXT *>(this); }
- VkSamplerReductionModeCreateInfoEXT const *ptr() const { return reinterpret_cast<VkSamplerReductionModeCreateInfoEXT const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT {
- VkStructureType sType;
- void* pNext;
- VkBool32 filterMinmaxSingleComponentFormats;
- VkBool32 filterMinmaxImageComponentMapping;
- safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT(const VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT* in_struct);
- safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT(const safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT& src);
- safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT& operator=(const safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT& src);
- safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT();
- ~safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT();
- void initialize(const VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT* in_struct);
- void initialize(const safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT* src);
- VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT *>(this); }
- VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT {
- VkStructureType sType;
- void* pNext;
- VkBool32 inlineUniformBlock;
- VkBool32 descriptorBindingInlineUniformBlockUpdateAfterBind;
- safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT(const VkPhysicalDeviceInlineUniformBlockFeaturesEXT* in_struct);
- safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT(const safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT& src);
- safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT& operator=(const safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT& src);
- safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT();
- ~safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT();
- void initialize(const VkPhysicalDeviceInlineUniformBlockFeaturesEXT* in_struct);
- void initialize(const safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT* src);
- VkPhysicalDeviceInlineUniformBlockFeaturesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceInlineUniformBlockFeaturesEXT *>(this); }
- VkPhysicalDeviceInlineUniformBlockFeaturesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceInlineUniformBlockFeaturesEXT const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT {
- VkStructureType sType;
- void* pNext;
- uint32_t maxInlineUniformBlockSize;
- uint32_t maxPerStageDescriptorInlineUniformBlocks;
- uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks;
- uint32_t maxDescriptorSetInlineUniformBlocks;
- uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks;
- safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT(const VkPhysicalDeviceInlineUniformBlockPropertiesEXT* in_struct);
- safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT(const safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT& src);
- safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT& operator=(const safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT& src);
- safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT();
- ~safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT();
- void initialize(const VkPhysicalDeviceInlineUniformBlockPropertiesEXT* in_struct);
- void initialize(const safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT* src);
- VkPhysicalDeviceInlineUniformBlockPropertiesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceInlineUniformBlockPropertiesEXT *>(this); }
- VkPhysicalDeviceInlineUniformBlockPropertiesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceInlineUniformBlockPropertiesEXT const *>(this); }
-};
-
-struct safe_VkWriteDescriptorSetInlineUniformBlockEXT {
- VkStructureType sType;
- const void* pNext;
- uint32_t dataSize;
- const void* pData;
- safe_VkWriteDescriptorSetInlineUniformBlockEXT(const VkWriteDescriptorSetInlineUniformBlockEXT* in_struct);
- safe_VkWriteDescriptorSetInlineUniformBlockEXT(const safe_VkWriteDescriptorSetInlineUniformBlockEXT& src);
- safe_VkWriteDescriptorSetInlineUniformBlockEXT& operator=(const safe_VkWriteDescriptorSetInlineUniformBlockEXT& src);
- safe_VkWriteDescriptorSetInlineUniformBlockEXT();
- ~safe_VkWriteDescriptorSetInlineUniformBlockEXT();
- void initialize(const VkWriteDescriptorSetInlineUniformBlockEXT* in_struct);
- void initialize(const safe_VkWriteDescriptorSetInlineUniformBlockEXT* src);
- VkWriteDescriptorSetInlineUniformBlockEXT *ptr() { return reinterpret_cast<VkWriteDescriptorSetInlineUniformBlockEXT *>(this); }
- VkWriteDescriptorSetInlineUniformBlockEXT const *ptr() const { return reinterpret_cast<VkWriteDescriptorSetInlineUniformBlockEXT const *>(this); }
-};
-
-struct safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT {
- VkStructureType sType;
- const void* pNext;
- uint32_t maxInlineUniformBlockBindings;
- safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT(const VkDescriptorPoolInlineUniformBlockCreateInfoEXT* in_struct);
- safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT(const safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT& src);
- safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT& operator=(const safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT& src);
- safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT();
- ~safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT();
- void initialize(const VkDescriptorPoolInlineUniformBlockCreateInfoEXT* in_struct);
- void initialize(const safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT* src);
- VkDescriptorPoolInlineUniformBlockCreateInfoEXT *ptr() { return reinterpret_cast<VkDescriptorPoolInlineUniformBlockCreateInfoEXT *>(this); }
- VkDescriptorPoolInlineUniformBlockCreateInfoEXT const *ptr() const { return reinterpret_cast<VkDescriptorPoolInlineUniformBlockCreateInfoEXT const *>(this); }
-};
-
-struct safe_VkSampleLocationsInfoEXT {
- VkStructureType sType;
- const void* pNext;
- VkSampleCountFlagBits sampleLocationsPerPixel;
- VkExtent2D sampleLocationGridSize;
- uint32_t sampleLocationsCount;
- const VkSampleLocationEXT* pSampleLocations;
- safe_VkSampleLocationsInfoEXT(const VkSampleLocationsInfoEXT* in_struct);
- safe_VkSampleLocationsInfoEXT(const safe_VkSampleLocationsInfoEXT& src);
- safe_VkSampleLocationsInfoEXT& operator=(const safe_VkSampleLocationsInfoEXT& src);
- safe_VkSampleLocationsInfoEXT();
- ~safe_VkSampleLocationsInfoEXT();
- void initialize(const VkSampleLocationsInfoEXT* in_struct);
- void initialize(const safe_VkSampleLocationsInfoEXT* src);
- VkSampleLocationsInfoEXT *ptr() { return reinterpret_cast<VkSampleLocationsInfoEXT *>(this); }
- VkSampleLocationsInfoEXT const *ptr() const { return reinterpret_cast<VkSampleLocationsInfoEXT const *>(this); }
-};
-
-struct safe_VkRenderPassSampleLocationsBeginInfoEXT {
- VkStructureType sType;
- const void* pNext;
- uint32_t attachmentInitialSampleLocationsCount;
- const VkAttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations;
- uint32_t postSubpassSampleLocationsCount;
- const VkSubpassSampleLocationsEXT* pPostSubpassSampleLocations;
- safe_VkRenderPassSampleLocationsBeginInfoEXT(const VkRenderPassSampleLocationsBeginInfoEXT* in_struct);
- safe_VkRenderPassSampleLocationsBeginInfoEXT(const safe_VkRenderPassSampleLocationsBeginInfoEXT& src);
- safe_VkRenderPassSampleLocationsBeginInfoEXT& operator=(const safe_VkRenderPassSampleLocationsBeginInfoEXT& src);
- safe_VkRenderPassSampleLocationsBeginInfoEXT();
- ~safe_VkRenderPassSampleLocationsBeginInfoEXT();
- void initialize(const VkRenderPassSampleLocationsBeginInfoEXT* in_struct);
- void initialize(const safe_VkRenderPassSampleLocationsBeginInfoEXT* src);
- VkRenderPassSampleLocationsBeginInfoEXT *ptr() { return reinterpret_cast<VkRenderPassSampleLocationsBeginInfoEXT *>(this); }
- VkRenderPassSampleLocationsBeginInfoEXT const *ptr() const { return reinterpret_cast<VkRenderPassSampleLocationsBeginInfoEXT const *>(this); }
-};
-
-struct safe_VkPipelineSampleLocationsStateCreateInfoEXT {
- VkStructureType sType;
- const void* pNext;
- VkBool32 sampleLocationsEnable;
- safe_VkSampleLocationsInfoEXT sampleLocationsInfo;
- safe_VkPipelineSampleLocationsStateCreateInfoEXT(const VkPipelineSampleLocationsStateCreateInfoEXT* in_struct);
- safe_VkPipelineSampleLocationsStateCreateInfoEXT(const safe_VkPipelineSampleLocationsStateCreateInfoEXT& src);
- safe_VkPipelineSampleLocationsStateCreateInfoEXT& operator=(const safe_VkPipelineSampleLocationsStateCreateInfoEXT& src);
- safe_VkPipelineSampleLocationsStateCreateInfoEXT();
- ~safe_VkPipelineSampleLocationsStateCreateInfoEXT();
- void initialize(const VkPipelineSampleLocationsStateCreateInfoEXT* in_struct);
- void initialize(const safe_VkPipelineSampleLocationsStateCreateInfoEXT* src);
- VkPipelineSampleLocationsStateCreateInfoEXT *ptr() { return reinterpret_cast<VkPipelineSampleLocationsStateCreateInfoEXT *>(this); }
- VkPipelineSampleLocationsStateCreateInfoEXT const *ptr() const { return reinterpret_cast<VkPipelineSampleLocationsStateCreateInfoEXT const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceSampleLocationsPropertiesEXT {
- VkStructureType sType;
- void* pNext;
- VkSampleCountFlags sampleLocationSampleCounts;
- VkExtent2D maxSampleLocationGridSize;
- float sampleLocationCoordinateRange[2];
- uint32_t sampleLocationSubPixelBits;
- VkBool32 variableSampleLocations;
- safe_VkPhysicalDeviceSampleLocationsPropertiesEXT(const VkPhysicalDeviceSampleLocationsPropertiesEXT* in_struct);
- safe_VkPhysicalDeviceSampleLocationsPropertiesEXT(const safe_VkPhysicalDeviceSampleLocationsPropertiesEXT& src);
- safe_VkPhysicalDeviceSampleLocationsPropertiesEXT& operator=(const safe_VkPhysicalDeviceSampleLocationsPropertiesEXT& src);
- safe_VkPhysicalDeviceSampleLocationsPropertiesEXT();
- ~safe_VkPhysicalDeviceSampleLocationsPropertiesEXT();
- void initialize(const VkPhysicalDeviceSampleLocationsPropertiesEXT* in_struct);
- void initialize(const safe_VkPhysicalDeviceSampleLocationsPropertiesEXT* src);
- VkPhysicalDeviceSampleLocationsPropertiesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceSampleLocationsPropertiesEXT *>(this); }
- VkPhysicalDeviceSampleLocationsPropertiesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceSampleLocationsPropertiesEXT const *>(this); }
-};
-
-struct safe_VkMultisamplePropertiesEXT {
- VkStructureType sType;
- void* pNext;
- VkExtent2D maxSampleLocationGridSize;
- safe_VkMultisamplePropertiesEXT(const VkMultisamplePropertiesEXT* in_struct);
- safe_VkMultisamplePropertiesEXT(const safe_VkMultisamplePropertiesEXT& src);
- safe_VkMultisamplePropertiesEXT& operator=(const safe_VkMultisamplePropertiesEXT& src);
- safe_VkMultisamplePropertiesEXT();
- ~safe_VkMultisamplePropertiesEXT();
- void initialize(const VkMultisamplePropertiesEXT* in_struct);
- void initialize(const safe_VkMultisamplePropertiesEXT* src);
- VkMultisamplePropertiesEXT *ptr() { return reinterpret_cast<VkMultisamplePropertiesEXT *>(this); }
- VkMultisamplePropertiesEXT const *ptr() const { return reinterpret_cast<VkMultisamplePropertiesEXT const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT {
- VkStructureType sType;
- void* pNext;
- VkBool32 advancedBlendCoherentOperations;
- safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT(const VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT* in_struct);
- safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT(const safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT& src);
- safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT& operator=(const safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT& src);
- safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT();
- ~safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT();
- void initialize(const VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT* in_struct);
- void initialize(const safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT* src);
- VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT *>(this); }
- VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT {
- VkStructureType sType;
- void* pNext;
- uint32_t advancedBlendMaxColorAttachments;
- VkBool32 advancedBlendIndependentBlend;
- VkBool32 advancedBlendNonPremultipliedSrcColor;
- VkBool32 advancedBlendNonPremultipliedDstColor;
- VkBool32 advancedBlendCorrelatedOverlap;
- VkBool32 advancedBlendAllOperations;
- safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT(const VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT* in_struct);
- safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT(const safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT& src);
- safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT& operator=(const safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT& src);
- safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT();
- ~safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT();
- void initialize(const VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT* in_struct);
- void initialize(const safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT* src);
- VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT *>(this); }
- VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const *>(this); }
-};
-
-struct safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT {
- VkStructureType sType;
- const void* pNext;
- VkBool32 srcPremultiplied;
- VkBool32 dstPremultiplied;
- VkBlendOverlapEXT blendOverlap;
- safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT(const VkPipelineColorBlendAdvancedStateCreateInfoEXT* in_struct);
- safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT(const safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT& src);
- safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT& operator=(const safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT& src);
- safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT();
- ~safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT();
- void initialize(const VkPipelineColorBlendAdvancedStateCreateInfoEXT* in_struct);
- void initialize(const safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT* src);
- VkPipelineColorBlendAdvancedStateCreateInfoEXT *ptr() { return reinterpret_cast<VkPipelineColorBlendAdvancedStateCreateInfoEXT *>(this); }
- VkPipelineColorBlendAdvancedStateCreateInfoEXT const *ptr() const { return reinterpret_cast<VkPipelineColorBlendAdvancedStateCreateInfoEXT const *>(this); }
-};
-
-struct safe_VkPipelineCoverageToColorStateCreateInfoNV {
- VkStructureType sType;
- const void* pNext;
- VkPipelineCoverageToColorStateCreateFlagsNV flags;
- VkBool32 coverageToColorEnable;
- uint32_t coverageToColorLocation;
- safe_VkPipelineCoverageToColorStateCreateInfoNV(const VkPipelineCoverageToColorStateCreateInfoNV* in_struct);
- safe_VkPipelineCoverageToColorStateCreateInfoNV(const safe_VkPipelineCoverageToColorStateCreateInfoNV& src);
- safe_VkPipelineCoverageToColorStateCreateInfoNV& operator=(const safe_VkPipelineCoverageToColorStateCreateInfoNV& src);
- safe_VkPipelineCoverageToColorStateCreateInfoNV();
- ~safe_VkPipelineCoverageToColorStateCreateInfoNV();
- void initialize(const VkPipelineCoverageToColorStateCreateInfoNV* in_struct);
- void initialize(const safe_VkPipelineCoverageToColorStateCreateInfoNV* src);
- VkPipelineCoverageToColorStateCreateInfoNV *ptr() { return reinterpret_cast<VkPipelineCoverageToColorStateCreateInfoNV *>(this); }
- VkPipelineCoverageToColorStateCreateInfoNV const *ptr() const { return reinterpret_cast<VkPipelineCoverageToColorStateCreateInfoNV const *>(this); }
-};
-
-struct safe_VkPipelineCoverageModulationStateCreateInfoNV {
- VkStructureType sType;
- const void* pNext;
- VkPipelineCoverageModulationStateCreateFlagsNV flags;
- VkCoverageModulationModeNV coverageModulationMode;
- VkBool32 coverageModulationTableEnable;
- uint32_t coverageModulationTableCount;
- const float* pCoverageModulationTable;
- safe_VkPipelineCoverageModulationStateCreateInfoNV(const VkPipelineCoverageModulationStateCreateInfoNV* in_struct);
- safe_VkPipelineCoverageModulationStateCreateInfoNV(const safe_VkPipelineCoverageModulationStateCreateInfoNV& src);
- safe_VkPipelineCoverageModulationStateCreateInfoNV& operator=(const safe_VkPipelineCoverageModulationStateCreateInfoNV& src);
- safe_VkPipelineCoverageModulationStateCreateInfoNV();
- ~safe_VkPipelineCoverageModulationStateCreateInfoNV();
- void initialize(const VkPipelineCoverageModulationStateCreateInfoNV* in_struct);
- void initialize(const safe_VkPipelineCoverageModulationStateCreateInfoNV* src);
- VkPipelineCoverageModulationStateCreateInfoNV *ptr() { return reinterpret_cast<VkPipelineCoverageModulationStateCreateInfoNV *>(this); }
- VkPipelineCoverageModulationStateCreateInfoNV const *ptr() const { return reinterpret_cast<VkPipelineCoverageModulationStateCreateInfoNV const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV {
- VkStructureType sType;
- void* pNext;
- uint32_t shaderSMCount;
- uint32_t shaderWarpsPerSM;
- safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV(const VkPhysicalDeviceShaderSMBuiltinsPropertiesNV* in_struct);
- safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV(const safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV& src);
- safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV& operator=(const safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV& src);
- safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV();
- ~safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV();
- void initialize(const VkPhysicalDeviceShaderSMBuiltinsPropertiesNV* in_struct);
- void initialize(const safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV* src);
- VkPhysicalDeviceShaderSMBuiltinsPropertiesNV *ptr() { return reinterpret_cast<VkPhysicalDeviceShaderSMBuiltinsPropertiesNV *>(this); }
- VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const *ptr() const { return reinterpret_cast<VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV {
- VkStructureType sType;
- void* pNext;
- VkBool32 shaderSMBuiltins;
- safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV(const VkPhysicalDeviceShaderSMBuiltinsFeaturesNV* in_struct);
- safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV(const safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV& src);
- safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV& operator=(const safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV& src);
- safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV();
- ~safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV();
- void initialize(const VkPhysicalDeviceShaderSMBuiltinsFeaturesNV* in_struct);
- void initialize(const safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV* src);
- VkPhysicalDeviceShaderSMBuiltinsFeaturesNV *ptr() { return reinterpret_cast<VkPhysicalDeviceShaderSMBuiltinsFeaturesNV *>(this); }
- VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const *ptr() const { return reinterpret_cast<VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const *>(this); }
-};
-
-struct safe_VkDrmFormatModifierPropertiesListEXT {
- VkStructureType sType;
- void* pNext;
- uint32_t drmFormatModifierCount;
- VkDrmFormatModifierPropertiesEXT* pDrmFormatModifierProperties;
- safe_VkDrmFormatModifierPropertiesListEXT(const VkDrmFormatModifierPropertiesListEXT* in_struct);
- safe_VkDrmFormatModifierPropertiesListEXT(const safe_VkDrmFormatModifierPropertiesListEXT& src);
- safe_VkDrmFormatModifierPropertiesListEXT& operator=(const safe_VkDrmFormatModifierPropertiesListEXT& src);
- safe_VkDrmFormatModifierPropertiesListEXT();
- ~safe_VkDrmFormatModifierPropertiesListEXT();
- void initialize(const VkDrmFormatModifierPropertiesListEXT* in_struct);
- void initialize(const safe_VkDrmFormatModifierPropertiesListEXT* src);
- VkDrmFormatModifierPropertiesListEXT *ptr() { return reinterpret_cast<VkDrmFormatModifierPropertiesListEXT *>(this); }
- VkDrmFormatModifierPropertiesListEXT const *ptr() const { return reinterpret_cast<VkDrmFormatModifierPropertiesListEXT const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT {
- VkStructureType sType;
- const void* pNext;
- uint64_t drmFormatModifier;
- VkSharingMode sharingMode;
- uint32_t queueFamilyIndexCount;
- const uint32_t* pQueueFamilyIndices;
- safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT(const VkPhysicalDeviceImageDrmFormatModifierInfoEXT* in_struct);
- safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT(const safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT& src);
- safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT& operator=(const safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT& src);
- safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT();
- ~safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT();
- void initialize(const VkPhysicalDeviceImageDrmFormatModifierInfoEXT* in_struct);
- void initialize(const safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT* src);
- VkPhysicalDeviceImageDrmFormatModifierInfoEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceImageDrmFormatModifierInfoEXT *>(this); }
- VkPhysicalDeviceImageDrmFormatModifierInfoEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceImageDrmFormatModifierInfoEXT const *>(this); }
-};
-
-struct safe_VkImageDrmFormatModifierListCreateInfoEXT {
- VkStructureType sType;
- const void* pNext;
- uint32_t drmFormatModifierCount;
- const uint64_t* pDrmFormatModifiers;
- safe_VkImageDrmFormatModifierListCreateInfoEXT(const VkImageDrmFormatModifierListCreateInfoEXT* in_struct);
- safe_VkImageDrmFormatModifierListCreateInfoEXT(const safe_VkImageDrmFormatModifierListCreateInfoEXT& src);
- safe_VkImageDrmFormatModifierListCreateInfoEXT& operator=(const safe_VkImageDrmFormatModifierListCreateInfoEXT& src);
- safe_VkImageDrmFormatModifierListCreateInfoEXT();
- ~safe_VkImageDrmFormatModifierListCreateInfoEXT();
- void initialize(const VkImageDrmFormatModifierListCreateInfoEXT* in_struct);
- void initialize(const safe_VkImageDrmFormatModifierListCreateInfoEXT* src);
- VkImageDrmFormatModifierListCreateInfoEXT *ptr() { return reinterpret_cast<VkImageDrmFormatModifierListCreateInfoEXT *>(this); }
- VkImageDrmFormatModifierListCreateInfoEXT const *ptr() const { return reinterpret_cast<VkImageDrmFormatModifierListCreateInfoEXT const *>(this); }
-};
-
-struct safe_VkImageDrmFormatModifierExplicitCreateInfoEXT {
- VkStructureType sType;
- const void* pNext;
- uint64_t drmFormatModifier;
- uint32_t drmFormatModifierPlaneCount;
- const VkSubresourceLayout* pPlaneLayouts;
- safe_VkImageDrmFormatModifierExplicitCreateInfoEXT(const VkImageDrmFormatModifierExplicitCreateInfoEXT* in_struct);
- safe_VkImageDrmFormatModifierExplicitCreateInfoEXT(const safe_VkImageDrmFormatModifierExplicitCreateInfoEXT& src);
- safe_VkImageDrmFormatModifierExplicitCreateInfoEXT& operator=(const safe_VkImageDrmFormatModifierExplicitCreateInfoEXT& src);
- safe_VkImageDrmFormatModifierExplicitCreateInfoEXT();
- ~safe_VkImageDrmFormatModifierExplicitCreateInfoEXT();
- void initialize(const VkImageDrmFormatModifierExplicitCreateInfoEXT* in_struct);
- void initialize(const safe_VkImageDrmFormatModifierExplicitCreateInfoEXT* src);
- VkImageDrmFormatModifierExplicitCreateInfoEXT *ptr() { return reinterpret_cast<VkImageDrmFormatModifierExplicitCreateInfoEXT *>(this); }
- VkImageDrmFormatModifierExplicitCreateInfoEXT const *ptr() const { return reinterpret_cast<VkImageDrmFormatModifierExplicitCreateInfoEXT const *>(this); }
-};
-
-struct safe_VkImageDrmFormatModifierPropertiesEXT {
- VkStructureType sType;
- void* pNext;
- uint64_t drmFormatModifier;
- safe_VkImageDrmFormatModifierPropertiesEXT(const VkImageDrmFormatModifierPropertiesEXT* in_struct);
- safe_VkImageDrmFormatModifierPropertiesEXT(const safe_VkImageDrmFormatModifierPropertiesEXT& src);
- safe_VkImageDrmFormatModifierPropertiesEXT& operator=(const safe_VkImageDrmFormatModifierPropertiesEXT& src);
- safe_VkImageDrmFormatModifierPropertiesEXT();
- ~safe_VkImageDrmFormatModifierPropertiesEXT();
- void initialize(const VkImageDrmFormatModifierPropertiesEXT* in_struct);
- void initialize(const safe_VkImageDrmFormatModifierPropertiesEXT* src);
- VkImageDrmFormatModifierPropertiesEXT *ptr() { return reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>(this); }
- VkImageDrmFormatModifierPropertiesEXT const *ptr() const { return reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT const *>(this); }
-};
-
-struct safe_VkValidationCacheCreateInfoEXT {
- VkStructureType sType;
- const void* pNext;
- VkValidationCacheCreateFlagsEXT flags;
- size_t initialDataSize;
- const void* pInitialData;
- safe_VkValidationCacheCreateInfoEXT(const VkValidationCacheCreateInfoEXT* in_struct);
- safe_VkValidationCacheCreateInfoEXT(const safe_VkValidationCacheCreateInfoEXT& src);
- safe_VkValidationCacheCreateInfoEXT& operator=(const safe_VkValidationCacheCreateInfoEXT& src);
- safe_VkValidationCacheCreateInfoEXT();
- ~safe_VkValidationCacheCreateInfoEXT();
- void initialize(const VkValidationCacheCreateInfoEXT* in_struct);
- void initialize(const safe_VkValidationCacheCreateInfoEXT* src);
- VkValidationCacheCreateInfoEXT *ptr() { return reinterpret_cast<VkValidationCacheCreateInfoEXT *>(this); }
- VkValidationCacheCreateInfoEXT const *ptr() const { return reinterpret_cast<VkValidationCacheCreateInfoEXT const *>(this); }
-};
-
-struct safe_VkShaderModuleValidationCacheCreateInfoEXT {
- VkStructureType sType;
- const void* pNext;
- VkValidationCacheEXT validationCache;
- safe_VkShaderModuleValidationCacheCreateInfoEXT(const VkShaderModuleValidationCacheCreateInfoEXT* in_struct);
- safe_VkShaderModuleValidationCacheCreateInfoEXT(const safe_VkShaderModuleValidationCacheCreateInfoEXT& src);
- safe_VkShaderModuleValidationCacheCreateInfoEXT& operator=(const safe_VkShaderModuleValidationCacheCreateInfoEXT& src);
- safe_VkShaderModuleValidationCacheCreateInfoEXT();
- ~safe_VkShaderModuleValidationCacheCreateInfoEXT();
- void initialize(const VkShaderModuleValidationCacheCreateInfoEXT* in_struct);
- void initialize(const safe_VkShaderModuleValidationCacheCreateInfoEXT* src);
- VkShaderModuleValidationCacheCreateInfoEXT *ptr() { return reinterpret_cast<VkShaderModuleValidationCacheCreateInfoEXT *>(this); }
- VkShaderModuleValidationCacheCreateInfoEXT const *ptr() const { return reinterpret_cast<VkShaderModuleValidationCacheCreateInfoEXT const *>(this); }
-};
-
-struct safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT {
- VkStructureType sType;
- const void* pNext;
- uint32_t bindingCount;
- const VkDescriptorBindingFlagsEXT* pBindingFlags;
- safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT(const VkDescriptorSetLayoutBindingFlagsCreateInfoEXT* in_struct);
- safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT(const safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT& src);
- safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT& operator=(const safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT& src);
- safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT();
- ~safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT();
- void initialize(const VkDescriptorSetLayoutBindingFlagsCreateInfoEXT* in_struct);
- void initialize(const safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT* src);
- VkDescriptorSetLayoutBindingFlagsCreateInfoEXT *ptr() { return reinterpret_cast<VkDescriptorSetLayoutBindingFlagsCreateInfoEXT *>(this); }
- VkDescriptorSetLayoutBindingFlagsCreateInfoEXT const *ptr() const { return reinterpret_cast<VkDescriptorSetLayoutBindingFlagsCreateInfoEXT const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT {
- VkStructureType sType;
- void* pNext;
- VkBool32 shaderInputAttachmentArrayDynamicIndexing;
- VkBool32 shaderUniformTexelBufferArrayDynamicIndexing;
- VkBool32 shaderStorageTexelBufferArrayDynamicIndexing;
- VkBool32 shaderUniformBufferArrayNonUniformIndexing;
- VkBool32 shaderSampledImageArrayNonUniformIndexing;
- VkBool32 shaderStorageBufferArrayNonUniformIndexing;
- VkBool32 shaderStorageImageArrayNonUniformIndexing;
- VkBool32 shaderInputAttachmentArrayNonUniformIndexing;
- VkBool32 shaderUniformTexelBufferArrayNonUniformIndexing;
- VkBool32 shaderStorageTexelBufferArrayNonUniformIndexing;
- VkBool32 descriptorBindingUniformBufferUpdateAfterBind;
- VkBool32 descriptorBindingSampledImageUpdateAfterBind;
- VkBool32 descriptorBindingStorageImageUpdateAfterBind;
- VkBool32 descriptorBindingStorageBufferUpdateAfterBind;
- VkBool32 descriptorBindingUniformTexelBufferUpdateAfterBind;
- VkBool32 descriptorBindingStorageTexelBufferUpdateAfterBind;
- VkBool32 descriptorBindingUpdateUnusedWhilePending;
- VkBool32 descriptorBindingPartiallyBound;
- VkBool32 descriptorBindingVariableDescriptorCount;
- VkBool32 runtimeDescriptorArray;
- safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT(const VkPhysicalDeviceDescriptorIndexingFeaturesEXT* in_struct);
- safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT(const safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT& src);
- safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT& operator=(const safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT& src);
- safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT();
- ~safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT();
- void initialize(const VkPhysicalDeviceDescriptorIndexingFeaturesEXT* in_struct);
- void initialize(const safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT* src);
- VkPhysicalDeviceDescriptorIndexingFeaturesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceDescriptorIndexingFeaturesEXT *>(this); }
- VkPhysicalDeviceDescriptorIndexingFeaturesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceDescriptorIndexingFeaturesEXT const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT {
- VkStructureType sType;
- void* pNext;
- uint32_t maxUpdateAfterBindDescriptorsInAllPools;
- VkBool32 shaderUniformBufferArrayNonUniformIndexingNative;
- VkBool32 shaderSampledImageArrayNonUniformIndexingNative;
- VkBool32 shaderStorageBufferArrayNonUniformIndexingNative;
- VkBool32 shaderStorageImageArrayNonUniformIndexingNative;
- VkBool32 shaderInputAttachmentArrayNonUniformIndexingNative;
- VkBool32 robustBufferAccessUpdateAfterBind;
- VkBool32 quadDivergentImplicitLod;
- uint32_t maxPerStageDescriptorUpdateAfterBindSamplers;
- uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers;
- uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers;
- uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages;
- uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages;
- uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments;
- uint32_t maxPerStageUpdateAfterBindResources;
- uint32_t maxDescriptorSetUpdateAfterBindSamplers;
- uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers;
- uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic;
- uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers;
- uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic;
- uint32_t maxDescriptorSetUpdateAfterBindSampledImages;
- uint32_t maxDescriptorSetUpdateAfterBindStorageImages;
- uint32_t maxDescriptorSetUpdateAfterBindInputAttachments;
- safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT(const VkPhysicalDeviceDescriptorIndexingPropertiesEXT* in_struct);
- safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT(const safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT& src);
- safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT& operator=(const safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT& src);
- safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT();
- ~safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT();
- void initialize(const VkPhysicalDeviceDescriptorIndexingPropertiesEXT* in_struct);
- void initialize(const safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT* src);
- VkPhysicalDeviceDescriptorIndexingPropertiesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceDescriptorIndexingPropertiesEXT *>(this); }
- VkPhysicalDeviceDescriptorIndexingPropertiesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceDescriptorIndexingPropertiesEXT const *>(this); }
-};
-
-struct safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT {
- VkStructureType sType;
- const void* pNext;
- uint32_t descriptorSetCount;
- const uint32_t* pDescriptorCounts;
- safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT(const VkDescriptorSetVariableDescriptorCountAllocateInfoEXT* in_struct);
- safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT(const safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT& src);
- safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT& operator=(const safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT& src);
- safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT();
- ~safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT();
- void initialize(const VkDescriptorSetVariableDescriptorCountAllocateInfoEXT* in_struct);
- void initialize(const safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT* src);
- VkDescriptorSetVariableDescriptorCountAllocateInfoEXT *ptr() { return reinterpret_cast<VkDescriptorSetVariableDescriptorCountAllocateInfoEXT *>(this); }
- VkDescriptorSetVariableDescriptorCountAllocateInfoEXT const *ptr() const { return reinterpret_cast<VkDescriptorSetVariableDescriptorCountAllocateInfoEXT const *>(this); }
-};
-
-struct safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT {
- VkStructureType sType;
- void* pNext;
- uint32_t maxVariableDescriptorCount;
- safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT(const VkDescriptorSetVariableDescriptorCountLayoutSupportEXT* in_struct);
- safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT(const safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT& src);
- safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT& operator=(const safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT& src);
- safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT();
- ~safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT();
- void initialize(const VkDescriptorSetVariableDescriptorCountLayoutSupportEXT* in_struct);
- void initialize(const safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT* src);
- VkDescriptorSetVariableDescriptorCountLayoutSupportEXT *ptr() { return reinterpret_cast<VkDescriptorSetVariableDescriptorCountLayoutSupportEXT *>(this); }
- VkDescriptorSetVariableDescriptorCountLayoutSupportEXT const *ptr() const { return reinterpret_cast<VkDescriptorSetVariableDescriptorCountLayoutSupportEXT const *>(this); }
-};
-
-struct safe_VkShadingRatePaletteNV {
- uint32_t shadingRatePaletteEntryCount;
- const VkShadingRatePaletteEntryNV* pShadingRatePaletteEntries;
- safe_VkShadingRatePaletteNV(const VkShadingRatePaletteNV* in_struct);
- safe_VkShadingRatePaletteNV(const safe_VkShadingRatePaletteNV& src);
- safe_VkShadingRatePaletteNV& operator=(const safe_VkShadingRatePaletteNV& src);
- safe_VkShadingRatePaletteNV();
- ~safe_VkShadingRatePaletteNV();
- void initialize(const VkShadingRatePaletteNV* in_struct);
- void initialize(const safe_VkShadingRatePaletteNV* src);
- VkShadingRatePaletteNV *ptr() { return reinterpret_cast<VkShadingRatePaletteNV *>(this); }
- VkShadingRatePaletteNV const *ptr() const { return reinterpret_cast<VkShadingRatePaletteNV const *>(this); }
-};
-
-struct safe_VkPipelineViewportShadingRateImageStateCreateInfoNV {
- VkStructureType sType;
- const void* pNext;
- VkBool32 shadingRateImageEnable;
- uint32_t viewportCount;
- safe_VkShadingRatePaletteNV* pShadingRatePalettes;
- safe_VkPipelineViewportShadingRateImageStateCreateInfoNV(const VkPipelineViewportShadingRateImageStateCreateInfoNV* in_struct);
- safe_VkPipelineViewportShadingRateImageStateCreateInfoNV(const safe_VkPipelineViewportShadingRateImageStateCreateInfoNV& src);
- safe_VkPipelineViewportShadingRateImageStateCreateInfoNV& operator=(const safe_VkPipelineViewportShadingRateImageStateCreateInfoNV& src);
- safe_VkPipelineViewportShadingRateImageStateCreateInfoNV();
- ~safe_VkPipelineViewportShadingRateImageStateCreateInfoNV();
- void initialize(const VkPipelineViewportShadingRateImageStateCreateInfoNV* in_struct);
- void initialize(const safe_VkPipelineViewportShadingRateImageStateCreateInfoNV* src);
- VkPipelineViewportShadingRateImageStateCreateInfoNV *ptr() { return reinterpret_cast<VkPipelineViewportShadingRateImageStateCreateInfoNV *>(this); }
- VkPipelineViewportShadingRateImageStateCreateInfoNV const *ptr() const { return reinterpret_cast<VkPipelineViewportShadingRateImageStateCreateInfoNV const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceShadingRateImageFeaturesNV {
- VkStructureType sType;
- void* pNext;
- VkBool32 shadingRateImage;
- VkBool32 shadingRateCoarseSampleOrder;
- safe_VkPhysicalDeviceShadingRateImageFeaturesNV(const VkPhysicalDeviceShadingRateImageFeaturesNV* in_struct);
- safe_VkPhysicalDeviceShadingRateImageFeaturesNV(const safe_VkPhysicalDeviceShadingRateImageFeaturesNV& src);
- safe_VkPhysicalDeviceShadingRateImageFeaturesNV& operator=(const safe_VkPhysicalDeviceShadingRateImageFeaturesNV& src);
- safe_VkPhysicalDeviceShadingRateImageFeaturesNV();
- ~safe_VkPhysicalDeviceShadingRateImageFeaturesNV();
- void initialize(const VkPhysicalDeviceShadingRateImageFeaturesNV* in_struct);
- void initialize(const safe_VkPhysicalDeviceShadingRateImageFeaturesNV* src);
- VkPhysicalDeviceShadingRateImageFeaturesNV *ptr() { return reinterpret_cast<VkPhysicalDeviceShadingRateImageFeaturesNV *>(this); }
- VkPhysicalDeviceShadingRateImageFeaturesNV const *ptr() const { return reinterpret_cast<VkPhysicalDeviceShadingRateImageFeaturesNV const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceShadingRateImagePropertiesNV {
- VkStructureType sType;
- void* pNext;
- VkExtent2D shadingRateTexelSize;
- uint32_t shadingRatePaletteSize;
- uint32_t shadingRateMaxCoarseSamples;
- safe_VkPhysicalDeviceShadingRateImagePropertiesNV(const VkPhysicalDeviceShadingRateImagePropertiesNV* in_struct);
- safe_VkPhysicalDeviceShadingRateImagePropertiesNV(const safe_VkPhysicalDeviceShadingRateImagePropertiesNV& src);
- safe_VkPhysicalDeviceShadingRateImagePropertiesNV& operator=(const safe_VkPhysicalDeviceShadingRateImagePropertiesNV& src);
- safe_VkPhysicalDeviceShadingRateImagePropertiesNV();
- ~safe_VkPhysicalDeviceShadingRateImagePropertiesNV();
- void initialize(const VkPhysicalDeviceShadingRateImagePropertiesNV* in_struct);
- void initialize(const safe_VkPhysicalDeviceShadingRateImagePropertiesNV* src);
- VkPhysicalDeviceShadingRateImagePropertiesNV *ptr() { return reinterpret_cast<VkPhysicalDeviceShadingRateImagePropertiesNV *>(this); }
- VkPhysicalDeviceShadingRateImagePropertiesNV const *ptr() const { return reinterpret_cast<VkPhysicalDeviceShadingRateImagePropertiesNV const *>(this); }
-};
-
-struct safe_VkCoarseSampleOrderCustomNV {
- VkShadingRatePaletteEntryNV shadingRate;
- uint32_t sampleCount;
- uint32_t sampleLocationCount;
- const VkCoarseSampleLocationNV* pSampleLocations;
- safe_VkCoarseSampleOrderCustomNV(const VkCoarseSampleOrderCustomNV* in_struct);
- safe_VkCoarseSampleOrderCustomNV(const safe_VkCoarseSampleOrderCustomNV& src);
- safe_VkCoarseSampleOrderCustomNV& operator=(const safe_VkCoarseSampleOrderCustomNV& src);
- safe_VkCoarseSampleOrderCustomNV();
- ~safe_VkCoarseSampleOrderCustomNV();
- void initialize(const VkCoarseSampleOrderCustomNV* in_struct);
- void initialize(const safe_VkCoarseSampleOrderCustomNV* src);
- VkCoarseSampleOrderCustomNV *ptr() { return reinterpret_cast<VkCoarseSampleOrderCustomNV *>(this); }
- VkCoarseSampleOrderCustomNV const *ptr() const { return reinterpret_cast<VkCoarseSampleOrderCustomNV const *>(this); }
-};
-
-struct safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV {
- VkStructureType sType;
- const void* pNext;
- VkCoarseSampleOrderTypeNV sampleOrderType;
- uint32_t customSampleOrderCount;
- safe_VkCoarseSampleOrderCustomNV* pCustomSampleOrders;
- safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV(const VkPipelineViewportCoarseSampleOrderStateCreateInfoNV* in_struct);
- safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV(const safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV& src);
- safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV& operator=(const safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV& src);
- safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV();
- ~safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV();
- void initialize(const VkPipelineViewportCoarseSampleOrderStateCreateInfoNV* in_struct);
- void initialize(const safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV* src);
- VkPipelineViewportCoarseSampleOrderStateCreateInfoNV *ptr() { return reinterpret_cast<VkPipelineViewportCoarseSampleOrderStateCreateInfoNV *>(this); }
- VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const *ptr() const { return reinterpret_cast<VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const *>(this); }
-};
-
-struct safe_VkRayTracingShaderGroupCreateInfoNV {
- VkStructureType sType;
- const void* pNext;
- VkRayTracingShaderGroupTypeNV type;
- uint32_t generalShader;
- uint32_t closestHitShader;
- uint32_t anyHitShader;
- uint32_t intersectionShader;
- safe_VkRayTracingShaderGroupCreateInfoNV(const VkRayTracingShaderGroupCreateInfoNV* in_struct);
- safe_VkRayTracingShaderGroupCreateInfoNV(const safe_VkRayTracingShaderGroupCreateInfoNV& src);
- safe_VkRayTracingShaderGroupCreateInfoNV& operator=(const safe_VkRayTracingShaderGroupCreateInfoNV& src);
- safe_VkRayTracingShaderGroupCreateInfoNV();
- ~safe_VkRayTracingShaderGroupCreateInfoNV();
- void initialize(const VkRayTracingShaderGroupCreateInfoNV* in_struct);
- void initialize(const safe_VkRayTracingShaderGroupCreateInfoNV* src);
- VkRayTracingShaderGroupCreateInfoNV *ptr() { return reinterpret_cast<VkRayTracingShaderGroupCreateInfoNV *>(this); }
- VkRayTracingShaderGroupCreateInfoNV const *ptr() const { return reinterpret_cast<VkRayTracingShaderGroupCreateInfoNV const *>(this); }
-};
-
-struct safe_VkRayTracingPipelineCreateInfoNV {
- VkStructureType sType;
- const void* pNext;
- VkPipelineCreateFlags flags;
- uint32_t stageCount;
- safe_VkPipelineShaderStageCreateInfo* pStages;
- uint32_t groupCount;
- safe_VkRayTracingShaderGroupCreateInfoNV* pGroups;
- uint32_t maxRecursionDepth;
- VkPipelineLayout layout;
- VkPipeline basePipelineHandle;
- int32_t basePipelineIndex;
- safe_VkRayTracingPipelineCreateInfoNV(const VkRayTracingPipelineCreateInfoNV* in_struct);
- safe_VkRayTracingPipelineCreateInfoNV(const safe_VkRayTracingPipelineCreateInfoNV& src);
- safe_VkRayTracingPipelineCreateInfoNV& operator=(const safe_VkRayTracingPipelineCreateInfoNV& src);
- safe_VkRayTracingPipelineCreateInfoNV();
- ~safe_VkRayTracingPipelineCreateInfoNV();
- void initialize(const VkRayTracingPipelineCreateInfoNV* in_struct);
- void initialize(const safe_VkRayTracingPipelineCreateInfoNV* src);
- VkRayTracingPipelineCreateInfoNV *ptr() { return reinterpret_cast<VkRayTracingPipelineCreateInfoNV *>(this); }
- VkRayTracingPipelineCreateInfoNV const *ptr() const { return reinterpret_cast<VkRayTracingPipelineCreateInfoNV const *>(this); }
-};
-
-struct safe_VkGeometryTrianglesNV {
- VkStructureType sType;
- const void* pNext;
- VkBuffer vertexData;
- VkDeviceSize vertexOffset;
- uint32_t vertexCount;
- VkDeviceSize vertexStride;
- VkFormat vertexFormat;
- VkBuffer indexData;
- VkDeviceSize indexOffset;
- uint32_t indexCount;
- VkIndexType indexType;
- VkBuffer transformData;
- VkDeviceSize transformOffset;
- safe_VkGeometryTrianglesNV(const VkGeometryTrianglesNV* in_struct);
- safe_VkGeometryTrianglesNV(const safe_VkGeometryTrianglesNV& src);
- safe_VkGeometryTrianglesNV& operator=(const safe_VkGeometryTrianglesNV& src);
- safe_VkGeometryTrianglesNV();
- ~safe_VkGeometryTrianglesNV();
- void initialize(const VkGeometryTrianglesNV* in_struct);
- void initialize(const safe_VkGeometryTrianglesNV* src);
- VkGeometryTrianglesNV *ptr() { return reinterpret_cast<VkGeometryTrianglesNV *>(this); }
- VkGeometryTrianglesNV const *ptr() const { return reinterpret_cast<VkGeometryTrianglesNV const *>(this); }
-};
-
-struct safe_VkGeometryAABBNV {
- VkStructureType sType;
- const void* pNext;
- VkBuffer aabbData;
- uint32_t numAABBs;
- uint32_t stride;
- VkDeviceSize offset;
- safe_VkGeometryAABBNV(const VkGeometryAABBNV* in_struct);
- safe_VkGeometryAABBNV(const safe_VkGeometryAABBNV& src);
- safe_VkGeometryAABBNV& operator=(const safe_VkGeometryAABBNV& src);
- safe_VkGeometryAABBNV();
- ~safe_VkGeometryAABBNV();
- void initialize(const VkGeometryAABBNV* in_struct);
- void initialize(const safe_VkGeometryAABBNV* src);
- VkGeometryAABBNV *ptr() { return reinterpret_cast<VkGeometryAABBNV *>(this); }
- VkGeometryAABBNV const *ptr() const { return reinterpret_cast<VkGeometryAABBNV const *>(this); }
-};
-
-struct safe_VkGeometryNV {
- VkStructureType sType;
- const void* pNext;
- VkGeometryTypeNV geometryType;
- VkGeometryDataNV geometry;
- VkGeometryFlagsNV flags;
- safe_VkGeometryNV(const VkGeometryNV* in_struct);
- safe_VkGeometryNV(const safe_VkGeometryNV& src);
- safe_VkGeometryNV& operator=(const safe_VkGeometryNV& src);
- safe_VkGeometryNV();
- ~safe_VkGeometryNV();
- void initialize(const VkGeometryNV* in_struct);
- void initialize(const safe_VkGeometryNV* src);
- VkGeometryNV *ptr() { return reinterpret_cast<VkGeometryNV *>(this); }
- VkGeometryNV const *ptr() const { return reinterpret_cast<VkGeometryNV const *>(this); }
-};
-
-struct safe_VkAccelerationStructureInfoNV {
- VkStructureType sType;
- const void* pNext;
- VkAccelerationStructureTypeNV type;
- VkBuildAccelerationStructureFlagsNV flags;
- uint32_t instanceCount;
- uint32_t geometryCount;
- safe_VkGeometryNV* pGeometries;
- safe_VkAccelerationStructureInfoNV(const VkAccelerationStructureInfoNV* in_struct);
- safe_VkAccelerationStructureInfoNV(const safe_VkAccelerationStructureInfoNV& src);
- safe_VkAccelerationStructureInfoNV& operator=(const safe_VkAccelerationStructureInfoNV& src);
- safe_VkAccelerationStructureInfoNV();
- ~safe_VkAccelerationStructureInfoNV();
- void initialize(const VkAccelerationStructureInfoNV* in_struct);
- void initialize(const safe_VkAccelerationStructureInfoNV* src);
- VkAccelerationStructureInfoNV *ptr() { return reinterpret_cast<VkAccelerationStructureInfoNV *>(this); }
- VkAccelerationStructureInfoNV const *ptr() const { return reinterpret_cast<VkAccelerationStructureInfoNV const *>(this); }
-};
-
-struct safe_VkAccelerationStructureCreateInfoNV {
- VkStructureType sType;
- const void* pNext;
- VkDeviceSize compactedSize;
- safe_VkAccelerationStructureInfoNV info;
- safe_VkAccelerationStructureCreateInfoNV(const VkAccelerationStructureCreateInfoNV* in_struct);
- safe_VkAccelerationStructureCreateInfoNV(const safe_VkAccelerationStructureCreateInfoNV& src);
- safe_VkAccelerationStructureCreateInfoNV& operator=(const safe_VkAccelerationStructureCreateInfoNV& src);
- safe_VkAccelerationStructureCreateInfoNV();
- ~safe_VkAccelerationStructureCreateInfoNV();
- void initialize(const VkAccelerationStructureCreateInfoNV* in_struct);
- void initialize(const safe_VkAccelerationStructureCreateInfoNV* src);
- VkAccelerationStructureCreateInfoNV *ptr() { return reinterpret_cast<VkAccelerationStructureCreateInfoNV *>(this); }
- VkAccelerationStructureCreateInfoNV const *ptr() const { return reinterpret_cast<VkAccelerationStructureCreateInfoNV const *>(this); }
-};
-
-struct safe_VkBindAccelerationStructureMemoryInfoNV {
- VkStructureType sType;
- const void* pNext;
- VkAccelerationStructureNV accelerationStructure;
- VkDeviceMemory memory;
- VkDeviceSize memoryOffset;
- uint32_t deviceIndexCount;
- const uint32_t* pDeviceIndices;
- safe_VkBindAccelerationStructureMemoryInfoNV(const VkBindAccelerationStructureMemoryInfoNV* in_struct);
- safe_VkBindAccelerationStructureMemoryInfoNV(const safe_VkBindAccelerationStructureMemoryInfoNV& src);
- safe_VkBindAccelerationStructureMemoryInfoNV& operator=(const safe_VkBindAccelerationStructureMemoryInfoNV& src);
- safe_VkBindAccelerationStructureMemoryInfoNV();
- ~safe_VkBindAccelerationStructureMemoryInfoNV();
- void initialize(const VkBindAccelerationStructureMemoryInfoNV* in_struct);
- void initialize(const safe_VkBindAccelerationStructureMemoryInfoNV* src);
- VkBindAccelerationStructureMemoryInfoNV *ptr() { return reinterpret_cast<VkBindAccelerationStructureMemoryInfoNV *>(this); }
- VkBindAccelerationStructureMemoryInfoNV const *ptr() const { return reinterpret_cast<VkBindAccelerationStructureMemoryInfoNV const *>(this); }
-};
-
-struct safe_VkWriteDescriptorSetAccelerationStructureNV {
- VkStructureType sType;
- const void* pNext;
- uint32_t accelerationStructureCount;
- VkAccelerationStructureNV* pAccelerationStructures;
- safe_VkWriteDescriptorSetAccelerationStructureNV(const VkWriteDescriptorSetAccelerationStructureNV* in_struct);
- safe_VkWriteDescriptorSetAccelerationStructureNV(const safe_VkWriteDescriptorSetAccelerationStructureNV& src);
- safe_VkWriteDescriptorSetAccelerationStructureNV& operator=(const safe_VkWriteDescriptorSetAccelerationStructureNV& src);
- safe_VkWriteDescriptorSetAccelerationStructureNV();
- ~safe_VkWriteDescriptorSetAccelerationStructureNV();
- void initialize(const VkWriteDescriptorSetAccelerationStructureNV* in_struct);
- void initialize(const safe_VkWriteDescriptorSetAccelerationStructureNV* src);
- VkWriteDescriptorSetAccelerationStructureNV *ptr() { return reinterpret_cast<VkWriteDescriptorSetAccelerationStructureNV *>(this); }
- VkWriteDescriptorSetAccelerationStructureNV const *ptr() const { return reinterpret_cast<VkWriteDescriptorSetAccelerationStructureNV const *>(this); }
-};
-
-struct safe_VkAccelerationStructureMemoryRequirementsInfoNV {
- VkStructureType sType;
- const void* pNext;
- VkAccelerationStructureMemoryRequirementsTypeNV type;
- VkAccelerationStructureNV accelerationStructure;
- safe_VkAccelerationStructureMemoryRequirementsInfoNV(const VkAccelerationStructureMemoryRequirementsInfoNV* in_struct);
- safe_VkAccelerationStructureMemoryRequirementsInfoNV(const safe_VkAccelerationStructureMemoryRequirementsInfoNV& src);
- safe_VkAccelerationStructureMemoryRequirementsInfoNV& operator=(const safe_VkAccelerationStructureMemoryRequirementsInfoNV& src);
- safe_VkAccelerationStructureMemoryRequirementsInfoNV();
- ~safe_VkAccelerationStructureMemoryRequirementsInfoNV();
- void initialize(const VkAccelerationStructureMemoryRequirementsInfoNV* in_struct);
- void initialize(const safe_VkAccelerationStructureMemoryRequirementsInfoNV* src);
- VkAccelerationStructureMemoryRequirementsInfoNV *ptr() { return reinterpret_cast<VkAccelerationStructureMemoryRequirementsInfoNV *>(this); }
- VkAccelerationStructureMemoryRequirementsInfoNV const *ptr() const { return reinterpret_cast<VkAccelerationStructureMemoryRequirementsInfoNV const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceRayTracingPropertiesNV {
- VkStructureType sType;
- void* pNext;
- uint32_t shaderGroupHandleSize;
- uint32_t maxRecursionDepth;
- uint32_t maxShaderGroupStride;
- uint32_t shaderGroupBaseAlignment;
- uint64_t maxGeometryCount;
- uint64_t maxInstanceCount;
- uint64_t maxTriangleCount;
- uint32_t maxDescriptorSetAccelerationStructures;
- safe_VkPhysicalDeviceRayTracingPropertiesNV(const VkPhysicalDeviceRayTracingPropertiesNV* in_struct);
- safe_VkPhysicalDeviceRayTracingPropertiesNV(const safe_VkPhysicalDeviceRayTracingPropertiesNV& src);
- safe_VkPhysicalDeviceRayTracingPropertiesNV& operator=(const safe_VkPhysicalDeviceRayTracingPropertiesNV& src);
- safe_VkPhysicalDeviceRayTracingPropertiesNV();
- ~safe_VkPhysicalDeviceRayTracingPropertiesNV();
- void initialize(const VkPhysicalDeviceRayTracingPropertiesNV* in_struct);
- void initialize(const safe_VkPhysicalDeviceRayTracingPropertiesNV* src);
- VkPhysicalDeviceRayTracingPropertiesNV *ptr() { return reinterpret_cast<VkPhysicalDeviceRayTracingPropertiesNV *>(this); }
- VkPhysicalDeviceRayTracingPropertiesNV const *ptr() const { return reinterpret_cast<VkPhysicalDeviceRayTracingPropertiesNV const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV {
- VkStructureType sType;
- void* pNext;
- VkBool32 representativeFragmentTest;
- safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV(const VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV* in_struct);
- safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV(const safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV& src);
- safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV& operator=(const safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV& src);
- safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV();
- ~safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV();
- void initialize(const VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV* in_struct);
- void initialize(const safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV* src);
- VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV *ptr() { return reinterpret_cast<VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV *>(this); }
- VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const *ptr() const { return reinterpret_cast<VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const *>(this); }
-};
-
-struct safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV {
- VkStructureType sType;
- const void* pNext;
- VkBool32 representativeFragmentTestEnable;
- safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV(const VkPipelineRepresentativeFragmentTestStateCreateInfoNV* in_struct);
- safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV(const safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV& src);
- safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV& operator=(const safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV& src);
- safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV();
- ~safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV();
- void initialize(const VkPipelineRepresentativeFragmentTestStateCreateInfoNV* in_struct);
- void initialize(const safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV* src);
- VkPipelineRepresentativeFragmentTestStateCreateInfoNV *ptr() { return reinterpret_cast<VkPipelineRepresentativeFragmentTestStateCreateInfoNV *>(this); }
- VkPipelineRepresentativeFragmentTestStateCreateInfoNV const *ptr() const { return reinterpret_cast<VkPipelineRepresentativeFragmentTestStateCreateInfoNV const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceImageViewImageFormatInfoEXT {
- VkStructureType sType;
- void* pNext;
- VkImageViewType imageViewType;
- safe_VkPhysicalDeviceImageViewImageFormatInfoEXT(const VkPhysicalDeviceImageViewImageFormatInfoEXT* in_struct);
- safe_VkPhysicalDeviceImageViewImageFormatInfoEXT(const safe_VkPhysicalDeviceImageViewImageFormatInfoEXT& src);
- safe_VkPhysicalDeviceImageViewImageFormatInfoEXT& operator=(const safe_VkPhysicalDeviceImageViewImageFormatInfoEXT& src);
- safe_VkPhysicalDeviceImageViewImageFormatInfoEXT();
- ~safe_VkPhysicalDeviceImageViewImageFormatInfoEXT();
- void initialize(const VkPhysicalDeviceImageViewImageFormatInfoEXT* in_struct);
- void initialize(const safe_VkPhysicalDeviceImageViewImageFormatInfoEXT* src);
- VkPhysicalDeviceImageViewImageFormatInfoEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceImageViewImageFormatInfoEXT *>(this); }
- VkPhysicalDeviceImageViewImageFormatInfoEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceImageViewImageFormatInfoEXT const *>(this); }
-};
-
-struct safe_VkFilterCubicImageViewImageFormatPropertiesEXT {
- VkStructureType sType;
- void* pNext;
- VkBool32 filterCubic;
- VkBool32 filterCubicMinmax;
- safe_VkFilterCubicImageViewImageFormatPropertiesEXT(const VkFilterCubicImageViewImageFormatPropertiesEXT* in_struct);
- safe_VkFilterCubicImageViewImageFormatPropertiesEXT(const safe_VkFilterCubicImageViewImageFormatPropertiesEXT& src);
- safe_VkFilterCubicImageViewImageFormatPropertiesEXT& operator=(const safe_VkFilterCubicImageViewImageFormatPropertiesEXT& src);
- safe_VkFilterCubicImageViewImageFormatPropertiesEXT();
- ~safe_VkFilterCubicImageViewImageFormatPropertiesEXT();
- void initialize(const VkFilterCubicImageViewImageFormatPropertiesEXT* in_struct);
- void initialize(const safe_VkFilterCubicImageViewImageFormatPropertiesEXT* src);
- VkFilterCubicImageViewImageFormatPropertiesEXT *ptr() { return reinterpret_cast<VkFilterCubicImageViewImageFormatPropertiesEXT *>(this); }
- VkFilterCubicImageViewImageFormatPropertiesEXT const *ptr() const { return reinterpret_cast<VkFilterCubicImageViewImageFormatPropertiesEXT const *>(this); }
-};
-
-struct safe_VkDeviceQueueGlobalPriorityCreateInfoEXT {
- VkStructureType sType;
- const void* pNext;
- VkQueueGlobalPriorityEXT globalPriority;
- safe_VkDeviceQueueGlobalPriorityCreateInfoEXT(const VkDeviceQueueGlobalPriorityCreateInfoEXT* in_struct);
- safe_VkDeviceQueueGlobalPriorityCreateInfoEXT(const safe_VkDeviceQueueGlobalPriorityCreateInfoEXT& src);
- safe_VkDeviceQueueGlobalPriorityCreateInfoEXT& operator=(const safe_VkDeviceQueueGlobalPriorityCreateInfoEXT& src);
- safe_VkDeviceQueueGlobalPriorityCreateInfoEXT();
- ~safe_VkDeviceQueueGlobalPriorityCreateInfoEXT();
- void initialize(const VkDeviceQueueGlobalPriorityCreateInfoEXT* in_struct);
- void initialize(const safe_VkDeviceQueueGlobalPriorityCreateInfoEXT* src);
- VkDeviceQueueGlobalPriorityCreateInfoEXT *ptr() { return reinterpret_cast<VkDeviceQueueGlobalPriorityCreateInfoEXT *>(this); }
- VkDeviceQueueGlobalPriorityCreateInfoEXT const *ptr() const { return reinterpret_cast<VkDeviceQueueGlobalPriorityCreateInfoEXT const *>(this); }
-};
-
-struct safe_VkImportMemoryHostPointerInfoEXT {
- VkStructureType sType;
- const void* pNext;
- VkExternalMemoryHandleTypeFlagBits handleType;
- void* pHostPointer;
- safe_VkImportMemoryHostPointerInfoEXT(const VkImportMemoryHostPointerInfoEXT* in_struct);
- safe_VkImportMemoryHostPointerInfoEXT(const safe_VkImportMemoryHostPointerInfoEXT& src);
- safe_VkImportMemoryHostPointerInfoEXT& operator=(const safe_VkImportMemoryHostPointerInfoEXT& src);
- safe_VkImportMemoryHostPointerInfoEXT();
- ~safe_VkImportMemoryHostPointerInfoEXT();
- void initialize(const VkImportMemoryHostPointerInfoEXT* in_struct);
- void initialize(const safe_VkImportMemoryHostPointerInfoEXT* src);
- VkImportMemoryHostPointerInfoEXT *ptr() { return reinterpret_cast<VkImportMemoryHostPointerInfoEXT *>(this); }
- VkImportMemoryHostPointerInfoEXT const *ptr() const { return reinterpret_cast<VkImportMemoryHostPointerInfoEXT const *>(this); }
-};
-
-struct safe_VkMemoryHostPointerPropertiesEXT {
- VkStructureType sType;
- void* pNext;
- uint32_t memoryTypeBits;
- safe_VkMemoryHostPointerPropertiesEXT(const VkMemoryHostPointerPropertiesEXT* in_struct);
- safe_VkMemoryHostPointerPropertiesEXT(const safe_VkMemoryHostPointerPropertiesEXT& src);
- safe_VkMemoryHostPointerPropertiesEXT& operator=(const safe_VkMemoryHostPointerPropertiesEXT& src);
- safe_VkMemoryHostPointerPropertiesEXT();
- ~safe_VkMemoryHostPointerPropertiesEXT();
- void initialize(const VkMemoryHostPointerPropertiesEXT* in_struct);
- void initialize(const safe_VkMemoryHostPointerPropertiesEXT* src);
- VkMemoryHostPointerPropertiesEXT *ptr() { return reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>(this); }
- VkMemoryHostPointerPropertiesEXT const *ptr() const { return reinterpret_cast<VkMemoryHostPointerPropertiesEXT const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT {
- VkStructureType sType;
- void* pNext;
- VkDeviceSize minImportedHostPointerAlignment;
- safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT(const VkPhysicalDeviceExternalMemoryHostPropertiesEXT* in_struct);
- safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT(const safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT& src);
- safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT& operator=(const safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT& src);
- safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT();
- ~safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT();
- void initialize(const VkPhysicalDeviceExternalMemoryHostPropertiesEXT* in_struct);
- void initialize(const safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT* src);
- VkPhysicalDeviceExternalMemoryHostPropertiesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceExternalMemoryHostPropertiesEXT *>(this); }
- VkPhysicalDeviceExternalMemoryHostPropertiesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceExternalMemoryHostPropertiesEXT const *>(this); }
-};
-
-struct safe_VkPipelineCompilerControlCreateInfoAMD {
- VkStructureType sType;
- const void* pNext;
- VkPipelineCompilerControlFlagsAMD compilerControlFlags;
- safe_VkPipelineCompilerControlCreateInfoAMD(const VkPipelineCompilerControlCreateInfoAMD* in_struct);
- safe_VkPipelineCompilerControlCreateInfoAMD(const safe_VkPipelineCompilerControlCreateInfoAMD& src);
- safe_VkPipelineCompilerControlCreateInfoAMD& operator=(const safe_VkPipelineCompilerControlCreateInfoAMD& src);
- safe_VkPipelineCompilerControlCreateInfoAMD();
- ~safe_VkPipelineCompilerControlCreateInfoAMD();
- void initialize(const VkPipelineCompilerControlCreateInfoAMD* in_struct);
- void initialize(const safe_VkPipelineCompilerControlCreateInfoAMD* src);
- VkPipelineCompilerControlCreateInfoAMD *ptr() { return reinterpret_cast<VkPipelineCompilerControlCreateInfoAMD *>(this); }
- VkPipelineCompilerControlCreateInfoAMD const *ptr() const { return reinterpret_cast<VkPipelineCompilerControlCreateInfoAMD const *>(this); }
-};
-
-struct safe_VkCalibratedTimestampInfoEXT {
- VkStructureType sType;
- const void* pNext;
- VkTimeDomainEXT timeDomain;
- safe_VkCalibratedTimestampInfoEXT(const VkCalibratedTimestampInfoEXT* in_struct);
- safe_VkCalibratedTimestampInfoEXT(const safe_VkCalibratedTimestampInfoEXT& src);
- safe_VkCalibratedTimestampInfoEXT& operator=(const safe_VkCalibratedTimestampInfoEXT& src);
- safe_VkCalibratedTimestampInfoEXT();
- ~safe_VkCalibratedTimestampInfoEXT();
- void initialize(const VkCalibratedTimestampInfoEXT* in_struct);
- void initialize(const safe_VkCalibratedTimestampInfoEXT* src);
- VkCalibratedTimestampInfoEXT *ptr() { return reinterpret_cast<VkCalibratedTimestampInfoEXT *>(this); }
- VkCalibratedTimestampInfoEXT const *ptr() const { return reinterpret_cast<VkCalibratedTimestampInfoEXT const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceShaderCorePropertiesAMD {
- VkStructureType sType;
- void* pNext;
- uint32_t shaderEngineCount;
- uint32_t shaderArraysPerEngineCount;
- uint32_t computeUnitsPerShaderArray;
- uint32_t simdPerComputeUnit;
- uint32_t wavefrontsPerSimd;
- uint32_t wavefrontSize;
- uint32_t sgprsPerSimd;
- uint32_t minSgprAllocation;
- uint32_t maxSgprAllocation;
- uint32_t sgprAllocationGranularity;
- uint32_t vgprsPerSimd;
- uint32_t minVgprAllocation;
- uint32_t maxVgprAllocation;
- uint32_t vgprAllocationGranularity;
- safe_VkPhysicalDeviceShaderCorePropertiesAMD(const VkPhysicalDeviceShaderCorePropertiesAMD* in_struct);
- safe_VkPhysicalDeviceShaderCorePropertiesAMD(const safe_VkPhysicalDeviceShaderCorePropertiesAMD& src);
- safe_VkPhysicalDeviceShaderCorePropertiesAMD& operator=(const safe_VkPhysicalDeviceShaderCorePropertiesAMD& src);
- safe_VkPhysicalDeviceShaderCorePropertiesAMD();
- ~safe_VkPhysicalDeviceShaderCorePropertiesAMD();
- void initialize(const VkPhysicalDeviceShaderCorePropertiesAMD* in_struct);
- void initialize(const safe_VkPhysicalDeviceShaderCorePropertiesAMD* src);
- VkPhysicalDeviceShaderCorePropertiesAMD *ptr() { return reinterpret_cast<VkPhysicalDeviceShaderCorePropertiesAMD *>(this); }
- VkPhysicalDeviceShaderCorePropertiesAMD const *ptr() const { return reinterpret_cast<VkPhysicalDeviceShaderCorePropertiesAMD const *>(this); }
-};
-
-struct safe_VkDeviceMemoryOverallocationCreateInfoAMD {
- VkStructureType sType;
- const void* pNext;
- VkMemoryOverallocationBehaviorAMD overallocationBehavior;
- safe_VkDeviceMemoryOverallocationCreateInfoAMD(const VkDeviceMemoryOverallocationCreateInfoAMD* in_struct);
- safe_VkDeviceMemoryOverallocationCreateInfoAMD(const safe_VkDeviceMemoryOverallocationCreateInfoAMD& src);
- safe_VkDeviceMemoryOverallocationCreateInfoAMD& operator=(const safe_VkDeviceMemoryOverallocationCreateInfoAMD& src);
- safe_VkDeviceMemoryOverallocationCreateInfoAMD();
- ~safe_VkDeviceMemoryOverallocationCreateInfoAMD();
- void initialize(const VkDeviceMemoryOverallocationCreateInfoAMD* in_struct);
- void initialize(const safe_VkDeviceMemoryOverallocationCreateInfoAMD* src);
- VkDeviceMemoryOverallocationCreateInfoAMD *ptr() { return reinterpret_cast<VkDeviceMemoryOverallocationCreateInfoAMD *>(this); }
- VkDeviceMemoryOverallocationCreateInfoAMD const *ptr() const { return reinterpret_cast<VkDeviceMemoryOverallocationCreateInfoAMD const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT {
- VkStructureType sType;
- void* pNext;
- uint32_t maxVertexAttribDivisor;
- safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT(const VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT* in_struct);
- safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT(const safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT& src);
- safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT& operator=(const safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT& src);
- safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT();
- ~safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT();
- void initialize(const VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT* in_struct);
- void initialize(const safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT* src);
- VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT *>(this); }
- VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const *>(this); }
-};
-
-struct safe_VkPipelineVertexInputDivisorStateCreateInfoEXT {
- VkStructureType sType;
- const void* pNext;
- uint32_t vertexBindingDivisorCount;
- const VkVertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors;
- safe_VkPipelineVertexInputDivisorStateCreateInfoEXT(const VkPipelineVertexInputDivisorStateCreateInfoEXT* in_struct);
- safe_VkPipelineVertexInputDivisorStateCreateInfoEXT(const safe_VkPipelineVertexInputDivisorStateCreateInfoEXT& src);
- safe_VkPipelineVertexInputDivisorStateCreateInfoEXT& operator=(const safe_VkPipelineVertexInputDivisorStateCreateInfoEXT& src);
- safe_VkPipelineVertexInputDivisorStateCreateInfoEXT();
- ~safe_VkPipelineVertexInputDivisorStateCreateInfoEXT();
- void initialize(const VkPipelineVertexInputDivisorStateCreateInfoEXT* in_struct);
- void initialize(const safe_VkPipelineVertexInputDivisorStateCreateInfoEXT* src);
- VkPipelineVertexInputDivisorStateCreateInfoEXT *ptr() { return reinterpret_cast<VkPipelineVertexInputDivisorStateCreateInfoEXT *>(this); }
- VkPipelineVertexInputDivisorStateCreateInfoEXT const *ptr() const { return reinterpret_cast<VkPipelineVertexInputDivisorStateCreateInfoEXT const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT {
- VkStructureType sType;
- void* pNext;
- VkBool32 vertexAttributeInstanceRateDivisor;
- VkBool32 vertexAttributeInstanceRateZeroDivisor;
- safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT(const VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT* in_struct);
- safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT(const safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT& src);
- safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT& operator=(const safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT& src);
- safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT();
- ~safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT();
- void initialize(const VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT* in_struct);
- void initialize(const safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT* src);
- VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT *>(this); }
- VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const *>(this); }
-};
-
-#ifdef VK_USE_PLATFORM_GGP
-struct safe_VkPresentFrameTokenGGP {
- VkStructureType sType;
- const void* pNext;
- GgpFrameToken frameToken;
- safe_VkPresentFrameTokenGGP(const VkPresentFrameTokenGGP* in_struct);
- safe_VkPresentFrameTokenGGP(const safe_VkPresentFrameTokenGGP& src);
- safe_VkPresentFrameTokenGGP& operator=(const safe_VkPresentFrameTokenGGP& src);
- safe_VkPresentFrameTokenGGP();
- ~safe_VkPresentFrameTokenGGP();
- void initialize(const VkPresentFrameTokenGGP* in_struct);
- void initialize(const safe_VkPresentFrameTokenGGP* src);
- VkPresentFrameTokenGGP *ptr() { return reinterpret_cast<VkPresentFrameTokenGGP *>(this); }
- VkPresentFrameTokenGGP const *ptr() const { return reinterpret_cast<VkPresentFrameTokenGGP const *>(this); }
-};
-#endif // VK_USE_PLATFORM_GGP
-
-struct safe_VkPipelineCreationFeedbackCreateInfoEXT {
- VkStructureType sType;
- const void* pNext;
- VkPipelineCreationFeedbackEXT* pPipelineCreationFeedback;
- uint32_t pipelineStageCreationFeedbackCount;
- VkPipelineCreationFeedbackEXT* pPipelineStageCreationFeedbacks;
- safe_VkPipelineCreationFeedbackCreateInfoEXT(const VkPipelineCreationFeedbackCreateInfoEXT* in_struct);
- safe_VkPipelineCreationFeedbackCreateInfoEXT(const safe_VkPipelineCreationFeedbackCreateInfoEXT& src);
- safe_VkPipelineCreationFeedbackCreateInfoEXT& operator=(const safe_VkPipelineCreationFeedbackCreateInfoEXT& src);
- safe_VkPipelineCreationFeedbackCreateInfoEXT();
- ~safe_VkPipelineCreationFeedbackCreateInfoEXT();
- void initialize(const VkPipelineCreationFeedbackCreateInfoEXT* in_struct);
- void initialize(const safe_VkPipelineCreationFeedbackCreateInfoEXT* src);
- VkPipelineCreationFeedbackCreateInfoEXT *ptr() { return reinterpret_cast<VkPipelineCreationFeedbackCreateInfoEXT *>(this); }
- VkPipelineCreationFeedbackCreateInfoEXT const *ptr() const { return reinterpret_cast<VkPipelineCreationFeedbackCreateInfoEXT const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV {
- VkStructureType sType;
- void* pNext;
- VkBool32 computeDerivativeGroupQuads;
- VkBool32 computeDerivativeGroupLinear;
- safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV(const VkPhysicalDeviceComputeShaderDerivativesFeaturesNV* in_struct);
- safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV(const safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV& src);
- safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV& operator=(const safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV& src);
- safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV();
- ~safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV();
- void initialize(const VkPhysicalDeviceComputeShaderDerivativesFeaturesNV* in_struct);
- void initialize(const safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV* src);
- VkPhysicalDeviceComputeShaderDerivativesFeaturesNV *ptr() { return reinterpret_cast<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV *>(this); }
- VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const *ptr() const { return reinterpret_cast<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceMeshShaderFeaturesNV {
- VkStructureType sType;
- void* pNext;
- VkBool32 taskShader;
- VkBool32 meshShader;
- safe_VkPhysicalDeviceMeshShaderFeaturesNV(const VkPhysicalDeviceMeshShaderFeaturesNV* in_struct);
- safe_VkPhysicalDeviceMeshShaderFeaturesNV(const safe_VkPhysicalDeviceMeshShaderFeaturesNV& src);
- safe_VkPhysicalDeviceMeshShaderFeaturesNV& operator=(const safe_VkPhysicalDeviceMeshShaderFeaturesNV& src);
- safe_VkPhysicalDeviceMeshShaderFeaturesNV();
- ~safe_VkPhysicalDeviceMeshShaderFeaturesNV();
- void initialize(const VkPhysicalDeviceMeshShaderFeaturesNV* in_struct);
- void initialize(const safe_VkPhysicalDeviceMeshShaderFeaturesNV* src);
- VkPhysicalDeviceMeshShaderFeaturesNV *ptr() { return reinterpret_cast<VkPhysicalDeviceMeshShaderFeaturesNV *>(this); }
- VkPhysicalDeviceMeshShaderFeaturesNV const *ptr() const { return reinterpret_cast<VkPhysicalDeviceMeshShaderFeaturesNV const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceMeshShaderPropertiesNV {
- VkStructureType sType;
- void* pNext;
- uint32_t maxDrawMeshTasksCount;
- uint32_t maxTaskWorkGroupInvocations;
- uint32_t maxTaskWorkGroupSize[3];
- uint32_t maxTaskTotalMemorySize;
- uint32_t maxTaskOutputCount;
- uint32_t maxMeshWorkGroupInvocations;
- uint32_t maxMeshWorkGroupSize[3];
- uint32_t maxMeshTotalMemorySize;
- uint32_t maxMeshOutputVertices;
- uint32_t maxMeshOutputPrimitives;
- uint32_t maxMeshMultiviewViewCount;
- uint32_t meshOutputPerVertexGranularity;
- uint32_t meshOutputPerPrimitiveGranularity;
- safe_VkPhysicalDeviceMeshShaderPropertiesNV(const VkPhysicalDeviceMeshShaderPropertiesNV* in_struct);
- safe_VkPhysicalDeviceMeshShaderPropertiesNV(const safe_VkPhysicalDeviceMeshShaderPropertiesNV& src);
- safe_VkPhysicalDeviceMeshShaderPropertiesNV& operator=(const safe_VkPhysicalDeviceMeshShaderPropertiesNV& src);
- safe_VkPhysicalDeviceMeshShaderPropertiesNV();
- ~safe_VkPhysicalDeviceMeshShaderPropertiesNV();
- void initialize(const VkPhysicalDeviceMeshShaderPropertiesNV* in_struct);
- void initialize(const safe_VkPhysicalDeviceMeshShaderPropertiesNV* src);
- VkPhysicalDeviceMeshShaderPropertiesNV *ptr() { return reinterpret_cast<VkPhysicalDeviceMeshShaderPropertiesNV *>(this); }
- VkPhysicalDeviceMeshShaderPropertiesNV const *ptr() const { return reinterpret_cast<VkPhysicalDeviceMeshShaderPropertiesNV const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV {
- VkStructureType sType;
- void* pNext;
- VkBool32 fragmentShaderBarycentric;
- safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV(const VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV* in_struct);
- safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV(const safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV& src);
- safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV& operator=(const safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV& src);
- safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV();
- ~safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV();
- void initialize(const VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV* in_struct);
- void initialize(const safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV* src);
- VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV *ptr() { return reinterpret_cast<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV *>(this); }
- VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const *ptr() const { return reinterpret_cast<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV {
- VkStructureType sType;
- void* pNext;
- VkBool32 imageFootprint;
- safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV(const VkPhysicalDeviceShaderImageFootprintFeaturesNV* in_struct);
- safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV(const safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV& src);
- safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV& operator=(const safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV& src);
- safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV();
- ~safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV();
- void initialize(const VkPhysicalDeviceShaderImageFootprintFeaturesNV* in_struct);
- void initialize(const safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV* src);
- VkPhysicalDeviceShaderImageFootprintFeaturesNV *ptr() { return reinterpret_cast<VkPhysicalDeviceShaderImageFootprintFeaturesNV *>(this); }
- VkPhysicalDeviceShaderImageFootprintFeaturesNV const *ptr() const { return reinterpret_cast<VkPhysicalDeviceShaderImageFootprintFeaturesNV const *>(this); }
-};
-
-struct safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV {
- VkStructureType sType;
- const void* pNext;
- uint32_t exclusiveScissorCount;
- const VkRect2D* pExclusiveScissors;
- safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV(const VkPipelineViewportExclusiveScissorStateCreateInfoNV* in_struct);
- safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV(const safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV& src);
- safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV& operator=(const safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV& src);
- safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV();
- ~safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV();
- void initialize(const VkPipelineViewportExclusiveScissorStateCreateInfoNV* in_struct);
- void initialize(const safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV* src);
- VkPipelineViewportExclusiveScissorStateCreateInfoNV *ptr() { return reinterpret_cast<VkPipelineViewportExclusiveScissorStateCreateInfoNV *>(this); }
- VkPipelineViewportExclusiveScissorStateCreateInfoNV const *ptr() const { return reinterpret_cast<VkPipelineViewportExclusiveScissorStateCreateInfoNV const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceExclusiveScissorFeaturesNV {
- VkStructureType sType;
- void* pNext;
- VkBool32 exclusiveScissor;
- safe_VkPhysicalDeviceExclusiveScissorFeaturesNV(const VkPhysicalDeviceExclusiveScissorFeaturesNV* in_struct);
- safe_VkPhysicalDeviceExclusiveScissorFeaturesNV(const safe_VkPhysicalDeviceExclusiveScissorFeaturesNV& src);
- safe_VkPhysicalDeviceExclusiveScissorFeaturesNV& operator=(const safe_VkPhysicalDeviceExclusiveScissorFeaturesNV& src);
- safe_VkPhysicalDeviceExclusiveScissorFeaturesNV();
- ~safe_VkPhysicalDeviceExclusiveScissorFeaturesNV();
- void initialize(const VkPhysicalDeviceExclusiveScissorFeaturesNV* in_struct);
- void initialize(const safe_VkPhysicalDeviceExclusiveScissorFeaturesNV* src);
- VkPhysicalDeviceExclusiveScissorFeaturesNV *ptr() { return reinterpret_cast<VkPhysicalDeviceExclusiveScissorFeaturesNV *>(this); }
- VkPhysicalDeviceExclusiveScissorFeaturesNV const *ptr() const { return reinterpret_cast<VkPhysicalDeviceExclusiveScissorFeaturesNV const *>(this); }
-};
-
-struct safe_VkQueueFamilyCheckpointPropertiesNV {
- VkStructureType sType;
- void* pNext;
- VkPipelineStageFlags checkpointExecutionStageMask;
- safe_VkQueueFamilyCheckpointPropertiesNV(const VkQueueFamilyCheckpointPropertiesNV* in_struct);
- safe_VkQueueFamilyCheckpointPropertiesNV(const safe_VkQueueFamilyCheckpointPropertiesNV& src);
- safe_VkQueueFamilyCheckpointPropertiesNV& operator=(const safe_VkQueueFamilyCheckpointPropertiesNV& src);
- safe_VkQueueFamilyCheckpointPropertiesNV();
- ~safe_VkQueueFamilyCheckpointPropertiesNV();
- void initialize(const VkQueueFamilyCheckpointPropertiesNV* in_struct);
- void initialize(const safe_VkQueueFamilyCheckpointPropertiesNV* src);
- VkQueueFamilyCheckpointPropertiesNV *ptr() { return reinterpret_cast<VkQueueFamilyCheckpointPropertiesNV *>(this); }
- VkQueueFamilyCheckpointPropertiesNV const *ptr() const { return reinterpret_cast<VkQueueFamilyCheckpointPropertiesNV const *>(this); }
-};
-
-struct safe_VkCheckpointDataNV {
- VkStructureType sType;
- void* pNext;
- VkPipelineStageFlagBits stage;
- void* pCheckpointMarker;
- safe_VkCheckpointDataNV(const VkCheckpointDataNV* in_struct);
- safe_VkCheckpointDataNV(const safe_VkCheckpointDataNV& src);
- safe_VkCheckpointDataNV& operator=(const safe_VkCheckpointDataNV& src);
- safe_VkCheckpointDataNV();
- ~safe_VkCheckpointDataNV();
- void initialize(const VkCheckpointDataNV* in_struct);
- void initialize(const safe_VkCheckpointDataNV* src);
- VkCheckpointDataNV *ptr() { return reinterpret_cast<VkCheckpointDataNV *>(this); }
- VkCheckpointDataNV const *ptr() const { return reinterpret_cast<VkCheckpointDataNV const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL {
- VkStructureType sType;
- void* pNext;
- VkBool32 shaderIntegerFunctions2;
- safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL(const VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL* in_struct);
- safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL(const safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL& src);
- safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL& operator=(const safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL& src);
- safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL();
- ~safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL();
- void initialize(const VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL* in_struct);
- void initialize(const safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL* src);
- VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL *ptr() { return reinterpret_cast<VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL *>(this); }
- VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const *ptr() const { return reinterpret_cast<VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const *>(this); }
-};
-
-struct safe_VkPerformanceValueDataINTEL {
- uint32_t value32;
- uint64_t value64;
- float valueFloat;
- VkBool32 valueBool;
- const char* valueString;
- safe_VkPerformanceValueDataINTEL(const VkPerformanceValueDataINTEL* in_struct);
- safe_VkPerformanceValueDataINTEL(const safe_VkPerformanceValueDataINTEL& src);
- safe_VkPerformanceValueDataINTEL& operator=(const safe_VkPerformanceValueDataINTEL& src);
- safe_VkPerformanceValueDataINTEL();
- ~safe_VkPerformanceValueDataINTEL();
- void initialize(const VkPerformanceValueDataINTEL* in_struct);
- void initialize(const safe_VkPerformanceValueDataINTEL* src);
- VkPerformanceValueDataINTEL *ptr() { return reinterpret_cast<VkPerformanceValueDataINTEL *>(this); }
- VkPerformanceValueDataINTEL const *ptr() const { return reinterpret_cast<VkPerformanceValueDataINTEL const *>(this); }
-};
-
-struct safe_VkInitializePerformanceApiInfoINTEL {
- VkStructureType sType;
- const void* pNext;
- void* pUserData;
- safe_VkInitializePerformanceApiInfoINTEL(const VkInitializePerformanceApiInfoINTEL* in_struct);
- safe_VkInitializePerformanceApiInfoINTEL(const safe_VkInitializePerformanceApiInfoINTEL& src);
- safe_VkInitializePerformanceApiInfoINTEL& operator=(const safe_VkInitializePerformanceApiInfoINTEL& src);
- safe_VkInitializePerformanceApiInfoINTEL();
- ~safe_VkInitializePerformanceApiInfoINTEL();
- void initialize(const VkInitializePerformanceApiInfoINTEL* in_struct);
- void initialize(const safe_VkInitializePerformanceApiInfoINTEL* src);
- VkInitializePerformanceApiInfoINTEL *ptr() { return reinterpret_cast<VkInitializePerformanceApiInfoINTEL *>(this); }
- VkInitializePerformanceApiInfoINTEL const *ptr() const { return reinterpret_cast<VkInitializePerformanceApiInfoINTEL const *>(this); }
-};
-
-struct safe_VkQueryPoolCreateInfoINTEL {
- VkStructureType sType;
- const void* pNext;
- VkQueryPoolSamplingModeINTEL performanceCountersSampling;
- safe_VkQueryPoolCreateInfoINTEL(const VkQueryPoolCreateInfoINTEL* in_struct);
- safe_VkQueryPoolCreateInfoINTEL(const safe_VkQueryPoolCreateInfoINTEL& src);
- safe_VkQueryPoolCreateInfoINTEL& operator=(const safe_VkQueryPoolCreateInfoINTEL& src);
- safe_VkQueryPoolCreateInfoINTEL();
- ~safe_VkQueryPoolCreateInfoINTEL();
- void initialize(const VkQueryPoolCreateInfoINTEL* in_struct);
- void initialize(const safe_VkQueryPoolCreateInfoINTEL* src);
- VkQueryPoolCreateInfoINTEL *ptr() { return reinterpret_cast<VkQueryPoolCreateInfoINTEL *>(this); }
- VkQueryPoolCreateInfoINTEL const *ptr() const { return reinterpret_cast<VkQueryPoolCreateInfoINTEL const *>(this); }
-};
-
-struct safe_VkPerformanceMarkerInfoINTEL {
- VkStructureType sType;
- const void* pNext;
- uint64_t marker;
- safe_VkPerformanceMarkerInfoINTEL(const VkPerformanceMarkerInfoINTEL* in_struct);
- safe_VkPerformanceMarkerInfoINTEL(const safe_VkPerformanceMarkerInfoINTEL& src);
- safe_VkPerformanceMarkerInfoINTEL& operator=(const safe_VkPerformanceMarkerInfoINTEL& src);
- safe_VkPerformanceMarkerInfoINTEL();
- ~safe_VkPerformanceMarkerInfoINTEL();
- void initialize(const VkPerformanceMarkerInfoINTEL* in_struct);
- void initialize(const safe_VkPerformanceMarkerInfoINTEL* src);
- VkPerformanceMarkerInfoINTEL *ptr() { return reinterpret_cast<VkPerformanceMarkerInfoINTEL *>(this); }
- VkPerformanceMarkerInfoINTEL const *ptr() const { return reinterpret_cast<VkPerformanceMarkerInfoINTEL const *>(this); }
-};
-
-struct safe_VkPerformanceStreamMarkerInfoINTEL {
- VkStructureType sType;
- const void* pNext;
- uint32_t marker;
- safe_VkPerformanceStreamMarkerInfoINTEL(const VkPerformanceStreamMarkerInfoINTEL* in_struct);
- safe_VkPerformanceStreamMarkerInfoINTEL(const safe_VkPerformanceStreamMarkerInfoINTEL& src);
- safe_VkPerformanceStreamMarkerInfoINTEL& operator=(const safe_VkPerformanceStreamMarkerInfoINTEL& src);
- safe_VkPerformanceStreamMarkerInfoINTEL();
- ~safe_VkPerformanceStreamMarkerInfoINTEL();
- void initialize(const VkPerformanceStreamMarkerInfoINTEL* in_struct);
- void initialize(const safe_VkPerformanceStreamMarkerInfoINTEL* src);
- VkPerformanceStreamMarkerInfoINTEL *ptr() { return reinterpret_cast<VkPerformanceStreamMarkerInfoINTEL *>(this); }
- VkPerformanceStreamMarkerInfoINTEL const *ptr() const { return reinterpret_cast<VkPerformanceStreamMarkerInfoINTEL const *>(this); }
-};
-
-struct safe_VkPerformanceOverrideInfoINTEL {
- VkStructureType sType;
- const void* pNext;
- VkPerformanceOverrideTypeINTEL type;
- VkBool32 enable;
- uint64_t parameter;
- safe_VkPerformanceOverrideInfoINTEL(const VkPerformanceOverrideInfoINTEL* in_struct);
- safe_VkPerformanceOverrideInfoINTEL(const safe_VkPerformanceOverrideInfoINTEL& src);
- safe_VkPerformanceOverrideInfoINTEL& operator=(const safe_VkPerformanceOverrideInfoINTEL& src);
- safe_VkPerformanceOverrideInfoINTEL();
- ~safe_VkPerformanceOverrideInfoINTEL();
- void initialize(const VkPerformanceOverrideInfoINTEL* in_struct);
- void initialize(const safe_VkPerformanceOverrideInfoINTEL* src);
- VkPerformanceOverrideInfoINTEL *ptr() { return reinterpret_cast<VkPerformanceOverrideInfoINTEL *>(this); }
- VkPerformanceOverrideInfoINTEL const *ptr() const { return reinterpret_cast<VkPerformanceOverrideInfoINTEL const *>(this); }
-};
-
-struct safe_VkPerformanceConfigurationAcquireInfoINTEL {
- VkStructureType sType;
- const void* pNext;
- VkPerformanceConfigurationTypeINTEL type;
- safe_VkPerformanceConfigurationAcquireInfoINTEL(const VkPerformanceConfigurationAcquireInfoINTEL* in_struct);
- safe_VkPerformanceConfigurationAcquireInfoINTEL(const safe_VkPerformanceConfigurationAcquireInfoINTEL& src);
- safe_VkPerformanceConfigurationAcquireInfoINTEL& operator=(const safe_VkPerformanceConfigurationAcquireInfoINTEL& src);
- safe_VkPerformanceConfigurationAcquireInfoINTEL();
- ~safe_VkPerformanceConfigurationAcquireInfoINTEL();
- void initialize(const VkPerformanceConfigurationAcquireInfoINTEL* in_struct);
- void initialize(const safe_VkPerformanceConfigurationAcquireInfoINTEL* src);
- VkPerformanceConfigurationAcquireInfoINTEL *ptr() { return reinterpret_cast<VkPerformanceConfigurationAcquireInfoINTEL *>(this); }
- VkPerformanceConfigurationAcquireInfoINTEL const *ptr() const { return reinterpret_cast<VkPerformanceConfigurationAcquireInfoINTEL const *>(this); }
-};
-
-struct safe_VkPhysicalDevicePCIBusInfoPropertiesEXT {
- VkStructureType sType;
- void* pNext;
- uint32_t pciDomain;
- uint32_t pciBus;
- uint32_t pciDevice;
- uint32_t pciFunction;
- safe_VkPhysicalDevicePCIBusInfoPropertiesEXT(const VkPhysicalDevicePCIBusInfoPropertiesEXT* in_struct);
- safe_VkPhysicalDevicePCIBusInfoPropertiesEXT(const safe_VkPhysicalDevicePCIBusInfoPropertiesEXT& src);
- safe_VkPhysicalDevicePCIBusInfoPropertiesEXT& operator=(const safe_VkPhysicalDevicePCIBusInfoPropertiesEXT& src);
- safe_VkPhysicalDevicePCIBusInfoPropertiesEXT();
- ~safe_VkPhysicalDevicePCIBusInfoPropertiesEXT();
- void initialize(const VkPhysicalDevicePCIBusInfoPropertiesEXT* in_struct);
- void initialize(const safe_VkPhysicalDevicePCIBusInfoPropertiesEXT* src);
- VkPhysicalDevicePCIBusInfoPropertiesEXT *ptr() { return reinterpret_cast<VkPhysicalDevicePCIBusInfoPropertiesEXT *>(this); }
- VkPhysicalDevicePCIBusInfoPropertiesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDevicePCIBusInfoPropertiesEXT const *>(this); }
-};
-
-struct safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD {
- VkStructureType sType;
- void* pNext;
- VkBool32 localDimmingSupport;
- safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD(const VkDisplayNativeHdrSurfaceCapabilitiesAMD* in_struct);
- safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD(const safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD& src);
- safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD& operator=(const safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD& src);
- safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD();
- ~safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD();
- void initialize(const VkDisplayNativeHdrSurfaceCapabilitiesAMD* in_struct);
- void initialize(const safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD* src);
- VkDisplayNativeHdrSurfaceCapabilitiesAMD *ptr() { return reinterpret_cast<VkDisplayNativeHdrSurfaceCapabilitiesAMD *>(this); }
- VkDisplayNativeHdrSurfaceCapabilitiesAMD const *ptr() const { return reinterpret_cast<VkDisplayNativeHdrSurfaceCapabilitiesAMD const *>(this); }
-};
-
-struct safe_VkSwapchainDisplayNativeHdrCreateInfoAMD {
- VkStructureType sType;
- const void* pNext;
- VkBool32 localDimmingEnable;
- safe_VkSwapchainDisplayNativeHdrCreateInfoAMD(const VkSwapchainDisplayNativeHdrCreateInfoAMD* in_struct);
- safe_VkSwapchainDisplayNativeHdrCreateInfoAMD(const safe_VkSwapchainDisplayNativeHdrCreateInfoAMD& src);
- safe_VkSwapchainDisplayNativeHdrCreateInfoAMD& operator=(const safe_VkSwapchainDisplayNativeHdrCreateInfoAMD& src);
- safe_VkSwapchainDisplayNativeHdrCreateInfoAMD();
- ~safe_VkSwapchainDisplayNativeHdrCreateInfoAMD();
- void initialize(const VkSwapchainDisplayNativeHdrCreateInfoAMD* in_struct);
- void initialize(const safe_VkSwapchainDisplayNativeHdrCreateInfoAMD* src);
- VkSwapchainDisplayNativeHdrCreateInfoAMD *ptr() { return reinterpret_cast<VkSwapchainDisplayNativeHdrCreateInfoAMD *>(this); }
- VkSwapchainDisplayNativeHdrCreateInfoAMD const *ptr() const { return reinterpret_cast<VkSwapchainDisplayNativeHdrCreateInfoAMD const *>(this); }
-};
-
-#ifdef VK_USE_PLATFORM_FUCHSIA
-struct safe_VkImagePipeSurfaceCreateInfoFUCHSIA {
- VkStructureType sType;
- const void* pNext;
- VkImagePipeSurfaceCreateFlagsFUCHSIA flags;
- zx_handle_t imagePipeHandle;
- safe_VkImagePipeSurfaceCreateInfoFUCHSIA(const VkImagePipeSurfaceCreateInfoFUCHSIA* in_struct);
- safe_VkImagePipeSurfaceCreateInfoFUCHSIA(const safe_VkImagePipeSurfaceCreateInfoFUCHSIA& src);
- safe_VkImagePipeSurfaceCreateInfoFUCHSIA& operator=(const safe_VkImagePipeSurfaceCreateInfoFUCHSIA& src);
- safe_VkImagePipeSurfaceCreateInfoFUCHSIA();
- ~safe_VkImagePipeSurfaceCreateInfoFUCHSIA();
- void initialize(const VkImagePipeSurfaceCreateInfoFUCHSIA* in_struct);
- void initialize(const safe_VkImagePipeSurfaceCreateInfoFUCHSIA* src);
- VkImagePipeSurfaceCreateInfoFUCHSIA *ptr() { return reinterpret_cast<VkImagePipeSurfaceCreateInfoFUCHSIA *>(this); }
- VkImagePipeSurfaceCreateInfoFUCHSIA const *ptr() const { return reinterpret_cast<VkImagePipeSurfaceCreateInfoFUCHSIA const *>(this); }
-};
-#endif // VK_USE_PLATFORM_FUCHSIA
-
-#ifdef VK_USE_PLATFORM_METAL_EXT
-struct safe_VkMetalSurfaceCreateInfoEXT {
- VkStructureType sType;
- const void* pNext;
- VkMetalSurfaceCreateFlagsEXT flags;
- const CAMetalLayer* pLayer;
- safe_VkMetalSurfaceCreateInfoEXT(const VkMetalSurfaceCreateInfoEXT* in_struct);
- safe_VkMetalSurfaceCreateInfoEXT(const safe_VkMetalSurfaceCreateInfoEXT& src);
- safe_VkMetalSurfaceCreateInfoEXT& operator=(const safe_VkMetalSurfaceCreateInfoEXT& src);
- safe_VkMetalSurfaceCreateInfoEXT();
- ~safe_VkMetalSurfaceCreateInfoEXT();
- void initialize(const VkMetalSurfaceCreateInfoEXT* in_struct);
- void initialize(const safe_VkMetalSurfaceCreateInfoEXT* src);
- VkMetalSurfaceCreateInfoEXT *ptr() { return reinterpret_cast<VkMetalSurfaceCreateInfoEXT *>(this); }
- VkMetalSurfaceCreateInfoEXT const *ptr() const { return reinterpret_cast<VkMetalSurfaceCreateInfoEXT const *>(this); }
-};
-#endif // VK_USE_PLATFORM_METAL_EXT
-
-struct safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT {
- VkStructureType sType;
- void* pNext;
- VkBool32 fragmentDensityMap;
- VkBool32 fragmentDensityMapDynamic;
- VkBool32 fragmentDensityMapNonSubsampledImages;
- safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT(const VkPhysicalDeviceFragmentDensityMapFeaturesEXT* in_struct);
- safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT(const safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT& src);
- safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT& operator=(const safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT& src);
- safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT();
- ~safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT();
- void initialize(const VkPhysicalDeviceFragmentDensityMapFeaturesEXT* in_struct);
- void initialize(const safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT* src);
- VkPhysicalDeviceFragmentDensityMapFeaturesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceFragmentDensityMapFeaturesEXT *>(this); }
- VkPhysicalDeviceFragmentDensityMapFeaturesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceFragmentDensityMapFeaturesEXT const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT {
- VkStructureType sType;
- void* pNext;
- VkExtent2D minFragmentDensityTexelSize;
- VkExtent2D maxFragmentDensityTexelSize;
- VkBool32 fragmentDensityInvocations;
- safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT(const VkPhysicalDeviceFragmentDensityMapPropertiesEXT* in_struct);
- safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT(const safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT& src);
- safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT& operator=(const safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT& src);
- safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT();
- ~safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT();
- void initialize(const VkPhysicalDeviceFragmentDensityMapPropertiesEXT* in_struct);
- void initialize(const safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT* src);
- VkPhysicalDeviceFragmentDensityMapPropertiesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceFragmentDensityMapPropertiesEXT *>(this); }
- VkPhysicalDeviceFragmentDensityMapPropertiesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceFragmentDensityMapPropertiesEXT const *>(this); }
-};
-
-struct safe_VkRenderPassFragmentDensityMapCreateInfoEXT {
- VkStructureType sType;
- const void* pNext;
- VkAttachmentReference fragmentDensityMapAttachment;
- safe_VkRenderPassFragmentDensityMapCreateInfoEXT(const VkRenderPassFragmentDensityMapCreateInfoEXT* in_struct);
- safe_VkRenderPassFragmentDensityMapCreateInfoEXT(const safe_VkRenderPassFragmentDensityMapCreateInfoEXT& src);
- safe_VkRenderPassFragmentDensityMapCreateInfoEXT& operator=(const safe_VkRenderPassFragmentDensityMapCreateInfoEXT& src);
- safe_VkRenderPassFragmentDensityMapCreateInfoEXT();
- ~safe_VkRenderPassFragmentDensityMapCreateInfoEXT();
- void initialize(const VkRenderPassFragmentDensityMapCreateInfoEXT* in_struct);
- void initialize(const safe_VkRenderPassFragmentDensityMapCreateInfoEXT* src);
- VkRenderPassFragmentDensityMapCreateInfoEXT *ptr() { return reinterpret_cast<VkRenderPassFragmentDensityMapCreateInfoEXT *>(this); }
- VkRenderPassFragmentDensityMapCreateInfoEXT const *ptr() const { return reinterpret_cast<VkRenderPassFragmentDensityMapCreateInfoEXT const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT {
- VkStructureType sType;
- void* pNext;
- VkBool32 scalarBlockLayout;
- safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT(const VkPhysicalDeviceScalarBlockLayoutFeaturesEXT* in_struct);
- safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT(const safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT& src);
- safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT& operator=(const safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT& src);
- safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT();
- ~safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT();
- void initialize(const VkPhysicalDeviceScalarBlockLayoutFeaturesEXT* in_struct);
- void initialize(const safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT* src);
- VkPhysicalDeviceScalarBlockLayoutFeaturesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceScalarBlockLayoutFeaturesEXT *>(this); }
- VkPhysicalDeviceScalarBlockLayoutFeaturesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceScalarBlockLayoutFeaturesEXT const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT {
- VkStructureType sType;
- void* pNext;
- VkBool32 subgroupSizeControl;
- VkBool32 computeFullSubgroups;
- safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT(const VkPhysicalDeviceSubgroupSizeControlFeaturesEXT* in_struct);
- safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT(const safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT& src);
- safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT& operator=(const safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT& src);
- safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT();
- ~safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT();
- void initialize(const VkPhysicalDeviceSubgroupSizeControlFeaturesEXT* in_struct);
- void initialize(const safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT* src);
- VkPhysicalDeviceSubgroupSizeControlFeaturesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlFeaturesEXT *>(this); }
- VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT {
- VkStructureType sType;
- void* pNext;
- uint32_t minSubgroupSize;
- uint32_t maxSubgroupSize;
- uint32_t maxComputeWorkgroupSubgroups;
- VkShaderStageFlags requiredSubgroupSizeStages;
- safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT(const VkPhysicalDeviceSubgroupSizeControlPropertiesEXT* in_struct);
- safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT(const safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT& src);
- safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT& operator=(const safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT& src);
- safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT();
- ~safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT();
- void initialize(const VkPhysicalDeviceSubgroupSizeControlPropertiesEXT* in_struct);
- void initialize(const safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT* src);
- VkPhysicalDeviceSubgroupSizeControlPropertiesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlPropertiesEXT *>(this); }
- VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const *>(this); }
-};
-
-struct safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT {
- VkStructureType sType;
- void* pNext;
- uint32_t requiredSubgroupSize;
- safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT(const VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT* in_struct);
- safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT(const safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT& src);
- safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT& operator=(const safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT& src);
- safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT();
- ~safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT();
- void initialize(const VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT* in_struct);
- void initialize(const safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT* src);
- VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT *ptr() { return reinterpret_cast<VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT *>(this); }
- VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const *ptr() const { return reinterpret_cast<VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceShaderCoreProperties2AMD {
- VkStructureType sType;
- void* pNext;
- VkShaderCorePropertiesFlagsAMD shaderCoreFeatures;
- uint32_t activeComputeUnitCount;
- safe_VkPhysicalDeviceShaderCoreProperties2AMD(const VkPhysicalDeviceShaderCoreProperties2AMD* in_struct);
- safe_VkPhysicalDeviceShaderCoreProperties2AMD(const safe_VkPhysicalDeviceShaderCoreProperties2AMD& src);
- safe_VkPhysicalDeviceShaderCoreProperties2AMD& operator=(const safe_VkPhysicalDeviceShaderCoreProperties2AMD& src);
- safe_VkPhysicalDeviceShaderCoreProperties2AMD();
- ~safe_VkPhysicalDeviceShaderCoreProperties2AMD();
- void initialize(const VkPhysicalDeviceShaderCoreProperties2AMD* in_struct);
- void initialize(const safe_VkPhysicalDeviceShaderCoreProperties2AMD* src);
- VkPhysicalDeviceShaderCoreProperties2AMD *ptr() { return reinterpret_cast<VkPhysicalDeviceShaderCoreProperties2AMD *>(this); }
- VkPhysicalDeviceShaderCoreProperties2AMD const *ptr() const { return reinterpret_cast<VkPhysicalDeviceShaderCoreProperties2AMD const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD {
- VkStructureType sType;
- void* pNext;
- VkBool32 deviceCoherentMemory;
- safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD(const VkPhysicalDeviceCoherentMemoryFeaturesAMD* in_struct);
- safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD(const safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD& src);
- safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD& operator=(const safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD& src);
- safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD();
- ~safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD();
- void initialize(const VkPhysicalDeviceCoherentMemoryFeaturesAMD* in_struct);
- void initialize(const safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD* src);
- VkPhysicalDeviceCoherentMemoryFeaturesAMD *ptr() { return reinterpret_cast<VkPhysicalDeviceCoherentMemoryFeaturesAMD *>(this); }
- VkPhysicalDeviceCoherentMemoryFeaturesAMD const *ptr() const { return reinterpret_cast<VkPhysicalDeviceCoherentMemoryFeaturesAMD const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT {
- VkStructureType sType;
- void* pNext;
- VkDeviceSize heapBudget[VK_MAX_MEMORY_HEAPS];
- VkDeviceSize heapUsage[VK_MAX_MEMORY_HEAPS];
- safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT(const VkPhysicalDeviceMemoryBudgetPropertiesEXT* in_struct);
- safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT(const safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT& src);
- safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT& operator=(const safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT& src);
- safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT();
- ~safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT();
- void initialize(const VkPhysicalDeviceMemoryBudgetPropertiesEXT* in_struct);
- void initialize(const safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT* src);
- VkPhysicalDeviceMemoryBudgetPropertiesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceMemoryBudgetPropertiesEXT *>(this); }
- VkPhysicalDeviceMemoryBudgetPropertiesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceMemoryBudgetPropertiesEXT const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT {
- VkStructureType sType;
- void* pNext;
- VkBool32 memoryPriority;
- safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT(const VkPhysicalDeviceMemoryPriorityFeaturesEXT* in_struct);
- safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT(const safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT& src);
- safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT& operator=(const safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT& src);
- safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT();
- ~safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT();
- void initialize(const VkPhysicalDeviceMemoryPriorityFeaturesEXT* in_struct);
- void initialize(const safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT* src);
- VkPhysicalDeviceMemoryPriorityFeaturesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceMemoryPriorityFeaturesEXT *>(this); }
- VkPhysicalDeviceMemoryPriorityFeaturesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceMemoryPriorityFeaturesEXT const *>(this); }
-};
-
-struct safe_VkMemoryPriorityAllocateInfoEXT {
- VkStructureType sType;
- const void* pNext;
- float priority;
- safe_VkMemoryPriorityAllocateInfoEXT(const VkMemoryPriorityAllocateInfoEXT* in_struct);
- safe_VkMemoryPriorityAllocateInfoEXT(const safe_VkMemoryPriorityAllocateInfoEXT& src);
- safe_VkMemoryPriorityAllocateInfoEXT& operator=(const safe_VkMemoryPriorityAllocateInfoEXT& src);
- safe_VkMemoryPriorityAllocateInfoEXT();
- ~safe_VkMemoryPriorityAllocateInfoEXT();
- void initialize(const VkMemoryPriorityAllocateInfoEXT* in_struct);
- void initialize(const safe_VkMemoryPriorityAllocateInfoEXT* src);
- VkMemoryPriorityAllocateInfoEXT *ptr() { return reinterpret_cast<VkMemoryPriorityAllocateInfoEXT *>(this); }
- VkMemoryPriorityAllocateInfoEXT const *ptr() const { return reinterpret_cast<VkMemoryPriorityAllocateInfoEXT const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV {
- VkStructureType sType;
- void* pNext;
- VkBool32 dedicatedAllocationImageAliasing;
- safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV(const VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV* in_struct);
- safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV(const safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV& src);
- safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV& operator=(const safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV& src);
- safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV();
- ~safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV();
- void initialize(const VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV* in_struct);
- void initialize(const safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV* src);
- VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV *ptr() { return reinterpret_cast<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV *>(this); }
- VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const *ptr() const { return reinterpret_cast<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT {
- VkStructureType sType;
- void* pNext;
- VkBool32 bufferDeviceAddress;
- VkBool32 bufferDeviceAddressCaptureReplay;
- VkBool32 bufferDeviceAddressMultiDevice;
- safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT(const VkPhysicalDeviceBufferDeviceAddressFeaturesEXT* in_struct);
- safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT(const safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT& src);
- safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT& operator=(const safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT& src);
- safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT();
- ~safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT();
- void initialize(const VkPhysicalDeviceBufferDeviceAddressFeaturesEXT* in_struct);
- void initialize(const safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT* src);
- VkPhysicalDeviceBufferDeviceAddressFeaturesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT *>(this); }
- VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const *>(this); }
-};
-
-struct safe_VkBufferDeviceAddressInfoEXT {
- VkStructureType sType;
- const void* pNext;
- VkBuffer buffer;
- safe_VkBufferDeviceAddressInfoEXT(const VkBufferDeviceAddressInfoEXT* in_struct);
- safe_VkBufferDeviceAddressInfoEXT(const safe_VkBufferDeviceAddressInfoEXT& src);
- safe_VkBufferDeviceAddressInfoEXT& operator=(const safe_VkBufferDeviceAddressInfoEXT& src);
- safe_VkBufferDeviceAddressInfoEXT();
- ~safe_VkBufferDeviceAddressInfoEXT();
- void initialize(const VkBufferDeviceAddressInfoEXT* in_struct);
- void initialize(const safe_VkBufferDeviceAddressInfoEXT* src);
- VkBufferDeviceAddressInfoEXT *ptr() { return reinterpret_cast<VkBufferDeviceAddressInfoEXT *>(this); }
- VkBufferDeviceAddressInfoEXT const *ptr() const { return reinterpret_cast<VkBufferDeviceAddressInfoEXT const *>(this); }
-};
-
-struct safe_VkBufferDeviceAddressCreateInfoEXT {
- VkStructureType sType;
- const void* pNext;
- VkDeviceAddress deviceAddress;
- safe_VkBufferDeviceAddressCreateInfoEXT(const VkBufferDeviceAddressCreateInfoEXT* in_struct);
- safe_VkBufferDeviceAddressCreateInfoEXT(const safe_VkBufferDeviceAddressCreateInfoEXT& src);
- safe_VkBufferDeviceAddressCreateInfoEXT& operator=(const safe_VkBufferDeviceAddressCreateInfoEXT& src);
- safe_VkBufferDeviceAddressCreateInfoEXT();
- ~safe_VkBufferDeviceAddressCreateInfoEXT();
- void initialize(const VkBufferDeviceAddressCreateInfoEXT* in_struct);
- void initialize(const safe_VkBufferDeviceAddressCreateInfoEXT* src);
- VkBufferDeviceAddressCreateInfoEXT *ptr() { return reinterpret_cast<VkBufferDeviceAddressCreateInfoEXT *>(this); }
- VkBufferDeviceAddressCreateInfoEXT const *ptr() const { return reinterpret_cast<VkBufferDeviceAddressCreateInfoEXT const *>(this); }
-};
-
-struct safe_VkImageStencilUsageCreateInfoEXT {
- VkStructureType sType;
- const void* pNext;
- VkImageUsageFlags stencilUsage;
- safe_VkImageStencilUsageCreateInfoEXT(const VkImageStencilUsageCreateInfoEXT* in_struct);
- safe_VkImageStencilUsageCreateInfoEXT(const safe_VkImageStencilUsageCreateInfoEXT& src);
- safe_VkImageStencilUsageCreateInfoEXT& operator=(const safe_VkImageStencilUsageCreateInfoEXT& src);
- safe_VkImageStencilUsageCreateInfoEXT();
- ~safe_VkImageStencilUsageCreateInfoEXT();
- void initialize(const VkImageStencilUsageCreateInfoEXT* in_struct);
- void initialize(const safe_VkImageStencilUsageCreateInfoEXT* src);
- VkImageStencilUsageCreateInfoEXT *ptr() { return reinterpret_cast<VkImageStencilUsageCreateInfoEXT *>(this); }
- VkImageStencilUsageCreateInfoEXT const *ptr() const { return reinterpret_cast<VkImageStencilUsageCreateInfoEXT const *>(this); }
-};
-
-struct safe_VkValidationFeaturesEXT {
- VkStructureType sType;
- const void* pNext;
- uint32_t enabledValidationFeatureCount;
- const VkValidationFeatureEnableEXT* pEnabledValidationFeatures;
- uint32_t disabledValidationFeatureCount;
- const VkValidationFeatureDisableEXT* pDisabledValidationFeatures;
- safe_VkValidationFeaturesEXT(const VkValidationFeaturesEXT* in_struct);
- safe_VkValidationFeaturesEXT(const safe_VkValidationFeaturesEXT& src);
- safe_VkValidationFeaturesEXT& operator=(const safe_VkValidationFeaturesEXT& src);
- safe_VkValidationFeaturesEXT();
- ~safe_VkValidationFeaturesEXT();
- void initialize(const VkValidationFeaturesEXT* in_struct);
- void initialize(const safe_VkValidationFeaturesEXT* src);
- VkValidationFeaturesEXT *ptr() { return reinterpret_cast<VkValidationFeaturesEXT *>(this); }
- VkValidationFeaturesEXT const *ptr() const { return reinterpret_cast<VkValidationFeaturesEXT const *>(this); }
-};
-
-struct safe_VkCooperativeMatrixPropertiesNV {
- VkStructureType sType;
- void* pNext;
- uint32_t MSize;
- uint32_t NSize;
- uint32_t KSize;
- VkComponentTypeNV AType;
- VkComponentTypeNV BType;
- VkComponentTypeNV CType;
- VkComponentTypeNV DType;
- VkScopeNV scope;
- safe_VkCooperativeMatrixPropertiesNV(const VkCooperativeMatrixPropertiesNV* in_struct);
- safe_VkCooperativeMatrixPropertiesNV(const safe_VkCooperativeMatrixPropertiesNV& src);
- safe_VkCooperativeMatrixPropertiesNV& operator=(const safe_VkCooperativeMatrixPropertiesNV& src);
- safe_VkCooperativeMatrixPropertiesNV();
- ~safe_VkCooperativeMatrixPropertiesNV();
- void initialize(const VkCooperativeMatrixPropertiesNV* in_struct);
- void initialize(const safe_VkCooperativeMatrixPropertiesNV* src);
- VkCooperativeMatrixPropertiesNV *ptr() { return reinterpret_cast<VkCooperativeMatrixPropertiesNV *>(this); }
- VkCooperativeMatrixPropertiesNV const *ptr() const { return reinterpret_cast<VkCooperativeMatrixPropertiesNV const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV {
- VkStructureType sType;
- void* pNext;
- VkBool32 cooperativeMatrix;
- VkBool32 cooperativeMatrixRobustBufferAccess;
- safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV(const VkPhysicalDeviceCooperativeMatrixFeaturesNV* in_struct);
- safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV(const safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV& src);
- safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV& operator=(const safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV& src);
- safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV();
- ~safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV();
- void initialize(const VkPhysicalDeviceCooperativeMatrixFeaturesNV* in_struct);
- void initialize(const safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV* src);
- VkPhysicalDeviceCooperativeMatrixFeaturesNV *ptr() { return reinterpret_cast<VkPhysicalDeviceCooperativeMatrixFeaturesNV *>(this); }
- VkPhysicalDeviceCooperativeMatrixFeaturesNV const *ptr() const { return reinterpret_cast<VkPhysicalDeviceCooperativeMatrixFeaturesNV const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV {
- VkStructureType sType;
- void* pNext;
- VkShaderStageFlags cooperativeMatrixSupportedStages;
- safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV(const VkPhysicalDeviceCooperativeMatrixPropertiesNV* in_struct);
- safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV(const safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV& src);
- safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV& operator=(const safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV& src);
- safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV();
- ~safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV();
- void initialize(const VkPhysicalDeviceCooperativeMatrixPropertiesNV* in_struct);
- void initialize(const safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV* src);
- VkPhysicalDeviceCooperativeMatrixPropertiesNV *ptr() { return reinterpret_cast<VkPhysicalDeviceCooperativeMatrixPropertiesNV *>(this); }
- VkPhysicalDeviceCooperativeMatrixPropertiesNV const *ptr() const { return reinterpret_cast<VkPhysicalDeviceCooperativeMatrixPropertiesNV const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV {
- VkStructureType sType;
- void* pNext;
- VkBool32 coverageReductionMode;
- safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV(const VkPhysicalDeviceCoverageReductionModeFeaturesNV* in_struct);
- safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV(const safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV& src);
- safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV& operator=(const safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV& src);
- safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV();
- ~safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV();
- void initialize(const VkPhysicalDeviceCoverageReductionModeFeaturesNV* in_struct);
- void initialize(const safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV* src);
- VkPhysicalDeviceCoverageReductionModeFeaturesNV *ptr() { return reinterpret_cast<VkPhysicalDeviceCoverageReductionModeFeaturesNV *>(this); }
- VkPhysicalDeviceCoverageReductionModeFeaturesNV const *ptr() const { return reinterpret_cast<VkPhysicalDeviceCoverageReductionModeFeaturesNV const *>(this); }
-};
-
-struct safe_VkPipelineCoverageReductionStateCreateInfoNV {
- VkStructureType sType;
- const void* pNext;
- VkPipelineCoverageReductionStateCreateFlagsNV flags;
- VkCoverageReductionModeNV coverageReductionMode;
- safe_VkPipelineCoverageReductionStateCreateInfoNV(const VkPipelineCoverageReductionStateCreateInfoNV* in_struct);
- safe_VkPipelineCoverageReductionStateCreateInfoNV(const safe_VkPipelineCoverageReductionStateCreateInfoNV& src);
- safe_VkPipelineCoverageReductionStateCreateInfoNV& operator=(const safe_VkPipelineCoverageReductionStateCreateInfoNV& src);
- safe_VkPipelineCoverageReductionStateCreateInfoNV();
- ~safe_VkPipelineCoverageReductionStateCreateInfoNV();
- void initialize(const VkPipelineCoverageReductionStateCreateInfoNV* in_struct);
- void initialize(const safe_VkPipelineCoverageReductionStateCreateInfoNV* src);
- VkPipelineCoverageReductionStateCreateInfoNV *ptr() { return reinterpret_cast<VkPipelineCoverageReductionStateCreateInfoNV *>(this); }
- VkPipelineCoverageReductionStateCreateInfoNV const *ptr() const { return reinterpret_cast<VkPipelineCoverageReductionStateCreateInfoNV const *>(this); }
-};
-
-struct safe_VkFramebufferMixedSamplesCombinationNV {
- VkStructureType sType;
- void* pNext;
- VkCoverageReductionModeNV coverageReductionMode;
- VkSampleCountFlagBits rasterizationSamples;
- VkSampleCountFlags depthStencilSamples;
- VkSampleCountFlags colorSamples;
- safe_VkFramebufferMixedSamplesCombinationNV(const VkFramebufferMixedSamplesCombinationNV* in_struct);
- safe_VkFramebufferMixedSamplesCombinationNV(const safe_VkFramebufferMixedSamplesCombinationNV& src);
- safe_VkFramebufferMixedSamplesCombinationNV& operator=(const safe_VkFramebufferMixedSamplesCombinationNV& src);
- safe_VkFramebufferMixedSamplesCombinationNV();
- ~safe_VkFramebufferMixedSamplesCombinationNV();
- void initialize(const VkFramebufferMixedSamplesCombinationNV* in_struct);
- void initialize(const safe_VkFramebufferMixedSamplesCombinationNV* src);
- VkFramebufferMixedSamplesCombinationNV *ptr() { return reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>(this); }
- VkFramebufferMixedSamplesCombinationNV const *ptr() const { return reinterpret_cast<VkFramebufferMixedSamplesCombinationNV const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT {
- VkStructureType sType;
- void* pNext;
- VkBool32 fragmentShaderSampleInterlock;
- VkBool32 fragmentShaderPixelInterlock;
- VkBool32 fragmentShaderShadingRateInterlock;
- safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT(const VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT* in_struct);
- safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT(const safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT& src);
- safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT& operator=(const safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT& src);
- safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT();
- ~safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT();
- void initialize(const VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT* in_struct);
- void initialize(const safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT* src);
- VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT *>(this); }
- VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT {
- VkStructureType sType;
- void* pNext;
- VkBool32 ycbcrImageArrays;
- safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT(const VkPhysicalDeviceYcbcrImageArraysFeaturesEXT* in_struct);
- safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT(const safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT& src);
- safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT& operator=(const safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT& src);
- safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT();
- ~safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT();
- void initialize(const VkPhysicalDeviceYcbcrImageArraysFeaturesEXT* in_struct);
- void initialize(const safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT* src);
- VkPhysicalDeviceYcbcrImageArraysFeaturesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT *>(this); }
- VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const *>(this); }
-};
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-struct safe_VkSurfaceFullScreenExclusiveInfoEXT {
- VkStructureType sType;
- void* pNext;
- VkFullScreenExclusiveEXT fullScreenExclusive;
- safe_VkSurfaceFullScreenExclusiveInfoEXT(const VkSurfaceFullScreenExclusiveInfoEXT* in_struct);
- safe_VkSurfaceFullScreenExclusiveInfoEXT(const safe_VkSurfaceFullScreenExclusiveInfoEXT& src);
- safe_VkSurfaceFullScreenExclusiveInfoEXT& operator=(const safe_VkSurfaceFullScreenExclusiveInfoEXT& src);
- safe_VkSurfaceFullScreenExclusiveInfoEXT();
- ~safe_VkSurfaceFullScreenExclusiveInfoEXT();
- void initialize(const VkSurfaceFullScreenExclusiveInfoEXT* in_struct);
- void initialize(const safe_VkSurfaceFullScreenExclusiveInfoEXT* src);
- VkSurfaceFullScreenExclusiveInfoEXT *ptr() { return reinterpret_cast<VkSurfaceFullScreenExclusiveInfoEXT *>(this); }
- VkSurfaceFullScreenExclusiveInfoEXT const *ptr() const { return reinterpret_cast<VkSurfaceFullScreenExclusiveInfoEXT const *>(this); }
-};
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-struct safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT {
- VkStructureType sType;
- void* pNext;
- VkBool32 fullScreenExclusiveSupported;
- safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT(const VkSurfaceCapabilitiesFullScreenExclusiveEXT* in_struct);
- safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT(const safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT& src);
- safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT& operator=(const safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT& src);
- safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT();
- ~safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT();
- void initialize(const VkSurfaceCapabilitiesFullScreenExclusiveEXT* in_struct);
- void initialize(const safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT* src);
- VkSurfaceCapabilitiesFullScreenExclusiveEXT *ptr() { return reinterpret_cast<VkSurfaceCapabilitiesFullScreenExclusiveEXT *>(this); }
- VkSurfaceCapabilitiesFullScreenExclusiveEXT const *ptr() const { return reinterpret_cast<VkSurfaceCapabilitiesFullScreenExclusiveEXT const *>(this); }
-};
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-struct safe_VkSurfaceFullScreenExclusiveWin32InfoEXT {
- VkStructureType sType;
- const void* pNext;
- HMONITOR hmonitor;
- safe_VkSurfaceFullScreenExclusiveWin32InfoEXT(const VkSurfaceFullScreenExclusiveWin32InfoEXT* in_struct);
- safe_VkSurfaceFullScreenExclusiveWin32InfoEXT(const safe_VkSurfaceFullScreenExclusiveWin32InfoEXT& src);
- safe_VkSurfaceFullScreenExclusiveWin32InfoEXT& operator=(const safe_VkSurfaceFullScreenExclusiveWin32InfoEXT& src);
- safe_VkSurfaceFullScreenExclusiveWin32InfoEXT();
- ~safe_VkSurfaceFullScreenExclusiveWin32InfoEXT();
- void initialize(const VkSurfaceFullScreenExclusiveWin32InfoEXT* in_struct);
- void initialize(const safe_VkSurfaceFullScreenExclusiveWin32InfoEXT* src);
- VkSurfaceFullScreenExclusiveWin32InfoEXT *ptr() { return reinterpret_cast<VkSurfaceFullScreenExclusiveWin32InfoEXT *>(this); }
- VkSurfaceFullScreenExclusiveWin32InfoEXT const *ptr() const { return reinterpret_cast<VkSurfaceFullScreenExclusiveWin32InfoEXT const *>(this); }
-};
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-struct safe_VkHeadlessSurfaceCreateInfoEXT {
- VkStructureType sType;
- const void* pNext;
- VkHeadlessSurfaceCreateFlagsEXT flags;
- safe_VkHeadlessSurfaceCreateInfoEXT(const VkHeadlessSurfaceCreateInfoEXT* in_struct);
- safe_VkHeadlessSurfaceCreateInfoEXT(const safe_VkHeadlessSurfaceCreateInfoEXT& src);
- safe_VkHeadlessSurfaceCreateInfoEXT& operator=(const safe_VkHeadlessSurfaceCreateInfoEXT& src);
- safe_VkHeadlessSurfaceCreateInfoEXT();
- ~safe_VkHeadlessSurfaceCreateInfoEXT();
- void initialize(const VkHeadlessSurfaceCreateInfoEXT* in_struct);
- void initialize(const safe_VkHeadlessSurfaceCreateInfoEXT* src);
- VkHeadlessSurfaceCreateInfoEXT *ptr() { return reinterpret_cast<VkHeadlessSurfaceCreateInfoEXT *>(this); }
- VkHeadlessSurfaceCreateInfoEXT const *ptr() const { return reinterpret_cast<VkHeadlessSurfaceCreateInfoEXT const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceLineRasterizationFeaturesEXT {
- VkStructureType sType;
- void* pNext;
- VkBool32 rectangularLines;
- VkBool32 bresenhamLines;
- VkBool32 smoothLines;
- VkBool32 stippledRectangularLines;
- VkBool32 stippledBresenhamLines;
- VkBool32 stippledSmoothLines;
- safe_VkPhysicalDeviceLineRasterizationFeaturesEXT(const VkPhysicalDeviceLineRasterizationFeaturesEXT* in_struct);
- safe_VkPhysicalDeviceLineRasterizationFeaturesEXT(const safe_VkPhysicalDeviceLineRasterizationFeaturesEXT& src);
- safe_VkPhysicalDeviceLineRasterizationFeaturesEXT& operator=(const safe_VkPhysicalDeviceLineRasterizationFeaturesEXT& src);
- safe_VkPhysicalDeviceLineRasterizationFeaturesEXT();
- ~safe_VkPhysicalDeviceLineRasterizationFeaturesEXT();
- void initialize(const VkPhysicalDeviceLineRasterizationFeaturesEXT* in_struct);
- void initialize(const safe_VkPhysicalDeviceLineRasterizationFeaturesEXT* src);
- VkPhysicalDeviceLineRasterizationFeaturesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceLineRasterizationFeaturesEXT *>(this); }
- VkPhysicalDeviceLineRasterizationFeaturesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceLineRasterizationFeaturesEXT const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceLineRasterizationPropertiesEXT {
- VkStructureType sType;
- void* pNext;
- uint32_t lineSubPixelPrecisionBits;
- safe_VkPhysicalDeviceLineRasterizationPropertiesEXT(const VkPhysicalDeviceLineRasterizationPropertiesEXT* in_struct);
- safe_VkPhysicalDeviceLineRasterizationPropertiesEXT(const safe_VkPhysicalDeviceLineRasterizationPropertiesEXT& src);
- safe_VkPhysicalDeviceLineRasterizationPropertiesEXT& operator=(const safe_VkPhysicalDeviceLineRasterizationPropertiesEXT& src);
- safe_VkPhysicalDeviceLineRasterizationPropertiesEXT();
- ~safe_VkPhysicalDeviceLineRasterizationPropertiesEXT();
- void initialize(const VkPhysicalDeviceLineRasterizationPropertiesEXT* in_struct);
- void initialize(const safe_VkPhysicalDeviceLineRasterizationPropertiesEXT* src);
- VkPhysicalDeviceLineRasterizationPropertiesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceLineRasterizationPropertiesEXT *>(this); }
- VkPhysicalDeviceLineRasterizationPropertiesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceLineRasterizationPropertiesEXT const *>(this); }
-};
-
-struct safe_VkPipelineRasterizationLineStateCreateInfoEXT {
- VkStructureType sType;
- const void* pNext;
- VkLineRasterizationModeEXT lineRasterizationMode;
- VkBool32 stippledLineEnable;
- uint32_t lineStippleFactor;
- uint16_t lineStipplePattern;
- safe_VkPipelineRasterizationLineStateCreateInfoEXT(const VkPipelineRasterizationLineStateCreateInfoEXT* in_struct);
- safe_VkPipelineRasterizationLineStateCreateInfoEXT(const safe_VkPipelineRasterizationLineStateCreateInfoEXT& src);
- safe_VkPipelineRasterizationLineStateCreateInfoEXT& operator=(const safe_VkPipelineRasterizationLineStateCreateInfoEXT& src);
- safe_VkPipelineRasterizationLineStateCreateInfoEXT();
- ~safe_VkPipelineRasterizationLineStateCreateInfoEXT();
- void initialize(const VkPipelineRasterizationLineStateCreateInfoEXT* in_struct);
- void initialize(const safe_VkPipelineRasterizationLineStateCreateInfoEXT* src);
- VkPipelineRasterizationLineStateCreateInfoEXT *ptr() { return reinterpret_cast<VkPipelineRasterizationLineStateCreateInfoEXT *>(this); }
- VkPipelineRasterizationLineStateCreateInfoEXT const *ptr() const { return reinterpret_cast<VkPipelineRasterizationLineStateCreateInfoEXT const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceHostQueryResetFeaturesEXT {
- VkStructureType sType;
- void* pNext;
- VkBool32 hostQueryReset;
- safe_VkPhysicalDeviceHostQueryResetFeaturesEXT(const VkPhysicalDeviceHostQueryResetFeaturesEXT* in_struct);
- safe_VkPhysicalDeviceHostQueryResetFeaturesEXT(const safe_VkPhysicalDeviceHostQueryResetFeaturesEXT& src);
- safe_VkPhysicalDeviceHostQueryResetFeaturesEXT& operator=(const safe_VkPhysicalDeviceHostQueryResetFeaturesEXT& src);
- safe_VkPhysicalDeviceHostQueryResetFeaturesEXT();
- ~safe_VkPhysicalDeviceHostQueryResetFeaturesEXT();
- void initialize(const VkPhysicalDeviceHostQueryResetFeaturesEXT* in_struct);
- void initialize(const safe_VkPhysicalDeviceHostQueryResetFeaturesEXT* src);
- VkPhysicalDeviceHostQueryResetFeaturesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceHostQueryResetFeaturesEXT *>(this); }
- VkPhysicalDeviceHostQueryResetFeaturesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceHostQueryResetFeaturesEXT const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT {
- VkStructureType sType;
- void* pNext;
- VkBool32 indexTypeUint8;
- safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT(const VkPhysicalDeviceIndexTypeUint8FeaturesEXT* in_struct);
- safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT(const safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT& src);
- safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT& operator=(const safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT& src);
- safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT();
- ~safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT();
- void initialize(const VkPhysicalDeviceIndexTypeUint8FeaturesEXT* in_struct);
- void initialize(const safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT* src);
- VkPhysicalDeviceIndexTypeUint8FeaturesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceIndexTypeUint8FeaturesEXT *>(this); }
- VkPhysicalDeviceIndexTypeUint8FeaturesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceIndexTypeUint8FeaturesEXT const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT {
- VkStructureType sType;
- void* pNext;
- VkBool32 shaderDemoteToHelperInvocation;
- safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT(const VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT* in_struct);
- safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT(const safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT& src);
- safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT& operator=(const safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT& src);
- safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT();
- ~safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT();
- void initialize(const VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT* in_struct);
- void initialize(const safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT* src);
- VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT *>(this); }
- VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT {
- VkStructureType sType;
- void* pNext;
- VkBool32 texelBufferAlignment;
- safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT(const VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT* in_struct);
- safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT(const safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT& src);
- safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT& operator=(const safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT& src);
- safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT();
- ~safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT();
- void initialize(const VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT* in_struct);
- void initialize(const safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT* src);
- VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT *>(this); }
- VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const *>(this); }
-};
-
-struct safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT {
- VkStructureType sType;
- void* pNext;
- VkDeviceSize storageTexelBufferOffsetAlignmentBytes;
- VkBool32 storageTexelBufferOffsetSingleTexelAlignment;
- VkDeviceSize uniformTexelBufferOffsetAlignmentBytes;
- VkBool32 uniformTexelBufferOffsetSingleTexelAlignment;
- safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT(const VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT* in_struct);
- safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT(const safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT& src);
- safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT& operator=(const safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT& src);
- safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT();
- ~safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT();
- void initialize(const VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT* in_struct);
- void initialize(const safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT* src);
- VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT *>(this); }
- VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const *>(this); }
-};
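
Every safe_Vk* declaration removed above follows the same generated pattern: a constructor that deep-copies the corresponding Vulkan struct (including its pNext chain), initialize() overloads for re-populating an existing wrapper, and ptr() accessors that reinterpret the wrapper as the raw Vulkan type. A minimal usage sketch, assuming the removed vk_safe_struct.h is still on the include path (the function name here is illustrative only, not part of the layer code):

#include <vulkan/vulkan.h>
#include "vk_safe_struct.h"  // the generated header removed by this change

void stash_validation_features(const VkValidationFeaturesEXT* in_features) {
    // The safe_ wrapper copies the struct and its pNext chain, so a layer can
    // keep the data after the caller's pointers go out of scope.
    safe_VkValidationFeaturesEXT safe_features(in_features);

    // ptr() reinterprets the wrapper back to the raw Vulkan type whenever a
    // plain VkValidationFeaturesEXT* is needed again.
    const VkValidationFeaturesEXT* raw = safe_features.ptr();
    (void)raw;
}
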
diff --git a/layers/generated/vk_typemap_helper.h b/layers/generated/vk_typemap_helper.h
deleted file mode 100644
index 257be53c8..000000000
--- a/layers/generated/vk_typemap_helper.h
+++ /dev/null
@@ -1,3473 +0,0 @@
-// *** THIS FILE IS GENERATED - DO NOT EDIT ***
-// See helper_file_generator.py for modifications
-
-
-/***************************************************************************
- *
- * Copyright (c) 2015-2019 The Khronos Group Inc.
- * Copyright (c) 2015-2019 Valve Corporation
- * Copyright (c) 2015-2019 LunarG, Inc.
- * Copyright (c) 2015-2019 Google Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * Author: Mark Lobodzinski <mark@lunarg.com>
- * Author: Courtney Goeltzenleuchter <courtneygo@google.com>
- * Author: Tobin Ehlis <tobine@google.com>
- * Author: Chris Forbes <chrisforbes@google.com>
- * Author: John Zulauf <jzulauf@lunarg.com>
- *
- ****************************************************************************/
-
-#pragma once
-#include <vulkan/vulkan.h>
-
-// These empty generic templates are specialized for each type with sType
-// members and for each sType -- providing a two-way map between structure
-// types and sTypes.
-
-template <VkStructureType id> struct LvlSTypeMap {};
-template <typename T> struct LvlTypeMap {};
-
-// Map type VkApplicationInfo to id VK_STRUCTURE_TYPE_APPLICATION_INFO
-template <> struct LvlTypeMap<VkApplicationInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_APPLICATION_INFO> {
- typedef VkApplicationInfo Type;
-};
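
The specializations below repeat this pattern for every structure that carries an sType. As a quick illustration of the two-way mapping, the sketch below uses the VkApplicationInfo specializations just shown and assumes they are visible; find_in_pnext_chain is a hypothetical helper written here only to show how LvlTypeMap<T>::kSType is typically consumed when walking a pNext chain, not an API from the generated file:

#include <vulkan/vulkan.h>

// Forward map: structure type -> sType.  Backward map: sType -> structure type.
static_assert(LvlTypeMap<VkApplicationInfo>::kSType == VK_STRUCTURE_TYPE_APPLICATION_INFO,
              "LvlTypeMap gives the sType for a structure type");
using AppInfo = LvlSTypeMap<VK_STRUCTURE_TYPE_APPLICATION_INFO>::Type;  // VkApplicationInfo

// Hypothetical helper (not from the generated file): return the first struct
// in a pNext chain whose sType matches LvlTypeMap<T>::kSType, or nullptr.
template <typename T>
const T* find_in_pnext_chain(const void* next) {
    for (auto current = reinterpret_cast<const VkBaseInStructure*>(next); current;
         current = current->pNext) {
        if (current->sType == LvlTypeMap<T>::kSType) return reinterpret_cast<const T*>(current);
    }
    return nullptr;
}
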
-
-// Map type VkInstanceCreateInfo to id VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO
-template <> struct LvlTypeMap<VkInstanceCreateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO> {
- typedef VkInstanceCreateInfo Type;
-};
-
-// Map type VkDeviceQueueCreateInfo to id VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO
-template <> struct LvlTypeMap<VkDeviceQueueCreateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO> {
- typedef VkDeviceQueueCreateInfo Type;
-};
-
-// Map type VkDeviceCreateInfo to id VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO
-template <> struct LvlTypeMap<VkDeviceCreateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO> {
- typedef VkDeviceCreateInfo Type;
-};
-
-// Map type VkSubmitInfo to id VK_STRUCTURE_TYPE_SUBMIT_INFO
-template <> struct LvlTypeMap<VkSubmitInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SUBMIT_INFO> {
- typedef VkSubmitInfo Type;
-};
-
-// Map type VkMemoryAllocateInfo to id VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO
-template <> struct LvlTypeMap<VkMemoryAllocateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO> {
- typedef VkMemoryAllocateInfo Type;
-};
-
-// Map type VkMappedMemoryRange to id VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE
-template <> struct LvlTypeMap<VkMappedMemoryRange> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE> {
- typedef VkMappedMemoryRange Type;
-};
-
-// Map type VkBindSparseInfo to id VK_STRUCTURE_TYPE_BIND_SPARSE_INFO
-template <> struct LvlTypeMap<VkBindSparseInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_BIND_SPARSE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_BIND_SPARSE_INFO> {
- typedef VkBindSparseInfo Type;
-};
-
-// Map type VkFenceCreateInfo to id VK_STRUCTURE_TYPE_FENCE_CREATE_INFO
-template <> struct LvlTypeMap<VkFenceCreateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_FENCE_CREATE_INFO> {
- typedef VkFenceCreateInfo Type;
-};
-
-// Map type VkSemaphoreCreateInfo to id VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO
-template <> struct LvlTypeMap<VkSemaphoreCreateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO> {
- typedef VkSemaphoreCreateInfo Type;
-};
-
-// Map type VkEventCreateInfo to id VK_STRUCTURE_TYPE_EVENT_CREATE_INFO
-template <> struct LvlTypeMap<VkEventCreateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EVENT_CREATE_INFO> {
- typedef VkEventCreateInfo Type;
-};
-
-// Map type VkQueryPoolCreateInfo to id VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO
-template <> struct LvlTypeMap<VkQueryPoolCreateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO> {
- typedef VkQueryPoolCreateInfo Type;
-};
-
-// Map type VkBufferCreateInfo to id VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO
-template <> struct LvlTypeMap<VkBufferCreateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO> {
- typedef VkBufferCreateInfo Type;
-};
-
-// Map type VkBufferViewCreateInfo to id VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO
-template <> struct LvlTypeMap<VkBufferViewCreateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO> {
- typedef VkBufferViewCreateInfo Type;
-};
-
-// Map type VkImageCreateInfo to id VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO
-template <> struct LvlTypeMap<VkImageCreateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO> {
- typedef VkImageCreateInfo Type;
-};
-
-// Map type VkImageViewCreateInfo to id VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO
-template <> struct LvlTypeMap<VkImageViewCreateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO> {
- typedef VkImageViewCreateInfo Type;
-};
-
-// Map type VkShaderModuleCreateInfo to id VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO
-template <> struct LvlTypeMap<VkShaderModuleCreateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO> {
- typedef VkShaderModuleCreateInfo Type;
-};
-
-// Map type VkPipelineCacheCreateInfo to id VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO
-template <> struct LvlTypeMap<VkPipelineCacheCreateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO> {
- typedef VkPipelineCacheCreateInfo Type;
-};
-
-// Map type VkPipelineShaderStageCreateInfo to id VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO
-template <> struct LvlTypeMap<VkPipelineShaderStageCreateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO> {
- typedef VkPipelineShaderStageCreateInfo Type;
-};
-
-// Map type VkPipelineVertexInputStateCreateInfo to id VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO
-template <> struct LvlTypeMap<VkPipelineVertexInputStateCreateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO> {
- typedef VkPipelineVertexInputStateCreateInfo Type;
-};
-
-// Map type VkPipelineInputAssemblyStateCreateInfo to id VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO
-template <> struct LvlTypeMap<VkPipelineInputAssemblyStateCreateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO> {
- typedef VkPipelineInputAssemblyStateCreateInfo Type;
-};
-
-// Map type VkPipelineTessellationStateCreateInfo to id VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO
-template <> struct LvlTypeMap<VkPipelineTessellationStateCreateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO> {
- typedef VkPipelineTessellationStateCreateInfo Type;
-};
-
-// Map type VkPipelineViewportStateCreateInfo to id VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO
-template <> struct LvlTypeMap<VkPipelineViewportStateCreateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO> {
- typedef VkPipelineViewportStateCreateInfo Type;
-};
-
-// Map type VkPipelineRasterizationStateCreateInfo to id VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO
-template <> struct LvlTypeMap<VkPipelineRasterizationStateCreateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO> {
- typedef VkPipelineRasterizationStateCreateInfo Type;
-};
-
-// Map type VkPipelineMultisampleStateCreateInfo to id VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO
-template <> struct LvlTypeMap<VkPipelineMultisampleStateCreateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO> {
- typedef VkPipelineMultisampleStateCreateInfo Type;
-};
-
-// Map type VkPipelineDepthStencilStateCreateInfo to id VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO
-template <> struct LvlTypeMap<VkPipelineDepthStencilStateCreateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO> {
- typedef VkPipelineDepthStencilStateCreateInfo Type;
-};
-
-// Map type VkPipelineColorBlendStateCreateInfo to id VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO
-template <> struct LvlTypeMap<VkPipelineColorBlendStateCreateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO> {
- typedef VkPipelineColorBlendStateCreateInfo Type;
-};
-
-// Map type VkPipelineDynamicStateCreateInfo to id VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO
-template <> struct LvlTypeMap<VkPipelineDynamicStateCreateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO> {
- typedef VkPipelineDynamicStateCreateInfo Type;
-};
-
-// Map type VkGraphicsPipelineCreateInfo to id VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO
-template <> struct LvlTypeMap<VkGraphicsPipelineCreateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO> {
- typedef VkGraphicsPipelineCreateInfo Type;
-};
-
-// Map type VkComputePipelineCreateInfo to id VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO
-template <> struct LvlTypeMap<VkComputePipelineCreateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO> {
- typedef VkComputePipelineCreateInfo Type;
-};
-
-// Map type VkPipelineLayoutCreateInfo to id VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO
-template <> struct LvlTypeMap<VkPipelineLayoutCreateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO> {
- typedef VkPipelineLayoutCreateInfo Type;
-};
-
-// Map type VkSamplerCreateInfo to id VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO
-template <> struct LvlTypeMap<VkSamplerCreateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO> {
- typedef VkSamplerCreateInfo Type;
-};
-
-// Map type VkDescriptorSetLayoutCreateInfo to id VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO
-template <> struct LvlTypeMap<VkDescriptorSetLayoutCreateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO> {
- typedef VkDescriptorSetLayoutCreateInfo Type;
-};
-
-// Map type VkDescriptorPoolCreateInfo to id VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO
-template <> struct LvlTypeMap<VkDescriptorPoolCreateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO> {
- typedef VkDescriptorPoolCreateInfo Type;
-};
-
-// Map type VkDescriptorSetAllocateInfo to id VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO
-template <> struct LvlTypeMap<VkDescriptorSetAllocateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO> {
- typedef VkDescriptorSetAllocateInfo Type;
-};
-
-// Map type VkWriteDescriptorSet to id VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET
-template <> struct LvlTypeMap<VkWriteDescriptorSet> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET> {
- typedef VkWriteDescriptorSet Type;
-};
-
-// Map type VkCopyDescriptorSet to id VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET
-template <> struct LvlTypeMap<VkCopyDescriptorSet> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET> {
- typedef VkCopyDescriptorSet Type;
-};
-
-// Map type VkFramebufferCreateInfo to id VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO
-template <> struct LvlTypeMap<VkFramebufferCreateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO> {
- typedef VkFramebufferCreateInfo Type;
-};
-
-// Map type VkRenderPassCreateInfo to id VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO
-template <> struct LvlTypeMap<VkRenderPassCreateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO> {
- typedef VkRenderPassCreateInfo Type;
-};
-
-// Map type VkCommandPoolCreateInfo to id VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO
-template <> struct LvlTypeMap<VkCommandPoolCreateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO> {
- typedef VkCommandPoolCreateInfo Type;
-};
-
-// Map type VkCommandBufferAllocateInfo to id VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO
-template <> struct LvlTypeMap<VkCommandBufferAllocateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO> {
- typedef VkCommandBufferAllocateInfo Type;
-};
-
-// Map type VkCommandBufferInheritanceInfo to id VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO
-template <> struct LvlTypeMap<VkCommandBufferInheritanceInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO> {
- typedef VkCommandBufferInheritanceInfo Type;
-};
-
-// Map type VkCommandBufferBeginInfo to id VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO
-template <> struct LvlTypeMap<VkCommandBufferBeginInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO> {
- typedef VkCommandBufferBeginInfo Type;
-};
-
-// Map type VkMemoryBarrier to id VK_STRUCTURE_TYPE_MEMORY_BARRIER
-template <> struct LvlTypeMap<VkMemoryBarrier> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_MEMORY_BARRIER;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MEMORY_BARRIER> {
- typedef VkMemoryBarrier Type;
-};
-
-// Map type VkBufferMemoryBarrier to id VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER
-template <> struct LvlTypeMap<VkBufferMemoryBarrier> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER> {
- typedef VkBufferMemoryBarrier Type;
-};
-
-// Map type VkImageMemoryBarrier to id VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER
-template <> struct LvlTypeMap<VkImageMemoryBarrier> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER> {
- typedef VkImageMemoryBarrier Type;
-};
-
-// Map type VkRenderPassBeginInfo to id VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO
-template <> struct LvlTypeMap<VkRenderPassBeginInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO> {
- typedef VkRenderPassBeginInfo Type;
-};
-
-// Map type VkPhysicalDeviceSubgroupProperties to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES
-template <> struct LvlTypeMap<VkPhysicalDeviceSubgroupProperties> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES> {
- typedef VkPhysicalDeviceSubgroupProperties Type;
-};
-
-// Map type VkBindBufferMemoryInfo to id VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO
-template <> struct LvlTypeMap<VkBindBufferMemoryInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO> {
- typedef VkBindBufferMemoryInfo Type;
-};
-
-// Map type VkBindImageMemoryInfo to id VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO
-template <> struct LvlTypeMap<VkBindImageMemoryInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO> {
- typedef VkBindImageMemoryInfo Type;
-};
-
-// Map type VkPhysicalDevice16BitStorageFeatures to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES
-template <> struct LvlTypeMap<VkPhysicalDevice16BitStorageFeatures> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES> {
- typedef VkPhysicalDevice16BitStorageFeatures Type;
-};
-
-// Map type VkMemoryDedicatedRequirements to id VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS
-template <> struct LvlTypeMap<VkMemoryDedicatedRequirements> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS> {
- typedef VkMemoryDedicatedRequirements Type;
-};
-
-// Map type VkMemoryDedicatedAllocateInfo to id VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO
-template <> struct LvlTypeMap<VkMemoryDedicatedAllocateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO> {
- typedef VkMemoryDedicatedAllocateInfo Type;
-};
-
-// Map type VkMemoryAllocateFlagsInfo to id VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO
-template <> struct LvlTypeMap<VkMemoryAllocateFlagsInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO> {
- typedef VkMemoryAllocateFlagsInfo Type;
-};
-
-// Map type VkDeviceGroupRenderPassBeginInfo to id VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO
-template <> struct LvlTypeMap<VkDeviceGroupRenderPassBeginInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO> {
- typedef VkDeviceGroupRenderPassBeginInfo Type;
-};
-
-// Map type VkDeviceGroupCommandBufferBeginInfo to id VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO
-template <> struct LvlTypeMap<VkDeviceGroupCommandBufferBeginInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO> {
- typedef VkDeviceGroupCommandBufferBeginInfo Type;
-};
-
-// Map type VkDeviceGroupSubmitInfo to id VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO
-template <> struct LvlTypeMap<VkDeviceGroupSubmitInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO> {
- typedef VkDeviceGroupSubmitInfo Type;
-};
-
-// Map type VkDeviceGroupBindSparseInfo to id VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO
-template <> struct LvlTypeMap<VkDeviceGroupBindSparseInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO> {
- typedef VkDeviceGroupBindSparseInfo Type;
-};
-
-// Map type VkBindBufferMemoryDeviceGroupInfo to id VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO
-template <> struct LvlTypeMap<VkBindBufferMemoryDeviceGroupInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO> {
- typedef VkBindBufferMemoryDeviceGroupInfo Type;
-};
-
-// Map type VkBindImageMemoryDeviceGroupInfo to id VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO
-template <> struct LvlTypeMap<VkBindImageMemoryDeviceGroupInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO> {
- typedef VkBindImageMemoryDeviceGroupInfo Type;
-};
-
-// Map type VkPhysicalDeviceGroupProperties to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES
-template <> struct LvlTypeMap<VkPhysicalDeviceGroupProperties> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES> {
- typedef VkPhysicalDeviceGroupProperties Type;
-};
-
-// Map type VkDeviceGroupDeviceCreateInfo to id VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO
-template <> struct LvlTypeMap<VkDeviceGroupDeviceCreateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO> {
- typedef VkDeviceGroupDeviceCreateInfo Type;
-};
-
-// Map type VkBufferMemoryRequirementsInfo2 to id VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2
-template <> struct LvlTypeMap<VkBufferMemoryRequirementsInfo2> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2> {
- typedef VkBufferMemoryRequirementsInfo2 Type;
-};
-
-// Map type VkImageMemoryRequirementsInfo2 to id VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2
-template <> struct LvlTypeMap<VkImageMemoryRequirementsInfo2> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2> {
- typedef VkImageMemoryRequirementsInfo2 Type;
-};
-
-// Map type VkImageSparseMemoryRequirementsInfo2 to id VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2
-template <> struct LvlTypeMap<VkImageSparseMemoryRequirementsInfo2> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2> {
- typedef VkImageSparseMemoryRequirementsInfo2 Type;
-};
-
-// Map type VkMemoryRequirements2 to id VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2
-template <> struct LvlTypeMap<VkMemoryRequirements2> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2> {
- typedef VkMemoryRequirements2 Type;
-};
-
-// Map type VkSparseImageMemoryRequirements2 to id VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2
-template <> struct LvlTypeMap<VkSparseImageMemoryRequirements2> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2> {
- typedef VkSparseImageMemoryRequirements2 Type;
-};
-
-// Map type VkPhysicalDeviceFeatures2 to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2
-template <> struct LvlTypeMap<VkPhysicalDeviceFeatures2> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2> {
- typedef VkPhysicalDeviceFeatures2 Type;
-};
-
-// Map type VkPhysicalDeviceProperties2 to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2
-template <> struct LvlTypeMap<VkPhysicalDeviceProperties2> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2> {
- typedef VkPhysicalDeviceProperties2 Type;
-};
-
-// Map type VkFormatProperties2 to id VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2
-template <> struct LvlTypeMap<VkFormatProperties2> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2> {
- typedef VkFormatProperties2 Type;
-};
-
-// Map type VkImageFormatProperties2 to id VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2
-template <> struct LvlTypeMap<VkImageFormatProperties2> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2> {
- typedef VkImageFormatProperties2 Type;
-};
-
-// Map type VkPhysicalDeviceImageFormatInfo2 to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2
-template <> struct LvlTypeMap<VkPhysicalDeviceImageFormatInfo2> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2> {
- typedef VkPhysicalDeviceImageFormatInfo2 Type;
-};
-
-// Map type VkQueueFamilyProperties2 to id VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2
-template <> struct LvlTypeMap<VkQueueFamilyProperties2> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2> {
- typedef VkQueueFamilyProperties2 Type;
-};
-
-// Map type VkPhysicalDeviceMemoryProperties2 to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2
-template <> struct LvlTypeMap<VkPhysicalDeviceMemoryProperties2> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2> {
- typedef VkPhysicalDeviceMemoryProperties2 Type;
-};
-
-// Map type VkSparseImageFormatProperties2 to id VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2
-template <> struct LvlTypeMap<VkSparseImageFormatProperties2> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2> {
- typedef VkSparseImageFormatProperties2 Type;
-};
-
-// Map type VkPhysicalDeviceSparseImageFormatInfo2 to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2
-template <> struct LvlTypeMap<VkPhysicalDeviceSparseImageFormatInfo2> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2> {
- typedef VkPhysicalDeviceSparseImageFormatInfo2 Type;
-};
-
-// Map type VkPhysicalDevicePointClippingProperties to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES
-template <> struct LvlTypeMap<VkPhysicalDevicePointClippingProperties> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES> {
- typedef VkPhysicalDevicePointClippingProperties Type;
-};
-
-// Map type VkRenderPassInputAttachmentAspectCreateInfo to id VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO
-template <> struct LvlTypeMap<VkRenderPassInputAttachmentAspectCreateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO> {
- typedef VkRenderPassInputAttachmentAspectCreateInfo Type;
-};
-
-// Map type VkImageViewUsageCreateInfo to id VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO
-template <> struct LvlTypeMap<VkImageViewUsageCreateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO> {
- typedef VkImageViewUsageCreateInfo Type;
-};
-
-// Map type VkPipelineTessellationDomainOriginStateCreateInfo to id VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO
-template <> struct LvlTypeMap<VkPipelineTessellationDomainOriginStateCreateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO> {
- typedef VkPipelineTessellationDomainOriginStateCreateInfo Type;
-};
-
-// Map type VkRenderPassMultiviewCreateInfo to id VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO
-template <> struct LvlTypeMap<VkRenderPassMultiviewCreateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO> {
- typedef VkRenderPassMultiviewCreateInfo Type;
-};
-
-// Map type VkPhysicalDeviceMultiviewFeatures to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES
-template <> struct LvlTypeMap<VkPhysicalDeviceMultiviewFeatures> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES> {
- typedef VkPhysicalDeviceMultiviewFeatures Type;
-};
-
-// Map type VkPhysicalDeviceMultiviewProperties to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES
-template <> struct LvlTypeMap<VkPhysicalDeviceMultiviewProperties> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES> {
- typedef VkPhysicalDeviceMultiviewProperties Type;
-};
-
-// Map type VkPhysicalDeviceVariablePointersFeatures to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES
-template <> struct LvlTypeMap<VkPhysicalDeviceVariablePointersFeatures> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES> {
- typedef VkPhysicalDeviceVariablePointersFeatures Type;
-};
-
-// Map type VkPhysicalDeviceProtectedMemoryFeatures to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES
-template <> struct LvlTypeMap<VkPhysicalDeviceProtectedMemoryFeatures> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES> {
- typedef VkPhysicalDeviceProtectedMemoryFeatures Type;
-};
-
-// Map type VkPhysicalDeviceProtectedMemoryProperties to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES
-template <> struct LvlTypeMap<VkPhysicalDeviceProtectedMemoryProperties> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES> {
- typedef VkPhysicalDeviceProtectedMemoryProperties Type;
-};
-
-// Map type VkDeviceQueueInfo2 to id VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2
-template <> struct LvlTypeMap<VkDeviceQueueInfo2> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2> {
- typedef VkDeviceQueueInfo2 Type;
-};
-
-// Map type VkProtectedSubmitInfo to id VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO
-template <> struct LvlTypeMap<VkProtectedSubmitInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO> {
- typedef VkProtectedSubmitInfo Type;
-};
-
-// Map type VkSamplerYcbcrConversionCreateInfo to id VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO
-template <> struct LvlTypeMap<VkSamplerYcbcrConversionCreateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO> {
- typedef VkSamplerYcbcrConversionCreateInfo Type;
-};
-
-// Map type VkSamplerYcbcrConversionInfo to id VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO
-template <> struct LvlTypeMap<VkSamplerYcbcrConversionInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO> {
- typedef VkSamplerYcbcrConversionInfo Type;
-};
-
-// Map type VkBindImagePlaneMemoryInfo to id VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO
-template <> struct LvlTypeMap<VkBindImagePlaneMemoryInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO> {
- typedef VkBindImagePlaneMemoryInfo Type;
-};
-
-// Map type VkImagePlaneMemoryRequirementsInfo to id VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO
-template <> struct LvlTypeMap<VkImagePlaneMemoryRequirementsInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO> {
- typedef VkImagePlaneMemoryRequirementsInfo Type;
-};
-
-// Map type VkPhysicalDeviceSamplerYcbcrConversionFeatures to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES
-template <> struct LvlTypeMap<VkPhysicalDeviceSamplerYcbcrConversionFeatures> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES> {
- typedef VkPhysicalDeviceSamplerYcbcrConversionFeatures Type;
-};
-
-// Map type VkSamplerYcbcrConversionImageFormatProperties to id VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES
-template <> struct LvlTypeMap<VkSamplerYcbcrConversionImageFormatProperties> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES> {
- typedef VkSamplerYcbcrConversionImageFormatProperties Type;
-};
-
-// Map type VkDescriptorUpdateTemplateCreateInfo to id VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO
-template <> struct LvlTypeMap<VkDescriptorUpdateTemplateCreateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO> {
- typedef VkDescriptorUpdateTemplateCreateInfo Type;
-};
-
-// Map type VkPhysicalDeviceExternalImageFormatInfo to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO
-template <> struct LvlTypeMap<VkPhysicalDeviceExternalImageFormatInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO> {
- typedef VkPhysicalDeviceExternalImageFormatInfo Type;
-};
-
-// Map type VkExternalImageFormatProperties to id VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES
-template <> struct LvlTypeMap<VkExternalImageFormatProperties> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES> {
- typedef VkExternalImageFormatProperties Type;
-};
-
-// Map type VkPhysicalDeviceExternalBufferInfo to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO
-template <> struct LvlTypeMap<VkPhysicalDeviceExternalBufferInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO> {
- typedef VkPhysicalDeviceExternalBufferInfo Type;
-};
-
-// Map type VkExternalBufferProperties to id VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES
-template <> struct LvlTypeMap<VkExternalBufferProperties> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES> {
- typedef VkExternalBufferProperties Type;
-};
-
-// Map type VkPhysicalDeviceIDProperties to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES
-template <> struct LvlTypeMap<VkPhysicalDeviceIDProperties> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES> {
- typedef VkPhysicalDeviceIDProperties Type;
-};
-
-// Map type VkExternalMemoryImageCreateInfo to id VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO
-template <> struct LvlTypeMap<VkExternalMemoryImageCreateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO> {
- typedef VkExternalMemoryImageCreateInfo Type;
-};
-
-// Map type VkExternalMemoryBufferCreateInfo to id VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO
-template <> struct LvlTypeMap<VkExternalMemoryBufferCreateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO> {
- typedef VkExternalMemoryBufferCreateInfo Type;
-};
-
-// Map type VkExportMemoryAllocateInfo to id VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO
-template <> struct LvlTypeMap<VkExportMemoryAllocateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO> {
- typedef VkExportMemoryAllocateInfo Type;
-};
-
-// Map type VkPhysicalDeviceExternalFenceInfo to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO
-template <> struct LvlTypeMap<VkPhysicalDeviceExternalFenceInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO> {
- typedef VkPhysicalDeviceExternalFenceInfo Type;
-};
-
-// Map type VkExternalFenceProperties to id VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES
-template <> struct LvlTypeMap<VkExternalFenceProperties> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES> {
- typedef VkExternalFenceProperties Type;
-};
-
-// Map type VkExportFenceCreateInfo to id VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO
-template <> struct LvlTypeMap<VkExportFenceCreateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO> {
- typedef VkExportFenceCreateInfo Type;
-};
-
-// Map type VkExportSemaphoreCreateInfo to id VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO
-template <> struct LvlTypeMap<VkExportSemaphoreCreateInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO> {
- typedef VkExportSemaphoreCreateInfo Type;
-};
-
-// Map type VkPhysicalDeviceExternalSemaphoreInfo to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO
-template <> struct LvlTypeMap<VkPhysicalDeviceExternalSemaphoreInfo> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO> {
- typedef VkPhysicalDeviceExternalSemaphoreInfo Type;
-};
-
-// Map type VkExternalSemaphoreProperties to id VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES
-template <> struct LvlTypeMap<VkExternalSemaphoreProperties> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES> {
- typedef VkExternalSemaphoreProperties Type;
-};
-
-// Map type VkPhysicalDeviceMaintenance3Properties to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES
-template <> struct LvlTypeMap<VkPhysicalDeviceMaintenance3Properties> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES> {
- typedef VkPhysicalDeviceMaintenance3Properties Type;
-};
-
-// Map type VkDescriptorSetLayoutSupport to id VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT
-template <> struct LvlTypeMap<VkDescriptorSetLayoutSupport> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT> {
- typedef VkDescriptorSetLayoutSupport Type;
-};
-
-// Map type VkPhysicalDeviceShaderDrawParametersFeatures to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES
-template <> struct LvlTypeMap<VkPhysicalDeviceShaderDrawParametersFeatures> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES> {
- typedef VkPhysicalDeviceShaderDrawParametersFeatures Type;
-};
-
-// Map type VkSwapchainCreateInfoKHR to id VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR
-template <> struct LvlTypeMap<VkSwapchainCreateInfoKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR> {
- typedef VkSwapchainCreateInfoKHR Type;
-};
-
-// Map type VkPresentInfoKHR to id VK_STRUCTURE_TYPE_PRESENT_INFO_KHR
-template <> struct LvlTypeMap<VkPresentInfoKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PRESENT_INFO_KHR> {
- typedef VkPresentInfoKHR Type;
-};
-
-// Map type VkImageSwapchainCreateInfoKHR to id VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR
-template <> struct LvlTypeMap<VkImageSwapchainCreateInfoKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR> {
- typedef VkImageSwapchainCreateInfoKHR Type;
-};
-
-// Map type VkBindImageMemorySwapchainInfoKHR to id VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR
-template <> struct LvlTypeMap<VkBindImageMemorySwapchainInfoKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR> {
- typedef VkBindImageMemorySwapchainInfoKHR Type;
-};
-
-// Map type VkAcquireNextImageInfoKHR to id VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR
-template <> struct LvlTypeMap<VkAcquireNextImageInfoKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR> {
- typedef VkAcquireNextImageInfoKHR Type;
-};
-
-// Map type VkDeviceGroupPresentCapabilitiesKHR to id VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR
-template <> struct LvlTypeMap<VkDeviceGroupPresentCapabilitiesKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR> {
- typedef VkDeviceGroupPresentCapabilitiesKHR Type;
-};
-
-// Map type VkDeviceGroupPresentInfoKHR to id VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR
-template <> struct LvlTypeMap<VkDeviceGroupPresentInfoKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR> {
- typedef VkDeviceGroupPresentInfoKHR Type;
-};
-
-// Map type VkDeviceGroupSwapchainCreateInfoKHR to id VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR
-template <> struct LvlTypeMap<VkDeviceGroupSwapchainCreateInfoKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR> {
- typedef VkDeviceGroupSwapchainCreateInfoKHR Type;
-};
-
-// Map type VkDisplayModeCreateInfoKHR to id VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR
-template <> struct LvlTypeMap<VkDisplayModeCreateInfoKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR> {
- typedef VkDisplayModeCreateInfoKHR Type;
-};
-
-// Map type VkDisplaySurfaceCreateInfoKHR to id VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR
-template <> struct LvlTypeMap<VkDisplaySurfaceCreateInfoKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR> {
- typedef VkDisplaySurfaceCreateInfoKHR Type;
-};
-
-// Map type VkDisplayPresentInfoKHR to id VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR
-template <> struct LvlTypeMap<VkDisplayPresentInfoKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR> {
- typedef VkDisplayPresentInfoKHR Type;
-};
-
-#ifdef VK_USE_PLATFORM_XLIB_KHR
-// Map type VkXlibSurfaceCreateInfoKHR to id VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR
-template <> struct LvlTypeMap<VkXlibSurfaceCreateInfoKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR> {
- typedef VkXlibSurfaceCreateInfoKHR Type;
-};
-
-#endif // VK_USE_PLATFORM_XLIB_KHR
-#ifdef VK_USE_PLATFORM_XCB_KHR
-// Map type VkXcbSurfaceCreateInfoKHR to id VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR
-template <> struct LvlTypeMap<VkXcbSurfaceCreateInfoKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR> {
- typedef VkXcbSurfaceCreateInfoKHR Type;
-};
-
-#endif // VK_USE_PLATFORM_XCB_KHR
-#ifdef VK_USE_PLATFORM_WAYLAND_KHR
-// Map type VkWaylandSurfaceCreateInfoKHR to id VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR
-template <> struct LvlTypeMap<VkWaylandSurfaceCreateInfoKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR> {
- typedef VkWaylandSurfaceCreateInfoKHR Type;
-};
-
-#endif // VK_USE_PLATFORM_WAYLAND_KHR
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-// Map type VkAndroidSurfaceCreateInfoKHR to id VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR
-template <> struct LvlTypeMap<VkAndroidSurfaceCreateInfoKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR> {
- typedef VkAndroidSurfaceCreateInfoKHR Type;
-};
-
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-// Map type VkWin32SurfaceCreateInfoKHR to id VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR
-template <> struct LvlTypeMap<VkWin32SurfaceCreateInfoKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR> {
- typedef VkWin32SurfaceCreateInfoKHR Type;
-};
-
-#endif // VK_USE_PLATFORM_WIN32_KHR
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-// Map type VkImportMemoryWin32HandleInfoKHR to id VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR
-template <> struct LvlTypeMap<VkImportMemoryWin32HandleInfoKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR> {
- typedef VkImportMemoryWin32HandleInfoKHR Type;
-};
-
-#endif // VK_USE_PLATFORM_WIN32_KHR
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-// Map type VkExportMemoryWin32HandleInfoKHR to id VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR
-template <> struct LvlTypeMap<VkExportMemoryWin32HandleInfoKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR> {
- typedef VkExportMemoryWin32HandleInfoKHR Type;
-};
-
-#endif // VK_USE_PLATFORM_WIN32_KHR
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-// Map type VkMemoryWin32HandlePropertiesKHR to id VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR
-template <> struct LvlTypeMap<VkMemoryWin32HandlePropertiesKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR> {
- typedef VkMemoryWin32HandlePropertiesKHR Type;
-};
-
-#endif // VK_USE_PLATFORM_WIN32_KHR
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-// Map type VkMemoryGetWin32HandleInfoKHR to id VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR
-template <> struct LvlTypeMap<VkMemoryGetWin32HandleInfoKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR> {
- typedef VkMemoryGetWin32HandleInfoKHR Type;
-};
-
-#endif // VK_USE_PLATFORM_WIN32_KHR
-// Map type VkImportMemoryFdInfoKHR to id VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR
-template <> struct LvlTypeMap<VkImportMemoryFdInfoKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR> {
- typedef VkImportMemoryFdInfoKHR Type;
-};
-
-// Map type VkMemoryFdPropertiesKHR to id VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR
-template <> struct LvlTypeMap<VkMemoryFdPropertiesKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR> {
- typedef VkMemoryFdPropertiesKHR Type;
-};
-
-// Map type VkMemoryGetFdInfoKHR to id VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR
-template <> struct LvlTypeMap<VkMemoryGetFdInfoKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR> {
- typedef VkMemoryGetFdInfoKHR Type;
-};
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-// Map type VkWin32KeyedMutexAcquireReleaseInfoKHR to id VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR
-template <> struct LvlTypeMap<VkWin32KeyedMutexAcquireReleaseInfoKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR> {
- typedef VkWin32KeyedMutexAcquireReleaseInfoKHR Type;
-};
-
-#endif // VK_USE_PLATFORM_WIN32_KHR
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-// Map type VkImportSemaphoreWin32HandleInfoKHR to id VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR
-template <> struct LvlTypeMap<VkImportSemaphoreWin32HandleInfoKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR> {
- typedef VkImportSemaphoreWin32HandleInfoKHR Type;
-};
-
-#endif // VK_USE_PLATFORM_WIN32_KHR
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-// Map type VkExportSemaphoreWin32HandleInfoKHR to id VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR
-template <> struct LvlTypeMap<VkExportSemaphoreWin32HandleInfoKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR> {
- typedef VkExportSemaphoreWin32HandleInfoKHR Type;
-};
-
-#endif // VK_USE_PLATFORM_WIN32_KHR
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-// Map type VkD3D12FenceSubmitInfoKHR to id VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR
-template <> struct LvlTypeMap<VkD3D12FenceSubmitInfoKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR> {
- typedef VkD3D12FenceSubmitInfoKHR Type;
-};
-
-#endif // VK_USE_PLATFORM_WIN32_KHR
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-// Map type VkSemaphoreGetWin32HandleInfoKHR to id VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR
-template <> struct LvlTypeMap<VkSemaphoreGetWin32HandleInfoKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR> {
- typedef VkSemaphoreGetWin32HandleInfoKHR Type;
-};
-
-#endif // VK_USE_PLATFORM_WIN32_KHR
-// Map type VkImportSemaphoreFdInfoKHR to id VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR
-template <> struct LvlTypeMap<VkImportSemaphoreFdInfoKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR> {
- typedef VkImportSemaphoreFdInfoKHR Type;
-};
-
-// Map type VkSemaphoreGetFdInfoKHR to id VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR
-template <> struct LvlTypeMap<VkSemaphoreGetFdInfoKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR> {
- typedef VkSemaphoreGetFdInfoKHR Type;
-};
-
-// Map type VkPhysicalDevicePushDescriptorPropertiesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR
-template <> struct LvlTypeMap<VkPhysicalDevicePushDescriptorPropertiesKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR> {
- typedef VkPhysicalDevicePushDescriptorPropertiesKHR Type;
-};
-
-// Map type VkPhysicalDeviceShaderFloat16Int8FeaturesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR
-template <> struct LvlTypeMap<VkPhysicalDeviceShaderFloat16Int8FeaturesKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR> {
- typedef VkPhysicalDeviceShaderFloat16Int8FeaturesKHR Type;
-};
-
-// Map type VkPresentRegionsKHR to id VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR
-template <> struct LvlTypeMap<VkPresentRegionsKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR> {
- typedef VkPresentRegionsKHR Type;
-};
-
-// Map type VkPhysicalDeviceImagelessFramebufferFeaturesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR
-template <> struct LvlTypeMap<VkPhysicalDeviceImagelessFramebufferFeaturesKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR> {
- typedef VkPhysicalDeviceImagelessFramebufferFeaturesKHR Type;
-};
-
-// Map type VkFramebufferAttachmentImageInfoKHR to id VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR
-template <> struct LvlTypeMap<VkFramebufferAttachmentImageInfoKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR> {
- typedef VkFramebufferAttachmentImageInfoKHR Type;
-};
-
-// Map type VkFramebufferAttachmentsCreateInfoKHR to id VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR
-template <> struct LvlTypeMap<VkFramebufferAttachmentsCreateInfoKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR> {
- typedef VkFramebufferAttachmentsCreateInfoKHR Type;
-};
-
-// Map type VkRenderPassAttachmentBeginInfoKHR to id VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR
-template <> struct LvlTypeMap<VkRenderPassAttachmentBeginInfoKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR> {
- typedef VkRenderPassAttachmentBeginInfoKHR Type;
-};
-
-// Map type VkAttachmentDescription2KHR to id VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR
-template <> struct LvlTypeMap<VkAttachmentDescription2KHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR> {
- typedef VkAttachmentDescription2KHR Type;
-};
-
-// Map type VkAttachmentReference2KHR to id VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR
-template <> struct LvlTypeMap<VkAttachmentReference2KHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR> {
- typedef VkAttachmentReference2KHR Type;
-};
-
-// Map type VkSubpassDescription2KHR to id VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR
-template <> struct LvlTypeMap<VkSubpassDescription2KHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR> {
- typedef VkSubpassDescription2KHR Type;
-};
-
-// Map type VkSubpassDependency2KHR to id VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR
-template <> struct LvlTypeMap<VkSubpassDependency2KHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR> {
- typedef VkSubpassDependency2KHR Type;
-};
-
-// Map type VkRenderPassCreateInfo2KHR to id VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR
-template <> struct LvlTypeMap<VkRenderPassCreateInfo2KHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR> {
- typedef VkRenderPassCreateInfo2KHR Type;
-};
-
-// Map type VkSubpassBeginInfoKHR to id VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR
-template <> struct LvlTypeMap<VkSubpassBeginInfoKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR> {
- typedef VkSubpassBeginInfoKHR Type;
-};
-
-// Map type VkSubpassEndInfoKHR to id VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR
-template <> struct LvlTypeMap<VkSubpassEndInfoKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR> {
- typedef VkSubpassEndInfoKHR Type;
-};
-
-// Map type VkSharedPresentSurfaceCapabilitiesKHR to id VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR
-template <> struct LvlTypeMap<VkSharedPresentSurfaceCapabilitiesKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR> {
- typedef VkSharedPresentSurfaceCapabilitiesKHR Type;
-};
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-// Map type VkImportFenceWin32HandleInfoKHR to id VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR
-template <> struct LvlTypeMap<VkImportFenceWin32HandleInfoKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR> {
- typedef VkImportFenceWin32HandleInfoKHR Type;
-};
-
-#endif // VK_USE_PLATFORM_WIN32_KHR
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-// Map type VkExportFenceWin32HandleInfoKHR to id VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR
-template <> struct LvlTypeMap<VkExportFenceWin32HandleInfoKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR> {
- typedef VkExportFenceWin32HandleInfoKHR Type;
-};
-
-#endif // VK_USE_PLATFORM_WIN32_KHR
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-// Map type VkFenceGetWin32HandleInfoKHR to id VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR
-template <> struct LvlTypeMap<VkFenceGetWin32HandleInfoKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR> {
- typedef VkFenceGetWin32HandleInfoKHR Type;
-};
-
-#endif // VK_USE_PLATFORM_WIN32_KHR
-// Map type VkImportFenceFdInfoKHR to id VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR
-template <> struct LvlTypeMap<VkImportFenceFdInfoKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR> {
- typedef VkImportFenceFdInfoKHR Type;
-};
-
-// Map type VkFenceGetFdInfoKHR to id VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR
-template <> struct LvlTypeMap<VkFenceGetFdInfoKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR> {
- typedef VkFenceGetFdInfoKHR Type;
-};
-
-// Map type VkPhysicalDeviceSurfaceInfo2KHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR
-template <> struct LvlTypeMap<VkPhysicalDeviceSurfaceInfo2KHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR> {
- typedef VkPhysicalDeviceSurfaceInfo2KHR Type;
-};
-
-// Map type VkSurfaceCapabilities2KHR to id VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR
-template <> struct LvlTypeMap<VkSurfaceCapabilities2KHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR> {
- typedef VkSurfaceCapabilities2KHR Type;
-};
-
-// Map type VkSurfaceFormat2KHR to id VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR
-template <> struct LvlTypeMap<VkSurfaceFormat2KHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR> {
- typedef VkSurfaceFormat2KHR Type;
-};
-
-// Map type VkDisplayProperties2KHR to id VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR
-template <> struct LvlTypeMap<VkDisplayProperties2KHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR> {
- typedef VkDisplayProperties2KHR Type;
-};
-
-// Map type VkDisplayPlaneProperties2KHR to id VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR
-template <> struct LvlTypeMap<VkDisplayPlaneProperties2KHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR> {
- typedef VkDisplayPlaneProperties2KHR Type;
-};
-
-// Map type VkDisplayModeProperties2KHR to id VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR
-template <> struct LvlTypeMap<VkDisplayModeProperties2KHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR> {
- typedef VkDisplayModeProperties2KHR Type;
-};
-
-// Map type VkDisplayPlaneInfo2KHR to id VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR
-template <> struct LvlTypeMap<VkDisplayPlaneInfo2KHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR> {
- typedef VkDisplayPlaneInfo2KHR Type;
-};
-
-// Map type VkDisplayPlaneCapabilities2KHR to id VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR
-template <> struct LvlTypeMap<VkDisplayPlaneCapabilities2KHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR> {
- typedef VkDisplayPlaneCapabilities2KHR Type;
-};
-
-// Map type VkImageFormatListCreateInfoKHR to id VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR
-template <> struct LvlTypeMap<VkImageFormatListCreateInfoKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR> {
- typedef VkImageFormatListCreateInfoKHR Type;
-};
-
-// Map type VkPhysicalDevice8BitStorageFeaturesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR
-template <> struct LvlTypeMap<VkPhysicalDevice8BitStorageFeaturesKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR> {
- typedef VkPhysicalDevice8BitStorageFeaturesKHR Type;
-};
-
-// Map type VkPhysicalDeviceShaderAtomicInt64FeaturesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR
-template <> struct LvlTypeMap<VkPhysicalDeviceShaderAtomicInt64FeaturesKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR> {
- typedef VkPhysicalDeviceShaderAtomicInt64FeaturesKHR Type;
-};
-
-// Map type VkPhysicalDeviceDriverPropertiesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR
-template <> struct LvlTypeMap<VkPhysicalDeviceDriverPropertiesKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR> {
- typedef VkPhysicalDeviceDriverPropertiesKHR Type;
-};
-
-// Map type VkPhysicalDeviceFloatControlsPropertiesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR
-template <> struct LvlTypeMap<VkPhysicalDeviceFloatControlsPropertiesKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR> {
- typedef VkPhysicalDeviceFloatControlsPropertiesKHR Type;
-};
-
-// Map type VkSubpassDescriptionDepthStencilResolveKHR to id VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR
-template <> struct LvlTypeMap<VkSubpassDescriptionDepthStencilResolveKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR> {
- typedef VkSubpassDescriptionDepthStencilResolveKHR Type;
-};
-
-// Map type VkPhysicalDeviceDepthStencilResolvePropertiesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR
-template <> struct LvlTypeMap<VkPhysicalDeviceDepthStencilResolvePropertiesKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR> {
- typedef VkPhysicalDeviceDepthStencilResolvePropertiesKHR Type;
-};
-
-// Map type VkPhysicalDeviceVulkanMemoryModelFeaturesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR
-template <> struct LvlTypeMap<VkPhysicalDeviceVulkanMemoryModelFeaturesKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR> {
- typedef VkPhysicalDeviceVulkanMemoryModelFeaturesKHR Type;
-};
-
-// Map type VkSurfaceProtectedCapabilitiesKHR to id VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR
-template <> struct LvlTypeMap<VkSurfaceProtectedCapabilitiesKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR> {
- typedef VkSurfaceProtectedCapabilitiesKHR Type;
-};
-
-// Map type VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR
-template <> struct LvlTypeMap<VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR> {
- typedef VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR Type;
-};
-
-// Map type VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR
-template <> struct LvlTypeMap<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR> {
- typedef VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR Type;
-};
-
-// Map type VkPipelineInfoKHR to id VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR
-template <> struct LvlTypeMap<VkPipelineInfoKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR> {
- typedef VkPipelineInfoKHR Type;
-};
-
-// Map type VkPipelineExecutablePropertiesKHR to id VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR
-template <> struct LvlTypeMap<VkPipelineExecutablePropertiesKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR> {
- typedef VkPipelineExecutablePropertiesKHR Type;
-};
-
-// Map type VkPipelineExecutableInfoKHR to id VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR
-template <> struct LvlTypeMap<VkPipelineExecutableInfoKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR> {
- typedef VkPipelineExecutableInfoKHR Type;
-};
-
-// Map type VkPipelineExecutableStatisticKHR to id VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR
-template <> struct LvlTypeMap<VkPipelineExecutableStatisticKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR> {
- typedef VkPipelineExecutableStatisticKHR Type;
-};
-
-// Map type VkPipelineExecutableInternalRepresentationKHR to id VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR
-template <> struct LvlTypeMap<VkPipelineExecutableInternalRepresentationKHR> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR> {
- typedef VkPipelineExecutableInternalRepresentationKHR Type;
-};
-
-// Map type VkDebugReportCallbackCreateInfoEXT to id VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT
-template <> struct LvlTypeMap<VkDebugReportCallbackCreateInfoEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT> {
- typedef VkDebugReportCallbackCreateInfoEXT Type;
-};
-
-// Map type VkPipelineRasterizationStateRasterizationOrderAMD to id VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD
-template <> struct LvlTypeMap<VkPipelineRasterizationStateRasterizationOrderAMD> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD> {
- typedef VkPipelineRasterizationStateRasterizationOrderAMD Type;
-};
-
-// Map type VkDebugMarkerObjectNameInfoEXT to id VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT
-template <> struct LvlTypeMap<VkDebugMarkerObjectNameInfoEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT> {
- typedef VkDebugMarkerObjectNameInfoEXT Type;
-};
-
-// Map type VkDebugMarkerObjectTagInfoEXT to id VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT
-template <> struct LvlTypeMap<VkDebugMarkerObjectTagInfoEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT> {
- typedef VkDebugMarkerObjectTagInfoEXT Type;
-};
-
-// Map type VkDebugMarkerMarkerInfoEXT to id VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT
-template <> struct LvlTypeMap<VkDebugMarkerMarkerInfoEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT> {
- typedef VkDebugMarkerMarkerInfoEXT Type;
-};
-
-// Map type VkDedicatedAllocationImageCreateInfoNV to id VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV
-template <> struct LvlTypeMap<VkDedicatedAllocationImageCreateInfoNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV> {
- typedef VkDedicatedAllocationImageCreateInfoNV Type;
-};
-
-// Map type VkDedicatedAllocationBufferCreateInfoNV to id VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV
-template <> struct LvlTypeMap<VkDedicatedAllocationBufferCreateInfoNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV> {
- typedef VkDedicatedAllocationBufferCreateInfoNV Type;
-};
-
-// Map type VkDedicatedAllocationMemoryAllocateInfoNV to id VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV
-template <> struct LvlTypeMap<VkDedicatedAllocationMemoryAllocateInfoNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV> {
- typedef VkDedicatedAllocationMemoryAllocateInfoNV Type;
-};
-
-// Map type VkPhysicalDeviceTransformFeedbackFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT
-template <> struct LvlTypeMap<VkPhysicalDeviceTransformFeedbackFeaturesEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT> {
- typedef VkPhysicalDeviceTransformFeedbackFeaturesEXT Type;
-};
-
-// Map type VkPhysicalDeviceTransformFeedbackPropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT
-template <> struct LvlTypeMap<VkPhysicalDeviceTransformFeedbackPropertiesEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT> {
- typedef VkPhysicalDeviceTransformFeedbackPropertiesEXT Type;
-};
-
-// Map type VkPipelineRasterizationStateStreamCreateInfoEXT to id VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT
-template <> struct LvlTypeMap<VkPipelineRasterizationStateStreamCreateInfoEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT> {
- typedef VkPipelineRasterizationStateStreamCreateInfoEXT Type;
-};
-
-// Map type VkImageViewHandleInfoNVX to id VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX
-template <> struct LvlTypeMap<VkImageViewHandleInfoNVX> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX> {
- typedef VkImageViewHandleInfoNVX Type;
-};
-
-// Map type VkTextureLODGatherFormatPropertiesAMD to id VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD
-template <> struct LvlTypeMap<VkTextureLODGatherFormatPropertiesAMD> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD> {
- typedef VkTextureLODGatherFormatPropertiesAMD Type;
-};
-
-#ifdef VK_USE_PLATFORM_GGP
-// Map type VkStreamDescriptorSurfaceCreateInfoGGP to id VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP
-template <> struct LvlTypeMap<VkStreamDescriptorSurfaceCreateInfoGGP> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP> {
- typedef VkStreamDescriptorSurfaceCreateInfoGGP Type;
-};
-
-#endif // VK_USE_PLATFORM_GGP
-// Map type VkPhysicalDeviceCornerSampledImageFeaturesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV
-template <> struct LvlTypeMap<VkPhysicalDeviceCornerSampledImageFeaturesNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV> {
- typedef VkPhysicalDeviceCornerSampledImageFeaturesNV Type;
-};
-
-// Map type VkExternalMemoryImageCreateInfoNV to id VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV
-template <> struct LvlTypeMap<VkExternalMemoryImageCreateInfoNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV> {
- typedef VkExternalMemoryImageCreateInfoNV Type;
-};
-
-// Map type VkExportMemoryAllocateInfoNV to id VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV
-template <> struct LvlTypeMap<VkExportMemoryAllocateInfoNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV> {
- typedef VkExportMemoryAllocateInfoNV Type;
-};
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-// Map type VkImportMemoryWin32HandleInfoNV to id VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV
-template <> struct LvlTypeMap<VkImportMemoryWin32HandleInfoNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV> {
- typedef VkImportMemoryWin32HandleInfoNV Type;
-};
-
-#endif // VK_USE_PLATFORM_WIN32_KHR
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-// Map type VkExportMemoryWin32HandleInfoNV to id VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV
-template <> struct LvlTypeMap<VkExportMemoryWin32HandleInfoNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV> {
- typedef VkExportMemoryWin32HandleInfoNV Type;
-};
-
-#endif // VK_USE_PLATFORM_WIN32_KHR
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-// Map type VkWin32KeyedMutexAcquireReleaseInfoNV to id VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV
-template <> struct LvlTypeMap<VkWin32KeyedMutexAcquireReleaseInfoNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV> {
- typedef VkWin32KeyedMutexAcquireReleaseInfoNV Type;
-};
-
-#endif // VK_USE_PLATFORM_WIN32_KHR
-// Map type VkValidationFlagsEXT to id VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT
-template <> struct LvlTypeMap<VkValidationFlagsEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT> {
- typedef VkValidationFlagsEXT Type;
-};
-
-#ifdef VK_USE_PLATFORM_VI_NN
-// Map type VkViSurfaceCreateInfoNN to id VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN
-template <> struct LvlTypeMap<VkViSurfaceCreateInfoNN> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN> {
- typedef VkViSurfaceCreateInfoNN Type;
-};
-
-#endif // VK_USE_PLATFORM_VI_NN
-// Map type VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT
-template <> struct LvlTypeMap<VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT> {
- typedef VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT Type;
-};
-
-// Map type VkImageViewASTCDecodeModeEXT to id VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT
-template <> struct LvlTypeMap<VkImageViewASTCDecodeModeEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT> {
- typedef VkImageViewASTCDecodeModeEXT Type;
-};
-
-// Map type VkPhysicalDeviceASTCDecodeFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT
-template <> struct LvlTypeMap<VkPhysicalDeviceASTCDecodeFeaturesEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT> {
- typedef VkPhysicalDeviceASTCDecodeFeaturesEXT Type;
-};
-
-// Map type VkConditionalRenderingBeginInfoEXT to id VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT
-template <> struct LvlTypeMap<VkConditionalRenderingBeginInfoEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT> {
- typedef VkConditionalRenderingBeginInfoEXT Type;
-};
-
-// Map type VkPhysicalDeviceConditionalRenderingFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT
-template <> struct LvlTypeMap<VkPhysicalDeviceConditionalRenderingFeaturesEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT> {
- typedef VkPhysicalDeviceConditionalRenderingFeaturesEXT Type;
-};
-
-// Map type VkCommandBufferInheritanceConditionalRenderingInfoEXT to id VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT
-template <> struct LvlTypeMap<VkCommandBufferInheritanceConditionalRenderingInfoEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT> {
- typedef VkCommandBufferInheritanceConditionalRenderingInfoEXT Type;
-};
-
-// Map type VkDeviceGeneratedCommandsFeaturesNVX to id VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX
-template <> struct LvlTypeMap<VkDeviceGeneratedCommandsFeaturesNVX> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX> {
- typedef VkDeviceGeneratedCommandsFeaturesNVX Type;
-};
-
-// Map type VkDeviceGeneratedCommandsLimitsNVX to id VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX
-template <> struct LvlTypeMap<VkDeviceGeneratedCommandsLimitsNVX> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX> {
- typedef VkDeviceGeneratedCommandsLimitsNVX Type;
-};
-
-// Map type VkIndirectCommandsLayoutCreateInfoNVX to id VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX
-template <> struct LvlTypeMap<VkIndirectCommandsLayoutCreateInfoNVX> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX> {
- typedef VkIndirectCommandsLayoutCreateInfoNVX Type;
-};
-
-// Map type VkCmdProcessCommandsInfoNVX to id VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX
-template <> struct LvlTypeMap<VkCmdProcessCommandsInfoNVX> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX> {
- typedef VkCmdProcessCommandsInfoNVX Type;
-};
-
-// Map type VkCmdReserveSpaceForCommandsInfoNVX to id VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX
-template <> struct LvlTypeMap<VkCmdReserveSpaceForCommandsInfoNVX> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX> {
- typedef VkCmdReserveSpaceForCommandsInfoNVX Type;
-};
-
-// Map type VkObjectTableCreateInfoNVX to id VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX
-template <> struct LvlTypeMap<VkObjectTableCreateInfoNVX> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX> {
- typedef VkObjectTableCreateInfoNVX Type;
-};
-
-// Map type VkPipelineViewportWScalingStateCreateInfoNV to id VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV
-template <> struct LvlTypeMap<VkPipelineViewportWScalingStateCreateInfoNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV> {
- typedef VkPipelineViewportWScalingStateCreateInfoNV Type;
-};
-
-// Map type VkSurfaceCapabilities2EXT to id VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT
-template <> struct LvlTypeMap<VkSurfaceCapabilities2EXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT> {
- typedef VkSurfaceCapabilities2EXT Type;
-};
-
-// Map type VkDisplayPowerInfoEXT to id VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT
-template <> struct LvlTypeMap<VkDisplayPowerInfoEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT> {
- typedef VkDisplayPowerInfoEXT Type;
-};
-
-// Map type VkDeviceEventInfoEXT to id VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT
-template <> struct LvlTypeMap<VkDeviceEventInfoEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT> {
- typedef VkDeviceEventInfoEXT Type;
-};
-
-// Map type VkDisplayEventInfoEXT to id VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT
-template <> struct LvlTypeMap<VkDisplayEventInfoEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT> {
- typedef VkDisplayEventInfoEXT Type;
-};
-
-// Map type VkSwapchainCounterCreateInfoEXT to id VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT
-template <> struct LvlTypeMap<VkSwapchainCounterCreateInfoEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT> {
- typedef VkSwapchainCounterCreateInfoEXT Type;
-};
-
-// Map type VkPresentTimesInfoGOOGLE to id VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE
-template <> struct LvlTypeMap<VkPresentTimesInfoGOOGLE> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE> {
- typedef VkPresentTimesInfoGOOGLE Type;
-};
-
-// Map type VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX
-template <> struct LvlTypeMap<VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX> {
- typedef VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX Type;
-};
-
-// Map type VkPipelineViewportSwizzleStateCreateInfoNV to id VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV
-template <> struct LvlTypeMap<VkPipelineViewportSwizzleStateCreateInfoNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV> {
- typedef VkPipelineViewportSwizzleStateCreateInfoNV Type;
-};
-
-// Map type VkPhysicalDeviceDiscardRectanglePropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT
-template <> struct LvlTypeMap<VkPhysicalDeviceDiscardRectanglePropertiesEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT> {
- typedef VkPhysicalDeviceDiscardRectanglePropertiesEXT Type;
-};
-
-// Map type VkPipelineDiscardRectangleStateCreateInfoEXT to id VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT
-template <> struct LvlTypeMap<VkPipelineDiscardRectangleStateCreateInfoEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT> {
- typedef VkPipelineDiscardRectangleStateCreateInfoEXT Type;
-};
-
-// Map type VkPhysicalDeviceConservativeRasterizationPropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT
-template <> struct LvlTypeMap<VkPhysicalDeviceConservativeRasterizationPropertiesEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT> {
- typedef VkPhysicalDeviceConservativeRasterizationPropertiesEXT Type;
-};
-
-// Map type VkPipelineRasterizationConservativeStateCreateInfoEXT to id VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT
-template <> struct LvlTypeMap<VkPipelineRasterizationConservativeStateCreateInfoEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT> {
- typedef VkPipelineRasterizationConservativeStateCreateInfoEXT Type;
-};
-
-// Map type VkPhysicalDeviceDepthClipEnableFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT
-template <> struct LvlTypeMap<VkPhysicalDeviceDepthClipEnableFeaturesEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT> {
- typedef VkPhysicalDeviceDepthClipEnableFeaturesEXT Type;
-};
-
-// Map type VkPipelineRasterizationDepthClipStateCreateInfoEXT to id VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT
-template <> struct LvlTypeMap<VkPipelineRasterizationDepthClipStateCreateInfoEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT> {
- typedef VkPipelineRasterizationDepthClipStateCreateInfoEXT Type;
-};
-
-// Map type VkHdrMetadataEXT to id VK_STRUCTURE_TYPE_HDR_METADATA_EXT
-template <> struct LvlTypeMap<VkHdrMetadataEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_HDR_METADATA_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_HDR_METADATA_EXT> {
- typedef VkHdrMetadataEXT Type;
-};
-
-#ifdef VK_USE_PLATFORM_IOS_MVK
-// Map type VkIOSSurfaceCreateInfoMVK to id VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK
-template <> struct LvlTypeMap<VkIOSSurfaceCreateInfoMVK> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK> {
- typedef VkIOSSurfaceCreateInfoMVK Type;
-};
-
-#endif // VK_USE_PLATFORM_IOS_MVK
-#ifdef VK_USE_PLATFORM_MACOS_MVK
-// Map type VkMacOSSurfaceCreateInfoMVK to id VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK
-template <> struct LvlTypeMap<VkMacOSSurfaceCreateInfoMVK> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK> {
- typedef VkMacOSSurfaceCreateInfoMVK Type;
-};
-
-#endif // VK_USE_PLATFORM_MACOS_MVK
-// Map type VkDebugUtilsObjectNameInfoEXT to id VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT
-template <> struct LvlTypeMap<VkDebugUtilsObjectNameInfoEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT> {
- typedef VkDebugUtilsObjectNameInfoEXT Type;
-};
-
-// Map type VkDebugUtilsObjectTagInfoEXT to id VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT
-template <> struct LvlTypeMap<VkDebugUtilsObjectTagInfoEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT> {
- typedef VkDebugUtilsObjectTagInfoEXT Type;
-};
-
-// Map type VkDebugUtilsLabelEXT to id VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT
-template <> struct LvlTypeMap<VkDebugUtilsLabelEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT> {
- typedef VkDebugUtilsLabelEXT Type;
-};
-
-// Map type VkDebugUtilsMessengerCallbackDataEXT to id VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT
-template <> struct LvlTypeMap<VkDebugUtilsMessengerCallbackDataEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT> {
- typedef VkDebugUtilsMessengerCallbackDataEXT Type;
-};
-
-// Map type VkDebugUtilsMessengerCreateInfoEXT to id VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT
-template <> struct LvlTypeMap<VkDebugUtilsMessengerCreateInfoEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT> {
- typedef VkDebugUtilsMessengerCreateInfoEXT Type;
-};
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-// Map type VkAndroidHardwareBufferUsageANDROID to id VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID
-template <> struct LvlTypeMap<VkAndroidHardwareBufferUsageANDROID> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID> {
- typedef VkAndroidHardwareBufferUsageANDROID Type;
-};
-
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-// Map type VkAndroidHardwareBufferPropertiesANDROID to id VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID
-template <> struct LvlTypeMap<VkAndroidHardwareBufferPropertiesANDROID> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID> {
- typedef VkAndroidHardwareBufferPropertiesANDROID Type;
-};
-
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-// Map type VkAndroidHardwareBufferFormatPropertiesANDROID to id VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID
-template <> struct LvlTypeMap<VkAndroidHardwareBufferFormatPropertiesANDROID> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID> {
- typedef VkAndroidHardwareBufferFormatPropertiesANDROID Type;
-};
-
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-// Map type VkImportAndroidHardwareBufferInfoANDROID to id VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID
-template <> struct LvlTypeMap<VkImportAndroidHardwareBufferInfoANDROID> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID> {
- typedef VkImportAndroidHardwareBufferInfoANDROID Type;
-};
-
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-// Map type VkMemoryGetAndroidHardwareBufferInfoANDROID to id VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID
-template <> struct LvlTypeMap<VkMemoryGetAndroidHardwareBufferInfoANDROID> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID> {
- typedef VkMemoryGetAndroidHardwareBufferInfoANDROID Type;
-};
-
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-// Map type VkExternalFormatANDROID to id VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID
-template <> struct LvlTypeMap<VkExternalFormatANDROID> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID> {
- typedef VkExternalFormatANDROID Type;
-};
-
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-// Map type VkSamplerReductionModeCreateInfoEXT to id VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT
-template <> struct LvlTypeMap<VkSamplerReductionModeCreateInfoEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT> {
- typedef VkSamplerReductionModeCreateInfoEXT Type;
-};
-
-// Map type VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT
-template <> struct LvlTypeMap<VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT> {
- typedef VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT Type;
-};
-
-// Map type VkPhysicalDeviceInlineUniformBlockFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT
-template <> struct LvlTypeMap<VkPhysicalDeviceInlineUniformBlockFeaturesEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT> {
- typedef VkPhysicalDeviceInlineUniformBlockFeaturesEXT Type;
-};
-
-// Map type VkPhysicalDeviceInlineUniformBlockPropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT
-template <> struct LvlTypeMap<VkPhysicalDeviceInlineUniformBlockPropertiesEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT> {
- typedef VkPhysicalDeviceInlineUniformBlockPropertiesEXT Type;
-};
-
-// Map type VkWriteDescriptorSetInlineUniformBlockEXT to id VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT
-template <> struct LvlTypeMap<VkWriteDescriptorSetInlineUniformBlockEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT> {
- typedef VkWriteDescriptorSetInlineUniformBlockEXT Type;
-};
-
-// Map type VkDescriptorPoolInlineUniformBlockCreateInfoEXT to id VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT
-template <> struct LvlTypeMap<VkDescriptorPoolInlineUniformBlockCreateInfoEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT> {
- typedef VkDescriptorPoolInlineUniformBlockCreateInfoEXT Type;
-};
-
-// Map type VkSampleLocationsInfoEXT to id VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT
-template <> struct LvlTypeMap<VkSampleLocationsInfoEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT> {
- typedef VkSampleLocationsInfoEXT Type;
-};
-
-// Map type VkRenderPassSampleLocationsBeginInfoEXT to id VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT
-template <> struct LvlTypeMap<VkRenderPassSampleLocationsBeginInfoEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT> {
- typedef VkRenderPassSampleLocationsBeginInfoEXT Type;
-};
-
-// Map type VkPipelineSampleLocationsStateCreateInfoEXT to id VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT
-template <> struct LvlTypeMap<VkPipelineSampleLocationsStateCreateInfoEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT> {
- typedef VkPipelineSampleLocationsStateCreateInfoEXT Type;
-};
-
-// Map type VkPhysicalDeviceSampleLocationsPropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT
-template <> struct LvlTypeMap<VkPhysicalDeviceSampleLocationsPropertiesEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT> {
- typedef VkPhysicalDeviceSampleLocationsPropertiesEXT Type;
-};
-
-// Map type VkMultisamplePropertiesEXT to id VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT
-template <> struct LvlTypeMap<VkMultisamplePropertiesEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT> {
- typedef VkMultisamplePropertiesEXT Type;
-};
-
-// Map type VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT
-template <> struct LvlTypeMap<VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT> {
- typedef VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT Type;
-};
-
-// Map type VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT
-template <> struct LvlTypeMap<VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT> {
- typedef VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT Type;
-};
-
-// Map type VkPipelineColorBlendAdvancedStateCreateInfoEXT to id VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT
-template <> struct LvlTypeMap<VkPipelineColorBlendAdvancedStateCreateInfoEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT> {
- typedef VkPipelineColorBlendAdvancedStateCreateInfoEXT Type;
-};
-
-// Map type VkPipelineCoverageToColorStateCreateInfoNV to id VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV
-template <> struct LvlTypeMap<VkPipelineCoverageToColorStateCreateInfoNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV> {
- typedef VkPipelineCoverageToColorStateCreateInfoNV Type;
-};
-
-// Map type VkPipelineCoverageModulationStateCreateInfoNV to id VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV
-template <> struct LvlTypeMap<VkPipelineCoverageModulationStateCreateInfoNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV> {
- typedef VkPipelineCoverageModulationStateCreateInfoNV Type;
-};
-
-// Map type VkPhysicalDeviceShaderSMBuiltinsPropertiesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV
-template <> struct LvlTypeMap<VkPhysicalDeviceShaderSMBuiltinsPropertiesNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV> {
- typedef VkPhysicalDeviceShaderSMBuiltinsPropertiesNV Type;
-};
-
-// Map type VkPhysicalDeviceShaderSMBuiltinsFeaturesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV
-template <> struct LvlTypeMap<VkPhysicalDeviceShaderSMBuiltinsFeaturesNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV> {
- typedef VkPhysicalDeviceShaderSMBuiltinsFeaturesNV Type;
-};
-
-// Map type VkDrmFormatModifierPropertiesListEXT to id VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT
-template <> struct LvlTypeMap<VkDrmFormatModifierPropertiesListEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT> {
- typedef VkDrmFormatModifierPropertiesListEXT Type;
-};
-
-// Map type VkPhysicalDeviceImageDrmFormatModifierInfoEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT
-template <> struct LvlTypeMap<VkPhysicalDeviceImageDrmFormatModifierInfoEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT> {
- typedef VkPhysicalDeviceImageDrmFormatModifierInfoEXT Type;
-};
-
-// Map type VkImageDrmFormatModifierListCreateInfoEXT to id VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT
-template <> struct LvlTypeMap<VkImageDrmFormatModifierListCreateInfoEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT> {
- typedef VkImageDrmFormatModifierListCreateInfoEXT Type;
-};
-
-// Map type VkImageDrmFormatModifierExplicitCreateInfoEXT to id VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT
-template <> struct LvlTypeMap<VkImageDrmFormatModifierExplicitCreateInfoEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT> {
- typedef VkImageDrmFormatModifierExplicitCreateInfoEXT Type;
-};
-
-// Map type VkImageDrmFormatModifierPropertiesEXT to id VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT
-template <> struct LvlTypeMap<VkImageDrmFormatModifierPropertiesEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT> {
- typedef VkImageDrmFormatModifierPropertiesEXT Type;
-};
-
-// Map type VkValidationCacheCreateInfoEXT to id VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT
-template <> struct LvlTypeMap<VkValidationCacheCreateInfoEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT> {
- typedef VkValidationCacheCreateInfoEXT Type;
-};
-
-// Map type VkShaderModuleValidationCacheCreateInfoEXT to id VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT
-template <> struct LvlTypeMap<VkShaderModuleValidationCacheCreateInfoEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT> {
- typedef VkShaderModuleValidationCacheCreateInfoEXT Type;
-};
-
-// Map type VkDescriptorSetLayoutBindingFlagsCreateInfoEXT to id VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT
-template <> struct LvlTypeMap<VkDescriptorSetLayoutBindingFlagsCreateInfoEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT> {
- typedef VkDescriptorSetLayoutBindingFlagsCreateInfoEXT Type;
-};
-
-// Map type VkPhysicalDeviceDescriptorIndexingFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT
-template <> struct LvlTypeMap<VkPhysicalDeviceDescriptorIndexingFeaturesEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT> {
- typedef VkPhysicalDeviceDescriptorIndexingFeaturesEXT Type;
-};
-
-// Map type VkPhysicalDeviceDescriptorIndexingPropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT
-template <> struct LvlTypeMap<VkPhysicalDeviceDescriptorIndexingPropertiesEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT> {
- typedef VkPhysicalDeviceDescriptorIndexingPropertiesEXT Type;
-};
-
-// Map type VkDescriptorSetVariableDescriptorCountAllocateInfoEXT to id VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT
-template <> struct LvlTypeMap<VkDescriptorSetVariableDescriptorCountAllocateInfoEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT> {
- typedef VkDescriptorSetVariableDescriptorCountAllocateInfoEXT Type;
-};
-
-// Map type VkDescriptorSetVariableDescriptorCountLayoutSupportEXT to id VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT
-template <> struct LvlTypeMap<VkDescriptorSetVariableDescriptorCountLayoutSupportEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT> {
- typedef VkDescriptorSetVariableDescriptorCountLayoutSupportEXT Type;
-};
-
-// Map type VkPipelineViewportShadingRateImageStateCreateInfoNV to id VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV
-template <> struct LvlTypeMap<VkPipelineViewportShadingRateImageStateCreateInfoNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV> {
- typedef VkPipelineViewportShadingRateImageStateCreateInfoNV Type;
-};
-
-// Map type VkPhysicalDeviceShadingRateImageFeaturesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV
-template <> struct LvlTypeMap<VkPhysicalDeviceShadingRateImageFeaturesNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV> {
- typedef VkPhysicalDeviceShadingRateImageFeaturesNV Type;
-};
-
-// Map type VkPhysicalDeviceShadingRateImagePropertiesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV
-template <> struct LvlTypeMap<VkPhysicalDeviceShadingRateImagePropertiesNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV> {
- typedef VkPhysicalDeviceShadingRateImagePropertiesNV Type;
-};
-
-// Map type VkPipelineViewportCoarseSampleOrderStateCreateInfoNV to id VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV
-template <> struct LvlTypeMap<VkPipelineViewportCoarseSampleOrderStateCreateInfoNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV> {
- typedef VkPipelineViewportCoarseSampleOrderStateCreateInfoNV Type;
-};
-
-// Map type VkRayTracingShaderGroupCreateInfoNV to id VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV
-template <> struct LvlTypeMap<VkRayTracingShaderGroupCreateInfoNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV> {
- typedef VkRayTracingShaderGroupCreateInfoNV Type;
-};
-
-// Map type VkRayTracingPipelineCreateInfoNV to id VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV
-template <> struct LvlTypeMap<VkRayTracingPipelineCreateInfoNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV> {
- typedef VkRayTracingPipelineCreateInfoNV Type;
-};
-
-// Map type VkGeometryTrianglesNV to id VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV
-template <> struct LvlTypeMap<VkGeometryTrianglesNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV> {
- typedef VkGeometryTrianglesNV Type;
-};
-
-// Map type VkGeometryAABBNV to id VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV
-template <> struct LvlTypeMap<VkGeometryAABBNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV> {
- typedef VkGeometryAABBNV Type;
-};
-
-// Map type VkGeometryNV to id VK_STRUCTURE_TYPE_GEOMETRY_NV
-template <> struct LvlTypeMap<VkGeometryNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_GEOMETRY_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_GEOMETRY_NV> {
- typedef VkGeometryNV Type;
-};
-
-// Map type VkAccelerationStructureInfoNV to id VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV
-template <> struct LvlTypeMap<VkAccelerationStructureInfoNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV> {
- typedef VkAccelerationStructureInfoNV Type;
-};
-
-// Map type VkAccelerationStructureCreateInfoNV to id VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV
-template <> struct LvlTypeMap<VkAccelerationStructureCreateInfoNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV> {
- typedef VkAccelerationStructureCreateInfoNV Type;
-};
-
-// Map type VkBindAccelerationStructureMemoryInfoNV to id VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV
-template <> struct LvlTypeMap<VkBindAccelerationStructureMemoryInfoNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV> {
- typedef VkBindAccelerationStructureMemoryInfoNV Type;
-};
-
-// Map type VkWriteDescriptorSetAccelerationStructureNV to id VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV
-template <> struct LvlTypeMap<VkWriteDescriptorSetAccelerationStructureNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV> {
- typedef VkWriteDescriptorSetAccelerationStructureNV Type;
-};
-
-// Map type VkAccelerationStructureMemoryRequirementsInfoNV to id VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV
-template <> struct LvlTypeMap<VkAccelerationStructureMemoryRequirementsInfoNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV> {
- typedef VkAccelerationStructureMemoryRequirementsInfoNV Type;
-};
-
-// Map type VkPhysicalDeviceRayTracingPropertiesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV
-template <> struct LvlTypeMap<VkPhysicalDeviceRayTracingPropertiesNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV> {
- typedef VkPhysicalDeviceRayTracingPropertiesNV Type;
-};
-
-// Map type VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV
-template <> struct LvlTypeMap<VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV> {
- typedef VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV Type;
-};
-
-// Map type VkPipelineRepresentativeFragmentTestStateCreateInfoNV to id VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV
-template <> struct LvlTypeMap<VkPipelineRepresentativeFragmentTestStateCreateInfoNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV> {
- typedef VkPipelineRepresentativeFragmentTestStateCreateInfoNV Type;
-};
-
-// Map type VkPhysicalDeviceImageViewImageFormatInfoEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT
-template <> struct LvlTypeMap<VkPhysicalDeviceImageViewImageFormatInfoEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT> {
- typedef VkPhysicalDeviceImageViewImageFormatInfoEXT Type;
-};
-
-// Map type VkFilterCubicImageViewImageFormatPropertiesEXT to id VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT
-template <> struct LvlTypeMap<VkFilterCubicImageViewImageFormatPropertiesEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT> {
- typedef VkFilterCubicImageViewImageFormatPropertiesEXT Type;
-};
-
-// Map type VkDeviceQueueGlobalPriorityCreateInfoEXT to id VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT
-template <> struct LvlTypeMap<VkDeviceQueueGlobalPriorityCreateInfoEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT> {
- typedef VkDeviceQueueGlobalPriorityCreateInfoEXT Type;
-};
-
-// Map type VkImportMemoryHostPointerInfoEXT to id VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT
-template <> struct LvlTypeMap<VkImportMemoryHostPointerInfoEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT> {
- typedef VkImportMemoryHostPointerInfoEXT Type;
-};
-
-// Map type VkMemoryHostPointerPropertiesEXT to id VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT
-template <> struct LvlTypeMap<VkMemoryHostPointerPropertiesEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT> {
- typedef VkMemoryHostPointerPropertiesEXT Type;
-};
-
-// Map type VkPhysicalDeviceExternalMemoryHostPropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT
-template <> struct LvlTypeMap<VkPhysicalDeviceExternalMemoryHostPropertiesEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT> {
- typedef VkPhysicalDeviceExternalMemoryHostPropertiesEXT Type;
-};
-
-// Map type VkPipelineCompilerControlCreateInfoAMD to id VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD
-template <> struct LvlTypeMap<VkPipelineCompilerControlCreateInfoAMD> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD> {
- typedef VkPipelineCompilerControlCreateInfoAMD Type;
-};
-
-// Map type VkCalibratedTimestampInfoEXT to id VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT
-template <> struct LvlTypeMap<VkCalibratedTimestampInfoEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT> {
- typedef VkCalibratedTimestampInfoEXT Type;
-};
-
-// Map type VkPhysicalDeviceShaderCorePropertiesAMD to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD
-template <> struct LvlTypeMap<VkPhysicalDeviceShaderCorePropertiesAMD> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD> {
- typedef VkPhysicalDeviceShaderCorePropertiesAMD Type;
-};
-
-// Map type VkDeviceMemoryOverallocationCreateInfoAMD to id VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD
-template <> struct LvlTypeMap<VkDeviceMemoryOverallocationCreateInfoAMD> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD> {
- typedef VkDeviceMemoryOverallocationCreateInfoAMD Type;
-};
-
-// Map type VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT
-template <> struct LvlTypeMap<VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT> {
- typedef VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT Type;
-};
-
-// Map type VkPipelineVertexInputDivisorStateCreateInfoEXT to id VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT
-template <> struct LvlTypeMap<VkPipelineVertexInputDivisorStateCreateInfoEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT> {
- typedef VkPipelineVertexInputDivisorStateCreateInfoEXT Type;
-};
-
-// Map type VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT
-template <> struct LvlTypeMap<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT> {
- typedef VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT Type;
-};
-
-#ifdef VK_USE_PLATFORM_GGP
-// Map type VkPresentFrameTokenGGP to id VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP
-template <> struct LvlTypeMap<VkPresentFrameTokenGGP> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP> {
- typedef VkPresentFrameTokenGGP Type;
-};
-
-#endif // VK_USE_PLATFORM_GGP
-// Map type VkPipelineCreationFeedbackCreateInfoEXT to id VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT
-template <> struct LvlTypeMap<VkPipelineCreationFeedbackCreateInfoEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT> {
- typedef VkPipelineCreationFeedbackCreateInfoEXT Type;
-};
-
-// Map type VkPhysicalDeviceComputeShaderDerivativesFeaturesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV
-template <> struct LvlTypeMap<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV> {
- typedef VkPhysicalDeviceComputeShaderDerivativesFeaturesNV Type;
-};
-
-// Map type VkPhysicalDeviceMeshShaderFeaturesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV
-template <> struct LvlTypeMap<VkPhysicalDeviceMeshShaderFeaturesNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV> {
- typedef VkPhysicalDeviceMeshShaderFeaturesNV Type;
-};
-
-// Map type VkPhysicalDeviceMeshShaderPropertiesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV
-template <> struct LvlTypeMap<VkPhysicalDeviceMeshShaderPropertiesNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV> {
- typedef VkPhysicalDeviceMeshShaderPropertiesNV Type;
-};
-
-// Map type VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV
-template <> struct LvlTypeMap<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV> {
- typedef VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV Type;
-};
-
-// Map type VkPhysicalDeviceShaderImageFootprintFeaturesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV
-template <> struct LvlTypeMap<VkPhysicalDeviceShaderImageFootprintFeaturesNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV> {
- typedef VkPhysicalDeviceShaderImageFootprintFeaturesNV Type;
-};
-
-// Map type VkPipelineViewportExclusiveScissorStateCreateInfoNV to id VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV
-template <> struct LvlTypeMap<VkPipelineViewportExclusiveScissorStateCreateInfoNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV> {
- typedef VkPipelineViewportExclusiveScissorStateCreateInfoNV Type;
-};
-
-// Map type VkPhysicalDeviceExclusiveScissorFeaturesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV
-template <> struct LvlTypeMap<VkPhysicalDeviceExclusiveScissorFeaturesNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV> {
- typedef VkPhysicalDeviceExclusiveScissorFeaturesNV Type;
-};
-
-// Map type VkQueueFamilyCheckpointPropertiesNV to id VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV
-template <> struct LvlTypeMap<VkQueueFamilyCheckpointPropertiesNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV> {
- typedef VkQueueFamilyCheckpointPropertiesNV Type;
-};
-
-// Map type VkCheckpointDataNV to id VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV
-template <> struct LvlTypeMap<VkCheckpointDataNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV> {
- typedef VkCheckpointDataNV Type;
-};
-
-// Map type VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL
-template <> struct LvlTypeMap<VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL> {
- typedef VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL Type;
-};
-
-// Map type VkInitializePerformanceApiInfoINTEL to id VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL
-template <> struct LvlTypeMap<VkInitializePerformanceApiInfoINTEL> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL> {
- typedef VkInitializePerformanceApiInfoINTEL Type;
-};
-
-// Map type VkQueryPoolCreateInfoINTEL to id VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL
-template <> struct LvlTypeMap<VkQueryPoolCreateInfoINTEL> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL> {
- typedef VkQueryPoolCreateInfoINTEL Type;
-};
-
-// Map type VkPerformanceMarkerInfoINTEL to id VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL
-template <> struct LvlTypeMap<VkPerformanceMarkerInfoINTEL> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL> {
- typedef VkPerformanceMarkerInfoINTEL Type;
-};
-
-// Map type VkPerformanceStreamMarkerInfoINTEL to id VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL
-template <> struct LvlTypeMap<VkPerformanceStreamMarkerInfoINTEL> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL> {
- typedef VkPerformanceStreamMarkerInfoINTEL Type;
-};
-
-// Map type VkPerformanceOverrideInfoINTEL to id VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL
-template <> struct LvlTypeMap<VkPerformanceOverrideInfoINTEL> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL> {
- typedef VkPerformanceOverrideInfoINTEL Type;
-};
-
-// Map type VkPerformanceConfigurationAcquireInfoINTEL to id VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL
-template <> struct LvlTypeMap<VkPerformanceConfigurationAcquireInfoINTEL> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL> {
- typedef VkPerformanceConfigurationAcquireInfoINTEL Type;
-};
-
-// Map type VkPhysicalDevicePCIBusInfoPropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT
-template <> struct LvlTypeMap<VkPhysicalDevicePCIBusInfoPropertiesEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT> {
- typedef VkPhysicalDevicePCIBusInfoPropertiesEXT Type;
-};
-
-// Map type VkDisplayNativeHdrSurfaceCapabilitiesAMD to id VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD
-template <> struct LvlTypeMap<VkDisplayNativeHdrSurfaceCapabilitiesAMD> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD> {
- typedef VkDisplayNativeHdrSurfaceCapabilitiesAMD Type;
-};
-
-// Map type VkSwapchainDisplayNativeHdrCreateInfoAMD to id VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD
-template <> struct LvlTypeMap<VkSwapchainDisplayNativeHdrCreateInfoAMD> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD> {
- typedef VkSwapchainDisplayNativeHdrCreateInfoAMD Type;
-};
-
-#ifdef VK_USE_PLATFORM_FUCHSIA
-// Map type VkImagePipeSurfaceCreateInfoFUCHSIA to id VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA
-template <> struct LvlTypeMap<VkImagePipeSurfaceCreateInfoFUCHSIA> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA> {
- typedef VkImagePipeSurfaceCreateInfoFUCHSIA Type;
-};
-
-#endif // VK_USE_PLATFORM_FUCHSIA
-#ifdef VK_USE_PLATFORM_METAL_EXT
-// Map type VkMetalSurfaceCreateInfoEXT to id VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT
-template <> struct LvlTypeMap<VkMetalSurfaceCreateInfoEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT> {
- typedef VkMetalSurfaceCreateInfoEXT Type;
-};
-
-#endif // VK_USE_PLATFORM_METAL_EXT
-// Map type VkPhysicalDeviceFragmentDensityMapFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT
-template <> struct LvlTypeMap<VkPhysicalDeviceFragmentDensityMapFeaturesEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT> {
- typedef VkPhysicalDeviceFragmentDensityMapFeaturesEXT Type;
-};
-
-// Map type VkPhysicalDeviceFragmentDensityMapPropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT
-template <> struct LvlTypeMap<VkPhysicalDeviceFragmentDensityMapPropertiesEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT> {
- typedef VkPhysicalDeviceFragmentDensityMapPropertiesEXT Type;
-};
-
-// Map type VkRenderPassFragmentDensityMapCreateInfoEXT to id VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT
-template <> struct LvlTypeMap<VkRenderPassFragmentDensityMapCreateInfoEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT> {
- typedef VkRenderPassFragmentDensityMapCreateInfoEXT Type;
-};
-
-// Map type VkPhysicalDeviceScalarBlockLayoutFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT
-template <> struct LvlTypeMap<VkPhysicalDeviceScalarBlockLayoutFeaturesEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT> {
- typedef VkPhysicalDeviceScalarBlockLayoutFeaturesEXT Type;
-};
-
-// Map type VkPhysicalDeviceSubgroupSizeControlFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT
-template <> struct LvlTypeMap<VkPhysicalDeviceSubgroupSizeControlFeaturesEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT> {
- typedef VkPhysicalDeviceSubgroupSizeControlFeaturesEXT Type;
-};
-
-// Map type VkPhysicalDeviceSubgroupSizeControlPropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT
-template <> struct LvlTypeMap<VkPhysicalDeviceSubgroupSizeControlPropertiesEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT> {
- typedef VkPhysicalDeviceSubgroupSizeControlPropertiesEXT Type;
-};
-
-// Map type VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT to id VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT
-template <> struct LvlTypeMap<VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT> {
- typedef VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT Type;
-};
-
-// Map type VkPhysicalDeviceShaderCoreProperties2AMD to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD
-template <> struct LvlTypeMap<VkPhysicalDeviceShaderCoreProperties2AMD> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD> {
- typedef VkPhysicalDeviceShaderCoreProperties2AMD Type;
-};
-
-// Map type VkPhysicalDeviceCoherentMemoryFeaturesAMD to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD
-template <> struct LvlTypeMap<VkPhysicalDeviceCoherentMemoryFeaturesAMD> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD> {
- typedef VkPhysicalDeviceCoherentMemoryFeaturesAMD Type;
-};
-
-// Map type VkPhysicalDeviceMemoryBudgetPropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT
-template <> struct LvlTypeMap<VkPhysicalDeviceMemoryBudgetPropertiesEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT> {
- typedef VkPhysicalDeviceMemoryBudgetPropertiesEXT Type;
-};
-
-// Map type VkPhysicalDeviceMemoryPriorityFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT
-template <> struct LvlTypeMap<VkPhysicalDeviceMemoryPriorityFeaturesEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT> {
- typedef VkPhysicalDeviceMemoryPriorityFeaturesEXT Type;
-};
-
-// Map type VkMemoryPriorityAllocateInfoEXT to id VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT
-template <> struct LvlTypeMap<VkMemoryPriorityAllocateInfoEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT> {
- typedef VkMemoryPriorityAllocateInfoEXT Type;
-};
-
-// Map type VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV
-template <> struct LvlTypeMap<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV> {
- typedef VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV Type;
-};
-
-// Map type VkPhysicalDeviceBufferDeviceAddressFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT
-template <> struct LvlTypeMap<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT> {
- typedef VkPhysicalDeviceBufferDeviceAddressFeaturesEXT Type;
-};
-
-// Map type VkBufferDeviceAddressInfoEXT to id VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT
-template <> struct LvlTypeMap<VkBufferDeviceAddressInfoEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT> {
- typedef VkBufferDeviceAddressInfoEXT Type;
-};
-
-// Map type VkBufferDeviceAddressCreateInfoEXT to id VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT
-template <> struct LvlTypeMap<VkBufferDeviceAddressCreateInfoEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT> {
- typedef VkBufferDeviceAddressCreateInfoEXT Type;
-};
-
-// Map type VkImageStencilUsageCreateInfoEXT to id VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT
-template <> struct LvlTypeMap<VkImageStencilUsageCreateInfoEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT> {
- typedef VkImageStencilUsageCreateInfoEXT Type;
-};
-
-// Map type VkValidationFeaturesEXT to id VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT
-template <> struct LvlTypeMap<VkValidationFeaturesEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT> {
- typedef VkValidationFeaturesEXT Type;
-};
-
-// Map type VkCooperativeMatrixPropertiesNV to id VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV
-template <> struct LvlTypeMap<VkCooperativeMatrixPropertiesNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV> {
- typedef VkCooperativeMatrixPropertiesNV Type;
-};
-
-// Map type VkPhysicalDeviceCooperativeMatrixFeaturesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV
-template <> struct LvlTypeMap<VkPhysicalDeviceCooperativeMatrixFeaturesNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV> {
- typedef VkPhysicalDeviceCooperativeMatrixFeaturesNV Type;
-};
-
-// Map type VkPhysicalDeviceCooperativeMatrixPropertiesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV
-template <> struct LvlTypeMap<VkPhysicalDeviceCooperativeMatrixPropertiesNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV> {
- typedef VkPhysicalDeviceCooperativeMatrixPropertiesNV Type;
-};
-
-// Map type VkPhysicalDeviceCoverageReductionModeFeaturesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV
-template <> struct LvlTypeMap<VkPhysicalDeviceCoverageReductionModeFeaturesNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV> {
- typedef VkPhysicalDeviceCoverageReductionModeFeaturesNV Type;
-};
-
-// Map type VkPipelineCoverageReductionStateCreateInfoNV to id VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV
-template <> struct LvlTypeMap<VkPipelineCoverageReductionStateCreateInfoNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV> {
- typedef VkPipelineCoverageReductionStateCreateInfoNV Type;
-};
-
-// Map type VkFramebufferMixedSamplesCombinationNV to id VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV
-template <> struct LvlTypeMap<VkFramebufferMixedSamplesCombinationNV> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV> {
- typedef VkFramebufferMixedSamplesCombinationNV Type;
-};
-
-// Map type VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT
-template <> struct LvlTypeMap<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT> {
- typedef VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT Type;
-};
-
-// Map type VkPhysicalDeviceYcbcrImageArraysFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT
-template <> struct LvlTypeMap<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT> {
- typedef VkPhysicalDeviceYcbcrImageArraysFeaturesEXT Type;
-};
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-// Map type VkSurfaceFullScreenExclusiveInfoEXT to id VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT
-template <> struct LvlTypeMap<VkSurfaceFullScreenExclusiveInfoEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT> {
- typedef VkSurfaceFullScreenExclusiveInfoEXT Type;
-};
-
-#endif // VK_USE_PLATFORM_WIN32_KHR
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-// Map type VkSurfaceCapabilitiesFullScreenExclusiveEXT to id VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT
-template <> struct LvlTypeMap<VkSurfaceCapabilitiesFullScreenExclusiveEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT> {
- typedef VkSurfaceCapabilitiesFullScreenExclusiveEXT Type;
-};
-
-#endif // VK_USE_PLATFORM_WIN32_KHR
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-// Map type VkSurfaceFullScreenExclusiveWin32InfoEXT to id VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT
-template <> struct LvlTypeMap<VkSurfaceFullScreenExclusiveWin32InfoEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT> {
- typedef VkSurfaceFullScreenExclusiveWin32InfoEXT Type;
-};
-
-#endif // VK_USE_PLATFORM_WIN32_KHR
-// Map type VkHeadlessSurfaceCreateInfoEXT to id VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT
-template <> struct LvlTypeMap<VkHeadlessSurfaceCreateInfoEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT> {
- typedef VkHeadlessSurfaceCreateInfoEXT Type;
-};
-
-// Map type VkPhysicalDeviceLineRasterizationFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT
-template <> struct LvlTypeMap<VkPhysicalDeviceLineRasterizationFeaturesEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT> {
- typedef VkPhysicalDeviceLineRasterizationFeaturesEXT Type;
-};
-
-// Map type VkPhysicalDeviceLineRasterizationPropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT
-template <> struct LvlTypeMap<VkPhysicalDeviceLineRasterizationPropertiesEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT> {
- typedef VkPhysicalDeviceLineRasterizationPropertiesEXT Type;
-};
-
-// Map type VkPipelineRasterizationLineStateCreateInfoEXT to id VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT
-template <> struct LvlTypeMap<VkPipelineRasterizationLineStateCreateInfoEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT> {
- typedef VkPipelineRasterizationLineStateCreateInfoEXT Type;
-};
-
-// Map type VkPhysicalDeviceHostQueryResetFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT
-template <> struct LvlTypeMap<VkPhysicalDeviceHostQueryResetFeaturesEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT> {
- typedef VkPhysicalDeviceHostQueryResetFeaturesEXT Type;
-};
-
-// Map type VkPhysicalDeviceIndexTypeUint8FeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT
-template <> struct LvlTypeMap<VkPhysicalDeviceIndexTypeUint8FeaturesEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT> {
- typedef VkPhysicalDeviceIndexTypeUint8FeaturesEXT Type;
-};
-
-// Map type VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT
-template <> struct LvlTypeMap<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT> {
- typedef VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT Type;
-};
-
-// Map type VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT
-template <> struct LvlTypeMap<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT> {
- typedef VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT Type;
-};
-
-// Map type VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT
-template <> struct LvlTypeMap<VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT> {
- static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT;
-};
-
-template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT> {
- typedef VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT Type;
-};
-
-// Find an entry of the given type in the pNext chain
-template <typename T> const T *lvl_find_in_chain(const void *next) {
- const VkBaseOutStructure *current = reinterpret_cast<const VkBaseOutStructure *>(next);
- const T *found = nullptr;
- while (current) {
- if (LvlTypeMap<T>::kSType == current->sType) {
- found = reinterpret_cast<const T*>(current);
- current = nullptr;
- } else {
- current = current->pNext;
- }
- }
- return found;
-}
-
-// Init the header of an sType struct with pNext
-template <typename T> T lvl_init_struct(void *p_next) {
- T out = {};
- out.sType = LvlTypeMap<T>::kSType;
- out.pNext = p_next;
- return out;
-}
-
-// Init the header of an sType struct
-template <typename T> T lvl_init_struct() {
- T out = {};
- out.sType = LvlTypeMap<T>::kSType;
- return out;
-}
-
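The lvl_init_struct and lvl_find_in_chain helpers above give typed access to Vulkan pNext chains through the LvlTypeMap/LvlSTypeMap specializations. A minimal usage sketch follows; it is illustrative only, assumes the Vulkan headers and this helper header are available, and the two feature structs were picked solely because their mappings appear above.

#include <cassert>
#include <vulkan/vulkan.h>
#include "vk_typemap_helper.h"

static void PnextHelperSketch() {
    // lvl_init_struct fills sType from LvlTypeMap and optionally wires pNext.
    VkPhysicalDeviceMemoryPriorityFeaturesEXT prio = lvl_init_struct<VkPhysicalDeviceMemoryPriorityFeaturesEXT>();
    VkPhysicalDeviceBufferDeviceAddressFeaturesEXT bda =
        lvl_init_struct<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT>(&prio);
    // Walk bda's pNext chain and recover the memory-priority struct by its sType.
    const auto *found = lvl_find_in_chain<VkPhysicalDeviceMemoryPriorityFeaturesEXT>(bda.pNext);
    assert(found == &prio);
}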
diff --git a/layers/gpu_validation.cpp b/layers/gpu_validation.cpp
index 38737a956..6ec10c54d 100644
--- a/layers/gpu_validation.cpp
+++ b/layers/gpu_validation.cpp
@@ -22,10 +22,6 @@
#include "chassis.h"
#include "core_validation.h"
-// This define indicates to build the VMA routines themselves
-#define VMA_IMPLEMENTATION
-// This define indicates that we will supply Vulkan function pointers at initialization
-#define VMA_STATIC_VULKAN_FUNCTIONS 0
#include "gpu_validation.h"
#include "shader_validation.h"
#include "spirv-tools/libspirv.h"
@@ -36,24 +32,196 @@
#include <regex>
// This is the number of bindings in the debug descriptor set.
-static const uint32_t kNumBindingsInSet = 2;
+static const uint32_t kNumBindingsInSet = 1;
-static const VkShaderStageFlags kShaderStageAllRayTracing =
- VK_SHADER_STAGE_ANY_HIT_BIT_NV | VK_SHADER_STAGE_CALLABLE_BIT_NV | VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV |
- VK_SHADER_STAGE_INTERSECTION_BIT_NV | VK_SHADER_STAGE_MISS_BIT_NV | VK_SHADER_STAGE_RAYGEN_BIT_NV;
+// Implementation for Device Memory Manager class
+GpuDeviceMemoryManager::GpuDeviceMemoryManager(layer_data *dev_data, uint32_t data_size) {
+ uint32_t align = static_cast<uint32_t>(dev_data->GetPDProperties()->limits.minStorageBufferOffsetAlignment);
+ if (0 == align) {
+ align = 1;
+ }
+ record_size_ = data_size;
+ // Round the requested size up to the next multiple of the storage buffer offset alignment
+ // so that we can address each block in the storage buffer using the offset.
+ block_size_ = ((record_size_ + align - 1) / align) * align;
+ blocks_per_chunk_ = kItemsPerChunk;
+ chunk_size_ = blocks_per_chunk_ * block_size_;
+ dev_data_ = dev_data;
+}
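// Illustrative arithmetic for the round-up above (values are hypothetical): with
// data_size = 84 and minStorageBufferOffsetAlignment = 32, block_size_ =
// ((84 + 32 - 1) / 32) * 32 = 96, so block i of a chunk begins at offset i * 96
// and every block offset satisfies the storage-buffer offset alignment.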
+
+GpuDeviceMemoryManager::~GpuDeviceMemoryManager() {
+ for (auto &chunk : chunk_list_) {
+ FreeMemoryChunk(chunk);
+ }
+ chunk_list_.clear();
+}
+
+VkResult GpuDeviceMemoryManager::GetBlock(GpuDeviceMemoryBlock *block) {
+ assert(block->buffer == VK_NULL_HANDLE); // avoid possible overwrite/leak of an allocated block
+ VkResult result = VK_SUCCESS;
+ MemoryChunk *pChunk = nullptr;
+ // Look for a chunk with available offsets.
+ for (auto &chunk : chunk_list_) {
+ if (!chunk.available_offsets.empty()) {
+ pChunk = &chunk;
+ break;
+ }
+ }
+ // If no chunks with available offsets, allocate device memory and set up offsets.
+ if (pChunk == nullptr) {
+ MemoryChunk new_chunk;
+ result = AllocMemoryChunk(new_chunk);
+ if (result == VK_SUCCESS) {
+ new_chunk.available_offsets.resize(blocks_per_chunk_);
+ for (uint32_t offset = 0, i = 0; i < blocks_per_chunk_; offset += block_size_, ++i) {
+ new_chunk.available_offsets[i] = offset;
+ }
+ chunk_list_.push_front(std::move(new_chunk));
+ pChunk = &chunk_list_.front();
+ } else {
+ // Indicate failure
+ block->buffer = VK_NULL_HANDLE;
+ block->memory = VK_NULL_HANDLE;
+ return result;
+ }
+ }
+ // Give the requester an available offset
+ block->buffer = pChunk->buffer;
+ block->memory = pChunk->memory;
+ block->offset = pChunk->available_offsets.back();
+ pChunk->available_offsets.pop_back();
+ return result;
+}
+
+void GpuDeviceMemoryManager::PutBackBlock(VkBuffer buffer, VkDeviceMemory memory, uint32_t offset) {
+ GpuDeviceMemoryBlock block = {buffer, memory, offset};
+ PutBackBlock(block);
+}
+
+void GpuDeviceMemoryManager::PutBackBlock(GpuDeviceMemoryBlock &block) {
+ // Find the chunk belonging to the allocated offset and make the offset available again
+ auto chunk = std::find_if(std::begin(chunk_list_), std::end(chunk_list_),
+ [&block](const MemoryChunk &c) { return c.buffer == block.buffer; });
+ if (chunk_list_.end() == chunk) {
+ assert(false);
+ } else {
+ chunk->available_offsets.push_back(block.offset);
+ if (chunk->available_offsets.size() == blocks_per_chunk_) {
+ // All offsets have been returned
+ FreeMemoryChunk(*chunk);
+ chunk_list_.erase(chunk);
+ }
+ }
+}
+
+void ResetBlock(GpuDeviceMemoryBlock &block) {
+ block.buffer = VK_NULL_HANDLE;
+ block.memory = VK_NULL_HANDLE;
+ block.offset = 0;
+}
+
+bool BlockUsed(GpuDeviceMemoryBlock &block) { return (block.buffer != VK_NULL_HANDLE) && (block.memory != VK_NULL_HANDLE); }
+
+bool GpuDeviceMemoryManager::MemoryTypeFromProperties(uint32_t typeBits, VkFlags requirements_mask, uint32_t *typeIndex) {
+ // Search memtypes to find first index with those properties
+ const VkPhysicalDeviceMemoryProperties *props = dev_data_->GetPhysicalDeviceMemoryProperties();
+ for (uint32_t i = 0; i < VK_MAX_MEMORY_TYPES; i++) {
+ if ((typeBits & 1) == 1) {
+ // Type is available, does it match user properties?
+ if ((props->memoryTypes[i].propertyFlags & requirements_mask) == requirements_mask) {
+ *typeIndex = i;
+ return true;
+ }
+ }
+ typeBits >>= 1;
+ }
+ // No memory types matched, return failure
+ return false;
+}
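// Worked example of the scan above (hypothetical device): with memoryTypeBits =
// 0b0110 and requirements_mask = HOST_VISIBLE | HOST_COHERENT, index 0 is skipped
// because its bit is clear, index 1 is tested first, and the first index whose
// propertyFlags contain both flags is written to *typeIndex; if none match, the
// function returns false and the caller destroys the buffer it just created.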
+
+VkResult GpuDeviceMemoryManager::AllocMemoryChunk(MemoryChunk &chunk) {
+ VkBuffer buffer;
+ VkDeviceMemory memory;
+ VkBufferCreateInfo buffer_create_info = {};
+ VkMemoryRequirements mem_reqs = {};
+ VkMemoryAllocateInfo mem_alloc = {};
+ VkResult result = VK_SUCCESS;
+ bool pass;
+ void *pData;
+ const auto *dispatch_table = dev_data_->GetDispatchTable();
+
+ buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+ buffer_create_info.usage = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
+ buffer_create_info.size = chunk_size_;
+ result = dispatch_table->CreateBuffer(dev_data_->GetDevice(), &buffer_create_info, NULL, &buffer);
+ if (result != VK_SUCCESS) {
+ return result;
+ }
+
+ dispatch_table->GetBufferMemoryRequirements(dev_data_->GetDevice(), buffer, &mem_reqs);
+
+ mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ mem_alloc.pNext = NULL;
+ mem_alloc.allocationSize = mem_reqs.size;
+ pass = MemoryTypeFromProperties(mem_reqs.memoryTypeBits,
+ VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
+ &mem_alloc.memoryTypeIndex);
+ if (!pass) {
+ dispatch_table->DestroyBuffer(dev_data_->GetDevice(), buffer, NULL);
+ return result;
+ }
+ result = dispatch_table->AllocateMemory(dev_data_->GetDevice(), &mem_alloc, NULL, &memory);
+ if (result != VK_SUCCESS) {
+ dispatch_table->DestroyBuffer(dev_data_->GetDevice(), buffer, NULL);
+ return result;
+ }
+
+ result = dispatch_table->BindBufferMemory(dev_data_->GetDevice(), buffer, memory, 0);
+ if (result != VK_SUCCESS) {
+ dispatch_table->DestroyBuffer(dev_data_->GetDevice(), buffer, NULL);
+ dispatch_table->FreeMemory(dev_data_->GetDevice(), memory, NULL);
+ return result;
+ }
+
+ result = dispatch_table->MapMemory(dev_data_->GetDevice(), memory, 0, mem_alloc.allocationSize, 0, &pData);
+ if (result == VK_SUCCESS) {
+ memset(pData, 0, chunk_size_);
+ dispatch_table->UnmapMemory(dev_data_->GetDevice(), memory);
+ } else {
+ dispatch_table->DestroyBuffer(dev_data_->GetDevice(), buffer, NULL);
+ dispatch_table->FreeMemory(dev_data_->GetDevice(), memory, NULL);
+ return result;
+ }
+ chunk.buffer = buffer;
+ chunk.memory = memory;
+ return result;
+}
+
+void GpuDeviceMemoryManager::FreeMemoryChunk(MemoryChunk &chunk) {
+ dev_data_->GetDispatchTable()->DestroyBuffer(dev_data_->GetDevice(), chunk.buffer, NULL);
+ dev_data_->GetDispatchTable()->FreeMemory(dev_data_->GetDevice(), chunk.memory, NULL);
+}
+
+void GpuDeviceMemoryManager::FreeAllBlocks() {
+ for (auto &chunk : chunk_list_) {
+ FreeMemoryChunk(chunk);
+ }
+ chunk_list_.clear();
+}
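// Minimal round-trip sketch for the memory manager above (illustrative only; the
// caller and the mem_manager pointer are hypothetical, not part of this change).
// A fresh block must start zeroed, since GetBlock asserts buffer == VK_NULL_HANDLE,
// and PutBackBlock returns the offset to the owning chunk's free list.
static void GpuMemoryManagerUsageSketch(GpuDeviceMemoryManager *mem_manager) {
    GpuDeviceMemoryBlock block = {};
    if (mem_manager->GetBlock(&block) == VK_SUCCESS) {
        // block.buffer / block.memory / block.offset identify one record-sized slot
        // in a host-visible, host-coherent chunk; use it, then hand it back.
        mem_manager->PutBackBlock(block);
    }
}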
// Implementation for Descriptor Set Manager class
-GpuDescriptorSetManager::GpuDescriptorSetManager(CoreChecks *dev_data) { dev_data_ = dev_data; }
+GpuDescriptorSetManager::GpuDescriptorSetManager(layer_data *dev_data) { dev_data_ = dev_data; }
GpuDescriptorSetManager::~GpuDescriptorSetManager() {
for (auto &pool : desc_pool_map_) {
- DispatchDestroyDescriptorPool(dev_data_->device, pool.first, NULL);
+ dev_data_->GetDispatchTable()->DestroyDescriptorPool(dev_data_->GetDevice(), pool.first, NULL);
}
desc_pool_map_.clear();
}
VkResult GpuDescriptorSetManager::GetDescriptorSets(uint32_t count, VkDescriptorPool *pool,
std::vector<VkDescriptorSet> *desc_sets) {
+ auto gpu_state = dev_data_->GetGpuValidationState();
const uint32_t default_pool_size = kItemsPerChunk;
VkResult result = VK_SUCCESS;
VkDescriptorPool pool_to_use = VK_NULL_HANDLE;
@@ -86,7 +254,7 @@ VkResult GpuDescriptorSetManager::GetDescriptorSets(uint32_t count, VkDescriptor
desc_pool_info.maxSets = pool_count;
desc_pool_info.poolSizeCount = 1;
desc_pool_info.pPoolSizes = &size_counts;
- result = DispatchCreateDescriptorPool(dev_data_->device, &desc_pool_info, NULL, &pool_to_use);
+ result = dev_data_->GetDispatchTable()->CreateDescriptorPool(dev_data_->GetDevice(), &desc_pool_info, NULL, &pool_to_use);
assert(result == VK_SUCCESS);
if (result != VK_SUCCESS) {
return result;
@@ -94,12 +262,12 @@ VkResult GpuDescriptorSetManager::GetDescriptorSets(uint32_t count, VkDescriptor
desc_pool_map_[pool_to_use].size = desc_pool_info.maxSets;
desc_pool_map_[pool_to_use].used = 0;
}
- std::vector<VkDescriptorSetLayout> desc_layouts(count, dev_data_->gpu_validation_state->debug_desc_layout);
+ std::vector<VkDescriptorSetLayout> desc_layouts(count, gpu_state->debug_desc_layout);
VkDescriptorSetAllocateInfo alloc_info = {VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, NULL, pool_to_use, count,
desc_layouts.data()};
- result = DispatchAllocateDescriptorSets(dev_data_->device, &alloc_info, desc_sets->data());
+ result = dev_data_->GetDispatchTable()->AllocateDescriptorSets(dev_data_->GetDevice(), &alloc_info, desc_sets->data());
assert(result == VK_SUCCESS);
if (result != VK_SUCCESS) {
return result;
@@ -112,193 +280,86 @@ VkResult GpuDescriptorSetManager::GetDescriptorSets(uint32_t count, VkDescriptor
void GpuDescriptorSetManager::PutBackDescriptorSet(VkDescriptorPool desc_pool, VkDescriptorSet desc_set) {
auto iter = desc_pool_map_.find(desc_pool);
if (iter != desc_pool_map_.end()) {
- VkResult result = DispatchFreeDescriptorSets(dev_data_->device, desc_pool, 1, &desc_set);
+ VkResult result = dev_data_->GetDispatchTable()->FreeDescriptorSets(dev_data_->GetDevice(), desc_pool, 1, &desc_set);
assert(result == VK_SUCCESS);
if (result != VK_SUCCESS) {
return;
}
desc_pool_map_[desc_pool].used--;
if (0 == desc_pool_map_[desc_pool].used) {
- DispatchDestroyDescriptorPool(dev_data_->device, desc_pool, NULL);
+ dev_data_->GetDispatchTable()->DestroyDescriptorPool(dev_data_->GetDevice(), desc_pool, NULL);
desc_pool_map_.erase(desc_pool);
}
}
return;
}
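// Usage sketch for the descriptor set manager (illustrative only; the caller and
// variable names are hypothetical). GetDescriptorSets hands back a pool handle and
// sets allocated with the debug descriptor set layout; PutBackDescriptorSet frees a
// set and destroys the pool once its last set has been returned.
static void GpuDescriptorSetManagerUsageSketch(GpuDescriptorSetManager *desc_set_manager) {
    VkDescriptorPool pool = VK_NULL_HANDLE;
    std::vector<VkDescriptorSet> sets(1);
    if (desc_set_manager->GetDescriptorSets(1, &pool, &sets) == VK_SUCCESS) {
        // sets[0] would be bound at the layer's debug descriptor set index.
        desc_set_manager->PutBackDescriptorSet(pool, sets[0]);
    }
}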
-// Trampolines to make VMA call Dispatch for Vulkan calls
-static VKAPI_ATTR void VKAPI_CALL gpuVkGetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceProperties *pProperties) {
- DispatchGetPhysicalDeviceProperties(physicalDevice, pProperties);
-}
-static VKAPI_ATTR void VKAPI_CALL gpuVkGetPhysicalDeviceMemoryProperties(VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceMemoryProperties *pMemoryProperties) {
- DispatchGetPhysicalDeviceMemoryProperties(physicalDevice, pMemoryProperties);
-}
-static VKAPI_ATTR VkResult VKAPI_CALL gpuVkAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
- const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory) {
- return DispatchAllocateMemory(device, pAllocateInfo, pAllocator, pMemory);
-}
-static VKAPI_ATTR void VKAPI_CALL gpuVkFreeMemory(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks *pAllocator) {
- DispatchFreeMemory(device, memory, pAllocator);
-}
-static VKAPI_ATTR VkResult VKAPI_CALL gpuVkMapMemory(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size,
- VkMemoryMapFlags flags, void **ppData) {
- return DispatchMapMemory(device, memory, offset, size, flags, ppData);
-}
-static VKAPI_ATTR void VKAPI_CALL gpuVkUnmapMemory(VkDevice device, VkDeviceMemory memory) { DispatchUnmapMemory(device, memory); }
-static VKAPI_ATTR VkResult VKAPI_CALL gpuVkFlushMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount,
- const VkMappedMemoryRange *pMemoryRanges) {
- return DispatchFlushMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
-}
-static VKAPI_ATTR VkResult VKAPI_CALL gpuVkInvalidateMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount,
- const VkMappedMemoryRange *pMemoryRanges) {
- return DispatchInvalidateMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
-}
-static VKAPI_ATTR VkResult VKAPI_CALL gpuVkBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory memory,
- VkDeviceSize memoryOffset) {
- return DispatchBindBufferMemory(device, buffer, memory, memoryOffset);
-}
-static VKAPI_ATTR VkResult VKAPI_CALL gpuVkBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory memory,
- VkDeviceSize memoryOffset) {
- return DispatchBindImageMemory(device, image, memory, memoryOffset);
-}
-static VKAPI_ATTR void VKAPI_CALL gpuVkGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
- VkMemoryRequirements *pMemoryRequirements) {
- DispatchGetBufferMemoryRequirements(device, buffer, pMemoryRequirements);
-}
-static VKAPI_ATTR void VKAPI_CALL gpuVkGetImageMemoryRequirements(VkDevice device, VkImage image,
- VkMemoryRequirements *pMemoryRequirements) {
- DispatchGetImageMemoryRequirements(device, image, pMemoryRequirements);
-}
-static VKAPI_ATTR VkResult VKAPI_CALL gpuVkCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
- const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer) {
- return DispatchCreateBuffer(device, pCreateInfo, pAllocator, pBuffer);
-}
-static VKAPI_ATTR void VKAPI_CALL gpuVkDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
- return DispatchDestroyBuffer(device, buffer, pAllocator);
-}
-static VKAPI_ATTR VkResult VKAPI_CALL gpuVkCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
- const VkAllocationCallbacks *pAllocator, VkImage *pImage) {
- return DispatchCreateImage(device, pCreateInfo, pAllocator, pImage);
-}
-static VKAPI_ATTR void VKAPI_CALL gpuVkDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
- DispatchDestroyImage(device, image, pAllocator);
-}
-static VKAPI_ATTR void VKAPI_CALL gpuVkCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
- uint32_t regionCount, const VkBufferCopy *pRegions) {
- DispatchCmdCopyBuffer(commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions);
-}
-
-VkResult CoreChecks::GpuInitializeVma() {
- VmaVulkanFunctions functions;
- VmaAllocatorCreateInfo allocatorInfo = {};
- allocatorInfo.device = device;
- ValidationObject *device_object = GetLayerDataPtr(get_dispatch_key(allocatorInfo.device), layer_data_map);
- ValidationObject *validation_data =
- ValidationObject::GetValidationObject(device_object->object_dispatch, LayerObjectTypeCoreValidation);
- CoreChecks *core_checks = static_cast<CoreChecks *>(validation_data);
- allocatorInfo.physicalDevice = core_checks->physical_device;
-
- functions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)gpuVkGetPhysicalDeviceProperties;
- functions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)gpuVkGetPhysicalDeviceMemoryProperties;
- functions.vkAllocateMemory = (PFN_vkAllocateMemory)gpuVkAllocateMemory;
- functions.vkFreeMemory = (PFN_vkFreeMemory)gpuVkFreeMemory;
- functions.vkMapMemory = (PFN_vkMapMemory)gpuVkMapMemory;
- functions.vkUnmapMemory = (PFN_vkUnmapMemory)gpuVkUnmapMemory;
- functions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)gpuVkFlushMappedMemoryRanges;
- functions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)gpuVkInvalidateMappedMemoryRanges;
- functions.vkBindBufferMemory = (PFN_vkBindBufferMemory)gpuVkBindBufferMemory;
- functions.vkBindImageMemory = (PFN_vkBindImageMemory)gpuVkBindImageMemory;
- functions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)gpuVkGetBufferMemoryRequirements;
- functions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)gpuVkGetImageMemoryRequirements;
- functions.vkCreateBuffer = (PFN_vkCreateBuffer)gpuVkCreateBuffer;
- functions.vkDestroyBuffer = (PFN_vkDestroyBuffer)gpuVkDestroyBuffer;
- functions.vkCreateImage = (PFN_vkCreateImage)gpuVkCreateImage;
- functions.vkDestroyImage = (PFN_vkDestroyImage)gpuVkDestroyImage;
- functions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)gpuVkCmdCopyBuffer;
- allocatorInfo.pVulkanFunctions = &functions;
-
- return vmaCreateAllocator(&allocatorInfo, &gpu_validation_state->vmaAllocator);
+void GpuDescriptorSetManager::DestroyDescriptorPools() {
+ for (auto &pool : desc_pool_map_) {
+ dev_data_->GetDispatchTable()->DestroyDescriptorPool(dev_data_->GetDevice(), pool.first, NULL);
+ }
+ desc_pool_map_.clear();
}
// Convenience function for reporting problems with setting up GPU Validation.
-void CoreChecks::ReportSetupProblem(VkDebugReportObjectTypeEXT object_type, uint64_t object_handle,
+void CoreChecks::ReportSetupProblem(const layer_data *dev_data, VkDebugReportObjectTypeEXT object_type, uint64_t object_handle,
const char *const specific_message) {
log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle, "UNASSIGNED-GPU-Assisted Validation Error. ",
"Detail: (%s)", specific_message);
}
// Turn on necessary device features.
-void CoreChecks::GpuPreCallRecordCreateDevice(VkPhysicalDevice gpu, safe_VkDeviceCreateInfo *modified_create_info,
+void CoreChecks::GpuPreCallRecordCreateDevice(VkPhysicalDevice gpu, std::unique_ptr<safe_VkDeviceCreateInfo> &create_info,
VkPhysicalDeviceFeatures *supported_features) {
if (supported_features->fragmentStoresAndAtomics || supported_features->vertexPipelineStoresAndAtomics) {
- VkPhysicalDeviceFeatures *features = nullptr;
- if (modified_create_info->pEnabledFeatures) {
- // If pEnabledFeatures, VkPhysicalDeviceFeatures2 in pNext chain is not allowed
- features = const_cast<VkPhysicalDeviceFeatures *>(modified_create_info->pEnabledFeatures);
- } else {
- VkPhysicalDeviceFeatures2 *features2 = nullptr;
- features2 =
- const_cast<VkPhysicalDeviceFeatures2 *>(lvl_find_in_chain<VkPhysicalDeviceFeatures2>(modified_create_info->pNext));
- if (features2) features = &features2->features;
- }
- if (features) {
- features->fragmentStoresAndAtomics = supported_features->fragmentStoresAndAtomics;
- features->vertexPipelineStoresAndAtomics = supported_features->vertexPipelineStoresAndAtomics;
- } else {
- VkPhysicalDeviceFeatures new_features = {};
- new_features.fragmentStoresAndAtomics = supported_features->fragmentStoresAndAtomics;
- new_features.vertexPipelineStoresAndAtomics = supported_features->vertexPipelineStoresAndAtomics;
- delete modified_create_info->pEnabledFeatures;
- modified_create_info->pEnabledFeatures = new VkPhysicalDeviceFeatures(new_features);
+ VkPhysicalDeviceFeatures new_features = {};
+ if (create_info->pEnabledFeatures) {
+ new_features = *create_info->pEnabledFeatures;
}
+ new_features.fragmentStoresAndAtomics = supported_features->fragmentStoresAndAtomics;
+ new_features.vertexPipelineStoresAndAtomics = supported_features->vertexPipelineStoresAndAtomics;
+ delete create_info->pEnabledFeatures;
+ create_info->pEnabledFeatures = new VkPhysicalDeviceFeatures(new_features);
}
}
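// Effect of the block above, with hypothetical inputs: if the application passed
// pEnabledFeatures with fragmentStoresAndAtomics = VK_FALSE but the device reports
// support, the copied feature struct re-enables it (and vertexPipelineStoresAndAtomics)
// in the create info used for device creation.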
// Perform initializations that can be done at Create Device time.
-void CoreChecks::GpuPostCallRecordCreateDevice(const CHECK_ENABLED *enables, const VkDeviceCreateInfo *pCreateInfo) {
- // Set instance-level enables in device-enable data structure if using legacy settings
- enabled.gpu_validation = enables->gpu_validation;
- enabled.gpu_validation_reserve_binding_slot = enables->gpu_validation_reserve_binding_slot;
+void CoreChecks::GpuPostCallRecordCreateDevice(layer_data *dev_data) {
+ auto gpu_state = GetGpuValidationState();
+ const auto *dispatch_table = GetDispatchTable();
- gpu_validation_state = std::unique_ptr<GpuValidationState>(new GpuValidationState);
- gpu_validation_state->reserve_binding_slot = enables->gpu_validation_reserve_binding_slot;
+ gpu_state->aborted = false;
+ gpu_state->reserve_binding_slot = false;
+ gpu_state->barrier_command_pool = VK_NULL_HANDLE;
+ gpu_state->barrier_command_buffer = VK_NULL_HANDLE;
- if (phys_dev_props.apiVersion < VK_API_VERSION_1_1) {
- ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+ if (GetPDProperties()->apiVersion < VK_API_VERSION_1_1) {
+ ReportSetupProblem(dev_data, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(GetDevice()),
"GPU-Assisted validation requires Vulkan 1.1 or later. GPU-Assisted Validation disabled.");
- gpu_validation_state->aborted = true;
+ gpu_state->aborted = true;
return;
}
-
- // If api version 1.1 or later, SetDeviceLoaderData will be in the loader
- auto chain_info = get_chain_info(pCreateInfo, VK_LOADER_DATA_CALLBACK);
- assert(chain_info->u.pfnSetDeviceLoaderData);
- gpu_validation_state->vkSetDeviceLoaderData = chain_info->u.pfnSetDeviceLoaderData;
-
// Some devices have extremely high limits here, so set a reasonable max because we have to pad
// the pipeline layout with dummy descriptor set layouts.
- gpu_validation_state->adjusted_max_desc_sets = phys_dev_props.limits.maxBoundDescriptorSets;
- gpu_validation_state->adjusted_max_desc_sets = std::min(33U, gpu_validation_state->adjusted_max_desc_sets);
+ gpu_state->adjusted_max_desc_sets = GetPDProperties()->limits.maxBoundDescriptorSets;
+ gpu_state->adjusted_max_desc_sets = std::min(33U, gpu_state->adjusted_max_desc_sets);
// We can't do anything if there is only one.
// Device probably not a legit Vulkan device, since there should be at least 4. Protect ourselves.
- if (gpu_validation_state->adjusted_max_desc_sets == 1) {
- ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+ if (gpu_state->adjusted_max_desc_sets == 1) {
+ ReportSetupProblem(dev_data, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(GetDevice()),
"Device can bind only a single descriptor set. GPU-Assisted Validation disabled.");
- gpu_validation_state->aborted = true;
+ gpu_state->aborted = true;
return;
}
- gpu_validation_state->desc_set_bind_index = gpu_validation_state->adjusted_max_desc_sets - 1;
- log_msg(report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
- "UNASSIGNED-GPU-Assisted Validation. ", "Shaders using descriptor set at index %d. ",
- gpu_validation_state->desc_set_bind_index);
+ gpu_state->desc_set_bind_index = gpu_state->adjusted_max_desc_sets - 1;
+ log_msg(GetReportData(), VK_DEBUG_REPORT_INFORMATION_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ HandleToUint64(GetDevice()), "UNASSIGNED-GPU-Assisted Validation. ", "Shaders using descriptor set at index %d. ",
+ gpu_state->desc_set_bind_index);
- gpu_validation_state->output_buffer_size = sizeof(uint32_t) * (spvtools::kInstMaxOutCnt + 1);
- VkResult result = GpuInitializeVma();
- assert(result == VK_SUCCESS);
- std::unique_ptr<GpuDescriptorSetManager> desc_set_manager(new GpuDescriptorSetManager(this));
+ std::unique_ptr<GpuDeviceMemoryManager> memory_manager(
+ new GpuDeviceMemoryManager(dev_data, sizeof(uint32_t) * (spvtools::kInstMaxOutCnt + 1)));
+ std::unique_ptr<GpuDescriptorSetManager> desc_set_manager(new GpuDescriptorSetManager(dev_data));
// The descriptor indexing checks require only the first "output" binding.
const VkDescriptorSetLayoutBinding debug_desc_layout_bindings[kNumBindingsInSet] = {
@@ -306,14 +367,7 @@ void CoreChecks::GpuPostCallRecordCreateDevice(const CHECK_ENABLED *enables, con
0, // output
VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
1,
- VK_SHADER_STAGE_ALL_GRAPHICS | VK_SHADER_STAGE_COMPUTE_BIT | kShaderStageAllRayTracing,
- NULL,
- },
- {
- 1, // input
- VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
- 1,
- VK_SHADER_STAGE_ALL_GRAPHICS | VK_SHADER_STAGE_COMPUTE_BIT | kShaderStageAllRayTracing,
+ VK_SHADER_STAGE_ALL_GRAPHICS,
NULL,
},
};
@@ -324,263 +378,185 @@ void CoreChecks::GpuPostCallRecordCreateDevice(const CHECK_ENABLED *enables, con
const VkDescriptorSetLayoutCreateInfo dummy_desc_layout_info = {VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, NULL, 0, 0,
NULL};
- result = DispatchCreateDescriptorSetLayout(device, &debug_desc_layout_info, NULL, &gpu_validation_state->debug_desc_layout);
+ VkResult result =
+ dispatch_table->CreateDescriptorSetLayout(GetDevice(), &debug_desc_layout_info, NULL, &gpu_state->debug_desc_layout);
// This is a layout used to "pad" a pipeline layout to fill in any gaps to the selected bind index.
VkResult result2 =
- DispatchCreateDescriptorSetLayout(device, &dummy_desc_layout_info, NULL, &gpu_validation_state->dummy_desc_layout);
+ dispatch_table->CreateDescriptorSetLayout(GetDevice(), &dummy_desc_layout_info, NULL, &gpu_state->dummy_desc_layout);
assert((result == VK_SUCCESS) && (result2 == VK_SUCCESS));
if ((result != VK_SUCCESS) || (result2 != VK_SUCCESS)) {
- ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+ ReportSetupProblem(dev_data, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(GetDevice()),
"Unable to create descriptor set layout. GPU-Assisted Validation disabled.");
if (result == VK_SUCCESS) {
- DispatchDestroyDescriptorSetLayout(device, gpu_validation_state->debug_desc_layout, NULL);
+ dispatch_table->DestroyDescriptorSetLayout(GetDevice(), gpu_state->debug_desc_layout, NULL);
}
if (result2 == VK_SUCCESS) {
- DispatchDestroyDescriptorSetLayout(device, gpu_validation_state->dummy_desc_layout, NULL);
+ dispatch_table->DestroyDescriptorSetLayout(GetDevice(), gpu_state->dummy_desc_layout, NULL);
}
- gpu_validation_state->debug_desc_layout = VK_NULL_HANDLE;
- gpu_validation_state->dummy_desc_layout = VK_NULL_HANDLE;
- gpu_validation_state->aborted = true;
+ gpu_state->debug_desc_layout = VK_NULL_HANDLE;
+ gpu_state->dummy_desc_layout = VK_NULL_HANDLE;
+ gpu_state->aborted = true;
return;
}
- gpu_validation_state->desc_set_manager = std::move(desc_set_manager);
+ gpu_state->memory_manager = std::move(memory_manager);
+ gpu_state->desc_set_manager = std::move(desc_set_manager);
}
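// Worked example (hypothetical numbers): on a device reporting maxBoundDescriptorSets = 8,
// the code above clamps adjusted_max_desc_sets to min(33, 8) = 8 and picks
// desc_set_bind_index = 7, so instrumented shaders are compiled to access their debug
// storage buffer at (set = 7, binding = 0) while the application's own layouts can use
// sets 0..6. The clamp to 33 only matters on implementations that advertise very large
// limits, because every unused slot below the debug slot has to be padded with a dummy
// descriptor set layout in each pipeline layout.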
// Clean up device-related resources
-void CoreChecks::GpuPreCallRecordDestroyDevice() {
- for (auto &queue_barrier_command_info_kv : gpu_validation_state->queue_barrier_command_infos) {
- GpuQueueBarrierCommandInfo &queue_barrier_command_info = queue_barrier_command_info_kv.second;
-
- DispatchFreeCommandBuffers(device, queue_barrier_command_info.barrier_command_pool, 1,
- &queue_barrier_command_info.barrier_command_buffer);
- queue_barrier_command_info.barrier_command_buffer = VK_NULL_HANDLE;
+void CoreChecks::GpuPreCallRecordDestroyDevice(layer_data *dev_data) {
+ auto gpu_state = GetGpuValidationState();
- DispatchDestroyCommandPool(device, queue_barrier_command_info.barrier_command_pool, NULL);
- queue_barrier_command_info.barrier_command_pool = VK_NULL_HANDLE;
+ if (gpu_state->barrier_command_buffer) {
+ GetDispatchTable()->FreeCommandBuffers(GetDevice(), gpu_state->barrier_command_pool, 1, &gpu_state->barrier_command_buffer);
+ gpu_state->barrier_command_buffer = VK_NULL_HANDLE;
}
- gpu_validation_state->queue_barrier_command_infos.clear();
- if (gpu_validation_state->debug_desc_layout) {
- DispatchDestroyDescriptorSetLayout(device, gpu_validation_state->debug_desc_layout, NULL);
- gpu_validation_state->debug_desc_layout = VK_NULL_HANDLE;
+ if (gpu_state->barrier_command_pool) {
+ GetDispatchTable()->DestroyCommandPool(GetDevice(), gpu_state->barrier_command_pool, NULL);
+ gpu_state->barrier_command_pool = VK_NULL_HANDLE;
}
- if (gpu_validation_state->dummy_desc_layout) {
- DispatchDestroyDescriptorSetLayout(device, gpu_validation_state->dummy_desc_layout, NULL);
- gpu_validation_state->dummy_desc_layout = VK_NULL_HANDLE;
+ if (gpu_state->debug_desc_layout) {
+ GetDispatchTable()->DestroyDescriptorSetLayout(GetDevice(), gpu_state->debug_desc_layout, NULL);
+ gpu_state->debug_desc_layout = VK_NULL_HANDLE;
}
- gpu_validation_state->desc_set_manager.reset();
- if (gpu_validation_state->vmaAllocator) {
- vmaDestroyAllocator(gpu_validation_state->vmaAllocator);
+ if (gpu_state->dummy_desc_layout) {
+ GetDispatchTable()->DestroyDescriptorSetLayout(GetDevice(), gpu_state->dummy_desc_layout, NULL);
+ gpu_state->dummy_desc_layout = VK_NULL_HANDLE;
}
+ gpu_state->memory_manager->FreeAllBlocks();
+ gpu_state->desc_set_manager->DestroyDescriptorPools();
}
// Modify the pipeline layout to include our debug descriptor set and any needed padding with the dummy descriptor set.
-bool CoreChecks::GpuPreCallCreatePipelineLayout(const VkPipelineLayoutCreateInfo *pCreateInfo,
+bool CoreChecks::GpuPreCallCreatePipelineLayout(layer_data *device_data, const VkPipelineLayoutCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator, VkPipelineLayout *pPipelineLayout,
std::vector<VkDescriptorSetLayout> *new_layouts,
VkPipelineLayoutCreateInfo *modified_create_info) {
- if (gpu_validation_state->aborted) {
+ auto gpu_state = GetGpuValidationState();
+ if (gpu_state->aborted) {
return false;
}
- if (modified_create_info->setLayoutCount >= gpu_validation_state->adjusted_max_desc_sets) {
+ if (modified_create_info->setLayoutCount >= gpu_state->adjusted_max_desc_sets) {
std::ostringstream strm;
- strm << "Pipeline Layout conflict with validation's descriptor set at slot " << gpu_validation_state->desc_set_bind_index
- << ". "
+ strm << "Pipeline Layout conflict with validation's descriptor set at slot " << gpu_state->desc_set_bind_index << ". "
<< "Application has too many descriptor sets in the pipeline layout to continue with gpu validation. "
<< "Validation is not modifying the pipeline layout. "
<< "Instrumented shaders are replaced with non-instrumented shaders.";
- ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device), strm.str().c_str());
+ ReportSetupProblem(device_data, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(GetDevice()), strm.str().c_str());
} else {
// Modify the pipeline layout by:
        // 1. Copying the caller's descriptor set layouts
        // 2. Filling in dummy descriptor layouts up to the max binding
        // 3. Filling in the debug descriptor layout at the max binding slot
- new_layouts->reserve(gpu_validation_state->adjusted_max_desc_sets);
+ new_layouts->reserve(gpu_state->adjusted_max_desc_sets);
new_layouts->insert(new_layouts->end(), &pCreateInfo->pSetLayouts[0],
&pCreateInfo->pSetLayouts[pCreateInfo->setLayoutCount]);
- for (uint32_t i = pCreateInfo->setLayoutCount; i < gpu_validation_state->adjusted_max_desc_sets - 1; ++i) {
- new_layouts->push_back(gpu_validation_state->dummy_desc_layout);
+ for (uint32_t i = pCreateInfo->setLayoutCount; i < gpu_state->adjusted_max_desc_sets - 1; ++i) {
+ new_layouts->push_back(gpu_state->dummy_desc_layout);
}
- new_layouts->push_back(gpu_validation_state->debug_desc_layout);
+ new_layouts->push_back(gpu_state->debug_desc_layout);
modified_create_info->pSetLayouts = new_layouts->data();
- modified_create_info->setLayoutCount = gpu_validation_state->adjusted_max_desc_sets;
+ modified_create_info->setLayoutCount = gpu_state->adjusted_max_desc_sets;
}
return true;
}
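// Worked example (hypothetical values): with adjusted_max_desc_sets = 8 and an application
// pipeline layout that supplies 2 set layouts, the modified create info passed down the
// chain contains 8 layouts:
//     pSetLayouts    = { app0, app1, dummy, dummy, dummy, dummy, dummy, debug }
//     setLayoutCount = 8
// so the debug descriptor set always lands in the reserved slot (set = 7) regardless of how
// many sets the application uses. If the application already occupies every available slot,
// the layout is left untouched and the instrumented shaders are swapped back for the
// originals at pipeline-creation time instead.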
// Clean up GPU validation after the CreatePipelineLayout call is made
-void CoreChecks::GpuPostCallCreatePipelineLayout(VkResult result) {
+void CoreChecks::GpuPostCallCreatePipelineLayout(layer_data *device_data, VkResult result) {
+ auto gpu_state = GetGpuValidationState();
// Clean up GPU validation
if (result != VK_SUCCESS) {
- ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+ ReportSetupProblem(device_data, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(GetDevice()),
"Unable to create pipeline layout. Device could become unstable.");
- gpu_validation_state->aborted = true;
+ gpu_state->aborted = true;
}
}
// Free the device memory and descriptor set associated with a command buffer.
-void CoreChecks::GpuResetCommandBuffer(const VkCommandBuffer commandBuffer) {
- if (gpu_validation_state->aborted) {
+void CoreChecks::GpuPreCallRecordFreeCommandBuffers(layer_data *dev_data, uint32_t commandBufferCount,
+ const VkCommandBuffer *pCommandBuffers) {
+ auto gpu_state = GetGpuValidationState();
+ if (gpu_state->aborted) {
return;
}
- auto gpu_buffer_list = gpu_validation_state->GetGpuBufferInfo(commandBuffer);
- for (auto buffer_info : gpu_buffer_list) {
- vmaDestroyBuffer(gpu_validation_state->vmaAllocator, buffer_info.output_mem_block.buffer,
- buffer_info.output_mem_block.allocation);
- if (buffer_info.input_mem_block.buffer) {
- vmaDestroyBuffer(gpu_validation_state->vmaAllocator, buffer_info.input_mem_block.buffer,
- buffer_info.input_mem_block.allocation);
- }
- if (buffer_info.desc_set != VK_NULL_HANDLE) {
- gpu_validation_state->desc_set_manager->PutBackDescriptorSet(buffer_info.desc_pool, buffer_info.desc_set);
+ for (uint32_t i = 0; i < commandBufferCount; ++i) {
+ auto cb_node = GetCBNode(pCommandBuffers[i]);
+ if (cb_node) {
+ for (auto &buffer_info : cb_node->gpu_buffer_list) {
+ if (BlockUsed(buffer_info.mem_block)) {
+ gpu_state->memory_manager->PutBackBlock(buffer_info.mem_block);
+ ResetBlock(buffer_info.mem_block);
+ }
+ if (buffer_info.desc_set != VK_NULL_HANDLE) {
+ gpu_state->desc_set_manager->PutBackDescriptorSet(buffer_info.desc_pool, buffer_info.desc_set);
+ }
+ }
+ cb_node->gpu_buffer_list.clear();
}
}
- gpu_validation_state->command_buffer_map.erase(commandBuffer);
}
// Just gives a warning about a possible deadlock.
-void CoreChecks::GpuPreCallValidateCmdWaitEvents(VkPipelineStageFlags sourceStageMask) {
+void CoreChecks::GpuPreCallValidateCmdWaitEvents(layer_data *dev_data, VkPipelineStageFlags sourceStageMask) {
if (sourceStageMask & VK_PIPELINE_STAGE_HOST_BIT) {
- ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+ ReportSetupProblem(dev_data, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(GetDevice()),
"CmdWaitEvents recorded with VK_PIPELINE_STAGE_HOST_BIT set. "
"GPU_Assisted validation waits on queue completion. "
"This wait could block the host's signaling of this event, resulting in deadlock.");
}
}
-std::vector<safe_VkGraphicsPipelineCreateInfo> CoreChecks::GpuPreCallRecordCreateGraphicsPipelines(
- VkPipelineCache pipelineCache, uint32_t count, const VkGraphicsPipelineCreateInfo *pCreateInfos,
- const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines, std::vector<std::unique_ptr<PIPELINE_STATE>> &pipe_state) {
- std::vector<safe_VkGraphicsPipelineCreateInfo> new_pipeline_create_infos;
- GpuPreCallRecordPipelineCreations(count, pCreateInfos, pAllocator, pPipelines, pipe_state, &new_pipeline_create_infos,
- VK_PIPELINE_BIND_POINT_GRAPHICS);
- return new_pipeline_create_infos;
-}
-std::vector<safe_VkComputePipelineCreateInfo> CoreChecks::GpuPreCallRecordCreateComputePipelines(
- VkPipelineCache pipelineCache, uint32_t count, const VkComputePipelineCreateInfo *pCreateInfos,
- const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines, std::vector<std::unique_ptr<PIPELINE_STATE>> &pipe_state) {
- std::vector<safe_VkComputePipelineCreateInfo> new_pipeline_create_infos;
- GpuPreCallRecordPipelineCreations(count, pCreateInfos, pAllocator, pPipelines, pipe_state, &new_pipeline_create_infos,
- VK_PIPELINE_BIND_POINT_COMPUTE);
- return new_pipeline_create_infos;
-}
-std::vector<safe_VkRayTracingPipelineCreateInfoNV> CoreChecks::GpuPreCallRecordCreateRayTracingPipelinesNV(
- VkPipelineCache pipelineCache, uint32_t count, const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
- const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines, std::vector<std::unique_ptr<PIPELINE_STATE>> &pipe_state) {
- std::vector<safe_VkRayTracingPipelineCreateInfoNV> new_pipeline_create_infos;
- GpuPreCallRecordPipelineCreations(count, pCreateInfos, pAllocator, pPipelines, pipe_state, &new_pipeline_create_infos,
- VK_PIPELINE_BIND_POINT_RAY_TRACING_NV);
- return new_pipeline_create_infos;
-}
-template <typename CreateInfo>
-struct CreatePipelineTraits {};
-template <>
-struct CreatePipelineTraits<VkGraphicsPipelineCreateInfo> {
- using SafeType = safe_VkGraphicsPipelineCreateInfo;
- static const SafeType &GetPipelineCI(const PIPELINE_STATE *pipeline_state) { return pipeline_state->graphicsPipelineCI; }
- static uint32_t GetStageCount(const VkGraphicsPipelineCreateInfo &createInfo) { return createInfo.stageCount; }
- static VkShaderModule GetShaderModule(const VkGraphicsPipelineCreateInfo &createInfo, uint32_t stage) {
- return createInfo.pStages[stage].module;
- }
- static void SetShaderModule(SafeType *createInfo, VkShaderModule shader_module, uint32_t stage) {
- createInfo->pStages[stage].module = shader_module;
- }
-};
-
-template <>
-struct CreatePipelineTraits<VkComputePipelineCreateInfo> {
- using SafeType = safe_VkComputePipelineCreateInfo;
- static const SafeType &GetPipelineCI(const PIPELINE_STATE *pipeline_state) { return pipeline_state->computePipelineCI; }
- static uint32_t GetStageCount(const VkComputePipelineCreateInfo &createInfo) { return 1; }
- static VkShaderModule GetShaderModule(const VkComputePipelineCreateInfo &createInfo, uint32_t stage) {
- return createInfo.stage.module;
- }
- static void SetShaderModule(SafeType *createInfo, VkShaderModule shader_module, uint32_t stage) {
- assert(stage == 0);
- createInfo->stage.module = shader_module;
- }
-};
-template <>
-struct CreatePipelineTraits<VkRayTracingPipelineCreateInfoNV> {
- using SafeType = safe_VkRayTracingPipelineCreateInfoNV;
- static const SafeType &GetPipelineCI(const PIPELINE_STATE *pipeline_state) { return pipeline_state->raytracingPipelineCI; }
- static uint32_t GetStageCount(const VkRayTracingPipelineCreateInfoNV &createInfo) { return createInfo.stageCount; }
- static VkShaderModule GetShaderModule(const VkRayTracingPipelineCreateInfoNV &createInfo, uint32_t stage) {
- return createInfo.pStages[stage].module;
- }
- static void SetShaderModule(SafeType *createInfo, VkShaderModule shader_module, uint32_t stage) {
- createInfo->pStages[stage].module = shader_module;
- }
-};
-
// Examine the pipelines to see if they use the debug descriptor set binding index.
// If any do, create new non-instrumented shader modules and use them to replace the instrumented
// shaders in the pipeline. Return the (possibly) modified create infos to the caller.
-template <typename CreateInfo, typename SafeCreateInfo>
-void CoreChecks::GpuPreCallRecordPipelineCreations(uint32_t count, const CreateInfo *pCreateInfos,
- const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
- std::vector<std::unique_ptr<PIPELINE_STATE>> &pipe_state,
- std::vector<SafeCreateInfo> *new_pipeline_create_infos,
- const VkPipelineBindPoint bind_point) {
- using Accessor = CreatePipelineTraits<CreateInfo>;
- if (bind_point != VK_PIPELINE_BIND_POINT_GRAPHICS && bind_point != VK_PIPELINE_BIND_POINT_COMPUTE &&
- bind_point != VK_PIPELINE_BIND_POINT_RAY_TRACING_NV) {
- return;
- }
+std::vector<safe_VkGraphicsPipelineCreateInfo> CoreChecks::GpuPreCallRecordCreateGraphicsPipelines(
+ layer_data *dev_data, VkPipelineCache pipelineCache, uint32_t count, const VkGraphicsPipelineCreateInfo *pCreateInfos,
+ const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines, std::vector<std::unique_ptr<PIPELINE_STATE>> &pipe_state) {
+ auto gpu_state = GetGpuValidationState();
+
+ std::vector<safe_VkGraphicsPipelineCreateInfo> new_pipeline_create_infos;
+ std::vector<unsigned int> pipeline_uses_debug_index(count);
// Walk through all the pipelines, make a copy of each and flag each pipeline that contains a shader that uses the debug
// descriptor set index.
for (uint32_t pipeline = 0; pipeline < count; ++pipeline) {
- uint32_t stageCount = Accessor::GetStageCount(pCreateInfos[pipeline]);
- new_pipeline_create_infos->push_back(Accessor::GetPipelineCI(pipe_state[pipeline].get()));
-
- bool replace_shaders = false;
- if (pipe_state[pipeline]->active_slots.find(gpu_validation_state->desc_set_bind_index) !=
- pipe_state[pipeline]->active_slots.end()) {
- replace_shaders = true;
- }
- // If the app requests all available sets, the pipeline layout was not modified at pipeline layout creation and the already
- // instrumented shaders need to be replaced with uninstrumented shaders
- if (pipe_state[pipeline]->pipeline_layout.set_layouts.size() >= gpu_validation_state->adjusted_max_desc_sets) {
- replace_shaders = true;
+ new_pipeline_create_infos.push_back(pipe_state[pipeline]->graphicsPipelineCI);
+ if (pipe_state[pipeline]->active_slots.find(gpu_state->desc_set_bind_index) != pipe_state[pipeline]->active_slots.end()) {
+ pipeline_uses_debug_index[pipeline] = 1;
}
+ }
- if (replace_shaders) {
- for (uint32_t stage = 0; stage < stageCount; ++stage) {
- const SHADER_MODULE_STATE *shader = GetShaderModuleState(Accessor::GetShaderModule(pCreateInfos[pipeline], stage));
+ // See if any pipeline has shaders using the debug descriptor set index
+ if (std::all_of(pipeline_uses_debug_index.begin(), pipeline_uses_debug_index.end(), [](unsigned int i) { return i == 0; })) {
+ // None of the shaders in all the pipelines use the debug descriptor set index, so use the pipelines
+ // as they stand with the instrumented shaders.
+ return new_pipeline_create_infos;
+ }
+ // At least one pipeline has a shader that uses the debug descriptor set index.
+ for (uint32_t pipeline = 0; pipeline < count; ++pipeline) {
+ if (pipeline_uses_debug_index[pipeline]) {
+ for (uint32_t stage = 0; stage < pCreateInfos[pipeline].stageCount; ++stage) {
+ const shader_module *shader = GetShaderModuleState(pCreateInfos[pipeline].pStages[stage].module);
VkShaderModuleCreateInfo create_info = {};
VkShaderModule shader_module;
create_info.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
create_info.pCode = shader->words.data();
create_info.codeSize = shader->words.size() * sizeof(uint32_t);
- VkResult result = DispatchCreateShaderModule(device, &create_info, pAllocator, &shader_module);
+ VkResult result = GetDispatchTable()->CreateShaderModule(GetDevice(), &create_info, pAllocator, &shader_module);
if (result == VK_SUCCESS) {
- Accessor::SetShaderModule(new_pipeline_create_infos[pipeline].data(), shader_module, stage);
+ new_pipeline_create_infos[pipeline].pStages[stage].module = shader_module;
} else {
- uint64_t moduleHandle = HandleToUint64(Accessor::GetShaderModule(pCreateInfos[pipeline], stage));
- ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, moduleHandle,
+ ReportSetupProblem(dev_data, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
+ HandleToUint64(pCreateInfos[pipeline].pStages[stage].module),
"Unable to replace instrumented shader with non-instrumented one. "
"Device could become unstable.");
}
}
}
}
-}
-
-void CoreChecks::GpuPostCallRecordCreateGraphicsPipelines(const uint32_t count, const VkGraphicsPipelineCreateInfo *pCreateInfos,
- const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines) {
- GpuPostCallRecordPipelineCreations(count, pCreateInfos, pAllocator, pPipelines, VK_PIPELINE_BIND_POINT_GRAPHICS);
-}
-void CoreChecks::GpuPostCallRecordCreateComputePipelines(const uint32_t count, const VkComputePipelineCreateInfo *pCreateInfos,
- const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines) {
- GpuPostCallRecordPipelineCreations(count, pCreateInfos, pAllocator, pPipelines, VK_PIPELINE_BIND_POINT_COMPUTE);
-}
-void CoreChecks::GpuPostCallRecordCreateRayTracingPipelinesNV(const uint32_t count,
- const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
- const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines) {
- GpuPostCallRecordPipelineCreations(count, pCreateInfos, pAllocator, pPipelines, VK_PIPELINE_BIND_POINT_RAY_TRACING_NV);
+ return new_pipeline_create_infos;
}
// For every pipeline:
@@ -589,48 +565,18 @@ void CoreChecks::GpuPostCallRecordCreateRayTracingPipelinesNV(const uint32_t cou
// - Destroy it since it has been bound into the pipeline by now. This is our only chance to delete it.
// - Track the shader in the shader_map
// - Save the shader binary if it contains debug code
-template <typename CreateInfo>
-void CoreChecks::GpuPostCallRecordPipelineCreations(const uint32_t count, const CreateInfo *pCreateInfos,
- const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
- const VkPipelineBindPoint bind_point) {
- using Accessor = CreatePipelineTraits<CreateInfo>;
- if (bind_point != VK_PIPELINE_BIND_POINT_GRAPHICS && bind_point != VK_PIPELINE_BIND_POINT_COMPUTE &&
- bind_point != VK_PIPELINE_BIND_POINT_RAY_TRACING_NV) {
- return;
- }
+void CoreChecks::GpuPostCallRecordCreateGraphicsPipelines(layer_data *dev_data, const uint32_t count,
+ const VkGraphicsPipelineCreateInfo *pCreateInfos,
+ const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines) {
+ auto gpu_state = GetGpuValidationState();
for (uint32_t pipeline = 0; pipeline < count; ++pipeline) {
- auto pipeline_state = ValidationStateTracker::GetPipelineState(pPipelines[pipeline]);
+ auto pipeline_state = GetPipelineState(pPipelines[pipeline]);
if (nullptr == pipeline_state) continue;
-
- uint32_t stageCount = 0;
- if (bind_point == VK_PIPELINE_BIND_POINT_GRAPHICS) {
- stageCount = pipeline_state->graphicsPipelineCI.stageCount;
- } else if (bind_point == VK_PIPELINE_BIND_POINT_COMPUTE) {
- stageCount = 1;
- } else if (bind_point == VK_PIPELINE_BIND_POINT_RAY_TRACING_NV) {
- stageCount = pipeline_state->raytracingPipelineCI.stageCount;
- } else {
- assert(false);
- }
-
- for (uint32_t stage = 0; stage < stageCount; ++stage) {
- if (pipeline_state->active_slots.find(gpu_validation_state->desc_set_bind_index) !=
- pipeline_state->active_slots.end()) {
- DispatchDestroyShaderModule(device, Accessor::GetShaderModule(pCreateInfos[pipeline], stage), pAllocator);
+ for (uint32_t stage = 0; stage < pipeline_state->graphicsPipelineCI.stageCount; ++stage) {
+ if (pipeline_state->active_slots.find(gpu_state->desc_set_bind_index) != pipeline_state->active_slots.end()) {
+ GetDispatchTable()->DestroyShaderModule(GetDevice(), pCreateInfos->pStages[stage].module, pAllocator);
}
-
- const SHADER_MODULE_STATE *shader_state = nullptr;
- if (bind_point == VK_PIPELINE_BIND_POINT_GRAPHICS) {
- shader_state = GetShaderModuleState(pipeline_state->graphicsPipelineCI.pStages[stage].module);
- } else if (bind_point == VK_PIPELINE_BIND_POINT_COMPUTE) {
- assert(stage == 0);
- shader_state = GetShaderModuleState(pipeline_state->computePipelineCI.stage.module);
- } else if (bind_point == VK_PIPELINE_BIND_POINT_RAY_TRACING_NV) {
- shader_state = GetShaderModuleState(pipeline_state->raytracingPipelineCI.pStages[stage].module);
- } else {
- assert(false);
- }
-
+ auto shader_state = GetShaderModuleState(pipeline_state->graphicsPipelineCI.pStages[stage].module);
std::vector<unsigned int> code;
// Save the shader binary if debug info is present.
// The core_validation ShaderModule tracker saves the binary too, but discards it when the ShaderModule
@@ -644,31 +590,22 @@ void CoreChecks::GpuPostCallRecordPipelineCreations(const uint32_t count, const
}
}
}
- gpu_validation_state->shader_map[shader_state->gpu_validation_shader_id].pipeline = pipeline_state->pipeline;
+ gpu_state->shader_map[shader_state->gpu_validation_shader_id].pipeline = pipeline_state->pipeline;
// Be careful to use the originally bound (instrumented) shader here, even if PreCallRecord had to back it
// out with a non-instrumented shader. The non-instrumented shader (found in pCreateInfo) was destroyed above.
- VkShaderModule shader_module = VK_NULL_HANDLE;
- if (bind_point == VK_PIPELINE_BIND_POINT_GRAPHICS) {
- shader_module = pipeline_state->graphicsPipelineCI.pStages[stage].module;
- } else if (bind_point == VK_PIPELINE_BIND_POINT_COMPUTE) {
- assert(stage == 0);
- shader_module = pipeline_state->computePipelineCI.stage.module;
- } else if (bind_point == VK_PIPELINE_BIND_POINT_RAY_TRACING_NV) {
- shader_module = pipeline_state->raytracingPipelineCI.pStages[stage].module;
- } else {
- assert(false);
- }
- gpu_validation_state->shader_map[shader_state->gpu_validation_shader_id].shader_module = shader_module;
- gpu_validation_state->shader_map[shader_state->gpu_validation_shader_id].pgm = std::move(code);
+ gpu_state->shader_map[shader_state->gpu_validation_shader_id].shader_module =
+ pipeline_state->graphicsPipelineCI.pStages[stage].module;
+ gpu_state->shader_map[shader_state->gpu_validation_shader_id].pgm = std::move(code);
}
}
}
// Remove all the shader trackers associated with this destroyed pipeline.
-void CoreChecks::GpuPreCallRecordDestroyPipeline(const VkPipeline pipeline) {
- for (auto it = gpu_validation_state->shader_map.begin(); it != gpu_validation_state->shader_map.end();) {
+void CoreChecks::GpuPreCallRecordDestroyPipeline(layer_data *dev_data, const VkPipeline pipeline) {
+ auto gpu_state = GetGpuValidationState();
+ for (auto it = gpu_state->shader_map.begin(); it != gpu_state->shader_map.end();) {
if (it->second.pipeline == pipeline) {
- it = gpu_validation_state->shader_map.erase(it);
+ it = gpu_state->shader_map.erase(it);
} else {
++it;
}
@@ -676,9 +613,10 @@ void CoreChecks::GpuPreCallRecordDestroyPipeline(const VkPipeline pipeline) {
}
// Call the SPIR-V Optimizer to run the instrumentation pass on the shader.
-bool CoreChecks::GpuInstrumentShader(const VkShaderModuleCreateInfo *pCreateInfo, std::vector<unsigned int> &new_pgm,
- uint32_t *unique_shader_id) {
- if (gpu_validation_state->aborted) return false;
+bool CoreChecks::GpuInstrumentShader(layer_data *dev_data, const VkShaderModuleCreateInfo *pCreateInfo,
+ std::vector<unsigned int> &new_pgm, uint32_t *unique_shader_id) {
+ auto gpu_state = GetGpuValidationState();
+ if (gpu_state->aborted) return false;
if (pCreateInfo->pCode[0] != spv::MagicNumber) return false;
// Load original shader SPIR-V
@@ -689,30 +627,26 @@ bool CoreChecks::GpuInstrumentShader(const VkShaderModuleCreateInfo *pCreateInfo
// Call the optimizer to instrument the shader.
// Use the unique_shader_module_id as a shader ID so we can look up its handle later in the shader_map.
- // If descriptor indexing is enabled, enable length checks and updated descriptor checks
- const bool descriptor_indexing = device_extensions.vk_ext_descriptor_indexing;
using namespace spvtools;
spv_target_env target_env = SPV_ENV_VULKAN_1_1;
Optimizer optimizer(target_env);
- optimizer.RegisterPass(CreateInstBindlessCheckPass(gpu_validation_state->desc_set_bind_index,
- gpu_validation_state->unique_shader_module_id, descriptor_indexing,
- descriptor_indexing));
+ optimizer.RegisterPass(CreateInstBindlessCheckPass(gpu_state->desc_set_bind_index, gpu_state->unique_shader_module_id));
optimizer.RegisterPass(CreateAggressiveDCEPass());
bool pass = optimizer.Run(new_pgm.data(), new_pgm.size(), &new_pgm);
if (!pass) {
- ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, VK_NULL_HANDLE,
+ ReportSetupProblem(dev_data, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, VK_NULL_HANDLE,
"Failure to instrument shader. Proceeding with non-instrumented shader.");
}
- *unique_shader_id = gpu_validation_state->unique_shader_module_id++;
+ *unique_shader_id = gpu_state->unique_shader_module_id++;
return pass;
}
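// Standalone sketch of the instrumentation step above. The argument list of
// CreateInstBindlessCheckPass varies between SPIRV-Tools revisions; the two-argument
// form below matches the call in this file.
#include <vector>
#include "spirv-tools/optimizer.hpp"

// Returns true and replaces `spirv` with the instrumented binary on success.
bool InstrumentForGpuAv(std::vector<uint32_t> &spirv, uint32_t debug_desc_set, uint32_t shader_id) {
    spvtools::Optimizer optimizer(SPV_ENV_VULKAN_1_1);
    // Adds run-time checks on bindless (descriptor-indexed) accesses and makes the shader
    // write a debug record into the storage buffer bound at the reserved descriptor set.
    optimizer.RegisterPass(spvtools::CreateInstBindlessCheckPass(debug_desc_set, shader_id));
    // Remove any code left dead by the instrumentation.
    optimizer.RegisterPass(spvtools::CreateAggressiveDCEPass());
    return optimizer.Run(spirv.data(), spirv.size(), &spirv);
}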
// Create the instrumented shader data to provide to the driver.
-bool CoreChecks::GpuPreCallCreateShaderModule(const VkShaderModuleCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator,
- VkShaderModule *pShaderModule, uint32_t *unique_shader_id,
- VkShaderModuleCreateInfo *instrumented_create_info,
+bool CoreChecks::GpuPreCallCreateShaderModule(layer_data *dev_data, const VkShaderModuleCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator, VkShaderModule *pShaderModule,
+ uint32_t *unique_shader_id, VkShaderModuleCreateInfo *instrumented_create_info,
std::vector<unsigned int> *instrumented_pgm) {
- bool pass = GpuInstrumentShader(pCreateInfo, *instrumented_pgm, unique_shader_id);
+ bool pass = GpuInstrumentShader(dev_data, pCreateInfo, *instrumented_pgm, unique_shader_id);
if (pass) {
instrumented_create_info->pCode = instrumented_pgm->data();
instrumented_create_info->codeSize = instrumented_pgm->size() * sizeof(unsigned int);
@@ -725,52 +659,28 @@ static void GenerateStageMessage(const uint32_t *debug_record, std::string &msg)
using namespace spvtools;
std::ostringstream strm;
switch (debug_record[kInstCommonOutStageIdx]) {
- case spv::ExecutionModelVertex: {
+ case 0: {
strm << "Stage = Vertex. Vertex Index = " << debug_record[kInstVertOutVertexIndex]
<< " Instance Index = " << debug_record[kInstVertOutInstanceIndex] << ". ";
} break;
- case spv::ExecutionModelTessellationControl: {
+ case 1: {
strm << "Stage = Tessellation Control. Invocation ID = " << debug_record[kInstTessOutInvocationId] << ". ";
} break;
- case spv::ExecutionModelTessellationEvaluation: {
+ case 2: {
strm << "Stage = Tessellation Eval. Invocation ID = " << debug_record[kInstTessOutInvocationId] << ". ";
} break;
- case spv::ExecutionModelGeometry: {
+ case 3: {
strm << "Stage = Geometry. Primitive ID = " << debug_record[kInstGeomOutPrimitiveId]
<< " Invocation ID = " << debug_record[kInstGeomOutInvocationId] << ". ";
} break;
- case spv::ExecutionModelFragment: {
+ case 4: {
strm << "Stage = Fragment. Fragment coord (x,y) = ("
<< *reinterpret_cast<const float *>(&debug_record[kInstFragOutFragCoordX]) << ", "
<< *reinterpret_cast<const float *>(&debug_record[kInstFragOutFragCoordY]) << "). ";
} break;
- case spv::ExecutionModelGLCompute: {
+ case 5: {
strm << "Stage = Compute. Global invocation ID = " << debug_record[kInstCompOutGlobalInvocationId] << ". ";
} break;
- case spv::ExecutionModelRayGenerationNV: {
- strm << "Stage = Ray Generation. Global Launch ID (x,y,z) = (" << debug_record[kInstRayTracingOutLaunchIdX] << ", "
- << debug_record[kInstRayTracingOutLaunchIdY] << ", " << debug_record[kInstRayTracingOutLaunchIdZ] << "). ";
- } break;
- case spv::ExecutionModelIntersectionNV: {
- strm << "Stage = Intersection. Global Launch ID (x,y,z) = (" << debug_record[kInstRayTracingOutLaunchIdX] << ", "
- << debug_record[kInstRayTracingOutLaunchIdY] << ", " << debug_record[kInstRayTracingOutLaunchIdZ] << "). ";
- } break;
- case spv::ExecutionModelAnyHitNV: {
- strm << "Stage = Any Hit. Global Launch ID (x,y,z) = (" << debug_record[kInstRayTracingOutLaunchIdX] << ", "
- << debug_record[kInstRayTracingOutLaunchIdY] << ", " << debug_record[kInstRayTracingOutLaunchIdZ] << "). ";
- } break;
- case spv::ExecutionModelClosestHitNV: {
- strm << "Stage = Closest Hit. Global Launch ID (x,y,z) = (" << debug_record[kInstRayTracingOutLaunchIdX] << ", "
- << debug_record[kInstRayTracingOutLaunchIdY] << ", " << debug_record[kInstRayTracingOutLaunchIdZ] << "). ";
- } break;
- case spv::ExecutionModelMissNV: {
- strm << "Stage = Miss. Global Launch ID (x,y,z) = (" << debug_record[kInstRayTracingOutLaunchIdX] << ", "
- << debug_record[kInstRayTracingOutLaunchIdY] << ", " << debug_record[kInstRayTracingOutLaunchIdZ] << "). ";
- } break;
- case spv::ExecutionModelCallableNV: {
- strm << "Stage = Callable. Global Launch ID (x,y,z) = (" << debug_record[kInstRayTracingOutLaunchIdX] << ", "
- << debug_record[kInstRayTracingOutLaunchIdY] << ", " << debug_record[kInstRayTracingOutLaunchIdZ] << "). ";
- } break;
default: {
strm << "Internal Error (unexpected stage = " << debug_record[kInstCommonOutStageIdx] << "). ";
assert(false);
@@ -811,10 +721,9 @@ static std::string LookupDebugUtilsName(const debug_report_data *report_data, co
}
// Generate message from the common portion of the debug report record.
-static void GenerateCommonMessage(const debug_report_data *report_data, const CMD_BUFFER_STATE *cb_node,
- const uint32_t *debug_record, const VkShaderModule shader_module_handle,
- const VkPipeline pipeline_handle, const VkPipelineBindPoint pipeline_bind_point,
- const uint32_t operation_index, std::string &msg) {
+static void GenerateCommonMessage(const debug_report_data *report_data, const GLOBAL_CB_NODE *cb_node, const uint32_t *debug_record,
+ const VkShaderModule shader_module_handle, const VkPipeline pipeline_handle,
+ const uint32_t draw_index, std::string &msg) {
using namespace spvtools;
std::ostringstream strm;
if (shader_module_handle == VK_NULL_HANDLE) {
@@ -825,18 +734,8 @@ static void GenerateCommonMessage(const debug_report_data *report_data, const CM
} else {
strm << std::hex << std::showbase << "Command buffer "
<< LookupDebugUtilsName(report_data, HandleToUint64(cb_node->commandBuffer)) << "("
- << HandleToUint64(cb_node->commandBuffer) << "). ";
- if (pipeline_bind_point == VK_PIPELINE_BIND_POINT_GRAPHICS) {
- strm << "Draw ";
- } else if (pipeline_bind_point == VK_PIPELINE_BIND_POINT_COMPUTE) {
- strm << "Compute ";
- } else if (pipeline_bind_point == VK_PIPELINE_BIND_POINT_RAY_TRACING_NV) {
- strm << "Ray Trace ";
- } else {
- assert(false);
- strm << "Unknown Pipeline Operation ";
- }
- strm << "Index " << operation_index << ". "
+ << HandleToUint64(cb_node->commandBuffer) << "). "
+ << "Draw Index " << draw_index << ". "
<< "Pipeline " << LookupDebugUtilsName(report_data, HandleToUint64(pipeline_handle)) << "("
<< HandleToUint64(pipeline_handle) << "). "
<< "Shader Module " << LookupDebugUtilsName(report_data, HandleToUint64(shader_module_handle)) << "("
@@ -849,8 +748,7 @@ static void GenerateCommonMessage(const debug_report_data *report_data, const CM
// Read the contents of the SPIR-V OpSource instruction and any following continuation instructions.
// Split the single string into a vector of strings, one for each line, for easier processing.
-static void ReadOpSource(const SHADER_MODULE_STATE &shader, const uint32_t reported_file_id,
- std::vector<std::string> &opsource_lines) {
+static void ReadOpSource(const shader_module &shader, const uint32_t reported_file_id, std::vector<std::string> &opsource_lines) {
for (auto insn : shader) {
if ((insn.opcode() == spv::OpSource) && (insn.len() >= 5) && (insn.word(3) == reported_file_id)) {
std::istringstream in_stream;
@@ -952,7 +850,7 @@ static void GenerateSourceMessages(const std::vector<unsigned int> &pgm, const u
using namespace spvtools;
std::ostringstream filename_stream;
std::ostringstream source_stream;
- SHADER_MODULE_STATE shader;
+ shader_module shader;
shader.words = pgm;
// Find the OpLine just before the failing instruction indicated by the debug info.
// SPIR-V can only be iterated in the forward direction due to its opcode/length encoding.
@@ -1063,8 +961,8 @@ static void GenerateSourceMessages(const std::vector<unsigned int> &pgm, const u
// sure it is available when the pipeline is submitted. (The ShaderModule tracking object also
// keeps a copy, but it can be destroyed after the pipeline is created and before it is submitted.)
//
-void CoreChecks::AnalyzeAndReportError(CMD_BUFFER_STATE *cb_node, VkQueue queue, VkPipelineBindPoint pipeline_bind_point,
- uint32_t operation_index, uint32_t *const debug_output_buffer) {
+void CoreChecks::AnalyzeAndReportError(const layer_data *dev_data, GLOBAL_CB_NODE *cb_node, VkQueue queue, uint32_t draw_index,
+ uint32_t *const debug_output_buffer) {
using namespace spvtools;
const uint32_t total_words = debug_output_buffer[0];
// A zero here means that the shader instrumentation didn't write anything.
@@ -1081,6 +979,7 @@ void CoreChecks::AnalyzeAndReportError(CMD_BUFFER_STATE *cb_node, VkQueue queue,
// is hard-coded to process only one record because it expects the buffer to be large enough to
// hold only one record. If there is a desire to process more than one record, this function needs
// to be modified to loop over records and the buffer size increased.
+ auto gpu_state = GetGpuValidationState();
std::string validation_message;
std::string stage_message;
std::string common_message;
@@ -1094,18 +993,17 @@ void CoreChecks::AnalyzeAndReportError(CMD_BUFFER_STATE *cb_node, VkQueue queue,
const uint32_t *debug_record = &debug_output_buffer[kDebugOutputDataOffset];
// Lookup the VkShaderModule handle and SPIR-V code used to create the shader, using the unique shader ID value returned
// by the instrumented shader.
- auto it = gpu_validation_state->shader_map.find(debug_record[kInstCommonOutShaderId]);
- if (it != gpu_validation_state->shader_map.end()) {
+ auto it = gpu_state->shader_map.find(debug_record[kInstCommonOutShaderId]);
+ if (it != gpu_state->shader_map.end()) {
shader_module_handle = it->second.shader_module;
pipeline_handle = it->second.pipeline;
pgm = it->second.pgm;
}
GenerateValidationMessage(debug_record, validation_message, vuid_msg);
GenerateStageMessage(debug_record, stage_message);
- GenerateCommonMessage(report_data, cb_node, debug_record, shader_module_handle, pipeline_handle, pipeline_bind_point,
- operation_index, common_message);
+ GenerateCommonMessage(report_data, cb_node, debug_record, shader_module_handle, pipeline_handle, draw_index, common_message);
GenerateSourceMessages(pgm, debug_record, filename_message, source_message);
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, HandleToUint64(queue),
+ log_msg(GetReportData(), VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, HandleToUint64(queue),
vuid_msg.c_str(), "%s %s %s %s%s", validation_message.c_str(), common_message.c_str(), stage_message.c_str(),
filename_message.c_str(), source_message.c_str());
// The debug record at word kInstCommonOutSize is the number of words in the record
@@ -1115,393 +1013,203 @@ void CoreChecks::AnalyzeAndReportError(CMD_BUFFER_STATE *cb_node, VkQueue queue,
}
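// Shape of the readback that AnalyzeAndReportError parses (indices are the spvtools
// kInst* constants, whose exact values depend on the SPIRV-Tools revision in use):
//   debug_output_buffer[0]                       total words the instrumented shader wrote;
//                                                0 means no validation error was recorded
//   debug_output_buffer[kDebugOutputDataOffset]  start of the single record handled here:
//       kInstCommonOutSize      length of the record in words
//       kInstCommonOutShaderId  unique shader id used for the shader_map lookup
//       kInstCommonOutStageIdx  SPIR-V execution model (0 = Vertex ... 5 = GLCompute)
//       remaining words         stage-specific payload (vertex/instance index, invocation id,
//                               fragment coordinate, ...) plus the error-specific values
// The buffer only has room for one record, which is why the comment above notes that a
// single record is processed per draw.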
// For the given command buffer, map its debug data buffers and read their contents for analysis.
-void CoreChecks::ProcessInstrumentationBuffer(VkQueue queue, CMD_BUFFER_STATE *cb_node) {
- auto gpu_buffer_list = gpu_validation_state->GetGpuBufferInfo(cb_node->commandBuffer);
- if (cb_node && (cb_node->hasDrawCmd || cb_node->hasTraceRaysCmd || cb_node->hasDispatchCmd) && gpu_buffer_list.size() > 0) {
+void CoreChecks::ProcessInstrumentationBuffer(const layer_data *dev_data, VkQueue queue, GLOBAL_CB_NODE *cb_node) {
+ auto gpu_state = GetGpuValidationState();
+ if (cb_node && cb_node->hasDrawCmd && cb_node->gpu_buffer_list.size() > 0) {
VkResult result;
char *pData;
uint32_t draw_index = 0;
- uint32_t compute_index = 0;
- uint32_t ray_trace_index = 0;
-
- for (auto &buffer_info : gpu_buffer_list) {
- result = vmaMapMemory(gpu_validation_state->vmaAllocator, buffer_info.output_mem_block.allocation, (void **)&pData);
+ for (auto &buffer_info : cb_node->gpu_buffer_list) {
+ uint32_t block_offset = buffer_info.mem_block.offset;
+ uint32_t block_size = gpu_state->memory_manager->GetBlockSize();
+ uint32_t offset_to_data = 0;
+ const uint32_t map_align = std::max(1U, static_cast<uint32_t>(GetPDProperties()->limits.minMemoryMapAlignment));
+
+ // Adjust the offset to the alignment required for mapping.
+ block_offset = (block_offset / map_align) * map_align;
+ offset_to_data = buffer_info.mem_block.offset - block_offset;
+ block_size += offset_to_data;
+ result = GetDispatchTable()->MapMemory(cb_node->device, buffer_info.mem_block.memory, block_offset, block_size, 0,
+ (void **)&pData);
// Analyze debug output buffer
if (result == VK_SUCCESS) {
- uint32_t operation_index = 0;
- if (buffer_info.pipeline_bind_point == VK_PIPELINE_BIND_POINT_GRAPHICS) {
- operation_index = draw_index;
- } else if (buffer_info.pipeline_bind_point == VK_PIPELINE_BIND_POINT_COMPUTE) {
- operation_index = compute_index;
- } else if (buffer_info.pipeline_bind_point == VK_PIPELINE_BIND_POINT_RAY_TRACING_NV) {
- operation_index = ray_trace_index;
- } else {
- assert(false);
- }
-
- AnalyzeAndReportError(cb_node, queue, buffer_info.pipeline_bind_point, operation_index, (uint32_t *)pData);
- vmaUnmapMemory(gpu_validation_state->vmaAllocator, buffer_info.output_mem_block.allocation);
- }
-
- if (buffer_info.pipeline_bind_point == VK_PIPELINE_BIND_POINT_GRAPHICS) {
- draw_index++;
- } else if (buffer_info.pipeline_bind_point == VK_PIPELINE_BIND_POINT_COMPUTE) {
- compute_index++;
- } else if (buffer_info.pipeline_bind_point == VK_PIPELINE_BIND_POINT_RAY_TRACING_NV) {
- ray_trace_index++;
- } else {
- assert(false);
- }
- }
- }
-}
-
-// For the given command buffer, map its debug data buffers and update the status of any update after bind descriptors
-void CoreChecks::UpdateInstrumentationBuffer(CMD_BUFFER_STATE *cb_node) {
- auto gpu_buffer_list = gpu_validation_state->GetGpuBufferInfo(cb_node->commandBuffer);
- uint32_t *pData;
- for (auto &buffer_info : gpu_buffer_list) {
- if (buffer_info.input_mem_block.update_at_submit.size() > 0) {
- VkResult result =
- vmaMapMemory(gpu_validation_state->vmaAllocator, buffer_info.input_mem_block.allocation, (void **)&pData);
- if (result == VK_SUCCESS) {
- for (auto update : buffer_info.input_mem_block.update_at_submit) {
- if (update.second->updated) pData[update.first] = 1;
- }
- vmaUnmapMemory(gpu_validation_state->vmaAllocator, buffer_info.input_mem_block.allocation);
+ AnalyzeAndReportError(dev_data, cb_node, queue, draw_index, (uint32_t *)(pData + offset_to_data));
+ GetDispatchTable()->UnmapMemory(cb_node->device, buffer_info.mem_block.memory);
}
+ draw_index++;
}
}
}
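// Worked example of the mapping arithmetic above (hypothetical numbers): with
// minMemoryMapAlignment = 64 and a debug block at offset 144 of size 256,
//     block_offset   = (144 / 64) * 64 = 128   // vkMapMemory offset must honor the alignment
//     offset_to_data = 144 - 128       = 16    // where the record actually starts
//     block_size     = 256 + 16        = 272   // widen the range so the block's tail is covered
// so the record is parsed at (pData + 16) inside a 272-byte mapping that begins at byte 128
// of the memory manager's VkDeviceMemory allocation.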
// Submit a memory barrier on graphics queues.
// Lazy-create and record the needed command buffer.
-void CoreChecks::SubmitBarrier(VkQueue queue) {
- auto queue_barrier_command_info_it =
- gpu_validation_state->queue_barrier_command_infos.emplace(queue, GpuQueueBarrierCommandInfo{});
- if (queue_barrier_command_info_it.second) {
- GpuQueueBarrierCommandInfo &quere_barrier_command_info = queue_barrier_command_info_it.first->second;
-
- uint32_t queue_family_index = 0;
-
- auto queue_state_it = queueMap.find(queue);
- if (queue_state_it != queueMap.end()) {
- queue_family_index = queue_state_it->second.queueFamilyIndex;
- }
+void CoreChecks::SubmitBarrier(layer_data *dev_data, VkQueue queue) {
+ auto gpu_state = GetGpuValidationState();
+ const auto *dispatch_table = GetDispatchTable();
+ uint32_t queue_family_index = 0;
+
+ auto it = dev_data->queueMap.find(queue);
+ if (it != dev_data->queueMap.end()) {
+ queue_family_index = it->second.queueFamilyIndex;
+ }
- VkResult result = VK_SUCCESS;
+ // Pay attention only to queues that support graphics.
+ // This ensures that the command buffer pool is created so that it can be used on a graphics queue.
+ VkQueueFlags queue_flags = GetPhysicalDeviceState()->queue_family_properties[queue_family_index].queueFlags;
+ if (!(queue_flags & VK_QUEUE_GRAPHICS_BIT)) {
+ return;
+ }
+ // Lazy-allocate and record the command buffer.
+ if (gpu_state->barrier_command_buffer == VK_NULL_HANDLE) {
+ VkResult result;
VkCommandPoolCreateInfo pool_create_info = {};
pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
pool_create_info.queueFamilyIndex = queue_family_index;
- result = DispatchCreateCommandPool(device, &pool_create_info, nullptr, &quere_barrier_command_info.barrier_command_pool);
+ result = dispatch_table->CreateCommandPool(GetDevice(), &pool_create_info, nullptr, &gpu_state->barrier_command_pool);
if (result != VK_SUCCESS) {
- ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+ ReportSetupProblem(dev_data, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(GetDevice()),
"Unable to create command pool for barrier CB.");
- quere_barrier_command_info.barrier_command_pool = VK_NULL_HANDLE;
+ gpu_state->barrier_command_pool = VK_NULL_HANDLE;
return;
}
- VkCommandBufferAllocateInfo buffer_alloc_info = {};
- buffer_alloc_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
- buffer_alloc_info.commandPool = quere_barrier_command_info.barrier_command_pool;
- buffer_alloc_info.commandBufferCount = 1;
- buffer_alloc_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
- result = DispatchAllocateCommandBuffers(device, &buffer_alloc_info, &quere_barrier_command_info.barrier_command_buffer);
+ VkCommandBufferAllocateInfo command_buffer_alloc_info = {};
+ command_buffer_alloc_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+ command_buffer_alloc_info.commandPool = gpu_state->barrier_command_pool;
+ command_buffer_alloc_info.commandBufferCount = 1;
+ command_buffer_alloc_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+ result =
+ dispatch_table->AllocateCommandBuffers(GetDevice(), &command_buffer_alloc_info, &gpu_state->barrier_command_buffer);
if (result != VK_SUCCESS) {
- ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+ ReportSetupProblem(dev_data, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(GetDevice()),
"Unable to create barrier command buffer.");
- DispatchDestroyCommandPool(device, quere_barrier_command_info.barrier_command_pool, nullptr);
- quere_barrier_command_info.barrier_command_pool = VK_NULL_HANDLE;
- quere_barrier_command_info.barrier_command_buffer = VK_NULL_HANDLE;
+ dispatch_table->DestroyCommandPool(GetDevice(), gpu_state->barrier_command_pool, nullptr);
+ gpu_state->barrier_command_pool = VK_NULL_HANDLE;
+ gpu_state->barrier_command_buffer = VK_NULL_HANDLE;
return;
}
// Hook up command buffer dispatch
- gpu_validation_state->vkSetDeviceLoaderData(device, quere_barrier_command_info.barrier_command_buffer);
+ *((const void **)gpu_state->barrier_command_buffer) = *(void **)(GetDevice());
// Record a global memory barrier to force availability of device memory operations to the host domain.
VkCommandBufferBeginInfo command_buffer_begin_info = {};
command_buffer_begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
- result = DispatchBeginCommandBuffer(quere_barrier_command_info.barrier_command_buffer, &command_buffer_begin_info);
+ result = dispatch_table->BeginCommandBuffer(gpu_state->barrier_command_buffer, &command_buffer_begin_info);
+
if (result == VK_SUCCESS) {
VkMemoryBarrier memory_barrier = {};
memory_barrier.sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER;
memory_barrier.srcAccessMask = VK_ACCESS_MEMORY_WRITE_BIT;
memory_barrier.dstAccessMask = VK_ACCESS_HOST_READ_BIT;
- DispatchCmdPipelineBarrier(quere_barrier_command_info.barrier_command_buffer, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
- VK_PIPELINE_STAGE_HOST_BIT, 0, 1, &memory_barrier, 0, nullptr, 0, nullptr);
- DispatchEndCommandBuffer(quere_barrier_command_info.barrier_command_buffer);
+ dispatch_table->CmdPipelineBarrier(gpu_state->barrier_command_buffer, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
+ VK_PIPELINE_STAGE_HOST_BIT, 0, 1, &memory_barrier, 0, nullptr, 0, nullptr);
+ dispatch_table->EndCommandBuffer(gpu_state->barrier_command_buffer);
}
}
- GpuQueueBarrierCommandInfo &quere_barrier_command_info = queue_barrier_command_info_it.first->second;
- if (quere_barrier_command_info.barrier_command_buffer != VK_NULL_HANDLE) {
+ if (gpu_state->barrier_command_buffer) {
VkSubmitInfo submit_info = {};
submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &quere_barrier_command_info.barrier_command_buffer;
- DispatchQueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);
- }
-}
-
-void CoreChecks::GpuPreCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits, VkFence fence) {
- for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
- const VkSubmitInfo *submit = &pSubmits[submit_idx];
- for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
- auto cb_node = GetCBState(submit->pCommandBuffers[i]);
- UpdateInstrumentationBuffer(cb_node);
- for (auto secondaryCmdBuffer : cb_node->linkedCommandBuffers) {
- UpdateInstrumentationBuffer(secondaryCmdBuffer);
- }
- }
+ submit_info.pCommandBuffers = &gpu_state->barrier_command_buffer;
+ dispatch_table->QueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);
}
}
// Issue a memory barrier to make GPU-written data available to host.
// Wait for the queue to complete execution.
// Check the debug buffers for all the command buffers that were submitted.
-void CoreChecks::GpuPostCallQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits, VkFence fence) {
- if (gpu_validation_state->aborted) return;
+void CoreChecks::GpuPostCallQueueSubmit(layer_data *dev_data, VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits,
+ VkFence fence) {
+ auto gpu_state = GetGpuValidationState();
+ if (gpu_state->aborted) return;
- SubmitBarrier(queue);
+ SubmitBarrier(dev_data, queue);
- DispatchQueueWaitIdle(queue);
+ dev_data->device_dispatch_table.QueueWaitIdle(queue);
for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
const VkSubmitInfo *submit = &pSubmits[submit_idx];
for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
- auto cb_node = GetCBState(submit->pCommandBuffers[i]);
- ProcessInstrumentationBuffer(queue, cb_node);
+ auto cb_node = GetCBNode(submit->pCommandBuffers[i]);
+ ProcessInstrumentationBuffer(dev_data, queue, cb_node);
for (auto secondaryCmdBuffer : cb_node->linkedCommandBuffers) {
- ProcessInstrumentationBuffer(queue, secondaryCmdBuffer);
+ ProcessInstrumentationBuffer(dev_data, queue, secondaryCmdBuffer);
}
}
}
}
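// Net effect of SubmitBarrier plus GpuPostCallQueueSubmit on a graphics queue
// (sketch of the call sequence, not literal layer code):
//     vkQueueSubmit(queue, submitCount, pSubmits, fence);        // application work, instrumented shaders
//     vkQueueSubmit(queue, 1, &barrier_submit, VK_NULL_HANDLE);  // lazily recorded CB: MEMORY_WRITE -> HOST_READ
//     vkQueueWaitIdle(queue);                                    // drain the queue so the debug buffers are final
//     ...map and parse each submitted command buffer's debug buffer on the CPU...
// Every submit therefore ends in a full queue drain before results are read back, which is
// a large part of GPU-assisted validation's performance cost.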
-void CoreChecks::GpuAllocateValidationResources(const VkCommandBuffer cmd_buffer, const VkPipelineBindPoint bind_point) {
- if (bind_point != VK_PIPELINE_BIND_POINT_GRAPHICS && bind_point != VK_PIPELINE_BIND_POINT_COMPUTE &&
- bind_point != VK_PIPELINE_BIND_POINT_RAY_TRACING_NV) {
- return;
- }
+void CoreChecks::GpuAllocateValidationResources(layer_data *dev_data, const VkCommandBuffer cmd_buffer,
+ const VkPipelineBindPoint bind_point) {
VkResult result;
- if (!(enabled.gpu_validation)) return;
+ if (!(GetEnables()->gpu_validation)) return;
- if (gpu_validation_state->aborted) return;
+ auto gpu_state = GetGpuValidationState();
+ if (gpu_state->aborted) return;
std::vector<VkDescriptorSet> desc_sets;
VkDescriptorPool desc_pool = VK_NULL_HANDLE;
- result = gpu_validation_state->desc_set_manager->GetDescriptorSets(1, &desc_pool, &desc_sets);
+ result = gpu_state->desc_set_manager->GetDescriptorSets(1, &desc_pool, &desc_sets);
assert(result == VK_SUCCESS);
if (result != VK_SUCCESS) {
- ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+ ReportSetupProblem(dev_data, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(GetDevice()),
"Unable to allocate descriptor sets. Device could become unstable.");
- gpu_validation_state->aborted = true;
+ gpu_state->aborted = true;
return;
}
- VkDescriptorBufferInfo output_desc_buffer_info = {};
- output_desc_buffer_info.range = gpu_validation_state->output_buffer_size;
+ VkDescriptorBufferInfo desc_buffer_info = {};
+ desc_buffer_info.range = gpu_state->memory_manager->GetBlockSize();
- auto cb_node = GetCBState(cmd_buffer);
+ auto cb_node = GetCBNode(cmd_buffer);
if (!cb_node) {
- ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device), "Unrecognized command buffer");
- gpu_validation_state->aborted = true;
+ ReportSetupProblem(dev_data, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(GetDevice()),
+ "Unrecognized command buffer");
+ gpu_state->aborted = true;
return;
}
- // Allocate memory for the output block that the gpu will use to return any error information
- GpuDeviceMemoryBlock output_block = {};
- VkBufferCreateInfo bufferInfo = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO};
- bufferInfo.size = gpu_validation_state->output_buffer_size;
- bufferInfo.usage = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
- VmaAllocationCreateInfo allocInfo = {};
- allocInfo.usage = VMA_MEMORY_USAGE_GPU_TO_CPU;
- result = vmaCreateBuffer(gpu_validation_state->vmaAllocator, &bufferInfo, &allocInfo, &output_block.buffer,
- &output_block.allocation, nullptr);
+ GpuDeviceMemoryBlock block = {};
+ result = gpu_state->memory_manager->GetBlock(&block);
if (result != VK_SUCCESS) {
- ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+ ReportSetupProblem(dev_data, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(GetDevice()),
"Unable to allocate device memory. Device could become unstable.");
- gpu_validation_state->aborted = true;
+ gpu_state->aborted = true;
return;
}
- // Clear the output block to zeros so that only error information from the gpu will be present
- uint32_t *pData;
- result = vmaMapMemory(gpu_validation_state->vmaAllocator, output_block.allocation, (void **)&pData);
- if (result == VK_SUCCESS) {
- memset(pData, 0, gpu_validation_state->output_buffer_size);
- vmaUnmapMemory(gpu_validation_state->vmaAllocator, output_block.allocation);
- }
-
- GpuDeviceMemoryBlock input_block = {};
- VkWriteDescriptorSet desc_writes[2] = {};
- uint32_t desc_count = 1;
- auto const &state = cb_node->lastBound[bind_point];
- uint32_t number_of_sets = (uint32_t)state.per_set.size();
-
- // Figure out how much memory we need for the input block based on how many sets and bindings there are
- // and how big each of the bindings is
- if (number_of_sets > 0 && device_extensions.vk_ext_descriptor_indexing) {
- uint32_t descriptor_count = 0; // Number of descriptors, including all array elements
- uint32_t binding_count = 0; // Number of bindings based on the max binding number used
- for (auto s : state.per_set) {
- auto desc = s.bound_descriptor_set;
- auto bindings = desc->GetLayout()->GetSortedBindingSet();
- if (bindings.size() > 0) {
- binding_count += desc->GetLayout()->GetMaxBinding() + 1;
- for (auto binding : bindings) {
-                    // Shader instrumentation is tracking inline uniform blocks as scalars. Don't try to validate inline uniform
- // blocks
- if (VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT == desc->GetLayout()->GetTypeFromBinding(binding)) {
- descriptor_count++;
- log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
- VK_NULL_HANDLE, "UNASSIGNED-GPU-Assisted Validation Warning",
- "VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT descriptors will not be validated by GPU assisted "
- "validation");
- } else if (binding == desc->GetLayout()->GetMaxBinding() && desc->IsVariableDescriptorCount(binding)) {
- descriptor_count += desc->GetVariableDescriptorCount();
- } else {
- descriptor_count += desc->GetDescriptorCountFromBinding(binding);
- }
- }
- }
- }
-
- // Note that the size of the input buffer is dependent on the maximum binding number, which
- // can be very large. This is because for (set = s, binding = b, index = i), the validation
- // code is going to dereference Input[ i + Input[ b + Input[ s + Input[ Input[0] ] ] ] ] to
- // see if descriptors have been written. In gpu_validation.md, we note this and advise
- // using densely packed bindings as a best practice when using gpu-av with descriptor indexing
- uint32_t words_needed = 1 + (number_of_sets * 2) + (binding_count * 2) + descriptor_count;
- allocInfo.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
- bufferInfo.size = words_needed * 4;
- result = vmaCreateBuffer(gpu_validation_state->vmaAllocator, &bufferInfo, &allocInfo, &input_block.buffer,
- &input_block.allocation, nullptr);
- if (result != VK_SUCCESS) {
- ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
- "Unable to allocate device memory. Device could become unstable.");
- gpu_validation_state->aborted = true;
- return;
- }
-
- // Populate input buffer first with the sizes of every descriptor in every set, then with whether
- // each element of each descriptor has been written or not. See gpu_validation.md for a more thorough
- // outline of the input buffer format
- result = vmaMapMemory(gpu_validation_state->vmaAllocator, input_block.allocation, (void **)&pData);
- memset(pData, 0, static_cast<size_t>(bufferInfo.size));
- // Pointer to a sets array that points into the sizes array
- uint32_t *sets_to_sizes = pData + 1;
- // Pointer to the sizes array that contains the array size of the descriptor at each binding
- uint32_t *sizes = sets_to_sizes + number_of_sets;
- // Pointer to another sets array that points into the bindings array that points into the written array
- uint32_t *sets_to_bindings = sizes + binding_count;
- // Pointer to the bindings array that points at the start of the writes in the writes array for each binding
- uint32_t *bindings_to_written = sets_to_bindings + number_of_sets;
- // Index of the next entry in the written array to be updated
- uint32_t written_index = 1 + (number_of_sets * 2) + (binding_count * 2);
- uint32_t bindCounter = number_of_sets + 1;
- // Index of the start of the sets_to_bindings array
- pData[0] = number_of_sets + binding_count + 1;
-
- for (auto s : state.per_set) {
- auto desc = s.bound_descriptor_set;
- auto layout = desc->GetLayout();
- auto bindings = layout->GetSortedBindingSet();
- if (bindings.size() > 0) {
- // For each set, fill in index of its bindings sizes in the sizes array
- *sets_to_sizes++ = bindCounter;
- // For each set, fill in the index of its bindings in the bindings_to_written array
- *sets_to_bindings++ = bindCounter + number_of_sets + binding_count;
- for (auto binding : bindings) {
- // For each binding, fill in its size in the sizes array
- // Shader instrumentation is tracking inline uniform blocks as scalars. Don't try to validate inline uniform
- // blocks
- if (VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT == desc->GetLayout()->GetTypeFromBinding(binding)) {
- sizes[binding] = 1;
- } else if (binding == layout->GetMaxBinding() && desc->IsVariableDescriptorCount(binding)) {
- sizes[binding] = desc->GetVariableDescriptorCount();
- } else {
- sizes[binding] = desc->GetDescriptorCountFromBinding(binding);
- }
- // Fill in the starting index for this binding in the written array in the bindings_to_written array
- bindings_to_written[binding] = written_index;
-
- // Shader instrumentation is tracking inline uniform blocks as scalars. Don't try to validate inline uniform
- // blocks
- if (VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT == desc->GetLayout()->GetTypeFromBinding(binding)) {
- pData[written_index++] = 1;
- continue;
- }
-
- auto index_range = desc->GetGlobalIndexRangeFromBinding(binding, true);
- // For each array element in the binding, update the written array with whether it has been written
- for (uint32_t i = index_range.start; i < index_range.end; ++i) {
- auto *descriptor = desc->GetDescriptorFromGlobalIndex(i);
- if (descriptor->updated) {
- pData[written_index] = 1;
- } else if (desc->IsUpdateAfterBind(binding)) {
- // If it hasn't been written now and it's update after bind, put it in a list to check at QueueSubmit
- input_block.update_at_submit[written_index] = descriptor;
- }
- written_index++;
- }
- }
- auto last = desc->GetLayout()->GetMaxBinding();
- bindings_to_written += last + 1;
- bindCounter += last + 1;
- sizes += last + 1;
- } else {
- *sets_to_sizes++ = 0;
- *sets_to_bindings++ = 0;
- }
- }
- vmaUnmapMemory(gpu_validation_state->vmaAllocator, input_block.allocation);
-
- VkDescriptorBufferInfo input_desc_buffer_info = {};
- input_desc_buffer_info.range = (words_needed * 4);
- input_desc_buffer_info.buffer = input_block.buffer;
- input_desc_buffer_info.offset = 0;
-
- desc_writes[1].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
- desc_writes[1].dstBinding = 1;
- desc_writes[1].descriptorCount = 1;
- desc_writes[1].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
- desc_writes[1].pBufferInfo = &input_desc_buffer_info;
- desc_writes[1].dstSet = desc_sets[0];
-
- desc_count = 2;
- }
+ // Record buffer and memory info in CB state tracking
+ cb_node->gpu_buffer_list.emplace_back(block, desc_sets[0], desc_pool);
// Write the descriptor
- output_desc_buffer_info.buffer = output_block.buffer;
- output_desc_buffer_info.offset = 0;
-
- desc_writes[0].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
- desc_writes[0].descriptorCount = 1;
- desc_writes[0].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
- desc_writes[0].pBufferInfo = &output_desc_buffer_info;
- desc_writes[0].dstSet = desc_sets[0];
- DispatchUpdateDescriptorSets(device, desc_count, desc_writes, 0, NULL);
-
- auto iter = cb_node->lastBound.find(bind_point); // find() allows read-only access to cb_state
+ desc_buffer_info.buffer = block.buffer;
+ desc_buffer_info.offset = block.offset;
+
+ VkWriteDescriptorSet desc_write = {};
+ desc_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ desc_write.descriptorCount = 1;
+ desc_write.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
+ desc_write.pBufferInfo = &desc_buffer_info;
+ desc_write.dstSet = desc_sets[0];
+ GetDispatchTable()->UpdateDescriptorSets(GetDevice(), 1, &desc_write, 0, NULL);
+
+ auto iter = cb_node->lastBound.find(VK_PIPELINE_BIND_POINT_GRAPHICS); // find() allows read-only access to cb_state
if (iter != cb_node->lastBound.end()) {
auto pipeline_state = iter->second.pipeline_state;
- if (pipeline_state && (pipeline_state->pipeline_layout.set_layouts.size() <= gpu_validation_state->desc_set_bind_index)) {
- DispatchCmdBindDescriptorSets(cmd_buffer, bind_point, pipeline_state->pipeline_layout.layout,
- gpu_validation_state->desc_set_bind_index, 1, desc_sets.data(), 0, nullptr);
+ if (pipeline_state && (pipeline_state->pipeline_layout.set_layouts.size() <= gpu_state->desc_set_bind_index)) {
+ GetDispatchTable()->CmdBindDescriptorSets(cmd_buffer, VK_PIPELINE_BIND_POINT_GRAPHICS,
+ pipeline_state->pipeline_layout.layout, gpu_state->desc_set_bind_index, 1,
+ desc_sets.data(), 0, nullptr);
}
- // Record buffer and memory info in CB state tracking
- gpu_validation_state->GetGpuBufferInfo(cmd_buffer)
- .emplace_back(output_block, input_block, desc_sets[0], desc_pool, bind_point);
} else {
- ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device), "Unable to find pipeline state");
- vmaDestroyBuffer(gpu_validation_state->vmaAllocator, input_block.buffer, input_block.allocation);
- vmaDestroyBuffer(gpu_validation_state->vmaAllocator, output_block.buffer, output_block.allocation);
- gpu_validation_state->aborted = true;
+ ReportSetupProblem(dev_data, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(GetDevice()),
+ "Unable to find pipeline state");
+ gpu_state->aborted = true;
return;
}
}
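For reference, the descriptor-indexing input buffer removed above is laid out exactly as its comments describe: word 0 holds the start of the sets-to-bindings table, which chains through a per-set entry to a per-binding start in the "written" flags. A minimal host-side sketch of that nested lookup, illustrative only (the helper name is hypothetical, not part of this change):

    // Reads the "written" flag for (set, binding, array index) from the packed input buffer.
    // Mirrors the comment above: Input[ i + Input[ b + Input[ s + Input[ Input[0] ] ] ] ]
    static bool DescriptorWasWritten(const uint32_t *input, uint32_t set, uint32_t binding, uint32_t index) {
        uint32_t sets_to_bindings = input[0];                           // start of the sets-to-bindings table
        uint32_t bindings_to_written = input[sets_to_bindings + set];   // this set's slice of the bindings-to-written table
        uint32_t written_start = input[bindings_to_written + binding];  // first "written" word for this binding
        return input[written_start + index] != 0;                       // one word per descriptor array element
    }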
diff --git a/layers/gpu_validation.h b/layers/gpu_validation.h
index d3dd1ab41..ea17c3af9 100644
--- a/layers/gpu_validation.h
+++ b/layers/gpu_validation.h
@@ -17,46 +17,66 @@
*
*/
-#include "vk_mem_alloc.h"
-
#ifndef VULKAN_GPU_VALIDATION_H
#define VULKAN_GPU_VALIDATION_H
-struct GpuDeviceMemoryBlock {
- VkBuffer buffer;
- VmaAllocation allocation;
- std::unordered_map<uint32_t, const cvdescriptorset::Descriptor *> update_at_submit;
-};
+// Class to encapsulate Vulkan Device Memory allocations.
+// It allocates device memory in large chunks for efficiency and to avoid
+// hitting the device limit of the number of allocations.
+// This manager handles only fixed-sized blocks of "data_size" bytes.
+// The interface allows the caller to "get" and "put back" blocks.
+// The manager allocates and frees chunks as needed.
-struct GpuBufferInfo {
- GpuDeviceMemoryBlock output_mem_block;
- GpuDeviceMemoryBlock input_mem_block;
- VkDescriptorSet desc_set;
- VkDescriptorPool desc_pool;
- VkPipelineBindPoint pipeline_bind_point;
- GpuBufferInfo(GpuDeviceMemoryBlock output_mem_block, GpuDeviceMemoryBlock input_mem_block, VkDescriptorSet desc_set,
- VkDescriptorPool desc_pool, VkPipelineBindPoint pipeline_bind_point)
- : output_mem_block(output_mem_block),
- input_mem_block(input_mem_block),
- desc_set(desc_set),
- desc_pool(desc_pool),
- pipeline_bind_point(pipeline_bind_point){};
-};
+class CoreChecks;
+typedef CoreChecks layer_data;
+
+class GpuDeviceMemoryManager {
+ public:
+ GpuDeviceMemoryManager(layer_data *dev_data, uint32_t data_size);
+ ~GpuDeviceMemoryManager();
+
+ uint32_t GetBlockSize() { return block_size_; }
-struct GpuQueueBarrierCommandInfo {
- VkCommandPool barrier_command_pool = VK_NULL_HANDLE;
- VkCommandBuffer barrier_command_buffer = VK_NULL_HANDLE;
+ VkResult GetBlock(GpuDeviceMemoryBlock *block);
+ void PutBackBlock(VkBuffer buffer, VkDeviceMemory memory, uint32_t offset);
+ void PutBackBlock(GpuDeviceMemoryBlock &block);
+ void FreeAllBlocks();
+
+ private:
+ // Define allocation granularity of Vulkan resources.
+ // Things like device memory and descriptors are allocated in "chunks".
+ // This number should be chosen to try to avoid too many chunk allocations
+ // and chunk allocations that are too large.
+ static const uint32_t kItemsPerChunk = 512;
+
+ struct MemoryChunk {
+ VkBuffer buffer;
+ VkDeviceMemory memory;
+ std::vector<uint32_t> available_offsets;
+ };
+
+ layer_data *dev_data_;
+ uint32_t record_size_;
+ uint32_t block_size_;
+ uint32_t blocks_per_chunk_;
+ uint32_t chunk_size_;
+ std::list<MemoryChunk> chunk_list_;
+
+ bool MemoryTypeFromProperties(uint32_t typeBits, VkFlags requirements_mask, uint32_t *typeIndex);
+ VkResult AllocMemoryChunk(MemoryChunk &chunk);
+ void FreeMemoryChunk(MemoryChunk &chunk);
};
// Class to encapsulate Descriptor Set allocation. This manager creates and destroys Descriptor Pools
// as needed to satisfy requests for descriptor sets.
class GpuDescriptorSetManager {
public:
- GpuDescriptorSetManager(CoreChecks *dev_data);
+ GpuDescriptorSetManager(layer_data *dev_data);
~GpuDescriptorSetManager();
VkResult GetDescriptorSets(uint32_t count, VkDescriptorPool *pool, std::vector<VkDescriptorSet> *desc_sets);
void PutBackDescriptorSet(VkDescriptorPool desc_pool, VkDescriptorSet desc_set);
+ void DestroyDescriptorPools();
private:
static const uint32_t kItemsPerChunk = 512;
@@ -65,43 +85,10 @@ class GpuDescriptorSetManager {
uint32_t used;
};
- CoreChecks *dev_data_;
+ layer_data *dev_data_;
std::unordered_map<VkDescriptorPool, struct PoolTracker> desc_pool_map_;
};
-struct GpuValidationState {
- bool aborted;
- bool reserve_binding_slot;
- VkDescriptorSetLayout debug_desc_layout;
- VkDescriptorSetLayout dummy_desc_layout;
- uint32_t adjusted_max_desc_sets;
- uint32_t desc_set_bind_index;
- uint32_t unique_shader_module_id;
- std::unordered_map<uint32_t, ShaderTracker> shader_map;
- std::unique_ptr<GpuDescriptorSetManager> desc_set_manager;
- std::map<VkQueue, GpuQueueBarrierCommandInfo> queue_barrier_command_infos;
- std::unordered_map<VkCommandBuffer, std::vector<GpuBufferInfo>> command_buffer_map; // gpu_buffer_list;
- uint32_t output_buffer_size;
- VmaAllocator vmaAllocator;
- PFN_vkSetDeviceLoaderData vkSetDeviceLoaderData;
- GpuValidationState(bool aborted = false, bool reserve_binding_slot = false, uint32_t unique_shader_module_id = 0,
- VmaAllocator vmaAllocator = {})
- : aborted(aborted),
- reserve_binding_slot(reserve_binding_slot),
- unique_shader_module_id(unique_shader_module_id),
- vmaAllocator(vmaAllocator){};
-
- std::vector<GpuBufferInfo> &GetGpuBufferInfo(const VkCommandBuffer command_buffer) {
- auto buffer_list = command_buffer_map.find(command_buffer);
- if (buffer_list == command_buffer_map.end()) {
- std::vector<GpuBufferInfo> new_list{};
- command_buffer_map[command_buffer] = new_list;
- return command_buffer_map[command_buffer];
- }
- return buffer_list->second;
- }
-};
-
using mutex_t = std::mutex;
using lock_guard_t = std::lock_guard<mutex_t>;
using unique_lock_t = std::unique_lock<mutex_t>;
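The GpuDeviceMemoryManager declared above hands out fixed-size blocks carved from larger chunks. A rough sketch of the intended call pattern, based only on the declarations in this header and the uses in the gpu_validation.cpp hunk above (error handling and command-buffer bookkeeping elided):

    GpuDeviceMemoryBlock block = {};
    if (gpu_state->memory_manager->GetBlock(&block) == VK_SUCCESS) {
        // block.buffer/block.offset identify one GetBlockSize()-byte slice of a chunk;
        // bind it into the per-draw descriptor set, then recycle it once the work is retired:
        gpu_state->memory_manager->PutBackBlock(block);
    }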
diff --git a/layers/json/VkLayer_khronos_validation.json.in b/layers/json/VkLayer_khronos_validation.json.in
deleted file mode 100644
index 964fa799f..000000000
--- a/layers/json/VkLayer_khronos_validation.json.in
+++ /dev/null
@@ -1,38 +0,0 @@
-{
- "file_format_version" : "1.1.0",
- "layer" : {
- "name": "VK_LAYER_KHRONOS_validation",
- "type": "GLOBAL",
- "library_path": "@RELATIVE_LAYER_BINARY@",
- "api_version": "@VK_VERSION@",
- "implementation_version": "1",
- "description": "LunarG Validation Layer",
- "instance_extensions": [
- {
- "name": "VK_EXT_debug_report",
- "spec_version": "6"
- }
- ],
- "device_extensions": [
- {
- "name": "VK_EXT_debug_marker",
- "spec_version": "4",
- "entrypoints": ["vkDebugMarkerSetObjectTagEXT",
- "vkDebugMarkerSetObjectNameEXT",
- "vkCmdDebugMarkerBeginEXT",
- "vkCmdDebugMarkerEndEXT",
- "vkCmdDebugMarkerInsertEXT"
- ]
- },
- {
- "name": "VK_EXT_validation_cache",
- "spec_version": "1",
- "entrypoints": ["vkCreateValidationCacheEXT",
- "vkDestroyValidationCacheEXT",
- "vkGetValidationCacheDataEXT",
- "vkMergeValidationCachesEXT"
- ]
- }
- ]
- }
-}
diff --git a/layers/json/VkLayer_standard_validation.json.in b/layers/json/VkLayer_standard_validation.json.in
index dc004589d..4965dd8fc 100644
--- a/layers/json/VkLayer_standard_validation.json.in
+++ b/layers/json/VkLayer_standard_validation.json.in
@@ -7,7 +7,11 @@
"implementation_version": "1",
"description": "LunarG Standard Validation",
"component_layers": [
- "VK_LAYER_KHRONOS_validation"
+ "VK_LAYER_GOOGLE_threading",
+ "VK_LAYER_LUNARG_parameter_validation",
+ "VK_LAYER_LUNARG_object_tracker",
+ "VK_LAYER_LUNARG_core_validation",
+ "VK_LAYER_GOOGLE_unique_objects"
]
}
}
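With this change VK_LAYER_LUNARG_standard_validation is again a meta-layer over the five component layers listed in the manifest; an application still enables only the meta-layer by name and the loader expands it. A minimal sketch using the standard Vulkan API (not part of the diff):

    const char *layers[] = {"VK_LAYER_LUNARG_standard_validation"};
    VkInstanceCreateInfo create_info = {VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO};
    create_info.enabledLayerCount = 1;
    create_info.ppEnabledLayerNames = layers;  // the loader expands the meta-layer into the component layers above
    VkInstance instance;
    vkCreateInstance(&create_info, nullptr, &instance);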
diff --git a/layers/libVkLayer_core_validation.map b/layers/libVkLayer_core_validation.map
deleted file mode 100644
index ad8ad49f9..000000000
--- a/layers/libVkLayer_core_validation.map
+++ /dev/null
@@ -1,10 +0,0 @@
-{
- global:
- vkGetInstanceProcAddr;
- vkGetDeviceProcAddr;
- vkEnumerateInstanceLayerProperties;
- vkEnumerateInstanceExtensionProperties;
- vkNegotiateLoaderLayerInterfaceVersion;
- local:
- *;
-};
diff --git a/layers/libVkLayer_khronos_validation.map b/layers/libVkLayer_khronos_validation.map
deleted file mode 100644
index ad8ad49f9..000000000
--- a/layers/libVkLayer_khronos_validation.map
+++ /dev/null
@@ -1,10 +0,0 @@
-{
- global:
- vkGetInstanceProcAddr;
- vkGetDeviceProcAddr;
- vkEnumerateInstanceLayerProperties;
- vkEnumerateInstanceExtensionProperties;
- vkNegotiateLoaderLayerInterfaceVersion;
- local:
- *;
-};
diff --git a/layers/libVkLayer_object_lifetimes.map b/layers/libVkLayer_object_lifetimes.map
deleted file mode 100644
index ad8ad49f9..000000000
--- a/layers/libVkLayer_object_lifetimes.map
+++ /dev/null
@@ -1,10 +0,0 @@
-{
- global:
- vkGetInstanceProcAddr;
- vkGetDeviceProcAddr;
- vkEnumerateInstanceLayerProperties;
- vkEnumerateInstanceExtensionProperties;
- vkNegotiateLoaderLayerInterfaceVersion;
- local:
- *;
-};
diff --git a/layers/libVkLayer_stateless_validation.map b/layers/libVkLayer_stateless_validation.map
deleted file mode 100644
index ad8ad49f9..000000000
--- a/layers/libVkLayer_stateless_validation.map
+++ /dev/null
@@ -1,10 +0,0 @@
-{
- global:
- vkGetInstanceProcAddr;
- vkGetDeviceProcAddr;
- vkEnumerateInstanceLayerProperties;
- vkEnumerateInstanceExtensionProperties;
- vkNegotiateLoaderLayerInterfaceVersion;
- local:
- *;
-};
diff --git a/layers/libVkLayer_thread_safety.map b/layers/libVkLayer_thread_safety.map
deleted file mode 100644
index ad8ad49f9..000000000
--- a/layers/libVkLayer_thread_safety.map
+++ /dev/null
@@ -1,10 +0,0 @@
-{
- global:
- vkGetInstanceProcAddr;
- vkGetDeviceProcAddr;
- vkEnumerateInstanceLayerProperties;
- vkEnumerateInstanceExtensionProperties;
- vkNegotiateLoaderLayerInterfaceVersion;
- local:
- *;
-};
diff --git a/layers/libVkLayer_unique_objects.map b/layers/libVkLayer_unique_objects.map
deleted file mode 100644
index ad8ad49f9..000000000
--- a/layers/libVkLayer_unique_objects.map
+++ /dev/null
@@ -1,10 +0,0 @@
-{
- global:
- vkGetInstanceProcAddr;
- vkGetDeviceProcAddr;
- vkEnumerateInstanceLayerProperties;
- vkEnumerateInstanceExtensionProperties;
- vkNegotiateLoaderLayerInterfaceVersion;
- local:
- *;
-};
diff --git a/layers/object_lifetime_validation.h b/layers/object_lifetime_validation.h
index bd89dc889..cdf5ee0f3 100644
--- a/layers/object_lifetime_validation.h
+++ b/layers/object_lifetime_validation.h
@@ -20,18 +20,6 @@
* Author: Tobin Ehlis <tobine@google.com>
*/
-// shared_mutex support added in MSVC 2015 update 2
-#if defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && NTDDI_VERSION > NTDDI_WIN10_RS2
-#include <shared_mutex>
-typedef std::shared_mutex object_lifetime_mutex_t;
-typedef std::shared_lock<object_lifetime_mutex_t> read_object_lifetime_mutex_t;
-typedef std::unique_lock<object_lifetime_mutex_t> write_object_lifetime_mutex_t;
-#else
-typedef std::mutex object_lifetime_mutex_t;
-typedef std::unique_lock<object_lifetime_mutex_t> read_object_lifetime_mutex_t;
-typedef std::unique_lock<object_lifetime_mutex_t> write_object_lifetime_mutex_t;
-#endif
-
// Suppress unused warning on Linux
#if defined(__GNUC__)
#define DECORATE_UNUSED __attribute__((unused))
@@ -54,8 +42,14 @@ extern uint64_t object_track_index;
typedef VkFlags ObjectStatusFlags;
enum ObjectStatusFlagBits {
OBJSTATUS_NONE = 0x00000000, // No status is set
- OBJSTATUS_COMMAND_BUFFER_SECONDARY = 0x00000001, // Command Buffer is of type SECONDARY
- OBJSTATUS_CUSTOM_ALLOCATOR = 0x00000002, // Allocated with custom allocator
+ OBJSTATUS_FENCE_IS_SUBMITTED = 0x00000001, // Fence has been submitted
+ OBJSTATUS_VIEWPORT_BOUND = 0x00000002, // Viewport state object has been bound
+ OBJSTATUS_RASTER_BOUND = 0x00000004, // Raster state object has been bound
+ OBJSTATUS_COLOR_BLEND_BOUND = 0x00000008, // Color blend state object has been bound
+ OBJSTATUS_DEPTH_STENCIL_BOUND = 0x00000010, // Depth/stencil state object has been bound
+ OBJSTATUS_GPU_MEM_MAPPED = 0x00000020, // Memory object is currently mapped
+ OBJSTATUS_COMMAND_BUFFER_SECONDARY = 0x00000040, // Command Buffer is of type SECONDARY
+ OBJSTATUS_CUSTOM_ALLOCATOR = 0x00000080, // Allocated with custom allocator
};
// Object and state information structure
@@ -67,57 +61,42 @@ struct ObjTrackState {
std::unique_ptr<std::unordered_set<uint64_t> > child_objects; // Child objects (used for VkDescriptorPool only)
};
-typedef vl_concurrent_unordered_map<uint64_t, std::shared_ptr<ObjTrackState>, 6> object_map_type;
+// Track Queue information
+struct ObjTrackQueueInfo {
+ uint32_t queue_node_index;
+ VkQueue queue;
+};
+
+typedef std::unordered_map<uint64_t, ObjTrackState *> object_map_type;
class ObjectLifetimes : public ValidationObject {
public:
- // Override chassis read/write locks for this validation object
- // This override takes a deferred lock. i.e. it is not acquired.
- // This class does its own locking with a shared mutex.
- virtual std::unique_lock<std::mutex> write_lock() {
- return std::unique_lock<std::mutex>(validation_object_mutex, std::defer_lock);
- }
-
- object_lifetime_mutex_t object_lifetime_mutex;
- write_object_lifetime_mutex_t write_shared_lock() { return write_object_lifetime_mutex_t(object_lifetime_mutex); }
- read_object_lifetime_mutex_t read_shared_lock() { return read_object_lifetime_mutex_t(object_lifetime_mutex); }
-
- std::atomic<uint64_t> num_objects[kVulkanObjectTypeMax + 1];
- std::atomic<uint64_t> num_total_objects;
+ uint64_t num_objects[kVulkanObjectTypeMax + 1];
+ uint64_t num_total_objects;
// Vector of unordered_maps per object type to hold ObjTrackState info
- object_map_type object_map[kVulkanObjectTypeMax + 1];
+ std::vector<object_map_type> object_map;
// Special-case map for swapchain images
- object_map_type swapchainImageMap;
+ std::unordered_map<uint64_t, ObjTrackState *> swapchainImageMap;
+ // Map of queue information structures, one per queue
+ std::unordered_map<VkQueue, ObjTrackQueueInfo *> queue_info_map;
+
+ std::vector<VkQueueFamilyProperties> queue_family_properties;
// Constructor for object lifetime tracking
- ObjectLifetimes() : num_objects{}, num_total_objects(0) {}
-
- void InsertObject(object_map_type &map, uint64_t object_handle, VulkanObjectType object_type,
- std::shared_ptr<ObjTrackState> pNode) {
- bool inserted = map.insert(object_handle, pNode);
- if (!inserted) {
- // The object should not already exist. If we couldn't add it to the map, there was probably
- // a race condition in the app. Report an error and move on.
- VkDebugReportObjectTypeEXT debug_object_type = get_debug_report_enum[object_type];
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, debug_object_type, object_handle, kVUID_ObjectTracker_Info,
- "Couldn't insert %s Object 0x%" PRIxLEAST64
- ", already existed. This should not happen and may indicate a "
- "race condition in the application.",
- object_string[object_type], object_handle);
- }
- }
+ ObjectLifetimes() : num_objects{}, num_total_objects(0), object_map{} { object_map.resize(kVulkanObjectTypeMax + 1); }
bool DeviceReportUndestroyedObjects(VkDevice device, VulkanObjectType object_type, const std::string &error_code);
void DeviceDestroyUndestroyedObjects(VkDevice device, VulkanObjectType object_type);
void CreateQueue(VkDevice device, VkQueue vkObj);
+ void AddQueueInfo(VkDevice device, uint32_t queue_node_index, VkQueue queue);
+ void ValidateQueueFlags(VkQueue queue, const char *function);
void AllocateCommandBuffer(VkDevice device, const VkCommandPool command_pool, const VkCommandBuffer command_buffer,
VkCommandBufferLevel level);
void AllocateDescriptorSet(VkDevice device, VkDescriptorPool descriptor_pool, VkDescriptorSet descriptor_set);
void CreateSwapchainImageObject(VkDevice dispatchable_object, VkImage swapchain_image, VkSwapchainKHR swapchain);
bool ReportUndestroyedObjects(VkDevice device, const std::string &error_code);
void DestroyUndestroyedObjects(VkDevice device);
- bool ValidateDeviceObject(const VulkanTypedHandle &device_typed, const char *invalid_handle_code,
- const char *wrong_device_code);
+ bool ValidateDeviceObject(uint64_t device_handle, const char *invalid_handle_code, const char *wrong_device_code);
void DestroyQueueDataStructures(VkDevice device);
bool ValidateCommandBuffer(VkDevice device, VkCommandPool command_pool, VkCommandBuffer command_buffer);
bool ValidateDescriptorSet(VkDevice device, VkDescriptorPool descriptor_pool, VkDescriptorSet descriptor_set);
@@ -143,13 +122,13 @@ class ObjectLifetimes : public ValidationObject {
auto object_handle = HandleToUint64(object);
if (object_type == kVulkanObjectTypeDevice) {
- return ValidateDeviceObject(VulkanTypedHandle(object, object_type), invalid_handle_code, wrong_device_code);
+ return ValidateDeviceObject(object_handle, invalid_handle_code, wrong_device_code);
}
VkDebugReportObjectTypeEXT debug_object_type = get_debug_report_enum[object_type];
// Look for object in object map
- if (!object_map[object_type].contains(object_handle)) {
+ if (object_map[object_type].find(object_handle) == object_map[object_type].end()) {
// If object is an image, also look for it in the swapchain image map
if ((object_type != kVulkanObjectTypeImage) || (swapchainImageMap.find(object_handle) == swapchainImageMap.end())) {
// Object not found, look for it in other device object maps
@@ -190,13 +169,18 @@ class ObjectLifetimes : public ValidationObject {
void CreateObject(T1 dispatchable_object, T2 object, VulkanObjectType object_type, const VkAllocationCallbacks *pAllocator) {
uint64_t object_handle = HandleToUint64(object);
bool custom_allocator = (pAllocator != nullptr);
- if (!object_map[object_type].contains(object_handle)) {
- auto pNewObjNode = std::make_shared<ObjTrackState>();
+ if (!object_map[object_type].count(object_handle)) {
+ VkDebugReportObjectTypeEXT debug_object_type = get_debug_report_enum[object_type];
+ log_msg(report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, debug_object_type, object_handle, kVUID_ObjectTracker_Info,
+ "OBJ[0x%" PRIxLEAST64 "] : CREATE %s object 0x%" PRIxLEAST64, object_track_index++, object_string[object_type],
+ object_handle);
+
+ ObjTrackState *pNewObjNode = new ObjTrackState;
pNewObjNode->object_type = object_type;
pNewObjNode->status = custom_allocator ? OBJSTATUS_CUSTOM_ALLOCATOR : OBJSTATUS_NONE;
pNewObjNode->handle = object_handle;
- InsertObject(object_map[object_type], object_handle, object_type, pNewObjNode);
+ object_map[object_type][object_handle] = pNewObjNode;
num_objects[object_type]++;
num_total_objects++;
@@ -211,31 +195,27 @@ class ObjectLifetimes : public ValidationObject {
auto object_handle = HandleToUint64(object);
assert(object_handle != VK_NULL_HANDLE);
- auto item = object_map[object_type].pop(object_handle);
- if (item == object_map[object_type].end()) {
- // We've already checked that the object exists. If we couldn't find and atomically remove it
- // from the map, there must have been a race condition in the app. Report an error and move on.
- VkDebugReportObjectTypeEXT debug_object_type = get_debug_report_enum[object_type];
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, debug_object_type, object_handle, kVUID_ObjectTracker_Info,
- "Couldn't destroy %s Object 0x%" PRIxLEAST64
- ", not found. This should not happen and may indicate a "
- "race condition in the application.",
- object_string[object_type], object_handle);
- return;
- }
+ auto item = object_map[object_type].find(object_handle);
+ assert(item != object_map[object_type].end());
+
+ ObjTrackState *pNode = item->second;
assert(num_total_objects > 0);
num_total_objects--;
- assert(num_objects[item->second->object_type] > 0);
+ assert(num_objects[pNode->object_type] > 0);
- num_objects[item->second->object_type]--;
+ num_objects[pNode->object_type]--;
+
+ delete pNode;
+ object_map[object_type].erase(item);
}
template <typename T1, typename T2>
void RecordDestroyObject(T1 dispatchable_object, T2 object, VulkanObjectType object_type) {
auto object_handle = HandleToUint64(object);
if (object_handle != VK_NULL_HANDLE) {
- if (object_map[object_type].contains(object_handle)) {
+ auto item = object_map[object_type].find(object_handle);
+ if (item != object_map[object_type].end()) {
DestroyObjectSilently(object, object_type);
}
}
@@ -250,11 +230,17 @@ class ObjectLifetimes : public ValidationObject {
VkDebugReportObjectTypeEXT debug_object_type = get_debug_report_enum[object_type];
bool skip = false;
- if ((expected_custom_allocator_code != kVUIDUndefined || expected_default_allocator_code != kVUIDUndefined) &&
- object_handle != VK_NULL_HANDLE) {
+ if (object_handle != VK_NULL_HANDLE) {
auto item = object_map[object_type].find(object_handle);
if (item != object_map[object_type].end()) {
- auto allocated_with_custom = (item->second->status & OBJSTATUS_CUSTOM_ALLOCATOR) ? true : false;
+ ObjTrackState *pNode = item->second;
+ skip |= log_msg(report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, debug_object_type, object_handle,
+ kVUID_ObjectTracker_Info,
+ "OBJ_STAT Destroy %s obj 0x%" PRIxLEAST64 " (%" PRIu64 " total objs remain & %" PRIu64 " %s objs).",
+ object_string[object_type], HandleToUint64(object), num_total_objects - 1,
+ num_objects[pNode->object_type] - 1, object_string[object_type]);
+
+ auto allocated_with_custom = (pNode->status & OBJSTATUS_CUSTOM_ALLOCATOR) ? true : false;
if (allocated_with_custom && !custom_allocator && expected_custom_allocator_code != kVUIDUndefined) {
// This check only verifies that custom allocation callbacks were provided to both Create and Destroy calls,
// it cannot verify that these allocation callbacks are compatible with each other.
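The reverted tracker above keys a plain std::unordered_map per object type on the 64-bit handle and stores raw ObjTrackState pointers. A rough illustration of a lookup against those declarations (variable names are hypothetical):

    uint64_t handle = HandleToUint64(command_buffer);
    auto &cb_map = object_map[kVulkanObjectTypeCommandBuffer];
    auto it = cb_map.find(handle);
    if (it != cb_map.end()) {
        ObjTrackState *node = it->second;  // raw pointer, deleted in DestroyObjectSilently
        bool is_secondary = (node->status & OBJSTATUS_COMMAND_BUFFER_SECONDARY) != 0;
    }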
diff --git a/layers/object_tracker_utils.cpp b/layers/object_tracker_utils.cpp
index 1b0eff903..6d5c9aee2 100644
--- a/layers/object_tracker_utils.cpp
+++ b/layers/object_tracker_utils.cpp
@@ -26,45 +26,83 @@
uint64_t object_track_index = 0;
-VulkanTypedHandle ObjTrackStateTypedHandle(const ObjTrackState &track_state) {
- // TODO: Unify Typed Handle representation (i.e. VulkanTypedHandle everywhere there are handle/type pairs)
- VulkanTypedHandle typed_handle;
- typed_handle.handle = track_state.handle;
- typed_handle.type = track_state.object_type;
- return typed_handle;
+// Add new queue to head of global queue list
+void ObjectLifetimes::AddQueueInfo(VkDevice device, uint32_t queue_node_index, VkQueue queue) {
+ auto queueItem = queue_info_map.find(queue);
+ if (queueItem == queue_info_map.end()) {
+ ObjTrackQueueInfo *p_queue_info = new ObjTrackQueueInfo;
+ if (p_queue_info != NULL) {
+ memset(p_queue_info, 0, sizeof(ObjTrackQueueInfo));
+ p_queue_info->queue = queue;
+ p_queue_info->queue_node_index = queue_node_index;
+ queue_info_map[queue] = p_queue_info;
+ } else {
+ log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, HandleToUint64(queue),
+ kVUID_ObjectTracker_InternalError,
+ "ERROR: VK_ERROR_OUT_OF_HOST_MEMORY -- could not allocate memory for Queue Information");
+ }
+ }
}
// Destroy memRef lists and free all memory
void ObjectLifetimes::DestroyQueueDataStructures(VkDevice device) {
+ for (auto queue_item : queue_info_map) {
+ delete queue_item.second;
+ }
+ queue_info_map.clear();
+
// Destroy the items in the queue map
- auto snapshot = object_map[kVulkanObjectTypeQueue].snapshot();
- for (const auto &queue : snapshot) {
- uint32_t obj_index = queue.second->object_type;
+ auto queue = object_map[kVulkanObjectTypeQueue].begin();
+ while (queue != object_map[kVulkanObjectTypeQueue].end()) {
+ uint32_t obj_index = queue->second->object_type;
assert(num_total_objects > 0);
num_total_objects--;
assert(num_objects[obj_index] > 0);
num_objects[obj_index]--;
- object_map[kVulkanObjectTypeQueue].erase(queue.first);
+ log_msg(report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, queue->second->handle,
+ kVUID_ObjectTracker_Info, "OBJ_STAT Destroy Queue obj %s (%" PRIu64 " total objs remain & %" PRIu64 " Queue objs).",
+ report_data->FormatHandle(queue->second->handle).c_str(), num_total_objects, num_objects[obj_index]);
+ delete queue->second;
+ queue = object_map[kVulkanObjectTypeQueue].erase(queue);
+ }
+}
+
+// Check Queue type flags for selected queue operations
+void ObjectLifetimes::ValidateQueueFlags(VkQueue queue, const char *function) {
+ auto queue_item = queue_info_map.find(queue);
+ if (queue_item != queue_info_map.end()) {
+ ObjTrackQueueInfo *pQueueInfo = queue_item->second;
+ if (pQueueInfo != NULL) {
+ if ((queue_family_properties[pQueueInfo->queue_node_index].queueFlags & VK_QUEUE_SPARSE_BINDING_BIT) == 0) {
+ log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, HandleToUint64(queue),
+ "VUID-vkQueueBindSparse-queuetype",
+ "Attempting %s on a non-memory-management capable queue -- VK_QUEUE_SPARSE_BINDING_BIT not set.", function);
+ }
+ }
}
}
// Look for this device object in any of the instance child devices lists.
// NOTE: This is of dubious value. In most circumstances Vulkan will die a flaming death if a dispatchable object is invalid.
// However, if this layer is loaded first and GetProcAddress is used to make API calls, it will detect bad DOs.
-bool ObjectLifetimes::ValidateDeviceObject(const VulkanTypedHandle &device_typed, const char *invalid_handle_code,
- const char *wrong_device_code) {
+bool ObjectLifetimes::ValidateDeviceObject(uint64_t device_handle, const char *invalid_handle_code, const char *wrong_device_code) {
auto instance_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
auto instance_object_lifetime_data = GetObjectLifetimeData(instance_data->object_dispatch);
- if (instance_object_lifetime_data->object_map[kVulkanObjectTypeDevice].contains(device_typed.handle)) {
- return false;
+ for (auto object : instance_object_lifetime_data->object_map[kVulkanObjectTypeDevice]) {
+ if (object.second->handle == device_handle) return false;
}
- return log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, device_typed.handle,
- invalid_handle_code, "Invalid %s.", report_data->FormatHandle(device_typed).c_str());
+ return log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, device_handle,
+ invalid_handle_code, "Invalid Device Object %s.", report_data->FormatHandle(device_handle).c_str());
}
void ObjectLifetimes::AllocateCommandBuffer(VkDevice device, const VkCommandPool command_pool, const VkCommandBuffer command_buffer,
VkCommandBufferLevel level) {
- auto pNewObjNode = std::make_shared<ObjTrackState>();
+ log_msg(report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ HandleToUint64(command_buffer), kVUID_ObjectTracker_Info, "OBJ[0x%" PRIxLEAST64 "] : CREATE %s object %s.",
+ object_track_index++, "VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT",
+ report_data->FormatHandle(command_buffer).c_str());
+
+ ObjTrackState *pNewObjNode = new ObjTrackState;
pNewObjNode->object_type = kVulkanObjectTypeCommandBuffer;
pNewObjNode->handle = HandleToUint64(command_buffer);
pNewObjNode->parent_object = HandleToUint64(command_pool);
@@ -73,8 +111,7 @@ void ObjectLifetimes::AllocateCommandBuffer(VkDevice device, const VkCommandPool
} else {
pNewObjNode->status = OBJSTATUS_NONE;
}
- InsertObject(object_map[kVulkanObjectTypeCommandBuffer], HandleToUint64(command_buffer), kVulkanObjectTypeCommandBuffer,
- pNewObjNode);
+ object_map[kVulkanObjectTypeCommandBuffer][HandleToUint64(command_buffer)] = pNewObjNode;
num_objects[kVulkanObjectTypeCommandBuffer]++;
num_total_objects++;
}
@@ -82,41 +119,44 @@ void ObjectLifetimes::AllocateCommandBuffer(VkDevice device, const VkCommandPool
bool ObjectLifetimes::ValidateCommandBuffer(VkDevice device, VkCommandPool command_pool, VkCommandBuffer command_buffer) {
bool skip = false;
uint64_t object_handle = HandleToUint64(command_buffer);
- auto iter = object_map[kVulkanObjectTypeCommandBuffer].find(object_handle);
- if (iter != object_map[kVulkanObjectTypeCommandBuffer].end()) {
- auto pNode = iter->second;
+ if (object_map[kVulkanObjectTypeCommandBuffer].find(object_handle) != object_map[kVulkanObjectTypeCommandBuffer].end()) {
+ ObjTrackState *pNode = object_map[kVulkanObjectTypeCommandBuffer][HandleToUint64(command_buffer)];
if (pNode->parent_object != HandleToUint64(command_pool)) {
- // We know that the parent *must* be a command pool
- const auto parent_pool = CastFromUint64<VkCommandPool>(pNode->parent_object);
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- object_handle, "VUID-vkFreeCommandBuffers-pCommandBuffers-parent",
- "FreeCommandBuffers is attempting to free %s belonging to %s from %s).",
- report_data->FormatHandle(command_buffer).c_str(), report_data->FormatHandle(parent_pool).c_str(),
- report_data->FormatHandle(command_pool).c_str());
+ skip |=
+ log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, object_handle,
+ "VUID-vkFreeCommandBuffers-pCommandBuffers-parent",
+ "FreeCommandBuffers is attempting to free Command Buffer %s belonging to Command Pool %s from pool %s).",
+ report_data->FormatHandle(command_buffer).c_str(), report_data->FormatHandle(pNode->parent_object).c_str(),
+ report_data->FormatHandle(command_pool).c_str());
}
} else {
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, object_handle,
- "VUID-vkFreeCommandBuffers-pCommandBuffers-00048", "Invalid %s.",
- report_data->FormatHandle(command_buffer).c_str());
+ "VUID-vkFreeCommandBuffers-pCommandBuffers-00048", "Invalid %s Object %s.",
+ object_string[kVulkanObjectTypeCommandBuffer], report_data->FormatHandle(object_handle).c_str());
}
return skip;
}
void ObjectLifetimes::AllocateDescriptorSet(VkDevice device, VkDescriptorPool descriptor_pool, VkDescriptorSet descriptor_set) {
- auto pNewObjNode = std::make_shared<ObjTrackState>();
+ log_msg(report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+ HandleToUint64(descriptor_set), kVUID_ObjectTracker_Info, "OBJ[0x%" PRIxLEAST64 "] : CREATE %s object %s.",
+ object_track_index++, "VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT",
+ report_data->FormatHandle(descriptor_set).c_str());
+
+ ObjTrackState *pNewObjNode = new ObjTrackState;
pNewObjNode->object_type = kVulkanObjectTypeDescriptorSet;
pNewObjNode->status = OBJSTATUS_NONE;
pNewObjNode->handle = HandleToUint64(descriptor_set);
pNewObjNode->parent_object = HandleToUint64(descriptor_pool);
- InsertObject(object_map[kVulkanObjectTypeDescriptorSet], HandleToUint64(descriptor_set), kVulkanObjectTypeDescriptorSet,
- pNewObjNode);
+ object_map[kVulkanObjectTypeDescriptorSet][HandleToUint64(descriptor_set)] = pNewObjNode;
num_objects[kVulkanObjectTypeDescriptorSet]++;
num_total_objects++;
auto itr = object_map[kVulkanObjectTypeDescriptorPool].find(HandleToUint64(descriptor_pool));
if (itr != object_map[kVulkanObjectTypeDescriptorPool].end()) {
- itr->second->child_objects->insert(HandleToUint64(descriptor_set));
+ ObjTrackState *pPoolNode = itr->second;
+ pPoolNode->child_objects->insert(HandleToUint64(descriptor_set));
}
}
@@ -125,20 +165,21 @@ bool ObjectLifetimes::ValidateDescriptorSet(VkDevice device, VkDescriptorPool de
uint64_t object_handle = HandleToUint64(descriptor_set);
auto dsItem = object_map[kVulkanObjectTypeDescriptorSet].find(object_handle);
if (dsItem != object_map[kVulkanObjectTypeDescriptorSet].end()) {
- if (dsItem->second->parent_object != HandleToUint64(descriptor_pool)) {
- // We know that the parent *must* be a descriptor pool
- const auto parent_pool = CastFromUint64<VkDescriptorPool>(dsItem->second->parent_object);
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
- object_handle, "VUID-vkFreeDescriptorSets-pDescriptorSets-parent",
- "FreeDescriptorSets is attempting to free %s"
- " belonging to %s from %s).",
- report_data->FormatHandle(descriptor_set).c_str(), report_data->FormatHandle(parent_pool).c_str(),
- report_data->FormatHandle(descriptor_pool).c_str());
+ ObjTrackState *pNode = dsItem->second;
+
+ if (pNode->parent_object != HandleToUint64(descriptor_pool)) {
+ skip |=
+ log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, object_handle,
+ "VUID-vkFreeDescriptorSets-pDescriptorSets-parent",
+ "FreeDescriptorSets is attempting to free descriptorSet %s"
+ " belonging to Descriptor Pool %s from pool %s).",
+ report_data->FormatHandle(descriptor_set).c_str(), report_data->FormatHandle(pNode->parent_object).c_str(),
+ report_data->FormatHandle(descriptor_pool).c_str());
}
} else {
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, object_handle,
- "VUID-vkFreeDescriptorSets-pDescriptorSets-00310", "Invalid %s.",
- report_data->FormatHandle(descriptor_set).c_str());
+ "VUID-vkFreeDescriptorSets-pDescriptorSets-00310", "Invalid %s Object %s.",
+ object_string[kVulkanObjectTypeDescriptorSet], report_data->FormatHandle(object_handle).c_str());
}
return skip;
}
@@ -201,11 +242,15 @@ bool ObjectLifetimes::PreCallValidateCmdPushDescriptorSetKHR(VkCommandBuffer com
}
void ObjectLifetimes::CreateQueue(VkDevice device, VkQueue vkObj) {
- std::shared_ptr<ObjTrackState> p_obj_node = NULL;
+ log_msg(report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, HandleToUint64(vkObj),
+ kVUID_ObjectTracker_Info, "OBJ[0x%" PRIxLEAST64 "] : CREATE %s object %s", object_track_index++,
+ "VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT", report_data->FormatHandle(vkObj).c_str());
+
+ ObjTrackState *p_obj_node = NULL;
auto queue_item = object_map[kVulkanObjectTypeQueue].find(HandleToUint64(vkObj));
if (queue_item == object_map[kVulkanObjectTypeQueue].end()) {
- p_obj_node = std::make_shared<ObjTrackState>();
- InsertObject(object_map[kVulkanObjectTypeQueue], HandleToUint64(vkObj), kVulkanObjectTypeQueue, p_obj_node);
+ p_obj_node = new ObjTrackState;
+ object_map[kVulkanObjectTypeQueue][HandleToUint64(vkObj)] = p_obj_node;
num_objects[kVulkanObjectTypeQueue]++;
num_total_objects++;
} else {
@@ -217,33 +262,35 @@ void ObjectLifetimes::CreateQueue(VkDevice device, VkQueue vkObj) {
}
void ObjectLifetimes::CreateSwapchainImageObject(VkDevice dispatchable_object, VkImage swapchain_image, VkSwapchainKHR swapchain) {
- if (!swapchainImageMap.contains(HandleToUint64(swapchain_image))) {
- auto pNewObjNode = std::make_shared<ObjTrackState>();
- pNewObjNode->object_type = kVulkanObjectTypeImage;
- pNewObjNode->status = OBJSTATUS_NONE;
- pNewObjNode->handle = HandleToUint64(swapchain_image);
- pNewObjNode->parent_object = HandleToUint64(swapchain);
- InsertObject(swapchainImageMap, HandleToUint64(swapchain_image), kVulkanObjectTypeImage, pNewObjNode);
- }
+ log_msg(report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+ HandleToUint64(swapchain_image), kVUID_ObjectTracker_Info, "OBJ[0x%" PRIxLEAST64 "] : CREATE %s object %s.",
+ object_track_index++, "SwapchainImage", report_data->FormatHandle(swapchain_image).c_str());
+
+ ObjTrackState *pNewObjNode = new ObjTrackState;
+ pNewObjNode->object_type = kVulkanObjectTypeImage;
+ pNewObjNode->status = OBJSTATUS_NONE;
+ pNewObjNode->handle = HandleToUint64(swapchain_image);
+ pNewObjNode->parent_object = HandleToUint64(swapchain);
+ swapchainImageMap[HandleToUint64(swapchain_image)] = pNewObjNode;
}
bool ObjectLifetimes::DeviceReportUndestroyedObjects(VkDevice device, VulkanObjectType object_type, const std::string &error_code) {
bool skip = false;
-
- auto snapshot = object_map[object_type].snapshot();
- for (const auto &item : snapshot) {
- const auto object_info = item.second;
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, get_debug_report_enum[object_type], object_info->handle,
- error_code, "OBJ ERROR : For %s, %s has not been destroyed.", report_data->FormatHandle(device).c_str(),
- report_data->FormatHandle(ObjTrackStateTypedHandle(*object_info)).c_str());
+ for (const auto &item : object_map[object_type]) {
+ const ObjTrackState *object_info = item.second;
+ skip |=
+ log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, get_debug_report_enum[object_type], object_info->handle, error_code,
+ "OBJ ERROR : For device %s, %s object %s has not been destroyed.", report_data->FormatHandle(device).c_str(),
+ object_string[object_type], report_data->FormatHandle(object_info->handle).c_str());
}
return skip;
}
void ObjectLifetimes::DeviceDestroyUndestroyedObjects(VkDevice device, VulkanObjectType object_type) {
- auto snapshot = object_map[object_type].snapshot();
- for (const auto &item : snapshot) {
- auto object_info = item.second;
+ while (!object_map[object_type].empty()) {
+ auto item = object_map[object_type].begin();
+
+ ObjTrackState *object_info = item->second;
DestroyObjectSilently(object_info->handle, object_type);
}
}
@@ -256,17 +303,15 @@ bool ObjectLifetimes::PreCallValidateDestroyInstance(VkInstance instance, const
kVUIDUndefined);
// Validate that child devices have been destroyed
- auto snapshot = object_map[kVulkanObjectTypeDevice].snapshot();
- for (const auto &iit : snapshot) {
- auto pNode = iit.second;
+ for (const auto &iit : object_map[kVulkanObjectTypeDevice]) {
+ ObjTrackState *pNode = iit.second;
VkDevice device = reinterpret_cast<VkDevice>(pNode->handle);
VkDebugReportObjectTypeEXT debug_object_type = get_debug_report_enum[pNode->object_type];
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, debug_object_type, pNode->handle, kVUID_ObjectTracker_ObjectLeak,
- "OBJ ERROR : %s object %s has not been destroyed.", string_VkDebugReportObjectTypeEXT(debug_object_type),
- report_data->FormatHandle(ObjTrackStateTypedHandle(*pNode)).c_str());
+ skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, debug_object_type, pNode->handle,
+ kVUID_ObjectTracker_ObjectLeak, "OBJ ERROR : %s object %s has not been destroyed.",
+ string_VkDebugReportObjectTypeEXT(debug_object_type), report_data->FormatHandle(pNode->handle).c_str());
// Report any remaining objects in LL
skip |= ReportUndestroyedObjects(device, "VUID-vkDestroyInstance-instance-00629");
@@ -300,22 +345,25 @@ void ObjectLifetimes::PostCallRecordEnumeratePhysicalDevices(VkInstance instance
void ObjectLifetimes::PreCallRecordDestroyInstance(VkInstance instance, const VkAllocationCallbacks *pAllocator) {
// Destroy physical devices
- auto snapshot = object_map[kVulkanObjectTypePhysicalDevice].snapshot();
- for (const auto &iit : snapshot) {
- auto pNode = iit.second;
+ for (auto iit = object_map[kVulkanObjectTypePhysicalDevice].begin();
+ iit != object_map[kVulkanObjectTypePhysicalDevice].end();) {
+ ObjTrackState *pNode = iit->second;
VkPhysicalDevice physical_device = reinterpret_cast<VkPhysicalDevice>(pNode->handle);
RecordDestroyObject(instance, physical_device, kVulkanObjectTypePhysicalDevice);
+ iit = object_map[kVulkanObjectTypePhysicalDevice].begin();
}
// Destroy child devices
- auto snapshot2 = object_map[kVulkanObjectTypeDevice].snapshot();
- for (const auto &iit : snapshot2) {
- auto pNode = iit.second;
+ for (auto iit = object_map[kVulkanObjectTypeDevice].begin(); iit != object_map[kVulkanObjectTypeDevice].end();) {
+ ObjTrackState *pNode = iit->second;
VkDevice device = reinterpret_cast<VkDevice>(pNode->handle);
DestroyUndestroyedObjects(device);
RecordDestroyObject(instance, device, kVulkanObjectTypeDevice);
+ iit = object_map[kVulkanObjectTypeDevice].begin();
}
+
+ object_map[kVulkanObjectTypeDevice].clear();
}
void ObjectLifetimes::PostCallRecordDestroyInstance(VkInstance instance, const VkAllocationCallbacks *pAllocator) {
@@ -354,8 +402,8 @@ bool ObjectLifetimes::PreCallValidateGetDeviceQueue(VkDevice device, uint32_t qu
void ObjectLifetimes::PostCallRecordGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex,
VkQueue *pQueue) {
- auto lock = write_shared_lock();
CreateQueue(device, *pQueue);
+ AddQueueInfo(device, queueFamilyIndex, *pQueue);
}
bool ObjectLifetimes::PreCallValidateGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2 *pQueueInfo, VkQueue *pQueue) {
@@ -364,8 +412,8 @@ bool ObjectLifetimes::PreCallValidateGetDeviceQueue2(VkDevice device, const VkDe
}
void ObjectLifetimes::PostCallRecordGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2 *pQueueInfo, VkQueue *pQueue) {
- auto lock = write_shared_lock();
CreateQueue(device, *pQueue);
+ AddQueueInfo(device, pQueueInfo->queueFamilyIndex, *pQueue);
}
bool ObjectLifetimes::PreCallValidateUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
@@ -398,7 +446,6 @@ bool ObjectLifetimes::PreCallValidateUpdateDescriptorSets(VkDevice device, uint3
bool ObjectLifetimes::PreCallValidateResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
VkDescriptorPoolResetFlags flags) {
bool skip = false;
- auto lock = read_shared_lock();
skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkResetDescriptorPool-device-parameter",
kVUIDUndefined);
@@ -408,7 +455,7 @@ bool ObjectLifetimes::PreCallValidateResetDescriptorPool(VkDevice device, VkDesc
auto itr = object_map[kVulkanObjectTypeDescriptorPool].find(HandleToUint64(descriptorPool));
if (itr != object_map[kVulkanObjectTypeDescriptorPool].end()) {
- auto pPoolNode = itr->second;
+ ObjTrackState *pPoolNode = itr->second;
for (auto set : *pPoolNode->child_objects) {
skip |= ValidateDestroyObject(device, (VkDescriptorSet)set, kVulkanObjectTypeDescriptorSet, nullptr, kVUIDUndefined,
kVUIDUndefined);
@@ -419,12 +466,11 @@ bool ObjectLifetimes::PreCallValidateResetDescriptorPool(VkDevice device, VkDesc
void ObjectLifetimes::PreCallRecordResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
VkDescriptorPoolResetFlags flags) {
- auto lock = write_shared_lock();
// A DescriptorPool's descriptor sets are implicitly deleted when the pool is reset. Remove this pool's descriptor sets from
// our descriptorSet map.
auto itr = object_map[kVulkanObjectTypeDescriptorPool].find(HandleToUint64(descriptorPool));
if (itr != object_map[kVulkanObjectTypeDescriptorPool].end()) {
- auto pPoolNode = itr->second;
+ ObjTrackState *pPoolNode = itr->second;
for (auto set : *pPoolNode->child_objects) {
RecordDestroyObject(device, (VkDescriptorSet)set, kVulkanObjectTypeDescriptorSet);
}
@@ -438,18 +484,13 @@ bool ObjectLifetimes::PreCallValidateBeginCommandBuffer(VkCommandBuffer command_
skip |= ValidateObject(command_buffer, command_buffer, kVulkanObjectTypeCommandBuffer, false,
"VUID-vkBeginCommandBuffer-commandBuffer-parameter", kVUIDUndefined);
if (begin_info) {
- auto iter = object_map[kVulkanObjectTypeCommandBuffer].find(HandleToUint64(command_buffer));
- if (iter != object_map[kVulkanObjectTypeCommandBuffer].end()) {
- auto pNode = iter->second;
- if ((begin_info->pInheritanceInfo) && (pNode->status & OBJSTATUS_COMMAND_BUFFER_SECONDARY) &&
- (begin_info->flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
- skip |=
- ValidateObject(command_buffer, begin_info->pInheritanceInfo->framebuffer, kVulkanObjectTypeFramebuffer, true,
+ ObjTrackState *pNode = object_map[kVulkanObjectTypeCommandBuffer][HandleToUint64(command_buffer)];
+ if ((begin_info->pInheritanceInfo) && (pNode->status & OBJSTATUS_COMMAND_BUFFER_SECONDARY) &&
+ (begin_info->flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
+ skip |= ValidateObject(command_buffer, begin_info->pInheritanceInfo->framebuffer, kVulkanObjectTypeFramebuffer, true,
"VUID-VkCommandBufferBeginInfo-flags-00055", "VUID-VkCommandBufferInheritanceInfo-commonparent");
- skip |=
- ValidateObject(command_buffer, begin_info->pInheritanceInfo->renderPass, kVulkanObjectTypeRenderPass, false,
+ skip |= ValidateObject(command_buffer, begin_info->pInheritanceInfo->renderPass, kVulkanObjectTypeRenderPass, false,
"VUID-VkCommandBufferBeginInfo-flags-00053", "VUID-VkCommandBufferInheritanceInfo-commonparent");
- }
}
}
return skip;
@@ -468,7 +509,6 @@ bool ObjectLifetimes::PreCallValidateGetSwapchainImagesKHR(VkDevice device, VkSw
void ObjectLifetimes::PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t *pSwapchainImageCount,
VkImage *pSwapchainImages, VkResult result) {
if ((result != VK_SUCCESS) && (result != VK_INCOMPLETE)) return;
- auto lock = write_shared_lock();
if (pSwapchainImages != NULL) {
for (uint32_t i = 0; i < *pSwapchainImageCount; i++) {
CreateSwapchainImageObject(device, pSwapchainImages[i], swapchain);
@@ -554,7 +594,16 @@ bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceQueueFamilyProperties(VkPh
void ObjectLifetimes::PostCallRecordGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
uint32_t *pQueueFamilyPropertyCount,
- VkQueueFamilyProperties *pQueueFamilyProperties) {}
+ VkQueueFamilyProperties *pQueueFamilyProperties) {
+ if (pQueueFamilyProperties != NULL) {
+ if (queue_family_properties.size() < *pQueueFamilyPropertyCount) {
+ queue_family_properties.resize(*pQueueFamilyPropertyCount);
+ }
+ for (uint32_t i = 0; i < *pQueueFamilyPropertyCount; i++) {
+ queue_family_properties[i] = pQueueFamilyProperties[i];
+ }
+ }
+}
void ObjectLifetimes::PostCallRecordCreateInstance(const VkInstanceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator,
VkInstance *pInstance, VkResult result) {
@@ -583,7 +632,6 @@ void ObjectLifetimes::PostCallRecordAllocateCommandBuffers(VkDevice device, cons
bool ObjectLifetimes::PreCallValidateAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
VkDescriptorSet *pDescriptorSets) {
bool skip = false;
- auto lock = read_shared_lock();
skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkAllocateDescriptorSets-device-parameter",
kVUIDUndefined);
skip |= ValidateObject(device, pAllocateInfo->descriptorPool, kVulkanObjectTypeDescriptorPool, false,
@@ -600,7 +648,6 @@ bool ObjectLifetimes::PreCallValidateAllocateDescriptorSets(VkDevice device, con
void ObjectLifetimes::PostCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
VkDescriptorSet *pDescriptorSets, VkResult result) {
if (result != VK_SUCCESS) return;
- auto lock = write_shared_lock();
for (uint32_t i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
AllocateDescriptorSet(device, pAllocateInfo->descriptorPool, pDescriptorSets[i]);
}
@@ -639,17 +686,21 @@ bool ObjectLifetimes::PreCallValidateDestroySwapchainKHR(VkDevice device, VkSwap
void ObjectLifetimes::PreCallRecordDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain,
const VkAllocationCallbacks *pAllocator) {
RecordDestroyObject(device, swapchain, kVulkanObjectTypeSwapchainKHR);
-
- auto snapshot = swapchainImageMap.snapshot(
- [swapchain](std::shared_ptr<ObjTrackState> pNode) { return pNode->parent_object == HandleToUint64(swapchain); });
- for (const auto &itr : snapshot) {
- swapchainImageMap.erase(itr.first);
+ std::unordered_map<uint64_t, ObjTrackState *>::iterator itr = swapchainImageMap.begin();
+ while (itr != swapchainImageMap.end()) {
+ ObjTrackState *pNode = (*itr).second;
+ if (pNode->parent_object == HandleToUint64(swapchain)) {
+ delete pNode;
+ auto delete_item = itr++;
+ swapchainImageMap.erase(delete_item);
+ } else {
+ ++itr;
+ }
}
}
bool ObjectLifetimes::PreCallValidateFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool,
uint32_t descriptorSetCount, const VkDescriptorSet *pDescriptorSets) {
- auto lock = read_shared_lock();
bool skip = false;
skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkFreeDescriptorSets-device-parameter",
kVUIDUndefined);
@@ -666,8 +717,7 @@ bool ObjectLifetimes::PreCallValidateFreeDescriptorSets(VkDevice device, VkDescr
}
void ObjectLifetimes::PreCallRecordFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount,
const VkDescriptorSet *pDescriptorSets) {
- auto lock = write_shared_lock();
- std::shared_ptr<ObjTrackState> pPoolNode = nullptr;
+ ObjTrackState *pPoolNode = nullptr;
auto itr = object_map[kVulkanObjectTypeDescriptorPool].find(HandleToUint64(descriptorPool));
if (itr != object_map[kVulkanObjectTypeDescriptorPool].end()) {
pPoolNode = itr->second;
@@ -682,7 +732,6 @@ void ObjectLifetimes::PreCallRecordFreeDescriptorSets(VkDevice device, VkDescrip
bool ObjectLifetimes::PreCallValidateDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
const VkAllocationCallbacks *pAllocator) {
- auto lock = read_shared_lock();
bool skip = false;
skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkDestroyDescriptorPool-device-parameter",
kVUIDUndefined);
@@ -692,7 +741,7 @@ bool ObjectLifetimes::PreCallValidateDestroyDescriptorPool(VkDevice device, VkDe
auto itr = object_map[kVulkanObjectTypeDescriptorPool].find(HandleToUint64(descriptorPool));
if (itr != object_map[kVulkanObjectTypeDescriptorPool].end()) {
- auto pPoolNode = itr->second;
+ ObjTrackState *pPoolNode = itr->second;
for (auto set : *pPoolNode->child_objects) {
skip |= ValidateDestroyObject(device, (VkDescriptorSet)set, kVulkanObjectTypeDescriptorSet, nullptr, kVUIDUndefined,
kVUIDUndefined);
@@ -705,10 +754,9 @@ bool ObjectLifetimes::PreCallValidateDestroyDescriptorPool(VkDevice device, VkDe
}
void ObjectLifetimes::PreCallRecordDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
const VkAllocationCallbacks *pAllocator) {
- auto lock = write_shared_lock();
auto itr = object_map[kVulkanObjectTypeDescriptorPool].find(HandleToUint64(descriptorPool));
if (itr != object_map[kVulkanObjectTypeDescriptorPool].end()) {
- auto pPoolNode = itr->second;
+ ObjTrackState *pPoolNode = itr->second;
for (auto set : *pPoolNode->child_objects) {
RecordDestroyObject(device, (VkDescriptorSet)set, kVulkanObjectTypeDescriptorSet);
}
@@ -724,14 +772,16 @@ bool ObjectLifetimes::PreCallValidateDestroyCommandPool(VkDevice device, VkComma
kVUIDUndefined);
skip |= ValidateObject(device, commandPool, kVulkanObjectTypeCommandPool, true,
"VUID-vkDestroyCommandPool-commandPool-parameter", "VUID-vkDestroyCommandPool-commandPool-parent");
-
- auto snapshot = object_map[kVulkanObjectTypeCommandBuffer].snapshot(
- [commandPool](std::shared_ptr<ObjTrackState> pNode) { return pNode->parent_object == HandleToUint64(commandPool); });
- for (const auto &itr : snapshot) {
- auto pNode = itr.second;
- skip |= ValidateCommandBuffer(device, commandPool, reinterpret_cast<VkCommandBuffer>(itr.first));
- skip |= ValidateDestroyObject(device, reinterpret_cast<VkCommandBuffer>(itr.first), kVulkanObjectTypeCommandBuffer, nullptr,
- kVUIDUndefined, kVUIDUndefined);
+ auto itr = object_map[kVulkanObjectTypeCommandBuffer].begin();
+ auto del_itr = itr;
+ while (itr != object_map[kVulkanObjectTypeCommandBuffer].end()) {
+ ObjTrackState *pNode = (*itr).second;
+ del_itr = itr++;
+ if (pNode->parent_object == HandleToUint64(commandPool)) {
+ skip |= ValidateCommandBuffer(device, commandPool, reinterpret_cast<VkCommandBuffer>((*del_itr).first));
+ skip |= ValidateDestroyObject(device, reinterpret_cast<VkCommandBuffer>((*del_itr).first),
+ kVulkanObjectTypeCommandBuffer, nullptr, kVUIDUndefined, kVUIDUndefined);
+ }
}
skip |= ValidateDestroyObject(device, commandPool, kVulkanObjectTypeCommandPool, pAllocator,
"VUID-vkDestroyCommandPool-commandPool-00042", "VUID-vkDestroyCommandPool-commandPool-00043");
@@ -740,11 +790,15 @@ bool ObjectLifetimes::PreCallValidateDestroyCommandPool(VkDevice device, VkComma
void ObjectLifetimes::PreCallRecordDestroyCommandPool(VkDevice device, VkCommandPool commandPool,
const VkAllocationCallbacks *pAllocator) {
- auto snapshot = object_map[kVulkanObjectTypeCommandBuffer].snapshot(
- [commandPool](std::shared_ptr<ObjTrackState> pNode) { return pNode->parent_object == HandleToUint64(commandPool); });
+ auto itr = object_map[kVulkanObjectTypeCommandBuffer].begin();
+ auto del_itr = itr;
// A CommandPool's cmd buffers are implicitly deleted when pool is deleted. Remove this pool's cmdBuffers from cmd buffer map.
- for (const auto &itr : snapshot) {
- RecordDestroyObject(device, reinterpret_cast<VkCommandBuffer>(itr.first), kVulkanObjectTypeCommandBuffer);
+ while (itr != object_map[kVulkanObjectTypeCommandBuffer].end()) {
+ ObjTrackState *pNode = (*itr).second;
+ del_itr = itr++;
+ if (pNode->parent_object == HandleToUint64(commandPool)) {
+ RecordDestroyObject(device, reinterpret_cast<VkCommandBuffer>((*del_itr).first), kVulkanObjectTypeCommandBuffer);
+ }
}
RecordDestroyObject(device, commandPool, kVulkanObjectTypeCommandPool);
}
@@ -765,10 +819,28 @@ bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceQueueFamilyProperties2KHR(
void ObjectLifetimes::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2(VkPhysicalDevice physicalDevice,
uint32_t *pQueueFamilyPropertyCount,
- VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {}
+ VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
+ if (pQueueFamilyProperties != NULL) {
+ if (queue_family_properties.size() < *pQueueFamilyPropertyCount) {
+ queue_family_properties.resize(*pQueueFamilyPropertyCount);
+ }
+ for (uint32_t i = 0; i < *pQueueFamilyPropertyCount; i++) {
+ queue_family_properties[i] = pQueueFamilyProperties[i].queueFamilyProperties;
+ }
+ }
+}
void ObjectLifetimes::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(
- VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {}
+ VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
+ if (pQueueFamilyProperties != NULL) {
+ if (queue_family_properties.size() < *pQueueFamilyPropertyCount) {
+ queue_family_properties.resize(*pQueueFamilyPropertyCount);
+ }
+ for (uint32_t i = 0; i < *pQueueFamilyPropertyCount; i++) {
+ queue_family_properties[i] = pQueueFamilyProperties[i].queueFamilyProperties;
+ }
+ }
+}
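
Both overloads added above record the same thing: the core VkQueueFamilyProperties embedded in each VkQueueFamilyProperties2KHR element, cached so later queue-family-index checks can consult it. A stripped-down sketch of that copy, assuming the cache is a std::vector<VkQueueFamilyProperties> (the helper name here is illustrative):

    #include <vulkan/vulkan.h>
    #include <vector>

    // Unwrap the ...2KHR structures and cache the core VkQueueFamilyProperties,
    // growing the cache only when the incoming count is larger.
    void CacheQueueFamilyProperties(std::vector<VkQueueFamilyProperties> &cache, uint32_t count,
                                    const VkQueueFamilyProperties2KHR *props) {
        if (props == nullptr) return;  // count-query call; nothing to record yet
        if (cache.size() < count) cache.resize(count);
        for (uint32_t i = 0; i < count; ++i) {
            cache[i] = props[i].queueFamilyProperties;
        }
    }
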
bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceDisplayPropertiesKHR(VkPhysicalDevice physicalDevice,
uint32_t *pPropertyCount,
@@ -847,58 +919,3 @@ void ObjectLifetimes::PostCallRecordGetDisplayModeProperties2KHR(VkPhysicalDevic
nullptr);
}
}
-
-bool ObjectLifetimes::PreCallValidateAcquirePerformanceConfigurationINTEL(
- VkDevice device, const VkPerformanceConfigurationAcquireInfoINTEL *pAcquireInfo,
- VkPerformanceConfigurationINTEL *pConfiguration) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false,
- "VUID-vkAcquirePerformanceConfigurationINTEL-device-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateReleasePerformanceConfigurationINTEL(VkDevice device,
- VkPerformanceConfigurationINTEL configuration) {
- bool skip = false;
- skip |= ValidateObject(device, device, kVulkanObjectTypeDevice, false,
- "VUID-vkReleasePerformanceConfigurationINTEL-device-parameter", kVUIDUndefined);
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateQueueSetPerformanceConfigurationINTEL(VkQueue queue,
- VkPerformanceConfigurationINTEL configuration) {
- bool skip = false;
- skip |=
- ValidateObject(queue, queue, kVulkanObjectTypeQueue, false, "VUID-vkQueueSetPerformanceConfigurationINTEL-queue-parameter",
- "VUID-vkQueueSetPerformanceConfigurationINTEL-commonparent");
-
- return skip;
-}
-
-bool ObjectLifetimes::PreCallValidateCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
- const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer) {
- bool skip = false;
- skip |=
- ValidateObject(device, device, kVulkanObjectTypeDevice, false, "VUID-vkCreateFramebuffer-device-parameter", kVUIDUndefined);
- if (pCreateInfo) {
- skip |= ValidateObject(device, pCreateInfo->renderPass, kVulkanObjectTypeRenderPass, false,
- "VUID-VkFramebufferCreateInfo-renderPass-parameter", "VUID-VkFramebufferCreateInfo-commonparent");
- if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) == 0) {
- for (uint32_t index1 = 0; index1 < pCreateInfo->attachmentCount; ++index1) {
- skip |= ValidateObject(device, pCreateInfo->pAttachments[index1], kVulkanObjectTypeImageView, true, kVUIDUndefined,
- "VUID-VkFramebufferCreateInfo-commonparent");
- }
- }
- }
-
- return skip;
-}
-
-void ObjectLifetimes::PostCallRecordCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
- const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer,
- VkResult result) {
- if (result != VK_SUCCESS) return;
- CreateObject(device, *pFramebuffer, kVulkanObjectTypeFramebuffer, pAllocator);
-}
diff --git a/layers/parameter_validation_utils.cpp b/layers/parameter_validation_utils.cpp
index 102509ca5..5cc5295e1 100644
--- a/layers/parameter_validation_utils.cpp
+++ b/layers/parameter_validation_utils.cpp
@@ -25,7 +25,6 @@
#include "chassis.h"
#include "stateless_validation.h"
-#include "layer_chassis_dispatch.h"
static const int MaxParamCheckerStringLength = 256;
@@ -115,21 +114,6 @@ void StatelessValidation::PostCallRecordCreateInstance(const VkInstanceCreateInf
this->instance_extensions = instance_data->instance_extensions;
}
-void StatelessValidation::PostCallRecordQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo, VkResult result) {
- for (uint32_t i = 0; i < pPresentInfo->swapchainCount; ++i) {
- auto swapchains_result = pPresentInfo->pResults ? pPresentInfo->pResults[i] : result;
- if (swapchains_result == VK_SUBOPTIMAL_KHR) {
- log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
- HandleToUint64(pPresentInfo->pSwapchains[i]), kVUID_PVPerfWarn_SuboptimalSwapchain,
- "vkQueuePresentKHR: %s :VK_SUBOPTIMAL_KHR was returned. VK_SUBOPTIMAL_KHR - Presentation will still succeed, "
- "subject to the window resize behavior, but the swapchain is no longer configured optimally for the surface it "
- "targets. Applications should query updated surface information and recreate their swapchain at the next "
- "convenient opportunity.",
- report_data->FormatHandle(pPresentInfo->pSwapchains[i]).c_str());
- }
- }
-}
-
void StatelessValidation::PostCallRecordCreateDevice(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator, VkDevice *pDevice, VkResult result) {
auto device_data = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
@@ -142,14 +126,15 @@ void StatelessValidation::PostCallRecordCreateDevice(VkPhysicalDevice physicalDe
VkPhysicalDeviceProperties device_properties = {};
// Need to get instance and do a getlayerdata call...
- DispatchGetPhysicalDeviceProperties(physicalDevice, &device_properties);
+ ValidationObject *instance_object = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+ instance_object->instance_dispatch_table.GetPhysicalDeviceProperties(physicalDevice, &device_properties);
memcpy(&stateless_validation->device_limits, &device_properties.limits, sizeof(VkPhysicalDeviceLimits));
if (device_extensions.vk_nv_shading_rate_image) {
// Get the needed shading rate image limits
auto shading_rate_image_props = lvl_init_struct<VkPhysicalDeviceShadingRateImagePropertiesNV>();
auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&shading_rate_image_props);
- DispatchGetPhysicalDeviceProperties2KHR(physicalDevice, &prop2);
+ instance_object->instance_dispatch_table.GetPhysicalDeviceProperties2KHR(physicalDevice, &prop2);
phys_dev_ext_props.shading_rate_image_props = shading_rate_image_props;
}
@@ -157,35 +142,26 @@ void StatelessValidation::PostCallRecordCreateDevice(VkPhysicalDevice physicalDe
// Get the needed mesh shader limits
auto mesh_shader_props = lvl_init_struct<VkPhysicalDeviceMeshShaderPropertiesNV>();
auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&mesh_shader_props);
- DispatchGetPhysicalDeviceProperties2KHR(physicalDevice, &prop2);
+ instance_object->instance_dispatch_table.GetPhysicalDeviceProperties2KHR(physicalDevice, &prop2);
phys_dev_ext_props.mesh_shader_props = mesh_shader_props;
}
- if (device_extensions.vk_nv_ray_tracing) {
- // Get the needed ray tracing limits
- auto ray_tracing_props = lvl_init_struct<VkPhysicalDeviceRayTracingPropertiesNV>();
- auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&ray_tracing_props);
- DispatchGetPhysicalDeviceProperties2KHR(physicalDevice, &prop2);
- phys_dev_ext_props.ray_tracing_props = ray_tracing_props;
- }
-
stateless_validation->phys_dev_ext_props = this->phys_dev_ext_props;
// Save app-enabled features in this device's validation object
// The enabled features can come from either pEnabledFeatures, or from the pNext chain
- const auto *features2 = lvl_find_in_chain<VkPhysicalDeviceFeatures2>(pCreateInfo->pNext);
- safe_VkPhysicalDeviceFeatures2 tmp_features2_state;
- tmp_features2_state.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
- if (features2) {
- tmp_features2_state.features = features2->features;
- } else if (pCreateInfo->pEnabledFeatures) {
- tmp_features2_state.features = *pCreateInfo->pEnabledFeatures;
+ const VkPhysicalDeviceFeatures *enabled_features_found = pCreateInfo->pEnabledFeatures;
+ if ((nullptr == enabled_features_found) && device_extensions.vk_khr_get_physical_device_properties_2) {
+ const auto *features2 = lvl_find_in_chain<VkPhysicalDeviceFeatures2KHR>(pCreateInfo->pNext);
+ if (features2) {
+ enabled_features_found = &(features2->features);
+ }
+ }
+ if (enabled_features_found) {
+ stateless_validation->physical_device_features = *enabled_features_found;
} else {
- tmp_features2_state.features = {};
+ memset(&stateless_validation->physical_device_features, 0, sizeof(VkPhysicalDeviceFeatures));
}
- // Use pCreateInfo->pNext to get full chain
- tmp_features2_state.pNext = SafePnextCopy(pCreateInfo->pNext);
- stateless_validation->physical_device_features2 = tmp_features2_state;
}
bool StatelessValidation::manual_PreCallValidateCreateDevice(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo *pCreateInfo,
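
The restored recording logic above prefers pCreateInfo->pEnabledFeatures and falls back to a VkPhysicalDeviceFeatures2(KHR) chained on pNext, zero-initializing when neither is present. A self-contained sketch of that lookup, hand-rolling the pNext walk instead of the layer's lvl_find_in_chain helper and omitting the extension gate for brevity (the function name is illustrative):

    #include <vulkan/vulkan.h>

    // Capture the app-enabled features, preferring pEnabledFeatures and
    // otherwise searching the pNext chain for VkPhysicalDeviceFeatures2.
    VkPhysicalDeviceFeatures CaptureEnabledFeatures(const VkDeviceCreateInfo *ci) {
        VkPhysicalDeviceFeatures features = {};
        const VkPhysicalDeviceFeatures *found = ci->pEnabledFeatures;
        if (found == nullptr) {
            const VkBaseInStructure *node = reinterpret_cast<const VkBaseInStructure *>(ci->pNext);
            while (node != nullptr) {
                if (node->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2) {
                    found = &reinterpret_cast<const VkPhysicalDeviceFeatures2 *>(node)->features;
                    break;
                }
                node = node->pNext;
            }
        }
        if (found != nullptr) features = *found;
        return features;
    }
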
@@ -233,35 +209,6 @@ bool StatelessValidation::manual_PreCallValidateCreateDevice(VkPhysicalDevice ph
}
}
- auto features2 = lvl_find_in_chain<VkPhysicalDeviceFeatures2>(pCreateInfo->pNext);
- if (features2) {
- if (!instance_extensions.vk_khr_get_physical_device_properties_2) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- kVUID_PVError_ExtensionNotEnabled,
- "VkDeviceCreateInfo->pNext includes a VkPhysicalDeviceFeatures2 struct, "
- "VK_KHR_get_physical_device_properties2 must be enabled when it creates an instance.");
- }
- }
-
- auto vertex_attribute_divisor_features =
- lvl_find_in_chain<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT>(pCreateInfo->pNext);
- if (vertex_attribute_divisor_features) {
- bool extension_found = false;
- for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; ++i) {
- if (0 == strncmp(pCreateInfo->ppEnabledExtensionNames[i], VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME,
- VK_MAX_EXTENSION_NAME_SIZE)) {
- extension_found = true;
- break;
- }
- }
- if (!extension_found) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- kVUID_PVError_ExtensionNotEnabled,
- "VkDeviceCreateInfo->pNext includes a VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT "
- "struct, VK_EXT_vertex_attribute_divisor must be enabled when it creates a device.");
- }
- }
-
// Validate pCreateInfo->pQueueCreateInfos
if (pCreateInfo->pQueueCreateInfos) {
std::unordered_set<uint32_t> set;
@@ -636,6 +583,32 @@ bool StatelessValidation::manual_PreCallValidateCreateImage(VkDevice device, con
return skip;
}
+bool StatelessValidation::manual_PreCallValidateCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator, VkImageView *pView) {
+ bool skip = false;
+
+ if (pCreateInfo != nullptr) {
+ // Validate chained VkImageViewUsageCreateInfo struct, if present
+ if (nullptr != pCreateInfo->pNext) {
+ auto chained_ivuci_struct = lvl_find_in_chain<VkImageViewUsageCreateInfoKHR>(pCreateInfo->pNext);
+ if (chained_ivuci_struct) {
+ if (0 == chained_ivuci_struct->usage) {
+ skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ "VUID-VkImageViewUsageCreateInfo-usage-requiredbitmask",
+ "vkCreateImageView: Chained VkImageViewUsageCreateInfo usage field must not be 0.");
+ } else if (chained_ivuci_struct->usage & ~AllVkImageUsageFlagBits) {
+ std::stringstream ss;
+ ss << "vkCreateImageView: Chained VkImageViewUsageCreateInfo usage field (0x" << std::hex
+ << chained_ivuci_struct->usage << ") contains invalid flag bits.";
+ skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ "VUID-VkImageViewUsageCreateInfo-usage-parameter", "%s", ss.str().c_str());
+ }
+ }
+ }
+ }
+ return skip;
+}
+
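
The new CreateImageView check depends on locating a chained VkImageViewUsageCreateInfo; the layer does this through its lvl_find_in_chain<> template, which a hand-rolled lookup would approximate as follows (function name illustrative; the KHR structure type alias resolves to the same core sType):

    #include <vulkan/vulkan.h>

    // Walk a pNext chain looking for a VkImageViewUsageCreateInfo entry.
    const VkImageViewUsageCreateInfo *FindImageViewUsage(const void *pNext) {
        const VkBaseInStructure *current = reinterpret_cast<const VkBaseInStructure *>(pNext);
        while (current != nullptr) {
            if (current->sType == VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO) {
                return reinterpret_cast<const VkImageViewUsageCreateInfo *>(current);
            }
            current = current->pNext;
        }
        return nullptr;
    }
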
bool StatelessValidation::manual_PreCallValidateViewport(const VkViewport &viewport, const char *fn_name,
const ParameterName &parameter_name,
VkDebugReportObjectTypeEXT object_type, uint64_t object = 0) {
@@ -917,7 +890,6 @@ bool StatelessValidation::manual_PreCallValidateCreateGraphicsPipelines(VkDevice
bool has_dynamic_sample_locations_ext = false;
bool has_dynamic_exclusive_scissor_nv = false;
bool has_dynamic_shading_rate_palette_nv = false;
- bool has_dynamic_line_stipple = false;
if (pCreateInfos[i].pDynamicState != nullptr) {
const auto &dynamic_state_info = *pCreateInfos[i].pDynamicState;
for (uint32_t state_index = 0; state_index < dynamic_state_info.dynamicStateCount; ++state_index) {
@@ -931,138 +903,13 @@ bool StatelessValidation::manual_PreCallValidateCreateGraphicsPipelines(VkDevice
if (dynamic_state == VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV) has_dynamic_exclusive_scissor_nv = true;
if (dynamic_state == VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV)
has_dynamic_shading_rate_palette_nv = true;
- if (dynamic_state == VK_DYNAMIC_STATE_LINE_STIPPLE_EXT) has_dynamic_line_stipple = true;
}
}
- auto feedback_struct = lvl_find_in_chain<VkPipelineCreationFeedbackCreateInfoEXT>(pCreateInfos[i].pNext);
- if ((feedback_struct != nullptr) &&
- (feedback_struct->pipelineStageCreationFeedbackCount != pCreateInfos[i].stageCount)) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, VK_NULL_HANDLE,
- "VUID-VkPipelineCreationFeedbackCreateInfoEXT-pipelineStageCreationFeedbackCount-02668",
- "vkCreateGraphicsPipelines(): in pCreateInfo[%" PRIu32
- "], VkPipelineCreationFeedbackEXT::pipelineStageCreationFeedbackCount"
- "(=%" PRIu32 ") must equal VkGraphicsPipelineCreateInfo::stageCount(=%" PRIu32 ").",
- i, feedback_struct->pipelineStageCreationFeedbackCount, pCreateInfos[i].stageCount);
- }
-
// Validation for parameters excluded from the generated validation code due to a 'noautovalidity' tag in vk.xml
-
- // Collect active stages
- uint32_t active_shaders = 0;
- for (uint32_t stages = 0; stages < pCreateInfos[i].stageCount; stages++) {
- active_shaders |= pCreateInfos[i].pStages->stage;
- }
-
- if ((active_shaders & VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT) &&
- (active_shaders & VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT) && (pCreateInfos[i].pTessellationState != nullptr)) {
- skip |= validate_struct_type("vkCreateGraphicsPipelines", "pCreateInfos[i].pTessellationState",
- "VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO",
- pCreateInfos[i].pTessellationState,
- VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO, false, kVUIDUndefined,
- "VUID-VkPipelineTessellationStateCreateInfo-sType-sType");
-
- const VkStructureType allowed_structs_VkPipelineTessellationStateCreateInfo[] = {
- VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO};
-
- skip |= validate_struct_pnext("vkCreateGraphicsPipelines", "pCreateInfos[i].pTessellationState->pNext",
- "VkPipelineTessellationDomainOriginStateCreateInfo",
- pCreateInfos[i].pTessellationState->pNext,
- ARRAY_SIZE(allowed_structs_VkPipelineTessellationStateCreateInfo),
- allowed_structs_VkPipelineTessellationStateCreateInfo, GeneratedVulkanHeaderVersion,
- "VUID-VkPipelineTessellationStateCreateInfo-pNext-pNext");
-
- skip |= validate_reserved_flags("vkCreateGraphicsPipelines", "pCreateInfos[i].pTessellationState->flags",
- pCreateInfos[i].pTessellationState->flags,
- "VUID-VkPipelineTessellationStateCreateInfo-flags-zerobitmask");
- }
-
- if (!(active_shaders & VK_SHADER_STAGE_MESH_BIT_NV) && (pCreateInfos[i].pInputAssemblyState != nullptr)) {
- skip |= validate_struct_type("vkCreateGraphicsPipelines", "pCreateInfos[i].pInputAssemblyState",
- "VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO",
- pCreateInfos[i].pInputAssemblyState,
- VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO, false, kVUIDUndefined,
- "VUID-VkPipelineInputAssemblyStateCreateInfo-sType-sType");
-
- skip |= validate_struct_pnext("vkCreateGraphicsPipelines", "pCreateInfos[i].pInputAssemblyState->pNext", NULL,
- pCreateInfos[i].pInputAssemblyState->pNext, 0, NULL, GeneratedVulkanHeaderVersion,
- "VUID-VkPipelineInputAssemblyStateCreateInfo-pNext-pNext");
-
- skip |= validate_reserved_flags("vkCreateGraphicsPipelines", "pCreateInfos[i].pInputAssemblyState->flags",
- pCreateInfos[i].pInputAssemblyState->flags,
- "VUID-VkPipelineInputAssemblyStateCreateInfo-flags-zerobitmask");
-
- skip |= validate_ranged_enum("vkCreateGraphicsPipelines", "pCreateInfos[i].pInputAssemblyState->topology",
- "VkPrimitiveTopology", AllVkPrimitiveTopologyEnums,
- pCreateInfos[i].pInputAssemblyState->topology,
- "VUID-VkPipelineInputAssemblyStateCreateInfo-topology-parameter");
-
- skip |= validate_bool32("vkCreateGraphicsPipelines", "pCreateInfos[i].pInputAssemblyState->primitiveRestartEnable",
- pCreateInfos[i].pInputAssemblyState->primitiveRestartEnable);
- }
-
- if (!(active_shaders & VK_SHADER_STAGE_MESH_BIT_NV) && (pCreateInfos[i].pVertexInputState != nullptr)) {
+ if (pCreateInfos[i].pVertexInputState != nullptr) {
auto const &vertex_input_state = pCreateInfos[i].pVertexInputState;
- if (pCreateInfos[i].pVertexInputState->flags != 0) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkPipelineVertexInputStateCreateInfo-flags-zerobitmask",
- "vkCreateGraphicsPipelines: pararameter "
- "pCreateInfos[%d].pVertexInputState->flags (%u) is reserved and must be zero.",
- i, vertex_input_state->flags);
- }
-
- const VkStructureType allowed_structs_VkPipelineVertexInputStateCreateInfo[] = {
- VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT};
- skip |= validate_struct_pnext("vkCreateGraphicsPipelines", "pCreateInfos[i].pVertexInputState->pNext",
- "VkPipelineVertexInputDivisorStateCreateInfoEXT",
- pCreateInfos[i].pVertexInputState->pNext, 1,
- allowed_structs_VkPipelineVertexInputStateCreateInfo, GeneratedVulkanHeaderVersion,
- "VUID-VkPipelineVertexInputStateCreateInfo-pNext-pNext");
- skip |= validate_struct_type("vkCreateGraphicsPipelines", "pCreateInfos[i].pVertexInputState",
- "VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO", vertex_input_state,
- VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO, false, kVUIDUndefined,
- "VUID-VkPipelineVertexInputStateCreateInfo-sType-sType");
- skip |=
- validate_array("vkCreateGraphicsPipelines", "pCreateInfos[i].pVertexInputState->vertexBindingDescriptionCount",
- "pCreateInfos[i].pVertexInputState->pVertexBindingDescriptions",
- pCreateInfos[i].pVertexInputState->vertexBindingDescriptionCount,
- &pCreateInfos[i].pVertexInputState->pVertexBindingDescriptions, false, true, kVUIDUndefined,
- "VUID-VkPipelineVertexInputStateCreateInfo-pVertexBindingDescriptions-parameter");
-
- skip |= validate_array(
- "vkCreateGraphicsPipelines", "pCreateInfos[i].pVertexInputState->vertexAttributeDescriptionCount",
- "pCreateInfos[i]->pVertexAttributeDescriptions", vertex_input_state->vertexAttributeDescriptionCount,
- &vertex_input_state->pVertexAttributeDescriptions, false, true, kVUIDUndefined,
- "VUID-VkPipelineVertexInputStateCreateInfo-pVertexAttributeDescriptions-parameter");
-
- if (pCreateInfos[i].pVertexInputState->pVertexBindingDescriptions != NULL) {
- for (uint32_t vertexBindingDescriptionIndex = 0;
- vertexBindingDescriptionIndex < pCreateInfos[i].pVertexInputState->vertexBindingDescriptionCount;
- ++vertexBindingDescriptionIndex) {
- skip |= validate_ranged_enum(
- "vkCreateGraphicsPipelines",
- "pCreateInfos[i].pVertexInputState->pVertexBindingDescriptions[j].inputRate", "VkVertexInputRate",
- AllVkVertexInputRateEnums,
- pCreateInfos[i].pVertexInputState->pVertexBindingDescriptions[vertexBindingDescriptionIndex].inputRate,
- "VUID-VkVertexInputBindingDescription-inputRate-parameter");
- }
- }
-
- if (pCreateInfos[i].pVertexInputState->pVertexAttributeDescriptions != NULL) {
- for (uint32_t vertexAttributeDescriptionIndex = 0;
- vertexAttributeDescriptionIndex < pCreateInfos[i].pVertexInputState->vertexAttributeDescriptionCount;
- ++vertexAttributeDescriptionIndex) {
- skip |= validate_ranged_enum(
- "vkCreateGraphicsPipelines",
- "pCreateInfos[i].pVertexInputState->pVertexAttributeDescriptions[i].format", "VkFormat",
- AllVkFormatEnums,
- pCreateInfos[i].pVertexInputState->pVertexAttributeDescriptions[vertexAttributeDescriptionIndex].format,
- "VUID-VkVertexInputAttributeDescription-format-parameter");
- }
- }
-
if (vertex_input_state->vertexBindingDescriptionCount > device_limits.maxVertexInputBindings) {
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-VkPipelineVertexInputStateCreateInfo-vertexBindingDescriptionCount-00613",
@@ -1078,7 +925,7 @@ bool StatelessValidation::manual_PreCallValidateCreateGraphicsPipelines(VkDevice
"vkCreateGraphicsPipelines: pararameter "
"pCreateInfo[%d].pVertexInputState->vertexAttributeDescriptionCount (%u) is "
"greater than VkPhysicalDeviceLimits::maxVertexInputAttributes (%u).",
- i, vertex_input_state->vertexAttributeDescriptionCount, device_limits.maxVertexInputAttributes);
+ i, vertex_input_state->vertexBindingDescriptionCount, device_limits.maxVertexInputAttributes);
}
std::unordered_set<uint32_t> vertex_bindings(vertex_input_state->vertexBindingDescriptionCount);
@@ -1190,13 +1037,11 @@ bool StatelessValidation::manual_PreCallValidateCreateGraphicsPipelines(VkDevice
"pCreateInfos[%d].pTessellationState must not be NULL.",
i, i);
} else {
- const VkStructureType allowed_type =
- VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO;
skip |= validate_struct_pnext(
"vkCreateGraphicsPipelines",
- ParameterName("pCreateInfos[%i].pTessellationState->pNext", ParameterName::IndexVector{i}),
- "VkPipelineTessellationDomainOriginStateCreateInfo", pCreateInfos[i].pTessellationState->pNext, 1,
- &allowed_type, GeneratedVulkanHeaderVersion, "VUID-VkGraphicsPipelineCreateInfo-pNext-pNext");
+ ParameterName("pCreateInfos[%i].pTessellationState->pNext", ParameterName::IndexVector{i}), NULL,
+ pCreateInfos[i].pTessellationState->pNext, 0, NULL, GeneratedVulkanHeaderVersion,
+ "VUID-VkGraphicsPipelineCreateInfo-pNext-pNext");
skip |= validate_reserved_flags(
"vkCreateGraphicsPipelines",
@@ -1563,12 +1408,6 @@ bool StatelessValidation::manual_PreCallValidateCreateGraphicsPipelines(VkDevice
pCreateInfos[i].pMultisampleState->rasterizationSamples, &pCreateInfos[i].pMultisampleState->pSampleMask,
true, false, kVUIDUndefined, kVUIDUndefined);
- skip |= validate_flags(
- "vkCreateGraphicsPipelines",
- ParameterName("pCreateInfos[%i].pMultisampleState->rasterizationSamples", ParameterName::IndexVector{i}),
- "VkSampleCountFlagBits", AllVkSampleCountFlagBits, pCreateInfos[i].pMultisampleState->rasterizationSamples,
- kRequiredSingleBit, "VUID-VkPipelineMultisampleStateCreateInfo-rasterizationSamples-parameter");
-
skip |= validate_bool32(
"vkCreateGraphicsPipelines",
ParameterName("pCreateInfos[%i].pMultisampleState->alphaToCoverageEnable", ParameterName::IndexVector{i}),
@@ -1603,113 +1442,6 @@ bool StatelessValidation::manual_PreCallValidateCreateGraphicsPipelines(VkDevice
"vkCreateGraphicsPipelines(): parameter pCreateInfos[%d].pMultisampleState->minSampleShading.", i);
}
}
-
- const auto *line_state = lvl_find_in_chain<VkPipelineRasterizationLineStateCreateInfoEXT>(
- pCreateInfos[i].pRasterizationState->pNext);
-
- if (line_state) {
- if ((line_state->lineRasterizationMode == VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT ||
- line_state->lineRasterizationMode == VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT)) {
- if (pCreateInfos[i].pMultisampleState->alphaToCoverageEnable) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkGraphicsPipelineCreateInfo-lineRasterizationMode-02766",
- "vkCreateGraphicsPipelines(): Bresenham/Smooth line rasterization not supported with "
- "pCreateInfos[%d].pMultisampleState->alphaToCoverageEnable == VK_TRUE.",
- i);
- }
- if (pCreateInfos[i].pMultisampleState->alphaToOneEnable) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkGraphicsPipelineCreateInfo-lineRasterizationMode-02766",
- "vkCreateGraphicsPipelines(): Bresenham/Smooth line rasterization not supported with "
- "pCreateInfos[%d].pMultisampleState->alphaToOneEnable == VK_TRUE.",
- i);
- }
- if (pCreateInfos[i].pMultisampleState->sampleShadingEnable) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkGraphicsPipelineCreateInfo-lineRasterizationMode-02766",
- "vkCreateGraphicsPipelines(): Bresenham/Smooth line rasterization not supported with "
- "pCreateInfos[%d].pMultisampleState->sampleShadingEnable == VK_TRUE.",
- i);
- }
- }
- if (line_state->stippledLineEnable && !has_dynamic_line_stipple) {
- if (line_state->lineStippleFactor < 1 || line_state->lineStippleFactor > 256) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkGraphicsPipelineCreateInfo-stippledLineEnable-02767",
- "vkCreateGraphicsPipelines(): pCreateInfos[%d] lineStippleFactor = %d must be in the "
- "range [1,256].",
- i, line_state->lineStippleFactor);
- }
- }
- const auto *line_features =
- lvl_find_in_chain<VkPhysicalDeviceLineRasterizationFeaturesEXT>(physical_device_features2.pNext);
- if (line_state->lineRasterizationMode == VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT &&
- (!line_features || !line_features->rectangularLines)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkPipelineRasterizationLineStateCreateInfoEXT-lineRasterizationMode-02768",
- "vkCreateGraphicsPipelines(): pCreateInfos[%d] lineRasterizationMode = "
- "VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT requires the rectangularLines feature.",
- i);
- }
- if (line_state->lineRasterizationMode == VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT &&
- (!line_features || !line_features->bresenhamLines)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkPipelineRasterizationLineStateCreateInfoEXT-lineRasterizationMode-02769",
- "vkCreateGraphicsPipelines(): pCreateInfos[%d] lineRasterizationMode = "
- "VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT requires the bresenhamLines feature.",
- i);
- }
- if (line_state->lineRasterizationMode == VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT &&
- (!line_features || !line_features->smoothLines)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkPipelineRasterizationLineStateCreateInfoEXT-lineRasterizationMode-02770",
- "vkCreateGraphicsPipelines(): pCreateInfos[%d] lineRasterizationMode = "
- "VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT requires the smoothLines feature.",
- i);
- }
- if (line_state->stippledLineEnable) {
- if (line_state->lineRasterizationMode == VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT &&
- (!line_features || !line_features->stippledRectangularLines)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
- 0, "VUID-VkPipelineRasterizationLineStateCreateInfoEXT-stippledLineEnable-02771",
- "vkCreateGraphicsPipelines(): pCreateInfos[%d] lineRasterizationMode = "
- "VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT with stipple requires the "
- "stippledRectangularLines feature.",
- i);
- }
- if (line_state->lineRasterizationMode == VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT &&
- (!line_features || !line_features->stippledBresenhamLines)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
- 0, "VUID-VkPipelineRasterizationLineStateCreateInfoEXT-stippledLineEnable-02772",
- "vkCreateGraphicsPipelines(): pCreateInfos[%d] lineRasterizationMode = "
- "VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT with stipple requires the "
- "stippledBresenhamLines feature.",
- i);
- }
- if (line_state->lineRasterizationMode == VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT &&
- (!line_features || !line_features->stippledSmoothLines)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
- 0, "VUID-VkPipelineRasterizationLineStateCreateInfoEXT-stippledLineEnable-02773",
- "vkCreateGraphicsPipelines(): pCreateInfos[%d] lineRasterizationMode = "
- "VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT with stipple requires the "
- "stippledSmoothLines feature.",
- i);
- }
- if (line_state->lineRasterizationMode == VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT &&
- (!line_features || !line_features->stippledSmoothLines || !device_limits.strictLines)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
- 0, "VUID-VkPipelineRasterizationLineStateCreateInfoEXT-stippledLineEnable-02774",
- "vkCreateGraphicsPipelines(): pCreateInfos[%d] lineRasterizationMode = "
- "VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT with stipple requires the "
- "stippledRectangularLines and strictLines features.",
- i);
- }
- }
- }
}
bool uses_color_attachment = false;
@@ -1826,13 +1558,6 @@ bool StatelessValidation::manual_PreCallValidateCreateGraphicsPipelines(VkDevice
VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT};
if (pCreateInfos[i].pColorBlendState != nullptr && uses_color_attachment) {
- skip |= validate_struct_type("vkCreateGraphicsPipelines",
- ParameterName("pCreateInfos[%i].pColorBlendState", ParameterName::IndexVector{i}),
- "VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO",
- pCreateInfos[i].pColorBlendState,
- VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO, false, kVUIDUndefined,
- "VUID-VkPipelineColorBlendStateCreateInfo-sType-sType");
-
skip |= validate_struct_pnext(
"vkCreateGraphicsPipelines",
ParameterName("pCreateInfos[%i].pColorBlendState->pNext", ParameterName::IndexVector{i}),
@@ -1920,7 +1645,7 @@ bool StatelessValidation::manual_PreCallValidateCreateGraphicsPipelines(VkDevice
ParameterName::IndexVector{i, attachmentIndex}),
"VkColorComponentFlagBits", AllVkColorComponentFlagBits,
pCreateInfos[i].pColorBlendState->pAttachments[attachmentIndex].colorWriteMask,
- kOptionalFlags, "VUID-VkPipelineColorBlendAttachmentState-colorWriteMask-parameter");
+ false, false, "VUID-VkPipelineColorBlendAttachmentState-colorWriteMask-parameter");
}
}
@@ -1966,33 +1691,13 @@ bool StatelessValidation::manual_PreCallValidateCreateGraphicsPipelines(VkDevice
}
if (pCreateInfos[i].pRasterizationState) {
- if (!device_extensions.vk_nv_fill_rectangle) {
- if (pCreateInfos[i].pRasterizationState->polygonMode == VK_POLYGON_MODE_FILL_RECTANGLE_NV) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkPipelineRasterizationStateCreateInfo-polygonMode-01414",
- "vkCreateGraphicsPipelines parameter, VkPolygonMode "
- "pCreateInfos->pRasterizationState->polygonMode cannot be VK_POLYGON_MODE_FILL_RECTANGLE_NV "
- "if the extension VK_NV_fill_rectangle is not enabled.");
- } else if ((pCreateInfos[i].pRasterizationState->polygonMode != VK_POLYGON_MODE_FILL) &&
- (physical_device_features.fillModeNonSolid == false)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- kVUID_PVError_DeviceFeature,
- "vkCreateGraphicsPipelines parameter, VkPolygonMode "
- "pCreateInfos->pRasterizationState->polygonMode cannot be VK_POLYGON_MODE_POINT or "
- "VK_POLYGON_MODE_LINE if VkPhysicalDeviceFeatures->fillModeNonSolid is false.");
- }
- } else {
- if ((pCreateInfos[i].pRasterizationState->polygonMode != VK_POLYGON_MODE_FILL) &&
- (pCreateInfos[i].pRasterizationState->polygonMode != VK_POLYGON_MODE_FILL_RECTANGLE_NV) &&
- (physical_device_features.fillModeNonSolid == false)) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkPipelineRasterizationStateCreateInfo-polygonMode-01507",
+ if ((pCreateInfos[i].pRasterizationState->polygonMode != VK_POLYGON_MODE_FILL) &&
+ (physical_device_features.fillModeNonSolid == false)) {
+ skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ kVUID_PVError_DeviceFeature,
"vkCreateGraphicsPipelines parameter, VkPolygonMode "
- "pCreateInfos->pRasterizationState->polygonMode must be VK_POLYGON_MODE_FILL or "
- "VK_POLYGON_MODE_FILL_RECTANGLE_NV if VkPhysicalDeviceFeatures->fillModeNonSolid is false.");
- }
+ "pCreateInfos->pRasterizationState->polygonMode cannot be VK_POLYGON_MODE_POINT or "
+ "VK_POLYGON_MODE_LINE if VkPhysicalDeviceFeatures->fillModeNonSolid is false.");
}
if (!has_dynamic_line_width && !physical_device_features.wideLines &&
@@ -2028,14 +1733,6 @@ bool StatelessValidation::manual_PreCallValidateCreateComputePipelines(VkDevice
skip |= validate_string("vkCreateComputePipelines",
ParameterName("pCreateInfos[%i].stage.pName", ParameterName::IndexVector{i}),
"VUID-VkPipelineShaderStageCreateInfo-pName-parameter", pCreateInfos[i].stage.pName);
- auto feedback_struct = lvl_find_in_chain<VkPipelineCreationFeedbackCreateInfoEXT>(pCreateInfos[i].pNext);
- if ((feedback_struct != nullptr) && (feedback_struct->pipelineStageCreationFeedbackCount != 1)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, VK_NULL_HANDLE,
- "VUID-VkPipelineCreationFeedbackCreateInfoEXT-pipelineStageCreationFeedbackCount-02669",
- "vkCreateComputePipelines(): in pCreateInfo[%" PRIu32
- "], VkPipelineCreationFeedbackEXT::pipelineStageCreationFeedbackCount must equal 1, found %" PRIu32 ".",
- i, feedback_struct->pipelineStageCreationFeedbackCount);
- }
}
return skip;
}
@@ -2374,69 +2071,44 @@ bool StatelessValidation::manual_PreCallValidateFreeCommandBuffers(VkDevice devi
bool StatelessValidation::manual_PreCallValidateBeginCommandBuffer(VkCommandBuffer commandBuffer,
const VkCommandBufferBeginInfo *pBeginInfo) {
bool skip = false;
-
- // VkCommandBufferInheritanceInfo validation, due to a 'noautovalidity' of pBeginInfo->pInheritanceInfo in vkBeginCommandBuffer
- const char *cmd_name = "vkBeginCommandBuffer";
const VkCommandBufferInheritanceInfo *pInfo = pBeginInfo->pInheritanceInfo;
- // Implicit VUs
- // validate only sType here; pointer has to be validated in core_validation
- const bool kNotRequired = false;
- const char *kNoVUID = nullptr;
- skip |= validate_struct_type(cmd_name, "pBeginInfo->pInheritanceInfo", "VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO",
- pInfo, VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO, kNotRequired, kNoVUID,
- "VUID-VkCommandBufferInheritanceInfo-sType-sType");
-
- if (pInfo) {
- const VkStructureType allowed_structs_VkCommandBufferInheritanceInfo[] = {
- VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT};
- skip |= validate_struct_pnext(
- cmd_name, "pBeginInfo->pInheritanceInfo->pNext", "VkCommandBufferInheritanceConditionalRenderingInfoEXT", pInfo->pNext,
- ARRAY_SIZE(allowed_structs_VkCommandBufferInheritanceInfo), allowed_structs_VkCommandBufferInheritanceInfo,
- GeneratedVulkanHeaderVersion, "VUID-VkCommandBufferInheritanceInfo-pNext-pNext");
-
- skip |= validate_bool32(cmd_name, "pBeginInfo->pInheritanceInfo->occlusionQueryEnable", pInfo->occlusionQueryEnable);
-
- // Explicit VUs
- if (!physical_device_features.inheritedQueries && pInfo->occlusionQueryEnable == VK_TRUE) {
- skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(commandBuffer), "VUID-VkCommandBufferInheritanceInfo-occlusionQueryEnable-00056",
- "%s: Inherited queries feature is disabled, but pBeginInfo->pInheritanceInfo->occlusionQueryEnable is VK_TRUE.",
- cmd_name);
- }
+ // Validation for parameters excluded from the generated validation code due to a 'noautovalidity' tag in vk.xml
+ // TODO: pBeginInfo->pInheritanceInfo must not be NULL if commandBuffer is a secondary command buffer
+ skip |= validate_struct_type("vkBeginCommandBuffer", "pBeginInfo->pInheritanceInfo",
+ "VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO", pBeginInfo->pInheritanceInfo,
+ VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO, false,
+ "VUID_vkBeginCommandBuffer-pBeginInfo-parameter", "VUID_VkCommandBufferBeginInfo-sType-sType");
+
+ if (pBeginInfo->pInheritanceInfo != NULL) {
+ skip |= validate_struct_pnext("vkBeginCommandBuffer", "pBeginInfo->pInheritanceInfo->pNext", NULL,
+ pBeginInfo->pInheritanceInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion,
+ "VUID-VkCommandBufferBeginInfo-pNext-pNext");
+
+ skip |= validate_bool32("vkBeginCommandBuffer", "pBeginInfo->pInheritanceInfo->occlusionQueryEnable",
+ pBeginInfo->pInheritanceInfo->occlusionQueryEnable);
+
+ // TODO: This only needs to be validated when the inherited queries feature is enabled
+ // skip |= validate_flags("vkBeginCommandBuffer", "pBeginInfo->pInheritanceInfo->queryFlags",
+ // "VkQueryControlFlagBits", AllVkQueryControlFlagBits, pBeginInfo->pInheritanceInfo->queryFlags, false);
+
+ // TODO: This must be 0 if the pipeline statistics queries feature is not enabled
+ skip |= validate_flags("vkBeginCommandBuffer", "pBeginInfo->pInheritanceInfo->pipelineStatistics",
+ "VkQueryPipelineStatisticFlagBits", AllVkQueryPipelineStatisticFlagBits,
+ pBeginInfo->pInheritanceInfo->pipelineStatistics, false, false, kVUIDUndefined);
+ }
- if (physical_device_features.inheritedQueries) {
- skip |= validate_flags(cmd_name, "pBeginInfo->pInheritanceInfo->queryFlags", "VkQueryControlFlagBits",
- AllVkQueryControlFlagBits, pInfo->queryFlags, kOptionalFlags,
+ if (pInfo != NULL) {
+ if ((physical_device_features.inheritedQueries == VK_FALSE) && (pInfo->occlusionQueryEnable != VK_FALSE)) {
+ skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ HandleToUint64(commandBuffer), "VUID-VkCommandBufferInheritanceInfo-occlusionQueryEnable-00056",
+ "Cannot set inherited occlusionQueryEnable in vkBeginCommandBuffer() when device does not support "
+ "inheritedQueries.");
+ }
+ if ((physical_device_features.inheritedQueries != VK_FALSE) && (pInfo->occlusionQueryEnable != VK_FALSE)) {
+ skip |= validate_flags("vkBeginCommandBuffer", "pBeginInfo->pInheritanceInfo->queryFlags", "VkQueryControlFlagBits",
+ AllVkQueryControlFlagBits, pInfo->queryFlags, false, false,
"VUID-VkCommandBufferInheritanceInfo-queryFlags-00057");
- } else { // !inheritedQueries
- skip |= validate_reserved_flags(cmd_name, "pBeginInfo->pInheritanceInfo->queryFlags", pInfo->queryFlags,
- "VUID-VkCommandBufferInheritanceInfo-queryFlags-02788");
- }
-
- if (physical_device_features.pipelineStatisticsQuery) {
- skip |= validate_flags(cmd_name, "pBeginInfo->pInheritanceInfo->pipelineStatistics", "VkQueryPipelineStatisticFlagBits",
- AllVkQueryPipelineStatisticFlagBits, pInfo->pipelineStatistics, kOptionalFlags,
- "VUID-VkCommandBufferInheritanceInfo-pipelineStatistics-02789");
- } else { // !pipelineStatisticsQuery
- skip |= validate_reserved_flags(cmd_name, "pBeginInfo->pInheritanceInfo->pipelineStatistics", pInfo->pipelineStatistics,
- "VUID-VkCommandBufferInheritanceInfo-pipelineStatistics-00058");
- }
-
- const auto *conditional_rendering = lvl_find_in_chain<VkCommandBufferInheritanceConditionalRenderingInfoEXT>(pInfo->pNext);
- if (conditional_rendering) {
- const auto *cr_features =
- lvl_find_in_chain<VkPhysicalDeviceConditionalRenderingFeaturesEXT>(physical_device_features2.pNext);
- const auto inherited_conditional_rendering = cr_features && cr_features->inheritedConditionalRendering;
- if (!inherited_conditional_rendering && conditional_rendering->conditionalRenderingEnable == VK_TRUE) {
- skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(commandBuffer),
- "VUID-VkCommandBufferInheritanceConditionalRenderingInfoEXT-conditionalRenderingEnable-01977",
- "vkBeginCommandBuffer: Inherited conditional rendering is disabled, but "
- "pBeginInfo->pInheritanceInfo->pNext<VkCommandBufferInheritanceConditionalRenderingInfoEXT> is VK_TRUE.");
- }
}
}
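
A compact sketch of the feature gate kept in this hunk (helper name hypothetical): inherited occlusion queries are only legal when the device exposes the inheritedQueries feature, so a set occlusionQueryEnable without that feature is flagged.

    #include <vulkan/vulkan.h>

    // True when pInheritanceInfo requests inherited occlusion queries on a
    // device that was created without the inheritedQueries feature.
    bool InheritanceInfoViolatesInheritedQueries(const VkPhysicalDeviceFeatures &features,
                                                 const VkCommandBufferInheritanceInfo *info) {
        return info != nullptr && features.inheritedQueries == VK_FALSE &&
               info->occlusionQueryEnable == VK_TRUE;
    }
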
@@ -2607,20 +2279,6 @@ bool StatelessValidation::manual_PreCallValidateCmdDrawIndexedIndirect(VkCommand
return skip;
}
-bool StatelessValidation::manual_PreCallValidateCmdClearAttachments(VkCommandBuffer commandBuffer, uint32_t attachmentCount,
- const VkClearAttachment *pAttachments, uint32_t rectCount,
- const VkClearRect *pRects) {
- bool skip = false;
- for (uint32_t rect = 0; rect < rectCount; rect++) {
- if (pRects[rect].layerCount == 0) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(commandBuffer), "VUID-vkCmdClearAttachments-layerCount-01934",
- "CmdClearAttachments(): pRects[%d].layerCount is zero.", rect);
- }
- }
- return skip;
-}
-
bool StatelessValidation::manual_PreCallValidateCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage,
VkImageLayout srcImageLayout, VkImage dstImage,
VkImageLayout dstImageLayout, uint32_t regionCount,
@@ -2933,7 +2591,7 @@ bool StatelessValidation::manual_PreCallValidateCmdDispatchIndirect(VkCommandBuf
if ((offset % 4) != 0) {
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(commandBuffer), "VUID-vkCmdDispatchIndirect-offset-02710",
+ HandleToUint64(commandBuffer), "VUID-vkCmdDispatchIndirect-offset-00406",
"vkCmdDispatchIndirect(): offset (%" PRIu64 ") must be a multiple of 4.", offset);
}
return skip;
@@ -3160,23 +2818,25 @@ bool StatelessValidation::manual_PreCallValidateCmdDrawMeshTasksIndirectNV(VkCom
VkDeviceSize offset, uint32_t drawCount,
uint32_t stride) {
bool skip = false;
- static const int condition_multiples = 0b0011;
- if (offset & condition_multiples) {
+
+ if (offset & 3) {
skip |= log_msg(
report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(commandBuffer), "VUID-vkCmdDrawMeshTasksIndirectNV-offset-02710",
+ HandleToUint64(commandBuffer), "VUID-vkCmdDrawMeshTasksIndirectNV-offset-02145",
"vkCmdDrawMeshTasksIndirectNV() parameter, VkDeviceSize offset (0x%" PRIxLEAST64 "), is not a multiple of 4.", offset);
}
- if (drawCount > 1 && ((stride & condition_multiples) || stride < sizeof(VkDrawMeshTasksIndirectCommandNV))) {
+
+ if (drawCount > 1 && ((stride & 3) || stride < sizeof(VkDrawMeshTasksIndirectCommandNV))) {
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(commandBuffer), "VUID-vkCmdDrawMeshTasksIndirectNV-drawCount-02146",
"vkCmdDrawMeshTasksIndirectNV() parameter, uint32_t stride (0x%" PRIxLEAST32
"), is not a multiple of 4 or smaller than sizeof (VkDrawMeshTasksIndirectCommandNV).",
stride);
}
+
if (!physical_device_features.multiDrawIndirect && ((drawCount > 1))) {
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(commandBuffer), "VUID-vkCmdDrawMeshTasksIndirectNV-drawCount-02718",
+ HandleToUint64(commandBuffer), "VUID-vkCmdDrawMeshTasksIndirectNV-drawCount-02147",
"vkCmdDrawMeshTasksIndirectNV(): Device feature multiDrawIndirect disabled: count must be 0 or 1 but is %d",
drawCount);
}
@@ -3192,7 +2852,7 @@ bool StatelessValidation::manual_PreCallValidateCmdDrawMeshTasksIndirectCountNV(
if (offset & 3) {
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(commandBuffer), "VUID-vkCmdDrawMeshTasksIndirectCountNV-offset-02710",
+ HandleToUint64(commandBuffer), "VUID-vkCmdDrawMeshTasksIndirectCountNV-offset-02180",
"vkCmdDrawMeshTasksIndirectCountNV() parameter, VkDeviceSize offset (0x%" PRIxLEAST64
"), is not a multiple of 4.",
offset);
@@ -3200,12 +2860,20 @@ bool StatelessValidation::manual_PreCallValidateCmdDrawMeshTasksIndirectCountNV(
if (countBufferOffset & 3) {
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(commandBuffer), "VUID-vkCmdDrawMeshTasksIndirectCountNV-countBufferOffset-02716",
+ HandleToUint64(commandBuffer), "VUID-vkCmdDrawMeshTasksIndirectCountNV-countBufferOffset-02181",
"vkCmdDrawMeshTasksIndirectCountNV() parameter, VkDeviceSize countBufferOffset (0x%" PRIxLEAST64
"), is not a multiple of 4.",
countBufferOffset);
}
+ if ((stride & 3) || stride < sizeof(VkDrawMeshTasksIndirectCommandNV)) {
+ skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ HandleToUint64(commandBuffer), "VUID-vkCmdDrawMeshTasksIndirectCountNV-stride-02182",
+ "vkCmdDrawMeshTasksIndirectCountNV() parameter, uint32_t stride (0x%" PRIxLEAST32
+ "), is not a multiple of 4 or smaller than sizeof (VkDrawMeshTasksIndirectCommandNV).",
+ stride);
+ }
+
return skip;
}
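
The two mesh-task hunks above replace the named condition_multiples constant with a literal mask; for unsigned offsets, (offset & 3) != 0 and (offset % 4) != 0 test 4-byte alignment identically, as a quick self-check shows (helper names illustrative):

    #include <cassert>
    #include <cstdint>

    // Both forms flag the same misaligned values for unsigned input.
    bool IsMisaligned4Mask(uint64_t offset) { return (offset & 3) != 0; }
    bool IsMisaligned4Mod(uint64_t offset) { return (offset % 4) != 0; }

    int main() {
        for (uint64_t offset = 0; offset < 64; ++offset) {
            assert(IsMisaligned4Mask(offset) == IsMisaligned4Mod(offset));
        }
        return 0;
    }
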
@@ -3272,313 +2940,3 @@ bool StatelessValidation::manual_PreCallValidateAllocateMemory(VkDevice device,
}
return skip;
}
-
-bool StatelessValidation::ValidateGeometryTrianglesNV(const VkGeometryTrianglesNV &triangles,
- VkDebugReportObjectTypeEXT object_type, uint64_t object_handle,
- const char *func_name) const {
- bool skip = false;
-
- if (triangles.vertexFormat != VK_FORMAT_R32G32B32_SFLOAT && triangles.vertexFormat != VK_FORMAT_R16G16B16_SFLOAT &&
- triangles.vertexFormat != VK_FORMAT_R16G16B16_SNORM && triangles.vertexFormat != VK_FORMAT_R32G32_SFLOAT &&
- triangles.vertexFormat != VK_FORMAT_R16G16_SFLOAT && triangles.vertexFormat != VK_FORMAT_R16G16_SNORM) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle,
- "VUID-VkGeometryTrianglesNV-vertexFormat-02430", "%s", func_name);
- } else {
- uint32_t vertex_component_size = 0;
- if (triangles.vertexFormat == VK_FORMAT_R32G32B32_SFLOAT || triangles.vertexFormat == VK_FORMAT_R32G32_SFLOAT) {
- vertex_component_size = 4;
- } else if (triangles.vertexFormat == VK_FORMAT_R16G16B16_SFLOAT || triangles.vertexFormat == VK_FORMAT_R16G16B16_SNORM ||
- triangles.vertexFormat == VK_FORMAT_R16G16_SFLOAT || triangles.vertexFormat == VK_FORMAT_R16G16_SNORM) {
- vertex_component_size = 2;
- }
- if (vertex_component_size > 0 && SafeModulo(triangles.vertexOffset, vertex_component_size) != 0) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle,
- "VUID-VkGeometryTrianglesNV-vertexOffset-02429", "%s", func_name);
- }
- }
-
- if (triangles.indexType != VK_INDEX_TYPE_UINT32 && triangles.indexType != VK_INDEX_TYPE_UINT16 &&
- triangles.indexType != VK_INDEX_TYPE_NONE_NV) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle,
- "VUID-VkGeometryTrianglesNV-indexType-02433", "%s", func_name);
- } else {
- uint32_t index_element_size = 0;
- if (triangles.indexType == VK_INDEX_TYPE_UINT32) {
- index_element_size = 4;
- } else if (triangles.indexType == VK_INDEX_TYPE_UINT16) {
- index_element_size = 2;
- }
- if (index_element_size > 0 && SafeModulo(triangles.indexOffset, index_element_size) != 0) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle,
- "VUID-VkGeometryTrianglesNV-indexOffset-02432", "%s", func_name);
- }
- }
- if (triangles.indexType == VK_INDEX_TYPE_NONE_NV) {
- if (triangles.indexCount != 0) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle,
- "VUID-VkGeometryTrianglesNV-indexCount-02436", "%s", func_name);
- }
- if (triangles.indexData != VK_NULL_HANDLE) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle,
- "VUID-VkGeometryTrianglesNV-indexData-02434", "%s", func_name);
- }
- }
-
- if (SafeModulo(triangles.transformOffset, 16) != 0) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle,
- "VUID-VkGeometryTrianglesNV-transformOffset-02438", "%s", func_name);
- }
-
- return skip;
-}
-
-bool StatelessValidation::ValidateGeometryAABBNV(const VkGeometryAABBNV &aabbs, VkDebugReportObjectTypeEXT object_type,
- uint64_t object_handle, const char *func_name) const {
- bool skip = false;
-
- if (SafeModulo(aabbs.offset, 8) != 0) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle,
- "VUID-VkGeometryAABBNV-offset-02440", "%s", func_name);
- }
- if (SafeModulo(aabbs.stride, 8) != 0) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle,
- "VUID-VkGeometryAABBNV-stride-02441", "%s", func_name);
- }
-
- return skip;
-}
-
-bool StatelessValidation::ValidateGeometryNV(const VkGeometryNV &geometry, VkDebugReportObjectTypeEXT object_type,
- uint64_t object_handle, const char *func_name) const {
- bool skip = false;
- if (geometry.geometryType == VK_GEOMETRY_TYPE_TRIANGLES_NV) {
- skip = ValidateGeometryTrianglesNV(geometry.geometry.triangles, object_type, object_handle, func_name);
- } else if (geometry.geometryType == VK_GEOMETRY_TYPE_AABBS_NV) {
- skip = ValidateGeometryAABBNV(geometry.geometry.aabbs, object_type, object_handle, func_name);
- }
- return skip;
-}
-
-bool StatelessValidation::ValidateAccelerationStructureInfoNV(const VkAccelerationStructureInfoNV &info,
- VkDebugReportObjectTypeEXT object_type, uint64_t object_handle,
- const char *func_name) const {
- bool skip = false;
- if (info.type == VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV && info.geometryCount != 0) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle,
- "VUID-VkAccelerationStructureInfoNV-type-02425",
- "VkAccelerationStructureInfoNV: If type is VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV then "
- "geometryCount must be 0.");
- }
- if (info.type == VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV && info.instanceCount != 0) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle,
- "VUID-VkAccelerationStructureInfoNV-type-02426",
- "VkAccelerationStructureInfoNV: If type is VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV then "
- "instanceCount must be 0.");
- }
- if (info.flags & VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV &&
- info.flags & VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle,
- "VUID-VkAccelerationStructureInfoNV-flags-02592",
- "VkAccelerationStructureInfoNV: If flags has the VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV"
- "bit set, then it must not have the VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV bit set.");
- }
- if (info.geometryCount > phys_dev_ext_props.ray_tracing_props.maxGeometryCount) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle,
- "VUID-VkAccelerationStructureInfoNV-geometryCount-02422",
- "VkAccelerationStructureInfoNV: geometryCount must be less than or equal to "
- "VkPhysicalDeviceRayTracingPropertiesNV::maxGeometryCount.");
- }
- if (info.instanceCount > phys_dev_ext_props.ray_tracing_props.maxInstanceCount) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle,
- "VUID-VkAccelerationStructureInfoNV-instanceCount-02423",
- "VkAccelerationStructureInfoNV: instanceCount must be less than or equal to "
- "VkPhysicalDeviceRayTracingPropertiesNV::maxInstanceCount.");
- }
- if (info.type == VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV && info.geometryCount > 0) {
- uint64_t total_triangle_count = 0;
- for (uint32_t i = 0; i < info.geometryCount; i++) {
- const VkGeometryNV &geometry = info.pGeometries[i];
-
- skip |= ValidateGeometryNV(geometry, object_type, object_handle, func_name);
-
- if (geometry.geometryType != VK_GEOMETRY_TYPE_TRIANGLES_NV) {
- continue;
- }
- total_triangle_count += geometry.geometry.triangles.indexCount / 3;
- }
- if (total_triangle_count > phys_dev_ext_props.ray_tracing_props.maxTriangleCount) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle,
- "VUID-VkAccelerationStructureInfoNV-maxTriangleCount-02424",
- "VkAccelerationStructureInfoNV: The total number of triangles in all geometries must be less than "
- "or equal to VkPhysicalDeviceRayTracingPropertiesNV::maxTriangleCount.");
- }
- }
- if (info.type == VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV && info.geometryCount > 1) {
- const VkGeometryTypeNV first_geometry_type = info.pGeometries[0].geometryType;
- for (uint32_t i = 1; i < info.geometryCount; i++) {
- const VkGeometryNV &geometry = info.pGeometries[i];
- if (geometry.geometryType != first_geometry_type) {
- // TODO: update fake VUID below with the real one once it is generated.
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT,
- 0, "UNASSIGNED-VkAccelerationStructureInfoNV-pGeometries-XXXX",
- "VkAccelerationStructureInfoNV: info.pGeometries[%d].geometryType does not match "
- "info.pGeometries[0].geometryType.",
- i);
- }
- }
- }
- return skip;
-}
-
-bool StatelessValidation::manual_PreCallValidateCreateAccelerationStructureNV(
- VkDevice device, const VkAccelerationStructureCreateInfoNV *pCreateInfo, const VkAllocationCallbacks *pAllocator,
- VkAccelerationStructureNV *pAccelerationStructure) {
- bool skip = false;
-
- if (pCreateInfo) {
- if ((pCreateInfo->compactedSize != 0) &&
- ((pCreateInfo->info.geometryCount != 0) || (pCreateInfo->info.instanceCount != 0))) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkAccelerationStructureCreateInfoNV-compactedSize-02421",
- "vkCreateAccelerationStructureNV(): pCreateInfo->compactedSize nonzero (%" PRIu64
- ") with info.geometryCount (%" PRIu32 ") or info.instanceCount (%" PRIu32 ") nonzero.",
- pCreateInfo->compactedSize, pCreateInfo->info.geometryCount, pCreateInfo->info.instanceCount);
- }
-
- skip |= ValidateAccelerationStructureInfoNV(pCreateInfo->info, VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT, 0,
- "vkCreateAccelerationStructureNV()");
- }
-
- return skip;
-}
-
-bool StatelessValidation::manual_PreCallValidateCmdBuildAccelerationStructureNV(
- VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV *pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset,
- VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset) {
- bool skip = false;
-
- if (pInfo != nullptr) {
- skip |= ValidateAccelerationStructureInfoNV(*pInfo, VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT,
- HandleToUint64(dst), "vkCmdBuildAccelerationStructureNV()");
- }
-
- return skip;
-}
-
-bool StatelessValidation::manual_PreCallValidateGetAccelerationStructureHandleNV(VkDevice device,
- VkAccelerationStructureNV accelerationStructure,
- size_t dataSize, void *pData) {
- bool skip = false;
- if (dataSize < 8) {
- skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT,
- HandleToUint64(accelerationStructure), "VUID-vkGetAccelerationStructureHandleNV-dataSize-02240",
- "vkGetAccelerationStructureHandleNV(): dataSize must be greater than or equal to 8.");
- }
- return skip;
-}
-
-bool StatelessValidation::manual_PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache,
- uint32_t createInfoCount,
- const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
- const VkAllocationCallbacks *pAllocator,
- VkPipeline *pPipelines) {
- bool skip = false;
-
- for (uint32_t i = 0; i < createInfoCount; i++) {
- auto feedback_struct = lvl_find_in_chain<VkPipelineCreationFeedbackCreateInfoEXT>(pCreateInfos[i].pNext);
- if ((feedback_struct != nullptr) && (feedback_struct->pipelineStageCreationFeedbackCount != pCreateInfos[i].stageCount)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, VK_NULL_HANDLE,
- "VUID-VkPipelineCreationFeedbackCreateInfoEXT-pipelineStageCreationFeedbackCount-02670",
- "vkCreateRayTracingPipelinesNV(): in pCreateInfo[%" PRIu32
- "], VkPipelineCreationFeedbackEXT::pipelineStageCreationFeedbackCount"
- "(=%" PRIu32 ") must equal VkRayTracingPipelineCreateInfoNV::stageCount(=%" PRIu32 ").",
- i, feedback_struct->pipelineStageCreationFeedbackCount, pCreateInfos[i].stageCount);
- }
- }
-
- return skip;
-}
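
The pipelineStageCreationFeedbackCount check above enforces what an application sets up when it chains pipeline creation feedback onto a ray tracing pipeline. The sketch below is an illustrative app-side view only, assuming the VK_NV_ray_tracing and VK_EXT_pipeline_creation_feedback extensions are available; AttachCreationFeedback is a hypothetical helper, not layer code.

// Illustrative sketch, not layer code: one feedback entry per shader stage,
// with pipelineStageCreationFeedbackCount equal to stageCount as the VUID requires.
#include <vulkan/vulkan.h>
#include <vector>

void AttachCreationFeedback(VkRayTracingPipelineCreateInfoNV &create_info,
                            VkPipelineCreationFeedbackEXT &pipeline_feedback,
                            std::vector<VkPipelineCreationFeedbackEXT> &stage_feedbacks,
                            VkPipelineCreationFeedbackCreateInfoEXT &feedback_info) {
    stage_feedbacks.resize(create_info.stageCount);  // one entry per element of pStages
    feedback_info.sType = VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT;
    feedback_info.pNext = create_info.pNext;
    feedback_info.pPipelineCreationFeedback = &pipeline_feedback;
    feedback_info.pipelineStageCreationFeedbackCount = create_info.stageCount;  // must equal stageCount
    feedback_info.pPipelineStageCreationFeedbacks = stage_feedbacks.data();
    create_info.pNext = &feedback_info;
}
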
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-bool StatelessValidation::PreCallValidateGetDeviceGroupSurfacePresentModes2EXT(VkDevice device,
- const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
- VkDeviceGroupPresentModeFlagsKHR *pModes) {
- bool skip = false;
- if (!device_extensions.vk_khr_swapchain)
- skip |= OutputExtensionError("vkGetDeviceGroupSurfacePresentModes2EXT", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
- if (!device_extensions.vk_khr_get_surface_capabilities_2)
- skip |= OutputExtensionError("vkGetDeviceGroupSurfacePresentModes2EXT", VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_khr_surface)
- skip |= OutputExtensionError("vkGetDeviceGroupSurfacePresentModes2EXT", VK_KHR_SURFACE_EXTENSION_NAME);
- if (!device_extensions.vk_khr_get_physical_device_properties_2)
- skip |=
- OutputExtensionError("vkGetDeviceGroupSurfacePresentModes2EXT", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (!device_extensions.vk_ext_full_screen_exclusive)
- skip |= OutputExtensionError("vkGetDeviceGroupSurfacePresentModes2EXT", VK_EXT_FULL_SCREEN_EXCLUSIVE_EXTENSION_NAME);
- skip |= validate_struct_type(
- "vkGetDeviceGroupSurfacePresentModes2EXT", "pSurfaceInfo", "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR",
- pSurfaceInfo, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR, true,
- "VUID-vkGetDeviceGroupSurfacePresentModes2EXT-pSurfaceInfo-parameter", "VUID-VkPhysicalDeviceSurfaceInfo2KHR-sType-sType");
- if (pSurfaceInfo != NULL) {
- const VkStructureType allowed_structs_VkPhysicalDeviceSurfaceInfo2KHR[] = {
- VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT,
- VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT};
-
- skip |= validate_struct_pnext("vkGetDeviceGroupSurfacePresentModes2EXT", "pSurfaceInfo->pNext",
- "VkSurfaceFullScreenExclusiveInfoEXT, VkSurfaceFullScreenExclusiveWin32InfoEXT",
- pSurfaceInfo->pNext, ARRAY_SIZE(allowed_structs_VkPhysicalDeviceSurfaceInfo2KHR),
- allowed_structs_VkPhysicalDeviceSurfaceInfo2KHR, GeneratedVulkanHeaderVersion,
- "VUID-VkPhysicalDeviceSurfaceInfo2KHR-pNext-pNext");
-
- skip |= validate_required_handle("vkGetDeviceGroupSurfacePresentModes2EXT", "pSurfaceInfo->surface", pSurfaceInfo->surface);
- }
- return skip;
-}
-#endif
-
-bool StatelessValidation::manual_PreCallValidateCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
- const VkAllocationCallbacks *pAllocator,
- VkFramebuffer *pFramebuffer) {
- // Validation for pAttachments which is excluded from the generated validation code due to a 'noautovalidity' tag in vk.xml
- bool skip = false;
- if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) == 0) {
- skip |= validate_array("vkCreateFramebuffer", "attachmentCount", "pAttachments", pCreateInfo->attachmentCount,
- &pCreateInfo->pAttachments, false, true, kVUIDUndefined, kVUIDUndefined);
- }
- return skip;
-}
-
-bool StatelessValidation::manual_PreCallValidateCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor,
- uint16_t lineStipplePattern) {
- bool skip = false;
-
- if (lineStippleFactor < 1 || lineStippleFactor > 256) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(commandBuffer), "VUID-vkCmdSetLineStippleEXT-lineStippleFactor-02776",
- "vkCmdSetLineStippleEXT::lineStippleFactor=%d is not in [1,256].", lineStippleFactor);
- }
-
- return skip;
-}
-
-bool StatelessValidation::manual_PreCallValidateCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer,
- VkDeviceSize offset, VkIndexType indexType) {
- bool skip = false;
-
- if (indexType == VK_INDEX_TYPE_NONE_NV) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(commandBuffer), "VUID-vkCmdBindIndexBuffer-indexType-02507",
- "vkCmdBindIndexBuffer() indexType must not be VK_INDEX_TYPE_NONE_NV.");
- }
-
- const auto *index_type_uint8_features =
- lvl_find_in_chain<VkPhysicalDeviceIndexTypeUint8FeaturesEXT>(physical_device_features2.pNext);
-    if (indexType == VK_INDEX_TYPE_UINT8_EXT && (!index_type_uint8_features || !index_type_uint8_features->indexTypeUint8)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(commandBuffer), "VUID-vkCmdBindIndexBuffer-indexType-02765",
- "vkCmdBindIndexBuffer() indexType is VK_INDEX_TYPE_UINT8_EXT but indexTypeUint8 feature is not enabled.");
- }
-
- return skip;
-}
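
Taken together, the manual_PreCallValidate* entry points above share one stateless-validation shape: test a precondition on the call's inputs, report a VUID-tagged message through log_msg, and OR the result into skip so every violation is reported before the call is skipped. A minimal standalone sketch of that shape follows, reusing the dataSize rule from vkGetAccelerationStructureHandleNV above; ReportError and ValidateHandleDataSize are hypothetical stand-ins rather than layer symbols.

#include <cstddef>
#include <cstdio>

// Hypothetical stand-in for log_msg(): print the VUID and message, report as an error.
static bool ReportError(const char *vuid, const char *msg) {
    std::fprintf(stderr, "%s: %s\n", vuid, msg);
    return true;
}

// Hypothetical stand-in for the dataSize check in
// manual_PreCallValidateGetAccelerationStructureHandleNV() above.
static bool ValidateHandleDataSize(size_t dataSize) {
    bool skip = false;
    if (dataSize < 8) {
        skip |= ReportError("VUID-vkGetAccelerationStructureHandleNV-dataSize-02240",
                            "dataSize must be greater than or equal to 8.");
    }
    return skip;  // when true, validation asks the layer chassis to skip the downstream call
}

int main() { return ValidateHandleDataSize(4) ? 1 : 0; }
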
diff --git a/layers/shader_validation.cpp b/layers/shader_validation.cpp
index 8f02a8de6..910f97835 100644
--- a/layers/shader_validation.cpp
+++ b/layers/shader_validation.cpp
@@ -38,50 +38,6 @@
#include "spirv-tools/libspirv.h"
#include "xxhash.h"
-void decoration_set::add(uint32_t decoration, uint32_t value) {
- switch (decoration) {
- case spv::DecorationLocation:
- flags |= location_bit;
- location = value;
- break;
- case spv::DecorationPatch:
- flags |= patch_bit;
- break;
- case spv::DecorationRelaxedPrecision:
- flags |= relaxed_precision_bit;
- break;
- case spv::DecorationBlock:
- flags |= block_bit;
- break;
- case spv::DecorationBufferBlock:
- flags |= buffer_block_bit;
- break;
- case spv::DecorationComponent:
- flags |= component_bit;
- component = value;
- break;
- case spv::DecorationInputAttachmentIndex:
- flags |= input_attachment_index_bit;
- input_attachment_index = value;
- break;
- case spv::DecorationDescriptorSet:
- flags |= descriptor_set_bit;
- descriptor_set = value;
- break;
- case spv::DecorationBinding:
- flags |= binding_bit;
- binding = value;
- break;
- case spv::DecorationNonWritable:
- flags |= nonwritable_bit;
- break;
- case spv::DecorationBuiltIn:
- flags |= builtin_bit;
- builtin = value;
- break;
- }
-}
-
enum FORMAT_TYPE {
FORMAT_TYPE_FLOAT = 1, // UNORM, SNORM, FLOAT, USCALED, SSCALED, SRGB -- anything we consider float in the shader
FORMAT_TYPE_SINT = 2,
@@ -90,25 +46,29 @@ enum FORMAT_TYPE {
typedef std::pair<unsigned, unsigned> location_t;
+struct interface_var {
+ uint32_t id;
+ uint32_t type_id;
+ uint32_t offset;
+ bool is_patch;
+ bool is_block_member;
+ bool is_relaxed_precision;
+ // TODO: collect the name, too? Isn't required to be present.
+};
+
struct shader_stage_attributes {
char const *const name;
bool arrayed_input;
bool arrayed_output;
- VkShaderStageFlags stage;
};
static shader_stage_attributes shader_stage_attribs[] = {
- {"vertex shader", false, false, VK_SHADER_STAGE_VERTEX_BIT},
- {"tessellation control shader", true, true, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT},
- {"tessellation evaluation shader", true, false, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT},
- {"geometry shader", true, false, VK_SHADER_STAGE_GEOMETRY_BIT},
- {"fragment shader", false, false, VK_SHADER_STAGE_FRAGMENT_BIT},
+ {"vertex shader", false, false}, {"tessellation control shader", true, true}, {"tessellation evaluation shader", true, false},
+ {"geometry shader", true, false}, {"fragment shader", false, false},
};
-unsigned ExecutionModelToShaderStageFlagBits(unsigned mode);
-
// SPIRV utility functions
-void SHADER_MODULE_STATE::BuildDefIndex() {
+void shader_module::BuildDefIndex() {
for (auto insn : *this) {
switch (insn.opcode()) {
// Types
@@ -133,7 +93,6 @@ void SHADER_MODULE_STATE::BuildDefIndex() {
case spv::OpTypeQueue:
case spv::OpTypePipe:
case spv::OpTypeAccelerationStructureNV:
- case spv::OpTypeCooperativeMatrixNV:
def_index[insn.word(1)] = insn.offset();
break;
@@ -166,26 +125,6 @@ void SHADER_MODULE_STATE::BuildDefIndex() {
def_index[insn.word(2)] = insn.offset();
break;
- // Decorations
- case spv::OpDecorate: {
- auto targetId = insn.word(1);
- decorations[targetId].add(insn.word(2), insn.len() > 3u ? insn.word(3) : 0u);
- } break;
- case spv::OpGroupDecorate: {
- auto const &src = decorations[insn.word(1)];
- for (auto i = 2u; i < insn.len(); i++) decorations[insn.word(i)].merge(src);
- } break;
-
- // Entry points ... add to the entrypoint table
- case spv::OpEntryPoint: {
- // Entry points do not have an id (the id is the function id) and thus need their own table
- auto entrypoint_name = (char const *)&insn.word(3);
- auto execution_model = insn.word(1);
- auto entrypoint_stage = ExecutionModelToShaderStageFlagBits(execution_model);
- entry_points.emplace(entrypoint_name, EntryPoint{insn.offset(), entrypoint_stage});
- break;
- }
-
default:
// We don't care about any other defs for now.
break;
@@ -228,13 +167,19 @@ unsigned ExecutionModelToShaderStageFlagBits(unsigned mode) {
}
}
-static spirv_inst_iter FindEntrypoint(SHADER_MODULE_STATE const *src, char const *name, VkShaderStageFlagBits stageBits) {
- auto range = src->entry_points.equal_range(name);
- for (auto it = range.first; it != range.second; ++it) {
- if (it->second.stage == stageBits) {
- return src->at(it->second.offset);
+static spirv_inst_iter FindEntrypoint(shader_module const *src, char const *name, VkShaderStageFlagBits stageBits) {
+ for (auto insn : *src) {
+ if (insn.opcode() == spv::OpEntryPoint) {
+ auto entrypointName = (char const *)&insn.word(3);
+ auto executionModel = insn.word(1);
+ auto entrypointStageBits = ExecutionModelToShaderStageFlagBits(executionModel);
+
+ if (!strcmp(entrypointName, name) && (entrypointStageBits & stageBits)) {
+ return insn;
+ }
}
}
+
return src->end();
}
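
FindEntrypoint relies on the SPIR-V encoding of OpEntryPoint: word 1 holds the execution model, word 2 the function id, and the entry point name is a nul-terminated string packed into 32-bit words starting at word 3, which is why (char const *)&insn.word(3) can be handed straight to strcmp. A small self-contained sketch of reading such a name follows; EntryPointName is a hypothetical helper and a little-endian host is assumed, matching the cast above.

#include <cstdint>
#include <string>
#include <vector>

// Hypothetical helper: read the packed entry point name from one OpEntryPoint instruction.
static std::string EntryPointName(const std::vector<uint32_t> &insn_words) {
    // The name starts at word 3 and runs to the first nul byte; trailing bytes of the
    // last word are zero padding.
    return std::string(reinterpret_cast<const char *>(&insn_words[3]));
}

int main() {
    // { opcode word, execution model, entry point id, "main" packed as bytes, zero padding }
    std::vector<uint32_t> insn = {0, 4, 7, 0x6e69616d, 0x00000000};
    return EntryPointName(insn) == "main" ? 0 : 1;
}
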
@@ -272,7 +217,7 @@ static char const *StorageClassName(unsigned sc) {
}
// Get the value of an integral constant
-unsigned GetConstantValue(SHADER_MODULE_STATE const *src, unsigned id) {
+unsigned GetConstantValue(shader_module const *src, unsigned id) {
auto value = src->get_def(id);
assert(value != src->end());
@@ -285,7 +230,7 @@ unsigned GetConstantValue(SHADER_MODULE_STATE const *src, unsigned id) {
return value.word(3);
}
-static void DescribeTypeInner(std::ostringstream &ss, SHADER_MODULE_STATE const *src, unsigned type) {
+static void DescribeTypeInner(std::ostringstream &ss, shader_module const *src, unsigned type) {
auto insn = src->get_def(type);
assert(insn != src->end());
@@ -350,7 +295,7 @@ static void DescribeTypeInner(std::ostringstream &ss, SHADER_MODULE_STATE const
}
}
-static std::string DescribeType(SHADER_MODULE_STATE const *src, unsigned type) {
+static std::string DescribeType(shader_module const *src, unsigned type) {
std::ostringstream ss;
DescribeTypeInner(ss, src, type);
return ss.str();
@@ -361,7 +306,7 @@ static bool IsNarrowNumericType(spirv_inst_iter type) {
return type.word(2) < 64;
}
-static bool TypesMatch(SHADER_MODULE_STATE const *a, SHADER_MODULE_STATE const *b, unsigned a_type, unsigned b_type, bool a_arrayed,
+static bool TypesMatch(shader_module const *a, shader_module const *b, unsigned a_type, unsigned b_type, bool a_arrayed,
bool b_arrayed, bool relaxed) {
// Walk two type trees together, and complain about differences
auto a_insn = a->get_def(a_type);
@@ -453,7 +398,7 @@ static unsigned ValueOrDefault(std::unordered_map<unsigned, unsigned> const &map
return it->second;
}
-static unsigned GetLocationsConsumedByType(SHADER_MODULE_STATE const *src, unsigned type, bool strip_array_level) {
+static unsigned GetLocationsConsumedByType(shader_module const *src, unsigned type, bool strip_array_level) {
auto insn = src->get_def(type);
assert(insn != src->end());
@@ -487,7 +432,7 @@ static unsigned GetLocationsConsumedByType(SHADER_MODULE_STATE const *src, unsig
}
}
-static unsigned GetComponentsConsumedByType(SHADER_MODULE_STATE const *src, unsigned type, bool strip_array_level) {
+static unsigned GetComponentsConsumedByType(shader_module const *src, unsigned type, bool strip_array_level) {
auto insn = src->get_def(type);
assert(insn != src->end());
@@ -503,12 +448,13 @@ static unsigned GetComponentsConsumedByType(SHADER_MODULE_STATE const *src, unsi
}
return sum;
}
- case spv::OpTypeArray:
- if (strip_array_level) {
- return GetComponentsConsumedByType(src, insn.word(2), false);
- } else {
- return GetConstantValue(src, insn.word(3)) * GetComponentsConsumedByType(src, insn.word(2), false);
+ case spv::OpTypeArray: {
+ uint32_t sum = 0;
+ for (uint32_t i = 2; i < insn.len(); i++) {
+ sum += GetComponentsConsumedByType(src, insn.word(i), false);
}
+ return sum;
+ }
case spv::OpTypeMatrix:
// Num locations is the dimension * element size
return insn.word(3) * GetComponentsConsumedByType(src, insn.word(2), false);
@@ -559,7 +505,7 @@ static unsigned GetFormatType(VkFormat fmt) {
// characterizes a SPIR-V type appearing in an interface to a FF stage, for comparison to a VkFormat's characterization above.
// also used for input attachments, as we statically know their format.
-static unsigned GetFundamentalType(SHADER_MODULE_STATE const *src, unsigned type) {
+static unsigned GetFundamentalType(shader_module const *src, unsigned type) {
auto insn = src->get_def(type);
assert(insn != src->end());
@@ -587,7 +533,7 @@ static uint32_t GetShaderStageId(VkShaderStageFlagBits stage) {
return bit_pos - 1;
}
-static spirv_inst_iter GetStructType(SHADER_MODULE_STATE const *src, spirv_inst_iter def, bool is_array_of_verts) {
+static spirv_inst_iter GetStructType(shader_module const *src, spirv_inst_iter def, bool is_array_of_verts) {
while (true) {
if (def.opcode() == spv::OpTypePointer) {
def = src->get_def(def.word(3));
@@ -602,12 +548,12 @@ static spirv_inst_iter GetStructType(SHADER_MODULE_STATE const *src, spirv_inst_
}
}
-static bool CollectInterfaceBlockMembers(SHADER_MODULE_STATE const *src, std::map<location_t, interface_var> *out,
- bool is_array_of_verts, uint32_t id, uint32_t type_id, bool is_patch,
- int /*first_location*/) {
+static bool CollectInterfaceBlockMembers(shader_module const *src, std::map<location_t, interface_var> *out,
+ std::unordered_map<unsigned, unsigned> const &blocks, bool is_array_of_verts, uint32_t id,
+ uint32_t type_id, bool is_patch, int /*first_location*/) {
// Walk down the type_id presented, trying to determine whether it's actually an interface block.
auto type = GetStructType(src, src->get_def(type_id), is_array_of_verts && !is_patch);
- if (type == src->end() || !(src->get_decorations(type.word(1)).flags & decoration_set::block_bit)) {
+ if (type == src->end() || blocks.find(type.word(1)) == blocks.end()) {
// This isn't an interface block.
return false;
}
@@ -670,10 +616,48 @@ static bool CollectInterfaceBlockMembers(SHADER_MODULE_STATE const *src, std::ma
return true;
}
-static std::vector<uint32_t> FindEntrypointInterfaces(spirv_inst_iter entrypoint) {
- assert(entrypoint.opcode() == spv::OpEntryPoint);
+static std::map<location_t, interface_var> CollectInterfaceByLocation(shader_module const *src, spirv_inst_iter entrypoint,
+ spv::StorageClass sinterface, bool is_array_of_verts) {
+ std::unordered_map<unsigned, unsigned> var_locations;
+ std::unordered_map<unsigned, unsigned> var_builtins;
+ std::unordered_map<unsigned, unsigned> var_components;
+ std::unordered_map<unsigned, unsigned> blocks;
+ std::unordered_map<unsigned, unsigned> var_patch;
+ std::unordered_map<unsigned, unsigned> var_relaxed_precision;
+
+ for (auto insn : *src) {
+ // We consider two interface models: SSO rendezvous-by-location, and builtins. Complain about anything that
+ // fits neither model.
+ if (insn.opcode() == spv::OpDecorate) {
+ if (insn.word(2) == spv::DecorationLocation) {
+ var_locations[insn.word(1)] = insn.word(3);
+ }
+
+ if (insn.word(2) == spv::DecorationBuiltIn) {
+ var_builtins[insn.word(1)] = insn.word(3);
+ }
+
+ if (insn.word(2) == spv::DecorationComponent) {
+ var_components[insn.word(1)] = insn.word(3);
+ }
+
+ if (insn.word(2) == spv::DecorationBlock) {
+ blocks[insn.word(1)] = 1;
+ }
+
+ if (insn.word(2) == spv::DecorationPatch) {
+ var_patch[insn.word(1)] = 1;
+ }
+
+ if (insn.word(2) == spv::DecorationRelaxedPrecision) {
+ var_relaxed_precision[insn.word(1)] = 1;
+ }
+ }
+ }
+
+ // TODO: handle grouped decorations
+ // TODO: handle index=1 dual source outputs from FS -- two vars will have the same location, and we DON'T want to clobber.
- std::vector<uint32_t> interfaces;
// Find the end of the entrypoint's name string. additional zero bytes follow the actual null terminator, to fill out the
// rest of the word - so we only need to look at the last byte in the word to determine which word contains the terminator.
uint32_t word = 3;
@@ -682,36 +666,26 @@ static std::vector<uint32_t> FindEntrypointInterfaces(spirv_inst_iter entrypoint
}
++word;
- for (; word < entrypoint.len(); word++) interfaces.push_back(entrypoint.word(word));
-
- return interfaces;
-}
-
-static std::map<location_t, interface_var> CollectInterfaceByLocation(SHADER_MODULE_STATE const *src, spirv_inst_iter entrypoint,
- spv::StorageClass sinterface, bool is_array_of_verts) {
- // TODO: handle index=1 dual source outputs from FS -- two vars will have the same location, and we DON'T want to clobber.
-
std::map<location_t, interface_var> out;
- for (uint32_t iid : FindEntrypointInterfaces(entrypoint)) {
- auto insn = src->get_def(iid);
+ for (; word < entrypoint.len(); word++) {
+ auto insn = src->get_def(entrypoint.word(word));
assert(insn != src->end());
assert(insn.opcode() == spv::OpVariable);
if (insn.word(3) == static_cast<uint32_t>(sinterface)) {
- auto d = src->get_decorations(iid);
unsigned id = insn.word(2);
unsigned type = insn.word(1);
- int location = d.location;
- int builtin = d.builtin;
- unsigned component = d.component;
- bool is_patch = (d.flags & decoration_set::patch_bit) != 0;
- bool is_relaxed_precision = (d.flags & decoration_set::relaxed_precision_bit) != 0;
+ int location = ValueOrDefault(var_locations, id, static_cast<unsigned>(-1));
+ int builtin = ValueOrDefault(var_builtins, id, static_cast<unsigned>(-1));
+ unsigned component = ValueOrDefault(var_components, id, 0); // Unspecified is OK, is 0
+ bool is_patch = var_patch.find(id) != var_patch.end();
+ bool is_relaxed_precision = var_relaxed_precision.find(id) != var_relaxed_precision.end();
if (builtin != -1)
continue;
- else if (!CollectInterfaceBlockMembers(src, &out, is_array_of_verts, id, type, is_patch, location)) {
+ else if (!CollectInterfaceBlockMembers(src, &out, blocks, is_array_of_verts, id, type, is_patch, location)) {
// A user-defined interface variable, with a location. Where a variable occupied multiple locations, emit
// one result for each.
unsigned num_locations = GetLocationsConsumedByType(src, type, is_array_of_verts && !is_patch);
@@ -731,88 +705,8 @@ static std::map<location_t, interface_var> CollectInterfaceByLocation(SHADER_MOD
return out;
}
-static std::vector<uint32_t> CollectBuiltinBlockMembers(SHADER_MODULE_STATE const *src, spirv_inst_iter entrypoint,
- uint32_t storageClass) {
- std::vector<uint32_t> variables;
- std::vector<uint32_t> builtinStructMembers;
- std::vector<uint32_t> builtinDecorations;
-
- for (auto insn : *src) {
- switch (insn.opcode()) {
- // Find all built-in member decorations
- case spv::OpMemberDecorate:
- if (insn.word(3) == spv::DecorationBuiltIn) {
- builtinStructMembers.push_back(insn.word(1));
- }
- break;
- // Find all built-in decorations
- case spv::OpDecorate:
- switch (insn.word(2)) {
- case spv::DecorationBlock: {
- uint32_t blockID = insn.word(1);
- for (auto builtInBlockID : builtinStructMembers) {
- // Check if one of the members of the block are built-in -> the block is built-in
- if (blockID == builtInBlockID) {
- builtinDecorations.push_back(blockID);
- break;
- }
- }
- break;
- }
- case spv::DecorationBuiltIn:
- builtinDecorations.push_back(insn.word(1));
- break;
- default:
- break;
- }
- break;
- default:
- break;
- }
- }
-
- // Find all interface variables belonging to the entrypoint and matching the storage class
- for (uint32_t id : FindEntrypointInterfaces(entrypoint)) {
- auto def = src->get_def(id);
- assert(def != src->end());
- assert(def.opcode() == spv::OpVariable);
-
- if (def.word(3) == storageClass) variables.push_back(def.word(1));
- }
-
- // Find all members belonging to the builtin block selected
- std::vector<uint32_t> builtinBlockMembers;
- for (auto &var : variables) {
- auto def = src->get_def(src->get_def(var).word(3));
-
- // It could be an array of IO blocks. The element type should be the struct defining the block contents
- if (def.opcode() == spv::OpTypeArray) def = src->get_def(def.word(2));
-
- // Now find all members belonging to the struct defining the IO block
- if (def.opcode() == spv::OpTypeStruct) {
- for (auto builtInID : builtinDecorations) {
- if (builtInID == def.word(1)) {
- for (int i = 2; i < (int)def.len(); i++)
- builtinBlockMembers.push_back(spv::BuiltInMax); // Start with undefined builtin for each struct member.
- // These shouldn't be left after replacing.
- for (auto insn : *src) {
- if (insn.opcode() == spv::OpMemberDecorate && insn.word(1) == builtInID &&
- insn.word(3) == spv::DecorationBuiltIn) {
- auto structIndex = insn.word(2);
- assert(structIndex < builtinBlockMembers.size());
- builtinBlockMembers[structIndex] = insn.word(4);
- }
- }
- }
- }
- }
- }
-
- return builtinBlockMembers;
-}
-
static std::vector<std::pair<uint32_t, interface_var>> CollectInterfaceByInputAttachmentIndex(
- SHADER_MODULE_STATE const *src, std::unordered_set<uint32_t> const &accessible_ids) {
+ shader_module const *src, std::unordered_set<uint32_t> const &accessible_ids) {
std::vector<std::pair<uint32_t, interface_var>> out;
for (auto insn : *src) {
@@ -843,7 +737,7 @@ static std::vector<std::pair<uint32_t, interface_var>> CollectInterfaceByInputAt
return out;
}
-static bool IsWritableDescriptorType(SHADER_MODULE_STATE const *module, uint32_t type_id, bool is_storage_buffer) {
+static bool IsWritableDescriptorType(shader_module const *module, uint32_t type_id, bool is_storage_buffer) {
auto type = module->get_def(type_id);
// Strip off any array or ptrs. Where we remove array levels, adjust the descriptor count for each dimension.
@@ -864,10 +758,15 @@ static bool IsWritableDescriptorType(SHADER_MODULE_STATE const *module, uint32_t
case spv::OpTypeStruct: {
std::unordered_set<unsigned> nonwritable_members;
- if (module->get_decorations(type.word(1)).flags & decoration_set::buffer_block_bit) is_storage_buffer = true;
for (auto insn : *module) {
- if (insn.opcode() == spv::OpMemberDecorate && insn.word(1) == type.word(1) &&
- insn.word(3) == spv::DecorationNonWritable) {
+ if (insn.opcode() == spv::OpDecorate && insn.word(1) == type.word(1)) {
+ if (insn.word(2) == spv::DecorationBufferBlock) {
+ // Legacy storage block in the Uniform storage class
+ // has its struct type decorated with BufferBlock.
+ is_storage_buffer = true;
+ }
+ } else if (insn.opcode() == spv::OpMemberDecorate && insn.word(1) == type.word(1) &&
+ insn.word(3) == spv::DecorationNonWritable) {
nonwritable_members.insert(insn.word(2));
}
}
@@ -882,8 +781,32 @@ static bool IsWritableDescriptorType(SHADER_MODULE_STATE const *module, uint32_t
}
static std::vector<std::pair<descriptor_slot_t, interface_var>> CollectInterfaceByDescriptorSlot(
- debug_report_data const *report_data, SHADER_MODULE_STATE const *src, std::unordered_set<uint32_t> const &accessible_ids,
+ debug_report_data const *report_data, shader_module const *src, std::unordered_set<uint32_t> const &accessible_ids,
bool *has_writable_descriptor) {
+ std::unordered_map<unsigned, unsigned> var_sets;
+ std::unordered_map<unsigned, unsigned> var_bindings;
+ std::unordered_map<unsigned, unsigned> var_nonwritable;
+
+ for (auto insn : *src) {
+ // All variables in the Uniform or UniformConstant storage classes are required to be decorated with both
+ // DecorationDescriptorSet and DecorationBinding.
+ if (insn.opcode() == spv::OpDecorate) {
+ if (insn.word(2) == spv::DecorationDescriptorSet) {
+ var_sets[insn.word(1)] = insn.word(3);
+ }
+
+ if (insn.word(2) == spv::DecorationBinding) {
+ var_bindings[insn.word(1)] = insn.word(3);
+ }
+
+ // Note: do toplevel DecorationNonWritable out here; it applies to
+ // the OpVariable rather than the type.
+ if (insn.word(2) == spv::DecorationNonWritable) {
+ var_nonwritable[insn.word(1)] = 1;
+ }
+ }
+ }
+
std::vector<std::pair<descriptor_slot_t, interface_var>> out;
for (auto id : accessible_ids) {
@@ -893,16 +816,15 @@ static std::vector<std::pair<descriptor_slot_t, interface_var>> CollectInterface
if (insn.opcode() == spv::OpVariable &&
(insn.word(3) == spv::StorageClassUniform || insn.word(3) == spv::StorageClassUniformConstant ||
insn.word(3) == spv::StorageClassStorageBuffer)) {
- auto d = src->get_decorations(insn.word(2));
- unsigned set = d.descriptor_set;
- unsigned binding = d.binding;
+ unsigned set = ValueOrDefault(var_sets, insn.word(2), 0);
+ unsigned binding = ValueOrDefault(var_bindings, insn.word(2), 0);
interface_var v = {};
v.id = insn.word(2);
v.type_id = insn.word(1);
out.emplace_back(std::make_pair(set, binding), v);
- if (!(d.flags & decoration_set::nonwritable_bit) &&
+ if (var_nonwritable.find(id) == var_nonwritable.end() &&
IsWritableDescriptorType(src, insn.word(1), insn.word(3) == spv::StorageClassStorageBuffer)) {
*has_writable_descriptor = true;
}
@@ -935,7 +857,7 @@ static bool ValidateViConsistency(debug_report_data const *report_data, VkPipeli
}
static bool ValidateViAgainstVsInputs(debug_report_data const *report_data, VkPipelineVertexInputStateCreateInfo const *vi,
- SHADER_MODULE_STATE const *vs, spirv_inst_iter entrypoint) {
+ shader_module const *vs, spirv_inst_iter entrypoint) {
bool skip = false;
auto inputs = CollectInterfaceByLocation(vs, entrypoint, spv::StorageClassInput, false);
@@ -996,7 +918,7 @@ static bool ValidateViAgainstVsInputs(debug_report_data const *report_data, VkPi
return skip;
}
-static bool ValidateFsOutputsAgainstRenderPass(debug_report_data const *report_data, SHADER_MODULE_STATE const *fs,
+static bool ValidateFsOutputsAgainstRenderPass(debug_report_data const *report_data, shader_module const *fs,
spirv_inst_iter entrypoint, PIPELINE_STATE const *pipeline, uint32_t subpass_index) {
auto rpci = pipeline->rp_state->createInfo.ptr();
@@ -1083,7 +1005,7 @@ static bool ValidateFsOutputsAgainstRenderPass(debug_report_data const *report_d
// For PointSize analysis we need to know if the variable decorated with the PointSize built-in was actually written to.
// This function examines instructions in the static call tree for a write to this variable.
-static bool IsPointSizeWritten(SHADER_MODULE_STATE const *src, spirv_inst_iter builtin_instr, spirv_inst_iter entrypoint) {
+static bool IsPointSizeWritten(shader_module const *src, spirv_inst_iter builtin_instr, spirv_inst_iter entrypoint) {
auto type = builtin_instr.opcode();
uint32_t target_id = builtin_instr.word(1);
bool init_complete = false;
@@ -1164,7 +1086,7 @@ static bool IsPointSizeWritten(SHADER_MODULE_STATE const *src, spirv_inst_iter b
//
// TODO: The set of interesting opcodes here was determined by eyeballing the SPIRV spec. It might be worth
// converting parts of this to be generated from the machine-readable spec instead.
-static std::unordered_set<uint32_t> MarkAccessibleIds(SHADER_MODULE_STATE const *src, spirv_inst_iter entrypoint) {
+static std::unordered_set<uint32_t> MarkAccessibleIds(shader_module const *src, spirv_inst_iter entrypoint) {
std::unordered_set<uint32_t> ids;
std::unordered_set<uint32_t> worklist;
worklist.insert(entrypoint.word(2));
@@ -1277,8 +1199,7 @@ static std::unordered_set<uint32_t> MarkAccessibleIds(SHADER_MODULE_STATE const
static bool ValidatePushConstantBlockAgainstPipeline(debug_report_data const *report_data,
std::vector<VkPushConstantRange> const *push_constant_ranges,
- SHADER_MODULE_STATE const *src, spirv_inst_iter type,
- VkShaderStageFlagBits stage) {
+ shader_module const *src, spirv_inst_iter type, VkShaderStageFlagBits stage) {
bool skip = false;
// Strip off ptrs etc
@@ -1323,7 +1244,7 @@ static bool ValidatePushConstantBlockAgainstPipeline(debug_report_data const *re
}
static bool ValidatePushConstantUsage(debug_report_data const *report_data,
- std::vector<VkPushConstantRange> const *push_constant_ranges, SHADER_MODULE_STATE const *src,
+ std::vector<VkPushConstantRange> const *push_constant_ranges, shader_module const *src,
std::unordered_set<uint32_t> accessible_ids, VkShaderStageFlagBits stage) {
bool skip = false;
@@ -1362,7 +1283,7 @@ static bool ValidateSpecializationOffsets(debug_report_data const *report_data,
}
// TODO (jbolz): Can this return a const reference?
-static std::set<uint32_t> TypeToDescriptorTypeSet(SHADER_MODULE_STATE const *module, uint32_t type_id, unsigned &descriptor_count) {
+static std::set<uint32_t> TypeToDescriptorTypeSet(shader_module const *module, uint32_t type_id, unsigned &descriptor_count) {
auto type = module->get_def(type_id);
bool is_storage_buffer = false;
descriptor_count = 1;
@@ -1475,18 +1396,6 @@ static std::string string_descriptorTypes(const std::set<uint32_t> &descriptor_t
return ss.str();
}
-static bool RequirePropertyFlag(debug_report_data const *report_data, VkBool32 check, char const *flag, char const *structure) {
- if (!check) {
- if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- kVUID_Core_Shader_ExceedDeviceLimit, "Shader requires flag %s set in %s but it is not set on the device", flag,
- structure)) {
- return true;
- }
- }
-
- return false;
-}
-
static bool RequireFeature(debug_report_data const *report_data, VkBool32 feature, char const *feature_name) {
if (!feature) {
if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
@@ -1510,9 +1419,13 @@ static bool RequireExtension(debug_report_data const *report_data, bool extensio
return false;
}
-bool CoreChecks::ValidateShaderCapabilities(SHADER_MODULE_STATE const *src, VkShaderStageFlagBits stage) const {
+bool CoreChecks::ValidateShaderCapabilities(layer_data *dev_data, shader_module const *src, VkShaderStageFlagBits stage,
+ bool has_writable_descriptor) {
bool skip = false;
+ auto const &features = GetEnabledFeatures();
+ auto const &extensions = GetDeviceExtensions();
+
struct FeaturePointer {
// Callable object to test if this feature is enabled in the given aggregate feature struct
const std::function<VkBool32(const DeviceFeatures &)> IsEnabled;
@@ -1537,22 +1450,6 @@ bool CoreChecks::ValidateShaderCapabilities(SHADER_MODULE_STATE const *src, VkSh
: IsEnabled([=](const DeviceFeatures &features) { return features.float16_int8.*ptr; }) {}
FeaturePointer(VkBool32 VkPhysicalDeviceScalarBlockLayoutFeaturesEXT::*ptr)
: IsEnabled([=](const DeviceFeatures &features) { return features.scalar_block_layout_features.*ptr; }) {}
- FeaturePointer(VkBool32 VkPhysicalDeviceCooperativeMatrixFeaturesNV::*ptr)
- : IsEnabled([=](const DeviceFeatures &features) { return features.cooperative_matrix_features.*ptr; }) {}
- FeaturePointer(VkBool32 VkPhysicalDeviceFloatControlsPropertiesKHR::*ptr)
- : IsEnabled([=](const DeviceFeatures &features) { return features.float_controls.*ptr; }) {}
- FeaturePointer(VkBool32 VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR::*ptr)
- : IsEnabled([=](const DeviceFeatures &features) { return features.uniform_buffer_standard_layout.*ptr; }) {}
- FeaturePointer(VkBool32 VkPhysicalDeviceComputeShaderDerivativesFeaturesNV::*ptr)
- : IsEnabled([=](const DeviceFeatures &features) { return features.compute_shader_derivatives_features.*ptr; }) {}
- FeaturePointer(VkBool32 VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV::*ptr)
- : IsEnabled([=](const DeviceFeatures &features) { return features.fragment_shader_barycentric_features.*ptr; }) {}
- FeaturePointer(VkBool32 VkPhysicalDeviceShaderImageFootprintFeaturesNV::*ptr)
- : IsEnabled([=](const DeviceFeatures &features) { return features.shader_image_footprint_features.*ptr; }) {}
- FeaturePointer(VkBool32 VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT::*ptr)
- : IsEnabled([=](const DeviceFeatures &features) { return features.fragment_shader_interlock_features.*ptr; }) {}
- FeaturePointer(VkBool32 VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT::*ptr)
- : IsEnabled([=](const DeviceFeatures &features) { return features.demote_to_helper_invocation_features.*ptr; }) {}
};
struct CapabilityInfo {
@@ -1571,7 +1468,6 @@ bool CoreChecks::ValidateShaderCapabilities(SHADER_MODULE_STATE const *src, VkSh
{spv::CapabilitySampled1D, {nullptr}},
{spv::CapabilityImage1D, {nullptr}},
{spv::CapabilitySampledBuffer, {nullptr}},
- {spv::CapabilityStorageImageExtendedFormats, {nullptr}},
{spv::CapabilityImageQuery, {nullptr}},
{spv::CapabilityDerivativeControl, {nullptr}},
@@ -1597,6 +1493,7 @@ bool CoreChecks::ValidateShaderCapabilities(SHADER_MODULE_STATE const *src, VkSh
{spv::CapabilityMinLod, {"VkPhysicalDeviceFeatures::shaderResourceMinLod", &VkPhysicalDeviceFeatures::shaderResourceMinLod}},
{spv::CapabilitySampledCubeArray, {"VkPhysicalDeviceFeatures::imageCubeArray", &VkPhysicalDeviceFeatures::imageCubeArray}},
{spv::CapabilityImageMSArray, {"VkPhysicalDeviceFeatures::shaderStorageImageMultisample", &VkPhysicalDeviceFeatures::shaderStorageImageMultisample}},
+ {spv::CapabilityStorageImageExtendedFormats, {"VkPhysicalDeviceFeatures::shaderStorageImageExtendedFormats", &VkPhysicalDeviceFeatures::shaderStorageImageExtendedFormats}},
{spv::CapabilityInterpolationFunction, {"VkPhysicalDeviceFeatures::sampleRateShading", &VkPhysicalDeviceFeatures::sampleRateShading}},
{spv::CapabilityStorageImageReadWithoutFormat, {"VkPhysicalDeviceFeatures::shaderStorageImageReadWithoutFormat", &VkPhysicalDeviceFeatures::shaderStorageImageReadWithoutFormat}},
{spv::CapabilityStorageImageWriteWithoutFormat, {"VkPhysicalDeviceFeatures::shaderStorageImageWriteWithoutFormat", &VkPhysicalDeviceFeatures::shaderStorageImageWriteWithoutFormat}},
@@ -1613,7 +1510,7 @@ bool CoreChecks::ValidateShaderCapabilities(SHADER_MODULE_STATE const *src, VkSh
{spv::CapabilityStorageImageArrayNonUniformIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderStorageImageArrayNonUniformIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderStorageImageArrayNonUniformIndexing}},
{spv::CapabilityInputAttachmentArrayNonUniformIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderInputAttachmentArrayNonUniformIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderInputAttachmentArrayNonUniformIndexing}},
{spv::CapabilityUniformTexelBufferArrayNonUniformIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderUniformTexelBufferArrayNonUniformIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderUniformTexelBufferArrayNonUniformIndexing}},
- {spv::CapabilityStorageTexelBufferArrayNonUniformIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderStorageTexelBufferArrayNonUniformIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderStorageTexelBufferArrayNonUniformIndexing}},
+ {spv::CapabilityStorageTexelBufferArrayNonUniformIndexingEXT , {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderStorageTexelBufferArrayNonUniformIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderStorageTexelBufferArrayNonUniformIndexing}},
// Capabilities that require an extension
{spv::CapabilityDrawParameters, {VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_khr_shader_draw_parameters}},
@@ -1624,47 +1521,17 @@ bool CoreChecks::ValidateShaderCapabilities(SHADER_MODULE_STATE const *src, VkSh
{spv::CapabilityShaderViewportMaskNV, {VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_nv_viewport_array2}},
{spv::CapabilitySubgroupBallotKHR, {VK_EXT_SHADER_SUBGROUP_BALLOT_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_ext_shader_subgroup_ballot }},
{spv::CapabilitySubgroupVoteKHR, {VK_EXT_SHADER_SUBGROUP_VOTE_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_ext_shader_subgroup_vote }},
- {spv::CapabilityGroupNonUniformPartitionedNV, {VK_NV_SHADER_SUBGROUP_PARTITIONED_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_nv_shader_subgroup_partitioned}},
{spv::CapabilityInt64Atomics, {VK_KHR_SHADER_ATOMIC_INT64_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_khr_shader_atomic_int64 }},
- {spv::CapabilityComputeDerivativeGroupQuadsNV, {"VkPhysicalDeviceComputeShaderDerivativesFeaturesNV::computeDerivativeGroupQuads", &VkPhysicalDeviceComputeShaderDerivativesFeaturesNV::computeDerivativeGroupQuads, &DeviceExtensions::vk_nv_compute_shader_derivatives}},
- {spv::CapabilityComputeDerivativeGroupLinearNV, {"VkPhysicalDeviceComputeShaderDerivativesFeaturesNV::computeDerivativeGroupLinear", &VkPhysicalDeviceComputeShaderDerivativesFeaturesNV::computeDerivativeGroupLinear, &DeviceExtensions::vk_nv_compute_shader_derivatives}},
- {spv::CapabilityFragmentBarycentricNV, {"VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV::fragmentShaderBarycentric", &VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV::fragmentShaderBarycentric, &DeviceExtensions::vk_nv_fragment_shader_barycentric}},
-
- {spv::CapabilityStorageBuffer8BitAccess, {"VkPhysicalDevice8BitStorageFeaturesKHR::storageBuffer8BitAccess", &VkPhysicalDevice8BitStorageFeaturesKHR::storageBuffer8BitAccess, &DeviceExtensions::vk_khr_8bit_storage}},
- {spv::CapabilityUniformAndStorageBuffer8BitAccess, {"VkPhysicalDevice8BitStorageFeaturesKHR::uniformAndStorageBuffer8BitAccess", &VkPhysicalDevice8BitStorageFeaturesKHR::uniformAndStorageBuffer8BitAccess, &DeviceExtensions::vk_khr_8bit_storage}},
- {spv::CapabilityStoragePushConstant8, {"VkPhysicalDevice8BitStorageFeaturesKHR::storagePushConstant8", &VkPhysicalDevice8BitStorageFeaturesKHR::storagePushConstant8, &DeviceExtensions::vk_khr_8bit_storage}},
-
- {spv::CapabilityTransformFeedback, { "VkPhysicalDeviceTransformFeedbackFeaturesEXT::transformFeedback", &VkPhysicalDeviceTransformFeedbackFeaturesEXT::transformFeedback, &DeviceExtensions::vk_ext_transform_feedback}},
- {spv::CapabilityGeometryStreams, { "VkPhysicalDeviceTransformFeedbackFeaturesEXT::geometryStreams", &VkPhysicalDeviceTransformFeedbackFeaturesEXT::geometryStreams, &DeviceExtensions::vk_ext_transform_feedback}},
-
- {spv::CapabilityFloat16, {"VkPhysicalDeviceFloat16Int8FeaturesKHR::shaderFloat16", &VkPhysicalDeviceFloat16Int8FeaturesKHR::shaderFloat16, &DeviceExtensions::vk_khr_shader_float16_int8}},
- {spv::CapabilityInt8, {"VkPhysicalDeviceFloat16Int8FeaturesKHR::shaderInt8", &VkPhysicalDeviceFloat16Int8FeaturesKHR::shaderInt8, &DeviceExtensions::vk_khr_shader_float16_int8}},
-
- {spv::CapabilityImageFootprintNV, {"VkPhysicalDeviceShaderImageFootprintFeaturesNV::imageFootprint", &VkPhysicalDeviceShaderImageFootprintFeaturesNV::imageFootprint, &DeviceExtensions::vk_nv_shader_image_footprint}},
-
- {spv::CapabilityCooperativeMatrixNV, {"VkPhysicalDeviceCooperativeMatrixFeaturesNV::cooperativeMatrix", &VkPhysicalDeviceCooperativeMatrixFeaturesNV::cooperativeMatrix, &DeviceExtensions::vk_nv_cooperative_matrix}},
-
- {spv::CapabilitySignedZeroInfNanPreserve, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderSignedZeroInfNanPreserveFloat16", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderSignedZeroInfNanPreserveFloat16, &DeviceExtensions::vk_khr_shader_float_controls}},
- {spv::CapabilitySignedZeroInfNanPreserve, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderSignedZeroInfNanPreserveFloat32", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderSignedZeroInfNanPreserveFloat32, &DeviceExtensions::vk_khr_shader_float_controls}},
- {spv::CapabilitySignedZeroInfNanPreserve, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderSignedZeroInfNanPreserveFloat64", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderSignedZeroInfNanPreserveFloat64, &DeviceExtensions::vk_khr_shader_float_controls}},
- {spv::CapabilityDenormPreserve, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormPreserveFloat16", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormPreserveFloat16, &DeviceExtensions::vk_khr_shader_float_controls}},
- {spv::CapabilityDenormPreserve, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormPreserveFloat32", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormPreserveFloat32, &DeviceExtensions::vk_khr_shader_float_controls}},
- {spv::CapabilityDenormPreserve, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormPreserveFloat64", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormPreserveFloat64, &DeviceExtensions::vk_khr_shader_float_controls}},
- {spv::CapabilityDenormFlushToZero, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormFlushToZeroFloat16", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormFlushToZeroFloat16, &DeviceExtensions::vk_khr_shader_float_controls}},
- {spv::CapabilityDenormFlushToZero, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormFlushToZeroFloat32", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormFlushToZeroFloat32, &DeviceExtensions::vk_khr_shader_float_controls}},
- {spv::CapabilityDenormFlushToZero, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormFlushToZeroFloat64", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormFlushToZeroFloat64, &DeviceExtensions::vk_khr_shader_float_controls}},
- {spv::CapabilityRoundingModeRTE, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTEFloat16", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTEFloat16, &DeviceExtensions::vk_khr_shader_float_controls}},
- {spv::CapabilityRoundingModeRTE, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTEFloat32", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTEFloat32, &DeviceExtensions::vk_khr_shader_float_controls}},
- {spv::CapabilityRoundingModeRTE, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTEFloat64", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTEFloat64, &DeviceExtensions::vk_khr_shader_float_controls}},
- {spv::CapabilityRoundingModeRTZ, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTZFloat16", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTZFloat16, &DeviceExtensions::vk_khr_shader_float_controls}},
- {spv::CapabilityRoundingModeRTZ, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTZFloat32", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTZFloat32, &DeviceExtensions::vk_khr_shader_float_controls}},
- {spv::CapabilityRoundingModeRTZ, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTZFloat64", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTZFloat64, &DeviceExtensions::vk_khr_shader_float_controls}},
-
- {spv::CapabilityFragmentShaderSampleInterlockEXT, {"VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT::fragmentShaderSampleInterlock", &VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT::fragmentShaderSampleInterlock, &DeviceExtensions::vk_ext_fragment_shader_interlock}},
- {spv::CapabilityFragmentShaderPixelInterlockEXT, {"VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT::fragmentShaderPixelInterlock", &VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT::fragmentShaderPixelInterlock, &DeviceExtensions::vk_ext_fragment_shader_interlock}},
- {spv::CapabilityFragmentShaderShadingRateInterlockEXT, {"VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT::fragmentShaderShadingRateInterlock", &VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT::fragmentShaderShadingRateInterlock, &DeviceExtensions::vk_ext_fragment_shader_interlock}},
- {spv::CapabilityDemoteToHelperInvocationEXT, {"VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT::shaderDemoteToHelperInvocation", &VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT::shaderDemoteToHelperInvocation, &DeviceExtensions::vk_ext_shader_demote_to_helper_invocation}},
+ {spv::CapabilityStorageBuffer8BitAccess , {"VkPhysicalDevice8BitStorageFeaturesKHR::storageBuffer8BitAccess", &VkPhysicalDevice8BitStorageFeaturesKHR::storageBuffer8BitAccess, &DeviceExtensions::vk_khr_8bit_storage}},
+ {spv::CapabilityUniformAndStorageBuffer8BitAccess , {"VkPhysicalDevice8BitStorageFeaturesKHR::uniformAndStorageBuffer8BitAccess", &VkPhysicalDevice8BitStorageFeaturesKHR::uniformAndStorageBuffer8BitAccess, &DeviceExtensions::vk_khr_8bit_storage}},
+ {spv::CapabilityStoragePushConstant8 , {"VkPhysicalDevice8BitStorageFeaturesKHR::storagePushConstant8", &VkPhysicalDevice8BitStorageFeaturesKHR::storagePushConstant8, &DeviceExtensions::vk_khr_8bit_storage}},
+
+ {spv::CapabilityTransformFeedback , { "VkPhysicalDeviceTransformFeedbackFeaturesEXT::transformFeedback", &VkPhysicalDeviceTransformFeedbackFeaturesEXT::transformFeedback, &DeviceExtensions::vk_ext_transform_feedback}},
+ {spv::CapabilityGeometryStreams , { "VkPhysicalDeviceTransformFeedbackFeaturesEXT::geometryStreams", &VkPhysicalDeviceTransformFeedbackFeaturesEXT::geometryStreams, &DeviceExtensions::vk_ext_transform_feedback}},
+
+ {spv::CapabilityFloat16 , {"VkPhysicalDeviceFloat16Int8FeaturesKHR::shaderFloat16", &VkPhysicalDeviceFloat16Int8FeaturesKHR::shaderFloat16, &DeviceExtensions::vk_khr_shader_float16_int8}},
+ {spv::CapabilityInt8 , {"VkPhysicalDeviceFloat16Int8FeaturesKHR::shaderInt8", &VkPhysicalDeviceFloat16Int8FeaturesKHR::shaderInt8, &DeviceExtensions::vk_khr_shader_float16_int8}},
};
// clang-format on
@@ -1675,10 +1542,10 @@ bool CoreChecks::ValidateShaderCapabilities(SHADER_MODULE_STATE const *src, VkSh
auto it = capabilities.find(insn.word(1));
if (it != capabilities.end()) {
if (it->second.feature) {
- skip |= RequireFeature(report_data, it->second.feature.IsEnabled(enabled_features), it->second.name);
+ skip |= RequireFeature(report_data, it->second.feature.IsEnabled(*features), it->second.name);
}
if (it->second.extension) {
- skip |= RequireExtension(report_data, device_extensions.*(it->second.extension), it->second.name);
+ skip |= RequireExtension(report_data, extensions->*(it->second.extension), it->second.name);
}
}
} else if (1 < n) { // key occurs multiple times, at least one must be enabled
@@ -1690,13 +1557,13 @@ bool CoreChecks::ValidateShaderCapabilities(SHADER_MODULE_STATE const *src, VkSh
for (auto it = caps.first; it != caps.second; ++it) {
if (it->second.feature) {
needs_feature = true;
- has_feature = has_feature || it->second.feature.IsEnabled(enabled_features);
+ has_feature = has_feature || it->second.feature.IsEnabled(*features);
feature_names += it->second.name;
feature_names += " ";
}
if (it->second.extension) {
needs_ext = true;
- has_ext = has_ext || device_extensions.*(it->second.extension);
+ has_ext = has_ext || extensions->*(it->second.extension);
extension_names += it->second.name;
extension_names += " ";
}
@@ -1709,86 +1576,10 @@ bool CoreChecks::ValidateShaderCapabilities(SHADER_MODULE_STATE const *src, VkSh
extension_names += "]";
skip |= RequireExtension(report_data, has_ext, extension_names.c_str());
}
- } else { // Do group non-uniform checks
- const VkSubgroupFeatureFlags supportedOperations = phys_dev_ext_props.subgroup_props.supportedOperations;
- const VkSubgroupFeatureFlags supportedStages = phys_dev_ext_props.subgroup_props.supportedStages;
-
- switch (insn.word(1)) {
- default:
- break;
- case spv::CapabilityGroupNonUniform:
- case spv::CapabilityGroupNonUniformVote:
- case spv::CapabilityGroupNonUniformArithmetic:
- case spv::CapabilityGroupNonUniformBallot:
- case spv::CapabilityGroupNonUniformShuffle:
- case spv::CapabilityGroupNonUniformShuffleRelative:
- case spv::CapabilityGroupNonUniformClustered:
- case spv::CapabilityGroupNonUniformQuad:
- case spv::CapabilityGroupNonUniformPartitionedNV:
- RequirePropertyFlag(report_data, supportedStages & stage, string_VkShaderStageFlagBits(stage),
- "VkPhysicalDeviceSubgroupProperties::supportedStages");
- break;
- }
-
- switch (insn.word(1)) {
- default:
- break;
- case spv::CapabilityGroupNonUniform:
- RequirePropertyFlag(report_data, supportedOperations & VK_SUBGROUP_FEATURE_BASIC_BIT,
- "VK_SUBGROUP_FEATURE_BASIC_BIT",
- "VkPhysicalDeviceSubgroupProperties::supportedOperations");
- break;
- case spv::CapabilityGroupNonUniformVote:
- RequirePropertyFlag(report_data, supportedOperations & VK_SUBGROUP_FEATURE_VOTE_BIT,
- "VK_SUBGROUP_FEATURE_VOTE_BIT",
- "VkPhysicalDeviceSubgroupProperties::supportedOperations");
- break;
- case spv::CapabilityGroupNonUniformArithmetic:
- RequirePropertyFlag(report_data, supportedOperations & VK_SUBGROUP_FEATURE_ARITHMETIC_BIT,
- "VK_SUBGROUP_FEATURE_ARITHMETIC_BIT",
- "VkPhysicalDeviceSubgroupProperties::supportedOperations");
- break;
- case spv::CapabilityGroupNonUniformBallot:
- RequirePropertyFlag(report_data, supportedOperations & VK_SUBGROUP_FEATURE_BALLOT_BIT,
- "VK_SUBGROUP_FEATURE_BALLOT_BIT",
- "VkPhysicalDeviceSubgroupProperties::supportedOperations");
- break;
- case spv::CapabilityGroupNonUniformShuffle:
- RequirePropertyFlag(report_data, supportedOperations & VK_SUBGROUP_FEATURE_SHUFFLE_BIT,
- "VK_SUBGROUP_FEATURE_SHUFFLE_BIT",
- "VkPhysicalDeviceSubgroupProperties::supportedOperations");
- break;
- case spv::CapabilityGroupNonUniformShuffleRelative:
- RequirePropertyFlag(report_data, supportedOperations & VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT,
- "VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT",
- "VkPhysicalDeviceSubgroupProperties::supportedOperations");
- break;
- case spv::CapabilityGroupNonUniformClustered:
- RequirePropertyFlag(report_data, supportedOperations & VK_SUBGROUP_FEATURE_CLUSTERED_BIT,
- "VK_SUBGROUP_FEATURE_CLUSTERED_BIT",
- "VkPhysicalDeviceSubgroupProperties::supportedOperations");
- break;
- case spv::CapabilityGroupNonUniformQuad:
- RequirePropertyFlag(report_data, supportedOperations & VK_SUBGROUP_FEATURE_QUAD_BIT,
- "VK_SUBGROUP_FEATURE_QUAD_BIT",
- "VkPhysicalDeviceSubgroupProperties::supportedOperations");
- break;
- case spv::CapabilityGroupNonUniformPartitionedNV:
- RequirePropertyFlag(report_data, supportedOperations & VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV,
- "VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV",
- "VkPhysicalDeviceSubgroupProperties::supportedOperations");
- break;
- }
}
}
}
- return skip;
-}
-
-bool CoreChecks::ValidateShaderStageWritableDescriptor(VkShaderStageFlagBits stage, bool has_writable_descriptor) const {
- bool skip = false;
-
if (has_writable_descriptor) {
switch (stage) {
case VK_SHADER_STAGE_COMPUTE_BIT:
@@ -1804,11 +1595,11 @@ bool CoreChecks::ValidateShaderStageWritableDescriptor(VkShaderStageFlagBits sta
* raytracing, or mesh stages */
break;
case VK_SHADER_STAGE_FRAGMENT_BIT:
- skip |= RequireFeature(report_data, enabled_features.core.fragmentStoresAndAtomics, "fragmentStoresAndAtomics");
+ skip |= RequireFeature(report_data, features->core.fragmentStoresAndAtomics, "fragmentStoresAndAtomics");
break;
default:
- skip |= RequireFeature(report_data, enabled_features.core.vertexPipelineStoresAndAtomics,
- "vertexPipelineStoresAndAtomics");
+ skip |=
+ RequireFeature(report_data, features->core.vertexPipelineStoresAndAtomics, "vertexPipelineStoresAndAtomics");
break;
}
}
@@ -1816,43 +1607,57 @@ bool CoreChecks::ValidateShaderStageWritableDescriptor(VkShaderStageFlagBits sta
return skip;
}
-bool CoreChecks::ValidateShaderStageGroupNonUniform(SHADER_MODULE_STATE const *module, VkShaderStageFlagBits stage,
- std::unordered_set<uint32_t> const &accessible_ids) const {
- bool skip = false;
-
- auto const subgroup_props = phys_dev_ext_props.subgroup_props;
+static bool VariableIsBuiltIn(shader_module const *src, const uint32_t ID, std::vector<uint32_t> const &builtInBlockIDs,
+ std::vector<uint32_t> const &builtInIDs) {
+ auto insn = src->get_def(ID);
- for (uint32_t id : accessible_ids) {
- auto inst = module->get_def(id);
+ switch (insn.opcode()) {
+ case spv::OpVariable: {
+ // First check if the variable is a "pure" built-in type, e.g. gl_ViewportIndex
+ uint32_t ID = insn.word(2);
+ for (auto builtInID : builtInIDs) {
+ if (ID == builtInID) {
+ return true;
+ }
+ }
- // Check the quad operations.
- switch (inst.opcode()) {
- default:
- break;
- case spv::OpGroupNonUniformQuadBroadcast:
- case spv::OpGroupNonUniformQuadSwap:
- if ((stage != VK_SHADER_STAGE_FRAGMENT_BIT) && (stage != VK_SHADER_STAGE_COMPUTE_BIT)) {
- skip |= RequireFeature(report_data, subgroup_props.quadOperationsInAllStages,
- "VkPhysicalDeviceSubgroupProperties::quadOperationsInAllStages");
+ VariableIsBuiltIn(src, insn.word(1), builtInBlockIDs, builtInIDs);
+ break;
+ }
+ case spv::OpTypePointer:
+ VariableIsBuiltIn(src, insn.word(3), builtInBlockIDs, builtInIDs);
+ break;
+ case spv::OpTypeArray:
+ VariableIsBuiltIn(src, insn.word(2), builtInBlockIDs, builtInIDs);
+ break;
+ case spv::OpTypeStruct: {
+ uint32_t ID = insn.word(1); // We only need to check the first member as either all will be, or none will be built-in
+ for (auto builtInBlockID : builtInBlockIDs) {
+ if (ID == builtInBlockID) {
+ return true;
}
- break;
+ }
+ return false;
}
+ default:
+ return false;
}
- return skip;
+ return false;
}
-bool CoreChecks::ValidateShaderStageInputOutputLimits(SHADER_MODULE_STATE const *src, VkPipelineShaderStageCreateInfo const *pStage,
- const PIPELINE_STATE *pipeline, spirv_inst_iter entrypoint) const {
+bool CoreChecks::ValidateShaderStageInputOutputLimits(layer_data *dev_data, shader_module const *src,
+ VkPipelineShaderStageCreateInfo const *pStage, PIPELINE_STATE *pipeline) {
if (pStage->stage == VK_SHADER_STAGE_COMPUTE_BIT || pStage->stage == VK_SHADER_STAGE_ALL_GRAPHICS ||
pStage->stage == VK_SHADER_STAGE_ALL) {
return false;
}
bool skip = false;
- auto const &limits = phys_dev_props.limits;
+ auto const &limits = dev_data->phys_dev_props.limits;
- std::set<uint32_t> patchIDs;
+ std::vector<uint32_t> builtInBlockIDs;
+ std::vector<uint32_t> builtInIDs;
struct Variable {
uint32_t baseTypePtrID;
uint32_t ID;
@@ -1860,17 +1665,31 @@ bool CoreChecks::ValidateShaderStageInputOutputLimits(SHADER_MODULE_STATE const
};
std::vector<Variable> variables;
- uint32_t numVertices = 0;
-
for (auto insn : *src) {
switch (insn.opcode()) {
- // Find all Patch decorations
+ // Find all built-in member decorations
+ case spv::OpMemberDecorate:
+ if (insn.word(3) == spv::DecorationBuiltIn) {
+ builtInBlockIDs.push_back(insn.word(1));
+ }
+ break;
+ // Find all built-in decorations
case spv::OpDecorate:
switch (insn.word(2)) {
- case spv::DecorationPatch: {
- patchIDs.insert(insn.word(1));
+ case spv::DecorationBlock: {
+ uint32_t blockID = insn.word(1);
+ for (auto builtInBlockID : builtInBlockIDs) {
+ // Check if one of the members of the block are built-in -> the block is built-in
+ if (blockID == builtInBlockID) {
+ builtInIDs.push_back(blockID);
+ break;
+ }
+ }
break;
}
+ case spv::DecorationBuiltIn:
+ builtInIDs.push_back(insn.word(1));
+ break;
default:
break;
}
@@ -1886,39 +1705,26 @@ bool CoreChecks::ValidateShaderStageInputOutputLimits(SHADER_MODULE_STATE const
}
break;
}
- case spv::OpExecutionMode:
- if (insn.word(1) == entrypoint.word(2)) {
- switch (insn.word(2)) {
- default:
- break;
- case spv::ExecutionModeOutputVertices:
- numVertices = insn.word(3);
- break;
- }
- }
- break;
default:
break;
}
}
- bool strip_output_array_level =
- (pStage->stage == VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT || pStage->stage == VK_SHADER_STAGE_MESH_BIT_NV);
- bool strip_input_array_level =
- (pStage->stage == VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT ||
- pStage->stage == VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT || pStage->stage == VK_SHADER_STAGE_GEOMETRY_BIT);
-
uint32_t numCompIn = 0, numCompOut = 0;
for (auto &var : variables) {
- // Check if the variable is a patch. Patches can also be members of blocks,
- // but if they are then the top-level arrayness has already been stripped
- // by the time GetComponentsConsumedByType gets to it.
- bool isPatch = patchIDs.find(var.ID) != patchIDs.end();
+ // Check the variable's ID
+ if (VariableIsBuiltIn(src, var.ID, builtInBlockIDs, builtInIDs)) {
+ continue;
+ }
+ // Check the variable's type's ID - e.g. gl_PerVertex is made of basic types, not built-in types
+ if (VariableIsBuiltIn(src, src->get_def(var.baseTypePtrID).word(3), builtInBlockIDs, builtInIDs)) {
+ continue;
+ }
if (var.storageClass == spv::StorageClassInput) {
- numCompIn += GetComponentsConsumedByType(src, var.baseTypePtrID, strip_input_array_level && !isPatch);
+ numCompIn += GetComponentsConsumedByType(src, var.baseTypePtrID, false);
} else { // var.storageClass == spv::StorageClassOutput
- numCompOut += GetComponentsConsumedByType(src, var.baseTypePtrID, strip_output_array_level && !isPatch);
+ numCompOut += GetComponentsConsumedByType(src, var.baseTypePtrID, false);
}
}
@@ -1993,15 +1799,6 @@ bool CoreChecks::ValidateShaderStageInputOutputLimits(SHADER_MODULE_STATE const
"components by %u components",
limits.maxGeometryOutputComponents, numCompOut - limits.maxGeometryOutputComponents);
}
- if (numCompOut * numVertices > limits.maxGeometryTotalOutputComponents) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
- HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
- "Invalid Pipeline CreateInfo State: Geometry shader exceeds "
- "VkPhysicalDeviceLimits::maxGeometryTotalOutputComponents of %u "
- "components by %u components",
- limits.maxGeometryTotalOutputComponents,
- numCompOut * numVertices - limits.maxGeometryTotalOutputComponents);
- }
break;
case VK_SHADER_STAGE_FRAGMENT_BIT:
@@ -2031,523 +1828,7 @@ bool CoreChecks::ValidateShaderStageInputOutputLimits(SHADER_MODULE_STATE const
return skip;
}
-// copy the specialization constant value into buf, if it is present
-void GetSpecConstantValue(VkPipelineShaderStageCreateInfo const *pStage, uint32_t spec_id, void *buf) {
- VkSpecializationInfo const *spec = pStage->pSpecializationInfo;
-
- if (spec && spec_id < spec->mapEntryCount) {
- memcpy(buf, (uint8_t *)spec->pData + spec->pMapEntries[spec_id].offset, spec->pMapEntries[spec_id].size);
- }
-}
-
-// Fill in value with the constant or specialization constant value, if available.
-// Returns true if the value has been accurately filled out.
-static bool GetIntConstantValue(spirv_inst_iter insn, SHADER_MODULE_STATE const *src, VkPipelineShaderStageCreateInfo const *pStage,
- const std::unordered_map<uint32_t, uint32_t> &id_to_spec_id, uint32_t *value) {
- auto type_id = src->get_def(insn.word(1));
- if (type_id.opcode() != spv::OpTypeInt || type_id.word(2) != 32) {
- return false;
- }
- switch (insn.opcode()) {
- case spv::OpSpecConstant:
- *value = insn.word(3);
- GetSpecConstantValue(pStage, id_to_spec_id.at(insn.word(2)), value);
- return true;
- case spv::OpConstant:
- *value = insn.word(3);
- return true;
- default:
- return false;
- }
-}
-
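For reference, a minimal host-side sketch of the VkSpecializationInfo layout that GetSpecConstantValue and GetIntConstantValue read. The struct and field names are standard Vulkan; ReadSpecConstant32 is a hypothetical helper, and it mirrors the layer's simplifying assumption that spec_id indexes pMapEntries directly.

    #include <cstring>
    #include <vulkan/vulkan.h>

    // Hypothetical helper: fetch a 32-bit specialization constant, falling back to a default.
    static uint32_t ReadSpecConstant32(const VkSpecializationInfo *spec, uint32_t spec_id, uint32_t default_value) {
        uint32_t value = default_value;
        if (spec && spec_id < spec->mapEntryCount) {
            const VkSpecializationMapEntry &entry = spec->pMapEntries[spec_id];
            if (entry.size == sizeof(uint32_t) && entry.offset + entry.size <= spec->dataSize) {
                std::memcpy(&value, static_cast<const uint8_t *>(spec->pData) + entry.offset, sizeof(uint32_t));
            }
        }
        return value;
    }

GetIntConstantValue above applies the same copy only when the result type is a 32-bit OpTypeInt, so values that are neither OpConstant nor OpSpecConstant are reported as unavailable.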
-// Map SPIR-V type to VK_COMPONENT_TYPE enum
-VkComponentTypeNV GetComponentType(spirv_inst_iter insn, SHADER_MODULE_STATE const *src) {
- switch (insn.opcode()) {
- case spv::OpTypeInt:
- switch (insn.word(2)) {
- case 8:
- return insn.word(3) != 0 ? VK_COMPONENT_TYPE_SINT8_NV : VK_COMPONENT_TYPE_UINT8_NV;
- case 16:
- return insn.word(3) != 0 ? VK_COMPONENT_TYPE_SINT16_NV : VK_COMPONENT_TYPE_UINT16_NV;
- case 32:
- return insn.word(3) != 0 ? VK_COMPONENT_TYPE_SINT32_NV : VK_COMPONENT_TYPE_UINT32_NV;
- case 64:
- return insn.word(3) != 0 ? VK_COMPONENT_TYPE_SINT64_NV : VK_COMPONENT_TYPE_UINT64_NV;
- default:
- return VK_COMPONENT_TYPE_MAX_ENUM_NV;
- }
- case spv::OpTypeFloat:
- switch (insn.word(2)) {
- case 16:
- return VK_COMPONENT_TYPE_FLOAT16_NV;
- case 32:
- return VK_COMPONENT_TYPE_FLOAT32_NV;
- case 64:
- return VK_COMPONENT_TYPE_FLOAT64_NV;
- default:
- return VK_COMPONENT_TYPE_MAX_ENUM_NV;
- }
- default:
- return VK_COMPONENT_TYPE_MAX_ENUM_NV;
- }
-}
-
-// Validate SPV_NV_cooperative_matrix behavior that can't be statically validated
-// in SPIRV-Tools (e.g. due to specialization constant usage).
-bool CoreChecks::ValidateCooperativeMatrix(SHADER_MODULE_STATE const *src, VkPipelineShaderStageCreateInfo const *pStage,
- const PIPELINE_STATE *pipeline) const {
- bool skip = false;
-
- // Map SPIR-V result ID to specialization constant id (SpecId decoration value)
- std::unordered_map<uint32_t, uint32_t> id_to_spec_id;
- // Map SPIR-V result ID to the ID of its type.
- std::unordered_map<uint32_t, uint32_t> id_to_type_id;
-
- struct CoopMatType {
- uint32_t scope, rows, cols;
- VkComponentTypeNV component_type;
- bool all_constant;
-
- CoopMatType() : scope(0), rows(0), cols(0), component_type(VK_COMPONENT_TYPE_MAX_ENUM_NV), all_constant(false) {}
-
- void Init(uint32_t id, SHADER_MODULE_STATE const *src, VkPipelineShaderStageCreateInfo const *pStage,
- const std::unordered_map<uint32_t, uint32_t> &id_to_spec_id) {
- spirv_inst_iter insn = src->get_def(id);
- uint32_t component_type_id = insn.word(2);
- uint32_t scope_id = insn.word(3);
- uint32_t rows_id = insn.word(4);
- uint32_t cols_id = insn.word(5);
- auto component_type_iter = src->get_def(component_type_id);
- auto scope_iter = src->get_def(scope_id);
- auto rows_iter = src->get_def(rows_id);
- auto cols_iter = src->get_def(cols_id);
-
- all_constant = true;
- if (!GetIntConstantValue(scope_iter, src, pStage, id_to_spec_id, &scope)) {
- all_constant = false;
- }
- if (!GetIntConstantValue(rows_iter, src, pStage, id_to_spec_id, &rows)) {
- all_constant = false;
- }
- if (!GetIntConstantValue(cols_iter, src, pStage, id_to_spec_id, &cols)) {
- all_constant = false;
- }
- component_type = GetComponentType(component_type_iter, src);
- }
- };
-
- bool seen_coopmat_capability = false;
-
- for (auto insn : *src) {
- // Whitelist instructions whose result can be a cooperative matrix type, and
- // keep track of their types. It would be nice if SPIRV-Headers generated code
- // to identify which instructions have a result type and result id. Lacking that,
- // this whitelist is based on the set of instructions that
- // SPV_NV_cooperative_matrix says can be used with cooperative matrix types.
- switch (insn.opcode()) {
- case spv::OpLoad:
- case spv::OpCooperativeMatrixLoadNV:
- case spv::OpCooperativeMatrixMulAddNV:
- case spv::OpSNegate:
- case spv::OpFNegate:
- case spv::OpIAdd:
- case spv::OpFAdd:
- case spv::OpISub:
- case spv::OpFSub:
- case spv::OpFDiv:
- case spv::OpSDiv:
- case spv::OpUDiv:
- case spv::OpMatrixTimesScalar:
- case spv::OpConstantComposite:
- case spv::OpCompositeConstruct:
- case spv::OpConvertFToU:
- case spv::OpConvertFToS:
- case spv::OpConvertSToF:
- case spv::OpConvertUToF:
- case spv::OpUConvert:
- case spv::OpSConvert:
- case spv::OpFConvert:
- id_to_type_id[insn.word(2)] = insn.word(1);
- break;
- default:
- break;
- }
-
- switch (insn.opcode()) {
- case spv::OpDecorate:
- if (insn.word(2) == spv::DecorationSpecId) {
- id_to_spec_id[insn.word(1)] = insn.word(3);
- }
- break;
- case spv::OpCapability:
- if (insn.word(1) == spv::CapabilityCooperativeMatrixNV) {
- seen_coopmat_capability = true;
-
- if (!(pStage->stage & phys_dev_ext_props.cooperative_matrix_props.cooperativeMatrixSupportedStages)) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
- HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_CooperativeMatrixSupportedStages,
- "OpTypeCooperativeMatrixNV used in shader stage not in cooperativeMatrixSupportedStages (= %u)",
- phys_dev_ext_props.cooperative_matrix_props.cooperativeMatrixSupportedStages);
- }
- }
- break;
- case spv::OpMemoryModel:
- // If the capability isn't enabled, don't bother with the rest of this function.
- // OpMemoryModel is the first required instruction after all OpCapability instructions.
- if (!seen_coopmat_capability) {
- return skip;
- }
- break;
- case spv::OpTypeCooperativeMatrixNV: {
- CoopMatType M;
- M.Init(insn.word(1), src, pStage, id_to_spec_id);
-
- if (M.all_constant) {
- // Validate that the type parameters are all supported for one of the
- // operands of a cooperative matrix property.
- bool valid = false;
- for (unsigned i = 0; i < cooperative_matrix_properties.size(); ++i) {
- if (cooperative_matrix_properties[i].AType == M.component_type &&
- cooperative_matrix_properties[i].MSize == M.rows && cooperative_matrix_properties[i].KSize == M.cols &&
- cooperative_matrix_properties[i].scope == M.scope) {
- valid = true;
- break;
- }
- if (cooperative_matrix_properties[i].BType == M.component_type &&
- cooperative_matrix_properties[i].KSize == M.rows && cooperative_matrix_properties[i].NSize == M.cols &&
- cooperative_matrix_properties[i].scope == M.scope) {
- valid = true;
- break;
- }
- if (cooperative_matrix_properties[i].CType == M.component_type &&
- cooperative_matrix_properties[i].MSize == M.rows && cooperative_matrix_properties[i].NSize == M.cols &&
- cooperative_matrix_properties[i].scope == M.scope) {
- valid = true;
- break;
- }
- if (cooperative_matrix_properties[i].DType == M.component_type &&
- cooperative_matrix_properties[i].MSize == M.rows && cooperative_matrix_properties[i].NSize == M.cols &&
- cooperative_matrix_properties[i].scope == M.scope) {
- valid = true;
- break;
- }
- }
- if (!valid) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
- HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_CooperativeMatrixType,
- "OpTypeCooperativeMatrixNV (result id = %u) operands don't match a supported matrix type",
- insn.word(1));
- }
- }
- break;
- }
- case spv::OpCooperativeMatrixMulAddNV: {
- CoopMatType A, B, C, D;
- if (id_to_type_id.find(insn.word(2)) == id_to_type_id.end() ||
- id_to_type_id.find(insn.word(3)) == id_to_type_id.end() ||
- id_to_type_id.find(insn.word(4)) == id_to_type_id.end() ||
- id_to_type_id.find(insn.word(5)) == id_to_type_id.end()) {
- // Couldn't find type of matrix
- assert(false);
- break;
- }
- D.Init(id_to_type_id[insn.word(2)], src, pStage, id_to_spec_id);
- A.Init(id_to_type_id[insn.word(3)], src, pStage, id_to_spec_id);
- B.Init(id_to_type_id[insn.word(4)], src, pStage, id_to_spec_id);
- C.Init(id_to_type_id[insn.word(5)], src, pStage, id_to_spec_id);
-
- if (A.all_constant && B.all_constant && C.all_constant && D.all_constant) {
- // Validate that the type parameters are all supported for the same
- // cooperative matrix property.
- bool valid = false;
- for (unsigned i = 0; i < cooperative_matrix_properties.size(); ++i) {
- if (cooperative_matrix_properties[i].AType == A.component_type &&
- cooperative_matrix_properties[i].MSize == A.rows && cooperative_matrix_properties[i].KSize == A.cols &&
- cooperative_matrix_properties[i].scope == A.scope &&
-
- cooperative_matrix_properties[i].BType == B.component_type &&
- cooperative_matrix_properties[i].KSize == B.rows && cooperative_matrix_properties[i].NSize == B.cols &&
- cooperative_matrix_properties[i].scope == B.scope &&
-
- cooperative_matrix_properties[i].CType == C.component_type &&
- cooperative_matrix_properties[i].MSize == C.rows && cooperative_matrix_properties[i].NSize == C.cols &&
- cooperative_matrix_properties[i].scope == C.scope &&
-
- cooperative_matrix_properties[i].DType == D.component_type &&
- cooperative_matrix_properties[i].MSize == D.rows && cooperative_matrix_properties[i].NSize == D.cols &&
- cooperative_matrix_properties[i].scope == D.scope) {
- valid = true;
- break;
- }
- }
- if (!valid) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
- HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_CooperativeMatrixMulAdd,
- "OpCooperativeMatrixMulAddNV (result id = %u) operands don't match a supported matrix "
- "VkCooperativeMatrixPropertiesNV",
- insn.word(2));
- }
- }
- break;
- }
- default:
- break;
- }
- }
-
- return skip;
-}
-
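For reference, the cooperative_matrix_properties list this loop matches against corresponds to the standard VK_NV_cooperative_matrix query. A minimal sketch of that enumeration, assuming an already-created VkInstance and VkPhysicalDevice:

    #include <vector>
    #include <vulkan/vulkan.h>

    // Sketch: enumerate VkCooperativeMatrixPropertiesNV entries (MSize/NSize/KSize, A/B/C/D types, scope).
    std::vector<VkCooperativeMatrixPropertiesNV> QueryCoopMatProperties(VkInstance instance, VkPhysicalDevice gpu) {
        auto fp = reinterpret_cast<PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV>(
            vkGetInstanceProcAddr(instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV"));
        std::vector<VkCooperativeMatrixPropertiesNV> props;
        if (!fp) return props;  // extension not available
        uint32_t count = 0;
        fp(gpu, &count, nullptr);
        props.resize(count, {VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV});
        fp(gpu, &count, props.data());
        return props;
    }

A shader's OpTypeCooperativeMatrixNV operands are valid only if they line up with one of the returned entries, which is the comparison the loop above performs.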
-bool CoreChecks::ValidateExecutionModes(SHADER_MODULE_STATE const *src, spirv_inst_iter entrypoint) const {
- auto entrypoint_id = entrypoint.word(2);
-
- // The first denorm execution mode encountered, along with its bit width.
- // Used to check if SeparateDenormSettings is respected.
- std::pair<spv::ExecutionMode, uint32_t> first_denorm_execution_mode = std::make_pair(spv::ExecutionModeMax, 0);
-
- // The first rounding mode encountered, along with its bit width.
- // Used to check if SeparateRoundingModeSettings is respected.
- std::pair<spv::ExecutionMode, uint32_t> first_rounding_mode = std::make_pair(spv::ExecutionModeMax, 0);
-
- bool skip = false;
-
- uint32_t verticesOut = 0;
- uint32_t invocations = 0;
-
- for (auto insn : *src) {
- if (insn.opcode() == spv::OpExecutionMode && insn.word(1) == entrypoint_id) {
- auto mode = insn.word(2);
- switch (mode) {
- case spv::ExecutionModeSignedZeroInfNanPreserve: {
- auto bit_width = insn.word(3);
- if ((bit_width == 16 && !enabled_features.float_controls.shaderSignedZeroInfNanPreserveFloat16) ||
- (bit_width == 32 && !enabled_features.float_controls.shaderSignedZeroInfNanPreserveFloat32) ||
- (bit_width == 64 && !enabled_features.float_controls.shaderSignedZeroInfNanPreserveFloat64)) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- kVUID_Core_Shader_FeatureNotEnabled,
- "Shader requires SignedZeroInfNanPreserve for bit width %d but it is not enabled on the device",
- bit_width);
- }
- break;
- }
-
- case spv::ExecutionModeDenormPreserve: {
- auto bit_width = insn.word(3);
- if ((bit_width == 16 && !enabled_features.float_controls.shaderDenormPreserveFloat16) ||
- (bit_width == 32 && !enabled_features.float_controls.shaderDenormPreserveFloat32) ||
- (bit_width == 64 && !enabled_features.float_controls.shaderDenormPreserveFloat64)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- kVUID_Core_Shader_FeatureNotEnabled,
- "Shader requires DenormPreserve for bit width %d but it is not enabled on the device",
- bit_width);
- }
-
- if (first_denorm_execution_mode.first == spv::ExecutionModeMax) {
- // Register the first denorm execution mode found
- first_denorm_execution_mode = std::make_pair(static_cast<spv::ExecutionMode>(mode), bit_width);
- } else if (first_denorm_execution_mode.first != mode && first_denorm_execution_mode.second != bit_width) {
- switch (enabled_features.float_controls.denormBehaviorIndependence) {
- case VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR:
- if (first_rounding_mode.second != 32 && bit_width != 32) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, kVUID_Core_Shader_FeatureNotEnabled,
- "Shader uses different denorm execution modes for 16 and 64-bit but "
- "denormBehaviorIndependence is "
- "VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR on the device");
- }
- break;
-
- case VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL_KHR:
- break;
-
- case VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR:
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
- 0, kVUID_Core_Shader_FeatureNotEnabled,
- "Shader uses different denorm execution modes for different bit widths but "
- "denormBehaviorIndependence is "
- "VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR on the device");
- break;
-
- default:
- break;
- }
- }
- break;
- }
-
- case spv::ExecutionModeDenormFlushToZero: {
- auto bit_width = insn.word(3);
- if ((bit_width == 16 && !enabled_features.float_controls.shaderDenormFlushToZeroFloat16) ||
- (bit_width == 32 && !enabled_features.float_controls.shaderDenormFlushToZeroFloat32) ||
- (bit_width == 64 && !enabled_features.float_controls.shaderDenormFlushToZeroFloat64)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- kVUID_Core_Shader_FeatureNotEnabled,
- "Shader requires DenormFlushToZero for bit width %d but it is not enabled on the device",
- bit_width);
- }
-
- if (first_denorm_execution_mode.first == spv::ExecutionModeMax) {
- // Register the first denorm execution mode found
- first_denorm_execution_mode = std::make_pair(static_cast<spv::ExecutionMode>(mode), bit_width);
- } else if (first_denorm_execution_mode.first != mode && first_denorm_execution_mode.second != bit_width) {
- switch (enabled_features.float_controls.denormBehaviorIndependence) {
- case VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR:
- if (first_rounding_mode.second != 32 && bit_width != 32) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, kVUID_Core_Shader_FeatureNotEnabled,
- "Shader uses different denorm execution modes for 16 and 64-bit but "
- "denormBehaviorIndependence is "
- "VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR on the device");
- }
- break;
-
- case VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL_KHR:
- break;
-
- case VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR:
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
- 0, kVUID_Core_Shader_FeatureNotEnabled,
- "Shader uses different denorm execution modes for different bit widths but "
- "denormBehaviorIndependence is "
- "VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR on the device");
- break;
-
- default:
- break;
- }
- }
- break;
- }
-
- case spv::ExecutionModeRoundingModeRTE: {
- auto bit_width = insn.word(3);
- if ((bit_width == 16 && !enabled_features.float_controls.shaderRoundingModeRTEFloat16) ||
- (bit_width == 32 && !enabled_features.float_controls.shaderRoundingModeRTEFloat32) ||
- (bit_width == 64 && !enabled_features.float_controls.shaderRoundingModeRTEFloat64)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- kVUID_Core_Shader_FeatureNotEnabled,
- "Shader requires RoundingModeRTE for bit width %d but it is not enabled on the device",
- bit_width);
- }
-
- if (first_rounding_mode.first == spv::ExecutionModeMax) {
- // Register the first rounding mode found
- first_rounding_mode = std::make_pair(static_cast<spv::ExecutionMode>(mode), bit_width);
- } else if (first_rounding_mode.first != mode && first_rounding_mode.second != bit_width) {
- switch (enabled_features.float_controls.roundingModeIndependence) {
- case VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR:
- if (first_rounding_mode.second != 32 && bit_width != 32) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, kVUID_Core_Shader_FeatureNotEnabled,
- "Shader uses different rounding modes for 16 and 64-bit but "
- "roundingModeIndependence is "
- "VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR on the device");
- }
- break;
-
- case VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL_KHR:
- break;
-
- case VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR:
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
- 0, kVUID_Core_Shader_FeatureNotEnabled,
- "Shader uses different rounding modes for different bit widths but "
- "roundingModeIndependence is "
- "VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR on the device");
- break;
-
- default:
- break;
- }
- }
- break;
- }
-
- case spv::ExecutionModeRoundingModeRTZ: {
- auto bit_width = insn.word(3);
- if ((bit_width == 16 && !enabled_features.float_controls.shaderRoundingModeRTZFloat16) ||
- (bit_width == 32 && !enabled_features.float_controls.shaderRoundingModeRTZFloat32) ||
- (bit_width == 64 && !enabled_features.float_controls.shaderRoundingModeRTZFloat64)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- kVUID_Core_Shader_FeatureNotEnabled,
- "Shader requires RoundingModeRTZ for bit width %d but it is not enabled on the device",
- bit_width);
- }
-
- if (first_rounding_mode.first == spv::ExecutionModeMax) {
- // Register the first rounding mode found
- first_rounding_mode = std::make_pair(static_cast<spv::ExecutionMode>(mode), bit_width);
- } else if (first_rounding_mode.first != mode && first_rounding_mode.second != bit_width) {
- switch (enabled_features.float_controls.roundingModeIndependence) {
- case VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR:
- if (first_rounding_mode.second != 32 && bit_width != 32) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, kVUID_Core_Shader_FeatureNotEnabled,
- "Shader uses different rounding modes for 16 and 64-bit but "
- "roundingModeIndependence is "
- "VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR on the device");
- }
- break;
-
- case VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL_KHR:
- break;
-
- case VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR:
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
- 0, kVUID_Core_Shader_FeatureNotEnabled,
- "Shader uses different rounding modes for different bit widths but "
- "roundingModeIndependence is "
- "VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR on the device");
- break;
-
- default:
- break;
- }
- }
- break;
- }
-
- case spv::ExecutionModeOutputVertices: {
- verticesOut = insn.word(3);
- break;
- }
-
- case spv::ExecutionModeInvocations: {
- invocations = insn.word(3);
- break;
- }
- }
- }
- }
-
- if (entrypoint.word(1) == spv::ExecutionModelGeometry) {
- if (verticesOut == 0 || verticesOut > phys_dev_props.limits.maxGeometryOutputVertices) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkPipelineShaderStageCreateInfo-stage-00714",
- "Geometry shader entry point must have an OpExecutionMode instruction that "
- "specifies a maximum output vertex count that is greater than 0 and less "
- "than or equal to maxGeometryOutputVertices. "
- "OutputVertices=%d, maxGeometryOutputVertices=%d",
- verticesOut, phys_dev_props.limits.maxGeometryOutputVertices);
- }
-
- if (invocations == 0 || invocations > phys_dev_props.limits.maxGeometryShaderInvocations) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkPipelineShaderStageCreateInfo-stage-00715",
- "Geometry shader entry point must have an OpExecutionMode instruction that "
- "specifies an invocation count that is greater than 0 and less "
- "than or equal to maxGeometryShaderInvocations. "
- "Invocations=%d, maxGeometryShaderInvocations=%d",
- invocations, phys_dev_props.limits.maxGeometryShaderInvocations);
- }
- }
- return skip;
-}
-
-static uint32_t DescriptorTypeToReqs(SHADER_MODULE_STATE const *module, uint32_t type_id) {
+uint32_t DescriptorTypeToReqs(shader_module const *module, uint32_t type_id) {
auto type = module->get_def(type_id);
while (true) {
@@ -2609,7 +1890,7 @@ static uint32_t DescriptorTypeToReqs(SHADER_MODULE_STATE const *module, uint32_t
// For given pipelineLayout verify that the set_layout_node at slot.first
// has the requested binding at slot.second and return ptr to that binding
-static VkDescriptorSetLayoutBinding const *GetDescriptorBinding(PIPELINE_LAYOUT_STATE const *pipelineLayout,
+static VkDescriptorSetLayoutBinding const *GetDescriptorBinding(PIPELINE_LAYOUT_NODE const *pipelineLayout,
descriptor_slot_t slot) {
if (!pipelineLayout) return nullptr;
@@ -2618,29 +1899,7 @@ static VkDescriptorSetLayoutBinding const *GetDescriptorBinding(PIPELINE_LAYOUT_
return pipelineLayout->set_layouts[slot.first]->GetDescriptorSetLayoutBindingPtrFromBinding(slot.second);
}
-static bool FindLocalSize(SHADER_MODULE_STATE const *src, uint32_t &local_size_x, uint32_t &local_size_y, uint32_t &local_size_z) {
- for (auto insn : *src) {
- if (insn.opcode() == spv::OpEntryPoint) {
- auto executionModel = insn.word(1);
- auto entrypointStageBits = ExecutionModelToShaderStageFlagBits(executionModel);
- if (entrypointStageBits == VK_SHADER_STAGE_COMPUTE_BIT) {
- auto entrypoint_id = insn.word(2);
- for (auto insn1 : *src) {
- if (insn1.opcode() == spv::OpExecutionMode && insn1.word(1) == entrypoint_id &&
- insn1.word(2) == spv::ExecutionModeLocalSize) {
- local_size_x = insn1.word(3);
- local_size_y = insn1.word(4);
- local_size_z = insn1.word(5);
- return true;
- }
- }
- }
- }
- }
- return false;
-}
-
-static void ProcessExecutionModes(SHADER_MODULE_STATE const *src, const spirv_inst_iter &entrypoint, PIPELINE_STATE *pipeline) {
+static void ProcessExecutionModes(shader_module const *src, spirv_inst_iter entrypoint, PIPELINE_STATE *pipeline) {
auto entrypoint_id = entrypoint.word(2);
bool is_point_mode = false;
@@ -2680,8 +1939,8 @@ static void ProcessExecutionModes(SHADER_MODULE_STATE const *src, const spirv_in
// * gl_PointSize must be written in the final geometry stage
// - If shaderTessellationAndGeometryPointSize feature is disabled:
// * gl_PointSize must NOT be written and a default of 1.0 is assumed
-bool CoreChecks::ValidatePointListShaderState(const PIPELINE_STATE *pipeline, SHADER_MODULE_STATE const *src,
- spirv_inst_iter entrypoint, VkShaderStageFlagBits stage) const {
+bool CoreChecks::ValidatePointListShaderState(const layer_data *dev_data, const PIPELINE_STATE *pipeline, shader_module const *src,
+ spirv_inst_iter entrypoint, VkShaderStageFlagBits stage) {
if (pipeline->topology_at_rasterizer != VK_PRIMITIVE_TOPOLOGY_POINT_LIST) {
return false;
}
@@ -2711,89 +1970,66 @@ bool CoreChecks::ValidatePointListShaderState(const PIPELINE_STATE *pipeline, SH
}
if ((stage == VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT || stage == VK_SHADER_STAGE_GEOMETRY_BIT) &&
- !enabled_features.core.shaderTessellationAndGeometryPointSize) {
+ !GetEnabledFeatures()->core.shaderTessellationAndGeometryPointSize) {
if (pointsize_written) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+ skip |= log_msg(GetReportData(), VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_PointSizeBuiltInOverSpecified,
"Pipeline topology is set to POINT_LIST and geometry or tessellation shaders write PointSize which "
"is prohibited when the shaderTessellationAndGeometryPointSize feature is not enabled.");
}
} else if (!pointsize_written) {
skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+ log_msg(GetReportData(), VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_MissingPointSizeBuiltIn,
"Pipeline topology is set to POINT_LIST, but PointSize is not written to in the shader corresponding to %s.",
string_VkShaderStageFlagBits(stage));
}
return skip;
}
-void ValidationStateTracker::RecordPipelineShaderStage(VkPipelineShaderStageCreateInfo const *pStage, PIPELINE_STATE *pipeline,
- PIPELINE_STATE::StageState *stage_state) {
- // Validation shouldn't rely on anything in stage state being valid if the spirv isn't
- auto module = GetShaderModuleState(pStage->module);
- if (!module->has_valid_spirv) return;
-
- // Validation shouldn't rely on anything in stage state being valid if the entrypoint isn't present
- auto entrypoint = FindEntrypoint(module, pStage->pName, pStage->stage);
- if (entrypoint == module->end()) return;
-
- // Mark accessible ids
- stage_state->accessible_ids = MarkAccessibleIds(module, entrypoint);
- ProcessExecutionModes(module, entrypoint, pipeline);
-
- stage_state->descriptor_uses =
- CollectInterfaceByDescriptorSlot(report_data, module, stage_state->accessible_ids, &stage_state->has_writable_descriptor);
- // Capture descriptor uses for the pipeline
- for (auto use : stage_state->descriptor_uses) {
- // While validating shaders capture which slots are used by the pipeline
- auto &reqs = pipeline->active_slots[use.first.first][use.first.second];
- reqs = descriptor_req(reqs | DescriptorTypeToReqs(module, use.second.type_id));
- }
-}
-bool CoreChecks::ValidatePipelineShaderStage(VkPipelineShaderStageCreateInfo const *pStage, const PIPELINE_STATE *pipeline,
- const PIPELINE_STATE::StageState &stage_state, const SHADER_MODULE_STATE *module,
- const spirv_inst_iter &entrypoint, bool check_point_size) const {
+bool CoreChecks::ValidatePipelineShaderStage(layer_data *dev_data, VkPipelineShaderStageCreateInfo const *pStage,
+ PIPELINE_STATE *pipeline, shader_module const **out_module,
+ spirv_inst_iter *out_entrypoint, bool check_point_size) {
bool skip = false;
+ auto module = *out_module = GetShaderModuleState(pStage->module);
+ auto report_data = GetReportData();
- // Check the module
- if (!module->has_valid_spirv) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkPipelineShaderStageCreateInfo-module-parameter", "%s does not contain valid spirv for stage %s.",
- report_data->FormatHandle(module->vk_shader_module).c_str(), string_VkShaderStageFlagBits(pStage->stage));
- }
+ if (!module->has_valid_spirv) return false;
- // Check the entrypoint
+ // Find the entrypoint
+ auto entrypoint = *out_entrypoint = FindEntrypoint(module, pStage->pName, pStage->stage);
if (entrypoint == module->end()) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- "VUID-VkPipelineShaderStageCreateInfo-pName-00707", "No entrypoint found named `%s` for stage %s..",
- pStage->pName, string_VkShaderStageFlagBits(pStage->stage));
+ if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ "VUID-VkPipelineShaderStageCreateInfo-pName-00707", "No entrypoint found named `%s` for stage %s..",
+ pStage->pName, string_VkShaderStageFlagBits(pStage->stage))) {
+ return true; // no point continuing beyond here, any analysis is just going to be garbage.
+ }
}
- if (skip) return true; // no point continuing beyond here, any analysis is just going to be garbage.
// Mark accessible ids
- auto &accessible_ids = stage_state.accessible_ids;
+ auto accessible_ids = MarkAccessibleIds(module, entrypoint);
+ ProcessExecutionModes(module, entrypoint, pipeline);
// Validate descriptor set layout against what the entrypoint actually uses
- bool has_writable_descriptor = stage_state.has_writable_descriptor;
- auto &descriptor_uses = stage_state.descriptor_uses;
+ bool has_writable_descriptor = false;
+ auto descriptor_uses = CollectInterfaceByDescriptorSlot(report_data, module, accessible_ids, &has_writable_descriptor);
// Validate shader capabilities against enabled device features
- skip |= ValidateShaderCapabilities(module, pStage->stage);
- skip |= ValidateShaderStageWritableDescriptor(pStage->stage, has_writable_descriptor);
- skip |= ValidateShaderStageInputOutputLimits(module, pStage, pipeline, entrypoint);
- skip |= ValidateShaderStageGroupNonUniform(module, pStage->stage, accessible_ids);
- skip |= ValidateExecutionModes(module, entrypoint);
+ skip |= ValidateShaderCapabilities(dev_data, module, pStage->stage, has_writable_descriptor);
+ skip |= ValidateShaderStageInputOutputLimits(dev_data, module, pStage, pipeline);
skip |= ValidateSpecializationOffsets(report_data, pStage);
skip |= ValidatePushConstantUsage(report_data, pipeline->pipeline_layout.push_constant_ranges.get(), module, accessible_ids,
pStage->stage);
if (check_point_size && !pipeline->graphicsPipelineCI.pRasterizationState->rasterizerDiscardEnable) {
- skip |= ValidatePointListShaderState(pipeline, module, entrypoint, pStage->stage);
+ skip |= ValidatePointListShaderState(dev_data, pipeline, module, entrypoint, pStage->stage);
}
- skip |= ValidateCooperativeMatrix(module, pStage, pipeline);
// Validate descriptor use
for (auto use : descriptor_uses) {
+ // While validating shaders capture which slots are used by the pipeline
+ auto &reqs = pipeline->active_slots[use.first.first][use.first.second];
+ reqs = descriptor_req(reqs | DescriptorTypeToReqs(module, use.second.type_id));
+
// Verify given pipelineLayout has requested setLayout with requested binding
const auto &binding = GetDescriptorBinding(&pipeline->pipeline_layout, use.first);
unsigned required_descriptor_count;
@@ -2849,15 +2085,13 @@ bool CoreChecks::ValidatePipelineShaderStage(VkPipelineShaderStageCreateInfo con
}
}
}
- if (pStage->stage == VK_SHADER_STAGE_COMPUTE_BIT) {
- skip |= ValidateComputeWorkGroupSizes(module);
- }
+
return skip;
}
-static bool ValidateInterfaceBetweenStages(debug_report_data const *report_data, SHADER_MODULE_STATE const *producer,
+static bool ValidateInterfaceBetweenStages(debug_report_data const *report_data, shader_module const *producer,
spirv_inst_iter producer_entrypoint, shader_stage_attributes const *producer_stage,
- SHADER_MODULE_STATE const *consumer, spirv_inst_iter consumer_entrypoint,
+ shader_module const *consumer, spirv_inst_iter consumer_entrypoint,
shader_stage_attributes const *consumer_stage) {
bool skip = false;
@@ -2919,39 +2153,10 @@ static bool ValidateInterfaceBetweenStages(debug_report_data const *report_data,
}
}
- if (consumer_stage->stage != VK_SHADER_STAGE_FRAGMENT_BIT) {
- auto builtins_producer = CollectBuiltinBlockMembers(producer, producer_entrypoint, spv::StorageClassOutput);
- auto builtins_consumer = CollectBuiltinBlockMembers(consumer, consumer_entrypoint, spv::StorageClassInput);
-
- if (!builtins_producer.empty() && !builtins_consumer.empty()) {
- if (builtins_producer.size() != builtins_consumer.size()) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
- HandleToUint64(producer->vk_shader_module), kVUID_Core_Shader_InterfaceTypeMismatch,
- "Number of elements inside builtin block differ between stages (%s %d vs %s %d).", producer_stage->name,
- (int)builtins_producer.size(), consumer_stage->name, (int)builtins_consumer.size());
- } else {
- auto it_producer = builtins_producer.begin();
- auto it_consumer = builtins_consumer.begin();
- while (it_producer != builtins_producer.end() && it_consumer != builtins_consumer.end()) {
- if (*it_producer != *it_consumer) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
- HandleToUint64(producer->vk_shader_module), kVUID_Core_Shader_InterfaceTypeMismatch,
- "Builtin variable inside block doesn't match between %s and %s.", producer_stage->name,
- consumer_stage->name);
- break;
- }
- it_producer++;
- it_consumer++;
- }
- }
- }
- }
-
return skip;
}
-static inline uint32_t DetermineFinalGeomStage(const PIPELINE_STATE *pipeline, const VkGraphicsPipelineCreateInfo *pCreateInfo) {
+static inline uint32_t DetermineFinalGeomStage(PIPELINE_STATE *pipeline, VkGraphicsPipelineCreateInfo *pCreateInfo) {
uint32_t stage_mask = 0;
if (pipeline->topology_at_rasterizer == VK_PRIMITIVE_TOPOLOGY_POINT_LIST) {
for (uint32_t i = 0; i < pCreateInfo->stageCount; i++) {
@@ -2973,12 +2178,13 @@ static inline uint32_t DetermineFinalGeomStage(const PIPELINE_STATE *pipeline, c
// Validate that the shaders used by the given pipeline and store the active_slots
// that are actually used by the pipeline into pPipeline->active_slots
-bool CoreChecks::ValidateGraphicsPipelineShaderState(const PIPELINE_STATE *pipeline) const {
+bool CoreChecks::ValidateAndCapturePipelineShaderState(layer_data *dev_data, PIPELINE_STATE *pipeline) {
auto pCreateInfo = pipeline->graphicsPipelineCI.ptr();
int vertex_stage = GetShaderStageId(VK_SHADER_STAGE_VERTEX_BIT);
int fragment_stage = GetShaderStageId(VK_SHADER_STAGE_FRAGMENT_BIT);
+ auto report_data = GetReportData();
- const SHADER_MODULE_STATE *shaders[32];
+ shader_module const *shaders[32];
memset(shaders, 0, sizeof(shaders));
spirv_inst_iter entrypoints[32];
memset(entrypoints, 0, sizeof(entrypoints));
@@ -2989,10 +2195,7 @@ bool CoreChecks::ValidateGraphicsPipelineShaderState(const PIPELINE_STATE *pipel
for (uint32_t i = 0; i < pCreateInfo->stageCount; i++) {
auto pStage = &pCreateInfo->pStages[i];
auto stage_id = GetShaderStageId(pStage->stage);
- shaders[stage_id] = GetShaderModuleState(pStage->module);
- entrypoints[stage_id] = FindEntrypoint(shaders[stage_id], pStage->pName, pStage->stage);
- skip |= ValidatePipelineShaderStage(pStage, pipeline, pipeline->stage_state[i], shaders[stage_id], entrypoints[stage_id],
-
+ skip |= ValidatePipelineShaderStage(dev_data, pStage, pipeline, &shaders[stage_id], &entrypoints[stage_id],
(pointlist_stage_mask == pStage->stage));
}
@@ -3038,51 +2241,50 @@ bool CoreChecks::ValidateGraphicsPipelineShaderState(const PIPELINE_STATE *pipel
return skip;
}
-bool CoreChecks::ValidateComputePipeline(PIPELINE_STATE *pipeline) const {
- const auto &stage = *pipeline->computePipelineCI.stage.ptr();
+bool CoreChecks::ValidateComputePipeline(layer_data *dev_data, PIPELINE_STATE *pipeline) {
+ auto pCreateInfo = pipeline->computePipelineCI.ptr();
- const SHADER_MODULE_STATE *module = GetShaderModuleState(stage.module);
- const spirv_inst_iter entrypoint = FindEntrypoint(module, stage.pName, stage.stage);
+ shader_module const *module;
+ spirv_inst_iter entrypoint;
- return ValidatePipelineShaderStage(&stage, pipeline, pipeline->stage_state[0], module, entrypoint, false);
+ return ValidatePipelineShaderStage(dev_data, &pCreateInfo->stage, pipeline, &module, &entrypoint, false);
}
-bool CoreChecks::ValidateRayTracingPipelineNV(PIPELINE_STATE *pipeline) const {
- bool skip = false;
- for (uint32_t stage_index = 0; stage_index < pipeline->raytracingPipelineCI.stageCount; stage_index++) {
- const auto &stage = pipeline->raytracingPipelineCI.ptr()->pStages[stage_index];
+bool CoreChecks::ValidateRayTracingPipelineNV(layer_data *dev_data, PIPELINE_STATE *pipeline) {
+ auto pCreateInfo = pipeline->raytracingPipelineCI.ptr();
- const SHADER_MODULE_STATE *module = GetShaderModuleState(stage.module);
- const spirv_inst_iter entrypoint = FindEntrypoint(module, stage.pName, stage.stage);
+ shader_module const *module;
+ spirv_inst_iter entrypoint;
- skip |= ValidatePipelineShaderStage(&stage, pipeline, pipeline->stage_state[stage_index], module, entrypoint, false);
- }
- return skip;
+ return ValidatePipelineShaderStage(dev_data, pCreateInfo->pStages, pipeline, &module, &entrypoint, false);
}
uint32_t ValidationCache::MakeShaderHash(VkShaderModuleCreateInfo const *smci) { return XXH32(smci->pCode, smci->codeSize, 0); }
static ValidationCache *GetValidationCacheInfo(VkShaderModuleCreateInfo const *pCreateInfo) {
- const auto validation_cache_ci = lvl_find_in_chain<VkShaderModuleValidationCacheCreateInfoEXT>(pCreateInfo->pNext);
- if (validation_cache_ci) {
- return CastFromHandle<ValidationCache *>(validation_cache_ci->validationCache);
+ while ((pCreateInfo = (VkShaderModuleCreateInfo const *)pCreateInfo->pNext) != nullptr) {
+ if (pCreateInfo->sType == VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT)
+ return (ValidationCache *)((VkShaderModuleValidationCacheCreateInfoEXT const *)pCreateInfo)->validationCache;
}
+
return nullptr;
}
bool CoreChecks::PreCallValidateCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator, VkShaderModule *pShaderModule) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+
bool skip = false;
spv_result_t spv_valid = SPV_SUCCESS;
- if (disabled.shader_validation) {
+ if (GetDisables()->shader_validation) {
return false;
}
- auto have_glsl_shader = device_extensions.vk_nv_glsl_shader;
+ auto have_glsl_shader = GetDeviceExtensions()->vk_nv_glsl_shader;
if (!have_glsl_shader && (pCreateInfo->codeSize % 4)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
"VUID-VkShaderModuleCreateInfo-pCode-01376",
"SPIR-V module not valid: Codesize must be a multiple of 4 but is " PRINTF_SIZE_T_SPECIFIER ".",
pCreateInfo->codeSize);
@@ -3096,31 +2298,27 @@ bool CoreChecks::PreCallValidateCreateShaderModule(VkDevice device, const VkShad
// Use SPIRV-Tools validator to try and catch any issues with the module itself
spv_target_env spirv_environment = SPV_ENV_VULKAN_1_0;
- if (api_version >= VK_API_VERSION_1_1) {
+ if (GetApiVersion() >= VK_API_VERSION_1_1) {
spirv_environment = SPV_ENV_VULKAN_1_1;
}
spv_context ctx = spvContextCreate(spirv_environment);
spv_const_binary_t binary{pCreateInfo->pCode, pCreateInfo->codeSize / sizeof(uint32_t)};
spv_diagnostic diag = nullptr;
spv_validator_options options = spvValidatorOptionsCreate();
- if (device_extensions.vk_khr_relaxed_block_layout) {
+ if (GetDeviceExtensions()->vk_khr_relaxed_block_layout) {
spvValidatorOptionsSetRelaxBlockLayout(options, true);
}
- if (device_extensions.vk_khr_uniform_buffer_standard_layout &&
- enabled_features.uniform_buffer_standard_layout.uniformBufferStandardLayout == VK_TRUE) {
- spvValidatorOptionsSetUniformBufferStandardLayout(options, true);
- }
- if (device_extensions.vk_ext_scalar_block_layout &&
- enabled_features.scalar_block_layout_features.scalarBlockLayout == VK_TRUE) {
+ if (GetDeviceExtensions()->vk_ext_scalar_block_layout &&
+ GetEnabledFeatures()->scalar_block_layout_features.scalarBlockLayout == VK_TRUE) {
spvValidatorOptionsSetScalarBlockLayout(options, true);
}
spv_valid = spvValidateWithOptions(ctx, options, &binary, &diag);
if (spv_valid != SPV_SUCCESS) {
if (!have_glsl_shader || (pCreateInfo->pCode[0] == spv::MagicNumber)) {
- skip |=
- log_msg(report_data, spv_valid == SPV_WARNING ? VK_DEBUG_REPORT_WARNING_BIT_EXT : VK_DEBUG_REPORT_ERROR_BIT_EXT,
- VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, kVUID_Core_Shader_InconsistentSpirv,
- "SPIR-V module not valid: %s", diag && diag->error ? diag->error : "(no error text)");
+ skip |= log_msg(device_data->report_data,
+ spv_valid == SPV_WARNING ? VK_DEBUG_REPORT_WARNING_BIT_EXT : VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, kVUID_Core_Shader_InconsistentSpirv,
+ "SPIR-V module not valid: %s", diag && diag->error ? diag->error : "(no error text)");
}
} else {
if (cache) {
@@ -3139,77 +2337,27 @@ bool CoreChecks::PreCallValidateCreateShaderModule(VkDevice device, const VkShad
void CoreChecks::PreCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator, VkShaderModule *pShaderModule,
void *csm_state_data) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+
create_shader_module_api_state *csm_state = reinterpret_cast<create_shader_module_api_state *>(csm_state_data);
- if (enabled.gpu_validation) {
- GpuPreCallCreateShaderModule(pCreateInfo, pAllocator, pShaderModule, &csm_state->unique_shader_id,
+ if (GetEnables()->gpu_validation) {
+ GpuPreCallCreateShaderModule(device_data, pCreateInfo, pAllocator, pShaderModule, &csm_state->unique_shader_id,
&csm_state->instrumented_create_info, &csm_state->instrumented_pgm);
}
}
-void ValidationStateTracker::PostCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
- const VkAllocationCallbacks *pAllocator,
- VkShaderModule *pShaderModule, VkResult result,
- void *csm_state_data) {
+void CoreChecks::PostCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator, VkShaderModule *pShaderModule,
+ VkResult result, void *csm_state_data) {
+ layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+
if (VK_SUCCESS != result) return;
create_shader_module_api_state *csm_state = reinterpret_cast<create_shader_module_api_state *>(csm_state_data);
- spv_target_env spirv_environment = ((api_version >= VK_API_VERSION_1_1) ? SPV_ENV_VULKAN_1_1 : SPV_ENV_VULKAN_1_0);
+ spv_target_env spirv_environment = ((GetApiVersion() >= VK_API_VERSION_1_1) ? SPV_ENV_VULKAN_1_1 : SPV_ENV_VULKAN_1_0);
bool is_spirv = (pCreateInfo->pCode[0] == spv::MagicNumber);
- std::unique_ptr<SHADER_MODULE_STATE> new_shader_module(
- is_spirv ? new SHADER_MODULE_STATE(pCreateInfo, *pShaderModule, spirv_environment, csm_state->unique_shader_id)
- : new SHADER_MODULE_STATE());
- shaderModuleMap[*pShaderModule] = std::move(new_shader_module);
-}
-
-bool CoreChecks::ValidateComputeWorkGroupSizes(const SHADER_MODULE_STATE *shader) const {
- bool skip = false;
- uint32_t local_size_x = 0;
- uint32_t local_size_y = 0;
- uint32_t local_size_z = 0;
- if (FindLocalSize(shader, local_size_x, local_size_y, local_size_z)) {
- if (local_size_x > phys_dev_props.limits.maxComputeWorkGroupSize[0]) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
- HandleToUint64(shader->vk_shader_module), "UNASSIGNED-features-limits-maxComputeWorkGroupSize",
- "%s local_size_x (%" PRIu32 ") exceeds device limit maxComputeWorkGroupSize[0] (%" PRIu32 ").",
- report_data->FormatHandle(shader->vk_shader_module).c_str(), local_size_x,
- phys_dev_props.limits.maxComputeWorkGroupSize[0]);
- }
- if (local_size_y > phys_dev_props.limits.maxComputeWorkGroupSize[1]) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
- HandleToUint64(shader->vk_shader_module), "UNASSIGNED-features-limits-maxComputeWorkGroupSize",
- "%s local_size_y (%" PRIu32 ") exceeds device limit maxComputeWorkGroupSize[1] (%" PRIu32 ").",
- report_data->FormatHandle(shader->vk_shader_module).c_str(), local_size_x,
- phys_dev_props.limits.maxComputeWorkGroupSize[1]);
- }
- if (local_size_z > phys_dev_props.limits.maxComputeWorkGroupSize[2]) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
- HandleToUint64(shader->vk_shader_module), "UNASSIGNED-features-limits-maxComputeWorkGroupSize",
- "%s local_size_z (%" PRIu32 ") exceeds device limit maxComputeWorkGroupSize[2] (%" PRIu32 ").",
- report_data->FormatHandle(shader->vk_shader_module).c_str(), local_size_x,
- phys_dev_props.limits.maxComputeWorkGroupSize[2]);
- }
-
- uint32_t limit = phys_dev_props.limits.maxComputeWorkGroupInvocations;
- uint64_t invocations = local_size_x * local_size_y;
- // Prevent overflow.
- bool fail = false;
- if (invocations > UINT32_MAX || invocations > limit) {
- fail = true;
- }
- if (!fail) {
- invocations *= local_size_z;
- if (invocations > UINT32_MAX || invocations > limit) {
- fail = true;
- }
- }
- if (fail) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
- HandleToUint64(shader->vk_shader_module), "UNASSIGNED-features-limits-maxComputeWorkGroupInvocations",
- "%s local_size (%" PRIu32 ", %" PRIu32 ", %" PRIu32
- ") exceeds device limit maxComputeWorkGroupInvocations (%" PRIu32 ").",
- report_data->FormatHandle(shader->vk_shader_module).c_str(), local_size_x, local_size_y, local_size_z,
- limit);
- }
- }
- return skip;
+ std::unique_ptr<shader_module> new_shader_module(
+ is_spirv ? new shader_module(pCreateInfo, *pShaderModule, spirv_environment, csm_state->unique_shader_id)
+ : new shader_module());
+ device_data->shaderModuleMap[*pShaderModule] = std::move(new_shader_module);
}
diff --git a/layers/shader_validation.h b/layers/shader_validation.h
index 0ecc6c794..230adc180 100644
--- a/layers/shader_validation.h
+++ b/layers/shader_validation.h
@@ -20,10 +20,7 @@
#ifndef VULKAN_SHADER_VALIDATION_H
#define VULKAN_SHADER_VALIDATION_H
-#include <unordered_map>
-
-#include <SPIRV/spirv.hpp>
-#include <generated/spirv_tools_commit_id.h>
+#include <spirv_tools_commit_id.h>
#include "spirv-tools/optimizer.hpp"
// A forward iterator over spirv instructions. Provides easy access to len, opcode, and content words
@@ -32,7 +29,7 @@ struct spirv_inst_iter {
std::vector<uint32_t>::const_iterator zero;
std::vector<uint32_t>::const_iterator it;
- uint32_t len() const {
+ uint32_t len() {
auto result = *it >> 16;
assert(result > 0);
return result;
@@ -40,7 +37,7 @@ struct spirv_inst_iter {
uint32_t opcode() { return *it & 0x0ffffu; }
- uint32_t const &word(unsigned n) const {
+ uint32_t const &word(unsigned n) {
assert(n < len());
return it[n];
}
@@ -51,9 +48,9 @@ struct spirv_inst_iter {
spirv_inst_iter(std::vector<uint32_t>::const_iterator zero, std::vector<uint32_t>::const_iterator it) : zero(zero), it(it) {}
- bool operator==(spirv_inst_iter const &other) const { return it == other.it; }
+ bool operator==(spirv_inst_iter const &other) { return it == other.it; }
- bool operator!=(spirv_inst_iter const &other) const { return it != other.it; }
+ bool operator!=(spirv_inst_iter const &other) { return it != other.it; }
spirv_inst_iter operator++(int) { // x++
spirv_inst_iter ii = *this;
@@ -71,104 +68,26 @@ struct spirv_inst_iter {
spirv_inst_iter const &operator*() const { return *this; }
};
-struct decoration_set {
- enum {
- location_bit = 1 << 0,
- patch_bit = 1 << 1,
- relaxed_precision_bit = 1 << 2,
- block_bit = 1 << 3,
- buffer_block_bit = 1 << 4,
- component_bit = 1 << 5,
- input_attachment_index_bit = 1 << 6,
- descriptor_set_bit = 1 << 7,
- binding_bit = 1 << 8,
- nonwritable_bit = 1 << 9,
- builtin_bit = 1 << 10,
- };
- uint32_t flags = 0;
- uint32_t location = static_cast<uint32_t>(-1);
- uint32_t component = 0;
- uint32_t input_attachment_index = 0;
- uint32_t descriptor_set = 0;
- uint32_t binding = 0;
- uint32_t builtin = static_cast<uint32_t>(-1);
-
- void merge(decoration_set const &other) {
- if (other.flags & location_bit) location = other.location;
- if (other.flags & component_bit) component = other.component;
- if (other.flags & input_attachment_index_bit) input_attachment_index = other.input_attachment_index;
- if (other.flags & descriptor_set_bit) descriptor_set = other.descriptor_set;
- if (other.flags & binding_bit) binding = other.binding;
- if (other.flags & builtin_bit) builtin = other.builtin;
- flags |= other.flags;
- }
-
- void add(uint32_t decoration, uint32_t value);
-};
-
-struct SHADER_MODULE_STATE {
+struct shader_module {
// The spirv image itself
std::vector<uint32_t> words;
// A mapping of <id> to the first word of its def. this is useful because walking type
// trees, constant expressions, etc requires jumping all over the instruction stream.
std::unordered_map<unsigned, unsigned> def_index;
- std::unordered_map<unsigned, decoration_set> decorations;
- struct EntryPoint {
- uint32_t offset;
- VkShaderStageFlags stage;
- };
- std::unordered_multimap<std::string, EntryPoint> entry_points;
bool has_valid_spirv;
VkShaderModule vk_shader_module;
uint32_t gpu_validation_shader_id;
std::vector<uint32_t> PreprocessShaderBinary(uint32_t *src_binary, size_t binary_size, spv_target_env env) {
- std::vector<uint32_t> src(src_binary, src_binary + binary_size / sizeof(uint32_t));
-
- // Check if there are any group decoration instructions, and flatten them if found.
- bool has_group_decoration = false;
- bool done = false;
-
- // Walk through the first part of the SPIR-V module, looking for group decoration instructions.
- // Skip the header (5 words).
- auto itr = spirv_inst_iter(src.begin(), src.begin() + 5);
- auto itrend = spirv_inst_iter(src.begin(), src.end());
- while (itr != itrend && !done) {
- spv::Op opcode = (spv::Op)itr.opcode();
- switch (opcode) {
- case spv::OpDecorationGroup:
- case spv::OpGroupDecorate:
- case spv::OpGroupMemberDecorate:
- has_group_decoration = true;
- done = true;
- break;
- case spv::OpFunction:
- // An OpFunction indicates there are no more decorations
- done = true;
- break;
- default:
- break;
- }
- itr++;
- }
-
- if (has_group_decoration) {
- spvtools::Optimizer optimizer(env);
- optimizer.RegisterPass(spvtools::CreateFlattenDecorationPass());
- std::vector<uint32_t> optimized_binary;
- // Run optimizer to flatten decorations only, set skip_validation so as to not re-run validator
- auto result =
- optimizer.Run(src_binary, binary_size / sizeof(uint32_t), &optimized_binary, spvtools::ValidatorOptions(), true);
- if (result) {
- return optimized_binary;
- }
- }
- // Return the original module.
- return src;
+ spvtools::Optimizer optimizer(env);
+ optimizer.RegisterPass(spvtools::CreateFlattenDecorationPass());
+ std::vector<uint32_t> optimized_binary;
+ auto result = optimizer.Run(src_binary, binary_size / sizeof(uint32_t), &optimized_binary);
+ return (result ? optimized_binary : std::vector<uint32_t>(src_binary, src_binary + binary_size / sizeof(uint32_t)));
}
- SHADER_MODULE_STATE(VkShaderModuleCreateInfo const *pCreateInfo, VkShaderModule shaderModule, spv_target_env env,
- uint32_t unique_shader_id)
+ shader_module(VkShaderModuleCreateInfo const *pCreateInfo, VkShaderModule shaderModule, spv_target_env env,
+ uint32_t unique_shader_id)
: words(PreprocessShaderBinary((uint32_t *)pCreateInfo->pCode, pCreateInfo->codeSize, env)),
def_index(),
has_valid_spirv(true),
@@ -177,14 +96,7 @@ struct SHADER_MODULE_STATE {
BuildDefIndex();
}
- SHADER_MODULE_STATE() : has_valid_spirv(false), vk_shader_module(VK_NULL_HANDLE) {}
-
- decoration_set get_decorations(unsigned id) const {
- // return the actual decorations for this id, or a default set.
- auto it = decorations.find(id);
- if (it != decorations.end()) return it->second;
- return decoration_set();
- }
+ shader_module() : has_valid_spirv(false), vk_shader_module(VK_NULL_HANDLE) {}
// Expose begin() / end() to enable range-based for
spirv_inst_iter begin() const { return spirv_inst_iter(words.begin(), words.begin() + 5); } // First insn
@@ -294,4 +206,6 @@ class ValidationCache {
}
};
+typedef std::pair<unsigned, unsigned> descriptor_slot_t;
+
#endif // VULKAN_SHADER_VALIDATION_H
diff --git a/layers/sparse_containers.h b/layers/sparse_containers.h
deleted file mode 100644
index 19f6a2aa7..000000000
--- a/layers/sparse_containers.h
+++ /dev/null
@@ -1,404 +0,0 @@
-/* Copyright (c) 2019 The Khronos Group Inc.
- * Copyright (c) 2019 Valve Corporation
- * Copyright (c) 2019 LunarG, Inc.
- * Copyright (C) 2019 Google Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * John Zulauf <jzulauf@lunarg.com>
- *
- */
-#ifndef SPARSE_CONTAINERS_H_
-#define SPARSE_CONTAINERS_H_
-#define NOMINMAX
-#include <cassert>
-#include <memory>
-#include <unordered_map>
-#include <vector>
-
-namespace sparse_container {
-// SparseVector:
-//
-// Defines a sparse single-dimensional container which is targeted at four distinct use cases
-// 1) Large range of indices sparsely populated ("Sparse access" below)
-// 2) Large range of indices where all values are the same ("Sparse access" below)
-// 3) Large range of values densely populated (more than 1/4 full) ("Dense access" below)
-// 4) Small range of values where direct access is most efficient ("Dense access" below)
-//
-// Two update semantics are supported, based on kSetReplaces:
-//     true -- updates to already set (valid) indices replace current value
-//     false -- updates to already set (valid) indices are ignored.
-//
-// Theory of operation:
-//
-// When constructed, a SparseVector starts (based on its size relative
-// to kSparseThreshold) in either Sparse or Dense access mode.
-//
-// In "Sparse access" mode individual values are stored in a map keyed
-// by the index. A "full range" value (if set) defines the value of all
-// entries not present in the map. Setting a full range value via
-//
-// SetRange(range_min, range_max, full_range_value )
-//
-// either clears the map (kSetReplaces==true) or prevents further
-// updates to the vector (kSetReplaces==false). If the map becomes
-// more than 1/kConversionThreshold (4) full, the SparseVector is
-// converted into "Dense access" mode. Entries are copied from the map,
-// with non-present indices set to the default value (kDefaultValue)
-// or the full range value (if present).
-//
-// In "Dense access" mode, values are stored in a vector the size of
-// the valid range, indexed by the incoming index value minus range_min_.
-// The same update semantic applies based on kSetReplaces.
-//
-// Note that when kSparseThreshold is zero, Dense access mode is always used.
-//
-// Access:
-//
-// NOTE all "end" indices (in construction or access) are *exclusive*.
-//
-// Given the variable semantics and effective compression of Sparse
-// access mode, all access is through Get, Set, and SetRange functions
-// and a constant iterator. Get returns either the value found (using
-// the current access mode) or the kDefaultValue. Set and SetRange
-// return whether or not state was updated, in order to support dirty
-// bit updates for any dependent state.
-//
-// The iterator ConstIterator provides basic, "by value" access. The
-// "by value" nature of the access reflects the compressed representation.
-// Operators *, ++, ==, and != are provided, with the latter two only
-// suitable for comparisons vs. cend. The iterator skips all
-// kDefaultValue entries in either access mode, returning a std::pair
-// containing {IndexType, ValueType}. The multiple access modes give
-// the iterator a bit more complexity than is optimal, but hides the
-// underlying complexity from the callers.
-//
-// TODO: Update iterator to use a reference (likely using
-// reference_wrapper...)
-
-template <typename IndexType_, typename T, bool kSetReplaces, T kDefaultValue = T(), size_t kSparseThreshold = 16>
-class SparseVector {
- public:
- typedef IndexType_ IndexType;
- typedef T value_type;
- typedef value_type ValueType;
- typedef std::unordered_map<IndexType, ValueType> SparseType;
- typedef std::vector<ValueType> DenseType;
-
- SparseVector(IndexType start, IndexType end)
- : range_min_(start), range_max_(end), threshold_((end - start) / kConversionThreshold) {
- assert(end > start);
- Reset();
- }
-
- // Initial access mode is set based on range size vs. kSparseThreshold. Either sparse_ or dense_ is always set, but only
- // ever one at a time
- void Reset() {
- has_full_range_value_ = false;
- full_range_value_ = kDefaultValue;
- size_t count = range_max_ - range_min_;
- if (kSparseThreshold && (count > kSparseThreshold)) {
- sparse_.reset(new SparseType());
- dense_.reset();
- } else {
- sparse_.reset();
- dense_.reset(new DenseType(count, kDefaultValue));
- }
- }
-
- const ValueType &Get(const IndexType index) const {
- // Note that here (and similarly below), the 'IsSparse' clause is
- // eliminated as dead code in release builds if kSparseThreshold==0
- if (IsSparse()) {
- if (!sparse_->empty()) { // Don't attempt lookup in empty map
- auto it = sparse_->find(index);
- if (it != sparse_->cend()) {
- return it->second;
- }
- }
- // If there is a full_range_value_, return it; if there isn't one, full_range_value_ is set to kDefaultValue,
- // so it's still the correct value to return
- return full_range_value_;
- } else {
- // Direct access
- assert(dense_.get());
- const ValueType &value = (*dense_)[index - range_min_];
- return value;
- }
- }
-
- // Set an index's value based on the access mode; update semantics are enforced within the access-mode-specific function
- bool Set(const IndexType index, const ValueType &value) {
- bool updated = false;
- if (IsSparse()) {
- updated = SetSparse(index, value);
- } else {
- assert(dense_.get());
- updated = SetDense(index, value);
- }
- return updated;
- }
-
- // Set a range of values based on access mode, with some update semantics applied at the range level
- bool SetRange(const IndexType start, IndexType end, ValueType value) {
- bool updated = false;
- if (IsSparse()) {
- if (!kSetReplaces && HasFullRange()) return false; // We have full coverage, we can change this no more
-
- bool is_full_range = IsFullRange(start, end);
- if (kSetReplaces && is_full_range) {
- updated = value != full_range_value_;
- full_range_value_ = value;
- if (HasSparseSubranges()) {
- updated = true;
- sparse_->clear(); // full range replaces all subranges
- }
- // Track the has_full_range_value_ state of full_range_value_ to avoid later ValueType comparisons
- has_full_range_value_ = value != kDefaultValue;
- } else if (!kSetReplaces && (value != kDefaultValue) && is_full_range && !HasFullRange()) {
- // With update-only-if-unset semantics (kSetReplaces==false), the value becomes the fallback, and will prevent other updates
- full_range_value_ = value;
- has_full_range_value_ = true;
- updated = true;
- // Clean up the sparse map a bit
- for (auto it = sparse_->begin(); it != sparse_->end();) { // no increment clause because of erase below
- if (it->second == value) {
- it = sparse_->erase(it); // remove redundant entries
- } else {
- ++it;
- }
- }
- } else {
- for (IndexType index = start; index < end; ++index) {
- // NOTE: We can't use SetSparse here, because this may be converted to dense access mid update
- updated |= Set(index, value);
- }
- }
- } else {
- // Note that "Dense Access" does away with the full_range_value_ logic, storing empty entries using kDefaultValue
- assert(dense_);
- for (IndexType index = start; index < end; ++index) {
- updated = SetDense(index, value);
- }
- }
- return updated;
- }
-
- // Set only the non-default values from another sparse vector
- bool Merge(const SparseVector &from) {
- // Must not merge from a SparseVector with larger bounds...
- assert((range_min_ <= from.range_min_) && (range_max_ >= from.range_max_));
- bool updated = false;
- if (from.IsSparse()) {
- if (from.HasFullRange() && !from.HasSparseSubranges()) {
- // Short cut to copy a full range if that's all we have
- updated |= SetRange(from.range_min_, from.range_max_, from.full_range_value_);
- } else {
- // Have to do it the complete (potentially) slow way
- // TODO add sorted keys to iterator to reduce hash lookups
- for (auto it = from.cbegin(); it != from.cend(); ++it) {
- const IndexType index = (*it).first;
- const ValueType &value = (*it).second;
- Set(index, value);
- }
- }
- } else {
- assert(from.dense_);
- DenseType &ray = *from.dense_;
- for (IndexType entry = from.range_min_; entry < from.range_max_; ++entry) {
- IndexType index = entry - from.range_min_;
- if (ray[index] != kDefaultValue) {
- updated |= Set(entry, ray[index]);
- }
- }
- }
- return updated;
- }
-
- friend class ConstIterator;
- class ConstIterator {
- public:
- using SparseType = typename SparseVector::SparseType;
- using SparseIterator = typename SparseType::const_iterator;
- using IndexType = typename SparseVector::IndexType;
- using ValueType = typename SparseVector::ValueType;
- using IteratorValueType = std::pair<IndexType, ValueType>;
- const IteratorValueType &operator*() const { return current_value_; }
-
- ConstIterator &operator++() {
- if (delegated_) { // implies sparse
- ++it_sparse_;
- if (it_sparse_ == vec_->sparse_->cend()) {
- the_end_ = true;
- current_value_.first = vec_->range_max_;
- current_value_.second = SparseVector::DefaultValue();
- } else {
- current_value_.first = it_sparse_->first;
- current_value_.second = it_sparse_->second;
- }
- } else {
- index_++;
- SetCurrentValue();
- }
- return *this;
- }
- bool operator!=(const ConstIterator &rhs) const {
- return (the_end_ != rhs.the_end_); // Just good enough for cend checks
- }
-
- bool operator==(const ConstIterator &rhs) const {
- return (the_end_ == rhs.the_end_); // Just good enough for cend checks
- }
-
- // The iterator has two modes:
- // delegated:
- // where we are in sparse access mode and have no full_range_value
- // and thus can delegate our iteration to underlying map
- // non-delegated:
- // either dense mode or we have a full range value and thus
- // must iterate over the whole range
- ConstIterator(const SparseVector &vec) : vec_(&vec) {
- if (!vec_->IsSparse() || vec_->HasFullRange()) {
- // Must iterate over the entire range, skipping (in the case of dense access) invalid entries
- delegated_ = false;
- index_ = vec_->range_min_;
- SetCurrentValue(); // Skips invalid and sets the_end_
- } else if (vec_->HasSparseSubranges()) {
- // The subranges store the non-default values... and there is no full range value
- delegated_ = true;
- it_sparse_ = vec_->sparse_->cbegin();
- current_value_.first = it_sparse_->first;
- current_value_.second = it_sparse_->second;
- the_end_ = false; // the sparse map is non-empty (per HasSparseSubranges() above)
- } else {
- // Sparse, but with no subranges
- the_end_ = true;
- }
- }
-
- ConstIterator() : vec_(nullptr), the_end_(true) {}
-
- protected:
- const SparseVector *vec_;
- bool the_end_;
- SparseIterator it_sparse_;
- bool delegated_;
- IndexType index_;
- ValueType value_;
-
- IteratorValueType current_value_;
-
- // in the non-delegated case we use normal accessors and skip default values.
- void SetCurrentValue() {
- the_end_ = true;
- while (index_ < vec_->range_max_) {
- value_ = vec_->Get(index_);
- if (value_ != SparseVector::DefaultValue()) {
- the_end_ = false;
- current_value_ = IteratorValueType(index_, value_);
- break;
- }
- index_++;
- }
- }
- };
- typedef ConstIterator const_iterator;
-
- ConstIterator cbegin() const { return ConstIterator(*this); }
- ConstIterator cend() const { return ConstIterator(); }
-
- IndexType RangeMax() const { return range_max_; }
- IndexType RangeMin() const { return range_min_; }
-
- static const unsigned kConversionThreshold = 4;
- const IndexType range_min_; // inclusive
- const IndexType range_max_; // exclusive
- const IndexType threshold_; // sparse entry count above which we convert to dense access
-
- // Data for sparse mode
- // We have a short cut for full range values when in sparse mode
- bool has_full_range_value_;
- ValueType full_range_value_;
- std::unique_ptr<SparseType> sparse_;
-
- // Data for dense mode
- std::unique_ptr<DenseType> dense_;
-
- static const ValueType &DefaultValue() {
- static ValueType value = kDefaultValue;
- return value;
- }
- // Note that IsSparse is compile-time reducible if kSparseThreshold is zero...
- inline bool IsSparse() const { return kSparseThreshold && sparse_.get(); }
- bool IsFullRange(IndexType start, IndexType end) const { return (start == range_min_) && (end == range_max_); }
- bool IsFullRangeValue(const ValueType &value) const { return has_full_range_value_ && (value == full_range_value_); }
- bool HasFullRange() const { return IsSparse() && has_full_range_value_; }
- bool HasSparseSubranges() const { return IsSparse() && !sparse_->empty(); }
-
- // This is called unconditionally, to encapsulate the conversion criteria and logic here
- void SparseToDenseConversion() {
- // If we're using more than the threshold of the sparse range, convert to dense_
- if (IsSparse() && (sparse_->size() > threshold_)) {
- ValueType default_value = HasFullRange() ? full_range_value_ : kDefaultValue;
- dense_.reset(new DenseType((range_max_ - range_min_), default_value));
- DenseType &ray = *dense_;
- for (auto const &item : *sparse_) {
- ray[item.first - range_min_] = item.second;
- }
- sparse_.reset();
- has_full_range_value_ = false;
- }
- }
-
- // Dense access mode setter with update semantics implemented
- bool SetDense(IndexType index, const ValueType &value) {
- bool updated = false;
- ValueType &current_value = (*dense_)[index - range_min_];
- if ((kSetReplaces || current_value == kDefaultValue) && (value != current_value)) {
- current_value = value;
- updated = true;
- }
- return updated;
- }
-
- // Sparse access mode setter with full-range handling and update semantics implemented
- bool SetSparse(IndexType index, const ValueType &value) {
- if (!kSetReplaces && HasFullRange()) {
- return false; // We have full coverage, we can change this no more
- }
-
- if (kSetReplaces && IsFullRangeValue(value) && HasSparseSubranges()) {
- auto erasure = sparse_->erase(index); // Remove duplicate record from map
- return erasure > 0;
- }
-
- // Use insert to reduce the number of hash lookups
- auto map_pair = std::make_pair(index, value);
- auto insert_pair = sparse_->insert(map_pair);
- auto &it = insert_pair.first; // use references to avoid nested pair accesses
- const bool inserted = insert_pair.second;
- bool updated = false;
- if (inserted) {
- updated = true;
- SparseToDenseConversion();
- } else if (kSetReplaces && value != it->second) {
- // Only replace value if semantics allow it and it has changed.
- it->second = value;
- updated = true;
- }
- return updated;
- }
-};
-
-} // namespace sparse_container
-#endif
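The SparseVector documented at the top of the removed header combined two access modes (sparse map vs. dense vector) behind Get/Set/SetRange plus a skipping const iterator. A small usage sketch of those semantics, with invented names and values, for readers following the removal:

    // Sketch of the removed sparse_container::SparseVector behavior.
    // kSetReplaces=false: writes to an already-set index are ignored.
    using LayoutVector = sparse_container::SparseVector<uint32_t, int, /*kSetReplaces=*/false>;

    LayoutVector layouts(0, 1024);      // valid indices are [0, 1024); starts in sparse mode
    layouts.Set(5, 7);                  // true:  state changed
    layouts.Set(5, 9);                  // false: index 5 already set, update ignored
    layouts.SetRange(0, 1024, 3);       // full-range fallback for all still-unset indices
    int v = layouts.Get(100);           // 3: falls back to the full range value
    int w = layouts.Get(5);             // 7: explicit entry wins

    for (auto it = layouts.cbegin(); it != layouts.cend(); ++it) {
        // (*it) is an {index, value} pair; kDefaultValue entries are skipped
    }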
diff --git a/layers/stateless_validation.h b/layers/stateless_validation.h
index 199536ccc..977cf9b51 100644
--- a/layers/stateless_validation.h
+++ b/layers/stateless_validation.h
@@ -21,6 +21,8 @@
#pragma once
+#include <bitset>
+
#include "parameter_name.h"
#include "vk_typemap_helper.h"
@@ -42,7 +44,6 @@ static const char DECORATE_UNUSED *kVUID_PVError_DeviceLimit = "UNASSIGNED-Gener
static const char DECORATE_UNUSED *kVUID_PVError_DeviceFeature = "UNASSIGNED-GeneralParameterError-DeviceFeature";
static const char DECORATE_UNUSED *kVUID_PVError_FailureCode = "UNASSIGNED-GeneralParameterError-FailureCode";
static const char DECORATE_UNUSED *kVUID_PVError_ExtensionNotEnabled = "UNASSIGNED-GeneralParameterError-ExtensionNotEnabled";
-static const char DECORATE_UNUSED *kVUID_PVPerfWarn_SuboptimalSwapchain = "UNASSIGNED-GeneralParameterPerfWarn-SuboptimalSwapchain";
#undef DECORATE_UNUSED
@@ -53,7 +54,6 @@ extern const VkColorComponentFlags AllVkColorComponentFlagBits;
extern const VkShaderStageFlags AllVkShaderStageFlagBits;
extern const VkQueryControlFlags AllVkQueryControlFlagBits;
extern const VkImageUsageFlags AllVkImageUsageFlagBits;
-extern const VkSampleCountFlags AllVkSampleCountFlagBits;
extern const std::vector<VkCompareOp> AllVkCompareOpEnums;
extern const std::vector<VkStencilOp> AllVkStencilOpEnums;
@@ -62,9 +62,11 @@ extern const std::vector<VkBlendOp> AllVkBlendOpEnums;
extern const std::vector<VkLogicOp> AllVkLogicOpEnums;
extern const std::vector<VkBorderColor> AllVkBorderColorEnums;
extern const std::vector<VkImageLayout> AllVkImageLayoutEnums;
-extern const std::vector<VkFormat> AllVkFormatEnums;
-extern const std::vector<VkVertexInputRate> AllVkVertexInputRateEnums;
-extern const std::vector<VkPrimitiveTopology> AllVkPrimitiveTopologyEnums;
+
+struct GenericHeader {
+ VkStructureType sType;
+ const void *pNext;
+};
// String returned by string_VkStructureType for an unrecognized type.
const std::string UnsupportedStructureTypeString = "Unhandled VkStructureType";
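GenericHeader, added above, models the sType/pNext prefix that every Vulkan extension structure begins with, which is what lets the pNext validation below cast an unknown chain entry and keep walking. A minimal illustration of that traversal (the function name is hypothetical):

    // Sketch: walk an arbitrary pNext chain through the GenericHeader prefix.
    static void WalkPnextChain(const void *pNext) {
        const GenericHeader *current = reinterpret_cast<const GenericHeader *>(pNext);
        while (current != nullptr) {
            // current->sType identifies the concrete extension structure here.
            current = reinterpret_cast<const GenericHeader *>(current->pNext);
        }
    }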
@@ -90,8 +92,9 @@ struct LogMiscParams {
class StatelessValidation : public ValidationObject {
public:
VkPhysicalDeviceLimits device_limits = {};
- safe_VkPhysicalDeviceFeatures2 physical_device_features2;
- const VkPhysicalDeviceFeatures &physical_device_features = physical_device_features2.features;
+ VkPhysicalDeviceFeatures physical_device_features = {};
+ VkDevice device = VK_NULL_HANDLE;
+ uint32_t api_version;
// Override chassis read/write locks for this validation object
// This override takes a deferred lock. i.e. it is not acquired.
@@ -101,7 +104,6 @@ class StatelessValidation : public ValidationObject {
struct DeviceExtensionProperties {
VkPhysicalDeviceShadingRateImagePropertiesNV shading_rate_image_props;
VkPhysicalDeviceMeshShaderPropertiesNV mesh_shader_props;
- VkPhysicalDeviceRayTracingPropertiesNV ray_tracing_props;
};
DeviceExtensionProperties phys_dev_ext_props = {};
@@ -474,7 +476,7 @@ class StatelessValidation : public ValidationObject {
}
// Forward declaration for pNext validation
- bool ValidatePnextStructContents(const char *api_name, const ParameterName &parameter_name, const VkBaseOutStructure *header);
+ bool ValidatePnextStructContents(const char *api_name, const ParameterName &parameter_name, const GenericHeader *header);
/**
* Validate a structure's pNext member.
@@ -521,7 +523,7 @@ class StatelessValidation : public ValidationObject {
} else {
const VkStructureType *start = allowed_types;
const VkStructureType *end = allowed_types + allowed_type_count;
- const VkBaseOutStructure *current = reinterpret_cast<const VkBaseOutStructure *>(next);
+ const GenericHeader *current = reinterpret_cast<const GenericHeader *>(next);
cycle_check.insert(next);
@@ -574,7 +576,7 @@ class StatelessValidation : public ValidationObject {
}
skip_call |= ValidatePnextStructContents(api_name, parameter_name, current);
}
- current = reinterpret_cast<const VkBaseOutStructure *>(current->pNext);
+ current = reinterpret_cast<const GenericHeader *>(current->pNext);
}
}
}
@@ -704,8 +706,6 @@ class StatelessValidation : public ValidationObject {
return skip_call;
}
- enum FlagType { kRequiredFlags, kOptionalFlags, kRequiredSingleBit, kOptionalSingleBit };
-
/**
* Validate a Vulkan bitmask value.
*
@@ -717,37 +717,27 @@ class StatelessValidation : public ValidationObject {
* @param flag_bits_name Name of the VkFlags type being validated.
* @param all_flags A bit mask combining all valid flag bits for the VkFlags type being validated.
* @param value VkFlags value to validate.
- * @param flag_type The type of flag, like optional, or single bit.
- * @param vuid VUID used for flag that is outside defined bits (or has more than one bit for Bits type).
- * @param flags_zero_vuid VUID used for non-optional Flags that are zero.
+ * @param flags_required The 'value' parameter may not be 0 when true.
+ * @param singleFlag The 'value' parameter may not contain more than one bit from all_flags.
* @return Boolean value indicating that the call should be skipped.
*/
bool validate_flags(const char *api_name, const ParameterName &parameter_name, const char *flag_bits_name, VkFlags all_flags,
- VkFlags value, const FlagType flag_type, const char *vuid, const char *flags_zero_vuid = nullptr) {
+ VkFlags value, bool flags_required, bool singleFlag, const char *vuid) {
bool skip_call = false;
- if ((value & ~all_flags) != 0) {
- skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
- "%s: value of %s contains flag bits that are not recognized members of %s", api_name,
- parameter_name.get_name().c_str(), flag_bits_name);
- }
-
- const bool required = flag_type == kRequiredFlags || flag_type == kRequiredSingleBit;
- const char *zero_vuid = flag_type == kRequiredFlags ? flags_zero_vuid : vuid;
- if (required && value == 0) {
- skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, zero_vuid,
- "%s: value of %s must not be 0.", api_name, parameter_name.get_name().c_str());
- }
-
- const auto HasMaxOneBitSet = [](const VkFlags f) {
- // Decrement flips bits from the right up to the first 1.
- // The rest stays the same, and if any other 1s were ANDed together the result would be non-zero. QED
- return f == 0 || !(f & (f - 1));
- };
-
- const bool is_bits_type = flag_type == kRequiredSingleBit || flag_type == kOptionalSingleBit;
- if (is_bits_type && !HasMaxOneBitSet(value)) {
- skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+ if (value == 0) {
+ if (flags_required) {
+ skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+ "%s: value of %s must not be 0.", api_name, parameter_name.get_name().c_str());
+ }
+ } else if ((value & (~all_flags)) != 0) {
+ skip_call |=
+ log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ kVUID_PVError_UnrecognizedValue, "%s: value of %s contains flag bits that are not recognized members of %s",
+ api_name, parameter_name.get_name().c_str(), flag_bits_name);
+ } else if (singleFlag && (std::bitset<sizeof(VkFlags) * 8>(value).count() > 1)) {
+ skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ kVUID_PVError_UnrecognizedValue,
"%s: value of %s contains multiple members of %s when only a single value is allowed", api_name,
parameter_name.get_name().c_str(), flag_bits_name);
}
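With the rewritten validate_flags above, callers select behavior through the flags_required and singleFlag booleans rather than a FlagType enum, and unrecognized-bit and multiple-bit failures now report kVUID_PVError_UnrecognizedValue. A hypothetical call (the API name, parameter, and VUID below are illustrative, not lifted from the generated callers):

    // Sketch of a call under the new signature.
    skip |= validate_flags("vkCreateImage", ParameterName("pCreateInfo->usage"), "VkImageUsageFlagBits",
                           AllVkImageUsageFlagBits, pCreateInfo->usage,
                           /*flags_required=*/true, /*singleFlag=*/false,
                           "VUID-VkImageCreateInfo-usage-requiredbitmask");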
@@ -837,52 +827,6 @@ class StatelessValidation : public ValidationObject {
enum RenderPassCreateVersion { RENDER_PASS_VERSION_1 = 0, RENDER_PASS_VERSION_2 = 1 };
template <typename RenderPassCreateInfoGeneric>
- bool ValidateSubpassGraphicsFlags(const debug_report_data *report_data, const RenderPassCreateInfoGeneric *pCreateInfo,
- uint32_t dependency_index, uint32_t subpass, VkPipelineStageFlags stages, const char *vuid,
- const char *target) {
- const VkPipelineStageFlags kCommonStages = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT | VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
- const VkPipelineStageFlags kFramebufferStages =
- VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT |
- VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
- const VkPipelineStageFlags kPrimitiveShadingPipelineStages =
- kCommonStages | VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT | VK_PIPELINE_STAGE_VERTEX_INPUT_BIT |
- VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT |
- VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT | VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT |
- VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT | VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV | kFramebufferStages;
- const VkPipelineStageFlags kMeshShadingPipelineStages =
- kCommonStages | VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT | VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV |
- VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV | VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV | kFramebufferStages;
- const VkPipelineStageFlags kFragmentDensityStages = VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT;
- const VkPipelineStageFlags kConditionalRenderingStages = VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT;
- const VkPipelineStageFlags kCommandProcessingPipelineStages = kCommonStages | VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX;
-
- const VkPipelineStageFlags kGraphicsStages = VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT | kPrimitiveShadingPipelineStages |
- kMeshShadingPipelineStages | kFragmentDensityStages |
- kConditionalRenderingStages | kCommandProcessingPipelineStages;
-
- bool skip = false;
-
- const auto IsPipeline = [pCreateInfo](uint32_t subpass, const VkPipelineBindPoint stage) {
- if (subpass == VK_SUBPASS_EXTERNAL)
- return false;
- else
- return pCreateInfo->pSubpasses[subpass].pipelineBindPoint == stage;
- };
-
- const bool is_all_graphics_stages = (stages & ~kGraphicsStages) == 0;
- if (IsPipeline(subpass, VK_PIPELINE_BIND_POINT_GRAPHICS) && !is_all_graphics_stages) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, 0, vuid,
- "Dependency pDependencies[%" PRIu32
- "] specifies a %sStageMask that contains stages (%s) that are not part "
- "of the Graphics pipeline, as specified by the %sSubpass (= %" PRIu32 ") in pipelineBindPoint.",
- dependency_index, target, string_VkPipelineStageFlags(stages & ~kGraphicsStages).c_str(), target, subpass);
- }
-
- return skip;
- };
-
- template <typename RenderPassCreateInfoGeneric>
bool CreateRenderPassGeneric(VkDevice device, const RenderPassCreateInfoGeneric *pCreateInfo,
const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
RenderPassCreateVersion rp_version) {
@@ -921,22 +865,6 @@ class StatelessValidation : public ValidationObject {
pCreateInfo->pSubpasses[i].colorAttachmentCount, max_color_attachments);
}
}
-
- for (uint32_t i = 0; i < pCreateInfo->dependencyCount; ++i) {
- const auto &dependency = pCreateInfo->pDependencies[i];
-
- // Spec currently only supports Graphics pipeline in render pass -- so only that pipeline is currently checked
- vuid =
- use_rp2 ? "VUID-VkRenderPassCreateInfo2KHR-pDependencies-03054" : "VUID-VkRenderPassCreateInfo-pDependencies-00837";
- skip |= ValidateSubpassGraphicsFlags(report_data, pCreateInfo, i, dependency.srcSubpass, dependency.srcStageMask, vuid,
- "src");
-
- vuid =
- use_rp2 ? "VUID-VkRenderPassCreateInfo2KHR-pDependencies-03055" : "VUID-VkRenderPassCreateInfo-pDependencies-00838";
- skip |= ValidateSubpassGraphicsFlags(report_data, pCreateInfo, i, dependency.dstSubpass, dependency.dstStageMask, vuid,
- "dst");
- }
-
return skip;
}
@@ -978,15 +906,6 @@ class StatelessValidation : public ValidationObject {
bool ValidateDeviceQueueFamily(uint32_t queue_family, const char *cmd_name, const char *parameter_name,
const std::string &error_code, bool optional);
- bool ValidateGeometryTrianglesNV(const VkGeometryTrianglesNV &triangles, VkDebugReportObjectTypeEXT object_type,
- uint64_t object_handle, const char *func_name) const;
- bool ValidateGeometryAABBNV(const VkGeometryAABBNV &geometry, VkDebugReportObjectTypeEXT object_type, uint64_t object_handle,
- const char *func_name) const;
- bool ValidateGeometryNV(const VkGeometryNV &geometry, VkDebugReportObjectTypeEXT object_type, uint64_t object_handle,
- const char *func_name) const;
- bool ValidateAccelerationStructureInfoNV(const VkAccelerationStructureInfoNV &info, VkDebugReportObjectTypeEXT object_type,
- uint64_t object_handle, const char *func_nam) const;
-
bool OutputExtensionError(const std::string &api_name, const std::string &extension_name);
void PostCallRecordCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
@@ -1000,8 +919,6 @@ class StatelessValidation : public ValidationObject {
void PostCallRecordCreateInstance(const VkInstanceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator,
VkInstance *pInstance, VkResult result);
- void PostCallRecordQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo, VkResult result);
-
bool manual_PreCallValidateCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool);
@@ -1017,6 +934,9 @@ class StatelessValidation : public ValidationObject {
bool manual_PreCallValidateCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator, VkImage *pImage);
+ bool manual_PreCallValidateCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator, VkImageView *pView);
+
bool manual_PreCallValidateViewport(const VkViewport &viewport, const char *fn_name, const ParameterName &parameter_name,
VkDebugReportObjectTypeEXT object_type, uint64_t object);
@@ -1036,7 +956,7 @@ class StatelessValidation : public ValidationObject {
bool manual_PreCallValidateUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
const VkWriteDescriptorSet *pDescriptorWrites, uint32_t descriptorCopyCount,
const VkCopyDescriptorSet *pDescriptorCopies);
-
+
bool manual_PreCallValidateFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount,
const VkDescriptorSet *pDescriptorSets);
@@ -1067,10 +987,6 @@ class StatelessValidation : public ValidationObject {
bool manual_PreCallValidateCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
uint32_t count, uint32_t stride);
- bool manual_PreCallValidateCmdClearAttachments(VkCommandBuffer commandBuffer, uint32_t attachmentCount,
- const VkClearAttachment *pAttachments, uint32_t rectCount,
- const VkClearRect *pRects);
-
bool manual_PreCallValidateCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
const VkImageCopy *pRegions);
@@ -1133,35 +1049,5 @@ class StatelessValidation : public ValidationObject {
uint32_t *pPropertyCount, VkExtensionProperties *pProperties);
bool manual_PreCallValidateAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory);
-
- bool manual_PreCallValidateCreateAccelerationStructureNV(VkDevice device,
- const VkAccelerationStructureCreateInfoNV *pCreateInfo,
- const VkAllocationCallbacks *pAllocator,
- VkAccelerationStructureNV *pAccelerationStructure);
- bool manual_PreCallValidateCmdBuildAccelerationStructureNV(VkCommandBuffer commandBuffer,
- const VkAccelerationStructureInfoNV *pInfo, VkBuffer instanceData,
- VkDeviceSize instanceOffset, VkBool32 update,
- VkAccelerationStructureNV dst, VkAccelerationStructureNV src,
- VkBuffer scratch, VkDeviceSize scratchOffset);
- bool manual_PreCallValidateGetAccelerationStructureHandleNV(VkDevice device, VkAccelerationStructureNV accelerationStructure,
- size_t dataSize, void *pData);
- bool manual_PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
- const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
- const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines);
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- bool PreCallValidateGetDeviceGroupSurfacePresentModes2EXT(VkDevice device, const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
- VkDeviceGroupPresentModeFlagsKHR *pModes);
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
- bool manual_PreCallValidateCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
- const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer);
-
- bool manual_PreCallValidateCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor,
- uint16_t lineStipplePattern);
-
- bool manual_PreCallValidateCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
- VkIndexType indexType);
-
#include "parameter_validation.h"
}; // Class StatelessValidation
diff --git a/layers/vk_format_utils.cpp b/layers/vk_format_utils.cpp
index 9990144b5..c4b9959fd 100644
--- a/layers/vk_format_utils.cpp
+++ b/layers/vk_format_utils.cpp
@@ -1157,25 +1157,7 @@ VK_LAYER_EXPORT VkFormatCompatibilityClass FormatCompatibilityClass(VkFormat for
// Return size, in bytes, of one element of the specified format
// For uncompressed this is one texel, for compressed it is one block
-VK_LAYER_EXPORT uint32_t FormatElementSize(VkFormat format, VkImageAspectFlags aspectMask) {
- // Handle special buffer packing rules for specific depth/stencil formats
- if (aspectMask & VK_IMAGE_ASPECT_STENCIL_BIT) {
- format = VK_FORMAT_S8_UINT;
- } else if (aspectMask & VK_IMAGE_ASPECT_DEPTH_BIT) {
- switch (format) {
- case VK_FORMAT_D16_UNORM_S8_UINT:
- format = VK_FORMAT_D16_UNORM;
- break;
- case VK_FORMAT_D32_SFLOAT_S8_UINT:
- format = VK_FORMAT_D32_SFLOAT;
- break;
- default:
- break;
- }
- } else if (FormatIsMultiplane(format)) {
- format = FindMultiplaneCompatibleFormat(format, aspectMask);
- }
-
+VK_LAYER_EXPORT uint32_t FormatElementSize(VkFormat format) {
auto item = vk_format_table.find(format);
if (item != vk_format_table.end()) {
return item->second.size;
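After this simplification, FormatElementSize is a plain table lookup: the caller is responsible for resolving a depth/stencil or multi-planar aspect to a concrete format before calling it, and the return value is bytes per texel for uncompressed formats or bytes per block for compressed ones. For example (sizes taken from the Vulkan format definitions):

    uint32_t rgba8_size = FormatElementSize(VK_FORMAT_R8G8B8A8_UNORM);      // 4 bytes per texel
    uint32_t bc1_size   = FormatElementSize(VK_FORMAT_BC1_RGB_UNORM_BLOCK); // 8 bytes per 4x4 block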
@@ -1213,14 +1195,6 @@ VK_LAYER_EXPORT VkDeviceSize SafeModulo(VkDeviceSize dividend, VkDeviceSize divi
return result;
}
-VK_LAYER_EXPORT VkDeviceSize SafeDivision(VkDeviceSize dividend, VkDeviceSize divisor) {
- VkDeviceSize result = 0;
- if (divisor != 0) {
- result = dividend / divisor;
- }
- return result;
-}
-
struct VULKAN_PER_PLANE_COMPATIBILITY {
uint32_t width_divisor;
uint32_t height_divisor;
@@ -1298,7 +1272,6 @@ const std::map<VkFormat, VULKAN_MULTIPLANE_COMPATIBILITY> vk_multiplane_compatib
// clang-format on
uint32_t GetPlaneIndex(VkImageAspectFlags aspect) {
- // Returns an out of bounds index on error
switch (aspect) {
case VK_IMAGE_ASPECT_PLANE_0_BIT:
return 0;
@@ -1310,8 +1283,7 @@ uint32_t GetPlaneIndex(VkImageAspectFlags aspect) {
return 2;
break;
default:
- // If more than one plane bit is set, return error condition
- return VK_MULTIPLANE_FORMAT_MAX_PLANES;
+ return 0;
break;
}
}
@@ -1366,7 +1338,3 @@ VK_LAYER_EXPORT bool FormatSizesAreEqual(VkFormat srcFormat, VkFormat dstFormat,
return (dstSize == srcSize);
}
}
-
-VK_LAYER_EXPORT bool FormatRequiresYcbcrConversion(VkFormat format) {
- return format >= VK_FORMAT_G8B8G8R8_422_UNORM && format <= VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM;
-}
diff --git a/layers/vk_format_utils.h b/layers/vk_format_utils.h
index 465605587..96cc3cfba 100644
--- a/layers/vk_format_utils.h
+++ b/layers/vk_format_utils.h
@@ -160,7 +160,6 @@ VK_LAYER_EXPORT bool FormatIsCompressed(VkFormat format);
VK_LAYER_EXPORT bool FormatIsPacked(VkFormat format);
VK_LAYER_EXPORT bool FormatElementIsTexel(VkFormat format);
VK_LAYER_EXPORT bool FormatSizesAreEqual(VkFormat srcFormat, VkFormat dstFormat, uint32_t region_count, const VkImageCopy *regions);
-VK_LAYER_EXPORT bool FormatRequiresYcbcrConversion(VkFormat format);
VK_LAYER_EXPORT uint32_t FormatDepthSize(VkFormat format);
VK_LAYER_EXPORT VkFormatNumericalType FormatDepthNumericalType(VkFormat format);
@@ -169,12 +168,10 @@ VK_LAYER_EXPORT VkFormatNumericalType FormatStencilNumericalType(VkFormat format
VK_LAYER_EXPORT uint32_t FormatPlaneCount(VkFormat format);
VK_LAYER_EXPORT uint32_t FormatChannelCount(VkFormat format);
VK_LAYER_EXPORT VkExtent3D FormatTexelBlockExtent(VkFormat format);
-VK_LAYER_EXPORT uint32_t FormatElementSize(VkFormat format, VkImageAspectFlags aspectMask = VK_IMAGE_ASPECT_COLOR_BIT);
+VK_LAYER_EXPORT uint32_t FormatElementSize(VkFormat format);
VK_LAYER_EXPORT double FormatTexelSize(VkFormat format);
VK_LAYER_EXPORT VkFormatCompatibilityClass FormatCompatibilityClass(VkFormat format);
VK_LAYER_EXPORT VkDeviceSize SafeModulo(VkDeviceSize dividend, VkDeviceSize divisor);
-VK_LAYER_EXPORT VkDeviceSize SafeDivision(VkDeviceSize dividend, VkDeviceSize divisor);
-VK_LAYER_EXPORT uint32_t GetPlaneIndex(VkImageAspectFlags aspect);
VK_LAYER_EXPORT VkFormat FindMultiplaneCompatibleFormat(VkFormat fmt, VkImageAspectFlags plane_aspect);
VK_LAYER_EXPORT VkExtent2D FindMultiplaneExtentDivisors(VkFormat mp_fmt, VkImageAspectFlags plane_aspect);
diff --git a/layers/vk_layer_config.cpp b/layers/vk_layer_config.cpp
index 0957511df..896791601 100644
--- a/layers/vk_layer_config.cpp
+++ b/layers/vk_layer_config.cpp
@@ -1,8 +1,7 @@
/**************************************************************************
*
- * Copyright 2014-2019 Valve Software
- * Copyright 2015-2019 Google Inc.
- * Copyright 2019 LunarG, Inc.
+ * Copyright 2014 Valve Software
+ * Copyright 2015 Google Inc.
* All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -46,7 +45,6 @@ class ConfigFile {
const char *getOption(const std::string &_option);
void setOption(const std::string &_option, const std::string &_val);
- std::string vk_layer_disables_env_var{};
private:
bool m_fileIsParsed;
@@ -78,10 +76,6 @@ std::string getEnvironment(const char *variable) {
}
VK_LAYER_EXPORT const char *getLayerOption(const char *_option) { return g_configFileObj.getOption(_option); }
-VK_LAYER_EXPORT const char *GetLayerEnvVar(const char *_option) {
- g_configFileObj.vk_layer_disables_env_var = getEnvironment(_option);
- return g_configFileObj.vk_layer_disables_env_var.c_str();
-}
// If option is NULL or stdout, return stdout, otherwise try to open option
// as a filename. If successful, return file handle, otherwise stdout
@@ -145,7 +139,6 @@ VK_LAYER_EXPORT void setLayerOption(const char *_option, const char *_val) { g_c
// Constructor for ConfigFile. Initialize layers to log error messages to stdout by default. If a vk_layer_settings file is present,
// its settings will override the defaults.
ConfigFile::ConfigFile() : m_fileIsParsed(false) {
- m_valueMap["khronos_validation.report_flags"] = "error";
m_valueMap["lunarg_core_validation.report_flags"] = "error";
m_valueMap["lunarg_object_tracker.report_flags"] = "error";
m_valueMap["lunarg_parameter_validation.report_flags"] = "error";
@@ -154,8 +147,6 @@ ConfigFile::ConfigFile() : m_fileIsParsed(false) {
#ifdef WIN32
// For Windows, enable message logging AND OutputDebugString
- m_valueMap["khronos_validation.debug_action"] =
- "VK_DBG_LAYER_ACTION_DEFAULT,VK_DBG_LAYER_ACTION_LOG_MSG,VK_DBG_LAYER_ACTION_DEBUG_OUTPUT";
m_valueMap["lunarg_core_validation.debug_action"] =
"VK_DBG_LAYER_ACTION_DEFAULT,VK_DBG_LAYER_ACTION_LOG_MSG,VK_DBG_LAYER_ACTION_DEBUG_OUTPUT";
m_valueMap["lunarg_object_tracker.debug_action"] =
@@ -167,14 +158,13 @@ ConfigFile::ConfigFile() : m_fileIsParsed(false) {
m_valueMap["google_unique_objects.debug_action"] =
"VK_DBG_LAYER_ACTION_DEFAULT,VK_DBG_LAYER_ACTION_LOG_MSG,VK_DBG_LAYER_ACTION_DEBUG_OUTPUT";
#else // WIN32
- m_valueMap["khronos_validation.debug_action"] = "VK_DBG_LAYER_ACTION_DEFAULT,VK_DBG_LAYER_ACTION_LOG_MSG";
m_valueMap["lunarg_core_validation.debug_action"] = "VK_DBG_LAYER_ACTION_DEFAULT,VK_DBG_LAYER_ACTION_LOG_MSG";
m_valueMap["lunarg_object_tracker.debug_action"] = "VK_DBG_LAYER_ACTION_DEFAULT,VK_DBG_LAYER_ACTION_LOG_MSG";
m_valueMap["lunarg_parameter_validation.debug_action"] = "VK_DBG_LAYER_ACTION_DEFAULT,VK_DBG_LAYER_ACTION_LOG_MSG";
m_valueMap["google_threading.debug_action"] = "VK_DBG_LAYER_ACTION_DEFAULT,VK_DBG_LAYER_ACTION_LOG_MSG";
m_valueMap["google_unique_objects.debug_action"] = "VK_DBG_LAYER_ACTION_DEFAULT,VK_DBG_LAYER_ACTION_LOG_MSG";
#endif // WIN32
- m_valueMap["khronos_validation.log_filename"] = "stdout";
+
m_valueMap["lunarg_core_validation.log_filename"] = "stdout";
m_valueMap["lunarg_object_tracker.log_filename"] = "stdout";
m_valueMap["lunarg_parameter_validation.log_filename"] = "stdout";
diff --git a/layers/vk_layer_config.h b/layers/vk_layer_config.h
index d426e6799..e0b9d3260 100644
--- a/layers/vk_layer_config.h
+++ b/layers/vk_layer_config.h
@@ -1,6 +1,6 @@
-/* Copyright (c) 2015-2019 The Khronos Group Inc.
- * Copyright (c) 2015-2019 Valve Corporation
- * Copyright (c) 2015-2019 LunarG, Inc.
+/* Copyright (c) 2015-2016 The Khronos Group Inc.
+ * Copyright (c) 2015-2016 Valve Corporation
+ * Copyright (c) 2015-2016 LunarG, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -58,8 +58,6 @@ const std::unordered_map<std::string, VkFlags> report_flags_option_definitions =
{std::string("debug"), VK_DEBUG_REPORT_DEBUG_BIT_EXT}};
VK_LAYER_EXPORT const char *getLayerOption(const char *_option);
-VK_LAYER_EXPORT const char *GetLayerEnvVar(const char *_option);
-
VK_LAYER_EXPORT FILE *getLayerLogOutput(const char *_option, const char *layerName);
VK_LAYER_EXPORT VkFlags GetLayerOptionFlags(std::string _option, std::unordered_map<std::string, VkFlags> const &enum_data,
uint32_t option_default);
diff --git a/layers/vk_layer_logging.h b/layers/vk_layer_logging.h
index d21e9ffa5..68b6ee167 100644
--- a/layers/vk_layer_logging.h
+++ b/layers/vk_layer_logging.h
@@ -24,32 +24,24 @@
#ifndef LAYER_LOGGING_H
#define LAYER_LOGGING_H
-#include <cinttypes>
-#include <signal.h>
-#include <stdarg.h>
-#include <stdbool.h>
-#include <stdio.h>
-
-#include <algorithm>
-#include <array>
-#include <memory>
-#include <mutex>
-#include <sstream>
-#include <string>
-#include <vector>
-#include <unordered_map>
-#include <utility>
-
-#include "vk_typemap_helper.h"
#include "vk_loader_layer.h"
#include "vk_layer_config.h"
#include "vk_layer_data.h"
#include "vk_loader_platform.h"
#include "vulkan/vk_layer.h"
#include "vk_object_types.h"
-#include "cast_utils.h"
#include "vk_validation_error_messages.h"
#include "vk_layer_dispatch_table.h"
+#include <mutex>
+#include <signal.h>
+#include <cinttypes>
+#include <stdarg.h>
+#include <stdbool.h>
+#include <stdio.h>
+#include <unordered_map>
+#include <vector>
+#include <sstream>
+#include <string>
// Suppress unused warning on Linux
#if defined(__GNUC__)
@@ -75,69 +67,17 @@ static const char DECORATE_UNUSED *kVUIDUndefined = "VUID_Undefined";
// TODO: Could be autogenerated for the specific handles for extra type safety...
template <typename HANDLE_T>
-static inline uint64_t HandleToUint64(HANDLE_T h) {
- return CastToUint64<HANDLE_T>(h);
+static inline uint64_t HandleToUint64(HANDLE_T *h) {
+ return reinterpret_cast<uint64_t>(h);
}
static inline uint64_t HandleToUint64(uint64_t h) { return h; }
// Data we store per label for logging
-struct LoggingLabel {
+typedef struct _LoggingLabelData {
std::string name;
- std::array<float, 4> color;
-
- void Reset() { *this = LoggingLabel(); }
- bool Empty() const { return name.empty(); }
-
- VkDebugUtilsLabelEXT Export() const {
- auto out = lvl_init_struct<VkDebugUtilsLabelEXT>();
- out.pLabelName = name.c_str();
- std::copy(color.cbegin(), color.cend(), out.color);
- return out;
- };
-
- LoggingLabel() : name(), color({{0.f, 0.f, 0.f, 0.f}}) {}
- LoggingLabel(const VkDebugUtilsLabelEXT *label_info) {
- if (label_info && label_info->pLabelName) {
- name = label_info->pLabelName;
- std::copy_n(std::begin(label_info->color), 4, color.begin());
- } else {
- Reset();
- }
- }
-
- LoggingLabel(const LoggingLabel &) = default;
- LoggingLabel &operator=(const LoggingLabel &) = default;
- LoggingLabel &operator=(LoggingLabel &&) = default;
- LoggingLabel(LoggingLabel &&) = default;
-
- template <typename Name, typename Vec>
- LoggingLabel(Name &&name_, Vec &&vec_) : name(std::forward<Name>(name_)), color(std::forward<Vec>(vec_)) {}
-};
-
-struct LoggingLabelState {
- std::vector<LoggingLabel> labels;
- LoggingLabel insert_label;
-
- // Export the labels, but in reverse order since we want the most recent at the top.
- std::vector<VkDebugUtilsLabelEXT> Export() const {
- size_t count = labels.size() + (insert_label.Empty() ? 0 : 1);
- std::vector<VkDebugUtilsLabelEXT> out(count);
-
- if (!count) return out;
-
- size_t index = count - 1;
- if (!insert_label.Empty()) {
- out[index--] = insert_label.Export();
- }
- for (const auto &label : labels) {
- out[index--] = label.Export();
- }
- return out;
- }
-};
-
-static inline int string_sprintf(std::string *output, const char *fmt, ...);
+ float color[4];
+} LoggingLabelData;
typedef struct _debug_report_data {
VkLayerDbgFunctionNode *debug_callback_list{nullptr};
@@ -150,8 +90,8 @@ typedef struct _debug_report_data {
bool cmdBufLabelHasInsert{false};
std::unordered_map<uint64_t, std::string> debugObjectNameMap;
std::unordered_map<uint64_t, std::string> debugUtilsObjectNameMap;
- std::unordered_map<VkQueue, std::unique_ptr<LoggingLabelState>> debugUtilsQueueLabels;
- std::unordered_map<VkCommandBuffer, std::unique_ptr<LoggingLabelState>> debugUtilsCmdBufLabels;
+ std::unordered_map<VkQueue, std::vector<LoggingLabelData>> debugUtilsQueueLabels;
+ std::unordered_map<VkCommandBuffer, std::vector<LoggingLabelData>> debugUtilsCmdBufLabels;
// This mutex is defined as mutable since the normal usage for a debug report object is as 'const'. The mutable keyword allows
// the layers to continue this pattern, but also allows them to use/change this specific member for synchronization purposes.
mutable std::mutex debug_report_mutex;
@@ -192,24 +132,26 @@ typedef struct _debug_report_data {
return label;
}
- std::string FormatHandle(const char *handle_type_name, uint64_t handle) const {
- std::string handle_name = DebugReportGetUtilsObjectName(handle);
- if (handle_name.empty()) {
- handle_name = DebugReportGetMarkerObjectName(handle);
- }
-
- std::string ret;
- string_sprintf(&ret, "%s 0x%" PRIxLEAST64 "[%s]", handle_type_name, handle, handle_name.c_str());
- return ret;
+ template <typename HANDLE_T>
+ std::string FormatHandle(HANDLE_T *h) const {
+ return FormatHandle(HandleToUint64(h));
}
- std::string FormatHandle(const VulkanTypedHandle &handle) const {
- return FormatHandle(object_string[handle.type], handle.handle);
- }
+ std::string FormatHandle(uint64_t h) const {
+ char uint64_string[64];
+ sprintf(uint64_string, "0x%" PRIxLEAST64, h);
+ std::string ret = uint64_string;
- template <typename HANDLE_T>
- std::string FormatHandle(HANDLE_T handle) const {
- return FormatHandle(VkHandleInfo<HANDLE_T>::Typename(), HandleToUint64(handle));
+ std::string name = DebugReportGetUtilsObjectName(h);
+ if (name.empty()) {
+ name = DebugReportGetMarkerObjectName(h);
+ }
+ if (!name.empty()) {
+ ret.append("[");
+ ret.append(name);
+ ret.append("]");
+ }
+ return ret;
}
} debug_report_data;
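The reworked FormatHandle above prints the raw handle as hex and, when a name has been registered through the debug utils or debug marker extensions, appends it in brackets. Illustrative output only, assuming an invented handle value and name:

    // Sketch: formatting a handle for a log message.
    std::string text = report_data->FormatHandle(HandleToUint64(image));
    // -> "0x3a754f10[MainSwapchainImage]" if a debug name was registered
    // -> "0x3a754f10"                     otherwise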
@@ -412,7 +354,6 @@ static inline bool debug_log_msg(const debug_report_data *debug_data, VkFlags ms
object_name_info.objectType = convertDebugReportObjectToCoreObject(object_type);
object_name_info.objectHandle = (uint64_t)(uintptr_t)src_object;
object_name_info.pObjectName = NULL;
- std::string object_label = {};
callback_data.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT;
callback_data.pNext = NULL;
@@ -427,8 +368,8 @@ static inline bool debug_log_msg(const debug_report_data *debug_data, VkFlags ms
callback_data.objectCount = 1;
callback_data.pObjects = &object_name_info;
- std::vector<VkDebugUtilsLabelEXT> queue_labels;
- std::vector<VkDebugUtilsLabelEXT> cmd_buf_labels;
+ VkDebugUtilsLabelEXT *queue_labels = nullptr;
+ VkDebugUtilsLabelEXT *cmd_buf_labels = nullptr;
std::string new_debug_report_message = "";
std::ostringstream oss;
@@ -438,28 +379,58 @@ static inline bool debug_log_msg(const debug_report_data *debug_data, VkFlags ms
if (VK_OBJECT_TYPE_QUEUE == object_name_info.objectType) {
auto label_iter = debug_data->debugUtilsQueueLabels.find(reinterpret_cast<VkQueue>(src_object));
if (label_iter != debug_data->debugUtilsQueueLabels.end()) {
- queue_labels = label_iter->second->Export();
- callback_data.queueLabelCount = static_cast<uint32_t>(queue_labels.size());
- callback_data.pQueueLabels = queue_labels.empty() ? nullptr : queue_labels.data();
+ queue_labels = new VkDebugUtilsLabelEXT[label_iter->second.size()];
+ if (nullptr != queue_labels) {
+ // Record the labels, but record them in reverse order since we want the
+ // most recent at the top.
+ uint32_t label_size = static_cast<uint32_t>(label_iter->second.size());
+ uint32_t last_index = label_size - 1;
+ for (uint32_t label = 0; label < label_size; ++label) {
+ queue_labels[last_index - label].sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT;
+ queue_labels[last_index - label].pNext = nullptr;
+ queue_labels[last_index - label].pLabelName = label_iter->second[label].name.c_str();
+ queue_labels[last_index - label].color[0] = label_iter->second[label].color[0];
+ queue_labels[last_index - label].color[1] = label_iter->second[label].color[1];
+ queue_labels[last_index - label].color[2] = label_iter->second[label].color[2];
+ queue_labels[last_index - label].color[3] = label_iter->second[label].color[3];
+ }
+ callback_data.queueLabelCount = label_size;
+ callback_data.pQueueLabels = queue_labels;
+ }
}
// If this is a command buffer, add any command buffer labels to the callback data.
} else if (VK_OBJECT_TYPE_COMMAND_BUFFER == object_name_info.objectType) {
auto label_iter = debug_data->debugUtilsCmdBufLabels.find(reinterpret_cast<VkCommandBuffer>(src_object));
if (label_iter != debug_data->debugUtilsCmdBufLabels.end()) {
- cmd_buf_labels = label_iter->second->Export();
- callback_data.cmdBufLabelCount = static_cast<uint32_t>(cmd_buf_labels.size());
- callback_data.pCmdBufLabels = cmd_buf_labels.empty() ? nullptr : cmd_buf_labels.data();
+ cmd_buf_labels = new VkDebugUtilsLabelEXT[label_iter->second.size()];
+ if (nullptr != cmd_buf_labels) {
+ // Record the labels, but record them in reverse order since we want the
+ // most recent at the top.
+ uint32_t label_size = static_cast<uint32_t>(label_iter->second.size());
+ uint32_t last_index = label_size - 1;
+ for (uint32_t label = 0; label < label_size; ++label) {
+ cmd_buf_labels[last_index - label].sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT;
+ cmd_buf_labels[last_index - label].pNext = nullptr;
+ cmd_buf_labels[last_index - label].pLabelName = label_iter->second[label].name.c_str();
+ cmd_buf_labels[last_index - label].color[0] = label_iter->second[label].color[0];
+ cmd_buf_labels[last_index - label].color[1] = label_iter->second[label].color[1];
+ cmd_buf_labels[last_index - label].color[2] = label_iter->second[label].color[2];
+ cmd_buf_labels[last_index - label].color[3] = label_iter->second[label].color[3];
+ }
+ callback_data.cmdBufLabelCount = label_size;
+ callback_data.pCmdBufLabels = cmd_buf_labels;
+ }
}
}
// Look for any debug utils or marker names to use for this object
- object_label = debug_data->DebugReportGetUtilsObjectName(src_object);
- if (object_label.empty()) {
- object_label = debug_data->DebugReportGetMarkerObjectName(src_object);
+ std::string label = debug_data->DebugReportGetUtilsObjectName(src_object);
+ if (label.empty()) {
+ label = debug_data->DebugReportGetMarkerObjectName(src_object);
}
- if (!object_label.empty()) {
- object_name_info.pObjectName = object_label.c_str();
- oss << " (Name = " << object_label << " : Type = ";
+ if (!label.empty()) {
+ object_name_info.pObjectName = label.c_str();
+ oss << " (Name = " << label << " : Type = ";
} else {
oss << " (Type = ";
}
@@ -496,6 +467,13 @@ static inline bool debug_log_msg(const debug_report_data *debug_data, VkFlags ms
layer_dbg_node = layer_dbg_node->pNext;
}
+ if (nullptr != queue_labels) {
+ delete[] queue_labels;
+ }
+ if (nullptr != cmd_buf_labels) {
+ delete[] cmd_buf_labels;
+ }
+
return bail;
}
@@ -958,11 +936,11 @@ static inline int vasprintf(char **strp, char const *fmt, va_list ap) {
// needs to be logged
#ifndef WIN32
static inline bool log_msg(const debug_report_data *debug_data, VkFlags msg_flags, VkDebugReportObjectTypeEXT object_type,
- uint64_t src_object, const std::string &vuid_text, const char *format, ...)
+ uint64_t src_object, std::string vuid_text, const char *format, ...)
__attribute__((format(printf, 6, 7)));
#endif
static inline bool log_msg(const debug_report_data *debug_data, VkFlags msg_flags, VkDebugReportObjectTypeEXT object_type,
- uint64_t src_object, const std::string &vuid_text, const char *format, ...) {
+ uint64_t src_object, std::string vuid_text, const char *format, ...) {
if (!debug_data) return false;
std::unique_lock<std::mutex> lock(debug_data->debug_report_mutex);
VkFlags local_severity = 0;
@@ -1066,19 +1044,6 @@ static inline VKAPI_ATTR VkBool32 VKAPI_CALL DebugBreakCallback(VkFlags msgFlags
return false;
}
-static inline VKAPI_ATTR VkBool32 VKAPI_CALL MessengerBreakCallback(VkDebugUtilsMessageSeverityFlagBitsEXT message_severity,
- VkDebugUtilsMessageTypeFlagsEXT message_type,
- const VkDebugUtilsMessengerCallbackDataEXT *callback_data,
- void *user_data) {
-#ifdef WIN32
- DebugBreak();
-#else
- raise(SIGTRAP);
-#endif
-
- return false;
-}
-
static inline VKAPI_ATTR VkBool32 VKAPI_CALL messenger_log_callback(VkDebugUtilsMessageSeverityFlagBitsEXT message_severity,
VkDebugUtilsMessageTypeFlagsEXT message_type,
const VkDebugUtilsMessengerCallbackDataEXT *callback_data,
@@ -1142,110 +1107,149 @@ static inline VKAPI_ATTR VkBool32 VKAPI_CALL messenger_win32_debug_output_msg(
return false;
}
-template <typename Map>
-static LoggingLabelState *GetLoggingLabelState(Map *map, typename Map::key_type key, bool insert) {
- auto iter = map->find(key);
- LoggingLabelState *label_state = nullptr;
- if (iter == map->end()) {
- if (insert) {
- // Add a label state if not present
- label_state = new LoggingLabelState();
- auto inserted = map->insert(std::make_pair(key, std::unique_ptr<LoggingLabelState>(new LoggingLabelState())));
- assert(inserted.second);
- iter = inserted.first;
- label_state = iter->second.get();
- }
- } else {
- label_state = iter->second.get();
- }
- return label_state;
+// This utility converts from the VkDebugUtilsLabelEXT structure into the logging version of the structure.
+// In the logging version, we only record what we absolutely need to convey back to the callbacks.
+static inline void InsertLabelIntoLog(const VkDebugUtilsLabelEXT *utils_label, std::vector<LoggingLabelData> &log_vector) {
+ LoggingLabelData log_label_data = {};
+ log_label_data.name = utils_label->pLabelName;
+ log_label_data.color[0] = utils_label->color[0];
+ log_label_data.color[1] = utils_label->color[1];
+ log_label_data.color[2] = utils_label->color[2];
+ log_label_data.color[3] = utils_label->color[3];
+ log_vector.push_back(log_label_data);
}
static inline void BeginQueueDebugUtilsLabel(debug_report_data *report_data, VkQueue queue,
const VkDebugUtilsLabelEXT *label_info) {
std::unique_lock<std::mutex> lock(report_data->debug_report_mutex);
if (nullptr != label_info && nullptr != label_info->pLabelName) {
- auto *label_state = GetLoggingLabelState(&report_data->debugUtilsQueueLabels, queue, /* insert */ true);
- assert(label_state);
- label_state->labels.push_back(LoggingLabel(label_info));
-
- // TODO: Determine if this is the correct semantics for insert label vs. begin/end, preserving existing semantics for now
- label_state->insert_label.Reset();
+ auto label_iter = report_data->debugUtilsQueueLabels.find(queue);
+ if (label_iter == report_data->debugUtilsQueueLabels.end()) {
+ std::vector<LoggingLabelData> new_queue_labels;
+ InsertLabelIntoLog(label_info, new_queue_labels);
+ report_data->debugUtilsQueueLabels.insert({queue, new_queue_labels});
+ } else {
+ // If the last thing was a label insert, we need to pop it off of the label vector before any
+ // changes. This is because a label added with "vkQueueInsertDebugUtilsLabelEXT" is only a
+ // temporary location that exists until the next operation occurs. In this case, a new
+ // "vkQueueBeginDebugUtilsLabelEXT" has occurred erasing the previous inserted label.
+ if (report_data->queueLabelHasInsert) {
+ report_data->queueLabelHasInsert = false;
+ label_iter->second.pop_back();
+ }
+ InsertLabelIntoLog(label_info, label_iter->second);
+ }
}
}
static inline void EndQueueDebugUtilsLabel(debug_report_data *report_data, VkQueue queue) {
std::unique_lock<std::mutex> lock(report_data->debug_report_mutex);
- auto *label_state = GetLoggingLabelState(&report_data->debugUtilsQueueLabels, queue, /* insert */ false);
- if (label_state) {
- // Pop the normal item
- if (!label_state->labels.empty()) {
- label_state->labels.pop_back();
+ auto label_iter = report_data->debugUtilsQueueLabels.find(queue);
+ if (label_iter != report_data->debugUtilsQueueLabels.end()) {
+ // If the last thing was a label insert, we need to pop it off of the label vector before any
+ // changes. This is because a label added with "vkQueueInsertDebugUtilsLabelEXT" is only a
+ // temporary location that exists until the next operation occurs. In this case, a
+ // "vkQueueEndDebugUtilsLabelEXT" has occurred erasing the inserted label.
+ if (report_data->queueLabelHasInsert) {
+ report_data->queueLabelHasInsert = false;
+ label_iter->second.pop_back();
}
-
- // TODO: Determine if this is the correct semantics for insert label vs. begin/end, preserving existing semantics for now
- label_state->insert_label.Reset();
+ // Now pop the normal item
+ label_iter->second.pop_back();
}
}
static inline void InsertQueueDebugUtilsLabel(debug_report_data *report_data, VkQueue queue,
const VkDebugUtilsLabelEXT *label_info) {
std::unique_lock<std::mutex> lock(report_data->debug_report_mutex);
- auto *label_state = GetLoggingLabelState(&report_data->debugUtilsQueueLabels, queue, /* insert */ true);
-
- // TODO: Determine if this is the correct semantics for insert label vs. begin/end, preserving existing semantics for now
- label_state->insert_label = LoggingLabel(label_info);
+ if (nullptr != label_info && nullptr != label_info->pLabelName) {
+ auto label_iter = report_data->debugUtilsQueueLabels.find(queue);
+ if (label_iter == report_data->debugUtilsQueueLabels.end()) {
+ std::vector<LoggingLabelData> new_queue_labels;
+ InsertLabelIntoLog(label_info, new_queue_labels);
+ report_data->debugUtilsQueueLabels.insert({queue, new_queue_labels});
+ } else {
+ // If the last thing was a label insert, we need to pop it off of the label vector before any
+ // changes. This is because a label added with "vkQueueInsertDebugUtilsLabelEXT" is only a
+ // temporary location that exists until the next operation occurs. In this case, a new
+ // "vkQueueInsertDebugUtilsLabelEXT" has occurred erasing the previous inserted label.
+ if (report_data->queueLabelHasInsert) {
+ label_iter->second.pop_back();
+ }
+ // Insert this new label and mark it as one that has been "inserted" so we can remove it on
+ // the next queue label operation.
+ InsertLabelIntoLog(label_info, label_iter->second);
+ report_data->queueLabelHasInsert = true;
+ }
+ }
}
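The queue label bookkeeping above treats a label added by vkQueueInsertDebugUtilsLabelEXT as transient: it is popped again by the next begin, insert, or end call, while begin/end maintain the ordinary stack. A hypothetical sequence (label names invented) showing how debugUtilsQueueLabels evolves:

    // Sketch of the insert-vs-begin/end semantics implemented above.
    VkDebugUtilsLabelEXT frame  = {VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT, nullptr, "Frame",  {0.f, 0.f, 0.f, 1.f}};
    VkDebugUtilsLabelEXT marker = {VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT, nullptr, "Marker", {1.f, 0.f, 0.f, 1.f}};

    BeginQueueDebugUtilsLabel(report_data, queue, &frame);    // labels: {Frame}
    InsertQueueDebugUtilsLabel(report_data, queue, &marker);  // labels: {Frame, Marker}, queueLabelHasInsert = true
    EndQueueDebugUtilsLabel(report_data, queue);              // pops transient Marker, then Frame -> labels: {}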
static inline void BeginCmdDebugUtilsLabel(debug_report_data *report_data, VkCommandBuffer command_buffer,
const VkDebugUtilsLabelEXT *label_info) {
std::unique_lock<std::mutex> lock(report_data->debug_report_mutex);
if (nullptr != label_info && nullptr != label_info->pLabelName) {
- auto *label_state = GetLoggingLabelState(&report_data->debugUtilsCmdBufLabels, command_buffer, /* insert */ true);
- assert(label_state);
- label_state->labels.push_back(LoggingLabel(label_info));
-
- // TODO: Determine if this is the correct semantics for insert label vs. begin/end, preserving existing semantics for now
- label_state->insert_label.Reset();
+ auto label_iter = report_data->debugUtilsCmdBufLabels.find(command_buffer);
+ if (label_iter == report_data->debugUtilsCmdBufLabels.end()) {
+ std::vector<LoggingLabelData> new_cmdbuf_labels;
+ InsertLabelIntoLog(label_info, new_cmdbuf_labels);
+ report_data->debugUtilsCmdBufLabels.insert({command_buffer, new_cmdbuf_labels});
+ } else {
+ // If the last thing was a label insert, we need to pop it off of the label vector before any
+ // changes. This is because a label added with "vkCmdInsertDebugUtilsLabelEXT" is only a
+ // temporary location that exists until the next operation occurs. In this case, a
+ // "vkCmdBeginDebugUtilsLabelEXT" has occurred erasing the inserted label.
+ if (report_data->cmdBufLabelHasInsert) {
+ report_data->cmdBufLabelHasInsert = false;
+ label_iter->second.pop_back();
+ }
+ InsertLabelIntoLog(label_info, label_iter->second);
+ }
}
}
static inline void EndCmdDebugUtilsLabel(debug_report_data *report_data, VkCommandBuffer command_buffer) {
std::unique_lock<std::mutex> lock(report_data->debug_report_mutex);
- auto *label_state = GetLoggingLabelState(&report_data->debugUtilsCmdBufLabels, command_buffer, /* insert */ false);
- if (label_state) {
- // Pop the normal item
- if (!label_state->labels.empty()) {
- label_state->labels.pop_back();
+ auto label_iter = report_data->debugUtilsCmdBufLabels.find(command_buffer);
+ if (label_iter != report_data->debugUtilsCmdBufLabels.end()) {
+ // If the last thing was a label insert, we need to pop it off of the label vector before any
+ // changes. This is because a label added with "vkCmdInsertDebugUtilsLabelEXT" is only a
+ // temporary location that exists until the next operation occurs. In this case, a
+ // "vkCmdEndDebugUtilsLabelEXT" has occurred erasing the inserted label.
+ if (report_data->cmdBufLabelHasInsert) {
+ report_data->cmdBufLabelHasInsert = false;
+ label_iter->second.pop_back();
+ }
+ // Guard against unbalanced markers.
+ if (label_iter->second.size() > 0) {
+ // Now pop the normal item
+ label_iter->second.pop_back();
}
-
- // TODO: Determine if this is the correct semantics for insert label vs. begin/end, preserving existing semantics for now
- label_state->insert_label.Reset();
}
}
static inline void InsertCmdDebugUtilsLabel(debug_report_data *report_data, VkCommandBuffer command_buffer,
const VkDebugUtilsLabelEXT *label_info) {
std::unique_lock<std::mutex> lock(report_data->debug_report_mutex);
- auto *label_state = GetLoggingLabelState(&report_data->debugUtilsCmdBufLabels, command_buffer, /* insert */ true);
- assert(label_state);
-
- // TODO: Determine if this is the correct semantics for insert label vs. begin/end, preserving existing semantics for now
- label_state->insert_label = LoggingLabel(label_info);
-}
-
-// Current tracking beyond a single command buffer scope is incorrect, and even when it is we need to be able to clean up
-static inline void ResetCmdDebugUtilsLabel(debug_report_data *report_data, VkCommandBuffer command_buffer) {
- std::unique_lock<std::mutex> lock(report_data->debug_report_mutex);
- auto *label_state = GetLoggingLabelState(&report_data->debugUtilsCmdBufLabels, command_buffer, /* insert */ false);
- if (label_state) {
- label_state->labels.clear();
- label_state->insert_label.Reset();
+ if (nullptr != label_info && nullptr != label_info->pLabelName) {
+ auto label_iter = report_data->debugUtilsCmdBufLabels.find(command_buffer);
+ if (label_iter == report_data->debugUtilsCmdBufLabels.end()) {
+ std::vector<LoggingLabelData> new_cmdbuf_labels;
+ InsertLabelIntoLog(label_info, new_cmdbuf_labels);
+ report_data->debugUtilsCmdBufLabels.insert({command_buffer, new_cmdbuf_labels});
+ } else {
+ // If the last thing was a label insert, we need to pop it off of the label vector before any
+ // changes. This is because a label added with "vkCmdInsertDebugUtilsLabelEXT" is only a
+ // temporary location that exists until the next operation occurs. In this case, a new
+ // "vkCmdInsertDebugUtilsLabelEXT" has occurred erasing the previous inserted label.
+ if (report_data->cmdBufLabelHasInsert) {
+ label_iter->second.pop_back();
+ }
+ // Insert this new label and mark it as one that has been "inserted" so we can remove it on
+ // the next command buffer label operation.
+ InsertLabelIntoLog(label_info, label_iter->second);
+ report_data->cmdBufLabelHasInsert = true;
+ }
}
}
-static inline void EraseCmdDebugUtilsLabel(debug_report_data *report_data, VkCommandBuffer command_buffer) {
- report_data->debugUtilsCmdBufLabels.erase(command_buffer);
-}
-
#endif // LAYER_LOGGING_H
diff --git a/layers/vk_layer_settings.txt b/layers/vk_layer_settings.txt
index 62394534c..4998d1c2f 100644
--- a/layers/vk_layer_settings.txt
+++ b/layers/vk_layer_settings.txt
@@ -6,8 +6,9 @@
# "<LayerIdentifier>.<SettingName> = <SettingValue>"
#
# <LayerIdentifier> is typically the official layer name, minus the VK_LAYER
-# prefix and all lower-camel-case -- i.e., for VK_LAYER_KHRONOS_validation,
-# the layer identifier is 'khronos_validation'.
+# prefix and all lower-camel-case -- i.e., for VK_LAYER_LUNARG_core_validation,
+# the layer identifier is 'lunarg_core_validation', and for
+# VK_LAYER_GOOGLE_threading the layer identifier is 'google_threading'.
#
################################################################################
################################################################################
@@ -52,45 +53,6 @@
# filename is specified or if filename has invalid path, then stdout
# is used by default.
#
-# DISABLES:
-# =============
-# <LayerIdentifier>.disables : comma separated list of feature/flag/disable enums
-# These can include VkValidationFeatureDisableEXT flags defined in the Vulkan
-# specification, or ValidationCheckDisables enums defined in chassis.h.
-# Effects of setting these flags are described in the specification (or the
-# source code in the case of the ValidationCheckDisables). The most useful
-# flags are briefly described here:
-# VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT - disables handle wrapping.
-# Disable this feature if you are running into crashes when authoring new extensions
-# or developing new Vulkan objects/structures
-# VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT - disables thread checks. It may
-# help with performance to run with thread-checking disabled most of the time,
-# enabling it occasionally for a quick sanity check, or when debugging difficult
-# application behaviors.
-# VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT - disables the main, heavy-duty
-# validation checks. This may be valuable early in the development cycle to
-# reduce validation output while correcting parameter/object usage errors.
-# VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT - disables stateless parameter
-# checks. This may not always be necessary late in a development cycle.
-# VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT - disables object tracking.
-# This may not always be necessary late in a development cycle.
-#
-# ENABLES:
-# =============
-# <LayerIdentifier>.enables : comma separated list of feature enable enums
-# These can include VkValidationFeatureEnableEXT flags defined in the Vulkan
-# specification, where their effects are described. The most useful
-# flags are briefly described here:
-# VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT - enables intrusive GPU-assisted
-# shader validation in core/khronos validation layers
-#
-
-# VK_LAYER_KHRONOS_validation Settings
-khronos_validation.debug_action = VK_DBG_LAYER_ACTION_LOG_MSG
-khronos_validation.report_flags = error,warn,perf
-khronos_validation.log_filename = stdout
-# Example entry showing how to disable threading checks and validation at DestroyPipeline time
-#khronos_validation.disables = VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT,VALIDATION_CHECK_DISABLE_DESTROY_PIPELINE
# VK_LAYER_LUNARG_core_validation Settings
lunarg_core_validation.debug_action = VK_DBG_LAYER_ACTION_LOG_MSG
diff --git a/layers/vk_layer_utils.cpp b/layers/vk_layer_utils.cpp
index 44d8f7596..ed9b0c10c 100644
--- a/layers/vk_layer_utils.cpp
+++ b/layers/vk_layer_utils.cpp
@@ -1,6 +1,6 @@
-/* Copyright (c) 2015-2016, 2019 The Khronos Group Inc.
- * Copyright (c) 2015-2016, 2019 Valve Corporation
- * Copyright (c) 2015-2016, 2019 LunarG, Inc.
+/* Copyright (c) 2015-2016 The Khronos Group Inc.
+ * Copyright (c) 2015-2016 Valve Corporation
+ * Copyright (c) 2015-2016 LunarG, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -23,7 +23,6 @@
#include <string>
#include <vector>
#include <map>
-
#include "vulkan/vulkan.h"
#include "vk_layer_config.h"
#include "vk_layer_utils.h"
@@ -143,15 +142,6 @@ VK_LAYER_EXPORT void layer_debug_messenger_actions(debug_report_data *report_dat
layer_create_messenger_callback(report_data, default_layer_callback, &dbgCreateInfo, pAllocator, &messenger);
logging_messenger.push_back(messenger);
}
-
- messenger = VK_NULL_HANDLE;
-
- if (debug_action & VK_DBG_LAYER_ACTION_BREAK) {
- dbgCreateInfo.pfnUserCallback = MessengerBreakCallback;
- dbgCreateInfo.pUserData = NULL;
- layer_create_messenger_callback(report_data, default_layer_callback, &dbgCreateInfo, pAllocator, &messenger);
- logging_messenger.push_back(messenger);
- }
}
// NOTE: This function has been deprecated, and the above function (layer_debug_messenger_actions) should be
diff --git a/layers/vk_layer_utils.h b/layers/vk_layer_utils.h
index b08d30de2..41bc7fcd7 100644
--- a/layers/vk_layer_utils.h
+++ b/layers/vk_layer_utils.h
@@ -1,6 +1,6 @@
-/* Copyright (c) 2015-2017, 2019 The Khronos Group Inc.
- * Copyright (c) 2015-2017, 2019 Valve Corporation
- * Copyright (c) 2015-2017, 2019 LunarG, Inc.
+/* Copyright (c) 2015-2017 The Khronos Group Inc.
+ * Copyright (c) 2015-2017 Valve Corporation
+ * Copyright (c) 2015-2017 LunarG, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -20,15 +20,10 @@
*/
#pragma once
-
-#include <cassert>
-#include <cstddef>
-#include <functional>
#include <stdbool.h>
#include <string>
#include <vector>
#include <set>
-#include "cast_utils.h"
#include "vk_format_utils.h"
#include "vk_layer_logging.h"
@@ -159,161 +154,3 @@ static inline int u_ffs(int val) {
#ifdef __cplusplus
}
#endif
-
-// shared_mutex support added in MSVC 2015 update 2
-#if defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && NTDDI_VERSION > NTDDI_WIN10_RS2
-#include <shared_mutex>
-#endif
-
-// Limited concurrent_unordered_map that supports internally-synchronized
-// insert/erase/access. Splits locking across N buckets and uses shared_mutex
-// for read/write locking. Iterators are not supported. The following
-// operations are supported:
-//
-// insert_or_assign: Insert a new element or update an existing element.
-// insert: Insert a new element and return whether it was inserted.
-// erase: Remove an element.
-// contains: Returns true if the key is in the map.
-// find: Returns != end() if found, value is in ret->second.
-// pop: Erases and returns the erased value if found.
-//
-// find/end: find returns a vaguely iterator-like type that can be compared to
-// end and can use iter->second to retrieve the reference. This is to ease porting
-// for existing code that combines the existence check and lookup in a single
-// operation (and thus a single lock). i.e.:
-//
-// auto iter = map.find(key);
-// if (iter != map.end()) {
-// T t = iter->second;
-// ...
-//
-// snapshot: Return an array of elements (key, value pairs) that satisfy an optional
-// predicate. This can be used as a substitute for iterators in exceptional cases.
-template <typename Key, typename T, int BUCKETSLOG2 = 2>
-class vl_concurrent_unordered_map {
- public:
- void insert_or_assign(const Key &key, const T &value) {
- uint32_t h = ConcurrentMapHashObject(key);
- write_lock_guard_t lock(locks[h].lock);
- maps[h][key] = value;
- }
-
- bool insert(const Key &key, const T &value) {
- uint32_t h = ConcurrentMapHashObject(key);
- write_lock_guard_t lock(locks[h].lock);
- auto ret = maps[h].insert(typename std::unordered_map<Key, T>::value_type(key, value));
- return ret.second;
- }
-
- // returns size_type
- size_t erase(const Key &key) {
- uint32_t h = ConcurrentMapHashObject(key);
- write_lock_guard_t lock(locks[h].lock);
- return maps[h].erase(key);
- }
-
- bool contains(const Key &key) {
- uint32_t h = ConcurrentMapHashObject(key);
- read_lock_guard_t lock(locks[h].lock);
- return maps[h].count(key) != 0;
- }
-
- // type returned by find() and end().
- class FindResult {
- public:
- FindResult(bool a, T b) : result(a, std::move(b)) {}
-
- // == and != only support comparing against end()
- bool operator==(const FindResult &other) const {
- if (result.first == false && other.result.first == false) {
- return true;
- }
- return false;
- }
- bool operator!=(const FindResult &other) const { return !(*this == other); }
-
- // Make -> act kind of like an iterator.
- std::pair<bool, T> *operator->() { return &result; }
- const std::pair<bool, T> *operator->() const { return &result; }
-
- private:
- // (found, reference to element)
- std::pair<bool, T> result;
- };
-
- // find()/end() return a FindResult containing a copy of the value. For end(),
- // return a default value.
- FindResult end() { return FindResult(false, T()); }
-
- FindResult find(const Key &key) {
- uint32_t h = ConcurrentMapHashObject(key);
- read_lock_guard_t lock(locks[h].lock);
-
- auto itr = maps[h].find(key);
- bool found = itr != maps[h].end();
-
- if (found) {
- return FindResult(true, itr->second);
- } else {
- return end();
- }
- }
-
- FindResult pop(const Key &key) {
- uint32_t h = ConcurrentMapHashObject(key);
- write_lock_guard_t lock(locks[h].lock);
-
- auto itr = maps[h].find(key);
- bool found = itr != maps[h].end();
-
- if (found) {
- auto ret = std::move(FindResult(true, itr->second));
- maps[h].erase(itr);
- return ret;
- } else {
- return end();
- }
- }
-
- std::vector<std::pair<const Key, T>> snapshot(std::function<bool(T)> f = nullptr) {
- std::vector<std::pair<const Key, T>> ret;
- for (int h = 0; h < BUCKETS; ++h) {
- read_lock_guard_t lock(locks[h].lock);
- for (auto j : maps[h]) {
- if (!f || f(j.second)) {
- ret.push_back(j);
- }
- }
- }
- return ret;
- }
-
- private:
- static const int BUCKETS = (1 << BUCKETSLOG2);
-// shared_mutex support added in MSVC 2015 update 2
-#if defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && NTDDI_VERSION > NTDDI_WIN10_RS2
-#include <shared_mutex>
- typedef std::shared_mutex lock_t;
- typedef std::shared_lock<lock_t> read_lock_guard_t;
- typedef std::unique_lock<lock_t> write_lock_guard_t;
-#else
- typedef std::mutex lock_t;
- typedef std::unique_lock<lock_t> read_lock_guard_t;
- typedef std::unique_lock<lock_t> write_lock_guard_t;
-#endif
-
- std::unordered_map<Key, T> maps[BUCKETS];
- struct {
- lock_t lock;
- // Put each lock on its own cache line to avoid false cache line sharing.
- char padding[(-int(sizeof(lock_t))) & 63];
- } locks[BUCKETS];
-
- uint32_t ConcurrentMapHashObject(const Key &object) const {
- uint64_t u64 = (uint64_t)(uintptr_t)object;
- uint32_t hash = (uint32_t)(u64 >> 32) + (uint32_t)u64;
- hash ^= (hash >> BUCKETSLOG2) ^ (hash >> (2 * BUCKETSLOG2));
- hash &= (BUCKETS - 1);
- return hash;
- }
-};
diff --git a/layers/vk_mem_alloc.h b/layers/vk_mem_alloc.h
deleted file mode 100644
index 2c13549f0..000000000
--- a/layers/vk_mem_alloc.h
+++ /dev/null
@@ -1,16813 +0,0 @@
-//
-// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a copy
-// of this software and associated documentation files (the "Software"), to deal
-// in the Software without restriction, including without limitation the rights
-// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-// copies of the Software, and to permit persons to whom the Software is
-// furnished to do so, subject to the following conditions:
-//
-// The above copyright notice and this permission notice shall be included in
-// all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-// THE SOFTWARE.
-//
-
-// clang-format off
-//
-// Source: https://github.com/GPUOpen-LibrariesAndSDKs/VulkanMemoryAllocator
-// THIS FILE HAS BEEN CHANGED FROM THE ORIGINAL VERSION
-//
-// Change Log:
-// 3/27/19 - Make changes to suppress warnings from GCC
-// 4/18/19 - Make changes to suppress warnings from clang
-// 6/05/19 - Make changes to suppress warnings from clang 3.8.0
-// 6/05/19 - Make changes to suppress more warnings from GCC
-// 8/09/19 - Make changes to suppress dead code warnings (from upstream master branch)
-//
-
-#ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
-#define AMD_VULKAN_MEMORY_ALLOCATOR_H
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-/** \mainpage Vulkan Memory Allocator
-
-<b>Version 2.2.0</b> (2018-12-13)
-
-Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. \n
-License: MIT
-
-Documentation of all members: vk_mem_alloc.h
-
-\section main_table_of_contents Table of contents
-
-- <b>User guide</b>
- - \subpage quick_start
- - [Project setup](@ref quick_start_project_setup)
- - [Initialization](@ref quick_start_initialization)
- - [Resource allocation](@ref quick_start_resource_allocation)
- - \subpage choosing_memory_type
- - [Usage](@ref choosing_memory_type_usage)
- - [Required and preferred flags](@ref choosing_memory_type_required_preferred_flags)
- - [Explicit memory types](@ref choosing_memory_type_explicit_memory_types)
- - [Custom memory pools](@ref choosing_memory_type_custom_memory_pools)
- - \subpage memory_mapping
- - [Mapping functions](@ref memory_mapping_mapping_functions)
- - [Persistently mapped memory](@ref memory_mapping_persistently_mapped_memory)
- - [Cache control](@ref memory_mapping_cache_control)
- - [Finding out if memory is mappable](@ref memory_mapping_finding_if_memory_mappable)
- - \subpage custom_memory_pools
- - [Choosing memory type index](@ref custom_memory_pools_MemTypeIndex)
- - [Linear allocation algorithm](@ref linear_algorithm)
- - [Free-at-once](@ref linear_algorithm_free_at_once)
- - [Stack](@ref linear_algorithm_stack)
- - [Double stack](@ref linear_algorithm_double_stack)
- - [Ring buffer](@ref linear_algorithm_ring_buffer)
- - [Buddy allocation algorithm](@ref buddy_algorithm)
- - \subpage defragmentation
- - [Defragmenting CPU memory](@ref defragmentation_cpu)
- - [Defragmenting GPU memory](@ref defragmentation_gpu)
- - [Additional notes](@ref defragmentation_additional_notes)
- - [Writing custom allocation algorithm](@ref defragmentation_custom_algorithm)
- - \subpage lost_allocations
- - \subpage statistics
- - [Numeric statistics](@ref statistics_numeric_statistics)
- - [JSON dump](@ref statistics_json_dump)
- - \subpage allocation_annotation
- - [Allocation user data](@ref allocation_user_data)
- - [Allocation names](@ref allocation_names)
- - \subpage debugging_memory_usage
- - [Memory initialization](@ref debugging_memory_usage_initialization)
- - [Margins](@ref debugging_memory_usage_margins)
- - [Corruption detection](@ref debugging_memory_usage_corruption_detection)
- - \subpage record_and_replay
-- \subpage usage_patterns
- - [Simple patterns](@ref usage_patterns_simple)
- - [Advanced patterns](@ref usage_patterns_advanced)
-- \subpage configuration
- - [Pointers to Vulkan functions](@ref config_Vulkan_functions)
- - [Custom host memory allocator](@ref custom_memory_allocator)
- - [Device memory allocation callbacks](@ref allocation_callbacks)
- - [Device heap memory limit](@ref heap_memory_limit)
- - \subpage vk_khr_dedicated_allocation
-- \subpage general_considerations
- - [Thread safety](@ref general_considerations_thread_safety)
- - [Validation layer warnings](@ref general_considerations_validation_layer_warnings)
- - [Allocation algorithm](@ref general_considerations_allocation_algorithm)
- - [Features not supported](@ref general_considerations_features_not_supported)
-
-\section main_see_also See also
-
-- [Product page on GPUOpen](https://gpuopen.com/gaming-product/vulkan-memory-allocator/)
-- [Source repository on GitHub](https://github.com/GPUOpen-LibrariesAndSDKs/VulkanMemoryAllocator)
-
-
-
-
-\page quick_start Quick start
-
-\section quick_start_project_setup Project setup
-
-Vulkan Memory Allocator comes in form of a single header file.
-You don't need to build it as a separate library project.
-You can add this file directly to your project and submit it to code repository next to your other source files.
-
-"Single header" doesn't mean that everything is contained in C/C++ declarations,
-like it tends to be in case of inline functions or C++ templates.
-It means that implementation is bundled with interface in a single file and needs to be extracted using preprocessor macro.
-If you don't do it properly, you will get linker errors.
-
-To do it properly:
-
--# Include "vk_mem_alloc.h" file in each CPP file where you want to use the library.
- This includes declarations of all members of the library.
--# In exactly one CPP file define the following macro before this include.
- It also enables internal definitions.
-
-\code
-#define VMA_IMPLEMENTATION
-#include "vk_mem_alloc.h"
-\endcode
-
-It may be a good idea to create dedicated CPP file just for this purpose.
-
-Note on language: This library is written in C++, but has C-compatible interface.
-Thus you can include and use vk_mem_alloc.h in C or C++ code, but full
-implementation with `VMA_IMPLEMENTATION` macro must be compiled as C++, NOT as C.
-
-Please note that this library includes header `<vulkan/vulkan.h>`, which in turn
-includes `<windows.h>` on Windows. If you need some specific macros defined
-before including these headers (like `WIN32_LEAN_AND_MEAN` or
-`WINVER` for Windows, `VK_USE_PLATFORM_WIN32_KHR` for Vulkan), you must define
-them before every `#include` of this library.
-
-
-\section quick_start_initialization Initialization
-
-At program startup:
-
--# Initialize Vulkan to have `VkPhysicalDevice` and `VkDevice` object.
--# Fill VmaAllocatorCreateInfo structure and create #VmaAllocator object by
- calling vmaCreateAllocator().
-
-\code
-VmaAllocatorCreateInfo allocatorInfo = {};
-allocatorInfo.physicalDevice = physicalDevice;
-allocatorInfo.device = device;
-
-VmaAllocator allocator;
-vmaCreateAllocator(&allocatorInfo, &allocator);
-\endcode
-
-\section quick_start_resource_allocation Resource allocation
-
-When you want to create a buffer or image:
-
--# Fill `VkBufferCreateInfo` / `VkImageCreateInfo` structure.
--# Fill VmaAllocationCreateInfo structure.
--# Call vmaCreateBuffer() / vmaCreateImage() to get `VkBuffer`/`VkImage` with memory
- already allocated and bound to it.
-
-\code
-VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
-bufferInfo.size = 65536;
-bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
-
-VmaAllocationCreateInfo allocInfo = {};
-allocInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
-
-VkBuffer buffer;
-VmaAllocation allocation;
-vmaCreateBuffer(allocator, &bufferInfo, &allocInfo, &buffer, &allocation, nullptr);
-\endcode
-
-Don't forget to destroy your objects when no longer needed:
-
-\code
-vmaDestroyBuffer(allocator, buffer, allocation);
-vmaDestroyAllocator(allocator);
-\endcode
-
-
-\page choosing_memory_type Choosing memory type
-
-Physical devices in Vulkan support various combinations of memory heaps and
-types. Help with choosing correct and optimal memory type for your specific
-resource is one of the key features of this library. You can use it by filling
-appropriate members of VmaAllocationCreateInfo structure, as described below.
-You can also combine multiple methods.
-
--# If you just want to find memory type index that meets your requirements, you
- can use function vmaFindMemoryTypeIndex().
--# If you want to allocate a region of device memory without association with any
- specific image or buffer, you can use function vmaAllocateMemory(). Usage of
- this function is not recommended and usually not needed.
--# If you already have a buffer or an image created, you want to allocate memory
- for it and then you will bind it yourself, you can use function
- vmaAllocateMemoryForBuffer(), vmaAllocateMemoryForImage().
- For binding you should use functions: vmaBindBufferMemory(), vmaBindImageMemory().
--# If you want to create a buffer or an image, allocate memory for it and bind
- them together, all in one call, you can use function vmaCreateBuffer(),
- vmaCreateImage(). This is the recommended way to use this library.
-
-When using 3. or 4., the library internally queries Vulkan for memory types
-supported for that buffer or image (function `vkGetBufferMemoryRequirements()`)
-and uses only one of these types.
-
-If no memory type can be found that meets all the requirements, these functions
-return `VK_ERROR_FEATURE_NOT_PRESENT`.
-
-You can leave VmaAllocationCreateInfo structure completely filled with zeros.
-It means no requirements are specified for memory type.
-It is valid, although not very useful.
-
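-As a minimal sketch of method 1 above (assuming an already created `allocator` and the
-`memoryTypeBits` obtained from `vkGetBufferMemoryRequirements()`), finding a suitable
-memory type index could look like this:
-
-\code
-VmaAllocationCreateInfo allocCreateInfo = {};
-allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
-
-uint32_t memTypeIndex;
-// Returns VK_ERROR_FEATURE_NOT_PRESENT if no suitable memory type exists.
-vmaFindMemoryTypeIndex(allocator, memoryTypeBits, &allocCreateInfo, &memTypeIndex);
-\endcode
-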
-\section choosing_memory_type_usage Usage
-
-The easiest way to specify memory requirements is to fill member
-VmaAllocationCreateInfo::usage using one of the values of enum #VmaMemoryUsage.
-It defines high level, common usage types.
-For more details, see description of this enum.
-
-For example, if you want to create a uniform buffer that will be filled using
-transfer only once or infrequently and used for rendering every frame, you can
-do it using following code:
-
-\code
-VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
-bufferInfo.size = 65536;
-bufferInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
-
-VmaAllocationCreateInfo allocInfo = {};
-allocInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
-
-VkBuffer buffer;
-VmaAllocation allocation;
-vmaCreateBuffer(allocator, &bufferInfo, &allocInfo, &buffer, &allocation, nullptr);
-\endcode
-
-\section choosing_memory_type_required_preferred_flags Required and preferred flags
-
-You can specify more detailed requirements by filling members
-VmaAllocationCreateInfo::requiredFlags and VmaAllocationCreateInfo::preferredFlags
-with a combination of bits from enum `VkMemoryPropertyFlags`. For example,
-if you want to create a buffer that will be persistently mapped on host (so it
-must be `HOST_VISIBLE`) and preferably will also be `HOST_COHERENT` and `HOST_CACHED`,
-use following code:
-
-\code
-VmaAllocationCreateInfo allocInfo = {};
-allocInfo.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
-allocInfo.preferredFlags = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
-allocInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
-
-VkBuffer buffer;
-VmaAllocation allocation;
-vmaCreateBuffer(allocator, &bufferInfo, &allocInfo, &buffer, &allocation, nullptr);
-\endcode
-
-A memory type is chosen that has all the required flags and as many preferred
-flags set as possible.
-
-If you use VmaAllocationCreateInfo::usage, it is just internally converted to
-a set of required and preferred flags.
-
-\section choosing_memory_type_explicit_memory_types Explicit memory types
-
-If you inspected memory types available on the physical device and you have
-a preference for memory types that you want to use, you can fill member
-VmaAllocationCreateInfo::memoryTypeBits. It is a bit mask, where each bit set
-means that a memory type with that index is allowed to be used for the
-allocation. Special value 0, just like `UINT32_MAX`, means there are no
-restrictions to memory type index.
-
-Please note that this member is NOT just a memory type index.
-Still you can use it to choose just one, specific memory type.
-For example, if you already determined that your buffer should be created in
-memory type 2, use following code:
-
-\code
-uint32_t memoryTypeIndex = 2;
-
-VmaAllocationCreateInfo allocInfo = {};
-allocInfo.memoryTypeBits = 1u << memoryTypeIndex;
-
-VkBuffer buffer;
-VmaAllocation allocation;
-vmaCreateBuffer(allocator, &bufferInfo, &allocInfo, &buffer, &allocation, nullptr);
-\endcode
-
-\section choosing_memory_type_custom_memory_pools Custom memory pools
-
-If you allocate from custom memory pool, all the ways of specifying memory
-requirements described above are not applicable and the aforementioned members
-of VmaAllocationCreateInfo structure are ignored. Memory type is selected
-explicitly when creating the pool and then used to make all the allocations from
-that pool. For further details, see \ref custom_memory_pools.
-
-
-\page memory_mapping Memory mapping
-
-To "map memory" in Vulkan means to obtain a CPU pointer to `VkDeviceMemory`,
-to be able to read from it or write to it in CPU code.
-Mapping is possible only for memory allocated from a memory type that has the
-`VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT` flag.
-Functions `vkMapMemory()`, `vkUnmapMemory()` are designed for this purpose.
-You can use them directly with memory allocated by this library,
-but it is not recommended because of following issue:
-Mapping the same `VkDeviceMemory` block multiple times is illegal - only one mapping at a time is allowed.
-This includes mapping disjoint regions. Mapping is not reference-counted internally by Vulkan.
-Because of this, Vulkan Memory Allocator provides following facilities:
-
-\section memory_mapping_mapping_functions Mapping functions
-
-The library provides following functions for mapping of a specific #VmaAllocation: vmaMapMemory(), vmaUnmapMemory().
-They are safer and more convenient to use than standard Vulkan functions.
-You can map an allocation multiple times simultaneously - mapping is reference-counted internally.
-You can also map different allocations simultaneously regardless of whether they use the same `VkDeviceMemory` block.
-The way it's implemented is that the library always maps entire memory block, not just region of the allocation.
-For further details, see description of vmaMapMemory() function.
-Example:
-
-\code
-// Having these objects initialized:
-
-struct ConstantBuffer
-{
- ...
-};
-ConstantBuffer constantBufferData;
-
-VmaAllocator allocator;
-VkBuffer constantBuffer;
-VmaAllocation constantBufferAllocation;
-
-// You can map and fill your buffer using following code:
-
-void* mappedData;
-vmaMapMemory(allocator, constantBufferAllocation, &mappedData);
-memcpy(mappedData, &constantBufferData, sizeof(constantBufferData));
-vmaUnmapMemory(allocator, constantBufferAllocation);
-\endcode
-
-When mapping, you may see a warning from Vulkan validation layer similar to this one:
-
-<i>Mapping an image with layout VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL can result in undefined behavior if this memory is used by the device. Only GENERAL or PREINITIALIZED should be used.</i>
-
-It happens because the library maps entire `VkDeviceMemory` block, where different
-types of images and buffers may end up together, especially on GPUs with unified memory like Intel.
-You can safely ignore it if you are sure you access only memory of the intended
-object that you wanted to map.
-
-
-\section memory_mapping_persistently_mapped_memory Persistently mapped memory
-
-Keeping your memory persistently mapped is generally OK in Vulkan.
-You don't need to unmap it before using its data on the GPU.
-The library provides a special feature designed for that:
-Allocations made with #VMA_ALLOCATION_CREATE_MAPPED_BIT flag set in
-VmaAllocationCreateInfo::flags stay mapped all the time,
-so you can just access CPU pointer to it any time
-without a need to call any "map" or "unmap" function.
-Example:
-
-\code
-VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
-bufCreateInfo.size = sizeof(ConstantBuffer);
-bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
-
-VmaAllocationCreateInfo allocCreateInfo = {};
-allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
-allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
-
-VkBuffer buf;
-VmaAllocation alloc;
-VmaAllocationInfo allocInfo;
-vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
-
-// Buffer is already mapped. You can access its memory.
-memcpy(allocInfo.pMappedData, &constantBufferData, sizeof(constantBufferData));
-\endcode
-
-There are some exceptions though, when you should consider mapping memory only for a short period of time:
-
-- When operating system is Windows 7 or 8.x (Windows 10 is not affected because it uses WDDM2),
- device is discrete AMD GPU,
- and memory type is the special 256 MiB pool of `DEVICE_LOCAL + HOST_VISIBLE` memory
- (selected when you use #VMA_MEMORY_USAGE_CPU_TO_GPU),
- then whenever a memory block allocated from this memory type stays mapped
- for the time of any call to `vkQueueSubmit()` or `vkQueuePresentKHR()`, this
- block is migrated by WDDM to system RAM, which degrades performance. It doesn't
- matter if that particular memory block is actually used by the command buffer
- being submitted.
-- On Mac/MoltenVK there is a known bug - [Issue #175](https://github.com/KhronosGroup/MoltenVK/issues/175)
- which requires unmapping before GPU can see updated texture.
-- Keeping many large memory blocks mapped may impact performance or stability of some debugging tools.
-
-\section memory_mapping_cache_control Cache control
-
-Memory in Vulkan doesn't need to be unmapped before using it on GPU,
-but unless a memory type has the `VK_MEMORY_PROPERTY_HOST_COHERENT_BIT` flag set,
-you need to manually invalidate the cache before reading from a mapped pointer
-and flush the cache after writing to a mapped pointer.
-Vulkan provides following functions for this purpose `vkFlushMappedMemoryRanges()`,
-`vkInvalidateMappedMemoryRanges()`, but this library provides more convenient
-functions that refer to given allocation object: vmaFlushAllocation(),
-vmaInvalidateAllocation().
-
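-As a minimal sketch (assuming an already created `allocator`, a `HOST_VISIBLE` but
-non-`HOST_COHERENT` allocation `alloc`, and some `constantBufferData` to upload),
-writing through a mapped pointer and flushing could look like this:
-
-\code
-void* mappedData;
-vmaMapMemory(allocator, alloc, &mappedData);
-memcpy(mappedData, &constantBufferData, sizeof(constantBufferData));
-// Flush the written range so the device sees the data; VK_WHOLE_SIZE covers the whole allocation.
-vmaFlushAllocation(allocator, alloc, 0, VK_WHOLE_SIZE);
-vmaUnmapMemory(allocator, alloc);
-
-// Conversely, invalidate before reading data that the device has written:
-vmaInvalidateAllocation(allocator, alloc, 0, VK_WHOLE_SIZE);
-\endcode
-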
-Regions of memory specified for flush/invalidate must be aligned to
-`VkPhysicalDeviceLimits::nonCoherentAtomSize`. This is automatically ensured by the library.
-In any memory type that is `HOST_VISIBLE` but not `HOST_COHERENT`, all allocations
-within blocks are aligned to this value, so their offsets are always multiples of
-`nonCoherentAtomSize` and two different allocations never share the same "line" of this size.
-
-Please note that memory allocated with #VMA_MEMORY_USAGE_CPU_ONLY is guaranteed to be `HOST_COHERENT`.
-
-Also, Windows drivers from all 3 PC GPU vendors (AMD, Intel, NVIDIA)
-currently provide `HOST_COHERENT` flag on all memory types that are
-`HOST_VISIBLE`, so on this platform you may not need to bother.
-
-\section memory_mapping_finding_if_memory_mappable Finding out if memory is mappable
-
-It may happen that your allocation ends up in memory that is `HOST_VISIBLE` (available for mapping)
-even though it wasn't explicitly requested.
-For example, application may work on integrated graphics with unified memory (like Intel) or
-allocation from video memory might have failed, so the library chose system memory as fallback.
-
-You can detect this case and map such allocation to access its memory on CPU directly,
-instead of launching a transfer operation.
-In order to do that: inspect `allocInfo.memoryType`, call vmaGetMemoryTypeProperties(),
-and look for `VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT` flag in properties of that memory type.
-
-\code
-VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
-bufCreateInfo.size = sizeof(ConstantBuffer);
-bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
-
-VmaAllocationCreateInfo allocCreateInfo = {};
-allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
-allocCreateInfo.preferredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
-
-VkBuffer buf;
-VmaAllocation alloc;
-VmaAllocationInfo allocInfo;
-vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
-
-VkMemoryPropertyFlags memFlags;
-vmaGetMemoryTypeProperties(allocator, allocInfo.memoryType, &memFlags);
-if((memFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
-{
- // Allocation ended up in mappable memory. You can map it and access it directly.
- void* mappedData;
- vmaMapMemory(allocator, alloc, &mappedData);
- memcpy(mappedData, &constantBufferData, sizeof(constantBufferData));
- vmaUnmapMemory(allocator, alloc);
-}
-else
-{
- // Allocation ended up in non-mappable memory.
- // You need to create CPU-side buffer in VMA_MEMORY_USAGE_CPU_ONLY and make a transfer.
-}
-\endcode
-
-You can even use #VMA_ALLOCATION_CREATE_MAPPED_BIT flag while creating allocations
-that are not necessarily `HOST_VISIBLE` (e.g. using #VMA_MEMORY_USAGE_GPU_ONLY).
-If the allocation ends up in memory type that is `HOST_VISIBLE`, it will be persistently mapped and you can use it directly.
-If not, the flag is just ignored.
-Example:
-
-\code
-VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
-bufCreateInfo.size = sizeof(ConstantBuffer);
-bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
-
-VmaAllocationCreateInfo allocCreateInfo = {};
-allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
-allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
-
-VkBuffer buf;
-VmaAllocation alloc;
-VmaAllocationInfo allocInfo;
-vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
-
-if(allocInfo.pMappedData != nullptr)
-{
- // Allocation ended up in mappable memory.
- // It's persistently mapped. You can access it directly.
- memcpy(allocInfo.pMappedData, &constantBufferData, sizeof(constantBufferData));
-}
-else
-{
- // Allocation ended up in non-mappable memory.
- // You need to create CPU-side buffer in VMA_MEMORY_USAGE_CPU_ONLY and make a transfer.
-}
-\endcode
-
-
-\page custom_memory_pools Custom memory pools
-
-A memory pool contains a number of `VkDeviceMemory` blocks.
-The library automatically creates and manages default pool for each memory type available on the device.
-Default memory pool automatically grows in size.
-Size of allocated blocks is also variable and managed automatically.
-
-You can create custom pool and allocate memory out of it.
-It can be useful if you want to:
-
-- Keep certain kind of allocations separate from others.
-- Enforce particular, fixed size of Vulkan memory blocks.
-- Limit maximum amount of Vulkan memory allocated for that pool.
-- Reserve minimum or fixed amount of Vulkan memory always preallocated for that pool.
-
-To use custom memory pools:
-
--# Fill VmaPoolCreateInfo structure.
--# Call vmaCreatePool() to obtain #VmaPool handle.
--# When making an allocation, set VmaAllocationCreateInfo::pool to this handle.
- You don't need to specify any other parameters of this structure, like `usage`.
-
-Example:
-
-\code
-// Create a pool that can have at most 2 blocks, 128 MiB each.
-VmaPoolCreateInfo poolCreateInfo = {};
-poolCreateInfo.memoryTypeIndex = ...
-poolCreateInfo.blockSize = 128ull * 1024 * 1024;
-poolCreateInfo.maxBlockCount = 2;
-
-VmaPool pool;
-vmaCreatePool(allocator, &poolCreateInfo, &pool);
-
-// Allocate a buffer out of it.
-VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
-bufCreateInfo.size = 1024;
-bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
-
-VmaAllocationCreateInfo allocCreateInfo = {};
-allocCreateInfo.pool = pool;
-
-VkBuffer buf;
-VmaAllocation alloc;
-VmaAllocationInfo allocInfo;
-vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
-\endcode
-
-You have to free all allocations made from this pool before destroying it.
-
-\code
-vmaDestroyBuffer(allocator, buf, alloc);
-vmaDestroyPool(allocator, pool);
-\endcode
-
-\section custom_memory_pools_MemTypeIndex Choosing memory type index
-
-When creating a pool, you must explicitly specify memory type index.
-To find the one suitable for your buffers or images, you can use helper functions
-vmaFindMemoryTypeIndexForBufferInfo(), vmaFindMemoryTypeIndexForImageInfo().
-You need to provide structures with example parameters of buffers or images
-that you are going to create in that pool.
-
-\code
-VkBufferCreateInfo exampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
-exampleBufCreateInfo.size = 1024; // Whatever.
-exampleBufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT; // Change if needed.
-
-VmaAllocationCreateInfo allocCreateInfo = {};
-allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY; // Change if needed.
-
-uint32_t memTypeIndex;
-vmaFindMemoryTypeIndexForBufferInfo(allocator, &exampleBufCreateInfo, &allocCreateInfo, &memTypeIndex);
-
-VmaPoolCreateInfo poolCreateInfo = {};
-poolCreateInfo.memoryTypeIndex = memTypeIndex;
-// ...
-\endcode
-
-When creating buffers/images allocated in that pool, provide following parameters:
-
-- `VkBufferCreateInfo`: Prefer to pass same parameters as above.
- Otherwise you risk creating resources in a memory type that is not suitable for them, which may result in undefined behavior.
- Using different `VK_BUFFER_USAGE_` flags may work, but you shouldn't create images in a pool intended for buffers
- or the other way around.
-- VmaAllocationCreateInfo: You don't need to pass same parameters. Fill only `pool` member.
- Other members are ignored anyway.
-
-\section linear_algorithm Linear allocation algorithm
-
-Each Vulkan memory block managed by this library has accompanying metadata that
-keeps track of used and unused regions. By default, the metadata structure and
-algorithm tries to find best place for new allocations among free regions to
-optimize memory usage. This way you can allocate and free objects in any order.
-
-![Default allocation algorithm](../gfx/Linear_allocator_1_algo_default.png)
-
-Sometimes there is a need to use simpler, linear allocation algorithm. You can
-create custom pool that uses such algorithm by adding flag
-#VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT to VmaPoolCreateInfo::flags while creating
-#VmaPool object. Then an alternative metadata management is used. It always
-creates new allocations after last one and doesn't reuse free regions after
-allocations freed in the middle. It results in better allocation performance and
-less memory consumed by metadata.
-
-![Linear allocation algorithm](../gfx/Linear_allocator_2_algo_linear.png)
-
-With this one flag, you can create a custom pool that can be used in many ways:
-free-at-once, stack, double stack, and ring buffer. See below for details.
-
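-As a minimal sketch (assuming an already created `allocator` and a previously chosen
-`memTypeIndex`), creating such a pool could look like this:
-
-\code
-VmaPoolCreateInfo poolCreateInfo = {};
-poolCreateInfo.memoryTypeIndex = memTypeIndex;
-poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
-poolCreateInfo.blockSize = 16ull * 1024 * 1024; // for example, 16 MiB blocks
-poolCreateInfo.maxBlockCount = 1; // a single block is required for double stack and ring buffer usage
-
-VmaPool pool;
-vmaCreatePool(allocator, &poolCreateInfo, &pool);
-\endcode
-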
-\subsection linear_algorithm_free_at_once Free-at-once
-
-In a pool that uses linear algorithm, you still need to free all the allocations
-individually, e.g. by using vmaFreeMemory() or vmaDestroyBuffer(). You can free
-them in any order. New allocations are always made after the last one - free space
-in the middle is not reused. However, when you release all the allocations and
-the pool becomes empty, allocation starts from the beginning again. This way you
-can use linear algorithm to speed up creation of allocations that you are going
-to release all at once.
-
-![Free-at-once](../gfx/Linear_allocator_3_free_at_once.png)
-
-This mode is also available for pools created with VmaPoolCreateInfo::maxBlockCount
-value that allows multiple memory blocks.
-
-\subsection linear_algorithm_stack Stack
-
-When you free an allocation that was created last, its space can be reused.
-Thanks to this, if you always release allocations in the order opposite to their
-creation (LIFO - Last In First Out), you can achieve behavior of a stack.
-
-![Stack](../gfx/Linear_allocator_4_stack.png)
-
-This mode is also available for pools created with VmaPoolCreateInfo::maxBlockCount
-value that allows multiple memory blocks.
-
-\subsection linear_algorithm_double_stack Double stack
-
-The space reserved by a custom pool with linear algorithm may be used by two
-stacks:
-
-- First, default one, growing up from offset 0.
-- Second, "upper" one, growing down from the end towards lower offsets.
-
-To make allocation from upper stack, add flag #VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT
-to VmaAllocationCreateInfo::flags.
-
-![Double stack](../gfx/Linear_allocator_7_double_stack.png)
-
-Double stack is available only in pools with one memory block -
-VmaPoolCreateInfo::maxBlockCount must be 1. Otherwise behavior is undefined.
-
-When the two stacks' ends meet so there is not enough space between them for a
-new allocation, such allocation fails with usual
-`VK_ERROR_OUT_OF_DEVICE_MEMORY` error.
-
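-As a minimal sketch (assuming an already created `allocator`, a linear `pool` created as
-above, and a filled `bufCreateInfo`), allocating from the upper stack could look like this:
-
-\code
-VmaAllocationCreateInfo allocCreateInfo = {};
-allocCreateInfo.pool = pool;
-// Allocate from the "upper" stack, growing down from the end of the block.
-allocCreateInfo.flags = VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
-
-VkBuffer buf;
-VmaAllocation alloc;
-vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr);
-\endcode
-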
-\subsection linear_algorithm_ring_buffer Ring buffer
-
-When you free some allocations from the beginning and there is not enough free space
-for a new one at the end of a pool, allocator's "cursor" wraps around to the
-beginning and starts allocation there. Thanks to this, if you always release
-allocations in the same order as you created them (FIFO - First In First Out),
-you can achieve behavior of a ring buffer / queue.
-
-![Ring buffer](../gfx/Linear_allocator_5_ring_buffer.png)
-
-Pools with linear algorithm support [lost allocations](@ref lost_allocations) when used as ring buffer.
-If there is not enough free space for a new allocation, but existing allocations
-from the front of the queue can become lost, they become lost and the allocation
-succeeds.
-
-![Ring buffer with lost allocations](../gfx/Linear_allocator_6_ring_buffer_lost.png)
-
-Ring buffer is available only in pools with one memory block -
-VmaPoolCreateInfo::maxBlockCount must be 1. Otherwise behavior is undefined.
-
-\section buddy_algorithm Buddy allocation algorithm
-
-There is another allocation algorithm that can be used with custom pools, called
-"buddy". Its internal data structure is based on a tree of blocks, each having
-a size that is a power of two, and half of its parent's size. When you want to
-allocate memory of certain size, a free node in the tree is located. If it's too
-large, it is recursively split into two halves (called "buddies"). However, if
-requested allocation size is not a power of two, the size of a tree node is
-aligned up to the nearest power of two and the remaining space is wasted. When
-two buddy nodes become free, they are merged back into one larger node.
-
-![Buddy allocator](../gfx/Buddy_allocator.png)
-
-The advantage of buddy allocation algorithm over default algorithm is faster
-allocation and deallocation, as well as smaller external fragmentation. The
-disadvantage is more wasted space (internal fragmentation).
-
-For more information, please read ["Buddy memory allocation" on Wikipedia](https://en.wikipedia.org/wiki/Buddy_memory_allocation)
-or other sources that describe this concept in general.
-
-To use buddy allocation algorithm with a custom pool, add flag
-#VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT to VmaPoolCreateInfo::flags while creating
-#VmaPool object.
-
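-As a minimal sketch (assuming an already created `allocator` and a previously chosen
-`memTypeIndex`), this could look like:
-
-\code
-VmaPoolCreateInfo poolCreateInfo = {};
-poolCreateInfo.memoryTypeIndex = memTypeIndex;
-poolCreateInfo.flags = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
-poolCreateInfo.blockSize = 64ull * 1024 * 1024; // a power of two is recommended (see below)
-
-VmaPool pool;
-vmaCreatePool(allocator, &poolCreateInfo, &pool);
-\endcode
-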
-Several limitations apply to pools that use buddy algorithm:
-
-- It is recommended to use VmaPoolCreateInfo::blockSize that is a power of two.
- Otherwise, only largest power of two smaller than the size is used for
- allocations. The remaining space always stays unused.
-- [Margins](@ref debugging_memory_usage_margins) and
- [corruption detection](@ref debugging_memory_usage_corruption_detection)
- don't work in such pools.
-- [Lost allocations](@ref lost_allocations) don't work in such pools. You can
- use them, but they never become lost. Support may be added in the future.
-- [Defragmentation](@ref defragmentation) doesn't work with allocations made from
- such pool.
-
-\page defragmentation Defragmentation
-
-Interleaved allocations and deallocations of many objects of varying size can
-cause fragmentation over time, which can lead to a situation where the library is unable
-to find a continuous range of free memory for a new allocation even though there is
-enough free space, just scattered across many small free ranges between existing
-allocations.
-
-To mitigate this problem, you can use defragmentation feature:
-structure #VmaDefragmentationInfo2, function vmaDefragmentationBegin(), vmaDefragmentationEnd().
-Given set of allocations,
-this function can move them to compact used memory, ensure more continuous free
-space and possibly also free some `VkDeviceMemory` blocks.
-
-What the defragmentation does is:
-
-- Updates #VmaAllocation objects to point to new `VkDeviceMemory` and offset.
- After allocation has been moved, its VmaAllocationInfo::deviceMemory and/or
- VmaAllocationInfo::offset changes. You must query them again using
- vmaGetAllocationInfo() if you need them.
-- Moves actual data in memory.
-
-What it doesn't do, so you need to do it yourself:
-
-- Recreate buffers and images that were bound to allocations that were defragmented and
- bind them with their new places in memory.
- You must use `vkDestroyBuffer()`, `vkDestroyImage()`,
- `vkCreateBuffer()`, `vkCreateImage()` for that purpose and NOT vmaDestroyBuffer(),
- vmaDestroyImage(), vmaCreateBuffer(), vmaCreateImage(), because you don't need to
- destroy or create allocation objects!
-- Recreate views and update descriptors that point to these buffers and images.
-
-\section defragmentation_cpu Defragmenting CPU memory
-
-Following example demonstrates how you can run defragmentation on CPU.
-Only allocations created in memory types that are `HOST_VISIBLE` can be defragmented.
-Others are ignored.
-
-The way it works is:
-
-- It temporarily maps entire memory blocks when necessary.
-- It moves data using `memmove()` function.
-
-\code
-// Given following variables already initialized:
-VkDevice device;
-VmaAllocator allocator;
-std::vector<VkBuffer> buffers;
-std::vector<VmaAllocation> allocations;
-
-
-const uint32_t allocCount = (uint32_t)allocations.size();
-std::vector<VkBool32> allocationsChanged(allocCount);
-
-VmaDefragmentationInfo2 defragInfo = {};
-defragInfo.allocationCount = allocCount;
-defragInfo.pAllocations = allocations.data();
-defragInfo.pAllocationsChanged = allocationsChanged.data();
-defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE; // No limit.
-defragInfo.maxCpuAllocationsToMove = UINT32_MAX; // No limit.
-
-VmaDefragmentationContext defragCtx;
-vmaDefragmentationBegin(allocator, &defragInfo, nullptr, &defragCtx);
-vmaDefragmentationEnd(allocator, defragCtx);
-
-for(uint32_t i = 0; i < allocCount; ++i)
-{
- if(allocationsChanged[i])
- {
- // Destroy buffer that is immutably bound to memory region which is no longer valid.
- vkDestroyBuffer(device, buffers[i], nullptr);
-
- // Create new buffer with same parameters.
- VkBufferCreateInfo bufferInfo = ...;
- vkCreateBuffer(device, &bufferInfo, nullptr, &buffers[i]);
-
- // You can make dummy call to vkGetBufferMemoryRequirements here to silence validation layer warning.
-
- // Bind new buffer to new memory region. Data contained in it is already moved.
- VmaAllocationInfo allocInfo;
- vmaGetAllocationInfo(allocator, allocations[i], &allocInfo);
- vkBindBufferMemory(device, buffers[i], allocInfo.deviceMemory, allocInfo.offset);
- }
-}
-\endcode
-
-Setting VmaDefragmentationInfo2::pAllocationsChanged is optional.
-This output array tells whether particular allocation in VmaDefragmentationInfo2::pAllocations at the same index
-has been modified during defragmentation.
-You can pass null, but you then need to query every allocation passed to defragmentation
-for new parameters using vmaGetAllocationInfo() if you might need to recreate and rebind a buffer or image associated with it.
-
-If you use [Custom memory pools](@ref choosing_memory_type_custom_memory_pools),
-you can fill VmaDefragmentationInfo2::poolCount and VmaDefragmentationInfo2::pPools
-instead of VmaDefragmentationInfo2::allocationCount and VmaDefragmentationInfo2::pAllocations
-to defragment all allocations in given pools.
-You cannot use VmaDefragmentationInfo2::pAllocationsChanged in that case.
-You can also combine both methods.
-
-\section defragmentation_gpu Defragmenting GPU memory
-
-It is also possible to defragment allocations created in memory types that are not `HOST_VISIBLE`.
-To do that, you need to pass a command buffer that meets requirements as described in
-VmaDefragmentationInfo2::commandBuffer. The way it works is:
-
-- It creates temporary buffers and binds them to entire memory blocks when necessary.
-- It issues `vkCmdCopyBuffer()` to passed command buffer.
-
-Example:
-
-\code
-// Given following variables already initialized:
-VkDevice device;
-VmaAllocator allocator;
-VkCommandBuffer commandBuffer;
-std::vector<VkBuffer> buffers;
-std::vector<VmaAllocation> allocations;
-
-
-const uint32_t allocCount = (uint32_t)allocations.size();
-std::vector<VkBool32> allocationsChanged(allocCount);
-
-VkCommandBufferBeginInfo cmdBufBeginInfo = ...;
-vkBeginCommandBuffer(commandBuffer, &cmdBufBeginInfo);
-
-VmaDefragmentationInfo2 defragInfo = {};
-defragInfo.allocationCount = allocCount;
-defragInfo.pAllocations = allocations.data();
-defragInfo.pAllocationsChanged = allocationsChanged.data();
-defragInfo.maxGpuBytesToMove = VK_WHOLE_SIZE; // Notice it's "GPU" this time.
-defragInfo.maxGpuAllocationsToMove = UINT32_MAX; // Notice it's "GPU" this time.
-defragInfo.commandBuffer = commandBuffer;
-
-VmaDefragmentationContext defragCtx;
-vmaDefragmentationBegin(allocator, &defragInfo, nullptr, &defragCtx);
-
-vkEndCommandBuffer(commandBuffer);
-
-// Submit commandBuffer.
-// Wait for a fence that ensures commandBuffer execution finished.
-
-vmaDefragmentationEnd(allocator, defragCtx);
-
-for(uint32_t i = 0; i < allocCount; ++i)
-{
- if(allocationsChanged[i])
- {
- // Destroy buffer that is immutably bound to memory region which is no longer valid.
- vkDestroyBuffer(device, buffers[i], nullptr);
-
- // Create new buffer with same parameters.
- VkBufferCreateInfo bufferInfo = ...;
- vkCreateBuffer(device, &bufferInfo, nullptr, &buffers[i]);
-
- // You can make dummy call to vkGetBufferMemoryRequirements here to silence validation layer warning.
-
- // Bind new buffer to new memory region. Data contained in it is already moved.
- VmaAllocationInfo allocInfo;
- vmaGetAllocationInfo(allocator, allocations[i], &allocInfo);
- vkBindBufferMemory(device, buffers[i], allocInfo.deviceMemory, allocInfo.offset);
- }
-}
-\endcode
-
-You can combine these two methods by specifying non-zero `maxGpu*` as well as `maxCpu*` parameters.
-The library automatically chooses best method to defragment each memory pool.
-
-You may try not to block your entire program to wait until defragmentation finishes,
-but do it in the background, as long as you carefully fulfill the requirements described
-in function vmaDefragmentationBegin().
-
-\section defragmentation_additional_notes Additional notes
-
-While using defragmentation, you may experience validation layer warnings, which you just need to ignore.
-See [Validation layer warnings](@ref general_considerations_validation_layer_warnings).
-
-If you defragment allocations bound to images, these images should be created with
-`VK_IMAGE_CREATE_ALIAS_BIT` flag, to make sure that new image created with same
-parameters and pointing to data copied to another memory region will interpret
-its contents consistently. Otherwise you may experience corrupted data on some
-implementations, e.g. due to different pixel swizzling used internally by the graphics driver.
-
-If you defragment allocations bound to images, new images to be bound to new
-memory region after defragmentation should be created with `VK_IMAGE_LAYOUT_PREINITIALIZED`
-and then transitioned to their original layout from before defragmentation using
-an image memory barrier.
-
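-As a minimal sketch (assuming `device` and that the other members of `imageCreateInfo`
-match the original image), recreating such an image could look like this:
-
-\code
-VkImageCreateInfo imageCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
-// ... fill the remaining members with the same parameters as the original image ...
-imageCreateInfo.flags |= VK_IMAGE_CREATE_ALIAS_BIT;
-imageCreateInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
-
-VkImage newImage;
-vkCreateImage(device, &imageCreateInfo, nullptr, &newImage);
-// Bind newImage to the defragmented memory region, then transition it from
-// VK_IMAGE_LAYOUT_PREINITIALIZED to its original layout using an image memory barrier.
-\endcode
-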
-Please don't expect memory to be fully compacted after defragmentation.
-Algorithms inside are based on some heuristics that try to maximize the number of Vulkan
-memory blocks that can be made completely empty (and thus released), as well as to maximize continuous
-empty space inside remaining blocks, while minimizing the number and size of allocations that
-need to be moved. Some fragmentation may still remain - this is normal.
-
-\section defragmentation_custom_algorithm Writing custom defragmentation algorithm
-
-If you want to implement your own, custom defragmentation algorithm,
-there is infrastructure prepared for that,
-but it is not exposed through the library API - you need to hack its source code.
-Here are steps needed to do this:
-
--# The main thing you need to do is define your own class derived from the base abstract
- class `VmaDefragmentationAlgorithm` and implement your version of its pure virtual methods.
- See the definition and comments of this class for details.
--# Your code needs to interact with device memory block metadata.
- If you need more access to its data than is provided by its public interface,
- declare your new class as a friend class e.g. in class `VmaBlockMetadata_Generic`.
--# If you want to create a flag that would enable your algorithm or pass some additional
- flags to configure it, add them to `VmaDefragmentationFlagBits` and use them in
- VmaDefragmentationInfo2::flags.
--# Modify function `VmaBlockVectorDefragmentationContext::Begin` to create an object
- of your new class whenever needed.
-
-
-\page lost_allocations Lost allocations
-
-If your game oversubscribes video memory, it may work OK in previous-generation
-graphics APIs (DirectX 9, 10, 11, OpenGL) because resources are automatically
-paged to system RAM. In Vulkan you can't do that because when you run out of
-memory, an allocation just fails. If you have more data (e.g. textures) than can
-fit into VRAM and you don't need it all at once, you may want to upload it to the
-GPU on demand and "push out" data that hasn't been used for a long time to make room
-for the new data, effectively using VRAM (or a certain memory pool) as a form of
-cache. Vulkan Memory Allocator can help you with that by supporting a concept of
-"lost allocations".
-
-To create an allocation that can become lost, include the #VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT
-flag in VmaAllocationCreateInfo::flags. Before using a buffer or image bound to
-such an allocation in every new frame, you need to check whether it has become lost.
-To check it, call vmaTouchAllocation().
-If the allocation is lost, you should not use it or the buffer/image bound to it.
-Don't forget to destroy this allocation and this buffer/image.
-vmaGetAllocationInfo() can also be used to check the status of the allocation.
-The allocation is lost when the returned VmaAllocationInfo::deviceMemory == `VK_NULL_HANDLE`.
-
-To create an allocation that can make some other allocations lost to make room
-for it, use #VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT flag. You will
-usually use both flags #VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT and
-#VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT at the same time.
-
-Warning! The current implementation uses a quite naive, brute-force algorithm,
-which can make allocation calls that use the #VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT
-flag quite slow. A new, more optimal algorithm and data structure to speed this
-up is planned for the future.
-
-<b>Q: When interleaving creation of new allocations with usage of existing ones,
-how do you make sure that an allocation won't become lost while it's used in the
-current frame?</b>
-
-It is ensured because vmaTouchAllocation() / vmaGetAllocationInfo() not only return the allocation
-status/parameters and check whether the allocation is not lost, but when it's not, they also
-atomically mark it as used in the current frame, which makes it impossible for it to
-become lost in that frame. This uses a lockless algorithm, so it works fast and
-doesn't involve locking any internal mutex.
-
-<b>Q: What if my allocation may still be in use by the GPU when it's rendering a
-previous frame while I already submit new frame on the CPU?</b>
-
-You can make sure that allocations "touched" by vmaTouchAllocation() / vmaGetAllocationInfo() will not
-become lost for a number of additional frames back from the current one by
-specifying this number as VmaAllocatorCreateInfo::frameInUseCount (for default
-memory pool) and VmaPoolCreateInfo::frameInUseCount (for custom pool).
-
-<b>Q: How do you inform the library when new frame starts?</b>
-
-You need to call function vmaSetCurrentFrameIndex().
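-
-For example (a minimal sketch; the `frameIndex` counter is your own variable):
-
-\code
-// At the beginning of each new frame:
-++frameIndex;
-vmaSetCurrentFrameIndex(allocator, frameIndex);
-\endcode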
-
-Example code:
-
-\code
-struct MyBuffer
-{
- VkBuffer m_Buf = VK_NULL_HANDLE;
- VmaAllocation m_Alloc = VK_NULL_HANDLE;
-
- // Called when the buffer is really needed in the current frame.
- void EnsureBuffer();
-};
-
-void MyBuffer::EnsureBuffer()
-{
- // Buffer has been created.
- if(m_Buf != VK_NULL_HANDLE)
- {
- // Check if its allocation is not lost + mark it as used in current frame.
- if(vmaTouchAllocation(allocator, m_Alloc))
- {
- // It's all OK - safe to use m_Buf.
- return;
- }
- }
-
- // Buffer doesn't exist yet or is lost - destroy and recreate it.
-
- vmaDestroyBuffer(allocator, m_Buf, m_Alloc);
-
- VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
- bufCreateInfo.size = 1024;
- bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
-
- VmaAllocationCreateInfo allocCreateInfo = {};
- allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
- allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
- VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
-
- vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &m_Buf, &m_Alloc, nullptr);
-}
-\endcode
-
-When using lost allocations, you may see some Vulkan validation layer warnings
-about overlapping regions of memory bound to different kinds of buffers and
-images. This is still valid as long as you implement proper handling of lost
-allocations (like in the example above) and don't use them.
-
-You can create an allocation that is already in lost state from the beginning using function
-vmaCreateLostAllocation(). It may be useful if you need a "dummy" allocation that is not null.
-
-You can call function vmaMakePoolAllocationsLost() to set all eligible allocations
-in a specified custom pool to the lost state.
-Allocations that have been "touched" in the current frame or in the last VmaPoolCreateInfo::frameInUseCount frames
-cannot become lost.
-
-<b>Q: Can I touch an allocation that cannot become lost?</b>
-
-Yes, although it has no visible effect.
-Calls to vmaGetAllocationInfo() and vmaTouchAllocation() update the last use frame index
-also for allocations that cannot become lost, but the only way to observe it is to dump the
-internal allocator state using vmaBuildStatsString().
-You can use this feature for debugging purposes to explicitly mark allocations that you use
-in the current frame and then analyze the JSON dump to see for how long each allocation stays unused.
-
-
-\page statistics Statistics
-
-This library contains functions that return information about its internal state,
-especially the amount of memory allocated from Vulkan.
-Please keep in mind that these functions need to traverse all internal data structures
-to gather this information, so they may be quite time-consuming.
-Don't call them too often.
-
-\section statistics_numeric_statistics Numeric statistics
-
-You can query for overall statistics of the allocator using function vmaCalculateStats().
-Information is returned in structure #VmaStats.
-It contains #VmaStatInfo - the number of allocated blocks, number of allocations
-(occupied ranges in these blocks), number of unused (free) ranges in these blocks,
-number of bytes used and unused (but still allocated from Vulkan), and other information.
-They are summed across memory heaps and memory types, plus there is a total for the whole allocator.
-
-You can query for statistics of a custom pool using function vmaGetPoolStats().
-Information is returned in structure #VmaPoolStats.
-
-You can query for information about a specific allocation using function vmaGetAllocationInfo().
-It fills structure #VmaAllocationInfo.
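-
-For illustration, a minimal sketch of querying these statistics (`pool` and `allocation` are
-assumed to be existing objects):
-
-\code
-VmaStats stats;
-vmaCalculateStats(allocator, &stats);
-printf("Total: %u blocks, %u allocations, %llu bytes used, %llu bytes unused\n",
-    stats.total.blockCount, stats.total.allocationCount,
-    (unsigned long long)stats.total.usedBytes, (unsigned long long)stats.total.unusedBytes);
-
-VmaPoolStats poolStats = {};
-vmaGetPoolStats(allocator, pool, &poolStats);
-
-VmaAllocationInfo allocInfo;
-vmaGetAllocationInfo(allocator, allocation, &allocInfo);
-printf("Allocation: memory type %u, offset %llu, size %llu\n",
-    allocInfo.memoryType, (unsigned long long)allocInfo.offset, (unsigned long long)allocInfo.size);
-\endcode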
-
-\section statistics_json_dump JSON dump
-
-You can dump internal state of the allocator to a string in JSON format using function vmaBuildStatsString().
-The result is guaranteed to be correct JSON.
-It uses ANSI encoding.
-Any strings provided by user (see [Allocation names](@ref allocation_names))
-are copied as-is and properly escaped for JSON, so if they use UTF-8, ISO-8859-2 or any other encoding,
-this JSON string can be treated as using this encoding.
-It must be freed using function vmaFreeStatsString().
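-
-A minimal usage sketch:
-
-\code
-char* statsString = nullptr;
-vmaBuildStatsString(allocator, &statsString, VK_TRUE); // VK_TRUE = include detailed map of memory blocks.
-// Write statsString to a file or log...
-vmaFreeStatsString(allocator, statsString);
-\endcode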
-
-The format of this JSON string is not part of the official documentation of the library,
-but it will not change in a backward-incompatible way without increasing the library's major version number
-and an appropriate mention in the changelog.
-
-The JSON string contains all the data that can be obtained using vmaCalculateStats().
-It can also contain detailed map of allocated memory blocks and their regions -
-free and occupied by allocations.
-This allows e.g. to visualize the memory or assess fragmentation.
-
-
-\page allocation_annotation Allocation names and user data
-
-\section allocation_user_data Allocation user data
-
-You can annotate allocations with your own information, e.g. for debugging purposes.
-To do that, fill VmaAllocationCreateInfo::pUserData field when creating
-an allocation. It's an opaque `void*` pointer. You can use it e.g. as a pointer,
-some handle, index, key, ordinal number or any other value that would associate
-the allocation with your custom metadata.
-
-\code
-VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
-// Fill bufferInfo...
-
-MyBufferMetadata* pMetadata = CreateBufferMetadata();
-
-VmaAllocationCreateInfo allocCreateInfo = {};
-allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
-allocCreateInfo.pUserData = pMetadata;
-
-VkBuffer buffer;
-VmaAllocation allocation;
-vmaCreateBuffer(allocator, &bufferInfo, &allocCreateInfo, &buffer, &allocation, nullptr);
-\endcode
-
-The pointer may be later retrieved as VmaAllocationInfo::pUserData:
-
-\code
-VmaAllocationInfo allocInfo;
-vmaGetAllocationInfo(allocator, allocation, &allocInfo);
-MyBufferMetadata* pMetadata = (MyBufferMetadata*)allocInfo.pUserData;
-\endcode
-
-It can also be changed using function vmaSetAllocationUserData().
-
-Values of (non-zero) allocations' `pUserData` are printed in JSON report created by
-vmaBuildStatsString(), in hexadecimal form.
-
-\section allocation_names Allocation names
-
-There is an alternative mode available where the `pUserData` pointer is used to point to
-a null-terminated string, giving a name to the allocation. To use this mode,
-set the #VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT flag in VmaAllocationCreateInfo::flags.
-Then `pUserData` passed as VmaAllocationCreateInfo::pUserData or as the argument to
-vmaSetAllocationUserData() must be either null or a pointer to a null-terminated string.
-The library creates an internal copy of the string, so the pointer you pass doesn't need
-to be valid for the whole lifetime of the allocation. You can free it after the call.
-
-\code
-VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
-// Fill imageInfo...
-
-std::string imageName = "Texture: ";
-imageName += fileName;
-
-VmaAllocationCreateInfo allocCreateInfo = {};
-allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
-allocCreateInfo.flags = VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT;
-allocCreateInfo.pUserData = imageName.c_str();
-
-VkImage image;
-VmaAllocation allocation;
-vmaCreateImage(allocator, &imageInfo, &allocCreateInfo, &image, &allocation, nullptr);
-\endcode
-
-The value of the allocation's `pUserData` pointer will be different from the one
-you passed when setting the allocation's name - it points to an internally managed
-buffer that holds a copy of the string.
-
-\code
-VmaAllocationInfo allocInfo;
-vmaGetAllocationInfo(allocator, allocation, &allocInfo);
-const char* imageName = (const char*)allocInfo.pUserData;
-printf("Image name: %s\n", imageName);
-\endcode
-
-That string is also printed in JSON report created by vmaBuildStatsString().
-
-
-\page debugging_memory_usage Debugging incorrect memory usage
-
-If you suspect a bug with memory usage, like usage of uninitialized memory or
-memory being overwritten out of bounds of an allocation,
-you can use debug features of this library to verify this.
-
-\section debugging_memory_usage_initialization Memory initialization
-
-If you experience a bug with incorrect and nondeterministic data in your program and you suspect uninitialized memory to be used,
-you can enable automatic memory initialization to verify this.
-To do it, define macro `VMA_DEBUG_INITIALIZE_ALLOCATIONS` to 1.
-
-\code
-#define VMA_DEBUG_INITIALIZE_ALLOCATIONS 1
-#include "vk_mem_alloc.h"
-\endcode
-
-It makes memory of all new allocations initialized to bit pattern `0xDCDCDCDC`.
-Before an allocation is destroyed, its memory is filled with bit pattern `0xEFEFEFEF`.
-Memory is automatically mapped and unmapped if necessary.
-
-If you find these values while debugging your program, chances are good that you incorrectly
-read Vulkan memory that is allocated but not initialized, or already freed, respectively.
-
-Memory initialization works only with memory types that are `HOST_VISIBLE`.
-It works also with dedicated allocations.
-It doesn't work with allocations created with #VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT flag,
-as they cannot be mapped.
-
-\section debugging_memory_usage_margins Margins
-
-By default, allocations are laid out in memory blocks next to each other if possible
-(considering required alignment, `bufferImageGranularity`, and `nonCoherentAtomSize`).
-
-![Allocations without margin](../gfx/Margins_1.png)
-
-Define macro `VMA_DEBUG_MARGIN` to some non-zero value (e.g. 16) to enforce specified
-number of bytes as a margin before and after every allocation.
-
-\code
-#define VMA_DEBUG_MARGIN 16
-#include "vk_mem_alloc.h"
-\endcode
-
-![Allocations with margin](../gfx/Margins_2.png)
-
-If your bug goes away after enabling margins, it means it may be caused by memory
-being overwritten outside of allocation boundaries. It is not 100% certain though.
-Change in application behavior may also be caused by different order and distribution
-of allocations across memory blocks after margins are applied.
-
-The margin is also applied before the first and after the last allocation in a block.
-It occurs only once between two adjacent allocations.
-
-Margins work with all types of memory.
-
-The margin is applied only to allocations made out of memory blocks and not to dedicated
-allocations, which have their own memory block of a specific size.
-It is thus not applied to allocations made using the #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT flag,
-or to those automatically placed in dedicated allocations, e.g. due to their
-large size or because it is recommended by the VK_KHR_dedicated_allocation extension.
-Margins are also not active in custom pools created with #VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT flag.
-
-Margins appear in [JSON dump](@ref statistics_json_dump) as part of free space.
-
-Note that enabling margins increases memory usage and fragmentation.
-
-\section debugging_memory_usage_corruption_detection Corruption detection
-
-You can additionally define macro `VMA_DEBUG_DETECT_CORRUPTION` to 1 to enable validation
-of contents of the margins.
-
-\code
-#define VMA_DEBUG_MARGIN 16
-#define VMA_DEBUG_DETECT_CORRUPTION 1
-#include "vk_mem_alloc.h"
-\endcode
-
-When this feature is enabled, the number of bytes specified as `VMA_DEBUG_MARGIN`
-(it must be a multiple of 4) before and after every allocation is filled with a magic number.
-This idea is also known as a "canary".
-Memory is automatically mapped and unmapped if necessary.
-
-This number is validated automatically when the allocation is destroyed.
-If it's not equal to the expected value, `VMA_ASSERT()` is executed.
-It clearly means that either the CPU or the GPU overwrote the memory outside the boundaries of the allocation,
-which indicates a serious bug.
-
-You can also explicitly request checking margins of all allocations in all memory blocks
-that belong to specified memory types by using function vmaCheckCorruption(),
-or in memory blocks that belong to specified custom pool, by using function
-vmaCheckPoolCorruption().
-
-Margin validation (corruption detection) works only for memory types that are
-`HOST_VISIBLE` and `HOST_COHERENT`.
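-
-For illustration, a minimal sketch of such explicit checks (the memory type mask and `pool` are only examples):
-
-\code
-// Check margins of all allocations in all memory blocks of all memory types:
-VkResult res = vmaCheckCorruption(allocator, UINT32_MAX);
-// VK_SUCCESS means the margins were validated and no corruption was found.
-
-// Check only memory blocks that belong to a specific custom pool:
-res = vmaCheckPoolCorruption(allocator, pool);
-\endcode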
-
-
-\page record_and_replay Record and replay
-
-\section record_and_replay_introduction Introduction
-
-While using the library, a sequence of calls to its functions together with their
-parameters can be recorded to a file and later replayed using a standalone player
-application. It can be useful to:
-
-- Test correctness - check if same sequence of calls will not cause crash or
- failures on a target platform.
-- Gather statistics - see number of allocations, peak memory usage, number of
- calls etc.
-- Benchmark performance - see how much time it takes to replay the whole
- sequence.
-
-\section record_and_replay_usage Usage
-
-<b>To record a sequence of calls to a file:</b> Fill in the
-VmaAllocatorCreateInfo::pRecordSettings member while creating the #VmaAllocator
-object. The file is opened and written to during the whole lifetime of the allocator.
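-
-For example, a minimal sketch of enabling recording (the file path is only an example):
-
-\code
-VmaRecordSettings recordSettings = {};
-recordSettings.flags = 0; // Or VMA_RECORD_FLUSH_AFTER_CALL_BIT if you expect crashes.
-recordSettings.pFilePath = "MyRecording.csv";
-
-VmaAllocatorCreateInfo allocatorInfo = {};
-// Fill physicalDevice, device, etc...
-allocatorInfo.pRecordSettings = &recordSettings;
-
-vmaCreateAllocator(&allocatorInfo, &allocator);
-\endcode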
-
-<b>To replay file:</b> Use VmaReplay - standalone command-line program.
-Precompiled binary can be found in "bin" directory.
-Its source can be found in "src/VmaReplay" directory.
-Its project is generated by Premake.
-Command line syntax is printed when the program is launched without parameters.
-Basic usage:
-
- VmaReplay.exe MyRecording.csv
-
-<b>Documentation of file format</b> can be found in file: "docs/Recording file format.md".
-It's a human-readable, text file in CSV format (Comma Separated Values).
-
-\section record_and_replay_additional_considerations Additional considerations
-
-- Replaying a file that was recorded on a different GPU (with different parameters
- like `bufferImageGranularity`, `nonCoherentAtomSize`, and especially a different
- set of memory heaps and types) may give different performance and memory usage
- results, as well as issue some warnings and errors.
-- The current implementation of recording in VMA, as well as the VmaReplay application, is
- coded and tested only on Windows. Inclusion of the recording code is driven by the
- `VMA_RECORDING_ENABLED` macro. Support for other platforms should be easy to
- add. Contributions are welcome.
-- Currently, calls to the vmaDefragment() function are not recorded.
-
-
-\page usage_patterns Recommended usage patterns
-
-See also slides from talk:
-[Sawicki, Adam. Advanced Graphics Techniques Tutorial: Memory management in Vulkan and DX12. Game Developers Conference, 2018](https://www.gdcvault.com/play/1025458/Advanced-Graphics-Techniques-Tutorial-New)
-
-
-\section usage_patterns_simple Simple patterns
-
-\subsection usage_patterns_simple_render_targets Render targets
-
-<b>When:</b>
-Any resources that you frequently write and read on GPU,
-e.g. images used as color attachments (aka "render targets"), depth-stencil attachments,
-images/buffers used as storage image/buffer (aka "Unordered Access View (UAV)").
-
-<b>What to do:</b>
-Create them in video memory that is fastest to access from GPU using
-#VMA_MEMORY_USAGE_GPU_ONLY.
-
-Consider using [VK_KHR_dedicated_allocation](@ref vk_khr_dedicated_allocation) extension
-and/or manually creating them as dedicated allocations using #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT,
-especially if they are large or if you plan to destroy and recreate them e.g. when
-display resolution changes.
-Prefer to create such resources first and all other GPU resources (like textures and vertex buffers) later.
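-
-For illustration, a minimal sketch of creating a color attachment this way (the image parameters are only examples):
-
-\code
-VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
-imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
-imgCreateInfo.extent = { 1920, 1080, 1 };
-imgCreateInfo.mipLevels = 1;
-imgCreateInfo.arrayLayers = 1;
-imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
-imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
-imgCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
-imgCreateInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
-imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
-
-VmaAllocationCreateInfo allocCreateInfo = {};
-allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
-allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
-
-VkImage img;
-VmaAllocation alloc;
-vmaCreateImage(allocator, &imgCreateInfo, &allocCreateInfo, &img, &alloc, nullptr);
-\endcode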
-
-\subsection usage_patterns_simple_immutable_resources Immutable resources
-
-<b>When:</b>
-Any resources that you fill on CPU only once (aka "immutable") or infrequently
-and then read frequently on GPU,
-e.g. textures, vertex and index buffers, constant buffers that don't change often.
-
-<b>What to do:</b>
-Create them in video memory that is fastest to access from GPU using
-#VMA_MEMORY_USAGE_GPU_ONLY.
-
-To initialize the content of such a resource, create a CPU-side (aka "staging") copy of it
-in system memory - #VMA_MEMORY_USAGE_CPU_ONLY, map it, fill it,
-and submit a transfer from it to the GPU resource.
-You can keep the staging copy if you need it for another upload transfer in the future.
-If you don't, you can destroy it or reuse the buffer for uploading a different resource
-after the transfer finishes.
-
-Prefer to create just buffers in system memory rather than images, even for uploading textures.
-Use `vkCmdCopyBufferToImage()`.
-Don't use images with `VK_IMAGE_TILING_LINEAR`.
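-
-For illustration, a minimal sketch of such an upload for a vertex buffer (`dataSize`, `srcData` and
-`commandBuffer` are assumed; command buffer recording, submission, and synchronization are omitted):
-
-\code
-// GPU-side buffer - the final destination.
-VkBufferCreateInfo gpuBufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
-gpuBufInfo.size = dataSize;
-gpuBufInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
-
-VmaAllocationCreateInfo gpuAllocCreateInfo = {};
-gpuAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
-
-VkBuffer gpuBuf;
-VmaAllocation gpuAlloc;
-vmaCreateBuffer(allocator, &gpuBufInfo, &gpuAllocCreateInfo, &gpuBuf, &gpuAlloc, nullptr);
-
-// Staging buffer - persistently mapped and filled on CPU.
-VkBufferCreateInfo stagingBufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
-stagingBufInfo.size = dataSize;
-stagingBufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
-
-VmaAllocationCreateInfo stagingAllocCreateInfo = {};
-stagingAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
-stagingAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
-
-VkBuffer stagingBuf;
-VmaAllocation stagingAlloc;
-VmaAllocationInfo stagingAllocInfo;
-vmaCreateBuffer(allocator, &stagingBufInfo, &stagingAllocCreateInfo, &stagingBuf, &stagingAlloc, &stagingAllocInfo);
-
-memcpy(stagingAllocInfo.pMappedData, srcData, dataSize);
-
-// Record the transfer into a command buffer and submit it.
-VkBufferCopy copyRegion = { 0, 0, dataSize };
-vkCmdCopyBuffer(commandBuffer, stagingBuf, gpuBuf, 1, &copyRegion);
-\endcode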
-
-\subsection usage_patterns_dynamic_resources Dynamic resources
-
-<b>When:</b>
-Any resources that change frequently (aka "dynamic"), e.g. every frame or every draw call,
-written on CPU, read on GPU.
-
-<b>What to do:</b>
-Create them using #VMA_MEMORY_USAGE_CPU_TO_GPU.
-You can map them and write to them directly on the CPU, as well as read from them on the GPU.
-
-This is a more complex situation. Different solutions are possible,
-and the best one depends on the specific GPU type, but you can use this simple approach to start with.
-Prefer to write to such a resource sequentially (e.g. using `memcpy`).
-Don't perform random access or any reads from it on the CPU, as it may be very slow.
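-
-For illustration, a minimal sketch of a per-frame uniform buffer kept persistently mapped
-(`PerFrameConstants` is a hypothetical structure of your own):
-
-\code
-VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
-bufCreateInfo.size = sizeof(PerFrameConstants);
-bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
-
-VmaAllocationCreateInfo allocCreateInfo = {};
-allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
-allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
-
-VkBuffer buf;
-VmaAllocation alloc;
-VmaAllocationInfo allocInfo;
-vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
-
-// Every frame: write new data sequentially and never read it back on the CPU.
-PerFrameConstants constants = ...;
-memcpy(allocInfo.pMappedData, &constants, sizeof(constants));
-\endcode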
-
-\subsection usage_patterns_readback Readback
-
-<b>When:</b>
-Resources that contain data written by GPU that you want to read back on CPU,
-e.g. results of some computations.
-
-<b>What to do:</b>
-Create them using #VMA_MEMORY_USAGE_GPU_TO_CPU.
-You can write to them directly on GPU, as well as map and read them on CPU.
-
-\section usage_patterns_advanced Advanced patterns
-
-\subsection usage_patterns_integrated_graphics Detecting integrated graphics
-
-You can support integrated graphics (like Intel HD Graphics, AMD APU) better
-by detecting it in Vulkan.
-To do it, call `vkGetPhysicalDeviceProperties()`, inspect
-`VkPhysicalDeviceProperties::deviceType` and look for `VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU`.
-When you find it, you can assume that memory is unified and all memory types are comparably fast
-to access from GPU, regardless of `VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT`.
-
-You can then sum up sizes of all available memory heaps and treat them as useful for
-your GPU resources, instead of only `DEVICE_LOCAL` ones.
-You can also prefer to create your resources in memory types that are `HOST_VISIBLE` to map them
-directly instead of submitting explicit transfer (see below).
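-
-A minimal sketch of the detection itself:
-
-\code
-VkPhysicalDeviceProperties props;
-vkGetPhysicalDeviceProperties(physicalDevice, &props);
-const bool isIntegratedGpu = props.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
-// When isIntegratedGpu is true, you can treat all memory types as comparably fast for the GPU.
-\endcode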
-
-\subsection usage_patterns_direct_vs_transfer Direct access versus transfer
-
-For resources that you frequently write on CPU and read on GPU, many solutions are possible:
-
--# Create one copy in video memory using #VMA_MEMORY_USAGE_GPU_ONLY,
- a second copy in system memory using #VMA_MEMORY_USAGE_CPU_ONLY, and submit an explicit transfer each time.
--# Create just a single copy using #VMA_MEMORY_USAGE_CPU_TO_GPU, map it and fill it on the CPU,
- and read it directly on the GPU.
--# Create just a single copy using #VMA_MEMORY_USAGE_CPU_ONLY, map it and fill it on the CPU,
- and read it directly on the GPU.
-
-Which solution is the most efficient depends on your resource and especially on the GPU.
-It is best to measure it and then make the decision.
-Some general recommendations:
-
-- On integrated graphics use (2) or (3) to avoid unnecessary time and memory overhead
- related to using a second copy and making the transfer.
-- For small resources (e.g. constant buffers) use (2).
- Discrete AMD cards have a special 256 MiB pool of video memory that is directly mappable.
- Even if the resource ends up in system memory, its data may be cached on the GPU after the first
- fetch over the PCIe bus.
-- For larger resources (e.g. textures), decide between (1) and (2).
- You may want to differentiate NVIDIA and AMD, e.g. by looking for a memory type that is
- both `DEVICE_LOCAL` and `HOST_VISIBLE`. When you find it, use (2), otherwise use (1).
-
-Similarly, for resources that you frequently write on GPU and read on CPU, multiple
-solutions are possible:
-
--# Create one copy in video memory using #VMA_MEMORY_USAGE_GPU_ONLY,
- a second copy in system memory using #VMA_MEMORY_USAGE_GPU_TO_CPU, and submit an explicit transfer each time.
--# Create just a single copy using #VMA_MEMORY_USAGE_GPU_TO_CPU, write to it directly on the GPU,
- then map it and read it on the CPU.
-
-You should take some measurements to decide which option is faster in case of your specific
-resource.
-
-If you don't want to specialize your code for specific types of GPUs, you can still make
-a simple optimization for cases when your resource ends up in mappable memory: use it
-directly in that case instead of creating a CPU-side staging copy.
-For details see [Finding out if memory is mappable](@ref memory_mapping_finding_if_memory_mappable).
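-
-For illustration, a minimal sketch of that optimization (`bufCreateInfo`, `allocCreateInfo` and
-`constantData` are assumed; error handling is omitted):
-
-\code
-VkBuffer buf;
-VmaAllocation alloc;
-VmaAllocationInfo allocInfo;
-vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
-
-VkMemoryPropertyFlags memFlags;
-vmaGetMemoryTypeProperties(allocator, allocInfo.memoryType, &memFlags);
-if((memFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
-{
-    // The allocation ended up in mappable memory - write to it directly.
-    void* mappedData;
-    vmaMapMemory(allocator, alloc, &mappedData);
-    memcpy(mappedData, &constantData, sizeof(constantData));
-    vmaUnmapMemory(allocator, alloc);
-}
-else
-{
-    // Not mappable - fall back to a staging buffer and an explicit transfer.
-}
-\endcode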
-
-
-\page configuration Configuration
-
-Please check "CONFIGURATION SECTION" in the code to find macros that you can define
-before each include of this file or change directly in this file to provide
-your own implementation of basic facilities like assert, `min()` and `max()` functions,
-mutex, atomic etc.
-The library uses its own implementation of containers by default, but you can switch to using
-STL containers instead.
-
-\section config_Vulkan_functions Pointers to Vulkan functions
-
-The library uses Vulkan functions straight from the `vulkan.h` header by default.
-If you want to provide your own pointers to these functions, e.g. fetched using
-`vkGetInstanceProcAddr()` and `vkGetDeviceProcAddr()`:
-
--# Define `VMA_STATIC_VULKAN_FUNCTIONS 0`.
--# Provide valid pointers through VmaAllocatorCreateInfo::pVulkanFunctions.
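-
-For illustration, a minimal sketch of this setup (only a few members of #VmaVulkanFunctions are
-shown - all of them must be filled the same way):
-
-\code
-#define VMA_STATIC_VULKAN_FUNCTIONS 0
-#include "vk_mem_alloc.h"
-
-// ...
-
-VmaVulkanFunctions vulkanFunctions = {};
-vulkanFunctions.vkGetPhysicalDeviceProperties =
-    (PFN_vkGetPhysicalDeviceProperties)vkGetInstanceProcAddr(instance, "vkGetPhysicalDeviceProperties");
-vulkanFunctions.vkAllocateMemory =
-    (PFN_vkAllocateMemory)vkGetDeviceProcAddr(device, "vkAllocateMemory");
-// ... fill all remaining members in the same way ...
-
-VmaAllocatorCreateInfo allocatorInfo = {};
-allocatorInfo.physicalDevice = physicalDevice;
-allocatorInfo.device = device;
-allocatorInfo.pVulkanFunctions = &vulkanFunctions;
-
-vmaCreateAllocator(&allocatorInfo, &allocator);
-\endcode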
-
-\section custom_memory_allocator Custom host memory allocator
-
-If you use a custom allocator for CPU memory rather than the default `new` and
-`delete` operators from C++, you can make this library use your allocator as well
-by filling the optional member VmaAllocatorCreateInfo::pAllocationCallbacks. These
-functions will be passed to Vulkan, as well as used by the library itself to
-make any CPU-side allocations.
-
-\section allocation_callbacks Device memory allocation callbacks
-
-The library makes calls to `vkAllocateMemory()` and `vkFreeMemory()` internally.
-You can set up callbacks to be informed about these calls, e.g. for the purpose
-of gathering some statistics. To do it, fill the optional member
-VmaAllocatorCreateInfo::pDeviceMemoryCallbacks.
-
-\section heap_memory_limit Device heap memory limit
-
-If you want to test how your program behaves with a limited amount of Vulkan device
-memory available, without switching your graphics card to one that really has
-smaller VRAM, you can use a feature of this library intended for this purpose.
-To do it, fill the optional member VmaAllocatorCreateInfo::pHeapSizeLimit.
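-
-For illustration, a minimal sketch that limits heap 0 to 1 GiB and leaves all other heaps
-unlimited (the heap index and the limit are only examples):
-
-\code
-VkPhysicalDeviceMemoryProperties memProps;
-vkGetPhysicalDeviceMemoryProperties(physicalDevice, &memProps);
-
-std::vector<VkDeviceSize> heapSizeLimits(memProps.memoryHeapCount, VK_WHOLE_SIZE);
-heapSizeLimits[0] = 1ull * 1024 * 1024 * 1024; // 1 GiB
-
-VmaAllocatorCreateInfo allocatorInfo = {};
-allocatorInfo.physicalDevice = physicalDevice;
-allocatorInfo.device = device;
-allocatorInfo.pHeapSizeLimit = heapSizeLimits.data();
-
-vmaCreateAllocator(&allocatorInfo, &allocator);
-\endcode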
-
-
-
-\page vk_khr_dedicated_allocation VK_KHR_dedicated_allocation
-
-VK_KHR_dedicated_allocation is a Vulkan extension which can be used to improve
-performance on some GPUs. It augments the Vulkan API with the possibility to query
-the driver whether it prefers a particular buffer or image to have its own, dedicated
-allocation (separate `VkDeviceMemory` block) for better efficiency - to be able
-to do some internal optimizations.
-
-The extension is supported by this library. It will be used automatically when
-enabled. To enable it:
-
-1 . When creating the Vulkan device, check if the following 2 device extensions are
-supported (call `vkEnumerateDeviceExtensionProperties()`).
-If yes, enable them (fill `VkDeviceCreateInfo::ppEnabledExtensionNames`).
-
-- VK_KHR_get_memory_requirements2
-- VK_KHR_dedicated_allocation
-
-If you enabled these extensions:
-
-2 . Use the #VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT flag when creating
-your #VmaAllocator to inform the library that you enabled the required extensions
-and you want the library to use them.
-
-\code
-allocatorInfo.flags |= VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT;
-
-vmaCreateAllocator(&allocatorInfo, &allocator);
-\endcode
-
-That's all. The extension will be automatically used whenever you create a
-buffer using vmaCreateBuffer() or image using vmaCreateImage().
-
-When using the extension together with Vulkan Validation Layer, you will receive
-warnings like this:
-
- vkBindBufferMemory(): Binding memory to buffer 0x33 but vkGetBufferMemoryRequirements() has not been called on that buffer.
-
-It is OK, you should just ignore it. It happens because you use function
-`vkGetBufferMemoryRequirements2KHR()` instead of standard
-`vkGetBufferMemoryRequirements()`, while the validation layer seems to be
-unaware of it.
-
-To learn more about this extension, see:
-
-- [VK_KHR_dedicated_allocation in Vulkan specification](https://www.khronos.org/registry/vulkan/specs/1.0-extensions/html/vkspec.html#VK_KHR_dedicated_allocation)
-- [VK_KHR_dedicated_allocation unofficial manual](http://asawicki.info/articles/VK_KHR_dedicated_allocation.php5)
-
-
-
-\page general_considerations General considerations
-
-\section general_considerations_thread_safety Thread safety
-
-- The library has no global state, so separate #VmaAllocator objects can be used
- independently.
- There should be no need to create multiple such objects though - one per `VkDevice` is enough.
-- By default, all calls to functions that take #VmaAllocator as first parameter
- are safe to call from multiple threads simultaneously because they are
- synchronized internally when needed.
-- When the allocator is created with #VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT
- flag, calls to functions that take such #VmaAllocator object must be
- synchronized externally.
-- Access to a #VmaAllocation object must be externally synchronized. For example,
- you must not call vmaGetAllocationInfo() and vmaMapMemory() from different
- threads at the same time if you pass the same #VmaAllocation object to these
- functions.
-
-\section general_considerations_validation_layer_warnings Validation layer warnings
-
-When using this library, you can meet the following types of warnings issued by the
-Vulkan validation layer. They don't necessarily indicate a bug, so you may need
-to just ignore them.
-
-- *vkBindBufferMemory(): Binding memory to buffer 0xeb8e4 but vkGetBufferMemoryRequirements() has not been called on that buffer.*
- - It happens when VK_KHR_dedicated_allocation extension is enabled.
- `vkGetBufferMemoryRequirements2KHR` function is used instead, while validation layer seems to be unaware of it.
-- *Mapping an image with layout VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL can result in undefined behavior if this memory is used by the device. Only GENERAL or PREINITIALIZED should be used.*
- - It happens when you map a buffer or image, because the library maps entire
- `VkDeviceMemory` block, where different types of images and buffers may end
- up together, especially on GPUs with unified memory like Intel.
-- *Non-linear image 0xebc91 is aliased with linear buffer 0xeb8e4 which may indicate a bug.*
- - It happens when you use lost allocations, and a new image or buffer is
- created in place of an existing object that became lost.
- - It may happen also when you use [defragmentation](@ref defragmentation).
-
-\section general_considerations_allocation_algorithm Allocation algorithm
-
-The library uses the following algorithm for allocation, in order:
-
--# Try to find free range of memory in existing blocks.
--# If failed, try to create a new block of `VkDeviceMemory`, with preferred block size.
--# If failed, try to create such block with size/2, size/4, size/8.
--# If failed and the #VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT flag was
- specified, try to find space in existing blocks, possibly making some other
- allocations lost.
--# If failed, try to allocate separate `VkDeviceMemory` for this allocation,
- just like when you use #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT.
--# If failed, choose another memory type that meets the requirements specified in
- VmaAllocationCreateInfo and go to point 1.
--# If failed, return `VK_ERROR_OUT_OF_DEVICE_MEMORY`.
-
-\section general_considerations_features_not_supported Features not supported
-
-Features deliberately excluded from the scope of this library:
-
-- Data transfer. Uploading (streaming) and downloading data of buffers and images
- between CPU and GPU memory and the related synchronization is the responsibility of the user.
-- Allocations for imported/exported external memory. They tend to require
- explicit memory type index and dedicated allocation anyway, so they don't
- interact with main features of this library. Such special purpose allocations
- should be made manually, using `vkCreateBuffer()` and `vkAllocateMemory()`.
-- Recreation of buffers and images. Although the library has functions for
- buffer and image creation (vmaCreateBuffer(), vmaCreateImage()), you need to
- recreate these objects yourself after defragmentation. That's because the big
- structures `VkBufferCreateInfo`, `VkImageCreateInfo` are not stored in
- #VmaAllocation object.
-- Handling CPU memory allocation failures. When dynamically creating small C++
- objects in CPU memory (not Vulkan memory), allocation failures are not checked
- and handled gracefully, because that would complicate code significantly and
- is usually not needed in desktop PC applications anyway.
-- Code free of any compiler warnings. Maintaining the library to compile and
- work correctly on so many different platforms is hard enough. Being free of
- any warnings, on any version of any compiler, is simply not feasible.
-- This is a C++ library with C interface.
- Bindings or ports to any other programming languages are welcome as external projects and
- are not going to be included in this repository.
-
-*/
-
-/*
-Define this macro to 0/1 to disable/enable support for recording functionality,
-available through VmaAllocatorCreateInfo::pRecordSettings.
-*/
-#ifndef VMA_RECORDING_ENABLED
- #ifdef _WIN32
- #define VMA_RECORDING_ENABLED 1
- #else
- #define VMA_RECORDING_ENABLED 0
- #endif
-#endif
-
-#ifndef NOMINMAX
- #define NOMINMAX // For windows.h
-#endif
-
-#ifndef VULKAN_H_
- #include <vulkan/vulkan.h>
-#endif
-
-#if VMA_RECORDING_ENABLED
- #include <windows.h>
-#endif
-
-#if !defined(VMA_DEDICATED_ALLOCATION)
- #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation
- #define VMA_DEDICATED_ALLOCATION 1
- #else
- #define VMA_DEDICATED_ALLOCATION 0
- #endif
-#endif
-
-/** \struct VmaAllocator
-\brief Represents the main object of this library, initialized.
-
-Fill structure #VmaAllocatorCreateInfo and call function vmaCreateAllocator() to create it.
-Call function vmaDestroyAllocator() to destroy it.
-
-It is recommended to create just one object of this type per `VkDevice` object,
-right after Vulkan is initialized, and keep it alive until just before the Vulkan device is destroyed.
-*/
-VK_DEFINE_HANDLE(VmaAllocator)
-
-/// Callback function called after successful vkAllocateMemory.
-typedef void (VKAPI_PTR *PFN_vmaAllocateDeviceMemoryFunction)(
- VmaAllocator allocator,
- uint32_t memoryType,
- VkDeviceMemory memory,
- VkDeviceSize size);
-/// Callback function called before vkFreeMemory.
-typedef void (VKAPI_PTR *PFN_vmaFreeDeviceMemoryFunction)(
- VmaAllocator allocator,
- uint32_t memoryType,
- VkDeviceMemory memory,
- VkDeviceSize size);
-
-/** \brief Set of callbacks that the library will call for `vkAllocateMemory` and `vkFreeMemory`.
-
-Provided for informative purpose, e.g. to gather statistics about number of
-allocations or total amount of memory allocated in Vulkan.
-
-Used in VmaAllocatorCreateInfo::pDeviceMemoryCallbacks.
-*/
-typedef struct VmaDeviceMemoryCallbacks {
- /// Optional, can be null.
- PFN_vmaAllocateDeviceMemoryFunction pfnAllocate;
- /// Optional, can be null.
- PFN_vmaFreeDeviceMemoryFunction pfnFree;
-} VmaDeviceMemoryCallbacks;
-
-/// Flags for created #VmaAllocator.
-typedef enum VmaAllocatorCreateFlagBits {
- /** \brief Allocator and all objects created from it will not be synchronized internally, so you must guarantee they are used from only one thread at a time or synchronized externally by you.
-
- Using this flag may increase performance because internal mutexes are not used.
- */
- VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT = 0x00000001,
- /** \brief Enables usage of VK_KHR_dedicated_allocation extension.
-
- Using this extension will automatically allocate dedicated blocks of memory for
- some buffers and images instead of suballocating place for them out of bigger
- memory blocks (as if you explicitly used #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT
- flag) when it is recommended by the driver. It may improve performance on some
- GPUs.
-
- You may set this flag only if you found out that following device extensions are
- supported, you enabled them while creating Vulkan device passed as
- VmaAllocatorCreateInfo::device, and you want them to be used internally by this
- library:
-
- - VK_KHR_get_memory_requirements2
- - VK_KHR_dedicated_allocation
-
-When this flag is set, you can experience following warnings reported by Vulkan
-validation layer. You can ignore them.
-
-> vkBindBufferMemory(): Binding memory to buffer 0x2d but vkGetBufferMemoryRequirements() has not been called on that buffer.
- */
- VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT = 0x00000002,
-
- VMA_ALLOCATOR_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
-} VmaAllocatorCreateFlagBits;
-typedef VkFlags VmaAllocatorCreateFlags;
-
-/** \brief Pointers to some Vulkan functions - a subset used by the library.
-
-Used in VmaAllocatorCreateInfo::pVulkanFunctions.
-*/
-typedef struct VmaVulkanFunctions {
- PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties;
- PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties;
- PFN_vkAllocateMemory vkAllocateMemory;
- PFN_vkFreeMemory vkFreeMemory;
- PFN_vkMapMemory vkMapMemory;
- PFN_vkUnmapMemory vkUnmapMemory;
- PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges;
- PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges;
- PFN_vkBindBufferMemory vkBindBufferMemory;
- PFN_vkBindImageMemory vkBindImageMemory;
- PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements;
- PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements;
- PFN_vkCreateBuffer vkCreateBuffer;
- PFN_vkDestroyBuffer vkDestroyBuffer;
- PFN_vkCreateImage vkCreateImage;
- PFN_vkDestroyImage vkDestroyImage;
- PFN_vkCmdCopyBuffer vkCmdCopyBuffer;
-#if VMA_DEDICATED_ALLOCATION
- PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
- PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
-#endif
-} VmaVulkanFunctions;
-
-/// Flags to be used in VmaRecordSettings::flags.
-typedef enum VmaRecordFlagBits {
- /** \brief Enables flush after recording every function call.
-
- Enable it if you expect your application to crash, which may leave recording file truncated.
- It may degrade performance though.
- */
- VMA_RECORD_FLUSH_AFTER_CALL_BIT = 0x00000001,
-
- VMA_RECORD_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
-} VmaRecordFlagBits;
-typedef VkFlags VmaRecordFlags;
-
-/// Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSettings.
-typedef struct VmaRecordSettings
-{
- /// Flags for recording. Use #VmaRecordFlagBits enum.
- VmaRecordFlags flags;
- /** \brief Path to the file that should be written by the recording.
-
- Suggested extension: "csv".
- If the file already exists, it will be overwritten.
- It will be opened for the whole time #VmaAllocator object is alive.
- If opening this file fails, creation of the whole allocator object fails.
- */
- const char* pFilePath;
-} VmaRecordSettings;
-
-/// Description of an Allocator to be created.
-typedef struct VmaAllocatorCreateInfo
-{
- /// Flags for created allocator. Use #VmaAllocatorCreateFlagBits enum.
- VmaAllocatorCreateFlags flags;
- /// Vulkan physical device.
- /** It must be valid throughout whole lifetime of created allocator. */
- VkPhysicalDevice physicalDevice;
- /// Vulkan device.
- /** It must be valid throughout whole lifetime of created allocator. */
- VkDevice device;
- /// Preferred size of a single `VkDeviceMemory` block to be allocated from large heaps > 1 GiB. Optional.
- /** Set to 0 to use default, which is currently 256 MiB. */
- VkDeviceSize preferredLargeHeapBlockSize;
- /// Custom CPU memory allocation callbacks. Optional.
- /** Optional, can be null. When specified, will also be used for all CPU-side memory allocations. */
- const VkAllocationCallbacks* pAllocationCallbacks;
- /// Informative callbacks for `vkAllocateMemory`, `vkFreeMemory`. Optional.
- /** Optional, can be null. */
- const VmaDeviceMemoryCallbacks* pDeviceMemoryCallbacks;
- /** \brief Maximum number of additional frames that are in use at the same time as current frame.
-
- This value is used only when you make allocations with
- VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT flag. Such allocation cannot become
- lost if allocation.lastUseFrameIndex >= allocator.currentFrameIndex - frameInUseCount.
-
- For example, if you double-buffer your command buffers, so resources used for
- rendering in previous frame may still be in use by the GPU at the moment you
- allocate resources needed for the current frame, set this value to 1.
-
- If you want to allow any allocations other than used in the current frame to
- become lost, set this value to 0.
- */
- uint32_t frameInUseCount;
- /** \brief Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out of particular Vulkan memory heap.
-
- If not NULL, it must be a pointer to an array of
- `VkPhysicalDeviceMemoryProperties::memoryHeapCount` elements, defining limit on
- maximum number of bytes that can be allocated out of particular Vulkan memory
- heap.
-
- Any of the elements may be equal to `VK_WHOLE_SIZE`, which means no limit on that
- heap. This is also the default in case of `pHeapSizeLimit` = NULL.
-
- If there is a limit defined for a heap:
-
- - If user tries to allocate more memory from that heap using this allocator,
- the allocation fails with `VK_ERROR_OUT_OF_DEVICE_MEMORY`.
- - If the limit is smaller than heap size reported in `VkMemoryHeap::size`, the
- value of this limit will be reported instead when using vmaGetMemoryProperties().
-
- Warning! Using this feature may not be equivalent to installing a GPU with a
- smaller amount of memory, because the graphics driver doesn't necessarily fail new
- allocations with the `VK_ERROR_OUT_OF_DEVICE_MEMORY` result when memory capacity is
- exceeded. It may return success and just silently migrate some device memory
- blocks to system RAM. This driver behavior can also be controlled using the
- VK_AMD_memory_overallocation_behavior extension.
- */
- const VkDeviceSize* pHeapSizeLimit;
- /** \brief Pointers to Vulkan functions. Can be null if you leave define `VMA_STATIC_VULKAN_FUNCTIONS 1`.
-
- If you leave define `VMA_STATIC_VULKAN_FUNCTIONS 1` in configuration section,
- you can pass null as this member, because the library will fetch pointers to
- Vulkan functions internally in a static way, like:
-
- vulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
-
- Fill this member if you want to provide your own pointers to Vulkan functions,
- e.g. fetched using `vkGetInstanceProcAddr()` and `vkGetDeviceProcAddr()`.
- */
- const VmaVulkanFunctions* pVulkanFunctions;
- /** \brief Parameters for recording of VMA calls. Can be null.
-
- If not null, it enables recording of calls to VMA functions to a file.
- If support for recording is not enabled using `VMA_RECORDING_ENABLED` macro,
- creation of the allocator object fails with `VK_ERROR_FEATURE_NOT_PRESENT`.
- */
- const VmaRecordSettings* pRecordSettings;
-} VmaAllocatorCreateInfo;
-
-/// Creates Allocator object.
-VkResult vmaCreateAllocator(
- const VmaAllocatorCreateInfo* pCreateInfo,
- VmaAllocator* pAllocator);
-
-/// Destroys allocator object.
-void vmaDestroyAllocator(
- VmaAllocator allocator);
-
-/**
-PhysicalDeviceProperties are fetched from physicalDevice by the allocator.
-You can access it here, without fetching it again on your own.
-*/
-void vmaGetPhysicalDeviceProperties(
- VmaAllocator allocator,
- const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
-
-/**
-PhysicalDeviceMemoryProperties are fetched from physicalDevice by the allocator.
-You can access it here, without fetching it again on your own.
-*/
-void vmaGetMemoryProperties(
- VmaAllocator allocator,
- const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
-
-/**
-\brief Given Memory Type Index, returns Property Flags of this memory type.
-
-This is just a convenience function. Same information can be obtained using
-vmaGetMemoryProperties().
-*/
-void vmaGetMemoryTypeProperties(
- VmaAllocator allocator,
- uint32_t memoryTypeIndex,
- VkMemoryPropertyFlags* pFlags);
-
-/** \brief Sets index of the current frame.
-
-This function must be used if you make allocations with
-#VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT and
-#VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT flags to inform the allocator
-when a new frame begins. Allocations queried using vmaGetAllocationInfo() cannot
-become lost in the current frame.
-*/
-void vmaSetCurrentFrameIndex(
- VmaAllocator allocator,
- uint32_t frameIndex);
-
-/** \brief Calculated statistics of memory usage in entire allocator.
-*/
-typedef struct VmaStatInfo
-{
- /// Number of `VkDeviceMemory` Vulkan memory blocks allocated.
- uint32_t blockCount;
- /// Number of #VmaAllocation allocation objects allocated.
- uint32_t allocationCount;
- /// Number of free ranges of memory between allocations.
- uint32_t unusedRangeCount;
- /// Total number of bytes occupied by all allocations.
- VkDeviceSize usedBytes;
- /// Total number of bytes occupied by unused ranges.
- VkDeviceSize unusedBytes;
- VkDeviceSize allocationSizeMin, allocationSizeAvg, allocationSizeMax;
- VkDeviceSize unusedRangeSizeMin, unusedRangeSizeAvg, unusedRangeSizeMax;
-} VmaStatInfo;
-
-/// General statistics from current state of Allocator.
-typedef struct VmaStats
-{
- VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES];
- VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS];
- VmaStatInfo total;
-} VmaStats;
-
-/// Retrieves statistics from current state of the Allocator.
-void vmaCalculateStats(
- VmaAllocator allocator,
- VmaStats* pStats);
-
-#define VMA_STATS_STRING_ENABLED 1
-
-#if VMA_STATS_STRING_ENABLED
-
-/// Builds and returns statistics as string in JSON format.
-/** @param[out] ppStatsString Must be freed using vmaFreeStatsString() function.
-*/
-void vmaBuildStatsString(
- VmaAllocator allocator,
- char** ppStatsString,
- VkBool32 detailedMap);
-
-void vmaFreeStatsString(
- VmaAllocator allocator,
- char* pStatsString);
-
-#endif // #if VMA_STATS_STRING_ENABLED
-
-/** \struct VmaPool
-\brief Represents a custom memory pool.
-
-Fill structure VmaPoolCreateInfo and call function vmaCreatePool() to create it.
-Call function vmaDestroyPool() to destroy it.
-
-For more information see [Custom memory pools](@ref choosing_memory_type_custom_memory_pools).
-*/
-VK_DEFINE_HANDLE(VmaPool)
-
-typedef enum VmaMemoryUsage
-{
- /** No intended memory usage specified.
- Use other members of VmaAllocationCreateInfo to specify your requirements.
- */
- VMA_MEMORY_USAGE_UNKNOWN = 0,
- /** Memory will be used on device only, so fast access from the device is preferred.
- It usually means device-local GPU (video) memory.
- No need to be mappable on host.
- It is roughly equivalent to `D3D12_HEAP_TYPE_DEFAULT`.
-
- Usage:
-
- - Resources written and read by device, e.g. images used as attachments.
- - Resources transferred from host once (immutable) or infrequently and read by
- device multiple times, e.g. textures to be sampled, vertex buffers, uniform
- (constant) buffers, and majority of other types of resources used on GPU.
-
- Allocation may still end up in `HOST_VISIBLE` memory on some implementations.
- In such case, you are free to map it.
- You can use #VMA_ALLOCATION_CREATE_MAPPED_BIT with this usage type.
- */
- VMA_MEMORY_USAGE_GPU_ONLY = 1,
- /** Memory will be mappable on host.
- It usually means CPU (system) memory.
- Guarantees to be `HOST_VISIBLE` and `HOST_COHERENT`.
- CPU access is typically uncached. Writes may be write-combined.
- Resources created in this pool may still be accessible to the device, but access to them can be slow.
- It is roughly equivalent to `D3D12_HEAP_TYPE_UPLOAD`.
-
- Usage: Staging copy of resources used as transfer source.
- */
- VMA_MEMORY_USAGE_CPU_ONLY = 2,
- /**
- Memory that is both mappable on host (guarantees to be `HOST_VISIBLE`) and preferably fast to access by GPU.
- CPU access is typically uncached. Writes may be write-combined.
-
- Usage: Resources written frequently by host (dynamic), read by device. E.g. textures, vertex buffers, uniform buffers updated every frame or every draw call.
- */
- VMA_MEMORY_USAGE_CPU_TO_GPU = 3,
- /** Memory mappable on host (guarantees to be `HOST_VISIBLE`) and cached.
- It is roughly equivalent to `D3D12_HEAP_TYPE_READBACK`.
-
- Usage:
-
- - Resources written by device, read by host - results of some computations, e.g. screen capture, average scene luminance for HDR tone mapping.
- - Any resources read or accessed randomly on host, e.g. CPU-side copy of vertex buffer used as source of transfer, but also used for collision detection.
- */
- VMA_MEMORY_USAGE_GPU_TO_CPU = 4,
- VMA_MEMORY_USAGE_MAX_ENUM = 0x7FFFFFFF
-} VmaMemoryUsage;
-
-/// Flags to be passed as VmaAllocationCreateInfo::flags.
-typedef enum VmaAllocationCreateFlagBits {
- /** \brief Set this flag if the allocation should have its own memory block.
-
- Use it for special, big resources, like fullscreen images used as attachments.
-
- This flag must also be used for host visible resources that you want to map
- simultaneously because otherwise they might end up as regions of the same
- `VkDeviceMemory`, while mapping same `VkDeviceMemory` multiple times
- simultaneously is illegal.
-
- You should not use this flag if VmaAllocationCreateInfo::pool is not null.
- */
- VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT = 0x00000001,
-
- /** \brief Set this flag to only try to allocate from existing `VkDeviceMemory` blocks and never create new such block.
-
- If new allocation cannot be placed in any of the existing blocks, allocation
- fails with `VK_ERROR_OUT_OF_DEVICE_MEMORY` error.
-
- You should not use #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT and
- #VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT at the same time. It makes no sense.
-
- If VmaAllocationCreateInfo::pool is not null, this flag is implied and ignored. */
- VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT = 0x00000002,
- /** \brief Set this flag to use a memory that will be persistently mapped and retrieve pointer to it.
-
- Pointer to mapped memory will be returned through VmaAllocationInfo::pMappedData.
-
- It is valid to use this flag for an allocation made from a memory type that is not
- `HOST_VISIBLE`. This flag is then ignored and memory is not mapped. This is
- useful if you need an allocation that is efficient to use on GPU
- (`DEVICE_LOCAL`) and still want to map it directly if possible on platforms that
- support it (e.g. Intel GPU).
-
- You should not use this flag together with #VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT.
- */
- VMA_ALLOCATION_CREATE_MAPPED_BIT = 0x00000004,
- /** Allocation created with this flag can become lost as a result of another
- allocation with #VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT flag, so you
- must check it before use.
-
- To check if allocation is not lost, call vmaGetAllocationInfo() and check if
- VmaAllocationInfo::deviceMemory is not `VK_NULL_HANDLE`.
-
- For details about supporting lost allocations, see Lost Allocations
- chapter of User Guide on Main Page.
-
- You should not use this flag together with #VMA_ALLOCATION_CREATE_MAPPED_BIT.
- */
- VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT = 0x00000008,
- /** While creating allocation using this flag, other allocations that were
- created with flag #VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT can become lost.
-
- For details about supporting lost allocations, see Lost Allocations
- chapter of User Guide on Main Page.
- */
- VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT = 0x00000010,
- /** Set this flag to treat VmaAllocationCreateInfo::pUserData as pointer to a
- null-terminated string. Instead of copying pointer value, a local copy of the
- string is made and stored in allocation's `pUserData`. The string is automatically
- freed together with the allocation. It is also used in vmaBuildStatsString().
- */
- VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT = 0x00000020,
- /** Allocation will be created from upper stack in a double stack pool.
-
- This flag is only allowed for custom pools created with #VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT flag.
- */
- VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT = 0x00000040,
-
- /** Allocation strategy that chooses smallest possible free range for the
- allocation.
- */
- VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT = 0x00010000,
- /** Allocation strategy that chooses biggest possible free range for the
- allocation.
- */
- VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT = 0x00020000,
- /** Allocation strategy that chooses first suitable free range for the
- allocation.
-
- "First" doesn't necessarily means the one with smallest offset in memory,
- but rather the one that is easiest and fastest to find.
- */
- VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT = 0x00040000,
-
- /** Allocation strategy that tries to minimize memory usage.
- */
- VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT,
- /** Allocation strategy that tries to minimize allocation time.
- */
- VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT,
- /** Allocation strategy that tries to minimize memory fragmentation.
- */
- VMA_ALLOCATION_CREATE_STRATEGY_MIN_FRAGMENTATION_BIT = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT,
-
- /** A bit mask to extract only `STRATEGY` bits from entire set of flags.
- */
- VMA_ALLOCATION_CREATE_STRATEGY_MASK =
- VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT |
- VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT |
- VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT,
-
- VMA_ALLOCATION_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
-} VmaAllocationCreateFlagBits;
-typedef VkFlags VmaAllocationCreateFlags;
-
-typedef struct VmaAllocationCreateInfo
-{
- /// Use #VmaAllocationCreateFlagBits enum.
- VmaAllocationCreateFlags flags;
- /** \brief Intended usage of memory.
-
- You can leave #VMA_MEMORY_USAGE_UNKNOWN if you specify memory requirements in other way. \n
- If `pool` is not null, this member is ignored.
- */
- VmaMemoryUsage usage;
- /** \brief Flags that must be set in a Memory Type chosen for an allocation.
-
- Leave 0 if you specify memory requirements in other way. \n
- If `pool` is not null, this member is ignored.*/
- VkMemoryPropertyFlags requiredFlags;
- /** \brief Flags that preferably should be set in a memory type chosen for an allocation.
-
- Set to 0 if no additional flags are preferred. \n
- If `pool` is not null, this member is ignored. */
- VkMemoryPropertyFlags preferredFlags;
- /** \brief Bitmask containing one bit set for every memory type acceptable for this allocation.
-
- Value 0 is equivalent to `UINT32_MAX` - it means any memory type is accepted if
- it meets other requirements specified by this structure, with no further
- restrictions on memory type index. \n
- If `pool` is not null, this member is ignored.
- */
- uint32_t memoryTypeBits;
- /** \brief Pool that this allocation should be created in.
-
- Leave `VK_NULL_HANDLE` to allocate from default pool. If not null, members:
- `usage`, `requiredFlags`, `preferredFlags`, `memoryTypeBits` are ignored.
- */
- VmaPool pool;
- /** \brief Custom general-purpose pointer that will be stored in #VmaAllocation, can be read as VmaAllocationInfo::pUserData and changed using vmaSetAllocationUserData().
-
- If #VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT is used, it must be either
- null or a pointer to a null-terminated string. The string will then be copied to an
- internal buffer, so it doesn't need to be valid after the allocation call.
- */
- void* pUserData;
-} VmaAllocationCreateInfo;
-
-/**
-\brief Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
-
-This algorithm tries to find a memory type that:
-
-- Is allowed by memoryTypeBits.
-- Contains all the flags from pAllocationCreateInfo->requiredFlags.
-- Matches intended usage.
-- Has as many flags from pAllocationCreateInfo->preferredFlags as possible.
-
-\return Returns VK_ERROR_FEATURE_NOT_PRESENT if not found. Receiving such result
-from this function or any other allocating function probably means that your
-device doesn't support any memory type with requested features for the specific
-type of resource you want to use it for. Please check parameters of your
-resource, like image layout (OPTIMAL versus LINEAR) or mip level count.
-*/
-VkResult vmaFindMemoryTypeIndex(
- VmaAllocator allocator,
- uint32_t memoryTypeBits,
- const VmaAllocationCreateInfo* pAllocationCreateInfo,
- uint32_t* pMemoryTypeIndex);
-
-/**
-\brief Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
-
-It can be useful e.g. to determine value to be used as VmaPoolCreateInfo::memoryTypeIndex.
-It internally creates a temporary, dummy buffer that never has memory bound.
-It is just a convenience function, equivalent to calling:
-
-- `vkCreateBuffer`
-- `vkGetBufferMemoryRequirements`
-- `vmaFindMemoryTypeIndex`
-- `vkDestroyBuffer`
-*/
-VkResult vmaFindMemoryTypeIndexForBufferInfo(
- VmaAllocator allocator,
- const VkBufferCreateInfo* pBufferCreateInfo,
- const VmaAllocationCreateInfo* pAllocationCreateInfo,
- uint32_t* pMemoryTypeIndex);
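-
-/* Example (illustrative sketch, not part of the original header): using
-vmaFindMemoryTypeIndexForBufferInfo() to pick a memory type for a custom pool,
-assuming an already created VmaAllocator named `allocator`.
-
-\code
-VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
-bufCreateInfo.size = 65536;
-bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
-
-VmaAllocationCreateInfo allocCreateInfo = {};
-allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
-
-uint32_t memTypeIndex;
-VkResult res = vmaFindMemoryTypeIndexForBufferInfo(
-    allocator, &bufCreateInfo, &allocCreateInfo, &memTypeIndex);
-// memTypeIndex can now be used e.g. as VmaPoolCreateInfo::memoryTypeIndex.
-\endcode
-*/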
-
-/**
-\brief Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
-
-It can be useful e.g. to determine value to be used as VmaPoolCreateInfo::memoryTypeIndex.
-It internally creates a temporary, dummy image that never has memory bound.
-It is just a convenience function, equivalent to calling:
-
-- `vkCreateImage`
-- `vkGetImageMemoryRequirements`
-- `vmaFindMemoryTypeIndex`
-- `vkDestroyImage`
-*/
-VkResult vmaFindMemoryTypeIndexForImageInfo(
- VmaAllocator allocator,
- const VkImageCreateInfo* pImageCreateInfo,
- const VmaAllocationCreateInfo* pAllocationCreateInfo,
- uint32_t* pMemoryTypeIndex);
-
-/// Flags to be passed as VmaPoolCreateInfo::flags.
-typedef enum VmaPoolCreateFlagBits {
- /** \brief Use this flag if you always allocate only buffers and linear images or only optimal images out of this pool and so Buffer-Image Granularity can be ignored.
-
- This is an optional optimization flag.
-
- If you always allocate using vmaCreateBuffer(), vmaCreateImage(),
- vmaAllocateMemoryForBuffer(), then you don't need to use it because the allocator
- knows the exact type of your allocations, so it can handle Buffer-Image Granularity
- in the optimal way.
-
- If you also allocate using vmaAllocateMemoryForImage() or vmaAllocateMemory(),
- the exact type of such allocations is not known, so the allocator must be conservative
- in handling Buffer-Image Granularity, which can lead to suboptimal allocation
- (wasted memory). In that case, if you can make sure you always allocate only
- buffers and linear images or only optimal images out of this pool, use this flag
- to make allocator disregard Buffer-Image Granularity and so make allocations
- faster and more optimal.
- */
- VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT = 0x00000002,
-
- /** \brief Enables alternative, linear allocation algorithm in this pool.
-
- Specify this flag to enable linear allocation algorithm, which always creates
- new allocations after last one and doesn't reuse space from allocations freed in
- between. It trades memory consumption for simplified algorithm and data
- structure, which has better performance and uses less memory for metadata.
-
- By using this flag, you can achieve behavior of free-at-once, stack,
- ring buffer, and double stack. For details, see documentation chapter
- \ref linear_algorithm.
-
- When using this flag, you must specify VmaPoolCreateInfo::maxBlockCount == 1 (or 0 for default).
-
- For more details, see [Linear allocation algorithm](@ref linear_algorithm).
- */
- VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT = 0x00000004,
-
- /** \brief Enables alternative, buddy allocation algorithm in this pool.
-
- It operates on a tree of blocks, each having size that is a power of two and
- a half of its parent's size. Compared to the default algorithm, this one provides
- faster allocation and deallocation and decreased external fragmentation,
- at the expense of more memory wasted (internal fragmentation).
-
- For more details, see [Buddy allocation algorithm](@ref buddy_algorithm).
- */
- VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT = 0x00000008,
-
- /** Bit mask to extract only `ALGORITHM` bits from entire set of flags.
- */
- VMA_POOL_CREATE_ALGORITHM_MASK =
- VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT |
- VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT,
-
- VMA_POOL_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
-} VmaPoolCreateFlagBits;
-typedef VkFlags VmaPoolCreateFlags;
-
-/** \brief Describes parameter of created #VmaPool.
-*/
-typedef struct VmaPoolCreateInfo {
- /** \brief Vulkan memory type index to allocate this pool from.
- */
- uint32_t memoryTypeIndex;
- /** \brief Use combination of #VmaPoolCreateFlagBits.
- */
- VmaPoolCreateFlags flags;
- /** \brief Size of a single `VkDeviceMemory` block to be allocated as part of this pool, in bytes. Optional.
-
- Specify nonzero to set explicit, constant size of memory blocks used by this
- pool.
-
- Leave 0 to use default and let the library manage block sizes automatically.
- Sizes of particular blocks may vary.
- */
- VkDeviceSize blockSize;
- /** \brief Minimum number of blocks to be always allocated in this pool, even if they stay empty.
-
- Set to 0 to have no preallocated blocks and allow the pool to be completely empty.
- */
- size_t minBlockCount;
- /** \brief Maximum number of blocks that can be allocated in this pool. Optional.
-
- Set to 0 to use default, which is `SIZE_MAX`, which means no limit.
-
- Set to the same value as VmaPoolCreateInfo::minBlockCount to have a fixed amount of memory allocated
- throughout the whole lifetime of this pool.
- */
- size_t maxBlockCount;
- /** \brief Maximum number of additional frames that are in use at the same time as current frame.
-
- This value is used only when you make allocations with
- #VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT flag. Such allocation cannot become
- lost if allocation.lastUseFrameIndex >= allocator.currentFrameIndex - frameInUseCount.
-
- For example, if you double-buffer your command buffers, so resources used for
- rendering in previous frame may still be in use by the GPU at the moment you
- allocate resources needed for the current frame, set this value to 1.
-
- If you want to allow any allocations other than used in the current frame to
- become lost, set this value to 0.
- */
- uint32_t frameInUseCount;
-} VmaPoolCreateInfo;
-
-/** \brief Describes parameter of existing #VmaPool.
-*/
-typedef struct VmaPoolStats {
- /** \brief Total amount of `VkDeviceMemory` allocated from Vulkan for this pool, in bytes.
- */
- VkDeviceSize size;
- /** \brief Total number of bytes in the pool not used by any #VmaAllocation.
- */
- VkDeviceSize unusedSize;
- /** \brief Number of #VmaAllocation objects created from this pool that were not destroyed or lost.
- */
- size_t allocationCount;
- /** \brief Number of contiguous memory ranges in the pool not used by any #VmaAllocation.
- */
- size_t unusedRangeCount;
- /** \brief Size of the largest contiguous free memory region available for new allocation.
-
- Making a new allocation of that size is not guaranteed to succeed because of
- possible additional margin required to respect alignment and buffer/image
- granularity.
- */
- VkDeviceSize unusedRangeSizeMax;
- /** \brief Number of `VkDeviceMemory` blocks allocated for this pool.
- */
- size_t blockCount;
-} VmaPoolStats;
-
-/** \brief Allocates Vulkan device memory and creates #VmaPool object.
-
-@param allocator Allocator object.
-@param pCreateInfo Parameters of pool to create.
-@param[out] pPool Handle to created pool.
-*/
-VkResult vmaCreatePool(
- VmaAllocator allocator,
- const VmaPoolCreateInfo* pCreateInfo,
- VmaPool* pPool);
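-
-/* Example (illustrative sketch, not part of the original header): creating a custom
-pool, assuming an existing VmaAllocator `allocator` and a valid memory type index
-`memTypeIndex`, e.g. found with vmaFindMemoryTypeIndexForBufferInfo().
-
-\code
-VmaPoolCreateInfo poolCreateInfo = {};
-poolCreateInfo.memoryTypeIndex = memTypeIndex;
-poolCreateInfo.blockSize = 64ull * 1024 * 1024; // optional: fixed 64 MiB blocks
-poolCreateInfo.maxBlockCount = 2;               // optional: cap total size at 128 MiB
-
-VmaPool pool;
-VkResult res = vmaCreatePool(allocator, &poolCreateInfo, &pool);
-// ... allocate from the pool via VmaAllocationCreateInfo::pool ...
-vmaDestroyPool(allocator, pool);
-\endcode
-*/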
-
-/** \brief Destroys #VmaPool object and frees Vulkan device memory.
-*/
-void vmaDestroyPool(
- VmaAllocator allocator,
- VmaPool pool);
-
-/** \brief Retrieves statistics of existing #VmaPool object.
-
-@param allocator Allocator object.
-@param pool Pool object.
-@param[out] pPoolStats Statistics of specified pool.
-*/
-void vmaGetPoolStats(
- VmaAllocator allocator,
- VmaPool pool,
- VmaPoolStats* pPoolStats);
-
-/** \brief Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInfo::frameInUseCount back from now.
-
-@param allocator Allocator object.
-@param pool Pool.
-@param[out] pLostAllocationCount Number of allocations marked as lost. Optional - pass null if you don't need this information.
-*/
-void vmaMakePoolAllocationsLost(
- VmaAllocator allocator,
- VmaPool pool,
- size_t* pLostAllocationCount);
-
-/** \brief Checks magic number in margins around all allocations in given memory pool in search for corruptions.
-
-Corruption detection is enabled only when `VMA_DEBUG_DETECT_CORRUPTION` macro is defined to nonzero,
-`VMA_DEBUG_MARGIN` is defined to nonzero and the pool is created in memory type that is
-`HOST_VISIBLE` and `HOST_COHERENT`. For more information, see [Corruption detection](@ref debugging_memory_usage_corruption_detection).
-
-Possible return values:
-
-- `VK_ERROR_FEATURE_NOT_PRESENT` - corruption detection is not enabled for specified pool.
-- `VK_SUCCESS` - corruption detection has been performed and succeeded.
-- `VK_ERROR_VALIDATION_FAILED_EXT` - corruption detection has been performed and found memory corruptions around one of the allocations.
- `VMA_ASSERT` is also fired in that case.
-- Other value: Error returned by Vulkan, e.g. memory mapping failure.
-*/
-VkResult vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool);
-
-/** \struct VmaAllocation
-\brief Represents single memory allocation.
-
-It may be either a dedicated block of `VkDeviceMemory` or a specific region of a bigger block of this type,
-plus a unique offset.
-
-There are multiple ways to create such object.
-You need to fill structure VmaAllocationCreateInfo.
-For more information see [Choosing memory type](@ref choosing_memory_type).
-
-Although the library provides convenience functions that create a Vulkan buffer or image,
-allocate memory for it and bind them together,
-binding of the allocation to a buffer or an image is out of scope of the allocation itself.
-An allocation object can exist without a buffer/image bound to it,
-binding can be done manually by the user, and the buffer/image can be destroyed
-independently of the allocation.
-
-The object also remembers its size and some other information.
-To retrieve this information, use function vmaGetAllocationInfo() and inspect
-returned structure VmaAllocationInfo.
-
-Some kinds of allocations can be in a lost state.
-For more information, see [Lost allocations](@ref lost_allocations).
-*/
-VK_DEFINE_HANDLE(VmaAllocation)
-
-/** \brief Parameters of #VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
-*/
-typedef struct VmaAllocationInfo {
- /** \brief Memory type index that this allocation was allocated from.
-
- It never changes.
- */
- uint32_t memoryType;
- /** \brief Handle to Vulkan memory object.
-
- Same memory object can be shared by multiple allocations.
-
- It can change after call to vmaDefragment() if this allocation is passed to the function, or if allocation is lost.
-
- If the allocation is lost, it is equal to `VK_NULL_HANDLE`.
- */
- VkDeviceMemory deviceMemory;
- /** \brief Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
-
- It can change after call to vmaDefragment() if this allocation is passed to the function, or if allocation is lost.
- */
- VkDeviceSize offset;
- /** \brief Size of this allocation, in bytes.
-
- It never changes, unless allocation is lost.
- */
- VkDeviceSize size;
- /** \brief Pointer to the beginning of this allocation as mapped data.
-
- If the allocation hasn't been mapped using vmaMapMemory() and hasn't been
- created with #VMA_ALLOCATION_CREATE_MAPPED_BIT flag, this value is null.
-
- It can change after call to vmaMapMemory(), vmaUnmapMemory().
- It can also change after call to vmaDefragment() if this allocation is passed to the function.
- */
- void* pMappedData;
- /** \brief Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vmaSetAllocationUserData().
-
- It can change after call to vmaSetAllocationUserData() for this allocation.
- */
- void* pUserData;
-} VmaAllocationInfo;
-
-/** \brief General purpose memory allocation.
-
-@param[out] pAllocation Handle to allocated memory.
-@param[out] pAllocationInfo Optional. Information about allocated memory. It can be later fetched using function vmaGetAllocationInfo().
-
-You should free the memory using vmaFreeMemory() or vmaFreeMemoryPages().
-
-It is recommended to use vmaAllocateMemoryForBuffer(), vmaAllocateMemoryForImage(),
-vmaCreateBuffer(), vmaCreateImage() instead whenever possible.
-*/
-VkResult vmaAllocateMemory(
- VmaAllocator allocator,
- const VkMemoryRequirements* pVkMemoryRequirements,
- const VmaAllocationCreateInfo* pCreateInfo,
- VmaAllocation* pAllocation,
- VmaAllocationInfo* pAllocationInfo);
-
-/** \brief General purpose memory allocation for multiple allocation objects at once.
-
-@param allocator Allocator object.
-@param pVkMemoryRequirements Memory requirements for each allocation.
-@param pCreateInfo Creation parameters for each allocation.
-@param allocationCount Number of allocations to make.
-@param[out] pAllocations Pointer to array that will be filled with handles to created allocations.
-@param[out] pAllocationInfo Optional. Pointer to array that will be filled with parameters of created allocations.
-
-You should free the memory using vmaFreeMemory() or vmaFreeMemoryPages().
-
-Word "pages" is just a suggestion to use this function to allocate pieces of memory needed for sparse binding.
-It is just a general purpose allocation function able to make multiple allocations at once.
-It may be internally optimized to be more efficient than calling vmaAllocateMemory() `allocationCount` times.
-
-All allocations are made using same parameters. All of them are created out of the same memory pool and type.
-If any allocation fails, all allocations already made within this function call are also freed, so that when
-returned result is not `VK_SUCCESS`, `pAllocations` array is always entirely filled with `VK_NULL_HANDLE`.
-*/
-VkResult vmaAllocateMemoryPages(
- VmaAllocator allocator,
- const VkMemoryRequirements* pVkMemoryRequirements,
- const VmaAllocationCreateInfo* pCreateInfo,
- size_t allocationCount,
- VmaAllocation* pAllocations,
- VmaAllocationInfo* pAllocationInfo);
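-
-/* Example (illustrative sketch, not part of the original header): making several
-identical allocations at once, e.g. for sparse binding, assuming an existing
-`allocator` and VkMemoryRequirements `memReq` of the sparse resource. All pages
-share the same creation parameters.
-
-\code
-VmaAllocationCreateInfo allocCreateInfo = {};
-allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
-
-const size_t pageCount = 8;
-VmaAllocation allocations[pageCount];
-VkResult res = vmaAllocateMemoryPages(allocator, &memReq, &allocCreateInfo,
-    pageCount, allocations, nullptr);
-// ... use the allocations, then free them all at once:
-vmaFreeMemoryPages(allocator, pageCount, allocations);
-\endcode
-*/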
-
-/**
-@param[out] pAllocation Handle to allocated memory.
-@param[out] pAllocationInfo Optional. Information about allocated memory. It can be later fetched using function vmaGetAllocationInfo().
-
-You should free the memory using vmaFreeMemory().
-*/
-VkResult vmaAllocateMemoryForBuffer(
- VmaAllocator allocator,
- VkBuffer buffer,
- const VmaAllocationCreateInfo* pCreateInfo,
- VmaAllocation* pAllocation,
- VmaAllocationInfo* pAllocationInfo);
-
-/// Function similar to vmaAllocateMemoryForBuffer().
-VkResult vmaAllocateMemoryForImage(
- VmaAllocator allocator,
- VkImage image,
- const VmaAllocationCreateInfo* pCreateInfo,
- VmaAllocation* pAllocation,
- VmaAllocationInfo* pAllocationInfo);
-
-/** \brief Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
-
-Passing `VK_NULL_HANDLE` as `allocation` is valid. Such function call is just skipped.
-*/
-void vmaFreeMemory(
- VmaAllocator allocator,
- VmaAllocation allocation);
-
-/** \brief Frees memory and destroys multiple allocations.
-
-Word "pages" is just a suggestion to use this function to free pieces of memory used for sparse binding.
-It is just a general purpose function to free memory and destroy allocations made using e.g. vmaAllocateMemory(),
-vmaAllocateMemoryPages() and other functions.
-It may be internally optimized to be more efficient than calling vmaFreeMemory() `allocationCount` times.
-
-Allocations in `pAllocations` array can come from any memory pools and types.
-Passing `VK_NULL_HANDLE` as elements of `pAllocations` array is valid. Such entries are just skipped.
-*/
-void vmaFreeMemoryPages(
- VmaAllocator allocator,
- size_t allocationCount,
- VmaAllocation* pAllocations);
-
-/** \brief Tries to resize an allocation in place, if there is enough free memory after it.
-
-Tries to change allocation's size without moving or reallocating it.
-You can both shrink and grow allocation size.
-When growing, it succeeds only when the allocation belongs to a memory block with enough
-free space after it.
-
-Returns `VK_SUCCESS` if allocation's size has been successfully changed.
-Returns `VK_ERROR_OUT_OF_POOL_MEMORY` if allocation's size could not be changed.
-
-After successful call to this function, VmaAllocationInfo::size of this allocation changes.
-All other parameters stay the same: memory pool and type, alignment, offset, mapped pointer.
-
-- Calling this function on allocation that is in lost state fails with result `VK_ERROR_VALIDATION_FAILED_EXT`.
-- Calling this function with `newSize` same as current allocation size does nothing and returns `VK_SUCCESS`.
-- Resizing dedicated allocations, as well as allocations created in pools that use linear
- or buddy algorithm, is not supported.
- The function returns `VK_ERROR_FEATURE_NOT_PRESENT` in such cases.
- Support may be added in the future.
-*/
-VkResult vmaResizeAllocation(
- VmaAllocator allocator,
- VmaAllocation allocation,
- VkDeviceSize newSize);
-
-/** \brief Returns current information about specified allocation and atomically marks it as used in current frame.
-
-Current parameters of given allocation are returned in `pAllocationInfo`.
-
-This function also atomically "touches" allocation - marks it as used in current frame,
-just like vmaTouchAllocation().
-If the allocation is in lost state, `pAllocationInfo->deviceMemory == VK_NULL_HANDLE`.
-
-Although this function uses atomics and doesn't lock any mutex, so it should be quite efficient,
-you may still want to avoid calling it too often.
-
-- You can retrieve same VmaAllocationInfo structure while creating your resource, from function
- vmaCreateBuffer(), vmaCreateImage(). You can remember it if you are sure parameters don't change
- (e.g. due to defragmentation or allocation becoming lost).
-- If you just want to check if allocation is not lost, vmaTouchAllocation() will work faster.
-*/
-void vmaGetAllocationInfo(
- VmaAllocator allocator,
- VmaAllocation allocation,
- VmaAllocationInfo* pAllocationInfo);
-
-/** \brief Returns `VK_TRUE` if allocation is not lost and atomically marks it as used in current frame.
-
-If the allocation has been created with #VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT flag,
-this function returns `VK_TRUE` if it's not in lost state, so it can still be used.
-It then also atomically "touches" the allocation - marks it as used in current frame,
-so that you can be sure it won't become lost in current frame or next `frameInUseCount` frames.
-
-If the allocation is in lost state, the function returns `VK_FALSE`.
-Memory of such allocation, as well as buffer or image bound to it, should not be used.
-Lost allocation and the buffer/image still need to be destroyed.
-
-If the allocation has been created without #VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT flag,
-this function always returns `VK_TRUE`.
-*/
-VkBool32 vmaTouchAllocation(
- VmaAllocator allocator,
- VmaAllocation allocation);
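-
-/* Example (illustrative sketch, not part of the original header): a typical
-per-frame check for an allocation created with #VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT,
-assuming existing `allocator`, `allocation`, and a hypothetical user function
-RecreateBuffer() that destroys and recreates the resource.
-
-\code
-if(vmaTouchAllocation(allocator, allocation) == VK_FALSE)
-{
-    // Allocation is lost: destroy the old buffer and allocation, then recreate them.
-    RecreateBuffer();
-}
-\endcode
-*/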
-
-/** \brief Sets pUserData in given allocation to new value.
-
-If the allocation was created with VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT,
-pUserData must be either null, or pointer to a null-terminated string. The function
-makes local copy of the string and sets it as allocation's `pUserData`. String
-passed as pUserData doesn't need to be valid for whole lifetime of the allocation -
-you can free it after this call. String previously pointed by allocation's
-pUserData is freed from memory.
-
-If the flag was not used, the value of pointer `pUserData` is just copied to
-allocation's `pUserData`. It is opaque, so you can use it however you want - e.g.
-as a pointer, ordinal number or some handle to you own data.
-*/
-void vmaSetAllocationUserData(
- VmaAllocator allocator,
- VmaAllocation allocation,
- void* pUserData);
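-
-/* Example (illustrative sketch, not part of the original header): attaching a debug
-name to an allocation created with #VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT,
-assuming existing `allocator` and `allocation`.
-
-\code
-vmaSetAllocationUserData(allocator, allocation, (void*)"MyTexture.Color");
-
-VmaAllocationInfo allocInfo;
-vmaGetAllocationInfo(allocator, allocation, &allocInfo);
-const char* name = (const char*)allocInfo.pUserData; // "MyTexture.Color"
-\endcode
-*/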
-
-/** \brief Creates new allocation that is in lost state from the beginning.
-
-It can be useful if you need a dummy, non-null allocation.
-
-You still need to destroy created object using vmaFreeMemory().
-
-Returned allocation is not tied to any specific memory pool or memory type and
-not bound to any image or buffer. It has size = 0. It cannot be turned into
-a real, non-empty allocation.
-*/
-void vmaCreateLostAllocation(
- VmaAllocator allocator,
- VmaAllocation* pAllocation);
-
-/** \brief Maps memory represented by given allocation and returns pointer to it.
-
-Maps memory represented by given allocation to make it accessible to CPU code.
-When succeeded, `*ppData` contains pointer to first byte of this memory.
-If the allocation is part of bigger `VkDeviceMemory` block, the pointer is
-correctly offseted to the beginning of region assigned to this particular
-allocation.
-
-Mapping is internally reference-counted and synchronized, so even though the raw Vulkan
-function `vkMapMemory()` cannot be used to map the same block of `VkDeviceMemory`
-multiple times simultaneously, it is safe to call this function on allocations
-assigned to the same memory block. Actual Vulkan memory will be mapped on first
-mapping and unmapped on last unmapping.
-
-If the function succeeded, you must call vmaUnmapMemory() to unmap the
-allocation when mapping is no longer needed or before freeing the allocation, at
-the latest.
-
-It is also safe to call this function multiple times on the same allocation. You
-must call vmaUnmapMemory() same number of times as you called vmaMapMemory().
-
-It is also safe to call this function on allocation created with
-#VMA_ALLOCATION_CREATE_MAPPED_BIT flag. Its memory stays mapped all the time.
-You must still call vmaUnmapMemory() same number of times as you called
-vmaMapMemory(). You must not call vmaUnmapMemory() additional time to free the
-"0-th" mapping made automatically due to #VMA_ALLOCATION_CREATE_MAPPED_BIT flag.
-
-This function fails when used on allocation made in memory type that is not
-`HOST_VISIBLE`.
-
-This function always fails when called for allocation that was created with
-#VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT flag. Such allocations cannot be
-mapped.
-*/
-VkResult vmaMapMemory(
- VmaAllocator allocator,
- VmaAllocation allocation,
- void** ppData);
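-
-/* Example (illustrative sketch, not part of the original header): uploading data to
-a HOST_VISIBLE allocation, assuming existing `allocator`, `allocation`, and a source
-buffer `srcData` of `srcSize` bytes.
-
-\code
-void* mappedData;
-VkResult res = vmaMapMemory(allocator, allocation, &mappedData);
-if(res == VK_SUCCESS)
-{
-    memcpy(mappedData, srcData, (size_t)srcSize);
-    vmaUnmapMemory(allocator, allocation);
-    // If the memory type is not HOST_COHERENT, also call vmaFlushAllocation().
-}
-\endcode
-*/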
-
-/** \brief Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
-
-For details, see description of vmaMapMemory().
-*/
-void vmaUnmapMemory(
- VmaAllocator allocator,
- VmaAllocation allocation);
-
-/** \brief Flushes memory of given allocation.
-
-Calls `vkFlushMappedMemoryRanges()` for memory associated with given range of given allocation.
-
-- `offset` must be relative to the beginning of allocation.
-- `size` can be `VK_WHOLE_SIZE`. It means all memory from `offset` to the end of given allocation.
-- `offset` and `size` don't have to be aligned.
- They are internally rounded down/up to a multiple of `nonCoherentAtomSize`.
-- If `size` is 0, this call is ignored.
-- If memory type that the `allocation` belongs to is not `HOST_VISIBLE` or it is `HOST_COHERENT`,
- this call is ignored.
-*/
-void vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
-
-/** \brief Invalidates memory of given allocation.
-
-Calls `vkInvalidateMappedMemoryRanges()` for memory associated with given range of given allocation.
-
-- `offset` must be relative to the beginning of allocation.
-- `size` can be `VK_WHOLE_SIZE`. It means all memory from `offset` to the end of given allocation.
-- `offset` and `size` don't have to be aligned.
- They are internally rounded down/up to a multiple of `nonCoherentAtomSize`.
-- If `size` is 0, this call is ignored.
-- If memory type that the `allocation` belongs to is not `HOST_VISIBLE` or it is `HOST_COHERENT`,
- this call is ignored.
-*/
-void vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
-
-/** \brief Checks magic number in margins around all allocations in given memory types (in both default and custom pools) in search for corruptions.
-
-@param memoryTypeBits Bit mask, where each bit set means that a memory type with that index should be checked.
-
-Corruption detection is enabled only when `VMA_DEBUG_DETECT_CORRUPTION` macro is defined to nonzero,
-`VMA_DEBUG_MARGIN` is defined to nonzero and only for memory types that are
-`HOST_VISIBLE` and `HOST_COHERENT`. For more information, see [Corruption detection](@ref debugging_memory_usage_corruption_detection).
-
-Possible return values:
-
-- `VK_ERROR_FEATURE_NOT_PRESENT` - corruption detection is not enabled for any of specified memory types.
-- `VK_SUCCESS` - corruption detection has been performed and succeeded.
-- `VK_ERROR_VALIDATION_FAILED_EXT` - corruption detection has been performed and found memory corruptions around one of the allocations.
- `VMA_ASSERT` is also fired in that case.
-- Other value: Error returned by Vulkan, e.g. memory mapping failure.
-*/
-VkResult vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits);
-
-/** \struct VmaDefragmentationContext
-\brief Opaque object that represents a started defragmentation process.
-
-Fill structure #VmaDefragmentationInfo2 and call function vmaDefragmentationBegin() to create it.
-Call function vmaDefragmentationEnd() to destroy it.
-*/
-VK_DEFINE_HANDLE(VmaDefragmentationContext)
-
-/// Flags to be used in vmaDefragmentationBegin(). None at the moment. Reserved for future use.
-typedef enum VmaDefragmentationFlagBits {
- VMA_DEFRAGMENTATION_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
-} VmaDefragmentationFlagBits;
-typedef VkFlags VmaDefragmentationFlags;
-
-/** \brief Parameters for defragmentation.
-
-To be used with function vmaDefragmentationBegin().
-*/
-typedef struct VmaDefragmentationInfo2 {
- /** \brief Reserved for future use. Should be 0.
- */
- VmaDefragmentationFlags flags;
- /** \brief Number of allocations in `pAllocations` array.
- */
- uint32_t allocationCount;
- /** \brief Pointer to array of allocations that can be defragmented.
-
- The array should have `allocationCount` elements.
- The array should not contain nulls.
- Elements in the array should be unique - same allocation cannot occur twice.
- It is safe to pass allocations that are in the lost state - they are ignored.
- All allocations not present in this array are considered non-moveable during this defragmentation.
- */
- VmaAllocation* pAllocations;
- /** \brief Optional, output. Pointer to array that will be filled with information whether the allocation at certain index has been changed during defragmentation.
-
- The array should have `allocationCount` elements.
- You can pass null if you are not interested in this information.
- */
- VkBool32* pAllocationsChanged;
- /** \brief Number of pools in `pPools` array.
- */
- uint32_t poolCount;
- /** \brief Either null or pointer to array of pools to be defragmented.
-
- All the allocations in the specified pools can be moved during defragmentation
- and there is no way to check if they were really moved as in `pAllocationsChanged`,
- so you must query all the allocations in all these pools for new `VkDeviceMemory`
- and offset using vmaGetAllocationInfo() if you might need to recreate buffers
- and images bound to them.
-
- The array should have `poolCount` elements.
- The array should not contain nulls.
- Elements in the array should be unique - same pool cannot occur twice.
-
- Using this array is equivalent to specifying all allocations from the pools in `pAllocations`.
- It might be more efficient.
- */
- VmaPool* pPools;
- /** \brief Maximum total number of bytes that can be copied while moving allocations to different places using transfers on CPU side, like `memcpy()`, `memmove()`.
-
- `VK_WHOLE_SIZE` means no limit.
- */
- VkDeviceSize maxCpuBytesToMove;
- /** \brief Maximum number of allocations that can be moved to a different place using transfers on CPU side, like `memcpy()`, `memmove()`.
-
- `UINT32_MAX` means no limit.
- */
- uint32_t maxCpuAllocationsToMove;
- /** \brief Maximum total number of bytes that can be copied while moving allocations to different places using transfers on GPU side, posted to `commandBuffer`.
-
- `VK_WHOLE_SIZE` means no limit.
- */
- VkDeviceSize maxGpuBytesToMove;
- /** \brief Maximum number of allocations that can be moved to a different place using transfers on GPU side, posted to `commandBuffer`.
-
- `UINT32_MAX` means no limit.
- */
- uint32_t maxGpuAllocationsToMove;
- /** \brief Optional. Command buffer where GPU copy commands will be posted.
-
- If not null, it must be a valid command buffer handle that supports Transfer queue type.
- It must be in the recording state and outside of a render pass instance.
- You need to submit it and make sure it finished execution before calling vmaDefragmentationEnd().
-
- Passing null means that only CPU defragmentation will be performed.
- */
- VkCommandBuffer commandBuffer;
-} VmaDefragmentationInfo2;
-
-/** \brief Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
-
-\deprecated This is a part of the old interface. It is recommended to use structure #VmaDefragmentationInfo2 and function vmaDefragmentationBegin() instead.
-*/
-typedef struct VmaDefragmentationInfo {
- /** \brief Maximum total number of bytes that can be copied while moving allocations to different places.
-
- Default is `VK_WHOLE_SIZE`, which means no limit.
- */
- VkDeviceSize maxBytesToMove;
- /** \brief Maximum number of allocations that can be moved to a different place.
-
- Default is `UINT32_MAX`, which means no limit.
- */
- uint32_t maxAllocationsToMove;
-} VmaDefragmentationInfo;
-
-/** \brief Statistics returned by function vmaDefragment(). */
-typedef struct VmaDefragmentationStats {
- /// Total number of bytes that have been copied while moving allocations to different places.
- VkDeviceSize bytesMoved;
- /// Total number of bytes that have been released to the system by freeing empty `VkDeviceMemory` objects.
- VkDeviceSize bytesFreed;
- /// Number of allocations that have been moved to different places.
- uint32_t allocationsMoved;
- /// Number of empty `VkDeviceMemory` objects that have been released to the system.
- uint32_t deviceMemoryBlocksFreed;
-} VmaDefragmentationStats;
-
-/** \brief Begins defragmentation process.
-
-@param allocator Allocator object.
-@param pInfo Structure filled with parameters of defragmentation.
-@param[out] pStats Optional. Statistics of defragmentation. You can pass null if you are not interested in this information.
-@param[out] pContext Context object that must be passed to vmaDefragmentationEnd() to finish defragmentation.
-@return `VK_SUCCESS` and `*pContext == null` if defragmentation finished within this function call. `VK_NOT_READY` and `*pContext != null` if defragmentation has been started and you need to call vmaDefragmentationEnd() to finish it. Negative value in case of error.
-
-Use this function instead of old, deprecated vmaDefragment().
-
-Warning! Between the call to vmaDefragmentationBegin() and vmaDefragmentationEnd():
-
-- You should not use any of the allocations passed as `pInfo->pAllocations` or
- any allocations that belong to pools passed as `pInfo->pPools`,
- including calling vmaGetAllocationInfo(), vmaTouchAllocation(), or accessing
- their data.
-- Some mutexes protecting internal data structures may be locked, so trying to
- make or free any allocations, bind buffers or images, map memory, or launch
- another simultaneous defragmentation in between may cause stall (when done on
- another thread) or deadlock (when done on the same thread), unless you are
- 100% sure that defragmented allocations are in different pools.
-- Information returned via `pStats` and `pInfo->pAllocationsChanged` is undefined.
- It becomes valid after the call to vmaDefragmentationEnd().
-- If `pInfo->commandBuffer` is not null, you must submit that command buffer
- and make sure it finished execution before calling vmaDefragmentationEnd().
-*/
-VkResult vmaDefragmentationBegin(
- VmaAllocator allocator,
- const VmaDefragmentationInfo2* pInfo,
- VmaDefragmentationStats* pStats,
- VmaDefragmentationContext *pContext);
-
-/** \brief Ends defragmentation process.
-
-Use this function to finish defragmentation started by vmaDefragmentationBegin().
-It is safe to pass `context == null`. The function then does nothing.
-*/
-VkResult vmaDefragmentationEnd(
- VmaAllocator allocator,
- VmaDefragmentationContext context);
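-
-/* Example (illustrative sketch, not part of the original header): CPU-only
-defragmentation of a set of allocations, assuming existing `allocator` and an array
-`allocations` of `allocCount` movable allocations. Buffers/images bound to moved
-allocations must be recreated and rebound by the caller afterwards.
-
-\code
-std::vector<VkBool32> allocationsChanged(allocCount);
-
-VmaDefragmentationInfo2 defragInfo = {};
-defragInfo.allocationCount = allocCount;
-defragInfo.pAllocations = allocations;
-defragInfo.pAllocationsChanged = allocationsChanged.data();
-defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
-defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
-defragInfo.commandBuffer = VK_NULL_HANDLE; // CPU-side moves only
-
-VmaDefragmentationContext defragCtx;
-vmaDefragmentationBegin(allocator, &defragInfo, nullptr, &defragCtx);
-vmaDefragmentationEnd(allocator, defragCtx);
-
-// For each i where allocationsChanged[i] == VK_TRUE, destroy the old buffer/image
-// bound to allocations[i], query the new memory with vmaGetAllocationInfo(),
-// recreate the resource and bind it again.
-\endcode
-*/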
-
-/** \brief Deprecated. Compacts memory by moving allocations.
-
-@param pAllocations Array of allocations that can be moved during this compaction.
-@param allocationCount Number of elements in pAllocations and pAllocationsChanged arrays.
-@param[out] pAllocationsChanged Array of boolean values that will indicate whether matching allocation in pAllocations array has been moved. This parameter is optional. Pass null if you don't need this information.
-@param pDefragmentationInfo Configuration parameters. Optional - pass null to use default values.
-@param[out] pDefragmentationStats Statistics returned by the function. Optional - pass null if you don't need this information.
-@return `VK_SUCCESS` if completed, negative error code in case of error.
-
-\deprecated This is a part of the old interface. It is recommended to use structure #VmaDefragmentationInfo2 and function vmaDefragmentationBegin() instead.
-
-This function works by moving allocations to different places (different
-`VkDeviceMemory` objects and/or different offsets) in order to optimize memory
-usage. Only allocations that are in `pAllocations` array can be moved. All other
-allocations are considered nonmovable in this call. Basic rules:
-
-- Only allocations made in memory types that have
- `VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT` and `VK_MEMORY_PROPERTY_HOST_COHERENT_BIT`
- flags can be compacted. You may pass other allocations but it makes no sense -
- these will never be moved.
-- Custom pools created with #VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT or
- #VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT flag are not defragmented. Allocations
- passed to this function that come from such pools are ignored.
-- Allocations created with #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT or
- created as dedicated allocations for any other reason are also ignored.
-- Both allocations made with or without #VMA_ALLOCATION_CREATE_MAPPED_BIT
- flag can be compacted. If not persistently mapped, memory will be mapped
- temporarily inside this function if needed.
-- You must not pass same #VmaAllocation object multiple times in `pAllocations` array.
-
-The function also frees empty `VkDeviceMemory` blocks.
-
-Warning: This function may be time-consuming, so you shouldn't call it too often
-(like after every resource creation/destruction).
-You can call it on special occasions (like when reloading a game level or
-when you just destroyed a lot of objects). Calling it every frame may be OK, but
-you should measure that on your platform.
-
-For more information, see [Defragmentation](@ref defragmentation) chapter.
-*/
-VkResult vmaDefragment(
- VmaAllocator allocator,
- VmaAllocation* pAllocations,
- size_t allocationCount,
- VkBool32* pAllocationsChanged,
- const VmaDefragmentationInfo *pDefragmentationInfo,
- VmaDefragmentationStats* pDefragmentationStats);
-
-/** \brief Binds buffer to allocation.
-
-Binds specified buffer to region of memory represented by specified allocation.
-Gets `VkDeviceMemory` handle and offset from the allocation.
-If you want to create a buffer, allocate memory for it and bind them together separately,
-you should use this function for binding instead of standard `vkBindBufferMemory()`,
-because it ensures proper synchronization so that when a `VkDeviceMemory` object is used by multiple
-allocations, calls to `vkBind*Memory()` or `vkMapMemory()` won't happen from multiple threads simultaneously
-(which is illegal in Vulkan).
-
-It is recommended to use function vmaCreateBuffer() instead of this one.
-*/
-VkResult vmaBindBufferMemory(
- VmaAllocator allocator,
- VmaAllocation allocation,
- VkBuffer buffer);
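-
-/* Example (illustrative sketch, not part of the original header): creating a buffer,
-allocating memory for it, and binding them separately, assuming existing `device`,
-`allocator`, and a filled VkBufferCreateInfo `bufCreateInfo`.
-
-\code
-VkBuffer buffer;
-vkCreateBuffer(device, &bufCreateInfo, nullptr, &buffer);
-
-VmaAllocationCreateInfo allocCreateInfo = {};
-allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
-
-VmaAllocation allocation;
-vmaAllocateMemoryForBuffer(allocator, buffer, &allocCreateInfo, &allocation, nullptr);
-
-// Use vmaBindBufferMemory() instead of vkBindBufferMemory() so that binding is
-// properly synchronized when the VkDeviceMemory block is shared by other allocations.
-vmaBindBufferMemory(allocator, allocation, buffer);
-\endcode
-*/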
-
-/** \brief Binds image to allocation.
-
-Binds specified image to region of memory represented by specified allocation.
-Gets `VkDeviceMemory` handle and offset from the allocation.
-If you want to create an image, allocate memory for it and bind them together separately,
-you should use this function for binding instead of standard `vkBindImageMemory()`,
-because it ensures proper synchronization so that when a `VkDeviceMemory` object is used by multiple
-allocations, calls to `vkBind*Memory()` or `vkMapMemory()` won't happen from multiple threads simultaneously
-(which is illegal in Vulkan).
-
-It is recommended to use function vmaCreateImage() instead of this one.
-*/
-VkResult vmaBindImageMemory(
- VmaAllocator allocator,
- VmaAllocation allocation,
- VkImage image);
-
-/**
-@param[out] pBuffer Buffer that was created.
-@param[out] pAllocation Allocation that was created.
-@param[out] pAllocationInfo Optional. Information about allocated memory. It can be later fetched using function vmaGetAllocationInfo().
-
-This function automatically:
-
--# Creates buffer.
--# Allocates appropriate memory for it.
--# Binds the buffer with the memory.
-
-If any of these operations fail, buffer and allocation are not created,
-the returned value is a negative error code, and `*pBuffer` and `*pAllocation` are null.
-
-If the function succeeded, you must destroy both buffer and allocation when you
-no longer need them using either convenience function vmaDestroyBuffer() or
-separately, using `vkDestroyBuffer()` and vmaFreeMemory().
-
-If VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT flag was used,
-VK_KHR_dedicated_allocation extension is used internally to query driver whether
-it requires or prefers the new buffer to have dedicated allocation. If yes,
-and if dedicated allocation is possible (VmaAllocationCreateInfo::pool is null
-and VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT is not used), it creates dedicated
-allocation for this buffer, just like when using
-VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT.
-*/
-VkResult vmaCreateBuffer(
- VmaAllocator allocator,
- const VkBufferCreateInfo* pBufferCreateInfo,
- const VmaAllocationCreateInfo* pAllocationCreateInfo,
- VkBuffer* pBuffer,
- VmaAllocation* pAllocation,
- VmaAllocationInfo* pAllocationInfo);
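-
-/* Example (illustrative sketch, not part of the original header): creating a
-device-local vertex buffer in one call, assuming an existing VmaAllocator `allocator`.
-
-\code
-VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
-bufCreateInfo.size = 65536;
-bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
-
-VmaAllocationCreateInfo allocCreateInfo = {};
-allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
-
-VkBuffer buffer;
-VmaAllocation allocation;
-VkResult res = vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo,
-    &buffer, &allocation, nullptr);
-// ... use the buffer, then destroy both at once:
-vmaDestroyBuffer(allocator, buffer, allocation);
-\endcode
-*/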
-
-/** \brief Destroys Vulkan buffer and frees allocated memory.
-
-This is just a convenience function equivalent to:
-
-\code
-vkDestroyBuffer(device, buffer, allocationCallbacks);
-vmaFreeMemory(allocator, allocation);
-\endcode
-
-It is safe to pass null as buffer and/or allocation.
-*/
-void vmaDestroyBuffer(
- VmaAllocator allocator,
- VkBuffer buffer,
- VmaAllocation allocation);
-
-/// Function similar to vmaCreateBuffer().
-VkResult vmaCreateImage(
- VmaAllocator allocator,
- const VkImageCreateInfo* pImageCreateInfo,
- const VmaAllocationCreateInfo* pAllocationCreateInfo,
- VkImage* pImage,
- VmaAllocation* pAllocation,
- VmaAllocationInfo* pAllocationInfo);
-
-/** \brief Destroys Vulkan image and frees allocated memory.
-
-This is just a convenience function equivalent to:
-
-\code
-vkDestroyImage(device, image, allocationCallbacks);
-vmaFreeMemory(allocator, allocation);
-\endcode
-
-It is safe to pass null as image and/or allocation.
-*/
-void vmaDestroyImage(
- VmaAllocator allocator,
- VkImage image,
- VmaAllocation allocation);
-
-#ifdef __cplusplus
-}
-#endif
-
-#endif // AMD_VULKAN_MEMORY_ALLOCATOR_H
-
-// For Visual Studio IntelliSense.
-#if defined(__cplusplus) && defined(__INTELLISENSE__)
-#define VMA_IMPLEMENTATION
-#endif
-
-#ifdef VMA_IMPLEMENTATION
-#undef VMA_IMPLEMENTATION
-
-#include <cstdint>
-#include <cstdlib>
-#include <cstring>
-
-/*******************************************************************************
-CONFIGURATION SECTION
-
-Define some of these macros before each #include of this header or change them
-here if you need other than the default behavior, depending on your environment.
-*/
-
-/*
-Define this macro to 1 to make the library fetch pointers to Vulkan functions
-internally, like:
-
- vulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
-
-Define to 0 if you are going to provide your own pointers to Vulkan functions via
-VmaAllocatorCreateInfo::pVulkanFunctions.
-*/
-#if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES)
-#define VMA_STATIC_VULKAN_FUNCTIONS 1
-#endif
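-
-/* Example (illustrative sketch, not part of the original header): if you define
-VMA_STATIC_VULKAN_FUNCTIONS to 0, you can provide your own function pointers through
-VmaAllocatorCreateInfo::pVulkanFunctions when creating the allocator. `physicalDevice`
-and `device` are assumed to exist; only a few members are shown - every member of
-VmaVulkanFunctions must be set.
-
-\code
-VmaVulkanFunctions vulkanFunctions = {};
-vulkanFunctions.vkGetPhysicalDeviceProperties = vkGetPhysicalDeviceProperties;
-vulkanFunctions.vkAllocateMemory = vkAllocateMemory;
-vulkanFunctions.vkFreeMemory = vkFreeMemory;
-// ... assign the remaining members the same way ...
-
-VmaAllocatorCreateInfo allocatorCreateInfo = {};
-allocatorCreateInfo.physicalDevice = physicalDevice;
-allocatorCreateInfo.device = device;
-allocatorCreateInfo.pVulkanFunctions = &vulkanFunctions;
-
-VmaAllocator allocator;
-vmaCreateAllocator(&allocatorCreateInfo, &allocator);
-\endcode
-*/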
-
-// Define this macro to 1 to make the library use STL containers instead of its own implementation.
-//#define VMA_USE_STL_CONTAINERS 1
-
-/* Set this macro to 1 to make the library include and use STL containers:
-std::pair, std::vector, std::list, std::unordered_map.
-
-Set it to 0 or leave it undefined to make the library use its own implementation of
-the containers.
-*/
-#if VMA_USE_STL_CONTAINERS
- #define VMA_USE_STL_VECTOR 1
- #define VMA_USE_STL_UNORDERED_MAP 1
- #define VMA_USE_STL_LIST 1
-#endif
-
-#ifndef VMA_USE_STL_SHARED_MUTEX
- // Minimum Visual Studio 2015 Update 2
- #if defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && NTDDI_VERSION > NTDDI_WIN10_RS2
- #define VMA_USE_STL_SHARED_MUTEX 1
- #endif
-#endif
-
-#if VMA_USE_STL_VECTOR
- #include <vector>
-#endif
-
-#if VMA_USE_STL_UNORDERED_MAP
- #include <unordered_map>
-#endif
-
-#if VMA_USE_STL_LIST
- #include <list>
-#endif
-
-/*
-The following headers are used in this CONFIGURATION section only, so feel free to
-remove them if not needed.
-*/
-#include <cassert> // for assert
-#include <algorithm> // for min, max
-#include <mutex>
-#include <atomic> // for std::atomic
-
-#ifndef VMA_NULL
- // Value used as null pointer. Define it to e.g.: nullptr, NULL, 0, (void*)0.
- #define VMA_NULL nullptr
-#endif
-
-#if defined(__ANDROID_API__) && (__ANDROID_API__ < 16)
-#include <cstdlib>
-#include <malloc.h> // for memalign()
-void *aligned_alloc(size_t alignment, size_t size)
-{
- // alignment must be >= sizeof(void*)
- if(alignment < sizeof(void*))
- {
- alignment = sizeof(void*);
- }
-
- return memalign(alignment, size);
-}
-#elif defined(__APPLE__) || defined(__ANDROID__)
-# define ALIGNED_ALLOC_WITH_POSIX_MEMALIGN
-#elif defined(__GNU_LIBRARY__)
-# if !defined(__GLIBC_PREREQ) || !__GLIBC_PREREQ(2, 16)
-// aligned_alloc() is defined in glibc only for version >= 2.16
-# define ALIGNED_ALLOC_WITH_POSIX_MEMALIGN
-# endif
-#endif
-
-#ifdef ALIGNED_ALLOC_WITH_POSIX_MEMALIGN
-#include <cstdlib>
-void *aligned_alloc(size_t alignment, size_t size)
-{
- // alignment must be >= sizeof(void*)
- if(alignment < sizeof(void*))
- {
- alignment = sizeof(void*);
- }
-
- void *pointer;
- if(posix_memalign(&pointer, alignment, size) == 0)
- return pointer;
- return VMA_NULL;
-}
-#endif
-
-// If your compiler is not compatible with C++11 and definition of
-// aligned_alloc() function is missing, uncommenting the following line may help:
-
-//#include <malloc.h>
-
-// Normal assert to check for programmer's errors, especially in Debug configuration.
-#ifndef VMA_ASSERT
- #ifdef _DEBUG
- #define VMA_ASSERT(expr) assert(expr)
- #else
- #define VMA_ASSERT(expr)
- #endif
-#endif
-
-// Assert that will be called very often, like inside data structures e.g. operator[].
-// Making it non-empty can make program slow.
-#ifndef VMA_HEAVY_ASSERT
- #ifdef _DEBUG
- #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr)
- #else
- #define VMA_HEAVY_ASSERT(expr)
- #endif
-#endif
-
-#ifndef VMA_ALIGN_OF
- #define VMA_ALIGN_OF(type) (__alignof(type))
-#endif
-
-#ifndef VMA_SYSTEM_ALIGNED_MALLOC
- #if defined(_WIN32)
- #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment)))
- #else
- #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) ))
- #endif
-#endif
-
-#ifndef VMA_SYSTEM_FREE
- #if defined(_WIN32)
- #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr)
- #else
- #define VMA_SYSTEM_FREE(ptr) free(ptr)
- #endif
-#endif
-
-#ifndef VMA_MIN
- #define VMA_MIN(v1, v2) (std::min((v1), (v2)))
-#endif
-
-#ifndef VMA_MAX
- #define VMA_MAX(v1, v2) (std::max((v1), (v2)))
-#endif
-
-#ifndef VMA_SWAP
- #define VMA_SWAP(v1, v2) std::swap((v1), (v2))
-#endif
-
-#ifndef VMA_SORT
- #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp)
-#endif
-
-#ifndef VMA_DEBUG_LOG
- #define VMA_DEBUG_LOG(format, ...)
- /*
- #define VMA_DEBUG_LOG(format, ...) do { \
- printf(format, __VA_ARGS__); \
- printf("\n"); \
- } while(false)
- */
-#endif
-
-// Define this macro to 1 to enable functions: vmaBuildStatsString, vmaFreeStatsString.
-#if VMA_STATS_STRING_ENABLED
- static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
- {
- snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
- }
- static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
- {
- snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
- }
- static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
- {
- snprintf(outStr, strLen, "%p", ptr);
- }
-#endif
-
-#ifndef VMA_MUTEX
- class VmaMutex
- {
- public:
- void Lock() { m_Mutex.lock(); }
- void Unlock() { m_Mutex.unlock(); }
- private:
- std::mutex m_Mutex;
- };
- #define VMA_MUTEX VmaMutex
-#endif
-
-// Read-write mutex, where "read" is shared access, "write" is exclusive access.
-#ifndef VMA_RW_MUTEX
- #if VMA_USE_STL_SHARED_MUTEX
- // Use std::shared_mutex from C++17.
- #include <shared_mutex>
- class VmaRWMutex
- {
- public:
- void LockRead() { m_Mutex.lock_shared(); }
- void UnlockRead() { m_Mutex.unlock_shared(); }
- void LockWrite() { m_Mutex.lock(); }
- void UnlockWrite() { m_Mutex.unlock(); }
- private:
- std::shared_mutex m_Mutex;
- };
- #define VMA_RW_MUTEX VmaRWMutex
- #elif defined(_WIN32)
- // Use SRWLOCK from WinAPI.
- class VmaRWMutex
- {
- public:
- VmaRWMutex() { InitializeSRWLock(&m_Lock); }
- void LockRead() { AcquireSRWLockShared(&m_Lock); }
- void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
- void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
- void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
- private:
- SRWLOCK m_Lock;
- };
- #define VMA_RW_MUTEX VmaRWMutex
- #else
- // Less efficient fallback: Use normal mutex.
- class VmaRWMutex
- {
- public:
- void LockRead() { m_Mutex.Lock(); }
- void UnlockRead() { m_Mutex.Unlock(); }
- void LockWrite() { m_Mutex.Lock(); }
- void UnlockWrite() { m_Mutex.Unlock(); }
- private:
- VMA_MUTEX m_Mutex;
- };
- #define VMA_RW_MUTEX VmaRWMutex
- #endif // #if VMA_USE_STL_SHARED_MUTEX
-#endif // #ifndef VMA_RW_MUTEX
-
-/*
-If providing your own implementation, you need to implement a subset of std::atomic:
-
-- Constructor(uint32_t desired)
-- uint32_t load() const
-- void store(uint32_t desired)
-- bool compare_exchange_weak(uint32_t& expected, uint32_t desired)
-*/
-#ifndef VMA_ATOMIC_UINT32
- #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
-#endif
-
-#ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
- /**
- Every allocation will have its own memory block.
- Define to 1 for debugging purposes only.
- */
- #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
-#endif
-
-#ifndef VMA_DEBUG_ALIGNMENT
- /**
- Minimum alignment of all allocations, in bytes.
- Set to more than 1 for debugging purposes only. Must be power of two.
- */
- #define VMA_DEBUG_ALIGNMENT (1)
-#endif
-
-#ifndef VMA_DEBUG_MARGIN
- /**
- Minimum margin before and after every allocation, in bytes.
- Set nonzero for debugging purposes only.
- */
- #define VMA_DEBUG_MARGIN (0)
-#endif
-
-#ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS
- /**
- Define this macro to 1 to automatically fill new allocations and destroyed
- allocations with some bit pattern.
- */
- #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0)
-#endif
-
-#ifndef VMA_DEBUG_DETECT_CORRUPTION
- /**
- Define this macro to 1 together with non-zero value of VMA_DEBUG_MARGIN to
- enable writing magic value to the margin before and after every allocation and
- validating it, so that memory corruptions (out-of-bounds writes) are detected.
- */
- #define VMA_DEBUG_DETECT_CORRUPTION (0)
-#endif
-
-#ifndef VMA_DEBUG_GLOBAL_MUTEX
- /**
- Set this to 1 for debugging purposes only, to enable single mutex protecting all
- entry calls to the library. Can be useful for debugging multithreading issues.
- */
- #define VMA_DEBUG_GLOBAL_MUTEX (0)
-#endif
-
-#ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
- /**
- Minimum value for VkPhysicalDeviceLimits::bufferImageGranularity.
- Set to more than 1 for debugging purposes only. Must be power of two.
- */
- #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
-#endif
-
-#ifndef VMA_SMALL_HEAP_MAX_SIZE
- /// Maximum size of a memory heap in Vulkan to consider it "small".
- #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024)
-#endif
-
-#ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
- /// Default size of a block allocated as single VkDeviceMemory from a "large" heap.
- #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024)
-#endif
-
-#ifndef VMA_CLASS_NO_COPY
- #define VMA_CLASS_NO_COPY(className) \
- private: \
- className(const className&) = delete; \
- className& operator=(const className&) = delete;
-#endif
-
-static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
-
-// Decimal 2139416166, float NaN, little-endian binary 66 E6 84 7F.
-static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
-
-static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
-static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
-
-/*******************************************************************************
-END OF CONFIGURATION
-*/
-
-#if defined(__GNUC__)
-#define GCC_VERSION (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__)
-#pragma GCC diagnostic push
-#pragma GCC diagnostic ignored "-Wtype-limits"
-#pragma GCC diagnostic ignored "-Wunused-variable"
-#if defined(__clang__)
-#pragma clang diagnostic push
-#pragma clang diagnostic ignored "-Wtautological-compare"
-#endif
-#if GCC_VERSION >= 80000
-#pragma GCC diagnostic ignored "-Wclass-memaccess"
-#endif
-#if defined(ANDROID)
-#pragma GCC diagnostic ignored "-Wunused-private-field"
-#endif
-#endif
-static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;
-
-static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
- VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
-
-// Returns number of bits set to 1 in (v).
-static inline uint32_t VmaCountBitsSet(uint32_t v)
-{
- uint32_t c = v - ((v >> 1) & 0x55555555);
- c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
- c = ((c >> 4) + c) & 0x0F0F0F0F;
- c = ((c >> 8) + c) & 0x00FF00FF;
- c = ((c >> 16) + c) & 0x0000FFFF;
- return c;
-}
-
-// Aligns given value up to the nearest multiple of align value. For example: VmaAlignUp(11, 8) = 16.
-// Use types like uint32_t, uint64_t as T.
-template <typename T>
-static inline T VmaAlignUp(T val, T align)
-{
- return (val + align - 1) / align * align;
-}
-// Aligns given value down to the nearest multiple of align value. For example: VmaAlignDown(11, 8) = 8.
-// Use types like uint32_t, uint64_t as T.
-template <typename T>
-static inline T VmaAlignDown(T val, T align)
-{
- return val / align * align;
-}
-
-// Division with mathematical rounding to nearest number.
-template <typename T>
-static inline T VmaRoundDiv(T x, T y)
-{
- return (x + (y / (T)2)) / y;
-}
-
-/*
-Returns true if given number is a power of two.
-T must be an unsigned integer type, or a signed integer whose value is always nonnegative.
-For 0 returns true.
-*/
-template <typename T>
-inline bool VmaIsPow2(T x)
-{
- return (x & (x-1)) == 0;
-}
-
-// Returns smallest power of 2 greater or equal to v.
-static inline uint32_t VmaNextPow2(uint32_t v)
-{
- v--;
- v |= v >> 1;
- v |= v >> 2;
- v |= v >> 4;
- v |= v >> 8;
- v |= v >> 16;
- v++;
- return v;
-}
-static inline uint64_t VmaNextPow2(uint64_t v)
-{
- v--;
- v |= v >> 1;
- v |= v >> 2;
- v |= v >> 4;
- v |= v >> 8;
- v |= v >> 16;
- v |= v >> 32;
- v++;
- return v;
-}
-
-// Returns largest power of 2 less or equal to v.
-static inline uint32_t VmaPrevPow2(uint32_t v)
-{
- v |= v >> 1;
- v |= v >> 2;
- v |= v >> 4;
- v |= v >> 8;
- v |= v >> 16;
- v = v ^ (v >> 1);
- return v;
-}
-static inline uint64_t VmaPrevPow2(uint64_t v)
-{
- v |= v >> 1;
- v |= v >> 2;
- v |= v >> 4;
- v |= v >> 8;
- v |= v >> 16;
- v |= v >> 32;
- v = v ^ (v >> 1);
- return v;
-}
-
-static inline bool VmaStrIsEmpty(const char* pStr)
-{
- return pStr == VMA_NULL || *pStr == '\0';
-}
-
-static const char* VmaAlgorithmToStr(uint32_t algorithm)
-{
- switch(algorithm)
- {
- case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
- return "Linear";
- case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
- return "Buddy";
- case 0:
- return "Default";
- default:
- VMA_ASSERT(0);
- return "";
- }
-}
-
-#ifndef VMA_SORT
-
-template<typename Iterator, typename Compare>
-Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
-{
- Iterator centerValue = end; --centerValue;
- Iterator insertIndex = beg;
- for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
- {
- if(cmp(*memTypeIndex, *centerValue))
- {
- if(insertIndex != memTypeIndex)
- {
- VMA_SWAP(*memTypeIndex, *insertIndex);
- }
- ++insertIndex;
- }
- }
- if(insertIndex != centerValue)
- {
- VMA_SWAP(*insertIndex, *centerValue);
- }
- return insertIndex;
-}
-
-template<typename Iterator, typename Compare>
-void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
-{
- if(beg < end)
- {
- Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
- VmaQuickSort<Iterator, Compare>(beg, it, cmp);
- VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
- }
-}
-
-#define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)
-
-#endif // #ifndef VMA_SORT
-
-/*
-Returns true if two memory blocks occupy overlapping pages.
-ResourceA must be at a lower memory offset than ResourceB.
-
-Algorithm is based on "Vulkan 1.0.39 - A Specification (with all registered Vulkan extensions)"
-chapter 11.6 "Resource Memory Association", paragraph "Buffer-Image Granularity".
-*/
-static inline bool VmaBlocksOnSamePage(
- VkDeviceSize resourceAOffset,
- VkDeviceSize resourceASize,
- VkDeviceSize resourceBOffset,
- VkDeviceSize pageSize)
-{
- VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
- VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
- VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
- VkDeviceSize resourceBStart = resourceBOffset;
- VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
- return resourceAEndPage == resourceBStartPage;
-}
-
-enum VmaSuballocationType
-{
- VMA_SUBALLOCATION_TYPE_FREE = 0,
- VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
- VMA_SUBALLOCATION_TYPE_BUFFER = 2,
- VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
- VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
- VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
- VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
-};
-
-/*
-Returns true if given suballocation types could conflict and must respect
-VkPhysicalDeviceLimits::bufferImageGranularity. They conflict if one is buffer
-or linear image and another one is optimal image. If type is unknown, behave
-conservatively.
-*/
-static inline bool VmaIsBufferImageGranularityConflict(
- VmaSuballocationType suballocType1,
- VmaSuballocationType suballocType2)
-{
- if(suballocType1 > suballocType2)
- {
- VMA_SWAP(suballocType1, suballocType2);
- }
-
- switch(suballocType1)
- {
- case VMA_SUBALLOCATION_TYPE_FREE:
- return false;
- case VMA_SUBALLOCATION_TYPE_UNKNOWN:
- return true;
- case VMA_SUBALLOCATION_TYPE_BUFFER:
- return
- suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
- suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
- case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
- return
- suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
- suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
- suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
- case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
- return
- suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
- case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
- return false;
- default:
- VMA_ASSERT(0);
- return true;
- }
-}
-
-static void VmaWriteMagicValue(void* pData, VkDeviceSize offset)
-{
-#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
- uint32_t* pDst = (uint32_t*)((char*)pData + offset);
- const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
- for(size_t i = 0; i < numberCount; ++i, ++pDst)
- {
- *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
- }
-#else
- // no-op
-#endif
-}
-
-static bool VmaValidateMagicValue(const void* pData, VkDeviceSize offset)
-{
-#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
- const uint32_t* pSrc = (const uint32_t*)((const char*)pData + offset);
- const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
- for(size_t i = 0; i < numberCount; ++i, ++pSrc)
- {
- if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
- {
- return false;
- }
- }
-#endif
- return true;
-}
-
-// Helper RAII class to lock a mutex in constructor and unlock it in destructor (at the end of scope).
-struct VmaMutexLock
-{
- VMA_CLASS_NO_COPY(VmaMutexLock)
-public:
- VmaMutexLock(VMA_MUTEX& mutex, bool useMutex) :
- m_pMutex(useMutex ? &mutex : VMA_NULL)
- { if(m_pMutex) { m_pMutex->Lock(); } }
- ~VmaMutexLock()
- { if(m_pMutex) { m_pMutex->Unlock(); } }
-private:
- VMA_MUTEX* m_pMutex;
-};
-
-// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for reading.
-struct VmaMutexLockRead
-{
- VMA_CLASS_NO_COPY(VmaMutexLockRead)
-public:
- VmaMutexLockRead(VMA_RW_MUTEX& mutex, bool useMutex) :
- m_pMutex(useMutex ? &mutex : VMA_NULL)
- { if(m_pMutex) { m_pMutex->LockRead(); } }
- ~VmaMutexLockRead() { if(m_pMutex) { m_pMutex->UnlockRead(); } }
-private:
- VMA_RW_MUTEX* m_pMutex;
-};
-
-// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for writing.
-struct VmaMutexLockWrite
-{
- VMA_CLASS_NO_COPY(VmaMutexLockWrite)
-public:
- VmaMutexLockWrite(VMA_RW_MUTEX& mutex, bool useMutex) :
- m_pMutex(useMutex ? &mutex : VMA_NULL)
- { if(m_pMutex) { m_pMutex->LockWrite(); } }
- ~VmaMutexLockWrite() { if(m_pMutex) { m_pMutex->UnlockWrite(); } }
-private:
- VMA_RW_MUTEX* m_pMutex;
-};
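-
-/*
-A minimal usage sketch (m_Mutex and m_UseMutex are hypothetical members of the
-enclosing object; when useMutex is false the lock is skipped entirely, which is
-how locking can be avoided when the allocator is created with
-VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT):
-
-    void MyObject::ThreadSafeOperation()
-    {
-        VmaMutexLock lock(m_Mutex, m_UseMutex);
-        // ... critical section; unlocked automatically when `lock` goes out of scope ...
-    }
-*/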
-
-#if VMA_DEBUG_GLOBAL_MUTEX
- static VMA_MUTEX gDebugGlobalMutex;
- #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
-#else
- #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
-#endif
-
-// Minimum size of a free suballocation to register it in the free suballocation collection.
-static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
-
-/*
-Performs binary search and returns an iterator to the first element that is
-greater than or equal to (key), according to comparison (cmp).
-
-Cmp should return true if the first argument is less than the second argument.
-
-The returned iterator points to the found element, if present in the collection,
-or to the place where a new element with value (key) should be inserted.
-*/
-template <typename CmpLess, typename IterT, typename KeyT>
-static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpLess cmp)
-{
- size_t down = 0, up = (end - beg);
- while(down < up)
- {
- const size_t mid = (down + up) / 2;
- if(cmp(*(beg+mid), key))
- {
- down = mid + 1;
- }
- else
- {
- up = mid;
- }
- }
- return beg + down;
-}
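-
-/*
-A minimal sketch of the lower_bound-like behavior (the array and comparator are
-hypothetical):
-
-    const int sorted[] = { 1, 3, 3, 8 };
-    struct IntLess { bool operator()(int a, int b) const { return a < b; } };
-    // Points to the first 3 (index 1): an element equal to the key.
-    const int* it1 = VmaBinaryFindFirstNotLess(sorted, sorted + 4, 3, IntLess());
-    // Points to 8 (index 3): the position where the missing key 5 would be inserted.
-    const int* it2 = VmaBinaryFindFirstNotLess(sorted, sorted + 4, 5, IntLess());
-*/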
-
-/*
-Returns true if all pointers in the array are non-null and unique.
-Warning! O(n^2) complexity. Use only inside VMA_HEAVY_ASSERT.
-T must be a pointer type, e.g. VmaAllocation, VmaPool.
-*/
-template<typename T>
-static bool VmaValidatePointerArray(uint32_t count, const T* arr)
-{
- for(uint32_t i = 0; i < count; ++i)
- {
- const T iPtr = arr[i];
- if(iPtr == VMA_NULL)
- {
- return false;
- }
- for(uint32_t j = i + 1; j < count; ++j)
- {
- if(iPtr == arr[j])
- {
- return false;
- }
- }
- }
- return true;
-}
-
-////////////////////////////////////////////////////////////////////////////////
-// Memory allocation
-
-static void* VmaMalloc(const VkAllocationCallbacks* pAllocationCallbacks, size_t size, size_t alignment)
-{
- if((pAllocationCallbacks != VMA_NULL) &&
- (pAllocationCallbacks->pfnAllocation != VMA_NULL))
- {
- return (*pAllocationCallbacks->pfnAllocation)(
- pAllocationCallbacks->pUserData,
- size,
- alignment,
- VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
- }
- else
- {
- return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
- }
-}
-
-static void VmaFree(const VkAllocationCallbacks* pAllocationCallbacks, void* ptr)
-{
- if((pAllocationCallbacks != VMA_NULL) &&
- (pAllocationCallbacks->pfnFree != VMA_NULL))
- {
- (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
- }
- else
- {
- VMA_SYSTEM_FREE(ptr);
- }
-}
-
-template<typename T>
-static T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks)
-{
- return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T));
-}
-
-template<typename T>
-static T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, size_t count)
-{
- return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T));
-}
-
-#define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type)
-
-#define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type)
-
-template<typename T>
-static void vma_delete(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
-{
- ptr->~T();
- VmaFree(pAllocationCallbacks, ptr);
-}
-
-template<typename T>
-static void vma_delete_array(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr, size_t count)
-{
- if(ptr != VMA_NULL)
- {
- for(size_t i = count; i--; )
- {
- ptr[i].~T();
- }
- VmaFree(pAllocationCallbacks, ptr);
- }
-}
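-
-/*
-A minimal sketch of the intended pairing (MyType and its constructor arguments
-are hypothetical; when pAllocationCallbacks is null, the system allocator is used):
-
-    MyType* p = vma_new(pAllocationCallbacks, MyType)(arg1, arg2);
-    // ... use *p ...
-    vma_delete(pAllocationCallbacks, p);
-*/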
-
-// STL-compatible allocator.
-template<typename T>
-class VmaStlAllocator
-{
-public:
- const VkAllocationCallbacks* const m_pCallbacks;
- typedef T value_type;
-
- VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
- template<typename U> VmaStlAllocator(const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
-
- T* allocate(size_t n) { return VmaAllocateArray<T>(m_pCallbacks, n); }
- void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); }
-
- template<typename U>
- bool operator==(const VmaStlAllocator<U>& rhs) const
- {
- return m_pCallbacks == rhs.m_pCallbacks;
- }
- template<typename U>
- bool operator!=(const VmaStlAllocator<U>& rhs) const
- {
- return m_pCallbacks != rhs.m_pCallbacks;
- }
-
- VmaStlAllocator& operator=(const VmaStlAllocator& x) = delete;
-};
-
-#if VMA_USE_STL_VECTOR
-
-#define VmaVector std::vector
-
-template<typename T, typename allocatorT>
-static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
-{
- vec.insert(vec.begin() + index, item);
-}
-
-template<typename T, typename allocatorT>
-static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
-{
- vec.erase(vec.begin() + index);
-}
-
-#else // #if VMA_USE_STL_VECTOR
-
-/* Class with an interface compatible with a subset of std::vector.
-T must be POD because constructors and destructors are not called and memcpy is
-used to copy these objects. */
-template<typename T, typename AllocatorT>
-class VmaVector
-{
-public:
- typedef T value_type;
-
- VmaVector(const AllocatorT& allocator) :
- m_Allocator(allocator),
- m_pArray(VMA_NULL),
- m_Count(0),
- m_Capacity(0)
- {
- }
-
- VmaVector(size_t count, const AllocatorT& allocator) :
- m_Allocator(allocator),
- m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
- m_Count(count),
- m_Capacity(count)
- {
- }
-
- VmaVector(const VmaVector<T, AllocatorT>& src) :
- m_Allocator(src.m_Allocator),
- m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
- m_Count(src.m_Count),
- m_Capacity(src.m_Count)
- {
- if(m_Count != 0)
- {
- memcpy(m_pArray, src.m_pArray, m_Count * sizeof(T));
- }
- }
-
- ~VmaVector()
- {
- VmaFree(m_Allocator.m_pCallbacks, m_pArray);
- }
-
- VmaVector& operator=(const VmaVector<T, AllocatorT>& rhs)
- {
- if(&rhs != this)
- {
- resize(rhs.m_Count);
- if(m_Count != 0)
- {
- memcpy(m_pArray, rhs.m_pArray, m_Count * sizeof(T));
- }
- }
- return *this;
- }
-
- bool empty() const { return m_Count == 0; }
- size_t size() const { return m_Count; }
- T* data() { return m_pArray; }
- const T* data() const { return m_pArray; }
-
- T& operator[](size_t index)
- {
- VMA_HEAVY_ASSERT(index < m_Count);
- return m_pArray[index];
- }
- const T& operator[](size_t index) const
- {
- VMA_HEAVY_ASSERT(index < m_Count);
- return m_pArray[index];
- }
-
- T& front()
- {
- VMA_HEAVY_ASSERT(m_Count > 0);
- return m_pArray[0];
- }
- const T& front() const
- {
- VMA_HEAVY_ASSERT(m_Count > 0);
- return m_pArray[0];
- }
- T& back()
- {
- VMA_HEAVY_ASSERT(m_Count > 0);
- return m_pArray[m_Count - 1];
- }
- const T& back() const
- {
- VMA_HEAVY_ASSERT(m_Count > 0);
- return m_pArray[m_Count - 1];
- }
-
- void reserve(size_t newCapacity, bool freeMemory = false)
- {
- newCapacity = VMA_MAX(newCapacity, m_Count);
-
- if((newCapacity < m_Capacity) && !freeMemory)
- {
- newCapacity = m_Capacity;
- }
-
- if(newCapacity != m_Capacity)
- {
-            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
- if(m_Count != 0)
- {
- memcpy(newArray, m_pArray, m_Count * sizeof(T));
- }
- VmaFree(m_Allocator.m_pCallbacks, m_pArray);
- m_Capacity = newCapacity;
- m_pArray = newArray;
- }
- }
-
- void resize(size_t newCount, bool freeMemory = false)
- {
- size_t newCapacity = m_Capacity;
- if(newCount > m_Capacity)
- {
- newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (size_t)8));
- }
- else if(freeMemory)
- {
- newCapacity = newCount;
- }
-
- if(newCapacity != m_Capacity)
- {
- T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
- const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
- if(elementsToCopy != 0)
- {
- memcpy(newArray, m_pArray, elementsToCopy * sizeof(T));
- }
- VmaFree(m_Allocator.m_pCallbacks, m_pArray);
- m_Capacity = newCapacity;
- m_pArray = newArray;
- }
-
- m_Count = newCount;
- }
-
- void clear(bool freeMemory = false)
- {
- resize(0, freeMemory);
- }
-
- void insert(size_t index, const T& src)
- {
- VMA_HEAVY_ASSERT(index <= m_Count);
- const size_t oldCount = size();
- resize(oldCount + 1);
- if(index < oldCount)
- {
- memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) * sizeof(T));
- }
- m_pArray[index] = src;
- }
-
- void remove(size_t index)
- {
- VMA_HEAVY_ASSERT(index < m_Count);
- const size_t oldCount = size();
- if(index < oldCount - 1)
- {
- memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) * sizeof(T));
- }
- resize(oldCount - 1);
- }
-
- void push_back(const T& src)
- {
- const size_t newIndex = size();
- resize(newIndex + 1);
- m_pArray[newIndex] = src;
- }
-
- void pop_back()
- {
- VMA_HEAVY_ASSERT(m_Count > 0);
- resize(size() - 1);
- }
-
- void push_front(const T& src)
- {
- insert(0, src);
- }
-
- void pop_front()
- {
- VMA_HEAVY_ASSERT(m_Count > 0);
- remove(0);
- }
-
- typedef T* iterator;
-
- iterator begin() { return m_pArray; }
- iterator end() { return m_pArray + m_Count; }
-
-private:
- AllocatorT m_Allocator;
- T* m_pArray;
- size_t m_Count;
- size_t m_Capacity;
-};
-
-template<typename T, typename allocatorT>
-static void VmaVectorInsert(VmaVector<T, allocatorT>& vec, size_t index, const T& item)
-{
- vec.insert(index, item);
-}
-
-template<typename T, typename allocatorT>
-static void VmaVectorRemove(VmaVector<T, allocatorT>& vec, size_t index)
-{
- vec.remove(index);
-}
-
-#endif // #if VMA_USE_STL_VECTOR
-
-template<typename CmpLess, typename VectorT>
-size_t VmaVectorInsertSorted(VectorT& vector, const typename VectorT::value_type& value)
-{
- const size_t indexToInsert = VmaBinaryFindFirstNotLess(
- vector.data(),
- vector.data() + vector.size(),
- value,
- CmpLess()) - vector.data();
- VmaVectorInsert(vector, indexToInsert, value);
- return indexToInsert;
-}
-
-template<typename CmpLess, typename VectorT>
-bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
-{
- CmpLess comparator;
- typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
- vector.begin(),
- vector.end(),
- value,
- comparator);
- if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
- {
- size_t indexToRemove = it - vector.begin();
- VmaVectorRemove(vector, indexToRemove);
- return true;
- }
- return false;
-}
-
-template<typename CmpLess, typename IterT, typename KeyT>
-IterT VmaVectorFindSorted(const IterT& beg, const IterT& end, const KeyT& value)
-{
- CmpLess comparator;
- IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
- beg, end, value, comparator);
- if(it == end ||
- (!comparator(*it, value) && !comparator(value, *it)))
- {
- return it;
- }
- return end;
-}
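-
-/*
-A minimal sketch of keeping a VmaVector sorted with the helpers above (the
-element type and comparator are hypothetical):
-
-    struct SizeLess { bool operator()(VkDeviceSize a, VkDeviceSize b) const { return a < b; } };
-    VmaVector< VkDeviceSize, VmaStlAllocator<VkDeviceSize> > sizes(
-        VmaStlAllocator<VkDeviceSize>(pAllocationCallbacks));
-    VmaVectorInsertSorted<SizeLess>(sizes, (VkDeviceSize)256);
-    VmaVectorInsertSorted<SizeLess>(sizes, (VkDeviceSize)64);  // sizes is now { 64, 256 }
-    VmaVectorRemoveSorted<SizeLess>(sizes, (VkDeviceSize)64);  // returns true, sizes is { 256 }
-*/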
-
-////////////////////////////////////////////////////////////////////////////////
-// class VmaPoolAllocator
-
-/*
-Allocator for objects of type T using a list of arrays (pools) to speed up
-allocation. The number of elements that can be allocated is not bounded because
-the allocator can create multiple blocks.
-*/
-template<typename T>
-class VmaPoolAllocator
-{
- VMA_CLASS_NO_COPY(VmaPoolAllocator)
-public:
- VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, size_t itemsPerBlock);
- ~VmaPoolAllocator();
- void Clear();
- T* Alloc();
- void Free(T* ptr);
-
-private:
- union Item
- {
- uint32_t NextFreeIndex;
- T Value;
- };
-
- struct ItemBlock
- {
- Item* pItems;
- uint32_t FirstFreeIndex;
- };
-
- const VkAllocationCallbacks* m_pAllocationCallbacks;
- size_t m_ItemsPerBlock;
- VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
-
- ItemBlock& CreateNewBlock();
-};
-
-template<typename T>
-VmaPoolAllocator<T>::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, size_t itemsPerBlock) :
- m_pAllocationCallbacks(pAllocationCallbacks),
- m_ItemsPerBlock(itemsPerBlock),
- m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
-{
- VMA_ASSERT(itemsPerBlock > 0);
-}
-
-template<typename T>
-VmaPoolAllocator<T>::~VmaPoolAllocator()
-{
- Clear();
-}
-
-template<typename T>
-void VmaPoolAllocator<T>::Clear()
-{
- for(size_t i = m_ItemBlocks.size(); i--; )
- vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
- m_ItemBlocks.clear();
-}
-
-template<typename T>
-T* VmaPoolAllocator<T>::Alloc()
-{
- for(size_t i = m_ItemBlocks.size(); i--; )
- {
- ItemBlock& block = m_ItemBlocks[i];
- // This block has some free items: Use first one.
- if(block.FirstFreeIndex != UINT32_MAX)
- {
- Item* const pItem = &block.pItems[block.FirstFreeIndex];
- block.FirstFreeIndex = pItem->NextFreeIndex;
- return &pItem->Value;
- }
- }
-
- // No block has free item: Create new one and use it.
- ItemBlock& newBlock = CreateNewBlock();
- Item* const pItem = &newBlock.pItems[0];
- newBlock.FirstFreeIndex = pItem->NextFreeIndex;
- return &pItem->Value;
-}
-
-template<typename T>
-void VmaPoolAllocator<T>::Free(T* ptr)
-{
- // Search all memory blocks to find ptr.
- for(size_t i = 0; i < m_ItemBlocks.size(); ++i)
- {
- ItemBlock& block = m_ItemBlocks[i];
-
- // Casting to union.
- Item* pItemPtr;
- memcpy(&pItemPtr, &ptr, sizeof(pItemPtr));
-
- // Check if pItemPtr is in address range of this block.
- if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
- {
- const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
- pItemPtr->NextFreeIndex = block.FirstFreeIndex;
- block.FirstFreeIndex = index;
- return;
- }
- }
- VMA_ASSERT(0 && "Pointer doesn't belong to this memory pool.");
-}
-
-template<typename T>
-typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
-{
- ItemBlock newBlock = {
- vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
-
- m_ItemBlocks.push_back(newBlock);
-
- // Setup singly-linked list of all free items in this block.
- for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
- newBlock.pItems[i].NextFreeIndex = i + 1;
- newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
- return m_ItemBlocks.back();
-}
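-
-/*
-A minimal usage sketch (the element type and block size are hypothetical; this is
-the pattern used by VmaRawList below, which keeps a VmaPoolAllocator<ItemType>
-with 128 items per block):
-
-    VmaPoolAllocator<MyItem> pool(pAllocationCallbacks, 32);
-    MyItem* item = pool.Alloc();   // no per-item heap allocation once a block exists
-    // ... use *item ...
-    pool.Free(item);               // returns the item to its block's free list
-*/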
-
-////////////////////////////////////////////////////////////////////////////////
-// class VmaRawList, VmaList
-
-#if VMA_USE_STL_LIST
-
-#define VmaList std::list
-
-#else // #if VMA_USE_STL_LIST
-
-template<typename T>
-struct VmaListItem
-{
- VmaListItem* pPrev;
- VmaListItem* pNext;
- T Value;
-};
-
-// Doubly linked list.
-template<typename T>
-class VmaRawList
-{
- VMA_CLASS_NO_COPY(VmaRawList)
-public:
- typedef VmaListItem<T> ItemType;
-
- VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks);
- ~VmaRawList();
- void Clear();
-
- size_t GetCount() const { return m_Count; }
- bool IsEmpty() const { return m_Count == 0; }
-
- ItemType* Front() { return m_pFront; }
- const ItemType* Front() const { return m_pFront; }
- ItemType* Back() { return m_pBack; }
- const ItemType* Back() const { return m_pBack; }
-
- ItemType* PushBack();
- ItemType* PushFront();
- ItemType* PushBack(const T& value);
- ItemType* PushFront(const T& value);
- void PopBack();
- void PopFront();
-
- // Item can be null - it means PushBack.
- ItemType* InsertBefore(ItemType* pItem);
- // Item can be null - it means PushFront.
- ItemType* InsertAfter(ItemType* pItem);
-
- ItemType* InsertBefore(ItemType* pItem, const T& value);
- ItemType* InsertAfter(ItemType* pItem, const T& value);
-
- void Remove(ItemType* pItem);
-
-private:
- const VkAllocationCallbacks* const m_pAllocationCallbacks;
- VmaPoolAllocator<ItemType> m_ItemAllocator;
- ItemType* m_pFront;
- ItemType* m_pBack;
- size_t m_Count;
-};
-
-template<typename T>
-VmaRawList<T>::VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks) :
- m_pAllocationCallbacks(pAllocationCallbacks),
- m_ItemAllocator(pAllocationCallbacks, 128),
- m_pFront(VMA_NULL),
- m_pBack(VMA_NULL),
- m_Count(0)
-{
-}
-
-template<typename T>
-VmaRawList<T>::~VmaRawList()
-{
-    // Intentionally not calling Clear, because that would waste time
-    // returning all items to m_ItemAllocator as free.
-}
-
-template<typename T>
-void VmaRawList<T>::Clear()
-{
- if(IsEmpty() == false)
- {
- ItemType* pItem = m_pBack;
- while(pItem != VMA_NULL)
- {
- ItemType* const pPrevItem = pItem->pPrev;
- m_ItemAllocator.Free(pItem);
- pItem = pPrevItem;
- }
- m_pFront = VMA_NULL;
- m_pBack = VMA_NULL;
- m_Count = 0;
- }
-}
-
-template<typename T>
-VmaListItem<T>* VmaRawList<T>::PushBack()
-{
- ItemType* const pNewItem = m_ItemAllocator.Alloc();
- pNewItem->pNext = VMA_NULL;
- if(IsEmpty())
- {
- pNewItem->pPrev = VMA_NULL;
- m_pFront = pNewItem;
- m_pBack = pNewItem;
- m_Count = 1;
- }
- else
- {
- pNewItem->pPrev = m_pBack;
- m_pBack->pNext = pNewItem;
- m_pBack = pNewItem;
- ++m_Count;
- }
- return pNewItem;
-}
-
-template<typename T>
-VmaListItem<T>* VmaRawList<T>::PushFront()
-{
- ItemType* const pNewItem = m_ItemAllocator.Alloc();
- pNewItem->pPrev = VMA_NULL;
- if(IsEmpty())
- {
- pNewItem->pNext = VMA_NULL;
- m_pFront = pNewItem;
- m_pBack = pNewItem;
- m_Count = 1;
- }
- else
- {
- pNewItem->pNext = m_pFront;
- m_pFront->pPrev = pNewItem;
- m_pFront = pNewItem;
- ++m_Count;
- }
- return pNewItem;
-}
-
-template<typename T>
-VmaListItem<T>* VmaRawList<T>::PushBack(const T& value)
-{
- ItemType* const pNewItem = PushBack();
- pNewItem->Value = value;
- return pNewItem;
-}
-
-template<typename T>
-VmaListItem<T>* VmaRawList<T>::PushFront(const T& value)
-{
- ItemType* const pNewItem = PushFront();
- pNewItem->Value = value;
- return pNewItem;
-}
-
-template<typename T>
-void VmaRawList<T>::PopBack()
-{
- VMA_HEAVY_ASSERT(m_Count > 0);
- ItemType* const pBackItem = m_pBack;
- ItemType* const pPrevItem = pBackItem->pPrev;
- if(pPrevItem != VMA_NULL)
- {
- pPrevItem->pNext = VMA_NULL;
- }
- m_pBack = pPrevItem;
- m_ItemAllocator.Free(pBackItem);
- --m_Count;
-}
-
-template<typename T>
-void VmaRawList<T>::PopFront()
-{
- VMA_HEAVY_ASSERT(m_Count > 0);
- ItemType* const pFrontItem = m_pFront;
- ItemType* const pNextItem = pFrontItem->pNext;
- if(pNextItem != VMA_NULL)
- {
- pNextItem->pPrev = VMA_NULL;
- }
- m_pFront = pNextItem;
- m_ItemAllocator.Free(pFrontItem);
- --m_Count;
-}
-
-template<typename T>
-void VmaRawList<T>::Remove(ItemType* pItem)
-{
- VMA_HEAVY_ASSERT(pItem != VMA_NULL);
- VMA_HEAVY_ASSERT(m_Count > 0);
-
- if(pItem->pPrev != VMA_NULL)
- {
- pItem->pPrev->pNext = pItem->pNext;
- }
- else
- {
- VMA_HEAVY_ASSERT(m_pFront == pItem);
- m_pFront = pItem->pNext;
- }
-
- if(pItem->pNext != VMA_NULL)
- {
- pItem->pNext->pPrev = pItem->pPrev;
- }
- else
- {
- VMA_HEAVY_ASSERT(m_pBack == pItem);
- m_pBack = pItem->pPrev;
- }
-
- m_ItemAllocator.Free(pItem);
- --m_Count;
-}
-
-template<typename T>
-VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
-{
- if(pItem != VMA_NULL)
- {
- ItemType* const prevItem = pItem->pPrev;
- ItemType* const newItem = m_ItemAllocator.Alloc();
- newItem->pPrev = prevItem;
- newItem->pNext = pItem;
- pItem->pPrev = newItem;
- if(prevItem != VMA_NULL)
- {
- prevItem->pNext = newItem;
- }
- else
- {
- VMA_HEAVY_ASSERT(m_pFront == pItem);
- m_pFront = newItem;
- }
- ++m_Count;
- return newItem;
- }
- else
- return PushBack();
-}
-
-template<typename T>
-VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
-{
- if(pItem != VMA_NULL)
- {
- ItemType* const nextItem = pItem->pNext;
- ItemType* const newItem = m_ItemAllocator.Alloc();
- newItem->pNext = nextItem;
- newItem->pPrev = pItem;
- pItem->pNext = newItem;
- if(nextItem != VMA_NULL)
- {
- nextItem->pPrev = newItem;
- }
- else
- {
- VMA_HEAVY_ASSERT(m_pBack == pItem);
- m_pBack = newItem;
- }
- ++m_Count;
- return newItem;
- }
- else
- return PushFront();
-}
-
-template<typename T>
-VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem, const T& value)
-{
- ItemType* const newItem = InsertBefore(pItem);
- newItem->Value = value;
- return newItem;
-}
-
-template<typename T>
-VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem, const T& value)
-{
- ItemType* const newItem = InsertAfter(pItem);
- newItem->Value = value;
- return newItem;
-}
-
-template<typename T, typename AllocatorT>
-class VmaList
-{
- VMA_CLASS_NO_COPY(VmaList)
-public:
- class iterator
- {
- public:
- iterator() :
- m_pList(VMA_NULL),
- m_pItem(VMA_NULL)
- {
- }
-
- T& operator*() const
- {
- VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
- return m_pItem->Value;
- }
- T* operator->() const
- {
- VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
- return &m_pItem->Value;
- }
-
- iterator& operator++()
- {
- VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
- m_pItem = m_pItem->pNext;
- return *this;
- }
- iterator& operator--()
- {
- if(m_pItem != VMA_NULL)
- {
- m_pItem = m_pItem->pPrev;
- }
- else
- {
- VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
- m_pItem = m_pList->Back();
- }
- return *this;
- }
-
- iterator operator++(int)
- {
- iterator result = *this;
- ++*this;
- return result;
- }
- iterator operator--(int)
- {
- iterator result = *this;
- --*this;
- return result;
- }
-
- bool operator==(const iterator& rhs) const
- {
- VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
- return m_pItem == rhs.m_pItem;
- }
- bool operator!=(const iterator& rhs) const
- {
- VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
- return m_pItem != rhs.m_pItem;
- }
-
- private:
- VmaRawList<T>* m_pList;
- VmaListItem<T>* m_pItem;
-
- iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
- m_pList(pList),
- m_pItem(pItem)
- {
- }
-
- friend class VmaList<T, AllocatorT>;
- };
-
- class const_iterator
- {
- public:
- const_iterator() :
- m_pList(VMA_NULL),
- m_pItem(VMA_NULL)
- {
- }
-
- const_iterator(const iterator& src) :
- m_pList(src.m_pList),
- m_pItem(src.m_pItem)
- {
- }
-
- const T& operator*() const
- {
- VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
- return m_pItem->Value;
- }
- const T* operator->() const
- {
- VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
- return &m_pItem->Value;
- }
-
- const_iterator& operator++()
- {
- VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
- m_pItem = m_pItem->pNext;
- return *this;
- }
- const_iterator& operator--()
- {
- if(m_pItem != VMA_NULL)
- {
- m_pItem = m_pItem->pPrev;
- }
- else
- {
- VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
- m_pItem = m_pList->Back();
- }
- return *this;
- }
-
- const_iterator operator++(int)
- {
- const_iterator result = *this;
- ++*this;
- return result;
- }
- const_iterator operator--(int)
- {
- const_iterator result = *this;
- --*this;
- return result;
- }
-
- bool operator==(const const_iterator& rhs) const
- {
- VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
- return m_pItem == rhs.m_pItem;
- }
- bool operator!=(const const_iterator& rhs) const
- {
- VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
- return m_pItem != rhs.m_pItem;
- }
-
- private:
- const_iterator(const VmaRawList<T>* pList, const VmaListItem<T>* pItem) :
- m_pList(pList),
- m_pItem(pItem)
- {
- }
-
- const VmaRawList<T>* m_pList;
- const VmaListItem<T>* m_pItem;
-
- friend class VmaList<T, AllocatorT>;
- };
-
- VmaList(const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
-
- bool empty() const { return m_RawList.IsEmpty(); }
- size_t size() const { return m_RawList.GetCount(); }
-
- iterator begin() { return iterator(&m_RawList, m_RawList.Front()); }
- iterator end() { return iterator(&m_RawList, VMA_NULL); }
-
- const_iterator cbegin() const { return const_iterator(&m_RawList, m_RawList.Front()); }
- const_iterator cend() const { return const_iterator(&m_RawList, VMA_NULL); }
-
- void clear() { m_RawList.Clear(); }
- void push_back(const T& value) { m_RawList.PushBack(value); }
- void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
- iterator insert(iterator it, const T& value) { return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
-
-private:
- VmaRawList<T> m_RawList;
-};
-
-#endif // #if VMA_USE_STL_LIST
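-
-/*
-A minimal usage sketch (the element type is hypothetical; the same code works when
-VMA_USE_STL_LIST makes VmaList an alias for std::list):
-
-    VmaList< uint32_t, VmaStlAllocator<uint32_t> > list(
-        VmaStlAllocator<uint32_t>(pAllocationCallbacks));
-    list.push_back(42u);
-    for(auto it = list.begin(); it != list.end(); ++it)
-    {
-        // *it == 42
-    }
-*/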
-
-////////////////////////////////////////////////////////////////////////////////
-// class VmaMap
-
-// Unused in this version.
-#if 0
-
-#if VMA_USE_STL_UNORDERED_MAP
-
-#define VmaPair std::pair
-
-#define VMA_MAP_TYPE(KeyT, ValueT) \
- std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >
-
-#else // #if VMA_USE_STL_UNORDERED_MAP
-
-template<typename T1, typename T2>
-struct VmaPair
-{
- T1 first;
- T2 second;
-
- VmaPair() : first(), second() { }
- VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
-};
-
-/* Class compatible with subset of interface of std::unordered_map.
-KeyT, ValueT must be POD because they will be stored in VmaVector.
-*/
-template<typename KeyT, typename ValueT>
-class VmaMap
-{
-public:
- typedef VmaPair<KeyT, ValueT> PairType;
- typedef PairType* iterator;
-
- VmaMap(const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
-
- iterator begin() { return m_Vector.begin(); }
- iterator end() { return m_Vector.end(); }
-
- void insert(const PairType& pair);
- iterator find(const KeyT& key);
- void erase(iterator it);
-
-private:
- VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
-};
-
-#define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>
-
-template<typename FirstT, typename SecondT>
-struct VmaPairFirstLess
-{
- bool operator()(const VmaPair<FirstT, SecondT>& lhs, const VmaPair<FirstT, SecondT>& rhs) const
- {
- return lhs.first < rhs.first;
- }
- bool operator()(const VmaPair<FirstT, SecondT>& lhs, const FirstT& rhsFirst) const
- {
- return lhs.first < rhsFirst;
- }
-};
-
-template<typename KeyT, typename ValueT>
-void VmaMap<KeyT, ValueT>::insert(const PairType& pair)
-{
- const size_t indexToInsert = VmaBinaryFindFirstNotLess(
- m_Vector.data(),
- m_Vector.data() + m_Vector.size(),
- pair,
- VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
- VmaVectorInsert(m_Vector, indexToInsert, pair);
-}
-
-template<typename KeyT, typename ValueT>
-VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(const KeyT& key)
-{
- PairType* it = VmaBinaryFindFirstNotLess(
- m_Vector.data(),
- m_Vector.data() + m_Vector.size(),
- key,
- VmaPairFirstLess<KeyT, ValueT>());
- if((it != m_Vector.end()) && (it->first == key))
- {
- return it;
- }
- else
- {
- return m_Vector.end();
- }
-}
-
-template<typename KeyT, typename ValueT>
-void VmaMap<KeyT, ValueT>::erase(iterator it)
-{
- VmaVectorRemove(m_Vector, it - m_Vector.begin());
-}
-
-#endif // #if VMA_USE_STL_UNORDERED_MAP
-
-#endif // #if 0
-
-////////////////////////////////////////////////////////////////////////////////
-
-class VmaDeviceMemoryBlock;
-
-enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
-
-struct VmaAllocation_T
-{
- VMA_CLASS_NO_COPY(VmaAllocation_T)
-private:
- static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
-
- enum FLAGS
- {
- FLAG_USER_DATA_STRING = 0x01,
- };
-
-public:
- enum ALLOCATION_TYPE
- {
- ALLOCATION_TYPE_NONE,
- ALLOCATION_TYPE_BLOCK,
- ALLOCATION_TYPE_DEDICATED,
- };
-
- VmaAllocation_T(uint32_t currentFrameIndex, bool userDataString) :
- m_Alignment(1),
- m_Size(0),
- m_pUserData(VMA_NULL),
- m_LastUseFrameIndex(currentFrameIndex),
- m_Type((uint8_t)ALLOCATION_TYPE_NONE),
- m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
- m_MapCount(0),
- m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
- {
-#if VMA_STATS_STRING_ENABLED
- m_CreationFrameIndex = currentFrameIndex;
- m_BufferImageUsage = 0;
-#endif
- }
-
- ~VmaAllocation_T()
- {
- VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 && "Allocation was not unmapped before destruction.");
-
- // Check if owned string was freed.
- VMA_ASSERT(m_pUserData == VMA_NULL);
- }
-
- void InitBlockAllocation(
- VmaPool hPool,
- VmaDeviceMemoryBlock* block,
- VkDeviceSize offset,
- VkDeviceSize alignment,
- VkDeviceSize size,
- VmaSuballocationType suballocationType,
- bool mapped,
- bool canBecomeLost)
- {
- VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
- VMA_ASSERT(block != VMA_NULL);
- m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
- m_Alignment = alignment;
- m_Size = size;
- m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
- m_SuballocationType = (uint8_t)suballocationType;
- m_BlockAllocation.m_hPool = hPool;
- m_BlockAllocation.m_Block = block;
- m_BlockAllocation.m_Offset = offset;
- m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
- }
-
- void InitLost()
- {
- VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
- VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
- m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
- m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
- m_BlockAllocation.m_Block = VMA_NULL;
- m_BlockAllocation.m_Offset = 0;
- m_BlockAllocation.m_CanBecomeLost = true;
- }
-
- void ChangeBlockAllocation(
- VmaAllocator hAllocator,
- VmaDeviceMemoryBlock* block,
- VkDeviceSize offset);
-
- void ChangeSize(VkDeviceSize newSize);
- void ChangeOffset(VkDeviceSize newOffset);
-
-    // A non-null pMappedData means the allocation was created with the MAPPED flag.
- void InitDedicatedAllocation(
- uint32_t memoryTypeIndex,
- VkDeviceMemory hMemory,
- VmaSuballocationType suballocationType,
- void* pMappedData,
- VkDeviceSize size)
- {
- VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
- VMA_ASSERT(hMemory != VK_NULL_HANDLE);
- m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
- m_Alignment = 0;
- m_Size = size;
- m_SuballocationType = (uint8_t)suballocationType;
- m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
- m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
- m_DedicatedAllocation.m_hMemory = hMemory;
- m_DedicatedAllocation.m_pMappedData = pMappedData;
- }
-
- ALLOCATION_TYPE GetType() const { return (ALLOCATION_TYPE)m_Type; }
- VkDeviceSize GetAlignment() const { return m_Alignment; }
- VkDeviceSize GetSize() const { return m_Size; }
- bool IsUserDataString() const { return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
- void* GetUserData() const { return m_pUserData; }
- void SetUserData(VmaAllocator hAllocator, void* pUserData);
- VmaSuballocationType GetSuballocationType() const { return (VmaSuballocationType)m_SuballocationType; }
-
- VmaDeviceMemoryBlock* GetBlock() const
- {
- VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
- return m_BlockAllocation.m_Block;
- }
- VkDeviceSize GetOffset() const;
- VkDeviceMemory GetMemory() const;
- uint32_t GetMemoryTypeIndex() const;
- bool IsPersistentMap() const { return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
- void* GetMappedData() const;
- bool CanBecomeLost() const;
- VmaPool GetPool() const;
-
- uint32_t GetLastUseFrameIndex() const
- {
- return m_LastUseFrameIndex.load();
- }
- bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
- {
- return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
- }
- /*
- - If hAllocation.LastUseFrameIndex + frameInUseCount < allocator.CurrentFrameIndex,
- makes it lost by setting LastUseFrameIndex = VMA_FRAME_INDEX_LOST and returns true.
- - Else, returns false.
-
- If hAllocation is already lost, assert - you should not call it then.
- If hAllocation was not created with CAN_BECOME_LOST_BIT, assert.
- */
- bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
-
- void DedicatedAllocCalcStatsInfo(VmaStatInfo& outInfo)
- {
- VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
- outInfo.blockCount = 1;
- outInfo.allocationCount = 1;
- outInfo.unusedRangeCount = 0;
- outInfo.usedBytes = m_Size;
- outInfo.unusedBytes = 0;
- outInfo.allocationSizeMin = outInfo.allocationSizeMax = m_Size;
- outInfo.unusedRangeSizeMin = UINT64_MAX;
- outInfo.unusedRangeSizeMax = 0;
- }
-
- void BlockAllocMap();
- void BlockAllocUnmap();
- VkResult DedicatedAllocMap(VmaAllocator hAllocator, void** ppData);
- void DedicatedAllocUnmap(VmaAllocator hAllocator);
-
-#if VMA_STATS_STRING_ENABLED
- uint32_t GetCreationFrameIndex() const { return m_CreationFrameIndex; }
- uint32_t GetBufferImageUsage() const { return m_BufferImageUsage; }
-
- void InitBufferImageUsage(uint32_t bufferImageUsage)
- {
- VMA_ASSERT(m_BufferImageUsage == 0);
- m_BufferImageUsage = bufferImageUsage;
- }
-
- void PrintParameters(class VmaJsonWriter& json) const;
-#endif
-
-private:
- VkDeviceSize m_Alignment;
- VkDeviceSize m_Size;
- void* m_pUserData;
- VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
- uint8_t m_Type; // ALLOCATION_TYPE
- uint8_t m_SuballocationType; // VmaSuballocationType
- // Bit 0x80 is set when allocation was created with VMA_ALLOCATION_CREATE_MAPPED_BIT.
- // Bits with mask 0x7F are reference counter for vmaMapMemory()/vmaUnmapMemory().
- uint8_t m_MapCount;
- uint8_t m_Flags; // enum FLAGS
-
- // Allocation out of VmaDeviceMemoryBlock.
- struct BlockAllocation
- {
- VmaPool m_hPool; // Null if belongs to general memory.
- VmaDeviceMemoryBlock* m_Block;
- VkDeviceSize m_Offset;
- bool m_CanBecomeLost;
- };
-
- // Allocation for an object that has its own private VkDeviceMemory.
- struct DedicatedAllocation
- {
- uint32_t m_MemoryTypeIndex;
- VkDeviceMemory m_hMemory;
- void* m_pMappedData; // Not null means memory is mapped.
- };
-
- union
- {
- // Allocation out of VmaDeviceMemoryBlock.
- BlockAllocation m_BlockAllocation;
- // Allocation for an object that has its own private VkDeviceMemory.
- DedicatedAllocation m_DedicatedAllocation;
- };
-
-#if VMA_STATS_STRING_ENABLED
- uint32_t m_CreationFrameIndex;
- uint32_t m_BufferImageUsage; // 0 if unknown.
-#endif
-
- void FreeUserDataString(VmaAllocator hAllocator);
-};
-
-/*
-Represents a region of a VmaDeviceMemoryBlock that is either assigned and returned
-as an allocated memory block, or free.
-*/
-struct VmaSuballocation
-{
- VkDeviceSize offset;
- VkDeviceSize size;
- VmaAllocation hAllocation;
- VmaSuballocationType type;
-};
-
-// Comparator for offsets.
-struct VmaSuballocationOffsetLess
-{
- bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
- {
- return lhs.offset < rhs.offset;
- }
-};
-struct VmaSuballocationOffsetGreater
-{
- bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
- {
- return lhs.offset > rhs.offset;
- }
-};
-
-typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
-
-// Cost of one additional allocation becoming lost, expressed as an equivalent number of bytes.
-static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
-
-/*
-Parameters of planned allocation inside a VmaDeviceMemoryBlock.
-
-If canMakeOtherLost was false:
-- item points to a FREE suballocation.
-- itemsToMakeLostCount is 0.
-
-If canMakeOtherLost was true:
-- item points to the first of a sequence of suballocations, which are either FREE
-  or point to VmaAllocations that can become lost.
-- itemsToMakeLostCount is the number of VmaAllocations that need to be made lost for
- the requested allocation to succeed.
-*/
-struct VmaAllocationRequest
-{
- VkDeviceSize offset;
- VkDeviceSize sumFreeSize; // Sum size of free items that overlap with proposed allocation.
- VkDeviceSize sumItemSize; // Sum size of items to make lost that overlap with proposed allocation.
- VmaSuballocationList::iterator item;
- size_t itemsToMakeLostCount;
- void* customData;
-
- VkDeviceSize CalcCost() const
- {
- return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
- }
-};
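-
-/*
-A small worked example of CalcCost() (the numbers are hypothetical): a request that
-overlaps 2097152 bytes of allocations that would have to be made lost, spread across
-3 allocations, costs 2097152 + 3 * VMA_LOST_ALLOCATION_COST = 5242880, so a request
-that fits entirely in free space (sumItemSize == 0, itemsToMakeLostCount == 0)
-compares as strictly cheaper.
-*/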
-
-/*
-Data structure used for bookkeeping of allocations and unused ranges of memory
-in a single VkDeviceMemory block.
-*/
-class VmaBlockMetadata
-{
-public:
- VmaBlockMetadata(VmaAllocator hAllocator);
- virtual ~VmaBlockMetadata() { }
- virtual void Init(VkDeviceSize size) { m_Size = size; }
-
- // Validates all data structures inside this object. If not valid, returns false.
- virtual bool Validate() const = 0;
- VkDeviceSize GetSize() const { return m_Size; }
- virtual size_t GetAllocationCount() const = 0;
- virtual VkDeviceSize GetSumFreeSize() const = 0;
- virtual VkDeviceSize GetUnusedRangeSizeMax() const = 0;
-    // Returns true if this block is empty - contains only a single free suballocation.
- virtual bool IsEmpty() const = 0;
-
- virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const = 0;
- // Shouldn't modify blockCount.
- virtual void AddPoolStats(VmaPoolStats& inoutStats) const = 0;
-
-#if VMA_STATS_STRING_ENABLED
- virtual void PrintDetailedMap(class VmaJsonWriter& json) const = 0;
-#endif
-
- // Tries to find a place for suballocation with given parameters inside this block.
- // If succeeded, fills pAllocationRequest and returns true.
- // If failed, returns false.
- virtual bool CreateAllocationRequest(
- uint32_t currentFrameIndex,
- uint32_t frameInUseCount,
- VkDeviceSize bufferImageGranularity,
- VkDeviceSize allocSize,
- VkDeviceSize allocAlignment,
- bool upperAddress,
- VmaSuballocationType allocType,
- bool canMakeOtherLost,
- // Always one of VMA_ALLOCATION_CREATE_STRATEGY_* or VMA_ALLOCATION_INTERNAL_STRATEGY_* flags.
- uint32_t strategy,
- VmaAllocationRequest* pAllocationRequest) = 0;
-
- virtual bool MakeRequestedAllocationsLost(
- uint32_t currentFrameIndex,
- uint32_t frameInUseCount,
- VmaAllocationRequest* pAllocationRequest) = 0;
-
- virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
-
- virtual VkResult CheckCorruption(const void* pBlockData) = 0;
-
- // Makes actual allocation based on request. Request must already be checked and valid.
- virtual void Alloc(
- const VmaAllocationRequest& request,
- VmaSuballocationType type,
- VkDeviceSize allocSize,
- bool upperAddress,
- VmaAllocation hAllocation) = 0;
-
- // Frees suballocation assigned to given memory region.
- virtual void Free(const VmaAllocation allocation) = 0;
- virtual void FreeAtOffset(VkDeviceSize offset) = 0;
-
- // Tries to resize (grow or shrink) space for given allocation, in place.
- virtual bool ResizeAllocation(const VmaAllocation alloc, VkDeviceSize newSize) { return false; }
-
-protected:
- const VkAllocationCallbacks* GetAllocationCallbacks() const { return m_pAllocationCallbacks; }
-
-#if VMA_STATS_STRING_ENABLED
- void PrintDetailedMap_Begin(class VmaJsonWriter& json,
- VkDeviceSize unusedBytes,
- size_t allocationCount,
- size_t unusedRangeCount) const;
- void PrintDetailedMap_Allocation(class VmaJsonWriter& json,
- VkDeviceSize offset,
- VmaAllocation hAllocation) const;
- void PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
- VkDeviceSize offset,
- VkDeviceSize size) const;
- void PrintDetailedMap_End(class VmaJsonWriter& json) const;
-#endif
-
-private:
- VkDeviceSize m_Size;
- const VkAllocationCallbacks* m_pAllocationCallbacks;
-};
-
-#define VMA_VALIDATE(cond) do { if(!(cond)) { \
- VMA_ASSERT(0 && "Validation failed: " #cond); \
- return false; \
- } } while(false)
-
-class VmaBlockMetadata_Generic : public VmaBlockMetadata
-{
- VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
-public:
- VmaBlockMetadata_Generic(VmaAllocator hAllocator);
- virtual ~VmaBlockMetadata_Generic();
- virtual void Init(VkDeviceSize size);
-
- virtual bool Validate() const;
- virtual size_t GetAllocationCount() const { return m_Suballocations.size() - m_FreeCount; }
- virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
- virtual VkDeviceSize GetUnusedRangeSizeMax() const;
- virtual bool IsEmpty() const;
-
- virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
- virtual void AddPoolStats(VmaPoolStats& inoutStats) const;
-
-#if VMA_STATS_STRING_ENABLED
- virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
-#endif
-
- virtual bool CreateAllocationRequest(
- uint32_t currentFrameIndex,
- uint32_t frameInUseCount,
- VkDeviceSize bufferImageGranularity,
- VkDeviceSize allocSize,
- VkDeviceSize allocAlignment,
- bool upperAddress,
- VmaSuballocationType allocType,
- bool canMakeOtherLost,
- uint32_t strategy,
- VmaAllocationRequest* pAllocationRequest);
-
- virtual bool MakeRequestedAllocationsLost(
- uint32_t currentFrameIndex,
- uint32_t frameInUseCount,
- VmaAllocationRequest* pAllocationRequest);
-
- virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
-
- virtual VkResult CheckCorruption(const void* pBlockData);
-
- virtual void Alloc(
- const VmaAllocationRequest& request,
- VmaSuballocationType type,
- VkDeviceSize allocSize,
- bool upperAddress,
- VmaAllocation hAllocation);
-
- virtual void Free(const VmaAllocation allocation);
- virtual void FreeAtOffset(VkDeviceSize offset);
-
- virtual bool ResizeAllocation(const VmaAllocation alloc, VkDeviceSize newSize);
-
- ////////////////////////////////////////////////////////////////////////////////
- // For defragmentation
-
- bool IsBufferImageGranularityConflictPossible(
- VkDeviceSize bufferImageGranularity,
- VmaSuballocationType& inOutPrevSuballocType) const;
-
-private:
- friend class VmaDefragmentationAlgorithm_Generic;
- friend class VmaDefragmentationAlgorithm_Fast;
-
- uint32_t m_FreeCount;
- VkDeviceSize m_SumFreeSize;
- VmaSuballocationList m_Suballocations;
- // Suballocations that are free and have size greater than certain threshold.
- // Sorted by size, ascending.
- VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
-
- bool ValidateFreeSuballocationList() const;
-
- // Checks if requested suballocation with given parameters can be placed in given pFreeSuballocItem.
- // If yes, fills pOffset and returns true. If no, returns false.
- bool CheckAllocation(
- uint32_t currentFrameIndex,
- uint32_t frameInUseCount,
- VkDeviceSize bufferImageGranularity,
- VkDeviceSize allocSize,
- VkDeviceSize allocAlignment,
- VmaSuballocationType allocType,
- VmaSuballocationList::const_iterator suballocItem,
- bool canMakeOtherLost,
- VkDeviceSize* pOffset,
- size_t* itemsToMakeLostCount,
- VkDeviceSize* pSumFreeSize,
- VkDeviceSize* pSumItemSize) const;
-    // Given a free suballocation, merges it with the following one, which must also be free.
- void MergeFreeWithNext(VmaSuballocationList::iterator item);
- // Releases given suballocation, making it free.
- // Merges it with adjacent free suballocations if applicable.
- // Returns iterator to new free suballocation at this place.
- VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
-    // Given a free suballocation, inserts it into the sorted list
-    // m_FreeSuballocationsBySize if it is suitable.
-    void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
-    // Given a free suballocation, removes it from the sorted list
-    // m_FreeSuballocationsBySize if it is suitable.
-    void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
-};
-
-/*
-Allocations and their references in internal data structure look like this:
-
-if(m_2ndVectorMode == SECOND_VECTOR_EMPTY):
-
- 0 +-------+
- | |
- | |
- | |
- +-------+
- | Alloc | 1st[m_1stNullItemsBeginCount]
- +-------+
- | Alloc | 1st[m_1stNullItemsBeginCount + 1]
- +-------+
- | ... |
- +-------+
- | Alloc | 1st[1st.size() - 1]
- +-------+
- | |
- | |
- | |
-GetSize() +-------+
-
-if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER):
-
- 0 +-------+
- | Alloc | 2nd[0]
- +-------+
- | Alloc | 2nd[1]
- +-------+
- | ... |
- +-------+
- | Alloc | 2nd[2nd.size() - 1]
- +-------+
- | |
- | |
- | |
- +-------+
- | Alloc | 1st[m_1stNullItemsBeginCount]
- +-------+
- | Alloc | 1st[m_1stNullItemsBeginCount + 1]
- +-------+
- | ... |
- +-------+
- | Alloc | 1st[1st.size() - 1]
- +-------+
- | |
-GetSize() +-------+
-
-if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK):
-
- 0 +-------+
- | |
- | |
- | |
- +-------+
- | Alloc | 1st[m_1stNullItemsBeginCount]
- +-------+
- | Alloc | 1st[m_1stNullItemsBeginCount + 1]
- +-------+
- | ... |
- +-------+
- | Alloc | 1st[1st.size() - 1]
- +-------+
- | |
- | |
- | |
- +-------+
- | Alloc | 2nd[2nd.size() - 1]
- +-------+
- | ... |
- +-------+
- | Alloc | 2nd[1]
- +-------+
- | Alloc | 2nd[0]
-GetSize() +-------+
-
-*/
-class VmaBlockMetadata_Linear : public VmaBlockMetadata
-{
- VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
-public:
- VmaBlockMetadata_Linear(VmaAllocator hAllocator);
- virtual ~VmaBlockMetadata_Linear();
- virtual void Init(VkDeviceSize size);
-
- virtual bool Validate() const;
- virtual size_t GetAllocationCount() const;
- virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
- virtual VkDeviceSize GetUnusedRangeSizeMax() const;
- virtual bool IsEmpty() const { return GetAllocationCount() == 0; }
-
- virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
- virtual void AddPoolStats(VmaPoolStats& inoutStats) const;
-
-#if VMA_STATS_STRING_ENABLED
- virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
-#endif
-
- virtual bool CreateAllocationRequest(
- uint32_t currentFrameIndex,
- uint32_t frameInUseCount,
- VkDeviceSize bufferImageGranularity,
- VkDeviceSize allocSize,
- VkDeviceSize allocAlignment,
- bool upperAddress,
- VmaSuballocationType allocType,
- bool canMakeOtherLost,
- uint32_t strategy,
- VmaAllocationRequest* pAllocationRequest);
-
- virtual bool MakeRequestedAllocationsLost(
- uint32_t currentFrameIndex,
- uint32_t frameInUseCount,
- VmaAllocationRequest* pAllocationRequest);
-
- virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
-
- virtual VkResult CheckCorruption(const void* pBlockData);
-
- virtual void Alloc(
- const VmaAllocationRequest& request,
- VmaSuballocationType type,
- VkDeviceSize allocSize,
- bool upperAddress,
- VmaAllocation hAllocation);
-
- virtual void Free(const VmaAllocation allocation);
- virtual void FreeAtOffset(VkDeviceSize offset);
-
-private:
- /*
- There are two suballocation vectors, used in ping-pong way.
- The one with index m_1stVectorIndex is called 1st.
- The one with index (m_1stVectorIndex ^ 1) is called 2nd.
- 2nd can be non-empty only when 1st is not empty.
- When 2nd is not empty, m_2ndVectorMode indicates its mode of operation.
- */
- typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
-
- enum SECOND_VECTOR_MODE
- {
- SECOND_VECTOR_EMPTY,
- /*
- Suballocations in 2nd vector are created later than the ones in 1st, but they
- all have smaller offset.
- */
- SECOND_VECTOR_RING_BUFFER,
- /*
- Suballocations in 2nd vector are upper side of double stack.
- They all have offsets higher than those in 1st vector.
- Top of this stack means smaller offsets, but higher indices in this vector.
- */
- SECOND_VECTOR_DOUBLE_STACK,
- };
-
- VkDeviceSize m_SumFreeSize;
- SuballocationVectorType m_Suballocations0, m_Suballocations1;
- uint32_t m_1stVectorIndex;
- SECOND_VECTOR_MODE m_2ndVectorMode;
-
- SuballocationVectorType& AccessSuballocations1st() { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
- SuballocationVectorType& AccessSuballocations2nd() { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
- const SuballocationVectorType& AccessSuballocations1st() const { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
- const SuballocationVectorType& AccessSuballocations2nd() const { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
-
- // Number of items in 1st vector with hAllocation = null at the beginning.
- size_t m_1stNullItemsBeginCount;
- // Number of other items in 1st vector with hAllocation = null somewhere in the middle.
- size_t m_1stNullItemsMiddleCount;
- // Number of items in 2nd vector with hAllocation = null.
- size_t m_2ndNullItemsCount;
-
- bool ShouldCompact1st() const;
- void CleanupAfterFree();
-};
-
-/*
-- GetSize() is the original size of the allocated memory block.
-- m_UsableSize is this size aligned down to a power of two.
-  All allocations and calculations happen relative to m_UsableSize.
-- GetUnusableSize() is the difference between them.
-  It is reported as a separate, unused range, not available for allocations.
-
-The node at level 0 has size = m_UsableSize.
-Each subsequent level contains nodes half the size of the previous level.
-m_LevelCount is the maximum number of levels to use in the current object.
-*/
-class VmaBlockMetadata_Buddy : public VmaBlockMetadata
-{
- VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
-public:
- VmaBlockMetadata_Buddy(VmaAllocator hAllocator);
- virtual ~VmaBlockMetadata_Buddy();
- virtual void Init(VkDeviceSize size);
-
- virtual bool Validate() const;
- virtual size_t GetAllocationCount() const { return m_AllocationCount; }
- virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize + GetUnusableSize(); }
- virtual VkDeviceSize GetUnusedRangeSizeMax() const;
- virtual bool IsEmpty() const { return m_Root->type == Node::TYPE_FREE; }
-
- virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
- virtual void AddPoolStats(VmaPoolStats& inoutStats) const;
-
-#if VMA_STATS_STRING_ENABLED
- virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
-#endif
-
- virtual bool CreateAllocationRequest(
- uint32_t currentFrameIndex,
- uint32_t frameInUseCount,
- VkDeviceSize bufferImageGranularity,
- VkDeviceSize allocSize,
- VkDeviceSize allocAlignment,
- bool upperAddress,
- VmaSuballocationType allocType,
- bool canMakeOtherLost,
- uint32_t strategy,
- VmaAllocationRequest* pAllocationRequest);
-
- virtual bool MakeRequestedAllocationsLost(
- uint32_t currentFrameIndex,
- uint32_t frameInUseCount,
- VmaAllocationRequest* pAllocationRequest);
-
- virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
-
- virtual VkResult CheckCorruption(const void* pBlockData) { return VK_ERROR_FEATURE_NOT_PRESENT; }
-
- virtual void Alloc(
- const VmaAllocationRequest& request,
- VmaSuballocationType type,
- VkDeviceSize allocSize,
- bool upperAddress,
- VmaAllocation hAllocation);
-
- virtual void Free(const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
- virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
-
-private:
- static const VkDeviceSize MIN_NODE_SIZE = 32;
- static const size_t MAX_LEVELS = 30;
-
- struct ValidationContext
- {
- size_t calculatedAllocationCount;
- size_t calculatedFreeCount;
- VkDeviceSize calculatedSumFreeSize;
-
- ValidationContext() :
- calculatedAllocationCount(0),
- calculatedFreeCount(0),
- calculatedSumFreeSize(0) { }
- };
-
- struct Node
- {
- VkDeviceSize offset;
- enum TYPE
- {
- TYPE_FREE,
- TYPE_ALLOCATION,
- TYPE_SPLIT,
- TYPE_COUNT
- } type;
- Node* parent;
- Node* buddy;
-
- union
- {
- struct
- {
- Node* prev;
- Node* next;
- } free;
- struct
- {
- VmaAllocation alloc;
- } allocation;
- struct
- {
- Node* leftChild;
- } split;
- };
- };
-
- // Size of the memory block aligned down to a power of two.
- VkDeviceSize m_UsableSize;
- uint32_t m_LevelCount;
-
- Node* m_Root;
- struct {
- Node* front;
- Node* back;
- } m_FreeList[MAX_LEVELS];
- // Number of nodes in the tree with type == TYPE_ALLOCATION.
- size_t m_AllocationCount;
- // Number of nodes in the tree with type == TYPE_FREE.
- size_t m_FreeCount;
- // This includes space wasted due to internal fragmentation. Doesn't include unusable size.
- VkDeviceSize m_SumFreeSize;
-
- VkDeviceSize GetUnusableSize() const { return GetSize() - m_UsableSize; }
- void DeleteNode(Node* node);
- bool ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const;
- uint32_t AllocSizeToLevel(VkDeviceSize allocSize) const;
- inline VkDeviceSize LevelToNodeSize(uint32_t level) const { return m_UsableSize >> level; }
- // Alloc passed just for validation. Can be null.
- void FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset);
- void CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const;
- // Adds node to the front of FreeList at given level.
- // node->type must be FREE.
- // node->free.prev, next can be undefined.
- void AddToFreeListFront(uint32_t level, Node* node);
- // Removes node from FreeList at given level.
- // node->type must be FREE.
- // node->free.prev, next stay untouched.
- void RemoveFromFreeList(uint32_t level, Node* node);
-
-#if VMA_STATS_STRING_ENABLED
- void PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const;
-#endif
-};
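-
-/*
-A small worked example of the level math above (the numbers are hypothetical):
-for a block with GetSize() == 1000, m_UsableSize is 512 (1000 aligned down to a
-power of two) and GetUnusableSize() == 488. With MIN_NODE_SIZE == 32, node sizes
-per level are 512, 256, 128, 64, 32, i.e. 5 usable levels, and
-LevelToNodeSize(2) == 512 >> 2 == 128.
-*/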
-
-/*
-Represents a single block of device memory (`VkDeviceMemory`) with all the
-data about its regions (aka suballocations, #VmaAllocation), assigned and free.
-
-Thread-safety: This class must be externally synchronized.
-*/
-class VmaDeviceMemoryBlock
-{
- VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
-public:
- VmaBlockMetadata* m_pMetadata;
-
- VmaDeviceMemoryBlock(VmaAllocator hAllocator);
-
- ~VmaDeviceMemoryBlock()
- {
- VMA_ASSERT(m_MapCount == 0 && "VkDeviceMemory block is being destroyed while it is still mapped.");
- VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
- }
-
- // Always call after construction.
- void Init(
- VmaAllocator hAllocator,
- uint32_t newMemoryTypeIndex,
- VkDeviceMemory newMemory,
- VkDeviceSize newSize,
- uint32_t id,
- uint32_t algorithm);
- // Always call before destruction.
- void Destroy(VmaAllocator allocator);
-
- VkDeviceMemory GetDeviceMemory() const { return m_hMemory; }
- uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
- uint32_t GetId() const { return m_Id; }
- void* GetMappedData() const { return m_pMappedData; }
-
- // Validates all data structures inside this object. If not valid, returns false.
- bool Validate() const;
-
- VkResult CheckCorruption(VmaAllocator hAllocator);
-
- // ppData can be null.
- VkResult Map(VmaAllocator hAllocator, uint32_t count, void** ppData);
- void Unmap(VmaAllocator hAllocator, uint32_t count);
-
- VkResult WriteMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
- VkResult ValidateMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
-
- VkResult BindBufferMemory(
- const VmaAllocator hAllocator,
- const VmaAllocation hAllocation,
- VkBuffer hBuffer);
- VkResult BindImageMemory(
- const VmaAllocator hAllocator,
- const VmaAllocation hAllocation,
- VkImage hImage);
-
-private:
- uint32_t m_MemoryTypeIndex;
- uint32_t m_Id;
- VkDeviceMemory m_hMemory;
-
- /*
- Protects access to m_hMemory so it's not used by multiple threads simultaneously, e.g. vkMapMemory, vkBindBufferMemory.
- Also protects m_MapCount, m_pMappedData.
- Allocations, deallocations, any change in m_pMetadata is protected by parent's VmaBlockVector::m_Mutex.
- */
- VMA_MUTEX m_Mutex;
- uint32_t m_MapCount;
- void* m_pMappedData;
-};
-
-struct VmaPointerLess
-{
- bool operator()(const void* lhs, const void* rhs) const
- {
- return lhs < rhs;
- }
-};
-
-struct VmaDefragmentationMove
-{
- size_t srcBlockIndex;
- size_t dstBlockIndex;
- VkDeviceSize srcOffset;
- VkDeviceSize dstOffset;
- VkDeviceSize size;
-};
-
-class VmaDefragmentationAlgorithm;
-
-/*
-Sequence of VmaDeviceMemoryBlock. Represents memory blocks allocated for a specific
-Vulkan memory type.
-
-Synchronized internally with a mutex.
-*/
-struct VmaBlockVector
-{
- VMA_CLASS_NO_COPY(VmaBlockVector)
-public:
- VmaBlockVector(
- VmaAllocator hAllocator,
- uint32_t memoryTypeIndex,
- VkDeviceSize preferredBlockSize,
- size_t minBlockCount,
- size_t maxBlockCount,
- VkDeviceSize bufferImageGranularity,
- uint32_t frameInUseCount,
- bool isCustomPool,
- bool explicitBlockSize,
- uint32_t algorithm);
- ~VmaBlockVector();
-
- VkResult CreateMinBlocks();
-
- uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
- VkDeviceSize GetPreferredBlockSize() const { return m_PreferredBlockSize; }
- VkDeviceSize GetBufferImageGranularity() const { return m_BufferImageGranularity; }
- uint32_t GetFrameInUseCount() const { return m_FrameInUseCount; }
- uint32_t GetAlgorithm() const { return m_Algorithm; }
-
- void GetPoolStats(VmaPoolStats* pStats);
-
- bool IsEmpty() const { return m_Blocks.empty(); }
- bool IsCorruptionDetectionEnabled() const;
-
- VkResult Allocate(
- VmaPool hCurrentPool,
- uint32_t currentFrameIndex,
- VkDeviceSize size,
- VkDeviceSize alignment,
- const VmaAllocationCreateInfo& createInfo,
- VmaSuballocationType suballocType,
- size_t allocationCount,
- VmaAllocation* pAllocations);
-
- void Free(
- VmaAllocation hAllocation);
-
- // Adds statistics of this BlockVector to pStats.
- void AddStats(VmaStats* pStats);
-
-#if VMA_STATS_STRING_ENABLED
- void PrintDetailedMap(class VmaJsonWriter& json);
-#endif
-
- void MakePoolAllocationsLost(
- uint32_t currentFrameIndex,
- size_t* pLostAllocationCount);
- VkResult CheckCorruption();
-
- // Saves results in pCtx->res.
- void Defragment(
- class VmaBlockVectorDefragmentationContext* pCtx,
- VmaDefragmentationStats* pStats,
- VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
- VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
- VkCommandBuffer commandBuffer);
- void DefragmentationEnd(
- class VmaBlockVectorDefragmentationContext* pCtx,
- VmaDefragmentationStats* pStats);
-
- ////////////////////////////////////////////////////////////////////////////////
- // To be used only while the m_Mutex is locked. Used during defragmentation.
-
- size_t GetBlockCount() const { return m_Blocks.size(); }
- VmaDeviceMemoryBlock* GetBlock(size_t index) const { return m_Blocks[index]; }
- size_t CalcAllocationCount() const;
- bool IsBufferImageGranularityConflictPossible() const;
-
-private:
- friend class VmaDefragmentationAlgorithm_Generic;
-
- const VmaAllocator m_hAllocator;
- const uint32_t m_MemoryTypeIndex;
- const VkDeviceSize m_PreferredBlockSize;
- const size_t m_MinBlockCount;
- const size_t m_MaxBlockCount;
- const VkDeviceSize m_BufferImageGranularity;
- const uint32_t m_FrameInUseCount;
- const bool m_IsCustomPool;
- const bool m_ExplicitBlockSize;
- const uint32_t m_Algorithm;
- /* There can be at most one block that is completely empty - a hysteresis to
- avoid the pessimistic case of alternating creation and destruction of a
- VkDeviceMemory. */
- bool m_HasEmptyBlock;
- VMA_RW_MUTEX m_Mutex;
- // Incrementally sorted by sumFreeSize, ascending.
- VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
- uint32_t m_NextBlockId;
-
- VkDeviceSize CalcMaxBlockSize() const;
-
- // Finds and removes given block from vector.
- void Remove(VmaDeviceMemoryBlock* pBlock);
-
- // Performs a single step in sorting m_Blocks. They may not be fully sorted
- // after this call.
- void IncrementallySortBlocks();
-
- VkResult AllocatePage(
- VmaPool hCurrentPool,
- uint32_t currentFrameIndex,
- VkDeviceSize size,
- VkDeviceSize alignment,
- const VmaAllocationCreateInfo& createInfo,
- VmaSuballocationType suballocType,
- VmaAllocation* pAllocation);
-
- // To be used only without the CAN_MAKE_OTHER_LOST flag.
- VkResult AllocateFromBlock(
- VmaDeviceMemoryBlock* pBlock,
- VmaPool hCurrentPool,
- uint32_t currentFrameIndex,
- VkDeviceSize size,
- VkDeviceSize alignment,
- VmaAllocationCreateFlags allocFlags,
- void* pUserData,
- VmaSuballocationType suballocType,
- uint32_t strategy,
- VmaAllocation* pAllocation);
-
- VkResult CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex);
-
- // Saves result to pCtx->res.
- void ApplyDefragmentationMovesCpu(
- class VmaBlockVectorDefragmentationContext* pDefragCtx,
- const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
- // Saves result to pCtx->res.
- void ApplyDefragmentationMovesGpu(
- class VmaBlockVectorDefragmentationContext* pDefragCtx,
- const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
- VkCommandBuffer commandBuffer);
-
- /*
- Used during defragmentation. pDefragmentationStats is optional; it is in/out
- and is updated with new data.
- */
- void FreeEmptyBlocks(VmaDefragmentationStats* pDefragmentationStats);
-};
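-
-// Illustrative sketch, not part of the original header: a single allocation
-// from a block vector using the Allocate()/Free() declarations above. The
-// surrounding variables (currentFrameIndex, size, alignment, createInfo) and
-// the null pool handle for a default (non-custom) vector are hypothetical.
-//
-//   VmaAllocation alloc = VK_NULL_HANDLE;
-//   VkResult res = blockVector.Allocate(
-//       VK_NULL_HANDLE,                    // hCurrentPool
-//       currentFrameIndex,
-//       size, alignment, createInfo,
-//       VMA_SUBALLOCATION_TYPE_BUFFER,
-//       1,                                 // allocationCount
-//       &alloc);
-//   if(res == VK_SUCCESS)
-//   {
-//       // ... use alloc ...
-//       blockVector.Free(alloc);
-//   }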
-
-struct VmaPool_T
-{
- VMA_CLASS_NO_COPY(VmaPool_T)
-public:
- VmaBlockVector m_BlockVector;
-
- VmaPool_T(
- VmaAllocator hAllocator,
- const VmaPoolCreateInfo& createInfo,
- VkDeviceSize preferredBlockSize);
- ~VmaPool_T();
-
- uint32_t GetId() const { return m_Id; }
- void SetId(uint32_t id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
-
-#if VMA_STATS_STRING_ENABLED
- //void PrintDetailedMap(class VmaStringBuilder& sb);
-#endif
-
-private:
- uint32_t m_Id;
-};
-
-/*
-Performs defragmentation:
-
-- Updates `pBlockVector->m_pMetadata`.
-- Updates allocations by calling ChangeBlockAllocation() or ChangeOffset().
-- Does not move actual data, only returns requested moves as `moves`.
-*/
-class VmaDefragmentationAlgorithm
-{
- VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
-public:
- VmaDefragmentationAlgorithm(
- VmaAllocator hAllocator,
- VmaBlockVector* pBlockVector,
- uint32_t currentFrameIndex) :
- m_hAllocator(hAllocator),
- m_pBlockVector(pBlockVector),
- m_CurrentFrameIndex(currentFrameIndex)
- {
- }
- virtual ~VmaDefragmentationAlgorithm()
- {
- }
-
- virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) = 0;
- virtual void AddAll() = 0;
-
- virtual VkResult Defragment(
- VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
- VkDeviceSize maxBytesToMove,
- uint32_t maxAllocationsToMove) = 0;
-
- virtual VkDeviceSize GetBytesMoved() const = 0;
- virtual uint32_t GetAllocationsMoved() const = 0;
-
-protected:
- VmaAllocator const m_hAllocator;
- VmaBlockVector* const m_pBlockVector;
- const uint32_t m_CurrentFrameIndex;
-
- struct AllocationInfo
- {
- VmaAllocation m_hAllocation;
- VkBool32* m_pChanged;
-
- AllocationInfo() :
- m_hAllocation(VK_NULL_HANDLE),
- m_pChanged(VMA_NULL)
- {
- }
- AllocationInfo(VmaAllocation hAlloc, VkBool32* pChanged) :
- m_hAllocation(hAlloc),
- m_pChanged(pChanged)
- {
- }
- };
-};
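-
-// Illustrative sketch, not part of the original header: a concrete algorithm
-// only fills the `moves` vector; the bytes themselves are copied later by
-// VmaBlockVector::ApplyDefragmentationMovesCpu/Gpu. Each requested move is
-// fully described by the VmaDefragmentationMove fields declared earlier:
-//
-//   for(size_t i = 0; i < moves.size(); ++i)
-//   {
-//       const VmaDefragmentationMove& move = moves[i];
-//       // copy move.size bytes from block[move.srcBlockIndex] + move.srcOffset
-//       //                       to block[move.dstBlockIndex] + move.dstOffset
-//   }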
-
-class VmaDefragmentationAlgorithm_Generic : public VmaDefragmentationAlgorithm
-{
- VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
-public:
- VmaDefragmentationAlgorithm_Generic(
- VmaAllocator hAllocator,
- VmaBlockVector* pBlockVector,
- uint32_t currentFrameIndex,
- bool overlappingMoveSupported);
- virtual ~VmaDefragmentationAlgorithm_Generic();
-
- virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
- virtual void AddAll() { m_AllAllocations = true; }
-
- virtual VkResult Defragment(
- VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
- VkDeviceSize maxBytesToMove,
- uint32_t maxAllocationsToMove);
-
- virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
- virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }
-
-private:
- uint32_t m_AllocationCount;
- bool m_AllAllocations;
-
- VkDeviceSize m_BytesMoved;
- uint32_t m_AllocationsMoved;
-
- struct AllocationInfoSizeGreater
- {
- bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
- {
- return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
- }
- };
-
- struct AllocationInfoOffsetGreater
- {
- bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
- {
- return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
- }
- };
-
- struct BlockInfo
- {
- size_t m_OriginalBlockIndex;
- VmaDeviceMemoryBlock* m_pBlock;
- bool m_HasNonMovableAllocations;
- VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
-
- BlockInfo(const VkAllocationCallbacks* pAllocationCallbacks) :
- m_OriginalBlockIndex(SIZE_MAX),
- m_pBlock(VMA_NULL),
- m_HasNonMovableAllocations(true),
- m_Allocations(pAllocationCallbacks)
- {
- }
-
- void CalcHasNonMovableAllocations()
- {
- const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
- const size_t defragmentAllocCount = m_Allocations.size();
- m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
- }
-
- void SortAllocationsBySizeDescending()
- {
- VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
- }
-
- void SortAllocationsByOffsetDescending()
- {
- VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
- }
- };
-
- struct BlockPointerLess
- {
- bool operator()(const BlockInfo* pLhsBlockInfo, const VmaDeviceMemoryBlock* pRhsBlock) const
- {
- return pLhsBlockInfo->m_pBlock < pRhsBlock;
- }
- bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
- {
- return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
- }
- };
-
- // 1. Blocks with some non-movable allocations go first.
- // 2. Blocks with smaller sumFreeSize go first.
- struct BlockInfoCompareMoveDestination
- {
- bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
- {
- if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
- {
- return true;
- }
- if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
- {
- return false;
- }
- if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
- {
- return true;
- }
- return false;
- }
- };
-
- typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
- BlockInfoVector m_Blocks;
-
- VkResult DefragmentRound(
- VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
- VkDeviceSize maxBytesToMove,
- uint32_t maxAllocationsToMove);
-
- size_t CalcBlocksWithNonMovableCount() const;
-
- static bool MoveMakesSense(
- size_t dstBlockIndex, VkDeviceSize dstOffset,
- size_t srcBlockIndex, VkDeviceSize srcOffset);
-};
-
-class VmaDefragmentationAlgorithm_Fast : public VmaDefragmentationAlgorithm
-{
- VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
-public:
- VmaDefragmentationAlgorithm_Fast(
- VmaAllocator hAllocator,
- VmaBlockVector* pBlockVector,
- uint32_t currentFrameIndex,
- bool overlappingMoveSupported);
- virtual ~VmaDefragmentationAlgorithm_Fast();
-
- virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
- virtual void AddAll() { m_AllAllocations = true; }
-
- virtual VkResult Defragment(
- VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
- VkDeviceSize maxBytesToMove,
- uint32_t maxAllocationsToMove);
-
- virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
- virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }
-
-private:
- struct BlockInfo
- {
- size_t origBlockIndex;
- };
-
- class FreeSpaceDatabase
- {
- public:
- FreeSpaceDatabase()
- {
- FreeSpace s = {};
- s.blockInfoIndex = SIZE_MAX;
- for(size_t i = 0; i < MAX_COUNT; ++i)
- {
- m_FreeSpaces[i] = s;
- }
- }
-
- void Register(size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
- {
- if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
- {
- return;
- }
-
- // Find the first invalid (empty) structure, or otherwise the smallest stored one.
- size_t bestIndex = SIZE_MAX;
- for(size_t i = 0; i < MAX_COUNT; ++i)
- {
- // Empty structure.
- if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
- {
- bestIndex = i;
- break;
- }
- if(m_FreeSpaces[i].size < size &&
- (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
- {
- bestIndex = i;
- }
- }
-
- if(bestIndex != SIZE_MAX)
- {
- m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
- m_FreeSpaces[bestIndex].offset = offset;
- m_FreeSpaces[bestIndex].size = size;
- }
- }
-
- bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
- size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
- {
- size_t bestIndex = SIZE_MAX;
- VkDeviceSize bestFreeSpaceAfter = 0;
- for(size_t i = 0; i < MAX_COUNT; ++i)
- {
- // Structure is valid.
- if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
- {
- const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
- // Allocation fits into this structure.
- if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
- {
- const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
- (dstOffset + size);
- if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
- {
- bestIndex = i;
- bestFreeSpaceAfter = freeSpaceAfter;
- }
- }
- }
- }
-
- if(bestIndex != SIZE_MAX)
- {
- outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
- outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);
-
- if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
- {
- // Leave this structure for remaining empty space.
- const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
- m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
- m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
- }
- else
- {
- // This structure becomes invalid.
- m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
- }
-
- return true;
- }
-
- return false;
- }
-
- private:
- static const size_t MAX_COUNT = 4;
-
- struct FreeSpace
- {
- size_t blockInfoIndex; // SIZE_MAX means this structure is invalid.
- VkDeviceSize offset;
- VkDeviceSize size;
- } m_FreeSpaces[MAX_COUNT];
- };
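-
-    // Descriptive note (not in the original header): FreeSpaceDatabase keeps up
-    // to MAX_COUNT candidate free ranges found while compacting. Register()
-    // ignores ranges smaller than VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER
-    // and otherwise overwrites the first empty slot, or else the smallest
-    // stored range that is smaller than the new one. Fetch() picks the entry
-    // that leaves the most space after an aligned placement, then either
-    // shrinks that entry or invalidates it.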
-
- const bool m_OverlappingMoveSupported;
-
- uint32_t m_AllocationCount;
- bool m_AllAllocations;
-
- VkDeviceSize m_BytesMoved;
- uint32_t m_AllocationsMoved;
-
- VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;
-
- void PreprocessMetadata();
- void PostprocessMetadata();
- void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata, const VmaSuballocation& suballoc);
-};
-
-struct VmaBlockDefragmentationContext
-{
- enum BLOCK_FLAG
- {
- BLOCK_FLAG_USED = 0x00000001,
- };
- uint32_t flags;
- VkBuffer hBuffer;
-
- VmaBlockDefragmentationContext() :
- flags(0),
- hBuffer(VK_NULL_HANDLE)
- {
- }
-};
-
-class VmaBlockVectorDefragmentationContext
-{
- VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
-public:
- VkResult res;
- bool mutexLocked;
- VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;
-
- VmaBlockVectorDefragmentationContext(
- VmaAllocator hAllocator,
- VmaPool hCustomPool, // Optional.
- VmaBlockVector* pBlockVector,
- uint32_t currFrameIndex,
- uint32_t flags);
- ~VmaBlockVectorDefragmentationContext();
-
- VmaPool GetCustomPool() const { return m_hCustomPool; }
- VmaBlockVector* GetBlockVector() const { return m_pBlockVector; }
- VmaDefragmentationAlgorithm* GetAlgorithm() const { return m_pAlgorithm; }
-
- void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
- void AddAll() { m_AllAllocations = true; }
-
- void Begin(bool overlappingMoveSupported);
-
-private:
- const VmaAllocator m_hAllocator;
- // Null if not from custom pool.
- const VmaPool m_hCustomPool;
- // Redundant, kept for convenience so it doesn't have to be fetched from m_hCustomPool->m_BlockVector or m_hAllocator->m_pBlockVectors.
- VmaBlockVector* const m_pBlockVector;
- const uint32_t m_CurrFrameIndex;
- //const uint32_t m_AlgorithmFlags;
- // Owner of this object.
- VmaDefragmentationAlgorithm* m_pAlgorithm;
-
- struct AllocInfo
- {
- VmaAllocation hAlloc;
- VkBool32* pChanged;
- };
- // Used between constructor and Begin.
- VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
- bool m_AllAllocations;
-};
-
-struct VmaDefragmentationContext_T
-{
-private:
- VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
-public:
- VmaDefragmentationContext_T(
- VmaAllocator hAllocator,
- uint32_t currFrameIndex,
- uint32_t flags,
- VmaDefragmentationStats* pStats);
- ~VmaDefragmentationContext_T();
-
- void AddPools(uint32_t poolCount, VmaPool* pPools);
- void AddAllocations(
- uint32_t allocationCount,
- VmaAllocation* pAllocations,
- VkBool32* pAllocationsChanged);
-
- /*
- Returns:
- - `VK_SUCCESS` if it succeeded and the object can be destroyed immediately.
- - `VK_NOT_READY` if it succeeded but the object must remain alive until vmaDefragmentationEnd().
- - Negative value if an error occurred and the object can be destroyed immediately.
- */
- VkResult Defragment(
- VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
- VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
- VkCommandBuffer commandBuffer, VmaDefragmentationStats* pStats);
-
-private:
- const VmaAllocator m_hAllocator;
- const uint32_t m_CurrFrameIndex;
- const uint32_t m_Flags;
- VmaDefragmentationStats* const m_pStats;
- // Owner of these objects.
- VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
- // Owner of these objects.
- VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
-};
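-
-// Illustrative sketch, not part of the original header: how a caller is meant
-// to react to the documented return values of Defragment() above.
-//
-//   VkResult res = pCtx->Defragment(
-//       maxCpuBytesToMove, maxCpuAllocationsToMove,
-//       maxGpuBytesToMove, maxGpuAllocationsToMove,
-//       commandBuffer, pStats);
-//   if(res == VK_SUCCESS)
-//       { /* finished - the context can be destroyed immediately */ }
-//   else if(res == VK_NOT_READY)
-//       { /* keep the context alive until vmaDefragmentationEnd() */ }
-//   else
-//       { /* error - the context can be destroyed immediately */ }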
-
-#if VMA_RECORDING_ENABLED
-
-class VmaRecorder
-{
-public:
- VmaRecorder();
- VkResult Init(const VmaRecordSettings& settings, bool useMutex);
- void WriteConfiguration(
- const VkPhysicalDeviceProperties& devProps,
- const VkPhysicalDeviceMemoryProperties& memProps,
- bool dedicatedAllocationExtensionEnabled);
- ~VmaRecorder();
-
- void RecordCreateAllocator(uint32_t frameIndex);
- void RecordDestroyAllocator(uint32_t frameIndex);
- void RecordCreatePool(uint32_t frameIndex,
- const VmaPoolCreateInfo& createInfo,
- VmaPool pool);
- void RecordDestroyPool(uint32_t frameIndex, VmaPool pool);
- void RecordAllocateMemory(uint32_t frameIndex,
- const VkMemoryRequirements& vkMemReq,
- const VmaAllocationCreateInfo& createInfo,
- VmaAllocation allocation);
- void RecordAllocateMemoryPages(uint32_t frameIndex,
- const VkMemoryRequirements& vkMemReq,
- const VmaAllocationCreateInfo& createInfo,
- uint64_t allocationCount,
- const VmaAllocation* pAllocations);
- void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
- const VkMemoryRequirements& vkMemReq,
- bool requiresDedicatedAllocation,
- bool prefersDedicatedAllocation,
- const VmaAllocationCreateInfo& createInfo,
- VmaAllocation allocation);
- void RecordAllocateMemoryForImage(uint32_t frameIndex,
- const VkMemoryRequirements& vkMemReq,
- bool requiresDedicatedAllocation,
- bool prefersDedicatedAllocation,
- const VmaAllocationCreateInfo& createInfo,
- VmaAllocation allocation);
- void RecordFreeMemory(uint32_t frameIndex,
- VmaAllocation allocation);
- void RecordFreeMemoryPages(uint32_t frameIndex,
- uint64_t allocationCount,
- const VmaAllocation* pAllocations);
- void RecordResizeAllocation(
- uint32_t frameIndex,
- VmaAllocation allocation,
- VkDeviceSize newSize);
- void RecordSetAllocationUserData(uint32_t frameIndex,
- VmaAllocation allocation,
- const void* pUserData);
- void RecordCreateLostAllocation(uint32_t frameIndex,
- VmaAllocation allocation);
- void RecordMapMemory(uint32_t frameIndex,
- VmaAllocation allocation);
- void RecordUnmapMemory(uint32_t frameIndex,
- VmaAllocation allocation);
- void RecordFlushAllocation(uint32_t frameIndex,
- VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
- void RecordInvalidateAllocation(uint32_t frameIndex,
- VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
- void RecordCreateBuffer(uint32_t frameIndex,
- const VkBufferCreateInfo& bufCreateInfo,
- const VmaAllocationCreateInfo& allocCreateInfo,
- VmaAllocation allocation);
- void RecordCreateImage(uint32_t frameIndex,
- const VkImageCreateInfo& imageCreateInfo,
- const VmaAllocationCreateInfo& allocCreateInfo,
- VmaAllocation allocation);
- void RecordDestroyBuffer(uint32_t frameIndex,
- VmaAllocation allocation);
- void RecordDestroyImage(uint32_t frameIndex,
- VmaAllocation allocation);
- void RecordTouchAllocation(uint32_t frameIndex,
- VmaAllocation allocation);
- void RecordGetAllocationInfo(uint32_t frameIndex,
- VmaAllocation allocation);
- void RecordMakePoolAllocationsLost(uint32_t frameIndex,
- VmaPool pool);
- void RecordDefragmentationBegin(uint32_t frameIndex,
- const VmaDefragmentationInfo2& info,
- VmaDefragmentationContext ctx);
- void RecordDefragmentationEnd(uint32_t frameIndex,
- VmaDefragmentationContext ctx);
-
-private:
- struct CallParams
- {
- uint32_t threadId;
- double time;
- };
-
- class UserDataString
- {
- public:
- UserDataString(VmaAllocationCreateFlags allocFlags, const void* pUserData);
- const char* GetString() const { return m_Str; }
-
- private:
- char m_PtrStr[17];
- const char* m_Str;
- };
-
- bool m_UseMutex;
- VmaRecordFlags m_Flags;
- FILE* m_File;
- VMA_MUTEX m_FileMutex;
- int64_t m_Freq;
- int64_t m_StartCounter;
-
- void GetBasicParams(CallParams& outParams);
-
- // T must be a pointer type, e.g. VmaAllocation, VmaPool.
- template<typename T>
- void PrintPointerList(uint64_t count, const T* pItems)
- {
- if(count)
- {
- fprintf(m_File, "%p", pItems[0]);
- for(uint64_t i = 1; i < count; ++i)
- {
- fprintf(m_File, " %p", pItems[i]);
- }
- }
- }
-
- void PrintPointerList(uint64_t count, const VmaAllocation* pItems);
- void Flush();
-};
-
-#endif // #if VMA_RECORDING_ENABLED
-
-// Main allocator object.
-struct VmaAllocator_T
-{
- VMA_CLASS_NO_COPY(VmaAllocator_T)
-public:
- bool m_UseMutex;
- bool m_UseKhrDedicatedAllocation;
- VkDevice m_hDevice;
- bool m_AllocationCallbacksSpecified;
- VkAllocationCallbacks m_AllocationCallbacks;
- VmaDeviceMemoryCallbacks m_DeviceMemoryCallbacks;
-
- // Number of bytes still available within the heap size limit, or VK_WHOLE_SIZE if there is no limit for that heap.
- VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
- VMA_MUTEX m_HeapSizeLimitMutex;
-
- VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
- VkPhysicalDeviceMemoryProperties m_MemProps;
-
- // Default pools.
- VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
-
- // Each vector is sorted by memory (handle value).
- typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
- AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
- VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
-
- VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo);
- VkResult Init(const VmaAllocatorCreateInfo* pCreateInfo);
- ~VmaAllocator_T();
-
- const VkAllocationCallbacks* GetAllocationCallbacks() const
- {
- return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
- }
- const VmaVulkanFunctions& GetVulkanFunctions() const
- {
- return m_VulkanFunctions;
- }
-
- VkDeviceSize GetBufferImageGranularity() const
- {
- return VMA_MAX(
- static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
- m_PhysicalDeviceProperties.limits.bufferImageGranularity);
- }
-
- uint32_t GetMemoryHeapCount() const { return m_MemProps.memoryHeapCount; }
- uint32_t GetMemoryTypeCount() const { return m_MemProps.memoryTypeCount; }
-
- uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex) const
- {
- VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
- return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
- }
- // True when the given memory type is HOST_VISIBLE but not HOST_COHERENT.
- bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex) const
- {
- return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
- VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
- }
- // Minimum alignment for all allocations in the given memory type.
- VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex) const
- {
- return IsMemoryTypeNonCoherent(memTypeIndex) ?
- VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
- (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
- }
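-
-    // Descriptive note (not in the original header): for a HOST_VISIBLE but
-    // non-HOST_COHERENT type, the minimum alignment above is raised to at
-    // least nonCoherentAtomSize so that later vkFlushMappedMemoryRanges /
-    // vkInvalidateMappedMemoryRanges calls on one allocation cannot touch a
-    // neighboring allocation; coherent types fall back to VMA_DEBUG_ALIGNMENT.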
-
- bool IsIntegratedGpu() const
- {
- return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
- }
-
-#if VMA_RECORDING_ENABLED
- VmaRecorder* GetRecorder() const { return m_pRecorder; }
-#endif
-
- void GetBufferMemoryRequirements(
- VkBuffer hBuffer,
- VkMemoryRequirements& memReq,
- bool& requiresDedicatedAllocation,
- bool& prefersDedicatedAllocation) const;
- void GetImageMemoryRequirements(
- VkImage hImage,
- VkMemoryRequirements& memReq,
- bool& requiresDedicatedAllocation,
- bool& prefersDedicatedAllocation) const;
-
- // Main allocation function.
- VkResult AllocateMemory(
- const VkMemoryRequirements& vkMemReq,
- bool requiresDedicatedAllocation,
- bool prefersDedicatedAllocation,
- VkBuffer dedicatedBuffer,
- VkImage dedicatedImage,
- const VmaAllocationCreateInfo& createInfo,
- VmaSuballocationType suballocType,
- size_t allocationCount,
- VmaAllocation* pAllocations);
-
- // Main deallocation function.
- void FreeMemory(
- size_t allocationCount,
- const VmaAllocation* pAllocations);
-
- VkResult ResizeAllocation(
- const VmaAllocation alloc,
- VkDeviceSize newSize);
-
- void CalculateStats(VmaStats* pStats);
-
-#if VMA_STATS_STRING_ENABLED
- void PrintDetailedMap(class VmaJsonWriter& json);
-#endif
-
- VkResult DefragmentationBegin(
- const VmaDefragmentationInfo2& info,
- VmaDefragmentationStats* pStats,
- VmaDefragmentationContext* pContext);
- VkResult DefragmentationEnd(
- VmaDefragmentationContext context);
-
- void GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo);
- bool TouchAllocation(VmaAllocation hAllocation);
-
- VkResult CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool);
- void DestroyPool(VmaPool pool);
- void GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats);
-
- void SetCurrentFrameIndex(uint32_t frameIndex);
- uint32_t GetCurrentFrameIndex() const { return m_CurrentFrameIndex.load(); }
-
- void MakePoolAllocationsLost(
- VmaPool hPool,
- size_t* pLostAllocationCount);
- VkResult CheckPoolCorruption(VmaPool hPool);
- VkResult CheckCorruption(uint32_t memoryTypeBits);
-
- void CreateLostAllocation(VmaAllocation* pAllocation);
-
- VkResult AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
- void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
-
- VkResult Map(VmaAllocation hAllocation, void** ppData);
- void Unmap(VmaAllocation hAllocation);
-
- VkResult BindBufferMemory(VmaAllocation hAllocation, VkBuffer hBuffer);
- VkResult BindImageMemory(VmaAllocation hAllocation, VkImage hImage);
-
- void FlushOrInvalidateAllocation(
- VmaAllocation hAllocation,
- VkDeviceSize offset, VkDeviceSize size,
- VMA_CACHE_OPERATION op);
-
- void FillAllocation(const VmaAllocation hAllocation, uint8_t pattern);
-
-private:
- VkDeviceSize m_PreferredLargeHeapBlockSize;
-
- VkPhysicalDevice m_PhysicalDevice;
- VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
-
- VMA_RW_MUTEX m_PoolsMutex;
- // Protected by m_PoolsMutex. Sorted by pointer value.
- VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
- uint32_t m_NextPoolId;
-
- VmaVulkanFunctions m_VulkanFunctions;
-
-#if VMA_RECORDING_ENABLED
- VmaRecorder* m_pRecorder;
-#endif
-
- void ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions);
-
- VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
-
- VkResult AllocateMemoryOfType(
- VkDeviceSize size,
- VkDeviceSize alignment,
- bool dedicatedAllocation,
- VkBuffer dedicatedBuffer,
- VkImage dedicatedImage,
- const VmaAllocationCreateInfo& createInfo,
- uint32_t memTypeIndex,
- VmaSuballocationType suballocType,
- size_t allocationCount,
- VmaAllocation* pAllocations);
-
- // Helper function only to be used inside AllocateDedicatedMemory.
- VkResult AllocateDedicatedMemoryPage(
- VkDeviceSize size,
- VmaSuballocationType suballocType,
- uint32_t memTypeIndex,
- const VkMemoryAllocateInfo& allocInfo,
- bool map,
- bool isUserDataString,
- void* pUserData,
- VmaAllocation* pAllocation);
-
- // Allocates and registers new VkDeviceMemory specifically for dedicated allocations.
- VkResult AllocateDedicatedMemory(
- VkDeviceSize size,
- VmaSuballocationType suballocType,
- uint32_t memTypeIndex,
- bool map,
- bool isUserDataString,
- void* pUserData,
- VkBuffer dedicatedBuffer,
- VkImage dedicatedImage,
- size_t allocationCount,
- VmaAllocation* pAllocations);
-
- // Frees the given allocation made as a dedicated allocation and unregisters it.
- void FreeDedicatedMemory(VmaAllocation allocation);
-};
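-
-// Descriptive note (not in the original header): the allocation path through
-// the members above is roughly AllocateMemory() -> AllocateMemoryOfType() for
-// each candidate memory type, which either suballocates from the default
-// VmaBlockVector of that type or falls back to AllocateDedicatedMemory() /
-// AllocateDedicatedMemoryPage() when a dedicated VkDeviceMemory is required
-// or preferred.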
-
-////////////////////////////////////////////////////////////////////////////////
-// Memory allocation #2 after VmaAllocator_T definition
-
-static void* VmaMalloc(VmaAllocator hAllocator, size_t size, size_t alignment)
-{
- return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
-}
-
-static void VmaFree(VmaAllocator hAllocator, void* ptr)
-{
- VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
-}
-
-template<typename T>
-static T* VmaAllocate(VmaAllocator hAllocator)
-{
- return (T*)VmaMalloc(hAllocator, sizeof(T), VMA_ALIGN_OF(T));
-}
-
-template<typename T>
-static T* VmaAllocateArray(VmaAllocator hAllocator, size_t count)
-{
- return (T*)VmaMalloc(hAllocator, sizeof(T) * count, VMA_ALIGN_OF(T));
-}
-
-template<typename T>
-static void vma_delete(VmaAllocator hAllocator, T* ptr)
-{
- if(ptr != VMA_NULL)
- {
- ptr->~T();
- VmaFree(hAllocator, ptr);
- }
-}
-
-template<typename T>
-static void vma_delete_array(VmaAllocator hAllocator, T* ptr, size_t count)
-{
- if(ptr != VMA_NULL)
- {
- for(size_t i = count; i--; )
- ptr[i].~T();
- VmaFree(hAllocator, ptr);
- }
-}
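-
-// Illustrative sketch, not part of the original header: VmaAllocate<T>()
-// returns raw, properly aligned storage, so construction and destruction are
-// explicit and vma_delete() pairs the destructor call with VmaFree().
-// MyType is a hypothetical type used only for illustration.
-//
-//   MyType* p = VmaAllocate<MyType>(hAllocator); // raw memory, no constructor
-//   new(p) MyType();                             // placement-construct
-//   vma_delete(hAllocator, p);                   // ~MyType() + VmaFree()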
-
-////////////////////////////////////////////////////////////////////////////////
-// VmaStringBuilder
-
-#if VMA_STATS_STRING_ENABLED
-
-class VmaStringBuilder
-{
-public:
- VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
- size_t GetLength() const { return m_Data.size(); }
- const char* GetData() const { return m_Data.data(); }
-
- void Add(char ch) { m_Data.push_back(ch); }
- void Add(const char* pStr);
- void AddNewLine() { Add('\n'); }
- void AddNumber(uint32_t num);
- void AddNumber(uint64_t num);
- void AddPointer(const void* ptr);
-
-private:
- VmaVector< char, VmaStlAllocator<char> > m_Data;
-};
-
-void VmaStringBuilder::Add(const char* pStr)
-{
- const size_t strLen = strlen(pStr);
- if(strLen > 0)
- {
- const size_t oldCount = m_Data.size();
- m_Data.resize(oldCount + strLen);
- memcpy(m_Data.data() + oldCount, pStr, strLen);
- }
-}
-
-void VmaStringBuilder::AddNumber(uint32_t num)
-{
- char buf[11];
- VmaUint32ToStr(buf, sizeof(buf), num);
- Add(buf);
-}
-
-void VmaStringBuilder::AddNumber(uint64_t num)
-{
- char buf[21];
- VmaUint64ToStr(buf, sizeof(buf), num);
- Add(buf);
-}
-
-void VmaStringBuilder::AddPointer(const void* ptr)
-{
- char buf[21];
- VmaPtrToStr(buf, sizeof(buf), ptr);
- Add(buf);
-}
-
-#endif // #if VMA_STATS_STRING_ENABLED
-
-////////////////////////////////////////////////////////////////////////////////
-// VmaJsonWriter
-
-#if VMA_STATS_STRING_ENABLED
-
-class VmaJsonWriter
-{
- VMA_CLASS_NO_COPY(VmaJsonWriter)
-public:
- VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
- ~VmaJsonWriter();
-
- void BeginObject(bool singleLine = false);
- void EndObject();
-
- void BeginArray(bool singleLine = false);
- void EndArray();
-
- void WriteString(const char* pStr);
- void BeginString(const char* pStr = VMA_NULL);
- void ContinueString(const char* pStr);
- void ContinueString(uint32_t n);
- void ContinueString(uint64_t n);
- void ContinueString_Pointer(const void* ptr);
- void EndString(const char* pStr = VMA_NULL);
-
- void WriteNumber(uint32_t n);
- void WriteNumber(uint64_t n);
- void WriteBool(bool b);
- void WriteNull();
-
-private:
- static const char* const INDENT;
-
- enum COLLECTION_TYPE
- {
- COLLECTION_TYPE_OBJECT,
- COLLECTION_TYPE_ARRAY,
- };
- struct StackItem
- {
- COLLECTION_TYPE type;
- uint32_t valueCount;
- bool singleLineMode;
- };
-
- VmaStringBuilder& m_SB;
- VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
- bool m_InsideString;
-
- void BeginValue(bool isString);
- void WriteIndent(bool oneLess = false);
-};
-
-const char* const VmaJsonWriter::INDENT = " ";
-
-VmaJsonWriter::VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
- m_SB(sb),
- m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
- m_InsideString(false)
-{
-}
-
-VmaJsonWriter::~VmaJsonWriter()
-{
- VMA_ASSERT(!m_InsideString);
- VMA_ASSERT(m_Stack.empty());
-}
-
-void VmaJsonWriter::BeginObject(bool singleLine)
-{
- VMA_ASSERT(!m_InsideString);
-
- BeginValue(false);
- m_SB.Add('{');
-
- StackItem item;
- item.type = COLLECTION_TYPE_OBJECT;
- item.valueCount = 0;
- item.singleLineMode = singleLine;
- m_Stack.push_back(item);
-}
-
-void VmaJsonWriter::EndObject()
-{
- VMA_ASSERT(!m_InsideString);
-
- WriteIndent(true);
- m_SB.Add('}');
-
- VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
- m_Stack.pop_back();
-}
-
-void VmaJsonWriter::BeginArray(bool singleLine)
-{
- VMA_ASSERT(!m_InsideString);
-
- BeginValue(false);
- m_SB.Add('[');
-
- StackItem item;
- item.type = COLLECTION_TYPE_ARRAY;
- item.valueCount = 0;
- item.singleLineMode = singleLine;
- m_Stack.push_back(item);
-}
-
-void VmaJsonWriter::EndArray()
-{
- VMA_ASSERT(!m_InsideString);
-
- WriteIndent(true);
- m_SB.Add(']');
-
- VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
- m_Stack.pop_back();
-}
-
-void VmaJsonWriter::WriteString(const char* pStr)
-{
- BeginString(pStr);
- EndString();
-}
-
-void VmaJsonWriter::BeginString(const char* pStr)
-{
- VMA_ASSERT(!m_InsideString);
-
- BeginValue(true);
- m_SB.Add('"');
- m_InsideString = true;
- if(pStr != VMA_NULL && pStr[0] != '\0')
- {
- ContinueString(pStr);
- }
-}
-
-void VmaJsonWriter::ContinueString(const char* pStr)
-{
- VMA_ASSERT(m_InsideString);
-
- const size_t strLen = strlen(pStr);
- for(size_t i = 0; i < strLen; ++i)
- {
- char ch = pStr[i];
- if(ch == '\\')
- {
- m_SB.Add("\\\\");
- }
- else if(ch == '"')
- {
- m_SB.Add("\\\"");
- }
- else if(ch >= 32)
- {
- m_SB.Add(ch);
- }
- else switch(ch)
- {
- case '\b':
- m_SB.Add("\\b");
- break;
- case '\f':
- m_SB.Add("\\f");
- break;
- case '\n':
- m_SB.Add("\\n");
- break;
- case '\r':
- m_SB.Add("\\r");
- break;
- case '\t':
- m_SB.Add("\\t");
- break;
- default:
- VMA_ASSERT(0 && "Character not currently supported.");
- break;
- }
- }
-}
-
-void VmaJsonWriter::ContinueString(uint32_t n)
-{
- VMA_ASSERT(m_InsideString);
- m_SB.AddNumber(n);
-}
-
-void VmaJsonWriter::ContinueString(uint64_t n)
-{
- VMA_ASSERT(m_InsideString);
- m_SB.AddNumber(n);
-}
-
-void VmaJsonWriter::ContinueString_Pointer(const void* ptr)
-{
- VMA_ASSERT(m_InsideString);
- m_SB.AddPointer(ptr);
-}
-
-void VmaJsonWriter::EndString(const char* pStr)
-{
- VMA_ASSERT(m_InsideString);
- if(pStr != VMA_NULL && pStr[0] != '\0')
- {
- ContinueString(pStr);
- }
- m_SB.Add('"');
- m_InsideString = false;
-}
-
-void VmaJsonWriter::WriteNumber(uint32_t n)
-{
- VMA_ASSERT(!m_InsideString);
- BeginValue(false);
- m_SB.AddNumber(n);
-}
-
-void VmaJsonWriter::WriteNumber(uint64_t n)
-{
- VMA_ASSERT(!m_InsideString);
- BeginValue(false);
- m_SB.AddNumber(n);
-}
-
-void VmaJsonWriter::WriteBool(bool b)
-{
- VMA_ASSERT(!m_InsideString);
- BeginValue(false);
- m_SB.Add(b ? "true" : "false");
-}
-
-void VmaJsonWriter::WriteNull()
-{
- VMA_ASSERT(!m_InsideString);
- BeginValue(false);
- m_SB.Add("null");
-}
-
-void VmaJsonWriter::BeginValue(bool isString)
-{
- if(!m_Stack.empty())
- {
- StackItem& currItem = m_Stack.back();
- if(currItem.type == COLLECTION_TYPE_OBJECT &&
- currItem.valueCount % 2 == 0)
- {
- VMA_ASSERT(isString);
- }
-
- if(currItem.type == COLLECTION_TYPE_OBJECT &&
- currItem.valueCount % 2 != 0)
- {
- m_SB.Add(": ");
- }
- else if(currItem.valueCount > 0)
- {
- m_SB.Add(", ");
- WriteIndent();
- }
- else
- {
- WriteIndent();
- }
- ++currItem.valueCount;
- }
-}
-
-void VmaJsonWriter::WriteIndent(bool oneLess)
-{
- if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
- {
- m_SB.AddNewLine();
-
- size_t count = m_Stack.size();
- if(count > 0 && oneLess)
- {
- --count;
- }
- for(size_t i = 0; i < count; ++i)
- {
- m_SB.Add(INDENT);
- }
- }
-}
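-
-// Illustrative sketch, not part of the original header: a minimal use of the
-// writer above. Inside an object, values alternate name/value, which is what
-// BeginValue() asserts via valueCount.
-//
-//   VmaStringBuilder sb(hAllocator);
-//   {
-//       VmaJsonWriter json(hAllocator->GetAllocationCallbacks(), sb);
-//       json.BeginObject();
-//       json.WriteString("Blocks"); // name
-//       json.WriteNumber(3u);       // value
-//       json.EndObject();
-//   }
-//   // sb now holds (not null-terminated): {\n  "Blocks": 3\n}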
-
-#endif // #if VMA_STATS_STRING_ENABLED
-
-////////////////////////////////////////////////////////////////////////////////
-
-void VmaAllocation_T::SetUserData(VmaAllocator hAllocator, void* pUserData)
-{
- if(IsUserDataString())
- {
- VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
-
- FreeUserDataString(hAllocator);
-
- if(pUserData != VMA_NULL)
- {
- const char* const newStrSrc = (char*)pUserData;
- const size_t newStrLen = strlen(newStrSrc);
- char* const newStrDst = vma_new_array(hAllocator, char, newStrLen + 1);
- memcpy(newStrDst, newStrSrc, newStrLen + 1);
- m_pUserData = newStrDst;
- }
- }
- else
- {
- m_pUserData = pUserData;
- }
-}
-
-void VmaAllocation_T::ChangeBlockAllocation(
- VmaAllocator hAllocator,
- VmaDeviceMemoryBlock* block,
- VkDeviceSize offset)
-{
- VMA_ASSERT(block != VMA_NULL);
- VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
-
- // Move mapping reference counter from old block to new block.
- if(block != m_BlockAllocation.m_Block)
- {
- uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
- if(IsPersistentMap())
- ++mapRefCount;
- m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
- block->Map(hAllocator, mapRefCount, VMA_NULL);
- }
-
- m_BlockAllocation.m_Block = block;
- m_BlockAllocation.m_Offset = offset;
-}
-
-void VmaAllocation_T::ChangeSize(VkDeviceSize newSize)
-{
- VMA_ASSERT(newSize > 0);
- m_Size = newSize;
-}
-
-void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
-{
- VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
- m_BlockAllocation.m_Offset = newOffset;
-}
-
-VkDeviceSize VmaAllocation_T::GetOffset() const
-{
- switch(m_Type)
- {
- case ALLOCATION_TYPE_BLOCK:
- return m_BlockAllocation.m_Offset;
- case ALLOCATION_TYPE_DEDICATED:
- return 0;
- default:
- VMA_ASSERT(0);
- return 0;
- }
-}
-
-VkDeviceMemory VmaAllocation_T::GetMemory() const
-{
- switch(m_Type)
- {
- case ALLOCATION_TYPE_BLOCK:
- return m_BlockAllocation.m_Block->GetDeviceMemory();
- case ALLOCATION_TYPE_DEDICATED:
- return m_DedicatedAllocation.m_hMemory;
- default:
- VMA_ASSERT(0);
- return VK_NULL_HANDLE;
- }
-}
-
-uint32_t VmaAllocation_T::GetMemoryTypeIndex() const
-{
- switch(m_Type)
- {
- case ALLOCATION_TYPE_BLOCK:
- return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
- case ALLOCATION_TYPE_DEDICATED:
- return m_DedicatedAllocation.m_MemoryTypeIndex;
- default:
- VMA_ASSERT(0);
- return UINT32_MAX;
- }
-}
-
-void* VmaAllocation_T::GetMappedData() const
-{
- switch(m_Type)
- {
- case ALLOCATION_TYPE_BLOCK:
- if(m_MapCount != 0)
- {
- void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
- VMA_ASSERT(pBlockData != VMA_NULL);
- return (char*)pBlockData + m_BlockAllocation.m_Offset;
- }
- else
- {
- return VMA_NULL;
- }
- break;
- case ALLOCATION_TYPE_DEDICATED:
- VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
- return m_DedicatedAllocation.m_pMappedData;
- default:
- VMA_ASSERT(0);
- return VMA_NULL;
- }
-}
-
-bool VmaAllocation_T::CanBecomeLost() const
-{
- switch(m_Type)
- {
- case ALLOCATION_TYPE_BLOCK:
- return m_BlockAllocation.m_CanBecomeLost;
- case ALLOCATION_TYPE_DEDICATED:
- return false;
- default:
- VMA_ASSERT(0);
- return false;
- }
-}
-
-VmaPool VmaAllocation_T::GetPool() const
-{
- VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
- return m_BlockAllocation.m_hPool;
-}
-
-bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
-{
- VMA_ASSERT(CanBecomeLost());
-
- /*
- Warning: This is a carefully designed algorithm.
- Do not modify unless you really know what you're doing :)
- */
- uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
- for(;;)
- {
- if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
- {
- VMA_ASSERT(0);
- return false;
- }
- else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
- {
- return false;
- }
- else // Last use time earlier than current time.
- {
- if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
- {
- // Setting hAllocation.LastUseFrameIndex atomic to VMA_FRAME_INDEX_LOST is enough to mark it as LOST.
- // Calling code just needs to unregister this allocation in owning VmaDeviceMemoryBlock.
- return true;
- }
- }
- }
-}
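-
-// Descriptive note (not in the original header): MakeLost() above is a
-// lock-free compare-exchange loop. An allocation may only become
-// VMA_FRAME_INDEX_LOST when its last use is older than
-// currentFrameIndex - frameInUseCount, and the CAS guarantees that exactly one
-// caller performs the transition even if several threads race on it.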
-
-#if VMA_STATS_STRING_ENABLED
-
-// Entries correspond to values of enum VmaSuballocationType.
-static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
- "FREE",
- "UNKNOWN",
- "BUFFER",
- "IMAGE_UNKNOWN",
- "IMAGE_LINEAR",
- "IMAGE_OPTIMAL",
-};
-
-void VmaAllocation_T::PrintParameters(class VmaJsonWriter& json) const
-{
- json.WriteString("Type");
- json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
-
- json.WriteString("Size");
- json.WriteNumber(m_Size);
-
- if(m_pUserData != VMA_NULL)
- {
- json.WriteString("UserData");
- if(IsUserDataString())
- {
- json.WriteString((const char*)m_pUserData);
- }
- else
- {
- json.BeginString();
- json.ContinueString_Pointer(m_pUserData);
- json.EndString();
- }
- }
-
- json.WriteString("CreationFrameIndex");
- json.WriteNumber(m_CreationFrameIndex);
-
- json.WriteString("LastUseFrameIndex");
- json.WriteNumber(GetLastUseFrameIndex());
-
- if(m_BufferImageUsage != 0)
- {
- json.WriteString("Usage");
- json.WriteNumber(m_BufferImageUsage);
- }
-}
-
-#endif
-
-void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
-{
- VMA_ASSERT(IsUserDataString());
- if(m_pUserData != VMA_NULL)
- {
- char* const oldStr = (char*)m_pUserData;
- const size_t oldStrLen = strlen(oldStr);
- vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
- m_pUserData = VMA_NULL;
- }
-}
-
-void VmaAllocation_T::BlockAllocMap()
-{
- VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
-
- if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
- {
- ++m_MapCount;
- }
- else
- {
- VMA_ASSERT(0 && "Allocation mapped too many times simultaneously.");
- }
-}
-
-void VmaAllocation_T::BlockAllocUnmap()
-{
- VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
-
- if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
- {
- --m_MapCount;
- }
- else
- {
- VMA_ASSERT(0 && "Unmapping allocation not previously mapped.");
- }
-}
-
-VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator, void** ppData)
-{
- VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
-
- if(m_MapCount != 0)
- {
- if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
- {
- VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
- *ppData = m_DedicatedAllocation.m_pMappedData;
- ++m_MapCount;
- return VK_SUCCESS;
- }
- else
- {
- VMA_ASSERT(0 && "Dedicated allocation mapped too many times simultaneously.");
- return VK_ERROR_MEMORY_MAP_FAILED;
- }
- }
- else
- {
- VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
- hAllocator->m_hDevice,
- m_DedicatedAllocation.m_hMemory,
- 0, // offset
- VK_WHOLE_SIZE,
- 0, // flags
- ppData);
- if(result == VK_SUCCESS)
- {
- m_DedicatedAllocation.m_pMappedData = *ppData;
- m_MapCount = 1;
- }
- return result;
- }
-}
-
-void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
-{
- VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
-
- if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
- {
- --m_MapCount;
- if(m_MapCount == 0)
- {
- m_DedicatedAllocation.m_pMappedData = VMA_NULL;
- (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
- hAllocator->m_hDevice,
- m_DedicatedAllocation.m_hMemory);
- }
- }
- else
- {
- VMA_ASSERT(0 && "Unmapping dedicated allocation not previously mapped.");
- }
-}
-
-#if VMA_STATS_STRING_ENABLED
-
-static void VmaPrintStatInfo(VmaJsonWriter& json, const VmaStatInfo& stat)
-{
- json.BeginObject();
-
- json.WriteString("Blocks");
- json.WriteNumber(stat.blockCount);
-
- json.WriteString("Allocations");
- json.WriteNumber(stat.allocationCount);
-
- json.WriteString("UnusedRanges");
- json.WriteNumber(stat.unusedRangeCount);
-
- json.WriteString("UsedBytes");
- json.WriteNumber(stat.usedBytes);
-
- json.WriteString("UnusedBytes");
- json.WriteNumber(stat.unusedBytes);
-
- if(stat.allocationCount > 1)
- {
- json.WriteString("AllocationSize");
- json.BeginObject(true);
- json.WriteString("Min");
- json.WriteNumber(stat.allocationSizeMin);
- json.WriteString("Avg");
- json.WriteNumber(stat.allocationSizeAvg);
- json.WriteString("Max");
- json.WriteNumber(stat.allocationSizeMax);
- json.EndObject();
- }
-
- if(stat.unusedRangeCount > 1)
- {
- json.WriteString("UnusedRangeSize");
- json.BeginObject(true);
- json.WriteString("Min");
- json.WriteNumber(stat.unusedRangeSizeMin);
- json.WriteString("Avg");
- json.WriteNumber(stat.unusedRangeSizeAvg);
- json.WriteString("Max");
- json.WriteNumber(stat.unusedRangeSizeMax);
- json.EndObject();
- }
-
- json.EndObject();
-}
-
-#endif // #if VMA_STATS_STRING_ENABLED
-
-struct VmaSuballocationItemSizeLess
-{
- bool operator()(
- const VmaSuballocationList::iterator lhs,
- const VmaSuballocationList::iterator rhs) const
- {
- return lhs->size < rhs->size;
- }
- bool operator()(
- const VmaSuballocationList::iterator lhs,
- VkDeviceSize rhsSize) const
- {
- return lhs->size < rhsSize;
- }
-};
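-
-// Descriptive note (not in the original header): the second operator() above,
-// which takes a VkDeviceSize directly, lets VmaBinaryFindFirstNotLess() search
-// m_FreeSuballocationsBySize for the first free range of at least a given size
-// without building a dummy suballocation as the search key.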
-
-
-////////////////////////////////////////////////////////////////////////////////
-// class VmaBlockMetadata
-
-VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
- m_Size(0),
- m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
-{
-}
-
-#if VMA_STATS_STRING_ENABLED
-
-void VmaBlockMetadata::PrintDetailedMap_Begin(class VmaJsonWriter& json,
- VkDeviceSize unusedBytes,
- size_t allocationCount,
- size_t unusedRangeCount) const
-{
- json.BeginObject();
-
- json.WriteString("TotalBytes");
- json.WriteNumber(GetSize());
-
- json.WriteString("UnusedBytes");
- json.WriteNumber(unusedBytes);
-
- json.WriteString("Allocations");
- json.WriteNumber((uint64_t)allocationCount);
-
- json.WriteString("UnusedRanges");
- json.WriteNumber((uint64_t)unusedRangeCount);
-
- json.WriteString("Suballocations");
- json.BeginArray();
-}
-
-void VmaBlockMetadata::PrintDetailedMap_Allocation(class VmaJsonWriter& json,
- VkDeviceSize offset,
- VmaAllocation hAllocation) const
-{
- json.BeginObject(true);
-
- json.WriteString("Offset");
- json.WriteNumber(offset);
-
- hAllocation->PrintParameters(json);
-
- json.EndObject();
-}
-
-void VmaBlockMetadata::PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
- VkDeviceSize offset,
- VkDeviceSize size) const
-{
- json.BeginObject(true);
-
- json.WriteString("Offset");
- json.WriteNumber(offset);
-
- json.WriteString("Type");
- json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
-
- json.WriteString("Size");
- json.WriteNumber(size);
-
- json.EndObject();
-}
-
-void VmaBlockMetadata::PrintDetailedMap_End(class VmaJsonWriter& json) const
-{
- json.EndArray();
- json.EndObject();
-}
-
-#endif // #if VMA_STATS_STRING_ENABLED
-
-////////////////////////////////////////////////////////////////////////////////
-// class VmaBlockMetadata_Generic
-
-VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(VmaAllocator hAllocator) :
- VmaBlockMetadata(hAllocator),
- m_FreeCount(0),
- m_SumFreeSize(0),
- m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
- m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
-{
-}
-
-VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
-{
-}
-
-void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
-{
- VmaBlockMetadata::Init(size);
-
- m_FreeCount = 1;
- m_SumFreeSize = size;
-
- VmaSuballocation suballoc = {};
- suballoc.offset = 0;
- suballoc.size = size;
- suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
- suballoc.hAllocation = VK_NULL_HANDLE;
-
- VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
- m_Suballocations.push_back(suballoc);
- VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
- --suballocItem;
- m_FreeSuballocationsBySize.push_back(suballocItem);
-}
-
-bool VmaBlockMetadata_Generic::Validate() const
-{
- VMA_VALIDATE(!m_Suballocations.empty());
-
- // Expected offset of new suballocation as calculated from previous ones.
- VkDeviceSize calculatedOffset = 0;
- // Expected number of free suballocations as calculated from traversing their list.
- uint32_t calculatedFreeCount = 0;
- // Expected sum size of free suballocations as calculated from traversing their list.
- VkDeviceSize calculatedSumFreeSize = 0;
- // Expected number of free suballocations that should be registered in
- // m_FreeSuballocationsBySize calculated from traversing their list.
- size_t freeSuballocationsToRegister = 0;
- // True if previous visited suballocation was free.
- bool prevFree = false;
-
- for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
- suballocItem != m_Suballocations.cend();
- ++suballocItem)
- {
- const VmaSuballocation& subAlloc = *suballocItem;
-
- // Actual offset of this suballocation doesn't match expected one.
- VMA_VALIDATE(subAlloc.offset == calculatedOffset);
-
- const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
- // Two adjacent free suballocations are invalid. They should be merged.
- VMA_VALIDATE(!prevFree || !currFree);
-
- VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));
-
- if(currFree)
- {
- calculatedSumFreeSize += subAlloc.size;
- ++calculatedFreeCount;
- if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
- {
- ++freeSuballocationsToRegister;
- }
-
- // Margin required between allocations - every free space must be at least that large.
- VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
- }
- else
- {
- VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
- VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
-
- // Margin required between allocations - previous allocation must be free.
- VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
- }
-
- calculatedOffset += subAlloc.size;
- prevFree = currFree;
- }
-
- // Number of free suballocations registered in m_FreeSuballocationsBySize doesn't
- // match expected one.
- VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
-
- VkDeviceSize lastSize = 0;
- for(size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
- {
- VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
-
- // Only free suballocations can be registered in m_FreeSuballocationsBySize.
- VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
- // They must be sorted by size ascending.
- VMA_VALIDATE(suballocItem->size >= lastSize);
-
- lastSize = suballocItem->size;
- }
-
- // Check if totals match the calculated values.
- VMA_VALIDATE(ValidateFreeSuballocationList());
- VMA_VALIDATE(calculatedOffset == GetSize());
- VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
- VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
-
- return true;
-}
-
-VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax() const
-{
- if(!m_FreeSuballocationsBySize.empty())
- {
- return m_FreeSuballocationsBySize.back()->size;
- }
- else
- {
- return 0;
- }
-}
-
-bool VmaBlockMetadata_Generic::IsEmpty() const
-{
- return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
-}
-
-void VmaBlockMetadata_Generic::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
-{
- outInfo.blockCount = 1;
-
- const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
- outInfo.allocationCount = rangeCount - m_FreeCount;
- outInfo.unusedRangeCount = m_FreeCount;
-
- outInfo.unusedBytes = m_SumFreeSize;
- outInfo.usedBytes = GetSize() - outInfo.unusedBytes;
-
- outInfo.allocationSizeMin = UINT64_MAX;
- outInfo.allocationSizeMax = 0;
- outInfo.unusedRangeSizeMin = UINT64_MAX;
- outInfo.unusedRangeSizeMax = 0;
-
- for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
- suballocItem != m_Suballocations.cend();
- ++suballocItem)
- {
- const VmaSuballocation& suballoc = *suballocItem;
- if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
- {
- outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
- outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
- }
- else
- {
- outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, suballoc.size);
- outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, suballoc.size);
- }
- }
-}
-
-void VmaBlockMetadata_Generic::AddPoolStats(VmaPoolStats& inoutStats) const
-{
- const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
-
- inoutStats.size += GetSize();
- inoutStats.unusedSize += m_SumFreeSize;
- inoutStats.allocationCount += rangeCount - m_FreeCount;
- inoutStats.unusedRangeCount += m_FreeCount;
- inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, GetUnusedRangeSizeMax());
-}
-
-#if VMA_STATS_STRING_ENABLED
-
-void VmaBlockMetadata_Generic::PrintDetailedMap(class VmaJsonWriter& json) const
-{
- PrintDetailedMap_Begin(json,
- m_SumFreeSize, // unusedBytes
- m_Suballocations.size() - (size_t)m_FreeCount, // allocationCount
- m_FreeCount); // unusedRangeCount
-
- size_t i = 0;
- for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
- suballocItem != m_Suballocations.cend();
- ++suballocItem, ++i)
- {
- if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
- {
- PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
- }
- else
- {
- PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
- }
- }
-
- PrintDetailedMap_End(json);
-}
-
-#endif // #if VMA_STATS_STRING_ENABLED
-
-bool VmaBlockMetadata_Generic::CreateAllocationRequest(
- uint32_t currentFrameIndex,
- uint32_t frameInUseCount,
- VkDeviceSize bufferImageGranularity,
- VkDeviceSize allocSize,
- VkDeviceSize allocAlignment,
- bool upperAddress,
- VmaSuballocationType allocType,
- bool canMakeOtherLost,
- uint32_t strategy,
- VmaAllocationRequest* pAllocationRequest)
-{
- VMA_ASSERT(allocSize > 0);
- VMA_ASSERT(!upperAddress);
- VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
- VMA_ASSERT(pAllocationRequest != VMA_NULL);
- VMA_HEAVY_ASSERT(Validate());
-
- // There is not enough total free space in this block to fulfill the request: early return.
- if(canMakeOtherLost == false &&
- m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
- {
- return false;
- }
-
- // New algorithm, efficiently searching m_FreeSuballocationsBySize.
- const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
- if(freeSuballocCount > 0)
- {
- if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
- {
- // Find first free suballocation with size not less than allocSize + 2 * VMA_DEBUG_MARGIN.
- VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
- m_FreeSuballocationsBySize.data(),
- m_FreeSuballocationsBySize.data() + freeSuballocCount,
- allocSize + 2 * VMA_DEBUG_MARGIN,
- VmaSuballocationItemSizeLess());
- size_t index = it - m_FreeSuballocationsBySize.data();
- for(; index < freeSuballocCount; ++index)
- {
- if(CheckAllocation(
- currentFrameIndex,
- frameInUseCount,
- bufferImageGranularity,
- allocSize,
- allocAlignment,
- allocType,
- m_FreeSuballocationsBySize[index],
- false, // canMakeOtherLost
- &pAllocationRequest->offset,
- &pAllocationRequest->itemsToMakeLostCount,
- &pAllocationRequest->sumFreeSize,
- &pAllocationRequest->sumItemSize))
- {
- pAllocationRequest->item = m_FreeSuballocationsBySize[index];
- return true;
- }
- }
- }
- else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
- {
- for(VmaSuballocationList::iterator it = m_Suballocations.begin();
- it != m_Suballocations.end();
- ++it)
- {
- if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
- currentFrameIndex,
- frameInUseCount,
- bufferImageGranularity,
- allocSize,
- allocAlignment,
- allocType,
- it,
- false, // canMakeOtherLost
- &pAllocationRequest->offset,
- &pAllocationRequest->itemsToMakeLostCount,
- &pAllocationRequest->sumFreeSize,
- &pAllocationRequest->sumItemSize))
- {
- pAllocationRequest->item = it;
- return true;
- }
- }
- }
- else // WORST_FIT, FIRST_FIT
- {
- // Search starting from the biggest suballocations.
- for(size_t index = freeSuballocCount; index--; )
- {
- if(CheckAllocation(
- currentFrameIndex,
- frameInUseCount,
- bufferImageGranularity,
- allocSize,
- allocAlignment,
- allocType,
- m_FreeSuballocationsBySize[index],
- false, // canMakeOtherLost
- &pAllocationRequest->offset,
- &pAllocationRequest->itemsToMakeLostCount,
- &pAllocationRequest->sumFreeSize,
- &pAllocationRequest->sumItemSize))
- {
- pAllocationRequest->item = m_FreeSuballocationsBySize[index];
- return true;
- }
- }
- }
- }
-
- if(canMakeOtherLost)
- {
- // Brute-force algorithm. TODO: Come up with something better.
-
- pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
- pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
-
- VmaAllocationRequest tmpAllocRequest = {};
- for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
- suballocIt != m_Suballocations.end();
- ++suballocIt)
- {
- if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
- suballocIt->hAllocation->CanBecomeLost())
- {
- if(CheckAllocation(
- currentFrameIndex,
- frameInUseCount,
- bufferImageGranularity,
- allocSize,
- allocAlignment,
- allocType,
- suballocIt,
- canMakeOtherLost,
- &tmpAllocRequest.offset,
- &tmpAllocRequest.itemsToMakeLostCount,
- &tmpAllocRequest.sumFreeSize,
- &tmpAllocRequest.sumItemSize))
- {
- tmpAllocRequest.item = suballocIt;
-
- if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost() ||
- strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
- {
- *pAllocationRequest = tmpAllocRequest;
- }
- }
- }
- }
-
- if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
- {
- return true;
- }
- }
-
- return false;
-}
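The best-fit branch above depends on m_FreeSuballocationsBySize staying sorted by size, so the lookup is a binary search for the smallest free range that still fits. A minimal standalone sketch of that lookup, using std::lower_bound in place of the internal VmaBinaryFindFirstNotLess helper; the FreeRange type and FindBestFit name are illustrative, not part of VMA:

#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <vector>

// Illustrative stand-in for a free range; the real code stores VmaSuballocationList iterators.
struct FreeRange { uint64_t offset; uint64_t size; };

// Return the index of the first free range whose size is >= requiredSize, or ranges.size() if none.
// Assumes 'ranges' is sorted by ascending size, mirroring m_FreeSuballocationsBySize.
static size_t FindBestFit(const std::vector<FreeRange>& ranges, uint64_t requiredSize)
{
    const auto it = std::lower_bound(ranges.begin(), ranges.end(), requiredSize,
        [](const FreeRange& r, uint64_t sz) { return r.size < sz; });
    return static_cast<size_t>(it - ranges.begin());
}

Candidates from the returned index onward still have to pass CheckAllocation, exactly as in the loop above, because alignment and bufferImageGranularity can disqualify even the smallest fitting range.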
-
-bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
- uint32_t currentFrameIndex,
- uint32_t frameInUseCount,
- VmaAllocationRequest* pAllocationRequest)
-{
- while(pAllocationRequest->itemsToMakeLostCount > 0)
- {
- if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
- {
- ++pAllocationRequest->item;
- }
- VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
- VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
- VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
- if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
- {
- pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
- --pAllocationRequest->itemsToMakeLostCount;
- }
- else
- {
- return false;
- }
- }
-
- VMA_HEAVY_ASSERT(Validate());
- VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
- VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
-
- return true;
-}
-
-uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
-{
- uint32_t lostAllocationCount = 0;
- for(VmaSuballocationList::iterator it = m_Suballocations.begin();
- it != m_Suballocations.end();
- ++it)
- {
- if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
- it->hAllocation->CanBecomeLost() &&
- it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
- {
- it = FreeSuballocation(it);
- ++lostAllocationCount;
- }
- }
- return lostAllocationCount;
-}
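These "lost allocation" paths only apply to allocations created with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT. A sketch of the application side, assuming the public VMA 2.x calls vmaSetCurrentFrameIndex and vmaTouchAllocation; error handling is reduced to the essentials:

#include "vk_mem_alloc.h"

// Sketch only: assumes a valid VmaAllocator and an allocation created elsewhere with
// VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT in VmaAllocationCreateInfo::flags.
static void PerFrameUsage(VmaAllocator allocator, VmaAllocation allocation, uint32_t frameIndex)
{
    // Advance the allocator's frame counter; CanBecomeLost()/MakeLost() compare against this.
    vmaSetCurrentFrameIndex(allocator, frameIndex);

    // Touch the allocation before use: returns VK_TRUE and bumps its last-use frame index
    // if it is still alive, VK_FALSE if it has been made lost and must be recreated.
    if (vmaTouchAllocation(allocator, allocation) == VK_FALSE)
    {
        // Recreate the resource here (e.g. vmaCreateBuffer with the same flags).
    }
}

MakeLost() above succeeds only when an allocation's last-use frame index lags the current frame by more than frameInUseCount, which is why the per-frame touch matters.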
-
-VkResult VmaBlockMetadata_Generic::CheckCorruption(const void* pBlockData)
-{
- for(VmaSuballocationList::iterator it = m_Suballocations.begin();
- it != m_Suballocations.end();
- ++it)
- {
- if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
- {
- if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
- {
- VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
- return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
- {
- VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
- return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- }
- }
-
- return VK_SUCCESS;
-}
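CheckCorruption above assumes every used suballocation was bracketed, at allocation time, by VMA_DEBUG_MARGIN bytes filled with a known pattern. A reduced sketch of that write/validate pair; the constant and margin size below are illustrative stand-ins for the library's own values:

#include <cstddef>
#include <cstdint>

static const uint32_t kMagic  = 0x7F84E666u; // illustrative pattern
static const size_t   kMargin = 16;          // illustrative stand-in for VMA_DEBUG_MARGIN (bytes)

// Fill one margin region (immediately before or after an allocation) with the pattern.
static void WriteMagic(void* blockData, size_t offset)
{
    uint32_t* p = reinterpret_cast<uint32_t*>(static_cast<char*>(blockData) + offset);
    for (size_t i = 0; i < kMargin / sizeof(uint32_t); ++i)
        p[i] = kMagic;
}

// Re-read the pattern later; any mismatch means something wrote outside its allocation.
static bool ValidateMagic(const void* blockData, size_t offset)
{
    const uint32_t* p = reinterpret_cast<const uint32_t*>(static_cast<const char*>(blockData) + offset);
    for (size_t i = 0; i < kMargin / sizeof(uint32_t); ++i)
        if (p[i] != kMagic)
            return false;
    return true;
}

The validate side is what CheckCorruption invokes at it->offset - VMA_DEBUG_MARGIN and it->offset + it->size, i.e. the margins just outside each used range.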
-
-void VmaBlockMetadata_Generic::Alloc(
- const VmaAllocationRequest& request,
- VmaSuballocationType type,
- VkDeviceSize allocSize,
- bool upperAddress,
- VmaAllocation hAllocation)
-{
- VMA_ASSERT(!upperAddress);
- VMA_ASSERT(request.item != m_Suballocations.end());
- VmaSuballocation& suballoc = *request.item;
- // Given suballocation is a free block.
- VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
- // Given offset is inside this suballocation.
- VMA_ASSERT(request.offset >= suballoc.offset);
- const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
- VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
- const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
-
- // Unregister this free suballocation from m_FreeSuballocationsBySize and update
- // it to become used.
- UnregisterFreeSuballocation(request.item);
-
- suballoc.offset = request.offset;
- suballoc.size = allocSize;
- suballoc.type = type;
- suballoc.hAllocation = hAllocation;
-
- // If there are any free bytes remaining at the end, insert new free suballocation after current one.
- if(paddingEnd)
- {
- VmaSuballocation paddingSuballoc = {};
- paddingSuballoc.offset = request.offset + allocSize;
- paddingSuballoc.size = paddingEnd;
- paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
- VmaSuballocationList::iterator next = request.item;
- ++next;
- const VmaSuballocationList::iterator paddingEndItem =
- m_Suballocations.insert(next, paddingSuballoc);
- RegisterFreeSuballocation(paddingEndItem);
- }
-
- // If there are any free bytes remaining at the beginning, insert new free suballocation before current one.
- if(paddingBegin)
- {
- VmaSuballocation paddingSuballoc = {};
- paddingSuballoc.offset = request.offset - paddingBegin;
- paddingSuballoc.size = paddingBegin;
- paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
- const VmaSuballocationList::iterator paddingBeginItem =
- m_Suballocations.insert(request.item, paddingSuballoc);
- RegisterFreeSuballocation(paddingBeginItem);
- }
-
- // Update totals.
- m_FreeCount = m_FreeCount - 1;
- if(paddingBegin > 0)
- {
- ++m_FreeCount;
- }
- if(paddingEnd > 0)
- {
- ++m_FreeCount;
- }
- m_SumFreeSize -= allocSize;
-}
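A worked example of the padding arithmetic in Alloc, with made-up numbers: a free suballocation at offset 100 of size 300, and an aligned request for 200 bytes placed at offset 128:

#include <cassert>
#include <cstdint>

int main()
{
    // Illustrative values only.
    const uint64_t suballocOffset = 100, suballocSize = 300; // free range being consumed
    const uint64_t requestOffset  = 128, allocSize    = 200; // aligned request inside it

    const uint64_t paddingBegin = requestOffset - suballocOffset;          // 28 bytes stay free in front
    const uint64_t paddingEnd   = suballocSize - paddingBegin - allocSize; // 72 bytes stay free behind

    assert(paddingBegin == 28 && paddingEnd == 72);
    // The single free range [100,400) becomes: free [100,128), used [128,328), free [328,400).
    // m_FreeCount: -1 for the consumed range, +1 per non-zero padding; m_SumFreeSize drops by allocSize.
    return 0;
}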
-
-void VmaBlockMetadata_Generic::Free(const VmaAllocation allocation)
-{
- for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
- suballocItem != m_Suballocations.end();
- ++suballocItem)
- {
- VmaSuballocation& suballoc = *suballocItem;
- if(suballoc.hAllocation == allocation)
- {
- FreeSuballocation(suballocItem);
- VMA_HEAVY_ASSERT(Validate());
- return;
- }
- }
- VMA_ASSERT(0 && "Not found!");
-}
-
-void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
-{
- for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
- suballocItem != m_Suballocations.end();
- ++suballocItem)
- {
- VmaSuballocation& suballoc = *suballocItem;
- if(suballoc.offset == offset)
- {
- FreeSuballocation(suballocItem);
- return;
- }
- }
- VMA_ASSERT(0 && "Not found!");
-}
-
-bool VmaBlockMetadata_Generic::ResizeAllocation(const VmaAllocation alloc, VkDeviceSize newSize)
-{
- typedef VmaSuballocationList::iterator iter_type;
- for(iter_type suballocItem = m_Suballocations.begin();
- suballocItem != m_Suballocations.end();
- ++suballocItem)
- {
- VmaSuballocation& suballoc = *suballocItem;
- if(suballoc.hAllocation == alloc)
- {
- iter_type nextItem = suballocItem;
- ++nextItem;
-
- // Should have been ensured on higher level.
- VMA_ASSERT(newSize != alloc->GetSize() && newSize > 0);
-
- // Shrinking.
- if(newSize < alloc->GetSize())
- {
- const VkDeviceSize sizeDiff = suballoc.size - newSize;
-
- // There is next item.
- if(nextItem != m_Suballocations.end())
- {
- // Next item is free.
- if(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)
- {
- // Grow this next item backward.
- UnregisterFreeSuballocation(nextItem);
- nextItem->offset -= sizeDiff;
- nextItem->size += sizeDiff;
- RegisterFreeSuballocation(nextItem);
- }
- // Next item is not free.
- else
- {
- // Create free item after current one.
- VmaSuballocation newFreeSuballoc;
- newFreeSuballoc.hAllocation = VK_NULL_HANDLE;
- newFreeSuballoc.offset = suballoc.offset + newSize;
- newFreeSuballoc.size = sizeDiff;
- newFreeSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
- iter_type newFreeSuballocIt = m_Suballocations.insert(nextItem, newFreeSuballoc);
- RegisterFreeSuballocation(newFreeSuballocIt);
-
- ++m_FreeCount;
- }
- }
- // This is the last item.
- else
- {
- // Create free item at the end.
- VmaSuballocation newFreeSuballoc;
- newFreeSuballoc.hAllocation = VK_NULL_HANDLE;
- newFreeSuballoc.offset = suballoc.offset + newSize;
- newFreeSuballoc.size = sizeDiff;
- newFreeSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
- m_Suballocations.push_back(newFreeSuballoc);
-
- iter_type newFreeSuballocIt = m_Suballocations.end();
- RegisterFreeSuballocation(--newFreeSuballocIt);
-
- ++m_FreeCount;
- }
-
- suballoc.size = newSize;
- m_SumFreeSize += sizeDiff;
- }
- // Growing.
- else
- {
- const VkDeviceSize sizeDiff = newSize - suballoc.size;
-
- // There is next item.
- if(nextItem != m_Suballocations.end())
- {
- // Next item is free.
- if(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)
- {
- // There is not enough free space, including margin.
- if(nextItem->size < sizeDiff + VMA_DEBUG_MARGIN)
- {
- return false;
- }
-
- // There is more free space than required.
- if(nextItem->size > sizeDiff)
- {
- // Move and shrink this next item.
- UnregisterFreeSuballocation(nextItem);
- nextItem->offset += sizeDiff;
- nextItem->size -= sizeDiff;
- RegisterFreeSuballocation(nextItem);
- }
- // There is exactly the amount of free space required.
- else
- {
- // Remove this next free item.
- UnregisterFreeSuballocation(nextItem);
- m_Suballocations.erase(nextItem);
- --m_FreeCount;
- }
- }
- // Next item is not free - there is no space to grow.
- else
- {
- return false;
- }
- }
- // This is the last item - there is no space to grow.
- else
- {
- return false;
- }
-
- suballoc.size = newSize;
- m_SumFreeSize -= sizeDiff;
- }
-
- // We cannot call Validate() here because alloc object is updated to new size outside of this call.
- return true;
- }
- }
- VMA_ASSERT(0 && "Not found!");
- return false;
-}
-
-bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList() const
-{
- VkDeviceSize lastSize = 0;
- for(size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
- {
- const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
-
- VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
- VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
- VMA_VALIDATE(it->size >= lastSize);
- lastSize = it->size;
- }
- return true;
-}
-
-bool VmaBlockMetadata_Generic::CheckAllocation(
- uint32_t currentFrameIndex,
- uint32_t frameInUseCount,
- VkDeviceSize bufferImageGranularity,
- VkDeviceSize allocSize,
- VkDeviceSize allocAlignment,
- VmaSuballocationType allocType,
- VmaSuballocationList::const_iterator suballocItem,
- bool canMakeOtherLost,
- VkDeviceSize* pOffset,
- size_t* itemsToMakeLostCount,
- VkDeviceSize* pSumFreeSize,
- VkDeviceSize* pSumItemSize) const
-{
- VMA_ASSERT(allocSize > 0);
- VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
- VMA_ASSERT(suballocItem != m_Suballocations.cend());
- VMA_ASSERT(pOffset != VMA_NULL);
-
- *itemsToMakeLostCount = 0;
- *pSumFreeSize = 0;
- *pSumItemSize = 0;
-
- if(canMakeOtherLost)
- {
- if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
- {
- *pSumFreeSize = suballocItem->size;
- }
- else
- {
- if(suballocItem->hAllocation->CanBecomeLost() &&
- suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
- {
- ++*itemsToMakeLostCount;
- *pSumItemSize = suballocItem->size;
- }
- else
- {
- return false;
- }
- }
-
- // Remaining size is too small for this request: Early return.
- if(GetSize() - suballocItem->offset < allocSize)
- {
- return false;
- }
-
- // Start from offset equal to beginning of this suballocation.
- *pOffset = suballocItem->offset;
-
- // Apply VMA_DEBUG_MARGIN at the beginning.
- if(VMA_DEBUG_MARGIN > 0)
- {
- *pOffset += VMA_DEBUG_MARGIN;
- }
-
- // Apply alignment.
- *pOffset = VmaAlignUp(*pOffset, allocAlignment);
-
- // Check previous suballocations for BufferImageGranularity conflicts.
- // Make bigger alignment if necessary.
- if(bufferImageGranularity > 1)
- {
- bool bufferImageGranularityConflict = false;
- VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
- while(prevSuballocItem != m_Suballocations.cbegin())
- {
- --prevSuballocItem;
- const VmaSuballocation& prevSuballoc = *prevSuballocItem;
- if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
- {
- if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
- {
- bufferImageGranularityConflict = true;
- break;
- }
- }
- else
- // Already on previous page.
- break;
- }
- if(bufferImageGranularityConflict)
- {
- *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
- }
- }
-
- // Now that we have final *pOffset, check if we are past suballocItem.
- // If yes, return false - this function should be called for another suballocItem as starting point.
- if(*pOffset >= suballocItem->offset + suballocItem->size)
- {
- return false;
- }
-
- // Calculate padding at the beginning based on current offset.
- const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
-
- // Calculate required margin at the end.
- const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
-
- const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
- // Another early return check.
- if(suballocItem->offset + totalSize > GetSize())
- {
- return false;
- }
-
- // Advance lastSuballocItem until desired size is reached.
- // Update itemsToMakeLostCount.
- VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
- if(totalSize > suballocItem->size)
- {
- VkDeviceSize remainingSize = totalSize - suballocItem->size;
- while(remainingSize > 0)
- {
- ++lastSuballocItem;
- if(lastSuballocItem == m_Suballocations.cend())
- {
- return false;
- }
- if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
- {
- *pSumFreeSize += lastSuballocItem->size;
- }
- else
- {
- VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
- if(lastSuballocItem->hAllocation->CanBecomeLost() &&
- lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
- {
- ++*itemsToMakeLostCount;
- *pSumItemSize += lastSuballocItem->size;
- }
- else
- {
- return false;
- }
- }
- remainingSize = (lastSuballocItem->size < remainingSize) ?
- remainingSize - lastSuballocItem->size : 0;
- }
- }
-
- // Check next suballocations for BufferImageGranularity conflicts.
- // If conflict exists, we must mark more allocations lost or fail.
- if(bufferImageGranularity > 1)
- {
- VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
- ++nextSuballocItem;
- while(nextSuballocItem != m_Suballocations.cend())
- {
- const VmaSuballocation& nextSuballoc = *nextSuballocItem;
- if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
- {
- if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
- {
- VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
- if(nextSuballoc.hAllocation->CanBecomeLost() &&
- nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
- {
- ++*itemsToMakeLostCount;
- }
- else
- {
- return false;
- }
- }
- }
- else
- {
- // Already on next page.
- break;
- }
- ++nextSuballocItem;
- }
- }
- }
- else
- {
- const VmaSuballocation& suballoc = *suballocItem;
- VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
-
- *pSumFreeSize = suballoc.size;
-
- // Size of this suballocation is too small for this request: Early return.
- if(suballoc.size < allocSize)
- {
- return false;
- }
-
- // Start from offset equal to beginning of this suballocation.
- *pOffset = suballoc.offset;
-
- // Apply VMA_DEBUG_MARGIN at the beginning.
- if(VMA_DEBUG_MARGIN > 0)
- {
- *pOffset += VMA_DEBUG_MARGIN;
- }
-
- // Apply alignment.
- *pOffset = VmaAlignUp(*pOffset, allocAlignment);
-
- // Check previous suballocations for BufferImageGranularity conflicts.
- // Make bigger alignment if necessary.
- if(bufferImageGranularity > 1)
- {
- bool bufferImageGranularityConflict = false;
- VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
- while(prevSuballocItem != m_Suballocations.cbegin())
- {
- --prevSuballocItem;
- const VmaSuballocation& prevSuballoc = *prevSuballocItem;
- if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
- {
- if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
- {
- bufferImageGranularityConflict = true;
- break;
- }
- }
- else
- // Already on previous page.
- break;
- }
- if(bufferImageGranularityConflict)
- {
- *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
- }
- }
-
- // Calculate padding at the beginning based on current offset.
- const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
-
- // Calculate required margin at the end.
- const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
-
- // Fail if requested size plus margin before and after is bigger than size of this suballocation.
- if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
- {
- return false;
- }
-
- // Check next suballocations for BufferImageGranularity conflicts.
- // If conflict exists, allocation cannot be made here.
- if(bufferImageGranularity > 1)
- {
- VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
- ++nextSuballocItem;
- while(nextSuballocItem != m_Suballocations.cend())
- {
- const VmaSuballocation& nextSuballoc = *nextSuballocItem;
- if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
- {
- if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
- {
- return false;
- }
- }
- else
- {
- // Already on next page.
- break;
- }
- ++nextSuballocItem;
- }
- }
- }
-
- // All tests passed: Success. pOffset is already filled.
- return true;
-}
-
-void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
-{
- VMA_ASSERT(item != m_Suballocations.end());
- VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
-
- VmaSuballocationList::iterator nextItem = item;
- ++nextItem;
- VMA_ASSERT(nextItem != m_Suballocations.end());
- VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
-
- item->size += nextItem->size;
- --m_FreeCount;
- m_Suballocations.erase(nextItem);
-}
-
-VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
-{
- // Change this suballocation to be marked as free.
- VmaSuballocation& suballoc = *suballocItem;
- suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
- suballoc.hAllocation = VK_NULL_HANDLE;
-
- // Update totals.
- ++m_FreeCount;
- m_SumFreeSize += suballoc.size;
-
- // Merge with previous and/or next suballocation if it's also free.
- bool mergeWithNext = false;
- bool mergeWithPrev = false;
-
- VmaSuballocationList::iterator nextItem = suballocItem;
- ++nextItem;
- if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
- {
- mergeWithNext = true;
- }
-
- VmaSuballocationList::iterator prevItem = suballocItem;
- if(suballocItem != m_Suballocations.begin())
- {
- --prevItem;
- if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
- {
- mergeWithPrev = true;
- }
- }
-
- if(mergeWithNext)
- {
- UnregisterFreeSuballocation(nextItem);
- MergeFreeWithNext(suballocItem);
- }
-
- if(mergeWithPrev)
- {
- UnregisterFreeSuballocation(prevItem);
- MergeFreeWithNext(prevItem);
- RegisterFreeSuballocation(prevItem);
- return prevItem;
- }
- else
- {
- RegisterFreeSuballocation(suballocItem);
- return suballocItem;
- }
-}
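FreeSuballocation marks the item free and then coalesces it with free neighbours in the list. A compact sketch of the same idea over std::list; the Range type and FreeAndMerge name are illustrative, and the size-sorted lookup vector maintained by the real code is omitted:

#include <cstdint>
#include <iterator>
#include <list>

struct Range { uint64_t offset; uint64_t size; bool free; };

// Mark *it free and merge it with adjacent free neighbours; return the surviving iterator.
static std::list<Range>::iterator FreeAndMerge(std::list<Range>& ranges, std::list<Range>::iterator it)
{
    it->free = true;

    auto next = std::next(it);
    if (next != ranges.end() && next->free)
    {
        it->size += next->size;     // absorb the following free range
        ranges.erase(next);
    }
    if (it != ranges.begin())
    {
        auto prev = std::prev(it);
        if (prev->free)
        {
            prev->size += it->size; // let the preceding free range absorb this one
            ranges.erase(it);
            return prev;
        }
    }
    return it;
}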
-
-void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
-{
- VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
- VMA_ASSERT(item->size > 0);
-
- // You may want to enable this validation at the beginning or at the end of
-    // this function, depending on what you want to check.
- VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
-
- if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
- {
- if(m_FreeSuballocationsBySize.empty())
- {
- m_FreeSuballocationsBySize.push_back(item);
- }
- else
- {
- VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
- }
- }
-
- //VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
-}
-
-
-void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
-{
- VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
- VMA_ASSERT(item->size > 0);
-
- // You may want to enable this validation at the beginning or at the end of
-    // this function, depending on what you want to check.
- VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
-
- if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
- {
- VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
- m_FreeSuballocationsBySize.data(),
- m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
- item,
- VmaSuballocationItemSizeLess());
- for(size_t index = it - m_FreeSuballocationsBySize.data();
- index < m_FreeSuballocationsBySize.size();
- ++index)
- {
- if(m_FreeSuballocationsBySize[index] == item)
- {
- VmaVectorRemove(m_FreeSuballocationsBySize, index);
- return;
- }
- VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) && "Not found.");
- }
- VMA_ASSERT(0 && "Not found.");
- }
-
- //VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
-}
-
-bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
- VkDeviceSize bufferImageGranularity,
- VmaSuballocationType& inOutPrevSuballocType) const
-{
- if(bufferImageGranularity == 1 || IsEmpty())
- {
- return false;
- }
-
- VkDeviceSize minAlignment = VK_WHOLE_SIZE;
- bool typeConflictFound = false;
- for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
- it != m_Suballocations.cend();
- ++it)
- {
- const VmaSuballocationType suballocType = it->type;
- if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
- {
- minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
- if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
- {
- typeConflictFound = true;
- }
- inOutPrevSuballocType = suballocType;
- }
- }
-
- return typeConflictFound || minAlignment >= bufferImageGranularity;
-}
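The bufferImageGranularity checks throughout this file reduce to asking whether two neighbouring resources of conflicting type touch the same granularity page. A sketch of that page test, equivalent in spirit to the VmaBlocksOnSamePage calls above and assuming pageSize is a power of two:

#include <cstdint>

// Do resource A (ending at aOffset + aSize - 1) and resource B (starting at bOffset)
// fall on the same granularity page? Assumes aOffset + aSize <= bOffset and pageSize is a power of two.
static bool BlocksOnSamePage(uint64_t aOffset, uint64_t aSize, uint64_t bOffset, uint64_t pageSize)
{
    const uint64_t aEnd       = aOffset + aSize - 1;
    const uint64_t aEndPage   = aEnd & ~(pageSize - 1);
    const uint64_t bStartPage = bOffset & ~(pageSize - 1);
    return aEndPage == bStartPage;
}

When the pages do coincide and the types conflict (a linear resource next to an optimal-tiling one), the candidate offset is re-aligned up to bufferImageGranularity, which is what the VmaAlignUp(*pOffset, bufferImageGranularity) lines above do.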
-
-////////////////////////////////////////////////////////////////////////////////
-// class VmaBlockMetadata_Linear
-
-VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(VmaAllocator hAllocator) :
- VmaBlockMetadata(hAllocator),
- m_SumFreeSize(0),
- m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
- m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
- m_1stVectorIndex(0),
- m_2ndVectorMode(SECOND_VECTOR_EMPTY),
- m_1stNullItemsBeginCount(0),
- m_1stNullItemsMiddleCount(0),
- m_2ndNullItemsCount(0)
-{
-}
-
-VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
-{
-}
-
-void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
-{
- VmaBlockMetadata::Init(size);
- m_SumFreeSize = size;
-}
-
-bool VmaBlockMetadata_Linear::Validate() const
-{
- const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
- const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
-
- VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
- VMA_VALIDATE(!suballocations1st.empty() ||
- suballocations2nd.empty() ||
- m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
-
- if(!suballocations1st.empty())
- {
- // Null item at the beginning should be accounted into m_1stNullItemsBeginCount.
- VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
- // Null item at the end should be just pop_back().
- VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
- }
- if(!suballocations2nd.empty())
- {
- // Null item at the end should be just pop_back().
- VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
- }
-
- VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
- VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
-
- VkDeviceSize sumUsedSize = 0;
- const size_t suballoc1stCount = suballocations1st.size();
- VkDeviceSize offset = VMA_DEBUG_MARGIN;
-
- if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
- {
- const size_t suballoc2ndCount = suballocations2nd.size();
- size_t nullItem2ndCount = 0;
- for(size_t i = 0; i < suballoc2ndCount; ++i)
- {
- const VmaSuballocation& suballoc = suballocations2nd[i];
- const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
-
- VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
- VMA_VALIDATE(suballoc.offset >= offset);
-
- if(!currFree)
- {
- VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
- VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
- sumUsedSize += suballoc.size;
- }
- else
- {
- ++nullItem2ndCount;
- }
-
- offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
- }
-
- VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
- }
-
- for(size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
- {
- const VmaSuballocation& suballoc = suballocations1st[i];
- VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
- suballoc.hAllocation == VK_NULL_HANDLE);
- }
-
- size_t nullItem1stCount = m_1stNullItemsBeginCount;
-
- for(size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
- {
- const VmaSuballocation& suballoc = suballocations1st[i];
- const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
-
- VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
- VMA_VALIDATE(suballoc.offset >= offset);
- VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
-
- if(!currFree)
- {
- VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
- VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
- sumUsedSize += suballoc.size;
- }
- else
- {
- ++nullItem1stCount;
- }
-
- offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
- }
- VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
-
- if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
- {
- const size_t suballoc2ndCount = suballocations2nd.size();
- size_t nullItem2ndCount = 0;
- for(size_t i = suballoc2ndCount; i--; )
- {
- const VmaSuballocation& suballoc = suballocations2nd[i];
- const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
-
- VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
- VMA_VALIDATE(suballoc.offset >= offset);
-
- if(!currFree)
- {
- VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
- VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
- sumUsedSize += suballoc.size;
- }
- else
- {
- ++nullItem2ndCount;
- }
-
- offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
- }
-
- VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
- }
-
- VMA_VALIDATE(offset <= GetSize());
- VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
-
- return true;
-}
-
-size_t VmaBlockMetadata_Linear::GetAllocationCount() const
-{
- return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
- AccessSuballocations2nd().size() - m_2ndNullItemsCount;
-}
-
-VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax() const
-{
- const VkDeviceSize size = GetSize();
-
- /*
- We don't consider gaps inside allocation vectors with freed allocations because
- they are not suitable for reuse in linear allocator. We consider only space that
- is available for new allocations.
- */
- if(IsEmpty())
- {
- return size;
- }
-
- const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
-
- switch(m_2ndVectorMode)
- {
- case SECOND_VECTOR_EMPTY:
- /*
- Available space is after end of 1st, as well as before beginning of 1st (which
-        would make it a ring buffer).
- */
- {
- const size_t suballocations1stCount = suballocations1st.size();
- VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
- const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
- const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
- return VMA_MAX(
- firstSuballoc.offset,
- size - (lastSuballoc.offset + lastSuballoc.size));
- }
- break;
-
- case SECOND_VECTOR_RING_BUFFER:
- /*
- Available space is only between end of 2nd and beginning of 1st.
- */
- {
- const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
- const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
- const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
- return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
- }
- break;
-
- case SECOND_VECTOR_DOUBLE_STACK:
- /*
- Available space is only between end of 1st and top of 2nd.
- */
- {
- const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
- const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
- const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
- return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
- }
- break;
-
- default:
- VMA_ASSERT(0);
- return 0;
- }
-}
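The three 2nd-vector modes above are not selected directly; with the public VMA 2.x API they follow from how a linear pool is used. A sketch of creating such a pool, assuming vmaCreatePool, VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT and VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT behave as documented; the block size and count are arbitrary example values:

#include "vk_mem_alloc.h"

// Sketch only: assumes a valid VmaAllocator and a memory type index chosen elsewhere
// (e.g. via vmaFindMemoryTypeIndex).
static VmaPool CreateLinearPool(VmaAllocator allocator, uint32_t memTypeIndex)
{
    VmaPoolCreateInfo poolInfo = {};
    poolInfo.memoryTypeIndex = memTypeIndex;
    poolInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT; // backed by VmaBlockMetadata_Linear
    poolInfo.blockSize = 64ull * 1024 * 1024;               // arbitrary example size
    poolInfo.maxBlockCount = 1;                              // the linear algorithm uses one block

    VmaPool pool = VK_NULL_HANDLE;
    if (vmaCreatePool(allocator, &poolInfo, &pool) != VK_SUCCESS)
        return VK_NULL_HANDLE;
    return pool;
}

Allocating only at the end keeps the block in SECOND_VECTOR_EMPTY; freeing from the front while continuing to allocate turns it into SECOND_VECTOR_RING_BUFFER; and requesting VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT in VmaAllocationCreateInfo::flags grows allocations down from the end of the block, the SECOND_VECTOR_DOUBLE_STACK case handled in CreateAllocationRequest further below.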
-
-void VmaBlockMetadata_Linear::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
-{
- const VkDeviceSize size = GetSize();
- const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
- const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
- const size_t suballoc1stCount = suballocations1st.size();
- const size_t suballoc2ndCount = suballocations2nd.size();
-
- outInfo.blockCount = 1;
- outInfo.allocationCount = (uint32_t)GetAllocationCount();
- outInfo.unusedRangeCount = 0;
- outInfo.usedBytes = 0;
- outInfo.allocationSizeMin = UINT64_MAX;
- outInfo.allocationSizeMax = 0;
- outInfo.unusedRangeSizeMin = UINT64_MAX;
- outInfo.unusedRangeSizeMax = 0;
-
- VkDeviceSize lastOffset = 0;
-
- if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
- {
- const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
- size_t nextAlloc2ndIndex = 0;
- while(lastOffset < freeSpace2ndTo1stEnd)
- {
- // Find next non-null allocation or move nextAllocIndex to the end.
- while(nextAlloc2ndIndex < suballoc2ndCount &&
- suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
- {
- ++nextAlloc2ndIndex;
- }
-
- // Found non-null allocation.
- if(nextAlloc2ndIndex < suballoc2ndCount)
- {
- const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
-
- // 1. Process free space before this allocation.
- if(lastOffset < suballoc.offset)
- {
- // There is free space from lastOffset to suballoc.offset.
- const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
- ++outInfo.unusedRangeCount;
- outInfo.unusedBytes += unusedRangeSize;
- outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
-                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
- }
-
- // 2. Process this allocation.
- // There is allocation with suballoc.offset, suballoc.size.
- outInfo.usedBytes += suballoc.size;
- outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
-                outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
-
- // 3. Prepare for next iteration.
- lastOffset = suballoc.offset + suballoc.size;
- ++nextAlloc2ndIndex;
- }
- // We are at the end.
- else
- {
- // There is free space from lastOffset to freeSpace2ndTo1stEnd.
- if(lastOffset < freeSpace2ndTo1stEnd)
- {
- const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
- ++outInfo.unusedRangeCount;
- outInfo.unusedBytes += unusedRangeSize;
- outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
-                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
- }
-
- // End of loop.
- lastOffset = freeSpace2ndTo1stEnd;
- }
- }
- }
-
- size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
- const VkDeviceSize freeSpace1stTo2ndEnd =
- m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
- while(lastOffset < freeSpace1stTo2ndEnd)
- {
- // Find next non-null allocation or move nextAllocIndex to the end.
- while(nextAlloc1stIndex < suballoc1stCount &&
- suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
- {
- ++nextAlloc1stIndex;
- }
-
- // Found non-null allocation.
- if(nextAlloc1stIndex < suballoc1stCount)
- {
- const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
-
- // 1. Process free space before this allocation.
- if(lastOffset < suballoc.offset)
- {
- // There is free space from lastOffset to suballoc.offset.
- const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
- ++outInfo.unusedRangeCount;
- outInfo.unusedBytes += unusedRangeSize;
- outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
-                outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
- }
-
- // 2. Process this allocation.
- // There is allocation with suballoc.offset, suballoc.size.
- outInfo.usedBytes += suballoc.size;
- outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
-            outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
-
- // 3. Prepare for next iteration.
- lastOffset = suballoc.offset + suballoc.size;
- ++nextAlloc1stIndex;
- }
- // We are at the end.
- else
- {
- // There is free space from lastOffset to freeSpace1stTo2ndEnd.
- if(lastOffset < freeSpace1stTo2ndEnd)
- {
- const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
- ++outInfo.unusedRangeCount;
- outInfo.unusedBytes += unusedRangeSize;
- outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
-                outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
- }
-
- // End of loop.
- lastOffset = freeSpace1stTo2ndEnd;
- }
- }
-
- if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
- {
- size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
- while(lastOffset < size)
- {
- // Find next non-null allocation or move nextAllocIndex to the end.
- while(nextAlloc2ndIndex != SIZE_MAX &&
- suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
- {
- --nextAlloc2ndIndex;
- }
-
- // Found non-null allocation.
- if(nextAlloc2ndIndex != SIZE_MAX)
- {
- const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
-
- // 1. Process free space before this allocation.
- if(lastOffset < suballoc.offset)
- {
- // There is free space from lastOffset to suballoc.offset.
- const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
- ++outInfo.unusedRangeCount;
- outInfo.unusedBytes += unusedRangeSize;
- outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
-                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
- }
-
- // 2. Process this allocation.
- // There is allocation with suballoc.offset, suballoc.size.
- outInfo.usedBytes += suballoc.size;
- outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
-                outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
-
- // 3. Prepare for next iteration.
- lastOffset = suballoc.offset + suballoc.size;
- --nextAlloc2ndIndex;
- }
- // We are at the end.
- else
- {
- // There is free space from lastOffset to size.
- if(lastOffset < size)
- {
- const VkDeviceSize unusedRangeSize = size - lastOffset;
- ++outInfo.unusedRangeCount;
- outInfo.unusedBytes += unusedRangeSize;
- outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
-                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
- }
-
- // End of loop.
- lastOffset = size;
- }
- }
- }
-
- outInfo.unusedBytes = size - outInfo.usedBytes;
-}
-
-void VmaBlockMetadata_Linear::AddPoolStats(VmaPoolStats& inoutStats) const
-{
- const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
- const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
- const VkDeviceSize size = GetSize();
- const size_t suballoc1stCount = suballocations1st.size();
- const size_t suballoc2ndCount = suballocations2nd.size();
-
- inoutStats.size += size;
-
- VkDeviceSize lastOffset = 0;
-
- if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
- {
- const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
-        size_t nextAlloc2ndIndex = 0;
- while(lastOffset < freeSpace2ndTo1stEnd)
- {
- // Find next non-null allocation or move nextAlloc2ndIndex to the end.
- while(nextAlloc2ndIndex < suballoc2ndCount &&
- suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
- {
- ++nextAlloc2ndIndex;
- }
-
- // Found non-null allocation.
- if(nextAlloc2ndIndex < suballoc2ndCount)
- {
- const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
-
- // 1. Process free space before this allocation.
- if(lastOffset < suballoc.offset)
- {
- // There is free space from lastOffset to suballoc.offset.
- const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
- inoutStats.unusedSize += unusedRangeSize;
- ++inoutStats.unusedRangeCount;
- inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
- }
-
- // 2. Process this allocation.
- // There is allocation with suballoc.offset, suballoc.size.
- ++inoutStats.allocationCount;
-
- // 3. Prepare for next iteration.
- lastOffset = suballoc.offset + suballoc.size;
- ++nextAlloc2ndIndex;
- }
- // We are at the end.
- else
- {
- if(lastOffset < freeSpace2ndTo1stEnd)
- {
- // There is free space from lastOffset to freeSpace2ndTo1stEnd.
- const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
- inoutStats.unusedSize += unusedRangeSize;
- ++inoutStats.unusedRangeCount;
- inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
- }
-
- // End of loop.
- lastOffset = freeSpace2ndTo1stEnd;
- }
- }
- }
-
- size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
- const VkDeviceSize freeSpace1stTo2ndEnd =
- m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
- while(lastOffset < freeSpace1stTo2ndEnd)
- {
- // Find next non-null allocation or move nextAllocIndex to the end.
- while(nextAlloc1stIndex < suballoc1stCount &&
- suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
- {
- ++nextAlloc1stIndex;
- }
-
- // Found non-null allocation.
- if(nextAlloc1stIndex < suballoc1stCount)
- {
- const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
-
- // 1. Process free space before this allocation.
- if(lastOffset < suballoc.offset)
- {
- // There is free space from lastOffset to suballoc.offset.
- const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
- inoutStats.unusedSize += unusedRangeSize;
- ++inoutStats.unusedRangeCount;
- inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
- }
-
- // 2. Process this allocation.
- // There is allocation with suballoc.offset, suballoc.size.
- ++inoutStats.allocationCount;
-
- // 3. Prepare for next iteration.
- lastOffset = suballoc.offset + suballoc.size;
- ++nextAlloc1stIndex;
- }
- // We are at the end.
- else
- {
- if(lastOffset < freeSpace1stTo2ndEnd)
- {
- // There is free space from lastOffset to freeSpace1stTo2ndEnd.
- const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
- inoutStats.unusedSize += unusedRangeSize;
- ++inoutStats.unusedRangeCount;
- inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
- }
-
- // End of loop.
- lastOffset = freeSpace1stTo2ndEnd;
- }
- }
-
- if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
- {
- size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
- while(lastOffset < size)
- {
- // Find next non-null allocation or move nextAlloc2ndIndex to the end.
- while(nextAlloc2ndIndex != SIZE_MAX &&
- suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
- {
- --nextAlloc2ndIndex;
- }
-
- // Found non-null allocation.
- if(nextAlloc2ndIndex != SIZE_MAX)
- {
- const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
-
- // 1. Process free space before this allocation.
- if(lastOffset < suballoc.offset)
- {
- // There is free space from lastOffset to suballoc.offset.
- const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
- inoutStats.unusedSize += unusedRangeSize;
- ++inoutStats.unusedRangeCount;
- inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
- }
-
- // 2. Process this allocation.
- // There is allocation with suballoc.offset, suballoc.size.
- ++inoutStats.allocationCount;
-
- // 3. Prepare for next iteration.
- lastOffset = suballoc.offset + suballoc.size;
- --nextAlloc2ndIndex;
- }
- // We are at the end.
- else
- {
- if(lastOffset < size)
- {
- // There is free space from lastOffset to size.
- const VkDeviceSize unusedRangeSize = size - lastOffset;
- inoutStats.unusedSize += unusedRangeSize;
- ++inoutStats.unusedRangeCount;
- inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
- }
-
- // End of loop.
- lastOffset = size;
- }
- }
- }
-}
-
-#if VMA_STATS_STRING_ENABLED
-void VmaBlockMetadata_Linear::PrintDetailedMap(class VmaJsonWriter& json) const
-{
- const VkDeviceSize size = GetSize();
- const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
- const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
- const size_t suballoc1stCount = suballocations1st.size();
- const size_t suballoc2ndCount = suballocations2nd.size();
-
- // FIRST PASS
-
- size_t unusedRangeCount = 0;
- VkDeviceSize usedBytes = 0;
-
- VkDeviceSize lastOffset = 0;
-
- size_t alloc2ndCount = 0;
- if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
- {
- const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
- size_t nextAlloc2ndIndex = 0;
- while(lastOffset < freeSpace2ndTo1stEnd)
- {
- // Find next non-null allocation or move nextAlloc2ndIndex to the end.
- while(nextAlloc2ndIndex < suballoc2ndCount &&
- suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
- {
- ++nextAlloc2ndIndex;
- }
-
- // Found non-null allocation.
- if(nextAlloc2ndIndex < suballoc2ndCount)
- {
- const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
-
- // 1. Process free space before this allocation.
- if(lastOffset < suballoc.offset)
- {
- // There is free space from lastOffset to suballoc.offset.
- ++unusedRangeCount;
- }
-
- // 2. Process this allocation.
- // There is allocation with suballoc.offset, suballoc.size.
- ++alloc2ndCount;
- usedBytes += suballoc.size;
-
- // 3. Prepare for next iteration.
- lastOffset = suballoc.offset + suballoc.size;
- ++nextAlloc2ndIndex;
- }
- // We are at the end.
- else
- {
- if(lastOffset < freeSpace2ndTo1stEnd)
- {
- // There is free space from lastOffset to freeSpace2ndTo1stEnd.
- ++unusedRangeCount;
- }
-
- // End of loop.
- lastOffset = freeSpace2ndTo1stEnd;
- }
- }
- }
-
- size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
- size_t alloc1stCount = 0;
- const VkDeviceSize freeSpace1stTo2ndEnd =
- m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
- while(lastOffset < freeSpace1stTo2ndEnd)
- {
- // Find next non-null allocation or move nextAllocIndex to the end.
- while(nextAlloc1stIndex < suballoc1stCount &&
- suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
- {
- ++nextAlloc1stIndex;
- }
-
- // Found non-null allocation.
- if(nextAlloc1stIndex < suballoc1stCount)
- {
- const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
-
- // 1. Process free space before this allocation.
- if(lastOffset < suballoc.offset)
- {
- // There is free space from lastOffset to suballoc.offset.
- ++unusedRangeCount;
- }
-
- // 2. Process this allocation.
- // There is allocation with suballoc.offset, suballoc.size.
- ++alloc1stCount;
- usedBytes += suballoc.size;
-
- // 3. Prepare for next iteration.
- lastOffset = suballoc.offset + suballoc.size;
- ++nextAlloc1stIndex;
- }
- // We are at the end.
- else
- {
- if(lastOffset < size)
- {
- // There is free space from lastOffset to freeSpace1stTo2ndEnd.
- ++unusedRangeCount;
- }
-
- // End of loop.
- lastOffset = freeSpace1stTo2ndEnd;
- }
- }
-
- if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
- {
- size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
- while(lastOffset < size)
- {
- // Find next non-null allocation or move nextAlloc2ndIndex to the end.
- while(nextAlloc2ndIndex != SIZE_MAX &&
- suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
- {
- --nextAlloc2ndIndex;
- }
-
- // Found non-null allocation.
- if(nextAlloc2ndIndex != SIZE_MAX)
- {
- const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
-
- // 1. Process free space before this allocation.
- if(lastOffset < suballoc.offset)
- {
- // There is free space from lastOffset to suballoc.offset.
- ++unusedRangeCount;
- }
-
- // 2. Process this allocation.
- // There is allocation with suballoc.offset, suballoc.size.
- ++alloc2ndCount;
- usedBytes += suballoc.size;
-
- // 3. Prepare for next iteration.
- lastOffset = suballoc.offset + suballoc.size;
- --nextAlloc2ndIndex;
- }
- // We are at the end.
- else
- {
- if(lastOffset < size)
- {
- // There is free space from lastOffset to size.
- ++unusedRangeCount;
- }
-
- // End of loop.
- lastOffset = size;
- }
- }
- }
-
- const VkDeviceSize unusedBytes = size - usedBytes;
- PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
-
- // SECOND PASS
- lastOffset = 0;
-
- if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
- {
- const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
- size_t nextAlloc2ndIndex = 0;
- while(lastOffset < freeSpace2ndTo1stEnd)
- {
- // Find next non-null allocation or move nextAlloc2ndIndex to the end.
- while(nextAlloc2ndIndex < suballoc2ndCount &&
- suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
- {
- ++nextAlloc2ndIndex;
- }
-
- // Found non-null allocation.
- if(nextAlloc2ndIndex < suballoc2ndCount)
- {
- const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
-
- // 1. Process free space before this allocation.
- if(lastOffset < suballoc.offset)
- {
- // There is free space from lastOffset to suballoc.offset.
- const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
- PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
- }
-
- // 2. Process this allocation.
- // There is allocation with suballoc.offset, suballoc.size.
- PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
-
- // 3. Prepare for next iteration.
- lastOffset = suballoc.offset + suballoc.size;
- ++nextAlloc2ndIndex;
- }
- // We are at the end.
- else
- {
- if(lastOffset < freeSpace2ndTo1stEnd)
- {
- // There is free space from lastOffset to freeSpace2ndTo1stEnd.
- const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
- PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
- }
-
- // End of loop.
- lastOffset = freeSpace2ndTo1stEnd;
- }
- }
- }
-
- nextAlloc1stIndex = m_1stNullItemsBeginCount;
- while(lastOffset < freeSpace1stTo2ndEnd)
- {
- // Find next non-null allocation or move nextAllocIndex to the end.
- while(nextAlloc1stIndex < suballoc1stCount &&
- suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
- {
- ++nextAlloc1stIndex;
- }
-
- // Found non-null allocation.
- if(nextAlloc1stIndex < suballoc1stCount)
- {
- const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
-
- // 1. Process free space before this allocation.
- if(lastOffset < suballoc.offset)
- {
- // There is free space from lastOffset to suballoc.offset.
- const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
- PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
- }
-
- // 2. Process this allocation.
- // There is allocation with suballoc.offset, suballoc.size.
- PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
-
- // 3. Prepare for next iteration.
- lastOffset = suballoc.offset + suballoc.size;
- ++nextAlloc1stIndex;
- }
- // We are at the end.
- else
- {
- if(lastOffset < freeSpace1stTo2ndEnd)
- {
- // There is free space from lastOffset to freeSpace1stTo2ndEnd.
- const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
- PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
- }
-
- // End of loop.
- lastOffset = freeSpace1stTo2ndEnd;
- }
- }
-
- if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
- {
- size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
- while(lastOffset < size)
- {
- // Find next non-null allocation or move nextAlloc2ndIndex to the end.
- while(nextAlloc2ndIndex != SIZE_MAX &&
- suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
- {
- --nextAlloc2ndIndex;
- }
-
- // Found non-null allocation.
- if(nextAlloc2ndIndex != SIZE_MAX)
- {
- const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
-
- // 1. Process free space before this allocation.
- if(lastOffset < suballoc.offset)
- {
- // There is free space from lastOffset to suballoc.offset.
- const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
- PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
- }
-
- // 2. Process this allocation.
- // There is allocation with suballoc.offset, suballoc.size.
- PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
-
- // 3. Prepare for next iteration.
- lastOffset = suballoc.offset + suballoc.size;
- --nextAlloc2ndIndex;
- }
- // We are at the end.
- else
- {
- if(lastOffset < size)
- {
- // There is free space from lastOffset to size.
- const VkDeviceSize unusedRangeSize = size - lastOffset;
- PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
- }
-
- // End of loop.
- lastOffset = size;
- }
- }
- }
-
- PrintDetailedMap_End(json);
-}
-#endif // #if VMA_STATS_STRING_ENABLED
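The detailed map written above ends up in the JSON statistics an application can request through the public API. A sketch assuming the VMA 2.x functions vmaBuildStatsString and vmaFreeStatsString:

#include <cstdio>
#include "vk_mem_alloc.h"

// Sketch only: dumps the allocator's full JSON statistics (including the per-block
// detailed maps produced by PrintDetailedMap) to stdout.
static void DumpStats(VmaAllocator allocator)
{
    char* statsString = nullptr;
    vmaBuildStatsString(allocator, &statsString, VK_TRUE /* detailedMap */);
    if (statsString != nullptr)
    {
        std::printf("%s\n", statsString);
        vmaFreeStatsString(allocator, statsString);
    }
}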
-
-bool VmaBlockMetadata_Linear::CreateAllocationRequest(
- uint32_t currentFrameIndex,
- uint32_t frameInUseCount,
- VkDeviceSize bufferImageGranularity,
- VkDeviceSize allocSize,
- VkDeviceSize allocAlignment,
- bool upperAddress,
- VmaSuballocationType allocType,
- bool canMakeOtherLost,
- uint32_t strategy,
- VmaAllocationRequest* pAllocationRequest)
-{
- VMA_ASSERT(allocSize > 0);
- VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
- VMA_ASSERT(pAllocationRequest != VMA_NULL);
- VMA_HEAVY_ASSERT(Validate());
-
- const VkDeviceSize size = GetSize();
- SuballocationVectorType& suballocations1st = AccessSuballocations1st();
- SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
-
- if(upperAddress)
- {
- if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
- {
- VMA_ASSERT(0 && "Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
- return false;
- }
-
- // Try to allocate before 2nd.back(), or end of block if 2nd.empty().
- if(allocSize > size)
- {
- return false;
- }
- VkDeviceSize resultBaseOffset = size - allocSize;
- if(!suballocations2nd.empty())
- {
- const VmaSuballocation& lastSuballoc = suballocations2nd.back();
- resultBaseOffset = lastSuballoc.offset - allocSize;
- if(allocSize > lastSuballoc.offset)
- {
- return false;
- }
- }
-
- // Start from offset equal to end of free space.
- VkDeviceSize resultOffset = resultBaseOffset;
-
- // Apply VMA_DEBUG_MARGIN at the end.
- if(VMA_DEBUG_MARGIN > 0)
- {
- if(resultOffset < VMA_DEBUG_MARGIN)
- {
- return false;
- }
- resultOffset -= VMA_DEBUG_MARGIN;
- }
-
- // Apply alignment.
- resultOffset = VmaAlignDown(resultOffset, allocAlignment);
-
- // Check next suballocations from 2nd for BufferImageGranularity conflicts.
- // Make bigger alignment if necessary.
- if(bufferImageGranularity > 1 && !suballocations2nd.empty())
- {
- bool bufferImageGranularityConflict = false;
- for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
- {
- const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
- if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
- {
- if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
- {
- bufferImageGranularityConflict = true;
- break;
- }
- }
- else
- // Already on previous page.
- break;
- }
- if(bufferImageGranularityConflict)
- {
- resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
- }
- }
-
- // There is enough free space.
- const VkDeviceSize endOf1st = !suballocations1st.empty() ?
- suballocations1st.back().offset + suballocations1st.back().size :
- 0;
- if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
- {
- // Check previous suballocations for BufferImageGranularity conflicts.
- // If conflict exists, allocation cannot be made here.
- if(bufferImageGranularity > 1)
- {
- for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
- {
- const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
- if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
- {
- if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
- {
- return false;
- }
- }
- else
- {
- // Already on next page.
- break;
- }
- }
- }
-
- // All tests passed: Success.
- pAllocationRequest->offset = resultOffset;
- pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
- pAllocationRequest->sumItemSize = 0;
- // pAllocationRequest->item unused.
- pAllocationRequest->itemsToMakeLostCount = 0;
- return true;
- }
- }
- else // !upperAddress
- {
- if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
- {
- // Try to allocate at the end of 1st vector.
-
- VkDeviceSize resultBaseOffset = 0;
- if(!suballocations1st.empty())
- {
- const VmaSuballocation& lastSuballoc = suballocations1st.back();
- resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
- }
-
- // Start from offset equal to beginning of free space.
- VkDeviceSize resultOffset = resultBaseOffset;
-
- // Apply VMA_DEBUG_MARGIN at the beginning.
- if(VMA_DEBUG_MARGIN > 0)
- {
- resultOffset += VMA_DEBUG_MARGIN;
- }
-
- // Apply alignment.
- resultOffset = VmaAlignUp(resultOffset, allocAlignment);
-
- // Check previous suballocations for BufferImageGranularity conflicts.
- // Make bigger alignment if necessary.
- if(bufferImageGranularity > 1 && !suballocations1st.empty())
- {
- bool bufferImageGranularityConflict = false;
- for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
- {
- const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
- if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
- {
- if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
- {
- bufferImageGranularityConflict = true;
- break;
- }
- }
- else
- // Already on previous page.
- break;
- }
- if(bufferImageGranularityConflict)
- {
- resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
- }
- }
-
- const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
- suballocations2nd.back().offset : size;
-
- // There is enough free space at the end after alignment.
- if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
- {
- // Check next suballocations for BufferImageGranularity conflicts.
- // If conflict exists, allocation cannot be made here.
- if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
- {
- for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
- {
- const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
- if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
- {
- if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
- {
- return false;
- }
- }
- else
- {
- // Already on previous page.
- break;
- }
- }
- }
-
- // All tests passed: Success.
- pAllocationRequest->offset = resultOffset;
- pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
- pAllocationRequest->sumItemSize = 0;
- // pAllocationRequest->item unused.
- pAllocationRequest->itemsToMakeLostCount = 0;
- return true;
- }
- }
-
- // Wrap-around to end of 2nd vector. Try to allocate there, watching for the
- // beginning of 1st vector as the end of free space.
- if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
- {
- VMA_ASSERT(!suballocations1st.empty());
-
- VkDeviceSize resultBaseOffset = 0;
- if(!suballocations2nd.empty())
- {
- const VmaSuballocation& lastSuballoc = suballocations2nd.back();
- resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
- }
-
- // Start from offset equal to beginning of free space.
- VkDeviceSize resultOffset = resultBaseOffset;
-
- // Apply VMA_DEBUG_MARGIN at the beginning.
- if(VMA_DEBUG_MARGIN > 0)
- {
- resultOffset += VMA_DEBUG_MARGIN;
- }
-
- // Apply alignment.
- resultOffset = VmaAlignUp(resultOffset, allocAlignment);
-
- // Check previous suballocations for BufferImageGranularity conflicts.
- // Make bigger alignment if necessary.
- if(bufferImageGranularity > 1 && !suballocations2nd.empty())
- {
- bool bufferImageGranularityConflict = false;
- for(size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
- {
- const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
- if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
- {
- if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
- {
- bufferImageGranularityConflict = true;
- break;
- }
- }
- else
- // Already on previous page.
- break;
- }
- if(bufferImageGranularityConflict)
- {
- resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
- }
- }
-
- pAllocationRequest->itemsToMakeLostCount = 0;
- pAllocationRequest->sumItemSize = 0;
- size_t index1st = m_1stNullItemsBeginCount;
-
- if(canMakeOtherLost)
- {
- while(index1st < suballocations1st.size() &&
- resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
- {
- // Next colliding allocation at the beginning of 1st vector found. Try to make it lost.
- const VmaSuballocation& suballoc = suballocations1st[index1st];
- if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
- {
- // No problem.
- }
- else
- {
- VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
- if(suballoc.hAllocation->CanBecomeLost() &&
- suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
- {
- ++pAllocationRequest->itemsToMakeLostCount;
- pAllocationRequest->sumItemSize += suballoc.size;
- }
- else
- {
- return false;
- }
- }
- ++index1st;
- }
-
- // Check next suballocations for BufferImageGranularity conflicts.
- // If conflict exists, we must mark more allocations lost or fail.
- if(bufferImageGranularity > 1)
- {
- while(index1st < suballocations1st.size())
- {
- const VmaSuballocation& suballoc = suballocations1st[index1st];
- if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
- {
- if(suballoc.hAllocation != VK_NULL_HANDLE)
- {
- // Not checking actual VmaIsBufferImageGranularityConflict(allocType, suballoc.type).
- if(suballoc.hAllocation->CanBecomeLost() &&
- suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
- {
- ++pAllocationRequest->itemsToMakeLostCount;
- pAllocationRequest->sumItemSize += suballoc.size;
- }
- else
- {
- return false;
- }
- }
- }
- else
- {
- // Already on next page.
- break;
- }
- ++index1st;
- }
- }
- }
-
- // There is enough free space at the end after alignment.
- if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN < size) ||
- (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
- {
- // Check next suballocations for BufferImageGranularity conflicts.
- // If conflict exists, allocation cannot be made here.
- if(bufferImageGranularity > 1)
- {
- for(size_t nextSuballocIndex = index1st;
- nextSuballocIndex < suballocations1st.size();
- nextSuballocIndex++)
- {
- const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
- if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
- {
- if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
- {
- return false;
- }
- }
- else
- {
- // Already on next page.
- break;
- }
- }
- }
-
- // All tests passed: Success.
- pAllocationRequest->offset = resultOffset;
- pAllocationRequest->sumFreeSize =
- (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
- - resultBaseOffset
- - pAllocationRequest->sumItemSize;
- // pAllocationRequest->item unused.
- return true;
- }
- }
- }
-
- return false;
-}
-
-bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
- uint32_t currentFrameIndex,
- uint32_t frameInUseCount,
- VmaAllocationRequest* pAllocationRequest)
-{
- if(pAllocationRequest->itemsToMakeLostCount == 0)
- {
- return true;
- }
-
- VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
-
- SuballocationVectorType& suballocations1st = AccessSuballocations1st();
- size_t index1st = m_1stNullItemsBeginCount;
- size_t madeLostCount = 0;
- while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
- {
- VMA_ASSERT(index1st < suballocations1st.size());
- VmaSuballocation& suballoc = suballocations1st[index1st];
- if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
- {
- VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
- VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
- if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
- {
- suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
- suballoc.hAllocation = VK_NULL_HANDLE;
- m_SumFreeSize += suballoc.size;
- ++m_1stNullItemsMiddleCount;
- ++madeLostCount;
- }
- else
- {
- return false;
- }
- }
- ++index1st;
- }
-
- CleanupAfterFree();
- //VMA_HEAVY_ASSERT(Validate()); // Already called by CleanupAfterFree().
-
- return true;
-}
-
-uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
-{
- uint32_t lostAllocationCount = 0;
-
- SuballocationVectorType& suballocations1st = AccessSuballocations1st();
- for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
- {
- VmaSuballocation& suballoc = suballocations1st[i];
- if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
- suballoc.hAllocation->CanBecomeLost() &&
- suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
- {
- suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
- suballoc.hAllocation = VK_NULL_HANDLE;
- ++m_1stNullItemsMiddleCount;
- m_SumFreeSize += suballoc.size;
- ++lostAllocationCount;
- }
- }
-
- SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
- for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
- {
- VmaSuballocation& suballoc = suballocations2nd[i];
- if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
- suballoc.hAllocation->CanBecomeLost() &&
- suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
- {
- suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
- suballoc.hAllocation = VK_NULL_HANDLE;
- ++m_2ndNullItemsCount;
- ++lostAllocationCount;
- }
- }
-
- if(lostAllocationCount)
- {
- CleanupAfterFree();
- }
-
- return lostAllocationCount;
-}
-
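Making allocations lost only applies to allocations created with the can-become-lost flag, and it hinges on the frame index the application advances each frame. A sketch of the application-side half of that contract, with all names assumed rather than taken from this diff:

#include "vk_mem_alloc.h"

// Sketch only: 'allocator', 'allocation' and 'frameIndex' come from the application;
// the allocation must have been created with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT.
bool ResourceStillAliveSketch(VmaAllocator allocator, VmaAllocation allocation, uint32_t frameIndex)
{
    vmaSetCurrentFrameIndex(allocator, frameIndex); // advanced once per frame by the application

    // Returns VK_FALSE once MakeAllocationsLost()/MakeRequestedAllocationsLost()
    // has reclaimed the memory; the resource must then be recreated.
    return vmaTouchAllocation(allocator, allocation) == VK_TRUE;
}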
-VkResult VmaBlockMetadata_Linear::CheckCorruption(const void* pBlockData)
-{
- SuballocationVectorType& suballocations1st = AccessSuballocations1st();
- for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
- {
- const VmaSuballocation& suballoc = suballocations1st[i];
- if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
- {
- if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
- {
- VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
- return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
- {
- VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
- return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- }
- }
-
- SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
- for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
- {
- const VmaSuballocation& suballoc = suballocations2nd[i];
- if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
- {
- if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
- {
- VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
- return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
- {
- VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
- return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- }
- }
-
- return VK_SUCCESS;
-}
-
-void VmaBlockMetadata_Linear::Alloc(
- const VmaAllocationRequest& request,
- VmaSuballocationType type,
- VkDeviceSize allocSize,
- bool upperAddress,
- VmaAllocation hAllocation)
-{
- const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
-
- if(upperAddress)
- {
- VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
- "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
- SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
- suballocations2nd.push_back(newSuballoc);
- m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
- }
- else
- {
- SuballocationVectorType& suballocations1st = AccessSuballocations1st();
-
- // First allocation.
- if(suballocations1st.empty())
- {
- suballocations1st.push_back(newSuballoc);
- }
- else
- {
- // New allocation at the end of 1st vector.
- if(request.offset >= suballocations1st.back().offset + suballocations1st.back().size)
- {
- // Check if it fits before the end of the block.
- VMA_ASSERT(request.offset + allocSize <= GetSize());
- suballocations1st.push_back(newSuballoc);
- }
- // New allocation at the end of 2-part ring buffer, so before first allocation from 1st vector.
- else if(request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset)
- {
- SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
-
- switch(m_2ndVectorMode)
- {
- case SECOND_VECTOR_EMPTY:
- // First allocation from second part ring buffer.
- VMA_ASSERT(suballocations2nd.empty());
- m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
- break;
- case SECOND_VECTOR_RING_BUFFER:
- // 2-part ring buffer is already started.
- VMA_ASSERT(!suballocations2nd.empty());
- break;
- case SECOND_VECTOR_DOUBLE_STACK:
- VMA_ASSERT(0 && "CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
- break;
- default:
- VMA_ASSERT(0);
- }
-
- suballocations2nd.push_back(newSuballoc);
- }
- else
- {
- VMA_ASSERT(0 && "CRITICAL INTERNAL ERROR.");
- }
- }
- }
-
- m_SumFreeSize -= newSuballoc.size;
-}
-
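The upper-address branch above is what backs using a linear custom pool as a double stack: such allocations are pushed onto the 2nd vector and grow down from the end of the block. A minimal sketch of driving it through the public API; the allocator, memory type index, and buffer parameters are placeholders, not taken from this diff:

#include "vk_mem_alloc.h"

// Sketch only: 'allocator' and 'memoryTypeIndex' are assumed to come from the application.
void DoubleStackPoolSketch(VmaAllocator allocator, uint32_t memoryTypeIndex)
{
    VmaPoolCreateInfo poolInfo = {};
    poolInfo.memoryTypeIndex = memoryTypeIndex;
    poolInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT; // selects VmaBlockMetadata_Linear
    poolInfo.blockSize = 64ull * 1024 * 1024;
    poolInfo.maxBlockCount = 1;                            // upper-address allocations require a single block

    VmaPool pool = VK_NULL_HANDLE;
    vmaCreatePool(allocator, &poolInfo, &pool);

    VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufInfo.size = 1024 * 1024;
    bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.pool = pool;

    // Placed at the bottom of the block: goes into suballocations1st.
    VkBuffer bufA = VK_NULL_HANDLE; VmaAllocation allocA = VK_NULL_HANDLE;
    vmaCreateBuffer(allocator, &bufInfo, &allocCreateInfo, &bufA, &allocA, nullptr);

    // Placed at the top of the block: goes into suballocations2nd (SECOND_VECTOR_DOUBLE_STACK).
    allocCreateInfo.flags = VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
    VkBuffer bufB = VK_NULL_HANDLE; VmaAllocation allocB = VK_NULL_HANDLE;
    vmaCreateBuffer(allocator, &bufInfo, &allocCreateInfo, &bufB, &allocB, nullptr);

    vmaDestroyBuffer(allocator, bufB, allocB);
    vmaDestroyBuffer(allocator, bufA, allocA);
    vmaDestroyPool(allocator, pool);
}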
-void VmaBlockMetadata_Linear::Free(const VmaAllocation allocation)
-{
- FreeAtOffset(allocation->GetOffset());
-}
-
-void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
-{
- SuballocationVectorType& suballocations1st = AccessSuballocations1st();
- SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
-
- if(!suballocations1st.empty())
- {
- // First allocation: Mark it as next empty at the beginning.
- VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
- if(firstSuballoc.offset == offset)
- {
- firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
- firstSuballoc.hAllocation = VK_NULL_HANDLE;
- m_SumFreeSize += firstSuballoc.size;
- ++m_1stNullItemsBeginCount;
- CleanupAfterFree();
- return;
- }
- }
-
- // Last allocation in 2-part ring buffer or top of upper stack (same logic).
- if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
- m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
- {
- VmaSuballocation& lastSuballoc = suballocations2nd.back();
- if(lastSuballoc.offset == offset)
- {
- m_SumFreeSize += lastSuballoc.size;
- suballocations2nd.pop_back();
- CleanupAfterFree();
- return;
- }
- }
- // Last allocation in 1st vector.
- else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
- {
- VmaSuballocation& lastSuballoc = suballocations1st.back();
- if(lastSuballoc.offset == offset)
- {
- m_SumFreeSize += lastSuballoc.size;
- suballocations1st.pop_back();
- CleanupAfterFree();
- return;
- }
- }
-
- // Item from the middle of 1st vector.
- {
- VmaSuballocation refSuballoc;
- refSuballoc.offset = offset;
- // Rest of members stays uninitialized intentionally for better performance.
- SuballocationVectorType::iterator it = VmaVectorFindSorted<VmaSuballocationOffsetLess>(
- suballocations1st.begin() + m_1stNullItemsBeginCount,
- suballocations1st.end(),
- refSuballoc);
- if(it != suballocations1st.end())
- {
- it->type = VMA_SUBALLOCATION_TYPE_FREE;
- it->hAllocation = VK_NULL_HANDLE;
- ++m_1stNullItemsMiddleCount;
- m_SumFreeSize += it->size;
- CleanupAfterFree();
- return;
- }
- }
-
- if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
- {
- // Item from the middle of 2nd vector.
- VmaSuballocation refSuballoc;
- refSuballoc.offset = offset;
- // Rest of members stays uninitialized intentionally for better performance.
- SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
- VmaVectorFindSorted<VmaSuballocationOffsetLess>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc) :
- VmaVectorFindSorted<VmaSuballocationOffsetGreater>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc);
- if(it != suballocations2nd.end())
- {
- it->type = VMA_SUBALLOCATION_TYPE_FREE;
- it->hAllocation = VK_NULL_HANDLE;
- ++m_2ndNullItemsCount;
- m_SumFreeSize += it->size;
- CleanupAfterFree();
- return;
- }
- }
-
- VMA_ASSERT(0 && "Allocation to free not found in linear allocator!");
-}
-
-bool VmaBlockMetadata_Linear::ShouldCompact1st() const
-{
- const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
- const size_t suballocCount = AccessSuballocations1st().size();
- return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
-}
-
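In other words, the 1st vector is compacted only once it holds more than 32 entries and free markers outnumber live entries three to two, i.e. at least 60% of the vector. A small standalone check of that threshold, with illustrative numbers rather than values from the library:

#include <cassert>
#include <cstddef>

// Mirrors the ShouldCompact1st() condition for illustration only.
static bool ShouldCompactSketch(size_t suballocCount, size_t nullItemCount)
{
    const size_t usedCount = suballocCount - nullItemCount;
    return suballocCount > 32 && nullItemCount * 2 >= usedCount * 3;
}

int main()
{
    assert(!ShouldCompactSketch(100, 59)); // 118 < 123 -> keep as-is
    assert( ShouldCompactSketch(100, 60)); // 120 >= 120 -> compact (60% null items)
    assert(!ShouldCompactSketch(20, 15));  // too few entries to bother (<= 32)
    return 0;
}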
-void VmaBlockMetadata_Linear::CleanupAfterFree()
-{
- SuballocationVectorType& suballocations1st = AccessSuballocations1st();
- SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
-
- if(IsEmpty())
- {
- suballocations1st.clear();
- suballocations2nd.clear();
- m_1stNullItemsBeginCount = 0;
- m_1stNullItemsMiddleCount = 0;
- m_2ndNullItemsCount = 0;
- m_2ndVectorMode = SECOND_VECTOR_EMPTY;
- }
- else
- {
- const size_t suballoc1stCount = suballocations1st.size();
- const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
- VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
-
- // Find more null items at the beginning of 1st vector.
- while(m_1stNullItemsBeginCount < suballoc1stCount &&
- suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
- {
- ++m_1stNullItemsBeginCount;
- --m_1stNullItemsMiddleCount;
- }
-
- // Find more null items at the end of 1st vector.
- while(m_1stNullItemsMiddleCount > 0 &&
- suballocations1st.back().hAllocation == VK_NULL_HANDLE)
- {
- --m_1stNullItemsMiddleCount;
- suballocations1st.pop_back();
- }
-
- // Find more null items at the end of 2nd vector.
- while(m_2ndNullItemsCount > 0 &&
- suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
- {
- --m_2ndNullItemsCount;
- suballocations2nd.pop_back();
- }
-
- if(ShouldCompact1st())
- {
- const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
- size_t srcIndex = m_1stNullItemsBeginCount;
- for(size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
- {
- while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
- {
- ++srcIndex;
- }
- if(dstIndex != srcIndex)
- {
- suballocations1st[dstIndex] = suballocations1st[srcIndex];
- }
- ++srcIndex;
- }
- suballocations1st.resize(nonNullItemCount);
- m_1stNullItemsBeginCount = 0;
- m_1stNullItemsMiddleCount = 0;
- }
-
- // 2nd vector became empty.
- if(suballocations2nd.empty())
- {
- m_2ndVectorMode = SECOND_VECTOR_EMPTY;
- }
-
- // 1st vector became empty.
- if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
- {
- suballocations1st.clear();
- m_1stNullItemsBeginCount = 0;
-
- if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
- {
- // Swap 1st with 2nd. Now 2nd is empty.
- m_2ndVectorMode = SECOND_VECTOR_EMPTY;
- m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
- while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
- suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
- {
- ++m_1stNullItemsBeginCount;
- --m_1stNullItemsMiddleCount;
- }
- m_2ndNullItemsCount = 0;
- m_1stVectorIndex ^= 1;
- }
- }
- }
-
- VMA_HEAVY_ASSERT(Validate());
-}
-
-
-////////////////////////////////////////////////////////////////////////////////
-// class VmaBlockMetadata_Buddy
-
-VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(VmaAllocator hAllocator) :
- VmaBlockMetadata(hAllocator),
- m_Root(VMA_NULL),
- m_AllocationCount(0),
- m_FreeCount(1),
- m_SumFreeSize(0)
-{
- memset(m_FreeList, 0, sizeof(m_FreeList));
-}
-
-VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
-{
- DeleteNode(m_Root);
-}
-
-void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
-{
- VmaBlockMetadata::Init(size);
-
- m_UsableSize = VmaPrevPow2(size);
- m_SumFreeSize = m_UsableSize;
-
- // Calculate m_LevelCount.
- m_LevelCount = 1;
- while(m_LevelCount < MAX_LEVELS &&
- LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
- {
- ++m_LevelCount;
- }
-
- Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
- rootNode->offset = 0;
- rootNode->type = Node::TYPE_FREE;
- rootNode->parent = VMA_NULL;
- rootNode->buddy = VMA_NULL;
-
- m_Root = rootNode;
- AddToFreeListFront(0, rootNode);
-}
-
-bool VmaBlockMetadata_Buddy::Validate() const
-{
- // Validate tree.
- ValidationContext ctx;
- if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
- {
- VMA_VALIDATE(false && "ValidateNode failed.");
- }
- VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
- VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
-
- // Validate free node lists.
- for(uint32_t level = 0; level < m_LevelCount; ++level)
- {
- VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
- m_FreeList[level].front->free.prev == VMA_NULL);
-
- for(Node* node = m_FreeList[level].front;
- node != VMA_NULL;
- node = node->free.next)
- {
- VMA_VALIDATE(node->type == Node::TYPE_FREE);
-
- if(node->free.next == VMA_NULL)
- {
- VMA_VALIDATE(m_FreeList[level].back == node);
- }
- else
- {
- VMA_VALIDATE(node->free.next->free.prev == node);
- }
- }
- }
-
- // Validate that free lists at higher levels are empty.
- for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
- {
- VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
- }
-
- return true;
-}
-
-VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax() const
-{
- for(uint32_t level = 0; level < m_LevelCount; ++level)
- {
- if(m_FreeList[level].front != VMA_NULL)
- {
- return LevelToNodeSize(level);
- }
- }
- return 0;
-}
-
-void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
-{
- const VkDeviceSize unusableSize = GetUnusableSize();
-
- outInfo.blockCount = 1;
-
- outInfo.allocationCount = outInfo.unusedRangeCount = 0;
- outInfo.usedBytes = outInfo.unusedBytes = 0;
-
- outInfo.allocationSizeMax = outInfo.unusedRangeSizeMax = 0;
- outInfo.allocationSizeMin = outInfo.unusedRangeSizeMin = UINT64_MAX;
- outInfo.allocationSizeAvg = outInfo.unusedRangeSizeAvg = 0; // Unused.
-
- CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
-
- if(unusableSize > 0)
- {
- ++outInfo.unusedRangeCount;
- outInfo.unusedBytes += unusableSize;
- outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusableSize);
- outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusableSize);
- }
-}
-
-void VmaBlockMetadata_Buddy::AddPoolStats(VmaPoolStats& inoutStats) const
-{
- const VkDeviceSize unusableSize = GetUnusableSize();
-
- inoutStats.size += GetSize();
- inoutStats.unusedSize += m_SumFreeSize + unusableSize;
- inoutStats.allocationCount += m_AllocationCount;
- inoutStats.unusedRangeCount += m_FreeCount;
- inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, GetUnusedRangeSizeMax());
-
- if(unusableSize > 0)
- {
- ++inoutStats.unusedRangeCount;
- // Not updating inoutStats.unusedRangeSizeMax with unusableSize because this space is not available for allocations.
- }
-}
-
-#if VMA_STATS_STRING_ENABLED
-
-void VmaBlockMetadata_Buddy::PrintDetailedMap(class VmaJsonWriter& json) const
-{
- // TODO optimize
- VmaStatInfo stat;
- CalcAllocationStatInfo(stat);
-
- PrintDetailedMap_Begin(
- json,
- stat.unusedBytes,
- stat.allocationCount,
- stat.unusedRangeCount);
-
- PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
-
- const VkDeviceSize unusableSize = GetUnusableSize();
- if(unusableSize > 0)
- {
- PrintDetailedMap_UnusedRange(json,
- m_UsableSize, // offset
- unusableSize); // size
- }
-
- PrintDetailedMap_End(json);
-}
-
-#endif // #if VMA_STATS_STRING_ENABLED
-
-bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
- uint32_t currentFrameIndex,
- uint32_t frameInUseCount,
- VkDeviceSize bufferImageGranularity,
- VkDeviceSize allocSize,
- VkDeviceSize allocAlignment,
- bool upperAddress,
- VmaSuballocationType allocType,
- bool canMakeOtherLost,
- uint32_t strategy,
- VmaAllocationRequest* pAllocationRequest)
-{
- VMA_ASSERT(!upperAddress && "VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
-
- // Simple way to respect bufferImageGranularity. May be optimized some day.
- // Whenever it might be an OPTIMAL image...
- if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
- allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
- allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
- {
- allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
- allocSize = VMA_MAX(allocSize, bufferImageGranularity);
- }
-
- if(allocSize > m_UsableSize)
- {
- return false;
- }
-
- const uint32_t targetLevel = AllocSizeToLevel(allocSize);
- for(uint32_t level = targetLevel + 1; level--; )
- {
- for(Node* freeNode = m_FreeList[level].front;
- freeNode != VMA_NULL;
- freeNode = freeNode->free.next)
- {
- if(freeNode->offset % allocAlignment == 0)
- {
- pAllocationRequest->offset = freeNode->offset;
- pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
- pAllocationRequest->sumItemSize = 0;
- pAllocationRequest->itemsToMakeLostCount = 0;
- pAllocationRequest->customData = (void*)(uintptr_t)level;
- return true;
- }
- }
- }
-
- return false;
-}
-
-bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
- uint32_t currentFrameIndex,
- uint32_t frameInUseCount,
- VmaAllocationRequest* pAllocationRequest)
-{
- /*
- Lost allocations are not supported in buddy allocator at the moment.
- Support might be added in the future.
- */
- return pAllocationRequest->itemsToMakeLostCount == 0;
-}
-
-uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
-{
- /*
- Lost allocations are not supported in buddy allocator at the moment.
- Support might be added in the future.
- */
- return 0;
-}
-
-void VmaBlockMetadata_Buddy::Alloc(
- const VmaAllocationRequest& request,
- VmaSuballocationType type,
- VkDeviceSize allocSize,
- bool upperAddress,
- VmaAllocation hAllocation)
-{
- const uint32_t targetLevel = AllocSizeToLevel(allocSize);
- uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
-
- Node* currNode = m_FreeList[currLevel].front;
- VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
- while(currNode->offset != request.offset)
- {
- currNode = currNode->free.next;
- VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
- }
-
- // Go down, splitting free nodes.
- while(currLevel < targetLevel)
- {
- // currNode is already first free node at currLevel.
- // Remove it from list of free nodes at this currLevel.
- RemoveFromFreeList(currLevel, currNode);
-
- const uint32_t childrenLevel = currLevel + 1;
-
- // Create two free sub-nodes.
- Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
- Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
-
- leftChild->offset = currNode->offset;
- leftChild->type = Node::TYPE_FREE;
- leftChild->parent = currNode;
- leftChild->buddy = rightChild;
-
- rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
- rightChild->type = Node::TYPE_FREE;
- rightChild->parent = currNode;
- rightChild->buddy = leftChild;
-
- // Convert current currNode to split type.
- currNode->type = Node::TYPE_SPLIT;
- currNode->split.leftChild = leftChild;
-
- // Add child nodes to free list. Order is important!
- AddToFreeListFront(childrenLevel, rightChild);
- AddToFreeListFront(childrenLevel, leftChild);
-
- ++m_FreeCount;
- //m_SumFreeSize -= LevelToNodeSize(currLevel) % 2; // Useful only when level node sizes can be non power of 2.
- ++currLevel;
- currNode = m_FreeList[currLevel].front;
-
- /*
- We can be sure that currNode, as left child of node previously split,
- also fulfills the alignment requirement.
- */
- }
-
- // Remove from free list.
- VMA_ASSERT(currLevel == targetLevel &&
- currNode != VMA_NULL &&
- currNode->type == Node::TYPE_FREE);
- RemoveFromFreeList(currLevel, currNode);
-
- // Convert to allocation node.
- currNode->type = Node::TYPE_ALLOCATION;
- currNode->allocation.alloc = hAllocation;
-
- ++m_AllocationCount;
- --m_FreeCount;
- m_SumFreeSize -= allocSize;
-}
-
-void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
-{
- if(node->type == Node::TYPE_SPLIT)
- {
- DeleteNode(node->split.leftChild->buddy);
- DeleteNode(node->split.leftChild);
- }
-
- vma_delete(GetAllocationCallbacks(), node);
-}
-
-bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const
-{
- VMA_VALIDATE(level < m_LevelCount);
- VMA_VALIDATE(curr->parent == parent);
- VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
- VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
- switch(curr->type)
- {
- case Node::TYPE_FREE:
- // curr->free.prev, next are validated separately.
- ctx.calculatedSumFreeSize += levelNodeSize;
- ++ctx.calculatedFreeCount;
- break;
- case Node::TYPE_ALLOCATION:
- ++ctx.calculatedAllocationCount;
- ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
- VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
- break;
- case Node::TYPE_SPLIT:
- {
- const uint32_t childrenLevel = level + 1;
- const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
- const Node* const leftChild = curr->split.leftChild;
- VMA_VALIDATE(leftChild != VMA_NULL);
- VMA_VALIDATE(leftChild->offset == curr->offset);
- if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
- {
- VMA_VALIDATE(false && "ValidateNode for left child failed.");
- }
- const Node* const rightChild = leftChild->buddy;
- VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
- if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
- {
- VMA_VALIDATE(false && "ValidateNode for right child failed.");
- }
- }
- break;
- default:
- return false;
- }
-
- return true;
-}
-
-uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize) const
-{
- // I know this could be optimized somehow e.g. by using std::log2p1 from C++20.
- uint32_t level = 0;
- VkDeviceSize currLevelNodeSize = m_UsableSize;
- VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
- while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
- {
- ++level;
- currLevelNodeSize = nextLevelNodeSize;
- nextLevelNodeSize = currLevelNodeSize >> 1;
- }
- return level;
-}
-
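For intuition: with 256 MiB of usable size, a 5 MiB request keeps descending while the next smaller node still fits, stopping at level 5, i.e. an 8 MiB node (3 MiB of internal waste). A standalone sketch of the same walk, with illustrative sizes and an explicit level count instead of the class members:

#include <cstdint>
#include <cstdio>

// Standalone copy of the level computation for illustration; not tied to MAX_LEVELS here.
static uint32_t AllocSizeToLevelSketch(uint64_t usableSize, uint64_t allocSize, uint32_t levelCount)
{
    uint32_t level = 0;
    uint64_t nextLevelNodeSize = usableSize >> 1;
    while(allocSize <= nextLevelNodeSize && level + 1 < levelCount)
    {
        ++level;
        nextLevelNodeSize >>= 1;
    }
    return level;
}

int main()
{
    const uint64_t MiB = 1024ull * 1024;
    // 256 MiB block, 5 MiB request -> level 5, i.e. an 8 MiB node (3 MiB internal waste).
    std::printf("level = %u\n", AllocSizeToLevelSketch(256 * MiB, 5 * MiB, 16));
    return 0;
}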
-void VmaBlockMetadata_Buddy::FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset)
-{
- // Find node and level.
- Node* node = m_Root;
- VkDeviceSize nodeOffset = 0;
- uint32_t level = 0;
- VkDeviceSize levelNodeSize = LevelToNodeSize(0);
- while(node->type == Node::TYPE_SPLIT)
- {
- const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
- if(offset < nodeOffset + nextLevelSize)
- {
- node = node->split.leftChild;
- }
- else
- {
- node = node->split.leftChild->buddy;
- nodeOffset += nextLevelSize;
- }
- ++level;
- levelNodeSize = nextLevelSize;
- }
-
- VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
- VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
-
- ++m_FreeCount;
- --m_AllocationCount;
- m_SumFreeSize += alloc->GetSize();
-
- node->type = Node::TYPE_FREE;
-
- // Join free nodes if possible.
- while(level > 0 && node->buddy->type == Node::TYPE_FREE)
- {
- RemoveFromFreeList(level, node->buddy);
- Node* const parent = node->parent;
-
- vma_delete(GetAllocationCallbacks(), node->buddy);
- vma_delete(GetAllocationCallbacks(), node);
- parent->type = Node::TYPE_FREE;
-
- node = parent;
- --level;
- //m_SumFreeSize += LevelToNodeSize(level) % 2; // Useful only when level node sizes can be non power of 2.
- --m_FreeCount;
- }
-
- AddToFreeListFront(level, node);
-}
-
-void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const
-{
- switch(node->type)
- {
- case Node::TYPE_FREE:
- ++outInfo.unusedRangeCount;
- outInfo.unusedBytes += levelNodeSize;
- outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, levelNodeSize);
- outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, levelNodeSize);
- break;
- case Node::TYPE_ALLOCATION:
- {
- const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
- ++outInfo.allocationCount;
- outInfo.usedBytes += allocSize;
- outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, allocSize);
- outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, allocSize);
-
- const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
- if(unusedRangeSize > 0)
- {
- ++outInfo.unusedRangeCount;
- outInfo.unusedBytes += unusedRangeSize;
- outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
- outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
- }
- }
- break;
- case Node::TYPE_SPLIT:
- {
- const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
- const Node* const leftChild = node->split.leftChild;
- CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
- const Node* const rightChild = leftChild->buddy;
- CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
- }
- break;
- default:
- VMA_ASSERT(0);
- }
-}
-
-void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
-{
- VMA_ASSERT(node->type == Node::TYPE_FREE);
-
- // List is empty.
- Node* const frontNode = m_FreeList[level].front;
- if(frontNode == VMA_NULL)
- {
- VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
- node->free.prev = node->free.next = VMA_NULL;
- m_FreeList[level].front = m_FreeList[level].back = node;
- }
- else
- {
- VMA_ASSERT(frontNode->free.prev == VMA_NULL);
- node->free.prev = VMA_NULL;
- node->free.next = frontNode;
- frontNode->free.prev = node;
- m_FreeList[level].front = node;
- }
-}
-
-void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
-{
- VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
-
- // It is at the front.
- if(node->free.prev == VMA_NULL)
- {
- VMA_ASSERT(m_FreeList[level].front == node);
- m_FreeList[level].front = node->free.next;
- }
- else
- {
- Node* const prevFreeNode = node->free.prev;
- VMA_ASSERT(prevFreeNode->free.next == node);
- prevFreeNode->free.next = node->free.next;
- }
-
- // It is at the back.
- if(node->free.next == VMA_NULL)
- {
- VMA_ASSERT(m_FreeList[level].back == node);
- m_FreeList[level].back = node->free.prev;
- }
- else
- {
- Node* const nextFreeNode = node->free.next;
- VMA_ASSERT(nextFreeNode->free.prev == node);
- nextFreeNode->free.prev = node->free.prev;
- }
-}
-
-#if VMA_STATS_STRING_ENABLED
-void VmaBlockMetadata_Buddy::PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const
-{
- switch(node->type)
- {
- case Node::TYPE_FREE:
- PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
- break;
- case Node::TYPE_ALLOCATION:
- {
- PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
- const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
- if(allocSize < levelNodeSize)
- {
- PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
- }
- }
- break;
- case Node::TYPE_SPLIT:
- {
- const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
- const Node* const leftChild = node->split.leftChild;
- PrintDetailedMapNode(json, leftChild, childrenNodeSize);
- const Node* const rightChild = leftChild->buddy;
- PrintDetailedMapNode(json, rightChild, childrenNodeSize);
- }
- break;
- default:
- VMA_ASSERT(0);
- }
-}
-#endif // #if VMA_STATS_STRING_ENABLED
-
-
-////////////////////////////////////////////////////////////////////////////////
-// class VmaDeviceMemoryBlock
-
-VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
- m_pMetadata(VMA_NULL),
- m_MemoryTypeIndex(UINT32_MAX),
- m_Id(0),
- m_hMemory(VK_NULL_HANDLE),
- m_MapCount(0),
- m_pMappedData(VMA_NULL)
-{
-}
-
-void VmaDeviceMemoryBlock::Init(
- VmaAllocator hAllocator,
- uint32_t newMemoryTypeIndex,
- VkDeviceMemory newMemory,
- VkDeviceSize newSize,
- uint32_t id,
- uint32_t algorithm)
-{
- VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
-
- m_MemoryTypeIndex = newMemoryTypeIndex;
- m_Id = id;
- m_hMemory = newMemory;
-
- switch(algorithm)
- {
- case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
- m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
- break;
- case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
- m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
- break;
- default:
- VMA_ASSERT(0);
- // Fall-through.
- case 0:
- m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
- }
- m_pMetadata->Init(newSize);
-}
-
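The metadata class selected here comes directly from the pool's creation flags. A minimal sketch of requesting the buddy metadata through the public API, assuming 'allocator' and 'memoryTypeIndex' already exist in the application:

#include "vk_mem_alloc.h"

// Sketch only: error handling omitted.
VmaPool CreateBuddyPoolSketch(VmaAllocator allocator, uint32_t memoryTypeIndex)
{
    VmaPoolCreateInfo poolInfo = {};
    poolInfo.memoryTypeIndex = memoryTypeIndex;
    poolInfo.flags = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT; // selects VmaBlockMetadata_Buddy above
    poolInfo.blockSize = 128ull * 1024 * 1024;            // a power of 2 keeps GetUnusableSize() == 0

    VmaPool pool = VK_NULL_HANDLE;
    vmaCreatePool(allocator, &poolInfo, &pool);           // flags == 0 would select VmaBlockMetadata_Generic
    return pool;
}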
-void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
-{
- // This is the most important assert in the entire library.
- // Hitting it means you have some memory leak - unreleased VmaAllocation objects.
- VMA_ASSERT(m_pMetadata->IsEmpty() && "Some allocations were not freed before destruction of this memory block!");
-
- VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
- allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
- m_hMemory = VK_NULL_HANDLE;
-
- vma_delete(allocator, m_pMetadata);
- m_pMetadata = VMA_NULL;
-}
-
-bool VmaDeviceMemoryBlock::Validate() const
-{
- VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
- (m_pMetadata->GetSize() != 0));
-
- return m_pMetadata->Validate();
-}
-
-VkResult VmaDeviceMemoryBlock::CheckCorruption(VmaAllocator hAllocator)
-{
- void* pData = nullptr;
- VkResult res = Map(hAllocator, 1, &pData);
- if(res != VK_SUCCESS)
- {
- return res;
- }
-
- res = m_pMetadata->CheckCorruption(pData);
-
- Unmap(hAllocator, 1);
-
- return res;
-}
-
-VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, uint32_t count, void** ppData)
-{
- if(count == 0)
- {
- return VK_SUCCESS;
- }
-
- VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
- if(m_MapCount != 0)
- {
- m_MapCount += count;
- VMA_ASSERT(m_pMappedData != VMA_NULL);
- if(ppData != VMA_NULL)
- {
- *ppData = m_pMappedData;
- }
- return VK_SUCCESS;
- }
- else
- {
- VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
- hAllocator->m_hDevice,
- m_hMemory,
- 0, // offset
- VK_WHOLE_SIZE,
- 0, // flags
- &m_pMappedData);
- if(result == VK_SUCCESS)
- {
- if(ppData != VMA_NULL)
- {
- *ppData = m_pMappedData;
- }
- m_MapCount = count;
- }
- return result;
- }
-}
-
-void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator, uint32_t count)
-{
- if(count == 0)
- {
- return;
- }
-
- VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
- if(m_MapCount >= count)
- {
- m_MapCount -= count;
- if(m_MapCount == 0)
- {
- m_pMappedData = VMA_NULL;
- (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
- }
- }
- else
- {
- VMA_ASSERT(0 && "VkDeviceMemory block is being unmapped while it was not previously mapped.");
- }
-}
-
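Map() and Unmap() are reference counted per block, so nested mappings share a single vkMapMemory call. A sketch of what that pairing looks like from the public API, assuming a host-visible allocation created elsewhere:

#include <cstring>
#include "vk_mem_alloc.h"

// Sketch only: 'allocation' must live in HOST_VISIBLE memory.
void UploadTwiceMappedSketch(VmaAllocator allocator, VmaAllocation allocation)
{
    const char srcData[] = "example payload";
    void* p1 = nullptr;
    void* p2 = nullptr;
    vmaMapMemory(allocator, allocation, &p1);  // first map of the block -> vkMapMemory
    vmaMapMemory(allocator, allocation, &p2);  // nested map -> only bumps m_MapCount, p2 == p1
    std::memcpy(p1, srcData, sizeof(srcData));
    vmaUnmapMemory(allocator, allocation);     // m_MapCount 2 -> 1, memory stays mapped
    vmaUnmapMemory(allocator, allocation);     // m_MapCount 1 -> 0 -> vkUnmapMemory
}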
-VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
-{
- VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
- VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
-
- void* pData;
- VkResult res = Map(hAllocator, 1, &pData);
- if(res != VK_SUCCESS)
- {
- return res;
- }
-
- VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
- VmaWriteMagicValue(pData, allocOffset + allocSize);
-
- Unmap(hAllocator, 1);
-
- return VK_SUCCESS;
-}
-
-VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
-{
- VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
- VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
-
- void* pData;
- VkResult res = Map(hAllocator, 1, &pData);
- if(res != VK_SUCCESS)
- {
- return res;
- }
-
- if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
- {
- VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
- }
- else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
- {
- VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
- }
-
- Unmap(hAllocator, 1);
-
- return VK_SUCCESS;
-}
-
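Both helpers above are active only when the library is compiled with a nonzero debug margin and corruption detection enabled. A sketch of turning that on and sweeping all memory types; the margin value is illustrative:

// In exactly one .cpp file, before the implementation include:
#define VMA_DEBUG_MARGIN 16              // guard bytes around every allocation (must be a multiple of 4)
#define VMA_DEBUG_DETECT_CORRUPTION 1    // fill the margins with a magic value and check it on free
#define VMA_IMPLEMENTATION
#include "vk_mem_alloc.h"

// Later, e.g. once per frame in a debug build:
VkResult res = vmaCheckCorruption(allocator, UINT32_MAX); // UINT32_MAX = scan all memory types
// VK_ERROR_FEATURE_NOT_PRESENT here means no memory type had corruption detection enabled.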
-VkResult VmaDeviceMemoryBlock::BindBufferMemory(
- const VmaAllocator hAllocator,
- const VmaAllocation hAllocation,
- VkBuffer hBuffer)
-{
- VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
- hAllocation->GetBlock() == this);
- // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously on the same VkDeviceMemory from multiple threads.
- VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
- return hAllocator->GetVulkanFunctions().vkBindBufferMemory(
- hAllocator->m_hDevice,
- hBuffer,
- m_hMemory,
- hAllocation->GetOffset());
-}
-
-VkResult VmaDeviceMemoryBlock::BindImageMemory(
- const VmaAllocator hAllocator,
- const VmaAllocation hAllocation,
- VkImage hImage)
-{
- VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
- hAllocation->GetBlock() == this);
- // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously on the same VkDeviceMemory from multiple threads.
- VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
- return hAllocator->GetVulkanFunctions().vkBindImageMemory(
- hAllocator->m_hDevice,
- hImage,
- m_hMemory,
- hAllocation->GetOffset());
-}
-
-static void InitStatInfo(VmaStatInfo& outInfo)
-{
- memset(&outInfo, 0, sizeof(outInfo));
- outInfo.allocationSizeMin = UINT64_MAX;
- outInfo.unusedRangeSizeMin = UINT64_MAX;
-}
-
-// Adds statistics srcInfo into inoutInfo, like: inoutInfo += srcInfo.
-static void VmaAddStatInfo(VmaStatInfo& inoutInfo, const VmaStatInfo& srcInfo)
-{
- inoutInfo.blockCount += srcInfo.blockCount;
- inoutInfo.allocationCount += srcInfo.allocationCount;
- inoutInfo.unusedRangeCount += srcInfo.unusedRangeCount;
- inoutInfo.usedBytes += srcInfo.usedBytes;
- inoutInfo.unusedBytes += srcInfo.unusedBytes;
- inoutInfo.allocationSizeMin = VMA_MIN(inoutInfo.allocationSizeMin, srcInfo.allocationSizeMin);
- inoutInfo.allocationSizeMax = VMA_MAX(inoutInfo.allocationSizeMax, srcInfo.allocationSizeMax);
- inoutInfo.unusedRangeSizeMin = VMA_MIN(inoutInfo.unusedRangeSizeMin, srcInfo.unusedRangeSizeMin);
- inoutInfo.unusedRangeSizeMax = VMA_MAX(inoutInfo.unusedRangeSizeMax, srcInfo.unusedRangeSizeMax);
-}
-
-static void VmaPostprocessCalcStatInfo(VmaStatInfo& inoutInfo)
-{
- inoutInfo.allocationSizeAvg = (inoutInfo.allocationCount > 0) ?
- VmaRoundDiv<VkDeviceSize>(inoutInfo.usedBytes, inoutInfo.allocationCount) : 0;
- inoutInfo.unusedRangeSizeAvg = (inoutInfo.unusedRangeCount > 0) ?
- VmaRoundDiv<VkDeviceSize>(inoutInfo.unusedBytes, inoutInfo.unusedRangeCount) : 0;
-}
-
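These helpers feed the public statistics query: per-block VmaStatInfo values are summed with VmaAddStatInfo() and the averages are filled in by VmaPostprocessCalcStatInfo(). A sketch of reading the rolled-up totals, assuming an existing 'allocator':

#include <cstdio>
#include "vk_mem_alloc.h"

// Sketch only.
void PrintTotalsSketch(VmaAllocator allocator)
{
    VmaStats stats;
    vmaCalculateStats(allocator, &stats);
    // stats.total is accumulated block by block with VmaAddStatInfo() and then finished
    // with VmaPostprocessCalcStatInfo(), so the *Avg fields are valid only afterwards.
    std::printf("used: %llu bytes in %u allocations\n",
        (unsigned long long)stats.total.usedBytes, stats.total.allocationCount);
}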
-VmaPool_T::VmaPool_T(
- VmaAllocator hAllocator,
- const VmaPoolCreateInfo& createInfo,
- VkDeviceSize preferredBlockSize) :
- m_BlockVector(
- hAllocator,
- createInfo.memoryTypeIndex,
- createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
- createInfo.minBlockCount,
- createInfo.maxBlockCount,
- (createInfo.flags & VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT) != 0 ? 1 : hAllocator->GetBufferImageGranularity(),
- createInfo.frameInUseCount,
- true, // isCustomPool
- createInfo.blockSize != 0, // explicitBlockSize
- createInfo.flags & VMA_POOL_CREATE_ALGORITHM_MASK), // algorithm
- m_Id(0)
-{
-}
-
-VmaPool_T::~VmaPool_T()
-{
-}
-
-#if VMA_STATS_STRING_ENABLED
-
-#endif // #if VMA_STATS_STRING_ENABLED
-
-VmaBlockVector::VmaBlockVector(
- VmaAllocator hAllocator,
- uint32_t memoryTypeIndex,
- VkDeviceSize preferredBlockSize,
- size_t minBlockCount,
- size_t maxBlockCount,
- VkDeviceSize bufferImageGranularity,
- uint32_t frameInUseCount,
- bool isCustomPool,
- bool explicitBlockSize,
- uint32_t algorithm) :
- m_hAllocator(hAllocator),
- m_MemoryTypeIndex(memoryTypeIndex),
- m_PreferredBlockSize(preferredBlockSize),
- m_MinBlockCount(minBlockCount),
- m_MaxBlockCount(maxBlockCount),
- m_BufferImageGranularity(bufferImageGranularity),
- m_FrameInUseCount(frameInUseCount),
- m_IsCustomPool(isCustomPool),
- m_ExplicitBlockSize(explicitBlockSize),
- m_Algorithm(algorithm),
- m_HasEmptyBlock(false),
- m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
- m_NextBlockId(0)
-{
-}
-
-VmaBlockVector::~VmaBlockVector()
-{
- for(size_t i = m_Blocks.size(); i--; )
- {
- m_Blocks[i]->Destroy(m_hAllocator);
- vma_delete(m_hAllocator, m_Blocks[i]);
- }
-}
-
-VkResult VmaBlockVector::CreateMinBlocks()
-{
- for(size_t i = 0; i < m_MinBlockCount; ++i)
- {
- VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
- if(res != VK_SUCCESS)
- {
- return res;
- }
- }
- return VK_SUCCESS;
-}
-
-void VmaBlockVector::GetPoolStats(VmaPoolStats* pStats)
-{
- VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
-
- const size_t blockCount = m_Blocks.size();
-
- pStats->size = 0;
- pStats->unusedSize = 0;
- pStats->allocationCount = 0;
- pStats->unusedRangeCount = 0;
- pStats->unusedRangeSizeMax = 0;
- pStats->blockCount = blockCount;
-
- for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
- {
- const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
- VMA_ASSERT(pBlock);
- VMA_HEAVY_ASSERT(pBlock->Validate());
- pBlock->m_pMetadata->AddPoolStats(*pStats);
- }
-}
-
-bool VmaBlockVector::IsCorruptionDetectionEnabled() const
-{
- const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
- return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
- (VMA_DEBUG_MARGIN > 0) &&
- (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
-}
-
-static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
-
-VkResult VmaBlockVector::Allocate(
- VmaPool hCurrentPool,
- uint32_t currentFrameIndex,
- VkDeviceSize size,
- VkDeviceSize alignment,
- const VmaAllocationCreateInfo& createInfo,
- VmaSuballocationType suballocType,
- size_t allocationCount,
- VmaAllocation* pAllocations)
-{
- size_t allocIndex;
- VkResult res = VK_SUCCESS;
-
- {
- VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
- for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
- {
- res = AllocatePage(
- hCurrentPool,
- currentFrameIndex,
- size,
- alignment,
- createInfo,
- suballocType,
- pAllocations + allocIndex);
- if(res != VK_SUCCESS)
- {
- break;
- }
- }
- }
-
- if(res != VK_SUCCESS)
- {
- // Free all already created allocations.
- while(allocIndex--)
- {
- Free(pAllocations[allocIndex]);
- }
- memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
- }
-
- return res;
-}
-
-VkResult VmaBlockVector::AllocatePage(
- VmaPool hCurrentPool,
- uint32_t currentFrameIndex,
- VkDeviceSize size,
- VkDeviceSize alignment,
- const VmaAllocationCreateInfo& createInfo,
- VmaSuballocationType suballocType,
- VmaAllocation* pAllocation)
-{
- const bool isUpperAddress = (createInfo.flags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0;
- bool canMakeOtherLost = (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) != 0;
- const bool mapped = (createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
- const bool isUserDataString = (createInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;
- const bool canCreateNewBlock =
- ((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0) &&
- (m_Blocks.size() < m_MaxBlockCount);
- uint32_t strategy = createInfo.flags & VMA_ALLOCATION_CREATE_STRATEGY_MASK;
-
- // If the linear algorithm is used, canMakeOtherLost is available only when used as a ring buffer,
- // which in turn is available only when maxBlockCount = 1.
- if(m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT && m_MaxBlockCount > 1)
- {
- canMakeOtherLost = false;
- }
-
- // Upper address can only be used with linear allocator and within single memory block.
- if(isUpperAddress &&
- (m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT || m_MaxBlockCount > 1))
- {
- return VK_ERROR_FEATURE_NOT_PRESENT;
- }
-
- // Validate strategy.
- switch(strategy)
- {
- case 0:
- strategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;
- break;
- case VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT:
- case VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT:
- case VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT:
- break;
- default:
- return VK_ERROR_FEATURE_NOT_PRESENT;
- }
-
- // Early reject: requested allocation size is larger than the maximum block size for this block vector.
- if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
- {
- return VK_ERROR_OUT_OF_DEVICE_MEMORY;
- }
-
- /*
- Under certain conditions, this whole section can be skipped for optimization, so
- we move on directly to trying to allocate with canMakeOtherLost. That's the case
- e.g. for custom pools with linear algorithm.
- */
- if(!canMakeOtherLost || canCreateNewBlock)
- {
- // 1. Search existing allocations. Try to allocate without making other allocations lost.
- VmaAllocationCreateFlags allocFlagsCopy = createInfo.flags;
- allocFlagsCopy &= ~VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
-
- if(m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT)
- {
- // Use only last block.
- if(!m_Blocks.empty())
- {
- VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks.back();
- VMA_ASSERT(pCurrBlock);
- VkResult res = AllocateFromBlock(
- pCurrBlock,
- hCurrentPool,
- currentFrameIndex,
- size,
- alignment,
- allocFlagsCopy,
- createInfo.pUserData,
- suballocType,
- strategy,
- pAllocation);
- if(res == VK_SUCCESS)
- {
- VMA_DEBUG_LOG(" Returned from last block #%u", (uint32_t)(m_Blocks.size() - 1));
- return VK_SUCCESS;
- }
- }
- }
- else
- {
- if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
- {
- // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
- for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
- {
- VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
- VMA_ASSERT(pCurrBlock);
- VkResult res = AllocateFromBlock(
- pCurrBlock,
- hCurrentPool,
- currentFrameIndex,
- size,
- alignment,
- allocFlagsCopy,
- createInfo.pUserData,
- suballocType,
- strategy,
- pAllocation);
- if(res == VK_SUCCESS)
- {
- VMA_DEBUG_LOG(" Returned from existing block #%u", (uint32_t)blockIndex);
- return VK_SUCCESS;
- }
- }
- }
- else // WORST_FIT, FIRST_FIT
- {
- // Backward order in m_Blocks - prefer blocks with largest amount of free space.
- for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
- {
- VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
- VMA_ASSERT(pCurrBlock);
- VkResult res = AllocateFromBlock(
- pCurrBlock,
- hCurrentPool,
- currentFrameIndex,
- size,
- alignment,
- allocFlagsCopy,
- createInfo.pUserData,
- suballocType,
- strategy,
- pAllocation);
- if(res == VK_SUCCESS)
- {
- VMA_DEBUG_LOG(" Returned from existing block #%u", (uint32_t)blockIndex);
- return VK_SUCCESS;
- }
- }
- }
- }
-
- // 2. Try to create new block.
- if(canCreateNewBlock)
- {
- // Calculate optimal size for new block.
- VkDeviceSize newBlockSize = m_PreferredBlockSize;
- uint32_t newBlockSizeShift = 0;
- const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
-
- if(!m_ExplicitBlockSize)
- {
- // Allocate 1/8, 1/4, 1/2 as first blocks.
- const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
- for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
- {
- const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
- if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
- {
- newBlockSize = smallerNewBlockSize;
- ++newBlockSizeShift;
- }
- else
- {
- break;
- }
- }
- }
-
- size_t newBlockIndex = 0;
- VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
- // Allocation of this size failed? Try 1/2, 1/4, 1/8 of m_PreferredBlockSize.
- if(!m_ExplicitBlockSize)
- {
- while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
- {
- const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
- if(smallerNewBlockSize >= size)
- {
- newBlockSize = smallerNewBlockSize;
- ++newBlockSizeShift;
- res = CreateBlock(newBlockSize, &newBlockIndex);
- }
- else
- {
- break;
- }
- }
- }
-
- if(res == VK_SUCCESS)
- {
- VmaDeviceMemoryBlock* const pBlock = m_Blocks[newBlockIndex];
- VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
-
- res = AllocateFromBlock(
- pBlock,
- hCurrentPool,
- currentFrameIndex,
- size,
- alignment,
- allocFlagsCopy,
- createInfo.pUserData,
- suballocType,
- strategy,
- pAllocation);
- if(res == VK_SUCCESS)
- {
- VMA_DEBUG_LOG(" Created new block Size=%llu", newBlockSize);
- return VK_SUCCESS;
- }
- else
- {
- // Allocation from new block failed, possibly due to VMA_DEBUG_MARGIN or alignment.
- return VK_ERROR_OUT_OF_DEVICE_MEMORY;
- }
- }
- }
- }
-
- // 3. Try to allocate from existing blocks with making other allocations lost.
- if(canMakeOtherLost)
- {
- uint32_t tryIndex = 0;
- for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
- {
- VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
- VmaAllocationRequest bestRequest = {};
- VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
-
- // 1. Search existing allocations.
- if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
- {
- // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
- for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
- {
- VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
- VMA_ASSERT(pCurrBlock);
- VmaAllocationRequest currRequest = {};
- if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
- currentFrameIndex,
- m_FrameInUseCount,
- m_BufferImageGranularity,
- size,
- alignment,
- (createInfo.flags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0,
- suballocType,
- canMakeOtherLost,
- strategy,
- &currRequest))
- {
- const VkDeviceSize currRequestCost = currRequest.CalcCost();
- if(pBestRequestBlock == VMA_NULL ||
- currRequestCost < bestRequestCost)
- {
- pBestRequestBlock = pCurrBlock;
- bestRequest = currRequest;
- bestRequestCost = currRequestCost;
-
- if(bestRequestCost == 0)
- {
- break;
- }
- }
- }
- }
- }
- else // WORST_FIT, FIRST_FIT
- {
- // Backward order in m_Blocks - prefer blocks with largest amount of free space.
- for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
- {
- VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
- VMA_ASSERT(pCurrBlock);
- VmaAllocationRequest currRequest = {};
- if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
- currentFrameIndex,
- m_FrameInUseCount,
- m_BufferImageGranularity,
- size,
- alignment,
- (createInfo.flags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0,
- suballocType,
- canMakeOtherLost,
- strategy,
- &currRequest))
- {
- const VkDeviceSize currRequestCost = currRequest.CalcCost();
- if(pBestRequestBlock == VMA_NULL ||
- currRequestCost < bestRequestCost ||
- strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
- {
- pBestRequestBlock = pCurrBlock;
- bestRequest = currRequest;
- bestRequestCost = currRequestCost;
-
- if(bestRequestCost == 0 ||
- strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
- {
- break;
- }
- }
- }
- }
- }
-
- if(pBestRequestBlock != VMA_NULL)
- {
- if(mapped)
- {
- VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
- if(res != VK_SUCCESS)
- {
- return res;
- }
- }
-
- if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
- currentFrameIndex,
- m_FrameInUseCount,
- &bestRequest))
- {
- // We no longer have an empty Allocation.
- if(pBestRequestBlock->m_pMetadata->IsEmpty())
- {
- m_HasEmptyBlock = false;
- }
- // Allocate from this pBlock.
- *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
- pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, isUpperAddress, *pAllocation);
- (*pAllocation)->InitBlockAllocation(
- hCurrentPool,
- pBestRequestBlock,
- bestRequest.offset,
- alignment,
- size,
- suballocType,
- mapped,
- (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
- VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
- VMA_DEBUG_LOG(" Returned from existing allocation #%u", (uint32_t)blockIndex);
- (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
- if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
- {
- m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
- }
- if(IsCorruptionDetectionEnabled())
- {
- VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
- VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
- }
- return VK_SUCCESS;
- }
- // else: Some allocations must have been touched while we are here. Next try.
- }
- else
- {
- // Could not find place in any of the blocks - break outer loop.
- break;
- }
- }
- /* Maximum number of tries exceeded - a very unlikely event when many other
- threads are simultaneously touching allocations, making it impossible to mark
- them as lost at the same time as we try to allocate. */
- if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
- {
- return VK_ERROR_TOO_MANY_OBJECTS;
- }
- }
-
- return VK_ERROR_OUT_OF_DEVICE_MEMORY;
-}
-
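AllocatePage() thus falls back through three stages: existing blocks, a newly created block, and finally making other allocations lost. The strategy bits validated above steer the order in which existing blocks are searched; requesting one from application code is just another allocation flag. A sketch with assumed names:

#include "vk_mem_alloc.h"

// Sketch only: 'allocator' and a filled-in 'bufferCreateInfo' are assumed to exist.
VkResult AllocateWorstFitSketch(VmaAllocator allocator, const VkBufferCreateInfo& bufferCreateInfo,
                                VkBuffer* pBuffer, VmaAllocation* pAllocation)
{
    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
    // Search blocks largest-free-space-first; leaving the strategy bits at 0
    // falls back to BEST_FIT, exactly as in the switch at the top of AllocatePage().
    allocCreateInfo.flags = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT;

    return vmaCreateBuffer(allocator, &bufferCreateInfo, &allocCreateInfo,
                           pBuffer, pAllocation, nullptr);
}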
-void VmaBlockVector::Free(
- VmaAllocation hAllocation)
-{
- VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
-
- // Scope for lock.
- {
- VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
-
- VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
-
- if(IsCorruptionDetectionEnabled())
- {
- VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
- VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to validate magic value.");
- }
-
- if(hAllocation->IsPersistentMap())
- {
- pBlock->Unmap(m_hAllocator, 1);
- }
-
- pBlock->m_pMetadata->Free(hAllocation);
- VMA_HEAVY_ASSERT(pBlock->Validate());
-
- VMA_DEBUG_LOG(" Freed from MemoryTypeIndex=%u", memTypeIndex);
-
- // pBlock became empty after this deallocation.
- if(pBlock->m_pMetadata->IsEmpty())
- {
- // Already has empty Allocation. We don't want to have two, so delete this one.
- if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
- {
- pBlockToDelete = pBlock;
- Remove(pBlock);
- }
- // We now have first empty block.
- else
- {
- m_HasEmptyBlock = true;
- }
- }
- // pBlock didn't become empty, but we have another empty block - find and free that one.
- // (This is optional, heuristics.)
- else if(m_HasEmptyBlock)
- {
- VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
- if(pLastBlock->m_pMetadata->IsEmpty() && m_Blocks.size() > m_MinBlockCount)
- {
- pBlockToDelete = pLastBlock;
- m_Blocks.pop_back();
- m_HasEmptyBlock = false;
- }
- }
-
- IncrementallySortBlocks();
- }
-
- // Destruction of a free block. Deferred until this point, outside of the mutex
- // lock, for performance reasons.
- if(pBlockToDelete != VMA_NULL)
- {
- VMA_DEBUG_LOG(" Deleted empty allocation");
- pBlockToDelete->Destroy(m_hAllocator);
- vma_delete(m_hAllocator, pBlockToDelete);
- }
-}
-
-VkDeviceSize VmaBlockVector::CalcMaxBlockSize() const
-{
- VkDeviceSize result = 0;
- for(size_t i = m_Blocks.size(); i--; )
- {
- result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
- if(result >= m_PreferredBlockSize)
- {
- break;
- }
- }
- return result;
-}
-
-void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
-{
- for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
- {
- if(m_Blocks[blockIndex] == pBlock)
- {
- VmaVectorRemove(m_Blocks, blockIndex);
- return;
- }
- }
- VMA_ASSERT(0);
-}
-
-void VmaBlockVector::IncrementallySortBlocks()
-{
- if(m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT)
- {
- // Bubble sort only until first swap.
- for(size_t i = 1; i < m_Blocks.size(); ++i)
- {
- if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
- {
- VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
- return;
- }
- }
- }
-}
-
-VkResult VmaBlockVector::AllocateFromBlock(
- VmaDeviceMemoryBlock* pBlock,
- VmaPool hCurrentPool,
- uint32_t currentFrameIndex,
- VkDeviceSize size,
- VkDeviceSize alignment,
- VmaAllocationCreateFlags allocFlags,
- void* pUserData,
- VmaSuballocationType suballocType,
- uint32_t strategy,
- VmaAllocation* pAllocation)
-{
- VMA_ASSERT((allocFlags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) == 0);
- const bool isUpperAddress = (allocFlags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0;
- const bool mapped = (allocFlags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
- const bool isUserDataString = (allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;
-
- VmaAllocationRequest currRequest = {};
- if(pBlock->m_pMetadata->CreateAllocationRequest(
- currentFrameIndex,
- m_FrameInUseCount,
- m_BufferImageGranularity,
- size,
- alignment,
- isUpperAddress,
- suballocType,
- false, // canMakeOtherLost
- strategy,
- &currRequest))
- {
- // Allocate from pCurrBlock.
- VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
-
- if(mapped)
- {
- VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
- if(res != VK_SUCCESS)
- {
- return res;
- }
- }
-
- // We no longer have an empty Allocation.
- if(pBlock->m_pMetadata->IsEmpty())
- {
- m_HasEmptyBlock = false;
- }
-
- *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
- pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, isUpperAddress, *pAllocation);
- (*pAllocation)->InitBlockAllocation(
- hCurrentPool,
- pBlock,
- currRequest.offset,
- alignment,
- size,
- suballocType,
- mapped,
- (allocFlags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
- VMA_HEAVY_ASSERT(pBlock->Validate());
- (*pAllocation)->SetUserData(m_hAllocator, pUserData);
- if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
- {
- m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
- }
- if(IsCorruptionDetectionEnabled())
- {
- VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
- VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
- }
- return VK_SUCCESS;
- }
- return VK_ERROR_OUT_OF_DEVICE_MEMORY;
-}
-
-VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex)
-{
- VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
- allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
- allocInfo.allocationSize = blockSize;
- VkDeviceMemory mem = VK_NULL_HANDLE;
- VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
- if(res < 0)
- {
- return res;
- }
-
- // New VkDeviceMemory successfully created.
-
- // Create a new block object for it.
- VmaDeviceMemoryBlock* const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
- pBlock->Init(
- m_hAllocator,
- m_MemoryTypeIndex,
- mem,
- allocInfo.allocationSize,
- m_NextBlockId++,
- m_Algorithm);
-
- m_Blocks.push_back(pBlock);
- if(pNewBlockIndex != VMA_NULL)
- {
- *pNewBlockIndex = m_Blocks.size() - 1;
- }
-
- return VK_SUCCESS;
-}
-
-void VmaBlockVector::ApplyDefragmentationMovesCpu(
- class VmaBlockVectorDefragmentationContext* pDefragCtx,
- const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
-{
- const size_t blockCount = m_Blocks.size();
- const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);
-
- enum BLOCK_FLAG
- {
- BLOCK_FLAG_USED = 0x00000001,
- BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
- };
-
- struct BlockInfo
- {
- uint32_t flags;
- void* pMappedData;
- };
- VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
- blockInfo(blockCount, VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
- memset(blockInfo.data(), 0, blockCount * sizeof(BlockInfo));
-
- // Go over all moves. Mark blocks that are used with BLOCK_FLAG_USED.
- const size_t moveCount = moves.size();
- for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
- {
- const VmaDefragmentationMove& move = moves[moveIndex];
- blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
- blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
- }
-
- VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
-
- // Go over all blocks. Get mapped pointer or map if necessary.
- for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
- {
- BlockInfo& currBlockInfo = blockInfo[blockIndex];
- VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
- if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
- {
- currBlockInfo.pMappedData = pBlock->GetMappedData();
- // If it is not already mapped, map it now.
- if(currBlockInfo.pMappedData == VMA_NULL)
- {
- pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
- if(pDefragCtx->res == VK_SUCCESS)
- {
- currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
- }
- }
- }
- }
-
- // Go over all moves. Do actual data transfer.
- if(pDefragCtx->res == VK_SUCCESS)
- {
- const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
- VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
-
- for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
- {
- const VmaDefragmentationMove& move = moves[moveIndex];
-
- const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
- const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];
-
- VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);
-
- // Invalidate source.
- if(isNonCoherent)
- {
- VmaDeviceMemoryBlock* const pSrcBlock = m_Blocks[move.srcBlockIndex];
- memRange.memory = pSrcBlock->GetDeviceMemory();
- memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
- memRange.size = VMA_MIN(
- VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
- pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
- (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
- }
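- // Worked example with illustrative numbers: with nonCoherentAtomSize = 64,
- // srcOffset = 100 and size = 40, the invalidated range becomes
- // offset = VmaAlignDown(100, 64) = 64 and size = VmaAlignUp(40 + 36, 64) = 128,
- // clamped to the remaining block size. The flush of the destination below is aligned the same way.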
-
- // THE PLACE WHERE ACTUAL DATA COPY HAPPENS.
- memmove(
- reinterpret_cast<char*>(dstBlockInfo.pMappedData) + move.dstOffset,
- reinterpret_cast<char*>(srcBlockInfo.pMappedData) + move.srcOffset,
- static_cast<size_t>(move.size));
-
- if(IsCorruptionDetectionEnabled())
- {
- VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
- VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
- }
-
- // Flush destination.
- if(isNonCoherent)
- {
- VmaDeviceMemoryBlock* const pDstBlock = m_Blocks[move.dstBlockIndex];
- memRange.memory = pDstBlock->GetDeviceMemory();
- memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
- memRange.size = VMA_MIN(
- VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
- pDstBlock->m_pMetadata->GetSize() - memRange.offset);
- (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
- }
- }
- }
-
- // Go over all blocks in reverse order. Unmap those that were mapped just for defragmentation.
- // This is done regardless of whether pDefragCtx->res == VK_SUCCESS.
- for(size_t blockIndex = blockCount; blockIndex--; )
- {
- const BlockInfo& currBlockInfo = blockInfo[blockIndex];
- if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
- {
- VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
- pBlock->Unmap(m_hAllocator, 1);
- }
- }
-}
-
-void VmaBlockVector::ApplyDefragmentationMovesGpu(
- class VmaBlockVectorDefragmentationContext* pDefragCtx,
- const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
- VkCommandBuffer commandBuffer)
-{
- const size_t blockCount = m_Blocks.size();
-
- pDefragCtx->blockContexts.resize(blockCount);
- memset(pDefragCtx->blockContexts.data(), 0, blockCount * sizeof(VmaBlockDefragmentationContext));
-
- // Go over all moves. Mark blocks that are used with BLOCK_FLAG_USED.
- const size_t moveCount = moves.size();
- for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
- {
- const VmaDefragmentationMove& move = moves[moveIndex];
- pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
- pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
- }
-
- VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
-
- // Go over all blocks. Create and bind a buffer for the whole block if necessary.
- {
- VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
- bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
- VK_BUFFER_USAGE_TRANSFER_DST_BIT;
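- // One buffer with TRANSFER_SRC | TRANSFER_DST usage is created per used block and
- // bound to the block's whole VkDeviceMemory, so a single vkCmdCopyBuffer per move
- // (below) can copy between any two blocks, or within one block.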
-
- for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
- {
- VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
- VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
- if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
- {
- bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
- pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
- m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
- if(pDefragCtx->res == VK_SUCCESS)
- {
- pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
- m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
- }
- }
- }
- }
-
- // Go over all moves. Post data transfer commands to command buffer.
- if(pDefragCtx->res == VK_SUCCESS)
- {
- const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
- VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
-
- for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
- {
- const VmaDefragmentationMove& move = moves[moveIndex];
-
- const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
- const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];
-
- VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);
-
- VkBufferCopy region = {
- move.srcOffset,
- move.dstOffset,
- move.size };
- (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
- commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, &region);
- }
- }
-
- // The buffers remain stored in the defrag context; returning VK_NOT_READY signals that
- // the recorded commands must finish executing before DefragmentationEnd() destroys them.
- if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
- {
- pDefragCtx->res = VK_NOT_READY;
- }
-}
-
-void VmaBlockVector::FreeEmptyBlocks(VmaDefragmentationStats* pDefragmentationStats)
-{
- m_HasEmptyBlock = false;
- for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
- {
- VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
- if(pBlock->m_pMetadata->IsEmpty())
- {
- if(m_Blocks.size() > m_MinBlockCount)
- {
- if(pDefragmentationStats != VMA_NULL)
- {
- ++pDefragmentationStats->deviceMemoryBlocksFreed;
- pDefragmentationStats->bytesFreed += pBlock->m_pMetadata->GetSize();
- }
-
- VmaVectorRemove(m_Blocks, blockIndex);
- pBlock->Destroy(m_hAllocator);
- vma_delete(m_hAllocator, pBlock);
- }
- else
- {
- m_HasEmptyBlock = true;
- }
- }
- }
-}
-
-#if VMA_STATS_STRING_ENABLED
-
-void VmaBlockVector::PrintDetailedMap(class VmaJsonWriter& json)
-{
- VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
-
- json.BeginObject();
-
- if(m_IsCustomPool)
- {
- json.WriteString("MemoryTypeIndex");
- json.WriteNumber(m_MemoryTypeIndex);
-
- json.WriteString("BlockSize");
- json.WriteNumber(m_PreferredBlockSize);
-
- json.WriteString("BlockCount");
- json.BeginObject(true);
- if(m_MinBlockCount > 0)
- {
- json.WriteString("Min");
- json.WriteNumber((uint64_t)m_MinBlockCount);
- }
- if(m_MaxBlockCount < SIZE_MAX)
- {
- json.WriteString("Max");
- json.WriteNumber((uint64_t)m_MaxBlockCount);
- }
- json.WriteString("Cur");
- json.WriteNumber((uint64_t)m_Blocks.size());
- json.EndObject();
-
- if(m_FrameInUseCount > 0)
- {
- json.WriteString("FrameInUseCount");
- json.WriteNumber(m_FrameInUseCount);
- }
-
- if(m_Algorithm != 0)
- {
- json.WriteString("Algorithm");
- json.WriteString(VmaAlgorithmToStr(m_Algorithm));
- }
- }
- else
- {
- json.WriteString("PreferredBlockSize");
- json.WriteNumber(m_PreferredBlockSize);
- }
-
- json.WriteString("Blocks");
- json.BeginObject();
- for(size_t i = 0; i < m_Blocks.size(); ++i)
- {
- json.BeginString();
- json.ContinueString(m_Blocks[i]->GetId());
- json.EndString();
-
- m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
- }
- json.EndObject();
-
- json.EndObject();
-}
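- // Illustrative shape of the JSON fragment produced above (custom pool case, values invented):
- //   { "MemoryTypeIndex": 2, "BlockSize": 268435456,
- //     "BlockCount": { "Min": 1, "Cur": 3 },
- //     "Blocks": { "0": { ... }, "1": { ... } } }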
-
-#endif // #if VMA_STATS_STRING_ENABLED
-
-void VmaBlockVector::Defragment(
- class VmaBlockVectorDefragmentationContext* pCtx,
- VmaDefragmentationStats* pStats,
- VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
- VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
- VkCommandBuffer commandBuffer)
-{
- pCtx->res = VK_SUCCESS;
-
- const VkMemoryPropertyFlags memPropFlags =
- m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
- const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
- const bool isHostCoherent = (memPropFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0;
-
- const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
- isHostVisible;
- const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
- (VMA_DEBUG_DETECT_CORRUPTION == 0 || !(isHostVisible && isHostCoherent));
-
- // There are options to defragment this memory type.
- if(canDefragmentOnCpu || canDefragmentOnGpu)
- {
- bool defragmentOnGpu;
- // There is only one option to defragment this memory type.
- if(canDefragmentOnGpu != canDefragmentOnCpu)
- {
- defragmentOnGpu = canDefragmentOnGpu;
- }
- // Both options are available: use heuristics to choose the better one.
- else
- {
- defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
- m_hAllocator->IsIntegratedGpu();
- }
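- // Rationale (informal): DEVICE_LOCAL memory, even when HOST_VISIBLE, is typically
- // write-combined and slow to read from the host, and on an integrated GPU the copy
- // goes through the same physical memory either way, so the GPU path is preferred.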
-
- bool overlappingMoveSupported = !defragmentOnGpu;
-
- if(m_hAllocator->m_UseMutex)
- {
- m_Mutex.LockWrite();
- pCtx->mutexLocked = true;
- }
-
- pCtx->Begin(overlappingMoveSupported);
-
- // Defragment.
-
- const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
- const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
- VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > moves =
- VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >(VmaStlAllocator<VmaDefragmentationMove>(m_hAllocator->GetAllocationCallbacks()));
- pCtx->res = pCtx->GetAlgorithm()->Defragment(moves, maxBytesToMove, maxAllocationsToMove);
-
- // Accumulate statistics.
- if(pStats != VMA_NULL)
- {
- const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
- const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
- pStats->bytesMoved += bytesMoved;
- pStats->allocationsMoved += allocationsMoved;
- VMA_ASSERT(bytesMoved <= maxBytesToMove);
- VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
- if(defragmentOnGpu)
- {
- maxGpuBytesToMove -= bytesMoved;
- maxGpuAllocationsToMove -= allocationsMoved;
- }
- else
- {
- maxCpuBytesToMove -= bytesMoved;
- maxCpuAllocationsToMove -= allocationsMoved;
- }
- }
-
- if(pCtx->res >= VK_SUCCESS)
- {
- if(defragmentOnGpu)
- {
- ApplyDefragmentationMovesGpu(pCtx, moves, commandBuffer);
- }
- else
- {
- ApplyDefragmentationMovesCpu(pCtx, moves);
- }
- }
- }
-}
-
-void VmaBlockVector::DefragmentationEnd(
- class VmaBlockVectorDefragmentationContext* pCtx,
- VmaDefragmentationStats* pStats)
-{
- // Destroy buffers.
- for(size_t blockIndex = pCtx->blockContexts.size(); blockIndex--; )
- {
- VmaBlockDefragmentationContext& blockCtx = pCtx->blockContexts[blockIndex];
- if(blockCtx.hBuffer)
- {
- (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(
- m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
- }
- }
-
- if(pCtx->res >= VK_SUCCESS)
- {
- FreeEmptyBlocks(pStats);
- }
-
- if(pCtx->mutexLocked)
- {
- VMA_ASSERT(m_hAllocator->m_UseMutex);
- m_Mutex.UnlockWrite();
- }
-}
-
-size_t VmaBlockVector::CalcAllocationCount() const
-{
- size_t result = 0;
- for(size_t i = 0; i < m_Blocks.size(); ++i)
- {
- result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
- }
- return result;
-}
-
-bool VmaBlockVector::IsBufferImageGranularityConflictPossible() const
-{
- if(m_BufferImageGranularity == 1)
- {
- return false;
- }
- VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
- for(size_t i = 0, count = m_Blocks.size(); i < count; ++i)
- {
- VmaDeviceMemoryBlock* const pBlock = m_Blocks[i];
- VMA_ASSERT(m_Algorithm == 0);
- VmaBlockMetadata_Generic* const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
- if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
- {
- return true;
- }
- }
- return false;
-}
-
-void VmaBlockVector::MakePoolAllocationsLost(
- uint32_t currentFrameIndex,
- size_t* pLostAllocationCount)
-{
- VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
- size_t lostAllocationCount = 0;
- for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
- {
- VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
- VMA_ASSERT(pBlock);
- lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
- }
- if(pLostAllocationCount != VMA_NULL)
- {
- *pLostAllocationCount = lostAllocationCount;
- }
-}
-
-VkResult VmaBlockVector::CheckCorruption()
-{
- if(!IsCorruptionDetectionEnabled())
- {
- return VK_ERROR_FEATURE_NOT_PRESENT;
- }
-
- VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
- for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
- {
- VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
- VMA_ASSERT(pBlock);
- VkResult res = pBlock->CheckCorruption(m_hAllocator);
- if(res != VK_SUCCESS)
- {
- return res;
- }
- }
- return VK_SUCCESS;
-}
-
-void VmaBlockVector::AddStats(VmaStats* pStats)
-{
- const uint32_t memTypeIndex = m_MemoryTypeIndex;
- const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
-
- VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
-
- for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
- {
- const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
- VMA_ASSERT(pBlock);
- VMA_HEAVY_ASSERT(pBlock->Validate());
- VmaStatInfo allocationStatInfo;
- pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
- VmaAddStatInfo(pStats->total, allocationStatInfo);
- VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
- VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
- }
-}
-
-////////////////////////////////////////////////////////////////////////////////
-// VmaDefragmentationAlgorithm_Generic members definition
-
-VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
- VmaAllocator hAllocator,
- VmaBlockVector* pBlockVector,
- uint32_t currentFrameIndex,
- bool overlappingMoveSupported) :
- VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
- m_AllocationCount(0),
- m_AllAllocations(false),
- m_BytesMoved(0),
- m_AllocationsMoved(0),
- m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
-{
- // Create block info for each block.
- const size_t blockCount = m_pBlockVector->m_Blocks.size();
- for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
- {
- BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
- pBlockInfo->m_OriginalBlockIndex = blockIndex;
- pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
- m_Blocks.push_back(pBlockInfo);
- }
-
- // Sort them by m_pBlock pointer value.
- VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
-}
-
-VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
-{
- for(size_t i = m_Blocks.size(); i--; )
- {
- vma_delete(m_hAllocator, m_Blocks[i]);
- }
-}
-
-void VmaDefragmentationAlgorithm_Generic::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
-{
- // Now as we are inside VmaBlockVector::m_Mutex, we can make final check if this allocation was not lost.
- if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
- {
- VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
- BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
- if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
- {
- AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
- (*it)->m_Allocations.push_back(allocInfo);
- }
- else
- {
- VMA_ASSERT(0);
- }
-
- ++m_AllocationCount;
- }
-}
-
-VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
- VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
- VkDeviceSize maxBytesToMove,
- uint32_t maxAllocationsToMove)
-{
- if(m_Blocks.empty())
- {
- return VK_SUCCESS;
- }
-
- // This is a choice based on research.
- // Option 1:
- uint32_t strategy = VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT;
- // Option 2:
- //uint32_t strategy = VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT;
- // Option 3:
- //uint32_t strategy = VMA_ALLOCATION_CREATE_STRATEGY_MIN_FRAGMENTATION_BIT;
-
- size_t srcBlockMinIndex = 0;
- // When FAST_ALGORITHM is used, only the last of the blocks that contain non-movable allocations (and the blocks after it) are used as sources.
- /*
- if(m_AlgorithmFlags & VMA_DEFRAGMENTATION_FAST_ALGORITHM_BIT)
- {
- const size_t blocksWithNonMovableCount = CalcBlocksWithNonMovableCount();
- if(blocksWithNonMovableCount > 0)
- {
- srcBlockMinIndex = blocksWithNonMovableCount - 1;
- }
- }
- */
-
- size_t srcBlockIndex = m_Blocks.size() - 1;
- size_t srcAllocIndex = SIZE_MAX;
- for(;;)
- {
- // 1. Find next allocation to move.
- // 1.1. Start from last to first m_Blocks - they are sorted from most "destination" to most "source".
- // 1.2. Then start from last to first m_Allocations.
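- // Overall idea: take allocations from the most "source" blocks (at the back of the sorted
- // array) and try to repack each one into an earlier block or an earlier offset, so that
- // the trailing blocks can become empty and later be freed.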
- while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
- {
- if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
- {
- // Finished: no more allocations to process.
- if(srcBlockIndex == srcBlockMinIndex)
- {
- return VK_SUCCESS;
- }
- else
- {
- --srcBlockIndex;
- srcAllocIndex = SIZE_MAX;
- }
- }
- else
- {
- srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
- }
- }
-
- BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
- AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
-
- const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
- const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
- const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
- const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
-
- // 2. Try to find a new place for this allocation in a preceding block or the current one.
- for(size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
- {
- BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
- VmaAllocationRequest dstAllocRequest;
- if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
- m_CurrentFrameIndex,
- m_pBlockVector->GetFrameInUseCount(),
- m_pBlockVector->GetBufferImageGranularity(),
- size,
- alignment,
- false, // upperAddress
- suballocType,
- false, // canMakeOtherLost
- strategy,
- &dstAllocRequest) &&
- MoveMakesSense(
- dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
- {
- VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
-
- // Reached limit on number of allocations or bytes to move.
- if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
- (m_BytesMoved + size > maxBytesToMove))
- {
- return VK_SUCCESS;
- }
-
- VmaDefragmentationMove move;
- move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
- move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
- move.srcOffset = srcOffset;
- move.dstOffset = dstAllocRequest.offset;
- move.size = size;
- moves.push_back(move);
-
- pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
- dstAllocRequest,
- suballocType,
- size,
- false, // upperAddress
- allocInfo.m_hAllocation);
- pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
-
- allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
-
- if(allocInfo.m_pChanged != VMA_NULL)
- {
- *allocInfo.m_pChanged = VK_TRUE;
- }
-
- ++m_AllocationsMoved;
- m_BytesMoved += size;
-
- VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
-
- break;
- }
- }
-
- // If not processed, this allocInfo remains in pSrcBlockInfo->m_Allocations for the next round.
-
- if(srcAllocIndex > 0)
- {
- --srcAllocIndex;
- }
- else
- {
- if(srcBlockIndex > 0)
- {
- --srcBlockIndex;
- srcAllocIndex = SIZE_MAX;
- }
- else
- {
- return VK_SUCCESS;
- }
- }
- }
-}
-
-size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount() const
-{
- size_t result = 0;
- for(size_t i = 0; i < m_Blocks.size(); ++i)
- {
- if(m_Blocks[i]->m_HasNonMovableAllocations)
- {
- ++result;
- }
- }
- return result;
-}
-
-VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
- VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
- VkDeviceSize maxBytesToMove,
- uint32_t maxAllocationsToMove)
-{
- if(!m_AllAllocations && m_AllocationCount == 0)
- {
- return VK_SUCCESS;
- }
-
- const size_t blockCount = m_Blocks.size();
- for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
- {
- BlockInfo* pBlockInfo = m_Blocks[blockIndex];
-
- if(m_AllAllocations)
- {
- VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
- for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
- it != pMetadata->m_Suballocations.end();
- ++it)
- {
- if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
- {
- AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
- pBlockInfo->m_Allocations.push_back(allocInfo);
- }
- }
- }
-
- pBlockInfo->CalcHasNonMovableAllocations();
-
- // This is a choice based on research.
- // Option 1:
- pBlockInfo->SortAllocationsByOffsetDescending();
- // Option 2:
- //pBlockInfo->SortAllocationsBySizeDescending();
- }
-
- // Sort m_Blocks this time by the main criterion, from most "destination" to most "source" blocks.
- VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
-
- // This is a choice based on research.
- const uint32_t roundCount = 2;
-
- // Execute defragmentation rounds (the main part).
- VkResult result = VK_SUCCESS;
- for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
- {
- result = DefragmentRound(moves, maxBytesToMove, maxAllocationsToMove);
- }
-
- return result;
-}
-
-bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
- size_t dstBlockIndex, VkDeviceSize dstOffset,
- size_t srcBlockIndex, VkDeviceSize srcOffset)
-{
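- // A move "makes sense" only if it puts the allocation at a strictly smaller
- // (blockIndex, offset) position, i.e. closer to the front of the sorted block list.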
- if(dstBlockIndex < srcBlockIndex)
- {
- return true;
- }
- if(dstBlockIndex > srcBlockIndex)
- {
- return false;
- }
- if(dstOffset < srcOffset)
- {
- return true;
- }
- return false;
-}
-
-////////////////////////////////////////////////////////////////////////////////
-// VmaDefragmentationAlgorithm_Fast
-
-VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
- VmaAllocator hAllocator,
- VmaBlockVector* pBlockVector,
- uint32_t currentFrameIndex,
- bool overlappingMoveSupported) :
- VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
- m_OverlappingMoveSupported(overlappingMoveSupported),
- m_AllocationCount(0),
- m_AllAllocations(false),
- m_BytesMoved(0),
- m_AllocationsMoved(0),
- m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
-{
- VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
-
-}
-
-VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
-{
-}
-
-VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
- VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
- VkDeviceSize maxBytesToMove,
- uint32_t maxAllocationsToMove)
-{
- VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);
-
- const size_t blockCount = m_pBlockVector->GetBlockCount();
- if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
- {
- return VK_SUCCESS;
- }
-
- PreprocessMetadata();
-
- // Sort blocks in order from most "destination" (least free space) to most "source" (most free space).
-
- m_BlockInfos.resize(blockCount);
- for(size_t i = 0; i < blockCount; ++i)
- {
- m_BlockInfos[i].origBlockIndex = i;
- }
-
- VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [this](const BlockInfo& lhs, const BlockInfo& rhs) -> bool {
- return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
- m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
- });
-
- // THE MAIN ALGORITHM
-
- FreeSpaceDatabase freeSpaceDb;
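- // Blocks were just sorted by ascending free size, so the blocks with the least free space
- // come first and act as destinations. Allocations are streamed through the blocks in that
- // order and packed towards the front - either into a gap recorded in freeSpaceDb or at the
- // running dstOffset cursor of the current destination block.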
-
- size_t dstBlockInfoIndex = 0;
- size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
- VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
- VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
- VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
- VkDeviceSize dstOffset = 0;
-
- bool end = false;
- for(size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
- {
- const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
- VmaDeviceMemoryBlock* const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
- VmaBlockMetadata_Generic* const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
- for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
- !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
- {
- VmaAllocation_T* const pAlloc = srcSuballocIt->hAllocation;
- const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
- const VkDeviceSize srcAllocSize = srcSuballocIt->size;
- if(m_AllocationsMoved == maxAllocationsToMove ||
- m_BytesMoved + srcAllocSize > maxBytesToMove)
- {
- end = true;
- break;
- }
- const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;
-
- // Try to place it in one of the free spaces recorded in the database.
- size_t freeSpaceInfoIndex;
- VkDeviceSize dstAllocOffset;
- if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
- freeSpaceInfoIndex, dstAllocOffset))
- {
- size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
- VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
- VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;
- VkDeviceSize freeSpaceBlockSize = pFreeSpaceMetadata->GetSize();
-
- // Same block
- if(freeSpaceInfoIndex == srcBlockInfoIndex)
- {
- VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
-
- // MOVE OPTION 1: Move the allocation inside the same block by decreasing offset.
-
- VmaSuballocation suballoc = *srcSuballocIt;
- suballoc.offset = dstAllocOffset;
- suballoc.hAllocation->ChangeOffset(dstAllocOffset);
- m_BytesMoved += srcAllocSize;
- ++m_AllocationsMoved;
-
- VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
- ++nextSuballocIt;
- pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
- srcSuballocIt = nextSuballocIt;
-
- InsertSuballoc(pFreeSpaceMetadata, suballoc);
-
- VmaDefragmentationMove move = {
- srcOrigBlockIndex, freeSpaceOrigBlockIndex,
- srcAllocOffset, dstAllocOffset,
- srcAllocSize };
- moves.push_back(move);
- }
- // Different block
- else
- {
- // MOVE OPTION 2: Move the allocation to a different block.
-
- VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);
-
- VmaSuballocation suballoc = *srcSuballocIt;
- suballoc.offset = dstAllocOffset;
- suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
- m_BytesMoved += srcAllocSize;
- ++m_AllocationsMoved;
-
- VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
- ++nextSuballocIt;
- pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
- srcSuballocIt = nextSuballocIt;
-
- InsertSuballoc(pFreeSpaceMetadata, suballoc);
-
- VmaDefragmentationMove move = {
- srcOrigBlockIndex, freeSpaceOrigBlockIndex,
- srcAllocOffset, dstAllocOffset,
- srcAllocSize };
- moves.push_back(move);
- }
- }
- else
- {
- dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);
-
- // If the allocation doesn't fit before the end of dstBlock, advance to the next block.
- while(dstBlockInfoIndex < srcBlockInfoIndex &&
- dstAllocOffset + srcAllocSize > dstBlockSize)
- {
- // But before that, register remaining free space at the end of dst block.
- freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);
-
- ++dstBlockInfoIndex;
- dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
- pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
- pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
- dstBlockSize = pDstMetadata->GetSize();
- dstOffset = 0;
- dstAllocOffset = 0;
- }
-
- // Same block
- if(dstBlockInfoIndex == srcBlockInfoIndex)
- {
- VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
-
- const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;
-
- bool skipOver = overlap;
- if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
- {
- // If the destination and source regions overlap, skip the move when it would shift
- // the allocation by less than 1/64 of its size.
- skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
- }
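- // Example with illustrative numbers: a 640-byte allocation is skipped unless the move
- // would shift it forward by at least 10 bytes (640 / 64).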
-
- if(skipOver)
- {
- freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);
-
- dstOffset = srcAllocOffset + srcAllocSize;
- ++srcSuballocIt;
- }
- // MOVE OPTION 1: Move the allocation inside the same block by decreasing offset.
- else
- {
- srcSuballocIt->offset = dstAllocOffset;
- srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
- dstOffset = dstAllocOffset + srcAllocSize;
- m_BytesMoved += srcAllocSize;
- ++m_AllocationsMoved;
- ++srcSuballocIt;
- VmaDefragmentationMove move = {
- srcOrigBlockIndex, dstOrigBlockIndex,
- srcAllocOffset, dstAllocOffset,
- srcAllocSize };
- moves.push_back(move);
- }
- }
- // Different block
- else
- {
- // MOVE OPTION 2: Move the allocation to a different block.
-
- VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
- VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);
-
- VmaSuballocation suballoc = *srcSuballocIt;
- suballoc.offset = dstAllocOffset;
- suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
- dstOffset = dstAllocOffset + srcAllocSize;
- m_BytesMoved += srcAllocSize;
- ++m_AllocationsMoved;
-
- VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
- ++nextSuballocIt;
- pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
- srcSuballocIt = nextSuballocIt;
-
- pDstMetadata->m_Suballocations.push_back(suballoc);
-
- VmaDefragmentationMove move = {
- srcOrigBlockIndex, dstOrigBlockIndex,
- srcAllocOffset, dstAllocOffset,
- srcAllocSize };
- moves.push_back(move);
- }
- }
- }
- }
-
- m_BlockInfos.clear();
-
- PostprocessMetadata();
-
- return VK_SUCCESS;
-}
-
-void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
-{
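- // Strip all FREE suballocations and reset the free-space bookkeeping. Defragment() then
- // works only on the remaining real allocations, and PostprocessMetadata() rebuilds the
- // free list and statistics afterwards.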
- const size_t blockCount = m_pBlockVector->GetBlockCount();
- for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
- {
- VmaBlockMetadata_Generic* const pMetadata =
- (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
- pMetadata->m_FreeCount = 0;
- pMetadata->m_SumFreeSize = pMetadata->GetSize();
- pMetadata->m_FreeSuballocationsBySize.clear();
- for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
- it != pMetadata->m_Suballocations.end(); )
- {
- if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
- {
- VmaSuballocationList::iterator nextIt = it;
- ++nextIt;
- pMetadata->m_Suballocations.erase(it);
- it = nextIt;
- }
- else
- {
- ++it;
- }
- }
- }
-}
-
-void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
-{
- const size_t blockCount = m_pBlockVector->GetBlockCount();
- for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
- {
- VmaBlockMetadata_Generic* const pMetadata =
- (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
- const VkDeviceSize blockSize = pMetadata->GetSize();
-
- // No allocations in this block - entire area is free.
- if(pMetadata->m_Suballocations.empty())
- {
- pMetadata->m_FreeCount = 1;
- //pMetadata->m_SumFreeSize is already set to blockSize.
- VmaSuballocation suballoc = {
- 0, // offset
- blockSize, // size
- VMA_NULL, // hAllocation
- VMA_SUBALLOCATION_TYPE_FREE };
- pMetadata->m_Suballocations.push_back(suballoc);
- pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
- }
- // There are some allocations in this block.
- else
- {
- VkDeviceSize offset = 0;
- VmaSuballocationList::iterator it;
- for(it = pMetadata->m_Suballocations.begin();
- it != pMetadata->m_Suballocations.end();
- ++it)
- {
- VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
- VMA_ASSERT(it->offset >= offset);
-
- // Need to insert preceding free space.
- if(it->offset > offset)
- {
- ++pMetadata->m_FreeCount;
- const VkDeviceSize freeSize = it->offset - offset;
- VmaSuballocation suballoc = {
- offset, // offset
- freeSize, // size
- VMA_NULL, // hAllocation
- VMA_SUBALLOCATION_TYPE_FREE };
- VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
- if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
- {
- pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
- }
- }
-
- pMetadata->m_SumFreeSize -= it->size;
- offset = it->offset + it->size;
- }
-
- // Need to insert trailing free space.
- if(offset < blockSize)
- {
- ++pMetadata->m_FreeCount;
- const VkDeviceSize freeSize = blockSize - offset;
- VmaSuballocation suballoc = {
- offset, // offset
- freeSize, // size
- VMA_NULL, // hAllocation
- VMA_SUBALLOCATION_TYPE_FREE };
- VMA_ASSERT(it == pMetadata->m_Suballocations.end());
- VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
- if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
- {
- pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
- }
- }
-
- VMA_SORT(
- pMetadata->m_FreeSuballocationsBySize.begin(),
- pMetadata->m_FreeSuballocationsBySize.end(),
- VmaSuballocationItemSizeLess());
- }
-
- VMA_HEAVY_ASSERT(pMetadata->Validate());
- }
-}
-
-void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata, const VmaSuballocation& suballoc)
-{
- // TODO: Optimize somehow. Remember iterator instead of searching for it linearly.
- VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
- // Advance to the first suballocation that does not lie before the new one,
- // then insert before it to keep the list sorted by offset.
- while(it != pMetadata->m_Suballocations.end())
- {
- if(it->offset < suballoc.offset)
- {
- ++it;
- }
- else
- {
- break;
- }
- }
- pMetadata->m_Suballocations.insert(it, suballoc);
-}
-
-////////////////////////////////////////////////////////////////////////////////
-// VmaBlockVectorDefragmentationContext
-
-VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
- VmaAllocator hAllocator,
- VmaPool hCustomPool,
- VmaBlockVector* pBlockVector,
- uint32_t currFrameIndex,
- uint32_t algorithmFlags) :
- res(VK_SUCCESS),
- mutexLocked(false),
- blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
- m_hAllocator(hAllocator),
- m_hCustomPool(hCustomPool),
- m_pBlockVector(pBlockVector),
- m_CurrFrameIndex(currFrameIndex),
- //m_AlgorithmFlags(algorithmFlags),
- m_pAlgorithm(VMA_NULL),
- m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
- m_AllAllocations(false)
-{
-}
-
-VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
-{
- vma_delete(m_hAllocator, m_pAlgorithm);
-}
-
-void VmaBlockVectorDefragmentationContext::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
-{
- AllocInfo info = { hAlloc, pChanged };
- m_Allocations.push_back(info);
-}
-
-void VmaBlockVectorDefragmentationContext::Begin(bool overlappingMoveSupported)
-{
- const bool allAllocations = m_AllAllocations ||
- m_Allocations.size() == m_pBlockVector->CalcAllocationCount();
-
- /********************************
- HERE IS THE CHOICE OF DEFRAGMENTATION ALGORITHM.
- ********************************/
-
- /*
- Fast algorithm is supported only when certain criteria are met:
- - VMA_DEBUG_MARGIN is 0.
- - All allocations in this block vector are moveable.
- - There is no possibility of image/buffer granularity conflict.
- */
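- // (The fast path packs allocations back to back using only their own alignment and never
- // re-checks bufferImageGranularity, hence the conflict-possibility check below.)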
- if(VMA_DEBUG_MARGIN == 0 &&
- allAllocations &&
- !m_pBlockVector->IsBufferImageGranularityConflictPossible())
- {
- m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
- m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
- }
- else
- {
- m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
- m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
- }
-
- if(allAllocations)
- {
- m_pAlgorithm->AddAll();
- }
- else
- {
- for(size_t i = 0, count = m_Allocations.size(); i < count; ++i)
- {
- m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
- }
- }
-}
-
-////////////////////////////////////////////////////////////////////////////////
-// VmaDefragmentationContext
-
-VmaDefragmentationContext_T::VmaDefragmentationContext_T(
- VmaAllocator hAllocator,
- uint32_t currFrameIndex,
- uint32_t flags,
- VmaDefragmentationStats* pStats) :
- m_hAllocator(hAllocator),
- m_CurrFrameIndex(currFrameIndex),
- m_Flags(flags),
- m_pStats(pStats),
- m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
-{
- memset(m_DefaultPoolContexts, 0, sizeof(m_DefaultPoolContexts));
-}
-
-VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
-{
- for(size_t i = m_CustomPoolContexts.size(); i--; )
- {
- VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
- pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
- vma_delete(m_hAllocator, pBlockVectorCtx);
- }
- for(size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
- {
- VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
- if(pBlockVectorCtx)
- {
- pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
- vma_delete(m_hAllocator, pBlockVectorCtx);
- }
- }
-}
-
-void VmaDefragmentationContext_T::AddPools(uint32_t poolCount, VmaPool* pPools)
-{
- for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
- {
- VmaPool pool = pPools[poolIndex];
- VMA_ASSERT(pool);
- // Pools with an algorithm other than the default one are not defragmented.
- if(pool->m_BlockVector.GetAlgorithm() == 0)
- {
- VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
-
- for(size_t i = m_CustomPoolContexts.size(); i--; )
- {
- if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
- {
- pBlockVectorDefragCtx = m_CustomPoolContexts[i];
- break;
- }
- }
-
- if(!pBlockVectorDefragCtx)
- {
- pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
- m_hAllocator,
- pool,
- &pool->m_BlockVector,
- m_CurrFrameIndex,
- m_Flags);
- m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
- }
-
- pBlockVectorDefragCtx->AddAll();
- }
- }
-}
-
-void VmaDefragmentationContext_T::AddAllocations(
- uint32_t allocationCount,
- VmaAllocation* pAllocations,
- VkBool32* pAllocationsChanged)
-{
- // Dispatch pAllocations among per-block-vector defragmentation contexts. Create them when necessary.
- for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
- {
- const VmaAllocation hAlloc = pAllocations[allocIndex];
- VMA_ASSERT(hAlloc);
- // DedicatedAlloc cannot be defragmented.
- if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
- // Lost allocation cannot be defragmented.
- (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
- {
- VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
-
- const VmaPool hAllocPool = hAlloc->GetPool();
- // This allocation belongs to a custom pool.
- if(hAllocPool != VK_NULL_HANDLE)
- {
- // Pools with an algorithm other than the default one are not defragmented.
- if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
- {
- for(size_t i = m_CustomPoolContexts.size(); i--; )
- {
- if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
- {
- pBlockVectorDefragCtx = m_CustomPoolContexts[i];
- break;
- }
- }
- if(!pBlockVectorDefragCtx)
- {
- pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
- m_hAllocator,
- hAllocPool,
- &hAllocPool->m_BlockVector,
- m_CurrFrameIndex,
- m_Flags);
- m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
- }
- }
- }
- // This allocation belongs to the default pool.
- else
- {
- const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
- pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
- if(!pBlockVectorDefragCtx)
- {
- pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
- m_hAllocator,
- VMA_NULL, // hCustomPool
- m_hAllocator->m_pBlockVectors[memTypeIndex],
- m_CurrFrameIndex,
- m_Flags);
- m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
- }
- }
-
- if(pBlockVectorDefragCtx)
- {
- VkBool32* const pChanged = (pAllocationsChanged != VMA_NULL) ?
- &pAllocationsChanged[allocIndex] : VMA_NULL;
- pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
- }
- }
- }
-}
-
-VkResult VmaDefragmentationContext_T::Defragment(
- VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
- VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
- VkCommandBuffer commandBuffer, VmaDefragmentationStats* pStats)
-{
- if(pStats)
- {
- memset(pStats, 0, sizeof(VmaDefragmentationStats));
- }
-
- if(commandBuffer == VK_NULL_HANDLE)
- {
- maxGpuBytesToMove = 0;
- maxGpuAllocationsToMove = 0;
- }
-
- VkResult res = VK_SUCCESS;
-
- // Process default pools.
- for(uint32_t memTypeIndex = 0;
- memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
- ++memTypeIndex)
- {
- VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
- if(pBlockVectorCtx)
- {
- VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
- pBlockVectorCtx->GetBlockVector()->Defragment(
- pBlockVectorCtx,
- pStats,
- maxCpuBytesToMove, maxCpuAllocationsToMove,
- maxGpuBytesToMove, maxGpuAllocationsToMove,
- commandBuffer);
- if(pBlockVectorCtx->res != VK_SUCCESS)
- {
- res = pBlockVectorCtx->res;
- }
- }
- }
-
- // Process custom pools.
- for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
- customCtxIndex < customCtxCount && res >= VK_SUCCESS;
- ++customCtxIndex)
- {
- VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
- VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
- pBlockVectorCtx->GetBlockVector()->Defragment(
- pBlockVectorCtx,
- pStats,
- maxCpuBytesToMove, maxCpuAllocationsToMove,
- maxGpuBytesToMove, maxGpuAllocationsToMove,
- commandBuffer);
- if(pBlockVectorCtx->res != VK_SUCCESS)
- {
- res = pBlockVectorCtx->res;
- }
- }
-
- return res;
-}
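- // Rough usage sketch (illustrative only, not part of this file): the machinery above is
- // normally driven through the public API along these lines, assuming an existing
- // VmaAllocator `allocator`, an array `allocs` of `allocCount` allocations, and an optional
- // command buffer `cmdBuf` that the caller records and submits for the GPU path:
- //
- //   VmaDefragmentationInfo2 info = {};
- //   info.allocationCount = allocCount;
- //   info.pAllocations = allocs;
- //   info.maxCpuBytesToMove = VK_WHOLE_SIZE;
- //   info.maxCpuAllocationsToMove = UINT32_MAX;
- //   info.maxGpuBytesToMove = VK_WHOLE_SIZE;
- //   info.maxGpuAllocationsToMove = UINT32_MAX;
- //   info.commandBuffer = cmdBuf; // VK_NULL_HANDLE disables the GPU path (see above)
- //
- //   VmaDefragmentationContext defragCtx = VK_NULL_HANDLE;
- //   vmaDefragmentationBegin(allocator, &info, VMA_NULL, &defragCtx);
- //   // ... if the GPU path was used, submit cmdBuf and wait for it to complete ...
- //   vmaDefragmentationEnd(allocator, defragCtx);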
-
-////////////////////////////////////////////////////////////////////////////////
-// VmaRecorder
-
-#if VMA_RECORDING_ENABLED
-
-VmaRecorder::VmaRecorder() :
- m_UseMutex(true),
- m_Flags(0),
- m_File(VMA_NULL),
- m_Freq(INT64_MAX),
- m_StartCounter(INT64_MAX)
-{
-}
-
-VkResult VmaRecorder::Init(const VmaRecordSettings& settings, bool useMutex)
-{
- m_UseMutex = useMutex;
- m_Flags = settings.flags;
-
- QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
- QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);
-
- // Open file for writing.
- errno_t err = fopen_s(&m_File, settings.pFilePath, "wb");
- if(err != 0)
- {
- return VK_ERROR_INITIALIZATION_FAILED;
- }
-
- // Write header.
- fprintf(m_File, "%s\n", "Vulkan Memory Allocator,Calls recording");
- fprintf(m_File, "%s\n", "1,5");
-
- return VK_SUCCESS;
-}
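- // From this point on, each Record*() method below appends one CSV line of the form:
- // threadId,time,frameIndex,functionName,args...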
-
-VmaRecorder::~VmaRecorder()
-{
- if(m_File != VMA_NULL)
- {
- fclose(m_File);
- }
-}
-
-void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
-{
- CallParams callParams;
- GetBasicParams(callParams);
-
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- fprintf(m_File, "%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
- Flush();
-}
-
-void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
-{
- CallParams callParams;
- GetBasicParams(callParams);
-
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- fprintf(m_File, "%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
- Flush();
-}
-
-void VmaRecorder::RecordCreatePool(uint32_t frameIndex, const VmaPoolCreateInfo& createInfo, VmaPool pool)
-{
- CallParams callParams;
- GetBasicParams(callParams);
-
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- fprintf(m_File, "%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
- createInfo.memoryTypeIndex,
- createInfo.flags,
- createInfo.blockSize,
- (uint64_t)createInfo.minBlockCount,
- (uint64_t)createInfo.maxBlockCount,
- createInfo.frameInUseCount,
- pool);
- Flush();
-}
-
-void VmaRecorder::RecordDestroyPool(uint32_t frameIndex, VmaPool pool)
-{
- CallParams callParams;
- GetBasicParams(callParams);
-
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- fprintf(m_File, "%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
- pool);
- Flush();
-}
-
-void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
- const VkMemoryRequirements& vkMemReq,
- const VmaAllocationCreateInfo& createInfo,
- VmaAllocation allocation)
-{
- // Now that we are inside VmaBlockVector::m_Mutex, we can make a final check whether this allocation was lost.
- GetBasicParams(callParams);
-
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
- fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
- vkMemReq.size,
- vkMemReq.alignment,
- vkMemReq.memoryTypeBits,
- createInfo.flags,
- createInfo.usage,
- createInfo.requiredFlags,
- createInfo.preferredFlags,
- createInfo.memoryTypeBits,
- createInfo.pool,
- allocation,
- userDataStr.GetString());
- Flush();
-}
-
-void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
- const VkMemoryRequirements& vkMemReq,
- const VmaAllocationCreateInfo& createInfo,
- uint64_t allocationCount,
- const VmaAllocation* pAllocations)
-{
- CallParams callParams;
- GetBasicParams(callParams);
-
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
- fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
- vkMemReq.size,
- vkMemReq.alignment,
- vkMemReq.memoryTypeBits,
- createInfo.flags,
- createInfo.usage,
- createInfo.requiredFlags,
- createInfo.preferredFlags,
- createInfo.memoryTypeBits,
- createInfo.pool);
- PrintPointerList(allocationCount, pAllocations);
- fprintf(m_File, ",%s\n", userDataStr.GetString());
- Flush();
-}
-
-void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
- const VkMemoryRequirements& vkMemReq,
- bool requiresDedicatedAllocation,
- bool prefersDedicatedAllocation,
- const VmaAllocationCreateInfo& createInfo,
- VmaAllocation allocation)
-{
- CallParams callParams;
- GetBasicParams(callParams);
-
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
- fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
- vkMemReq.size,
- vkMemReq.alignment,
- vkMemReq.memoryTypeBits,
- requiresDedicatedAllocation ? 1 : 0,
- prefersDedicatedAllocation ? 1 : 0,
- createInfo.flags,
- createInfo.usage,
- createInfo.requiredFlags,
- createInfo.preferredFlags,
- createInfo.memoryTypeBits,
- createInfo.pool,
- allocation,
- userDataStr.GetString());
- Flush();
-}
-
-void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
- const VkMemoryRequirements& vkMemReq,
- bool requiresDedicatedAllocation,
- bool prefersDedicatedAllocation,
- const VmaAllocationCreateInfo& createInfo,
- VmaAllocation allocation)
-{
- CallParams callParams;
- GetBasicParams(callParams);
-
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
- fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
- vkMemReq.size,
- vkMemReq.alignment,
- vkMemReq.memoryTypeBits,
- requiresDedicatedAllocation ? 1 : 0,
- prefersDedicatedAllocation ? 1 : 0,
- createInfo.flags,
- createInfo.usage,
- createInfo.requiredFlags,
- createInfo.preferredFlags,
- createInfo.memoryTypeBits,
- createInfo.pool,
- allocation,
- userDataStr.GetString());
- Flush();
-}
-
-void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
- VmaAllocation allocation)
-{
- CallParams callParams;
- GetBasicParams(callParams);
-
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- fprintf(m_File, "%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
- allocation);
- Flush();
-}
-
-void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
- uint64_t allocationCount,
- const VmaAllocation* pAllocations)
-{
- CallParams callParams;
- GetBasicParams(callParams);
-
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- fprintf(m_File, "%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
- PrintPointerList(allocationCount, pAllocations);
- fprintf(m_File, "\n");
- Flush();
-}
-
-void VmaRecorder::RecordResizeAllocation(
- uint32_t frameIndex,
- VmaAllocation allocation,
- VkDeviceSize newSize)
-{
- CallParams callParams;
- GetBasicParams(callParams);
-
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- fprintf(m_File, "%u,%.3f,%u,vmaResizeAllocation,%p,%llu\n", callParams.threadId, callParams.time, frameIndex,
- allocation, newSize);
- Flush();
-}
-
-void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
- VmaAllocation allocation,
- const void* pUserData)
-{
- CallParams callParams;
- GetBasicParams(callParams);
-
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- UserDataString userDataStr(
- allocation->IsUserDataString() ? VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT : 0,
- pUserData);
- fprintf(m_File, "%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
- allocation,
- userDataStr.GetString());
- Flush();
-}
-
-void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
- VmaAllocation allocation)
-{
- CallParams callParams;
- GetBasicParams(callParams);
-
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- fprintf(m_File, "%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
- allocation);
- Flush();
-}
-
-void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
- VmaAllocation allocation)
-{
- CallParams callParams;
- GetBasicParams(callParams);
-
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- fprintf(m_File, "%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
- allocation);
- Flush();
-}
-
-void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
- VmaAllocation allocation)
-{
- CallParams callParams;
- GetBasicParams(callParams);
-
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- fprintf(m_File, "%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
- allocation);
- Flush();
-}
-
-void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
- VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
-{
- CallParams callParams;
- GetBasicParams(callParams);
-
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- fprintf(m_File, "%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
- allocation,
- offset,
- size);
- Flush();
-}
-
-void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
- VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
-{
- CallParams callParams;
- GetBasicParams(callParams);
-
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- fprintf(m_File, "%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
- allocation,
- offset,
- size);
- Flush();
-}
-
-void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
- const VkBufferCreateInfo& bufCreateInfo,
- const VmaAllocationCreateInfo& allocCreateInfo,
- VmaAllocation allocation)
-{
- CallParams callParams;
- GetBasicParams(callParams);
-
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
- fprintf(m_File, "%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
- bufCreateInfo.flags,
- bufCreateInfo.size,
- bufCreateInfo.usage,
- bufCreateInfo.sharingMode,
- allocCreateInfo.flags,
- allocCreateInfo.usage,
- allocCreateInfo.requiredFlags,
- allocCreateInfo.preferredFlags,
- allocCreateInfo.memoryTypeBits,
- allocCreateInfo.pool,
- allocation,
- userDataStr.GetString());
- Flush();
-}
-
-void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
- const VkImageCreateInfo& imageCreateInfo,
- const VmaAllocationCreateInfo& allocCreateInfo,
- VmaAllocation allocation)
-{
- CallParams callParams;
- GetBasicParams(callParams);
-
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
- fprintf(m_File, "%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
- imageCreateInfo.flags,
- imageCreateInfo.imageType,
- imageCreateInfo.format,
- imageCreateInfo.extent.width,
- imageCreateInfo.extent.height,
- imageCreateInfo.extent.depth,
- imageCreateInfo.mipLevels,
- imageCreateInfo.arrayLayers,
- imageCreateInfo.samples,
- imageCreateInfo.tiling,
- imageCreateInfo.usage,
- imageCreateInfo.sharingMode,
- imageCreateInfo.initialLayout,
- allocCreateInfo.flags,
- allocCreateInfo.usage,
- allocCreateInfo.requiredFlags,
- allocCreateInfo.preferredFlags,
- allocCreateInfo.memoryTypeBits,
- allocCreateInfo.pool,
- allocation,
- userDataStr.GetString());
- Flush();
-}
-
-void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
- VmaAllocation allocation)
-{
- CallParams callParams;
- GetBasicParams(callParams);
-
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- fprintf(m_File, "%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
- allocation);
- Flush();
-}
-
-void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
- VmaAllocation allocation)
-{
- CallParams callParams;
- GetBasicParams(callParams);
-
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- fprintf(m_File, "%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
- allocation);
- Flush();
-}
-
-void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
- VmaAllocation allocation)
-{
- CallParams callParams;
- GetBasicParams(callParams);
-
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- fprintf(m_File, "%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
- allocation);
- Flush();
-}
-
-void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
- VmaAllocation allocation)
-{
- CallParams callParams;
- GetBasicParams(callParams);
-
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- fprintf(m_File, "%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
- allocation);
- Flush();
-}
-
-void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
- VmaPool pool)
-{
- CallParams callParams;
- GetBasicParams(callParams);
-
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- fprintf(m_File, "%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
- pool);
- Flush();
-}
-
-void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
- const VmaDefragmentationInfo2& info,
- VmaDefragmentationContext ctx)
-{
- CallParams callParams;
- GetBasicParams(callParams);
-
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
- info.flags);
- PrintPointerList(info.allocationCount, info.pAllocations);
- fprintf(m_File, ",");
- PrintPointerList(info.poolCount, info.pPools);
- fprintf(m_File, ",%llu,%u,%llu,%u,%p,%p\n",
- info.maxCpuBytesToMove,
- info.maxCpuAllocationsToMove,
- info.maxGpuBytesToMove,
- info.maxGpuAllocationsToMove,
- info.commandBuffer,
- ctx);
- Flush();
-}
-
-void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
- VmaDefragmentationContext ctx)
-{
- CallParams callParams;
- GetBasicParams(callParams);
-
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
- ctx);
- Flush();
-}
-
-VmaRecorder::UserDataString::UserDataString(VmaAllocationCreateFlags allocFlags, const void* pUserData)
-{
- if(pUserData != VMA_NULL)
- {
- if((allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0)
- {
- m_Str = (const char*)pUserData;
- }
- else
- {
- sprintf_s(m_PtrStr, "%p", pUserData);
- m_Str = m_PtrStr;
- }
- }
- else
- {
- m_Str = "";
- }
-}
-
-void VmaRecorder::WriteConfiguration(
- const VkPhysicalDeviceProperties& devProps,
- const VkPhysicalDeviceMemoryProperties& memProps,
- bool dedicatedAllocationExtensionEnabled)
-{
- fprintf(m_File, "Config,Begin\n");
-
- fprintf(m_File, "PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
- fprintf(m_File, "PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
- fprintf(m_File, "PhysicalDevice,vendorID,%u\n", devProps.vendorID);
- fprintf(m_File, "PhysicalDevice,deviceID,%u\n", devProps.deviceID);
- fprintf(m_File, "PhysicalDevice,deviceType,%u\n", devProps.deviceType);
- fprintf(m_File, "PhysicalDevice,deviceName,%s\n", devProps.deviceName);
-
- fprintf(m_File, "PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
- fprintf(m_File, "PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
- fprintf(m_File, "PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
-
- fprintf(m_File, "PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
- for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
- {
- fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
- fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
- }
- fprintf(m_File, "PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
- for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
- {
- fprintf(m_File, "PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
- fprintf(m_File, "PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
- }
-
- fprintf(m_File, "Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
-
- fprintf(m_File, "Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
- fprintf(m_File, "Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
- fprintf(m_File, "Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
- fprintf(m_File, "Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
- fprintf(m_File, "Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
- fprintf(m_File, "Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
- fprintf(m_File, "Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
- fprintf(m_File, "Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
- fprintf(m_File, "Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
-
- fprintf(m_File, "Config,End\n");
-}
-
-void VmaRecorder::GetBasicParams(CallParams& outParams)
-{
- outParams.threadId = GetCurrentThreadId();
-
- LARGE_INTEGER counter;
- QueryPerformanceCounter(&counter);
- outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
-}
-
-void VmaRecorder::PrintPointerList(uint64_t count, const VmaAllocation* pItems)
-{
- if(count)
- {
- fprintf(m_File, "%p", pItems[0]);
- for(uint64_t i = 1; i < count; ++i)
- {
- fprintf(m_File, " %p", pItems[i]);
- }
- }
-}
-
-void VmaRecorder::Flush()
-{
- if((m_Flags & VMA_RECORD_FLUSH_AFTER_CALL_BIT) != 0)
- {
- fflush(m_File);
- }
-}
-
-#endif // #if VMA_RECORDING_ENABLED
-
-////////////////////////////////////////////////////////////////////////////////
-// VmaAllocator_T
-
-VmaAllocator_T::VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo) :
- m_UseMutex((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT) == 0),
- m_UseKhrDedicatedAllocation((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0),
- m_hDevice(pCreateInfo->device),
- m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
- m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
- *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
- m_PreferredLargeHeapBlockSize(0),
- m_PhysicalDevice(pCreateInfo->physicalDevice),
- m_CurrentFrameIndex(0),
- m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks())),
- m_NextPoolId(0)
-#if VMA_RECORDING_ENABLED
- ,m_pRecorder(VMA_NULL)
-#endif
-{
- if(VMA_DEBUG_DETECT_CORRUPTION)
- {
- // Needs to be a multiple of uint32_t size because we are going to write VMA_CORRUPTION_DETECTION_MAGIC_VALUE to it.
- VMA_ASSERT(VMA_DEBUG_MARGIN % sizeof(uint32_t) == 0);
- }
-
- VMA_ASSERT(pCreateInfo->physicalDevice && pCreateInfo->device);
-
-#if !(VMA_DEDICATED_ALLOCATION)
- if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0)
- {
- VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
- }
-#endif
-
- memset(&m_DeviceMemoryCallbacks, 0, sizeof(m_DeviceMemoryCallbacks));
- memset(&m_PhysicalDeviceProperties, 0, sizeof(m_PhysicalDeviceProperties));
- memset(&m_MemProps, 0, sizeof(m_MemProps));
-
- memset(&m_pBlockVectors, 0, sizeof(m_pBlockVectors));
- memset(&m_pDedicatedAllocations, 0, sizeof(m_pDedicatedAllocations));
-
- for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
- {
- m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
- }
-
- if(pCreateInfo->pDeviceMemoryCallbacks != VMA_NULL)
- {
- m_DeviceMemoryCallbacks.pfnAllocate = pCreateInfo->pDeviceMemoryCallbacks->pfnAllocate;
- m_DeviceMemoryCallbacks.pfnFree = pCreateInfo->pDeviceMemoryCallbacks->pfnFree;
- }
-
- ImportVulkanFunctions(pCreateInfo->pVulkanFunctions);
-
- (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
- (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
-
- VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
- VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
- VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
- VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
-
- m_PreferredLargeHeapBlockSize = (pCreateInfo->preferredLargeHeapBlockSize != 0) ?
- pCreateInfo->preferredLargeHeapBlockSize : static_cast<VkDeviceSize>(VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
-
- if(pCreateInfo->pHeapSizeLimit != VMA_NULL)
- {
- for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
- {
- const VkDeviceSize limit = pCreateInfo->pHeapSizeLimit[heapIndex];
- if(limit != VK_WHOLE_SIZE)
- {
- m_HeapSizeLimit[heapIndex] = limit;
- if(limit < m_MemProps.memoryHeaps[heapIndex].size)
- {
- m_MemProps.memoryHeaps[heapIndex].size = limit;
- }
- }
- }
- }
-
- for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
- {
- const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
-
- m_pBlockVectors[memTypeIndex] = vma_new(this, VmaBlockVector)(
- this,
- memTypeIndex,
- preferredBlockSize,
- 0,
- SIZE_MAX,
- GetBufferImageGranularity(),
- pCreateInfo->frameInUseCount,
- false, // isCustomPool
- false, // explicitBlockSize
- false); // linearAlgorithm
- // No need to call m_pBlockVectors[memTypeIndex]->CreateMinBlocks here,
- // because minBlockCount is 0.
- m_pDedicatedAllocations[memTypeIndex] = vma_new(this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
-
- }
-}
-
-VkResult VmaAllocator_T::Init(const VmaAllocatorCreateInfo* pCreateInfo)
-{
- VkResult res = VK_SUCCESS;
-
- if(pCreateInfo->pRecordSettings != VMA_NULL &&
- !VmaStrIsEmpty(pCreateInfo->pRecordSettings->pFilePath))
- {
-#if VMA_RECORDING_ENABLED
- m_pRecorder = vma_new(this, VmaRecorder)();
- res = m_pRecorder->Init(*pCreateInfo->pRecordSettings, m_UseMutex);
- if(res != VK_SUCCESS)
- {
- return res;
- }
- m_pRecorder->WriteConfiguration(
- m_PhysicalDeviceProperties,
- m_MemProps,
- m_UseKhrDedicatedAllocation);
- m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
-#else
- VMA_ASSERT(0 && "VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
- return VK_ERROR_FEATURE_NOT_PRESENT;
-#endif
- }
-
- return res;
-}
-
-VmaAllocator_T::~VmaAllocator_T()
-{
-#if VMA_RECORDING_ENABLED
- if(m_pRecorder != VMA_NULL)
- {
- m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
- vma_delete(this, m_pRecorder);
- }
-#endif
-
- VMA_ASSERT(m_Pools.empty());
-
- for(size_t i = GetMemoryTypeCount(); i--; )
- {
- vma_delete(this, m_pDedicatedAllocations[i]);
- vma_delete(this, m_pBlockVectors[i]);
- }
-}
-
-void VmaAllocator_T::ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions)
-{
-#if VMA_STATIC_VULKAN_FUNCTIONS == 1
- m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
- m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
- m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
- m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
- m_VulkanFunctions.vkMapMemory = &vkMapMemory;
- m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
- m_VulkanFunctions.vkFlushMappedMemoryRanges = &vkFlushMappedMemoryRanges;
- m_VulkanFunctions.vkInvalidateMappedMemoryRanges = &vkInvalidateMappedMemoryRanges;
- m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
- m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
- m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
- m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
- m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
- m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
- m_VulkanFunctions.vkCreateImage = &vkCreateImage;
- m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
- m_VulkanFunctions.vkCmdCopyBuffer = &vkCmdCopyBuffer;
-#if VMA_DEDICATED_ALLOCATION
- if(m_UseKhrDedicatedAllocation)
- {
- m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
- (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetBufferMemoryRequirements2KHR");
- m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
- (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetImageMemoryRequirements2KHR");
- }
-#endif // #if VMA_DEDICATED_ALLOCATION
-#endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1
-
-#define VMA_COPY_IF_NOT_NULL(funcName) \
- if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;
-
- if(pVulkanFunctions != VMA_NULL)
- {
- VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
- VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
- VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
- VMA_COPY_IF_NOT_NULL(vkFreeMemory);
- VMA_COPY_IF_NOT_NULL(vkMapMemory);
- VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
- VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
- VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
- VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
- VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
- VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
- VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
- VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
- VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
- VMA_COPY_IF_NOT_NULL(vkCreateImage);
- VMA_COPY_IF_NOT_NULL(vkDestroyImage);
- VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);
-#if VMA_DEDICATED_ALLOCATION
- VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
- VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
-#endif
- }
-
-#undef VMA_COPY_IF_NOT_NULL
-
- // If these asserts are hit, you must either #define VMA_STATIC_VULKAN_FUNCTIONS 1
- // or pass valid pointers as VmaAllocatorCreateInfo::pVulkanFunctions.
- VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
- VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
- VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
- VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
- VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
- VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
- VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
- VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
- VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
- VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
- VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
- VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
- VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
- VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
- VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
- VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
- VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
-#if VMA_DEDICATED_ALLOCATION
- if(m_UseKhrDedicatedAllocation)
- {
- VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
- VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
- }
-#endif
-}
-
-VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
-{
- const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
- const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
- const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
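- // E.g. a 256 MiB heap (at or below VMA_SMALL_HEAP_MAX_SIZE) gets a preferred block
- // size of 256 MiB / 8 = 32 MiB; larger heaps use m_PreferredLargeHeapBlockSize instead.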
- return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
-}
-
-VkResult VmaAllocator_T::AllocateMemoryOfType(
- VkDeviceSize size,
- VkDeviceSize alignment,
- bool dedicatedAllocation,
- VkBuffer dedicatedBuffer,
- VkImage dedicatedImage,
- const VmaAllocationCreateInfo& createInfo,
- uint32_t memTypeIndex,
- VmaSuballocationType suballocType,
- size_t allocationCount,
- VmaAllocation* pAllocations)
-{
- VMA_ASSERT(pAllocations != VMA_NULL);
- VMA_DEBUG_LOG(" AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, vkMemReq.size);
-
- VmaAllocationCreateInfo finalCreateInfo = createInfo;
-
- // If memory type is not HOST_VISIBLE, disable MAPPED.
- if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
- (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
- {
- finalCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
- }
-
- VmaBlockVector* const blockVector = m_pBlockVectors[memTypeIndex];
- VMA_ASSERT(blockVector);
-
- const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
- bool preferDedicatedMemory =
- VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
- dedicatedAllocation ||
- // Heuristics: Allocate dedicated memory if requested size is greater than half of preferred block size.
- size > preferredBlockSize / 2;
-
- if(preferDedicatedMemory &&
- (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0 &&
- finalCreateInfo.pool == VK_NULL_HANDLE)
- {
- finalCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
- }
-
- if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0)
- {
- if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
- {
- return VK_ERROR_OUT_OF_DEVICE_MEMORY;
- }
- else
- {
- return AllocateDedicatedMemory(
- size,
- suballocType,
- memTypeIndex,
- (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
- (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
- finalCreateInfo.pUserData,
- dedicatedBuffer,
- dedicatedImage,
- allocationCount,
- pAllocations);
- }
- }
- else
- {
- VkResult res = blockVector->Allocate(
- VK_NULL_HANDLE, // hCurrentPool
- m_CurrentFrameIndex.load(),
- size,
- alignment,
- finalCreateInfo,
- suballocType,
- allocationCount,
- pAllocations);
- if(res == VK_SUCCESS)
- {
- return res;
- }
-
- // Block allocation failed. Try dedicated memory.
- if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
- {
- return VK_ERROR_OUT_OF_DEVICE_MEMORY;
- }
- else
- {
- res = AllocateDedicatedMemory(
- size,
- suballocType,
- memTypeIndex,
- (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
- (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
- finalCreateInfo.pUserData,
- dedicatedBuffer,
- dedicatedImage,
- allocationCount,
- pAllocations);
- if(res == VK_SUCCESS)
- {
- // Succeeded: AllocateDedicatedMemory function already filled pAllocations, nothing more to do here.
- VMA_DEBUG_LOG(" Allocated as DedicatedMemory");
- return VK_SUCCESS;
- }
- else
- {
- // Everything failed: Return error code.
- VMA_DEBUG_LOG(" vkAllocateMemory FAILED");
- return res;
- }
- }
- }
-}
-
-VkResult VmaAllocator_T::AllocateDedicatedMemory(
- VkDeviceSize size,
- VmaSuballocationType suballocType,
- uint32_t memTypeIndex,
- bool map,
- bool isUserDataString,
- void* pUserData,
- VkBuffer dedicatedBuffer,
- VkImage dedicatedImage,
- size_t allocationCount,
- VmaAllocation* pAllocations)
-{
- VMA_ASSERT(allocationCount > 0 && pAllocations);
-
- VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
- allocInfo.memoryTypeIndex = memTypeIndex;
- allocInfo.allocationSize = size;
-
-#if VMA_DEDICATED_ALLOCATION
- VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
- if(m_UseKhrDedicatedAllocation)
- {
- if(dedicatedBuffer != VK_NULL_HANDLE)
- {
- VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
- dedicatedAllocInfo.buffer = dedicatedBuffer;
- allocInfo.pNext = &dedicatedAllocInfo;
- }
- else if(dedicatedImage != VK_NULL_HANDLE)
- {
- dedicatedAllocInfo.image = dedicatedImage;
- allocInfo.pNext = &dedicatedAllocInfo;
- }
- }
-#endif // #if VMA_DEDICATED_ALLOCATION
-
- size_t allocIndex;
- VkResult res = VK_SUCCESS;
- for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
- {
- res = AllocateDedicatedMemoryPage(
- size,
- suballocType,
- memTypeIndex,
- allocInfo,
- map,
- isUserDataString,
- pUserData,
- pAllocations + allocIndex);
- if(res != VK_SUCCESS)
- {
- break;
- }
- }
-
- if(res == VK_SUCCESS)
- {
- // Register them in m_pDedicatedAllocations.
- {
- VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
- AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
- VMA_ASSERT(pDedicatedAllocations);
- for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
- {
- VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, pAllocations[allocIndex]);
- }
- }
-
- VMA_DEBUG_LOG(" Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
- }
- else
- {
- // Free all already created allocations.
- while(allocIndex--)
- {
- VmaAllocation currAlloc = pAllocations[allocIndex];
- VkDeviceMemory hMemory = currAlloc->GetMemory();
-
- /*
- There is no need to call this, because the Vulkan spec allows skipping vkUnmapMemory
- before vkFreeMemory.
-
- if(currAlloc->GetMappedData() != VMA_NULL)
- {
- (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
- }
- */
-
- FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
-
- currAlloc->SetUserData(this, VMA_NULL);
- vma_delete(this, currAlloc);
- }
-
- memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
- }
-
- return res;
-}
-
-VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
- VkDeviceSize size,
- VmaSuballocationType suballocType,
- uint32_t memTypeIndex,
- const VkMemoryAllocateInfo& allocInfo,
- bool map,
- bool isUserDataString,
- void* pUserData,
- VmaAllocation* pAllocation)
-{
- VkDeviceMemory hMemory = VK_NULL_HANDLE;
- VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
- if(res < 0)
- {
- VMA_DEBUG_LOG(" vkAllocateMemory FAILED");
- return res;
- }
-
- void* pMappedData = VMA_NULL;
- if(map)
- {
- res = (*m_VulkanFunctions.vkMapMemory)(
- m_hDevice,
- hMemory,
- 0,
- VK_WHOLE_SIZE,
- 0,
- &pMappedData);
- if(res < 0)
- {
- VMA_DEBUG_LOG(" vkMapMemory FAILED");
- FreeVulkanMemory(memTypeIndex, size, hMemory);
- return res;
- }
- }
-
- *pAllocation = vma_new(this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
- (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
- (*pAllocation)->SetUserData(this, pUserData);
- if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
- {
- FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
- }
-
- return VK_SUCCESS;
-}
-
-void VmaAllocator_T::GetBufferMemoryRequirements(
- VkBuffer hBuffer,
- VkMemoryRequirements& memReq,
- bool& requiresDedicatedAllocation,
- bool& prefersDedicatedAllocation) const
-{
-#if VMA_DEDICATED_ALLOCATION
- if(m_UseKhrDedicatedAllocation)
- {
- VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
- memReqInfo.buffer = hBuffer;
-
- VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
-
- VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
- memReq2.pNext = &memDedicatedReq;
-
- (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
-
- memReq = memReq2.memoryRequirements;
- requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
- prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
- }
- else
-#endif // #if VMA_DEDICATED_ALLOCATION
- {
- (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
- requiresDedicatedAllocation = false;
- prefersDedicatedAllocation = false;
- }
-}
-
-void VmaAllocator_T::GetImageMemoryRequirements(
- VkImage hImage,
- VkMemoryRequirements& memReq,
- bool& requiresDedicatedAllocation,
- bool& prefersDedicatedAllocation) const
-{
-#if VMA_DEDICATED_ALLOCATION
- if(m_UseKhrDedicatedAllocation)
- {
- VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
- memReqInfo.image = hImage;
-
- VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
-
- VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
- memReq2.pNext = &memDedicatedReq;
-
- (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
-
- memReq = memReq2.memoryRequirements;
- requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
- prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
- }
- else
-#endif // #if VMA_DEDICATED_ALLOCATION
- {
- (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
- requiresDedicatedAllocation = false;
- prefersDedicatedAllocation = false;
- }
-}
-
-VkResult VmaAllocator_T::AllocateMemory(
- const VkMemoryRequirements& vkMemReq,
- bool requiresDedicatedAllocation,
- bool prefersDedicatedAllocation,
- VkBuffer dedicatedBuffer,
- VkImage dedicatedImage,
- const VmaAllocationCreateInfo& createInfo,
- VmaSuballocationType suballocType,
- size_t allocationCount,
- VmaAllocation* pAllocations)
-{
- memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
-
- VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
-
- if(vkMemReq.size == 0)
- {
- return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- if((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0 &&
- (createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
- {
- VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
- return VK_ERROR_OUT_OF_DEVICE_MEMORY;
- }
- if((createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
- (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0)
- {
- VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
- return VK_ERROR_OUT_OF_DEVICE_MEMORY;
- }
- if(requiresDedicatedAllocation)
- {
- if((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
- {
- VMA_ASSERT(0 && "VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
- return VK_ERROR_OUT_OF_DEVICE_MEMORY;
- }
- if(createInfo.pool != VK_NULL_HANDLE)
- {
- VMA_ASSERT(0 && "Pool specified while dedicated allocation is required.");
- return VK_ERROR_OUT_OF_DEVICE_MEMORY;
- }
- }
- if((createInfo.pool != VK_NULL_HANDLE) &&
- ((createInfo.flags & (VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT)) != 0))
- {
- VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
- return VK_ERROR_OUT_OF_DEVICE_MEMORY;
- }
-
- if(createInfo.pool != VK_NULL_HANDLE)
- {
- const VkDeviceSize alignmentForPool = VMA_MAX(
- vkMemReq.alignment,
- GetMemoryTypeMinAlignment(createInfo.pool->m_BlockVector.GetMemoryTypeIndex()));
- return createInfo.pool->m_BlockVector.Allocate(
- createInfo.pool,
- m_CurrentFrameIndex.load(),
- vkMemReq.size,
- alignmentForPool,
- createInfo,
- suballocType,
- allocationCount,
- pAllocations);
- }
- else
- {
- // Bit mask of Vulkan memory types acceptable for this allocation.
- uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
- uint32_t memTypeIndex = UINT32_MAX;
- VkResult res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
- if(res == VK_SUCCESS)
- {
- VkDeviceSize alignmentForMemType = VMA_MAX(
- vkMemReq.alignment,
- GetMemoryTypeMinAlignment(memTypeIndex));
-
- res = AllocateMemoryOfType(
- vkMemReq.size,
- alignmentForMemType,
- requiresDedicatedAllocation || prefersDedicatedAllocation,
- dedicatedBuffer,
- dedicatedImage,
- createInfo,
- memTypeIndex,
- suballocType,
- allocationCount,
- pAllocations);
- // Succeeded on first try.
- if(res == VK_SUCCESS)
- {
- return res;
- }
- // Allocation from this memory type failed. Try other compatible memory types.
- else
- {
- for(;;)
- {
- // Remove old memTypeIndex from list of possibilities.
- memoryTypeBits &= ~(1u << memTypeIndex);
- // Find alternative memTypeIndex.
- res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
- if(res == VK_SUCCESS)
- {
- alignmentForMemType = VMA_MAX(
- vkMemReq.alignment,
- GetMemoryTypeMinAlignment(memTypeIndex));
-
- res = AllocateMemoryOfType(
- vkMemReq.size,
- alignmentForMemType,
- requiresDedicatedAllocation || prefersDedicatedAllocation,
- dedicatedBuffer,
- dedicatedImage,
- createInfo,
- memTypeIndex,
- suballocType,
- allocationCount,
- pAllocations);
- // Allocation from this alternative memory type succeeded.
- if(res == VK_SUCCESS)
- {
- return res;
- }
- // else: Allocation from this memory type failed. Try next one - next loop iteration.
- }
- // No other matching memory type index could be found.
- else
- {
- // Not returning res, which is VK_ERROR_FEATURE_NOT_PRESENT, because we already failed to allocate once.
- return VK_ERROR_OUT_OF_DEVICE_MEMORY;
- }
- }
- }
- }
- // Can't find any single memory type matching requirements. res is VK_ERROR_FEATURE_NOT_PRESENT.
- else
- return res;
- }
-}
-
-void VmaAllocator_T::FreeMemory(
- size_t allocationCount,
- const VmaAllocation* pAllocations)
-{
- VMA_ASSERT(pAllocations);
-
- for(size_t allocIndex = allocationCount; allocIndex--; )
- {
- VmaAllocation allocation = pAllocations[allocIndex];
-
- if(allocation != VK_NULL_HANDLE)
- {
- if(TouchAllocation(allocation))
- {
- if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
- {
- FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
- }
-
- switch(allocation->GetType())
- {
- case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
- {
- VmaBlockVector* pBlockVector = VMA_NULL;
- VmaPool hPool = allocation->GetPool();
- if(hPool != VK_NULL_HANDLE)
- {
- pBlockVector = &hPool->m_BlockVector;
- }
- else
- {
- const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
- pBlockVector = m_pBlockVectors[memTypeIndex];
- }
- pBlockVector->Free(allocation);
- }
- break;
- case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
- FreeDedicatedMemory(allocation);
- break;
- default:
- VMA_ASSERT(0);
- }
- }
-
- allocation->SetUserData(this, VMA_NULL);
- vma_delete(this, allocation);
- }
- }
-}
-
-VkResult VmaAllocator_T::ResizeAllocation(
- const VmaAllocation alloc,
- VkDeviceSize newSize)
-{
- if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
- {
- return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- if(newSize == alloc->GetSize())
- {
- return VK_SUCCESS;
- }
-
- switch(alloc->GetType())
- {
- case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
- return VK_ERROR_FEATURE_NOT_PRESENT;
- case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
- if(alloc->GetBlock()->m_pMetadata->ResizeAllocation(alloc, newSize))
- {
- alloc->ChangeSize(newSize);
- VMA_HEAVY_ASSERT(alloc->GetBlock()->m_pMetadata->Validate());
- return VK_SUCCESS;
- }
- else
- {
- return VK_ERROR_OUT_OF_POOL_MEMORY;
- }
- default:
- VMA_ASSERT(0);
- return VK_ERROR_VALIDATION_FAILED_EXT;
- }
-}
-
-void VmaAllocator_T::CalculateStats(VmaStats* pStats)
-{
- // Initialize.
- InitStatInfo(pStats->total);
- for(size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
- InitStatInfo(pStats->memoryType[i]);
- for(size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
- InitStatInfo(pStats->memoryHeap[i]);
-
- // Process default pools.
- for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
- {
- VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
- VMA_ASSERT(pBlockVector);
- pBlockVector->AddStats(pStats);
- }
-
- // Process custom pools.
- {
- VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
- for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
- {
- m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
- }
- }
-
- // Process dedicated allocations.
- for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
- {
- const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
- VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
- AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
- VMA_ASSERT(pDedicatedAllocVector);
- for(size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
- {
- VmaStatInfo allocationStatInfo;
- (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
- VmaAddStatInfo(pStats->total, allocationStatInfo);
- VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
- VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
- }
- }
-
- // Postprocess.
- VmaPostprocessCalcStatInfo(pStats->total);
- for(size_t i = 0; i < GetMemoryTypeCount(); ++i)
- VmaPostprocessCalcStatInfo(pStats->memoryType[i]);
- for(size_t i = 0; i < GetMemoryHeapCount(); ++i)
- VmaPostprocessCalcStatInfo(pStats->memoryHeap[i]);
-}
-
-static const uint32_t VMA_VENDOR_ID_AMD = 4098;
-
-VkResult VmaAllocator_T::DefragmentationBegin(
- const VmaDefragmentationInfo2& info,
- VmaDefragmentationStats* pStats,
- VmaDefragmentationContext* pContext)
-{
- if(info.pAllocationsChanged != VMA_NULL)
- {
- memset(info.pAllocationsChanged, 0, info.allocationCount * sizeof(VkBool32));
- }
-
- *pContext = vma_new(this, VmaDefragmentationContext_T)(
- this, m_CurrentFrameIndex.load(), info.flags, pStats);
-
- (*pContext)->AddPools(info.poolCount, info.pPools);
- (*pContext)->AddAllocations(
- info.allocationCount, info.pAllocations, info.pAllocationsChanged);
-
- VkResult res = (*pContext)->Defragment(
- info.maxCpuBytesToMove, info.maxCpuAllocationsToMove,
- info.maxGpuBytesToMove, info.maxGpuAllocationsToMove,
- info.commandBuffer, pStats);
-
- if(res != VK_NOT_READY)
- {
- vma_delete(this, *pContext);
- *pContext = VMA_NULL;
- }
-
- return res;
-}
-
-VkResult VmaAllocator_T::DefragmentationEnd(
- VmaDefragmentationContext context)
-{
- vma_delete(this, context);
- return VK_SUCCESS;
-}
-
-void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo)
-{
- if(hAllocation->CanBecomeLost())
- {
- /*
- Warning: This is a carefully designed algorithm.
- Do not modify unless you really know what you're doing :)
- */
- const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
- uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
- for(;;)
- {
- if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
- {
- pAllocationInfo->memoryType = UINT32_MAX;
- pAllocationInfo->deviceMemory = VK_NULL_HANDLE;
- pAllocationInfo->offset = 0;
- pAllocationInfo->size = hAllocation->GetSize();
- pAllocationInfo->pMappedData = VMA_NULL;
- pAllocationInfo->pUserData = hAllocation->GetUserData();
- return;
- }
- else if(localLastUseFrameIndex == localCurrFrameIndex)
- {
- pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
- pAllocationInfo->deviceMemory = hAllocation->GetMemory();
- pAllocationInfo->offset = hAllocation->GetOffset();
- pAllocationInfo->size = hAllocation->GetSize();
- pAllocationInfo->pMappedData = VMA_NULL;
- pAllocationInfo->pUserData = hAllocation->GetUserData();
- return;
- }
- else // Last use time earlier than current time.
- {
- if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
- {
- localLastUseFrameIndex = localCurrFrameIndex;
- }
- }
- }
- }
- else
- {
-#if VMA_STATS_STRING_ENABLED
- uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
- uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
- for(;;)
- {
- VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
- if(localLastUseFrameIndex == localCurrFrameIndex)
- {
- break;
- }
- else // Last use time earlier than current time.
- {
- if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
- {
- localLastUseFrameIndex = localCurrFrameIndex;
- }
- }
- }
-#endif
-
- pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
- pAllocationInfo->deviceMemory = hAllocation->GetMemory();
- pAllocationInfo->offset = hAllocation->GetOffset();
- pAllocationInfo->size = hAllocation->GetSize();
- pAllocationInfo->pMappedData = hAllocation->GetMappedData();
- pAllocationInfo->pUserData = hAllocation->GetUserData();
- }
-}
-
-bool VmaAllocator_T::TouchAllocation(VmaAllocation hAllocation)
-{
- // This is a stripped-down version of VmaAllocator_T::GetAllocationInfo.
- if(hAllocation->CanBecomeLost())
- {
- uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
- uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
- for(;;)
- {
- if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
- {
- return false;
- }
- else if(localLastUseFrameIndex == localCurrFrameIndex)
- {
- return true;
- }
- else // Last use time earlier than current time.
- {
- if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
- {
- localLastUseFrameIndex = localCurrFrameIndex;
- }
- }
- }
- }
- else
- {
-#if VMA_STATS_STRING_ENABLED
- uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
- uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
- for(;;)
- {
- VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
- if(localLastUseFrameIndex == localCurrFrameIndex)
- {
- break;
- }
- else // Last use time earlier than current time.
- {
- if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
- {
- localLastUseFrameIndex = localCurrFrameIndex;
- }
- }
- }
-#endif
-
- return true;
- }
-}
-
-VkResult VmaAllocator_T::CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
-{
- VMA_DEBUG_LOG(" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->memoryTypeIndex, pCreateInfo->flags);
-
- VmaPoolCreateInfo newCreateInfo = *pCreateInfo;
-
- if(newCreateInfo.maxBlockCount == 0)
- {
- newCreateInfo.maxBlockCount = SIZE_MAX;
- }
- if(newCreateInfo.minBlockCount > newCreateInfo.maxBlockCount)
- {
- return VK_ERROR_INITIALIZATION_FAILED;
- }
-
- const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.memoryTypeIndex);
-
- *pPool = vma_new(this, VmaPool_T)(this, newCreateInfo, preferredBlockSize);
-
- VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
- if(res != VK_SUCCESS)
- {
- vma_delete(this, *pPool);
- *pPool = VMA_NULL;
- return res;
- }
-
- // Add to m_Pools.
- {
- VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
- (*pPool)->SetId(m_NextPoolId++);
- VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
- }
-
- return VK_SUCCESS;
-}
-
-void VmaAllocator_T::DestroyPool(VmaPool pool)
-{
- // Remove from m_Pools.
- {
- VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
- bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
- VMA_ASSERT(success && "Pool not found in Allocator.");
- }
-
- vma_delete(this, pool);
-}
-
-void VmaAllocator_T::GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats)
-{
- pool->m_BlockVector.GetPoolStats(pPoolStats);
-}
-
-void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
-{
- m_CurrentFrameIndex.store(frameIndex);
-}
-
-void VmaAllocator_T::MakePoolAllocationsLost(
- VmaPool hPool,
- size_t* pLostAllocationCount)
-{
- hPool->m_BlockVector.MakePoolAllocationsLost(
- m_CurrentFrameIndex.load(),
- pLostAllocationCount);
-}
-
-VkResult VmaAllocator_T::CheckPoolCorruption(VmaPool hPool)
-{
- return hPool->m_BlockVector.CheckCorruption();
-}
-
-VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
-{
- VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
-
- // Process default pools.
- for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
- {
- if(((1u << memTypeIndex) & memoryTypeBits) != 0)
- {
- VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
- VMA_ASSERT(pBlockVector);
- VkResult localRes = pBlockVector->CheckCorruption();
- switch(localRes)
- {
- case VK_ERROR_FEATURE_NOT_PRESENT:
- break;
- case VK_SUCCESS:
- finalRes = VK_SUCCESS;
- break;
- default:
- return localRes;
- }
- }
- }
-
- // Process custom pools.
- {
- VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
- for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
- {
- if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
- {
- VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
- switch(localRes)
- {
- case VK_ERROR_FEATURE_NOT_PRESENT:
- break;
- case VK_SUCCESS:
- finalRes = VK_SUCCESS;
- break;
- default:
- return localRes;
- }
- }
- }
- }
-
- return finalRes;
-}
-
-void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
-{
- *pAllocation = vma_new(this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST, false);
- (*pAllocation)->InitLost();
-}
-
-VkResult VmaAllocator_T::AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
-{
- const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
-
- VkResult res;
- if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
- {
- VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
- if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
- {
- res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
- if(res == VK_SUCCESS)
- {
- m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
- }
- }
- else
- {
- res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
- }
- }
- else
- {
- res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
- }
-
- if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.pfnAllocate != VMA_NULL)
- {
- (*m_DeviceMemoryCallbacks.pfnAllocate)(this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
- }
-
- return res;
-}
-
-void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
-{
- if(m_DeviceMemoryCallbacks.pfnFree != VMA_NULL)
- {
- (*m_DeviceMemoryCallbacks.pfnFree)(this, memoryType, hMemory, size);
- }
-
- (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
-
- const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
- if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
- {
- VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
- m_HeapSizeLimit[heapIndex] += size;
- }
-}
-
-VkResult VmaAllocator_T::Map(VmaAllocation hAllocation, void** ppData)
-{
- if(hAllocation->CanBecomeLost())
- {
- return VK_ERROR_MEMORY_MAP_FAILED;
- }
-
- switch(hAllocation->GetType())
- {
- case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
- {
- VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
- char *pBytes = VMA_NULL;
- VkResult res = pBlock->Map(this, 1, (void**)&pBytes);
- if(res == VK_SUCCESS)
- {
- *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
- hAllocation->BlockAllocMap();
- }
- return res;
- }
- case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
- return hAllocation->DedicatedAllocMap(this, ppData);
- default:
- VMA_ASSERT(0);
- return VK_ERROR_MEMORY_MAP_FAILED;
- }
-}
-
-void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
-{
- switch(hAllocation->GetType())
- {
- case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
- {
- VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
- hAllocation->BlockAllocUnmap();
- pBlock->Unmap(this, 1);
- }
- break;
- case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
- hAllocation->DedicatedAllocUnmap(this);
- break;
- default:
- VMA_ASSERT(0);
- }
-}
-
-VkResult VmaAllocator_T::BindBufferMemory(VmaAllocation hAllocation, VkBuffer hBuffer)
-{
- VkResult res = VK_SUCCESS;
- switch(hAllocation->GetType())
- {
- case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
- res = GetVulkanFunctions().vkBindBufferMemory(
- m_hDevice,
- hBuffer,
- hAllocation->GetMemory(),
- 0); //memoryOffset
- break;
- case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
- {
- VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
- VMA_ASSERT(pBlock && "Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
- res = pBlock->BindBufferMemory(this, hAllocation, hBuffer);
- break;
- }
- default:
- VMA_ASSERT(0);
- }
- return res;
-}
-
-VkResult VmaAllocator_T::BindImageMemory(VmaAllocation hAllocation, VkImage hImage)
-{
- VkResult res = VK_SUCCESS;
- switch(hAllocation->GetType())
- {
- case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
- res = GetVulkanFunctions().vkBindImageMemory(
- m_hDevice,
- hImage,
- hAllocation->GetMemory(),
- 0); //memoryOffset
- break;
- case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
- {
- VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
- VMA_ASSERT(pBlock && "Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
- res = pBlock->BindImageMemory(this, hAllocation, hImage);
- break;
- }
- default:
- VMA_ASSERT(0);
- }
- return res;
-}
-
-void VmaAllocator_T::FlushOrInvalidateAllocation(
- VmaAllocation hAllocation,
- VkDeviceSize offset, VkDeviceSize size,
- VMA_CACHE_OPERATION op)
-{
- const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
- if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
- {
- const VkDeviceSize allocationSize = hAllocation->GetSize();
- VMA_ASSERT(offset <= allocationSize);
-
- const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
-
- VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
- memRange.memory = hAllocation->GetMemory();
-
- switch(hAllocation->GetType())
- {
- case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
- memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
- if(size == VK_WHOLE_SIZE)
- {
- memRange.size = allocationSize - memRange.offset;
- }
- else
- {
- VMA_ASSERT(offset + size <= allocationSize);
- memRange.size = VMA_MIN(
- VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
- allocationSize - memRange.offset);
- }
- break;
-
- case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
- {
- // 1. Still within this allocation.
- memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
- if(size == VK_WHOLE_SIZE)
- {
- size = allocationSize - offset;
- }
- else
- {
- VMA_ASSERT(offset + size <= allocationSize);
- }
- memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);
-
- // 2. Adjust to whole block.
- const VkDeviceSize allocationOffset = hAllocation->GetOffset();
- VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
- const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
- memRange.offset += allocationOffset;
- memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
-
- break;
- }
-
- default:
- VMA_ASSERT(0);
- }
-
- switch(op)
- {
- case VMA_CACHE_FLUSH:
- (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
- break;
- case VMA_CACHE_INVALIDATE:
- (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
- break;
- default:
- VMA_ASSERT(0);
- }
- }
- // else: Just ignore this call.
-}
-
-void VmaAllocator_T::FreeDedicatedMemory(VmaAllocation allocation)
-{
- VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
-
- const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
- {
- VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
- AllocationVectorType* const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
- VMA_ASSERT(pDedicatedAllocations);
- bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
- VMA_ASSERT(success);
- }
-
- VkDeviceMemory hMemory = allocation->GetMemory();
-
- /*
- There is no need to call this, because the Vulkan spec allows skipping vkUnmapMemory
- before vkFreeMemory.
-
- if(allocation->GetMappedData() != VMA_NULL)
- {
- (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
- }
- */
-
- FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
-
- VMA_DEBUG_LOG(" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
-}
-
-void VmaAllocator_T::FillAllocation(const VmaAllocation hAllocation, uint8_t pattern)
-{
- if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
- !hAllocation->CanBecomeLost() &&
- (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
- {
- void* pData = VMA_NULL;
- VkResult res = Map(hAllocation, &pData);
- if(res == VK_SUCCESS)
- {
- memset(pData, (int)pattern, (size_t)hAllocation->GetSize());
- FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
- Unmap(hAllocation);
- }
- else
- {
- VMA_ASSERT(0 && "VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
- }
- }
-}
-
-#if VMA_STATS_STRING_ENABLED
-
-void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
-{
- bool dedicatedAllocationsStarted = false;
- for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
- {
- VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
- AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
- VMA_ASSERT(pDedicatedAllocVector);
- if(pDedicatedAllocVector->empty() == false)
- {
- if(dedicatedAllocationsStarted == false)
- {
- dedicatedAllocationsStarted = true;
- json.WriteString("DedicatedAllocations");
- json.BeginObject();
- }
-
- json.BeginString("Type ");
- json.ContinueString(memTypeIndex);
- json.EndString();
-
- json.BeginArray();
-
- for(size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
- {
- json.BeginObject(true);
- const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
- hAlloc->PrintParameters(json);
- json.EndObject();
- }
-
- json.EndArray();
- }
- }
- if(dedicatedAllocationsStarted)
- {
- json.EndObject();
- }
-
- {
- bool allocationsStarted = false;
- for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
- {
- if(m_pBlockVectors[memTypeIndex]->IsEmpty() == false)
- {
- if(allocationsStarted == false)
- {
- allocationsStarted = true;
- json.WriteString("DefaultPools");
- json.BeginObject();
- }
-
- json.BeginString("Type ");
- json.ContinueString(memTypeIndex);
- json.EndString();
-
- m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
- }
- }
- if(allocationsStarted)
- {
- json.EndObject();
- }
- }
-
- // Custom pools
- {
- VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
- const size_t poolCount = m_Pools.size();
- if(poolCount > 0)
- {
- json.WriteString("Pools");
- json.BeginObject();
- for(size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
- {
- json.BeginString();
- json.ContinueString(m_Pools[poolIndex]->GetId());
- json.EndString();
-
- m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
- }
- json.EndObject();
- }
- }
-}
-
-#endif // #if VMA_STATS_STRING_ENABLED
-
-////////////////////////////////////////////////////////////////////////////////
-// Public interface
-
-VkResult vmaCreateAllocator(
- const VmaAllocatorCreateInfo* pCreateInfo,
- VmaAllocator* pAllocator)
-{
- VMA_ASSERT(pCreateInfo && pAllocator);
- VMA_DEBUG_LOG("vmaCreateAllocator");
- *pAllocator = vma_new(pCreateInfo->pAllocationCallbacks, VmaAllocator_T)(pCreateInfo);
- return (*pAllocator)->Init(pCreateInfo);
-}
-
-void vmaDestroyAllocator(
- VmaAllocator allocator)
-{
- if(allocator != VK_NULL_HANDLE)
- {
- VMA_DEBUG_LOG("vmaDestroyAllocator");
- VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
- vma_delete(&allocationCallbacks, allocator);
- }
-}
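-
-/*
-A minimal usage sketch of the two functions above. It assumes the application has
-already created a VkPhysicalDevice and VkDevice; error handling is omitted:
-
-    VmaAllocatorCreateInfo allocatorInfo = {};
-    allocatorInfo.physicalDevice = physicalDevice;
-    allocatorInfo.device = device;
-
-    VmaAllocator allocator = VK_NULL_HANDLE;
-    VkResult res = vmaCreateAllocator(&allocatorInfo, &allocator);
-    // ... create buffers and images through the allocator ...
-    vmaDestroyAllocator(allocator);
-*/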
-
-void vmaGetPhysicalDeviceProperties(
- VmaAllocator allocator,
- const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
-{
- VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
- *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
-}
-
-void vmaGetMemoryProperties(
- VmaAllocator allocator,
- const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
-{
- VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
- *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
-}
-
-void vmaGetMemoryTypeProperties(
- VmaAllocator allocator,
- uint32_t memoryTypeIndex,
- VkMemoryPropertyFlags* pFlags)
-{
- VMA_ASSERT(allocator && pFlags);
- VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
- *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
-}
-
-void vmaSetCurrentFrameIndex(
- VmaAllocator allocator,
- uint32_t frameIndex)
-{
- VMA_ASSERT(allocator);
- VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
-
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
-
- allocator->SetCurrentFrameIndex(frameIndex);
-}
-
-void vmaCalculateStats(
- VmaAllocator allocator,
- VmaStats* pStats)
-{
- VMA_ASSERT(allocator && pStats);
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
- allocator->CalculateStats(pStats);
-}
-
-#if VMA_STATS_STRING_ENABLED
-
-void vmaBuildStatsString(
- VmaAllocator allocator,
- char** ppStatsString,
- VkBool32 detailedMap)
-{
- VMA_ASSERT(allocator && ppStatsString);
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
-
- VmaStringBuilder sb(allocator);
- {
- VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
- json.BeginObject();
-
- VmaStats stats;
- allocator->CalculateStats(&stats);
-
- json.WriteString("Total");
- VmaPrintStatInfo(json, stats.total);
-
- for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
- {
- json.BeginString("Heap ");
- json.ContinueString(heapIndex);
- json.EndString();
- json.BeginObject();
-
- json.WriteString("Size");
- json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
-
- json.WriteString("Flags");
- json.BeginArray(true);
- if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
- {
- json.WriteString("DEVICE_LOCAL");
- }
- json.EndArray();
-
- if(stats.memoryHeap[heapIndex].blockCount > 0)
- {
- json.WriteString("Stats");
- VmaPrintStatInfo(json, stats.memoryHeap[heapIndex]);
- }
-
- for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
- {
- if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
- {
- json.BeginString("Type ");
- json.ContinueString(typeIndex);
- json.EndString();
-
- json.BeginObject();
-
- json.WriteString("Flags");
- json.BeginArray(true);
- VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
- if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
- {
- json.WriteString("DEVICE_LOCAL");
- }
- if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
- {
- json.WriteString("HOST_VISIBLE");
- }
- if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
- {
- json.WriteString("HOST_COHERENT");
- }
- if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
- {
- json.WriteString("HOST_CACHED");
- }
- if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
- {
- json.WriteString("LAZILY_ALLOCATED");
- }
- json.EndArray();
-
- if(stats.memoryType[typeIndex].blockCount > 0)
- {
- json.WriteString("Stats");
- VmaPrintStatInfo(json, stats.memoryType[typeIndex]);
- }
-
- json.EndObject();
- }
- }
-
- json.EndObject();
- }
- if(detailedMap == VK_TRUE)
- {
- allocator->PrintDetailedMap(json);
- }
-
- json.EndObject();
- }
-
- const size_t len = sb.GetLength();
- char* const pChars = vma_new_array(allocator, char, len + 1);
- if(len > 0)
- {
- memcpy(pChars, sb.GetData(), len);
- }
- pChars[len] = '\0';
- *ppStatsString = pChars;
-}
-
-void vmaFreeStatsString(
- VmaAllocator allocator,
- char* pStatsString)
-{
- if(pStatsString != VMA_NULL)
- {
- VMA_ASSERT(allocator);
- size_t len = strlen(pStatsString);
- vma_delete_array(allocator, pStatsString, len + 1);
- }
-}
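-
-/*
-A usage sketch for the pair above: the JSON string returned by vmaBuildStatsString is
-allocated by the library and must be released with vmaFreeStatsString:
-
-    char* statsString = VMA_NULL;
-    vmaBuildStatsString(allocator, &statsString, VK_TRUE); // VK_TRUE = include detailed map
-    // ... log or save statsString ...
-    vmaFreeStatsString(allocator, statsString);
-*/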
-
-#endif // #if VMA_STATS_STRING_ENABLED
-
-/*
-This function is not protected by any mutex because it just reads immutable data.
-*/
-VkResult vmaFindMemoryTypeIndex(
- VmaAllocator allocator,
- uint32_t memoryTypeBits,
- const VmaAllocationCreateInfo* pAllocationCreateInfo,
- uint32_t* pMemoryTypeIndex)
-{
- VMA_ASSERT(allocator != VK_NULL_HANDLE);
- VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
- VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
-
- if(pAllocationCreateInfo->memoryTypeBits != 0)
- {
- memoryTypeBits &= pAllocationCreateInfo->memoryTypeBits;
- }
-
- uint32_t requiredFlags = pAllocationCreateInfo->requiredFlags;
- uint32_t preferredFlags = pAllocationCreateInfo->preferredFlags;
-
- const bool mapped = (pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
- if(mapped)
- {
- preferredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
- }
-
- // Convert usage to requiredFlags and preferredFlags.
- switch(pAllocationCreateInfo->usage)
- {
- case VMA_MEMORY_USAGE_UNKNOWN:
- break;
- case VMA_MEMORY_USAGE_GPU_ONLY:
- if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
- {
- preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
- }
- break;
- case VMA_MEMORY_USAGE_CPU_ONLY:
- requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
- break;
- case VMA_MEMORY_USAGE_CPU_TO_GPU:
- requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
- if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
- {
- preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
- }
- break;
- case VMA_MEMORY_USAGE_GPU_TO_CPU:
- requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
- preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
- break;
- default:
- break;
- }
-
- *pMemoryTypeIndex = UINT32_MAX;
- uint32_t minCost = UINT32_MAX;
- for(uint32_t memTypeIndex = 0, memTypeBit = 1;
- memTypeIndex < allocator->GetMemoryTypeCount();
- ++memTypeIndex, memTypeBit <<= 1)
- {
- // This memory type is acceptable according to memoryTypeBits bitmask.
- if((memTypeBit & memoryTypeBits) != 0)
- {
- const VkMemoryPropertyFlags currFlags =
- allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
- // This memory type contains requiredFlags.
- if((requiredFlags & ~currFlags) == 0)
- {
- // Calculate cost as number of bits from preferredFlags not present in this memory type.
- uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
- // Remember memory type with lowest cost.
- if(currCost < minCost)
- {
- *pMemoryTypeIndex = memTypeIndex;
- if(currCost == 0)
- {
- return VK_SUCCESS;
- }
- minCost = currCost;
- }
- }
- }
- }
- return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
-}
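The selection loop above keeps only types allowed by memoryTypeBits, rejects any type missing a requiredFlags bit, and scores the rest by how many preferredFlags bits they lack, returning early on a zero-cost match. A minimal caller sketch, assuming an existing allocator handle and a memReq obtained from vkGetBufferMemoryRequirements (both hypothetical here):

// Sketch: pick a memory type for a host-visible staging buffer.
VmaAllocationCreateInfo stagingCreateInfo = {};
stagingCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;   // adds HOST_VISIBLE | HOST_COHERENT to requiredFlags

uint32_t memTypeIndex = UINT32_MAX;
VkResult res = vmaFindMemoryTypeIndex(
    allocator,
    memReq.memoryTypeBits,                             // from the buffer's memory requirements
    &stagingCreateInfo,
    &memTypeIndex);
// VK_SUCCESS: memTypeIndex is the acceptable type missing the fewest preferredFlags bits;
// VK_ERROR_FEATURE_NOT_PRESENT: no type satisfies memoryTypeBits plus requiredFlags.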
-
-VkResult vmaFindMemoryTypeIndexForBufferInfo(
- VmaAllocator allocator,
- const VkBufferCreateInfo* pBufferCreateInfo,
- const VmaAllocationCreateInfo* pAllocationCreateInfo,
- uint32_t* pMemoryTypeIndex)
-{
- VMA_ASSERT(allocator != VK_NULL_HANDLE);
- VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
- VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
- VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
-
- const VkDevice hDev = allocator->m_hDevice;
- VkBuffer hBuffer = VK_NULL_HANDLE;
- VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
- hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
- if(res == VK_SUCCESS)
- {
- VkMemoryRequirements memReq = {};
- allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
- hDev, hBuffer, &memReq);
-
- res = vmaFindMemoryTypeIndex(
- allocator,
- memReq.memoryTypeBits,
- pAllocationCreateInfo,
- pMemoryTypeIndex);
-
- allocator->GetVulkanFunctions().vkDestroyBuffer(
- hDev, hBuffer, allocator->GetAllocationCallbacks());
- }
- return res;
-}
-
-VkResult vmaFindMemoryTypeIndexForImageInfo(
- VmaAllocator allocator,
- const VkImageCreateInfo* pImageCreateInfo,
- const VmaAllocationCreateInfo* pAllocationCreateInfo,
- uint32_t* pMemoryTypeIndex)
-{
- VMA_ASSERT(allocator != VK_NULL_HANDLE);
- VMA_ASSERT(pImageCreateInfo != VMA_NULL);
- VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
- VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
-
- const VkDevice hDev = allocator->m_hDevice;
- VkImage hImage = VK_NULL_HANDLE;
- VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
- hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
- if(res == VK_SUCCESS)
- {
- VkMemoryRequirements memReq = {};
- allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
- hDev, hImage, &memReq);
-
- res = vmaFindMemoryTypeIndex(
- allocator,
- memReq.memoryTypeBits,
- pAllocationCreateInfo,
- pMemoryTypeIndex);
-
- allocator->GetVulkanFunctions().vkDestroyImage(
- hDev, hImage, allocator->GetAllocationCallbacks());
- }
- return res;
-}
-
-VkResult vmaCreatePool(
- VmaAllocator allocator,
- const VmaPoolCreateInfo* pCreateInfo,
- VmaPool* pPool)
-{
- VMA_ASSERT(allocator && pCreateInfo && pPool);
-
- VMA_DEBUG_LOG("vmaCreatePool");
-
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
-
- VkResult res = allocator->CreatePool(pCreateInfo, pPool);
-
-#if VMA_RECORDING_ENABLED
- if(allocator->GetRecorder() != VMA_NULL)
- {
- allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
- }
-#endif
-
- return res;
-}
-
-void vmaDestroyPool(
- VmaAllocator allocator,
- VmaPool pool)
-{
- VMA_ASSERT(allocator);
-
- if(pool == VK_NULL_HANDLE)
- {
- return;
- }
-
- VMA_DEBUG_LOG("vmaDestroyPool");
-
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
-
-#if VMA_RECORDING_ENABLED
- if(allocator->GetRecorder() != VMA_NULL)
- {
- allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
- }
-#endif
-
- allocator->DestroyPool(pool);
-}
-
-void vmaGetPoolStats(
- VmaAllocator allocator,
- VmaPool pool,
- VmaPoolStats* pPoolStats)
-{
- VMA_ASSERT(allocator && pool && pPoolStats);
-
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
-
- allocator->GetPoolStats(pool, pPoolStats);
-}
-
-void vmaMakePoolAllocationsLost(
- VmaAllocator allocator,
- VmaPool pool,
- size_t* pLostAllocationCount)
-{
- VMA_ASSERT(allocator && pool);
-
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
-
-#if VMA_RECORDING_ENABLED
- if(allocator->GetRecorder() != VMA_NULL)
- {
- allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
- }
-#endif
-
- allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
-}
-
-VkResult vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
-{
- VMA_ASSERT(allocator && pool);
-
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
-
- VMA_DEBUG_LOG("vmaCheckPoolCorruption");
-
- return allocator->CheckPoolCorruption(pool);
-}
-
-VkResult vmaAllocateMemory(
- VmaAllocator allocator,
- const VkMemoryRequirements* pVkMemoryRequirements,
- const VmaAllocationCreateInfo* pCreateInfo,
- VmaAllocation* pAllocation,
- VmaAllocationInfo* pAllocationInfo)
-{
- VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
-
- VMA_DEBUG_LOG("vmaAllocateMemory");
-
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
-
- VkResult result = allocator->AllocateMemory(
- *pVkMemoryRequirements,
- false, // requiresDedicatedAllocation
- false, // prefersDedicatedAllocation
- VK_NULL_HANDLE, // dedicatedBuffer
- VK_NULL_HANDLE, // dedicatedImage
- *pCreateInfo,
- VMA_SUBALLOCATION_TYPE_UNKNOWN,
- 1, // allocationCount
- pAllocation);
-
-#if VMA_RECORDING_ENABLED
- if(allocator->GetRecorder() != VMA_NULL)
- {
- allocator->GetRecorder()->RecordAllocateMemory(
- allocator->GetCurrentFrameIndex(),
- *pVkMemoryRequirements,
- *pCreateInfo,
- *pAllocation);
- }
-#endif
-
- if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
- {
- allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
- }
-
- return result;
-}
-
-VkResult vmaAllocateMemoryPages(
- VmaAllocator allocator,
- const VkMemoryRequirements* pVkMemoryRequirements,
- const VmaAllocationCreateInfo* pCreateInfo,
- size_t allocationCount,
- VmaAllocation* pAllocations,
- VmaAllocationInfo* pAllocationInfo)
-{
- if(allocationCount == 0)
- {
- return VK_SUCCESS;
- }
-
- VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);
-
- VMA_DEBUG_LOG("vmaAllocateMemoryPages");
-
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
-
- VkResult result = allocator->AllocateMemory(
- *pVkMemoryRequirements,
- false, // requiresDedicatedAllocation
- false, // prefersDedicatedAllocation
- VK_NULL_HANDLE, // dedicatedBuffer
- VK_NULL_HANDLE, // dedicatedImage
- *pCreateInfo,
- VMA_SUBALLOCATION_TYPE_UNKNOWN,
- allocationCount,
- pAllocations);
-
-#if VMA_RECORDING_ENABLED
- if(allocator->GetRecorder() != VMA_NULL)
- {
- allocator->GetRecorder()->RecordAllocateMemoryPages(
- allocator->GetCurrentFrameIndex(),
- *pVkMemoryRequirements,
- *pCreateInfo,
- (uint64_t)allocationCount,
- pAllocations);
- }
-#endif
-
- if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
- {
- for(size_t i = 0; i < allocationCount; ++i)
- {
- allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
- }
- }
-
- return result;
-}
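vmaAllocateMemoryPages performs allocationCount independent allocations from one VkMemoryRequirements and, when pAllocationInfo is given, fills one entry per allocation. A hedged sketch follows; the page size, alignment, and count are hypothetical values chosen only for illustration.

// Sketch: allocate and later free eight identical "pages".
VkMemoryRequirements pageReq = {};
pageReq.size = 64 * 1024;
pageReq.alignment = 256;
pageReq.memoryTypeBits = UINT32_MAX;                   // accept any memory type

VmaAllocationCreateInfo pageCreateInfo = {};
pageCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

VmaAllocation pages[8] = {};
VmaAllocationInfo pageInfos[8] = {};                   // filled per allocation on success
if(vmaAllocateMemoryPages(allocator, &pageReq, &pageCreateInfo, 8, pages, pageInfos) == VK_SUCCESS)
{
    // ... bind or otherwise use each allocation ...
    vmaFreeMemoryPages(allocator, 8, pages);
}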
-
-VkResult vmaAllocateMemoryForBuffer(
- VmaAllocator allocator,
- VkBuffer buffer,
- const VmaAllocationCreateInfo* pCreateInfo,
- VmaAllocation* pAllocation,
- VmaAllocationInfo* pAllocationInfo)
-{
- VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
-
- VMA_DEBUG_LOG("vmaAllocateMemoryForBuffer");
-
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
-
- VkMemoryRequirements vkMemReq = {};
- bool requiresDedicatedAllocation = false;
- bool prefersDedicatedAllocation = false;
- allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
- requiresDedicatedAllocation,
- prefersDedicatedAllocation);
-
- VkResult result = allocator->AllocateMemory(
- vkMemReq,
- requiresDedicatedAllocation,
- prefersDedicatedAllocation,
- buffer, // dedicatedBuffer
- VK_NULL_HANDLE, // dedicatedImage
- *pCreateInfo,
- VMA_SUBALLOCATION_TYPE_BUFFER,
- 1, // allocationCount
- pAllocation);
-
-#if VMA_RECORDING_ENABLED
- if(allocator->GetRecorder() != VMA_NULL)
- {
- allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
- allocator->GetCurrentFrameIndex(),
- vkMemReq,
- requiresDedicatedAllocation,
- prefersDedicatedAllocation,
- *pCreateInfo,
- *pAllocation);
- }
-#endif
-
- if(pAllocationInfo && result == VK_SUCCESS)
- {
- allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
- }
-
- return result;
-}
-
-VkResult vmaAllocateMemoryForImage(
- VmaAllocator allocator,
- VkImage image,
- const VmaAllocationCreateInfo* pCreateInfo,
- VmaAllocation* pAllocation,
- VmaAllocationInfo* pAllocationInfo)
-{
- VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
-
- VMA_DEBUG_LOG("vmaAllocateMemoryForImage");
-
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
-
- VkMemoryRequirements vkMemReq = {};
- bool requiresDedicatedAllocation = false;
- bool prefersDedicatedAllocation = false;
- allocator->GetImageMemoryRequirements(image, vkMemReq,
- requiresDedicatedAllocation, prefersDedicatedAllocation);
-
- VkResult result = allocator->AllocateMemory(
- vkMemReq,
- requiresDedicatedAllocation,
- prefersDedicatedAllocation,
- VK_NULL_HANDLE, // dedicatedBuffer
- image, // dedicatedImage
- *pCreateInfo,
- VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
- 1, // allocationCount
- pAllocation);
-
-#if VMA_RECORDING_ENABLED
- if(allocator->GetRecorder() != VMA_NULL)
- {
- allocator->GetRecorder()->RecordAllocateMemoryForImage(
- allocator->GetCurrentFrameIndex(),
- vkMemReq,
- requiresDedicatedAllocation,
- prefersDedicatedAllocation,
- *pCreateInfo,
- *pAllocation);
- }
-#endif
-
- if(pAllocationInfo && result == VK_SUCCESS)
- {
- allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
- }
-
- return result;
-}
-
-void vmaFreeMemory(
- VmaAllocator allocator,
- VmaAllocation allocation)
-{
- VMA_ASSERT(allocator);
-
- if(allocation == VK_NULL_HANDLE)
- {
- return;
- }
-
- VMA_DEBUG_LOG("vmaFreeMemory");
-
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
-
-#if VMA_RECORDING_ENABLED
- if(allocator->GetRecorder() != VMA_NULL)
- {
- allocator->GetRecorder()->RecordFreeMemory(
- allocator->GetCurrentFrameIndex(),
- allocation);
- }
-#endif
-
- allocator->FreeMemory(
- 1, // allocationCount
- &allocation);
-}
-
-void vmaFreeMemoryPages(
- VmaAllocator allocator,
- size_t allocationCount,
- VmaAllocation* pAllocations)
-{
- if(allocationCount == 0)
- {
- return;
- }
-
- VMA_ASSERT(allocator);
-
- VMA_DEBUG_LOG("vmaFreeMemoryPages");
-
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
-
-#if VMA_RECORDING_ENABLED
- if(allocator->GetRecorder() != VMA_NULL)
- {
- allocator->GetRecorder()->RecordFreeMemoryPages(
- allocator->GetCurrentFrameIndex(),
- (uint64_t)allocationCount,
- pAllocations);
- }
-#endif
-
- allocator->FreeMemory(allocationCount, pAllocations);
-}
-
-VkResult vmaResizeAllocation(
- VmaAllocator allocator,
- VmaAllocation allocation,
- VkDeviceSize newSize)
-{
- VMA_ASSERT(allocator && allocation);
-
- VMA_DEBUG_LOG("vmaResizeAllocation");
-
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
-
-#if VMA_RECORDING_ENABLED
- if(allocator->GetRecorder() != VMA_NULL)
- {
- allocator->GetRecorder()->RecordResizeAllocation(
- allocator->GetCurrentFrameIndex(),
- allocation,
- newSize);
- }
-#endif
-
- return allocator->ResizeAllocation(allocation, newSize);
-}
-
-void vmaGetAllocationInfo(
- VmaAllocator allocator,
- VmaAllocation allocation,
- VmaAllocationInfo* pAllocationInfo)
-{
- VMA_ASSERT(allocator && allocation && pAllocationInfo);
-
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
-
-#if VMA_RECORDING_ENABLED
- if(allocator->GetRecorder() != VMA_NULL)
- {
- allocator->GetRecorder()->RecordGetAllocationInfo(
- allocator->GetCurrentFrameIndex(),
- allocation);
- }
-#endif
-
- allocator->GetAllocationInfo(allocation, pAllocationInfo);
-}
-
-VkBool32 vmaTouchAllocation(
- VmaAllocator allocator,
- VmaAllocation allocation)
-{
- VMA_ASSERT(allocator && allocation);
-
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
-
-#if VMA_RECORDING_ENABLED
- if(allocator->GetRecorder() != VMA_NULL)
- {
- allocator->GetRecorder()->RecordTouchAllocation(
- allocator->GetCurrentFrameIndex(),
- allocation);
- }
-#endif
-
- return allocator->TouchAllocation(allocation);
-}
-
-void vmaSetAllocationUserData(
- VmaAllocator allocator,
- VmaAllocation allocation,
- void* pUserData)
-{
- VMA_ASSERT(allocator && allocation);
-
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
-
- allocation->SetUserData(allocator, pUserData);
-
-#if VMA_RECORDING_ENABLED
- if(allocator->GetRecorder() != VMA_NULL)
- {
- allocator->GetRecorder()->RecordSetAllocationUserData(
- allocator->GetCurrentFrameIndex(),
- allocation,
- pUserData);
- }
-#endif
-}
-
-void vmaCreateLostAllocation(
- VmaAllocator allocator,
- VmaAllocation* pAllocation)
-{
- VMA_ASSERT(allocator && pAllocation);
-
- VMA_DEBUG_GLOBAL_MUTEX_LOCK;
-
- allocator->CreateLostAllocation(pAllocation);
-
-#if VMA_RECORDING_ENABLED
- if(allocator->GetRecorder() != VMA_NULL)
- {
- allocator->GetRecorder()->RecordCreateLostAllocation(
- allocator->GetCurrentFrameIndex(),
- *pAllocation);
- }
-#endif
-}
-
-VkResult vmaMapMemory(
- VmaAllocator allocator,
- VmaAllocation allocation,
- void** ppData)
-{
- VMA_ASSERT(allocator && allocation && ppData);
-
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
-
- VkResult res = allocator->Map(allocation, ppData);
-
-#if VMA_RECORDING_ENABLED
- if(allocator->GetRecorder() != VMA_NULL)
- {
- allocator->GetRecorder()->RecordMapMemory(
- allocator->GetCurrentFrameIndex(),
- allocation);
- }
-#endif
-
- return res;
-}
-
-void vmaUnmapMemory(
- VmaAllocator allocator,
- VmaAllocation allocation)
-{
- VMA_ASSERT(allocator && allocation);
-
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
-
-#if VMA_RECORDING_ENABLED
- if(allocator->GetRecorder() != VMA_NULL)
- {
- allocator->GetRecorder()->RecordUnmapMemory(
- allocator->GetCurrentFrameIndex(),
- allocation);
- }
-#endif
-
- allocator->Unmap(allocation);
-}
-
-void vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
-{
- VMA_ASSERT(allocator && allocation);
-
- VMA_DEBUG_LOG("vmaFlushAllocation");
-
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
-
- allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
-
-#if VMA_RECORDING_ENABLED
- if(allocator->GetRecorder() != VMA_NULL)
- {
- allocator->GetRecorder()->RecordFlushAllocation(
- allocator->GetCurrentFrameIndex(),
- allocation, offset, size);
- }
-#endif
-}
-
-void vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
-{
- VMA_ASSERT(allocator && allocation);
-
- VMA_DEBUG_LOG("vmaInvalidateAllocation");
-
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
-
- allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
-
-#if VMA_RECORDING_ENABLED
- if(allocator->GetRecorder() != VMA_NULL)
- {
- allocator->GetRecorder()->RecordInvalidateAllocation(
- allocator->GetCurrentFrameIndex(),
- allocation, offset, size);
- }
-#endif
-}
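The map/flush/unmap entry points above are normally used together when the host writes to memory that may not be HOST_COHERENT. A minimal sketch, assuming a mappable allocation plus hypothetical srcData/dataSize variables:

void* mapped = VMA_NULL;
if(vmaMapMemory(allocator, allocation, &mapped) == VK_SUCCESS)
{
    memcpy(mapped, srcData, dataSize);                        // hypothetical host-side payload
    vmaFlushAllocation(allocator, allocation, 0, dataSize);   // needed only for non-HOST_COHERENT types
    vmaUnmapMemory(allocator, allocation);
}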
-
-VkResult vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
-{
- VMA_ASSERT(allocator);
-
- VMA_DEBUG_LOG("vmaCheckCorruption");
-
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
-
- return allocator->CheckCorruption(memoryTypeBits);
-}
-
-VkResult vmaDefragment(
- VmaAllocator allocator,
- VmaAllocation* pAllocations,
- size_t allocationCount,
- VkBool32* pAllocationsChanged,
- const VmaDefragmentationInfo *pDefragmentationInfo,
- VmaDefragmentationStats* pDefragmentationStats)
-{
- // Deprecated interface, reimplemented using new one.
-
- VmaDefragmentationInfo2 info2 = {};
- info2.allocationCount = (uint32_t)allocationCount;
- info2.pAllocations = pAllocations;
- info2.pAllocationsChanged = pAllocationsChanged;
- if(pDefragmentationInfo != VMA_NULL)
- {
- info2.maxCpuAllocationsToMove = pDefragmentationInfo->maxAllocationsToMove;
- info2.maxCpuBytesToMove = pDefragmentationInfo->maxBytesToMove;
- }
- else
- {
- info2.maxCpuAllocationsToMove = UINT32_MAX;
- info2.maxCpuBytesToMove = VK_WHOLE_SIZE;
- }
- // info2.flags, maxGpuAllocationsToMove, maxGpuBytesToMove, commandBuffer deliberately left zero.
-
- VmaDefragmentationContext ctx;
- VkResult res = vmaDefragmentationBegin(allocator, &info2, pDefragmentationStats, &ctx);
- if(res == VK_NOT_READY)
- {
- res = vmaDefragmentationEnd( allocator, ctx);
- }
- return res;
-}
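The deprecated vmaDefragment above is a thin wrapper over the begin/end pair. Calling the newer interface directly looks roughly like the sketch below; the array names and counts are hypothetical, and the zeroed GPU-related fields mirror what the wrapper does.

VmaDefragmentationInfo2 defragInfo = {};
defragInfo.allocationCount = (uint32_t)allocationCount;
defragInfo.pAllocations = allocations;                 // allocations eligible to be moved
defragInfo.pAllocationsChanged = allocationsChanged;   // optional VkBool32 array, may be VMA_NULL
defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
// flags, maxGpuAllocationsToMove, maxGpuBytesToMove, commandBuffer left zero, as in the wrapper above.

VmaDefragmentationStats defragStats = {};
VmaDefragmentationContext defragCtx = VK_NULL_HANDLE;
VkResult res = vmaDefragmentationBegin(allocator, &defragInfo, &defragStats, &defragCtx);
if(res == VK_NOT_READY)                                // same completion pattern the wrapper uses
{
    res = vmaDefragmentationEnd(allocator, defragCtx);
}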
-
-VkResult vmaDefragmentationBegin(
- VmaAllocator allocator,
- const VmaDefragmentationInfo2* pInfo,
- VmaDefragmentationStats* pStats,
- VmaDefragmentationContext *pContext)
-{
- VMA_ASSERT(allocator && pInfo && pContext);
-
- // Degenerate case: Nothing to defragment.
- if(pInfo->allocationCount == 0 && pInfo->poolCount == 0)
- {
- return VK_SUCCESS;
- }
-
- VMA_ASSERT(pInfo->allocationCount == 0 || pInfo->pAllocations != VMA_NULL);
- VMA_ASSERT(pInfo->poolCount == 0 || pInfo->pPools != VMA_NULL);
- VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->allocationCount, pInfo->pAllocations));
- VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->poolCount, pInfo->pPools));
-
- VMA_DEBUG_LOG("vmaDefragmentationBegin");
-
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
-
- VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);
-
-#if VMA_RECORDING_ENABLED
- if(allocator->GetRecorder() != VMA_NULL)
- {
- allocator->GetRecorder()->RecordDefragmentationBegin(
- allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
- }
-#endif
-
- return res;
-}
-
-VkResult vmaDefragmentationEnd(
- VmaAllocator allocator,
- VmaDefragmentationContext context)
-{
- VMA_ASSERT(allocator);
-
- VMA_DEBUG_LOG("vmaDefragmentationEnd");
-
- if(context != VK_NULL_HANDLE)
- {
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
-
-#if VMA_RECORDING_ENABLED
- if(allocator->GetRecorder() != VMA_NULL)
- {
- allocator->GetRecorder()->RecordDefragmentationEnd(
- allocator->GetCurrentFrameIndex(), context);
- }
-#endif
-
- return allocator->DefragmentationEnd(context);
- }
- else
- {
- return VK_SUCCESS;
- }
-}
-
-VkResult vmaBindBufferMemory(
- VmaAllocator allocator,
- VmaAllocation allocation,
- VkBuffer buffer)
-{
- VMA_ASSERT(allocator && allocation && buffer);
-
- VMA_DEBUG_LOG("vmaBindBufferMemory");
-
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
-
- return allocator->BindBufferMemory(allocation, buffer);
-}
-
-VkResult vmaBindImageMemory(
- VmaAllocator allocator,
- VmaAllocation allocation,
- VkImage image)
-{
- VMA_ASSERT(allocator && allocation && image);
-
- VMA_DEBUG_LOG("vmaBindImageMemory");
-
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
-
- return allocator->BindImageMemory(allocation, image);
-}
-
-VkResult vmaCreateBuffer(
- VmaAllocator allocator,
- const VkBufferCreateInfo* pBufferCreateInfo,
- const VmaAllocationCreateInfo* pAllocationCreateInfo,
- VkBuffer* pBuffer,
- VmaAllocation* pAllocation,
- VmaAllocationInfo* pAllocationInfo)
-{
- VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
-
- if(pBufferCreateInfo->size == 0)
- {
- return VK_ERROR_VALIDATION_FAILED_EXT;
- }
-
- VMA_DEBUG_LOG("vmaCreateBuffer");
-
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
-
- *pBuffer = VK_NULL_HANDLE;
- *pAllocation = VK_NULL_HANDLE;
-
- // 1. Create VkBuffer.
- VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
- allocator->m_hDevice,
- pBufferCreateInfo,
- allocator->GetAllocationCallbacks(),
- pBuffer);
- if(res >= 0)
- {
- // 2. vkGetBufferMemoryRequirements.
- VkMemoryRequirements vkMemReq = {};
- bool requiresDedicatedAllocation = false;
- bool prefersDedicatedAllocation = false;
- allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
- requiresDedicatedAllocation, prefersDedicatedAllocation);
-
- // Make sure alignment requirements for specific buffer usages reported
- // in Physical Device Properties are included in alignment reported by memory requirements.
- if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
- {
- VMA_ASSERT(vkMemReq.alignment %
- allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
- }
- if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
- {
- VMA_ASSERT(vkMemReq.alignment %
- allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
- }
- if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
- {
- VMA_ASSERT(vkMemReq.alignment %
- allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
- }
-
- // 3. Allocate memory using allocator.
- res = allocator->AllocateMemory(
- vkMemReq,
- requiresDedicatedAllocation,
- prefersDedicatedAllocation,
- *pBuffer, // dedicatedBuffer
- VK_NULL_HANDLE, // dedicatedImage
- *pAllocationCreateInfo,
- VMA_SUBALLOCATION_TYPE_BUFFER,
- 1, // allocationCount
- pAllocation);
-
-#if VMA_RECORDING_ENABLED
- if(allocator->GetRecorder() != VMA_NULL)
- {
- allocator->GetRecorder()->RecordCreateBuffer(
- allocator->GetCurrentFrameIndex(),
- *pBufferCreateInfo,
- *pAllocationCreateInfo,
- *pAllocation);
- }
-#endif
-
- if(res >= 0)
- {
- // 4. Bind buffer with memory.
- res = allocator->BindBufferMemory(*pAllocation, *pBuffer);
- if(res >= 0)
- {
- // All steps succeeded.
- #if VMA_STATS_STRING_ENABLED
- (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
- #endif
- if(pAllocationInfo != VMA_NULL)
- {
- allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
- }
-
- return VK_SUCCESS;
- }
- allocator->FreeMemory(
- 1, // allocationCount
- pAllocation);
- *pAllocation = VK_NULL_HANDLE;
- (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
- *pBuffer = VK_NULL_HANDLE;
- return res;
- }
- (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
- *pBuffer = VK_NULL_HANDLE;
- return res;
- }
- return res;
-}
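For comparison with the numbered steps above (create the buffer, query requirements, allocate, bind), a typical call into vmaCreateBuffer might look like the following sketch; the buffer size and usage flags are hypothetical.

VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
bufferInfo.size = 64 * 1024;
bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

VmaAllocationCreateInfo allocCreateInfo = {};
allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

VkBuffer buffer = VK_NULL_HANDLE;
VmaAllocation allocation = VK_NULL_HANDLE;
VkResult res = vmaCreateBuffer(allocator, &bufferInfo, &allocCreateInfo,
                               &buffer, &allocation, VMA_NULL);   // pAllocationInfo is optional
if(res == VK_SUCCESS)
{
    // ... record commands that use buffer ...
    vmaDestroyBuffer(allocator, buffer, allocation);              // destroys the buffer and frees its memory
}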
-
-void vmaDestroyBuffer(
- VmaAllocator allocator,
- VkBuffer buffer,
- VmaAllocation allocation)
-{
- VMA_ASSERT(allocator);
-
- if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
- {
- return;
- }
-
- VMA_DEBUG_LOG("vmaDestroyBuffer");
-
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
-
-#if VMA_RECORDING_ENABLED
- if(allocator->GetRecorder() != VMA_NULL)
- {
- allocator->GetRecorder()->RecordDestroyBuffer(
- allocator->GetCurrentFrameIndex(),
- allocation);
- }
-#endif
-
- if(buffer != VK_NULL_HANDLE)
- {
- (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
- }
-
- if(allocation != VK_NULL_HANDLE)
- {
- allocator->FreeMemory(
- 1, // allocationCount
- &allocation);
- }
-}
-
-VkResult vmaCreateImage(
- VmaAllocator allocator,
- const VkImageCreateInfo* pImageCreateInfo,
- const VmaAllocationCreateInfo* pAllocationCreateInfo,
- VkImage* pImage,
- VmaAllocation* pAllocation,
- VmaAllocationInfo* pAllocationInfo)
-{
- VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
-
- if(pImageCreateInfo->extent.width == 0 ||
- pImageCreateInfo->extent.height == 0 ||
- pImageCreateInfo->extent.depth == 0 ||
- pImageCreateInfo->mipLevels == 0 ||
- pImageCreateInfo->arrayLayers == 0)
- {
- return VK_ERROR_VALIDATION_FAILED_EXT;
- }
-
- VMA_DEBUG_LOG("vmaCreateImage");
-
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
-
- *pImage = VK_NULL_HANDLE;
- *pAllocation = VK_NULL_HANDLE;
-
- // 1. Create VkImage.
- VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
- allocator->m_hDevice,
- pImageCreateInfo,
- allocator->GetAllocationCallbacks(),
- pImage);
- if(res >= 0)
- {
- VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
- VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
- VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
-
- // 2. Allocate memory using allocator.
- VkMemoryRequirements vkMemReq = {};
- bool requiresDedicatedAllocation = false;
- bool prefersDedicatedAllocation = false;
- allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
- requiresDedicatedAllocation, prefersDedicatedAllocation);
-
- res = allocator->AllocateMemory(
- vkMemReq,
- requiresDedicatedAllocation,
- prefersDedicatedAllocation,
- VK_NULL_HANDLE, // dedicatedBuffer
- *pImage, // dedicatedImage
- *pAllocationCreateInfo,
- suballocType,
- 1, // allocationCount
- pAllocation);
-
-#if VMA_RECORDING_ENABLED
- if(allocator->GetRecorder() != VMA_NULL)
- {
- allocator->GetRecorder()->RecordCreateImage(
- allocator->GetCurrentFrameIndex(),
- *pImageCreateInfo,
- *pAllocationCreateInfo,
- *pAllocation);
- }
-#endif
-
- if(res >= 0)
- {
- // 3. Bind image with memory.
- res = allocator->BindImageMemory(*pAllocation, *pImage);
- if(res >= 0)
- {
- // All steps succeeded.
- #if VMA_STATS_STRING_ENABLED
- (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
- #endif
- if(pAllocationInfo != VMA_NULL)
- {
- allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
- }
-
- return VK_SUCCESS;
- }
- allocator->FreeMemory(
- 1, // allocationCount
- pAllocation);
- *pAllocation = VK_NULL_HANDLE;
- (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
- *pImage = VK_NULL_HANDLE;
- return res;
- }
- (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
- *pImage = VK_NULL_HANDLE;
- return res;
- }
- return res;
-}
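vmaCreateImage mirrors the buffer path, with the suballocation type chosen from the image tiling. A sketch with hypothetical image parameters:

VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
imageInfo.imageType = VK_IMAGE_TYPE_2D;
imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
imageInfo.extent = { 1024, 1024, 1 };
imageInfo.mipLevels = 1;
imageInfo.arrayLayers = 1;
imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;            // maps to VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL
imageInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;

VmaAllocationCreateInfo allocCreateInfo = {};
allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

VkImage image = VK_NULL_HANDLE;
VmaAllocation allocation = VK_NULL_HANDLE;
if(vmaCreateImage(allocator, &imageInfo, &allocCreateInfo, &image, &allocation, VMA_NULL) == VK_SUCCESS)
{
    vmaDestroyImage(allocator, image, allocation);     // destroys the image and frees its memory
}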
-
-void vmaDestroyImage(
- VmaAllocator allocator,
- VkImage image,
- VmaAllocation allocation)
-{
- VMA_ASSERT(allocator);
-
- if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
- {
- return;
- }
-
- VMA_DEBUG_LOG("vmaDestroyImage");
-
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
-
-#if VMA_RECORDING_ENABLED
- if(allocator->GetRecorder() != VMA_NULL)
- {
- allocator->GetRecorder()->RecordDestroyImage(
- allocator->GetCurrentFrameIndex(),
- allocation);
- }
-#endif
-
- if(image != VK_NULL_HANDLE)
- {
- (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
- }
- if(allocation != VK_NULL_HANDLE)
- {
- allocator->FreeMemory(
- 1, // allocationCount
- &allocation);
- }
-}
-#if defined(__GNUC__)
-#pragma GCC diagnostic pop
-#if defined(__clang__)
-#pragma clang diagnostic pop
-#endif
-#endif
-#endif // #ifdef VMA_IMPLEMENTATION
-// clang-format on
diff --git a/layers/generated/vk_validation_error_messages.h b/layers/vk_validation_error_messages.h
index f8b9e1d1d..1d3a0a77b 100644
--- a/layers/generated/vk_validation_error_messages.h
+++ b/layers/vk_validation_error_messages.h
@@ -1,5 +1,7 @@
/* THIS FILE IS GENERATED - DO NOT EDIT (scripts/vk_validation_stats.py) */
-/* Vulkan specification version: 1.1.121 */
+/* Vulkan specification version: 1.1.102 */
+/* Header generated: 2019-03-06 12:07:54 */
+
/*
* Vulkan
*
@@ -26,7 +28,7 @@
// Disable auto-formatting for generated file
// clang-format off
-
+
// Mapping from VUID string to the corresponding spec text
typedef struct _vuid_spec_text_pair {
const char * vuid;
@@ -42,15 +44,12 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkAccelerationStructureInfoNV-flags-parameter", "flags must be a valid combination of VkBuildAccelerationStructureFlagBitsNV values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAccelerationStructureInfoNV-flags-parameter)"},
{"VUID-VkAccelerationStructureInfoNV-geometryCount-02422", "geometryCount must be less than or equal to VkPhysicalDeviceRayTracingPropertiesNV::maxGeometryCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAccelerationStructureInfoNV-geometryCount-02422)"},
{"VUID-VkAccelerationStructureInfoNV-instanceCount-02423", "instanceCount must be less than or equal to VkPhysicalDeviceRayTracingPropertiesNV::maxInstanceCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAccelerationStructureInfoNV-instanceCount-02423)"},
- {"VUID-VkAccelerationStructureInfoNV-instanceData-02782", "If instanceData is not VK_NULL_HANDLE, instanceData must have been created with VK_BUFFER_USAGE_RAY_TRACING_BIT_NV usage flag (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAccelerationStructureInfoNV-instanceData-02782)"},
{"VUID-VkAccelerationStructureInfoNV-maxTriangleCount-02424", "The total number of triangles in all geometries must be less than or equal to VkPhysicalDeviceRayTracingPropertiesNV::maxTriangleCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAccelerationStructureInfoNV-maxTriangleCount-02424)"},
{"VUID-VkAccelerationStructureInfoNV-pGeometries-parameter", "If geometryCount is not 0, pGeometries must be a valid pointer to an array of geometryCount valid VkGeometryNV structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAccelerationStructureInfoNV-pGeometries-parameter)"},
{"VUID-VkAccelerationStructureInfoNV-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAccelerationStructureInfoNV-pNext-pNext)"},
{"VUID-VkAccelerationStructureInfoNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAccelerationStructureInfoNV-sType-sType)"},
- {"VUID-VkAccelerationStructureInfoNV-scratch-02781", "scratch must have been created with VK_BUFFER_USAGE_RAY_TRACING_BIT_NV usage flag (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAccelerationStructureInfoNV-scratch-02781)"},
{"VUID-VkAccelerationStructureInfoNV-type-02425", "If type is VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV then geometryCount must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAccelerationStructureInfoNV-type-02425)"},
{"VUID-VkAccelerationStructureInfoNV-type-02426", "If type is VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV then instanceCount must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAccelerationStructureInfoNV-type-02426)"},
- {"VUID-VkAccelerationStructureInfoNV-type-02786", "If type is VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV then the geometryType member of each geometry in pGeometries must be the same (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAccelerationStructureInfoNV-type-02786)"},
{"VUID-VkAccelerationStructureInfoNV-type-parameter", "type must be a valid VkAccelerationStructureTypeNV value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAccelerationStructureInfoNV-type-parameter)"},
{"VUID-VkAccelerationStructureMemoryRequirementsInfoNV-accelerationStructure-parameter", "accelerationStructure must be a valid VkAccelerationStructureNV handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAccelerationStructureMemoryRequirementsInfoNV-accelerationStructure-parameter)"},
{"VUID-VkAccelerationStructureMemoryRequirementsInfoNV-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAccelerationStructureMemoryRequirementsInfoNV-pNext-pNext)"},
@@ -66,6 +65,7 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkAcquireNextImageInfoKHR-semaphore-01288", "If semaphore is not VK_NULL_HANDLE it must be unsignaled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAcquireNextImageInfoKHR-semaphore-01288)"},
{"VUID-VkAcquireNextImageInfoKHR-semaphore-01781", "If semaphore is not VK_NULL_HANDLE it must not have any uncompleted signal or wait operations pending (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAcquireNextImageInfoKHR-semaphore-01781)"},
{"VUID-VkAcquireNextImageInfoKHR-semaphore-01782", "semaphore and fence must not both be equal to VK_NULL_HANDLE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAcquireNextImageInfoKHR-semaphore-01782)"},
+ {"VUID-VkAcquireNextImageInfoKHR-semaphore-01804", "semaphore and fence must not both be equal to VK_NULL_HANDLE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAcquireNextImageInfoKHR-semaphore-01804)"},
{"VUID-VkAcquireNextImageInfoKHR-semaphore-parameter", "If semaphore is not VK_NULL_HANDLE, semaphore must be a valid VkSemaphore handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAcquireNextImageInfoKHR-semaphore-parameter)"},
{"VUID-VkAcquireNextImageInfoKHR-swapchain-01675", "swapchain must not be in the retired state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAcquireNextImageInfoKHR-swapchain-01675)"},
{"VUID-VkAcquireNextImageInfoKHR-swapchain-parameter", "swapchain must be a valid VkSwapchainKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAcquireNextImageInfoKHR-swapchain-parameter)"},
@@ -135,10 +135,8 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkBindBufferMemoryInfo-buffer-01604", "If buffer was not created with VkDedicatedAllocationBufferCreateInfoNV::dedicatedAllocation equal to VK_TRUE, memory must not have been allocated dedicated for a specific buffer or image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindBufferMemoryInfo-buffer-01604)"},
{"VUID-VkBindBufferMemoryInfo-buffer-parameter", "buffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindBufferMemoryInfo-buffer-parameter)"},
{"VUID-VkBindBufferMemoryInfo-commonparent", "Both of buffer, and memory must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindBufferMemoryInfo-commonparent)"},
- {"VUID-VkBindBufferMemoryInfo-handleTypes-02791", "If the value of VkExportMemoryAllocateInfo::handleTypes used to allocate memory is not 0, it must include at least one of the handles set in VkExternalMemoryImageCreateInfo::handleTypes when image was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindBufferMemoryInfo-handleTypes-02791)"},
{"VUID-VkBindBufferMemoryInfo-memory-01599", "memory must have been allocated using one of the memory types allowed in the memoryTypeBits member of the VkMemoryRequirements structure returned from a call to vkGetBufferMemoryRequirements with buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindBufferMemoryInfo-memory-01599)"},
{"VUID-VkBindBufferMemoryInfo-memory-01900", "If the VkMemoryAllocateInfo provided when memory was allocated included an instance of VkMemoryDedicatedAllocateInfo in its pNext chain, and VkMemoryDedicatedAllocateInfo::buffer was not VK_NULL_HANDLE, then buffer must equal VkMemoryDedicatedAllocateInfo::buffer and memoryOffset must be zero. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindBufferMemoryInfo-memory-01900)"},
- {"VUID-VkBindBufferMemoryInfo-memory-02792", "If memory was created by a memory import operation, the external handle type of the imported memory must also have been set in VkExternalMemoryBufferCreateInfo::handleTypes when buffer was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindBufferMemoryInfo-memory-02792)"},
{"VUID-VkBindBufferMemoryInfo-memory-parameter", "memory must be a valid VkDeviceMemory handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindBufferMemoryInfo-memory-parameter)"},
{"VUID-VkBindBufferMemoryInfo-memoryOffset-01595", "memoryOffset must be less than the size of memory (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindBufferMemoryInfo-memoryOffset-01595)"},
{"VUID-VkBindBufferMemoryInfo-memoryOffset-01600", "memoryOffset must be an integer multiple of the alignment member of the VkMemoryRequirements structure returned from a call to vkGetBufferMemoryRequirements with buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindBufferMemoryInfo-memoryOffset-01600)"},
@@ -159,13 +157,12 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkBindImageMemoryDeviceGroupInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryDeviceGroupInfo-sType-sType)"},
{"VUID-VkBindImageMemoryDeviceGroupInfo-splitInstanceBindRegionCount-01636", "splitInstanceBindRegionCount must either be zero or equal to the number of physical devices in the logical device squared (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryDeviceGroupInfo-splitInstanceBindRegionCount-01636)"},
{"VUID-VkBindImageMemoryInfo-commonparent", "Both of image, and memory that are valid handles must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-commonparent)"},
- {"VUID-VkBindImageMemoryInfo-handleTypes-02793", "If the value of VkExportMemoryAllocateInfo::handleTypes used to allocate memory is not 0, it must include at least one of the handles set in VkExternalMemoryImageCreateInfo::handleTypes when image was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-handleTypes-02793)"},
{"VUID-VkBindImageMemoryInfo-image-01609", "image must not already be backed by a memory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-image-01609)"},
{"VUID-VkBindImageMemoryInfo-image-01610", "image must not have been created with any sparse memory binding flags (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-image-01610)"},
{"VUID-VkBindImageMemoryInfo-image-01622", "If image requires a dedicated allocation (as reported by vkGetImageMemoryRequirements2 in VkMemoryDedicatedRequirements::requiresDedicatedAllocation for image), memory must have been created with VkMemoryDedicatedAllocateInfo::image equal to image and memoryOffset must be zero (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-image-01622)"},
{"VUID-VkBindImageMemoryInfo-image-01623", "If image was created with VkDedicatedAllocationImageCreateInfoNV::dedicatedAllocation equal to VK_TRUE, memory must have been created with VkDedicatedAllocationMemoryAllocateInfoNV::image equal to image and memoryOffset must be zero (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-image-01623)"},
{"VUID-VkBindImageMemoryInfo-image-01624", "If image was not created with VkDedicatedAllocationImageCreateInfoNV::dedicatedAllocation equal to VK_TRUE, memory must not have been allocated dedicated for a specific buffer or image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-image-01624)"},
- {"VUID-VkBindImageMemoryInfo-image-01630", "If image was created with a valid swapchain handle in VkImageSwapchainCreateInfoKHR::swapchain, then the pNext chain must include a valid instance of VkBindImageMemorySwapchainInfoKHR containing the same swapchain handle. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-image-01630)"},
+ {"VUID-VkBindImageMemoryInfo-image-01630", "If image was created with a valid swapchain handle in VkImageSwapchainCreateInfoKHR::swapchain, then the pNext chain must include a valid instance of VkBindImageMemorySwapchainInfoKHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-image-01630)"},
{"VUID-VkBindImageMemoryInfo-image-parameter", "image must be a valid VkImage handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-image-parameter)"},
{"VUID-VkBindImageMemoryInfo-memory-01612", "memory must have been allocated using one of the memory types allowed in the memoryTypeBits member of the VkMemoryRequirements structure returned from a call to vkGetImageMemoryRequirements with image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-memory-01612)"},
{"VUID-VkBindImageMemoryInfo-memory-01614", "The difference of the size of memory and memoryOffset must be greater than or equal to the size member of the VkMemoryRequirements structure returned from a call to vkGetImageMemoryRequirements with the same image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-memory-01614)"},
@@ -173,7 +170,6 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkBindImageMemoryInfo-memory-01903", "If the VkMemoryAllocateInfo provided when memory was allocated included an instance of VkMemoryDedicatedAllocateInfo in its pNext chain, and VkMemoryDedicatedAllocateInfo::image was not VK_NULL_HANDLE, then image must equal VkMemoryDedicatedAllocateInfo::image and memoryOffset must be zero. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-memory-01903)"},
{"VUID-VkBindImageMemoryInfo-memory-02630", "If the dedicated allocation image aliasing feature is not enabled, and the VkMemoryAllocateInfo provided when memory was allocated included an instance of VkMemoryDedicatedAllocateInfo in its pNext chain, and VkMemoryDedicatedAllocateInfo::image was not VK_NULL_HANDLE, then image must equal VkMemoryDedicatedAllocateInfo::image and memoryOffset must be zero. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-memory-02630)"},
{"VUID-VkBindImageMemoryInfo-memory-02631", "If the dedicated allocation image aliasing feature is enabled, and the VkMemoryAllocateInfo provided when memory was allocated included an instance of VkMemoryDedicatedAllocateInfo in its pNext chain, and VkMemoryDedicatedAllocateInfo::image was not VK_NULL_HANDLE, then memoryOffset must be zero, and image must be either equal to VkMemoryDedicatedAllocateInfo::image or an image that was created using the same parameters in VkImageCreateInfo, with the exception that extent and arrayLayers may differ subject to the following restrictions: every dimension in the extent parameter of the image being bound must be equal to or smaller than the original image for which the allocation was created; and the arrayLayers parameter of the image being bound must be equal to or smaller than the original image for which the allocation was created. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-memory-02631)"},
- {"VUID-VkBindImageMemoryInfo-memory-02794", "If memory was created by a memory import operation, the external handle type of the imported memory must also have been set in VkExternalMemoryImageCreateInfo::handleTypes when image was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-memory-02794)"},
{"VUID-VkBindImageMemoryInfo-memoryOffset-01611", "memoryOffset must be less than the size of memory (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-memoryOffset-01611)"},
{"VUID-VkBindImageMemoryInfo-memoryOffset-01613", "memoryOffset must be an integer multiple of the alignment member of the VkMemoryRequirements structure returned from a call to vkGetImageMemoryRequirements with image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-memoryOffset-01613)"},
{"VUID-VkBindImageMemoryInfo-pNext-01615", "If the pNext chain does not include an instance of the VkBindImagePlaneMemoryInfo structure, memory must have been allocated using one of the memory types allowed in the memoryTypeBits member of the VkMemoryRequirements structure returned from a call to vkGetImageMemoryRequirements2 with image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-pNext-01615)"},
@@ -293,14 +289,12 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkBufferViewCreateInfo-buffer-00933", "If buffer was created with usage containing VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT, format must be supported for uniform texel buffers, as specified by the VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT flag in VkFormatProperties::bufferFeatures returned by vkGetPhysicalDeviceFormatProperties (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferViewCreateInfo-buffer-00933)"},
{"VUID-VkBufferViewCreateInfo-buffer-00934", "If buffer was created with usage containing VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT, format must be supported for storage texel buffers, as specified by the VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT flag in VkFormatProperties::bufferFeatures returned by vkGetPhysicalDeviceFormatProperties (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferViewCreateInfo-buffer-00934)"},
{"VUID-VkBufferViewCreateInfo-buffer-00935", "If buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferViewCreateInfo-buffer-00935)"},
- {"VUID-VkBufferViewCreateInfo-buffer-02750", "If the texelBufferAlignment feature is enabled and if buffer was created with usage containing VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT, offset must be a multiple of the lesser of VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT::storageTexelBufferOffsetAlignmentBytes or, if VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT::storageTexelBufferOffsetSingleTexelAlignment is VK_TRUE, the size of a texel of the requested format. If the size of a texel is a multiple of three bytes, then the size of a single component of format is used instead (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferViewCreateInfo-buffer-02750)"},
- {"VUID-VkBufferViewCreateInfo-buffer-02751", "If the texelBufferAlignment feature is enabled and if buffer was created with usage containing VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT, offset must be a multiple of the lesser of VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT::uniformTexelBufferOffsetAlignmentBytes or, if VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT::uniformTexelBufferOffsetSingleTexelAlignment is VK_TRUE, the size of a texel of the requested format. If the size of a texel is a multiple of three bytes, then the size of a single component of format is used instead (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferViewCreateInfo-buffer-02751)"},
{"VUID-VkBufferViewCreateInfo-buffer-parameter", "buffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferViewCreateInfo-buffer-parameter)"},
{"VUID-VkBufferViewCreateInfo-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferViewCreateInfo-flags-zerobitmask)"},
{"VUID-VkBufferViewCreateInfo-format-parameter", "format must be a valid VkFormat value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferViewCreateInfo-format-parameter)"},
{"VUID-VkBufferViewCreateInfo-offset-00925", "offset must be less than the size of buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferViewCreateInfo-offset-00925)"},
+ {"VUID-VkBufferViewCreateInfo-offset-00926", "offset must be a multiple of VkPhysicalDeviceLimits::minTexelBufferOffsetAlignment (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferViewCreateInfo-offset-00926)"},
{"VUID-VkBufferViewCreateInfo-offset-00931", "If range is not equal to VK_WHOLE_SIZE, the sum of offset and range must be less than or equal to the size of buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferViewCreateInfo-offset-00931)"},
- {"VUID-VkBufferViewCreateInfo-offset-02749", "If the texelBufferAlignment feature is not enabled, offset must be a multiple of VkPhysicalDeviceLimits::minTexelBufferOffsetAlignment (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferViewCreateInfo-offset-02749)"},
{"VUID-VkBufferViewCreateInfo-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferViewCreateInfo-pNext-pNext)"},
{"VUID-VkBufferViewCreateInfo-range-00928", "If range is not equal to VK_WHOLE_SIZE, range must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferViewCreateInfo-range-00928)"},
{"VUID-VkBufferViewCreateInfo-range-00929", "If range is not equal to VK_WHOLE_SIZE, range must be an integer multiple of the texel block size of format (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferViewCreateInfo-range-00929)"},
@@ -374,9 +368,7 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkCommandBufferInheritanceInfo-occlusionQueryEnable-00056", "If the inherited queries feature is not enabled, occlusionQueryEnable must be VK_FALSE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCommandBufferInheritanceInfo-occlusionQueryEnable-00056)"},
{"VUID-VkCommandBufferInheritanceInfo-pNext-pNext", "pNext must be NULL or a pointer to a valid instance of VkCommandBufferInheritanceConditionalRenderingInfoEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCommandBufferInheritanceInfo-pNext-pNext)"},
{"VUID-VkCommandBufferInheritanceInfo-pipelineStatistics-00058", "If the pipeline statistics queries feature is not enabled, pipelineStatistics must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCommandBufferInheritanceInfo-pipelineStatistics-00058)"},
- {"VUID-VkCommandBufferInheritanceInfo-pipelineStatistics-02789", "If the pipeline statistics queries feature is enabled, pipelineStatistics must be a valid combination of VkQueryPipelineStatisticFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCommandBufferInheritanceInfo-pipelineStatistics-02789)"},
{"VUID-VkCommandBufferInheritanceInfo-queryFlags-00057", "If the inherited queries feature is enabled, queryFlags must be a valid combination of VkQueryControlFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCommandBufferInheritanceInfo-queryFlags-00057)"},
- {"VUID-VkCommandBufferInheritanceInfo-queryFlags-02788", "If the inherited queries feature is not enabled, queryFlags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCommandBufferInheritanceInfo-queryFlags-02788)"},
{"VUID-VkCommandBufferInheritanceInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCommandBufferInheritanceInfo-sType-sType)"},
{"VUID-VkCommandPoolCreateInfo-flags-parameter", "flags must be a valid combination of VkCommandPoolCreateFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCommandPoolCreateInfo-flags-parameter)"},
{"VUID-VkCommandPoolCreateInfo-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCommandPoolCreateInfo-pNext-pNext)"},
@@ -394,9 +386,8 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkComputePipelineCreateInfo-layout-00703", "layout must be consistent with the layout of the compute shader specified in stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkComputePipelineCreateInfo-layout-00703)"},
{"VUID-VkComputePipelineCreateInfo-layout-01687", "The number of resources in layout accessible to the compute shader stage must be less than or equal to VkPhysicalDeviceLimits::maxPerStageResources (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkComputePipelineCreateInfo-layout-01687)"},
{"VUID-VkComputePipelineCreateInfo-layout-parameter", "layout must be a valid VkPipelineLayout handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkComputePipelineCreateInfo-layout-parameter)"},
- {"VUID-VkComputePipelineCreateInfo-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkPipelineCompilerControlCreateInfoAMD or VkPipelineCreationFeedbackCreateInfoEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkComputePipelineCreateInfo-pNext-pNext)"},
+ {"VUID-VkComputePipelineCreateInfo-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkComputePipelineCreateInfo-pNext-pNext)"},
{"VUID-VkComputePipelineCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkComputePipelineCreateInfo-sType-sType)"},
- {"VUID-VkComputePipelineCreateInfo-sType-unique", "Each sType member in the pNext chain must be unique (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkComputePipelineCreateInfo-sType-unique)"},
{"VUID-VkComputePipelineCreateInfo-stage-00701", "The stage member of stage must be VK_SHADER_STAGE_COMPUTE_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkComputePipelineCreateInfo-stage-00701)"},
{"VUID-VkComputePipelineCreateInfo-stage-00702", "The shader code for the entry point identified by stage and the rest of the state identified by this structure must adhere to the pipeline linking rules described in the Shader Interfaces chapter (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkComputePipelineCreateInfo-stage-00702)"},
{"VUID-VkComputePipelineCreateInfo-stage-parameter", "stage must be a valid VkPipelineShaderStageCreateInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkComputePipelineCreateInfo-stage-parameter)"},
@@ -420,7 +411,6 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkCopyDescriptorSet-dstBinding-00347", "dstBinding must be a valid binding within dstSet (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCopyDescriptorSet-dstBinding-00347)"},
{"VUID-VkCopyDescriptorSet-dstBinding-02224", "If the descriptor type of the descriptor set binding specified by dstBinding is VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT, dstArrayElement must be an integer multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCopyDescriptorSet-dstBinding-02224)"},
{"VUID-VkCopyDescriptorSet-dstBinding-02632", "The type of dstBinding within dstSet must be equal to the type of srcBinding within srcSet (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCopyDescriptorSet-dstBinding-02632)"},
- {"VUID-VkCopyDescriptorSet-dstBinding-02753", "If the descriptor type of the descriptor set binding specified by dstBinding is VK_DESCRIPTOR_TYPE_SAMPLER, then dstSet must not have been allocated with a layout that included immutable samplers for dstBinding (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCopyDescriptorSet-dstBinding-02753)"},
{"VUID-VkCopyDescriptorSet-dstSet-parameter", "dstSet must be a valid VkDescriptorSet handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCopyDescriptorSet-dstSet-parameter)"},
{"VUID-VkCopyDescriptorSet-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCopyDescriptorSet-pNext-pNext)"},
{"VUID-VkCopyDescriptorSet-sType-sType", "sType must be VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCopyDescriptorSet-sType-sType)"},
@@ -458,7 +448,7 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkDebugMarkerObjectTagInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugMarkerObjectTagInfoEXT-sType-sType)"},
{"VUID-VkDebugMarkerObjectTagInfoEXT-tagSize-arraylength", "tagSize must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugMarkerObjectTagInfoEXT-tagSize-arraylength)"},
{"VUID-VkDebugReportCallbackCreateInfoEXT-flags-parameter", "flags must be a valid combination of VkDebugReportFlagBitsEXT values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugReportCallbackCreateInfoEXT-flags-parameter)"},
- {"VUID-VkDebugReportCallbackCreateInfoEXT-pfnCallback-parameter", "pfnCallback must be a valid PFN_vkDebugReportCallbackEXT value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugReportCallbackCreateInfoEXT-pfnCallback-parameter)"},
+ {"VUID-VkDebugReportCallbackCreateInfoEXT-pfnCallback-01385", "pfnCallback must be a valid PFN_vkDebugReportCallbackEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugReportCallbackCreateInfoEXT-pfnCallback-01385)"},
{"VUID-VkDebugReportCallbackCreateInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugReportCallbackCreateInfoEXT-sType-sType)"},
{"VUID-VkDebugUtilsLabelEXT-pLabelName-parameter", "pLabelName must be a null-terminated UTF-8 string (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugUtilsLabelEXT-pLabelName-parameter)"},
{"VUID-VkDebugUtilsLabelEXT-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugUtilsLabelEXT-pNext-pNext)"},
@@ -477,7 +467,6 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkDebugUtilsMessengerCreateInfoEXT-messageType-parameter", "messageType must be a valid combination of VkDebugUtilsMessageTypeFlagBitsEXT values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugUtilsMessengerCreateInfoEXT-messageType-parameter)"},
{"VUID-VkDebugUtilsMessengerCreateInfoEXT-messageType-requiredbitmask", "messageType must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugUtilsMessengerCreateInfoEXT-messageType-requiredbitmask)"},
{"VUID-VkDebugUtilsMessengerCreateInfoEXT-pfnUserCallback-01914", "pfnUserCallback must be a valid PFN_vkDebugUtilsMessengerCallbackEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugUtilsMessengerCreateInfoEXT-pfnUserCallback-01914)"},
- {"VUID-VkDebugUtilsMessengerCreateInfoEXT-pfnUserCallback-parameter", "pfnUserCallback must be a valid PFN_vkDebugUtilsMessengerCallbackEXT value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugUtilsMessengerCreateInfoEXT-pfnUserCallback-parameter)"},
{"VUID-VkDebugUtilsMessengerCreateInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugUtilsMessengerCreateInfoEXT-sType-sType)"},
{"VUID-VkDebugUtilsObjectNameInfoEXT-objectType-02589", "If objectType is VK_OBJECT_TYPE_UNKNOWN, objectHandle must not be VK_NULL_HANDLE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugUtilsObjectNameInfoEXT-objectType-02589)"},
{"VUID-VkDebugUtilsObjectNameInfoEXT-objectType-02590", "If objectType is not VK_OBJECT_TYPE_UNKNOWN, objectHandle must be VK_NULL_HANDLE or a valid Vulkan handle of the type associated with objectType as defined in the VkObjectType and Vulkan Handle Relationship table (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugUtilsObjectNameInfoEXT-objectType-02590)"},
@@ -595,7 +584,7 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkDeviceCreateInfo-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceCreateInfo-flags-zerobitmask)"},
{"VUID-VkDeviceCreateInfo-pEnabledFeatures-parameter", "If pEnabledFeatures is not NULL, pEnabledFeatures must be a valid pointer to a valid VkPhysicalDeviceFeatures structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceCreateInfo-pEnabledFeatures-parameter)"},
{"VUID-VkDeviceCreateInfo-pNext-00373", "If the pNext chain includes a VkPhysicalDeviceFeatures2 structure, then pEnabledFeatures must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceCreateInfo-pNext-00373)"},
- {"VUID-VkDeviceCreateInfo-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkDeviceGroupDeviceCreateInfo, VkDeviceMemoryOverallocationCreateInfoAMD, VkPhysicalDevice16BitStorageFeatures, VkPhysicalDevice8BitStorageFeaturesKHR, VkPhysicalDeviceASTCDecodeFeaturesEXT, VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT, VkPhysicalDeviceBufferDeviceAddressFeaturesEXT, VkPhysicalDeviceCoherentMemoryFeaturesAMD, VkPhysicalDeviceComputeShaderDerivativesFeaturesNV, VkPhysicalDeviceConditionalRenderingFeaturesEXT, VkPhysicalDeviceCooperativeMatrixFeaturesNV, VkPhysicalDeviceCornerSampledImageFeaturesNV, VkPhysicalDeviceCoverageReductionModeFeaturesNV, VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV, VkPhysicalDeviceDepthClipEnableFeaturesEXT, VkPhysicalDeviceDescriptorIndexingFeaturesEXT, VkPhysicalDeviceExclusiveScissorFeaturesNV, VkPhysicalDeviceFeatures2, VkPhysicalDeviceFragmentDensityMapFeaturesEXT, VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV, VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT, VkPhysicalDeviceHostQueryResetFeaturesEXT, VkPhysicalDeviceImagelessFramebufferFeaturesKHR, VkPhysicalDeviceIndexTypeUint8FeaturesEXT, VkPhysicalDeviceInlineUniformBlockFeaturesEXT, VkPhysicalDeviceLineRasterizationFeaturesEXT, VkPhysicalDeviceMemoryPriorityFeaturesEXT, VkPhysicalDeviceMeshShaderFeaturesNV, VkPhysicalDeviceMultiviewFeatures, VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR, VkPhysicalDeviceProtectedMemoryFeatures, VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV, VkPhysicalDeviceSamplerYcbcrConversionFeatures, VkPhysicalDeviceScalarBlockLayoutFeaturesEXT, VkPhysicalDeviceShaderAtomicInt64FeaturesKHR, VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT, VkPhysicalDeviceShaderDrawParametersFeatures, VkPhysicalDeviceShaderFloat16Int8FeaturesKHR, VkPhysicalDeviceShaderImageFootprintFeaturesNV, VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL, VkPhysicalDeviceShaderSMBuiltinsFeaturesNV, VkPhysicalDeviceShadingRateImageFeaturesNV, VkPhysicalDeviceSubgroupSizeControlFeaturesEXT, VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT, VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT, VkPhysicalDeviceTransformFeedbackFeaturesEXT, VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR, VkPhysicalDeviceVariablePointersFeatures, VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT, VkPhysicalDeviceVulkanMemoryModelFeaturesKHR, or VkPhysicalDeviceYcbcrImageArraysFeaturesEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceCreateInfo-pNext-pNext)"},
+ {"VUID-VkDeviceCreateInfo-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkDeviceGroupDeviceCreateInfo, VkDeviceMemoryOverallocationCreateInfoAMD, VkPhysicalDevice16BitStorageFeatures, VkPhysicalDevice8BitStorageFeaturesKHR, VkPhysicalDeviceASTCDecodeFeaturesEXT, VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT, VkPhysicalDeviceBufferAddressFeaturesEXT, VkPhysicalDeviceComputeShaderDerivativesFeaturesNV, VkPhysicalDeviceConditionalRenderingFeaturesEXT, VkPhysicalDeviceCooperativeMatrixFeaturesNV, VkPhysicalDeviceCornerSampledImageFeaturesNV, VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV, VkPhysicalDeviceDepthClipEnableFeaturesEXT, VkPhysicalDeviceDescriptorIndexingFeaturesEXT, VkPhysicalDeviceExclusiveScissorFeaturesNV, VkPhysicalDeviceFeatures2, VkPhysicalDeviceFloat16Int8FeaturesKHR, VkPhysicalDeviceFragmentDensityMapFeaturesEXT, VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV, VkPhysicalDeviceInlineUniformBlockFeaturesEXT, VkPhysicalDeviceMemoryPriorityFeaturesEXT, VkPhysicalDeviceMeshShaderFeaturesNV, VkPhysicalDeviceMultiviewFeatures, VkPhysicalDeviceProtectedMemoryFeatures, VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV, VkPhysicalDeviceSamplerYcbcrConversionFeatures, VkPhysicalDeviceScalarBlockLayoutFeaturesEXT, VkPhysicalDeviceShaderAtomicInt64FeaturesKHR, VkPhysicalDeviceShaderDrawParameterFeatures, VkPhysicalDeviceShaderImageFootprintFeaturesNV, VkPhysicalDeviceShadingRateImageFeaturesNV, VkPhysicalDeviceTransformFeedbackFeaturesEXT, VkPhysicalDeviceVariablePointerFeatures, VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT, VkPhysicalDeviceVulkanMemoryModelFeaturesKHR, or VkPhysicalDeviceYcbcrImageArraysFeaturesEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceCreateInfo-pNext-pNext)"},
{"VUID-VkDeviceCreateInfo-pQueueCreateInfos-parameter", "pQueueCreateInfos must be a valid pointer to an array of queueCreateInfoCount valid VkDeviceQueueCreateInfo structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceCreateInfo-pQueueCreateInfos-parameter)"},
{"VUID-VkDeviceCreateInfo-ppEnabledExtensionNames-00374", "ppEnabledExtensionNames must not contain both VK_KHR_maintenance1 and VK_AMD_negative_viewport_height (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceCreateInfo-ppEnabledExtensionNames-00374)"},
{"VUID-VkDeviceCreateInfo-ppEnabledExtensionNames-01840", "ppEnabledExtensionNames must not contain VK_AMD_negative_viewport_height (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceCreateInfo-ppEnabledExtensionNames-01840)"},
@@ -686,7 +675,6 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkDisplayModeParametersKHR-width-01990", "The width member of visibleRegion must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDisplayModeParametersKHR-width-01990)"},
{"VUID-VkDisplayModeProperties2KHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDisplayModeProperties2KHR-pNext-pNext)"},
{"VUID-VkDisplayModeProperties2KHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDisplayModeProperties2KHR-sType-sType)"},
- {"VUID-VkDisplayNativeHdrSurfaceCapabilitiesAMD-sType-sType", "sType must be VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDisplayNativeHdrSurfaceCapabilitiesAMD-sType-sType)"},
{"VUID-VkDisplayPlaneCapabilities2KHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDisplayPlaneCapabilities2KHR-pNext-pNext)"},
{"VUID-VkDisplayPlaneCapabilities2KHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDisplayPlaneCapabilities2KHR-sType-sType)"},
{"VUID-VkDisplayPlaneInfo2KHR-mode-parameter", "mode must be a valid VkDisplayModeKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDisplayPlaneInfo2KHR-mode-parameter)"},
@@ -787,68 +775,35 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkFilterCubicImageViewImageFormatPropertiesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFilterCubicImageViewImageFormatPropertiesEXT-sType-sType)"},
{"VUID-VkFormatProperties2-pNext-pNext", "pNext must be NULL or a pointer to a valid instance of VkDrmFormatModifierPropertiesListEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFormatProperties2-pNext-pNext)"},
{"VUID-VkFormatProperties2-sType-sType", "sType must be VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFormatProperties2-sType-sType)"},
- {"VUID-VkFramebufferAttachmentImageInfoKHR-flags-parameter", "flags must be a valid combination of VkImageCreateFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferAttachmentImageInfoKHR-flags-parameter)"},
- {"VUID-VkFramebufferAttachmentImageInfoKHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferAttachmentImageInfoKHR-pNext-pNext)"},
- {"VUID-VkFramebufferAttachmentImageInfoKHR-pViewFormats-parameter", "If viewFormatCount is not 0, pViewFormats must be a valid pointer to an array of viewFormatCount valid VkFormat values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferAttachmentImageInfoKHR-pViewFormats-parameter)"},
- {"VUID-VkFramebufferAttachmentImageInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferAttachmentImageInfoKHR-sType-sType)"},
- {"VUID-VkFramebufferAttachmentImageInfoKHR-usage-parameter", "usage must be a valid combination of VkImageUsageFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferAttachmentImageInfoKHR-usage-parameter)"},
- {"VUID-VkFramebufferAttachmentImageInfoKHR-usage-requiredbitmask", "usage must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferAttachmentImageInfoKHR-usage-requiredbitmask)"},
- {"VUID-VkFramebufferAttachmentsCreateInfoKHR-pAttachmentImageInfos-parameter", "If attachmentImageInfoCount is not 0, pAttachmentImageInfos must be a valid pointer to an array of attachmentImageInfoCount valid VkFramebufferAttachmentImageInfoKHR structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferAttachmentsCreateInfoKHR-pAttachmentImageInfos-parameter)"},
- {"VUID-VkFramebufferAttachmentsCreateInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferAttachmentsCreateInfoKHR-sType-sType)"},
{"VUID-VkFramebufferCreateInfo-attachmentCount-00876", "attachmentCount must be equal to the attachment count specified in renderPass (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-attachmentCount-00876)"},
{"VUID-VkFramebufferCreateInfo-commonparent", "Both of renderPass, and the elements of pAttachments that are valid handles must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-commonparent)"},
- {"VUID-VkFramebufferCreateInfo-flags-02778", "If flags does not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, and attachmentCount is not 0, pAttachments must be a valid pointer to an array of attachmentCount valid VkImageView handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-flags-02778)"},
- {"VUID-VkFramebufferCreateInfo-flags-03188", "If flags does not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, and attachmentCount is not 0, pAttachments must be a valid pointer to an array of attachmentCount valid VkImageView handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-flags-03188)"},
- {"VUID-VkFramebufferCreateInfo-flags-03189", "If the imageless framebuffer feature is not enabled, flags must not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-flags-03189)"},
- {"VUID-VkFramebufferCreateInfo-flags-03190", "If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, the pNext chain must include an instance of VkFramebufferAttachmentsCreateInfoKHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-flags-03190)"},
- {"VUID-VkFramebufferCreateInfo-flags-03191", "If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, the attachmentImageInfoCount member of an instance of VkFramebufferAttachmentsCreateInfoKHR in the pNext chain must be equal to either zero or attachmentCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-flags-03191)"},
- {"VUID-VkFramebufferCreateInfo-flags-03192", "If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, the width member of any element of the pAttachmentImageInfos member of an instance of VkFramebufferAttachmentsCreateInfoKHR in the pNext chain must be greater than or equal to width (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-flags-03192)"},
- {"VUID-VkFramebufferCreateInfo-flags-03193", "If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, the height member of any element of the pAttachmentImageInfos member of an instance of VkFramebufferAttachmentsCreateInfoKHR in the pNext chain must be greater than or equal to height (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-flags-03193)"},
- {"VUID-VkFramebufferCreateInfo-flags-03194", "If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, the width member of any element of the pAttachmentImageInfos member of an instance of VkFramebufferAttachmentsCreateInfoKHR in the pNext chain must be greater than or equal to width, except for any element that is referenced by VkRenderPassFragmentDensityMapCreateInfoEXT::fragmentDensityMapAttachment in renderPass (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-flags-03194)"},
- {"VUID-VkFramebufferCreateInfo-flags-03195", "If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, the height member of any element of the pAttachmentImageInfos member of an instance of VkFramebufferAttachmentsCreateInfoKHR in the pNext chain must be greater than or equal to height, except for any element that is referenced by VkRenderPassFragmentDensityMapCreateInfoEXT::fragmentDensityMapAttachment in renderPass (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-flags-03195)"},
- {"VUID-VkFramebufferCreateInfo-flags-03196", "If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, the width member of any element of the pAttachmentImageInfos member of an instance of VkFramebufferAttachmentsCreateInfoKHR in the pNext chain that is referenced by VkRenderPassFragmentDensityMapCreateInfoEXT::fragmentDensityMapAttachment in renderPass must be greater than or equal to the ceiling of width/maxFragmentDensityTexelSize.width (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-flags-03196)"},
- {"VUID-VkFramebufferCreateInfo-flags-03197", "If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, the height member of any element of the pAttachmentImageInfos member of an instance of VkFramebufferAttachmentsCreateInfoKHR in the pNext chain that is referenced by VkRenderPassFragmentDensityMapCreateInfoEXT::fragmentDensityMapAttachment in renderPass must be greater than or equal to the ceiling of height/maxFragmentDensityTexelSize.height (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-flags-03197)"},
- {"VUID-VkFramebufferCreateInfo-flags-03200", "If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, the layerCount member of any element of the pAttachmentImageInfos member of an instance of VkFramebufferAttachmentsCreateInfoKHR in the pNext chain must be greater than or equal to layers (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-flags-03200)"},
- {"VUID-VkFramebufferCreateInfo-flags-03201", "If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, the usage member of any element of the pAttachmentImageInfos member of an instance of VkFramebufferAttachmentsCreateInfoKHR in the pNext chain that refers to an attachment used as a color attachment or resolve attachment by renderPass must include VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-flags-03201)"},
- {"VUID-VkFramebufferCreateInfo-flags-03202", "If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, the usage member of any element of the pAttachmentImageInfos member of an instance of VkFramebufferAttachmentsCreateInfoKHR in the pNext chain that refers to an attachment used as a depth/stencil attachment by renderPass must include VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-flags-03202)"},
- {"VUID-VkFramebufferCreateInfo-flags-03203", "If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, the usage member of any element of the pAttachmentImageInfos member of an instance of VkFramebufferAttachmentsCreateInfoKHR in the pNext chain that refers to an attachment used as a depth/stencil resolve attachment by renderPass must include VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-flags-03203)"},
- {"VUID-VkFramebufferCreateInfo-flags-03204", "If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, the usage member of any element of the pAttachmentImageInfos member of an instance of VkFramebufferAttachmentsCreateInfoKHR in the pNext chain that refers to an attachment used as an input attachment by renderPass must include VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-flags-03204)"},
- {"VUID-VkFramebufferCreateInfo-flags-03205", "If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, at least one element of the pViewFormats member of any element of the pAttachmentImageInfos member of an instance of VkFramebufferAttachmentsCreateInfoKHR in the pNext chain must be equal to the corresponding value of VkAttachmentDescription::format used to create renderPass (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-flags-03205)"},
- {"VUID-VkFramebufferCreateInfo-flags-parameter", "flags must be a valid combination of VkFramebufferCreateFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-flags-parameter)"},
+ {"VUID-VkFramebufferCreateInfo-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-flags-zerobitmask)"},
{"VUID-VkFramebufferCreateInfo-height-00887", "height must be greater than 0. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-height-00887)"},
{"VUID-VkFramebufferCreateInfo-height-00888", "height must be less than or equal to VkPhysicalDeviceLimits::maxFramebufferHeight (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-height-00888)"},
{"VUID-VkFramebufferCreateInfo-layers-00889", "layers must be greater than 0. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-layers-00889)"},
{"VUID-VkFramebufferCreateInfo-layers-00890", "layers must be less than or equal to VkPhysicalDeviceLimits::maxFramebufferLayers (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-layers-00890)"},
- {"VUID-VkFramebufferCreateInfo-pAttachments-00877", "If flags does not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, each element of pAttachments that is used as a color attachment or resolve attachment by renderPass must have been created with a usage value including VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-pAttachments-00877)"},
- {"VUID-VkFramebufferCreateInfo-pAttachments-00879", "If flags does not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, each element of pAttachments that is used as an input attachment by renderPass must have been created with a usage value including VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-pAttachments-00879)"},
- {"VUID-VkFramebufferCreateInfo-pAttachments-00880", "If flags does not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, each element of pAttachments must have been created with an VkFormat value that matches the VkFormat specified by the corresponding VkAttachmentDescription in renderPass (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-pAttachments-00880)"},
- {"VUID-VkFramebufferCreateInfo-pAttachments-00881", "If flags does not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, each element of pAttachments must have been created with a samples value that matches the samples value specified by the corresponding VkAttachmentDescription in renderPass (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-pAttachments-00881)"},
- {"VUID-VkFramebufferCreateInfo-pAttachments-00882", "If flags does not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, each element of pAttachments must have dimensions at least as large as the corresponding framebuffer dimension (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-pAttachments-00882)"},
- {"VUID-VkFramebufferCreateInfo-pAttachments-00883", "If flags does not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, each element of pAttachments must only specify a single mip level (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-pAttachments-00883)"},
- {"VUID-VkFramebufferCreateInfo-pAttachments-00884", "If flags does not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, each element of pAttachments must have been created with the identity swizzle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-pAttachments-00884)"},
- {"VUID-VkFramebufferCreateInfo-pAttachments-00891", "If flags does not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, each element of pAttachments that is a 2D or 2D array image view taken from a 3D image must not be a depth/stencil format (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-pAttachments-00891)"},
+ {"VUID-VkFramebufferCreateInfo-pAttachments-00877", "Each element of pAttachments that is used as a color attachment or resolve attachment by renderPass must have been created with a usage value including VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-pAttachments-00877)"},
+ {"VUID-VkFramebufferCreateInfo-pAttachments-00879", "Each element of pAttachments that is used as an input attachment by renderPass must have been created with a usage value including VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-pAttachments-00879)"},
+ {"VUID-VkFramebufferCreateInfo-pAttachments-00880", "Each element of pAttachments must have been created with an VkFormat value that matches the VkFormat specified by the corresponding VkAttachmentDescription in renderPass (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-pAttachments-00880)"},
+ {"VUID-VkFramebufferCreateInfo-pAttachments-00881", "Each element of pAttachments must have been created with a samples value that matches the samples value specified by the corresponding VkAttachmentDescription in renderPass (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-pAttachments-00881)"},
+ {"VUID-VkFramebufferCreateInfo-pAttachments-00882", "Each element of pAttachments must have dimensions at least as large as the corresponding framebuffer dimension (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-pAttachments-00882)"},
+ {"VUID-VkFramebufferCreateInfo-pAttachments-00883", "Each element of pAttachments must only specify a single mip level (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-pAttachments-00883)"},
+ {"VUID-VkFramebufferCreateInfo-pAttachments-00884", "Each element of pAttachments must have been created with the identity swizzle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-pAttachments-00884)"},
+ {"VUID-VkFramebufferCreateInfo-pAttachments-00891", "Each element of pAttachments that is a 2D or 2D array image view taken from a 3D image must not be a depth/stencil format (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-pAttachments-00891)"},
{"VUID-VkFramebufferCreateInfo-pAttachments-02552", "Each element of pAttachments that is used as a fragment density map attachment by renderPass must not have been created with a flags value including VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-pAttachments-02552)"},
- {"VUID-VkFramebufferCreateInfo-pAttachments-02554", "If flags does not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, each element of pAttachments must have dimensions at least as large as the corresponding framebuffer dimension except for any element that is referenced by fragmentDensityMapAttachment (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-pAttachments-02554)"},
- {"VUID-VkFramebufferCreateInfo-pAttachments-02555", "If flags does not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, an element of pAttachments that is referenced by fragmentDensityMapAttachment must have a width at least as large as the ceiling of width/maxFragmentDensityTexelSize.width (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-pAttachments-02555)"},
- {"VUID-VkFramebufferCreateInfo-pAttachments-02556", "If flags does not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, an element of pAttachments that is referenced by fragmentDensityMapAttachment must have a height at least as large as the ceiling of height/maxFragmentDensityTexelSize.height (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-pAttachments-02556)"},
- {"VUID-VkFramebufferCreateInfo-pAttachments-02633", "If flags does not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, each element of pAttachments that is used as a depth/stencil attachment by renderPass must have been created with a usage value including VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-pAttachments-02633)"},
- {"VUID-VkFramebufferCreateInfo-pAttachments-02634", "If flags does not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, each element of pAttachments that is used as a depth/stencil resolve attachment by renderPass must have been created with a usage value including VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-pAttachments-02634)"},
- {"VUID-VkFramebufferCreateInfo-pAttachments-02744", "An element of pAttachments that is referenced by fragmentDensityMapAttachment must have a layerCount equal to 1 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-pAttachments-02744)"},
- {"VUID-VkFramebufferCreateInfo-pNext-pNext", "pNext must be NULL or a pointer to a valid instance of VkFramebufferAttachmentsCreateInfoKHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-pNext-pNext)"},
- {"VUID-VkFramebufferCreateInfo-renderPass-02531", "If renderPass was specified with non-zero view masks, layers must be 1 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-renderPass-02531)"},
+ {"VUID-VkFramebufferCreateInfo-pAttachments-02554", "Each element of pAttachments must have dimensions at least as large as the corresponding framebuffer dimension except for any element that is referenced by fragmentDensityMapAttachment (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-pAttachments-02554)"},
+ {"VUID-VkFramebufferCreateInfo-pAttachments-02555", "An element of pAttachments that is referenced by fragmentDensityMapAttachment must have a width at least as large as the ceiling of width/maxFragmentDensityTexelSize.width (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-pAttachments-02555)"},
+ {"VUID-VkFramebufferCreateInfo-pAttachments-02556", "An element of pAttachments that is referenced by fragmentDensityMapAttachment must have a height at least as large as the ceiling of height/maxFragmentDensityTexelSize.height (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-pAttachments-02556)"},
+ {"VUID-VkFramebufferCreateInfo-pAttachments-02633", "Each element of pAttachments that is used as a depth/stencil attachment by renderPass must have been created with a usage value including VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-pAttachments-02633)"},
+ {"VUID-VkFramebufferCreateInfo-pAttachments-02634", "Each element of pAttachments that is used as a depth/stencil resolve attachment by renderPass must have been created with a usage value including VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-pAttachments-02634)"},
+ {"VUID-VkFramebufferCreateInfo-pAttachments-parameter", "If attachmentCount is not 0, pAttachments must be a valid pointer to an array of attachmentCount valid VkImageView handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-pAttachments-parameter)"},
+ {"VUID-VkFramebufferCreateInfo-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-pNext-pNext)"},
+ {"VUID-VkFramebufferCreateInfo-renderPass-02531", "If renderPass was specified with non-zero view masks, layers must be greater than or equal to the greatest position of any bit included in any of those view masks (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-renderPass-02531)"},
{"VUID-VkFramebufferCreateInfo-renderPass-02553", "If renderPass has a fragment density map attachment and non-subsample image feature is not enabled, each element of pAttachments must have been created with a flags value including VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT unless that element is the fragment density map attachment. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-renderPass-02553)"},
- {"VUID-VkFramebufferCreateInfo-renderPass-02743", "If renderPass was specified with non-zero view masks, each element of pAttachments must have a layerCount greater than the index of the most significant bit set in any of those view masks (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-renderPass-02743)"},
- {"VUID-VkFramebufferCreateInfo-renderPass-02745", "If renderPass was specified with non-zero view masks, each element of pAttachments that is not referenced by fragmentDensityMapAttachment must have a layerCount greater than the index of the most significant bit set in any of those view masks (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-renderPass-02745)"},
- {"VUID-VkFramebufferCreateInfo-renderPass-02746", "If renderPass was specified with non-zero view masks, each element of pAttachments that is referenced by fragmentDensityMapAttachment must have a layerCount equal to 1 or greater than the index of the most significant bit set in any of those view masks (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-renderPass-02746)"},
- {"VUID-VkFramebufferCreateInfo-renderPass-02747", "If renderPass was not specified with non-zero view masks, each element of pAttachments that is referenced by fragmentDensityMapAttachment must have a layerCount equal to 1 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-renderPass-02747)"},
- {"VUID-VkFramebufferCreateInfo-renderPass-03198", "If multiview is enabled for renderPass, and flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, the layerCount member of any element of the pAttachmentImageInfos member of an instance of VkFramebufferAttachmentsCreateInfoKHR in the pNext chain must be greater than the maximum bit index set in the view mask in the subpasses in which it is used in renderPass (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-renderPass-03198)"},
- {"VUID-VkFramebufferCreateInfo-renderPass-03199", "If multiview is not enabled for renderPass, and flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, the layerCount member of any element of the pAttachmentImageInfos member of an instance of VkFramebufferAttachmentsCreateInfoKHR in the pNext chain must be greater than or equal to layers (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-renderPass-03199)"},
{"VUID-VkFramebufferCreateInfo-renderPass-parameter", "renderPass must be a valid VkRenderPass handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-renderPass-parameter)"},
{"VUID-VkFramebufferCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-sType-sType)"},
{"VUID-VkFramebufferCreateInfo-width-00885", "width must be greater than 0. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-width-00885)"},
{"VUID-VkFramebufferCreateInfo-width-00886", "width must be less than or equal to VkPhysicalDeviceLimits::maxFramebufferWidth (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-width-00886)"},
- {"VUID-VkFramebufferMixedSamplesCombinationNV-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferMixedSamplesCombinationNV-pNext-pNext)"},
- {"VUID-VkFramebufferMixedSamplesCombinationNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferMixedSamplesCombinationNV-sType-sType)"},
{"VUID-VkGeometryAABBNV-aabbData-parameter", "If aabbData is not VK_NULL_HANDLE, aabbData must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGeometryAABBNV-aabbData-parameter)"},
{"VUID-VkGeometryAABBNV-offset-02439", "offset must be less than the size of aabbData (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGeometryAABBNV-offset-02439)"},
{"VUID-VkGeometryAABBNV-offset-02440", "offset must be a multiple of 8 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGeometryAABBNV-offset-02440)"},
@@ -885,7 +840,6 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkGraphicsPipelineCreateInfo-attachmentCount-00746", "If rasterization is not disabled and the subpass uses color attachments, the attachmentCount member of pColorBlendState must be equal to the colorAttachmentCount used to create subpass (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-attachmentCount-00746)"},
{"VUID-VkGraphicsPipelineCreateInfo-blendEnable-02023", "If rasterization is not disabled and the subpass uses color attachments, then for each color attachment in the subpass the blendEnable member of the corresponding element of the pAttachment member of pColorBlendState must be VK_FALSE if the attached image's format features does not contain VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-blendEnable-02023)"},
{"VUID-VkGraphicsPipelineCreateInfo-commonparent", "Each of basePipelineHandle, layout, and renderPass that are valid handles must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-commonparent)"},
- {"VUID-VkGraphicsPipelineCreateInfo-coverageReductionMode-02722", "If the VK_NV_coverage_reduction_mode extension is enabled, the coverage reduction mode specified by VkPipelineCoverageReductionStateCreateInfoNV::coverageReductionMode, the rasterizationSamples member of pMultisampleState and the sample counts for the color and depth/stencil attachments (if the subpass has them) must be a valid combination returned by vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-coverageReductionMode-02722)"},
{"VUID-VkGraphicsPipelineCreateInfo-flags-00722", "If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineIndex is -1, basePipelineHandle must be a valid handle to a graphics VkPipeline (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-flags-00722)"},
{"VUID-VkGraphicsPipelineCreateInfo-flags-00723", "If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineHandle is VK_NULL_HANDLE, basePipelineIndex must be a valid index into the calling command's pCreateInfos parameter (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-flags-00723)"},
{"VUID-VkGraphicsPipelineCreateInfo-flags-00724", "If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineIndex is not -1, basePipelineHandle must be VK_NULL_HANDLE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-flags-00724)"},
@@ -896,7 +850,6 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkGraphicsPipelineCreateInfo-layout-00756", "layout must be consistent with all shaders specified in pStages (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-layout-00756)"},
{"VUID-VkGraphicsPipelineCreateInfo-layout-01688", "The number of resources in layout accessible to each shader stage that is used by the pipeline must be less than or equal to VkPhysicalDeviceLimits::maxPerStageResources (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-layout-01688)"},
{"VUID-VkGraphicsPipelineCreateInfo-layout-parameter", "layout must be a valid VkPipelineLayout handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-layout-parameter)"},
- {"VUID-VkGraphicsPipelineCreateInfo-lineRasterizationMode-02766", "If the lineRasterizationMode member of a VkPipelineRasterizationLineStateCreateInfoEXT structure chained to the pNext chain of pRasterizationState is VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT or VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT and if rasterization is enabled, then the alphaToCoverageEnable, alphaToOneEnable, and sampleShadingEnable members of pMultisampleState must all be VK_FALSE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-lineRasterizationMode-02766)"},
{"VUID-VkGraphicsPipelineCreateInfo-pDynamicState-parameter", "If pDynamicState is not NULL, pDynamicState must be a valid pointer to a valid VkPipelineDynamicStateCreateInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-pDynamicState-parameter)"},
{"VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00747", "If no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_VIEWPORT, the pViewports member of pViewportState must be a valid pointer to an array of pViewportState::viewportCount valid VkViewport structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00747)"},
{"VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00748", "If no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_SCISSOR, the pScissors member of pViewportState must be a valid pointer to an array of pViewportState::scissorCount VkRect2D structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00748)"},
@@ -908,7 +861,7 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-01523", "If no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT, and the sampleLocationsEnable member of a VkPipelineSampleLocationsStateCreateInfoEXT structure chained to the pNext chain of pMultisampleState is VK_TRUE, sampleLocationsInfo.sampleLocationsPerPixel must equal rasterizationSamples (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-01523)"},
{"VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-01715", "If no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV, and the viewportWScalingEnable member of a VkPipelineViewportWScalingStateCreateInfoNV structure, chained to the pNext chain of pViewportState, is VK_TRUE, the pViewportWScalings member of the VkPipelineViewportWScalingStateCreateInfoNV must be a pointer to an array of VkPipelineViewportWScalingStateCreateInfoNV::viewportCount valid VkViewportWScalingNV structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-01715)"},
{"VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-02510", "If the VK_EXT_depth_range_unrestricted extension is not enabled and no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_DEPTH_BOUNDS, and the depthBoundsTestEnable member of pDepthStencilState is VK_TRUE, the minDepthBounds and maxDepthBounds members of pDepthStencilState must be between 0.0 and 1.0, inclusive (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-02510)"},
- {"VUID-VkGraphicsPipelineCreateInfo-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkPipelineCompilerControlCreateInfoAMD, VkPipelineCreationFeedbackCreateInfoEXT, VkPipelineDiscardRectangleStateCreateInfoEXT, or VkPipelineRepresentativeFragmentTestStateCreateInfoNV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-pNext-pNext)"},
+ {"VUID-VkGraphicsPipelineCreateInfo-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkPipelineDiscardRectangleStateCreateInfoEXT or VkPipelineRepresentativeFragmentTestStateCreateInfoNV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-pNext-pNext)"},
{"VUID-VkGraphicsPipelineCreateInfo-pRasterizationState-parameter", "pRasterizationState must be a valid pointer to a valid VkPipelineRasterizationStateCreateInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-pRasterizationState-parameter)"},
{"VUID-VkGraphicsPipelineCreateInfo-pStages-00729", "If pStages includes a tessellation control shader stage, it must include a tessellation evaluation shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-pStages-00729)"},
{"VUID-VkGraphicsPipelineCreateInfo-pStages-00730", "If pStages includes a tessellation evaluation shader stage, it must include a tessellation control shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-pStages-00730)"},
@@ -949,7 +902,6 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkGraphicsPipelineCreateInfo-stage-00728", "The stage member of each element of pStages must not be VK_SHADER_STAGE_COMPUTE_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-stage-00728)"},
{"VUID-VkGraphicsPipelineCreateInfo-stage-02096", "The stage member of one element of pStages must be either VK_SHADER_STAGE_VERTEX_BIT or VK_SHADER_STAGE_MESH_BIT_NV. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-stage-02096)"},
{"VUID-VkGraphicsPipelineCreateInfo-stageCount-arraylength", "stageCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-stageCount-arraylength)"},
- {"VUID-VkGraphicsPipelineCreateInfo-stippledLineEnable-02767", "If the stippledLineEnable member of VkPipelineRasterizationLineStateCreateInfoEXT is VK_TRUE and no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_LINE_STIPPLE_EXT, then the lineStippleFactor member of VkPipelineRasterizationLineStateCreateInfoEXT must be in the range [1,256] (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-stippledLineEnable-02767)"},
{"VUID-VkGraphicsPipelineCreateInfo-subpass-00743", "If rasterization is not disabled and subpass uses a depth/stencil attachment in renderPass that has a layout of VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL in the VkAttachmentReference defined by subpass, the depthWriteEnable member of pDepthStencilState must be VK_FALSE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-subpass-00743)"},
{"VUID-VkGraphicsPipelineCreateInfo-subpass-00744", "If rasterization is not disabled and subpass uses a depth/stencil attachment in renderPass that has a layout of VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL in the VkAttachmentReference defined by subpass, the failOp, passOp and depthFailOp members of each of the front and back members of pDepthStencilState must be VK_STENCIL_OP_KEEP (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-subpass-00744)"},
{"VUID-VkGraphicsPipelineCreateInfo-subpass-00757", "If neither the VK_AMD_mixed_attachment_samples nor the VK_NV_framebuffer_mixed_samples extensions are enabled, and if subpass uses color and/or depth/stencil attachments, then the rasterizationSamples member of pMultisampleState must be the same as the sample count for those subpass attachments (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-subpass-00757)"},
@@ -963,9 +915,6 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkGraphicsPipelineCreateInfo-topology-00737", "If the topology member of pInputAssembly is VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, pStages must include tessellation shader stages (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-topology-00737)"},
{"VUID-VkHdrMetadataEXT-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkHdrMetadataEXT-pNext-pNext)"},
{"VUID-VkHdrMetadataEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_HDR_METADATA_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkHdrMetadataEXT-sType-sType)"},
- {"VUID-VkHeadlessSurfaceCreateInfoEXT-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkHeadlessSurfaceCreateInfoEXT-flags-zerobitmask)"},
- {"VUID-VkHeadlessSurfaceCreateInfoEXT-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkHeadlessSurfaceCreateInfoEXT-pNext-pNext)"},
- {"VUID-VkHeadlessSurfaceCreateInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkHeadlessSurfaceCreateInfoEXT-sType-sType)"},
{"VUID-VkIOSSurfaceCreateInfoMVK-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkIOSSurfaceCreateInfoMVK-flags-zerobitmask)"},
{"VUID-VkIOSSurfaceCreateInfoMVK-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkIOSSurfaceCreateInfoMVK-pNext-pNext)"},
{"VUID-VkIOSSurfaceCreateInfoMVK-pView-01316", "pView must be a valid UIView and must be backed by a CALayer instance of type CAMetalLayer. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkIOSSurfaceCreateInfoMVK-pView-01316)"},
@@ -1139,7 +1088,7 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkImageCreateInfo-usage-requiredbitmask", "usage must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-usage-requiredbitmask)"},
{"VUID-VkImageDrmFormatModifierExplicitCreateInfoEXT-arrayPitch-02268", "For each element of pPlaneLayouts, arrayPitch must be 0 if VkImageCreateInfo::arrayLayers is 1. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageDrmFormatModifierExplicitCreateInfoEXT-arrayPitch-02268)"},
{"VUID-VkImageDrmFormatModifierExplicitCreateInfoEXT-depthPitch-02269", "For each element of pPlaneLayouts, depthPitch must be 0 if VkImageCreateInfo::extent::depth is 1. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageDrmFormatModifierExplicitCreateInfoEXT-depthPitch-02269)"},
- {"VUID-VkImageDrmFormatModifierExplicitCreateInfoEXT-drmFormatModifier-02264", "drmFormatModifier must be compatible with the parameters in VkImageCreateInfo and its pNext chain, as determined by querying VkPhysicalDeviceImageFormatInfo2 extended with VkPhysicalDeviceImageDrmFormatModifierInfoEXT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageDrmFormatModifierExplicitCreateInfoEXT-drmFormatModifier-02264)"},
+ {"VUID-VkImageDrmFormatModifierExplicitCreateInfoEXT-drmFormatModifier-02264", "drmFormatModifier must be compatible with the parameters in VkImageCreateInfo and its pNext chain, as determined by querying VkPhysicalDeviceImageFormatInfo2KHR extended with VkPhysicalDeviceImageDrmFormatModifierInfoEXT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageDrmFormatModifierExplicitCreateInfoEXT-drmFormatModifier-02264)"},
{"VUID-VkImageDrmFormatModifierExplicitCreateInfoEXT-drmFormatModifierPlaneCount-02265", "drmFormatModifierPlaneCount must be equal to the VkDrmFormatModifierPropertiesEXT::drmFormatModifierPlaneCount associated with VkImageCreateInfo::format and drmFormatModifier, as found by querying VkDrmFormatModifierPropertiesListEXT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageDrmFormatModifierExplicitCreateInfoEXT-drmFormatModifierPlaneCount-02265)"},
{"VUID-VkImageDrmFormatModifierExplicitCreateInfoEXT-pPlaneLayouts-parameter", "If drmFormatModifierPlaneCount is not 0, pPlaneLayouts must be a valid pointer to an array of drmFormatModifierPlaneCount VkSubresourceLayout structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageDrmFormatModifierExplicitCreateInfoEXT-pPlaneLayouts-parameter)"},
{"VUID-VkImageDrmFormatModifierExplicitCreateInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageDrmFormatModifierExplicitCreateInfoEXT-sType-sType)"},
@@ -1268,6 +1217,7 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkImageViewCreateInfo-image-01019", "If image was not created with the VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT flag, format must be identical to the format used to create image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-image-01019)"},
{"VUID-VkImageViewCreateInfo-image-01020", "If image is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-image-01020)"},
{"VUID-VkImageViewCreateInfo-image-01482", "If image is not a 3D image created with VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT set, or viewType is not VK_IMAGE_VIEW_TYPE_2D or VK_IMAGE_VIEW_TYPE_2D_ARRAY, subresourceRange::baseArrayLayer must be less than the arrayLayers specified in VkImageCreateInfo when image was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-image-01482)"},
+ {"VUID-VkImageViewCreateInfo-image-01484", "If image is a 3D image created with VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT set, and viewType is VK_IMAGE_VIEW_TYPE_2D or VK_IMAGE_VIEW_TYPE_2D_ARRAY, subresourceRange::baseArrayLayer must be less than the extent.depth specified in VkImageCreateInfo when image was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-image-01484)"},
{"VUID-VkImageViewCreateInfo-image-01583", "If image was created with the VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT flag, format must be compatible with, or must be an uncompressed format that is size-compatible with, the format used to create image. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-image-01583)"},
{"VUID-VkImageViewCreateInfo-image-01584", "If image was created with the VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT flag, the levelCount and layerCount members of subresourceRange must both be 1. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-image-01584)"},
{"VUID-VkImageViewCreateInfo-image-01586", "If image was created with the VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT flag, if the format of the image is a multi-planar format, and if subresourceRange.aspectMask is one of VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT, or VK_IMAGE_ASPECT_PLANE_2_BIT, then format must be compatible with the VkFormat for the plane of the image format indicated by subresourceRange.aspectMask, as defined in Compatible formats of planes of multi-planar formats (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-image-01586)"},
@@ -1285,14 +1235,9 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkImageViewCreateInfo-image-02570", "image must have been created with a usage value containing at least one of VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_USAGE_STORAGE_BIT, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV, or VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-image-02570)"},
{"VUID-VkImageViewCreateInfo-image-02571", "If image was created with usage containing VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT, subresourceRange.levelCount must be 1 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-image-02571)"},
{"VUID-VkImageViewCreateInfo-image-02573", "If dynamic fragment density map feature is not enabled and image was created with usage containing VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT, flags must not contain any of VK_IMAGE_CREATE_PROTECTED_BIT, VK_IMAGE_CREATE_SPARSE_BINDING_BIT, VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT, or VK_IMAGE_CREATE_SPARSE_ALIASED_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-image-02573)"},
- {"VUID-VkImageViewCreateInfo-image-02724", "If image is a 3D image created with VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT set, and viewType is VK_IMAGE_VIEW_TYPE_2D or VK_IMAGE_VIEW_TYPE_2D_ARRAY, subresourceRange::baseArrayLayer must be less than the depth computed from baseMipLevel and extent.depth specified in VkImageCreateInfo when image was created, according to the formula defined in Image Miplevel Sizing. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-image-02724)"},
{"VUID-VkImageViewCreateInfo-image-parameter", "image must be a valid VkImage handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-image-parameter)"},
{"VUID-VkImageViewCreateInfo-pNext-01585", "If a VkImageFormatListCreateInfoKHR structure was included in the pNext chain of the VkImageCreateInfo struct used when creating image and the viewFormatCount field of VkImageFormatListCreateInfoKHR is not zero then format must be one of the formats in VkImageFormatListCreateInfoKHR::pViewFormats. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-pNext-01585)"},
{"VUID-VkImageViewCreateInfo-pNext-01970", "If the pNext chain contains an instance of VkSamplerYcbcrConversionInfo with a conversion value other than VK_NULL_HANDLE, all members of components must have the value VK_COMPONENT_SWIZZLE_IDENTITY. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-pNext-01970)"},
- {"VUID-VkImageViewCreateInfo-pNext-02661", "If the pNext chain includes an instance of VkImageViewUsageCreateInfo, its usage member must not include any bits that were not set in the usage member of the VkImageCreateInfo structure used to create image. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-pNext-02661)"},
- {"VUID-VkImageViewCreateInfo-pNext-02662", "If the pNext chain includes an instance of VkImageViewUsageCreateInfo, and image was not created with an instance of VkImageStencilUsageCreateInfoEXT in the pNext chain of VkImageCreateInfo, its usage member must not include any bits that were not set in the usage member of the VkImageCreateInfo structure used to create image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-pNext-02662)"},
- {"VUID-VkImageViewCreateInfo-pNext-02663", "If the pNext chain includes an instance of VkImageViewUsageCreateInfo, image was created with an instance of VkImageStencilUsageCreateInfoEXT in the pNext chain of VkImageCreateInfo, and subResourceRange.aspectMask includes VK_IMAGE_ASPECT_STENCIL_BIT, the usage member of the VkImageViewUsageCreateInfo instance must not include any bits that were not set in the usage member of the VkImageStencilUsageCreateInfoEXT structure used to create image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-pNext-02663)"},
- {"VUID-VkImageViewCreateInfo-pNext-02664", "If the pNext chain includes an instance of VkImageViewUsageCreateInfo, image was created with an instance of VkImageStencilUsageCreateInfoEXT in the pNext chain of VkImageCreateInfo, and subResourceRange.aspectMask includes bits other than VK_IMAGE_ASPECT_STENCIL_BIT, the usage member of the VkImageViewUsageCreateInfo instance must not include any bits that were not set in the usage member of the VkImageCreateInfo structure used to create image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-pNext-02664)"},
{"VUID-VkImageViewCreateInfo-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkImageViewASTCDecodeModeEXT, VkImageViewUsageCreateInfo, or VkSamplerYcbcrConversionInfo (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-pNext-pNext)"},
{"VUID-VkImageViewCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-sType-sType)"},
{"VUID-VkImageViewCreateInfo-sType-unique", "Each sType member in the pNext chain must be unique (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-sType-unique)"},
@@ -1300,9 +1245,9 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkImageViewCreateInfo-subresourceRange-01478", "subresourceRange.baseMipLevel must be less than the mipLevels specified in VkImageCreateInfo when image was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-subresourceRange-01478)"},
{"VUID-VkImageViewCreateInfo-subresourceRange-01480", "subresourceRange.baseArrayLayer must be less than the arrayLayers specified in VkImageCreateInfo when image was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-subresourceRange-01480)"},
{"VUID-VkImageViewCreateInfo-subresourceRange-01483", "If subresourceRange::layerCount is not VK_REMAINING_ARRAY_LAYERS, image is not a 3D image created with VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT set, or viewType is not VK_IMAGE_VIEW_TYPE_2D or VK_IMAGE_VIEW_TYPE_2D_ARRAY, subresourceRange::layerCount must be non-zero and subresourceRange::baseArrayLayer + subresourceRange::layerCount must be less than or equal to the arrayLayers specified in VkImageCreateInfo when image was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-subresourceRange-01483)"},
+ {"VUID-VkImageViewCreateInfo-subresourceRange-01485", "If subresourceRange::layerCount is not VK_REMAINING_ARRAY_LAYERS, image is a 3D image created with VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT set, and viewType is VK_IMAGE_VIEW_TYPE_2D or VK_IMAGE_VIEW_TYPE_2D_ARRAY, subresourceRange::layerCount must be non-zero and subresourceRange::baseArrayLayer + subresourceRange::layerCount must be less than or equal to the extent.depth specified in VkImageCreateInfo when image was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-subresourceRange-01485)"},
{"VUID-VkImageViewCreateInfo-subresourceRange-01718", "If subresourceRange.levelCount is not VK_REMAINING_MIP_LEVELS, subresourceRange.baseMipLevel + subresourceRange.levelCount must be less than or equal to the mipLevels specified in VkImageCreateInfo when image was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-subresourceRange-01718)"},
{"VUID-VkImageViewCreateInfo-subresourceRange-01719", "If subresourceRange.layerCount is not VK_REMAINING_ARRAY_LAYERS, subresourceRange.baseArrayLayer + subresourceRange.layerCount must be less than or equal to the arrayLayers specified in VkImageCreateInfo when image was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-subresourceRange-01719)"},
- {"VUID-VkImageViewCreateInfo-subresourceRange-02725", "If subresourceRange::layerCount is not VK_REMAINING_ARRAY_LAYERS, image is a 3D image created with VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT set, and viewType is VK_IMAGE_VIEW_TYPE_2D or VK_IMAGE_VIEW_TYPE_2D_ARRAY, subresourceRange::layerCount must be non-zero and subresourceRange::baseArrayLayer + subresourceRange::layerCount must be less than or equal to the depth computed from baseMipLevel and extent.depth specified in VkImageCreateInfo when image was created, according to the formula defined in Image Miplevel Sizing. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-subresourceRange-02725)"},
{"VUID-VkImageViewCreateInfo-subresourceRange-parameter", "subresourceRange must be a valid VkImageSubresourceRange structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-subresourceRange-parameter)"},
{"VUID-VkImageViewCreateInfo-usage-02274", "If usage contains VK_IMAGE_USAGE_SAMPLED_BIT, then the format features of the resultant image view must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-usage-02274)"},
{"VUID-VkImageViewCreateInfo-usage-02275", "If usage contains VK_IMAGE_USAGE_STORAGE_BIT, then the image view's format features must contain VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-usage-02275)"},
@@ -1314,7 +1259,7 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkImageViewHandleInfoNVX-commonparent", "Both of imageView, and sampler that are valid handles must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewHandleInfoNVX-commonparent)"},
{"VUID-VkImageViewHandleInfoNVX-descriptorType-02654", "descriptorType must be VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, or VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewHandleInfoNVX-descriptorType-02654)"},
{"VUID-VkImageViewHandleInfoNVX-descriptorType-parameter", "descriptorType must be a valid VkDescriptorType value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewHandleInfoNVX-descriptorType-parameter)"},
- {"VUID-VkImageViewHandleInfoNVX-imageView-02656", "If descriptorType is VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE or VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, the image that imageView was created from must have been created with the VK_IMAGE_USAGE_SAMPLED_BIT usage bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewHandleInfoNVX-imageView-02656)"},
+ {"VUID-VkImageViewHandleInfoNVX-imageView-02656", "If descriptorType is VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE or VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, the image that imageView was created from must have been created with the VK_IMAGE_USAGE_SAMPLED_BIT usage bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewHandleInfoNVX-imageView-02656)"},
{"VUID-VkImageViewHandleInfoNVX-imageView-02657", "If descriptorType is VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, the image that imageView was created from must have been created with the VK_IMAGE_USAGE_STORAGE_BIT usage bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewHandleInfoNVX-imageView-02657)"},
{"VUID-VkImageViewHandleInfoNVX-imageView-parameter", "imageView must be a valid VkImageView handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewHandleInfoNVX-imageView-parameter)"},
{"VUID-VkImageViewHandleInfoNVX-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewHandleInfoNVX-pNext-pNext)"},
@@ -1322,11 +1267,12 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkImageViewHandleInfoNVX-sampler-02655", "sampler must be a valid VkSampler if descriptorType is VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewHandleInfoNVX-sampler-02655)"},
{"VUID-VkImageViewHandleInfoNVX-sampler-parameter", "If sampler is not VK_NULL_HANDLE, sampler must be a valid VkSampler handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewHandleInfoNVX-sampler-parameter)"},
{"VUID-VkImageViewUsageCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewUsageCreateInfo-sType-sType)"},
+ {"VUID-VkImageViewUsageCreateInfo-usage-01587", "usage must not include any set bits that were not set in the usage member of the VkImageCreateInfo structure used to create the image this image view is created from. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewUsageCreateInfo-usage-01587)"},
{"VUID-VkImageViewUsageCreateInfo-usage-parameter", "usage must be a valid combination of VkImageUsageFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewUsageCreateInfo-usage-parameter)"},
{"VUID-VkImageViewUsageCreateInfo-usage-requiredbitmask", "usage must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewUsageCreateInfo-usage-requiredbitmask)"},
{"VUID-VkImportAndroidHardwareBufferInfoANDROID-buffer-01880", "If buffer is not NULL, Android hardware buffers must be supported for import, as reported by VkExternalImageFormatProperties or VkExternalBufferProperties. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportAndroidHardwareBufferInfoANDROID-buffer-01880)"},
{"VUID-VkImportAndroidHardwareBufferInfoANDROID-buffer-01881", "If buffer is not NULL, it must be a valid Android hardware buffer object with AHardwareBuffer_Desc::format and AHardwareBuffer_Desc::usage compatible with Vulkan as described in Android Hardware Buffers. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportAndroidHardwareBufferInfoANDROID-buffer-01881)"},
- {"VUID-VkImportAndroidHardwareBufferInfoANDROID-buffer-parameter", "buffer must be a valid pointer to an AHardwareBuffer value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportAndroidHardwareBufferInfoANDROID-buffer-parameter)"},
+ {"VUID-VkImportAndroidHardwareBufferInfoANDROID-buffer-parameter", "buffer must be a valid pointer to a AHardwareBuffer value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportAndroidHardwareBufferInfoANDROID-buffer-parameter)"},
{"VUID-VkImportAndroidHardwareBufferInfoANDROID-sType-sType", "sType must be VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportAndroidHardwareBufferInfoANDROID-sType-sType)"},
{"VUID-VkImportFenceFdInfoKHR-fd-01541", "fd must obey any requirements listed for handleType in external fence handle types compatibility. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportFenceFdInfoKHR-fd-01541)"},
{"VUID-VkImportFenceFdInfoKHR-fence-parameter", "fence must be a valid VkFence handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportFenceFdInfoKHR-fence-parameter)"},
@@ -1355,7 +1301,7 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkImportMemoryFdInfoKHR-handleType-00670", "If handleType is not 0, fd must be a valid handle of the type specified by handleType. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportMemoryFdInfoKHR-handleType-00670)"},
{"VUID-VkImportMemoryFdInfoKHR-handleType-parameter", "If handleType is not 0, handleType must be a valid VkExternalMemoryHandleTypeFlagBits value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportMemoryFdInfoKHR-handleType-parameter)"},
{"VUID-VkImportMemoryFdInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportMemoryFdInfoKHR-sType-sType)"},
- {"VUID-VkImportMemoryHostPointerInfoEXT-handleType-01747", "If handleType is not 0, it must be supported for import, as reported in VkExternalMemoryProperties (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportMemoryHostPointerInfoEXT-handleType-01747)"},
+ {"VUID-VkImportMemoryHostPointerInfoEXT-handleType-01747", "If handleType is not 0, it must be supported for import, as reported in VkExternalMemoryPropertiesKHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportMemoryHostPointerInfoEXT-handleType-01747)"},
{"VUID-VkImportMemoryHostPointerInfoEXT-handleType-01748", "If handleType is not 0, it must be VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT or VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportMemoryHostPointerInfoEXT-handleType-01748)"},
{"VUID-VkImportMemoryHostPointerInfoEXT-handleType-01750", "If handleType is VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT, pHostPointer must be a pointer to allocationSize number of bytes of host memory, where allocationSize is the member of the VkMemoryAllocateInfo structure this structure is chained to (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportMemoryHostPointerInfoEXT-handleType-01750)"},
{"VUID-VkImportMemoryHostPointerInfoEXT-handleType-01751", "If handleType is VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT, pHostPointer must be a pointer to allocationSize number of bytes of host mapped foreign memory, where allocationSize is the member of the VkMemoryAllocateInfo structure this structure is chained to (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportMemoryHostPointerInfoEXT-handleType-01751)"},
@@ -1416,8 +1362,6 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkIndirectCommandsTokenNVX-buffer-parameter", "buffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkIndirectCommandsTokenNVX-buffer-parameter)"},
{"VUID-VkIndirectCommandsTokenNVX-offset-01346", "The offset must be aligned to VkDeviceGeneratedCommandsLimitsNVX::minCommandsTokenBufferOffsetAlignment. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkIndirectCommandsTokenNVX-offset-01346)"},
{"VUID-VkIndirectCommandsTokenNVX-tokenType-parameter", "tokenType must be a valid VkIndirectCommandsTokenTypeNVX value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkIndirectCommandsTokenNVX-tokenType-parameter)"},
- {"VUID-VkInitializePerformanceApiInfoINTEL-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkInitializePerformanceApiInfoINTEL-pNext-pNext)"},
- {"VUID-VkInitializePerformanceApiInfoINTEL-sType-sType", "sType must be VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkInitializePerformanceApiInfoINTEL-sType-sType)"},
{"VUID-VkInputAttachmentAspectReference-aspectMask-01964", "aspectMask must not include VK_IMAGE_ASPECT_METADATA_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkInputAttachmentAspectReference-aspectMask-01964)"},
{"VUID-VkInputAttachmentAspectReference-aspectMask-02250", "aspectMask must not include VK_IMAGE_ASPECT_MEMORY_PLANE_i_BIT_EXT for any index i. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkInputAttachmentAspectReference-aspectMask-02250)"},
{"VUID-VkInputAttachmentAspectReference-aspectMask-parameter", "aspectMask must be a valid combination of VkImageAspectFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkInputAttachmentAspectReference-aspectMask-parameter)"},
@@ -1495,7 +1439,7 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkMemoryDedicatedRequirements-sType-sType", "sType must be VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryDedicatedRequirements-sType-sType)"},
{"VUID-VkMemoryFdPropertiesKHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryFdPropertiesKHR-pNext-pNext)"},
{"VUID-VkMemoryFdPropertiesKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryFdPropertiesKHR-sType-sType)"},
- {"VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-handleTypes-01882", "VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID must have been included in VkExportMemoryAllocateInfo::handleTypes when memory was created. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-handleTypes-01882)"},
+ {"VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-handleTypes-01882", "VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID must have been included in VkExportMemoryAllocateInfoKHR::handleTypes when memory was created. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-handleTypes-01882)"},
{"VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-memory-parameter", "memory must be a valid VkDeviceMemory handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-memory-parameter)"},
{"VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-pNext-01883", "If the pNext chain of the VkMemoryAllocateInfo used to allocate memory included a VkMemoryDedicatedAllocateInfo with non-NULL image member, then that image must already be bound to memory. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-pNext-01883)"},
{"VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-pNext-pNext)"},
@@ -1553,7 +1497,6 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkObjectTableIndexBufferEntryNVX-buffer-parameter", "buffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTableIndexBufferEntryNVX-buffer-parameter)"},
{"VUID-VkObjectTableIndexBufferEntryNVX-flags-parameter", "flags must be a valid combination of VkObjectEntryUsageFlagBitsNVX values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTableIndexBufferEntryNVX-flags-parameter)"},
{"VUID-VkObjectTableIndexBufferEntryNVX-flags-requiredbitmask", "flags must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTableIndexBufferEntryNVX-flags-requiredbitmask)"},
- {"VUID-VkObjectTableIndexBufferEntryNVX-indexType-02783", "indexType must be VK_INDEX_TYPE_UINT16, or VK_INDEX_TYPE_UINT32 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTableIndexBufferEntryNVX-indexType-02783)"},
{"VUID-VkObjectTableIndexBufferEntryNVX-indexType-parameter", "indexType must be a valid VkIndexType value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTableIndexBufferEntryNVX-indexType-parameter)"},
{"VUID-VkObjectTableIndexBufferEntryNVX-type-01371", "type must be VK_OBJECT_ENTRY_TYPE_INDEX_BUFFER_NVX (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTableIndexBufferEntryNVX-type-01371)"},
{"VUID-VkObjectTableIndexBufferEntryNVX-type-parameter", "type must be a valid VkObjectEntryTypeNVX value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTableIndexBufferEntryNVX-type-parameter)"},
@@ -1574,34 +1517,18 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkObjectTableVertexBufferEntryNVX-flags-requiredbitmask", "flags must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTableVertexBufferEntryNVX-flags-requiredbitmask)"},
{"VUID-VkObjectTableVertexBufferEntryNVX-type-01370", "type must be VK_OBJECT_ENTRY_TYPE_VERTEX_BUFFER_NVX (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTableVertexBufferEntryNVX-type-01370)"},
{"VUID-VkObjectTableVertexBufferEntryNVX-type-parameter", "type must be a valid VkObjectEntryTypeNVX value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTableVertexBufferEntryNVX-type-parameter)"},
- {"VUID-VkPerformanceConfigurationAcquireInfoINTEL-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPerformanceConfigurationAcquireInfoINTEL-pNext-pNext)"},
- {"VUID-VkPerformanceConfigurationAcquireInfoINTEL-sType-sType", "sType must be VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPerformanceConfigurationAcquireInfoINTEL-sType-sType)"},
- {"VUID-VkPerformanceConfigurationAcquireInfoINTEL-type-parameter", "type must be a valid VkPerformanceConfigurationTypeINTEL value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPerformanceConfigurationAcquireInfoINTEL-type-parameter)"},
- {"VUID-VkPerformanceMarkerInfoINTEL-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPerformanceMarkerInfoINTEL-pNext-pNext)"},
- {"VUID-VkPerformanceMarkerInfoINTEL-sType-sType", "sType must be VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPerformanceMarkerInfoINTEL-sType-sType)"},
- {"VUID-VkPerformanceOverrideInfoINTEL-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPerformanceOverrideInfoINTEL-pNext-pNext)"},
- {"VUID-VkPerformanceOverrideInfoINTEL-sType-sType", "sType must be VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPerformanceOverrideInfoINTEL-sType-sType)"},
- {"VUID-VkPerformanceOverrideInfoINTEL-type-parameter", "type must be a valid VkPerformanceOverrideTypeINTEL value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPerformanceOverrideInfoINTEL-type-parameter)"},
- {"VUID-VkPerformanceStreamMarkerInfoINTEL-marker-02735", "The value written by the application into marker must only used the valid bits as reported by vkGetPerformanceParameterINTEL with the VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPerformanceStreamMarkerInfoINTEL-marker-02735)"},
- {"VUID-VkPerformanceStreamMarkerInfoINTEL-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPerformanceStreamMarkerInfoINTEL-pNext-pNext)"},
- {"VUID-VkPerformanceStreamMarkerInfoINTEL-sType-sType", "sType must be VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPerformanceStreamMarkerInfoINTEL-sType-sType)"},
- {"VUID-VkPerformanceValueDataINTEL-valueString-parameter", "valueString must be a valid pointer to a valid (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPerformanceValueDataINTEL-valueString-parameter)"},
- {"VUID-VkPerformanceValueINTEL-data-parameter", "data must be a valid VkPerformanceValueDataINTEL union (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPerformanceValueINTEL-data-parameter)"},
- {"VUID-VkPerformanceValueINTEL-type-parameter", "type must be a valid VkPerformanceValueTypeINTEL value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPerformanceValueINTEL-type-parameter)"},
{"VUID-VkPhysicalDevice16BitStorageFeatures-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDevice16BitStorageFeatures-sType-sType)"},
{"VUID-VkPhysicalDevice8BitStorageFeaturesKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDevice8BitStorageFeaturesKHR-sType-sType)"},
{"VUID-VkPhysicalDeviceASTCDecodeFeaturesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceASTCDecodeFeaturesEXT-sType-sType)"},
{"VUID-VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT-sType-sType)"},
{"VUID-VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT-sType-sType)"},
- {"VUID-VkPhysicalDeviceBufferDeviceAddressFeaturesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceBufferDeviceAddressFeaturesEXT-sType-sType)"},
- {"VUID-VkPhysicalDeviceCoherentMemoryFeaturesAMD-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceCoherentMemoryFeaturesAMD-sType-sType)"},
+ {"VUID-VkPhysicalDeviceBufferAddressFeaturesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_ADDRESS_FEATURES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceBufferAddressFeaturesEXT-sType-sType)"},
{"VUID-VkPhysicalDeviceComputeShaderDerivativesFeaturesNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceComputeShaderDerivativesFeaturesNV-sType-sType)"},
{"VUID-VkPhysicalDeviceConditionalRenderingFeaturesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceConditionalRenderingFeaturesEXT-sType-sType)"},
{"VUID-VkPhysicalDeviceConservativeRasterizationPropertiesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceConservativeRasterizationPropertiesEXT-sType-sType)"},
{"VUID-VkPhysicalDeviceCooperativeMatrixFeaturesNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceCooperativeMatrixFeaturesNV-sType-sType)"},
{"VUID-VkPhysicalDeviceCooperativeMatrixPropertiesNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceCooperativeMatrixPropertiesNV-sType-sType)"},
{"VUID-VkPhysicalDeviceCornerSampledImageFeaturesNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceCornerSampledImageFeaturesNV-sType-sType)"},
- {"VUID-VkPhysicalDeviceCoverageReductionModeFeaturesNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceCoverageReductionModeFeaturesNV-sType-sType)"},
{"VUID-VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV-sType-sType)"},
{"VUID-VkPhysicalDeviceDepthClipEnableFeaturesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceDepthClipEnableFeaturesEXT-sType-sType)"},
{"VUID-VkPhysicalDeviceDepthStencilResolvePropertiesKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceDepthStencilResolvePropertiesKHR-sType-sType)"},
@@ -1626,14 +1553,13 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkPhysicalDeviceExternalSemaphoreInfo-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceExternalSemaphoreInfo-pNext-pNext)"},
{"VUID-VkPhysicalDeviceExternalSemaphoreInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceExternalSemaphoreInfo-sType-sType)"},
{"VUID-VkPhysicalDeviceFeatures2-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceFeatures2-sType-sType)"},
+ {"VUID-VkPhysicalDeviceFloat16Int8FeaturesKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceFloat16Int8FeaturesKHR-sType-sType)"},
{"VUID-VkPhysicalDeviceFloatControlsPropertiesKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceFloatControlsPropertiesKHR-sType-sType)"},
{"VUID-VkPhysicalDeviceFragmentDensityMapFeaturesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceFragmentDensityMapFeaturesEXT-sType-sType)"},
{"VUID-VkPhysicalDeviceFragmentDensityMapPropertiesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceFragmentDensityMapPropertiesEXT-sType-sType)"},
{"VUID-VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV-sType-sType)"},
- {"VUID-VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT-sType-sType)"},
{"VUID-VkPhysicalDeviceGroupProperties-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceGroupProperties-pNext-pNext)"},
{"VUID-VkPhysicalDeviceGroupProperties-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceGroupProperties-sType-sType)"},
- {"VUID-VkPhysicalDeviceHostQueryResetFeaturesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceHostQueryResetFeaturesEXT-sType-sType)"},
{"VUID-VkPhysicalDeviceIDProperties-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceIDProperties-sType-sType)"},
{"VUID-VkPhysicalDeviceImageDrmFormatModifierInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceImageDrmFormatModifierInfoEXT-sType-sType)"},
{"VUID-VkPhysicalDeviceImageDrmFormatModifierInfoEXT-sharingMode-02314", "If sharingMode is VK_SHARING_MODE_CONCURRENT, then pQueueFamilyIndices must be a valid pointer to an array of queueFamilyIndexCount uint32_t values. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceImageDrmFormatModifierInfoEXT-sharingMode-02314)"},
@@ -1653,12 +1579,8 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkPhysicalDeviceImageFormatInfo2-usage-requiredbitmask", "usage must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceImageFormatInfo2-usage-requiredbitmask)"},
{"VUID-VkPhysicalDeviceImageViewImageFormatInfoEXT-imageViewType-parameter", "imageViewType must be a valid VkImageViewType value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceImageViewImageFormatInfoEXT-imageViewType-parameter)"},
{"VUID-VkPhysicalDeviceImageViewImageFormatInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceImageViewImageFormatInfoEXT-sType-sType)"},
- {"VUID-VkPhysicalDeviceImagelessFramebufferFeaturesKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceImagelessFramebufferFeaturesKHR-sType-sType)"},
- {"VUID-VkPhysicalDeviceIndexTypeUint8FeaturesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceIndexTypeUint8FeaturesEXT-sType-sType)"},
{"VUID-VkPhysicalDeviceInlineUniformBlockFeaturesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceInlineUniformBlockFeaturesEXT-sType-sType)"},
{"VUID-VkPhysicalDeviceInlineUniformBlockPropertiesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceInlineUniformBlockPropertiesEXT-sType-sType)"},
- {"VUID-VkPhysicalDeviceLineRasterizationFeaturesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceLineRasterizationFeaturesEXT-sType-sType)"},
- {"VUID-VkPhysicalDeviceLineRasterizationPropertiesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceLineRasterizationPropertiesEXT-sType-sType)"},
{"VUID-VkPhysicalDeviceMaintenance3Properties-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceMaintenance3Properties-sType-sType)"},
{"VUID-VkPhysicalDeviceMemoryBudgetPropertiesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceMemoryBudgetPropertiesEXT-sType-sType)"},
{"VUID-VkPhysicalDeviceMemoryPriorityFeaturesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceMemoryPriorityFeaturesEXT-sType-sType)"},
@@ -1672,9 +1594,8 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX-sType-sType)"},
{"VUID-VkPhysicalDeviceMultiviewProperties-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceMultiviewProperties-sType-sType)"},
{"VUID-VkPhysicalDevicePCIBusInfoPropertiesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDevicePCIBusInfoPropertiesEXT-sType-sType)"},
- {"VUID-VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR-sType-sType)"},
{"VUID-VkPhysicalDevicePointClippingProperties-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDevicePointClippingProperties-sType-sType)"},
- {"VUID-VkPhysicalDeviceProperties2-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT, VkPhysicalDeviceConservativeRasterizationPropertiesEXT, VkPhysicalDeviceCooperativeMatrixPropertiesNV, VkPhysicalDeviceDepthStencilResolvePropertiesKHR, VkPhysicalDeviceDescriptorIndexingPropertiesEXT, VkPhysicalDeviceDiscardRectanglePropertiesEXT, VkPhysicalDeviceDriverPropertiesKHR, VkPhysicalDeviceExternalMemoryHostPropertiesEXT, VkPhysicalDeviceFloatControlsPropertiesKHR, VkPhysicalDeviceFragmentDensityMapPropertiesEXT, VkPhysicalDeviceIDProperties, VkPhysicalDeviceInlineUniformBlockPropertiesEXT, VkPhysicalDeviceLineRasterizationPropertiesEXT, VkPhysicalDeviceMaintenance3Properties, VkPhysicalDeviceMeshShaderPropertiesNV, VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX, VkPhysicalDeviceMultiviewProperties, VkPhysicalDevicePCIBusInfoPropertiesEXT, VkPhysicalDevicePointClippingProperties, VkPhysicalDeviceProtectedMemoryProperties, VkPhysicalDevicePushDescriptorPropertiesKHR, VkPhysicalDeviceRayTracingPropertiesNV, VkPhysicalDeviceSampleLocationsPropertiesEXT, VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT, VkPhysicalDeviceShaderCoreProperties2AMD, VkPhysicalDeviceShaderCorePropertiesAMD, VkPhysicalDeviceShaderSMBuiltinsPropertiesNV, VkPhysicalDeviceShadingRateImagePropertiesNV, VkPhysicalDeviceSubgroupProperties, VkPhysicalDeviceSubgroupSizeControlPropertiesEXT, VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT, VkPhysicalDeviceTransformFeedbackPropertiesEXT, or VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceProperties2-pNext-pNext)"},
+ {"VUID-VkPhysicalDeviceProperties2-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT, VkPhysicalDeviceConservativeRasterizationPropertiesEXT, VkPhysicalDeviceCooperativeMatrixPropertiesNV, VkPhysicalDeviceDepthStencilResolvePropertiesKHR, VkPhysicalDeviceDescriptorIndexingPropertiesEXT, VkPhysicalDeviceDiscardRectanglePropertiesEXT, VkPhysicalDeviceDriverPropertiesKHR, VkPhysicalDeviceExternalMemoryHostPropertiesEXT, VkPhysicalDeviceFloatControlsPropertiesKHR, VkPhysicalDeviceFragmentDensityMapPropertiesEXT, VkPhysicalDeviceIDProperties, VkPhysicalDeviceInlineUniformBlockPropertiesEXT, VkPhysicalDeviceMaintenance3Properties, VkPhysicalDeviceMeshShaderPropertiesNV, VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX, VkPhysicalDeviceMultiviewProperties, VkPhysicalDevicePCIBusInfoPropertiesEXT, VkPhysicalDevicePointClippingProperties, VkPhysicalDeviceProtectedMemoryProperties, VkPhysicalDevicePushDescriptorPropertiesKHR, VkPhysicalDeviceRayTracingPropertiesNV, VkPhysicalDeviceSampleLocationsPropertiesEXT, VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT, VkPhysicalDeviceShaderCorePropertiesAMD, VkPhysicalDeviceShadingRateImagePropertiesNV, VkPhysicalDeviceSubgroupProperties, VkPhysicalDeviceTransformFeedbackPropertiesEXT, or VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceProperties2-pNext-pNext)"},
{"VUID-VkPhysicalDeviceProperties2-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceProperties2-sType-sType)"},
{"VUID-VkPhysicalDeviceProperties2-sType-unique", "Each sType member in the pNext chain must be unique (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceProperties2-sType-unique)"},
{"VUID-VkPhysicalDeviceProtectedMemoryFeatures-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceProtectedMemoryFeatures-sType-sType)"},
@@ -1687,15 +1608,9 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkPhysicalDeviceSamplerYcbcrConversionFeatures-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceSamplerYcbcrConversionFeatures-sType-sType)"},
{"VUID-VkPhysicalDeviceScalarBlockLayoutFeaturesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceScalarBlockLayoutFeaturesEXT-sType-sType)"},
{"VUID-VkPhysicalDeviceShaderAtomicInt64FeaturesKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceShaderAtomicInt64FeaturesKHR-sType-sType)"},
- {"VUID-VkPhysicalDeviceShaderCoreProperties2AMD-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceShaderCoreProperties2AMD-sType-sType)"},
{"VUID-VkPhysicalDeviceShaderCorePropertiesAMD-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceShaderCorePropertiesAMD-sType-sType)"},
- {"VUID-VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT-sType-sType)"},
- {"VUID-VkPhysicalDeviceShaderDrawParametersFeatures-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceShaderDrawParametersFeatures-sType-sType)"},
- {"VUID-VkPhysicalDeviceShaderFloat16Int8FeaturesKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceShaderFloat16Int8FeaturesKHR-sType-sType)"},
+ {"VUID-VkPhysicalDeviceShaderDrawParameterFeatures-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceShaderDrawParameterFeatures-sType-sType)"},
{"VUID-VkPhysicalDeviceShaderImageFootprintFeaturesNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceShaderImageFootprintFeaturesNV-sType-sType)"},
- {"VUID-VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL-sType-sType)"},
- {"VUID-VkPhysicalDeviceShaderSMBuiltinsFeaturesNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceShaderSMBuiltinsFeaturesNV-sType-sType)"},
- {"VUID-VkPhysicalDeviceShaderSMBuiltinsPropertiesNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceShaderSMBuiltinsPropertiesNV-sType-sType)"},
{"VUID-VkPhysicalDeviceShadingRateImageFeaturesNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceShadingRateImageFeaturesNV-sType-sType)"},
{"VUID-VkPhysicalDeviceShadingRateImagePropertiesNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceShadingRateImagePropertiesNV-sType-sType)"},
{"VUID-VkPhysicalDeviceSparseImageFormatInfo2-format-parameter", "format must be a valid VkFormat value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceSparseImageFormatInfo2-format-parameter)"},
@@ -1708,21 +1623,13 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkPhysicalDeviceSparseImageFormatInfo2-usage-parameter", "usage must be a valid combination of VkImageUsageFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceSparseImageFormatInfo2-usage-parameter)"},
{"VUID-VkPhysicalDeviceSparseImageFormatInfo2-usage-requiredbitmask", "usage must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceSparseImageFormatInfo2-usage-requiredbitmask)"},
{"VUID-VkPhysicalDeviceSubgroupProperties-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceSubgroupProperties-sType-sType)"},
- {"VUID-VkPhysicalDeviceSubgroupSizeControlFeaturesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceSubgroupSizeControlFeaturesEXT-sType-sType)"},
- {"VUID-VkPhysicalDeviceSubgroupSizeControlPropertiesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceSubgroupSizeControlPropertiesEXT-sType-sType)"},
- {"VUID-VkPhysicalDeviceSurfaceInfo2KHR-pNext-02672", "If the pNext chain includes an instance of VkSurfaceFullScreenExclusiveInfoEXT with its fullScreenExclusive member set to VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT, and surface was created using vkCreateWin32SurfaceKHR, an instance of VkSurfaceFullScreenExclusiveWin32InfoEXT must be present in the pNext chain (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceSurfaceInfo2KHR-pNext-02672)"},
- {"VUID-VkPhysicalDeviceSurfaceInfo2KHR-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkSurfaceFullScreenExclusiveInfoEXT or VkSurfaceFullScreenExclusiveWin32InfoEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceSurfaceInfo2KHR-pNext-pNext)"},
+ {"VUID-VkPhysicalDeviceSurfaceInfo2KHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceSurfaceInfo2KHR-pNext-pNext)"},
{"VUID-VkPhysicalDeviceSurfaceInfo2KHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceSurfaceInfo2KHR-sType-sType)"},
- {"VUID-VkPhysicalDeviceSurfaceInfo2KHR-sType-unique", "Each sType member in the pNext chain must be unique (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceSurfaceInfo2KHR-sType-unique)"},
{"VUID-VkPhysicalDeviceSurfaceInfo2KHR-surface-parameter", "surface must be a valid VkSurfaceKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceSurfaceInfo2KHR-surface-parameter)"},
- {"VUID-VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT-sType-sType)"},
- {"VUID-VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT-sType-sType)"},
- {"VUID-VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT-sType-sType)"},
{"VUID-VkPhysicalDeviceTransformFeedbackFeaturesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceTransformFeedbackFeaturesEXT-sType-sType)"},
{"VUID-VkPhysicalDeviceTransformFeedbackPropertiesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceTransformFeedbackPropertiesEXT-sType-sType)"},
- {"VUID-VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR-sType-sType)"},
- {"VUID-VkPhysicalDeviceVariablePointersFeatures-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceVariablePointersFeatures-sType-sType)"},
- {"VUID-VkPhysicalDeviceVariablePointersFeatures-variablePointers-01431", "If variablePointers is enabled then variablePointersStorageBuffer must also be enabled. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceVariablePointersFeatures-variablePointers-01431)"},
+ {"VUID-VkPhysicalDeviceVariablePointerFeatures-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceVariablePointerFeatures-sType-sType)"},
+ {"VUID-VkPhysicalDeviceVariablePointerFeatures-variablePointers-01431", "If variablePointers is enabled then variablePointersStorageBuffer must also be enabled. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceVariablePointerFeatures-variablePointers-01431)"},
{"VUID-VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT-sType-sType)"},
{"VUID-VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT-sType-sType)"},
{"VUID-VkPhysicalDeviceVulkanMemoryModelFeaturesKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceVulkanMemoryModelFeaturesKHR-sType-sType)"},
@@ -1761,25 +1668,13 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkPipelineColorBlendStateCreateInfo-pAttachments-parameter", "If attachmentCount is not 0, pAttachments must be a valid pointer to an array of attachmentCount valid VkPipelineColorBlendAttachmentState structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineColorBlendStateCreateInfo-pAttachments-parameter)"},
{"VUID-VkPipelineColorBlendStateCreateInfo-pNext-pNext", "pNext must be NULL or a pointer to a valid instance of VkPipelineColorBlendAdvancedStateCreateInfoEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineColorBlendStateCreateInfo-pNext-pNext)"},
{"VUID-VkPipelineColorBlendStateCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineColorBlendStateCreateInfo-sType-sType)"},
- {"VUID-VkPipelineCompilerControlCreateInfoAMD-compilerControlFlags-zerobitmask", "compilerControlFlags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineCompilerControlCreateInfoAMD-compilerControlFlags-zerobitmask)"},
- {"VUID-VkPipelineCompilerControlCreateInfoAMD-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineCompilerControlCreateInfoAMD-sType-sType)"},
{"VUID-VkPipelineCoverageModulationStateCreateInfoNV-coverageModulationMode-parameter", "coverageModulationMode must be a valid VkCoverageModulationModeNV value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineCoverageModulationStateCreateInfoNV-coverageModulationMode-parameter)"},
{"VUID-VkPipelineCoverageModulationStateCreateInfoNV-coverageModulationTableEnable-01405", "If coverageModulationTableEnable is VK_TRUE, coverageModulationTableCount must be equal to the number of rasterization samples divided by the number of color samples in the subpass. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineCoverageModulationStateCreateInfoNV-coverageModulationTableEnable-01405)"},
{"VUID-VkPipelineCoverageModulationStateCreateInfoNV-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineCoverageModulationStateCreateInfoNV-flags-zerobitmask)"},
{"VUID-VkPipelineCoverageModulationStateCreateInfoNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineCoverageModulationStateCreateInfoNV-sType-sType)"},
- {"VUID-VkPipelineCoverageReductionStateCreateInfoNV-coverageReductionMode-parameter", "coverageReductionMode must be a valid VkCoverageReductionModeNV value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineCoverageReductionStateCreateInfoNV-coverageReductionMode-parameter)"},
- {"VUID-VkPipelineCoverageReductionStateCreateInfoNV-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineCoverageReductionStateCreateInfoNV-flags-zerobitmask)"},
- {"VUID-VkPipelineCoverageReductionStateCreateInfoNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineCoverageReductionStateCreateInfoNV-sType-sType)"},
{"VUID-VkPipelineCoverageToColorStateCreateInfoNV-coverageToColorEnable-01404", "If coverageToColorEnable is VK_TRUE, then the render pass subpass indicated by VkGraphicsPipelineCreateInfo::renderPass and VkGraphicsPipelineCreateInfo::subpass must have a color attachment at the location selected by coverageToColorLocation, with a VkFormat of VK_FORMAT_R8_UINT, VK_FORMAT_R8_SINT, VK_FORMAT_R16_UINT, VK_FORMAT_R16_SINT, VK_FORMAT_R32_UINT, or VK_FORMAT_R32_SINT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineCoverageToColorStateCreateInfoNV-coverageToColorEnable-01404)"},
{"VUID-VkPipelineCoverageToColorStateCreateInfoNV-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineCoverageToColorStateCreateInfoNV-flags-zerobitmask)"},
{"VUID-VkPipelineCoverageToColorStateCreateInfoNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineCoverageToColorStateCreateInfoNV-sType-sType)"},
- {"VUID-VkPipelineCreationFeedbackCreateInfoEXT-pPipelineCreationFeedback-parameter", "pPipelineCreationFeedback must be a valid pointer to a VkPipelineCreationFeedbackEXT structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineCreationFeedbackCreateInfoEXT-pPipelineCreationFeedback-parameter)"},
- {"VUID-VkPipelineCreationFeedbackCreateInfoEXT-pPipelineStageCreationFeedbacks-parameter", "pPipelineStageCreationFeedbacks must be a valid pointer to an array of pipelineStageCreationFeedbackCount VkPipelineCreationFeedbackEXT structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineCreationFeedbackCreateInfoEXT-pPipelineStageCreationFeedbacks-parameter)"},
- {"VUID-VkPipelineCreationFeedbackCreateInfoEXT-pipelineStageCreationFeedbackCount-02668", "When chained to VkGraphicsPipelineCreateInfo, VkPipelineCreationFeedbackEXT::pipelineStageCreationFeedbackCount must equal VkGraphicsPipelineCreateInfo::stageCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineCreationFeedbackCreateInfoEXT-pipelineStageCreationFeedbackCount-02668)"},
- {"VUID-VkPipelineCreationFeedbackCreateInfoEXT-pipelineStageCreationFeedbackCount-02669", "When chained to VkComputePipelineCreateInfo, VkPipelineCreationFeedbackEXT::pipelineStageCreationFeedbackCount must equal 1 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineCreationFeedbackCreateInfoEXT-pipelineStageCreationFeedbackCount-02669)"},
- {"VUID-VkPipelineCreationFeedbackCreateInfoEXT-pipelineStageCreationFeedbackCount-02670", "When chained to VkRayTracingPipelineCreateInfoNV, VkPipelineCreationFeedbackEXT::pipelineStageCreationFeedbackCount must equal VkRayTracingPipelineCreateInfoNV::stageCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineCreationFeedbackCreateInfoEXT-pipelineStageCreationFeedbackCount-02670)"},
- {"VUID-VkPipelineCreationFeedbackCreateInfoEXT-pipelineStageCreationFeedbackCount-arraylength", "pipelineStageCreationFeedbackCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineCreationFeedbackCreateInfoEXT-pipelineStageCreationFeedbackCount-arraylength)"},
- {"VUID-VkPipelineCreationFeedbackCreateInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineCreationFeedbackCreateInfoEXT-sType-sType)"},
{"VUID-VkPipelineDepthStencilStateCreateInfo-back-parameter", "back must be a valid VkStencilOpState structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineDepthStencilStateCreateInfo-back-parameter)"},
{"VUID-VkPipelineDepthStencilStateCreateInfo-depthBoundsTestEnable-00598", "If the depth bounds testing feature is not enabled, depthBoundsTestEnable must be VK_FALSE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineDepthStencilStateCreateInfo-depthBoundsTestEnable-00598)"},
{"VUID-VkPipelineDepthStencilStateCreateInfo-depthCompareOp-parameter", "depthCompareOp must be a valid VkCompareOp value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineDepthStencilStateCreateInfo-depthCompareOp-parameter)"},
@@ -1796,22 +1691,6 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkPipelineDynamicStateCreateInfo-pDynamicStates-parameter", "If dynamicStateCount is not 0, pDynamicStates must be a valid pointer to an array of dynamicStateCount valid VkDynamicState values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineDynamicStateCreateInfo-pDynamicStates-parameter)"},
{"VUID-VkPipelineDynamicStateCreateInfo-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineDynamicStateCreateInfo-pNext-pNext)"},
{"VUID-VkPipelineDynamicStateCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineDynamicStateCreateInfo-sType-sType)"},
- {"VUID-VkPipelineExecutableInfoKHR-executableIndex-03275", "executableIndex must be less than the number of executables associated with pipeline as returned in the pExecutableCount parameter of vkGetPipelineExecutablePropertiesKHR. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineExecutableInfoKHR-executableIndex-03275)"},
- {"VUID-VkPipelineExecutableInfoKHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineExecutableInfoKHR-pNext-pNext)"},
- {"VUID-VkPipelineExecutableInfoKHR-pipeline-parameter", "pipeline must be a valid VkPipeline handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineExecutableInfoKHR-pipeline-parameter)"},
- {"VUID-VkPipelineExecutableInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineExecutableInfoKHR-sType-sType)"},
- {"VUID-VkPipelineExecutableInternalRepresentationKHR-description-parameter", "description must be a null-terminated UTF-8 string whose length is less than or equal to VK_MAX_DESCRIPTION_SIZE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineExecutableInternalRepresentationKHR-description-parameter)"},
- {"VUID-VkPipelineExecutableInternalRepresentationKHR-name-parameter", "name must be a null-terminated UTF-8 string whose length is less than or equal to VK_MAX_DESCRIPTION_SIZE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineExecutableInternalRepresentationKHR-name-parameter)"},
- {"VUID-VkPipelineExecutableInternalRepresentationKHR-pData-parameter", "If dataSize is not 0, and pData is not NULL, pData must be a valid pointer to an array of dataSize bytes (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineExecutableInternalRepresentationKHR-pData-parameter)"},
- {"VUID-VkPipelineExecutableInternalRepresentationKHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineExecutableInternalRepresentationKHR-pNext-pNext)"},
- {"VUID-VkPipelineExecutableInternalRepresentationKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineExecutableInternalRepresentationKHR-sType-sType)"},
- {"VUID-VkPipelineExecutablePropertiesKHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineExecutablePropertiesKHR-pNext-pNext)"},
- {"VUID-VkPipelineExecutablePropertiesKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineExecutablePropertiesKHR-sType-sType)"},
- {"VUID-VkPipelineExecutableStatisticKHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineExecutableStatisticKHR-pNext-pNext)"},
- {"VUID-VkPipelineExecutableStatisticKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineExecutableStatisticKHR-sType-sType)"},
- {"VUID-VkPipelineInfoKHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineInfoKHR-pNext-pNext)"},
- {"VUID-VkPipelineInfoKHR-pipeline-parameter", "pipeline must be a valid VkPipeline handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineInfoKHR-pipeline-parameter)"},
- {"VUID-VkPipelineInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineInfoKHR-sType-sType)"},
{"VUID-VkPipelineInputAssemblyStateCreateInfo-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineInputAssemblyStateCreateInfo-flags-zerobitmask)"},
{"VUID-VkPipelineInputAssemblyStateCreateInfo-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineInputAssemblyStateCreateInfo-pNext-pNext)"},
{"VUID-VkPipelineInputAssemblyStateCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineInputAssemblyStateCreateInfo-sType-sType)"},
@@ -1879,7 +1758,7 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkPipelineMultisampleStateCreateInfo-alphaToOneEnable-00785", "If the alpha to one feature is not enabled, alphaToOneEnable must be VK_FALSE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineMultisampleStateCreateInfo-alphaToOneEnable-00785)"},
{"VUID-VkPipelineMultisampleStateCreateInfo-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineMultisampleStateCreateInfo-flags-zerobitmask)"},
{"VUID-VkPipelineMultisampleStateCreateInfo-minSampleShading-00786", "minSampleShading must be in the range [0,1] (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineMultisampleStateCreateInfo-minSampleShading-00786)"},
- {"VUID-VkPipelineMultisampleStateCreateInfo-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkPipelineCoverageModulationStateCreateInfoNV, VkPipelineCoverageReductionStateCreateInfoNV, VkPipelineCoverageToColorStateCreateInfoNV, or VkPipelineSampleLocationsStateCreateInfoEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineMultisampleStateCreateInfo-pNext-pNext)"},
+ {"VUID-VkPipelineMultisampleStateCreateInfo-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkPipelineCoverageModulationStateCreateInfoNV, VkPipelineCoverageToColorStateCreateInfoNV, or VkPipelineSampleLocationsStateCreateInfoEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineMultisampleStateCreateInfo-pNext-pNext)"},
{"VUID-VkPipelineMultisampleStateCreateInfo-pSampleMask-parameter", "If pSampleMask is not NULL, pSampleMask must be a valid pointer to an array of (rasterizationSamples/32) VkSampleMask values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineMultisampleStateCreateInfo-pSampleMask-parameter)"},
{"VUID-VkPipelineMultisampleStateCreateInfo-rasterizationSamples-01415", "If the VK_NV_framebuffer_mixed_samples extension is enabled, and if the subpass has any color attachments and rasterizationSamples is greater than the number of color samples, then sampleShadingEnable must be VK_FALSE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineMultisampleStateCreateInfo-rasterizationSamples-01415)"},
{"VUID-VkPipelineMultisampleStateCreateInfo-rasterizationSamples-parameter", "rasterizationSamples must be a valid VkSampleCountFlagBits value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineMultisampleStateCreateInfo-rasterizationSamples-parameter)"},
@@ -1892,20 +1771,11 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkPipelineRasterizationConservativeStateCreateInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationConservativeStateCreateInfoEXT-sType-sType)"},
{"VUID-VkPipelineRasterizationDepthClipStateCreateInfoEXT-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationDepthClipStateCreateInfoEXT-flags-zerobitmask)"},
{"VUID-VkPipelineRasterizationDepthClipStateCreateInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationDepthClipStateCreateInfoEXT-sType-sType)"},
- {"VUID-VkPipelineRasterizationLineStateCreateInfoEXT-lineRasterizationMode-02768", "If lineRasterizationMode is VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT, then the rectangularLines feature must be enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationLineStateCreateInfoEXT-lineRasterizationMode-02768)"},
- {"VUID-VkPipelineRasterizationLineStateCreateInfoEXT-lineRasterizationMode-02769", "If lineRasterizationMode is VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT, then the bresenhamLines feature must be enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationLineStateCreateInfoEXT-lineRasterizationMode-02769)"},
- {"VUID-VkPipelineRasterizationLineStateCreateInfoEXT-lineRasterizationMode-02770", "If lineRasterizationMode is VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT, then the smoothLines feature must be enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationLineStateCreateInfoEXT-lineRasterizationMode-02770)"},
- {"VUID-VkPipelineRasterizationLineStateCreateInfoEXT-lineRasterizationMode-parameter", "lineRasterizationMode must be a valid VkLineRasterizationModeEXT value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationLineStateCreateInfoEXT-lineRasterizationMode-parameter)"},
- {"VUID-VkPipelineRasterizationLineStateCreateInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationLineStateCreateInfoEXT-sType-sType)"},
- {"VUID-VkPipelineRasterizationLineStateCreateInfoEXT-stippledLineEnable-02771", "If stippledLineEnable is VK_TRUE and lineRasterizationMode is VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT, then the stippledRectangularLines feature must be enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationLineStateCreateInfoEXT-stippledLineEnable-02771)"},
- {"VUID-VkPipelineRasterizationLineStateCreateInfoEXT-stippledLineEnable-02772", "If stippledLineEnable is VK_TRUE and lineRasterizationMode is VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT, then the stippledBresenhamLines feature must be enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationLineStateCreateInfoEXT-stippledLineEnable-02772)"},
- {"VUID-VkPipelineRasterizationLineStateCreateInfoEXT-stippledLineEnable-02773", "If stippledLineEnable is VK_TRUE and lineRasterizationMode is VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT, then the stippledSmoothLines feature must be enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationLineStateCreateInfoEXT-stippledLineEnable-02773)"},
- {"VUID-VkPipelineRasterizationLineStateCreateInfoEXT-stippledLineEnable-02774", "If stippledLineEnable is VK_TRUE and lineRasterizationMode is VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT, then the stippledRectangularLines feature must be enabled and VkPhysicalDeviceLimits::strictLines must be VK_TRUE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationLineStateCreateInfoEXT-stippledLineEnable-02774)"},
{"VUID-VkPipelineRasterizationStateCreateInfo-cullMode-parameter", "cullMode must be a valid combination of VkCullModeFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationStateCreateInfo-cullMode-parameter)"},
{"VUID-VkPipelineRasterizationStateCreateInfo-depthClampEnable-00782", "If the depth clamping feature is not enabled, depthClampEnable must be VK_FALSE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationStateCreateInfo-depthClampEnable-00782)"},
{"VUID-VkPipelineRasterizationStateCreateInfo-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationStateCreateInfo-flags-zerobitmask)"},
{"VUID-VkPipelineRasterizationStateCreateInfo-frontFace-parameter", "frontFace must be a valid VkFrontFace value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationStateCreateInfo-frontFace-parameter)"},
- {"VUID-VkPipelineRasterizationStateCreateInfo-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkPipelineRasterizationConservativeStateCreateInfoEXT, VkPipelineRasterizationDepthClipStateCreateInfoEXT, VkPipelineRasterizationLineStateCreateInfoEXT, VkPipelineRasterizationStateRasterizationOrderAMD, or VkPipelineRasterizationStateStreamCreateInfoEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationStateCreateInfo-pNext-pNext)"},
+ {"VUID-VkPipelineRasterizationStateCreateInfo-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkPipelineRasterizationConservativeStateCreateInfoEXT, VkPipelineRasterizationDepthClipStateCreateInfoEXT, VkPipelineRasterizationStateRasterizationOrderAMD, or VkPipelineRasterizationStateStreamCreateInfoEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationStateCreateInfo-pNext-pNext)"},
{"VUID-VkPipelineRasterizationStateCreateInfo-polygonMode-01413", "If the non-solid fill modes feature is not enabled, polygonMode must be VK_POLYGON_MODE_FILL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationStateCreateInfo-polygonMode-01413)"},
{"VUID-VkPipelineRasterizationStateCreateInfo-polygonMode-01414", "If the VK_NV_fill_rectangle extension is not enabled, polygonMode must not be VK_POLYGON_MODE_FILL_RECTANGLE_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationStateCreateInfo-polygonMode-01414)"},
{"VUID-VkPipelineRasterizationStateCreateInfo-polygonMode-01507", "If the non-solid fill modes feature is not enabled, polygonMode must be VK_POLYGON_MODE_FILL or VK_POLYGON_MODE_FILL_RECTANGLE_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationStateCreateInfo-polygonMode-01507)"},
@@ -1922,11 +1792,7 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkPipelineRepresentativeFragmentTestStateCreateInfoNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRepresentativeFragmentTestStateCreateInfoNV-sType-sType)"},
{"VUID-VkPipelineSampleLocationsStateCreateInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineSampleLocationsStateCreateInfoEXT-sType-sType)"},
{"VUID-VkPipelineSampleLocationsStateCreateInfoEXT-sampleLocationsInfo-parameter", "sampleLocationsInfo must be a valid VkSampleLocationsInfoEXT structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineSampleLocationsStateCreateInfoEXT-sampleLocationsInfo-parameter)"},
- {"VUID-VkPipelineShaderStageCreateInfo-flags-02758", "If flags has both the VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT and VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT flags set, the local workgroup size in the X dimension of the pipeline must be a multiple of maxSubgroupSize. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-flags-02758)"},
- {"VUID-VkPipelineShaderStageCreateInfo-flags-02759", "If flags has the VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT flag set and flags does not have the VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT flag set and no VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT structure is chained to pNext, the local workgroup size in the X dimension of the pipeline must be a multiple of subgroupSize. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-flags-02759)"},
- {"VUID-VkPipelineShaderStageCreateInfo-flags-02784", "If flags has the VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT flag set, the subgroupSizeControl feature must be enabled. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-flags-02784)"},
- {"VUID-VkPipelineShaderStageCreateInfo-flags-02785", "If flags has the VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT flag set, the computeFullSubgroups feature must be enabled. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-flags-02785)"},
- {"VUID-VkPipelineShaderStageCreateInfo-flags-parameter", "flags must be a valid combination of VkPipelineShaderStageCreateFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-flags-parameter)"},
+ {"VUID-VkPipelineShaderStageCreateInfo-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-flags-zerobitmask)"},
{"VUID-VkPipelineShaderStageCreateInfo-maxClipDistances-00708", "If the identified entry point includes any variable in its interface that is declared with the ClipDistance BuiltIn decoration, that variable must not have an array size greater than VkPhysicalDeviceLimits::maxClipDistances (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-maxClipDistances-00708)"},
{"VUID-VkPipelineShaderStageCreateInfo-maxCombinedClipAndCullDistances-00710", "If the identified entry point includes any variables in its interface that are declared with the ClipDistance or CullDistance BuiltIn decoration, those variables must not have array sizes which sum to more than VkPhysicalDeviceLimits::maxCombinedClipAndCullDistances (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-maxCombinedClipAndCullDistances-00710)"},
{"VUID-VkPipelineShaderStageCreateInfo-maxCullDistances-00709", "If the identified entry point includes any variable in its interface that is declared with the CullDistance BuiltIn decoration, that variable must not have an array size greater than VkPhysicalDeviceLimits::maxCullDistances (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-maxCullDistances-00709)"},
@@ -1934,11 +1800,7 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkPipelineShaderStageCreateInfo-module-parameter", "module must be a valid VkShaderModule handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-module-parameter)"},
{"VUID-VkPipelineShaderStageCreateInfo-pName-00707", "pName must be the name of an OpEntryPoint in module with an execution model that matches stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-pName-00707)"},
{"VUID-VkPipelineShaderStageCreateInfo-pName-parameter", "pName must be a null-terminated UTF-8 string (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-pName-parameter)"},
- {"VUID-VkPipelineShaderStageCreateInfo-pNext-02754", "If a VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT structure is chained to pNext, flags must not have the VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT flag set. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-pNext-02754)"},
- {"VUID-VkPipelineShaderStageCreateInfo-pNext-02755", "If a VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT structure is chained to pNext, the subgroupSizeControl feature must be enabled, and stage must be a valid bit specified in requiredSubgroupSizeStages. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-pNext-02755)"},
- {"VUID-VkPipelineShaderStageCreateInfo-pNext-02756", "If a VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT structure is chained to pNext and stage is VK_SHADER_STAGE_COMPUTE_BIT then local workgroup size of the shader must be less than or equal to the product of VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT::requiredSubgroupSize and maxComputeWorkgroupSubgroups. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-pNext-02756)"},
- {"VUID-VkPipelineShaderStageCreateInfo-pNext-02757", "If a VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT structure is chained to pNext, and flags has the VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT flag set, the local workgroup size in the X dimension of the pipeline must be a multiple of VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT::requiredSubgroupSize. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-pNext-02757)"},
- {"VUID-VkPipelineShaderStageCreateInfo-pNext-pNext", "pNext must be NULL or a pointer to a valid instance of VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-pNext-pNext)"},
+ {"VUID-VkPipelineShaderStageCreateInfo-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-pNext-pNext)"},
{"VUID-VkPipelineShaderStageCreateInfo-pSpecializationInfo-parameter", "If pSpecializationInfo is not NULL, pSpecializationInfo must be a valid pointer to a valid VkSpecializationInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-pSpecializationInfo-parameter)"},
{"VUID-VkPipelineShaderStageCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-sType-sType)"},
{"VUID-VkPipelineShaderStageCreateInfo-stage-00704", "If the geometry shaders feature is not enabled, stage must not be VK_SHADER_STAGE_GEOMETRY_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-stage-00704)"},
@@ -1958,10 +1820,6 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkPipelineShaderStageCreateInfo-stage-02596", "If stage is a vertex processing stage, and the identified entry point writes to Layer for any primitive, it must write the same value to Layer for all vertices of a given primitive (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-stage-02596)"},
{"VUID-VkPipelineShaderStageCreateInfo-stage-02597", "If stage is a vertex processing stage, and the identified entry point writes to ViewportIndex for any primitive, it must write the same value to ViewportIndex for all vertices of a given primitive (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-stage-02597)"},
{"VUID-VkPipelineShaderStageCreateInfo-stage-parameter", "stage must be a valid VkShaderStageFlagBits value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-stage-parameter)"},
- {"VUID-VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT-requiredSubgroupSize-02760", "requiredSubgroupSize must be a power-of-two integer. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT-requiredSubgroupSize-02760)"},
- {"VUID-VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT-requiredSubgroupSize-02761", "requiredSubgroupSize must be greater or equal to minSubgroupSize. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT-requiredSubgroupSize-02761)"},
- {"VUID-VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT-requiredSubgroupSize-02762", "requiredSubgroupSize must be less than or equal to maxSubgroupSize. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT-requiredSubgroupSize-02762)"},
- {"VUID-VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT-sType-sType)"},
{"VUID-VkPipelineTessellationDomainOriginStateCreateInfo-domainOrigin-parameter", "domainOrigin must be a valid VkTessellationDomainOrigin value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineTessellationDomainOriginStateCreateInfo-domainOrigin-parameter)"},
{"VUID-VkPipelineTessellationDomainOriginStateCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineTessellationDomainOriginStateCreateInfo-sType-sType)"},
{"VUID-VkPipelineTessellationStateCreateInfo-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineTessellationStateCreateInfo-flags-zerobitmask)"},
@@ -2017,13 +1875,11 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkPipelineViewportSwizzleStateCreateInfoNV-viewportCount-arraylength", "viewportCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineViewportSwizzleStateCreateInfoNV-viewportCount-arraylength)"},
{"VUID-VkPipelineViewportWScalingStateCreateInfoNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineViewportWScalingStateCreateInfoNV-sType-sType)"},
{"VUID-VkPipelineViewportWScalingStateCreateInfoNV-viewportCount-arraylength", "viewportCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineViewportWScalingStateCreateInfoNV-viewportCount-arraylength)"},
- {"VUID-VkPresentFrameTokenGGP-frameToken-02680", "frameToken must be a valid GgpFrameToken (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPresentFrameTokenGGP-frameToken-02680)"},
- {"VUID-VkPresentFrameTokenGGP-sType-sType", "sType must be VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPresentFrameTokenGGP-sType-sType)"},
{"VUID-VkPresentInfoKHR-commonparent", "Both of the elements of pSwapchains, and the elements of pWaitSemaphores that are valid handles must have been created, allocated, or retrieved from the same VkInstance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPresentInfoKHR-commonparent)"},
{"VUID-VkPresentInfoKHR-pImageIndices-01296", "Each element of pImageIndices must be the index of a presentable image acquired from the swapchain specified by the corresponding element of the pSwapchains array, and the presented image subresource must be in the VK_IMAGE_LAYOUT_PRESENT_SRC_KHR layout at the time the operation is executed on a VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPresentInfoKHR-pImageIndices-01296)"},
{"VUID-VkPresentInfoKHR-pImageIndices-01430", "Each element of pImageIndices must be the index of a presentable image acquired from the swapchain specified by the corresponding element of the pSwapchains array, and the presented image subresource must be in the VK_IMAGE_LAYOUT_PRESENT_SRC_KHR or VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR layout at the time the operation is executed on a VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPresentInfoKHR-pImageIndices-01430)"},
{"VUID-VkPresentInfoKHR-pImageIndices-parameter", "pImageIndices must be a valid pointer to an array of swapchainCount uint32_t values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPresentInfoKHR-pImageIndices-parameter)"},
- {"VUID-VkPresentInfoKHR-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkDeviceGroupPresentInfoKHR, VkDisplayPresentInfoKHR, VkPresentFrameTokenGGP, VkPresentRegionsKHR, or VkPresentTimesInfoGOOGLE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPresentInfoKHR-pNext-pNext)"},
+ {"VUID-VkPresentInfoKHR-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkDeviceGroupPresentInfoKHR, VkDisplayPresentInfoKHR, VkPresentRegionsKHR, or VkPresentTimesInfoGOOGLE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPresentInfoKHR-pNext-pNext)"},
{"VUID-VkPresentInfoKHR-pResults-parameter", "If pResults is not NULL, pResults must be a valid pointer to an array of swapchainCount VkResult values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPresentInfoKHR-pResults-parameter)"},
{"VUID-VkPresentInfoKHR-pSwapchains-parameter", "pSwapchains must be a valid pointer to an array of swapchainCount valid VkSwapchainKHR handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPresentInfoKHR-pSwapchains-parameter)"},
{"VUID-VkPresentInfoKHR-pWaitSemaphores-parameter", "If waitSemaphoreCount is not 0, pWaitSemaphores must be a valid pointer to an array of waitSemaphoreCount valid VkSemaphore handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPresentInfoKHR-pWaitSemaphores-parameter)"},
@@ -2053,14 +1909,10 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkPushConstantRange-stageFlags-requiredbitmask", "stageFlags must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPushConstantRange-stageFlags-requiredbitmask)"},
{"VUID-VkQueryPoolCreateInfo-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkQueryPoolCreateInfo-flags-zerobitmask)"},
{"VUID-VkQueryPoolCreateInfo-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkQueryPoolCreateInfo-pNext-pNext)"},
- {"VUID-VkQueryPoolCreateInfo-queryCount-02763", "queryCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkQueryPoolCreateInfo-queryCount-02763)"},
{"VUID-VkQueryPoolCreateInfo-queryType-00791", "If the pipeline statistics queries feature is not enabled, queryType must not be VK_QUERY_TYPE_PIPELINE_STATISTICS (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkQueryPoolCreateInfo-queryType-00791)"},
{"VUID-VkQueryPoolCreateInfo-queryType-00792", "If queryType is VK_QUERY_TYPE_PIPELINE_STATISTICS, pipelineStatistics must be a valid combination of VkQueryPipelineStatisticFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkQueryPoolCreateInfo-queryType-00792)"},
{"VUID-VkQueryPoolCreateInfo-queryType-parameter", "queryType must be a valid VkQueryType value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkQueryPoolCreateInfo-queryType-parameter)"},
{"VUID-VkQueryPoolCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkQueryPoolCreateInfo-sType-sType)"},
- {"VUID-VkQueryPoolCreateInfoINTEL-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkQueryPoolCreateInfoINTEL-pNext-pNext)"},
- {"VUID-VkQueryPoolCreateInfoINTEL-performanceCountersSampling-parameter", "performanceCountersSampling must be a valid VkQueryPoolSamplingModeINTEL value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkQueryPoolCreateInfoINTEL-performanceCountersSampling-parameter)"},
- {"VUID-VkQueryPoolCreateInfoINTEL-sType-sType", "sType must be VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkQueryPoolCreateInfoINTEL-sType-sType)"},
{"VUID-VkQueueFamilyCheckpointPropertiesNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkQueueFamilyCheckpointPropertiesNV-sType-sType)"},
{"VUID-VkQueueFamilyProperties2-pNext-pNext", "pNext must be NULL or a pointer to a valid instance of VkQueueFamilyCheckpointPropertiesNV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkQueueFamilyProperties2-pNext-pNext)"},
{"VUID-VkQueueFamilyProperties2-sType-sType", "sType must be VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkQueueFamilyProperties2-sType-sType)"},
@@ -2076,7 +1928,7 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkRayTracingPipelineCreateInfoNV-layout-parameter", "layout must be a valid VkPipelineLayout handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRayTracingPipelineCreateInfoNV-layout-parameter)"},
{"VUID-VkRayTracingPipelineCreateInfoNV-maxRecursionDepth-02412", "maxRecursionDepth must be less than or equal to VkPhysicalDeviceRayTracingPropertiesNV::maxRecursionDepth (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRayTracingPipelineCreateInfoNV-maxRecursionDepth-02412)"},
{"VUID-VkRayTracingPipelineCreateInfoNV-pGroups-parameter", "pGroups must be a valid pointer to an array of groupCount valid VkRayTracingShaderGroupCreateInfoNV structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRayTracingPipelineCreateInfoNV-pGroups-parameter)"},
- {"VUID-VkRayTracingPipelineCreateInfoNV-pNext-pNext", "pNext must be NULL or a pointer to a valid instance of VkPipelineCreationFeedbackCreateInfoEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRayTracingPipelineCreateInfoNV-pNext-pNext)"},
+ {"VUID-VkRayTracingPipelineCreateInfoNV-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRayTracingPipelineCreateInfoNV-pNext-pNext)"},
{"VUID-VkRayTracingPipelineCreateInfoNV-pStages-02409", "The shader code for the entry points identified by pStages, and the rest of the state identified by this structure must adhere to the pipeline linking rules described in the Shader Interfaces chapter (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRayTracingPipelineCreateInfoNV-pStages-02409)"},
{"VUID-VkRayTracingPipelineCreateInfoNV-pStages-parameter", "pStages must be a valid pointer to an array of stageCount valid VkPipelineShaderStageCreateInfo structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRayTracingPipelineCreateInfoNV-pStages-parameter)"},
{"VUID-VkRayTracingPipelineCreateInfoNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRayTracingPipelineCreateInfoNV-sType-sType)"},
@@ -2093,27 +1945,11 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkRayTracingShaderGroupCreateInfoNV-type-parameter", "type must be a valid VkRayTracingShaderGroupTypeNV value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRayTracingShaderGroupCreateInfoNV-type-parameter)"},
{"VUID-VkRectLayerKHR-layer-01262", "layer must be less than imageArrayLayers member of the VkSwapchainCreateInfoKHR structure given to vkCreateSwapchainKHR. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRectLayerKHR-layer-01262)"},
{"VUID-VkRectLayerKHR-offset-01261", "The sum of offset and extent must be no greater than the imageExtent member of the VkSwapchainCreateInfoKHR structure given to vkCreateSwapchainKHR. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRectLayerKHR-offset-01261)"},
- {"VUID-VkRenderPassAttachmentBeginInfoKHR-pAttachments-03218", "Each element of pAttachments must only specify a single mip level (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassAttachmentBeginInfoKHR-pAttachments-03218)"},
- {"VUID-VkRenderPassAttachmentBeginInfoKHR-pAttachments-03219", "Each element of pAttachments must have been created with the identity swizzle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassAttachmentBeginInfoKHR-pAttachments-03219)"},
- {"VUID-VkRenderPassAttachmentBeginInfoKHR-pAttachments-parameter", "If attachmentCount is not 0, pAttachments must be a valid pointer to an array of attachmentCount valid VkImageView handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassAttachmentBeginInfoKHR-pAttachments-parameter)"},
- {"VUID-VkRenderPassAttachmentBeginInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassAttachmentBeginInfoKHR-sType-sType)"},
{"VUID-VkRenderPassBeginInfo-clearValueCount-00902", "clearValueCount must be greater than the largest attachment index in renderPass that specifies a loadOp (or stencilLoadOp, if the attachment has a depth/stencil format) of VK_ATTACHMENT_LOAD_OP_CLEAR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassBeginInfo-clearValueCount-00902)"},
{"VUID-VkRenderPassBeginInfo-commonparent", "Both of framebuffer, and renderPass must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassBeginInfo-commonparent)"},
- {"VUID-VkRenderPassBeginInfo-framebuffer-02780", "If framebuffer was created with a VkFramebufferCreateInfo::flags value that included VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, each element of the pAttachments member of an instance of VkRenderPassAttachmentBeginInfoKHR included in the pNext chain must have been created on the same VkDevice as framebuffer and renderPass (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassBeginInfo-framebuffer-02780)"},
- {"VUID-VkRenderPassBeginInfo-framebuffer-03207", "If framebuffer was created with a VkFramebufferCreateInfo::flags value that did not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, and the pNext chain includes an instance of VkRenderPassAttachmentBeginInfoKHR, its attachmentCount must be zero (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassBeginInfo-framebuffer-03207)"},
- {"VUID-VkRenderPassBeginInfo-framebuffer-03208", "If framebuffer was created with a VkFramebufferCreateInfo::flags value that included VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, the attachmentCount of an instance of VkRenderPassAttachmentBeginInfoKHR included in the pNext chain must be equal to the value of VkFramebufferAttachmentsCreateInfoKHR::attachmentImageInfoCount used to create framebuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassBeginInfo-framebuffer-03208)"},
- {"VUID-VkRenderPassBeginInfo-framebuffer-03209", "If framebuffer was created with a VkFramebufferCreateInfo::flags value that included VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, each element of the pAttachments member of an instance of VkRenderPassAttachmentBeginInfoKHR included in the pNext chain must be a VkImageView of an image created with a value of VkImageCreateInfo::flags equal to the flags member of the corresponding element of VkFramebufferAttachmentsCreateInfoKHR::pAttachments used to create framebuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassBeginInfo-framebuffer-03209)"},
- {"VUID-VkRenderPassBeginInfo-framebuffer-03210", "If framebuffer was created with a VkFramebufferCreateInfo::flags value that included VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, each element of the pAttachments member of an instance of VkRenderPassAttachmentBeginInfoKHR included in the pNext chain must be a VkImageView of an image created with a value of VkImageCreateInfo::usage equal to the usage member of the corresponding element of VkFramebufferAttachmentsCreateInfoKHR::pAttachments used to create framebuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassBeginInfo-framebuffer-03210)"},
- {"VUID-VkRenderPassBeginInfo-framebuffer-03211", "If framebuffer was created with a VkFramebufferCreateInfo::flags value that included VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, each element of the pAttachments member of an instance of VkRenderPassAttachmentBeginInfoKHR included in the pNext chain must be a VkImageView with a width equal to the width member of the corresponding element of VkFramebufferAttachmentsCreateInfoKHR::pAttachments used to create framebuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassBeginInfo-framebuffer-03211)"},
- {"VUID-VkRenderPassBeginInfo-framebuffer-03212", "If framebuffer was created with a VkFramebufferCreateInfo::flags value that included VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, each element of the pAttachments member of an instance of VkRenderPassAttachmentBeginInfoKHR included in the pNext chain must be a VkImageView with a height equal to the height member of the corresponding element of VkFramebufferAttachmentsCreateInfoKHR::pAttachments used to create framebuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassBeginInfo-framebuffer-03212)"},
- {"VUID-VkRenderPassBeginInfo-framebuffer-03213", "If framebuffer was created with a VkFramebufferCreateInfo::flags value that included VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, each element of the pAttachments member of an instance of VkRenderPassAttachmentBeginInfoKHR included in the pNext chain must be a VkImageView of an image created with a value of VkImageViewCreateInfo::subresourceRange.pname:layerCount equal to the layerCount member of the corresponding element of VkFramebufferAttachmentsCreateInfoKHR::pAttachments used to create framebuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassBeginInfo-framebuffer-03213)"},
- {"VUID-VkRenderPassBeginInfo-framebuffer-03214", "If framebuffer was created with a VkFramebufferCreateInfo::flags value that included VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, each element of the pAttachments member of an instance of VkRenderPassAttachmentBeginInfoKHR included in the pNext chain must be a VkImageView of an image created with a value of VkImageFormatListCreateInfoKHR::viewFormatCount equal to the viewFormatCount member of the corresponding element of VkFramebufferAttachmentsCreateInfoKHR::pAttachments used to create framebuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassBeginInfo-framebuffer-03214)"},
- {"VUID-VkRenderPassBeginInfo-framebuffer-03215", "If framebuffer was created with a VkFramebufferCreateInfo::flags value that included VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, each element of the pAttachments member of an instance of VkRenderPassAttachmentBeginInfoKHR included in the pNext chain must be a VkImageView of an image created with a set of elements in VkImageFormatListCreateInfoKHR::pViewFormats equal to the set of elements in the pViewFormats member of the corresponding element of VkFramebufferAttachmentsCreateInfoKHR::pAttachments used to create framebuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassBeginInfo-framebuffer-03215)"},
- {"VUID-VkRenderPassBeginInfo-framebuffer-03216", "If framebuffer was created with a VkFramebufferCreateInfo::flags value that included VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, each element of the pAttachments member of an instance of VkRenderPassAttachmentBeginInfoKHR included in the pNext chain must be a VkImageView of an image created with a value of VkImageViewCreateInfo::format equal to the corresponding value of VkAttachmentDescription::format in renderPass (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassBeginInfo-framebuffer-03216)"},
- {"VUID-VkRenderPassBeginInfo-framebuffer-03217", "If framebuffer was created with a VkFramebufferCreateInfo::flags value that included VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, each element of the pAttachments member of an instance of VkRenderPassAttachmentBeginInfoKHR included in the pNext chain must be a VkImageView of an image created with a value of VkImageCreateInfo::samples equal to the corresponding value of VkAttachmentDescription::samples in renderPass (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassBeginInfo-framebuffer-03217)"},
{"VUID-VkRenderPassBeginInfo-framebuffer-parameter", "framebuffer must be a valid VkFramebuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassBeginInfo-framebuffer-parameter)"},
{"VUID-VkRenderPassBeginInfo-pClearValues-parameter", "If clearValueCount is not 0, pClearValues must be a valid pointer to an array of clearValueCount VkClearValue unions (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassBeginInfo-pClearValues-parameter)"},
- {"VUID-VkRenderPassBeginInfo-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkDeviceGroupRenderPassBeginInfo, VkRenderPassAttachmentBeginInfoKHR, or VkRenderPassSampleLocationsBeginInfoEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassBeginInfo-pNext-pNext)"},
+ {"VUID-VkRenderPassBeginInfo-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkDeviceGroupRenderPassBeginInfo or VkRenderPassSampleLocationsBeginInfoEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassBeginInfo-pNext-pNext)"},
{"VUID-VkRenderPassBeginInfo-renderPass-00904", "renderPass must be compatible with the renderPass member of the VkFramebufferCreateInfo structure specified when creating framebuffer. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassBeginInfo-renderPass-00904)"},
{"VUID-VkRenderPassBeginInfo-renderPass-parameter", "renderPass must be a valid VkRenderPass handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassBeginInfo-renderPass-parameter)"},
{"VUID-VkRenderPassBeginInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassBeginInfo-sType-sType)"},
@@ -2126,7 +1962,7 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkRenderPassCreateInfo-pAttachments-02511", "For any member of pAttachments with a stencilLoadOp equal to VK_ATTACHMENT_LOAD_OP_CLEAR, the first use of that attachment must not specify a layout equal to VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL or VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-pAttachments-02511)"},
{"VUID-VkRenderPassCreateInfo-pAttachments-parameter", "If attachmentCount is not 0, pAttachments must be a valid pointer to an array of attachmentCount valid VkAttachmentDescription structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-pAttachments-parameter)"},
{"VUID-VkRenderPassCreateInfo-pDependencies-00837", "For any element of pDependencies, if the srcSubpass is not VK_SUBPASS_EXTERNAL, all stage flags included in the srcStageMask member of that dependency must be a pipeline stage supported by the pipeline identified by the pipelineBindPoint member of the source subpass (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-pDependencies-00837)"},
- {"VUID-VkRenderPassCreateInfo-pDependencies-00838", "For any element of pDependencies, if the dstSubpass is not VK_SUBPASS_EXTERNAL, all stage flags included in the dstStageMask member of that dependency must be a pipeline stage supported by the pipeline identified by the pipelineBindPoint member of the destination subpass (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-pDependencies-00838)"},
+ {"VUID-VkRenderPassCreateInfo-pDependencies-00838", "For any element of pDependencies, if the dstSubpass is not VK_SUBPASS_EXTERNAL, all stage flags included in the dstStageMask member of that dependency must be a pipeline stage supported by the pipeline identified by the pipelineBindPoint member of the source subpass (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-pDependencies-00838)"},
{"VUID-VkRenderPassCreateInfo-pDependencies-parameter", "If dependencyCount is not 0, pDependencies must be a valid pointer to an array of dependencyCount valid VkSubpassDependency structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-pDependencies-parameter)"},
{"VUID-VkRenderPassCreateInfo-pNext-01926", "If the pNext chain includes an instance of VkRenderPassInputAttachmentAspectCreateInfo, the subpass member of each element of its pAspectReferences member must be less than subpassCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-pNext-01926)"},
{"VUID-VkRenderPassCreateInfo-pNext-01927", "If the pNext chain includes an instance of VkRenderPassInputAttachmentAspectCreateInfo, the inputAttachmentIndex member of each element of its pAspectReferences member must be less than the value of inputAttachmentCount in the member of pSubpasses identified by its subpass member (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-pNext-01927)"},
@@ -2134,11 +1970,11 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkRenderPassCreateInfo-pNext-01929", "If the pNext chain includes an instance of VkRenderPassMultiviewCreateInfo, if its dependencyCount member is not zero, it must be equal to dependencyCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-pNext-01929)"},
{"VUID-VkRenderPassCreateInfo-pNext-01930", "If the pNext chain includes an instance of VkRenderPassMultiviewCreateInfo, for each non-zero element of pViewOffsets, the srcSubpass and dstSubpass members of pDependencies at the same index must not be equal (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-pNext-01930)"},
{"VUID-VkRenderPassCreateInfo-pNext-01963", "If the pNext chain includes an instance of VkRenderPassInputAttachmentAspectCreateInfo, for any element of the pInputAttachments member of any element of pSubpasses where the attachment member is not VK_ATTACHMENT_UNUSED, the aspectMask member of the corresponding element of VkRenderPassInputAttachmentAspectCreateInfo::pAspectReferences must only include aspects that are present in images of the format specified by the element of pAttachments at attachment (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-pNext-01963)"},
- {"VUID-VkRenderPassCreateInfo-pNext-02512", "If the pNext chain includes an instance of VkRenderPassMultiviewCreateInfo, for any element of pDependencies with a dependencyFlags member that does not include VK_DEPENDENCY_VIEW_LOCAL_BIT, the corresponding element of the pViewOffsets member of that VkRenderPassMultiviewCreateInfo instance must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-pNext-02512)"},
+ {"VUID-VkRenderPassCreateInfo-pNext-02512", "If the pNext chain includes an instance of VkRenderPassMultiviewCreateInfo, for any element of pDependencies with a dependencyFlags member that doesn't include VK_DEPENDENCY_VIEW_LOCAL_BIT, the corresponding element of the pViewOffsets member of that VkRenderPassMultiviewCreateInfo instance must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-pNext-02512)"},
{"VUID-VkRenderPassCreateInfo-pNext-02513", "If the pNext chain includes an instance of VkRenderPassMultiviewCreateInfo, elements of its pViewMasks member must either all be 0, or all not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-pNext-02513)"},
{"VUID-VkRenderPassCreateInfo-pNext-02514", "If the pNext chain includes an instance of VkRenderPassMultiviewCreateInfo, and each element of its pViewMasks member is 0, the dependencyFlags member of each element of pDependencies must not include VK_DEPENDENCY_VIEW_LOCAL_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-pNext-02514)"},
{"VUID-VkRenderPassCreateInfo-pNext-02515", "If the pNext chain includes an instance of VkRenderPassMultiviewCreateInfo, and each element of its pViewMasks member is 0, correlatedViewMaskCount must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-pNext-02515)"},
- {"VUID-VkRenderPassCreateInfo-pNext-02516", "If the pNext chain includes an instance of VkRenderPassMultiviewCreateInfo, each element of its pViewMask member must not have a bit set at an index greater than or equal to VkPhysicalDeviceLimits::maxFramebufferLayers (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-pNext-02516)"},
+ {"VUID-VkRenderPassCreateInfo-pNext-02516", "If the pNext chain includes an instance of VkRenderPassMultiviewCreateInfo, each element of its pViewMask member must not include a bit at a position greater than the value of VkPhysicalDeviceLimits::maxFramebufferLayers (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-pNext-02516)"},
{"VUID-VkRenderPassCreateInfo-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkRenderPassFragmentDensityMapCreateInfoEXT, VkRenderPassInputAttachmentAspectCreateInfo, or VkRenderPassMultiviewCreateInfo (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-pNext-pNext)"},
{"VUID-VkRenderPassCreateInfo-pSubpasses-parameter", "pSubpasses must be a valid pointer to an array of subpassCount valid VkSubpassDescription structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-pSubpasses-parameter)"},
{"VUID-VkRenderPassCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-sType-sType)"},
@@ -2155,8 +1991,8 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkRenderPassCreateInfo2KHR-pAttachments-parameter", "If attachmentCount is not 0, pAttachments must be a valid pointer to an array of attachmentCount valid VkAttachmentDescription2KHR structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo2KHR-pAttachments-parameter)"},
{"VUID-VkRenderPassCreateInfo2KHR-pCorrelatedViewMasks-03056", "The set of bits included in any element of pCorrelatedViewMasks must not overlap with the set of bits included in any other element of pCorrelatedViewMasks (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo2KHR-pCorrelatedViewMasks-03056)"},
{"VUID-VkRenderPassCreateInfo2KHR-pCorrelatedViewMasks-parameter", "If correlatedViewMaskCount is not 0, pCorrelatedViewMasks must be a valid pointer to an array of correlatedViewMaskCount uint32_t values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo2KHR-pCorrelatedViewMasks-parameter)"},
- {"VUID-VkRenderPassCreateInfo2KHR-pDependencies-03054", "For any element of pDependencies, if the srcSubpass is not VK_SUBPASS_EXTERNAL, all stage flags included in the srcStageMask member of that dependency must be a pipeline stage supported by the pipeline identified by the pipelineBindPoint member of the source subpass (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo2KHR-pDependencies-03054)"},
- {"VUID-VkRenderPassCreateInfo2KHR-pDependencies-03055", "For any element of pDependencies, if the dstSubpass is not VK_SUBPASS_EXTERNAL, all stage flags included in the dstStageMask member of that dependency must be a pipeline stage supported by the pipeline identified by the pipelineBindPoint member of the destination subpass (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo2KHR-pDependencies-03055)"},
+ {"VUID-VkRenderPassCreateInfo2KHR-pDependencies-03054", "For any element of pDependencies, if the srcSubpass is not VK_SUBPASS_EXTERNAL, all stage flags included in the srcStageMask member of that dependency must be a pipeline stage supported by the pipeline identified by the pipelineBindPoint member of the source subpass. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo2KHR-pDependencies-03054)"},
+ {"VUID-VkRenderPassCreateInfo2KHR-pDependencies-03055", "For any element of pDependencies, if the dstSubpass is not VK_SUBPASS_EXTERNAL, all stage flags included in the dstStageMask member of that dependency must be a pipeline stage supported by the pipeline identified by the pipelineBindPoint member of the source subpass. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo2KHR-pDependencies-03055)"},
{"VUID-VkRenderPassCreateInfo2KHR-pDependencies-03060", "For any element of pDependencies where its srcSubpass member equals its dstSubpass member, if the viewMask member of the corresponding element of pSubpasses includes more than one bit, its dependencyFlags member must include VK_DEPENDENCY_VIEW_LOCAL_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo2KHR-pDependencies-03060)"},
{"VUID-VkRenderPassCreateInfo2KHR-pDependencies-parameter", "If dependencyCount is not 0, pDependencies must be a valid pointer to an array of dependencyCount valid VkSubpassDependency2KHR structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo2KHR-pDependencies-parameter)"},
{"VUID-VkRenderPassCreateInfo2KHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo2KHR-pNext-pNext)"},
@@ -2164,7 +2000,7 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkRenderPassCreateInfo2KHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo2KHR-sType-sType)"},
{"VUID-VkRenderPassCreateInfo2KHR-srcSubpass-02526", "The srcSubpass member of each element of pDependencies must be less than subpassCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo2KHR-srcSubpass-02526)"},
{"VUID-VkRenderPassCreateInfo2KHR-subpassCount-arraylength", "subpassCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo2KHR-subpassCount-arraylength)"},
- {"VUID-VkRenderPassCreateInfo2KHR-viewMask-02524", "The viewMask member must not have a bit set at an index greater than or equal to VkPhysicalDeviceLimits::maxFramebufferLayers (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo2KHR-viewMask-02524)"},
+ {"VUID-VkRenderPassCreateInfo2KHR-viewMask-02524", "The viewMask member must not include a bit at a position greater than the value of VkPhysicalDeviceLimits::maxFramebufferLayers (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo2KHR-viewMask-02524)"},
{"VUID-VkRenderPassCreateInfo2KHR-viewMask-03057", "If the VkSubpassDescription2KHR::viewMask member of all elements of pSubpasses is 0, correlatedViewMaskCount must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo2KHR-viewMask-03057)"},
{"VUID-VkRenderPassCreateInfo2KHR-viewMask-03058", "The VkSubpassDescription2KHR::viewMask member of all elements of pSubpasses must either all be 0, or all not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo2KHR-viewMask-03058)"},
{"VUID-VkRenderPassCreateInfo2KHR-viewMask-03059", "If the VkSubpassDescription2KHR::viewMask member of all elements of pSubpasses is 0, the dependencyFlags member of any element of pDependencies must not include VK_DEPENDENCY_VIEW_LOCAL_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo2KHR-viewMask-03059)"},
@@ -2251,7 +2087,6 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkSamplerYcbcrConversionCreateInfo-yChromaOffset-parameter", "yChromaOffset must be a valid VkChromaLocation value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerYcbcrConversionCreateInfo-yChromaOffset-parameter)"},
{"VUID-VkSamplerYcbcrConversionCreateInfo-ycbcrModel-01655", "If ycbcrModel is not VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY, then components.r, components.g, and components.b must correspond to channels of the format; that is, components.r, components.g, and components.b must not be VK_COMPONENT_SWIZZLE_ZERO or VK_COMPONENT_SWIZZLE_ONE, and must not correspond to a channel which contains zero or one as a consequence of conversion to RGBA (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerYcbcrConversionCreateInfo-ycbcrModel-01655)"},
{"VUID-VkSamplerYcbcrConversionCreateInfo-ycbcrModel-parameter", "ycbcrModel must be a valid VkSamplerYcbcrModelConversion value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerYcbcrConversionCreateInfo-ycbcrModel-parameter)"},
- {"VUID-VkSamplerYcbcrConversionCreateInfo-ycbcrRange-02748", "If ycbcrRange is VK_SAMPLER_YCBCR_RANGE_ITU_NARROW then the R, G and B channels obtained by applying the component swizzle to format must each have a bit-depth greater than or equal to 8. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerYcbcrConversionCreateInfo-ycbcrRange-02748)"},
{"VUID-VkSamplerYcbcrConversionCreateInfo-ycbcrRange-parameter", "ycbcrRange must be a valid VkSamplerYcbcrRange value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerYcbcrConversionCreateInfo-ycbcrRange-parameter)"},
{"VUID-VkSamplerYcbcrConversionImageFormatProperties-sType-sType", "sType must be VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerYcbcrConversionImageFormatProperties-sType-sType)"},
{"VUID-VkSamplerYcbcrConversionInfo-conversion-parameter", "conversion must be a valid VkSamplerYcbcrConversion handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerYcbcrConversionInfo-conversion-parameter)"},
@@ -2281,7 +2116,7 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkSemaphoreGetWin32HandleInfoKHR-semaphore-parameter", "semaphore must be a valid VkSemaphore handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSemaphoreGetWin32HandleInfoKHR-semaphore-parameter)"},
{"VUID-VkShaderModuleCreateInfo-codeSize-01085", "codeSize must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkShaderModuleCreateInfo-codeSize-01085)"},
{"VUID-VkShaderModuleCreateInfo-codeSize-01086", "codeSize must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkShaderModuleCreateInfo-codeSize-01086)"},
- {"VUID-VkShaderModuleCreateInfo-flags-parameter", "flags must be a valid combination of VkShaderModuleCreateFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkShaderModuleCreateInfo-flags-parameter)"},
+ {"VUID-VkShaderModuleCreateInfo-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkShaderModuleCreateInfo-flags-zerobitmask)"},
{"VUID-VkShaderModuleCreateInfo-pCode-01087", "pCode must point to valid SPIR-V code, formatted and packed as described by the Khronos SPIR-V Specification (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkShaderModuleCreateInfo-pCode-01087)"},
{"VUID-VkShaderModuleCreateInfo-pCode-01088", "pCode must adhere to the validation rules described by the Validation Rules within a Module section of the SPIR-V Environment appendix (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkShaderModuleCreateInfo-pCode-01088)"},
{"VUID-VkShaderModuleCreateInfo-pCode-01089", "pCode must declare the Shader capability for SPIR-V code (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkShaderModuleCreateInfo-pCode-01089)"},
@@ -2311,8 +2146,6 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkSparseImageMemoryBind-flags-parameter", "flags must be a valid combination of VkSparseMemoryBindFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseImageMemoryBind-flags-parameter)"},
{"VUID-VkSparseImageMemoryBind-memory-01104", "If the sparse aliased residency feature is not enabled, and if any other resources are bound to ranges of memory, the range of memory being bound must not overlap with those bound ranges (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseImageMemoryBind-memory-01104)"},
{"VUID-VkSparseImageMemoryBind-memory-01105", "memory and memoryOffset must match the memory requirements of the calling command's image, as described in section Resource Memory Association (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseImageMemoryBind-memory-01105)"},
- {"VUID-VkSparseImageMemoryBind-memory-02732", "If memory was created with VkExportMemoryAllocateInfo::handleTypes not equal to 0, at least one handle type it contained must also have been set in VkExternalMemoryImageCreateInfo::handleTypes when the image was created. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseImageMemoryBind-memory-02732)"},
- {"VUID-VkSparseImageMemoryBind-memory-02733", "If memory was created by a memory import operation, the external handle type of the imported memory must also have been set in VkExternalMemoryImageCreateInfo::handleTypes when image was created. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseImageMemoryBind-memory-02733)"},
{"VUID-VkSparseImageMemoryBind-memory-parameter", "If memory is not VK_NULL_HANDLE, memory must be a valid VkDeviceMemory handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseImageMemoryBind-memory-parameter)"},
{"VUID-VkSparseImageMemoryBind-offset-01107", "offset.x must be a multiple of the sparse image block width (VkSparseImageFormatProperties::imageGranularity.width) of the image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseImageMemoryBind-offset-01107)"},
{"VUID-VkSparseImageMemoryBind-offset-01109", "offset.y must be a multiple of the sparse image block height (VkSparseImageFormatProperties::imageGranularity.height) of the image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseImageMemoryBind-offset-01109)"},
@@ -2333,8 +2166,6 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkSparseMemoryBind-flags-parameter", "flags must be a valid combination of VkSparseMemoryBindFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseMemoryBind-flags-parameter)"},
{"VUID-VkSparseMemoryBind-memory-01096", "If memory is not VK_NULL_HANDLE, memory and memoryOffset must match the memory requirements of the resource, as described in section Resource Memory Association (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseMemoryBind-memory-01096)"},
{"VUID-VkSparseMemoryBind-memory-01097", "If memory is not VK_NULL_HANDLE, memory must not have been created with a memory type that reports VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseMemoryBind-memory-01097)"},
- {"VUID-VkSparseMemoryBind-memory-02730", "If memory was created with VkExportMemoryAllocateInfo::handleTypes not equal to 0, at least one handle type it contained must also have been set in VkExternalMemoryBufferCreateInfo::handleTypes or VkExternalMemoryImageCreateInfo::handleTypes when the resource was created. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseMemoryBind-memory-02730)"},
- {"VUID-VkSparseMemoryBind-memory-02731", "If memory was created by a memory import operation, the external handle type of the imported memory must also have been set in VkExternalMemoryBufferCreateInfo::handleTypes or VkExternalMemoryImageCreateInfo::handleTypes when the resource was created. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseMemoryBind-memory-02731)"},
{"VUID-VkSparseMemoryBind-memory-parameter", "If memory is not VK_NULL_HANDLE, memory must be a valid VkDeviceMemory handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseMemoryBind-memory-parameter)"},
{"VUID-VkSparseMemoryBind-memoryOffset-01101", "memoryOffset must be less than the size of memory (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseMemoryBind-memoryOffset-01101)"},
{"VUID-VkSparseMemoryBind-resourceOffset-01099", "resourceOffset must be less than the size of the resource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseMemoryBind-resourceOffset-01099)"},
@@ -2350,10 +2181,6 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkStencilOpState-depthFailOp-parameter", "depthFailOp must be a valid VkStencilOp value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkStencilOpState-depthFailOp-parameter)"},
{"VUID-VkStencilOpState-failOp-parameter", "failOp must be a valid VkStencilOp value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkStencilOpState-failOp-parameter)"},
{"VUID-VkStencilOpState-passOp-parameter", "passOp must be a valid VkStencilOp value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkStencilOpState-passOp-parameter)"},
- {"VUID-VkStreamDescriptorSurfaceCreateInfoGGP-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkStreamDescriptorSurfaceCreateInfoGGP-flags-zerobitmask)"},
- {"VUID-VkStreamDescriptorSurfaceCreateInfoGGP-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkStreamDescriptorSurfaceCreateInfoGGP-pNext-pNext)"},
- {"VUID-VkStreamDescriptorSurfaceCreateInfoGGP-sType-sType", "sType must be VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkStreamDescriptorSurfaceCreateInfoGGP-sType-sType)"},
- {"VUID-VkStreamDescriptorSurfaceCreateInfoGGP-streamDescriptor-02681", "streamDescriptor must be a valid GgpStreamDescriptor (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkStreamDescriptorSurfaceCreateInfoGGP-streamDescriptor-02681)"},
{"VUID-VkSubmitInfo-commonparent", "Each of the elements of pCommandBuffers, the elements of pSignalSemaphores, and the elements of pWaitSemaphores that are valid handles must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubmitInfo-commonparent)"},
{"VUID-VkSubmitInfo-pCommandBuffers-00075", "Each element of pCommandBuffers must not have been allocated with VK_COMMAND_BUFFER_LEVEL_SECONDARY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubmitInfo-pCommandBuffers-00075)"},
{"VUID-VkSubmitInfo-pCommandBuffers-parameter", "If commandBufferCount is not 0, pCommandBuffers must be a valid pointer to an array of commandBufferCount valid VkCommandBuffer handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubmitInfo-pCommandBuffers-parameter)"},
@@ -2383,6 +2210,7 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkSubpassDependency-dstStageMask-02102", "If the task shaders feature is not enabled, dstStageMask must not contain VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency-dstStageMask-02102)"},
{"VUID-VkSubpassDependency-dstStageMask-parameter", "dstStageMask must be a valid combination of VkPipelineStageFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency-dstStageMask-parameter)"},
{"VUID-VkSubpassDependency-dstStageMask-requiredbitmask", "dstStageMask must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency-dstStageMask-requiredbitmask)"},
+ {"VUID-VkSubpassDependency-dstSubpass-00859", "If dstSubpass is not VK_SUBPASS_EXTERNAL, dstStageMask must not include VK_PIPELINE_STAGE_HOST_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency-dstSubpass-00859)"},
{"VUID-VkSubpassDependency-srcAccessMask-00868", "Any access flag included in srcAccessMask must be supported by one of the pipeline stages in srcStageMask, as specified in the table of supported access types (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency-srcAccessMask-00868)"},
{"VUID-VkSubpassDependency-srcAccessMask-parameter", "srcAccessMask must be a valid combination of VkAccessFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency-srcAccessMask-parameter)"},
{"VUID-VkSubpassDependency-srcStageMask-00860", "If the geometry shaders feature is not enabled, srcStageMask must not contain VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency-srcStageMask-00860)"},
@@ -2391,10 +2219,12 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkSubpassDependency-srcStageMask-02100", "If the task shaders feature is not enabled, srcStageMask must not contain VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency-srcStageMask-02100)"},
{"VUID-VkSubpassDependency-srcStageMask-parameter", "srcStageMask must be a valid combination of VkPipelineStageFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency-srcStageMask-parameter)"},
{"VUID-VkSubpassDependency-srcStageMask-requiredbitmask", "srcStageMask must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency-srcStageMask-requiredbitmask)"},
+ {"VUID-VkSubpassDependency-srcSubpass-00858", "If srcSubpass is not VK_SUBPASS_EXTERNAL, srcStageMask must not include VK_PIPELINE_STAGE_HOST_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency-srcSubpass-00858)"},
{"VUID-VkSubpassDependency-srcSubpass-00864", "srcSubpass must be less than or equal to dstSubpass, unless one of them is VK_SUBPASS_EXTERNAL, to avoid cyclic dependencies and ensure a valid execution order (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency-srcSubpass-00864)"},
{"VUID-VkSubpassDependency-srcSubpass-00865", "srcSubpass and dstSubpass must not both be equal to VK_SUBPASS_EXTERNAL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency-srcSubpass-00865)"},
{"VUID-VkSubpassDependency-srcSubpass-00867", "If srcSubpass is equal to dstSubpass and not all of the stages in srcStageMask and dstStageMask are framebuffer-space stages, the logically latest pipeline stage in srcStageMask must be logically earlier than or equal to the logically earliest pipeline stage in dstStageMask (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency-srcSubpass-00867)"},
{"VUID-VkSubpassDependency-srcSubpass-00872", "If srcSubpass equals dstSubpass and that subpass has more than one bit set in the view mask, then dependencyFlags must include VK_DEPENDENCY_VIEW_LOCAL_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency-srcSubpass-00872)"},
+ {"VUID-VkSubpassDependency-srcSubpass-01989", "If srcSubpass is equal to dstSubpass, srcStageMask and dstStageMask must not set any bits that are neither VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, nor one of the graphics pipeline stages (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency-srcSubpass-01989)"},
{"VUID-VkSubpassDependency-srcSubpass-02243", "If srcSubpass equals dstSubpass, and srcStageMask and dstStageMask both include a framebuffer-space stage, then dependencyFlags must include VK_DEPENDENCY_BY_REGION_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency-srcSubpass-02243)"},
{"VUID-VkSubpassDependency2KHR-dependencyFlags-03090", "If dependencyFlags includes VK_DEPENDENCY_VIEW_LOCAL_BIT, srcSubpass must not be equal to VK_SUBPASS_EXTERNAL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency2KHR-dependencyFlags-03090)"},
{"VUID-VkSubpassDependency2KHR-dependencyFlags-03091", "If dependencyFlags includes VK_DEPENDENCY_VIEW_LOCAL_BIT, dstSubpass must not be equal to VK_SUBPASS_EXTERNAL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency2KHR-dependencyFlags-03091)"},
@@ -2408,6 +2238,7 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkSubpassDependency2KHR-dstStageMask-03083", "If the tessellation shaders feature is not enabled, dstStageMask must not contain VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT or VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency2KHR-dstStageMask-03083)"},
{"VUID-VkSubpassDependency2KHR-dstStageMask-parameter", "dstStageMask must be a valid combination of VkPipelineStageFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency2KHR-dstStageMask-parameter)"},
{"VUID-VkSubpassDependency2KHR-dstStageMask-requiredbitmask", "dstStageMask must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency2KHR-dstStageMask-requiredbitmask)"},
+ {"VUID-VkSubpassDependency2KHR-dstSubpass-03079", "If dstSubpass is not VK_SUBPASS_EXTERNAL, dstStageMask must not include VK_PIPELINE_STAGE_HOST_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency2KHR-dstSubpass-03079)"},
{"VUID-VkSubpassDependency2KHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency2KHR-sType-sType)"},
{"VUID-VkSubpassDependency2KHR-srcAccessMask-03088", "Any access flag included in srcAccessMask must be supported by one of the pipeline stages in srcStageMask, as specified in the table of supported access types (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency2KHR-srcAccessMask-03088)"},
{"VUID-VkSubpassDependency2KHR-srcAccessMask-parameter", "srcAccessMask must be a valid combination of VkAccessFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency2KHR-srcAccessMask-parameter)"},
@@ -2417,7 +2248,9 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkSubpassDependency2KHR-srcStageMask-03082", "If the tessellation shaders feature is not enabled, srcStageMask must not contain VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT or VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency2KHR-srcStageMask-03082)"},
{"VUID-VkSubpassDependency2KHR-srcStageMask-parameter", "srcStageMask must be a valid combination of VkPipelineStageFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency2KHR-srcStageMask-parameter)"},
{"VUID-VkSubpassDependency2KHR-srcStageMask-requiredbitmask", "srcStageMask must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency2KHR-srcStageMask-requiredbitmask)"},
+ {"VUID-VkSubpassDependency2KHR-srcSubpass-02244", "If srcSubpass is equal to dstSubpass, srcStageMask and dstStageMask must not set any bits that are neither VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, nor one of the graphics pipeline stages (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency2KHR-srcSubpass-02244)"},
{"VUID-VkSubpassDependency2KHR-srcSubpass-02245", "If srcSubpass equals dstSubpass, and srcStageMask and dstStageMask both include a framebuffer-space stage, then dependencyFlags must include VK_DEPENDENCY_BY_REGION_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency2KHR-srcSubpass-02245)"},
+ {"VUID-VkSubpassDependency2KHR-srcSubpass-03078", "If srcSubpass is not VK_SUBPASS_EXTERNAL, srcStageMask must not include VK_PIPELINE_STAGE_HOST_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency2KHR-srcSubpass-03078)"},
{"VUID-VkSubpassDependency2KHR-srcSubpass-03084", "srcSubpass must be less than or equal to dstSubpass, unless one of them is VK_SUBPASS_EXTERNAL, to avoid cyclic dependencies and ensure a valid execution order (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency2KHR-srcSubpass-03084)"},
{"VUID-VkSubpassDependency2KHR-srcSubpass-03085", "srcSubpass and dstSubpass must not both be equal to VK_SUBPASS_EXTERNAL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency2KHR-srcSubpass-03085)"},
{"VUID-VkSubpassDependency2KHR-srcSubpass-03087", "If srcSubpass is equal to dstSubpass and not all of the stages in srcStageMask and dstStageMask are framebuffer-space stages, the logically latest pipeline stage in srcStageMask must be logically earlier than or equal to the logically earliest pipeline stage in dstStageMask (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency2KHR-srcSubpass-03087)"},
@@ -2495,17 +2328,10 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkSurfaceCapabilities2EXT-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSurfaceCapabilities2EXT-pNext-pNext)"},
{"VUID-VkSurfaceCapabilities2EXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSurfaceCapabilities2EXT-sType-sType)"},
{"VUID-VkSurfaceCapabilities2EXT-supportedSurfaceCounters-01246", "supportedSurfaceCounters must not include VK_SURFACE_COUNTER_VBLANK_EXT unless the surface queried is a display surface. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSurfaceCapabilities2EXT-supportedSurfaceCounters-01246)"},
- {"VUID-VkSurfaceCapabilities2KHR-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkDisplayNativeHdrSurfaceCapabilitiesAMD, VkSharedPresentSurfaceCapabilitiesKHR, VkSurfaceCapabilitiesFullScreenExclusiveEXT, or VkSurfaceProtectedCapabilitiesKHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSurfaceCapabilities2KHR-pNext-pNext)"},
+ {"VUID-VkSurfaceCapabilities2KHR-pNext-pNext", "pNext must be NULL or a pointer to a valid instance of VkSharedPresentSurfaceCapabilitiesKHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSurfaceCapabilities2KHR-pNext-pNext)"},
{"VUID-VkSurfaceCapabilities2KHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSurfaceCapabilities2KHR-sType-sType)"},
- {"VUID-VkSurfaceCapabilities2KHR-sType-unique", "Each sType member in the pNext chain must be unique (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSurfaceCapabilities2KHR-sType-unique)"},
- {"VUID-VkSurfaceCapabilitiesFullScreenExclusiveEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSurfaceCapabilitiesFullScreenExclusiveEXT-sType-sType)"},
{"VUID-VkSurfaceFormat2KHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSurfaceFormat2KHR-pNext-pNext)"},
{"VUID-VkSurfaceFormat2KHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSurfaceFormat2KHR-sType-sType)"},
- {"VUID-VkSurfaceFullScreenExclusiveInfoEXT-fullScreenExclusive-parameter", "fullScreenExclusive must be a valid VkFullScreenExclusiveEXT value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSurfaceFullScreenExclusiveInfoEXT-fullScreenExclusive-parameter)"},
- {"VUID-VkSurfaceFullScreenExclusiveInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSurfaceFullScreenExclusiveInfoEXT-sType-sType)"},
- {"VUID-VkSurfaceFullScreenExclusiveWin32InfoEXT-hmonitor-02673", "hmonitor must be a valid HMONITOR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSurfaceFullScreenExclusiveWin32InfoEXT-hmonitor-02673)"},
- {"VUID-VkSurfaceFullScreenExclusiveWin32InfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSurfaceFullScreenExclusiveWin32InfoEXT-sType-sType)"},
- {"VUID-VkSurfaceProtectedCapabilitiesKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSurfaceProtectedCapabilitiesKHR-sType-sType)"},
{"VUID-VkSwapchainCounterCreateInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCounterCreateInfoEXT-sType-sType)"},
{"VUID-VkSwapchainCounterCreateInfoEXT-surfaceCounters-01244", "The bits in surfaceCounters must be supported by VkSwapchainCreateInfoKHR::surface, as reported by vkGetPhysicalDeviceSurfaceCapabilities2EXT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCounterCreateInfoEXT-surfaceCounters-01244)"},
{"VUID-VkSwapchainCounterCreateInfoEXT-surfaceCounters-parameter", "surfaceCounters must be a valid combination of VkSurfaceCounterFlagBitsEXT values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCounterCreateInfoEXT-surfaceCounters-parameter)"},
@@ -2513,7 +2339,6 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkSwapchainCreateInfoKHR-compositeAlpha-01280", "compositeAlpha must be one of the bits present in the supportedCompositeAlpha member of the VkSurfaceCapabilitiesKHR structure returned by vkGetPhysicalDeviceSurfaceCapabilitiesKHR for the surface (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-compositeAlpha-01280)"},
{"VUID-VkSwapchainCreateInfoKHR-compositeAlpha-parameter", "compositeAlpha must be a valid VkCompositeAlphaFlagBitsKHR value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-compositeAlpha-parameter)"},
{"VUID-VkSwapchainCreateInfoKHR-flags-03168", "If flags contains VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR then the pNext chain must contain an instance of VkImageFormatListCreateInfoKHR with a viewFormatCount greater than zero and pViewFormats must have an element equal to imageFormat (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-flags-03168)"},
- {"VUID-VkSwapchainCreateInfoKHR-flags-03187", "If flags contains VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR, then VkSurfaceProtectedCapabilitiesKHR::supportsProtected must be VK_TRUE in the VkSurfaceProtectedCapabilitiesKHR structure returned by vkGetPhysicalDeviceSurfaceCapabilities2KHR for surface (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-flags-03187)"},
{"VUID-VkSwapchainCreateInfoKHR-flags-parameter", "flags must be a valid combination of VkSwapchainCreateFlagBitsKHR values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-flags-parameter)"},
{"VUID-VkSwapchainCreateInfoKHR-imageArrayLayers-01275", "imageArrayLayers must be greater than 0 and less than or equal to the maxImageArrayLayers member of the VkSurfaceCapabilitiesKHR structure returned by vkGetPhysicalDeviceSurfaceCapabilitiesKHR for the surface (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-imageArrayLayers-01275)"},
{"VUID-VkSwapchainCreateInfoKHR-imageColorSpace-parameter", "imageColorSpace must be a valid VkColorSpaceKHR value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-imageColorSpace-parameter)"},
@@ -2537,8 +2362,7 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkSwapchainCreateInfoKHR-oldSwapchain-01933", "If oldSwapchain is not VK_NULL_HANDLE, oldSwapchain must be a non-retired swapchain associated with native window referred to by surface (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-oldSwapchain-01933)"},
{"VUID-VkSwapchainCreateInfoKHR-oldSwapchain-parameter", "If oldSwapchain is not VK_NULL_HANDLE, oldSwapchain must be a valid VkSwapchainKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-oldSwapchain-parameter)"},
{"VUID-VkSwapchainCreateInfoKHR-oldSwapchain-parent", "If oldSwapchain is a valid handle, it must have been created, allocated, or retrieved from surface (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-oldSwapchain-parent)"},
- {"VUID-VkSwapchainCreateInfoKHR-pNext-02679", "If the pNext chain includes an instance of VkSurfaceFullScreenExclusiveInfoEXT with its fullScreenExclusive member set to VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT, and surface was created using vkCreateWin32SurfaceKHR, an instance of VkSurfaceFullScreenExclusiveWin32InfoEXT must be present in the pNext chain (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-pNext-02679)"},
- {"VUID-VkSwapchainCreateInfoKHR-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkDeviceGroupSwapchainCreateInfoKHR, VkImageFormatListCreateInfoKHR, VkSurfaceFullScreenExclusiveInfoEXT, VkSurfaceFullScreenExclusiveWin32InfoEXT, VkSwapchainCounterCreateInfoEXT, or VkSwapchainDisplayNativeHdrCreateInfoAMD (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-pNext-pNext)"},
+ {"VUID-VkSwapchainCreateInfoKHR-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkDeviceGroupSwapchainCreateInfoKHR, VkImageFormatListCreateInfoKHR, or VkSwapchainCounterCreateInfoEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-pNext-pNext)"},
{"VUID-VkSwapchainCreateInfoKHR-physicalDeviceCount-01429", "If the logical device was created with VkDeviceGroupDeviceCreateInfo::physicalDeviceCount equal to 1, flags must not contain VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-physicalDeviceCount-01429)"},
{"VUID-VkSwapchainCreateInfoKHR-preTransform-01279", "preTransform must be one of the bits present in the supportedTransforms member of the VkSurfaceCapabilitiesKHR structure returned by vkGetPhysicalDeviceSurfaceCapabilitiesKHR for the surface (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-preTransform-01279)"},
{"VUID-VkSwapchainCreateInfoKHR-preTransform-parameter", "preTransform must be a valid VkSurfaceTransformFlagBitsKHR value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-preTransform-parameter)"},
@@ -2549,8 +2373,6 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkSwapchainCreateInfoKHR-sType-unique", "Each sType member in the pNext chain must be unique (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-sType-unique)"},
{"VUID-VkSwapchainCreateInfoKHR-surface-01270", "surface must be a surface that is supported by the device as determined using vkGetPhysicalDeviceSurfaceSupportKHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-surface-01270)"},
{"VUID-VkSwapchainCreateInfoKHR-surface-parameter", "surface must be a valid VkSurfaceKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-surface-parameter)"},
- {"VUID-VkSwapchainDisplayNativeHdrCreateInfoAMD-localDimmingEnable-XXXXX", "It is only valid to set localDimmingEnable to VK_TRUE if VkDisplayNativeHdrSurfaceCapabilitiesAMD::localDimmingSupport is supported. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainDisplayNativeHdrCreateInfoAMD-localDimmingEnable-XXXXX)"},
- {"VUID-VkSwapchainDisplayNativeHdrCreateInfoAMD-sType-sType", "sType must be VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainDisplayNativeHdrCreateInfoAMD-sType-sType)"},
{"VUID-VkTextureLODGatherFormatPropertiesAMD-sType-sType", "sType must be VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkTextureLODGatherFormatPropertiesAMD-sType-sType)"},
{"VUID-VkValidationCacheCreateInfoEXT-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkValidationCacheCreateInfoEXT-flags-zerobitmask)"},
{"VUID-VkValidationCacheCreateInfoEXT-initialDataSize-01534", "If initialDataSize is not 0, it must be equal to the size of pInitialData, as returned by vkGetValidationCacheDataEXT when pInitialData was originally retrieved (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkValidationCacheCreateInfoEXT-initialDataSize-01534)"},
@@ -2651,13 +2473,12 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkWriteDescriptorSet-descriptorType-01402", "If descriptorType is VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, for each descriptor that will be accessed via load or store operations the imageLayout member for corresponding elements of pImageInfo must be VK_IMAGE_LAYOUT_GENERAL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-descriptorType-01402)"},
{"VUID-VkWriteDescriptorSet-descriptorType-01403", "If descriptorType is VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE or VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, the imageLayout member of each element of pImageInfo must be a member of the list given in Sampled Image or Combined Image Sampler, corresponding to its type (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-descriptorType-01403)"},
{"VUID-VkWriteDescriptorSet-descriptorType-01946", "If descriptorType is VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, then the imageView member of each pImageInfo element must have been created without a VkSamplerYcbcrConversionInfo structure in its pNext chain (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-descriptorType-01946)"},
+ {"VUID-VkWriteDescriptorSet-descriptorType-01947", "If descriptorType is VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, and if any element of pImageInfo has a imageView member that was created with a VkSamplerYcbcrConversionInfo structure in its pNext chain, then dstSet must have been allocated with a layout that included immutable samplers for dstBinding (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-descriptorType-01947)"},
{"VUID-VkWriteDescriptorSet-descriptorType-01948", "If descriptorType is VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, and dstSet was allocated with a layout that included immutable samplers for dstBinding, then the imageView member of each element of pImageInfo which corresponds to an immutable sampler that enables sampler Y'CBCR conversion must have been created with a VkSamplerYcbcrConversionInfo structure in its pNext chain with an identically defined VkSamplerYcbcrConversionInfo to the corresponding immutable sampler (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-descriptorType-01948)"},
{"VUID-VkWriteDescriptorSet-descriptorType-02219", "If descriptorType is VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT, dstArrayElement must be an integer multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-descriptorType-02219)"},
{"VUID-VkWriteDescriptorSet-descriptorType-02220", "If descriptorType is VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT, descriptorCount must be an integer multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-descriptorType-02220)"},
{"VUID-VkWriteDescriptorSet-descriptorType-02221", "If descriptorType is VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT, the pNext chain must include a VkWriteDescriptorSetInlineUniformBlockEXT structure whose dataSize member equals descriptorCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-descriptorType-02221)"},
{"VUID-VkWriteDescriptorSet-descriptorType-02382", "If descriptorType is VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV, the pNext chain must include a VkWriteDescriptorSetAccelerationStructureNV structure whose accelerationStructureCount member equals descriptorCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-descriptorType-02382)"},
- {"VUID-VkWriteDescriptorSet-descriptorType-02738", "If descriptorType is VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, and if any element of pImageInfo has a imageView member that was created with a VkSamplerYcbcrConversionInfo structure in its pNext chain, then dstSet must have been allocated with a layout that included immutable samplers for dstBinding, and the corresponding immutable sampler must have been created with an identically defined VkSamplerYcbcrConversionInfo object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-descriptorType-02738)"},
- {"VUID-VkWriteDescriptorSet-descriptorType-02752", "If descriptorType is VK_DESCRIPTOR_TYPE_SAMPLER, then dstSet must not have been allocated with a layout that included immutable samplers for dstBinding (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-descriptorType-02752)"},
{"VUID-VkWriteDescriptorSet-descriptorType-parameter", "descriptorType must be a valid VkDescriptorType value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-descriptorType-parameter)"},
{"VUID-VkWriteDescriptorSet-dstArrayElement-00321", "The sum of dstArrayElement and descriptorCount must be less than or equal to the number of array elements in the descriptor set binding specified by dstBinding, and all applicable consecutive bindings, as described by consecutive binding updates (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-dstArrayElement-00321)"},
{"VUID-VkWriteDescriptorSet-dstBinding-00315", "dstBinding must be less than or equal to the maximum value of binding of all VkDescriptorSetLayoutBinding structures specified when dstSet's descriptor set layout was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-dstBinding-00315)"},
@@ -2668,7 +2489,6 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkWriteDescriptorSet-sType-unique", "Each sType member in the pNext chain must be unique (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-sType-unique)"},
{"VUID-VkWriteDescriptorSetAccelerationStructureNV-accelerationStructureCount-02236", "accelerationStructureCount must be equal to descriptorCount in the extended structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSetAccelerationStructureNV-accelerationStructureCount-02236)"},
{"VUID-VkWriteDescriptorSetAccelerationStructureNV-accelerationStructureCount-arraylength", "accelerationStructureCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSetAccelerationStructureNV-accelerationStructureCount-arraylength)"},
- {"VUID-VkWriteDescriptorSetAccelerationStructureNV-pAccelerationStructures-02764", "Each acceleration structure in pAccelerationStructures must have been created with VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSetAccelerationStructureNV-pAccelerationStructures-02764)"},
{"VUID-VkWriteDescriptorSetAccelerationStructureNV-pAccelerationStructures-parameter", "pAccelerationStructures must be a valid pointer to an array of accelerationStructureCount valid VkAccelerationStructureNV handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSetAccelerationStructureNV-pAccelerationStructures-parameter)"},
{"VUID-VkWriteDescriptorSetAccelerationStructureNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSetAccelerationStructureNV-sType-sType)"},
{"VUID-VkWriteDescriptorSetInlineUniformBlockEXT-dataSize-02222", "dataSize must be an integer multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSetInlineUniformBlockEXT-dataSize-02222)"},
@@ -2685,12 +2505,6 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-VkXlibSurfaceCreateInfoKHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkXlibSurfaceCreateInfoKHR-pNext-pNext)"},
{"VUID-VkXlibSurfaceCreateInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkXlibSurfaceCreateInfoKHR-sType-sType)"},
{"VUID-VkXlibSurfaceCreateInfoKHR-window-01314", "window must be a valid Xlib Window. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkXlibSurfaceCreateInfoKHR-window-01314)"},
- {"VUID-vkAcquireFullScreenExclusiveModeEXT-commonparent", "Both of device, and swapchain must have been created, allocated, or retrieved from the same VkInstance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquireFullScreenExclusiveModeEXT-commonparent)"},
- {"VUID-vkAcquireFullScreenExclusiveModeEXT-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquireFullScreenExclusiveModeEXT-device-parameter)"},
- {"VUID-vkAcquireFullScreenExclusiveModeEXT-swapchain-02674", "swapchain must not be in the retired state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquireFullScreenExclusiveModeEXT-swapchain-02674)"},
- {"VUID-vkAcquireFullScreenExclusiveModeEXT-swapchain-02675", "swapchain must be a swapchain created with an instance of VkSurfaceFullScreenExclusiveInfoEXT, with fullScreenExclusive set to VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquireFullScreenExclusiveModeEXT-swapchain-02675)"},
- {"VUID-vkAcquireFullScreenExclusiveModeEXT-swapchain-02676", "swapchain must not currently have exclusive full-screen access (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquireFullScreenExclusiveModeEXT-swapchain-02676)"},
- {"VUID-vkAcquireFullScreenExclusiveModeEXT-swapchain-parameter", "swapchain must be a valid VkSwapchainKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquireFullScreenExclusiveModeEXT-swapchain-parameter)"},
{"VUID-vkAcquireNextImage2KHR-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquireNextImage2KHR-device-parameter)"},
{"VUID-vkAcquireNextImage2KHR-pAcquireInfo-parameter", "pAcquireInfo must be a valid pointer to a valid VkAcquireNextImageInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquireNextImage2KHR-pAcquireInfo-parameter)"},
{"VUID-vkAcquireNextImage2KHR-pImageIndex-parameter", "pImageIndex must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquireNextImage2KHR-pImageIndex-parameter)"},
@@ -2709,22 +2523,16 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-vkAcquireNextImageKHR-swapchain-01285", "swapchain must not be in the retired state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquireNextImageKHR-swapchain-01285)"},
{"VUID-vkAcquireNextImageKHR-swapchain-01802", "If the number of currently acquired images is greater than the difference between the number of images in swapchain and the value of VkSurfaceCapabilitiesKHR::minImageCount as returned by a call to vkGetPhysicalDeviceSurfaceCapabilities2KHR with the surface used to create swapchain, timeout must not be UINT64_MAX (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquireNextImageKHR-swapchain-01802)"},
{"VUID-vkAcquireNextImageKHR-swapchain-parameter", "swapchain must be a valid VkSwapchainKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquireNextImageKHR-swapchain-parameter)"},
- {"VUID-vkAcquirePerformanceConfigurationINTEL-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquirePerformanceConfigurationINTEL-device-parameter)"},
- {"VUID-vkAcquirePerformanceConfigurationINTEL-pAcquireInfo-parameter", "pAcquireInfo must be a valid pointer to a valid VkPerformanceConfigurationAcquireInfoINTEL structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquirePerformanceConfigurationINTEL-pAcquireInfo-parameter)"},
- {"VUID-vkAcquirePerformanceConfigurationINTEL-pConfiguration-parameter", "pConfiguration must be a valid pointer to a VkPerformanceConfigurationINTEL handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquirePerformanceConfigurationINTEL-pConfiguration-parameter)"},
{"VUID-vkAcquireXlibDisplayEXT-display-parameter", "display must be a valid VkDisplayKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquireXlibDisplayEXT-display-parameter)"},
{"VUID-vkAcquireXlibDisplayEXT-dpy-parameter", "dpy must be a valid pointer to a Display value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquireXlibDisplayEXT-dpy-parameter)"},
{"VUID-vkAcquireXlibDisplayEXT-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquireXlibDisplayEXT-physicalDevice-parameter)"},
{"VUID-vkAllocateCommandBuffers-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAllocateCommandBuffers-device-parameter)"},
{"VUID-vkAllocateCommandBuffers-pAllocateInfo-parameter", "pAllocateInfo must be a valid pointer to a valid VkCommandBufferAllocateInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAllocateCommandBuffers-pAllocateInfo-parameter)"},
- {"VUID-vkAllocateCommandBuffers-pAllocateInfo::commandBufferCount-arraylength", "The value referenced by pAllocateInfo::commandBufferCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAllocateCommandBuffers-pAllocateInfo::commandBufferCount-arraylength)"},
{"VUID-vkAllocateCommandBuffers-pCommandBuffers-parameter", "pCommandBuffers must be a valid pointer to an array of pAllocateInfo::commandBufferCount VkCommandBuffer handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAllocateCommandBuffers-pCommandBuffers-parameter)"},
{"VUID-vkAllocateDescriptorSets-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAllocateDescriptorSets-device-parameter)"},
{"VUID-vkAllocateDescriptorSets-pAllocateInfo-parameter", "pAllocateInfo must be a valid pointer to a valid VkDescriptorSetAllocateInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAllocateDescriptorSets-pAllocateInfo-parameter)"},
- {"VUID-vkAllocateDescriptorSets-pAllocateInfo::descriptorSetCount-arraylength", "The value referenced by pAllocateInfo::descriptorSetCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAllocateDescriptorSets-pAllocateInfo::descriptorSetCount-arraylength)"},
{"VUID-vkAllocateDescriptorSets-pDescriptorSets-parameter", "pDescriptorSets must be a valid pointer to an array of pAllocateInfo::descriptorSetCount VkDescriptorSet handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAllocateDescriptorSets-pDescriptorSets-parameter)"},
{"VUID-vkAllocateMemory-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAllocateMemory-device-parameter)"},
- {"VUID-vkAllocateMemory-deviceCoherentMemory-02790", "If the deviceCoherentMemory feature is not enabled, pAllocateInfo->memoryTypeIndex must not identify a memory type supporting VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAllocateMemory-deviceCoherentMemory-02790)"},
{"VUID-vkAllocateMemory-pAllocateInfo-01713", "pAllocateInfo->allocationSize must be less than or equal to VkPhysicalDeviceMemoryProperties::memoryHeaps[pAllocateInfo->memoryTypeIndex].size as returned by vkGetPhysicalDeviceMemoryProperties for the VkPhysicalDevice that device was created from. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAllocateMemory-pAllocateInfo-01713)"},
{"VUID-vkAllocateMemory-pAllocateInfo-01714", "pAllocateInfo->memoryTypeIndex must be less than VkPhysicalDeviceMemoryProperties::memoryTypeCount as returned by vkGetPhysicalDeviceMemoryProperties for the VkPhysicalDevice that device was created from. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAllocateMemory-pAllocateInfo-01714)"},
{"VUID-vkAllocateMemory-pAllocateInfo-parameter", "pAllocateInfo must be a valid pointer to a valid VkMemoryAllocateInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAllocateMemory-pAllocateInfo-parameter)"},
@@ -2751,8 +2559,6 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-vkBindBufferMemory-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindBufferMemory-device-parameter)"},
{"VUID-vkBindBufferMemory-memory-01035", "memory must have been allocated using one of the memory types allowed in the memoryTypeBits member of the VkMemoryRequirements structure returned from a call to vkGetBufferMemoryRequirements with buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindBufferMemory-memory-01035)"},
{"VUID-vkBindBufferMemory-memory-01508", "If the VkMemoryAllocateInfo provided when memory was allocated included an instance of VkMemoryDedicatedAllocateInfo in its pNext chain, and VkMemoryDedicatedAllocateInfo::buffer was not VK_NULL_HANDLE, then buffer must equal VkMemoryDedicatedAllocateInfo::buffer, and memoryOffset must be zero. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindBufferMemory-memory-01508)"},
- {"VUID-vkBindBufferMemory-memory-02726", "If the value of VkExportMemoryAllocateInfo::handleTypes used to allocate memory is not 0, it must include at least one of the handles set in VkExternalMemoryBufferCreateInfo::handleTypes when buffer was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindBufferMemory-memory-02726)"},
- {"VUID-vkBindBufferMemory-memory-02727", "If memory was created by a memory import operation, the external handle type of the imported memory must also have been set in VkExternalMemoryBufferCreateInfo::handleTypes when buffer was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindBufferMemory-memory-02727)"},
{"VUID-vkBindBufferMemory-memory-parameter", "memory must be a valid VkDeviceMemory handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindBufferMemory-memory-parameter)"},
{"VUID-vkBindBufferMemory-memory-parent", "memory must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindBufferMemory-memory-parent)"},
{"VUID-vkBindBufferMemory-memoryOffset-01031", "memoryOffset must be less than the size of memory (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindBufferMemory-memoryOffset-01031)"},
@@ -2773,11 +2579,9 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-vkBindImageMemory-image-parameter", "image must be a valid VkImage handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindImageMemory-image-parameter)"},
{"VUID-vkBindImageMemory-image-parent", "image must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindImageMemory-image-parent)"},
{"VUID-vkBindImageMemory-memory-01047", "memory must have been allocated using one of the memory types allowed in the memoryTypeBits member of the VkMemoryRequirements structure returned from a call to vkGetImageMemoryRequirements with image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindImageMemory-memory-01047)"},
- {"VUID-vkBindImageMemory-memory-01509", "If the VkMemoryAllocateInfo provided when memory was allocated included an instance of VkMemoryDedicatedAllocateInfo in its pNext chain, and VkMemoryDedicatedAllocateInfo::image was not VK_NULL_HANDLE, then image must equal VkMemoryDedicatedAllocateInfo::image and memoryOffset must be zero (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindImageMemory-memory-01509)"},
+ {"VUID-vkBindImageMemory-memory-01509", "If the VkMemoryAllocateInfo provided when memory was allocated included an instance of VkMemoryDedicatedAllocateInfo in its pNext chain, and VkMemoryDedicatedAllocateInfo::image was not VK_NULL_HANDLE, then image must equal VkMemoryDedicatedAllocateInfo::image and memoryOffset must be zero. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindImageMemory-memory-01509)"},
{"VUID-vkBindImageMemory-memory-02628", "If the dedicated allocation image aliasing feature is not enabled, and the VkMemoryAllocateInfo provided when memory was allocated included an instance of VkMemoryDedicatedAllocateInfo in its pNext chain, and VkMemoryDedicatedAllocateInfo::image was not VK_NULL_HANDLE, then image must equal VkMemoryDedicatedAllocateInfo::image and memoryOffset must be zero. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindImageMemory-memory-02628)"},
{"VUID-vkBindImageMemory-memory-02629", "If the dedicated allocation image aliasing feature is enabled, and the VkMemoryAllocateInfo provided when memory was allocated included an instance of VkMemoryDedicatedAllocateInfo in its pNext chain, and VkMemoryDedicatedAllocateInfo::image was not VK_NULL_HANDLE, then memoryOffset must be zero, and image must be either equal to VkMemoryDedicatedAllocateInfo::image or an image that was created using the same parameters in VkImageCreateInfo, with the exception that extent and arrayLayers may differ subject to the following restrictions: every dimension in the extent parameter of the image being bound must be equal to or smaller than the original image for which the allocation was created; and the arrayLayers parameter of the image being bound must be equal to or smaller than the original image for which the allocation was created. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindImageMemory-memory-02629)"},
- {"VUID-vkBindImageMemory-memory-02728", "If the value of VkExportMemoryAllocateInfo::handleTypes used to allocate memory is not 0, it must include at least one of the handles set in VkExternalMemoryImageCreateInfo::handleTypes when image was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindImageMemory-memory-02728)"},
- {"VUID-vkBindImageMemory-memory-02729", "If memory was created by a memory import operation, the external handle type of the imported memory must also have been set in VkExternalMemoryImageCreateInfo::handleTypes when image was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindImageMemory-memory-02729)"},
{"VUID-vkBindImageMemory-memory-parameter", "memory must be a valid VkDeviceMemory handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindImageMemory-memory-parameter)"},
{"VUID-vkBindImageMemory-memory-parent", "memory must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindImageMemory-memory-parent)"},
{"VUID-vkBindImageMemory-memoryOffset-01046", "memoryOffset must be less than the size of memory (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindImageMemory-memoryOffset-01046)"},
@@ -2803,7 +2607,7 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-vkCmdBeginQuery-commonparent", "Both of commandBuffer, and queryPool must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQuery-commonparent)"},
{"VUID-vkCmdBeginQuery-flags-parameter", "flags must be a valid combination of VkQueryControlFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQuery-flags-parameter)"},
{"VUID-vkCmdBeginQuery-query-00802", "query must be less than the number of queries in queryPool (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQuery-query-00802)"},
- {"VUID-vkCmdBeginQuery-query-00808", "If called within a render pass instance, the sum of query and the number of bits set in the current subpass's view mask must be less than or equal to the number of queries in queryPool (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQuery-query-00808)"},
+ {"VUID-vkCmdBeginQuery-query-00808", "If vkCmdBeginQuery is called within a render pass instance, the sum of query and the number of bits set in the current subpass's view mask must be less than or equal to the number of queries in queryPool (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQuery-query-00808)"},
{"VUID-vkCmdBeginQuery-queryPool-01922", "queryPool must have been created with a queryType that differs from that of any queries that are active within commandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQuery-queryPool-01922)"},
{"VUID-vkCmdBeginQuery-queryPool-parameter", "queryPool must be a valid VkQueryPool handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQuery-queryPool-parameter)"},
{"VUID-vkCmdBeginQuery-queryType-00800", "If the precise occlusion queries feature is not enabled, or the queryType used to create queryPool was not VK_QUERY_TYPE_OCCLUSION, flags must not contain VK_QUERY_CONTROL_PRECISE_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQuery-queryType-00800)"},
@@ -2812,21 +2616,21 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-vkCmdBeginQuery-queryType-00805", "If the queryType used to create queryPool was VK_QUERY_TYPE_PIPELINE_STATISTICS and any of the pipelineStatistics indicate compute operations, the VkCommandPool that commandBuffer was allocated from must support compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQuery-queryType-00805)"},
{"VUID-vkCmdBeginQuery-queryType-02327", "If the queryType used to create queryPool was VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT the VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQuery-queryType-02327)"},
{"VUID-vkCmdBeginQuery-queryType-02328", "If the queryType used to create queryPool was VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT then VkPhysicalDeviceTransformFeedbackPropertiesEXT::transformFeedbackQueries must be supported (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQuery-queryType-02328)"},
- {"VUID-vkCmdBeginQueryIndexedEXT-None-00807", "All queries used by the command must be unavailable (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-None-00807)"},
- {"VUID-vkCmdBeginQueryIndexedEXT-commandBuffer-01885", "commandBuffer must not be a protected command buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-commandBuffer-01885)"},
+ {"VUID-vkCmdBeginQueryIndexedEXT-None-02330", "All queries used by the command must be unavailable (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-None-02330)"},
+ {"VUID-vkCmdBeginQueryIndexedEXT-commandBuffer-02336", "commandBuffer must not be a protected command buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-commandBuffer-02336)"},
{"VUID-vkCmdBeginQueryIndexedEXT-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-commandBuffer-cmdpool)"},
{"VUID-vkCmdBeginQueryIndexedEXT-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-commandBuffer-parameter)"},
{"VUID-vkCmdBeginQueryIndexedEXT-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-commandBuffer-recording)"},
{"VUID-vkCmdBeginQueryIndexedEXT-commonparent", "Both of commandBuffer, and queryPool must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-commonparent)"},
{"VUID-vkCmdBeginQueryIndexedEXT-flags-parameter", "flags must be a valid combination of VkQueryControlFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-flags-parameter)"},
- {"VUID-vkCmdBeginQueryIndexedEXT-query-00802", "query must be less than the number of queries in queryPool (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-query-00802)"},
- {"VUID-vkCmdBeginQueryIndexedEXT-query-00808", "If called within a render pass instance, the sum of query and the number of bits set in the current subpass's view mask must be less than or equal to the number of queries in queryPool (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-query-00808)"},
- {"VUID-vkCmdBeginQueryIndexedEXT-queryPool-01922", "queryPool must have been created with a queryType that differs from that of any queries that are active within commandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-queryPool-01922)"},
+ {"VUID-vkCmdBeginQueryIndexedEXT-query-02332", "query must be less than the number of queries in queryPool (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-query-02332)"},
+ {"VUID-vkCmdBeginQueryIndexedEXT-query-02337", "If vkCmdBeginQuery is called within a render pass instance, the sum of query and the number of bits set in the current subpass's view mask must be less than or equal to the number of queries in queryPool (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-query-02337)"},
+ {"VUID-vkCmdBeginQueryIndexedEXT-queryPool-02329", "queryPool must have been created with a queryType that differs from that of any queries that are active within commandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-queryPool-02329)"},
{"VUID-vkCmdBeginQueryIndexedEXT-queryPool-parameter", "queryPool must be a valid VkQueryPool handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-queryPool-parameter)"},
- {"VUID-vkCmdBeginQueryIndexedEXT-queryType-00800", "If the precise occlusion queries feature is not enabled, or the queryType used to create queryPool was not VK_QUERY_TYPE_OCCLUSION, flags must not contain VK_QUERY_CONTROL_PRECISE_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-queryType-00800)"},
- {"VUID-vkCmdBeginQueryIndexedEXT-queryType-00803", "If the queryType used to create queryPool was VK_QUERY_TYPE_OCCLUSION, the VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-queryType-00803)"},
- {"VUID-vkCmdBeginQueryIndexedEXT-queryType-00804", "If the queryType used to create queryPool was VK_QUERY_TYPE_PIPELINE_STATISTICS and any of the pipelineStatistics indicate graphics operations, the VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-queryType-00804)"},
- {"VUID-vkCmdBeginQueryIndexedEXT-queryType-00805", "If the queryType used to create queryPool was VK_QUERY_TYPE_PIPELINE_STATISTICS and any of the pipelineStatistics indicate compute operations, the VkCommandPool that commandBuffer was allocated from must support compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-queryType-00805)"},
+ {"VUID-vkCmdBeginQueryIndexedEXT-queryType-02331", "If the precise occlusion queries feature is not enabled, or the queryType used to create queryPool was not VK_QUERY_TYPE_OCCLUSION, flags must not contain VK_QUERY_CONTROL_PRECISE_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-queryType-02331)"},
+ {"VUID-vkCmdBeginQueryIndexedEXT-queryType-02333", "If the queryType used to create queryPool was VK_QUERY_TYPE_OCCLUSION, the VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-queryType-02333)"},
+ {"VUID-vkCmdBeginQueryIndexedEXT-queryType-02334", "If the queryType used to create queryPool was VK_QUERY_TYPE_PIPELINE_STATISTICS and any of the pipelineStatistics indicate graphics operations, the VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-queryType-02334)"},
+ {"VUID-vkCmdBeginQueryIndexedEXT-queryType-02335", "If the queryType used to create queryPool was VK_QUERY_TYPE_PIPELINE_STATISTICS and any of the pipelineStatistics indicate compute operations, the VkCommandPool that commandBuffer was allocated from must support compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-queryType-02335)"},
{"VUID-vkCmdBeginQueryIndexedEXT-queryType-02338", "If the queryType used to create queryPool was VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT the VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-queryType-02338)"},
{"VUID-vkCmdBeginQueryIndexedEXT-queryType-02339", "If the queryType used to create queryPool was VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT the index parameter must be less than VkPhysicalDeviceTransformFeedbackPropertiesEXT::maxTransformFeedbackStreams (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-queryType-02339)"},
{"VUID-vkCmdBeginQueryIndexedEXT-queryType-02340", "If the queryType used to create queryPool was not VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT the index must be zero (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-queryType-02340)"},
@@ -2852,7 +2656,6 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-vkCmdBeginRenderPass2KHR-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginRenderPass2KHR-commandBuffer-parameter)"},
{"VUID-vkCmdBeginRenderPass2KHR-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginRenderPass2KHR-commandBuffer-recording)"},
{"VUID-vkCmdBeginRenderPass2KHR-framebuffer-02533", "For any attachment in framebuffer that is used by renderPass and is bound to memory locations that are also bound to another attachment used by renderPass, and if at least one of those uses causes either attachment to be written to, both attachments must have had the VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginRenderPass2KHR-framebuffer-02533)"},
- {"VUID-vkCmdBeginRenderPass2KHR-framebuffer-02779", "Both the framebuffer and renderPass members of pRenderPassBegin must have been created on the same VkDevice that commandBuffer was allocated on (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginRenderPass2KHR-framebuffer-02779)"},
{"VUID-vkCmdBeginRenderPass2KHR-initialLayout-03094", "If any of the initialLayout or finalLayout member of the VkAttachmentDescription structures or the layout member of the VkAttachmentReference structures specified when creating the render pass specified in the renderPass member of pRenderPassBegin is VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL then the corresponding attachment image view of the framebuffer specified in the framebuffer member of pRenderPassBegin must have been created with a usage value including VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginRenderPass2KHR-initialLayout-03094)"},
{"VUID-vkCmdBeginRenderPass2KHR-initialLayout-03096", "If any of the initialLayout or finalLayout member of the VkAttachmentDescription structures or the layout member of the VkAttachmentReference structures specified when creating the render pass specified in the renderPass member of pRenderPassBegin is VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, or VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL then the corresponding attachment image view of the framebuffer specified in the framebuffer member of pRenderPassBegin must have been created with a usage value including VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginRenderPass2KHR-initialLayout-03096)"},
{"VUID-vkCmdBeginRenderPass2KHR-initialLayout-03097", "If any of the initialLayout or finalLayout member of the VkAttachmentDescription structures or the layout member of the VkAttachmentReference structures specified when creating the render pass specified in the renderPass member of pRenderPassBegin is VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL then the corresponding attachment image view of the framebuffer specified in the framebuffer member of pRenderPassBegin must have been created with a usage value including VK_IMAGE_USAGE_SAMPLED_BIT or VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginRenderPass2KHR-initialLayout-03097)"},
@@ -2902,7 +2705,6 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-vkCmdBindIndexBuffer-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindIndexBuffer-commandBuffer-recording)"},
{"VUID-vkCmdBindIndexBuffer-commonparent", "Both of buffer, and commandBuffer must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindIndexBuffer-commonparent)"},
{"VUID-vkCmdBindIndexBuffer-indexType-02507", "indexType must not be VK_INDEX_TYPE_NONE_NV. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindIndexBuffer-indexType-02507)"},
- {"VUID-vkCmdBindIndexBuffer-indexType-02765", "If indexType is VK_INDEX_TYPE_UINT8_EXT, the indexTypeUint8 feature must be enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindIndexBuffer-indexType-02765)"},
{"VUID-vkCmdBindIndexBuffer-indexType-parameter", "indexType must be a valid VkIndexType value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindIndexBuffer-indexType-parameter)"},
{"VUID-vkCmdBindIndexBuffer-offset-00431", "offset must be less than the size of buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindIndexBuffer-offset-00431)"},
{"VUID-vkCmdBindIndexBuffer-offset-00432", "The sum of offset and the address of the range of VkDeviceMemory object that is backing buffer, must be a multiple of the type indicated by indexType (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindIndexBuffer-offset-00432)"},
@@ -2925,14 +2727,14 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-vkCmdBindShadingRateImageNV-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindShadingRateImageNV-commandBuffer-cmdpool)"},
{"VUID-vkCmdBindShadingRateImageNV-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindShadingRateImageNV-commandBuffer-parameter)"},
{"VUID-vkCmdBindShadingRateImageNV-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindShadingRateImageNV-commandBuffer-recording)"},
- {"VUID-vkCmdBindShadingRateImageNV-commonparent", "Both of commandBuffer, and imageView that are valid handles must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindShadingRateImageNV-commonparent)"},
+ {"VUID-vkCmdBindShadingRateImageNV-commonparent", "Both of commandBuffer, and imageView must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindShadingRateImageNV-commonparent)"},
{"VUID-vkCmdBindShadingRateImageNV-imageLayout-02063", "If imageView is not VK_NULL_HANDLE, imageLayout must be VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV or VK_IMAGE_LAYOUT_GENERAL. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindShadingRateImageNV-imageLayout-02063)"},
{"VUID-vkCmdBindShadingRateImageNV-imageLayout-parameter", "imageLayout must be a valid VkImageLayout value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindShadingRateImageNV-imageLayout-parameter)"},
{"VUID-vkCmdBindShadingRateImageNV-imageView-02059", "If imageView is not VK_NULL_HANDLE, it must be a valid VkImageView handle of type VK_IMAGE_VIEW_TYPE_2D or VK_IMAGE_VIEW_TYPE_2D_ARRAY. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindShadingRateImageNV-imageView-02059)"},
{"VUID-vkCmdBindShadingRateImageNV-imageView-02060", "If imageView is not VK_NULL_HANDLE, it must have a format of VK_FORMAT_R8_UINT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindShadingRateImageNV-imageView-02060)"},
{"VUID-vkCmdBindShadingRateImageNV-imageView-02061", "If imageView is not VK_NULL_HANDLE, it must have been created with a usage value including VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindShadingRateImageNV-imageView-02061)"},
{"VUID-vkCmdBindShadingRateImageNV-imageView-02062", "If imageView is not VK_NULL_HANDLE, imageLayout must match the actual VkImageLayout of each subresource accessible from imageView at the time the subresource is accessed. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindShadingRateImageNV-imageView-02062)"},
- {"VUID-vkCmdBindShadingRateImageNV-imageView-parameter", "If imageView is not VK_NULL_HANDLE, imageView must be a valid VkImageView handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindShadingRateImageNV-imageView-parameter)"},
+ {"VUID-vkCmdBindShadingRateImageNV-imageView-parameter", "imageView must be a valid VkImageView handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindShadingRateImageNV-imageView-parameter)"},
{"VUID-vkCmdBindTransformFeedbackBuffersEXT-None-02365", "Transform feedback must not be active when the vkCmdBindTransformFeedbackBuffersEXT command is recorded (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindTransformFeedbackBuffersEXT-None-02365)"},
{"VUID-vkCmdBindTransformFeedbackBuffersEXT-bindingCount-arraylength", "If pSizes is not NULL, bindingCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindTransformFeedbackBuffersEXT-bindingCount-arraylength)"},
{"VUID-vkCmdBindTransformFeedbackBuffersEXT-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindTransformFeedbackBuffersEXT-commandBuffer-cmdpool)"},
@@ -3015,12 +2817,11 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-vkCmdBuildAccelerationStructureNV-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBuildAccelerationStructureNV-commandBuffer-parameter)"},
{"VUID-vkCmdBuildAccelerationStructureNV-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBuildAccelerationStructureNV-commandBuffer-recording)"},
{"VUID-vkCmdBuildAccelerationStructureNV-commonparent", "Each of commandBuffer, dst, instanceData, scratch, and src that are valid handles must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBuildAccelerationStructureNV-commonparent)"},
- {"VUID-vkCmdBuildAccelerationStructureNV-dst-02488", "dst must have been created with compatible VkAccelerationStructureInfoNV where VkAccelerationStructureInfoNV::type and VkAccelerationStructureInfoNV::flags are identical, VkAccelerationStructureInfoNV::instanceCount and VkAccelerationStructureInfoNV::geometryCount for dst are greater than or equal to the build size and each geometry in VkAccelerationStructureInfoNV::pGeometries for dst has greater than or equal to the number of vertices, indices, and AABBs. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBuildAccelerationStructureNV-dst-02488)"},
+ {"VUID-vkCmdBuildAccelerationStructureNV-dst-02488", "dst must have been created with compatible VkAccelerationStructureInfoNV where VkAccelerationStructureInfoNV:::type and VkAccelerationStructureInfoNV::flags are identical, VkAccelerationStructureInfoNV::instanceCount and VkAccelerationStructureInfoNV::geometryCount for dst are greater than or equal to the build size and each geometry in VkAccelerationStructureInfoNV::pGeometries for dst has greater than or equal to the number of vertices, indices, and AABBs. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBuildAccelerationStructureNV-dst-02488)"},
{"VUID-vkCmdBuildAccelerationStructureNV-dst-parameter", "dst must be a valid VkAccelerationStructureNV handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBuildAccelerationStructureNV-dst-parameter)"},
{"VUID-vkCmdBuildAccelerationStructureNV-geometryCount-02241", "geometryCount must be less than or equal to VkPhysicalDeviceRayTracingPropertiesNV::maxGeometryCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBuildAccelerationStructureNV-geometryCount-02241)"},
{"VUID-vkCmdBuildAccelerationStructureNV-instanceData-parameter", "If instanceData is not VK_NULL_HANDLE, instanceData must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBuildAccelerationStructureNV-instanceData-parameter)"},
{"VUID-vkCmdBuildAccelerationStructureNV-pInfo-parameter", "pInfo must be a valid pointer to a valid VkAccelerationStructureInfoNV structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBuildAccelerationStructureNV-pInfo-parameter)"},
- {"VUID-vkCmdBuildAccelerationStructureNV-renderpass", "This command must only be called outside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBuildAccelerationStructureNV-renderpass)"},
{"VUID-vkCmdBuildAccelerationStructureNV-scratch-parameter", "scratch must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBuildAccelerationStructureNV-scratch-parameter)"},
{"VUID-vkCmdBuildAccelerationStructureNV-src-parameter", "If src is not VK_NULL_HANDLE, src must be a valid VkAccelerationStructureNV handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBuildAccelerationStructureNV-src-parameter)"},
{"VUID-vkCmdBuildAccelerationStructureNV-update-02489", "If update is VK_TRUE, src must not be VK_NULL_HANDLE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBuildAccelerationStructureNV-update-02489)"},
@@ -3042,8 +2843,6 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-vkCmdClearAttachments-pRects-00016", "The rectangular region specified by each element of pRects must be contained within the render area of the current render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearAttachments-pRects-00016)"},
{"VUID-vkCmdClearAttachments-pRects-00017", "The layers specified by each element of pRects must be contained within every attachment that pAttachments refers to (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearAttachments-pRects-00017)"},
{"VUID-vkCmdClearAttachments-pRects-parameter", "pRects must be a valid pointer to an array of rectCount VkClearRect structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearAttachments-pRects-parameter)"},
- {"VUID-vkCmdClearAttachments-rect-02682", "The rect member of each element of pRects must have an extent.width greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearAttachments-rect-02682)"},
- {"VUID-vkCmdClearAttachments-rect-02683", "The rect member of each element of pRects must have an extent.height greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearAttachments-rect-02683)"},
{"VUID-vkCmdClearAttachments-rectCount-arraylength", "rectCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearAttachments-rectCount-arraylength)"},
{"VUID-vkCmdClearAttachments-renderpass", "This command must only be called inside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearAttachments-renderpass)"},
{"VUID-vkCmdClearColorImage-aspectMask-02498", "The VkImageSubresourceRange::aspectMask members of the elements of the pRanges array must each only include VK_IMAGE_ASPECT_COLOR_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearColorImage-aspectMask-02498)"},
@@ -3092,9 +2891,6 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-vkCmdClearDepthStencilImage-pDepthStencil-parameter", "pDepthStencil must be a valid pointer to a valid VkClearDepthStencilValue structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearDepthStencilImage-pDepthStencil-parameter)"},
{"VUID-vkCmdClearDepthStencilImage-pRanges-01694", "For each VkImageSubresourceRange element of pRanges, if the levelCount member is not VK_REMAINING_MIP_LEVELS, then baseMipLevel + levelCount must be less than the mipLevels specified in VkImageCreateInfo when image was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearDepthStencilImage-pRanges-01694)"},
{"VUID-vkCmdClearDepthStencilImage-pRanges-01695", "For each VkImageSubresourceRange element of pRanges, if the layerCount member is not VK_REMAINING_ARRAY_LAYERS, then baseArrayLayer + layerCount must be less than the arrayLayers specified in VkImageCreateInfo when image was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearDepthStencilImage-pRanges-01695)"},
- {"VUID-vkCmdClearDepthStencilImage-pRanges-02658", "If any element of pRanges.aspect includes VK_IMAGE_ASPECT_STENCIL_BIT, and image was created with separate stencil usage, VK_IMAGE_USAGE_TRANSFER_DST_BIT must have been included in the VkImageStencilUsageCreateInfoEXT::stencilUsage used to create image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearDepthStencilImage-pRanges-02658)"},
- {"VUID-vkCmdClearDepthStencilImage-pRanges-02659", "If any element of pRanges.aspect includes VK_IMAGE_ASPECT_STENCIL_BIT, and image was not created with separate stencil usage, VK_IMAGE_USAGE_TRANSFER_DST_BIT must have been included in the VkImageCreateInfo::usage used to create image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearDepthStencilImage-pRanges-02659)"},
- {"VUID-vkCmdClearDepthStencilImage-pRanges-02660", "If any element of pRanges.aspect includes VK_IMAGE_ASPECT_DEPTH_BIT, VK_IMAGE_USAGE_TRANSFER_DST_BIT must have been included in the VkImageCreateInfo::usage used to create image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearDepthStencilImage-pRanges-02660)"},
{"VUID-vkCmdClearDepthStencilImage-pRanges-parameter", "pRanges must be a valid pointer to an array of rangeCount valid VkImageSubresourceRange structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearDepthStencilImage-pRanges-parameter)"},
{"VUID-vkCmdClearDepthStencilImage-rangeCount-arraylength", "rangeCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearDepthStencilImage-rangeCount-arraylength)"},
{"VUID-vkCmdClearDepthStencilImage-renderpass", "This command must only be called outside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearDepthStencilImage-renderpass)"},
@@ -3105,7 +2901,6 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-vkCmdCopyAccelerationStructureNV-dst-parameter", "dst must be a valid VkAccelerationStructureNV handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyAccelerationStructureNV-dst-parameter)"},
{"VUID-vkCmdCopyAccelerationStructureNV-mode-02496", "mode must be VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV or VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyAccelerationStructureNV-mode-02496)"},
{"VUID-vkCmdCopyAccelerationStructureNV-mode-parameter", "mode must be a valid VkCopyAccelerationStructureModeNV value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyAccelerationStructureNV-mode-parameter)"},
- {"VUID-vkCmdCopyAccelerationStructureNV-renderpass", "This command must only be called outside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyAccelerationStructureNV-renderpass)"},
{"VUID-vkCmdCopyAccelerationStructureNV-src-02497", "src must have been built with VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV if mode is VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyAccelerationStructureNV-src-02497)"},
{"VUID-vkCmdCopyAccelerationStructureNV-src-parameter", "src must be a valid VkAccelerationStructureNV handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyAccelerationStructureNV-src-parameter)"},
{"VUID-vkCmdCopyBuffer-commandBuffer-01822", "If commandBuffer is an unprotected command buffer, then srcBuffer must not be a protected buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBuffer-commandBuffer-01822)"},
@@ -3246,7 +3041,6 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-vkCmdCopyQueryPoolResults-flags-parameter", "flags must be a valid combination of VkQueryResultFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyQueryPoolResults-flags-parameter)"},
{"VUID-vkCmdCopyQueryPoolResults-queryPool-parameter", "queryPool must be a valid VkQueryPool handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyQueryPoolResults-queryPool-parameter)"},
{"VUID-vkCmdCopyQueryPoolResults-queryType-00827", "If the queryType used to create queryPool was VK_QUERY_TYPE_TIMESTAMP, flags must not contain VK_QUERY_RESULT_PARTIAL_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyQueryPoolResults-queryType-00827)"},
- {"VUID-vkCmdCopyQueryPoolResults-queryType-02734", "vkCmdCopyQueryPoolResults must not be called if the queryType used to create queryPool was VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyQueryPoolResults-queryType-02734)"},
{"VUID-vkCmdCopyQueryPoolResults-renderpass", "This command must only be called outside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyQueryPoolResults-renderpass)"},
{"VUID-vkCmdDebugMarkerBeginEXT-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDebugMarkerBeginEXT-commandBuffer-cmdpool)"},
{"VUID-vkCmdDebugMarkerBeginEXT-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDebugMarkerBeginEXT-commandBuffer-parameter)"},
@@ -3261,477 +3055,517 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-vkCmdDebugMarkerInsertEXT-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDebugMarkerInsertEXT-commandBuffer-parameter)"},
{"VUID-vkCmdDebugMarkerInsertEXT-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDebugMarkerInsertEXT-commandBuffer-recording)"},
{"VUID-vkCmdDebugMarkerInsertEXT-pMarkerInfo-parameter", "pMarkerInfo must be a valid pointer to a valid VkDebugMarkerMarkerInfoEXT structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDebugMarkerInsertEXT-pMarkerInfo-parameter)"},
- {"VUID-vkCmdDispatch-None-02690", "If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-None-02690)"},
- {"VUID-vkCmdDispatch-None-02691", "If a VkImageView is accessed using atomic operations as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-None-02691)"},
- {"VUID-vkCmdDispatch-None-02692", "If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-None-02692)"},
- {"VUID-vkCmdDispatch-None-02693", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-None-02693)"},
- {"VUID-vkCmdDispatch-None-02697", "For each set n that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-None-02697)"},
- {"VUID-vkCmdDispatch-None-02698", "For each push constant that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a push constant value must have been set for the same pipeline bind point, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-None-02698)"},
- {"VUID-vkCmdDispatch-None-02699", "Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the VkPipeline bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-None-02699)"},
- {"VUID-vkCmdDispatch-None-02700", "A valid pipeline must be bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-None-02700)"},
- {"VUID-vkCmdDispatch-None-02702", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-None-02702)"},
- {"VUID-vkCmdDispatch-None-02703", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-None-02703)"},
- {"VUID-vkCmdDispatch-None-02704", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-None-02704)"},
- {"VUID-vkCmdDispatch-None-02705", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a uniform buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-None-02705)"},
- {"VUID-vkCmdDispatch-None-02706", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a storage buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-None-02706)"},
- {"VUID-vkCmdDispatch-commandBuffer-02701", "If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set for commandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-commandBuffer-02701)"},
- {"VUID-vkCmdDispatch-commandBuffer-02707", "If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-commandBuffer-02707)"},
- {"VUID-vkCmdDispatch-commandBuffer-02712", "If commandBuffer is a protected command buffer, any resource written to by the VkPipeline object bound to the pipeline bind point used by this command must not be an unprotected resource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-commandBuffer-02712)"},
- {"VUID-vkCmdDispatch-commandBuffer-02713", "If commandBuffer is a protected command buffer, pipeline stages other than the framebuffer-space and compute stages in the VkPipeline object bound to the pipeline bind point must not write to any resource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-commandBuffer-02713)"},
+ {"VUID-vkCmdDispatch-None-00389", "For each set n that is statically used by the VkPipeline bound to VK_PIPELINE_BIND_POINT_COMPUTE, a descriptor set must have been bound to n at VK_PIPELINE_BIND_POINT_COMPUTE, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-None-00389)"},
+ {"VUID-vkCmdDispatch-None-00390", "Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the bound VkPipeline object, specified via vkCmdBindPipeline (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-None-00390)"},
+ {"VUID-vkCmdDispatch-None-00391", "A valid compute pipeline must be bound to the current command buffer with VK_PIPELINE_BIND_POINT_COMPUTE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-None-00391)"},
+ {"VUID-vkCmdDispatch-None-00392", "For each push constant that is statically used by the VkPipeline bound to VK_PIPELINE_BIND_POINT_COMPUTE, a push constant value must have been set for VK_PIPELINE_BIND_POINT_COMPUTE, with a VkPipelineLayout that is compatible for push constants with the one used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-None-00392)"},
+ {"VUID-vkCmdDispatch-None-00393", "If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_COMPUTE uses unnormalized coordinates, it must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-None-00393)"},
+ {"VUID-vkCmdDispatch-None-00394", "If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_COMPUTE uses unnormalized coordinates, it must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-None-00394)"},
+ {"VUID-vkCmdDispatch-None-00395", "If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_COMPUTE uses unnormalized coordinates, it must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-None-00395)"},
+ {"VUID-vkCmdDispatch-None-00396", "If the robust buffer access feature is not enabled, and any shader stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_COMPUTE accesses a uniform buffer, it must not access values outside of the range of that buffer specified in the bound descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-None-00396)"},
+ {"VUID-vkCmdDispatch-None-00397", "If the robust buffer access feature is not enabled, and any shader stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_COMPUTE accesses a storage buffer, it must not access values outside of the range of that buffer specified in the bound descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-None-00397)"},
+ {"VUID-vkCmdDispatch-None-00400", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-None-00400)"},
+ {"VUID-vkCmdDispatch-None-02005", "If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-None-02005)"},
+ {"VUID-vkCmdDispatch-None-02006", "If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-None-02006)"},
+ {"VUID-vkCmdDispatch-commandBuffer-01844", "If commandBuffer is an unprotected command buffer, and any pipeline stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_COMPUTE reads from or writes to any image or buffer, that image or buffer must not be a protected image or protected buffer. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-commandBuffer-01844)"},
+ {"VUID-vkCmdDispatch-commandBuffer-01845", "If commandBuffer is a protected command buffer, and any pipeline stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_COMPUTE writes to any image or buffer, that image or buffer must not be an unprotected image or unprotected buffer. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-commandBuffer-01845)"},
+ {"VUID-vkCmdDispatch-commandBuffer-01846", "If commandBuffer is a protected command buffer, and any pipeline stage other than the compute pipeline stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_COMPUTE reads from any image or buffer, the image or buffer must not be a protected image or protected buffer. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-commandBuffer-01846)"},
{"VUID-vkCmdDispatch-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-commandBuffer-cmdpool)"},
{"VUID-vkCmdDispatch-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-commandBuffer-parameter)"},
{"VUID-vkCmdDispatch-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-commandBuffer-recording)"},
- {"VUID-vkCmdDispatch-filterCubic-02694", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-filterCubic-02694)"},
- {"VUID-vkCmdDispatch-filterCubicMinmax-02695", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN_EXT or VK_SAMPLER_REDUCTION_MODE_MAX_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-filterCubicMinmax-02695)"},
- {"VUID-vkCmdDispatch-flags-02696", "Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-flags-02696)"},
+ {"VUID-vkCmdDispatch-filterCubic-02609", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-filterCubic-02609)"},
+ {"VUID-vkCmdDispatch-filterCubicMinmax-02610", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN_EXT or VK_SAMPLER_REDUCTION_MODE_MAX_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-filterCubicMinmax-02610)"},
+ {"VUID-vkCmdDispatch-flags-02040", "Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-flags-02040)"},
{"VUID-vkCmdDispatch-groupCountX-00386", "groupCountX must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[0] (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-groupCountX-00386)"},
{"VUID-vkCmdDispatch-groupCountY-00387", "groupCountY must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[1] (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-groupCountY-00387)"},
{"VUID-vkCmdDispatch-groupCountZ-00388", "groupCountZ must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[2] (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-groupCountZ-00388)"},
{"VUID-vkCmdDispatch-renderpass", "This command must only be called outside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-renderpass)"},
- {"VUID-vkCmdDispatchBase-None-02690", "If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-None-02690)"},
- {"VUID-vkCmdDispatchBase-None-02691", "If a VkImageView is accessed using atomic operations as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-None-02691)"},
- {"VUID-vkCmdDispatchBase-None-02692", "If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-None-02692)"},
- {"VUID-vkCmdDispatchBase-None-02693", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-None-02693)"},
- {"VUID-vkCmdDispatchBase-None-02697", "For each set n that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-None-02697)"},
- {"VUID-vkCmdDispatchBase-None-02698", "For each push constant that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a push constant value must have been set for the same pipeline bind point, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-None-02698)"},
- {"VUID-vkCmdDispatchBase-None-02699", "Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the VkPipeline bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-None-02699)"},
- {"VUID-vkCmdDispatchBase-None-02700", "A valid pipeline must be bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-None-02700)"},
- {"VUID-vkCmdDispatchBase-None-02702", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-None-02702)"},
- {"VUID-vkCmdDispatchBase-None-02703", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-None-02703)"},
- {"VUID-vkCmdDispatchBase-None-02704", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-None-02704)"},
- {"VUID-vkCmdDispatchBase-None-02705", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a uniform buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-None-02705)"},
- {"VUID-vkCmdDispatchBase-None-02706", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a storage buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-None-02706)"},
+ {"VUID-vkCmdDispatchBase-None-00420", "All valid usage rules from vkCmdDispatch apply (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-None-00420)"},
{"VUID-vkCmdDispatchBase-baseGroupX-00421", "baseGroupX must be less than VkPhysicalDeviceLimits::maxComputeWorkGroupCount[0] (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-baseGroupX-00421)"},
{"VUID-vkCmdDispatchBase-baseGroupX-00422", "baseGroupX must be less than VkPhysicalDeviceLimits::maxComputeWorkGroupCount[1] (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-baseGroupX-00422)"},
{"VUID-vkCmdDispatchBase-baseGroupX-00427", "If any of baseGroupX, baseGroupY, or baseGroupZ are not zero, then the bound compute pipeline must have been created with the VK_PIPELINE_CREATE_DISPATCH_BASE flag. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-baseGroupX-00427)"},
{"VUID-vkCmdDispatchBase-baseGroupZ-00423", "baseGroupZ must be less than VkPhysicalDeviceLimits::maxComputeWorkGroupCount[2] (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-baseGroupZ-00423)"},
- {"VUID-vkCmdDispatchBase-commandBuffer-02701", "If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set for commandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-commandBuffer-02701)"},
- {"VUID-vkCmdDispatchBase-commandBuffer-02707", "If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-commandBuffer-02707)"},
{"VUID-vkCmdDispatchBase-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-commandBuffer-cmdpool)"},
{"VUID-vkCmdDispatchBase-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-commandBuffer-parameter)"},
{"VUID-vkCmdDispatchBase-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-commandBuffer-recording)"},
- {"VUID-vkCmdDispatchBase-filterCubic-02694", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-filterCubic-02694)"},
- {"VUID-vkCmdDispatchBase-filterCubicMinmax-02695", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN_EXT or VK_SAMPLER_REDUCTION_MODE_MAX_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-filterCubicMinmax-02695)"},
- {"VUID-vkCmdDispatchBase-flags-02696", "Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-flags-02696)"},
{"VUID-vkCmdDispatchBase-groupCountX-00424", "groupCountX must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[0] minus baseGroupX (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-groupCountX-00424)"},
{"VUID-vkCmdDispatchBase-groupCountY-00425", "groupCountY must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[1] minus baseGroupY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-groupCountY-00425)"},
{"VUID-vkCmdDispatchBase-groupCountZ-00426", "groupCountZ must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[2] minus baseGroupZ (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-groupCountZ-00426)"},
{"VUID-vkCmdDispatchBase-renderpass", "This command must only be called outside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-renderpass)"},
- {"VUID-vkCmdDispatchIndirect-None-02690", "If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-None-02690)"},
- {"VUID-vkCmdDispatchIndirect-None-02691", "If a VkImageView is accessed using atomic operations as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-None-02691)"},
- {"VUID-vkCmdDispatchIndirect-None-02692", "If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-None-02692)"},
- {"VUID-vkCmdDispatchIndirect-None-02693", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-None-02693)"},
- {"VUID-vkCmdDispatchIndirect-None-02697", "For each set n that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-None-02697)"},
- {"VUID-vkCmdDispatchIndirect-None-02698", "For each push constant that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a push constant value must have been set for the same pipeline bind point, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-None-02698)"},
- {"VUID-vkCmdDispatchIndirect-None-02699", "Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the VkPipeline bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-None-02699)"},
- {"VUID-vkCmdDispatchIndirect-None-02700", "A valid pipeline must be bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-None-02700)"},
- {"VUID-vkCmdDispatchIndirect-None-02702", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-None-02702)"},
- {"VUID-vkCmdDispatchIndirect-None-02703", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-None-02703)"},
- {"VUID-vkCmdDispatchIndirect-None-02704", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-None-02704)"},
- {"VUID-vkCmdDispatchIndirect-None-02705", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a uniform buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-None-02705)"},
- {"VUID-vkCmdDispatchIndirect-None-02706", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a storage buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-None-02706)"},
- {"VUID-vkCmdDispatchIndirect-buffer-02708", "If buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-buffer-02708)"},
- {"VUID-vkCmdDispatchIndirect-buffer-02709", "buffer must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-buffer-02709)"},
+ {"VUID-vkCmdDispatchIndirect-None-00402", "For each set n that is statically used by the VkPipeline bound to VK_PIPELINE_BIND_POINT_COMPUTE, a descriptor set must have been bound to n at VK_PIPELINE_BIND_POINT_COMPUTE, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-None-00402)"},
+ {"VUID-vkCmdDispatchIndirect-None-00403", "Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the bound VkPipeline object, specified via vkCmdBindPipeline (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-None-00403)"},
+ {"VUID-vkCmdDispatchIndirect-None-00404", "A valid compute pipeline must be bound to the current command buffer with VK_PIPELINE_BIND_POINT_COMPUTE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-None-00404)"},
+ {"VUID-vkCmdDispatchIndirect-None-00408", "For each push constant that is statically used by the VkPipeline bound to VK_PIPELINE_BIND_POINT_COMPUTE, a push constant value must have been set for VK_PIPELINE_BIND_POINT_COMPUTE, with a VkPipelineLayout that is compatible for push constants with the one used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-None-00408)"},
+ {"VUID-vkCmdDispatchIndirect-None-00409", "If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_COMPUTE uses unnormalized coordinates, it must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-None-00409)"},
+ {"VUID-vkCmdDispatchIndirect-None-00410", "If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_COMPUTE uses unnormalized coordinates, it must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-None-00410)"},
+ {"VUID-vkCmdDispatchIndirect-None-00411", "If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_COMPUTE uses unnormalized coordinates, it must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-None-00411)"},
+ {"VUID-vkCmdDispatchIndirect-None-00412", "If the robust buffer access feature is not enabled, and any shader stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_COMPUTE accesses a uniform buffer, it must not access values outside of the range of that buffer specified in the bound descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-None-00412)"},
+ {"VUID-vkCmdDispatchIndirect-None-00413", "If the robust buffer access feature is not enabled, and any shader stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_COMPUTE accesses a storage buffer, it must not access values outside of the range of that buffer specified in the bound descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-None-00413)"},
+ {"VUID-vkCmdDispatchIndirect-None-00416", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-None-00416)"},
+ {"VUID-vkCmdDispatchIndirect-None-02007", "If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-None-02007)"},
+ {"VUID-vkCmdDispatchIndirect-None-02008", "If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-None-02008)"},
+ {"VUID-vkCmdDispatchIndirect-buffer-00401", "If buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-buffer-00401)"},
+ {"VUID-vkCmdDispatchIndirect-buffer-00405", "buffer must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-buffer-00405)"},
{"VUID-vkCmdDispatchIndirect-buffer-parameter", "buffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-buffer-parameter)"},
- {"VUID-vkCmdDispatchIndirect-commandBuffer-02701", "If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set for commandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-commandBuffer-02701)"},
- {"VUID-vkCmdDispatchIndirect-commandBuffer-02707", "If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-commandBuffer-02707)"},
- {"VUID-vkCmdDispatchIndirect-commandBuffer-02711", "commandBuffer must not be a protected command buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-commandBuffer-02711)"},
+ {"VUID-vkCmdDispatchIndirect-commandBuffer-01847", "If commandBuffer is an unprotected command buffer, and any pipeline stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_COMPUTE reads from or writes to any image or buffer, that image or buffer must not be a protected image or protected buffer. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-commandBuffer-01847)"},
+ {"VUID-vkCmdDispatchIndirect-commandBuffer-02639", "commandBuffer must not be a protected command buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-commandBuffer-02639)"},
{"VUID-vkCmdDispatchIndirect-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-commandBuffer-cmdpool)"},
{"VUID-vkCmdDispatchIndirect-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-commandBuffer-parameter)"},
{"VUID-vkCmdDispatchIndirect-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-commandBuffer-recording)"},
{"VUID-vkCmdDispatchIndirect-commonparent", "Both of buffer, and commandBuffer must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-commonparent)"},
- {"VUID-vkCmdDispatchIndirect-filterCubic-02694", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-filterCubic-02694)"},
- {"VUID-vkCmdDispatchIndirect-filterCubicMinmax-02695", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN_EXT or VK_SAMPLER_REDUCTION_MODE_MAX_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-filterCubicMinmax-02695)"},
- {"VUID-vkCmdDispatchIndirect-flags-02696", "Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-flags-02696)"},
+ {"VUID-vkCmdDispatchIndirect-filterCubic-02611", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-filterCubic-02611)"},
+ {"VUID-vkCmdDispatchIndirect-filterCubicMinmax-02612", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN_EXT or VK_SAMPLER_REDUCTION_MODE_MAX_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-filterCubicMinmax-02612)"},
+ {"VUID-vkCmdDispatchIndirect-flags-02041", "Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-flags-02041)"},
+ {"VUID-vkCmdDispatchIndirect-offset-00406", "offset must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-offset-00406)"},
{"VUID-vkCmdDispatchIndirect-offset-00407", "The sum of offset and the size of VkDispatchIndirectCommand must be less than or equal to the size of buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-offset-00407)"},
- {"VUID-vkCmdDispatchIndirect-offset-02710", "offset must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-offset-02710)"},
{"VUID-vkCmdDispatchIndirect-renderpass", "This command must only be called outside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-renderpass)"},
- {"VUID-vkCmdDraw-None-02686", "Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-02686)"},
- {"VUID-vkCmdDraw-None-02687", "Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-02687)"},
- {"VUID-vkCmdDraw-None-02690", "If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-02690)"},
- {"VUID-vkCmdDraw-None-02691", "If a VkImageView is accessed using atomic operations as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-02691)"},
- {"VUID-vkCmdDraw-None-02692", "If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-02692)"},
- {"VUID-vkCmdDraw-None-02693", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-02693)"},
- {"VUID-vkCmdDraw-None-02697", "For each set n that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-02697)"},
- {"VUID-vkCmdDraw-None-02698", "For each push constant that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a push constant value must have been set for the same pipeline bind point, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-02698)"},
- {"VUID-vkCmdDraw-None-02699", "Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the VkPipeline bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-02699)"},
- {"VUID-vkCmdDraw-None-02700", "A valid pipeline must be bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-02700)"},
- {"VUID-vkCmdDraw-None-02702", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-02702)"},
- {"VUID-vkCmdDraw-None-02703", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-02703)"},
- {"VUID-vkCmdDraw-None-02704", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-02704)"},
- {"VUID-vkCmdDraw-None-02705", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a uniform buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-02705)"},
- {"VUID-vkCmdDraw-None-02706", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a storage buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-02706)"},
- {"VUID-vkCmdDraw-None-02720", "All vertex input bindings accessed via vertex input variables declared in the vertex shader entry point's interface must have valid buffers bound (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-02720)"},
- {"VUID-vkCmdDraw-None-02721", "For a given vertex buffer binding, any attribute data fetched must be entirely contained within the corresponding vertex buffer binding, as described in Vertex Input Description (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-02721)"},
- {"VUID-vkCmdDraw-commandBuffer-02701", "If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set for commandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-commandBuffer-02701)"},
- {"VUID-vkCmdDraw-commandBuffer-02707", "If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-commandBuffer-02707)"},
- {"VUID-vkCmdDraw-commandBuffer-02712", "If commandBuffer is a protected command buffer, any resource written to by the VkPipeline object bound to the pipeline bind point used by this command must not be an unprotected resource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-commandBuffer-02712)"},
- {"VUID-vkCmdDraw-commandBuffer-02713", "If commandBuffer is a protected command buffer, pipeline stages other than the framebuffer-space and compute stages in the VkPipeline object bound to the pipeline bind point must not write to any resource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-commandBuffer-02713)"},
+ {"VUID-vkCmdDraw-None-00437", "For each set n that is statically used by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS, a descriptor set must have been bound to n at VK_PIPELINE_BIND_POINT_GRAPHICS, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-00437)"},
+ {"VUID-vkCmdDraw-None-00438", "For each push constant that is statically used by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS, a push constant value must have been set for VK_PIPELINE_BIND_POINT_GRAPHICS, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-00438)"},
+ {"VUID-vkCmdDraw-None-00439", "Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the bound VkPipeline object, specified via vkCmdBindPipeline (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-00439)"},
+ {"VUID-vkCmdDraw-None-00440", "All vertex input bindings accessed via vertex input variables declared in the vertex shader entry point's interface must have valid buffers bound (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-00440)"},
+ {"VUID-vkCmdDraw-None-00441", "For a given vertex buffer binding, any attribute data fetched must be entirely contained within the corresponding vertex buffer binding, as described in Vertex Input Description (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-00441)"},
+ {"VUID-vkCmdDraw-None-00442", "A valid graphics pipeline must be bound to the current command buffer with VK_PIPELINE_BIND_POINT_GRAPHICS (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-00442)"},
+ {"VUID-vkCmdDraw-None-00443", "If the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS requires any dynamic state, that state must have been set on the current command buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-00443)"},
+ {"VUID-vkCmdDraw-None-00444", "Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-00444)"},
+ {"VUID-vkCmdDraw-None-00445", "If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-00445)"},
+ {"VUID-vkCmdDraw-None-00446", "If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-00446)"},
+ {"VUID-vkCmdDraw-None-00447", "If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-00447)"},
+ {"VUID-vkCmdDraw-None-00448", "If the robust buffer access feature is not enabled, and any shader stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS accesses a uniform buffer, it must not access values outside of the range of that buffer specified in the bound descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-00448)"},
+ {"VUID-vkCmdDraw-None-00449", "If the robust buffer access feature is not enabled, and any shader stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS accesses a storage buffer, it must not access values outside of the range of that buffer specified in the bound descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-00449)"},
+ {"VUID-vkCmdDraw-None-00452", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-00452)"},
+ {"VUID-vkCmdDraw-None-01499", "Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-01499)"},
+ {"VUID-vkCmdDraw-None-02009", "If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-02009)"},
+ {"VUID-vkCmdDraw-None-02010", "If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-02010)"},
+ {"VUID-vkCmdDraw-commandBuffer-01850", "If commandBuffer is an unprotected command buffer, and any pipeline stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS reads from or writes to any image or buffer, that image or buffer must not be a protected image or protected buffer. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-commandBuffer-01850)"},
+ {"VUID-vkCmdDraw-commandBuffer-01851", "If commandBuffer is a protected command buffer, and any pipeline stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS writes to any image or buffer, that image or buffer must not be an unprotected image or unprotected buffer. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-commandBuffer-01851)"},
+ {"VUID-vkCmdDraw-commandBuffer-01852", "If commandBuffer is a protected command buffer, and any pipeline stage other than the framebuffer-space pipeline stages in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS reads from or writes to any image or buffer, the image or buffer must not be a protected image or protected buffer. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-commandBuffer-01852)"},
{"VUID-vkCmdDraw-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-commandBuffer-cmdpool)"},
{"VUID-vkCmdDraw-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-commandBuffer-parameter)"},
{"VUID-vkCmdDraw-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-commandBuffer-recording)"},
- {"VUID-vkCmdDraw-filterCubic-02694", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-filterCubic-02694)"},
- {"VUID-vkCmdDraw-filterCubicMinmax-02695", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN_EXT or VK_SAMPLER_REDUCTION_MODE_MAX_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-filterCubicMinmax-02695)"},
- {"VUID-vkCmdDraw-flags-02696", "Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-flags-02696)"},
- {"VUID-vkCmdDraw-maxMultiviewInstanceIndex-02688", "If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-maxMultiviewInstanceIndex-02688)"},
- {"VUID-vkCmdDraw-renderPass-02684", "The current render pass must be compatible with the renderPass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-renderPass-02684)"},
+ {"VUID-vkCmdDraw-filterCubic-02613", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-filterCubic-02613)"},
+ {"VUID-vkCmdDraw-filterCubicMinmax-02614", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN_EXT or VK_SAMPLER_REDUCTION_MODE_MAX_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-filterCubicMinmax-02614)"},
+ {"VUID-vkCmdDraw-flags-02042", "Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-flags-02042)"},
+ {"VUID-vkCmdDraw-maxMultiviewInstanceIndex-00453", "If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-maxMultiviewInstanceIndex-00453)"},
+ {"VUID-vkCmdDraw-renderPass-00435", "The current render pass must be compatible with the renderPass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-renderPass-00435)"},
{"VUID-vkCmdDraw-renderpass", "This command must only be called inside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-renderpass)"},
- {"VUID-vkCmdDraw-sampleLocationsEnable-02689", "If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-sampleLocationsEnable-02689)"},
- {"VUID-vkCmdDraw-subpass-02685", "The subpass index of the current render pass must be equal to the subpass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-subpass-02685)"},
- {"VUID-vkCmdDrawIndexed-None-02686", "Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-02686)"},
- {"VUID-vkCmdDrawIndexed-None-02687", "Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-02687)"},
- {"VUID-vkCmdDrawIndexed-None-02690", "If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-02690)"},
- {"VUID-vkCmdDrawIndexed-None-02691", "If a VkImageView is accessed using atomic operations as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-02691)"},
- {"VUID-vkCmdDrawIndexed-None-02692", "If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-02692)"},
- {"VUID-vkCmdDrawIndexed-None-02693", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-02693)"},
- {"VUID-vkCmdDrawIndexed-None-02697", "For each set n that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-02697)"},
- {"VUID-vkCmdDrawIndexed-None-02698", "For each push constant that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a push constant value must have been set for the same pipeline bind point, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-02698)"},
- {"VUID-vkCmdDrawIndexed-None-02699", "Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the VkPipeline bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-02699)"},
- {"VUID-vkCmdDrawIndexed-None-02700", "A valid pipeline must be bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-02700)"},
- {"VUID-vkCmdDrawIndexed-None-02702", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-02702)"},
- {"VUID-vkCmdDrawIndexed-None-02703", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-02703)"},
- {"VUID-vkCmdDrawIndexed-None-02704", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-02704)"},
- {"VUID-vkCmdDrawIndexed-None-02705", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a uniform buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-02705)"},
- {"VUID-vkCmdDrawIndexed-None-02706", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a storage buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-02706)"},
- {"VUID-vkCmdDrawIndexed-None-02720", "All vertex input bindings accessed via vertex input variables declared in the vertex shader entry point's interface must have valid buffers bound (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-02720)"},
- {"VUID-vkCmdDrawIndexed-None-02721", "For a given vertex buffer binding, any attribute data fetched must be entirely contained within the corresponding vertex buffer binding, as described in Vertex Input Description (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-02721)"},
- {"VUID-vkCmdDrawIndexed-commandBuffer-02701", "If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set for commandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-commandBuffer-02701)"},
- {"VUID-vkCmdDrawIndexed-commandBuffer-02707", "If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-commandBuffer-02707)"},
- {"VUID-vkCmdDrawIndexed-commandBuffer-02712", "If commandBuffer is a protected command buffer, any resource written to by the VkPipeline object bound to the pipeline bind point used by this command must not be an unprotected resource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-commandBuffer-02712)"},
- {"VUID-vkCmdDrawIndexed-commandBuffer-02713", "If commandBuffer is a protected command buffer, pipeline stages other than the framebuffer-space and compute stages in the VkPipeline object bound to the pipeline bind point must not write to any resource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-commandBuffer-02713)"},
+ {"VUID-vkCmdDraw-sampleLocationsEnable-01512", "If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-sampleLocationsEnable-01512)"},
+ {"VUID-vkCmdDraw-subpass-00436", "The subpass index of the current render pass must be equal to the subpass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-subpass-00436)"},
+ {"VUID-vkCmdDrawIndexed-None-00456", "For each set n that is statically used by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS, a descriptor set must have been bound to n at VK_PIPELINE_BIND_POINT_GRAPHICS, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-00456)"},
+ {"VUID-vkCmdDrawIndexed-None-00457", "For each push constant that is statically used by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS, a push constant value must have been set for VK_PIPELINE_BIND_POINT_GRAPHICS, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-00457)"},
+ {"VUID-vkCmdDrawIndexed-None-00458", "Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the bound VkPipeline object, specified via vkCmdBindPipeline (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-00458)"},
+ {"VUID-vkCmdDrawIndexed-None-00459", "All vertex input bindings accessed via vertex input variables declared in the vertex shader entry point's interface must have valid buffers bound (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-00459)"},
+ {"VUID-vkCmdDrawIndexed-None-00460", "For a given vertex buffer binding, any attribute data fetched must be entirely contained within the corresponding vertex buffer binding, as described in Vertex Input Description (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-00460)"},
+ {"VUID-vkCmdDrawIndexed-None-00461", "A valid graphics pipeline must be bound to the current command buffer with VK_PIPELINE_BIND_POINT_GRAPHICS (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-00461)"},
+ {"VUID-vkCmdDrawIndexed-None-00462", "If the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS requires any dynamic state, that state must have been set on the current command buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-00462)"},
+ {"VUID-vkCmdDrawIndexed-None-00464", "Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-00464)"},
+ {"VUID-vkCmdDrawIndexed-None-00465", "If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-00465)"},
+ {"VUID-vkCmdDrawIndexed-None-00466", "If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-00466)"},
+ {"VUID-vkCmdDrawIndexed-None-00467", "If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-00467)"},
+ {"VUID-vkCmdDrawIndexed-None-00468", "If the robust buffer access feature is not enabled, and any shader stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS accesses a uniform buffer, it must not access values outside of the range of that buffer specified in the bound descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-00468)"},
+ {"VUID-vkCmdDrawIndexed-None-00469", "If the robust buffer access feature is not enabled, and any shader stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS accesses a storage buffer, it must not access values outside of the range of that buffer specified in the bound descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-00469)"},
+ {"VUID-vkCmdDrawIndexed-None-00472", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-00472)"},
+ {"VUID-vkCmdDrawIndexed-None-01500", "Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-01500)"},
+ {"VUID-vkCmdDrawIndexed-None-02011", "If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-02011)"},
+ {"VUID-vkCmdDrawIndexed-None-02012", "If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-02012)"},
+ {"VUID-vkCmdDrawIndexed-commandBuffer-01853", "If commandBuffer is an unprotected command buffer, and any pipeline stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS reads from or writes to any image or buffer, that image or buffer must not be a protected image or protected buffer. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-commandBuffer-01853)"},
+ {"VUID-vkCmdDrawIndexed-commandBuffer-01854", "If commandBuffer is a protected command buffer, and any pipeline stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS writes to any image or buffer, that image or buffer must not be an unprotected image or unprotected buffer. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-commandBuffer-01854)"},
+ {"VUID-vkCmdDrawIndexed-commandBuffer-01855", "If commandBuffer is a protected command buffer, and any pipeline stage other than the framebuffer-space pipeline stages in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS reads from or writes to any image or buffer, the image or buffer must not be a protected image or protected buffer. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-commandBuffer-01855)"},
{"VUID-vkCmdDrawIndexed-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-commandBuffer-cmdpool)"},
{"VUID-vkCmdDrawIndexed-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-commandBuffer-parameter)"},
{"VUID-vkCmdDrawIndexed-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-commandBuffer-recording)"},
- {"VUID-vkCmdDrawIndexed-filterCubic-02694", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-filterCubic-02694)"},
- {"VUID-vkCmdDrawIndexed-filterCubicMinmax-02695", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN_EXT or VK_SAMPLER_REDUCTION_MODE_MAX_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-filterCubicMinmax-02695)"},
- {"VUID-vkCmdDrawIndexed-flags-02696", "Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-flags-02696)"},
+ {"VUID-vkCmdDrawIndexed-filterCubic-02615", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-filterCubic-02615)"},
+ {"VUID-vkCmdDrawIndexed-filterCubicMinmax-02616", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN_EXT or VK_SAMPLER_REDUCTION_MODE_MAX_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-filterCubicMinmax-02616)"},
+ {"VUID-vkCmdDrawIndexed-flags-02043", "Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-flags-02043)"},
{"VUID-vkCmdDrawIndexed-indexSize-00463", "(indexSize * (firstIndex + indexCount) + offset) must be less than or equal to the size of the bound index buffer, with indexSize being based on the type specified by indexType, where the index buffer, indexType, and offset are specified via vkCmdBindIndexBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-indexSize-00463)"},
- {"VUID-vkCmdDrawIndexed-maxMultiviewInstanceIndex-02688", "If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-maxMultiviewInstanceIndex-02688)"},
- {"VUID-vkCmdDrawIndexed-renderPass-02684", "The current render pass must be compatible with the renderPass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-renderPass-02684)"},
+ {"VUID-vkCmdDrawIndexed-maxMultiviewInstanceIndex-00473", "If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-maxMultiviewInstanceIndex-00473)"},
+ {"VUID-vkCmdDrawIndexed-renderPass-00454", "The current render pass must be compatible with the renderPass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-renderPass-00454)"},
{"VUID-vkCmdDrawIndexed-renderpass", "This command must only be called inside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-renderpass)"},
- {"VUID-vkCmdDrawIndexed-sampleLocationsEnable-02689", "If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-sampleLocationsEnable-02689)"},
- {"VUID-vkCmdDrawIndexed-subpass-02685", "The subpass index of the current render pass must be equal to the subpass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-subpass-02685)"},
- {"VUID-vkCmdDrawIndexedIndirect-None-02686", "Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-02686)"},
- {"VUID-vkCmdDrawIndexedIndirect-None-02687", "Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-02687)"},
- {"VUID-vkCmdDrawIndexedIndirect-None-02690", "If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-02690)"},
- {"VUID-vkCmdDrawIndexedIndirect-None-02691", "If a VkImageView is accessed using atomic operations as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-02691)"},
- {"VUID-vkCmdDrawIndexedIndirect-None-02692", "If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-02692)"},
- {"VUID-vkCmdDrawIndexedIndirect-None-02693", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-02693)"},
- {"VUID-vkCmdDrawIndexedIndirect-None-02697", "For each set n that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-02697)"},
- {"VUID-vkCmdDrawIndexedIndirect-None-02698", "For each push constant that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a push constant value must have been set for the same pipeline bind point, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-02698)"},
- {"VUID-vkCmdDrawIndexedIndirect-None-02699", "Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the VkPipeline bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-02699)"},
- {"VUID-vkCmdDrawIndexedIndirect-None-02700", "A valid pipeline must be bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-02700)"},
- {"VUID-vkCmdDrawIndexedIndirect-None-02702", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-02702)"},
- {"VUID-vkCmdDrawIndexedIndirect-None-02703", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-02703)"},
- {"VUID-vkCmdDrawIndexedIndirect-None-02704", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-02704)"},
- {"VUID-vkCmdDrawIndexedIndirect-None-02705", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a uniform buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-02705)"},
- {"VUID-vkCmdDrawIndexedIndirect-None-02706", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a storage buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-02706)"},
- {"VUID-vkCmdDrawIndexedIndirect-None-02720", "All vertex input bindings accessed via vertex input variables declared in the vertex shader entry point's interface must have valid buffers bound (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-02720)"},
- {"VUID-vkCmdDrawIndexedIndirect-None-02721", "For a given vertex buffer binding, any attribute data fetched must be entirely contained within the corresponding vertex buffer binding, as described in Vertex Input Description (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-02721)"},
- {"VUID-vkCmdDrawIndexedIndirect-buffer-02708", "If buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-buffer-02708)"},
- {"VUID-vkCmdDrawIndexedIndirect-buffer-02709", "buffer must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-buffer-02709)"},
+ {"VUID-vkCmdDrawIndexed-sampleLocationsEnable-01513", "If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-sampleLocationsEnable-01513)"},
+ {"VUID-vkCmdDrawIndexed-subpass-00455", "The subpass index of the current render pass must be equal to the subpass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-subpass-00455)"},
+ {"VUID-vkCmdDrawIndexedIndirect-None-00533", "For each set n that is statically used by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS, a descriptor set must have been bound to n at VK_PIPELINE_BIND_POINT_GRAPHICS, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-00533)"},
+ {"VUID-vkCmdDrawIndexedIndirect-None-00534", "For each push constant that is statically used by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS, a push constant value must have been set for VK_PIPELINE_BIND_POINT_GRAPHICS, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-00534)"},
+ {"VUID-vkCmdDrawIndexedIndirect-None-00535", "Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the bound VkPipeline object, specified via vkCmdBindPipeline (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-00535)"},
+ {"VUID-vkCmdDrawIndexedIndirect-None-00536", "All vertex input bindings accessed via vertex input variables declared in the vertex shader entry point's interface must have valid buffers bound (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-00536)"},
+ {"VUID-vkCmdDrawIndexedIndirect-None-00537", "A valid graphics pipeline must be bound to the current command buffer with VK_PIPELINE_BIND_POINT_GRAPHICS (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-00537)"},
+ {"VUID-vkCmdDrawIndexedIndirect-None-00538", "If the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS requires any dynamic state, that state must have been set on the current command buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-00538)"},
+ {"VUID-vkCmdDrawIndexedIndirect-None-00542", "Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-00542)"},
+ {"VUID-vkCmdDrawIndexedIndirect-None-00543", "If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-00543)"},
+ {"VUID-vkCmdDrawIndexedIndirect-None-00544", "If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-00544)"},
+ {"VUID-vkCmdDrawIndexedIndirect-None-00545", "If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-00545)"},
+ {"VUID-vkCmdDrawIndexedIndirect-None-00546", "If the robust buffer access feature is not enabled, and any shader stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS accesses a uniform buffer, it must not access values outside of the range of that buffer specified in the bound descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-00546)"},
+ {"VUID-vkCmdDrawIndexedIndirect-None-00547", "If the robust buffer access feature is not enabled, and any shader stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS accesses a storage buffer, it must not access values outside of the range of that buffer specified in the bound descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-00547)"},
+ {"VUID-vkCmdDrawIndexedIndirect-None-00550", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-00550)"},
+ {"VUID-vkCmdDrawIndexedIndirect-None-01503", "Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-01503)"},
+ {"VUID-vkCmdDrawIndexedIndirect-None-02018", "If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-02018)"},
+ {"VUID-vkCmdDrawIndexedIndirect-None-02019", "If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-02019)"},
+ {"VUID-vkCmdDrawIndexedIndirect-buffer-00526", "If buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-buffer-00526)"},
+ {"VUID-vkCmdDrawIndexedIndirect-buffer-01665", "buffer must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-buffer-01665)"},
{"VUID-vkCmdDrawIndexedIndirect-buffer-parameter", "buffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-buffer-parameter)"},
- {"VUID-vkCmdDrawIndexedIndirect-commandBuffer-02701", "If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set for commandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-commandBuffer-02701)"},
- {"VUID-vkCmdDrawIndexedIndirect-commandBuffer-02707", "If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-commandBuffer-02707)"},
- {"VUID-vkCmdDrawIndexedIndirect-commandBuffer-02711", "commandBuffer must not be a protected command buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-commandBuffer-02711)"},
+ {"VUID-vkCmdDrawIndexedIndirect-commandBuffer-01862", "If commandBuffer is an unprotected command buffer, and any pipeline stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS reads from or writes to any image or buffer, that image or buffer must not be a protected image or protected buffer. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-commandBuffer-01862)"},
+ {"VUID-vkCmdDrawIndexedIndirect-commandBuffer-02643", "commandBuffer must not be a protected command buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-commandBuffer-02643)"},
{"VUID-vkCmdDrawIndexedIndirect-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-commandBuffer-cmdpool)"},
{"VUID-vkCmdDrawIndexedIndirect-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-commandBuffer-parameter)"},
{"VUID-vkCmdDrawIndexedIndirect-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-commandBuffer-recording)"},
{"VUID-vkCmdDrawIndexedIndirect-commonparent", "Both of buffer, and commandBuffer must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-commonparent)"},
{"VUID-vkCmdDrawIndexedIndirect-drawCount-00528", "If drawCount is greater than 1, stride must be a multiple of 4 and must be greater than or equal to sizeof(VkDrawIndexedIndirectCommand) (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-drawCount-00528)"},
+ {"VUID-vkCmdDrawIndexedIndirect-drawCount-00529", "If the multi-draw indirect feature is not enabled, drawCount must be 0 or 1 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-drawCount-00529)"},
{"VUID-vkCmdDrawIndexedIndirect-drawCount-00539", "If drawCount is equal to 1, (offset + sizeof(VkDrawIndexedIndirectCommand)) must be less than or equal to the size of buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-drawCount-00539)"},
{"VUID-vkCmdDrawIndexedIndirect-drawCount-00540", "If drawCount is greater than 1, (stride {times} (drawCount - 1) + offset + sizeof(VkDrawIndexedIndirectCommand)) must be less than or equal to the size of buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-drawCount-00540)"},
- {"VUID-vkCmdDrawIndexedIndirect-drawCount-02718", "If the multi-draw indirect feature is not enabled, drawCount must be 0 or 1 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-drawCount-02718)"},
- {"VUID-vkCmdDrawIndexedIndirect-drawCount-02719", "drawCount must be less than or equal to VkPhysicalDeviceLimits::maxDrawIndirectCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-drawCount-02719)"},
- {"VUID-vkCmdDrawIndexedIndirect-filterCubic-02694", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-filterCubic-02694)"},
- {"VUID-vkCmdDrawIndexedIndirect-filterCubicMinmax-02695", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN_EXT or VK_SAMPLER_REDUCTION_MODE_MAX_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-filterCubicMinmax-02695)"},
+ {"VUID-vkCmdDrawIndexedIndirect-drawCount-00541", "drawCount must be less than or equal to VkPhysicalDeviceLimits::maxDrawIndirectCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-drawCount-00541)"},
+ {"VUID-vkCmdDrawIndexedIndirect-filterCubic-02621", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-filterCubic-02621)"},
+ {"VUID-vkCmdDrawIndexedIndirect-filterCubicMinmax-02622", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN_EXT or VK_SAMPLER_REDUCTION_MODE_MAX_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-filterCubicMinmax-02622)"},
{"VUID-vkCmdDrawIndexedIndirect-firstInstance-00530", "If the drawIndirectFirstInstance feature is not enabled, all the firstInstance members of the VkDrawIndexedIndirectCommand structures accessed by this command must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-firstInstance-00530)"},
- {"VUID-vkCmdDrawIndexedIndirect-flags-02696", "Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-flags-02696)"},
- {"VUID-vkCmdDrawIndexedIndirect-maxMultiviewInstanceIndex-02688", "If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-maxMultiviewInstanceIndex-02688)"},
- {"VUID-vkCmdDrawIndexedIndirect-offset-02710", "offset must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-offset-02710)"},
- {"VUID-vkCmdDrawIndexedIndirect-renderPass-02684", "The current render pass must be compatible with the renderPass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-renderPass-02684)"},
+ {"VUID-vkCmdDrawIndexedIndirect-flags-02047", "Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-flags-02047)"},
+ {"VUID-vkCmdDrawIndexedIndirect-maxMultiviewInstanceIndex-00551", "If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-maxMultiviewInstanceIndex-00551)"},
+ {"VUID-vkCmdDrawIndexedIndirect-offset-00527", "offset must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-offset-00527)"},
+ {"VUID-vkCmdDrawIndexedIndirect-renderPass-00531", "The current render pass must be compatible with the renderPass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-renderPass-00531)"},
{"VUID-vkCmdDrawIndexedIndirect-renderpass", "This command must only be called inside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-renderpass)"},
- {"VUID-vkCmdDrawIndexedIndirect-sampleLocationsEnable-02689", "If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-sampleLocationsEnable-02689)"},
- {"VUID-vkCmdDrawIndexedIndirect-subpass-02685", "The subpass index of the current render pass must be equal to the subpass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-subpass-02685)"},
- {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-02686", "Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-02686)"},
- {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-02687", "Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-02687)"},
- {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-02690", "If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-02690)"},
- {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-02691", "If a VkImageView is accessed using atomic operations as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-02691)"},
- {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-02692", "If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-02692)"},
- {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-02693", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-02693)"},
- {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-02697", "For each set n that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-02697)"},
- {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-02698", "For each push constant that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a push constant value must have been set for the same pipeline bind point, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-02698)"},
- {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-02699", "Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the VkPipeline bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-02699)"},
- {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-02700", "A valid pipeline must be bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-02700)"},
- {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-02702", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-02702)"},
- {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-02703", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-02703)"},
- {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-02704", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-02704)"},
- {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-02705", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a uniform buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-02705)"},
- {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-02706", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a storage buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-02706)"},
- {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-02720", "All vertex input bindings accessed via vertex input variables declared in the vertex shader entry point's interface must have valid buffers bound (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-02720)"},
- {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-02721", "For a given vertex buffer binding, any attribute data fetched must be entirely contained within the corresponding vertex buffer binding, as described in Vertex Input Description (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-02721)"},
- {"VUID-vkCmdDrawIndexedIndirectCountKHR-buffer-02708", "If buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-buffer-02708)"},
- {"VUID-vkCmdDrawIndexedIndirectCountKHR-buffer-02709", "buffer must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-buffer-02709)"},
+ {"VUID-vkCmdDrawIndexedIndirect-sampleLocationsEnable-01516", "If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-sampleLocationsEnable-01516)"},
+ {"VUID-vkCmdDrawIndexedIndirect-subpass-00532", "The subpass index of the current render pass must be equal to the subpass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-subpass-00532)"},
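// [Editor's illustrative sketch -- not part of the generated VUID table or of this diff.]
// A minimal, hypothetical recording of vkCmdDrawIndexedIndirect shaped to satisfy the
// entries quoted above: the indirect buffer is created with
// VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT (buffer-01665) and bound completely and
// contiguously to one VkDeviceMemory (buffer-00526), offset is a multiple of 4
// (offset-00527), drawCount falls back to 1 when the multiDrawIndirect feature is
// absent (drawCount-00529) and is clamped to maxDrawIndirectCount (drawCount-00541),
// and stride meets drawCount-00528. The names device, cmd, features, limits,
// graphicsPipeline and maxDraws are assumed to exist; render pass and descriptor
// state are assumed to be set up as the other entries require.
//
//   VkBufferCreateInfo ci = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO};
//   ci.size        = maxDraws * sizeof(VkDrawIndexedIndirectCommand);
//   ci.usage       = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT;                     // buffer-01665
//   ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
//   VkBuffer indirectBuffer = VK_NULL_HANDLE;
//   vkCreateBuffer(device, &ci, nullptr, &indirectBuffer);
//   // ...allocate memory and vkBindBufferMemory for the whole range (buffer-00526)...
//
//   vkCmdBindPipeline(cmd, VK_PIPELINE_BIND_POINT_GRAPHICS, graphicsPipeline); // None-00537
//   uint32_t drawCount = features.multiDrawIndirect ? maxDraws : 1;            // drawCount-00529
//   if (drawCount > limits.maxDrawIndirectCount) drawCount = limits.maxDrawIndirectCount; // drawCount-00541
//   vkCmdDrawIndexedIndirect(cmd, indirectBuffer,
//                            /*offset*/ 0,                                     // offset-00527: multiple of 4
//                            drawCount,
//                            sizeof(VkDrawIndexedIndirectCommand));            // drawCount-00528: stride
// [End of editor's illustrative sketch.]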
+ {"VUID-vkCmdDrawIndexedIndirectCountAMD-None-00562", "For each set n that is statically used by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS, a descriptor set must have been bound to n at VK_PIPELINE_BIND_POINT_GRAPHICS, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountAMD-None-00562)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountAMD-None-00563", "For each push constant that is statically used by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS, a push constant value must have been set for VK_PIPELINE_BIND_POINT_GRAPHICS, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountAMD-None-00563)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountAMD-None-00564", "Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the bound VkPipeline object, specified via vkCmdBindPipeline (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountAMD-None-00564)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountAMD-None-00565", "All vertex input bindings accessed via vertex input variables declared in the vertex shader entry point's interface must have valid buffers bound (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountAMD-None-00565)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountAMD-None-00566", "A valid graphics pipeline must be bound to the current command buffer with VK_PIPELINE_BIND_POINT_GRAPHICS (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountAMD-None-00566)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountAMD-None-00567", "If the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS requires any dynamic state, that state must have been set on the current command buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountAMD-None-00567)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountAMD-None-00571", "Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountAMD-None-00571)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountAMD-None-00572", "If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountAMD-None-00572)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountAMD-None-00573", "If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountAMD-None-00573)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountAMD-None-00574", "If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountAMD-None-00574)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountAMD-None-00575", "If the robust buffer access feature is not enabled, and any shader stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS accesses a uniform buffer, it must not access values outside of the range of that buffer specified in the bound descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountAMD-None-00575)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountAMD-None-00576", "If the robust buffer access feature is not enabled, and any shader stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS accesses a storage buffer, it must not access values outside of the range of that buffer specified in the bound descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountAMD-None-00576)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountAMD-None-01504", "Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountAMD-None-01504)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountAMD-None-02022", "If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountAMD-None-02022)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountAMD-buffer-01666", "If buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountAMD-buffer-01666)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountAMD-buffer-01667", "buffer must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountAMD-buffer-01667)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountAMD-buffer-parameter", "buffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountAMD-buffer-parameter)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountAMD-commandBuffer-01865", "If commandBuffer is an unprotected command buffer, and any pipeline stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS reads from or writes to any image or buffer, that image or buffer must not be a protected image or protected buffer. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountAMD-commandBuffer-01865)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountAMD-commandBuffer-02645", "commandBuffer must not be a protected command buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountAMD-commandBuffer-02645)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountAMD-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountAMD-commandBuffer-cmdpool)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountAMD-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountAMD-commandBuffer-parameter)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountAMD-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountAMD-commandBuffer-recording)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountAMD-commonparent", "Each of buffer, commandBuffer, and countBuffer must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountAMD-commonparent)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountAMD-countBuffer-00568", "If count stored in countBuffer is equal to 1, (offset + sizeof(VkDrawIndexedIndirectCommand)) must be less than or equal to the size of buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountAMD-countBuffer-00568)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountAMD-countBuffer-00569", "If count stored in countBuffer is greater than 1, (stride × (drawCount - 1) + offset + sizeof(VkDrawIndexedIndirectCommand)) must be less than or equal to the size of buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountAMD-countBuffer-00569)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountAMD-countBuffer-01668", "If countBuffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountAMD-countBuffer-01668)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountAMD-countBuffer-01669", "countBuffer must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountAMD-countBuffer-01669)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountAMD-countBuffer-parameter", "countBuffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountAMD-countBuffer-parameter)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountAMD-countBufferOffset-00556", "countBufferOffset must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountAMD-countBufferOffset-00556)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountAMD-drawCount-00570", "drawCount must be less than or equal to VkPhysicalDeviceLimits::maxDrawIndirectCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountAMD-drawCount-00570)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountAMD-firstInstance-00559", "If the drawIndirectFirstInstance feature is not enabled, all the firstInstance members of the VkDrawIndexedIndirectCommand structures accessed by this command must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountAMD-firstInstance-00559)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountAMD-flags-02049", "Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountAMD-flags-02049)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountAMD-maxDrawCount-00558", "If maxDrawCount is greater than or equal to 1, (stride × (maxDrawCount - 1) + offset + sizeof(VkDrawIndexedIndirectCommand)) must be less than or equal to the size of buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountAMD-maxDrawCount-00558)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountAMD-maxMultiviewInstanceIndex-00578", "If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountAMD-maxMultiviewInstanceIndex-00578)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountAMD-offset-00555", "offset must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountAMD-offset-00555)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountAMD-renderPass-00560", "The current render pass must be compatible with the renderPass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountAMD-renderPass-00560)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountAMD-renderpass", "This command must only be called inside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountAMD-renderpass)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountAMD-sampleLocationsEnable-01517", "If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountAMD-sampleLocationsEnable-01517)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountAMD-stride-00557", "stride must be a multiple of 4 and must be greater than or equal to sizeof(VkDrawIndexedIndirectCommand) (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountAMD-stride-00557)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountAMD-subpass-00561", "The subpass index of the current render pass must be equal to the subpass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountAMD-subpass-00561)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-02020", "If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-02020)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-02021", "If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-02021)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-03147", "For each set n that is statically used by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS, a descriptor set must have been bound to n at VK_PIPELINE_BIND_POINT_GRAPHICS, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-03147)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-03148", "For each push constant that is statically used by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS, a push constant value must have been set for VK_PIPELINE_BIND_POINT_GRAPHICS, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-03148)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-03149", "Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the bound VkPipeline object, specified via vkCmdBindPipeline (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-03149)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-03150", "All vertex input bindings accessed via vertex input variables declared in the vertex shader entry point's interface must have valid buffers bound (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-03150)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-03151", "A valid graphics pipeline must be bound to the current command buffer with VK_PIPELINE_BIND_POINT_GRAPHICS (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-03151)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-03152", "If the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS requires any dynamic state, that state must have been set on the current command buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-03152)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-03156", "Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-03156)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-03157", "If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-03157)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-03158", "If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-03158)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-03159", "If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-03159)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-03160", "If the robust buffer access feature is not enabled, and any shader stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS accesses a uniform buffer, it must not access values outside of the range of that buffer specified in the bound descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-03160)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-03161", "If the robust buffer access feature is not enabled, and any shader stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS accesses a storage buffer, it must not access values outside of the range of that buffer specified in the bound descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-03161)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-03163", "Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-03163)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-03173", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-03173)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountKHR-buffer-03136", "If buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-buffer-03136)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountKHR-buffer-03137", "buffer must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-buffer-03137)"},
{"VUID-vkCmdDrawIndexedIndirectCountKHR-buffer-parameter", "buffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-buffer-parameter)"},
- {"VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-02701", "If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set for commandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-02701)"},
- {"VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-02707", "If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-02707)"},
- {"VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-02711", "commandBuffer must not be a protected command buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-02711)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-02644", "commandBuffer must not be a protected command buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-02644)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-03165", "If commandBuffer is an unprotected command buffer, and any pipeline stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS reads from or writes to any image or buffer, that image or buffer must not be a protected image or protected buffer. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-03165)"},
{"VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-cmdpool)"},
{"VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-parameter)"},
{"VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-recording)"},
{"VUID-vkCmdDrawIndexedIndirectCountKHR-commonparent", "Each of buffer, commandBuffer, and countBuffer must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-commonparent)"},
- {"VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-02714", "If countBuffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-02714)"},
- {"VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-02715", "countBuffer must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-02715)"},
- {"VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-02717", "The count stored in countBuffer must be less than or equal to VkPhysicalDeviceLimits::maxDrawIndirectCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-02717)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-03138", "If countBuffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-03138)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-03139", "countBuffer must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-03139)"},
{"VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-03153", "If count stored in countBuffer is equal to 1, (offset + sizeof(VkDrawIndexedIndirectCommand)) must be less than or equal to the size of buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-03153)"},
{"VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-03154", "If count stored in countBuffer is greater than 1, (stride {times} (drawCount - 1) + offset + sizeof(VkDrawIndexedIndirectCommand)) must be less than or equal to the size of buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-03154)"},
{"VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-parameter", "countBuffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-parameter)"},
- {"VUID-vkCmdDrawIndexedIndirectCountKHR-countBufferOffset-02716", "countBufferOffset must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-countBufferOffset-02716)"},
- {"VUID-vkCmdDrawIndexedIndirectCountKHR-filterCubic-02694", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-filterCubic-02694)"},
- {"VUID-vkCmdDrawIndexedIndirectCountKHR-filterCubicMinmax-02695", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN_EXT or VK_SAMPLER_REDUCTION_MODE_MAX_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-filterCubicMinmax-02695)"},
- {"VUID-vkCmdDrawIndexedIndirectCountKHR-flags-02696", "Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-flags-02696)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountKHR-countBufferOffset-03141", "countBufferOffset must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-countBufferOffset-03141)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountKHR-drawCount-03155", "drawCount must be less than or equal to VkPhysicalDeviceLimits::maxDrawIndirectCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-drawCount-03155)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountKHR-filterCubic-02623", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-filterCubic-02623)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountKHR-filterCubicMinmax-02624", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN_EXT or VK_SAMPLER_REDUCTION_MODE_MAX_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-filterCubicMinmax-02624)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountKHR-firstInstance-03144", "If the drawIndirectFirstInstance feature is not enabled, all the firstInstance members of the VkDrawIndexedIndirectCommand structures accessed by this command must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-firstInstance-03144)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountKHR-flags-02048", "Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-flags-02048)"},
{"VUID-vkCmdDrawIndexedIndirectCountKHR-maxDrawCount-03143", "If maxDrawCount is greater than or equal to 1, (stride {times} (maxDrawCount - 1) + offset + sizeof(VkDrawIndexedIndirectCommand)) must be less than or equal to the size of buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-maxDrawCount-03143)"},
- {"VUID-vkCmdDrawIndexedIndirectCountKHR-maxMultiviewInstanceIndex-02688", "If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-maxMultiviewInstanceIndex-02688)"},
- {"VUID-vkCmdDrawIndexedIndirectCountKHR-offset-02710", "offset must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-offset-02710)"},
- {"VUID-vkCmdDrawIndexedIndirectCountKHR-renderPass-02684", "The current render pass must be compatible with the renderPass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-renderPass-02684)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountKHR-maxMultiviewInstanceIndex-03164", "If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-maxMultiviewInstanceIndex-03164)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountKHR-offset-03140", "offset must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-offset-03140)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountKHR-renderPass-03145", "The current render pass must be compatible with the renderPass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-renderPass-03145)"},
{"VUID-vkCmdDrawIndexedIndirectCountKHR-renderpass", "This command must only be called inside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-renderpass)"},
- {"VUID-vkCmdDrawIndexedIndirectCountKHR-sampleLocationsEnable-02689", "If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-sampleLocationsEnable-02689)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountKHR-sampleLocationsEnable-03174", "If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-sampleLocationsEnable-03174)"},
{"VUID-vkCmdDrawIndexedIndirectCountKHR-stride-03142", "stride must be a multiple of 4 and must be greater than or equal to sizeof(VkDrawIndexedIndirectCommand) (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-stride-03142)"},
- {"VUID-vkCmdDrawIndexedIndirectCountKHR-subpass-02685", "The subpass index of the current render pass must be equal to the subpass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-subpass-02685)"},
- {"VUID-vkCmdDrawIndirect-None-02686", "Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-02686)"},
- {"VUID-vkCmdDrawIndirect-None-02687", "Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-02687)"},
- {"VUID-vkCmdDrawIndirect-None-02690", "If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-02690)"},
- {"VUID-vkCmdDrawIndirect-None-02691", "If a VkImageView is accessed using atomic operations as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-02691)"},
- {"VUID-vkCmdDrawIndirect-None-02692", "If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-02692)"},
- {"VUID-vkCmdDrawIndirect-None-02693", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-02693)"},
- {"VUID-vkCmdDrawIndirect-None-02697", "For each set n that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-02697)"},
- {"VUID-vkCmdDrawIndirect-None-02698", "For each push constant that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a push constant value must have been set for the same pipeline bind point, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-02698)"},
- {"VUID-vkCmdDrawIndirect-None-02699", "Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the VkPipeline bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-02699)"},
- {"VUID-vkCmdDrawIndirect-None-02700", "A valid pipeline must be bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-02700)"},
- {"VUID-vkCmdDrawIndirect-None-02702", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-02702)"},
- {"VUID-vkCmdDrawIndirect-None-02703", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-02703)"},
- {"VUID-vkCmdDrawIndirect-None-02704", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-02704)"},
- {"VUID-vkCmdDrawIndirect-None-02705", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a uniform buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-02705)"},
- {"VUID-vkCmdDrawIndirect-None-02706", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a storage buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-02706)"},
- {"VUID-vkCmdDrawIndirect-None-02720", "All vertex input bindings accessed via vertex input variables declared in the vertex shader entry point's interface must have valid buffers bound (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-02720)"},
- {"VUID-vkCmdDrawIndirect-None-02721", "For a given vertex buffer binding, any attribute data fetched must be entirely contained within the corresponding vertex buffer binding, as described in Vertex Input Description (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-02721)"},
- {"VUID-vkCmdDrawIndirect-buffer-02708", "If buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-buffer-02708)"},
- {"VUID-vkCmdDrawIndirect-buffer-02709", "buffer must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-buffer-02709)"},
+ {"VUID-vkCmdDrawIndexedIndirectCountKHR-subpass-03146", "The subpass index of the current render pass must be equal to the subpass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-subpass-03146)"},
+ {"VUID-vkCmdDrawIndirect-None-00481", "For each set n that is statically used by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS, a descriptor set must have been bound to n at VK_PIPELINE_BIND_POINT_GRAPHICS, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-00481)"},
+ {"VUID-vkCmdDrawIndirect-None-00482", "For each push constant that is statically used by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS, a push constant value must have been set for VK_PIPELINE_BIND_POINT_GRAPHICS, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-00482)"},
+ {"VUID-vkCmdDrawIndirect-None-00483", "Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the bound VkPipeline object, specified via vkCmdBindPipeline (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-00483)"},
+ {"VUID-vkCmdDrawIndirect-None-00484", "All vertex input bindings accessed via vertex input variables declared in the vertex shader entry point's interface must have valid buffers bound (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-00484)"},
+ {"VUID-vkCmdDrawIndirect-None-00485", "A valid graphics pipeline must be bound to the current command buffer with VK_PIPELINE_BIND_POINT_GRAPHICS (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-00485)"},
+ {"VUID-vkCmdDrawIndirect-None-00486", "If the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS requires any dynamic state, that state must have been set on the current command buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-00486)"},
+ {"VUID-vkCmdDrawIndirect-None-00490", "Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-00490)"},
+ {"VUID-vkCmdDrawIndirect-None-00491", "If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-00491)"},
+ {"VUID-vkCmdDrawIndirect-None-00492", "If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-00492)"},
+ {"VUID-vkCmdDrawIndirect-None-00493", "If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-00493)"},
+ {"VUID-vkCmdDrawIndirect-None-00494", "If the robust buffer access feature is not enabled, and any shader stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS accesses a uniform buffer, it must not access values outside of the range of that buffer specified in the bound descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-00494)"},
+ {"VUID-vkCmdDrawIndirect-None-00495", "If the robust buffer access feature is not enabled, and any shader stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS accesses a storage buffer, it must not access values outside of the range of that buffer specified in the bound descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-00495)"},
+ {"VUID-vkCmdDrawIndirect-None-00498", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-00498)"},
+ {"VUID-vkCmdDrawIndirect-None-01501", "Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-01501)"},
+ {"VUID-vkCmdDrawIndirect-None-02013", "If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-02013)"},
+ {"VUID-vkCmdDrawIndirect-None-02014", "If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-02014)"},
+ {"VUID-vkCmdDrawIndirect-buffer-00474", "If buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-buffer-00474)"},
+ {"VUID-vkCmdDrawIndirect-buffer-01660", "buffer must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-buffer-01660)"},
{"VUID-vkCmdDrawIndirect-buffer-parameter", "buffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-buffer-parameter)"},
- {"VUID-vkCmdDrawIndirect-commandBuffer-02701", "If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set for commandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-commandBuffer-02701)"},
- {"VUID-vkCmdDrawIndirect-commandBuffer-02707", "If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-commandBuffer-02707)"},
- {"VUID-vkCmdDrawIndirect-commandBuffer-02711", "commandBuffer must not be a protected command buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-commandBuffer-02711)"},
+ {"VUID-vkCmdDrawIndirect-commandBuffer-01856", "If commandBuffer is an unprotected command buffer, and any pipeline stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS reads from or writes to any image or buffer, that image or buffer must not be a protected image or protected buffer. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-commandBuffer-01856)"},
+ {"VUID-vkCmdDrawIndirect-commandBuffer-02640", "commandBuffer must not be a protected command buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-commandBuffer-02640)"},
{"VUID-vkCmdDrawIndirect-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-commandBuffer-cmdpool)"},
{"VUID-vkCmdDrawIndirect-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-commandBuffer-parameter)"},
{"VUID-vkCmdDrawIndirect-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-commandBuffer-recording)"},
{"VUID-vkCmdDrawIndirect-commonparent", "Both of buffer, and commandBuffer must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-commonparent)"},
{"VUID-vkCmdDrawIndirect-drawCount-00476", "If drawCount is greater than 1, stride must be a multiple of 4 and must be greater than or equal to sizeof(VkDrawIndirectCommand) (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-drawCount-00476)"},
+ {"VUID-vkCmdDrawIndirect-drawCount-00477", "If the multi-draw indirect feature is not enabled, drawCount must be 0 or 1 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-drawCount-00477)"},
{"VUID-vkCmdDrawIndirect-drawCount-00487", "If drawCount is equal to 1, (offset + sizeof(VkDrawIndirectCommand)) must be less than or equal to the size of buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-drawCount-00487)"},
{"VUID-vkCmdDrawIndirect-drawCount-00488", "If drawCount is greater than 1, (stride {times} (drawCount - 1) + offset + sizeof(VkDrawIndirectCommand)) must be less than or equal to the size of buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-drawCount-00488)"},
- {"VUID-vkCmdDrawIndirect-drawCount-02718", "If the multi-draw indirect feature is not enabled, drawCount must be 0 or 1 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-drawCount-02718)"},
- {"VUID-vkCmdDrawIndirect-drawCount-02719", "drawCount must be less than or equal to VkPhysicalDeviceLimits::maxDrawIndirectCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-drawCount-02719)"},
- {"VUID-vkCmdDrawIndirect-filterCubic-02694", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-filterCubic-02694)"},
- {"VUID-vkCmdDrawIndirect-filterCubicMinmax-02695", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN_EXT or VK_SAMPLER_REDUCTION_MODE_MAX_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-filterCubicMinmax-02695)"},
+ {"VUID-vkCmdDrawIndirect-drawCount-00489", "drawCount must be less than or equal to VkPhysicalDeviceLimits::maxDrawIndirectCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-drawCount-00489)"},
+ {"VUID-vkCmdDrawIndirect-filterCubic-02617", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-filterCubic-02617)"},
+ {"VUID-vkCmdDrawIndirect-filterCubicMinmax-02618", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN_EXT or VK_SAMPLER_REDUCTION_MODE_MAX_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-filterCubicMinmax-02618)"},
{"VUID-vkCmdDrawIndirect-firstInstance-00478", "If the drawIndirectFirstInstance feature is not enabled, all the firstInstance members of the VkDrawIndirectCommand structures accessed by this command must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-firstInstance-00478)"},
- {"VUID-vkCmdDrawIndirect-flags-02696", "Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-flags-02696)"},
- {"VUID-vkCmdDrawIndirect-maxMultiviewInstanceIndex-02688", "If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-maxMultiviewInstanceIndex-02688)"},
- {"VUID-vkCmdDrawIndirect-offset-02710", "offset must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-offset-02710)"},
- {"VUID-vkCmdDrawIndirect-renderPass-02684", "The current render pass must be compatible with the renderPass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-renderPass-02684)"},
+ {"VUID-vkCmdDrawIndirect-flags-02044", "Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-flags-02044)"},
+ {"VUID-vkCmdDrawIndirect-maxMultiviewInstanceIndex-00499", "If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-maxMultiviewInstanceIndex-00499)"},
+ {"VUID-vkCmdDrawIndirect-offset-00475", "offset must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-offset-00475)"},
+ {"VUID-vkCmdDrawIndirect-renderPass-00479", "The current render pass must be compatible with the renderPass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-renderPass-00479)"},
{"VUID-vkCmdDrawIndirect-renderpass", "This command must only be called inside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-renderpass)"},
- {"VUID-vkCmdDrawIndirect-sampleLocationsEnable-02689", "If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-sampleLocationsEnable-02689)"},
- {"VUID-vkCmdDrawIndirect-subpass-02685", "The subpass index of the current render pass must be equal to the subpass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-subpass-02685)"},
- {"VUID-vkCmdDrawIndirectByteCountEXT-None-02686", "Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02686)"},
- {"VUID-vkCmdDrawIndirectByteCountEXT-None-02687", "Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02687)"},
- {"VUID-vkCmdDrawIndirectByteCountEXT-None-02690", "If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02690)"},
- {"VUID-vkCmdDrawIndirectByteCountEXT-None-02691", "If a VkImageView is accessed using atomic operations as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02691)"},
- {"VUID-vkCmdDrawIndirectByteCountEXT-None-02692", "If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02692)"},
- {"VUID-vkCmdDrawIndirectByteCountEXT-None-02693", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02693)"},
- {"VUID-vkCmdDrawIndirectByteCountEXT-None-02697", "For each set n that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02697)"},
- {"VUID-vkCmdDrawIndirectByteCountEXT-None-02698", "For each push constant that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a push constant value must have been set for the same pipeline bind point, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02698)"},
- {"VUID-vkCmdDrawIndirectByteCountEXT-None-02699", "Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the VkPipeline bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02699)"},
- {"VUID-vkCmdDrawIndirectByteCountEXT-None-02700", "A valid pipeline must be bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02700)"},
- {"VUID-vkCmdDrawIndirectByteCountEXT-None-02702", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02702)"},
- {"VUID-vkCmdDrawIndirectByteCountEXT-None-02703", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02703)"},
- {"VUID-vkCmdDrawIndirectByteCountEXT-None-02704", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02704)"},
- {"VUID-vkCmdDrawIndirectByteCountEXT-None-02705", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a uniform buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02705)"},
- {"VUID-vkCmdDrawIndirectByteCountEXT-None-02706", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a storage buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02706)"},
- {"VUID-vkCmdDrawIndirectByteCountEXT-None-02720", "All vertex input bindings accessed via vertex input variables declared in the vertex shader entry point's interface must have valid buffers bound (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02720)"},
- {"VUID-vkCmdDrawIndirectByteCountEXT-None-02721", "For a given vertex buffer binding, any attribute data fetched must be entirely contained within the corresponding vertex buffer binding, as described in Vertex Input Description (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02721)"},
+ {"VUID-vkCmdDrawIndirect-sampleLocationsEnable-01514", "If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-sampleLocationsEnable-01514)"},
+ {"VUID-vkCmdDrawIndirect-subpass-00480", "The subpass index of the current render pass must be equal to the subpass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-subpass-00480)"},
+ {"VUID-vkCmdDrawIndirectByteCountEXT-None-02293", "For each set n that is statically used by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS, a descriptor set must have been bound to n at VK_PIPELINE_BIND_POINT_GRAPHICS, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02293)"},
+ {"VUID-vkCmdDrawIndirectByteCountEXT-None-02294", "For each push constant that is statically used by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS, a push constant value must have been set for VK_PIPELINE_BIND_POINT_GRAPHICS, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02294)"},
+ {"VUID-vkCmdDrawIndirectByteCountEXT-None-02295", "Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the bound VkPipeline object, specified via vkCmdBindPipeline (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02295)"},
+ {"VUID-vkCmdDrawIndirectByteCountEXT-None-02296", "All vertex input bindings accessed via vertex input variables declared in the vertex shader entry point's interface must have valid buffers bound (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02296)"},
+ {"VUID-vkCmdDrawIndirectByteCountEXT-None-02297", "For a given vertex buffer binding, any attribute data fetched must be entirely contained within the corresponding vertex buffer binding, as described in Vertex Input Description (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02297)"},
+ {"VUID-vkCmdDrawIndirectByteCountEXT-None-02298", "A valid graphics pipeline must be bound to the current command buffer with VK_PIPELINE_BIND_POINT_GRAPHICS (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02298)"},
+ {"VUID-vkCmdDrawIndirectByteCountEXT-None-02299", "If the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS requires any dynamic state, that state must have been set on the current command buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02299)"},
+ {"VUID-vkCmdDrawIndirectByteCountEXT-None-02300", "Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02300)"},
+ {"VUID-vkCmdDrawIndirectByteCountEXT-None-02301", "If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02301)"},
+ {"VUID-vkCmdDrawIndirectByteCountEXT-None-02302", "If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02302)"},
+ {"VUID-vkCmdDrawIndirectByteCountEXT-None-02303", "If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02303)"},
+ {"VUID-vkCmdDrawIndirectByteCountEXT-None-02304", "Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02304)"},
+ {"VUID-vkCmdDrawIndirectByteCountEXT-None-02305", "If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02305)"},
+ {"VUID-vkCmdDrawIndirectByteCountEXT-None-02306", "If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02306)"},
+ {"VUID-vkCmdDrawIndirectByteCountEXT-None-02307", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02307)"},
+ {"VUID-vkCmdDrawIndirectByteCountEXT-commandBuffer-02309", "If commandBuffer is an unprotected command buffer, and any pipeline stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS reads from or writes to any image or buffer, that image or buffer must not be a protected image or protected buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-commandBuffer-02309)"},
{"VUID-vkCmdDrawIndirectByteCountEXT-commandBuffer-02646", "commandBuffer must not be a protected command buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-commandBuffer-02646)"},
- {"VUID-vkCmdDrawIndirectByteCountEXT-commandBuffer-02701", "If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set for commandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-commandBuffer-02701)"},
- {"VUID-vkCmdDrawIndirectByteCountEXT-commandBuffer-02707", "If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-commandBuffer-02707)"},
{"VUID-vkCmdDrawIndirectByteCountEXT-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-commandBuffer-cmdpool)"},
{"VUID-vkCmdDrawIndirectByteCountEXT-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-commandBuffer-parameter)"},
{"VUID-vkCmdDrawIndirectByteCountEXT-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-commandBuffer-recording)"},
{"VUID-vkCmdDrawIndirectByteCountEXT-commonparent", "Both of commandBuffer, and counterBuffer must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-commonparent)"},
{"VUID-vkCmdDrawIndirectByteCountEXT-counterBuffer-02290", "counterBuffer must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-counterBuffer-02290)"},
{"VUID-vkCmdDrawIndirectByteCountEXT-counterBuffer-parameter", "counterBuffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-counterBuffer-parameter)"},
- {"VUID-vkCmdDrawIndirectByteCountEXT-filterCubic-02694", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-filterCubic-02694)"},
- {"VUID-vkCmdDrawIndirectByteCountEXT-filterCubicMinmax-02695", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN_EXT or VK_SAMPLER_REDUCTION_MODE_MAX_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-filterCubicMinmax-02695)"},
- {"VUID-vkCmdDrawIndirectByteCountEXT-flags-02696", "Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-flags-02696)"},
- {"VUID-vkCmdDrawIndirectByteCountEXT-maxMultiviewInstanceIndex-02688", "If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-maxMultiviewInstanceIndex-02688)"},
- {"VUID-vkCmdDrawIndirectByteCountEXT-renderPass-02684", "The current render pass must be compatible with the renderPass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-renderPass-02684)"},
+ {"VUID-vkCmdDrawIndirectByteCountEXT-filterCubic-02625", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-filterCubic-02625)"},
+ {"VUID-vkCmdDrawIndirectByteCountEXT-filterCubicMinmax-02626", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN_EXT or VK_SAMPLER_REDUCTION_MODE_MAX_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-filterCubicMinmax-02626)"},
+ {"VUID-vkCmdDrawIndirectByteCountEXT-maxMultiviewInstanceIndex-02308", "If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-maxMultiviewInstanceIndex-02308)"},
+ {"VUID-vkCmdDrawIndirectByteCountEXT-renderPass-02291", "The current render pass must be compatible with the renderPass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-renderPass-02291)"},
{"VUID-vkCmdDrawIndirectByteCountEXT-renderpass", "This command must only be called inside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-renderpass)"},
- {"VUID-vkCmdDrawIndirectByteCountEXT-sampleLocationsEnable-02689", "If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-sampleLocationsEnable-02689)"},
- {"VUID-vkCmdDrawIndirectByteCountEXT-subpass-02685", "The subpass index of the current render pass must be equal to the subpass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-subpass-02685)"},
+ {"VUID-vkCmdDrawIndirectByteCountEXT-sampleLocationsEnable-02312", "If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-sampleLocationsEnable-02312)"},
+ {"VUID-vkCmdDrawIndirectByteCountEXT-subpass-02292", "The subpass index of the current render pass must be equal to the subpass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-subpass-02292)"},
{"VUID-vkCmdDrawIndirectByteCountEXT-transformFeedback-02287", "VkPhysicalDeviceTransformFeedbackFeaturesEXT::transformFeedback must be enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-transformFeedback-02287)"},
{"VUID-vkCmdDrawIndirectByteCountEXT-transformFeedbackDraw-02288", "The implementation must support VkPhysicalDeviceTransformFeedbackPropertiesEXT::transformFeedbackDraw (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-transformFeedbackDraw-02288)"},
{"VUID-vkCmdDrawIndirectByteCountEXT-vertexStride-02289", "vertexStride must be greater than 0 and less than or equal to VkPhysicalDeviceLimits::maxTransformFeedbackBufferDataStride (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-vertexStride-02289)"},
- {"VUID-vkCmdDrawIndirectCountKHR-None-02686", "Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-02686)"},
- {"VUID-vkCmdDrawIndirectCountKHR-None-02687", "Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-02687)"},
- {"VUID-vkCmdDrawIndirectCountKHR-None-02690", "If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-02690)"},
- {"VUID-vkCmdDrawIndirectCountKHR-None-02691", "If a VkImageView is accessed using atomic operations as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-02691)"},
- {"VUID-vkCmdDrawIndirectCountKHR-None-02692", "If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-02692)"},
- {"VUID-vkCmdDrawIndirectCountKHR-None-02693", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-02693)"},
- {"VUID-vkCmdDrawIndirectCountKHR-None-02697", "For each set n that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-02697)"},
- {"VUID-vkCmdDrawIndirectCountKHR-None-02698", "For each push constant that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a push constant value must have been set for the same pipeline bind point, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-02698)"},
- {"VUID-vkCmdDrawIndirectCountKHR-None-02699", "Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the VkPipeline bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-02699)"},
- {"VUID-vkCmdDrawIndirectCountKHR-None-02700", "A valid pipeline must be bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-02700)"},
- {"VUID-vkCmdDrawIndirectCountKHR-None-02702", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-02702)"},
- {"VUID-vkCmdDrawIndirectCountKHR-None-02703", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-02703)"},
- {"VUID-vkCmdDrawIndirectCountKHR-None-02704", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-02704)"},
- {"VUID-vkCmdDrawIndirectCountKHR-None-02705", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a uniform buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-02705)"},
- {"VUID-vkCmdDrawIndirectCountKHR-None-02706", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a storage buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-02706)"},
- {"VUID-vkCmdDrawIndirectCountKHR-None-02720", "All vertex input bindings accessed via vertex input variables declared in the vertex shader entry point's interface must have valid buffers bound (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-02720)"},
- {"VUID-vkCmdDrawIndirectCountKHR-None-02721", "For a given vertex buffer binding, any attribute data fetched must be entirely contained within the corresponding vertex buffer binding, as described in Vertex Input Description (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-02721)"},
- {"VUID-vkCmdDrawIndirectCountKHR-buffer-02708", "If buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-buffer-02708)"},
- {"VUID-vkCmdDrawIndirectCountKHR-buffer-02709", "buffer must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-buffer-02709)"},
+ {"VUID-vkCmdDrawIndirectCountAMD-None-00509", "For each set n that is statically used by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS, a descriptor set must have been bound to n at VK_PIPELINE_BIND_POINT_GRAPHICS, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountAMD-None-00509)"},
+ {"VUID-vkCmdDrawIndirectCountAMD-None-00510", "For each push constant that is statically used by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS, a push constant value must have been set for VK_PIPELINE_BIND_POINT_GRAPHICS, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountAMD-None-00510)"},
+ {"VUID-vkCmdDrawIndirectCountAMD-None-00511", "Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the bound VkPipeline object, specified via vkCmdBindPipeline (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountAMD-None-00511)"},
+ {"VUID-vkCmdDrawIndirectCountAMD-None-00512", "All vertex input bindings accessed via vertex input variables declared in the vertex shader entry point's interface must have valid buffers bound (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountAMD-None-00512)"},
+ {"VUID-vkCmdDrawIndirectCountAMD-None-00513", "A valid graphics pipeline must be bound to the current command buffer with VK_PIPELINE_BIND_POINT_GRAPHICS (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountAMD-None-00513)"},
+ {"VUID-vkCmdDrawIndirectCountAMD-None-00514", "If the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS requires any dynamic state, that state must have been set on the current command buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountAMD-None-00514)"},
+ {"VUID-vkCmdDrawIndirectCountAMD-None-00518", "Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountAMD-None-00518)"},
+ {"VUID-vkCmdDrawIndirectCountAMD-None-00519", "If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountAMD-None-00519)"},
+ {"VUID-vkCmdDrawIndirectCountAMD-None-00520", "If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountAMD-None-00520)"},
+ {"VUID-vkCmdDrawIndirectCountAMD-None-00521", "If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountAMD-None-00521)"},
+ {"VUID-vkCmdDrawIndirectCountAMD-None-00522", "If the robust buffer access feature is not enabled, and any shader stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS accesses a uniform buffer, it must not access values outside of the range of that buffer specified in the bound descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountAMD-None-00522)"},
+ {"VUID-vkCmdDrawIndirectCountAMD-None-00523", "If the robust buffer access feature is not enabled, and any shader stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS accesses a storage buffer, it must not access values outside of the range of that buffer specified in the bound descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountAMD-None-00523)"},
+ {"VUID-vkCmdDrawIndirectCountAMD-None-01502", "Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountAMD-None-01502)"},
+ {"VUID-vkCmdDrawIndirectCountAMD-None-02017", "If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountAMD-None-02017)"},
+ {"VUID-vkCmdDrawIndirectCountAMD-buffer-01661", "If buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountAMD-buffer-01661)"},
+ {"VUID-vkCmdDrawIndirectCountAMD-buffer-01662", "buffer must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountAMD-buffer-01662)"},
+ {"VUID-vkCmdDrawIndirectCountAMD-buffer-parameter", "buffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountAMD-buffer-parameter)"},
+ {"VUID-vkCmdDrawIndirectCountAMD-commandBuffer-01859", "If commandBuffer is an unprotected command buffer, and any pipeline stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS reads from or writes to any image or buffer, that image or buffer must not be a protected image or protected buffer. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountAMD-commandBuffer-01859)"},
+ {"VUID-vkCmdDrawIndirectCountAMD-commandBuffer-02642", "commandBuffer must not be a protected command buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountAMD-commandBuffer-02642)"},
+ {"VUID-vkCmdDrawIndirectCountAMD-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountAMD-commandBuffer-cmdpool)"},
+ {"VUID-vkCmdDrawIndirectCountAMD-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountAMD-commandBuffer-parameter)"},
+ {"VUID-vkCmdDrawIndirectCountAMD-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountAMD-commandBuffer-recording)"},
+ {"VUID-vkCmdDrawIndirectCountAMD-commonparent", "Each of buffer, commandBuffer, and countBuffer must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountAMD-commonparent)"},
+ {"VUID-vkCmdDrawIndirectCountAMD-countBuffer-00515", "If the count stored in countBuffer is equal to 1, (offset + sizeof(VkDrawIndirectCommand)) must be less than or equal to the size of buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountAMD-countBuffer-00515)"},
+ {"VUID-vkCmdDrawIndirectCountAMD-countBuffer-00516", "If the count stored in countBuffer is greater than 1, (stride {times} (drawCount - 1) + offset + sizeof(VkDrawIndirectCommand)) must be less than or equal to the size of buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountAMD-countBuffer-00516)"},
+ {"VUID-vkCmdDrawIndirectCountAMD-countBuffer-00517", "The count stored in countBuffer must be less than or equal to VkPhysicalDeviceLimits::maxDrawIndirectCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountAMD-countBuffer-00517)"},
+ {"VUID-vkCmdDrawIndirectCountAMD-countBuffer-01663", "If countBuffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountAMD-countBuffer-01663)"},
+ {"VUID-vkCmdDrawIndirectCountAMD-countBuffer-01664", "countBuffer must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountAMD-countBuffer-01664)"},
+ {"VUID-vkCmdDrawIndirectCountAMD-countBuffer-parameter", "countBuffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountAMD-countBuffer-parameter)"},
+ {"VUID-vkCmdDrawIndirectCountAMD-countBufferOffset-00503", "countBufferOffset must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountAMD-countBufferOffset-00503)"},
+ {"VUID-vkCmdDrawIndirectCountAMD-firstInstance-00506", "If the drawIndirectFirstInstance feature is not enabled, all the firstInstance members of the VkDrawIndirectCommand structures accessed by this command must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountAMD-firstInstance-00506)"},
+ {"VUID-vkCmdDrawIndirectCountAMD-flags-02046", "Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountAMD-flags-02046)"},
+ {"VUID-vkCmdDrawIndirectCountAMD-maxDrawCount-00505", "If maxDrawCount is greater than or equal to 1, (stride {times} (maxDrawCount - 1) + offset + sizeof(VkDrawIndirectCommand)) must be less than or equal to the size of buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountAMD-maxDrawCount-00505)"},
+ {"VUID-vkCmdDrawIndirectCountAMD-maxMultiviewInstanceIndex-00525", "If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountAMD-maxMultiviewInstanceIndex-00525)"},
+ {"VUID-vkCmdDrawIndirectCountAMD-offset-00502", "offset must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountAMD-offset-00502)"},
+ {"VUID-vkCmdDrawIndirectCountAMD-renderPass-00507", "The current render pass must be compatible with the renderPass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountAMD-renderPass-00507)"},
+ {"VUID-vkCmdDrawIndirectCountAMD-renderpass", "This command must only be called inside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountAMD-renderpass)"},
+ {"VUID-vkCmdDrawIndirectCountAMD-sampleLocationsEnable-01515", "If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountAMD-sampleLocationsEnable-01515)"},
+ {"VUID-vkCmdDrawIndirectCountAMD-stride-00504", "stride must be a multiple of 4 and must be greater than or equal to sizeof(VkDrawIndirectCommand) (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountAMD-stride-00504)"},
+ {"VUID-vkCmdDrawIndirectCountAMD-subpass-00508", "The subpass index of the current render pass must be equal to the subpass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountAMD-subpass-00508)"},
+ {"VUID-vkCmdDrawIndirectCountKHR-None-02015", "If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-02015)"},
+ {"VUID-vkCmdDrawIndirectCountKHR-None-02016", "If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-02016)"},
+ {"VUID-vkCmdDrawIndirectCountKHR-None-03115", "For each set n that is statically used by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS, a descriptor set must have been bound to n at VK_PIPELINE_BIND_POINT_GRAPHICS, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-03115)"},
+ {"VUID-vkCmdDrawIndirectCountKHR-None-03116", "For each push constant that is statically used by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS, a push constant value must have been set for VK_PIPELINE_BIND_POINT_GRAPHICS, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-03116)"},
+ {"VUID-vkCmdDrawIndirectCountKHR-None-03117", "Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the bound VkPipeline object, specified via vkCmdBindPipeline (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-03117)"},
+ {"VUID-vkCmdDrawIndirectCountKHR-None-03118", "All vertex input bindings accessed via vertex input variables declared in the vertex shader entry point's interface must have valid buffers bound (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-03118)"},
+ {"VUID-vkCmdDrawIndirectCountKHR-None-03119", "A valid graphics pipeline must be bound to the current command buffer with VK_PIPELINE_BIND_POINT_GRAPHICS (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-03119)"},
+ {"VUID-vkCmdDrawIndirectCountKHR-None-03120", "If the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS requires any dynamic state, that state must have been set on the current command buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-03120)"},
+ {"VUID-vkCmdDrawIndirectCountKHR-None-03124", "Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-03124)"},
+ {"VUID-vkCmdDrawIndirectCountKHR-None-03125", "If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-03125)"},
+ {"VUID-vkCmdDrawIndirectCountKHR-None-03126", "If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-03126)"},
+ {"VUID-vkCmdDrawIndirectCountKHR-None-03127", "If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-03127)"},
+ {"VUID-vkCmdDrawIndirectCountKHR-None-03128", "If the robust buffer access feature is not enabled, and any shader stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS accesses a uniform buffer, it must not access values outside of the range of that buffer specified in the bound descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-03128)"},
+ {"VUID-vkCmdDrawIndirectCountKHR-None-03129", "If the robust buffer access feature is not enabled, and any shader stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS accesses a storage buffer, it must not access values outside of the range of that buffer specified in the bound descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-03129)"},
+ {"VUID-vkCmdDrawIndirectCountKHR-None-03131", "Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-03131)"},
+ {"VUID-vkCmdDrawIndirectCountKHR-None-03170", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-03170)"},
+ {"VUID-vkCmdDrawIndirectCountKHR-buffer-03104", "If buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-buffer-03104)"},
+ {"VUID-vkCmdDrawIndirectCountKHR-buffer-03105", "buffer must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-buffer-03105)"},
{"VUID-vkCmdDrawIndirectCountKHR-buffer-parameter", "buffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-buffer-parameter)"},
- {"VUID-vkCmdDrawIndirectCountKHR-commandBuffer-02701", "If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set for commandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-commandBuffer-02701)"},
- {"VUID-vkCmdDrawIndirectCountKHR-commandBuffer-02707", "If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-commandBuffer-02707)"},
- {"VUID-vkCmdDrawIndirectCountKHR-commandBuffer-02711", "commandBuffer must not be a protected command buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-commandBuffer-02711)"},
+ {"VUID-vkCmdDrawIndirectCountKHR-commandBuffer-02641", "commandBuffer must not be a protected command buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-commandBuffer-02641)"},
+ {"VUID-vkCmdDrawIndirectCountKHR-commandBuffer-03133", "If commandBuffer is an unprotected command buffer, and any pipeline stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS reads from or writes to any image or buffer, that image or buffer must not be a protected image or protected buffer. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-commandBuffer-03133)"},
{"VUID-vkCmdDrawIndirectCountKHR-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-commandBuffer-cmdpool)"},
{"VUID-vkCmdDrawIndirectCountKHR-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-commandBuffer-parameter)"},
{"VUID-vkCmdDrawIndirectCountKHR-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-commandBuffer-recording)"},
{"VUID-vkCmdDrawIndirectCountKHR-commonparent", "Each of buffer, commandBuffer, and countBuffer must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-commonparent)"},
- {"VUID-vkCmdDrawIndirectCountKHR-countBuffer-02714", "If countBuffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-countBuffer-02714)"},
- {"VUID-vkCmdDrawIndirectCountKHR-countBuffer-02715", "countBuffer must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-countBuffer-02715)"},
- {"VUID-vkCmdDrawIndirectCountKHR-countBuffer-02717", "The count stored in countBuffer must be less than or equal to VkPhysicalDeviceLimits::maxDrawIndirectCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-countBuffer-02717)"},
+ {"VUID-vkCmdDrawIndirectCountKHR-countBuffer-03106", "If countBuffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-countBuffer-03106)"},
+ {"VUID-vkCmdDrawIndirectCountKHR-countBuffer-03107", "countBuffer must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-countBuffer-03107)"},
{"VUID-vkCmdDrawIndirectCountKHR-countBuffer-03121", "If the count stored in countBuffer is equal to 1, (offset + sizeof(VkDrawIndirectCommand)) must be less than or equal to the size of buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-countBuffer-03121)"},
{"VUID-vkCmdDrawIndirectCountKHR-countBuffer-03122", "If the count stored in countBuffer is greater than 1, (stride {times} (drawCount - 1) + offset + sizeof(VkDrawIndirectCommand)) must be less than or equal to the size of buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-countBuffer-03122)"},
+ {"VUID-vkCmdDrawIndirectCountKHR-countBuffer-03123", "The count stored in countBuffer must be less than or equal to VkPhysicalDeviceLimits::maxDrawIndirectCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-countBuffer-03123)"},
{"VUID-vkCmdDrawIndirectCountKHR-countBuffer-parameter", "countBuffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-countBuffer-parameter)"},
- {"VUID-vkCmdDrawIndirectCountKHR-countBufferOffset-02716", "countBufferOffset must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-countBufferOffset-02716)"},
- {"VUID-vkCmdDrawIndirectCountKHR-filterCubic-02694", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-filterCubic-02694)"},
- {"VUID-vkCmdDrawIndirectCountKHR-filterCubicMinmax-02695", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN_EXT or VK_SAMPLER_REDUCTION_MODE_MAX_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-filterCubicMinmax-02695)"},
- {"VUID-vkCmdDrawIndirectCountKHR-flags-02696", "Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-flags-02696)"},
+ {"VUID-vkCmdDrawIndirectCountKHR-countBufferOffset-03109", "countBufferOffset must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-countBufferOffset-03109)"},
+ {"VUID-vkCmdDrawIndirectCountKHR-filterCubic-02619", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-filterCubic-02619)"},
+ {"VUID-vkCmdDrawIndirectCountKHR-filterCubicMinmax-02620", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN_EXT or VK_SAMPLER_REDUCTION_MODE_MAX_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-filterCubicMinmax-02620)"},
+ {"VUID-vkCmdDrawIndirectCountKHR-firstInstance-03112", "If the drawIndirectFirstInstance feature is not enabled, all the firstInstance members of the VkDrawIndirectCommand structures accessed by this command must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-firstInstance-03112)"},
+ {"VUID-vkCmdDrawIndirectCountKHR-flags-02045", "Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-flags-02045)"},
{"VUID-vkCmdDrawIndirectCountKHR-maxDrawCount-03111", "If maxDrawCount is greater than or equal to 1, (stride {times} (maxDrawCount - 1) + offset + sizeof(VkDrawIndirectCommand)) must be less than or equal to the size of buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-maxDrawCount-03111)"},
- {"VUID-vkCmdDrawIndirectCountKHR-maxMultiviewInstanceIndex-02688", "If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-maxMultiviewInstanceIndex-02688)"},
- {"VUID-vkCmdDrawIndirectCountKHR-offset-02710", "offset must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-offset-02710)"},
- {"VUID-vkCmdDrawIndirectCountKHR-renderPass-02684", "The current render pass must be compatible with the renderPass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-renderPass-02684)"},
+ {"VUID-vkCmdDrawIndirectCountKHR-maxMultiviewInstanceIndex-03132", "If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-maxMultiviewInstanceIndex-03132)"},
+ {"VUID-vkCmdDrawIndirectCountKHR-offset-03108", "offset must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-offset-03108)"},
+ {"VUID-vkCmdDrawIndirectCountKHR-renderPass-03113", "The current render pass must be compatible with the renderPass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-renderPass-03113)"},
{"VUID-vkCmdDrawIndirectCountKHR-renderpass", "This command must only be called inside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-renderpass)"},
- {"VUID-vkCmdDrawIndirectCountKHR-sampleLocationsEnable-02689", "If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-sampleLocationsEnable-02689)"},
+ {"VUID-vkCmdDrawIndirectCountKHR-sampleLocationsEnable-03171", "If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-sampleLocationsEnable-03171)"},
{"VUID-vkCmdDrawIndirectCountKHR-stride-03110", "stride must be a multiple of 4 and must be greater than or equal to sizeof(VkDrawIndirectCommand) (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-stride-03110)"},
- {"VUID-vkCmdDrawIndirectCountKHR-subpass-02685", "The subpass index of the current render pass must be equal to the subpass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-subpass-02685)"},
- {"VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02686", "Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02686)"},
- {"VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02687", "Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02687)"},
- {"VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02690", "If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02690)"},
- {"VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02691", "If a VkImageView is accessed using atomic operations as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02691)"},
- {"VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02692", "If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02692)"},
- {"VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02693", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02693)"},
- {"VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02697", "For each set n that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02697)"},
- {"VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02698", "For each push constant that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a push constant value must have been set for the same pipeline bind point, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02698)"},
- {"VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02699", "Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the VkPipeline bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02699)"},
- {"VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02700", "A valid pipeline must be bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02700)"},
- {"VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02702", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02702)"},
- {"VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02703", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02703)"},
- {"VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02704", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02704)"},
- {"VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02705", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a uniform buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02705)"},
- {"VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02706", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a storage buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02706)"},
- {"VUID-vkCmdDrawMeshTasksIndirectCountNV-buffer-02708", "If buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-buffer-02708)"},
- {"VUID-vkCmdDrawMeshTasksIndirectCountNV-buffer-02709", "buffer must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-buffer-02709)"},
+ {"VUID-vkCmdDrawIndirectCountKHR-subpass-03114", "The subpass index of the current render pass must be equal to the subpass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-subpass-03114)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02186", "For each set n that is statically used by the VkPipeline currently bound to VK_PIPELINE_BIND_POINT_GRAPHICS, a descriptor set must have been bound to n at VK_PIPELINE_BIND_POINT_GRAPHICS, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02186)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02187", "For each push constant that is statically used by the VkPipeline currently bound to VK_PIPELINE_BIND_POINT_GRAPHICS, a push constant value must have been set for VK_PIPELINE_BIND_POINT_GRAPHICS, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02187)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02188", "Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the currently bound VkPipeline object, specified via vkCmdBindPipeline (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02188)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02189", "A valid graphics pipeline must be bound to the current command buffer with VK_PIPELINE_BIND_POINT_GRAPHICS (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02189)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02190", "If the VkPipeline object currently bound to VK_PIPELINE_BIND_POINT_GRAPHICS requires any dynamic state, that state must have been set on the current command buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02190)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02194", "Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02194)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02195", "If any VkSampler object that is accessed from a shader by the VkPipeline currently bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02195)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02196", "If any VkSampler object that is accessed from a shader by the VkPipeline currently bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02196)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02197", "If any VkSampler object that is accessed from a shader by the VkPipeline currently bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02197)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02198", "If the robust buffer access feature is not enabled, and any shader stage in the VkPipeline object currently bound to VK_PIPELINE_BIND_POINT_GRAPHICS accesses a uniform buffer, it must not access values outside of the range of that buffer specified in the currently bound descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02198)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02199", "If the robust buffer access feature is not enabled, and any shader stage in the VkPipeline object currently bound to VK_PIPELINE_BIND_POINT_GRAPHICS accesses a storage buffer, it must not access values outside of the range of that buffer specified in the currently bound descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02199)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02201", "Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02201)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectCountNV-buffer-02176", "If buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-buffer-02176)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectCountNV-buffer-02177", "buffer must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-buffer-02177)"},
{"VUID-vkCmdDrawMeshTasksIndirectCountNV-buffer-parameter", "buffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-buffer-parameter)"},
- {"VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-02701", "If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set for commandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-02701)"},
- {"VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-02707", "If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-02707)"},
- {"VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-02711", "commandBuffer must not be a protected command buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-02711)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-02203", "If commandBuffer is an unprotected command buffer, and any pipeline stage in the VkPipeline object currently bound to VK_PIPELINE_BIND_POINT_GRAPHICS reads from or writes to any image or buffer, that image or buffer must not be a protected image or protected buffer. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-02203)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-02204", "If commandBuffer is a protected command buffer, and any pipeline stage in the VkPipeline object currently bound to VK_PIPELINE_BIND_POINT_GRAPHICS writes to any image or buffer, that image or buffer must not be an unprotected image or unprotected buffer. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-02204)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-02205", "If commandBuffer is a protected command buffer, and any pipeline stage other than the framebuffer-space pipeline stages in the VkPipeline object currently bound to VK_PIPELINE_BIND_POINT_GRAPHICS reads from or writes to any image or buffer, the image or buffer must not be a protected image or protected buffer. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-02205)"},
{"VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-cmdpool)"},
{"VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-parameter)"},
{"VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-recording)"},
{"VUID-vkCmdDrawMeshTasksIndirectCountNV-commonparent", "Each of buffer, commandBuffer, and countBuffer must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-commonparent)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectCountNV-countBuffer-02178", "If countBuffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-countBuffer-02178)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectCountNV-countBuffer-02179", "countBuffer must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-countBuffer-02179)"},
{"VUID-vkCmdDrawMeshTasksIndirectCountNV-countBuffer-02191", "If the count stored in countBuffer is equal to 1, (offset + sizeof(VkDrawMeshTasksIndirectCommandNV)) must be less than or equal to the size of buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-countBuffer-02191)"},
{"VUID-vkCmdDrawMeshTasksIndirectCountNV-countBuffer-02192", "If the count stored in countBuffer is greater than 1, (stride {times} (drawCount - 1) + offset + sizeof(VkDrawMeshTasksIndirectCommandNV)) must be less than or equal to the size of buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-countBuffer-02192)"},
- {"VUID-vkCmdDrawMeshTasksIndirectCountNV-countBuffer-02714", "If countBuffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-countBuffer-02714)"},
- {"VUID-vkCmdDrawMeshTasksIndirectCountNV-countBuffer-02715", "countBuffer must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-countBuffer-02715)"},
- {"VUID-vkCmdDrawMeshTasksIndirectCountNV-countBuffer-02717", "The count stored in countBuffer must be less than or equal to VkPhysicalDeviceLimits::maxDrawIndirectCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-countBuffer-02717)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectCountNV-countBuffer-02193", "The count stored in countBuffer must be less than or equal to VkPhysicalDeviceLimits::maxDrawIndirectCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-countBuffer-02193)"},
{"VUID-vkCmdDrawMeshTasksIndirectCountNV-countBuffer-parameter", "countBuffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-countBuffer-parameter)"},
- {"VUID-vkCmdDrawMeshTasksIndirectCountNV-countBufferOffset-02716", "countBufferOffset must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-countBufferOffset-02716)"},
- {"VUID-vkCmdDrawMeshTasksIndirectCountNV-filterCubic-02694", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-filterCubic-02694)"},
- {"VUID-vkCmdDrawMeshTasksIndirectCountNV-filterCubicMinmax-02695", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN_EXT or VK_SAMPLER_REDUCTION_MODE_MAX_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-filterCubicMinmax-02695)"},
- {"VUID-vkCmdDrawMeshTasksIndirectCountNV-flags-02696", "Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-flags-02696)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectCountNV-countBufferOffset-02181", "countBufferOffset must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-countBufferOffset-02181)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectCountNV-linearTilingFeatures-02200", "Any VkImageView being sampled with VK_FILTER_LINEAR as a result of this command must be of a format which supports linear filtering, as specified by the VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT flag in VkFormatProperties::linearTilingFeatures (for a linear image) or VkFormatProperties::optimalTilingFeatures(for an optimally tiled image) returned by vkGetPhysicalDeviceFormatProperties (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-linearTilingFeatures-02200)"},
{"VUID-vkCmdDrawMeshTasksIndirectCountNV-maxDrawCount-02183", "If maxDrawCount is greater than or equal to 1, (stride {times} (maxDrawCount - 1) + offset + sizeof(VkDrawMeshTasksIndirectCommandNV)) must be less than or equal to the size of buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-maxDrawCount-02183)"},
- {"VUID-vkCmdDrawMeshTasksIndirectCountNV-maxMultiviewInstanceIndex-02688", "If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-maxMultiviewInstanceIndex-02688)"},
- {"VUID-vkCmdDrawMeshTasksIndirectCountNV-offset-02710", "offset must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-offset-02710)"},
- {"VUID-vkCmdDrawMeshTasksIndirectCountNV-renderPass-02684", "The current render pass must be compatible with the renderPass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-renderPass-02684)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectCountNV-maxMultiviewInstanceIndex-02202", "If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-maxMultiviewInstanceIndex-02202)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectCountNV-offset-02180", "offset must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-offset-02180)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectCountNV-renderPass-02184", "The current render pass must be compatible with the renderPass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline currently bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-renderPass-02184)"},
{"VUID-vkCmdDrawMeshTasksIndirectCountNV-renderpass", "This command must only be called inside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-renderpass)"},
- {"VUID-vkCmdDrawMeshTasksIndirectCountNV-sampleLocationsEnable-02689", "If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-sampleLocationsEnable-02689)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectCountNV-sampleLocationsEnable-02206", "If the currently bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-sampleLocationsEnable-02206)"},
{"VUID-vkCmdDrawMeshTasksIndirectCountNV-stride-02182", "stride must be a multiple of 4 and must be greater than or equal to sizeof(VkDrawMeshTasksIndirectCommandNV) (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-stride-02182)"},
- {"VUID-vkCmdDrawMeshTasksIndirectCountNV-subpass-02685", "The subpass index of the current render pass must be equal to the subpass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-subpass-02685)"},
- {"VUID-vkCmdDrawMeshTasksIndirectNV-None-02686", "Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-None-02686)"},
- {"VUID-vkCmdDrawMeshTasksIndirectNV-None-02687", "Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-None-02687)"},
- {"VUID-vkCmdDrawMeshTasksIndirectNV-None-02690", "If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-None-02690)"},
- {"VUID-vkCmdDrawMeshTasksIndirectNV-None-02691", "If a VkImageView is accessed using atomic operations as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-None-02691)"},
- {"VUID-vkCmdDrawMeshTasksIndirectNV-None-02692", "If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-None-02692)"},
- {"VUID-vkCmdDrawMeshTasksIndirectNV-None-02693", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-None-02693)"},
- {"VUID-vkCmdDrawMeshTasksIndirectNV-None-02697", "For each set n that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-None-02697)"},
- {"VUID-vkCmdDrawMeshTasksIndirectNV-None-02698", "For each push constant that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a push constant value must have been set for the same pipeline bind point, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-None-02698)"},
- {"VUID-vkCmdDrawMeshTasksIndirectNV-None-02699", "Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the VkPipeline bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-None-02699)"},
- {"VUID-vkCmdDrawMeshTasksIndirectNV-None-02700", "A valid pipeline must be bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-None-02700)"},
- {"VUID-vkCmdDrawMeshTasksIndirectNV-None-02702", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-None-02702)"},
- {"VUID-vkCmdDrawMeshTasksIndirectNV-None-02703", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-None-02703)"},
- {"VUID-vkCmdDrawMeshTasksIndirectNV-None-02704", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-None-02704)"},
- {"VUID-vkCmdDrawMeshTasksIndirectNV-None-02705", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a uniform buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-None-02705)"},
- {"VUID-vkCmdDrawMeshTasksIndirectNV-None-02706", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a storage buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-None-02706)"},
- {"VUID-vkCmdDrawMeshTasksIndirectNV-buffer-02708", "If buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-buffer-02708)"},
- {"VUID-vkCmdDrawMeshTasksIndirectNV-buffer-02709", "buffer must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-buffer-02709)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectCountNV-subpass-02185", "The subpass index of the current render pass must be equal to the subpass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline currently bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-subpass-02185)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectNV-None-02150", "For each set n that is statically used by the VkPipeline currently bound to VK_PIPELINE_BIND_POINT_GRAPHICS, a descriptor set must have been bound to n at VK_PIPELINE_BIND_POINT_GRAPHICS, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-None-02150)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectNV-None-02151", "For each push constant that is statically used by the VkPipeline currently bound to VK_PIPELINE_BIND_POINT_GRAPHICS, a push constant value must have been set for VK_PIPELINE_BIND_POINT_GRAPHICS, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-None-02151)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectNV-None-02152", "Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the currently bound VkPipeline object, specified via vkCmdBindPipeline (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-None-02152)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectNV-None-02153", "All vertex input bindings accessed via vertex input variables declared in the vertex shader entry point's interface must have valid buffers bound (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-None-02153)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectNV-None-02154", "A valid graphics pipeline must be bound to the current command buffer with VK_PIPELINE_BIND_POINT_GRAPHICS (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-None-02154)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectNV-None-02155", "If the VkPipeline object currently bound to VK_PIPELINE_BIND_POINT_GRAPHICS requires any dynamic state, that state must have been set on the current command buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-None-02155)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectNV-None-02159", "Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-None-02159)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectNV-None-02160", "If any VkSampler object that is accessed from a shader by the VkPipeline currently bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-None-02160)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectNV-None-02161", "If any VkSampler object that is accessed from a shader by the VkPipeline currently bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-None-02161)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectNV-None-02162", "If any VkSampler object that is accessed from a shader by the VkPipeline currently bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-None-02162)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectNV-None-02163", "If the robust buffer access feature is not enabled, and any shader stage in the VkPipeline object currently bound to VK_PIPELINE_BIND_POINT_GRAPHICS accesses a uniform buffer, it must not access values outside of the range of that buffer specified in the currently bound descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-None-02163)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectNV-None-02164", "If the robust buffer access feature is not enabled, and any shader stage in the VkPipeline object currently bound to VK_PIPELINE_BIND_POINT_GRAPHICS accesses a storage buffer, it must not access values outside of the range of that buffer specified in the currently bound descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-None-02164)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectNV-None-02166", "Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-None-02166)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectNV-None-02168", "Any VkImageView being sampled with VK_FILTER_CUBIC_IMG as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-None-02168)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectNV-buffer-02143", "If buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-buffer-02143)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectNV-buffer-02144", "buffer must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-buffer-02144)"},
{"VUID-vkCmdDrawMeshTasksIndirectNV-buffer-parameter", "buffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-buffer-parameter)"},
- {"VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-02701", "If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set for commandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-02701)"},
- {"VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-02707", "If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-02707)"},
- {"VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-02711", "commandBuffer must not be a protected command buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-02711)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-02170", "If commandBuffer is an unprotected command buffer, and any pipeline stage in the VkPipeline object currently bound to VK_PIPELINE_BIND_POINT_GRAPHICS reads from or writes to any image or buffer, that image or buffer must not be a protected image or protected buffer. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-02170)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-02171", "If commandBuffer is a protected command buffer, and any pipeline stage in the VkPipeline object currently bound to VK_PIPELINE_BIND_POINT_GRAPHICS writes to any image or buffer, that image or buffer must not be an unprotected image or unprotected buffer. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-02171)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-02172", "If commandBuffer is a protected command buffer, and any pipeline stage other than the framebuffer-space pipeline stages in the VkPipeline object currently bound to VK_PIPELINE_BIND_POINT_GRAPHICS reads from or writes to any image or buffer, the image or buffer must not be a protected image or protected buffer. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-02172)"},
{"VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-cmdpool)"},
{"VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-parameter)"},
{"VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-recording)"},
{"VUID-vkCmdDrawMeshTasksIndirectNV-commonparent", "Both of buffer, and commandBuffer must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-commonparent)"},
{"VUID-vkCmdDrawMeshTasksIndirectNV-drawCount-02146", "If drawCount is greater than 1, stride must be a multiple of 4 and must be greater than or equal to sizeof(VkDrawMeshTasksIndirectCommandNV) (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-drawCount-02146)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectNV-drawCount-02147", "If the multi-draw indirect feature is not enabled, drawCount must be 0 or 1 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-drawCount-02147)"},
{"VUID-vkCmdDrawMeshTasksIndirectNV-drawCount-02156", "If drawCount is equal to 1, (offset + sizeof(VkDrawMeshTasksIndirectCommandNV)) must be less than or equal to the size of buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-drawCount-02156)"},
{"VUID-vkCmdDrawMeshTasksIndirectNV-drawCount-02157", "If drawCount is greater than 1, (stride {times} (drawCount - 1) + offset + sizeof(VkDrawMeshTasksIndirectCommandNV)) must be less than or equal to the size of buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-drawCount-02157)"},
- {"VUID-vkCmdDrawMeshTasksIndirectNV-drawCount-02718", "If the multi-draw indirect feature is not enabled, drawCount must be 0 or 1 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-drawCount-02718)"},
- {"VUID-vkCmdDrawMeshTasksIndirectNV-drawCount-02719", "drawCount must be less than or equal to VkPhysicalDeviceLimits::maxDrawIndirectCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-drawCount-02719)"},
- {"VUID-vkCmdDrawMeshTasksIndirectNV-filterCubic-02694", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-filterCubic-02694)"},
- {"VUID-vkCmdDrawMeshTasksIndirectNV-filterCubicMinmax-02695", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN_EXT or VK_SAMPLER_REDUCTION_MODE_MAX_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-filterCubicMinmax-02695)"},
- {"VUID-vkCmdDrawMeshTasksIndirectNV-flags-02696", "Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-flags-02696)"},
- {"VUID-vkCmdDrawMeshTasksIndirectNV-maxMultiviewInstanceIndex-02688", "If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-maxMultiviewInstanceIndex-02688)"},
- {"VUID-vkCmdDrawMeshTasksIndirectNV-offset-02710", "offset must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-offset-02710)"},
- {"VUID-vkCmdDrawMeshTasksIndirectNV-renderPass-02684", "The current render pass must be compatible with the renderPass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-renderPass-02684)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectNV-drawCount-02158", "drawCount must be less than or equal to VkPhysicalDeviceLimits::maxDrawIndirectCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-drawCount-02158)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectNV-flags-02174", "Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-flags-02174)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectNV-linearTilingFeatures-02165", "Any VkImageView being sampled with VK_FILTER_LINEAR as a result of this command must be of a format which supports linear filtering, as specified by the VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT flag in VkFormatProperties::linearTilingFeatures (for a linear image) or VkFormatProperties::optimalTilingFeatures(for an optimally tiled image) returned by vkGetPhysicalDeviceFormatProperties (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-linearTilingFeatures-02165)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectNV-linearTilingFeatures-02167", "Any VkImageView being sampled with VK_FILTER_CUBIC_IMG as a result of this command must be of a format which supports cubic filtering, as specified by the VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG flag in VkFormatProperties::linearTilingFeatures (for a linear image) or VkFormatProperties::optimalTilingFeatures(for an optimally tiled image) returned by vkGetPhysicalDeviceFormatProperties (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-linearTilingFeatures-02167)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectNV-maxMultiviewInstanceIndex-02169", "If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-maxMultiviewInstanceIndex-02169)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectNV-offset-02145", "offset must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-offset-02145)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectNV-renderPass-02148", "The current render pass must be compatible with the renderPass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline currently bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-renderPass-02148)"},
{"VUID-vkCmdDrawMeshTasksIndirectNV-renderpass", "This command must only be called inside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-renderpass)"},
- {"VUID-vkCmdDrawMeshTasksIndirectNV-sampleLocationsEnable-02689", "If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-sampleLocationsEnable-02689)"},
- {"VUID-vkCmdDrawMeshTasksIndirectNV-subpass-02685", "The subpass index of the current render pass must be equal to the subpass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-subpass-02685)"},
- {"VUID-vkCmdDrawMeshTasksNV-None-02686", "Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-None-02686)"},
- {"VUID-vkCmdDrawMeshTasksNV-None-02687", "Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-None-02687)"},
- {"VUID-vkCmdDrawMeshTasksNV-None-02690", "If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-None-02690)"},
- {"VUID-vkCmdDrawMeshTasksNV-None-02691", "If a VkImageView is accessed using atomic operations as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-None-02691)"},
- {"VUID-vkCmdDrawMeshTasksNV-None-02692", "If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-None-02692)"},
- {"VUID-vkCmdDrawMeshTasksNV-None-02693", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-None-02693)"},
- {"VUID-vkCmdDrawMeshTasksNV-None-02697", "For each set n that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-None-02697)"},
- {"VUID-vkCmdDrawMeshTasksNV-None-02698", "For each push constant that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a push constant value must have been set for the same pipeline bind point, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-None-02698)"},
- {"VUID-vkCmdDrawMeshTasksNV-None-02699", "Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the VkPipeline bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-None-02699)"},
- {"VUID-vkCmdDrawMeshTasksNV-None-02700", "A valid pipeline must be bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-None-02700)"},
- {"VUID-vkCmdDrawMeshTasksNV-None-02702", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-None-02702)"},
- {"VUID-vkCmdDrawMeshTasksNV-None-02703", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-None-02703)"},
- {"VUID-vkCmdDrawMeshTasksNV-None-02704", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-None-02704)"},
- {"VUID-vkCmdDrawMeshTasksNV-None-02705", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a uniform buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-None-02705)"},
- {"VUID-vkCmdDrawMeshTasksNV-None-02706", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a storage buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-None-02706)"},
- {"VUID-vkCmdDrawMeshTasksNV-commandBuffer-02701", "If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set for commandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-commandBuffer-02701)"},
- {"VUID-vkCmdDrawMeshTasksNV-commandBuffer-02707", "If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-commandBuffer-02707)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectNV-sampleLocationsEnable-02173", "If the currently bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-sampleLocationsEnable-02173)"},
+ {"VUID-vkCmdDrawMeshTasksIndirectNV-subpass-02149", "The subpass index of the current render pass must be equal to the subpass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline currently bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-subpass-02149)"},
+ {"VUID-vkCmdDrawMeshTasksNV-None-02122", "For each set n that is statically used by the VkPipeline currently bound to VK_PIPELINE_BIND_POINT_GRAPHICS, a descriptor set must have been bound to n at VK_PIPELINE_BIND_POINT_GRAPHICS, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-None-02122)"},
+ {"VUID-vkCmdDrawMeshTasksNV-None-02123", "For each push constant that is statically used by the VkPipeline currently bound to VK_PIPELINE_BIND_POINT_GRAPHICS, a push constant value must have been set for VK_PIPELINE_BIND_POINT_GRAPHICS, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-None-02123)"},
+ {"VUID-vkCmdDrawMeshTasksNV-None-02124", "Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the currently bound VkPipeline object, specified via vkCmdBindPipeline (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-None-02124)"},
+ {"VUID-vkCmdDrawMeshTasksNV-None-02125", "A valid graphics pipeline must be bound to the current command buffer with VK_PIPELINE_BIND_POINT_GRAPHICS (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-None-02125)"},
+ {"VUID-vkCmdDrawMeshTasksNV-None-02126", "If the VkPipeline object currently bound to VK_PIPELINE_BIND_POINT_GRAPHICS requires any dynamic state, that state must have been set on the current command buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-None-02126)"},
+ {"VUID-vkCmdDrawMeshTasksNV-None-02127", "Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-None-02127)"},
+ {"VUID-vkCmdDrawMeshTasksNV-None-02128", "If any VkSampler object that is accessed from a shader by the VkPipeline currently bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-None-02128)"},
+ {"VUID-vkCmdDrawMeshTasksNV-None-02129", "If any VkSampler object that is accessed from a shader by the VkPipeline currently bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-None-02129)"},
+ {"VUID-vkCmdDrawMeshTasksNV-None-02130", "If any VkSampler object that is accessed from a shader by the VkPipeline currently bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-None-02130)"},
+ {"VUID-vkCmdDrawMeshTasksNV-None-02131", "If the robust buffer access feature is not enabled, and any shader stage in the VkPipeline object currently bound to VK_PIPELINE_BIND_POINT_GRAPHICS accesses a uniform buffer, it must not access values outside of the range of that buffer specified in the currently bound descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-None-02131)"},
+ {"VUID-vkCmdDrawMeshTasksNV-None-02132", "If the robust buffer access feature is not enabled, and any shader stage in the VkPipeline object currently bound to VK_PIPELINE_BIND_POINT_GRAPHICS accesses a storage buffer, it must not access values outside of the range of that buffer specified in the currently bound descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-None-02132)"},
+ {"VUID-vkCmdDrawMeshTasksNV-None-02134", "Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-None-02134)"},
+ {"VUID-vkCmdDrawMeshTasksNV-None-02136", "Any VkImageView being sampled with VK_FILTER_CUBIC_IMG as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-None-02136)"},
+ {"VUID-vkCmdDrawMeshTasksNV-commandBuffer-02138", "If commandBuffer is an unprotected command buffer, and any pipeline stage in the VkPipeline object currently bound to VK_PIPELINE_BIND_POINT_GRAPHICS reads from or writes to any image or buffer, that image or buffer must not be a protected image or protected buffer. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-commandBuffer-02138)"},
+ {"VUID-vkCmdDrawMeshTasksNV-commandBuffer-02139", "If commandBuffer is a protected command buffer, and any pipeline stage in the VkPipeline object currently bound to VK_PIPELINE_BIND_POINT_GRAPHICS writes to any image or buffer, that image or buffer must not be an unprotected image or unprotected buffer. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-commandBuffer-02139)"},
+ {"VUID-vkCmdDrawMeshTasksNV-commandBuffer-02140", "If commandBuffer is a protected command buffer, and any pipeline stage other than the framebuffer-space pipeline stages in the VkPipeline object currently bound to VK_PIPELINE_BIND_POINT_GRAPHICS reads from or writes to any image or buffer, the image or buffer must not be a protected image or protected buffer. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-commandBuffer-02140)"},
{"VUID-vkCmdDrawMeshTasksNV-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-commandBuffer-cmdpool)"},
{"VUID-vkCmdDrawMeshTasksNV-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-commandBuffer-parameter)"},
{"VUID-vkCmdDrawMeshTasksNV-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-commandBuffer-recording)"},
- {"VUID-vkCmdDrawMeshTasksNV-filterCubic-02694", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-filterCubic-02694)"},
- {"VUID-vkCmdDrawMeshTasksNV-filterCubicMinmax-02695", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN_EXT or VK_SAMPLER_REDUCTION_MODE_MAX_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-filterCubicMinmax-02695)"},
- {"VUID-vkCmdDrawMeshTasksNV-flags-02696", "Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-flags-02696)"},
- {"VUID-vkCmdDrawMeshTasksNV-maxMultiviewInstanceIndex-02688", "If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-maxMultiviewInstanceIndex-02688)"},
- {"VUID-vkCmdDrawMeshTasksNV-renderPass-02684", "The current render pass must be compatible with the renderPass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-renderPass-02684)"},
+ {"VUID-vkCmdDrawMeshTasksNV-flags-02142", "Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-flags-02142)"},
+ {"VUID-vkCmdDrawMeshTasksNV-linearTilingFeatures-02133", "Any VkImageView being sampled with VK_FILTER_LINEAR as a result of this command must be of a format which supports linear filtering, as specified by the VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT flag in VkFormatProperties::linearTilingFeatures (for a linear image) or VkFormatProperties::optimalTilingFeatures(for an optimally tiled image) returned by vkGetPhysicalDeviceFormatProperties (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-linearTilingFeatures-02133)"},
+ {"VUID-vkCmdDrawMeshTasksNV-linearTilingFeatures-02135", "Any VkImageView being sampled with VK_FILTER_CUBIC_IMG as a result of this command must be of a format which supports cubic filtering, as specified by the VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG flag in VkFormatProperties::linearTilingFeatures (for a linear image) or VkFormatProperties::optimalTilingFeatures(for an optimally tiled image) returned by vkGetPhysicalDeviceFormatProperties (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-linearTilingFeatures-02135)"},
+ {"VUID-vkCmdDrawMeshTasksNV-maxMultiviewInstanceIndex-02137", "If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-maxMultiviewInstanceIndex-02137)"},
+ {"VUID-vkCmdDrawMeshTasksNV-renderPass-02120", "The current render pass must be compatible with the renderPass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline currently bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-renderPass-02120)"},
{"VUID-vkCmdDrawMeshTasksNV-renderpass", "This command must only be called inside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-renderpass)"},
- {"VUID-vkCmdDrawMeshTasksNV-sampleLocationsEnable-02689", "If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-sampleLocationsEnable-02689)"},
- {"VUID-vkCmdDrawMeshTasksNV-subpass-02685", "The subpass index of the current render pass must be equal to the subpass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-subpass-02685)"},
+ {"VUID-vkCmdDrawMeshTasksNV-sampleLocationsEnable-02141", "If the currently bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-sampleLocationsEnable-02141)"},
+ {"VUID-vkCmdDrawMeshTasksNV-subpass-02121", "The subpass index of the current render pass must be equal to the subpass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline currently bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-subpass-02121)"},
{"VUID-vkCmdDrawMeshTasksNV-taskCount-02119", "taskCount must be less than or equal to VkPhysicalDeviceMeshShaderPropertiesNV::maxDrawMeshTasksCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-taskCount-02119)"},
{"VUID-vkCmdEndConditionalRenderingEXT-None-01985", "Conditional rendering must be active (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndConditionalRenderingEXT-None-01985)"},
{"VUID-vkCmdEndConditionalRenderingEXT-None-01986", "If conditional rendering was made active outside of a render pass instance, it must not be ended inside a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndConditionalRenderingEXT-None-01986)"},
@@ -3760,11 +3594,10 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-vkCmdEndQueryIndexedEXT-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndQueryIndexedEXT-commandBuffer-recording)"},
{"VUID-vkCmdEndQueryIndexedEXT-commonparent", "Both of commandBuffer, and queryPool must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndQueryIndexedEXT-commonparent)"},
{"VUID-vkCmdEndQueryIndexedEXT-query-02343", "query must be less than the number of queries in queryPool (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndQueryIndexedEXT-query-02343)"},
- {"VUID-vkCmdEndQueryIndexedEXT-query-02345", "If vkCmdEndQueryIndexedEXT is called within a render pass instance, the sum of query and the number of bits set in the current subpass's view mask must be less than or equal to the number of queries in queryPool (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndQueryIndexedEXT-query-02345)"},
+ {"VUID-vkCmdEndQueryIndexedEXT-query-02345", "If vkCmdEndQuery is called within a render pass instance, the sum of query and the number of bits set in the current subpass's view mask must be less than or equal to the number of queries in queryPool (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndQueryIndexedEXT-query-02345)"},
{"VUID-vkCmdEndQueryIndexedEXT-queryPool-parameter", "queryPool must be a valid VkQueryPool handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndQueryIndexedEXT-queryPool-parameter)"},
{"VUID-vkCmdEndQueryIndexedEXT-queryType-02346", "If the queryType used to create queryPool was VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT the index parameter must be less than VkPhysicalDeviceTransformFeedbackPropertiesEXT::maxTransformFeedbackStreams (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndQueryIndexedEXT-queryType-02346)"},
{"VUID-vkCmdEndQueryIndexedEXT-queryType-02347", "If the queryType used to create queryPool was not VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT the index must be zero (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndQueryIndexedEXT-queryType-02347)"},
- {"VUID-vkCmdEndQueryIndexedEXT-queryType-02723", "If the queryType used to create queryPool was VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT index must equal the index used to begin the query (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndQueryIndexedEXT-queryType-02723)"},
{"VUID-vkCmdEndRenderPass-None-00910", "The current subpass index must be equal to the number of subpasses in the render pass minus one (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndRenderPass-None-00910)"},
{"VUID-vkCmdEndRenderPass-None-02351", "This command must not be recorded when transform feedback is active (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndRenderPass-None-02351)"},
{"VUID-vkCmdEndRenderPass-bufferlevel", "commandBuffer must be a primary VkCommandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndRenderPass-bufferlevel)"},
@@ -4067,29 +3900,11 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-vkCmdSetExclusiveScissorNV-offset-02039", "Evaluation of (offset.y + extent.height) for each member of pExclusiveScissors must not cause a signed integer addition overflow (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetExclusiveScissorNV-offset-02039)"},
{"VUID-vkCmdSetExclusiveScissorNV-pExclusiveScissors-parameter", "pExclusiveScissors must be a valid pointer to an array of exclusiveScissorCount VkRect2D structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetExclusiveScissorNV-pExclusiveScissors-parameter)"},
{"VUID-vkCmdSetExclusiveScissorNV-x-02037", "The x and y members of offset in each member of pExclusiveScissors must be greater than or equal to 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetExclusiveScissorNV-x-02037)"},
- {"VUID-vkCmdSetLineStippleEXT-None-02775", "The bound graphics pipeline must have been created with the VK_DYNAMIC_STATE_LINE_STIPPLE_EXT dynamic state enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetLineStippleEXT-None-02775)"},
- {"VUID-vkCmdSetLineStippleEXT-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetLineStippleEXT-commandBuffer-cmdpool)"},
- {"VUID-vkCmdSetLineStippleEXT-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetLineStippleEXT-commandBuffer-parameter)"},
- {"VUID-vkCmdSetLineStippleEXT-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetLineStippleEXT-commandBuffer-recording)"},
- {"VUID-vkCmdSetLineStippleEXT-lineStippleFactor-02776", "lineStippleFactor must be in the range [1,256] (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetLineStippleEXT-lineStippleFactor-02776)"},
{"VUID-vkCmdSetLineWidth-None-00787", "The bound graphics pipeline must have been created with the VK_DYNAMIC_STATE_LINE_WIDTH dynamic state enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetLineWidth-None-00787)"},
{"VUID-vkCmdSetLineWidth-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetLineWidth-commandBuffer-cmdpool)"},
{"VUID-vkCmdSetLineWidth-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetLineWidth-commandBuffer-parameter)"},
{"VUID-vkCmdSetLineWidth-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetLineWidth-commandBuffer-recording)"},
{"VUID-vkCmdSetLineWidth-lineWidth-00788", "If the wide lines feature is not enabled, lineWidth must be 1.0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetLineWidth-lineWidth-00788)"},
- {"VUID-vkCmdSetPerformanceMarkerINTEL-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics, compute, or transfer operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetPerformanceMarkerINTEL-commandBuffer-cmdpool)"},
- {"VUID-vkCmdSetPerformanceMarkerINTEL-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetPerformanceMarkerINTEL-commandBuffer-parameter)"},
- {"VUID-vkCmdSetPerformanceMarkerINTEL-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetPerformanceMarkerINTEL-commandBuffer-recording)"},
- {"VUID-vkCmdSetPerformanceMarkerINTEL-pMarkerInfo-parameter", "pMarkerInfo must be a valid pointer to a valid VkPerformanceMarkerInfoINTEL structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetPerformanceMarkerINTEL-pMarkerInfo-parameter)"},
- {"VUID-vkCmdSetPerformanceOverrideINTEL-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics, compute, or transfer operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetPerformanceOverrideINTEL-commandBuffer-cmdpool)"},
- {"VUID-vkCmdSetPerformanceOverrideINTEL-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetPerformanceOverrideINTEL-commandBuffer-parameter)"},
- {"VUID-vkCmdSetPerformanceOverrideINTEL-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetPerformanceOverrideINTEL-commandBuffer-recording)"},
- {"VUID-vkCmdSetPerformanceOverrideINTEL-pOverrideInfo-02736", "pOverrideInfo must not be used with a VkPerformanceOverrideTypeINTEL that is not reported available by vkGetPerformanceParameterINTEL. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetPerformanceOverrideINTEL-pOverrideInfo-02736)"},
- {"VUID-vkCmdSetPerformanceOverrideINTEL-pOverrideInfo-parameter", "pOverrideInfo must be a valid pointer to a valid VkPerformanceOverrideInfoINTEL structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetPerformanceOverrideINTEL-pOverrideInfo-parameter)"},
- {"VUID-vkCmdSetPerformanceStreamMarkerINTEL-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics, compute, or transfer operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetPerformanceStreamMarkerINTEL-commandBuffer-cmdpool)"},
- {"VUID-vkCmdSetPerformanceStreamMarkerINTEL-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetPerformanceStreamMarkerINTEL-commandBuffer-parameter)"},
- {"VUID-vkCmdSetPerformanceStreamMarkerINTEL-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetPerformanceStreamMarkerINTEL-commandBuffer-recording)"},
- {"VUID-vkCmdSetPerformanceStreamMarkerINTEL-pMarkerInfo-parameter", "pMarkerInfo must be a valid pointer to a valid VkPerformanceStreamMarkerInfoINTEL structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetPerformanceStreamMarkerINTEL-pMarkerInfo-parameter)"},
{"VUID-vkCmdSetSampleLocationsEXT-None-01528", "The bound graphics pipeline must have been created with the VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT dynamic state enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetSampleLocationsEXT-None-01528)"},
{"VUID-vkCmdSetSampleLocationsEXT-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetSampleLocationsEXT-commandBuffer-cmdpool)"},
{"VUID-vkCmdSetSampleLocationsEXT-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetSampleLocationsEXT-commandBuffer-parameter)"},
@@ -4157,36 +3972,32 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-vkCmdSetViewportWScalingNV-firstViewport-01324", "The sum of firstViewport and viewportCount must be between 1 and VkPhysicalDeviceLimits::maxViewports, inclusive (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetViewportWScalingNV-firstViewport-01324)"},
{"VUID-vkCmdSetViewportWScalingNV-pViewportWScalings-parameter", "pViewportWScalings must be a valid pointer to an array of viewportCount VkViewportWScalingNV structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetViewportWScalingNV-pViewportWScalings-parameter)"},
{"VUID-vkCmdSetViewportWScalingNV-viewportCount-arraylength", "viewportCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetViewportWScalingNV-viewportCount-arraylength)"},
- {"VUID-vkCmdTraceRaysNV-None-02690", "If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-None-02690)"},
- {"VUID-vkCmdTraceRaysNV-None-02691", "If a VkImageView is accessed using atomic operations as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-None-02691)"},
- {"VUID-vkCmdTraceRaysNV-None-02692", "If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-None-02692)"},
- {"VUID-vkCmdTraceRaysNV-None-02693", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-None-02693)"},
- {"VUID-vkCmdTraceRaysNV-None-02697", "For each set n that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-None-02697)"},
- {"VUID-vkCmdTraceRaysNV-None-02698", "For each push constant that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a push constant value must have been set for the same pipeline bind point, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-None-02698)"},
- {"VUID-vkCmdTraceRaysNV-None-02699", "Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the VkPipeline bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-None-02699)"},
- {"VUID-vkCmdTraceRaysNV-None-02700", "A valid pipeline must be bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-None-02700)"},
- {"VUID-vkCmdTraceRaysNV-None-02702", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-None-02702)"},
- {"VUID-vkCmdTraceRaysNV-None-02703", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-None-02703)"},
- {"VUID-vkCmdTraceRaysNV-None-02704", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-None-02704)"},
- {"VUID-vkCmdTraceRaysNV-None-02705", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a uniform buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-None-02705)"},
- {"VUID-vkCmdTraceRaysNV-None-02706", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a storage buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-None-02706)"},
+ {"VUID-vkCmdTraceRaysNV-None-02472", "For each set n that is statically used by the VkPipeline bound to VK_PIPELINE_BIND_POINT_RAY_TRACING_NV, a descriptor set must have been bound to n at VK_PIPELINE_BIND_POINT_RAY_TRACING_NV, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-None-02472)"},
+ {"VUID-vkCmdTraceRaysNV-None-02473", "Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the bound VkPipeline object, specified via vkCmdBindPipeline (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-None-02473)"},
+ {"VUID-vkCmdTraceRaysNV-None-02474", "A valid ray tracing pipeline must be bound to the current command buffer with VK_PIPELINE_BIND_POINT_RAY_TRACING_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-None-02474)"},
+ {"VUID-vkCmdTraceRaysNV-None-02475", "For each push constant that is statically used by the VkPipeline bound to VK_PIPELINE_BIND_POINT_RAY_TRACING_NV, a push constant value must have been set for VK_PIPELINE_BIND_POINT_RAY_TRACING_NV, with a VkPipelineLayout that is compatible for push constants with the one used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-None-02475)"},
+ {"VUID-vkCmdTraceRaysNV-None-02476", "If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_RAY_TRACING_NV uses unnormalized coordinates, it must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-None-02476)"},
+ {"VUID-vkCmdTraceRaysNV-None-02477", "If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_RAY_TRACING_NV uses unnormalized coordinates, it must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-None-02477)"},
+ {"VUID-vkCmdTraceRaysNV-None-02478", "If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_RAY_TRACING_NV uses unnormalized coordinates, it must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-None-02478)"},
+ {"VUID-vkCmdTraceRaysNV-None-02479", "If the robust buffer access feature is not enabled, and any shader stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_RAY_TRACING_NV accesses a uniform buffer, it must not access values outside of the range of that buffer specified in the bound descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-None-02479)"},
+ {"VUID-vkCmdTraceRaysNV-None-02480", "If the robust buffer access feature is not enabled, and any shader stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_RAY_TRACING_NV accesses a storage buffer, it must not access values outside of the range of that buffer specified in the bound descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-None-02480)"},
+ {"VUID-vkCmdTraceRaysNV-None-02481", "If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-None-02481)"},
+ {"VUID-vkCmdTraceRaysNV-None-02482", "If a VkImageView is sampled with VK_FILTER_CUBIC_IMG as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-None-02482)"},
+ {"VUID-vkCmdTraceRaysNV-None-02483", "Any VkImageView being sampled with VK_FILTER_CUBIC_IMG as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-None-02483)"},
{"VUID-vkCmdTraceRaysNV-callableShaderBindingOffset-02461", "callableShaderBindingOffset must be less than the size of callableShaderBindingTableBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-callableShaderBindingOffset-02461)"},
{"VUID-vkCmdTraceRaysNV-callableShaderBindingOffset-02462", "callableShaderBindingOffset must be a multiple of VkPhysicalDeviceRayTracingPropertiesNV::shaderGroupBaseAlignment (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-callableShaderBindingOffset-02462)"},
{"VUID-vkCmdTraceRaysNV-callableShaderBindingStride-02465", "callableShaderBindingStride must be a multiple of VkPhysicalDeviceRayTracingPropertiesNV::shaderGroupHandleSize (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-callableShaderBindingStride-02465)"},
{"VUID-vkCmdTraceRaysNV-callableShaderBindingStride-02468", "callableShaderBindingStride must be a less than or equal to VkPhysicalDeviceRayTracingPropertiesNV::maxShaderGroupStride (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-callableShaderBindingStride-02468)"},
{"VUID-vkCmdTraceRaysNV-callableShaderBindingTableBuffer-parameter", "If callableShaderBindingTableBuffer is not VK_NULL_HANDLE, callableShaderBindingTableBuffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-callableShaderBindingTableBuffer-parameter)"},
- {"VUID-vkCmdTraceRaysNV-commandBuffer-02701", "If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set for commandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-commandBuffer-02701)"},
- {"VUID-vkCmdTraceRaysNV-commandBuffer-02707", "If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-commandBuffer-02707)"},
- {"VUID-vkCmdTraceRaysNV-commandBuffer-02712", "If commandBuffer is a protected command buffer, any resource written to by the VkPipeline object bound to the pipeline bind point used by this command must not be an unprotected resource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-commandBuffer-02712)"},
- {"VUID-vkCmdTraceRaysNV-commandBuffer-02713", "If commandBuffer is a protected command buffer, pipeline stages other than the framebuffer-space and compute stages in the VkPipeline object bound to the pipeline bind point must not write to any resource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-commandBuffer-02713)"},
+ {"VUID-vkCmdTraceRaysNV-commandBuffer-02484", "If commandBuffer is an unprotected command buffer, and any pipeline stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_RAY_TRACING_NV reads from or writes to any image or buffer, that image or buffer must not be a protected image or protected buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-commandBuffer-02484)"},
+ {"VUID-vkCmdTraceRaysNV-commandBuffer-02485", "If commandBuffer is a protected command buffer, and any pipeline stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_RAY_TRACING_NV writes to any image or buffer, that image or buffer must not be an unprotected image or unprotected buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-commandBuffer-02485)"},
+ {"VUID-vkCmdTraceRaysNV-commandBuffer-02486", "If commandBuffer is a protected command buffer, and any pipeline stage other than the ray tracing pipeline stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_RAY_TRACING_NV reads from any image or buffer, the image or buffer must not be a protected image or protected buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-commandBuffer-02486)"},
{"VUID-vkCmdTraceRaysNV-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-commandBuffer-cmdpool)"},
{"VUID-vkCmdTraceRaysNV-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-commandBuffer-parameter)"},
{"VUID-vkCmdTraceRaysNV-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-commandBuffer-recording)"},
{"VUID-vkCmdTraceRaysNV-commonparent", "Each of callableShaderBindingTableBuffer, commandBuffer, hitShaderBindingTableBuffer, missShaderBindingTableBuffer, and raygenShaderBindingTableBuffer that are valid handles must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-commonparent)"},
{"VUID-vkCmdTraceRaysNV-depth-02471", "depth must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[2] (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-depth-02471)"},
- {"VUID-vkCmdTraceRaysNV-filterCubic-02694", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-filterCubic-02694)"},
- {"VUID-vkCmdTraceRaysNV-filterCubicMinmax-02695", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN_EXT or VK_SAMPLER_REDUCTION_MODE_MAX_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-filterCubicMinmax-02695)"},
- {"VUID-vkCmdTraceRaysNV-flags-02696", "Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-flags-02696)"},
+ {"VUID-vkCmdTraceRaysNV-flags-02487", "Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-flags-02487)"},
{"VUID-vkCmdTraceRaysNV-height-02470", "height must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[1] (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-height-02470)"},
{"VUID-vkCmdTraceRaysNV-hitShaderBindingOffset-02459", "hitShaderBindingOffset must be less than the size of hitShaderBindingTableBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-hitShaderBindingOffset-02459)"},
{"VUID-vkCmdTraceRaysNV-hitShaderBindingOffset-02460", "hitShaderBindingOffset must be a multiple of VkPhysicalDeviceRayTracingPropertiesNV::shaderGroupBaseAlignment (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-hitShaderBindingOffset-02460)"},
@@ -4201,7 +4012,6 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-vkCmdTraceRaysNV-raygenShaderBindingOffset-02455", "raygenShaderBindingOffset must be less than the size of raygenShaderBindingTableBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-raygenShaderBindingOffset-02455)"},
{"VUID-vkCmdTraceRaysNV-raygenShaderBindingOffset-02456", "raygenShaderBindingOffset must be a multiple of VkPhysicalDeviceRayTracingPropertiesNV::shaderGroupBaseAlignment (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-raygenShaderBindingOffset-02456)"},
{"VUID-vkCmdTraceRaysNV-raygenShaderBindingTableBuffer-parameter", "raygenShaderBindingTableBuffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-raygenShaderBindingTableBuffer-parameter)"},
- {"VUID-vkCmdTraceRaysNV-renderpass", "This command must only be called outside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-renderpass)"},
{"VUID-vkCmdTraceRaysNV-width-02469", "width must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[0] (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-width-02469)"},
{"VUID-vkCmdUpdateBuffer-commandBuffer-01813", "If commandBuffer is an unprotected command buffer, then dstBuffer must not be a protected buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdUpdateBuffer-commandBuffer-01813)"},
{"VUID-vkCmdUpdateBuffer-commandBuffer-01814", "If commandBuffer is a protected command buffer, then dstBuffer must not be an unprotected buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdUpdateBuffer-commandBuffer-01814)"},
@@ -4259,7 +4069,6 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-vkCmdWriteAccelerationStructuresPropertiesNV-queryPool-parameter", "queryPool must be a valid VkQueryPool handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWriteAccelerationStructuresPropertiesNV-queryPool-parameter)"},
{"VUID-vkCmdWriteAccelerationStructuresPropertiesNV-queryType-02242", "queryType must be VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWriteAccelerationStructuresPropertiesNV-queryType-02242)"},
{"VUID-vkCmdWriteAccelerationStructuresPropertiesNV-queryType-parameter", "queryType must be a valid VkQueryType value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWriteAccelerationStructuresPropertiesNV-queryType-parameter)"},
- {"VUID-vkCmdWriteAccelerationStructuresPropertiesNV-renderpass", "This command must only be called outside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWriteAccelerationStructuresPropertiesNV-renderpass)"},
{"VUID-vkCmdWriteBufferMarkerAMD-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support transfer, graphics, or compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWriteBufferMarkerAMD-commandBuffer-cmdpool)"},
{"VUID-vkCmdWriteBufferMarkerAMD-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWriteBufferMarkerAMD-commandBuffer-parameter)"},
{"VUID-vkCmdWriteBufferMarkerAMD-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWriteBufferMarkerAMD-commandBuffer-recording)"},
@@ -4361,7 +4170,6 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-vkCreateFence-pFence-parameter", "pFence must be a valid pointer to a VkFence handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateFence-pFence-parameter)"},
{"VUID-vkCreateFramebuffer-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateFramebuffer-device-parameter)"},
{"VUID-vkCreateFramebuffer-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateFramebuffer-pAllocator-parameter)"},
- {"VUID-vkCreateFramebuffer-pCreateInfo-02777", "If pCreateInfo->flags does not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, and attachmentCount is not 0, each element of pCreateInfo->pAttachments must have been created on device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateFramebuffer-pCreateInfo-02777)"},
{"VUID-vkCreateFramebuffer-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkFramebufferCreateInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateFramebuffer-pCreateInfo-parameter)"},
{"VUID-vkCreateFramebuffer-pFramebuffer-parameter", "pFramebuffer must be a valid pointer to a VkFramebuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateFramebuffer-pFramebuffer-parameter)"},
{"VUID-vkCreateGraphicsPipelines-createInfoCount-arraylength", "createInfoCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateGraphicsPipelines-createInfoCount-arraylength)"},
@@ -4373,10 +4181,6 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-vkCreateGraphicsPipelines-pPipelines-parameter", "pPipelines must be a valid pointer to an array of createInfoCount VkPipeline handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateGraphicsPipelines-pPipelines-parameter)"},
{"VUID-vkCreateGraphicsPipelines-pipelineCache-parameter", "If pipelineCache is not VK_NULL_HANDLE, pipelineCache must be a valid VkPipelineCache handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateGraphicsPipelines-pipelineCache-parameter)"},
{"VUID-vkCreateGraphicsPipelines-pipelineCache-parent", "If pipelineCache is a valid handle, it must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateGraphicsPipelines-pipelineCache-parent)"},
- {"VUID-vkCreateHeadlessSurfaceEXT-instance-parameter", "instance must be a valid VkInstance handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateHeadlessSurfaceEXT-instance-parameter)"},
- {"VUID-vkCreateHeadlessSurfaceEXT-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateHeadlessSurfaceEXT-pAllocator-parameter)"},
- {"VUID-vkCreateHeadlessSurfaceEXT-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkHeadlessSurfaceCreateInfoEXT structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateHeadlessSurfaceEXT-pCreateInfo-parameter)"},
- {"VUID-vkCreateHeadlessSurfaceEXT-pSurface-parameter", "pSurface must be a valid pointer to a VkSurfaceKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateHeadlessSurfaceEXT-pSurface-parameter)"},
{"VUID-vkCreateIOSSurfaceMVK-instance-parameter", "instance must be a valid VkInstance handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateIOSSurfaceMVK-instance-parameter)"},
{"VUID-vkCreateIOSSurfaceMVK-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateIOSSurfaceMVK-pAllocator-parameter)"},
{"VUID-vkCreateIOSSurfaceMVK-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkIOSSurfaceCreateInfoMVK structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateIOSSurfaceMVK-pCreateInfo-parameter)"},
@@ -4465,10 +4269,6 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-vkCreateSharedSwapchainsKHR-pCreateInfos-parameter", "pCreateInfos must be a valid pointer to an array of swapchainCount valid VkSwapchainCreateInfoKHR structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateSharedSwapchainsKHR-pCreateInfos-parameter)"},
{"VUID-vkCreateSharedSwapchainsKHR-pSwapchains-parameter", "pSwapchains must be a valid pointer to an array of swapchainCount VkSwapchainKHR handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateSharedSwapchainsKHR-pSwapchains-parameter)"},
{"VUID-vkCreateSharedSwapchainsKHR-swapchainCount-arraylength", "swapchainCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateSharedSwapchainsKHR-swapchainCount-arraylength)"},
- {"VUID-vkCreateStreamDescriptorSurfaceGGP-instance-parameter", "instance must be a valid VkInstance handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateStreamDescriptorSurfaceGGP-instance-parameter)"},
- {"VUID-vkCreateStreamDescriptorSurfaceGGP-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateStreamDescriptorSurfaceGGP-pAllocator-parameter)"},
- {"VUID-vkCreateStreamDescriptorSurfaceGGP-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkStreamDescriptorSurfaceCreateInfoGGP structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateStreamDescriptorSurfaceGGP-pCreateInfo-parameter)"},
- {"VUID-vkCreateStreamDescriptorSurfaceGGP-pSurface-parameter", "pSurface must be a valid pointer to a VkSurfaceKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateStreamDescriptorSurfaceGGP-pSurface-parameter)"},
{"VUID-vkCreateSwapchainKHR-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateSwapchainKHR-device-parameter)"},
{"VUID-vkCreateSwapchainKHR-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateSwapchainKHR-pAllocator-parameter)"},
{"VUID-vkCreateSwapchainKHR-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkSwapchainCreateInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateSwapchainKHR-pCreateInfo-parameter)"},
@@ -4759,7 +4559,6 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-vkFreeMemory-memory-parameter", "If memory is not VK_NULL_HANDLE, memory must be a valid VkDeviceMemory handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkFreeMemory-memory-parameter)"},
{"VUID-vkFreeMemory-memory-parent", "If memory is a valid handle, it must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkFreeMemory-memory-parent)"},
{"VUID-vkFreeMemory-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkFreeMemory-pAllocator-parameter)"},
- {"VUID-vkGetAccelerationStructureHandleNV-accelerationStructure-02787", "accelerationStructure must be bound completely and contiguously to a single VkDeviceMemory object via vkBindAccelerationStructureMemoryNV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetAccelerationStructureHandleNV-accelerationStructure-02787)"},
{"VUID-vkGetAccelerationStructureHandleNV-accelerationStructure-parameter", "accelerationStructure must be a valid VkAccelerationStructureNV handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetAccelerationStructureHandleNV-accelerationStructure-parameter)"},
{"VUID-vkGetAccelerationStructureHandleNV-accelerationStructure-parent", "accelerationStructure must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetAccelerationStructureHandleNV-accelerationStructure-parent)"},
{"VUID-vkGetAccelerationStructureHandleNV-dataSize-02240", "dataSize must be large enough to contain the result of the query, as described above (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetAccelerationStructureHandleNV-dataSize-02240)"},
@@ -4800,9 +4599,6 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-vkGetDeviceGroupPeerMemoryFeatures-remoteDeviceIndex-00693", "remoteDeviceIndex must be a valid device index (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDeviceGroupPeerMemoryFeatures-remoteDeviceIndex-00693)"},
{"VUID-vkGetDeviceGroupPresentCapabilitiesKHR-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDeviceGroupPresentCapabilitiesKHR-device-parameter)"},
{"VUID-vkGetDeviceGroupPresentCapabilitiesKHR-pDeviceGroupPresentCapabilities-parameter", "pDeviceGroupPresentCapabilities must be a valid pointer to a VkDeviceGroupPresentCapabilitiesKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDeviceGroupPresentCapabilitiesKHR-pDeviceGroupPresentCapabilities-parameter)"},
- {"VUID-vkGetDeviceGroupSurfacePresentModes2EXT-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDeviceGroupSurfacePresentModes2EXT-device-parameter)"},
- {"VUID-vkGetDeviceGroupSurfacePresentModes2EXT-pModes-parameter", "pModes must be a valid pointer to a VkDeviceGroupPresentModeFlagsKHR value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDeviceGroupSurfacePresentModes2EXT-pModes-parameter)"},
- {"VUID-vkGetDeviceGroupSurfacePresentModes2EXT-pSurfaceInfo-parameter", "pSurfaceInfo must be a valid pointer to a valid VkPhysicalDeviceSurfaceInfo2KHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDeviceGroupSurfacePresentModes2EXT-pSurfaceInfo-parameter)"},
{"VUID-vkGetDeviceGroupSurfacePresentModesKHR-commonparent", "Both of device, and surface must have been created, allocated, or retrieved from the same VkInstance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDeviceGroupSurfacePresentModesKHR-commonparent)"},
{"VUID-vkGetDeviceGroupSurfacePresentModesKHR-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDeviceGroupSurfacePresentModesKHR-device-parameter)"},
{"VUID-vkGetDeviceGroupSurfacePresentModesKHR-pModes-parameter", "pModes must be a valid pointer to a VkDeviceGroupPresentModeFlagsKHR value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDeviceGroupSurfacePresentModesKHR-pModes-parameter)"},
@@ -4844,7 +4640,7 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-vkGetEventStatus-event-parameter", "event must be a valid VkEvent handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetEventStatus-event-parameter)"},
{"VUID-vkGetEventStatus-event-parent", "event must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetEventStatus-event-parent)"},
{"VUID-vkGetFenceFdKHR-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetFenceFdKHR-device-parameter)"},
- {"VUID-vkGetFenceFdKHR-pFd-parameter", "pFd must be a valid pointer to an int value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetFenceFdKHR-pFd-parameter)"},
+ {"VUID-vkGetFenceFdKHR-pFd-parameter", "pFd must be a valid pointer to a int value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetFenceFdKHR-pFd-parameter)"},
{"VUID-vkGetFenceFdKHR-pGetFdInfo-parameter", "pGetFdInfo must be a valid pointer to a valid VkFenceGetFdInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetFenceFdKHR-pGetFdInfo-parameter)"},
{"VUID-vkGetFenceStatus-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetFenceStatus-device-parameter)"},
{"VUID-vkGetFenceStatus-fence-parameter", "fence must be a valid VkFence handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetFenceStatus-fence-parameter)"},
@@ -4887,16 +4683,16 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-vkGetImageSubresourceLayout-mipLevel-01716", "The mipLevel member of pSubresource must be less than the mipLevels specified in VkImageCreateInfo when image was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetImageSubresourceLayout-mipLevel-01716)"},
{"VUID-vkGetImageSubresourceLayout-pLayout-parameter", "pLayout must be a valid pointer to a VkSubresourceLayout structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetImageSubresourceLayout-pLayout-parameter)"},
{"VUID-vkGetImageSubresourceLayout-pSubresource-parameter", "pSubresource must be a valid pointer to a valid VkImageSubresource structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetImageSubresourceLayout-pSubresource-parameter)"},
- {"VUID-vkGetImageSubresourceLayout-tiling-02271", "If the tiling of the image is VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT, then the aspectMask member of pSubresource must be VK_IMAGE_ASPECT_MEMORY_PLANE_i_BIT_EXT and the index i must be less than the drmFormatModifierPlaneCount associated with the image's format and drmFormatModifier. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetImageSubresourceLayout-tiling-02271)"},
+ {"VUID-vkGetImageSubresourceLayout-tiling-02271", "If the tiling of the image is VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT, then the aspectMask member of pSubresource must be VK_IMAGE_ASPECT_MEMORY_PLANE_i_BIT_EXT and the index i must be less than the drmFormatModifierPlaneCount associated with the image's format and drmFormatModifier. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetImageSubresourceLayout-tiling-02271)"},
{"VUID-vkGetImageViewHandleNVX-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetImageViewHandleNVX-device-parameter)"},
{"VUID-vkGetImageViewHandleNVX-pInfo-parameter", "pInfo must be a valid pointer to a valid VkImageViewHandleInfoNVX structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetImageViewHandleNVX-pInfo-parameter)"},
{"VUID-vkGetInstanceProcAddr-instance-parameter", "If instance is not NULL, instance must be a valid VkInstance handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetInstanceProcAddr-instance-parameter)"},
{"VUID-vkGetInstanceProcAddr-pName-parameter", "pName must be a null-terminated UTF-8 string (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetInstanceProcAddr-pName-parameter)"},
{"VUID-vkGetMemoryAndroidHardwareBufferANDROID-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetMemoryAndroidHardwareBufferANDROID-device-parameter)"},
- {"VUID-vkGetMemoryAndroidHardwareBufferANDROID-pBuffer-parameter", "pBuffer must be a valid pointer to a valid pointer to an AHardwareBuffer value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetMemoryAndroidHardwareBufferANDROID-pBuffer-parameter)"},
+ {"VUID-vkGetMemoryAndroidHardwareBufferANDROID-pBuffer-parameter", "pBuffer must be a valid pointer to a valid pointer to a AHardwareBuffer value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetMemoryAndroidHardwareBufferANDROID-pBuffer-parameter)"},
{"VUID-vkGetMemoryAndroidHardwareBufferANDROID-pInfo-parameter", "pInfo must be a valid pointer to a valid VkMemoryGetAndroidHardwareBufferInfoANDROID structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetMemoryAndroidHardwareBufferANDROID-pInfo-parameter)"},
{"VUID-vkGetMemoryFdKHR-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetMemoryFdKHR-device-parameter)"},
- {"VUID-vkGetMemoryFdKHR-pFd-parameter", "pFd must be a valid pointer to an int value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetMemoryFdKHR-pFd-parameter)"},
+ {"VUID-vkGetMemoryFdKHR-pFd-parameter", "pFd must be a valid pointer to a int value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetMemoryFdKHR-pFd-parameter)"},
{"VUID-vkGetMemoryFdKHR-pGetFdInfo-parameter", "pGetFdInfo must be a valid pointer to a valid VkMemoryGetFdInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetMemoryFdKHR-pGetFdInfo-parameter)"},
{"VUID-vkGetMemoryFdPropertiesKHR-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetMemoryFdPropertiesKHR-device-parameter)"},
{"VUID-vkGetMemoryFdPropertiesKHR-fd-00673", "fd must be an external memory handle created outside of the Vulkan API. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetMemoryFdPropertiesKHR-fd-00673)"},
@@ -4930,9 +4726,6 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-vkGetPastPresentationTimingGOOGLE-pPresentationTimingCount-parameter", "pPresentationTimingCount must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPastPresentationTimingGOOGLE-pPresentationTimingCount-parameter)"},
{"VUID-vkGetPastPresentationTimingGOOGLE-pPresentationTimings-parameter", "If the value referenced by pPresentationTimingCount is not 0, and pPresentationTimings is not NULL, pPresentationTimings must be a valid pointer to an array of pPresentationTimingCount VkPastPresentationTimingGOOGLE structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPastPresentationTimingGOOGLE-pPresentationTimings-parameter)"},
{"VUID-vkGetPastPresentationTimingGOOGLE-swapchain-parameter", "swapchain must be a valid VkSwapchainKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPastPresentationTimingGOOGLE-swapchain-parameter)"},
- {"VUID-vkGetPerformanceParameterINTEL-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPerformanceParameterINTEL-device-parameter)"},
- {"VUID-vkGetPerformanceParameterINTEL-pValue-parameter", "pValue must be a valid pointer to a VkPerformanceValueINTEL structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPerformanceParameterINTEL-pValue-parameter)"},
- {"VUID-vkGetPerformanceParameterINTEL-parameter-parameter", "parameter must be a valid VkPerformanceParameterTypeINTEL value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPerformanceParameterINTEL-parameter-parameter)"},
{"VUID-vkGetPhysicalDeviceCalibrateableTimeDomainsEXT-pTimeDomainCount-parameter", "pTimeDomainCount must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceCalibrateableTimeDomainsEXT-pTimeDomainCount-parameter)"},
{"VUID-vkGetPhysicalDeviceCalibrateableTimeDomainsEXT-pTimeDomains-parameter", "If the value referenced by pTimeDomainCount is not 0, and pTimeDomains is not NULL, pTimeDomains must be a valid pointer to an array of pTimeDomainCount VkTimeDomainEXT values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceCalibrateableTimeDomainsEXT-pTimeDomains-parameter)"},
{"VUID-vkGetPhysicalDeviceCalibrateableTimeDomainsEXT-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceCalibrateableTimeDomainsEXT-physicalDevice-parameter)"},
@@ -5031,14 +4824,10 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-vkGetPhysicalDeviceSparseImageFormatProperties2-pProperties-parameter", "If the value referenced by pPropertyCount is not 0, and pProperties is not NULL, pProperties must be a valid pointer to an array of pPropertyCount VkSparseImageFormatProperties2 structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSparseImageFormatProperties2-pProperties-parameter)"},
{"VUID-vkGetPhysicalDeviceSparseImageFormatProperties2-pPropertyCount-parameter", "pPropertyCount must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSparseImageFormatProperties2-pPropertyCount-parameter)"},
{"VUID-vkGetPhysicalDeviceSparseImageFormatProperties2-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSparseImageFormatProperties2-physicalDevice-parameter)"},
- {"VUID-vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV-pCombinationCount-parameter", "pCombinationCount must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV-pCombinationCount-parameter)"},
- {"VUID-vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV-pCombinations-parameter", "If the value referenced by pCombinationCount is not 0, and pCombinations is not NULL, pCombinations must be a valid pointer to an array of pCombinationCount VkFramebufferMixedSamplesCombinationNV structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV-pCombinations-parameter)"},
- {"VUID-vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV-physicalDevice-parameter)"},
{"VUID-vkGetPhysicalDeviceSurfaceCapabilities2EXT-commonparent", "Both of physicalDevice, and surface must have been created, allocated, or retrieved from the same VkInstance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceCapabilities2EXT-commonparent)"},
{"VUID-vkGetPhysicalDeviceSurfaceCapabilities2EXT-pSurfaceCapabilities-parameter", "pSurfaceCapabilities must be a valid pointer to a VkSurfaceCapabilities2EXT structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceCapabilities2EXT-pSurfaceCapabilities-parameter)"},
{"VUID-vkGetPhysicalDeviceSurfaceCapabilities2EXT-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceCapabilities2EXT-physicalDevice-parameter)"},
{"VUID-vkGetPhysicalDeviceSurfaceCapabilities2EXT-surface-parameter", "surface must be a valid VkSurfaceKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceCapabilities2EXT-surface-parameter)"},
- {"VUID-vkGetPhysicalDeviceSurfaceCapabilities2KHR-pNext-02671", "If an instance of VkSurfaceCapabilitiesFullScreenExclusiveEXT is included in the pNext chain of pSurfaceCapabilities, an instance of VkSurfaceFullScreenExclusiveWin32InfoEXT must be included in the pNext chain of pSurfaceInfo. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceCapabilities2KHR-pNext-02671)"},
{"VUID-vkGetPhysicalDeviceSurfaceCapabilities2KHR-pSurfaceCapabilities-parameter", "pSurfaceCapabilities must be a valid pointer to a VkSurfaceCapabilities2KHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceCapabilities2KHR-pSurfaceCapabilities-parameter)"},
{"VUID-vkGetPhysicalDeviceSurfaceCapabilities2KHR-pSurfaceInfo-parameter", "pSurfaceInfo must be a valid pointer to a valid VkPhysicalDeviceSurfaceInfo2KHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceCapabilities2KHR-pSurfaceInfo-parameter)"},
{"VUID-vkGetPhysicalDeviceSurfaceCapabilities2KHR-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceCapabilities2KHR-physicalDevice-parameter)"},
@@ -5048,19 +4837,13 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-vkGetPhysicalDeviceSurfaceCapabilitiesKHR-surface-parameter", "surface must be a valid VkSurfaceKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceCapabilitiesKHR-surface-parameter)"},
{"VUID-vkGetPhysicalDeviceSurfaceFormats2KHR-pSurfaceFormatCount-parameter", "pSurfaceFormatCount must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceFormats2KHR-pSurfaceFormatCount-parameter)"},
{"VUID-vkGetPhysicalDeviceSurfaceFormats2KHR-pSurfaceFormats-parameter", "If the value referenced by pSurfaceFormatCount is not 0, and pSurfaceFormats is not NULL, pSurfaceFormats must be a valid pointer to an array of pSurfaceFormatCount VkSurfaceFormat2KHR structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceFormats2KHR-pSurfaceFormats-parameter)"},
- {"VUID-vkGetPhysicalDeviceSurfaceFormats2KHR-pSurfaceInfo-02740", "pSurfaceInfo::surface must be supported by physicalDevice, as reported by vkGetPhysicalDeviceSurfaceSupportKHR or an equivalent platform-specific mechanism. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceFormats2KHR-pSurfaceInfo-02740)"},
{"VUID-vkGetPhysicalDeviceSurfaceFormats2KHR-pSurfaceInfo-parameter", "pSurfaceInfo must be a valid pointer to a valid VkPhysicalDeviceSurfaceInfo2KHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceFormats2KHR-pSurfaceInfo-parameter)"},
{"VUID-vkGetPhysicalDeviceSurfaceFormats2KHR-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceFormats2KHR-physicalDevice-parameter)"},
{"VUID-vkGetPhysicalDeviceSurfaceFormatsKHR-commonparent", "Both of physicalDevice, and surface must have been created, allocated, or retrieved from the same VkInstance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceFormatsKHR-commonparent)"},
{"VUID-vkGetPhysicalDeviceSurfaceFormatsKHR-pSurfaceFormatCount-parameter", "pSurfaceFormatCount must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceFormatsKHR-pSurfaceFormatCount-parameter)"},
{"VUID-vkGetPhysicalDeviceSurfaceFormatsKHR-pSurfaceFormats-parameter", "If the value referenced by pSurfaceFormatCount is not 0, and pSurfaceFormats is not NULL, pSurfaceFormats must be a valid pointer to an array of pSurfaceFormatCount VkSurfaceFormatKHR structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceFormatsKHR-pSurfaceFormats-parameter)"},
{"VUID-vkGetPhysicalDeviceSurfaceFormatsKHR-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceFormatsKHR-physicalDevice-parameter)"},
- {"VUID-vkGetPhysicalDeviceSurfaceFormatsKHR-surface-02739", "surface must be supported by physicalDevice, as reported by vkGetPhysicalDeviceSurfaceSupportKHR or an equivalent platform-specific mechanism. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceFormatsKHR-surface-02739)"},
{"VUID-vkGetPhysicalDeviceSurfaceFormatsKHR-surface-parameter", "surface must be a valid VkSurfaceKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceFormatsKHR-surface-parameter)"},
- {"VUID-vkGetPhysicalDeviceSurfacePresentModes2EXT-pPresentModeCount-parameter", "pPresentModeCount must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfacePresentModes2EXT-pPresentModeCount-parameter)"},
- {"VUID-vkGetPhysicalDeviceSurfacePresentModes2EXT-pPresentModes-parameter", "If the value referenced by pPresentModeCount is not 0, and pPresentModes is not NULL, pPresentModes must be a valid pointer to an array of pPresentModeCount VkPresentModeKHR values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfacePresentModes2EXT-pPresentModes-parameter)"},
- {"VUID-vkGetPhysicalDeviceSurfacePresentModes2EXT-pSurfaceInfo-parameter", "pSurfaceInfo must be a valid pointer to a valid VkPhysicalDeviceSurfaceInfo2KHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfacePresentModes2EXT-pSurfaceInfo-parameter)"},
- {"VUID-vkGetPhysicalDeviceSurfacePresentModes2EXT-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfacePresentModes2EXT-physicalDevice-parameter)"},
{"VUID-vkGetPhysicalDeviceSurfacePresentModesKHR-commonparent", "Both of physicalDevice, and surface must have been created, allocated, or retrieved from the same VkInstance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfacePresentModesKHR-commonparent)"},
{"VUID-vkGetPhysicalDeviceSurfacePresentModesKHR-pPresentModeCount-parameter", "pPresentModeCount must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfacePresentModesKHR-pPresentModeCount-parameter)"},
{"VUID-vkGetPhysicalDeviceSurfacePresentModesKHR-pPresentModes-parameter", "If the value referenced by pPresentModeCount is not 0, and pPresentModes is not NULL, pPresentModes must be a valid pointer to an array of pPresentModeCount VkPresentModeKHR values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfacePresentModesKHR-pPresentModes-parameter)"},
@@ -5076,7 +4859,7 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-vkGetPhysicalDeviceWaylandPresentationSupportKHR-queueFamilyIndex-01306", "queueFamilyIndex must be less than pQueueFamilyPropertyCount returned by vkGetPhysicalDeviceQueueFamilyProperties for the given physicalDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceWaylandPresentationSupportKHR-queueFamilyIndex-01306)"},
{"VUID-vkGetPhysicalDeviceWin32PresentationSupportKHR-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceWin32PresentationSupportKHR-physicalDevice-parameter)"},
{"VUID-vkGetPhysicalDeviceWin32PresentationSupportKHR-queueFamilyIndex-01309", "queueFamilyIndex must be less than pQueueFamilyPropertyCount returned by vkGetPhysicalDeviceQueueFamilyProperties for the given physicalDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceWin32PresentationSupportKHR-queueFamilyIndex-01309)"},
- {"VUID-vkGetPhysicalDeviceXcbPresentationSupportKHR-connection-parameter", "connection must be a valid pointer to an xcb_connection_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceXcbPresentationSupportKHR-connection-parameter)"},
+ {"VUID-vkGetPhysicalDeviceXcbPresentationSupportKHR-connection-parameter", "connection must be a valid pointer to a xcb_connection_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceXcbPresentationSupportKHR-connection-parameter)"},
{"VUID-vkGetPhysicalDeviceXcbPresentationSupportKHR-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceXcbPresentationSupportKHR-physicalDevice-parameter)"},
{"VUID-vkGetPhysicalDeviceXcbPresentationSupportKHR-queueFamilyIndex-01312", "queueFamilyIndex must be less than pQueueFamilyPropertyCount returned by vkGetPhysicalDeviceQueueFamilyProperties for the given physicalDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceXcbPresentationSupportKHR-queueFamilyIndex-01312)"},
{"VUID-vkGetPhysicalDeviceXlibPresentationSupportKHR-dpy-parameter", "dpy must be a valid pointer to a Display value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceXlibPresentationSupportKHR-dpy-parameter)"},
@@ -5087,26 +4870,6 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-vkGetPipelineCacheData-pDataSize-parameter", "pDataSize must be a valid pointer to a size_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineCacheData-pDataSize-parameter)"},
{"VUID-vkGetPipelineCacheData-pipelineCache-parameter", "pipelineCache must be a valid VkPipelineCache handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineCacheData-pipelineCache-parameter)"},
{"VUID-vkGetPipelineCacheData-pipelineCache-parent", "pipelineCache must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineCacheData-pipelineCache-parent)"},
- {"VUID-vkGetPipelineExecutableInternalRepresentationsKHR-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineExecutableInternalRepresentationsKHR-device-parameter)"},
- {"VUID-vkGetPipelineExecutableInternalRepresentationsKHR-pExecutableInfo-parameter", "pExecutableInfo must be a valid pointer to a valid VkPipelineExecutableInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineExecutableInternalRepresentationsKHR-pExecutableInfo-parameter)"},
- {"VUID-vkGetPipelineExecutableInternalRepresentationsKHR-pInternalRepresentationCount-parameter", "pInternalRepresentationCount must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineExecutableInternalRepresentationsKHR-pInternalRepresentationCount-parameter)"},
- {"VUID-vkGetPipelineExecutableInternalRepresentationsKHR-pInternalRepresentations-parameter", "If the value referenced by pInternalRepresentationCount is not 0, and pInternalRepresentations is not NULL, pInternalRepresentations must be a valid pointer to an array of pInternalRepresentationCount VkPipelineExecutableInternalRepresentationKHR structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineExecutableInternalRepresentationsKHR-pInternalRepresentations-parameter)"},
- {"VUID-vkGetPipelineExecutableInternalRepresentationsKHR-pipeline-03277", "pipeline member of pExecutableInfo must have been created with device. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineExecutableInternalRepresentationsKHR-pipeline-03277)"},
- {"VUID-vkGetPipelineExecutableInternalRepresentationsKHR-pipeline-03278", "pipeline member of pExecutableInfo must have been created with VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR set in the flags field of VkGraphicsPipelineCreateInfo or VkComputePipelineCreateInfo. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineExecutableInternalRepresentationsKHR-pipeline-03278)"},
- {"VUID-vkGetPipelineExecutableInternalRepresentationsKHR-pipelineExecutableProperties-03276", "pipelineExecutableProperties must be enabled. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineExecutableInternalRepresentationsKHR-pipelineExecutableProperties-03276)"},
- {"VUID-vkGetPipelineExecutablePropertiesKHR-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineExecutablePropertiesKHR-device-parameter)"},
- {"VUID-vkGetPipelineExecutablePropertiesKHR-pExecutableCount-parameter", "pExecutableCount must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineExecutablePropertiesKHR-pExecutableCount-parameter)"},
- {"VUID-vkGetPipelineExecutablePropertiesKHR-pPipelineInfo-parameter", "pPipelineInfo must be a valid pointer to a valid VkPipelineInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineExecutablePropertiesKHR-pPipelineInfo-parameter)"},
- {"VUID-vkGetPipelineExecutablePropertiesKHR-pProperties-parameter", "If the value referenced by pExecutableCount is not 0, and pProperties is not NULL, pProperties must be a valid pointer to an array of pExecutableCount VkPipelineExecutablePropertiesKHR structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineExecutablePropertiesKHR-pProperties-parameter)"},
- {"VUID-vkGetPipelineExecutablePropertiesKHR-pipeline-03271", "pipeline member of pPipelineInfo must have been created with device. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineExecutablePropertiesKHR-pipeline-03271)"},
- {"VUID-vkGetPipelineExecutablePropertiesKHR-pipelineExecutableProperties-03270", "pipelineExecutableProperties must be enabled. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineExecutablePropertiesKHR-pipelineExecutableProperties-03270)"},
- {"VUID-vkGetPipelineExecutableStatisticsKHR-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineExecutableStatisticsKHR-device-parameter)"},
- {"VUID-vkGetPipelineExecutableStatisticsKHR-pExecutableInfo-parameter", "pExecutableInfo must be a valid pointer to a valid VkPipelineExecutableInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineExecutableStatisticsKHR-pExecutableInfo-parameter)"},
- {"VUID-vkGetPipelineExecutableStatisticsKHR-pStatisticCount-parameter", "pStatisticCount must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineExecutableStatisticsKHR-pStatisticCount-parameter)"},
- {"VUID-vkGetPipelineExecutableStatisticsKHR-pStatistics-parameter", "If the value referenced by pStatisticCount is not 0, and pStatistics is not NULL, pStatistics must be a valid pointer to an array of pStatisticCount VkPipelineExecutableStatisticKHR structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineExecutableStatisticsKHR-pStatistics-parameter)"},
- {"VUID-vkGetPipelineExecutableStatisticsKHR-pipeline-03273", "pipeline member of pExecutableInfo must have been created with device. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineExecutableStatisticsKHR-pipeline-03273)"},
- {"VUID-vkGetPipelineExecutableStatisticsKHR-pipeline-03274", "pipeline member of pExecutableInfo must have been created with VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR set in the flags field of VkGraphicsPipelineCreateInfo or VkComputePipelineCreateInfo. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineExecutableStatisticsKHR-pipeline-03274)"},
- {"VUID-vkGetPipelineExecutableStatisticsKHR-pipelineExecutableInfo-03272", "pipelineExecutableInfo must be enabled. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineExecutableStatisticsKHR-pipelineExecutableInfo-03272)"},
{"VUID-vkGetQueryPoolResults-dataSize-00817", "dataSize must be large enough to contain the result of each query, as described here (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetQueryPoolResults-dataSize-00817)"},
{"VUID-vkGetQueryPoolResults-dataSize-arraylength", "dataSize must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetQueryPoolResults-dataSize-arraylength)"},
{"VUID-vkGetQueryPoolResults-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetQueryPoolResults-device-parameter)"},
@@ -5142,7 +4905,7 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-vkGetRenderAreaGranularity-renderPass-parameter", "renderPass must be a valid VkRenderPass handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetRenderAreaGranularity-renderPass-parameter)"},
{"VUID-vkGetRenderAreaGranularity-renderPass-parent", "renderPass must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetRenderAreaGranularity-renderPass-parent)"},
{"VUID-vkGetSemaphoreFdKHR-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetSemaphoreFdKHR-device-parameter)"},
- {"VUID-vkGetSemaphoreFdKHR-pFd-parameter", "pFd must be a valid pointer to an int value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetSemaphoreFdKHR-pFd-parameter)"},
+ {"VUID-vkGetSemaphoreFdKHR-pFd-parameter", "pFd must be a valid pointer to a int value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetSemaphoreFdKHR-pFd-parameter)"},
{"VUID-vkGetSemaphoreFdKHR-pGetFdInfo-parameter", "pGetFdInfo must be a valid pointer to a valid VkSemaphoreGetFdInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetSemaphoreFdKHR-pGetFdInfo-parameter)"},
{"VUID-vkGetSemaphoreWin32HandleKHR-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetSemaphoreWin32HandleKHR-device-parameter)"},
{"VUID-vkGetSemaphoreWin32HandleKHR-pGetWin32HandleInfo-parameter", "pGetWin32HandleInfo must be a valid pointer to a valid VkSemaphoreGetWin32HandleInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetSemaphoreWin32HandleKHR-pGetWin32HandleInfo-parameter)"},
@@ -5183,8 +4946,6 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-vkImportSemaphoreFdKHR-semaphore-01142", "semaphore must not be associated with any queue command that has not yet completed execution on that queue (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkImportSemaphoreFdKHR-semaphore-01142)"},
{"VUID-vkImportSemaphoreWin32HandleKHR-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkImportSemaphoreWin32HandleKHR-device-parameter)"},
{"VUID-vkImportSemaphoreWin32HandleKHR-pImportSemaphoreWin32HandleInfo-parameter", "pImportSemaphoreWin32HandleInfo must be a valid pointer to a valid VkImportSemaphoreWin32HandleInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkImportSemaphoreWin32HandleKHR-pImportSemaphoreWin32HandleInfo-parameter)"},
- {"VUID-vkInitializePerformanceApiINTEL-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkInitializePerformanceApiINTEL-device-parameter)"},
- {"VUID-vkInitializePerformanceApiINTEL-pInitializeInfo-parameter", "pInitializeInfo must be a valid pointer to a valid VkInitializePerformanceApiInfoINTEL structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkInitializePerformanceApiINTEL-pInitializeInfo-parameter)"},
{"VUID-vkInvalidateMappedMemoryRanges-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkInvalidateMappedMemoryRanges-device-parameter)"},
{"VUID-vkInvalidateMappedMemoryRanges-memoryRangeCount-arraylength", "memoryRangeCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkInvalidateMappedMemoryRanges-memoryRangeCount-arraylength)"},
{"VUID-vkInvalidateMappedMemoryRanges-pMemoryRanges-parameter", "pMemoryRanges must be a valid pointer to an array of memoryRangeCount valid VkMappedMemoryRange structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkInvalidateMappedMemoryRanges-pMemoryRanges-parameter)"},
@@ -5221,7 +4982,7 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-vkQueueBindSparse-fence-parameter", "If fence is not VK_NULL_HANDLE, fence must be a valid VkFence handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueBindSparse-fence-parameter)"},
{"VUID-vkQueueBindSparse-pBindInfo-parameter", "If bindInfoCount is not 0, pBindInfo must be a valid pointer to an array of bindInfoCount valid VkBindSparseInfo structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueBindSparse-pBindInfo-parameter)"},
{"VUID-vkQueueBindSparse-pSignalSemaphores-01115", "Each element of the pSignalSemaphores member of each element of pBindInfo must be unsignaled when the semaphore signal operation it defines is executed on the device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueBindSparse-pSignalSemaphores-01115)"},
- {"VUID-vkQueueBindSparse-pWaitSemaphores-01116", "When a semaphore unsignal operation defined by any element of the pWaitSemaphores member of any element of pBindInfo executes on queue, there must be no other queues waiting on the same semaphore. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueBindSparse-pWaitSemaphores-01116)"},
+ {"VUID-vkQueueBindSparse-pWaitSemaphores-01116", "When a semaphore unsignal operation defined by any element of the pWaitSemaphores member of any element of pBindInfo executes on queue, no other queue must be waiting on the same semaphore. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueBindSparse-pWaitSemaphores-01116)"},
{"VUID-vkQueueBindSparse-pWaitSemaphores-01117", "All elements of the pWaitSemaphores member of all elements of pBindInfo must be semaphores that are signaled, or have semaphore signal operations previously submitted for execution. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueBindSparse-pWaitSemaphores-01117)"},
{"VUID-vkQueueBindSparse-queue-parameter", "queue must be a valid VkQueue handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueBindSparse-queue-parameter)"},
{"VUID-vkQueueBindSparse-queuetype", "The queue must support sparse binding operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueBindSparse-queuetype)"},
@@ -5232,12 +4993,9 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-vkQueuePresentKHR-pPresentInfo-parameter", "pPresentInfo must be a valid pointer to a valid VkPresentInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueuePresentKHR-pPresentInfo-parameter)"},
{"VUID-vkQueuePresentKHR-pSwapchains-01292", "Each element of pSwapchains member of pPresentInfo must be a swapchain that is created for a surface for which presentation is supported from queue as determined using a call to vkGetPhysicalDeviceSurfaceSupportKHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueuePresentKHR-pSwapchains-01292)"},
{"VUID-vkQueuePresentKHR-pSwapchains-01293", "If more than one member of pSwapchains was created from a display surface, all display surfaces referenced that refer to the same display must use the same display mode (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueuePresentKHR-pSwapchains-01293)"},
- {"VUID-vkQueuePresentKHR-pWaitSemaphores-01294", "When a semaphore unsignal operation defined by the elements of the pWaitSemaphores member of pPresentInfo executes on queue, there must be no other queues waiting on the same semaphore. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueuePresentKHR-pWaitSemaphores-01294)"},
+ {"VUID-vkQueuePresentKHR-pWaitSemaphores-01294", "When a semaphore unsignal operation defined by the elements of the pWaitSemaphores member of pPresentInfo executes on queue, no other queue must be waiting on the same semaphore. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueuePresentKHR-pWaitSemaphores-01294)"},
{"VUID-vkQueuePresentKHR-pWaitSemaphores-01295", "All elements of the pWaitSemaphores member of pPresentInfo must be semaphores that are signaled, or have semaphore signal operations previously submitted for execution. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueuePresentKHR-pWaitSemaphores-01295)"},
{"VUID-vkQueuePresentKHR-queue-parameter", "queue must be a valid VkQueue handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueuePresentKHR-queue-parameter)"},
- {"VUID-vkQueueSetPerformanceConfigurationINTEL-commonparent", "Both of configuration, and queue must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueSetPerformanceConfigurationINTEL-commonparent)"},
- {"VUID-vkQueueSetPerformanceConfigurationINTEL-configuration-parameter", "configuration must be a valid VkPerformanceConfigurationINTEL handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueSetPerformanceConfigurationINTEL-configuration-parameter)"},
- {"VUID-vkQueueSetPerformanceConfigurationINTEL-queue-parameter", "queue must be a valid VkQueue handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueSetPerformanceConfigurationINTEL-queue-parameter)"},
{"VUID-vkQueueSubmit-commonparent", "Both of fence, and queue that are valid handles must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueSubmit-commonparent)"},
{"VUID-vkQueueSubmit-fence-00063", "If fence is not VK_NULL_HANDLE, fence must be unsignaled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueSubmit-fence-00063)"},
{"VUID-vkQueueSubmit-fence-00064", "If fence is not VK_NULL_HANDLE, fence must not be associated with any other queue command that has not yet completed execution on that queue (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueSubmit-fence-00064)"},
@@ -5252,7 +5010,7 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-vkQueueSubmit-pSubmits-02207", "If any element of pSubmits->pCommandBuffers includes a Queue Family Transfer Acquire Operation, there must exist a previously submitted Queue Family Transfer Release Operation on a queue in the queue family identified by the acquire operation, with parameters matching the acquire operation as defined in the definition of such acquire operations, and which happens before the acquire operation. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueSubmit-pSubmits-02207)"},
{"VUID-vkQueueSubmit-pSubmits-parameter", "If submitCount is not 0, pSubmits must be a valid pointer to an array of submitCount valid VkSubmitInfo structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueSubmit-pSubmits-parameter)"},
{"VUID-vkQueueSubmit-pWaitDstStageMask-00066", "Any stage flag included in any element of the pWaitDstStageMask member of any element of pSubmits must be a pipeline stage supported by one of the capabilities of queue, as specified in the table of supported pipeline stages. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueSubmit-pWaitDstStageMask-00066)"},
- {"VUID-vkQueueSubmit-pWaitSemaphores-00068", "When a semaphore unsignal operation defined by any element of the pWaitSemaphores member of any element of pSubmits executes on queue, there must be no other queues waiting on the same semaphore. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueSubmit-pWaitSemaphores-00068)"},
+ {"VUID-vkQueueSubmit-pWaitSemaphores-00068", "When a semaphore unsignal operation defined by any element of the pWaitSemaphores member of any element of pSubmits executes on queue, no other queue must be waiting on the same semaphore. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueSubmit-pWaitSemaphores-00068)"},
{"VUID-vkQueueSubmit-pWaitSemaphores-00069", "All elements of the pWaitSemaphores member of all elements of pSubmits must be semaphores that are signaled, or have semaphore signal operations previously submitted for execution. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueSubmit-pWaitSemaphores-00069)"},
{"VUID-vkQueueSubmit-queue-parameter", "queue must be a valid VkQueue handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueSubmit-queue-parameter)"},
{"VUID-vkQueueWaitIdle-queue-parameter", "queue must be a valid VkQueue handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueWaitIdle-queue-parameter)"},
@@ -5276,12 +5034,6 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-vkRegisterObjectsNVX-ppObjectTableEntries-parameter", "ppObjectTableEntries must be a valid pointer to an array of objectCount valid VkObjectTableEntryNVX structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkRegisterObjectsNVX-ppObjectTableEntries-parameter)"},
{"VUID-vkReleaseDisplayEXT-display-parameter", "display must be a valid VkDisplayKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkReleaseDisplayEXT-display-parameter)"},
{"VUID-vkReleaseDisplayEXT-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkReleaseDisplayEXT-physicalDevice-parameter)"},
- {"VUID-vkReleaseFullScreenExclusiveModeEXT-swapchain-02677", "swapchain must not be in the retired state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkReleaseFullScreenExclusiveModeEXT-swapchain-02677)"},
- {"VUID-vkReleaseFullScreenExclusiveModeEXT-swapchain-02678", "swapchain must be a swapchain created with an instance of VkSurfaceFullScreenExclusiveInfoEXT, with fullScreenExclusive set to VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkReleaseFullScreenExclusiveModeEXT-swapchain-02678)"},
- {"VUID-vkReleasePerformanceConfigurationINTEL-configuration-02737", "configuration must not be released before all command buffers submitted while the configuration was set are in pending state. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkReleasePerformanceConfigurationINTEL-configuration-02737)"},
- {"VUID-vkReleasePerformanceConfigurationINTEL-configuration-parameter", "configuration must be a valid VkPerformanceConfigurationINTEL handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkReleasePerformanceConfigurationINTEL-configuration-parameter)"},
- {"VUID-vkReleasePerformanceConfigurationINTEL-configuration-parent", "configuration must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkReleasePerformanceConfigurationINTEL-configuration-parent)"},
- {"VUID-vkReleasePerformanceConfigurationINTEL-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkReleasePerformanceConfigurationINTEL-device-parameter)"},
{"VUID-vkResetCommandBuffer-commandBuffer-00045", "commandBuffer must not be in the pending state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkResetCommandBuffer-commandBuffer-00045)"},
{"VUID-vkResetCommandBuffer-commandBuffer-00046", "commandBuffer must have been allocated from a pool that was created with the VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkResetCommandBuffer-commandBuffer-00046)"},
{"VUID-vkResetCommandBuffer-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkResetCommandBuffer-commandBuffer-parameter)"},
@@ -5305,14 +5057,6 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-vkResetFences-pFences-01123", "Each element of pFences must not be currently associated with any queue command that has not yet completed execution on that queue (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkResetFences-pFences-01123)"},
{"VUID-vkResetFences-pFences-parameter", "pFences must be a valid pointer to an array of fenceCount valid VkFence handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkResetFences-pFences-parameter)"},
{"VUID-vkResetFences-pFences-parent", "Each element of pFences must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkResetFences-pFences-parent)"},
- {"VUID-vkResetQueryPoolEXT-None-02665", "The hostQueryReset feature must be enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkResetQueryPoolEXT-None-02665)"},
- {"VUID-vkResetQueryPoolEXT-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkResetQueryPoolEXT-device-parameter)"},
- {"VUID-vkResetQueryPoolEXT-firstQuery-02666", "firstQuery must be less than the number of queries in queryPool (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkResetQueryPoolEXT-firstQuery-02666)"},
- {"VUID-vkResetQueryPoolEXT-firstQuery-02667", "The sum of firstQuery and queryCount must be less than or equal to the number of queries in queryPool (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkResetQueryPoolEXT-firstQuery-02667)"},
- {"VUID-vkResetQueryPoolEXT-firstQuery-02741", "Submitted commands that refer to the range specified by firstQuery and queryCount in queryPool must have completed execution (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkResetQueryPoolEXT-firstQuery-02741)"},
- {"VUID-vkResetQueryPoolEXT-firstQuery-02742", "The range of queries specified by firstQuery and queryCount in queryPool must not be in use by calls to vkGetQueryPoolResults or vkResetQueryPoolEXT in other threads (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkResetQueryPoolEXT-firstQuery-02742)"},
- {"VUID-vkResetQueryPoolEXT-queryPool-parameter", "queryPool must be a valid VkQueryPool handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkResetQueryPoolEXT-queryPool-parameter)"},
- {"VUID-vkResetQueryPoolEXT-queryPool-parent", "queryPool must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkResetQueryPoolEXT-queryPool-parent)"},
{"VUID-vkSetDebugUtilsObjectNameEXT-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkSetDebugUtilsObjectNameEXT-device-parameter)"},
{"VUID-vkSetDebugUtilsObjectNameEXT-pNameInfo-02587", "pNameInfo->objectType must not be VK_OBJECT_TYPE_UNKNOWN (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkSetDebugUtilsObjectNameEXT-pNameInfo-02587)"},
{"VUID-vkSetDebugUtilsObjectNameEXT-pNameInfo-02588", "pNameInfo->objectHandle must not be VK_NULL_HANDLE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkSetDebugUtilsObjectNameEXT-pNameInfo-02588)"},
@@ -5327,10 +5071,6 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-vkSetHdrMetadataEXT-pMetadata-parameter", "pMetadata must be a valid pointer to an array of swapchainCount valid VkHdrMetadataEXT structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkSetHdrMetadataEXT-pMetadata-parameter)"},
{"VUID-vkSetHdrMetadataEXT-pSwapchains-parameter", "pSwapchains must be a valid pointer to an array of swapchainCount valid VkSwapchainKHR handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkSetHdrMetadataEXT-pSwapchains-parameter)"},
{"VUID-vkSetHdrMetadataEXT-swapchainCount-arraylength", "swapchainCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkSetHdrMetadataEXT-swapchainCount-arraylength)"},
- {"VUID-vkSetLocalDimmingAMD-XXXXX", "It is only valid to call vkSetLocalDimmingAMD if VkDisplayNativeHdrSurfaceCapabilitiesAMD::localDimmingSupport is supported. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkSetLocalDimmingAMD-XXXXX)"},
- {"VUID-vkSetLocalDimmingAMD-commonparent", "Both of device, and swapChain must have been created, allocated, or retrieved from the same VkInstance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkSetLocalDimmingAMD-commonparent)"},
- {"VUID-vkSetLocalDimmingAMD-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkSetLocalDimmingAMD-device-parameter)"},
- {"VUID-vkSetLocalDimmingAMD-swapChain-parameter", "swapChain must be a valid VkSwapchainKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkSetLocalDimmingAMD-swapChain-parameter)"},
{"VUID-vkSubmitDebugUtilsMessageEXT-instance-parameter", "instance must be a valid VkInstance handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkSubmitDebugUtilsMessageEXT-instance-parameter)"},
{"VUID-vkSubmitDebugUtilsMessageEXT-messageSeverity-parameter", "messageSeverity must be a valid VkDebugUtilsMessageSeverityFlagBitsEXT value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkSubmitDebugUtilsMessageEXT-messageSeverity-parameter)"},
{"VUID-vkSubmitDebugUtilsMessageEXT-messageTypes-parameter", "messageTypes must be a valid combination of VkDebugUtilsMessageTypeFlagBitsEXT values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkSubmitDebugUtilsMessageEXT-messageTypes-parameter)"},
@@ -5341,7 +5081,6 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-vkTrimCommandPool-commandPool-parent", "commandPool must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkTrimCommandPool-commandPool-parent)"},
{"VUID-vkTrimCommandPool-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkTrimCommandPool-device-parameter)"},
{"VUID-vkTrimCommandPool-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkTrimCommandPool-flags-zerobitmask)"},
- {"VUID-vkUninitializePerformanceApiINTEL-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkUninitializePerformanceApiINTEL-device-parameter)"},
{"VUID-vkUnmapMemory-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkUnmapMemory-device-parameter)"},
{"VUID-vkUnmapMemory-memory-00689", "memory must be currently host mapped (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkUnmapMemory-memory-00689)"},
{"VUID-vkUnmapMemory-memory-parameter", "memory must be a valid VkDeviceMemory handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkUnmapMemory-memory-parameter)"},
@@ -5370,284 +5109,3 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
{"VUID-vkWaitForFences-pFences-parameter", "pFences must be a valid pointer to an array of fenceCount valid VkFence handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkWaitForFences-pFences-parameter)"},
{"VUID-vkWaitForFences-pFences-parent", "Each element of pFences must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkWaitForFences-pFences-parent)"},
};
-
-// Defines to allow creating "must be recording" meta data
-#define VUID_CMD_ENUM_LIST(prefix)\
- prefix##NONE = 0,\
- prefix##BEGINCONDITIONALRENDERINGEXT = 1,\
- prefix##BEGINDEBUGUTILSLABELEXT = 2,\
- prefix##BEGINQUERY = 3,\
- prefix##BEGINQUERYINDEXEDEXT = 4,\
- prefix##BEGINRENDERPASS = 5,\
- prefix##BEGINRENDERPASS2KHR = 6,\
- prefix##BEGINTRANSFORMFEEDBACKEXT = 7,\
- prefix##BINDDESCRIPTORSETS = 8,\
- prefix##BINDINDEXBUFFER = 9,\
- prefix##BINDPIPELINE = 10,\
- prefix##BINDSHADINGRATEIMAGENV = 11,\
- prefix##BINDTRANSFORMFEEDBACKBUFFERSEXT = 12,\
- prefix##BINDVERTEXBUFFERS = 13,\
- prefix##BLITIMAGE = 14,\
- prefix##BUILDACCELERATIONSTRUCTURENV = 15,\
- prefix##CLEARATTACHMENTS = 16,\
- prefix##CLEARCOLORIMAGE = 17,\
- prefix##CLEARDEPTHSTENCILIMAGE = 18,\
- prefix##COPYACCELERATIONSTRUCTURENV = 19,\
- prefix##COPYBUFFER = 20,\
- prefix##COPYBUFFERTOIMAGE = 21,\
- prefix##COPYIMAGE = 22,\
- prefix##COPYIMAGETOBUFFER = 23,\
- prefix##COPYQUERYPOOLRESULTS = 24,\
- prefix##DEBUGMARKERBEGINEXT = 25,\
- prefix##DEBUGMARKERENDEXT = 26,\
- prefix##DEBUGMARKERINSERTEXT = 27,\
- prefix##DISPATCH = 28,\
- prefix##DISPATCHBASE = 29,\
- prefix##DISPATCHINDIRECT = 30,\
- prefix##DRAW = 31,\
- prefix##DRAWINDEXED = 32,\
- prefix##DRAWINDEXEDINDIRECT = 33,\
- prefix##DRAWINDEXEDINDIRECTCOUNTKHR = 34,\
- prefix##DRAWINDIRECT = 35,\
- prefix##DRAWINDIRECTBYTECOUNTEXT = 36,\
- prefix##DRAWINDIRECTCOUNTKHR = 37,\
- prefix##DRAWMESHTASKSINDIRECTCOUNTNV = 38,\
- prefix##DRAWMESHTASKSINDIRECTNV = 39,\
- prefix##DRAWMESHTASKSNV = 40,\
- prefix##ENDCONDITIONALRENDERINGEXT = 41,\
- prefix##ENDDEBUGUTILSLABELEXT = 42,\
- prefix##ENDQUERY = 43,\
- prefix##ENDQUERYINDEXEDEXT = 44,\
- prefix##ENDRENDERPASS = 45,\
- prefix##ENDRENDERPASS2KHR = 46,\
- prefix##ENDTRANSFORMFEEDBACKEXT = 47,\
- prefix##EXECUTECOMMANDS = 48,\
- prefix##FILLBUFFER = 49,\
- prefix##INSERTDEBUGUTILSLABELEXT = 50,\
- prefix##NEXTSUBPASS = 51,\
- prefix##NEXTSUBPASS2KHR = 52,\
- prefix##PIPELINEBARRIER = 53,\
- prefix##PROCESSCOMMANDSNVX = 54,\
- prefix##PUSHCONSTANTS = 55,\
- prefix##PUSHDESCRIPTORSETKHR = 56,\
- prefix##PUSHDESCRIPTORSETWITHTEMPLATEKHR = 57,\
- prefix##RESERVESPACEFORCOMMANDSNVX = 58,\
- prefix##RESETEVENT = 59,\
- prefix##RESETQUERYPOOL = 60,\
- prefix##RESOLVEIMAGE = 61,\
- prefix##SETBLENDCONSTANTS = 62,\
- prefix##SETCHECKPOINTNV = 63,\
- prefix##SETCOARSESAMPLEORDERNV = 64,\
- prefix##SETDEPTHBIAS = 65,\
- prefix##SETDEPTHBOUNDS = 66,\
- prefix##SETDEVICEMASK = 67,\
- prefix##SETDISCARDRECTANGLEEXT = 68,\
- prefix##SETEVENT = 69,\
- prefix##SETEXCLUSIVESCISSORNV = 70,\
- prefix##SETLINESTIPPLEEXT = 71,\
- prefix##SETLINEWIDTH = 72,\
- prefix##SETPERFORMANCEMARKERINTEL = 73,\
- prefix##SETPERFORMANCEOVERRIDEINTEL = 74,\
- prefix##SETPERFORMANCESTREAMMARKERINTEL = 75,\
- prefix##SETSAMPLELOCATIONSEXT = 76,\
- prefix##SETSCISSOR = 77,\
- prefix##SETSTENCILCOMPAREMASK = 78,\
- prefix##SETSTENCILREFERENCE = 79,\
- prefix##SETSTENCILWRITEMASK = 80,\
- prefix##SETVIEWPORT = 81,\
- prefix##SETVIEWPORTSHADINGRATEPALETTENV = 82,\
- prefix##SETVIEWPORTWSCALINGNV = 83,\
- prefix##TRACERAYSNV = 84,\
- prefix##UPDATEBUFFER = 85,\
- prefix##WAITEVENTS = 86,\
- prefix##WRITEACCELERATIONSTRUCTURESPROPERTIESNV = 87,\
- prefix##WRITEBUFFERMARKERAMD = 88,\
- prefix##WRITETIMESTAMP = 89,\
- prefix##ENDCOMMANDBUFFER = 90,\
- prefix##RANGE_SIZE = 91
-
-#define VUID_CMD_NAME_LIST\
- "Command_Undefined",\
- "vkCmdBeginConditionalRenderingEXT",\
- "vkCmdBeginDebugUtilsLabelEXT",\
- "vkCmdBeginQuery",\
- "vkCmdBeginQueryIndexedEXT",\
- "vkCmdBeginRenderPass",\
- "vkCmdBeginRenderPass2KHR",\
- "vkCmdBeginTransformFeedbackEXT",\
- "vkCmdBindDescriptorSets",\
- "vkCmdBindIndexBuffer",\
- "vkCmdBindPipeline",\
- "vkCmdBindShadingRateImageNV",\
- "vkCmdBindTransformFeedbackBuffersEXT",\
- "vkCmdBindVertexBuffers",\
- "vkCmdBlitImage",\
- "vkCmdBuildAccelerationStructureNV",\
- "vkCmdClearAttachments",\
- "vkCmdClearColorImage",\
- "vkCmdClearDepthStencilImage",\
- "vkCmdCopyAccelerationStructureNV",\
- "vkCmdCopyBuffer",\
- "vkCmdCopyBufferToImage",\
- "vkCmdCopyImage",\
- "vkCmdCopyImageToBuffer",\
- "vkCmdCopyQueryPoolResults",\
- "vkCmdDebugMarkerBeginEXT",\
- "vkCmdDebugMarkerEndEXT",\
- "vkCmdDebugMarkerInsertEXT",\
- "vkCmdDispatch",\
- "vkCmdDispatchBase",\
- "vkCmdDispatchIndirect",\
- "vkCmdDraw",\
- "vkCmdDrawIndexed",\
- "vkCmdDrawIndexedIndirect",\
- "vkCmdDrawIndexedIndirectCountKHR",\
- "vkCmdDrawIndirect",\
- "vkCmdDrawIndirectByteCountEXT",\
- "vkCmdDrawIndirectCountKHR",\
- "vkCmdDrawMeshTasksIndirectCountNV",\
- "vkCmdDrawMeshTasksIndirectNV",\
- "vkCmdDrawMeshTasksNV",\
- "vkCmdEndConditionalRenderingEXT",\
- "vkCmdEndDebugUtilsLabelEXT",\
- "vkCmdEndQuery",\
- "vkCmdEndQueryIndexedEXT",\
- "vkCmdEndRenderPass",\
- "vkCmdEndRenderPass2KHR",\
- "vkCmdEndTransformFeedbackEXT",\
- "vkCmdExecuteCommands",\
- "vkCmdFillBuffer",\
- "vkCmdInsertDebugUtilsLabelEXT",\
- "vkCmdNextSubpass",\
- "vkCmdNextSubpass2KHR",\
- "vkCmdPipelineBarrier",\
- "vkCmdProcessCommandsNVX",\
- "vkCmdPushConstants",\
- "vkCmdPushDescriptorSetKHR",\
- "vkCmdPushDescriptorSetWithTemplateKHR",\
- "vkCmdReserveSpaceForCommandsNVX",\
- "vkCmdResetEvent",\
- "vkCmdResetQueryPool",\
- "vkCmdResolveImage",\
- "vkCmdSetBlendConstants",\
- "vkCmdSetCheckpointNV",\
- "vkCmdSetCoarseSampleOrderNV",\
- "vkCmdSetDepthBias",\
- "vkCmdSetDepthBounds",\
- "vkCmdSetDeviceMask",\
- "vkCmdSetDiscardRectangleEXT",\
- "vkCmdSetEvent",\
- "vkCmdSetExclusiveScissorNV",\
- "vkCmdSetLineStippleEXT",\
- "vkCmdSetLineWidth",\
- "vkCmdSetPerformanceMarkerINTEL",\
- "vkCmdSetPerformanceOverrideINTEL",\
- "vkCmdSetPerformanceStreamMarkerINTEL",\
- "vkCmdSetSampleLocationsEXT",\
- "vkCmdSetScissor",\
- "vkCmdSetStencilCompareMask",\
- "vkCmdSetStencilReference",\
- "vkCmdSetStencilWriteMask",\
- "vkCmdSetViewport",\
- "vkCmdSetViewportShadingRatePaletteNV",\
- "vkCmdSetViewportWScalingNV",\
- "vkCmdTraceRaysNV",\
- "vkCmdUpdateBuffer",\
- "vkCmdWaitEvents",\
- "vkCmdWriteAccelerationStructuresPropertiesNV",\
- "vkCmdWriteBufferMarkerAMD",\
- "vkCmdWriteTimestamp",\
- "vkEndCommandBuffer"
-
-#define VUID_MUST_BE_RECORDING_LIST\
- "VUID_Undefined",\
- "VUID-vkCmdBeginConditionalRenderingEXT-commandBuffer-recording",\
- "VUID-vkCmdBeginDebugUtilsLabelEXT-commandBuffer-recording",\
- "VUID-vkCmdBeginQuery-commandBuffer-recording",\
- "VUID-vkCmdBeginQueryIndexedEXT-commandBuffer-recording",\
- "VUID-vkCmdBeginRenderPass-commandBuffer-recording",\
- "VUID-vkCmdBeginRenderPass2KHR-commandBuffer-recording",\
- "VUID-vkCmdBeginTransformFeedbackEXT-commandBuffer-recording",\
- "VUID-vkCmdBindDescriptorSets-commandBuffer-recording",\
- "VUID-vkCmdBindIndexBuffer-commandBuffer-recording",\
- "VUID-vkCmdBindPipeline-commandBuffer-recording",\
- "VUID-vkCmdBindShadingRateImageNV-commandBuffer-recording",\
- "VUID-vkCmdBindTransformFeedbackBuffersEXT-commandBuffer-recording",\
- "VUID-vkCmdBindVertexBuffers-commandBuffer-recording",\
- "VUID-vkCmdBlitImage-commandBuffer-recording",\
- "VUID-vkCmdBuildAccelerationStructureNV-commandBuffer-recording",\
- "VUID-vkCmdClearAttachments-commandBuffer-recording",\
- "VUID-vkCmdClearColorImage-commandBuffer-recording",\
- "VUID-vkCmdClearDepthStencilImage-commandBuffer-recording",\
- "VUID-vkCmdCopyAccelerationStructureNV-commandBuffer-recording",\
- "VUID-vkCmdCopyBuffer-commandBuffer-recording",\
- "VUID-vkCmdCopyBufferToImage-commandBuffer-recording",\
- "VUID-vkCmdCopyImage-commandBuffer-recording",\
- "VUID-vkCmdCopyImageToBuffer-commandBuffer-recording",\
- "VUID-vkCmdCopyQueryPoolResults-commandBuffer-recording",\
- "VUID-vkCmdDebugMarkerBeginEXT-commandBuffer-recording",\
- "VUID-vkCmdDebugMarkerEndEXT-commandBuffer-recording",\
- "VUID-vkCmdDebugMarkerInsertEXT-commandBuffer-recording",\
- "VUID-vkCmdDispatch-commandBuffer-recording",\
- "VUID-vkCmdDispatchBase-commandBuffer-recording",\
- "VUID-vkCmdDispatchIndirect-commandBuffer-recording",\
- "VUID-vkCmdDraw-commandBuffer-recording",\
- "VUID-vkCmdDrawIndexed-commandBuffer-recording",\
- "VUID-vkCmdDrawIndexedIndirect-commandBuffer-recording",\
- "VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-recording",\
- "VUID-vkCmdDrawIndirect-commandBuffer-recording",\
- "VUID-vkCmdDrawIndirectByteCountEXT-commandBuffer-recording",\
- "VUID-vkCmdDrawIndirectCountKHR-commandBuffer-recording",\
- "VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-recording",\
- "VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-recording",\
- "VUID-vkCmdDrawMeshTasksNV-commandBuffer-recording",\
- "VUID-vkCmdEndConditionalRenderingEXT-commandBuffer-recording",\
- "VUID-vkCmdEndDebugUtilsLabelEXT-commandBuffer-recording",\
- "VUID-vkCmdEndQuery-commandBuffer-recording",\
- "VUID-vkCmdEndQueryIndexedEXT-commandBuffer-recording",\
- "VUID-vkCmdEndRenderPass-commandBuffer-recording",\
- "VUID-vkCmdEndRenderPass2KHR-commandBuffer-recording",\
- "VUID-vkCmdEndTransformFeedbackEXT-commandBuffer-recording",\
- "VUID-vkCmdExecuteCommands-commandBuffer-recording",\
- "VUID-vkCmdFillBuffer-commandBuffer-recording",\
- "VUID-vkCmdInsertDebugUtilsLabelEXT-commandBuffer-recording",\
- "VUID-vkCmdNextSubpass-commandBuffer-recording",\
- "VUID-vkCmdNextSubpass2KHR-commandBuffer-recording",\
- "VUID-vkCmdPipelineBarrier-commandBuffer-recording",\
- "VUID-vkCmdProcessCommandsNVX-commandBuffer-recording",\
- "VUID-vkCmdPushConstants-commandBuffer-recording",\
- "VUID-vkCmdPushDescriptorSetKHR-commandBuffer-recording",\
- "VUID-vkCmdPushDescriptorSetWithTemplateKHR-commandBuffer-recording",\
- "VUID-vkCmdReserveSpaceForCommandsNVX-commandBuffer-recording",\
- "VUID-vkCmdResetEvent-commandBuffer-recording",\
- "VUID-vkCmdResetQueryPool-commandBuffer-recording",\
- "VUID-vkCmdResolveImage-commandBuffer-recording",\
- "VUID-vkCmdSetBlendConstants-commandBuffer-recording",\
- "VUID-vkCmdSetCheckpointNV-commandBuffer-recording",\
- "VUID-vkCmdSetCoarseSampleOrderNV-commandBuffer-recording",\
- "VUID-vkCmdSetDepthBias-commandBuffer-recording",\
- "VUID-vkCmdSetDepthBounds-commandBuffer-recording",\
- "VUID-vkCmdSetDeviceMask-commandBuffer-recording",\
- "VUID-vkCmdSetDiscardRectangleEXT-commandBuffer-recording",\
- "VUID-vkCmdSetEvent-commandBuffer-recording",\
- "VUID-vkCmdSetExclusiveScissorNV-commandBuffer-recording",\
- "VUID-vkCmdSetLineStippleEXT-commandBuffer-recording",\
- "VUID-vkCmdSetLineWidth-commandBuffer-recording",\
- "VUID-vkCmdSetPerformanceMarkerINTEL-commandBuffer-recording",\
- "VUID-vkCmdSetPerformanceOverrideINTEL-commandBuffer-recording",\
- "VUID-vkCmdSetPerformanceStreamMarkerINTEL-commandBuffer-recording",\
- "VUID-vkCmdSetSampleLocationsEXT-commandBuffer-recording",\
- "VUID-vkCmdSetScissor-commandBuffer-recording",\
- "VUID-vkCmdSetStencilCompareMask-commandBuffer-recording",\
- "VUID-vkCmdSetStencilReference-commandBuffer-recording",\
- "VUID-vkCmdSetStencilWriteMask-commandBuffer-recording",\
- "VUID-vkCmdSetViewport-commandBuffer-recording",\
- "VUID-vkCmdSetViewportShadingRatePaletteNV-commandBuffer-recording",\
- "VUID-vkCmdSetViewportWScalingNV-commandBuffer-recording",\
- "VUID-vkCmdTraceRaysNV-commandBuffer-recording",\
- "VUID-vkCmdUpdateBuffer-commandBuffer-recording",\
- "VUID-vkCmdWaitEvents-commandBuffer-recording",\
- "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-commandBuffer-recording",\
- "VUID-vkCmdWriteBufferMarkerAMD-commandBuffer-recording",\
- "VUID-vkCmdWriteTimestamp-commandBuffer-recording",\
- "VUID-vkEndCommandBuffer-commandBuffer-00059"
diff --git a/scripts/check_commit_message_format.sh b/scripts/check_commit_message_format.sh
index 8b7c3ceab..29666356a 100755
--- a/scripts/check_commit_message_format.sh
+++ b/scripts/check_commit_message_format.sh
@@ -30,7 +30,6 @@ NC='\033[0m' # No Color
# Get user-supplied commit message text for applicable commits and insert
# a unique separator string identifier. The git command returns ONLY the
# subject line and body for each of the commits.
-TRAVIS_COMMIT_RANGE="${TRAVIS_COMMIT_RANGE/.../..}"
COMMIT_TEXT=$(git log ${TRAVIS_COMMIT_RANGE} --pretty=format:"XXXNEWLINEXXX"%n%B)
# Bail if there are none
@@ -56,17 +55,17 @@ printf %s "$COMMIT_TEXT" | while IFS='' read -r line; do
fi
chars=${#line}
if [ $current_line -eq 1 ]; then
- # Subject line should be 64 chars or less
- if [ $chars -gt 64 ]; then
- echo "The following subject line exceeds 64 characters in length."
+ # Subject line should be 50 chars or less (but give some slack here)
+ if [ $chars -gt 54 ]; then
+ echo "The following subject line exceeds 50 characters in length."
echo " '$line'"
success=0
fi
i=$(($chars-1))
last_char=${line:$i:1}
# Output error if last char of subject line is not alpha-numeric
- if [[ $last_char =~ [.,] ]]; then
- echo "For the following commit, the last character of the subject line must not be a period or comma."
+ if [[ ! $last_char =~ [0-9a-zA-Z] ]]; then
+ echo "For the following commit, the last character of the subject line must not be non-alphanumeric."
echo " '$line'"
success=0
fi
@@ -77,14 +76,6 @@ printf %s "$COMMIT_TEXT" | while IFS='' read -r line; do
echo " '$line'"
success=0
fi
- # Check if first character after the colon is lower-case
- subject=$(echo $line | cut -f2 -d " ")
- firstchar=$(echo ${subject} | cut -c 1)
- if [[ "${firstchar}" =~ [a-z] ]]; then
- echo "The first word of the subject line after the ':' character must be capitalized."
- echo " '$line'"
- success=0
- fi
elif [ $current_line -eq 2 ]; then
# Commit message must have a blank line between subject and body
if [ $chars -ne 0 ]; then
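
The hunks above change the subject-line rules in check_commit_message_format.sh: the limit goes from 64 to 50 characters (with 4 characters of slack before an error is reported), the final character must be alphanumeric rather than merely not a period or comma, and the check that the first word after a ':' is capitalized is dropped, as is the TRAVIS_COMMIT_RANGE rewrite. A small Python restatement of the resulting subject-line rules (the shell script itself remains authoritative; this is only for clarity):

    import re

    def check_subject_line(subject):
        # Mirror the script's rules: <= 50 chars (54 with slack), alphanumeric last character.
        errors = []
        if len(subject) > 54:
            errors.append("subject line exceeds 50 characters")
        if subject and not re.match(r"[0-9a-zA-Z]", subject[-1]):
            errors.append("last character of the subject line must be alphanumeric")
        return errors
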
diff --git a/scripts/common_codegen.py b/scripts/common_codegen.py
index 7aa7cee81..47bb6329a 100644
--- a/scripts/common_codegen.py
+++ b/scripts/common_codegen.py
@@ -20,7 +20,8 @@
import os,re,sys,string
import xml.etree.ElementTree as etree
-from collections import namedtuple, OrderedDict
+from generator import *
+from collections import namedtuple
# Copyright text prefixing all headers (list of strings).
prefixStrings = [
@@ -49,7 +50,6 @@ prefixStrings = [
platform_dict = {
'android' : 'VK_USE_PLATFORM_ANDROID_KHR',
'fuchsia' : 'VK_USE_PLATFORM_FUCHSIA',
- 'ggp': 'VK_USE_PLATFORM_GGP',
'ios' : 'VK_USE_PLATFORM_IOS_MVK',
'macos' : 'VK_USE_PLATFORM_MACOS_MVK',
'metal' : 'VK_USE_PLATFORM_METAL_EXT',
@@ -70,50 +70,3 @@ def GetFeatureProtect(interface):
if platform is not None:
protect = platform_dict[platform]
return protect
-
-# Return a dict containing the dispatchable/non-dispatchable type of every handle
-def GetHandleTypes(tree):
- # Extend OrderedDict with common handle operations
- class HandleDict(OrderedDict):
- def IsDispatchable(self, handle_type):
- return self.get(handle_type) == 'VK_DEFINE_HANDLE'
- def IsNonDispatchable(self, handle_type):
- return self.get(handle_type) == 'VK_DEFINE_NON_DISPATCHABLE_HANDLE'
-
- handles = HandleDict()
- for elem in tree.findall("types/type/[@category='handle']"):
- if not elem.get('alias'):
- name = elem.get('name')
- handles[name] = elem.find('type').text
- return handles
-
-# Return a dict containing the category attribute of every type
-def GetTypeCategories(tree):
- type_categories = OrderedDict()
- for elem in tree.findall("types/type"):
- if not elem.get('alias'):
- # name is either an attribute or the text of a child <name> tag
- name = elem.get('name') or (elem.find("name") and elem.find('name').text)
- type_categories[name] = elem.get('category')
- return type_categories
-
-# Treats outdents a multiline string by the leading whitespace on the first line
-# Optionally indenting by the given prefix
-def Outdent(string_in, indent=''):
- string_out = re.sub('^ *', '', string_in) # kill stray leading spaces
- if string_out[0] != '\n':
- return string_in # needs new line to find the first line's indent level
-
- first_indent = string_out[1:]
- fake_indent = '\n' + ' ' * (len(first_indent) - len(first_indent.lstrip()))
- indent = '\n' + indent
-
- string_out = string_out.rstrip() + '\n' # remove trailing whitespace except for a newline
- outdent = re.sub(fake_indent, indent, string_out)
- return outdent[1:]
-
-
-# helper to define paths relative to the repo root
-def repo_relative(path):
- return os.path.abspath(os.path.join(os.path.dirname(__file__), '..', path))
-
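
This hunk strips GetHandleTypes, GetTypeCategories, Outdent, and repo_relative out of common_codegen.py (and drops the 'ggp' platform define); the generator hunks below instead query the registry tree directly. For readers of those later hunks, a standalone sketch of the handle-type lookup that GetHandleTypes provided, assuming a vk.xml registry file parsed with ElementTree (the path argument is the caller's choice):

    import xml.etree.ElementTree as etree

    def get_handle_types(registry_path):
        # Map each non-alias handle name to 'VK_DEFINE_HANDLE' or
        # 'VK_DEFINE_NON_DISPATCHABLE_HANDLE', as the removed helper did.
        tree = etree.parse(registry_path)
        handles = {}
        for elem in tree.findall("types/type/[@category='handle']"):
            if not elem.get('alias'):
                name = elem.get('name') or elem.find('name').text
                handles[name] = elem.find('type').text
        return handles
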
diff --git a/scripts/dispatch_table_helper_generator.py b/scripts/dispatch_table_helper_generator.py
index 848cb4eee..488d8f5ed 100644
--- a/scripts/dispatch_table_helper_generator.py
+++ b/scripts/dispatch_table_helper_generator.py
@@ -1,9 +1,9 @@
#!/usr/bin/python3 -i
#
-# Copyright (c) 2015-2019 The Khronos Group Inc.
-# Copyright (c) 2015-2019 Valve Corporation
-# Copyright (c) 2015-2019 LunarG, Inc.
-# Copyright (c) 2015-2019 Google Inc.
+# Copyright (c) 2015-2017 The Khronos Group Inc.
+# Copyright (c) 2015-2017 Valve Corporation
+# Copyright (c) 2015-2017 LunarG, Inc.
+# Copyright (c) 2015-2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -29,7 +29,6 @@ from common_codegen import *
# DispatchTableHelperOutputGeneratorOptions - subclass of GeneratorOptions.
class DispatchTableHelperOutputGeneratorOptions(GeneratorOptions):
def __init__(self,
- conventions = None,
filename = None,
directory = '.',
apiname = None,
@@ -48,7 +47,7 @@ class DispatchTableHelperOutputGeneratorOptions(GeneratorOptions):
apientryp = '',
alignFuncParam = 0,
expandEnumerants = True):
- GeneratorOptions.__init__(self, conventions, filename, directory, apiname, profile,
+ GeneratorOptions.__init__(self, filename, directory, apiname, profile,
versions, emitversions, defaultExtensions,
addExtensions, removeExtensions, emitExtensions, sortProcedure)
self.prefixText = prefixText
@@ -79,10 +78,6 @@ class DispatchTableHelperOutputGenerator(OutputGenerator):
# Called once at the beginning of each run
def beginFile(self, genOpts):
OutputGenerator.beginFile(self, genOpts)
-
- # Initialize members that require the tree
- self.handle_types = GetHandleTypes(self.registry.tree)
-
write("#pragma once", file=self.outFile)
# User-supplied prefix text, if any (list of strings)
if (genOpts.prefixText):
@@ -94,9 +89,9 @@ class DispatchTableHelperOutputGenerator(OutputGenerator):
write(file_comment, file=self.outFile)
# Copyright Notice
copyright = '/*\n'
- copyright += ' * Copyright (c) 2015-2019 The Khronos Group Inc.\n'
- copyright += ' * Copyright (c) 2015-2019 Valve Corporation\n'
- copyright += ' * Copyright (c) 2015-2019 LunarG, Inc.\n'
+ copyright += ' * Copyright (c) 2015-2017 The Khronos Group Inc.\n'
+ copyright += ' * Copyright (c) 2015-2017 Valve Corporation\n'
+ copyright += ' * Copyright (c) 2015-2017 LunarG, Inc.\n'
copyright += ' *\n'
copyright += ' * Licensed under the Apache License, Version 2.0 (the "License");\n'
copyright += ' * you may not use this file except in compliance with the License.\n'
@@ -123,7 +118,6 @@ class DispatchTableHelperOutputGenerator(OutputGenerator):
preamble += '#include <unordered_set>\n'
preamble += '#include <unordered_map>\n'
preamble += '#include "vk_layer_dispatch_table.h"\n'
- preamble += '#include "vk_extension_helper.h"\n'
write(copyright, file=self.outFile)
write(preamble, file=self.outFile)
@@ -173,16 +167,17 @@ class DispatchTableHelperOutputGenerator(OutputGenerator):
#
# Determine if this API should be ignored or added to the instance or device dispatch table
def AddCommandToDispatchList(self, name, handle_type, protect, cmdinfo):
- if handle_type not in self.handle_types:
+ handle = self.registry.tree.find("types/type/[name='" + handle_type + "'][@category='handle']")
+ if handle is None:
return
if handle_type != 'VkInstance' and handle_type != 'VkPhysicalDevice' and name != 'vkGetInstanceProcAddr':
self.device_dispatch_list.append((name, self.featureExtraProtect))
extension = "VK_VERSION" not in self.featureName
promoted = not extension and "VK_VERSION_1_0" != self.featureName
if promoted or extension:
- # We want feature written for all promoted entrypoints, in addition to extensions
self.device_stub_list.append([name, self.featureName])
- self.device_extension_list.append([name, self.featureName])
+ if extension:
+ self.device_extension_list.append([name, self.featureName])
# Build up stub function
return_type = ''
decl = self.makeCDecls(cmdinfo.elem)[1]
@@ -232,13 +227,12 @@ class DispatchTableHelperOutputGenerator(OutputGenerator):
ext_fcn += '// o Determine if the API has an associated extension\n'
ext_fcn += '// o If it does, determine if that extension name is present in the passed-in set of enabled_ext_names \n'
ext_fcn += '// If the APIname has no parent extension, OR its parent extension name is IN the set, return TRUE, else FALSE\n'
- ext_fcn += 'static inline bool ApiParentExtensionEnabled(const std::string api_name, const DeviceExtensions *device_extension_info) {\n'
+ ext_fcn += 'static inline bool ApiParentExtensionEnabled(const std::string api_name, const std::unordered_set<std::string> &enabled_ext_names) {\n'
ext_fcn += ' auto has_ext = api_extension_map.find(api_name);\n'
- ext_fcn += ' // Is this API part of an extension or feature group?\n'
+ ext_fcn += ' // Is this API part of an extension?\n'
ext_fcn += ' if (has_ext != api_extension_map.end()) {\n'
ext_fcn += ' // Was the extension for this API enabled in the CreateDevice call?\n'
- ext_fcn += ' auto info = device_extension_info->get_info(has_ext->second.c_str());\n'
- ext_fcn += ' if ((!info.state) || (device_extension_info->*(info.state) != true)) {\n'
+ ext_fcn += ' if (enabled_ext_names.find(has_ext->second) == enabled_ext_names.end()) {\n'
ext_fcn += ' return false;\n'
ext_fcn += ' }\n'
ext_fcn += ' }\n'
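
These hunks change the generated ApiParentExtensionEnabled from interrogating the DeviceExtensions info struct to a plain membership test against the set of extension names enabled at vkCreateDevice time. Either way the question is the same: if the API belongs to an extension, was that extension enabled? A Python restatement of the check, assuming a precomputed map from API name to its parent extension (standing in for the generated api_extension_map):

    def api_parent_extension_enabled(api_name, api_extension_map, enabled_ext_names):
        # Core entry points have no parent extension and are always allowed;
        # extension entry points require their extension to have been enabled.
        parent_ext = api_extension_map.get(api_name)
        return parent_ext is None or parent_ext in enabled_ext_names
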
diff --git a/scripts/external_revision_generator.py b/scripts/external_revision_generator.py
index 497291ae1..a7992fdc4 100644
--- a/scripts/external_revision_generator.py
+++ b/scripts/external_revision_generator.py
@@ -1,9 +1,9 @@
#!/usr/bin/env python3
#
-# Copyright (c) 2015-2019 The Khronos Group Inc.
-# Copyright (c) 2015-2019 Valve Corporation
-# Copyright (c) 2015-2019 LunarG, Inc.
-# Copyright (c) 2015-2019 Google Inc.
+# Copyright (c) 2015-2017 The Khronos Group Inc.
+# Copyright (c) 2015-2017 Valve Corporation
+# Copyright (c) 2015-2017 LunarG, Inc.
+# Copyright (c) 2015-2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -24,7 +24,6 @@ import argparse
import hashlib
import subprocess
import uuid
-import json
def generate(symbol_name, commit_id, output_header_file):
# Write commit ID to output header file
@@ -38,10 +37,10 @@ def generate(symbol_name, commit_id, output_header_file):
copyright += '\n'
copyright += '/***************************************************************************\n'
copyright += ' *\n'
- copyright += ' * Copyright (c) 2015-2019 The Khronos Group Inc.\n'
- copyright += ' * Copyright (c) 2015-2019 Valve Corporation\n'
- copyright += ' * Copyright (c) 2015-2019 LunarG, Inc.\n'
- copyright += ' * Copyright (c) 2015-2019 Google Inc.\n'
+ copyright += ' * Copyright (c) 2015-2017 The Khronos Group Inc.\n'
+ copyright += ' * Copyright (c) 2015-2017 Valve Corporation\n'
+ copyright += ' * Copyright (c) 2015-2017 LunarG, Inc.\n'
+ copyright += ' * Copyright (c) 2015-2017 Google Inc.\n'
copyright += ' *\n'
copyright += ' * Licensed under the Apache License, Version 2.0 (the "License");\n'
copyright += ' * you may not use this file except in compliance with the License.\n'
@@ -86,36 +85,21 @@ def get_commit_id_from_file(rev_file):
return sha1.hexdigest()
def get_commit_id_from_uuid():
- unique_uuid = str(uuid.uuid4())
- sha1 = hashlib.sha1();
- sha1.update(unique_uuid.encode())
- return sha1.hexdigest()
-
-def get_commit_id_from_json(json_file, json_keys):
- with open(json_file) as json_stream:
- json_data = json.load(json_stream)
- for key in json_keys.split(','):
- if type(json_data) == list:
- json_data = json_data[int(key)]
- else:
- json_data = json_data[key]
- return json_data
+ unique_uuid = str(uuid.uuid4())
+ sha1 = hashlib.sha1();
+ sha1.update(unique_uuid.encode())
+ return sha1.hexdigest()
def main():
parser = argparse.ArgumentParser()
+ parser.add_argument("-s", "--symbol_name", metavar="SYMBOL_NAME", required=True, help="C symbol name")
+ parser.add_argument("-o", "--output_header_file", metavar="OUTPUT_HEADER_FILE", required=True, help="output header file path")
rev_method_group = parser.add_mutually_exclusive_group(required=True)
rev_method_group.add_argument("--git_dir", metavar="SOURCE_DIR", help="git working copy directory")
rev_method_group.add_argument("--rev_file", metavar="REVISION_FILE", help="source revision file path (must contain a SHA1 hash")
rev_method_group.add_argument("--from_uuid", action='store_true', help="base SHA1 on a dynamically generated UUID")
- rev_method_group.add_argument("--json_file", metavar="JSON_FILE", help="path to json file")
- parser.add_argument("-s", "--symbol_name", metavar="SYMBOL_NAME", required=True, help="C symbol name")
- parser.add_argument("-o", "--output_header_file", metavar="OUTPUT_HEADER_FILE", required=True, help="output header file path")
- parser.add_argument("--json_keys", action='store', metavar="JSON_KEYS", help="comma-separated list of keys specifying SHA1 location in root json object for --json_file option")
args = parser.parse_args()
- if ('json_file' in args) != ('json_keys' in args):
- parser.error('--json_file and --json_keys must be provided together')
-
# We can either parse the latest Git commit ID out of the specified repository (preferred where possible),
# or computing the SHA1 hash of the contents of a file passed on the command line and (where necessary --
# e.g. when building the layers outside of a Git environment).
@@ -129,9 +113,7 @@ def main():
elif args.rev_file is not None:
# Read the commit ID from a file.
commit_id = get_commit_id_from_file(args.rev_file)
- elif args.json_file is not None:
- commit_id = get_commit_id_from_json(args.json_file, args.json_keys)
- elif args.from_uuid:
+ elif args.from_uuid is not None:
commit_id = get_commit_id_from_uuid()
if not is_sha1(commit_id):
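
The diff above removes the --json_file/--json_keys option pair from external_revision_generator.py along with get_commit_id_from_json, which walked a comma-separated key path (mixing dict keys and list indices) through a JSON file such as known_good.json to find a commit SHA. For reference, a standalone sketch of that traversal, mirroring the deleted function:

    import json

    def get_commit_id_from_json(json_file, json_keys):
        # Follow a path such as "repos,0,commit", indexing into lists by position
        # and into objects by key.
        with open(json_file) as json_stream:
            json_data = json.load(json_stream)
        for key in json_keys.split(','):
            if isinstance(json_data, list):
                json_data = json_data[int(key)]
            else:
                json_data = json_data[key]
        return json_data
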
diff --git a/scripts/generate_source.py b/scripts/generate_source.py
deleted file mode 100755
index db350c6e3..000000000
--- a/scripts/generate_source.py
+++ /dev/null
@@ -1,132 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (c) 2019 The Khronos Group Inc.
-# Copyright (c) 2019 Valve Corporation
-# Copyright (c) 2019 LunarG, Inc.
-# Copyright (c) 2019 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Author: Mike Schuchardt <mikes@lunarg.com>
-
-import argparse
-import filecmp
-import os
-import shutil
-import subprocess
-import sys
-import tempfile
-
-import common_codegen
-
-# files to exclude from --verify check
-verify_exclude = ['.clang-format']
-
-def main(argv):
- parser = argparse.ArgumentParser(description='Generate source code for this repository')
- parser.add_argument('registry', metavar='REGISTRY_PATH', help='path to the Vulkan-Headers registry directory')
- group = parser.add_mutually_exclusive_group()
- group.add_argument('-i', '--incremental', action='store_true', help='only update repo files that change')
- group.add_argument('-v', '--verify', action='store_true', help='verify repo files match generator output')
- args = parser.parse_args(argv)
-
- gen_cmds = [*[[common_codegen.repo_relative('scripts/lvl_genvk.py'),
- '-registry', os.path.abspath(os.path.join(args.registry, 'vk.xml')),
- '-quiet',
- filename] for filename in ["chassis.cpp",
- "chassis.h",
- "layer_chassis_dispatch.cpp",
- "layer_chassis_dispatch.h",
- "object_tracker.cpp",
- "object_tracker.h",
- "parameter_validation.cpp",
- "parameter_validation.h",
- "thread_safety.cpp",
- "thread_safety.h",
- "vk_dispatch_table_helper.h",
- "vk_enum_string_helper.h",
- "vk_extension_helper.h",
- "vk_layer_dispatch_table.h",
- "vk_object_types.h",
- "vk_safe_struct.cpp",
- "vk_safe_struct.h",
- "vk_typemap_helper.h"]],
- [common_codegen.repo_relative('scripts/vk_validation_stats.py'),
- os.path.abspath(os.path.join(args.registry, 'validusage.json')),
- '-export_header'],
- [common_codegen.repo_relative('scripts/external_revision_generator.py'),
- '--json_file', common_codegen.repo_relative('scripts/known_good.json'),
- '--json_keys', 'repos,0,commit',
- '-s', 'SPIRV_TOOLS_COMMIT_ID',
- '-o', 'spirv_tools_commit_id.h']]
-
- repo_dir = common_codegen.repo_relative('layers/generated')
-
- # get directory where generators will run
- if args.verify or args.incremental:
- # generate in temp directory so we can compare or copy later
- temp_obj = tempfile.TemporaryDirectory(prefix='VulkanVL_generated_source_')
- temp_dir = temp_obj.name
- gen_dir = temp_dir
- else:
- # generate directly in the repo
- gen_dir = repo_dir
-
- # run each code generator
- for cmd in gen_cmds:
- print(' '.join(cmd))
- try:
- subprocess.check_call([sys.executable] + cmd, cwd=gen_dir)
- except Exception as e:
- print('ERROR:', str(e))
- return 1
-
- # optional post-generation steps
- if args.verify:
- # compare contents of temp dir and repo
- temp_files = set(os.listdir(temp_dir))
- repo_files = set(os.listdir(repo_dir))
- files_match = True
- for filename in sorted((temp_files | repo_files) - set(verify_exclude)):
- if filename not in repo_files:
- print('ERROR: Missing repo file', filename)
- files_match = False
- elif filename not in temp_files:
- print('ERROR: Missing generator for', filename)
- files_match = False
- elif not filecmp.cmp(os.path.join(temp_dir, filename),
- os.path.join(repo_dir, filename),
- shallow=False):
- print('ERROR: Repo files do not match generator output for', filename)
- files_match = False
-
- # return code for test scripts
- if files_match:
- print('SUCCESS: Repo files match generator output')
- return 0
- return 1
-
- elif args.incremental:
- # copy missing or differing files from temp directory to repo
- for filename in os.listdir(temp_dir):
- temp_filename = os.path.join(temp_dir, filename)
- repo_filename = os.path.join(repo_dir, filename)
- if not os.path.exists(repo_filename) or \
- not filecmp.cmp(temp_filename, repo_filename, shallow=False):
- print('update', repo_filename)
- shutil.copyfile(temp_filename, repo_filename)
-
- return 0
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
-
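
generate_source.py, deleted in full above, was the driver that ran lvl_genvk.py once per generated file (plus vk_validation_stats.py and external_revision_generator.py) and then either verified layers/generated against the generators' output or copied changed files back into the repo. A typical invocation, assuming a Vulkan-Headers checkout whose registry directory holds vk.xml and validusage.json, was "python3 scripts/generate_source.py <Vulkan-Headers>/registry --verify" to check, or "--incremental" to refresh only the files that differ.
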
diff --git a/scripts/helper_file_generator.py b/scripts/helper_file_generator.py
index e78d1b91c..12084e905 100644
--- a/scripts/helper_file_generator.py
+++ b/scripts/helper_file_generator.py
@@ -1,9 +1,9 @@
#!/usr/bin/python3 -i
#
-# Copyright (c) 2015-2019 The Khronos Group Inc.
-# Copyright (c) 2015-2019 Valve Corporation
-# Copyright (c) 2015-2019 LunarG, Inc.
-# Copyright (c) 2015-2019 Google Inc.
+# Copyright (c) 2015-2017 The Khronos Group Inc.
+# Copyright (c) 2015-2017 Valve Corporation
+# Copyright (c) 2015-2017 LunarG, Inc.
+# Copyright (c) 2015-2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -31,7 +31,6 @@ from common_codegen import *
# HelperFileOutputGeneratorOptions - subclass of GeneratorOptions.
class HelperFileOutputGeneratorOptions(GeneratorOptions):
def __init__(self,
- conventions = None,
filename = None,
directory = '.',
apiname = None,
@@ -54,7 +53,7 @@ class HelperFileOutputGeneratorOptions(GeneratorOptions):
library_name = '',
expandEnumerants = True,
helper_file_type = ''):
- GeneratorOptions.__init__(self, conventions, filename, directory, apiname, profile,
+ GeneratorOptions.__init__(self, filename, directory, apiname, profile,
versions, emitversions, defaultExtensions,
addExtensions, removeExtensions, emitExtensions, sortProcedure)
self.prefixText = prefixText
@@ -88,8 +87,6 @@ class HelperFileOutputGenerator(OutputGenerator):
self.core_object_types = [] # Handy copy of core_object_type enum data
self.device_extension_info = dict() # Dict of device extension name defines and ifdef values
self.instance_extension_info = dict() # Dict of instance extension name defines and ifdef values
- self.structextends_list = [] # List of structs which extend another struct via pNext
-
# Named tuples to store struct and command data
self.StructType = namedtuple('StructType', ['name', 'value'])
@@ -108,8 +105,6 @@ class HelperFileOutputGenerator(OutputGenerator):
# Called once at the beginning of each run
def beginFile(self, genOpts):
OutputGenerator.beginFile(self, genOpts)
- # Initialize members that require the tree
- self.handle_types = GetHandleTypes(self.registry.tree)
# User-supplied prefix text, if any (list of strings)
self.helper_file_type = genOpts.helper_file_type
self.library_name = genOpts.library_name
@@ -122,10 +117,10 @@ class HelperFileOutputGenerator(OutputGenerator):
copyright += '\n'
copyright += '/***************************************************************************\n'
copyright += ' *\n'
- copyright += ' * Copyright (c) 2015-2019 The Khronos Group Inc.\n'
- copyright += ' * Copyright (c) 2015-2019 Valve Corporation\n'
- copyright += ' * Copyright (c) 2015-2019 LunarG, Inc.\n'
- copyright += ' * Copyright (c) 2015-2019 Google Inc.\n'
+ copyright += ' * Copyright (c) 2015-2017 The Khronos Group Inc.\n'
+ copyright += ' * Copyright (c) 2015-2017 Valve Corporation\n'
+ copyright += ' * Copyright (c) 2015-2017 LunarG, Inc.\n'
+ copyright += ' * Copyright (c) 2015-2017 Google Inc.\n'
copyright += ' *\n'
copyright += ' * Licensed under the Apache License, Version 2.0 (the "License");\n'
copyright += ' * you may not use this file except in compliance with the License.\n'
@@ -205,16 +200,13 @@ class HelperFileOutputGenerator(OutputGenerator):
if groupName == 'VkDebugReportObjectTypeEXT':
for elem in groupElem.findall('enum'):
if elem.get('supported') != 'disabled':
- if elem.get('alias') is None: # TODO: Strangely the "alias" fn parameter does not work
- item_name = elem.get('name')
- if self.debug_report_object_types.count(item_name) == 0: # TODO: Strangely there are duplicates
- self.debug_report_object_types.append(item_name)
+ item_name = elem.get('name')
+ self.debug_report_object_types.append(item_name)
elif groupName == 'VkObjectType':
for elem in groupElem.findall('enum'):
if elem.get('supported') != 'disabled':
- if elem.get('alias') is None: # TODO: Strangely the "alias" fn parameter does not work
- item_name = elem.get('name')
- self.core_object_types.append(item_name)
+ item_name = elem.get('name')
+ self.core_object_types.append(item_name)
#
# Called for each type -- if the type is a struct/union, grab the metadata
@@ -237,7 +229,7 @@ class HelperFileOutputGenerator(OutputGenerator):
def paramIsPointer(self, param):
ispointer = False
for elem in param:
- if elem.tag == 'type' and elem.tail is not None and '*' in elem.tail:
+ if ((elem.tag is not 'type') and (elem.tail is not None)) and '*' in elem.tail:
ispointer = True
return ispointer
#
@@ -309,17 +301,19 @@ class HelperFileOutputGenerator(OutputGenerator):
# non-dispatchable (dispatchable = False) handle
def TypeContainsObjectHandle(self, handle_type, dispatchable):
if dispatchable:
- type_check = self.handle_types.IsDispatchable
+ type_key = 'VK_DEFINE_HANDLE'
else:
- type_check = self.handle_types.IsNonDispatchable
- if type_check(handle_type):
+ type_key = 'VK_DEFINE_NON_DISPATCHABLE_HANDLE'
+ handle = self.registry.tree.find("types/type/[name='" + handle_type + "'][@category='handle']")
+ if handle is not None and handle.find('type').text == type_key:
return True
# if handle_type is a struct, search its members
if handle_type in self.structNames:
member_index = next((i for i, v in enumerate(self.structMembers) if v[0] == handle_type), None)
if member_index is not None:
for item in self.structMembers[member_index].members:
- if type_check(item.type):
+ handle = self.registry.tree.find("types/type/[name='" + item.type + "'][@category='handle']")
+ if handle is not None and handle.find('type').text == type_key:
return True
return False
#
@@ -353,7 +347,6 @@ class HelperFileOutputGenerator(OutputGenerator):
self.structTypes[typeName] = self.StructType(name=name, value=value)
# Store pointer/array/string info
isstaticarray = self.paramIsStaticArray(member)
- structextends = False
membersInfo.append(self.CommandParam(type=type,
name=name,
ispointer=self.paramIsPointer(member),
@@ -363,16 +356,11 @@ class HelperFileOutputGenerator(OutputGenerator):
len=self.getLen(member),
extstructs=self.registry.validextensionstructs[typeName] if name == 'pNext' else None,
cdecl=cdecl))
- # If this struct extends another, keep its name in list for further processing
- if typeinfo.elem.attrib.get('structextends') is not None:
- self.structextends_list.append(typeName)
self.structMembers.append(self.StructMemberData(name=typeName, members=membersInfo, ifdef_protect=self.featureExtraProtect))
#
# Enum_string_header: Create a routine to convert an enumerated value into a string
def GenerateEnumStringConversion(self, groupName, value_list):
outstring = '\n'
- if self.featureExtraProtect is not None:
- outstring += '\n#ifdef %s\n\n' % self.featureExtraProtect
outstring += 'static inline const char* string_%s(%s input_value)\n' % (groupName, groupName)
outstring += '{\n'
outstring += ' switch ((%s)input_value)\n' % groupName
@@ -386,29 +374,6 @@ class HelperFileOutputGenerator(OutputGenerator):
outstring += ' return "Unhandled %s";\n' % groupName
outstring += ' }\n'
outstring += '}\n'
-
- bitsIndex = groupName.find('Bits')
- if (bitsIndex != -1):
- outstring += '\n'
- flagsName = groupName[0:bitsIndex] + "s" + groupName[bitsIndex+4:]
- outstring += 'static inline std::string string_%s(%s input_value)\n' % (flagsName, flagsName)
- outstring += '{\n'
- outstring += ' std::string ret;\n'
- outstring += ' int index = 0;\n'
- outstring += ' while(input_value) {\n'
- outstring += ' if (input_value & 1) {\n'
- outstring += ' if( !ret.empty()) ret.append("|");\n'
- outstring += ' ret.append(string_%s(static_cast<%s>(1 << index)));\n' % (groupName, groupName)
- outstring += ' }\n'
- outstring += ' ++index;\n'
- outstring += ' input_value >>= 1;\n'
- outstring += ' }\n'
- outstring += ' if( ret.empty()) ret.append(string_%s(static_cast<%s>(0)));\n' % (groupName, groupName)
- outstring += ' return ret;\n'
- outstring += '}\n'
-
- if self.featureExtraProtect is not None:
- outstring += '#endif // %s\n' % self.featureExtraProtect
return outstring
#
# Tack on a helper which, given an index into a VkPhysicalDeviceFeatures structure, will print the corresponding feature name
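
This hunk removes the generator code that, for every Vk*FlagBits enum, also emitted a string_Vk*Flags helper decomposing a flag bitmask into a '|'-separated list of the per-bit names (along with the #ifdef guards around platform-protected enums). The bit-walk being dropped is simple enough to restate; a Python sketch, assuming a per-bit lookup function like the generated string_<FlagBits> routines:

    def string_flags(flags, string_bit):
        # Walk the mask one bit at a time, joining per-bit names with '|';
        # fall back to the zero value's name when no bits are set.
        names = []
        index = 0
        while flags:
            if flags & 1:
                names.append(string_bit(1 << index))
            index += 1
            flags >>= 1
        return "|".join(names) if names else string_bit(0)
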
@@ -436,7 +401,6 @@ class HelperFileOutputGenerator(OutputGenerator):
enum_string_helper_header += '#pragma warning( disable : 4065 )\n'
enum_string_helper_header += '#endif\n'
enum_string_helper_header += '\n'
- enum_string_helper_header += '#include <string>\n'
enum_string_helper_header += '#include <vulkan/vulkan.h>\n'
enum_string_helper_header += '\n'
enum_string_helper_header += self.enum_output
@@ -456,10 +420,6 @@ class HelperFileOutputGenerator(OutputGenerator):
safe_struct_helper_header += '#pragma once\n'
safe_struct_helper_header += '#include <vulkan/vulkan.h>\n'
safe_struct_helper_header += '\n'
- safe_struct_helper_header += 'void *SafePnextCopy(const void *pNext);\n'
- safe_struct_helper_header += 'void FreePnextChain(const void *pNext);\n'
- safe_struct_helper_header += 'char *SafeStringCopy(const char *in_string);\n'
- safe_struct_helper_header += '\n'
safe_struct_helper_header += self.GenerateSafeStructHeader()
return safe_struct_helper_header
#
@@ -502,11 +462,7 @@ class HelperFileOutputGenerator(OutputGenerator):
# Generate extension helper header file
def GenerateExtensionHelperHeader(self):
- V_1_1_level_feature_set = [
- 'VK_VERSION_1_1',
- ]
-
- V_1_0_instance_extensions_promoted_to_V_1_1_core = [
+ V_1_0_instance_extensions_promoted_to_core = [
'vk_khr_device_group_creation',
'vk_khr_external_fence_capabilities',
'vk_khr_external_memory_capabilities',
@@ -514,7 +470,7 @@ class HelperFileOutputGenerator(OutputGenerator):
'vk_khr_get_physical_device_properties_2',
]
- V_1_0_device_extensions_promoted_to_V_1_1_core = [
+ V_1_0_device_extensions_promoted_to_core = [
'vk_khr_16bit_storage',
'vk_khr_bind_memory_2',
'vk_khr_dedicated_allocation',
@@ -544,7 +500,6 @@ class HelperFileOutputGenerator(OutputGenerator):
'#include <unordered_map>',
'#include <utility>',
'#include <set>',
- '#include <vector>',
'',
'#include <vulkan/vulkan.h>',
'']
@@ -559,18 +514,17 @@ class HelperFileOutputGenerator(OutputGenerator):
struct_type = '%sExtensions' % type
if type == 'Instance':
extension_dict = self.instance_extension_info
- promoted_ext_list = V_1_0_instance_extensions_promoted_to_V_1_1_core
+ promoted_ext_list = V_1_0_instance_extensions_promoted_to_core
struct_decl = 'struct %s {' % struct_type
instance_struct_type = struct_type
else:
extension_dict = self.device_extension_info
- promoted_ext_list = V_1_0_device_extensions_promoted_to_V_1_1_core
+ promoted_ext_list = V_1_0_device_extensions_promoted_to_core
struct_decl = 'struct %s : public %s {' % (struct_type, instance_struct_type)
extension_items = sorted(extension_dict.items())
field_name = { ext_name: re.sub('_extension_name', '', info['define'].lower()) for ext_name, info in extension_items }
-
if type == 'Instance':
instance_field_name = field_name
instance_extension_dict = extension_dict
@@ -582,9 +536,14 @@ class HelperFileOutputGenerator(OutputGenerator):
# Output the data member list
struct = [struct_decl]
- struct.extend([ ' bool vk_feature_version_1_1{false};'])
struct.extend([ ' bool %s{false};' % field_name[ext_name] for ext_name, info in extension_items])
+ # Create struct entries for saving extension count and extension list from Instance, DeviceCreateInfo
+ if type == 'Instance':
+ struct.extend([
+ '',
+ ' std::unordered_set<std::string> device_extension_set;'])
+
# Construct the extension information map -- mapping name to data member (field), and required extensions
# The map is contained within a static function member for portability reasons.
info_type = '%sInfo' % type
@@ -607,8 +566,6 @@ class HelperFileOutputGenerator(OutputGenerator):
' typedef std::unordered_map<std::string,%s> %s;' % (info_type, info_map_type),
' static const %s &get_info(const char *name) {' %info_type,
' static const %s info_map = {' % info_map_type ])
- struct.extend([
- ' std::make_pair("VK_VERSION_1_1", %sInfo(&%sExtensions::vk_feature_version_1_1, {})),' % (type, type)])
field_format = '&' + struct_type + '::%s'
req_format = '{' + field_format+ ', %s}'
@@ -652,10 +609,14 @@ class HelperFileOutputGenerator(OutputGenerator):
' *this = %s(*instance_extensions);' % struct_type,
'']),
struct.extend([
+ '',
+ ' // Save pCreateInfo device extension list',
+ ' for (uint32_t extn = 0; extn < pCreateInfo->enabledExtensionCount; extn++) {',
+ ' device_extension_set.insert(pCreateInfo->ppEnabledExtensionNames[extn]);',
+ ' }',
'',
- ' static const std::vector<const char *> V_1_1_promoted_%s_apis = {' % type.lower() ])
+ ' static const std::vector<const char *> V_1_0_promoted_%s_extensions = {' % type.lower() ])
struct.extend([' %s_EXTENSION_NAME,' % ext_name.upper() for ext_name in promoted_ext_list])
- struct.extend([' "VK_VERSION_1_1",'])
struct.extend([
' };',
'',
@@ -669,7 +630,7 @@ class HelperFileOutputGenerator(OutputGenerator):
' }',
' uint32_t api_version = NormalizeApiVersion(requested_api_version);',
' if (api_version >= VK_API_VERSION_1_1) {',
- ' for (auto promoted_ext : V_1_1_promoted_%s_apis) {' % type.lower(),
+ ' for (auto promoted_ext : V_1_0_promoted_%s_extensions) {' % type.lower(),
' auto info = get_info(promoted_ext);',
' assert(info.state);',
' if (info.state) this->*(info.state) = true;',
@@ -693,22 +654,19 @@ class HelperFileOutputGenerator(OutputGenerator):
object_types_helper_header = '\n'
object_types_helper_header += '#pragma once\n'
object_types_helper_header += '\n'
+ object_types_helper_header += '#include <vulkan/vulkan.h>\n\n'
object_types_helper_header += self.GenerateObjectTypesHeader()
return object_types_helper_header
#
# Object types header: create object enum type header file
def GenerateObjectTypesHeader(self):
- object_types_header = '#include "cast_utils.h"\n'
- object_types_header += '\n'
+ object_types_header = ''
object_types_header += '// Object Type enum for validation layer internal object handling\n'
object_types_header += 'typedef enum VulkanObjectType {\n'
object_types_header += ' kVulkanObjectTypeUnknown = 0,\n'
enum_num = 1
type_list = [];
enum_entry_map = {}
- non_dispatchable = {}
- dispatchable = {}
- object_type_info = {}
# Output enum definition as each handle is processed, saving the names to use for the conversion routine
for item in self.object_types:
@@ -719,13 +677,6 @@ class HelperFileOutputGenerator(OutputGenerator):
object_types_header += ' = %d,\n' % enum_num
enum_num += 1
type_list.append(enum_entry)
- object_type_info[enum_entry] = { 'VkType': item }
- # We'll want lists of the dispatchable and non dispatchable handles below with access to the same info
- if self.handle_types.IsNonDispatchable(item):
- non_dispatchable[item] = enum_entry
- else:
- dispatchable[item] = enum_entry
-
object_types_header += ' kVulkanObjectTypeMax = %d,\n' % enum_num
object_types_header += ' // Aliases for backwards compatibilty of "promoted" types\n'
for (name, alias) in self.object_type_aliases:
@@ -736,30 +687,28 @@ class HelperFileOutputGenerator(OutputGenerator):
# Output name string helper
object_types_header += '// Array of object name strings for OBJECT_TYPE enum conversion\n'
object_types_header += 'static const char * const object_string[kVulkanObjectTypeMax] = {\n'
- object_types_header += ' "VkNonDispatchableHandle",\n'
+ object_types_header += ' "Unknown",\n'
for item in self.object_types:
- object_types_header += ' "%s",\n' % item
+ fixup_name = item[2:]
+ object_types_header += ' "%s",\n' % fixup_name
object_types_header += '};\n'
- # Helpers to create unified dict key from k<Name>, VK_OBJECT_TYPE_<Name>, and VK_DEBUG_REPORT_OBJECT_TYPE_<Name>
- def dro_to_key(raw_key): return re.search('^VK_DEBUG_REPORT_OBJECT_TYPE_(.*)_EXT$', raw_key).group(1).lower().replace("_","")
- def vko_to_key(raw_key): return re.search('^VK_OBJECT_TYPE_(.*)', raw_key).group(1).lower().replace("_","")
- def kenum_to_key(raw_key): return re.search('^kVulkanObjectType(.*)', raw_key).group(1).lower()
-
- dro_dict = {dro_to_key(dro) : dro for dro in self.debug_report_object_types}
- vko_dict = {vko_to_key(vko) : vko for vko in self.core_object_types}
+ # Key creation helper for map comprehensions that convert between k<Name> and VK<Name> symbols
+ def to_key(regex, raw_key): return re.search(regex, raw_key).group(1).lower().replace("_","")
# Output a conversion routine from the layer object definitions to the debug report definitions
+ # As the VK_DEBUG_REPORT types are not being updated, specify UNKNOWN for unmatched types
object_types_header += '\n'
object_types_header += '// Helper array to get Vulkan VK_EXT_debug_report object type enum from the internal layers version\n'
object_types_header += 'const VkDebugReportObjectTypeEXT get_debug_report_enum[] = {\n'
- object_types_header += ' VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, // kVulkanObjectTypeUnknown\n' # no unknown handle, so this must be here explicitly
+ object_types_header += ' VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, // kVulkanObjectTypeUnknown\n'
+ dbg_re = '^VK_DEBUG_REPORT_OBJECT_TYPE_(.*)_EXT$'
+ dbg_map = {to_key(dbg_re, dbg) : dbg for dbg in self.debug_report_object_types}
+ dbg_default = 'VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT'
for object_type in type_list:
- # VK_DEBUG_REPORT is not updated anymore; there might be missing object types
- kenum_type = dro_dict.get(kenum_to_key(object_type), 'VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT')
- object_types_header += ' %s, // %s\n' % (kenum_type, object_type)
- object_type_info[object_type]['DbgType'] = kenum_type
+ vk_object_type = dbg_map.get(object_type.replace("kVulkanObjectType", "").lower(), dbg_default)
+ object_types_header += ' %s, // %s\n' % (vk_object_type, object_type)
object_types_header += '};\n'
# Output a conversion routine from the layer object definitions to the core object type definitions
@@ -767,210 +716,61 @@ class HelperFileOutputGenerator(OutputGenerator):
object_types_header += '\n'
object_types_header += '// Helper array to get Official Vulkan VkObjectType enum from the internal layers version\n'
object_types_header += 'const VkObjectType get_object_type_enum[] = {\n'
- object_types_header += ' VK_OBJECT_TYPE_UNKNOWN, // kVulkanObjectTypeUnknown\n' # no unknown handle, so must be here explicitly
+ object_types_header += ' VK_OBJECT_TYPE_UNKNOWN, // kVulkanObjectTypeUnknown\n'
+ vko_re = '^VK_OBJECT_TYPE_(.*)'
+ vko_map = {to_key(vko_re, vko) : vko for vko in self.core_object_types}
for object_type in type_list:
- kenum_type = vko_dict[kenum_to_key(object_type)]
- object_types_header += ' %s, // %s\n' % (kenum_type, object_type)
- object_type_info[object_type]['VkoType'] = kenum_type
+ vk_object_type = vko_map[object_type.replace("kVulkanObjectType", "").lower()]
+ object_types_header += ' %s, // %s\n' % (vk_object_type, object_type)
object_types_header += '};\n'
- # Create a functions to convert between VkDebugReportObjectTypeEXT and VkObjectType
- object_types_header += '\n'
- object_types_header += 'static inline VkObjectType convertDebugReportObjectToCoreObject(VkDebugReportObjectTypeEXT debug_report_obj) {\n'
- object_types_header += ' switch (debug_report_obj) {\n'
- for dr_object_type in self.debug_report_object_types:
- object_types_header += ' case %s: return %s;\n' % (dr_object_type, vko_dict[dro_to_key(dr_object_type)])
- object_types_header += ' default: return VK_OBJECT_TYPE_UNKNOWN;\n'
- object_types_header += ' }\n'
- object_types_header += '}\n'
-
- object_types_header += '\n'
- object_types_header += 'static inline VkDebugReportObjectTypeEXT convertCoreObjectToDebugReportObject(VkObjectType core_report_obj) {\n'
- object_types_header += ' switch (core_report_obj) {\n'
+ # Create a function to convert from VkDebugReportObjectTypeEXT to VkObjectType
+ object_types_header += '\n'
+ object_types_header += '// Helper function to convert from VkDebugReportObjectTypeEXT to VkObjectType\n'
+ object_types_header += 'static inline VkObjectType convertDebugReportObjectToCoreObject(VkDebugReportObjectTypeEXT debug_report_obj){\n'
+ object_types_header += ' if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT) {\n'
+ object_types_header += ' return VK_OBJECT_TYPE_UNKNOWN;\n'
for core_object_type in self.core_object_types:
- # VK_DEBUG_REPORT is not updated anymore; there might be missing object types
- dr_object_type = dro_dict.get(vko_to_key(core_object_type))
- if dr_object_type is not None:
- object_types_header += ' case %s: return %s;\n' % (core_object_type, dr_object_type)
- object_types_header += ' default: return VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT;\n'
- object_types_header += ' }\n'
- object_types_header += '}\n'
-
- #
+ core_target_type = core_object_type.replace("VK_OBJECT_TYPE_", "").lower()
+ core_target_type = core_target_type.replace("_", "")
+ for dr_object_type in self.debug_report_object_types:
+ dr_target_type = dr_object_type.replace("VK_DEBUG_REPORT_OBJECT_TYPE_", "").lower()
+ dr_target_type = dr_target_type[:-4]
+ dr_target_type = dr_target_type.replace("_", "")
+ if core_target_type == dr_target_type:
+ object_types_header += ' } else if (debug_report_obj == %s) {\n' % dr_object_type
+ object_types_header += ' return %s;\n' % core_object_type
+ break
+ object_types_header += ' }\n'
+ object_types_header += ' return VK_OBJECT_TYPE_UNKNOWN;\n'
+ object_types_header += '}\n'
+
+ # Create a function to convert from VkObjectType to VkDebugReportObjectTypeEXT
object_types_header += '\n'
- traits_format = Outdent('''
- template <> struct VkHandleInfo<{vk_type}> {{
- static const VulkanObjectType kVulkanObjectType = {obj_type};
- static const VkDebugReportObjectTypeEXT kDebugReportObjectType = {dbg_type};
- static const VkObjectType kVkObjectType = {vko_type};
- static const char* Typename() {{
- return "{vk_type}";
- }}
- }};
- template <> struct VulkanObjectTypeInfo<{obj_type}> {{
- typedef {vk_type} Type;
- }};
- ''')
-
- object_types_header += Outdent('''
- // Traits objects from each type statically map from Vk<handleType> to the various enums
- template <typename VkType> struct VkHandleInfo {};
- template <VulkanObjectType id> struct VulkanObjectTypeInfo {};
-
- // The following line must match the vulkan_core.h condition guarding VK_DEFINE_NON_DISPATCHABLE_HANDLE
- #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__)) || defined(_M_X64) || defined(__ia64) || \
- defined(_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
- #define TYPESAFE_NONDISPATCHABLE_HANDLES
- #else
- VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkNonDispatchableHandle)
- ''') +'\n'
- object_types_header += traits_format.format(vk_type='VkNonDispatchableHandle', obj_type='kVulkanObjectTypeUnknown',
- dbg_type='VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT',
- vko_type='VK_OBJECT_TYPE_UNKNOWN') + '\n'
- object_types_header += '#endif // VK_DEFINE_HANDLE logic duplication\n'
-
- for vk_type, object_type in sorted(dispatchable.items()):
- info = object_type_info[object_type]
- object_types_header += traits_format.format(vk_type=vk_type, obj_type=object_type, dbg_type=info['DbgType'],
- vko_type=info['VkoType'])
- object_types_header += '#ifdef TYPESAFE_NONDISPATCHABLE_HANDLES\n'
- for vk_type, object_type in sorted(non_dispatchable.items()):
- info = object_type_info[object_type]
- object_types_header += traits_format.format(vk_type=vk_type, obj_type=object_type, dbg_type=info['DbgType'],
- vko_type=info['VkoType'])
- object_types_header += '#endif // TYPESAFE_NONDISPATCHABLE_HANDLES\n'
-
- object_types_header += Outdent('''
- struct VulkanTypedHandle {
- uint64_t handle;
- VulkanObjectType type;
- template <typename Handle>
- VulkanTypedHandle(Handle handle_, VulkanObjectType type_) :
- handle(CastToUint64(handle_)),
- type(type_) {
- #ifdef TYPESAFE_NONDISPATCHABLE_HANDLES
- // For 32 bit it's not always safe to check for traits <-> type
- // as all non-dispatchable handles have the same type-id and thus traits,
- // but on 64 bit we can validate the passed type matches the passed handle
- assert(type == VkHandleInfo<Handle>::kVulkanObjectType);
- #endif // TYPESAFE_NONDISPATCHABLE_HANDLES
- }
- template <typename Handle>
- Handle Cast() const {
- #ifdef TYPESAFE_NONDISPATCHABLE_HANDLES
- assert(type == VkHandleInfo<Handle>::kVulkanObjectType);
- #endif // TYPESAFE_NONDISPATCHABLE_HANDLES
- return CastFromUint64<Handle>(handle);
- }
- VulkanTypedHandle() :
- handle(VK_NULL_HANDLE),
- type(kVulkanObjectTypeUnknown) {}
- }; ''') +'\n'
-
+ object_types_header += '// Helper function to convert from VkDebugReportObjectTypeEXT to VkObjectType\n'
+ object_types_header += 'static inline VkDebugReportObjectTypeEXT convertCoreObjectToDebugReportObject(VkObjectType core_report_obj){\n'
+ object_types_header += ' if (core_report_obj == VK_OBJECT_TYPE_UNKNOWN) {\n'
+ object_types_header += ' return VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT;\n'
+ for core_object_type in self.core_object_types:
+ core_target_type = core_object_type.replace("VK_OBJECT_TYPE_", "").lower()
+ core_target_type = core_target_type.replace("_", "")
+ for dr_object_type in self.debug_report_object_types:
+ dr_target_type = dr_object_type.replace("VK_DEBUG_REPORT_OBJECT_TYPE_", "").lower()
+ dr_target_type = dr_target_type[:-4]
+ dr_target_type = dr_target_type.replace("_", "")
+ if core_target_type == dr_target_type:
+ object_types_header += ' } else if (core_report_obj == %s) {\n' % core_object_type
+ object_types_header += ' return %s;\n' % dr_object_type
+ break
+ object_types_header += ' }\n'
+ object_types_header += ' return VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT;\n'
+ object_types_header += '}\n'
return object_types_header
#
- # Generate pNext handling function
- def build_safe_struct_utility_funcs(self):
- # Construct Safe-struct helper functions
-
- string_copy_proc = '\n\n'
- string_copy_proc += 'char *SafeStringCopy(const char *in_string) {\n'
- string_copy_proc += ' if (nullptr == in_string) return nullptr;\n'
- string_copy_proc += ' char* dest = new char[std::strlen(in_string) + 1];\n'
- string_copy_proc += ' return std::strcpy(dest, in_string);\n'
- string_copy_proc += '}\n'
-
- build_pnext_proc = '\n'
- build_pnext_proc += 'void *SafePnextCopy(const void *pNext) {\n'
- build_pnext_proc += ' if (!pNext) return nullptr;\n'
- build_pnext_proc += '\n'
- build_pnext_proc += ' void *safe_pNext;\n'
- build_pnext_proc += ' const VkBaseOutStructure *header = reinterpret_cast<const VkBaseOutStructure *>(pNext);\n'
- build_pnext_proc += '\n'
- build_pnext_proc += ' switch (header->sType) {\n'
- # Add special-case code to copy beloved secret loader structs
- build_pnext_proc += ' // Special-case Loader Instance Struct passed to/from layer in pNext chain\n'
- build_pnext_proc += ' case VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO: {\n'
- build_pnext_proc += ' VkLayerInstanceCreateInfo *struct_copy = new VkLayerInstanceCreateInfo;\n'
- build_pnext_proc += ' // TODO: Uses original VkLayerInstanceLink* chain, which should be okay for our uses\n'
- build_pnext_proc += ' memcpy(struct_copy, pNext, sizeof(VkLayerInstanceCreateInfo));\n'
- build_pnext_proc += ' struct_copy->pNext = SafePnextCopy(header->pNext);\n'
- build_pnext_proc += ' safe_pNext = struct_copy;\n'
- build_pnext_proc += ' break;\n'
- build_pnext_proc += ' }\n'
- build_pnext_proc += ' // Special-case Loader Device Struct passed to/from layer in pNext chain\n'
- build_pnext_proc += ' case VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO: {\n'
- build_pnext_proc += ' VkLayerDeviceCreateInfo *struct_copy = new VkLayerDeviceCreateInfo;\n'
- build_pnext_proc += ' // TODO: Uses original VkLayerDeviceLink*, which should be okay for our uses\n'
- build_pnext_proc += ' memcpy(struct_copy, pNext, sizeof(VkLayerDeviceCreateInfo));\n'
- build_pnext_proc += ' struct_copy->pNext = SafePnextCopy(header->pNext);\n'
- build_pnext_proc += ' safe_pNext = struct_copy;\n'
- build_pnext_proc += ' break;\n'
- build_pnext_proc += ' }\n'
-
- free_pnext_proc = '\n'
- free_pnext_proc += 'void FreePnextChain(const void *pNext) {\n'
- free_pnext_proc += ' if (!pNext) return;\n'
- free_pnext_proc += '\n'
- free_pnext_proc += ' auto header = reinterpret_cast<const VkBaseOutStructure *>(pNext);\n'
- free_pnext_proc += '\n'
- free_pnext_proc += ' switch (header->sType) {\n'
- free_pnext_proc += ' // Special-case Loader Instance Struct passed to/from layer in pNext chain\n'
- free_pnext_proc += ' case VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO:\n'
- free_pnext_proc += ' FreePnextChain(header->pNext);\n'
- free_pnext_proc += ' delete reinterpret_cast<const VkLayerInstanceCreateInfo *>(pNext);\n'
- free_pnext_proc += ' break;\n'
- free_pnext_proc += ' // Special-case Loader Device Struct passed to/from layer in pNext chain\n'
- free_pnext_proc += ' case VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO:\n'
- free_pnext_proc += ' FreePnextChain(header->pNext);\n'
- free_pnext_proc += ' delete reinterpret_cast<const VkLayerDeviceCreateInfo *>(pNext);\n'
- free_pnext_proc += ' break;\n'
-
- chain_structs = tuple(s for s in self.structMembers if s.name in self.structextends_list)
- ifdefs = sorted({cs.ifdef_protect for cs in chain_structs}, key = lambda i : i if i is not None else '')
- for ifdef in ifdefs:
- if ifdef is not None:
- build_pnext_proc += '#ifdef %s\n' % ifdef
- free_pnext_proc += '#ifdef %s\n' % ifdef
-
- assorted_chain_structs = tuple(s for s in chain_structs if s.ifdef_protect == ifdef)
- for struct in assorted_chain_structs:
- build_pnext_proc += ' case %s:\n' % self.structTypes[struct.name].value
- build_pnext_proc += ' safe_pNext = new safe_%s(reinterpret_cast<const %s *>(pNext));\n' % (struct.name, struct.name)
- build_pnext_proc += ' break;\n'
-
- free_pnext_proc += ' case %s:\n' % self.structTypes[struct.name].value
- free_pnext_proc += ' delete reinterpret_cast<const safe_%s *>(header);\n' % struct.name
- free_pnext_proc += ' break;\n'
-
- if ifdef is not None:
- build_pnext_proc += '#endif // %s\n' % ifdef
- free_pnext_proc += '#endif // %s\n' % ifdef
-
- build_pnext_proc += ' default: // Encountered an unknown sType -- skip (do not copy) this entry in the chain\n'
- build_pnext_proc += ' safe_pNext = SafePnextCopy(header->pNext);\n'
- build_pnext_proc += ' break;\n'
- build_pnext_proc += ' }\n'
- build_pnext_proc += '\n'
- build_pnext_proc += ' return safe_pNext;\n'
- build_pnext_proc += '}\n'
-
- free_pnext_proc += ' default: // Encountered an unknown sType -- panic, there should be none such in safe chain\n'
- free_pnext_proc += ' assert(false);\n'
- free_pnext_proc += ' FreePnextChain(header->pNext);\n'
- free_pnext_proc += ' break;\n'
- free_pnext_proc += ' }\n'
- free_pnext_proc += '}\n'
-
- pnext_procs = string_copy_proc + build_pnext_proc + free_pnext_proc
- return pnext_procs
- #
# Determine if a structure needs a safe_struct helper function
# That is, it has an sType or one of its members is a pointer
def NeedSafeStruct(self, structure):
- if 'VkBase' in structure.name:
- return False
if 'sType' == structure.name:
return True
for member in structure.members:
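
The large hunk above drops, among other things, the generator code for the per-handle VkHandleInfo traits, VulkanTypedHandle, and the SafePnextCopy/FreePnextChain utilities. The pNext helpers deep-copied a structure chain by switching on each node's sType, allocating the matching safe_* wrapper (or the loader's special-case create-info structs), and recursing on pNext; FreePnextChain undid the same walk. A compact Python restatement of that recursion, with a generic node standing in for the sType dispatch (the dataclass is purely illustrative):

    from dataclasses import dataclass
    from typing import Optional

    @dataclass
    class ChainNode:
        s_type: str                       # stands in for VkStructureType
        payload: dict                     # stands in for the structure's other members
        p_next: Optional["ChainNode"] = None

    def safe_pnext_copy(node):
        # Deep-copy the chain front to back, as SafePnextCopy did via safe_* constructors;
        # unknown nodes were skipped there, a detail omitted in this sketch.
        if node is None:
            return None
        return ChainNode(node.s_type, dict(node.payload), safe_pnext_copy(node.p_next))
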
@@ -982,16 +782,9 @@ class HelperFileOutputGenerator(OutputGenerator):
def GenerateSafeStructHelperSource(self):
safe_struct_helper_source = '\n'
safe_struct_helper_source += '#include "vk_safe_struct.h"\n'
- safe_struct_helper_source += '\n'
safe_struct_helper_source += '#include <string.h>\n'
- safe_struct_helper_source += '#include <cassert>\n'
- safe_struct_helper_source += '#include <cstring>\n'
- safe_struct_helper_source += '\n'
- safe_struct_helper_source += '#include <vulkan/vk_layer.h>\n'
safe_struct_helper_source += '\n'
safe_struct_helper_source += self.GenerateSafeStructSource()
- safe_struct_helper_source += self.build_safe_struct_utility_funcs()
-
return safe_struct_helper_source
#
# safe_struct source -- create bodies of safe struct helper functions
@@ -1034,7 +827,7 @@ class HelperFileOutputGenerator(OutputGenerator):
' case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:\n'
' if (descriptorCount && in_struct->pImageInfo) {\n'
' pImageInfo = new VkDescriptorImageInfo[descriptorCount];\n'
- ' for (uint32_t i = 0; i < descriptorCount; ++i) {\n'
+ ' for (uint32_t i=0; i<descriptorCount; ++i) {\n'
' pImageInfo[i] = in_struct->pImageInfo[i];\n'
' }\n'
' }\n'
@@ -1045,7 +838,7 @@ class HelperFileOutputGenerator(OutputGenerator):
' case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:\n'
' if (descriptorCount && in_struct->pBufferInfo) {\n'
' pBufferInfo = new VkDescriptorBufferInfo[descriptorCount];\n'
- ' for (uint32_t i = 0; i < descriptorCount; ++i) {\n'
+ ' for (uint32_t i=0; i<descriptorCount; ++i) {\n'
' pBufferInfo[i] = in_struct->pBufferInfo[i];\n'
' }\n'
' }\n'
@@ -1054,7 +847,7 @@ class HelperFileOutputGenerator(OutputGenerator):
' case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:\n'
' if (descriptorCount && in_struct->pTexelBufferView) {\n'
' pTexelBufferView = new VkBufferView[descriptorCount];\n'
- ' for (uint32_t i = 0; i < descriptorCount; ++i) {\n'
+ ' for (uint32_t i=0; i<descriptorCount; ++i) {\n'
' pTexelBufferView[i] = in_struct->pTexelBufferView[i];\n'
' }\n'
' }\n'
@@ -1071,7 +864,7 @@ class HelperFileOutputGenerator(OutputGenerator):
'VkGraphicsPipelineCreateInfo' :
' if (stageCount && in_struct->pStages) {\n'
' pStages = new safe_VkPipelineShaderStageCreateInfo[stageCount];\n'
- ' for (uint32_t i = 0; i < stageCount; ++i) {\n'
+ ' for (uint32_t i=0; i<stageCount; ++i) {\n'
' pStages[i].initialize(&in_struct->pStages[i]);\n'
' }\n'
' }\n'
@@ -1085,7 +878,7 @@ class HelperFileOutputGenerator(OutputGenerator):
' pInputAssemblyState = NULL;\n'
' bool has_tessellation_stage = false;\n'
' if (stageCount && pStages)\n'
- ' for (uint32_t i = 0; i < stageCount && !has_tessellation_stage; ++i)\n'
+ ' for (uint32_t i=0; i<stageCount && !has_tessellation_stage; ++i)\n'
' if (pStages[i].stage == VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT || pStages[i].stage == VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT)\n'
' has_tessellation_stage = true;\n'
' if (in_struct->pTessellationState && has_tessellation_stage)\n'
@@ -1148,7 +941,7 @@ class HelperFileOutputGenerator(OutputGenerator):
' const bool sampler_type = in_struct->descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER || in_struct->descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;\n'
' if (descriptorCount && in_struct->pImmutableSamplers && sampler_type) {\n'
' pImmutableSamplers = new VkSampler[descriptorCount];\n'
- ' for (uint32_t i = 0; i < descriptorCount; ++i) {\n'
+ ' for (uint32_t i=0; i<descriptorCount; ++i) {\n'
' pImmutableSamplers[i] = in_struct->pImmutableSamplers[i];\n'
' }\n'
' }\n',
@@ -1157,10 +950,9 @@ class HelperFileOutputGenerator(OutputGenerator):
custom_copy_txt = {
# VkGraphicsPipelineCreateInfo is special case because it has custom construct parameters
'VkGraphicsPipelineCreateInfo' :
- ' pNext = SafePnextCopy(src.pNext);\n'
' if (stageCount && src.pStages) {\n'
' pStages = new safe_VkPipelineShaderStageCreateInfo[stageCount];\n'
- ' for (uint32_t i = 0; i < stageCount; ++i) {\n'
+ ' for (uint32_t i=0; i<stageCount; ++i) {\n'
' pStages[i].initialize(&src.pStages[i]);\n'
' }\n'
' }\n'
@@ -1174,7 +966,7 @@ class HelperFileOutputGenerator(OutputGenerator):
' pInputAssemblyState = NULL;\n'
' bool has_tessellation_stage = false;\n'
' if (stageCount && pStages)\n'
- ' for (uint32_t i = 0; i < stageCount && !has_tessellation_stage; ++i)\n'
+ ' for (uint32_t i=0; i<stageCount && !has_tessellation_stage; ++i)\n'
' if (pStages[i].stage == VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT || pStages[i].stage == VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT)\n'
' has_tessellation_stage = true;\n'
' if (src.pTessellationState && has_tessellation_stage)\n'
@@ -1208,7 +1000,6 @@ class HelperFileOutputGenerator(OutputGenerator):
' pDynamicState = NULL;\n',
# VkPipelineViewportStateCreateInfo is special case because it has custom construct parameters
'VkPipelineViewportStateCreateInfo' :
- ' pNext = SafePnextCopy(src.pNext);\n'
' if (src.pViewports) {\n'
' pViewports = new VkViewport[src.viewportCount];\n'
' memcpy ((void *)pViewports, (void *)src.pViewports, sizeof(VkViewport)*src.viewportCount);\n'
@@ -1226,12 +1017,9 @@ class HelperFileOutputGenerator(OutputGenerator):
custom_destruct_txt = {'VkShaderModuleCreateInfo' :
' if (pCode)\n'
' delete[] reinterpret_cast<const uint8_t *>(pCode);\n' }
- copy_pnext = ''
- copy_strings = ''
+
for member in item.members:
m_type = member.type
- if member.name == 'pNext':
- copy_pnext = ' pNext = SafePnextCopy(in_struct->pNext);\n'
if member.type in self.structNames:
member_index = next((i for i, v in enumerate(self.structMembers) if v[0] == member.type), None)
if member_index is not None and self.NeedSafeStruct(self.structMembers[member_index]) == True:
@@ -1239,30 +1027,9 @@ class HelperFileOutputGenerator(OutputGenerator):
if member.ispointer and 'safe_' not in m_type and self.TypeContainsObjectHandle(member.type, False) == False:
# Ptr types w/o a safe_struct, for non-null case need to allocate new ptr and copy data in
if m_type in ['void', 'char']:
- if member.name != 'pNext':
- if m_type == 'char':
- # Create deep copies of strings
- if member.len:
- copy_strings += ' char **tmp_%s = new char *[in_struct->%s];\n' % (member.name, member.len)
- copy_strings += ' for (uint32_t i = 0; i < %s; ++i) {\n' % member.len
- copy_strings += ' tmp_%s[i] = SafeStringCopy(in_struct->%s[i]);\n' % (member.name, member.name)
- copy_strings += ' }\n'
- copy_strings += ' %s = tmp_%s;\n' % (member.name, member.name)
-
- destruct_txt += ' if (%s) {\n' % member.name
- destruct_txt += ' for (uint32_t i = 0; i < %s; ++i) {\n' % member.len
- destruct_txt += ' delete [] %s[i];\n' % member.name
- destruct_txt += ' }\n'
- destruct_txt += ' delete [] %s;\n' % member.name
- destruct_txt += ' }\n'
- else:
- copy_strings += ' %s = SafeStringCopy(in_struct->%s);\n' % (member.name, member.name)
- destruct_txt += ' if (%s) delete [] %s;\n' % (member.name, member.name)
- else:
- # For these exceptions just copy initial value over for now
- init_list += '\n %s(in_struct->%s),' % (member.name, member.name)
- init_func_txt += ' %s = in_struct->%s;\n' % (member.name, member.name)
- default_init_list += '\n %s(nullptr),' % (member.name)
+ # For these exceptions just copy initial value over for now
+ init_list += '\n %s(in_struct->%s),' % (member.name, member.name)
+ init_func_txt += ' %s = in_struct->%s;\n' % (member.name, member.name)
else:
default_init_list += '\n %s(nullptr),' % (member.name)
init_list += '\n %s(nullptr),' % (member.name)
@@ -1270,24 +1037,25 @@ class HelperFileOutputGenerator(OutputGenerator):
construct_txt += ' %s = in_struct->%s;\n' % (member.name, member.name)
else:
init_func_txt += ' %s = nullptr;\n' % (member.name)
- if not member.isstaticarray and (member.len is None or '/' in member.len):
- construct_txt += ' if (in_struct->%s) {\n' % member.name
- construct_txt += ' %s = new %s(*in_struct->%s);\n' % (member.name, m_type, member.name)
- construct_txt += ' }\n'
- destruct_txt += ' if (%s)\n' % member.name
- destruct_txt += ' delete %s;\n' % member.name
- else:
- construct_txt += ' if (in_struct->%s) {\n' % member.name
- construct_txt += ' %s = new %s[in_struct->%s];\n' % (member.name, m_type, member.len)
- construct_txt += ' memcpy ((void *)%s, (void *)in_struct->%s, sizeof(%s)*in_struct->%s);\n' % (member.name, member.name, m_type, member.len)
- construct_txt += ' }\n'
- destruct_txt += ' if (%s)\n' % member.name
- destruct_txt += ' delete[] %s;\n' % member.name
+ if 'pNext' != member.name and 'void' not in m_type:
+ if not member.isstaticarray and (member.len is None or '/' in member.len):
+ construct_txt += ' if (in_struct->%s) {\n' % member.name
+ construct_txt += ' %s = new %s(*in_struct->%s);\n' % (member.name, m_type, member.name)
+ construct_txt += ' }\n'
+ destruct_txt += ' if (%s)\n' % member.name
+ destruct_txt += ' delete %s;\n' % member.name
+ else:
+ construct_txt += ' if (in_struct->%s) {\n' % member.name
+ construct_txt += ' %s = new %s[in_struct->%s];\n' % (member.name, m_type, member.len)
+ construct_txt += ' memcpy ((void *)%s, (void *)in_struct->%s, sizeof(%s)*in_struct->%s);\n' % (member.name, member.name, m_type, member.len)
+ construct_txt += ' }\n'
+ destruct_txt += ' if (%s)\n' % member.name
+ destruct_txt += ' delete[] %s;\n' % member.name
elif member.isstaticarray or member.len is not None:
if member.len is None:
# Extract length of static array by grabbing val between []
static_array_size = re.match(r"[^[]*\[([^]]*)\]", member.cdecl)
- construct_txt += ' for (uint32_t i = 0; i < %s; ++i) {\n' % static_array_size.group(1)
+ construct_txt += ' for (uint32_t i=0; i<%s; ++i) {\n' % static_array_size.group(1)
construct_txt += ' %s[i] = in_struct->%s[i];\n' % (member.name, member.name)
construct_txt += ' }\n'
else:
@@ -1304,7 +1072,7 @@ class HelperFileOutputGenerator(OutputGenerator):
construct_txt += ' %s = new %s[%s];\n' % (member.name, m_type, member.len)
destruct_txt += ' if (%s)\n' % member.name
destruct_txt += ' delete[] %s;\n' % member.name
- construct_txt += ' for (uint32_t i = 0; i < %s; ++i) {\n' % (member.len)
+ construct_txt += ' for (uint32_t i=0; i<%s; ++i) {\n' % (member.len)
if 'safe_' in m_type:
construct_txt += ' %s[i].initialize(&in_struct->%s[i]);\n' % (member.name, member.name)
else:
@@ -1312,11 +1080,10 @@ class HelperFileOutputGenerator(OutputGenerator):
construct_txt += ' }\n'
construct_txt += ' }\n'
elif member.ispointer == True:
- default_init_list += '\n %s(nullptr),' % (member.name)
- init_list += '\n %s(nullptr),' % (member.name)
- init_func_txt += ' %s = nullptr;\n' % (member.name)
construct_txt += ' if (in_struct->%s)\n' % member.name
construct_txt += ' %s = new %s(in_struct->%s);\n' % (member.name, m_type, member.name)
+ construct_txt += ' else\n'
+ construct_txt += ' %s = NULL;\n' % member.name
destruct_txt += ' if (%s)\n' % member.name
destruct_txt += ' delete %s;\n' % member.name
elif 'safe_' in m_type:
@@ -1327,29 +1094,19 @@ class HelperFileOutputGenerator(OutputGenerator):
init_func_txt += ' %s = in_struct->%s;\n' % (member.name, member.name)
if '' != init_list:
init_list = init_list[:-1] # hack off final comma
-
-
if item.name in custom_construct_txt:
construct_txt = custom_construct_txt[item.name]
-
- construct_txt = copy_pnext + copy_strings + construct_txt
-
if item.name in custom_destruct_txt:
destruct_txt = custom_destruct_txt[item.name]
-
- if copy_pnext:
- destruct_txt += ' if (pNext)\n FreePnextChain(pNext);\n'
-
safe_struct_body.append("\n%s::%s(const %s* in_struct%s) :%s\n{\n%s}" % (ss_name, ss_name, item.name, self.custom_construct_params.get(item.name, ''), init_list, construct_txt))
if '' != default_init_list:
default_init_list = " :%s" % (default_init_list[:-1])
safe_struct_body.append("\n%s::%s()%s\n{}" % (ss_name, ss_name, default_init_list))
# Create slight variation of init and construct txt for copy constructor that takes a src object reference vs. struct ptr
copy_construct_init = init_func_txt.replace('in_struct->', 'src.')
- copy_construct_txt = construct_txt.replace(' (in_struct->', ' (src.') # Exclude 'if' blocks from next line
- copy_construct_txt = construct_txt.replace(' (in_struct->', ' (src.') # Exclude 'if' blocks from next line
- copy_construct_txt = re.sub('(new \\w+)\\(in_struct->', '\\1(*src.', construct_txt) # Pass object to copy constructors
- copy_construct_txt = copy_construct_txt.replace('in_struct->', 'src.') # Modify remaining struct refs for src object
+ copy_construct_txt = construct_txt.replace(' (in_struct->', ' (src.') # Exclude 'if' blocks from next line
+ copy_construct_txt = copy_construct_txt.replace('(in_struct->', '(*src.') # Pass object to copy constructors
+ copy_construct_txt = copy_construct_txt.replace('in_struct->', 'src.') # Modify remaining struct refs for src object
if item.name in custom_copy_txt:
copy_construct_txt = custom_copy_txt[item.name]
copy_assign_txt = ' if (&src == this) return *this;\n\n' + destruct_txt + '\n' + copy_construct_init + copy_construct_txt + '\n return *this;'
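
As a concrete illustration of what the construct_txt/destruct_txt assembled above turns into, the generated vk_safe_struct.cpp contains per-structure deep-copy code of roughly the following shape. This is a minimal sketch using hypothetical types (VkFoo/VkItem stand in for a real Vulkan struct and its pointed-to array element); the real output additionally handles sType, safe_-wrapped members, and the pNext cases visible in the surrounding hunks.

#include <stdint.h>
#include <string.h>

// Hypothetical input struct: an array pointer plus its length, as in many Vk*CreateInfo types.
struct VkItem { uint32_t value; };
struct VkFoo  { uint32_t itemCount; const VkItem *pItems; };

struct safe_VkFoo {
    uint32_t itemCount;
    const VkItem *pItems;

    // Mirrors construct_txt: copy scalars, null the pointer, then allocate and memcpy
    // when the source pointer is non-null, sized by the length member (member.len).
    explicit safe_VkFoo(const VkFoo *in_struct) : itemCount(in_struct->itemCount), pItems(nullptr) {
        if (in_struct->pItems) {
            pItems = new VkItem[in_struct->itemCount];
            memcpy((void *)pItems, (void *)in_struct->pItems, sizeof(VkItem) * in_struct->itemCount);
        }
    }
    // Mirrors destruct_txt: release the deep copy only if one was made.
    ~safe_VkFoo() {
        if (pItems) delete[] pItems;
    }
};
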
@@ -1374,7 +1131,7 @@ class HelperFileOutputGenerator(OutputGenerator):
type_member = 'Type'
id_member = 'kSType'
id_decl = 'static const VkStructureType '
- generic_header = 'VkBaseOutStructure'
+ generic_header = prefix + 'GenericHeader'
typename_func = fprefix + 'typename'
idname_func = fprefix + 'stype_name'
find_func = fprefix + 'find_in_chain'
@@ -1398,6 +1155,12 @@ class HelperFileOutputGenerator(OutputGenerator):
# Define the utilities (here so any renaming stays consistent), if this grows large, refactor to a fixed .h file
utilities_format = '\n'.join((
+ '// Header "base class" for pNext chain traversal',
+ 'struct {header} {{',
+ ' VkStructureType sType;',
+ ' const {header} *pNext;',
+ '}};',
+ '',
'// Find an entry of the given type in the pNext chain',
'template <typename T> const T *{find_func}(const void *next) {{',
' const {header} *current = reinterpret_cast<const {header} *>(next);',
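
The utilities block above emits a small generic-header struct plus a templated chain-walk helper for pNext traversal. A minimal self-contained sketch of that pattern follows; the concrete names come from the prefix/fprefix variables (GenericHeader and find_in_chain below are placeholders), and the STypeOf traits struct stands in for the per-structure kSType constants the generated header provides.

#include <vulkan/vulkan.h>

struct GenericHeader {
    VkStructureType sType;
    const GenericHeader *pNext;
};

// Illustrative stand-in for the generated kSType mapping, specialized for one example struct.
template <typename T> struct STypeOf;
template <> struct STypeOf<VkPhysicalDeviceFeatures2> {
    static constexpr VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
};

// Walk the pNext chain and return the first entry whose sType matches T, else nullptr.
template <typename T>
const T *find_in_chain(const void *next) {
    const GenericHeader *current = reinterpret_cast<const GenericHeader *>(next);
    while (current) {
        if (current->sType == STypeOf<T>::kSType) return reinterpret_cast<const T *>(current);
        current = current->pNext;
    }
    return nullptr;
}

// Usage: given a VkDeviceCreateInfo 'ci', locate an extension struct on its chain:
//   const auto *features2 = find_in_chain<VkPhysicalDeviceFeatures2>(ci.pNext);
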
diff --git a/scripts/known_good.json b/scripts/known_good.json
index 311d18028..8fc326844 100644
--- a/scripts/known_good.json
+++ b/scripts/known_good.json
@@ -6,7 +6,7 @@
"sub_dir" : "glslang",
"build_dir" : "glslang/build",
"install_dir" : "glslang/build/install",
- "commit" : "333d1c95792692205472c457d7bec915a94c8000",
+ "commit" : "5432f0dd8f331f15182681664d7486681e8514e6",
"prebuild" : [
"python update_glslang_sources.py"
],
@@ -20,7 +20,7 @@
"sub_dir" : "Vulkan-Headers",
"build_dir" : "Vulkan-Headers/build",
"install_dir" : "Vulkan-Headers/build/install",
- "commit" : "v1.1.121"
+ "commit" : "v1.1.102"
},
{
"name" : "Vulkan-Loader",
@@ -28,7 +28,7 @@
"sub_dir" : "Vulkan-Loader",
"build_dir" : "Vulkan-Loader/build",
"install_dir" : "Vulkan-Loader/build/install",
- "commit" : "v1.1.121",
+ "commit" : "v1.1.102",
"deps" : [
{
"var_name" : "VULKAN_HEADERS_INSTALL_DIR",
@@ -37,10 +37,6 @@
],
"cmake_options" : [
"-DBUILD_TESTS=NO"
- ],
- "build_platforms" : [
- "linux",
- "darwin"
]
},
{
@@ -49,7 +45,7 @@
"sub_dir" : "VulkanTools",
"build_dir" : "VulkanTools/build",
"install_dir" : "VulkanTools/build/install",
- "commit" : "206108f07f531cab5523ff36d7947f39cdd7ad6b",
+ "commit" : "748a291e0e8b0368d3c7b654b35ebbe226ed622d",
"deps" : [
{
"var_name" : "VULKAN_HEADERS_INSTALL_DIR",
@@ -87,7 +83,7 @@
"sub_dir" : "Vulkan-Tools",
"build_dir" : "Vulkan-Tools/build",
"install_dir" : "Vulkan-Tools/build/install",
- "commit" : "e36c760bbde1c56b5b2a934347ff0fb9dce4d793",
+ "commit" : "v1.1.102",
"deps" : [
{
"var_name" : "VULKAN_HEADERS_INSTALL_DIR",
diff --git a/scripts/layer_chassis_dispatch_generator.py b/scripts/layer_chassis_dispatch_generator.py
index 3088bcd22..f21cb542e 100644
--- a/scripts/layer_chassis_dispatch_generator.py
+++ b/scripts/layer_chassis_dispatch_generator.py
@@ -61,7 +61,6 @@ from common_codegen import *
# separate line, align parameter names at the specified column
class LayerChassisDispatchGeneratorOptions(GeneratorOptions):
def __init__(self,
- conventions = None,
filename = None,
directory = '.',
apiname = None,
@@ -84,7 +83,7 @@ class LayerChassisDispatchGeneratorOptions(GeneratorOptions):
indentFuncPointer = False,
alignFuncParam = 0,
expandEnumerants = True):
- GeneratorOptions.__init__(self, conventions, filename, directory, apiname, profile,
+ GeneratorOptions.__init__(self, filename, directory, apiname, profile,
versions, emitversions, defaultExtensions,
addExtensions, removeExtensions, emitExtensions, sortProcedure)
self.prefixText = prefixText
@@ -140,14 +139,15 @@ class LayerChassisDispatchOutputGenerator(OutputGenerator):
*/"""
inline_custom_source_preamble = """
-VkResult DispatchCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
+VkResult DispatchCreateComputePipelines(ValidationObject *layer_data,
+ VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
const VkComputePipelineCreateInfo *pCreateInfos,
const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (!wrap_handles) return layer_data->device_dispatch_table.CreateComputePipelines(device, pipelineCache, createInfoCount,
pCreateInfos, pAllocator, pPipelines);
safe_VkComputePipelineCreateInfo *local_pCreateInfos = NULL;
if (pCreateInfos) {
+ std::lock_guard<std::mutex> lock(dispatch_lock);
local_pCreateInfos = new safe_VkComputePipelineCreateInfo[createInfoCount];
for (uint32_t idx0 = 0; idx0 < createInfoCount; ++idx0) {
local_pCreateInfos[idx0].initialize(&pCreateInfos[idx0]);
@@ -163,6 +163,7 @@ VkResult DispatchCreateComputePipelines(VkDevice device, VkPipelineCache pipelin
}
}
if (pipelineCache) {
+ std::lock_guard<std::mutex> lock(dispatch_lock);
pipelineCache = layer_data->Unwrap(pipelineCache);
}
@@ -170,6 +171,7 @@ VkResult DispatchCreateComputePipelines(VkDevice device, VkPipelineCache pipelin
local_pCreateInfos->ptr(), pAllocator, pPipelines);
delete[] local_pCreateInfos;
{
+ std::lock_guard<std::mutex> lock(dispatch_lock);
for (uint32_t i = 0; i < createInfoCount; ++i) {
if (pPipelines[i] != VK_NULL_HANDLE) {
pPipelines[i] = layer_data->WrapNew(pPipelines[i]);
@@ -179,16 +181,16 @@ VkResult DispatchCreateComputePipelines(VkDevice device, VkPipelineCache pipelin
return result;
}
-VkResult DispatchCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
+VkResult DispatchCreateGraphicsPipelines(ValidationObject *layer_data,
+ VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
const VkGraphicsPipelineCreateInfo *pCreateInfos,
const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (!wrap_handles) return layer_data->device_dispatch_table.CreateGraphicsPipelines(device, pipelineCache, createInfoCount,
pCreateInfos, pAllocator, pPipelines);
safe_VkGraphicsPipelineCreateInfo *local_pCreateInfos = nullptr;
if (pCreateInfos) {
local_pCreateInfos = new safe_VkGraphicsPipelineCreateInfo[createInfoCount];
- read_dispatch_lock_guard_t lock(dispatch_lock);
+ std::lock_guard<std::mutex> lock(dispatch_lock);
for (uint32_t idx0 = 0; idx0 < createInfoCount; ++idx0) {
bool uses_color_attachment = false;
bool uses_depthstencil_attachment = false;
@@ -224,6 +226,7 @@ VkResult DispatchCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipeli
}
}
if (pipelineCache) {
+ std::lock_guard<std::mutex> lock(dispatch_lock);
pipelineCache = layer_data->Unwrap(pipelineCache);
}
@@ -231,6 +234,7 @@ VkResult DispatchCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipeli
local_pCreateInfos->ptr(), pAllocator, pPipelines);
delete[] local_pCreateInfos;
{
+ std::lock_guard<std::mutex> lock(dispatch_lock);
for (uint32_t i = 0; i < createInfoCount; ++i) {
if (pPipelines[i] != VK_NULL_HANDLE) {
pPipelines[i] = layer_data->WrapNew(pPipelines[i]);
@@ -259,56 +263,53 @@ static void UpdateCreateRenderPassState(ValidationObject *layer_data, const T *p
}
}
-VkResult DispatchCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
+VkResult DispatchCreateRenderPass(ValidationObject *layer_data,
+ VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
VkResult result = layer_data->device_dispatch_table.CreateRenderPass(device, pCreateInfo, pAllocator, pRenderPass);
if (!wrap_handles) return result;
if (VK_SUCCESS == result) {
- write_dispatch_lock_guard_t lock(dispatch_lock);
+ std::lock_guard<std::mutex> lock(dispatch_lock);
UpdateCreateRenderPassState(layer_data, pCreateInfo, *pRenderPass);
*pRenderPass = layer_data->WrapNew(*pRenderPass);
}
return result;
}
-VkResult DispatchCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
+VkResult DispatchCreateRenderPass2KHR(ValidationObject *layer_data,
+ VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
VkResult result = layer_data->device_dispatch_table.CreateRenderPass2KHR(device, pCreateInfo, pAllocator, pRenderPass);
if (!wrap_handles) return result;
if (VK_SUCCESS == result) {
- write_dispatch_lock_guard_t lock(dispatch_lock);
+ std::lock_guard<std::mutex> lock(dispatch_lock);
UpdateCreateRenderPassState(layer_data, pCreateInfo, *pRenderPass);
*pRenderPass = layer_data->WrapNew(*pRenderPass);
}
return result;
}
-void DispatchDestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks *pAllocator) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+void DispatchDestroyRenderPass(ValidationObject *layer_data,
+ VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks *pAllocator) {
if (!wrap_handles) return layer_data->device_dispatch_table.DestroyRenderPass(device, renderPass, pAllocator);
+ std::unique_lock<std::mutex> lock(dispatch_lock);
uint64_t renderPass_id = reinterpret_cast<uint64_t &>(renderPass);
-
- auto iter = unique_id_mapping.pop(renderPass_id);
- if (iter != unique_id_mapping.end()) {
- renderPass = (VkRenderPass)iter->second;
- } else {
- renderPass = (VkRenderPass)0;
- }
-
+ renderPass = (VkRenderPass)unique_id_mapping[renderPass_id];
+ unique_id_mapping.erase(renderPass_id);
+ lock.unlock();
layer_data->device_dispatch_table.DestroyRenderPass(device, renderPass, pAllocator);
- write_dispatch_lock_guard_t lock(dispatch_lock);
+ lock.lock();
layer_data->renderpasses_states.erase(renderPass);
}
-VkResult DispatchCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
+VkResult DispatchCreateSwapchainKHR(ValidationObject *layer_data,
+ VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
if (!wrap_handles) return layer_data->device_dispatch_table.CreateSwapchainKHR(device, pCreateInfo, pAllocator, pSwapchain);
safe_VkSwapchainCreateInfoKHR *local_pCreateInfo = NULL;
if (pCreateInfo) {
+ std::lock_guard<std::mutex> lock(dispatch_lock);
local_pCreateInfo = new safe_VkSwapchainCreateInfoKHR(pCreateInfo);
local_pCreateInfo->oldSwapchain = layer_data->Unwrap(pCreateInfo->oldSwapchain);
// Surface is instance-level object
@@ -319,19 +320,21 @@ VkResult DispatchCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfo
delete local_pCreateInfo;
if (VK_SUCCESS == result) {
+ std::lock_guard<std::mutex> lock(dispatch_lock);
*pSwapchain = layer_data->WrapNew(*pSwapchain);
}
return result;
}
-VkResult DispatchCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR *pCreateInfos,
+VkResult DispatchCreateSharedSwapchainsKHR(ValidationObject *layer_data,
+ VkDevice device, uint32_t swapchainCount,
+ const VkSwapchainCreateInfoKHR *pCreateInfos,
const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchains) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles)
- return layer_data->device_dispatch_table.CreateSharedSwapchainsKHR(device, swapchainCount, pCreateInfos, pAllocator,
- pSwapchains);
+ if (!wrap_handles) return layer_data->device_dispatch_table.CreateSharedSwapchainsKHR(device, swapchainCount, pCreateInfos,
+ pAllocator, pSwapchains);
safe_VkSwapchainCreateInfoKHR *local_pCreateInfos = NULL;
{
+ std::lock_guard<std::mutex> lock(dispatch_lock);
if (pCreateInfos) {
local_pCreateInfos = new safe_VkSwapchainCreateInfoKHR[swapchainCount];
for (uint32_t i = 0; i < swapchainCount; ++i) {
@@ -350,6 +353,7 @@ VkResult DispatchCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCo
pAllocator, pSwapchains);
delete[] local_pCreateInfos;
if (VK_SUCCESS == result) {
+ std::lock_guard<std::mutex> lock(dispatch_lock);
for (uint32_t i = 0; i < swapchainCount; i++) {
pSwapchains[i] = layer_data->WrapNew(pSwapchains[i]);
}
@@ -357,20 +361,20 @@ VkResult DispatchCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCo
return result;
}
-VkResult DispatchGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t *pSwapchainImageCount,
+VkResult DispatchGetSwapchainImagesKHR(ValidationObject *layer_data,
+ VkDevice device, VkSwapchainKHR swapchain, uint32_t *pSwapchainImageCount,
VkImage *pSwapchainImages) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles)
- return layer_data->device_dispatch_table.GetSwapchainImagesKHR(device, swapchain, pSwapchainImageCount, pSwapchainImages);
+ if (!wrap_handles) return layer_data->device_dispatch_table.GetSwapchainImagesKHR(device, swapchain, pSwapchainImageCount, pSwapchainImages);
VkSwapchainKHR wrapped_swapchain_handle = swapchain;
if (VK_NULL_HANDLE != swapchain) {
+ std::lock_guard<std::mutex> lock(dispatch_lock);
swapchain = layer_data->Unwrap(swapchain);
}
VkResult result =
layer_data->device_dispatch_table.GetSwapchainImagesKHR(device, swapchain, pSwapchainImageCount, pSwapchainImages);
if ((VK_SUCCESS == result) || (VK_INCOMPLETE == result)) {
if ((*pSwapchainImageCount > 0) && pSwapchainImages) {
- write_dispatch_lock_guard_t lock(dispatch_lock);
+ std::lock_guard<std::mutex> lock(dispatch_lock);
auto &wrapped_swapchain_image_handles = layer_data->swapchain_wrapped_image_handle_map[wrapped_swapchain_handle];
for (uint32_t i = static_cast<uint32_t>(wrapped_swapchain_image_handles.size()); i < *pSwapchainImageCount; i++) {
wrapped_swapchain_image_handles.emplace_back(layer_data->WrapNew(pSwapchainImages[i]));
@@ -383,35 +387,30 @@ VkResult DispatchGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain
return result;
}
-void DispatchDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks *pAllocator) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+void DispatchDestroySwapchainKHR(ValidationObject *layer_data,
+ VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks *pAllocator) {
if (!wrap_handles) return layer_data->device_dispatch_table.DestroySwapchainKHR(device, swapchain, pAllocator);
- write_dispatch_lock_guard_t lock(dispatch_lock);
+ std::unique_lock<std::mutex> lock(dispatch_lock);
auto &image_array = layer_data->swapchain_wrapped_image_handle_map[swapchain];
for (auto &image_handle : image_array) {
unique_id_mapping.erase(HandleToUint64(image_handle));
}
layer_data->swapchain_wrapped_image_handle_map.erase(swapchain);
- lock.unlock();
uint64_t swapchain_id = HandleToUint64(swapchain);
-
- auto iter = unique_id_mapping.pop(swapchain_id);
- if (iter != unique_id_mapping.end()) {
- swapchain = (VkSwapchainKHR)iter->second;
- } else {
- swapchain = (VkSwapchainKHR)0;
- }
-
+ swapchain = (VkSwapchainKHR)unique_id_mapping[swapchain_id];
+ unique_id_mapping.erase(swapchain_id);
+ lock.unlock();
layer_data->device_dispatch_table.DestroySwapchainKHR(device, swapchain, pAllocator);
}
-VkResult DispatchQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
+VkResult DispatchQueuePresentKHR(ValidationObject *layer_data,
+ VkQueue queue, const VkPresentInfoKHR *pPresentInfo) {
if (!wrap_handles) return layer_data->device_dispatch_table.QueuePresentKHR(queue, pPresentInfo);
safe_VkPresentInfoKHR *local_pPresentInfo = NULL;
{
+ std::lock_guard<std::mutex> lock(dispatch_lock);
if (pPresentInfo) {
local_pPresentInfo = new safe_VkPresentInfoKHR(pPresentInfo);
if (local_pPresentInfo->pWaitSemaphores) {
@@ -439,40 +438,35 @@ VkResult DispatchQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresent
return result;
}
-void DispatchDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks *pAllocator) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+void DispatchDestroyDescriptorPool(ValidationObject *layer_data, VkDevice device, VkDescriptorPool descriptorPool,
+ const VkAllocationCallbacks *pAllocator) {
if (!wrap_handles) return layer_data->device_dispatch_table.DestroyDescriptorPool(device, descriptorPool, pAllocator);
- write_dispatch_lock_guard_t lock(dispatch_lock);
+ std::unique_lock<std::mutex> lock(dispatch_lock);
// remove references to implicitly freed descriptor sets
for(auto descriptor_set : layer_data->pool_descriptor_sets_map[descriptorPool]) {
unique_id_mapping.erase(reinterpret_cast<uint64_t &>(descriptor_set));
}
layer_data->pool_descriptor_sets_map.erase(descriptorPool);
- lock.unlock();
uint64_t descriptorPool_id = reinterpret_cast<uint64_t &>(descriptorPool);
-
- auto iter = unique_id_mapping.pop(descriptorPool_id);
- if (iter != unique_id_mapping.end()) {
- descriptorPool = (VkDescriptorPool)iter->second;
- } else {
- descriptorPool = (VkDescriptorPool)0;
- }
-
+ descriptorPool = (VkDescriptorPool)unique_id_mapping[descriptorPool_id];
+ unique_id_mapping.erase(descriptorPool_id);
+ lock.unlock();
layer_data->device_dispatch_table.DestroyDescriptorPool(device, descriptorPool, pAllocator);
}
-VkResult DispatchResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+VkResult DispatchResetDescriptorPool(ValidationObject *layer_data, VkDevice device, VkDescriptorPool descriptorPool,
+ VkDescriptorPoolResetFlags flags) {
if (!wrap_handles) return layer_data->device_dispatch_table.ResetDescriptorPool(device, descriptorPool, flags);
VkDescriptorPool local_descriptor_pool = VK_NULL_HANDLE;
{
+ std::lock_guard<std::mutex> lock(dispatch_lock);
local_descriptor_pool = layer_data->Unwrap(descriptorPool);
}
VkResult result = layer_data->device_dispatch_table.ResetDescriptorPool(device, local_descriptor_pool, flags);
if (VK_SUCCESS == result) {
- write_dispatch_lock_guard_t lock(dispatch_lock);
+ std::lock_guard<std::mutex> lock(dispatch_lock);
// remove references to implicitly freed descriptor sets
for(auto descriptor_set : layer_data->pool_descriptor_sets_map[descriptorPool]) {
unique_id_mapping.erase(reinterpret_cast<uint64_t &>(descriptor_set));
@@ -483,12 +477,12 @@ VkResult DispatchResetDescriptorPool(VkDevice device, VkDescriptorPool descripto
return result;
}
-VkResult DispatchAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
- VkDescriptorSet *pDescriptorSets) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+VkResult DispatchAllocateDescriptorSets(ValidationObject *layer_data, VkDevice device,
+ const VkDescriptorSetAllocateInfo *pAllocateInfo, VkDescriptorSet *pDescriptorSets) {
if (!wrap_handles) return layer_data->device_dispatch_table.AllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets);
safe_VkDescriptorSetAllocateInfo *local_pAllocateInfo = NULL;
{
+ std::lock_guard<std::mutex> lock(dispatch_lock);
if (pAllocateInfo) {
local_pAllocateInfo = new safe_VkDescriptorSetAllocateInfo(pAllocateInfo);
if (pAllocateInfo->descriptorPool) {
@@ -507,7 +501,7 @@ VkResult DispatchAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAl
delete local_pAllocateInfo;
}
if (VK_SUCCESS == result) {
- write_dispatch_lock_guard_t lock(dispatch_lock);
+ std::lock_guard<std::mutex> lock(dispatch_lock);
auto &pool_descriptor_sets = layer_data->pool_descriptor_sets_map[pAllocateInfo->descriptorPool];
for (uint32_t index0 = 0; index0 < pAllocateInfo->descriptorSetCount; index0++) {
pDescriptorSets[index0] = layer_data->WrapNew(pDescriptorSets[index0]);
@@ -517,14 +511,14 @@ VkResult DispatchAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAl
return result;
}
-VkResult DispatchFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount,
- const VkDescriptorSet *pDescriptorSets) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+VkResult DispatchFreeDescriptorSets(ValidationObject *layer_data, VkDevice device, VkDescriptorPool descriptorPool,
+ uint32_t descriptorSetCount, const VkDescriptorSet *pDescriptorSets) {
if (!wrap_handles)
return layer_data->device_dispatch_table.FreeDescriptorSets(device, descriptorPool, descriptorSetCount, pDescriptorSets);
VkDescriptorSet *local_pDescriptorSets = NULL;
VkDescriptorPool local_descriptor_pool = VK_NULL_HANDLE;
{
+ std::lock_guard<std::mutex> lock(dispatch_lock);
local_descriptor_pool = layer_data->Unwrap(descriptorPool);
if (pDescriptorSets) {
local_pDescriptorSets = new VkDescriptorSet[descriptorSetCount];
@@ -537,7 +531,7 @@ VkResult DispatchFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptor
(const VkDescriptorSet *)local_pDescriptorSets);
if (local_pDescriptorSets) delete[] local_pDescriptorSets;
if ((VK_SUCCESS == result) && (pDescriptorSets)) {
- write_dispatch_lock_guard_t lock(dispatch_lock);
+ std::unique_lock<std::mutex> lock(dispatch_lock);
auto &pool_descriptor_sets = layer_data->pool_descriptor_sets_map[descriptorPool];
for (uint32_t index0 = 0; index0 < descriptorSetCount; index0++) {
VkDescriptorSet handle = pDescriptorSets[index0];
@@ -549,16 +543,18 @@ VkResult DispatchFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptor
return result;
}
+
// This is the core version of this routine. The extension version is below.
-VkResult DispatchCreateDescriptorUpdateTemplate(VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
+VkResult DispatchCreateDescriptorUpdateTemplate(ValidationObject *layer_data,
+ VkDevice device,
+ const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
const VkAllocationCallbacks *pAllocator,
VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles)
- return layer_data->device_dispatch_table.CreateDescriptorUpdateTemplate(device, pCreateInfo, pAllocator,
- pDescriptorUpdateTemplate);
+ if (!wrap_handles) return layer_data->device_dispatch_table.CreateDescriptorUpdateTemplate(device, pCreateInfo, pAllocator,
+ pDescriptorUpdateTemplate);
safe_VkDescriptorUpdateTemplateCreateInfo *local_create_info = NULL;
{
+ std::lock_guard<std::mutex> lock(dispatch_lock);
if (pCreateInfo) {
local_create_info = new safe_VkDescriptorUpdateTemplateCreateInfo(pCreateInfo);
if (pCreateInfo->descriptorSetLayout) {
@@ -572,26 +568,27 @@ VkResult DispatchCreateDescriptorUpdateTemplate(VkDevice device, const VkDescrip
VkResult result = layer_data->device_dispatch_table.CreateDescriptorUpdateTemplate(device, local_create_info->ptr(), pAllocator,
pDescriptorUpdateTemplate);
if (VK_SUCCESS == result) {
- write_dispatch_lock_guard_t lock(dispatch_lock);
+ std::lock_guard<std::mutex> lock(dispatch_lock);
*pDescriptorUpdateTemplate = layer_data->WrapNew(*pDescriptorUpdateTemplate);
// Shadow template createInfo for later updates
std::unique_ptr<TEMPLATE_STATE> template_state(new TEMPLATE_STATE(*pDescriptorUpdateTemplate, local_create_info));
- layer_data->desc_template_createinfo_map[(uint64_t)*pDescriptorUpdateTemplate] = std::move(template_state);
+ layer_data->desc_template_map[(uint64_t)*pDescriptorUpdateTemplate] = std::move(template_state);
}
return result;
}
// This is the extension version of this routine. The core version is above.
-VkResult DispatchCreateDescriptorUpdateTemplateKHR(VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
+VkResult DispatchCreateDescriptorUpdateTemplateKHR(ValidationObject *layer_data,
+ VkDevice device,
+ const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
const VkAllocationCallbacks *pAllocator,
VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles)
- return layer_data->device_dispatch_table.CreateDescriptorUpdateTemplateKHR(device, pCreateInfo, pAllocator,
- pDescriptorUpdateTemplate);
+ if (!wrap_handles) return layer_data->device_dispatch_table.CreateDescriptorUpdateTemplateKHR(device, pCreateInfo, pAllocator,
+ pDescriptorUpdateTemplate);
safe_VkDescriptorUpdateTemplateCreateInfo *local_create_info = NULL;
{
+ std::lock_guard<std::mutex> lock(dispatch_lock);
if (pCreateInfo) {
local_create_info = new safe_VkDescriptorUpdateTemplateCreateInfo(pCreateInfo);
if (pCreateInfo->descriptorSetLayout) {
@@ -605,66 +602,53 @@ VkResult DispatchCreateDescriptorUpdateTemplateKHR(VkDevice device, const VkDesc
VkResult result = layer_data->device_dispatch_table.CreateDescriptorUpdateTemplateKHR(device, local_create_info->ptr(), pAllocator,
pDescriptorUpdateTemplate);
if (VK_SUCCESS == result) {
- write_dispatch_lock_guard_t lock(dispatch_lock);
+ std::lock_guard<std::mutex> lock(dispatch_lock);
*pDescriptorUpdateTemplate = layer_data->WrapNew(*pDescriptorUpdateTemplate);
// Shadow template createInfo for later updates
std::unique_ptr<TEMPLATE_STATE> template_state(new TEMPLATE_STATE(*pDescriptorUpdateTemplate, local_create_info));
- layer_data->desc_template_createinfo_map[(uint64_t)*pDescriptorUpdateTemplate] = std::move(template_state);
+ layer_data->desc_template_map[(uint64_t)*pDescriptorUpdateTemplate] = std::move(template_state);
}
return result;
}
// This is the core version of this routine. The extension version is below.
-void DispatchDestroyDescriptorUpdateTemplate(VkDevice device, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
+void DispatchDestroyDescriptorUpdateTemplate(ValidationObject *layer_data,
+ VkDevice device, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
const VkAllocationCallbacks *pAllocator) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles)
- return layer_data->device_dispatch_table.DestroyDescriptorUpdateTemplate(device, descriptorUpdateTemplate, pAllocator);
- write_dispatch_lock_guard_t lock(dispatch_lock);
+ if (!wrap_handles) return layer_data->device_dispatch_table.DestroyDescriptorUpdateTemplate(device, descriptorUpdateTemplate, pAllocator);
+ std::unique_lock<std::mutex> lock(dispatch_lock);
uint64_t descriptor_update_template_id = reinterpret_cast<uint64_t &>(descriptorUpdateTemplate);
- layer_data->desc_template_createinfo_map.erase(descriptor_update_template_id);
+ layer_data->desc_template_map.erase(descriptor_update_template_id);
+ descriptorUpdateTemplate = (VkDescriptorUpdateTemplate)unique_id_mapping[descriptor_update_template_id];
+ unique_id_mapping.erase(descriptor_update_template_id);
lock.unlock();
-
- auto iter = unique_id_mapping.pop(descriptor_update_template_id);
- if (iter != unique_id_mapping.end()) {
- descriptorUpdateTemplate = (VkDescriptorUpdateTemplate)iter->second;
- } else {
- descriptorUpdateTemplate = (VkDescriptorUpdateTemplate)0;
- }
-
layer_data->device_dispatch_table.DestroyDescriptorUpdateTemplate(device, descriptorUpdateTemplate, pAllocator);
}
// This is the extension version of this routine. The core version is above.
-void DispatchDestroyDescriptorUpdateTemplateKHR(VkDevice device, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
+void DispatchDestroyDescriptorUpdateTemplateKHR(ValidationObject *layer_data,
+ VkDevice device,
+ VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
const VkAllocationCallbacks *pAllocator) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles)
- return layer_data->device_dispatch_table.DestroyDescriptorUpdateTemplateKHR(device, descriptorUpdateTemplate, pAllocator);
- write_dispatch_lock_guard_t lock(dispatch_lock);
+ if (!wrap_handles) return layer_data->device_dispatch_table.DestroyDescriptorUpdateTemplateKHR(device, descriptorUpdateTemplate, pAllocator);
+ std::unique_lock<std::mutex> lock(dispatch_lock);
uint64_t descriptor_update_template_id = reinterpret_cast<uint64_t &>(descriptorUpdateTemplate);
- layer_data->desc_template_createinfo_map.erase(descriptor_update_template_id);
+ layer_data->desc_template_map.erase(descriptor_update_template_id);
+ descriptorUpdateTemplate = (VkDescriptorUpdateTemplate)unique_id_mapping[descriptor_update_template_id];
+ unique_id_mapping.erase(descriptor_update_template_id);
lock.unlock();
-
- auto iter = unique_id_mapping.pop(descriptor_update_template_id);
- if (iter != unique_id_mapping.end()) {
- descriptorUpdateTemplate = (VkDescriptorUpdateTemplate)iter->second;
- } else {
- descriptorUpdateTemplate = (VkDescriptorUpdateTemplate)0;
- }
-
layer_data->device_dispatch_table.DestroyDescriptorUpdateTemplateKHR(device, descriptorUpdateTemplate, pAllocator);
}
void *BuildUnwrappedUpdateTemplateBuffer(ValidationObject *layer_data, uint64_t descriptorUpdateTemplate, const void *pData) {
- auto const template_map_entry = layer_data->desc_template_createinfo_map.find(descriptorUpdateTemplate);
- if (template_map_entry == layer_data->desc_template_createinfo_map.end()) {
+ auto const template_map_entry = layer_data->desc_template_map.find(descriptorUpdateTemplate);
+ if (template_map_entry == layer_data->desc_template_map.end()) {
assert(0);
}
auto const &create_info = template_map_entry->second->create_info;
size_t allocation_size = 0;
- std::vector<std::tuple<size_t, VulkanObjectType, uint64_t, size_t>> template_entries;
+ std::vector<std::tuple<size_t, VulkanObjectType, void *, size_t>> template_entries;
for (uint32_t i = 0; i < create_info.descriptorUpdateEntryCount; i++) {
for (uint32_t j = 0; j < create_info.pDescriptorUpdateEntries[i].descriptorCount; j++) {
@@ -683,7 +667,7 @@ void *BuildUnwrappedUpdateTemplateBuffer(ValidationObject *layer_data, uint64_t
VkDescriptorImageInfo *wrapped_entry = new VkDescriptorImageInfo(*image_entry);
wrapped_entry->sampler = layer_data->Unwrap(image_entry->sampler);
wrapped_entry->imageView = layer_data->Unwrap(image_entry->imageView);
- template_entries.emplace_back(offset, kVulkanObjectTypeImage, CastToUint64(wrapped_entry), 0);
+ template_entries.emplace_back(offset, kVulkanObjectTypeImage, reinterpret_cast<void *>(wrapped_entry), 0);
} break;
case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
@@ -695,7 +679,7 @@ void *BuildUnwrappedUpdateTemplateBuffer(ValidationObject *layer_data, uint64_t
VkDescriptorBufferInfo *wrapped_entry = new VkDescriptorBufferInfo(*buffer_entry);
wrapped_entry->buffer = layer_data->Unwrap(buffer_entry->buffer);
- template_entries.emplace_back(offset, kVulkanObjectTypeBuffer, CastToUint64(wrapped_entry), 0);
+ template_entries.emplace_back(offset, kVulkanObjectTypeBuffer, reinterpret_cast<void *>(wrapped_entry), 0);
} break;
case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
@@ -704,13 +688,13 @@ void *BuildUnwrappedUpdateTemplateBuffer(ValidationObject *layer_data, uint64_t
allocation_size = std::max(allocation_size, offset + sizeof(VkBufferView));
VkBufferView wrapped_entry = layer_data->Unwrap(*buffer_view_handle);
- template_entries.emplace_back(offset, kVulkanObjectTypeBufferView, CastToUint64(wrapped_entry), 0);
+ template_entries.emplace_back(offset, kVulkanObjectTypeBufferView, reinterpret_cast<void *>(wrapped_entry), 0);
} break;
case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT: {
size_t numBytes = create_info.pDescriptorUpdateEntries[i].descriptorCount;
allocation_size = std::max(allocation_size, offset + numBytes);
// nothing to unwrap, just plain data
- template_entries.emplace_back(offset, kVulkanObjectTypeUnknown, CastToUint64(update_entry),
+ template_entries.emplace_back(offset, kVulkanObjectTypeUnknown, reinterpret_cast<void *>(update_entry),
numBytes);
// to break out of the loop
j = create_info.pDescriptorUpdateEntries[i].descriptorCount;
@@ -726,26 +710,26 @@ void *BuildUnwrappedUpdateTemplateBuffer(ValidationObject *layer_data, uint64_t
for (auto &this_entry : template_entries) {
VulkanObjectType type = std::get<1>(this_entry);
void *destination = (char *)unwrapped_data + std::get<0>(this_entry);
- uint64_t source = std::get<2>(this_entry);
+ void *source = (char *)std::get<2>(this_entry);
size_t size = std::get<3>(this_entry);
if (size != 0) {
assert(type == kVulkanObjectTypeUnknown);
- memcpy(destination, CastFromUint64<void *>(source), size);
+ memcpy(destination, source, size);
} else {
switch (type) {
case kVulkanObjectTypeImage:
*(reinterpret_cast<VkDescriptorImageInfo *>(destination)) =
*(reinterpret_cast<VkDescriptorImageInfo *>(source));
- delete CastFromUint64<VkDescriptorImageInfo *>(source);
+ delete reinterpret_cast<VkDescriptorImageInfo *>(source);
break;
case kVulkanObjectTypeBuffer:
*(reinterpret_cast<VkDescriptorBufferInfo *>(destination)) =
- *(CastFromUint64<VkDescriptorBufferInfo *>(source));
- delete CastFromUint64<VkDescriptorBufferInfo *>(source);
+ *(reinterpret_cast<VkDescriptorBufferInfo *>(source));
+ delete reinterpret_cast<VkDescriptorBufferInfo *>(source);
break;
case kVulkanObjectTypeBufferView:
- *(reinterpret_cast<VkBufferView *>(destination)) = CastFromUint64<VkBufferView>(source);
+ *(reinterpret_cast<VkBufferView *>(destination)) = reinterpret_cast<VkBufferView>(source);
break;
default:
assert(0);
@@ -756,53 +740,48 @@ void *BuildUnwrappedUpdateTemplateBuffer(ValidationObject *layer_data, uint64_t
return (void *)unwrapped_data;
}
-void DispatchUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
- VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, const void *pData) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles)
- return layer_data->device_dispatch_table.UpdateDescriptorSetWithTemplate(device, descriptorSet, descriptorUpdateTemplate,
- pData);
+void DispatchUpdateDescriptorSetWithTemplate(ValidationObject *layer_data,
+ VkDevice device, VkDescriptorSet descriptorSet,
+ VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
+ const void *pData) {
+ if (!wrap_handles) return layer_data->device_dispatch_table.UpdateDescriptorSetWithTemplate(device, descriptorSet, descriptorUpdateTemplate, pData);
uint64_t template_handle = reinterpret_cast<uint64_t &>(descriptorUpdateTemplate);
- void *unwrapped_buffer = nullptr;
{
- read_dispatch_lock_guard_t lock(dispatch_lock);
+ std::lock_guard<std::mutex> lock(dispatch_lock);
descriptorSet = layer_data->Unwrap(descriptorSet);
- descriptorUpdateTemplate = (VkDescriptorUpdateTemplate)layer_data->Unwrap(descriptorUpdateTemplate);
- unwrapped_buffer = BuildUnwrappedUpdateTemplateBuffer(layer_data, template_handle, pData);
+ descriptorUpdateTemplate = (VkDescriptorUpdateTemplate)unique_id_mapping[template_handle];
}
+ void *unwrapped_buffer = BuildUnwrappedUpdateTemplateBuffer(layer_data, template_handle, pData);
layer_data->device_dispatch_table.UpdateDescriptorSetWithTemplate(device, descriptorSet, descriptorUpdateTemplate, unwrapped_buffer);
free(unwrapped_buffer);
}
-void DispatchUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet,
- VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, const void *pData) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!wrap_handles)
- return layer_data->device_dispatch_table.UpdateDescriptorSetWithTemplateKHR(device, descriptorSet, descriptorUpdateTemplate,
- pData);
+void DispatchUpdateDescriptorSetWithTemplateKHR(ValidationObject *layer_data,
+ VkDevice device, VkDescriptorSet descriptorSet,
+ VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
+ const void *pData) {
+ if (!wrap_handles) return layer_data->device_dispatch_table.UpdateDescriptorSetWithTemplateKHR(device, descriptorSet, descriptorUpdateTemplate, pData);
uint64_t template_handle = reinterpret_cast<uint64_t &>(descriptorUpdateTemplate);
void *unwrapped_buffer = nullptr;
{
- read_dispatch_lock_guard_t lock(dispatch_lock);
+ std::lock_guard<std::mutex> lock(dispatch_lock);
descriptorSet = layer_data->Unwrap(descriptorSet);
- descriptorUpdateTemplate = layer_data->Unwrap(descriptorUpdateTemplate);
+ descriptorUpdateTemplate = (VkDescriptorUpdateTemplate)unique_id_mapping[template_handle];
unwrapped_buffer = BuildUnwrappedUpdateTemplateBuffer(layer_data, template_handle, pData);
}
layer_data->device_dispatch_table.UpdateDescriptorSetWithTemplateKHR(device, descriptorSet, descriptorUpdateTemplate, unwrapped_buffer);
free(unwrapped_buffer);
}
-void DispatchCmdPushDescriptorSetWithTemplateKHR(VkCommandBuffer commandBuffer,
- VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, VkPipelineLayout layout,
- uint32_t set, const void *pData) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
- if (!wrap_handles)
- return layer_data->device_dispatch_table.CmdPushDescriptorSetWithTemplateKHR(commandBuffer, descriptorUpdateTemplate,
- layout, set, pData);
+void DispatchCmdPushDescriptorSetWithTemplateKHR(ValidationObject *layer_data,
+ VkCommandBuffer commandBuffer,
+ VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
+ VkPipelineLayout layout, uint32_t set, const void *pData) {
+ if (!wrap_handles) return layer_data->device_dispatch_table.CmdPushDescriptorSetWithTemplateKHR(commandBuffer, descriptorUpdateTemplate, layout, set, pData);
uint64_t template_handle = reinterpret_cast<uint64_t &>(descriptorUpdateTemplate);
void *unwrapped_buffer = nullptr;
{
- read_dispatch_lock_guard_t lock(dispatch_lock);
+ std::lock_guard<std::mutex> lock(dispatch_lock);
descriptorUpdateTemplate = layer_data->Unwrap(descriptorUpdateTemplate);
layout = layer_data->Unwrap(layout);
unwrapped_buffer = BuildUnwrappedUpdateTemplateBuffer(layer_data, template_handle, pData);
@@ -812,13 +791,14 @@ void DispatchCmdPushDescriptorSetWithTemplateKHR(VkCommandBuffer commandBuffer,
free(unwrapped_buffer);
}
-VkResult DispatchGetPhysicalDeviceDisplayPropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount,
+VkResult DispatchGetPhysicalDeviceDisplayPropertiesKHR(ValidationObject *layer_data,
+ VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount,
VkDisplayPropertiesKHR *pProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
VkResult result =
layer_data->instance_dispatch_table.GetPhysicalDeviceDisplayPropertiesKHR(physicalDevice, pPropertyCount, pProperties);
if (!wrap_handles) return result;
if ((result == VK_SUCCESS || result == VK_INCOMPLETE) && pProperties) {
+ std::lock_guard<std::mutex> lock(dispatch_lock);
for (uint32_t idx0 = 0; idx0 < *pPropertyCount; ++idx0) {
pProperties[idx0].display = layer_data->MaybeWrapDisplay(pProperties[idx0].display, layer_data);
}
@@ -826,13 +806,14 @@ VkResult DispatchGetPhysicalDeviceDisplayPropertiesKHR(VkPhysicalDevice physical
return result;
}
-VkResult DispatchGetPhysicalDeviceDisplayProperties2KHR(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount,
+VkResult DispatchGetPhysicalDeviceDisplayProperties2KHR(ValidationObject *layer_data,
+ VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount,
VkDisplayProperties2KHR *pProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
VkResult result =
layer_data->instance_dispatch_table.GetPhysicalDeviceDisplayProperties2KHR(physicalDevice, pPropertyCount, pProperties);
if (!wrap_handles) return result;
if ((result == VK_SUCCESS || result == VK_INCOMPLETE) && pProperties) {
+ std::lock_guard<std::mutex> lock(dispatch_lock);
for (uint32_t idx0 = 0; idx0 < *pPropertyCount; ++idx0) {
pProperties[idx0].displayProperties.display =
layer_data->MaybeWrapDisplay(pProperties[idx0].displayProperties.display, layer_data);
@@ -841,13 +822,15 @@ VkResult DispatchGetPhysicalDeviceDisplayProperties2KHR(VkPhysicalDevice physica
return result;
}
-VkResult DispatchGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount,
+VkResult DispatchGetPhysicalDeviceDisplayPlanePropertiesKHR(ValidationObject *layer_data,
+ VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount,
VkDisplayPlanePropertiesKHR *pProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+
VkResult result =
layer_data->instance_dispatch_table.GetPhysicalDeviceDisplayPlanePropertiesKHR(physicalDevice, pPropertyCount, pProperties);
if (!wrap_handles) return result;
if ((result == VK_SUCCESS || result == VK_INCOMPLETE) && pProperties) {
+ std::lock_guard<std::mutex> lock(dispatch_lock);
for (uint32_t idx0 = 0; idx0 < *pPropertyCount; ++idx0) {
VkDisplayKHR &opt_display = pProperties[idx0].currentDisplay;
if (opt_display) opt_display = layer_data->MaybeWrapDisplay(opt_display, layer_data);
@@ -856,13 +839,15 @@ VkResult DispatchGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice phy
return result;
}
-VkResult DispatchGetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount,
+VkResult DispatchGetPhysicalDeviceDisplayPlaneProperties2KHR(ValidationObject *layer_data,VkPhysicalDevice physicalDevice,
+ uint32_t *pPropertyCount,
VkDisplayPlaneProperties2KHR *pProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- VkResult result = layer_data->instance_dispatch_table.GetPhysicalDeviceDisplayPlaneProperties2KHR(physicalDevice,
- pPropertyCount, pProperties);
+
+ VkResult result =
+ layer_data->instance_dispatch_table.GetPhysicalDeviceDisplayPlaneProperties2KHR(physicalDevice, pPropertyCount, pProperties);
if (!wrap_handles) return result;
if ((result == VK_SUCCESS || result == VK_INCOMPLETE) && pProperties) {
+ std::lock_guard<std::mutex> lock(dispatch_lock);
for (uint32_t idx0 = 0; idx0 < *pPropertyCount; ++idx0) {
VkDisplayKHR &opt_display = pProperties[idx0].displayPlaneProperties.currentDisplay;
if (opt_display) opt_display = layer_data->MaybeWrapDisplay(opt_display, layer_data);
@@ -871,13 +856,14 @@ VkResult DispatchGetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice ph
return result;
}
-VkResult DispatchGetDisplayPlaneSupportedDisplaysKHR(VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t *pDisplayCount,
- VkDisplayKHR *pDisplays) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- VkResult result = layer_data->instance_dispatch_table.GetDisplayPlaneSupportedDisplaysKHR(physicalDevice, planeIndex,
- pDisplayCount, pDisplays);
+VkResult DispatchGetDisplayPlaneSupportedDisplaysKHR(ValidationObject *layer_data,
+ VkPhysicalDevice physicalDevice, uint32_t planeIndex,
+ uint32_t *pDisplayCount, VkDisplayKHR *pDisplays) {
+ VkResult result =
+ layer_data->instance_dispatch_table.GetDisplayPlaneSupportedDisplaysKHR(physicalDevice, planeIndex, pDisplayCount, pDisplays);
if ((result == VK_SUCCESS || result == VK_INCOMPLETE) && pDisplays) {
if (!wrap_handles) return result;
+ std::lock_guard<std::mutex> lock(dispatch_lock);
for (uint32_t i = 0; i < *pDisplayCount; ++i) {
if (pDisplays[i]) pDisplays[i] = layer_data->MaybeWrapDisplay(pDisplays[i], layer_data);
}
@@ -885,18 +871,18 @@ VkResult DispatchGetDisplayPlaneSupportedDisplaysKHR(VkPhysicalDevice physicalDe
return result;
}
-VkResult DispatchGetDisplayModePropertiesKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t *pPropertyCount,
- VkDisplayModePropertiesKHR *pProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- if (!wrap_handles)
- return layer_data->instance_dispatch_table.GetDisplayModePropertiesKHR(physicalDevice, display, pPropertyCount,
- pProperties);
+VkResult DispatchGetDisplayModePropertiesKHR(ValidationObject *layer_data,
+ VkPhysicalDevice physicalDevice, VkDisplayKHR display,
+ uint32_t *pPropertyCount, VkDisplayModePropertiesKHR *pProperties) {
+ if (!wrap_handles) return layer_data->instance_dispatch_table.GetDisplayModePropertiesKHR(physicalDevice, display, pPropertyCount, pProperties);
{
+ std::lock_guard<std::mutex> lock(dispatch_lock);
display = layer_data->Unwrap(display);
}
VkResult result = layer_data->instance_dispatch_table.GetDisplayModePropertiesKHR(physicalDevice, display, pPropertyCount, pProperties);
if ((result == VK_SUCCESS || result == VK_INCOMPLETE) && pProperties) {
+ std::lock_guard<std::mutex> lock(dispatch_lock);
for (uint32_t idx0 = 0; idx0 < *pPropertyCount; ++idx0) {
pProperties[idx0].displayMode = layer_data->WrapNew(pProperties[idx0].displayMode);
}
@@ -904,19 +890,19 @@ VkResult DispatchGetDisplayModePropertiesKHR(VkPhysicalDevice physicalDevice, Vk
return result;
}
-VkResult DispatchGetDisplayModeProperties2KHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t *pPropertyCount,
- VkDisplayModeProperties2KHR *pProperties) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- if (!wrap_handles)
- return layer_data->instance_dispatch_table.GetDisplayModeProperties2KHR(physicalDevice, display, pPropertyCount,
- pProperties);
+VkResult DispatchGetDisplayModeProperties2KHR(ValidationObject *layer_data,
+ VkPhysicalDevice physicalDevice, VkDisplayKHR display,
+ uint32_t *pPropertyCount, VkDisplayModeProperties2KHR *pProperties) {
+ if (!wrap_handles) return layer_data->instance_dispatch_table.GetDisplayModeProperties2KHR(physicalDevice, display, pPropertyCount, pProperties);
{
+ std::lock_guard<std::mutex> lock(dispatch_lock);
display = layer_data->Unwrap(display);
}
VkResult result =
layer_data->instance_dispatch_table.GetDisplayModeProperties2KHR(physicalDevice, display, pPropertyCount, pProperties);
if ((result == VK_SUCCESS || result == VK_INCOMPLETE) && pProperties) {
+ std::lock_guard<std::mutex> lock(dispatch_lock);
for (uint32_t idx0 = 0; idx0 < *pPropertyCount; ++idx0) {
pProperties[idx0].displayModeProperties.displayMode = layer_data->WrapNew(pProperties[idx0].displayModeProperties.displayMode);
}
@@ -924,11 +910,12 @@ VkResult DispatchGetDisplayModeProperties2KHR(VkPhysicalDevice physicalDevice, V
return result;
}
-VkResult DispatchDebugMarkerSetObjectTagEXT(VkDevice device, const VkDebugMarkerObjectTagInfoEXT *pTagInfo) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+VkResult DispatchDebugMarkerSetObjectTagEXT(ValidationObject *layer_data,
+ VkDevice device, const VkDebugMarkerObjectTagInfoEXT *pTagInfo) {
if (!wrap_handles) return layer_data->device_dispatch_table.DebugMarkerSetObjectTagEXT(device, pTagInfo);
safe_VkDebugMarkerObjectTagInfoEXT local_tag_info(pTagInfo);
{
+ std::lock_guard<std::mutex> lock(dispatch_lock);
auto it = unique_id_mapping.find(reinterpret_cast<uint64_t &>(local_tag_info.object));
if (it != unique_id_mapping.end()) {
local_tag_info.object = it->second;
@@ -939,11 +926,12 @@ VkResult DispatchDebugMarkerSetObjectTagEXT(VkDevice device, const VkDebugMarker
return result;
}
-VkResult DispatchDebugMarkerSetObjectNameEXT(VkDevice device, const VkDebugMarkerObjectNameInfoEXT *pNameInfo) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+VkResult DispatchDebugMarkerSetObjectNameEXT(ValidationObject *layer_data,
+ VkDevice device, const VkDebugMarkerObjectNameInfoEXT *pNameInfo) {
if (!wrap_handles) return layer_data->device_dispatch_table.DebugMarkerSetObjectNameEXT(device, pNameInfo);
safe_VkDebugMarkerObjectNameInfoEXT local_name_info(pNameInfo);
{
+ std::lock_guard<std::mutex> lock(dispatch_lock);
auto it = unique_id_mapping.find(reinterpret_cast<uint64_t &>(local_name_info.object));
if (it != unique_id_mapping.end()) {
local_name_info.object = it->second;
@@ -955,11 +943,12 @@ VkResult DispatchDebugMarkerSetObjectNameEXT(VkDevice device, const VkDebugMarke
}
// VK_EXT_debug_utils
-VkResult DispatchSetDebugUtilsObjectTagEXT(VkDevice device, const VkDebugUtilsObjectTagInfoEXT *pTagInfo) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+VkResult DispatchSetDebugUtilsObjectTagEXT(ValidationObject *layer_data,
+ VkDevice device, const VkDebugUtilsObjectTagInfoEXT *pTagInfo) {
if (!wrap_handles) return layer_data->device_dispatch_table.SetDebugUtilsObjectTagEXT(device, pTagInfo);
safe_VkDebugUtilsObjectTagInfoEXT local_tag_info(pTagInfo);
{
+ std::lock_guard<std::mutex> lock(dispatch_lock);
auto it = unique_id_mapping.find(reinterpret_cast<uint64_t &>(local_tag_info.objectHandle));
if (it != unique_id_mapping.end()) {
local_tag_info.objectHandle = it->second;
@@ -970,11 +959,12 @@ VkResult DispatchSetDebugUtilsObjectTagEXT(VkDevice device, const VkDebugUtilsOb
return result;
}
-VkResult DispatchSetDebugUtilsObjectNameEXT(VkDevice device, const VkDebugUtilsObjectNameInfoEXT *pNameInfo) {
- auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+VkResult DispatchSetDebugUtilsObjectNameEXT(ValidationObject *layer_data,
+ VkDevice device, const VkDebugUtilsObjectNameInfoEXT *pNameInfo) {
if (!wrap_handles) return layer_data->device_dispatch_table.SetDebugUtilsObjectNameEXT(device, pNameInfo);
safe_VkDebugUtilsObjectNameInfoEXT local_name_info(pNameInfo);
{
+ std::lock_guard<std::mutex> lock(dispatch_lock);
auto it = unique_id_mapping.find(reinterpret_cast<uint64_t &>(local_name_info.objectHandle));
if (it != unique_id_mapping.end()) {
local_name_info.objectHandle = it->second;
@@ -1083,10 +1073,6 @@ VkResult DispatchSetDebugUtilsObjectNameEXT(VkDevice device, const VkDebugUtilsO
#
def beginFile(self, genOpts):
OutputGenerator.beginFile(self, genOpts)
- # Initialize members that require the tree
- self.handle_types = GetHandleTypes(self.registry.tree)
- self.type_categories = GetTypeCategories(self.registry.tree)
- # Output Copyright
self.appendSection('header_file', self.inline_copyright_message)
# Multiple inclusion protection & C++ namespace.
self.header = False
@@ -1097,7 +1083,7 @@ VkResult DispatchSetDebugUtilsObjectNameEXT(VkDevice device, const VkDebugUtilsO
self.appendSection('header_file', '#if defined(LAYER_CHASSIS_CAN_WRAP_HANDLES)')
self.appendSection('header_file', 'extern bool wrap_handles;')
self.appendSection('header_file', '#else')
- self.appendSection('header_file', 'extern bool wrap_handles;')
+ self.appendSection('header_file', 'extern const bool wrap_handles;')
self.appendSection('header_file', '#endif')
# Now that the data is all collected and complete, generate and output the wrapping/unwrapping routines
@@ -1116,23 +1102,11 @@ VkResult DispatchSetDebugUtilsObjectNameEXT(VkDevice device, const VkDebugUtilsO
write('#include <mutex>', file=self.outFile)
write('#include "chassis.h"', file=self.outFile)
write('#include "layer_chassis_dispatch.h"', file=self.outFile)
- write('#include "vk_layer_utils.h"', file=self.outFile)
self.newline()
write('// This intentionally includes a cpp file', file=self.outFile)
write('#include "vk_safe_struct.cpp"', file=self.outFile)
self.newline()
- write('// shared_mutex support added in MSVC 2015 update 2', file=self.outFile)
- write('#if defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && NTDDI_VERSION > NTDDI_WIN10_RS2', file=self.outFile)
- write(' #include <shared_mutex>', file=self.outFile)
- write(' typedef std::shared_mutex dispatch_lock_t;', file=self.outFile)
- write(' typedef std::shared_lock<dispatch_lock_t> read_dispatch_lock_guard_t;', file=self.outFile)
- write(' typedef std::unique_lock<dispatch_lock_t> write_dispatch_lock_guard_t;', file=self.outFile)
- write('#else', file=self.outFile)
- write(' typedef std::mutex dispatch_lock_t;', file=self.outFile)
- write(' typedef std::unique_lock<dispatch_lock_t> read_dispatch_lock_guard_t;', file=self.outFile)
- write(' typedef std::unique_lock<dispatch_lock_t> write_dispatch_lock_guard_t;', file=self.outFile)
- write('#endif', file=self.outFile)
- write('dispatch_lock_t dispatch_lock;', file=self.outFile)
+ write('std::mutex dispatch_lock;', file=self.outFile)
self.newline()
write('// Unique Objects pNext extension handling function', file=self.outFile)
write('%s' % extension_proc, file=self.outFile)
@@ -1190,10 +1164,25 @@ VkResult DispatchSetDebugUtilsObjectNameEXT(VkDevice device, const VkDebugUtilsO
def paramIsPointer(self, param):
ispointer = False
for elem in param:
- if elem.tag == 'type' and elem.tail is not None and '*' in elem.tail:
+ if ((elem.tag is not 'type') and (elem.tail is not None)) and '*' in elem.tail:
ispointer = True
return ispointer
#
+ # Get the category of a type
+ def getTypeCategory(self, typename):
+ types = self.registry.tree.findall("types/type")
+ for elem in types:
+ if (elem.find("name") is not None and elem.find('name').text == typename) or elem.attrib.get('name') == typename:
+ return elem.attrib.get('category')
+ #
+ # Check if a parent object is dispatchable or not
+ def isHandleTypeNonDispatchable(self, handletype):
+ handle = self.registry.tree.find("types/type/[name='" + handletype + "'][@category='handle']")
+ if handle is not None and handle.find('type').text == 'VK_DEFINE_NON_DISPATCHABLE_HANDLE':
+ return True
+ else:
+ return False
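
isHandleTypeNonDispatchable answers that question by checking whether the registry declares the handle with VK_DEFINE_NON_DISPATCHABLE_HANDLE. In the C headers that macro reduces the handle to a 64-bit value, which is why the layer can hand the application a unique integer ID in its place. A simplified sketch of the header pattern (assumed, condensed from vulkan_core.h):

    // Dispatchable handles are always opaque pointers and carry the loader's dispatch key.
    #define VK_DEFINE_HANDLE(object) typedef struct object##_T* object;

    // Non-dispatchable handles are pointer-sized on 64-bit targets and uint64_t elsewhere,
    // so the layer can substitute its own 64-bit unique IDs for them.
    #if defined(__LP64__) || defined(_WIN64)
        #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef struct object##_T* object;
    #else
        #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef uint64_t object;
    #endif

    VK_DEFINE_HANDLE(VkDevice)                    // dispatchable: never wrapped
    VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSampler)  // non-dispatchable: wrapped with a unique ID
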
+ #
# Retrieve the type and name for a parameter
def getTypeNameTuple(self, param):
type = ''
@@ -1281,13 +1270,17 @@ VkResult DispatchSetDebugUtilsObjectNameEXT(VkDevice device, const VkDebugUtilsO
self.structMembers.append(self.StructMemberData(name=typeName, members=membersInfo))
#
+ # Insert a lock_guard line
+ def lock_guard(self, indent):
+ return '%sstd::lock_guard<std::mutex> lock(dispatch_lock);\n' % indent
+ #
# Determine if a struct has an NDO as a member or an embedded member
def struct_contains_ndo(self, struct_item):
struct_member_dict = dict(self.structMembers)
struct_members = struct_member_dict[struct_item]
for member in struct_members:
- if self.handle_types.IsNonDispatchable(member.type):
+ if self.isHandleTypeNonDispatchable(member.type):
return True
elif member.type in struct_member_dict:
if self.struct_contains_ndo(member.type) == True:
@@ -1300,7 +1293,7 @@ VkResult DispatchSetDebugUtilsObjectNameEXT(VkDevice device, const VkDebugUtilsO
struct_list = set()
for item in item_list:
paramtype = item.find('type')
- typecategory = self.type_categories[paramtype.text]
+ typecategory = self.getTypeCategory(paramtype.text)
if typecategory == 'struct':
if self.struct_contains_ndo(paramtype.text) == True:
struct_list.add(item)
@@ -1314,7 +1307,7 @@ VkResult DispatchSetDebugUtilsObjectNameEXT(VkDevice device, const VkDebugUtilsO
else:
member_list = item_list
for item in member_list:
- if self.handle_types.IsNonDispatchable(paramtype.text):
+ if self.isHandleTypeNonDispatchable(paramtype.text):
ndo_list.add(item)
return ndo_list
#
@@ -1349,24 +1342,26 @@ VkResult DispatchSetDebugUtilsObjectNameEXT(VkDevice device, const VkDebugUtilsO
def build_extension_processing_func(self):
# Construct helper functions to build and free pNext extension chains
pnext_proc = ''
- pnext_proc += 'void WrapPnextChainHandles(ValidationObject *layer_data, const void *pNext) {\n'
+ pnext_proc += 'void *CreateUnwrappedExtensionStructs(ValidationObject *layer_data, const void *pNext) {\n'
pnext_proc += ' void *cur_pnext = const_cast<void *>(pNext);\n'
+ pnext_proc += ' void *head_pnext = NULL;\n'
+ pnext_proc += ' void *prev_ext_struct = NULL;\n'
+ pnext_proc += ' void *cur_ext_struct = NULL;\n\n'
pnext_proc += ' while (cur_pnext != NULL) {\n'
pnext_proc += ' VkBaseOutStructure *header = reinterpret_cast<VkBaseOutStructure *>(cur_pnext);\n\n'
pnext_proc += ' switch (header->sType) {\n'
for item in self.pnext_extension_structs:
struct_info = self.struct_member_dict[item]
- indent = ' '
- (tmp_decl, tmp_pre, tmp_post) = self.uniquify_members(struct_info, indent, 'safe_struct->', 0, False, False, False, False)
- # Only process extension structs containing handles
- if not tmp_pre:
- continue
if struct_info[0].feature_protect is not None:
pnext_proc += '#ifdef %s \n' % struct_info[0].feature_protect
pnext_proc += ' case %s: {\n' % self.structTypes[item].value
- pnext_proc += ' safe_%s *safe_struct = reinterpret_cast<safe_%s *>(cur_pnext);\n' % (item, item)
+ pnext_proc += ' safe_%s *safe_struct = new safe_%s;\n' % (item, item)
+ pnext_proc += ' safe_struct->initialize(reinterpret_cast<const %s *>(cur_pnext));\n' % item
# Generate code to unwrap the handles
+ indent = ' '
+ (tmp_decl, tmp_pre, tmp_post) = self.uniquify_members(struct_info, indent, 'safe_struct->', 0, False, False, False, False)
pnext_proc += tmp_pre
+ pnext_proc += ' cur_ext_struct = reinterpret_cast<void *>(safe_struct);\n'
pnext_proc += ' } break;\n'
if struct_info[0].feature_protect is not None:
pnext_proc += '#endif // %s \n' % struct_info[0].feature_protect
@@ -1374,9 +1369,39 @@ VkResult DispatchSetDebugUtilsObjectNameEXT(VkDevice device, const VkDebugUtilsO
pnext_proc += ' default:\n'
pnext_proc += ' break;\n'
pnext_proc += ' }\n\n'
+ pnext_proc += ' // Save pointer to the first structure in the pNext chain\n'
+ pnext_proc += ' head_pnext = (head_pnext ? head_pnext : cur_ext_struct);\n\n'
+ pnext_proc += ' // For any extension structure but the first, link the last struct\'s pNext to the current ext struct\n'
+ pnext_proc += ' if (prev_ext_struct) {\n'
+ pnext_proc += ' reinterpret_cast<VkBaseOutStructure *>(prev_ext_struct)->pNext = reinterpret_cast<VkBaseOutStructure *>(cur_ext_struct);\n'
+ pnext_proc += ' }\n'
+ pnext_proc += ' prev_ext_struct = cur_ext_struct;\n\n'
pnext_proc += ' // Process the next structure in the chain\n'
pnext_proc += ' cur_pnext = header->pNext;\n'
pnext_proc += ' }\n'
+ pnext_proc += ' return head_pnext;\n'
+ pnext_proc += '}\n\n'
+ pnext_proc += '// Free a pNext extension chain\n'
+ pnext_proc += 'void FreeUnwrappedExtensionStructs(void *head) {\n'
+ pnext_proc += ' VkBaseOutStructure *curr_ptr = reinterpret_cast<VkBaseOutStructure *>(head);\n'
+ pnext_proc += ' while (curr_ptr) {\n'
+ pnext_proc += ' VkBaseOutStructure *header = curr_ptr;\n'
+ pnext_proc += ' curr_ptr = reinterpret_cast<VkBaseOutStructure *>(header->pNext);\n\n'
+ pnext_proc += ' switch (header->sType) {\n';
+ for item in self.pnext_extension_structs:
+ struct_info = self.struct_member_dict[item]
+ if struct_info[0].feature_protect is not None:
+ pnext_proc += '#ifdef %s \n' % struct_info[0].feature_protect
+ pnext_proc += ' case %s:\n' % self.structTypes[item].value
+ pnext_proc += ' delete reinterpret_cast<safe_%s *>(header);\n' % item
+ pnext_proc += ' break;\n'
+ if struct_info[0].feature_protect is not None:
+ pnext_proc += '#endif // %s \n' % struct_info[0].feature_protect
+ pnext_proc += '\n'
+ pnext_proc += ' default:\n'
+ pnext_proc += ' assert(0);\n'
+ pnext_proc += ' }\n'
+ pnext_proc += ' }\n'
pnext_proc += '}\n'
return pnext_proc
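
After this change the generator emits a CreateUnwrappedExtensionStructs/FreeUnwrappedExtensionStructs pair: the create side deep-copies every recognized pNext struct into its safe_* counterpart, unwraps any handle members, and relinks the copies into a new chain; the free side walks that chain and deletes each node by sType. A hedged sketch of the generated shape for one representative struct (VkSamplerYcbcrConversionInfo stands in for any chained struct that carries a handle):

    // Sketch of the emitted pattern; the real file contains one case per extension struct.
    // Callers hold dispatch_lock, so layer_data->Unwrap() may be used directly.
    void *CreateUnwrappedExtensionStructs(ValidationObject *layer_data, const void *pNext) {
        void *cur_pnext = const_cast<void *>(pNext);
        void *head_pnext = NULL, *prev_ext_struct = NULL, *cur_ext_struct = NULL;
        while (cur_pnext != NULL) {
            VkBaseOutStructure *header = reinterpret_cast<VkBaseOutStructure *>(cur_pnext);
            switch (header->sType) {
                case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO: {
                    auto *safe_struct = new safe_VkSamplerYcbcrConversionInfo;
                    safe_struct->initialize(reinterpret_cast<const VkSamplerYcbcrConversionInfo *>(cur_pnext));
                    safe_struct->conversion = layer_data->Unwrap(safe_struct->conversion);
                    cur_ext_struct = reinterpret_cast<void *>(safe_struct);
                } break;
                default:
                    break;
            }
            // First copy becomes the head of the new chain; later copies link to their predecessor.
            head_pnext = (head_pnext ? head_pnext : cur_ext_struct);
            if (prev_ext_struct) {
                reinterpret_cast<VkBaseOutStructure *>(prev_ext_struct)->pNext =
                    reinterpret_cast<VkBaseOutStructure *>(cur_ext_struct);
            }
            prev_ext_struct = cur_ext_struct;
            cur_pnext = header->pNext;
        }
        return head_pnext;  // released later with FreeUnwrappedExtensionStructs()
    }
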
@@ -1385,7 +1410,7 @@ VkResult DispatchSetDebugUtilsObjectNameEXT(VkDevice device, const VkDebugUtilsO
def generate_create_ndo_code(self, indent, proto, params, cmd_info):
create_ndo_code = ''
handle_type = params[-1].find('type')
- if self.handle_types.IsNonDispatchable(handle_type.text):
+ if self.isHandleTypeNonDispatchable(handle_type.text):
# Check for special case where multiple handles are returned
ndo_array = False
if cmd_info[-1].len is not None:
@@ -1393,6 +1418,7 @@ VkResult DispatchSetDebugUtilsObjectNameEXT(VkDevice device, const VkDebugUtilsO
handle_name = params[-1].find('name')
create_ndo_code += '%sif (VK_SUCCESS == result) {\n' % (indent)
indent = self.incIndent(indent)
+ create_ndo_code += '%sstd::lock_guard<std::mutex> lock(dispatch_lock);\n' % (indent)
ndo_dest = '*%s' % handle_name.text
if ndo_array == True:
create_ndo_code += '%sfor (uint32_t index0 = 0; index0 < %s; index0++) {\n' % (indent, cmd_info[-1].len)
@@ -1417,11 +1443,12 @@ VkResult DispatchSetDebugUtilsObjectNameEXT(VkDevice device, const VkDebugUtilsO
param = -1
else:
param = -2
- if self.handle_types.IsNonDispatchable(cmd_info[param].type):
+ if self.isHandleTypeNonDispatchable(cmd_info[param].type) == True:
if ndo_array == True:
# This API is freeing an array of handles. Remove them from the unique_id map.
destroy_ndo_code += '%sif ((VK_SUCCESS == result) && (%s)) {\n' % (indent, cmd_info[param].name)
indent = self.incIndent(indent)
+ destroy_ndo_code += '%sstd::unique_lock<std::mutex> lock(dispatch_lock);\n' % (indent)
destroy_ndo_code += '%sfor (uint32_t index0 = 0; index0 < %s; index0++) {\n' % (indent, cmd_info[param].len)
indent = self.incIndent(indent)
destroy_ndo_code += '%s%s handle = %s[index0];\n' % (indent, cmd_info[param].type, cmd_info[param].name)
@@ -1433,27 +1460,26 @@ VkResult DispatchSetDebugUtilsObjectNameEXT(VkDevice device, const VkDebugUtilsO
destroy_ndo_code += '%s}\n' % indent
else:
# Remove a single handle from the map
+ destroy_ndo_code += '%sstd::unique_lock<std::mutex> lock(dispatch_lock);\n' % (indent)
destroy_ndo_code += '%suint64_t %s_id = reinterpret_cast<uint64_t &>(%s);\n' % (indent, cmd_info[param].name, cmd_info[param].name)
- destroy_ndo_code += '%sauto iter = unique_id_mapping.pop(%s_id);\n' % (indent, cmd_info[param].name)
- destroy_ndo_code += '%sif (iter != unique_id_mapping.end()) {\n' % (indent)
- indent = self.incIndent(indent)
- destroy_ndo_code += '%s%s = (%s)iter->second;\n' % (indent, cmd_info[param].name, cmd_info[param].type)
- indent = self.decIndent(indent);
- destroy_ndo_code += '%s} else {\n' % (indent)
- indent = self.incIndent(indent)
- destroy_ndo_code += '%s%s = (%s)0;\n' % (indent, cmd_info[param].name, cmd_info[param].type)
- indent = self.decIndent(indent);
- destroy_ndo_code += '%s}\n' % (indent)
-
+ destroy_ndo_code += '%s%s = (%s)unique_id_mapping[%s_id];\n' % (indent, cmd_info[param].name, cmd_info[param].type, cmd_info[param].name)
+ destroy_ndo_code += '%sunique_id_mapping.erase(%s_id);\n' % (indent, cmd_info[param].name)
+ destroy_ndo_code += '%slock.unlock();\n' % (indent)
return ndo_array, destroy_ndo_code
#
# Clean up local declarations
- def cleanUpLocalDeclarations(self, indent, prefix, name, len, index):
+ def cleanUpLocalDeclarations(self, indent, prefix, name, len, index, process_pnext):
cleanup = '%sif (local_%s%s) {\n' % (indent, prefix, name)
if len is not None:
+ if process_pnext:
+ cleanup += '%s for (uint32_t %s = 0; %s < %s%s; ++%s) {\n' % (indent, index, index, prefix, len, index)
+ cleanup += '%s FreeUnwrappedExtensionStructs(const_cast<void *>(local_%s%s[%s].pNext));\n' % (indent, prefix, name, index)
+ cleanup += '%s }\n' % indent
cleanup += '%s delete[] local_%s%s;\n' % (indent, prefix, name)
else:
+ if process_pnext:
+ cleanup += '%s FreeUnwrappedExtensionStructs(const_cast<void *>(local_%s%s->pNext));\n' % (indent, prefix, name)
cleanup += '%s delete local_%s%s;\n' % (indent, prefix, name)
cleanup += "%s}\n" % (indent)
return cleanup
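
With process_pnext passed in, the cleanup code frees any chain built by CreateUnwrappedExtensionStructs before deleting the local safe-struct copies. Illustrative emitted output for an array parameter (parameter names hypothetical):

    if (local_pCreateInfos) {
        for (uint32_t index0 = 0; index0 < createInfoCount; ++index0) {
            // Release the unwrapped pNext chain attached to each element's local copy.
            FreeUnwrappedExtensionStructs(const_cast<void *>(local_pCreateInfos[index0].pNext));
        }
        delete[] local_pCreateInfos;
    }
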
@@ -1514,7 +1540,7 @@ VkResult DispatchSetDebugUtilsObjectNameEXT(VkDevice device, const VkDebugUtilsO
for member in members:
process_pnext = self.StructWithExtensions(member.type)
# Handle NDOs
- if self.handle_types.IsNonDispatchable(member.type):
+ if self.isHandleTypeNonDispatchable(member.type) == True:
count_name = member.len
if (count_name is not None):
if first_level_param == False:
@@ -1550,7 +1576,7 @@ VkResult DispatchSetDebugUtilsObjectNameEXT(VkDevice device, const VkDebugUtilsO
if first_level_param == True:
pre_code += '%s %s[%s].initialize(&%s[%s]);\n' % (indent, new_prefix, index, member.name, index)
if process_pnext:
- pre_code += '%s WrapPnextChainHandles(layer_data, %s[%s].pNext);\n' % (indent, new_prefix, index)
+ pre_code += '%s %s[%s].pNext = CreateUnwrappedExtensionStructs(layer_data, %s[%s].pNext);\n' % (indent, new_prefix, index, new_prefix, index)
local_prefix = '%s[%s].' % (new_prefix, index)
# Process sub-structs in this struct
(tmp_decl, tmp_pre, tmp_post) = self.uniquify_members(struct_info, indent, local_prefix, array_index, create_func, destroy_func, destroy_array, False)
@@ -1562,7 +1588,7 @@ VkResult DispatchSetDebugUtilsObjectNameEXT(VkDevice device, const VkDebugUtilsO
indent = self.decIndent(indent)
pre_code += '%s }\n' % indent
if first_level_param == True:
- post_code += self.cleanUpLocalDeclarations(indent, prefix, member.name, member.len, index)
+ post_code += self.cleanUpLocalDeclarations(indent, prefix, member.name, member.len, index, process_pnext)
# Single Struct
elif ispointer:
# Update struct prefix
@@ -1582,11 +1608,11 @@ VkResult DispatchSetDebugUtilsObjectNameEXT(VkDevice device, const VkDebugUtilsO
pre_code += tmp_pre
post_code += tmp_post
if process_pnext:
- pre_code += '%s WrapPnextChainHandles(layer_data, local_%s%s->pNext);\n' % (indent, prefix, member.name)
+ pre_code += '%s local_%s%s->pNext = CreateUnwrappedExtensionStructs(layer_data, local_%s%s->pNext);\n' % (indent, prefix, member.name, prefix, member.name)
indent = self.decIndent(indent)
pre_code += '%s }\n' % indent
if first_level_param == True:
- post_code += self.cleanUpLocalDeclarations(indent, prefix, member.name, member.len, index)
+ post_code += self.cleanUpLocalDeclarations(indent, prefix, member.name, member.len, index, process_pnext)
else:
# Update struct prefix
if first_level_param == True:
@@ -1599,7 +1625,7 @@ VkResult DispatchSetDebugUtilsObjectNameEXT(VkDevice device, const VkDebugUtilsO
pre_code += tmp_pre
post_code += tmp_post
if process_pnext:
- pre_code += '%s WrapPnextChainHandles(layer_data, local_%s%s.pNext);\n' % (indent, prefix, member.name)
+ pre_code += '%s local_%s%s.pNext = CreateUnwrappedExtensionStructs(layer_data, local_%s%s.pNext);\n' % (indent, prefix, member.name, prefix, member.name)
return decls, pre_code, post_code
#
# For a particular API, generate the non-dispatchable-object wrapping/unwrapping code
@@ -1636,7 +1662,7 @@ VkResult DispatchSetDebugUtilsObjectNameEXT(VkDevice device, const VkDebugUtilsO
param_pre_code += destroy_ndo_code
if param_pre_code:
if (not destroy_func) or (destroy_array):
- param_pre_code = '%s{\n%s%s}\n' % (' ', param_pre_code, indent)
+ param_pre_code = '%s{\n%s%s%s%s}\n' % (' ', indent, self.lock_guard(indent), param_pre_code, indent)
return paramdecl, param_pre_code, param_post_code
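
For calls that only consume handles (no create, no array destroy), the pre-call code is now wrapped in a scoped block that takes dispatch_lock via self.lock_guard() before any unique_id_mapping access. The emitted shape is roughly:

    {
        std::lock_guard<std::mutex> lock(dispatch_lock);
        // ... generated statements that build local copies and unwrap each handle parameter ...
    }
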
#
# Capture command parameter info needed to wrap NDOs as well as handling some boilerplate code
@@ -1667,7 +1693,7 @@ VkResult DispatchSetDebugUtilsObjectNameEXT(VkDevice device, const VkDebugUtilsO
ispointer = self.paramIsPointer(member)
# Mark param as local if it is an array of NDOs
islocal = False;
- if self.handle_types.IsNonDispatchable(type):
+ if self.isHandleTypeNonDispatchable(type) == True:
if (len is not None) and (isconst == True):
islocal = True
# Or if it's a struct that contains an NDO
@@ -1699,6 +1725,7 @@ VkResult DispatchSetDebugUtilsObjectNameEXT(VkDevice device, const VkDebugUtilsO
func_sig = decls[0][:-1]
func_sig = func_sig.replace("VKAPI_ATTR ", "")
func_sig = func_sig.replace("VKAPI_CALL ", "Dispatch")
+ func_sig = func_sig.replace("(", "(ValidationObject *layer_data, ")
func_sig += ';'
dispatch_prototype = ''
if ifdef_text is not None:
@@ -1742,6 +1769,7 @@ VkResult DispatchSetDebugUtilsObjectNameEXT(VkDevice device, const VkDebugUtilsO
func_sig = decls[0][:-1]
func_sig = func_sig.replace("VKAPI_ATTR ", "")
func_sig = func_sig.replace("VKAPI_CALL ", "Dispatch")
+ func_sig = func_sig.replace("(", "(ValidationObject *layer_data, ")
self.appendSection('source_file', '')
self.appendSection('source_file', func_sig)
self.appendSection('source_file', '{')
@@ -1769,8 +1797,7 @@ VkResult DispatchSetDebugUtilsObjectNameEXT(VkDevice device, const VkDebugUtilsO
dispatch_table_type = "instance_dispatch_table"
api_func = cmdinfo.elem.attrib.get('name').replace('vk','layer_data->%s.',1) % dispatch_table_type
- # Call to get the layer_data pointer
- self.appendSection('source_file', ' auto layer_data = GetLayerDataPtr(get_dispatch_key(%s), layer_data_map);' % dispatchable_name)
+
# Put all this together for the final down-chain call
if not down_chain_call_only:
unwrapped_dispatch_call = api_func + '(' + paramstext + ')'
diff --git a/scripts/layer_chassis_generator.py b/scripts/layer_chassis_generator.py
index c34e5ad81..87c1a1f50 100644
--- a/scripts/layer_chassis_generator.py
+++ b/scripts/layer_chassis_generator.py
@@ -62,7 +62,6 @@ from common_codegen import *
# separate line, align parameter names at the specified column
class LayerChassisGeneratorOptions(GeneratorOptions):
def __init__(self,
- conventions = None,
filename = None,
directory = '.',
apiname = None,
@@ -86,7 +85,7 @@ class LayerChassisGeneratorOptions(GeneratorOptions):
alignFuncParam = 0,
helper_file_type = '',
expandEnumerants = True):
- GeneratorOptions.__init__(self, conventions, filename, directory, apiname, profile,
+ GeneratorOptions.__init__(self, filename, directory, apiname, profile,
versions, emitversions, defaultExtensions,
addExtensions, removeExtensions, emitExtensions, sortProcedure)
self.prefixText = prefixText
@@ -128,6 +127,7 @@ class LayerChassisOutputGenerator(OutputGenerator):
        # Include functions here to be intercepted w/ manually implemented function bodies
'vkGetDeviceProcAddr',
'vkGetInstanceProcAddr',
+ 'vkGetPhysicalDeviceProcAddr',
'vkCreateDevice',
'vkDestroyDevice',
'vkCreateInstance',
@@ -148,8 +148,6 @@ class LayerChassisOutputGenerator(OutputGenerator):
'vkDestroyValidationCacheEXT',
'vkMergeValidationCachesEXT',
'vkGetValidationCacheDataEXT',
- # We don't wanna hook this function
- 'vkGetPhysicalDeviceProcAddr',
]
alt_ret_codes = [
@@ -182,10 +180,14 @@ class LayerChassisOutputGenerator(OutputGenerator):
'vkSetDebugUtilsObjectNameEXT' : 'layer_data->report_data->DebugReportSetUtilsObjectName(pNameInfo);',
'vkQueueBeginDebugUtilsLabelEXT' : 'BeginQueueDebugUtilsLabel(layer_data->report_data, queue, pLabelInfo);',
'vkQueueInsertDebugUtilsLabelEXT' : 'InsertQueueDebugUtilsLabel(layer_data->report_data, queue, pLabelInfo);',
+ 'vkCmdBeginDebugUtilsLabelEXT' : 'BeginCmdDebugUtilsLabel(layer_data->report_data, commandBuffer, pLabelInfo);',
+ 'vkCmdInsertDebugUtilsLabelEXT' : 'InsertCmdDebugUtilsLabel(layer_data->report_data, commandBuffer, pLabelInfo);'
}
post_dispatch_debug_utils_functions = {
'vkQueueEndDebugUtilsLabelEXT' : 'EndQueueDebugUtilsLabel(layer_data->report_data, queue);',
+ 'vkCmdEndDebugUtilsLabelEXT' : 'EndCmdDebugUtilsLabel(layer_data->report_data, commandBuffer);',
+ 'vkCmdInsertDebugUtilsLabelEXT' : 'InsertCmdDebugUtilsLabel(layer_data->report_data, commandBuffer, pLabelInfo);',
'vkCreateDebugReportCallbackEXT' : 'layer_create_report_callback(layer_data->report_data, false, pCreateInfo, pAllocator, pCallback);',
'vkDestroyDebugReportCallbackEXT' : 'layer_destroy_report_callback(layer_data->report_data, callback, pAllocator);',
'vkCreateDebugUtilsMessengerEXT' : 'layer_create_messenger_callback(layer_data->report_data, false, pCreateInfo, pAllocator, pMessenger);',
@@ -198,7 +200,6 @@ class LayerChassisOutputGenerator(OutputGenerator):
inline_custom_header_preamble = """
#define NOMINMAX
-#include <atomic>
#include <mutex>
#include <cinttypes>
#include <stdio.h>
@@ -221,13 +222,14 @@ class LayerChassisOutputGenerator(OutputGenerator):
#include "vk_layer_utils.h"
#include "vulkan/vk_layer.h"
#include "vk_dispatch_table_helper.h"
+#include "vk_validation_error_messages.h"
#include "vk_extension_helper.h"
#include "vk_safe_struct.h"
#include "vk_typemap_helper.h"
-extern std::atomic<uint64_t> global_unique_id;
-extern vl_concurrent_unordered_map<uint64_t, uint64_t, 4> unique_id_mapping;
+extern uint64_t global_unique_id;
+extern std::unordered_map<uint64_t, uint64_t> unique_id_mapping;
"""
inline_custom_header_class_definition = """
@@ -240,7 +242,6 @@ enum LayerObjectTypeId {
LayerObjectTypeParameterValidation, // Instance or device parameter validation layer object
LayerObjectTypeObjectTracker, // Instance or device object tracker layer object
LayerObjectTypeCoreValidation, // Instance or device core validation layer object
- LayerObjectTypeBestPractices, // Instance or device best practices layer object
};
struct TEMPLATE_STATE {
@@ -257,37 +258,42 @@ public:
std::vector<VkQueueFamilyProperties> queue_family_properties;
};
-typedef enum ValidationCheckDisables {
- VALIDATION_CHECK_DISABLE_COMMAND_BUFFER_STATE,
- VALIDATION_CHECK_DISABLE_OBJECT_IN_USE,
- VALIDATION_CHECK_DISABLE_IDLE_DESCRIPTOR_SET,
- VALIDATION_CHECK_DISABLE_PUSH_CONSTANT_RANGE,
- VALIDATION_CHECK_DISABLE_QUERY_VALIDATION,
- VALIDATION_CHECK_DISABLE_IMAGE_LAYOUT_VALIDATION,
-} ValidationCheckDisables;
-
-typedef enum VkValidationFeatureEnable {
- VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES,
-} VkValidationFeatureEnable;
-
-
// CHECK_DISABLED struct is a container for bools that can block validation checks from being performed.
-// These bools are all "false" by default meaning that all checks are enabled. Enum values can be specified
-// via the vk_layer_setting.txt config file or at CreateInstance time via the VK_EXT_validation_features extension
-// that can selectively disable checks.
+// The end goal is to have all checks guarded by a bool. The bools are all "false" by default meaning that all checks
+// are enabled. At CreateInstance time, the user can use the VK_EXT_validation_flags extension to pass in enum values
+// of VkValidationCheckEXT that will selectively disable checks.
+// The VK_EXT_validation_features extension can also be used with the VkValidationFeaturesEXT structure to set
+// disables in the CHECK_DISABLED struct and/or enables in the CHECK_ENABLED struct.
struct CHECK_DISABLED {
- bool command_buffer_state; // Skip command buffer state validation
- bool object_in_use; // Skip all object in_use checking
- bool idle_descriptor_set; // Skip check to verify that descriptor set is not in-use
- bool push_constant_range; // Skip push constant range checks
- bool query_validation; // Disable all core validation query-related checks
- bool image_layout_validation; // Disable image layout validation
- bool object_tracking; // Disable object lifetime validation
- bool core_checks; // Disable core validation checks
- bool thread_safety; // Disable thread safety validation
- bool stateless_checks; // Disable stateless validation checks
- bool handle_wrapping; // Disable unique handles/handle wrapping
- bool shader_validation; // Skip validation for shaders
+ bool command_buffer_state;
+ bool create_descriptor_set_layout;
+ bool destroy_buffer_view; // Skip validation at DestroyBufferView time
+ bool destroy_image_view; // Skip validation at DestroyImageView time
+ bool destroy_pipeline; // Skip validation at DestroyPipeline time
+ bool destroy_descriptor_pool; // Skip validation at DestroyDescriptorPool time
+ bool destroy_framebuffer; // Skip validation at DestroyFramebuffer time
+ bool destroy_renderpass; // Skip validation at DestroyRenderpass time
+ bool destroy_image; // Skip validation at DestroyImage time
+ bool destroy_sampler; // Skip validation at DestroySampler time
+ bool destroy_command_pool; // Skip validation at DestroyCommandPool time
+ bool destroy_event; // Skip validation at DestroyEvent time
+ bool free_memory; // Skip validation at FreeMemory time
+ bool object_in_use; // Skip all object in_use checking
+    bool idle_descriptor_set;          // Skip check to verify that descriptor set is not in-use
+ bool push_constant_range; // Skip push constant range checks
+ bool free_descriptor_sets; // Skip validation prior to vkFreeDescriptorSets()
+ bool allocate_descriptor_sets; // Skip validation prior to vkAllocateDescriptorSets()
+ bool update_descriptor_sets; // Skip validation prior to vkUpdateDescriptorSets()
+ bool wait_for_fences;
+ bool get_fence_state;
+ bool queue_wait_idle;
+ bool device_wait_idle;
+ bool destroy_fence;
+ bool destroy_semaphore;
+ bool destroy_query_pool;
+ bool get_query_pool_results;
+ bool destroy_buffer;
+ bool shader_validation; // Skip validation for shaders
void SetAll(bool value) { std::fill(&command_buffer_state, &shader_validation + 1, value); }
};
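
SetAll works only because the disable flags are a contiguous run of bool members: std::fill treats the struct like an array bounded by its first and last flag. A minimal sketch of the same pattern with a hypothetical struct, including the maintenance caveat:

    #include <algorithm>

    struct ExampleDisables {
        bool first_check;   // must stay the first bool member
        bool other_check;
        bool last_check;    // must stay the last bool member
        // New flags must be added between first_check and last_check; anything added
        // after last_check (or any non-bool member in between) breaks SetAll().
        void SetAll(bool value) { std::fill(&first_check, &last_check + 1, value); }
    };
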
@@ -295,7 +301,6 @@ struct CHECK_DISABLED {
struct CHECK_ENABLED {
bool gpu_validation;
bool gpu_validation_reserve_binding_slot;
- bool best_practices;
void SetAll(bool value) { std::fill(&gpu_validation, &gpu_validation_reserve_binding_slot + 1, value); }
};
@@ -347,9 +352,8 @@ class ValidationObject {
// Handle Wrapping Data
// Reverse map display handles
- vl_concurrent_unordered_map<VkDisplayKHR, uint64_t, 0> display_id_reverse_mapping;
- // Wrapping Descriptor Template Update structures requires access to the template createinfo structs
- std::unordered_map<uint64_t, std::unique_ptr<TEMPLATE_STATE>> desc_template_createinfo_map;
+ std::unordered_map<VkDisplayKHR, uint64_t> display_id_reverse_mapping;
+ std::unordered_map<uint64_t, std::unique_ptr<TEMPLATE_STATE>> desc_template_map;
struct SubpassesUsageStates {
std::unordered_set<uint32_t> subpasses_using_color_attachment;
std::unordered_set<uint32_t> subpasses_using_depthstencil_attachment;
@@ -363,33 +367,31 @@ class ValidationObject {
std::unordered_map<VkDescriptorPool, std::unordered_set<VkDescriptorSet>> pool_descriptor_sets_map;
- // Unwrap a handle.
+ // Unwrap a handle. Must hold lock.
template <typename HandleType>
HandleType Unwrap(HandleType wrappedHandle) {
- auto iter = unique_id_mapping.find(reinterpret_cast<uint64_t const &>(wrappedHandle));
- if (iter == unique_id_mapping.end())
- return (HandleType)0;
- return (HandleType)iter->second;
+ // TODO: don't use operator[] here.
+ return (HandleType)unique_id_mapping[reinterpret_cast<uint64_t const &>(wrappedHandle)];
}
- // Wrap a newly created handle with a new unique ID, and return the new ID.
+ // Wrap a newly created handle with a new unique ID, and return the new ID -- must hold lock.
template <typename HandleType>
HandleType WrapNew(HandleType newlyCreatedHandle) {
auto unique_id = global_unique_id++;
- unique_id_mapping.insert_or_assign(unique_id, reinterpret_cast<uint64_t const &>(newlyCreatedHandle));
+ unique_id_mapping[unique_id] = reinterpret_cast<uint64_t const &>(newlyCreatedHandle);
return (HandleType)unique_id;
}
- // Specialized handling for VkDisplayKHR. Adds an entry to enable reverse-lookup.
+ // Specialized handling for VkDisplayKHR. Adds an entry to enable reverse-lookup. Must hold lock.
VkDisplayKHR WrapDisplay(VkDisplayKHR newlyCreatedHandle, ValidationObject *map_data) {
auto unique_id = global_unique_id++;
- unique_id_mapping.insert_or_assign(unique_id, reinterpret_cast<uint64_t const &>(newlyCreatedHandle));
- map_data->display_id_reverse_mapping.insert_or_assign(newlyCreatedHandle, unique_id);
+ unique_id_mapping[unique_id] = reinterpret_cast<uint64_t const &>(newlyCreatedHandle);
+ map_data->display_id_reverse_mapping[newlyCreatedHandle] = unique_id;
return (VkDisplayKHR)unique_id;
}
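
With the concurrent map gone, WrapNew/Unwrap assume the caller already holds dispatch_lock, which matches the lock_guard that generate_create_ndo_code now emits. A sketch of a typical create-time sequence inside a generated Dispatch* function (handle type illustrative):

    // layer_data, dispatch_lock, and the dispatch table all exist in the generated source.
    VkResult result = layer_data->device_dispatch_table.CreateSampler(device, pCreateInfo, pAllocator, pSampler);
    if (VK_SUCCESS == result) {
        std::lock_guard<std::mutex> lock(dispatch_lock);  // WrapNew must run under the lock
        *pSampler = layer_data->WrapNew(*pSampler);       // the app sees a unique ID, not the driver handle
    }
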
// VkDisplayKHR objects don't have a single point of creation, so we need to see if one already exists in the map before
- // creating another.
+ // creating another. Must hold lock.
VkDisplayKHR MaybeWrapDisplay(VkDisplayKHR handle, ValidationObject *map_data) {
// See if this display is already known
auto it = map_data->display_id_reverse_mapping.find(handle);
@@ -437,56 +439,34 @@ class ValidationObject {
std::unordered_map<void*, ValidationObject*> layer_data_map;
-// Global unique object identifier.
-std::atomic<uint64_t> global_unique_id(1ULL);
-// Map uniqueID to actual object handle. Accesses to the map itself are
-// internally synchronized.
-vl_concurrent_unordered_map<uint64_t, uint64_t, 4> unique_id_mapping;
+// Global unique object identifier. All increments must be guarded by a lock.
+uint64_t global_unique_id = 1;
+// Map uniqueID to actual object handle
+std::unordered_map<uint64_t, uint64_t> unique_id_mapping;
// TODO: This variable controls handle wrapping -- in the future it should be hooked
// up to the new VALIDATION_FEATURES extension. Temporarily, control with a compile-time flag.
#if defined(LAYER_CHASSIS_CAN_WRAP_HANDLES)
bool wrap_handles = true;
#else
-bool wrap_handles = false;
+const bool wrap_handles = false;
#endif
-// Set layer name -- Khronos layer name overrides any other defined names
-#if BUILD_KHRONOS_VALIDATION
-#define OBJECT_LAYER_NAME "VK_LAYER_KHRONOS_validation"
-#define OBJECT_LAYER_DESCRIPTION "khronos_validation"
-#elif BUILD_OBJECT_TRACKER
+// Include child object (layer) definitions
+#if BUILD_OBJECT_TRACKER
+#include "object_lifetime_validation.h"
#define OBJECT_LAYER_NAME "VK_LAYER_LUNARG_object_tracker"
-#define OBJECT_LAYER_DESCRIPTION "lunarg_object_tracker"
#elif BUILD_THREAD_SAFETY
+#include "thread_safety.h"
#define OBJECT_LAYER_NAME "VK_LAYER_GOOGLE_threading"
-#define OBJECT_LAYER_DESCRIPTION "google_thread_checker"
#elif BUILD_PARAMETER_VALIDATION
+#include "stateless_validation.h"
#define OBJECT_LAYER_NAME "VK_LAYER_LUNARG_parameter_validation"
-#define OBJECT_LAYER_DESCRIPTION "lunarg_parameter_validation"
#elif BUILD_CORE_VALIDATION
+#include "core_validation.h"
#define OBJECT_LAYER_NAME "VK_LAYER_LUNARG_core_validation"
-#define OBJECT_LAYER_DESCRIPTION "lunarg_core_validation"
#else
#define OBJECT_LAYER_NAME "VK_LAYER_GOOGLE_unique_objects"
-#define OBJECT_LAYER_DESCRIPTION "lunarg_unique_objects"
-#endif
-
-// Include layer validation object definitions
-#if BUILD_OBJECT_TRACKER
-#include "object_lifetime_validation.h"
-#endif
-#if BUILD_THREAD_SAFETY
-#include "thread_safety.h"
-#endif
-#if BUILD_PARAMETER_VALIDATION
-#include "stateless_validation.h"
-#endif
-#if BUILD_CORE_VALIDATION
-#include "core_validation.h"
-#endif
-#if BUILD_BEST_PRACTICES
-#include "best_practices.h"
#endif
namespace vulkan_layer_chassis {
@@ -497,19 +477,10 @@ static const VkLayerProperties global_layer = {
OBJECT_LAYER_NAME, VK_LAYER_API_VERSION, 1, "LunarG validation Layer",
};
-static const VkExtensionProperties instance_extensions[] = {{VK_EXT_DEBUG_REPORT_EXTENSION_NAME, VK_EXT_DEBUG_REPORT_SPEC_VERSION},
- {VK_EXT_DEBUG_UTILS_EXTENSION_NAME, VK_EXT_DEBUG_UTILS_SPEC_VERSION}};
-static const VkExtensionProperties device_extensions[] = {
- {VK_EXT_VALIDATION_CACHE_EXTENSION_NAME, VK_EXT_VALIDATION_CACHE_SPEC_VERSION},
- {VK_EXT_DEBUG_MARKER_EXTENSION_NAME, VK_EXT_DEBUG_MARKER_SPEC_VERSION},
-};
+static const VkExtensionProperties instance_extensions[] = {{VK_EXT_DEBUG_REPORT_EXTENSION_NAME, VK_EXT_DEBUG_REPORT_SPEC_VERSION}};
-typedef struct {
- bool is_instance_api;
- void* funcptr;
-} function_data;
+extern const std::unordered_map<std::string, void*> name_to_funcptr_map;
-extern const std::unordered_map<std::string, function_data> name_to_funcptr_map;
// Manually written functions
@@ -518,7 +489,7 @@ static void InstanceExtensionWhitelist(ValidationObject *layer_data, const VkIns
for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
// Check for recognized instance extensions
if (!white_list(pCreateInfo->ppEnabledExtensionNames[i], kInstanceExtensionNames)) {
- log_msg(layer_data->report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ log_msg(layer_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
kVUIDUndefined,
"Instance Extension %s is not supported by this layer. Using this extension may adversely affect validation "
"results and/or produce undefined behavior.",
@@ -532,7 +503,7 @@ static void DeviceExtensionWhitelist(ValidationObject *layer_data, const VkDevic
for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
// Check for recognized device extensions
if (!white_list(pCreateInfo->ppEnabledExtensionNames[i], kDeviceExtensionNames)) {
- log_msg(layer_data->report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ log_msg(layer_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
kVUIDUndefined,
"Device Extension %s is not supported by this layer. Using this extension may adversely affect validation "
"results and/or produce undefined behavior.",
@@ -541,236 +512,59 @@ static void DeviceExtensionWhitelist(ValidationObject *layer_data, const VkDevic
}
}
-
-// Process validation features, flags and settings specified through extensions, a layer settings file, or environment variables
-
-static const std::unordered_map<std::string, VkValidationFeatureDisableEXT> VkValFeatureDisableLookup = {
- {"VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT", VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT},
- {"VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT", VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT},
- {"VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT", VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT},
- {"VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT", VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT},
- {"VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT", VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT},
- {"VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT", VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT},
- {"VK_VALIDATION_FEATURE_DISABLE_ALL_EXT", VK_VALIDATION_FEATURE_DISABLE_ALL_EXT},
-};
-
-static const std::unordered_map<std::string, VkValidationFeatureEnableEXT> VkValFeatureEnableLookup = {
- {"VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT", VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT},
- {"VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT", VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT},
-};
-
-static const std::unordered_map<std::string, VkValidationFeatureEnable> VkValFeatureEnableLookup2 = {
- {"VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES", VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES},
-};
-
-static const std::unordered_map<std::string, ValidationCheckDisables> ValidationDisableLookup = {
- {"VALIDATION_CHECK_DISABLE_COMMAND_BUFFER_STATE", VALIDATION_CHECK_DISABLE_COMMAND_BUFFER_STATE},
- {"VALIDATION_CHECK_DISABLE_OBJECT_IN_USE", VALIDATION_CHECK_DISABLE_OBJECT_IN_USE},
- {"VALIDATION_CHECK_DISABLE_IDLE_DESCRIPTOR_SET", VALIDATION_CHECK_DISABLE_IDLE_DESCRIPTOR_SET},
- {"VALIDATION_CHECK_DISABLE_PUSH_CONSTANT_RANGE", VALIDATION_CHECK_DISABLE_PUSH_CONSTANT_RANGE},
- {"VALIDATION_CHECK_DISABLE_QUERY_VALIDATION", VALIDATION_CHECK_DISABLE_QUERY_VALIDATION},
- {"VALIDATION_CHECK_DISABLE_IMAGE_LAYOUT_VALIDATION", VALIDATION_CHECK_DISABLE_IMAGE_LAYOUT_VALIDATION},
-};
-
-// Set the local disable flag for the appropriate VALIDATION_CHECK_DISABLE enum
-void SetValidationDisable(CHECK_DISABLED* disable_data, const ValidationCheckDisables disable_id) {
- switch (disable_id) {
- case VALIDATION_CHECK_DISABLE_COMMAND_BUFFER_STATE:
- disable_data->command_buffer_state = true;
- break;
- case VALIDATION_CHECK_DISABLE_OBJECT_IN_USE:
- disable_data->object_in_use = true;
- break;
- case VALIDATION_CHECK_DISABLE_IDLE_DESCRIPTOR_SET:
- disable_data->idle_descriptor_set = true;
- break;
- case VALIDATION_CHECK_DISABLE_PUSH_CONSTANT_RANGE:
- disable_data->push_constant_range = true;
- break;
- case VALIDATION_CHECK_DISABLE_QUERY_VALIDATION:
- disable_data->query_validation = true;
+// For the given ValidationCheck enum, set all relevant instance disabled flags to true
+void SetDisabledFlags(ValidationObject *instance_data, const VkValidationFlagsEXT *val_flags_struct) {
+ for (uint32_t i = 0; i < val_flags_struct->disabledValidationCheckCount; ++i) {
+ switch (val_flags_struct->pDisabledValidationChecks[i]) {
+ case VK_VALIDATION_CHECK_SHADERS_EXT:
+ instance_data->disabled.shader_validation = true;
break;
- case VALIDATION_CHECK_DISABLE_IMAGE_LAYOUT_VALIDATION:
- disable_data->image_layout_validation = true;
+ case VK_VALIDATION_CHECK_ALL_EXT:
+ // Set all disabled flags to true
+ instance_data->disabled.SetAll(true);
break;
default:
- assert(true);
+ break;
+ }
}
}
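
SetDisabledFlags consumes the older VK_EXT_validation_flags chain at vkCreateInstance time. A usage sketch from the application side, disabling shader validation (the rest of instance setup omitted):

    VkValidationCheckEXT disabled_checks[] = {VK_VALIDATION_CHECK_SHADERS_EXT};

    VkValidationFlagsEXT validation_flags = {};
    validation_flags.sType = VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT;
    validation_flags.disabledValidationCheckCount = 1;
    validation_flags.pDisabledValidationChecks = disabled_checks;

    VkInstanceCreateInfo instance_info = {};
    instance_info.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
    instance_info.pNext = &validation_flags;  // picked up by SetDisabledFlags() during CreateInstance
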
-// Set the local disable flag for a single VK_VALIDATION_FEATURE_DISABLE_* flag
-void SetValidationFeatureDisable(CHECK_DISABLED* disable_data, const VkValidationFeatureDisableEXT feature_disable) {
- switch (feature_disable) {
+void SetValidationFeatures(ValidationObject *instance_data, const VkValidationFeaturesEXT *val_features_struct) {
+ for (uint32_t i = 0; i < val_features_struct->disabledValidationFeatureCount; ++i) {
+ switch (val_features_struct->pDisabledValidationFeatures[i]) {
case VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT:
- disable_data->shader_validation = true;
- break;
- case VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT:
- disable_data->thread_safety = true;
- break;
- case VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT:
- disable_data->stateless_checks = true;
- break;
- case VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT:
- disable_data->object_tracking = true;
- break;
- case VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT:
- disable_data->core_checks = true;
- break;
- case VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT:
- disable_data->handle_wrapping = true;
+ instance_data->disabled.shader_validation = true;
break;
case VK_VALIDATION_FEATURE_DISABLE_ALL_EXT:
// Set all disabled flags to true
- disable_data->SetAll(true);
+ instance_data->disabled.SetAll(true);
break;
default:
break;
+ }
}
-}
-
-// Set the local enable flag for a single VK_VALIDATION_FEATURE_ENABLE_* flag
-void SetValidationFeatureEnable(CHECK_ENABLED *enable_data, const VkValidationFeatureEnableEXT feature_enable) {
- switch (feature_enable) {
+ for (uint32_t i = 0; i < val_features_struct->enabledValidationFeatureCount; ++i) {
+ switch (val_features_struct->pEnabledValidationFeatures[i]) {
case VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT:
- enable_data->gpu_validation = true;
+ instance_data->enabled.gpu_validation = true;
break;
case VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT:
- enable_data->gpu_validation_reserve_binding_slot = true;
- break;
- default:
- break;
- }
-}
-
-void SetValidationFeatureEnable(CHECK_ENABLED *enable_data, const VkValidationFeatureEnable feature_enable) {
- switch(feature_enable) {
- case VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES:
- enable_data->best_practices = true;
+ instance_data->enabled.gpu_validation_reserve_binding_slot = true;
break;
default:
break;
- }
-}
-
-// Set the local disable flag for settings specified through the VK_EXT_validation_flags extension
-void SetValidationFlags(CHECK_DISABLED* disables, const VkValidationFlagsEXT* val_flags_struct) {
- for (uint32_t i = 0; i < val_flags_struct->disabledValidationCheckCount; ++i) {
- switch (val_flags_struct->pDisabledValidationChecks[i]) {
- case VK_VALIDATION_CHECK_SHADERS_EXT:
- disables->shader_validation = true;
- break;
- case VK_VALIDATION_CHECK_ALL_EXT:
- // Set all disabled flags to true
- disables->SetAll(true);
- break;
- default:
- break;
- }
- }
-}
-
-// Process Validation Features flags specified through the ValidationFeature extension
-void SetValidationFeatures(CHECK_DISABLED *disable_data, CHECK_ENABLED *enable_data,
- const VkValidationFeaturesEXT *val_features_struct) {
- for (uint32_t i = 0; i < val_features_struct->disabledValidationFeatureCount; ++i) {
- SetValidationFeatureDisable(disable_data, val_features_struct->pDisabledValidationFeatures[i]);
- }
- for (uint32_t i = 0; i < val_features_struct->enabledValidationFeatureCount; ++i) {
- SetValidationFeatureEnable(enable_data, val_features_struct->pEnabledValidationFeatures[i]);
- }
-}
-
-// Given a string representation of a list of enable enum values, call the appropriate setter function
-void SetLocalEnableSetting(std::string list_of_enables, std::string delimiter, CHECK_ENABLED* enables) {
- size_t pos = 0;
- std::string token;
- while (list_of_enables.length() != 0) {
- pos = list_of_enables.find(delimiter);
- if (pos != std::string::npos) {
- token = list_of_enables.substr(0, pos);
- } else {
- pos = list_of_enables.length() - delimiter.length();
- token = list_of_enables;
- }
- if (token.find("VK_VALIDATION_FEATURE_ENABLE_") != std::string::npos) {
- auto result = VkValFeatureEnableLookup.find(token);
- if (result != VkValFeatureEnableLookup.end()) {
- SetValidationFeatureEnable(enables, result->second);
- } else {
- auto result2 = VkValFeatureEnableLookup2.find(token);
- if (result2 != VkValFeatureEnableLookup2.end()) {
- SetValidationFeatureEnable(enables, result2->second);
- }
- }
- }
- list_of_enables.erase(0, pos + delimiter.length());
- }
-}
-
-// Given a string representation of a list of disable enum values, call the appropriate setter function
-void SetLocalDisableSetting(std::string list_of_disables, std::string delimiter, CHECK_DISABLED* disables) {
- size_t pos = 0;
- std::string token;
- while (list_of_disables.length() != 0) {
- pos = list_of_disables.find(delimiter);
- if (pos != std::string::npos) {
- token = list_of_disables.substr(0, pos);
- } else {
- pos = list_of_disables.length() - delimiter.length();
- token = list_of_disables;
- }
- if (token.find("VK_VALIDATION_FEATURE_DISABLE_") != std::string::npos) {
- auto result = VkValFeatureDisableLookup.find(token);
- if (result != VkValFeatureDisableLookup.end()) {
- SetValidationFeatureDisable(disables, result->second);
- }
}
- if (token.find("VALIDATION_CHECK_DISABLE_") != std::string::npos) {
- auto result = ValidationDisableLookup.find(token);
- if (result != ValidationDisableLookup.end()) {
- SetValidationDisable(disables, result->second);
- }
- }
- list_of_disables.erase(0, pos + delimiter.length());
}
}
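
SetValidationFeatures does the same for VK_EXT_validation_features, translating the enable/disable arrays into the CHECK_ENABLED/CHECK_DISABLED flags on the instance object. Application-side sketch enabling GPU-assisted validation:

    VkValidationFeatureEnableEXT enables[] = {VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT};

    VkValidationFeaturesEXT features = {};
    features.sType = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT;
    features.enabledValidationFeatureCount = 1;
    features.pEnabledValidationFeatures = enables;

    VkInstanceCreateInfo instance_info = {};
    instance_info.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
    instance_info.pNext = &features;  // parsed by SetValidationFeatures() during CreateInstance
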
-// Process enables and disables set though the vk_layer_settings.txt config file or through an environment variable
-void ProcessConfigAndEnvSettings(const char* layer_description, CHECK_ENABLED* enables, CHECK_DISABLED* disables) {
- std::string enable_key = layer_description;
- std::string disable_key = layer_description;
- enable_key.append(".enables");
- disable_key.append(".disables");
- std::string list_of_config_enables = getLayerOption(enable_key.c_str());
- std::string list_of_env_enables = GetLayerEnvVar("VK_LAYER_ENABLES");
- std::string list_of_config_disables = getLayerOption(disable_key.c_str());
- std::string list_of_env_disables = GetLayerEnvVar("VK_LAYER_DISABLES");
-#if defined(_WIN32)
- std::string env_delimiter = ";";
-#else
- std::string env_delimiter = ":";
-#endif
- SetLocalEnableSetting(list_of_config_enables, ",", enables);
- SetLocalEnableSetting(list_of_env_enables, env_delimiter, enables);
- SetLocalDisableSetting(list_of_config_disables, ",", disables);
- SetLocalDisableSetting(list_of_env_disables, env_delimiter, disables);
-}
-
-
-// Non-code-generated chassis API functions
-
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetDeviceProcAddr(VkDevice device, const char *funcName) {
auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
- if (!ApiParentExtensionEnabled(funcName, &layer_data->device_extensions)) {
+ if (!ApiParentExtensionEnabled(funcName, layer_data->device_extensions.device_extension_set)) {
return nullptr;
}
const auto &item = name_to_funcptr_map.find(funcName);
if (item != name_to_funcptr_map.end()) {
- if (item->second.is_instance_api) {
- return nullptr;
- } else {
- return reinterpret_cast<PFN_vkVoidFunction>(item->second.funcptr);
- }
+ return reinterpret_cast<PFN_vkVoidFunction>(item->second);
}
auto &table = layer_data->device_dispatch_table;
if (!table.GetDeviceProcAddr) return nullptr;
@@ -780,7 +574,7 @@ VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetDeviceProcAddr(VkDevice device, cons
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetInstanceProcAddr(VkInstance instance, const char *funcName) {
const auto &item = name_to_funcptr_map.find(funcName);
if (item != name_to_funcptr_map.end()) {
- return reinterpret_cast<PFN_vkVoidFunction>(item->second.funcptr);
+ return reinterpret_cast<PFN_vkVoidFunction>(item->second);
}
auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
auto &table = layer_data->instance_dispatch_table;
@@ -788,6 +582,13 @@ VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetInstanceProcAddr(VkInstance instance
return table.GetInstanceProcAddr(instance, funcName);
}
+VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetPhysicalDeviceProcAddr(VkInstance instance, const char *funcName) {
+ auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+ auto &table = layer_data->instance_dispatch_table;
+ if (!table.GetPhysicalDeviceProcAddr) return nullptr;
+ return table.GetPhysicalDeviceProcAddr(instance, funcName);
+}
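
GetPhysicalDeviceProcAddr is intercepted again and name_to_funcptr_map maps function names directly to the layer's entry points. From the application's point of view the lookup path is unchanged; a sketch (device and sampler are assumed pre-existing handles):

    auto fpSetName = reinterpret_cast<PFN_vkSetDebugUtilsObjectNameEXT>(
        vkGetDeviceProcAddr(device, "vkSetDebugUtilsObjectNameEXT"));  // resolved through the layer chain
    if (fpSetName) {
        VkDebugUtilsObjectNameInfoEXT name_info = {};
        name_info.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT;
        name_info.objectType = VK_OBJECT_TYPE_SAMPLER;
        name_info.objectHandle = (uint64_t)sampler;  // the layer unwraps this wrapped handle internally
        name_info.pObjectName = "my_sampler";
        fpSetName(device, &name_info);
    }
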
+
VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceLayerProperties(uint32_t *pCount, VkLayerProperties *pProperties) {
return util_GetLayerProperties(1, &global_layer, pCount, pProperties);
}
@@ -800,17 +601,17 @@ VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceLayerProperties(VkPhysicalDevice p
VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pCount,
VkExtensionProperties *pProperties) {
if (pLayerName && !strcmp(pLayerName, global_layer.layerName))
- return util_GetExtensionProperties(ARRAY_SIZE(instance_extensions), instance_extensions, pCount, pProperties);
+ return util_GetExtensionProperties(1, instance_extensions, pCount, pProperties);
return VK_ERROR_LAYER_NOT_PRESENT;
}
VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, const char *pLayerName,
uint32_t *pCount, VkExtensionProperties *pProperties) {
- if (pLayerName && !strcmp(pLayerName, global_layer.layerName)) return util_GetExtensionProperties(ARRAY_SIZE(device_extensions), device_extensions, pCount, pProperties);
+ if (pLayerName && !strcmp(pLayerName, global_layer.layerName)) return util_GetExtensionProperties(0, NULL, pCount, pProperties);
assert(physicalDevice);
auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
- return layer_data->instance_dispatch_table.EnumerateDeviceExtensionProperties(physicalDevice, pLayerName, pCount, pProperties);
+ return layer_data->instance_dispatch_table.EnumerateDeviceExtensionProperties(physicalDevice, NULL, pCount, pProperties);
}
VKAPI_ATTR VkResult VKAPI_CALL CreateInstance(const VkInstanceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator,
@@ -825,66 +626,30 @@ VKAPI_ATTR VkResult VKAPI_CALL CreateInstance(const VkInstanceCreateInfo *pCreat
uint32_t specified_version = (pCreateInfo->pApplicationInfo ? pCreateInfo->pApplicationInfo->apiVersion : VK_API_VERSION_1_0);
uint32_t api_version = (specified_version < VK_API_VERSION_1_1) ? VK_API_VERSION_1_0 : VK_API_VERSION_1_1;
- CHECK_ENABLED local_enables {};
- CHECK_DISABLED local_disables {};
- const auto *validation_features_ext = lvl_find_in_chain<VkValidationFeaturesEXT>(pCreateInfo->pNext);
- if (validation_features_ext) {
- SetValidationFeatures(&local_disables, &local_enables, validation_features_ext);
- }
- const auto *validation_flags_ext = lvl_find_in_chain<VkValidationFlagsEXT>(pCreateInfo->pNext);
- if (validation_flags_ext) {
- SetValidationFlags(&local_disables, validation_flags_ext);
- }
- ProcessConfigAndEnvSettings(OBJECT_LAYER_DESCRIPTION, &local_enables, &local_disables);
// Create temporary dispatch vector for pre-calls until instance is created
std::vector<ValidationObject*> local_object_dispatch;
- // Add VOs to dispatch vector. Order here will be the validation dispatch order!
-#if BUILD_THREAD_SAFETY
+#if BUILD_OBJECT_TRACKER
+ auto object_tracker = new ObjectLifetimes;
+ local_object_dispatch.emplace_back(object_tracker);
+ object_tracker->container_type = LayerObjectTypeObjectTracker;
+ object_tracker->api_version = api_version;
+#elif BUILD_THREAD_SAFETY
auto thread_checker = new ThreadSafety;
- if (!local_disables.thread_safety) {
- local_object_dispatch.emplace_back(thread_checker);
- }
+ local_object_dispatch.emplace_back(thread_checker);
thread_checker->container_type = LayerObjectTypeThreading;
thread_checker->api_version = api_version;
-#endif
-#if BUILD_PARAMETER_VALIDATION
+#elif BUILD_PARAMETER_VALIDATION
auto parameter_validation = new StatelessValidation;
- if (!local_disables.stateless_checks) {
- local_object_dispatch.emplace_back(parameter_validation);
- }
+ local_object_dispatch.emplace_back(parameter_validation);
parameter_validation->container_type = LayerObjectTypeParameterValidation;
parameter_validation->api_version = api_version;
-#endif
-#if BUILD_OBJECT_TRACKER
- auto object_tracker = new ObjectLifetimes;
- if (!local_disables.object_tracking) {
- local_object_dispatch.emplace_back(object_tracker);
- }
- object_tracker->container_type = LayerObjectTypeObjectTracker;
- object_tracker->api_version = api_version;
-#endif
-#if BUILD_CORE_VALIDATION
+#elif BUILD_CORE_VALIDATION
auto core_checks = new CoreChecks;
- if (!local_disables.core_checks) {
- local_object_dispatch.emplace_back(core_checks);
- }
+ local_object_dispatch.emplace_back(core_checks);
core_checks->container_type = LayerObjectTypeCoreValidation;
core_checks->api_version = api_version;
#endif
-#if BUILD_BEST_PRACTICES
- auto best_practices = new BestPractices;
- if (local_enables.best_practices) {
- local_object_dispatch.emplace_back(best_practices);
- }
- best_practices->container_type = LayerObjectTypeBestPractices;
- best_practices->api_version = api_version;
-#endif
-
- // If handle wrapping is disabled via the ValidationFeatures extension, override build flag
- if (local_disables.handle_wrapping) {
- wrap_handles = false;
- }
// Init dispatch array and call registration functions
for (auto intercept : local_object_dispatch) {
@@ -901,8 +666,6 @@ VKAPI_ATTR VkResult VKAPI_CALL CreateInstance(const VkInstanceCreateInfo *pCreat
framework->object_dispatch = local_object_dispatch;
framework->container_type = LayerObjectTypeInstance;
- framework->disabled = local_disables;
- framework->enabled = local_enables;
framework->instance = *pInstance;
layer_init_instance_dispatch_table(*pInstance, &framework->instance_dispatch_table, fpGetInstanceProcAddr);
@@ -911,39 +674,35 @@ VKAPI_ATTR VkResult VKAPI_CALL CreateInstance(const VkInstanceCreateInfo *pCreat
framework->api_version = api_version;
framework->instance_extensions.InitFromInstanceCreateInfo(specified_version, pCreateInfo);
- layer_debug_messenger_actions(framework->report_data, framework->logging_messenger, pAllocator, OBJECT_LAYER_DESCRIPTION);
+ // Parse any pNext chains for validation features and flags
+ const auto *validation_flags_ext = lvl_find_in_chain<VkValidationFlagsEXT>(pCreateInfo->pNext);
+ if (validation_flags_ext) {
+ SetDisabledFlags(framework, validation_flags_ext);
+ }
+ const auto *validation_features_ext = lvl_find_in_chain<VkValidationFeaturesEXT>(pCreateInfo->pNext);
+ if (validation_features_ext) {
+ SetValidationFeatures(framework, validation_features_ext);
+ }
#if BUILD_OBJECT_TRACKER
+ layer_debug_messenger_actions(framework->report_data, framework->logging_messenger, pAllocator, "lunarg_object_tracker");
object_tracker->report_data = framework->report_data;
- object_tracker->instance_dispatch_table = framework->instance_dispatch_table;
- object_tracker->enabled = framework->enabled;
- object_tracker->disabled = framework->disabled;
-#endif
-#if BUILD_THREAD_SAFETY
+#elif BUILD_THREAD_SAFETY
+ layer_debug_messenger_actions(framework->report_data, framework->logging_messenger, pAllocator, "google_thread_checker");
thread_checker->report_data = framework->report_data;
- thread_checker->instance_dispatch_table = framework->instance_dispatch_table;
- thread_checker->enabled = framework->enabled;
- thread_checker->disabled = framework->disabled;
-#endif
-#if BUILD_PARAMETER_VALIDATION
+#elif BUILD_PARAMETER_VALIDATION
+ layer_debug_messenger_actions(framework->report_data, framework->logging_messenger, pAllocator, "lunarg_parameter_validation");
parameter_validation->report_data = framework->report_data;
- parameter_validation->instance_dispatch_table = framework->instance_dispatch_table;
- parameter_validation->enabled = framework->enabled;
- parameter_validation->disabled = framework->disabled;
-#endif
-#if BUILD_CORE_VALIDATION
+#elif BUILD_CORE_VALIDATION
+ layer_debug_messenger_actions(framework->report_data, framework->logging_messenger, pAllocator, "lunarg_core_validation");
core_checks->report_data = framework->report_data;
core_checks->instance_dispatch_table = framework->instance_dispatch_table;
core_checks->instance = *pInstance;
core_checks->enabled = framework->enabled;
core_checks->disabled = framework->disabled;
core_checks->instance_state = core_checks;
-#endif
-#if BUILD_BEST_PRACTICES
- best_practices->report_data = framework->report_data;
- best_practices->instance_dispatch_table = framework->instance_dispatch_table;
- best_practices->enabled = framework->enabled;
- best_practices->disabled = framework->disabled;
+#else
+ layer_debug_messenger_actions(framework->report_data, framework->logging_messenger, pAllocator, "lunarg_unique_objects");
#endif
for (auto intercept : framework->object_dispatch) {
@@ -1020,7 +779,7 @@ VKAPI_ATTR VkResult VKAPI_CALL CreateDevice(VkPhysicalDevice gpu, const VkDevice
item->device_extensions = device_extensions;
}
- safe_VkDeviceCreateInfo modified_create_info(pCreateInfo);
+ std::unique_ptr<safe_VkDeviceCreateInfo> modified_create_info(new safe_VkDeviceCreateInfo(pCreateInfo));
bool skip = false;
for (auto intercept : instance_interceptor->object_dispatch) {
@@ -1030,10 +789,10 @@ VKAPI_ATTR VkResult VKAPI_CALL CreateDevice(VkPhysicalDevice gpu, const VkDevice
}
for (auto intercept : instance_interceptor->object_dispatch) {
auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateDevice(gpu, pCreateInfo, pAllocator, pDevice, &modified_create_info);
+ intercept->PreCallRecordCreateDevice(gpu, pCreateInfo, pAllocator, pDevice, modified_create_info);
}
- VkResult result = fpCreateDevice(gpu, reinterpret_cast<VkDeviceCreateInfo *>(&modified_create_info), pAllocator, pDevice);
+ VkResult result = fpCreateDevice(gpu, reinterpret_cast<VkDeviceCreateInfo *>(modified_create_info.get()), pAllocator, pDevice);
if (result != VK_SUCCESS) {
return result;
}
@@ -1054,59 +813,54 @@ VKAPI_ATTR VkResult VKAPI_CALL CreateDevice(VkPhysicalDevice gpu, const VkDevice
device_interceptor->instance = instance_interceptor->instance;
device_interceptor->report_data = layer_debug_utils_create_device(instance_interceptor->report_data, *pDevice);
- // Note that this defines the order in which the layer validation objects are called
-#if BUILD_THREAD_SAFETY
+#if BUILD_OBJECT_TRACKER
+ // Create child layer objects for this key and add to dispatch vector
+ auto object_tracker = new ObjectLifetimes;
+    // TODO: Initialize child objects with parent info thru constructor taking a parent object
+ object_tracker->container_type = LayerObjectTypeObjectTracker;
+ object_tracker->physical_device = gpu;
+ object_tracker->instance = instance_interceptor->instance;
+ object_tracker->report_data = device_interceptor->report_data;
+ object_tracker->device_dispatch_table = device_interceptor->device_dispatch_table;
+ object_tracker->api_version = device_interceptor->api_version;
+ device_interceptor->object_dispatch.emplace_back(object_tracker);
+#elif BUILD_THREAD_SAFETY
auto thread_safety = new ThreadSafety;
+    // TODO: Initialize child objects with parent info thru constructor taking a parent object
thread_safety->container_type = LayerObjectTypeThreading;
- if (!instance_interceptor->disabled.thread_safety) {
- device_interceptor->object_dispatch.emplace_back(thread_safety);
- }
-#endif
-#if BUILD_PARAMETER_VALIDATION
+ thread_safety->physical_device = gpu;
+ thread_safety->instance = instance_interceptor->instance;
+ thread_safety->report_data = device_interceptor->report_data;
+ thread_safety->device_dispatch_table = device_interceptor->device_dispatch_table;
+ thread_safety->api_version = device_interceptor->api_version;
+ device_interceptor->object_dispatch.emplace_back(thread_safety);
+#elif BUILD_PARAMETER_VALIDATION
auto stateless_validation = new StatelessValidation;
+    // TODO: Initialize child objects with parent info thru constructor taking a parent object
stateless_validation->container_type = LayerObjectTypeParameterValidation;
- if (!instance_interceptor->disabled.stateless_checks) {
- device_interceptor->object_dispatch.emplace_back(stateless_validation);
- }
-#endif
-#if BUILD_OBJECT_TRACKER
- auto object_tracker = new ObjectLifetimes;
- object_tracker->container_type = LayerObjectTypeObjectTracker;
- if (!instance_interceptor->disabled.object_tracking) {
- device_interceptor->object_dispatch.emplace_back(object_tracker);
- }
-#endif
-#if BUILD_CORE_VALIDATION
+ stateless_validation->physical_device = gpu;
+ stateless_validation->instance = instance_interceptor->instance;
+ stateless_validation->report_data = device_interceptor->report_data;
+ stateless_validation->device_dispatch_table = device_interceptor->device_dispatch_table;
+ stateless_validation->api_version = device_interceptor->api_version;
+ device_interceptor->object_dispatch.emplace_back(stateless_validation);
+#elif BUILD_CORE_VALIDATION
auto core_checks = new CoreChecks;
+    // TODO: Initialize child objects with parent info thru constructor taking a parent object
core_checks->container_type = LayerObjectTypeCoreValidation;
+ core_checks->physical_device = gpu;
+ core_checks->instance = instance_interceptor->instance;
+ core_checks->report_data = device_interceptor->report_data;
+ core_checks->device_dispatch_table = device_interceptor->device_dispatch_table;
+ core_checks->instance_dispatch_table = instance_interceptor->instance_dispatch_table;
+ core_checks->api_version = device_interceptor->api_version;
+ core_checks->instance_extensions = instance_interceptor->instance_extensions;
+ core_checks->device_extensions = device_interceptor->device_extensions;
core_checks->instance_state = reinterpret_cast<CoreChecks *>(
core_checks->GetValidationObject(instance_interceptor->object_dispatch, LayerObjectTypeCoreValidation));
- if (!instance_interceptor->disabled.core_checks) {
- device_interceptor->object_dispatch.emplace_back(core_checks);
- }
+ core_checks->device = *pDevice;
+ device_interceptor->object_dispatch.emplace_back(core_checks);
#endif
-#if BUILD_BEST_PRACTICES
- auto best_practices = new BestPractices;
- best_practices->container_type = LayerObjectTypeBestPractices;
- if (instance_interceptor->enabled.best_practices) {
- device_interceptor->object_dispatch.emplace_back(best_practices);
- }
-#endif
-
- // Set per-intercept common data items
- for (auto dev_intercept : device_interceptor->object_dispatch) {
- dev_intercept->device = *pDevice;
- dev_intercept->physical_device = gpu;
- dev_intercept->instance = instance_interceptor->instance;
- dev_intercept->report_data = device_interceptor->report_data;
- dev_intercept->device_dispatch_table = device_interceptor->device_dispatch_table;
- dev_intercept->api_version = device_interceptor->api_version;
- dev_intercept->disabled = instance_interceptor->disabled;
- dev_intercept->enabled = instance_interceptor->enabled;
- dev_intercept->instance_dispatch_table = instance_interceptor->instance_dispatch_table;
- dev_intercept->instance_extensions = instance_interceptor->instance_extensions;
- dev_intercept->device_extensions = device_interceptor->device_extensions;
- }
for (auto intercept : instance_interceptor->object_dispatch) {
auto lock = intercept->write_lock();
@@ -1163,8 +917,8 @@ VKAPI_ATTR VkResult VKAPI_CALL CreateGraphicsPipelines(
struct create_graphics_pipeline_api_state {
const VkGraphicsPipelineCreateInfo* pCreateInfos;
} cgpl_state;
+ cgpl_state.pCreateInfos = pCreateInfos;
#endif
- cgpl_state.pCreateInfos = pCreateInfos;
for (auto intercept : layer_data->object_dispatch) {
auto lock = intercept->write_lock();
@@ -1176,7 +930,7 @@ VKAPI_ATTR VkResult VKAPI_CALL CreateGraphicsPipelines(
intercept->PreCallRecordCreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, &cgpl_state);
}
- VkResult result = DispatchCreateGraphicsPipelines(device, pipelineCache, createInfoCount, cgpl_state.pCreateInfos, pAllocator, pPipelines);
+ VkResult result = DispatchCreateGraphicsPipelines(layer_data, device, pipelineCache, createInfoCount, cgpl_state.pCreateInfos, pAllocator, pPipelines);
for (auto intercept : layer_data->object_dispatch) {
auto lock = intercept->write_lock();
@@ -1196,28 +950,25 @@ VKAPI_ATTR VkResult VKAPI_CALL CreateComputePipelines(
auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
bool skip = false;
-#ifdef BUILD_CORE_VALIDATION
- create_compute_pipeline_api_state ccpl_state{};
-#else
- struct create_compute_pipeline_api_state {
- const VkComputePipelineCreateInfo* pCreateInfos;
- } ccpl_state;
+#ifndef BUILD_CORE_VALIDATION
+ struct PIPELINE_STATE {};
#endif
- ccpl_state.pCreateInfos = pCreateInfos;
+
+ std::vector<std::unique_ptr<PIPELINE_STATE>> pipe_state;
for (auto intercept : layer_data->object_dispatch) {
auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, &ccpl_state);
+ skip |= intercept->PreCallValidateCreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, &pipe_state);
if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
}
for (auto intercept : layer_data->object_dispatch) {
auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, &ccpl_state);
+ intercept->PreCallRecordCreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
}
- VkResult result = DispatchCreateComputePipelines(device, pipelineCache, createInfoCount, ccpl_state.pCreateInfos, pAllocator, pPipelines);
+ VkResult result = DispatchCreateComputePipelines(layer_data, device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
for (auto intercept : layer_data->object_dispatch) {
auto lock = intercept->write_lock();
- intercept->PostCallRecordCreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, result, &ccpl_state);
+ intercept->PostCallRecordCreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, result, &pipe_state);
}
return result;
}
@@ -1232,31 +983,25 @@ VKAPI_ATTR VkResult VKAPI_CALL CreateRayTracingPipelinesNV(
auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
bool skip = false;
-#ifdef BUILD_CORE_VALIDATION
- create_ray_tracing_pipeline_api_state crtpl_state{};
-#else
- struct create_ray_tracing_pipeline_api_state {
- const VkRayTracingPipelineCreateInfoNV* pCreateInfos;
- } crtpl_state;
+#ifndef BUILD_CORE_VALIDATION
+ struct PIPELINE_STATE {};
#endif
- crtpl_state.pCreateInfos = pCreateInfos;
+
+ std::vector<std::unique_ptr<PIPELINE_STATE>> pipe_state;
for (auto intercept : layer_data->object_dispatch) {
auto lock = intercept->write_lock();
- skip |= intercept->PreCallValidateCreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos,
- pAllocator, pPipelines, &crtpl_state);
+ skip |= intercept->PreCallValidateCreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, &pipe_state);
if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
}
for (auto intercept : layer_data->object_dispatch) {
auto lock = intercept->write_lock();
- intercept->PreCallRecordCreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator,
- pPipelines, &crtpl_state);
+ intercept->PreCallRecordCreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
}
- VkResult result = DispatchCreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
+ VkResult result = DispatchCreateRayTracingPipelinesNV(layer_data, device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
for (auto intercept : layer_data->object_dispatch) {
auto lock = intercept->write_lock();
- intercept->PostCallRecordCreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator,
- pPipelines, result, &crtpl_state);
+ intercept->PostCallRecordCreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, result, &pipe_state);
}
return result;
}
@@ -1287,7 +1032,7 @@ VKAPI_ATTR VkResult VKAPI_CALL CreatePipelineLayout(
auto lock = intercept->write_lock();
intercept->PreCallRecordCreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout, &cpl_state);
}
- VkResult result = DispatchCreatePipelineLayout(device, &cpl_state.modified_create_info, pAllocator, pPipelineLayout);
+ VkResult result = DispatchCreatePipelineLayout(layer_data, device, &cpl_state.modified_create_info, pAllocator, pPipelineLayout);
for (auto intercept : layer_data->object_dispatch) {
auto lock = intercept->write_lock();
intercept->PostCallRecordCreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout, result);
@@ -1321,7 +1066,7 @@ VKAPI_ATTR VkResult VKAPI_CALL CreateShaderModule(
auto lock = intercept->write_lock();
intercept->PreCallRecordCreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule, &csm_state);
}
- VkResult result = DispatchCreateShaderModule(device, &csm_state.instrumented_create_info, pAllocator, pShaderModule);
+ VkResult result = DispatchCreateShaderModule(layer_data, device, &csm_state.instrumented_create_info, pAllocator, pShaderModule);
for (auto intercept : layer_data->object_dispatch) {
auto lock = intercept->write_lock();
intercept->PostCallRecordCreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule, result, &csm_state);
@@ -1351,7 +1096,7 @@ VKAPI_ATTR VkResult VKAPI_CALL AllocateDescriptorSets(
auto lock = intercept->write_lock();
intercept->PreCallRecordAllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets);
}
- VkResult result = DispatchAllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets);
+ VkResult result = DispatchAllocateDescriptorSets(layer_data, device, pAllocateInfo, pDescriptorSets);
for (auto intercept : layer_data->object_dispatch) {
auto lock = intercept->write_lock();
intercept->PostCallRecordAllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets, result, &ads_state);
@@ -1433,7 +1178,7 @@ VKAPI_ATTR VkResult VKAPI_CALL GetValidationCacheDataEXT(
virtual VkResult CoreLayerMergeValidationCachesEXT(VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount, const VkValidationCacheEXT* pSrcCaches) { return VK_SUCCESS; };
virtual VkResult CoreLayerGetValidationCacheDataEXT(VkDevice device, VkValidationCacheEXT validationCache, size_t* pDataSize, void* pData) { return VK_SUCCESS; };
- // Allow additional state parameter for CreateGraphicsPipelines
+ // Allow additional parameter for CreateGraphicsPipelines
virtual bool PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, void* cgpl_state) {
return PreCallValidateCreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
};
@@ -1448,9 +1193,6 @@ VKAPI_ATTR VkResult VKAPI_CALL GetValidationCacheDataEXT(
virtual bool PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, void* pipe_state) {
return PreCallValidateCreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
};
- virtual void PreCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, void* ccpl_state) {
- PreCallRecordCreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
- };
virtual void PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, VkResult result, void* pipe_state) {
PostCallRecordCreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, result);
};
@@ -1459,9 +1201,6 @@ VKAPI_ATTR VkResult VKAPI_CALL GetValidationCacheDataEXT(
virtual bool PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, void* pipe_state) {
return PreCallValidateCreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
};
- virtual void PreCallRecordCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, void* ccpl_state) {
- PreCallRecordCreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
- };
virtual void PostCallRecordCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, VkResult result, void* pipe_state) {
PostCallRecordCreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, result);
};
@@ -1491,7 +1230,7 @@ VKAPI_ATTR VkResult VKAPI_CALL GetValidationCacheDataEXT(
};
// Modify a parameter to CreateDevice
- virtual void PreCallRecordCreateDevice(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice, safe_VkDeviceCreateInfo *modified_create_info) {
+ virtual void PreCallRecordCreateDevice(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice, std::unique_ptr<safe_VkDeviceCreateInfo> &modified_create_info) {
PreCallRecordCreateDevice(physicalDevice, pCreateInfo, pAllocator, pDevice);
};
"""
@@ -1532,6 +1271,11 @@ VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr(V
return vulkan_layer_chassis::GetInstanceProcAddr(instance, funcName);
}
+VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_layerGetPhysicalDeviceProcAddr(VkInstance instance,
+ const char *funcName) {
+ return vulkan_layer_chassis::GetPhysicalDeviceProcAddr(instance, funcName);
+}
+
VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkNegotiateLoaderLayerInterfaceVersion(VkNegotiateLayerInterface *pVersionStruct) {
assert(pVersionStruct != NULL);
assert(pVersionStruct->sType == LAYER_NEGOTIATE_INTERFACE_STRUCT);
@@ -1540,7 +1284,7 @@ VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkNegotiateLoaderLayerInterfaceVe
if (pVersionStruct->loaderLayerInterfaceVersion >= 2) {
pVersionStruct->pfnGetInstanceProcAddr = vkGetInstanceProcAddr;
pVersionStruct->pfnGetDeviceProcAddr = vkGetDeviceProcAddr;
- pVersionStruct->pfnGetPhysicalDeviceProcAddr = nullptr;
+ pVersionStruct->pfnGetPhysicalDeviceProcAddr = vk_layerGetPhysicalDeviceProcAddr;
}
return VK_SUCCESS;
@@ -1565,10 +1309,25 @@ VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkNegotiateLoaderLayerInterfaceVe
def paramIsPointer(self, param):
ispointer = False
for elem in param:
- if elem.tag == 'type' and elem.tail is not None and '*' in elem.tail:
+ if ((elem.tag is not 'type') and (elem.tail is not None)) and '*' in elem.tail:
ispointer = True
return ispointer
+ # Check if an object is a non-dispatchable handle
+ def isHandleTypeNonDispatchable(self, handletype):
+ handle = self.registry.tree.find("types/type/[name='" + handletype + "'][@category='handle']")
+ if handle is not None and handle.find('type').text == 'VK_DEFINE_NON_DISPATCHABLE_HANDLE':
+ return True
+ else:
+ return False
+
+ # Check if an object is a dispatchable handle
+ def isHandleTypeDispatchable(self, handletype):
+ handle = self.registry.tree.find("types/type/[name='" + handletype + "'][@category='handle']")
+ if handle is not None and handle.find('type').text == 'VK_DEFINE_HANDLE':
+ return True
+ else:
+ return False
#
#
def beginFile(self, genOpts):
@@ -1593,8 +1352,8 @@ VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkNegotiateLoaderLayerInterfaceVe
self.newline()
if not self.header:
# Record intercepted procedures
- write('// Map of intercepted ApiName to its associated function data', file=self.outFile)
- write('const std::unordered_map<std::string, function_data> name_to_funcptr_map = {', file=self.outFile)
+ write('// Map of all APIs to be intercepted by this layer', file=self.outFile)
+ write('const std::unordered_map<std::string, void*> name_to_funcptr_map = {', file=self.outFile)
write('\n'.join(self.intercepts), file=self.outFile)
write('};\n', file=self.outFile)
self.newline()
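
The endFile() change above emits the intercept table as std::unordered_map<std::string, void*>, with one '{"vkName", (void*)Name}' entry per intercepted API. A minimal sketch of that emission step follows; emit_funcptr_map and both of its argument names are illustrative helpers, not members of the real generator.

# Minimal sketch (assumed helper, not the generator's real code) of emitting the
# name_to_funcptr_map entries in the '{"vkName", (void*)Name}' form used above.
def emit_funcptr_map(intercepted_names, core_validation_only=()):
    lines = ['// Map of all APIs to be intercepted by this layer',
             'const std::unordered_map<std::string, void*> name_to_funcptr_map = {']
    for vk_name in intercepted_names:
        entry = '    {"%s", (void*)%s},' % (vk_name, vk_name[2:])  # strip the leading 'vk'
        if vk_name in core_validation_only:
            lines += ['#ifdef BUILD_CORE_VALIDATION', entry, '#endif']
        else:
            lines.append(entry)
    lines.append('};')
    return '\n'.join(lines)

print(emit_funcptr_map(['vkCreateDevice', 'vkCreateValidationCacheEXT'],
                       core_validation_only={'vkCreateValidationCacheEXT'}))
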
@@ -1711,24 +1470,18 @@ VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkNegotiateLoaderLayerInterfaceVe
self.layer_factory += '#endif\n'
return
- is_instance = 'false'
- dispatchable_type = cmdinfo.elem.find('param/type').text
- if dispatchable_type in ["VkPhysicalDevice", "VkInstance"] or name == 'vkCreateInstance':
- is_instance = 'true'
-
if name in self.manual_functions:
if 'ValidationCache' not in name:
- self.intercepts += [ ' {"%s", {%s, (void*)%s}},' % (name, is_instance, name[2:]) ]
+ self.intercepts += [ ' {"%s", (void*)%s},' % (name,name[2:]) ]
else:
self.intercepts += [ '#ifdef BUILD_CORE_VALIDATION' ]
-
- self.intercepts += [ ' {"%s", {%s, (void*)%s}},' % (name, is_instance, name[2:]) ]
+ self.intercepts += [ ' {"%s", (void*)%s},' % (name,name[2:]) ]
self.intercepts += [ '#endif' ]
return
# Record that the function will be intercepted
if (self.featureExtraProtect != None):
self.intercepts += [ '#ifdef %s' % self.featureExtraProtect ]
- self.intercepts += [ ' {"%s", {%s, (void*)%s}},' % (name, is_instance, name[2:]) ]
+ self.intercepts += [ ' {"%s", (void*)%s},' % (name,name[2:]) ]
if (self.featureExtraProtect != None):
self.intercepts += [ '#endif' ]
OutputGenerator.genCmd(self, cmdinfo, name, alias)
@@ -1737,12 +1490,20 @@ VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkNegotiateLoaderLayerInterfaceVe
self.appendSection('command', '')
self.appendSection('command', '%s {' % decls[0][:-1])
# Setup common to call wrappers. First parameter is always dispatchable
+ dispatchable_type = cmdinfo.elem.find('param/type').text
dispatchable_name = cmdinfo.elem.find('param/name').text
+ # Default to device
+ device_or_instance = 'device'
+ dispatch_table_name = 'VkLayerDispatchTable'
+ # Set to instance as necessary
+ if dispatchable_type in ["VkPhysicalDevice", "VkInstance"] or name == 'vkCreateInstance':
+ device_or_instance = 'instance'
+ dispatch_table_name = 'VkLayerInstanceDispatchTable'
self.appendSection('command', ' auto layer_data = GetLayerDataPtr(get_dispatch_key(%s), layer_data_map);' % (dispatchable_name))
api_function_name = cmdinfo.elem.attrib.get('name')
params = cmdinfo.elem.findall('param/name')
paramstext = ', '.join([str(param.text) for param in params])
- API = api_function_name.replace('vk','Dispatch') + '('
+ API = api_function_name.replace('vk','Dispatch') + '(layer_data, '
# Declare result variable, if any.
return_map = {
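
Taken together, the genCmd hunks above move the instance-versus-device decision into the wrapper body and prefix every down-chain Dispatch call with layer_data. A rough sketch of that string assembly under simplified inputs; build_wrapper_call and its parameters are assumptions for illustration only.

# Rough sketch, with simplified inputs, of how a wrapper body could be assembled
# the way genCmd does above; this is not the generator's real method.
def build_wrapper_call(api_name, first_param_type, first_param_name, param_names):
    device_or_instance = 'device'
    dispatch_table_name = 'VkLayerDispatchTable'
    if first_param_type in ('VkPhysicalDevice', 'VkInstance') or api_name == 'vkCreateInstance':
        device_or_instance = 'instance'
        dispatch_table_name = 'VkLayerInstanceDispatchTable'
    body = []
    body.append('    auto layer_data = GetLayerDataPtr(get_dispatch_key(%s), layer_data_map);'
                % first_param_name)
    # Down-chain calls now take layer_data as their first argument.
    call = api_name.replace('vk', 'Dispatch') + '(layer_data, ' + ', '.join(param_names) + ')'
    body.append('    VkResult result = %s;' % call)
    return device_or_instance, dispatch_table_name, '\n'.join(body)

kind, table, text = build_wrapper_call('vkQueueWaitIdle', 'VkQueue', 'queue', ['queue'])
print(kind, table)
print(text)
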
diff --git a/scripts/layer_dispatch_table_generator.py b/scripts/layer_dispatch_table_generator.py
index 85b9cd716..d5406f19d 100644
--- a/scripts/layer_dispatch_table_generator.py
+++ b/scripts/layer_dispatch_table_generator.py
@@ -1,9 +1,9 @@
#!/usr/bin/python3 -i
#
-# Copyright (c) 2015-2019 The Khronos Group Inc.
-# Copyright (c) 2015-2019 Valve Corporation
-# Copyright (c) 2015-2019 LunarG, Inc.
-# Copyright (c) 2015-2019 Google Inc.
+# Copyright (c) 2015-2017 The Khronos Group Inc.
+# Copyright (c) 2015-2017 Valve Corporation
+# Copyright (c) 2015-2017 LunarG, Inc.
+# Copyright (c) 2015-2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -35,7 +35,6 @@ ADD_INST_CMDS = ['vkCreateInstance',
# LayerDispatchTableGeneratorOptions - subclass of GeneratorOptions.
class LayerDispatchTableGeneratorOptions(GeneratorOptions):
def __init__(self,
- conventions = None,
filename = None,
directory = '.',
apiname = None,
@@ -58,7 +57,7 @@ class LayerDispatchTableGeneratorOptions(GeneratorOptions):
indentFuncPointer = False,
alignFuncParam = 0,
expandEnumerants = True):
- GeneratorOptions.__init__(self, conventions, filename, directory, apiname, profile,
+ GeneratorOptions.__init__(self, filename, directory, apiname, profile,
versions, emitversions, defaultExtensions,
addExtensions, removeExtensions, emitExtensions, sortProcedure)
self.prefixText = prefixText
@@ -93,9 +92,6 @@ class LayerDispatchTableOutputGenerator(OutputGenerator):
def beginFile(self, genOpts):
OutputGenerator.beginFile(self, genOpts)
- # Initialize members that require the tree
- self.handle_types = GetHandleTypes(self.registry.tree)
-
# User-supplied prefix text, if any (list of strings)
if (genOpts.prefixText):
for s in genOpts.prefixText:
@@ -108,9 +104,9 @@ class LayerDispatchTableOutputGenerator(OutputGenerator):
# Copyright Notice
copyright = '/*\n'
- copyright += ' * Copyright (c) 2015-2019 The Khronos Group Inc.\n'
- copyright += ' * Copyright (c) 2015-2019 Valve Corporation\n'
- copyright += ' * Copyright (c) 2015-2019 LunarG, Inc.\n'
+ copyright += ' * Copyright (c) 2015-2018 The Khronos Group Inc.\n'
+ copyright += ' * Copyright (c) 2015-2018 Valve Corporation\n'
+ copyright += ' * Copyright (c) 2015-2018 LunarG, Inc.\n'
copyright += ' *\n'
copyright += ' * Licensed under the Apache License, Version 2.0 (the "License");\n'
copyright += ' * you may not use this file except in compliance with the License.\n'
@@ -201,6 +197,8 @@ class LayerDispatchTableOutputGenerator(OutputGenerator):
#
# Determine if this API should be ignored or added to the instance or device dispatch table
def AddCommandToDispatchList(self, extension_name, extension_type, name, cmdinfo, handle_type):
+ handle = self.registry.tree.find("types/type/[name='" + handle_type + "'][@category='handle']")
+
return_type = cmdinfo.elem.find('proto/type')
if (return_type is not None and return_type.text == 'void'):
return_type = None
@@ -224,7 +222,7 @@ class LayerDispatchTableOutputGenerator(OutputGenerator):
cmd_params.append(self.CommandParam(type=param_type, name=param_name,
cdecl=param_cdecl))
- if handle_type in self.handle_types and handle_type != 'VkInstance' and handle_type != 'VkPhysicalDevice':
+ if handle is not None and handle_type != 'VkInstance' and handle_type != 'VkPhysicalDevice':
# The Core Vulkan code will be wrapped in a feature called VK_VERSION_#_#
# For example: VK_VERSION_1_0 wraps the core 1.0 Vulkan functionality
if 'VK_VERSION_' in extension_name:
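
With GetHandleTypes gone, the dispatch-table generator above classifies a command by looking its first parameter's type up directly in the registry: a handle-category hit that is not VkInstance or VkPhysicalDevice is routed to the device dispatch table. A self-contained sketch of that lookup against a toy inline registry; the XML below is a stand-in, not vk.xml.

# Sketch of the registry query used above to decide which dispatch table owns a
# command. The tiny inline XML stands in for the real registry.
import xml.etree.ElementTree as ET

registry = ET.fromstring("""
<registry><types>
  <type category="handle"><type>VK_DEFINE_HANDLE</type><name>VkDevice</name></type>
  <type category="handle"><type>VK_DEFINE_HANDLE</type><name>VkInstance</name></type>
  <type category="handle"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type><name>VkBuffer</name></type>
</types></registry>
""")

def owning_table(handle_type):
    handle = registry.find("types/type/[name='" + handle_type + "'][@category='handle']")
    if handle is None:
        return 'ignore'          # first parameter is not a handle type
    if handle_type in ('VkInstance', 'VkPhysicalDevice'):
        return 'instance'
    return 'device'

print(owning_table('VkDevice'))    # device
print(owning_table('VkInstance'))  # instance
print(owning_table('uint32_t'))    # ignore
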
diff --git a/scripts/lvl_genvk.py b/scripts/lvl_genvk.py
index b35b3a962..50d0294b0 100644
--- a/scripts/lvl_genvk.py
+++ b/scripts/lvl_genvk.py
@@ -111,15 +111,11 @@ def makeGenOpts(args):
# Defaults for generating re-inclusion protection wrappers (or not)
protectFeature = protect
- # An API style convention object
- conventions = VulkanConventions()
-
# ValidationLayer Generators
# Options for thread safety header code-generation
genOpts['thread_safety.h'] = [
ThreadOutputGenerator,
ThreadGeneratorOptions(
- conventions = conventions,
filename = 'thread_safety.h',
directory = directory,
apiname = 'vulkan',
@@ -143,7 +139,6 @@ def makeGenOpts(args):
genOpts['thread_safety.cpp'] = [
ThreadOutputGenerator,
ThreadGeneratorOptions(
- conventions = conventions,
filename = 'thread_safety.cpp',
directory = directory,
apiname = 'vulkan',
@@ -167,7 +162,6 @@ def makeGenOpts(args):
genOpts['parameter_validation.cpp'] = [
ParameterValidationOutputGenerator,
ParameterValidationGeneratorOptions(
- conventions = conventions,
filename = 'parameter_validation.cpp',
directory = directory,
apiname = 'vulkan',
@@ -191,7 +185,6 @@ def makeGenOpts(args):
genOpts['parameter_validation.h'] = [
ParameterValidationOutputGenerator,
ParameterValidationGeneratorOptions(
- conventions = conventions,
filename = 'parameter_validation.h',
directory = directory,
apiname = 'vulkan',
@@ -215,7 +208,6 @@ def makeGenOpts(args):
genOpts['object_tracker.cpp'] = [
ObjectTrackerOutputGenerator,
ObjectTrackerGeneratorOptions(
- conventions = conventions,
filename = 'object_tracker.cpp',
directory = directory,
apiname = 'vulkan',
@@ -240,7 +232,6 @@ def makeGenOpts(args):
genOpts['object_tracker.h'] = [
ObjectTrackerOutputGenerator,
ObjectTrackerGeneratorOptions(
- conventions = conventions,
filename = 'object_tracker.h',
directory = directory,
apiname = 'vulkan',
@@ -265,7 +256,6 @@ def makeGenOpts(args):
genOpts['vk_dispatch_table_helper.h'] = [
DispatchTableHelperOutputGenerator,
DispatchTableHelperOutputGeneratorOptions(
- conventions = conventions,
filename = 'vk_dispatch_table_helper.h',
directory = directory,
apiname = 'vulkan',
@@ -288,7 +278,6 @@ def makeGenOpts(args):
genOpts['vk_layer_dispatch_table.h'] = [
LayerDispatchTableOutputGenerator,
LayerDispatchTableGeneratorOptions(
- conventions = conventions,
filename = 'vk_layer_dispatch_table.h',
directory = directory,
apiname = 'vulkan',
@@ -311,7 +300,6 @@ def makeGenOpts(args):
genOpts['vk_enum_string_helper.h'] = [
HelperFileOutputGenerator,
HelperFileOutputGeneratorOptions(
- conventions = conventions,
filename = 'vk_enum_string_helper.h',
directory = directory,
apiname = 'vulkan',
@@ -335,7 +323,6 @@ def makeGenOpts(args):
genOpts['vk_safe_struct.h'] = [
HelperFileOutputGenerator,
HelperFileOutputGeneratorOptions(
- conventions = conventions,
filename = 'vk_safe_struct.h',
directory = directory,
apiname = 'vulkan',
@@ -359,7 +346,6 @@ def makeGenOpts(args):
genOpts['vk_safe_struct.cpp'] = [
HelperFileOutputGenerator,
HelperFileOutputGeneratorOptions(
- conventions = conventions,
filename = 'vk_safe_struct.cpp',
directory = directory,
apiname = 'vulkan',
@@ -383,7 +369,6 @@ def makeGenOpts(args):
genOpts['vk_object_types.h'] = [
HelperFileOutputGenerator,
HelperFileOutputGeneratorOptions(
- conventions = conventions,
filename = 'vk_object_types.h',
directory = directory,
apiname = 'vulkan',
@@ -407,7 +392,6 @@ def makeGenOpts(args):
genOpts['vk_extension_helper.h'] = [
HelperFileOutputGenerator,
HelperFileOutputGeneratorOptions(
- conventions = conventions,
filename = 'vk_extension_helper.h',
directory = directory,
apiname = 'vulkan',
@@ -431,7 +415,6 @@ def makeGenOpts(args):
genOpts['vk_typemap_helper.h'] = [
HelperFileOutputGenerator,
HelperFileOutputGeneratorOptions(
- conventions = conventions,
filename = 'vk_typemap_helper.h',
directory = directory,
apiname = 'vulkan',
@@ -457,7 +440,6 @@ def makeGenOpts(args):
genOpts['chassis.h'] = [
LayerChassisOutputGenerator,
LayerChassisGeneratorOptions(
- conventions = conventions,
filename = 'chassis.h',
directory = directory,
apiname = 'vulkan',
@@ -481,7 +463,6 @@ def makeGenOpts(args):
genOpts['chassis.cpp'] = [
LayerChassisOutputGenerator,
LayerChassisGeneratorOptions(
- conventions = conventions,
filename = 'chassis.cpp',
directory = directory,
apiname = 'vulkan',
@@ -505,7 +486,6 @@ def makeGenOpts(args):
genOpts['layer_chassis_dispatch.cpp'] = [
LayerChassisDispatchOutputGenerator,
LayerChassisDispatchGeneratorOptions(
- conventions = conventions,
filename = 'layer_chassis_dispatch.cpp',
directory = directory,
apiname = 'vulkan',
@@ -529,7 +509,6 @@ def makeGenOpts(args):
genOpts['layer_chassis_dispatch.h'] = [
LayerChassisDispatchOutputGenerator,
LayerChassisDispatchGeneratorOptions(
- conventions = conventions,
filename = 'layer_chassis_dispatch.h',
directory = directory,
apiname = 'vulkan',
@@ -651,10 +630,6 @@ if __name__ == '__main__':
args = parser.parse_args()
- # default scripts path to be same as registry
- if not args.scripts:
- args.scripts = os.path.dirname(args.registry)
-
scripts_directory_path = os.path.dirname(os.path.abspath(__file__))
registry_headers_path = os.path.join(scripts_directory_path, args.scripts)
sys.path.insert(0, registry_headers_path)
@@ -672,9 +647,6 @@ if __name__ == '__main__':
from layer_dispatch_table_generator import LayerDispatchTableOutputGenerator, LayerDispatchTableGeneratorOptions
from layer_chassis_generator import LayerChassisOutputGenerator, LayerChassisGeneratorOptions
from layer_chassis_dispatch_generator import LayerChassisDispatchOutputGenerator, LayerChassisDispatchGeneratorOptions
- # Temporary workaround for vkconventions python2 compatibility
- import abc; abc.ABC = abc.ABCMeta('ABC', (object,), {})
- from vkconventions import VulkanConventions
# This splits arguments which are space-separated lists
args.feature = [name for arg in args.feature for name in arg.split()]
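
Even without the conventions plumbing, makeGenOpts() keeps the same shape: a dictionary from output filename to a (generator class, generator options) pair that the driver looks up for the requested target. A stripped-down sketch with placeholder classes follows; both class names are stand-ins, not the real imports above.

# Stripped-down sketch of the genOpts table shape; both classes are placeholders.
class StubGeneratorOptions:
    def __init__(self, filename=None, directory='.', apiname='vulkan'):
        self.filename = filename
        self.directory = directory
        self.apiname = apiname

class StubThreadOutputGenerator:
    pass

genOpts = {}
genOpts['thread_safety.h'] = [
    StubThreadOutputGenerator,
    StubGeneratorOptions(filename='thread_safety.h', directory='.', apiname='vulkan'),
]

creator, options = genOpts['thread_safety.h']
print(creator.__name__, options.filename, options.apiname)
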
diff --git a/scripts/object_tracker_generator.py b/scripts/object_tracker_generator.py
index ffb6a9df9..2176df2e3 100644
--- a/scripts/object_tracker_generator.py
+++ b/scripts/object_tracker_generator.py
@@ -64,7 +64,6 @@ from io import open
# separate line, align parameter names at the specified column
class ObjectTrackerGeneratorOptions(GeneratorOptions):
def __init__(self,
- conventions = None,
filename = None,
directory = '.',
apiname = None,
@@ -88,7 +87,7 @@ class ObjectTrackerGeneratorOptions(GeneratorOptions):
alignFuncParam = 0,
expandEnumerants = True,
valid_usage_path = ''):
- GeneratorOptions.__init__(self, conventions, filename, directory, apiname, profile,
+ GeneratorOptions.__init__(self, filename, directory, apiname, profile,
versions, emitversions, defaultExtensions,
addExtensions, removeExtensions, emitExtensions, sortProcedure)
self.prefixText = prefixText
@@ -161,10 +160,6 @@ class ObjectTrackerOutputGenerator(OutputGenerator):
'vkGetPhysicalDeviceDisplayProperties2KHR',
'vkGetDisplayModePropertiesKHR',
'vkGetDisplayModeProperties2KHR',
- 'vkAcquirePerformanceConfigurationINTEL',
- 'vkReleasePerformanceConfigurationINTEL',
- 'vkQueueSetPerformanceConfigurationINTEL',
- 'vkCreateFramebuffer',
]
# These VUIDS are not implicit, but are best handled in this layer. Codegen for vkDestroy calls will generate a key
# which is translated here into a good VU. Saves ~40 checks.
@@ -270,7 +265,7 @@ class ObjectTrackerOutputGenerator(OutputGenerator):
if alias:
alias_string = 'VUID-%s-%s' % (alias, suffix)
if alias_string in self.valid_vuids:
- vuid = "\"%s\"" % alias_string
+ vuid = "\"%s\"" % vuid_string
return vuid
#
# Increases indent by 4 spaces and tracks it globally
@@ -302,7 +297,7 @@ class ObjectTrackerOutputGenerator(OutputGenerator):
output_func += ' bool skip = false;\n'
output_func += ' skip |= DeviceReportUndestroyedObjects(device, kVulkanObjectTypeCommandBuffer, error_code);\n'
for handle in self.object_types:
- if self.handle_types.IsNonDispatchable(handle):
+ if self.isHandleTypeNonDispatchable(handle):
output_func += ' skip |= DeviceReportUndestroyedObjects(device, %s, error_code);\n' % (self.GetVulkanObjType(handle))
output_func += ' return skip;\n'
output_func += '}\n'
@@ -315,7 +310,7 @@ class ObjectTrackerOutputGenerator(OutputGenerator):
output_func += 'void ObjectLifetimes::DestroyUndestroyedObjects(VkDevice device) {\n'
output_func += ' DeviceDestroyUndestroyedObjects(device, kVulkanObjectTypeCommandBuffer);\n'
for handle in self.object_types:
- if self.handle_types.IsNonDispatchable(handle):
+ if self.isHandleTypeNonDispatchable(handle):
output_func += ' DeviceDestroyUndestroyedObjects(device, %s);\n' % (self.GetVulkanObjType(handle))
output_func += '}\n'
return output_func
@@ -347,10 +342,6 @@ class ObjectTrackerOutputGenerator(OutputGenerator):
def beginFile(self, genOpts):
OutputGenerator.beginFile(self, genOpts)
- # Initialize members that require the tree
- self.handle_types = GetHandleTypes(self.registry.tree)
- self.type_categories = GetTypeCategories(self.registry.tree)
-
header_file = (genOpts.filename == 'object_tracker.h')
source_file = (genOpts.filename == 'object_tracker.cpp')
@@ -499,10 +490,33 @@ class ObjectTrackerOutputGenerator(OutputGenerator):
def paramIsPointer(self, param):
ispointer = False
for elem in param:
- if elem.tag == 'type' and elem.tail is not None and '*' in elem.tail:
+ if ((elem.tag is not 'type') and (elem.tail is not None)) and '*' in elem.tail:
ispointer = True
return ispointer
#
+ # Get the category of a type
+ def getTypeCategory(self, typename):
+ types = self.registry.tree.findall("types/type")
+ for elem in types:
+ if (elem.find("name") is not None and elem.find('name').text == typename) or elem.attrib.get('name') == typename:
+ return elem.attrib.get('category')
+ #
+ # Check if a type is a handle object
+ def isHandleTypeObject(self, handletype):
+ handle = self.registry.tree.find("types/type/[name='" + handletype + "'][@category='handle']")
+ if handle is not None:
+ return True
+ else:
+ return False
+ #
+ # Check if a handle type is non-dispatchable
+ def isHandleTypeNonDispatchable(self, handletype):
+ handle = self.registry.tree.find("types/type/[name='" + handletype + "'][@category='handle']")
+ if handle is not None and handle.find('type').text == 'VK_DEFINE_NON_DISPATCHABLE_HANDLE':
+ return True
+ else:
+ return False
+ #
# Retrieve the type and name for a parameter
def getTypeNameTuple(self, param):
type = ''
@@ -597,7 +611,7 @@ class ObjectTrackerOutputGenerator(OutputGenerator):
struct_members = struct_member_dict[struct_item]
for member in struct_members:
- if member.type in self.handle_types:
+ if self.isHandleTypeObject(member.type):
return True
# recurse for member structs, guard against infinite recursion
elif member.type in struct_member_dict and member.type != struct_item:
@@ -610,7 +624,7 @@ class ObjectTrackerOutputGenerator(OutputGenerator):
struct_list = set()
for item in item_list:
paramtype = item.find('type')
- typecategory = self.type_categories[paramtype.text]
+ typecategory = self.getTypeCategory(paramtype.text)
if typecategory == 'struct':
if self.struct_contains_object(paramtype.text) == True:
struct_list.add(item)
@@ -624,7 +638,7 @@ class ObjectTrackerOutputGenerator(OutputGenerator):
else:
member_list = item_list
for item in member_list:
- if paramtype.text in self.handle_types:
+ if self.isHandleTypeObject(paramtype.text):
object_list.add(item)
return object_list
#
@@ -666,7 +680,7 @@ class ObjectTrackerOutputGenerator(OutputGenerator):
handle_type = params[-1].find('type')
is_create_pipelines = False
- if handle_type.text in self.handle_types:
+ if self.isHandleTypeObject(handle_type.text):
# Check for special case where multiple handles are returned
object_array = False
if cmd_info[-1].len is not None:
@@ -677,8 +691,6 @@ class ObjectTrackerOutputGenerator(OutputGenerator):
if 'CreateGraphicsPipelines' in proto.text or 'CreateComputePipelines' in proto.text or 'CreateRayTracingPipelines' in proto.text:
is_create_pipelines = True
create_obj_code += '%sif (VK_ERROR_VALIDATION_FAILED_EXT == result) return;\n' % indent
- create_obj_code += '%sif (%s) {\n' % (indent, handle_name.text)
- indent = self.incIndent(indent)
countispointer = ''
if 'uint32_t*' in cmd_info[-2].cdecl:
countispointer = '*'
@@ -693,10 +705,7 @@ class ObjectTrackerOutputGenerator(OutputGenerator):
if object_array == True:
indent = self.decIndent(indent)
create_obj_code += '%s}\n' % indent
- indent = self.decIndent(indent)
- create_obj_code += '%s}\n' % indent
indent = self.decIndent(indent)
-
return create_obj_code
#
# Generate source for destroying a non-dispatchable object
@@ -715,7 +724,7 @@ class ObjectTrackerOutputGenerator(OutputGenerator):
nullalloc_vuid_string = '%s-nullalloc' % cmd_info[param].name
compatalloc_vuid = self.manual_vuids.get(compatalloc_vuid_string, "kVUIDUndefined")
nullalloc_vuid = self.manual_vuids.get(nullalloc_vuid_string, "kVUIDUndefined")
- if cmd_info[param].type in self.handle_types:
+ if self.isHandleTypeObject(cmd_info[param].type) == True:
if object_array == True:
# This API is freeing an array of handles -- add loop control
validate_code += 'HEY, NEED TO DESTROY AN ARRAY\n'
@@ -738,16 +747,11 @@ class ObjectTrackerOutputGenerator(OutputGenerator):
if parent_vuid == 'kVUIDUndefined':
parent_vuid = self.GetVuid(parent_name, 'commonparent')
if obj_count is not None:
-
- pre_call_code += '%sif (%s%s) {\n' % (indent, prefix, obj_name)
- indent = self.incIndent(indent)
pre_call_code += '%sfor (uint32_t %s = 0; %s < %s; ++%s) {\n' % (indent, index, index, obj_count, index)
indent = self.incIndent(indent)
pre_call_code += '%sskip |= ValidateObject(%s, %s%s[%s], %s, %s, %s, %s);\n' % (indent, disp_name, prefix, obj_name, index, self.GetVulkanObjType(obj_type), null_allowed, param_vuid, parent_vuid)
indent = self.decIndent(indent)
pre_call_code += '%s}\n' % indent
- indent = self.decIndent(indent)
- pre_call_code += '%s}\n' % indent
else:
pre_call_code += '%sskip |= ValidateObject(%s, %s%s, %s, %s, %s, %s);\n' % (indent, disp_name, prefix, obj_name, self.GetVulkanObjType(obj_type), null_allowed, param_vuid, parent_vuid)
return pre_call_code
@@ -762,7 +766,7 @@ class ObjectTrackerOutputGenerator(OutputGenerator):
# Handle objects
if member.iscreate and first_level_param and member == members[-1]:
continue
- if member.type in self.handle_types:
+ if self.isHandleTypeObject(member.type) == True:
count_name = member.len
if (count_name is not None):
count_name = '%s%s' % (prefix, member.len)
@@ -792,7 +796,7 @@ class ObjectTrackerOutputGenerator(OutputGenerator):
pre_code += '%s}\n' % indent
indent = self.decIndent(indent)
pre_code += '%s}\n' % indent
- # Single Struct Pointer
+ # Single Struct
elif ispointer:
# Update struct prefix
new_prefix = '%s%s->' % (prefix, member.name)
@@ -804,13 +808,6 @@ class ObjectTrackerOutputGenerator(OutputGenerator):
pre_code += tmp_pre
indent = self.decIndent(indent)
pre_code += '%s}\n' % indent
- # Single Nested Struct
- else:
- # Update struct prefix
- new_prefix = '%s%s.' % (prefix, member.name)
- # Process sub-structs
- tmp_pre = self.validate_objects(struct_info, indent, new_prefix, array_index, disp_name, member.type, False)
- pre_code += tmp_pre
return pre_code
#
# For a particular API, generate the object handling code
@@ -845,6 +842,7 @@ class ObjectTrackerOutputGenerator(OutputGenerator):
#
# Capture command parameter info needed to create, destroy, and validate objects
def genCmd(self, cmdinfo, cmdname, alias):
+
# Add struct-member type information to command parameter information
OutputGenerator.genCmd(self, cmdinfo, cmdname, alias)
members = cmdinfo.elem.findall('.//param')
@@ -864,6 +862,7 @@ class ObjectTrackerOutputGenerator(OutputGenerator):
# Generate member info
membersInfo = []
+ contains_extension_structs = False
allocator = 'nullptr'
for member in members:
# Get type and name of member
@@ -877,7 +876,7 @@ class ObjectTrackerOutputGenerator(OutputGenerator):
isconst = True if 'const' in cdecl else False
# Mark param as local if it is an array of objects
islocal = False;
- if type in self.handle_types:
+ if self.isHandleTypeObject(type) == True:
if (length is not None) and (isconst == True):
islocal = True
# Or if it's a struct that contains an object
@@ -987,6 +986,7 @@ class ObjectTrackerOutputGenerator(OutputGenerator):
post_cr_func_decl = post_cr_func_decl.replace('{', '{\n if (result != VK_SUCCESS) return;')
self.appendSection('command', post_cr_func_decl)
+
self.appendSection('command', post_call_record)
self.appendSection('command', '}')
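
The object-tracker hunks above replace the shared handle and type tables with per-generator registry queries: getTypeCategory() scans types/type entries, isHandleTypeObject() checks for a handle-category match, and those answers decide which parameters and struct members receive ValidateObject calls. A small sketch against a toy registry; the inline XML is illustrative and the helpers only mirror the pattern.

# Sketch of the two registry helpers the object tracker now relies on.
import xml.etree.ElementTree as ET

tree = ET.fromstring("""
<registry><types>
  <type category="handle"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type><name>VkFence</name></type>
  <type category="struct" name="VkSubmitInfo"/>
  <type category="basetype"><name>VkBool32</name></type>
</types></registry>
""")

def getTypeCategory(typename):
    for elem in tree.findall("types/type"):
        name_elem = elem.find("name")
        if (name_elem is not None and name_elem.text == typename) or elem.attrib.get('name') == typename:
            return elem.attrib.get('category')

def isHandleTypeObject(handletype):
    handle = tree.find("types/type/[name='" + handletype + "'][@category='handle']")
    return handle is not None

print(getTypeCategory('VkSubmitInfo'))   # struct
print(isHandleTypeObject('VkFence'))     # True
print(isHandleTypeObject('VkBool32'))    # False
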
diff --git a/scripts/parameter_validation_generator.py b/scripts/parameter_validation_generator.py
index 8aa410f18..e8f6dcc15 100644
--- a/scripts/parameter_validation_generator.py
+++ b/scripts/parameter_validation_generator.py
@@ -64,7 +64,6 @@ from io import open
# separate line, align parameter names at the specified column
class ParameterValidationGeneratorOptions(GeneratorOptions):
def __init__(self,
- conventions = None,
filename = None,
directory = '.',
apiname = None,
@@ -85,7 +84,7 @@ class ParameterValidationGeneratorOptions(GeneratorOptions):
alignFuncParam = 0,
expandEnumerants = True,
valid_usage_path = ''):
- GeneratorOptions.__init__(self, conventions, filename, directory, apiname, profile,
+ GeneratorOptions.__init__(self, filename, directory, apiname, profile,
versions, emitversions, defaultExtensions,
addExtensions, removeExtensions, emitExtensions, sortProcedure)
self.prefixText = prefixText
@@ -140,9 +139,9 @@ class ParameterValidationOutputGenerator(OutputGenerator):
'vkCreateRenderPass2KHR',
'vkCreateBuffer',
'vkCreateImage',
+ 'vkCreateImageView',
'vkCreateGraphicsPipelines',
'vkCreateComputePipelines',
- "vkCreateRayTracingPipelinesNV",
'vkCreateSampler',
'vkCreateDescriptorSetLayout',
'vkFreeDescriptorSets',
@@ -156,9 +155,7 @@ class ParameterValidationOutputGenerator(OutputGenerator):
'vkCmdDraw',
'vkCmdDrawIndirect',
'vkCmdDrawIndexedIndirect',
- 'vkCmdClearAttachments',
'vkCmdCopyImage',
- 'vkCmdBindIndexBuffer',
'vkCmdBlitImage',
'vkCmdCopyBufferToImage',
'vkCmdCopyImageToBuffer',
@@ -177,11 +174,6 @@ class ParameterValidationOutputGenerator(OutputGenerator):
'vkCmdDrawMeshTasksIndirectNV',
'vkCmdDrawMeshTasksIndirectCountNV',
'vkAllocateMemory',
- 'vkCreateAccelerationStructureNV',
- 'vkGetAccelerationStructureHandleNV',
- 'vkCmdBuildAccelerationStructureNV',
- 'vkCreateFramebuffer',
- 'vkCmdSetLineStippleEXT',
]
# Commands to ignore
@@ -193,7 +185,6 @@ class ParameterValidationOutputGenerator(OutputGenerator):
'vkEnumerateInstanceExtensionProperties',
'vkEnumerateDeviceLayerProperties',
'vkEnumerateDeviceExtensionProperties',
- 'vkGetDeviceGroupSurfacePresentModes2EXT'
]
# Structure fields to ignore
@@ -239,9 +230,9 @@ class ParameterValidationOutputGenerator(OutputGenerator):
copyright = '/* *** THIS FILE IS GENERATED - DO NOT EDIT! ***\n'
copyright += ' * See parameter_validation_generator.py for modifications\n'
copyright += ' *\n'
- copyright += ' * Copyright (c) 2015-2019 The Khronos Group Inc.\n'
- copyright += ' * Copyright (c) 2015-2019 LunarG, Inc.\n'
- copyright += ' * Copyright (C) 2015-2019 Google Inc.\n'
+ copyright += ' * Copyright (c) 2015-2018 The Khronos Group Inc.\n'
+ copyright += ' * Copyright (c) 2015-2018 LunarG, Inc.\n'
+ copyright += ' * Copyright (C) 2015-2018 Google Inc.\n'
copyright += ' *\n'
copyright += ' * Licensed under the Apache License, Version 2.0 (the "License");\n'
copyright += ' * you may not use this file except in compliance with the License.\n'
@@ -343,7 +334,7 @@ class ParameterValidationOutputGenerator(OutputGenerator):
write(self.enumValueLists, file=self.outFile)
self.newline()
- pnext_handler = 'bool StatelessValidation::ValidatePnextStructContents(const char *api_name, const ParameterName &parameter_name, const VkBaseOutStructure* header) {\n'
+ pnext_handler = 'bool StatelessValidation::ValidatePnextStructContents(const char *api_name, const ParameterName &parameter_name, const GenericHeader* header) {\n'
pnext_handler += ' bool skip = false;\n'
pnext_handler += ' switch(header->sType) {\n'
@@ -367,10 +358,10 @@ class ParameterValidationOutputGenerator(OutputGenerator):
struct_validation_source = self.ScrubStructCode(expr)
pnext_case += '%s' % struct_validation_source
pnext_case += ' } break;\n'
- if protect:
+ if protect is not '':
pnext_case += '#endif // %s\n' % protect
# Skip functions containing no validation
- if struct_validation_source:
+ if struct_validation_source != '':
pnext_handler += pnext_case;
pnext_handler += ' default:\n'
pnext_handler += ' skip = false;\n'
@@ -601,18 +592,12 @@ class ParameterValidationOutputGenerator(OutputGenerator):
if isEnum:
self.enumRanges[groupName] = (expandPrefix + '_BEGIN_RANGE' + expandSuffix, expandPrefix + '_END_RANGE' + expandSuffix)
# Create definition for a list containing valid enum values for this enumerated type
- if self.featureExtraProtect is not None:
- enum_entry = '\n#ifdef %s\n' % self.featureExtraProtect
- else:
- enum_entry = ''
- enum_entry += 'const std::vector<%s> All%sEnums = {' % (groupName, groupName)
+ enum_entry = 'const std::vector<%s> All%sEnums = {' % (groupName, groupName)
for enum in groupElem:
name = enum.get('name')
if name is not None and enum.get('supported') != 'disabled':
enum_entry += '%s, ' % name
enum_entry += '};\n'
- if self.featureExtraProtect is not None:
- enum_entry += '#endif // %s\n' % self.featureExtraProtect
self.enumValueLists += enum_entry
#
# Capture command parameter info to be used for param check code generation.
@@ -929,9 +914,6 @@ class ParameterValidationOutputGenerator(OutputGenerator):
if lenValue:
count_required_vuid = self.GetVuid(vuid_tag_name, "%s-arraylength" % (lenValue.name))
array_required_vuid = self.GetVuid(vuid_tag_name, "%s-parameter" % (value.name))
- # TODO: Remove workaround for missing optional tag in vk.xml
- if array_required_vuid == '"VUID-VkFramebufferCreateInfo-pAttachments-parameter"':
- return []
# This is an array with a pointer to a count value
if lenValue.ispointer:
# If count and array parameters are optional, there will be no validation
@@ -962,6 +944,7 @@ class ParameterValidationOutputGenerator(OutputGenerator):
allocator_dict = {'pfnAllocation': '"VUID-VkAllocationCallbacks-pfnAllocation-00632"',
'pfnReallocation': '"VUID-VkAllocationCallbacks-pfnReallocation-00633"',
'pfnFree': '"VUID-VkAllocationCallbacks-pfnFree-00634"',
+ 'pfnInternalAllocation': '"VUID-VkAllocationCallbacks-pfnInternalAllocation-00635"'
}
vuid = allocator_dict.get(value.name)
if vuid is not None:
@@ -969,27 +952,7 @@ class ParameterValidationOutputGenerator(OutputGenerator):
checkExpr.append('skip |= validate_required_pointer("{}", {ppp}"{}"{pps}, reinterpret_cast<const void*>({}{}), {});\n'.format(funcPrintName, valuePrintName, prefix, value.name, ptr_required_vuid, **postProcSpec))
else:
checkExpr.append('skip |= validate_required_pointer("{}", {ppp}"{}"{pps}, {}{}, {});\n'.format(funcPrintName, valuePrintName, prefix, value.name, ptr_required_vuid, **postProcSpec))
- else:
- # Special case for optional internal allocation function pointers.
- if (value.type, value.name) == ('PFN_vkInternalAllocationNotification', 'pfnInternalAllocation'):
- checkExpr.extend(self.internalAllocationCheck(funcPrintName, prefix, value.name, 'pfnInternalFree', postProcSpec))
- elif (value.type, value.name) == ('PFN_vkInternalFreeNotification', 'pfnInternalFree'):
- checkExpr.extend(self.internalAllocationCheck(funcPrintName, prefix, value.name, 'pfnInternalAllocation', postProcSpec))
return checkExpr
-
- #
- # Generate internal allocation function pointer check.
- def internalAllocationCheck(self, funcPrintName, prefix, name, complementaryName, postProcSpec):
- checkExpr = []
- vuid = '"VUID-VkAllocationCallbacks-pfnInternalAllocation-00635"'
- checkExpr.append('if ({}{} != NULL)'.format(prefix, name))
- checkExpr.append('{')
- local_indent = self.incIndent('')
- # Function pointers need a reinterpret_cast to void*
- checkExpr.append(local_indent + 'skip |= validate_required_pointer("{}", {ppp}"{}{}"{pps}, reinterpret_cast<const void*>({}{}), {});\n'.format(funcPrintName, prefix, complementaryName, prefix, complementaryName, vuid, **postProcSpec))
- checkExpr.append('}\n')
- return checkExpr
-
#
# Process struct member validation code, performing name substitution if required
def processStructMemberCode(self, line, funcName, memberNamePrefix, memberDisplayNamePrefix, postProcSpec):
@@ -1152,7 +1115,7 @@ class ParameterValidationOutputGenerator(OutputGenerator):
# For the pointer to struct case, the struct pointer will not be validated, but any
# members not tagged as 'noautovalidity' will be validated
# We special-case the custom allocator checks, as they are explicit but can be auto-generated.
- AllocatorFunctions = ['PFN_vkAllocationFunction', 'PFN_vkReallocationFunction', 'PFN_vkFreeFunction', 'PFN_vkInternalAllocationNotification', 'PFN_vkInternalFreeNotification']
+ AllocatorFunctions = ['PFN_vkAllocationFunction', 'PFN_vkReallocationFunction', 'PFN_vkFreeFunction']
if value.noautovalidity and value.type not in AllocatorFunctions:
# Log a diagnostic message when validation cannot be automatically generated and must be implemented manually
self.logMsg('diag', 'ParameterValidation: No validation for {} {}'.format(structTypeName if structTypeName else funcName, value.name))
@@ -1199,26 +1162,25 @@ class ParameterValidationOutputGenerator(OutputGenerator):
elif value.type in self.handleTypes:
if not self.isHandleOptional(value, None):
usedLines.append('skip |= validate_required_handle("{}", {ppp}"{}"{pps}, {}{});\n'.format(funcName, valueDisplayName, valuePrefix, value.name, **postProcSpec))
- elif value.type in self.flags and value.type.replace('Flags', 'FlagBits') not in self.flagBits:
- vuid = self.GetVuid(vuid_name_tag, "%s-zerobitmask" % (value.name))
- usedLines.append('skip |= validate_reserved_flags("{}", {ppp}"{}"{pps}, {pf}{}, {});\n'.format(funcName, valueDisplayName, value.name, vuid, pf=valuePrefix, **postProcSpec))
- elif value.type in self.flags or value.type in self.flagBits:
- if value.type in self.flags:
- flagBitsName = value.type.replace('Flags', 'FlagBits')
- flagsType = 'kOptionalFlags' if value.isoptional else 'kRequiredFlags'
- invalidVuid = self.GetVuid(vuid_name_tag, "%s-parameter" % (value.name))
- zeroVuid = self.GetVuid(vuid_name_tag, "%s-requiredbitmask" % (value.name))
- elif value.type in self.flagBits:
- flagBitsName = value.type
- flagsType = 'kOptionalSingleBit' if value.isoptional else 'kRequiredSingleBit'
- invalidVuid = self.GetVuid(vuid_name_tag, "%s-parameter" % (value.name))
- zeroVuid = invalidVuid
- allFlagsName = 'All' + flagBitsName
-
- invalid_vuid = self.GetVuid(vuid_name_tag, "%s-parameter" % (value.name))
- allFlagsName = 'All' + flagBitsName
- zeroVuidArg = '' if value.isoptional else ', ' + zeroVuid
- usedLines.append('skip |= validate_flags("{}", {ppp}"{}"{pps}, "{}", {}, {pf}{}, {}, {}{});\n'.format(funcName, valueDisplayName, flagBitsName, allFlagsName, value.name, flagsType, invalidVuid, zeroVuidArg, pf=valuePrefix, **postProcSpec))
+ elif value.type in self.flags:
+ flagBitsName = value.type.replace('Flags', 'FlagBits')
+ if not flagBitsName in self.flagBits:
+ vuid = self.GetVuid(vuid_name_tag, "%s-zerobitmask" % (value.name))
+ usedLines.append('skip |= validate_reserved_flags("{}", {ppp}"{}"{pps}, {pf}{}, {});\n'.format(funcName, valueDisplayName, value.name, vuid, pf=valuePrefix, **postProcSpec))
+ else:
+ if value.isoptional:
+ flagsRequired = 'false'
+ vuid = self.GetVuid(vuid_name_tag, "%s-parameter" % (value.name))
+ else:
+ flagsRequired = 'true'
+ vuid = self.GetVuid(vuid_name_tag, "%s-requiredbitmask" % (value.name))
+ allFlagsName = 'All' + flagBitsName
+ usedLines.append('skip |= validate_flags("{}", {ppp}"{}"{pps}, "{}", {}, {pf}{}, {}, false, {});\n'.format(funcName, valueDisplayName, flagBitsName, allFlagsName, value.name, flagsRequired, vuid, pf=valuePrefix, **postProcSpec))
+ elif value.type in self.flagBits:
+ flagsRequired = 'false' if value.isoptional else 'true'
+ allFlagsName = 'All' + value.type
+ vuid = self.GetVuid(vuid_name_tag, "%s-parameter" % (value.name))
+ usedLines.append('skip |= validate_flags("{}", {ppp}"{}"{pps}, "{}", {}, {pf}{}, {}, true, {});\n'.format(funcName, valueDisplayName, value.type, allFlagsName, value.name, flagsRequired, vuid, pf=valuePrefix, **postProcSpec))
elif value.isbool:
usedLines.append('skip |= validate_bool32("{}", {ppp}"{}"{pps}, {}{});\n'.format(funcName, valueDisplayName, valuePrefix, value.name, **postProcSpec))
elif value.israngedenum:
@@ -1284,12 +1246,6 @@ class ParameterValidationOutputGenerator(OutputGenerator):
func_sig = func_sig.split('VKAPI_CALL vk')[1]
cmdDef = 'bool StatelessValidation::PreCallValidate' + func_sig
cmdDef += '%sbool skip = false;\n' % indent
- for line in lines:
- if type(line) is list:
- for sub in line:
- cmdDef += indent + sub
- else:
- cmdDef += indent + line
# Insert call to custom-written function if present
if command.name in self.functions_with_manual_checks:
# Generate parameter list for manual fcn and down-chain calls
@@ -1297,7 +1253,13 @@ class ParameterValidationOutputGenerator(OutputGenerator):
for param in command.params:
params_text += '%s, ' % param.name
params_text = params_text[:-2] + ');\n'
- cmdDef += ' if (!skip) skip |= manual_PreCallValidate'+ command.name[2:] + '(' + params_text
+ cmdDef += ' skip |= manual_PreCallValidate'+ command.name[2:] + '(' + params_text
+ for line in lines:
+ if type(line) is list:
+ for sub in line:
+ cmdDef += indent + sub
+ else:
+ cmdDef += indent + line
cmdDef += '%sreturn skip;\n' % indent
cmdDef += '}\n'
self.validation.append(cmdDef)
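
The restored flags branch above picks between three generated checks: validate_reserved_flags for a Flags type with no matching FlagBits, validate_flags with a required/optional switch when the bits are known, and a single-bit validate_flags call for bare FlagBits parameters. A minimal sketch of that decision, using stand-in sets for self.flags and self.flagBits and kVUIDUndefined in place of the real VUID lookups.

# Stand-in registries; the real generator fills these from vk.xml.
flags = {'VkCommandPoolCreateFlags', 'VkCommandPoolTrimFlags'}
flag_bits = {'VkCommandPoolCreateFlagBits'}

def flags_check(func_name, param_name, param_type, is_optional):
    if param_type in flags:
        flag_bits_name = param_type.replace('Flags', 'FlagBits')
        if flag_bits_name not in flag_bits:
            # Reserved-for-future-use bitmask: must be zero.
            return 'skip |= validate_reserved_flags("%s", "%s", %s, kVUIDUndefined);' % (
                func_name, param_name, param_name)
        flags_required = 'false' if is_optional else 'true'
        return 'skip |= validate_flags("%s", "%s", "%s", All%s, %s, %s, false, kVUIDUndefined);' % (
            func_name, param_name, flag_bits_name, flag_bits_name, param_name, flags_required)
    if param_type in flag_bits:
        flags_required = 'false' if is_optional else 'true'
        return 'skip |= validate_flags("%s", "%s", "%s", All%s, %s, %s, true, kVUIDUndefined);' % (
            func_name, param_name, param_type, param_type, param_name, flags_required)
    return None

print(flags_check('vkCreateCommandPool', 'pCreateInfo->flags', 'VkCommandPoolCreateFlags', False))
print(flags_check('vkTrimCommandPool', 'flags', 'VkCommandPoolTrimFlags', True))
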
diff --git a/scripts/thread_safety_generator.py b/scripts/thread_safety_generator.py
index da0a1f906..76196b158 100644
--- a/scripts/thread_safety_generator.py
+++ b/scripts/thread_safety_generator.py
@@ -59,7 +59,6 @@ from common_codegen import *
# separate line, align parameter names at the specified column
class ThreadGeneratorOptions(GeneratorOptions):
def __init__(self,
- conventions = None,
filename = None,
directory = '.',
apiname = None,
@@ -82,7 +81,7 @@ class ThreadGeneratorOptions(GeneratorOptions):
indentFuncPointer = False,
alignFuncParam = 0,
expandEnumerants = True):
- GeneratorOptions.__init__(self, conventions, filename, directory, apiname, profile,
+ GeneratorOptions.__init__(self, filename, directory, apiname, profile,
versions, emitversions, defaultExtensions,
addExtensions, removeExtensions, emitExtensions, sortProcedure)
self.prefixText = prefixText
@@ -119,7 +118,7 @@ class ThreadOutputGenerator(OutputGenerator):
inline_copyright_message = """
// This file is ***GENERATED***. Do Not Edit.
-// See thread_safety_generator.py for modifications.
+// See thread_safety_generator.py for modifications.
/* Copyright (c) 2015-2019 The Khronos Group Inc.
* Copyright (c) 2015-2019 Valve Corporation
@@ -149,8 +148,7 @@ class ThreadOutputGenerator(OutputGenerator):
inline_custom_header_preamble = """
#pragma once
-#include <chrono>
-#include <thread>
+#include <condition_variable>
#include <mutex>
#include <vector>
#include <unordered_set>
@@ -243,53 +241,32 @@ public:
}
};
-#define THREAD_SAFETY_BUCKETS_LOG2 6
-#define THREAD_SAFETY_BUCKETS (1 << THREAD_SAFETY_BUCKETS_LOG2)
-
-template <typename T> inline uint32_t ThreadSafetyHashObject(T object)
-{
- uint64_t u64 = (uint64_t)(uintptr_t)object;
- uint32_t hash = (uint32_t)(u64 >> 32) + (uint32_t)u64;
- hash ^= (hash >> THREAD_SAFETY_BUCKETS_LOG2) ^ (hash >> (2*THREAD_SAFETY_BUCKETS_LOG2));
- hash &= (THREAD_SAFETY_BUCKETS-1);
- return hash;
-}
-
template <typename T>
class counter {
public:
const char *typeName;
VkDebugReportObjectTypeEXT objectType;
debug_report_data **report_data;
+ small_unordered_map<T, object_use_data> uses;
+ std::mutex counter_lock;
+ std::condition_variable counter_condition;
- // Per-bucket locking, to reduce contention.
- struct CounterBucket {
- small_unordered_map<T, object_use_data> uses;
- std::mutex counter_lock;
- };
-
- CounterBucket buckets[THREAD_SAFETY_BUCKETS];
- CounterBucket &GetBucket(T object)
- {
- return buckets[ThreadSafetyHashObject(object)];
- }
void StartWrite(T object) {
if (object == VK_NULL_HANDLE) {
return;
}
- auto &bucket = GetBucket(object);
bool skip = false;
loader_platform_thread_id tid = loader_platform_get_thread_id();
- std::unique_lock<std::mutex> lock(bucket.counter_lock);
- if (!bucket.uses.contains(object)) {
+ std::unique_lock<std::mutex> lock(counter_lock);
+ if (!uses.contains(object)) {
// There is no current use of the object. Record writer thread.
- struct object_use_data *use_data = &bucket.uses[object];
+ struct object_use_data *use_data = &uses[object];
use_data->reader_count = 0;
use_data->writer_count = 1;
use_data->thread = tid;
} else {
- struct object_use_data *use_data = &bucket.uses[object];
+ struct object_use_data *use_data = &uses[object];
if (use_data->reader_count == 0) {
// There are no readers. Two writers just collided.
if (use_data->thread != tid) {
@@ -299,9 +276,12 @@ public:
"thread 0x%" PRIx64 " and thread 0x%" PRIx64,
typeName, (uint64_t)use_data->thread, (uint64_t)tid);
if (skip) {
- WaitForObjectIdle(bucket, object, lock);
+ // Wait for thread-safe access to object instead of skipping call.
+ while (uses.contains(object)) {
+ counter_condition.wait(lock);
+ }
// There is now no current use of the object. Record writer thread.
- struct object_use_data *new_use_data = &bucket.uses[object];
+ struct object_use_data *new_use_data = &uses[object];
new_use_data->thread = tid;
new_use_data->reader_count = 0;
new_use_data->writer_count = 1;
@@ -324,9 +304,12 @@ public:
"thread 0x%" PRIx64 " and thread 0x%" PRIx64,
typeName, (uint64_t)use_data->thread, (uint64_t)tid);
if (skip) {
- WaitForObjectIdle(bucket, object, lock);
+ // Wait for thread-safe access to object instead of skipping call.
+ while (uses.contains(object)) {
+ counter_condition.wait(lock);
+ }
// There is now no current use of the object. Record writer thread.
- struct object_use_data *new_use_data = &bucket.uses[object];
+ struct object_use_data *new_use_data = &uses[object];
new_use_data->thread = tid;
new_use_data->reader_count = 0;
new_use_data->writer_count = 1;
@@ -348,81 +331,73 @@ public:
if (object == VK_NULL_HANDLE) {
return;
}
- auto &bucket = GetBucket(object);
// Object is no longer in use
- std::unique_lock<std::mutex> lock(bucket.counter_lock);
- struct object_use_data *use_data = &bucket.uses[object];
- use_data->writer_count -= 1;
- if ((use_data->reader_count == 0) && (use_data->writer_count == 0)) {
- bucket.uses.erase(object);
+ std::unique_lock<std::mutex> lock(counter_lock);
+ uses[object].writer_count -= 1;
+ if ((uses[object].reader_count == 0) && (uses[object].writer_count == 0)) {
+ uses.erase(object);
}
+ // Notify any waiting threads that this object may be safe to use
+ lock.unlock();
+ counter_condition.notify_all();
}
void StartRead(T object) {
if (object == VK_NULL_HANDLE) {
return;
}
- auto &bucket = GetBucket(object);
bool skip = false;
loader_platform_thread_id tid = loader_platform_get_thread_id();
- std::unique_lock<std::mutex> lock(bucket.counter_lock);
- if (!bucket.uses.contains(object)) {
+ std::unique_lock<std::mutex> lock(counter_lock);
+ if (!uses.contains(object)) {
// There is no current use of the object. Record reader count
- struct object_use_data *use_data = &bucket.uses[object];
+ struct object_use_data *use_data = &uses[object];
use_data->reader_count = 1;
use_data->writer_count = 0;
use_data->thread = tid;
- } else if (bucket.uses[object].writer_count > 0 && bucket.uses[object].thread != tid) {
+ } else if (uses[object].writer_count > 0 && uses[object].thread != tid) {
// There is a writer of the object.
- skip |= log_msg(*report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, objectType, (uint64_t)(object),
- kVUID_Threading_MultipleThreads,
+ skip |= false;
+ log_msg(*report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, objectType, (uint64_t)(object), kVUID_Threading_MultipleThreads,
"THREADING ERROR : object of type %s is simultaneously used in "
"thread 0x%" PRIx64 " and thread 0x%" PRIx64,
- typeName, (uint64_t)bucket.uses[object].thread, (uint64_t)tid);
+ typeName, (uint64_t)uses[object].thread, (uint64_t)tid);
if (skip) {
- WaitForObjectIdle(bucket, object, lock);
+ // Wait for thread-safe access to object instead of skipping call.
+ while (uses.contains(object)) {
+ counter_condition.wait(lock);
+ }
// There is no current use of the object. Record reader count
- struct object_use_data *use_data = &bucket.uses[object];
+ struct object_use_data *use_data = &uses[object];
use_data->reader_count = 1;
use_data->writer_count = 0;
use_data->thread = tid;
} else {
- bucket.uses[object].reader_count += 1;
+ uses[object].reader_count += 1;
}
} else {
// There are other readers of the object. Increase reader count
- bucket.uses[object].reader_count += 1;
+ uses[object].reader_count += 1;
}
}
void FinishRead(T object) {
if (object == VK_NULL_HANDLE) {
return;
}
- auto &bucket = GetBucket(object);
- std::unique_lock<std::mutex> lock(bucket.counter_lock);
- struct object_use_data *use_data = &bucket.uses[object];
- use_data->reader_count -= 1;
- if ((use_data->reader_count == 0) && (use_data->writer_count == 0)) {
- bucket.uses.erase(object);
+ std::unique_lock<std::mutex> lock(counter_lock);
+ uses[object].reader_count -= 1;
+ if ((uses[object].reader_count == 0) && (uses[object].writer_count == 0)) {
+ uses.erase(object);
}
+ // Notify any waiting threads that this object may be safe to use
+ lock.unlock();
+ counter_condition.notify_all();
}
counter(const char *name = "", VkDebugReportObjectTypeEXT type = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, debug_report_data **rep_data = nullptr) {
typeName = name;
objectType = type;
report_data = rep_data;
}
-
-private:
- void WaitForObjectIdle(CounterBucket &bucket, T object, std::unique_lock<std::mutex> &lock) {
- // Wait for thread-safe access to object instead of skipping call.
- // Don't use condition_variable to wait because it should be extremely
- // rare to have collisions, but signaling would be very frequent.
- while (bucket.uses.contains(object)) {
- lock.unlock();
- std::this_thread::sleep_for(std::chrono::microseconds(1));
- lock.lock();
- }
- }
};
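The replacement above trades the per-bucket busy-wait for a single map guarded by one mutex plus a condition variable: a colliding reader or writer parks on counter_condition until the object's use record disappears, and every FinishRead/FinishWrite notifies after dropping the lock. A minimal standalone sketch of that wait/notify shape, using plain std containers and a uint64_t stand-in for the handle rather than the layer's small_unordered_map:

    #include <condition_variable>
    #include <cstdint>
    #include <mutex>
    #include <unordered_map>

    struct ObjectUse { int readers = 0; int writers = 0; };

    static std::mutex counter_lock;
    static std::condition_variable counter_condition;
    static std::unordered_map<uint64_t, ObjectUse> uses;

    // Collision path of StartWrite: block until no other thread holds the object, then claim it.
    void WaitThenClaimForWrite(uint64_t handle) {
        std::unique_lock<std::mutex> lock(counter_lock);
        while (uses.count(handle) != 0) {
            counter_condition.wait(lock);
        }
        uses[handle].writers = 1;
    }

    // FinishWrite: drop the claim, then wake any parked waiters.
    void FinishWrite(uint64_t handle) {
        std::unique_lock<std::mutex> lock(counter_lock);
        ObjectUse &use = uses[handle];
        use.writers -= 1;
        if (use.readers == 0 && use.writers == 0) {
            uses.erase(handle);
        }
        lock.unlock();
        counter_condition.notify_all();
    }

Notifying after the mutex is released, as the FinishRead/FinishWrite bodies above do, keeps woken waiters from immediately blocking on a still-held lock.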
@@ -436,17 +411,8 @@ public:
return std::unique_lock<std::mutex>(validation_object_mutex, std::defer_lock);
}
- // Per-bucket locking, to reduce contention.
- struct CommandBufferBucket {
- std::mutex command_pool_lock;
- small_unordered_map<VkCommandBuffer, VkCommandPool> command_pool_map;
- };
-
- CommandBufferBucket buckets[THREAD_SAFETY_BUCKETS];
- CommandBufferBucket &GetBucket(VkCommandBuffer object)
- {
- return buckets[ThreadSafetyHashObject(object)];
- }
+ std::mutex command_pool_lock;
+ std::unordered_map<VkCommandBuffer, VkCommandPool> command_pool_map;
counter<VkCommandBuffer> c_VkCommandBuffer;
counter<VkDevice> c_VkDevice;
@@ -508,9 +474,8 @@ WRAPPER(uint64_t)
// VkCommandBuffer needs check for implicit use of command pool
void StartWriteObject(VkCommandBuffer object, bool lockPool = true) {
if (lockPool) {
- auto &bucket = GetBucket(object);
- std::unique_lock<std::mutex> lock(bucket.command_pool_lock);
- VkCommandPool pool = bucket.command_pool_map[object];
+ std::unique_lock<std::mutex> lock(command_pool_lock);
+ VkCommandPool pool = command_pool_map[object];
lock.unlock();
StartWriteObject(pool);
}
@@ -519,17 +484,15 @@ WRAPPER(uint64_t)
void FinishWriteObject(VkCommandBuffer object, bool lockPool = true) {
c_VkCommandBuffer.FinishWrite(object);
if (lockPool) {
- auto &bucket = GetBucket(object);
- std::unique_lock<std::mutex> lock(bucket.command_pool_lock);
- VkCommandPool pool = bucket.command_pool_map[object];
+ std::unique_lock<std::mutex> lock(command_pool_lock);
+ VkCommandPool pool = command_pool_map[object];
lock.unlock();
FinishWriteObject(pool);
}
}
void StartReadObject(VkCommandBuffer object) {
- auto &bucket = GetBucket(object);
- std::unique_lock<std::mutex> lock(bucket.command_pool_lock);
- VkCommandPool pool = bucket.command_pool_map[object];
+ std::unique_lock<std::mutex> lock(command_pool_lock);
+ VkCommandPool pool = command_pool_map[object];
lock.unlock();
// We set up a read guard against the "Contents" counter to catch conflict vs. vkResetCommandPool and vkDestroyCommandPool
// while *not* establishing a read guard against the command pool counter itself to avoid false positives for
@@ -538,10 +501,9 @@ WRAPPER(uint64_t)
c_VkCommandBuffer.StartRead(object);
}
void FinishReadObject(VkCommandBuffer object) {
- auto &bucket = GetBucket(object);
c_VkCommandBuffer.FinishRead(object);
- std::unique_lock<std::mutex> lock(bucket.command_pool_lock);
- VkCommandPool pool = bucket.command_pool_map[object];
+ std::unique_lock<std::mutex> lock(command_pool_lock);
+ VkCommandPool pool = command_pool_map[object];
lock.unlock();
c_VkCommandPoolContents.FinishRead(pool);
} """
@@ -560,12 +522,9 @@ void ThreadSafety::PostCallRecordAllocateCommandBuffers(VkDevice device, const V
FinishWriteObject(pAllocateInfo->commandPool);
// Record mapping from command buffer to command pool
- if(pCommandBuffers) {
- for (uint32_t index = 0; index < pAllocateInfo->commandBufferCount; index++) {
- auto &bucket = GetBucket(pCommandBuffers[index]);
- std::lock_guard<std::mutex> lock(bucket.command_pool_lock);
- bucket.command_pool_map[pCommandBuffers[index]] = pAllocateInfo->commandPool;
- }
+ for (uint32_t index = 0; index < pAllocateInfo->commandBufferCount; index++) {
+ std::lock_guard<std::mutex> lock(command_pool_lock);
+ command_pool_map[pCommandBuffers[index]] = pAllocateInfo->commandPool;
}
}
@@ -588,23 +547,15 @@ void ThreadSafety::PreCallRecordFreeCommandBuffers(VkDevice device, VkCommandPoo
const bool lockCommandPool = false; // pool is already directly locked
StartReadObject(device);
StartWriteObject(commandPool);
- if(pCommandBuffers) {
- // Even though we're immediately "finishing" below, we still are testing for concurrency with any call in process
- // so this isn't a no-op
- for (uint32_t index = 0; index < commandBufferCount; index++) {
- StartWriteObject(pCommandBuffers[index], lockCommandPool);
- }
- // The driver may immediately reuse command buffers in another thread.
- // These updates need to be done before calling down to the driver.
- for (uint32_t index = 0; index < commandBufferCount; index++) {
- FinishWriteObject(pCommandBuffers[index], lockCommandPool);
- }
- // Holding the lock for the shortest time while we update the map
- for (uint32_t index = 0; index < commandBufferCount; index++) {
- auto &bucket = GetBucket(pCommandBuffers[index]);
- std::lock_guard<std::mutex> lock(bucket.command_pool_lock);
- bucket.command_pool_map.erase(pCommandBuffers[index]);
- }
+ for (uint32_t index = 0; index < commandBufferCount; index++) {
+ StartWriteObject(pCommandBuffers[index], lockCommandPool);
+ }
+ // The driver may immediately reuse command buffers in another thread.
+ // These updates need to be done before calling down to the driver.
+ for (uint32_t index = 0; index < commandBufferCount; index++) {
+ FinishWriteObject(pCommandBuffers[index], lockCommandPool);
+ std::lock_guard<std::mutex> lock(command_pool_lock);
+ command_pool_map.erase(pCommandBuffers[index]);
}
}
@@ -716,10 +667,26 @@ void ThreadSafety::PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapch
def paramIsPointer(self, param):
ispointer = False
for elem in param:
- if elem.tag == 'type' and elem.tail is not None and '*' in elem.tail:
+ if ((elem.tag is not 'type') and (elem.tail is not None)) and '*' in elem.tail:
ispointer = True
return ispointer
+ # Check if an object is a non-dispatchable handle
+ def isHandleTypeNonDispatchable(self, handletype):
+ handle = self.registry.tree.find("types/type/[name='" + handletype + "'][@category='handle']")
+ if handle is not None and handle.find('type').text == 'VK_DEFINE_NON_DISPATCHABLE_HANDLE':
+ return True
+ else:
+ return False
+
+ # Check if an object is a dispatchable handle
+ def isHandleTypeDispatchable(self, handletype):
+ handle = self.registry.tree.find("types/type/[name='" + handletype + "'][@category='handle']")
+ if handle is not None and handle.find('type').text == 'VK_DEFINE_HANDLE':
+ return True
+ else:
+ return False
+
def makeThreadUseBlock(self, cmd, functionprefix):
"""Generate C function pointer typedef for <command> Element"""
paramdecl = ''
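The two isHandleType helpers above classify a handle by which macro declares it in the registry. For reference, those macros are (approximately, as declared in vulkan_core.h): dispatchable handles are always opaque struct pointers, while non-dispatchable handles collapse to a plain 64-bit integer on 32-bit targets, which is why the generator keeps the two sets separate:

    #define VK_DEFINE_HANDLE(object) typedef struct object##_T* object;

    #if defined(__LP64__) || defined(_WIN64) /* ...or another 64-bit target check... */
        #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef struct object##_T *object;
    #else
        #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef uint64_t object;
    #endif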
@@ -733,19 +700,16 @@ void ThreadSafety::PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapch
externsync = param.attrib.get('externsync')
if externsync == 'true':
if self.paramIsArray(param):
- paramdecl += 'if (' + paramname.text + ') {\n'
- paramdecl += ' for (uint32_t index=0; index < ' + param.attrib.get('len') + '; index++) {\n'
- paramdecl += ' ' + functionprefix + 'WriteObject(' + paramname.text + '[index]);\n'
- paramdecl += ' }\n'
+ paramdecl += 'for (uint32_t index=0;index<' + param.attrib.get('len') + ';index++) {\n'
+ paramdecl += ' ' + functionprefix + 'WriteObject(' + paramname.text + '[index]);\n'
paramdecl += '}\n'
else:
paramdecl += functionprefix + 'WriteObject(' + paramname.text + ');\n'
elif (param.attrib.get('externsync')):
if self.paramIsArray(param):
# Externsync can list pointers to arrays of members to synchronize
- paramdecl += 'if (' + paramname.text + ') {\n'
- paramdecl += ' for (uint32_t index=0; index < ' + param.attrib.get('len') + '; index++) {\n'
- second_indent = ' '
+ paramdecl += 'for (uint32_t index=0;index<' + param.attrib.get('len') + ';index++) {\n'
+ second_indent = ''
for member in externsync.split(","):
# Replace first empty [] in member name with index
element = member.replace('[]','[index]',1)
@@ -753,22 +717,21 @@ void ThreadSafety::PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapch
# TODO: These null checks can be removed if threading ends up behind parameter
# validation in layer order
element_ptr = element.split('[]')[0]
- paramdecl += ' if (' + element_ptr + ') {\n'
+ paramdecl += ' if (' + element_ptr + ') {\n'
# Replace any second empty [] in element name with inner array index based on mapping array
# names like "pSomeThings[]" to "someThingCount" array size. This could be more robust by
# mapping a param member name to a struct type and "len" attribute.
limit = element[0:element.find('s[]')] + 'Count'
dotp = limit.rfind('.p')
limit = limit[0:dotp+1] + limit[dotp+2:dotp+3].lower() + limit[dotp+3:]
- paramdecl += ' for (uint32_t index2=0; index2 < '+limit+'; index2++) {\n'
+ paramdecl += ' for(uint32_t index2=0;index2<'+limit+';index2++) {\n'
element = element.replace('[]','[index2]')
- second_indent = ' '
+ second_indent = ' '
paramdecl += ' ' + second_indent + functionprefix + 'WriteObject(' + element + ');\n'
- paramdecl += ' }\n'
paramdecl += ' }\n'
+ paramdecl += ' }\n'
else:
paramdecl += ' ' + second_indent + functionprefix + 'WriteObject(' + element + ');\n'
- paramdecl += ' }\n'
paramdecl += '}\n'
else:
# externsync can list members to synchronize
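For the array case above, an externsync value such as pBindInfo[].pWaitSemaphores[] (member names here follow vkQueueBindSparse and are illustrative) is expanded as follows: the first [] becomes [index], the null-check pointer is everything before the remaining [], and the inner loop bound is derived by cutting the name at 's[]', appending 'Count', and lower-casing the member's leading 'p', so pWaitSemaphores[] yields waitSemaphoreCount. The emitted Start/Finish block then looks roughly like:

    for (uint32_t index=0;index<bindInfoCount;index++) {
        if (pBindInfo[index].pWaitSemaphores) {
            for(uint32_t index2=0;index2<pBindInfo[index].waitSemaphoreCount;index2++) {
                StartWriteObject(pBindInfo[index].pWaitSemaphores[index2]);
            }
        }
    }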
@@ -782,7 +745,7 @@ void ThreadSafety::PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapch
paramtype = paramtype.text
else:
paramtype = 'None'
- if paramtype in self.handle_types and paramtype != 'VkPhysicalDevice':
+ if (self.isHandleTypeDispatchable(paramtype) or self.isHandleTypeNonDispatchable(paramtype)) and paramtype != 'VkPhysicalDevice':
if self.paramIsArray(param) and ('pPipelines' != paramname.text):
# Add pointer dereference for array counts that are pointer values
dereference = ''
@@ -791,10 +754,8 @@ void ThreadSafety::PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapch
if self.paramIsPointer(candidate):
dereference = '*'
param_len = str(param.attrib.get('len')).replace("::", "->")
- paramdecl += 'if (' + paramname.text + ') {\n'
- paramdecl += ' for (uint32_t index = 0; index < ' + dereference + param_len + '; index++) {\n'
- paramdecl += ' ' + functionprefix + 'ReadObject(' + paramname.text + '[index]);\n'
- paramdecl += ' }\n'
+ paramdecl += 'for (uint32_t index = 0; index < ' + dereference + param_len + '; index++) {\n'
+ paramdecl += ' ' + functionprefix + 'ReadObject(' + paramname.text + '[index]);\n'
paramdecl += '}\n'
elif not self.paramIsPointer(param):
# Pointer params are often being created.
@@ -831,10 +792,7 @@ void ThreadSafety::PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapch
return paramdecl
def beginFile(self, genOpts):
OutputGenerator.beginFile(self, genOpts)
-
- # Initialize members that require the tree
- self.handle_types = GetHandleTypes(self.registry.tree)
-
+ #
# TODO: LUGMAL -- remove this and add our copyright
# User-supplied prefix text, if any (list of strings)
write(self.inline_copyright_message, file=self.outFile)
@@ -860,7 +818,7 @@ void ThreadSafety::PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapch
counter_class_instances = ''
counter_class_bodies = ''
- for obj in sorted(self.non_dispatchable_types):
+ for obj in self.non_dispatchable_types:
counter_class_defs += ' counter<%s> c_%s;\n' % (obj, obj)
if obj in self.object_to_debug_report_type:
obj_type = self.object_to_debug_report_type[obj]
@@ -908,8 +866,11 @@ void ThreadSafety::PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapch
# Type generation
def genType(self, typeinfo, name, alias):
OutputGenerator.genType(self, typeinfo, name, alias)
- if self.handle_types.IsNonDispatchable(name):
- self.non_dispatchable_types.add(name)
+ type_elem = typeinfo.elem
+ category = type_elem.get('category')
+ if category == 'handle':
+ if self.isHandleTypeNonDispatchable(name):
+ self.non_dispatchable_types.add(name)
#
# Struct (e.g. C "struct" type) generation.
# This is a special case of the <type> tag where the contents are
diff --git a/scripts/update_deps.py b/scripts/update_deps.py
index f1fe36dd9..5d00eb5bd 100755
--- a/scripts/update_deps.py
+++ b/scripts/update_deps.py
@@ -343,7 +343,7 @@ class GoodRepo(object):
def Checkout(self):
print('Checking out {n} in {d}'.format(n=self.name, d=self.repo_dir))
if self._args.do_clean_repo:
- shutil.rmtree(self.repo_dir, ignore_errors=True)
+ shutil.rmtree(self.repo_dir)
if not os.path.exists(os.path.join(self.repo_dir, '.git')):
self.Clone()
self.Fetch()
@@ -415,12 +415,6 @@ class GoodRepo(object):
cmake_cmd.append('-A')
cmake_cmd.append('x64')
- # Apply a generator, if one is specified. This can be used to supply
- # a specific generator for the dependent repositories to match
- # that of the main repository.
- if self._args.generator is not None:
- cmake_cmd.extend(['-G', self._args.generator])
-
if VERBOSE:
print("CMake command: " + " ".join(cmake_cmd))
@@ -441,15 +435,8 @@ class GoodRepo(object):
# Speed up the build.
if platform.system() == 'Linux' or platform.system() == 'Darwin':
cmake_cmd.append('--')
- num_make_jobs = multiprocessing.cpu_count()
- env_make_jobs = os.environ.get('MAKE_JOBS', None)
- if env_make_jobs is not None:
- try:
- num_make_jobs = min(num_make_jobs, int(env_make_jobs))
- except ValueError:
- print('warning: environment variable MAKE_JOBS has non-numeric value "{}". '
- 'Using {} (CPU count) instead.'.format(env_make_jobs, num_make_jobs))
- cmake_cmd.append('-j{}'.format(num_make_jobs))
+ cmake_cmd.append('-j{ncpu}'
+ .format(ncpu=multiprocessing.cpu_count()))
if platform.system() == 'Windows':
cmake_cmd.append('--')
cmake_cmd.append('/maxcpucount')
@@ -607,11 +594,6 @@ def main():
type=str.lower,
help="Set build files configuration",
default='debug')
- parser.add_argument(
- '--generator',
- dest='generator',
- help="Set the CMake generator",
- default=None)
args = parser.parse_args()
save_cwd = os.getcwd()
@@ -646,7 +628,7 @@ def main():
'build_platforms',
'repo_dir',
'on_build_platform')
- repo_dict[repo.name] = {field: getattr(repo, field) for field in field_list}
+ repo_dict[repo.name] = {field: getattr(repo, field) for field in field_list};
# If the repo has a CI whitelist, skip the repo unless
# one of the CI's environment variable is set to true.
diff --git a/scripts/vk_validation_stats.py b/scripts/vk_validation_stats.py
index 8f6c87fcc..795a0f396 100755
--- a/scripts/vk_validation_stats.py
+++ b/scripts/vk_validation_stats.py
@@ -21,16 +21,14 @@
# Author: Shannon McPherson <shannon@lunarg.com>
import argparse
-import common_codegen
-import csv
-import glob
-import html
-import json
-import operator
import os
+import sys
+import operator
import platform
+import json
import re
-import sys
+import csv
+import html
import time
from collections import defaultdict
@@ -41,7 +39,8 @@ html_db = False
txt_filename = "validation_error_database.txt"
csv_filename = "validation_error_database.csv"
html_filename = "validation_error_database.html"
-header_filename = "vk_validation_error_messages.h"
+header_filename = "../layers/vk_validation_error_messages.h"
+test_file = '../tests/layer_validation_tests.cpp'
vuid_prefixes = ['VUID-', 'UNASSIGNED-']
# Hard-coded flags that could be command line args, if we decide that's useful
@@ -49,20 +48,25 @@ vuid_prefixes = ['VUID-', 'UNASSIGNED-']
dealias_khr = True
ignore_unassigned = True # These are not found in layer code unless they appear explicitly (most don't), so produce false positives
-layer_source_files = [common_codegen.repo_relative(path) for path in [
- 'layers/buffer_validation.cpp',
- 'layers/core_validation.cpp',
- 'layers/descriptor_sets.cpp',
- 'layers/drawdispatch.cpp',
- 'layers/parameter_validation_utils.cpp',
- 'layers/object_tracker_utils.cpp',
- 'layers/shader_validation.cpp',
- 'layers/stateless_validation.h',
- 'layers/generated/parameter_validation.cpp',
- 'layers/generated/object_tracker.cpp',
-]]
-
-test_source_files = glob.glob(os.path.join(common_codegen.repo_relative('tests'), '*.cpp'))
+generated_layer_source_directories = [
+'build',
+'dbuild',
+'release',
+'../build/Vulkan-ValidationLayers/'
+]
+generated_layer_source_files = [
+'parameter_validation.cpp',
+'object_tracker.cpp',
+]
+layer_source_files = [
+'../layers/buffer_validation.cpp',
+'../layers/core_validation.cpp',
+'../layers/descriptor_sets.cpp',
+'../layers/parameter_validation_utils.cpp',
+'../layers/object_tracker_utils.cpp',
+'../layers/shader_validation.cpp',
+'../layers/stateless_validation.h'
+]
# This needs to be updated as new extensions roll in
khr_aliases = {
@@ -144,7 +148,6 @@ def printHelp():
print (" [ -csv [ <csv_out_filename>] ]")
print (" [ -html [ <html_out_filename>] ]")
print (" [ -export_header ]")
- print (" [ -summary ]")
print (" [ -verbose ]")
print (" [ -help ]")
print ("\n The vk_validation_stats script parses validation layer source files to")
@@ -163,7 +166,6 @@ def printHelp():
print (" -html [filename] output the error database in html to <html_database_filename>,")
print (" defaults to 'validation_error_database.html'")
print (" -export_header export a new VUID error text header file to <%s>" % header_filename)
- print (" -summary output summary of VUID coverage")
print (" -verbose show your work (to stdout)")
class ValidationJSON:
@@ -234,8 +236,10 @@ class ValidationJSON:
class ValidationSource:
- def __init__(self, source_file_list):
+ def __init__(self, source_file_list, generated_source_file_list, generated_source_directories):
self.source_files = source_file_list
+ self.generated_source_files = generated_source_file_list
+ self.generated_source_dirs = generated_source_directories
self.vuid_count_dict = {} # dict of vuid values to the count of how much they're used, and location of where they're used
self.duplicated_checks = 0
self.explicit_vuids = set()
@@ -243,6 +247,22 @@ class ValidationSource:
self.unassigned_vuids = set()
self.all_vuids = set()
+ if len(self.generated_source_files) > 0:
+ qualified_paths = []
+ for source in self.generated_source_files:
+ for build_dir in self.generated_source_dirs:
+ filepath = '../%s/layers/%s' % (build_dir, source)
+ if os.path.isfile(filepath):
+ qualified_paths.append(filepath)
+ break
+ if len(self.generated_source_files) != len(qualified_paths):
+ print("Error: Unable to locate one or more of the following source files in the %s directories" % (", ".join(generated_source_directories)))
+ print(self.generated_source_files)
+ print("Failed to locate one or more codegen files in layer source code - cannot proceed.")
+ exit(1)
+ else:
+ self.source_files.extend(qualified_paths)
+
def parse(self):
prepend = None
for sf in self.source_files:
@@ -257,8 +277,6 @@ class ValidationSource:
line = prepend[:-2] + line.lstrip().lstrip('"') # join lines skipping CR, whitespace and trailing/leading quote char
prepend = None
if any(prefix in line for prefix in vuid_prefixes):
- # Replace the '(' of lines containing validation helper functions with ' ' to make them easier to parse
- line = line.replace("(", " ")
line_list = line.split()
# A VUID string that has been broken by clang will start with a vuid prefix and end with -, and will be last in the list
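The prepend handling above rejoins string literals that clang-format has wrapped in the middle of a VUID, so a hypothetical layer-source fragment like the following (the call shape mirrors the log_msg uses earlier in this diff) is scanned as the single token VUID-vkCmdDraw-commandBuffer-recording rather than as two partial strings:

    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, objectType, (uint64_t)(object),
                    "VUID-vkCmdDraw-"
                    "commandBuffer-recording",
                    "commandBuffer must be in the recording state");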
@@ -457,6 +475,7 @@ class OutputDatabase:
self.vt = val_tests
self.header_version = "/* THIS FILE IS GENERATED - DO NOT EDIT (scripts/vk_validation_stats.py) */"
self.header_version += "\n/* Vulkan specification version: %s */" % val_json.apiversion
+ self.header_version += "\n/* Header generated: %s */\n" % time.strftime('%Y-%m-%d %H:%M:%S')
self.header_preamble = """
/*
* Vulkan
@@ -573,55 +592,21 @@ static const vuid_spec_text_pair vuid_spec_text[] = {
hfile.write('</table>\n</body>\n</html>\n')
def export_header(self):
- if verbose_mode:
- print("\n Exporting header file to: %s" % header_filename)
+ print("\n Exporting header file to: %s" % header_filename)
with open (header_filename, 'w') as hfile:
hfile.write(self.header_version)
hfile.write(self.header_preamble)
vuid_list = list(self.vj.all_vuids)
vuid_list.sort()
- cmd_dict = {}
for vuid in vuid_list:
db_entry = self.vj.vuid_db[vuid][0]
- db_text = db_entry['text'].strip(' ')
- hfile.write(' {"%s", "%s (%s#%s)"},\n' % (vuid, db_text, self.spec_url, vuid))
+ hfile.write(' {"%s", "%s (%s#%s)"},\n' % (vuid, db_entry['text'].strip(' '), self.spec_url, vuid))
# For multiply-defined VUIDs, include versions with extension appended
if len(self.vj.vuid_db[vuid]) > 1:
for db_entry in self.vj.vuid_db[vuid]:
- hfile.write(' {"%s[%s]", "%s (%s#%s)"},\n' % (vuid, db_entry['ext'].strip(' '), db_text, self.spec_url, vuid))
- if 'commandBuffer must be in the recording state' in db_text:
- cmd_dict[vuid] = db_text
+ hfile.write(' {"%s[%s]", "%s (%s#%s)"},\n' % (vuid, db_entry['ext'].strip(' '), db_entry['text'].strip(' '), self.spec_url, vuid))
hfile.write(self.header_postamble)
- # Generate the information for validating recording state VUID's
- cmd_prefix = 'prefix##'
- cmd_regex = re.compile(r'VUID-vk(Cmd|End)(\w+)')
- cmd_vuid_vector = [' "VUID_Undefined"']
- cmd_name_vector = [ ' "Command_Undefined"' ]
- cmd_enum = [' ' + cmd_prefix + 'NONE = 0']
-
- cmd_ordinal = 1
- for vuid, db_text in sorted(cmd_dict.items()):
- cmd_match = cmd_regex.match(vuid)
- if cmd_match.group(1) == "End":
- end = "END"
- else:
- end = ""
- cmd_name_vector.append(' "vk'+ cmd_match.group(1) + cmd_match.group(2) + '"')
- cmd_name = cmd_prefix + end + cmd_match.group(2).upper()
- cmd_enum.append(' {} = {}'.format(cmd_name, cmd_ordinal))
- cmd_ordinal += 1
- cmd_vuid_vector.append(' "{}"'.format(vuid))
-
- hfile.write('\n// Defines to allow creating "must be recording" meta data\n')
- cmd_enum.append(' {}RANGE_SIZE = {}'.format(cmd_prefix, cmd_ordinal))
- cmd_enum_string = '#define VUID_CMD_ENUM_LIST(prefix)\\\n' + ',\\\n'.join(cmd_enum) + '\n\n'
- hfile.write(cmd_enum_string)
- cmd_name_list_string = '#define VUID_CMD_NAME_LIST\\\n' + ',\\\n'.join(cmd_name_vector) + '\n\n'
- hfile.write(cmd_name_list_string)
- vuid_vector_string = '#define VUID_MUST_BE_RECORDING_LIST\\\n' + ',\\\n'.join(cmd_vuid_vector) + '\n'
- hfile.write(vuid_vector_string)
-
def main(argv):
global verbose_mode
global txt_filename
@@ -635,7 +620,6 @@ def main(argv):
csv_out = False
html_out = False
header_out = False
- show_summary = False
if (1 > len(argv)):
printHelp()
@@ -676,8 +660,6 @@ def main(argv):
header_out = True
elif (arg in ['-verbose']):
verbose_mode = True
- elif (arg in ['-summary']):
- show_summary = True
elif (arg in ['-help', '-h']):
printHelp()
sys.exit()
@@ -706,7 +688,7 @@ def main(argv):
print(" with extension: %s" % ext['ext'])
# Parse layer source files
- val_source = ValidationSource(layer_source_files)
+ val_source = ValidationSource(layer_source_files, generated_layer_source_files, generated_layer_source_directories)
val_source.parse()
exp_checks = len(val_source.explicit_vuids)
imp_checks = len(val_source.implicit_vuids)
@@ -719,33 +701,32 @@ def main(argv):
print(" %d checks are implemented more that once" % val_source.duplicated_checks)
# Parse test files
- val_tests = ValidationTests(test_source_files)
+ val_tests = ValidationTests([test_file, ])
val_tests.parse()
exp_tests = len(val_tests.explicit_vuids)
imp_tests = len(val_tests.implicit_vuids)
all_tests = len(val_tests.all_vuids)
if verbose_mode:
- print("Found %d unique error vuids in test source code." % all_tests)
+ print("Found %d unique error vuids in test file %s." % (all_tests, test_file))
print(" %d explicit" % exp_tests)
print(" %d implicit" % imp_tests)
print(" %d unassigned" % len(val_tests.unassigned_vuids))
# Process stats
- if show_summary:
- print("\nValidation Statistics (using validusage.json version %s)" % val_json.apiversion)
- print(" VUIDs defined in JSON file: %04d explicit, %04d implicit, %04d total." % (exp_json, imp_json, all_json))
- print(" VUIDs checked in layer code: %04d explicit, %04d implicit, %04d total." % (exp_checks, imp_checks, all_checks))
- print(" VUIDs tested in layer tests: %04d explicit, %04d implicit, %04d total." % (exp_tests, imp_tests, all_tests))
-
- print("\nVUID check coverage")
- print(" Explicit VUIDs checked: %.1f%% (%d checked vs %d defined)" % ((100.0 * exp_checks / exp_json), exp_checks, exp_json))
- print(" Implicit VUIDs checked: %.1f%% (%d checked vs %d defined)" % ((100.0 * imp_checks / imp_json), imp_checks, imp_json))
- print(" Overall VUIDs checked: %.1f%% (%d checked vs %d defined)" % ((100.0 * all_checks / all_json), all_checks, all_json))
-
- print("\nVUID test coverage")
- print(" Explicit VUIDs tested: %.1f%% (%d tested vs %d checks)" % ((100.0 * exp_tests / exp_checks), exp_tests, exp_checks))
- print(" Implicit VUIDs tested: %.1f%% (%d tested vs %d checks)" % ((100.0 * imp_tests / imp_checks), imp_tests, imp_checks))
- print(" Overall VUIDs tested: %.1f%% (%d tested vs %d checks)" % ((100.0 * all_tests / all_checks), all_tests, all_checks))
+ print("\nValidation Statistics (using validusage.json version %s)" % val_json.apiversion)
+ print(" VUIDs defined in JSON file: %04d explicit, %04d implicit, %04d total." % (exp_json, imp_json, all_json))
+ print(" VUIDs checked in layer code: %04d explicit, %04d implicit, %04d total." % (exp_checks, imp_checks, all_checks))
+ print(" VUIDs tested in layer tests: %04d explicit, %04d implicit, %04d total." % (exp_tests, imp_tests, all_tests))
+
+ print("\nVUID check coverage")
+ print(" Explicit VUIDs checked: %.1f%% (%d checked vs %d defined)" % ((100.0 * exp_checks / exp_json), exp_checks, exp_json))
+ print(" Implicit VUIDs checked: %.1f%% (%d checked vs %d defined)" % ((100.0 * imp_checks / imp_json), imp_checks, imp_json))
+ print(" Overall VUIDs checked: %.1f%% (%d checked vs %d defined)" % ((100.0 * all_checks / all_json), all_checks, all_json))
+
+ print("\nVUID test coverage")
+ print(" Explicit VUIDs tested: %.1f%% (%d tested vs %d checks)" % ((100.0 * exp_tests / exp_checks), exp_tests, exp_checks))
+ print(" Implicit VUIDs tested: %.1f%% (%d tested vs %d checks)" % ((100.0 * imp_tests / imp_checks), imp_tests, imp_checks))
+ print(" Overall VUIDs tested: %.1f%% (%d tested vs %d checks)" % ((100.0 * all_tests / all_checks), all_tests, all_checks))
# Report status of a single VUID
if len(get_vuid_status) > 1:
diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt
index 57b247348..8842edc0d 100644
--- a/tests/CMakeLists.txt
+++ b/tests/CMakeLists.txt
@@ -15,6 +15,8 @@
# limitations under the License.
# ~~~
+set(GTEST_LOCATION ${CMAKE_CURRENT_SOURCE_DIR}/../external/googletest)
+
# Needed to make structure definitions match with glslang libraries
add_definitions(-DNV_EXTENSIONS -DAMD_EXTENSIONS)
@@ -60,8 +62,7 @@ endif()
set(LIBGLM_INCLUDE_DIR ${PROJECT_SOURCE_DIR}/libs)
-set(COMMON_CPP vklayertests_pipeline_shader.cpp vklayertests_buffer_image_memory_sampler.cpp vklayertests_others.cpp vklayertests_descriptor_renderpass_framebuffer.cpp
- vklayertests_command.cpp vklayertests_imageless_framebuffer.cpp vkpositivelayertests.cpp vkrenderframework.cpp vktestbinding.cpp vktestframework.cpp test_environment.cpp)
+set(COMMON_CPP vkrenderframework.cpp vktestbinding.cpp vktestframework.cpp test_environment.cpp)
if(NOT WIN32)
# extra setup for out-of-tree builds
@@ -93,13 +94,6 @@ if(NOT TARGET vulkan)
message(STATUS "VULKAN_LOADER_INSTALL_DIR specified, using find_package to locate Vulkan")
elseif(ENV{VULKAN_LOADER_INSTALL_DIR})
message(STATUS "VULKAN_LOADER_INSTALL_DIR environment variable specified, using find_package to locate Vulkan")
- else()
- set(LOCAL_LOADER TRUE)
- if(CMAKE_CL_64)
- set(VULKAN_LOADER_INSTALL_DIR "${CMAKE_CURRENT_SOURCE_DIR}/../external/x64")
- else()
- set(VULKAN_LOADER_INSTALL_DIR "${CMAKE_CURRENT_SOURCE_DIR}/../external/x86")
- endif()
endif()
set(
CMAKE_PREFIX_PATH
@@ -113,18 +107,16 @@ add_executable(vk_layer_validation_tests
layer_validation_tests.cpp
../layers/vk_format_utils.cpp
../layers/convert_to_renderpass2.cpp
- ../layers/generated/vk_safe_struct.cpp
+ ${PROJECT_BINARY_DIR}/vk_safe_struct.cpp
${COMMON_CPP})
-add_test(NAME vk_layer_validation_tests COMMAND vk_layer_validation_tests)
-add_dependencies(vk_layer_validation_tests Vulkan::Vulkan VkLayer_khronos_validation VkLayer_khronos_validation-json)
+add_dependencies(vk_layer_validation_tests Vulkan::Vulkan)
if(NOT GTEST_IS_STATIC_LIB)
set_target_properties(vk_layer_validation_tests PROPERTIES COMPILE_DEFINITIONS "GTEST_LINKED_AS_SHARED_LIBRARY=1")
endif()
-# Note that there is no need to add GTEST directories here due to googletest exporting them via the gtest target.
target_include_directories(vk_layer_validation_tests
PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}
+ ${GTEST_LOCATION}/googletest/include
${PROJECT_SOURCE_DIR}/layers
- ${PROJECT_SOURCE_DIR}/layers/generated
${GLSLANG_SPIRV_INCLUDE_DIR}
${SPIRV_TOOLS_INCLUDE_DIR}
${CMAKE_CURRENT_BINARY_DIR}
@@ -132,7 +124,14 @@ target_include_directories(vk_layer_validation_tests
${PROJECT_BINARY_DIR}
${PROJECT_BINARY_DIR}/layers)
add_dependencies(vk_layer_validation_tests
- VkLayer_utils)
+ VkLayer_utils
+ VkLayer_core_validation-json
+ VkLayer_device_profile_api-json
+ VkLayer_object_lifetimes-json
+ VkLayer_stateless_validation-json
+ VkLayer_standard_validation-json
+ VkLayer_thread_safety-json
+ VkLayer_unique_objects-json)
# Specify target_link_libraries
if(WIN32)
@@ -169,7 +168,7 @@ if(WIN32)
COMMAND xcopy /Y /I ${SRC_GTEST_DLLS} ${DST_GTEST_DLLS})
endif()
# Copy the loader shared lib (if supplied) to the test application directory so the test app finds it.
- if(VULKAN_LOADER_INSTALL_DIR AND NOT LOCAL_LOADER)
+ if(VULKAN_LOADER_INSTALL_DIR)
add_custom_command(TARGET vk_layer_validation_tests POST_BUILD
COMMAND ${CMAKE_COMMAND} -E copy ${VULKAN_LOADER_INSTALL_DIR}/bin/vulkan-1.dll
$<TARGET_FILE_DIR:vk_layer_validation_tests>)
diff --git a/tests/layer_validation_tests.cpp b/tests/layer_validation_tests.cpp
index f87ffa46a..19f221cdc 100644
--- a/tests/layer_validation_tests.cpp
+++ b/tests/layer_validation_tests.cpp
@@ -21,14 +21,90 @@
* Author: Dave Houlton <daveh@lunarg.com>
* Author: Jeremy Kniager <jeremyk@lunarg.com>
* Author: Shannon McPherson <shannon@lunarg.com>
- * Author: John Zulauf <jzulauf@lunarg.com>
*/
-#include "cast_utils.h"
-#include "layer_validation_tests.h"
+#ifdef ANDROID
+#include "vulkan_wrapper.h"
+#else
+#define NOMINMAX
+#include <vulkan/vulkan.h>
+#endif
+
+#include "layers/vk_device_profile_api_layer.h"
+
+#if defined(ANDROID) && defined(VALIDATION_APK)
+#include <android/log.h>
+#include <android_native_app_glue.h>
+#endif
+
+#include "icd-spv.h"
+#include "test_common.h"
+#include "vk_layer_config.h"
+#include "vk_format_utils.h"
+#include "vkrenderframework.h"
+#include "vk_typemap_helper.h"
+#include "convert_to_renderpass2.h"
+
+#include <algorithm>
+#include <cmath>
+#include <functional>
+#include <limits>
+#include <memory>
+#include <unordered_set>
+
+//--------------------------------------------------------------------------------------
+// Mesh and VertexFormat Data
+//--------------------------------------------------------------------------------------
+
+const char *kSkipPrefix = " TEST SKIPPED:";
+
+enum BsoFailSelect {
+ BsoFailNone,
+ BsoFailLineWidth,
+ BsoFailDepthBias,
+ BsoFailViewport,
+ BsoFailScissor,
+ BsoFailBlend,
+ BsoFailDepthBounds,
+ BsoFailStencilReadMask,
+ BsoFailStencilWriteMask,
+ BsoFailStencilReference,
+ BsoFailCmdClearAttachments,
+ BsoFailIndexBuffer,
+ BsoFailIndexBufferBadSize,
+ BsoFailIndexBufferBadOffset,
+ BsoFailIndexBufferBadMapSize,
+ BsoFailIndexBufferBadMapOffset
+};
+
+static const char bindStateVertShaderText[] =
+ "#version 450\n"
+ "vec2 vertices[3];\n"
+ "void main() {\n"
+ " vertices[0] = vec2(-1.0, -1.0);\n"
+ " vertices[1] = vec2( 1.0, -1.0);\n"
+ " vertices[2] = vec2( 0.0, 1.0);\n"
+ " gl_Position = vec4(vertices[gl_VertexIndex % 3], 0.0, 1.0);\n"
+ "}\n";
+
+static const char bindStateFragShaderText[] =
+ "#version 450\n"
+ "\n"
+ "layout(location = 0) out vec4 uFragColor;\n"
+ "void main(){\n"
+ " uFragColor = vec4(0,1,0,1);\n"
+ "}\n";
+
+// Static arrays helper
+template <class ElementT, size_t array_size>
+size_t size(ElementT (&)[array_size]) {
+ return array_size;
+}
+
+// Format search helper
VkFormat FindSupportedDepthStencilFormat(VkPhysicalDevice phy) {
- const VkFormat ds_formats[] = {VK_FORMAT_D16_UNORM_S8_UINT, VK_FORMAT_D24_UNORM_S8_UINT, VK_FORMAT_D32_SFLOAT_S8_UINT};
- for (uint32_t i = 0; i < size(ds_formats); ++i) {
+ VkFormat ds_formats[] = {VK_FORMAT_D16_UNORM_S8_UINT, VK_FORMAT_D24_UNORM_S8_UINT, VK_FORMAT_D32_SFLOAT_S8_UINT};
+ for (uint32_t i = 0; i < sizeof(ds_formats); i++) {
VkFormatProperties format_props;
vkGetPhysicalDeviceFormatProperties(phy, ds_formats[i], &format_props);
@@ -39,7 +115,11 @@ VkFormat FindSupportedDepthStencilFormat(VkPhysicalDevice phy) {
return VK_FORMAT_UNDEFINED;
}
-bool ImageFormatIsSupported(VkPhysicalDevice phy, VkFormat format, VkImageTiling tiling, VkFormatFeatureFlags features) {
+// Returns true if *any* requested features are available.
+// Assumption is that the framework can successfully create an image as
+// long as at least one of the feature bits is present (excepting VTX_BUF).
+bool ImageFormatIsSupported(VkPhysicalDevice phy, VkFormat format, VkImageTiling tiling = VK_IMAGE_TILING_OPTIMAL,
+ VkFormatFeatureFlags features = ~VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT) {
VkFormatProperties format_props;
vkGetPhysicalDeviceFormatProperties(phy, format, &format_props);
VkFormatFeatureFlags phy_features =
@@ -47,6 +127,7 @@ bool ImageFormatIsSupported(VkPhysicalDevice phy, VkFormat format, VkImageTiling
return (0 != (phy_features & features));
}
+// Returns true if format and *all* requested features are available.
bool ImageFormatAndFeaturesSupported(VkPhysicalDevice phy, VkFormat format, VkImageTiling tiling, VkFormatFeatureFlags features) {
VkFormatProperties format_props;
vkGetPhysicalDeviceFormatProperties(phy, format, &format_props);
@@ -55,6 +136,7 @@ bool ImageFormatAndFeaturesSupported(VkPhysicalDevice phy, VkFormat format, VkIm
return (features == (phy_features & features));
}
+// Returns true if format and *all* requested features are available.
bool ImageFormatAndFeaturesSupported(const VkInstance inst, const VkPhysicalDevice phy, const VkImageCreateInfo info,
const VkFormatFeatureFlags features) {
// Verify physical device support of format features
@@ -100,8 +182,253 @@ bool ImageFormatAndFeaturesSupported(const VkInstance inst, const VkPhysicalDevi
return true;
}
-VKAPI_ATTR VkBool32 VKAPI_CALL myDbgFunc(VkFlags msgFlags, VkDebugReportObjectTypeEXT objType, uint64_t srcObject, size_t location,
- int32_t msgCode, const char *pLayerPrefix, const char *pMsg, void *pUserData) {
+// Validation report callback prototype
+static VKAPI_ATTR VkBool32 VKAPI_CALL myDbgFunc(VkFlags msgFlags, VkDebugReportObjectTypeEXT objType, uint64_t srcObject,
+ size_t location, int32_t msgCode, const char *pLayerPrefix, const char *pMsg,
+ void *pUserData);
+
+// Simple sane SamplerCreateInfo boilerplate
+static VkSamplerCreateInfo SafeSaneSamplerCreateInfo() {
+ VkSamplerCreateInfo sampler_create_info = {};
+ sampler_create_info.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
+ sampler_create_info.pNext = nullptr;
+ sampler_create_info.magFilter = VK_FILTER_NEAREST;
+ sampler_create_info.minFilter = VK_FILTER_NEAREST;
+ sampler_create_info.mipmapMode = VK_SAMPLER_MIPMAP_MODE_NEAREST;
+ sampler_create_info.addressModeU = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
+ sampler_create_info.addressModeV = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
+ sampler_create_info.addressModeW = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
+ sampler_create_info.mipLodBias = 0.0;
+ sampler_create_info.anisotropyEnable = VK_FALSE;
+ sampler_create_info.maxAnisotropy = 1.0;
+ sampler_create_info.compareEnable = VK_FALSE;
+ sampler_create_info.compareOp = VK_COMPARE_OP_NEVER;
+ sampler_create_info.minLod = 0.0;
+ sampler_create_info.maxLod = 16.0;
+ sampler_create_info.borderColor = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE;
+ sampler_create_info.unnormalizedCoordinates = VK_FALSE;
+
+ return sampler_create_info;
+}
+
+// Helper for checking createRenderPass2 support and adding related extensions.
+static bool CheckCreateRenderPass2Support(VkRenderFramework *renderFramework, std::vector<const char *> &device_extension_names) {
+ if (renderFramework->DeviceExtensionSupported(renderFramework->gpu(), nullptr, VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME)) {
+ device_extension_names.push_back(VK_KHR_MULTIVIEW_EXTENSION_NAME);
+ device_extension_names.push_back(VK_KHR_MAINTENANCE2_EXTENSION_NAME);
+ device_extension_names.push_back(VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME);
+ return true;
+ }
+ return false;
+}
+
+// Dependent "false" type for the static assert, as GCC will evaluate
+// non-dependent static_asserts even for non-instantiated templates
+template <typename T>
+struct AlwaysFalse : std::false_type {};
+
+// Helpers to get nearest greater or smaller value (of float) -- useful for testing the boundary cases of Vulkan limits
+template <typename T>
+T NearestGreater(const T from) {
+ using Lim = std::numeric_limits<T>;
+ const auto positive_direction = Lim::has_infinity ? Lim::infinity() : Lim::max();
+
+ return std::nextafter(from, positive_direction);
+}
+
+template <typename T>
+T NearestSmaller(const T from) {
+ using Lim = std::numeric_limits<T>;
+ const auto negative_direction = Lim::has_infinity ? -Lim::infinity() : Lim::lowest();
+
+ return std::nextafter(from, negative_direction);
+}
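// Example (illustrative) of how the two helpers above are meant to be used from a test body,
// probing a single float device limit from both sides:
//     VkPhysicalDeviceProperties props = {};
//     vkGetPhysicalDeviceProperties(gpu(), &props);
//     const float ok_bias  = NearestSmaller(props.limits.maxSamplerLodBias);  // just inside the limit
//     const float bad_bias = NearestGreater(props.limits.maxSamplerLodBias);  // just outside the limit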
+
+// ErrorMonitor Usage:
+//
+// Call SetDesiredFailureMsg with a string to be compared against all
+// encountered log messages, or a validation error enum identifying
+// desired error message. Passing NULL or VALIDATION_ERROR_MAX_ENUM
+// will match all log messages. logMsg will return true for skipCall
+// only if msg is matched or NULL.
+//
+// Call VerifyFound to determine if all desired failure messages
+// were encountered. Call VerifyNotFound to determine if any unexpected
+// failure was encountered.
+class ErrorMonitor {
+ public:
+ ErrorMonitor() {
+ test_platform_thread_create_mutex(&mutex_);
+ test_platform_thread_lock_mutex(&mutex_);
+ Reset();
+ test_platform_thread_unlock_mutex(&mutex_);
+ }
+
+ ~ErrorMonitor() { test_platform_thread_delete_mutex(&mutex_); }
+
+ // Set monitor to pristine state
+ void Reset() {
+ message_flags_ = VK_DEBUG_REPORT_ERROR_BIT_EXT;
+ bailout_ = NULL;
+ message_found_ = VK_FALSE;
+ failure_message_strings_.clear();
+ desired_message_strings_.clear();
+ ignore_message_strings_.clear();
+ other_messages_.clear();
+ }
+
+ // ErrorMonitor will look for an error message containing the specified string(s)
+ void SetDesiredFailureMsg(const VkFlags msgFlags, const std::string msg) { SetDesiredFailureMsg(msgFlags, msg.c_str()); }
+ void SetDesiredFailureMsg(const VkFlags msgFlags, const char *const msgString) {
+ test_platform_thread_lock_mutex(&mutex_);
+ desired_message_strings_.insert(msgString);
+ message_flags_ |= msgFlags;
+ test_platform_thread_unlock_mutex(&mutex_);
+ }
+
+ // ErrorMonitor will look for an error message containing the specified string(s)
+ template <typename Iter>
+ void SetDesiredFailureMsg(const VkFlags msgFlags, Iter iter, const Iter end) {
+ for (; iter != end; ++iter) {
+ SetDesiredFailureMsg(msgFlags, *iter);
+ }
+ }
+
+ // Set an error that the error monitor will ignore. Do not use this function if you are creating a new test.
+ // TODO: This is stopgap to block new unexpected errors from being introduced. The long-term goal is to remove the use of this
+ // function and its definition.
+ void SetUnexpectedError(const char *const msg) {
+ test_platform_thread_lock_mutex(&mutex_);
+
+ ignore_message_strings_.emplace_back(msg);
+
+ test_platform_thread_unlock_mutex(&mutex_);
+ }
+
+ VkBool32 CheckForDesiredMsg(const char *const msgString) {
+ VkBool32 result = VK_FALSE;
+ test_platform_thread_lock_mutex(&mutex_);
+ if (bailout_ != nullptr) {
+ *bailout_ = true;
+ }
+ string errorString(msgString);
+ bool found_expected = false;
+
+ if (!IgnoreMessage(errorString)) {
+ for (auto desired_msg_it = desired_message_strings_.begin(); desired_msg_it != desired_message_strings_.end();
+ ++desired_msg_it) {
+ if ((*desired_msg_it).length() == 0) {
+ // An empty desired_msg string "" indicates a positive test - not expecting an error.
+ // Return true to avoid calling layers/driver with this error.
+ // And don't erase the "" string, so it remains if another error is found.
+ result = VK_TRUE;
+ found_expected = true;
+ message_found_ = true;
+ failure_message_strings_.insert(errorString);
+ } else if (errorString.find(*desired_msg_it) != string::npos) {
+ found_expected = true;
+ failure_message_strings_.insert(errorString);
+ message_found_ = true;
+ result = VK_TRUE;
+ // Remove a maximum of one failure message from the set
+ // Multiset mutation is acceptable because `break` causes flow of control to exit the for loop
+ desired_message_strings_.erase(desired_msg_it);
+ break;
+ }
+ }
+
+ if (!found_expected) {
+ printf("Unexpected: %s\n", msgString);
+ other_messages_.push_back(errorString);
+ }
+ }
+
+ test_platform_thread_unlock_mutex(&mutex_);
+ return result;
+ }
+
+ vector<string> GetOtherFailureMsgs() const { return other_messages_; }
+
+ VkDebugReportFlagsEXT GetMessageFlags() const { return message_flags_; }
+
+ bool AnyDesiredMsgFound() const { return message_found_; }
+
+ bool AllDesiredMsgsFound() const { return desired_message_strings_.empty(); }
+
+ void SetError(const char *const errorString) {
+ message_found_ = true;
+ failure_message_strings_.insert(errorString);
+ }
+
+ void SetBailout(bool *bailout) { bailout_ = bailout; }
+
+ void DumpFailureMsgs() const {
+ vector<string> otherMsgs = GetOtherFailureMsgs();
+ if (otherMsgs.size()) {
+ cout << "Other error messages logged for this test were:" << endl;
+ for (auto iter = otherMsgs.begin(); iter != otherMsgs.end(); iter++) {
+ cout << " " << *iter << endl;
+ }
+ }
+ }
+
+ // Helpers
+
+ // ExpectSuccess now takes an optional argument allowing a custom combination of debug flags
+ void ExpectSuccess(VkDebugReportFlagsEXT const message_flag_mask = VK_DEBUG_REPORT_ERROR_BIT_EXT) {
+ // Match ANY message matching specified type
+ SetDesiredFailureMsg(message_flag_mask, "");
+ message_flags_ = message_flag_mask; // override mask handling in SetDesired...
+ }
+
+ void VerifyFound() {
+ // Not receiving expected message(s) is a failure. /Before/ throwing, dump any other messages
+ if (!AllDesiredMsgsFound()) {
+ DumpFailureMsgs();
+ for (const auto desired_msg : desired_message_strings_) {
+ ADD_FAILURE() << "Did not receive expected error '" << desired_msg << "'";
+ }
+ }
+ Reset();
+ }
+
+ void VerifyNotFound() {
+ // ExpectSuccess() configured us to match anything. Any error is a failure.
+ if (AnyDesiredMsgFound()) {
+ DumpFailureMsgs();
+ for (const auto msg : failure_message_strings_) {
+ ADD_FAILURE() << "Expected to succeed but got error: " << msg;
+ }
+ }
+ Reset();
+ }
+
+ private:
+ // TODO: This is stopgap to block new unexpected errors from being introduced. The long-term goal is to remove the use of this
+ // function and its definition.
+ bool IgnoreMessage(std::string const &msg) const {
+ if (ignore_message_strings_.empty()) {
+ return false;
+ }
+
+ return std::find_if(ignore_message_strings_.begin(), ignore_message_strings_.end(), [&msg](std::string const &str) {
+ return msg.find(str) != std::string::npos;
+ }) != ignore_message_strings_.end();
+ }
+
+ VkFlags message_flags_;
+ std::unordered_multiset<std::string> desired_message_strings_;
+ std::unordered_multiset<std::string> failure_message_strings_;
+ std::vector<std::string> ignore_message_strings_;
+ vector<string> other_messages_;
+ test_platform_thread_mutex mutex_;
+ bool *bailout_;
+ bool message_found_;
+};
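// A minimal sketch of the flow the "ErrorMonitor Usage" comment above describes
// (the error substring is illustrative; the tests below reach the monitor through m_errorMonitor):
//     m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "THREADING ERROR");
//     ...make the Vulkan call expected to log that message...
//     m_errorMonitor->VerifyFound();
//
//     m_errorMonitor->ExpectSuccess();
//     ...make calls that must not log any error...
//     m_errorMonitor->VerifyNotFound();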
+
+static VKAPI_ATTR VkBool32 VKAPI_CALL myDbgFunc(VkFlags msgFlags, VkDebugReportObjectTypeEXT objType, uint64_t srcObject,
+ size_t location, int32_t msgCode, const char *pLayerPrefix, const char *pMsg,
+ void *pUserData) {
ErrorMonitor *errMonitor = (ErrorMonitor *)pUserData;
if (msgFlags & errMonitor->GetMessageFlags()) {
return errMonitor->CheckForDesiredMsg(pMsg);
@@ -109,6 +436,871 @@ VKAPI_ATTR VkBool32 VKAPI_CALL myDbgFunc(VkFlags msgFlags, VkDebugReportObjectTy
return VK_FALSE;
}
+class VkLayerTest : public VkRenderFramework {
+ public:
+ void VKTriangleTest(BsoFailSelect failCase);
+ void GenericDrawPreparation(VkCommandBufferObj *commandBuffer, VkPipelineObj &pipelineobj, VkDescriptorSetObj &descriptorSet,
+ BsoFailSelect failCase);
+
+ void Init(VkPhysicalDeviceFeatures *features = nullptr, VkPhysicalDeviceFeatures2 *features2 = nullptr,
+ const VkCommandPoolCreateFlags flags = 0, void *instance_pnext = nullptr) {
+ InitFramework(myDbgFunc, m_errorMonitor, instance_pnext);
+ InitState(features, features2, flags);
+ }
+
+ protected:
+ ErrorMonitor *m_errorMonitor;
+ uint32_t m_instance_api_version = 0;
+ uint32_t m_target_api_version = 0;
+
+ public:
+ ErrorMonitor *Monitor() { return m_errorMonitor; }
+ VkCommandBufferObj *CommandBuffer() { return m_commandBuffer; }
+
+ protected:
+ bool m_enableWSI;
+
+ virtual void SetUp() {
+ m_instance_layer_names.clear();
+ m_instance_extension_names.clear();
+ m_device_extension_names.clear();
+
+ // Add default instance extensions to the list
+ m_instance_extension_names.push_back(VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
+
+ // Use Threading layer first to protect others from
+ // ThreadCommandBufferCollision test
+ m_instance_layer_names.push_back("VK_LAYER_GOOGLE_threading");
+ m_instance_layer_names.push_back("VK_LAYER_LUNARG_parameter_validation");
+ m_instance_layer_names.push_back("VK_LAYER_LUNARG_object_tracker");
+ m_instance_layer_names.push_back("VK_LAYER_LUNARG_core_validation");
+ m_instance_layer_names.push_back("VK_LAYER_GOOGLE_unique_objects");
+ if (VkTestFramework::m_devsim_layer) {
+ if (InstanceLayerSupported("VK_LAYER_LUNARG_device_simulation")) {
+ m_instance_layer_names.push_back("VK_LAYER_LUNARG_device_simulation");
+ } else {
+ VkTestFramework::m_devsim_layer = false;
+ printf(" Did not find VK_LAYER_LUNARG_device_simulation layer so it will not be enabled.\n");
+ }
+ }
+ if (m_enableWSI) {
+ m_instance_extension_names.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);
+#ifdef NEED_TO_TEST_THIS_ON_PLATFORM
+#if defined(VK_USE_PLATFORM_ANDROID_KHR)
+ m_instance_extension_names.push_back(VK_KHR_ANDROID_SURFACE_EXTENSION_NAME);
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+#if defined(VK_USE_PLATFORM_WAYLAND_KHR)
+ m_instance_extension_names.push_back(VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME);
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
+#if defined(VK_USE_PLATFORM_WIN32_KHR)
+ m_instance_extension_names.push_back(VK_KHR_WIN32_SURFACE_EXTENSION_NAME);
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#endif // NEED_TO_TEST_THIS_ON_PLATFORM
+#if defined(VK_USE_PLATFORM_XCB_KHR)
+ m_instance_extension_names.push_back(VK_KHR_XCB_SURFACE_EXTENSION_NAME);
+#elif defined(VK_USE_PLATFORM_XLIB_KHR)
+ m_instance_extension_names.push_back(VK_KHR_XLIB_SURFACE_EXTENSION_NAME);
+#endif // VK_USE_PLATFORM_XLIB_KHR
+ }
+
+ this->app_info.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
+ this->app_info.pNext = NULL;
+ this->app_info.pApplicationName = "layer_tests";
+ this->app_info.applicationVersion = 1;
+ this->app_info.pEngineName = "unittest";
+ this->app_info.engineVersion = 1;
+ this->app_info.apiVersion = VK_API_VERSION_1_0;
+
+ m_errorMonitor = new ErrorMonitor;
+
+ // Find out what version the instance supports and record the default target instance
+ auto enumerateInstanceVersion =
+ (PFN_vkEnumerateInstanceVersion)vkGetInstanceProcAddr(nullptr, "vkEnumerateInstanceVersion");
+ if (enumerateInstanceVersion) {
+ enumerateInstanceVersion(&m_instance_api_version);
+ } else {
+ m_instance_api_version = VK_API_VERSION_1_0;
+ }
+ m_target_api_version = app_info.apiVersion;
+ }
+
+ uint32_t SetTargetApiVersion(uint32_t target_api_version) {
+ if (target_api_version == 0) target_api_version = VK_API_VERSION_1_0;
+ if (target_api_version <= m_instance_api_version) {
+ m_target_api_version = target_api_version;
+ app_info.apiVersion = m_target_api_version;
+ }
+ return m_target_api_version;
+ }
+ uint32_t DeviceValidationVersion() {
+ // The validation layers assume the version we are validating to is the apiVersion unless the device apiVersion is lower
+ VkPhysicalDeviceProperties props;
+ GetPhysicalDeviceProperties(&props);
+ return std::min(m_target_api_version, props.apiVersion);
+ }
+
+ bool LoadDeviceProfileLayer(
+ PFN_vkSetPhysicalDeviceFormatPropertiesEXT &fpvkSetPhysicalDeviceFormatPropertiesEXT,
+ PFN_vkGetOriginalPhysicalDeviceFormatPropertiesEXT &fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT) {
+ // Load required functions
+ fpvkSetPhysicalDeviceFormatPropertiesEXT =
+ (PFN_vkSetPhysicalDeviceFormatPropertiesEXT)vkGetInstanceProcAddr(instance(), "vkSetPhysicalDeviceFormatPropertiesEXT");
+ fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT =
+ (PFN_vkGetOriginalPhysicalDeviceFormatPropertiesEXT)vkGetInstanceProcAddr(
+ instance(), "vkGetOriginalPhysicalDeviceFormatPropertiesEXT");
+
+ if (!(fpvkSetPhysicalDeviceFormatPropertiesEXT) || !(fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT)) {
+ printf("%s Can't find device_profile_api functions; skipped.\n", kSkipPrefix);
+ return 0;
+ }
+
+ return 1;
+ }
+
+ virtual void TearDown() {
+ // Clean up resources before we reset
+ ShutdownFramework();
+ delete m_errorMonitor;
+ }
+
+ VkLayerTest() { m_enableWSI = false; }
+};
+
+void VkLayerTest::VKTriangleTest(BsoFailSelect failCase) {
+ ASSERT_TRUE(m_device && m_device->initialized()); // VKTriangleTest assumes Init() has finished
+
+ ASSERT_NO_FATAL_FAILURE(InitViewport());
+
+ VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj ps(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipelineobj(m_device);
+ pipelineobj.AddDefaultColorAttachment();
+ pipelineobj.AddShader(&vs);
+ pipelineobj.AddShader(&ps);
+
+ bool failcase_needs_depth = false; // to mark cases that need depth attachment
+
+ VkBufferObj index_buffer;
+
+ switch (failCase) {
+ case BsoFailLineWidth: {
+ pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_LINE_WIDTH);
+ VkPipelineInputAssemblyStateCreateInfo ia_state = {};
+ ia_state.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
+ ia_state.topology = VK_PRIMITIVE_TOPOLOGY_LINE_LIST;
+ pipelineobj.SetInputAssembly(&ia_state);
+ break;
+ }
+ case BsoFailDepthBias: {
+ pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_DEPTH_BIAS);
+ VkPipelineRasterizationStateCreateInfo rs_state = {};
+ rs_state.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
+ rs_state.depthBiasEnable = VK_TRUE;
+ rs_state.lineWidth = 1.0f;
+ pipelineobj.SetRasterization(&rs_state);
+ break;
+ }
+ case BsoFailViewport: {
+ pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_VIEWPORT);
+ break;
+ }
+ case BsoFailScissor: {
+ pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_SCISSOR);
+ break;
+ }
+ case BsoFailBlend: {
+ pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_BLEND_CONSTANTS);
+ VkPipelineColorBlendAttachmentState att_state = {};
+ att_state.dstAlphaBlendFactor = VK_BLEND_FACTOR_CONSTANT_COLOR;
+ att_state.blendEnable = VK_TRUE;
+ pipelineobj.AddColorAttachment(0, att_state);
+ break;
+ }
+ case BsoFailDepthBounds: {
+ failcase_needs_depth = true;
+ pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_DEPTH_BOUNDS);
+ break;
+ }
+ case BsoFailStencilReadMask: {
+ failcase_needs_depth = true;
+ pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK);
+ break;
+ }
+ case BsoFailStencilWriteMask: {
+ failcase_needs_depth = true;
+ pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_STENCIL_WRITE_MASK);
+ break;
+ }
+ case BsoFailStencilReference: {
+ failcase_needs_depth = true;
+ pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_STENCIL_REFERENCE);
+ break;
+ }
+
+ case BsoFailIndexBuffer:
+ break;
+ case BsoFailIndexBufferBadSize:
+ case BsoFailIndexBufferBadOffset:
+ case BsoFailIndexBufferBadMapSize:
+ case BsoFailIndexBufferBadMapOffset: {
+ // Create an index buffer for these tests.
+ // There is no need to populate it because we should bail before trying to draw.
+ uint32_t const indices[] = {0};
+ VkBufferCreateInfo buffer_info = {};
+ buffer_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+ buffer_info.size = 1024;
+ buffer_info.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
+ buffer_info.queueFamilyIndexCount = 1;
+ buffer_info.pQueueFamilyIndices = indices;
+ index_buffer.init(*m_device, buffer_info, (VkMemoryPropertyFlags)VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
+ } break;
+ case BsoFailCmdClearAttachments:
+ break;
+ case BsoFailNone:
+ break;
+ default:
+ break;
+ }
+
+ VkDescriptorSetObj descriptorSet(m_device);
+
+ VkImageView *depth_attachment = nullptr;
+ if (failcase_needs_depth) {
+ m_depth_stencil_fmt = FindSupportedDepthStencilFormat(gpu());
+ ASSERT_TRUE(m_depth_stencil_fmt != VK_FORMAT_UNDEFINED);
+
+ m_depthStencil->Init(m_device, static_cast<uint32_t>(m_width), static_cast<uint32_t>(m_height), m_depth_stencil_fmt,
+ VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT);
+ depth_attachment = m_depthStencil->BindInfo();
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget(1, depth_attachment));
+ m_commandBuffer->begin();
+
+ GenericDrawPreparation(m_commandBuffer, pipelineobj, descriptorSet, failCase);
+
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
+ // render triangle
+ if (failCase == BsoFailIndexBuffer) {
+ // Use DrawIndexed w/o an index buffer bound
+ m_commandBuffer->DrawIndexed(3, 1, 0, 0, 0);
+ } else if (failCase == BsoFailIndexBufferBadSize) {
+ // Bind the index buffer and draw one too many indices
+ m_commandBuffer->BindIndexBuffer(&index_buffer, 0, VK_INDEX_TYPE_UINT16);
+ m_commandBuffer->DrawIndexed(513, 1, 0, 0, 0);
+ } else if (failCase == BsoFailIndexBufferBadOffset) {
+ // Bind the index buffer and draw one past the end of the buffer using the offset
+ m_commandBuffer->BindIndexBuffer(&index_buffer, 0, VK_INDEX_TYPE_UINT16);
+ m_commandBuffer->DrawIndexed(512, 1, 1, 0, 0);
+ } else if (failCase == BsoFailIndexBufferBadMapSize) {
+ // Bind the index buffer at the middle point and draw one too many indices
+ m_commandBuffer->BindIndexBuffer(&index_buffer, 512, VK_INDEX_TYPE_UINT16);
+ m_commandBuffer->DrawIndexed(257, 1, 0, 0, 0);
+ } else if (failCase == BsoFailIndexBufferBadMapOffset) {
+ // Bind the index buffer at the middle point and draw one past the end of the buffer
+ m_commandBuffer->BindIndexBuffer(&index_buffer, 512, VK_INDEX_TYPE_UINT16);
+ m_commandBuffer->DrawIndexed(256, 1, 1, 0, 0);
+ } else {
+ m_commandBuffer->Draw(3, 1, 0, 0);
+ }
+
+ if (failCase == BsoFailCmdClearAttachments) {
+ VkClearAttachment color_attachment = {};
+ color_attachment.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ color_attachment.colorAttachment = 2000000000; // Intentionally far out of range; a valid call would use 0 for the index
+ VkClearRect clear_rect = {{{0, 0}, {static_cast<uint32_t>(m_width), static_cast<uint32_t>(m_height)}}, 0, 0};
+
+ vkCmdClearAttachments(m_commandBuffer->handle(), 1, &color_attachment, 1, &clear_rect);
+ }
+
+ // finalize recording of the command buffer
+ m_commandBuffer->EndRenderPass();
+ m_commandBuffer->end();
+ m_commandBuffer->QueueCommandBuffer(true);
+ DestroyRenderTarget();
+}
+
+void VkLayerTest::GenericDrawPreparation(VkCommandBufferObj *commandBuffer, VkPipelineObj &pipelineobj,
+ VkDescriptorSetObj &descriptorSet, BsoFailSelect failCase) {
+ commandBuffer->ClearAllBuffers(m_renderTargets, m_clear_color, m_depthStencil, m_depth_clear_color, m_stencil_clear_color);
+
+ commandBuffer->PrepareAttachments(m_renderTargets, m_depthStencil);
+ // Make sure depthWriteEnable is set so that the depth-fail test works correctly,
+ // and stencilTestEnable is set so that the stencil-fail tests work correctly.
+ VkStencilOpState stencil = {};
+ stencil.failOp = VK_STENCIL_OP_KEEP;
+ stencil.passOp = VK_STENCIL_OP_KEEP;
+ stencil.depthFailOp = VK_STENCIL_OP_KEEP;
+ stencil.compareOp = VK_COMPARE_OP_NEVER;
+
+ VkPipelineDepthStencilStateCreateInfo ds_ci = {};
+ ds_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;
+ ds_ci.pNext = NULL;
+ ds_ci.depthTestEnable = VK_FALSE;
+ ds_ci.depthWriteEnable = VK_TRUE;
+ ds_ci.depthCompareOp = VK_COMPARE_OP_NEVER;
+ ds_ci.depthBoundsTestEnable = VK_FALSE;
+ if (failCase == BsoFailDepthBounds) {
+ ds_ci.depthBoundsTestEnable = VK_TRUE;
+ ds_ci.maxDepthBounds = 0.0f;
+ ds_ci.minDepthBounds = 0.0f;
+ }
+ ds_ci.stencilTestEnable = VK_TRUE;
+ ds_ci.front = stencil;
+ ds_ci.back = stencil;
+
+ pipelineobj.SetDepthStencil(&ds_ci);
+ pipelineobj.SetViewport(m_viewports);
+ pipelineobj.SetScissor(m_scissors);
+ descriptorSet.CreateVKDescriptorSet(commandBuffer);
+ VkResult err = pipelineobj.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+ ASSERT_VK_SUCCESS(err);
+ vkCmdBindPipeline(commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipelineobj.handle());
+ commandBuffer->BindDescriptorSet(descriptorSet);
+}
+
+class VkPositiveLayerTest : public VkLayerTest {
+ public:
+ protected:
+};
+
+class VkWsiEnabledLayerTest : public VkLayerTest {
+ public:
+ protected:
+ VkWsiEnabledLayerTest() { m_enableWSI = true; }
+};
+
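+// Helper that creates, allocates, and binds a buffer in deliberately invalid ways; the
+// eTestEnFlags value chosen at construction selects which misuse (double delete, bad bind
+// offset, null or fake handle, invalid free) a test exercises. Illustrative sketch only --
+// the usage flag below is an arbitrary example, not a test defined here:
+//   if (VkBufferTest::GetTestConditionValid(m_device, VkBufferTest::eInvalidDeviceOffset,
+//                                           VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT)) {
+//       VkBufferTest buffer_test(m_device, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, VkBufferTest::eInvalidDeviceOffset);
+//       ASSERT_TRUE(buffer_test.GetBufferCurrent());
+//   }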
+class VkBufferTest {
+ public:
+ enum eTestEnFlags {
+ eDoubleDelete,
+ eInvalidDeviceOffset,
+ eInvalidMemoryOffset,
+ eBindNullBuffer,
+ eBindFakeBuffer,
+ eFreeInvalidHandle,
+ eNone,
+ };
+
+ enum eTestConditions { eOffsetAlignment = 1 };
+
+ static bool GetTestConditionValid(VkDeviceObj *aVulkanDevice, eTestEnFlags aTestFlag, VkBufferUsageFlags aBufferUsage = 0) {
+ if (eInvalidDeviceOffset != aTestFlag && eInvalidMemoryOffset != aTestFlag) {
+ return true;
+ }
+ VkDeviceSize offset_limit = 0;
+ if (eInvalidMemoryOffset == aTestFlag) {
+ VkBuffer vulkanBuffer;
+ VkBufferCreateInfo buffer_create_info = {};
+ buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+ buffer_create_info.size = 32;
+ buffer_create_info.usage = aBufferUsage;
+
+ vkCreateBuffer(aVulkanDevice->device(), &buffer_create_info, nullptr, &vulkanBuffer);
+ VkMemoryRequirements memory_reqs = {};
+
+ vkGetBufferMemoryRequirements(aVulkanDevice->device(), vulkanBuffer, &memory_reqs);
+ vkDestroyBuffer(aVulkanDevice->device(), vulkanBuffer, nullptr);
+ offset_limit = memory_reqs.alignment;
+ } else if ((VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT | VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT) & aBufferUsage) {
+ offset_limit = aVulkanDevice->props.limits.minTexelBufferOffsetAlignment;
+ } else if (VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT & aBufferUsage) {
+ offset_limit = aVulkanDevice->props.limits.minUniformBufferOffsetAlignment;
+ } else if (VK_BUFFER_USAGE_STORAGE_BUFFER_BIT & aBufferUsage) {
+ offset_limit = aVulkanDevice->props.limits.minStorageBufferOffsetAlignment;
+ }
+ return eOffsetAlignment < offset_limit;
+ }
+
+ // A constructor that performs the selected validation scenario as part of construction.
+ VkBufferTest(VkDeviceObj *aVulkanDevice, VkBufferUsageFlags aBufferUsage, eTestEnFlags aTestFlag = eNone)
+ : AllocateCurrent(true),
+ BoundCurrent(false),
+ CreateCurrent(false),
+ InvalidDeleteEn(false),
+ VulkanDevice(aVulkanDevice->device()) {
+ if (eBindNullBuffer == aTestFlag || eBindFakeBuffer == aTestFlag) {
+ VkMemoryAllocateInfo memory_allocate_info = {};
+ memory_allocate_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ memory_allocate_info.allocationSize = 1; // fake size -- shouldn't matter for the test
+ memory_allocate_info.memoryTypeIndex = 0; // fake type -- shouldn't matter for the test
+ vkAllocateMemory(VulkanDevice, &memory_allocate_info, nullptr, &VulkanMemory);
+
+ VulkanBuffer = (aTestFlag == eBindNullBuffer) ? VK_NULL_HANDLE : (VkBuffer)0xCDCDCDCDCDCDCDCD;
+
+ vkBindBufferMemory(VulkanDevice, VulkanBuffer, VulkanMemory, 0);
+ } else {
+ VkBufferCreateInfo buffer_create_info = {};
+ buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+ buffer_create_info.size = 32;
+ buffer_create_info.usage = aBufferUsage;
+
+ vkCreateBuffer(VulkanDevice, &buffer_create_info, nullptr, &VulkanBuffer);
+
+ CreateCurrent = true;
+
+ VkMemoryRequirements memory_requirements;
+ vkGetBufferMemoryRequirements(VulkanDevice, VulkanBuffer, &memory_requirements);
+
+ VkMemoryAllocateInfo memory_allocate_info = {};
+ memory_allocate_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ memory_allocate_info.allocationSize = memory_requirements.size + eOffsetAlignment;
+ bool pass = aVulkanDevice->phy().set_memory_type(memory_requirements.memoryTypeBits, &memory_allocate_info,
+ VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
+ if (!pass) {
+ CreateCurrent = false;
+ vkDestroyBuffer(VulkanDevice, VulkanBuffer, nullptr);
+ return;
+ }
+
+ vkAllocateMemory(VulkanDevice, &memory_allocate_info, NULL, &VulkanMemory);
+ // NB: eOffsetAlignment (1) is intentionally an invalid offset value
+ const bool offset_en = eInvalidDeviceOffset == aTestFlag || eInvalidMemoryOffset == aTestFlag;
+ vkBindBufferMemory(VulkanDevice, VulkanBuffer, VulkanMemory, offset_en ? eOffsetAlignment : 0);
+ BoundCurrent = true;
+
+ InvalidDeleteEn = (eFreeInvalidHandle == aTestFlag);
+ }
+ }
+
+ ~VkBufferTest() {
+ if (CreateCurrent) {
+ vkDestroyBuffer(VulkanDevice, VulkanBuffer, nullptr);
+ }
+ if (AllocateCurrent) {
+ if (InvalidDeleteEn) {
+ union {
+ VkDeviceMemory device_memory;
+ unsigned long long index_access;
+ } bad_index;
+
+ bad_index.device_memory = VulkanMemory;
+ bad_index.index_access++;
+
+ vkFreeMemory(VulkanDevice, bad_index.device_memory, nullptr);
+ }
+ vkFreeMemory(VulkanDevice, VulkanMemory, nullptr);
+ }
+ }
+
+ bool GetBufferCurrent() { return AllocateCurrent && BoundCurrent && CreateCurrent; }
+
+ const VkBuffer &GetBuffer() { return VulkanBuffer; }
+
+ void TestDoubleDestroy() {
+ // Destroy the buffer but leave the flag set, which will cause
+ // the buffer to be destroyed again in the destructor.
+ vkDestroyBuffer(VulkanDevice, VulkanBuffer, nullptr);
+ }
+
+ protected:
+ bool AllocateCurrent;
+ bool BoundCurrent;
+ bool CreateCurrent;
+ bool InvalidDeleteEn;
+
+ VkBuffer VulkanBuffer;
+ VkDevice VulkanDevice;
+ VkDeviceMemory VulkanMemory;
+};
+
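+// Owns a vertex buffer plus matching attribute/binding descriptions; AddVertexInputToPipe()
+// registers them with a pipeline and BindVertexBuffers() binds the buffer into a command buffer.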
+class VkVerticesObj {
+ public:
+ VkVerticesObj(VkDeviceObj *aVulkanDevice, unsigned aAttributeCount, unsigned aBindingCount, unsigned aByteStride,
+ VkDeviceSize aVertexCount, const float *aVerticies)
+ : BoundCurrent(false),
+ AttributeCount(aAttributeCount),
+ BindingCount(aBindingCount),
+ BindId(BindIdGenerator),
+ PipelineVertexInputStateCreateInfo(),
+ VulkanMemoryBuffer(aVulkanDevice, static_cast<int>(aByteStride * aVertexCount),
+ reinterpret_cast<const void *>(aVerticies), VK_BUFFER_USAGE_VERTEX_BUFFER_BIT) {
+ BindIdGenerator++; // NB: this counter can wrap if the class is misused
+
+ VertexInputAttributeDescription = new VkVertexInputAttributeDescription[AttributeCount];
+ VertexInputBindingDescription = new VkVertexInputBindingDescription[BindingCount];
+
+ PipelineVertexInputStateCreateInfo.pVertexAttributeDescriptions = VertexInputAttributeDescription;
+ PipelineVertexInputStateCreateInfo.vertexAttributeDescriptionCount = AttributeCount;
+ PipelineVertexInputStateCreateInfo.pVertexBindingDescriptions = VertexInputBindingDescription;
+ PipelineVertexInputStateCreateInfo.vertexBindingDescriptionCount = BindingCount;
+ PipelineVertexInputStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
+
+ unsigned i = 0;
+ do {
+ VertexInputAttributeDescription[i].binding = BindId;
+ VertexInputAttributeDescription[i].location = i;
+ VertexInputAttributeDescription[i].format = VK_FORMAT_R32G32B32_SFLOAT;
+ VertexInputAttributeDescription[i].offset = sizeof(float) * aByteStride;
+ i++;
+ } while (i < AttributeCount);
+
+ i = 0;
+ do {
+ VertexInputBindingDescription[i].binding = BindId;
+ VertexInputBindingDescription[i].stride = aByteStride;
+ VertexInputBindingDescription[i].inputRate = VK_VERTEX_INPUT_RATE_VERTEX;
+ i++;
+ } while (i < BindingCount);
+ }
+
+ ~VkVerticesObj() {
+ if (VertexInputAttributeDescription) {
+ delete[] VertexInputAttributeDescription;
+ }
+ if (VertexInputBindingDescription) {
+ delete[] VertexInputBindingDescription;
+ }
+ }
+
+ bool AddVertexInputToPipe(VkPipelineObj &aPipelineObj) {
+ aPipelineObj.AddVertexInputAttribs(VertexInputAttributeDescription, AttributeCount);
+ aPipelineObj.AddVertexInputBindings(VertexInputBindingDescription, BindingCount);
+ return true;
+ }
+
+ void BindVertexBuffers(VkCommandBuffer aCommandBuffer, unsigned aOffsetCount = 0, VkDeviceSize *aOffsetList = nullptr) {
+ VkDeviceSize *offsetList;
+ unsigned offsetCount;
+
+ if (aOffsetCount) {
+ offsetList = aOffsetList;
+ offsetCount = aOffsetCount;
+ } else {
+ offsetList = new VkDeviceSize[1]();
+ offsetCount = 1;
+ }
+
+ vkCmdBindVertexBuffers(aCommandBuffer, BindId, offsetCount, &VulkanMemoryBuffer.handle(), offsetList);
+ BoundCurrent = true;
+
+ if (!aOffsetCount) {
+ delete[] offsetList;
+ }
+ }
+
+ protected:
+ static uint32_t BindIdGenerator;
+
+ bool BoundCurrent;
+ unsigned AttributeCount;
+ unsigned BindingCount;
+ uint32_t BindId;
+
+ VkPipelineVertexInputStateCreateInfo PipelineVertexInputStateCreateInfo;
+ VkVertexInputAttributeDescription *VertexInputAttributeDescription;
+ VkVertexInputBindingDescription *VertexInputBindingDescription;
+ VkConstantBufferObj VulkanMemoryBuffer;
+};
+
+uint32_t VkVerticesObj::BindIdGenerator;
+
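+// Creates a descriptor pool sized for the supplied bindings, a matching set layout, and a
+// single descriptor set; the pool (and with it the set) is destroyed in the destructor.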
+struct OneOffDescriptorSet {
+ VkDeviceObj *device_;
+ VkDescriptorPool pool_;
+ VkDescriptorSetLayoutObj layout_;
+ VkDescriptorSet set_;
+ typedef std::vector<VkDescriptorSetLayoutBinding> Bindings;
+
+ OneOffDescriptorSet(VkDeviceObj *device, const Bindings &bindings)
+ : device_{device}, pool_{}, layout_(device, bindings), set_{} {
+ VkResult err;
+
+ std::vector<VkDescriptorPoolSize> sizes;
+ for (const auto &b : bindings) sizes.push_back({b.descriptorType, std::max(1u, b.descriptorCount)});
+
+ VkDescriptorPoolCreateInfo dspci = {
+ VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO, nullptr, 0, 1, uint32_t(sizes.size()), sizes.data()};
+ err = vkCreateDescriptorPool(device_->handle(), &dspci, nullptr, &pool_);
+ if (err != VK_SUCCESS) return;
+
+ VkDescriptorSetAllocateInfo alloc_info = {VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, nullptr, pool_, 1,
+ &layout_.handle()};
+ err = vkAllocateDescriptorSets(device_->handle(), &alloc_info, &set_);
+ }
+
+ ~OneOffDescriptorSet() {
+ // No need to destroy the set -- it goes away with the pool.
+ vkDestroyDescriptorPool(device_->handle(), pool_, nullptr);
+ }
+
+ bool Initialized() { return pool_ != VK_NULL_HANDLE && layout_.initialized() && set_ != VK_NULL_HANDLE; }
+};
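+// Illustrative usage (the binding index and descriptor type below are placeholders):
+//   OneOffDescriptorSet ds(m_device, {{0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr}});
+//   ASSERT_TRUE(ds.Initialized());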
+
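+// Returns true when the struct's sType matches the canonical value from LvlTypeMap<T>;
+// the pipeline helper below uses this to detect whether an optional create-info was filled in.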
+template <typename T>
+bool IsValidVkStruct(const T &s) {
+ return LvlTypeMap<T>::kSType == s.sType;
+}
+
+// Helper class for tersely writing "create graphics pipeline" tests
+//
+// Designed with minimal error checking to ensure easy error state creation
+// See OneshotTest for typical usage
+struct CreatePipelineHelper {
+ public:
+ std::vector<VkDescriptorSetLayoutBinding> dsl_bindings_;
+ std::unique_ptr<OneOffDescriptorSet> descriptor_set_;
+ std::vector<VkPipelineShaderStageCreateInfo> shader_stages_;
+ VkPipelineVertexInputStateCreateInfo vi_ci_ = {};
+ VkPipelineInputAssemblyStateCreateInfo ia_ci_ = {};
+ VkPipelineTessellationStateCreateInfo tess_ci_ = {};
+ VkViewport viewport_ = {};
+ VkRect2D scissor_ = {};
+ VkPipelineViewportStateCreateInfo vp_state_ci_ = {};
+ VkPipelineMultisampleStateCreateInfo pipe_ms_state_ci_ = {};
+ VkPipelineLayoutCreateInfo pipeline_layout_ci_ = {};
+ VkPipelineLayoutObj pipeline_layout_;
+ VkPipelineDynamicStateCreateInfo dyn_state_ci_ = {};
+ VkPipelineRasterizationStateCreateInfo rs_state_ci_ = {};
+ VkPipelineColorBlendAttachmentState cb_attachments_ = {};
+ VkPipelineColorBlendStateCreateInfo cb_ci_ = {};
+ VkGraphicsPipelineCreateInfo gp_ci_ = {};
+ VkPipelineCacheCreateInfo pc_ci_ = {};
+ VkPipeline pipeline_ = VK_NULL_HANDLE;
+ VkPipelineCache pipeline_cache_ = VK_NULL_HANDLE;
+ std::unique_ptr<VkShaderObj> vs_;
+ std::unique_ptr<VkShaderObj> fs_;
+ VkLayerTest &layer_test_;
+ CreatePipelineHelper(VkLayerTest &test) : layer_test_(test) {}
+ ~CreatePipelineHelper() {
+ VkDevice device = layer_test_.device();
+ vkDestroyPipelineCache(device, pipeline_cache_, nullptr);
+ vkDestroyPipeline(device, pipeline_, nullptr);
+ }
+
+ void InitDescriptorSetInfo() { dsl_bindings_ = {{0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr}}; }
+
+ void InitInputAndVertexInfo() {
+ vi_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
+
+ ia_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
+ ia_ci_.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
+ }
+
+ void InitMultisampleInfo() {
+ pipe_ms_state_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
+ pipe_ms_state_ci_.pNext = nullptr;
+ pipe_ms_state_ci_.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
+ pipe_ms_state_ci_.sampleShadingEnable = VK_FALSE;
+ pipe_ms_state_ci_.minSampleShading = 1.0;
+ pipe_ms_state_ci_.pSampleMask = NULL;
+ }
+
+ void InitPipelineLayoutInfo() {
+ pipeline_layout_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
+ pipeline_layout_ci_.setLayoutCount = 1; // Not really changeable because InitState() sets exactly one pSetLayout
+ pipeline_layout_ci_.pSetLayouts = nullptr; // must be bound after the descriptor set layout is created
+ }
+
+ void InitViewportInfo() {
+ viewport_ = {0.0f, 0.0f, 64.0f, 64.0f, 0.0f, 1.0f};
+ scissor_ = {{0, 0}, {64, 64}};
+
+ vp_state_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
+ vp_state_ci_.pNext = nullptr;
+ vp_state_ci_.viewportCount = 1;
+ vp_state_ci_.pViewports = &viewport_; // ignored if dynamic
+ vp_state_ci_.scissorCount = 1;
+ vp_state_ci_.pScissors = &scissor_; // ignored if dynamic
+ }
+
+ void InitDynamicStateInfo() {
+ // Intentionally left zero-initialized; LateBindPipelineInfo() uses the sType "validity"
+ // check to detect whether the dynamic state struct was filled in by the test.
+ }
+
+ void InitShaderInfo() {
+ vs_.reset(new VkShaderObj(layer_test_.DeviceObj(), bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, &layer_test_));
+ fs_.reset(new VkShaderObj(layer_test_.DeviceObj(), bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, &layer_test_));
+ // We shouldn't need a fragment shader but add it to be able to run on more devices
+ shader_stages_ = {vs_->GetStageCreateInfo(), fs_->GetStageCreateInfo()};
+ }
+
+ void InitRasterizationInfo() {
+ rs_state_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
+ rs_state_ci_.pNext = nullptr;
+ rs_state_ci_.flags = 0;
+ rs_state_ci_.depthClampEnable = VK_FALSE;
+ rs_state_ci_.rasterizerDiscardEnable = VK_FALSE;
+ rs_state_ci_.polygonMode = VK_POLYGON_MODE_FILL;
+ rs_state_ci_.cullMode = VK_CULL_MODE_BACK_BIT;
+ rs_state_ci_.frontFace = VK_FRONT_FACE_COUNTER_CLOCKWISE;
+ rs_state_ci_.depthBiasEnable = VK_FALSE;
+ rs_state_ci_.lineWidth = 1.0F;
+ }
+
+ void InitBlendStateInfo() {
+ cb_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
+ cb_ci_.logicOpEnable = VK_FALSE;
+ cb_ci_.logicOp = VK_LOGIC_OP_COPY; // ignored because logicOpEnable is VK_FALSE above
+ cb_ci_.attachmentCount = layer_test_.RenderPassInfo().subpassCount;
+ ASSERT_TRUE(IsValidVkStruct(layer_test_.RenderPassInfo()));
+ cb_ci_.pAttachments = &cb_attachments_;
+ for (int i = 0; i < 4; i++) {
+ cb_ci_.blendConstants[i] = 1.0F;
+ }
+ }
+
+ void InitGraphicsPipelineInfo() {
+ // Color-only rendering in a subpass with no depth/stencil attachment
+ // Active Pipeline Shader Stages
+ // Vertex Shader
+ // Fragment Shader
+ // Required: Fixed-Function Pipeline Stages
+ // VkPipelineVertexInputStateCreateInfo
+ // VkPipelineInputAssemblyStateCreateInfo
+ // VkPipelineViewportStateCreateInfo
+ // VkPipelineRasterizationStateCreateInfo
+ // VkPipelineMultisampleStateCreateInfo
+ // VkPipelineColorBlendStateCreateInfo
+ gp_ci_.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
+ gp_ci_.pNext = nullptr;
+ gp_ci_.flags = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT;
+ gp_ci_.pVertexInputState = &vi_ci_;
+ gp_ci_.pInputAssemblyState = &ia_ci_;
+ gp_ci_.pTessellationState = nullptr;
+ gp_ci_.pViewportState = &vp_state_ci_;
+ gp_ci_.pRasterizationState = &rs_state_ci_;
+ gp_ci_.pMultisampleState = &pipe_ms_state_ci_;
+ gp_ci_.pDepthStencilState = nullptr;
+ gp_ci_.pColorBlendState = &cb_ci_;
+ gp_ci_.pDynamicState = nullptr;
+ gp_ci_.renderPass = layer_test_.renderPass();
+ }
+
+ void InitPipelineCacheInfo() {
+ pc_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
+ pc_ci_.pNext = nullptr;
+ pc_ci_.flags = 0;
+ pc_ci_.initialDataSize = 0;
+ pc_ci_.pInitialData = nullptr;
+ }
+
+ // Not called by default during InitInfo()
+ void InitTesselationState() {
+ // TBD -- add shaders and create_info
+ }
+
+ // TBD -- add control for optional and/or additional initialization
+ void InitInfo() {
+ InitDescriptorSetInfo();
+ InitInputAndVertexInfo();
+ InitMultisampleInfo();
+ InitPipelineLayoutInfo();
+ InitViewportInfo();
+ InitDynamicStateInfo();
+ InitShaderInfo();
+ InitRasterizationInfo();
+ InitBlendStateInfo();
+ InitGraphicsPipelineInfo();
+ InitPipelineCacheInfo();
+ }
+
+ void InitState() {
+ VkResult err;
+ descriptor_set_.reset(new OneOffDescriptorSet(layer_test_.DeviceObj(), dsl_bindings_));
+ ASSERT_TRUE(descriptor_set_->Initialized());
+
+ const std::vector<VkPushConstantRange> push_ranges(
+ pipeline_layout_ci_.pPushConstantRanges,
+ pipeline_layout_ci_.pPushConstantRanges + pipeline_layout_ci_.pushConstantRangeCount);
+ pipeline_layout_ = VkPipelineLayoutObj(layer_test_.DeviceObj(), {&descriptor_set_->layout_}, push_ranges);
+
+ err = vkCreatePipelineCache(layer_test_.device(), &pc_ci_, NULL, &pipeline_cache_);
+ ASSERT_VK_SUCCESS(err);
+ }
+
+ void LateBindPipelineInfo() {
+ // Items that are held by value or only become valid after InitState() must be bound late
+ gp_ci_.layout = pipeline_layout_.handle();
+ gp_ci_.stageCount = shader_stages_.size();
+ gp_ci_.pStages = shader_stages_.data();
+ if ((gp_ci_.pTessellationState == nullptr) && IsValidVkStruct(tess_ci_)) {
+ gp_ci_.pTessellationState = &tess_ci_;
+ }
+ if ((gp_ci_.pDynamicState == nullptr) && IsValidVkStruct(dyn_state_ci_)) {
+ gp_ci_.pDynamicState = &dyn_state_ci_;
+ }
+ }
+
+ VkResult CreateGraphicsPipeline(bool implicit_destroy = true, bool do_late_bind = true) {
+ VkResult err;
+ if (do_late_bind) {
+ LateBindPipelineInfo();
+ }
+ if (implicit_destroy && (pipeline_ != VK_NULL_HANDLE)) {
+ vkDestroyPipeline(layer_test_.device(), pipeline_, nullptr);
+ pipeline_ = VK_NULL_HANDLE;
+ }
+ err = vkCreateGraphicsPipelines(layer_test_.device(), pipeline_cache_, 1, &gp_ci_, NULL, &pipeline_);
+ return err;
+ }
+
+ // Helper function to create a simple test case (positive or negative)
+ //
+ // info_override can be any callable that takes a CreatePipelineHelper &
+ // flags and errors can be any arguments accepted by "SetDesiredFailureMsg".
+ template <typename Test, typename OverrideFunc, typename Error>
+ static void OneshotTest(Test &test, OverrideFunc &info_override, const VkFlags flags, const std::vector<Error> &errors,
+ bool positive_test = false) {
+ CreatePipelineHelper helper(test);
+ helper.InitInfo();
+ info_override(helper);
+ helper.InitState();
+
+ for (const auto &error : errors) test.Monitor()->SetDesiredFailureMsg(flags, error);
+ helper.CreateGraphicsPipeline();
+
+ if (positive_test) {
+ test.Monitor()->VerifyNotFound();
+ } else {
+ test.Monitor()->VerifyFound();
+ }
+ }
+
+ template <typename Test, typename OverrideFunc, typename Error>
+ static void OneshotTest(Test &test, OverrideFunc &info_override, const VkFlags flags, Error error, bool positive_test = false) {
+ OneshotTest(test, info_override, flags, std::vector<Error>(1, error), positive_test);
+ }
+};
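+
+// Sketch of the OneshotTest pattern (illustrative only; the override lambda and the VUID string
+// are hypothetical placeholders rather than a test defined in this file):
+//   auto break_viewport_state = [](CreatePipelineHelper &helper) { helper.vp_state_ci_.viewportCount = 0; };
+//   CreatePipelineHelper::OneshotTest(*this, break_viewport_state, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+//                                     "VUID-VkPipelineViewportStateCreateInfo-viewportCount-arraylength");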
+namespace chain_util {
+template <typename T>
+T Init(const void *pnext_in = nullptr) {
+ T pnext_obj = {};
+ pnext_obj.sType = LvlTypeMap<T>::kSType;
+ pnext_obj.pNext = pnext_in;
+ return pnext_obj;
+}
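+// Builds a pNext chain of extension structs, adding each one only if add_if_ approves its
+// extension name; accepted names are optionally recorded in *list_.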
+class ExtensionChain {
+ const void *head_ = nullptr;
+ typedef std::function<bool(const char *)> AddIfFunction;
+ AddIfFunction add_if_;
+ typedef std::vector<const char *> List;
+ List *list_;
+
+ public:
+ template <typename F>
+ ExtensionChain(F &add_if, List *list) : add_if_(add_if), list_(list) {}
+ template <typename T>
+ void Add(const char *name, T &obj) {
+ if (add_if_(name)) {
+ if (list_) {
+ list_->push_back(name);
+ }
+ obj.pNext = head_;
+ head_ = &obj;
+ }
+ }
+ const void *Head() const { return head_; }
+};
+} // namespace chain_util
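+// Illustrative use of chain_util (add_if_supported, enabled_names, and dev_info are hypothetical
+// locals belonging to the caller, not names defined in this file):
+//   auto add_if_supported = [&](const char *name) { return DeviceExtensionSupported(gpu(), nullptr, name); };
+//   std::vector<const char *> enabled_names;
+//   chain_util::ExtensionChain chain(add_if_supported, &enabled_names);
+//   auto indexing = chain_util::Init<VkPhysicalDeviceDescriptorIndexingFeaturesEXT>();
+//   chain.Add(VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME, indexing);
+//   dev_info.pNext = chain.Head();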
+
+// PushDescriptorProperties helper
VkPhysicalDevicePushDescriptorPropertiesKHR GetPushDescriptorProperties(VkInstance instance, VkPhysicalDevice gpu) {
// Find address of extension call and make the call -- assumes needed extensions are enabled.
PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR =
@@ -122,63 +1314,4386 @@ VkPhysicalDevicePushDescriptorPropertiesKHR GetPushDescriptorProperties(VkInstan
return push_descriptor_prop;
}
-VkPhysicalDeviceSubgroupProperties GetSubgroupProperties(VkInstance instance, VkPhysicalDevice gpu) {
- auto subgroup_prop = lvl_init_struct<VkPhysicalDeviceSubgroupProperties>();
-
- auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2>(&subgroup_prop);
- vkGetPhysicalDeviceProperties2(gpu, &prop2);
- return subgroup_prop;
+// ********************************************************************************************************************
+// ********************************************************************************************************************
+// ********************************************************************************************************************
+// ********************************************************************************************************************
+TEST_F(VkLayerTest, RequiredParameter) {
+ TEST_DESCRIPTION("Specify VK_NULL_HANDLE, NULL, and 0 for required handle, pointer, array, and array count parameters");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "required parameter pFeatures specified as NULL");
+ // Specify NULL for a pointer to a handle
+ // Expected to trigger an error with
+ // parameter_validation::validate_required_pointer
+ vkGetPhysicalDeviceFeatures(gpu(), NULL);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "required parameter pQueueFamilyPropertyCount specified as NULL");
+ // Specify NULL for pointer to array count
+ // Expected to trigger an error with parameter_validation::validate_array
+ vkGetPhysicalDeviceQueueFamilyProperties(gpu(), NULL, NULL);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-viewportCount-arraylength");
+ // Specify 0 for a required array count
+ // Expected to trigger an error with parameter_validation::validate_array
+ VkViewport viewport = {0.0f, 0.0f, 64.0f, 64.0f, 0.0f, 1.0f};
+ m_commandBuffer->SetViewport(0, 0, &viewport);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCreateImage-pCreateInfo-parameter");
+ // Specify a null pImageCreateInfo struct pointer
+ VkImage test_image;
+ vkCreateImage(device(), NULL, NULL, &test_image);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-pViewports-parameter");
+ // Specify NULL for a required array
+ // Expected to trigger an error with parameter_validation::validate_array
+ m_commandBuffer->SetViewport(0, 1, NULL);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "required parameter memory specified as VK_NULL_HANDLE");
+ // Specify VK_NULL_HANDLE for a required handle
+ // Expected to trigger an error with
+ // parameter_validation::validate_required_handle
+ vkUnmapMemory(device(), VK_NULL_HANDLE);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "required parameter pFences[0] specified as VK_NULL_HANDLE");
+ // Specify VK_NULL_HANDLE for a required handle array entry
+ // Expected to trigger an error with
+ // parameter_validation::validate_required_handle_array
+ VkFence fence = VK_NULL_HANDLE;
+ vkResetFences(device(), 1, &fence);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "required parameter pAllocateInfo specified as NULL");
+ // Specify NULL for a required struct pointer
+ // Expected to trigger an error with
+ // parameter_validation::validate_struct_type
+ VkDeviceMemory memory = VK_NULL_HANDLE;
+ vkAllocateMemory(device(), NULL, NULL, &memory);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "value of faceMask must not be 0");
+ // Specify 0 for a required VkFlags parameter
+ // Expected to trigger an error with parameter_validation::validate_flags
+ m_commandBuffer->SetStencilReference(0, 0);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "value of pSubmits[0].pWaitDstStageMask[0] must not be 0");
+ // Specify 0 for a required VkFlags array entry
+ // Expected to trigger an error with
+ // parameter_validation::validate_flags_array
+ VkSemaphore semaphore = VK_NULL_HANDLE;
+ VkPipelineStageFlags stageFlags = 0;
+ VkSubmitInfo submitInfo = {};
+ submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submitInfo.waitSemaphoreCount = 1;
+ submitInfo.pWaitSemaphores = &semaphore;
+ submitInfo.pWaitDstStageMask = &stageFlags;
+ vkQueueSubmit(m_device->m_queue, 1, &submitInfo, VK_NULL_HANDLE);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkSubmitInfo-sType-sType");
+ stageFlags = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
+ // Set a bogus sType and see what happens
+ submitInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
+ submitInfo.waitSemaphoreCount = 1;
+ submitInfo.pWaitSemaphores = &semaphore;
+ submitInfo.pWaitDstStageMask = &stageFlags;
+ vkQueueSubmit(m_device->m_queue, 1, &submitInfo, VK_NULL_HANDLE);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkSubmitInfo-pWaitSemaphores-parameter");
+ stageFlags = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
+ submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submitInfo.waitSemaphoreCount = 1;
+ // Set a null pointer for pWaitSemaphores
+ submitInfo.pWaitSemaphores = NULL;
+ submitInfo.pWaitDstStageMask = &stageFlags;
+ vkQueueSubmit(m_device->m_queue, 1, &submitInfo, VK_NULL_HANDLE);
+ m_errorMonitor->VerifyFound();
}
-bool operator==(const VkDebugUtilsLabelEXT &rhs, const VkDebugUtilsLabelEXT &lhs) {
- bool is_equal = (rhs.color[0] == lhs.color[0]) && (rhs.color[1] == lhs.color[1]) && (rhs.color[2] == lhs.color[2]) &&
- (rhs.color[3] == lhs.color[3]);
- if (is_equal) {
- if (rhs.pLabelName && lhs.pLabelName) {
- is_equal = (0 == strcmp(rhs.pLabelName, lhs.pLabelName));
+TEST_F(VkLayerTest, PnextOnlyStructValidation) {
+ TEST_DESCRIPTION("See if checks occur on structs ONLY used in pnext chains.");
+
+ if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ } else {
+ printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
+ VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+ std::array<const char *, 2> required_device_extensions = {
+ {VK_KHR_MAINTENANCE3_EXTENSION_NAME, VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME}};
+ for (auto device_extension : required_device_extensions) {
+ if (DeviceExtensionSupported(gpu(), nullptr, device_extension)) {
+ m_device_extension_names.push_back(device_extension);
} else {
- is_equal = (rhs.pLabelName == nullptr) && (lhs.pLabelName == nullptr);
+ printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, device_extension);
+ return;
}
}
- return is_equal;
+
+ PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+ (PFN_vkGetPhysicalDeviceFeatures2KHR)vkGetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+ ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
+
+ // Create a device, passing in a VkPhysicalDeviceFeatures2 with a bad feature value
+ auto indexing_features = lvl_init_struct<VkPhysicalDeviceDescriptorIndexingFeaturesEXT>();
+ auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&indexing_features);
+ vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
+ // Set one of the features values to an invalid boolean value
+ indexing_features.descriptorBindingUniformBufferUpdateAfterBind = 800;
+
+ uint32_t queue_node_count;
+ vkGetPhysicalDeviceQueueFamilyProperties(gpu(), &queue_node_count, NULL);
+ VkQueueFamilyProperties *queue_props = new VkQueueFamilyProperties[queue_node_count];
+ vkGetPhysicalDeviceQueueFamilyProperties(gpu(), &queue_node_count, queue_props);
+ float priorities[] = {1.0f};
+ VkDeviceQueueCreateInfo queue_info{};
+ queue_info.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
+ queue_info.pNext = NULL;
+ queue_info.flags = 0;
+ queue_info.queueFamilyIndex = 0;
+ queue_info.queueCount = 1;
+ queue_info.pQueuePriorities = &priorities[0];
+ VkDeviceCreateInfo dev_info = {};
+ dev_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
+ dev_info.pNext = NULL;
+ dev_info.queueCreateInfoCount = 1;
+ dev_info.pQueueCreateInfos = &queue_info;
+ dev_info.enabledLayerCount = 0;
+ dev_info.ppEnabledLayerNames = NULL;
+ dev_info.enabledExtensionCount = m_device_extension_names.size();
+ dev_info.ppEnabledExtensionNames = m_device_extension_names.data();
+ dev_info.pNext = &features2;
+ VkDevice dev;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_WARNING_BIT_EXT, "is neither VK_TRUE nor VK_FALSE");
+ m_errorMonitor->SetUnexpectedError("Failed to create");
+ vkCreateDevice(gpu(), &dev_info, NULL, &dev);
+ m_errorMonitor->VerifyFound();
}
-VKAPI_ATTR VkBool32 VKAPI_CALL DebugUtilsCallback(VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
- VkDebugUtilsMessageTypeFlagsEXT messageTypes,
- const VkDebugUtilsMessengerCallbackDataEXT *pCallbackData, void *pUserData) {
- auto *data = reinterpret_cast<DebugUtilsLabelCheckData *>(pUserData);
- data->callback(pCallbackData, data);
- return VK_FALSE;
+TEST_F(VkLayerTest, ReservedParameter) {
+ TEST_DESCRIPTION("Specify a non-zero value for a reserved parameter");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " must be 0");
+ // Specify 0 for a reserved VkFlags parameter
+ // Expected to trigger an error with
+ // parameter_validation::validate_reserved_flags
+ VkEvent event_handle = VK_NULL_HANDLE;
+ VkEventCreateInfo event_info = {};
+ event_info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
+ event_info.flags = 1;
+ vkCreateEvent(device(), &event_info, NULL, &event_handle);
+ m_errorMonitor->VerifyFound();
}
-#if GTEST_IS_THREADSAFE
-extern "C" void *AddToCommandBuffer(void *arg) {
- struct thread_data_struct *data = (struct thread_data_struct *)arg;
+TEST_F(VkLayerTest, DebugMarkerNameTest) {
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ if (DeviceExtensionSupported(gpu(), "VK_LAYER_LUNARG_core_validation", VK_EXT_DEBUG_MARKER_EXTENSION_NAME)) {
+ m_device_extension_names.push_back(VK_EXT_DEBUG_MARKER_EXTENSION_NAME);
+ } else {
+ printf("%s Debug Marker Extension not supported, skipping test\n", kSkipPrefix);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitState());
- for (int i = 0; i < 80000; i++) {
- vkCmdSetEvent(data->commandBuffer, data->event, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT);
- if (data->bailout) {
+ PFN_vkDebugMarkerSetObjectNameEXT fpvkDebugMarkerSetObjectNameEXT =
+ (PFN_vkDebugMarkerSetObjectNameEXT)vkGetInstanceProcAddr(instance(), "vkDebugMarkerSetObjectNameEXT");
+ if (!(fpvkDebugMarkerSetObjectNameEXT)) {
+ printf("%s Can't find fpvkDebugMarkerSetObjectNameEXT; skipped.\n", kSkipPrefix);
+ return;
+ }
+
+ VkEvent event_handle = VK_NULL_HANDLE;
+ VkEventCreateInfo event_info = {};
+ event_info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
+ vkCreateEvent(device(), &event_info, NULL, &event_handle);
+ VkDebugMarkerObjectNameInfoEXT name_info = {};
+ name_info.sType = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT;
+ name_info.pNext = nullptr;
+ name_info.object = (uint64_t)event_handle;
+ name_info.objectType = VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT;
+ name_info.pObjectName = "UnimaginablyImprobableString";
+ fpvkDebugMarkerSetObjectNameEXT(device(), &name_info);
+
+ m_commandBuffer->begin();
+ vkCmdSetEvent(m_commandBuffer->handle(), event_handle, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT);
+ m_commandBuffer->end();
+ VkSubmitInfo submit_info = {};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &m_commandBuffer->handle();
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "UnimaginablyImprobableString");
+ vkDestroyEvent(m_device->device(), event_handle, NULL);
+ m_errorMonitor->VerifyFound();
+ vkQueueWaitIdle(m_device->m_queue);
+
+ VkBuffer buffer;
+ VkDeviceMemory memory_1, memory_2;
+ std::string memory_name = "memory_name";
+
+ VkBufferCreateInfo buffer_create_info = {};
+ buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+ buffer_create_info.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
+ buffer_create_info.size = 1;
+
+ vkCreateBuffer(device(), &buffer_create_info, nullptr, &buffer);
+
+ VkMemoryRequirements memRequirements;
+ vkGetBufferMemoryRequirements(device(), buffer, &memRequirements);
+
+ VkMemoryAllocateInfo memory_allocate_info = {};
+ memory_allocate_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ memory_allocate_info.allocationSize = memRequirements.size;
+ memory_allocate_info.memoryTypeIndex = 0;
+
+ vkAllocateMemory(device(), &memory_allocate_info, nullptr, &memory_1);
+ vkAllocateMemory(device(), &memory_allocate_info, nullptr, &memory_2);
+
+ name_info.object = (uint64_t)memory_2;
+ name_info.objectType = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT;
+ name_info.pObjectName = memory_name.c_str();
+ fpvkDebugMarkerSetObjectNameEXT(device(), &name_info);
+
+ vkBindBufferMemory(device(), buffer, memory_1, 0);
+
+ // Test core_validation layer
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, memory_name);
+ vkBindBufferMemory(device(), buffer, memory_2, 0);
+ m_errorMonitor->VerifyFound();
+
+ vkFreeMemory(device(), memory_1, nullptr);
+ memory_1 = VK_NULL_HANDLE;
+ vkFreeMemory(device(), memory_2, nullptr);
+ memory_2 = VK_NULL_HANDLE;
+ vkDestroyBuffer(device(), buffer, nullptr);
+ buffer = VK_NULL_HANDLE;
+
+ VkCommandBuffer commandBuffer;
+ std::string commandBuffer_name = "command_buffer_name";
+ VkCommandPool commandpool_1;
+ VkCommandPool commandpool_2;
+ VkCommandPoolCreateInfo pool_create_info{};
+ pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+ pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
+ pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
+ vkCreateCommandPool(device(), &pool_create_info, nullptr, &commandpool_1);
+ vkCreateCommandPool(device(), &pool_create_info, nullptr, &commandpool_2);
+
+ VkCommandBufferAllocateInfo command_buffer_allocate_info{};
+ command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+ command_buffer_allocate_info.commandPool = commandpool_1;
+ command_buffer_allocate_info.commandBufferCount = 1;
+ command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+ vkAllocateCommandBuffers(device(), &command_buffer_allocate_info, &commandBuffer);
+
+ name_info.object = (uint64_t)commandBuffer;
+ name_info.objectType = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT;
+ name_info.pObjectName = commandBuffer_name.c_str();
+ fpvkDebugMarkerSetObjectNameEXT(device(), &name_info);
+
+ VkCommandBufferBeginInfo cb_begin_Info = {};
+ cb_begin_Info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+ cb_begin_Info.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
+ vkBeginCommandBuffer(commandBuffer, &cb_begin_Info);
+
+ const VkRect2D scissor = {{-1, 0}, {16, 16}};
+ const VkRect2D scissors[] = {scissor, scissor};
+
+ // Test parameter_validation layer
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, commandBuffer_name);
+ vkCmdSetScissor(commandBuffer, 1, 1, scissors);
+ m_errorMonitor->VerifyFound();
+
+ // Test object_tracker layer
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, commandBuffer_name);
+ vkFreeCommandBuffers(device(), commandpool_2, 1, &commandBuffer);
+ m_errorMonitor->VerifyFound();
+
+ vkDestroyCommandPool(device(), commandpool_1, NULL);
+ vkDestroyCommandPool(device(), commandpool_2, NULL);
+}
+
+TEST_F(VkLayerTest, DebugUtilsNameTest) {
+ // Check for external semaphore instance extensions
+ if (InstanceExtensionSupported(VK_EXT_DEBUG_UTILS_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
+ } else {
+ printf("%s Debug Utils Extension not supported, skipping test\n", kSkipPrefix);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ PFN_vkSetDebugUtilsObjectNameEXT fpvkSetDebugUtilsObjectNameEXT =
+ (PFN_vkSetDebugUtilsObjectNameEXT)vkGetDeviceProcAddr(m_device->device(), "vkSetDebugUtilsObjectNameEXT");
+ if (!(fpvkSetDebugUtilsObjectNameEXT)) {
+ printf("%s Can't find fpvkSetDebugUtilsObjectNameEXT; skipped.\n", kSkipPrefix);
+ return;
+ }
+
+ VkEvent event_handle = VK_NULL_HANDLE;
+ VkEventCreateInfo event_info = {};
+ event_info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
+ vkCreateEvent(device(), &event_info, NULL, &event_handle);
+ VkDebugUtilsObjectNameInfoEXT name_info = {};
+ name_info.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT;
+ name_info.pNext = nullptr;
+ name_info.objectHandle = (uint64_t)event_handle;
+ name_info.objectType = VK_OBJECT_TYPE_EVENT;
+ name_info.pObjectName = "Popbutton_T_Bumfuzzle";
+ fpvkSetDebugUtilsObjectNameEXT(device(), &name_info);
+
+ m_commandBuffer->begin();
+ vkCmdSetEvent(m_commandBuffer->handle(), event_handle, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT);
+ m_commandBuffer->end();
+ VkSubmitInfo submit_info = {};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &m_commandBuffer->handle();
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+ // Provoke an error from the core_validation layer
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Popbutton_T_Bumfuzzle");
+ vkDestroyEvent(m_device->device(), event_handle, NULL);
+ m_errorMonitor->VerifyFound();
+ vkQueueWaitIdle(m_device->m_queue);
+
+ // Provoke an error from the object tracker layer
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Popbutton_T_Bumfuzzle");
+ vkDestroyEvent(m_device->device(), event_handle, NULL);
+ m_errorMonitor->VerifyFound();
+
+ // Change label for a given object, then provoke an error from core_validation and look for the new name
+ name_info.pObjectName = "A_Totally_Different_Name";
+ fpvkSetDebugUtilsObjectNameEXT(device(), &name_info);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "A_Totally_Different_Name");
+ vkDestroyEvent(m_device->device(), event_handle, NULL);
+ m_errorMonitor->VerifyFound();
+
+ vkQueueWaitIdle(m_device->m_queue);
+}
+
+TEST_F(VkLayerTest, InvalidStructSType) {
+ TEST_DESCRIPTION("Specify an invalid VkStructureType for a Vulkan structure's sType field");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "parameter pAllocateInfo->sType must be");
+ // Zero struct memory, effectively setting sType to
+ // VK_STRUCTURE_TYPE_APPLICATION_INFO
+ // Expected to trigger an error with
+ // parameter_validation::validate_struct_type
+ VkMemoryAllocateInfo alloc_info = {};
+ VkDeviceMemory memory = VK_NULL_HANDLE;
+ vkAllocateMemory(device(), &alloc_info, NULL, &memory);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "parameter pSubmits[0].sType must be");
+ // Zero struct memory, effectively setting sType to
+ // VK_STRUCTURE_TYPE_APPLICATION_INFO
+ // Expected to trigger an error with
+ // parameter_validation::validate_struct_type_array
+ VkSubmitInfo submit_info = {};
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, InvalidStructPNext) {
+ TEST_DESCRIPTION("Specify an invalid value for a Vulkan structure's pNext field");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_WARNING_BIT_EXT, "value of pCreateInfo->pNext must be NULL");
+ // Set VkEventCreateInfo::pNext to a non-NULL value, when pNext must be NULL.
+ // Need to pick a function that has no allowed pNext structure types.
+ // Expected to trigger an error with parameter_validation::validate_struct_pnext
+ VkEvent event = VK_NULL_HANDLE;
+ VkEventCreateInfo event_alloc_info = {};
+ // Zero-initialization will provide the correct sType
+ VkApplicationInfo app_info = {};
+ event_alloc_info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
+ event_alloc_info.pNext = &app_info;
+ vkCreateEvent(device(), &event_alloc_info, NULL, &event);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_WARNING_BIT_EXT,
+ " chain includes a structure with unexpected VkStructureType ");
+ // Set VkMemoryAllocateInfo::pNext to a non-NULL value, but use
+ // a function that has allowed pNext structure types and specify
+ // a structure type that is not allowed.
+ // Expected to trigger an error with parameter_validation::validate_struct_pnext
+ VkDeviceMemory memory = VK_NULL_HANDLE;
+ VkMemoryAllocateInfo memory_alloc_info = {};
+ memory_alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ memory_alloc_info.pNext = &app_info;
+ vkAllocateMemory(device(), &memory_alloc_info, NULL, &memory);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, UnrecognizedValueOutOfRange) {
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "does not fall within the begin..end range of the core VkFormat enumeration tokens");
+ // Specify an invalid VkFormat value
+ // Expected to trigger an error with
+ // parameter_validation::validate_ranged_enum
+ VkFormatProperties format_properties;
+ vkGetPhysicalDeviceFormatProperties(gpu(), static_cast<VkFormat>(8000), &format_properties);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, UnrecognizedValueBadMask) {
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "contains flag bits that are not recognized members of");
+ // Specify an invalid VkFlags bitmask value
+ // Expected to trigger an error with parameter_validation::validate_flags
+ VkImageFormatProperties image_format_properties;
+ vkGetPhysicalDeviceImageFormatProperties(gpu(), VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_TYPE_2D, VK_IMAGE_TILING_OPTIMAL,
+ static_cast<VkImageUsageFlags>(1 << 25), 0, &image_format_properties);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, UnrecognizedValueBadFlag) {
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "contains flag bits that are not recognized members of");
+ // Specify an invalid VkFlags array entry
+ // Expected to trigger an error with parameter_validation::validate_flags_array
+ VkSemaphore semaphore;
+ VkSemaphoreCreateInfo semaphore_create_info{};
+ semaphore_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
+ vkCreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore);
+ // `stage_flags` is set to a value which, currently, is not a defined stage flag
+ // `VK_IMAGE_ASPECT_FLAG_BITS_MAX_ENUM` works well for this
+ VkPipelineStageFlags stage_flags = VK_IMAGE_ASPECT_FLAG_BITS_MAX_ENUM;
+ // `waitSemaphoreCount` *must* be greater than 0 to perform this check
+ VkSubmitInfo submit_info = {};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.waitSemaphoreCount = 1;
+ submit_info.pWaitSemaphores = &semaphore;
+ submit_info.pWaitDstStageMask = &stage_flags;
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+ vkDestroySemaphore(m_device->device(), semaphore, nullptr);
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, UnrecognizedValueBadBool) {
+ // Make sure using VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE doesn't trigger a false positive.
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME)) {
+ m_device_extension_names.push_back(VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME);
+ } else {
+ printf("%s VK_KHR_sampler_mirror_clamp_to_edge extension not supported, skipping test\n", kSkipPrefix);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_WARNING_BIT_EXT, "is neither VK_TRUE nor VK_FALSE");
+ // Specify an invalid VkBool32 value, expecting a warning with parameter_validation::validate_bool32
+ VkSampler sampler = VK_NULL_HANDLE;
+ VkSamplerCreateInfo sampler_info = SafeSaneSamplerCreateInfo();
+ sampler_info.addressModeU = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE;
+ sampler_info.addressModeV = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE;
+ sampler_info.addressModeW = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE;
+
+ // Not VK_TRUE or VK_FALSE
+ sampler_info.anisotropyEnable = 3;
+ vkCreateSampler(m_device->device(), &sampler_info, NULL, &sampler);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, MirrorClampToEdgeNotEnabled) {
+ TEST_DESCRIPTION("Validation should catch using CLAMP_TO_EDGE addressing mode if the extension is not enabled.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkSamplerCreateInfo-addressModeU-01079");
+ VkSampler sampler = VK_NULL_HANDLE;
+ VkSamplerCreateInfo sampler_info = SafeSaneSamplerCreateInfo();
+ // Set the modes to cause the error
+ sampler_info.addressModeU = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE;
+ sampler_info.addressModeV = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE;
+ sampler_info.addressModeW = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE;
+
+ vkCreateSampler(m_device->device(), &sampler_info, NULL, &sampler);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, AnisotropyFeatureDisabled) {
+ TEST_DESCRIPTION("Validation should check anisotropy parameters are correct with samplerAnisotropy disabled.");
+
+ // Determine if required device features are available
+ VkPhysicalDeviceFeatures device_features = {};
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ ASSERT_NO_FATAL_FAILURE(GetPhysicalDeviceFeatures(&device_features));
+ device_features.samplerAnisotropy = VK_FALSE; // force anisotropy off
+ ASSERT_NO_FATAL_FAILURE(InitState(&device_features));
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkSamplerCreateInfo-anisotropyEnable-01070");
+ VkSamplerCreateInfo sampler_info = SafeSaneSamplerCreateInfo();
+ // With the samplerAnisotropy feature disabled, the sampler must not enable anisotropy.
+ sampler_info.anisotropyEnable = VK_TRUE;
+ VkSampler sampler = VK_NULL_HANDLE;
+
+ VkResult err;
+ err = vkCreateSampler(m_device->device(), &sampler_info, NULL, &sampler);
+ m_errorMonitor->VerifyFound();
+ if (VK_SUCCESS == err) {
+ vkDestroySampler(m_device->device(), sampler, NULL);
+ }
+ sampler = VK_NULL_HANDLE;
+}
+
+TEST_F(VkLayerTest, AnisotropyFeatureEnabled) {
+ TEST_DESCRIPTION("Validation must check several conditions that apply only when Anisotropy is enabled.");
+
+ // Determine if required device features are available
+ VkPhysicalDeviceFeatures device_features = {};
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ ASSERT_NO_FATAL_FAILURE(GetPhysicalDeviceFeatures(&device_features));
+
+ // These tests require that the device support anisotropic filtering
+ if (VK_TRUE != device_features.samplerAnisotropy) {
+ printf("%s Test requires unsupported samplerAnisotropy feature. Skipped.\n", kSkipPrefix);
+ return;
+ }
+
+ bool cubic_support = false;
+ if (DeviceExtensionSupported(gpu(), nullptr, "VK_IMG_filter_cubic")) {
+ m_device_extension_names.push_back("VK_IMG_filter_cubic");
+ cubic_support = true;
+ }
+
+ VkSamplerCreateInfo sampler_info_ref = SafeSaneSamplerCreateInfo();
+ sampler_info_ref.anisotropyEnable = VK_TRUE;
+ VkSamplerCreateInfo sampler_info = sampler_info_ref;
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ auto do_test = [this](std::string code, const VkSamplerCreateInfo *pCreateInfo) -> void {
+ VkResult err;
+ VkSampler sampler = VK_NULL_HANDLE;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, code);
+ err = vkCreateSampler(m_device->device(), pCreateInfo, NULL, &sampler);
+ m_errorMonitor->VerifyFound();
+ if (VK_SUCCESS == err) {
+ vkDestroySampler(m_device->device(), sampler, NULL);
+ }
+ };
+
+ // maxAnisotropy out-of-bounds low.
+ sampler_info.maxAnisotropy = NearestSmaller(1.0F);
+ do_test("VUID-VkSamplerCreateInfo-anisotropyEnable-01071", &sampler_info);
+ sampler_info.maxAnisotropy = sampler_info_ref.maxAnisotropy;
+
+ // maxAnisotropy out-of-bounds high.
+ sampler_info.maxAnisotropy = NearestGreater(m_device->phy().properties().limits.maxSamplerAnisotropy);
+ do_test("VUID-VkSamplerCreateInfo-anisotropyEnable-01071", &sampler_info);
+ sampler_info.maxAnisotropy = sampler_info_ref.maxAnisotropy;
+
+ // Both anisotropy and unnormalized coords enabled
+ sampler_info.unnormalizedCoordinates = VK_TRUE;
+ // If unnormalizedCoordinates is VK_TRUE, minLod and maxLod must be zero
+ sampler_info.minLod = 0;
+ sampler_info.maxLod = 0;
+ do_test("VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01076", &sampler_info);
+ sampler_info.unnormalizedCoordinates = sampler_info_ref.unnormalizedCoordinates;
+
+ // Both anisotropy and cubic filtering enabled
+ if (cubic_support) {
+ sampler_info.minFilter = VK_FILTER_CUBIC_IMG;
+ do_test("VUID-VkSamplerCreateInfo-magFilter-01081", &sampler_info);
+ sampler_info.minFilter = sampler_info_ref.minFilter;
+
+ sampler_info.magFilter = VK_FILTER_CUBIC_IMG;
+ do_test("VUID-VkSamplerCreateInfo-magFilter-01081", &sampler_info);
+ sampler_info.magFilter = sampler_info_ref.magFilter;
+ } else {
+ printf("%s Test requires unsupported extension \"VK_IMG_filter_cubic\". Skipped.\n", kSkipPrefix);
+ }
+}
+
+TEST_F(VkLayerTest, UnnormalizedCoordinatesEnabled) {
+ TEST_DESCRIPTION("Validate restrictions on sampler parameters when unnormalizedCoordinates is true.");
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ VkSamplerCreateInfo sampler_info_ref = SafeSaneSamplerCreateInfo();
+ sampler_info_ref.unnormalizedCoordinates = VK_TRUE;
+ sampler_info_ref.minLod = 0.0f;
+ sampler_info_ref.maxLod = 0.0f;
+ VkSamplerCreateInfo sampler_info = sampler_info_ref;
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ auto do_test = [this](std::string code, const VkSamplerCreateInfo *pCreateInfo) -> void {
+ VkResult err;
+ VkSampler sampler = VK_NULL_HANDLE;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, code);
+ err = vkCreateSampler(m_device->device(), pCreateInfo, NULL, &sampler);
+ m_errorMonitor->VerifyFound();
+ if (VK_SUCCESS == err) {
+ vkDestroySampler(m_device->device(), sampler, NULL);
+ }
+ };
+
+ // min and mag filters must be the same
+ sampler_info.minFilter = VK_FILTER_NEAREST;
+ sampler_info.magFilter = VK_FILTER_LINEAR;
+ do_test("VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01072", &sampler_info);
+ std::swap(sampler_info.minFilter, sampler_info.magFilter);
+ do_test("VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01072", &sampler_info);
+ sampler_info = sampler_info_ref;
+
+ // mipmapMode must be NEAREST
+ sampler_info.mipmapMode = VK_SAMPLER_MIPMAP_MODE_LINEAR;
+ do_test("VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01073", &sampler_info);
+ sampler_info = sampler_info_ref;
+
+ // minLod and maxLod must be zero
+ sampler_info.maxLod = 3.14159f;
+ do_test("VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01074", &sampler_info);
+ sampler_info.minLod = 2.71828f;
+ do_test("VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01074", &sampler_info);
+ sampler_info = sampler_info_ref;
+
+ // addressModeU and addressModeV must both be CLAMP_TO_EDGE or CLAMP_TO_BORDER
+ // checks all 12 invalid combinations out of 16 total combinations
+ const std::array<VkSamplerAddressMode, 4> kAddressModes = {{
+ VK_SAMPLER_ADDRESS_MODE_REPEAT,
+ VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,
+ VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
+ VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,
+ }};
+ for (const auto umode : kAddressModes) {
+ for (const auto vmode : kAddressModes) {
+ if ((umode != VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE && umode != VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER) ||
+ (vmode != VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE && vmode != VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER)) {
+ sampler_info.addressModeU = umode;
+ sampler_info.addressModeV = vmode;
+ do_test("VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01075", &sampler_info);
+ }
+ }
+ }
+ sampler_info = sampler_info_ref;
+
+ // VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01076 is tested in AnisotropyFeatureEnabled above
+ // Since it requires checking/enabling the anisotropic filtering feature, it's easier to do it
+ // with the other anisotropic tests.
+
+ // compareEnable must be VK_FALSE
+ sampler_info.compareEnable = VK_TRUE;
+ do_test("VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01077", &sampler_info);
+ sampler_info = sampler_info_ref;
+}
+
+TEST_F(VkLayerTest, UnrecognizedValueMaxEnum) {
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ // Specify MAX_ENUM
+ VkFormatProperties format_properties;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "does not fall within the begin..end range");
+ vkGetPhysicalDeviceFormatProperties(gpu(), VK_FORMAT_MAX_ENUM, &format_properties);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, UpdateBufferAlignment) {
+ TEST_DESCRIPTION("Check alignment parameters for vkCmdUpdateBuffer");
+ uint32_t updateData[] = {1, 2, 3, 4, 5, 6, 7, 8};
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ VkMemoryPropertyFlags reqs = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
+ VkBufferObj buffer;
+ buffer.init_as_dst(*m_device, (VkDeviceSize)20, reqs);
+
+ m_commandBuffer->begin();
+ // Introduce failure by using dstOffset that is not multiple of 4
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " is not a multiple of 4");
+ m_commandBuffer->UpdateBuffer(buffer.handle(), 1, 4, updateData);
+ m_errorMonitor->VerifyFound();
+
+ // Introduce failure by using dataSize that is not multiple of 4
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " is not a multiple of 4");
+ m_commandBuffer->UpdateBuffer(buffer.handle(), 0, 6, updateData);
+ m_errorMonitor->VerifyFound();
+
+    // Introduce failure by using a negative dataSize, which wraps to a huge unsigned value
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "must be greater than zero and less than or equal to 65536");
+ m_commandBuffer->UpdateBuffer(buffer.handle(), 0, (VkDeviceSize)-44, updateData);
+ m_errorMonitor->VerifyFound();
+
+ // Introduce failure by using dataSize that is > 65536
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "must be greater than zero and less than or equal to 65536");
+ m_commandBuffer->UpdateBuffer(buffer.handle(), 0, (VkDeviceSize)80000, updateData);
+ m_errorMonitor->VerifyFound();
+
+ m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, FillBufferAlignment) {
+ TEST_DESCRIPTION("Check alignment parameters for vkCmdFillBuffer");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ VkMemoryPropertyFlags reqs = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
+ VkBufferObj buffer;
+ buffer.init_as_dst(*m_device, (VkDeviceSize)20, reqs);
+
+ m_commandBuffer->begin();
+
+ // Introduce failure by using dstOffset that is not multiple of 4
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " is not a multiple of 4");
+ m_commandBuffer->FillBuffer(buffer.handle(), 1, 4, 0x11111111);
+ m_errorMonitor->VerifyFound();
+
+ // Introduce failure by using size that is not multiple of 4
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " is not a multiple of 4");
+ m_commandBuffer->FillBuffer(buffer.handle(), 0, 6, 0x11111111);
+ m_errorMonitor->VerifyFound();
+
+ // Introduce failure by using size that is zero
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "must be greater than zero");
+ m_commandBuffer->FillBuffer(buffer.handle(), 0, 0, 0x11111111);
+ m_errorMonitor->VerifyFound();
+
+ m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, PSOPolygonModeInvalid) {
+ TEST_DESCRIPTION("Attempt to use a non-solid polygon fill mode in a pipeline when this feature is not enabled.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ std::vector<const char *> device_extension_names;
+ auto features = m_device->phy().features();
+ // Artificially disable support for non-solid fill modes
+ features.fillModeNonSolid = VK_FALSE;
+ // The sacrificial device object
+ VkDeviceObj test_device(0, gpu(), device_extension_names, &features);
+
+ VkRenderpassObj render_pass(&test_device);
+
+ const VkPipelineLayoutObj pipeline_layout(&test_device);
+
+ VkPipelineRasterizationStateCreateInfo rs_ci = {};
+ rs_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
+ rs_ci.pNext = nullptr;
+ rs_ci.lineWidth = 1.0f;
+ rs_ci.rasterizerDiscardEnable = VK_TRUE;
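+    // Rasterizer discard is enabled; the polygonMode parameter check below still applies at pipeline-creation time.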
+
+ VkShaderObj vs(&test_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(&test_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ // Set polygonMode to unsupported value POINT, should fail
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "polygonMode cannot be VK_POLYGON_MODE_POINT or VK_POLYGON_MODE_LINE");
+ {
+ VkPipelineObj pipe(&test_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+ pipe.AddDefaultColorAttachment();
+ // Introduce failure by setting unsupported polygon mode
+ rs_ci.polygonMode = VK_POLYGON_MODE_POINT;
+ pipe.SetRasterization(&rs_ci);
+ pipe.CreateVKPipeline(pipeline_layout.handle(), render_pass.handle());
+ }
+ m_errorMonitor->VerifyFound();
+
+ // Try again with polygonMode=LINE, should fail
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "polygonMode cannot be VK_POLYGON_MODE_POINT or VK_POLYGON_MODE_LINE");
+ {
+ VkPipelineObj pipe(&test_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+ pipe.AddDefaultColorAttachment();
+ // Introduce failure by setting unsupported polygon mode
+ rs_ci.polygonMode = VK_POLYGON_MODE_LINE;
+ pipe.SetRasterization(&rs_ci);
+ pipe.CreateVKPipeline(pipeline_layout.handle(), render_pass.handle());
+ }
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, SparseBindingImageBufferCreate) {
+ TEST_DESCRIPTION("Create buffer/image with sparse attributes but without the sparse_binding bit set");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ VkBuffer buffer;
+ VkBufferCreateInfo buf_info = {};
+ buf_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+ buf_info.pNext = NULL;
+ buf_info.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
+ buf_info.size = 2048;
+ buf_info.queueFamilyIndexCount = 0;
+ buf_info.pQueueFamilyIndices = NULL;
+ buf_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+
+ if (m_device->phy().features().sparseResidencyBuffer) {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferCreateInfo-flags-00918");
+
+ buf_info.flags = VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT;
+ vkCreateBuffer(m_device->device(), &buf_info, NULL, &buffer);
+ m_errorMonitor->VerifyFound();
+ } else {
+ printf("%s Test requires unsupported sparseResidencyBuffer feature. Skipped.\n", kSkipPrefix);
+ return;
+ }
+
+ if (m_device->phy().features().sparseResidencyAliased) {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferCreateInfo-flags-00918");
+
+ buf_info.flags = VK_BUFFER_CREATE_SPARSE_ALIASED_BIT;
+ vkCreateBuffer(m_device->device(), &buf_info, NULL, &buffer);
+ m_errorMonitor->VerifyFound();
+ } else {
+ printf("%s Test requires unsupported sparseResidencyAliased feature. Skipped.\n", kSkipPrefix);
+ return;
+ }
+
+ VkImage image;
+ VkImageCreateInfo image_create_info = {};
+ image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ image_create_info.pNext = NULL;
+ image_create_info.imageType = VK_IMAGE_TYPE_2D;
+ image_create_info.format = VK_FORMAT_R8G8B8A8_UNORM;
+ image_create_info.extent.width = 512;
+ image_create_info.extent.height = 64;
+ image_create_info.extent.depth = 1;
+ image_create_info.mipLevels = 1;
+ image_create_info.arrayLayers = 1;
+ image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+ image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+ image_create_info.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
+ image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
+ image_create_info.queueFamilyIndexCount = 0;
+ image_create_info.pQueueFamilyIndices = NULL;
+ image_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+
+ if (m_device->phy().features().sparseResidencyImage2D) {
+ image_create_info.flags = VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-flags-00987");
+ vkCreateImage(m_device->device(), &image_create_info, NULL, &image);
+ m_errorMonitor->VerifyFound();
+ } else {
+ printf("%s Test requires unsupported sparseResidencyImage2D feature. Skipped.\n", kSkipPrefix);
+ return;
+ }
+
+ if (m_device->phy().features().sparseResidencyAliased) {
+ image_create_info.flags = VK_IMAGE_CREATE_SPARSE_ALIASED_BIT;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-flags-00987");
+ vkCreateImage(m_device->device(), &image_create_info, NULL, &image);
+ m_errorMonitor->VerifyFound();
+ } else {
+ printf("%s Test requires unsupported sparseResidencyAliased feature. Skipped.\n", kSkipPrefix);
+ return;
+ }
+}
+
+TEST_F(VkLayerTest, SparseResidencyImageCreateUnsupportedTypes) {
+ TEST_DESCRIPTION("Create images with sparse residency with unsupported types");
+
+    // Determine which device features are available
+ VkPhysicalDeviceFeatures device_features = {};
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ ASSERT_NO_FATAL_FAILURE(GetPhysicalDeviceFeatures(&device_features));
+
+ // Mask out device features we don't want and initialize device state
+ device_features.sparseResidencyImage2D = VK_FALSE;
+ device_features.sparseResidencyImage3D = VK_FALSE;
+ ASSERT_NO_FATAL_FAILURE(InitState(&device_features));
+
+ if (!m_device->phy().features().sparseBinding) {
+ printf("%s Test requires unsupported sparseBinding feature. Skipped.\n", kSkipPrefix);
+ return;
+ }
+
+ VkImage image = VK_NULL_HANDLE;
+ VkResult result = VK_RESULT_MAX_ENUM;
+ VkImageCreateInfo image_create_info = {};
+ image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ image_create_info.pNext = NULL;
+ image_create_info.imageType = VK_IMAGE_TYPE_1D;
+ image_create_info.format = VK_FORMAT_R8G8B8A8_UNORM;
+ image_create_info.extent.width = 512;
+ image_create_info.extent.height = 1;
+ image_create_info.extent.depth = 1;
+ image_create_info.mipLevels = 1;
+ image_create_info.arrayLayers = 1;
+ image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+ image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+ image_create_info.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
+ image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
+ image_create_info.queueFamilyIndexCount = 0;
+ image_create_info.pQueueFamilyIndices = NULL;
+ image_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
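+    // Per the spec, sparse residency (and aliasing) also require sparse binding, hence both flags are set.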
+    image_create_info.flags = VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT | VK_IMAGE_CREATE_SPARSE_BINDING_BIT;
+
+ // 1D image w/ sparse residency is an error
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-imageType-00970");
+ result = vkCreateImage(m_device->device(), &image_create_info, NULL, &image);
+ m_errorMonitor->VerifyFound();
+ if (VK_SUCCESS == result) {
+ vkDestroyImage(m_device->device(), image, NULL);
+ image = VK_NULL_HANDLE;
+ }
+
+ // 2D image w/ sparse residency when feature isn't available
+ image_create_info.imageType = VK_IMAGE_TYPE_2D;
+ image_create_info.extent.height = 64;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-imageType-00971");
+ result = vkCreateImage(m_device->device(), &image_create_info, NULL, &image);
+ m_errorMonitor->VerifyFound();
+ if (VK_SUCCESS == result) {
+ vkDestroyImage(m_device->device(), image, NULL);
+ image = VK_NULL_HANDLE;
+ }
+
+ // 3D image w/ sparse residency when feature isn't available
+ image_create_info.imageType = VK_IMAGE_TYPE_3D;
+ image_create_info.extent.depth = 8;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-imageType-00972");
+ result = vkCreateImage(m_device->device(), &image_create_info, NULL, &image);
+ m_errorMonitor->VerifyFound();
+ if (VK_SUCCESS == result) {
+ vkDestroyImage(m_device->device(), image, NULL);
+ image = VK_NULL_HANDLE;
+ }
+}
+
+TEST_F(VkLayerTest, SparseResidencyImageCreateUnsupportedSamples) {
+ TEST_DESCRIPTION("Create images with sparse residency with unsupported tiling or sample counts");
+
+    // Determine which device features are available
+ VkPhysicalDeviceFeatures device_features = {};
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ ASSERT_NO_FATAL_FAILURE(GetPhysicalDeviceFeatures(&device_features));
+
+ // These tests require that the device support sparse residency for 2D images
+ if (VK_TRUE != device_features.sparseResidencyImage2D) {
+ printf("%s Test requires unsupported SparseResidencyImage2D feature. Skipped.\n", kSkipPrefix);
+ return;
+ }
+
+ // Mask out device features we don't want and initialize device state
+ device_features.sparseResidency2Samples = VK_FALSE;
+ device_features.sparseResidency4Samples = VK_FALSE;
+ device_features.sparseResidency8Samples = VK_FALSE;
+ device_features.sparseResidency16Samples = VK_FALSE;
+ ASSERT_NO_FATAL_FAILURE(InitState(&device_features));
+
+ VkImage image = VK_NULL_HANDLE;
+ VkResult result = VK_RESULT_MAX_ENUM;
+ VkImageCreateInfo image_create_info = {};
+ image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ image_create_info.pNext = NULL;
+ image_create_info.imageType = VK_IMAGE_TYPE_2D;
+ image_create_info.format = VK_FORMAT_R8G8B8A8_UNORM;
+ image_create_info.extent.width = 64;
+ image_create_info.extent.height = 64;
+ image_create_info.extent.depth = 1;
+ image_create_info.mipLevels = 1;
+ image_create_info.arrayLayers = 1;
+ image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+ image_create_info.tiling = VK_IMAGE_TILING_LINEAR;
+ image_create_info.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
+ image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
+ image_create_info.queueFamilyIndexCount = 0;
+ image_create_info.pQueueFamilyIndices = NULL;
+ image_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+    image_create_info.flags = VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT | VK_IMAGE_CREATE_SPARSE_BINDING_BIT;
+
+ // 2D image w/ sparse residency and linear tiling is an error
+ m_errorMonitor->SetDesiredFailureMsg(
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT then image tiling of VK_IMAGE_TILING_LINEAR is not supported");
+ result = vkCreateImage(m_device->device(), &image_create_info, NULL, &image);
+ m_errorMonitor->VerifyFound();
+ if (VK_SUCCESS == result) {
+ vkDestroyImage(m_device->device(), image, NULL);
+ image = VK_NULL_HANDLE;
+ }
+ image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+
+ // Multi-sample image w/ sparse residency when feature isn't available (4 flavors)
+ image_create_info.samples = VK_SAMPLE_COUNT_2_BIT;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-imageType-00973");
+ result = vkCreateImage(m_device->device(), &image_create_info, NULL, &image);
+ m_errorMonitor->VerifyFound();
+ if (VK_SUCCESS == result) {
+ vkDestroyImage(m_device->device(), image, NULL);
+ image = VK_NULL_HANDLE;
+ }
+
+ image_create_info.samples = VK_SAMPLE_COUNT_4_BIT;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-imageType-00974");
+ result = vkCreateImage(m_device->device(), &image_create_info, NULL, &image);
+ m_errorMonitor->VerifyFound();
+ if (VK_SUCCESS == result) {
+ vkDestroyImage(m_device->device(), image, NULL);
+ image = VK_NULL_HANDLE;
+ }
+
+ image_create_info.samples = VK_SAMPLE_COUNT_8_BIT;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-imageType-00975");
+ result = vkCreateImage(m_device->device(), &image_create_info, NULL, &image);
+ m_errorMonitor->VerifyFound();
+ if (VK_SUCCESS == result) {
+ vkDestroyImage(m_device->device(), image, NULL);
+ image = VK_NULL_HANDLE;
+ }
+
+ image_create_info.samples = VK_SAMPLE_COUNT_16_BIT;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-imageType-00976");
+ result = vkCreateImage(m_device->device(), &image_create_info, NULL, &image);
+ m_errorMonitor->VerifyFound();
+ if (VK_SUCCESS == result) {
+ vkDestroyImage(m_device->device(), image, NULL);
+ image = VK_NULL_HANDLE;
+ }
+}
+
+TEST_F(VkLayerTest, GpuValidationArrayOOB) {
+ TEST_DESCRIPTION("GPU validation: Verify detection of out-of-bounds descriptor array indexing.");
+ if (!VkRenderFramework::DeviceCanDraw()) {
+ printf("%s GPU-Assisted validation test requires a driver that can draw.\n", kSkipPrefix);
+ return;
+ }
+#if defined(ANDROID)
+ if (instance() == VK_NULL_HANDLE) {
+ printf("%s Skipping test on Android temporarily while debugging test execution failure.\n", kSkipPrefix);
+ return;
+ }
+#endif
+ VkValidationFeatureEnableEXT enables[] = {VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT};
+ VkValidationFeaturesEXT features = {};
+ features.sType = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT;
+ features.enabledValidationFeatureCount = 1;
+ features.pEnabledValidationFeatures = enables;
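+    // Handing VkValidationFeaturesEXT to Init() below chains it into instance creation,
+    // enabling GPU-assisted validation for this test.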
+ VkCommandPoolCreateFlags pool_flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
+ ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, pool_flags, &features));
+ if (m_device->props.apiVersion < VK_API_VERSION_1_1) {
+ printf("%s GPU-Assisted validation test requires Vulkan 1.1+.\n", kSkipPrefix);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitViewport());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ // Make a uniform buffer to be passed to the shader that contains the invalid array index.
+ uint32_t qfi = 0;
+ VkBufferCreateInfo bci = {};
+ bci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+ bci.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
+ bci.size = 1024;
+ bci.queueFamilyIndexCount = 1;
+ bci.pQueueFamilyIndices = &qfi;
+ VkBufferObj buffer0;
+ VkMemoryPropertyFlags mem_props = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
+ buffer0.init(*m_device, bci, mem_props);
+ uint32_t *data = (uint32_t *)buffer0.memory().map();
+ data[0] = 25;
+ buffer0.memory().unmap();
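+    // 25 is intentionally out of bounds for the 6-element combined-image-sampler array bound at binding 1 below.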
+
+ // Prepare descriptors
+ OneOffDescriptorSet ds(m_device, {
+ {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+ {1, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 6, VK_SHADER_STAGE_ALL, nullptr},
+ });
+
+ const VkPipelineLayoutObj pipeline_layout(m_device, {&ds.layout_});
+ VkTextureObj texture(m_device, nullptr);
+ VkSamplerObj sampler(m_device);
+
+ VkDescriptorBufferInfo buffer_info[1] = {};
+ buffer_info[0].buffer = buffer0.handle();
+ buffer_info[0].offset = 0;
+ buffer_info[0].range = sizeof(uint32_t);
+
+ VkDescriptorImageInfo image_info[6] = {};
+ for (int i = 0; i < 6; i++) {
+ image_info[i] = texture.DescriptorImageInfo();
+ image_info[i].sampler = sampler.handle();
+ image_info[i].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+ }
+
+ VkWriteDescriptorSet descriptor_writes[2] = {};
+ descriptor_writes[0].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ descriptor_writes[0].dstSet = ds.set_; // descriptor_set;
+ descriptor_writes[0].dstBinding = 0;
+ descriptor_writes[0].descriptorCount = 1;
+ descriptor_writes[0].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+ descriptor_writes[0].pBufferInfo = buffer_info;
+ descriptor_writes[1].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ descriptor_writes[1].dstSet = ds.set_; // descriptor_set;
+ descriptor_writes[1].dstBinding = 1;
+ descriptor_writes[1].descriptorCount = 6;
+ descriptor_writes[1].descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+ descriptor_writes[1].pImageInfo = image_info;
+ vkUpdateDescriptorSets(m_device->device(), 2, descriptor_writes, 0, NULL);
+
+ // Shader programs for array OOB test in vertex stage:
+ // - The vertex shader fetches the invalid index from the uniform buffer and uses it to make an invalid index into another
+ // array.
+ char const *vsSource_vert =
+ "#version 450\n"
+ "\n"
+ "layout(std140, set = 0, binding = 0) uniform foo { uint tex_index[1]; } uniform_index_buffer;\n"
+ "layout(set = 0, binding = 1) uniform sampler2D tex[6];\n"
+ "vec2 vertices[3];\n"
+ "void main(){\n"
+ " vertices[0] = vec2(-1.0, -1.0);\n"
+ " vertices[1] = vec2( 1.0, -1.0);\n"
+ " vertices[2] = vec2( 0.0, 1.0);\n"
+ " gl_Position = vec4(vertices[gl_VertexIndex % 3], 0.0, 1.0);\n"
+ " gl_Position += 1e-30 * texture(tex[uniform_index_buffer.tex_index[0]], vec2(0, 0));\n"
+ "}\n";
+ char const *fsSource_vert =
+ "#version 450\n"
+ "\n"
+ "layout(set = 0, binding = 1) uniform sampler2D tex[6];\n"
+ "layout(location = 0) out vec4 uFragColor;\n"
+ "void main(){\n"
+ " uFragColor = texture(tex[0], vec2(0, 0));\n"
+ "}\n";
+
+ // Shader programs for array OOB test in fragment stage:
+ // - The vertex shader fetches the invalid index from the uniform buffer and passes it to the fragment shader.
+ // - The fragment shader makes the invalid array access.
+ char const *vsSource_frag =
+ "#version 450\n"
+ "\n"
+ "layout(std140, binding = 0) uniform foo { uint tex_index[1]; } uniform_index_buffer;\n"
+ "layout(location = 0) out flat uint tex_ind;\n"
+ "vec2 vertices[3];\n"
+ "void main(){\n"
+ " vertices[0] = vec2(-1.0, -1.0);\n"
+ " vertices[1] = vec2( 1.0, -1.0);\n"
+ " vertices[2] = vec2( 0.0, 1.0);\n"
+ " gl_Position = vec4(vertices[gl_VertexIndex % 3], 0.0, 1.0);\n"
+ " tex_ind = uniform_index_buffer.tex_index[0];\n"
+ "}\n";
+ char const *fsSource_frag =
+ "#version 450\n"
+ "\n"
+ "layout(set = 0, binding = 1) uniform sampler2D tex[6];\n"
+ "layout(location = 0) out vec4 uFragColor;\n"
+ "layout(location = 0) in flat uint tex_ind;\n"
+ "void main(){\n"
+ " uFragColor = texture(tex[tex_ind], vec2(0, 0));\n"
+ "}\n";
+
+ struct TestCase {
+ char const *vertex_source;
+ char const *fragment_source;
+ bool debug;
+ char const *expected_error;
+ };
+
+ std::vector<TestCase> tests;
+ tests.push_back({vsSource_vert, fsSource_vert, false, "Index of 25 used to index descriptor array of length 6."});
+ tests.push_back({vsSource_frag, fsSource_frag, false, "Index of 25 used to index descriptor array of length 6."});
+#if !defined(ANDROID)
+ // The Android test framework uses shaderc for online compilations. Even when configured to compile with debug info,
+ // shaderc seems to drop the OpLine instructions from the shader binary. This causes the following two tests to fail
+ // on Android platforms. Skip these tests until the shaderc issue is understood/resolved.
+ tests.push_back({vsSource_vert, fsSource_vert, true,
+ "gl_Position += 1e-30 * texture(tex[uniform_index_buffer.tex_index[0]], vec2(0, 0));"});
+ tests.push_back({vsSource_frag, fsSource_frag, true, "uFragColor = texture(tex[tex_ind], vec2(0, 0));"});
+#endif
+
+ VkViewport viewport = m_viewports[0];
+ VkRect2D scissors = m_scissors[0];
+
+ VkSubmitInfo submit_info = {};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &m_commandBuffer->handle();
+
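+    // For each case: build shaders (optionally with debug info), create a pipeline, record and submit a draw,
+    // then wait idle; GPU-assisted validation reports the out-of-bounds descriptor index at that point.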
+ for (const auto &iter : tests) {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, iter.expected_error);
+ VkShaderObj vs(m_device, iter.vertex_source, VK_SHADER_STAGE_VERTEX_BIT, this, "main", iter.debug);
+ VkShaderObj fs(m_device, iter.fragment_source, VK_SHADER_STAGE_FRAGMENT_BIT, this, "main", iter.debug);
+ VkPipelineObj pipe(m_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+ pipe.AddDefaultColorAttachment();
+ VkResult err = pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());
+ ASSERT_VK_SUCCESS(err);
+ m_commandBuffer->begin();
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+ vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+ vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
+ &ds.set_, 0, nullptr);
+ vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+ vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissors);
+ vkCmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
+ vkCmdEndRenderPass(m_commandBuffer->handle());
+ m_commandBuffer->end();
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+ vkQueueWaitIdle(m_device->m_queue);
+ m_errorMonitor->VerifyFound();
+ }
+ return;
+}
+
+TEST_F(VkLayerTest, InvalidMemoryAliasing) {
+ TEST_DESCRIPTION(
+ "Create a buffer and image, allocate memory, and bind the buffer and image to memory such that they will alias.");
+ VkResult err;
+ bool pass;
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ VkBuffer buffer, buffer2;
+ VkImage image;
+ VkImage image2;
+ VkDeviceMemory mem; // buffer will be bound first
+ VkDeviceMemory mem_img; // image bound first
+ VkMemoryRequirements buff_mem_reqs, img_mem_reqs;
+ VkMemoryRequirements buff_mem_reqs2, img_mem_reqs2;
+
+ VkBufferCreateInfo buf_info = {};
+ buf_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+ buf_info.pNext = NULL;
+ buf_info.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
+ buf_info.size = 256;
+ buf_info.queueFamilyIndexCount = 0;
+ buf_info.pQueueFamilyIndices = NULL;
+ buf_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+ buf_info.flags = 0;
+ err = vkCreateBuffer(m_device->device(), &buf_info, NULL, &buffer);
+ ASSERT_VK_SUCCESS(err);
+
+ vkGetBufferMemoryRequirements(m_device->device(), buffer, &buff_mem_reqs);
+
+ VkImageCreateInfo image_create_info = {};
+ image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ image_create_info.pNext = NULL;
+ image_create_info.imageType = VK_IMAGE_TYPE_2D;
+ image_create_info.format = VK_FORMAT_R8G8B8A8_UNORM;
+ image_create_info.extent.width = 64;
+ image_create_info.extent.height = 64;
+ image_create_info.extent.depth = 1;
+ image_create_info.mipLevels = 1;
+ image_create_info.arrayLayers = 1;
+ image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+ // Image tiling must be optimal to trigger error when aliasing linear buffer
+ image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+ image_create_info.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
+ image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
+ image_create_info.queueFamilyIndexCount = 0;
+ image_create_info.pQueueFamilyIndices = NULL;
+ image_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+ image_create_info.flags = 0;
+
+ err = vkCreateImage(m_device->device(), &image_create_info, NULL, &image);
+ ASSERT_VK_SUCCESS(err);
+ err = vkCreateImage(m_device->device(), &image_create_info, NULL, &image2);
+ ASSERT_VK_SUCCESS(err);
+
+ vkGetImageMemoryRequirements(m_device->device(), image, &img_mem_reqs);
+
+ VkMemoryAllocateInfo alloc_info = {};
+ alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ alloc_info.pNext = NULL;
+ alloc_info.memoryTypeIndex = 0;
+ // Ensure memory is big enough for both bindings
+ alloc_info.allocationSize = buff_mem_reqs.size + img_mem_reqs.size;
+ pass = m_device->phy().set_memory_type(buff_mem_reqs.memoryTypeBits & img_mem_reqs.memoryTypeBits, &alloc_info,
+ VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
+ if (!pass) {
+ printf("%s Failed to set memory type.\n", kSkipPrefix);
+ vkDestroyBuffer(m_device->device(), buffer, NULL);
+ vkDestroyImage(m_device->device(), image, NULL);
+ vkDestroyImage(m_device->device(), image2, NULL);
+ return;
+ }
+ err = vkAllocateMemory(m_device->device(), &alloc_info, NULL, &mem);
+ ASSERT_VK_SUCCESS(err);
+ err = vkBindBufferMemory(m_device->device(), buffer, mem, 0);
+ ASSERT_VK_SUCCESS(err);
+
+ vkGetImageMemoryRequirements(m_device->device(), image2, &img_mem_reqs2);
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_WARNING_BIT_EXT, " is aliased with linear buffer 0x");
+ // VALIDATION FAILURE due to image mapping overlapping buffer mapping
+ err = vkBindImageMemory(m_device->device(), image, mem, 0);
+ m_errorMonitor->VerifyFound();
+
+ // Now correctly bind image2 to second mem allocation before incorrectly
+ // aliasing buffer2
+ err = vkCreateBuffer(m_device->device(), &buf_info, NULL, &buffer2);
+ ASSERT_VK_SUCCESS(err);
+ err = vkAllocateMemory(m_device->device(), &alloc_info, NULL, &mem_img);
+ ASSERT_VK_SUCCESS(err);
+ err = vkBindImageMemory(m_device->device(), image2, mem_img, 0);
+ ASSERT_VK_SUCCESS(err);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_WARNING_BIT_EXT, "is aliased with non-linear image 0x");
+ vkGetBufferMemoryRequirements(m_device->device(), buffer2, &buff_mem_reqs2);
+ err = vkBindBufferMemory(m_device->device(), buffer2, mem_img, 0);
+ m_errorMonitor->VerifyFound();
+
+ vkDestroyBuffer(m_device->device(), buffer, NULL);
+ vkDestroyBuffer(m_device->device(), buffer2, NULL);
+ vkDestroyImage(m_device->device(), image, NULL);
+ vkDestroyImage(m_device->device(), image2, NULL);
+ vkFreeMemory(m_device->device(), mem, NULL);
+ vkFreeMemory(m_device->device(), mem_img, NULL);
+}
+
+TEST_F(VkLayerTest, InvalidMemoryMapping) {
+ TEST_DESCRIPTION("Attempt to map memory in a number of incorrect ways");
+ VkResult err;
+ bool pass;
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ VkBuffer buffer;
+ VkDeviceMemory mem;
+ VkMemoryRequirements mem_reqs;
+
+ const VkDeviceSize atom_size = m_device->props.limits.nonCoherentAtomSize;
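+    // nonCoherentAtomSize is the required alignment (and size granularity) for flushing mapped memory
+    // ranges; the flush-range cases below are built around multiples of it.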
+
+ VkBufferCreateInfo buf_info = {};
+ buf_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+ buf_info.pNext = NULL;
+ buf_info.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
+ buf_info.size = 256;
+ buf_info.queueFamilyIndexCount = 0;
+ buf_info.pQueueFamilyIndices = NULL;
+ buf_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+ buf_info.flags = 0;
+ err = vkCreateBuffer(m_device->device(), &buf_info, NULL, &buffer);
+ ASSERT_VK_SUCCESS(err);
+
+ vkGetBufferMemoryRequirements(m_device->device(), buffer, &mem_reqs);
+ VkMemoryAllocateInfo alloc_info = {};
+ alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ alloc_info.pNext = NULL;
+ alloc_info.memoryTypeIndex = 0;
+
+    // Allocate a generously sized chunk of mappable memory for the mapping tests below
+ static const VkDeviceSize allocation_size = 0x10000;
+ alloc_info.allocationSize = allocation_size;
+ pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &alloc_info, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
+ if (!pass) {
+ printf("%s Failed to set memory type.\n", kSkipPrefix);
+ vkDestroyBuffer(m_device->device(), buffer, NULL);
+ return;
+ }
+ err = vkAllocateMemory(m_device->device(), &alloc_info, NULL, &mem);
+ ASSERT_VK_SUCCESS(err);
+
+ uint8_t *pData;
+    // Attempting to map a memory range of size zero is invalid
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VkMapMemory: Attempting to map memory range of size zero");
+ err = vkMapMemory(m_device->device(), mem, 0, 0, 0, (void **)&pData);
+ m_errorMonitor->VerifyFound();
+ // Map memory twice
+ err = vkMapMemory(m_device->device(), mem, 0, mem_reqs.size, 0, (void **)&pData);
+ ASSERT_VK_SUCCESS(err);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VkMapMemory: Attempting to map memory on an already-mapped object ");
+ err = vkMapMemory(m_device->device(), mem, 0, mem_reqs.size, 0, (void **)&pData);
+ m_errorMonitor->VerifyFound();
+
+ // Unmap the memory to avoid re-map error
+ vkUnmapMemory(m_device->device(), mem);
+ // overstep allocation with VK_WHOLE_SIZE
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ " with size of VK_WHOLE_SIZE oversteps total array size 0x");
+ err = vkMapMemory(m_device->device(), mem, allocation_size + 1, VK_WHOLE_SIZE, 0, (void **)&pData);
+ m_errorMonitor->VerifyFound();
+ // overstep allocation w/o VK_WHOLE_SIZE
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " oversteps total array size 0x");
+ err = vkMapMemory(m_device->device(), mem, 1, allocation_size, 0, (void **)&pData);
+ m_errorMonitor->VerifyFound();
+ // Now error due to unmapping memory that's not mapped
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Unmapping Memory without memory being mapped: ");
+ vkUnmapMemory(m_device->device(), mem);
+ m_errorMonitor->VerifyFound();
+
+ // Now map memory and cause errors due to flushing invalid ranges
+ err = vkMapMemory(m_device->device(), mem, 4 * atom_size, VK_WHOLE_SIZE, 0, (void **)&pData);
+ ASSERT_VK_SUCCESS(err);
+ VkMappedMemoryRange mmr = {};
+ mmr.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
+ mmr.memory = mem;
+ mmr.offset = atom_size; // Error b/c offset less than offset of mapped mem
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMappedMemoryRange-size-00685");
+ vkFlushMappedMemoryRanges(m_device->device(), 1, &mmr);
+ m_errorMonitor->VerifyFound();
+
+ // Now flush range that oversteps mapped range
+ vkUnmapMemory(m_device->device(), mem);
+ err = vkMapMemory(m_device->device(), mem, 0, 4 * atom_size, 0, (void **)&pData);
+ ASSERT_VK_SUCCESS(err);
+ mmr.offset = atom_size;
+ mmr.size = 4 * atom_size; // Flushing bounds exceed mapped bounds
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMappedMemoryRange-size-00685");
+ vkFlushMappedMemoryRanges(m_device->device(), 1, &mmr);
+ m_errorMonitor->VerifyFound();
+
+ // Now flush range with VK_WHOLE_SIZE that oversteps offset
+ vkUnmapMemory(m_device->device(), mem);
+ err = vkMapMemory(m_device->device(), mem, 2 * atom_size, 4 * atom_size, 0, (void **)&pData);
+ ASSERT_VK_SUCCESS(err);
+ mmr.offset = atom_size;
+ mmr.size = VK_WHOLE_SIZE;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMappedMemoryRange-size-00686");
+ vkFlushMappedMemoryRanges(m_device->device(), 1, &mmr);
+ m_errorMonitor->VerifyFound();
+
+    // Some platforms have an atom size of 1, which makes these alignment checks meaningless
+ if (atom_size > 3) {
+ // Now with an offset NOT a multiple of the device limit
+ vkUnmapMemory(m_device->device(), mem);
+ err = vkMapMemory(m_device->device(), mem, 0, 4 * atom_size, 0, (void **)&pData);
+ ASSERT_VK_SUCCESS(err);
+ mmr.offset = 3; // Not a multiple of atom_size
+ mmr.size = VK_WHOLE_SIZE;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMappedMemoryRange-offset-00687");
+ vkFlushMappedMemoryRanges(m_device->device(), 1, &mmr);
+ m_errorMonitor->VerifyFound();
+
+ // Now with a size NOT a multiple of the device limit
+ vkUnmapMemory(m_device->device(), mem);
+ err = vkMapMemory(m_device->device(), mem, 0, 4 * atom_size, 0, (void **)&pData);
+ ASSERT_VK_SUCCESS(err);
+ mmr.offset = atom_size;
+ mmr.size = 2 * atom_size + 1; // Not a multiple of atom_size
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMappedMemoryRange-size-01390");
+ vkFlushMappedMemoryRanges(m_device->device(), 1, &mmr);
+ m_errorMonitor->VerifyFound();
+ }
+
+ pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &alloc_info, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT,
+ VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
+ if (!pass) {
+ printf("%s Failed to set memory type.\n", kSkipPrefix);
+ vkFreeMemory(m_device->device(), mem, NULL);
+ vkDestroyBuffer(m_device->device(), buffer, NULL);
+ return;
+ }
+ // TODO : If we can get HOST_VISIBLE w/o HOST_COHERENT we can test cases of
+ // kVUID_Core_MemTrack_InvalidMap in validateAndCopyNoncoherentMemoryToDriver()
+
+ vkDestroyBuffer(m_device->device(), buffer, NULL);
+ vkFreeMemory(m_device->device(), mem, NULL);
+}
+
+TEST_F(VkLayerTest, MapMemWithoutHostVisibleBit) {
+ TEST_DESCRIPTION("Allocate memory that is not mappable and then attempt to map it.");
+ VkResult err;
+ bool pass;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkMapMemory-memory-00682");
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ VkMemoryAllocateInfo mem_alloc = {};
+ mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ mem_alloc.pNext = NULL;
+ mem_alloc.allocationSize = 1024;
+
+ pass = m_device->phy().set_memory_type(0xFFFFFFFF, &mem_alloc, 0, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
+ if (!pass) { // If we can't find any unmappable memory this test doesn't
+ // make sense
+ printf("%s No unmappable memory types found, skipping test\n", kSkipPrefix);
+ return;
+ }
+
+ VkDeviceMemory mem;
+ err = vkAllocateMemory(m_device->device(), &mem_alloc, NULL, &mem);
+ ASSERT_VK_SUCCESS(err);
+
+ void *mappedAddress = NULL;
+ err = vkMapMemory(m_device->device(), mem, 0, VK_WHOLE_SIZE, 0, &mappedAddress);
+ m_errorMonitor->VerifyFound();
+
+ vkFreeMemory(m_device->device(), mem, NULL);
+}
+
+TEST_F(VkLayerTest, RebindMemory) {
+ VkResult err;
+ bool pass;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "which has already been bound to mem object");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+    // Create an image, allocate two memory objects, bind the first, then try to rebind to the second
+ VkImage image;
+ VkDeviceMemory mem1;
+ VkDeviceMemory mem2;
+ VkMemoryRequirements mem_reqs;
+
+ const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
+ const int32_t tex_width = 32;
+ const int32_t tex_height = 32;
+
+ VkImageCreateInfo image_create_info = {};
+ image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ image_create_info.pNext = NULL;
+ image_create_info.imageType = VK_IMAGE_TYPE_2D;
+ image_create_info.format = tex_format;
+ image_create_info.extent.width = tex_width;
+ image_create_info.extent.height = tex_height;
+ image_create_info.extent.depth = 1;
+ image_create_info.mipLevels = 1;
+ image_create_info.arrayLayers = 1;
+ image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+ image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+ image_create_info.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
+ image_create_info.flags = 0;
+
+ VkMemoryAllocateInfo mem_alloc = {};
+ mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ mem_alloc.pNext = NULL;
+ mem_alloc.allocationSize = 0;
+ mem_alloc.memoryTypeIndex = 0;
+
+    // memoryTypeIndex is a placeholder here; a suitable type is selected below via set_memory_type()
+    mem_alloc.memoryTypeIndex = 1;
+ err = vkCreateImage(m_device->device(), &image_create_info, NULL, &image);
+ ASSERT_VK_SUCCESS(err);
+
+ vkGetImageMemoryRequirements(m_device->device(), image, &mem_reqs);
+
+ mem_alloc.allocationSize = mem_reqs.size;
+ pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &mem_alloc, 0);
+ ASSERT_TRUE(pass);
+
+ // allocate 2 memory objects
+ err = vkAllocateMemory(m_device->device(), &mem_alloc, NULL, &mem1);
+ ASSERT_VK_SUCCESS(err);
+ err = vkAllocateMemory(m_device->device(), &mem_alloc, NULL, &mem2);
+ ASSERT_VK_SUCCESS(err);
+
+ // Bind first memory object to Image object
+ err = vkBindImageMemory(m_device->device(), image, mem1, 0);
+ ASSERT_VK_SUCCESS(err);
+
+ // Introduce validation failure, try to bind a different memory object to
+ // the same image object
+ err = vkBindImageMemory(m_device->device(), image, mem2, 0);
+
+ m_errorMonitor->VerifyFound();
+
+ vkDestroyImage(m_device->device(), image, NULL);
+ vkFreeMemory(m_device->device(), mem1, NULL);
+ vkFreeMemory(m_device->device(), mem2, NULL);
+}
+
+TEST_F(VkLayerTest, QueryMemoryCommitmentWithoutLazyProperty) {
+ TEST_DESCRIPTION("Attempt to query memory commitment on memory without lazy allocation");
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ auto image_ci = vk_testing::Image::create_info();
+ image_ci.imageType = VK_IMAGE_TYPE_2D;
+ image_ci.format = VK_FORMAT_B8G8R8A8_UNORM;
+ image_ci.extent.width = 32;
+ image_ci.extent.height = 32;
+ image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
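+    // Transient color-attachment usage is the typical candidate for lazily allocated memory,
+    // which this test then deliberately forbids when selecting a memory type.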
+ image_ci.usage = VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+ vk_testing::Image image;
+ image.init_no_mem(*m_device, image_ci);
+
+ auto mem_reqs = image.memory_requirements();
+ // memory_type_index is set to 0 here, but is set properly below
+ auto image_alloc_info = vk_testing::DeviceMemory::alloc_info(mem_reqs.size, 0);
+
+ bool pass;
+ // the last argument is the "forbid" argument for set_memory_type, disallowing
+ // that particular memory type rather than requiring it
+ pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &image_alloc_info, 0, VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT);
+ if (!pass) {
+ printf("%s Failed to set memory type.\n", kSkipPrefix);
+ return;
+ }
+ vk_testing::DeviceMemory mem;
+ mem.init(*m_device, image_alloc_info);
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkGetDeviceMemoryCommitment-memory-00690");
+ VkDeviceSize size;
+ vkGetDeviceMemoryCommitment(m_device->device(), mem.handle(), &size);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, SubmitSignaledFence) {
+ vk_testing::Fence testFence;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "submitted in SIGNALED state. Fences must be reset before being submitted");
+
+ VkFenceCreateInfo fenceInfo = {};
+ fenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
+ fenceInfo.pNext = NULL;
+ fenceInfo.flags = VK_FENCE_CREATE_SIGNALED_BIT;
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitViewport());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ m_commandBuffer->begin();
+ m_commandBuffer->ClearAllBuffers(m_renderTargets, m_clear_color, nullptr, m_depth_clear_color, m_stencil_clear_color);
+ m_commandBuffer->end();
+
+ testFence.init(*m_device, fenceInfo);
+
+ VkSubmitInfo submit_info;
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.pNext = NULL;
+ submit_info.waitSemaphoreCount = 0;
+ submit_info.pWaitSemaphores = NULL;
+ submit_info.pWaitDstStageMask = NULL;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &m_commandBuffer->handle();
+ submit_info.signalSemaphoreCount = 0;
+ submit_info.pSignalSemaphores = NULL;
+
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, testFence.handle());
+ vkQueueWaitIdle(m_device->m_queue);
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, InvalidUsageBits) {
+    TEST_DESCRIPTION(
+        "Specify the wrong usage for an image, then create a conflicting view of that image. Initialize a buffer with the "
+        "wrong usage, then perform a copy expecting errors from both the image and the buffer (2 calls).");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ auto format = FindSupportedDepthStencilFormat(gpu());
+ if (!format) {
+ printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
+ return;
+ }
+
+ VkImageObj image(m_device);
+ // Initialize image with transfer source usage
+ image.Init(128, 128, 1, format, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+ ASSERT_TRUE(image.initialized());
+
+ VkImageView dsv;
+ VkImageViewCreateInfo dsvci = {};
+ dsvci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+ dsvci.image = image.handle();
+ dsvci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+ dsvci.format = format;
+ dsvci.subresourceRange.layerCount = 1;
+ dsvci.subresourceRange.baseMipLevel = 0;
+ dsvci.subresourceRange.levelCount = 1;
+ dsvci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
+
+ // Create a view with depth / stencil aspect for image with different usage
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Invalid usage flag for Image ");
+ vkCreateImageView(m_device->device(), &dsvci, NULL, &dsv);
+ m_errorMonitor->VerifyFound();
+
+ // Initialize buffer with TRANSFER_DST usage
+ VkBufferObj buffer;
+ VkMemoryPropertyFlags reqs = 0;
+ buffer.init_as_dst(*m_device, 128 * 128, reqs);
+ VkBufferImageCopy region = {};
+ region.bufferRowLength = 128;
+ region.bufferImageHeight = 128;
+ region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+ region.imageSubresource.layerCount = 1;
+ region.imageExtent.height = 16;
+ region.imageExtent.width = 16;
+ region.imageExtent.depth = 1;
+
+ // Buffer usage not set to TRANSFER_SRC and image usage not set to TRANSFER_DST
+ m_commandBuffer->begin();
+
+ // two separate errors from this call:
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyBufferToImage-dstImage-00177");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyBufferToImage-srcBuffer-00174");
+
+ vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer.handle(), image.handle(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1,
+ &region);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, LeakAnObject) {
+ VkResult err;
+
+ TEST_DESCRIPTION("Create a fence and destroy its device without first destroying the fence.");
+
+ // Note that we have to create a new device since destroying the
+ // framework's device causes Teardown() to fail and just calling Teardown
+ // will destroy the errorMonitor.
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "has not been destroyed.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ vk_testing::QueueCreateInfoArray queue_info(m_device->queue_props);
+
+ // The sacrificial device object
+ VkDevice testDevice;
+ VkDeviceCreateInfo device_create_info = {};
+ auto features = m_device->phy().features();
+ device_create_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
+ device_create_info.pNext = NULL;
+ device_create_info.queueCreateInfoCount = queue_info.size();
+ device_create_info.pQueueCreateInfos = queue_info.data();
+ device_create_info.enabledLayerCount = 0;
+ device_create_info.ppEnabledLayerNames = NULL;
+ device_create_info.pEnabledFeatures = &features;
+ err = vkCreateDevice(gpu(), &device_create_info, NULL, &testDevice);
+ ASSERT_VK_SUCCESS(err);
+
+ VkFence fence;
+ VkFenceCreateInfo fence_create_info = {};
+ fence_create_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
+ fence_create_info.pNext = NULL;
+ fence_create_info.flags = 0;
+ err = vkCreateFence(testDevice, &fence_create_info, NULL, &fence);
+ ASSERT_VK_SUCCESS(err);
+
+ // Induce failure by not calling vkDestroyFence
+ vkDestroyDevice(testDevice, NULL);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, InvalidCommandPoolConsistency) {
+ TEST_DESCRIPTION("Allocate command buffers from one command pool and attempt to delete them from another.");
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "FreeCommandBuffers is attempting to free Command Buffer");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ VkCommandPool command_pool_one;
+ VkCommandPool command_pool_two;
+
+ VkCommandPoolCreateInfo pool_create_info{};
+ pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+ pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
+ pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
+
+ vkCreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool_one);
+
+ vkCreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool_two);
+
+ VkCommandBuffer cb;
+ VkCommandBufferAllocateInfo command_buffer_allocate_info{};
+ command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+ command_buffer_allocate_info.commandPool = command_pool_one;
+ command_buffer_allocate_info.commandBufferCount = 1;
+ command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+ vkAllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, &cb);
+
+ vkFreeCommandBuffers(m_device->device(), command_pool_two, 1, &cb);
+
+ m_errorMonitor->VerifyFound();
+
+ vkDestroyCommandPool(m_device->device(), command_pool_one, NULL);
+ vkDestroyCommandPool(m_device->device(), command_pool_two, NULL);
+}
+
+TEST_F(VkLayerTest, InvalidDescriptorPoolConsistency) {
+ VkResult err;
+
+ TEST_DESCRIPTION("Allocate descriptor sets from one DS pool and attempt to delete them from another.");
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "FreeDescriptorSets is attempting to free descriptorSet");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkDescriptorPoolSize ds_type_count = {};
+ ds_type_count.type = VK_DESCRIPTOR_TYPE_SAMPLER;
+ ds_type_count.descriptorCount = 1;
+
+ VkDescriptorPoolCreateInfo ds_pool_ci = {};
+ ds_pool_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
+ ds_pool_ci.pNext = NULL;
+ ds_pool_ci.flags = 0;
+ ds_pool_ci.maxSets = 1;
+ ds_pool_ci.poolSizeCount = 1;
+ ds_pool_ci.pPoolSizes = &ds_type_count;
+
+ VkDescriptorPool bad_pool;
+ err = vkCreateDescriptorPool(m_device->device(), &ds_pool_ci, NULL, &bad_pool);
+ ASSERT_VK_SUCCESS(err);
+
+ OneOffDescriptorSet ds(m_device, {
+ {0, VK_DESCRIPTOR_TYPE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
+ });
+
+ err = vkFreeDescriptorSets(m_device->device(), bad_pool, 1, &ds.set_);
+
+ m_errorMonitor->VerifyFound();
+
+ vkDestroyDescriptorPool(m_device->device(), bad_pool, NULL);
+}
+
+TEST_F(VkLayerTest, CreateUnknownObject) {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkGetImageMemoryRequirements-image-parameter");
+
+ TEST_DESCRIPTION("Pass an invalid image object handle into a Vulkan API call.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ // Pass bogus handle into GetImageMemoryRequirements
+ VkMemoryRequirements mem_reqs;
+ uint64_t fakeImageHandle = 0xCADECADE;
+ VkImage fauxImage = reinterpret_cast<VkImage &>(fakeImageHandle);
+
+ vkGetImageMemoryRequirements(m_device->device(), fauxImage, &mem_reqs);
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, UseObjectWithWrongDevice) {
+ TEST_DESCRIPTION(
+ "Try to destroy a render pass object using a device other than the one it was created on. This should generate a distinct "
+ "error from the invalid handle error.");
+ // Create first device and renderpass
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ // Create second device
+ float priorities[] = {1.0f};
+ VkDeviceQueueCreateInfo queue_info{};
+ queue_info.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
+ queue_info.pNext = NULL;
+ queue_info.flags = 0;
+ queue_info.queueFamilyIndex = 0;
+ queue_info.queueCount = 1;
+ queue_info.pQueuePriorities = &priorities[0];
+
+ VkDeviceCreateInfo device_create_info = {};
+ auto features = m_device->phy().features();
+ device_create_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
+ device_create_info.pNext = NULL;
+ device_create_info.queueCreateInfoCount = 1;
+ device_create_info.pQueueCreateInfos = &queue_info;
+ device_create_info.enabledLayerCount = 0;
+ device_create_info.ppEnabledLayerNames = NULL;
+ device_create_info.pEnabledFeatures = &features;
+
+ VkDevice second_device;
+ ASSERT_VK_SUCCESS(vkCreateDevice(gpu(), &device_create_info, NULL, &second_device));
+
+ // Try to destroy the renderpass from the first device using the second device
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkDestroyRenderPass-renderPass-parent");
+ vkDestroyRenderPass(second_device, m_renderPass, NULL);
+ m_errorMonitor->VerifyFound();
+
+ vkDestroyDevice(second_device, NULL);
+}
+
+TEST_F(VkLayerTest, PipelineNotBound) {
+ TEST_DESCRIPTION("Pass in an invalid pipeline object handle into a Vulkan API call.");
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBindPipeline-pipeline-parameter");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkPipeline badPipeline = (VkPipeline)((size_t)0xbaadb1be);
+
+ m_commandBuffer->begin();
+ vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, badPipeline);
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, BindImageInvalidMemoryType) {
+ VkResult err;
+
+ TEST_DESCRIPTION("Test validation check for an invalid memory type index during bind[Buffer|Image]Memory time");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ // Create an image, allocate memory, set a bad typeIndex and then try to
+ // bind it
+ VkImage image;
+ VkDeviceMemory mem;
+ VkMemoryRequirements mem_reqs;
+ const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
+ const int32_t tex_width = 32;
+ const int32_t tex_height = 32;
+
+ VkImageCreateInfo image_create_info = {};
+ image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ image_create_info.pNext = NULL;
+ image_create_info.imageType = VK_IMAGE_TYPE_2D;
+ image_create_info.format = tex_format;
+ image_create_info.extent.width = tex_width;
+ image_create_info.extent.height = tex_height;
+ image_create_info.extent.depth = 1;
+ image_create_info.mipLevels = 1;
+ image_create_info.arrayLayers = 1;
+ image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+ image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+ image_create_info.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
+ image_create_info.flags = 0;
+
+ VkMemoryAllocateInfo mem_alloc = {};
+ mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ mem_alloc.pNext = NULL;
+ mem_alloc.allocationSize = 0;
+ mem_alloc.memoryTypeIndex = 0;
+
+ err = vkCreateImage(m_device->device(), &image_create_info, NULL, &image);
+ ASSERT_VK_SUCCESS(err);
+
+ vkGetImageMemoryRequirements(m_device->device(), image, &mem_reqs);
+ mem_alloc.allocationSize = mem_reqs.size;
+
+ // Introduce Failure, select invalid TypeIndex
+ VkPhysicalDeviceMemoryProperties memory_info;
+
+ vkGetPhysicalDeviceMemoryProperties(gpu(), &memory_info);
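+    // Search for a memory type index that is NOT permitted by the image's memoryTypeBits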
+ unsigned int i;
+ for (i = 0; i < memory_info.memoryTypeCount; i++) {
+ if ((mem_reqs.memoryTypeBits & (1 << i)) == 0) {
+ mem_alloc.memoryTypeIndex = i;
break;
}
}
- return NULL;
+ if (i >= memory_info.memoryTypeCount) {
+ printf("%s No invalid memory type index could be found; skipped.\n", kSkipPrefix);
+ vkDestroyImage(m_device->device(), image, NULL);
+ return;
+ }
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "for this object type are not compatible with the memory");
+
+ err = vkAllocateMemory(m_device->device(), &mem_alloc, NULL, &mem);
+ ASSERT_VK_SUCCESS(err);
+
+ err = vkBindImageMemory(m_device->device(), image, mem, 0);
+ (void)err;
+
+ m_errorMonitor->VerifyFound();
+
+ vkDestroyImage(m_device->device(), image, NULL);
+ vkFreeMemory(m_device->device(), mem, NULL);
}
-#endif // GTEST_IS_THREADSAFE
-extern "C" void *ReleaseNullFence(void *arg) {
- struct thread_data_struct *data = (struct thread_data_struct *)arg;
+TEST_F(VkLayerTest, BindInvalidMemory) {
+ VkResult err;
+ bool pass;
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ const VkFormat tex_format = VK_FORMAT_R8G8B8A8_UNORM;
+ const int32_t tex_width = 256;
+ const int32_t tex_height = 256;
+
+ VkImageCreateInfo image_create_info = {};
+ image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ image_create_info.pNext = NULL;
+ image_create_info.imageType = VK_IMAGE_TYPE_2D;
+ image_create_info.format = tex_format;
+ image_create_info.extent.width = tex_width;
+ image_create_info.extent.height = tex_height;
+ image_create_info.extent.depth = 1;
+ image_create_info.mipLevels = 1;
+ image_create_info.arrayLayers = 1;
+ image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+ image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+ image_create_info.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
+ image_create_info.flags = 0;
+
+ VkBufferCreateInfo buffer_create_info = {};
+ buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+ buffer_create_info.pNext = NULL;
+ buffer_create_info.flags = 0;
+ buffer_create_info.size = 4 * 1024 * 1024;
+ buffer_create_info.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
+ buffer_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+
+ // Create an image/buffer, allocate memory, free it, and then try to bind it
+ {
+ VkImage image = VK_NULL_HANDLE;
+ VkBuffer buffer = VK_NULL_HANDLE;
+ err = vkCreateImage(device(), &image_create_info, NULL, &image);
+ ASSERT_VK_SUCCESS(err);
+ err = vkCreateBuffer(device(), &buffer_create_info, NULL, &buffer);
+ ASSERT_VK_SUCCESS(err);
+ VkMemoryRequirements image_mem_reqs = {}, buffer_mem_reqs = {};
+ vkGetImageMemoryRequirements(device(), image, &image_mem_reqs);
+ vkGetBufferMemoryRequirements(device(), buffer, &buffer_mem_reqs);
+
+ VkMemoryAllocateInfo image_mem_alloc = {}, buffer_mem_alloc = {};
+ image_mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ image_mem_alloc.allocationSize = image_mem_reqs.size;
+ pass = m_device->phy().set_memory_type(image_mem_reqs.memoryTypeBits, &image_mem_alloc, 0);
+ ASSERT_TRUE(pass);
+ buffer_mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ buffer_mem_alloc.allocationSize = buffer_mem_reqs.size;
+ pass = m_device->phy().set_memory_type(buffer_mem_reqs.memoryTypeBits, &buffer_mem_alloc, 0);
+ ASSERT_TRUE(pass);
+
+ VkDeviceMemory image_mem = VK_NULL_HANDLE, buffer_mem = VK_NULL_HANDLE;
+ err = vkAllocateMemory(device(), &image_mem_alloc, NULL, &image_mem);
+ ASSERT_VK_SUCCESS(err);
+ err = vkAllocateMemory(device(), &buffer_mem_alloc, NULL, &buffer_mem);
+ ASSERT_VK_SUCCESS(err);
+
+ vkFreeMemory(device(), image_mem, NULL);
+ vkFreeMemory(device(), buffer_mem, NULL);
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindImageMemory-memory-parameter");
+ err = vkBindImageMemory(device(), image, image_mem, 0);
+ (void)err; // This may very well return an error.
+ m_errorMonitor->VerifyFound();
- for (int i = 0; i < 40000; i++) {
- vkDestroyFence(data->device, VK_NULL_HANDLE, NULL);
- if (data->bailout) {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindBufferMemory-memory-parameter");
+ err = vkBindBufferMemory(device(), buffer, buffer_mem, 0);
+ (void)err; // This may very well return an error.
+ m_errorMonitor->VerifyFound();
+
+ vkDestroyImage(m_device->device(), image, NULL);
+ vkDestroyBuffer(m_device->device(), buffer, NULL);
+ }
+
+ // Try to bind memory to an object that already has a memory binding
+ {
+ VkImage image = VK_NULL_HANDLE;
+ err = vkCreateImage(device(), &image_create_info, NULL, &image);
+ ASSERT_VK_SUCCESS(err);
+ VkBuffer buffer = VK_NULL_HANDLE;
+ err = vkCreateBuffer(device(), &buffer_create_info, NULL, &buffer);
+ ASSERT_VK_SUCCESS(err);
+ VkMemoryRequirements image_mem_reqs = {}, buffer_mem_reqs = {};
+ vkGetImageMemoryRequirements(device(), image, &image_mem_reqs);
+ vkGetBufferMemoryRequirements(device(), buffer, &buffer_mem_reqs);
+ VkMemoryAllocateInfo image_alloc_info = {}, buffer_alloc_info = {};
+ image_alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ image_alloc_info.allocationSize = image_mem_reqs.size;
+ buffer_alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ buffer_alloc_info.allocationSize = buffer_mem_reqs.size;
+ pass = m_device->phy().set_memory_type(image_mem_reqs.memoryTypeBits, &image_alloc_info, 0);
+ ASSERT_TRUE(pass);
+ pass = m_device->phy().set_memory_type(buffer_mem_reqs.memoryTypeBits, &buffer_alloc_info, 0);
+ ASSERT_TRUE(pass);
+ VkDeviceMemory image_mem, buffer_mem;
+ err = vkAllocateMemory(device(), &image_alloc_info, NULL, &image_mem);
+ ASSERT_VK_SUCCESS(err);
+ err = vkAllocateMemory(device(), &buffer_alloc_info, NULL, &buffer_mem);
+ ASSERT_VK_SUCCESS(err);
+
+ err = vkBindImageMemory(device(), image, image_mem, 0);
+ ASSERT_VK_SUCCESS(err);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindImageMemory-image-01044");
+ err = vkBindImageMemory(device(), image, image_mem, 0);
+ (void)err; // This may very well return an error.
+ m_errorMonitor->VerifyFound();
+
+ err = vkBindBufferMemory(device(), buffer, buffer_mem, 0);
+ ASSERT_VK_SUCCESS(err);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindBufferMemory-buffer-01029");
+ err = vkBindBufferMemory(device(), buffer, buffer_mem, 0);
+ (void)err; // This may very well return an error.
+ m_errorMonitor->VerifyFound();
+
+ vkFreeMemory(device(), image_mem, NULL);
+ vkFreeMemory(device(), buffer_mem, NULL);
+ vkDestroyImage(device(), image, NULL);
+ vkDestroyBuffer(device(), buffer, NULL);
+ }
+
+ // Try to bind memory to an object with an invalid memoryOffset
+ {
+ VkImage image = VK_NULL_HANDLE;
+ err = vkCreateImage(device(), &image_create_info, NULL, &image);
+ ASSERT_VK_SUCCESS(err);
+ VkBuffer buffer = VK_NULL_HANDLE;
+ err = vkCreateBuffer(device(), &buffer_create_info, NULL, &buffer);
+ ASSERT_VK_SUCCESS(err);
+ VkMemoryRequirements image_mem_reqs = {}, buffer_mem_reqs = {};
+ vkGetImageMemoryRequirements(device(), image, &image_mem_reqs);
+ vkGetBufferMemoryRequirements(device(), buffer, &buffer_mem_reqs);
+ VkMemoryAllocateInfo image_alloc_info = {}, buffer_alloc_info = {};
+ image_alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ // Leave some extra space for alignment wiggle room
+ image_alloc_info.allocationSize = image_mem_reqs.size + image_mem_reqs.alignment;
+ buffer_alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ buffer_alloc_info.allocationSize = buffer_mem_reqs.size + buffer_mem_reqs.alignment;
+ pass = m_device->phy().set_memory_type(image_mem_reqs.memoryTypeBits, &image_alloc_info, 0);
+ ASSERT_TRUE(pass);
+ pass = m_device->phy().set_memory_type(buffer_mem_reqs.memoryTypeBits, &buffer_alloc_info, 0);
+ ASSERT_TRUE(pass);
+ VkDeviceMemory image_mem, buffer_mem;
+ err = vkAllocateMemory(device(), &image_alloc_info, NULL, &image_mem);
+ ASSERT_VK_SUCCESS(err);
+ err = vkAllocateMemory(device(), &buffer_alloc_info, NULL, &buffer_mem);
+ ASSERT_VK_SUCCESS(err);
+
+ // Test unaligned memory offset
+ {
+ if (image_mem_reqs.alignment > 1) {
+ VkDeviceSize image_offset = 1;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindImageMemory-memoryOffset-01048");
+ err = vkBindImageMemory(device(), image, image_mem, image_offset);
+ (void)err; // This may very well return an error.
+ m_errorMonitor->VerifyFound();
+ }
+
+ if (buffer_mem_reqs.alignment > 1) {
+ VkDeviceSize buffer_offset = 1;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindBufferMemory-memoryOffset-01036");
+ err = vkBindBufferMemory(device(), buffer, buffer_mem, buffer_offset);
+ (void)err; // This may very well return an error.
+ m_errorMonitor->VerifyFound();
+ }
+ }
+
+ // Test memory offsets outside the memory allocation
+ {
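+ // (allocationSize + alignment) rounded down to an alignment boundary is an
+ // aligned offset past the end of the allocation (alignment is a power of two).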
+ VkDeviceSize image_offset =
+ (image_alloc_info.allocationSize + image_mem_reqs.alignment) & ~(image_mem_reqs.alignment - 1);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindImageMemory-memoryOffset-01046");
+ err = vkBindImageMemory(device(), image, image_mem, image_offset);
+ (void)err; // This may very well return an error.
+ m_errorMonitor->VerifyFound();
+
+ VkDeviceSize buffer_offset =
+ (buffer_alloc_info.allocationSize + buffer_mem_reqs.alignment) & ~(buffer_mem_reqs.alignment - 1);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindBufferMemory-memoryOffset-01031");
+ err = vkBindBufferMemory(device(), buffer, buffer_mem, buffer_offset);
+ (void)err; // This may very well return an error.
+ m_errorMonitor->VerifyFound();
+ }
+
+ // Test memory offsets within the memory allocation, but which leave too little memory for
+ // the resource.
+ {
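+ // Rounding (size - 1) down to an alignment boundary yields the largest aligned
+ // offset below mem_reqs.size, leaving the tail of the allocation too small.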
+ VkDeviceSize image_offset = (image_mem_reqs.size - 1) & ~(image_mem_reqs.alignment - 1);
+ if ((image_offset > 0) && (image_mem_reqs.size < (image_alloc_info.allocationSize - image_mem_reqs.alignment))) {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindImageMemory-size-01049");
+ err = vkBindImageMemory(device(), image, image_mem, image_offset);
+ (void)err; // This may very well return an error.
+ m_errorMonitor->VerifyFound();
+ }
+
+ VkDeviceSize buffer_offset = (buffer_mem_reqs.size - 1) & ~(buffer_mem_reqs.alignment - 1);
+ if (buffer_offset > 0) {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindBufferMemory-size-01037");
+ err = vkBindBufferMemory(device(), buffer, buffer_mem, buffer_offset);
+ (void)err; // This may very well return an error.
+ m_errorMonitor->VerifyFound();
+ }
+ }
+
+ vkFreeMemory(device(), image_mem, NULL);
+ vkFreeMemory(device(), buffer_mem, NULL);
+ vkDestroyImage(device(), image, NULL);
+ vkDestroyBuffer(device(), buffer, NULL);
+ }
+
+ // Try to bind memory to an object with an invalid memory type
+ {
+ VkImage image = VK_NULL_HANDLE;
+ err = vkCreateImage(device(), &image_create_info, NULL, &image);
+ ASSERT_VK_SUCCESS(err);
+ VkBuffer buffer = VK_NULL_HANDLE;
+ err = vkCreateBuffer(device(), &buffer_create_info, NULL, &buffer);
+ ASSERT_VK_SUCCESS(err);
+ VkMemoryRequirements image_mem_reqs = {}, buffer_mem_reqs = {};
+ vkGetImageMemoryRequirements(device(), image, &image_mem_reqs);
+ vkGetBufferMemoryRequirements(device(), buffer, &buffer_mem_reqs);
+ VkMemoryAllocateInfo image_alloc_info = {}, buffer_alloc_info = {};
+ image_alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ image_alloc_info.allocationSize = image_mem_reqs.size;
+ buffer_alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ buffer_alloc_info.allocationSize = buffer_mem_reqs.size;
+ // Create a mask of available memory types *not* supported by these resources,
+ // and try to use one of them.
+ VkPhysicalDeviceMemoryProperties memory_properties = {};
+ vkGetPhysicalDeviceMemoryProperties(m_device->phy().handle(), &memory_properties);
+ VkDeviceMemory image_mem, buffer_mem;
+
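+ // ((1 << memoryTypeCount) - 1) is the mask of every memory type the device reports;
+ // clearing the resource's memoryTypeBits leaves only the types it cannot use.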
+ uint32_t image_unsupported_mem_type_bits = ((1 << memory_properties.memoryTypeCount) - 1) & ~image_mem_reqs.memoryTypeBits;
+ if (image_unsupported_mem_type_bits != 0) {
+ pass = m_device->phy().set_memory_type(image_unsupported_mem_type_bits, &image_alloc_info, 0);
+ ASSERT_TRUE(pass);
+ err = vkAllocateMemory(device(), &image_alloc_info, NULL, &image_mem);
+ ASSERT_VK_SUCCESS(err);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindImageMemory-memory-01047");
+ err = vkBindImageMemory(device(), image, image_mem, 0);
+ (void)err; // This may very well return an error.
+ m_errorMonitor->VerifyFound();
+ vkFreeMemory(device(), image_mem, NULL);
+ }
+
+ uint32_t buffer_unsupported_mem_type_bits =
+ ((1 << memory_properties.memoryTypeCount) - 1) & ~buffer_mem_reqs.memoryTypeBits;
+ if (buffer_unsupported_mem_type_bits != 0) {
+ pass = m_device->phy().set_memory_type(buffer_unsupported_mem_type_bits, &buffer_alloc_info, 0);
+ ASSERT_TRUE(pass);
+ err = vkAllocateMemory(device(), &buffer_alloc_info, NULL, &buffer_mem);
+ ASSERT_VK_SUCCESS(err);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindBufferMemory-memory-01035");
+ err = vkBindBufferMemory(device(), buffer, buffer_mem, 0);
+ (void)err; // This may very well return an error.
+ m_errorMonitor->VerifyFound();
+ vkFreeMemory(device(), buffer_mem, NULL);
+ }
+
+ vkDestroyImage(device(), image, NULL);
+ vkDestroyBuffer(device(), buffer, NULL);
+ }
+
+ // Try to bind memory to an image created with sparse memory flags
+ {
+ VkImageCreateInfo sparse_image_create_info = image_create_info;
+ sparse_image_create_info.flags |= VK_IMAGE_CREATE_SPARSE_BINDING_BIT;
+ VkImageFormatProperties image_format_properties = {};
+ err = vkGetPhysicalDeviceImageFormatProperties(m_device->phy().handle(), sparse_image_create_info.format,
+ sparse_image_create_info.imageType, sparse_image_create_info.tiling,
+ sparse_image_create_info.usage, sparse_image_create_info.flags,
+ &image_format_properties);
+ if (!m_device->phy().features().sparseResidencyImage2D || err == VK_ERROR_FORMAT_NOT_SUPPORTED) {
+ // most likely means sparse formats aren't supported here; skip this sub-test.
+ } else {
+ ASSERT_VK_SUCCESS(err);
+ if (image_format_properties.maxExtent.width == 0) {
+ printf("%s Sparse image format not supported; skipped.\n", kSkipPrefix);
+ return;
+ } else {
+ VkImage sparse_image = VK_NULL_HANDLE;
+ err = vkCreateImage(m_device->device(), &sparse_image_create_info, NULL, &sparse_image);
+ ASSERT_VK_SUCCESS(err);
+ VkMemoryRequirements sparse_mem_reqs = {};
+ vkGetImageMemoryRequirements(m_device->device(), sparse_image, &sparse_mem_reqs);
+ if (sparse_mem_reqs.memoryTypeBits != 0) {
+ VkMemoryAllocateInfo sparse_mem_alloc = {};
+ sparse_mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ sparse_mem_alloc.pNext = NULL;
+ sparse_mem_alloc.allocationSize = sparse_mem_reqs.size;
+ sparse_mem_alloc.memoryTypeIndex = 0;
+ pass = m_device->phy().set_memory_type(sparse_mem_reqs.memoryTypeBits, &sparse_mem_alloc, 0);
+ ASSERT_TRUE(pass);
+ VkDeviceMemory sparse_mem = VK_NULL_HANDLE;
+ err = vkAllocateMemory(m_device->device(), &sparse_mem_alloc, NULL, &sparse_mem);
+ ASSERT_VK_SUCCESS(err);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindImageMemory-image-01045");
+ err = vkBindImageMemory(m_device->device(), sparse_image, sparse_mem, 0);
+ // This may very well return an error.
+ (void)err;
+ m_errorMonitor->VerifyFound();
+ vkFreeMemory(m_device->device(), sparse_mem, NULL);
+ }
+ vkDestroyImage(m_device->device(), sparse_image, NULL);
+ }
+ }
+ }
+
+ // Try to bind memory to a buffer created with sparse memory flags
+ {
+ VkBufferCreateInfo sparse_buffer_create_info = buffer_create_info;
+ sparse_buffer_create_info.flags |= VK_BUFFER_CREATE_SPARSE_BINDING_BIT;
+ if (!m_device->phy().features().sparseResidencyBuffer) {
+ // most likely means sparse buffers aren't supported here; skip this sub-test.
+ } else {
+ VkBuffer sparse_buffer = VK_NULL_HANDLE;
+ err = vkCreateBuffer(m_device->device(), &sparse_buffer_create_info, NULL, &sparse_buffer);
+ ASSERT_VK_SUCCESS(err);
+ VkMemoryRequirements sparse_mem_reqs = {};
+ vkGetBufferMemoryRequirements(m_device->device(), sparse_buffer, &sparse_mem_reqs);
+ if (sparse_mem_reqs.memoryTypeBits != 0) {
+ VkMemoryAllocateInfo sparse_mem_alloc = {};
+ sparse_mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ sparse_mem_alloc.pNext = NULL;
+ sparse_mem_alloc.allocationSize = sparse_mem_reqs.size;
+ sparse_mem_alloc.memoryTypeIndex = 0;
+ pass = m_device->phy().set_memory_type(sparse_mem_reqs.memoryTypeBits, &sparse_mem_alloc, 0);
+ ASSERT_TRUE(pass);
+ VkDeviceMemory sparse_mem = VK_NULL_HANDLE;
+ err = vkAllocateMemory(m_device->device(), &sparse_mem_alloc, NULL, &sparse_mem);
+ ASSERT_VK_SUCCESS(err);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindBufferMemory-buffer-01030");
+ err = vkBindBufferMemory(m_device->device(), sparse_buffer, sparse_mem, 0);
+ // This may very well return an error.
+ (void)err;
+ m_errorMonitor->VerifyFound();
+ vkFreeMemory(m_device->device(), sparse_mem, NULL);
+ }
+ vkDestroyBuffer(m_device->device(), sparse_buffer, NULL);
+ }
+ }
+}
+
+TEST_F(VkLayerTest, BindMemoryToDestroyedObject) {
+ VkResult err;
+ bool pass;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindImageMemory-image-parameter");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ // Create an image object, allocate memory, destroy the object and then try
+ // to bind it
+ VkImage image;
+ VkDeviceMemory mem;
+ VkMemoryRequirements mem_reqs;
+
+ const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
+ const int32_t tex_width = 32;
+ const int32_t tex_height = 32;
+
+ VkImageCreateInfo image_create_info = {};
+ image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ image_create_info.pNext = NULL;
+ image_create_info.imageType = VK_IMAGE_TYPE_2D;
+ image_create_info.format = tex_format;
+ image_create_info.extent.width = tex_width;
+ image_create_info.extent.height = tex_height;
+ image_create_info.extent.depth = 1;
+ image_create_info.mipLevels = 1;
+ image_create_info.arrayLayers = 1;
+ image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+ image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+ image_create_info.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
+ image_create_info.flags = 0;
+
+ VkMemoryAllocateInfo mem_alloc = {};
+ mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ mem_alloc.pNext = NULL;
+ mem_alloc.allocationSize = 0;
+ mem_alloc.memoryTypeIndex = 0;
+
+ err = vkCreateImage(m_device->device(), &image_create_info, NULL, &image);
+ ASSERT_VK_SUCCESS(err);
+
+ vkGetImageMemoryRequirements(m_device->device(), image, &mem_reqs);
+
+ mem_alloc.allocationSize = mem_reqs.size;
+ pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &mem_alloc, 0);
+ ASSERT_TRUE(pass);
+
+ // Allocate memory
+ err = vkAllocateMemory(m_device->device(), &mem_alloc, NULL, &mem);
+ ASSERT_VK_SUCCESS(err);
+
+ // Introduce validation failure: destroy the image before binding its memory
+ vkDestroyImage(m_device->device(), image, NULL);
+
+ // Now try to bind memory to the destroyed image
+ err = vkBindImageMemory(m_device->device(), image, mem, 0);
+ // This may very well return an error.
+ (void)err;
+
+ m_errorMonitor->VerifyFound();
+
+ vkFreeMemory(m_device->device(), mem, NULL);
+}
+
+TEST_F(VkLayerTest, ExceedMemoryAllocationCount) {
+ VkResult err = VK_SUCCESS;
+ const int max_mems = 32;
+ VkDeviceMemory mems[max_mems + 1];
+
+ if (!EnableDeviceProfileLayer()) {
+ printf("%s Failed to enable device profile layer.\n", kSkipPrefix);
+ return;
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+ PFN_vkSetPhysicalDeviceLimitsEXT fpvkSetPhysicalDeviceLimitsEXT =
+ (PFN_vkSetPhysicalDeviceLimitsEXT)vkGetInstanceProcAddr(instance(), "vkSetPhysicalDeviceLimitsEXT");
+ PFN_vkGetOriginalPhysicalDeviceLimitsEXT fpvkGetOriginalPhysicalDeviceLimitsEXT =
+ (PFN_vkGetOriginalPhysicalDeviceLimitsEXT)vkGetInstanceProcAddr(instance(), "vkGetOriginalPhysicalDeviceLimitsEXT");
+
+ if (!(fpvkSetPhysicalDeviceLimitsEXT) || !(fpvkGetOriginalPhysicalDeviceLimitsEXT)) {
+ printf("%s Can't find device_profile_api functions; skipped.\n", kSkipPrefix);
+ return;
+ }
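+ // Artificially lower maxMemoryAllocationCount via the device_profile_api layer so the
+ // allocation cap can be exceeded with only a handful of allocations.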
+ VkPhysicalDeviceProperties props;
+ fpvkGetOriginalPhysicalDeviceLimitsEXT(gpu(), &props.limits);
+ if (props.limits.maxMemoryAllocationCount > max_mems) {
+ props.limits.maxMemoryAllocationCount = max_mems;
+ fpvkSetPhysicalDeviceLimitsEXT(gpu(), &props.limits);
+ }
+ ASSERT_NO_FATAL_FAILURE(InitState());
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "Number of currently valid memory objects is not less than the maximum allowed");
+
+ VkMemoryAllocateInfo mem_alloc = {};
+ mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ mem_alloc.pNext = NULL;
+ mem_alloc.memoryTypeIndex = 0;
+ mem_alloc.allocationSize = 4;
+
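+ // Attempt one more allocation than the (overridden) limit; the final vkAllocateMemory
+ // should trigger the expected error.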
+ int i;
+ for (i = 0; i <= max_mems; i++) {
+ err = vkAllocateMemory(m_device->device(), &mem_alloc, NULL, &mems[i]);
+ if (err != VK_SUCCESS) {
break;
}
}
- return NULL;
+ m_errorMonitor->VerifyFound();
+
+ for (int j = 0; j < i; j++) {
+ vkFreeMemory(m_device->device(), mems[j], NULL);
+ }
+}
+
+TEST_F(VkLayerTest, CreatePipelineBadVertexAttributeFormat) {
+ TEST_DESCRIPTION("Test that pipeline validation catches invalid vertex attribute formats");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkVertexInputBindingDescription input_binding;
+ memset(&input_binding, 0, sizeof(input_binding));
+
+ VkVertexInputAttributeDescription input_attribs;
+ memset(&input_attribs, 0, sizeof(input_attribs));
+
+ // Pick a block-compressed format that should never be valid for vertex attributes; skip if the implementation claims support
+ input_attribs.format = VK_FORMAT_BC2_UNORM_BLOCK;
+ VkFormatProperties format_props = m_device->format_properties(input_attribs.format);
+ if ((format_props.bufferFeatures & VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT) != 0) {
+ printf("%s Format unsuitable for test; skipped.\n", kSkipPrefix);
+ return;
+ }
+
+ input_attribs.location = 0;
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "void main(){\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) out vec4 color;\n"
+ "void main(){\n"
+ " color = vec4(1);\n"
+ "}\n";
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkVertexInputAttributeDescription-format-00623");
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddDefaultColorAttachment();
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+
+ pipe.AddVertexInputBindings(&input_binding, 1);
+ pipe.AddVertexInputAttribs(&input_attribs, 1);
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.AppendDummy();
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, ImageSampleCounts) {
+ TEST_DESCRIPTION("Use bad sample counts in image transfer calls to trigger validation errors.");
+ ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+
+ VkMemoryPropertyFlags reqs = 0;
+ VkImageCreateInfo image_create_info = {};
+ image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ image_create_info.pNext = NULL;
+ image_create_info.imageType = VK_IMAGE_TYPE_2D;
+ image_create_info.format = VK_FORMAT_B8G8R8A8_UNORM;
+ image_create_info.extent.width = 256;
+ image_create_info.extent.height = 256;
+ image_create_info.extent.depth = 1;
+ image_create_info.mipLevels = 1;
+ image_create_info.arrayLayers = 1;
+ image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+ image_create_info.flags = 0;
+
+ VkImageBlit blit_region = {};
+ blit_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ blit_region.srcSubresource.baseArrayLayer = 0;
+ blit_region.srcSubresource.layerCount = 1;
+ blit_region.srcSubresource.mipLevel = 0;
+ blit_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ blit_region.dstSubresource.baseArrayLayer = 0;
+ blit_region.dstSubresource.layerCount = 1;
+ blit_region.dstSubresource.mipLevel = 0;
+ blit_region.srcOffsets[0] = {0, 0, 0};
+ blit_region.srcOffsets[1] = {256, 256, 1};
+ blit_region.dstOffsets[0] = {0, 0, 0};
+ blit_region.dstOffsets[1] = {128, 128, 1};
+
+ // Create two images, the source with sampleCount = 4, and attempt to blit
+ // between them
+ {
+ image_create_info.samples = VK_SAMPLE_COUNT_4_BIT;
+ image_create_info.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
+ VkImageObj src_image(m_device);
+ src_image.init(&image_create_info);
+ src_image.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
+ image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+ image_create_info.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+ VkImageObj dst_image(m_device);
+ dst_image.init(&image_create_info);
+ dst_image.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
+ m_commandBuffer->begin();
+ // TODO: These 2 VUs are redundant - expect one of them to go away
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-00233");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-00228");
+ vkCmdBlitImage(m_commandBuffer->handle(), src_image.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, dst_image.handle(),
+ VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &blit_region, VK_FILTER_NEAREST);
+ m_errorMonitor->VerifyFound();
+ m_commandBuffer->end();
+ }
+
+ // Create two images, the dest with sampleCount = 4, and attempt to blit
+ // between them
+ {
+ image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+ image_create_info.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
+ VkImageObj src_image(m_device);
+ src_image.init(&image_create_info);
+ src_image.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
+ image_create_info.samples = VK_SAMPLE_COUNT_4_BIT;
+ image_create_info.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+ VkImageObj dst_image(m_device);
+ dst_image.init(&image_create_info);
+ dst_image.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
+ m_commandBuffer->begin();
+ // TODO: These 2 VUs are redundant - expect one of them to go away
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-dstImage-00234");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-00228");
+ vkCmdBlitImage(m_commandBuffer->handle(), src_image.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, dst_image.handle(),
+ VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &blit_region, VK_FILTER_NEAREST);
+ m_errorMonitor->VerifyFound();
+ m_commandBuffer->end();
+ }
+
+ VkBufferImageCopy copy_region = {};
+ copy_region.bufferRowLength = 128;
+ copy_region.bufferImageHeight = 128;
+ copy_region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ copy_region.imageSubresource.layerCount = 1;
+ copy_region.imageExtent.height = 64;
+ copy_region.imageExtent.width = 64;
+ copy_region.imageExtent.depth = 1;
+
+ // Create src buffer and dst image with sampleCount = 4 and attempt to copy
+ // buffer to image
+ {
+ VkBufferObj src_buffer;
+ src_buffer.init_as_src(*m_device, 128 * 128 * 4, reqs);
+ image_create_info.samples = VK_SAMPLE_COUNT_4_BIT;
+ image_create_info.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+ VkImageObj dst_image(m_device);
+ dst_image.init(&image_create_info);
+ dst_image.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
+ m_commandBuffer->begin();
+ m_errorMonitor->SetDesiredFailureMsg(
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "was created with a sample count of VK_SAMPLE_COUNT_4_BIT but must be VK_SAMPLE_COUNT_1_BIT");
+ vkCmdCopyBufferToImage(m_commandBuffer->handle(), src_buffer.handle(), dst_image.handle(),
+ VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &copy_region);
+ m_errorMonitor->VerifyFound();
+ m_commandBuffer->end();
+ }
+
+ // Create dst buffer and src image with sampleCount = 4 and attempt to copy
+ // image to buffer
+ {
+ VkBufferObj dst_buffer;
+ dst_buffer.init_as_dst(*m_device, 128 * 128 * 4, reqs);
+ image_create_info.samples = VK_SAMPLE_COUNT_4_BIT;
+ image_create_info.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
+ vk_testing::Image src_image;
+ src_image.init(*m_device, (const VkImageCreateInfo &)image_create_info, reqs);
+ m_commandBuffer->begin();
+ m_errorMonitor->SetDesiredFailureMsg(
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "was created with a sample count of VK_SAMPLE_COUNT_4_BIT but must be VK_SAMPLE_COUNT_1_BIT");
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), src_image.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+ dst_buffer.handle(), 1, &copy_region);
+ m_errorMonitor->VerifyFound();
+ m_commandBuffer->end();
+ }
+}
+
+TEST_F(VkLayerTest, BlitImageFormatTypes) {
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ VkFormat f_unsigned = VK_FORMAT_R8G8B8A8_UINT;
+ VkFormat f_signed = VK_FORMAT_R8G8B8A8_SINT;
+ VkFormat f_float = VK_FORMAT_R32_SFLOAT;
+ VkFormat f_depth = VK_FORMAT_D32_SFLOAT_S8_UINT;
+ VkFormat f_depth2 = VK_FORMAT_D32_SFLOAT;
+
+ if (!ImageFormatIsSupported(gpu(), f_unsigned, VK_IMAGE_TILING_OPTIMAL) ||
+ !ImageFormatIsSupported(gpu(), f_signed, VK_IMAGE_TILING_OPTIMAL) ||
+ !ImageFormatIsSupported(gpu(), f_float, VK_IMAGE_TILING_OPTIMAL) ||
+ !ImageFormatIsSupported(gpu(), f_depth, VK_IMAGE_TILING_OPTIMAL) ||
+ !ImageFormatIsSupported(gpu(), f_depth2, VK_IMAGE_TILING_OPTIMAL)) {
+ printf("%s Requested formats not supported - BlitImageFormatTypes skipped.\n", kSkipPrefix);
+ return;
+ }
+
+ // Note any missing feature bits
+ bool usrc = !ImageFormatAndFeaturesSupported(gpu(), f_unsigned, VK_IMAGE_TILING_OPTIMAL, VK_FORMAT_FEATURE_BLIT_SRC_BIT);
+ bool udst = !ImageFormatAndFeaturesSupported(gpu(), f_unsigned, VK_IMAGE_TILING_OPTIMAL, VK_FORMAT_FEATURE_BLIT_DST_BIT);
+ bool ssrc = !ImageFormatAndFeaturesSupported(gpu(), f_signed, VK_IMAGE_TILING_OPTIMAL, VK_FORMAT_FEATURE_BLIT_SRC_BIT);
+ bool sdst = !ImageFormatAndFeaturesSupported(gpu(), f_signed, VK_IMAGE_TILING_OPTIMAL, VK_FORMAT_FEATURE_BLIT_DST_BIT);
+ bool fsrc = !ImageFormatAndFeaturesSupported(gpu(), f_float, VK_IMAGE_TILING_OPTIMAL, VK_FORMAT_FEATURE_BLIT_SRC_BIT);
+ bool fdst = !ImageFormatAndFeaturesSupported(gpu(), f_float, VK_IMAGE_TILING_OPTIMAL, VK_FORMAT_FEATURE_BLIT_DST_BIT);
+ bool d1dst = !ImageFormatAndFeaturesSupported(gpu(), f_depth, VK_IMAGE_TILING_OPTIMAL, VK_FORMAT_FEATURE_BLIT_DST_BIT);
+ bool d2src = !ImageFormatAndFeaturesSupported(gpu(), f_depth2, VK_IMAGE_TILING_OPTIMAL, VK_FORMAT_FEATURE_BLIT_SRC_BIT);
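+ // Each flag above is true when the format *lacks* the blit feature, in which case the
+ // corresponding 01999/02000 VUIDs are also expected in the blits below.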
+
+ VkImageObj unsigned_image(m_device);
+ unsigned_image.Init(64, 64, 1, f_unsigned, VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
+ VK_IMAGE_TILING_OPTIMAL, 0);
+ ASSERT_TRUE(unsigned_image.initialized());
+ unsigned_image.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
+
+ VkImageObj signed_image(m_device);
+ signed_image.Init(64, 64, 1, f_signed, VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
+ VK_IMAGE_TILING_OPTIMAL, 0);
+ ASSERT_TRUE(signed_image.initialized());
+ signed_image.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
+
+ VkImageObj float_image(m_device);
+ float_image.Init(64, 64, 1, f_float, VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL,
+ 0);
+ ASSERT_TRUE(float_image.initialized());
+ float_image.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
+
+ VkImageObj depth_image(m_device);
+ depth_image.Init(64, 64, 1, f_depth, VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL,
+ 0);
+ ASSERT_TRUE(depth_image.initialized());
+ depth_image.SetLayout(VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_DEPTH_BIT, VK_IMAGE_LAYOUT_GENERAL);
+
+ VkImageObj depth_image2(m_device);
+ depth_image2.Init(64, 64, 1, f_depth2, VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
+ VK_IMAGE_TILING_OPTIMAL, 0);
+ ASSERT_TRUE(depth_image2.initialized());
+ depth_image2.SetLayout(VK_IMAGE_ASPECT_DEPTH_BIT, VK_IMAGE_LAYOUT_GENERAL);
+
+ VkImageBlit blitRegion = {};
+ blitRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ blitRegion.srcSubresource.baseArrayLayer = 0;
+ blitRegion.srcSubresource.layerCount = 1;
+ blitRegion.srcSubresource.mipLevel = 0;
+ blitRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ blitRegion.dstSubresource.baseArrayLayer = 0;
+ blitRegion.dstSubresource.layerCount = 1;
+ blitRegion.dstSubresource.mipLevel = 0;
+ blitRegion.srcOffsets[0] = {0, 0, 0};
+ blitRegion.srcOffsets[1] = {64, 64, 1};
+ blitRegion.dstOffsets[0] = {0, 0, 0};
+ blitRegion.dstOffsets[1] = {32, 32, 1};
+
+ m_commandBuffer->begin();
+
+ // Unsigned int vs not an int
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-00230");
+ if (usrc) m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-01999");
+ if (fdst) m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-dstImage-02000");
+ vkCmdBlitImage(m_commandBuffer->handle(), unsigned_image.image(), unsigned_image.Layout(), float_image.image(),
+ float_image.Layout(), 1, &blitRegion, VK_FILTER_NEAREST);
+ m_errorMonitor->VerifyFound();
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-00230");
+ if (fsrc) m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-01999");
+ if (udst) m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-dstImage-02000");
+ vkCmdBlitImage(m_commandBuffer->handle(), float_image.image(), float_image.Layout(), unsigned_image.image(),
+ unsigned_image.Layout(), 1, &blitRegion, VK_FILTER_NEAREST);
+ m_errorMonitor->VerifyFound();
+
+ // Signed int vs not an int,
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-00229");
+ if (ssrc) m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-01999");
+ if (fdst) m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-dstImage-02000");
+ vkCmdBlitImage(m_commandBuffer->handle(), signed_image.image(), signed_image.Layout(), float_image.image(),
+ float_image.Layout(), 1, &blitRegion, VK_FILTER_NEAREST);
+ m_errorMonitor->VerifyFound();
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-00229");
+ if (fsrc) m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-01999");
+ if (sdst) m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-dstImage-02000");
+ vkCmdBlitImage(m_commandBuffer->handle(), float_image.image(), float_image.Layout(), signed_image.image(),
+ signed_image.Layout(), 1, &blitRegion, VK_FILTER_NEAREST);
+ m_errorMonitor->VerifyFound();
+
+ // Signed vs Unsigned int - generates both VUs
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-00229");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-00230");
+ if (ssrc) m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-01999");
+ if (udst) m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-dstImage-02000");
+ vkCmdBlitImage(m_commandBuffer->handle(), signed_image.image(), signed_image.Layout(), unsigned_image.image(),
+ unsigned_image.Layout(), 1, &blitRegion, VK_FILTER_NEAREST);
+ m_errorMonitor->VerifyFound();
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-00229");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-00230");
+ if (usrc) m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-01999");
+ if (sdst) m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-dstImage-02000");
+ vkCmdBlitImage(m_commandBuffer->handle(), unsigned_image.image(), unsigned_image.Layout(), signed_image.image(),
+ signed_image.Layout(), 1, &blitRegion, VK_FILTER_NEAREST);
+ m_errorMonitor->VerifyFound();
+
+ // Depth vs any non-identical depth format
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-00231");
+ blitRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+ blitRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+ if (d2src) m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-01999");
+ if (d1dst) m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-dstImage-02000");
+ vkCmdBlitImage(m_commandBuffer->handle(), depth_image2.image(), depth_image2.Layout(), depth_image.image(),
+ depth_image.Layout(), 1, &blitRegion, VK_FILTER_NEAREST);
+ m_errorMonitor->VerifyFound();
+
+ m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, BlitImageFilters) {
+ bool cubic_support = false;
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ if (DeviceExtensionSupported(gpu(), nullptr, "VK_IMG_filter_cubic")) {
+ m_device_extension_names.push_back("VK_IMG_filter_cubic");
+ cubic_support = true;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ VkFormat fmt = VK_FORMAT_R8_UINT;
+ if (!ImageFormatIsSupported(gpu(), fmt, VK_IMAGE_TILING_OPTIMAL)) {
+ printf("%s No R8_UINT format support - BlitImageFilters skipped.\n", kSkipPrefix);
+ return;
+ }
+
+ // Create 2D images
+ VkImageObj src2D(m_device);
+ VkImageObj dst2D(m_device);
+ src2D.Init(64, 64, 1, fmt, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+ dst2D.Init(64, 64, 1, fmt, VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+ ASSERT_TRUE(src2D.initialized());
+ ASSERT_TRUE(dst2D.initialized());
+ src2D.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
+ dst2D.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
+
+ // Create 3D image
+ VkImageCreateInfo ci;
+ ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ ci.pNext = NULL;
+ ci.flags = 0;
+ ci.imageType = VK_IMAGE_TYPE_3D;
+ ci.format = fmt;
+ ci.extent = {64, 64, 4};
+ ci.mipLevels = 1;
+ ci.arrayLayers = 1;
+ ci.samples = VK_SAMPLE_COUNT_1_BIT;
+ ci.tiling = VK_IMAGE_TILING_OPTIMAL;
+ ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
+ ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+ ci.queueFamilyIndexCount = 0;
+ ci.pQueueFamilyIndices = NULL;
+ ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+
+ VkImageObj src3D(m_device);
+ src3D.init(&ci);
+ ASSERT_TRUE(src3D.initialized());
+
+ VkImageBlit blitRegion = {};
+ blitRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ blitRegion.srcSubresource.baseArrayLayer = 0;
+ blitRegion.srcSubresource.layerCount = 1;
+ blitRegion.srcSubresource.mipLevel = 0;
+ blitRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ blitRegion.dstSubresource.baseArrayLayer = 0;
+ blitRegion.dstSubresource.layerCount = 1;
+ blitRegion.dstSubresource.mipLevel = 0;
+ blitRegion.srcOffsets[0] = {0, 0, 0};
+ blitRegion.srcOffsets[1] = {48, 48, 1};
+ blitRegion.dstOffsets[0] = {0, 0, 0};
+ blitRegion.dstOffsets[1] = {64, 64, 1};
+
+ m_commandBuffer->begin();
+
+ // UINT format should not support linear filtering, but check to be sure
+ if (!ImageFormatAndFeaturesSupported(gpu(), fmt, VK_IMAGE_TILING_OPTIMAL, VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT)) {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-filter-02001");
+ vkCmdBlitImage(m_commandBuffer->handle(), src2D.image(), src2D.Layout(), dst2D.image(), dst2D.Layout(), 1, &blitRegion,
+ VK_FILTER_LINEAR);
+ m_errorMonitor->VerifyFound();
+ }
+
+ if (cubic_support && !ImageFormatAndFeaturesSupported(gpu(), fmt, VK_IMAGE_TILING_OPTIMAL,
+ VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG)) {
+ // Invalid filter CUBIC_IMG
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-filter-02002");
+ vkCmdBlitImage(m_commandBuffer->handle(), src3D.image(), src3D.Layout(), dst2D.image(), dst2D.Layout(), 1, &blitRegion,
+ VK_FILTER_CUBIC_IMG);
+ m_errorMonitor->VerifyFound();
+
+ // Invalid filter CUBIC_IMG + invalid 2D source image
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-filter-02002");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-filter-00237");
+ vkCmdBlitImage(m_commandBuffer->handle(), src2D.image(), src2D.Layout(), dst2D.image(), dst2D.Layout(), 1, &blitRegion,
+ VK_FILTER_CUBIC_IMG);
+ m_errorMonitor->VerifyFound();
+ }
+
+ m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, BlitImageLayout) {
+ TEST_DESCRIPTION("Incorrect vkCmdBlitImage layouts");
+
+ ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+
+ VkResult err;
+ VkFormat fmt = VK_FORMAT_R8G8B8A8_UNORM;
+
+ VkSubmitInfo submit_info = {};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &m_commandBuffer->handle();
+
+ // Create images
+ VkImageObj img_src_transfer(m_device);
+ VkImageObj img_dst_transfer(m_device);
+ VkImageObj img_general(m_device);
+ VkImageObj img_color(m_device);
+
+ img_src_transfer.InitNoLayout(64, 64, 1, fmt, VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
+ VK_IMAGE_TILING_OPTIMAL, 0);
+ img_dst_transfer.InitNoLayout(64, 64, 1, fmt, VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
+ VK_IMAGE_TILING_OPTIMAL, 0);
+ img_general.InitNoLayout(64, 64, 1, fmt, VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
+ VK_IMAGE_TILING_OPTIMAL, 0);
+ img_color.InitNoLayout(64, 64, 1, fmt,
+ VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
+ VK_IMAGE_TILING_OPTIMAL, 0);
+
+ ASSERT_TRUE(img_src_transfer.initialized());
+ ASSERT_TRUE(img_dst_transfer.initialized());
+ ASSERT_TRUE(img_general.initialized());
+ ASSERT_TRUE(img_color.initialized());
+
+ img_src_transfer.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
+ img_dst_transfer.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
+ img_general.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
+ img_color.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL);
+
+ VkImageBlit blit_region = {};
+ blit_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ blit_region.srcSubresource.baseArrayLayer = 0;
+ blit_region.srcSubresource.layerCount = 1;
+ blit_region.srcSubresource.mipLevel = 0;
+ blit_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ blit_region.dstSubresource.baseArrayLayer = 0;
+ blit_region.dstSubresource.layerCount = 1;
+ blit_region.dstSubresource.mipLevel = 0;
+ blit_region.srcOffsets[0] = {0, 0, 0};
+ blit_region.srcOffsets[1] = {48, 48, 1};
+ blit_region.dstOffsets[0] = {0, 0, 0};
+ blit_region.dstOffsets[1] = {64, 64, 1};
+
+ m_commandBuffer->begin();
+
+ // Illegal srcImageLayout
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImageLayout-00222");
+ vkCmdBlitImage(m_commandBuffer->handle(), img_src_transfer.image(), VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+ img_dst_transfer.image(), img_dst_transfer.Layout(), 1, &blit_region, VK_FILTER_LINEAR);
+ m_errorMonitor->VerifyFound();
+
+ // Illegal destImageLayout
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-dstImageLayout-00227");
+ vkCmdBlitImage(m_commandBuffer->handle(), img_src_transfer.image(), img_src_transfer.Layout(), img_dst_transfer.image(),
+ VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, 1, &blit_region, VK_FILTER_LINEAR);
+
+ m_commandBuffer->end();
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+ m_errorMonitor->VerifyFound();
+
+ err = vkQueueWaitIdle(m_device->m_queue);
+ ASSERT_VK_SUCCESS(err);
+
+ m_commandBuffer->reset(0);
+ m_commandBuffer->begin();
+
+ // Source image in invalid layout at start of the CB
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "UNASSIGNED-CoreValidation-DrawState-InvalidImageLayout");
+ vkCmdBlitImage(m_commandBuffer->handle(), img_src_transfer.image(), img_src_transfer.Layout(), img_color.image(),
+ VK_IMAGE_LAYOUT_GENERAL, 1, &blit_region, VK_FILTER_LINEAR);
+
+ m_commandBuffer->end();
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+ m_errorMonitor->VerifyFound();
+ err = vkQueueWaitIdle(m_device->m_queue);
+ ASSERT_VK_SUCCESS(err);
+
+ m_commandBuffer->reset(0);
+ m_commandBuffer->begin();
+
+ // Destination image in invalid layout at start of the CB
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "UNASSIGNED-CoreValidation-DrawState-InvalidImageLayout");
+ vkCmdBlitImage(m_commandBuffer->handle(), img_color.image(), VK_IMAGE_LAYOUT_GENERAL, img_dst_transfer.image(),
+ img_dst_transfer.Layout(), 1, &blit_region, VK_FILTER_LINEAR);
+
+ m_commandBuffer->end();
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+ m_errorMonitor->VerifyFound();
+ err = vkQueueWaitIdle(m_device->m_queue);
+ ASSERT_VK_SUCCESS(err);
+
+ // Source image in invalid layout in the middle of CB
+ m_commandBuffer->reset(0);
+ m_commandBuffer->begin();
+
+ VkImageMemoryBarrier img_barrier = {};
+ img_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+ img_barrier.pNext = nullptr;
+ img_barrier.srcAccessMask = 0;
+ img_barrier.dstAccessMask = 0;
+ img_barrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
+ img_barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
+ img_barrier.image = img_general.handle();
+ img_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+ img_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+ img_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ img_barrier.subresourceRange.baseArrayLayer = 0;
+ img_barrier.subresourceRange.baseMipLevel = 0;
+ img_barrier.subresourceRange.layerCount = 1;
+ img_barrier.subresourceRange.levelCount = 1;
+
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, 0, 0,
+ nullptr, 0, nullptr, 1, &img_barrier);
+
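+ // The barrier above transitioned img_general to TRANSFER_DST_OPTIMAL, so claiming
+ // GENERAL as the source layout in the blit below no longer matches its actual layout.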
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImageLayout-00221");
+ vkCmdBlitImage(m_commandBuffer->handle(), img_general.image(), VK_IMAGE_LAYOUT_GENERAL, img_dst_transfer.image(),
+ img_dst_transfer.Layout(), 1, &blit_region, VK_FILTER_LINEAR);
+
+ m_commandBuffer->end();
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+ m_errorMonitor->VerifyFound();
+ err = vkQueueWaitIdle(m_device->m_queue);
+ ASSERT_VK_SUCCESS(err);
+
+ // Destination image in invalid layout in the middle of CB
+ m_commandBuffer->reset(0);
+ m_commandBuffer->begin();
+
+ img_barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
+ img_barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
+ img_barrier.image = img_dst_transfer.handle();
+
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, 0, 0,
+ nullptr, 0, nullptr, 1, &img_barrier);
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-dstImageLayout-00226");
+ vkCmdBlitImage(m_commandBuffer->handle(), img_src_transfer.image(), img_src_transfer.Layout(), img_dst_transfer.image(),
+ VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &blit_region, VK_FILTER_LINEAR);
+
+ m_commandBuffer->end();
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+ m_errorMonitor->VerifyFound();
+ err = vkQueueWaitIdle(m_device->m_queue);
+ ASSERT_VK_SUCCESS(err);
+}
+
+TEST_F(VkLayerTest, BlitImageOffsets) {
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ VkFormat fmt = VK_FORMAT_R8G8B8A8_UNORM;
+ if (!ImageFormatAndFeaturesSupported(gpu(), fmt, VK_IMAGE_TILING_OPTIMAL,
+ VK_FORMAT_FEATURE_BLIT_SRC_BIT | VK_FORMAT_FEATURE_BLIT_DST_BIT)) {
+ printf("%s No blit feature bits - BlitImageOffsets skipped.\n", kSkipPrefix);
+ return;
+ }
+
+ VkImageCreateInfo ci;
+ ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ ci.pNext = NULL;
+ ci.flags = 0;
+ ci.imageType = VK_IMAGE_TYPE_1D;
+ ci.format = fmt;
+ ci.extent = {64, 1, 1};
+ ci.mipLevels = 1;
+ ci.arrayLayers = 1;
+ ci.samples = VK_SAMPLE_COUNT_1_BIT;
+ ci.tiling = VK_IMAGE_TILING_OPTIMAL;
+ ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+ ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+ ci.queueFamilyIndexCount = 0;
+ ci.pQueueFamilyIndices = NULL;
+ ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+
+ VkImageObj image_1D(m_device);
+ image_1D.init(&ci);
+ ASSERT_TRUE(image_1D.initialized());
+
+ ci.imageType = VK_IMAGE_TYPE_2D;
+ ci.extent = {64, 64, 1};
+ VkImageObj image_2D(m_device);
+ image_2D.init(&ci);
+ ASSERT_TRUE(image_2D.initialized());
+
+ ci.imageType = VK_IMAGE_TYPE_3D;
+ ci.extent = {64, 64, 64};
+ VkImageObj image_3D(m_device);
+ image_3D.init(&ci);
+ ASSERT_TRUE(image_3D.initialized());
+
+ VkImageBlit blit_region = {};
+ blit_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ blit_region.srcSubresource.baseArrayLayer = 0;
+ blit_region.srcSubresource.layerCount = 1;
+ blit_region.srcSubresource.mipLevel = 0;
+ blit_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ blit_region.dstSubresource.baseArrayLayer = 0;
+ blit_region.dstSubresource.layerCount = 1;
+ blit_region.dstSubresource.mipLevel = 0;
+
+ m_commandBuffer->begin();
+
+ // 1D, with src/dest y offsets other than (0,1)
+ blit_region.srcOffsets[0] = {0, 1, 0};
+ blit_region.srcOffsets[1] = {30, 1, 1};
+ blit_region.dstOffsets[0] = {32, 0, 0};
+ blit_region.dstOffsets[1] = {64, 1, 1};
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageBlit-srcImage-00245");
+ vkCmdBlitImage(m_commandBuffer->handle(), image_1D.image(), image_1D.Layout(), image_1D.image(), image_1D.Layout(), 1,
+ &blit_region, VK_FILTER_NEAREST);
+ m_errorMonitor->VerifyFound();
+
+ blit_region.srcOffsets[0] = {0, 0, 0};
+ blit_region.dstOffsets[0] = {32, 1, 0};
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageBlit-dstImage-00250");
+ vkCmdBlitImage(m_commandBuffer->handle(), image_1D.image(), image_1D.Layout(), image_1D.image(), image_1D.Layout(), 1,
+ &blit_region, VK_FILTER_NEAREST);
+ m_errorMonitor->VerifyFound();
+
+ // 2D, with src/dest z offsets other than (0,1)
+ blit_region.srcOffsets[0] = {0, 0, 1};
+ blit_region.srcOffsets[1] = {24, 31, 1};
+ blit_region.dstOffsets[0] = {32, 32, 0};
+ blit_region.dstOffsets[1] = {64, 64, 1};
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageBlit-srcImage-00247");
+ vkCmdBlitImage(m_commandBuffer->handle(), image_2D.image(), image_2D.Layout(), image_2D.image(), image_2D.Layout(), 1,
+ &blit_region, VK_FILTER_NEAREST);
+ m_errorMonitor->VerifyFound();
+
+ blit_region.srcOffsets[0] = {0, 0, 0};
+ blit_region.dstOffsets[0] = {32, 32, 1};
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageBlit-dstImage-00252");
+ vkCmdBlitImage(m_commandBuffer->handle(), image_2D.image(), image_2D.Layout(), image_2D.image(), image_2D.Layout(), 1,
+ &blit_region, VK_FILTER_NEAREST);
+ m_errorMonitor->VerifyFound();
+
+ // Source offsets exceeding source image dimensions
+ blit_region.srcOffsets[0] = {0, 0, 0};
+ blit_region.srcOffsets[1] = {65, 64, 1}; // src x
+ blit_region.dstOffsets[0] = {0, 0, 0};
+ blit_region.dstOffsets[1] = {64, 64, 1};
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageBlit-srcOffset-00243"); // x
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-pRegions-00215"); // src region
+ vkCmdBlitImage(m_commandBuffer->handle(), image_3D.image(), image_3D.Layout(), image_2D.image(), image_2D.Layout(), 1,
+ &blit_region, VK_FILTER_NEAREST);
+ m_errorMonitor->VerifyFound();
+
+ blit_region.srcOffsets[1] = {64, 65, 1}; // src y
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageBlit-srcOffset-00244"); // y
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-pRegions-00215"); // src region
+ vkCmdBlitImage(m_commandBuffer->handle(), image_3D.image(), image_3D.Layout(), image_2D.image(), image_2D.Layout(), 1,
+ &blit_region, VK_FILTER_NEAREST);
+ m_errorMonitor->VerifyFound();
+
+ blit_region.srcOffsets[0] = {0, 0, 65}; // src z
+ blit_region.srcOffsets[1] = {64, 64, 64};
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageBlit-srcOffset-00246"); // z
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-pRegions-00215"); // src region
+ vkCmdBlitImage(m_commandBuffer->handle(), image_3D.image(), image_3D.Layout(), image_2D.image(), image_2D.Layout(), 1,
+ &blit_region, VK_FILTER_NEAREST);
+ m_errorMonitor->VerifyFound();
+
+ // Dest offsets exceeding destination image dimensions
+ blit_region.srcOffsets[0] = {0, 0, 0};
+ blit_region.srcOffsets[1] = {64, 64, 1};
+ blit_region.dstOffsets[0] = {96, 64, 32}; // dst x
+ blit_region.dstOffsets[1] = {64, 0, 33};
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageBlit-dstOffset-00248"); // x
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-pRegions-00216"); // dst region
+ vkCmdBlitImage(m_commandBuffer->handle(), image_2D.image(), image_2D.Layout(), image_3D.image(), image_3D.Layout(), 1,
+ &blit_region, VK_FILTER_NEAREST);
+ m_errorMonitor->VerifyFound();
+
+ blit_region.dstOffsets[0] = {0, 65, 32}; // dst y
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageBlit-dstOffset-00249"); // y
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-pRegions-00216"); // dst region
+ vkCmdBlitImage(m_commandBuffer->handle(), image_2D.image(), image_2D.Layout(), image_3D.image(), image_3D.Layout(), 1,
+ &blit_region, VK_FILTER_NEAREST);
+ m_errorMonitor->VerifyFound();
+
+ blit_region.dstOffsets[0] = {0, 64, 65}; // dst z
+ blit_region.dstOffsets[1] = {64, 0, 64};
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageBlit-dstOffset-00251"); // z
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-pRegions-00216"); // dst region
+ vkCmdBlitImage(m_commandBuffer->handle(), image_2D.image(), image_2D.Layout(), image_3D.image(), image_3D.Layout(), 1,
+ &blit_region, VK_FILTER_NEAREST);
+ m_errorMonitor->VerifyFound();
+
+ m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, MiscBlitImageTests) {
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ VkFormat f_color = VK_FORMAT_R32_SFLOAT; // Need features ..BLIT_SRC_BIT & ..BLIT_DST_BIT
+
+ if (!ImageFormatAndFeaturesSupported(gpu(), f_color, VK_IMAGE_TILING_OPTIMAL,
+ VK_FORMAT_FEATURE_BLIT_SRC_BIT | VK_FORMAT_FEATURE_BLIT_DST_BIT)) {
+ printf("%s Requested format features unavailable - MiscBlitImageTests skipped.\n", kSkipPrefix);
+ return;
+ }
+
+ VkImageCreateInfo ci;
+ ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ ci.pNext = NULL;
+ ci.flags = 0;
+ ci.imageType = VK_IMAGE_TYPE_2D;
+ ci.format = f_color;
+ ci.extent = {64, 64, 1};
+ ci.mipLevels = 1;
+ ci.arrayLayers = 1;
+ ci.samples = VK_SAMPLE_COUNT_1_BIT;
+ ci.tiling = VK_IMAGE_TILING_OPTIMAL;
+ ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+ ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+ ci.queueFamilyIndexCount = 0;
+ ci.pQueueFamilyIndices = NULL;
+ ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+
+ // 2D color image
+ VkImageObj color_img(m_device);
+ color_img.init(&ci);
+ ASSERT_TRUE(color_img.initialized());
+
+ // 2D multi-sample image
+ ci.samples = VK_SAMPLE_COUNT_4_BIT;
+ VkImageObj ms_img(m_device);
+ ms_img.init(&ci);
+ ASSERT_TRUE(ms_img.initialized());
+
+ // 3D color image
+ ci.samples = VK_SAMPLE_COUNT_1_BIT;
+ ci.imageType = VK_IMAGE_TYPE_3D;
+ ci.extent = {64, 64, 8};
+ VkImageObj color_3D_img(m_device);
+ color_3D_img.init(&ci);
+ ASSERT_TRUE(color_3D_img.initialized());
+
+ VkImageBlit blitRegion = {};
+ blitRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ blitRegion.srcSubresource.baseArrayLayer = 0;
+ blitRegion.srcSubresource.layerCount = 1;
+ blitRegion.srcSubresource.mipLevel = 0;
+ blitRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ blitRegion.dstSubresource.baseArrayLayer = 0;
+ blitRegion.dstSubresource.layerCount = 1;
+ blitRegion.dstSubresource.mipLevel = 0;
+ blitRegion.srcOffsets[0] = {0, 0, 0};
+ blitRegion.srcOffsets[1] = {16, 16, 1};
+ blitRegion.dstOffsets[0] = {32, 32, 0};
+ blitRegion.dstOffsets[1] = {64, 64, 1};
+
+ m_commandBuffer->begin();
+
+ // Blit with aspectMask errors
+ blitRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+ blitRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageBlit-aspectMask-00241");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageBlit-aspectMask-00242");
+ vkCmdBlitImage(m_commandBuffer->handle(), color_img.image(), color_img.Layout(), color_img.image(), color_img.Layout(), 1,
+ &blitRegion, VK_FILTER_NEAREST);
+ m_errorMonitor->VerifyFound();
+
+ // Blit with invalid src mip level
+ blitRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ blitRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ blitRegion.srcSubresource.mipLevel = ci.mipLevels;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdBlitImage-srcSubresource-01705"); // invalid srcSubresource.mipLevel
+ // Redundant unavoidable errors
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkImageBlit-srcOffset-00243"); // out-of-bounds srcOffset.x
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkImageBlit-srcOffset-00244"); // out-of-bounds srcOffset.y
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkImageBlit-srcOffset-00246"); // out-of-bounds srcOffset.z
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdBlitImage-pRegions-00215"); // region not contained within src image
+ vkCmdBlitImage(m_commandBuffer->handle(), color_img.image(), color_img.Layout(), color_img.image(), color_img.Layout(), 1,
+ &blitRegion, VK_FILTER_NEAREST);
+ m_errorMonitor->VerifyFound();
+
+ // Blit with invalid dst mip level
+ blitRegion.srcSubresource.mipLevel = 0;
+ blitRegion.dstSubresource.mipLevel = ci.mipLevels;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdBlitImage-dstSubresource-01706"); // invalid dstSubresource.mipLevel
+ // Redundant unavoidable errors
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkImageBlit-dstOffset-00248"); // out-of-bounds dstOffset.x
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkImageBlit-dstOffset-00249"); // out-of-bounds dstOffset.y
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkImageBlit-dstOffset-00251"); // out-of-bounds dstOffset.z
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdBlitImage-pRegions-00216"); // region not contained within dst image
+ vkCmdBlitImage(m_commandBuffer->handle(), color_img.image(), color_img.Layout(), color_img.image(), color_img.Layout(), 1,
+ &blitRegion, VK_FILTER_NEAREST);
+ m_errorMonitor->VerifyFound();
+
+ // Blit with invalid src array layer
+ blitRegion.dstSubresource.mipLevel = 0;
+ blitRegion.srcSubresource.baseArrayLayer = ci.arrayLayers;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdBlitImage-srcSubresource-01707"); // invalid srcSubresource layer range
+ vkCmdBlitImage(m_commandBuffer->handle(), color_img.image(), color_img.Layout(), color_img.image(), color_img.Layout(), 1,
+ &blitRegion, VK_FILTER_NEAREST);
+ m_errorMonitor->VerifyFound();
+
+ // Blit with invalid dst array layer
+ blitRegion.srcSubresource.baseArrayLayer = 0;
+ blitRegion.dstSubresource.baseArrayLayer = ci.arrayLayers;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdBlitImage-dstSubresource-01708"); // invalid dstSubresource layer range
+ // Redundant unavoidable errors
+ vkCmdBlitImage(m_commandBuffer->handle(), color_img.image(), color_img.Layout(), color_img.image(), color_img.Layout(), 1,
+ &blitRegion, VK_FILTER_NEAREST);
+ m_errorMonitor->VerifyFound();
+
+ blitRegion.dstSubresource.baseArrayLayer = 0;
+
+ // Blit multi-sample image
+ // TODO: redundant VUs - either one (00228) or two (00233 & 00234) should be eliminated.
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-00228");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-00233");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-dstImage-00234");
+ vkCmdBlitImage(m_commandBuffer->handle(), ms_img.image(), ms_img.Layout(), ms_img.image(), ms_img.Layout(), 1, &blitRegion,
+ VK_FILTER_NEAREST);
+ m_errorMonitor->VerifyFound();
+
+ // Blit 3D with baseArrayLayer != 0 or layerCount != 1
+ blitRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ blitRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ blitRegion.srcSubresource.baseArrayLayer = 1;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageBlit-srcImage-00240");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdBlitImage-srcSubresource-01707"); // base+count > total layer count
+ vkCmdBlitImage(m_commandBuffer->handle(), color_3D_img.image(), color_3D_img.Layout(), color_3D_img.image(),
+ color_3D_img.Layout(), 1, &blitRegion, VK_FILTER_NEAREST);
+ m_errorMonitor->VerifyFound();
+ blitRegion.srcSubresource.baseArrayLayer = 0;
+ blitRegion.srcSubresource.layerCount = 0;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageBlit-srcImage-00240");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkImageSubresourceLayers-layerCount-01700"); // layer count == 0 (src)
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkImageBlit-layerCount-00239"); // src/dst layer count mismatch
+ vkCmdBlitImage(m_commandBuffer->handle(), color_3D_img.image(), color_3D_img.Layout(), color_3D_img.image(),
+ color_3D_img.Layout(), 1, &blitRegion, VK_FILTER_NEAREST);
+ m_errorMonitor->VerifyFound();
+
+ m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, BlitToDepthImageTests) {
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ // Need feature ..BLIT_SRC_BIT but not ..BLIT_DST_BIT
+ // TODO: provide more choices here; supporting D32_SFLOAT as BLIT_DST isn't unheard of.
+ VkFormat f_depth = VK_FORMAT_D32_SFLOAT;
+
+ if (!ImageFormatAndFeaturesSupported(gpu(), f_depth, VK_IMAGE_TILING_OPTIMAL, VK_FORMAT_FEATURE_BLIT_SRC_BIT) ||
+ ImageFormatAndFeaturesSupported(gpu(), f_depth, VK_IMAGE_TILING_OPTIMAL, VK_FORMAT_FEATURE_BLIT_DST_BIT)) {
+ printf("%s Requested format features unavailable - BlitToDepthImageTests skipped.\n", kSkipPrefix);
+ return;
+ }
+
+ VkImageCreateInfo ci;
+ ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ ci.pNext = NULL;
+ ci.flags = 0;
+ ci.imageType = VK_IMAGE_TYPE_2D;
+ ci.format = f_depth;
+ ci.extent = {64, 64, 1};
+ ci.mipLevels = 1;
+ ci.arrayLayers = 1;
+ ci.samples = VK_SAMPLE_COUNT_1_BIT;
+ ci.tiling = VK_IMAGE_TILING_OPTIMAL;
+ ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+ ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+ ci.queueFamilyIndexCount = 0;
+ ci.pQueueFamilyIndices = NULL;
+ ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+
+ // 2D depth image
+ VkImageObj depth_img(m_device);
+ depth_img.init(&ci);
+ ASSERT_TRUE(depth_img.initialized());
+
+ VkImageBlit blitRegion = {};
+ blitRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ blitRegion.srcSubresource.baseArrayLayer = 0;
+ blitRegion.srcSubresource.layerCount = 1;
+ blitRegion.srcSubresource.mipLevel = 0;
+ blitRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ blitRegion.dstSubresource.baseArrayLayer = 0;
+ blitRegion.dstSubresource.layerCount = 1;
+ blitRegion.dstSubresource.mipLevel = 0;
+ blitRegion.srcOffsets[0] = {0, 0, 0};
+ blitRegion.srcOffsets[1] = {16, 16, 1};
+ blitRegion.dstOffsets[0] = {32, 32, 0};
+ blitRegion.dstOffsets[1] = {64, 64, 1};
+
+ m_commandBuffer->begin();
+
+ // Blit depth image - has SRC_BIT but not DST_BIT
+ blitRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+ blitRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-dstImage-02000");
+ vkCmdBlitImage(m_commandBuffer->handle(), depth_img.image(), depth_img.Layout(), depth_img.image(), depth_img.Layout(), 1,
+ &blitRegion, VK_FILTER_NEAREST);
+ m_errorMonitor->VerifyFound();
+
+ m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, MinImageTransferGranularity) {
+ TEST_DESCRIPTION("Tests for validation of Queue Family property minImageTransferGranularity.");
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ auto queue_family_properties = m_device->phy().queue_properties();
+ auto large_granularity_family =
+ std::find_if(queue_family_properties.begin(), queue_family_properties.end(), [](VkQueueFamilyProperties family_properties) {
+ VkExtent3D family_granularity = family_properties.minImageTransferGranularity;
+ // We need a queue family that supports copy operations and has a large enough minImageTransferGranularity for the tests
+ // below to make sense.
+ return (family_properties.queueFlags & VK_QUEUE_TRANSFER_BIT || family_properties.queueFlags & VK_QUEUE_GRAPHICS_BIT ||
+ family_properties.queueFlags & VK_QUEUE_COMPUTE_BIT) &&
+ family_granularity.depth >= 4 && family_granularity.width >= 4 && family_granularity.height >= 4;
+ });
+
+ if (large_granularity_family == queue_family_properties.end()) {
+ printf("%s No queue family has a large enough granularity for this test to be meaningful, skipping test\n", kSkipPrefix);
+ return;
+ }
+ const size_t queue_family_index = std::distance(queue_family_properties.begin(), large_granularity_family);
+ VkExtent3D granularity = queue_family_properties[queue_family_index].minImageTransferGranularity;
+ VkCommandPoolObj command_pool(m_device, queue_family_index, 0);
+
+ // Create source and destination images (identical except for usage flags) and try to copy between them
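+ // The image extents are twice the minimum transfer granularity, so the intentionally misaligned offsets/extents set below are what trigger the granularity errors.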
+ VkImage srcImage;
+ VkImage dstImage;
+
+ VkImageCreateInfo image_create_info = {};
+ image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ image_create_info.pNext = NULL;
+ image_create_info.imageType = VK_IMAGE_TYPE_3D;
+ image_create_info.format = VK_FORMAT_B8G8R8A8_UNORM;
+ image_create_info.extent.width = granularity.width * 2;
+ image_create_info.extent.height = granularity.height * 2;
+ image_create_info.extent.depth = granularity.depth * 2;
+ image_create_info.mipLevels = 1;
+ image_create_info.arrayLayers = 1;
+ image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+ image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+ image_create_info.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
+ image_create_info.flags = 0;
+
+ VkImageObj src_image_obj(m_device);
+ src_image_obj.init(&image_create_info);
+ ASSERT_TRUE(src_image_obj.initialized());
+ srcImage = src_image_obj.handle();
+
+ image_create_info.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+
+ VkImageObj dst_image_obj(m_device);
+ dst_image_obj.init(&image_create_info);
+ ASSERT_TRUE(dst_image_obj.initialized());
+ dstImage = dst_image_obj.handle();
+
+ VkCommandBufferObj command_buffer(m_device, &command_pool);
+ ASSERT_TRUE(command_buffer.initialized());
+ command_buffer.begin();
+
+ VkImageCopy copyRegion;
+ copyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ copyRegion.srcSubresource.mipLevel = 0;
+ copyRegion.srcSubresource.baseArrayLayer = 0;
+ copyRegion.srcSubresource.layerCount = 1;
+ copyRegion.srcOffset.x = 0;
+ copyRegion.srcOffset.y = 0;
+ copyRegion.srcOffset.z = 0;
+ copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ copyRegion.dstSubresource.mipLevel = 0;
+ copyRegion.dstSubresource.baseArrayLayer = 0;
+ copyRegion.dstSubresource.layerCount = 1;
+ copyRegion.dstOffset.x = 0;
+ copyRegion.dstOffset.y = 0;
+ copyRegion.dstOffset.z = 0;
+ copyRegion.extent.width = granularity.width;
+ copyRegion.extent.height = granularity.height;
+ copyRegion.extent.depth = granularity.depth;
+
+ // Introduce failure by setting srcOffset to a bad granularity value
+ copyRegion.srcOffset.y = 3;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdCopyImage-srcOffset-01783"); // srcOffset image transfer granularity
+ command_buffer.CopyImage(srcImage, VK_IMAGE_LAYOUT_GENERAL, dstImage, VK_IMAGE_LAYOUT_GENERAL, 1, &copyRegion);
+ m_errorMonitor->VerifyFound();
+
+ // Introduce failure by setting extent to a granularity value that is bad
+ // for both the source and destination image.
+ copyRegion.srcOffset.y = 0;
+ copyRegion.extent.width = 3;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdCopyImage-srcOffset-01783"); // src extent image transfer granularity
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdCopyImage-dstOffset-01784"); // dst extent image transfer granularity
+ command_buffer.CopyImage(srcImage, VK_IMAGE_LAYOUT_GENERAL, dstImage, VK_IMAGE_LAYOUT_GENERAL, 1, &copyRegion);
+ m_errorMonitor->VerifyFound();
+
+ // Now do some buffer/image copies
+ VkBufferObj buffer;
+ VkMemoryPropertyFlags reqs = 0;
+ buffer.init_as_src_and_dst(*m_device, 8 * granularity.height * granularity.width * granularity.depth, reqs);
+ VkBufferImageCopy region = {};
+ region.bufferOffset = 0;
+ region.bufferRowLength = 0;
+ region.bufferImageHeight = 0;
+ region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ region.imageSubresource.layerCount = 1;
+ region.imageExtent.height = granularity.height;
+ region.imageExtent.width = granularity.width;
+ region.imageExtent.depth = granularity.depth;
+ region.imageOffset.x = 0;
+ region.imageOffset.y = 0;
+ region.imageOffset.z = 0;
+
+ // Introduce failure by setting imageExtent to a bad granularity value
+ region.imageExtent.width = 3;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdCopyImageToBuffer-imageOffset-01794"); // image transfer granularity
+ vkCmdCopyImageToBuffer(command_buffer.handle(), srcImage, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, buffer.handle(), 1, &region);
+ m_errorMonitor->VerifyFound();
+ region.imageExtent.width = granularity.width;
+
+ // Introduce failure by setting imageOffset to a bad granularity value
+ region.imageOffset.z = 3;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdCopyBufferToImage-imageOffset-01793"); // image transfer granularity
+ vkCmdCopyBufferToImage(command_buffer.handle(), buffer.handle(), dstImage, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region);
+ m_errorMonitor->VerifyFound();
+
+ command_buffer.end();
+}
+
+TEST_F(VkLayerTest, MismatchedQueueFamiliesOnSubmit) {
+ TEST_DESCRIPTION(
+ "Create a command buffer using one queue family and attempt to submit it on a queue created from a different queue "
+ "family.");
+
+ ASSERT_NO_FATAL_FAILURE(Init()); // assumes it initializes all queue families on vkCreateDevice
+
+ // This test is meaningless unless we have multiple queue families
+ auto queue_family_properties = m_device->phy().queue_properties();
+ std::vector<uint32_t> queue_families;
+ for (uint32_t i = 0; i < queue_family_properties.size(); ++i)
+ if (queue_family_properties[i].queueCount > 0) queue_families.push_back(i);
+
+ if (queue_families.size() < 2) {
+ printf("%s Device only has one queue family; skipped.\n", kSkipPrefix);
+ return;
+ }
+
+ const uint32_t queue_family = queue_families[0];
+
+ const uint32_t other_queue_family = queue_families[1];
+ VkQueue other_queue;
+ vkGetDeviceQueue(m_device->device(), other_queue_family, 0, &other_queue);
+
+ VkCommandPoolObj cmd_pool(m_device, queue_family);
+ VkCommandBufferObj cmd_buff(m_device, &cmd_pool);
+
+ cmd_buff.begin();
+ cmd_buff.end();
+
+ // Submit on the wrong queue
+ VkSubmitInfo submit_info = {};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &cmd_buff.handle();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkQueueSubmit-pCommandBuffers-00074");
+ vkQueueSubmit(other_queue, 1, &submit_info, VK_NULL_HANDLE);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, DrawWithPipelineIncompatibleWithSubpass) {
+ TEST_DESCRIPTION("Use a pipeline for the wrong subpass in a render pass instance");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ // A renderpass with two subpasses, both writing the same attachment.
+ VkAttachmentDescription attach[] = {
+ {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
+ VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_UNDEFINED,
+ VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
+ };
+ VkAttachmentReference ref = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
+ VkSubpassDescription subpasses[] = {
+ {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, &ref, nullptr, nullptr, 0, nullptr},
+ {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, &ref, nullptr, nullptr, 0, nullptr},
+ };
+ VkSubpassDependency dep = {0,
+ 1,
+ VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+ VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+ VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
+ VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
+ VK_DEPENDENCY_BY_REGION_BIT};
+ VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, attach, 2, subpasses, 1, &dep};
+ VkRenderPass rp;
+ VkResult err = vkCreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
+ ASSERT_VK_SUCCESS(err);
+
+ VkImageObj image(m_device);
+ image.InitNoLayout(32, 32, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+ VkImageView imageView = image.targetView(VK_FORMAT_R8G8B8A8_UNORM);
+
+ VkFramebufferCreateInfo fbci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 1, &imageView, 32, 32, 1};
+ VkFramebuffer fb;
+ err = vkCreateFramebuffer(m_device->device(), &fbci, nullptr, &fb);
+ ASSERT_VK_SUCCESS(err);
+
+ char const *vsSource =
+ "#version 450\n"
+ "void main() { gl_Position = vec4(1); }\n";
+ char const *fsSource =
+ "#version 450\n"
+ "layout(location=0) out vec4 color;\n"
+ "void main() { color = vec4(1); }\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+ VkPipelineObj pipe(m_device);
+ pipe.AddDefaultColorAttachment();
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+ VkViewport viewport = {0.0f, 0.0f, 64.0f, 64.0f, 0.0f, 1.0f};
+ m_viewports.push_back(viewport);
+ pipe.SetViewport(m_viewports);
+ VkRect2D rect = {};
+ m_scissors.push_back(rect);
+ pipe.SetScissor(m_scissors);
+
+ const VkPipelineLayoutObj pl(m_device);
+ pipe.CreateVKPipeline(pl.handle(), rp);
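+ // CreateVKPipeline builds the pipeline for subpass 0 (the default), so using it in subpass 1 below triggers the expected errors.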
+
+ m_commandBuffer->begin();
+
+ VkRenderPassBeginInfo rpbi = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
+ nullptr,
+ rp,
+ fb,
+ {{
+ 0,
+ 0,
+ },
+ {32, 32}},
+ 0,
+ nullptr};
+
+ // subtest 1: bind in the wrong subpass
+ vkCmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
+ vkCmdNextSubpass(m_commandBuffer->handle(), VK_SUBPASS_CONTENTS_INLINE);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "built for subpass 0 but used in subpass 1");
+ vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+ vkCmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
+ m_errorMonitor->VerifyFound();
+
+ vkCmdEndRenderPass(m_commandBuffer->handle());
+
+ // subtest 2: bind in correct subpass, then transition to next subpass
+ vkCmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
+ vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+ vkCmdNextSubpass(m_commandBuffer->handle(), VK_SUBPASS_CONTENTS_INLINE);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "built for subpass 0 but used in subpass 1");
+ vkCmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
+ m_errorMonitor->VerifyFound();
+
+ vkCmdEndRenderPass(m_commandBuffer->handle());
+
+ m_commandBuffer->end();
+
+ vkDestroyFramebuffer(m_device->device(), fb, nullptr);
+ vkDestroyRenderPass(m_device->device(), rp, nullptr);
+}
+
+TEST_F(VkLayerTest, ImageBarrierSubpassConflicts) {
+ TEST_DESCRIPTION("Add a pipeline barrier within a subpass that has conflicting state");
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ // A renderpass with a single subpass that declares a self-dependency
+ VkAttachmentDescription attach[] = {
+ {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
+ VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_UNDEFINED,
+ VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
+ };
+ VkAttachmentReference ref = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
+ VkSubpassDescription subpasses[] = {
+ {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, &ref, nullptr, nullptr, 0, nullptr},
+ };
+ VkSubpassDependency dep = {0,
+ 0,
+ VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+ VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+ VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
+ VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
+ VK_DEPENDENCY_BY_REGION_BIT};
+ VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, attach, 1, subpasses, 1, &dep};
+ VkRenderPass rp;
+ VkRenderPass rp_noselfdep;
+
+ VkResult err = vkCreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
+ ASSERT_VK_SUCCESS(err);
+ rpci.dependencyCount = 0;
+ rpci.pDependencies = nullptr;
+ err = vkCreateRenderPass(m_device->device(), &rpci, nullptr, &rp_noselfdep);
+ ASSERT_VK_SUCCESS(err);
+
+ VkImageObj image(m_device);
+ image.InitNoLayout(32, 32, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+ VkImageView imageView = image.targetView(VK_FORMAT_R8G8B8A8_UNORM);
+
+ VkFramebufferCreateInfo fbci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 1, &imageView, 32, 32, 1};
+ VkFramebuffer fb;
+ err = vkCreateFramebuffer(m_device->device(), &fbci, nullptr, &fb);
+ ASSERT_VK_SUCCESS(err);
+
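+ // Pipeline barriers recorded inside a render pass must match a self-dependency of the current subpass; rp_noselfdep has no dependencies at all, so the barrier below is rejected with VUID 02285.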
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-pDependencies-02285");
+ m_commandBuffer->begin();
+ VkRenderPassBeginInfo rpbi = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
+ nullptr,
+ rp_noselfdep,
+ fb,
+ {{
+ 0,
+ 0,
+ },
+ {32, 32}},
+ 0,
+ nullptr};
+
+ vkCmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
+ VkMemoryBarrier mem_barrier = {};
+ mem_barrier.sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER;
+ mem_barrier.pNext = NULL;
+ mem_barrier.srcAccessMask = VK_ACCESS_HOST_WRITE_BIT;
+ mem_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, 0, 1,
+ &mem_barrier, 0, nullptr, 0, nullptr);
+ m_errorMonitor->VerifyFound();
+ vkCmdEndRenderPass(m_commandBuffer->handle());
+
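+ // rp does declare a self-dependency; each barrier below violates one of its constraints (stage masks, access masks, dependency flags, queue family indices, layouts, or barrier types) and expects the corresponding VUID.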
+ rpbi.renderPass = rp;
+ vkCmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
+ VkImageMemoryBarrier img_barrier = {};
+ img_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+ img_barrier.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
+ img_barrier.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
+ img_barrier.oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+ img_barrier.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+ img_barrier.image = image.handle();
+ img_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+ img_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+ img_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ img_barrier.subresourceRange.baseArrayLayer = 0;
+ img_barrier.subresourceRange.baseMipLevel = 0;
+ img_barrier.subresourceRange.layerCount = 1;
+ img_barrier.subresourceRange.levelCount = 1;
+ // Mis-match src stage mask
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-pDependencies-02285");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-pDependencies-02285");
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+ VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1, &img_barrier);
+ m_errorMonitor->VerifyFound();
+ // Now mis-match dst stage mask
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-pDependencies-02285");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-pDependencies-02285");
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_PIPELINE_STAGE_HOST_BIT,
+ VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1, &img_barrier);
+ m_errorMonitor->VerifyFound();
+ // Set srcQueueFamilyIndex to something other than IGNORED
+ img_barrier.srcQueueFamilyIndex = 0;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-srcQueueFamilyIndex-01182");
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+ VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1,
+ &img_barrier);
+ m_errorMonitor->VerifyFound();
+ img_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+ // Mis-match mem barrier src access mask
+ mem_barrier = {};
+ mem_barrier.sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER;
+ mem_barrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
+ mem_barrier.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-pDependencies-02285");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-pDependencies-02285");
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+ VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_DEPENDENCY_BY_REGION_BIT, 1, &mem_barrier, 0, nullptr, 0,
+ nullptr);
+ m_errorMonitor->VerifyFound();
+ // Mis-match mem barrier dst access mask. Also set srcAccessMask to 0, which should not cause an error
+ mem_barrier.srcAccessMask = 0;
+ mem_barrier.dstAccessMask = VK_ACCESS_HOST_WRITE_BIT;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-pDependencies-02285");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-pDependencies-02285");
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+ VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_DEPENDENCY_BY_REGION_BIT, 1, &mem_barrier, 0, nullptr, 0,
+ nullptr);
+ m_errorMonitor->VerifyFound();
+ // Mis-match image barrier src access mask
+ img_barrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-pDependencies-02285");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-pDependencies-02285");
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+ VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1,
+ &img_barrier);
+ m_errorMonitor->VerifyFound();
+ // Mis-match image barrier dst access mask
+ img_barrier.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
+ img_barrier.dstAccessMask = VK_ACCESS_HOST_WRITE_BIT;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-pDependencies-02285");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-pDependencies-02285");
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+ VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1,
+ &img_barrier);
+ m_errorMonitor->VerifyFound();
+ // Mis-match dependencyFlags
+ img_barrier.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-pDependencies-02285");
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+ VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, 0 /* wrong */, 0, nullptr, 0, nullptr, 1, &img_barrier);
+ m_errorMonitor->VerifyFound();
+ // Send non-zero bufferMemoryBarrierCount
+ // Construct a valid BufferMemoryBarrier to avoid any parameter errors
+ // First we need a valid buffer to reference
+ VkBufferObj buffer;
+ VkMemoryPropertyFlags mem_reqs = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
+ buffer.init_as_src_and_dst(*m_device, 256, mem_reqs);
+ VkBufferMemoryBarrier bmb = {};
+ bmb.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
+ bmb.srcAccessMask = VK_ACCESS_HOST_WRITE_BIT;
+ bmb.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
+ bmb.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+ bmb.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+ bmb.buffer = buffer.handle();
+ bmb.offset = 0;
+ bmb.size = VK_WHOLE_SIZE;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-bufferMemoryBarrierCount-01178");
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+ VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 1, &bmb, 0,
+ nullptr);
+ m_errorMonitor->VerifyFound();
+ // Add image barrier w/ image handle that's not in framebuffer
+ VkImageObj lone_image(m_device);
+ lone_image.InitNoLayout(32, 32, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+ img_barrier.image = lone_image.handle();
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-image-02635");
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+ VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1,
+ &img_barrier);
+ m_errorMonitor->VerifyFound();
+ // Have image barrier with mis-matched layouts
+ img_barrier.image = image.handle();
+ img_barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-oldLayout-01181");
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+ VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1,
+ &img_barrier);
+ m_errorMonitor->VerifyFound();
+
+ img_barrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
+ img_barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-oldLayout-02636");
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+ VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1,
+ &img_barrier);
+ m_errorMonitor->VerifyFound();
+ vkCmdEndRenderPass(m_commandBuffer->handle());
+
+ vkDestroyFramebuffer(m_device->device(), fb, nullptr);
+ vkDestroyRenderPass(m_device->device(), rp, nullptr);
+ vkDestroyRenderPass(m_device->device(), rp_noselfdep, nullptr);
+}
+
+TEST_F(VkLayerTest, InvalidSecondaryCommandBufferBarrier) {
+ TEST_DESCRIPTION("Add an invalid image barrier in a secondary command buffer");
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ // A renderpass with a single subpass that declares a self-dependency
+ VkAttachmentDescription attach[] = {
+ {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
+ VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_UNDEFINED,
+ VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
+ };
+ VkAttachmentReference ref = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
+ VkSubpassDescription subpasses[] = {
+ {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, &ref, nullptr, nullptr, 0, nullptr},
+ };
+ VkSubpassDependency dep = {0,
+ 0,
+ VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
+ VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
+ VK_ACCESS_SHADER_WRITE_BIT,
+ VK_ACCESS_SHADER_WRITE_BIT,
+ VK_DEPENDENCY_BY_REGION_BIT};
+ VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, attach, 1, subpasses, 1, &dep};
+ VkRenderPass rp;
+
+ VkResult err = vkCreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
+ ASSERT_VK_SUCCESS(err);
+
+ VkImageObj image(m_device);
+ image.Init(32, 32, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+ VkImageView imageView = image.targetView(VK_FORMAT_R8G8B8A8_UNORM);
+ // Second image that img_barrier will incorrectly use
+ VkImageObj image2(m_device);
+ image2.Init(32, 32, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+
+ VkFramebufferCreateInfo fbci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 1, &imageView, 32, 32, 1};
+ VkFramebuffer fb;
+ err = vkCreateFramebuffer(m_device->device(), &fbci, nullptr, &fb);
+ ASSERT_VK_SUCCESS(err);
+
+ m_commandBuffer->begin();
+
+ VkRenderPassBeginInfo rpbi = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
+ nullptr,
+ rp,
+ fb,
+ {{
+ 0,
+ 0,
+ },
+ {32, 32}},
+ 0,
+ nullptr};
+
+ vkCmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS);
+
+ VkCommandPoolObj pool(m_device, m_device->graphics_queue_node_index_, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
+ VkCommandBufferObj secondary(m_device, &pool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
+
+ VkCommandBufferInheritanceInfo cbii = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
+ nullptr,
+ rp,
+ 0,
+ VK_NULL_HANDLE, // Set to NULL FB handle intentionally to flesh out any errors
+ VK_FALSE,
+ 0,
+ 0};
+ VkCommandBufferBeginInfo cbbi = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, nullptr,
+ VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT | VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT,
+ &cbii};
+ vkBeginCommandBuffer(secondary.handle(), &cbbi);
+ VkImageMemoryBarrier img_barrier = {};
+ img_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+ img_barrier.srcAccessMask = VK_ACCESS_SHADER_WRITE_BIT;
+ img_barrier.dstAccessMask = VK_ACCESS_SHADER_WRITE_BIT;
+ img_barrier.oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+ img_barrier.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+ img_barrier.image = image2.handle(); // Image mis-matches with FB image
+ img_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+ img_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+ img_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ img_barrier.subresourceRange.baseArrayLayer = 0;
+ img_barrier.subresourceRange.baseMipLevel = 0;
+ img_barrier.subresourceRange.layerCount = 1;
+ img_barrier.subresourceRange.levelCount = 1;
+ vkCmdPipelineBarrier(secondary.handle(), VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
+ VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1, &img_barrier);
+ secondary.end();
+
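+ // The secondary was recorded with a NULL framebuffer in its inheritance info, so the barrier/framebuffer mismatch is flagged here, when the secondary executes inside the render pass instance.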
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-image-02635");
+ vkCmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary.handle());
+ m_errorMonitor->VerifyFound();
+
+ vkDestroyFramebuffer(m_device->device(), fb, nullptr);
+ vkDestroyRenderPass(m_device->device(), rp, nullptr);
+}
+
+TEST_F(VkLayerTest, ImageBarrierSubpassConflict) {
+ TEST_DESCRIPTION("Check case where subpass index references different image from image barrier");
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ // Create an RP/FB combo where the subpass references the wrong attachment index; this exercises the second half of "VUID-vkCmdPipelineBarrier-image-02635"
+ VkAttachmentDescription attach[] = {
+ {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
+ VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_UNDEFINED,
+ VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
+ {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
+ VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_UNDEFINED,
+ VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
+ };
+ // ref attachment points to wrong attachment index compared to img_barrier below
+ VkAttachmentReference ref = {1, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
+ VkSubpassDescription subpasses[] = {
+ {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, &ref, nullptr, nullptr, 0, nullptr},
+ };
+ VkSubpassDependency dep = {0,
+ 0,
+ VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+ VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+ VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
+ VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
+ VK_DEPENDENCY_BY_REGION_BIT};
+
+ VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 2, attach, 1, subpasses, 1, &dep};
+ VkRenderPass rp;
+
+ VkResult err = vkCreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
+ ASSERT_VK_SUCCESS(err);
+
+ VkImageObj image(m_device);
+ image.InitNoLayout(32, 32, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+ VkImageView imageView = image.targetView(VK_FORMAT_R8G8B8A8_UNORM);
+ VkImageObj image2(m_device);
+ image2.InitNoLayout(32, 32, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+ VkImageView imageView2 = image2.targetView(VK_FORMAT_R8G8B8A8_UNORM);
+ // attachment 0 = imageView, attachment 1 = imageView2; the subpass only references attachment 1
+ VkImageView iv_array[2] = {imageView, imageView2};
+
+ VkFramebufferCreateInfo fbci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 2, iv_array, 32, 32, 1};
+ VkFramebuffer fb;
+ err = vkCreateFramebuffer(m_device->device(), &fbci, nullptr, &fb);
+ ASSERT_VK_SUCCESS(err);
+
+ VkRenderPassBeginInfo rpbi = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
+ nullptr,
+ rp,
+ fb,
+ {{
+ 0,
+ 0,
+ },
+ {32, 32}},
+ 0,
+ nullptr};
+
+ VkImageMemoryBarrier img_barrier = {};
+ img_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+ img_barrier.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
+ img_barrier.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
+ img_barrier.oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+ img_barrier.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+ img_barrier.image = image.handle(); /* barrier references image from attachment index 0 */
+ img_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+ img_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+ img_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ img_barrier.subresourceRange.baseArrayLayer = 0;
+ img_barrier.subresourceRange.baseMipLevel = 0;
+ img_barrier.subresourceRange.layerCount = 1;
+ img_barrier.subresourceRange.levelCount = 1;
+ m_commandBuffer->begin();
+ vkCmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-image-02635");
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+ VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1,
+ &img_barrier);
+ m_errorMonitor->VerifyFound();
+
+ vkDestroyFramebuffer(m_device->device(), fb, nullptr);
+ vkDestroyRenderPass(m_device->device(), rp, nullptr);
+}
+
+TEST_F(VkLayerTest, TemporaryExternalSemaphore) {
+#ifdef _WIN32
+ const auto extension_name = VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME;
+ const auto handle_type = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR;
+#else
+ const auto extension_name = VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME;
+ const auto handle_type = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR;
+#endif
+ // Check for external semaphore instance extensions
+ if (InstanceExtensionSupported(VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME);
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ } else {
+ printf("%s External semaphore extension not supported, skipping test\n", kSkipPrefix);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+ // Check for external semaphore device extensions
+ if (DeviceExtensionSupported(gpu(), nullptr, extension_name)) {
+ m_device_extension_names.push_back(extension_name);
+ m_device_extension_names.push_back(VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME);
+ } else {
+ printf("%s External semaphore extension not supported, skipping test\n", kSkipPrefix);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ // Check for external semaphore import and export capability
+ VkPhysicalDeviceExternalSemaphoreInfoKHR esi = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR, nullptr,
+ handle_type};
+ VkExternalSemaphorePropertiesKHR esp = {VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR, nullptr};
+ auto vkGetPhysicalDeviceExternalSemaphorePropertiesKHR =
+ (PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR)vkGetInstanceProcAddr(
+ instance(), "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR");
+ vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(gpu(), &esi, &esp);
+
+ if (!(esp.externalSemaphoreFeatures & VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT_KHR) ||
+ !(esp.externalSemaphoreFeatures & VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT_KHR)) {
+ printf("%s External semaphore does not support importing and exporting, skipping test\n", kSkipPrefix);
+ return;
+ }
+
+ VkResult err;
+
+ // Create a semaphore to export payload from
+ VkExportSemaphoreCreateInfoKHR esci = {VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR, nullptr, handle_type};
+ VkSemaphoreCreateInfo sci = {VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO, &esci, 0};
+
+ VkSemaphore export_semaphore;
+ err = vkCreateSemaphore(m_device->device(), &sci, nullptr, &export_semaphore);
+ ASSERT_VK_SUCCESS(err);
+
+ // Create a semaphore to import payload into
+ sci.pNext = nullptr;
+ VkSemaphore import_semaphore;
+ err = vkCreateSemaphore(m_device->device(), &sci, nullptr, &import_semaphore);
+ ASSERT_VK_SUCCESS(err);
+
+#ifdef _WIN32
+ // Export semaphore payload to an opaque handle
+ HANDLE handle = nullptr;
+ VkSemaphoreGetWin32HandleInfoKHR ghi = {VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR, nullptr, export_semaphore,
+ handle_type};
+ auto vkGetSemaphoreWin32HandleKHR =
+ (PFN_vkGetSemaphoreWin32HandleKHR)vkGetDeviceProcAddr(m_device->device(), "vkGetSemaphoreWin32HandleKHR");
+ err = vkGetSemaphoreWin32HandleKHR(m_device->device(), &ghi, &handle);
+ ASSERT_VK_SUCCESS(err);
+
+ // Import opaque handle exported above *temporarily*
+ VkImportSemaphoreWin32HandleInfoKHR ihi = {VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR,
+ nullptr,
+ import_semaphore,
+ VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR,
+ handle_type,
+ handle,
+ nullptr};
+ auto vkImportSemaphoreWin32HandleKHR =
+ (PFN_vkImportSemaphoreWin32HandleKHR)vkGetDeviceProcAddr(m_device->device(), "vkImportSemaphoreWin32HandleKHR");
+ err = vkImportSemaphoreWin32HandleKHR(m_device->device(), &ihi);
+ ASSERT_VK_SUCCESS(err);
+#else
+ // Export semaphore payload to an opaque handle
+ int fd = 0;
+ VkSemaphoreGetFdInfoKHR ghi = {VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR, nullptr, export_semaphore, handle_type};
+ auto vkGetSemaphoreFdKHR = (PFN_vkGetSemaphoreFdKHR)vkGetDeviceProcAddr(m_device->device(), "vkGetSemaphoreFdKHR");
+ err = vkGetSemaphoreFdKHR(m_device->device(), &ghi, &fd);
+ ASSERT_VK_SUCCESS(err);
+
+ // Import opaque handle exported above *temporarily*
+ VkImportSemaphoreFdInfoKHR ihi = {VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR, nullptr, import_semaphore,
+ VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR, handle_type, fd};
+ auto vkImportSemaphoreFdKHR = (PFN_vkImportSemaphoreFdKHR)vkGetDeviceProcAddr(m_device->device(), "vkImportSemaphoreFdKHR");
+ err = vkImportSemaphoreFdKHR(m_device->device(), &ihi);
+ ASSERT_VK_SUCCESS(err);
+#endif
+
+ // Wait on the imported semaphore twice in vkQueueSubmit; the second wait should be an error
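+ // A temporary semaphore import is consumed by the first wait; the semaphore then reverts to its permanent state, which here has no pending signal, so the second wait can never be satisfied.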
+ VkPipelineStageFlags flags = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
+ VkSubmitInfo si[] = {
+ {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 0, nullptr, &flags, 0, nullptr, 1, &export_semaphore},
+ {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 1, &import_semaphore, &flags, 0, nullptr, 0, nullptr},
+ {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 0, nullptr, &flags, 0, nullptr, 1, &export_semaphore},
+ {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 1, &import_semaphore, &flags, 0, nullptr, 0, nullptr},
+ };
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "has no way to be signaled");
+ vkQueueSubmit(m_device->m_queue, 4, si, VK_NULL_HANDLE);
+ m_errorMonitor->VerifyFound();
+
+ // Wait on the imported semaphore twice in vkQueueBindSparse; the second wait should be an error
+ VkBindSparseInfo bi[] = {
+ {VK_STRUCTURE_TYPE_BIND_SPARSE_INFO, nullptr, 0, nullptr, 0, nullptr, 0, nullptr, 0, nullptr, 1, &export_semaphore},
+ {VK_STRUCTURE_TYPE_BIND_SPARSE_INFO, nullptr, 1, &import_semaphore, 0, nullptr, 0, nullptr, 0, nullptr, 0, nullptr},
+ {VK_STRUCTURE_TYPE_BIND_SPARSE_INFO, nullptr, 0, nullptr, 0, nullptr, 0, nullptr, 0, nullptr, 1, &export_semaphore},
+ {VK_STRUCTURE_TYPE_BIND_SPARSE_INFO, nullptr, 1, &import_semaphore, 0, nullptr, 0, nullptr, 0, nullptr, 0, nullptr},
+ };
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "has no way to be signaled");
+ vkQueueBindSparse(m_device->m_queue, 4, bi, VK_NULL_HANDLE);
+ m_errorMonitor->VerifyFound();
+
+ // Cleanup
+ err = vkQueueWaitIdle(m_device->m_queue);
+ ASSERT_VK_SUCCESS(err);
+ vkDestroySemaphore(m_device->device(), export_semaphore, nullptr);
+ vkDestroySemaphore(m_device->device(), import_semaphore, nullptr);
+}
+
+TEST_F(VkLayerTest, TemporaryExternalFence) {
+#ifdef _WIN32
+ const auto extension_name = VK_KHR_EXTERNAL_FENCE_WIN32_EXTENSION_NAME;
+ const auto handle_type = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR;
+#else
+ const auto extension_name = VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME;
+ const auto handle_type = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR;
+#endif
+ // Check for external fence instance extensions
+ if (InstanceExtensionSupported(VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME);
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ } else {
+ printf("%s External fence extension not supported, skipping test\n", kSkipPrefix);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+ // Check for external fence device extensions
+ if (DeviceExtensionSupported(gpu(), nullptr, extension_name)) {
+ m_device_extension_names.push_back(extension_name);
+ m_device_extension_names.push_back(VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME);
+ } else {
+ printf("%s External fence extension not supported, skipping test\n", kSkipPrefix);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ // Check for external fence import and export capability
+ VkPhysicalDeviceExternalFenceInfoKHR efi = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR, nullptr, handle_type};
+ VkExternalFencePropertiesKHR efp = {VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR, nullptr};
+ auto vkGetPhysicalDeviceExternalFencePropertiesKHR = (PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR)vkGetInstanceProcAddr(
+ instance(), "vkGetPhysicalDeviceExternalFencePropertiesKHR");
+ vkGetPhysicalDeviceExternalFencePropertiesKHR(gpu(), &efi, &efp);
+
+ if (!(efp.externalFenceFeatures & VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT_KHR) ||
+ !(efp.externalFenceFeatures & VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT_KHR)) {
+ printf("%s External fence does not support importing and exporting, skipping test\n", kSkipPrefix);
+ return;
+ }
+
+ VkResult err;
+
+ // Create a fence to export payload from
+ VkFence export_fence;
+ {
+ VkExportFenceCreateInfoKHR efci = {VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR, nullptr, handle_type};
+ VkFenceCreateInfo fci = {VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, &efci, 0};
+ err = vkCreateFence(m_device->device(), &fci, nullptr, &export_fence);
+ ASSERT_VK_SUCCESS(err);
+ }
+
+ // Create a fence to import payload into
+ VkFence import_fence;
+ {
+ VkFenceCreateInfo fci = {VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, nullptr, 0};
+ err = vkCreateFence(m_device->device(), &fci, nullptr, &import_fence);
+ ASSERT_VK_SUCCESS(err);
+ }
+
+#ifdef _WIN32
+ // Export fence payload to an opaque handle
+ HANDLE handle = nullptr;
+ {
+ VkFenceGetWin32HandleInfoKHR ghi = {VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR, nullptr, export_fence, handle_type};
+ auto vkGetFenceWin32HandleKHR =
+ (PFN_vkGetFenceWin32HandleKHR)vkGetDeviceProcAddr(m_device->device(), "vkGetFenceWin32HandleKHR");
+ err = vkGetFenceWin32HandleKHR(m_device->device(), &ghi, &handle);
+ ASSERT_VK_SUCCESS(err);
+ }
+
+ // Import opaque handle exported above
+ {
+ VkImportFenceWin32HandleInfoKHR ifi = {VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR,
+ nullptr,
+ import_fence,
+ VK_FENCE_IMPORT_TEMPORARY_BIT_KHR,
+ handle_type,
+ handle,
+ nullptr};
+ auto vkImportFenceWin32HandleKHR =
+ (PFN_vkImportFenceWin32HandleKHR)vkGetDeviceProcAddr(m_device->device(), "vkImportFenceWin32HandleKHR");
+ err = vkImportFenceWin32HandleKHR(m_device->device(), &ifi);
+ ASSERT_VK_SUCCESS(err);
+ }
+#else
+ // Export fence payload to an opaque handle
+ int fd = 0;
+ {
+ VkFenceGetFdInfoKHR gfi = {VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR, nullptr, export_fence, handle_type};
+ auto vkGetFenceFdKHR = (PFN_vkGetFenceFdKHR)vkGetDeviceProcAddr(m_device->device(), "vkGetFenceFdKHR");
+ err = vkGetFenceFdKHR(m_device->device(), &gfi, &fd);
+ ASSERT_VK_SUCCESS(err);
+ }
+
+ // Import opaque handle exported above
+ {
+ VkImportFenceFdInfoKHR ifi = {VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR, nullptr, import_fence,
+ VK_FENCE_IMPORT_TEMPORARY_BIT_KHR, handle_type, fd};
+ auto vkImportFenceFdKHR = (PFN_vkImportFenceFdKHR)vkGetDeviceProcAddr(m_device->device(), "vkImportFenceFdKHR");
+ err = vkImportFenceFdKHR(m_device->device(), &ifi);
+ ASSERT_VK_SUCCESS(err);
+ }
+#endif
+
+ // Undo the temporary import
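+ // (vkResetFences on a fence with a temporarily imported payload restores its permanent payload)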
+ vkResetFences(m_device->device(), 1, &import_fence);
+
+ // Signal the previously imported fence twice; the second signal should produce a validation error
+ vkQueueSubmit(m_device->m_queue, 0, nullptr, import_fence);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "is already in use by another submission.");
+ vkQueueSubmit(m_device->m_queue, 0, nullptr, import_fence);
+ m_errorMonitor->VerifyFound();
+
+ // Cleanup
+ err = vkQueueWaitIdle(m_device->m_queue);
+ ASSERT_VK_SUCCESS(err);
+ vkDestroyFence(m_device->device(), export_fence, nullptr);
+ vkDestroyFence(m_device->device(), import_fence, nullptr);
+}
+
+TEST_F(VkPositiveLayerTest, SecondaryCommandBufferBarrier) {
+ TEST_DESCRIPTION("Add a pipeline barrier in a secondary command buffer");
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ // A renderpass with a single subpass that declares a self-dependency
+ VkAttachmentDescription attach[] = {
+ {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
+ VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_UNDEFINED,
+ VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
+ };
+ VkAttachmentReference ref = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
+ VkSubpassDescription subpasses[] = {
+ {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, &ref, nullptr, nullptr, 0, nullptr},
+ };
+ VkSubpassDependency dep = {0,
+ 0,
+ VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
+ VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
+ VK_ACCESS_SHADER_WRITE_BIT,
+ VK_ACCESS_SHADER_WRITE_BIT,
+ VK_DEPENDENCY_BY_REGION_BIT};
+ VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, attach, 1, subpasses, 1, &dep};
+ VkRenderPass rp;
+
+ VkResult err = vkCreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
+ ASSERT_VK_SUCCESS(err);
+
+ VkImageObj image(m_device);
+ image.Init(32, 32, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+ VkImageView imageView = image.targetView(VK_FORMAT_R8G8B8A8_UNORM);
+
+ VkFramebufferCreateInfo fbci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 1, &imageView, 32, 32, 1};
+ VkFramebuffer fb;
+ err = vkCreateFramebuffer(m_device->device(), &fbci, nullptr, &fb);
+ ASSERT_VK_SUCCESS(err);
+
+ m_commandBuffer->begin();
+
+ VkRenderPassBeginInfo rpbi = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
+ nullptr,
+ rp,
+ fb,
+ {{
+ 0,
+ 0,
+ },
+ {32, 32}},
+ 0,
+ nullptr};
+
+ vkCmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS);
+
+ VkCommandPoolObj pool(m_device, m_device->graphics_queue_node_index_, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
+ VkCommandBufferObj secondary(m_device, &pool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
+
+ VkCommandBufferInheritanceInfo cbii = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
+ nullptr,
+ rp,
+ 0,
+ VK_NULL_HANDLE, // Set to NULL FB handle intentionally to flesh out any errors
+ VK_FALSE,
+ 0,
+ 0};
+ VkCommandBufferBeginInfo cbbi = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, nullptr,
+ VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT | VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT,
+ &cbii};
+ vkBeginCommandBuffer(secondary.handle(), &cbbi);
+ VkMemoryBarrier mem_barrier = {};
+ mem_barrier.sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER;
+ mem_barrier.pNext = NULL;
+ mem_barrier.srcAccessMask = VK_ACCESS_SHADER_WRITE_BIT;
+ mem_barrier.dstAccessMask = VK_ACCESS_SHADER_WRITE_BIT;
+ vkCmdPipelineBarrier(secondary.handle(), VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
+ VK_DEPENDENCY_BY_REGION_BIT, 1, &mem_barrier, 0, nullptr, 0, nullptr);
+ VkImageMemoryBarrier img_barrier = {};
+ img_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+ img_barrier.srcAccessMask = VK_ACCESS_SHADER_WRITE_BIT;
+ img_barrier.dstAccessMask = VK_ACCESS_SHADER_WRITE_BIT;
+ img_barrier.oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+ img_barrier.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+ img_barrier.image = image.handle();
+ img_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+ img_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+ img_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ img_barrier.subresourceRange.baseArrayLayer = 0;
+ img_barrier.subresourceRange.baseMipLevel = 0;
+ img_barrier.subresourceRange.layerCount = 1;
+ img_barrier.subresourceRange.levelCount = 1;
+ vkCmdPipelineBarrier(secondary.handle(), VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
+ VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1, &img_barrier);
+ secondary.end();
+
+ vkCmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary.handle());
+ vkCmdEndRenderPass(m_commandBuffer->handle());
+ m_commandBuffer->end();
+
+ VkSubmitInfo submit_info = {};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &m_commandBuffer->handle();
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+ vkQueueWaitIdle(m_device->m_queue);
+
+ vkDestroyFramebuffer(m_device->device(), fb, nullptr);
+ vkDestroyRenderPass(m_device->device(), rp, nullptr);
}
-void TestRenderPassCreate(ErrorMonitor *error_monitor, const VkDevice device, const VkRenderPassCreateInfo *create_info,
- bool rp2_supported, const char *rp1_vuid, const char *rp2_vuid) {
+static void TestRenderPassCreate(ErrorMonitor *error_monitor, const VkDevice device, const VkRenderPassCreateInfo *create_info,
+ bool rp2Supported, const char *rp1_vuid, const char *rp2_vuid) {
VkRenderPass render_pass = VK_NULL_HANDLE;
VkResult err;
@@ -189,7 +5704,7 @@ void TestRenderPassCreate(ErrorMonitor *error_monitor, const VkDevice device, co
error_monitor->VerifyFound();
}
- if (rp2_supported && rp2_vuid) {
+ if (rp2Supported && rp2_vuid) {
PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR =
(PFN_vkCreateRenderPass2KHR)vkGetDeviceProcAddr(device, "vkCreateRenderPass2KHR");
safe_VkRenderPassCreateInfo2KHR create_info2;
@@ -202,44 +5717,941 @@ void TestRenderPassCreate(ErrorMonitor *error_monitor, const VkDevice device, co
}
}
-void PositiveTestRenderPassCreate(ErrorMonitor *error_monitor, const VkDevice device, const VkRenderPassCreateInfo *create_info,
- bool rp2_supported) {
- VkRenderPass render_pass = VK_NULL_HANDLE;
- VkResult err;
+TEST_F(VkLayerTest, RenderPassCreateAttachmentIndexOutOfRange) {
+ // Check for VK_KHR_get_physical_device_properties2
+ if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ // The render pass has no attachments, but the subpass references attachment 0.
+ VkAttachmentReference ref = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
+ VkSubpassDescription subpasses[] = {
+ {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, &ref, nullptr, nullptr, 0, nullptr},
+ };
+
+ VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 0, nullptr, 1, subpasses, 0, nullptr};
+
+ // "... must be less than the total number of attachments ..."
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkRenderPassCreateInfo-attachment-00834",
+ "VUID-VkRenderPassCreateInfo2KHR-attachment-03051");
+}
+
+TEST_F(VkLayerTest, RenderPassCreateAttachmentReadOnlyButCleared) {
+ // Check for VK_KHR_get_physical_device_properties2
+ if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+ bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
+ bool maintenance2Supported = rp2Supported;
+
+ // Check for VK_KHR_maintenance2
+ if (!rp2Supported && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE2_EXTENSION_NAME)) {
+ m_device_extension_names.push_back(VK_KHR_MAINTENANCE2_EXTENSION_NAME);
+ maintenance2Supported = true;
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ if (m_device->props.apiVersion >= VK_API_VERSION_1_1) {  // maintenance2 behavior is core in Vulkan 1.1
+ maintenance2Supported = true;
+ }
+
+ VkAttachmentDescription description = {0,
+ VK_FORMAT_D32_SFLOAT_S8_UINT,
+ VK_SAMPLE_COUNT_1_BIT,
+ VK_ATTACHMENT_LOAD_OP_CLEAR,
+ VK_ATTACHMENT_STORE_OP_DONT_CARE,
+ VK_ATTACHMENT_LOAD_OP_CLEAR,
+ VK_ATTACHMENT_STORE_OP_DONT_CARE,
+ VK_IMAGE_LAYOUT_GENERAL,
+ VK_IMAGE_LAYOUT_GENERAL};
+
+ VkAttachmentReference depth_stencil_ref = {0, VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL};
+
+ VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 0, nullptr, nullptr, &depth_stencil_ref, 0,
+ nullptr};
+
+ VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, &description, 1, &subpass, 0, nullptr};
+
+ // VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL but depth cleared
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkRenderPassCreateInfo-pAttachments-00836",
+ "VUID-VkRenderPassCreateInfo2KHR-pAttachments-02522");
+
+ if (maintenance2Supported) {
+ // VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL but depth cleared
+ depth_stencil_ref.layout = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL;
+
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+ "VUID-VkRenderPassCreateInfo-pAttachments-01566", nullptr);
+
+ // VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL but depth cleared
+ depth_stencil_ref.layout = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL;
+
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+ "VUID-VkRenderPassCreateInfo-pAttachments-01567", nullptr);
+ }
+}
+
+TEST_F(VkLayerTest, RenderPassCreateAttachmentMismatchingLayoutsColor) {
+ TEST_DESCRIPTION("Attachment is used simultaneously as two color attachments with different layouts.");
+
+ // Check for VK_KHR_get_physical_device_properties2
+ if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
+ ASSERT_NO_FATAL_FAILURE(InitState());
- error_monitor->ExpectSuccess();
- err = vkCreateRenderPass(device, create_info, nullptr, &render_pass);
- if (err == VK_SUCCESS) vkDestroyRenderPass(device, render_pass, nullptr);
- error_monitor->VerifyNotFound();
+ VkAttachmentDescription attach[] = {
+ {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
+ VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_UNDEFINED,
+ VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
+ };
+ VkAttachmentReference refs[] = {
+ {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
+ {0, VK_IMAGE_LAYOUT_GENERAL},
+ };
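+    // Both references use attachment 0 but request different layouts within the same subpass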
+ VkSubpassDescription subpasses[] = {
+ {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 2, refs, nullptr, nullptr, 0, nullptr},
+ };
+
+ VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, attach, 1, subpasses, 0, nullptr};
+
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+ "subpass 0 already uses attachment 0 with a different image layout",
+ "subpass 0 already uses attachment 0 with a different image layout");
+}
+
+TEST_F(VkLayerTest, RenderPassCreateAttachmentDescriptionInvalidFinalLayout) {
+ TEST_DESCRIPTION("VkAttachmentDescription's finalLayout must not be UNDEFINED or PREINITIALIZED");
+
+ // Check for VK_KHR_get_physical_device_properties2
+ if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ VkAttachmentDescription attach_desc = {};
+ attach_desc.format = VK_FORMAT_R8G8B8A8_UNORM;
+ attach_desc.samples = VK_SAMPLE_COUNT_1_BIT;
+ attach_desc.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
+ attach_desc.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
+ attach_desc.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
+ attach_desc.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
+ attach_desc.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+ attach_desc.finalLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+ VkAttachmentReference attach_ref = {};
+ attach_ref.attachment = 0;
+ attach_ref.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+ VkSubpassDescription subpass = {};
+ subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
+ subpass.colorAttachmentCount = 1;
+ subpass.pColorAttachments = &attach_ref;
+ VkRenderPassCreateInfo rpci = {};
+ rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+ rpci.attachmentCount = 1;
+ rpci.pAttachments = &attach_desc;
+ rpci.subpassCount = 1;
+ rpci.pSubpasses = &subpass;
+
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkAttachmentDescription-finalLayout-00843",
+ "VUID-VkAttachmentDescription2KHR-finalLayout-03061");
+
+ attach_desc.finalLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkAttachmentDescription-finalLayout-00843",
+ "VUID-VkAttachmentDescription2KHR-finalLayout-03061");
+}
+
+TEST_F(VkLayerTest, RenderPassCreateAttachmentsMisc) {
+ TEST_DESCRIPTION(
+ "Ensure that CreateRenderPass produces the expected validation errors when a subpass's attachments violate the valid usage "
+ "conditions.");
+
+ // Check for VK_KHR_get_physical_device_properties2
+ if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ std::vector<VkAttachmentDescription> attachments = {
+ // input attachments
+ {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_4_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
+ VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL},
+ // color attachments
+ {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_4_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
+ VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+ VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
+ {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_4_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
+ VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+ VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
+ // depth attachment
+ {0, VK_FORMAT_D24_UNORM_S8_UINT, VK_SAMPLE_COUNT_4_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
+ VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
+ VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL},
+ // resolve attachment
+ {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
+ VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+ VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
+ // preserve attachments
+ {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_4_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
+ VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+ VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
+ };
+
+ std::vector<VkAttachmentReference> input = {
+ {0, VK_IMAGE_LAYOUT_GENERAL},
+ };
+ std::vector<VkAttachmentReference> color = {
+ {1, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
+ {2, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
+ };
+ VkAttachmentReference depth = {3, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL};
+ std::vector<VkAttachmentReference> resolve = {
+ {4, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
+ {VK_ATTACHMENT_UNUSED, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
+ };
+ std::vector<uint32_t> preserve = {5};
+
+ VkSubpassDescription subpass = {0,
+ VK_PIPELINE_BIND_POINT_GRAPHICS,
+ (uint32_t)input.size(),
+ input.data(),
+ (uint32_t)color.size(),
+ color.data(),
+ resolve.data(),
+ &depth,
+ (uint32_t)preserve.size(),
+ preserve.data()};
+
+ VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
+ nullptr,
+ 0,
+ (uint32_t)attachments.size(),
+ attachments.data(),
+ 1,
+ &subpass,
+ 0,
+ nullptr};
+
+ // Test too many color attachments
+ {
+ std::vector<VkAttachmentReference> too_many_colors(m_device->props.limits.maxColorAttachments + 1, color[0]);
+ subpass.colorAttachmentCount = (uint32_t)too_many_colors.size();
+ subpass.pColorAttachments = too_many_colors.data();
+ subpass.pResolveAttachments = NULL;
+
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+ "VUID-VkSubpassDescription-colorAttachmentCount-00845",
+ "VUID-VkSubpassDescription2KHR-colorAttachmentCount-03063");
+
+ subpass.colorAttachmentCount = (uint32_t)color.size();
+ subpass.pColorAttachments = color.data();
+ subpass.pResolveAttachments = resolve.data();
+ }
+
+ // Test sample count mismatch between color buffers
+ attachments[subpass.pColorAttachments[1].attachment].samples = VK_SAMPLE_COUNT_8_BIT;
+ depth.attachment = VK_ATTACHMENT_UNUSED; // Avoids triggering 01418
+
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+ "VUID-VkSubpassDescription-pColorAttachments-01417",
+ "VUID-VkSubpassDescription2KHR-pColorAttachments-03069");
+
+ depth.attachment = 3;
+ attachments[subpass.pColorAttachments[1].attachment].samples = attachments[subpass.pColorAttachments[0].attachment].samples;
+
+ // Test sample count mismatch between color buffers and depth buffer
+ attachments[subpass.pDepthStencilAttachment->attachment].samples = VK_SAMPLE_COUNT_8_BIT;
+ subpass.colorAttachmentCount = 1;
+
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+ "VUID-VkSubpassDescription-pDepthStencilAttachment-01418",
+ "VUID-VkSubpassDescription2KHR-pDepthStencilAttachment-03071");
+
+ attachments[subpass.pDepthStencilAttachment->attachment].samples = attachments[subpass.pColorAttachments[0].attachment].samples;
+ subpass.colorAttachmentCount = (uint32_t)color.size();
+
+ // Test resolve attachment with UNUSED color attachment
+ color[0].attachment = VK_ATTACHMENT_UNUSED;
+
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+ "VUID-VkSubpassDescription-pResolveAttachments-00847",
+ "VUID-VkSubpassDescription2KHR-pResolveAttachments-03065");
- if (rp2_supported) {
+ color[0].attachment = 1;
+
+ // Test resolve from a single-sampled color attachment
+ attachments[subpass.pColorAttachments[0].attachment].samples = VK_SAMPLE_COUNT_1_BIT;
+ subpass.colorAttachmentCount = 1; // avoid mismatch (00337), and avoid double report
+ subpass.pDepthStencilAttachment = nullptr; // avoid mismatch (01418)
+
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+ "VUID-VkSubpassDescription-pResolveAttachments-00848",
+ "VUID-VkSubpassDescription2KHR-pResolveAttachments-03066");
+
+ attachments[subpass.pColorAttachments[0].attachment].samples = VK_SAMPLE_COUNT_4_BIT;
+ subpass.colorAttachmentCount = (uint32_t)color.size();
+ subpass.pDepthStencilAttachment = &depth;
+
+ // Test resolve to a multi-sampled resolve attachment
+ attachments[subpass.pResolveAttachments[0].attachment].samples = VK_SAMPLE_COUNT_4_BIT;
+
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+ "VUID-VkSubpassDescription-pResolveAttachments-00849",
+ "VUID-VkSubpassDescription2KHR-pResolveAttachments-03067");
+
+ attachments[subpass.pResolveAttachments[0].attachment].samples = VK_SAMPLE_COUNT_1_BIT;
+
+ // Test with color/resolve format mismatch
+ attachments[subpass.pColorAttachments[0].attachment].format = VK_FORMAT_R8G8B8A8_SRGB;
+
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+ "VUID-VkSubpassDescription-pResolveAttachments-00850",
+ "VUID-VkSubpassDescription2KHR-pResolveAttachments-03068");
+
+ attachments[subpass.pColorAttachments[0].attachment].format = attachments[subpass.pResolveAttachments[0].attachment].format;
+
+ // Test for UNUSED preserve attachments
+ preserve[0] = VK_ATTACHMENT_UNUSED;
+
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkSubpassDescription-attachment-00853",
+ "VUID-VkSubpassDescription2KHR-attachment-03073");
+
+ preserve[0] = 5;
+ // Test for preserve attachments used elsewhere in the subpass
+ color[0].attachment = preserve[0];
+
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+ "VUID-VkSubpassDescription-pPreserveAttachments-00854",
+ "VUID-VkSubpassDescription2KHR-pPreserveAttachments-03074");
+
+ color[0].attachment = 1;
+ input[0].attachment = 0;
+ input[0].layout = VK_IMAGE_LAYOUT_GENERAL;
+
+ // Test for attachment used first as input with loadOp=CLEAR
+ {
+ std::vector<VkSubpassDescription> subpasses = {subpass, subpass, subpass};
+ subpasses[0].inputAttachmentCount = 0;
+ subpasses[1].inputAttachmentCount = 0;
+ attachments[input[0].attachment].loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
+ VkRenderPassCreateInfo rpci_multipass = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
+ nullptr,
+ 0,
+ (uint32_t)attachments.size(),
+ attachments.data(),
+ (uint32_t)subpasses.size(),
+ subpasses.data(),
+ 0,
+ nullptr};
+
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci_multipass, rp2Supported,
+ "VUID-VkSubpassDescription-loadOp-00846", "VUID-VkSubpassDescription2KHR-loadOp-03064");
+
+ attachments[input[0].attachment].loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
+ }
+}
+
+TEST_F(VkLayerTest, RenderPassCreateAttachmentReferenceInvalidLayout) {
+ TEST_DESCRIPTION("Attachment reference uses PREINITIALIZED or UNDEFINED layouts");
+
+ // Check for VK_KHR_get_physical_device_properties2
+ if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ VkAttachmentDescription attach[] = {
+ {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
+ VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_UNDEFINED,
+ VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
+ };
+ VkAttachmentReference refs[] = {
+ {0, VK_IMAGE_LAYOUT_UNDEFINED},
+ };
+ VkSubpassDescription subpasses[] = {
+ {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, refs, nullptr, nullptr, 0, nullptr},
+ };
+
+ VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, attach, 1, subpasses, 0, nullptr};
+
+ // Use UNDEFINED layout
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkAttachmentReference-layout-00857",
+ "VUID-VkAttachmentReference2KHR-layout-03077");
+
+ // Use PREINITIALIZED layout
+ refs[0].layout = VK_IMAGE_LAYOUT_PREINITIALIZED;
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkAttachmentReference-layout-00857",
+ "VUID-VkAttachmentReference2KHR-layout-03077");
+}
+
+TEST_F(VkLayerTest, RenderPassCreateOverlappingCorrelationMasks) {
+ TEST_DESCRIPTION("Create a subpass with overlapping correlation masks");
+
+ // Check for VK_KHR_get_physical_device_properties2
+ if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
+
+ if (!rp2Supported) {
+ if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MULTIVIEW_EXTENSION_NAME)) {
+ m_device_extension_names.push_back(VK_KHR_MULTIVIEW_EXTENSION_NAME);
+ } else {
+ printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_KHR_MULTIVIEW_EXTENSION_NAME);
+ return;
+ }
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 0, nullptr, nullptr, nullptr, 0, nullptr};
+ uint32_t viewMasks[] = {0x3u};
+ uint32_t correlationMasks[] = {0x1u, 0x3u};
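+    // 0x1 and 0x3 both include view 0, so the correlation masks overlap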
+ VkRenderPassMultiviewCreateInfo rpmvci = {
+ VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO, nullptr, 1, viewMasks, 0, nullptr, 2, correlationMasks};
+
+ VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, &rpmvci, 0, 0, nullptr, 1, &subpass, 0, nullptr};
+
+ // Correlation masks must not overlap
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+ "VUID-VkRenderPassMultiviewCreateInfo-pCorrelationMasks-00841",
+ "VUID-VkRenderPassCreateInfo2KHR-pCorrelatedViewMasks-03056");
+
+    // Check for the more specific error: correlation masks must not be set when multiview is not enabled
+ if (rp2Supported) {
PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR =
- (PFN_vkCreateRenderPass2KHR)vkGetDeviceProcAddr(device, "vkCreateRenderPass2KHR");
- safe_VkRenderPassCreateInfo2KHR create_info2;
- ConvertVkRenderPassCreateInfoToV2KHR(create_info, &create_info2);
+ (PFN_vkCreateRenderPass2KHR)vkGetDeviceProcAddr(m_device->device(), "vkCreateRenderPass2KHR");
+
+ viewMasks[0] = 0;
+ correlationMasks[0] = 0;
+ correlationMasks[1] = 0;
+ safe_VkRenderPassCreateInfo2KHR safe_rpci2;
+ ConvertVkRenderPassCreateInfoToV2KHR(&rpci, &safe_rpci2);
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkRenderPassCreateInfo2KHR-viewMask-03057");
+ VkRenderPass rp;
+ VkResult err = vkCreateRenderPass2KHR(m_device->device(), safe_rpci2.ptr(), nullptr, &rp);
+ if (err == VK_SUCCESS) vkDestroyRenderPass(m_device->device(), rp, nullptr);
+ m_errorMonitor->VerifyFound();
+ }
+}
- error_monitor->ExpectSuccess();
- err = vkCreateRenderPass2KHR(device, create_info2.ptr(), nullptr, &render_pass);
- if (err == VK_SUCCESS) vkDestroyRenderPass(device, render_pass, nullptr);
- error_monitor->VerifyNotFound();
+TEST_F(VkLayerTest, RenderPassCreateInvalidViewMasks) {
+ TEST_DESCRIPTION("Create a subpass with the wrong number of view masks, or inconsistent setting of view masks");
+
+ // Check for VK_KHR_get_physical_device_properties2
+ if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
+
+ if (!rp2Supported) {
+ if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MULTIVIEW_EXTENSION_NAME)) {
+ m_device_extension_names.push_back(VK_KHR_MULTIVIEW_EXTENSION_NAME);
+ } else {
+ printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_KHR_MULTIVIEW_EXTENSION_NAME);
+ return;
+ }
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ VkSubpassDescription subpasses[] = {
+ {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 0, nullptr, nullptr, nullptr, 0, nullptr},
+ {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 0, nullptr, nullptr, nullptr, 0, nullptr},
+ };
+ uint32_t viewMasks[] = {0x3u, 0u};
+ VkRenderPassMultiviewCreateInfo rpmvci = {
+ VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO, nullptr, 1, viewMasks, 0, nullptr, 0, nullptr};
+
+ VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, &rpmvci, 0, 0, nullptr, 2, subpasses, 0, nullptr};
+
+ // Not enough view masks
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkRenderPassCreateInfo-pNext-01928",
+ "VUID-VkRenderPassCreateInfo2KHR-viewMask-03058");
+}
+
+TEST_F(VkLayerTest, RenderPassCreateInvalidInputAttachmentReferences) {
+ TEST_DESCRIPTION("Create a subpass with the meta data aspect mask set for an input attachment");
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+ if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE2_EXTENSION_NAME)) {
+ m_device_extension_names.push_back(VK_KHR_MAINTENANCE2_EXTENSION_NAME);
+ } else {
+ printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_KHR_MAINTENANCE2_EXTENSION_NAME);
+ return;
}
+
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ VkAttachmentDescription attach = {0,
+ VK_FORMAT_R8G8B8A8_UNORM,
+ VK_SAMPLE_COUNT_1_BIT,
+ VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+ VK_ATTACHMENT_STORE_OP_DONT_CARE,
+ VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+ VK_ATTACHMENT_STORE_OP_DONT_CARE,
+ VK_IMAGE_LAYOUT_UNDEFINED,
+ VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL};
+ VkAttachmentReference ref = {0, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL};
+
+ VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 1, &ref, 0, nullptr, nullptr, nullptr, 0, nullptr};
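+    // {subpass, inputAttachmentIndex, aspectMask}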
+ VkInputAttachmentAspectReference iaar = {0, 0, VK_IMAGE_ASPECT_METADATA_BIT};
+ VkRenderPassInputAttachmentAspectCreateInfo rpiaaci = {VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO,
+ nullptr, 1, &iaar};
+
+ VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, &rpiaaci, 0, 1, &attach, 1, &subpass, 0, nullptr};
+
+    // Invalid metadata aspect
+ m_errorMonitor->SetDesiredFailureMsg(
+ VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkRenderPassCreateInfo-pNext-01963"); // Cannot/should not avoid getting this one too
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, false, "VUID-VkInputAttachmentAspectReference-aspectMask-01964",
+ nullptr);
+
+ // Aspect not present
+ iaar.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, false, "VUID-VkRenderPassCreateInfo-pNext-01963", nullptr);
+
+ // Invalid subpass index
+ iaar.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ iaar.subpass = 1;
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, false, "VUID-VkRenderPassCreateInfo-pNext-01926", nullptr);
+ iaar.subpass = 0;
+
+ // Invalid input attachment index
+ iaar.inputAttachmentIndex = 1;
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, false, "VUID-VkRenderPassCreateInfo-pNext-01927", nullptr);
}
-void TestRenderPass2KHRCreate(ErrorMonitor *error_monitor, const VkDevice device, const VkRenderPassCreateInfo2KHR *create_info,
- const char *rp2_vuid) {
- VkRenderPass render_pass = VK_NULL_HANDLE;
- VkResult err;
+TEST_F(VkLayerTest, RenderPassCreateSubpassNonGraphicsPipeline) {
+ TEST_DESCRIPTION("Create a subpass with the compute pipeline bind point");
+ // Check for VK_KHR_get_physical_device_properties2
+ if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ VkSubpassDescription subpasses[] = {
+ {0, VK_PIPELINE_BIND_POINT_COMPUTE, 0, nullptr, 0, nullptr, nullptr, nullptr, 0, nullptr},
+ };
+
+ VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 0, nullptr, 1, subpasses, 0, nullptr};
+
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+ "VUID-VkSubpassDescription-pipelineBindPoint-00844",
+ "VUID-VkSubpassDescription2KHR-pipelineBindPoint-03062");
+}
+
+TEST_F(VkLayerTest, RenderPassCreateSubpassMissingAttributesBitMultiviewNVX) {
+ TEST_DESCRIPTION("Create a subpass with the VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX flag missing");
+
+ // Check for VK_KHR_get_physical_device_properties2
+ if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ } else {
+ printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME);
+ return;
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+ if (DeviceExtensionSupported(gpu(), nullptr, VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME) &&
+ DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MULTIVIEW_EXTENSION_NAME)) {
+ m_device_extension_names.push_back(VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_MULTIVIEW_EXTENSION_NAME);
+ } else {
+ printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME);
+ return;
+ }
+
+ bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ VkSubpassDescription subpasses[] = {
+ {VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 0, nullptr, nullptr,
+ nullptr, 0, nullptr},
+ };
+
+ VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 0, nullptr, 1, subpasses, 0, nullptr};
+
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkSubpassDescription-flags-00856",
+ "VUID-VkSubpassDescription2KHR-flags-03076");
+}
+
+TEST_F(VkLayerTest, RenderPassCreate2SubpassInvalidInputAttachmentParameters) {
+ TEST_DESCRIPTION("Create a subpass with parameters in the input attachment ref which are invalid");
+
+ // Check for VK_KHR_get_physical_device_properties2
+ if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ } else {
+ printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME);
+ return;
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+ bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
+
+ if (!rp2Supported) {
+ printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME);
+ return;
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR =
- (PFN_vkCreateRenderPass2KHR)vkGetDeviceProcAddr(device, "vkCreateRenderPass2KHR");
+ rp2Supported ? (PFN_vkCreateRenderPass2KHR)vkGetDeviceProcAddr(m_device->device(), "vkCreateRenderPass2KHR") : nullptr;
+
+ VkResult err;
+
+ VkAttachmentReference2KHR reference = {VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR, nullptr, VK_ATTACHMENT_UNUSED,
+ VK_IMAGE_LAYOUT_UNDEFINED, 0};
+ VkSubpassDescription2KHR subpass = {VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR,
+ nullptr,
+ 0,
+ VK_PIPELINE_BIND_POINT_GRAPHICS,
+ 0,
+ 1,
+ &reference,
+ 0,
+ nullptr,
+ nullptr,
+ nullptr,
+ 0,
+ nullptr};
+
+ VkRenderPassCreateInfo2KHR rpci2 = {
+ VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR, nullptr, 0, 0, nullptr, 1, &subpass, 0, nullptr, 0, nullptr};
+ VkRenderPass rp;
+
+ // Test for aspect mask of 0
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkSubpassDescription2KHR-aspectMask-03176");
+ err = vkCreateRenderPass2KHR(m_device->device(), &rpci2, nullptr, &rp);
+ if (err == VK_SUCCESS) vkDestroyRenderPass(m_device->device(), rp, nullptr);
+ m_errorMonitor->VerifyFound();
+
+ // Test for invalid aspect mask bits
+ reference.aspectMask |= VK_IMAGE_ASPECT_FLAG_BITS_MAX_ENUM;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkSubpassDescription2KHR-aspectMask-03175");
+ err = vkCreateRenderPass2KHR(m_device->device(), &rpci2, nullptr, &rp);
+ if (err == VK_SUCCESS) vkDestroyRenderPass(m_device->device(), rp, nullptr);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, RenderPassCreateInvalidSubpassDependencies) {
+ // Check for VK_KHR_get_physical_device_properties2
+ if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+ bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
+ bool multiviewSupported = rp2Supported;
+
+ if (!rp2Supported && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MULTIVIEW_EXTENSION_NAME)) {
+ m_device_extension_names.push_back(VK_KHR_MULTIVIEW_EXTENSION_NAME);
+ multiviewSupported = true;
+ }
+
+ // Add a device features struct enabling NO features
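+    // (keeping geometryShader and tessellationShader disabled lets the stage-mask checks below fire)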
+ VkPhysicalDeviceFeatures features = {0};
+ ASSERT_NO_FATAL_FAILURE(InitState(&features));
+
+ if (m_device->props.apiVersion >= VK_API_VERSION_1_1) {
+ multiviewSupported = true;
+ }
+
+ // Create two dummy subpasses
+ VkSubpassDescription subpasses[] = {
+ {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 0, nullptr, nullptr, nullptr, 0, nullptr},
+ {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 0, nullptr, nullptr, nullptr, 0, nullptr},
+ };
+
+ VkSubpassDependency dependency;
+ VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 0, nullptr, 2, subpasses, 1, &dependency};
+ // dependency = { 0, 1, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 0, 0, 0 };
+
+ // Source subpass is not EXTERNAL, so source stage mask must not include HOST
+ dependency = {0, 1, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 0, 0, 0};
+
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkSubpassDependency-srcSubpass-00858",
+ "VUID-VkSubpassDependency2KHR-srcSubpass-03078");
+
+ // Destination subpass is not EXTERNAL, so destination stage mask must not include HOST
+ dependency = {0, 1, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_HOST_BIT, 0, 0, 0};
+
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkSubpassDependency-dstSubpass-00859",
+ "VUID-VkSubpassDependency2KHR-dstSubpass-03079");
+
+ // Geometry shaders not enabled source
+ dependency = {0, 1, VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 0, 0, 0};
+
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkSubpassDependency-srcStageMask-00860",
+ "VUID-VkSubpassDependency2KHR-srcStageMask-03080");
+
+ // Geometry shaders not enabled destination
+ dependency = {0, 1, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT, 0, 0, 0};
+
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkSubpassDependency-dstStageMask-00861",
+ "VUID-VkSubpassDependency2KHR-dstStageMask-03081");
+
+ // Tessellation not enabled source
+ dependency = {0, 1, VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 0, 0, 0};
+
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkSubpassDependency-srcStageMask-00862",
+ "VUID-VkSubpassDependency2KHR-srcStageMask-03082");
+
+ // Tessellation not enabled destination
+ dependency = {0, 1, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT, 0, 0, 0};
+
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkSubpassDependency-dstStageMask-00863",
+ "VUID-VkSubpassDependency2KHR-dstStageMask-03083");
+
+ // Potential cyclical dependency
+ dependency = {1, 0, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 0, 0, 0};
+
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkSubpassDependency-srcSubpass-00864",
+ "VUID-VkSubpassDependency2KHR-srcSubpass-03084");
+
+ // EXTERNAL to EXTERNAL dependency
+ dependency = {
+ VK_SUBPASS_EXTERNAL, VK_SUBPASS_EXTERNAL, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 0, 0, 0};
+
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkSubpassDependency-srcSubpass-00865",
+ "VUID-VkSubpassDependency2KHR-srcSubpass-03085");
+
+ // Source compute stage not part of subpass 0's GRAPHICS pipeline
+ dependency = {0, VK_SUBPASS_EXTERNAL, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, 0, 0, 0};
+
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkRenderPassCreateInfo-pDependencies-00837",
+ "VUID-VkRenderPassCreateInfo2KHR-pDependencies-03054");
+
+ // Destination compute stage not part of subpass 0's GRAPHICS pipeline
+ dependency = {VK_SUBPASS_EXTERNAL, 0, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, 0, 0, 0};
+
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkRenderPassCreateInfo-pDependencies-00838",
+ "VUID-VkRenderPassCreateInfo2KHR-pDependencies-03055");
+
+ // Non graphics stage in self dependency
+ dependency = {0, 0, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, 0, 0, 0};
+
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkSubpassDependency-srcSubpass-01989",
+ "VUID-VkSubpassDependency2KHR-srcSubpass-02244");
+
+ // Logically later source stages in self dependency
+ dependency = {0, 0, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, 0, 0, 0};
+
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkSubpassDependency-srcSubpass-00867",
+ "VUID-VkSubpassDependency2KHR-srcSubpass-03087");
- error_monitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, rp2_vuid);
- err = vkCreateRenderPass2KHR(device, create_info, nullptr, &render_pass);
- if (err == VK_SUCCESS) vkDestroyRenderPass(device, render_pass, nullptr);
- error_monitor->VerifyFound();
+ // Source access mask mismatch with source stage mask
+ dependency = {0, 1, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, VK_ACCESS_UNIFORM_READ_BIT, 0, 0};
+
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkSubpassDependency-srcAccessMask-00868",
+ "VUID-VkSubpassDependency2KHR-srcAccessMask-03088");
+
+ // Destination access mask mismatch with destination stage mask
+ dependency = {
+ 0, 1, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, 0, VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, 0};
+
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkSubpassDependency-dstAccessMask-00869",
+ "VUID-VkSubpassDependency2KHR-dstAccessMask-03089");
+
+ if (multiviewSupported) {
+ // VIEW_LOCAL_BIT but multiview is not enabled
+ dependency = {0, 1, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
+ 0, 0, VK_DEPENDENCY_VIEW_LOCAL_BIT};
+
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, nullptr,
+ "VUID-VkRenderPassCreateInfo2KHR-viewMask-03059");
+
+ // Enable multiview
+ uint32_t pViewMasks[2] = {0x3u, 0x3u};
+ int32_t pViewOffsets[2] = {0, 0};
+ VkRenderPassMultiviewCreateInfo rpmvci = {
+ VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO, nullptr, 2, pViewMasks, 0, nullptr, 0, nullptr};
+ rpci.pNext = &rpmvci;
+
+ // Excessive view offsets
+ dependency = {0, 1, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
+ 0, 0, VK_DEPENDENCY_VIEW_LOCAL_BIT};
+ rpmvci.pViewOffsets = pViewOffsets;
+ rpmvci.dependencyCount = 2;
+
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, false, "VUID-VkRenderPassCreateInfo-pNext-01929", nullptr);
+
+ rpmvci.dependencyCount = 0;
+
+ // View offset with subpass self dependency
+ dependency = {0, 0, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
+ 0, 0, VK_DEPENDENCY_VIEW_LOCAL_BIT};
+ rpmvci.pViewOffsets = pViewOffsets;
+ pViewOffsets[0] = 1;
+ rpmvci.dependencyCount = 1;
+
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, false, "VUID-VkRenderPassCreateInfo-pNext-01930", nullptr);
+
+ rpmvci.dependencyCount = 0;
+
+ // View offset with no view local bit
+ if (rp2Supported) {
+ dependency = {0, VK_SUBPASS_EXTERNAL, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 0, 0, 0};
+ rpmvci.pViewOffsets = pViewOffsets;
+ pViewOffsets[0] = 1;
+ rpmvci.dependencyCount = 1;
+
+ safe_VkRenderPassCreateInfo2KHR safe_rpci2;
+ ConvertVkRenderPassCreateInfoToV2KHR(&rpci, &safe_rpci2);
+
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, nullptr,
+ "VUID-VkSubpassDependency2KHR-dependencyFlags-03092");
+
+ rpmvci.dependencyCount = 0;
+ }
+
+ // EXTERNAL subpass with VIEW_LOCAL_BIT - source subpass
+ dependency = {VK_SUBPASS_EXTERNAL, 1, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 0, 0,
+ VK_DEPENDENCY_VIEW_LOCAL_BIT};
+
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+ "VUID-VkSubpassDependency-dependencyFlags-02520",
+ "VUID-VkSubpassDependency2KHR-dependencyFlags-03090");
+
+ // EXTERNAL subpass with VIEW_LOCAL_BIT - destination subpass
+ dependency = {0, VK_SUBPASS_EXTERNAL, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 0,
+ 0, VK_DEPENDENCY_VIEW_LOCAL_BIT};
+
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+ "VUID-VkSubpassDependency-dependencyFlags-02521",
+ "VUID-VkSubpassDependency2KHR-dependencyFlags-03091");
+
+ // Multiple views but no view local bit in self-dependency
+ dependency = {0, 0, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 0, 0, 0};
+
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkSubpassDependency-srcSubpass-00872",
+ "VUID-VkRenderPassCreateInfo2KHR-pDependencies-03060");
+ }
+}
+
+TEST_F(VkLayerTest, RenderPassCreateInvalidMixedAttachmentSamplesAMD) {
+ TEST_DESCRIPTION("Verify error messages for supported and unsupported sample counts in render pass attachments.");
+
+ // Check for VK_KHR_get_physical_device_properties2
+ if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+ if (DeviceExtensionSupported(gpu(), nullptr, VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME)) {
+ m_device_extension_names.push_back(VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME);
+ } else {
+ printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME);
+ return;
+ }
+
+ bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
+
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ std::vector<VkAttachmentDescription> attachments;
+
+ {
+ VkAttachmentDescription att = {};
+ att.format = VK_FORMAT_R8G8B8A8_UNORM;
+ att.samples = VK_SAMPLE_COUNT_1_BIT;
+ att.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
+ att.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
+ att.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
+ att.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
+ att.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+ att.finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+
+ attachments.push_back(att);
+
+ att.format = VK_FORMAT_D16_UNORM;
+ att.samples = VK_SAMPLE_COUNT_4_BIT;
+ att.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
+ att.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
+ att.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
+ att.stencilStoreOp = VK_ATTACHMENT_STORE_OP_STORE;
+ att.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+ att.finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
+
+ attachments.push_back(att);
+ }
+
+ VkAttachmentReference color_ref = {};
+ color_ref.attachment = 0;
+ color_ref.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+
+ VkAttachmentReference depth_ref = {};
+ depth_ref.attachment = 1;
+ depth_ref.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
+
+ VkSubpassDescription subpass = {};
+ subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
+ subpass.colorAttachmentCount = 1;
+ subpass.pColorAttachments = &color_ref;
+ subpass.pDepthStencilAttachment = &depth_ref;
+
+ VkRenderPassCreateInfo rpci = {};
+ rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+ rpci.attachmentCount = attachments.size();
+ rpci.pAttachments = attachments.data();
+ rpci.subpassCount = 1;
+ rpci.pSubpasses = &subpass;
+
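+    // With VK_AMD_mixed_attachment_samples, color sample counts no greater than the depth/stencil count are allowed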
+ m_errorMonitor->ExpectSuccess();
+
+ VkRenderPass rp;
+ VkResult err;
+
+ err = vkCreateRenderPass(device(), &rpci, NULL, &rp);
+ m_errorMonitor->VerifyNotFound();
+ if (err == VK_SUCCESS) vkDestroyRenderPass(m_device->device(), rp, nullptr);
+
+ // Expect an error message for invalid sample counts
+ attachments[0].samples = VK_SAMPLE_COUNT_4_BIT;
+ attachments[1].samples = VK_SAMPLE_COUNT_1_BIT;
+
+ TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+ "VUID-VkSubpassDescription-pColorAttachments-01506",
+ "VUID-VkSubpassDescription2KHR-pColorAttachments-03070");
}
-void TestRenderPassBegin(ErrorMonitor *error_monitor, const VkDevice device, const VkCommandBuffer command_buffer,
- const VkRenderPassBeginInfo *begin_info, bool rp2Supported, const char *rp1_vuid, const char *rp2_vuid) {
+static void TestRenderPassBegin(ErrorMonitor *error_monitor, const VkDevice device, const VkCommandBuffer command_buffer,
+ const VkRenderPassBeginInfo *begin_info, bool rp2Supported, const char *rp1_vuid,
+ const char *rp2_vuid) {
VkCommandBufferBeginInfo cmd_begin_info = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, nullptr,
VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT, nullptr};
@@ -262,9 +6674,8463 @@ void TestRenderPassBegin(ErrorMonitor *error_monitor, const VkDevice device, con
}
}
-void ValidOwnershipTransferOp(ErrorMonitor *monitor, VkCommandBufferObj *cb, VkPipelineStageFlags src_stages,
- VkPipelineStageFlags dst_stages, const VkBufferMemoryBarrier *buf_barrier,
- const VkImageMemoryBarrier *img_barrier) {
+TEST_F(VkLayerTest, RenderPassBeginInvalidRenderArea) {
+ TEST_DESCRIPTION("Generate INVALID_RENDER_AREA error by beginning renderpass with extent outside of framebuffer");
+ // Check for VK_KHR_get_physical_device_properties2
+ if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
+ ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ // Framebuffer for render target is 256x256, exceed that for INVALID_RENDER_AREA
+ m_renderPassBeginInfo.renderArea.extent.width = 257;
+ m_renderPassBeginInfo.renderArea.extent.height = 257;
+
+ TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &m_renderPassBeginInfo, rp2Supported,
+ "Cannot execute a render pass with renderArea not within the bound of the framebuffer.",
+ "Cannot execute a render pass with renderArea not within the bound of the framebuffer.");
+}
+
+TEST_F(VkLayerTest, RenderPassBeginWithinRenderPass) {
+ // Check for VK_KHR_get_physical_device_properties2
+ if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR = nullptr;
+ bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ if (rp2Supported) {
+ vkCmdBeginRenderPass2KHR =
+ (PFN_vkCmdBeginRenderPass2KHR)vkGetDeviceProcAddr(m_device->device(), "vkCmdBeginRenderPass2KHR");
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    // Record a BeginRenderPass while a render pass instance is already active
+ m_commandBuffer->begin();
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
+ // Just use a dummy Renderpass
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBeginRenderPass-renderpass");
+ vkCmdBeginRenderPass(m_commandBuffer->handle(), &m_renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
+
+ m_errorMonitor->VerifyFound();
+
+ if (rp2Supported) {
+ VkSubpassBeginInfoKHR subpassBeginInfo = {VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR, nullptr, VK_SUBPASS_CONTENTS_INLINE};
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBeginRenderPass2KHR-renderpass");
+ vkCmdBeginRenderPass2KHR(m_commandBuffer->handle(), &m_renderPassBeginInfo, &subpassBeginInfo);
+ m_errorMonitor->VerifyFound();
+ }
+}
+
+TEST_F(VkLayerTest, RenderPassBeginIncompatibleFramebufferRenderPass) {
+ TEST_DESCRIPTION("Test that renderpass begin is compatible with the framebuffer renderpass ");
+
+ ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+
+ // Create a depth stencil image view
+ VkImageObj image(m_device);
+
+ image.Init(128, 128, 1, VK_FORMAT_D16_UNORM, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL);
+ ASSERT_TRUE(image.initialized());
+
+ VkImageView dsv;
+ VkImageViewCreateInfo dsvci = {};
+ dsvci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+ dsvci.pNext = nullptr;
+ dsvci.image = image.handle();
+ dsvci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+ dsvci.format = VK_FORMAT_D16_UNORM;
+ dsvci.subresourceRange.layerCount = 1;
+ dsvci.subresourceRange.baseMipLevel = 0;
+ dsvci.subresourceRange.levelCount = 1;
+ dsvci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+ vkCreateImageView(m_device->device(), &dsvci, NULL, &dsv);
+
+    // Create a renderPass with a single depth/stencil attachment
+ VkAttachmentDescription description = {0,
+ VK_FORMAT_D16_UNORM,
+ VK_SAMPLE_COUNT_1_BIT,
+ VK_ATTACHMENT_LOAD_OP_LOAD,
+ VK_ATTACHMENT_STORE_OP_DONT_CARE,
+ VK_ATTACHMENT_LOAD_OP_CLEAR,
+ VK_ATTACHMENT_STORE_OP_DONT_CARE,
+ VK_IMAGE_LAYOUT_GENERAL,
+ VK_IMAGE_LAYOUT_GENERAL};
+
+ VkAttachmentReference depth_stencil_ref = {0, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL};
+
+ VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 0, nullptr, nullptr, &depth_stencil_ref, 0,
+ nullptr};
+
+ VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, &description, 1, &subpass, 0, nullptr};
+ VkRenderPass rp1, rp2;
+
+ vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp1);
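+    // rp2 omits the depth/stencil reference, so it is not compatible with rp1, which the framebuffer is created against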
+ subpass.pDepthStencilAttachment = nullptr;
+ vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp2);
+
+ // Create a framebuffer
+
+ VkFramebufferCreateInfo fbci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp1, 1, &dsv, 128, 128, 1};
+ VkFramebuffer fb;
+
+ vkCreateFramebuffer(m_device->handle(), &fbci, nullptr, &fb);
+
+ VkRenderPassBeginInfo rp_begin = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, nullptr, rp2, fb, {{0, 0}, {128, 128}}, 0, nullptr};
+
+ TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &rp_begin, false,
+ "VUID-VkRenderPassBeginInfo-renderPass-00904", nullptr);
+
+ vkDestroyRenderPass(m_device->device(), rp1, nullptr);
+ vkDestroyRenderPass(m_device->device(), rp2, nullptr);
+ vkDestroyFramebuffer(m_device->device(), fb, nullptr);
+ vkDestroyImageView(m_device->device(), dsv, nullptr);
+}
+
+TEST_F(VkLayerTest, RenderPassBeginLayoutsFramebufferImageUsageMismatches) {
+ TEST_DESCRIPTION(
+ "Test that renderpass initial/final layouts match up with the usage bits set for each attachment of the framebuffer");
+
+ // Check for VK_KHR_get_physical_device_properties2
+ if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
+ bool maintenance2Supported = rp2Supported;
+
+ // Check for VK_KHR_maintenance2
+ if (!rp2Supported && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE2_EXTENSION_NAME)) {
+ m_device_extension_names.push_back(VK_KHR_MAINTENANCE2_EXTENSION_NAME);
+ maintenance2Supported = true;
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+
+ if (m_device->props.apiVersion >= VK_API_VERSION_1_1) {
+ maintenance2Supported = true;
+ }
+
+ // Create an input attachment view
+ VkImageObj iai(m_device);
+
+ iai.InitNoLayout(128, 128, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL);
+ ASSERT_TRUE(iai.initialized());
+
+ VkImageView iav;
+ VkImageViewCreateInfo iavci = {};
+ iavci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+ iavci.pNext = nullptr;
+ iavci.image = iai.handle();
+ iavci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+ iavci.format = VK_FORMAT_R8G8B8A8_UNORM;
+ iavci.subresourceRange.layerCount = 1;
+ iavci.subresourceRange.baseMipLevel = 0;
+ iavci.subresourceRange.levelCount = 1;
+ iavci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ vkCreateImageView(m_device->device(), &iavci, NULL, &iav);
+
+ // Create a color attachment view
+ VkImageObj cai(m_device);
+
+ cai.InitNoLayout(128, 128, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL);
+ ASSERT_TRUE(cai.initialized());
+
+ VkImageView cav;
+ VkImageViewCreateInfo cavci = {};
+ cavci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+ cavci.pNext = nullptr;
+ cavci.image = cai.handle();
+ cavci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+ cavci.format = VK_FORMAT_R8G8B8A8_UNORM;
+ cavci.subresourceRange.layerCount = 1;
+ cavci.subresourceRange.baseMipLevel = 0;
+ cavci.subresourceRange.levelCount = 1;
+ cavci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ vkCreateImageView(m_device->device(), &cavci, NULL, &cav);
+
+ // Create a renderPass with those attachments
+ VkAttachmentDescription descriptions[] = {
+ {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
+ VK_ATTACHMENT_LOAD_OP_CLEAR, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL},
+ {1, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
+ VK_ATTACHMENT_LOAD_OP_CLEAR, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL}};
+
+ VkAttachmentReference input_ref = {0, VK_IMAGE_LAYOUT_GENERAL};
+ VkAttachmentReference color_ref = {1, VK_IMAGE_LAYOUT_GENERAL};
+
+ VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 1, &input_ref, 1, &color_ref, nullptr, nullptr, 0, nullptr};
+
+ VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 2, descriptions, 1, &subpass, 0, nullptr};
+
+ VkRenderPass rp;
+
+ vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp);
+
+ // Create a framebuffer
+
+ VkImageView views[] = {iav, cav};
+
+ VkFramebufferCreateInfo fbci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 2, views, 128, 128, 1};
+ VkFramebuffer fb;
+
+ vkCreateFramebuffer(m_device->handle(), &fbci, nullptr, &fb);
+
+ VkRenderPassBeginInfo rp_begin = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, nullptr, rp, fb, {{0, 0}, {128, 128}}, 0, nullptr};
+
+ VkRenderPass rp_invalid;
+
+ // Initial layout is VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL but attachment doesn't support IMAGE_USAGE_COLOR_ATTACHMENT_BIT
+ descriptions[0].initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+ vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp_invalid);
+ rp_begin.renderPass = rp_invalid;
+ TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &rp_begin, rp2Supported,
+ "VUID-vkCmdBeginRenderPass-initialLayout-00895", "VUID-vkCmdBeginRenderPass2KHR-initialLayout-03094");
+
+ vkDestroyRenderPass(m_device->handle(), rp_invalid, nullptr);
+
+ // Initial layout is VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL but attachment doesn't support VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT
+ // / VK_IMAGE_USAGE_SAMPLED_BIT
+ descriptions[0].initialLayout = VK_IMAGE_LAYOUT_GENERAL;
+ descriptions[1].initialLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+ vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp_invalid);
+ rp_begin.renderPass = rp_invalid;
+
+ TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &rp_begin, rp2Supported,
+ "VUID-vkCmdBeginRenderPass-initialLayout-00897", "VUID-vkCmdBeginRenderPass2KHR-initialLayout-03097");
+
+ vkDestroyRenderPass(m_device->handle(), rp_invalid, nullptr);
+ descriptions[1].initialLayout = VK_IMAGE_LAYOUT_GENERAL;
+
+ // Initial layout is VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL but attachment doesn't support VK_IMAGE_USAGE_TRANSFER_SRC_BIT
+ descriptions[0].initialLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
+ vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp_invalid);
+ rp_begin.renderPass = rp_invalid;
+
+ TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &rp_begin, rp2Supported,
+ "VUID-vkCmdBeginRenderPass-initialLayout-00898", "VUID-vkCmdBeginRenderPass2KHR-initialLayout-03098");
+
+ vkDestroyRenderPass(m_device->handle(), rp_invalid, nullptr);
+
+ // Initial layout is VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL but attachment doesn't support VK_IMAGE_USAGE_TRANSFER_DST_BIT
+ descriptions[0].initialLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
+ vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp_invalid);
+ rp_begin.renderPass = rp_invalid;
+
+ TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &rp_begin, rp2Supported,
+ "VUID-vkCmdBeginRenderPass-initialLayout-00899", "VUID-vkCmdBeginRenderPass2KHR-initialLayout-03099");
+
+ vkDestroyRenderPass(m_device->handle(), rp_invalid, nullptr);
+
+ // Initial layout is VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL but attachment doesn't support
+ // VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT
+ descriptions[0].initialLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
+ vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp_invalid);
+ rp_begin.renderPass = rp_invalid;
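+    // Without maintenance2, VUID 00896 applies; with it, the 01758 variant (which also covers the mixed read-only layouts) is reported instead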
+ const char *initial_layout_vuid_rp1 =
+ maintenance2Supported ? "VUID-vkCmdBeginRenderPass-initialLayout-01758" : "VUID-vkCmdBeginRenderPass-initialLayout-00896";
+
+ TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &rp_begin, rp2Supported,
+ initial_layout_vuid_rp1, "VUID-vkCmdBeginRenderPass2KHR-initialLayout-03096");
+
+ vkDestroyRenderPass(m_device->handle(), rp_invalid, nullptr);
+
+ // Initial layout is VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL but attachment doesn't support
+ // VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT
+ descriptions[0].initialLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL;
+ vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp_invalid);
+ rp_begin.renderPass = rp_invalid;
+
+ TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &rp_begin, rp2Supported,
+ initial_layout_vuid_rp1, "VUID-vkCmdBeginRenderPass2KHR-initialLayout-03096");
+
+ vkDestroyRenderPass(m_device->handle(), rp_invalid, nullptr);
+
+ if (maintenance2Supported || rp2Supported) {
+ // Initial layout is VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL but attachment doesn't support
+ // VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT
+ descriptions[0].initialLayout = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL;
+ vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp_invalid);
+ rp_begin.renderPass = rp_invalid;
+
+ TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &rp_begin, rp2Supported,
+ "VUID-vkCmdBeginRenderPass-initialLayout-01758", "VUID-vkCmdBeginRenderPass2KHR-initialLayout-03096");
+
+ vkDestroyRenderPass(m_device->handle(), rp_invalid, nullptr);
+
+ // Initial layout is VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL but attachment doesn't support
+ // VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT
+ descriptions[0].initialLayout = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL;
+ vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp_invalid);
+ rp_begin.renderPass = rp_invalid;
+
+ TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &rp_begin, rp2Supported,
+ "VUID-vkCmdBeginRenderPass-initialLayout-01758", "VUID-vkCmdBeginRenderPass2KHR-initialLayout-03096");
+
+ vkDestroyRenderPass(m_device->handle(), rp_invalid, nullptr);
+ }
+
+ vkDestroyRenderPass(m_device->device(), rp, nullptr);
+ vkDestroyFramebuffer(m_device->device(), fb, nullptr);
+ vkDestroyImageView(m_device->device(), iav, nullptr);
+ vkDestroyImageView(m_device->device(), cav, nullptr);
+}
+
+TEST_F(VkLayerTest, RenderPassBeginClearOpMismatch) {
+ TEST_DESCRIPTION(
+ "Begin a renderPass where clearValueCount is less than the number of renderPass attachments that use "
+ "loadOp VK_ATTACHMENT_LOAD_OP_CLEAR.");
+
+ // Check for VK_KHR_get_physical_device_properties2
+ if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
+ ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ // Create a renderPass with a single attachment that uses loadOp CLEAR
+ VkAttachmentReference attach = {};
+ attach.layout = VK_IMAGE_LAYOUT_GENERAL;
+ VkSubpassDescription subpass = {};
+ subpass.colorAttachmentCount = 1;
+ subpass.pColorAttachments = &attach;
+ VkRenderPassCreateInfo rpci = {};
+ rpci.subpassCount = 1;
+ rpci.pSubpasses = &subpass;
+ rpci.attachmentCount = 1;
+ VkAttachmentDescription attach_desc = {};
+ attach_desc.format = VK_FORMAT_B8G8R8A8_UNORM;
+ // Set loadOp to CLEAR
+ attach_desc.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
+ attach_desc.samples = VK_SAMPLE_COUNT_1_BIT;
+ attach_desc.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
+ rpci.pAttachments = &attach_desc;
+ rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+ VkRenderPass rp;
+ vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp);
+
+ VkRenderPassBeginInfo rp_begin = {};
+ rp_begin.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
+ rp_begin.pNext = NULL;
+ rp_begin.renderPass = renderPass();
+ rp_begin.framebuffer = framebuffer();
+ rp_begin.clearValueCount = 0; // Should be 1
+
+ TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &rp_begin, rp2Supported,
+ "VUID-VkRenderPassBeginInfo-clearValueCount-00902", "VUID-VkRenderPassBeginInfo-clearValueCount-00902");
+
+ vkDestroyRenderPass(m_device->device(), rp, NULL);
+}
+
+TEST_F(VkLayerTest, RenderPassBeginSampleLocationsInvalidIndicesEXT) {
+ TEST_DESCRIPTION("Test that attachment indices and subpass indices specifed by sample locations structures are valid");
+
+ // Check for VK_KHR_get_physical_device_properties2
+ if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ }
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ if (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_SAMPLE_LOCATIONS_EXTENSION_NAME)) {
+ m_device_extension_names.push_back(VK_EXT_SAMPLE_LOCATIONS_EXTENSION_NAME);
+ } else {
+ printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_EXT_SAMPLE_LOCATIONS_EXTENSION_NAME);
+ return;
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+
+ // Create a depth stencil image view
+ VkImageObj image(m_device);
+
+ image.Init(128, 128, 1, VK_FORMAT_D16_UNORM, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL);
+ ASSERT_TRUE(image.initialized());
+
+ VkImageView dsv;
+ VkImageViewCreateInfo dsvci = {};
+ dsvci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+ dsvci.pNext = nullptr;
+ dsvci.image = image.handle();
+ dsvci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+ dsvci.format = VK_FORMAT_D16_UNORM;
+ dsvci.subresourceRange.layerCount = 1;
+ dsvci.subresourceRange.baseMipLevel = 0;
+ dsvci.subresourceRange.levelCount = 1;
+ dsvci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+ vkCreateImageView(m_device->device(), &dsvci, NULL, &dsv);
+
+ // Create a renderPass with a single depth/stencil attachment (loadOp LOAD, stencilLoadOp CLEAR)
+ VkAttachmentDescription description = {0,
+ VK_FORMAT_D16_UNORM,
+ VK_SAMPLE_COUNT_1_BIT,
+ VK_ATTACHMENT_LOAD_OP_LOAD,
+ VK_ATTACHMENT_STORE_OP_DONT_CARE,
+ VK_ATTACHMENT_LOAD_OP_CLEAR,
+ VK_ATTACHMENT_STORE_OP_DONT_CARE,
+ VK_IMAGE_LAYOUT_GENERAL,
+ VK_IMAGE_LAYOUT_GENERAL};
+
+ VkAttachmentReference depth_stencil_ref = {0, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL};
+
+ VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 0, nullptr, nullptr, &depth_stencil_ref, 0,
+ nullptr};
+
+ VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, &description, 1, &subpass, 0, nullptr};
+ VkRenderPass rp;
+
+ vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp);
+
+ // Create a framebuffer
+
+ VkFramebufferCreateInfo fbci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 1, &dsv, 128, 128, 1};
+ VkFramebuffer fb;
+
+ vkCreateFramebuffer(m_device->handle(), &fbci, nullptr, &fb);
+
+ VkSampleLocationEXT sample_location = {0.5, 0.5};
+
+ VkSampleLocationsInfoEXT sample_locations_info = {
+ VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT, nullptr, VK_SAMPLE_COUNT_1_BIT, {1, 1}, 1, &sample_location};
+
+ VkAttachmentSampleLocationsEXT attachment_sample_locations = {0, sample_locations_info};
+ VkSubpassSampleLocationsEXT subpass_sample_locations = {0, sample_locations_info};
+
+ VkRenderPassSampleLocationsBeginInfoEXT rp_sl_begin = {VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT,
+ nullptr,
+ 1,
+ &attachment_sample_locations,
+ 1,
+ &subpass_sample_locations};
+
+ VkRenderPassBeginInfo rp_begin = {
+ VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, &rp_sl_begin, rp, fb, {{0, 0}, {128, 128}}, 0, nullptr};
+
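+ // The render pass has a single attachment (index 0), so attachmentIndex 1 is out of range and should trigger the VUID below.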
+ attachment_sample_locations.attachmentIndex = 1;
+ TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &rp_begin, false,
+ "VUID-VkAttachmentSampleLocationsEXT-attachmentIndex-01531", nullptr);
+ attachment_sample_locations.attachmentIndex = 0;
+
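+ // The render pass has a single subpass (index 0), so subpassIndex 1 is out of range and should trigger the VUID below.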
+ subpass_sample_locations.subpassIndex = 1;
+ TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &rp_begin, false,
+ "VUID-VkSubpassSampleLocationsEXT-subpassIndex-01532", nullptr);
+ subpass_sample_locations.subpassIndex = 0;
+
+ vkDestroyRenderPass(m_device->device(), rp, nullptr);
+ vkDestroyFramebuffer(m_device->device(), fb, nullptr);
+ vkDestroyImageView(m_device->device(), dsv, nullptr);
+}
+
+TEST_F(VkLayerTest, RenderPassNextSubpassExcessive) {
+ TEST_DESCRIPTION("Test that an error is produced when CmdNextSubpass is called too many times in a renderpass instance");
+
+ // Check for VK_KHR_get_physical_device_properties2
+ if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR = nullptr;
+ bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ if (rp2Supported) {
+ vkCmdNextSubpass2KHR = (PFN_vkCmdNextSubpass2KHR)vkGetDeviceProcAddr(m_device->device(), "vkCmdNextSubpass2KHR");
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ m_commandBuffer->begin();
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
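+ // The default render pass from InitRenderTarget() has only one subpass, so advancing past it should be flagged.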
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdNextSubpass-None-00909");
+ vkCmdNextSubpass(m_commandBuffer->handle(), VK_SUBPASS_CONTENTS_INLINE);
+ m_errorMonitor->VerifyFound();
+
+ if (rp2Supported) {
+ VkSubpassBeginInfoKHR subpassBeginInfo = {VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR, nullptr, VK_SUBPASS_CONTENTS_INLINE};
+ VkSubpassEndInfoKHR subpassEndInfo = {VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR, nullptr};
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdNextSubpass2KHR-None-03102");
+
+ vkCmdNextSubpass2KHR(m_commandBuffer->handle(), &subpassBeginInfo, &subpassEndInfo);
+ m_errorMonitor->VerifyFound();
+ }
+
+ m_commandBuffer->EndRenderPass();
+ m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, RenderPassEndBeforeFinalSubpass) {
+ TEST_DESCRIPTION("Test that an error is produced when CmdEndRenderPass is called before the final subpass has been reached");
+
+ // Check for VK_KHR_get_physical_device_properties2
+ if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR = nullptr;
+ bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
+ ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+
+ if (rp2Supported) {
+ vkCmdEndRenderPass2KHR = (PFN_vkCmdEndRenderPass2KHR)vkGetDeviceProcAddr(m_device->device(), "vkCmdEndRenderPass2KHR");
+ }
+
+ VkSubpassDescription sd[2] = {{0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 0, nullptr, nullptr, nullptr, 0, nullptr},
+ {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 0, nullptr, nullptr, nullptr, 0, nullptr}};
+
+ VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 0, nullptr, 2, sd, 0, nullptr};
+
+ VkRenderPass rp;
+ VkResult err = vkCreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
+ ASSERT_VK_SUCCESS(err);
+
+ VkFramebufferCreateInfo fbci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 0, nullptr, 16, 16, 1};
+
+ VkFramebuffer fb;
+ err = vkCreateFramebuffer(m_device->device(), &fbci, nullptr, &fb);
+ ASSERT_VK_SUCCESS(err);
+
+ m_commandBuffer->begin();
+
+ VkRenderPassBeginInfo rpbi = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, nullptr, rp, fb, {{0, 0}, {16, 16}}, 0, nullptr};
+
+ vkCmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
+
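+ // The render pass has two subpasses but we are still in the first one, so ending it here should be flagged.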
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdEndRenderPass-None-00910");
+ vkCmdEndRenderPass(m_commandBuffer->handle());
+ m_errorMonitor->VerifyFound();
+
+ if (rp2Supported) {
+ VkSubpassEndInfoKHR subpassEndInfo = {VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR, nullptr};
+
+ m_commandBuffer->reset();
+ m_commandBuffer->begin();
+ vkCmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdEndRenderPass2KHR-None-03103");
+ vkCmdEndRenderPass2KHR(m_commandBuffer->handle(), &subpassEndInfo);
+ m_errorMonitor->VerifyFound();
+ }
+
+ // Clean up.
+ vkDestroyFramebuffer(m_device->device(), fb, nullptr);
+ vkDestroyRenderPass(m_device->device(), rp, nullptr);
+}
+
+TEST_F(VkLayerTest, RenderPassDestroyWhileInUse) {
+ TEST_DESCRIPTION("Delete in-use renderPass.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ // Create simple renderpass
+ VkAttachmentReference attach = {};
+ attach.layout = VK_IMAGE_LAYOUT_GENERAL;
+ VkSubpassDescription subpass = {};
+ subpass.colorAttachmentCount = 1;
+ subpass.pColorAttachments = &attach;
+ VkRenderPassCreateInfo rpci = {};
+ rpci.subpassCount = 1;
+ rpci.pSubpasses = &subpass;
+ rpci.attachmentCount = 1;
+ VkAttachmentDescription attach_desc = {};
+ attach_desc.format = VK_FORMAT_B8G8R8A8_UNORM;
+ attach_desc.samples = VK_SAMPLE_COUNT_1_BIT;
+ attach_desc.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
+ rpci.pAttachments = &attach_desc;
+ rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+ VkRenderPass rp;
+ VkResult err = vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp);
+ ASSERT_VK_SUCCESS(err);
+
+ m_errorMonitor->ExpectSuccess();
+
+ m_commandBuffer->begin();
+ VkRenderPassBeginInfo rpbi = {};
+ rpbi.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
+ rpbi.framebuffer = m_framebuffer;
+ rpbi.renderPass = rp;
+ m_commandBuffer->BeginRenderPass(rpbi);
+ m_commandBuffer->EndRenderPass();
+ m_commandBuffer->end();
+
+ VkSubmitInfo submit_info = {};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &m_commandBuffer->handle();
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+ m_errorMonitor->VerifyNotFound();
+
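+ // The submitted command buffer still references rp, so destroying it before the queue is idle should be flagged.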
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkDestroyRenderPass-renderPass-00873");
+ vkDestroyRenderPass(m_device->device(), rp, nullptr);
+ m_errorMonitor->VerifyFound();
+
+ // Wait for queue to complete so we can safely destroy rp
+ vkQueueWaitIdle(m_device->m_queue);
+ m_errorMonitor->SetUnexpectedError("If renderPass is not VK_NULL_HANDLE, renderPass must be a valid VkRenderPass handle");
+ m_errorMonitor->SetUnexpectedError("Was it created? Has it already been destroyed?");
+ vkDestroyRenderPass(m_device->device(), rp, nullptr);
+}
+
+TEST_F(VkPositiveLayerTest, RenderPassCreateAttachmentUsedTwiceOK) {
+ TEST_DESCRIPTION("Attachment is used simultaneously as color and input, with the same layout. This is OK.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ VkAttachmentDescription attach[] = {
+ {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_LOAD, VK_ATTACHMENT_STORE_OP_DONT_CARE,
+ VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL},
+ };
+ VkAttachmentReference ref = {0, VK_IMAGE_LAYOUT_GENERAL};
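+ // The same reference (attachment 0, layout GENERAL) is used as both the input and the color attachment of the subpass.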
+ VkSubpassDescription subpasses[] = {
+ {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 1, &ref, 1, &ref, nullptr, nullptr, 0, nullptr},
+ };
+
+ VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, attach, 1, subpasses, 0, nullptr};
+ VkRenderPass rp;
+
+ m_errorMonitor->ExpectSuccess();
+ vkCreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
+ m_errorMonitor->VerifyNotFound();
+ vkDestroyRenderPass(m_device->device(), rp, nullptr);
+}
+
+TEST_F(VkPositiveLayerTest, RenderPassCreateInitialLayoutUndefined) {
+ TEST_DESCRIPTION(
+ "Ensure that CmdBeginRenderPass with an attachment's initialLayout of VK_IMAGE_LAYOUT_UNDEFINED works when the command "
+ "buffer has prior knowledge of that attachment's layout.");
+
+ m_errorMonitor->ExpectSuccess();
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ // A renderpass with one color attachment.
+ VkAttachmentDescription attachment = {0,
+ VK_FORMAT_R8G8B8A8_UNORM,
+ VK_SAMPLE_COUNT_1_BIT,
+ VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+ VK_ATTACHMENT_STORE_OP_STORE,
+ VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+ VK_ATTACHMENT_STORE_OP_DONT_CARE,
+ VK_IMAGE_LAYOUT_UNDEFINED,
+ VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
+
+ VkAttachmentReference att_ref = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
+
+ VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, &att_ref, nullptr, nullptr, 0, nullptr};
+
+ VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, &attachment, 1, &subpass, 0, nullptr};
+
+ VkRenderPass rp;
+ VkResult err = vkCreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
+ ASSERT_VK_SUCCESS(err);
+
+ // A compatible framebuffer.
+ VkImageObj image(m_device);
+ image.Init(32, 32, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+ ASSERT_TRUE(image.initialized());
+
+ VkImageViewCreateInfo ivci = {
+ VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
+ nullptr,
+ 0,
+ image.handle(),
+ VK_IMAGE_VIEW_TYPE_2D,
+ VK_FORMAT_R8G8B8A8_UNORM,
+ {VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY,
+ VK_COMPONENT_SWIZZLE_IDENTITY},
+ {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1},
+ };
+ VkImageView view;
+ err = vkCreateImageView(m_device->device(), &ivci, nullptr, &view);
+ ASSERT_VK_SUCCESS(err);
+
+ VkFramebufferCreateInfo fci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 1, &view, 32, 32, 1};
+ VkFramebuffer fb;
+ err = vkCreateFramebuffer(m_device->device(), &fci, nullptr, &fb);
+ ASSERT_VK_SUCCESS(err);
+
+ // Record a single command buffer which uses this renderpass twice. The
+ // bug is triggered at the beginning of the second renderpass, when the
+ // command buffer already has a layout recorded for the attachment.
+ VkRenderPassBeginInfo rpbi = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, nullptr, rp, fb, {{0, 0}, {32, 32}}, 0, nullptr};
+ m_commandBuffer->begin();
+ vkCmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
+ vkCmdEndRenderPass(m_commandBuffer->handle());
+ vkCmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
+
+ m_errorMonitor->VerifyNotFound();
+
+ vkCmdEndRenderPass(m_commandBuffer->handle());
+ m_commandBuffer->end();
+
+ vkDestroyFramebuffer(m_device->device(), fb, nullptr);
+ vkDestroyRenderPass(m_device->device(), rp, nullptr);
+ vkDestroyImageView(m_device->device(), view, nullptr);
+}
+
+TEST_F(VkPositiveLayerTest, RenderPassCreateAttachmentLayoutWithLoadOpThenReadOnly) {
+ TEST_DESCRIPTION(
+ "Positive test where we create a renderpass with an attachment that uses LOAD_OP_CLEAR, the first subpass has a valid "
+ "layout, and a second subpass then uses a valid *READ_ONLY* layout.");
+ m_errorMonitor->ExpectSuccess();
+ ASSERT_NO_FATAL_FAILURE(Init());
+ auto depth_format = FindSupportedDepthStencilFormat(gpu());
+ if (!depth_format) {
+ printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
+ return;
+ }
+
+ VkAttachmentReference attach[2] = {};
+ attach[0].attachment = 0;
+ attach[0].layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
+ attach[1].attachment = 0;
+ attach[1].layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL;
+ VkSubpassDescription subpasses[2] = {};
+ // First subpass clears DS attach on load
+ subpasses[0].pDepthStencilAttachment = &attach[0];
+ // 2nd subpass reads in DS as input attachment
+ subpasses[1].inputAttachmentCount = 1;
+ subpasses[1].pInputAttachments = &attach[1];
+ VkAttachmentDescription attach_desc = {};
+ attach_desc.format = depth_format;
+ attach_desc.samples = VK_SAMPLE_COUNT_1_BIT;
+ attach_desc.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
+ attach_desc.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
+ attach_desc.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
+ attach_desc.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
+ attach_desc.initialLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
+ attach_desc.finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL;
+ VkRenderPassCreateInfo rpci = {};
+ rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+ rpci.attachmentCount = 1;
+ rpci.pAttachments = &attach_desc;
+ rpci.subpassCount = 2;
+ rpci.pSubpasses = subpasses;
+
+ // Now create RenderPass and verify no errors
+ VkRenderPass rp;
+ vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp);
+ m_errorMonitor->VerifyNotFound();
+
+ vkDestroyRenderPass(m_device->device(), rp, NULL);
+}
+
+TEST_F(VkPositiveLayerTest, RenderPassBeginSubpassZeroTransitionsApplied) {
+ TEST_DESCRIPTION("Ensure that CmdBeginRenderPass applies the layout transitions for the first subpass");
+
+ m_errorMonitor->ExpectSuccess();
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ // A renderpass with one color attachment.
+ VkAttachmentDescription attachment = {0,
+ VK_FORMAT_R8G8B8A8_UNORM,
+ VK_SAMPLE_COUNT_1_BIT,
+ VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+ VK_ATTACHMENT_STORE_OP_STORE,
+ VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+ VK_ATTACHMENT_STORE_OP_DONT_CARE,
+ VK_IMAGE_LAYOUT_UNDEFINED,
+ VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
+
+ VkAttachmentReference att_ref = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
+
+ VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, &att_ref, nullptr, nullptr, 0, nullptr};
+
+ VkSubpassDependency dep = {0,
+ 0,
+ VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+ VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+ VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
+ VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
+ VK_DEPENDENCY_BY_REGION_BIT};
+
+ VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, &attachment, 1, &subpass, 1, &dep};
+
+ VkResult err;
+ VkRenderPass rp;
+ err = vkCreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
+ ASSERT_VK_SUCCESS(err);
+
+ // A compatible framebuffer.
+ VkImageObj image(m_device);
+ image.Init(32, 32, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+ ASSERT_TRUE(image.initialized());
+
+ VkImageView view = image.targetView(VK_FORMAT_R8G8B8A8_UNORM);
+
+ VkFramebufferCreateInfo fci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 1, &view, 32, 32, 1};
+ VkFramebuffer fb;
+ err = vkCreateFramebuffer(m_device->device(), &fci, nullptr, &fb);
+ ASSERT_VK_SUCCESS(err);
+
+ // Record a single command buffer which issues a pipeline barrier w/
+ // image memory barrier for the attachment. This detects the previously
+ // missing tracking of the subpass layout by throwing a validation error
+ // if it doesn't occur.
+ VkRenderPassBeginInfo rpbi = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, nullptr, rp, fb, {{0, 0}, {32, 32}}, 0, nullptr};
+ m_commandBuffer->begin();
+ vkCmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
+
+ VkImageMemoryBarrier imb = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
+ nullptr,
+ VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
+ VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
+ VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+ VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+ VK_QUEUE_FAMILY_IGNORED,
+ VK_QUEUE_FAMILY_IGNORED,
+ image.handle(),
+ {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1}};
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+ VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1,
+ &imb);
+
+ vkCmdEndRenderPass(m_commandBuffer->handle());
+ m_errorMonitor->VerifyNotFound();
+ m_commandBuffer->end();
+
+ vkDestroyFramebuffer(m_device->device(), fb, nullptr);
+ vkDestroyRenderPass(m_device->device(), rp, nullptr);
+}
+
+TEST_F(VkPositiveLayerTest, RenderPassBeginTransitionsAttachmentUnused) {
+ TEST_DESCRIPTION(
+ "Ensure that layout transitions work correctly without errors, when an attachment reference is VK_ATTACHMENT_UNUSED");
+
+ m_errorMonitor->ExpectSuccess();
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ // A renderpass with no attachments
+ VkAttachmentReference att_ref = {VK_ATTACHMENT_UNUSED, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
+
+ VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, &att_ref, nullptr, nullptr, 0, nullptr};
+
+ VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 0, nullptr, 1, &subpass, 0, nullptr};
+
+ VkRenderPass rp;
+ VkResult err = vkCreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
+ ASSERT_VK_SUCCESS(err);
+
+ // A compatible framebuffer.
+ VkFramebufferCreateInfo fci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 0, nullptr, 32, 32, 1};
+ VkFramebuffer fb;
+ err = vkCreateFramebuffer(m_device->device(), &fci, nullptr, &fb);
+ ASSERT_VK_SUCCESS(err);
+
+ // Record a command buffer which just begins and ends the renderpass. The
+ // bug manifests in BeginRenderPass.
+ VkRenderPassBeginInfo rpbi = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, nullptr, rp, fb, {{0, 0}, {32, 32}}, 0, nullptr};
+ m_commandBuffer->begin();
+ vkCmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
+ vkCmdEndRenderPass(m_commandBuffer->handle());
+ m_errorMonitor->VerifyNotFound();
+ m_commandBuffer->end();
+
+ vkDestroyFramebuffer(m_device->device(), fb, nullptr);
+ vkDestroyRenderPass(m_device->device(), rp, nullptr);
+}
+
+TEST_F(VkPositiveLayerTest, RenderPassBeginStencilLoadOp) {
+ TEST_DESCRIPTION("Create a stencil-only attachment with a LOAD_OP set to CLEAR. stencil[Load|Store]Op used to be ignored.");
+ VkResult result = VK_SUCCESS;
+ ASSERT_NO_FATAL_FAILURE(Init());
+ auto depth_format = FindSupportedDepthStencilFormat(gpu());
+ if (!depth_format) {
+ printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
+ return;
+ }
+ VkImageFormatProperties formatProps;
+ vkGetPhysicalDeviceImageFormatProperties(gpu(), depth_format, VK_IMAGE_TYPE_2D, VK_IMAGE_TILING_OPTIMAL,
+ VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT, 0,
+ &formatProps);
+ if (formatProps.maxExtent.width < 100 || formatProps.maxExtent.height < 100) {
+ printf("%s Image format max extent is too small.\n", kSkipPrefix);
+ return;
+ }
+
+ VkFormat depth_stencil_fmt = depth_format;
+ m_depthStencil->Init(m_device, 100, 100, depth_stencil_fmt,
+ VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT);
+ VkAttachmentDescription att = {};
+ VkAttachmentReference ref = {};
+ att.format = depth_stencil_fmt;
+ att.samples = VK_SAMPLE_COUNT_1_BIT;
+ att.loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
+ att.storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
+ att.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
+ att.stencilStoreOp = VK_ATTACHMENT_STORE_OP_STORE;
+ att.initialLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
+ att.finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
+
+ VkClearValue clear;
+ clear.depthStencil.depth = 1.0;
+ clear.depthStencil.stencil = 0;
+ ref.attachment = 0;
+ ref.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
+
+ VkSubpassDescription subpass = {};
+ subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
+ subpass.flags = 0;
+ subpass.inputAttachmentCount = 0;
+ subpass.pInputAttachments = NULL;
+ subpass.colorAttachmentCount = 0;
+ subpass.pColorAttachments = NULL;
+ subpass.pResolveAttachments = NULL;
+ subpass.pDepthStencilAttachment = &ref;
+ subpass.preserveAttachmentCount = 0;
+ subpass.pPreserveAttachments = NULL;
+
+ VkRenderPass rp;
+ VkRenderPassCreateInfo rp_info = {};
+ rp_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+ rp_info.attachmentCount = 1;
+ rp_info.pAttachments = &att;
+ rp_info.subpassCount = 1;
+ rp_info.pSubpasses = &subpass;
+ result = vkCreateRenderPass(device(), &rp_info, NULL, &rp);
+ ASSERT_VK_SUCCESS(result);
+
+ VkImageView *depthView = m_depthStencil->BindInfo();
+ VkFramebufferCreateInfo fb_info = {};
+ fb_info.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
+ fb_info.pNext = NULL;
+ fb_info.renderPass = rp;
+ fb_info.attachmentCount = 1;
+ fb_info.pAttachments = depthView;
+ fb_info.width = 100;
+ fb_info.height = 100;
+ fb_info.layers = 1;
+ VkFramebuffer fb;
+ result = vkCreateFramebuffer(device(), &fb_info, NULL, &fb);
+ ASSERT_VK_SUCCESS(result);
+
+ VkRenderPassBeginInfo rpbinfo = {};
+ rpbinfo.clearValueCount = 1;
+ rpbinfo.pClearValues = &clear;
+ rpbinfo.pNext = NULL;
+ rpbinfo.renderPass = rp;
+ rpbinfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
+ rpbinfo.renderArea.extent.width = 100;
+ rpbinfo.renderArea.extent.height = 100;
+ rpbinfo.renderArea.offset.x = 0;
+ rpbinfo.renderArea.offset.y = 0;
+ rpbinfo.framebuffer = fb;
+
+ VkFenceObj fence;
+ fence.init(*m_device, VkFenceObj::create_info());
+ ASSERT_TRUE(fence.initialized());
+
+ m_commandBuffer->begin();
+ m_commandBuffer->BeginRenderPass(rpbinfo);
+ m_commandBuffer->EndRenderPass();
+ m_commandBuffer->end();
+ m_commandBuffer->QueueCommandBuffer(fence);
+
+ VkImageObj destImage(m_device);
+ destImage.Init(100, 100, 1, depth_stencil_fmt, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
+ VK_IMAGE_TILING_OPTIMAL, 0);
+ VkImageMemoryBarrier barrier = {};
+ VkImageSubresourceRange range;
+ barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+ barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
+ barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT;
+ barrier.oldLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
+ barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
+ barrier.image = m_depthStencil->handle();
+ range.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
+ range.baseMipLevel = 0;
+ range.levelCount = 1;
+ range.baseArrayLayer = 0;
+ range.layerCount = 1;
+ barrier.subresourceRange = range;
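+ // Wait for the clearing render pass to finish before copying out of the depth/stencil image.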
+ fence.wait(VK_TRUE, UINT64_MAX);
+ VkCommandBufferObj cmdbuf(m_device, m_commandPool);
+ cmdbuf.begin();
+ cmdbuf.PipelineBarrier(VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0, nullptr, 0, nullptr, 1,
+ &barrier);
+ barrier.srcAccessMask = 0;
+ barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+ barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
+ barrier.image = destImage.handle();
+ barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
+ cmdbuf.PipelineBarrier(VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0, nullptr, 0, nullptr, 1,
+ &barrier);
+ VkImageCopy cregion;
+ cregion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
+ cregion.srcSubresource.mipLevel = 0;
+ cregion.srcSubresource.baseArrayLayer = 0;
+ cregion.srcSubresource.layerCount = 1;
+ cregion.srcOffset.x = 0;
+ cregion.srcOffset.y = 0;
+ cregion.srcOffset.z = 0;
+ cregion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
+ cregion.dstSubresource.mipLevel = 0;
+ cregion.dstSubresource.baseArrayLayer = 0;
+ cregion.dstSubresource.layerCount = 1;
+ cregion.dstOffset.x = 0;
+ cregion.dstOffset.y = 0;
+ cregion.dstOffset.z = 0;
+ cregion.extent.width = 100;
+ cregion.extent.height = 100;
+ cregion.extent.depth = 1;
+ cmdbuf.CopyImage(m_depthStencil->handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, destImage.handle(),
+ VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &cregion);
+ cmdbuf.end();
+
+ VkSubmitInfo submit_info;
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.pNext = NULL;
+ submit_info.waitSemaphoreCount = 0;
+ submit_info.pWaitSemaphores = NULL;
+ submit_info.pWaitDstStageMask = NULL;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &cmdbuf.handle();
+ submit_info.signalSemaphoreCount = 0;
+ submit_info.pSignalSemaphores = NULL;
+
+ m_errorMonitor->ExpectSuccess();
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+ m_errorMonitor->VerifyNotFound();
+
+ vkQueueWaitIdle(m_device->m_queue);
+ vkDestroyRenderPass(m_device->device(), rp, nullptr);
+ vkDestroyFramebuffer(m_device->device(), fb, nullptr);
+}
+
+TEST_F(VkPositiveLayerTest, RenderPassBeginInlineAndSecondaryCommandBuffers) {
+ m_errorMonitor->ExpectSuccess();
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ m_commandBuffer->begin();
+
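+ // Record two render pass instances in the same command buffer: one with SECONDARY_COMMAND_BUFFERS contents, one INLINE.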
+ vkCmdBeginRenderPass(m_commandBuffer->handle(), &m_renderPassBeginInfo, VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS);
+ vkCmdEndRenderPass(m_commandBuffer->handle());
+ m_errorMonitor->VerifyNotFound();
+ vkCmdBeginRenderPass(m_commandBuffer->handle(), &m_renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
+ m_errorMonitor->VerifyNotFound();
+ vkCmdEndRenderPass(m_commandBuffer->handle());
+ m_errorMonitor->VerifyNotFound();
+
+ m_commandBuffer->end();
+ m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, RenderPassBeginDepthStencilLayoutTransitionFromUndefined) {
+ TEST_DESCRIPTION(
+ "Create a render pass with depth-stencil attachment where layout transition from UNDEFINED TO DS_READ_ONLY_OPTIMAL is set "
+ "by render pass and verify that transition has correctly occurred at queue submit time with no validation errors.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ auto depth_format = FindSupportedDepthStencilFormat(gpu());
+ if (!depth_format) {
+ printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
+ return;
+ }
+ VkImageFormatProperties format_props;
+ vkGetPhysicalDeviceImageFormatProperties(gpu(), depth_format, VK_IMAGE_TYPE_2D, VK_IMAGE_TILING_OPTIMAL,
+ VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, 0, &format_props);
+ if (format_props.maxExtent.width < 32 || format_props.maxExtent.height < 32) {
+ printf("%s Depth extent too small, RenderPassDepthStencilLayoutTransition skipped.\n", kSkipPrefix);
+ return;
+ }
+
+ m_errorMonitor->ExpectSuccess();
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ // A renderpass with one depth/stencil attachment.
+ VkAttachmentDescription attachment = {0,
+ depth_format,
+ VK_SAMPLE_COUNT_1_BIT,
+ VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+ VK_ATTACHMENT_STORE_OP_DONT_CARE,
+ VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+ VK_ATTACHMENT_STORE_OP_DONT_CARE,
+ VK_IMAGE_LAYOUT_UNDEFINED,
+ VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL};
+
+ VkAttachmentReference att_ref = {0, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL};
+
+ VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 0, nullptr, nullptr, &att_ref, 0, nullptr};
+
+ VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, &attachment, 1, &subpass, 0, nullptr};
+
+ VkRenderPass rp;
+ VkResult err = vkCreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
+ ASSERT_VK_SUCCESS(err);
+ // A compatible ds image.
+ VkImageObj image(m_device);
+ image.Init(32, 32, 1, depth_format, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+ ASSERT_TRUE(image.initialized());
+
+ VkImageViewCreateInfo ivci = {
+ VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
+ nullptr,
+ 0,
+ image.handle(),
+ VK_IMAGE_VIEW_TYPE_2D,
+ depth_format,
+ {VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY,
+ VK_COMPONENT_SWIZZLE_IDENTITY},
+ {VK_IMAGE_ASPECT_DEPTH_BIT, 0, 1, 0, 1},
+ };
+ VkImageView view;
+ err = vkCreateImageView(m_device->device(), &ivci, nullptr, &view);
+ ASSERT_VK_SUCCESS(err);
+
+ VkFramebufferCreateInfo fci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 1, &view, 32, 32, 1};
+ VkFramebuffer fb;
+ err = vkCreateFramebuffer(m_device->device(), &fci, nullptr, &fb);
+ ASSERT_VK_SUCCESS(err);
+
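+ // Beginning and ending the render pass performs the UNDEFINED -> DEPTH_STENCIL_ATTACHMENT_OPTIMAL transition for the attachment.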
+ VkRenderPassBeginInfo rpbi = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, nullptr, rp, fb, {{0, 0}, {32, 32}}, 0, nullptr};
+ m_commandBuffer->begin();
+ vkCmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
+ vkCmdEndRenderPass(m_commandBuffer->handle());
+ m_commandBuffer->end();
+ m_commandBuffer->QueueCommandBuffer(false);
+ m_errorMonitor->VerifyNotFound();
+
+ // Cleanup
+ vkDestroyImageView(m_device->device(), view, NULL);
+ vkDestroyRenderPass(m_device->device(), rp, NULL);
+ vkDestroyFramebuffer(m_device->device(), fb, NULL);
+}
+
+TEST_F(VkLayerTest, DisabledIndependentBlend) {
+ TEST_DESCRIPTION(
+ "Generate INDEPENDENT_BLEND by disabling independent blend and then specifying different blend states for two "
+ "attachments");
+ VkPhysicalDeviceFeatures features = {};
+ features.independentBlend = VK_FALSE;
+ ASSERT_NO_FATAL_FAILURE(Init(&features));
+
+ m_errorMonitor->SetDesiredFailureMsg(
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "Invalid Pipeline CreateInfo: If independent blend feature not enabled, all elements of pAttachments must be identical");
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.AppendDummy();
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ VkPipelineObj pipeline(m_device);
+ // Create a renderPass with two color attachments
+ VkAttachmentReference attachments[2] = {};
+ attachments[0].layout = VK_IMAGE_LAYOUT_GENERAL;
+ attachments[1].attachment = 1;
+ attachments[1].layout = VK_IMAGE_LAYOUT_GENERAL;
+
+ VkSubpassDescription subpass = {};
+ subpass.pColorAttachments = attachments;
+ subpass.colorAttachmentCount = 2;
+
+ VkRenderPassCreateInfo rpci = {};
+ rpci.subpassCount = 1;
+ rpci.pSubpasses = &subpass;
+ rpci.attachmentCount = 2;
+
+ VkAttachmentDescription attach_desc[2] = {};
+ attach_desc[0].format = VK_FORMAT_B8G8R8A8_UNORM;
+ attach_desc[0].samples = VK_SAMPLE_COUNT_1_BIT;
+ attach_desc[0].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+ attach_desc[0].finalLayout = VK_IMAGE_LAYOUT_GENERAL;
+ attach_desc[1].format = VK_FORMAT_B8G8R8A8_UNORM;
+ attach_desc[1].samples = VK_SAMPLE_COUNT_1_BIT;
+ attach_desc[1].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+ attach_desc[1].finalLayout = VK_IMAGE_LAYOUT_GENERAL;
+
+ rpci.pAttachments = attach_desc;
+ rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+
+ VkRenderPass renderpass;
+ vkCreateRenderPass(m_device->device(), &rpci, NULL, &renderpass);
+ VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+ pipeline.AddShader(&vs);
+
+ VkPipelineColorBlendAttachmentState att_state1 = {}, att_state2 = {};
+ att_state1.dstAlphaBlendFactor = VK_BLEND_FACTOR_CONSTANT_COLOR;
+ att_state1.blendEnable = VK_TRUE;
+ att_state2.dstAlphaBlendFactor = VK_BLEND_FACTOR_CONSTANT_COLOR;
+ att_state2.blendEnable = VK_FALSE;
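+ // The two attachment blend states differ (blendEnable), which requires the independentBlend feature disabled above.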
+ pipeline.AddColorAttachment(0, att_state1);
+ pipeline.AddColorAttachment(1, att_state2);
+ pipeline.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderpass);
+ m_errorMonitor->VerifyFound();
+ vkDestroyRenderPass(m_device->device(), renderpass, NULL);
+}
+
+// Is the Pipeline compatible with the expectations of the Renderpass/subpasses?
+TEST_F(VkLayerTest, PipelineRenderpassCompatibility) {
+ TEST_DESCRIPTION(
+ "Create a graphics pipeline that is incompatible with the requirements of its contained Renderpass/subpasses.");
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ VkDescriptorSetObj ds_obj(m_device);
+ ds_obj.AppendDummy();
+ ds_obj.CreateVKDescriptorSet(m_commandBuffer);
+
+ VkShaderObj vs_obj(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+
+ VkPipelineColorBlendAttachmentState att_state1 = {};
+ att_state1.dstAlphaBlendFactor = VK_BLEND_FACTOR_CONSTANT_COLOR;
+ att_state1.blendEnable = VK_TRUE;
+
+ VkRenderpassObj rp_obj(m_device);
+
+ {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkGraphicsPipelineCreateInfo-rasterizerDiscardEnable-00753");
+ VkPipelineObj pipeline(m_device);
+ pipeline.AddShader(&vs_obj);
+ pipeline.AddColorAttachment(0, att_state1);
+
+ VkGraphicsPipelineCreateInfo info = {};
+ pipeline.InitGraphicsPipelineCreateInfo(&info);
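+ // With rasterization enabled and a color attachment in use, a null pColorBlendState should trigger the VUID below.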
+ info.pColorBlendState = nullptr;
+
+ pipeline.CreateVKPipeline(ds_obj.GetPipelineLayout(), rp_obj.handle(), &info);
+ m_errorMonitor->VerifyFound();
+ }
+}
+
+TEST_F(VkLayerTest, FramebufferCreateErrors) {
+ TEST_DESCRIPTION(
+ "Hit errors when attempting to create a framebuffer :\n"
+ " 1. Mismatch between framebuffer & renderPass attachmentCount\n"
+ " 2. Use a color image as depthStencil attachment\n"
+ " 3. Mismatch framebuffer & renderPass attachment formats\n"
+ " 4. Mismatch framebuffer & renderPass attachment #samples\n"
+ " 5. Framebuffer attachment w/ non-1 mip-levels\n"
+ " 6. Framebuffer attachment where dimensions don't match\n"
+ " 7. Framebuffer attachment where dimensions don't match\n"
+ " 8. Framebuffer attachment w/o identity swizzle\n"
+ " 9. framebuffer dimensions exceed physical device limits\n");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-attachmentCount-00876");
+
+ // Create a renderPass with a single color attachment
+ VkAttachmentReference attach = {};
+ attach.layout = VK_IMAGE_LAYOUT_GENERAL;
+ VkSubpassDescription subpass = {};
+ subpass.pColorAttachments = &attach;
+ VkRenderPassCreateInfo rpci = {};
+ rpci.subpassCount = 1;
+ rpci.pSubpasses = &subpass;
+ rpci.attachmentCount = 1;
+ VkAttachmentDescription attach_desc = {};
+ attach_desc.format = VK_FORMAT_B8G8R8A8_UNORM;
+ attach_desc.samples = VK_SAMPLE_COUNT_1_BIT;
+ attach_desc.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
+ rpci.pAttachments = &attach_desc;
+ rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+ VkRenderPass rp;
+ VkResult err = vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp);
+ ASSERT_VK_SUCCESS(err);
+
+ VkImageView ivs[2];
+ ivs[0] = m_renderTargets[0]->targetView(VK_FORMAT_B8G8R8A8_UNORM);
+ ivs[1] = m_renderTargets[0]->targetView(VK_FORMAT_B8G8R8A8_UNORM);
+ VkFramebufferCreateInfo fb_info = {};
+ fb_info.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
+ fb_info.pNext = NULL;
+ fb_info.renderPass = rp;
+ // Set mis-matching attachmentCount
+ fb_info.attachmentCount = 2;
+ fb_info.pAttachments = ivs;
+ fb_info.width = 100;
+ fb_info.height = 100;
+ fb_info.layers = 1;
+
+ VkFramebuffer fb;
+ err = vkCreateFramebuffer(device(), &fb_info, NULL, &fb);
+
+ m_errorMonitor->VerifyFound();
+ if (err == VK_SUCCESS) {
+ vkDestroyFramebuffer(m_device->device(), fb, NULL);
+ }
+ vkDestroyRenderPass(m_device->device(), rp, NULL);
+
+ // Create a renderPass with a depth-stencil attachment created with
+ // IMAGE_USAGE_COLOR_ATTACHMENT
+ // Add our color attachment to pDepthStencilAttachment
+ subpass.pDepthStencilAttachment = &attach;
+ subpass.pColorAttachments = NULL;
+ VkRenderPass rp_ds;
+ err = vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp_ds);
+ ASSERT_VK_SUCCESS(err);
+ // Set correct attachment count, but attachment has COLOR usage bit set
+ fb_info.attachmentCount = 1;
+ fb_info.renderPass = rp_ds;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-pAttachments-02633");
+ err = vkCreateFramebuffer(device(), &fb_info, NULL, &fb);
+
+ m_errorMonitor->VerifyFound();
+ if (err == VK_SUCCESS) {
+ vkDestroyFramebuffer(m_device->device(), fb, NULL);
+ }
+ vkDestroyRenderPass(m_device->device(), rp_ds, NULL);
+
+ // Create new renderpass with alternate attachment format from fb
+ attach_desc.format = VK_FORMAT_R8G8B8A8_UNORM;
+ subpass.pDepthStencilAttachment = NULL;
+ subpass.pColorAttachments = &attach;
+ err = vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp);
+ ASSERT_VK_SUCCESS(err);
+
+ // Cause error due to mis-matched formats between rp & fb
+ // rp attachment 0 now has RGBA8 but corresponding fb attach is BGRA8
+ fb_info.renderPass = rp;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-pAttachments-00880");
+ err = vkCreateFramebuffer(device(), &fb_info, NULL, &fb);
+
+ m_errorMonitor->VerifyFound();
+ if (err == VK_SUCCESS) {
+ vkDestroyFramebuffer(m_device->device(), fb, NULL);
+ }
+ vkDestroyRenderPass(m_device->device(), rp, NULL);
+
+ // Create new renderpass with alternate sample count from fb
+ attach_desc.format = VK_FORMAT_B8G8R8A8_UNORM;
+ attach_desc.samples = VK_SAMPLE_COUNT_4_BIT;
+ err = vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp);
+ ASSERT_VK_SUCCESS(err);
+
+ // Cause error due to mis-matched sample count between rp & fb
+ fb_info.renderPass = rp;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-pAttachments-00881");
+ err = vkCreateFramebuffer(device(), &fb_info, NULL, &fb);
+
+ m_errorMonitor->VerifyFound();
+ if (err == VK_SUCCESS) {
+ vkDestroyFramebuffer(m_device->device(), fb, NULL);
+ }
+
+ vkDestroyRenderPass(m_device->device(), rp, NULL);
+
+ {
+ // Create an image with 2 mip levels.
+ VkImageObj image(m_device);
+ image.Init(128, 128, 2, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+ ASSERT_TRUE(image.initialized());
+
+ // Create an image view with two mip levels.
+ VkImageView view;
+ VkImageViewCreateInfo ivci = {};
+ ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+ ivci.image = image.handle();
+ ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+ ivci.format = VK_FORMAT_B8G8R8A8_UNORM;
+ ivci.subresourceRange.layerCount = 1;
+ ivci.subresourceRange.baseMipLevel = 0;
+ // Set level count to 2 (only 1 is allowed for FB attachment)
+ ivci.subresourceRange.levelCount = 2;
+ ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ err = vkCreateImageView(m_device->device(), &ivci, NULL, &view);
+ ASSERT_VK_SUCCESS(err);
+
+ // Re-create renderpass to have matching sample count
+ attach_desc.samples = VK_SAMPLE_COUNT_1_BIT;
+ err = vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp);
+ ASSERT_VK_SUCCESS(err);
+
+ fb_info.renderPass = rp;
+ fb_info.pAttachments = &view;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-pAttachments-00883");
+ err = vkCreateFramebuffer(device(), &fb_info, NULL, &fb);
+
+ m_errorMonitor->VerifyFound();
+ if (err == VK_SUCCESS) {
+ vkDestroyFramebuffer(m_device->device(), fb, NULL);
+ }
+ vkDestroyImageView(m_device->device(), view, NULL);
+ }
+
+ // Reset the attachments to the original color buffer and grow the FB dimensions beyond the attachment's size
+ fb_info.pAttachments = ivs;
+ fb_info.height = 1024;
+ fb_info.width = 1024;
+ fb_info.layers = 2;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-pAttachments-00882");
+ err = vkCreateFramebuffer(device(), &fb_info, NULL, &fb);
+
+ m_errorMonitor->VerifyFound();
+ if (err == VK_SUCCESS) {
+ vkDestroyFramebuffer(m_device->device(), fb, NULL);
+ }
+
+ {
+ // Create an image with one mip level.
+ VkImageObj image(m_device);
+ image.Init(128, 128, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+ ASSERT_TRUE(image.initialized());
+
+ // Create view attachment with non-identity swizzle
+ VkImageView view;
+ VkImageViewCreateInfo ivci = {};
+ ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+ ivci.image = image.handle();
+ ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+ ivci.format = VK_FORMAT_B8G8R8A8_UNORM;
+ ivci.subresourceRange.layerCount = 1;
+ ivci.subresourceRange.baseMipLevel = 0;
+ ivci.subresourceRange.levelCount = 1;
+ ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ ivci.components.r = VK_COMPONENT_SWIZZLE_G;
+ ivci.components.g = VK_COMPONENT_SWIZZLE_R;
+ ivci.components.b = VK_COMPONENT_SWIZZLE_A;
+ ivci.components.a = VK_COMPONENT_SWIZZLE_B;
+ err = vkCreateImageView(m_device->device(), &ivci, NULL, &view);
+ ASSERT_VK_SUCCESS(err);
+
+ fb_info.pAttachments = &view;
+ fb_info.height = 100;
+ fb_info.width = 100;
+ fb_info.layers = 1;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-pAttachments-00884");
+ err = vkCreateFramebuffer(device(), &fb_info, NULL, &fb);
+
+ m_errorMonitor->VerifyFound();
+ if (err == VK_SUCCESS) {
+ vkDestroyFramebuffer(m_device->device(), fb, NULL);
+ }
+ vkDestroyImageView(m_device->device(), view, NULL);
+ }
+
+ // reset attachment to color attachment
+ fb_info.pAttachments = ivs;
+
+ // Request fb that exceeds max width
+ fb_info.width = m_device->props.limits.maxFramebufferWidth + 1;
+ fb_info.height = 100;
+ fb_info.layers = 1;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-width-00886");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-pAttachments-00882");
+ err = vkCreateFramebuffer(device(), &fb_info, NULL, &fb);
+ m_errorMonitor->VerifyFound();
+ if (err == VK_SUCCESS) {
+ vkDestroyFramebuffer(m_device->device(), fb, NULL);
+ }
+ // and width=0
+ fb_info.width = 0;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-width-00885");
+ err = vkCreateFramebuffer(device(), &fb_info, NULL, &fb);
+ m_errorMonitor->VerifyFound();
+ if (err == VK_SUCCESS) {
+ vkDestroyFramebuffer(m_device->device(), fb, NULL);
+ }
+
+ // Request fb that exceeds max height
+ fb_info.width = 100;
+ fb_info.height = m_device->props.limits.maxFramebufferHeight + 1;
+ fb_info.layers = 1;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-height-00888");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-pAttachments-00882");
+ err = vkCreateFramebuffer(device(), &fb_info, NULL, &fb);
+ m_errorMonitor->VerifyFound();
+ if (err == VK_SUCCESS) {
+ vkDestroyFramebuffer(m_device->device(), fb, NULL);
+ }
+ // and height=0
+ fb_info.height = 0;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-height-00887");
+ err = vkCreateFramebuffer(device(), &fb_info, NULL, &fb);
+ m_errorMonitor->VerifyFound();
+ if (err == VK_SUCCESS) {
+ vkDestroyFramebuffer(m_device->device(), fb, NULL);
+ }
+
+ // Request fb that exceeds max layers
+ fb_info.width = 100;
+ fb_info.height = 100;
+ fb_info.layers = m_device->props.limits.maxFramebufferLayers + 1;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-layers-00890");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-pAttachments-00882");
+ err = vkCreateFramebuffer(device(), &fb_info, NULL, &fb);
+ m_errorMonitor->VerifyFound();
+ if (err == VK_SUCCESS) {
+ vkDestroyFramebuffer(m_device->device(), fb, NULL);
+ }
+ // and layers=0
+ fb_info.layers = 0;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-layers-00889");
+ err = vkCreateFramebuffer(device(), &fb_info, NULL, &fb);
+ m_errorMonitor->VerifyFound();
+ if (err == VK_SUCCESS) {
+ vkDestroyFramebuffer(m_device->device(), fb, NULL);
+ }
+
+ vkDestroyRenderPass(m_device->device(), rp, NULL);
+}
+
+TEST_F(VkLayerTest, PointSizeFailure) {
+ TEST_DESCRIPTION("Create a pipeline using TOPOLOGY_POINT_LIST but do not set PointSize in vertex shader.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Pipeline topology is set to POINT_LIST");
+
+ ASSERT_NO_FATAL_FAILURE(InitViewport());
+
+ // Create VS declaring PointSize but not writing to it
+ static const char NoPointSizeVertShader[] =
+ "#version 450\n"
+ "vec2 vertices[3];\n"
+ "out gl_PerVertex\n"
+ "{\n"
+ " vec4 gl_Position;\n"
+ " float gl_PointSize;\n"
+ "};\n"
+ "void main() {\n"
+ " vertices[0] = vec2(-1.0, -1.0);\n"
+ " vertices[1] = vec2( 1.0, -1.0);\n"
+ " vertices[2] = vec2( 0.0, 1.0);\n"
+ " gl_Position = vec4(vertices[gl_VertexIndex % 3], 0.0, 1.0);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, NoPointSizeVertShader, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj ps(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipelineobj(m_device);
+ pipelineobj.AddDefaultColorAttachment();
+ pipelineobj.AddShader(&vs);
+ pipelineobj.AddShader(&ps);
+
+ // Set Input Assembly to TOPOLOGY POINT LIST
+ VkPipelineInputAssemblyStateCreateInfo ia_state = {};
+ ia_state.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
+ ia_state.topology = VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
+ pipelineobj.SetInputAssembly(&ia_state);
+
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+ m_commandBuffer->begin();
+ m_commandBuffer->ClearAllBuffers(m_renderTargets, m_clear_color, m_depthStencil, m_depth_clear_color, m_stencil_clear_color);
+ m_commandBuffer->PrepareAttachments(m_renderTargets, m_depthStencil);
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+ pipelineobj.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, PointSizeGeomShaderFailure) {
+ TEST_DESCRIPTION(
+ "Create a pipeline using TOPOLOGY_POINT_LIST, set PointSize vertex shader, but not in the final geometry stage.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ if ((!m_device->phy().features().geometryShader) || (!m_device->phy().features().shaderTessellationAndGeometryPointSize)) {
+ printf("%s Device does not support the required geometry shader features; skipped.\n", kSkipPrefix);
+ return;
+ }
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Pipeline topology is set to POINT_LIST");
+
+ ASSERT_NO_FATAL_FAILURE(InitViewport());
+
+ // Create VS declaring PointSize and writing to it
+ static const char PointSizeVertShader[] =
+ "#version 450\n"
+ "vec2 vertices[3];\n"
+ "out gl_PerVertex\n"
+ "{\n"
+ " vec4 gl_Position;\n"
+ " float gl_PointSize;\n"
+ "};\n"
+ "void main() {\n"
+ " vertices[0] = vec2(-1.0, -1.0);\n"
+ " vertices[1] = vec2( 1.0, -1.0);\n"
+ " vertices[2] = vec2( 0.0, 1.0);\n"
+ " gl_Position = vec4(vertices[gl_VertexIndex % 3], 0.0, 1.0);\n"
+ " gl_PointSize = 5.0;\n"
+ "}\n";
+ static char const *gsSource =
+ "#version 450\n"
+ "layout (points) in;\n"
+ "layout (points) out;\n"
+ "layout (max_vertices = 1) out;\n"
+ "void main() {\n"
+ " gl_Position = vec4(1.0, 0.5, 0.5, 0.0);\n"
+ " EmitVertex();\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, PointSizeVertShader, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj gs(m_device, gsSource, VK_SHADER_STAGE_GEOMETRY_BIT, this);
+ VkShaderObj ps(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipelineobj(m_device);
+ pipelineobj.AddDefaultColorAttachment();
+ pipelineobj.AddShader(&vs);
+ pipelineobj.AddShader(&gs);
+ pipelineobj.AddShader(&ps);
+
+ // Set Input Assembly to TOPOLOGY POINT LIST
+ VkPipelineInputAssemblyStateCreateInfo ia_state = {};
+ ia_state.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
+ ia_state.topology = VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
+ pipelineobj.SetInputAssembly(&ia_state);
+
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+ m_commandBuffer->begin();
+ m_commandBuffer->ClearAllBuffers(m_renderTargets, m_clear_color, m_depthStencil, m_depth_clear_color, m_stencil_clear_color);
+ m_commandBuffer->PrepareAttachments(m_renderTargets, m_depthStencil);
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+ pipelineobj.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, DynamicDepthBiasNotBound) {
+ TEST_DESCRIPTION(
+ "Run a simple draw calls to validate failure when Depth Bias dynamic state is required but not correctly bound.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ // Dynamic depth bias
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Dynamic depth bias state not set for this command buffer");
+ VKTriangleTest(BsoFailDepthBias);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, DynamicLineWidthNotBound) {
+ TEST_DESCRIPTION(
+ "Run a simple draw calls to validate failure when Line Width dynamic state is required but not correctly bound.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ // Dynamic line width
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Dynamic line width state not set for this command buffer");
+ VKTriangleTest(BsoFailLineWidth);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, DynamicViewportNotBound) {
+ TEST_DESCRIPTION(
+ "Run a simple draw calls to validate failure when Viewport dynamic state is required but not correctly bound.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ // Dynamic viewport state
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "Dynamic viewport(s) 0 are used by pipeline state object, but were not provided");
+ VKTriangleTest(BsoFailViewport);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, DynamicScissorNotBound) {
+ TEST_DESCRIPTION("Run a simple draw calls to validate failure when Scissor dynamic state is required but not correctly bound.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ // Dynamic scissor state
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "Dynamic scissor(s) 0 are used by pipeline state object, but were not provided");
+ VKTriangleTest(BsoFailScissor);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, DynamicBlendConstantsNotBound) {
+ TEST_DESCRIPTION(
+ "Run a simple draw calls to validate failure when Blend Constants dynamic state is required but not correctly bound.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ // Dynamic blend constant state
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "Dynamic blend constants state not set for this command buffer");
+ VKTriangleTest(BsoFailBlend);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, DynamicDepthBoundsNotBound) {
+ TEST_DESCRIPTION(
+ "Run a simple draw calls to validate failure when Depth Bounds dynamic state is required but not correctly bound.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ if (!m_device->phy().features().depthBounds) {
+ printf("%s Device does not support depthBounds test; skipped.\n", kSkipPrefix);
+ return;
+ }
+ // Dynamic depth bounds
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "Dynamic depth bounds state not set for this command buffer");
+ VKTriangleTest(BsoFailDepthBounds);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, DynamicStencilReadNotBound) {
+ TEST_DESCRIPTION(
+ "Run a simple draw calls to validate failure when Stencil Read dynamic state is required but not correctly bound.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ // Dynamic stencil read mask
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "Dynamic stencil read mask state not set for this command buffer");
+ VKTriangleTest(BsoFailStencilReadMask);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, DynamicStencilWriteNotBound) {
+ TEST_DESCRIPTION(
+ "Run a simple draw calls to validate failure when Stencil Write dynamic state is required but not correctly bound.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ // Dynamic stencil write mask
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "Dynamic stencil write mask state not set for this command buffer");
+ VKTriangleTest(BsoFailStencilWriteMask);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, DynamicStencilRefNotBound) {
+ TEST_DESCRIPTION(
+ "Run a simple draw calls to validate failure when Stencil Ref dynamic state is required but not correctly bound.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ // Dynamic stencil reference
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "Dynamic stencil reference state not set for this command buffer");
+ VKTriangleTest(BsoFailStencilReference);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, IndexBufferNotBound) {
+ TEST_DESCRIPTION("Run an indexed draw call without an index buffer bound.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "Index buffer object not bound to this command buffer when Indexed ");
+ VKTriangleTest(BsoFailIndexBuffer);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, IndexBufferBadSize) {
+ TEST_DESCRIPTION("Run indexed draw call with bad index buffer size.");
+
+ ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "vkCmdDrawIndexed() index size ");
+ VKTriangleTest(BsoFailIndexBufferBadSize);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, IndexBufferBadOffset) {
+ TEST_DESCRIPTION("Run an indexed draw call with a bad index buffer offset.");
+
+ ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "vkCmdDrawIndexed() index size ");
+ VKTriangleTest(BsoFailIndexBufferBadOffset);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, IndexBufferBadBindSize) {
+ TEST_DESCRIPTION("Bind an index buffer with a size greater than the index buffer.");
+
+ ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "vkCmdDrawIndexed() index size ");
+ VKTriangleTest(BsoFailIndexBufferBadMapSize);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, IndexBufferBadBindOffset) {
+ TEST_DESCRIPTION("Bind an index buffer with an offset greater than the size of the index buffer.");
+
+ ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "vkCmdDrawIndexed() index size ");
+ VKTriangleTest(BsoFailIndexBufferBadMapOffset);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CommandBufferTwoSubmits) {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "was begun w/ VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT set, but has been submitted");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitViewport());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ // By default the framework creates the command buffer with
+ // VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT set
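+ // A ONE_TIME_SUBMIT command buffer may be submitted exactly once per recording, so the second vkQueueSubmit below should be flagged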
+ m_commandBuffer->begin();
+ m_commandBuffer->ClearAllBuffers(m_renderTargets, m_clear_color, nullptr, m_depth_clear_color, m_stencil_clear_color);
+ m_commandBuffer->end();
+
+ // Bypass framework since it does the waits automatically
+ VkResult err = VK_SUCCESS;
+ VkSubmitInfo submit_info;
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.pNext = NULL;
+ submit_info.waitSemaphoreCount = 0;
+ submit_info.pWaitSemaphores = NULL;
+ submit_info.pWaitDstStageMask = NULL;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &m_commandBuffer->handle();
+ submit_info.signalSemaphoreCount = 0;
+ submit_info.pSignalSemaphores = NULL;
+
+ err = vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+ ASSERT_VK_SUCCESS(err);
+ vkQueueWaitIdle(m_device->m_queue);
+
+ // Cause validation error by re-submitting cmd buffer that should only be
+ // submitted once
+ err = vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+ vkQueueWaitIdle(m_device->m_queue);
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, AllocDescriptorFromEmptyPool) {
+ TEST_DESCRIPTION("Attempt to allocate more sets and descriptors than descriptor pool has available.");
+ VkResult err;
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ // This test is valid for Vulkan 1.0 only -- skip if device has an API version greater than 1.0.
+ if (m_device->props.apiVersion >= VK_API_VERSION_1_1) {
+ printf("%s Device has apiVersion greater than 1.0 -- skipping Descriptor Set checks.\n", kSkipPrefix);
+ return;
+ }
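+ // With Vulkan 1.1 (or VK_KHR_maintenance1), exceeding the pool's capacity returns VK_ERROR_OUT_OF_POOL_MEMORY
+ // rather than being invalid usage, so the VUs below only apply to 1.0 devices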
+
+ // Create a pool holding only Sampler descriptors, then try to over-allocate
+ // sets and a Uniform Buffer descriptor from it
+ VkDescriptorPoolSize ds_type_count = {};
+ ds_type_count.type = VK_DESCRIPTOR_TYPE_SAMPLER;
+ ds_type_count.descriptorCount = 2;
+
+ VkDescriptorPoolCreateInfo ds_pool_ci = {};
+ ds_pool_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
+ ds_pool_ci.pNext = NULL;
+ ds_pool_ci.flags = 0;
+ ds_pool_ci.maxSets = 1;
+ ds_pool_ci.poolSizeCount = 1;
+ ds_pool_ci.pPoolSizes = &ds_type_count;
+
+ VkDescriptorPool ds_pool;
+ err = vkCreateDescriptorPool(m_device->device(), &ds_pool_ci, NULL, &ds_pool);
+ ASSERT_VK_SUCCESS(err);
+
+ VkDescriptorSetLayoutBinding dsl_binding_samp = {};
+ dsl_binding_samp.binding = 0;
+ dsl_binding_samp.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
+ dsl_binding_samp.descriptorCount = 1;
+ dsl_binding_samp.stageFlags = VK_SHADER_STAGE_ALL;
+ dsl_binding_samp.pImmutableSamplers = NULL;
+
+ const VkDescriptorSetLayoutObj ds_layout_samp(m_device, {dsl_binding_samp});
+
+ // Try to allocate 2 sets when pool only has 1 set
+ VkDescriptorSet descriptor_sets[2];
+ VkDescriptorSetLayout set_layouts[2] = {ds_layout_samp.handle(), ds_layout_samp.handle()};
+ VkDescriptorSetAllocateInfo alloc_info = {};
+ alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
+ alloc_info.descriptorSetCount = 2;
+ alloc_info.descriptorPool = ds_pool;
+ alloc_info.pSetLayouts = set_layouts;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkDescriptorSetAllocateInfo-descriptorSetCount-00306");
+ err = vkAllocateDescriptorSets(m_device->device(), &alloc_info, descriptor_sets);
+ m_errorMonitor->VerifyFound();
+
+ alloc_info.descriptorSetCount = 1;
+ // Create layout w/ descriptor type not available in pool
+ VkDescriptorSetLayoutBinding dsl_binding = {};
+ dsl_binding.binding = 0;
+ dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+ dsl_binding.descriptorCount = 1;
+ dsl_binding.stageFlags = VK_SHADER_STAGE_ALL;
+ dsl_binding.pImmutableSamplers = NULL;
+
+ const VkDescriptorSetLayoutObj ds_layout_ub(m_device, {dsl_binding});
+
+ VkDescriptorSet descriptor_set;
+ alloc_info.descriptorSetCount = 1;
+ alloc_info.pSetLayouts = &ds_layout_ub.handle();
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDescriptorSetAllocateInfo-descriptorPool-00307");
+ err = vkAllocateDescriptorSets(m_device->device(), &alloc_info, &descriptor_set);
+
+ m_errorMonitor->VerifyFound();
+
+ vkDestroyDescriptorPool(m_device->device(), ds_pool, NULL);
+}
+
+TEST_F(VkLayerTest, FreeDescriptorFromOneShotPool) {
+ VkResult err;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkFreeDescriptorSets-descriptorPool-00312");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkDescriptorPoolSize ds_type_count = {};
+ ds_type_count.type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+ ds_type_count.descriptorCount = 1;
+
+ VkDescriptorPoolCreateInfo ds_pool_ci = {};
+ ds_pool_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
+ ds_pool_ci.pNext = NULL;
+ ds_pool_ci.maxSets = 1;
+ ds_pool_ci.poolSizeCount = 1;
+ ds_pool_ci.flags = 0;
+ // Not specifying VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT means
+ // the app can only call vkResetDescriptorPool on this pool.
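+ // Without that flag, the vkFreeDescriptorSets call below is invalid (VUID-vkFreeDescriptorSets-descriptorPool-00312)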
+ ds_pool_ci.pPoolSizes = &ds_type_count;
+
+ VkDescriptorPool ds_pool;
+ err = vkCreateDescriptorPool(m_device->device(), &ds_pool_ci, NULL, &ds_pool);
+ ASSERT_VK_SUCCESS(err);
+
+ VkDescriptorSetLayoutBinding dsl_binding = {};
+ dsl_binding.binding = 0;
+ dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+ dsl_binding.descriptorCount = 1;
+ dsl_binding.stageFlags = VK_SHADER_STAGE_ALL;
+ dsl_binding.pImmutableSamplers = NULL;
+
+ const VkDescriptorSetLayoutObj ds_layout(m_device, {dsl_binding});
+
+ VkDescriptorSet descriptorSet;
+ VkDescriptorSetAllocateInfo alloc_info = {};
+ alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
+ alloc_info.descriptorSetCount = 1;
+ alloc_info.descriptorPool = ds_pool;
+ alloc_info.pSetLayouts = &ds_layout.handle();
+ err = vkAllocateDescriptorSets(m_device->device(), &alloc_info, &descriptorSet);
+ ASSERT_VK_SUCCESS(err);
+
+ err = vkFreeDescriptorSets(m_device->device(), ds_pool, 1, &descriptorSet);
+ m_errorMonitor->VerifyFound();
+
+ vkDestroyDescriptorPool(m_device->device(), ds_pool, NULL);
+}
+
+TEST_F(VkLayerTest, InvalidDescriptorPool) {
+ // Attempt to reset a Descriptor Pool with a bad object handle.
+ // ObjectTracker should catch this.
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkResetDescriptorPool-descriptorPool-parameter");
+ uint64_t fake_pool_handle = 0xbaad6001;
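+ // Reinterpret an arbitrary integer as a handle so the object tracker sees a pool it never created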
+ VkDescriptorPool bad_pool = reinterpret_cast<VkDescriptorPool &>(fake_pool_handle);
+ vkResetDescriptorPool(device(), bad_pool, 0);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, InvalidDescriptorSet) {
+ // Attempt to bind an invalid Descriptor Set to a valid Command Buffer
+ // ObjectTracker should catch this.
+ // Create a valid cmd buffer
+ // call vkCmdBindDescriptorSets with an invalid Descriptor Set
+
+ uint64_t fake_set_handle = 0xbaad6001;
+ VkDescriptorSet bad_set = reinterpret_cast<VkDescriptorSet &>(fake_set_handle);
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBindDescriptorSets-pDescriptorSets-parameter");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ VkDescriptorSetLayoutBinding layout_binding = {};
+ layout_binding.binding = 0;
+ layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+ layout_binding.descriptorCount = 1;
+ layout_binding.stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
+ layout_binding.pImmutableSamplers = NULL;
+
+ const VkDescriptorSetLayoutObj descriptor_set_layout(m_device, {layout_binding});
+
+ const VkPipelineLayoutObj pipeline_layout(DeviceObj(), {&descriptor_set_layout});
+
+ m_commandBuffer->begin();
+ vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1, &bad_set, 0,
+ NULL);
+ m_errorMonitor->VerifyFound();
+ m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, InvalidDescriptorSetLayout) {
+ // Attempt to create a Pipeline Layout with an invalid Descriptor Set Layout.
+ // ObjectTracker should catch this.
+ uint64_t fake_layout_handle = 0xbaad6001;
+ VkDescriptorSetLayout bad_layout = reinterpret_cast<VkDescriptorSetLayout &>(fake_layout_handle);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-parameter");
+ ASSERT_NO_FATAL_FAILURE(Init());
+ VkPipelineLayout pipeline_layout;
+ VkPipelineLayoutCreateInfo plci = {};
+ plci.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
+ plci.pNext = NULL;
+ plci.setLayoutCount = 1;
+ plci.pSetLayouts = &bad_layout;
+ vkCreatePipelineLayout(device(), &plci, NULL, &pipeline_layout);
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, WriteDescriptorSetIntegrityCheck) {
+ TEST_DESCRIPTION(
+ "This test verifies some requirements of chapter 13.2.3 of the Vulkan Spec "
+ "1) A uniform buffer update must reference a valid buffer. "
+ "2) When using an array of descriptors in a single WriteDescriptorSet, the descriptor types and stageFlags "
+ "must all be the same. "
+ "3) Immutable Sampler state must match across descriptors. "
+ "4) Sampled image descriptors must be in the required image layouts.");
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-descriptorType-00324");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
+ VkSampler sampler;
+ VkResult err = vkCreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
+ ASSERT_VK_SUCCESS(err);
+
+ OneOffDescriptorSet::Bindings bindings = {
+ {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, NULL},
+ {1, VK_DESCRIPTOR_TYPE_SAMPLER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, NULL},
+ {2, VK_DESCRIPTOR_TYPE_SAMPLER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, static_cast<VkSampler *>(&sampler)},
+ {3, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1, VK_SHADER_STAGE_FRAGMENT_BIT, NULL}};
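+ // Bindings are arranged so that multi-descriptor writes roll over into a neighboring binding with
+ // mismatched stageFlags or immutable-sampler state (used by checks 2 and 3 below)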
+ OneOffDescriptorSet descriptor_set(m_device, bindings);
+ ASSERT_TRUE(descriptor_set.Initialized());
+
+ VkWriteDescriptorSet descriptor_write = {};
+ descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ descriptor_write.dstSet = descriptor_set.set_;
+ descriptor_write.dstBinding = 0;
+ descriptor_write.descriptorCount = 1;
+ descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+
+ // 1) pBufferInfo is intentionally left null here, making the uniform buffer update invalid
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+ m_errorMonitor->VerifyFound();
+
+ // Create a buffer to update the descriptor with
+ uint32_t qfi = 0;
+ VkBufferCreateInfo buffCI = {};
+ buffCI.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+ buffCI.size = 1024;
+ buffCI.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
+ buffCI.queueFamilyIndexCount = 1;
+ buffCI.pQueueFamilyIndices = &qfi;
+
+ VkBuffer dyub;
+ err = vkCreateBuffer(m_device->device(), &buffCI, NULL, &dyub);
+ ASSERT_VK_SUCCESS(err);
+
+ VkDeviceMemory mem;
+ VkMemoryRequirements mem_reqs;
+ vkGetBufferMemoryRequirements(m_device->device(), dyub, &mem_reqs);
+
+ VkMemoryAllocateInfo mem_alloc_info = {};
+ mem_alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ mem_alloc_info.allocationSize = mem_reqs.size;
+ m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &mem_alloc_info, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
+ err = vkAllocateMemory(m_device->device(), &mem_alloc_info, NULL, &mem);
+ ASSERT_VK_SUCCESS(err);
+
+ err = vkBindBufferMemory(m_device->device(), dyub, mem, 0);
+ ASSERT_VK_SUCCESS(err);
+
+ VkDescriptorBufferInfo buffInfo[2] = {};
+ buffInfo[0].buffer = dyub;
+ buffInfo[0].offset = 0;
+ buffInfo[0].range = 1024;
+ buffInfo[1].buffer = dyub;
+ buffInfo[1].offset = 0;
+ buffInfo[1].range = 1024;
+ descriptor_write.pBufferInfo = buffInfo;
+ descriptor_write.descriptorCount = 2;
+
+ // 2) The stageFlags don't match between the first and second descriptor
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-dstArrayElement-00321");
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+ m_errorMonitor->VerifyFound();
+
+ // 3) The second binding has null pImmutableSamplers while
+ // the third binding contains an immutable sampler
+ descriptor_write.dstBinding = 1;
+ descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
+
+ // Point pImageInfo at valid data to avoid complaints about it being missing
+ VkDescriptorImageInfo imageInfo = {};
+ imageInfo.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+ descriptor_write.pImageInfo = &imageInfo;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-dstArrayElement-00321");
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+ m_errorMonitor->VerifyFound();
+
+ // 4) That sampled image descriptors have required layouts
+ // Create images to update the descriptor with
+ VkImageObj image(m_device);
+ const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
+ image.Init(32, 32, 1, tex_format, VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+ ASSERT_TRUE(image.initialized());
+
+ // Attempt write with incorrect layout for sampled descriptor
+ imageInfo.sampler = VK_NULL_HANDLE;
+ imageInfo.imageView = image.targetView(tex_format);
+ imageInfo.imageLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+
+ descriptor_write.dstBinding = 3;
+ descriptor_write.descriptorCount = 1;
+ descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-descriptorType-01403");
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+ m_errorMonitor->VerifyFound();
+
+ vkDestroyBuffer(m_device->device(), dyub, NULL);
+ vkFreeMemory(m_device->device(), mem, NULL);
+ vkDestroySampler(m_device->device(), sampler, NULL);
+}
+
+TEST_F(VkLayerTest, WriteDescriptorSetConsecutiveUpdates) {
+ TEST_DESCRIPTION(
+ "Verify that updates rolling over into the next binding are tracked correctly, by destroying a buffer known to be "
+ "used by such a consecutive update and checking that an error is flagged at submit time.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitViewport());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ OneOffDescriptorSet ds(m_device, {
+ {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 2, VK_SHADER_STAGE_ALL, nullptr},
+ {1, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+ });
+
+ const VkPipelineLayoutObj pipeline_layout(m_device, {&ds.layout_});
+
+ uint32_t qfi = 0;
+ VkBufferCreateInfo bci = {};
+ bci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+ bci.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
+ bci.size = 2048;
+ bci.queueFamilyIndexCount = 1;
+ bci.pQueueFamilyIndices = &qfi;
+ VkBufferObj buffer0;
+ buffer0.init(*m_device, bci);
+ VkPipelineObj pipe(m_device);
+ { // Scope 2nd buffer to cause early destruction
+ VkBufferObj buffer1;
+ bci.size = 1024;
+ buffer1.init(*m_device, bci);
+
+ VkDescriptorBufferInfo buffer_info[3] = {};
+ buffer_info[0].buffer = buffer0.handle();
+ buffer_info[0].offset = 0;
+ buffer_info[0].range = 1024;
+ buffer_info[1].buffer = buffer0.handle();
+ buffer_info[1].offset = 1024;
+ buffer_info[1].range = 1024;
+ buffer_info[2].buffer = buffer1.handle();
+ buffer_info[2].offset = 0;
+ buffer_info[2].range = 1024;
+
+ VkWriteDescriptorSet descriptor_write = {};
+ descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ descriptor_write.dstSet = ds.set_; // descriptor_set;
+ descriptor_write.dstBinding = 0;
+ descriptor_write.descriptorCount = 3;
+ descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+ descriptor_write.pBufferInfo = buffer_info;
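+ // Writing 3 descriptors starting at binding 0 (which holds only 2) rolls the last descriptor over into binding 1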
+
+ // Update descriptor
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+
+ // Create PSO that uses the uniform buffers
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "void main(){\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) out vec4 x;\n"
+ "layout(set=0) layout(binding=0) uniform foo { int x; int y; } bar;\n"
+ "layout(set=0) layout(binding=1) uniform blah { int x; } duh;\n"
+ "void main(){\n"
+ " x = vec4(duh.x, bar.y, bar.x, 1);\n"
+ "}\n";
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+ pipe.AddDefaultColorAttachment();
+
+ VkResult err = pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());
+ ASSERT_VK_SUCCESS(err);
+
+ m_commandBuffer->begin();
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
+ vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+ vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
+ &ds.set_, 0, nullptr);
+
+ VkViewport viewport = {0, 0, 16, 16, 0, 1};
+ vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+ VkRect2D scissor = {{0, 0}, {16, 16}};
+ vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
+ vkCmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
+ vkCmdEndRenderPass(m_commandBuffer->handle());
+ m_commandBuffer->end();
+ }
+ // buffer1 just went out of scope and was destroyed along with its memory
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " that is invalid because bound Buffer ");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " that is invalid because bound DeviceMemory ");
+ VkSubmitInfo submit_info = {};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &m_commandBuffer->handle();
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineLayoutExceedsSetLimit) {
+ TEST_DESCRIPTION("Attempt to create a pipeline layout using more than the physical limit of SetLayouts.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ VkDescriptorSetLayoutBinding layout_binding = {};
+ layout_binding.binding = 0;
+ layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+ layout_binding.descriptorCount = 1;
+ layout_binding.stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
+ layout_binding.pImmutableSamplers = NULL;
+
+ VkDescriptorSetLayoutCreateInfo ds_layout_ci = {};
+ ds_layout_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
+ ds_layout_ci.bindingCount = 1;
+ ds_layout_ci.pBindings = &layout_binding;
+ VkDescriptorSetLayout ds_layout = {};
+ VkResult err = vkCreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
+ ASSERT_VK_SUCCESS(err);
+
+ // Create an array of DSLs, one larger than the physical limit
+ const auto excess_layouts = 1 + m_device->phy().properties().limits.maxBoundDescriptorSets;
+ std::vector<VkDescriptorSetLayout> dsl_array(excess_layouts, ds_layout);
+
+ VkPipelineLayoutCreateInfo pipeline_layout_ci = {};
+ pipeline_layout_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
+ pipeline_layout_ci.pNext = NULL;
+ pipeline_layout_ci.setLayoutCount = excess_layouts;
+ pipeline_layout_ci.pSetLayouts = dsl_array.data();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-setLayoutCount-00286");
+ VkPipelineLayout pipeline_layout = VK_NULL_HANDLE;
+ err = vkCreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
+ m_errorMonitor->VerifyFound();
+
+ // Clean up
+ vkDestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
+}
+
+TEST_F(VkLayerTest, CreatePipelineLayoutExcessPerStageDescriptors) {
+ TEST_DESCRIPTION("Attempt to create a pipeline layout where total descriptors exceed per-stage limits");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ uint32_t max_uniform_buffers = m_device->phy().properties().limits.maxPerStageDescriptorUniformBuffers;
+ uint32_t max_storage_buffers = m_device->phy().properties().limits.maxPerStageDescriptorStorageBuffers;
+ uint32_t max_sampled_images = m_device->phy().properties().limits.maxPerStageDescriptorSampledImages;
+ uint32_t max_storage_images = m_device->phy().properties().limits.maxPerStageDescriptorStorageImages;
+ uint32_t max_samplers = m_device->phy().properties().limits.maxPerStageDescriptorSamplers;
+ uint32_t max_combined = std::min(max_samplers, max_sampled_images);
+ uint32_t max_input_attachments = m_device->phy().properties().limits.maxPerStageDescriptorInputAttachments;
+
+ uint32_t sum_dyn_uniform_buffers = m_device->phy().properties().limits.maxDescriptorSetUniformBuffersDynamic;
+ uint32_t sum_uniform_buffers = m_device->phy().properties().limits.maxDescriptorSetUniformBuffers;
+ uint32_t sum_dyn_storage_buffers = m_device->phy().properties().limits.maxDescriptorSetStorageBuffersDynamic;
+ uint32_t sum_storage_buffers = m_device->phy().properties().limits.maxDescriptorSetStorageBuffers;
+ uint32_t sum_sampled_images = m_device->phy().properties().limits.maxDescriptorSetSampledImages;
+ uint32_t sum_storage_images = m_device->phy().properties().limits.maxDescriptorSetStorageImages;
+ uint32_t sum_samplers = m_device->phy().properties().limits.maxDescriptorSetSamplers;
+ uint32_t sum_input_attachments = m_device->phy().properties().limits.maxDescriptorSetInputAttachments;
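+ // Each block below builds a set layout that exceeds one per-stage limit and expects the matching
+ // pipeline layout error; additional messages are registered when the all-stages sums are exceeded as well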
+
+ // Devices that report UINT32_MAX for any of these limits can't run this test
+ if (UINT32_MAX == std::max({max_uniform_buffers, max_storage_buffers, max_sampled_images, max_storage_images, max_samplers})) {
+ printf("%s Physical device limits report as 2^32-1. Skipping test.\n", kSkipPrefix);
+ return;
+ }
+
+ VkDescriptorSetLayoutBinding dslb = {};
+ std::vector<VkDescriptorSetLayoutBinding> dslb_vec = {};
+ VkDescriptorSetLayout ds_layout = VK_NULL_HANDLE;
+ VkDescriptorSetLayoutCreateInfo ds_layout_ci = {};
+ ds_layout_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
+ ds_layout_ci.pNext = NULL;
+ VkPipelineLayoutCreateInfo pipeline_layout_ci = {};
+ pipeline_layout_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
+ pipeline_layout_ci.pNext = NULL;
+ pipeline_layout_ci.setLayoutCount = 1;
+ pipeline_layout_ci.pSetLayouts = &ds_layout;
+ VkPipelineLayout pipeline_layout = VK_NULL_HANDLE;
+
+ // VU 0fe0023e - too many sampler type descriptors in fragment stage
+ dslb_vec.clear();
+ dslb.binding = 0;
+ dslb.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
+ dslb.descriptorCount = max_samplers;
+ dslb.stageFlags = VK_SHADER_STAGE_ALL_GRAPHICS;
+ dslb.pImmutableSamplers = NULL;
+ dslb_vec.push_back(dslb);
+ dslb.binding = 1;
+ dslb.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+ dslb.descriptorCount = max_combined;
+ dslb.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
+ dslb_vec.push_back(dslb);
+
+ ds_layout_ci.bindingCount = dslb_vec.size();
+ ds_layout_ci.pBindings = dslb_vec.data();
+ VkResult err = vkCreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
+ ASSERT_VK_SUCCESS(err);
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00287");
+ if ((max_samplers + max_combined) > sum_samplers) {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01677"); // expect all-stages sum too
+ }
+ if (max_combined > sum_sampled_images) {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01682"); // expect all-stages sum too
+ }
+ err = vkCreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
+ m_errorMonitor->VerifyFound();
+ vkDestroyPipelineLayout(m_device->device(), pipeline_layout, NULL); // Unnecessary but harmless if test passed
+ pipeline_layout = VK_NULL_HANDLE;
+ vkDestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
+
+ // VU 0fe00240 - too many uniform buffer type descriptors in vertex stage
+ dslb_vec.clear();
+ dslb.binding = 0;
+ dslb.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+ dslb.descriptorCount = max_uniform_buffers + 1;
+ dslb.stageFlags = VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT;
+ dslb_vec.push_back(dslb);
+ dslb.binding = 1;
+ dslb.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
+ dslb.stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
+ dslb_vec.push_back(dslb);
+
+ ds_layout_ci.bindingCount = dslb_vec.size();
+ ds_layout_ci.pBindings = dslb_vec.data();
+ err = vkCreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
+ ASSERT_VK_SUCCESS(err);
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00288");
+ if (dslb.descriptorCount > sum_uniform_buffers) {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01678"); // expect all-stages sum too
+ }
+ if (dslb.descriptorCount > sum_dyn_uniform_buffers) {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01679"); // expect all-stages sum too
+ }
+ err = vkCreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
+ m_errorMonitor->VerifyFound();
+ vkDestroyPipelineLayout(m_device->device(), pipeline_layout, NULL); // Unnecessary but harmless if test passed
+ pipeline_layout = VK_NULL_HANDLE;
+ vkDestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
+
+ // VU 0fe00242 - too many storage buffer type descriptors in compute stage
+ dslb_vec.clear();
+ dslb.binding = 0;
+ dslb.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
+ dslb.descriptorCount = max_storage_buffers + 1;
+ dslb.stageFlags = VK_SHADER_STAGE_ALL;
+ dslb_vec.push_back(dslb);
+ dslb.binding = 1;
+ dslb.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC;
+ dslb_vec.push_back(dslb);
+ dslb.binding = 2;
+ dslb.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
+ dslb.stageFlags = VK_SHADER_STAGE_COMPUTE_BIT;
+ dslb_vec.push_back(dslb);
+
+ ds_layout_ci.bindingCount = dslb_vec.size();
+ ds_layout_ci.pBindings = dslb_vec.data();
+ err = vkCreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
+ ASSERT_VK_SUCCESS(err);
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00289");
+ if (dslb.descriptorCount > sum_dyn_storage_buffers) {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01681"); // expect all-stages sum too
+ }
+ if (dslb_vec[0].descriptorCount + dslb_vec[2].descriptorCount > sum_storage_buffers) {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01680"); // expect all-stages sum too
+ }
+ err = vkCreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
+ m_errorMonitor->VerifyFound();
+ vkDestroyPipelineLayout(m_device->device(), pipeline_layout, NULL); // Unnecessary but harmless if test passed
+ pipeline_layout = VK_NULL_HANDLE;
+ vkDestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
+
+ // VU 0fe00244 - too many sampled image type descriptors in multiple stages
+ dslb_vec.clear();
+ dslb.binding = 0;
+ dslb.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
+ dslb.descriptorCount = max_sampled_images;
+ dslb.stageFlags = VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT;
+ dslb_vec.push_back(dslb);
+ dslb.binding = 1;
+ dslb.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
+ dslb.stageFlags = VK_SHADER_STAGE_ALL_GRAPHICS;
+ dslb_vec.push_back(dslb);
+ dslb.binding = 2;
+ dslb.descriptorCount = max_combined;
+ dslb.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+ dslb_vec.push_back(dslb);
+
+ ds_layout_ci.bindingCount = dslb_vec.size();
+ ds_layout_ci.pBindings = dslb_vec.data();
+ err = vkCreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
+ ASSERT_VK_SUCCESS(err);
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00290");
+ if (max_combined + 2 * max_sampled_images > sum_sampled_images) {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01682"); // expect all-stages sum too
+ }
+ if (max_combined > sum_samplers) {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01677"); // expect all-stages sum too
+ }
+ err = vkCreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
+ m_errorMonitor->VerifyFound();
+ vkDestroyPipelineLayout(m_device->device(), pipeline_layout, NULL); // Unnecessary but harmless if test passed
+ pipeline_layout = VK_NULL_HANDLE;
+ vkDestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
+
+ // VU 0fe00246 - too many storage image type descriptors in fragment stage
+ dslb_vec.clear();
+ dslb.binding = 0;
+ dslb.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
+ dslb.descriptorCount = 1 + (max_storage_images / 2);
+ dslb.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
+ dslb_vec.push_back(dslb);
+ dslb.binding = 1;
+ dslb.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER;
+ dslb.stageFlags = VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT | VK_SHADER_STAGE_COMPUTE_BIT;
+ dslb_vec.push_back(dslb);
+
+ ds_layout_ci.bindingCount = dslb_vec.size();
+ ds_layout_ci.pBindings = dslb_vec.data();
+ err = vkCreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
+ ASSERT_VK_SUCCESS(err);
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00291");
+ if (2 * dslb.descriptorCount > sum_storage_images) {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01683"); // expect all-stages sum too
+ }
+ err = vkCreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
+ m_errorMonitor->VerifyFound();
+ vkDestroyPipelineLayout(m_device->device(), pipeline_layout, NULL); // Unnecessary but harmless if test passed
+ pipeline_layout = VK_NULL_HANDLE;
+ vkDestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
+
+ // VU 0fe00d18 - too many input attachments in fragment stage
+ dslb_vec.clear();
+ dslb.binding = 0;
+ dslb.descriptorType = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT;
+ dslb.descriptorCount = 1 + max_input_attachments;
+ dslb.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
+ dslb_vec.push_back(dslb);
+
+ ds_layout_ci.bindingCount = dslb_vec.size();
+ ds_layout_ci.pBindings = dslb_vec.data();
+ err = vkCreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
+ ASSERT_VK_SUCCESS(err);
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01676");
+ if (dslb.descriptorCount > sum_input_attachments) {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01684"); // expect all-stages sum too
+ }
+ err = vkCreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
+ m_errorMonitor->VerifyFound();
+ vkDestroyPipelineLayout(m_device->device(), pipeline_layout, NULL); // Unnecessary but harmless if test passed
+ pipeline_layout = VK_NULL_HANDLE;
+ vkDestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
+}
+
+TEST_F(VkLayerTest, CreatePipelineLayoutExcessDescriptorsOverall) {
+ TEST_DESCRIPTION("Attempt to create a pipeline layout where total descriptors exceed limits");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ uint32_t max_uniform_buffers = m_device->phy().properties().limits.maxPerStageDescriptorUniformBuffers;
+ uint32_t max_storage_buffers = m_device->phy().properties().limits.maxPerStageDescriptorStorageBuffers;
+ uint32_t max_sampled_images = m_device->phy().properties().limits.maxPerStageDescriptorSampledImages;
+ uint32_t max_storage_images = m_device->phy().properties().limits.maxPerStageDescriptorStorageImages;
+ uint32_t max_samplers = m_device->phy().properties().limits.maxPerStageDescriptorSamplers;
+ uint32_t max_input_attachments = m_device->phy().properties().limits.maxPerStageDescriptorInputAttachments;
+
+ uint32_t sum_dyn_uniform_buffers = m_device->phy().properties().limits.maxDescriptorSetUniformBuffersDynamic;
+ uint32_t sum_uniform_buffers = m_device->phy().properties().limits.maxDescriptorSetUniformBuffers;
+ uint32_t sum_dyn_storage_buffers = m_device->phy().properties().limits.maxDescriptorSetStorageBuffersDynamic;
+ uint32_t sum_storage_buffers = m_device->phy().properties().limits.maxDescriptorSetStorageBuffers;
+ uint32_t sum_sampled_images = m_device->phy().properties().limits.maxDescriptorSetSampledImages;
+ uint32_t sum_storage_images = m_device->phy().properties().limits.maxDescriptorSetStorageImages;
+ uint32_t sum_samplers = m_device->phy().properties().limits.maxDescriptorSetSamplers;
+ uint32_t sum_input_attachments = m_device->phy().properties().limits.maxDescriptorSetInputAttachments;
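+ // Same pattern as the per-stage test above, but the layouts here exceed the all-stages (whole pipeline layout) limits instead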
+
+ // Devices that report UINT32_MAX for any of these limits can't run this test
+ if (UINT32_MAX == std::max({sum_dyn_uniform_buffers, sum_uniform_buffers, sum_dyn_storage_buffers, sum_storage_buffers,
+ sum_sampled_images, sum_storage_images, sum_samplers, sum_input_attachments})) {
+ printf("%s Physical device limits report as 2^32-1. Skipping test.\n", kSkipPrefix);
+ return;
+ }
+
+ VkDescriptorSetLayoutBinding dslb = {};
+ std::vector<VkDescriptorSetLayoutBinding> dslb_vec = {};
+ VkDescriptorSetLayout ds_layout = VK_NULL_HANDLE;
+ VkDescriptorSetLayoutCreateInfo ds_layout_ci = {};
+ ds_layout_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
+ ds_layout_ci.pNext = NULL;
+ VkPipelineLayoutCreateInfo pipeline_layout_ci = {};
+ pipeline_layout_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
+ pipeline_layout_ci.pNext = NULL;
+ pipeline_layout_ci.setLayoutCount = 1;
+ pipeline_layout_ci.pSetLayouts = &ds_layout;
+ VkPipelineLayout pipeline_layout = VK_NULL_HANDLE;
+
+ // VU 0fe00d1a - too many sampler type descriptors overall
+ dslb_vec.clear();
+ dslb.binding = 0;
+ dslb.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
+ dslb.descriptorCount = sum_samplers / 2;
+ dslb.stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
+ dslb.pImmutableSamplers = NULL;
+ dslb_vec.push_back(dslb);
+ dslb.binding = 1;
+ dslb.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+ dslb.descriptorCount = sum_samplers - dslb.descriptorCount + 1;
+ dslb.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
+ dslb_vec.push_back(dslb);
+
+ ds_layout_ci.bindingCount = dslb_vec.size();
+ ds_layout_ci.pBindings = dslb_vec.data();
+ VkResult err = vkCreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
+ ASSERT_VK_SUCCESS(err);
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01677");
+ if (dslb.descriptorCount > max_samplers) {
+ m_errorMonitor->SetDesiredFailureMsg(
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00287"); // Expect max-per-stage samplers exceeds limits
+ }
+ if (dslb.descriptorCount > sum_sampled_images) {
+ m_errorMonitor->SetDesiredFailureMsg(
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01682"); // Expect max overall sampled image count exceeds limits
+ }
+ if (dslb.descriptorCount > max_sampled_images) {
+ m_errorMonitor->SetDesiredFailureMsg(
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00290"); // Expect max per-stage sampled image count exceeds limits
+ }
+ err = vkCreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
+ m_errorMonitor->VerifyFound();
+ vkDestroyPipelineLayout(m_device->device(), pipeline_layout, NULL); // Unnecessary but harmless if test passed
+ pipeline_layout = VK_NULL_HANDLE;
+ vkDestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
+
+ // VU 0fe00d1c - too many uniform buffer type descriptors overall
+ dslb_vec.clear();
+ dslb.binding = 0;
+ dslb.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+ dslb.descriptorCount = sum_uniform_buffers + 1;
+ dslb.stageFlags = VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT;
+ dslb.pImmutableSamplers = NULL;
+ dslb_vec.push_back(dslb);
+
+ ds_layout_ci.bindingCount = dslb_vec.size();
+ ds_layout_ci.pBindings = dslb_vec.data();
+ err = vkCreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
+ ASSERT_VK_SUCCESS(err);
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01678");
+ if (dslb.descriptorCount > max_uniform_buffers) {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00288"); // expect max-per-stage too
+ }
+ err = vkCreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
+ m_errorMonitor->VerifyFound();
+ vkDestroyPipelineLayout(m_device->device(), pipeline_layout, NULL); // Unnecessary but harmless if test passed
+ pipeline_layout = VK_NULL_HANDLE;
+ vkDestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
+
+ // VU 0fe00d1e - too many dynamic uniform buffer type descriptors overall
+ dslb_vec.clear();
+ dslb.binding = 0;
+ dslb.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
+ dslb.descriptorCount = sum_dyn_uniform_buffers + 1;
+ dslb.stageFlags = VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT;
+ dslb.pImmutableSamplers = NULL;
+ dslb_vec.push_back(dslb);
+
+ ds_layout_ci.bindingCount = dslb_vec.size();
+ ds_layout_ci.pBindings = dslb_vec.data();
+ err = vkCreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
+ ASSERT_VK_SUCCESS(err);
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01679");
+ if (dslb.descriptorCount > max_uniform_buffers) {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00288"); // expect max-per-stage too
+ }
+ err = vkCreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
+ m_errorMonitor->VerifyFound();
+ vkDestroyPipelineLayout(m_device->device(), pipeline_layout, NULL); // Unnecessary but harmless if test passed
+ pipeline_layout = VK_NULL_HANDLE;
+ vkDestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
+
+ // VU 0fe00d20 - too many storage buffer type descriptors overall
+ dslb_vec.clear();
+ dslb.binding = 0;
+ dslb.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
+ dslb.descriptorCount = sum_storage_buffers + 1;
+ dslb.stageFlags = VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT;
+ dslb.pImmutableSamplers = NULL;
+ dslb_vec.push_back(dslb);
+
+ ds_layout_ci.bindingCount = dslb_vec.size();
+ ds_layout_ci.pBindings = dslb_vec.data();
+ err = vkCreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
+ ASSERT_VK_SUCCESS(err);
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01680");
+ if (dslb.descriptorCount > max_storage_buffers) {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00289"); // expect max-per-stage too
+ }
+ err = vkCreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
+ m_errorMonitor->VerifyFound();
+ vkDestroyPipelineLayout(m_device->device(), pipeline_layout, NULL); // Unnecessary but harmless if test passed
+ pipeline_layout = VK_NULL_HANDLE;
+ vkDestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
+
+ // VU 0fe00d22 - too many dynamic storage buffer type descriptors overall
+ dslb_vec.clear();
+ dslb.binding = 0;
+ dslb.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC;
+ dslb.descriptorCount = sum_dyn_storage_buffers + 1;
+ dslb.stageFlags = VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT;
+ dslb.pImmutableSamplers = NULL;
+ dslb_vec.push_back(dslb);
+
+ ds_layout_ci.bindingCount = dslb_vec.size();
+ ds_layout_ci.pBindings = dslb_vec.data();
+ err = vkCreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
+ ASSERT_VK_SUCCESS(err);
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01681");
+ if (dslb.descriptorCount > max_storage_buffers) {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00289"); // expect max-per-stage too
+ }
+ err = vkCreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
+ m_errorMonitor->VerifyFound();
+ vkDestroyPipelineLayout(m_device->device(), pipeline_layout, NULL); // Unnecessary but harmless if test passed
+ pipeline_layout = VK_NULL_HANDLE;
+ vkDestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
+
+ // VU 0fe00d24 - too many sampled image type descriptors overall
+ dslb_vec.clear();
+ dslb.binding = 0;
+ dslb.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+ dslb.descriptorCount = max_samplers;
+ dslb.stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
+ dslb.pImmutableSamplers = NULL;
+ dslb_vec.push_back(dslb);
+ dslb.binding = 1;
+ dslb.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
+ // revisit: not robust to odd limits.
+ uint32_t remaining = (max_samplers > sum_sampled_images ? 0 : (sum_sampled_images - max_samplers) / 2);
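+ // Size binding 1 so that the combined image samplers, sampled images, and uniform texel buffers
+ // together exceed maxDescriptorSetSampledImages (all three types count against that limit)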
+ dslb.descriptorCount = 1 + remaining;
+ dslb.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
+ dslb_vec.push_back(dslb);
+ dslb.binding = 2;
+ dslb.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
+ dslb.stageFlags = VK_SHADER_STAGE_COMPUTE_BIT;
+ dslb_vec.push_back(dslb);
+
+ ds_layout_ci.bindingCount = dslb_vec.size();
+ ds_layout_ci.pBindings = dslb_vec.data();
+ err = vkCreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
+ ASSERT_VK_SUCCESS(err);
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01682");
+ if (std::max(dslb_vec[0].descriptorCount, dslb_vec[1].descriptorCount) > max_sampled_images) {
+ m_errorMonitor->SetDesiredFailureMsg(
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00290"); // Expect max-per-stage sampled images to exceed limits
+ }
+ err = vkCreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
+ m_errorMonitor->VerifyFound();
+ vkDestroyPipelineLayout(m_device->device(), pipeline_layout, NULL); // Unnecessary but harmless if test passed
+ pipeline_layout = VK_NULL_HANDLE;
+ vkDestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
+
+ // VU 0fe00d26 - too many storage image type descriptors overall
+ dslb_vec.clear();
+ dslb.binding = 0;
+ dslb.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
+ dslb.descriptorCount = sum_storage_images / 2;
+ dslb.stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
+ dslb.pImmutableSamplers = NULL;
+ dslb_vec.push_back(dslb);
+ dslb.binding = 1;
+ dslb.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER;
+ dslb.descriptorCount = sum_storage_images - dslb.descriptorCount + 1;
+ dslb.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
+ dslb_vec.push_back(dslb);
+
+ ds_layout_ci.bindingCount = dslb_vec.size();
+ ds_layout_ci.pBindings = dslb_vec.data();
+ err = vkCreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
+ ASSERT_VK_SUCCESS(err);
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01683");
+ if (dslb.descriptorCount > max_storage_images) {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00291"); // expect max-per-stage too
+ }
+ err = vkCreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
+ m_errorMonitor->VerifyFound();
+ vkDestroyPipelineLayout(m_device->device(), pipeline_layout, NULL); // Unnecessary but harmless if test passed
+ pipeline_layout = VK_NULL_HANDLE;
+ vkDestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
+
+ // VU 0fe00d28 - too many input attachment type descriptors overall
+ dslb_vec.clear();
+ dslb.binding = 0;
+ dslb.descriptorType = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT;
+ dslb.descriptorCount = sum_input_attachments + 1;
+ dslb.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
+ dslb.pImmutableSamplers = NULL;
+ dslb_vec.push_back(dslb);
+
+ ds_layout_ci.bindingCount = dslb_vec.size();
+ ds_layout_ci.pBindings = dslb_vec.data();
+ err = vkCreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
+ ASSERT_VK_SUCCESS(err);
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01684");
+ if (dslb.descriptorCount > max_input_attachments) {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01676"); // expect max-per-stage too
+ }
+ err = vkCreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
+ m_errorMonitor->VerifyFound();
+ vkDestroyPipelineLayout(m_device->device(), pipeline_layout, NULL); // Unnecessary but harmless if test passed
+ pipeline_layout = VK_NULL_HANDLE;
+ vkDestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
+}
+
+TEST_F(VkLayerTest, InvalidCmdBufferBufferDestroyed) {
+ TEST_DESCRIPTION("Attempt to draw with a command buffer that is invalid due to a buffer dependency being destroyed.");
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ VkBuffer buffer;
+ VkDeviceMemory mem;
+ VkMemoryRequirements mem_reqs;
+
+ VkBufferCreateInfo buf_info = {};
+ buf_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+ buf_info.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+ buf_info.size = 256;
+ buf_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+ VkResult err = vkCreateBuffer(m_device->device(), &buf_info, NULL, &buffer);
+ ASSERT_VK_SUCCESS(err);
+
+ vkGetBufferMemoryRequirements(m_device->device(), buffer, &mem_reqs);
+
+ VkMemoryAllocateInfo alloc_info = {};
+ alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ alloc_info.allocationSize = mem_reqs.size;
+ bool pass = false;
+ pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &alloc_info, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
+ if (!pass) {
+ printf("%s Failed to set memory type.\n", kSkipPrefix);
+ vkDestroyBuffer(m_device->device(), buffer, NULL);
+ return;
+ }
+ err = vkAllocateMemory(m_device->device(), &alloc_info, NULL, &mem);
+ ASSERT_VK_SUCCESS(err);
+
+ err = vkBindBufferMemory(m_device->device(), buffer, mem, 0);
+ ASSERT_VK_SUCCESS(err);
+
+ m_commandBuffer->begin();
+ vkCmdFillBuffer(m_commandBuffer->handle(), buffer, 0, VK_WHOLE_SIZE, 0);
+ m_commandBuffer->end();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " that is invalid because bound Buffer ");
+ // Destroy buffer dependency prior to submit to cause ERROR
+ vkDestroyBuffer(m_device->device(), buffer, NULL);
+
+ VkSubmitInfo submit_info = {};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &m_commandBuffer->handle();
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+
+ m_errorMonitor->VerifyFound();
+ vkQueueWaitIdle(m_device->m_queue);
+ vkFreeMemory(m_device->handle(), mem, NULL);
+}
+
+TEST_F(VkLayerTest, InvalidCmdBufferBufferViewDestroyed) {
+ TEST_DESCRIPTION("Delete bufferView bound to cmd buffer, then attempt to submit cmd buffer.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkDescriptorPoolSize ds_type_count;
+ ds_type_count.type = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER;
+ ds_type_count.descriptorCount = 1;
+
+ VkDescriptorPoolCreateInfo ds_pool_ci = {};
+ ds_pool_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
+ ds_pool_ci.maxSets = 1;
+ ds_pool_ci.poolSizeCount = 1;
+ ds_pool_ci.pPoolSizes = &ds_type_count;
+
+ VkDescriptorPool ds_pool;
+ VkResult err = vkCreateDescriptorPool(m_device->device(), &ds_pool_ci, NULL, &ds_pool);
+ ASSERT_VK_SUCCESS(err);
+
+ VkDescriptorSetLayoutBinding layout_binding;
+ layout_binding.binding = 0;
+ layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER;
+ layout_binding.descriptorCount = 1;
+ layout_binding.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
+ layout_binding.pImmutableSamplers = NULL;
+
+ const VkDescriptorSetLayoutObj ds_layout(m_device, {layout_binding});
+
+ VkDescriptorSetAllocateInfo alloc_info = {};
+ alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
+ alloc_info.descriptorSetCount = 1;
+ alloc_info.descriptorPool = ds_pool;
+ alloc_info.pSetLayouts = &ds_layout.handle();
+ VkDescriptorSet descriptor_set;
+ err = vkAllocateDescriptorSets(m_device->device(), &alloc_info, &descriptor_set);
+ ASSERT_VK_SUCCESS(err);
+
+ const VkPipelineLayoutObj pipeline_layout(m_device, {&ds_layout});
+
+ VkBuffer buffer;
+ uint32_t queue_family_index = 0;
+ VkBufferCreateInfo buffer_create_info = {};
+ buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+ buffer_create_info.size = 1024;
+ buffer_create_info.usage = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT;
+ buffer_create_info.queueFamilyIndexCount = 1;
+ buffer_create_info.pQueueFamilyIndices = &queue_family_index;
+
+ err = vkCreateBuffer(m_device->device(), &buffer_create_info, NULL, &buffer);
+ ASSERT_VK_SUCCESS(err);
+
+ VkMemoryRequirements memory_reqs;
+ VkDeviceMemory buffer_memory;
+
+ VkMemoryAllocateInfo memory_info = {};
+ memory_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ memory_info.allocationSize = 0;
+ memory_info.memoryTypeIndex = 0;
+
+ vkGetBufferMemoryRequirements(m_device->device(), buffer, &memory_reqs);
+ memory_info.allocationSize = memory_reqs.size;
+ bool pass = m_device->phy().set_memory_type(memory_reqs.memoryTypeBits, &memory_info, 0);
+ ASSERT_TRUE(pass);
+
+ err = vkAllocateMemory(m_device->device(), &memory_info, NULL, &buffer_memory);
+ ASSERT_VK_SUCCESS(err);
+ err = vkBindBufferMemory(m_device->device(), buffer, buffer_memory, 0);
+ ASSERT_VK_SUCCESS(err);
+
+ VkBufferView view;
+ VkBufferViewCreateInfo bvci = {};
+ bvci.sType = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO;
+ bvci.buffer = buffer;
+ bvci.format = VK_FORMAT_R32_SFLOAT;
+ bvci.range = VK_WHOLE_SIZE;
+
+ err = vkCreateBufferView(m_device->device(), &bvci, NULL, &view);
+ ASSERT_VK_SUCCESS(err);
+
+ VkWriteDescriptorSet descriptor_write = {};
+ descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ descriptor_write.dstSet = descriptor_set;
+ descriptor_write.dstBinding = 0;
+ descriptor_write.descriptorCount = 1;
+ descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER;
+ descriptor_write.pTexelBufferView = &view;
+
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "void main(){\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(set=0, binding=0, r32f) uniform readonly imageBuffer s;\n"
+ "layout(location=0) out vec4 x;\n"
+ "void main(){\n"
+ " x = imageLoad(s, 0);\n"
+ "}\n";
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+ VkPipelineObj pipe(m_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+ pipe.AddDefaultColorAttachment();
+ pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " that is invalid because bound BufferView ");
+
+ m_commandBuffer->begin();
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
+ VkViewport viewport = {0, 0, 16, 16, 0, 1};
+ vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+ VkRect2D scissor = {{0, 0}, {16, 16}};
+ vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
+ // Bind pipeline to cmd buffer - This causes crash on Mali
+ vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+ vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
+ &descriptor_set, 0, nullptr);
+ m_commandBuffer->Draw(1, 0, 0, 0);
+ m_commandBuffer->EndRenderPass();
+ m_commandBuffer->end();
+
+ // Delete BufferView in order to invalidate cmd buffer
+ vkDestroyBufferView(m_device->device(), view, NULL);
+ // Now attempt submit of cmd buffer
+ VkSubmitInfo submit_info = {};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &m_commandBuffer->handle();
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+ m_errorMonitor->VerifyFound();
+
+ // Clean-up
+ vkDestroyBuffer(m_device->device(), buffer, NULL);
+ vkFreeMemory(m_device->device(), buffer_memory, NULL);
+ vkDestroyDescriptorPool(m_device->device(), ds_pool, NULL);
+}
+
+TEST_F(VkLayerTest, InvalidCmdBufferImageDestroyed) {
+ TEST_DESCRIPTION("Attempt to draw with a command buffer that is invalid due to an image dependency being destroyed.");
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ VkImage image;
+ const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
+ VkImageCreateInfo image_create_info = {};
+ image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ image_create_info.pNext = NULL;
+ image_create_info.imageType = VK_IMAGE_TYPE_2D;
+ image_create_info.format = tex_format;
+ image_create_info.extent.width = 32;
+ image_create_info.extent.height = 32;
+ image_create_info.extent.depth = 1;
+ image_create_info.mipLevels = 1;
+ image_create_info.arrayLayers = 1;
+ image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+ image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+ image_create_info.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+ image_create_info.flags = 0;
+ VkResult err = vkCreateImage(m_device->device(), &image_create_info, NULL, &image);
+ ASSERT_VK_SUCCESS(err);
+ // Memory must be bound to the image before a command using it is recorded
+ VkMemoryRequirements mem_reqs;
+ VkDeviceMemory image_mem;
+ bool pass;
+ VkMemoryAllocateInfo mem_alloc = {};
+ mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ mem_alloc.pNext = NULL;
+ mem_alloc.memoryTypeIndex = 0;
+ vkGetImageMemoryRequirements(m_device->device(), image, &mem_reqs);
+ mem_alloc.allocationSize = mem_reqs.size;
+ pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &mem_alloc, 0);
+ ASSERT_TRUE(pass);
+ err = vkAllocateMemory(m_device->device(), &mem_alloc, NULL, &image_mem);
+ ASSERT_VK_SUCCESS(err);
+ err = vkBindImageMemory(m_device->device(), image, image_mem, 0);
+ ASSERT_VK_SUCCESS(err);
+
+ m_commandBuffer->begin();
+ VkClearColorValue ccv;
+ ccv.float32[0] = 1.0f;
+ ccv.float32[1] = 1.0f;
+ ccv.float32[2] = 1.0f;
+ ccv.float32[3] = 1.0f;
+ VkImageSubresourceRange isr = {};
+ isr.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ isr.baseArrayLayer = 0;
+ isr.baseMipLevel = 0;
+ isr.layerCount = 1;
+ isr.levelCount = 1;
+ vkCmdClearColorImage(m_commandBuffer->handle(), image, VK_IMAGE_LAYOUT_GENERAL, &ccv, 1, &isr);
+ m_commandBuffer->end();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " that is invalid because bound Image ");
+ // Destroy image dependency prior to submit to cause ERROR
+ vkDestroyImage(m_device->device(), image, NULL);
+
+ VkSubmitInfo submit_info = {};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &m_commandBuffer->handle();
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+
+ m_errorMonitor->VerifyFound();
+ vkFreeMemory(m_device->device(), image_mem, nullptr);
+}
+
+TEST_F(VkLayerTest, InvalidCmdBufferFramebufferImageDestroyed) {
+ TEST_DESCRIPTION(
+ "Attempt to draw with a command buffer that is invalid due to a framebuffer image dependency being destroyed.");
+ ASSERT_NO_FATAL_FAILURE(Init());
+ VkFormatProperties format_properties;
+ VkResult err = VK_SUCCESS;
+ vkGetPhysicalDeviceFormatProperties(gpu(), VK_FORMAT_B8G8R8A8_UNORM, &format_properties);
+ if (!(format_properties.optimalTilingFeatures & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT)) {
+ printf("%s Image format doesn't support required features.\n", kSkipPrefix);
+ return;
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkImageCreateInfo image_ci = {};
+ image_ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ image_ci.pNext = NULL;
+ image_ci.imageType = VK_IMAGE_TYPE_2D;
+ image_ci.format = VK_FORMAT_B8G8R8A8_UNORM;
+ image_ci.extent.width = 32;
+ image_ci.extent.height = 32;
+ image_ci.extent.depth = 1;
+ image_ci.mipLevels = 1;
+ image_ci.arrayLayers = 1;
+ image_ci.samples = VK_SAMPLE_COUNT_1_BIT;
+ image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
+ image_ci.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
+ image_ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+ image_ci.flags = 0;
+ VkImage image;
+ ASSERT_VK_SUCCESS(vkCreateImage(m_device->handle(), &image_ci, NULL, &image));
+
+ VkMemoryRequirements memory_reqs;
+ VkDeviceMemory image_memory;
+ bool pass;
+ VkMemoryAllocateInfo memory_info = {};
+ memory_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ memory_info.pNext = NULL;
+ memory_info.allocationSize = 0;
+ memory_info.memoryTypeIndex = 0;
+ vkGetImageMemoryRequirements(m_device->device(), image, &memory_reqs);
+ memory_info.allocationSize = memory_reqs.size;
+ pass = m_device->phy().set_memory_type(memory_reqs.memoryTypeBits, &memory_info, 0);
+ ASSERT_TRUE(pass);
+ err = vkAllocateMemory(m_device->device(), &memory_info, NULL, &image_memory);
+ ASSERT_VK_SUCCESS(err);
+ err = vkBindImageMemory(m_device->device(), image, image_memory, 0);
+ ASSERT_VK_SUCCESS(err);
+
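+    // VkImageViewCreateInfo below is aggregate-initialized in member order: sType, pNext, flags, image, viewType, format, components, subresourceRange.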
+ VkImageViewCreateInfo ivci = {
+ VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
+ nullptr,
+ 0,
+ image,
+ VK_IMAGE_VIEW_TYPE_2D,
+ VK_FORMAT_B8G8R8A8_UNORM,
+ {VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_G, VK_COMPONENT_SWIZZLE_B, VK_COMPONENT_SWIZZLE_A},
+ {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1},
+ };
+ VkImageView view;
+ err = vkCreateImageView(m_device->device(), &ivci, nullptr, &view);
+ ASSERT_VK_SUCCESS(err);
+
+ VkFramebufferCreateInfo fci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, m_renderPass, 1, &view, 32, 32, 1};
+ VkFramebuffer fb;
+ err = vkCreateFramebuffer(m_device->device(), &fci, nullptr, &fb);
+ ASSERT_VK_SUCCESS(err);
+
+ // Just use default renderpass with our framebuffer
+ m_renderPassBeginInfo.framebuffer = fb;
+ m_renderPassBeginInfo.renderArea.extent.width = 32;
+ m_renderPassBeginInfo.renderArea.extent.height = 32;
+    // Record a minimal (empty render pass) cmd buffer for submit
+ m_commandBuffer->begin();
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+ m_commandBuffer->EndRenderPass();
+ m_commandBuffer->end();
+ // Destroy image attached to framebuffer to invalidate cmd buffer
+ vkDestroyImage(m_device->device(), image, NULL);
+ // Now attempt to submit cmd buffer and verify error
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " that is invalid because bound Image ");
+ m_commandBuffer->QueueCommandBuffer(false);
+ m_errorMonitor->VerifyFound();
+
+ vkDestroyFramebuffer(m_device->device(), fb, nullptr);
+ vkDestroyImageView(m_device->device(), view, nullptr);
+ vkFreeMemory(m_device->device(), image_memory, nullptr);
+}
+
+TEST_F(VkLayerTest, FramebufferInUseDestroyedSignaled) {
+ TEST_DESCRIPTION("Delete in-use framebuffer.");
+ ASSERT_NO_FATAL_FAILURE(Init());
+ VkFormatProperties format_properties;
+ VkResult err = VK_SUCCESS;
+ vkGetPhysicalDeviceFormatProperties(gpu(), VK_FORMAT_B8G8R8A8_UNORM, &format_properties);
+
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkImageObj image(m_device);
+ image.Init(256, 256, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+ ASSERT_TRUE(image.initialized());
+ VkImageView view = image.targetView(VK_FORMAT_B8G8R8A8_UNORM);
+
+ VkFramebufferCreateInfo fci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, m_renderPass, 1, &view, 256, 256, 1};
+ VkFramebuffer fb;
+ err = vkCreateFramebuffer(m_device->device(), &fci, nullptr, &fb);
+ ASSERT_VK_SUCCESS(err);
+
+ // Just use default renderpass with our framebuffer
+ m_renderPassBeginInfo.framebuffer = fb;
+    // Record a minimal (empty render pass) cmd buffer for submit
+ m_commandBuffer->begin();
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+ m_commandBuffer->EndRenderPass();
+ m_commandBuffer->end();
+ // Submit cmd buffer to put it in-flight
+ VkSubmitInfo submit_info = {};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &m_commandBuffer->handle();
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+ // Destroy framebuffer while in-flight
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkDestroyFramebuffer-framebuffer-00892");
+ vkDestroyFramebuffer(m_device->device(), fb, NULL);
+ m_errorMonitor->VerifyFound();
+ // Wait for queue to complete so we can safely destroy everything
+ vkQueueWaitIdle(m_device->m_queue);
+ m_errorMonitor->SetUnexpectedError("If framebuffer is not VK_NULL_HANDLE, framebuffer must be a valid VkFramebuffer handle");
+ m_errorMonitor->SetUnexpectedError("Unable to remove Framebuffer obj");
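+    // SetUnexpectedError() tells the monitor to ignore these follow-on messages so the cleanup destroy below does not fail the test.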
+ vkDestroyFramebuffer(m_device->device(), fb, nullptr);
+}
+
+TEST_F(VkLayerTest, FramebufferImageInUseDestroyedSignaled) {
+ TEST_DESCRIPTION("Delete in-use image that's child of framebuffer.");
+ ASSERT_NO_FATAL_FAILURE(Init());
+ VkFormatProperties format_properties;
+ VkResult err = VK_SUCCESS;
+ vkGetPhysicalDeviceFormatProperties(gpu(), VK_FORMAT_B8G8R8A8_UNORM, &format_properties);
+
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkImageCreateInfo image_ci = {};
+ image_ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ image_ci.pNext = NULL;
+ image_ci.imageType = VK_IMAGE_TYPE_2D;
+ image_ci.format = VK_FORMAT_B8G8R8A8_UNORM;
+ image_ci.extent.width = 256;
+ image_ci.extent.height = 256;
+ image_ci.extent.depth = 1;
+ image_ci.mipLevels = 1;
+ image_ci.arrayLayers = 1;
+ image_ci.samples = VK_SAMPLE_COUNT_1_BIT;
+ image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
+ image_ci.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+ image_ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+ image_ci.flags = 0;
+ VkImage image;
+ ASSERT_VK_SUCCESS(vkCreateImage(m_device->handle(), &image_ci, NULL, &image));
+
+ VkMemoryRequirements memory_reqs;
+ VkDeviceMemory image_memory;
+ bool pass;
+ VkMemoryAllocateInfo memory_info = {};
+ memory_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ memory_info.pNext = NULL;
+ memory_info.allocationSize = 0;
+ memory_info.memoryTypeIndex = 0;
+ vkGetImageMemoryRequirements(m_device->device(), image, &memory_reqs);
+ memory_info.allocationSize = memory_reqs.size;
+ pass = m_device->phy().set_memory_type(memory_reqs.memoryTypeBits, &memory_info, 0);
+ ASSERT_TRUE(pass);
+ err = vkAllocateMemory(m_device->device(), &memory_info, NULL, &image_memory);
+ ASSERT_VK_SUCCESS(err);
+ err = vkBindImageMemory(m_device->device(), image, image_memory, 0);
+ ASSERT_VK_SUCCESS(err);
+
+ VkImageViewCreateInfo ivci = {
+ VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
+ nullptr,
+ 0,
+ image,
+ VK_IMAGE_VIEW_TYPE_2D,
+ VK_FORMAT_B8G8R8A8_UNORM,
+ {VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_G, VK_COMPONENT_SWIZZLE_B, VK_COMPONENT_SWIZZLE_A},
+ {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1},
+ };
+ VkImageView view;
+ err = vkCreateImageView(m_device->device(), &ivci, nullptr, &view);
+ ASSERT_VK_SUCCESS(err);
+
+ VkFramebufferCreateInfo fci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, m_renderPass, 1, &view, 256, 256, 1};
+ VkFramebuffer fb;
+ err = vkCreateFramebuffer(m_device->device(), &fci, nullptr, &fb);
+ ASSERT_VK_SUCCESS(err);
+
+ // Just use default renderpass with our framebuffer
+ m_renderPassBeginInfo.framebuffer = fb;
+    // Record a minimal (empty render pass) cmd buffer for submit
+ m_commandBuffer->begin();
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+ m_commandBuffer->EndRenderPass();
+ m_commandBuffer->end();
+    // Set up submit info for the cmd buffer (and its attached imageView)
+ VkSubmitInfo submit_info = {};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &m_commandBuffer->handle();
+ // Submit cmd buffer to put framebuffer and children in-flight
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+ // Destroy image attached to framebuffer while in-flight
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkDestroyImage-image-01000");
+ vkDestroyImage(m_device->device(), image, NULL);
+ m_errorMonitor->VerifyFound();
+ // Wait for queue to complete so we can safely destroy image and other objects
+ vkQueueWaitIdle(m_device->m_queue);
+ m_errorMonitor->SetUnexpectedError("If image is not VK_NULL_HANDLE, image must be a valid VkImage handle");
+ m_errorMonitor->SetUnexpectedError("Unable to remove Image obj");
+ vkDestroyImage(m_device->device(), image, NULL);
+ vkDestroyFramebuffer(m_device->device(), fb, nullptr);
+ vkDestroyImageView(m_device->device(), view, nullptr);
+ vkFreeMemory(m_device->device(), image_memory, nullptr);
+}
+
+TEST_F(VkLayerTest, ImageMemoryNotBound) {
+ TEST_DESCRIPTION("Attempt to draw with an image which has not had memory bound to it.");
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ VkImage image;
+ const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
+ VkImageCreateInfo image_create_info = {};
+ image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ image_create_info.pNext = NULL;
+ image_create_info.imageType = VK_IMAGE_TYPE_2D;
+ image_create_info.format = tex_format;
+ image_create_info.extent.width = 32;
+ image_create_info.extent.height = 32;
+ image_create_info.extent.depth = 1;
+ image_create_info.mipLevels = 1;
+ image_create_info.arrayLayers = 1;
+ image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+ image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+ image_create_info.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+ image_create_info.flags = 0;
+ VkResult err = vkCreateImage(m_device->device(), &image_create_info, NULL, &image);
+ ASSERT_VK_SUCCESS(err);
+    // Normally memory must be bound to the image before recording a cmd that uses it
+ VkMemoryRequirements mem_reqs;
+ VkDeviceMemory image_mem;
+ bool pass;
+ VkMemoryAllocateInfo mem_alloc = {};
+ mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ mem_alloc.pNext = NULL;
+ mem_alloc.memoryTypeIndex = 0;
+ vkGetImageMemoryRequirements(m_device->device(), image, &mem_reqs);
+ mem_alloc.allocationSize = mem_reqs.size;
+ pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &mem_alloc, 0);
+ ASSERT_TRUE(pass);
+ err = vkAllocateMemory(m_device->device(), &mem_alloc, NULL, &image_mem);
+ ASSERT_VK_SUCCESS(err);
+
+ // Introduce error, do not call vkBindImageMemory(m_device->device(), image, image_mem, 0);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ " used with no memory bound. Memory should be bound by calling vkBindImageMemory().");
+
+ m_commandBuffer->begin();
+ VkClearColorValue ccv;
+ ccv.float32[0] = 1.0f;
+ ccv.float32[1] = 1.0f;
+ ccv.float32[2] = 1.0f;
+ ccv.float32[3] = 1.0f;
+ VkImageSubresourceRange isr = {};
+ isr.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ isr.baseArrayLayer = 0;
+ isr.baseMipLevel = 0;
+ isr.layerCount = 1;
+ isr.levelCount = 1;
+ vkCmdClearColorImage(m_commandBuffer->handle(), image, VK_IMAGE_LAYOUT_GENERAL, &ccv, 1, &isr);
+ m_commandBuffer->end();
+
+ m_errorMonitor->VerifyFound();
+ vkDestroyImage(m_device->device(), image, NULL);
+ vkFreeMemory(m_device->device(), image_mem, nullptr);
+}
+
+TEST_F(VkLayerTest, BufferMemoryNotBound) {
+ TEST_DESCRIPTION("Attempt to copy from a buffer which has not had memory bound to it.");
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ VkImageObj image(m_device);
+ image.Init(128, 128, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
+ VK_IMAGE_TILING_OPTIMAL, 0);
+ ASSERT_TRUE(image.initialized());
+
+ VkBuffer buffer;
+ VkDeviceMemory mem;
+ VkMemoryRequirements mem_reqs;
+
+ VkBufferCreateInfo buf_info = {};
+ buf_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+ buf_info.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
+ buf_info.size = 1024;
+ buf_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+ VkResult err = vkCreateBuffer(m_device->device(), &buf_info, NULL, &buffer);
+ ASSERT_VK_SUCCESS(err);
+
+ vkGetBufferMemoryRequirements(m_device->device(), buffer, &mem_reqs);
+
+ VkMemoryAllocateInfo alloc_info = {};
+ alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ alloc_info.allocationSize = 1024;
+ bool pass = false;
+ pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &alloc_info, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
+ if (!pass) {
+ printf("%s Failed to set memory type.\n", kSkipPrefix);
+ vkDestroyBuffer(m_device->device(), buffer, NULL);
+ return;
+ }
+ err = vkAllocateMemory(m_device->device(), &alloc_info, NULL, &mem);
+ ASSERT_VK_SUCCESS(err);
+
+ // Introduce failure by not calling vkBindBufferMemory(m_device->device(), buffer, mem, 0);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ " used with no memory bound. Memory should be bound by calling vkBindBufferMemory().");
+ VkBufferImageCopy region = {};
+ region.bufferRowLength = 16;
+ region.bufferImageHeight = 16;
+ region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+
+ region.imageSubresource.layerCount = 1;
+ region.imageExtent.height = 4;
+ region.imageExtent.width = 4;
+ region.imageExtent.depth = 1;
+ m_commandBuffer->begin();
+ vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer, image.handle(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region);
+ m_commandBuffer->end();
+
+ m_errorMonitor->VerifyFound();
+
+ vkDestroyBuffer(m_device->device(), buffer, NULL);
+ vkFreeMemory(m_device->handle(), mem, NULL);
+}
+
+TEST_F(VkLayerTest, InvalidCmdBufferEventDestroyed) {
+ TEST_DESCRIPTION("Attempt to draw with a command buffer that is invalid due to an event dependency being destroyed.");
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ VkEvent event;
+ VkEventCreateInfo evci = {};
+ evci.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
+ VkResult result = vkCreateEvent(m_device->device(), &evci, NULL, &event);
+ ASSERT_VK_SUCCESS(result);
+
+ m_commandBuffer->begin();
+ vkCmdSetEvent(m_commandBuffer->handle(), event, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT);
+ m_commandBuffer->end();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " that is invalid because bound Event ");
+ // Destroy event dependency prior to submit to cause ERROR
+ vkDestroyEvent(m_device->device(), event, NULL);
+
+ VkSubmitInfo submit_info = {};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &m_commandBuffer->handle();
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, InvalidCmdBufferQueryPoolDestroyed) {
+ TEST_DESCRIPTION("Attempt to draw with a command buffer that is invalid due to a query pool dependency being destroyed.");
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ VkQueryPool query_pool;
+ VkQueryPoolCreateInfo qpci{};
+ qpci.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
+ qpci.queryType = VK_QUERY_TYPE_TIMESTAMP;
+ qpci.queryCount = 1;
+ VkResult result = vkCreateQueryPool(m_device->device(), &qpci, nullptr, &query_pool);
+ ASSERT_VK_SUCCESS(result);
+
+ m_commandBuffer->begin();
+ vkCmdResetQueryPool(m_commandBuffer->handle(), query_pool, 0, 1);
+ m_commandBuffer->end();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " that is invalid because bound QueryPool ");
+ // Destroy query pool dependency prior to submit to cause ERROR
+ vkDestroyQueryPool(m_device->device(), query_pool, NULL);
+
+ VkSubmitInfo submit_info = {};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &m_commandBuffer->handle();
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, InvalidCmdBufferPipelineDestroyed) {
+ TEST_DESCRIPTION("Attempt to draw with a command buffer that is invalid due to a pipeline dependency being destroyed.");
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ {
+ // Use helper to create graphics pipeline
+ CreatePipelineHelper helper(*this);
+ helper.InitInfo();
+ helper.InitState();
+ helper.CreateGraphicsPipeline();
+
+ // Bind helper pipeline to command buffer
+ m_commandBuffer->begin();
+ vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, helper.pipeline_);
+ m_commandBuffer->end();
+
+ // pipeline will be destroyed when helper goes out of scope
+ }
+
+ // Cause error by submitting command buffer that references destroyed pipeline
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " that is invalid because bound Pipeline ");
+ m_commandBuffer->QueueCommandBuffer(false);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkPositiveLayerTest, DestroyPipelineRenderPass) {
+ TEST_DESCRIPTION("Draw using a pipeline whose create renderPass has been destroyed.");
+ m_errorMonitor->ExpectSuccess();
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkResult err;
+
+ // Create a renderPass that's compatible with Draw-time renderPass
+ VkAttachmentDescription att = {};
+ att.format = m_render_target_fmt;
+ att.samples = VK_SAMPLE_COUNT_1_BIT;
+ att.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
+ att.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
+ att.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
+ att.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
+ att.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+ att.finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+
+ VkAttachmentReference ref = {};
+ ref.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+ ref.attachment = 0;
+
+ m_renderPassClearValues.clear();
+ VkClearValue clear = {};
+ clear.color = m_clear_color;
+
+ VkSubpassDescription subpass = {};
+ subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
+ subpass.flags = 0;
+ subpass.inputAttachmentCount = 0;
+ subpass.pInputAttachments = NULL;
+ subpass.colorAttachmentCount = 1;
+ subpass.pColorAttachments = &ref;
+ subpass.pResolveAttachments = NULL;
+
+ subpass.pDepthStencilAttachment = NULL;
+ subpass.preserveAttachmentCount = 0;
+ subpass.pPreserveAttachments = NULL;
+
+ VkRenderPassCreateInfo rp_info = {};
+ rp_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+ rp_info.attachmentCount = 1;
+ rp_info.pAttachments = &att;
+ rp_info.subpassCount = 1;
+ rp_info.pSubpasses = &subpass;
+
+ VkRenderPass rp;
+ err = vkCreateRenderPass(device(), &rp_info, NULL, &rp);
+ ASSERT_VK_SUCCESS(err);
+
+ VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddDefaultColorAttachment();
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+ VkViewport viewport = {0.0f, 0.0f, 64.0f, 64.0f, 0.0f, 1.0f};
+ m_viewports.push_back(viewport);
+ pipe.SetViewport(m_viewports);
+ VkRect2D rect = {{0, 0}, {64, 64}};
+ m_scissors.push_back(rect);
+ pipe.SetScissor(m_scissors);
+
+ const VkPipelineLayoutObj pl(m_device);
+ pipe.CreateVKPipeline(pl.handle(), rp);
+
+ m_commandBuffer->begin();
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+ vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+    // Destroy renderPass before the pipeline is used in Draw.
+    // Destruction is delayed until after CmdBindPipeline to verify that no invalid binding is
+    // created between the cmd buffer and the renderPass (which the layers used to do).
+ vkDestroyRenderPass(m_device->device(), rp, nullptr);
+ vkCmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
+ vkCmdEndRenderPass(m_commandBuffer->handle());
+ m_commandBuffer->end();
+
+ VkSubmitInfo submit_info = {};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &m_commandBuffer->handle();
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+ m_errorMonitor->VerifyNotFound();
+ vkQueueWaitIdle(m_device->m_queue);
+}
+
+TEST_F(VkLayerTest, InvalidCmdBufferDescriptorSetBufferDestroyed) {
+ TEST_DESCRIPTION(
+ "Attempt to draw with a command buffer that is invalid due to a bound descriptor set with a buffer dependency being "
+ "destroyed.");
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitViewport());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkDescriptorPoolSize ds_type_count = {};
+ ds_type_count.type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+ ds_type_count.descriptorCount = 1;
+
+ VkDescriptorPoolCreateInfo ds_pool_ci = {};
+ ds_pool_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
+ ds_pool_ci.pNext = NULL;
+ ds_pool_ci.maxSets = 1;
+ ds_pool_ci.poolSizeCount = 1;
+ ds_pool_ci.pPoolSizes = &ds_type_count;
+
+ VkDescriptorPool ds_pool;
+ VkResult err = vkCreateDescriptorPool(m_device->device(), &ds_pool_ci, NULL, &ds_pool);
+ ASSERT_VK_SUCCESS(err);
+
+ VkDescriptorSetLayoutBinding dsl_binding = {};
+ dsl_binding.binding = 0;
+ dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+ dsl_binding.descriptorCount = 1;
+ dsl_binding.stageFlags = VK_SHADER_STAGE_ALL;
+ dsl_binding.pImmutableSamplers = NULL;
+
+ const VkDescriptorSetLayoutObj ds_layout(m_device, {dsl_binding});
+
+ VkDescriptorSet descriptorSet;
+ VkDescriptorSetAllocateInfo alloc_info = {};
+ alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
+ alloc_info.descriptorSetCount = 1;
+ alloc_info.descriptorPool = ds_pool;
+ alloc_info.pSetLayouts = &ds_layout.handle();
+ err = vkAllocateDescriptorSets(m_device->device(), &alloc_info, &descriptorSet);
+ ASSERT_VK_SUCCESS(err);
+
+ const VkPipelineLayoutObj pipeline_layout(m_device, {&ds_layout});
+
+ // Create a buffer to update the descriptor with
+ uint32_t qfi = 0;
+ VkBufferCreateInfo buffCI = {};
+ buffCI.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+ buffCI.size = 1024;
+ buffCI.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
+ buffCI.queueFamilyIndexCount = 1;
+ buffCI.pQueueFamilyIndices = &qfi;
+
+ VkBuffer buffer;
+ err = vkCreateBuffer(m_device->device(), &buffCI, NULL, &buffer);
+ ASSERT_VK_SUCCESS(err);
+    // Allocate memory and bind it to the buffer so the test reaches the intended error below
+ VkMemoryRequirements memReqs;
+ vkGetBufferMemoryRequirements(m_device->device(), buffer, &memReqs);
+ VkMemoryAllocateInfo mem_alloc = {};
+ mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ mem_alloc.pNext = NULL;
+ mem_alloc.allocationSize = memReqs.size;
+ mem_alloc.memoryTypeIndex = 0;
+ bool pass = m_device->phy().set_memory_type(memReqs.memoryTypeBits, &mem_alloc, 0);
+ if (!pass) {
+ printf("%s Failed to set memory type.\n", kSkipPrefix);
+ vkDestroyBuffer(m_device->device(), buffer, NULL);
+ return;
+ }
+
+ VkDeviceMemory mem;
+ err = vkAllocateMemory(m_device->device(), &mem_alloc, NULL, &mem);
+ ASSERT_VK_SUCCESS(err);
+ err = vkBindBufferMemory(m_device->device(), buffer, mem, 0);
+ ASSERT_VK_SUCCESS(err);
+ // Correctly update descriptor to avoid "NOT_UPDATED" error
+ VkDescriptorBufferInfo buffInfo = {};
+ buffInfo.buffer = buffer;
+ buffInfo.offset = 0;
+ buffInfo.range = 1024;
+
+ VkWriteDescriptorSet descriptor_write;
+ memset(&descriptor_write, 0, sizeof(descriptor_write));
+ descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ descriptor_write.dstSet = descriptorSet;
+ descriptor_write.dstBinding = 0;
+ descriptor_write.descriptorCount = 1;
+ descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+ descriptor_write.pBufferInfo = &buffInfo;
+
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+
+ // Create PSO to be used for draw-time errors below
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "void main(){\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) out vec4 x;\n"
+ "layout(set=0) layout(binding=0) uniform foo { int x; int y; } bar;\n"
+ "void main(){\n"
+ " x = vec4(bar.y);\n"
+ "}\n";
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+ VkPipelineObj pipe(m_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+ pipe.AddDefaultColorAttachment();
+ pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());
+
+ m_commandBuffer->begin();
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+ vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+ vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
+ &descriptorSet, 0, NULL);
+
+ vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &m_viewports[0]);
+ vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &m_scissors[0]);
+
+ m_commandBuffer->Draw(1, 0, 0, 0);
+ m_commandBuffer->EndRenderPass();
+ m_commandBuffer->end();
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " that is invalid because bound Buffer ");
+ // Destroy buffer should invalidate the cmd buffer, causing error on submit
+ vkDestroyBuffer(m_device->device(), buffer, NULL);
+ // Attempt to submit cmd buffer
+ VkSubmitInfo submit_info = {};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &m_commandBuffer->handle();
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+ m_errorMonitor->VerifyFound();
+ // Cleanup
+ vkFreeMemory(m_device->device(), mem, NULL);
+
+ vkDestroyDescriptorPool(m_device->device(), ds_pool, NULL);
+}
+
+TEST_F(VkLayerTest, InvalidCmdBufferDescriptorSetImageSamplerDestroyed) {
+ TEST_DESCRIPTION(
+        "Attempt to draw with a command buffer that is invalid due to a bound descriptor set with a combined image sampler, where "
+        "the image, sampler, and descriptor set are each destroyed in turn before submitting the associated cmd buffers. Also "
+        "attempt to destroy a DescriptorSet that is in use.");
+ ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+ ASSERT_NO_FATAL_FAILURE(InitViewport());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkDescriptorPoolSize ds_type_count = {};
+ ds_type_count.type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+ ds_type_count.descriptorCount = 1;
+
+ VkDescriptorPoolCreateInfo ds_pool_ci = {};
+ ds_pool_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
+ ds_pool_ci.pNext = NULL;
+ ds_pool_ci.flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT;
+ ds_pool_ci.maxSets = 1;
+ ds_pool_ci.poolSizeCount = 1;
+ ds_pool_ci.pPoolSizes = &ds_type_count;
+
+ VkDescriptorPool ds_pool;
+ VkResult err = vkCreateDescriptorPool(m_device->device(), &ds_pool_ci, NULL, &ds_pool);
+ ASSERT_VK_SUCCESS(err);
+
+ VkDescriptorSetLayoutBinding dsl_binding = {};
+ dsl_binding.binding = 0;
+ dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+ dsl_binding.descriptorCount = 1;
+ dsl_binding.stageFlags = VK_SHADER_STAGE_ALL;
+ dsl_binding.pImmutableSamplers = NULL;
+
+ const VkDescriptorSetLayoutObj ds_layout(m_device, {dsl_binding});
+
+ VkDescriptorSet descriptorSet;
+ VkDescriptorSetAllocateInfo alloc_info = {};
+ alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
+ alloc_info.descriptorSetCount = 1;
+ alloc_info.descriptorPool = ds_pool;
+ alloc_info.pSetLayouts = &ds_layout.handle();
+ err = vkAllocateDescriptorSets(m_device->device(), &alloc_info, &descriptorSet);
+ ASSERT_VK_SUCCESS(err);
+
+ const VkPipelineLayoutObj pipeline_layout(m_device, {&ds_layout});
+
+ // Create images to update the descriptor with
+ VkImage image;
+ VkImage image2;
+ const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
+ const int32_t tex_width = 32;
+ const int32_t tex_height = 32;
+ VkImageCreateInfo image_create_info = {};
+ image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ image_create_info.pNext = NULL;
+ image_create_info.imageType = VK_IMAGE_TYPE_2D;
+ image_create_info.format = tex_format;
+ image_create_info.extent.width = tex_width;
+ image_create_info.extent.height = tex_height;
+ image_create_info.extent.depth = 1;
+ image_create_info.mipLevels = 1;
+ image_create_info.arrayLayers = 1;
+ image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+ image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+ image_create_info.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
+ image_create_info.flags = 0;
+ err = vkCreateImage(m_device->device(), &image_create_info, NULL, &image);
+ ASSERT_VK_SUCCESS(err);
+ err = vkCreateImage(m_device->device(), &image_create_info, NULL, &image2);
+ ASSERT_VK_SUCCESS(err);
+
+ VkMemoryRequirements memory_reqs;
+ VkDeviceMemory image_memory;
+ bool pass;
+ VkMemoryAllocateInfo memory_info = {};
+ memory_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ memory_info.pNext = NULL;
+ memory_info.allocationSize = 0;
+ memory_info.memoryTypeIndex = 0;
+ vkGetImageMemoryRequirements(m_device->device(), image, &memory_reqs);
+ // Allocate enough memory for both images
+ VkDeviceSize align_mod = memory_reqs.size % memory_reqs.alignment;
+ VkDeviceSize aligned_size = ((align_mod == 0) ? memory_reqs.size : (memory_reqs.size + memory_reqs.alignment - align_mod));
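+    // aligned_size rounds the first image's size up to its required alignment so the second image can be bound at that offset.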
+ memory_info.allocationSize = aligned_size * 2;
+ pass = m_device->phy().set_memory_type(memory_reqs.memoryTypeBits, &memory_info, 0);
+ ASSERT_TRUE(pass);
+ err = vkAllocateMemory(m_device->device(), &memory_info, NULL, &image_memory);
+ ASSERT_VK_SUCCESS(err);
+ err = vkBindImageMemory(m_device->device(), image, image_memory, 0);
+ ASSERT_VK_SUCCESS(err);
+ // Bind second image to memory right after first image
+ err = vkBindImageMemory(m_device->device(), image2, image_memory, aligned_size);
+ ASSERT_VK_SUCCESS(err);
+
+ VkImageViewCreateInfo image_view_create_info = {};
+ image_view_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+ image_view_create_info.image = image;
+ image_view_create_info.viewType = VK_IMAGE_VIEW_TYPE_2D;
+ image_view_create_info.format = tex_format;
+ image_view_create_info.subresourceRange.layerCount = 1;
+ image_view_create_info.subresourceRange.baseMipLevel = 0;
+ image_view_create_info.subresourceRange.levelCount = 1;
+ image_view_create_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+
+ VkImageView tmp_view; // First test deletes this view
+ VkImageView view;
+ VkImageView view2;
+ err = vkCreateImageView(m_device->device(), &image_view_create_info, NULL, &tmp_view);
+ ASSERT_VK_SUCCESS(err);
+ err = vkCreateImageView(m_device->device(), &image_view_create_info, NULL, &view);
+ ASSERT_VK_SUCCESS(err);
+ image_view_create_info.image = image2;
+ err = vkCreateImageView(m_device->device(), &image_view_create_info, NULL, &view2);
+ ASSERT_VK_SUCCESS(err);
+ // Create Samplers
+ VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
+ VkSampler sampler;
+ VkSampler sampler2;
+ err = vkCreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
+ ASSERT_VK_SUCCESS(err);
+ err = vkCreateSampler(m_device->device(), &sampler_ci, NULL, &sampler2);
+ ASSERT_VK_SUCCESS(err);
+ // Update descriptor with image and sampler
+ VkDescriptorImageInfo img_info = {};
+ img_info.sampler = sampler;
+ img_info.imageView = tmp_view;
+ img_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+
+ VkWriteDescriptorSet descriptor_write;
+ memset(&descriptor_write, 0, sizeof(descriptor_write));
+ descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ descriptor_write.dstSet = descriptorSet;
+ descriptor_write.dstBinding = 0;
+ descriptor_write.descriptorCount = 1;
+ descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+ descriptor_write.pImageInfo = &img_info;
+
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+
+ // Create PSO to be used for draw-time errors below
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "void main(){\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(set=0, binding=0) uniform sampler2D s;\n"
+ "layout(location=0) out vec4 x;\n"
+ "void main(){\n"
+ " x = texture(s, vec2(1));\n"
+ "}\n";
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+ VkPipelineObj pipe(m_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+ pipe.AddDefaultColorAttachment();
+ pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());
+
+    // Record and submit a valid cmd buffer first; the error cases follow below
+ m_commandBuffer->begin();
+
+    // Transition image layout from VK_IMAGE_LAYOUT_UNDEFINED to VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
+ VkImageMemoryBarrier barrier = {};
+ barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+ barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+ barrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+ barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+ barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+ barrier.image = image;
+ barrier.srcAccessMask = 0;
+ barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
+ barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ barrier.subresourceRange.baseMipLevel = 0;
+ barrier.subresourceRange.levelCount = 1;
+ barrier.subresourceRange.baseArrayLayer = 0;
+ barrier.subresourceRange.layerCount = 1;
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
+ nullptr, 0, nullptr, 1, &barrier);
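+    // The barrier above puts the image into the layout the descriptor was written with, so the first submission below should succeed.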
+
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+ vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+ vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
+ &descriptorSet, 0, NULL);
+ VkViewport viewport = {0, 0, 16, 16, 0, 1};
+ VkRect2D scissor = {{0, 0}, {16, 16}};
+ vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+ vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
+ m_commandBuffer->Draw(1, 0, 0, 0);
+ m_commandBuffer->EndRenderPass();
+ m_commandBuffer->end();
+ VkSubmitInfo submit_info = {};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &m_commandBuffer->handle();
+ // This first submit should be successful
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+ vkQueueWaitIdle(m_device->m_queue);
+
+ // Now destroy imageview and reset cmdBuffer
+ vkDestroyImageView(m_device->device(), tmp_view, NULL);
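+    // Resetting is allowed because the command pool was created with VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT (see Init above).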
+ m_commandBuffer->reset(0);
+ m_commandBuffer->begin();
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+ vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+ vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
+ &descriptorSet, 0, NULL);
+ vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+ vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " that has been destroyed.");
+ m_commandBuffer->Draw(1, 0, 0, 0);
+ m_errorMonitor->VerifyFound();
+ m_commandBuffer->EndRenderPass();
+ m_commandBuffer->end();
+
+ // Re-update descriptor with new view
+ img_info.imageView = view;
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+ // Now test destroying sampler prior to cmd buffer submission
+ m_commandBuffer->begin();
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+ vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+ vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
+ &descriptorSet, 0, NULL);
+ vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+ vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
+ m_commandBuffer->Draw(1, 0, 0, 0);
+ m_commandBuffer->EndRenderPass();
+ m_commandBuffer->end();
+    // Destroying the sampler invalidates the cmd buffer, causing an error on submit
+ vkDestroySampler(m_device->device(), sampler, NULL);
+ // Attempt to submit cmd buffer
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "that is invalid because bound Sampler");
+ submit_info = {};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &m_commandBuffer->handle();
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+ m_errorMonitor->VerifyFound();
+
+ // Now re-update descriptor with valid sampler and delete image
+ img_info.sampler = sampler2;
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+
+ VkCommandBufferBeginInfo info = {};
+ info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+ info.flags = VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
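+    // SIMULTANEOUS_USE_BIT allows the cmd buffer to be submitted again while a prior submission may still be pending.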
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " that is invalid because bound Image ");
+ m_commandBuffer->begin(&info);
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+ vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+ vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
+ &descriptorSet, 0, NULL);
+ vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+ vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
+ m_commandBuffer->Draw(1, 0, 0, 0);
+ m_commandBuffer->EndRenderPass();
+ m_commandBuffer->end();
+    // Destroying the image invalidates the cmd buffer, causing an error on submit
+ vkDestroyImage(m_device->device(), image, NULL);
+ // Attempt to submit cmd buffer
+ submit_info = {};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &m_commandBuffer->handle();
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+ m_errorMonitor->VerifyFound();
+ // Now update descriptor to be valid, but then free descriptor
+ img_info.imageView = view2;
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+ m_commandBuffer->begin(&info);
+
+    // Transition image2 layout from VK_IMAGE_LAYOUT_UNDEFINED to VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
+ barrier.image = image2;
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
+ nullptr, 0, nullptr, 1, &barrier);
+
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+ vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+ vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
+ &descriptorSet, 0, NULL);
+ vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+ vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
+ m_commandBuffer->Draw(1, 0, 0, 0);
+ m_commandBuffer->EndRenderPass();
+ m_commandBuffer->end();
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+
+ // Immediately try to destroy the descriptor set in the active command buffer - failure expected
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Cannot call vkFreeDescriptorSets() on descriptor set 0x");
+ vkFreeDescriptorSets(m_device->device(), ds_pool, 1, &descriptorSet);
+ m_errorMonitor->VerifyFound();
+
+ // Try again once the queue is idle - should succeed w/o error
+ // TODO - though the particular error above doesn't re-occur, there are other 'unexpecteds' still to clean up
+ vkQueueWaitIdle(m_device->m_queue);
+ m_errorMonitor->SetUnexpectedError(
+ "pDescriptorSets must be a valid pointer to an array of descriptorSetCount VkDescriptorSet handles, each element of which "
+ "must either be a valid handle or VK_NULL_HANDLE");
+ m_errorMonitor->SetUnexpectedError("Unable to remove DescriptorSet obj");
+ vkFreeDescriptorSets(m_device->device(), ds_pool, 1, &descriptorSet);
+
+ // Attempt to submit cmd buffer containing the freed descriptor set
+ submit_info = {};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &m_commandBuffer->handle();
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " that is invalid because bound DescriptorSet ");
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+ m_errorMonitor->VerifyFound();
+
+ // Cleanup
+ vkFreeMemory(m_device->device(), image_memory, NULL);
+ vkDestroySampler(m_device->device(), sampler2, NULL);
+ vkDestroyImage(m_device->device(), image2, NULL);
+ vkDestroyImageView(m_device->device(), view, NULL);
+ vkDestroyImageView(m_device->device(), view2, NULL);
+ vkDestroyDescriptorPool(m_device->device(), ds_pool, NULL);
+}
+
+TEST_F(VkLayerTest, InvalidDescriptorSetSamplerDestroyed) {
+    TEST_DESCRIPTION("Attempt to draw with a bound descriptor set whose combined image sampler uses a sampler that has been deleted.");
+ ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+ ASSERT_NO_FATAL_FAILURE(InitViewport());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ OneOffDescriptorSet ds(m_device, {
+ {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
+ });
+
+ const VkPipelineLayoutObj pipeline_layout(m_device, {&ds.layout_});
+ // Create images to update the descriptor with
+ VkImageObj image(m_device);
+ const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
+ image.Init(32, 32, 1, tex_format, VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+ ASSERT_TRUE(image.initialized());
+
+ VkImageViewCreateInfo image_view_create_info = {};
+ image_view_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+ image_view_create_info.image = image.handle();
+ image_view_create_info.viewType = VK_IMAGE_VIEW_TYPE_2D;
+ image_view_create_info.format = tex_format;
+ image_view_create_info.subresourceRange.layerCount = 1;
+ image_view_create_info.subresourceRange.baseMipLevel = 0;
+ image_view_create_info.subresourceRange.levelCount = 1;
+ image_view_create_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+
+ VkImageView view;
+ VkResult err = vkCreateImageView(m_device->device(), &image_view_create_info, NULL, &view);
+ ASSERT_VK_SUCCESS(err);
+ // Create Samplers
+ VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
+ VkSampler sampler;
+ err = vkCreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
+ ASSERT_VK_SUCCESS(err);
+ // Update descriptor with image and sampler
+ VkDescriptorImageInfo img_info = {};
+ img_info.sampler = sampler;
+ img_info.imageView = view;
+ img_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+
+ VkWriteDescriptorSet descriptor_write;
+ memset(&descriptor_write, 0, sizeof(descriptor_write));
+ descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ descriptor_write.dstSet = ds.set_;
+ descriptor_write.dstBinding = 0;
+ descriptor_write.descriptorCount = 1;
+ descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+ descriptor_write.pImageInfo = &img_info;
+
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+ // Destroy the sampler before it's bound to the cmd buffer
+ vkDestroySampler(m_device->device(), sampler, NULL);
+
+ // Create PSO to be used for draw-time errors below
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "void main(){\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(set=0, binding=0) uniform sampler2D s;\n"
+ "layout(location=0) out vec4 x;\n"
+ "void main(){\n"
+ " x = texture(s, vec2(1));\n"
+ "}\n";
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+ VkPipelineObj pipe(m_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+ pipe.AddDefaultColorAttachment();
+ pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());
+
+    // The sampler was destroyed above, so the error is expected at draw time when the bound descriptor is used
+ m_commandBuffer->begin();
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+ vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+ vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1, &ds.set_, 0,
+ NULL);
+ VkViewport viewport = {0, 0, 16, 16, 0, 1};
+ VkRect2D scissor = {{0, 0}, {16, 16}};
+ vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+ vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ " Descriptor in binding #0 at global descriptor index 0 is using sampler ");
+ m_commandBuffer->Draw(1, 0, 0, 0);
+ m_errorMonitor->VerifyFound();
+
+ m_commandBuffer->EndRenderPass();
+ m_commandBuffer->end();
+
+ vkDestroyImageView(m_device->device(), view, NULL);
+}
+
+TEST_F(VkLayerTest, ImageDescriptorLayoutMismatchInternal) {
+ TEST_DESCRIPTION("Create an image sampler layout->image layout mismatch within a command buffer");
+ ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+ ASSERT_NO_FATAL_FAILURE(InitViewport());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkDescriptorPoolSize ds_type_count = {};
+ ds_type_count.type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+ ds_type_count.descriptorCount = 1;
+
+ VkDescriptorPoolCreateInfo ds_pool_ci = {};
+ ds_pool_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
+ ds_pool_ci.pNext = NULL;
+ ds_pool_ci.flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT;
+ ds_pool_ci.maxSets = 1;
+ ds_pool_ci.poolSizeCount = 1;
+ ds_pool_ci.pPoolSizes = &ds_type_count;
+
+ VkDescriptorPool ds_pool;
+ VkResult err = vkCreateDescriptorPool(m_device->device(), &ds_pool_ci, NULL, &ds_pool);
+ ASSERT_VK_SUCCESS(err);
+
+ VkDescriptorSetLayoutBinding dsl_binding = {};
+ dsl_binding.binding = 0;
+ dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+ dsl_binding.descriptorCount = 1;
+ dsl_binding.stageFlags = VK_SHADER_STAGE_ALL;
+ dsl_binding.pImmutableSamplers = NULL;
+
+ const VkDescriptorSetLayoutObj ds_layout(m_device, {dsl_binding});
+
+ VkDescriptorSet descriptorSet;
+ VkDescriptorSetAllocateInfo alloc_info = {};
+ alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
+ alloc_info.descriptorSetCount = 1;
+ alloc_info.descriptorPool = ds_pool;
+ alloc_info.pSetLayouts = &ds_layout.handle();
+ err = vkAllocateDescriptorSets(m_device->device(), &alloc_info, &descriptorSet);
+ ASSERT_VK_SUCCESS(err);
+
+ const VkPipelineLayoutObj pipeline_layout(m_device, {&ds_layout});
+
+ // Create images to update the descriptor with
+ const VkFormat format = VK_FORMAT_B8G8R8A8_UNORM;
+ VkImageObj image(m_device);
+ image.Init(32, 32, 1, format, VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, VK_IMAGE_TILING_OPTIMAL,
+ 0);
+ ASSERT_TRUE(image.initialized());
+
+ VkImageViewCreateInfo image_view_create_info = {};
+ image_view_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+ image_view_create_info.image = image.handle();
+ image_view_create_info.viewType = VK_IMAGE_VIEW_TYPE_2D;
+ image_view_create_info.format = format;
+ image_view_create_info.subresourceRange.layerCount = 1;
+ image_view_create_info.subresourceRange.baseMipLevel = 0;
+ image_view_create_info.subresourceRange.levelCount = 1;
+ image_view_create_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+
+ VkImageView view;
+ err = vkCreateImageView(m_device->device(), &image_view_create_info, NULL, &view);
+ ASSERT_VK_SUCCESS(err);
+ // Create Sampler
+ VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
+ VkSampler sampler;
+ err = vkCreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
+ ASSERT_VK_SUCCESS(err);
+ // Update descriptor with image and sampler
+ VkDescriptorImageInfo img_info = {};
+ img_info.sampler = sampler;
+ img_info.imageView = view;
+    // Descriptor is written with SHADER_READ_ONLY_OPTIMAL, but the actual layout at use time will be TRANSFER_SRC_OPTIMAL, causing a mismatch
+ img_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+
+ VkWriteDescriptorSet descriptor_write;
+ memset(&descriptor_write, 0, sizeof(descriptor_write));
+ descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ descriptor_write.dstSet = descriptorSet;
+ descriptor_write.dstBinding = 0;
+ descriptor_write.descriptorCount = 1;
+ descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+ descriptor_write.pImageInfo = &img_info;
+
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+
+ // Create PSO to be used for draw-time errors below
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "void main(){\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(set=0, binding=0) uniform sampler2D s;\n"
+ "layout(location=0) out vec4 x;\n"
+ "void main(){\n"
+ " x = texture(s, vec2(1));\n"
+ "}\n";
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+ VkPipelineObj pipe(m_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+ pipe.AddDefaultColorAttachment();
+ pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());
+
+ VkCommandBufferObj cmd_buf(m_device, m_commandPool);
+ cmd_buf.begin();
+    // Record a transition to TRANSFER_SRC_OPTIMAL, which differs from the SHADER_READ_ONLY_OPTIMAL layout written to the descriptor
+ image.SetLayout(&cmd_buf, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
+ cmd_buf.BeginRenderPass(m_renderPassBeginInfo);
+ vkCmdBindPipeline(cmd_buf.handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+ vkCmdBindDescriptorSets(cmd_buf.handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1, &descriptorSet, 0,
+ NULL);
+ VkViewport viewport = {0, 0, 16, 16, 0, 1};
+ VkRect2D scissor = {{0, 0}, {16, 16}};
+ vkCmdSetViewport(cmd_buf.handle(), 0, 1, &viewport);
+ vkCmdSetScissor(cmd_buf.handle(), 0, 1, &scissor);
+ // At draw time the update layout will mis-match the actual layout
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDescriptorImageInfo-imageLayout-00344");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "UNASSIGNED-CoreValidation-DrawState-DescriptorSetNotUpdated");
+ cmd_buf.Draw(1, 0, 0, 0);
+ m_errorMonitor->VerifyFound();
+ cmd_buf.EndRenderPass();
+ cmd_buf.end();
+ // Submit cmd buffer
+ VkSubmitInfo submit_info = {};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &cmd_buf.handle();
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+ vkQueueWaitIdle(m_device->m_queue);
+ // Cleanup
+ vkDestroySampler(m_device->device(), sampler, NULL);
+ vkDestroyImageView(m_device->device(), view, NULL);
+ vkDestroyDescriptorPool(m_device->device(), ds_pool, NULL);
+}
+
+TEST_F(VkLayerTest, ImageDescriptorLayoutMismatchExternal) {
+ TEST_DESCRIPTION("Create an image sampler layout->image layout mismatch external to a command buffer");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ OneOffDescriptorSet ds(m_device, {
+ {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
+ });
+
+ VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
+ VkSampler sampler;
+
+ VkResult err;
+ err = vkCreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
+ ASSERT_VK_SUCCESS(err);
+
+ const VkPipelineLayoutObj pipeline_layout(m_device, {&ds.layout_});
+
+ VkImageObj image(m_device);
+ // Transition image to be used in shader to SHADER_READ_ONLY_OPTIMAL
+ image.Init(128, 128, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
+ VK_IMAGE_TILING_OPTIMAL, 0);
+
+ ASSERT_TRUE(image.initialized());
+
+ VkImageView view;
+ VkImageViewCreateInfo ivci = {};
+ ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+ ivci.image = image.handle();
+ ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+ ivci.format = VK_FORMAT_R8G8B8A8_UNORM;
+ ivci.subresourceRange.layerCount = 1;
+ ivci.subresourceRange.baseMipLevel = 0;
+ ivci.subresourceRange.levelCount = 1;
+ ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+
+ err = vkCreateImageView(m_device->device(), &ivci, NULL, &view);
+ ASSERT_VK_SUCCESS(err);
+
+ VkDescriptorImageInfo image_info{};
+ // Set error condition -- anything but Shader_Read_Only_Optimal which is the current image layout
+ image_info.imageLayout = VK_IMAGE_LAYOUT_GENERAL;
+ image_info.imageView = view;
+ image_info.sampler = sampler;
+
+ VkWriteDescriptorSet descriptor_write = {};
+ descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ descriptor_write.dstSet = ds.set_;
+ descriptor_write.dstBinding = 0;
+ descriptor_write.descriptorCount = 1;
+ descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+ descriptor_write.pImageInfo = &image_info;
+
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "void main(){\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(set=0, binding=0) uniform sampler2D s;\n"
+ "layout(location=0) out vec4 x;\n"
+ "void main(){\n"
+ " x = texture(s, vec2(1));\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+ VkPipelineObj pipe(m_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+ pipe.AddDefaultColorAttachment();
+ pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "UNASSIGNED-CoreValidation-DrawState-InvalidImageLayout");
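+    // The layer is expected to flag the mismatch at submit time: the descriptor was written with GENERAL while the image is in SHADER_READ_ONLY_OPTIMAL.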
+
+ m_commandBuffer->begin();
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+ // Bind pipeline to cmd buffer
+ vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+ vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1, &ds.set_, 0,
+ nullptr);
+
+ VkViewport viewport = {0, 0, 16, 16, 0, 1};
+ VkRect2D scissor = {{0, 0}, {16, 16}};
+ vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+ vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
+
+ m_commandBuffer->Draw(1, 0, 0, 0);
+ m_commandBuffer->EndRenderPass();
+ m_commandBuffer->end();
+ VkSubmitInfo submit_info = {};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &m_commandBuffer->handle();
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+
+ m_errorMonitor->VerifyFound();
+ vkQueueWaitIdle(m_device->m_queue);
+ vkDestroyImageView(m_device->device(), view, NULL);
+ vkDestroySampler(m_device->device(), sampler, nullptr);
+}
+
+TEST_F(VkLayerTest, DescriptorPoolInUseDestroyedSignaled) {
+ TEST_DESCRIPTION("Delete a DescriptorPool with a DescriptorSet that is in use.");
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitViewport());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkDescriptorPoolSize ds_type_count = {};
+ ds_type_count.type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+ ds_type_count.descriptorCount = 1;
+
+ VkDescriptorPoolCreateInfo ds_pool_ci = {};
+ ds_pool_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
+ ds_pool_ci.pNext = NULL;
+ ds_pool_ci.maxSets = 1;
+ ds_pool_ci.poolSizeCount = 1;
+ ds_pool_ci.pPoolSizes = &ds_type_count;
+
+ VkDescriptorPool ds_pool;
+ VkResult err = vkCreateDescriptorPool(m_device->device(), &ds_pool_ci, NULL, &ds_pool);
+ ASSERT_VK_SUCCESS(err);
+
+ VkDescriptorSetLayoutBinding dsl_binding = {};
+ dsl_binding.binding = 0;
+ dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+ dsl_binding.descriptorCount = 1;
+ dsl_binding.stageFlags = VK_SHADER_STAGE_ALL;
+ dsl_binding.pImmutableSamplers = NULL;
+
+ const VkDescriptorSetLayoutObj ds_layout(m_device, {dsl_binding});
+
+ VkDescriptorSet descriptor_set;
+ VkDescriptorSetAllocateInfo alloc_info = {};
+ alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
+ alloc_info.descriptorSetCount = 1;
+ alloc_info.descriptorPool = ds_pool;
+ alloc_info.pSetLayouts = &ds_layout.handle();
+ err = vkAllocateDescriptorSets(m_device->device(), &alloc_info, &descriptor_set);
+ ASSERT_VK_SUCCESS(err);
+
+ const VkPipelineLayoutObj pipeline_layout(m_device, {&ds_layout});
+
+ // Create image to update the descriptor with
+ VkImageObj image(m_device);
+ image.Init(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+ ASSERT_TRUE(image.initialized());
+
+ VkImageView view = image.targetView(VK_FORMAT_B8G8R8A8_UNORM);
+ // Create Sampler
+ VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
+ VkSampler sampler;
+ err = vkCreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
+ ASSERT_VK_SUCCESS(err);
+ // Update descriptor with image and sampler
+ VkDescriptorImageInfo img_info = {};
+ img_info.sampler = sampler;
+ img_info.imageView = view;
+ img_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+
+ VkWriteDescriptorSet descriptor_write;
+ memset(&descriptor_write, 0, sizeof(descriptor_write));
+ descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ descriptor_write.dstSet = descriptor_set;
+ descriptor_write.dstBinding = 0;
+ descriptor_write.descriptorCount = 1;
+ descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+ descriptor_write.pImageInfo = &img_info;
+
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+
+ // Create PSO to be used for draw-time errors below
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "void main(){\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(set=0, binding=0) uniform sampler2D s;\n"
+ "layout(location=0) out vec4 x;\n"
+ "void main(){\n"
+ " x = texture(s, vec2(1));\n"
+ "}\n";
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+ VkPipelineObj pipe(m_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+ pipe.AddDefaultColorAttachment();
+ pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());
+
+ m_commandBuffer->begin();
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+ vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+ vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
+ &descriptor_set, 0, NULL);
+
+ VkViewport viewport = {0, 0, 16, 16, 0, 1};
+ VkRect2D scissor = {{0, 0}, {16, 16}};
+ vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+ vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
+
+ m_commandBuffer->Draw(1, 0, 0, 0);
+ m_commandBuffer->EndRenderPass();
+ m_commandBuffer->end();
+ // Submit cmd buffer to put pool in-flight
+ VkSubmitInfo submit_info = {};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &m_commandBuffer->handle();
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+ // Destroy pool while in-flight, causing error
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkDestroyDescriptorPool-descriptorPool-00303");
+ vkDestroyDescriptorPool(m_device->device(), ds_pool, NULL);
+ m_errorMonitor->VerifyFound();
+ vkQueueWaitIdle(m_device->m_queue);
+ // Cleanup
+ vkDestroySampler(m_device->device(), sampler, NULL);
+ m_errorMonitor->SetUnexpectedError(
+ "If descriptorPool is not VK_NULL_HANDLE, descriptorPool must be a valid VkDescriptorPool handle");
+ m_errorMonitor->SetUnexpectedError("Unable to remove DescriptorPool obj");
+ vkDestroyDescriptorPool(m_device->device(), ds_pool, NULL);
+ // TODO : It seems Validation layers think ds_pool was already destroyed, even though it wasn't?
+}
+
+TEST_F(VkLayerTest, DescriptorPoolInUseResetSignaled) {
+ TEST_DESCRIPTION("Reset a DescriptorPool with a DescriptorSet that is in use.");
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitViewport());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkDescriptorPoolSize ds_type_count = {};
+ ds_type_count.type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+ ds_type_count.descriptorCount = 1;
+
+ VkDescriptorPoolCreateInfo ds_pool_ci = {};
+ ds_pool_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
+ ds_pool_ci.pNext = nullptr;
+ ds_pool_ci.maxSets = 1;
+ ds_pool_ci.poolSizeCount = 1;
+ ds_pool_ci.pPoolSizes = &ds_type_count;
+
+ VkDescriptorPool ds_pool;
+ VkResult err = vkCreateDescriptorPool(m_device->device(), &ds_pool_ci, nullptr, &ds_pool);
+ ASSERT_VK_SUCCESS(err);
+
+ VkDescriptorSetLayoutBinding dsl_binding = {};
+ dsl_binding.binding = 0;
+ dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+ dsl_binding.descriptorCount = 1;
+ dsl_binding.stageFlags = VK_SHADER_STAGE_ALL;
+ dsl_binding.pImmutableSamplers = nullptr;
+
+ const VkDescriptorSetLayoutObj ds_layout(m_device, {dsl_binding});
+
+ VkDescriptorSet descriptor_set;
+ VkDescriptorSetAllocateInfo alloc_info = {};
+ alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
+ alloc_info.descriptorSetCount = 1;
+ alloc_info.descriptorPool = ds_pool;
+ alloc_info.pSetLayouts = &ds_layout.handle();
+ err = vkAllocateDescriptorSets(m_device->device(), &alloc_info, &descriptor_set);
+ ASSERT_VK_SUCCESS(err);
+
+ const VkPipelineLayoutObj pipeline_layout(m_device, {&ds_layout});
+
+ // Create image to update the descriptor with
+ VkImageObj image(m_device);
+ image.Init(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+ ASSERT_TRUE(image.initialized());
+
+ VkImageView view = image.targetView(VK_FORMAT_B8G8R8A8_UNORM);
+ // Create Sampler
+ VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
+ VkSampler sampler;
+ err = vkCreateSampler(m_device->device(), &sampler_ci, nullptr, &sampler);
+ ASSERT_VK_SUCCESS(err);
+ // Update descriptor with image and sampler
+ VkDescriptorImageInfo img_info = {};
+ img_info.sampler = sampler;
+ img_info.imageView = view;
+ img_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+
+ VkWriteDescriptorSet descriptor_write;
+ memset(&descriptor_write, 0, sizeof(descriptor_write));
+ descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ descriptor_write.dstSet = descriptor_set;
+ descriptor_write.dstBinding = 0;
+ descriptor_write.descriptorCount = 1;
+ descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+ descriptor_write.pImageInfo = &img_info;
+
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, nullptr);
+
+ // Create PSO to be used for draw-time errors below
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "void main(){\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(set=0, binding=0) uniform sampler2D s;\n"
+ "layout(location=0) out vec4 x;\n"
+ "void main(){\n"
+ " x = texture(s, vec2(1));\n"
+ "}\n";
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+ VkPipelineObj pipe(m_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+ pipe.AddDefaultColorAttachment();
+ pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());
+
+ m_commandBuffer->begin();
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+ vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+ vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
+ &descriptor_set, 0, nullptr);
+
+ VkViewport viewport = {0, 0, 16, 16, 0, 1};
+ VkRect2D scissor = {{0, 0}, {16, 16}};
+ vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+ vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
+
+ m_commandBuffer->Draw(1, 0, 0, 0);
+ m_commandBuffer->EndRenderPass();
+ m_commandBuffer->end();
+ // Submit cmd buffer to put pool in-flight
+ VkSubmitInfo submit_info = {};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &m_commandBuffer->handle();
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
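+ // vkResetDescriptorPool implicitly frees every descriptor set allocated from the pool, so it
+ // must not be called while any of those sets may still be in use by pending work.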
+ // Reset pool while in-flight, causing error
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkResetDescriptorPool-descriptorPool-00313");
+ vkResetDescriptorPool(m_device->device(), ds_pool, 0);
+ m_errorMonitor->VerifyFound();
+ vkQueueWaitIdle(m_device->m_queue);
+ // Cleanup
+ vkDestroySampler(m_device->device(), sampler, nullptr);
+ m_errorMonitor->SetUnexpectedError(
+ "If descriptorPool is not VK_NULL_HANDLE, descriptorPool must be a valid VkDescriptorPool handle");
+ m_errorMonitor->SetUnexpectedError("Unable to remove DescriptorPool obj");
+ vkDestroyDescriptorPool(m_device->device(), ds_pool, nullptr);
+}
+
+TEST_F(VkLayerTest, DescriptorImageUpdateNoMemoryBound) {
+ TEST_DESCRIPTION("Attempt an image descriptor set update where image's bound memory has been freed.");
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitViewport());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkDescriptorPoolSize ds_type_count = {};
+ ds_type_count.type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+ ds_type_count.descriptorCount = 1;
+
+ VkDescriptorPoolCreateInfo ds_pool_ci = {};
+ ds_pool_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
+ ds_pool_ci.pNext = NULL;
+ ds_pool_ci.maxSets = 1;
+ ds_pool_ci.poolSizeCount = 1;
+ ds_pool_ci.pPoolSizes = &ds_type_count;
+
+ VkDescriptorPool ds_pool;
+ VkResult err = vkCreateDescriptorPool(m_device->device(), &ds_pool_ci, NULL, &ds_pool);
+ ASSERT_VK_SUCCESS(err);
+
+ VkDescriptorSetLayoutBinding dsl_binding = {};
+ dsl_binding.binding = 0;
+ dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+ dsl_binding.descriptorCount = 1;
+ dsl_binding.stageFlags = VK_SHADER_STAGE_ALL;
+ dsl_binding.pImmutableSamplers = NULL;
+
+ const VkDescriptorSetLayoutObj ds_layout(m_device, {dsl_binding});
+
+ VkDescriptorSet descriptorSet;
+ VkDescriptorSetAllocateInfo alloc_info = {};
+ alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
+ alloc_info.descriptorSetCount = 1;
+ alloc_info.descriptorPool = ds_pool;
+ alloc_info.pSetLayouts = &ds_layout.handle();
+ err = vkAllocateDescriptorSets(m_device->device(), &alloc_info, &descriptorSet);
+ ASSERT_VK_SUCCESS(err);
+
+ const VkPipelineLayoutObj pipeline_layout(m_device, {&ds_layout});
+
+ // Create images to update the descriptor with
+ VkImage image;
+ const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
+ const int32_t tex_width = 32;
+ const int32_t tex_height = 32;
+ VkImageCreateInfo image_create_info = {};
+ image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ image_create_info.pNext = NULL;
+ image_create_info.imageType = VK_IMAGE_TYPE_2D;
+ image_create_info.format = tex_format;
+ image_create_info.extent.width = tex_width;
+ image_create_info.extent.height = tex_height;
+ image_create_info.extent.depth = 1;
+ image_create_info.mipLevels = 1;
+ image_create_info.arrayLayers = 1;
+ image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+ image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+ image_create_info.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
+ image_create_info.flags = 0;
+ err = vkCreateImage(m_device->device(), &image_create_info, NULL, &image);
+ ASSERT_VK_SUCCESS(err);
+ // Initially bind memory to avoid error at bind view time. We'll break binding before update.
+ VkMemoryRequirements memory_reqs;
+ VkDeviceMemory image_memory;
+ bool pass;
+ VkMemoryAllocateInfo memory_info = {};
+ memory_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ memory_info.pNext = NULL;
+ memory_info.allocationSize = 0;
+ memory_info.memoryTypeIndex = 0;
+ vkGetImageMemoryRequirements(m_device->device(), image, &memory_reqs);
+ // Allocate enough memory for image
+ memory_info.allocationSize = memory_reqs.size;
+ pass = m_device->phy().set_memory_type(memory_reqs.memoryTypeBits, &memory_info, 0);
+ ASSERT_TRUE(pass);
+ err = vkAllocateMemory(m_device->device(), &memory_info, NULL, &image_memory);
+ ASSERT_VK_SUCCESS(err);
+ err = vkBindImageMemory(m_device->device(), image, image_memory, 0);
+ ASSERT_VK_SUCCESS(err);
+
+ VkImageViewCreateInfo image_view_create_info = {};
+ image_view_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+ image_view_create_info.image = image;
+ image_view_create_info.viewType = VK_IMAGE_VIEW_TYPE_2D;
+ image_view_create_info.format = tex_format;
+ image_view_create_info.subresourceRange.layerCount = 1;
+ image_view_create_info.subresourceRange.baseMipLevel = 0;
+ image_view_create_info.subresourceRange.levelCount = 1;
+ image_view_create_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+
+ VkImageView view;
+ err = vkCreateImageView(m_device->device(), &image_view_create_info, NULL, &view);
+ ASSERT_VK_SUCCESS(err);
+ // Create Samplers
+ VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
+ VkSampler sampler;
+ err = vkCreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
+ ASSERT_VK_SUCCESS(err);
+ // Update descriptor with image and sampler
+ VkDescriptorImageInfo img_info = {};
+ img_info.sampler = sampler;
+ img_info.imageView = view;
+ img_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+
+ VkWriteDescriptorSet descriptor_write;
+ memset(&descriptor_write, 0, sizeof(descriptor_write));
+ descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ descriptor_write.dstSet = descriptorSet;
+ descriptor_write.dstBinding = 0;
+ descriptor_write.descriptorCount = 1;
+ descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+ descriptor_write.pImageInfo = &img_info;
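+ // The image and its view remain valid handles, but core validation tracks the memory binding
+ // and is expected to reject a descriptor write that references an image whose backing memory
+ // has been freed.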
+ // Break memory binding and attempt update
+ vkFreeMemory(m_device->device(), image_memory, nullptr);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ " previously bound memory was freed. Memory must not be freed prior to this operation.");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "vkUpdateDescriptorSets() failed write update validation for Descriptor Set 0x");
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+ m_errorMonitor->VerifyFound();
+ // Cleanup
+ vkDestroyImage(m_device->device(), image, NULL);
+ vkDestroySampler(m_device->device(), sampler, NULL);
+ vkDestroyImageView(m_device->device(), view, NULL);
+ vkDestroyDescriptorPool(m_device->device(), ds_pool, NULL);
+}
+
+TEST_F(VkLayerTest, InvalidPipeline) {
+ uint64_t fake_pipeline_handle = 0xbaad6001;
+ VkPipeline bad_pipeline = reinterpret_cast<VkPipeline &>(fake_pipeline_handle);
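+ // Non-dispatchable handles are 64-bit values, so reinterpreting an arbitrary uint64_t yields a
+ // handle that is well-formed at the ABI level but refers to no live object; that is exactly
+ // what the handle-validation checks below are expected to catch.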
+
+ // Enable VK_KHR_draw_indirect_count for KHR variants
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME)) {
+ m_device_extension_names.push_back(VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME);
+ }
+ ASSERT_NO_FATAL_FAILURE(InitState());
+ bool has_khr_indirect = DeviceExtensionEnabled(VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME);
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ // Attempt to bind an invalid Pipeline to a valid Command Buffer
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBindPipeline-pipeline-parameter");
+ m_commandBuffer->begin();
+ vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, bad_pipeline);
+ m_errorMonitor->VerifyFound();
+
+ // Try each of the 6 flavors of Draw()
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo); // Draw*() calls must be submitted within a renderpass
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDraw-None-00442");
+ m_commandBuffer->Draw(1, 0, 0, 0);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndexed-None-00461");
+ m_commandBuffer->DrawIndexed(1, 1, 0, 0, 0);
+ m_errorMonitor->VerifyFound();
+
+ VkBufferObj buffer;
+ VkBufferCreateInfo ci = {};
+ ci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+ ci.usage = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT;
+ ci.size = 1024;
+ buffer.init(*m_device, ci);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndirect-None-00485");
+ vkCmdDrawIndirect(m_commandBuffer->handle(), buffer.handle(), 0, 1, 0);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndexedIndirect-None-00537");
+ vkCmdDrawIndexedIndirect(m_commandBuffer->handle(), buffer.handle(), 0, 1, 0);
+ m_errorMonitor->VerifyFound();
+
+ if (has_khr_indirect) {
+ auto fpCmdDrawIndirectCountKHR =
+ (PFN_vkCmdDrawIndirectCountKHR)vkGetDeviceProcAddr(m_device->device(), "vkCmdDrawIndirectCountKHR");
+ ASSERT_NE(fpCmdDrawIndirectCountKHR, nullptr);
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndirectCountKHR-None-03119");
+ // A stride of 512 is a multiple of 4 and >= sizeof(VkDrawIndirectCommand), so the only expected error is the unbound pipeline
+ fpCmdDrawIndirectCountKHR(m_commandBuffer->handle(), buffer.handle(), 0, buffer.handle(), 512, 1, 512);
+ m_errorMonitor->VerifyFound();
+
+ auto fpCmdDrawIndexedIndirectCountKHR =
+ (PFN_vkCmdDrawIndexedIndirectCountKHR)vkGetDeviceProcAddr(m_device->device(), "vkCmdDrawIndexedIndirectCountKHR");
+ ASSERT_NE(fpCmdDrawIndexedIndirectCountKHR, nullptr);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndexedIndirectCountKHR-None-03151");
+ // A stride of 512 is a multiple of 4 and >= sizeof(VkDrawIndexedIndirectCommand), so the only expected error is the unbound pipeline
+ fpCmdDrawIndexedIndirectCountKHR(m_commandBuffer->handle(), buffer.handle(), 0, buffer.handle(), 512, 1, 512);
+ m_errorMonitor->VerifyFound();
+ }
+
+ // Also try the Dispatch variants
+ vkCmdEndRenderPass(m_commandBuffer->handle()); // Compute submissions must be outside a renderpass
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDispatch-None-00391");
+ vkCmdDispatch(m_commandBuffer->handle(), 0, 0, 0);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDispatchIndirect-None-00404");
+ vkCmdDispatchIndirect(m_commandBuffer->handle(), buffer.handle(), 0);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CmdDispatchExceedLimits) {
+ TEST_DESCRIPTION("Compute dispatch with dimensions that exceed device limits");
+
+ // Enable KHX device group extensions, if available
+ if (InstanceExtensionSupported(VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME);
+ }
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ bool khx_dg_ext_available = false;
+ if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_DEVICE_GROUP_EXTENSION_NAME)) {
+ m_device_extension_names.push_back(VK_KHR_DEVICE_GROUP_EXTENSION_NAME);
+ khx_dg_ext_available = true;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ uint32_t x_limit = m_device->props.limits.maxComputeWorkGroupCount[0];
+ uint32_t y_limit = m_device->props.limits.maxComputeWorkGroupCount[1];
+ uint32_t z_limit = m_device->props.limits.maxComputeWorkGroupCount[2];
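+ // If any limit is already UINT32_MAX, the limit + 1 dispatches below would wrap to 0, so the
+ // exceed-by-one cases cannot be exercised and the test is skipped.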
+ if (std::max({x_limit, y_limit, z_limit}) == UINT32_MAX) {
+ printf("%s device maxComputeWorkGroupCount limit reports UINT32_MAX, test not possible, skipping.\n", kSkipPrefix);
+ return;
+ }
+
+ // Create a minimal compute pipeline
+ std::string cs_text = "#version 450\nvoid main() {}\n"; // minimal no-op shader
+ VkShaderObj cs_obj(m_device, cs_text.c_str(), VK_SHADER_STAGE_COMPUTE_BIT, this);
+
+ VkPipelineLayoutCreateInfo info = {};
+ info.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
+ info.pNext = nullptr;
+ VkPipelineLayout pipe_layout;
+ vkCreatePipelineLayout(device(), &info, nullptr, &pipe_layout);
+
+ VkComputePipelineCreateInfo pipeline_info = {};
+ pipeline_info.sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO;
+ pipeline_info.pNext = nullptr;
+ pipeline_info.flags = khx_dg_ext_available ? VK_PIPELINE_CREATE_DISPATCH_BASE_KHR : 0;
+ pipeline_info.layout = pipe_layout;
+ pipeline_info.basePipelineHandle = VK_NULL_HANDLE;
+ pipeline_info.basePipelineIndex = -1;
+ pipeline_info.stage.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+ pipeline_info.stage.pNext = nullptr;
+ pipeline_info.stage.flags = 0;
+ pipeline_info.stage.stage = VK_SHADER_STAGE_COMPUTE_BIT;
+ pipeline_info.stage.module = cs_obj.handle();
+ pipeline_info.stage.pName = "main";
+ pipeline_info.stage.pSpecializationInfo = nullptr;
+ VkPipeline cs_pipeline;
+ vkCreateComputePipelines(device(), VK_NULL_HANDLE, 1, &pipeline_info, nullptr, &cs_pipeline);
+
+ // Bind pipeline to command buffer
+ m_commandBuffer->begin();
+ vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_COMPUTE, cs_pipeline);
+
+ // Dispatch counts that exceed device limits
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDispatch-groupCountX-00386");
+ vkCmdDispatch(m_commandBuffer->handle(), x_limit + 1, y_limit, z_limit);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDispatch-groupCountY-00387");
+ vkCmdDispatch(m_commandBuffer->handle(), x_limit, y_limit + 1, z_limit);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDispatch-groupCountZ-00388");
+ vkCmdDispatch(m_commandBuffer->handle(), x_limit, y_limit, z_limit + 1);
+ m_errorMonitor->VerifyFound();
+
+ if (khx_dg_ext_available) {
+ PFN_vkCmdDispatchBaseKHR fp_vkCmdDispatchBaseKHR =
+ (PFN_vkCmdDispatchBaseKHR)vkGetInstanceProcAddr(instance(), "vkCmdDispatchBaseKHR");
+
+ // Base equals or exceeds limit
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDispatchBase-baseGroupX-00421");
+ fp_vkCmdDispatchBaseKHR(m_commandBuffer->handle(), x_limit, y_limit - 1, z_limit - 1, 0, 0, 0);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDispatchBase-baseGroupX-00422");
+ fp_vkCmdDispatchBaseKHR(m_commandBuffer->handle(), x_limit - 1, y_limit, z_limit - 1, 0, 0, 0);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDispatchBase-baseGroupZ-00423");
+ fp_vkCmdDispatchBaseKHR(m_commandBuffer->handle(), x_limit - 1, y_limit - 1, z_limit, 0, 0, 0);
+ m_errorMonitor->VerifyFound();
+
+ // (Base + count) exceeds limit
+ uint32_t x_base = x_limit / 2;
+ uint32_t y_base = y_limit / 2;
+ uint32_t z_base = z_limit / 2;
+ x_limit -= x_base;
+ y_limit -= y_base;
+ z_limit -= z_base;
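+ // After splitting each limit into a base half and a count half, base + (count + 1) exceeds the
+ // original maxComputeWorkGroupCount value by exactly one in the varied dimension, which is what
+ // the three checks below rely on.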
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDispatchBase-groupCountX-00424");
+ fp_vkCmdDispatchBaseKHR(m_commandBuffer->handle(), x_base, y_base, z_base, x_limit + 1, y_limit, z_limit);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDispatchBase-groupCountY-00425");
+ fp_vkCmdDispatchBaseKHR(m_commandBuffer->handle(), x_base, y_base, z_base, x_limit, y_limit + 1, z_limit);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDispatchBase-groupCountZ-00426");
+ fp_vkCmdDispatchBaseKHR(m_commandBuffer->handle(), x_base, y_base, z_base, x_limit, y_limit, z_limit + 1);
+ m_errorMonitor->VerifyFound();
+ } else {
+ printf("%s KHX_DEVICE_GROUP_* extensions not supported, skipping CmdDispatchBaseKHR() tests.\n", kSkipPrefix);
+ }
+
+ // Clean up
+ vkDestroyPipeline(device(), cs_pipeline, nullptr);
+ vkDestroyPipelineLayout(device(), pipe_layout, nullptr);
+}
+
+TEST_F(VkLayerTest, MultiplaneImageLayoutBadAspectFlags) {
+ TEST_DESCRIPTION("Query layout of a multiplane image using illegal aspect flag masks");
+
+ // Enable KHR multiplane req'd extensions
+ bool mp_extensions = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
+ VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION);
+ if (mp_extensions) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ }
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+ mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+ mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+ mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+ if (mp_extensions) {
+ m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+ } else {
+ printf("%s test requires KHR multiplane extensions, not available. Skipping.\n", kSkipPrefix);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ VkImageCreateInfo ci = {};
+ ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ ci.pNext = NULL;
+ ci.flags = 0;
+ ci.imageType = VK_IMAGE_TYPE_2D;
+ ci.format = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR;
+ ci.extent = {128, 128, 1};
+ ci.mipLevels = 1;
+ ci.arrayLayers = 1;
+ ci.samples = VK_SAMPLE_COUNT_1_BIT;
+ ci.tiling = VK_IMAGE_TILING_LINEAR;
+ ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
+ ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+ ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+
+ // Verify formats
+ bool supported = ImageFormatAndFeaturesSupported(instance(), gpu(), ci, VK_FORMAT_FEATURE_TRANSFER_SRC_BIT);
+ ci.format = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR;
+ supported = supported && ImageFormatAndFeaturesSupported(instance(), gpu(), ci, VK_FORMAT_FEATURE_TRANSFER_SRC_BIT);
+ if (!supported) {
+ printf("%s Multiplane image format not supported. Skipping test.\n", kSkipPrefix);
+ return; // Assume there's low ROI on searching for different mp formats
+ }
+
+ VkImage image_2plane, image_3plane;
+ ci.format = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR;
+ VkResult err = vkCreateImage(device(), &ci, NULL, &image_2plane);
+ ASSERT_VK_SUCCESS(err);
+
+ ci.format = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR;
+ err = vkCreateImage(device(), &ci, NULL, &image_3plane);
+ ASSERT_VK_SUCCESS(err);
+
+ // Query layout of 3rd plane, for a 2-plane image
+ VkImageSubresource subres = {};
+ subres.aspectMask = VK_IMAGE_ASPECT_PLANE_2_BIT_KHR;
+ subres.mipLevel = 0;
+ subres.arrayLayer = 0;
+ VkSubresourceLayout layout = {};
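+ // A 2-plane format exposes only the PLANE_0 and PLANE_1 aspects, and a multi-planar image must
+ // be queried one plane at a time rather than with the COLOR aspect, so both queries below are
+ // expected to fail validation.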
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkGetImageSubresourceLayout-format-01581");
+ vkGetImageSubresourceLayout(device(), image_2plane, &subres, &layout);
+ m_errorMonitor->VerifyFound();
+
+ // Query layout using color aspect, for a 3-plane image
+ subres.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkGetImageSubresourceLayout-format-01582");
+ vkGetImageSubresourceLayout(device(), image_3plane, &subres, &layout);
+ m_errorMonitor->VerifyFound();
+
+ // Clean up
+ vkDestroyImage(device(), image_2plane, NULL);
+ vkDestroyImage(device(), image_3plane, NULL);
+}
+
+TEST_F(VkPositiveLayerTest, MultiplaneGetImageSubresourceLayout) {
+ TEST_DESCRIPTION("Positive test, query layout of a single plane of a multiplane image. (repro Github #2530)");
+
+ // Enable KHR multiplane req'd extensions
+ bool mp_extensions = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
+ VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION);
+ if (mp_extensions) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ }
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+ mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+ mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+ mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+ if (mp_extensions) {
+ m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+ } else {
+ printf("%s test requires KHR multiplane extensions, not available. Skipping.\n", kSkipPrefix);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ VkImageCreateInfo ci = {};
+ ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ ci.pNext = NULL;
+ ci.flags = 0;
+ ci.imageType = VK_IMAGE_TYPE_2D;
+ ci.format = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR;
+ ci.extent = {128, 128, 1};
+ ci.mipLevels = 1;
+ ci.arrayLayers = 1;
+ ci.samples = VK_SAMPLE_COUNT_1_BIT;
+ ci.tiling = VK_IMAGE_TILING_LINEAR;
+ ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
+ ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+ ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+
+ // Verify format
+ bool supported = ImageFormatAndFeaturesSupported(instance(), gpu(), ci, VK_FORMAT_FEATURE_TRANSFER_SRC_BIT);
+ if (!supported) {
+ printf("%s Multiplane image format not supported. Skipping test.\n", kSkipPrefix);
+ return; // Assume there's low ROI on searching for different mp formats
+ }
+
+ VkImage image;
+ VkResult err = vkCreateImage(device(), &ci, NULL, &image);
+ ASSERT_VK_SUCCESS(err);
+
+ // Query layout of 3rd plane
+ VkImageSubresource subres = {};
+ subres.aspectMask = VK_IMAGE_ASPECT_PLANE_2_BIT_KHR;
+ subres.mipLevel = 0;
+ subres.arrayLayer = 0;
+ VkSubresourceLayout layout = {};
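+ // PLANE_2 is a legal aspect for a 3-plane format, so this query should not generate any
+ // validation message.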
+
+ m_errorMonitor->ExpectSuccess();
+ vkGetImageSubresourceLayout(device(), image, &subres, &layout);
+ m_errorMonitor->VerifyNotFound();
+
+ vkDestroyImage(device(), image, NULL);
+}
+
+TEST_F(VkLayerTest, InvalidBufferViewObject) {
+ // Create a single TEXEL_BUFFER descriptor and send it an invalid bufferView
+ // First, cause the bufferView to be invalid due to underlying buffer being destroyed
+ // Then destroy view itself and verify that same error is hit
+ VkResult err;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-descriptorType-00323");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ VkDescriptorPoolSize ds_type_count = {};
+ ds_type_count.type = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
+ ds_type_count.descriptorCount = 1;
+
+ VkDescriptorPoolCreateInfo ds_pool_ci = {};
+ ds_pool_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
+ ds_pool_ci.pNext = NULL;
+ ds_pool_ci.maxSets = 1;
+ ds_pool_ci.poolSizeCount = 1;
+ ds_pool_ci.pPoolSizes = &ds_type_count;
+
+ VkDescriptorPool ds_pool;
+ err = vkCreateDescriptorPool(m_device->device(), &ds_pool_ci, NULL, &ds_pool);
+ ASSERT_VK_SUCCESS(err);
+
+ VkDescriptorSetLayoutBinding dsl_binding = {};
+ dsl_binding.binding = 0;
+ dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
+ dsl_binding.descriptorCount = 1;
+ dsl_binding.stageFlags = VK_SHADER_STAGE_ALL;
+ dsl_binding.pImmutableSamplers = NULL;
+
+ const VkDescriptorSetLayoutObj ds_layout(m_device, {dsl_binding});
+
+ VkDescriptorSet descriptorSet;
+ VkDescriptorSetAllocateInfo alloc_info = {};
+ alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
+ alloc_info.descriptorSetCount = 1;
+ alloc_info.descriptorPool = ds_pool;
+ alloc_info.pSetLayouts = &ds_layout.handle();
+ err = vkAllocateDescriptorSets(m_device->device(), &alloc_info, &descriptorSet);
+ ASSERT_VK_SUCCESS(err);
+
+ // Create a valid bufferView to start with
+ VkBuffer buffer;
+ uint32_t queue_family_index = 0;
+ VkBufferCreateInfo buffer_create_info = {};
+ buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+ buffer_create_info.size = 1024;
+ buffer_create_info.usage = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT;
+ buffer_create_info.queueFamilyIndexCount = 1;
+ buffer_create_info.pQueueFamilyIndices = &queue_family_index;
+
+ err = vkCreateBuffer(m_device->device(), &buffer_create_info, NULL, &buffer);
+ ASSERT_VK_SUCCESS(err);
+
+ VkMemoryRequirements memory_reqs;
+ VkDeviceMemory buffer_memory;
+
+ VkMemoryAllocateInfo memory_info = {};
+ memory_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ memory_info.allocationSize = 0;
+ memory_info.memoryTypeIndex = 0;
+
+ vkGetBufferMemoryRequirements(m_device->device(), buffer, &memory_reqs);
+ memory_info.allocationSize = memory_reqs.size;
+ bool pass = m_device->phy().set_memory_type(memory_reqs.memoryTypeBits, &memory_info, 0);
+ ASSERT_TRUE(pass);
+
+ err = vkAllocateMemory(m_device->device(), &memory_info, NULL, &buffer_memory);
+ ASSERT_VK_SUCCESS(err);
+ err = vkBindBufferMemory(m_device->device(), buffer, buffer_memory, 0);
+ ASSERT_VK_SUCCESS(err);
+
+ VkBufferView view;
+ VkBufferViewCreateInfo bvci = {};
+ bvci.sType = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO;
+ bvci.buffer = buffer;
+ bvci.format = VK_FORMAT_R32_SFLOAT;
+ bvci.range = VK_WHOLE_SIZE;
+
+ err = vkCreateBufferView(m_device->device(), &bvci, NULL, &view);
+ ASSERT_VK_SUCCESS(err);
+
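+ // The buffer view holds a reference to the buffer it was created from; once that buffer is
+ // destroyed, the view can no longer legally be written into a descriptor set.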
+ // First destroy the buffer underlying the view, which should hit the error in core validation (CV)
+ vkDestroyBuffer(m_device->device(), buffer, NULL);
+
+ VkWriteDescriptorSet descriptor_write;
+ memset(&descriptor_write, 0, sizeof(descriptor_write));
+ descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ descriptor_write.dstSet = descriptorSet;
+ descriptor_write.dstBinding = 0;
+ descriptor_write.descriptorCount = 1;
+ descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
+ descriptor_write.pTexelBufferView = &view;
+
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+ m_errorMonitor->VerifyFound();
+
+ // Now destroy the view itself and verify the same error, which is hit in parameter validation (PV) this time
+ vkDestroyBufferView(m_device->device(), view, NULL);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-descriptorType-00323");
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+ m_errorMonitor->VerifyFound();
+
+ vkFreeMemory(m_device->device(), buffer_memory, NULL);
+ vkDestroyDescriptorPool(m_device->device(), ds_pool, NULL);
+}
+
+TEST_F(VkLayerTest, CreateBufferViewNoMemoryBoundToBuffer) {
+ TEST_DESCRIPTION("Attempt to create a buffer view with a buffer that has no memory bound to it.");
+
+ VkResult err;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ " used with no memory bound. Memory should be bound by calling vkBindBufferMemory().");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ // Create a buffer with no bound memory and then attempt to create
+ // a buffer view.
+ VkBufferCreateInfo buff_ci = {};
+ buff_ci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+ buff_ci.usage = VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT;
+ buff_ci.size = 256;
+ buff_ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+ VkBuffer buffer;
+ err = vkCreateBuffer(m_device->device(), &buff_ci, NULL, &buffer);
+ ASSERT_VK_SUCCESS(err);
+
+ VkBufferViewCreateInfo buff_view_ci = {};
+ buff_view_ci.sType = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO;
+ buff_view_ci.buffer = buffer;
+ buff_view_ci.format = VK_FORMAT_R8_UNORM;
+ buff_view_ci.range = VK_WHOLE_SIZE;
+ VkBufferView buff_view;
+ err = vkCreateBufferView(m_device->device(), &buff_view_ci, NULL, &buff_view);
+
+ m_errorMonitor->VerifyFound();
+ vkDestroyBuffer(m_device->device(), buffer, NULL);
+ // If last error is success, it still created the view, so delete it.
+ if (err == VK_SUCCESS) {
+ vkDestroyBufferView(m_device->device(), buff_view, NULL);
+ }
+}
+
+TEST_F(VkLayerTest, InvalidBufferViewCreateInfoEntries) {
+ TEST_DESCRIPTION("Attempt to create a buffer view with invalid create info.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ const VkPhysicalDeviceLimits &dev_limits = m_device->props.limits;
+ const VkDeviceSize minTexelBufferOffsetAlignment = dev_limits.minTexelBufferOffsetAlignment;
+ if (minTexelBufferOffsetAlignment == 1) {
+ printf("%s Test requires minTexelOffsetAlignment to not be equal to 1. \n", kSkipPrefix);
+ return;
+ }
+
+ const VkFormat format_with_uniform_texel_support = VK_FORMAT_R8G8B8A8_UNORM;
+ const char *format_with_uniform_texel_support_string = "VK_FORMAT_R8G8B8A8_UNORM";
+ const VkFormat format_without_texel_support = VK_FORMAT_R8G8B8_UNORM;
+ const char *format_without_texel_support_string = "VK_FORMAT_R8G8B8_UNORM";
+ VkFormatProperties format_properties;
+ vkGetPhysicalDeviceFormatProperties(gpu(), format_with_uniform_texel_support, &format_properties);
+ if (!(format_properties.bufferFeatures & VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT)) {
+ printf("%s Test requires %s to support VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT\n", kSkipPrefix,
+ format_with_uniform_texel_support_string);
+ return;
+ }
+ vkGetPhysicalDeviceFormatProperties(gpu(), format_without_texel_support, &format_properties);
+ if ((format_properties.bufferFeatures & VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT) ||
+ (format_properties.bufferFeatures & VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT)) {
+ printf(
+ "%s Test requires %s to not support VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT nor "
+ "VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT\n",
+ kSkipPrefix, format_without_texel_support_string);
+ return;
+ }
+
+ // Create a test buffer--buffer must have been created using VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT or
+ // VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT, so use a different usage value instead to cause an error
+ const VkDeviceSize resource_size = 1024;
+ const VkBufferCreateInfo bad_buffer_info = VkBufferObj::create_info(resource_size, VK_BUFFER_USAGE_INDEX_BUFFER_BIT);
+ VkBufferObj bad_buffer;
+ bad_buffer.init(*m_device, bad_buffer_info, (VkMemoryPropertyFlags)VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
+
+ // Create a test buffer view
+ VkBufferViewCreateInfo buff_view_ci = {};
+ buff_view_ci.sType = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO;
+ buff_view_ci.buffer = bad_buffer.handle();
+ buff_view_ci.format = format_with_uniform_texel_support;
+ buff_view_ci.range = VK_WHOLE_SIZE;
+
+ auto CatchError = [this, &buff_view_ci](const string &desired_error_string) {
+ VkBufferView buff_view;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, desired_error_string);
+ VkResult err = vkCreateBufferView(m_device->device(), &buff_view_ci, NULL, &buff_view);
+ m_errorMonitor->VerifyFound();
+ // If previous error is success, it still created the view, so delete it
+ if (err == VK_SUCCESS) {
+ vkDestroyBufferView(m_device->device(), buff_view, NULL);
+ }
+ };
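+ // Each check below tweaks one or more fields of buff_view_ci and uses the lambda to assert that
+ // the expected VUID fires for that configuration.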
+
+ CatchError("VUID-VkBufferViewCreateInfo-buffer-00932");
+
+ // Create a better test buffer
+ const VkBufferCreateInfo buffer_info = VkBufferObj::create_info(resource_size, VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT);
+ VkBufferObj buffer;
+ buffer.init(*m_device, buffer_info, (VkMemoryPropertyFlags)VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
+
+ // Offset must be less than the size of the buffer, so set it equal to the buffer size to cause an error
+ buff_view_ci.buffer = buffer.handle();
+ buff_view_ci.offset = buffer.create_info().size;
+ CatchError("VUID-VkBufferViewCreateInfo-offset-00925");
+
+ // Offset must be a multiple of VkPhysicalDeviceLimits::minTexelBufferOffsetAlignment so add 1 to ensure it is not
+ buff_view_ci.offset = minTexelBufferOffsetAlignment + 1;
+ CatchError("VUID-VkBufferViewCreateInfo-offset-00926");
+
+ // Set offset to acceptable value for range tests
+ buff_view_ci.offset = minTexelBufferOffsetAlignment;
+ // Setting range equal to 0 will cause an error to occur
+ buff_view_ci.range = 0;
+ CatchError("VUID-VkBufferViewCreateInfo-range-00928");
+
+ uint32_t format_size = FormatElementSize(buff_view_ci.format);
+ // Range must be a multiple of the element size of format, so add one to ensure it is not
+ buff_view_ci.range = format_size + 1;
+ CatchError("VUID-VkBufferViewCreateInfo-range-00929");
+
+ // Twice the element size of format multiplied by VkPhysicalDeviceLimits::maxTexelBufferElements guarantees range divided by the
+ // element size is greater than maxTexelBufferElements, causing failure
+ buff_view_ci.range = 2 * format_size * dev_limits.maxTexelBufferElements;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferViewCreateInfo-range-00930");
+ CatchError("VUID-VkBufferViewCreateInfo-offset-00931");
+
+ // Set range back to an acceptable value and switch to a format without texel buffer support for the format-support tests below
+ buff_view_ci.format = format_without_texel_support;
+ buff_view_ci.range = VK_WHOLE_SIZE;
+
+ // `buffer` was created using VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT so we can use that for the first buffer test
+ CatchError("VUID-VkBufferViewCreateInfo-buffer-00933");
+
+ // Create a new buffer using VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT
+ const VkBufferCreateInfo storage_buffer_info =
+ VkBufferObj::create_info(resource_size, VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT);
+ VkBufferObj storage_buffer;
+ storage_buffer.init(*m_device, storage_buffer_info, (VkMemoryPropertyFlags)VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
+
+ buff_view_ci.buffer = storage_buffer.handle();
+ CatchError("VUID-VkBufferViewCreateInfo-buffer-00934");
+}
+
+TEST_F(VkLayerTest, InvalidDynamicOffsetCases) {
+ // Create a descriptorSet w/ dynamic descriptor and then hit 3 offset error
+ // cases:
+ // 1. No dynamicOffset supplied
+ // 2. Too many dynamicOffsets supplied
+ // 3. Dynamic offset oversteps buffer being updated
+ VkResult err;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ " requires 1 dynamicOffsets, but only 0 dynamicOffsets are left in pDynamicOffsets ");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitViewport());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkDescriptorPoolSize ds_type_count = {};
+ ds_type_count.type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
+ ds_type_count.descriptorCount = 1;
+
+ VkDescriptorPoolCreateInfo ds_pool_ci = {};
+ ds_pool_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
+ ds_pool_ci.pNext = NULL;
+ ds_pool_ci.maxSets = 1;
+ ds_pool_ci.poolSizeCount = 1;
+ ds_pool_ci.pPoolSizes = &ds_type_count;
+
+ VkDescriptorPool ds_pool;
+ err = vkCreateDescriptorPool(m_device->device(), &ds_pool_ci, NULL, &ds_pool);
+ ASSERT_VK_SUCCESS(err);
+
+ VkDescriptorSetLayoutBinding dsl_binding = {};
+ dsl_binding.binding = 0;
+ dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
+ dsl_binding.descriptorCount = 1;
+ dsl_binding.stageFlags = VK_SHADER_STAGE_ALL;
+ dsl_binding.pImmutableSamplers = NULL;
+
+ const VkDescriptorSetLayoutObj ds_layout(m_device, {dsl_binding});
+
+ VkDescriptorSet descriptorSet;
+ VkDescriptorSetAllocateInfo alloc_info = {};
+ alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
+ alloc_info.descriptorSetCount = 1;
+ alloc_info.descriptorPool = ds_pool;
+ alloc_info.pSetLayouts = &ds_layout.handle();
+ err = vkAllocateDescriptorSets(m_device->device(), &alloc_info, &descriptorSet);
+ ASSERT_VK_SUCCESS(err);
+
+ const VkPipelineLayoutObj pipeline_layout(m_device, {&ds_layout});
+
+ // Create a buffer to update the descriptor with
+ uint32_t qfi = 0;
+ VkBufferCreateInfo buffCI = {};
+ buffCI.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+ buffCI.size = 1024;
+ buffCI.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
+ buffCI.queueFamilyIndexCount = 1;
+ buffCI.pQueueFamilyIndices = &qfi;
+
+ VkBuffer dyub;
+ err = vkCreateBuffer(m_device->device(), &buffCI, NULL, &dyub);
+ ASSERT_VK_SUCCESS(err);
+ // Allocate memory and bind to buffer so we can make it to the appropriate error
+ VkMemoryRequirements memReqs;
+ vkGetBufferMemoryRequirements(m_device->device(), dyub, &memReqs);
+ VkMemoryAllocateInfo mem_alloc = {};
+ mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ mem_alloc.pNext = NULL;
+ mem_alloc.allocationSize = memReqs.size;
+ mem_alloc.memoryTypeIndex = 0;
+ bool pass = m_device->phy().set_memory_type(memReqs.memoryTypeBits, &mem_alloc, 0);
+ if (!pass) {
+ printf("%s Failed to allocate memory.\n", kSkipPrefix);
+ vkDestroyBuffer(m_device->device(), dyub, NULL);
+ return;
+ }
+
+ VkDeviceMemory mem;
+ err = vkAllocateMemory(m_device->device(), &mem_alloc, NULL, &mem);
+ ASSERT_VK_SUCCESS(err);
+ err = vkBindBufferMemory(m_device->device(), dyub, mem, 0);
+ ASSERT_VK_SUCCESS(err);
+ // Correctly update descriptor to avoid "NOT_UPDATED" error
+ VkDescriptorBufferInfo buffInfo = {};
+ buffInfo.buffer = dyub;
+ buffInfo.offset = 0;
+ buffInfo.range = 1024;
+
+ VkWriteDescriptorSet descriptor_write;
+ memset(&descriptor_write, 0, sizeof(descriptor_write));
+ descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ descriptor_write.dstSet = descriptorSet;
+ descriptor_write.dstBinding = 0;
+ descriptor_write.descriptorCount = 1;
+ descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
+ descriptor_write.pBufferInfo = &buffInfo;
+
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+
+ m_commandBuffer->begin();
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+ vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
+ &descriptorSet, 0, NULL);
+ m_errorMonitor->VerifyFound();
+ uint32_t pDynOff[2] = {512, 756};
+ // Now cause error b/c too many dynOffsets in array for # of dyn descriptors
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "Attempting to bind 1 descriptorSets with 1 dynamic descriptors, but ");
+ vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
+ &descriptorSet, 2, pDynOff);
+ m_errorMonitor->VerifyFound();
+ // Finally cause error due to dynamicOffset being too big
+ m_errorMonitor->SetDesiredFailureMsg(
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ " dynamic offset 512 combined with offset 0 and range 1024 that oversteps the buffer size of 1024");
+ // Create PSO to be used for draw-time errors below
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "void main(){\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) out vec4 x;\n"
+ "layout(set=0) layout(binding=0) uniform foo { int x; int y; } bar;\n"
+ "void main(){\n"
+ " x = vec4(bar.y);\n"
+ "}\n";
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+ VkPipelineObj pipe(m_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+ pipe.AddDefaultColorAttachment();
+ pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());
+
+ VkViewport viewport = {0, 0, 16, 16, 0, 1};
+ vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+ VkRect2D scissor = {{0, 0}, {16, 16}};
+ vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
+
+ vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+ // This bind should succeed, but the dynamic offset of 512 combined with the descriptor's
+ // offset 0 and range 1024 oversteps the 1024-byte buffer at draw time
+ vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
+ &descriptorSet, 1, pDynOff);
+ m_commandBuffer->Draw(1, 0, 0, 0);
+ m_errorMonitor->VerifyFound();
+
+ m_commandBuffer->EndRenderPass();
+ m_commandBuffer->end();
+
+ vkDestroyBuffer(m_device->device(), dyub, NULL);
+ vkFreeMemory(m_device->device(), mem, NULL);
+
+ vkDestroyDescriptorPool(m_device->device(), ds_pool, NULL);
+}
+
+TEST_F(VkLayerTest, DescriptorBufferUpdateNoMemoryBound) {
+ TEST_DESCRIPTION("Attempt to update a descriptor with a non-sparse buffer that doesn't have memory bound");
+ VkResult err;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ " used with no memory bound. Memory should be bound by calling vkBindBufferMemory().");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "vkUpdateDescriptorSets() failed write update validation for Descriptor Set 0x");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitViewport());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkDescriptorPoolSize ds_type_count = {};
+ ds_type_count.type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
+ ds_type_count.descriptorCount = 1;
+
+ VkDescriptorPoolCreateInfo ds_pool_ci = {};
+ ds_pool_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
+ ds_pool_ci.pNext = NULL;
+ ds_pool_ci.maxSets = 1;
+ ds_pool_ci.poolSizeCount = 1;
+ ds_pool_ci.pPoolSizes = &ds_type_count;
+
+ VkDescriptorPool ds_pool;
+ err = vkCreateDescriptorPool(m_device->device(), &ds_pool_ci, NULL, &ds_pool);
+ ASSERT_VK_SUCCESS(err);
+
+ VkDescriptorSetLayoutBinding dsl_binding = {};
+ dsl_binding.binding = 0;
+ dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
+ dsl_binding.descriptorCount = 1;
+ dsl_binding.stageFlags = VK_SHADER_STAGE_ALL;
+ dsl_binding.pImmutableSamplers = NULL;
+
+ const VkDescriptorSetLayoutObj ds_layout(m_device, {dsl_binding});
+
+ VkDescriptorSet descriptorSet;
+ VkDescriptorSetAllocateInfo alloc_info = {};
+ alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
+ alloc_info.descriptorSetCount = 1;
+ alloc_info.descriptorPool = ds_pool;
+ alloc_info.pSetLayouts = &ds_layout.handle();
+ err = vkAllocateDescriptorSets(m_device->device(), &alloc_info, &descriptorSet);
+ ASSERT_VK_SUCCESS(err);
+
+ // Create a buffer to update the descriptor with
+ uint32_t qfi = 0;
+ VkBufferCreateInfo buffCI = {};
+ buffCI.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+ buffCI.size = 1024;
+ buffCI.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
+ buffCI.queueFamilyIndexCount = 1;
+ buffCI.pQueueFamilyIndices = &qfi;
+
+ VkBuffer dyub;
+ err = vkCreateBuffer(m_device->device(), &buffCI, NULL, &dyub);
+ ASSERT_VK_SUCCESS(err);
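+ // A non-sparse buffer must have memory bound before it can be referenced by a descriptor; no
+ // memory is ever bound to dyub here, so the write below should be rejected.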
+
+ // Attempt to update descriptor without binding memory to it
+ VkDescriptorBufferInfo buffInfo = {};
+ buffInfo.buffer = dyub;
+ buffInfo.offset = 0;
+ buffInfo.range = 1024;
+
+ VkWriteDescriptorSet descriptor_write;
+ memset(&descriptor_write, 0, sizeof(descriptor_write));
+ descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ descriptor_write.dstSet = descriptorSet;
+ descriptor_write.dstBinding = 0;
+ descriptor_write.descriptorCount = 1;
+ descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
+ descriptor_write.pBufferInfo = &buffInfo;
+
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+ m_errorMonitor->VerifyFound();
+
+ vkDestroyBuffer(m_device->device(), dyub, NULL);
+ vkDestroyDescriptorPool(m_device->device(), ds_pool, NULL);
+}
+
+TEST_F(VkLayerTest, InvalidPushConstants) {
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitViewport());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkPipelineLayout pipeline_layout;
+ VkPushConstantRange pc_range = {};
+ VkPipelineLayoutCreateInfo pipeline_layout_ci = {};
+ pipeline_layout_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
+ pipeline_layout_ci.pushConstantRangeCount = 1;
+ pipeline_layout_ci.pPushConstantRanges = &pc_range;
+
+ //
+ // Check for invalid push constant ranges in pipeline layouts.
+ //
+ struct PipelineLayoutTestCase {
+ VkPushConstantRange const range;
+ char const *msg;
+ };
+
+ const uint32_t too_big = m_device->props.limits.maxPushConstantsSize + 0x4;
+ const std::array<PipelineLayoutTestCase, 10> range_tests = {{
+ {{VK_SHADER_STAGE_VERTEX_BIT, 0, 0}, "vkCreatePipelineLayout() call has push constants index 0 with size 0."},
+ {{VK_SHADER_STAGE_VERTEX_BIT, 0, 1}, "vkCreatePipelineLayout() call has push constants index 0 with size 1."},
+ {{VK_SHADER_STAGE_VERTEX_BIT, 4, 1}, "vkCreatePipelineLayout() call has push constants index 0 with size 1."},
+ {{VK_SHADER_STAGE_VERTEX_BIT, 4, 0}, "vkCreatePipelineLayout() call has push constants index 0 with size 0."},
+ {{VK_SHADER_STAGE_VERTEX_BIT, 1, 4}, "vkCreatePipelineLayout() call has push constants index 0 with offset 1. Offset must"},
+ {{VK_SHADER_STAGE_VERTEX_BIT, 0, too_big}, "vkCreatePipelineLayout() call has push constants index 0 with offset "},
+ {{VK_SHADER_STAGE_VERTEX_BIT, too_big, too_big}, "vkCreatePipelineLayout() call has push constants index 0 with offset "},
+ {{VK_SHADER_STAGE_VERTEX_BIT, too_big, 4}, "vkCreatePipelineLayout() call has push constants index 0 with offset "},
+ {{VK_SHADER_STAGE_VERTEX_BIT, 0xFFFFFFF0, 0x00000020},
+ "vkCreatePipelineLayout() call has push constants index 0 with offset "},
+ {{VK_SHADER_STAGE_VERTEX_BIT, 0x00000020, 0xFFFFFFF0},
+ "vkCreatePipelineLayout() call has push constants index 0 with offset "},
+ }};
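+ // These cases exercise the push constant range rules: size must be non-zero and a multiple of 4,
+ // offset must be a multiple of 4, and offset + size must not exceed maxPushConstantsSize (the
+ // last two entries also provoke 32-bit overflow of offset + size).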
+
+ // Check for invalid offset and size
+ for (const auto &iter : range_tests) {
+ pc_range = iter.range;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, iter.msg);
+ vkCreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
+ m_errorMonitor->VerifyFound();
+ }
+
+ // Check for invalid stage flag
+ pc_range.offset = 0;
+ pc_range.size = 16;
+ pc_range.stageFlags = 0;
+ m_errorMonitor->SetDesiredFailureMsg(
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "vkCreatePipelineLayout: value of pCreateInfo->pPushConstantRanges[0].stageFlags must not be 0");
+ vkCreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
+ m_errorMonitor->VerifyFound();
+
+ // Check for duplicate stage flags in a list of push constant ranges.
+ // A shader can only have one push constant block and that block is mapped
+ // to the push constant range that has that shader's stage flag set.
+ // The shader's stage flag can only appear once in all the ranges, so the
+ // implementation can find the one and only range to map it to.
+ const uint32_t ranges_per_test = 5;
+ struct DuplicateStageFlagsTestCase {
+ VkPushConstantRange const ranges[ranges_per_test];
+ std::vector<char const *> const msg;
+ };
+ // Overlapping ranges are OK, but a stage flag can appear only once.
+ const std::array<DuplicateStageFlagsTestCase, 3> duplicate_stageFlags_tests = {
+ {
+ {{{VK_SHADER_STAGE_VERTEX_BIT, 0, 4},
+ {VK_SHADER_STAGE_VERTEX_BIT, 0, 4},
+ {VK_SHADER_STAGE_VERTEX_BIT, 0, 4},
+ {VK_SHADER_STAGE_VERTEX_BIT, 0, 4},
+ {VK_SHADER_STAGE_VERTEX_BIT, 0, 4}},
+ {
+ "vkCreatePipelineLayout() Duplicate stage flags found in ranges 0 and 1.",
+ "vkCreatePipelineLayout() Duplicate stage flags found in ranges 0 and 2.",
+ "vkCreatePipelineLayout() Duplicate stage flags found in ranges 0 and 3.",
+ "vkCreatePipelineLayout() Duplicate stage flags found in ranges 0 and 4.",
+ "vkCreatePipelineLayout() Duplicate stage flags found in ranges 1 and 2.",
+ "vkCreatePipelineLayout() Duplicate stage flags found in ranges 1 and 3.",
+ "vkCreatePipelineLayout() Duplicate stage flags found in ranges 1 and 4.",
+ "vkCreatePipelineLayout() Duplicate stage flags found in ranges 2 and 3.",
+ "vkCreatePipelineLayout() Duplicate stage flags found in ranges 2 and 4.",
+ "vkCreatePipelineLayout() Duplicate stage flags found in ranges 3 and 4.",
+ }},
+ {{{VK_SHADER_STAGE_VERTEX_BIT, 0, 4},
+ {VK_SHADER_STAGE_GEOMETRY_BIT, 0, 4},
+ {VK_SHADER_STAGE_FRAGMENT_BIT, 0, 4},
+ {VK_SHADER_STAGE_VERTEX_BIT, 0, 4},
+ {VK_SHADER_STAGE_GEOMETRY_BIT, 0, 4}},
+ {
+ "vkCreatePipelineLayout() Duplicate stage flags found in ranges 0 and 3.",
+ "vkCreatePipelineLayout() Duplicate stage flags found in ranges 1 and 4.",
+ }},
+ {{{VK_SHADER_STAGE_FRAGMENT_BIT, 0, 4},
+ {VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, 0, 4},
+ {VK_SHADER_STAGE_VERTEX_BIT, 0, 4},
+ {VK_SHADER_STAGE_VERTEX_BIT, 0, 4},
+ {VK_SHADER_STAGE_GEOMETRY_BIT, 0, 4}},
+ {
+ "vkCreatePipelineLayout() Duplicate stage flags found in ranges 2 and 3.",
+ }},
+ },
+ };
+
+ for (const auto &iter : duplicate_stageFlags_tests) {
+ pipeline_layout_ci.pPushConstantRanges = iter.ranges;
+ pipeline_layout_ci.pushConstantRangeCount = ranges_per_test;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, iter.msg.begin(), iter.msg.end());
+ vkCreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
+ m_errorMonitor->VerifyFound();
+ }
+
+ //
+ // CmdPushConstants tests
+ //
+
+    // Set up a pipeline layout with two overlapping ranges: [0,32) for the fragment stage and [16,80) for the vertex stage
+ const std::vector<VkPushConstantRange> pc_range2 = {{VK_SHADER_STAGE_VERTEX_BIT, 16, 64},
+ {VK_SHADER_STAGE_FRAGMENT_BIT, 0, 32}};
+ const VkPipelineLayoutObj pipeline_layout_obj(m_device, {}, pc_range2);
+
+ const uint8_t dummy_values[100] = {};
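+    // Scratch source data; 100 bytes covers the largest size pushed below.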
+
+ m_commandBuffer->begin();
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
+ // Check for invalid stage flag
+ // Note that VU 00996 isn't reached due to parameter validation
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "vkCmdPushConstants: value of stageFlags must not be 0");
+ vkCmdPushConstants(m_commandBuffer->handle(), pipeline_layout_obj.handle(), 0, 0, 16, dummy_values);
+ m_errorMonitor->VerifyFound();
+
+ // Positive tests for the overlapping ranges
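+    // Each push stays inside the declared ranges and its stageFlags cover every range it overlaps.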
+ m_errorMonitor->ExpectSuccess();
+ vkCmdPushConstants(m_commandBuffer->handle(), pipeline_layout_obj.handle(), VK_SHADER_STAGE_FRAGMENT_BIT, 0, 16, dummy_values);
+ m_errorMonitor->VerifyNotFound();
+ m_errorMonitor->ExpectSuccess();
+ vkCmdPushConstants(m_commandBuffer->handle(), pipeline_layout_obj.handle(), VK_SHADER_STAGE_VERTEX_BIT, 32, 48, dummy_values);
+ m_errorMonitor->VerifyNotFound();
+ m_errorMonitor->ExpectSuccess();
+ vkCmdPushConstants(m_commandBuffer->handle(), pipeline_layout_obj.handle(),
+ VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT, 16, 16, dummy_values);
+ m_errorMonitor->VerifyNotFound();
+
+    // Wrong cmd stages for extant range
+    // No range covers these bytes for all requested cmd stages -- VUID-vkCmdPushConstants-offset-01795
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPushConstants-offset-01795");
+    // The fragment range overlaps these bytes but its stage flag is missing from stageFlags -- VUID-vkCmdPushConstants-offset-01796
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPushConstants-offset-01796");
+ vkCmdPushConstants(m_commandBuffer->handle(), pipeline_layout_obj.handle(), VK_SHADER_STAGE_GEOMETRY_BIT, 0, 16, dummy_values);
+ m_errorMonitor->VerifyFound();
+
+    // No extant range covers the requested offset
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPushConstants-offset-01795");
+ vkCmdPushConstants(m_commandBuffer->handle(), pipeline_layout_obj.handle(), VK_SHADER_STAGE_FRAGMENT_BIT, 80, 4, dummy_values);
+ m_errorMonitor->VerifyFound();
+
+ // Wrong overlapping extent
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPushConstants-offset-01795");
+ vkCmdPushConstants(m_commandBuffer->handle(), pipeline_layout_obj.handle(),
+ VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT, 0, 20, dummy_values);
+ m_errorMonitor->VerifyFound();
+
+ // Wrong stage flags for valid overlapping range
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPushConstants-offset-01796");
+ vkCmdPushConstants(m_commandBuffer->handle(), pipeline_layout_obj.handle(), VK_SHADER_STAGE_VERTEX_BIT, 16, 16, dummy_values);
+ m_errorMonitor->VerifyFound();
+
+ m_commandBuffer->EndRenderPass();
+ m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, DescriptorSetCompatibility) {
+    // Test various descriptorSet errors with bad binding combinations
+ using std::vector;
+ VkResult err;
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitViewport());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ static const uint32_t NUM_DESCRIPTOR_TYPES = 5;
+ VkDescriptorPoolSize ds_type_count[NUM_DESCRIPTOR_TYPES] = {};
+ ds_type_count[0].type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+ ds_type_count[0].descriptorCount = 10;
+ ds_type_count[1].type = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
+ ds_type_count[1].descriptorCount = 2;
+ ds_type_count[2].type = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
+ ds_type_count[2].descriptorCount = 2;
+ ds_type_count[3].type = VK_DESCRIPTOR_TYPE_SAMPLER;
+ ds_type_count[3].descriptorCount = 5;
+ // TODO : LunarG ILO driver currently asserts in desc.c w/ INPUT_ATTACHMENT
+ // type
+ // ds_type_count[4].type = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT;
+ ds_type_count[4].type = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER;
+ ds_type_count[4].descriptorCount = 2;
+
+ VkDescriptorPoolCreateInfo ds_pool_ci = {};
+ ds_pool_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
+ ds_pool_ci.pNext = NULL;
+ ds_pool_ci.maxSets = 5;
+ ds_pool_ci.poolSizeCount = NUM_DESCRIPTOR_TYPES;
+ ds_pool_ci.pPoolSizes = ds_type_count;
+
+ VkDescriptorPool ds_pool;
+ err = vkCreateDescriptorPool(m_device->device(), &ds_pool_ci, NULL, &ds_pool);
+ ASSERT_VK_SUCCESS(err);
+
+ static const uint32_t MAX_DS_TYPES_IN_LAYOUT = 2;
+ VkDescriptorSetLayoutBinding dsl_binding[MAX_DS_TYPES_IN_LAYOUT] = {};
+ dsl_binding[0].binding = 0;
+ dsl_binding[0].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+ dsl_binding[0].descriptorCount = 5;
+ dsl_binding[0].stageFlags = VK_SHADER_STAGE_ALL;
+ dsl_binding[0].pImmutableSamplers = NULL;
+
+ // Create layout identical to set0 layout but w/ different stageFlags
+ VkDescriptorSetLayoutBinding dsl_fs_stage_only = {};
+ dsl_fs_stage_only.binding = 0;
+ dsl_fs_stage_only.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+ dsl_fs_stage_only.descriptorCount = 5;
+ dsl_fs_stage_only.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT; // Different stageFlags to cause error at
+ // bind time
+ dsl_fs_stage_only.pImmutableSamplers = NULL;
+
+ vector<VkDescriptorSetLayoutObj> ds_layouts;
+ // Create 4 unique layouts for full pipelineLayout, and 1 special fs-only
+ // layout for error case
+ ds_layouts.emplace_back(m_device, std::vector<VkDescriptorSetLayoutBinding>(1, dsl_binding[0]));
+
+ const VkDescriptorSetLayoutObj ds_layout_fs_only(m_device, {dsl_fs_stage_only});
+
+ dsl_binding[0].binding = 0;
+ dsl_binding[0].descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
+ dsl_binding[0].descriptorCount = 2;
+ dsl_binding[1].binding = 1;
+ dsl_binding[1].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
+ dsl_binding[1].descriptorCount = 2;
+ dsl_binding[1].stageFlags = VK_SHADER_STAGE_ALL;
+ dsl_binding[1].pImmutableSamplers = NULL;
+ ds_layouts.emplace_back(m_device, std::vector<VkDescriptorSetLayoutBinding>({dsl_binding[0], dsl_binding[1]}));
+
+ dsl_binding[0].binding = 0;
+ dsl_binding[0].descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
+ dsl_binding[0].descriptorCount = 5;
+ ds_layouts.emplace_back(m_device, std::vector<VkDescriptorSetLayoutBinding>(1, dsl_binding[0]));
+
+ dsl_binding[0].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER;
+ dsl_binding[0].descriptorCount = 2;
+ ds_layouts.emplace_back(m_device, std::vector<VkDescriptorSetLayoutBinding>(1, dsl_binding[0]));
+
+ const auto &ds_vk_layouts = MakeVkHandles<VkDescriptorSetLayout>(ds_layouts);
+
+ static const uint32_t NUM_SETS = 4;
+ VkDescriptorSet descriptorSet[NUM_SETS] = {};
+ VkDescriptorSetAllocateInfo alloc_info = {};
+ alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
+ alloc_info.descriptorPool = ds_pool;
+ alloc_info.descriptorSetCount = ds_vk_layouts.size();
+ alloc_info.pSetLayouts = ds_vk_layouts.data();
+ err = vkAllocateDescriptorSets(m_device->device(), &alloc_info, descriptorSet);
+ ASSERT_VK_SUCCESS(err);
+ VkDescriptorSet ds0_fs_only = {};
+ alloc_info.descriptorSetCount = 1;
+ alloc_info.pSetLayouts = &ds_layout_fs_only.handle();
+ err = vkAllocateDescriptorSets(m_device->device(), &alloc_info, &ds0_fs_only);
+ ASSERT_VK_SUCCESS(err);
+
+ const VkPipelineLayoutObj pipeline_layout(m_device, {&ds_layouts[0], &ds_layouts[1]});
+ // Create pipelineLayout with only one setLayout
+ const VkPipelineLayoutObj single_pipe_layout(m_device, {&ds_layouts[0]});
+ // Create pipelineLayout with 2 descriptor setLayout at index 0
+ const VkPipelineLayoutObj pipe_layout_one_desc(m_device, {&ds_layouts[3]});
+ // Create pipelineLayout with 5 SAMPLER descriptor setLayout at index 0
+ const VkPipelineLayoutObj pipe_layout_five_samp(m_device, {&ds_layouts[2]});
+ // Create pipelineLayout with UB type, but stageFlags for FS only
+ VkPipelineLayoutObj pipe_layout_fs_only(m_device, {&ds_layout_fs_only});
+ // Create pipelineLayout w/ incompatible set0 layout, but set1 is fine
+ const VkPipelineLayoutObj pipe_layout_bad_set0(m_device, {&ds_layout_fs_only, &ds_layouts[1]});
+
+ // Create PSO to be used for draw-time errors below
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "void main(){\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) out vec4 x;\n"
+ "layout(set=0) layout(binding=0) uniform foo { int x; int y; } bar;\n"
+ "void main(){\n"
+ " x = vec4(bar.y);\n"
+ "}\n";
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+ VkPipelineObj pipe(m_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+ pipe.AddDefaultColorAttachment();
+ pipe.CreateVKPipeline(pipe_layout_fs_only.handle(), renderPass());
+
+ m_commandBuffer->begin();
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
+ vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+ // TODO : Want to cause various binding incompatibility issues here to test
+ // DrawState
+ // First cause various verify_layout_compatibility() fails
+ // Second disturb early and late sets and verify INFO msgs
+ // VerifySetLayoutCompatibility fail cases:
+ // 1. invalid VkPipelineLayout (layout) passed into vkCmdBindDescriptorSets
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBindDescriptorSets-layout-parameter");
+ vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, (VkPipelineLayout)((size_t)0xbaadb1be), 0,
+ 1, &descriptorSet[0], 0, NULL);
+ m_errorMonitor->VerifyFound();
+
+ // 2. layoutIndex exceeds # of layouts in layout
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " attempting to bind set to index 1");
+ vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, single_pipe_layout.handle(), 0, 2,
+ &descriptorSet[0], 0, NULL);
+ m_errorMonitor->VerifyFound();
+
+ // 3. Pipeline setLayout[0] has 2 descriptors, but set being bound has 5
+ // descriptors
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " has 2 descriptors, but DescriptorSetLayout ");
+ vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe_layout_one_desc.handle(), 0, 1,
+ &descriptorSet[0], 0, NULL);
+ m_errorMonitor->VerifyFound();
+
+ // 4. same # of descriptors but mismatch in type
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " is type 'VK_DESCRIPTOR_TYPE_SAMPLER' but binding ");
+ vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe_layout_five_samp.handle(), 0, 1,
+ &descriptorSet[0], 0, NULL);
+ m_errorMonitor->VerifyFound();
+
+ // 5. same # of descriptors but mismatch in stageFlags
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ " has stageFlags 16 but binding 0 for DescriptorSetLayout ");
+ vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe_layout_fs_only.handle(), 0, 1,
+ &descriptorSet[0], 0, NULL);
+ m_errorMonitor->VerifyFound();
+
+ // Now that we're done actively using the pipelineLayout that gfx pipeline
+ // was created with, we should be able to delete it. Do that now to verify
+ // that validation obeys pipelineLayout lifetime
+ pipe_layout_fs_only.Reset();
+
+ // Cause draw-time errors due to PSO incompatibilities
+ // 1. Error due to not binding required set (we actually use same code as
+ // above to disturb set0)
+ vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 2,
+ &descriptorSet[0], 0, NULL);
+ vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe_layout_bad_set0.handle(), 1, 1,
+ &descriptorSet[1], 0, NULL);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " uses set #0 but that set is not bound.");
+
+ VkViewport viewport = {0, 0, 16, 16, 0, 1};
+ VkRect2D scissor = {{0, 0}, {16, 16}};
+ vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+ vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
+
+ m_commandBuffer->Draw(1, 0, 0, 0);
+ m_errorMonitor->VerifyFound();
+
+ // 2. Error due to bound set not being compatible with PSO's
+ // VkPipelineLayout (diff stageFlags in this case)
+ vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 2,
+ &descriptorSet[0], 0, NULL);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " bound as set #0 is not compatible with ");
+ m_commandBuffer->Draw(1, 0, 0, 0);
+ m_errorMonitor->VerifyFound();
+
+ // Remaining clean-up
+ m_commandBuffer->EndRenderPass();
+ m_commandBuffer->end();
+
+ vkDestroyDescriptorPool(m_device->device(), ds_pool, NULL);
+}
+
+TEST_F(VkLayerTest, NoBeginCommandBuffer) {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "You must call vkBeginCommandBuffer() before this call to ");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ VkCommandBufferObj commandBuffer(m_device, m_commandPool);
+ // Call EndCommandBuffer() w/o calling BeginCommandBuffer()
+ vkEndCommandBuffer(commandBuffer.handle());
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, SecondaryCommandBufferNullRenderpass) {
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ VkCommandBufferObj cb(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
+
+ // Force the failure by not setting the Renderpass and Framebuffer fields
+ VkCommandBufferInheritanceInfo cmd_buf_hinfo = {};
+ cmd_buf_hinfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
+
+ VkCommandBufferBeginInfo cmd_buf_info = {};
+ cmd_buf_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+ cmd_buf_info.pNext = NULL;
+ cmd_buf_info.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT | VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT;
+ cmd_buf_info.pInheritanceInfo = &cmd_buf_hinfo;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkCommandBufferBeginInfo-flags-00053");
+ vkBeginCommandBuffer(cb.handle(), &cmd_buf_info);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, SecondaryCommandBufferRerecordedExplicitReset) {
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "was destroyed or rerecorded");
+
+ // A pool we can reset in.
+ VkCommandPoolObj pool(m_device, m_device->graphics_queue_node_index_, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
+ VkCommandBufferObj secondary(m_device, &pool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
+
+ secondary.begin();
+ secondary.end();
+
+ m_commandBuffer->begin();
+ vkCmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary.handle());
+
+ // rerecording of secondary
+ secondary.reset(); // explicit reset here.
+ secondary.begin();
+ secondary.end();
+
+ vkCmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary.handle());
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, SecondaryCommandBufferRerecordedNoReset) {
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "was destroyed or rerecorded");
+
+ // A pool we can reset in.
+ VkCommandPoolObj pool(m_device, m_device->graphics_queue_node_index_, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
+ VkCommandBufferObj secondary(m_device, &pool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
+
+ secondary.begin();
+ secondary.end();
+
+ m_commandBuffer->begin();
+ vkCmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary.handle());
+
+ // rerecording of secondary
+ secondary.begin(); // implicit reset in begin
+ secondary.end();
+
+ vkCmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary.handle());
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CascadedInvalidation) {
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ VkEventCreateInfo eci = {VK_STRUCTURE_TYPE_EVENT_CREATE_INFO, nullptr, 0};
+ VkEvent event;
+ vkCreateEvent(m_device->device(), &eci, nullptr, &event);
+
+ VkCommandBufferObj secondary(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
+ secondary.begin();
+ vkCmdSetEvent(secondary.handle(), event, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT);
+ secondary.end();
+
+ m_commandBuffer->begin();
+ vkCmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary.handle());
+ m_commandBuffer->end();
+
+ // destroying the event should invalidate both primary and secondary CB
+ vkDestroyEvent(m_device->device(), event, nullptr);
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "invalid because bound Event");
+ m_commandBuffer->QueueCommandBuffer(false);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CommandBufferResetErrors) {
+ // Cause error due to Begin while recording CB
+ // Then cause 2 errors for attempting to reset CB w/o having
+ // VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT set for the pool from
+ // which CBs were allocated. Note that this bit is off by default.
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Cannot call Begin on command buffer");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ // Calls AllocateCommandBuffers
+ VkCommandBufferObj commandBuffer(m_device, m_commandPool);
+
+    // Minimal inheritance info; the errors below come from command buffer state transitions, not from these fields
+ VkCommandBufferInheritanceInfo cmd_buf_hinfo = {};
+ cmd_buf_hinfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
+ VkCommandBufferBeginInfo cmd_buf_info = {};
+ cmd_buf_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+ cmd_buf_info.pNext = NULL;
+ cmd_buf_info.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
+ cmd_buf_info.pInheritanceInfo = &cmd_buf_hinfo;
+
+ // Begin CB to transition to recording state
+ vkBeginCommandBuffer(commandBuffer.handle(), &cmd_buf_info);
+ // Can't re-begin. This should trigger error
+ vkBeginCommandBuffer(commandBuffer.handle(), &cmd_buf_info);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkResetCommandBuffer-commandBuffer-00046");
+ VkCommandBufferResetFlags flags = 0; // Don't care about flags for this test
+ // Reset attempt will trigger error due to incorrect CommandPool state
+ vkResetCommandBuffer(commandBuffer.handle(), flags);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBeginCommandBuffer-commandBuffer-00050");
+ // Transition CB to RECORDED state
+ vkEndCommandBuffer(commandBuffer.handle());
+ // Now attempting to Begin will implicitly reset, which triggers error
+ vkBeginCommandBuffer(commandBuffer.handle(), &cmd_buf_info);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, InvalidPipelineCreateState) {
+ // Attempt to Create Gfx Pipeline w/o a VS
+ VkResult err;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "Invalid Pipeline CreateInfo State: Vertex Shader required");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkDescriptorPoolSize ds_type_count = {};
+ ds_type_count.type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+ ds_type_count.descriptorCount = 1;
+
+ VkDescriptorPoolCreateInfo ds_pool_ci = {};
+ ds_pool_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
+ ds_pool_ci.pNext = NULL;
+ ds_pool_ci.maxSets = 1;
+ ds_pool_ci.poolSizeCount = 1;
+ ds_pool_ci.pPoolSizes = &ds_type_count;
+
+ VkDescriptorPool ds_pool;
+ err = vkCreateDescriptorPool(m_device->device(), &ds_pool_ci, NULL, &ds_pool);
+ ASSERT_VK_SUCCESS(err);
+
+ VkDescriptorSetLayoutBinding dsl_binding = {};
+ dsl_binding.binding = 0;
+ dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+ dsl_binding.descriptorCount = 1;
+ dsl_binding.stageFlags = VK_SHADER_STAGE_ALL;
+ dsl_binding.pImmutableSamplers = NULL;
+
+ const VkDescriptorSetLayoutObj ds_layout(m_device, {dsl_binding});
+
+ VkDescriptorSet descriptorSet;
+ VkDescriptorSetAllocateInfo alloc_info = {};
+ alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
+ alloc_info.descriptorSetCount = 1;
+ alloc_info.descriptorPool = ds_pool;
+ alloc_info.pSetLayouts = &ds_layout.handle();
+ err = vkAllocateDescriptorSets(m_device->device(), &alloc_info, &descriptorSet);
+ ASSERT_VK_SUCCESS(err);
+
+ const VkPipelineLayoutObj pipeline_layout(m_device, {&ds_layout});
+
+ VkPipelineRasterizationStateCreateInfo rs_state_ci = {};
+ rs_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
+ rs_state_ci.polygonMode = VK_POLYGON_MODE_FILL;
+ rs_state_ci.cullMode = VK_CULL_MODE_BACK_BIT;
+ rs_state_ci.frontFace = VK_FRONT_FACE_COUNTER_CLOCKWISE;
+ rs_state_ci.depthClampEnable = VK_FALSE;
+ rs_state_ci.rasterizerDiscardEnable = VK_TRUE;
+ rs_state_ci.depthBiasEnable = VK_FALSE;
+ rs_state_ci.lineWidth = 1.0f;
+
+ VkPipelineVertexInputStateCreateInfo vi_ci = {};
+ vi_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
+ vi_ci.pNext = nullptr;
+ vi_ci.vertexBindingDescriptionCount = 0;
+ vi_ci.pVertexBindingDescriptions = nullptr;
+ vi_ci.vertexAttributeDescriptionCount = 0;
+ vi_ci.pVertexAttributeDescriptions = nullptr;
+
+ VkPipelineInputAssemblyStateCreateInfo ia_ci = {};
+ ia_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
+ ia_ci.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
+
+ VkPipelineShaderStageCreateInfo shaderStages[2];
+ memset(&shaderStages, 0, 2 * sizeof(VkPipelineShaderStageCreateInfo));
+
+ VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+ shaderStages[0] = fs.GetStageCreateInfo(); // should be: vs.GetStageCreateInfo();
+ shaderStages[1] = fs.GetStageCreateInfo();
+
+ VkGraphicsPipelineCreateInfo gp_ci = {};
+ gp_ci.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
+ gp_ci.pViewportState = nullptr; // no viewport b/c rasterizer is disabled
+ gp_ci.pRasterizationState = &rs_state_ci;
+ gp_ci.flags = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT;
+ gp_ci.layout = pipeline_layout.handle();
+ gp_ci.renderPass = renderPass();
+ gp_ci.pVertexInputState = &vi_ci;
+ gp_ci.pInputAssemblyState = &ia_ci;
+
+ gp_ci.stageCount = 1;
+ gp_ci.pStages = shaderStages;
+
+ VkPipelineCacheCreateInfo pc_ci = {};
+ pc_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
+ pc_ci.initialDataSize = 0;
+ pc_ci.pInitialData = 0;
+
+ VkPipeline pipeline;
+ VkPipelineCache pipelineCache;
+
+ err = vkCreatePipelineCache(m_device->device(), &pc_ci, NULL, &pipelineCache);
+ ASSERT_VK_SUCCESS(err);
+ err = vkCreateGraphicsPipelines(m_device->device(), pipelineCache, 1, &gp_ci, NULL, &pipeline);
+ m_errorMonitor->VerifyFound();
+
+    // Finally, check the string validation for the shader stage pName variable. Correct the shader stage data, and corrupt the
+    // string before calling again
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "contains invalid characters or is badly formed");
+ shaderStages[0] = vs.GetStageCreateInfo();
+ const uint8_t cont_char = 0xf8;
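+    // 0xf8 can never appear in valid UTF-8, so pName fails the layer's string check.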
+ char bad_string[] = {static_cast<char>(cont_char), static_cast<char>(cont_char), static_cast<char>(cont_char),
+ static_cast<char>(cont_char)};
+ shaderStages[0].pName = bad_string;
+ err = vkCreateGraphicsPipelines(m_device->device(), pipelineCache, 1, &gp_ci, NULL, &pipeline);
+ m_errorMonitor->VerifyFound();
+
+ vkDestroyPipelineCache(m_device->device(), pipelineCache, NULL);
+ vkDestroyDescriptorPool(m_device->device(), ds_pool, NULL);
+}
+
+TEST_F(VkLayerTest, InvalidPipelineSampleRateFeatureDisable) {
+ // Enable sample shading in pipeline when the feature is disabled.
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+ // Disable sampleRateShading here
+ VkPhysicalDeviceFeatures device_features = {};
+ ASSERT_NO_FATAL_FAILURE(GetPhysicalDeviceFeatures(&device_features));
+ device_features.sampleRateShading = VK_FALSE;
+
+ ASSERT_NO_FATAL_FAILURE(InitState(&device_features));
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ // Cause the error by enabling sample shading...
+ auto set_shading_enable = [](CreatePipelineHelper &helper) { helper.pipe_ms_state_ci_.sampleShadingEnable = VK_TRUE; };
+ CreatePipelineHelper::OneshotTest(*this, set_shading_enable, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkPipelineMultisampleStateCreateInfo-sampleShadingEnable-00784");
+}
+
+TEST_F(VkLayerTest, InvalidPipelineSampleRateFeatureEnable) {
+    // Verify minSampleShading range validation when sample shading is enabled and the feature is available.
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+ // Require sampleRateShading here
+ VkPhysicalDeviceFeatures device_features = {};
+ ASSERT_NO_FATAL_FAILURE(GetPhysicalDeviceFeatures(&device_features));
+ if (device_features.sampleRateShading == VK_FALSE) {
+ printf("%s SampleRateShading feature is disabled -- skipping related checks.\n", kSkipPrefix);
+ return;
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitState(&device_features));
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ auto range_test = [this](float value, bool positive_test) {
+ auto info_override = [value](CreatePipelineHelper &helper) {
+ helper.pipe_ms_state_ci_.sampleShadingEnable = VK_TRUE;
+ helper.pipe_ms_state_ci_.minSampleShading = value;
+ };
+ CreatePipelineHelper::OneshotTest(*this, info_override, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkPipelineMultisampleStateCreateInfo-minSampleShading-00786", positive_test);
+ };
+
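+    // minSampleShading must lie in [0.0, 1.0]; values just outside the range fail, the endpoints pass.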
+ range_test(NearestSmaller(0.0F), false);
+ range_test(NearestGreater(1.0F), false);
+ range_test(0.0F, /* positive_test= */ true);
+ range_test(1.0F, /* positive_test= */ true);
+}
+
+TEST_F(VkLayerTest, InvalidPipelineSamplePNext) {
+    // Validate the pNext chain of VkPipelineMultisampleStateCreateInfo: supported extension structs pass, an unrelated struct is flagged.
+ // Check for VK_KHR_get_physical_device_properties2
+ if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ }
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+ // Set up the extension structs
+ auto sampleLocations = chain_util::Init<VkPipelineSampleLocationsStateCreateInfoEXT>();
+ sampleLocations.sampleLocationsInfo.sType = VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT;
+ auto coverageToColor = chain_util::Init<VkPipelineCoverageToColorStateCreateInfoNV>();
+ auto coverageModulation = chain_util::Init<VkPipelineCoverageModulationStateCreateInfoNV>();
+ auto discriminatrix = [this](const char *name) { return DeviceExtensionSupported(gpu(), nullptr, name); };
+ chain_util::ExtensionChain chain(discriminatrix, &m_device_extension_names);
+ chain.Add(VK_EXT_SAMPLE_LOCATIONS_EXTENSION_NAME, sampleLocations);
+ chain.Add(VK_NV_FRAGMENT_COVERAGE_TO_COLOR_EXTENSION_NAME, coverageToColor);
+ chain.Add(VK_NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME, coverageModulation);
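+    // chain.Head() is null if none of the three extensions was added.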
+ const void *extension_head = chain.Head();
+
+ ASSERT_NO_FATAL_FAILURE(InitState());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ if (extension_head) {
+ auto good_chain = [extension_head](CreatePipelineHelper &helper) { helper.pipe_ms_state_ci_.pNext = extension_head; };
+ CreatePipelineHelper::OneshotTest(*this, good_chain, (VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT),
+ "No error", true);
+ } else {
+ printf("%s Required extension not present -- skipping positive checks.\n", kSkipPrefix);
+ }
+
+ auto instance_ci = chain_util::Init<VkInstanceCreateInfo>();
+ auto bad_chain = [&instance_ci](CreatePipelineHelper &helper) { helper.pipe_ms_state_ci_.pNext = &instance_ci; };
+ CreatePipelineHelper::OneshotTest(*this, bad_chain, VK_DEBUG_REPORT_WARNING_BIT_EXT,
+ "VUID-VkPipelineMultisampleStateCreateInfo-pNext-pNext");
+}
+
+TEST_F(VkLayerTest, VertexAttributeDivisorExtension) {
+ TEST_DESCRIPTION("Test VUIDs added with VK_EXT_vertex_attribute_divisor extension.");
+
+ bool inst_ext = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ if (inst_ext) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ }
+ if (inst_ext && DeviceExtensionSupported(gpu(), nullptr, VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME)) {
+ m_device_extension_names.push_back(VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME);
+ } else {
+ printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME);
+ return;
+ }
+
+ VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT vadf = {};
+ vadf.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT;
+ vadf.vertexAttributeInstanceRateDivisor = VK_TRUE;
+ vadf.vertexAttributeInstanceRateZeroDivisor = VK_TRUE;
+
+ VkPhysicalDeviceFeatures2 pd_features2 = {};
+ pd_features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
+ pd_features2.pNext = &vadf;
+
+ ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &pd_features2));
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ const VkPhysicalDeviceLimits &dev_limits = m_device->props.limits;
+ VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT pdvad_props = {};
+ pdvad_props.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT;
+ VkPhysicalDeviceProperties2 pd_props2 = {};
+ pd_props2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
+ pd_props2.pNext = &pdvad_props;
+ vkGetPhysicalDeviceProperties2(gpu(), &pd_props2);
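+    // maxVertexAttribDivisor is the largest divisor the implementation accepts; it bounds the overflow case added below.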
+
+ VkVertexInputBindingDivisorDescriptionEXT vibdd = {};
+ VkPipelineVertexInputDivisorStateCreateInfoEXT pvids_ci = {};
+ pvids_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT;
+ pvids_ci.vertexBindingDivisorCount = 1;
+ pvids_ci.pVertexBindingDivisors = &vibdd;
+ VkVertexInputBindingDescription vibd = {};
+ vibd.stride = 12;
+ vibd.inputRate = VK_VERTEX_INPUT_RATE_VERTEX;
+
+ using std::vector;
+ struct TestCase {
+ uint32_t div_binding;
+ uint32_t div_divisor;
+ uint32_t desc_binding;
+ VkVertexInputRate desc_rate;
+ vector<std::string> vuids;
+ };
+
+ // clang-format off
+ vector<TestCase> test_cases = {
+ { 0,
+ 1,
+ 0,
+ VK_VERTEX_INPUT_RATE_VERTEX,
+ {"VUID-VkVertexInputBindingDivisorDescriptionEXT-inputRate-01871"}
+ },
+ { dev_limits.maxVertexInputBindings + 1,
+ 1,
+ 0,
+ VK_VERTEX_INPUT_RATE_INSTANCE,
+ {"VUID-VkVertexInputBindingDivisorDescriptionEXT-binding-01869",
+ "VUID-VkVertexInputBindingDivisorDescriptionEXT-inputRate-01871"}
+ }
+ };
+
+ if (UINT32_MAX != pdvad_props.maxVertexAttribDivisor) { // Can't test overflow if maxVAD is UINT32_MAX
+ test_cases.push_back(
+ { 0,
+ pdvad_props.maxVertexAttribDivisor + 1,
+ 0,
+ VK_VERTEX_INPUT_RATE_INSTANCE,
+ {"VUID-VkVertexInputBindingDivisorDescriptionEXT-divisor-01870"}
+ } );
+ }
+ // clang-format on
+
+ for (const auto &test_case : test_cases) {
+ const auto bad_divisor_state = [&test_case, &vibdd, &pvids_ci, &vibd](CreatePipelineHelper &helper) {
+ vibdd.binding = test_case.div_binding;
+ vibdd.divisor = test_case.div_divisor;
+ vibd.binding = test_case.desc_binding;
+ vibd.inputRate = test_case.desc_rate;
+ helper.vi_ci_.pNext = &pvids_ci;
+ helper.vi_ci_.vertexBindingDescriptionCount = 1;
+ helper.vi_ci_.pVertexBindingDescriptions = &vibd;
+ };
+ CreatePipelineHelper::OneshotTest(*this, bad_divisor_state, VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.vuids);
+ }
+}
+
+TEST_F(VkLayerTest, VertexAttributeDivisorDisabled) {
+ TEST_DESCRIPTION("Test instance divisor feature disabled for VK_EXT_vertex_attribute_divisor extension.");
+
+ bool inst_ext = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ if (inst_ext) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ }
+ if (inst_ext && DeviceExtensionSupported(gpu(), nullptr, VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME)) {
+ m_device_extension_names.push_back(VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME);
+ } else {
+ printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME);
+ return;
+ }
+
+ VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT vadf = {};
+ vadf.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT;
+ vadf.vertexAttributeInstanceRateDivisor = VK_FALSE;
+ vadf.vertexAttributeInstanceRateZeroDivisor = VK_FALSE;
+ VkPhysicalDeviceFeatures2 pd_features2 = {};
+ pd_features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
+ pd_features2.pNext = &vadf;
+
+ ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &pd_features2));
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkVertexInputBindingDivisorDescriptionEXT vibdd = {};
+ vibdd.binding = 0;
+ vibdd.divisor = 2;
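+    // With vertexAttributeInstanceRateDivisor disabled, any divisor other than 1 is invalid.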
+ VkPipelineVertexInputDivisorStateCreateInfoEXT pvids_ci = {};
+ pvids_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT;
+ pvids_ci.vertexBindingDivisorCount = 1;
+ pvids_ci.pVertexBindingDivisors = &vibdd;
+ VkVertexInputBindingDescription vibd = {};
+ vibd.binding = vibdd.binding;
+ vibd.stride = 12;
+ vibd.inputRate = VK_VERTEX_INPUT_RATE_INSTANCE;
+
+ const auto instance_rate = [&pvids_ci, &vibd](CreatePipelineHelper &helper) {
+ helper.vi_ci_.pNext = &pvids_ci;
+ helper.vi_ci_.vertexBindingDescriptionCount = 1;
+ helper.vi_ci_.pVertexBindingDescriptions = &vibd;
+ };
+ CreatePipelineHelper::OneshotTest(*this, instance_rate, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkVertexInputBindingDivisorDescriptionEXT-vertexAttributeInstanceRateDivisor-02229");
+}
+
+TEST_F(VkLayerTest, VertexAttributeDivisorInstanceRateZero) {
+ TEST_DESCRIPTION("Test instanceRateZero feature of VK_EXT_vertex_attribute_divisor extension.");
+
+ bool inst_ext = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ if (inst_ext) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ }
+ if (inst_ext && DeviceExtensionSupported(gpu(), nullptr, VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME)) {
+ m_device_extension_names.push_back(VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME);
+ } else {
+ printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME);
+ return;
+ }
+
+ VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT vadf = {};
+ vadf.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT;
+ vadf.vertexAttributeInstanceRateDivisor = VK_TRUE;
+ vadf.vertexAttributeInstanceRateZeroDivisor = VK_FALSE;
+ VkPhysicalDeviceFeatures2 pd_features2 = {};
+ pd_features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
+ pd_features2.pNext = &vadf;
+
+ ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &pd_features2));
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkVertexInputBindingDivisorDescriptionEXT vibdd = {};
+ vibdd.binding = 0;
+ vibdd.divisor = 0;
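+    // With vertexAttributeInstanceRateZeroDivisor disabled, a divisor of 0 is invalid.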
+ VkPipelineVertexInputDivisorStateCreateInfoEXT pvids_ci = {};
+ pvids_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT;
+ pvids_ci.vertexBindingDivisorCount = 1;
+ pvids_ci.pVertexBindingDivisors = &vibdd;
+ VkVertexInputBindingDescription vibd = {};
+ vibd.binding = vibdd.binding;
+ vibd.stride = 12;
+ vibd.inputRate = VK_VERTEX_INPUT_RATE_INSTANCE;
+
+ const auto instance_rate = [&pvids_ci, &vibd](CreatePipelineHelper &helper) {
+ helper.vi_ci_.pNext = &pvids_ci;
+ helper.vi_ci_.vertexBindingDescriptionCount = 1;
+ helper.vi_ci_.pVertexBindingDescriptions = &vibd;
+ };
+ CreatePipelineHelper::OneshotTest(
+ *this, instance_rate, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkVertexInputBindingDivisorDescriptionEXT-vertexAttributeInstanceRateZeroDivisor-02228");
+}
+
+/*// TODO : This test should be good, but needs Tess support in compiler to run
+TEST_F(VkLayerTest, InvalidPatchControlPoints)
+{
+ // Attempt to Create Gfx Pipeline w/o a VS
+ VkResult err;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "Invalid Pipeline CreateInfo State: VK_PRIMITIVE_TOPOLOGY_PATCH
+primitive ");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkDescriptorPoolSize ds_type_count = {};
+ ds_type_count.type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+ ds_type_count.descriptorCount = 1;
+
+ VkDescriptorPoolCreateInfo ds_pool_ci = {};
+ ds_pool_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
+ ds_pool_ci.pNext = NULL;
+ ds_pool_ci.poolSizeCount = 1;
+ ds_pool_ci.pPoolSizes = &ds_type_count;
+
+ VkDescriptorPool ds_pool;
+ err = vkCreateDescriptorPool(m_device->device(),
+VK_DESCRIPTOR_POOL_USAGE_NON_FREE, 1, &ds_pool_ci, NULL, &ds_pool);
+ ASSERT_VK_SUCCESS(err);
+
+ VkDescriptorSetLayoutBinding dsl_binding = {};
+ dsl_binding.binding = 0;
+ dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+ dsl_binding.descriptorCount = 1;
+ dsl_binding.stageFlags = VK_SHADER_STAGE_ALL;
+ dsl_binding.pImmutableSamplers = NULL;
+
+ VkDescriptorSetLayoutCreateInfo ds_layout_ci = {};
+ ds_layout_ci.sType =
+VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
+ ds_layout_ci.pNext = NULL;
+ ds_layout_ci.bindingCount = 1;
+ ds_layout_ci.pBindings = &dsl_binding;
+
+ VkDescriptorSetLayout ds_layout;
+ err = vkCreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL,
+&ds_layout);
+ ASSERT_VK_SUCCESS(err);
+
+ VkDescriptorSet descriptorSet;
+ err = vkAllocateDescriptorSets(m_device->device(), ds_pool,
+VK_DESCRIPTOR_SET_USAGE_NON_FREE, 1, &ds_layout, &descriptorSet);
+ ASSERT_VK_SUCCESS(err);
+
+ VkPipelineLayoutCreateInfo pipeline_layout_ci = {};
+ pipeline_layout_ci.sType =
+VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
+ pipeline_layout_ci.pNext = NULL;
+ pipeline_layout_ci.setLayoutCount = 1;
+ pipeline_layout_ci.pSetLayouts = &ds_layout;
+
+ VkPipelineLayout pipeline_layout;
+ err = vkCreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL,
+&pipeline_layout);
+ ASSERT_VK_SUCCESS(err);
+
+ VkPipelineShaderStageCreateInfo shaderStages[3];
+ memset(&shaderStages, 0, 3 * sizeof(VkPipelineShaderStageCreateInfo));
+
+ VkShaderObj vs(m_device,bindStateVertShaderText,VK_SHADER_STAGE_VERTEX_BIT,
+this);
+ // Just using VS txt for Tess shaders as we don't care about functionality
+ VkShaderObj
+tc(m_device,bindStateVertShaderText,VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT,
+this);
+ VkShaderObj
+te(m_device,bindStateVertShaderText,VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT,
+this);
+
+ shaderStages[0].sType =
+VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+ shaderStages[0].stage = VK_SHADER_STAGE_VERTEX_BIT;
+ shaderStages[0].shader = vs.handle();
+ shaderStages[1].sType =
+VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+ shaderStages[1].stage = VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT;
+ shaderStages[1].shader = tc.handle();
+ shaderStages[2].sType =
+VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+ shaderStages[2].stage = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT;
+ shaderStages[2].shader = te.handle();
+
+ VkPipelineInputAssemblyStateCreateInfo iaCI = {};
+ iaCI.sType =
+VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
+ iaCI.topology = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST;
+
+ VkPipelineTessellationStateCreateInfo tsCI = {};
+ tsCI.sType = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO;
+ tsCI.patchControlPoints = 0; // This will cause an error
+
+ VkGraphicsPipelineCreateInfo gp_ci = {};
+ gp_ci.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
+ gp_ci.pNext = NULL;
+ gp_ci.stageCount = 3;
+ gp_ci.pStages = shaderStages;
+ gp_ci.pVertexInputState = NULL;
+ gp_ci.pInputAssemblyState = &iaCI;
+ gp_ci.pTessellationState = &tsCI;
+ gp_ci.pViewportState = NULL;
+ gp_ci.pRasterizationState = NULL;
+ gp_ci.pMultisampleState = NULL;
+ gp_ci.pDepthStencilState = NULL;
+ gp_ci.pColorBlendState = NULL;
+ gp_ci.flags = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT;
+ gp_ci.layout = pipeline_layout;
+ gp_ci.renderPass = renderPass();
+
+ VkPipelineCacheCreateInfo pc_ci = {};
+ pc_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
+ pc_ci.pNext = NULL;
+ pc_ci.initialSize = 0;
+ pc_ci.initialData = 0;
+ pc_ci.maxSize = 0;
+
+ VkPipeline pipeline;
+ VkPipelineCache pipelineCache;
+
+ err = vkCreatePipelineCache(m_device->device(), &pc_ci, NULL,
+&pipelineCache);
+ ASSERT_VK_SUCCESS(err);
+ err = vkCreateGraphicsPipelines(m_device->device(), pipelineCache, 1,
+&gp_ci, NULL, &pipeline);
+
+ m_errorMonitor->VerifyFound();
+
+ vkDestroyPipelineCache(m_device->device(), pipelineCache, NULL);
+ vkDestroyPipelineLayout(m_device->device(), pipeline_layout, NULL);
+ vkDestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
+ vkDestroyDescriptorPool(m_device->device(), ds_pool, NULL);
+}
+*/
+
+TEST_F(VkLayerTest, PSOViewportStateTests) {
+ TEST_DESCRIPTION("Test VkPipelineViewportStateCreateInfo viewport and scissor count validation for non-multiViewport");
+
+ VkPhysicalDeviceFeatures features{};
+ ASSERT_NO_FATAL_FAILURE(Init(&features));
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ const auto break_vp_state = [](CreatePipelineHelper &helper) {
+ helper.rs_state_ci_.rasterizerDiscardEnable = VK_FALSE;
+ helper.gp_ci_.pViewportState = nullptr;
+ };
+ CreatePipelineHelper::OneshotTest(*this, break_vp_state, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkGraphicsPipelineCreateInfo-rasterizerDiscardEnable-00750");
+
+ VkViewport viewport = {0.0f, 0.0f, 64.0f, 64.0f, 0.0f, 1.0f};
+ VkViewport viewports[] = {viewport, viewport};
+ VkRect2D scissor = {{0, 0}, {64, 64}};
+ VkRect2D scissors[] = {scissor, scissor};
+
+ // test viewport and scissor arrays
+ using std::vector;
+ struct TestCase {
+ uint32_t viewport_count;
+ VkViewport *viewports;
+ uint32_t scissor_count;
+ VkRect2D *scissors;
+
+ vector<std::string> vuids;
+ };
+
+ vector<TestCase> test_cases = {
+ {0,
+ viewports,
+ 1,
+ scissors,
+ {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216",
+ "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
+ {2,
+ viewports,
+ 1,
+ scissors,
+ {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216",
+ "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
+ {1,
+ viewports,
+ 0,
+ scissors,
+ {"VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217",
+ "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
+ {1,
+ viewports,
+ 2,
+ scissors,
+ {"VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217",
+ "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
+ {0,
+ viewports,
+ 0,
+ scissors,
+ {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216",
+ "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217"}},
+ {2,
+ viewports,
+ 2,
+ scissors,
+ {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216",
+ "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217"}},
+ {0,
+ viewports,
+ 2,
+ scissors,
+ {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216", "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217",
+ "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
+ {2,
+ viewports,
+ 0,
+ scissors,
+ {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216", "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217",
+ "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
+ {1, nullptr, 1, scissors, {"VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00747"}},
+ {1, viewports, 1, nullptr, {"VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00748"}},
+ {1,
+ nullptr,
+ 1,
+ nullptr,
+ {"VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00747", "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00748"}},
+ {2,
+ nullptr,
+ 3,
+ nullptr,
+ {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216", "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217",
+ "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220", "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00747",
+ "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00748"}},
+ {0,
+ nullptr,
+ 0,
+ nullptr,
+ {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216",
+ "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217"}},
+ };
+
+ for (const auto &test_case : test_cases) {
+ const auto break_vp = [&test_case](CreatePipelineHelper &helper) {
+ helper.vp_state_ci_.viewportCount = test_case.viewport_count;
+ helper.vp_state_ci_.pViewports = test_case.viewports;
+ helper.vp_state_ci_.scissorCount = test_case.scissor_count;
+ helper.vp_state_ci_.pScissors = test_case.scissors;
+ };
+ CreatePipelineHelper::OneshotTest(*this, break_vp, VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.vuids);
+ }
+
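+    // With VK_DYNAMIC_STATE_VIEWPORT/SCISSOR enabled, null pViewports/pScissors are legal, so the
+    // pDynamicStates-00747/00748 cases are dropped from the dynamic variants below.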
+ vector<TestCase> dyn_test_cases = {
+ {0,
+ viewports,
+ 1,
+ scissors,
+ {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216",
+ "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
+ {2,
+ viewports,
+ 1,
+ scissors,
+ {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216",
+ "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
+ {1,
+ viewports,
+ 0,
+ scissors,
+ {"VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217",
+ "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
+ {1,
+ viewports,
+ 2,
+ scissors,
+ {"VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217",
+ "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
+ {0,
+ viewports,
+ 0,
+ scissors,
+ {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216",
+ "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217"}},
+ {2,
+ viewports,
+ 2,
+ scissors,
+ {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216",
+ "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217"}},
+ {0,
+ viewports,
+ 2,
+ scissors,
+ {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216", "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217",
+ "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
+ {2,
+ viewports,
+ 0,
+ scissors,
+ {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216", "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217",
+ "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
+ {2,
+ nullptr,
+ 3,
+ nullptr,
+ {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216", "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217",
+ "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
+ {0,
+ nullptr,
+ 0,
+ nullptr,
+ {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216",
+ "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217"}},
+ };
+
+ const VkDynamicState dyn_states[] = {VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR};
+
+ for (const auto &test_case : dyn_test_cases) {
+ const auto break_vp = [&](CreatePipelineHelper &helper) {
+ VkPipelineDynamicStateCreateInfo dyn_state_ci = {};
+ dyn_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
+ dyn_state_ci.dynamicStateCount = size(dyn_states);
+ dyn_state_ci.pDynamicStates = dyn_states;
+ helper.dyn_state_ci_ = dyn_state_ci;
+
+ helper.vp_state_ci_.viewportCount = test_case.viewport_count;
+ helper.vp_state_ci_.pViewports = test_case.viewports;
+ helper.vp_state_ci_.scissorCount = test_case.scissor_count;
+ helper.vp_state_ci_.pScissors = test_case.scissors;
+ };
+ CreatePipelineHelper::OneshotTest(*this, break_vp, VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.vuids);
+ }
+}
+
+// Set Extension dynamic states without enabling the required Extensions.
+TEST_F(VkLayerTest, ExtensionDynamicStatesSetWOExtensionEnabled) {
+ TEST_DESCRIPTION("Create a graphics pipeline with Extension dynamic states without enabling the required Extensions.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ using std::vector;
+ struct TestCase {
+ uint32_t dynamic_state_count;
+ VkDynamicState dynamic_state;
+
+ char const *errmsg;
+ };
+
+ vector<TestCase> dyn_test_cases = {
+ {1, VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV,
+ "contains VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV, but VK_NV_clip_space_w_scaling"},
+ {1, VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT,
+ "contains VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT, but VK_EXT_discard_rectangles"},
+ {1, VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT, "contains VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT, but VK_EXT_sample_locations"},
+ };
+
+ for (const auto &test_case : dyn_test_cases) {
+ VkDynamicState state[1];
+ state[0] = test_case.dynamic_state;
+ const auto break_vp = [&](CreatePipelineHelper &helper) {
+ VkPipelineDynamicStateCreateInfo dyn_state_ci = {};
+ dyn_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
+ dyn_state_ci.dynamicStateCount = test_case.dynamic_state_count;
+ dyn_state_ci.pDynamicStates = state;
+ helper.dyn_state_ci_ = dyn_state_ci;
+ };
+ CreatePipelineHelper::OneshotTest(*this, break_vp, VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.errmsg);
+ }
+}
+
+TEST_F(VkLayerTest, PSOViewportStateMultiViewportTests) {
+ TEST_DESCRIPTION("Test VkPipelineViewportStateCreateInfo viewport and scissor count validation for multiViewport feature");
+
+ ASSERT_NO_FATAL_FAILURE(Init()); // enables all supported features
+
+ if (!m_device->phy().features().multiViewport) {
+ printf("%s VkPhysicalDeviceFeatures::multiViewport is not supported -- skipping test.\n", kSkipPrefix);
+ return;
+ }
+ // at least 16 viewports supported from here on
+
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkViewport viewport = {0.0f, 0.0f, 64.0f, 64.0f, 0.0f, 1.0f};
+ VkViewport viewports[] = {viewport, viewport};
+ VkRect2D scissor = {{0, 0}, {64, 64}};
+ VkRect2D scissors[] = {scissor, scissor};
+
+ using std::vector;
+ struct TestCase {
+ uint32_t viewport_count;
+ VkViewport *viewports;
+ uint32_t scissor_count;
+ VkRect2D *scissors;
+
+ vector<std::string> vuids;
+ };
+
+ vector<TestCase> test_cases = {
+ {0,
+ viewports,
+ 2,
+ scissors,
+ {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-arraylength",
+ "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
+ {2,
+ viewports,
+ 0,
+ scissors,
+ {"VUID-VkPipelineViewportStateCreateInfo-scissorCount-arraylength",
+ "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
+ {0,
+ viewports,
+ 0,
+ scissors,
+ {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-arraylength",
+ "VUID-VkPipelineViewportStateCreateInfo-scissorCount-arraylength"}},
+ {2, nullptr, 2, scissors, {"VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00747"}},
+ {2, viewports, 2, nullptr, {"VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00748"}},
+ {2,
+ nullptr,
+ 2,
+ nullptr,
+ {"VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00747", "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00748"}},
+ {0,
+ nullptr,
+ 0,
+ nullptr,
+ {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-arraylength",
+ "VUID-VkPipelineViewportStateCreateInfo-scissorCount-arraylength"}},
+ };
+
+ const auto max_viewports = m_device->phy().properties().limits.maxViewports;
+ const bool max_viewports_maxxed = max_viewports == std::numeric_limits<decltype(max_viewports)>::max();
+ if (max_viewports_maxxed) {
+ printf("%s VkPhysicalDeviceLimits::maxViewports is UINT32_MAX -- skipping part of test requiring to exceed maxViewports.\n",
+ kSkipPrefix);
+ } else {
+ const auto too_much_viewports = max_viewports + 1;
+ // avoid potentially big allocations by using only nullptr
+ test_cases.push_back({too_much_viewports,
+ nullptr,
+ 2,
+ scissors,
+ {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01218",
+ "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220",
+ "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00747"}});
+ test_cases.push_back({2,
+ viewports,
+ too_much_viewports,
+ nullptr,
+ {"VUID-VkPipelineViewportStateCreateInfo-scissorCount-01219",
+ "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220",
+ "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00748"}});
+ test_cases.push_back(
+ {too_much_viewports,
+ nullptr,
+ too_much_viewports,
+ nullptr,
+ {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01218",
+ "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01219", "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00747",
+ "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00748"}});
+ }
+
+ for (const auto &test_case : test_cases) {
+ const auto break_vp = [&test_case](CreatePipelineHelper &helper) {
+ helper.vp_state_ci_.viewportCount = test_case.viewport_count;
+ helper.vp_state_ci_.pViewports = test_case.viewports;
+ helper.vp_state_ci_.scissorCount = test_case.scissor_count;
+ helper.vp_state_ci_.pScissors = test_case.scissors;
+ };
+ CreatePipelineHelper::OneshotTest(*this, break_vp, VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.vuids);
+ }
+
+ vector<TestCase> dyn_test_cases = {
+ {0,
+ viewports,
+ 2,
+ scissors,
+ {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-arraylength",
+ "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
+ {2,
+ viewports,
+ 0,
+ scissors,
+ {"VUID-VkPipelineViewportStateCreateInfo-scissorCount-arraylength",
+ "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
+ {0,
+ viewports,
+ 0,
+ scissors,
+ {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-arraylength",
+ "VUID-VkPipelineViewportStateCreateInfo-scissorCount-arraylength"}},
+ {0,
+ nullptr,
+ 0,
+ nullptr,
+ {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-arraylength",
+ "VUID-VkPipelineViewportStateCreateInfo-scissorCount-arraylength"}},
+ };
+
+ if (!max_viewports_maxxed) {
+ const auto too_much_viewports = max_viewports + 1;
+ // avoid potentially big allocations by using only nullptr
+ dyn_test_cases.push_back({too_much_viewports,
+ nullptr,
+ 2,
+ scissors,
+ {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01218",
+ "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}});
+ dyn_test_cases.push_back({2,
+ viewports,
+ too_much_viewports,
+ nullptr,
+ {"VUID-VkPipelineViewportStateCreateInfo-scissorCount-01219",
+ "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}});
+ dyn_test_cases.push_back({too_much_viewports,
+ nullptr,
+ too_much_viewports,
+ nullptr,
+ {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01218",
+ "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01219"}});
+ }
+
+ const VkDynamicState dyn_states[] = {VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR};
+
+ for (const auto &test_case : dyn_test_cases) {
+ const auto break_vp = [&](CreatePipelineHelper &helper) {
+ VkPipelineDynamicStateCreateInfo dyn_state_ci = {};
+ dyn_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
+ dyn_state_ci.dynamicStateCount = size(dyn_states);
+ dyn_state_ci.pDynamicStates = dyn_states;
+ helper.dyn_state_ci_ = dyn_state_ci;
+
+ helper.vp_state_ci_.viewportCount = test_case.viewport_count;
+ helper.vp_state_ci_.pViewports = test_case.viewports;
+ helper.vp_state_ci_.scissorCount = test_case.scissor_count;
+ helper.vp_state_ci_.pScissors = test_case.scissors;
+ };
+ CreatePipelineHelper::OneshotTest(*this, break_vp, VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.vuids);
+ }
+}
+
+TEST_F(VkLayerTest, DynViewportAndScissorUndefinedDrawState) {
+ TEST_DESCRIPTION("Test viewport and scissor dynamic state that is not set before draw");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ // TODO: should also test on !multiViewport
+ if (!m_device->phy().features().multiViewport) {
+ printf("%s Device does not support multiple viewports/scissors; skipped.\n", kSkipPrefix);
+ return;
+ }
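+ // vkCmdSetViewport/vkCmdSetScissor are called with firstViewport/firstScissor == 1 below,
+ // which requires the multiViewport feature.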
+
+ ASSERT_NO_FATAL_FAILURE(InitViewport());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ const VkPipelineLayoutObj pipeline_layout(m_device);
+
+ VkPipelineObj pipeline_dyn_vp(m_device);
+ pipeline_dyn_vp.AddShader(&vs);
+ pipeline_dyn_vp.AddShader(&fs);
+ pipeline_dyn_vp.AddDefaultColorAttachment();
+ pipeline_dyn_vp.MakeDynamic(VK_DYNAMIC_STATE_VIEWPORT);
+ pipeline_dyn_vp.SetScissor(m_scissors);
+ ASSERT_VK_SUCCESS(pipeline_dyn_vp.CreateVKPipeline(pipeline_layout.handle(), m_renderPass));
+
+ VkPipelineObj pipeline_dyn_sc(m_device);
+ pipeline_dyn_sc.AddShader(&vs);
+ pipeline_dyn_sc.AddShader(&fs);
+ pipeline_dyn_sc.AddDefaultColorAttachment();
+ pipeline_dyn_sc.SetViewport(m_viewports);
+ pipeline_dyn_sc.MakeDynamic(VK_DYNAMIC_STATE_SCISSOR);
+ ASSERT_VK_SUCCESS(pipeline_dyn_sc.CreateVKPipeline(pipeline_layout.handle(), m_renderPass));
+
+ m_commandBuffer->begin();
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "Dynamic viewport(s) 0 are used by pipeline state object, ");
+ vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_dyn_vp.handle());
+ vkCmdSetViewport(m_commandBuffer->handle(), 1, 1,
+ &m_viewports[0]); // Forgetting to set needed 0th viewport (PSO viewportCount == 1)
+ m_commandBuffer->Draw(1, 0, 0, 0);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Dynamic scissor(s) 0 are used by pipeline state object, ");
+ vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_dyn_sc.handle());
+ vkCmdSetScissor(m_commandBuffer->handle(), 1, 1,
+ &m_scissors[0]); // Forgetting to set needed 0th scissor (PSO scissorCount == 1)
+ m_commandBuffer->Draw(1, 0, 0, 0);
+ m_errorMonitor->VerifyFound();
+
+ m_commandBuffer->EndRenderPass();
+ m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, PSOLineWidthInvalid) {
+ TEST_DESCRIPTION("Test non-1.0 lineWidth errors when pipeline is created and in vkCmdSetLineWidth");
+ VkPhysicalDeviceFeatures features{};
+ ASSERT_NO_FATAL_FAILURE(Init(&features));
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+ VkPipelineShaderStageCreateInfo shader_state_cis[] = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
+
+ VkPipelineVertexInputStateCreateInfo vi_state_ci = {};
+ vi_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
+
+ VkPipelineInputAssemblyStateCreateInfo ia_state_ci = {};
+ ia_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
+ ia_state_ci.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
+
+ VkViewport viewport = {0.0f, 0.0f, 64.0f, 64.0f, 0.0f, 1.0f};
+ VkRect2D scissor = {{0, 0}, {64, 64}};
+ VkPipelineViewportStateCreateInfo vp_state_ci = {};
+ vp_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
+ vp_state_ci.viewportCount = 1;
+ vp_state_ci.pViewports = &viewport;
+ vp_state_ci.scissorCount = 1;
+ vp_state_ci.pScissors = &scissor;
+
+ VkPipelineRasterizationStateCreateInfo rs_state_ci = {};
+ rs_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
+ rs_state_ci.rasterizerDiscardEnable = VK_FALSE;
+ // lineWidth is set per test case in the loops below
+
+ VkPipelineMultisampleStateCreateInfo ms_state_ci = {};
+ ms_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
+ ms_state_ci.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT; // must match the subpass attachment's sample count
+
+ VkPipelineColorBlendAttachmentState cba_state = {};
+
+ VkPipelineColorBlendStateCreateInfo cb_state_ci = {};
+ cb_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
+ cb_state_ci.attachmentCount = 1; // must match count in subpass
+ cb_state_ci.pAttachments = &cba_state;
+
+ const VkPipelineLayoutObj pipeline_layout(m_device);
+
+ VkGraphicsPipelineCreateInfo gp_ci = {};
+ gp_ci.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
+ gp_ci.stageCount = sizeof(shader_state_cis) / sizeof(VkPipelineShaderStageCreateInfo);
+ gp_ci.pStages = shader_state_cis;
+ gp_ci.pVertexInputState = &vi_state_ci;
+ gp_ci.pInputAssemblyState = &ia_state_ci;
+ gp_ci.pViewportState = &vp_state_ci;
+ gp_ci.pRasterizationState = &rs_state_ci;
+ gp_ci.pMultisampleState = &ms_state_ci;
+ gp_ci.pColorBlendState = &cb_state_ci;
+ gp_ci.layout = pipeline_layout.handle();
+ gp_ci.renderPass = renderPass();
+ gp_ci.subpass = 0;
+
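+ // NearestSmaller/NearestGreater are assumed to return the closest representable floats just
+ // below and just above 1.0f, i.e. values that are almost-but-not-quite valid.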
+ const std::vector<float> test_cases = {-1.0f, 0.0f, NearestSmaller(1.0f), NearestGreater(1.0f), NAN};
+
+ // test VkPipelineRasterizationStateCreateInfo::lineWidth
+ for (const auto test_case : test_cases) {
+ rs_state_ci.lineWidth = test_case;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00749");
+ VkPipeline pipeline;
+ vkCreateGraphicsPipelines(m_device->device(), VK_NULL_HANDLE, 1, &gp_ci, nullptr, &pipeline);
+ m_errorMonitor->VerifyFound();
+ }
+
+ // test vkCmdSetLineWidth
+ m_commandBuffer->begin();
+
+ for (const auto test_case : test_cases) {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetLineWidth-lineWidth-00788");
+ vkCmdSetLineWidth(m_commandBuffer->handle(), test_case);
+ m_errorMonitor->VerifyFound();
+ }
+}
+
+TEST_F(VkLayerTest, VUID_VkVertexInputBindingDescription_binding_00618) {
+ TEST_DESCRIPTION(
+ "Test VUID-VkVertexInputBindingDescription-binding-00618: binding must be less than "
+ "VkPhysicalDeviceLimits::maxVertexInputBindings");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkPipelineCache pipeline_cache;
+ {
+ VkPipelineCacheCreateInfo create_info{};
+ create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
+
+ VkResult err = vkCreatePipelineCache(m_device->device(), &create_info, nullptr, &pipeline_cache);
+ ASSERT_VK_SUCCESS(err);
+ }
+
+ VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineShaderStageCreateInfo stages[2]{{}};
+ stages[0] = vs.GetStageCreateInfo();
+ stages[1] = fs.GetStageCreateInfo();
+
+ // Test when binding is greater than or equal to VkPhysicalDeviceLimits::maxVertexInputBindings.
+ VkVertexInputBindingDescription vertex_input_binding_description{};
+ vertex_input_binding_description.binding = m_device->props.limits.maxVertexInputBindings;
+
+ VkPipelineVertexInputStateCreateInfo vertex_input_state{};
+ vertex_input_state.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
+ vertex_input_state.pNext = nullptr;
+ vertex_input_state.vertexBindingDescriptionCount = 1;
+ vertex_input_state.pVertexBindingDescriptions = &vertex_input_binding_description;
+ vertex_input_state.vertexAttributeDescriptionCount = 0;
+ vertex_input_state.pVertexAttributeDescriptions = nullptr;
+
+ VkPipelineInputAssemblyStateCreateInfo input_assembly_state{};
+ input_assembly_state.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
+ input_assembly_state.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
+
+ VkViewport viewport = {0.0f, 0.0f, 64.0f, 64.0f, 0.0f, 1.0f};
+ VkRect2D scissor = {{0, 0}, {64, 64}};
+ VkPipelineViewportStateCreateInfo viewport_state{};
+ viewport_state.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
+ viewport_state.viewportCount = 1;
+ viewport_state.pViewports = &viewport;
+ viewport_state.scissorCount = 1;
+ viewport_state.pScissors = &scissor;
+
+ VkPipelineMultisampleStateCreateInfo multisample_state{};
+ multisample_state.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
+ multisample_state.pNext = nullptr;
+ multisample_state.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
+ multisample_state.sampleShadingEnable = 0;
+ multisample_state.minSampleShading = 1.0;
+ multisample_state.pSampleMask = nullptr;
+
+ VkPipelineRasterizationStateCreateInfo rasterization_state{};
+ rasterization_state.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
+ rasterization_state.polygonMode = VK_POLYGON_MODE_FILL;
+ rasterization_state.cullMode = VK_CULL_MODE_BACK_BIT;
+ rasterization_state.frontFace = VK_FRONT_FACE_COUNTER_CLOCKWISE;
+ rasterization_state.depthClampEnable = VK_FALSE;
+ rasterization_state.rasterizerDiscardEnable = VK_FALSE;
+ rasterization_state.depthBiasEnable = VK_FALSE;
+ rasterization_state.lineWidth = 1.0f;
+
+ const VkPipelineLayoutObj pipeline_layout(m_device);
+
+ {
+ VkGraphicsPipelineCreateInfo create_info{};
+ create_info.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
+ create_info.stageCount = 2;
+ create_info.pStages = stages;
+ create_info.pVertexInputState = &vertex_input_state;
+ create_info.pInputAssemblyState = &input_assembly_state;
+ create_info.pViewportState = &viewport_state;
+ create_info.pMultisampleState = &multisample_state;
+ create_info.pRasterizationState = &rasterization_state;
+ create_info.flags = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT;
+ create_info.layout = pipeline_layout.handle();
+ create_info.renderPass = renderPass();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkVertexInputBindingDescription-binding-00618");
+ VkPipeline pipeline;
+ vkCreateGraphicsPipelines(m_device->device(), pipeline_cache, 1, &create_info, nullptr, &pipeline);
+ m_errorMonitor->VerifyFound();
+ }
+
+ vkDestroyPipelineCache(m_device->device(), pipeline_cache, nullptr);
+}
+
+TEST_F(VkLayerTest, VUID_VkVertexInputBindingDescription_stride_00619) {
+ TEST_DESCRIPTION(
+ "Test VUID-VkVertexInputBindingDescription-stride-00619: stride must be less than or equal to "
+ "VkPhysicalDeviceLimits::maxVertexInputBindingStride");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkPipelineCache pipeline_cache;
+ {
+ VkPipelineCacheCreateInfo create_info{};
+ create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
+
+ VkResult err = vkCreatePipelineCache(m_device->device(), &create_info, nullptr, &pipeline_cache);
+ ASSERT_VK_SUCCESS(err);
+ }
+
+ VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineShaderStageCreateInfo stages[2]{{}};
+ stages[0] = vs.GetStageCreateInfo();
+ stages[1] = fs.GetStageCreateInfo();
+
+ // Test when stride is greater than VkPhysicalDeviceLimits::maxVertexInputBindingStride.
+ VkVertexInputBindingDescription vertex_input_binding_description{};
+ vertex_input_binding_description.stride = m_device->props.limits.maxVertexInputBindingStride + 1;
+
+ VkPipelineVertexInputStateCreateInfo vertex_input_state{};
+ vertex_input_state.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
+ vertex_input_state.pNext = nullptr;
+ vertex_input_state.vertexBindingDescriptionCount = 1;
+ vertex_input_state.pVertexBindingDescriptions = &vertex_input_binding_description;
+ vertex_input_state.vertexAttributeDescriptionCount = 0;
+ vertex_input_state.pVertexAttributeDescriptions = nullptr;
+
+ VkPipelineInputAssemblyStateCreateInfo input_assembly_state{};
+ input_assembly_state.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
+ input_assembly_state.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
+
+ VkViewport viewport = {0.0f, 0.0f, 64.0f, 64.0f, 0.0f, 1.0f};
+ VkRect2D scissor = {{0, 0}, {64, 64}};
+ VkPipelineViewportStateCreateInfo viewport_state{};
+ viewport_state.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
+ viewport_state.viewportCount = 1;
+ viewport_state.pViewports = &viewport;
+ viewport_state.scissorCount = 1;
+ viewport_state.pScissors = &scissor;
+
+ VkPipelineMultisampleStateCreateInfo multisample_state{};
+ multisample_state.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
+ multisample_state.pNext = nullptr;
+ multisample_state.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
+ multisample_state.sampleShadingEnable = 0;
+ multisample_state.minSampleShading = 1.0;
+ multisample_state.pSampleMask = nullptr;
+
+ VkPipelineRasterizationStateCreateInfo rasterization_state{};
+ rasterization_state.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
+ rasterization_state.polygonMode = VK_POLYGON_MODE_FILL;
+ rasterization_state.cullMode = VK_CULL_MODE_BACK_BIT;
+ rasterization_state.frontFace = VK_FRONT_FACE_COUNTER_CLOCKWISE;
+ rasterization_state.depthClampEnable = VK_FALSE;
+ rasterization_state.rasterizerDiscardEnable = VK_FALSE;
+ rasterization_state.depthBiasEnable = VK_FALSE;
+ rasterization_state.lineWidth = 1.0f;
+
+ const VkPipelineLayoutObj pipeline_layout(m_device);
+
+ {
+ VkGraphicsPipelineCreateInfo create_info{};
+ create_info.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
+ create_info.stageCount = 2;
+ create_info.pStages = stages;
+ create_info.pVertexInputState = &vertex_input_state;
+ create_info.pInputAssemblyState = &input_assembly_state;
+ create_info.pViewportState = &viewport_state;
+ create_info.pMultisampleState = &multisample_state;
+ create_info.pRasterizationState = &rasterization_state;
+ create_info.flags = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT;
+ create_info.layout = pipeline_layout.handle();
+ create_info.renderPass = renderPass();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkVertexInputBindingDescription-stride-00619");
+ VkPipeline pipeline;
+ vkCreateGraphicsPipelines(m_device->device(), pipeline_cache, 1, &create_info, nullptr, &pipeline);
+ m_errorMonitor->VerifyFound();
+ }
+
+ vkDestroyPipelineCache(m_device->device(), pipeline_cache, nullptr);
+}
+
+TEST_F(VkLayerTest, VUID_VkVertexInputAttributeDescription_location_00620) {
+ TEST_DESCRIPTION(
+ "Test VUID-VkVertexInputAttributeDescription-location-00620: location must be less than "
+ "VkPhysicalDeviceLimits::maxVertexInputAttributes");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkPipelineCache pipeline_cache;
+ {
+ VkPipelineCacheCreateInfo create_info{};
+ create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
+
+ VkResult err = vkCreatePipelineCache(m_device->device(), &create_info, nullptr, &pipeline_cache);
+ ASSERT_VK_SUCCESS(err);
+ }
+
+ VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineShaderStageCreateInfo stages[2]{{}};
+ stages[0] = vs.GetStageCreateInfo();
+ stages[1] = fs.GetStageCreateInfo();
+
+ // Test when location is greater than or equal to VkPhysicalDeviceLimits::maxVertexInputAttributes.
+ VkVertexInputAttributeDescription vertex_input_attribute_description{};
+ vertex_input_attribute_description.location = m_device->props.limits.maxVertexInputAttributes;
+
+ VkPipelineVertexInputStateCreateInfo vertex_input_state{};
+ vertex_input_state.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
+ vertex_input_state.pNext = nullptr;
+ vertex_input_state.vertexBindingDescriptionCount = 0;
+ vertex_input_state.pVertexBindingDescriptions = nullptr;
+ vertex_input_state.vertexAttributeDescriptionCount = 1;
+ vertex_input_state.pVertexAttributeDescriptions = &vertex_input_attribute_description;
+
+ VkPipelineInputAssemblyStateCreateInfo input_assembly_state{};
+ input_assembly_state.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
+ input_assembly_state.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
+
+ VkViewport viewport = {0.0f, 0.0f, 64.0f, 64.0f, 0.0f, 1.0f};
+ VkRect2D scissor = {{0, 0}, {64, 64}};
+ VkPipelineViewportStateCreateInfo viewport_state{};
+ viewport_state.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
+ viewport_state.viewportCount = 1;
+ viewport_state.pViewports = &viewport;
+ viewport_state.scissorCount = 1;
+ viewport_state.pScissors = &scissor;
+
+ VkPipelineMultisampleStateCreateInfo multisample_state{};
+ multisample_state.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
+ multisample_state.pNext = nullptr;
+ multisample_state.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
+ multisample_state.sampleShadingEnable = 0;
+ multisample_state.minSampleShading = 1.0;
+ multisample_state.pSampleMask = nullptr;
+
+ VkPipelineRasterizationStateCreateInfo rasterization_state{};
+ rasterization_state.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
+ rasterization_state.polygonMode = VK_POLYGON_MODE_FILL;
+ rasterization_state.cullMode = VK_CULL_MODE_BACK_BIT;
+ rasterization_state.frontFace = VK_FRONT_FACE_COUNTER_CLOCKWISE;
+ rasterization_state.depthClampEnable = VK_FALSE;
+ rasterization_state.rasterizerDiscardEnable = VK_FALSE;
+ rasterization_state.depthBiasEnable = VK_FALSE;
+ rasterization_state.lineWidth = 1.0f;
+
+ const VkPipelineLayoutObj pipeline_layout(m_device);
+
+ {
+ VkGraphicsPipelineCreateInfo create_info{};
+ create_info.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
+ create_info.stageCount = 2;
+ create_info.pStages = stages;
+ create_info.pVertexInputState = &vertex_input_state;
+ create_info.pInputAssemblyState = &input_assembly_state;
+ create_info.pViewportState = &viewport_state;
+ create_info.pMultisampleState = &multisample_state;
+ create_info.pRasterizationState = &rasterization_state;
+ create_info.flags = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT;
+ create_info.layout = pipeline_layout.handle();
+ create_info.renderPass = renderPass();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkVertexInputAttributeDescription-location-00620");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkPipelineVertexInputStateCreateInfo-binding-00615");
+ VkPipeline pipeline;
+ vkCreateGraphicsPipelines(m_device->device(), pipeline_cache, 1, &create_info, nullptr, &pipeline);
+ m_errorMonitor->VerifyFound();
+ }
+
+ vkDestroyPipelineCache(m_device->device(), pipeline_cache, nullptr);
+}
+
+TEST_F(VkLayerTest, VUID_VkVertexInputAttributeDescription_binding_00621) {
+ TEST_DESCRIPTION(
+ "Test VUID-VkVertexInputAttributeDescription-binding-00621: binding must be less than "
+ "VkPhysicalDeviceLimits::maxVertexInputBindings");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkPipelineCache pipeline_cache;
+ {
+ VkPipelineCacheCreateInfo create_info{};
+ create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
+
+ VkResult err = vkCreatePipelineCache(m_device->device(), &create_info, nullptr, &pipeline_cache);
+ ASSERT_VK_SUCCESS(err);
+ }
+
+ VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineShaderStageCreateInfo stages[2]{{}};
+ stages[0] = vs.GetStageCreateInfo();
+ stages[1] = fs.GetStageCreateInfo();
+
+ // Test when binding is greater than or equal to VkPhysicalDeviceLimits::maxVertexInputBindings.
+ VkVertexInputAttributeDescription vertex_input_attribute_description{};
+ vertex_input_attribute_description.binding = m_device->props.limits.maxVertexInputBindings;
+
+ VkPipelineVertexInputStateCreateInfo vertex_input_state{};
+ vertex_input_state.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
+ vertex_input_state.pNext = nullptr;
+ vertex_input_state.vertexBindingDescriptionCount = 0;
+ vertex_input_state.pVertexBindingDescriptions = nullptr;
+ vertex_input_state.vertexAttributeDescriptionCount = 1;
+ vertex_input_state.pVertexAttributeDescriptions = &vertex_input_attribute_description;
+
+ VkPipelineInputAssemblyStateCreateInfo input_assembly_state{};
+ input_assembly_state.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
+ input_assembly_state.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
+
+ VkViewport viewport = {0.0f, 0.0f, 64.0f, 64.0f, 0.0f, 1.0f};
+ VkRect2D scissor = {{0, 0}, {64, 64}};
+ VkPipelineViewportStateCreateInfo viewport_state{};
+ viewport_state.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
+ viewport_state.viewportCount = 1;
+ viewport_state.pViewports = &viewport;
+ viewport_state.scissorCount = 1;
+ viewport_state.pScissors = &scissor;
+
+ VkPipelineMultisampleStateCreateInfo multisample_state{};
+ multisample_state.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
+ multisample_state.pNext = nullptr;
+ multisample_state.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
+ multisample_state.sampleShadingEnable = 0;
+ multisample_state.minSampleShading = 1.0;
+ multisample_state.pSampleMask = nullptr;
+
+ VkPipelineRasterizationStateCreateInfo rasterization_state{};
+ rasterization_state.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
+ rasterization_state.polygonMode = VK_POLYGON_MODE_FILL;
+ rasterization_state.cullMode = VK_CULL_MODE_BACK_BIT;
+ rasterization_state.frontFace = VK_FRONT_FACE_COUNTER_CLOCKWISE;
+ rasterization_state.depthClampEnable = VK_FALSE;
+ rasterization_state.rasterizerDiscardEnable = VK_FALSE;
+ rasterization_state.depthBiasEnable = VK_FALSE;
+ rasterization_state.lineWidth = 1.0f;
+
+ const VkPipelineLayoutObj pipeline_layout(m_device);
+
+ {
+ VkGraphicsPipelineCreateInfo create_info{};
+ create_info.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
+ create_info.stageCount = 2;
+ create_info.pStages = stages;
+ create_info.pVertexInputState = &vertex_input_state;
+ create_info.pInputAssemblyState = &input_assembly_state;
+ create_info.pViewportState = &viewport_state;
+ create_info.pMultisampleState = &multisample_state;
+ create_info.pRasterizationState = &rasterization_state;
+ create_info.flags = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT;
+ create_info.layout = pipeline_layout.handle();
+ create_info.renderPass = renderPass();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkVertexInputAttributeDescription-binding-00621");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkPipelineVertexInputStateCreateInfo-binding-00615");
+ VkPipeline pipeline;
+ vkCreateGraphicsPipelines(m_device->device(), pipeline_cache, 1, &create_info, nullptr, &pipeline);
+ m_errorMonitor->VerifyFound();
+ }
+
+ vkDestroyPipelineCache(m_device->device(), pipeline_cache, nullptr);
+}
+
+TEST_F(VkLayerTest, VUID_VkVertexInputAttributeDescription_offset_00622) {
+ TEST_DESCRIPTION(
+ "Test VUID-VkVertexInputAttributeDescription-offset-00622: offset must be less than or equal to "
+ "VkPhysicalDeviceLimits::maxVertexInputAttributeOffset");
+
+ EnableDeviceProfileLayer();
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+ uint32_t maxVertexInputAttributeOffset = 0;
+ {
+ VkPhysicalDeviceProperties device_props = {};
+ vkGetPhysicalDeviceProperties(gpu(), &device_props);
+ maxVertexInputAttributeOffset = device_props.limits.maxVertexInputAttributeOffset;
+ if (maxVertexInputAttributeOffset == 0xFFFFFFFF) {
+ // Attempt to artificially lower maximum offset
+ PFN_vkSetPhysicalDeviceLimitsEXT fpvkSetPhysicalDeviceLimitsEXT =
+ (PFN_vkSetPhysicalDeviceLimitsEXT)vkGetInstanceProcAddr(instance(), "vkSetPhysicalDeviceLimitsEXT");
+ if (!fpvkSetPhysicalDeviceLimitsEXT) {
+ printf("%s All offsets are valid & device_profile_api not found; skipped.\n", kSkipPrefix);
+ return;
+ }
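+ // Presumably stride - 2 is chosen so that the offending offset (new limit + 1) used below
+ // still fits within maxVertexInputBindingStride.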
+ device_props.limits.maxVertexInputAttributeOffset = device_props.limits.maxVertexInputBindingStride - 2;
+ fpvkSetPhysicalDeviceLimitsEXT(gpu(), &device_props.limits);
+ maxVertexInputAttributeOffset = device_props.limits.maxVertexInputAttributeOffset;
+ }
+ }
+ ASSERT_NO_FATAL_FAILURE(InitState());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkPipelineCache pipeline_cache;
+ {
+ VkPipelineCacheCreateInfo create_info{};
+ create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
+
+ VkResult err = vkCreatePipelineCache(m_device->device(), &create_info, nullptr, &pipeline_cache);
+ ASSERT_VK_SUCCESS(err);
+ }
+
+ VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineShaderStageCreateInfo stages[2]{{}};
+ stages[0] = vs.GetStageCreateInfo();
+ stages[1] = fs.GetStageCreateInfo();
+
+ VkVertexInputBindingDescription vertex_input_binding_description{};
+ vertex_input_binding_description.binding = 0;
+ vertex_input_binding_description.stride = m_device->props.limits.maxVertexInputBindingStride;
+ vertex_input_binding_description.inputRate = VK_VERTEX_INPUT_RATE_VERTEX;
+ // Test when offset is greater than maximum.
+ VkVertexInputAttributeDescription vertex_input_attribute_description{};
+ vertex_input_attribute_description.format = VK_FORMAT_R8_UNORM;
+ vertex_input_attribute_description.offset = maxVertexInputAttributeOffset + 1;
+
+ VkPipelineVertexInputStateCreateInfo vertex_input_state{};
+ vertex_input_state.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
+ vertex_input_state.pNext = nullptr;
+ vertex_input_state.vertexBindingDescriptionCount = 1;
+ vertex_input_state.pVertexBindingDescriptions = &vertex_input_binding_description;
+ vertex_input_state.vertexAttributeDescriptionCount = 1;
+ vertex_input_state.pVertexAttributeDescriptions = &vertex_input_attribute_description;
+
+ VkPipelineInputAssemblyStateCreateInfo input_assembly_state{};
+ input_assembly_state.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
+ input_assembly_state.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
+
+ VkPipelineMultisampleStateCreateInfo multisample_state{};
+ multisample_state.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
+ multisample_state.pNext = nullptr;
+ multisample_state.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
+ multisample_state.sampleShadingEnable = 0;
+ multisample_state.minSampleShading = 1.0;
+ multisample_state.pSampleMask = nullptr;
+
+ VkPipelineRasterizationStateCreateInfo rasterization_state{};
+ rasterization_state.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
+ rasterization_state.polygonMode = VK_POLYGON_MODE_FILL;
+ rasterization_state.cullMode = VK_CULL_MODE_BACK_BIT;
+ rasterization_state.frontFace = VK_FRONT_FACE_COUNTER_CLOCKWISE;
+ rasterization_state.depthClampEnable = VK_FALSE;
+ rasterization_state.rasterizerDiscardEnable = VK_TRUE;
+ rasterization_state.depthBiasEnable = VK_FALSE;
+ rasterization_state.lineWidth = 1.0f;
+
+ const VkPipelineLayoutObj pipeline_layout(m_device);
+
+ {
+ VkGraphicsPipelineCreateInfo create_info{};
+ create_info.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
+ create_info.stageCount = 2;
+ create_info.pStages = stages;
+ create_info.pVertexInputState = &vertex_input_state;
+ create_info.pInputAssemblyState = &input_assembly_state;
+ create_info.pViewportState = nullptr; // no viewport state needed because rasterizerDiscardEnable is VK_TRUE
+ create_info.pMultisampleState = &multisample_state;
+ create_info.pRasterizationState = &rasterization_state;
+ create_info.flags = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT;
+ create_info.layout = pipeline_layout.handle();
+ create_info.renderPass = renderPass();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkVertexInputAttributeDescription-offset-00622");
+ VkPipeline pipeline;
+ vkCreateGraphicsPipelines(m_device->device(), pipeline_cache, 1, &create_info, nullptr, &pipeline);
+ m_errorMonitor->VerifyFound();
+ }
+
+ vkDestroyPipelineCache(m_device->device(), pipeline_cache, nullptr);
+}
+
+TEST_F(VkLayerTest, NullRenderPass) {
+ // Bind a NULL RenderPass
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "vkCmdBeginRenderPass: required parameter pRenderPassBegin specified as NULL");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ m_commandBuffer->begin();
+ // The RenderPass handle itself doesn't matter because the error should be
+ // flagged before it is ever examined
+ vkCmdBeginRenderPass(m_commandBuffer->handle(), NULL, VK_SUBPASS_CONTENTS_INLINE);
+
+ m_errorMonitor->VerifyFound();
+
+ m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, EndCommandBufferWithinRenderPass) {
+ TEST_DESCRIPTION("End a command buffer with an active render pass");
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "It is invalid to issue this call inside an active render pass");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ m_commandBuffer->begin();
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+ vkEndCommandBuffer(m_commandBuffer->handle());
+
+ m_errorMonitor->VerifyFound();
+
+ // End command buffer properly to avoid driver issues. This is safe -- the
+ // previous vkEndCommandBuffer should not have reached the driver.
+ m_commandBuffer->EndRenderPass();
+ m_commandBuffer->end();
+
+ // TODO: Add test for VK_COMMAND_BUFFER_LEVEL_SECONDARY
+ // TODO: Add test for VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT
+}
+
+TEST_F(VkLayerTest, FillBufferWithinRenderPass) {
+ // Call CmdFillBuffer within an active renderpass
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "It is invalid to issue this call inside an active render pass");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ m_commandBuffer->begin();
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
+ VkMemoryPropertyFlags reqs = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
+ VkBufferObj dstBuffer;
+ dstBuffer.init_as_dst(*m_device, (VkDeviceSize)1024, reqs);
+
+ m_commandBuffer->FillBuffer(dstBuffer.handle(), 0, 4, 0x11111111);
+
+ m_errorMonitor->VerifyFound();
+
+ m_commandBuffer->EndRenderPass();
+ m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, UpdateBufferWithinRenderPass) {
+ // Call CmdUpdateBuffer within an active renderpass
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "It is invalid to issue this call inside an active render pass");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ m_commandBuffer->begin();
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
+ VkMemoryPropertyFlags reqs = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
+ VkBufferObj dstBuffer;
+ dstBuffer.init_as_dst(*m_device, (VkDeviceSize)1024, reqs);
+
+ VkDeviceSize dstOffset = 0;
+ uint32_t Data[] = {1, 2, 3, 4, 5, 6, 7, 8};
+ VkDeviceSize dataSize = sizeof(Data); // vkCmdUpdateBuffer takes the data size in bytes
+ vkCmdUpdateBuffer(m_commandBuffer->handle(), dstBuffer.handle(), dstOffset, dataSize, Data);
+
+ m_errorMonitor->VerifyFound();
+
+ m_commandBuffer->EndRenderPass();
+ m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, ClearColorImageWithBadRange) {
+ TEST_DESCRIPTION("Record clear color with an invalid VkImageSubresourceRange");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkImageObj image(m_device);
+ image.Init(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL);
+ ASSERT_TRUE(image.create_info().arrayLayers == 1);
+ ASSERT_TRUE(image.initialized());
+ image.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
+
+ const VkClearColorValue clear_color = {{0.0f, 0.0f, 0.0f, 1.0f}};
+
+ m_commandBuffer->begin();
+ const auto cb_handle = m_commandBuffer->handle();
+
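+ // The image was created with a single mip level and a single array layer, so any
+ // baseMipLevel or baseArrayLayer >= 1 (or a count that overruns) is out of range.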
+ // Try baseMipLevel >= image.mipLevels with VK_REMAINING_MIP_LEVELS
+ {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearColorImage-baseMipLevel-01470");
+ const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 1, VK_REMAINING_MIP_LEVELS, 0, 1};
+ vkCmdClearColorImage(cb_handle, image.handle(), image.Layout(), &clear_color, 1, &range);
+ m_errorMonitor->VerifyFound();
+ }
+
+ // Try baseMipLevel >= image.mipLevels without VK_REMAINING_MIP_LEVELS
+ {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearColorImage-baseMipLevel-01470");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearColorImage-pRanges-01692");
+ const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 1, 1, 0, 1};
+ vkCmdClearColorImage(cb_handle, image.handle(), image.Layout(), &clear_color, 1, &range);
+ m_errorMonitor->VerifyFound();
+ }
+
+ // Try levelCount = 0
+ {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearColorImage-pRanges-01692");
+ const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 0, 1};
+ vkCmdClearColorImage(cb_handle, image.handle(), image.Layout(), &clear_color, 1, &range);
+ m_errorMonitor->VerifyFound();
+ }
+
+ // Try baseMipLevel + levelCount > image.mipLevels
+ {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearColorImage-pRanges-01692");
+ const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 2, 0, 1};
+ vkCmdClearColorImage(cb_handle, image.handle(), image.Layout(), &clear_color, 1, &range);
+ m_errorMonitor->VerifyFound();
+ }
+
+ // Try baseArrayLayer >= image.arrayLayers with VK_REMAINING_ARRAY_LAYERS
+ {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearColorImage-baseArrayLayer-01472");
+ const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 1, VK_REMAINING_ARRAY_LAYERS};
+ vkCmdClearColorImage(cb_handle, image.handle(), image.Layout(), &clear_color, 1, &range);
+ m_errorMonitor->VerifyFound();
+ }
+
+ // Try baseArrayLayer >= image.arrayLayers without VK_REMAINING_ARRAY_LAYERS
+ {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearColorImage-baseArrayLayer-01472");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearColorImage-pRanges-01693");
+ const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 1, 1};
+ vkCmdClearColorImage(cb_handle, image.handle(), image.Layout(), &clear_color, 1, &range);
+ m_errorMonitor->VerifyFound();
+ }
+
+ // Try layerCount = 0
+ {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearColorImage-pRanges-01693");
+ const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 0};
+ vkCmdClearColorImage(cb_handle, image.handle(), image.Layout(), &clear_color, 1, &range);
+ m_errorMonitor->VerifyFound();
+ }
+
+ // Try baseArrayLayer + layerCount > image.arrayLayers
+ {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearColorImage-pRanges-01693");
+ const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 2};
+ vkCmdClearColorImage(cb_handle, image.handle(), image.Layout(), &clear_color, 1, &range);
+ m_errorMonitor->VerifyFound();
+ }
+}
+
+TEST_F(VkLayerTest, ClearDepthStencilWithBadRange) {
+ TEST_DESCRIPTION("Record clear depth with an invalid VkImageSubresourceRange");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ const auto depth_format = FindSupportedDepthStencilFormat(gpu());
+ if (!depth_format) {
+ printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
+ return;
+ }
+
+ VkImageObj image(m_device);
+ image.Init(32, 32, 1, depth_format, VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL);
+ ASSERT_TRUE(image.create_info().arrayLayers == 1);
+ ASSERT_TRUE(image.initialized());
+ const VkImageAspectFlags ds_aspect = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
+ image.SetLayout(ds_aspect, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
+
+ const VkClearDepthStencilValue clear_value = {};
+
+ m_commandBuffer->begin();
+ const auto cb_handle = m_commandBuffer->handle();
+
+ // Try baseMipLevel >= image.mipLevels with VK_REMAINING_MIP_LEVELS
+ {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearDepthStencilImage-baseMipLevel-01474");
+ const VkImageSubresourceRange range = {ds_aspect, 1, VK_REMAINING_MIP_LEVELS, 0, 1};
+ vkCmdClearDepthStencilImage(cb_handle, image.handle(), image.Layout(), &clear_value, 1, &range);
+ m_errorMonitor->VerifyFound();
+ }
+
+ // Try baseMipLevel >= image.mipLevels without VK_REMAINING_MIP_LEVELS
+ {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearDepthStencilImage-baseMipLevel-01474");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearDepthStencilImage-pRanges-01694");
+ const VkImageSubresourceRange range = {ds_aspect, 1, 1, 0, 1};
+ vkCmdClearDepthStencilImage(cb_handle, image.handle(), image.Layout(), &clear_value, 1, &range);
+ m_errorMonitor->VerifyFound();
+ }
+
+ // Try levelCount = 0
+ {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearDepthStencilImage-pRanges-01694");
+ const VkImageSubresourceRange range = {ds_aspect, 0, 0, 0, 1};
+ vkCmdClearDepthStencilImage(cb_handle, image.handle(), image.Layout(), &clear_value, 1, &range);
+ m_errorMonitor->VerifyFound();
+ }
+
+ // Try baseMipLevel + levelCount > image.mipLevels
+ {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearDepthStencilImage-pRanges-01694");
+ const VkImageSubresourceRange range = {ds_aspect, 0, 2, 0, 1};
+ vkCmdClearDepthStencilImage(cb_handle, image.handle(), image.Layout(), &clear_value, 1, &range);
+ m_errorMonitor->VerifyFound();
+ }
+
+ // Try baseArrayLayer >= image.arrayLayers with VK_REMAINING_ARRAY_LAYERS
+ {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdClearDepthStencilImage-baseArrayLayer-01476");
+ const VkImageSubresourceRange range = {ds_aspect, 0, 1, 1, VK_REMAINING_ARRAY_LAYERS};
+ vkCmdClearDepthStencilImage(cb_handle, image.handle(), image.Layout(), &clear_value, 1, &range);
+ m_errorMonitor->VerifyFound();
+ }
+
+ // Try baseArrayLayer >= image.arrayLayers without VK_REMAINING_ARRAY_LAYERS
+ {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdClearDepthStencilImage-baseArrayLayer-01476");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearDepthStencilImage-pRanges-01695");
+ const VkImageSubresourceRange range = {ds_aspect, 0, 1, 1, 1};
+ vkCmdClearDepthStencilImage(cb_handle, image.handle(), image.Layout(), &clear_value, 1, &range);
+ m_errorMonitor->VerifyFound();
+ }
+
+ // Try layerCount = 0
+ {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearDepthStencilImage-pRanges-01695");
+ const VkImageSubresourceRange range = {ds_aspect, 0, 1, 0, 0};
+ vkCmdClearDepthStencilImage(cb_handle, image.handle(), image.Layout(), &clear_value, 1, &range);
+ m_errorMonitor->VerifyFound();
+ }
+
+ // Try baseArrayLayer + layerCount > image.arrayLayers
+ {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearDepthStencilImage-pRanges-01695");
+ const VkImageSubresourceRange range = {ds_aspect, 0, 1, 0, 2};
+ vkCmdClearDepthStencilImage(cb_handle, image.handle(), image.Layout(), &clear_value, 1, &range);
+ m_errorMonitor->VerifyFound();
+ }
+}
+
+TEST_F(VkLayerTest, ClearColorImageWithinRenderPass) {
+ // Call CmdClearColorImage within an active RenderPass
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "It is invalid to issue this call inside an active render pass");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ m_commandBuffer->begin();
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
+ VkClearColorValue clear_color;
+ memset(clear_color.uint32, 0, sizeof(uint32_t) * 4);
+ const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
+ const int32_t tex_width = 32;
+ const int32_t tex_height = 32;
+ VkImageCreateInfo image_create_info = {};
+ image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ image_create_info.pNext = NULL;
+ image_create_info.imageType = VK_IMAGE_TYPE_2D;
+ image_create_info.format = tex_format;
+ image_create_info.extent.width = tex_width;
+ image_create_info.extent.height = tex_height;
+ image_create_info.extent.depth = 1;
+ image_create_info.mipLevels = 1;
+ image_create_info.arrayLayers = 1;
+ image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+ image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+ image_create_info.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+
+ vk_testing::Image dstImage;
+ dstImage.init(*m_device, (const VkImageCreateInfo &)image_create_info);
+
+ const VkImageSubresourceRange range = vk_testing::Image::subresource_range(image_create_info, VK_IMAGE_ASPECT_COLOR_BIT);
+
+ vkCmdClearColorImage(m_commandBuffer->handle(), dstImage.handle(), VK_IMAGE_LAYOUT_GENERAL, &clear_color, 1, &range);
+
+ m_errorMonitor->VerifyFound();
+
+ m_commandBuffer->EndRenderPass();
+ m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, ClearDepthStencilImageErrors) {
+ // Hit errors related to vkCmdClearDepthStencilImage()
+ // 1. Use an image that doesn't have VK_IMAGE_USAGE_TRANSFER_DST_BIT set
+ // 2. Call CmdClearDepthStencilImage within an active RenderPass
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ auto depth_format = FindSupportedDepthStencilFormat(gpu());
+ if (!depth_format) {
+ printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
+ return;
+ }
+
+ VkClearDepthStencilValue clear_value = {0};
+ VkMemoryPropertyFlags reqs = 0;
+ VkImageCreateInfo image_create_info = vk_testing::Image::create_info();
+ image_create_info.imageType = VK_IMAGE_TYPE_2D;
+ image_create_info.format = depth_format;
+ image_create_info.extent.width = 64;
+ image_create_info.extent.height = 64;
+ image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+ // The error: VK_IMAGE_USAGE_TRANSFER_DST_BIT is missing from the DS image that we clear below
+ image_create_info.usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
+
+ vk_testing::Image dst_image_bad_usage;
+ dst_image_bad_usage.init(*m_device, (const VkImageCreateInfo &)image_create_info, reqs);
+ const VkImageSubresourceRange range = vk_testing::Image::subresource_range(image_create_info, VK_IMAGE_ASPECT_DEPTH_BIT);
+
+ m_commandBuffer->begin();
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearDepthStencilImage-image-00009");
+ vkCmdClearDepthStencilImage(m_commandBuffer->handle(), dst_image_bad_usage.handle(), VK_IMAGE_LAYOUT_GENERAL, &clear_value, 1,
+ &range);
+ m_errorMonitor->VerifyFound();
+
+ // Fix usage for next test case
+ image_create_info.usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+ vk_testing::Image dst_image;
+ dst_image.init(*m_device, (const VkImageCreateInfo &)image_create_info, reqs);
+
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearDepthStencilImage-renderpass");
+ vkCmdClearDepthStencilImage(m_commandBuffer->handle(), dst_image.handle(), VK_IMAGE_LAYOUT_GENERAL, &clear_value, 1, &range);
+ m_errorMonitor->VerifyFound();
+
+ m_commandBuffer->EndRenderPass();
+ m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, ClearColorAttachmentsOutsideRenderPass) {
+ // Call CmdClearAttachments outside of an active RenderPass
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "vkCmdClearAttachments(): This call must be issued inside an active render pass");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ // Start no RenderPass
+ m_commandBuffer->begin();
+
+ VkClearAttachment color_attachment;
+ color_attachment.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ color_attachment.clearValue.color.float32[0] = 0;
+ color_attachment.clearValue.color.float32[1] = 0;
+ color_attachment.clearValue.color.float32[2] = 0;
+ color_attachment.clearValue.color.float32[3] = 0;
+ color_attachment.colorAttachment = 0;
+ VkClearRect clear_rect = {{{0, 0}, {32, 32}}};
+ vkCmdClearAttachments(m_commandBuffer->handle(), 1, &color_attachment, 1, &clear_rect);
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, BufferMemoryBarrierNoBuffer) {
+ // Try to add a buffer memory barrier with no buffer.
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "required parameter pBufferMemoryBarriers[0].buffer specified as VK_NULL_HANDLE");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ m_commandBuffer->begin();
+
+ VkBufferMemoryBarrier buf_barrier = {};
+ buf_barrier.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
+ buf_barrier.srcAccessMask = VK_ACCESS_HOST_WRITE_BIT;
+ buf_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
+ buf_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+ buf_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+ buf_barrier.buffer = VK_NULL_HANDLE;
+ buf_barrier.offset = 0;
+ buf_barrier.size = VK_WHOLE_SIZE;
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, 0, 0, nullptr,
+ 1, &buf_barrier, 0, nullptr);
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, InvalidBarriers) {
+ TEST_DESCRIPTION("A variety of ways to get VK_INVALID_BARRIER ");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ auto depth_format = FindSupportedDepthStencilFormat(gpu());
+ if (!depth_format) {
+ printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
+ return;
+ }
+ // Add a token self-dependency for this test to avoid unexpected errors
+ m_addRenderPassSelfDependency = true;
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ m_commandBuffer->begin();
+
+ // Use image unbound to memory in barrier
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ " used with no memory bound. Memory should be bound by calling vkBindImageMemory()");
+ vk_testing::Image unbound_image;
+ auto unbound_image_info = vk_testing::Image::create_info();
+ unbound_image_info.format = VK_FORMAT_B8G8R8A8_UNORM;
+ unbound_image_info.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
+ unbound_image.init_no_mem(*m_device, unbound_image_info);
+ auto unbound_subresource = vk_testing::Image::subresource_range(unbound_image_info, VK_IMAGE_ASPECT_COLOR_BIT);
+ auto unbound_image_barrier = unbound_image.image_memory_barrier(0, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED,
+ VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, unbound_subresource);
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0,
+ nullptr, 0, nullptr, 1, &unbound_image_barrier);
+ m_errorMonitor->VerifyFound();
+
+ // Use buffer unbound to memory in barrier
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ " used with no memory bound. Memory should be bound by calling vkBindBufferMemory()");
+ VkBufferObj unbound_buffer;
+ auto unbound_buffer_info = VkBufferObj::create_info(16, VK_IMAGE_USAGE_TRANSFER_DST_BIT);
+ unbound_buffer.init_no_mem(*m_device, unbound_buffer_info);
+ auto unbound_buffer_barrier = unbound_buffer.buffer_memory_barrier(0, VK_ACCESS_TRANSFER_WRITE_BIT, 0, 16);
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0,
+ nullptr, 1, &unbound_buffer_barrier, 0, nullptr);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-newLayout-01198");
+ VkImageObj image(m_device);
+ image.Init(128, 128, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+ ASSERT_TRUE(image.initialized());
+ VkImageMemoryBarrier img_barrier = {};
+ img_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+ img_barrier.pNext = NULL;
+ img_barrier.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
+ img_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
+ img_barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+ // New layout can't be UNDEFINED
+ img_barrier.newLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+ img_barrier.image = m_renderTargets[0]->handle();
+ img_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+ img_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+ img_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ img_barrier.subresourceRange.baseArrayLayer = 0;
+ img_barrier.subresourceRange.baseMipLevel = 0;
+ img_barrier.subresourceRange.layerCount = 1;
+ img_barrier.subresourceRange.levelCount = 1;
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+ VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1,
+ &img_barrier);
+ m_errorMonitor->VerifyFound();
+
+ // Transition image to color attachment optimal
+ img_barrier.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+ VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1,
+ &img_barrier);
+
+ // TODO: this looks vestigial or incomplete...
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
+ img_barrier.oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+
+ // Buffer memory barriers can't be recorded inside a render pass, so end it first
+ vkCmdEndRenderPass(m_commandBuffer->handle());
+
+ // Duplicate barriers that change layout
+ img_barrier.image = image.handle();
+ img_barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+ img_barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
+ VkImageMemoryBarrier img_barriers[2] = {img_barrier, img_barrier};
+
+ // Transitions from UNDEFINED are valid, even if duplicated
+ m_errorMonitor->ExpectSuccess();
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+ VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 2,
+ img_barriers);
+ m_errorMonitor->VerifyNotFound();
+
+ // Duplication of layout transitions (not from undefined) are not valid
+ img_barriers[0].oldLayout = VK_IMAGE_LAYOUT_GENERAL;
+ img_barriers[0].newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+ img_barriers[1].oldLayout = img_barriers[0].oldLayout;
+ img_barriers[1].newLayout = img_barriers[0].newLayout;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-oldLayout-01197");
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+ VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 2,
+ img_barriers);
+ m_errorMonitor->VerifyFound();
+
+ VkBufferObj buffer;
+ VkMemoryPropertyFlags mem_reqs = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
+ buffer.init_as_src_and_dst(*m_device, 256, mem_reqs);
+ VkBufferMemoryBarrier buf_barrier = {};
+ buf_barrier.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
+ buf_barrier.pNext = NULL;
+ buf_barrier.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
+ buf_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
+ buf_barrier.buffer = buffer.handle();
+ buf_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+ buf_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+ buf_barrier.offset = 0;
+ buf_barrier.size = VK_WHOLE_SIZE;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferMemoryBarrier-offset-01187");
+ // Exceed the buffer size
+ buf_barrier.offset = buffer.create_info().size + 1;
+ // Offset greater than total size
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+ VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 1, &buf_barrier, 0,
+ nullptr);
+ m_errorMonitor->VerifyFound();
+ buf_barrier.offset = 0;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferMemoryBarrier-size-01189");
+ buf_barrier.size = buffer.create_info().size + 1;
+ // Size greater than total size
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+ VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 1, &buf_barrier, 0,
+ nullptr);
+ m_errorMonitor->VerifyFound();
+
+ // Now exercise barrier aspect bit errors, first DS
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageSubresource-aspectMask-parameter");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-image-01207");
+ VkDepthStencilObj ds_image(m_device);
+ ds_image.Init(m_device, 128, 128, depth_format);
+ ASSERT_TRUE(ds_image.initialized());
+ img_barrier.oldLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
+ img_barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
+ img_barrier.image = ds_image.handle();
+
+ // Not having DEPTH or STENCIL set is an error
+ img_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_METADATA_BIT;
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+ VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1,
+ &img_barrier);
+ m_errorMonitor->VerifyFound();
+
+ // Having only one of depth or stencil set for DS image is an error
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-image-01207");
+ img_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+ VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1,
+ &img_barrier);
+ m_errorMonitor->VerifyFound();
+
+ // Having anything other than DEPTH and STENCIL is an error
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageSubresource-aspectMask-parameter");
+ img_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_COLOR_BIT;
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+ VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1,
+ &img_barrier);
+ m_errorMonitor->VerifyFound();
+
+ // Now test depth-only
+ VkFormatProperties format_props;
+ vkGetPhysicalDeviceFormatProperties(m_device->phy().handle(), VK_FORMAT_D16_UNORM, &format_props);
+ if (format_props.optimalTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT) {
+ VkDepthStencilObj d_image(m_device);
+ d_image.Init(m_device, 128, 128, VK_FORMAT_D16_UNORM);
+ ASSERT_TRUE(d_image.initialized());
+ img_barrier.oldLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
+ img_barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
+ img_barrier.image = d_image.handle();
+
+ // DEPTH bit must be set
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "Depth-only image formats must have the VK_IMAGE_ASPECT_DEPTH_BIT set.");
+ img_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_METADATA_BIT;
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+ VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1,
+ &img_barrier);
+ m_errorMonitor->VerifyFound();
+
+ // No bits other than DEPTH may be set
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "Depth-only image formats can have only the VK_IMAGE_ASPECT_DEPTH_BIT set.");
+ img_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_COLOR_BIT;
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+ VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1,
+ &img_barrier);
+ m_errorMonitor->VerifyFound();
+ }
+
+ // Now test stencil-only
+ vkGetPhysicalDeviceFormatProperties(m_device->phy().handle(), VK_FORMAT_S8_UINT, &format_props);
+ if (format_props.optimalTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT) {
+ VkDepthStencilObj s_image(m_device);
+ s_image.Init(m_device, 128, 128, VK_FORMAT_S8_UINT);
+ ASSERT_TRUE(s_image.initialized());
+ img_barrier.oldLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
+ img_barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
+ img_barrier.image = s_image.handle();
+ // Use of the COLOR aspect on a stencil-only image is an error
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "Stencil-only image formats must have the VK_IMAGE_ASPECT_STENCIL_BIT set.");
+ img_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+ VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1,
+ &img_barrier);
+ m_errorMonitor->VerifyFound();
+ }
+
+ // Finally test color
+ VkImageObj c_image(m_device);
+ c_image.Init(128, 128, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+ ASSERT_TRUE(c_image.initialized());
+ img_barrier.oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+ img_barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
+ img_barrier.image = c_image.handle();
+
+ // COLOR bit must be set
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "Color image formats must have the VK_IMAGE_ASPECT_COLOR_BIT set.");
+ img_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_METADATA_BIT;
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+ VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1,
+ &img_barrier);
+ m_errorMonitor->VerifyFound();
+
+ // No bits other than COLOR may be set
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "Color image formats must have ONLY the VK_IMAGE_ASPECT_COLOR_BIT set.");
+ img_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT;
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+ VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1,
+ &img_barrier);
+ m_errorMonitor->VerifyFound();
+
+ // A barrier's new and old VkImageLayout must be compatible with an image's VkImageUsageFlags.
+ {
+ VkImageObj img_color(m_device);
+ img_color.Init(128, 128, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL);
+ ASSERT_TRUE(img_color.initialized());
+
+ VkImageObj img_ds(m_device);
+ img_ds.Init(128, 128, 1, depth_format, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL);
+ ASSERT_TRUE(img_ds.initialized());
+
+ VkImageObj img_xfer_src(m_device);
+ img_xfer_src.Init(128, 128, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, VK_IMAGE_TILING_OPTIMAL);
+ ASSERT_TRUE(img_xfer_src.initialized());
+
+ VkImageObj img_xfer_dst(m_device);
+ img_xfer_dst.Init(128, 128, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL);
+ ASSERT_TRUE(img_xfer_dst.initialized());
+
+ VkImageObj img_sampled(m_device);
+ img_sampled.Init(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_TILING_OPTIMAL);
+ ASSERT_TRUE(img_sampled.initialized());
+
+ VkImageObj img_input(m_device);
+ img_input.Init(128, 128, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL);
+ ASSERT_TRUE(img_input.initialized());
+
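+        // Table of (image, disallowed layout, expected VUID) cases; each image lacks the usage flag its layout requires.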
+ const struct {
+ VkImageObj &image_obj;
+ VkImageLayout bad_layout;
+ std::string msg_code;
+ } bad_buffer_layouts[] = {
+ // clang-format off
+ // images _without_ VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT
+ {img_ds, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01208"},
+ {img_xfer_src, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01208"},
+ {img_xfer_dst, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01208"},
+ {img_sampled, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01208"},
+ {img_input, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01208"},
+ // images _without_ VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT
+ {img_color, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01209"},
+ {img_xfer_src, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01209"},
+ {img_xfer_dst, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01209"},
+ {img_sampled, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01209"},
+ {img_input, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01209"},
+ {img_color, VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01210"},
+ {img_xfer_src, VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01210"},
+ {img_xfer_dst, VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01210"},
+ {img_sampled, VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01210"},
+ {img_input, VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01210"},
+ // images _without_ VK_IMAGE_USAGE_SAMPLED_BIT or VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT
+ {img_color, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01211"},
+ {img_ds, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01211"},
+ {img_xfer_src, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01211"},
+ {img_xfer_dst, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01211"},
+ // images _without_ VK_IMAGE_USAGE_TRANSFER_SRC_BIT
+ {img_color, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01212"},
+ {img_ds, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01212"},
+ {img_xfer_dst, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01212"},
+ {img_sampled, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01212"},
+ {img_input, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01212"},
+ // images _without_ VK_IMAGE_USAGE_TRANSFER_DST_BIT
+ {img_color, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01213"},
+ {img_ds, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01213"},
+ {img_xfer_src, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01213"},
+ {img_sampled, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01213"},
+ {img_input, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01213"},
+ // clang-format on
+ };
+ const uint32_t layout_count = sizeof(bad_buffer_layouts) / sizeof(bad_buffer_layouts[0]);
+
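+        // Each invalid layout is exercised twice: once as oldLayout and once as newLayout.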
+ for (uint32_t i = 0; i < layout_count; ++i) {
+ img_barrier.image = bad_buffer_layouts[i].image_obj.handle();
+ const VkImageUsageFlags usage = bad_buffer_layouts[i].image_obj.usage();
+ img_barrier.subresourceRange.aspectMask = (usage == VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)
+ ? (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)
+ : VK_IMAGE_ASPECT_COLOR_BIT;
+
+ img_barrier.oldLayout = bad_buffer_layouts[i].bad_layout;
+ img_barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, bad_buffer_layouts[i].msg_code);
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+ VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1,
+ &img_barrier);
+ m_errorMonitor->VerifyFound();
+
+ img_barrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
+ img_barrier.newLayout = bad_buffer_layouts[i].bad_layout;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, bad_buffer_layouts[i].msg_code);
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+ VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1,
+ &img_barrier);
+ m_errorMonitor->VerifyFound();
+ }
+
+ img_barrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
+ img_barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
+ }
+ // Attempt barrier where srcAccessMask is not supported by srcStageMask
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-pMemoryBarriers-01184");
+    // Include a lower-order bit that is supported (shader write) alongside a higher-order bit that is not, to verify multi-bit validation
+ buf_barrier.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_SHADER_WRITE_BIT;
+ buf_barrier.offset = 0;
+ buf_barrier.size = VK_WHOLE_SIZE;
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
+ VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 1, &buf_barrier, 0, nullptr);
+ m_errorMonitor->VerifyFound();
+    // Attempt barrier where dstAccessMask is not supported by dstStageMask
+ buf_barrier.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-pMemoryBarriers-01185");
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+ VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 1, &buf_barrier, 0,
+ nullptr);
+ m_errorMonitor->VerifyFound();
+
+    // Attempt to mismatch barrier/waitEvents calls with incompatible queues
+    // Create a command pool with incompatible queue flags
+ const std::vector<VkQueueFamilyProperties> queue_props = m_device->queue_props;
+ uint32_t queue_family_index = m_device->QueueFamilyMatching(VK_QUEUE_GRAPHICS_BIT, VK_QUEUE_COMPUTE_BIT);
+ if (queue_family_index == UINT32_MAX) {
+ printf("%s No non-compute queue supporting graphics found; skipped.\n", kSkipPrefix);
+ return; // NOTE: this exits the test function!
+ }
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-srcStageMask-01183");
+
+ VkCommandPoolObj command_pool(m_device, queue_family_index, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
+ VkCommandBufferObj bad_command_buffer(m_device, &command_pool);
+
+ bad_command_buffer.begin();
+ buf_barrier.srcAccessMask = VK_ACCESS_SHADER_WRITE_BIT;
+ // Set two bits that should both be supported as a bonus positive check
+ buf_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT | VK_ACCESS_TRANSFER_READ_BIT;
+ vkCmdPipelineBarrier(bad_command_buffer.handle(), VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
+ VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 1, &buf_barrier, 0, nullptr);
+ m_errorMonitor->VerifyFound();
+
+    // Check for the error raised when waiting on a pipeline stage not supported by this queue. Specifically, since our queue
+    // is not a compute queue, vkCmdWaitEvents cannot have its source stage mask be VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdWaitEvents-srcStageMask-01164");
+ VkEvent event;
+ VkEventCreateInfo event_create_info{};
+ event_create_info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
+ vkCreateEvent(m_device->device(), &event_create_info, nullptr, &event);
+ vkCmdWaitEvents(bad_command_buffer.handle(), 1, &event, /*source stage mask*/ VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
+ VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, 0, nullptr, 0, nullptr, 0, nullptr);
+ m_errorMonitor->VerifyFound();
+ bad_command_buffer.end();
+
+ vkDestroyEvent(m_device->device(), event, nullptr);
+}
+
+// Helpers for the tests below
+static void ValidOwnershipTransferOp(ErrorMonitor *monitor, VkCommandBufferObj *cb, VkPipelineStageFlags src_stages,
+ VkPipelineStageFlags dst_stages, const VkBufferMemoryBarrier *buf_barrier,
+ const VkImageMemoryBarrier *img_barrier) {
monitor->ExpectSuccess();
cb->begin();
uint32_t num_buf_barrier = (buf_barrier) ? 1 : 0;
@@ -274,20 +15140,9593 @@ void ValidOwnershipTransferOp(ErrorMonitor *monitor, VkCommandBufferObj *cb, VkP
cb->QueueCommandBuffer(); // Implicitly waits
monitor->VerifyNotFound();
}
-
-void ValidOwnershipTransfer(ErrorMonitor *monitor, VkCommandBufferObj *cb_from, VkCommandBufferObj *cb_to,
- VkPipelineStageFlags src_stages, VkPipelineStageFlags dst_stages,
- const VkBufferMemoryBarrier *buf_barrier, const VkImageMemoryBarrier *img_barrier) {
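+// Records the same ownership-transfer barrier on both the releasing and the acquiring command buffers.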
+static void ValidOwnershipTransfer(ErrorMonitor *monitor, VkCommandBufferObj *cb_from, VkCommandBufferObj *cb_to,
+ VkPipelineStageFlags src_stages, VkPipelineStageFlags dst_stages,
+ const VkBufferMemoryBarrier *buf_barrier, const VkImageMemoryBarrier *img_barrier) {
ValidOwnershipTransferOp(monitor, cb_from, src_stages, dst_stages, buf_barrier, img_barrier);
ValidOwnershipTransferOp(monitor, cb_to, src_stages, dst_stages, buf_barrier, img_barrier);
}
-VkResult GPDIFPHelper(VkPhysicalDevice dev, const VkImageCreateInfo *ci, VkImageFormatProperties *limits) {
+TEST_F(VkPositiveLayerTest, OwnershipTranfersImage) {
+ TEST_DESCRIPTION("Valid image ownership transfers that shouldn't create errors");
+ ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+
+ uint32_t no_gfx = m_device->QueueFamilyWithoutCapabilities(VK_QUEUE_GRAPHICS_BIT);
+ if (no_gfx == UINT32_MAX) {
+ printf("%s Required queue families not present (non-graphics capable required).\n", kSkipPrefix);
+ return;
+ }
+ VkQueueObj *no_gfx_queue = m_device->queue_family_queues(no_gfx)[0].get();
+
+ VkCommandPoolObj no_gfx_pool(m_device, no_gfx, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
+ VkCommandBufferObj no_gfx_cb(m_device, &no_gfx_pool, VK_COMMAND_BUFFER_LEVEL_PRIMARY, no_gfx_queue);
+
+ // Create an "exclusive" image owned by the graphics queue.
+ VkImageObj image(m_device);
+ VkFlags image_use = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
+ image.Init(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, image_use, VK_IMAGE_TILING_OPTIMAL, 0);
+ ASSERT_TRUE(image.initialized());
+ auto image_subres = image.subresource_range(VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1);
+ auto image_barrier = image.image_memory_barrier(0, 0, image.Layout(), image.Layout(), image_subres);
+ image_barrier.srcQueueFamilyIndex = m_device->graphics_queue_node_index_;
+ image_barrier.dstQueueFamilyIndex = no_gfx;
+
+ ValidOwnershipTransfer(m_errorMonitor, m_commandBuffer, &no_gfx_cb, VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT,
+ VK_PIPELINE_STAGE_TRANSFER_BIT, nullptr, &image_barrier);
+
+ // Change layouts while changing ownership
+ image_barrier.srcQueueFamilyIndex = no_gfx;
+ image_barrier.dstQueueFamilyIndex = m_device->graphics_queue_node_index_;
+ image_barrier.oldLayout = image.Layout();
+ // Make sure the new layout is different from the old
+ if (image_barrier.oldLayout == VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL) {
+ image_barrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+ } else {
+ image_barrier.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+ }
+
+ ValidOwnershipTransfer(m_errorMonitor, &no_gfx_cb, m_commandBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT,
+ VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, nullptr, &image_barrier);
+}
+
+TEST_F(VkPositiveLayerTest, OwnershipTranfersBuffer) {
+ TEST_DESCRIPTION("Valid buffer ownership transfers that shouldn't create errors");
+ ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+
+ uint32_t no_gfx = m_device->QueueFamilyWithoutCapabilities(VK_QUEUE_GRAPHICS_BIT);
+ if (no_gfx == UINT32_MAX) {
+ printf("%s Required queue families not present (non-graphics capable required).\n", kSkipPrefix);
+ return;
+ }
+ VkQueueObj *no_gfx_queue = m_device->queue_family_queues(no_gfx)[0].get();
+
+ VkCommandPoolObj no_gfx_pool(m_device, no_gfx, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
+ VkCommandBufferObj no_gfx_cb(m_device, &no_gfx_pool, VK_COMMAND_BUFFER_LEVEL_PRIMARY, no_gfx_queue);
+
+ // Create a buffer
+ const VkDeviceSize buffer_size = 256;
+ uint8_t data[buffer_size] = {0xFF};
+ VkConstantBufferObj buffer(m_device, buffer_size, data, VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT);
+ ASSERT_TRUE(buffer.initialized());
+ auto buffer_barrier = buffer.buffer_memory_barrier(0, 0, 0, VK_WHOLE_SIZE);
+
+ // Let gfx own it.
+ buffer_barrier.srcQueueFamilyIndex = m_device->graphics_queue_node_index_;
+ buffer_barrier.dstQueueFamilyIndex = m_device->graphics_queue_node_index_;
+ ValidOwnershipTransferOp(m_errorMonitor, m_commandBuffer, VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
+ &buffer_barrier, nullptr);
+
+ // Transfer it to non-gfx
+ buffer_barrier.dstQueueFamilyIndex = no_gfx;
+ ValidOwnershipTransfer(m_errorMonitor, m_commandBuffer, &no_gfx_cb, VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT,
+ VK_PIPELINE_STAGE_TRANSFER_BIT, &buffer_barrier, nullptr);
+
+ // Transfer it to gfx
+ buffer_barrier.srcQueueFamilyIndex = no_gfx;
+ buffer_barrier.dstQueueFamilyIndex = m_device->graphics_queue_node_index_;
+ ValidOwnershipTransfer(m_errorMonitor, &no_gfx_cb, m_commandBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT,
+ VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, &buffer_barrier, nullptr);
+}
+
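+// Helper for the queue-family barrier tests below: creates an image and a buffer with the requested sharing mode, then
+// records (and optionally submits) a pipeline barrier using caller-supplied src/dst queue family indices while checking
+// the expected validation messages.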
+class BarrierQueueFamilyTestHelper {
+ public:
+ struct QueueFamilyObjs {
+ uint32_t index;
+ // We would use std::unique_ptr, but this triggers a compiler error on older compilers
+ VkQueueObj *queue = nullptr;
+ VkCommandPoolObj *command_pool = nullptr;
+ VkCommandBufferObj *command_buffer = nullptr;
+ VkCommandBufferObj *command_buffer2 = nullptr;
+ ~QueueFamilyObjs() {
+ delete command_buffer2;
+ delete command_buffer;
+ delete command_pool;
+ delete queue;
+ }
+
+ void Init(VkDeviceObj *device, uint32_t qf_index, VkQueue qf_queue, VkCommandPoolCreateFlags cp_flags) {
+ index = qf_index;
+ queue = new VkQueueObj(qf_queue, qf_index);
+ command_pool = new VkCommandPoolObj(device, qf_index, cp_flags);
+ command_buffer = new VkCommandBufferObj(device, command_pool, VK_COMMAND_BUFFER_LEVEL_PRIMARY, queue);
+ command_buffer2 = new VkCommandBufferObj(device, command_pool, VK_COMMAND_BUFFER_LEVEL_PRIMARY, queue);
+ };
+ };
+
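+    // Shared test state: one QueueFamilyObjs per queue family; Reset() waits for the device and resets each command pool.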
+ struct Context {
+ VkLayerTest *layer_test;
+ uint32_t default_index;
+ std::unordered_map<uint32_t, QueueFamilyObjs> queue_families;
+ Context(VkLayerTest *test, const std::vector<uint32_t> &queue_family_indices) : layer_test(test) {
+ if (0 == queue_family_indices.size()) {
+ return; // This is invalid
+ }
+ VkDeviceObj *device_obj = layer_test->DeviceObj();
+ queue_families.reserve(queue_family_indices.size());
+ default_index = queue_family_indices[0];
+ for (auto qfi : queue_family_indices) {
+ VkQueue queue = device_obj->queue_family_queues(qfi)[0]->handle();
+ queue_families.emplace(std::make_pair(qfi, QueueFamilyObjs()));
+ queue_families[qfi].Init(device_obj, qfi, queue, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
+ }
+ Reset();
+ }
+ void Reset() {
+ layer_test->DeviceObj()->wait();
+ for (auto &qf : queue_families) {
+ vkResetCommandPool(layer_test->device(), qf.second.command_pool->handle(), 0);
+ }
+ }
+ };
+
+ BarrierQueueFamilyTestHelper(Context *context) : context_(context), image_(context->layer_test->DeviceObj()) {}
+ // Init with queue families non-null for CONCURRENT sharing mode (which requires them)
+ void Init(std::vector<uint32_t> *families) {
+ VkDeviceObj *device_obj = context_->layer_test->DeviceObj();
+ image_.Init(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0, families);
+ ASSERT_TRUE(image_.initialized());
+
+ image_barrier_ =
+ image_.image_memory_barrier(VK_ACCESS_TRANSFER_READ_BIT, VK_ACCESS_TRANSFER_READ_BIT, image_.Layout(), image_.Layout(),
+ image_.subresource_range(VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1));
+
+ VkMemoryPropertyFlags mem_prop = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
+ buffer_.init_as_src_and_dst(*device_obj, 256, mem_prop, families);
+ ASSERT_TRUE(buffer_.initialized());
+ buffer_barrier_ = buffer_.buffer_memory_barrier(VK_ACCESS_TRANSFER_READ_BIT, VK_ACCESS_TRANSFER_READ_BIT, 0, VK_WHOLE_SIZE);
+ }
+
+ QueueFamilyObjs *GetQueueFamilyInfo(Context *context, uint32_t qfi) {
+ QueueFamilyObjs *qf;
+
+ auto qf_it = context->queue_families.find(qfi);
+ if (qf_it != context->queue_families.end()) {
+ qf = &(qf_it->second);
+ } else {
+ qf = &(context->queue_families[context->default_index]);
+ }
+ return qf;
+ }
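+    // DOUBLE_RECORD records the barrier twice in a single command buffer; DOUBLE_COMMAND_BUFFER records it once in each of
+    // two command buffers submitted together. Both exercise the duplicate release/acquire (QFO transfer) checks.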
+ enum Modifier {
+ NONE,
+ DOUBLE_RECORD,
+ DOUBLE_COMMAND_BUFFER,
+ };
+
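+    // Records (and, if a queue family is given, submits) a pipeline barrier with the supplied src/dst queue family indices,
+    // then verifies the expected image/buffer error messages -- or success when 'positive' is true.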
+ void operator()(std::string img_err, std::string buf_err, uint32_t src, uint32_t dst, bool positive = false,
+ uint32_t queue_family_index = kInvalidQueueFamily, Modifier mod = Modifier::NONE) {
+ auto monitor = context_->layer_test->Monitor();
+ monitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT, img_err);
+ monitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT, buf_err);
+
+ image_barrier_.srcQueueFamilyIndex = src;
+ image_barrier_.dstQueueFamilyIndex = dst;
+ buffer_barrier_.srcQueueFamilyIndex = src;
+ buffer_barrier_.dstQueueFamilyIndex = dst;
+
+ QueueFamilyObjs *qf = GetQueueFamilyInfo(context_, queue_family_index);
+
+ VkCommandBufferObj *command_buffer = qf->command_buffer;
+ for (int cb_repeat = 0; cb_repeat < (mod == Modifier::DOUBLE_COMMAND_BUFFER ? 2 : 1); cb_repeat++) {
+ command_buffer->begin();
+ for (int repeat = 0; repeat < (mod == Modifier::DOUBLE_RECORD ? 2 : 1); repeat++) {
+ vkCmdPipelineBarrier(command_buffer->handle(), VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
+ VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 1, &buffer_barrier_, 1, &image_barrier_);
+ }
+ command_buffer->end();
+            command_buffer = qf->command_buffer2; // Second pass (if any) records into the second command buffer.
+ }
+
+ if (queue_family_index != kInvalidQueueFamily) {
+ if (mod == Modifier::DOUBLE_COMMAND_BUFFER) {
+                // the Fence resolves to VK_NULL_HANDLE, i.e. no fence
+ qf->queue->submit({{qf->command_buffer, qf->command_buffer2}}, vk_testing::Fence(), positive);
+ } else {
+ qf->command_buffer->QueueCommandBuffer(positive); // Check for success on positive tests only
+ }
+ }
+
+ if (positive) {
+ monitor->VerifyNotFound();
+ } else {
+ monitor->VerifyFound();
+ }
+ context_->Reset();
+ };
+
+ protected:
+ static const uint32_t kInvalidQueueFamily = UINT32_MAX;
+ Context *context_;
+ VkImageObj image_;
+ VkImageMemoryBarrier image_barrier_;
+ VkBufferObj buffer_;
+ VkBufferMemoryBarrier buffer_barrier_;
+};
+
+TEST_F(VkLayerTest, InvalidBarrierQueueFamily) {
+ TEST_DESCRIPTION("Create and submit barriers with invalid queue families");
+ ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+
+ // Find queues of two families
+ const uint32_t submit_family = m_device->graphics_queue_node_index_;
+ const uint32_t invalid = static_cast<uint32_t>(m_device->queue_props.size());
+ const uint32_t other_family = submit_family != 0 ? 0 : 1;
+ const bool only_one_family = (invalid == 1) || (m_device->queue_props[other_family].queueCount == 0);
+
+ std::vector<uint32_t> qf_indices{{submit_family, other_family}};
+ if (only_one_family) {
+ qf_indices.resize(1);
+ }
+ BarrierQueueFamilyTestHelper::Context test_context(this, qf_indices);
+
+ if (m_device->props.apiVersion >= VK_API_VERSION_1_1) {
+        printf("%s Device has apiVersion greater than 1.0 -- skipping test cases that require external memory to be disabled.\n",
+               kSkipPrefix);
+ } else {
+ if (only_one_family) {
+ printf("%s Single queue family found -- VK_SHARING_MODE_CONCURRENT testcases skipped.\n", kSkipPrefix);
+ } else {
+ std::vector<uint32_t> families = {submit_family, other_family};
+ BarrierQueueFamilyTestHelper conc_test(&test_context);
+ conc_test.Init(&families);
+ // core_validation::barrier_queue_families::kSrcAndDestMustBeIgnore
+ conc_test("VUID-VkImageMemoryBarrier-image-01199", "VUID-VkBufferMemoryBarrier-buffer-01190", VK_QUEUE_FAMILY_IGNORED,
+ submit_family);
+ conc_test("VUID-VkImageMemoryBarrier-image-01199", "VUID-VkBufferMemoryBarrier-buffer-01190", submit_family,
+ VK_QUEUE_FAMILY_IGNORED);
+ conc_test("VUID-VkImageMemoryBarrier-image-01199", "VUID-VkBufferMemoryBarrier-buffer-01190", submit_family,
+ submit_family);
+ // true -> positive test
+ conc_test("VUID-VkImageMemoryBarrier-image-01199", "VUID-VkBufferMemoryBarrier-buffer-01190", VK_QUEUE_FAMILY_IGNORED,
+ VK_QUEUE_FAMILY_IGNORED, true);
+ }
+
+ BarrierQueueFamilyTestHelper excl_test(&test_context);
+ excl_test.Init(nullptr); // no queue families means *exclusive* sharing mode.
+
+ // core_validation::barrier_queue_families::kBothIgnoreOrBothValid
+ excl_test("VUID-VkImageMemoryBarrier-image-01200", "VUID-VkBufferMemoryBarrier-buffer-01192", VK_QUEUE_FAMILY_IGNORED,
+ submit_family);
+ excl_test("VUID-VkImageMemoryBarrier-image-01200", "VUID-VkBufferMemoryBarrier-buffer-01192", submit_family,
+ VK_QUEUE_FAMILY_IGNORED);
+ // true -> positive test
+ excl_test("VUID-VkImageMemoryBarrier-image-01200", "VUID-VkBufferMemoryBarrier-buffer-01192", submit_family, submit_family,
+ true);
+ excl_test("VUID-VkImageMemoryBarrier-image-01200", "VUID-VkBufferMemoryBarrier-buffer-01192", VK_QUEUE_FAMILY_IGNORED,
+ VK_QUEUE_FAMILY_IGNORED, true);
+ }
+
+ if (only_one_family) {
+ printf("%s Single queue family found -- VK_SHARING_MODE_EXCLUSIVE submit testcases skipped.\n", kSkipPrefix);
+ } else {
+ BarrierQueueFamilyTestHelper excl_test(&test_context);
+ excl_test.Init(nullptr);
+
+ // core_validation::barrier_queue_families::kSubmitQueueMustMatchSrcOrDst
+ excl_test("VUID-VkImageMemoryBarrier-image-01205", "VUID-VkBufferMemoryBarrier-buffer-01196", other_family, other_family,
+ false, submit_family);
+
+        // true -> positive test (testing both the index logic and the QFO transfer tracking).
+ excl_test("POSITIVE_TEST", "POSITIVE_TEST", submit_family, other_family, true, submit_family);
+ excl_test("POSITIVE_TEST", "POSITIVE_TEST", submit_family, other_family, true, other_family);
+ excl_test("POSITIVE_TEST", "POSITIVE_TEST", other_family, submit_family, true, other_family);
+ excl_test("POSITIVE_TEST", "POSITIVE_TEST", other_family, submit_family, true, submit_family);
+
+ // negative testing for QFO transfer tracking
+ // Duplicate release in one CB
+ excl_test("UNASSIGNED-VkImageMemoryBarrier-image-00001", "UNASSIGNED-VkBufferMemoryBarrier-buffer-00001", submit_family,
+ other_family, false, submit_family, BarrierQueueFamilyTestHelper::DOUBLE_RECORD);
+ // Duplicate pending release
+ excl_test("UNASSIGNED-VkImageMemoryBarrier-image-00003", "UNASSIGNED-VkBufferMemoryBarrier-buffer-00003", submit_family,
+ other_family, false, submit_family);
+ // Duplicate acquire in one CB
+ excl_test("UNASSIGNED-VkImageMemoryBarrier-image-00001", "UNASSIGNED-VkBufferMemoryBarrier-buffer-00001", submit_family,
+ other_family, false, other_family, BarrierQueueFamilyTestHelper::DOUBLE_RECORD);
+ // No pending release
+ excl_test("UNASSIGNED-VkImageMemoryBarrier-image-00004", "UNASSIGNED-VkBufferMemoryBarrier-buffer-00004", submit_family,
+ other_family, false, other_family);
+ // Duplicate release in two CB
+ excl_test("UNASSIGNED-VkImageMemoryBarrier-image-00002", "UNASSIGNED-VkBufferMemoryBarrier-buffer-00002", submit_family,
+ other_family, false, submit_family, BarrierQueueFamilyTestHelper::DOUBLE_COMMAND_BUFFER);
+ // Duplicate acquire in two CB
+        excl_test("POSITIVE_TEST", "POSITIVE_TEST", submit_family, other_family, true, submit_family); // need a successful release
+ excl_test("UNASSIGNED-VkImageMemoryBarrier-image-00002", "UNASSIGNED-VkBufferMemoryBarrier-buffer-00002", submit_family,
+ other_family, false, other_family, BarrierQueueFamilyTestHelper::DOUBLE_COMMAND_BUFFER);
+ }
+}
+
+TEST_F(VkLayerTest, InvalidBarrierQueueFamilyWithMemExt) {
+    TEST_DESCRIPTION("Create and submit barriers with invalid queue families when the external memory extension is enabled");
+ std::vector<const char *> reqd_instance_extensions = {
+ {VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME, VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME}};
+ for (auto extension_name : reqd_instance_extensions) {
+ if (InstanceExtensionSupported(extension_name)) {
+ m_instance_extension_names.push_back(extension_name);
+ } else {
+ printf("%s Required instance extension %s not supported, skipping test\n", kSkipPrefix, extension_name);
+ return;
+ }
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ // Check for external memory device extensions
+ if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME)) {
+ m_device_extension_names.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
+ } else {
+ printf("%s External memory extension not supported, skipping test\n", kSkipPrefix);
+ return;
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+
+ // Find queues of two families
+ const uint32_t submit_family = m_device->graphics_queue_node_index_;
+ const uint32_t invalid = static_cast<uint32_t>(m_device->queue_props.size());
+ const uint32_t other_family = submit_family != 0 ? 0 : 1;
+ const bool only_one_family = (invalid == 1) || (m_device->queue_props[other_family].queueCount == 0);
+
+ std::vector<uint32_t> qf_indices{{submit_family, other_family}};
+ if (only_one_family) {
+ qf_indices.resize(1);
+ }
+ BarrierQueueFamilyTestHelper::Context test_context(this, qf_indices);
+
+ if (only_one_family) {
+ printf("%s Single queue family found -- VK_SHARING_MODE_CONCURRENT testcases skipped.\n", kSkipPrefix);
+ } else {
+ std::vector<uint32_t> families = {submit_family, other_family};
+ BarrierQueueFamilyTestHelper conc_test(&test_context);
+
+ // core_validation::barrier_queue_families::kSrcOrDstMustBeIgnore
+ conc_test.Init(&families);
+ conc_test("VUID-VkImageMemoryBarrier-image-01381", "VUID-VkBufferMemoryBarrier-buffer-01191", submit_family, submit_family);
+ // true -> positive test
+ conc_test("VUID-VkImageMemoryBarrier-image-01381", "VUID-VkBufferMemoryBarrier-buffer-01191", VK_QUEUE_FAMILY_IGNORED,
+ VK_QUEUE_FAMILY_IGNORED, true);
+ conc_test("VUID-VkImageMemoryBarrier-image-01381", "VUID-VkBufferMemoryBarrier-buffer-01191", VK_QUEUE_FAMILY_IGNORED,
+ VK_QUEUE_FAMILY_EXTERNAL_KHR, true);
+ conc_test("VUID-VkImageMemoryBarrier-image-01381", "VUID-VkBufferMemoryBarrier-buffer-01191", VK_QUEUE_FAMILY_EXTERNAL_KHR,
+ VK_QUEUE_FAMILY_IGNORED, true);
+
+ // core_validation::barrier_queue_families::kSpecialOrIgnoreOnly
+ conc_test("VUID-VkImageMemoryBarrier-image-01766", "VUID-VkBufferMemoryBarrier-buffer-01763", submit_family,
+ VK_QUEUE_FAMILY_IGNORED);
+ conc_test("VUID-VkImageMemoryBarrier-image-01766", "VUID-VkBufferMemoryBarrier-buffer-01763", VK_QUEUE_FAMILY_IGNORED,
+ submit_family);
+ // This is to flag the errors that would be considered only "unexpected" in the parallel case above
+ // true -> positive test
+ conc_test("VUID-VkImageMemoryBarrier-image-01766", "VUID-VkBufferMemoryBarrier-buffer-01763", VK_QUEUE_FAMILY_IGNORED,
+ VK_QUEUE_FAMILY_EXTERNAL_KHR, true);
+ conc_test("VUID-VkImageMemoryBarrier-image-01766", "VUID-VkBufferMemoryBarrier-buffer-01763", VK_QUEUE_FAMILY_EXTERNAL_KHR,
+ VK_QUEUE_FAMILY_IGNORED, true);
+ }
+
+ BarrierQueueFamilyTestHelper excl_test(&test_context);
+ excl_test.Init(nullptr); // no queue families means *exclusive* sharing mode.
+
+ // core_validation::barrier_queue_families::kSrcIgnoreRequiresDstIgnore
+ excl_test("VUID-VkImageMemoryBarrier-image-01201", "VUID-VkBufferMemoryBarrier-buffer-01193", VK_QUEUE_FAMILY_IGNORED,
+ submit_family);
+ excl_test("VUID-VkImageMemoryBarrier-image-01201", "VUID-VkBufferMemoryBarrier-buffer-01193", VK_QUEUE_FAMILY_IGNORED,
+ VK_QUEUE_FAMILY_EXTERNAL_KHR);
+ // true -> positive test
+ excl_test("VUID-VkImageMemoryBarrier-image-01201", "VUID-VkBufferMemoryBarrier-buffer-01193", VK_QUEUE_FAMILY_IGNORED,
+ VK_QUEUE_FAMILY_IGNORED, true);
+
+ // core_validation::barrier_queue_families::kDstValidOrSpecialIfNotIgnore
+ excl_test("VUID-VkImageMemoryBarrier-image-01768", "VUID-VkBufferMemoryBarrier-buffer-01765", submit_family, invalid);
+ // true -> positive test
+ excl_test("VUID-VkImageMemoryBarrier-image-01768", "VUID-VkBufferMemoryBarrier-buffer-01765", submit_family, submit_family,
+ true);
+ excl_test("VUID-VkImageMemoryBarrier-image-01768", "VUID-VkBufferMemoryBarrier-buffer-01765", submit_family,
+ VK_QUEUE_FAMILY_IGNORED, true);
+ excl_test("VUID-VkImageMemoryBarrier-image-01768", "VUID-VkBufferMemoryBarrier-buffer-01765", submit_family,
+ VK_QUEUE_FAMILY_EXTERNAL_KHR, true);
+
+ // core_validation::barrier_queue_families::kSrcValidOrSpecialIfNotIgnore
+ excl_test("VUID-VkImageMemoryBarrier-image-01767", "VUID-VkBufferMemoryBarrier-buffer-01764", invalid, submit_family);
+ // true -> positive test
+ excl_test("VUID-VkImageMemoryBarrier-image-01767", "VUID-VkBufferMemoryBarrier-buffer-01764", submit_family, submit_family,
+ true);
+ excl_test("VUID-VkImageMemoryBarrier-image-01767", "VUID-VkBufferMemoryBarrier-buffer-01764", VK_QUEUE_FAMILY_IGNORED,
+ VK_QUEUE_FAMILY_IGNORED, true);
+ excl_test("VUID-VkImageMemoryBarrier-image-01767", "VUID-VkBufferMemoryBarrier-buffer-01764", VK_QUEUE_FAMILY_EXTERNAL_KHR,
+ submit_family, true);
+}
+
+TEST_F(VkLayerTest, ImageBarrierWithBadRange) {
+ TEST_DESCRIPTION("VkImageMemoryBarrier with an invalid subresourceRange");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkImageObj image(m_device);
+ image.Init(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL);
+ ASSERT_TRUE(image.create_info().arrayLayers == 1);
+ ASSERT_TRUE(image.initialized());
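+    // The image has exactly 1 mip level and 1 array layer, so ranges that start past 0 or span more than one level/layer
+    // are out of bounds.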
+
+ VkImageMemoryBarrier img_barrier_template = {};
+ img_barrier_template.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+ img_barrier_template.pNext = NULL;
+ img_barrier_template.srcAccessMask = 0;
+ img_barrier_template.dstAccessMask = 0;
+ img_barrier_template.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+ img_barrier_template.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+ img_barrier_template.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+ img_barrier_template.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+ img_barrier_template.image = image.handle();
+    // subresourceRange will be set later for the purposes of this test
+ img_barrier_template.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ img_barrier_template.subresourceRange.baseArrayLayer = 0;
+ img_barrier_template.subresourceRange.baseMipLevel = 0;
+ img_barrier_template.subresourceRange.layerCount = 0;
+ img_barrier_template.subresourceRange.levelCount = 0;
+
+ m_commandBuffer->begin();
+
+ // Nested scope here confuses clang-format, somehow
+ // clang-format off
+
+ // try for vkCmdPipelineBarrier
+ {
+ // Try baseMipLevel >= image.mipLevels with VK_REMAINING_MIP_LEVELS
+ {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-subresourceRange-01486");
+ const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 1, VK_REMAINING_MIP_LEVELS, 0, 1};
+ VkImageMemoryBarrier img_barrier = img_barrier_template;
+ img_barrier.subresourceRange = range;
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
+ nullptr, 0, nullptr, 1, &img_barrier);
+ m_errorMonitor->VerifyFound();
+ }
+
+ // Try baseMipLevel >= image.mipLevels without VK_REMAINING_MIP_LEVELS
+ {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-subresourceRange-01486");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-subresourceRange-01724");
+ const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 1, 1, 0, 1};
+ VkImageMemoryBarrier img_barrier = img_barrier_template;
+ img_barrier.subresourceRange = range;
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
+ nullptr, 0, nullptr, 1, &img_barrier);
+ m_errorMonitor->VerifyFound();
+ }
+
+ // Try levelCount = 0
+ {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-subresourceRange-01724");
+ const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 0, 1};
+ VkImageMemoryBarrier img_barrier = img_barrier_template;
+ img_barrier.subresourceRange = range;
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
+ nullptr, 0, nullptr, 1, &img_barrier);
+ m_errorMonitor->VerifyFound();
+ }
+
+ // Try baseMipLevel + levelCount > image.mipLevels
+ {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-subresourceRange-01724");
+ const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 2, 0, 1};
+ VkImageMemoryBarrier img_barrier = img_barrier_template;
+ img_barrier.subresourceRange = range;
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
+ nullptr, 0, nullptr, 1, &img_barrier);
+ m_errorMonitor->VerifyFound();
+ }
+
+ // Try baseArrayLayer >= image.arrayLayers with VK_REMAINING_ARRAY_LAYERS
+ {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-subresourceRange-01488");
+ const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 1, VK_REMAINING_ARRAY_LAYERS};
+ VkImageMemoryBarrier img_barrier = img_barrier_template;
+ img_barrier.subresourceRange = range;
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
+ nullptr, 0, nullptr, 1, &img_barrier);
+ m_errorMonitor->VerifyFound();
+ }
+
+ // Try baseArrayLayer >= image.arrayLayers without VK_REMAINING_ARRAY_LAYERS
+ {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-subresourceRange-01488");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-subresourceRange-01725");
+ const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 1, 1};
+ VkImageMemoryBarrier img_barrier = img_barrier_template;
+ img_barrier.subresourceRange = range;
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
+ nullptr, 0, nullptr, 1, &img_barrier);
+ m_errorMonitor->VerifyFound();
+ }
+
+ // Try layerCount = 0
+ {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-subresourceRange-01725");
+ const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 0};
+ VkImageMemoryBarrier img_barrier = img_barrier_template;
+ img_barrier.subresourceRange = range;
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
+ nullptr, 0, nullptr, 1, &img_barrier);
+ m_errorMonitor->VerifyFound();
+ }
+
+ // Try baseArrayLayer + layerCount > image.arrayLayers
+ {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-subresourceRange-01725");
+ const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 2};
+ VkImageMemoryBarrier img_barrier = img_barrier_template;
+ img_barrier.subresourceRange = range;
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
+ nullptr, 0, nullptr, 1, &img_barrier);
+ m_errorMonitor->VerifyFound();
+ }
+ }
+
+ // try for vkCmdWaitEvents
+ {
+ VkEvent event;
+ VkEventCreateInfo eci{VK_STRUCTURE_TYPE_EVENT_CREATE_INFO, NULL, 0};
+ VkResult err = vkCreateEvent(m_device->handle(), &eci, nullptr, &event);
+ ASSERT_VK_SUCCESS(err);
+
+ // Try baseMipLevel >= image.mipLevels with VK_REMAINING_MIP_LEVELS
+ {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-subresourceRange-01486");
+ const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 1, VK_REMAINING_MIP_LEVELS, 0, 1};
+ VkImageMemoryBarrier img_barrier = img_barrier_template;
+ img_barrier.subresourceRange = range;
+ vkCmdWaitEvents(m_commandBuffer->handle(), 1, &event, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
+ VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, nullptr, 0, nullptr, 1, &img_barrier);
+ m_errorMonitor->VerifyFound();
+ }
+
+ // Try baseMipLevel >= image.mipLevels without VK_REMAINING_MIP_LEVELS
+ {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-subresourceRange-01486");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-subresourceRange-01724");
+ const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 1, 1, 0, 1};
+ VkImageMemoryBarrier img_barrier = img_barrier_template;
+ img_barrier.subresourceRange = range;
+ vkCmdWaitEvents(m_commandBuffer->handle(), 1, &event, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
+ VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, nullptr, 0, nullptr, 1, &img_barrier);
+ m_errorMonitor->VerifyFound();
+ }
+
+ // Try levelCount = 0
+ {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-subresourceRange-01724");
+ const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 0, 1};
+ VkImageMemoryBarrier img_barrier = img_barrier_template;
+ img_barrier.subresourceRange = range;
+ vkCmdWaitEvents(m_commandBuffer->handle(), 1, &event, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
+ VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, nullptr, 0, nullptr, 1, &img_barrier);
+ m_errorMonitor->VerifyFound();
+ }
+
+ // Try baseMipLevel + levelCount > image.mipLevels
+ {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-subresourceRange-01724");
+ const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 2, 0, 1};
+ VkImageMemoryBarrier img_barrier = img_barrier_template;
+ img_barrier.subresourceRange = range;
+ vkCmdWaitEvents(m_commandBuffer->handle(), 1, &event, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
+ VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, nullptr, 0, nullptr, 1, &img_barrier);
+ m_errorMonitor->VerifyFound();
+ }
+
+ // Try baseArrayLayer >= image.arrayLayers with VK_REMAINING_ARRAY_LAYERS
+ {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-subresourceRange-01488");
+ const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 1, VK_REMAINING_ARRAY_LAYERS};
+ VkImageMemoryBarrier img_barrier = img_barrier_template;
+ img_barrier.subresourceRange = range;
+ vkCmdWaitEvents(m_commandBuffer->handle(), 1, &event, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
+ VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, nullptr, 0, nullptr, 1, &img_barrier);
+ m_errorMonitor->VerifyFound();
+ }
+
+ // Try baseArrayLayer >= image.arrayLayers without VK_REMAINING_ARRAY_LAYERS
+ {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-subresourceRange-01488");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-subresourceRange-01725");
+ const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 1, 1};
+ VkImageMemoryBarrier img_barrier = img_barrier_template;
+ img_barrier.subresourceRange = range;
+ vkCmdWaitEvents(m_commandBuffer->handle(), 1, &event, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
+ VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, nullptr, 0, nullptr, 1, &img_barrier);
+ m_errorMonitor->VerifyFound();
+ }
+
+ // Try layerCount = 0
+ {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-subresourceRange-01725");
+ const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 0};
+ VkImageMemoryBarrier img_barrier = img_barrier_template;
+ img_barrier.subresourceRange = range;
+ vkCmdWaitEvents(m_commandBuffer->handle(), 1, &event, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
+ VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, nullptr, 0, nullptr, 1, &img_barrier);
+ m_errorMonitor->VerifyFound();
+ }
+
+ // Try baseArrayLayer + layerCount > image.arrayLayers
+ {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-subresourceRange-01725");
+ const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 2};
+ VkImageMemoryBarrier img_barrier = img_barrier_template;
+ img_barrier.subresourceRange = range;
+ vkCmdWaitEvents(m_commandBuffer->handle(), 1, &event, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
+ VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, nullptr, 0, nullptr, 1, &img_barrier);
+ m_errorMonitor->VerifyFound();
+ }
+
+ vkDestroyEvent(m_device->handle(), event, nullptr);
+ }
+// clang-format on
+}
+
+TEST_F(VkLayerTest, ValidationCacheTestBadMerge) {
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ if (DeviceExtensionSupported(gpu(), "VK_LAYER_LUNARG_core_validation", VK_EXT_VALIDATION_CACHE_EXTENSION_NAME)) {
+ m_device_extension_names.push_back(VK_EXT_VALIDATION_CACHE_EXTENSION_NAME);
+ } else {
+ printf("%s %s not supported, skipping test\n", kSkipPrefix, VK_EXT_VALIDATION_CACHE_EXTENSION_NAME);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ // Load extension functions
+ auto fpCreateValidationCache =
+ (PFN_vkCreateValidationCacheEXT)vkGetDeviceProcAddr(m_device->device(), "vkCreateValidationCacheEXT");
+ auto fpDestroyValidationCache =
+ (PFN_vkDestroyValidationCacheEXT)vkGetDeviceProcAddr(m_device->device(), "vkDestroyValidationCacheEXT");
+ auto fpMergeValidationCaches =
+ (PFN_vkMergeValidationCachesEXT)vkGetDeviceProcAddr(m_device->device(), "vkMergeValidationCachesEXT");
+ if (!fpCreateValidationCache || !fpDestroyValidationCache || !fpMergeValidationCaches) {
+ printf("%s Failed to load function pointers for %s\n", kSkipPrefix, VK_EXT_VALIDATION_CACHE_EXTENSION_NAME);
+ return;
+ }
+
+ VkValidationCacheCreateInfoEXT validationCacheCreateInfo;
+ validationCacheCreateInfo.sType = VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT;
+ validationCacheCreateInfo.pNext = NULL;
+ validationCacheCreateInfo.initialDataSize = 0;
+ validationCacheCreateInfo.pInitialData = NULL;
+ validationCacheCreateInfo.flags = 0;
+ VkValidationCacheEXT validationCache = VK_NULL_HANDLE;
+ VkResult res = fpCreateValidationCache(m_device->device(), &validationCacheCreateInfo, nullptr, &validationCache);
+ ASSERT_VK_SUCCESS(res);
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkMergeValidationCachesEXT-dstCache-01536");
+ res = fpMergeValidationCaches(m_device->device(), validationCache, 1, &validationCache);
+ m_errorMonitor->VerifyFound();
+
+ fpDestroyValidationCache(m_device->device(), validationCache, nullptr);
+}
+
+TEST_F(VkPositiveLayerTest, LayoutFromPresentWithoutAccessMemoryRead) {
+ // Transition an image away from PRESENT_SRC_KHR without ACCESS_MEMORY_READ
+ // in srcAccessMask.
+
+ // The required behavior here was a bit unclear in earlier versions of the
+ // spec, but there is no memory dependency required here, so this should
+ // work without warnings.
+
+ m_errorMonitor->ExpectSuccess();
+ ASSERT_NO_FATAL_FAILURE(Init());
+ VkImageObj image(m_device);
+ image.Init(128, 128, 1, VK_FORMAT_B8G8R8A8_UNORM, (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT),
+ VK_IMAGE_TILING_OPTIMAL, 0);
+ ASSERT_TRUE(image.initialized());
+
+ VkImageMemoryBarrier barrier = {};
+ VkImageSubresourceRange range;
+ barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+ barrier.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
+ barrier.dstAccessMask = 0;
+ barrier.oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+ barrier.newLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;
+ barrier.image = image.handle();
+ range.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ range.baseMipLevel = 0;
+ range.levelCount = 1;
+ range.baseArrayLayer = 0;
+ range.layerCount = 1;
+ barrier.subresourceRange = range;
+ VkCommandBufferObj cmdbuf(m_device, m_commandPool);
+ cmdbuf.begin();
+ cmdbuf.PipelineBarrier(VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0, nullptr, 0, nullptr, 1,
+ &barrier);
+ barrier.oldLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;
+ barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
+ barrier.srcAccessMask = 0;
+ barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
+ cmdbuf.PipelineBarrier(VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0, nullptr, 0, nullptr, 1,
+ &barrier);
+
+ m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkLayerTest, IdxBufferAlignmentError) {
+    // Attempt to bind an index buffer at an offset that violates the required index-type alignment
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ uint32_t const indices[] = {0};
+ VkBufferCreateInfo buf_info = {};
+ buf_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+ buf_info.size = 1024;
+ buf_info.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
+ buf_info.queueFamilyIndexCount = 1;
+ buf_info.pQueueFamilyIndices = indices;
+
+ VkBuffer buffer;
+ VkResult err = vkCreateBuffer(m_device->device(), &buf_info, NULL, &buffer);
+ ASSERT_VK_SUCCESS(err);
+
+ VkMemoryRequirements requirements;
+ vkGetBufferMemoryRequirements(m_device->device(), buffer, &requirements);
+
+ VkMemoryAllocateInfo alloc_info{};
+ alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ alloc_info.pNext = NULL;
+ alloc_info.memoryTypeIndex = 0;
+ alloc_info.allocationSize = requirements.size;
+ bool pass = m_device->phy().set_memory_type(requirements.memoryTypeBits, &alloc_info, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
+ ASSERT_TRUE(pass);
+
+ VkDeviceMemory memory;
+ err = vkAllocateMemory(m_device->device(), &alloc_info, NULL, &memory);
+ ASSERT_VK_SUCCESS(err);
+
+ err = vkBindBufferMemory(m_device->device(), buffer, memory, 0);
+ ASSERT_VK_SUCCESS(err);
+
+ m_commandBuffer->begin();
+ ASSERT_VK_SUCCESS(err);
+
+ // vkCmdBindPipeline(m_commandBuffer->handle(),
+ // VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+    // Should error before reaching the driver, so the actual buffer contents don't matter
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "vkCmdBindIndexBuffer() offset (0x7) does not fall on ");
+ vkCmdBindIndexBuffer(m_commandBuffer->handle(), buffer, 7, VK_INDEX_TYPE_UINT16);
+ m_errorMonitor->VerifyFound();
+
+ vkFreeMemory(m_device->device(), memory, NULL);
+ vkDestroyBuffer(m_device->device(), buffer, NULL);
+}
+
+TEST_F(VkLayerTest, InvalidQueueFamilyIndex) {
+ // Miscellaneous queueFamilyIndex validation tests
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+ VkBufferCreateInfo buffCI = {};
+ buffCI.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+ buffCI.size = 1024;
+ buffCI.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+ buffCI.queueFamilyIndexCount = 2;
+ // Introduce failure by specifying invalid queue_family_index
+ uint32_t qfi[2];
+ qfi[0] = 777;
+ qfi[1] = 0;
+
+ buffCI.pQueueFamilyIndices = qfi;
+ buffCI.sharingMode = VK_SHARING_MODE_CONCURRENT; // qfi only matters in CONCURRENT mode
+
+ VkBuffer ib;
+ // Test for queue family index out of range
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferCreateInfo-sharingMode-01419");
+ vkCreateBuffer(m_device->device(), &buffCI, NULL, &ib);
+ m_errorMonitor->VerifyFound();
+
+ // Test for non-unique QFI in array
+ qfi[0] = 0;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferCreateInfo-sharingMode-01419");
+ vkCreateBuffer(m_device->device(), &buffCI, NULL, &ib);
+ m_errorMonitor->VerifyFound();
+
+ if (m_device->queue_props.size() > 2) {
+ VkBuffer ib2;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "which was not created allowing concurrent");
+
+ // Create buffer shared to queue families 1 and 2, but submitted on queue family 0
+ buffCI.queueFamilyIndexCount = 2;
+ qfi[0] = 1;
+ qfi[1] = 2;
+ vkCreateBuffer(m_device->device(), &buffCI, NULL, &ib2);
+ VkDeviceMemory mem;
+ VkMemoryRequirements mem_reqs;
+ vkGetBufferMemoryRequirements(m_device->device(), ib2, &mem_reqs);
+
+ VkMemoryAllocateInfo alloc_info = {};
+ alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ alloc_info.allocationSize = mem_reqs.size;
+ bool pass = false;
+ pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &alloc_info, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
+ if (!pass) {
+ printf("%s Failed to allocate required memory.\n", kSkipPrefix);
+ vkDestroyBuffer(m_device->device(), ib2, NULL);
+ return;
+ }
+ vkAllocateMemory(m_device->device(), &alloc_info, NULL, &mem);
+ vkBindBufferMemory(m_device->device(), ib2, mem, 0);
+
+ m_commandBuffer->begin();
+ vkCmdFillBuffer(m_commandBuffer->handle(), ib2, 0, 16, 5);
+ m_commandBuffer->end();
+ m_commandBuffer->QueueCommandBuffer(false);
+ m_errorMonitor->VerifyFound();
+ vkDestroyBuffer(m_device->device(), ib2, NULL);
+ vkFreeMemory(m_device->device(), mem, NULL);
+ }
+}
+
+TEST_F(VkLayerTest, ExecuteCommandsPrimaryCB) {
+ TEST_DESCRIPTION("Attempt vkCmdExecuteCommands with a primary command buffer (should only be secondary)");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ // An empty primary command buffer
+ VkCommandBufferObj cb(m_device, m_commandPool);
+ cb.begin();
+ cb.end();
+
+ m_commandBuffer->begin();
+ vkCmdBeginRenderPass(m_commandBuffer->handle(), &renderPassBeginInfo(), VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS);
+ VkCommandBuffer handle = cb.handle();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "vkCmdExecuteCommands() called w/ Primary Cmd Buffer ");
+ vkCmdExecuteCommands(m_commandBuffer->handle(), 1, &handle);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetUnexpectedError("All elements of pCommandBuffers must not be in the pending state");
+
+ m_commandBuffer->EndRenderPass();
+ m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, DSUsageBitsErrors) {
+    TEST_DESCRIPTION("Attempt to update descriptor sets for images and buffers that do not have the correct usage bits set.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ std::array<VkDescriptorPoolSize, VK_DESCRIPTOR_TYPE_RANGE_SIZE> ds_type_count;
+ for (uint32_t i = 0; i < ds_type_count.size(); ++i) {
+ ds_type_count[i].type = VkDescriptorType(i);
+ ds_type_count[i].descriptorCount = 1;
+ }
+
+ vk_testing::DescriptorPool ds_pool;
+ ds_pool.init(*m_device, vk_testing::DescriptorPool::create_info(0, VK_DESCRIPTOR_TYPE_RANGE_SIZE, ds_type_count));
+ ASSERT_TRUE(ds_pool.initialized());
+
+ std::vector<VkDescriptorSetLayoutBinding> dsl_bindings(1);
+ dsl_bindings[0].binding = 0;
+ dsl_bindings[0].descriptorType = VkDescriptorType(0);
+ dsl_bindings[0].descriptorCount = 1;
+ dsl_bindings[0].stageFlags = VK_SHADER_STAGE_ALL;
+ dsl_bindings[0].pImmutableSamplers = NULL;
+
+ // Create arrays of layout and descriptor objects
+ using UpDescriptorSet = std::unique_ptr<vk_testing::DescriptorSet>;
+ std::vector<UpDescriptorSet> descriptor_sets;
+ using UpDescriptorSetLayout = std::unique_ptr<VkDescriptorSetLayoutObj>;
+ std::vector<UpDescriptorSetLayout> ds_layouts;
+ descriptor_sets.reserve(VK_DESCRIPTOR_TYPE_RANGE_SIZE);
+ ds_layouts.reserve(VK_DESCRIPTOR_TYPE_RANGE_SIZE);
+ for (uint32_t i = 0; i < VK_DESCRIPTOR_TYPE_RANGE_SIZE; ++i) {
+ dsl_bindings[0].descriptorType = VkDescriptorType(i);
+ ds_layouts.push_back(UpDescriptorSetLayout(new VkDescriptorSetLayoutObj(m_device, dsl_bindings)));
+ descriptor_sets.push_back(UpDescriptorSet(ds_pool.alloc_sets(*m_device, *ds_layouts.back())));
+ ASSERT_TRUE(descriptor_sets.back()->initialized());
+ }
+
+ // Create a buffer & bufferView to be used for invalid updates
+ const VkDeviceSize buffer_size = 256;
+ uint8_t data[buffer_size];
+ VkConstantBufferObj buffer(m_device, buffer_size, data, VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT);
+ VkConstantBufferObj storage_texel_buffer(m_device, buffer_size, data, VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT);
+ ASSERT_TRUE(buffer.initialized() && storage_texel_buffer.initialized());
+
+ auto buff_view_ci = vk_testing::BufferView::createInfo(buffer.handle(), VK_FORMAT_R8_UNORM);
+ vk_testing::BufferView buffer_view_obj, storage_texel_buffer_view_obj;
+ buffer_view_obj.init(*m_device, buff_view_ci);
+ buff_view_ci.buffer = storage_texel_buffer.handle();
+ storage_texel_buffer_view_obj.init(*m_device, buff_view_ci);
+ ASSERT_TRUE(buffer_view_obj.initialized() && storage_texel_buffer_view_obj.initialized());
+ VkBufferView buffer_view = buffer_view_obj.handle();
+ VkBufferView storage_texel_buffer_view = storage_texel_buffer_view_obj.handle();
+
+ // Create an image to be used for invalid updates
+ VkImageObj image_obj(m_device);
+ image_obj.InitNoLayout(64, 64, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+ ASSERT_TRUE(image_obj.initialized());
+ VkImageView image_view = image_obj.targetView(VK_FORMAT_R8G8B8A8_UNORM);
+
+ VkDescriptorBufferInfo buff_info = {};
+ buff_info.buffer = buffer.handle();
+ VkDescriptorImageInfo img_info = {};
+ img_info.imageView = image_view;
+ VkWriteDescriptorSet descriptor_write = {};
+ descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ descriptor_write.dstBinding = 0;
+ descriptor_write.descriptorCount = 1;
+ descriptor_write.pTexelBufferView = &buffer_view;
+ descriptor_write.pBufferInfo = &buff_info;
+ descriptor_write.pImageInfo = &img_info;
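+ // Buffer, image, and texel-buffer info are all populated up front; for each update only the pointer matching descriptorType is read.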
+
+ // These error codes align with the ordering of the VkDescriptorType enum
+ std::string error_codes[] = {
+ "VUID-VkWriteDescriptorSet-descriptorType-00326", // placeholder, no error for SAMPLER descriptor
+ "VUID-VkWriteDescriptorSet-descriptorType-00326", // COMBINED_IMAGE_SAMPLER
+ "VUID-VkWriteDescriptorSet-descriptorType-00326", // SAMPLED_IMAGE
+ "VUID-VkWriteDescriptorSet-descriptorType-00326", // STORAGE_IMAGE
+ "VUID-VkWriteDescriptorSet-descriptorType-00334", // UNIFORM_TEXEL_BUFFER
+ "VUID-VkWriteDescriptorSet-descriptorType-00335", // STORAGE_TEXEL_BUFFER
+ "VUID-VkWriteDescriptorSet-descriptorType-00330", // UNIFORM_BUFFER
+ "VUID-VkWriteDescriptorSet-descriptorType-00331", // STORAGE_BUFFER
+ "VUID-VkWriteDescriptorSet-descriptorType-00330", // UNIFORM_BUFFER_DYNAMIC
+ "VUID-VkWriteDescriptorSet-descriptorType-00331", // STORAGE_BUFFER_DYNAMIC
+ "VUID-VkWriteDescriptorSet-descriptorType-00326" // INPUT_ATTACHMENT
+ };
+ // Start loop at 1 as SAMPLER desc type has no usage bit error
+ for (uint32_t i = 1; i < VK_DESCRIPTOR_TYPE_RANGE_SIZE; ++i) {
+ if (VkDescriptorType(i) == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER) {
+ // Now check for UNIFORM_TEXEL_BUFFER using storage_texel_buffer_view
+ descriptor_write.pTexelBufferView = &storage_texel_buffer_view;
+ }
+ descriptor_write.descriptorType = VkDescriptorType(i);
+ descriptor_write.dstSet = descriptor_sets[i]->handle();
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, error_codes[i]);
+
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+
+ m_errorMonitor->VerifyFound();
+ if (VkDescriptorType(i) == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER) {
+ descriptor_write.pTexelBufferView = &buffer_view;
+ }
+ }
+}
+
+TEST_F(VkLayerTest, DSBufferInfoErrors) {
+ TEST_DESCRIPTION(
+ "Attempt to update buffer descriptor set that has incorrect parameters in VkDescriptorBufferInfo struct. This includes:\n"
+ "1. offset value greater than or equal to buffer size\n"
+ "2. range value of 0\n"
+ "3. range value greater than buffer (size - offset)");
+ VkResult err;
+
+ // GPDP2 (GetPhysicalDeviceProperties2) needed for push descriptor support below
+ bool gpdp2_support = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
+ VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION);
+ if (gpdp2_support) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ }
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ bool update_template_support = DeviceExtensionSupported(gpu(), nullptr, VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME);
+ if (update_template_support) {
+ m_device_extension_names.push_back(VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME);
+ } else {
+ printf("%s Descriptor Update Template Extensions not supported, template cases skipped.\n", kSkipPrefix);
+ }
+
+ // Note: Includes workaround for some implementations which incorrectly return 0 maxPushDescriptors
+ bool push_descriptor_support = gpdp2_support &&
+ DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME) &&
+ (GetPushDescriptorProperties(instance(), gpu()).maxPushDescriptors > 0);
+ if (push_descriptor_support) {
+ m_device_extension_names.push_back(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
+ } else {
+ printf("%s Push Descriptor Extension not supported, push descriptor cases skipped.\n", kSkipPrefix);
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+
+ std::vector<VkDescriptorSetLayoutBinding> ds_bindings = {
+ {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr}};
+ OneOffDescriptorSet ds(m_device, ds_bindings);
+
+ // Create a buffer to be used for invalid updates
+ VkBufferCreateInfo buff_ci = {};
+ buff_ci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+ buff_ci.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
+ buff_ci.size = m_device->props.limits.minUniformBufferOffsetAlignment;
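+ // Sizing the buffer to the minimum alignment means an offset equal to buff_ci.size is aligned yet still out of range.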
+ buff_ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+ VkBuffer buffer;
+ err = vkCreateBuffer(m_device->device(), &buff_ci, NULL, &buffer);
+ ASSERT_VK_SUCCESS(err);
+
+ // Have to bind memory to buffer before descriptor update
+ VkMemoryRequirements mem_reqs;
+ vkGetBufferMemoryRequirements(m_device->device(), buffer, &mem_reqs);
+ VkMemoryAllocateInfo mem_alloc = {};
+ mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ mem_alloc.pNext = NULL;
+ mem_alloc.allocationSize = mem_reqs.size;
+ mem_alloc.memoryTypeIndex = 0;
+ bool pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &mem_alloc, 0);
+ if (!pass) {
+ printf("%s Failed to allocate memory.\n", kSkipPrefix);
+ vkDestroyBuffer(m_device->device(), buffer, NULL);
+ return;
+ }
+
+ VkDeviceMemory mem;
+ err = vkAllocateMemory(m_device->device(), &mem_alloc, NULL, &mem);
+ ASSERT_VK_SUCCESS(err);
+ err = vkBindBufferMemory(m_device->device(), buffer, mem, 0);
+ ASSERT_VK_SUCCESS(err);
+
+ VkDescriptorBufferInfo buff_info = {};
+ buff_info.buffer = buffer;
+ VkWriteDescriptorSet descriptor_write = {};
+ descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ descriptor_write.dstBinding = 0;
+ descriptor_write.descriptorCount = 1;
+ descriptor_write.pTexelBufferView = nullptr;
+ descriptor_write.pBufferInfo = &buff_info;
+ descriptor_write.pImageInfo = nullptr;
+
+ descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+ descriptor_write.dstSet = ds.set_;
+
+ // Relying on the "return nullptr for non-enabled extensions" behavior of vkGetDeviceProcAddr
+ auto vkCreateDescriptorUpdateTemplateKHR =
+ (PFN_vkCreateDescriptorUpdateTemplateKHR)vkGetDeviceProcAddr(m_device->device(), "vkCreateDescriptorUpdateTemplateKHR");
+ auto vkDestroyDescriptorUpdateTemplateKHR =
+ (PFN_vkDestroyDescriptorUpdateTemplateKHR)vkGetDeviceProcAddr(m_device->device(), "vkDestroyDescriptorUpdateTemplateKHR");
+ auto vkUpdateDescriptorSetWithTemplateKHR =
+ (PFN_vkUpdateDescriptorSetWithTemplateKHR)vkGetDeviceProcAddr(m_device->device(), "vkUpdateDescriptorSetWithTemplateKHR");
+
+ if (update_template_support) {
+ ASSERT_NE(vkCreateDescriptorUpdateTemplateKHR, nullptr);
+ ASSERT_NE(vkDestroyDescriptorUpdateTemplateKHR, nullptr);
+ ASSERT_NE(vkUpdateDescriptorSetWithTemplateKHR, nullptr);
+ }
+
+ // Setup for update w/ template tests
+ // Create a template of descriptor set updates
+ struct SimpleTemplateData {
+ uint8_t padding[7];
+ VkDescriptorBufferInfo buff_info;
+ uint32_t other_padding[4];
+ };
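+ // The padding members give buff_info a non-zero offset and make the stride larger than VkDescriptorBufferInfo, so the template's offset/stride handling is exercised.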
+ SimpleTemplateData update_template_data = {};
+
+ VkDescriptorUpdateTemplateEntry update_template_entry = {};
+ update_template_entry.dstBinding = 0;
+ update_template_entry.dstArrayElement = 0;
+ update_template_entry.descriptorCount = 1;
+ update_template_entry.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+ update_template_entry.offset = offsetof(SimpleTemplateData, buff_info);
+ update_template_entry.stride = sizeof(SimpleTemplateData);
+
+ auto update_template_ci = lvl_init_struct<VkDescriptorUpdateTemplateCreateInfoKHR>();
+ update_template_ci.descriptorUpdateEntryCount = 1;
+ update_template_ci.pDescriptorUpdateEntries = &update_template_entry;
+ update_template_ci.templateType = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET;
+ update_template_ci.descriptorSetLayout = ds.layout_.handle();
+
+ VkDescriptorUpdateTemplate update_template = VK_NULL_HANDLE;
+ if (update_template_support) {
+ auto result = vkCreateDescriptorUpdateTemplateKHR(m_device->device(), &update_template_ci, nullptr, &update_template);
+ ASSERT_VK_SUCCESS(result);
+ }
+
+ // VK_KHR_push_descriptor support
+ auto vkCmdPushDescriptorSetKHR =
+ (PFN_vkCmdPushDescriptorSetKHR)vkGetDeviceProcAddr(m_device->device(), "vkCmdPushDescriptorSetKHR");
+ auto vkCmdPushDescriptorSetWithTemplateKHR =
+ (PFN_vkCmdPushDescriptorSetWithTemplateKHR)vkGetDeviceProcAddr(m_device->device(), "vkCmdPushDescriptorSetWithTemplateKHR");
+
+ std::unique_ptr<VkDescriptorSetLayoutObj> push_dsl = nullptr;
+ std::unique_ptr<VkPipelineLayoutObj> pipeline_layout = nullptr;
+ VkDescriptorUpdateTemplate push_template = VK_NULL_HANDLE;
+ if (push_descriptor_support) {
+ ASSERT_NE(vkCmdPushDescriptorSetKHR, nullptr);
+ push_dsl.reset(
+ new VkDescriptorSetLayoutObj(m_device, ds_bindings, VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR));
+ pipeline_layout.reset(new VkPipelineLayoutObj(m_device, {push_dsl.get()}));
+ ASSERT_TRUE(push_dsl->initialized());
+
+ if (update_template_support) {
+ ASSERT_NE(vkCmdPushDescriptorSetWithTemplateKHR, nullptr);
+ auto push_template_ci = lvl_init_struct<VkDescriptorUpdateTemplateCreateInfoKHR>();
+ push_template_ci.descriptorUpdateEntryCount = 1;
+ push_template_ci.pDescriptorUpdateEntries = &update_template_entry;
+ push_template_ci.templateType = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR;
+ push_template_ci.descriptorSetLayout = VK_NULL_HANDLE;
+ push_template_ci.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
+ push_template_ci.pipelineLayout = pipeline_layout->handle();
+ push_template_ci.set = 0;
+ auto result = vkCreateDescriptorUpdateTemplateKHR(m_device->device(), &push_template_ci, nullptr, &push_template);
+ ASSERT_VK_SUCCESS(result);
+ }
+ }
+
+ auto do_test = [&](const char *desired_failure) {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, desired_failure);
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+ m_errorMonitor->VerifyFound();
+
+ if (push_descriptor_support) {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, desired_failure);
+ m_commandBuffer->begin();
+ vkCmdPushDescriptorSetKHR(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout->handle(), 0, 1,
+ &descriptor_write);
+ m_commandBuffer->end();
+ m_errorMonitor->VerifyFound();
+ }
+
+ if (update_template_support) {
+ update_template_data.buff_info = buff_info; // copy the test case information into our "pData"
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, desired_failure);
+ vkUpdateDescriptorSetWithTemplateKHR(m_device->device(), ds.set_, update_template, &update_template_data);
+ m_errorMonitor->VerifyFound();
+ if (push_descriptor_support) {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, desired_failure);
+ m_commandBuffer->begin();
+ vkCmdPushDescriptorSetWithTemplateKHR(m_commandBuffer->handle(), push_template, pipeline_layout->handle(), 0,
+ &update_template_data);
+ m_commandBuffer->end();
+ m_errorMonitor->VerifyFound();
+ }
+ }
+ };
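+ // Each case below drives the same invalid VkDescriptorBufferInfo through vkUpdateDescriptorSets and, where supported, the push-descriptor and update-template paths.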
+
+ // Cause error due to offset out of range
+ buff_info.offset = buff_ci.size;
+ buff_info.range = VK_WHOLE_SIZE;
+ do_test("VUID-VkDescriptorBufferInfo-offset-00340");
+
+ // Now cause error due to range of 0
+ buff_info.offset = 0;
+ buff_info.range = 0;
+ do_test("VUID-VkDescriptorBufferInfo-range-00341");
+
+ // Now cause error due to range exceeding buffer size - offset
+ buff_info.offset = 0;
+ buff_info.range = buff_ci.size + 1;
+ do_test("VUID-VkDescriptorBufferInfo-range-00342");
+
+ if (update_template_support) {
+ vkDestroyDescriptorUpdateTemplateKHR(m_device->device(), update_template, nullptr);
+ if (push_descriptor_support) {
+ vkDestroyDescriptorUpdateTemplateKHR(m_device->device(), push_template, nullptr);
+ }
+ }
+ vkFreeMemory(m_device->device(), mem, NULL);
+ vkDestroyBuffer(m_device->device(), buffer, NULL);
+}
+
+TEST_F(VkLayerTest, DSBufferLimitErrors) {
+ TEST_DESCRIPTION(
+ "Attempt to update buffer descriptor set that has VkDescriptorBufferInfo values that violate device limits.\n"
+ "Test cases include:\n"
+ "1. range of uniform buffer update exceeds maxUniformBufferRange\n"
+ "2. offset of uniform buffer update is not multiple of minUniformBufferOffsetAlignment\n"
+ "3. using VK_WHOLE_SIZE with uniform buffer size exceeding maxUniformBufferRange\n"
+ "4. range of storage buffer update exceeds maxStorageBufferRange\n"
+ "5. offset of storage buffer update is not multiple of minStorageBufferOffsetAlignment\n"
+ "6. using VK_WHOLE_SIZE with storage buffer size exceeding maxStorageBufferRange");
+ VkResult err;
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ struct TestCase {
+ VkDescriptorType descriptor_type;
+ VkBufferUsageFlagBits buffer_usage;
+ VkDeviceSize max_range;
+ std::string max_range_vu;
+ VkDeviceSize min_align;
+ std::string min_align_vu;
+ };
+
+ for (const auto &test_case : {
+ TestCase({VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT,
+ m_device->props.limits.maxUniformBufferRange, "VUID-VkWriteDescriptorSet-descriptorType-00332",
+ m_device->props.limits.minUniformBufferOffsetAlignment, "VUID-VkWriteDescriptorSet-descriptorType-00327"}),
+ TestCase({VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT,
+ m_device->props.limits.maxStorageBufferRange, "VUID-VkWriteDescriptorSet-descriptorType-00333",
+ m_device->props.limits.minStorageBufferOffsetAlignment, "VUID-VkWriteDescriptorSet-descriptorType-00328"}),
+ }) {
+ // Create layout with single buffer
+ OneOffDescriptorSet ds(m_device, {
+ {0, test_case.descriptor_type, 1, VK_SHADER_STAGE_ALL, nullptr},
+ });
+
+ // Create a buffer to be used for invalid updates
+ VkBufferCreateInfo bci = {};
+ bci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+ bci.usage = test_case.buffer_usage;
+ bci.size = test_case.max_range + test_case.min_align; // Make buffer bigger than range limit
+ bci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+ VkBuffer buffer;
+ err = vkCreateBuffer(m_device->device(), &bci, NULL, &buffer);
+ ASSERT_VK_SUCCESS(err);
+
+ // Have to bind memory to buffer before descriptor update
+ VkMemoryRequirements mem_reqs;
+ vkGetBufferMemoryRequirements(m_device->device(), buffer, &mem_reqs);
+
+ VkMemoryAllocateInfo mem_alloc = {};
+ mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ mem_alloc.pNext = NULL;
+ mem_alloc.allocationSize = mem_reqs.size;
+ bool pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &mem_alloc, 0);
+ if (!pass) {
+ printf("%s Failed to allocate memory in DSBufferLimitErrors; skipped.\n", kSkipPrefix);
+ vkDestroyBuffer(m_device->device(), buffer, NULL);
+ continue;
+ }
+
+ VkDeviceMemory mem;
+ err = vkAllocateMemory(m_device->device(), &mem_alloc, NULL, &mem);
+ if (VK_SUCCESS != err) {
+ printf("%s Failed to allocate memory in DSBufferLimitErrors; skipped.\n", kSkipPrefix);
+ vkDestroyBuffer(m_device->device(), buffer, NULL);
+ continue;
+ }
+ err = vkBindBufferMemory(m_device->device(), buffer, mem, 0);
+ ASSERT_VK_SUCCESS(err);
+
+ VkDescriptorBufferInfo buff_info = {};
+ buff_info.buffer = buffer;
+ VkWriteDescriptorSet descriptor_write = {};
+ descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ descriptor_write.dstBinding = 0;
+ descriptor_write.descriptorCount = 1;
+ descriptor_write.pTexelBufferView = nullptr;
+ descriptor_write.pBufferInfo = &buff_info;
+ descriptor_write.pImageInfo = nullptr;
+ descriptor_write.descriptorType = test_case.descriptor_type;
+ descriptor_write.dstSet = ds.set_;
+
+ // Exceed range limit
+ if (test_case.max_range != UINT32_MAX) {
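+ // When the device reports no effective limit (UINT32_MAX), exceeding it is not meaningful, so this case is skipped.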
+ buff_info.range = test_case.max_range + 1;
+ buff_info.offset = 0;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.max_range_vu);
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+ m_errorMonitor->VerifyFound();
+ }
+
+ // Reduce size of range to acceptable limit and cause offset error
+ if (test_case.min_align > 1) {
+ buff_info.range = test_case.max_range;
+ buff_info.offset = test_case.min_align - 1;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.min_align_vu);
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+ m_errorMonitor->VerifyFound();
+ }
+
+ // Exceed effective range limit by using VK_WHOLE_SIZE
+ buff_info.range = VK_WHOLE_SIZE;
+ buff_info.offset = 0;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.max_range_vu);
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+ m_errorMonitor->VerifyFound();
+
+ // Cleanup
+ vkFreeMemory(m_device->device(), mem, NULL);
+ vkDestroyBuffer(m_device->device(), buffer, NULL);
+ }
+}
+
+TEST_F(VkLayerTest, DSAspectBitsErrors) {
+ // TODO : Initially only catching case where DEPTH & STENCIL aspect bits
+ // are set, but could expand this test to hit more cases.
+ TEST_DESCRIPTION("Attempt to update descriptor sets for images that do not have the correct aspect bits set.");
+ VkResult err;
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ auto depth_format = FindSupportedDepthStencilFormat(gpu());
+ if (!depth_format) {
+ printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
+ return;
+ }
+
+ OneOffDescriptorSet ds(m_device, {
+ {0, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 1, VK_SHADER_STAGE_ALL, nullptr},
+ });
+
+ // Create an image to be used for invalid updates
+ VkImageObj image_obj(m_device);
+ image_obj.Init(64, 64, 1, depth_format, VK_IMAGE_USAGE_SAMPLED_BIT);
+ if (!image_obj.initialized()) {
+ printf("%s Depth + Stencil format cannot be sampled. Skipped.\n", kSkipPrefix);
+ return;
+ }
+ VkImage image = image_obj.image();
+
+ // Now create view for image
+ VkImageViewCreateInfo image_view_ci = {};
+ image_view_ci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+ image_view_ci.image = image;
+ image_view_ci.format = depth_format;
+ image_view_ci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+ image_view_ci.subresourceRange.layerCount = 1;
+ image_view_ci.subresourceRange.baseArrayLayer = 0;
+ image_view_ci.subresourceRange.levelCount = 1;
+ // Setting both depth & stencil aspect bits is illegal for an imageView used
+ // to populate a descriptor set.
+ image_view_ci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
+
+ VkImageView image_view;
+ err = vkCreateImageView(m_device->device(), &image_view_ci, NULL, &image_view);
+ ASSERT_VK_SUCCESS(err);
+
+ VkDescriptorImageInfo img_info = {};
+ img_info.imageView = image_view;
+ VkWriteDescriptorSet descriptor_write = {};
+ descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ descriptor_write.dstBinding = 0;
+ descriptor_write.descriptorCount = 1;
+ descriptor_write.pTexelBufferView = NULL;
+ descriptor_write.pBufferInfo = NULL;
+ descriptor_write.pImageInfo = &img_info;
+ descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT;
+ descriptor_write.dstSet = ds.set_;
+ // TODO(whenning42): Update this check to look for a VUID when this error is
+ // assigned one.
+ const char *error_msg = " please only set either VK_IMAGE_ASPECT_DEPTH_BIT or VK_IMAGE_ASPECT_STENCIL_BIT ";
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, error_msg);
+
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+
+ m_errorMonitor->VerifyFound();
+ vkDestroyImageView(m_device->device(), image_view, NULL);
+}
+
+TEST_F(VkLayerTest, DSTypeMismatch) {
+ // Create DS w/ layout of one type and attempt Update w/ mis-matched type
+ VkResult err;
+
+ m_errorMonitor->SetDesiredFailureMsg(
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ " binding #0 with type VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER but update type is VK_DESCRIPTOR_TYPE_SAMPLER");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ OneOffDescriptorSet ds(m_device, {
+ {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+ });
+
+ VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
+ VkSampler sampler;
+ err = vkCreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
+ ASSERT_VK_SUCCESS(err);
+
+ VkDescriptorImageInfo info = {};
+ info.sampler = sampler;
+
+ VkWriteDescriptorSet descriptor_write;
+ memset(&descriptor_write, 0, sizeof(descriptor_write));
+ descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ descriptor_write.dstSet = ds.set_;
+ descriptor_write.descriptorCount = 1;
+ // This is a mismatched type for the layout which expects BUFFER
+ descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
+ descriptor_write.pImageInfo = &info;
+
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+
+ m_errorMonitor->VerifyFound();
+
+ vkDestroySampler(m_device->device(), sampler, NULL);
+}
+
+TEST_F(VkLayerTest, DSUpdateOutOfBounds) {
+ // For overlapping Update, have arrayIndex exceed that of layout
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-dstArrayElement-00321");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ OneOffDescriptorSet ds(m_device, {
+ {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+ });
+
+ VkBufferTest buffer_test(m_device, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT);
+ if (!buffer_test.GetBufferCurrent()) {
+ // Something prevented creation of buffer so abort
+ printf("%s Buffer creation failed, skipping test\n", kSkipPrefix);
+ return;
+ }
+
+ // Correctly update descriptor to avoid "NOT_UPDATED" error
+ VkDescriptorBufferInfo buff_info = {};
+ buff_info.buffer = buffer_test.GetBuffer();
+ buff_info.offset = 0;
+ buff_info.range = 1024;
+
+ VkWriteDescriptorSet descriptor_write;
+ memset(&descriptor_write, 0, sizeof(descriptor_write));
+ descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ descriptor_write.dstSet = ds.set_;
+ descriptor_write.dstArrayElement = 1; /* This index is out of bounds for the update */
+ descriptor_write.descriptorCount = 1;
+ descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+ descriptor_write.pBufferInfo = &buff_info;
+
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, InvalidDSUpdateIndex) {
+ // Create layout w/ count of 1 and attempt update to that layout w/ binding index 2
+ VkResult err;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-dstBinding-00315");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ OneOffDescriptorSet ds(m_device, {
+ {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+ });
+
+ VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
+ VkSampler sampler;
+ err = vkCreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
+ ASSERT_VK_SUCCESS(err);
+
+ VkDescriptorImageInfo info = {};
+ info.sampler = sampler;
+
+ VkWriteDescriptorSet descriptor_write;
+ memset(&descriptor_write, 0, sizeof(descriptor_write));
+ descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ descriptor_write.dstSet = ds.set_;
+ descriptor_write.dstBinding = 2;
+ descriptor_write.descriptorCount = 1;
+ // This is the wrong type, but out of bounds will be flagged first
+ descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
+ descriptor_write.pImageInfo = &info;
+
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+
+ m_errorMonitor->VerifyFound();
+
+ vkDestroySampler(m_device->device(), sampler, NULL);
+}
+
+TEST_F(VkLayerTest, DSUpdateEmptyBinding) {
+ // Create layout w/ empty binding and attempt to update it
+ VkResult err;
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ OneOffDescriptorSet ds(m_device, {
+ {0, VK_DESCRIPTOR_TYPE_SAMPLER, 0 /* !! */, VK_SHADER_STAGE_ALL, nullptr},
+ });
+
+ VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
+ VkSampler sampler;
+ err = vkCreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
+ ASSERT_VK_SUCCESS(err);
+
+ VkDescriptorImageInfo info = {};
+ info.sampler = sampler;
+
+ VkWriteDescriptorSet descriptor_write;
+ memset(&descriptor_write, 0, sizeof(descriptor_write));
+ descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ descriptor_write.dstSet = ds.set_;
+ descriptor_write.dstBinding = 0;
+ descriptor_write.descriptorCount = 1; // Lie here to avoid parameter_validation error
+ // This is the wrong type, but empty binding error will be flagged first
+ descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
+ descriptor_write.pImageInfo = &info;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-dstBinding-00316");
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+ m_errorMonitor->VerifyFound();
+
+ vkDestroySampler(m_device->device(), sampler, NULL);
+}
+
+TEST_F(VkLayerTest, InvalidDSUpdateStruct) {
+ // Call vkUpdateDescriptorSets() with a struct type other than the valid
+ // VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET
+ VkResult err;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, ".sType must be VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ OneOffDescriptorSet ds(m_device, {
+ {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+ });
+
+ VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
+ VkSampler sampler;
+ err = vkCreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
+ ASSERT_VK_SUCCESS(err);
+
+ VkDescriptorImageInfo info = {};
+ info.sampler = sampler;
+
+ VkWriteDescriptorSet descriptor_write;
+ memset(&descriptor_write, 0, sizeof(descriptor_write));
+ descriptor_write.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO; /* Intentionally broken struct type */
+ descriptor_write.dstSet = ds.set_;
+ descriptor_write.descriptorCount = 1;
+ // This is the wrong type, but the invalid sType will be flagged first
+ descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
+ descriptor_write.pImageInfo = &info;
+
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+
+ m_errorMonitor->VerifyFound();
+
+ vkDestroySampler(m_device->device(), sampler, NULL);
+}
+
+TEST_F(VkLayerTest, SampleDescriptorUpdateError) {
+ // Create a single Sampler descriptor and send it an invalid Sampler
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-descriptorType-00325");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ OneOffDescriptorSet ds(m_device, {
+ {0, VK_DESCRIPTOR_TYPE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
+ });
+
+ VkSampler sampler = (VkSampler)((size_t)0xbaadbeef); // Sampler with invalid handle
+
+ VkDescriptorImageInfo descriptor_info;
+ memset(&descriptor_info, 0, sizeof(VkDescriptorImageInfo));
+ descriptor_info.sampler = sampler;
+
+ VkWriteDescriptorSet descriptor_write;
+ memset(&descriptor_write, 0, sizeof(descriptor_write));
+ descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ descriptor_write.dstSet = ds.set_;
+ descriptor_write.dstBinding = 0;
+ descriptor_write.descriptorCount = 1;
+ descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
+ descriptor_write.pImageInfo = &descriptor_info;
+
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, ImageViewDescriptorUpdateError) {
+ // Create a single combined Image/Sampler descriptor and send it an invalid
+ // imageView
+ VkResult err;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-descriptorType-00326");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ OneOffDescriptorSet ds(m_device, {
+ {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
+ });
+
+ VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
+ VkSampler sampler;
+ err = vkCreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
+ ASSERT_VK_SUCCESS(err);
+
+ VkImageView view = (VkImageView)((size_t)0xbaadbeef); // invalid imageView object
+
+ VkDescriptorImageInfo descriptor_info;
+ memset(&descriptor_info, 0, sizeof(VkDescriptorImageInfo));
+ descriptor_info.sampler = sampler;
+ descriptor_info.imageView = view;
+
+ VkWriteDescriptorSet descriptor_write;
+ memset(&descriptor_write, 0, sizeof(descriptor_write));
+ descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ descriptor_write.dstSet = ds.set_;
+ descriptor_write.dstBinding = 0;
+ descriptor_write.descriptorCount = 1;
+ descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+ descriptor_write.pImageInfo = &descriptor_info;
+
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+
+ m_errorMonitor->VerifyFound();
+
+ vkDestroySampler(m_device->device(), sampler, NULL);
+}
+
+TEST_F(VkLayerTest, CopyDescriptorUpdateErrors) {
+ // Create DS w/ layout of 2 types, write update 1 and attempt to copy-update
+ // into the other
+ VkResult err;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ " binding #1 with type VK_DESCRIPTOR_TYPE_SAMPLER. Types do not match.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ OneOffDescriptorSet ds(m_device, {
+ {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+ {1, VK_DESCRIPTOR_TYPE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
+ });
+
+ VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
+ VkSampler sampler;
+ err = vkCreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
+ ASSERT_VK_SUCCESS(err);
+
+ VkDescriptorImageInfo info = {};
+ info.sampler = sampler;
+
+ VkWriteDescriptorSet descriptor_write;
+ memset(&descriptor_write, 0, sizeof(VkWriteDescriptorSet));
+ descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ descriptor_write.dstSet = ds.set_;
+ descriptor_write.dstBinding = 1; // SAMPLER binding from layout above
+ descriptor_write.descriptorCount = 1;
+ descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
+ descriptor_write.pImageInfo = &info;
+ // This write update should succeed
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+ // Now perform a copy update that fails due to type mismatch
+ VkCopyDescriptorSet copy_ds_update;
+ memset(&copy_ds_update, 0, sizeof(VkCopyDescriptorSet));
+ copy_ds_update.sType = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET;
+ copy_ds_update.srcSet = ds.set_;
+ copy_ds_update.srcBinding = 1; // Copy from SAMPLER binding
+ copy_ds_update.dstSet = ds.set_;
+ copy_ds_update.dstBinding = 0; // ERROR : copy to UNIFORM binding
+ copy_ds_update.descriptorCount = 1; // copy 1 descriptor
+ vkUpdateDescriptorSets(m_device->device(), 0, NULL, 1, &copy_ds_update);
+
+ m_errorMonitor->VerifyFound();
+ // Now perform a copy update that fails due to binding out of bounds
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " does not have copy update src binding of 3.");
+ memset(&copy_ds_update, 0, sizeof(VkCopyDescriptorSet));
+ copy_ds_update.sType = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET;
+ copy_ds_update.srcSet = ds.set_;
+ copy_ds_update.srcBinding = 3; // ERROR : Invalid binding for matching layout
+ copy_ds_update.dstSet = ds.set_;
+ copy_ds_update.dstBinding = 0;
+ copy_ds_update.descriptorCount = 1; // Copy 1 descriptor
+ vkUpdateDescriptorSets(m_device->device(), 0, NULL, 1, &copy_ds_update);
+
+ m_errorMonitor->VerifyFound();
+
+ // Now perform a copy update that fails because the descriptor count oversteps the descriptors remaining in the set
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ " binding#1 with offset index of 1 plus update array offset of 0 and update of 5 "
+ "descriptors oversteps total number of descriptors in set: 2.");
+
+ memset(&copy_ds_update, 0, sizeof(VkCopyDescriptorSet));
+ copy_ds_update.sType = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET;
+ copy_ds_update.srcSet = ds.set_;
+ copy_ds_update.srcBinding = 1;
+ copy_ds_update.dstSet = ds.set_;
+ copy_ds_update.dstBinding = 0;
+ copy_ds_update.descriptorCount = 5; // ERROR copy 5 descriptors (out of bounds for layout)
+ vkUpdateDescriptorSets(m_device->device(), 0, NULL, 1, &copy_ds_update);
+
+ m_errorMonitor->VerifyFound();
+
+ vkDestroySampler(m_device->device(), sampler, NULL);
+}
+
+TEST_F(VkPositiveLayerTest, CopyNonupdatedDescriptors) {
+ TEST_DESCRIPTION("Copy non-updated descriptors");
+ unsigned int i;
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ OneOffDescriptorSet src_ds(m_device, {
+ {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+ {1, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1, VK_SHADER_STAGE_ALL, nullptr},
+ {2, VK_DESCRIPTOR_TYPE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
+ });
+ OneOffDescriptorSet dst_ds(m_device, {
+ {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+ {1, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1, VK_SHADER_STAGE_ALL, nullptr},
+ });
+
+ m_errorMonitor->ExpectSuccess();
+
+ const unsigned int copy_size = 2;
+ VkCopyDescriptorSet copy_ds_update[copy_size];
+ memset(copy_ds_update, 0, sizeof(copy_ds_update));
+ for (i = 0; i < copy_size; i++) {
+ copy_ds_update[i].sType = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET;
+ copy_ds_update[i].srcSet = src_ds.set_;
+ copy_ds_update[i].srcBinding = i;
+ copy_ds_update[i].dstSet = dst_ds.set_;
+ copy_ds_update[i].dstBinding = i;
+ copy_ds_update[i].descriptorCount = 1;
+ }
+ vkUpdateDescriptorSets(m_device->device(), 0, NULL, copy_size, copy_ds_update);
+
+ m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkLayerTest, NumSamplesMismatch) {
+ // Create a command buffer where the MSAA sample count doesn't match the
+ // render pass sampleCount
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Num samples mismatch! ");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ OneOffDescriptorSet ds(m_device, {
+ {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+ });
+
+ VkPipelineMultisampleStateCreateInfo pipe_ms_state_ci = {};
+ pipe_ms_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
+ pipe_ms_state_ci.pNext = NULL;
+ pipe_ms_state_ci.rasterizationSamples = VK_SAMPLE_COUNT_4_BIT;
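+ // 4x MSAA in the pipeline versus the single-sample render target set up by InitRenderTarget() is the mismatch under test.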
+ pipe_ms_state_ci.sampleShadingEnable = 0;
+ pipe_ms_state_ci.minSampleShading = 1.0;
+ pipe_ms_state_ci.pSampleMask = NULL;
+
+ const VkPipelineLayoutObj pipeline_layout(m_device, {&ds.layout_});
+
+ VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this); // We shouldn't need a fragment shader
+ // but add it to be able to run on more devices
+ VkPipelineObj pipe(m_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+ pipe.AddDefaultColorAttachment();
+ pipe.SetMSAA(&pipe_ms_state_ci);
+
+ m_errorMonitor->SetUnexpectedError("VUID-VkGraphicsPipelineCreateInfo-subpass-00757");
+ pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());
+
+ m_commandBuffer->begin();
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+ vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+
+ VkViewport viewport = {0, 0, 16, 16, 0, 1};
+ vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+ VkRect2D scissor = {{0, 0}, {16, 16}};
+ vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
+
+ // Render triangle (the error should trigger on the attempt to draw).
+ m_commandBuffer->Draw(3, 1, 0, 0);
+
+ // Finalize recording of the command buffer
+ m_commandBuffer->EndRenderPass();
+ m_commandBuffer->end();
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, DrawWithPipelineIncompatibleWithRenderPass) {
+ TEST_DESCRIPTION(
+ "Hit RenderPass incompatible cases. Initial case is drawing with an active renderpass that's not compatible with the bound "
+ "pipeline state object's creation renderpass");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ OneOffDescriptorSet ds(m_device, {
+ {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+ });
+
+ const VkPipelineLayoutObj pipeline_layout(m_device, {&ds.layout_});
+
+ VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this); // We shouldn't need a fragment shader
+ // but add it to be able to run on more devices
+ // Create a renderpass that will be incompatible with default renderpass
+ VkAttachmentReference color_att = {};
+ color_att.layout = VK_IMAGE_LAYOUT_GENERAL;
+ VkSubpassDescription subpass = {};
+ subpass.colorAttachmentCount = 1;
+ subpass.pColorAttachments = &color_att;
+ VkRenderPassCreateInfo rpci = {};
+ rpci.subpassCount = 1;
+ rpci.pSubpasses = &subpass;
+ rpci.attachmentCount = 1;
+ VkAttachmentDescription attach_desc = {};
+ attach_desc.samples = VK_SAMPLE_COUNT_1_BIT;
+ // Format incompatible with PSO RP color attach format B8G8R8A8_UNORM
+ attach_desc.format = VK_FORMAT_R8G8B8A8_UNORM;
+ attach_desc.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
+ rpci.pAttachments = &attach_desc;
+ rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+ VkRenderPass rp;
+ vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp);
+ VkPipelineObj pipe(m_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+ pipe.AddDefaultColorAttachment();
+ VkViewport viewport = {0.0f, 0.0f, 64.0f, 64.0f, 0.0f, 1.0f};
+ m_viewports.push_back(viewport);
+ pipe.SetViewport(m_viewports);
+ VkRect2D rect = {{0, 0}, {64, 64}};
+ m_scissors.push_back(rect);
+ pipe.SetScissor(m_scissors);
+ pipe.CreateVKPipeline(pipeline_layout.handle(), rp);
+
+ VkCommandBufferInheritanceInfo cbii = {};
+ cbii.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
+ cbii.renderPass = rp;
+ cbii.subpass = 0;
+ VkCommandBufferBeginInfo cbbi = {};
+ cbbi.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+ cbbi.pInheritanceInfo = &cbii;
+ vkBeginCommandBuffer(m_commandBuffer->handle(), &cbbi);
+ vkCmdBeginRenderPass(m_commandBuffer->handle(), &m_renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
+ vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDraw-renderPass-00435");
+ // Render triangle (the error should trigger on the attempt to draw).
+ m_commandBuffer->Draw(3, 1, 0, 0);
+
+ // Finalize recording of the command buffer
+ m_commandBuffer->EndRenderPass();
+ m_commandBuffer->end();
+
+ m_errorMonitor->VerifyFound();
+
+ vkDestroyRenderPass(m_device->device(), rp, NULL);
+}
+
+TEST_F(VkLayerTest, NumBlendAttachMismatch) {
+ // Create Pipeline where the number of blend attachments doesn't match the
+ // number of color attachments. In this case, we don't add any color
+ // blend attachments even though we have a color attachment.
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkGraphicsPipelineCreateInfo-attachmentCount-00746");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ OneOffDescriptorSet ds(m_device, {
+ {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+ });
+
+ VkPipelineMultisampleStateCreateInfo pipe_ms_state_ci = {};
+ pipe_ms_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
+ pipe_ms_state_ci.pNext = NULL;
+ pipe_ms_state_ci.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
+ pipe_ms_state_ci.sampleShadingEnable = 0;
+ pipe_ms_state_ci.minSampleShading = 1.0;
+ pipe_ms_state_ci.pSampleMask = NULL;
+
+ const VkPipelineLayoutObj pipeline_layout(m_device, {&ds.layout_});
+
+ VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this); // We shouldn't need a fragment shader
+ // but add it to be able to run on more devices
+ VkPipelineObj pipe(m_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+ pipe.SetMSAA(&pipe_ms_state_ci);
+ pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, Bad2DArrayImageType) {
+ TEST_DESCRIPTION("Create an image with a flag specifying 2D_ARRAY_COMPATIBLE but not of imageType 3D.");
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME)) {
+ m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+ } else {
+ printf("%s %s is not supported; skipping\n", kSkipPrefix, VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ // Trigger the check by setting the image create flags to 2D_ARRAY_COMPATIBLE while imageType is 2D
+ VkImageCreateInfo ici = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+ nullptr,
+ VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR,
+ VK_IMAGE_TYPE_2D,
+ VK_FORMAT_R8G8B8A8_UNORM,
+ {32, 32, 1},
+ 1,
+ 1,
+ VK_SAMPLE_COUNT_1_BIT,
+ VK_IMAGE_TILING_OPTIMAL,
+ VK_IMAGE_USAGE_SAMPLED_BIT,
+ VK_SHARING_MODE_EXCLUSIVE,
+ 0,
+ nullptr,
+ VK_IMAGE_LAYOUT_UNDEFINED};
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-flags-00950");
+ VkImage image;
+ vkCreateImage(m_device->device(), &ici, NULL, &image);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, Maint1BindingSliceOf3DImage) {
+ TEST_DESCRIPTION(
+ "Attempt to bind a slice of a 3D texture in a descriptor set. This is explicitly disallowed by KHR_maintenance1 to keep "
+ "things simple for drivers.");
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME)) {
+ m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+ } else {
+ printf("%s %s is not supported; skipping\n", kSkipPrefix, VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ VkResult err;
+
+ OneOffDescriptorSet set(m_device, {
+ {0, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
+ });
+
+ VkImageCreateInfo ici = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+ nullptr,
+ VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR,
+ VK_IMAGE_TYPE_3D,
+ VK_FORMAT_R8G8B8A8_UNORM,
+ {32, 32, 32},
+ 1,
+ 1,
+ VK_SAMPLE_COUNT_1_BIT,
+ VK_IMAGE_TILING_OPTIMAL,
+ VK_IMAGE_USAGE_SAMPLED_BIT,
+ VK_SHARING_MODE_EXCLUSIVE,
+ 0,
+ nullptr,
+ VK_IMAGE_LAYOUT_UNDEFINED};
+ VkImageObj image(m_device);
+ image.init(&ici);
+ ASSERT_TRUE(image.initialized());
+
+ VkImageViewCreateInfo ivci = {
+ VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
+ nullptr,
+ 0,
+ image.handle(),
+ VK_IMAGE_VIEW_TYPE_2D,
+ VK_FORMAT_R8G8B8A8_UNORM,
+ {VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY,
+ VK_COMPONENT_SWIZZLE_IDENTITY},
+ {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1},
+ };
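+ // A 2D view selecting a single slice of the 3D image; using such a view in a descriptor is what KHR_maintenance1 disallows.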
+ VkImageView view;
+ err = vkCreateImageView(m_device->device(), &ivci, nullptr, &view);
+ ASSERT_VK_SUCCESS(err);
+
+ // Meat of the test.
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDescriptorImageInfo-imageView-00343");
+
+ VkDescriptorImageInfo dii = {VK_NULL_HANDLE, view, VK_IMAGE_LAYOUT_GENERAL};
+ VkWriteDescriptorSet write = {VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET, nullptr, set.set_, 0, 0, 1,
+ VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, &dii, nullptr, nullptr};
+ vkUpdateDescriptorSets(m_device->device(), 1, &write, 0, nullptr);
+
+ m_errorMonitor->VerifyFound();
+
+ vkDestroyImageView(m_device->device(), view, nullptr);
+}
+
+TEST_F(VkLayerTest, MissingClearAttachment) {
+ TEST_DESCRIPTION("Points to a wrong colorAttachment index in a VkClearAttachment structure passed to vkCmdClearAttachments");
+ ASSERT_NO_FATAL_FAILURE(Init());
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearAttachments-aspectMask-02501");
+
+ VKTriangleTest(BsoFailCmdClearAttachments);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkPositiveLayerTest, ConfirmNoVLErrorWhenVkCmdClearAttachmentsCalledInSecondaryCB) {
+ TEST_DESCRIPTION(
+ "Verify that the validation layers do not report an error when vkCmdClearAttachments is called in a secondary command "
+ "buffer and the primary command buffer begins a render pass before executing that secondary command buffer.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkCommandBufferObj secondary(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
+
+ VkCommandBufferBeginInfo info = {};
+ VkCommandBufferInheritanceInfo hinfo = {};
+ info.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT | VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT;
+ info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+ info.pInheritanceInfo = &hinfo;
+ hinfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
+ hinfo.pNext = NULL;
+ hinfo.renderPass = renderPass();
+ hinfo.subpass = 0;
+ hinfo.framebuffer = m_framebuffer;
+ hinfo.occlusionQueryEnable = VK_FALSE;
+ hinfo.queryFlags = 0;
+ hinfo.pipelineStatistics = 0;
+
+ secondary.begin(&info);
+ VkClearAttachment color_attachment;
+ color_attachment.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ color_attachment.clearValue.color.float32[0] = 0.0;
+ color_attachment.clearValue.color.float32[1] = 0.0;
+ color_attachment.clearValue.color.float32[2] = 0.0;
+ color_attachment.clearValue.color.float32[3] = 0.0;
+ color_attachment.colorAttachment = 0;
+ VkClearRect clear_rect = {{{0, 0}, {(uint32_t)m_width, (uint32_t)m_height}}, 0, 1};
+ vkCmdClearAttachments(secondary.handle(), 1, &color_attachment, 1, &clear_rect);
+ secondary.end();
+ // Modify clear rect here to verify that it doesn't cause validation error
+ clear_rect = {{{0, 0}, {99999999, 99999999}}, 0, 0};
+
+ m_commandBuffer->begin();
+ vkCmdBeginRenderPass(m_commandBuffer->handle(), &m_renderPassBeginInfo, VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS);
+ vkCmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary.handle());
+ vkCmdEndRenderPass(m_commandBuffer->handle());
+ m_commandBuffer->end();
+ m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkLayerTest, CmdClearAttachmentTests) {
+ TEST_DESCRIPTION("Various tests for validating usage of vkCmdClearAttachments");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ OneOffDescriptorSet ds(m_device, {
+ {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+ });
+
+ VkPipelineMultisampleStateCreateInfo pipe_ms_state_ci = {};
+ pipe_ms_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
+ pipe_ms_state_ci.pNext = NULL;
+ pipe_ms_state_ci.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
+ pipe_ms_state_ci.sampleShadingEnable = 0;
+ pipe_ms_state_ci.minSampleShading = 1.0;
+ pipe_ms_state_ci.pSampleMask = NULL;
+
+ const VkPipelineLayoutObj pipeline_layout(m_device, {&ds.layout_});
+
+ VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+ // We shouldn't need a fragment shader but add it to be able to run
+ // on more devices
+ VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+ pipe.AddDefaultColorAttachment();
+ pipe.SetMSAA(&pipe_ms_state_ci);
+ pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());
+
+ m_commandBuffer->begin();
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
+ // The main thing this test cares about is that the VkImage being cleared
+ // matches the framebuffer's color attachment.
+ // Other parameters are given benign values to keep the driver and parameter checker happy.
+ VkClearAttachment color_attachment;
+ color_attachment.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ color_attachment.clearValue.color.float32[0] = 1.0;
+ color_attachment.clearValue.color.float32[1] = 1.0;
+ color_attachment.clearValue.color.float32[2] = 1.0;
+ color_attachment.clearValue.color.float32[3] = 1.0;
+ color_attachment.colorAttachment = 0;
+ VkClearRect clear_rect = {{{0, 0}, {(uint32_t)m_width, (uint32_t)m_height}}, 0, 1};
+
+ // Call for full-sized FB Color attachment prior to issuing a Draw
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
+ "vkCmdClearAttachments() issued on command buffer object ");
+ vkCmdClearAttachments(m_commandBuffer->handle(), 1, &color_attachment, 1, &clear_rect);
+ m_errorMonitor->VerifyFound();
+
+ clear_rect.rect.extent.width = renderPassBeginInfo().renderArea.extent.width + 4;
+ clear_rect.rect.extent.height = clear_rect.rect.extent.height / 2;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearAttachments-pRects-00016");
+ vkCmdClearAttachments(m_commandBuffer->handle(), 1, &color_attachment, 1, &clear_rect);
+ m_errorMonitor->VerifyFound();
+
+ // baseLayer >= view layers
+ clear_rect.rect.extent.width = (uint32_t)m_width;
+ clear_rect.baseArrayLayer = 1;
+ clear_rect.layerCount = 0;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearAttachments-pRects-00017");
+ vkCmdClearAttachments(m_commandBuffer->handle(), 1, &color_attachment, 1, &clear_rect);
+ m_errorMonitor->VerifyFound();
+
+ // baseLayer + layerCount > view layers
+ clear_rect.rect.extent.width = (uint32_t)m_width;
+ clear_rect.baseArrayLayer = 0;
+ clear_rect.layerCount = 2;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearAttachments-pRects-00017");
+ vkCmdClearAttachments(m_commandBuffer->handle(), 1, &color_attachment, 1, &clear_rect);
+ m_errorMonitor->VerifyFound();
+
+ m_commandBuffer->EndRenderPass();
+ m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, VtxBufferBadIndex) {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
+ "but no vertex buffers are attached to this Pipeline State Object");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitViewport());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ OneOffDescriptorSet ds(m_device, {
+ {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+ });
+
+ VkPipelineMultisampleStateCreateInfo pipe_ms_state_ci = {};
+ pipe_ms_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
+ pipe_ms_state_ci.pNext = NULL;
+ pipe_ms_state_ci.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
+ pipe_ms_state_ci.sampleShadingEnable = 0;
+ pipe_ms_state_ci.minSampleShading = 1.0;
+ pipe_ms_state_ci.pSampleMask = NULL;
+
+ const VkPipelineLayoutObj pipeline_layout(m_device, {&ds.layout_});
+
+ VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this); // We shouldn't need a fragment shader
+ // but add it to be able to run on more devices
+ VkPipelineObj pipe(m_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+ pipe.AddDefaultColorAttachment();
+ pipe.SetMSAA(&pipe_ms_state_ci);
+ pipe.SetViewport(m_viewports);
+ pipe.SetScissor(m_scissors);
+ pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());
+
+ m_commandBuffer->begin();
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+ vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+ // Don't care about actual data, just need to get to draw to flag error
+ static const float vbo_data[3] = {1.f, 0.f, 1.f};
+ VkConstantBufferObj vbo(m_device, sizeof(vbo_data), (const void *)&vbo_data, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT);
+ m_commandBuffer->BindVertexBuffer(&vbo, (VkDeviceSize)0, 1); // VBO idx 1, but no VBO in PSO
+ m_commandBuffer->Draw(1, 0, 0, 0);
+
+ m_errorMonitor->VerifyFound();
+
+ m_commandBuffer->EndRenderPass();
+ m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, InvalidQueryPoolCreate) {
+ TEST_DESCRIPTION("Attempt to create a query pool for PIPELINE_STATISTICS without enabling pipeline stats for the device.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ vk_testing::QueueCreateInfoArray queue_info(m_device->queue_props);
+
+ VkDevice local_device;
+ VkDeviceCreateInfo device_create_info = {};
+ auto features = m_device->phy().features();
+ // Intentionally disable pipeline stats
+ features.pipelineStatisticsQuery = VK_FALSE;
+ device_create_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
+ device_create_info.pNext = NULL;
+ device_create_info.queueCreateInfoCount = queue_info.size();
+ device_create_info.pQueueCreateInfos = queue_info.data();
+ device_create_info.enabledLayerCount = 0;
+ device_create_info.ppEnabledLayerNames = NULL;
+ device_create_info.pEnabledFeatures = &features;
+ VkResult err = vkCreateDevice(gpu(), &device_create_info, nullptr, &local_device);
+ ASSERT_VK_SUCCESS(err);
+
+ VkQueryPoolCreateInfo qpci{};
+ qpci.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
+ qpci.queryType = VK_QUERY_TYPE_PIPELINE_STATISTICS;
+ qpci.queryCount = 1;
+ VkQueryPool query_pool;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkQueryPoolCreateInfo-queryType-00791");
+ vkCreateQueryPool(local_device, &qpci, nullptr, &query_pool);
+ m_errorMonitor->VerifyFound();
+
+ vkDestroyDevice(local_device, nullptr);
+}
+
+TEST_F(VkLayerTest, UnclosedQuery) {
+ TEST_DESCRIPTION("End a command buffer with a query still in progress.");
+
+ const char *invalid_query = "Ending command buffer with in progress query: queryPool 0x";
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ VkEvent event;
+ VkEventCreateInfo event_create_info{};
+ event_create_info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
+ vkCreateEvent(m_device->device(), &event_create_info, nullptr, &event);
+
+ VkQueue queue = VK_NULL_HANDLE;
+ vkGetDeviceQueue(m_device->device(), m_device->graphics_queue_node_index_, 0, &queue);
+
+ m_commandBuffer->begin();
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, invalid_query);
+
+ VkQueryPool query_pool;
+ VkQueryPoolCreateInfo query_pool_create_info = {};
+ query_pool_create_info.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
+ query_pool_create_info.queryType = VK_QUERY_TYPE_OCCLUSION;
+ query_pool_create_info.queryCount = 1;
+ vkCreateQueryPool(m_device->device(), &query_pool_create_info, nullptr, &query_pool);
+
+ vkCmdResetQueryPool(m_commandBuffer->handle(), query_pool, 0 /*startQuery*/, 1 /*queryCount*/);
+ vkCmdBeginQuery(m_commandBuffer->handle(), query_pool, 0, 0);
+
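+ // End the command buffer without calling vkCmdEndQuery, leaving the query still in progress.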
+ vkEndCommandBuffer(m_commandBuffer->handle());
+ m_errorMonitor->VerifyFound();
+
+ vkDestroyQueryPool(m_device->device(), query_pool, nullptr);
+ vkDestroyEvent(m_device->device(), event, nullptr);
+}
+
+TEST_F(VkLayerTest, QueryPreciseBit) {
+ TEST_DESCRIPTION("Check for correct Query Precise Bit circumstances.");
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ // These tests require that the device support pipeline statistics query
+ VkPhysicalDeviceFeatures device_features = {};
+ ASSERT_NO_FATAL_FAILURE(GetPhysicalDeviceFeatures(&device_features));
+ if (VK_TRUE != device_features.pipelineStatisticsQuery) {
+ printf("%s Test requires unsupported pipelineStatisticsQuery feature. Skipped.\n", kSkipPrefix);
+ return;
+ }
+
+ std::vector<const char *> device_extension_names;
+ auto features = m_device->phy().features();
+
+ // Test for precise bit when query type is not OCCLUSION
+ if (features.occlusionQueryPrecise) {
+ VkEvent event;
+ VkEventCreateInfo event_create_info{};
+ event_create_info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
+ vkCreateEvent(m_device->handle(), &event_create_info, nullptr, &event);
+
+ m_commandBuffer->begin();
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBeginQuery-queryType-00800");
+
+ VkQueryPool query_pool;
+ VkQueryPoolCreateInfo query_pool_create_info = {};
+ query_pool_create_info.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
+ query_pool_create_info.queryType = VK_QUERY_TYPE_PIPELINE_STATISTICS;
+ query_pool_create_info.queryCount = 1;
+ vkCreateQueryPool(m_device->handle(), &query_pool_create_info, nullptr, &query_pool);
+
+ vkCmdResetQueryPool(m_commandBuffer->handle(), query_pool, 0, 1);
+ vkCmdBeginQuery(m_commandBuffer->handle(), query_pool, 0, VK_QUERY_CONTROL_PRECISE_BIT);
+ vkCmdEndQuery(m_commandBuffer->handle(), query_pool, 0);
+ m_errorMonitor->VerifyFound();
+
+ m_commandBuffer->end();
+ vkDestroyQueryPool(m_device->handle(), query_pool, nullptr);
+ vkDestroyEvent(m_device->handle(), event, nullptr);
+ }
+
+ // Test for precise bit when precise feature is not available
+ features.occlusionQueryPrecise = false;
+ VkDeviceObj test_device(0, gpu(), device_extension_names, &features);
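+ // A second device is created with occlusionQueryPrecise disabled so the precise bit on an occlusion query can be rejected.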
+
+ VkCommandPoolCreateInfo pool_create_info{};
+ pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+ pool_create_info.queueFamilyIndex = test_device.graphics_queue_node_index_;
+
+ VkCommandPool command_pool;
+ vkCreateCommandPool(test_device.handle(), &pool_create_info, nullptr, &command_pool);
+
+ VkCommandBufferAllocateInfo cmd = {};
+ cmd.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+ cmd.pNext = NULL;
+ cmd.commandPool = command_pool;
+ cmd.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+ cmd.commandBufferCount = 1;
+
+ VkCommandBuffer cmd_buffer;
+ VkResult err = vkAllocateCommandBuffers(test_device.handle(), &cmd, &cmd_buffer);
+ ASSERT_VK_SUCCESS(err);
+
+ VkEvent event;
+ VkEventCreateInfo event_create_info{};
+ event_create_info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
+ vkCreateEvent(test_device.handle(), &event_create_info, nullptr, &event);
+
+ VkCommandBufferBeginInfo begin_info = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, nullptr,
+ VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT, nullptr};
+
+ vkBeginCommandBuffer(cmd_buffer, &begin_info);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBeginQuery-queryType-00800");
+
+ VkQueryPool query_pool;
+ VkQueryPoolCreateInfo query_pool_create_info = {};
+ query_pool_create_info.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
+ query_pool_create_info.queryType = VK_QUERY_TYPE_OCCLUSION;
+ query_pool_create_info.queryCount = 1;
+ vkCreateQueryPool(test_device.handle(), &query_pool_create_info, nullptr, &query_pool);
+
+ vkCmdResetQueryPool(cmd_buffer, query_pool, 0, 1);
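+    // Using VK_QUERY_CONTROL_PRECISE_BIT on an occlusion query should be rejected when occlusionQueryPrecise is disabled.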
+ vkCmdBeginQuery(cmd_buffer, query_pool, 0, VK_QUERY_CONTROL_PRECISE_BIT);
+ vkCmdEndQuery(cmd_buffer, query_pool, 0);
+ m_errorMonitor->VerifyFound();
+
+ vkEndCommandBuffer(cmd_buffer);
+ vkDestroyQueryPool(test_device.handle(), query_pool, nullptr);
+ vkDestroyEvent(test_device.handle(), event, nullptr);
+ vkDestroyCommandPool(test_device.handle(), command_pool, nullptr);
+}
+
+TEST_F(VkLayerTest, VertexBufferInvalid) {
+ TEST_DESCRIPTION(
+        "Submit a command buffer using a deleted vertex buffer, delete a buffer twice, use an invalid offset for each buffer type, "
+ "and attempt to bind a null buffer");
+
+ const char *deleted_buffer_in_command_buffer = "Cannot submit cmd buffer using deleted buffer ";
+ const char *invalid_offset_message = "VUID-vkBindBufferMemory-memoryOffset-01036";
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitViewport());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkPipelineMultisampleStateCreateInfo pipe_ms_state_ci = {};
+ pipe_ms_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
+ pipe_ms_state_ci.pNext = NULL;
+ pipe_ms_state_ci.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
+    pipe_ms_state_ci.sampleShadingEnable = VK_FALSE;
+    pipe_ms_state_ci.minSampleShading = 1.0f;
+ pipe_ms_state_ci.pSampleMask = nullptr;
+
+ const VkPipelineLayoutObj pipeline_layout(m_device);
+
+ VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+ VkPipelineObj pipe(m_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+ pipe.AddDefaultColorAttachment();
+ pipe.SetMSAA(&pipe_ms_state_ci);
+ pipe.SetViewport(m_viewports);
+ pipe.SetScissor(m_scissors);
+ pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());
+
+ m_commandBuffer->begin();
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+ vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+
+ {
+ // Create and bind a vertex buffer in a reduced scope, which will cause
+ // it to be deleted upon leaving this scope
+ const float vbo_data[3] = {1.f, 0.f, 1.f};
+        VkVerticesObj draw_vertices(m_device, 1, 1, sizeof(vbo_data[0]), sizeof(vbo_data) / sizeof(vbo_data[0]), vbo_data);
+        draw_vertices.BindVertexBuffers(m_commandBuffer->handle());
+        draw_vertices.AddVertexInputToPipe(pipe);
+ }
+
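+    // The draw below references the vertex buffer that was just destroyed, so submitting this command buffer should fail.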
+ m_commandBuffer->Draw(1, 0, 0, 0);
+
+ m_commandBuffer->EndRenderPass();
+ m_commandBuffer->end();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, deleted_buffer_in_command_buffer);
+ m_commandBuffer->QueueCommandBuffer(false);
+ m_errorMonitor->VerifyFound();
+
+ {
+ // Create and bind a vertex buffer in a reduced scope, and delete it
+ // twice, the second through the destructor
+ VkBufferTest buffer_test(m_device, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT, VkBufferTest::eDoubleDelete);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkDestroyBuffer-buffer-parameter");
+ buffer_test.TestDoubleDestroy();
+ }
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetUnexpectedError("value of pCreateInfo->usage must not be 0");
+ if (VkBufferTest::GetTestConditionValid(m_device, VkBufferTest::eInvalidMemoryOffset)) {
+ // Create and bind a memory buffer with an invalid offset.
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, invalid_offset_message);
+ m_errorMonitor->SetUnexpectedError(
+ "If buffer was created with the VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT or VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT, "
+ "memoryOffset must be a multiple of VkPhysicalDeviceLimits::minTexelBufferOffsetAlignment");
+ VkBufferTest buffer_test(m_device, VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT, VkBufferTest::eInvalidMemoryOffset);
+ (void)buffer_test;
+ m_errorMonitor->VerifyFound();
+ }
+
+ {
+ // Attempt to bind a null buffer.
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "vkBindBufferMemory: required parameter buffer specified as VK_NULL_HANDLE");
+ VkBufferTest buffer_test(m_device, 0, VkBufferTest::eBindNullBuffer);
+ (void)buffer_test;
+ m_errorMonitor->VerifyFound();
+ }
+
+ {
+ // Attempt to bind a fake buffer.
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindBufferMemory-buffer-parameter");
+ VkBufferTest buffer_test(m_device, 0, VkBufferTest::eBindFakeBuffer);
+ (void)buffer_test;
+ m_errorMonitor->VerifyFound();
+ }
+
+ {
+ // Attempt to use an invalid handle to delete a buffer.
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkFreeMemory-memory-parameter");
+ VkBufferTest buffer_test(m_device, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT, VkBufferTest::eFreeInvalidHandle);
+ (void)buffer_test;
+ }
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, BadVertexBufferOffset) {
+ TEST_DESCRIPTION("Submit an offset past the end of a vertex buffer");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+ static const float vbo_data[3] = {1.f, 0.f, 1.f};
+ VkConstantBufferObj vbo(m_device, sizeof(vbo_data), (const void *)&vbo_data, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT);
+ m_commandBuffer->begin();
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBindVertexBuffers-pOffsets-00626");
+ m_commandBuffer->BindVertexBuffer(&vbo, (VkDeviceSize)(3 * sizeof(float)), 1); // Offset at the end of the buffer
+ m_errorMonitor->VerifyFound();
+
+ m_commandBuffer->EndRenderPass();
+ m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, InvalidVertexAttributeAlignment) {
+    TEST_DESCRIPTION("Check for proper alignment of attribAddress which depends on a bound pipeline and on a bound vertex buffer");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitViewport());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ const VkPipelineLayoutObj pipeline_layout(m_device);
+
+ struct VboEntry {
+ uint16_t input0[2];
+ uint32_t input1;
+ float input2[4];
+ };
+
+ const unsigned vbo_entry_count = 3;
+ const VboEntry vbo_data[vbo_entry_count] = {};
+
+ VkConstantBufferObj vbo(m_device, static_cast<int>(sizeof(VboEntry) * vbo_entry_count),
+ reinterpret_cast<const void *>(vbo_data), VK_BUFFER_USAGE_VERTEX_BUFFER_BIT);
+
+ VkVertexInputBindingDescription input_binding;
+ input_binding.binding = 0;
+ input_binding.stride = sizeof(VboEntry);
+ input_binding.inputRate = VK_VERTEX_INPUT_RATE_VERTEX;
+
+ VkVertexInputAttributeDescription input_attribs[3];
+
+ input_attribs[0].binding = 0;
+ // Location switch between attrib[0] and attrib[1] is intentional
+ input_attribs[0].location = 1;
+ input_attribs[0].format = VK_FORMAT_A8B8G8R8_UNORM_PACK32;
+ input_attribs[0].offset = offsetof(VboEntry, input1);
+
+ input_attribs[1].binding = 0;
+ input_attribs[1].location = 0;
+ input_attribs[1].format = VK_FORMAT_R16G16_UNORM;
+ input_attribs[1].offset = offsetof(VboEntry, input0);
+
+ input_attribs[2].binding = 0;
+ input_attribs[2].location = 2;
+ input_attribs[2].format = VK_FORMAT_R32G32B32A32_SFLOAT;
+ input_attribs[2].offset = offsetof(VboEntry, input2);
+
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location = 0) in vec2 input0;"
+ "layout(location = 1) in vec4 input1;"
+ "layout(location = 2) in vec4 input2;"
+ "\n"
+ "void main(){\n"
+ " gl_Position = input1 + input2;\n"
+ " gl_Position.xy += input0;\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) out vec4 color;\n"
+ "void main(){\n"
+ " color = vec4(1);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe1(m_device);
+ pipe1.AddDefaultColorAttachment();
+ pipe1.AddShader(&vs);
+ pipe1.AddShader(&fs);
+ pipe1.AddVertexInputBindings(&input_binding, 1);
+ pipe1.AddVertexInputAttribs(&input_attribs[0], 3);
+ pipe1.SetViewport(m_viewports);
+ pipe1.SetScissor(m_scissors);
+ pipe1.CreateVKPipeline(pipeline_layout.handle(), renderPass());
+
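+    // Recreate the pipeline with a deliberately invalid stride; the later draw with pipe2 expects alignment errors for
+    // attributes 0 and 2, while attribute 1 stays aligned.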
+ input_binding.stride = 6;
+
+ VkPipelineObj pipe2(m_device);
+ pipe2.AddDefaultColorAttachment();
+ pipe2.AddShader(&vs);
+ pipe2.AddShader(&fs);
+ pipe2.AddVertexInputBindings(&input_binding, 1);
+ pipe2.AddVertexInputAttribs(&input_attribs[0], 3);
+ pipe2.SetViewport(m_viewports);
+ pipe2.SetScissor(m_scissors);
+ pipe2.CreateVKPipeline(pipeline_layout.handle(), renderPass());
+
+ m_commandBuffer->begin();
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
+ // Test with invalid buffer offset
+ VkDeviceSize offset = 1;
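+    // A one-byte binding offset misaligns every attribute's address, so all three attributes are expected to be flagged.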
+ vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe1.handle());
+ vkCmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Invalid attribAddress alignment for vertex attribute 0");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Invalid attribAddress alignment for vertex attribute 1");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Invalid attribAddress alignment for vertex attribute 2");
+ m_commandBuffer->Draw(1, 0, 0, 0);
+ m_errorMonitor->VerifyFound();
+
+ // Test with invalid buffer stride
+ offset = 0;
+ vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe2.handle());
+ vkCmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Invalid attribAddress alignment for vertex attribute 0");
+ // Attribute[1] is aligned properly even with a wrong stride
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Invalid attribAddress alignment for vertex attribute 2");
+ m_commandBuffer->Draw(1, 0, 0, 0);
+ m_errorMonitor->VerifyFound();
+
+ m_commandBuffer->EndRenderPass();
+ m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, InvalidVertexBindingDescriptions) {
+ TEST_DESCRIPTION(
+        "Attempt to create a graphics pipeline where: "
+        "1) count of vertex bindings exceeds device's maxVertexInputBindings limit, "
+        "2) requested bindings include a duplicate binding value");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ const VkPipelineLayoutObj pipeline_layout(m_device);
+
+ const uint32_t binding_count = m_device->props.limits.maxVertexInputBindings + 1;
+
+ std::vector<VkVertexInputBindingDescription> input_bindings(binding_count);
+ for (uint32_t i = 0; i < binding_count; ++i) {
+ input_bindings[i].binding = i;
+ input_bindings[i].stride = 4;
+ input_bindings[i].inputRate = VK_VERTEX_INPUT_RATE_VERTEX;
+ }
+ // Let the last binding description use same binding as the first one
+ input_bindings[binding_count - 1].binding = 0;
+
+ VkVertexInputAttributeDescription input_attrib;
+ input_attrib.binding = 0;
+ input_attrib.location = 0;
+ input_attrib.format = VK_FORMAT_R32G32B32_SFLOAT;
+ input_attrib.offset = 0;
+
+ VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddDefaultColorAttachment();
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+ pipe.AddVertexInputBindings(input_bindings.data(), binding_count);
+ pipe.AddVertexInputAttribs(&input_attrib, 1);
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkPipelineVertexInputStateCreateInfo-vertexBindingDescriptionCount-00613");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkPipelineVertexInputStateCreateInfo-pVertexBindingDescriptions-00616");
+ pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, InvalidVertexAttributeDescriptions) {
+ TEST_DESCRIPTION(
+        "Attempt to create a graphics pipeline where: "
+        "1) count of vertex attributes exceeds device's maxVertexInputAttributes limit, "
+        "2) requested locations include a duplicate location value, "
+        "3) binding used by one attribute is not defined by a binding description");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ const VkPipelineLayoutObj pipeline_layout(m_device);
+
+ VkVertexInputBindingDescription input_binding;
+ input_binding.binding = 0;
+ input_binding.stride = 4;
+ input_binding.inputRate = VK_VERTEX_INPUT_RATE_VERTEX;
+
+ const uint32_t attribute_count = m_device->props.limits.maxVertexInputAttributes + 1;
+ std::vector<VkVertexInputAttributeDescription> input_attribs(attribute_count);
+ for (uint32_t i = 0; i < attribute_count; ++i) {
+ input_attribs[i].binding = 0;
+ input_attribs[i].location = i;
+ input_attribs[i].format = VK_FORMAT_R32G32B32_SFLOAT;
+ input_attribs[i].offset = 0;
+ }
+ // Let the last input_attribs description use same location as the first one
+ input_attribs[attribute_count - 1].location = 0;
+ // Let the last input_attribs description use binding which is not defined
+ input_attribs[attribute_count - 1].binding = 1;
+
+ VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddDefaultColorAttachment();
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+ pipe.AddVertexInputBindings(&input_binding, 1);
+ pipe.AddVertexInputAttribs(input_attribs.data(), attribute_count);
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkPipelineVertexInputStateCreateInfo-vertexAttributeDescriptionCount-00614");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineVertexInputStateCreateInfo-binding-00615");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkPipelineVertexInputStateCreateInfo-pVertexAttributeDescriptions-00617");
+ pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());
+ m_errorMonitor->VerifyFound();
+}
+
+// INVALID_IMAGE_LAYOUT tests (one other case is hit by MapMemWithoutHostVisibleBit and not here)
+TEST_F(VkLayerTest, InvalidImageLayout) {
+ TEST_DESCRIPTION(
+ "Hit all possible validation checks associated with the UNASSIGNED-CoreValidation-DrawState-InvalidImageLayout error. "
+ "Generally these involve having images in the wrong layout when they're copied or transitioned.");
+ // 3 in ValidateCmdBufImageLayouts
+ // * -1 Attempt to submit cmd buf w/ deleted image
+ // * -2 Cmd buf submit of image w/ layout not matching first use w/ subresource
+ // * -3 Cmd buf submit of image w/ layout not matching first use w/o subresource
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ auto depth_format = FindSupportedDepthStencilFormat(gpu());
+ if (!depth_format) {
+ printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
+ return;
+ }
+ // Create src & dst images to use for copy operations
+ VkImage src_image;
+ VkImage dst_image;
+ VkImage depth_image;
+
+ const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
+ const int32_t tex_width = 32;
+ const int32_t tex_height = 32;
+
+ VkImageCreateInfo image_create_info = {};
+ image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ image_create_info.pNext = NULL;
+ image_create_info.imageType = VK_IMAGE_TYPE_2D;
+ image_create_info.format = tex_format;
+ image_create_info.extent.width = tex_width;
+ image_create_info.extent.height = tex_height;
+ image_create_info.extent.depth = 1;
+ image_create_info.mipLevels = 1;
+ image_create_info.arrayLayers = 4;
+ image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+ image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+ image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
+ image_create_info.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+ image_create_info.flags = 0;
+
+ VkResult err = vkCreateImage(m_device->device(), &image_create_info, NULL, &src_image);
+ ASSERT_VK_SUCCESS(err);
+ image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+ err = vkCreateImage(m_device->device(), &image_create_info, NULL, &dst_image);
+ ASSERT_VK_SUCCESS(err);
+ image_create_info.format = VK_FORMAT_D16_UNORM;
+ image_create_info.usage |= VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
+ err = vkCreateImage(m_device->device(), &image_create_info, NULL, &depth_image);
+ ASSERT_VK_SUCCESS(err);
+
+ // Allocate memory
+ VkMemoryRequirements img_mem_reqs = {};
+ VkMemoryAllocateInfo mem_alloc = {};
+ VkDeviceMemory src_image_mem, dst_image_mem, depth_image_mem;
+ mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ mem_alloc.pNext = NULL;
+ mem_alloc.allocationSize = 0;
+ mem_alloc.memoryTypeIndex = 0;
+
+ vkGetImageMemoryRequirements(m_device->device(), src_image, &img_mem_reqs);
+ mem_alloc.allocationSize = img_mem_reqs.size;
+ bool pass = m_device->phy().set_memory_type(img_mem_reqs.memoryTypeBits, &mem_alloc, 0);
+ ASSERT_TRUE(pass);
+ err = vkAllocateMemory(m_device->device(), &mem_alloc, NULL, &src_image_mem);
+ ASSERT_VK_SUCCESS(err);
+
+ vkGetImageMemoryRequirements(m_device->device(), dst_image, &img_mem_reqs);
+ mem_alloc.allocationSize = img_mem_reqs.size;
+ pass = m_device->phy().set_memory_type(img_mem_reqs.memoryTypeBits, &mem_alloc, 0);
+    ASSERT_TRUE(pass);
+ err = vkAllocateMemory(m_device->device(), &mem_alloc, NULL, &dst_image_mem);
+ ASSERT_VK_SUCCESS(err);
+
+ vkGetImageMemoryRequirements(m_device->device(), depth_image, &img_mem_reqs);
+ mem_alloc.allocationSize = img_mem_reqs.size;
+ pass = m_device->phy().set_memory_type(img_mem_reqs.memoryTypeBits, &mem_alloc, 0);
+    ASSERT_TRUE(pass);
+ err = vkAllocateMemory(m_device->device(), &mem_alloc, NULL, &depth_image_mem);
+ ASSERT_VK_SUCCESS(err);
+
+ err = vkBindImageMemory(m_device->device(), src_image, src_image_mem, 0);
+ ASSERT_VK_SUCCESS(err);
+ err = vkBindImageMemory(m_device->device(), dst_image, dst_image_mem, 0);
+ ASSERT_VK_SUCCESS(err);
+ err = vkBindImageMemory(m_device->device(), depth_image, depth_image_mem, 0);
+ ASSERT_VK_SUCCESS(err);
+
+ m_commandBuffer->begin();
+ VkImageCopy copy_region;
+ copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ copy_region.srcSubresource.mipLevel = 0;
+ copy_region.srcSubresource.baseArrayLayer = 0;
+ copy_region.srcSubresource.layerCount = 1;
+ copy_region.srcOffset.x = 0;
+ copy_region.srcOffset.y = 0;
+ copy_region.srcOffset.z = 0;
+ copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ copy_region.dstSubresource.mipLevel = 0;
+ copy_region.dstSubresource.baseArrayLayer = 0;
+ copy_region.dstSubresource.layerCount = 1;
+ copy_region.dstOffset.x = 0;
+ copy_region.dstOffset.y = 0;
+ copy_region.dstOffset.z = 0;
+ copy_region.extent.width = 1;
+ copy_region.extent.height = 1;
+ copy_region.extent.depth = 1;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
+ "layout should be VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL instead of GENERAL.");
+ m_errorMonitor->SetUnexpectedError("layout should be VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL instead of GENERAL.");
+
+ m_commandBuffer->CopyImage(src_image, VK_IMAGE_LAYOUT_GENERAL, dst_image, VK_IMAGE_LAYOUT_GENERAL, 1, &copy_region);
+ m_errorMonitor->VerifyFound();
+    // The first call hits the expected WARNING and skips the call down the chain, so call a second time to actually reach the
+    // driver and update the layer's layout state
+ m_errorMonitor->SetUnexpectedError("layout should be VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL instead of GENERAL.");
+ m_errorMonitor->SetUnexpectedError("layout should be VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL instead of GENERAL.");
+ m_commandBuffer->CopyImage(src_image, VK_IMAGE_LAYOUT_GENERAL, dst_image, VK_IMAGE_LAYOUT_GENERAL, 1, &copy_region);
+ // Now cause error due to src image layout changing
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-srcImageLayout-00128");
+ m_errorMonitor->SetUnexpectedError("is VK_IMAGE_LAYOUT_UNDEFINED but can only be VK_IMAGE_LAYOUT");
+ m_commandBuffer->CopyImage(src_image, VK_IMAGE_LAYOUT_UNDEFINED, dst_image, VK_IMAGE_LAYOUT_GENERAL, 1, &copy_region);
+ m_errorMonitor->VerifyFound();
+ // Final src error is due to bad layout type
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-srcImageLayout-00129");
+ m_errorMonitor->SetUnexpectedError(
+ "with specific layout VK_IMAGE_LAYOUT_UNDEFINED that doesn't match the actual current layout VK_IMAGE_LAYOUT_GENERAL.");
+ m_commandBuffer->CopyImage(src_image, VK_IMAGE_LAYOUT_UNDEFINED, dst_image, VK_IMAGE_LAYOUT_GENERAL, 1, &copy_region);
+ m_errorMonitor->VerifyFound();
+ // Now verify same checks for dst
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
+ "layout should be VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL instead of GENERAL.");
+ m_errorMonitor->SetUnexpectedError("layout should be VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL instead of GENERAL.");
+ m_commandBuffer->CopyImage(src_image, VK_IMAGE_LAYOUT_GENERAL, dst_image, VK_IMAGE_LAYOUT_GENERAL, 1, &copy_region);
+ m_errorMonitor->VerifyFound();
+    // Now cause error due to dst image layout changing
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-dstImageLayout-00133");
+ m_errorMonitor->SetUnexpectedError(
+ "is VK_IMAGE_LAYOUT_UNDEFINED but can only be VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL.");
+ m_commandBuffer->CopyImage(src_image, VK_IMAGE_LAYOUT_GENERAL, dst_image, VK_IMAGE_LAYOUT_UNDEFINED, 1, &copy_region);
+ m_errorMonitor->VerifyFound();
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-dstImageLayout-00134");
+ m_errorMonitor->SetUnexpectedError(
+ "with specific layout VK_IMAGE_LAYOUT_UNDEFINED that doesn't match the actual current layout VK_IMAGE_LAYOUT_GENERAL.");
+ m_commandBuffer->CopyImage(src_image, VK_IMAGE_LAYOUT_GENERAL, dst_image, VK_IMAGE_LAYOUT_UNDEFINED, 1, &copy_region);
+ m_errorMonitor->VerifyFound();
+
+ // Convert dst and depth images to TRANSFER_DST for subsequent tests
+ VkImageMemoryBarrier transfer_dst_image_barrier[1] = {};
+ transfer_dst_image_barrier[0].sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+ transfer_dst_image_barrier[0].oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+ transfer_dst_image_barrier[0].newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
+ transfer_dst_image_barrier[0].srcAccessMask = 0;
+ transfer_dst_image_barrier[0].dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
+ transfer_dst_image_barrier[0].image = dst_image;
+ transfer_dst_image_barrier[0].subresourceRange.layerCount = image_create_info.arrayLayers;
+ transfer_dst_image_barrier[0].subresourceRange.levelCount = image_create_info.mipLevels;
+ transfer_dst_image_barrier[0].subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
+ NULL, 0, NULL, 1, transfer_dst_image_barrier);
+ transfer_dst_image_barrier[0].image = depth_image;
+ transfer_dst_image_barrier[0].subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
+ NULL, 0, NULL, 1, transfer_dst_image_barrier);
+
+ // Cause errors due to clearing with invalid image layouts
+ VkClearColorValue color_clear_value = {};
+ VkImageSubresourceRange clear_range;
+ clear_range.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ clear_range.baseMipLevel = 0;
+ clear_range.baseArrayLayer = 0;
+ clear_range.layerCount = 1;
+ clear_range.levelCount = 1;
+
+ // Fail due to explicitly prohibited layout for color clear (only GENERAL and TRANSFER_DST are permitted).
+ // Since the image is currently not in UNDEFINED layout, this will emit two errors.
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearColorImage-imageLayout-00005");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearColorImage-imageLayout-00004");
+ m_commandBuffer->ClearColorImage(dst_image, VK_IMAGE_LAYOUT_UNDEFINED, &color_clear_value, 1, &clear_range);
+ m_errorMonitor->VerifyFound();
+ // Fail due to provided layout not matching actual current layout for color clear.
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearColorImage-imageLayout-00004");
+ m_commandBuffer->ClearColorImage(dst_image, VK_IMAGE_LAYOUT_GENERAL, &color_clear_value, 1, &clear_range);
+ m_errorMonitor->VerifyFound();
+
+ VkClearDepthStencilValue depth_clear_value = {};
+ clear_range.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+
+ // Fail due to explicitly prohibited layout for depth clear (only GENERAL and TRANSFER_DST are permitted).
+ // Since the image is currently not in UNDEFINED layout, this will emit two errors.
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearDepthStencilImage-imageLayout-00012");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearDepthStencilImage-imageLayout-00011");
+ m_commandBuffer->ClearDepthStencilImage(depth_image, VK_IMAGE_LAYOUT_UNDEFINED, &depth_clear_value, 1, &clear_range);
+ m_errorMonitor->VerifyFound();
+ // Fail due to provided layout not matching actual current layout for depth clear.
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearDepthStencilImage-imageLayout-00011");
+ m_commandBuffer->ClearDepthStencilImage(depth_image, VK_IMAGE_LAYOUT_GENERAL, &depth_clear_value, 1, &clear_range);
+ m_errorMonitor->VerifyFound();
+
+ // Now cause error due to bad image layout transition in PipelineBarrier
+ VkImageMemoryBarrier image_barrier[1] = {};
+ image_barrier[0].sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+ image_barrier[0].oldLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL;
+ image_barrier[0].newLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
+ image_barrier[0].image = src_image;
+ image_barrier[0].subresourceRange.layerCount = image_create_info.arrayLayers;
+ image_barrier[0].subresourceRange.levelCount = image_create_info.mipLevels;
+ image_barrier[0].subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
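+    // Two errors are expected: oldLayout neither matches the image's actual GENERAL layout nor is UNDEFINED, and a
+    // depth/stencil read-only layout requires an image created with depth/stencil attachment usage.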
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-oldLayout-01197");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-oldLayout-01210");
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
+ NULL, 0, NULL, 1, image_barrier);
+ m_errorMonitor->VerifyFound();
+
+ // Finally some layout errors at RenderPass create time
+    // Just hacking in specific state to get to the errors we want, so don't copy this unless you know what you're doing.
+ VkAttachmentReference attach = {};
+ // perf warning for GENERAL layout w/ non-DS input attachment
+ attach.layout = VK_IMAGE_LAYOUT_GENERAL;
+ VkSubpassDescription subpass = {};
+ subpass.inputAttachmentCount = 1;
+ subpass.pInputAttachments = &attach;
+ VkRenderPassCreateInfo rpci = {};
+ rpci.subpassCount = 1;
+ rpci.pSubpasses = &subpass;
+ rpci.attachmentCount = 1;
+ VkAttachmentDescription attach_desc = {};
+ attach_desc.format = VK_FORMAT_UNDEFINED;
+ attach_desc.samples = VK_SAMPLE_COUNT_1_BIT;
+ attach_desc.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
+ rpci.pAttachments = &attach_desc;
+ rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+ VkRenderPass rp;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
+ "Layout for input attachment is GENERAL but should be READ_ONLY_OPTIMAL.");
+ vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp);
+ m_errorMonitor->VerifyFound();
+ // error w/ non-general layout
+ attach.layout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
+
+ m_errorMonitor->SetDesiredFailureMsg(
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "Layout for input attachment is VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL but can only be READ_ONLY_OPTIMAL or GENERAL.");
+ vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp);
+ m_errorMonitor->VerifyFound();
+ subpass.inputAttachmentCount = 0;
+ subpass.colorAttachmentCount = 1;
+ subpass.pColorAttachments = &attach;
+ attach.layout = VK_IMAGE_LAYOUT_GENERAL;
+ // perf warning for GENERAL layout on color attachment
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
+ "Layout for color attachment is GENERAL but should be COLOR_ATTACHMENT_OPTIMAL.");
+ vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp);
+ m_errorMonitor->VerifyFound();
+ // error w/ non-color opt or GENERAL layout for color attachment
+ attach.layout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
+ m_errorMonitor->SetDesiredFailureMsg(
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "Layout for color attachment is VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL but can only be COLOR_ATTACHMENT_OPTIMAL or GENERAL.");
+ vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp);
+ m_errorMonitor->VerifyFound();
+ subpass.colorAttachmentCount = 0;
+ subpass.pDepthStencilAttachment = &attach;
+ attach.layout = VK_IMAGE_LAYOUT_GENERAL;
+ // perf warning for GENERAL layout on DS attachment
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
+ "GENERAL layout for depth attachment may not give optimal performance.");
+ vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp);
+ m_errorMonitor->VerifyFound();
+    // error w/ non-ds opt or GENERAL layout for depth/stencil attachment
+ attach.layout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "Layout for depth attachment is VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL but can only be "
+ "DEPTH_STENCIL_ATTACHMENT_OPTIMAL, DEPTH_STENCIL_READ_ONLY_OPTIMAL or GENERAL.");
+ vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp);
+ m_errorMonitor->VerifyFound();
+ // For this error we need a valid renderpass so create default one
+ attach.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL;
+ attach.attachment = 0;
+ attach_desc.format = depth_format;
+ attach_desc.samples = VK_SAMPLE_COUNT_1_BIT;
+ attach_desc.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
+ attach_desc.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
+ attach_desc.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
+ // Can't do a CLEAR load on READ_ONLY initialLayout
+ attach_desc.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
+ attach_desc.initialLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL;
+ attach_desc.finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "with invalid first layout VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL");
+ vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp);
+ m_errorMonitor->VerifyFound();
+
+ vkFreeMemory(m_device->device(), src_image_mem, NULL);
+ vkFreeMemory(m_device->device(), dst_image_mem, NULL);
+ vkFreeMemory(m_device->device(), depth_image_mem, NULL);
+ vkDestroyImage(m_device->device(), src_image, NULL);
+ vkDestroyImage(m_device->device(), dst_image, NULL);
+ vkDestroyImage(m_device->device(), depth_image, NULL);
+}
+
+TEST_F(VkLayerTest, InvalidStorageImageLayout) {
+ TEST_DESCRIPTION("Attempt to update a STORAGE_IMAGE descriptor w/o GENERAL layout.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ const VkFormat tex_format = VK_FORMAT_R8G8B8A8_UNORM;
+ VkImageTiling tiling;
+ VkFormatProperties format_properties;
+ vkGetPhysicalDeviceFormatProperties(gpu(), tex_format, &format_properties);
+ if (format_properties.linearTilingFeatures & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT) {
+ tiling = VK_IMAGE_TILING_LINEAR;
+ } else if (format_properties.optimalTilingFeatures & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT) {
+ tiling = VK_IMAGE_TILING_OPTIMAL;
+ } else {
+ printf("%s Device does not support VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT; skipped.\n", kSkipPrefix);
+ return;
+ }
+
+ OneOffDescriptorSet ds(m_device, {
+ {0, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
+ });
+
+ VkImageObj image(m_device);
+ image.Init(32, 32, 1, tex_format, VK_IMAGE_USAGE_STORAGE_BIT, tiling, 0);
+ ASSERT_TRUE(image.initialized());
+ VkImageView view = image.targetView(tex_format);
+
+ VkDescriptorImageInfo image_info = {};
+ image_info.imageView = view;
+ image_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+
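+    // Storage image descriptors must use VK_IMAGE_LAYOUT_GENERAL; writing one with SHADER_READ_ONLY_OPTIMAL should be rejected.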
+ VkWriteDescriptorSet descriptor_write = {};
+ descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ descriptor_write.dstSet = ds.set_;
+ descriptor_write.dstBinding = 0;
+ descriptor_write.descriptorCount = 1;
+ descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
+ descriptor_write.pImageInfo = &image_info;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ " of VK_DESCRIPTOR_TYPE_STORAGE_IMAGE type is being updated with layout "
+ "VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL but according to spec ");
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, NonSimultaneousSecondaryMarksPrimary) {
+ ASSERT_NO_FATAL_FAILURE(Init());
+ const char *simultaneous_use_message =
+ "does not have VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT set and will cause primary command buffer";
+
+ VkCommandBufferObj secondary(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
+
+ secondary.begin();
+ secondary.end();
+
+ VkCommandBufferBeginInfo cbbi = {
+ VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
+ nullptr,
+ VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT,
+ nullptr,
+ };
+
+ m_commandBuffer->begin(&cbbi);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_WARNING_BIT_EXT, simultaneous_use_message);
+ vkCmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary.handle());
+ m_errorMonitor->VerifyFound();
+ m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, SimultaneousUseSecondaryTwoExecutes) {
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ const char *simultaneous_use_message = "without VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT set!";
+
+ VkCommandBufferObj secondary(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
+
+ VkCommandBufferInheritanceInfo inh = {
+ VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
+ nullptr,
+ };
+ VkCommandBufferBeginInfo cbbi = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, nullptr, 0, &inh};
+
+ secondary.begin(&cbbi);
+ secondary.end();
+
+ m_commandBuffer->begin();
+ vkCmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary.handle());
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, simultaneous_use_message);
+ vkCmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary.handle());
+ m_errorMonitor->VerifyFound();
+ m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, SimultaneousUseSecondarySingleExecute) {
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ // variation on previous test executing the same CB twice in the same
+ // CmdExecuteCommands call
+
+ const char *simultaneous_use_message = "without VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT set!";
+
+ VkCommandBufferObj secondary(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
+
+ VkCommandBufferInheritanceInfo inh = {
+ VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
+ nullptr,
+ };
+ VkCommandBufferBeginInfo cbbi = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, nullptr, 0, &inh};
+
+ secondary.begin(&cbbi);
+ secondary.end();
+
+ m_commandBuffer->begin();
+ VkCommandBuffer cbs[] = {secondary.handle(), secondary.handle()};
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, simultaneous_use_message);
+ vkCmdExecuteCommands(m_commandBuffer->handle(), 2, cbs);
+ m_errorMonitor->VerifyFound();
+ m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, SimultaneousUseOneShot) {
+    TEST_DESCRIPTION("Submit the same command buffer twice in one submit, looking for simultaneous-use and one-time-submit errors");
+ const char *simultaneous_use_message = "is already in use and is not marked for simultaneous use";
+ const char *one_shot_message = "VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT set, but has been submitted";
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ VkCommandBuffer cmd_bufs[2];
+ VkCommandBufferAllocateInfo alloc_info;
+ alloc_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+ alloc_info.pNext = NULL;
+ alloc_info.commandBufferCount = 2;
+ alloc_info.commandPool = m_commandPool->handle();
+ alloc_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+ vkAllocateCommandBuffers(m_device->device(), &alloc_info, cmd_bufs);
+
+ VkCommandBufferBeginInfo cb_binfo;
+ cb_binfo.pNext = NULL;
+ cb_binfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+    cb_binfo.pInheritanceInfo = nullptr;
+ cb_binfo.flags = 0;
+ vkBeginCommandBuffer(cmd_bufs[0], &cb_binfo);
+ VkViewport viewport = {0, 0, 16, 16, 0, 1};
+ vkCmdSetViewport(cmd_bufs[0], 0, 1, &viewport);
+ vkEndCommandBuffer(cmd_bufs[0]);
+ VkCommandBuffer duplicates[2] = {cmd_bufs[0], cmd_bufs[0]};
+
+ VkSubmitInfo submit_info = {};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 2;
+ submit_info.pCommandBuffers = duplicates;
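+    // Submitting the same command buffer twice in a single submission without SIMULTANEOUS_USE should be flagged.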
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, simultaneous_use_message);
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+ m_errorMonitor->VerifyFound();
+ vkQueueWaitIdle(m_device->m_queue);
+
+ // Set one time use and now look for one time submit
+ duplicates[0] = duplicates[1] = cmd_bufs[1];
+ cb_binfo.flags = VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT | VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
+ vkBeginCommandBuffer(cmd_bufs[1], &cb_binfo);
+ vkCmdSetViewport(cmd_bufs[1], 0, 1, &viewport);
+ vkEndCommandBuffer(cmd_bufs[1]);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, one_shot_message);
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+ m_errorMonitor->VerifyFound();
+ vkQueueWaitIdle(m_device->m_queue);
+}
+
+TEST_F(VkLayerTest, StageMaskGsTsEnabled) {
+ TEST_DESCRIPTION(
+        "Attempt to use a stageMask w/ geometry shader and tessellation shader bits enabled when those features are disabled on the "
+ "device.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ std::vector<const char *> device_extension_names;
+ auto features = m_device->phy().features();
+ // Make sure gs & ts are disabled
+ features.geometryShader = false;
+ features.tessellationShader = false;
+ // The sacrificial device object
+ VkDeviceObj test_device(0, gpu(), device_extension_names, &features);
+
+ VkCommandPoolCreateInfo pool_create_info{};
+ pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+ pool_create_info.queueFamilyIndex = test_device.graphics_queue_node_index_;
+
+ VkCommandPool command_pool;
+ vkCreateCommandPool(test_device.handle(), &pool_create_info, nullptr, &command_pool);
+
+ VkCommandBufferAllocateInfo cmd = {};
+ cmd.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+ cmd.pNext = NULL;
+ cmd.commandPool = command_pool;
+ cmd.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+ cmd.commandBufferCount = 1;
+
+ VkCommandBuffer cmd_buffer;
+ VkResult err = vkAllocateCommandBuffers(test_device.handle(), &cmd, &cmd_buffer);
+ ASSERT_VK_SUCCESS(err);
+
+ VkEvent event;
+ VkEventCreateInfo evci = {};
+ evci.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
+ VkResult result = vkCreateEvent(test_device.handle(), &evci, NULL, &event);
+ ASSERT_VK_SUCCESS(result);
+
+ VkCommandBufferBeginInfo cbbi = {};
+ cbbi.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+ vkBeginCommandBuffer(cmd_buffer, &cbbi);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetEvent-stageMask-01150");
+ vkCmdSetEvent(cmd_buffer, event, VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetEvent-stageMask-01151");
+ vkCmdSetEvent(cmd_buffer, event, VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT);
+ m_errorMonitor->VerifyFound();
+
+ vkDestroyEvent(test_device.handle(), event, NULL);
+ vkDestroyCommandPool(test_device.handle(), command_pool, NULL);
+}
+
+TEST_F(VkLayerTest, EventInUseDestroyedSignaled) {
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ m_commandBuffer->begin();
+
+ VkEvent event;
+ VkEventCreateInfo event_create_info = {};
+ event_create_info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
+ vkCreateEvent(m_device->device(), &event_create_info, nullptr, &event);
+ vkCmdSetEvent(m_commandBuffer->handle(), event, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT);
+
+ m_commandBuffer->end();
+ vkDestroyEvent(m_device->device(), event, nullptr);
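+    // The event was destroyed while still referenced by the recorded command buffer, so the submit below should be flagged.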
+
+ VkSubmitInfo submit_info = {};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &m_commandBuffer->handle();
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "that is invalid because bound");
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, InUseDestroyedSignaled) {
+    TEST_DESCRIPTION(
+        "Delete objects (event, semaphore, fence) that are in use by a submitted command buffer, then delete them again once "
+        "the queue is idle.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ m_errorMonitor->ExpectSuccess();
+
+ VkSemaphoreCreateInfo semaphore_create_info = {};
+ semaphore_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
+ VkSemaphore semaphore;
+ ASSERT_VK_SUCCESS(vkCreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore));
+ VkFenceCreateInfo fence_create_info = {};
+ fence_create_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
+ VkFence fence;
+ ASSERT_VK_SUCCESS(vkCreateFence(m_device->device(), &fence_create_info, nullptr, &fence));
+
+ OneOffDescriptorSet ds(m_device, {
+ {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+ });
+
+ VkBufferTest buffer_test(m_device, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT);
+
+ VkDescriptorBufferInfo buffer_info = {};
+ buffer_info.buffer = buffer_test.GetBuffer();
+ buffer_info.offset = 0;
+ buffer_info.range = 1024;
+
+ VkWriteDescriptorSet write_descriptor_set = {};
+ write_descriptor_set.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ write_descriptor_set.dstSet = ds.set_;
+ write_descriptor_set.descriptorCount = 1;
+ write_descriptor_set.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+ write_descriptor_set.pBufferInfo = &buffer_info;
+
+ vkUpdateDescriptorSets(m_device->device(), 1, &write_descriptor_set, 0, nullptr);
+
+ VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddDefaultColorAttachment();
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+
+ const VkPipelineLayoutObj pipeline_layout(m_device, {&ds.layout_});
+
+ pipe.CreateVKPipeline(pipeline_layout.handle(), m_renderPass);
+
+ VkEvent event;
+ VkEventCreateInfo event_create_info = {};
+ event_create_info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
+ vkCreateEvent(m_device->device(), &event_create_info, nullptr, &event);
+
+ m_commandBuffer->begin();
+
+ vkCmdSetEvent(m_commandBuffer->handle(), event, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT);
+
+ vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+ vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1, &ds.set_, 0,
+ NULL);
+
+ m_commandBuffer->end();
+
+ VkSubmitInfo submit_info = {};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &m_commandBuffer->handle();
+ submit_info.signalSemaphoreCount = 1;
+ submit_info.pSignalSemaphores = &semaphore;
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, fence);
+ m_errorMonitor->Reset(); // resume logmsg processing
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkDestroyEvent-event-01145");
+ vkDestroyEvent(m_device->device(), event, nullptr);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkDestroySemaphore-semaphore-01137");
+ vkDestroySemaphore(m_device->device(), semaphore, nullptr);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Fence 0x");
+ vkDestroyFence(m_device->device(), fence, nullptr);
+ m_errorMonitor->VerifyFound();
+
+ vkQueueWaitIdle(m_device->m_queue);
+ m_errorMonitor->SetUnexpectedError("If semaphore is not VK_NULL_HANDLE, semaphore must be a valid VkSemaphore handle");
+ m_errorMonitor->SetUnexpectedError("Unable to remove Semaphore obj");
+ vkDestroySemaphore(m_device->device(), semaphore, nullptr);
+ m_errorMonitor->SetUnexpectedError("If fence is not VK_NULL_HANDLE, fence must be a valid VkFence handle");
+ m_errorMonitor->SetUnexpectedError("Unable to remove Fence obj");
+ vkDestroyFence(m_device->device(), fence, nullptr);
+ m_errorMonitor->SetUnexpectedError("If event is not VK_NULL_HANDLE, event must be a valid VkEvent handle");
+ m_errorMonitor->SetUnexpectedError("Unable to remove Event obj");
+ vkDestroyEvent(m_device->device(), event, nullptr);
+}
+
+TEST_F(VkLayerTest, QueryPoolInUseDestroyedSignaled) {
+ TEST_DESCRIPTION("Delete in-use query pool.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkQueryPool query_pool;
+ VkQueryPoolCreateInfo query_pool_ci{};
+ query_pool_ci.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
+ query_pool_ci.queryType = VK_QUERY_TYPE_TIMESTAMP;
+ query_pool_ci.queryCount = 1;
+ vkCreateQueryPool(m_device->device(), &query_pool_ci, nullptr, &query_pool);
+ m_commandBuffer->begin();
+ // Reset query pool to create binding with cmd buffer
+ vkCmdResetQueryPool(m_commandBuffer->handle(), query_pool, 0, 1);
+
+ m_commandBuffer->end();
+
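+    // Requesting partial results from a timestamp query pool is invalid.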
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkGetQueryPoolResults-queryType-00818");
+ uint32_t data_space[16];
+ m_errorMonitor->SetUnexpectedError("Cannot get query results on queryPool");
+ vkGetQueryPoolResults(m_device->handle(), query_pool, 0, 1, sizeof(data_space), &data_space, sizeof(uint32_t),
+ VK_QUERY_RESULT_PARTIAL_BIT);
+ m_errorMonitor->VerifyFound();
+
+ VkSubmitInfo submit_info = {};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &m_commandBuffer->handle();
+ // Submit cmd buffer and then destroy query pool while in-flight
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkDestroyQueryPool-queryPool-00793");
+ vkDestroyQueryPool(m_device->handle(), query_pool, NULL);
+ m_errorMonitor->VerifyFound();
+
+ vkQueueWaitIdle(m_device->m_queue);
+ // Now that cmd buffer done we can safely destroy query_pool
+ m_errorMonitor->SetUnexpectedError("If queryPool is not VK_NULL_HANDLE, queryPool must be a valid VkQueryPool handle");
+ m_errorMonitor->SetUnexpectedError("Unable to remove QueryPool obj");
+ vkDestroyQueryPool(m_device->handle(), query_pool, NULL);
+}
+
+TEST_F(VkLayerTest, PipelineInUseDestroyedSignaled) {
+ TEST_DESCRIPTION("Delete in-use pipeline.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ const VkPipelineLayoutObj pipeline_layout(m_device);
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkDestroyPipeline-pipeline-00765");
+ // Create PSO to be used for draw-time errors below
+ VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+ // Store pipeline handle so we can actually delete it before test finishes
+ VkPipeline delete_this_pipeline;
+ { // Scope pipeline so it will be auto-deleted
+ VkPipelineObj pipe(m_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+ pipe.AddDefaultColorAttachment();
+ pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());
+ delete_this_pipeline = pipe.handle();
+
+ m_commandBuffer->begin();
+ // Bind pipeline to cmd buffer
+ vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+
+ m_commandBuffer->end();
+
+ VkSubmitInfo submit_info = {};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &m_commandBuffer->handle();
+ // Submit cmd buffer and then pipeline destroyed while in-flight
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+ } // Pipeline deletion triggered here
+ m_errorMonitor->VerifyFound();
+ // Make sure queue finished and then actually delete pipeline
+ vkQueueWaitIdle(m_device->m_queue);
+ m_errorMonitor->SetUnexpectedError("If pipeline is not VK_NULL_HANDLE, pipeline must be a valid VkPipeline handle");
+ m_errorMonitor->SetUnexpectedError("Unable to remove Pipeline obj");
+ vkDestroyPipeline(m_device->handle(), delete_this_pipeline, nullptr);
+}
+
+TEST_F(VkLayerTest, CreateImageViewBreaksParameterCompatibilityRequirements) {
+ TEST_DESCRIPTION(
+ "Attempts to create an Image View with a view type that does not match the image type it is being created from.");
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME)) {
+ m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+ }
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ VkPhysicalDeviceMemoryProperties memProps;
+ vkGetPhysicalDeviceMemoryProperties(m_device->phy().handle(), &memProps);
+
+ // Test mismatch detection for image of type VK_IMAGE_TYPE_1D
+ VkImageCreateInfo imgInfo = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+ nullptr,
+ VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT,
+ VK_IMAGE_TYPE_1D,
+ VK_FORMAT_R8G8B8A8_UNORM,
+ {1, 1, 1},
+ 1,
+ 1,
+ VK_SAMPLE_COUNT_1_BIT,
+ VK_IMAGE_TILING_OPTIMAL,
+ VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
+ VK_SHARING_MODE_EXCLUSIVE,
+ 0,
+ nullptr,
+ VK_IMAGE_LAYOUT_UNDEFINED};
+ VkImageObj image1D(m_device);
+ image1D.init(&imgInfo);
+ ASSERT_TRUE(image1D.initialized());
+
+ // Initialize VkImageViewCreateInfo with mismatched viewType
+ VkImageView imageView;
+ VkImageViewCreateInfo ivci = {};
+ ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+ ivci.image = image1D.handle();
+ ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+ ivci.format = VK_FORMAT_R8G8B8A8_UNORM;
+ ivci.subresourceRange.layerCount = 1;
+ ivci.subresourceRange.baseMipLevel = 0;
+ ivci.subresourceRange.levelCount = 1;
+ ivci.subresourceRange.baseArrayLayer = 0;
+ ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+
+ // Test for error message
+ m_errorMonitor->SetDesiredFailureMsg(
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "vkCreateImageView(): pCreateInfo->viewType VK_IMAGE_VIEW_TYPE_2D is not compatible with image");
+ vkCreateImageView(m_device->device(), &ivci, NULL, &imageView);
+ m_errorMonitor->VerifyFound();
+
+ // Test mismatch detection for image of type VK_IMAGE_TYPE_2D
+ imgInfo = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+ nullptr,
+ VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT,
+ VK_IMAGE_TYPE_2D,
+ VK_FORMAT_R8G8B8A8_UNORM,
+ {1, 1, 1},
+ 1,
+ 6,
+ VK_SAMPLE_COUNT_1_BIT,
+ VK_IMAGE_TILING_OPTIMAL,
+ VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
+ VK_SHARING_MODE_EXCLUSIVE,
+ 0,
+ nullptr,
+ VK_IMAGE_LAYOUT_UNDEFINED};
+ VkImageObj image2D(m_device);
+ image2D.init(&imgInfo);
+ ASSERT_TRUE(image2D.initialized());
+
+ // Initialize VkImageViewCreateInfo with mismatched viewType
+ ivci = {};
+ ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+ ivci.image = image2D.handle();
+ ivci.viewType = VK_IMAGE_VIEW_TYPE_3D;
+ ivci.format = VK_FORMAT_R8G8B8A8_UNORM;
+ ivci.subresourceRange.layerCount = 1;
+ ivci.subresourceRange.baseMipLevel = 0;
+ ivci.subresourceRange.levelCount = 1;
+ ivci.subresourceRange.baseArrayLayer = 0;
+ ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+
+ // Test for error message
+ m_errorMonitor->SetDesiredFailureMsg(
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "vkCreateImageView(): pCreateInfo->viewType VK_IMAGE_VIEW_TYPE_3D is not compatible with image");
+ vkCreateImageView(m_device->device(), &ivci, NULL, &imageView);
+ m_errorMonitor->VerifyFound();
+
+ // Change VkImageViewCreateInfo to different mismatched viewType
+ ivci.viewType = VK_IMAGE_VIEW_TYPE_CUBE;
+ ivci.subresourceRange.layerCount = 6;
+
+ // Test for error message
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageViewCreateInfo-image-01003");
+ vkCreateImageView(m_device->device(), &ivci, NULL, &imageView);
+ m_errorMonitor->VerifyFound();
+
+ // Test mismatch detection for image of type VK_IMAGE_TYPE_3D
+ imgInfo = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+ nullptr,
+ VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT,
+ VK_IMAGE_TYPE_3D,
+ VK_FORMAT_R8G8B8A8_UNORM,
+ {1, 1, 1},
+ 1,
+ 1,
+ VK_SAMPLE_COUNT_1_BIT,
+ VK_IMAGE_TILING_OPTIMAL,
+ VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
+ VK_SHARING_MODE_EXCLUSIVE,
+ 0,
+ nullptr,
+ VK_IMAGE_LAYOUT_UNDEFINED};
+ VkImageObj image3D(m_device);
+ image3D.init(&imgInfo);
+ ASSERT_TRUE(image3D.initialized());
+
+ // Initialize VkImageViewCreateInfo with mismatched viewType
+ ivci = {};
+ ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+ ivci.image = image3D.handle();
+ ivci.viewType = VK_IMAGE_VIEW_TYPE_1D;
+ ivci.format = VK_FORMAT_R8G8B8A8_UNORM;
+ ivci.subresourceRange.layerCount = 1;
+ ivci.subresourceRange.baseMipLevel = 0;
+ ivci.subresourceRange.levelCount = 1;
+ ivci.subresourceRange.baseArrayLayer = 0;
+ ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+
+ // Test for error message
+ m_errorMonitor->SetDesiredFailureMsg(
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "vkCreateImageView(): pCreateInfo->viewType VK_IMAGE_VIEW_TYPE_1D is not compatible with image");
+ vkCreateImageView(m_device->device(), &ivci, NULL, &imageView);
+ m_errorMonitor->VerifyFound();
+
+ // Change VkImageViewCreateInfo to different mismatched viewType
+ ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+
+ // Test for error message
+ if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME)) {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageViewCreateInfo-image-01005");
+ } else {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageViewCreateInfo-subResourceRange-01021");
+ }
+
+ vkCreateImageView(m_device->device(), &ivci, NULL, &imageView);
+ m_errorMonitor->VerifyFound();
+
+ // Check if the device can make the image required for this test case.
+ VkImageFormatProperties formProps = {{0, 0, 0}, 0, 0, 0, 0};
+ VkResult res = vkGetPhysicalDeviceImageFormatProperties(
+ m_device->phy().handle(), VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_TYPE_3D, VK_IMAGE_TILING_OPTIMAL,
+ VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
+ VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR | VK_IMAGE_CREATE_SPARSE_BINDING_BIT,
+ &formProps);
+
+ // If not, skip this part of the test.
+ if (res || !m_device->phy().features().sparseBinding ||
+ !DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME)) {
+ printf("%s %s is not supported.\n", kSkipPrefix, VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+ return;
+ }
+
+ // Initialize VkImageCreateInfo with VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR and VK_IMAGE_CREATE_SPARSE_BINDING_BIT which
+ // are incompatible create flags.
+ imgInfo = {
+ VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+ nullptr,
+ VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR | VK_IMAGE_CREATE_SPARSE_BINDING_BIT,
+ VK_IMAGE_TYPE_3D,
+ VK_FORMAT_R8G8B8A8_UNORM,
+ {1, 1, 1},
+ 1,
+ 1,
+ VK_SAMPLE_COUNT_1_BIT,
+ VK_IMAGE_TILING_OPTIMAL,
+ VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
+ VK_SHARING_MODE_EXCLUSIVE,
+ 0,
+ nullptr,
+ VK_IMAGE_LAYOUT_UNDEFINED};
+ VkImage imageSparse;
+
+ // Creating a sparse image means we should not bind memory to it.
+ res = vkCreateImage(m_device->device(), &imgInfo, NULL, &imageSparse);
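+    // VK_SUCCESS is 0, so ASSERT_FALSE(res) asserts that creation of the sparse image succeeded.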
+ ASSERT_FALSE(res);
+
+ // Initialize VkImageViewCreateInfo to create a view that will attempt to utilize VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR.
+ ivci = {};
+ ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+ ivci.image = imageSparse;
+ ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+ ivci.format = VK_FORMAT_R8G8B8A8_UNORM;
+ ivci.subresourceRange.layerCount = 1;
+ ivci.subresourceRange.baseMipLevel = 0;
+ ivci.subresourceRange.levelCount = 1;
+ ivci.subresourceRange.baseArrayLayer = 0;
+ ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+
+ // Test for error message
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ " when the VK_IMAGE_CREATE_SPARSE_BINDING_BIT, VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT, or "
+ "VK_IMAGE_CREATE_SPARSE_ALIASED_BIT flags are enabled.");
+ vkCreateImageView(m_device->device(), &ivci, NULL, &imageView);
+ m_errorMonitor->VerifyFound();
+
+ // Clean up
+ vkDestroyImage(m_device->device(), imageSparse, nullptr);
+}
+
+TEST_F(VkLayerTest, CreateImageViewFormatFeatureMismatch) {
+ TEST_DESCRIPTION("Create view with a format that does not have the same features as the image format.");
+
+ if (!EnableDeviceProfileLayer()) {
+ printf("%s Failed to enable device profile layer.\n", kSkipPrefix);
+ return;
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ PFN_vkSetPhysicalDeviceFormatPropertiesEXT fpvkSetPhysicalDeviceFormatPropertiesEXT = nullptr;
+ PFN_vkGetOriginalPhysicalDeviceFormatPropertiesEXT fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT = nullptr;
+
+ // Load required functions
+ if (!LoadDeviceProfileLayer(fpvkSetPhysicalDeviceFormatPropertiesEXT, fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT)) {
+ printf("%s Failed to device profile layer.\n", kSkipPrefix);
+ return;
+ }
+
+ // List of features to be tested
+ VkFormatFeatureFlagBits features[] = {VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT, VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT,
+ VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT, VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT};
+ uint32_t feature_count = 4;
+ // List of usage cases for each feature test
+ VkImageUsageFlags usages[] = {VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_USAGE_STORAGE_BIT, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
+ VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT};
+ // List of errors that will be thrown in order of tests run
+ std::string optimal_error_codes[] = {
+ "VUID-VkImageViewCreateInfo-usage-02274",
+ "VUID-VkImageViewCreateInfo-usage-02275",
+ "VUID-VkImageViewCreateInfo-usage-02276",
+ "VUID-VkImageViewCreateInfo-usage-02277",
+ };
+
+ VkFormatProperties formatProps;
+
+ // First three tests
+ uint32_t i = 0;
+ for (i = 0; i < (feature_count - 1); i++) {
+ // Modify formats to have mismatched features
+
+ // Format for image
+ fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_R32G32B32A32_UINT, &formatProps);
+ formatProps.optimalTilingFeatures |= features[i];
+ fpvkSetPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_R32G32B32A32_UINT, formatProps);
+
+ memset(&formatProps, 0, sizeof(formatProps));
+
+ // Format for view
+ fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_R32G32B32A32_SINT, &formatProps);
+ formatProps.optimalTilingFeatures = features[(i + 1) % feature_count];
+ fpvkSetPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_R32G32B32A32_SINT, formatProps);
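+        // The view format now advertises only features[(i + 1) % feature_count], so it lacks the
+        // feature that usages[i] requires, which is what triggers optimal_error_codes[i] below.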
+
+ // Create image with modified format
+ VkImageCreateInfo imgInfo = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+ nullptr,
+ VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT,
+ VK_IMAGE_TYPE_2D,
+ VK_FORMAT_R32G32B32A32_UINT,
+ {1, 1, 1},
+ 1,
+ 1,
+ VK_SAMPLE_COUNT_1_BIT,
+ VK_IMAGE_TILING_OPTIMAL,
+ usages[i],
+ VK_SHARING_MODE_EXCLUSIVE,
+ 0,
+ nullptr,
+ VK_IMAGE_LAYOUT_UNDEFINED};
+ VkImageObj image(m_device);
+ image.init(&imgInfo);
+ ASSERT_TRUE(image.initialized());
+
+ VkImageView imageView;
+
+ // Initialize VkImageViewCreateInfo with modified format
+ VkImageViewCreateInfo ivci = {};
+ ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+ ivci.image = image.handle();
+ ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+ ivci.format = VK_FORMAT_R32G32B32A32_SINT;
+ ivci.subresourceRange.layerCount = 1;
+ ivci.subresourceRange.baseMipLevel = 0;
+ ivci.subresourceRange.levelCount = 1;
+ ivci.subresourceRange.baseArrayLayer = 0;
+ ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+
+ // Test for error message
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, optimal_error_codes[i]);
+ VkResult res = vkCreateImageView(m_device->device(), &ivci, NULL, &imageView);
+ m_errorMonitor->VerifyFound();
+
+ if (!res) {
+ vkDestroyImageView(m_device->device(), imageView, nullptr);
+ }
+ }
+
+ // Test for VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT. Needs special formats
+
+ // Only run this test if format supported
+ if (!ImageFormatIsSupported(gpu(), VK_FORMAT_D24_UNORM_S8_UINT, VK_IMAGE_TILING_OPTIMAL)) {
+ printf("%s VK_FORMAT_D24_UNORM_S8_UINT format not supported - skipped.\n", kSkipPrefix);
+ return;
+ }
+ // Modify formats to have mismatched features
+
+ // Format for image
+ fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_D24_UNORM_S8_UINT, &formatProps);
+ formatProps.optimalTilingFeatures |= features[i];
+ fpvkSetPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_D24_UNORM_S8_UINT, formatProps);
+
+ memset(&formatProps, 0, sizeof(formatProps));
+
+ // Format for view
+ fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_D32_SFLOAT_S8_UINT, &formatProps);
+ formatProps.optimalTilingFeatures = features[(i + 1) % feature_count];
+ fpvkSetPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_D32_SFLOAT_S8_UINT, formatProps);
+
+ // Create image with modified format
+ VkImageCreateInfo imgInfo = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+ nullptr,
+ VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT,
+ VK_IMAGE_TYPE_2D,
+ VK_FORMAT_D24_UNORM_S8_UINT,
+ {1, 1, 1},
+ 1,
+ 1,
+ VK_SAMPLE_COUNT_1_BIT,
+ VK_IMAGE_TILING_OPTIMAL,
+ usages[i],
+ VK_SHARING_MODE_EXCLUSIVE,
+ 0,
+ nullptr,
+ VK_IMAGE_LAYOUT_UNDEFINED};
+ VkImageObj image(m_device);
+ image.init(&imgInfo);
+ ASSERT_TRUE(image.initialized());
+
+ VkImageView imageView;
+
+ // Initialize VkImageViewCreateInfo with modified format
+ VkImageViewCreateInfo ivci = {};
+ ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+ ivci.image = image.handle();
+ ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+ ivci.format = VK_FORMAT_D32_SFLOAT_S8_UINT;
+ ivci.subresourceRange.layerCount = 1;
+ ivci.subresourceRange.baseMipLevel = 0;
+ ivci.subresourceRange.levelCount = 1;
+ ivci.subresourceRange.baseArrayLayer = 0;
+ ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
+
+ // Test for error message
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, optimal_error_codes[i]);
+ VkResult res = vkCreateImageView(m_device->device(), &ivci, NULL, &imageView);
+ m_errorMonitor->VerifyFound();
+
+ if (!res) {
+ vkDestroyImageView(m_device->device(), imageView, nullptr);
+ }
+}
+
+TEST_F(VkLayerTest, InvalidImageViewUsageCreateInfo) {
+ TEST_DESCRIPTION("Usage modification via a chained VkImageViewUsageCreateInfo struct");
+
+ if (!EnableDeviceProfileLayer()) {
+ printf("%s Test requires DeviceProfileLayer, unavailable - skipped.\n", kSkipPrefix);
+ return;
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ if (!DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE2_EXTENSION_NAME)) {
+ printf("%s Test requires API >= 1.1 or KHR_MAINTENANCE2 extension, unavailable - skipped.\n", kSkipPrefix);
+ return;
+ }
+ m_device_extension_names.push_back(VK_KHR_MAINTENANCE2_EXTENSION_NAME);
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ PFN_vkSetPhysicalDeviceFormatPropertiesEXT fpvkSetPhysicalDeviceFormatPropertiesEXT = nullptr;
+ PFN_vkGetOriginalPhysicalDeviceFormatPropertiesEXT fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT = nullptr;
+
+ // Load required functions
+ if (!LoadDeviceProfileLayer(fpvkSetPhysicalDeviceFormatPropertiesEXT, fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT)) {
+ printf("%s Required extensions are not avaiable.\n", kSkipPrefix);
+ return;
+ }
+
+ VkFormatProperties formatProps;
+
+    // Ensure the image format claims support for sampled and storage, but excludes color attachment
+ memset(&formatProps, 0, sizeof(formatProps));
+ fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_R32G32B32A32_UINT, &formatProps);
+ formatProps.optimalTilingFeatures |= (VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT | VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT);
+ formatProps.optimalTilingFeatures = formatProps.optimalTilingFeatures & ~VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT;
+ fpvkSetPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_R32G32B32A32_UINT, formatProps);
+
+ // Create image with sampled and storage usages
+ VkImageCreateInfo imgInfo = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+ nullptr,
+ VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT,
+ VK_IMAGE_TYPE_2D,
+ VK_FORMAT_R32G32B32A32_UINT,
+ {1, 1, 1},
+ 1,
+ 1,
+ VK_SAMPLE_COUNT_1_BIT,
+ VK_IMAGE_TILING_OPTIMAL,
+ VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT,
+ VK_SHARING_MODE_EXCLUSIVE,
+ 0,
+ nullptr,
+ VK_IMAGE_LAYOUT_UNDEFINED};
+ VkImageObj image(m_device);
+ image.init(&imgInfo);
+ ASSERT_TRUE(image.initialized());
+
+    // Force the imageview format to exclude the storage feature and include color attachment
+ memset(&formatProps, 0, sizeof(formatProps));
+ fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_R32G32B32A32_SINT, &formatProps);
+ formatProps.optimalTilingFeatures |= VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT;
+ formatProps.optimalTilingFeatures = (formatProps.optimalTilingFeatures & ~VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT);
+ fpvkSetPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_R32G32B32A32_SINT, formatProps);
+
+ VkImageViewCreateInfo ivci = {};
+ ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+ ivci.image = image.handle();
+ ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+ ivci.format = VK_FORMAT_R32G32B32A32_SINT;
+ ivci.subresourceRange.layerCount = 1;
+ ivci.subresourceRange.baseMipLevel = 0;
+ ivci.subresourceRange.levelCount = 1;
+ ivci.subresourceRange.baseArrayLayer = 0;
+ ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+
+ // ImageView creation should fail because view format doesn't support all the underlying image's usages
+ VkImageView imageView;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageViewCreateInfo-usage-02275");
+ VkResult res = vkCreateImageView(m_device->device(), &ivci, NULL, &imageView);
+ m_errorMonitor->VerifyFound();
+
+ // Add a chained VkImageViewUsageCreateInfo to override original image usage bits, removing storage
+ VkImageViewUsageCreateInfo usage_ci = {VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO, nullptr, VK_IMAGE_USAGE_SAMPLED_BIT};
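+    // Limiting the view's effective usage to SAMPLED drops the STORAGE requirement that the SINT view format cannot satisfy.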
+ // Link the VkImageViewUsageCreateInfo struct into the view's create info pNext chain
+ ivci.pNext = &usage_ci;
+
+ // ImageView should now succeed without error
+ m_errorMonitor->ExpectSuccess();
+ res = vkCreateImageView(m_device->device(), &ivci, NULL, &imageView);
+ m_errorMonitor->VerifyNotFound();
+ if (VK_SUCCESS == res) {
+ vkDestroyImageView(m_device->device(), imageView, nullptr);
+ }
+
+ // Try a zero usage field
+ usage_ci.usage = 0;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "vkCreateImageView: Chained VkImageViewUsageCreateInfo usage field must not be 0");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VkImageViewUsageCreateInfo: value of usage must not be 0");
+ res = vkCreateImageView(m_device->device(), &ivci, NULL, &imageView);
+ m_errorMonitor->VerifyFound();
+ if (VK_SUCCESS == res) {
+ vkDestroyImageView(m_device->device(), imageView, nullptr);
+ }
+
+ // Try a usage field with a bit not supported by underlying image
+ usage_ci.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageViewUsageCreateInfo-usage-01587");
+ res = vkCreateImageView(m_device->device(), &ivci, NULL, &imageView);
+ m_errorMonitor->VerifyFound();
+ if (VK_SUCCESS == res) {
+ vkDestroyImageView(m_device->device(), imageView, nullptr);
+ }
+
+ // Try an illegal bit in usage field
+ usage_ci.usage = 0x10000000 | VK_IMAGE_USAGE_SAMPLED_BIT;
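+    // 0x10000000 is assumed not to map to any defined VkImageUsageFlagBits value, so parameter validation reports it as unrecognized.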
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageViewUsageCreateInfo-usage-parameter");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "UNASSIGNED-GeneralParameterError-UnrecognizedValue");
+ res = vkCreateImageView(m_device->device(), &ivci, NULL, &imageView);
+ m_errorMonitor->VerifyFound();
+ if (VK_SUCCESS == res) {
+ vkDestroyImageView(m_device->device(), imageView, nullptr);
+ }
+}
+
+TEST_F(VkLayerTest, ImageViewInUseDestroyedSignaled) {
+ TEST_DESCRIPTION("Delete in-use imageView.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ OneOffDescriptorSet ds(m_device, {
+ {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
+ });
+
+ VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
+ VkSampler sampler;
+
+ VkResult err;
+ err = vkCreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
+ ASSERT_VK_SUCCESS(err);
+
+ const VkPipelineLayoutObj pipeline_layout(m_device, {&ds.layout_});
+
+ VkImageObj image(m_device);
+ image.Init(128, 128, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+ ASSERT_TRUE(image.initialized());
+
+ VkImageView view;
+ VkImageViewCreateInfo ivci = {};
+ ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+ ivci.image = image.handle();
+ ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+ ivci.format = VK_FORMAT_R8G8B8A8_UNORM;
+ ivci.subresourceRange.layerCount = 1;
+ ivci.subresourceRange.baseMipLevel = 0;
+ ivci.subresourceRange.levelCount = 1;
+ ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+
+ err = vkCreateImageView(m_device->device(), &ivci, NULL, &view);
+ ASSERT_VK_SUCCESS(err);
+
+ VkDescriptorImageInfo image_info{};
+ image_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+ image_info.imageView = view;
+ image_info.sampler = sampler;
+
+ VkWriteDescriptorSet descriptor_write = {};
+ descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ descriptor_write.dstSet = ds.set_;
+ descriptor_write.dstBinding = 0;
+ descriptor_write.descriptorCount = 1;
+ descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+ descriptor_write.pImageInfo = &image_info;
+
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+
+ // Create PSO to use the sampler
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "void main(){\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(set=0, binding=0) uniform sampler2D s;\n"
+ "layout(location=0) out vec4 x;\n"
+ "void main(){\n"
+ " x = texture(s, vec2(1));\n"
+ "}\n";
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+ VkPipelineObj pipe(m_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+ pipe.AddDefaultColorAttachment();
+ pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkDestroyImageView-imageView-01026");
+
+ m_commandBuffer->begin();
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+ // Bind pipeline to cmd buffer
+ vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+ vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1, &ds.set_, 0,
+ nullptr);
+
+ VkViewport viewport = {0, 0, 16, 16, 0, 1};
+ VkRect2D scissor = {{0, 0}, {16, 16}};
+ vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+ vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
+
+ m_commandBuffer->Draw(1, 0, 0, 0);
+ m_commandBuffer->EndRenderPass();
+ m_commandBuffer->end();
+    // Submit cmd buffer then destroy imageView
+ VkSubmitInfo submit_info = {};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &m_commandBuffer->handle();
+ // Submit cmd buffer and then destroy imageView while in-flight
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+
+ vkDestroyImageView(m_device->device(), view, nullptr);
+ m_errorMonitor->VerifyFound();
+ vkQueueWaitIdle(m_device->m_queue);
+ // Now we can actually destroy imageView
+ m_errorMonitor->SetUnexpectedError("If imageView is not VK_NULL_HANDLE, imageView must be a valid VkImageView handle");
+ m_errorMonitor->SetUnexpectedError("Unable to remove ImageView obj");
+ vkDestroyImageView(m_device->device(), view, NULL);
+ vkDestroySampler(m_device->device(), sampler, nullptr);
+}
+
+TEST_F(VkLayerTest, BufferViewInUseDestroyedSignaled) {
+ TEST_DESCRIPTION("Delete in-use bufferView.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ OneOffDescriptorSet ds(m_device, {
+ {0, VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
+ });
+
+ const VkPipelineLayoutObj pipeline_layout(m_device, {&ds.layout_});
+
+ VkBuffer buffer;
+ uint32_t queue_family_index = 0;
+ VkBufferCreateInfo buffer_create_info = {};
+ buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+ buffer_create_info.size = 1024;
+ buffer_create_info.usage = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT;
+ buffer_create_info.queueFamilyIndexCount = 1;
+ buffer_create_info.pQueueFamilyIndices = &queue_family_index;
+
+ VkResult err = vkCreateBuffer(m_device->device(), &buffer_create_info, NULL, &buffer);
+ ASSERT_VK_SUCCESS(err);
+
+ VkMemoryRequirements memory_reqs;
+ VkDeviceMemory buffer_memory;
+
+ VkMemoryAllocateInfo memory_info = {};
+ memory_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ memory_info.allocationSize = 0;
+ memory_info.memoryTypeIndex = 0;
+
+ vkGetBufferMemoryRequirements(m_device->device(), buffer, &memory_reqs);
+ memory_info.allocationSize = memory_reqs.size;
+ bool pass = m_device->phy().set_memory_type(memory_reqs.memoryTypeBits, &memory_info, 0);
+ ASSERT_TRUE(pass);
+
+ err = vkAllocateMemory(m_device->device(), &memory_info, NULL, &buffer_memory);
+ ASSERT_VK_SUCCESS(err);
+ err = vkBindBufferMemory(m_device->device(), buffer, buffer_memory, 0);
+ ASSERT_VK_SUCCESS(err);
+
+ VkBufferView view;
+ VkBufferViewCreateInfo bvci = {};
+ bvci.sType = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO;
+ bvci.buffer = buffer;
+ bvci.format = VK_FORMAT_R32_SFLOAT;
+ bvci.range = VK_WHOLE_SIZE;
+
+ err = vkCreateBufferView(m_device->device(), &bvci, NULL, &view);
+ ASSERT_VK_SUCCESS(err);
+
+ VkWriteDescriptorSet descriptor_write = {};
+ descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ descriptor_write.dstSet = ds.set_;
+ descriptor_write.dstBinding = 0;
+ descriptor_write.descriptorCount = 1;
+ descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER;
+ descriptor_write.pTexelBufferView = &view;
+
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "void main(){\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(set=0, binding=0, r32f) uniform readonly imageBuffer s;\n"
+ "layout(location=0) out vec4 x;\n"
+ "void main(){\n"
+ " x = imageLoad(s, 0);\n"
+ "}\n";
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+ VkPipelineObj pipe(m_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+ pipe.AddDefaultColorAttachment();
+ pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkDestroyBufferView-bufferView-00936");
+
+ m_commandBuffer->begin();
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+ VkViewport viewport = {0, 0, 16, 16, 0, 1};
+ vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+ VkRect2D scissor = {{0, 0}, {16, 16}};
+ vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
+ // Bind pipeline to cmd buffer
+ vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+ vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1, &ds.set_, 0,
+ nullptr);
+ m_commandBuffer->Draw(1, 0, 0, 0);
+ m_commandBuffer->EndRenderPass();
+ m_commandBuffer->end();
+
+ VkSubmitInfo submit_info = {};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &m_commandBuffer->handle();
+ // Submit cmd buffer and then destroy bufferView while in-flight
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+
+ vkDestroyBufferView(m_device->device(), view, nullptr);
+ m_errorMonitor->VerifyFound();
+ vkQueueWaitIdle(m_device->m_queue);
+ // Now we can actually destroy bufferView
+ m_errorMonitor->SetUnexpectedError("If bufferView is not VK_NULL_HANDLE, bufferView must be a valid VkBufferView handle");
+ m_errorMonitor->SetUnexpectedError("Unable to remove BufferView obj");
+ vkDestroyBufferView(m_device->device(), view, NULL);
+ vkDestroyBuffer(m_device->device(), buffer, NULL);
+ vkFreeMemory(m_device->device(), buffer_memory, NULL);
+}
+
+TEST_F(VkLayerTest, SamplerInUseDestroyedSignaled) {
+ TEST_DESCRIPTION("Delete in-use sampler.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ OneOffDescriptorSet ds(m_device, {
+ {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
+ });
+
+ VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
+ VkSampler sampler;
+
+ VkResult err;
+ err = vkCreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
+ ASSERT_VK_SUCCESS(err);
+
+ const VkPipelineLayoutObj pipeline_layout(m_device, {&ds.layout_});
+
+ VkImageObj image(m_device);
+ image.Init(128, 128, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+ ASSERT_TRUE(image.initialized());
+
+ VkImageView view;
+ VkImageViewCreateInfo ivci = {};
+ ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+ ivci.image = image.handle();
+ ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+ ivci.format = VK_FORMAT_R8G8B8A8_UNORM;
+ ivci.subresourceRange.layerCount = 1;
+ ivci.subresourceRange.baseMipLevel = 0;
+ ivci.subresourceRange.levelCount = 1;
+ ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+
+ err = vkCreateImageView(m_device->device(), &ivci, NULL, &view);
+ ASSERT_VK_SUCCESS(err);
+
+ VkDescriptorImageInfo image_info{};
+ image_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+ image_info.imageView = view;
+ image_info.sampler = sampler;
+
+ VkWriteDescriptorSet descriptor_write = {};
+ descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ descriptor_write.dstSet = ds.set_;
+ descriptor_write.dstBinding = 0;
+ descriptor_write.descriptorCount = 1;
+ descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+ descriptor_write.pImageInfo = &image_info;
+
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+
+ // Create PSO to use the sampler
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "void main(){\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(set=0, binding=0) uniform sampler2D s;\n"
+ "layout(location=0) out vec4 x;\n"
+ "void main(){\n"
+ " x = texture(s, vec2(1));\n"
+ "}\n";
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+ VkPipelineObj pipe(m_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+ pipe.AddDefaultColorAttachment();
+ pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkDestroySampler-sampler-01082");
+
+ m_commandBuffer->begin();
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+ // Bind pipeline to cmd buffer
+ vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+ vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1, &ds.set_, 0,
+ nullptr);
+
+ VkViewport viewport = {0, 0, 16, 16, 0, 1};
+ VkRect2D scissor = {{0, 0}, {16, 16}};
+ vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+ vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
+
+ m_commandBuffer->Draw(1, 0, 0, 0);
+ m_commandBuffer->EndRenderPass();
+ m_commandBuffer->end();
+ // Submit cmd buffer then destroy sampler
+ VkSubmitInfo submit_info = {};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &m_commandBuffer->handle();
+ // Submit cmd buffer and then destroy sampler while in-flight
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+
+ vkDestroySampler(m_device->device(), sampler, nullptr); // Destroyed too soon
+ m_errorMonitor->VerifyFound();
+ vkQueueWaitIdle(m_device->m_queue);
+
+ // Now we can actually destroy sampler
+ m_errorMonitor->SetUnexpectedError("If sampler is not VK_NULL_HANDLE, sampler must be a valid VkSampler handle");
+ m_errorMonitor->SetUnexpectedError("Unable to remove Sampler obj");
+ vkDestroySampler(m_device->device(), sampler, NULL); // Destroyed for real
+ vkDestroyImageView(m_device->device(), view, NULL);
+}
+
+TEST_F(VkLayerTest, UpdateDestroyDescriptorSetLayout) {
+ TEST_DESCRIPTION("Attempt updates to descriptor sets with destroyed descriptor set layouts");
+ // TODO: Update to match the descriptor set layout specific VUIDs/VALIDATION_ERROR_* when present
+ const auto kWriteDestroyedLayout = "VUID-VkWriteDescriptorSet-dstSet-00320";
+ const auto kCopyDstDestroyedLayout = "VUID-VkCopyDescriptorSet-dstSet-parameter";
+ const auto kCopySrcDestroyedLayout = "VUID-VkCopyDescriptorSet-srcSet-parameter";
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ // Set up the descriptor (resource) and write/copy operations to use.
+ float data[16] = {};
+ VkConstantBufferObj buffer(m_device, sizeof(data), data, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT);
+ ASSERT_TRUE(buffer.initialized());
+
+ VkDescriptorBufferInfo info = {};
+ info.buffer = buffer.handle();
+ info.range = VK_WHOLE_SIZE;
+
+ VkWriteDescriptorSet write_descriptor = {};
+ write_descriptor.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ write_descriptor.dstSet = VK_NULL_HANDLE; // must update this
+ write_descriptor.dstBinding = 0;
+ write_descriptor.descriptorCount = 1;
+ write_descriptor.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+ write_descriptor.pBufferInfo = &info;
+
+ VkCopyDescriptorSet copy_descriptor = {};
+ copy_descriptor.sType = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET;
+ copy_descriptor.srcSet = VK_NULL_HANDLE; // must update
+ copy_descriptor.srcBinding = 0;
+ copy_descriptor.dstSet = VK_NULL_HANDLE; // must update
+ copy_descriptor.dstBinding = 0;
+ copy_descriptor.descriptorCount = 1;
+
+ // Create valid and invalid source and destination descriptor sets
+ std::vector<VkDescriptorSetLayoutBinding> one_uniform_buffer = {
+ {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+ };
+ OneOffDescriptorSet good_dst(m_device, one_uniform_buffer);
+ ASSERT_TRUE(good_dst.Initialized());
+
+ OneOffDescriptorSet bad_dst(m_device, one_uniform_buffer);
+ // Must assert before invalidating it below
+ ASSERT_TRUE(bad_dst.Initialized());
+ bad_dst.layout_ = VkDescriptorSetLayoutObj();
+
+ OneOffDescriptorSet good_src(m_device, one_uniform_buffer);
+ ASSERT_TRUE(good_src.Initialized());
+
+ // Put valid data in the good and bad sources, simultaneously doing a positive test on write and copy operations
+ m_errorMonitor->ExpectSuccess();
+ write_descriptor.dstSet = good_src.set_;
+ vkUpdateDescriptorSets(m_device->device(), 1, &write_descriptor, 0, NULL);
+ m_errorMonitor->VerifyNotFound();
+
+ OneOffDescriptorSet bad_src(m_device, one_uniform_buffer);
+ ASSERT_TRUE(bad_src.Initialized());
+
+    // To complete the positive testing, use a copy here where a write was used above.
+ copy_descriptor.srcSet = good_src.set_;
+ copy_descriptor.dstSet = bad_src.set_;
+ vkUpdateDescriptorSets(m_device->device(), 0, nullptr, 1, &copy_descriptor);
+ bad_src.layout_ = VkDescriptorSetLayoutObj();
+ m_errorMonitor->VerifyNotFound();
+
+ // Trigger the three invalid use errors
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, kWriteDestroyedLayout);
+ write_descriptor.dstSet = bad_dst.set_;
+ vkUpdateDescriptorSets(m_device->device(), 1, &write_descriptor, 0, NULL);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, kCopyDstDestroyedLayout);
+ copy_descriptor.dstSet = bad_dst.set_;
+ vkUpdateDescriptorSets(m_device->device(), 0, nullptr, 1, &copy_descriptor);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, kCopySrcDestroyedLayout);
+ copy_descriptor.srcSet = bad_src.set_;
+ copy_descriptor.dstSet = good_dst.set_;
+ vkUpdateDescriptorSets(m_device->device(), 0, nullptr, 1, &copy_descriptor);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, QueueForwardProgressFenceWait) {
+ TEST_DESCRIPTION(
+ "Call VkQueueSubmit with a semaphore that is already signaled but not waited on by the queue. Wait on a fence that has not "
+ "yet been submitted to a queue.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ const char *queue_forward_progress_message = " that was previously signaled by queue 0x";
+ const char *invalid_fence_wait_message = " which has not been submitted on a Queue or during acquire next image.";
+
+ VkCommandBufferObj cb1(m_device, m_commandPool);
+ cb1.begin();
+ cb1.end();
+
+ VkSemaphoreCreateInfo semaphore_create_info = {};
+ semaphore_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
+ VkSemaphore semaphore;
+ ASSERT_VK_SUCCESS(vkCreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore));
+ VkSubmitInfo submit_info = {};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &cb1.handle();
+ submit_info.signalSemaphoreCount = 1;
+ submit_info.pSignalSemaphores = &semaphore;
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+
+ m_commandBuffer->begin();
+ m_commandBuffer->end();
+ submit_info.pCommandBuffers = &m_commandBuffer->handle();
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, queue_forward_progress_message);
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+ m_errorMonitor->VerifyFound();
+
+ VkFenceCreateInfo fence_create_info = {};
+ fence_create_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
+ VkFence fence;
+ ASSERT_VK_SUCCESS(vkCreateFence(m_device->device(), &fence_create_info, nullptr, &fence));
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_WARNING_BIT_EXT, invalid_fence_wait_message);
+ vkWaitForFences(m_device->device(), 1, &fence, VK_TRUE, UINT64_MAX);
+ m_errorMonitor->VerifyFound();
+
+ vkDeviceWaitIdle(m_device->device());
+ vkDestroyFence(m_device->device(), fence, nullptr);
+ vkDestroySemaphore(m_device->device(), semaphore, nullptr);
+}
+
+TEST_F(VkLayerTest, FramebufferIncompatible) {
+ TEST_DESCRIPTION(
+ "Bind a secondary command buffer with a framebuffer that does not match the framebuffer for the active renderpass.");
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ // A renderpass with one color attachment.
+ VkAttachmentDescription attachment = {0,
+ VK_FORMAT_B8G8R8A8_UNORM,
+ VK_SAMPLE_COUNT_1_BIT,
+ VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+ VK_ATTACHMENT_STORE_OP_STORE,
+ VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+ VK_ATTACHMENT_STORE_OP_DONT_CARE,
+ VK_IMAGE_LAYOUT_UNDEFINED,
+ VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
+
+ VkAttachmentReference att_ref = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
+
+ VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, &att_ref, nullptr, nullptr, 0, nullptr};
+
+ VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, &attachment, 1, &subpass, 0, nullptr};
+
+ VkRenderPass rp;
+ VkResult err = vkCreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
+ ASSERT_VK_SUCCESS(err);
+
+ // A compatible framebuffer.
+ VkImageObj image(m_device);
+ image.Init(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+ ASSERT_TRUE(image.initialized());
+
+ VkImageViewCreateInfo ivci = {
+ VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
+ nullptr,
+ 0,
+ image.handle(),
+ VK_IMAGE_VIEW_TYPE_2D,
+ VK_FORMAT_B8G8R8A8_UNORM,
+ {VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY,
+ VK_COMPONENT_SWIZZLE_IDENTITY},
+ {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1},
+ };
+ VkImageView view;
+ err = vkCreateImageView(m_device->device(), &ivci, nullptr, &view);
+ ASSERT_VK_SUCCESS(err);
+
+ VkFramebufferCreateInfo fci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 1, &view, 32, 32, 1};
+ VkFramebuffer fb;
+ err = vkCreateFramebuffer(m_device->device(), &fci, nullptr, &fb);
+ ASSERT_VK_SUCCESS(err);
+
+ VkCommandBufferAllocateInfo cbai = {};
+ cbai.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+ cbai.commandPool = m_commandPool->handle();
+ cbai.level = VK_COMMAND_BUFFER_LEVEL_SECONDARY;
+ cbai.commandBufferCount = 1;
+
+ VkCommandBuffer sec_cb;
+ err = vkAllocateCommandBuffers(m_device->device(), &cbai, &sec_cb);
+ ASSERT_VK_SUCCESS(err);
+ VkCommandBufferBeginInfo cbbi = {};
+ VkCommandBufferInheritanceInfo cbii = {};
+ cbii.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
+ cbii.renderPass = renderPass();
+ cbii.framebuffer = fb;
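+    // fb is a different framebuffer than the one bound by m_renderPassBeginInfo in the primary, which is the mismatch under test.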
+ cbbi.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+ cbbi.pNext = NULL;
+ cbbi.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT | VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT;
+ cbbi.pInheritanceInfo = &cbii;
+ vkBeginCommandBuffer(sec_cb, &cbbi);
+ vkEndCommandBuffer(sec_cb);
+
+ VkCommandBufferBeginInfo cbbi2 = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, nullptr, 0, nullptr};
+ vkBeginCommandBuffer(m_commandBuffer->handle(), &cbbi2);
+ vkCmdBeginRenderPass(m_commandBuffer->handle(), &m_renderPassBeginInfo, VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS);
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ " that is not the same as the primary command buffer's current active framebuffer ");
+ vkCmdExecuteCommands(m_commandBuffer->handle(), 1, &sec_cb);
+ m_errorMonitor->VerifyFound();
+ // Cleanup
+
+ vkCmdEndRenderPass(m_commandBuffer->handle());
+ vkEndCommandBuffer(m_commandBuffer->handle());
+
+ vkDestroyImageView(m_device->device(), view, NULL);
+ vkDestroyRenderPass(m_device->device(), rp, NULL);
+ vkDestroyFramebuffer(m_device->device(), fb, NULL);
+}
+
+TEST_F(VkLayerTest, RenderPassMissingAttachment) {
+ TEST_DESCRIPTION("Begin render pass with missing framebuffer attachment");
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ // Create a renderPass with a single color attachment
+ VkAttachmentReference attach = {};
+ attach.layout = VK_IMAGE_LAYOUT_GENERAL;
+ VkSubpassDescription subpass = {};
+ subpass.pColorAttachments = &attach;
+ VkRenderPassCreateInfo rpci = {};
+ rpci.subpassCount = 1;
+ rpci.pSubpasses = &subpass;
+ rpci.attachmentCount = 1;
+ VkAttachmentDescription attach_desc = {};
+ attach_desc.format = VK_FORMAT_B8G8R8A8_UNORM;
+ attach_desc.samples = VK_SAMPLE_COUNT_1_BIT;
+ attach_desc.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
+ rpci.pAttachments = &attach_desc;
+ rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+ VkRenderPass rp;
+ VkResult err = vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp);
+ ASSERT_VK_SUCCESS(err);
+
+ auto createView = lvl_init_struct<VkImageViewCreateInfo>();
+ createView.image = m_renderTargets[0]->handle();
+ createView.viewType = VK_IMAGE_VIEW_TYPE_2D;
+ createView.format = VK_FORMAT_B8G8R8A8_UNORM;
+ createView.components.r = VK_COMPONENT_SWIZZLE_R;
+ createView.components.g = VK_COMPONENT_SWIZZLE_G;
+ createView.components.b = VK_COMPONENT_SWIZZLE_B;
+ createView.components.a = VK_COMPONENT_SWIZZLE_A;
+ createView.subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
+ createView.flags = 0;
+
+ VkImageView iv;
+ vkCreateImageView(m_device->handle(), &createView, nullptr, &iv);
+
+ auto fb_info = lvl_init_struct<VkFramebufferCreateInfo>();
+ fb_info.renderPass = rp;
+ fb_info.attachmentCount = 1;
+ fb_info.pAttachments = &iv;
+ fb_info.width = 100;
+ fb_info.height = 100;
+ fb_info.layers = 1;
+
+    // Create the framebuffer, then destroy the view it uses.
+ VkFramebuffer fb;
+ err = vkCreateFramebuffer(device(), &fb_info, NULL, &fb);
+ vkDestroyImageView(device(), iv, NULL);
+ ASSERT_VK_SUCCESS(err);
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkRenderPassBeginInfo-framebuffer-parameter");
+
+ auto rpbi = lvl_init_struct<VkRenderPassBeginInfo>();
+ rpbi.renderPass = rp;
+ rpbi.framebuffer = fb;
+ rpbi.renderArea = {{0, 0}, {32, 32}};
+
+ m_commandBuffer->begin();
+ vkCmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
+    // Don't call vkCmdEndRenderPass, as the begin has been "skipped" due to the error condition
+ m_errorMonitor->VerifyFound();
+ m_commandBuffer->end();
+
+ vkDestroyFramebuffer(m_device->device(), fb, NULL);
+ vkDestroyRenderPass(m_device->device(), rp, NULL);
+}
+
+TEST_F(VkLayerTest, ColorBlendInvalidLogicOp) {
+ TEST_DESCRIPTION("Attempt to use invalid VkPipelineColorBlendStateCreateInfo::logicOp value.");
+
+ ASSERT_NO_FATAL_FAILURE(Init()); // enables all supported features
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ if (!m_device->phy().features().logicOp) {
+ printf("%s Device does not support logicOp feature; skipped.\n", kSkipPrefix);
+ return;
+ }
+
+ const auto set_shading_enable = [](CreatePipelineHelper &helper) {
+ helper.cb_ci_.logicOpEnable = VK_TRUE;
+ helper.cb_ci_.logicOp = static_cast<VkLogicOp>(VK_LOGIC_OP_END_RANGE + 1); // invalid logicOp to be tested
+ };
+ CreatePipelineHelper::OneshotTest(*this, set_shading_enable, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkPipelineColorBlendStateCreateInfo-logicOpEnable-00607");
+}
+
+TEST_F(VkLayerTest, ColorBlendUnsupportedLogicOp) {
+ TEST_DESCRIPTION("Attempt enabling VkPipelineColorBlendStateCreateInfo::logicOpEnable when logicOp feature is disabled.");
+
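+    // Zero-initialized features leave logicOp disabled, which is the condition this test exercises.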
+ VkPhysicalDeviceFeatures features{};
+ ASSERT_NO_FATAL_FAILURE(Init(&features));
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ const auto set_shading_enable = [](CreatePipelineHelper &helper) { helper.cb_ci_.logicOpEnable = VK_TRUE; };
+ CreatePipelineHelper::OneshotTest(*this, set_shading_enable, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkPipelineColorBlendStateCreateInfo-logicOpEnable-00606");
+}
+
+TEST_F(VkLayerTest, ColorBlendUnsupportedDualSourceBlend) {
+ TEST_DESCRIPTION("Attempt to use dual-source blending when dualSrcBlend feature is disabled.");
+
+ VkPhysicalDeviceFeatures features{};
+ ASSERT_NO_FATAL_FAILURE(Init(&features));
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ const auto set_dsb_src_color_enable = [](CreatePipelineHelper &helper) {
+ helper.cb_attachments_.blendEnable = VK_TRUE;
+ helper.cb_attachments_.srcColorBlendFactor = VK_BLEND_FACTOR_SRC1_COLOR; // bad!
+ helper.cb_attachments_.dstColorBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR;
+ helper.cb_attachments_.colorBlendOp = VK_BLEND_OP_ADD;
+ helper.cb_attachments_.srcAlphaBlendFactor = VK_BLEND_FACTOR_SRC_ALPHA;
+ helper.cb_attachments_.dstAlphaBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA;
+ helper.cb_attachments_.alphaBlendOp = VK_BLEND_OP_ADD;
+ };
+ CreatePipelineHelper::OneshotTest(*this, set_dsb_src_color_enable, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkPipelineColorBlendAttachmentState-srcColorBlendFactor-00608");
+
+ const auto set_dsb_dst_color_enable = [](CreatePipelineHelper &helper) {
+ helper.cb_attachments_.blendEnable = VK_TRUE;
+ helper.cb_attachments_.srcColorBlendFactor = VK_BLEND_FACTOR_SRC_COLOR;
+ helper.cb_attachments_.dstColorBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR; // bad
+ helper.cb_attachments_.colorBlendOp = VK_BLEND_OP_ADD;
+ helper.cb_attachments_.srcAlphaBlendFactor = VK_BLEND_FACTOR_SRC_ALPHA;
+ helper.cb_attachments_.dstAlphaBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA;
+ helper.cb_attachments_.alphaBlendOp = VK_BLEND_OP_ADD;
+ };
+ CreatePipelineHelper::OneshotTest(*this, set_dsb_dst_color_enable, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkPipelineColorBlendAttachmentState-dstColorBlendFactor-00609");
+
+ const auto set_dsb_src_alpha_enable = [](CreatePipelineHelper &helper) {
+ helper.cb_attachments_.blendEnable = VK_TRUE;
+ helper.cb_attachments_.srcColorBlendFactor = VK_BLEND_FACTOR_SRC_COLOR;
+ helper.cb_attachments_.dstColorBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR;
+ helper.cb_attachments_.colorBlendOp = VK_BLEND_OP_ADD;
+ helper.cb_attachments_.srcAlphaBlendFactor = VK_BLEND_FACTOR_SRC1_ALPHA; // bad
+ helper.cb_attachments_.dstAlphaBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA;
+ helper.cb_attachments_.alphaBlendOp = VK_BLEND_OP_ADD;
+ };
+ CreatePipelineHelper::OneshotTest(*this, set_dsb_src_alpha_enable, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkPipelineColorBlendAttachmentState-srcAlphaBlendFactor-00610");
+
+ const auto set_dsb_dst_alpha_enable = [](CreatePipelineHelper &helper) {
+ helper.cb_attachments_.blendEnable = VK_TRUE;
+ helper.cb_attachments_.srcColorBlendFactor = VK_BLEND_FACTOR_SRC_COLOR;
+ helper.cb_attachments_.dstColorBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR;
+ helper.cb_attachments_.colorBlendOp = VK_BLEND_OP_ADD;
+ helper.cb_attachments_.srcAlphaBlendFactor = VK_BLEND_FACTOR_SRC_ALPHA;
+ helper.cb_attachments_.dstAlphaBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA; // bad!
+ helper.cb_attachments_.alphaBlendOp = VK_BLEND_OP_ADD;
+ };
+ CreatePipelineHelper::OneshotTest(*this, set_dsb_dst_alpha_enable, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkPipelineColorBlendAttachmentState-dstAlphaBlendFactor-00611");
+}
+
+#if GTEST_IS_THREADSAFE
+struct thread_data_struct {
+ VkCommandBuffer commandBuffer;
+ VkDevice device;
+ VkEvent event;
+ bool bailout;
+};
+
+extern "C" void *AddToCommandBuffer(void *arg) {
+ struct thread_data_struct *data = (struct thread_data_struct *)arg;
+
+ for (int i = 0; i < 80000; i++) {
+ vkCmdSetEvent(data->commandBuffer, data->event, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT);
+ if (data->bailout) {
+ break;
+ }
+ }
+ return NULL;
+}
+
+TEST_F(VkLayerTest, ThreadCommandBufferCollision) {
+ test_platform_thread thread;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "THREADING ERROR");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitViewport());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ // Calls AllocateCommandBuffers
+ VkCommandBufferObj commandBuffer(m_device, m_commandPool);
+
+ commandBuffer.begin();
+
+ VkEventCreateInfo event_info;
+ VkEvent event;
+ VkResult err;
+
+ memset(&event_info, 0, sizeof(event_info));
+ event_info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
+
+ err = vkCreateEvent(device(), &event_info, NULL, &event);
+ ASSERT_VK_SUCCESS(err);
+
+ err = vkResetEvent(device(), event);
+ ASSERT_VK_SUCCESS(err);
+
+ struct thread_data_struct data;
+ data.commandBuffer = commandBuffer.handle();
+ data.event = event;
+ data.bailout = false;
+ m_errorMonitor->SetBailout(&data.bailout);
+
+ // First do some correct operations using multiple threads.
+ // Add many entries to command buffer from another thread.
+ test_platform_thread_create(&thread, AddToCommandBuffer, (void *)&data);
+ // Make non-conflicting calls from this thread at the same time.
+ for (int i = 0; i < 80000; i++) {
+ uint32_t count;
+ vkEnumeratePhysicalDevices(instance(), &count, NULL);
+ }
+ test_platform_thread_join(thread, NULL);
+
+ // Then do some incorrect operations using multiple threads.
+ // Add many entries to command buffer from another thread.
+ test_platform_thread_create(&thread, AddToCommandBuffer, (void *)&data);
+ // Add many entries to command buffer from this thread at the same time.
+ AddToCommandBuffer(&data);
+
+ test_platform_thread_join(thread, NULL);
+ commandBuffer.end();
+
+ m_errorMonitor->SetBailout(NULL);
+
+ m_errorMonitor->VerifyFound();
+
+ vkDestroyEvent(device(), event, NULL);
+}
+#endif // GTEST_IS_THREADSAFE
+
+TEST_F(VkLayerTest, InvalidSPIRVCodeSize) {
+ TEST_DESCRIPTION("Test that errors are produced for a spirv modules with invalid code sizes");
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Invalid SPIR-V header");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkShaderModule module;
+ VkShaderModuleCreateInfo moduleCreateInfo;
+ struct icd_spv_header spv;
+
+ spv.magic = ICD_SPV_MAGIC;
+ spv.version = ICD_SPV_VERSION;
+ spv.gen_magic = 0;
+
+ moduleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
+ moduleCreateInfo.pNext = NULL;
+ moduleCreateInfo.pCode = (const uint32_t *)&spv;
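+    // A codeSize of 4 bytes cannot even hold the 5-word (20-byte) SPIR-V header, so header validation fails.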
+ moduleCreateInfo.codeSize = 4;
+ moduleCreateInfo.flags = 0;
+ vkCreateShaderModule(m_device->device(), &moduleCreateInfo, NULL, &module);
+
+ m_errorMonitor->VerifyFound();
+
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) out float x;\n"
+ "void main(){\n"
+ " gl_Position = vec4(1);\n"
+ " x = 0;\n"
+ "}\n";
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkShaderModuleCreateInfo-pCode-01376");
+ std::vector<unsigned int> shader;
+ VkShaderModuleCreateInfo module_create_info;
+ VkShaderModule shader_module;
+ module_create_info.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
+ module_create_info.pNext = NULL;
+ this->GLSLtoSPV(VK_SHADER_STAGE_VERTEX_BIT, vsSource, shader);
+ module_create_info.pCode = shader.data();
+ // Introduce failure by making codeSize a non-multiple of 4
+ module_create_info.codeSize = shader.size() * sizeof(unsigned int) - 1;
+ module_create_info.flags = 0;
+ vkCreateShaderModule(m_device->handle(), &module_create_info, NULL, &shader_module);
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, InvalidSPIRVMagic) {
+ TEST_DESCRIPTION("Test that an error is produced for a spirv module with a bad magic number");
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Invalid SPIR-V magic number");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkShaderModule module;
+ VkShaderModuleCreateInfo moduleCreateInfo;
+ struct icd_spv_header spv;
+
+ spv.magic = (uint32_t)~ICD_SPV_MAGIC;
+ spv.version = ICD_SPV_VERSION;
+ spv.gen_magic = 0;
+
+ moduleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
+ moduleCreateInfo.pNext = NULL;
+ moduleCreateInfo.pCode = (const uint32_t *)&spv;
+ moduleCreateInfo.codeSize = sizeof(spv) + 16;
+ moduleCreateInfo.flags = 0;
+ vkCreateShaderModule(m_device->device(), &moduleCreateInfo, NULL, &module);
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineVertexOutputNotConsumed) {
+ TEST_DESCRIPTION("Test that a warning is produced for a vertex output that is not consumed by the fragment stage");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, "not consumed by fragment shader");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) out float x;\n"
+ "void main(){\n"
+ " gl_Position = vec4(1);\n"
+ " x = 0;\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) out vec4 color;\n"
+ "void main(){\n"
+ " color = vec4(1);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddDefaultColorAttachment();
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.AppendDummy();
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkPositiveLayerTest, CreatePipelineComplexTypes) {
+ TEST_DESCRIPTION("Smoke test for complex types across VS/FS boundary");
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ if (!m_device->phy().features().tessellationShader) {
+ printf("%s Device does not support tessellation shaders; skipped.\n", kSkipPrefix);
+ return;
+ }
+
+ m_errorMonitor->ExpectSuccess();
+
+ char const *vsSource =
+ "#version 450\n"
+ "void main() {}";
+ char const *tcsSource =
+ "#version 450\n"
+ "layout(vertices=3) out;\n"
+ "struct S { int x; };\n"
+ "layout(location=2) patch out B { S s; } b;\n"
+ "void main() {\n"
+ " gl_TessLevelOuter[0] = gl_TessLevelOuter[1] = gl_TessLevelOuter[2] = 1;\n"
+ " gl_TessLevelInner[0] = 1;\n"
+ " b.s.x = 1;\n"
+ "}\n";
+
+ char const *tesSource =
+ "#version 450\n"
+ "layout(triangles, equal_spacing, cw) in;\n"
+ "struct S { int x; };\n"
+ "layout(location=2) patch in B { S s; } b;\n"
+ "void main() { gl_Position = vec4(b.s.x); }\n";
+
+ char const *fsSource =
+ "#version 450\n"
+ "layout(location=0) out vec4 c;\n"
+ "void main() { c = vec4(1); }\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj tcs(m_device, tcsSource, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, this);
+ VkShaderObj tes(m_device, tesSource, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineInputAssemblyStateCreateInfo iasci{VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO, nullptr, 0,
+ VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, VK_FALSE};
+
+ VkPipelineTessellationStateCreateInfo tsci{VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO, nullptr, 0, 3};
+
+ VkPipelineObj pipe(m_device);
+
+ pipe.AddDefaultColorAttachment();
+ pipe.AddShader(&vs);
+ pipe.AddShader(&tcs);
+ pipe.AddShader(&tes);
+ pipe.AddShader(&fs);
+ pipe.SetInputAssembly(&iasci);
+ pipe.SetTessellation(&tsci);
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.AppendDummy();
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+
+ m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineCheckShaderBadSpecialization) {
+ TEST_DESCRIPTION("Challenge core_validation with shader validation issues related to vkCreateGraphicsPipelines.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ const char *bad_specialization_message =
+ "Specialization entry 0 (for constant id 0) references memory outside provided specialization data ";
+
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "void main(){\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout (constant_id = 0) const float r = 0.0f;\n"
+ "layout(location = 0) out vec4 uFragColor;\n"
+ "void main(){\n"
+ " uFragColor = vec4(r,1,0,1);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ const VkPipelineLayoutObj pipeline_layout(m_device);
+
+ VkPipelineViewportStateCreateInfo vp_state_create_info = {};
+ vp_state_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
+ vp_state_create_info.viewportCount = 1;
+ VkViewport viewport = {0.0f, 0.0f, 64.0f, 64.0f, 0.0f, 1.0f};
+ vp_state_create_info.pViewports = &viewport;
+ vp_state_create_info.scissorCount = 1;
+
+ VkDynamicState scissor_state = VK_DYNAMIC_STATE_SCISSOR;
+
+ VkPipelineDynamicStateCreateInfo pipeline_dynamic_state_create_info = {};
+ pipeline_dynamic_state_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
+ pipeline_dynamic_state_create_info.dynamicStateCount = 1;
+ pipeline_dynamic_state_create_info.pDynamicStates = &scissor_state;
+
+ VkPipelineShaderStageCreateInfo shader_stage_create_info[2] = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
+
+ VkPipelineVertexInputStateCreateInfo vertex_input_create_info = {};
+ vertex_input_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
+
+ VkPipelineInputAssemblyStateCreateInfo input_assembly_create_info = {};
+ input_assembly_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
+ input_assembly_create_info.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
+
+ VkPipelineRasterizationStateCreateInfo rasterization_state_create_info = {};
+ rasterization_state_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
+ rasterization_state_create_info.pNext = nullptr;
+ rasterization_state_create_info.lineWidth = 1.0f;
+ rasterization_state_create_info.rasterizerDiscardEnable = true;
+
+ VkPipelineColorBlendAttachmentState color_blend_attachment_state = {};
+ color_blend_attachment_state.blendEnable = VK_FALSE;
+ color_blend_attachment_state.colorWriteMask = 0xf;
+
+ VkPipelineColorBlendStateCreateInfo color_blend_state_create_info = {};
+ color_blend_state_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
+ color_blend_state_create_info.attachmentCount = 1;
+ color_blend_state_create_info.pAttachments = &color_blend_attachment_state;
+
+ VkGraphicsPipelineCreateInfo graphicspipe_create_info = {};
+ graphicspipe_create_info.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
+ graphicspipe_create_info.stageCount = 2;
+ graphicspipe_create_info.pStages = shader_stage_create_info;
+ graphicspipe_create_info.pVertexInputState = &vertex_input_create_info;
+ graphicspipe_create_info.pInputAssemblyState = &input_assembly_create_info;
+ graphicspipe_create_info.pViewportState = &vp_state_create_info;
+ graphicspipe_create_info.pRasterizationState = &rasterization_state_create_info;
+ graphicspipe_create_info.pColorBlendState = &color_blend_state_create_info;
+ graphicspipe_create_info.pDynamicState = &pipeline_dynamic_state_create_info;
+ graphicspipe_create_info.flags = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT;
+ graphicspipe_create_info.layout = pipeline_layout.handle();
+ graphicspipe_create_info.renderPass = renderPass();
+
+ VkPipelineCacheCreateInfo pipeline_cache_create_info = {};
+ pipeline_cache_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
+
+ VkPipelineCache pipelineCache;
+ ASSERT_VK_SUCCESS(vkCreatePipelineCache(m_device->device(), &pipeline_cache_create_info, nullptr, &pipelineCache));
+
+ // This structure maps constant ids to data locations.
+ const VkSpecializationMapEntry entry =
+ // id, offset, size
+ {0, 4, sizeof(uint32_t)}; // Challenge core validation by using a bogus offset.
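+    // With offset 4 and size 4, the entry references bytes [4, 8) of the specialization data, but dataSize below is only 4 bytes.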
+
+ uint32_t data = 1;
+
+ // Set up the info describing spec map and data
+ const VkSpecializationInfo specialization_info = {
+ 1,
+ &entry,
+ 1 * sizeof(float),
+ &data,
+ };
+ shader_stage_create_info[0].pSpecializationInfo = &specialization_info;
+
+ VkPipeline pipeline;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, bad_specialization_message);
+ vkCreateGraphicsPipelines(m_device->device(), pipelineCache, 1, &graphicspipe_create_info, nullptr, &pipeline);
+ m_errorMonitor->VerifyFound();
+
+ vkDestroyPipelineCache(m_device->device(), pipelineCache, nullptr);
+}
+
+TEST_F(VkLayerTest, CreatePipelineCheckShaderDescriptorTypeMismatch) {
+ TEST_DESCRIPTION("Challenge core_validation with shader validation issues related to vkCreateGraphicsPipelines.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ const char *descriptor_type_mismatch_message = "Type mismatch on descriptor slot 0.0 ";
+
+ OneOffDescriptorSet ds(m_device, {
+ {0, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+ });
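+ // The layout above provides a storage buffer at binding 0, while the vertex shader below declares a uniform buffer there.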
+
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "layout (std140, set = 0, binding = 0) uniform buf {\n"
+ " mat4 mvp;\n"
+ "} ubuf;\n"
+ "void main(){\n"
+ " gl_Position = ubuf.mvp * vec4(1);\n"
+ "}\n";
+
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location = 0) out vec4 uFragColor;\n"
+ "void main(){\n"
+ " uFragColor = vec4(0,1,0,1);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ const VkPipelineLayoutObj pipeline_layout(m_device, {&ds.layout_});
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddDefaultColorAttachment();
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, descriptor_type_mismatch_message);
+ pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineCheckShaderDescriptorNotAccessible) {
+ TEST_DESCRIPTION(
+ "Create a pipeline in which a descriptor used by a shader stage does not include that stage in its stageFlags.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ const char *descriptor_not_accessible_message = "Shader uses descriptor slot 0.0 ";
+
+ OneOffDescriptorSet ds(m_device, {
+ {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT /*!*/, nullptr},
+ });
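+ // The binding's stageFlags are limited to the fragment stage, yet the vertex shader below reads it.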
+
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "layout (std140, set = 0, binding = 0) uniform buf {\n"
+ " mat4 mvp;\n"
+ "} ubuf;\n"
+ "void main(){\n"
+ " gl_Position = ubuf.mvp * vec4(1);\n"
+ "}\n";
+
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location = 0) out vec4 uFragColor;\n"
+ "void main(){\n"
+ " uFragColor = vec4(0,1,0,1);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ const VkPipelineLayoutObj pipeline_layout(m_device, {&ds.layout_});
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddDefaultColorAttachment();
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, descriptor_not_accessible_message);
+ pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineCheckShaderPushConstantNotAccessible) {
+ TEST_DESCRIPTION(
+ "Create a graphics pipeline in which a push constant range containing a push constant block member is not accessible from "
+ "the current shader stage.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ const char *push_constant_not_accessible_message =
+ "Push constant range covering variable starting at offset 0 not accessible from stage VK_SHADER_STAGE_VERTEX_BIT";
+
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "layout(push_constant, std430) uniform foo { float x; } consts;\n"
+ "void main(){\n"
+ " gl_Position = vec4(consts.x);\n"
+ "}\n";
+
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location = 0) out vec4 uFragColor;\n"
+ "void main(){\n"
+ " uFragColor = vec4(0,1,0,1);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ // Set up a push constant range
+ VkPushConstantRange push_constant_range = {};
+ // Set to the wrong stage to challenge core_validation
+ push_constant_range.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
+ push_constant_range.size = 4;
+
+ const VkPipelineLayoutObj pipeline_layout(m_device, {}, {push_constant_range});
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddDefaultColorAttachment();
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, push_constant_not_accessible_message);
+ pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineCheckShaderNotEnabled) {
+ TEST_DESCRIPTION(
+ "Create a graphics pipeline in which a capability declared by the shader requires a feature not enabled on the device.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ const char *feature_not_enabled_message =
+ "Shader requires VkPhysicalDeviceFeatures::shaderFloat64 but is not enabled on the device";
+
+ // Some awkward steps are required to test with custom device features.
+ std::vector<const char *> device_extension_names;
+ auto features = m_device->phy().features();
+ // Disable support for 64 bit floats
+ features.shaderFloat64 = false;
+ // The sacrificial device object
+ VkDeviceObj test_device(0, gpu(), device_extension_names, &features);
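+ // The pipeline is built on test_device, which lacks shaderFloat64, so the fragment shader's use of dvec4 triggers the error.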
+
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "void main(){\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) out vec4 color;\n"
+ "void main(){\n"
+ " dvec4 green = vec4(0.0, 1.0, 0.0, 1.0);\n"
+ " color = vec4(green);\n"
+ "}\n";
+
+ VkShaderObj vs(&test_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(&test_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkRenderpassObj render_pass(&test_device);
+
+ VkPipelineObj pipe(&test_device);
+ pipe.AddDefaultColorAttachment();
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+
+ const VkPipelineLayoutObj pipeline_layout(&test_device);
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, feature_not_enabled_message);
+ pipe.CreateVKPipeline(pipeline_layout.handle(), render_pass.handle());
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreateShaderModuleCheckBadCapability) {
+ TEST_DESCRIPTION("Create a shader in which a capability declared by the shader is not supported.");
+ // Note that this failure message comes from spirv-tools, specifically the validator.
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ const std::string spv_source = R"(
+ OpCapability ImageRect
+ OpEntryPoint Vertex %main "main"
+ %main = OpFunction %void None %3
+ OpReturn
+ OpFunctionEnd
+ )";
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Capability ImageRect is not allowed by Vulkan");
+
+ std::vector<unsigned int> spv;
+ VkShaderModuleCreateInfo module_create_info;
+ VkShaderModule shader_module;
+ module_create_info.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
+ module_create_info.pNext = NULL;
+ ASMtoSPV(SPV_ENV_VULKAN_1_0, 0, spv_source.data(), spv);
+ module_create_info.pCode = spv.data();
+ module_create_info.codeSize = spv.size() * sizeof(unsigned int);
+ module_create_info.flags = 0;
+
+ VkResult err = vkCreateShaderModule(m_device->handle(), &module_create_info, NULL, &shader_module);
+ m_errorMonitor->VerifyFound();
+ if (err == VK_SUCCESS) {
+ vkDestroyShaderModule(m_device->handle(), shader_module, NULL);
+ }
+}
+
+TEST_F(VkPositiveLayerTest, ShaderRelaxedBlockLayout) {
+ // This is a positive test, no errors expected
+ // Verifies the ability to relax block layout rules with a shader that requires them to be relaxed
+ TEST_DESCRIPTION("Create a shader that requires relaxed block layout.");
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+ // The Relaxed Block Layout extension was promoted to core in 1.1.
+ // Go ahead and check for it and turn it on in case a 1.0 device has it.
+ if (!DeviceExtensionSupported(gpu(), nullptr, VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME)) {
+ printf("%s Extension %s not supported, skipping this pass. \n", kSkipPrefix, VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME);
+ return;
+ }
+ m_device_extension_names.push_back(VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME);
+ ASSERT_NO_FATAL_FAILURE(InitState());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ // Vertex shader requiring relaxed layout.
+ // Without relaxed layout, we would expect a message like:
+ // "Structure id 2 decorated as Block for variable in Uniform storage class
+ // must follow standard uniform buffer layout rules: member 1 at offset 4 is not aligned to 16"
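+ // Relaxed block layout allows the vec3 member to be aligned to its component size (4 bytes) instead of 16.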
+
+ const std::string spv_source = R"(
+ OpCapability Shader
+ OpMemoryModel Logical GLSL450
+ OpEntryPoint Vertex %main "main"
+ OpSource GLSL 450
+ OpMemberDecorate %S 0 Offset 0
+ OpMemberDecorate %S 1 Offset 4
+ OpDecorate %S Block
+ OpDecorate %B DescriptorSet 0
+ OpDecorate %B Binding 0
+ %void = OpTypeVoid
+ %3 = OpTypeFunction %void
+ %float = OpTypeFloat 32
+ %v3float = OpTypeVector %float 3
+ %S = OpTypeStruct %float %v3float
+%_ptr_Uniform_S = OpTypePointer Uniform %S
+ %B = OpVariable %_ptr_Uniform_S Uniform
+ %main = OpFunction %void None %3
+ %5 = OpLabel
+ OpReturn
+ OpFunctionEnd
+ )";
+
+ std::vector<unsigned int> spv;
+ VkShaderModuleCreateInfo module_create_info;
+ VkShaderModule shader_module;
+ module_create_info.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
+ module_create_info.pNext = NULL;
+ ASMtoSPV(SPV_ENV_VULKAN_1_0, 0, spv_source.data(), spv);
+ module_create_info.pCode = spv.data();
+ module_create_info.codeSize = spv.size() * sizeof(unsigned int);
+ module_create_info.flags = 0;
+
+ m_errorMonitor->ExpectSuccess();
+ VkResult err = vkCreateShaderModule(m_device->handle(), &module_create_info, NULL, &shader_module);
+ m_errorMonitor->VerifyNotFound();
+ if (err == VK_SUCCESS) {
+ vkDestroyShaderModule(m_device->handle(), shader_module, NULL);
+ }
+}
+
+TEST_F(VkPositiveLayerTest, ShaderScalarBlockLayout) {
+ // This is a positive test, no errors expected
+ // Verifies the ability to use scalar block layout rules with a shader that requires them
+ TEST_DESCRIPTION("Create a shader that requires scalar block layout.");
+ // Enable req'd extensions
+ if (!InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+ printf("%s Extension %s not supported, skipping this pass. \n", kSkipPrefix,
+ VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ return;
+ }
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+ // Check for the Scalar Block Layout extension and turn it on if it's available
+ if (!DeviceExtensionSupported(gpu(), nullptr, VK_EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME)) {
+ printf("%s Extension %s not supported, skipping this pass. \n", kSkipPrefix, VK_EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME);
+ return;
+ }
+ m_device_extension_names.push_back(VK_EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME);
+
+ PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 =
+ (PFN_vkGetPhysicalDeviceFeatures2)vkGetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+
+ auto scalar_block_features = lvl_init_struct<VkPhysicalDeviceScalarBlockLayoutFeaturesEXT>(NULL);
+ scalar_block_features.scalarBlockLayout = VK_TRUE;
+ auto query_features2 = lvl_init_struct<VkPhysicalDeviceFeatures2>(&scalar_block_features);
+ vkGetPhysicalDeviceFeatures2(gpu(), &query_features2);
+
+ auto set_features2 = lvl_init_struct<VkPhysicalDeviceFeatures2>(&scalar_block_features);
+
+ ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &set_features2));
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ // Vertex shader requiring scalar layout.
+ // Without scalar layout, we would expect a message like:
+ // "Structure id 2 decorated as Block for variable in Uniform storage class
+ // must follow standard uniform buffer layout rules: member 1 at offset 4 is not aligned to 16"
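+ // Even relaxed block layout rejects the vec3 at offset 8 because it would straddle a 16-byte boundary; scalarBlockLayout lifts that restriction.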
+
+ const std::string spv_source = R"(
+ OpCapability Shader
+ OpMemoryModel Logical GLSL450
+ OpEntryPoint Vertex %main "main"
+ OpSource GLSL 450
+ OpMemberDecorate %S 0 Offset 0
+ OpMemberDecorate %S 1 Offset 4
+ OpMemberDecorate %S 2 Offset 8
+ OpDecorate %S Block
+ OpDecorate %B DescriptorSet 0
+ OpDecorate %B Binding 0
+ %void = OpTypeVoid
+ %3 = OpTypeFunction %void
+ %float = OpTypeFloat 32
+ %v3float = OpTypeVector %float 3
+ %S = OpTypeStruct %float %float %v3float
+%_ptr_Uniform_S = OpTypePointer Uniform %S
+ %B = OpVariable %_ptr_Uniform_S Uniform
+ %main = OpFunction %void None %3
+ %5 = OpLabel
+ OpReturn
+ OpFunctionEnd
+ )";
+
+ std::vector<unsigned int> spv;
+ VkShaderModuleCreateInfo module_create_info;
+ VkShaderModule shader_module;
+ module_create_info.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
+ module_create_info.pNext = NULL;
+ ASMtoSPV(SPV_ENV_VULKAN_1_0, 0, spv_source.data(), spv);
+ module_create_info.pCode = spv.data();
+ module_create_info.codeSize = spv.size() * sizeof(unsigned int);
+ module_create_info.flags = 0;
+
+ m_errorMonitor->ExpectSuccess();
+ VkResult err = vkCreateShaderModule(m_device->handle(), &module_create_info, NULL, &shader_module);
+ m_errorMonitor->VerifyNotFound();
+ if (err == VK_SUCCESS) {
+ vkDestroyShaderModule(m_device->handle(), shader_module, NULL);
+ }
+}
+
+TEST_F(VkPositiveLayerTest, SpirvGroupDecorations) {
+ TEST_DESCRIPTION("Test shader validation support for group decorations.");
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ ASSERT_NO_FATAL_FAILURE(InitState());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ const std::string spv_source = R"(
+ OpCapability Shader
+ OpMemoryModel Logical GLSL450
+ OpEntryPoint GLCompute %main "main" %gl_GlobalInvocationID
+ OpExecutionMode %main LocalSize 1 1 1
+ OpSource GLSL 430
+ OpName %main "main"
+ OpName %gl_GlobalInvocationID "gl_GlobalInvocationID"
+ OpDecorate %gl_GlobalInvocationID BuiltIn GlobalInvocationId
+ OpDecorate %_runtimearr_float ArrayStride 4
+ OpDecorate %4 BufferBlock
+ OpDecorate %5 Offset 0
+ %4 = OpDecorationGroup
+ %5 = OpDecorationGroup
+ OpGroupDecorate %4 %_struct_6 %_struct_7 %_struct_8 %_struct_9 %_struct_10 %_struct_11
+ OpGroupMemberDecorate %5 %_struct_6 0 %_struct_7 0 %_struct_8 0 %_struct_9 0 %_struct_10 0 %_struct_11 0
+ OpDecorate %12 DescriptorSet 0
+ OpDecorate %13 DescriptorSet 0
+ OpDecorate %13 NonWritable
+ OpDecorate %13 Restrict
+ %14 = OpDecorationGroup
+ %12 = OpDecorationGroup
+ %13 = OpDecorationGroup
+ OpGroupDecorate %12 %15
+ OpGroupDecorate %12 %15
+ OpGroupDecorate %12 %15
+ OpDecorate %15 DescriptorSet 0
+ OpDecorate %15 Binding 5
+ OpGroupDecorate %14 %16
+ OpDecorate %16 DescriptorSet 0
+ OpDecorate %16 Binding 0
+ OpGroupDecorate %12 %17
+ OpDecorate %17 Binding 1
+ OpGroupDecorate %13 %18 %19
+ OpDecorate %18 Binding 2
+ OpDecorate %19 Binding 3
+ OpGroupDecorate %14 %20
+ OpGroupDecorate %12 %20
+ OpGroupDecorate %13 %20
+ OpDecorate %20 Binding 4
+ %bool = OpTypeBool
+ %void = OpTypeVoid
+ %23 = OpTypeFunction %void
+ %uint = OpTypeInt 32 0
+ %int = OpTypeInt 32 1
+ %float = OpTypeFloat 32
+ %v3uint = OpTypeVector %uint 3
+ %v3float = OpTypeVector %float 3
+%_ptr_Input_v3uint = OpTypePointer Input %v3uint
+%_ptr_Uniform_int = OpTypePointer Uniform %int
+%_ptr_Uniform_float = OpTypePointer Uniform %float
+%_runtimearr_int = OpTypeRuntimeArray %int
+%_runtimearr_float = OpTypeRuntimeArray %float
+%gl_GlobalInvocationID = OpVariable %_ptr_Input_v3uint Input
+ %int_0 = OpConstant %int 0
+ %_struct_6 = OpTypeStruct %_runtimearr_float
+%_ptr_Uniform__struct_6 = OpTypePointer Uniform %_struct_6
+ %15 = OpVariable %_ptr_Uniform__struct_6 Uniform
+ %_struct_7 = OpTypeStruct %_runtimearr_float
+%_ptr_Uniform__struct_7 = OpTypePointer Uniform %_struct_7
+ %16 = OpVariable %_ptr_Uniform__struct_7 Uniform
+ %_struct_8 = OpTypeStruct %_runtimearr_float
+%_ptr_Uniform__struct_8 = OpTypePointer Uniform %_struct_8
+ %17 = OpVariable %_ptr_Uniform__struct_8 Uniform
+ %_struct_9 = OpTypeStruct %_runtimearr_float
+%_ptr_Uniform__struct_9 = OpTypePointer Uniform %_struct_9
+ %18 = OpVariable %_ptr_Uniform__struct_9 Uniform
+ %_struct_10 = OpTypeStruct %_runtimearr_float
+%_ptr_Uniform__struct_10 = OpTypePointer Uniform %_struct_10
+ %19 = OpVariable %_ptr_Uniform__struct_10 Uniform
+ %_struct_11 = OpTypeStruct %_runtimearr_float
+%_ptr_Uniform__struct_11 = OpTypePointer Uniform %_struct_11
+ %20 = OpVariable %_ptr_Uniform__struct_11 Uniform
+ %main = OpFunction %void None %23
+ %40 = OpLabel
+ %41 = OpLoad %v3uint %gl_GlobalInvocationID
+ %42 = OpCompositeExtract %uint %41 0
+ %43 = OpAccessChain %_ptr_Uniform_float %16 %int_0 %42
+ %44 = OpAccessChain %_ptr_Uniform_float %17 %int_0 %42
+ %45 = OpAccessChain %_ptr_Uniform_float %18 %int_0 %42
+ %46 = OpAccessChain %_ptr_Uniform_float %19 %int_0 %42
+ %47 = OpAccessChain %_ptr_Uniform_float %20 %int_0 %42
+ %48 = OpAccessChain %_ptr_Uniform_float %15 %int_0 %42
+ %49 = OpLoad %float %43
+ %50 = OpLoad %float %44
+ %51 = OpLoad %float %45
+ %52 = OpLoad %float %46
+ %53 = OpLoad %float %47
+ %54 = OpFAdd %float %49 %50
+ %55 = OpFAdd %float %54 %51
+ %56 = OpFAdd %float %55 %52
+ %57 = OpFAdd %float %56 %53
+ OpStore %48 %57
+ OpReturn
+ OpFunctionEnd
+)";
+
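+ // The compute shader above applies its DescriptorSet/Binding decorations partly through decoration groups;
+ // the boilerplate below provides the six storage buffers at bindings 0-5 that it expects.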
+ // CreateDescriptorSetLayout
+ VkDescriptorSetLayoutBinding dslb[6] = {};
+ for (auto i = 0; i < 6; i++) {
+ dslb[i].binding = i;
+ dslb[i].descriptorCount = 1;
+ dslb[i].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
+ dslb[i].pImmutableSamplers = NULL;
+ dslb[i].stageFlags = VK_SHADER_STAGE_COMPUTE_BIT | VK_SHADER_STAGE_ALL;
+ }
+
+ VkDescriptorSetLayoutCreateInfo ds_layout_ci = {};
+ ds_layout_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
+ ds_layout_ci.flags = 0;
+ ds_layout_ci.bindingCount = 6;
+ ds_layout_ci.pBindings = dslb;
+
+ VkDescriptorSetLayout ds_layout = {};
+ vkCreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
+
+ // CreatePipelineLayout
+ VkPipelineLayoutCreateInfo pipeline_layout_ci = {};
+ pipeline_layout_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
+ pipeline_layout_ci.pNext = NULL;
+ pipeline_layout_ci.flags = 0;
+ pipeline_layout_ci.setLayoutCount = 1;
+ pipeline_layout_ci.pSetLayouts = &ds_layout;
+ VkPipelineLayout pipeline_layout = VK_NULL_HANDLE;
+ vkCreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
+
+ // Create DescriptorPool
+ VkDescriptorPoolSize ds_type_count = {};
+ ds_type_count.type = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
+ ds_type_count.descriptorCount = 6;
+
+ VkDescriptorPoolCreateInfo ds_pool_ci = {};
+ ds_pool_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
+ ds_pool_ci.pNext = NULL;
+ ds_pool_ci.flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT;
+ ds_pool_ci.maxSets = 1;
+ ds_pool_ci.poolSizeCount = 1;
+ ds_pool_ci.pPoolSizes = &ds_type_count;
+
+ VkDescriptorPool ds_pool = VK_NULL_HANDLE;
+ vkCreateDescriptorPool(m_device->device(), &ds_pool_ci, NULL, &ds_pool);
+
+ // AllocateDescriptorSets
+ VkDescriptorSetAllocateInfo ds_alloc_info = {};
+ ds_alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
+ ds_alloc_info.descriptorSetCount = 1;
+ ds_alloc_info.descriptorPool = ds_pool;
+ ds_alloc_info.pSetLayouts = &ds_layout;
+
+ VkDescriptorSet descriptorSet;
+ vkAllocateDescriptorSets(m_device->device(), &ds_alloc_info, &descriptorSet);
+
+ // CreateShaderModule
+ std::vector<unsigned int> spv;
+ VkShaderModuleCreateInfo module_create_info;
+ VkShaderModule shader_module;
+ module_create_info.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
+ module_create_info.pNext = NULL;
+ ASMtoSPV(SPV_ENV_VULKAN_1_0, 0, spv_source.data(), spv);
+ module_create_info.pCode = spv.data();
+ module_create_info.codeSize = spv.size() * sizeof(unsigned int);
+ module_create_info.flags = 0;
+ vkCreateShaderModule(m_device->handle(), &module_create_info, NULL, &shader_module);
+
+ // CreateComputePipelines
+ VkComputePipelineCreateInfo pipeline_info = {};
+ pipeline_info.sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO;
+ pipeline_info.pNext = nullptr;
+ pipeline_info.flags = 0;
+ pipeline_info.layout = pipeline_layout;
+ pipeline_info.basePipelineHandle = VK_NULL_HANDLE;
+ pipeline_info.basePipelineIndex = -1;
+ pipeline_info.stage.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+ pipeline_info.stage.pNext = nullptr;
+ pipeline_info.stage.flags = 0;
+ pipeline_info.stage.stage = VK_SHADER_STAGE_COMPUTE_BIT;
+ pipeline_info.stage.module = shader_module;
+ pipeline_info.stage.pName = "main";
+ pipeline_info.stage.pSpecializationInfo = nullptr;
+ VkPipeline cs_pipeline;
+
+ m_errorMonitor->ExpectSuccess();
+ vkCreateComputePipelines(device(), VK_NULL_HANDLE, 1, &pipeline_info, nullptr, &cs_pipeline);
+ m_errorMonitor->VerifyNotFound();
+
+ vkDestroyPipeline(device(), cs_pipeline, nullptr);
+ vkDestroyShaderModule(device(), shader_module, nullptr);
+ vkDestroyDescriptorPool(device(), ds_pool, nullptr);
+ vkDestroyPipelineLayout(device(), pipeline_layout, nullptr);
+ vkDestroyDescriptorSetLayout(device(), ds_layout, nullptr);
+}
+
+TEST_F(VkPositiveLayerTest, CreatePipelineCheckShaderCapabilityExtension1of2) {
+ // This is a positive test, no errors expected
+ // Verifies the ability to deal with a shader that declares a non-unique SPIRV capability ID
+ TEST_DESCRIPTION("Create a shader which uses a non-unique capability ID extension, 1 of 2");
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ if (!DeviceExtensionSupported(gpu(), nullptr, VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME)) {
+ printf("%s Extension %s not supported, skipping this pass. \n", kSkipPrefix,
+ VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME);
+ return;
+ }
+ m_device_extension_names.push_back(VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME);
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ // These tests require that the device support multiViewport
+ if (!m_device->phy().features().multiViewport) {
+ printf("%s Device does not support multiViewport, test skipped.\n", kSkipPrefix);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ // Vertex shader using viewport array capability
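+ // Writing gl_ViewportIndex from a vertex shader emits the ShaderViewportIndexLayer capability, whose SPIR-V
+ // enum value is shared by the EXT and NV aliases; this variant of the test enables the EXT extension.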
+ char const *vsSource =
+ "#version 450\n"
+ "#extension GL_ARB_shader_viewport_layer_array : enable\n"
+ "void main() {\n"
+ " gl_ViewportIndex = 1;\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddDefaultColorAttachment();
+ pipe.AddShader(&vs);
+
+ const VkPipelineLayoutObj pipe_layout(m_device, {});
+
+ m_errorMonitor->ExpectSuccess();
+ pipe.CreateVKPipeline(pipe_layout.handle(), renderPass());
+ m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, CreatePipelineCheckShaderCapabilityExtension2of2) {
+ // This is a positive test, no errors expected
+ // Verifies the ability to deal with a shader that declares a non-unique SPIRV capability ID
+ TEST_DESCRIPTION("Create a shader which uses a non-unique capability ID extension, 2 of 2");
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ if (!DeviceExtensionSupported(gpu(), nullptr, VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME)) {
+ printf("%s Extension %s not supported, skipping this pass. \n", kSkipPrefix, VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME);
+ return;
+ }
+ m_device_extension_names.push_back(VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME);
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ // These tests require that the device support multiViewport
+ if (!m_device->phy().features().multiViewport) {
+ printf("%s Device does not support multiViewport, test skipped.\n", kSkipPrefix);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ // Vertex shader using viewport array capability
+ char const *vsSource =
+ "#version 450\n"
+ "#extension GL_ARB_shader_viewport_layer_array : enable\n"
+ "void main() {\n"
+ " gl_ViewportIndex = 1;\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddDefaultColorAttachment();
+ pipe.AddShader(&vs);
+
+ const VkPipelineLayoutObj pipe_layout(m_device, {});
+
+ m_errorMonitor->ExpectSuccess();
+ pipe.CreateVKPipeline(pipe_layout.handle(), renderPass());
+ m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineFragmentInputNotProvided) {
+ TEST_DESCRIPTION(
+ "Test that an error is produced for a fragment shader input which is not present in the outputs of the previous stage");
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "not written by vertex shader");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "void main(){\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) in float x;\n"
+ "layout(location=0) out vec4 color;\n"
+ "void main(){\n"
+ " color = vec4(x);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddDefaultColorAttachment();
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.AppendDummy();
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineFragmentInputNotProvidedInBlock) {
+ TEST_DESCRIPTION(
+ "Test that an error is produced for a fragment shader input within an interface block, which is not present in the outputs "
+ "of the previous stage.");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "not written by vertex shader");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "void main(){\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "in block { layout(location=0) float x; } ins;\n"
+ "layout(location=0) out vec4 color;\n"
+ "void main(){\n"
+ " color = vec4(ins.x);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddDefaultColorAttachment();
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.AppendDummy();
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineVsFsTypeMismatchArraySize) {
+ TEST_DESCRIPTION("Test that an error is produced for mismatched array sizes across the vertex->fragment shader interface");
+ m_errorMonitor->SetDesiredFailureMsg(
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "Type mismatch on location 0.0: 'ptr to output arr[2] of float32' vs 'ptr to input arr[1] of float32'");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) out float x[2];\n"
+ "void main(){\n"
+ " x[0] = 0; x[1] = 0;\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) in float x[1];\n"
+ "layout(location=0) out vec4 color;\n"
+ "void main(){\n"
+ " color = vec4(x[0]);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddDefaultColorAttachment();
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.AppendDummy();
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineVsFsTypeMismatch) {
+ TEST_DESCRIPTION("Test that an error is produced for mismatched types across the vertex->fragment shader interface");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Type mismatch on location 0");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) out int x;\n"
+ "void main(){\n"
+ " x = 0;\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) in float x;\n" /* VS writes int */
+ "layout(location=0) out vec4 color;\n"
+ "void main(){\n"
+ " color = vec4(x);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddDefaultColorAttachment();
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.AppendDummy();
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineVsFsTypeMismatchInBlock) {
+ TEST_DESCRIPTION(
+ "Test that an error is produced for mismatched types across the vertex->fragment shader interface, when the variable is "
+ "contained within an interface block");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Type mismatch on location 0");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "out block { layout(location=0) int x; } outs;\n"
+ "void main(){\n"
+ " outs.x = 0;\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "in block { layout(location=0) float x; } ins;\n" /* VS writes int */
+ "layout(location=0) out vec4 color;\n"
+ "void main(){\n"
+ " color = vec4(ins.x);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddDefaultColorAttachment();
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.AppendDummy();
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineVsFsMismatchByLocation) {
+ TEST_DESCRIPTION(
+ "Test that an error is produced for location mismatches across the vertex->fragment shader interface; this should manifest "
+ "as a not-written/not-consumed pair, but it also flushes out broken walking of the interfaces");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "location 0.0 which is not written by vertex shader");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "out block { layout(location=1) float x; } outs;\n"
+ "void main(){\n"
+ " outs.x = 0;\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "in block { layout(location=0) float x; } ins;\n"
+ "layout(location=0) out vec4 color;\n"
+ "void main(){\n"
+ " color = vec4(ins.x);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddDefaultColorAttachment();
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.AppendDummy();
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineVsFsMismatchByComponent) {
+ TEST_DESCRIPTION(
+ "Test that an error is produced for component mismatches across the vertex->fragment shader interface. It's not enough to "
+ "have the same set of locations in use; matching is defined in terms of spirv variables.");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "location 0.1 which is not written by vertex shader");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "out block { layout(location=0, component=0) float x; } outs;\n"
+ "void main(){\n"
+ " outs.x = 0;\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "in block { layout(location=0, component=1) float x; } ins;\n"
+ "layout(location=0) out vec4 color;\n"
+ "void main(){\n"
+ " color = vec4(ins.x);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddDefaultColorAttachment();
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.AppendDummy();
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineVsFsMismatchByPrecision) {
+ TEST_DESCRIPTION("Test that the RelaxedPrecision decoration is validated to match");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ char const *vsSource =
+ "#version 450\n"
+ "layout(location=0) out mediump float x;\n"
+ "void main() { gl_Position = vec4(0); x = 1.0; }\n";
+ char const *fsSource =
+ "#version 450\n"
+ "layout(location=0) in highp float x;\n"
+ "layout(location=0) out vec4 color;\n"
+ "void main() { color = vec4(x); }\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddDefaultColorAttachment();
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.AppendDummy();
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "differ in precision");
+
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineVsFsMismatchByPrecisionBlock) {
+ TEST_DESCRIPTION("Test that the RelaxedPrecision decoration is validated to match");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ char const *vsSource =
+ "#version 450\n"
+ "out block { layout(location=0) mediump float x; };\n"
+ "void main() { gl_Position = vec4(0); x = 1.0; }\n";
+ char const *fsSource =
+ "#version 450\n"
+ "in block { layout(location=0) highp float x; };\n"
+ "layout(location=0) out vec4 color;\n"
+ "void main() { color = vec4(x); }\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddDefaultColorAttachment();
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.AppendDummy();
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "differ in precision");
+
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineAttribNotConsumed) {
+ TEST_DESCRIPTION("Test that a warning is produced for a vertex attribute which is not consumed by the vertex shader");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, "location 0 not consumed by vertex shader");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkVertexInputBindingDescription input_binding;
+ memset(&input_binding, 0, sizeof(input_binding));
+
+ VkVertexInputAttributeDescription input_attrib;
+ memset(&input_attrib, 0, sizeof(input_attrib));
+ input_attrib.format = VK_FORMAT_R32_SFLOAT;
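+ // A single attribute at binding 0 / location 0 is provided, but the vertex shader below declares no inputs.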
+
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "void main(){\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) out vec4 color;\n"
+ "void main(){\n"
+ " color = vec4(1);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddDefaultColorAttachment();
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+
+ pipe.AddVertexInputBindings(&input_binding, 1);
+ pipe.AddVertexInputAttribs(&input_attrib, 1);
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.AppendDummy();
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineAttribLocationMismatch) {
+ TEST_DESCRIPTION(
+ "Test that a warning is produced for a location mismatch on vertex attributes. This flushes out bad behavior in the "
+ "interface walker");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, "location 0 not consumed by vertex shader");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkVertexInputBindingDescription input_binding;
+ memset(&input_binding, 0, sizeof(input_binding));
+
+ VkVertexInputAttributeDescription input_attrib;
+ memset(&input_attrib, 0, sizeof(input_attrib));
+ input_attrib.format = VK_FORMAT_R32_SFLOAT;
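+ // The attribute stays at location 0, while the vertex shader below consumes location 1.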
+
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location=1) in float x;\n"
+ "void main(){\n"
+ " gl_Position = vec4(x);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) out vec4 color;\n"
+ "void main(){\n"
+ " color = vec4(1);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddDefaultColorAttachment();
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+
+ pipe.AddVertexInputBindings(&input_binding, 1);
+ pipe.AddVertexInputAttribs(&input_attrib, 1);
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.AppendDummy();
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ m_errorMonitor->SetUnexpectedError("Vertex shader consumes input at location 1 but not provided");
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineAttribNotProvided) {
+ TEST_DESCRIPTION("Test that an error is produced for a vertex shader input which is not provided by a vertex attribute");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "Vertex shader consumes input at location 0 but not provided");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) in vec4 x;\n" /* not provided */
+ "void main(){\n"
+ " gl_Position = x;\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) out vec4 color;\n"
+ "void main(){\n"
+ " color = vec4(1);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddDefaultColorAttachment();
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.AppendDummy();
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineAttribTypeMismatch) {
+ TEST_DESCRIPTION(
+ "Test that an error is produced for a mismatch between the fundamental type (float/int/uint) of an attribute and the "
+ "vertex shader input that consumes it");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "location 0 does not match vertex shader input type");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkVertexInputBindingDescription input_binding;
+ memset(&input_binding, 0, sizeof(input_binding));
+
+ VkVertexInputAttributeDescription input_attrib;
+ memset(&input_attrib, 0, sizeof(input_attrib));
+ input_attrib.format = VK_FORMAT_R32_SFLOAT;
+
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) in int x;\n" /* attrib provided float */
+ "void main(){\n"
+ " gl_Position = vec4(x);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) out vec4 color;\n"
+ "void main(){\n"
+ " color = vec4(1);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddDefaultColorAttachment();
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+
+ pipe.AddVertexInputBindings(&input_binding, 1);
+ pipe.AddVertexInputAttribs(&input_attrib, 1);
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.AppendDummy();
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineDuplicateStage) {
+ TEST_DESCRIPTION("Test that an error is produced for a pipeline containing multiple shaders for the same stage");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "Multiple shaders provided for stage VK_SHADER_STAGE_VERTEX_BIT");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "void main(){\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) out vec4 color;\n"
+ "void main(){\n"
+ " color = vec4(1);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddDefaultColorAttachment();
+ pipe.AddShader(&vs);
+ pipe.AddShader(&vs); // intentionally duplicate vertex shader attachment
+ pipe.AddShader(&fs);
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.AppendDummy();
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineMissingEntrypoint) {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "No entrypoint found named `foo`");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ char const *vsSource =
+ "#version 450\n"
+ "void main(){\n"
+ " gl_Position = vec4(0);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) out vec4 color;\n"
+ "void main(){\n"
+ " color = vec4(1);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this, "foo");
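+ // The fragment stage is created with entrypoint name "foo", which the module does not contain.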
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddDefaultColorAttachment();
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.AppendDummy();
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineDepthStencilRequired) {
+ m_errorMonitor->SetDesiredFailureMsg(
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "pDepthStencilState is NULL when rasterization is enabled and subpass uses a depth/stencil attachment");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ char const *vsSource =
+ "#version 450\n"
+ "void main(){ gl_Position = vec4(0); }\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) out vec4 color;\n"
+ "void main(){\n"
+ " color = vec4(1);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddDefaultColorAttachment();
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.AppendDummy();
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ VkAttachmentDescription attachments[] = {
+ {
+ 0,
+ VK_FORMAT_B8G8R8A8_UNORM,
+ VK_SAMPLE_COUNT_1_BIT,
+ VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+ VK_ATTACHMENT_STORE_OP_DONT_CARE,
+ VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+ VK_ATTACHMENT_STORE_OP_DONT_CARE,
+ VK_IMAGE_LAYOUT_UNDEFINED,
+ VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+ },
+ {
+ 0,
+ VK_FORMAT_D16_UNORM,
+ VK_SAMPLE_COUNT_1_BIT,
+ VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+ VK_ATTACHMENT_STORE_OP_DONT_CARE,
+ VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+ VK_ATTACHMENT_STORE_OP_DONT_CARE,
+ VK_IMAGE_LAYOUT_UNDEFINED,
+ VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
+ },
+ };
+ VkAttachmentReference refs[] = {
+ {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
+ {1, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL},
+ };
+ VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, &refs[0], nullptr, &refs[1], 0, nullptr};
+ VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 2, attachments, 1, &subpass, 0, nullptr};
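+ // Subpass 0 uses a depth/stencil attachment, but the pipeline's depth/stencil state was never set.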
+ VkRenderPass rp;
+ VkResult err = vkCreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
+ ASSERT_VK_SUCCESS(err);
+
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), rp);
+
+ m_errorMonitor->VerifyFound();
+
+ vkDestroyRenderPass(m_device->device(), rp, nullptr);
+}
+
+TEST_F(VkLayerTest, CreatePipelineTessPatchDecorationMismatch) {
+ TEST_DESCRIPTION(
+ "Test that an error is produced for a variable output from the TCS without the patch decoration, but consumed in the TES "
+ "with the decoration.");
+ m_errorMonitor->SetDesiredFailureMsg(
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "is per-vertex in tessellation control shader stage but per-patch in tessellation evaluation shader stage");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ if (!m_device->phy().features().tessellationShader) {
+ printf("%s Device does not support tessellation shaders; skipped.\n", kSkipPrefix);
+ return;
+ }
+
+ char const *vsSource =
+ "#version 450\n"
+ "void main(){}\n";
+ char const *tcsSource =
+ "#version 450\n"
+ "layout(location=0) out int x[];\n"
+ "layout(vertices=3) out;\n"
+ "void main(){\n"
+ " gl_TessLevelOuter[0] = gl_TessLevelOuter[1] = gl_TessLevelOuter[2] = 1;\n"
+ " gl_TessLevelInner[0] = 1;\n"
+ " x[gl_InvocationID] = gl_InvocationID;\n"
+ "}\n";
+ char const *tesSource =
+ "#version 450\n"
+ "layout(triangles, equal_spacing, cw) in;\n"
+ "layout(location=0) patch in int x;\n"
+ "void main(){\n"
+ " gl_Position.xyz = gl_TessCoord;\n"
+ " gl_Position.w = x;\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "layout(location=0) out vec4 color;\n"
+ "void main(){\n"
+ " color = vec4(1);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj tcs(m_device, tcsSource, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, this);
+ VkShaderObj tes(m_device, tesSource, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
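+ // The TCS writes x as a per-vertex output, while the TES consumes it with the patch qualifier.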
+
+ VkPipelineInputAssemblyStateCreateInfo iasci{VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO, nullptr, 0,
+ VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, VK_FALSE};
+
+ VkPipelineTessellationStateCreateInfo tsci{VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO, nullptr, 0, 3};
+
+ VkPipelineObj pipe(m_device);
+ pipe.SetInputAssembly(&iasci);
+ pipe.SetTessellation(&tsci);
+ pipe.AddDefaultColorAttachment();
+ pipe.AddShader(&vs);
+ pipe.AddShader(&tcs);
+ pipe.AddShader(&tes);
+ pipe.AddShader(&fs);
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.AppendDummy();
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineTessErrors) {
+ TEST_DESCRIPTION("Test various errors when creating a graphics pipeline with tessellation stages active.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ if (!m_device->phy().features().tessellationShader) {
+ printf("%s Device does not support tessellation shaders; skipped.\n", kSkipPrefix);
+ return;
+ }
+
+ char const *vsSource =
+ "#version 450\n"
+ "void main(){}\n";
+ char const *tcsSource =
+ "#version 450\n"
+ "layout(vertices=3) out;\n"
+ "void main(){\n"
+ " gl_TessLevelOuter[0] = gl_TessLevelOuter[1] = gl_TessLevelOuter[2] = 1;\n"
+ " gl_TessLevelInner[0] = 1;\n"
+ "}\n";
+ char const *tesSource =
+ "#version 450\n"
+ "layout(triangles, equal_spacing, cw) in;\n"
+ "void main(){\n"
+ " gl_Position.xyz = gl_TessCoord;\n"
+ " gl_Position.w = 0;\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "layout(location=0) out vec4 color;\n"
+ "void main(){\n"
+ " color = vec4(1);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj tcs(m_device, tcsSource, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, this);
+ VkShaderObj tes(m_device, tesSource, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineInputAssemblyStateCreateInfo iasci{VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO, nullptr, 0,
+ VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, VK_FALSE};
+
+ VkPipelineTessellationStateCreateInfo tsci{VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO, nullptr, 0, 3};
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.AppendDummy();
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ {
+ VkPipelineObj pipe(m_device);
+ VkPipelineInputAssemblyStateCreateInfo iasci_bad = iasci;
+ iasci_bad.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST; // otherwise we get a failure about invalid topology
+ pipe.SetInputAssembly(&iasci_bad);
+ pipe.AddDefaultColorAttachment();
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+
+ // Pass a tess control shader without a tess eval shader
+ pipe.AddShader(&tcs);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkGraphicsPipelineCreateInfo-pStages-00729");
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+ m_errorMonitor->VerifyFound();
+ }
+
+ {
+ VkPipelineObj pipe(m_device);
+ VkPipelineInputAssemblyStateCreateInfo iasci_bad = iasci;
+ iasci_bad.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST; // otherwise we get a failure about invalid topology
+ pipe.SetInputAssembly(&iasci_bad);
+ pipe.AddDefaultColorAttachment();
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+
+ // Pass a tess eval shader without a tess control shader
+ pipe.AddShader(&tes);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkGraphicsPipelineCreateInfo-pStages-00730");
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+ m_errorMonitor->VerifyFound();
+ }
+
+ {
+ VkPipelineObj pipe(m_device);
+ pipe.SetInputAssembly(&iasci);
+ pipe.AddDefaultColorAttachment();
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+
+ // Pass patch topology without tessellation shaders
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkGraphicsPipelineCreateInfo-topology-00737");
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+ m_errorMonitor->VerifyFound();
+
+ pipe.AddShader(&tcs);
+ pipe.AddShader(&tes);
+ // Pass a NULL pTessellationState (with active tessellation shader stages)
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkGraphicsPipelineCreateInfo-pStages-00731");
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+ m_errorMonitor->VerifyFound();
+
+ // Pass an invalid pTessellationState (bad sType)
+ VkPipelineTessellationStateCreateInfo tsci_bad = tsci;
+ tsci_bad.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ pipe.SetTessellation(&tsci_bad);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkPipelineTessellationStateCreateInfo-sType-sType");
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+ m_errorMonitor->VerifyFound();
+ // Pass out-of-range patchControlPoints
+ tsci_bad = tsci;
+ tsci_bad.patchControlPoints = 0;
+ pipe.SetTessellation(&tsci);
+ pipe.SetTessellation(&tsci_bad);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkPipelineTessellationStateCreateInfo-patchControlPoints-01214");
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+ m_errorMonitor->VerifyFound();
+ tsci_bad.patchControlPoints = m_device->props.limits.maxTessellationPatchSize + 1;
+ pipe.SetTessellation(&tsci_bad);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkPipelineTessellationStateCreateInfo-patchControlPoints-01214");
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+ m_errorMonitor->VerifyFound();
+ pipe.SetTessellation(&tsci);
+
+ // Pass an invalid primitive topology
+ VkPipelineInputAssemblyStateCreateInfo iasci_bad = iasci;
+ iasci_bad.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
+ pipe.SetInputAssembly(&iasci_bad);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkGraphicsPipelineCreateInfo-pStages-00736");
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+ m_errorMonitor->VerifyFound();
+ pipe.SetInputAssembly(&iasci);
+ }
+}
+
+TEST_F(VkLayerTest, CreatePipelineAttribBindingConflict) {
+ TEST_DESCRIPTION(
+ "Test that an error is produced for a vertex attribute setup where multiple bindings provide the same location");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "Duplicate vertex input binding descriptions for binding 0");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ /* Two binding descriptions for binding 0 */
+ VkVertexInputBindingDescription input_bindings[2];
+ memset(input_bindings, 0, sizeof(input_bindings));
+
+ VkVertexInputAttributeDescription input_attrib;
+ memset(&input_attrib, 0, sizeof(input_attrib));
+ input_attrib.format = VK_FORMAT_R32_SFLOAT;
+
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) in float x;\n" /* attrib provided float */
+ "void main(){\n"
+ " gl_Position = vec4(x);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) out vec4 color;\n"
+ "void main(){\n"
+ " color = vec4(1);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddDefaultColorAttachment();
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+
+ pipe.AddVertexInputBindings(input_bindings, 2);
+ pipe.AddVertexInputAttribs(&input_attrib, 1);
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.AppendDummy();
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ m_errorMonitor->SetUnexpectedError("VUID-VkPipelineVertexInputStateCreateInfo-pVertexBindingDescriptions-00616 ");
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineFragmentOutputNotWritten) {
+ TEST_DESCRIPTION(
+ "Test that an error is produced for a fragment shader which does not provide an output for one of the pipeline's color "
+ "attachments");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_WARNING_BIT_EXT, "Attachment 0 not written by fragment shader");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "void main(){\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "void main(){\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+
+ /* set up CB 0, not written */
+ pipe.AddDefaultColorAttachment();
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.AppendDummy();
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkPositiveLayerTest, CreatePipelineFragmentOutputNotWrittenButMasked) {
+ TEST_DESCRIPTION(
+ "Test that no error is produced when the fragment shader fails to declare an output, but the corresponding attachment's "
+ "write mask is 0.");
+ m_errorMonitor->ExpectSuccess();
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "void main(){\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "void main(){\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+
+ /* set up CB 0, not written, but also masked */
+ pipe.AddDefaultColorAttachment(0);
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.AppendDummy();
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+
+ m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineFragmentOutputNotConsumed) {
+ TEST_DESCRIPTION(
+ "Test that a warning is produced for a fragment shader which provides a spurious output with no matching attachment");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_WARNING_BIT_EXT,
+ "fragment shader writes to output location 1 with no matching attachment");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "void main(){\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) out vec4 x;\n"
+ "layout(location=1) out vec4 y;\n" /* no matching attachment for this */
+ "void main(){\n"
+ " x = vec4(1);\n"
+ " y = vec4(1);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+
+ /* set up CB 0, not written */
+ pipe.AddDefaultColorAttachment();
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+ /* FS writes CB 1, but we don't configure it */
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.AppendDummy();
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineFragmentOutputNotConsumedButAlphaToCoverageEnabled) {
+ TEST_DESCRIPTION(
+ "Test that no warning is produced when writing to non-existing color attachment if alpha to coverage is enabled.");
+
+ m_errorMonitor->ExpectSuccess(VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT);
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "void main(){\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) out vec4 x;\n"
+ "void main(){\n"
+ " x = vec4(1);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+
+ VkPipelineMultisampleStateCreateInfo ms_state_ci = {};
+ ms_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
+ ms_state_ci.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
+ ms_state_ci.alphaToCoverageEnable = VK_TRUE;
+ pipe.SetMSAA(&ms_state_ci);
+
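+ // With zero color attachments, the location 0 output has no matching attachment, but alpha to coverage consumes it, so no warning is expected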
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget(0u));
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.AppendDummy();
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+
+ m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineFragmentNoOutputLocation0ButAlphaToCoverageEnabled) {
+ TEST_DESCRIPTION("Test that an error is produced when alpha to coverage is enabled but no output at location 0 is declared.");
+
+ m_errorMonitor->SetDesiredFailureMsg(
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "fragment shader doesn't declare alpha output at location 0 even though alpha to coverage is enabled.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "void main(){\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "void main(){\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+
+ VkPipelineMultisampleStateCreateInfo ms_state_ci = {};
+ ms_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
+ ms_state_ci.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
+ ms_state_ci.alphaToCoverageEnable = VK_TRUE;
+ pipe.SetMSAA(&ms_state_ci);
+
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget(0u));
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.AppendDummy();
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineFragmentNoAlphaLocation0ButAlphaToCoverageEnabled) {
+ TEST_DESCRIPTION(
+ "Test that an error is produced when alpha to coverage is enabled but output at location 0 doesn't have alpha channel.");
+
+ m_errorMonitor->SetDesiredFailureMsg(
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "fragment shader doesn't declare alpha output at location 0 even though alpha to coverage is enabled.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "void main(){\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "layout(location=0) out vec3 x;\n"
+ "\n"
+ "void main(){\n"
+ " x = vec3(1);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+
+ VkPipelineMultisampleStateCreateInfo ms_state_ci = {};
+ ms_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
+ ms_state_ci.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
+ ms_state_ci.alphaToCoverageEnable = VK_TRUE;
+ pipe.SetMSAA(&ms_state_ci);
+
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget(0u));
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.AppendDummy();
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineFragmentOutputTypeMismatch) {
+ TEST_DESCRIPTION(
+ "Test that an error is produced for a mismatch between the fundamental type of an fragment shader output variable, and the "
+ "format of the corresponding attachment");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_WARNING_BIT_EXT, "does not match fragment shader output type");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "void main(){\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) out ivec4 x;\n" /* not UNORM */
+ "void main(){\n"
+ " x = ivec4(1);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+
+ /* set up CB 0; type is UNORM by default */
+ pipe.AddDefaultColorAttachment();
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.AppendDummy();
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineExceedMaxVertexOutputComponents) {
+ TEST_DESCRIPTION(
+ "Test that an error is produced when the number of output components from the vertex stage exceeds the device limit");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "Vertex shader exceeds "
+ "VkPhysicalDeviceLimits::maxVertexOutputComponents");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ const uint32_t maxVsOutComp = m_device->props.limits.maxVertexOutputComponents;
+ std::string vsSourceStr = "#version 450\n\n";
+ const uint32_t numVec4 = maxVsOutComp / 4;
+ uint32_t location = 0;
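+ // Emit vec4 outputs up to the device limit, then declare one more location ("exceedLimit") to push past it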
+ for (uint32_t i = 0; i < numVec4; i++) {
+ vsSourceStr += "layout(location=" + std::to_string(location) + ") out vec4 v" + std::to_string(i) + ";\n";
+ location += 1;
+ }
+ const uint32_t remainder = maxVsOutComp % 4;
+ if (remainder != 0) {
+ if (remainder == 1) {
+ vsSourceStr += "layout(location=" + std::to_string(location) + ") out float" + " vn;\n";
+ } else {
+ vsSourceStr += "layout(location=" + std::to_string(location) + ") out vec" + std::to_string(remainder) + " vn;\n";
+ }
+ location += 1;
+ }
+ vsSourceStr += "layout(location=" + std::to_string(location) +
+ ") out vec4 exceedLimit;\n"
+ "\n"
+ "void main(){\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+
+ std::string fsSourceStr =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) out vec4 color;\n"
+ "\n"
+ "void main(){\n"
+ " color = vec4(1);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSourceStr.c_str(), VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSourceStr.c_str(), VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+
+ // Set up CB 0; type is UNORM by default
+ pipe.AddDefaultColorAttachment();
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.AppendDummy();
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineExceedMaxTessellationControlInputOutputComponents) {
+ TEST_DESCRIPTION(
+ "Test that errors are produced when the number of per-vertex input and/or output components to the tessellation control "
+ "stage exceeds the device limit");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "Tessellation control shader exceeds "
+ "VkPhysicalDeviceLimits::maxTessellationControlPerVertexInputComponents");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "Tessellation control shader exceeds "
+ "VkPhysicalDeviceLimits::maxTessellationControlPerVertexOutputComponents");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ VkPhysicalDeviceFeatures feat;
+ vkGetPhysicalDeviceFeatures(gpu(), &feat);
+ if (!feat.tessellationShader) {
+ printf("%s tessellation shader stage(s) unsupported.\n", kSkipPrefix);
+ return;
+ }
+
+ std::string vsSourceStr =
+ "#version 450\n"
+ "\n"
+ "void main(){\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+
+ // Tessellation control stage
+ std::string tcsSourceStr =
+ "#version 450\n"
+ "\n";
+ // Input components
+ const uint32_t maxTescInComp = m_device->props.limits.maxTessellationControlPerVertexInputComponents;
+ const uint32_t numInVec4 = maxTescInComp / 4;
+ uint32_t inLocation = 0;
+ for (uint32_t i = 0; i < numInVec4; i++) {
+ tcsSourceStr += "layout(location=" + std::to_string(inLocation) + ") in vec4 v" + std::to_string(i) + "In[];\n";
+ inLocation += 1;
+ }
+ const uint32_t inRemainder = maxTescInComp % 4;
+ if (inRemainder != 0) {
+ if (inRemainder == 1) {
+ tcsSourceStr += "layout(location=" + std::to_string(inLocation) + ") in float" + " vnIn[];\n";
+ } else {
+ tcsSourceStr +=
+ "layout(location=" + std::to_string(inLocation) + ") in vec" + std::to_string(inRemainder) + " vnIn[];\n";
+ }
+ inLocation += 1;
+ }
+ tcsSourceStr += "layout(location=" + std::to_string(inLocation) + ") in vec4 exceedLimitIn[];\n\n";
+ // Output components
+ const uint32_t maxTescOutComp = m_device->props.limits.maxTessellationControlPerVertexOutputComponents;
+ const uint32_t numOutVec4 = maxTescOutComp / 4;
+ uint32_t outLocation = 0;
+ for (uint32_t i = 0; i < numOutVec4; i++) {
+ tcsSourceStr += "layout(location=" + std::to_string(outLocation) + ") out vec4 v" + std::to_string(i) + "Out[3];\n";
+ outLocation += 1;
+ }
+ const uint32_t outRemainder = maxTescOutComp % 4;
+ if (outRemainder != 0) {
+ if (outRemainder == 1) {
+ tcsSourceStr += "layout(location=" + std::to_string(outLocation) + ") out float" + " vnOut[3];\n";
+ } else {
+ tcsSourceStr +=
+ "layout(location=" + std::to_string(outLocation) + ") out vec" + std::to_string(outRemainder) + " vnOut[3];\n";
+ }
+ outLocation += 1;
+ }
+ tcsSourceStr += "layout(location=" + std::to_string(outLocation) + ") out vec4 exceedLimitOut[3];\n";
+ tcsSourceStr += "layout(vertices=3) out;\n";
+ // Finalize
+ tcsSourceStr +=
+ "\n"
+ "void main(){\n"
+ " gl_out[gl_InvocationID].gl_Position = gl_in[gl_InvocationID].gl_Position;\n"
+ "}\n";
+
+ std::string tesSourceStr =
+ "#version 450\n"
+ "\n"
+ "layout(triangles) in;"
+ "\n"
+ "void main(){\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+
+ std::string fsSourceStr =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) out vec4 color;"
+ "\n"
+ "void main(){\n"
+ " color = vec4(1);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSourceStr.c_str(), VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj tcs(m_device, tcsSourceStr.c_str(), VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, this);
+ VkShaderObj tes(m_device, tesSourceStr.c_str(), VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, this);
+ VkShaderObj fs(m_device, fsSourceStr.c_str(), VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&tcs);
+ pipe.AddShader(&tes);
+ pipe.AddShader(&fs);
+
+ // Set up CB 0; type is UNORM by default
+ pipe.AddDefaultColorAttachment();
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.AppendDummy();
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ VkPipelineInputAssemblyStateCreateInfo inputAssemblyInfo = {};
+ inputAssemblyInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
+ inputAssemblyInfo.pNext = NULL;
+ inputAssemblyInfo.flags = 0;
+ inputAssemblyInfo.topology = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST;
+ inputAssemblyInfo.primitiveRestartEnable = VK_FALSE;
+ pipe.SetInputAssembly(&inputAssemblyInfo);
+
+ VkPipelineTessellationStateCreateInfo tessInfo = {};
+ tessInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO;
+ tessInfo.pNext = NULL;
+ tessInfo.flags = 0;
+ tessInfo.patchControlPoints = 3;
+ pipe.SetTessellation(&tessInfo);
+
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineExceedMaxTessellationEvaluationInputOutputComponents) {
+ TEST_DESCRIPTION(
+ "Test that errors are produced when the number of input and/or output components to the tessellation evaluation stage "
+ "exceeds the device limit");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "Tessellation evaluation shader exceeds "
+ "VkPhysicalDeviceLimits::maxTessellationEvaluationInputComponents");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "Tessellation evaluation shader exceeds "
+ "VkPhysicalDeviceLimits::maxTessellationEvaluationOutputComponents");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ VkPhysicalDeviceFeatures feat;
+ vkGetPhysicalDeviceFeatures(gpu(), &feat);
+ if (!feat.tessellationShader) {
+ printf("%s tessellation shader stage(s) unsupported.\n", kSkipPrefix);
+ return;
+ }
+
+ std::string vsSourceStr =
+ "#version 450\n"
+ "\n"
+ "void main(){\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+
+ std::string tcsSourceStr =
+ "#version 450\n"
+ "\n"
+ "layout (vertices = 3) out;\n"
+ "\n"
+ "void main(){\n"
+ " gl_out[gl_InvocationID].gl_Position = gl_in[gl_InvocationID].gl_Position;\n"
+ "}\n";
+
+ // Tessellation evaluation stage
+ std::string tesSourceStr =
+ "#version 450\n"
+ "\n"
+ "layout (triangles) in;\n"
+ "\n";
+ // Input components
+ const uint32_t maxTeseInComp = m_device->props.limits.maxTessellationEvaluationInputComponents;
+ const uint32_t numInVec4 = maxTeseInComp / 4;
+ uint32_t inLocation = 0;
+ for (uint32_t i = 0; i < numInVec4; i++) {
+ tesSourceStr += "layout(location=" + std::to_string(inLocation) + ") in vec4 v" + std::to_string(i) + "In[];\n";
+ inLocation += 1;
+ }
+ const uint32_t inRemainder = maxTeseInComp % 4;
+ if (inRemainder != 0) {
+ if (inRemainder == 1) {
+ tesSourceStr += "layout(location=" + std::to_string(inLocation) + ") in float" + " vnIn[];\n";
+ } else {
+ tesSourceStr +=
+ "layout(location=" + std::to_string(inLocation) + ") in vec" + std::to_string(inRemainder) + " vnIn[];\n";
+ }
+ inLocation += 1;
+ }
+ tesSourceStr += "layout(location=" + std::to_string(inLocation) + ") in vec4 exceedLimitIn[];\n\n";
+ // Output components
+ const uint32_t maxTeseOutComp = m_device->props.limits.maxTessellationEvaluationOutputComponents;
+ const uint32_t numOutVec4 = maxTeseOutComp / 4;
+ uint32_t outLocation = 0;
+ for (uint32_t i = 0; i < numOutVec4; i++) {
+ tesSourceStr += "layout(location=" + std::to_string(outLocation) + ") out vec4 v" + std::to_string(i) + "Out;\n";
+ outLocation += 1;
+ }
+ const uint32_t outRemainder = maxTeseOutComp % 4;
+ if (outRemainder != 0) {
+ if (outRemainder == 1) {
+ tesSourceStr += "layout(location=" + std::to_string(outLocation) + ") out float" + " vnOut;\n";
+ } else {
+ tesSourceStr +=
+ "layout(location=" + std::to_string(outLocation) + ") out vec" + std::to_string(outRemainder) + " vnOut;\n";
+ }
+ outLocation += 1;
+ }
+ tesSourceStr += "layout(location=" + std::to_string(outLocation) + ") out vec4 exceedLimitOut;\n";
+ // Finalize
+ tesSourceStr +=
+ "\n"
+ "void main(){\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+
+ std::string fsSourceStr =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) out vec4 color;"
+ "\n"
+ "void main(){\n"
+ " color = vec4(1);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSourceStr.c_str(), VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj tcs(m_device, tcsSourceStr.c_str(), VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, this);
+ VkShaderObj tes(m_device, tesSourceStr.c_str(), VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, this);
+ VkShaderObj fs(m_device, fsSourceStr.c_str(), VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&tcs);
+ pipe.AddShader(&tes);
+ pipe.AddShader(&fs);
+
+ // Set up CB 0; type is UNORM by default
+ pipe.AddDefaultColorAttachment();
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.AppendDummy();
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ VkPipelineInputAssemblyStateCreateInfo inputAssemblyInfo = {};
+ inputAssemblyInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
+ inputAssemblyInfo.pNext = NULL;
+ inputAssemblyInfo.flags = 0;
+ inputAssemblyInfo.topology = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST;
+ inputAssemblyInfo.primitiveRestartEnable = VK_FALSE;
+ pipe.SetInputAssembly(&inputAssemblyInfo);
+
+ VkPipelineTessellationStateCreateInfo tessInfo = {};
+ tessInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO;
+ tessInfo.pNext = NULL;
+ tessInfo.flags = 0;
+ tessInfo.patchControlPoints = 3;
+ pipe.SetTessellation(&tessInfo);
+
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineExceedMaxGeometryInputOutputComponents) {
+ TEST_DESCRIPTION(
+ "Test that errors are produced when the number of input and/or output components to the geometry stage exceeds the device "
+ "limit");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "Geometry shader exceeds "
+ "VkPhysicalDeviceLimits::maxGeometryInputComponents");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "Geometry shader exceeds "
+ "VkPhysicalDeviceLimits::maxGeometryOutputComponents");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ VkPhysicalDeviceFeatures feat;
+ vkGetPhysicalDeviceFeatures(gpu(), &feat);
+ if (!feat.geometryShader) {
+ printf("%s geometry shader stage unsupported.\n", kSkipPrefix);
+ return;
+ }
+
+ std::string vsSourceStr =
+ "#version 450\n"
+ "\n"
+ "void main(){\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+
+ std::string gsSourceStr =
+ "#version 450\n"
+ "\n"
+ "layout(triangles) in;\n"
+ "layout(invocations=1) in;\n";
+ // Input components
+ const uint32_t maxGeomInComp = m_device->props.limits.maxGeometryInputComponents;
+ const uint32_t numInVec4 = maxGeomInComp / 4;
+ uint32_t inLocation = 0;
+ for (uint32_t i = 0; i < numInVec4; i++) {
+ gsSourceStr += "layout(location=" + std::to_string(inLocation) + ") in vec4 v" + std::to_string(i) + "In[];\n";
+ inLocation += 1;
+ }
+ const uint32_t inRemainder = maxGeomInComp % 4;
+ if (inRemainder != 0) {
+ if (inRemainder == 1) {
+ gsSourceStr += "layout(location=" + std::to_string(inLocation) + ") in float" + " vnIn[];\n";
+ } else {
+ gsSourceStr +=
+ "layout(location=" + std::to_string(inLocation) + ") in vec" + std::to_string(inRemainder) + " vnIn[];\n";
+ }
+ inLocation += 1;
+ }
+ gsSourceStr += "layout(location=" + std::to_string(inLocation) + ") in vec4 exceedLimitIn[];\n\n";
+ // Output components
+ const uint32_t maxGeomOutComp = m_device->props.limits.maxGeometryOutputComponents;
+ const uint32_t numOutVec4 = maxGeomOutComp / 4;
+ uint32_t outLocation = 0;
+ for (uint32_t i = 0; i < numOutVec4; i++) {
+ gsSourceStr += "layout(location=" + std::to_string(outLocation) + ") out vec4 v" + std::to_string(i) + "Out;\n";
+ outLocation += 1;
+ }
+ const uint32_t outRemainder = maxGeomOutComp % 4;
+ if (outRemainder != 0) {
+ if (outRemainder == 1) {
+ gsSourceStr += "layout(location=" + std::to_string(outLocation) + ") out float" + " vnOut;\n";
+ } else {
+ gsSourceStr +=
+ "layout(location=" + std::to_string(outLocation) + ") out vec" + std::to_string(outRemainder) + " vnOut;\n";
+ }
+ outLocation += 1;
+ }
+ gsSourceStr += "layout(location=" + std::to_string(outLocation) + ") out vec4 exceedLimitOut;\n";
+ // Finalize
+ gsSourceStr +=
+ "layout(triangle_strip, max_vertices=3) out;\n"
+ "\n"
+ "void main(){\n"
+ " exceedLimitOut = vec4(1);\n"
+ "}\n";
+
+ std::string fsSourceStr =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) out vec4 color;"
+ "\n"
+ "void main(){\n"
+ " color = vec4(1);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSourceStr.c_str(), VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj gs(m_device, gsSourceStr.c_str(), VK_SHADER_STAGE_GEOMETRY_BIT, this);
+ VkShaderObj fs(m_device, fsSourceStr.c_str(), VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&gs);
+ pipe.AddShader(&fs);
+
+ // Set up CB 0; type is UNORM by default
+ pipe.AddDefaultColorAttachment();
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.AppendDummy();
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineExceedMaxFragmentInputComponents) {
+ TEST_DESCRIPTION(
+ "Test that an error is produced when the number of input components from the fragment stage exceeds the device limit");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "Fragment shader exceeds "
+ "VkPhysicalDeviceLimits::maxFragmentInputComponents");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ std::string vsSourceStr =
+ "#version 450\n"
+ "\n"
+ "void main(){\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+
+ const uint32_t maxFsInComp = m_device->props.limits.maxFragmentInputComponents;
+ std::string fsSourceStr = "#version 450\n\n";
+ const uint32_t numVec4 = maxFsInComp / 4;
+ uint32_t location = 0;
+ for (uint32_t i = 0; i < numVec4; i++) {
+ fsSourceStr += "layout(location=" + std::to_string(location) + ") in vec4 v" + std::to_string(i) + ";\n";
+ location += 1;
+ }
+ const uint32_t remainder = maxFsInComp % 4;
+ if (remainder != 0) {
+ if (remainder == 1) {
+ fsSourceStr += "layout(location=" + std::to_string(location) + ") in float" + " vn;\n";
+ } else {
+ fsSourceStr += "layout(location=" + std::to_string(location) + ") in vec" + std::to_string(remainder) + " vn;\n";
+ }
+ location += 1;
+ }
+ fsSourceStr += "layout(location=" + std::to_string(location) +
+ ") in vec4 exceedLimit;\n"
+ "\n"
+ "layout(location=0) out vec4 color;"
+ "\n"
+ "void main(){\n"
+ " color = vec4(1);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSourceStr.c_str(), VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSourceStr.c_str(), VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+
+ // Set up CB 0; type is UNORM by default
+ pipe.AddDefaultColorAttachment();
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.AppendDummy();
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineUniformBlockNotProvided) {
+ TEST_DESCRIPTION(
+ "Test that an error is produced for a shader consuming a uniform block which has no corresponding binding in the pipeline "
+ "layout");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "not declared in pipeline layout");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "void main(){\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) out vec4 x;\n"
+ "layout(set=0) layout(binding=0) uniform foo { int x; int y; } bar;\n"
+ "void main(){\n"
+ " x = vec4(bar.y);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+
+ /* set up CB 0; type is UNORM by default */
+ pipe.AddDefaultColorAttachment();
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelinePushConstantsNotInLayout) {
+ TEST_DESCRIPTION(
+ "Test that an error is produced for a shader consuming push constants which are not provided in the pipeline layout");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "not declared in layout");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "layout(push_constant, std430) uniform foo { float x; } consts;\n"
+ "void main(){\n"
+ " gl_Position = vec4(consts.x);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) out vec4 x;\n"
+ "void main(){\n"
+ " x = vec4(1);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+
+ /* set up CB 0; type is UNORM by default */
+ pipe.AddDefaultColorAttachment();
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+
+ /* should have generated an error -- no push constant ranges provided! */
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineInputAttachmentMissing) {
+ TEST_DESCRIPTION(
+ "Test that an error is produced for a shader consuming an input attachment which is not included in the subpass "
+ "description");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "consumes input attachment index 0 but not provided in subpass");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "void main(){\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(input_attachment_index=0, set=0, binding=0) uniform subpassInput x;\n"
+ "layout(location=0) out vec4 color;\n"
+ "void main() {\n"
+ " color = subpassLoad(x);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+ pipe.AddDefaultColorAttachment();
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkDescriptorSetLayoutBinding dslb = {0, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr};
+ const VkDescriptorSetLayoutObj dsl(m_device, {dslb});
+
+ const VkPipelineLayoutObj pl(m_device, {&dsl});
+
+ // error here.
+ pipe.CreateVKPipeline(pl.handle(), renderPass());
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineInputAttachmentTypeMismatch) {
+ TEST_DESCRIPTION(
+ "Test that an error is produced for a shader consuming an input attachment with a format having a different fundamental "
+ "type");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "input attachment 0 format of VK_FORMAT_R8G8B8A8_UINT does not match");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "void main(){\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(input_attachment_index=0, set=0, binding=0) uniform subpassInput x;\n"
+ "layout(location=0) out vec4 color;\n"
+ "void main() {\n"
+ " color = subpassLoad(x);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+ pipe.AddDefaultColorAttachment();
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkDescriptorSetLayoutBinding dslb = {0, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr};
+ const VkDescriptorSetLayoutObj dsl(m_device, {dslb});
+
+ const VkPipelineLayoutObj pl(m_device, {&dsl});
+
+ VkAttachmentDescription descs[2] = {
+ {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_LOAD, VK_ATTACHMENT_STORE_OP_STORE,
+ VK_ATTACHMENT_LOAD_OP_LOAD, VK_ATTACHMENT_STORE_OP_STORE, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+ VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
+ {0, VK_FORMAT_R8G8B8A8_UINT, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_LOAD, VK_ATTACHMENT_STORE_OP_STORE,
+ VK_ATTACHMENT_LOAD_OP_LOAD, VK_ATTACHMENT_STORE_OP_STORE, VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL},
+ };
+ VkAttachmentReference color = {
+ 0,
+ VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+ };
+ VkAttachmentReference input = {
+ 1,
+ VK_IMAGE_LAYOUT_GENERAL,
+ };
+
+ VkSubpassDescription sd = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 1, &input, 1, &color, nullptr, nullptr, 0, nullptr};
+
+ VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 2, descs, 1, &sd, 0, nullptr};
+ VkRenderPass rp;
+ VkResult err = vkCreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
+ ASSERT_VK_SUCCESS(err);
+
+ // error here.
+ pipe.CreateVKPipeline(pl.handle(), rp);
+
+ m_errorMonitor->VerifyFound();
+
+ vkDestroyRenderPass(m_device->device(), rp, nullptr);
+}
+
+TEST_F(VkLayerTest, CreatePipelineInputAttachmentMissingArray) {
+ TEST_DESCRIPTION(
+ "Test that an error is produced for a shader consuming an input attachment which is not included in the subpass "
+ "description -- array case");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "consumes input attachment index 0 but not provided in subpass");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "void main(){\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(input_attachment_index=0, set=0, binding=0) uniform subpassInput xs[1];\n"
+ "layout(location=0) out vec4 color;\n"
+ "void main() {\n"
+ " color = subpassLoad(xs[0]);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+ pipe.AddDefaultColorAttachment();
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkDescriptorSetLayoutBinding dslb = {0, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 2, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr};
+ const VkDescriptorSetLayoutObj dsl(m_device, {dslb});
+
+ const VkPipelineLayoutObj pl(m_device, {&dsl});
+
+ // error here.
+ pipe.CreateVKPipeline(pl.handle(), renderPass());
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreateComputePipelineMissingDescriptor) {
+ TEST_DESCRIPTION(
+ "Test that an error is produced for a compute pipeline consuming a descriptor which is not provided in the pipeline "
+ "layout");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Shader uses descriptor slot 0.0");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ char const *csSource =
+ "#version 450\n"
+ "\n"
+ "layout(local_size_x=1) in;\n"
+ "layout(set=0, binding=0) buffer block { vec4 x; };\n"
+ "void main(){\n"
+ " x = vec4(1);\n"
+ "}\n";
+
+ VkShaderObj cs(m_device, csSource, VK_SHADER_STAGE_COMPUTE_BIT, this);
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ VkComputePipelineCreateInfo cpci = {VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
+ nullptr,
+ 0,
+ {VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO, nullptr, 0,
+ VK_SHADER_STAGE_COMPUTE_BIT, cs.handle(), "main", nullptr},
+ descriptorSet.GetPipelineLayout(),
+ VK_NULL_HANDLE,
+ -1};
+
+ VkPipeline pipe;
+ VkResult err = vkCreateComputePipelines(m_device->device(), VK_NULL_HANDLE, 1, &cpci, nullptr, &pipe);
+
+ m_errorMonitor->VerifyFound();
+
+ if (err == VK_SUCCESS) {
+ vkDestroyPipeline(m_device->device(), pipe, nullptr);
+ }
+}
+
+TEST_F(VkLayerTest, CreateComputePipelineDescriptorTypeMismatch) {
+ TEST_DESCRIPTION("Test that an error is produced for a pipeline consuming a descriptor-backed resource of a mismatched type");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "but descriptor of type VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ VkDescriptorSetLayoutBinding binding = {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr};
+ const VkDescriptorSetLayoutObj dsl(m_device, {binding});
+
+ const VkPipelineLayoutObj pl(m_device, {&dsl});
+
+ char const *csSource =
+ "#version 450\n"
+ "\n"
+ "layout(local_size_x=1) in;\n"
+ "layout(set=0, binding=0) buffer block { vec4 x; };\n"
+ "void main() {\n"
+ " x.x = 1.0f;\n"
+ "}\n";
+ VkShaderObj cs(m_device, csSource, VK_SHADER_STAGE_COMPUTE_BIT, this);
+
+ VkComputePipelineCreateInfo cpci = {VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
+ nullptr,
+ 0,
+ {VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO, nullptr, 0,
+ VK_SHADER_STAGE_COMPUTE_BIT, cs.handle(), "main", nullptr},
+ pl.handle(),
+ VK_NULL_HANDLE,
+ -1};
+
+ VkPipeline pipe;
+ VkResult err = vkCreateComputePipelines(m_device->device(), VK_NULL_HANDLE, 1, &cpci, nullptr, &pipe);
+
+ m_errorMonitor->VerifyFound();
+
+ if (err == VK_SUCCESS) {
+ vkDestroyPipeline(m_device->device(), pipe, nullptr);
+ }
+}
+
+TEST_F(VkLayerTest, DrawTimeImageViewTypeMismatchWithPipeline) {
+ TEST_DESCRIPTION(
+ "Test that an error is produced when an image view type does not match the dimensionality declared in the shader");
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "requires an image view of type VK_IMAGE_VIEW_TYPE_3D");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "void main() { gl_Position = vec4(0); }\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(set=0, binding=0) uniform sampler3D s;\n"
+ "layout(location=0) out vec4 color;\n"
+ "void main() {\n"
+ " color = texture(s, vec3(0));\n"
+ "}\n";
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+ pipe.AddDefaultColorAttachment();
+
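+ // The default VkTextureObj provides a 2D image view, which mismatches the sampler3D declared in the fragment shader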
+ VkTextureObj texture(m_device, nullptr);
+ VkSamplerObj sampler(m_device);
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.AppendSamplerTexture(&sampler, &texture);
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ VkResult err = pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+ ASSERT_VK_SUCCESS(err);
+
+ m_commandBuffer->begin();
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
+ vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+ m_commandBuffer->BindDescriptorSet(descriptorSet);
+
+ VkViewport viewport = {0, 0, 16, 16, 0, 1};
+ vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+ VkRect2D scissor = {{0, 0}, {16, 16}};
+ vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
+
+ // error produced here.
+ vkCmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
+
+ m_errorMonitor->VerifyFound();
+
+ m_commandBuffer->EndRenderPass();
+ m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, DrawTimeImageMultisampleMismatchWithPipeline) {
+ TEST_DESCRIPTION(
+ "Test that an error is produced when a multisampled images are consumed via singlesample images types in the shader, or "
+ "vice versa.");
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "requires bound image to have multiple samples");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "void main() { gl_Position = vec4(0); }\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(set=0, binding=0) uniform sampler2DMS s;\n"
+ "layout(location=0) out vec4 color;\n"
+ "void main() {\n"
+ " color = texelFetch(s, ivec2(0), 0);\n"
+ "}\n";
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+ pipe.AddDefaultColorAttachment();
+
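+ // The default texture is single-sample, while the shader declares sampler2DMS, so the draw is expected to be flagged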
+ VkTextureObj texture(m_device, nullptr); // THIS LINE CAUSES CRASH ON MALI
+ VkSamplerObj sampler(m_device);
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.AppendSamplerTexture(&sampler, &texture);
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ VkResult err = pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+ ASSERT_VK_SUCCESS(err);
+
+ m_commandBuffer->begin();
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
+ vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+ m_commandBuffer->BindDescriptorSet(descriptorSet);
+
+ VkViewport viewport = {0, 0, 16, 16, 0, 1};
+ vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+ VkRect2D scissor = {{0, 0}, {16, 16}};
+ vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
+
+ // error produced here.
+ vkCmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
+
+ m_errorMonitor->VerifyFound();
+
+ m_commandBuffer->EndRenderPass();
+ m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, DrawTimeImageComponentTypeMismatchWithPipeline) {
+ TEST_DESCRIPTION(
+ "Test that an error is produced when the component type of an imageview disagrees with the type in the shader.");
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SINT component type, but bound descriptor");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "void main() { gl_Position = vec4(0); }\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(set=0, binding=0) uniform isampler2D s;\n"
+ "layout(location=0) out vec4 color;\n"
+ "void main() {\n"
+ " color = texelFetch(s, ivec2(0), 0);\n"
+ "}\n";
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+ pipe.AddDefaultColorAttachment();
+
+ VkTextureObj texture(m_device, nullptr); // UNORM texture by default, incompatible with isampler2D
+ VkSamplerObj sampler(m_device);
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.AppendSamplerTexture(&sampler, &texture);
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ VkResult err = pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+ ASSERT_VK_SUCCESS(err);
+
+ m_commandBuffer->begin();
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
+ vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+ m_commandBuffer->BindDescriptorSet(descriptorSet);
+
+ VkViewport viewport = {0, 0, 16, 16, 0, 1};
+ vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+ VkRect2D scissor = {{0, 0}, {16, 16}};
+ vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
+
+ // error produced here.
+ vkCmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
+
+ m_errorMonitor->VerifyFound();
+
+ m_commandBuffer->EndRenderPass();
+ m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, AttachmentDescriptionUndefinedFormat) {
+ TEST_DESCRIPTION("Create a render pass with an attachment description format set to VK_FORMAT_UNDEFINED");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_WARNING_BIT_EXT, "format is VK_FORMAT_UNDEFINED");
+
+ VkAttachmentReference color_attach = {};
+ color_attach.layout = VK_IMAGE_LAYOUT_GENERAL;
+ color_attach.attachment = 0;
+ VkSubpassDescription subpass = {};
+ subpass.colorAttachmentCount = 1;
+ subpass.pColorAttachments = &color_attach;
+
+ VkRenderPassCreateInfo rpci = {};
+ rpci.subpassCount = 1;
+ rpci.pSubpasses = &subpass;
+ rpci.attachmentCount = 1;
+ VkAttachmentDescription attach_desc = {};
+ attach_desc.format = VK_FORMAT_UNDEFINED;
+ attach_desc.samples = VK_SAMPLE_COUNT_1_BIT;
+ attach_desc.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
+ rpci.pAttachments = &attach_desc;
+ rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+ VkRenderPass rp;
+ VkResult result = vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp);
+
+ m_errorMonitor->VerifyFound();
+
+ if (result == VK_SUCCESS) {
+ vkDestroyRenderPass(m_device->device(), rp, NULL);
+ }
+}
+
+TEST_F(VkLayerTest, CreateImageViewNoMemoryBoundToImage) {
+ VkResult err;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ " used with no memory bound. Memory should be bound by calling vkBindImageMemory().");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ // Create an image and try to create a view with no memory backing the image
+ VkImage image;
+
+ const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
+ const int32_t tex_width = 32;
+ const int32_t tex_height = 32;
+
+ VkImageCreateInfo image_create_info = {};
+ image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ image_create_info.pNext = NULL;
+ image_create_info.imageType = VK_IMAGE_TYPE_2D;
+ image_create_info.format = tex_format;
+ image_create_info.extent.width = tex_width;
+ image_create_info.extent.height = tex_height;
+ image_create_info.extent.depth = 1;
+ image_create_info.mipLevels = 1;
+ image_create_info.arrayLayers = 1;
+ image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+ image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+ image_create_info.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
+ image_create_info.flags = 0;
+
+ err = vkCreateImage(m_device->device(), &image_create_info, NULL, &image);
+ ASSERT_VK_SUCCESS(err);
+
+ VkImageViewCreateInfo image_view_create_info = {};
+ image_view_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+ image_view_create_info.image = image;
+ image_view_create_info.viewType = VK_IMAGE_VIEW_TYPE_2D;
+ image_view_create_info.format = tex_format;
+ image_view_create_info.subresourceRange.layerCount = 1;
+ image_view_create_info.subresourceRange.baseMipLevel = 0;
+ image_view_create_info.subresourceRange.levelCount = 1;
+ image_view_create_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+
+ VkImageView view;
+ err = vkCreateImageView(m_device->device(), &image_view_create_info, NULL, &view);
+
+ m_errorMonitor->VerifyFound();
+ vkDestroyImage(m_device->device(), image, NULL);
+ // If last error is success, it still created the view, so delete it.
+ if (err == VK_SUCCESS) {
+ vkDestroyImageView(m_device->device(), view, NULL);
+ }
+}
+
+TEST_F(VkLayerTest, InvalidImageViewAspect) {
+ TEST_DESCRIPTION("Create an image and try to create a view with an invalid aspectMask");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageSubresource-aspectMask-parameter");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
+ VkImageObj image(m_device);
+ image.Init(32, 32, 1, tex_format, VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_TILING_LINEAR, 0);
+ ASSERT_TRUE(image.initialized());
+
+ VkImageViewCreateInfo image_view_create_info = {};
+ image_view_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+ image_view_create_info.image = image.handle();
+ image_view_create_info.viewType = VK_IMAGE_VIEW_TYPE_2D;
+ image_view_create_info.format = tex_format;
+ image_view_create_info.subresourceRange.baseMipLevel = 0;
+ image_view_create_info.subresourceRange.levelCount = 1;
+ image_view_create_info.subresourceRange.layerCount = 1;
+ // Cause an error by setting an invalid image aspect
+ image_view_create_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_METADATA_BIT;
+
+ VkImageView view;
+ vkCreateImageView(m_device->device(), &image_view_create_info, NULL, &view);
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, ExerciseGetImageSubresourceLayout) {
+ TEST_DESCRIPTION("Test vkGetImageSubresourceLayout() valid usages");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ VkSubresourceLayout subres_layout = {};
+
+ // VU 00732: image must have been created with tiling equal to VK_IMAGE_TILING_LINEAR
+ {
+ const VkImageTiling tiling = VK_IMAGE_TILING_OPTIMAL; // ERROR: violates VU 00732
+ VkImageObj img(m_device);
+ img.InitNoLayout(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, tiling);
+ ASSERT_TRUE(img.initialized());
+
+ VkImageSubresource subres = {};
+ subres.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ subres.mipLevel = 0;
+ subres.arrayLayer = 0;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkGetImageSubresourceLayout-image-00996");
+ vkGetImageSubresourceLayout(m_device->device(), img.image(), &subres, &subres_layout);
+ m_errorMonitor->VerifyFound();
+ }
+
+ // VU 00733: The aspectMask member of pSubresource must only have a single bit set
+ {
+ VkImageObj img(m_device);
+ img.InitNoLayout(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_TRANSFER_SRC_BIT);
+ ASSERT_TRUE(img.initialized());
+
+ VkImageSubresource subres = {};
+ subres.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_METADATA_BIT; // ERROR: triggers VU 00733
+ subres.mipLevel = 0;
+ subres.arrayLayer = 0;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkGetImageSubresourceLayout-aspectMask-00997");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageSubresource-aspectMask-parameter");
+ vkGetImageSubresourceLayout(m_device->device(), img.image(), &subres, &subres_layout);
+ m_errorMonitor->VerifyFound();
+ }
+
+ // 00739 mipLevel must be less than the mipLevels specified in VkImageCreateInfo when the image was created
+ {
+ VkImageObj img(m_device);
+ img.InitNoLayout(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_TRANSFER_SRC_BIT);
+ ASSERT_TRUE(img.initialized());
+
+ VkImageSubresource subres = {};
+ subres.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ subres.mipLevel = 1; // ERROR: triggers VU 00739
+ subres.arrayLayer = 0;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkGetImageSubresourceLayout-mipLevel-01716");
+ vkGetImageSubresourceLayout(m_device->device(), img.image(), &subres, &subres_layout);
+ m_errorMonitor->VerifyFound();
+ }
+
+ // 00740 arrayLayer must be less than the arrayLayers specified in VkImageCreateInfo when the image was created
+ {
+ VkImageObj img(m_device);
+ img.InitNoLayout(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_TRANSFER_SRC_BIT);
+ ASSERT_TRUE(img.initialized());
+
+ VkImageSubresource subres = {};
+ subres.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ subres.mipLevel = 0;
+ subres.arrayLayer = 1; // ERROR: triggers VU 00740
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkGetImageSubresourceLayout-arrayLayer-01717");
+ vkGetImageSubresourceLayout(m_device->device(), img.image(), &subres, &subres_layout);
+ m_errorMonitor->VerifyFound();
+ }
+}
+
+TEST_F(VkLayerTest, CopyImageLayerCountMismatch) {
+ TEST_DESCRIPTION(
+ "Try to copy between images with the source subresource having a different layerCount than the destination subresource");
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ // Create two images to copy between
+ VkImageObj src_image_obj(m_device);
+ VkImageObj dst_image_obj(m_device);
+
+ VkImageCreateInfo image_create_info = {};
+ image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ image_create_info.pNext = NULL;
+ image_create_info.imageType = VK_IMAGE_TYPE_2D;
+ image_create_info.format = VK_FORMAT_B8G8R8A8_UNORM;
+ image_create_info.extent.width = 32;
+ image_create_info.extent.height = 32;
+ image_create_info.extent.depth = 1;
+ image_create_info.mipLevels = 1;
+ image_create_info.arrayLayers = 4;
+ image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+ image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+ image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
+ image_create_info.flags = 0;
+
+ src_image_obj.init(&image_create_info);
+ ASSERT_TRUE(src_image_obj.initialized());
+
+ image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+ dst_image_obj.init(&image_create_info);
+ ASSERT_TRUE(dst_image_obj.initialized());
+
+ m_commandBuffer->begin();
+ VkImageCopy copyRegion;
+ copyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ copyRegion.srcSubresource.mipLevel = 0;
+ copyRegion.srcSubresource.baseArrayLayer = 0;
+ copyRegion.srcSubresource.layerCount = 1;
+ copyRegion.srcOffset.x = 0;
+ copyRegion.srcOffset.y = 0;
+ copyRegion.srcOffset.z = 0;
+ copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ copyRegion.dstSubresource.mipLevel = 0;
+ copyRegion.dstSubresource.baseArrayLayer = 0;
+ // Introduce failure by forcing the dst layerCount to differ from src
+ copyRegion.dstSubresource.layerCount = 3;
+ copyRegion.dstOffset.x = 0;
+ copyRegion.dstOffset.y = 0;
+ copyRegion.dstOffset.z = 0;
+ copyRegion.extent.width = 1;
+ copyRegion.extent.height = 1;
+ copyRegion.extent.depth = 1;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-extent-00140");
+ m_commandBuffer->CopyImage(src_image_obj.image(), VK_IMAGE_LAYOUT_GENERAL, dst_image_obj.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+ &copyRegion);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, ImageLayerUnsupportedFormat) {
+ TEST_DESCRIPTION("Creating images with unsupported formats ");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ // Create image with unsupported format - Expect FORMAT_UNSUPPORTED
+ VkImageCreateInfo image_create_info = {};
+ image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ image_create_info.imageType = VK_IMAGE_TYPE_2D;
+ image_create_info.format = VK_FORMAT_UNDEFINED;
+ image_create_info.extent.width = 32;
+ image_create_info.extent.height = 32;
+ image_create_info.extent.depth = 1;
+ image_create_info.mipLevels = 1;
+ image_create_info.arrayLayers = 1;
+ image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+ image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+ image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-format-00943");
+
+ VkImage image;
+ vkCreateImage(m_device->handle(), &image_create_info, NULL, &image);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreateImageViewFormatMismatchUnrelated) {
+ TEST_DESCRIPTION("Create an image with a color format, then try to create a depth view of it");
+
+ if (!EnableDeviceProfileLayer()) {
+ printf("%s Failed to enable device profile layer.\n", kSkipPrefix);
+ return;
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ // Load required functions
+ PFN_vkSetPhysicalDeviceFormatPropertiesEXT fpvkSetPhysicalDeviceFormatPropertiesEXT =
+ (PFN_vkSetPhysicalDeviceFormatPropertiesEXT)vkGetInstanceProcAddr(instance(), "vkSetPhysicalDeviceFormatPropertiesEXT");
+ PFN_vkGetOriginalPhysicalDeviceFormatPropertiesEXT fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT =
+ (PFN_vkGetOriginalPhysicalDeviceFormatPropertiesEXT)vkGetInstanceProcAddr(instance(),
+ "vkGetOriginalPhysicalDeviceFormatPropertiesEXT");
+
+ if (!(fpvkSetPhysicalDeviceFormatPropertiesEXT) || !(fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT)) {
+ printf("%s Can't find device_profile_api functions; skipped.\n", kSkipPrefix);
+ return;
+ }
+
+ auto depth_format = FindSupportedDepthStencilFormat(gpu());
+ if (!depth_format) {
+ printf("%s Couldn't find depth stencil image format.\n", kSkipPrefix);
+ return;
+ }
+
+ VkFormatProperties formatProps;
+
+ fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT(gpu(), depth_format, &formatProps);
+ formatProps.optimalTilingFeatures |= VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT;
+ fpvkSetPhysicalDeviceFormatPropertiesEXT(gpu(), depth_format, formatProps);
+
+ VkImageObj image(m_device);
+ image.Init(128, 128, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+ ASSERT_TRUE(image.initialized());
+
+ VkImageView imgView;
+ VkImageViewCreateInfo imgViewInfo = {};
+ imgViewInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+ imgViewInfo.image = image.handle();
+ imgViewInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
+ imgViewInfo.format = depth_format;
+ imgViewInfo.subresourceRange.layerCount = 1;
+ imgViewInfo.subresourceRange.baseMipLevel = 0;
+ imgViewInfo.subresourceRange.levelCount = 1;
+ imgViewInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+
+ // Can't use depth format for view into color image - Expect INVALID_FORMAT
+ m_errorMonitor->SetDesiredFailureMsg(
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "Formats MUST be IDENTICAL unless VK_IMAGE_CREATE_MUTABLE_FORMAT BIT was set on image creation.");
+ vkCreateImageView(m_device->handle(), &imgViewInfo, NULL, &imgView);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreateImageViewNoMutableFormatBit) {
+ TEST_DESCRIPTION("Create an image view with a different format, when the image does not have MUTABLE_FORMAT bit");
+
+ if (!EnableDeviceProfileLayer()) {
+ printf("%s Couldn't enable device profile layer.\n", kSkipPrefix);
+ return;
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ PFN_vkSetPhysicalDeviceFormatPropertiesEXT fpvkSetPhysicalDeviceFormatPropertiesEXT = nullptr;
+ PFN_vkGetOriginalPhysicalDeviceFormatPropertiesEXT fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT = nullptr;
+
+ // Load required functions
+ if (!LoadDeviceProfileLayer(fpvkSetPhysicalDeviceFormatPropertiesEXT, fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT)) {
+ printf("%s Required extensions are not present.\n", kSkipPrefix);
+ return;
+ }
+
+ VkImageObj image(m_device);
+ image.Init(128, 128, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+ ASSERT_TRUE(image.initialized());
+
+ VkFormatProperties formatProps;
+
+ fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_B8G8R8A8_UINT, &formatProps);
+ formatProps.optimalTilingFeatures |= VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT;
+ fpvkSetPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_B8G8R8A8_UINT, formatProps);
+
+ VkImageView imgView;
+ VkImageViewCreateInfo imgViewInfo = {};
+ imgViewInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+ imgViewInfo.image = image.handle();
+ imgViewInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
+ imgViewInfo.format = VK_FORMAT_B8G8R8A8_UINT;
+ imgViewInfo.subresourceRange.layerCount = 1;
+ imgViewInfo.subresourceRange.baseMipLevel = 0;
+ imgViewInfo.subresourceRange.levelCount = 1;
+ imgViewInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+
+ // Same compatibility class but no MUTABLE_FORMAT bit - Expect
+ // VIEW_CREATE_ERROR
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageViewCreateInfo-image-01019");
+ vkCreateImageView(m_device->handle(), &imgViewInfo, NULL, &imgView);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreateImageViewDifferentClass) {
+ TEST_DESCRIPTION("Passing bad parameters to CreateImageView");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ if (!(m_device->format_properties(VK_FORMAT_R8_UINT).optimalTilingFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT)) {
+ printf("%s Device does not support R8_UINT as color attachment; skipped.\n", kSkipPrefix);
+ return;
+ }
+
+ VkImageCreateInfo mutImgInfo = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+ nullptr,
+ VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT,
+ VK_IMAGE_TYPE_2D,
+ VK_FORMAT_R8_UINT,
+ {128, 128, 1},
+ 1,
+ 1,
+ VK_SAMPLE_COUNT_1_BIT,
+ VK_IMAGE_TILING_OPTIMAL,
+ VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
+ VK_SHARING_MODE_EXCLUSIVE,
+ 0,
+ nullptr,
+ VK_IMAGE_LAYOUT_UNDEFINED};
+ VkImageObj mutImage(m_device);
+ mutImage.init(&mutImgInfo);
+ ASSERT_TRUE(mutImage.initialized());
+
+ VkImageView imgView;
+ VkImageViewCreateInfo imgViewInfo = {};
+ imgViewInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+ imgViewInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
+ imgViewInfo.format = VK_FORMAT_B8G8R8A8_UNORM;
+ imgViewInfo.subresourceRange.layerCount = 1;
+ imgViewInfo.subresourceRange.baseMipLevel = 0;
+ imgViewInfo.subresourceRange.levelCount = 1;
+ imgViewInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ imgViewInfo.image = mutImage.handle();
+
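+ // R8_UINT (8-bit texel block) and B8G8R8A8_UNORM (32-bit texel block) are in different format
+ // compatibility classes, so the view is rejected even though the image was created with MUTABLE_FORMAT_BIT.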
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageViewCreateInfo-image-01018");
+ vkCreateImageView(m_device->handle(), &imgViewInfo, NULL, &imgView);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, MultiplaneIncompatibleViewFormat) {
+ TEST_DESCRIPTION("Positive/negative tests of multiplane imageview format compatibility");
+
+ // Enable KHR multiplane req'd extensions
+ bool mp_extensions = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
+ VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION);
+ if (mp_extensions) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ }
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+ mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+ mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+ mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+ if (mp_extensions) {
+ m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+ } else {
+ printf("%s test requires KHR multiplane extensions, not available. Skipping.\n", kSkipPrefix);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ VkImageCreateInfo ci = {};
+ ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ ci.pNext = NULL;
+ ci.flags = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
+ ci.imageType = VK_IMAGE_TYPE_2D;
+ ci.format = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM;
+ ci.tiling = VK_IMAGE_TILING_OPTIMAL;
+ ci.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
+ ci.extent = {128, 128, 1};
+ ci.mipLevels = 1;
+ ci.arrayLayers = 1;
+ ci.samples = VK_SAMPLE_COUNT_1_BIT;
+ ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+ ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+
+ // Verify format
+ VkFormatFeatureFlags features = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT;
+ bool supported = ImageFormatAndFeaturesSupported(instance(), gpu(), ci, features);
+ if (!supported) {
+ printf("%s Multiplane image format not supported. Skipping test.\n", kSkipPrefix);
+ return;
+ }
+
+ VkImageObj image_obj(m_device);
+ image_obj.init(&ci);
+ ASSERT_TRUE(image_obj.initialized());
+
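+ // Each plane of VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM must be viewed with its listed compatible format
+ // (VK_FORMAT_R8_UNORM for all three planes); a format that is merely the same size, such as R8_SNORM, is rejected.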
+ VkImageViewCreateInfo ivci = {};
+ ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+ ivci.image = image_obj.image();
+ ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+ ivci.format = VK_FORMAT_R8_SNORM; // Compat is VK_FORMAT_R8_UNORM
+ ivci.subresourceRange.layerCount = 1;
+ ivci.subresourceRange.baseMipLevel = 0;
+ ivci.subresourceRange.levelCount = 1;
+ ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_PLANE_1_BIT;
+
+ // Incompatible format error
+ VkImageView imageView = VK_NULL_HANDLE;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageViewCreateInfo-image-01586");
+ vkCreateImageView(m_device->device(), &ivci, NULL, &imageView);
+ m_errorMonitor->VerifyFound();
+ vkDestroyImageView(m_device->device(), imageView, NULL); // VK_NULL_HANDLE allowed
+ imageView = VK_NULL_HANDLE;
+
+ // Correct format succeeds
+ ivci.format = VK_FORMAT_R8_UNORM;
+ m_errorMonitor->ExpectSuccess();
+ vkCreateImageView(m_device->device(), &ivci, NULL, &imageView);
+ m_errorMonitor->VerifyNotFound();
+ vkDestroyImageView(m_device->device(), imageView, NULL); // VK_NULL_HANDLE allowed
+ imageView = VK_NULL_HANDLE;
+
+ // Try a multiplane imageview
+ ivci.format = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM;
+ ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ m_errorMonitor->ExpectSuccess();
+ vkCreateImageView(m_device->device(), &ivci, NULL, &imageView);
+ m_errorMonitor->VerifyNotFound();
+ vkDestroyImageView(m_device->device(), imageView, NULL); // VK_NULL_HANDLE allowed
+}
+
+TEST_F(VkLayerTest, CreateImageViewInvalidSubresourceRange) {
+ TEST_DESCRIPTION("Passing bad image subrange to CreateImageView");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ VkImageObj image(m_device);
+ image.Init(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL);
+ ASSERT_TRUE(image.create_info().arrayLayers == 1);
+ ASSERT_TRUE(image.initialized());
+
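+ // The image has exactly one mip level and one array layer (asserted above), so each sub-case below goes
+ // out of range simply by referencing level/layer 1 or by using a zero count.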
+ VkImageView img_view;
+ VkImageViewCreateInfo img_view_info_template = {};
+ img_view_info_template.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+ img_view_info_template.image = image.handle();
+ img_view_info_template.viewType = VK_IMAGE_VIEW_TYPE_2D_ARRAY;
+ img_view_info_template.format = image.format();
+ // subresourceRange to be filled later for the purposes of this test
+ img_view_info_template.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ img_view_info_template.subresourceRange.baseMipLevel = 0;
+ img_view_info_template.subresourceRange.levelCount = 0;
+ img_view_info_template.subresourceRange.baseArrayLayer = 0;
+ img_view_info_template.subresourceRange.layerCount = 0;
+
+ // Try baseMipLevel >= image.mipLevels with VK_REMAINING_MIP_LEVELS
+ {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageViewCreateInfo-subresourceRange-01478");
+ const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 1, VK_REMAINING_MIP_LEVELS, 0, 1};
+ VkImageViewCreateInfo img_view_info = img_view_info_template;
+ img_view_info.subresourceRange = range;
+ vkCreateImageView(m_device->handle(), &img_view_info, nullptr, &img_view);
+ m_errorMonitor->VerifyFound();
+ }
+
+ // Try baseMipLevel >= image.mipLevels without VK_REMAINING_MIP_LEVELS
+ {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageViewCreateInfo-subresourceRange-01478");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageViewCreateInfo-subresourceRange-01718");
+ const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 1, 1, 0, 1};
+ VkImageViewCreateInfo img_view_info = img_view_info_template;
+ img_view_info.subresourceRange = range;
+ vkCreateImageView(m_device->handle(), &img_view_info, nullptr, &img_view);
+ m_errorMonitor->VerifyFound();
+ }
+
+ // Try levelCount = 0
+ {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageViewCreateInfo-subresourceRange-01718");
+ const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 0, 1};
+ VkImageViewCreateInfo img_view_info = img_view_info_template;
+ img_view_info.subresourceRange = range;
+ vkCreateImageView(m_device->handle(), &img_view_info, nullptr, &img_view);
+ m_errorMonitor->VerifyFound();
+ }
+
+ // Try baseMipLevel + levelCount > image.mipLevels
+ {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageViewCreateInfo-subresourceRange-01718");
+ const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 2, 0, 1};
+ VkImageViewCreateInfo img_view_info = img_view_info_template;
+ img_view_info.subresourceRange = range;
+ vkCreateImageView(m_device->handle(), &img_view_info, nullptr, &img_view);
+ m_errorMonitor->VerifyFound();
+ }
+
+ // These tests rely on the Maintenance1 extension not being enabled, and are only valid for Vulkan 1.0
+ if (m_device->props.apiVersion < VK_API_VERSION_1_1) {
+ // Try baseArrayLayer >= image.arrayLayers with VK_REMAINING_ARRAY_LAYERS
+ {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkImageViewCreateInfo-subresourceRange-01480");
+ const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 1, VK_REMAINING_ARRAY_LAYERS};
+ VkImageViewCreateInfo img_view_info = img_view_info_template;
+ img_view_info.subresourceRange = range;
+ vkCreateImageView(m_device->handle(), &img_view_info, nullptr, &img_view);
+ m_errorMonitor->VerifyFound();
+ }
+
+ // Try baseArrayLayer >= image.arrayLayers without VK_REMAINING_ARRAY_LAYERS
+ {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkImageViewCreateInfo-subresourceRange-01480");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkImageViewCreateInfo-subresourceRange-01719");
+ const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 1, 1};
+ VkImageViewCreateInfo img_view_info = img_view_info_template;
+ img_view_info.subresourceRange = range;
+ vkCreateImageView(m_device->handle(), &img_view_info, nullptr, &img_view);
+ m_errorMonitor->VerifyFound();
+ }
+
+ // Try layerCount = 0
+ {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkImageViewCreateInfo-subresourceRange-01719");
+ const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 0};
+ VkImageViewCreateInfo img_view_info = img_view_info_template;
+ img_view_info.subresourceRange = range;
+ vkCreateImageView(m_device->handle(), &img_view_info, nullptr, &img_view);
+ m_errorMonitor->VerifyFound();
+ }
+
+ // Try baseArrayLayer + layerCount > image.arrayLayers
+ {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkImageViewCreateInfo-subresourceRange-01719");
+ const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 2};
+ VkImageViewCreateInfo img_view_info = img_view_info_template;
+ img_view_info.subresourceRange = range;
+ vkCreateImageView(m_device->handle(), &img_view_info, nullptr, &img_view);
+ m_errorMonitor->VerifyFound();
+ }
+ }
+}
+
+TEST_F(VkLayerTest, CompressedImageMipCopyTests) {
+ TEST_DESCRIPTION("Image/Buffer copies for higher mip levels");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ VkPhysicalDeviceFeatures device_features = {};
+ ASSERT_NO_FATAL_FAILURE(GetPhysicalDeviceFeatures(&device_features));
+ VkFormat compressed_format = VK_FORMAT_UNDEFINED;
+ if (device_features.textureCompressionBC) {
+ compressed_format = VK_FORMAT_BC3_SRGB_BLOCK;
+ } else if (device_features.textureCompressionETC2) {
+ compressed_format = VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK;
+ } else if (device_features.textureCompressionASTC_LDR) {
+ compressed_format = VK_FORMAT_ASTC_4x4_UNORM_BLOCK;
+ } else {
+ printf("%s No compressed formats supported - CompressedImageMipCopyTests skipped.\n", kSkipPrefix);
+ return;
+ }
+
+ VkImageCreateInfo ci;
+ ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ ci.pNext = NULL;
+ ci.flags = 0;
+ ci.imageType = VK_IMAGE_TYPE_2D;
+ ci.format = compressed_format;
+ ci.extent = {32, 32, 1};
+ ci.mipLevels = 6;
+ ci.arrayLayers = 1;
+ ci.samples = VK_SAMPLE_COUNT_1_BIT;
+ ci.tiling = VK_IMAGE_TILING_OPTIMAL;
+ ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+ ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+ ci.queueFamilyIndexCount = 0;
+ ci.pQueueFamilyIndices = NULL;
+ ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+
+ VkImageObj image(m_device);
+ image.init(&ci);
+ ASSERT_TRUE(image.initialized());
+
+ VkImageObj odd_image(m_device);
+ ci.extent = {31, 32, 1}; // Mips are [31,32] [15,16] [7,8] [3,4], [1,2] [1,1]
+ odd_image.init(&ci);
+ ASSERT_TRUE(odd_image.initialized());
+
+ // Allocate buffers
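+ // All candidate compressed formats use 16-byte 4x4 blocks (1 byte per texel), so mip 0 (32x32) needs
+ // 1024 bytes, mip 2 (8x8) 64 bytes, and mip 3 (4x4) 16 bytes; the 8-byte buffer is deliberately smaller
+ // than a single block.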
+ VkMemoryPropertyFlags reqs = 0;
+ VkBufferObj buffer_1024, buffer_64, buffer_16, buffer_8;
+ buffer_1024.init_as_src_and_dst(*m_device, 1024, reqs);
+ buffer_64.init_as_src_and_dst(*m_device, 64, reqs);
+ buffer_16.init_as_src_and_dst(*m_device, 16, reqs);
+ buffer_8.init_as_src_and_dst(*m_device, 8, reqs);
+
+ VkBufferImageCopy region = {};
+ region.bufferRowLength = 0;
+ region.bufferImageHeight = 0;
+ region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ region.imageSubresource.layerCount = 1;
+ region.imageOffset = {0, 0, 0};
+ region.bufferOffset = 0;
+
+ // start recording
+ m_commandBuffer->begin();
+
+ // Mip level copies that work - 5 levels
+ m_errorMonitor->ExpectSuccess();
+
+ // Mip 0 should fit in 1k buffer - 1k texels @ 1b each
+ region.imageExtent = {32, 32, 1};
+ region.imageSubresource.mipLevel = 0;
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_1024.handle(), 1, &region);
+ vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer_1024.handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
+
+ // Mip 2 should fit in 64b buffer - 64 texels @ 1b each
+ region.imageExtent = {8, 8, 1};
+ region.imageSubresource.mipLevel = 2;
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_64.handle(), 1, &region);
+ vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer_64.handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
+
+ // Mip 3 should fit in 16b buffer - 16 texels @ 1b each
+ region.imageExtent = {4, 4, 1};
+ region.imageSubresource.mipLevel = 3;
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16.handle(), 1, &region);
+ vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer_16.handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
+
+ // Mip 4&5 should fit in 16b buffer with no complaint - 4 & 1 texels @ 1b each
+ region.imageExtent = {2, 2, 1};
+ region.imageSubresource.mipLevel = 4;
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16.handle(), 1, &region);
+ vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer_16.handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
+
+ region.imageExtent = {1, 1, 1};
+ region.imageSubresource.mipLevel = 5;
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16.handle(), 1, &region);
+ vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer_16.handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
+ m_errorMonitor->VerifyNotFound();
+
+ // Buffer must accommodate a full compressed block, regardless of texel count
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImageToBuffer-pRegions-00183");
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_8.handle(), 1, &region);
+ m_errorMonitor->VerifyFound();
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyBufferToImage-pRegions-00171");
+ vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer_8.handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
+ m_errorMonitor->VerifyFound();
+
+ // Copy width < compressed block size, but not the full mip width
+ region.imageExtent = {1, 2, 1};
+ region.imageSubresource.mipLevel = 4;
+ m_errorMonitor->SetDesiredFailureMsg(
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkBufferImageCopy-imageExtent-00207"); // width not a multiple of compressed block width
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdCopyImageToBuffer-imageOffset-01794"); // image transfer granularity
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16.handle(), 1, &region);
+ m_errorMonitor->VerifyFound();
+ m_errorMonitor->SetDesiredFailureMsg(
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkBufferImageCopy-imageExtent-00207"); // width not a multiple of compressed block width
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdCopyBufferToImage-imageOffset-01793"); // image transfer granularity
+ vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer_16.handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
+ m_errorMonitor->VerifyFound();
+
+ // Copy height < compressed block size but not the full mip height
+ region.imageExtent = {2, 1, 1};
+ m_errorMonitor->SetDesiredFailureMsg(
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkBufferImageCopy-imageExtent-00208"); // height not a multiple of compressed block height
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdCopyImageToBuffer-imageOffset-01794"); // image transfer granularity
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16.handle(), 1, &region);
+ m_errorMonitor->VerifyFound();
+ m_errorMonitor->SetDesiredFailureMsg(
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkBufferImageCopy-imageExtent-00208"); // height not a multiple of compressed block height
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdCopyBufferToImage-imageOffset-01793"); // image transfer granularity
+ vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer_16.handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
+ m_errorMonitor->VerifyFound();
+
+ // Offsets must be multiple of compressed block size
+ region.imageOffset = {1, 1, 0};
+ region.imageExtent = {1, 1, 1};
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkBufferImageCopy-imageOffset-00205"); // imageOffset not a multiple of block size
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdCopyImageToBuffer-imageOffset-01794"); // image transfer granularity
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16.handle(), 1, &region);
+ m_errorMonitor->VerifyFound();
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkBufferImageCopy-imageOffset-00205"); // imageOffset not a multiple of block size
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdCopyBufferToImage-imageOffset-01793"); // image transfer granularity
+ vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer_16.handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
+ m_errorMonitor->VerifyFound();
+
+ // Offset + extent width = mip width - should succeed
+ region.imageOffset = {4, 4, 0};
+ region.imageExtent = {3, 4, 1};
+ region.imageSubresource.mipLevel = 2;
+ m_errorMonitor->ExpectSuccess();
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), odd_image.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16.handle(), 1, &region);
+ vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer_16.handle(), odd_image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
+ m_errorMonitor->VerifyNotFound();
+
+ // Offset + extent width > mip width, but still within the final compressed block - should succeed
+ region.imageExtent = {4, 4, 1};
+ m_errorMonitor->ExpectSuccess();
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), odd_image.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16.handle(), 1, &region);
+ vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer_16.handle(), odd_image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
+ m_errorMonitor->VerifyNotFound();
+
+ // Offset + extent height < mip height and not a multiple of block height - should fail
+ region.imageExtent = {3, 3, 1};
+ m_errorMonitor->SetDesiredFailureMsg(
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkBufferImageCopy-imageExtent-00208"); // offset + extent height not a multiple of block height
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdCopyImageToBuffer-imageOffset-01794"); // image transfer granularity
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), odd_image.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16.handle(), 1, &region);
+ m_errorMonitor->VerifyFound();
+ m_errorMonitor->SetDesiredFailureMsg(
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkBufferImageCopy-imageExtent-00208"); // offset + extent height not a multiple of block height
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdCopyBufferToImage-imageOffset-01793"); // image transfer granularity
+ vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer_16.handle(), odd_image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, ImageBufferCopyTests) {
+ TEST_DESCRIPTION("Image to buffer and buffer to image tests");
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ // Bail if any dimension of transfer granularity is 0.
+ auto index = m_device->graphics_queue_node_index_;
+ auto queue_family_properties = m_device->phy().queue_properties();
+ if ((queue_family_properties[index].minImageTransferGranularity.depth == 0) ||
+ (queue_family_properties[index].minImageTransferGranularity.width == 0) ||
+ (queue_family_properties[index].minImageTransferGranularity.height == 0)) {
+ printf("%s Subresource copies are disallowed when xfer granularity (x|y|z) is 0. Skipped.\n", kSkipPrefix);
+ return;
+ }
+
+ VkImageObj image_64k(m_device); // 128^2 texels, 64k
+ VkImageObj image_16k(m_device); // 64^2 texels, 16k
+ VkImageObj image_16k_depth(m_device); // 64^2 texels, depth, 16k
+ VkImageObj ds_image_4D_1S(m_device); // 256^2 texels, 512kb (256k depth, 64k stencil, 192k pack)
+ VkImageObj ds_image_3D_1S(m_device); // 256^2 texels, 256kb (192k depth, 64k stencil)
+ VkImageObj ds_image_2D(m_device); // 256^2 texels, 128k (128k depth)
+ VkImageObj ds_image_1S(m_device); // 256^2 texels, 64k (64k stencil)
+
+ image_64k.Init(128, 128, 1, VK_FORMAT_R8G8B8A8_UINT,
+ VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
+ VK_IMAGE_TILING_OPTIMAL, 0);
+ image_16k.Init(64, 64, 1, VK_FORMAT_R8G8B8A8_UINT,
+ VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
+ VK_IMAGE_TILING_OPTIMAL, 0);
+ ASSERT_TRUE(image_64k.initialized());
+ ASSERT_TRUE(image_16k.initialized());
+
+ // Verify all needed Depth/Stencil formats are supported
+ bool missing_ds_support = false;
+ VkFormatProperties props = {0, 0, 0};
+ vkGetPhysicalDeviceFormatProperties(m_device->phy().handle(), VK_FORMAT_D32_SFLOAT_S8_UINT, &props);
+ missing_ds_support |= (props.bufferFeatures == 0 && props.linearTilingFeatures == 0 && props.optimalTilingFeatures == 0);
+ missing_ds_support |= (props.optimalTilingFeatures & VK_FORMAT_FEATURE_TRANSFER_SRC_BIT) == 0;
+ missing_ds_support |= (props.optimalTilingFeatures & VK_FORMAT_FEATURE_TRANSFER_DST_BIT) == 0;
+ vkGetPhysicalDeviceFormatProperties(m_device->phy().handle(), VK_FORMAT_D24_UNORM_S8_UINT, &props);
+ missing_ds_support |= (props.bufferFeatures == 0 && props.linearTilingFeatures == 0 && props.optimalTilingFeatures == 0);
+ missing_ds_support |= (props.optimalTilingFeatures & VK_FORMAT_FEATURE_TRANSFER_SRC_BIT) == 0;
+ missing_ds_support |= (props.optimalTilingFeatures & VK_FORMAT_FEATURE_TRANSFER_DST_BIT) == 0;
+ vkGetPhysicalDeviceFormatProperties(m_device->phy().handle(), VK_FORMAT_D16_UNORM, &props);
+ missing_ds_support |= (props.bufferFeatures == 0 && props.linearTilingFeatures == 0 && props.optimalTilingFeatures == 0);
+ missing_ds_support |= (props.optimalTilingFeatures & VK_FORMAT_FEATURE_TRANSFER_SRC_BIT) == 0;
+ missing_ds_support |= (props.optimalTilingFeatures & VK_FORMAT_FEATURE_TRANSFER_DST_BIT) == 0;
+ vkGetPhysicalDeviceFormatProperties(m_device->phy().handle(), VK_FORMAT_S8_UINT, &props);
+ missing_ds_support |= (props.bufferFeatures == 0 && props.linearTilingFeatures == 0 && props.optimalTilingFeatures == 0);
+ missing_ds_support |= (props.optimalTilingFeatures & VK_FORMAT_FEATURE_TRANSFER_SRC_BIT) == 0;
+ missing_ds_support |= (props.optimalTilingFeatures & VK_FORMAT_FEATURE_TRANSFER_DST_BIT) == 0;
+
+ if (!missing_ds_support) {
+ image_16k_depth.Init(64, 64, 1, VK_FORMAT_D24_UNORM_S8_UINT,
+ VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+ ASSERT_TRUE(image_16k_depth.initialized());
+
+ ds_image_4D_1S.Init(
+ 256, 256, 1, VK_FORMAT_D32_SFLOAT_S8_UINT,
+ VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
+ VK_IMAGE_TILING_OPTIMAL, 0);
+ ASSERT_TRUE(ds_image_4D_1S.initialized());
+
+ ds_image_3D_1S.Init(
+ 256, 256, 1, VK_FORMAT_D24_UNORM_S8_UINT,
+ VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
+ VK_IMAGE_TILING_OPTIMAL, 0);
+ ASSERT_TRUE(ds_image_3D_1S.initialized());
+
+ ds_image_2D.Init(
+ 256, 256, 1, VK_FORMAT_D16_UNORM,
+ VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
+ VK_IMAGE_TILING_OPTIMAL, 0);
+ ASSERT_TRUE(ds_image_2D.initialized());
+
+ ds_image_1S.Init(
+ 256, 256, 1, VK_FORMAT_S8_UINT,
+ VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
+ VK_IMAGE_TILING_OPTIMAL, 0);
+ ASSERT_TRUE(ds_image_1S.initialized());
+ }
+
+ // Allocate buffers
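+ // The color images use 4 bytes per texel, so a tightly packed 64x64 region is exactly 16k and a 128x128
+ // region is 64k; the 128k and 256k buffers are sized for the 256x256 depth/stencil copies further down.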
+ VkBufferObj buffer_256k, buffer_128k, buffer_64k, buffer_16k;
+ VkMemoryPropertyFlags reqs = 0;
+ buffer_256k.init_as_src_and_dst(*m_device, 262144, reqs); // 256k
+ buffer_128k.init_as_src_and_dst(*m_device, 131072, reqs); // 128k
+ buffer_64k.init_as_src_and_dst(*m_device, 65536, reqs); // 64k
+ buffer_16k.init_as_src_and_dst(*m_device, 16384, reqs); // 16k
+
+ VkBufferImageCopy region = {};
+ region.bufferRowLength = 0;
+ region.bufferImageHeight = 0;
+ region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ region.imageSubresource.layerCount = 1;
+ region.imageOffset = {0, 0, 0};
+ region.imageExtent = {64, 64, 1};
+ region.bufferOffset = 0;
+
+ // attempt copies before putting command buffer in recording state
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyBufferToImage-commandBuffer-recording");
+ vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer_64k.handle(), image_64k.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImageToBuffer-commandBuffer-recording");
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image_64k.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_64k.handle(), 1, &region);
+ m_errorMonitor->VerifyFound();
+
+ // start recording
+ m_commandBuffer->begin();
+
+ // successful copies
+ m_errorMonitor->ExpectSuccess();
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16k.handle(), 1, &region);
+ vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer_16k.handle(), image_16k.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
+ region.imageOffset.x = 16; // 16k copy, offset requires larger image
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image_64k.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16k.handle(), 1, &region);
+ region.imageExtent.height = 78; // > 16k copy requires larger buffer & image
+ vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer_64k.handle(), image_64k.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
+ region.imageOffset.x = 0;
+ region.imageExtent.height = 64;
+ region.bufferOffset = 256; // 16k copy with buffer offset, requires larger buffer
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_64k.handle(), 1, &region);
+ m_errorMonitor->VerifyNotFound();
+
+ // image/buffer too small (extent too large) on copy to image
+ region.imageExtent = {65, 64, 1};
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdCopyBufferToImage-pRegions-00171"); // buffer too small
+ vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer_16k.handle(), image_64k.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdCopyBufferToImage-pRegions-00172"); // image too small
+ vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer_64k.handle(), image_16k.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
+ m_errorMonitor->VerifyFound();
+
+ // image/buffer too small (offset) on copy to image
+ region.imageExtent = {64, 64, 1};
+ region.imageOffset = {0, 4, 0};
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdCopyBufferToImage-pRegions-00171"); // buffer too small
+ vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer_16k.handle(), image_64k.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdCopyBufferToImage-pRegions-00172"); // image too small
+ vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer_64k.handle(), image_16k.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
+ m_errorMonitor->VerifyFound();
+
+ // image/buffer too small on copy to buffer
+ region.imageExtent = {64, 64, 1};
+ region.imageOffset = {0, 0, 0};
+ region.bufferOffset = 4;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdCopyImageToBuffer-pRegions-00183"); // buffer too small
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image_64k.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16k.handle(), 1, &region);
+ m_errorMonitor->VerifyFound();
+
+ region.imageExtent = {64, 65, 1};
+ region.bufferOffset = 0;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdCopyImageToBuffer-pRegions-00182"); // image too small
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_64k.handle(), 1, &region);
+ m_errorMonitor->VerifyFound();
+
+ // buffer size OK but rowlength causes loose packing
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImageToBuffer-pRegions-00183");
+ region.imageExtent = {64, 64, 1};
+ region.bufferRowLength = 68;
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16k.handle(), 1, &region);
+ m_errorMonitor->VerifyFound();
+
+ // An extent with zero area should produce a warning, but no error
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_WARNING_BIT_EXT | VK_DEBUG_REPORT_ERROR_BIT_EXT, "} has zero area");
+ region.imageExtent.width = 0;
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16k.handle(), 1, &region);
+ m_errorMonitor->VerifyFound();
+
+ // aspect bits
+ region.imageExtent = {64, 64, 1};
+ region.bufferRowLength = 0;
+ region.bufferImageHeight = 0;
+ if (!missing_ds_support) {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkBufferImageCopy-aspectMask-00212"); // more than 1 aspect bit set
+ region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k_depth.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16k.handle(), 1,
+ &region);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkBufferImageCopy-aspectMask-00211"); // color aspect does not match the depth/stencil image format
+ region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k_depth.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16k.handle(), 1,
+ &region);
+ m_errorMonitor->VerifyFound();
+ }
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkBufferImageCopy-aspectMask-00211"); // depth aspect does not match the color image format
+ region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16k.handle(), 1, &region);
+ m_errorMonitor->VerifyFound();
+ region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+
+ // Out-of-range mip levels should fail
+ region.imageSubresource.mipLevel = image_16k.create_info().mipLevels + 1;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImageToBuffer-imageSubresource-01703");
+ m_errorMonitor->SetDesiredFailureMsg(
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdCopyImageToBuffer-pRegions-00182"); // unavoidable "region exceeds image bounds" for non-existent mip
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16k.handle(), 1, &region);
+ m_errorMonitor->VerifyFound();
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyBufferToImage-imageSubresource-01701");
+ m_errorMonitor->SetDesiredFailureMsg(
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdCopyBufferToImage-pRegions-00172"); // unavoidable "region exceeds image bounds" for non-existent mip
+ vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer_16k.handle(), image_16k.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
+ m_errorMonitor->VerifyFound();
+ region.imageSubresource.mipLevel = 0;
+
+ // Out-of-range array layers should fail
+ region.imageSubresource.baseArrayLayer = image_16k.create_info().arrayLayers;
+ region.imageSubresource.layerCount = 1;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImageToBuffer-imageSubresource-01704");
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16k.handle(), 1, &region);
+ m_errorMonitor->VerifyFound();
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyBufferToImage-imageSubresource-01702");
+ vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer_16k.handle(), image_16k.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
+ m_errorMonitor->VerifyFound();
+ region.imageSubresource.baseArrayLayer = 0;
+
+ // Layout mismatch should fail
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImageToBuffer-srcImageLayout-00189");
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, buffer_16k.handle(),
+ 1, &region);
+ m_errorMonitor->VerifyFound();
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyBufferToImage-dstImageLayout-00180");
+ vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer_16k.handle(), image_16k.handle(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
+ 1, &region);
+ m_errorMonitor->VerifyFound();
+
+ // Test Depth/Stencil copies
+ if (missing_ds_support) {
+ printf("%s Depth / Stencil formats unsupported - skipping D/S tests.\n", kSkipPrefix);
+ } else {
+ VkBufferImageCopy ds_region = {};
+ ds_region.bufferOffset = 0;
+ ds_region.bufferRowLength = 0;
+ ds_region.bufferImageHeight = 0;
+ ds_region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+ ds_region.imageSubresource.mipLevel = 0;
+ ds_region.imageSubresource.baseArrayLayer = 0;
+ ds_region.imageSubresource.layerCount = 1;
+ ds_region.imageOffset = {0, 0, 0};
+ ds_region.imageExtent = {256, 256, 1};
+
+ // Depth copies that should succeed
+ m_errorMonitor->ExpectSuccess(); // Extract 4b depth per texel, pack into 256k buffer
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), ds_image_4D_1S.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+ buffer_256k.handle(), 1, &ds_region);
+ m_errorMonitor->VerifyNotFound();
+
+ m_errorMonitor->ExpectSuccess(); // Extract 3b depth per texel, pack (loose) into 256k buffer
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), ds_image_3D_1S.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+ buffer_256k.handle(), 1, &ds_region);
+ m_errorMonitor->VerifyNotFound();
+
+ m_errorMonitor->ExpectSuccess(); // Copy 2b depth per texel, into 128k buffer
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), ds_image_2D.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+ buffer_128k.handle(), 1, &ds_region);
+ m_errorMonitor->VerifyNotFound();
+
+ // Depth copies that should fail
+ ds_region.bufferOffset = 4;
+ m_errorMonitor->SetDesiredFailureMsg(
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdCopyImageToBuffer-pRegions-00183"); // Extract 4b depth per texel, pack into 256k buffer
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), ds_image_4D_1S.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+ buffer_256k.handle(), 1, &ds_region);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdCopyImageToBuffer-pRegions-00183"); // Extract 3b depth per texel, pack (loose) into 256k buffer
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), ds_image_3D_1S.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+ buffer_256k.handle(), 1, &ds_region);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdCopyImageToBuffer-pRegions-00183"); // Copy 2b depth per texel, into 128k buffer
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), ds_image_2D.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+ buffer_128k.handle(), 1, &ds_region);
+ m_errorMonitor->VerifyFound();
+
+ // Stencil copies that should succeed
+ ds_region.bufferOffset = 0;
+ ds_region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
+ m_errorMonitor->ExpectSuccess(); // Extract 1b stencil per texel, pack into 64k buffer
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), ds_image_4D_1S.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+ buffer_64k.handle(), 1, &ds_region);
+ m_errorMonitor->VerifyNotFound();
+
+ m_errorMonitor->ExpectSuccess(); // Extract 1b stencil per texel, pack into 64k buffer
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), ds_image_3D_1S.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+ buffer_64k.handle(), 1, &ds_region);
+ m_errorMonitor->VerifyNotFound();
+
+ m_errorMonitor->ExpectSuccess(); // Copy 1b stencil per texel, into 64k buffer
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), ds_image_1S.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+ buffer_64k.handle(), 1, &ds_region);
+ m_errorMonitor->VerifyNotFound();
+
+ // Stencil copies that should fail
+ m_errorMonitor->SetDesiredFailureMsg(
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdCopyImageToBuffer-pRegions-00183"); // Extract 1b stencil per texel, pack into 64k buffer
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), ds_image_4D_1S.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+ buffer_16k.handle(), 1, &ds_region);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdCopyImageToBuffer-pRegions-00183"); // Extract 1b stencil per texel, pack into 64k buffer
+ ds_region.bufferRowLength = 260;
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), ds_image_3D_1S.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+ buffer_64k.handle(), 1, &ds_region);
+ m_errorMonitor->VerifyFound();
+
+ ds_region.bufferRowLength = 0;
+ ds_region.bufferOffset = 4;
+ m_errorMonitor->SetDesiredFailureMsg(
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdCopyImageToBuffer-pRegions-00183"); // Copy 1b stencil per texel, into 64k buffer
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), ds_image_1S.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+ buffer_64k.handle(), 1, &ds_region);
+ m_errorMonitor->VerifyFound();
+ }
+
+ // Test compressed formats, if supported
+ VkPhysicalDeviceFeatures device_features = {};
+ ASSERT_NO_FATAL_FAILURE(GetPhysicalDeviceFeatures(&device_features));
+ if (!(device_features.textureCompressionBC || device_features.textureCompressionETC2 ||
+ device_features.textureCompressionASTC_LDR)) {
+ printf("%s No compressed formats supported - block compression tests skipped.\n", kSkipPrefix);
+ } else {
+ VkImageObj image_16k_4x4comp(m_device); // 128^2 texels as 32^2 compressed (4x4) blocks, 16k
+ VkImageObj image_NPOT_4x4comp(m_device); // 130^2 texels as 33^2 compressed (4x4) blocks
+ if (device_features.textureCompressionBC) {
+ image_16k_4x4comp.Init(128, 128, 1, VK_FORMAT_BC3_SRGB_BLOCK, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, VK_IMAGE_TILING_OPTIMAL,
+ 0);
+ image_NPOT_4x4comp.Init(130, 130, 1, VK_FORMAT_BC3_SRGB_BLOCK, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, VK_IMAGE_TILING_OPTIMAL,
+ 0);
+ } else if (device_features.textureCompressionETC2) {
+ image_16k_4x4comp.Init(128, 128, 1, VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK, VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
+ VK_IMAGE_TILING_OPTIMAL, 0);
+ image_NPOT_4x4comp.Init(130, 130, 1, VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK, VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
+ VK_IMAGE_TILING_OPTIMAL, 0);
+ } else {
+ image_16k_4x4comp.Init(128, 128, 1, VK_FORMAT_ASTC_4x4_UNORM_BLOCK, VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
+ VK_IMAGE_TILING_OPTIMAL, 0);
+ image_NPOT_4x4comp.Init(130, 130, 1, VK_FORMAT_ASTC_4x4_UNORM_BLOCK, VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
+ VK_IMAGE_TILING_OPTIMAL, 0);
+ }
+ ASSERT_TRUE(image_16k_4x4comp.initialized());
+
+ // Just fits
+ m_errorMonitor->ExpectSuccess();
+ region.imageExtent = {128, 128, 1};
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k_4x4comp.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16k.handle(),
+ 1, &region);
+ m_errorMonitor->VerifyNotFound();
+
+ // with offset, too big for buffer
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImageToBuffer-pRegions-00183");
+ region.bufferOffset = 16;
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k_4x4comp.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16k.handle(),
+ 1, &region);
+ m_errorMonitor->VerifyFound();
+ region.bufferOffset = 0;
+
+ // extents that are not a multiple of compressed block size
+ m_errorMonitor->SetDesiredFailureMsg(
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkBufferImageCopy-imageExtent-00207"); // extent width not a multiple of block size
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdCopyImageToBuffer-imageOffset-01794"); // image transfer granularity
+ region.imageExtent.width = 66;
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image_NPOT_4x4comp.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16k.handle(),
+ 1, &region);
+ m_errorMonitor->VerifyFound();
+ region.imageExtent.width = 128;
+
+ m_errorMonitor->SetDesiredFailureMsg(
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkBufferImageCopy-imageExtent-00208"); // extent height not a multiple of block size
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdCopyImageToBuffer-imageOffset-01794"); // image transfer granularity
+ region.imageExtent.height = 2;
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image_NPOT_4x4comp.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16k.handle(),
+ 1, &region);
+ m_errorMonitor->VerifyFound();
+ region.imageExtent.height = 128;
+
+ // TODO: All available compressed formats are 2D, with block depth of 1. Unable to provoke VU_01277.
+
+ // non-multiple extents are allowed if at the far edge of a non-block-multiple image - these should pass
+ m_errorMonitor->ExpectSuccess();
+ region.imageExtent.width = 66;
+ region.imageOffset.x = 64;
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image_NPOT_4x4comp.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16k.handle(),
+ 1, &region);
+ region.imageExtent.width = 16;
+ region.imageOffset.x = 0;
+ region.imageExtent.height = 2;
+ region.imageOffset.y = 128;
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image_NPOT_4x4comp.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16k.handle(),
+ 1, &region);
+ m_errorMonitor->VerifyNotFound();
+ region.imageOffset = {0, 0, 0};
+
+ // buffer offset must be a multiple of texel block size (16)
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferImageCopy-bufferOffset-00206");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferImageCopy-bufferOffset-00193");
+ region.imageExtent = {64, 64, 1};
+ region.bufferOffset = 24;
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k_4x4comp.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16k.handle(),
+ 1, &region);
+ m_errorMonitor->VerifyFound();
+
+ // rowlength not a multiple of block width (4)
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferImageCopy-bufferRowLength-00203");
+ region.bufferOffset = 0;
+ region.bufferRowLength = 130;
+ region.bufferImageHeight = 0;
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k_4x4comp.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_64k.handle(),
+ 1, &region);
+ m_errorMonitor->VerifyFound();
+
+ // imageheight not a multiple of block height (4)
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferImageCopy-bufferImageHeight-00204");
+ region.bufferRowLength = 0;
+ region.bufferImageHeight = 130;
+ vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k_4x4comp.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_64k.handle(),
+ 1, &region);
+ m_errorMonitor->VerifyFound();
+ }
+}
+
+TEST_F(VkLayerTest, MiscImageLayerTests) {
+ TEST_DESCRIPTION("Image-related tests that don't belong elsewhere");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ // TODO: Ideally we should check whether a format is supported before using it.
+ VkImageObj image(m_device);
+ image.Init(128, 128, 1, VK_FORMAT_R16G16B16A16_UINT, VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL, 0); // 64bpp
+ ASSERT_TRUE(image.initialized());
+ VkBufferObj buffer;
+ VkMemoryPropertyFlags reqs = 0;
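+ // 128 * 128 texels at 8 bytes each (R16G16B16A16_UINT) - large enough for a full tightly packed copy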
+ buffer.init_as_src(*m_device, 128 * 128 * 8, reqs);
+ VkBufferImageCopy region = {};
+ region.bufferRowLength = 128;
+ region.bufferImageHeight = 128;
+ region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ // layerCount must be at least 1
+ region.imageSubresource.layerCount = 1;
+ region.imageExtent.height = 4;
+ region.imageExtent.width = 4;
+ region.imageExtent.depth = 1;
+
+ VkImageObj image2(m_device);
+ image2.Init(128, 128, 1, VK_FORMAT_R8G8_UNORM, VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL, 0); // 16bpp
+ ASSERT_TRUE(image2.initialized());
+ VkBufferObj buffer2;
+ VkMemoryPropertyFlags reqs2 = 0;
+ buffer2.init_as_src(*m_device, 128 * 128 * 2, reqs2);
+ VkBufferImageCopy region2 = {};
+ region2.bufferRowLength = 128;
+ region2.bufferImageHeight = 128;
+ region2.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ // layerCount must be at least 1
+ region2.imageSubresource.layerCount = 1;
+ region2.imageExtent.height = 4;
+ region2.imageExtent.width = 4;
+ region2.imageExtent.depth = 1;
+ m_commandBuffer->begin();
+
+ // Image must have offset.z of 0 and extent.depth of 1
+ // Introduce failure by setting imageExtent.depth to 0
+ region.imageExtent.depth = 0;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferImageCopy-srcImage-00201");
+ vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer.handle(), image.handle(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1,
+ &region);
+ m_errorMonitor->VerifyFound();
+
+ region.imageExtent.depth = 1;
+
+ // Image must have offset.z of 0 and extent.depth of 1
+ // Introduce failure by setting imageOffset.z to 4
+ // Note: Also (unavoidably) triggers 'region exceeds image' #1228
+ region.imageOffset.z = 4;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferImageCopy-srcImage-00201");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyBufferToImage-pRegions-00172");
+ vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer.handle(), image.handle(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1,
+ &region);
+ m_errorMonitor->VerifyFound();
+
+ region.imageOffset.z = 0;
+ // BufferOffset must be a multiple of the calling command's VkImage parameter's texel size
+ // Introduce failure by setting bufferOffset to half a texel (4 bytes, with an 8-byte texel size)
+ region.bufferOffset = 4;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferImageCopy-bufferOffset-00193");
+ vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer.handle(), image.handle(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1,
+ &region);
+ m_errorMonitor->VerifyFound();
+
+ // BufferOffset must be a multiple of 4
+ // Introduce failure by setting bufferOffset to a value not divisible by 4
+ region2.bufferOffset = 6;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferImageCopy-bufferOffset-00194");
+ vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer2.handle(), image2.handle(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1,
+ &region2);
+ m_errorMonitor->VerifyFound();
+
+ // BufferRowLength must be 0, or greater than or equal to the width member of imageExtent
+ region.bufferOffset = 0;
+ region.imageExtent.height = 128;
+ region.imageExtent.width = 128;
+ // Introduce failure by setting bufferRowLength > 0 but less than width
+ region.bufferRowLength = 64;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferImageCopy-bufferRowLength-00195");
+ vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer.handle(), image.handle(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1,
+ &region);
+ m_errorMonitor->VerifyFound();
+
+ // BufferImageHeight must be 0, or greater than or equal to the height member of imageExtent
+ region.bufferRowLength = 128;
+ // Introduce failure by setting bufferImageHeight > 0 but less than height
+ region.bufferImageHeight = 64;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferImageCopy-bufferImageHeight-00196");
+ vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer.handle(), image.handle(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1,
+ &region);
+ m_errorMonitor->VerifyFound();
+
+ region.bufferImageHeight = 128;
+ VkImageObj intImage1(m_device);
+ intImage1.Init(128, 128, 1, VK_FORMAT_R8_UNORM, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+ intImage1.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
+ VkImageObj intImage2(m_device);
+ intImage2.Init(128, 128, 1, VK_FORMAT_R8_UNORM, VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+ intImage2.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
+ VkImageBlit blitRegion = {};
+ blitRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ blitRegion.srcSubresource.baseArrayLayer = 0;
+ blitRegion.srcSubresource.layerCount = 1;
+ blitRegion.srcSubresource.mipLevel = 0;
+ blitRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ blitRegion.dstSubresource.baseArrayLayer = 0;
+ blitRegion.dstSubresource.layerCount = 1;
+ blitRegion.dstSubresource.mipLevel = 0;
+ blitRegion.srcOffsets[0] = {128, 0, 0};
+ blitRegion.srcOffsets[1] = {128, 128, 1};
+ blitRegion.dstOffsets[0] = {0, 128, 0};
+ blitRegion.dstOffsets[1] = {128, 128, 1};
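+ // srcOffsets[0].x == srcOffsets[1].x and dstOffsets[0].y == dstOffsets[1].y, so both the source and
+ // destination boxes have zero volume and only the warnings below are expected.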
+
+ // Look for NULL-blit warning
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_WARNING_BIT_EXT,
+ "vkCmdBlitImage(): pRegions[0].srcOffsets specify a zero-volume area.");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_WARNING_BIT_EXT,
+ "vkCmdBlitImage(): pRegions[0].dstOffsets specify a zero-volume area.");
+ vkCmdBlitImage(m_commandBuffer->handle(), intImage1.handle(), intImage1.Layout(), intImage2.handle(), intImage2.Layout(), 1,
+ &blitRegion, VK_FILTER_LINEAR);
+ m_errorMonitor->VerifyFound();
+}
+
+VkResult GPDIFPHelper(VkPhysicalDevice dev, const VkImageCreateInfo *ci, VkImageFormatProperties *limits = nullptr) {
VkImageFormatProperties tmp_limits;
limits = limits ? limits : &tmp_limits;
return vkGetPhysicalDeviceImageFormatProperties(dev, ci->format, ci->imageType, ci->tiling, ci->usage, ci->flags, limits);
}
+TEST_F(VkLayerTest, CreateImageMiscErrors) {
+ TEST_DESCRIPTION("Misc leftover valid usage errors in VkImageCreateInfo struct");
+
+ VkPhysicalDeviceFeatures features{};
+ ASSERT_NO_FATAL_FAILURE(Init(&features));
+
+ VkImage null_image; // throwaway target for all the vkCreateImage calls
+
+ VkImageCreateInfo tmp_img_ci = {};
+ tmp_img_ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ tmp_img_ci.flags = 0; // presumably any is supported
+ tmp_img_ci.imageType = VK_IMAGE_TYPE_2D; // any is supported
+ tmp_img_ci.format = VK_FORMAT_R8G8B8A8_UNORM; // has mandatory support for all usages
+ tmp_img_ci.extent = {64, 64, 1}; // limit is 256 for 3D, or 4096
+ tmp_img_ci.mipLevels = 1; // any is supported
+ tmp_img_ci.arrayLayers = 1; // limit is 256
+ tmp_img_ci.samples = VK_SAMPLE_COUNT_1_BIT; // needs to be 1 if TILING_LINEAR
+ // if VK_IMAGE_TILING_LINEAR, imageType must be 2D, usage must be TRANSFER, and mip levels, array layers, and samples must all be 1
+ tmp_img_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
+ tmp_img_ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT; // depends on format
+ tmp_img_ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+ const VkImageCreateInfo safe_image_ci = tmp_img_ci;
+
+ ASSERT_VK_SUCCESS(GPDIFPHelper(gpu(), &safe_image_ci));
+
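+ // Each scope below starts from a copy of safe_image_ci (verified as supported above) and perturbs one or
+ // two fields so that only the VUIDs passed to SetDesiredFailureMsg are expected to fire.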
+ {
+ VkImageCreateInfo image_ci = safe_image_ci;
+ image_ci.sharingMode = VK_SHARING_MODE_CONCURRENT;
+ image_ci.queueFamilyIndexCount = 2;
+ image_ci.pQueueFamilyIndices = nullptr;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-sharingMode-00941");
+ vkCreateImage(m_device->handle(), &image_ci, NULL, &null_image);
+ m_errorMonitor->VerifyFound();
+ }
+
+ {
+ VkImageCreateInfo image_ci = safe_image_ci;
+ image_ci.sharingMode = VK_SHARING_MODE_CONCURRENT;
+ image_ci.queueFamilyIndexCount = 1;
+ const uint32_t queue_family = 0;
+ image_ci.pQueueFamilyIndices = &queue_family;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-sharingMode-00942");
+ vkCreateImage(m_device->handle(), &image_ci, NULL, &null_image);
+ m_errorMonitor->VerifyFound();
+ }
+
+ {
+ VkImageCreateInfo image_ci = safe_image_ci;
+ image_ci.format = VK_FORMAT_UNDEFINED;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-format-00943");
+ vkCreateImage(m_device->handle(), &image_ci, NULL, &null_image);
+ m_errorMonitor->VerifyFound();
+ }
+
+ {
+ VkImageCreateInfo image_ci = safe_image_ci;
+ image_ci.flags = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;
+ image_ci.arrayLayers = 6;
+ image_ci.imageType = VK_IMAGE_TYPE_1D;
+ image_ci.extent = {64, 1, 1};
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-flags-00949");
+ vkCreateImage(m_device->handle(), &image_ci, NULL, &null_image);
+ m_errorMonitor->VerifyFound();
+
+ image_ci = safe_image_ci;
+ image_ci.flags = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;
+ image_ci.imageType = VK_IMAGE_TYPE_3D;
+ image_ci.extent = {4, 4, 4};
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-flags-00949");
+ vkCreateImage(m_device->handle(), &image_ci, NULL, &null_image);
+ m_errorMonitor->VerifyFound();
+ }
+
+ {
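+        // samples != VK_SAMPLE_COUNT_1_BIT requires a 2D, optimal-tiling, non-cube-compatible image with mipLevels == 1 (02257).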
+ VkImageCreateInfo image_ci = safe_image_ci;
+ image_ci.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT; // always has 4 samples support
+ image_ci.samples = VK_SAMPLE_COUNT_4_BIT;
+ image_ci.imageType = VK_IMAGE_TYPE_3D;
+ image_ci.extent = {4, 4, 4};
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-samples-02257");
+ vkCreateImage(m_device->handle(), &image_ci, NULL, &null_image);
+ m_errorMonitor->VerifyFound();
+
+ image_ci = safe_image_ci;
+ image_ci.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT; // always has 4 samples support
+ image_ci.samples = VK_SAMPLE_COUNT_4_BIT;
+ image_ci.flags = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;
+ image_ci.arrayLayers = 6;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-samples-02257");
+ vkCreateImage(m_device->handle(), &image_ci, NULL, &null_image);
+ m_errorMonitor->VerifyFound();
+
+ image_ci = safe_image_ci;
+ image_ci.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT; // always has 4 samples support
+ image_ci.samples = VK_SAMPLE_COUNT_4_BIT;
+ image_ci.tiling = VK_IMAGE_TILING_LINEAR;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-samples-02257");
+ vkCreateImage(m_device->handle(), &image_ci, NULL, &null_image);
+ m_errorMonitor->VerifyFound();
+
+ image_ci = safe_image_ci;
+ image_ci.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT; // always has 4 samples support
+ image_ci.samples = VK_SAMPLE_COUNT_4_BIT;
+ image_ci.mipLevels = 2;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-samples-02257");
+ vkCreateImage(m_device->handle(), &image_ci, NULL, &null_image);
+ m_errorMonitor->VerifyFound();
+ }
+
+ {
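+        // TRANSIENT_ATTACHMENT may only be combined with attachment usages (00963) and must be accompanied by at least
+        // one attachment usage (00966).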
+ VkImageCreateInfo image_ci = safe_image_ci;
+ image_ci.usage = VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+ image_ci.usage |= VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-usage-00963");
+ vkCreateImage(m_device->handle(), &image_ci, NULL, &null_image);
+ m_errorMonitor->VerifyFound();
+
+ image_ci.usage = VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-usage-00966");
+ vkCreateImage(m_device->handle(), &image_ci, NULL, &null_image);
+ m_errorMonitor->VerifyFound();
+
+ image_ci.usage = VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT;
+ image_ci.usage |= VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-usage-00963");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-usage-00966");
+ vkCreateImage(m_device->handle(), &image_ci, NULL, &null_image);
+ m_errorMonitor->VerifyFound();
+ }
+
+ {
+ VkImageCreateInfo image_ci = safe_image_ci;
+ image_ci.flags = VK_IMAGE_CREATE_SPARSE_BINDING_BIT;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-flags-00969");
+ vkCreateImage(m_device->handle(), &image_ci, NULL, &null_image);
+ m_errorMonitor->VerifyFound();
+ }
+
+    // initialLayout must be VK_IMAGE_LAYOUT_UNDEFINED or VK_IMAGE_LAYOUT_PREINITIALIZED
+ {
+ VkImageCreateInfo image_ci = safe_image_ci;
+ image_ci.initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-initialLayout-00993");
+ vkCreateImage(m_device->handle(), &image_ci, NULL, &null_image);
+ m_errorMonitor->VerifyFound();
+ }
+}
+
+TEST_F(VkLayerTest, CreateImageMinLimitsViolation) {
+    TEST_DESCRIPTION("Create invalid image with parameters violating a minimum limit, such as being zero.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ VkImage null_image; // throwaway target for all the vkCreateImage
+
+ VkImageCreateInfo tmp_img_ci = {};
+ tmp_img_ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    tmp_img_ci.flags = 0;                          // presumably any flags value is supported
+ tmp_img_ci.imageType = VK_IMAGE_TYPE_2D; // any is supported
+ tmp_img_ci.format = VK_FORMAT_R8G8B8A8_UNORM; // has mandatory support for all usages
+ tmp_img_ci.extent = {1, 1, 1}; // limit is 256 for 3D, or 4096
+ tmp_img_ci.mipLevels = 1; // any is supported
+ tmp_img_ci.arrayLayers = 1; // limit is 256
+ tmp_img_ci.samples = VK_SAMPLE_COUNT_1_BIT; // needs to be 1 if TILING_LINEAR
+    // if VK_IMAGE_TILING_LINEAR, imageType must be 2D, usage must be TRANSFER, and levels, layers, and samples must all be 1
+ tmp_img_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
+ tmp_img_ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT; // depends on format
+ tmp_img_ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+ const VkImageCreateInfo safe_image_ci = tmp_img_ci;
+
+ enum Dimension { kWidth = 0x1, kHeight = 0x2, kDepth = 0x4 };
+
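+    // Walk every non-empty combination of width/height/depth forced to zero and expect the matching extent VUs.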
+    for (std::underlying_type<Dimension>::type bad_dimensions = 0x1; bad_dimensions < 0x8; ++bad_dimensions) {
+ VkExtent3D extent = {1, 1, 1};
+
+ if (bad_dimensions & kWidth) {
+ extent.width = 0;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-extent-00944");
+ }
+
+ if (bad_dimensions & kHeight) {
+ extent.height = 0;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-extent-00945");
+ }
+
+ if (bad_dimensions & kDepth) {
+ extent.depth = 0;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-extent-00946");
+ }
+
+ VkImageCreateInfo bad_image_ci = safe_image_ci;
+ bad_image_ci.imageType = VK_IMAGE_TYPE_3D; // has to be 3D otherwise it might trigger the non-1 error instead
+ bad_image_ci.extent = extent;
+
+ vkCreateImage(m_device->device(), &bad_image_ci, NULL, &null_image);
+
+ m_errorMonitor->VerifyFound();
+ }
+
+ {
+ VkImageCreateInfo bad_image_ci = safe_image_ci;
+ bad_image_ci.mipLevels = 0;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-mipLevels-00947");
+ vkCreateImage(m_device->device(), &bad_image_ci, NULL, &null_image);
+ m_errorMonitor->VerifyFound();
+ }
+
+ {
+ VkImageCreateInfo bad_image_ci = safe_image_ci;
+ bad_image_ci.arrayLayers = 0;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-arrayLayers-00948");
+ vkCreateImage(m_device->device(), &bad_image_ci, NULL, &null_image);
+ m_errorMonitor->VerifyFound();
+ }
+
+ {
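+        // CUBE_COMPATIBLE requires a square extent and arrayLayers >= 6 (imageType-00954).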
+ VkImageCreateInfo bad_image_ci = safe_image_ci;
+ bad_image_ci.flags = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;
+ bad_image_ci.arrayLayers = 5;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-imageType-00954");
+ vkCreateImage(m_device->device(), &bad_image_ci, NULL, &null_image);
+ m_errorMonitor->VerifyFound();
+
+ bad_image_ci.arrayLayers = 6;
+ bad_image_ci.extent = {64, 63, 1};
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-imageType-00954");
+ vkCreateImage(m_device->device(), &bad_image_ci, NULL, &null_image);
+ m_errorMonitor->VerifyFound();
+ }
+
+ {
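+        // 1D images require height == 1 and depth == 1 (00956); 2D images require depth == 1 (00957).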
+ VkImageCreateInfo bad_image_ci = safe_image_ci;
+ bad_image_ci.imageType = VK_IMAGE_TYPE_1D;
+ bad_image_ci.extent = {64, 2, 1};
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-imageType-00956");
+ vkCreateImage(m_device->device(), &bad_image_ci, NULL, &null_image);
+ m_errorMonitor->VerifyFound();
+
+ bad_image_ci.imageType = VK_IMAGE_TYPE_1D;
+ bad_image_ci.extent = {64, 1, 2};
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-imageType-00956");
+ vkCreateImage(m_device->device(), &bad_image_ci, NULL, &null_image);
+ m_errorMonitor->VerifyFound();
+
+ bad_image_ci.imageType = VK_IMAGE_TYPE_2D;
+ bad_image_ci.extent = {64, 64, 2};
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-imageType-00957");
+ vkCreateImage(m_device->device(), &bad_image_ci, NULL, &null_image);
+ m_errorMonitor->VerifyFound();
+
+ bad_image_ci.imageType = VK_IMAGE_TYPE_2D;
+ bad_image_ci.flags = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;
+ bad_image_ci.arrayLayers = 6;
+ bad_image_ci.extent = {64, 64, 2};
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-imageType-00957");
+ vkCreateImage(m_device->device(), &bad_image_ci, NULL, &null_image);
+ m_errorMonitor->VerifyFound();
+ }
+
+ {
+ VkImageCreateInfo bad_image_ci = safe_image_ci;
+ bad_image_ci.imageType = VK_IMAGE_TYPE_3D;
+ bad_image_ci.arrayLayers = 2;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-imageType-00961");
+ vkCreateImage(m_device->device(), &bad_image_ci, NULL, &null_image);
+ m_errorMonitor->VerifyFound();
+ }
+}
+
VkFormat FindFormatLinearWithoutMips(VkPhysicalDevice gpu, VkImageCreateInfo image_ci) {
image_ci.tiling = VK_IMAGE_TILING_LINEAR;
@@ -338,6 +24777,125 @@ bool FindFormatWithoutSamples(VkPhysicalDevice gpu, VkImageCreateInfo &image_ci)
return false;
}
+TEST_F(VkLayerTest, CreateImageMaxLimitsViolation) {
+    TEST_DESCRIPTION("Create invalid image with parameters exceeding physical device limits.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ VkImage null_image; // throwaway target for all the vkCreateImage
+
+ VkImageCreateInfo tmp_img_ci = {};
+ tmp_img_ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    tmp_img_ci.flags = 0;                          // presumably any flags value is supported
+ tmp_img_ci.imageType = VK_IMAGE_TYPE_2D; // any is supported
+ tmp_img_ci.format = VK_FORMAT_R8G8B8A8_UNORM; // has mandatory support for all usages
+ tmp_img_ci.extent = {1, 1, 1}; // limit is 256 for 3D, or 4096
+ tmp_img_ci.mipLevels = 1; // any is supported
+ tmp_img_ci.arrayLayers = 1; // limit is 256
+ tmp_img_ci.samples = VK_SAMPLE_COUNT_1_BIT; // needs to be 1 if TILING_LINEAR
+    // if VK_IMAGE_TILING_LINEAR, imageType must be 2D, usage must be TRANSFER, and levels, layers, and samples must all be 1
+ tmp_img_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
+ tmp_img_ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT; // depends on format
+ tmp_img_ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+ const VkImageCreateInfo safe_image_ci = tmp_img_ci;
+
+ ASSERT_VK_SUCCESS(GPDIFPHelper(gpu(), &safe_image_ci));
+
+ const VkPhysicalDeviceLimits &dev_limits = m_device->props.limits;
+
+ {
+ VkImageCreateInfo image_ci = safe_image_ci;
+ image_ci.extent = {8, 8, 1};
+ image_ci.mipLevels = 4 + 1; // 4 = log2(8) + 1
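+        // The spec caps mipLevels at floor(log2(max(w, h, d))) + 1, which is 4 for an 8x8x1 image, so 5 must fail.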
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-mipLevels-00958");
+ vkCreateImage(m_device->handle(), &image_ci, NULL, &null_image);
+ m_errorMonitor->VerifyFound();
+
+ image_ci.extent = {8, 15, 1};
+ image_ci.mipLevels = 4 + 1; // 4 = floor(log2(15)) + 1
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-mipLevels-00958");
+ vkCreateImage(m_device->handle(), &image_ci, NULL, &null_image);
+ m_errorMonitor->VerifyFound();
+ }
+
+ {
+ VkImageCreateInfo image_ci = safe_image_ci;
+ image_ci.tiling = VK_IMAGE_TILING_LINEAR;
+ image_ci.extent = {64, 64, 1};
+ image_ci.format = FindFormatLinearWithoutMips(gpu(), image_ci);
+ image_ci.mipLevels = 2;
+
+ if (image_ci.format != VK_FORMAT_UNDEFINED) {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-mipLevels-02255");
+ vkCreateImage(m_device->handle(), &image_ci, NULL, &null_image);
+ m_errorMonitor->VerifyFound();
+ } else {
+ printf("%s Cannot find a format to test maxMipLevels limit; skipping part of test.\n", kSkipPrefix);
+ }
+ }
+
+ {
+ VkImageCreateInfo image_ci = safe_image_ci;
+
+ VkImageFormatProperties img_limits;
+ ASSERT_VK_SUCCESS(GPDIFPHelper(gpu(), &image_ci, &img_limits));
+
+ if (img_limits.maxArrayLayers != UINT32_MAX) {
+ image_ci.arrayLayers = img_limits.maxArrayLayers + 1;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-arrayLayers-02256");
+ vkCreateImage(m_device->handle(), &image_ci, NULL, &null_image);
+ m_errorMonitor->VerifyFound();
+ } else {
+ printf("%s VkImageFormatProperties::maxArrayLayers is already UINT32_MAX; skipping part of test.\n", kSkipPrefix);
+ }
+ }
+
+ {
+ VkImageCreateInfo image_ci = safe_image_ci;
+ bool found = FindFormatWithoutSamples(gpu(), image_ci);
+
+ if (found) {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-samples-02258");
+ vkCreateImage(m_device->handle(), &image_ci, NULL, &null_image);
+ m_errorMonitor->VerifyFound();
+ } else {
+ printf("%s Could not find a format with some unsupported samples; skipping part of test.\n", kSkipPrefix);
+ }
+ }
+
+ {
+ VkImageCreateInfo image_ci = safe_image_ci;
+ image_ci.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT; // (any attachment bit)
+
+ VkImageFormatProperties img_limits;
+ ASSERT_VK_SUCCESS(GPDIFPHelper(gpu(), &image_ci, &img_limits));
+
+ if (dev_limits.maxFramebufferWidth != UINT32_MAX) {
+ image_ci.extent = {dev_limits.maxFramebufferWidth + 1, 64, 1};
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-usage-00964");
+ vkCreateImage(m_device->handle(), &image_ci, NULL, &null_image);
+ m_errorMonitor->VerifyFound();
+ } else {
+ printf("%s VkPhysicalDeviceLimits::maxFramebufferWidth is already UINT32_MAX; skipping part of test.\n", kSkipPrefix);
+ }
+
+ if (dev_limits.maxFramebufferHeight != UINT32_MAX) {
+ image_ci.usage = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT; // try different one too
+ image_ci.extent = {64, dev_limits.maxFramebufferHeight + 1, 1};
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-usage-00965");
+ vkCreateImage(m_device->handle(), &image_ci, NULL, &null_image);
+ m_errorMonitor->VerifyFound();
+ } else {
+ printf("%s VkPhysicalDeviceLimits::maxFramebufferHeight is already UINT32_MAX; skipping part of test.\n", kSkipPrefix);
+ }
+ }
+}
+
bool FindUnsupportedImage(VkPhysicalDevice gpu, VkImageCreateInfo &image_ci) {
const VkFormat first_vk_format = static_cast<VkFormat>(1);
const VkFormat last_vk_format = static_cast<VkFormat>(130); // avoid compressed/feature protected, otherwise 184
@@ -375,7 +24933,8 @@ bool FindUnsupportedImage(VkPhysicalDevice gpu, VkImageCreateInfo &image_ci) {
return false;
}
-VkFormat FindFormatWithoutFeatures(VkPhysicalDevice gpu, VkImageTiling tiling, VkFormatFeatureFlags undesired_features) {
+VkFormat FindFormatWithoutFeatures(VkPhysicalDevice gpu, VkImageTiling tiling,
+ VkFormatFeatureFlags undesired_features = UINT32_MAX) {
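+    // The default of UINT32_MAX marks every feature as undesired, so by default the search is for a format that
+    // reports no format features at all for the given tiling.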
const VkFormat first_vk_format = static_cast<VkFormat>(1);
const VkFormat last_vk_format = static_cast<VkFormat>(130); // avoid compressed/feature protected, otherwise 184
@@ -398,6 +24957,2421 @@ VkFormat FindFormatWithoutFeatures(VkPhysicalDevice gpu, VkImageTiling tiling, V
return VK_FORMAT_UNDEFINED;
}
+TEST_F(VkLayerTest, CopyImageTypeExtentMismatch) {
+ // Image copy tests where format type and extents don't match
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ VkImageCreateInfo ci;
+ ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ ci.pNext = NULL;
+ ci.flags = 0;
+ ci.imageType = VK_IMAGE_TYPE_1D;
+ ci.format = VK_FORMAT_R8G8B8A8_UNORM;
+ ci.extent = {32, 1, 1};
+ ci.mipLevels = 1;
+ ci.arrayLayers = 1;
+ ci.samples = VK_SAMPLE_COUNT_1_BIT;
+ ci.tiling = VK_IMAGE_TILING_OPTIMAL;
+ ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+ ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+ ci.queueFamilyIndexCount = 0;
+ ci.pQueueFamilyIndices = NULL;
+ ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+
+ // Create 1D image
+ VkImageObj image_1D(m_device);
+ image_1D.init(&ci);
+ ASSERT_TRUE(image_1D.initialized());
+
+ // 2D image
+ ci.imageType = VK_IMAGE_TYPE_2D;
+ ci.extent = {32, 32, 1};
+ VkImageObj image_2D(m_device);
+ image_2D.init(&ci);
+ ASSERT_TRUE(image_2D.initialized());
+
+ // 3D image
+ ci.imageType = VK_IMAGE_TYPE_3D;
+ ci.extent = {32, 32, 8};
+ VkImageObj image_3D(m_device);
+ image_3D.init(&ci);
+ ASSERT_TRUE(image_3D.initialized());
+
+ // 2D image array
+ ci.imageType = VK_IMAGE_TYPE_2D;
+ ci.extent = {32, 32, 1};
+ ci.arrayLayers = 8;
+ VkImageObj image_2D_array(m_device);
+ image_2D_array.init(&ci);
+ ASSERT_TRUE(image_2D_array.initialized());
+
+ m_commandBuffer->begin();
+
+ VkImageCopy copy_region;
+ copy_region.extent = {32, 1, 1};
+ copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ copy_region.srcSubresource.mipLevel = 0;
+ copy_region.dstSubresource.mipLevel = 0;
+ copy_region.srcSubresource.baseArrayLayer = 0;
+ copy_region.dstSubresource.baseArrayLayer = 0;
+ copy_region.srcSubresource.layerCount = 1;
+ copy_region.dstSubresource.layerCount = 1;
+ copy_region.srcOffset = {0, 0, 0};
+ copy_region.dstOffset = {0, 0, 0};
+
+ // Sanity check
+ m_errorMonitor->ExpectSuccess();
+ m_commandBuffer->CopyImage(image_1D.image(), VK_IMAGE_LAYOUT_GENERAL, image_2D.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+ &copy_region);
+ m_errorMonitor->VerifyNotFound();
+
+ // 1D texture w/ offset.y > 0. Source = VU 09c00124, dest = 09c00130
+ copy_region.srcOffset.y = 1;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcImage-00146");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcOffset-00145"); // also y-dim overrun
+ m_commandBuffer->CopyImage(image_1D.image(), VK_IMAGE_LAYOUT_GENERAL, image_2D.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+ &copy_region);
+ m_errorMonitor->VerifyFound();
+ copy_region.srcOffset.y = 0;
+ copy_region.dstOffset.y = 1;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstImage-00152");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstOffset-00151"); // also y-dim overrun
+ m_commandBuffer->CopyImage(image_2D.image(), VK_IMAGE_LAYOUT_GENERAL, image_1D.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+ &copy_region);
+ m_errorMonitor->VerifyFound();
+ copy_region.dstOffset.y = 0;
+
+ // 1D texture w/ extent.height > 1. Source = VU 09c00124, dest = 09c00130
+ copy_region.extent.height = 2;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcImage-00146");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcOffset-00145"); // also y-dim overrun
+ m_commandBuffer->CopyImage(image_1D.image(), VK_IMAGE_LAYOUT_GENERAL, image_2D.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+ &copy_region);
+ m_errorMonitor->VerifyFound();
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstImage-00152");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstOffset-00151"); // also y-dim overrun
+ m_commandBuffer->CopyImage(image_2D.image(), VK_IMAGE_LAYOUT_GENERAL, image_1D.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+ &copy_region);
+ m_errorMonitor->VerifyFound();
+ copy_region.extent.height = 1;
+
+ // 1D texture w/ offset.z > 0. Source = VU 09c00df2, dest = 09c00df4
+ copy_region.srcOffset.z = 1;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcImage-01785");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcOffset-00147"); // also z-dim overrun
+ m_commandBuffer->CopyImage(image_1D.image(), VK_IMAGE_LAYOUT_GENERAL, image_2D.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+ &copy_region);
+ m_errorMonitor->VerifyFound();
+ copy_region.srcOffset.z = 0;
+ copy_region.dstOffset.z = 1;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstImage-01786");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstOffset-00153"); // also z-dim overrun
+ m_commandBuffer->CopyImage(image_2D.image(), VK_IMAGE_LAYOUT_GENERAL, image_1D.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+ &copy_region);
+ m_errorMonitor->VerifyFound();
+ copy_region.dstOffset.z = 0;
+
+ // 1D texture w/ extent.depth > 1. Source = VU 09c00df2, dest = 09c00df4
+ copy_region.extent.depth = 2;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcImage-01785");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkImageCopy-srcOffset-00147"); // also z-dim overrun (src)
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkImageCopy-dstOffset-00153"); // also z-dim overrun (dst)
+ m_commandBuffer->CopyImage(image_1D.image(), VK_IMAGE_LAYOUT_GENERAL, image_2D.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+ &copy_region);
+ m_errorMonitor->VerifyFound();
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstImage-01786");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkImageCopy-srcOffset-00147"); // also z-dim overrun (src)
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkImageCopy-dstOffset-00153"); // also z-dim overrun (dst)
+ m_commandBuffer->CopyImage(image_2D.image(), VK_IMAGE_LAYOUT_GENERAL, image_1D.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+ &copy_region);
+ m_errorMonitor->VerifyFound();
+ copy_region.extent.depth = 1;
+
+ // 2D texture w/ offset.z > 0. Source = VU 09c00df6, dest = 09c00df8
+ copy_region.extent = {16, 16, 1};
+ copy_region.srcOffset.z = 4;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcImage-01787");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkImageCopy-srcOffset-00147"); // also z-dim overrun (src)
+ m_commandBuffer->CopyImage(image_2D.image(), VK_IMAGE_LAYOUT_GENERAL, image_3D.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+ &copy_region);
+ m_errorMonitor->VerifyFound();
+ copy_region.srcOffset.z = 0;
+ copy_region.dstOffset.z = 1;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstImage-01788");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkImageCopy-dstOffset-00153"); // also z-dim overrun (dst)
+ m_commandBuffer->CopyImage(image_3D.image(), VK_IMAGE_LAYOUT_GENERAL, image_2D.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+ &copy_region);
+ m_errorMonitor->VerifyFound();
+ copy_region.dstOffset.z = 0;
+
+ // 3D texture accessing an array layer other than 0. VU 09c0011a
+ copy_region.extent = {4, 4, 1};
+ copy_region.srcSubresource.baseArrayLayer = 1;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcImage-00141");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdCopyImage-srcSubresource-01698"); // also 'too many layers'
+ m_commandBuffer->CopyImage(image_3D.image(), VK_IMAGE_LAYOUT_GENERAL, image_2D.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+ &copy_region);
+ m_errorMonitor->VerifyFound();
+ m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, CopyImageTypeExtentMismatchMaintenance1) {
+ // Image copy tests where format type and extents don't match and the Maintenance1 extension is enabled
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME)) {
+ m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+ } else {
+ printf("%s Maintenance1 extension cannot be enabled, test skipped.\n", kSkipPrefix);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ VkFormat image_format = VK_FORMAT_R8G8B8A8_UNORM;
+ VkFormatProperties format_props;
+ // TODO: Remove this check if or when devsim handles extensions.
+    // The chosen format has mandatory support for the transfer src and dst format features when Maintenance1 is enabled. However, our
+ // use of devsim and the mock ICD violate this guarantee.
+ vkGetPhysicalDeviceFormatProperties(m_device->phy().handle(), image_format, &format_props);
+ if (!(format_props.optimalTilingFeatures & VK_FORMAT_FEATURE_TRANSFER_SRC_BIT)) {
+        printf("%s Required transfer src format feature not supported; skipping test.\n", kSkipPrefix);
+ return;
+ }
+
+ VkImageCreateInfo ci;
+ ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ ci.pNext = NULL;
+ ci.flags = 0;
+ ci.imageType = VK_IMAGE_TYPE_1D;
+ ci.format = image_format;
+ ci.extent = {32, 1, 1};
+ ci.mipLevels = 1;
+ ci.arrayLayers = 1;
+ ci.samples = VK_SAMPLE_COUNT_1_BIT;
+ ci.tiling = VK_IMAGE_TILING_OPTIMAL;
+ ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+ ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+ ci.queueFamilyIndexCount = 0;
+ ci.pQueueFamilyIndices = NULL;
+ ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+
+ // Create 1D image
+ VkImageObj image_1D(m_device);
+ image_1D.init(&ci);
+ ASSERT_TRUE(image_1D.initialized());
+
+ // 2D image
+ ci.imageType = VK_IMAGE_TYPE_2D;
+ ci.extent = {32, 32, 1};
+ VkImageObj image_2D(m_device);
+ image_2D.init(&ci);
+ ASSERT_TRUE(image_2D.initialized());
+
+ // 3D image
+ ci.imageType = VK_IMAGE_TYPE_3D;
+ ci.extent = {32, 32, 8};
+ VkImageObj image_3D(m_device);
+ image_3D.init(&ci);
+ ASSERT_TRUE(image_3D.initialized());
+
+ // 2D image array
+ ci.imageType = VK_IMAGE_TYPE_2D;
+ ci.extent = {32, 32, 1};
+ ci.arrayLayers = 8;
+ VkImageObj image_2D_array(m_device);
+ image_2D_array.init(&ci);
+ ASSERT_TRUE(image_2D_array.initialized());
+
+ m_commandBuffer->begin();
+
+ VkImageCopy copy_region;
+ copy_region.extent = {32, 1, 1};
+ copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ copy_region.srcSubresource.mipLevel = 0;
+ copy_region.dstSubresource.mipLevel = 0;
+ copy_region.srcSubresource.baseArrayLayer = 0;
+ copy_region.dstSubresource.baseArrayLayer = 0;
+ copy_region.srcSubresource.layerCount = 1;
+ copy_region.dstSubresource.layerCount = 1;
+ copy_region.srcOffset = {0, 0, 0};
+ copy_region.dstOffset = {0, 0, 0};
+
+ // Copy from layer not present
+ copy_region.srcSubresource.baseArrayLayer = 4;
+ copy_region.srcSubresource.layerCount = 6;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-srcSubresource-01698");
+ m_commandBuffer->CopyImage(image_2D_array.image(), VK_IMAGE_LAYOUT_GENERAL, image_3D.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+ &copy_region);
+ m_errorMonitor->VerifyFound();
+ copy_region.srcSubresource.baseArrayLayer = 0;
+ copy_region.srcSubresource.layerCount = 1;
+
+ // Copy to layer not present
+ copy_region.dstSubresource.baseArrayLayer = 1;
+ copy_region.dstSubresource.layerCount = 8;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-dstSubresource-01699");
+ m_commandBuffer->CopyImage(image_3D.image(), VK_IMAGE_LAYOUT_GENERAL, image_2D_array.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+ &copy_region);
+ m_errorMonitor->VerifyFound();
+ copy_region.dstSubresource.layerCount = 1;
+
+ m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, CopyImageCompressedBlockAlignment) {
+ // Image copy tests on compressed images with block alignment errors
+ SetTargetApiVersion(VK_API_VERSION_1_1);
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ // Select a compressed format and verify support
+ VkPhysicalDeviceFeatures device_features = {};
+ ASSERT_NO_FATAL_FAILURE(GetPhysicalDeviceFeatures(&device_features));
+ VkFormat compressed_format = VK_FORMAT_UNDEFINED;
+ if (device_features.textureCompressionBC) {
+ compressed_format = VK_FORMAT_BC3_SRGB_BLOCK;
+ } else if (device_features.textureCompressionETC2) {
+ compressed_format = VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK;
+ } else if (device_features.textureCompressionASTC_LDR) {
+ compressed_format = VK_FORMAT_ASTC_4x4_UNORM_BLOCK;
+ }
+
+ VkImageCreateInfo ci;
+ ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ ci.pNext = NULL;
+ ci.flags = 0;
+ ci.imageType = VK_IMAGE_TYPE_2D;
+ ci.format = compressed_format;
+ ci.extent = {64, 64, 1};
+ ci.mipLevels = 1;
+ ci.arrayLayers = 1;
+ ci.samples = VK_SAMPLE_COUNT_1_BIT;
+ ci.tiling = VK_IMAGE_TILING_OPTIMAL;
+ ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+ ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+ ci.queueFamilyIndexCount = 0;
+ ci.pQueueFamilyIndices = NULL;
+ ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+
+ VkImageFormatProperties img_prop = {};
+ if (VK_SUCCESS != vkGetPhysicalDeviceImageFormatProperties(m_device->phy().handle(), ci.format, ci.imageType, ci.tiling,
+ ci.usage, ci.flags, &img_prop)) {
+ printf("%s No compressed formats supported - CopyImageCompressedBlockAlignment skipped.\n", kSkipPrefix);
+ return;
+ }
+
+ // Create images
+ VkImageObj image_1(m_device);
+ image_1.init(&ci);
+ ASSERT_TRUE(image_1.initialized());
+
+ ci.extent = {62, 62, 1}; // slightly smaller and not divisible by block size
+ VkImageObj image_2(m_device);
+ image_2.init(&ci);
+ ASSERT_TRUE(image_2.initialized());
+
+ m_commandBuffer->begin();
+
+ VkImageCopy copy_region;
+ copy_region.extent = {48, 48, 1};
+ copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ copy_region.srcSubresource.mipLevel = 0;
+ copy_region.dstSubresource.mipLevel = 0;
+ copy_region.srcSubresource.baseArrayLayer = 0;
+ copy_region.dstSubresource.baseArrayLayer = 0;
+ copy_region.srcSubresource.layerCount = 1;
+ copy_region.dstSubresource.layerCount = 1;
+ copy_region.srcOffset = {0, 0, 0};
+ copy_region.dstOffset = {0, 0, 0};
+
+ // Sanity check
+ m_errorMonitor->ExpectSuccess();
+ m_commandBuffer->CopyImage(image_1.image(), VK_IMAGE_LAYOUT_GENERAL, image_2.image(), VK_IMAGE_LAYOUT_GENERAL, 1, &copy_region);
+ m_errorMonitor->VerifyNotFound();
+
+ std::string vuid;
+ bool ycbcr = (DeviceExtensionEnabled(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME) ||
+ (DeviceValidationVersion() >= VK_API_VERSION_1_1));
+
+ // Src, Dest offsets must be multiples of compressed block sizes {4, 4, 1}
+ // Image transfer granularity gets set to compressed block size, so an ITG error is also (unavoidably) triggered.
+ vuid = ycbcr ? "VUID-VkImageCopy-srcImage-01727" : "VUID-VkImageCopy-srcOffset-00157";
+ copy_region.srcOffset = {2, 4, 0}; // source width
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, vuid);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdCopyImage-srcOffset-01783"); // srcOffset image transfer granularity
+ m_commandBuffer->CopyImage(image_1.image(), VK_IMAGE_LAYOUT_GENERAL, image_2.image(), VK_IMAGE_LAYOUT_GENERAL, 1, &copy_region);
+ m_errorMonitor->VerifyFound();
+ copy_region.srcOffset = {12, 1, 0}; // source height
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, vuid);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdCopyImage-srcOffset-01783"); // srcOffset image transfer granularity
+ m_commandBuffer->CopyImage(image_1.image(), VK_IMAGE_LAYOUT_GENERAL, image_2.image(), VK_IMAGE_LAYOUT_GENERAL, 1, &copy_region);
+ m_errorMonitor->VerifyFound();
+ copy_region.srcOffset = {0, 0, 0};
+
+ vuid = ycbcr ? "VUID-VkImageCopy-dstImage-01731" : "VUID-VkImageCopy-dstOffset-00162";
+ copy_region.dstOffset = {1, 0, 0}; // dest width
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, vuid);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdCopyImage-dstOffset-01784"); // dstOffset image transfer granularity
+ m_commandBuffer->CopyImage(image_1.image(), VK_IMAGE_LAYOUT_GENERAL, image_2.image(), VK_IMAGE_LAYOUT_GENERAL, 1, &copy_region);
+ m_errorMonitor->VerifyFound();
+ copy_region.dstOffset = {4, 1, 0}; // dest height
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, vuid);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdCopyImage-dstOffset-01784"); // dstOffset image transfer granularity
+ m_commandBuffer->CopyImage(image_1.image(), VK_IMAGE_LAYOUT_GENERAL, image_2.image(), VK_IMAGE_LAYOUT_GENERAL, 1, &copy_region);
+ m_errorMonitor->VerifyFound();
+ copy_region.dstOffset = {0, 0, 0};
+
+ // Copy extent must be multiples of compressed block sizes {4, 4, 1} if not full width/height
+ vuid = ycbcr ? "VUID-VkImageCopy-srcImage-01728" : "VUID-VkImageCopy-extent-00158";
+ copy_region.extent = {62, 60, 1}; // source width
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, vuid);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdCopyImage-srcOffset-01783"); // src extent image transfer granularity
+ m_commandBuffer->CopyImage(image_1.image(), VK_IMAGE_LAYOUT_GENERAL, image_2.image(), VK_IMAGE_LAYOUT_GENERAL, 1, &copy_region);
+ m_errorMonitor->VerifyFound();
+ vuid = ycbcr ? "VUID-VkImageCopy-srcImage-01729" : "VUID-VkImageCopy-extent-00159";
+ copy_region.extent = {60, 62, 1}; // source height
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, vuid);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdCopyImage-srcOffset-01783"); // src extent image transfer granularity
+ m_commandBuffer->CopyImage(image_1.image(), VK_IMAGE_LAYOUT_GENERAL, image_2.image(), VK_IMAGE_LAYOUT_GENERAL, 1, &copy_region);
+ m_errorMonitor->VerifyFound();
+
+ vuid = ycbcr ? "VUID-VkImageCopy-dstImage-01732" : "VUID-VkImageCopy-extent-00163";
+ copy_region.extent = {62, 60, 1}; // dest width
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, vuid);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdCopyImage-dstOffset-01784"); // dst extent image transfer granularity
+ m_commandBuffer->CopyImage(image_2.image(), VK_IMAGE_LAYOUT_GENERAL, image_1.image(), VK_IMAGE_LAYOUT_GENERAL, 1, &copy_region);
+ m_errorMonitor->VerifyFound();
+ vuid = ycbcr ? "VUID-VkImageCopy-dstImage-01733" : "VUID-VkImageCopy-extent-00164";
+ copy_region.extent = {60, 62, 1}; // dest height
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, vuid);
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdCopyImage-dstOffset-01784"); // dst extent image transfer granularity
+ m_commandBuffer->CopyImage(image_2.image(), VK_IMAGE_LAYOUT_GENERAL, image_1.image(), VK_IMAGE_LAYOUT_GENERAL, 1, &copy_region);
+ m_errorMonitor->VerifyFound();
+
+ // Note: "VUID-VkImageCopy-extent-00160", "VUID-VkImageCopy-extent-00165", "VUID-VkImageCopy-srcImage-01730",
+ // "VUID-VkImageCopy-dstImage-01734"
+ // There are currently no supported compressed formats with a block depth other than 1,
+    // so it is impossible to create a 'not a multiple' condition for depth.
+ m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, CopyImageSinglePlane422Alignment) {
+ // Image copy tests on single-plane _422 formats with block alignment errors
+
+ // Enable KHR multiplane req'd extensions
+ bool mp_extensions = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
+                                                    VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION);
+ if (mp_extensions) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ }
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+ mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+ mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+ mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+ if (mp_extensions) {
+ m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+ } else {
+ printf("%s test requires KHR multiplane extensions, not available. Skipping.\n", kSkipPrefix);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ // Select a _422 format and verify support
+ VkImageCreateInfo ci = {};
+ ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ ci.pNext = NULL;
+ ci.flags = 0;
+ ci.imageType = VK_IMAGE_TYPE_2D;
+ ci.format = VK_FORMAT_G8B8G8R8_422_UNORM_KHR;
+ ci.tiling = VK_IMAGE_TILING_OPTIMAL;
+ ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+ ci.mipLevels = 1;
+ ci.arrayLayers = 1;
+ ci.samples = VK_SAMPLE_COUNT_1_BIT;
+ ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+ ci.queueFamilyIndexCount = 0;
+ ci.pQueueFamilyIndices = NULL;
+ ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+
+ // Verify formats
+ VkFormatFeatureFlags features = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT | VK_FORMAT_FEATURE_TRANSFER_DST_BIT;
+ bool supported = ImageFormatAndFeaturesSupported(instance(), gpu(), ci, features);
+ if (!supported) {
+ printf("%s Single-plane _422 image format not supported. Skipping test.\n", kSkipPrefix);
+ return; // Assume there's low ROI on searching for different mp formats
+ }
+
+ // Create images
+ ci.extent = {64, 64, 1};
+ VkImageObj image_422(m_device);
+ image_422.init(&ci);
+ ASSERT_TRUE(image_422.initialized());
+
+ ci.extent = {64, 64, 1};
+ ci.format = VK_FORMAT_R8G8B8A8_UNORM;
+ VkImageObj image_ucmp(m_device);
+ image_ucmp.init(&ci);
+ ASSERT_TRUE(image_ucmp.initialized());
+
+ m_commandBuffer->begin();
+
+ VkImageCopy copy_region;
+ copy_region.extent = {48, 48, 1};
+ copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ copy_region.srcSubresource.mipLevel = 0;
+ copy_region.dstSubresource.mipLevel = 0;
+ copy_region.srcSubresource.baseArrayLayer = 0;
+ copy_region.dstSubresource.baseArrayLayer = 0;
+ copy_region.srcSubresource.layerCount = 1;
+ copy_region.dstSubresource.layerCount = 1;
+ copy_region.srcOffset = {0, 0, 0};
+ copy_region.dstOffset = {0, 0, 0};
+
+ // Src offsets must be multiples of compressed block sizes
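+    // _422 formats have a 2x1x1 texel block, so x offsets and x extents must be even.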
+ copy_region.srcOffset = {3, 4, 0}; // source offset x
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcImage-01727");
+ m_commandBuffer->CopyImage(image_422.image(), VK_IMAGE_LAYOUT_GENERAL, image_ucmp.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+ &copy_region);
+ m_errorMonitor->VerifyFound();
+ copy_region.srcOffset = {0, 0, 0};
+
+ // Dst offsets must be multiples of compressed block sizes
+ copy_region.dstOffset = {1, 0, 0};
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstImage-01731");
+ m_commandBuffer->CopyImage(image_ucmp.image(), VK_IMAGE_LAYOUT_GENERAL, image_422.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+ &copy_region);
+ m_errorMonitor->VerifyFound();
+ copy_region.dstOffset = {0, 0, 0};
+
+ // Copy extent must be multiples of compressed block sizes if not full width/height
+ copy_region.extent = {31, 60, 1}; // 422 source, extent.x
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcImage-01728");
+ m_commandBuffer->CopyImage(image_422.image(), VK_IMAGE_LAYOUT_GENERAL, image_ucmp.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+ &copy_region);
+ m_errorMonitor->VerifyFound();
+
+ // 422 dest, extent.x
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstImage-01732");
+ m_commandBuffer->CopyImage(image_ucmp.image(), VK_IMAGE_LAYOUT_GENERAL, image_422.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+ &copy_region);
+ m_errorMonitor->VerifyFound();
+ copy_region.dstOffset = {0, 0, 0};
+
+ m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, MultiplaneImageSamplerConversionMismatch) {
+    TEST_DESCRIPTION("Create sampler with ycbcr conversion and use with an image created without ycbcr conversion");
+
+ // Enable KHR multiplane req'd extensions
+ bool mp_extensions = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
+ VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION);
+ if (mp_extensions) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ }
+ SetTargetApiVersion(VK_API_VERSION_1_1);
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+ mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+ mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+ mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+ if (mp_extensions) {
+ m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+ } else {
+ printf("%s test requires KHR multiplane extensions, not available. Skipping.\n", kSkipPrefix);
+ return;
+ }
+
+ // Enable Ycbcr Conversion Features
+ VkPhysicalDeviceSamplerYcbcrConversionFeatures ycbcr_features = {};
+ ycbcr_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES;
+ ycbcr_features.samplerYcbcrConversion = VK_TRUE;
+ ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &ycbcr_features));
+
+ const VkImageCreateInfo ci = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+ NULL,
+ 0,
+ VK_IMAGE_TYPE_2D,
+ VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR,
+ {128, 128, 1},
+ 1,
+ 1,
+ VK_SAMPLE_COUNT_1_BIT,
+ VK_IMAGE_TILING_LINEAR,
+ VK_IMAGE_USAGE_SAMPLED_BIT,
+ VK_SHARING_MODE_EXCLUSIVE,
+ VK_IMAGE_LAYOUT_UNDEFINED};
+
+ // Verify formats
+ bool supported = ImageFormatAndFeaturesSupported(instance(), gpu(), ci, VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT);
+ if (!supported) {
+ printf("%s Multiplane image format not supported. Skipping test.\n", kSkipPrefix);
+ return;
+ }
+
+ // Create Ycbcr conversion
+ VkSamplerYcbcrConversionCreateInfo ycbcr_create_info = {VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO,
+ NULL,
+ VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR,
+ VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY,
+ VK_SAMPLER_YCBCR_RANGE_ITU_FULL,
+ {VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY,
+ VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY},
+ VK_CHROMA_LOCATION_COSITED_EVEN,
+ VK_CHROMA_LOCATION_COSITED_EVEN,
+ VK_FILTER_NEAREST,
+ false};
+ VkSamplerYcbcrConversion conversion;
+ vkCreateSamplerYcbcrConversion(m_device->handle(), &ycbcr_create_info, nullptr, &conversion);
+ VkSamplerYcbcrConversionInfo ycbcr_info = {};
+ ycbcr_info.sType = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO;
+ ycbcr_info.conversion = conversion;
+
+ // Create a sampler using conversion
+ VkSamplerCreateInfo sci = SafeSaneSamplerCreateInfo();
+ sci.pNext = &ycbcr_info;
+
+ VkSampler sampler;
+ VkResult err = vkCreateSampler(m_device->device(), &sci, NULL, &sampler);
+ ASSERT_VK_SUCCESS(err);
+
+ // Create an image without a Ycbcr conversion
+ VkImageObj mpimage(m_device);
+ mpimage.init(&ci);
+
+ VkImageView view;
+ VkImageViewCreateInfo ivci = {};
+ ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+ ivci.image = mpimage.handle();
+ ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+ ivci.format = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR;
+ ivci.subresourceRange.layerCount = 1;
+ ivci.subresourceRange.baseMipLevel = 0;
+ ivci.subresourceRange.levelCount = 1;
+ ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ vkCreateImageView(m_device->device(), &ivci, nullptr, &view);
+
+ // Use the image and sampler together in a descriptor set
+ OneOffDescriptorSet ds(m_device, {
+ {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, &sampler},
+ });
+
+ VkDescriptorImageInfo image_info{};
+ image_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+ image_info.imageView = view;
+ image_info.sampler = sampler;
+
+ // Update the descriptor set expecting to get an error
+ VkWriteDescriptorSet descriptor_write = {};
+ descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ descriptor_write.dstSet = ds.set_;
+ descriptor_write.dstBinding = 0;
+ descriptor_write.descriptorCount = 1;
+ descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+ descriptor_write.pImageInfo = &image_info;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-descriptorType-01948");
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+ m_errorMonitor->VerifyFound();
+ vkDestroySamplerYcbcrConversion(m_device->device(), conversion, nullptr);
+ vkDestroyImageView(m_device->device(), view, NULL);
+ vkDestroySampler(m_device->device(), sampler, nullptr);
+}
+
+TEST_F(VkLayerTest, CopyImageMultiplaneAspectBits) {
+ // Image copy tests on multiplane images with aspect errors
+
+ // Enable KHR multiplane req'd extensions
+ bool mp_extensions = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
+                                                    VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION);
+ if (mp_extensions) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ }
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+ mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+ mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+ mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+ if (mp_extensions) {
+ m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+ } else {
+ printf("%s test requires KHR multiplane extensions, not available. Skipping.\n", kSkipPrefix);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ // Select multi-plane formats and verify support
+ VkFormat mp3_format = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR;
+ VkFormat mp2_format = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR;
+
+ VkImageCreateInfo ci = {};
+ ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ ci.pNext = NULL;
+ ci.flags = 0;
+ ci.imageType = VK_IMAGE_TYPE_2D;
+ ci.format = mp2_format;
+ ci.extent = {256, 256, 1};
+ ci.tiling = VK_IMAGE_TILING_OPTIMAL;
+ ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+ ci.mipLevels = 1;
+ ci.arrayLayers = 1;
+ ci.samples = VK_SAMPLE_COUNT_1_BIT;
+ ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+ ci.queueFamilyIndexCount = 0;
+ ci.pQueueFamilyIndices = NULL;
+ ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+
+ // Verify formats
+ VkFormatFeatureFlags features = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT | VK_FORMAT_FEATURE_TRANSFER_DST_BIT;
+ bool supported = ImageFormatAndFeaturesSupported(instance(), gpu(), ci, features);
+ ci.format = mp3_format;
+ supported = supported && ImageFormatAndFeaturesSupported(instance(), gpu(), ci, features);
+ if (!supported) {
+ printf("%s Multiplane image formats not supported. Skipping test.\n", kSkipPrefix);
+ return; // Assume there's low ROI on searching for different mp formats
+ }
+
+ // Create images
+ VkImageObj mp3_image(m_device);
+ mp3_image.init(&ci);
+ ASSERT_TRUE(mp3_image.initialized());
+
+ ci.format = mp2_format;
+ VkImageObj mp2_image(m_device);
+ mp2_image.init(&ci);
+ ASSERT_TRUE(mp2_image.initialized());
+
+ ci.format = VK_FORMAT_D24_UNORM_S8_UINT;
+ VkImageObj sp_image(m_device);
+ sp_image.init(&ci);
+ ASSERT_TRUE(sp_image.initialized());
+
+ m_commandBuffer->begin();
+
+ VkImageCopy copy_region;
+ copy_region.extent = {128, 128, 1};
+ copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_PLANE_2_BIT_KHR;
+ copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_PLANE_2_BIT_KHR;
+ copy_region.srcSubresource.mipLevel = 0;
+ copy_region.dstSubresource.mipLevel = 0;
+ copy_region.srcSubresource.baseArrayLayer = 0;
+ copy_region.dstSubresource.baseArrayLayer = 0;
+ copy_region.srcSubresource.layerCount = 1;
+ copy_region.dstSubresource.layerCount = 1;
+ copy_region.srcOffset = {0, 0, 0};
+ copy_region.dstOffset = {0, 0, 0};
+
+ m_errorMonitor->SetUnexpectedError("VUID-vkCmdCopyImage-srcImage-00135");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcImage-01552");
+ m_commandBuffer->CopyImage(mp2_image.image(), VK_IMAGE_LAYOUT_GENERAL, mp3_image.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+ &copy_region);
+ m_errorMonitor->VerifyFound();
+
+ copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_PLANE_0_BIT_KHR;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcImage-01553");
+ m_commandBuffer->CopyImage(mp3_image.image(), VK_IMAGE_LAYOUT_GENERAL, mp2_image.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+ &copy_region);
+ m_errorMonitor->VerifyFound();
+
+ copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_PLANE_1_BIT_KHR;
+ copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_PLANE_2_BIT_KHR;
+ m_errorMonitor->SetUnexpectedError("VUID-vkCmdCopyImage-srcImage-00135");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstImage-01554");
+ m_commandBuffer->CopyImage(mp3_image.image(), VK_IMAGE_LAYOUT_GENERAL, mp2_image.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+ &copy_region);
+ m_errorMonitor->VerifyFound();
+
+ copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ m_errorMonitor->SetUnexpectedError("VUID-vkCmdCopyImage-srcImage-00135");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstImage-01555");
+ m_commandBuffer->CopyImage(mp2_image.image(), VK_IMAGE_LAYOUT_GENERAL, mp3_image.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+ &copy_region);
+ m_errorMonitor->VerifyFound();
+
+ copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcImage-01556");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "dest image depth/stencil formats"); // also
+ m_commandBuffer->CopyImage(mp2_image.image(), VK_IMAGE_LAYOUT_GENERAL, sp_image.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+ &copy_region);
+ m_errorMonitor->VerifyFound();
+
+ copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+ copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_PLANE_2_BIT_KHR;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstImage-01557");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "dest image depth/stencil formats"); // also
+ m_commandBuffer->CopyImage(sp_image.image(), VK_IMAGE_LAYOUT_GENERAL, mp3_image.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+ &copy_region);
+ m_errorMonitor->VerifyFound();
+
+ m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, CopyImageSrcSizeExceeded) {
+ // Image copy with source region specified greater than src image size
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ // Create images with full mip chain
+ VkImageCreateInfo ci;
+ ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ ci.pNext = NULL;
+ ci.flags = 0;
+ ci.imageType = VK_IMAGE_TYPE_3D;
+ ci.format = VK_FORMAT_R8G8B8A8_UNORM;
+ ci.extent = {32, 32, 8};
+ ci.mipLevels = 6;
+ ci.arrayLayers = 1;
+ ci.samples = VK_SAMPLE_COUNT_1_BIT;
+ ci.tiling = VK_IMAGE_TILING_OPTIMAL;
+ ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
+ ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+ ci.queueFamilyIndexCount = 0;
+ ci.pQueueFamilyIndices = NULL;
+ ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+
+ VkImageObj src_image(m_device);
+ src_image.init(&ci);
+ ASSERT_TRUE(src_image.initialized());
+
+ // Dest image with one more mip level
+ ci.extent = {64, 64, 16};
+ ci.mipLevels = 7;
+ ci.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+ VkImageObj dst_image(m_device);
+ dst_image.init(&ci);
+ ASSERT_TRUE(dst_image.initialized());
+
+ m_commandBuffer->begin();
+
+ VkImageCopy copy_region;
+ copy_region.extent = {32, 32, 8};
+ copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ copy_region.srcSubresource.mipLevel = 0;
+ copy_region.dstSubresource.mipLevel = 0;
+ copy_region.srcSubresource.baseArrayLayer = 0;
+ copy_region.dstSubresource.baseArrayLayer = 0;
+ copy_region.srcSubresource.layerCount = 1;
+ copy_region.dstSubresource.layerCount = 1;
+ copy_region.srcOffset = {0, 0, 0};
+ copy_region.dstOffset = {0, 0, 0};
+
+ m_errorMonitor->ExpectSuccess();
+ m_commandBuffer->CopyImage(src_image.image(), VK_IMAGE_LAYOUT_GENERAL, dst_image.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+ &copy_region);
+ m_errorMonitor->VerifyNotFound();
+
+ // Source exceeded in x-dim, VU 01202
+ copy_region.srcOffset.x = 4;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdCopyImage-pRegions-00122"); // General "contained within" VU
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcOffset-00144");
+ m_commandBuffer->CopyImage(src_image.image(), VK_IMAGE_LAYOUT_GENERAL, dst_image.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+ &copy_region);
+ m_errorMonitor->VerifyFound();
+
+ // Source exceeded in y-dim, VU 01203
+ copy_region.srcOffset.x = 0;
+ copy_region.extent.height = 48;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-pRegions-00122");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcOffset-00145");
+ m_commandBuffer->CopyImage(src_image.image(), VK_IMAGE_LAYOUT_GENERAL, dst_image.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+ &copy_region);
+ m_errorMonitor->VerifyFound();
+
+ // Source exceeded in z-dim, VU 01204
+ copy_region.extent = {4, 4, 4};
+ copy_region.srcSubresource.mipLevel = 2;
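+    // At mip level 2 the 32x32x8 source is 8x8x2, so a copy depth of 4 overruns the z dimension.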
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-pRegions-00122");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcOffset-00147");
+ m_commandBuffer->CopyImage(src_image.image(), VK_IMAGE_LAYOUT_GENERAL, dst_image.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+ &copy_region);
+ m_errorMonitor->VerifyFound();
+
+ m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, CopyImageDstSizeExceeded) {
+ // Image copy with dest region specified greater than dest image size
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ // Create images with full mip chain
+ VkImageCreateInfo ci;
+ ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ ci.pNext = NULL;
+ ci.flags = 0;
+ ci.imageType = VK_IMAGE_TYPE_3D;
+ ci.format = VK_FORMAT_R8G8B8A8_UNORM;
+ ci.extent = {32, 32, 8};
+ ci.mipLevels = 6;
+ ci.arrayLayers = 1;
+ ci.samples = VK_SAMPLE_COUNT_1_BIT;
+ ci.tiling = VK_IMAGE_TILING_OPTIMAL;
+ ci.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+ ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+ ci.queueFamilyIndexCount = 0;
+ ci.pQueueFamilyIndices = NULL;
+ ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+
+ VkImageObj dst_image(m_device);
+ dst_image.init(&ci);
+ ASSERT_TRUE(dst_image.initialized());
+
+ // Src image with one more mip level
+ ci.extent = {64, 64, 16};
+ ci.mipLevels = 7;
+ ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
+ VkImageObj src_image(m_device);
+ src_image.init(&ci);
+ ASSERT_TRUE(src_image.initialized());
+
+ m_commandBuffer->begin();
+
+ VkImageCopy copy_region;
+ copy_region.extent = {32, 32, 8};
+ copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ copy_region.srcSubresource.mipLevel = 0;
+ copy_region.dstSubresource.mipLevel = 0;
+ copy_region.srcSubresource.baseArrayLayer = 0;
+ copy_region.dstSubresource.baseArrayLayer = 0;
+ copy_region.srcSubresource.layerCount = 1;
+ copy_region.dstSubresource.layerCount = 1;
+ copy_region.srcOffset = {0, 0, 0};
+ copy_region.dstOffset = {0, 0, 0};
+
+ m_errorMonitor->ExpectSuccess();
+ m_commandBuffer->CopyImage(src_image.image(), VK_IMAGE_LAYOUT_GENERAL, dst_image.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+ &copy_region);
+ m_errorMonitor->VerifyNotFound();
+
+ // Dest exceeded in x-dim, VU 01205
+ copy_region.dstOffset.x = 4;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdCopyImage-pRegions-00123"); // General "contained within" VU
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstOffset-00150");
+ m_commandBuffer->CopyImage(src_image.image(), VK_IMAGE_LAYOUT_GENERAL, dst_image.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+ &copy_region);
+ m_errorMonitor->VerifyFound();
+
+ // Dest exceeded in y-dim, VU 01206
+ copy_region.dstOffset.x = 0;
+ copy_region.extent.height = 48;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-pRegions-00123");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstOffset-00151");
+ m_commandBuffer->CopyImage(src_image.image(), VK_IMAGE_LAYOUT_GENERAL, dst_image.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+ &copy_region);
+ m_errorMonitor->VerifyFound();
+
+ // Dest exceeded in z-dim, VU 01207
+ copy_region.extent = {4, 4, 4};
+ copy_region.dstSubresource.mipLevel = 2;
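+ // At mip level 2 the 32x32x8 dest shrinks to 8x8x2, so an extent.depth of 4 exceeds the dest subresource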
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-pRegions-00123");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstOffset-00153");
+ m_commandBuffer->CopyImage(src_image.image(), VK_IMAGE_LAYOUT_GENERAL, dst_image.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+ &copy_region);
+ m_errorMonitor->VerifyFound();
+
+ m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, CopyImageFormatSizeMismatch) {
+ VkResult err;
+ bool pass;
+
+ // Create color images with different format sizes and try to copy between them
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-srcImage-00135");
+
+ SetTargetApiVersion(VK_API_VERSION_1_1);
+ ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+
+ // Create two images with different-sized formats and try to copy between them
+ VkImage srcImage;
+ VkImage dstImage;
+ VkDeviceMemory srcMem;
+ VkDeviceMemory destMem;
+ VkMemoryRequirements memReqs;
+
+ VkImageCreateInfo image_create_info = {};
+ image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ image_create_info.pNext = NULL;
+ image_create_info.imageType = VK_IMAGE_TYPE_2D;
+ image_create_info.format = VK_FORMAT_B8G8R8A8_UNORM;
+ image_create_info.extent.width = 32;
+ image_create_info.extent.height = 32;
+ image_create_info.extent.depth = 1;
+ image_create_info.mipLevels = 1;
+ image_create_info.arrayLayers = 1;
+ image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+ image_create_info.tiling = VK_IMAGE_TILING_LINEAR;
+ image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
+ image_create_info.flags = 0;
+
+ err = vkCreateImage(m_device->device(), &image_create_info, NULL, &srcImage);
+ ASSERT_VK_SUCCESS(err);
+
+ image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+ // Introduce failure by creating second image with a different-sized format.
+ image_create_info.format = VK_FORMAT_R5G5B5A1_UNORM_PACK16;
+ VkFormatProperties properties;
+ vkGetPhysicalDeviceFormatProperties(m_device->phy().handle(), image_create_info.format, &properties);
+ if (properties.optimalTilingFeatures == 0) {
+ vkDestroyImage(m_device->device(), srcImage, NULL);
+ printf("%s Image format not supported; skipped.\n", kSkipPrefix);
+ return;
+ }
+
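+ // VK_FORMAT_B8G8R8A8_UNORM is 4 bytes per texel while VK_FORMAT_R5G5B5A1_UNORM_PACK16 is 2, so the format sizes differ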
+ err = vkCreateImage(m_device->device(), &image_create_info, NULL, &dstImage);
+ ASSERT_VK_SUCCESS(err);
+
+ // Allocate memory
+ VkMemoryAllocateInfo memAlloc = {};
+ memAlloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ memAlloc.pNext = NULL;
+ memAlloc.allocationSize = 0;
+ memAlloc.memoryTypeIndex = 0;
+
+ vkGetImageMemoryRequirements(m_device->device(), srcImage, &memReqs);
+ memAlloc.allocationSize = memReqs.size;
+ pass = m_device->phy().set_memory_type(memReqs.memoryTypeBits, &memAlloc, 0);
+ ASSERT_TRUE(pass);
+ err = vkAllocateMemory(m_device->device(), &memAlloc, NULL, &srcMem);
+ ASSERT_VK_SUCCESS(err);
+
+ vkGetImageMemoryRequirements(m_device->device(), dstImage, &memReqs);
+ memAlloc.allocationSize = memReqs.size;
+ pass = m_device->phy().set_memory_type(memReqs.memoryTypeBits, &memAlloc, 0);
+ ASSERT_TRUE(pass);
+ err = vkAllocateMemory(m_device->device(), &memAlloc, NULL, &destMem);
+ ASSERT_VK_SUCCESS(err);
+
+ err = vkBindImageMemory(m_device->device(), srcImage, srcMem, 0);
+ ASSERT_VK_SUCCESS(err);
+ err = vkBindImageMemory(m_device->device(), dstImage, destMem, 0);
+ ASSERT_VK_SUCCESS(err);
+
+ m_commandBuffer->begin();
+ VkImageCopy copyRegion;
+ copyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ copyRegion.srcSubresource.mipLevel = 0;
+ copyRegion.srcSubresource.baseArrayLayer = 0;
+ copyRegion.srcSubresource.layerCount = 1;
+ copyRegion.srcOffset.x = 0;
+ copyRegion.srcOffset.y = 0;
+ copyRegion.srcOffset.z = 0;
+ copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ copyRegion.dstSubresource.mipLevel = 0;
+ copyRegion.dstSubresource.baseArrayLayer = 0;
+ copyRegion.dstSubresource.layerCount = 1;
+ copyRegion.dstOffset.x = 0;
+ copyRegion.dstOffset.y = 0;
+ copyRegion.dstOffset.z = 0;
+ copyRegion.extent.width = 1;
+ copyRegion.extent.height = 1;
+ copyRegion.extent.depth = 1;
+ m_commandBuffer->CopyImage(srcImage, VK_IMAGE_LAYOUT_GENERAL, dstImage, VK_IMAGE_LAYOUT_GENERAL, 1, &copyRegion);
+ m_commandBuffer->end();
+
+ m_errorMonitor->VerifyFound();
+
+ vkDestroyImage(m_device->device(), dstImage, NULL);
+ vkFreeMemory(m_device->device(), destMem, NULL);
+
+ // Copy to multiplane image with mismatched sizes
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-srcImage-00135");
+
+ VkImageCreateInfo ci;
+ ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ ci.pNext = NULL;
+ ci.flags = 0;
+ ci.imageType = VK_IMAGE_TYPE_2D;
+ ci.format = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM;
+ ci.extent = {32, 32, 1};
+ ci.mipLevels = 1;
+ ci.arrayLayers = 1;
+ ci.samples = VK_SAMPLE_COUNT_1_BIT;
+ ci.tiling = VK_IMAGE_TILING_LINEAR;
+ ci.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+ ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+ ci.queueFamilyIndexCount = 0;
+ ci.pQueueFamilyIndices = NULL;
+ ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+
+ VkFormatFeatureFlags features = VK_FORMAT_FEATURE_TRANSFER_DST_BIT;
+ bool supported = ImageFormatAndFeaturesSupported(instance(), gpu(), ci, features);
+ bool ycbcr = (DeviceExtensionEnabled(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME) ||
+ (DeviceValidationVersion() >= VK_API_VERSION_1_1));
+ if (!supported || !ycbcr) {
+ printf("%s Image format not supported; skipped multiplanar copy test.\n", kSkipPrefix);
+ vkDestroyImage(m_device->device(), srcImage, NULL);
+ vkFreeMemory(m_device->device(), srcMem, NULL);
+ return;
+ }
+
+ VkImageObj mpImage(m_device);
+ mpImage.init(&ci);
+ ASSERT_TRUE(mpImage.initialized());
+ copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_PLANE_0_BIT;
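+ // Plane 0 of the 3-plane 4:2:0 format is a single 8-bit channel, which does not match the 4-byte source format size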
+ vkResetCommandBuffer(m_commandBuffer->handle(), 0);
+ m_commandBuffer->begin();
+ m_commandBuffer->CopyImage(srcImage, VK_IMAGE_LAYOUT_GENERAL, mpImage.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &copyRegion);
+ m_commandBuffer->end();
+
+ m_errorMonitor->VerifyFound();
+
+ vkDestroyImage(m_device->device(), srcImage, NULL);
+ vkFreeMemory(m_device->device(), srcMem, NULL);
+}
+
+TEST_F(VkLayerTest, CopyImageDepthStencilFormatMismatch) {
+ ASSERT_NO_FATAL_FAILURE(Init());
+ auto depth_format = FindSupportedDepthStencilFormat(gpu());
+ if (!depth_format) {
+ printf("%s Couldn't depth stencil image format.\n", kSkipPrefix);
+ return;
+ }
+
+ VkFormatProperties properties;
+ vkGetPhysicalDeviceFormatProperties(m_device->phy().handle(), VK_FORMAT_D32_SFLOAT, &properties);
+ if (properties.optimalTilingFeatures == 0) {
+ printf("%s Image format not supported; skipped.\n", kSkipPrefix);
+ return;
+ }
+
+ VkImageObj srcImage(m_device);
+ srcImage.Init(32, 32, 1, VK_FORMAT_D32_SFLOAT, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, VK_IMAGE_TILING_OPTIMAL);
+ ASSERT_TRUE(srcImage.initialized());
+ VkImageObj dstImage(m_device);
+ dstImage.Init(32, 32, 1, depth_format, VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL);
+ ASSERT_TRUE(dstImage.initialized());
+
+ // Copy between depth images with mismatched depth/stencil formats
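+ // The depth-only VK_FORMAT_D32_SFLOAT source is expected to differ from the combined depth/stencil dest format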
+
+ m_commandBuffer->begin();
+ VkImageCopy copyRegion;
+ copyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+ copyRegion.srcSubresource.mipLevel = 0;
+ copyRegion.srcSubresource.baseArrayLayer = 0;
+ copyRegion.srcSubresource.layerCount = 1;
+ copyRegion.srcOffset.x = 0;
+ copyRegion.srcOffset.y = 0;
+ copyRegion.srcOffset.z = 0;
+ copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+ copyRegion.dstSubresource.mipLevel = 0;
+ copyRegion.dstSubresource.baseArrayLayer = 0;
+ copyRegion.dstSubresource.layerCount = 1;
+ copyRegion.dstOffset.x = 0;
+ copyRegion.dstOffset.y = 0;
+ copyRegion.dstOffset.z = 0;
+ copyRegion.extent.width = 1;
+ copyRegion.extent.height = 1;
+ copyRegion.extent.depth = 1;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "vkCmdCopyImage called with unmatched source and dest image depth");
+ m_commandBuffer->CopyImage(srcImage.handle(), VK_IMAGE_LAYOUT_GENERAL, dstImage.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
+ &copyRegion);
+ m_commandBuffer->end();
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CopyImageSampleCountMismatch) {
+ TEST_DESCRIPTION("Image copies with sample count mis-matches");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ VkImageFormatProperties image_format_properties;
+ vkGetPhysicalDeviceImageFormatProperties(gpu(), VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_TYPE_2D, VK_IMAGE_TILING_OPTIMAL,
+ VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT, 0,
+ &image_format_properties);
+
+ if ((0 == (VK_SAMPLE_COUNT_2_BIT & image_format_properties.sampleCounts)) ||
+ (0 == (VK_SAMPLE_COUNT_4_BIT & image_format_properties.sampleCounts))) {
+ printf("%s Image multi-sample support not found; skipped.\n", kSkipPrefix);
+ return;
+ }
+
+ VkImageCreateInfo ci;
+ ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ ci.pNext = NULL;
+ ci.flags = 0;
+ ci.imageType = VK_IMAGE_TYPE_2D;
+ ci.format = VK_FORMAT_R8G8B8A8_UNORM;
+ ci.extent = {128, 128, 1};
+ ci.mipLevels = 1;
+ ci.arrayLayers = 1;
+ ci.samples = VK_SAMPLE_COUNT_1_BIT;
+ ci.tiling = VK_IMAGE_TILING_OPTIMAL;
+ ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+ ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+ ci.queueFamilyIndexCount = 0;
+ ci.pQueueFamilyIndices = NULL;
+ ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+
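+ // image1, image2, and image4 share this create info and differ only in sample count (1x, 2x, 4x)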
+ VkImageObj image1(m_device);
+ image1.init(&ci);
+ ASSERT_TRUE(image1.initialized());
+
+ ci.samples = VK_SAMPLE_COUNT_2_BIT;
+ VkImageObj image2(m_device);
+ image2.init(&ci);
+ ASSERT_TRUE(image2.initialized());
+
+ ci.samples = VK_SAMPLE_COUNT_4_BIT;
+ VkImageObj image4(m_device);
+ image4.init(&ci);
+ ASSERT_TRUE(image4.initialized());
+
+ m_commandBuffer->begin();
+
+ VkImageCopy copyRegion;
+ copyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ copyRegion.srcSubresource.mipLevel = 0;
+ copyRegion.srcSubresource.baseArrayLayer = 0;
+ copyRegion.srcSubresource.layerCount = 1;
+ copyRegion.srcOffset = {0, 0, 0};
+ copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ copyRegion.dstSubresource.mipLevel = 0;
+ copyRegion.dstSubresource.baseArrayLayer = 0;
+ copyRegion.dstSubresource.layerCount = 1;
+ copyRegion.dstOffset = {0, 0, 0};
+ copyRegion.extent = {128, 128, 1};
+
+ // Copy a single sample image to/from a multi-sample image
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-srcImage-00136");
+ vkCmdCopyImage(m_commandBuffer->handle(), image1.handle(), VK_IMAGE_LAYOUT_GENERAL, image4.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
+ &copyRegion);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-srcImage-00136");
+ vkCmdCopyImage(m_commandBuffer->handle(), image2.handle(), VK_IMAGE_LAYOUT_GENERAL, image1.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
+ &copyRegion);
+ m_errorMonitor->VerifyFound();
+
+ // Copy between multi-sample images with different sample counts
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-srcImage-00136");
+ vkCmdCopyImage(m_commandBuffer->handle(), image2.handle(), VK_IMAGE_LAYOUT_GENERAL, image4.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
+ &copyRegion);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-srcImage-00136");
+ vkCmdCopyImage(m_commandBuffer->handle(), image4.handle(), VK_IMAGE_LAYOUT_GENERAL, image2.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
+ &copyRegion);
+ m_errorMonitor->VerifyFound();
+
+ m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, CopyImageAspectMismatch) {
+ TEST_DESCRIPTION("Image copies with aspect mask errors");
+ SetTargetApiVersion(VK_API_VERSION_1_1);
+ ASSERT_NO_FATAL_FAILURE(Init());
+ auto ds_format = FindSupportedDepthStencilFormat(gpu());
+ if (!ds_format) {
+ printf("%s Couldn't find depth stencil format.\n", kSkipPrefix);
+ return;
+ }
+
+ VkFormatProperties properties;
+ vkGetPhysicalDeviceFormatProperties(m_device->phy().handle(), VK_FORMAT_D32_SFLOAT, &properties);
+ if (properties.optimalTilingFeatures == 0) {
+ printf("%s Image format VK_FORMAT_D32_SFLOAT not supported; skipped.\n", kSkipPrefix);
+ return;
+ }
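+ // Three images: a color-only image, a depth-only image, and a combined depth/stencil image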
+ VkImageObj color_image(m_device), ds_image(m_device), depth_image(m_device);
+ color_image.Init(128, 128, 1, VK_FORMAT_R32_SFLOAT, VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT);
+ depth_image.Init(128, 128, 1, VK_FORMAT_D32_SFLOAT, VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
+ VK_IMAGE_TILING_OPTIMAL, 0);
+ ds_image.Init(128, 128, 1, ds_format, VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
+ VK_IMAGE_TILING_OPTIMAL, 0);
+ ASSERT_TRUE(color_image.initialized());
+ ASSERT_TRUE(depth_image.initialized());
+ ASSERT_TRUE(ds_image.initialized());
+
+ VkImageCopy copyRegion;
+ copyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+ copyRegion.srcSubresource.mipLevel = 0;
+ copyRegion.srcSubresource.baseArrayLayer = 0;
+ copyRegion.srcSubresource.layerCount = 1;
+ copyRegion.srcOffset = {0, 0, 0};
+ copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+ copyRegion.dstSubresource.mipLevel = 0;
+ copyRegion.dstSubresource.baseArrayLayer = 0;
+ copyRegion.dstSubresource.layerCount = 1;
+ copyRegion.dstOffset = {64, 0, 0};
+ copyRegion.extent = {64, 128, 1};
+
+ // Recording a command before the command buffer is in the recording state
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "You must call vkBeginCommandBuffer"); // "VUID-vkCmdCopyImage-commandBuffer-recording");
+ vkCmdCopyImage(m_commandBuffer->handle(), depth_image.handle(), VK_IMAGE_LAYOUT_GENERAL, depth_image.handle(),
+ VK_IMAGE_LAYOUT_GENERAL, 1, &copyRegion);
+ m_errorMonitor->VerifyFound();
+
+ m_commandBuffer->begin();
+
+ // Src and dest aspect masks don't match
+ copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
+ bool ycbcr = (DeviceExtensionEnabled(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME) ||
+ (DeviceValidationVersion() >= VK_API_VERSION_1_1));
+ std::string vuid = (ycbcr ? "VUID-VkImageCopy-srcImage-01551" : "VUID-VkImageCopy-aspectMask-00137");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, vuid);
+ vkCmdCopyImage(m_commandBuffer->handle(), ds_image.handle(), VK_IMAGE_LAYOUT_GENERAL, ds_image.handle(),
+ VK_IMAGE_LAYOUT_GENERAL, 1, &copyRegion);
+ m_errorMonitor->VerifyFound();
+ copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+
+ // Illegal combinations of aspect bits
+ copyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT; // color must be alone
+ copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageSubresourceLayers-aspectMask-00167");
+ // These aspect/format mismatches are redundant but unavoidable here
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-aspectMask-00142");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, vuid);
+ vkCmdCopyImage(m_commandBuffer->handle(), color_image.handle(), VK_IMAGE_LAYOUT_GENERAL, color_image.handle(),
+ VK_IMAGE_LAYOUT_GENERAL, 1, &copyRegion);
+ m_errorMonitor->VerifyFound();
+ // same test for dstSubresource
+ copyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT; // color must be alone
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageSubresourceLayers-aspectMask-00167");
+ // These aspect/format mismatches are redundant but unavoidable here
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-aspectMask-00143");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, vuid);
+ vkCmdCopyImage(m_commandBuffer->handle(), color_image.handle(), VK_IMAGE_LAYOUT_GENERAL, color_image.handle(),
+ VK_IMAGE_LAYOUT_GENERAL, 1, &copyRegion);
+ m_errorMonitor->VerifyFound();
+
+ // Metadata aspect is illegal
+ copyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_METADATA_BIT;
+ copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageSubresourceLayers-aspectMask-00168");
+ // These aspect/format mismatches are redundant but unavoidable here
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, vuid);
+ vkCmdCopyImage(m_commandBuffer->handle(), color_image.handle(), VK_IMAGE_LAYOUT_GENERAL, color_image.handle(),
+ VK_IMAGE_LAYOUT_GENERAL, 1, &copyRegion);
+ m_errorMonitor->VerifyFound();
+ // same test for dstSubresource
+ copyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_METADATA_BIT;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageSubresourceLayers-aspectMask-00168");
+ // These aspect/format mismatches are redundant but unavoidable here
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, vuid);
+ vkCmdCopyImage(m_commandBuffer->handle(), color_image.handle(), VK_IMAGE_LAYOUT_GENERAL, color_image.handle(),
+ VK_IMAGE_LAYOUT_GENERAL, 1, &copyRegion);
+ m_errorMonitor->VerifyFound();
+
+ copyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+ copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+
+ // Aspect mask doesn't match source image format
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-aspectMask-00142");
+ // Again redundant but unavoidable
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "unmatched source and dest image depth/stencil formats");
+ vkCmdCopyImage(m_commandBuffer->handle(), color_image.handle(), VK_IMAGE_LAYOUT_GENERAL, depth_image.handle(),
+ VK_IMAGE_LAYOUT_GENERAL, 1, &copyRegion);
+ m_errorMonitor->VerifyFound();
+
+ // Aspect mask doesn't match dest image format
+ copyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-aspectMask-00143");
+ // Again redundant but unavoidable
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "unmatched source and dest image depth/stencil formats");
+ vkCmdCopyImage(m_commandBuffer->handle(), color_image.handle(), VK_IMAGE_LAYOUT_GENERAL, depth_image.handle(),
+ VK_IMAGE_LAYOUT_GENERAL, 1, &copyRegion);
+ m_errorMonitor->VerifyFound();
+
+ m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, ResolveImageLowSampleCount) {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "vkCmdResolveImage called with source sample count less than 2.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ // Create two single-sample images and try to resolve between them
+
+ VkImageCreateInfo image_create_info = {};
+ image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ image_create_info.pNext = NULL;
+ image_create_info.imageType = VK_IMAGE_TYPE_2D;
+ image_create_info.format = VK_FORMAT_B8G8R8A8_UNORM;
+ image_create_info.extent.width = 32;
+ image_create_info.extent.height = 1;
+ image_create_info.extent.depth = 1;
+ image_create_info.mipLevels = 1;
+ image_create_info.arrayLayers = 1;
+ image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+ image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+ image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+ image_create_info.flags = 0;
+
+ VkImageObj srcImage(m_device);
+ srcImage.init(&image_create_info);
+ ASSERT_TRUE(srcImage.initialized());
+
+ VkImageObj dstImage(m_device);
+ dstImage.init(&image_create_info);
+ ASSERT_TRUE(dstImage.initialized());
+
+ m_commandBuffer->begin();
+ VkImageResolve resolveRegion;
+ resolveRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ resolveRegion.srcSubresource.mipLevel = 0;
+ resolveRegion.srcSubresource.baseArrayLayer = 0;
+ resolveRegion.srcSubresource.layerCount = 1;
+ resolveRegion.srcOffset.x = 0;
+ resolveRegion.srcOffset.y = 0;
+ resolveRegion.srcOffset.z = 0;
+ resolveRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ resolveRegion.dstSubresource.mipLevel = 0;
+ resolveRegion.dstSubresource.baseArrayLayer = 0;
+ resolveRegion.dstSubresource.layerCount = 1;
+ resolveRegion.dstOffset.x = 0;
+ resolveRegion.dstOffset.y = 0;
+ resolveRegion.dstOffset.z = 0;
+ resolveRegion.extent.width = 1;
+ resolveRegion.extent.height = 1;
+ resolveRegion.extent.depth = 1;
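+ // Both images are single-sample, so the resolve source violates the multisample requirement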
+ m_commandBuffer->ResolveImage(srcImage.handle(), VK_IMAGE_LAYOUT_GENERAL, dstImage.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
+ &resolveRegion);
+ m_commandBuffer->end();
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, ResolveImageHighSampleCount) {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "vkCmdResolveImage called with dest sample count greater than 1.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ // Create two 4-sample images and try to resolve between them
+
+ VkImageCreateInfo image_create_info = {};
+ image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ image_create_info.pNext = NULL;
+ image_create_info.imageType = VK_IMAGE_TYPE_2D;
+ image_create_info.format = VK_FORMAT_B8G8R8A8_UNORM;
+ image_create_info.extent.width = 32;
+ image_create_info.extent.height = 1;
+ image_create_info.extent.depth = 1;
+ image_create_info.mipLevels = 1;
+ image_create_info.arrayLayers = 1;
+ image_create_info.samples = VK_SAMPLE_COUNT_4_BIT;
+ image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+ // Note: Some implementations expect color attachment usage for any
+ // multisample surface
+ image_create_info.usage =
+ VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+ image_create_info.flags = 0;
+
+ VkImageObj srcImage(m_device);
+ srcImage.init(&image_create_info);
+ ASSERT_TRUE(srcImage.initialized());
+
+ VkImageObj dstImage(m_device);
+ dstImage.init(&image_create_info);
+ ASSERT_TRUE(dstImage.initialized());
+
+ m_commandBuffer->begin();
+ // TODO: A memory barrier transitioning source and dest to VK_IMAGE_LAYOUT_GENERAL may be needed here
+ VkImageResolve resolveRegion;
+ resolveRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ resolveRegion.srcSubresource.mipLevel = 0;
+ resolveRegion.srcSubresource.baseArrayLayer = 0;
+ resolveRegion.srcSubresource.layerCount = 1;
+ resolveRegion.srcOffset.x = 0;
+ resolveRegion.srcOffset.y = 0;
+ resolveRegion.srcOffset.z = 0;
+ resolveRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ resolveRegion.dstSubresource.mipLevel = 0;
+ resolveRegion.dstSubresource.baseArrayLayer = 0;
+ resolveRegion.dstSubresource.layerCount = 1;
+ resolveRegion.dstOffset.x = 0;
+ resolveRegion.dstOffset.y = 0;
+ resolveRegion.dstOffset.z = 0;
+ resolveRegion.extent.width = 1;
+ resolveRegion.extent.height = 1;
+ resolveRegion.extent.depth = 1;
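+ // The dest image has 4 samples, which is invalid for a resolve destination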
+ m_commandBuffer->ResolveImage(srcImage.handle(), VK_IMAGE_LAYOUT_GENERAL, dstImage.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
+ &resolveRegion);
+ m_commandBuffer->end();
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, ResolveImageFormatMismatch) {
+ VkResult err;
+ bool pass;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_WARNING_BIT_EXT,
+ "vkCmdResolveImage called with unmatched source and dest formats.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ // Create two images with different formats and try to resolve between them
+ VkImage srcImage;
+ VkImage dstImage;
+ VkDeviceMemory srcMem;
+ VkDeviceMemory destMem;
+ VkMemoryRequirements memReqs;
+
+ VkImageCreateInfo image_create_info = {};
+ image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ image_create_info.pNext = NULL;
+ image_create_info.imageType = VK_IMAGE_TYPE_2D;
+ image_create_info.format = VK_FORMAT_B8G8R8A8_UNORM;
+ image_create_info.extent.width = 32;
+ image_create_info.extent.height = 1;
+ image_create_info.extent.depth = 1;
+ image_create_info.mipLevels = 1;
+ image_create_info.arrayLayers = 1;
+ image_create_info.samples = VK_SAMPLE_COUNT_2_BIT;
+ image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+ // Note: Some implementations expect color attachment usage for any
+ // multisample surface
+ image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+ image_create_info.flags = 0;
+
+ err = vkCreateImage(m_device->device(), &image_create_info, NULL, &srcImage);
+ ASSERT_VK_SUCCESS(err);
+
+ // Set format to something other than source image
+ image_create_info.format = VK_FORMAT_R32_SFLOAT;
+ // Note: Some implementations expect color attachment usage for any
+ // multisample surface
+ image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+ image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+
+ err = vkCreateImage(m_device->device(), &image_create_info, NULL, &dstImage);
+ ASSERT_VK_SUCCESS(err);
+
+ // Allocate memory
+ VkMemoryAllocateInfo memAlloc = {};
+ memAlloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ memAlloc.pNext = NULL;
+ memAlloc.allocationSize = 0;
+ memAlloc.memoryTypeIndex = 0;
+
+ vkGetImageMemoryRequirements(m_device->device(), srcImage, &memReqs);
+ memAlloc.allocationSize = memReqs.size;
+ pass = m_device->phy().set_memory_type(memReqs.memoryTypeBits, &memAlloc, 0);
+ ASSERT_TRUE(pass);
+ err = vkAllocateMemory(m_device->device(), &memAlloc, NULL, &srcMem);
+ ASSERT_VK_SUCCESS(err);
+
+ vkGetImageMemoryRequirements(m_device->device(), dstImage, &memReqs);
+ memAlloc.allocationSize = memReqs.size;
+ pass = m_device->phy().set_memory_type(memReqs.memoryTypeBits, &memAlloc, 0);
+ ASSERT_TRUE(pass);
+ err = vkAllocateMemory(m_device->device(), &memAlloc, NULL, &destMem);
+ ASSERT_VK_SUCCESS(err);
+
+ err = vkBindImageMemory(m_device->device(), srcImage, srcMem, 0);
+ ASSERT_VK_SUCCESS(err);
+ err = vkBindImageMemory(m_device->device(), dstImage, destMem, 0);
+ ASSERT_VK_SUCCESS(err);
+
+ m_commandBuffer->begin();
+ // TODO: A memory barrier transitioning source and dest to VK_IMAGE_LAYOUT_GENERAL may be needed here
+ VkImageResolve resolveRegion;
+ resolveRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ resolveRegion.srcSubresource.mipLevel = 0;
+ resolveRegion.srcSubresource.baseArrayLayer = 0;
+ resolveRegion.srcSubresource.layerCount = 1;
+ resolveRegion.srcOffset.x = 0;
+ resolveRegion.srcOffset.y = 0;
+ resolveRegion.srcOffset.z = 0;
+ resolveRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ resolveRegion.dstSubresource.mipLevel = 0;
+ resolveRegion.dstSubresource.baseArrayLayer = 0;
+ resolveRegion.dstSubresource.layerCount = 1;
+ resolveRegion.dstOffset.x = 0;
+ resolveRegion.dstOffset.y = 0;
+ resolveRegion.dstOffset.z = 0;
+ resolveRegion.extent.width = 1;
+ resolveRegion.extent.height = 1;
+ resolveRegion.extent.depth = 1;
+ m_commandBuffer->ResolveImage(srcImage, VK_IMAGE_LAYOUT_GENERAL, dstImage, VK_IMAGE_LAYOUT_GENERAL, 1, &resolveRegion);
+ m_commandBuffer->end();
+
+ m_errorMonitor->VerifyFound();
+
+ vkDestroyImage(m_device->device(), srcImage, NULL);
+ vkDestroyImage(m_device->device(), dstImage, NULL);
+ vkFreeMemory(m_device->device(), srcMem, NULL);
+ vkFreeMemory(m_device->device(), destMem, NULL);
+}
+
+TEST_F(VkLayerTest, ResolveImageTypeMismatch) {
+ VkResult err;
+ bool pass;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_WARNING_BIT_EXT,
+ "vkCmdResolveImage called with unmatched source and dest image types.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ // Create two images of different types and try to resolve between them
+ VkImage srcImage;
+ VkImage dstImage;
+ VkDeviceMemory srcMem;
+ VkDeviceMemory destMem;
+ VkMemoryRequirements memReqs;
+
+ VkImageCreateInfo image_create_info = {};
+ image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ image_create_info.pNext = NULL;
+ image_create_info.imageType = VK_IMAGE_TYPE_2D;
+ image_create_info.format = VK_FORMAT_B8G8R8A8_UNORM;
+ image_create_info.extent.width = 32;
+ image_create_info.extent.height = 1;
+ image_create_info.extent.depth = 1;
+ image_create_info.mipLevels = 1;
+ image_create_info.arrayLayers = 1;
+ image_create_info.samples = VK_SAMPLE_COUNT_2_BIT;
+ image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+ // Note: Some implementations expect color attachment usage for any
+ // multisample surface
+ image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+ image_create_info.flags = 0;
+
+ err = vkCreateImage(m_device->device(), &image_create_info, NULL, &srcImage);
+ ASSERT_VK_SUCCESS(err);
+
+ image_create_info.imageType = VK_IMAGE_TYPE_1D;
+ // Note: Some implementations expect color attachment usage for any
+ // multisample surface
+ image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+ image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+
+ err = vkCreateImage(m_device->device(), &image_create_info, NULL, &dstImage);
+ ASSERT_VK_SUCCESS(err);
+
+ // Allocate memory
+ VkMemoryAllocateInfo memAlloc = {};
+ memAlloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ memAlloc.pNext = NULL;
+ memAlloc.allocationSize = 0;
+ memAlloc.memoryTypeIndex = 0;
+
+ vkGetImageMemoryRequirements(m_device->device(), srcImage, &memReqs);
+ memAlloc.allocationSize = memReqs.size;
+ pass = m_device->phy().set_memory_type(memReqs.memoryTypeBits, &memAlloc, 0);
+ ASSERT_TRUE(pass);
+ err = vkAllocateMemory(m_device->device(), &memAlloc, NULL, &srcMem);
+ ASSERT_VK_SUCCESS(err);
+
+ vkGetImageMemoryRequirements(m_device->device(), dstImage, &memReqs);
+ memAlloc.allocationSize = memReqs.size;
+ pass = m_device->phy().set_memory_type(memReqs.memoryTypeBits, &memAlloc, 0);
+ ASSERT_TRUE(pass);
+ err = vkAllocateMemory(m_device->device(), &memAlloc, NULL, &destMem);
+ ASSERT_VK_SUCCESS(err);
+
+ err = vkBindImageMemory(m_device->device(), srcImage, srcMem, 0);
+ ASSERT_VK_SUCCESS(err);
+ err = vkBindImageMemory(m_device->device(), dstImage, destMem, 0);
+ ASSERT_VK_SUCCESS(err);
+
+ m_commandBuffer->begin();
+ // TODO: A memory barrier transitioning source and dest to VK_IMAGE_LAYOUT_GENERAL may be needed here
+ VkImageResolve resolveRegion;
+ resolveRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ resolveRegion.srcSubresource.mipLevel = 0;
+ resolveRegion.srcSubresource.baseArrayLayer = 0;
+ resolveRegion.srcSubresource.layerCount = 1;
+ resolveRegion.srcOffset.x = 0;
+ resolveRegion.srcOffset.y = 0;
+ resolveRegion.srcOffset.z = 0;
+ resolveRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ resolveRegion.dstSubresource.mipLevel = 0;
+ resolveRegion.dstSubresource.baseArrayLayer = 0;
+ resolveRegion.dstSubresource.layerCount = 1;
+ resolveRegion.dstOffset.x = 0;
+ resolveRegion.dstOffset.y = 0;
+ resolveRegion.dstOffset.z = 0;
+ resolveRegion.extent.width = 1;
+ resolveRegion.extent.height = 1;
+ resolveRegion.extent.depth = 1;
+ m_commandBuffer->ResolveImage(srcImage, VK_IMAGE_LAYOUT_GENERAL, dstImage, VK_IMAGE_LAYOUT_GENERAL, 1, &resolveRegion);
+ m_commandBuffer->end();
+
+ m_errorMonitor->VerifyFound();
+
+ vkDestroyImage(m_device->device(), srcImage, NULL);
+ vkDestroyImage(m_device->device(), dstImage, NULL);
+ vkFreeMemory(m_device->device(), srcMem, NULL);
+ vkFreeMemory(m_device->device(), destMem, NULL);
+}
+
+TEST_F(VkLayerTest, ResolveImageLayoutMismatch) {
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ // Create a multisample source and a single-sample dest image and resolve between them with mismatched layouts
+ VkImageObj srcImage(m_device);
+ VkImageObj dstImage(m_device);
+
+ VkImageCreateInfo image_create_info = {};
+ image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ image_create_info.pNext = NULL;
+ image_create_info.imageType = VK_IMAGE_TYPE_2D;
+ image_create_info.format = VK_FORMAT_B8G8R8A8_UNORM;
+ image_create_info.extent.width = 32;
+ image_create_info.extent.height = 32;
+ image_create_info.extent.depth = 1;
+ image_create_info.mipLevels = 1;
+ image_create_info.arrayLayers = 1;
+ image_create_info.samples = VK_SAMPLE_COUNT_2_BIT;
+ image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+ image_create_info.usage =
+ VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+ // Note: Some implementations expect color attachment usage for any
+ // multisample surface
+ image_create_info.flags = 0;
+ srcImage.init(&image_create_info);
+ ASSERT_TRUE(srcImage.initialized());
+
+ // Note: Some implementations expect color attachment usage for any
+ // multisample surface
+ image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+ dstImage.init(&image_create_info);
+ ASSERT_TRUE(dstImage.initialized());
+
+ m_commandBuffer->begin();
+ // source image must have valid contents before resolve
+ VkClearColorValue clear_color = {{0, 0, 0, 0}};
+ VkImageSubresourceRange subresource = {};
+ subresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ subresource.layerCount = 1;
+ subresource.levelCount = 1;
+ srcImage.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
+ m_commandBuffer->ClearColorImage(srcImage.image(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &clear_color, 1, &subresource);
+ srcImage.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
+ dstImage.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
+
+ VkImageResolve resolveRegion;
+ resolveRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ resolveRegion.srcSubresource.mipLevel = 0;
+ resolveRegion.srcSubresource.baseArrayLayer = 0;
+ resolveRegion.srcSubresource.layerCount = 1;
+ resolveRegion.srcOffset.x = 0;
+ resolveRegion.srcOffset.y = 0;
+ resolveRegion.srcOffset.z = 0;
+ resolveRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ resolveRegion.dstSubresource.mipLevel = 0;
+ resolveRegion.dstSubresource.baseArrayLayer = 0;
+ resolveRegion.dstSubresource.layerCount = 1;
+ resolveRegion.dstOffset.x = 0;
+ resolveRegion.dstOffset.y = 0;
+ resolveRegion.dstOffset.z = 0;
+ resolveRegion.extent.width = 1;
+ resolveRegion.extent.height = 1;
+ resolveRegion.extent.depth = 1;
+ // source image layout mismatch
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdResolveImage-srcImageLayout-00260");
+ m_commandBuffer->ResolveImage(srcImage.image(), VK_IMAGE_LAYOUT_GENERAL, dstImage.image(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
+ 1, &resolveRegion);
+ m_errorMonitor->VerifyFound();
+ // dst image layout mismatch
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdResolveImage-dstImageLayout-00262");
+ m_commandBuffer->ResolveImage(srcImage.image(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, dstImage.image(), VK_IMAGE_LAYOUT_GENERAL,
+ 1, &resolveRegion);
+ m_errorMonitor->VerifyFound();
+ m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, ResolveInvalidSubresource) {
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ // Create a multisample source and a single-sample dest image and resolve between them with invalid subresources
+ VkImageObj srcImage(m_device);
+ VkImageObj dstImage(m_device);
+
+ VkImageCreateInfo image_create_info = {};
+ image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ image_create_info.pNext = NULL;
+ image_create_info.imageType = VK_IMAGE_TYPE_2D;
+ image_create_info.format = VK_FORMAT_B8G8R8A8_UNORM;
+ image_create_info.extent.width = 32;
+ image_create_info.extent.height = 32;
+ image_create_info.extent.depth = 1;
+ image_create_info.mipLevels = 1;
+ image_create_info.arrayLayers = 1;
+ image_create_info.samples = VK_SAMPLE_COUNT_2_BIT;
+ image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+ image_create_info.usage =
+ VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+ // Note: Some implementations expect color attachment usage for any
+ // multisample surface
+ image_create_info.flags = 0;
+ srcImage.init(&image_create_info);
+ ASSERT_TRUE(srcImage.initialized());
+
+ // Note: Some implementations expect color attachment usage for any
+ // multisample surface
+ image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+ dstImage.init(&image_create_info);
+ ASSERT_TRUE(dstImage.initialized());
+
+ m_commandBuffer->begin();
+ // source image must have valid contents before resolve
+ VkClearColorValue clear_color = {{0, 0, 0, 0}};
+ VkImageSubresourceRange subresource = {};
+ subresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ subresource.layerCount = 1;
+ subresource.levelCount = 1;
+ srcImage.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
+ m_commandBuffer->ClearColorImage(srcImage.image(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &clear_color, 1, &subresource);
+ srcImage.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
+ dstImage.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
+
+ VkImageResolve resolveRegion;
+ resolveRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ resolveRegion.srcSubresource.mipLevel = 0;
+ resolveRegion.srcSubresource.baseArrayLayer = 0;
+ resolveRegion.srcSubresource.layerCount = 1;
+ resolveRegion.srcOffset.x = 0;
+ resolveRegion.srcOffset.y = 0;
+ resolveRegion.srcOffset.z = 0;
+ resolveRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ resolveRegion.dstSubresource.mipLevel = 0;
+ resolveRegion.dstSubresource.baseArrayLayer = 0;
+ resolveRegion.dstSubresource.layerCount = 1;
+ resolveRegion.dstOffset.x = 0;
+ resolveRegion.dstOffset.y = 0;
+ resolveRegion.dstOffset.z = 0;
+ resolveRegion.extent.width = 1;
+ resolveRegion.extent.height = 1;
+ resolveRegion.extent.depth = 1;
+ // invalid source mip level
+ resolveRegion.srcSubresource.mipLevel = image_create_info.mipLevels;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdResolveImage-srcSubresource-01709");
+ m_commandBuffer->ResolveImage(srcImage.image(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, dstImage.image(),
+ VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &resolveRegion);
+ m_errorMonitor->VerifyFound();
+ resolveRegion.srcSubresource.mipLevel = 0;
+ // invalid dest mip level
+ resolveRegion.dstSubresource.mipLevel = image_create_info.mipLevels;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdResolveImage-dstSubresource-01710");
+ m_commandBuffer->ResolveImage(srcImage.image(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, dstImage.image(),
+ VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &resolveRegion);
+ m_errorMonitor->VerifyFound();
+ resolveRegion.dstSubresource.mipLevel = 0;
+ // invalid source array layer range
+ resolveRegion.srcSubresource.baseArrayLayer = image_create_info.arrayLayers;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdResolveImage-srcSubresource-01711");
+ m_commandBuffer->ResolveImage(srcImage.image(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, dstImage.image(),
+ VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &resolveRegion);
+ m_errorMonitor->VerifyFound();
+ resolveRegion.srcSubresource.baseArrayLayer = 0;
+ // invalid dest array layer range
+ resolveRegion.dstSubresource.baseArrayLayer = image_create_info.arrayLayers;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdResolveImage-dstSubresource-01712");
+ m_commandBuffer->ResolveImage(srcImage.image(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, dstImage.image(),
+ VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &resolveRegion);
+ m_errorMonitor->VerifyFound();
+ resolveRegion.dstSubresource.baseArrayLayer = 0;
+
+ m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, DepthStencilImageViewWithColorAspectBitError) {
+ // Create a single image descriptor and first make it hit an error by using a
+ // depth/stencil format, then make it hit an error because COLOR_BIT is not
+ // set in the aspect mask. The image format check comes second in validation,
+ // so we trigger it first; when the aspect failure is caused next, the bad
+ // format check will be preempted.
+ VkResult err;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "Combination depth/stencil image formats can have only the ");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ auto depth_format = FindSupportedDepthStencilFormat(gpu());
+ if (!depth_format) {
+ printf("%s Couldn't find depth stencil format.\n", kSkipPrefix);
+ return;
+ }
+
+ VkDescriptorPoolSize ds_type_count = {};
+ ds_type_count.type = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
+ ds_type_count.descriptorCount = 1;
+
+ VkDescriptorPoolCreateInfo ds_pool_ci = {};
+ ds_pool_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
+ ds_pool_ci.pNext = NULL;
+ ds_pool_ci.maxSets = 1;
+ ds_pool_ci.poolSizeCount = 1;
+ ds_pool_ci.pPoolSizes = &ds_type_count;
+
+ VkDescriptorPool ds_pool;
+ err = vkCreateDescriptorPool(m_device->device(), &ds_pool_ci, NULL, &ds_pool);
+ ASSERT_VK_SUCCESS(err);
+
+ VkDescriptorSetLayoutBinding dsl_binding = {};
+ dsl_binding.binding = 0;
+ dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
+ dsl_binding.descriptorCount = 1;
+ dsl_binding.stageFlags = VK_SHADER_STAGE_ALL;
+ dsl_binding.pImmutableSamplers = NULL;
+
+ const VkDescriptorSetLayoutObj ds_layout(m_device, {dsl_binding});
+
+ VkDescriptorSet descriptorSet;
+ VkDescriptorSetAllocateInfo alloc_info = {};
+ alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
+ alloc_info.descriptorSetCount = 1;
+ alloc_info.descriptorPool = ds_pool;
+ alloc_info.pSetLayouts = &ds_layout.handle();
+ err = vkAllocateDescriptorSets(m_device->device(), &alloc_info, &descriptorSet);
+ ASSERT_VK_SUCCESS(err);
+
+ VkImage image_bad;
+ VkImage image_good;
+ // One bad format and one good format for Color attachment
+ const VkFormat tex_format_bad = depth_format;
+ const VkFormat tex_format_good = VK_FORMAT_B8G8R8A8_UNORM;
+ const int32_t tex_width = 32;
+ const int32_t tex_height = 32;
+
+ VkImageCreateInfo image_create_info = {};
+ image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ image_create_info.pNext = NULL;
+ image_create_info.imageType = VK_IMAGE_TYPE_2D;
+ image_create_info.format = tex_format_bad;
+ image_create_info.extent.width = tex_width;
+ image_create_info.extent.height = tex_height;
+ image_create_info.extent.depth = 1;
+ image_create_info.mipLevels = 1;
+ image_create_info.arrayLayers = 1;
+ image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+ image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+ image_create_info.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
+ image_create_info.flags = 0;
+
+ err = vkCreateImage(m_device->device(), &image_create_info, NULL, &image_bad);
+ ASSERT_VK_SUCCESS(err);
+ image_create_info.format = tex_format_good;
+ image_create_info.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+ err = vkCreateImage(m_device->device(), &image_create_info, NULL, &image_good);
+ ASSERT_VK_SUCCESS(err);
+
+ // ---Bind image memory---
+ VkMemoryRequirements img_mem_reqs;
+ vkGetImageMemoryRequirements(m_device->device(), image_bad, &img_mem_reqs);
+ VkMemoryAllocateInfo image_alloc_info = {};
+ image_alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ image_alloc_info.pNext = NULL;
+ image_alloc_info.memoryTypeIndex = 0;
+ image_alloc_info.allocationSize = img_mem_reqs.size;
+ bool pass =
+ m_device->phy().set_memory_type(img_mem_reqs.memoryTypeBits, &image_alloc_info, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
+ ASSERT_TRUE(pass);
+ VkDeviceMemory mem;
+ err = vkAllocateMemory(m_device->device(), &image_alloc_info, NULL, &mem);
+ ASSERT_VK_SUCCESS(err);
+ err = vkBindImageMemory(m_device->device(), image_bad, mem, 0);
+ ASSERT_VK_SUCCESS(err);
+ // -----------------------
+
+ VkImageViewCreateInfo image_view_create_info = {};
+ image_view_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+ image_view_create_info.image = image_bad;
+ image_view_create_info.viewType = VK_IMAGE_VIEW_TYPE_2D;
+ image_view_create_info.format = tex_format_bad;
+ image_view_create_info.subresourceRange.baseArrayLayer = 0;
+ image_view_create_info.subresourceRange.baseMipLevel = 0;
+ image_view_create_info.subresourceRange.layerCount = 1;
+ image_view_create_info.subresourceRange.levelCount = 1;
+ image_view_create_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT;
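+ // Adding COLOR_BIT to the aspect mask of a combined depth/stencil view is invalid and triggers the expected error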
+
+ VkImageView view;
+ err = vkCreateImageView(m_device->device(), &image_view_create_info, NULL, &view);
+
+ m_errorMonitor->VerifyFound();
+
+ vkDestroyImage(m_device->device(), image_bad, NULL);
+ vkDestroyImage(m_device->device(), image_good, NULL);
+ vkDestroyDescriptorPool(m_device->device(), ds_pool, NULL);
+
+ vkFreeMemory(m_device->device(), mem, NULL);
+}
+
+TEST_F(VkLayerTest, ClearImageErrors) {
+ TEST_DESCRIPTION("Call ClearColorImage w/ a depth|stencil image and ClearDepthStencilImage with a color image.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ m_commandBuffer->begin();
+
+ // Color image
+ VkClearColorValue clear_color;
+ memset(clear_color.uint32, 0, sizeof(uint32_t) * 4);
+ const VkFormat color_format = VK_FORMAT_B8G8R8A8_UNORM;
+ const int32_t img_width = 32;
+ const int32_t img_height = 32;
+ VkImageCreateInfo image_create_info = {};
+ image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ image_create_info.pNext = NULL;
+ image_create_info.imageType = VK_IMAGE_TYPE_2D;
+ image_create_info.format = color_format;
+ image_create_info.extent.width = img_width;
+ image_create_info.extent.height = img_height;
+ image_create_info.extent.depth = 1;
+ image_create_info.mipLevels = 1;
+ image_create_info.arrayLayers = 1;
+ image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+ image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+
+ image_create_info.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
+ vk_testing::Image color_image_no_transfer;
+ color_image_no_transfer.init(*m_device, image_create_info);
+
+ image_create_info.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+ vk_testing::Image color_image;
+ color_image.init(*m_device, image_create_info);
+
+ const VkImageSubresourceRange color_range = vk_testing::Image::subresource_range(image_create_info, VK_IMAGE_ASPECT_COLOR_BIT);
+
+ // Depth/Stencil image
+ VkClearDepthStencilValue clear_value = {0};
+ VkImageCreateInfo ds_image_create_info = vk_testing::Image::create_info();
+ ds_image_create_info.imageType = VK_IMAGE_TYPE_2D;
+ ds_image_create_info.format = VK_FORMAT_D16_UNORM;
+ ds_image_create_info.extent.width = 64;
+ ds_image_create_info.extent.height = 64;
+ ds_image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+ ds_image_create_info.usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+
+ vk_testing::Image ds_image;
+ ds_image.init(*m_device, ds_image_create_info);
+
+ const VkImageSubresourceRange ds_range = vk_testing::Image::subresource_range(ds_image_create_info, VK_IMAGE_ASPECT_DEPTH_BIT);
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "vkCmdClearColorImage called with depth/stencil image.");
+
+ vkCmdClearColorImage(m_commandBuffer->handle(), ds_image.handle(), VK_IMAGE_LAYOUT_GENERAL, &clear_color, 1, &color_range);
+
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "vkCmdClearColorImage called with image created without VK_IMAGE_USAGE_TRANSFER_DST_BIT");
+
+ vkCmdClearColorImage(m_commandBuffer->handle(), color_image_no_transfer.handle(), VK_IMAGE_LAYOUT_GENERAL, &clear_color, 1,
+ &color_range);
+
+ m_errorMonitor->VerifyFound();
+
+ // Call CmdClearDepthStencilImage with color image
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "vkCmdClearDepthStencilImage called without a depth/stencil image.");
+
+ vkCmdClearDepthStencilImage(m_commandBuffer->handle(), color_image.handle(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &clear_value,
+ 1, &ds_range);
+
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CommandQueueFlags) {
+ TEST_DESCRIPTION(
+ "Allocate a command buffer on a queue that does not support graphics and try to issue a graphics-only command");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ uint32_t queueFamilyIndex = m_device->QueueFamilyWithoutCapabilities(VK_QUEUE_GRAPHICS_BIT);
+ if (queueFamilyIndex == UINT32_MAX) {
+ printf("%s Non-graphics queue family not found; skipped.\n", kSkipPrefix);
+ return;
+ } else {
+ // Create command pool on a non-graphics queue
+ VkCommandPoolObj command_pool(m_device, queueFamilyIndex);
+
+ // Setup command buffer on pool
+ VkCommandBufferObj command_buffer(m_device, &command_pool);
+ command_buffer.begin();
+
+ // Issue a graphics only command
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-commandBuffer-cmdpool");
+ VkViewport viewport = {0, 0, 16, 16, 0, 1};
+ command_buffer.SetViewport(0, 1, &viewport);
+ m_errorMonitor->VerifyFound();
+ }
+}
+
+TEST_F(VkLayerTest, ExecuteUnrecordedSecondaryCB) {
+ TEST_DESCRIPTION("Attempt vkCmdExecuteCommands with a CB in the initial state");
+ ASSERT_NO_FATAL_FAILURE(Init());
+ VkCommandBufferObj secondary(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
+ // never record secondary
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdExecuteCommands-pCommandBuffers-00089");
+ m_commandBuffer->begin();
+ vkCmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary.handle());
+ m_errorMonitor->VerifyFound();
+ m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, ExecuteUnrecordedPrimaryCB) {
+ TEST_DESCRIPTION("Attempt vkQueueSubmit with a CB in the initial state");
+ ASSERT_NO_FATAL_FAILURE(Init());
+ // never record m_commandBuffer
+
+ VkSubmitInfo si = {};
+ si.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ si.commandBufferCount = 1;
+ si.pCommandBuffers = &m_commandBuffer->handle();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkQueueSubmit-pCommandBuffers-00072");
+ vkQueueSubmit(m_device->m_queue, 1, &si, VK_NULL_HANDLE);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, ExecuteSecondaryCBWithLayoutMismatch) {
+ TEST_DESCRIPTION("Attempt vkCmdExecuteCommands with a CB with incorrect initial layout.");
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+
+ VkImageCreateInfo image_create_info = {};
+ image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ image_create_info.pNext = NULL;
+ image_create_info.imageType = VK_IMAGE_TYPE_2D;
+ image_create_info.format = VK_FORMAT_B8G8R8A8_UNORM;
+ image_create_info.extent.width = 32;
+ image_create_info.extent.height = 1;
+ image_create_info.extent.depth = 1;
+ image_create_info.mipLevels = 1;
+ image_create_info.arrayLayers = 1;
+ image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+ image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+ image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+ image_create_info.flags = 0;
+
+ VkImageSubresource image_sub = VkImageObj::subresource(VK_IMAGE_ASPECT_COLOR_BIT, 0, 0);
+ VkImageSubresourceRange image_sub_range = VkImageObj::subresource_range(image_sub);
+
+ VkImageObj image(m_device);
+ image.init(&image_create_info);
+ ASSERT_TRUE(image.initialized());
+ VkImageMemoryBarrier image_barrier =
+ image.image_memory_barrier(0, 0, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, image_sub_range);
+
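+ // Helper that records an image layout transition barrier with the given old/new layouts into the supplied command buffer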
+ auto pipeline = [&image_barrier](const VkCommandBufferObj &cb, VkImageLayout old_layout, VkImageLayout new_layout) {
+ image_barrier.oldLayout = old_layout;
+ image_barrier.newLayout = new_layout;
+ vkCmdPipelineBarrier(cb.handle(), VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0, nullptr, 0,
+ nullptr, 1, &image_barrier);
+ };
+
+ // Validate that mismatched use of image layout in secondary command buffer is caught at record time
+ VkCommandBufferObj secondary(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
+ secondary.begin();
+ pipeline(secondary, VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
+ secondary.end();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "UNASSIGNED-vkCmdExecuteCommands-commandBuffer-00001");
+ m_commandBuffer->begin();
+ pipeline(*m_commandBuffer, VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
+ vkCmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary.handle());
+ m_errorMonitor->VerifyFound();
+
+ // Validate that we've tracked the changes from the secondary CB correctly
+ m_errorMonitor->ExpectSuccess();
+ pipeline(*m_commandBuffer, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL);
+ m_errorMonitor->VerifyNotFound();
+ m_commandBuffer->end();
+
+ m_commandBuffer->reset();
+ secondary.reset();
+
+ // Validate that an UNDEFINED initial layout doesn't produce a false positive
+ secondary.begin();
+ pipeline(secondary, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
+ secondary.end();
+ m_commandBuffer->begin();
+ pipeline(*m_commandBuffer, VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
+ m_errorMonitor->ExpectSuccess();
+ vkCmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary.handle());
+ m_errorMonitor->VerifyNotFound();
+ m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, ExtensionNotEnabled) {
+ TEST_DESCRIPTION("Validate that using an API from an unenabled extension returns an error");
+
+ if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ } else {
+ printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
+ VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+ // Required extensions except VK_KHR_GET_MEMORY_REQUIREMENTS_2 -- to create the needed error
+ std::vector<const char *> required_device_extensions = {VK_KHR_MAINTENANCE1_EXTENSION_NAME, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME,
+ VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME};
+ for (auto dev_ext : required_device_extensions) {
+ if (DeviceExtensionSupported(gpu(), nullptr, dev_ext)) {
+ m_device_extension_names.push_back(dev_ext);
+ } else {
+ printf("%s Did not find required device extension %s; skipped.\n", kSkipPrefix, dev_ext);
+ break;
+ }
+ }
+
+ // Need to ignore this error to get to the one we're testing
+ m_errorMonitor->SetUnexpectedError("VUID-vkCreateDevice-ppEnabledExtensionNames-01387");
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ // Find address of extension API
+ auto vkCreateSamplerYcbcrConversionKHR =
+ (PFN_vkCreateSamplerYcbcrConversionKHR)vkGetDeviceProcAddr(m_device->handle(), "vkCreateSamplerYcbcrConversionKHR");
+ if (vkCreateSamplerYcbcrConversionKHR == nullptr) {
+ printf("%s VK_KHR_sampler_ycbcr_conversion not supported by device; skipped.\n", kSkipPrefix);
+ return;
+ }
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "UNASSIGNED-GeneralParameterError-ExtensionNotEnabled");
+ VkSamplerYcbcrConversionCreateInfo ycbcr_info = {VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO,
+ NULL,
+ VK_FORMAT_UNDEFINED,
+ VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY,
+ VK_SAMPLER_YCBCR_RANGE_ITU_FULL,
+ {VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY,
+ VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY},
+ VK_CHROMA_LOCATION_COSITED_EVEN,
+ VK_CHROMA_LOCATION_COSITED_EVEN,
+ VK_FILTER_NEAREST,
+ false};
+ VkSamplerYcbcrConversion conversion;
+ vkCreateSamplerYcbcrConversionKHR(m_device->handle(), &ycbcr_info, nullptr, &conversion);
+ m_errorMonitor->VerifyFound();
+}
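+
+// Illustrative sketch (hypothetical list, not a helper used by the tests): the device-extension set a
+// correctly written application would enable before calling vkCreateSamplerYcbcrConversionKHR; the test
+// above deliberately leaves VK_KHR_get_memory_requirements2 out of its own list to trigger the error.
+static const std::vector<const char *> kYcbcrDeviceExtensionsSketch = {
+    VK_KHR_MAINTENANCE1_EXTENSION_NAME, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME,
+    VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME};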
+
+TEST_F(VkLayerTest, Maintenance1AndNegativeViewport) {
+ TEST_DESCRIPTION("Attempt to enable AMD_negative_viewport_height and Maintenance1_KHR extension simultaneously");
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ if (!((DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME)) &&
+ (DeviceExtensionSupported(gpu(), nullptr, VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_EXTENSION_NAME)))) {
+ printf("%s Maintenance1 and AMD_negative viewport height extensions not supported, skipping test\n", kSkipPrefix);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ vk_testing::QueueCreateInfoArray queue_info(m_device->queue_props);
+ const char *extension_names[2] = {"VK_KHR_maintenance1", "VK_AMD_negative_viewport_height"};
+ VkDevice testDevice;
+ VkDeviceCreateInfo device_create_info = {};
+ auto features = m_device->phy().features();
+ device_create_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
+ device_create_info.pNext = NULL;
+ device_create_info.queueCreateInfoCount = queue_info.size();
+ device_create_info.pQueueCreateInfos = queue_info.data();
+ device_create_info.enabledLayerCount = 0;
+ device_create_info.ppEnabledLayerNames = NULL;
+ device_create_info.enabledExtensionCount = 2;
+ device_create_info.ppEnabledExtensionNames = (const char *const *)extension_names;
+ device_create_info.pEnabledFeatures = &features;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDeviceCreateInfo-ppEnabledExtensionNames-00374");
+ // The following unexpected error is coming from the LunarG loader. Do not make it a desired message because platforms that do
+ // not use the LunarG loader (e.g. Android) will not see the message and the test will fail.
+ m_errorMonitor->SetUnexpectedError("Failed to create device chain.");
+ vkCreateDevice(gpu(), &device_create_info, NULL, &testDevice);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, InvalidCreateDescriptorPool) {
+ TEST_DESCRIPTION("Attempt to create descriptor pool with invalid parameters");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ const uint32_t default_descriptor_count = 1;
+ const VkDescriptorPoolSize dp_size_template{VK_DESCRIPTOR_TYPE_SAMPLER, default_descriptor_count};
+
+ const VkDescriptorPoolCreateInfo dp_ci_template{VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
+ nullptr, // pNext
+ 0, // flags
+ 1, // maxSets
+ 1, // poolSizeCount
+ &dp_size_template};
+
+ // try maxSets = 0
+ {
+ VkDescriptorPoolCreateInfo invalid_dp_ci = dp_ci_template;
+ invalid_dp_ci.maxSets = 0; // invalid maxSets value
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDescriptorPoolCreateInfo-maxSets-00301");
+ {
+ VkDescriptorPool pool;
+ vkCreateDescriptorPool(m_device->device(), &invalid_dp_ci, nullptr, &pool);
+ }
+ m_errorMonitor->VerifyFound();
+ }
+
+ // try descriptorCount = 0
+ {
+ VkDescriptorPoolSize invalid_dp_size = dp_size_template;
+ invalid_dp_size.descriptorCount = 0; // invalid descriptorCount value
+
+ VkDescriptorPoolCreateInfo dp_ci = dp_ci_template;
+ dp_ci.pPoolSizes = &invalid_dp_size;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDescriptorPoolSize-descriptorCount-00302");
+ {
+ VkDescriptorPool pool;
+ vkCreateDescriptorPool(m_device->device(), &dp_ci, nullptr, &pool);
+ }
+ m_errorMonitor->VerifyFound();
+ }
+}
+
+TEST_F(VkLayerTest, InvalidCreateBufferSize) {
+ TEST_DESCRIPTION("Attempt to create VkBuffer with size of zero");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ VkBufferCreateInfo info = {};
+ info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+ info.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferCreateInfo-size-00912");
+ info.size = 0;
+ VkBuffer buffer;
+ vkCreateBuffer(m_device->device(), &info, nullptr, &buffer);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, SetDynViewportParamTests) {
+ TEST_DESCRIPTION("Test parameters of vkCmdSetViewport without multiViewport feature");
+
+ SetTargetApiVersion(VK_API_VERSION_1_1);
+ VkPhysicalDeviceFeatures features{};
+ ASSERT_NO_FATAL_FAILURE(Init(&features));
+
+ const VkViewport vp = {0.0, 0.0, 64.0, 64.0, 0.0, 1.0};
+ const VkViewport viewports[] = {vp, vp};
+
+ m_commandBuffer->begin();
+
+ // array tests
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-firstViewport-01224");
+ vkCmdSetViewport(m_commandBuffer->handle(), 1, 1, viewports);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-viewportCount-arraylength");
+ vkCmdSetViewport(m_commandBuffer->handle(), 0, 0, nullptr);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-viewportCount-01225");
+ vkCmdSetViewport(m_commandBuffer->handle(), 0, 2, viewports);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-firstViewport-01224");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-viewportCount-arraylength");
+ vkCmdSetViewport(m_commandBuffer->handle(), 1, 0, viewports);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-firstViewport-01224");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-viewportCount-01225");
+ vkCmdSetViewport(m_commandBuffer->handle(), 1, 2, viewports);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-pViewports-parameter");
+ vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, nullptr);
+ m_errorMonitor->VerifyFound();
+
+ // core viewport tests
+ using std::vector;
+ struct TestCase {
+ VkViewport vp;
+ std::string veid;
+ };
+
+ // Not necessarily exact boundary values (cast rounding is unspecified), but guaranteed to be over the limit
+ const auto one_past_max_w = NearestGreater(static_cast<float>(m_device->props.limits.maxViewportDimensions[0]));
+ const auto one_past_max_h = NearestGreater(static_cast<float>(m_device->props.limits.maxViewportDimensions[1]));
+
+ const auto min_bound = m_device->props.limits.viewportBoundsRange[0];
+ const auto max_bound = m_device->props.limits.viewportBoundsRange[1];
+ const auto one_before_min_bounds = NearestSmaller(min_bound);
+ const auto one_past_max_bounds = NearestGreater(max_bound);
+
+ const auto below_zero = NearestSmaller(0.0f);
+ const auto past_one = NearestGreater(1.0f);
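+
+ // Illustrative sketch (assumption -- the framework's NearestGreater/NearestSmaller may be implemented
+ // differently): stepping one representable float past a bound with std::nextafter guarantees an
+ // out-of-range value without overshooting (assumes <cmath> and <limits> are available in this file).
+ const auto nearest_greater_sketch = [](float v) { return std::nextafter(v, std::numeric_limits<float>::max()); };
+ const auto nearest_smaller_sketch = [](float v) { return std::nextafter(v, std::numeric_limits<float>::lowest()); };
+ (void)nearest_greater_sketch;
+ (void)nearest_smaller_sketch;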
+
+ vector<TestCase> test_cases = {
+ {{0.0, 0.0, 0.0, 64.0, 0.0, 1.0}, "VUID-VkViewport-width-01770"},
+ {{0.0, 0.0, one_past_max_w, 64.0, 0.0, 1.0}, "VUID-VkViewport-width-01771"},
+ {{0.0, 0.0, NAN, 64.0, 0.0, 1.0}, "VUID-VkViewport-width-01770"},
+ {{0.0, 0.0, 64.0, one_past_max_h, 0.0, 1.0}, "VUID-VkViewport-height-01773"},
+ {{one_before_min_bounds, 0.0, 64.0, 64.0, 0.0, 1.0}, "VUID-VkViewport-x-01774"},
+ {{one_past_max_bounds, 0.0, 64.0, 64.0, 0.0, 1.0}, "VUID-VkViewport-x-01232"},
+ {{NAN, 0.0, 64.0, 64.0, 0.0, 1.0}, "VUID-VkViewport-x-01774"},
+ {{0.0, one_before_min_bounds, 64.0, 64.0, 0.0, 1.0}, "VUID-VkViewport-y-01775"},
+ {{0.0, NAN, 64.0, 64.0, 0.0, 1.0}, "VUID-VkViewport-y-01775"},
+ {{max_bound, 0.0, 1.0, 64.0, 0.0, 1.0}, "VUID-VkViewport-x-01232"},
+ {{0.0, max_bound, 64.0, 1.0, 0.0, 1.0}, "VUID-VkViewport-y-01233"},
+ {{0.0, 0.0, 64.0, 64.0, below_zero, 1.0}, "VUID-VkViewport-minDepth-01234"},
+ {{0.0, 0.0, 64.0, 64.0, past_one, 1.0}, "VUID-VkViewport-minDepth-01234"},
+ {{0.0, 0.0, 64.0, 64.0, NAN, 1.0}, "VUID-VkViewport-minDepth-01234"},
+ {{0.0, 0.0, 64.0, 64.0, 0.0, below_zero}, "VUID-VkViewport-maxDepth-01235"},
+ {{0.0, 0.0, 64.0, 64.0, 0.0, past_one}, "VUID-VkViewport-maxDepth-01235"},
+ {{0.0, 0.0, 64.0, 64.0, 0.0, NAN}, "VUID-VkViewport-maxDepth-01235"},
+ };
+
+ if (DeviceValidationVersion() < VK_API_VERSION_1_1) {
+ test_cases.push_back({{0.0, 0.0, 64.0, 0.0, 0.0, 1.0}, "VUID-VkViewport-height-01772"});
+ test_cases.push_back({{0.0, 0.0, 64.0, NAN, 0.0, 1.0}, "VUID-VkViewport-height-01772"});
+ } else {
+ test_cases.push_back({{0.0, 0.0, 64.0, NAN, 0.0, 1.0}, "VUID-VkViewport-height-01773"});
+ }
+
+ for (const auto &test_case : test_cases) {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.veid);
+ vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &test_case.vp);
+ m_errorMonitor->VerifyFound();
+ }
+}
+
void NegHeightViewportTests(VkDeviceObj *m_device, VkCommandBufferObj *m_commandBuffer, ErrorMonitor *m_errorMonitor) {
const auto &limits = m_device->props.limits;
@@ -439,1327 +27413,8814 @@ void NegHeightViewportTests(VkDeviceObj *m_device, VkCommandBufferObj *m_command
}
}
-void CreateSamplerTest(VkLayerTest &test, const VkSamplerCreateInfo *pCreateInfo, std::string code) {
- VkResult err;
- VkSampler sampler = VK_NULL_HANDLE;
- if (code.length())
- test.Monitor()->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT, code);
- else
- test.Monitor()->ExpectSuccess();
+TEST_F(VkLayerTest, SetDynViewportParamMaintenance1Tests) {
+ TEST_DESCRIPTION("Verify errors are detected on misuse of SetViewport with a negative viewport extension enabled.");
- err = vkCreateSampler(test.device(), pCreateInfo, NULL, &sampler);
- if (code.length())
- test.Monitor()->VerifyFound();
- else
- test.Monitor()->VerifyNotFound();
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- if (VK_SUCCESS == err) {
- vkDestroySampler(test.device(), sampler, NULL);
+ if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME)) {
+ m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+ } else {
+ printf("%s VK_KHR_maintenance1 extension not supported -- skipping test\n", kSkipPrefix);
+ return;
}
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ NegHeightViewportTests(m_device, m_commandBuffer, m_errorMonitor);
}
-void CreateBufferTest(VkLayerTest &test, const VkBufferCreateInfo *pCreateInfo, std::string code) {
- VkResult err;
- VkBuffer buffer = VK_NULL_HANDLE;
- if (code.length())
- test.Monitor()->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, code);
- else
- test.Monitor()->ExpectSuccess();
+TEST_F(VkLayerTest, SetDynViewportParamMultiviewportTests) {
+ TEST_DESCRIPTION("Test parameters of vkCmdSetViewport with multiViewport feature enabled");
- err = vkCreateBuffer(test.device(), pCreateInfo, NULL, &buffer);
- if (code.length())
- test.Monitor()->VerifyFound();
- else
- test.Monitor()->VerifyNotFound();
+ ASSERT_NO_FATAL_FAILURE(Init());
- if (VK_SUCCESS == err) {
- vkDestroyBuffer(test.device(), buffer, NULL);
+ if (!m_device->phy().features().multiViewport) {
+ printf("%s VkPhysicalDeviceFeatures::multiViewport is not supported -- skipping test.\n", kSkipPrefix);
+ return;
}
+
+ const auto max_viewports = m_device->props.limits.maxViewports;
+ const uint32_t too_many_viewports = 65536 + 1; // let's say this is too much to allocate pViewports for
+
+ m_commandBuffer->begin();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-viewportCount-arraylength");
+ vkCmdSetViewport(m_commandBuffer->handle(), 0, 0, nullptr);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-pViewports-parameter");
+ vkCmdSetViewport(m_commandBuffer->handle(), 0, max_viewports, nullptr);
+ m_errorMonitor->VerifyFound();
+
+ if (max_viewports >= too_many_viewports) {
+ printf("%s VkPhysicalDeviceLimits::maxViewports is too large to practically test against -- skipping part of test.\n",
+ kSkipPrefix);
+ return;
+ }
+
+ const VkViewport vp = {0.0, 0.0, 64.0, 64.0, 0.0, 1.0};
+ const std::vector<VkViewport> viewports(max_viewports + 1, vp);
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-firstViewport-01223");
+ vkCmdSetViewport(m_commandBuffer->handle(), 0, max_viewports + 1, viewports.data());
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-firstViewport-01223");
+ vkCmdSetViewport(m_commandBuffer->handle(), max_viewports, 1, viewports.data());
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-firstViewport-01223");
+ vkCmdSetViewport(m_commandBuffer->handle(), 1, max_viewports, viewports.data());
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-viewportCount-arraylength");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-firstViewport-01223");
+ vkCmdSetViewport(m_commandBuffer->handle(), max_viewports + 1, 0, viewports.data());
+ m_errorMonitor->VerifyFound();
}
-void CreateImageTest(VkLayerTest &test, const VkImageCreateInfo *pCreateInfo, std::string code) {
- VkResult err;
- VkImage image = VK_NULL_HANDLE;
- if (code.length())
- test.Monitor()->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, code);
- else
- test.Monitor()->ExpectSuccess();
+//
+// POSITIVE VALIDATION TESTS
+//
+// These tests do not expect to encounter ANY validation errors; they pass only if none are generated
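+//
+// A minimal sketch of the pattern each positive test below follows (illustrative only):
+//
+//     m_errorMonitor->ExpectSuccess();   // arm the monitor; any error or warning now fails the test
+//     /* record and/or submit the workload under test */
+//     m_errorMonitor->VerifyNotFound();  // assert that no validation message was emitted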
- err = vkCreateImage(test.device(), pCreateInfo, NULL, &image);
- if (code.length())
- test.Monitor()->VerifyFound();
- else
- test.Monitor()->VerifyNotFound();
+TEST_F(VkPositiveLayerTest, PointSizeWriteInFunction) {
+ TEST_DESCRIPTION("Create a pipeline using TOPOLOGY_POINT_LIST and write PointSize in vertex shader function.");
- if (VK_SUCCESS == err) {
- vkDestroyImage(test.device(), image, NULL);
+ ASSERT_NO_FATAL_FAILURE(Init());
+ m_errorMonitor->ExpectSuccess();
+
+ ASSERT_NO_FATAL_FAILURE(InitViewport());
+
+ // Create VS declaring PointSize and write to it in a function call.
+ static const char PointSizeWriteVertShaderFcn[] =
+ "#version 450\n"
+ "vec2 vertices[3];\n"
+ "out gl_PerVertex\n"
+ "{\n"
+ " vec4 gl_Position;\n"
+ " float gl_PointSize;\n"
+ "};\n"
+ "void OutPointSize() {\n"
+ " gl_PointSize = 7.0;\n"
+ "}\n"
+ "void main() {\n"
+ " vertices[0] = vec2(-1.0, -1.0);\n"
+ " vertices[1] = vec2( 1.0, -1.0);\n"
+ " vertices[2] = vec2( 0.0, 1.0);\n"
+ " gl_Position = vec4(vertices[gl_VertexIndex % 3], 0.0, 1.0);\n"
+ " OutPointSize();\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, PointSizeWriteVertShaderFcn, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj ps(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ {
+ VkPipelineObj pipelineobj(m_device);
+ pipelineobj.AddDefaultColorAttachment();
+ pipelineobj.AddShader(&vs);
+ pipelineobj.AddShader(&ps);
+
+ // Set Input Assembly to TOPOLOGY POINT LIST
+ VkPipelineInputAssemblyStateCreateInfo ia_state = {};
+ ia_state.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
+ ia_state.topology = VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
+ pipelineobj.SetInputAssembly(&ia_state);
+
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+ m_commandBuffer->begin();
+ m_commandBuffer->ClearAllBuffers(m_renderTargets, m_clear_color, m_depthStencil, m_depth_clear_color,
+ m_stencil_clear_color);
+ m_commandBuffer->PrepareAttachments(m_renderTargets, m_depthStencil);
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+ pipelineobj.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
}
+ m_errorMonitor->VerifyNotFound();
}
-void CreateBufferViewTest(VkLayerTest &test, const VkBufferViewCreateInfo *pCreateInfo, const std::vector<std::string> &codes) {
- VkResult err;
- VkBufferView view = VK_NULL_HANDLE;
- if (codes.size())
- std::for_each(codes.begin(), codes.end(),
- [&](const std::string &s) { test.Monitor()->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, s); });
- else
- test.Monitor()->ExpectSuccess();
+TEST_F(VkPositiveLayerTest, PointSizeGeomShaderSuccess) {
+ TEST_DESCRIPTION(
+ "Create a pipeline using TOPOLOGY_POINT_LIST, set PointSize vertex shader, and write in the final geometry stage.");
- err = vkCreateBufferView(test.device(), pCreateInfo, NULL, &view);
- if (codes.size())
- test.Monitor()->VerifyFound();
- else
- test.Monitor()->VerifyNotFound();
+ ASSERT_NO_FATAL_FAILURE(Init());
+ m_errorMonitor->ExpectSuccess();
- if (VK_SUCCESS == err) {
- vkDestroyBufferView(test.device(), view, NULL);
+ if ((!m_device->phy().features().geometryShader) || (!m_device->phy().features().shaderTessellationAndGeometryPointSize)) {
+ printf("%s Device does not support the required geometry shader features; skipped.\n", kSkipPrefix);
+ return;
}
+
+ ASSERT_NO_FATAL_FAILURE(InitViewport());
+
+ // Create VS declaring PointSize and writing to it
+ static const char PointSizeVertShader[] =
+ "#version 450\n"
+ "vec2 vertices[3];\n"
+ "out gl_PerVertex\n"
+ "{\n"
+ " vec4 gl_Position;\n"
+ " float gl_PointSize;\n"
+ "};\n"
+ "void main() {\n"
+ " vertices[0] = vec2(-1.0, -1.0);\n"
+ " vertices[1] = vec2( 1.0, -1.0);\n"
+ " vertices[2] = vec2( 0.0, 1.0);\n"
+ " gl_Position = vec4(vertices[gl_VertexIndex % 3], 0.0, 1.0);\n"
+ " gl_PointSize = 5.0;\n"
+ "}\n";
+ static char const *gsSource =
+ "#version 450\n"
+ "layout (points) in;\n"
+ "layout (points) out;\n"
+ "layout (max_vertices = 1) out;\n"
+ "void main() {\n"
+ " gl_Position = vec4(1.0, 0.5, 0.5, 0.0);\n"
+ " gl_PointSize = 3.3;\n"
+ " EmitVertex();\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, PointSizeVertShader, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj gs(m_device, gsSource, VK_SHADER_STAGE_GEOMETRY_BIT, this);
+ VkShaderObj ps(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipelineobj(m_device);
+ pipelineobj.AddDefaultColorAttachment();
+ pipelineobj.AddShader(&vs);
+ pipelineobj.AddShader(&gs);
+ pipelineobj.AddShader(&ps);
+
+ // Set Input Assembly to TOPOLOGY POINT LIST
+ VkPipelineInputAssemblyStateCreateInfo ia_state = {};
+ ia_state.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
+ ia_state.topology = VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
+ pipelineobj.SetInputAssembly(&ia_state);
+
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+ m_commandBuffer->begin();
+ m_commandBuffer->ClearAllBuffers(m_renderTargets, m_clear_color, m_depthStencil, m_depth_clear_color, m_stencil_clear_color);
+ m_commandBuffer->PrepareAttachments(m_renderTargets, m_depthStencil);
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+ pipelineobj.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+ m_errorMonitor->VerifyNotFound();
}
-void CreateImageViewTest(VkLayerTest &test, const VkImageViewCreateInfo *pCreateInfo, std::string code) {
- VkResult err;
- VkImageView view = VK_NULL_HANDLE;
- if (code.length())
- test.Monitor()->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, code);
- else
- test.Monitor()->ExpectSuccess();
+TEST_F(VkPositiveLayerTest, LoosePointSizeWrite) {
+ TEST_DESCRIPTION("Create a pipeline using TOPOLOGY_POINT_LIST and write PointSize outside of a structure.");
- err = vkCreateImageView(test.device(), pCreateInfo, NULL, &view);
- if (code.length())
- test.Monitor()->VerifyFound();
- else
- test.Monitor()->VerifyNotFound();
+ ASSERT_NO_FATAL_FAILURE(Init());
+ m_errorMonitor->ExpectSuccess();
- if (VK_SUCCESS == err) {
- vkDestroyImageView(test.device(), view, NULL);
+ ASSERT_NO_FATAL_FAILURE(InitViewport());
+
+ const std::string LoosePointSizeWrite = R"(
+ OpCapability Shader
+ %1 = OpExtInstImport "GLSL.std.450"
+ OpMemoryModel Logical GLSL450
+ OpEntryPoint Vertex %main "main" %glposition %glpointsize %gl_VertexIndex
+ OpSource GLSL 450
+ OpName %main "main"
+ OpName %vertices "vertices"
+ OpName %glposition "glposition"
+ OpName %glpointsize "glpointsize"
+ OpName %gl_VertexIndex "gl_VertexIndex"
+ OpDecorate %glposition BuiltIn Position
+ OpDecorate %glpointsize BuiltIn PointSize
+ OpDecorate %gl_VertexIndex BuiltIn VertexIndex
+ %void = OpTypeVoid
+ %3 = OpTypeFunction %void
+ %float = OpTypeFloat 32
+ %v2float = OpTypeVector %float 2
+ %uint = OpTypeInt 32 0
+ %uint_3 = OpConstant %uint 3
+ %_arr_v2float_uint_3 = OpTypeArray %v2float %uint_3
+ %_ptr_Private__arr_v2float_uint_3 = OpTypePointer Private %_arr_v2float_uint_3
+ %vertices = OpVariable %_ptr_Private__arr_v2float_uint_3 Private
+ %int = OpTypeInt 32 1
+ %int_0 = OpConstant %int 0
+ %float_n1 = OpConstant %float -1
+ %16 = OpConstantComposite %v2float %float_n1 %float_n1
+ %_ptr_Private_v2float = OpTypePointer Private %v2float
+ %int_1 = OpConstant %int 1
+ %float_1 = OpConstant %float 1
+ %21 = OpConstantComposite %v2float %float_1 %float_n1
+ %int_2 = OpConstant %int 2
+ %float_0 = OpConstant %float 0
+ %25 = OpConstantComposite %v2float %float_0 %float_1
+ %v4float = OpTypeVector %float 4
+ %_ptr_Output_gl_Position = OpTypePointer Output %v4float
+ %glposition = OpVariable %_ptr_Output_gl_Position Output
+ %_ptr_Output_gl_PointSize = OpTypePointer Output %float
+ %glpointsize = OpVariable %_ptr_Output_gl_PointSize Output
+ %_ptr_Input_int = OpTypePointer Input %int
+ %gl_VertexIndex = OpVariable %_ptr_Input_int Input
+ %int_3 = OpConstant %int 3
+ %_ptr_Output_v4float = OpTypePointer Output %v4float
+ %_ptr_Output_float = OpTypePointer Output %float
+ %main = OpFunction %void None %3
+ %5 = OpLabel
+ %18 = OpAccessChain %_ptr_Private_v2float %vertices %int_0
+ OpStore %18 %16
+ %22 = OpAccessChain %_ptr_Private_v2float %vertices %int_1
+ OpStore %22 %21
+ %26 = OpAccessChain %_ptr_Private_v2float %vertices %int_2
+ OpStore %26 %25
+ %33 = OpLoad %int %gl_VertexIndex
+ %35 = OpSMod %int %33 %int_3
+ %36 = OpAccessChain %_ptr_Private_v2float %vertices %35
+ %37 = OpLoad %v2float %36
+ %38 = OpCompositeExtract %float %37 0
+ %39 = OpCompositeExtract %float %37 1
+ %40 = OpCompositeConstruct %v4float %38 %39 %float_0 %float_1
+ %42 = OpAccessChain %_ptr_Output_v4float %glposition
+ OpStore %42 %40
+ OpStore %glpointsize %float_1
+ OpReturn
+ OpFunctionEnd
+ )";
+
+ // Create a VS that declares PointSize as a loose builtin (outside a block) and writes to it directly.
+ VkShaderObj vs(m_device, LoosePointSizeWrite, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj ps(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ {
+ VkPipelineObj pipelineobj(m_device);
+ pipelineobj.AddDefaultColorAttachment();
+ pipelineobj.AddShader(&vs);
+ pipelineobj.AddShader(&ps);
+
+ // Set Input Assembly to TOPOLOGY POINT LIST
+ VkPipelineInputAssemblyStateCreateInfo ia_state = {};
+ ia_state.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
+ ia_state.topology = VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
+ pipelineobj.SetInputAssembly(&ia_state);
+
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+ m_commandBuffer->begin();
+ m_commandBuffer->ClearAllBuffers(m_renderTargets, m_clear_color, m_depthStencil, m_depth_clear_color,
+ m_stencil_clear_color);
+ m_commandBuffer->PrepareAttachments(m_renderTargets, m_depthStencil);
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+ pipelineobj.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
}
+ m_errorMonitor->VerifyNotFound();
}
-VkSamplerCreateInfo SafeSaneSamplerCreateInfo() {
- VkSamplerCreateInfo sampler_create_info = {};
- sampler_create_info.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
- sampler_create_info.pNext = nullptr;
- sampler_create_info.magFilter = VK_FILTER_NEAREST;
- sampler_create_info.minFilter = VK_FILTER_NEAREST;
- sampler_create_info.mipmapMode = VK_SAMPLER_MIPMAP_MODE_NEAREST;
- sampler_create_info.addressModeU = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
- sampler_create_info.addressModeV = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
- sampler_create_info.addressModeW = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
- sampler_create_info.mipLodBias = 0.0;
- sampler_create_info.anisotropyEnable = VK_FALSE;
- sampler_create_info.maxAnisotropy = 1.0;
- sampler_create_info.compareEnable = VK_FALSE;
- sampler_create_info.compareOp = VK_COMPARE_OP_NEVER;
- sampler_create_info.minLod = 0.0;
- sampler_create_info.maxLod = 16.0;
- sampler_create_info.borderColor = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE;
- sampler_create_info.unnormalizedCoordinates = VK_FALSE;
+TEST_F(VkPositiveLayerTest, UncompressedToCompressedImageCopy) {
+ TEST_DESCRIPTION("Image copies between compressed and uncompressed images");
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ // Verify format support
+ // Size-compatible (64-bit) formats. Uncompressed is 64 bits per texel, compressed is 64 bits per 4x4 block (or 4bpt).
+ if (!ImageFormatAndFeaturesSupported(gpu(), VK_FORMAT_R16G16B16A16_UINT, VK_IMAGE_TILING_OPTIMAL,
+ VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR | VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR) ||
+ !ImageFormatAndFeaturesSupported(gpu(), VK_FORMAT_BC1_RGBA_SRGB_BLOCK, VK_IMAGE_TILING_OPTIMAL,
+ VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR | VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR)) {
+ printf("%s Required formats/features not supported - UncompressedToCompressedImageCopy skipped.\n", kSkipPrefix);
+ return;
+ }
- return sampler_create_info;
+ VkImageObj uncomp_10x10t_image(m_device); // Size = 10 * 10 * 64 bits = 6400 bits
+ VkImageObj comp_10x10b_40x40t_image(m_device); // Size = 40 * 40 * 4 bits = 6400 bits
+
+ uncomp_10x10t_image.Init(10, 10, 1, VK_FORMAT_R16G16B16A16_UINT,
+ VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL);
+ comp_10x10b_40x40t_image.Init(40, 40, 1, VK_FORMAT_BC1_RGBA_SRGB_BLOCK,
+ VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL);
+
+ if (!uncomp_10x10t_image.initialized() || !comp_10x10b_40x40t_image.initialized()) {
+ printf("%s Unable to initialize surfaces - UncompressedToCompressedImageCopy skipped.\n", kSkipPrefix);
+ return;
+ }
+
+ // Both copies represent the same number of bytes: BC1 packs 4x4 texels into 64 bits (4 bits per texel),
+ // while the uncompressed format is 64 bits per texel.
+ VkImageCopy copy_region = {};
+ copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ copy_region.srcSubresource.mipLevel = 0;
+ copy_region.dstSubresource.mipLevel = 0;
+ copy_region.srcSubresource.baseArrayLayer = 0;
+ copy_region.dstSubresource.baseArrayLayer = 0;
+ copy_region.srcSubresource.layerCount = 1;
+ copy_region.dstSubresource.layerCount = 1;
+ copy_region.srcOffset = {0, 0, 0};
+ copy_region.dstOffset = {0, 0, 0};
+
+ m_errorMonitor->ExpectSuccess();
+ m_commandBuffer->begin();
+
+ // Copy from uncompressed to compressed
+ copy_region.extent = {10, 10, 1}; // Dimensions in (uncompressed) texels
+ vkCmdCopyImage(m_commandBuffer->handle(), uncomp_10x10t_image.handle(), VK_IMAGE_LAYOUT_GENERAL,
+ comp_10x10b_40x40t_image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &copy_region);
+
+ // And from compressed to uncompressed
+ copy_region.extent = {40, 40, 1}; // Dimensions in (compressed) texels
+ vkCmdCopyImage(m_commandBuffer->handle(), comp_10x10b_40x40t_image.handle(), VK_IMAGE_LAYOUT_GENERAL,
+ uncomp_10x10t_image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &copy_region);
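+
+ // Worked size check for the two copies above: R16G16B16A16_UINT is 8 bytes per texel, so the 10x10
+ // region covers 10 * 10 * 8 = 800 bytes; BC1 packs each 4x4 block into 8 bytes, so the 40x40 region
+ // covers (40 / 4) * (40 / 4) * 8 = 800 bytes. Equal byte counts are what make the regions size-compatible.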
+
+ m_errorMonitor->VerifyNotFound();
+ m_commandBuffer->end();
}
-VkImageViewCreateInfo SafeSaneImageViewCreateInfo(VkImage image, VkFormat format, VkImageAspectFlags aspect_mask) {
- VkImageViewCreateInfo image_view_create_info = {};
- image_view_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
- image_view_create_info.image = image;
- image_view_create_info.viewType = VK_IMAGE_VIEW_TYPE_2D;
- image_view_create_info.format = format;
- image_view_create_info.subresourceRange.layerCount = 1;
- image_view_create_info.subresourceRange.baseMipLevel = 0;
- image_view_create_info.subresourceRange.levelCount = 1;
- image_view_create_info.subresourceRange.aspectMask = aspect_mask;
+TEST_F(VkPositiveLayerTest, DeleteDescriptorSetLayoutsBeforeDescriptorSets) {
+ TEST_DESCRIPTION("Create DSLayouts and DescriptorSets and then delete the DSLayouts before the DescriptorSets.");
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+ VkResult err;
+
+ m_errorMonitor->ExpectSuccess();
+
+ VkDescriptorPoolSize ds_type_count = {};
+ ds_type_count.type = VK_DESCRIPTOR_TYPE_SAMPLER;
+ ds_type_count.descriptorCount = 1;
+
+ VkDescriptorPoolCreateInfo ds_pool_ci = {};
+ ds_pool_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
+ ds_pool_ci.pNext = NULL;
+ ds_pool_ci.flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT;
+ ds_pool_ci.maxSets = 1;
+ ds_pool_ci.poolSizeCount = 1;
+ ds_pool_ci.pPoolSizes = &ds_type_count;
+
+ VkDescriptorPool ds_pool_one;
+ err = vkCreateDescriptorPool(m_device->device(), &ds_pool_ci, NULL, &ds_pool_one);
+ ASSERT_VK_SUCCESS(err);
+
+ VkDescriptorSetLayoutBinding dsl_binding = {};
+ dsl_binding.binding = 0;
+ dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
+ dsl_binding.descriptorCount = 1;
+ dsl_binding.stageFlags = VK_SHADER_STAGE_ALL;
+ dsl_binding.pImmutableSamplers = NULL;
+
+ VkDescriptorSet descriptorSet;
+ {
+ const VkDescriptorSetLayoutObj ds_layout(m_device, {dsl_binding});
+
+ VkDescriptorSetAllocateInfo alloc_info = {};
+ alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
+ alloc_info.descriptorSetCount = 1;
+ alloc_info.descriptorPool = ds_pool_one;
+ alloc_info.pSetLayouts = &ds_layout.handle();
+ err = vkAllocateDescriptorSets(m_device->device(), &alloc_info, &descriptorSet);
+ ASSERT_VK_SUCCESS(err);
+ } // ds_layout destroyed
+ err = vkFreeDescriptorSets(m_device->device(), ds_pool_one, 1, &descriptorSet);
+
+ vkDestroyDescriptorPool(m_device->device(), ds_pool_one, NULL);
+ m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, CommandPoolDeleteWithReferences) {
+ TEST_DESCRIPTION("Ensure the validation layers bookkeeping tracks the implicit command buffer frees.");
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ VkCommandPoolCreateInfo cmd_pool_info = {};
+ cmd_pool_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+ cmd_pool_info.pNext = NULL;
+ cmd_pool_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
+ cmd_pool_info.flags = 0;
+
+ VkCommandPool secondary_cmd_pool;
+ VkResult res = vkCreateCommandPool(m_device->handle(), &cmd_pool_info, NULL, &secondary_cmd_pool);
+ ASSERT_VK_SUCCESS(res);
+
+ VkCommandBufferAllocateInfo cmdalloc = vk_testing::CommandBuffer::create_info(secondary_cmd_pool);
+ cmdalloc.level = VK_COMMAND_BUFFER_LEVEL_SECONDARY;
+
+ VkCommandBuffer secondary_cmds;
+ res = vkAllocateCommandBuffers(m_device->handle(), &cmdalloc, &secondary_cmds);
+
+ VkCommandBufferInheritanceInfo cmd_buf_inheritance_info = {};
+ cmd_buf_inheritance_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
+ cmd_buf_inheritance_info.pNext = NULL;
+ cmd_buf_inheritance_info.renderPass = VK_NULL_HANDLE;
+ cmd_buf_inheritance_info.subpass = 0;
+ cmd_buf_inheritance_info.framebuffer = VK_NULL_HANDLE;
+ cmd_buf_inheritance_info.occlusionQueryEnable = VK_FALSE;
+ cmd_buf_inheritance_info.queryFlags = 0;
+ cmd_buf_inheritance_info.pipelineStatistics = 0;
+
+ VkCommandBufferBeginInfo secondary_begin = {};
+ secondary_begin.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+ secondary_begin.pNext = NULL;
+ secondary_begin.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
+ secondary_begin.pInheritanceInfo = &cmd_buf_inheritance_info;
+
+ res = vkBeginCommandBuffer(secondary_cmds, &secondary_begin);
+ ASSERT_VK_SUCCESS(res);
+ vkEndCommandBuffer(secondary_cmds);
+
+ m_commandBuffer->begin();
+ vkCmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary_cmds);
+ m_commandBuffer->end();
- return image_view_create_info;
+ // DestroyCommandPool *implicitly* frees the command buffers allocated from it
+ vkDestroyCommandPool(m_device->handle(), secondary_cmd_pool, NULL);
+ // If bookkeeping has been lax, validating the reset will attempt to touch deleted data
+ res = vkResetCommandPool(m_device->handle(), m_commandPool->handle(), 0);
+ ASSERT_VK_SUCCESS(res);
}
-VkImageViewCreateInfo SafeSaneImageViewCreateInfo(const VkImageObj &image, VkFormat format, VkImageAspectFlags aspect_mask) {
- return SafeSaneImageViewCreateInfo(image.handle(), format, aspect_mask);
+TEST_F(VkLayerTest, SecondaryCommandBufferClearColorAttachmentsRenderArea) {
+ TEST_DESCRIPTION(
+ "Create a secondary command buffer with a CmdClearAttachments call whose rect lies outside of the renderPass renderArea");
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkCommandBufferAllocateInfo command_buffer_allocate_info = {};
+ command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+ command_buffer_allocate_info.commandPool = m_commandPool->handle();
+ command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_SECONDARY;
+ command_buffer_allocate_info.commandBufferCount = 1;
+
+ VkCommandBuffer secondary_command_buffer;
+ ASSERT_VK_SUCCESS(vkAllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, &secondary_command_buffer));
+ VkCommandBufferBeginInfo command_buffer_begin_info = {};
+ VkCommandBufferInheritanceInfo command_buffer_inheritance_info = {};
+ command_buffer_inheritance_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
+ command_buffer_inheritance_info.renderPass = m_renderPass;
+ command_buffer_inheritance_info.framebuffer = m_framebuffer;
+
+ command_buffer_begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+ command_buffer_begin_info.flags =
+ VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT | VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT;
+ command_buffer_begin_info.pInheritanceInfo = &command_buffer_inheritance_info;
+
+ vkBeginCommandBuffer(secondary_command_buffer, &command_buffer_begin_info);
+ VkClearAttachment color_attachment;
+ color_attachment.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ color_attachment.clearValue.color.float32[0] = 0;
+ color_attachment.clearValue.color.float32[1] = 0;
+ color_attachment.clearValue.color.float32[2] = 0;
+ color_attachment.clearValue.color.float32[3] = 0;
+ color_attachment.colorAttachment = 0;
+ // x extent of 257 exceeds render area of 256
+ VkClearRect clear_rect = {{{0, 0}, {257, 32}}};
+ vkCmdClearAttachments(secondary_command_buffer, 1, &color_attachment, 1, &clear_rect);
+ vkEndCommandBuffer(secondary_command_buffer);
+ m_commandBuffer->begin();
+ vkCmdBeginRenderPass(m_commandBuffer->handle(), &m_renderPassBeginInfo, VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS);
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearAttachments-pRects-00016");
+ vkCmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary_command_buffer);
+ m_errorMonitor->VerifyFound();
+
+ vkCmdEndRenderPass(m_commandBuffer->handle());
+ m_commandBuffer->end();
}
-bool CheckCreateRenderPass2Support(VkRenderFramework *renderFramework, std::vector<const char *> &device_extension_names) {
- if (renderFramework->DeviceExtensionSupported(renderFramework->gpu(), nullptr, VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME)) {
- device_extension_names.push_back(VK_KHR_MULTIVIEW_EXTENSION_NAME);
- device_extension_names.push_back(VK_KHR_MAINTENANCE2_EXTENSION_NAME);
- device_extension_names.push_back(VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME);
- return true;
- }
- return false;
+TEST_F(VkPositiveLayerTest, SecondaryCommandBufferClearColorAttachments) {
+ TEST_DESCRIPTION("Create a secondary command buffer and record a CmdClearAttachments call into it");
+ m_errorMonitor->ExpectSuccess();
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkCommandBufferAllocateInfo command_buffer_allocate_info = {};
+ command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+ command_buffer_allocate_info.commandPool = m_commandPool->handle();
+ command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_SECONDARY;
+ command_buffer_allocate_info.commandBufferCount = 1;
+
+ VkCommandBuffer secondary_command_buffer;
+ ASSERT_VK_SUCCESS(vkAllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, &secondary_command_buffer));
+ VkCommandBufferBeginInfo command_buffer_begin_info = {};
+ VkCommandBufferInheritanceInfo command_buffer_inheritance_info = {};
+ command_buffer_inheritance_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
+ command_buffer_inheritance_info.renderPass = m_renderPass;
+ command_buffer_inheritance_info.framebuffer = m_framebuffer;
+
+ command_buffer_begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+ command_buffer_begin_info.flags =
+ VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT | VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT;
+ command_buffer_begin_info.pInheritanceInfo = &command_buffer_inheritance_info;
+
+ vkBeginCommandBuffer(secondary_command_buffer, &command_buffer_begin_info);
+ VkClearAttachment color_attachment;
+ color_attachment.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ color_attachment.clearValue.color.float32[0] = 0;
+ color_attachment.clearValue.color.float32[1] = 0;
+ color_attachment.clearValue.color.float32[2] = 0;
+ color_attachment.clearValue.color.float32[3] = 0;
+ color_attachment.colorAttachment = 0;
+ VkClearRect clear_rect = {{{0, 0}, {32, 32}}};
+ vkCmdClearAttachments(secondary_command_buffer, 1, &color_attachment, 1, &clear_rect);
+ vkEndCommandBuffer(secondary_command_buffer);
+ m_commandBuffer->begin();
+ vkCmdBeginRenderPass(m_commandBuffer->handle(), &m_renderPassBeginInfo, VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS);
+ vkCmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary_command_buffer);
+ vkCmdEndRenderPass(m_commandBuffer->handle());
+ m_commandBuffer->end();
+ m_errorMonitor->VerifyNotFound();
}
-bool CheckDescriptorIndexingSupportAndInitFramework(VkRenderFramework *renderFramework,
- std::vector<const char *> &instance_extension_names,
- std::vector<const char *> &device_extension_names,
- VkValidationFeaturesEXT *features, void *userData) {
- bool descriptor_indexing = renderFramework->InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (descriptor_indexing) {
- instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- }
- renderFramework->InitFramework(myDbgFunc, userData, features);
- descriptor_indexing = descriptor_indexing && renderFramework->DeviceExtensionSupported(renderFramework->gpu(), nullptr,
- VK_KHR_MAINTENANCE3_EXTENSION_NAME);
- descriptor_indexing = descriptor_indexing && renderFramework->DeviceExtensionSupported(
- renderFramework->gpu(), nullptr, VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME);
- if (descriptor_indexing) {
- device_extension_names.push_back(VK_KHR_MAINTENANCE3_EXTENSION_NAME);
- device_extension_names.push_back(VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME);
- return true;
+TEST_F(VkPositiveLayerTest, SecondaryCommandBufferImageLayoutTransitions) {
+ TEST_DESCRIPTION("Perform an image layout transition in a secondary command buffer followed by a transition in the primary.");
+ VkResult err;
+ m_errorMonitor->ExpectSuccess();
+ ASSERT_NO_FATAL_FAILURE(Init());
+ auto depth_format = FindSupportedDepthStencilFormat(gpu());
+ if (!depth_format) {
+ printf("%s Couldn't find depth stencil format.\n", kSkipPrefix);
+ return;
}
- return false;
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+ // Allocate a secondary and primary cmd buffer
+ VkCommandBufferAllocateInfo command_buffer_allocate_info = {};
+ command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+ command_buffer_allocate_info.commandPool = m_commandPool->handle();
+ command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_SECONDARY;
+ command_buffer_allocate_info.commandBufferCount = 1;
+
+ VkCommandBuffer secondary_command_buffer;
+ ASSERT_VK_SUCCESS(vkAllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, &secondary_command_buffer));
+ command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+ VkCommandBuffer primary_command_buffer;
+ ASSERT_VK_SUCCESS(vkAllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, &primary_command_buffer));
+ VkCommandBufferBeginInfo command_buffer_begin_info = {};
+ VkCommandBufferInheritanceInfo command_buffer_inheritance_info = {};
+ command_buffer_inheritance_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
+ command_buffer_begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+ command_buffer_begin_info.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
+ command_buffer_begin_info.pInheritanceInfo = &command_buffer_inheritance_info;
+
+ err = vkBeginCommandBuffer(secondary_command_buffer, &command_buffer_begin_info);
+ ASSERT_VK_SUCCESS(err);
+ VkImageObj image(m_device);
+ image.Init(128, 128, 1, depth_format, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+ ASSERT_TRUE(image.initialized());
+ VkImageMemoryBarrier img_barrier = {};
+ img_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+ img_barrier.srcAccessMask = VK_ACCESS_HOST_WRITE_BIT;
+ img_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
+ img_barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+ img_barrier.newLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
+ img_barrier.image = image.handle();
+ img_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+ img_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+ img_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
+ img_barrier.subresourceRange.baseArrayLayer = 0;
+ img_barrier.subresourceRange.baseMipLevel = 0;
+ img_barrier.subresourceRange.layerCount = 1;
+ img_barrier.subresourceRange.levelCount = 1;
+ vkCmdPipelineBarrier(secondary_command_buffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, 0, 0, nullptr,
+ 0, nullptr, 1, &img_barrier);
+ err = vkEndCommandBuffer(secondary_command_buffer);
+ ASSERT_VK_SUCCESS(err);
+
+ // Now record the primary cmd buffer to execute the secondary and then transition the image again
+ command_buffer_begin_info.pInheritanceInfo = nullptr;
+ err = vkBeginCommandBuffer(primary_command_buffer, &command_buffer_begin_info);
+ ASSERT_VK_SUCCESS(err);
+ vkCmdExecuteCommands(primary_command_buffer, 1, &secondary_command_buffer);
+ VkImageMemoryBarrier img_barrier2 = {};
+ img_barrier2.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+ img_barrier2.srcAccessMask = VK_ACCESS_HOST_WRITE_BIT;
+ img_barrier2.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
+ img_barrier2.oldLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
+ img_barrier2.newLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
+ img_barrier2.image = image.handle();
+ img_barrier2.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+ img_barrier2.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+ img_barrier2.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
+ img_barrier2.subresourceRange.baseArrayLayer = 0;
+ img_barrier2.subresourceRange.baseMipLevel = 0;
+ img_barrier2.subresourceRange.layerCount = 1;
+ img_barrier2.subresourceRange.levelCount = 1;
+ vkCmdPipelineBarrier(primary_command_buffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, 0, 0, nullptr, 0,
+ nullptr, 1, &img_barrier2);
+ err = vkEndCommandBuffer(primary_command_buffer);
+ ASSERT_VK_SUCCESS(err);
+ VkSubmitInfo submit_info = {};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &primary_command_buffer;
+ err = vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+ ASSERT_VK_SUCCESS(err);
+ m_errorMonitor->VerifyNotFound();
+ err = vkDeviceWaitIdle(m_device->device());
+ ASSERT_VK_SUCCESS(err);
+ vkFreeCommandBuffers(m_device->device(), m_commandPool->handle(), 1, &secondary_command_buffer);
+ vkFreeCommandBuffers(m_device->device(), m_commandPool->handle(), 1, &primary_command_buffer);
}
-ErrorMonitor::ErrorMonitor() {
- test_platform_thread_create_mutex(&mutex_);
- test_platform_thread_lock_mutex(&mutex_);
- Reset();
- test_platform_thread_unlock_mutex(&mutex_);
+// This is a positive test. No failures are expected.
+TEST_F(VkPositiveLayerTest, IgnoreUnrelatedDescriptor) {
+ TEST_DESCRIPTION(
+ "Ensure that the vkUpdateDescriptorSets validation code is ignoring VkWriteDescriptorSet members that are not related to "
+ "the descriptor type specified by VkWriteDescriptorSet::descriptorType. Correct validation behavior will result in the "
+ "test running to completion without validation errors.");
+
+ const uintptr_t invalid_ptr = 0xcdcdcdcd;
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ // Verify VK_FORMAT_R8_UNORM supports VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT
+ const VkFormat format_texel_case = VK_FORMAT_R8_UNORM;
+ const char *format_texel_case_string = "VK_FORMAT_R8_UNORM";
+ VkFormatProperties format_properties;
+ vkGetPhysicalDeviceFormatProperties(gpu(), format_texel_case, &format_properties);
+ if (!(format_properties.bufferFeatures & VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT)) {
+ printf("%s Test requires %s to support VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT\n", kSkipPrefix, format_texel_case_string);
+ return;
+ }
+
+ // Image Case
+ {
+ m_errorMonitor->ExpectSuccess();
+
+ VkImageObj image(m_device);
+ image.Init(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+
+ VkImageView view = image.targetView(VK_FORMAT_B8G8R8A8_UNORM);
+
+ OneOffDescriptorSet ds(m_device, {
+ {0, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1, VK_SHADER_STAGE_ALL, nullptr},
+ });
+
+ VkDescriptorImageInfo image_info = {};
+ image_info.imageView = view;
+ image_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+
+ VkWriteDescriptorSet descriptor_write;
+ memset(&descriptor_write, 0, sizeof(descriptor_write));
+ descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ descriptor_write.dstSet = ds.set_;
+ descriptor_write.dstBinding = 0;
+ descriptor_write.descriptorCount = 1;
+ descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
+ descriptor_write.pImageInfo = &image_info;
+
+ // Set pBufferInfo and pTexelBufferView to invalid values, which should be ignored for
+ // descriptorType == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE. This will most likely produce a crash
+ // if the parameter_validation layer does not correctly ignore pBufferInfo.
+ descriptor_write.pBufferInfo = reinterpret_cast<const VkDescriptorBufferInfo *>(invalid_ptr);
+ descriptor_write.pTexelBufferView = reinterpret_cast<const VkBufferView *>(invalid_ptr);
+
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+
+ m_errorMonitor->VerifyNotFound();
+ }
+
+ // Buffer Case
+ {
+ m_errorMonitor->ExpectSuccess();
+
+ VkBuffer buffer;
+ uint32_t queue_family_index = 0;
+ VkBufferCreateInfo buffer_create_info = {};
+ buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+ buffer_create_info.size = 1024;
+ buffer_create_info.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
+ buffer_create_info.queueFamilyIndexCount = 1;
+ buffer_create_info.pQueueFamilyIndices = &queue_family_index;
+
+ VkResult err = vkCreateBuffer(m_device->device(), &buffer_create_info, NULL, &buffer);
+ ASSERT_VK_SUCCESS(err);
+
+ VkMemoryRequirements memory_reqs;
+ VkDeviceMemory buffer_memory;
+ bool pass;
+ VkMemoryAllocateInfo memory_info = {};
+ memory_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ memory_info.pNext = NULL;
+ memory_info.allocationSize = 0;
+ memory_info.memoryTypeIndex = 0;
+
+ vkGetBufferMemoryRequirements(m_device->device(), buffer, &memory_reqs);
+ memory_info.allocationSize = memory_reqs.size;
+ pass = m_device->phy().set_memory_type(memory_reqs.memoryTypeBits, &memory_info, 0);
+ ASSERT_TRUE(pass);
+
+ err = vkAllocateMemory(m_device->device(), &memory_info, NULL, &buffer_memory);
+ ASSERT_VK_SUCCESS(err);
+ err = vkBindBufferMemory(m_device->device(), buffer, buffer_memory, 0);
+ ASSERT_VK_SUCCESS(err);
+
+ OneOffDescriptorSet ds(m_device, {
+ {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+ });
+
+ VkDescriptorBufferInfo buffer_info = {};
+ buffer_info.buffer = buffer;
+ buffer_info.offset = 0;
+ buffer_info.range = 1024;
+
+ VkWriteDescriptorSet descriptor_write;
+ memset(&descriptor_write, 0, sizeof(descriptor_write));
+ descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ descriptor_write.dstSet = ds.set_;
+ descriptor_write.dstBinding = 0;
+ descriptor_write.descriptorCount = 1;
+ descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+ descriptor_write.pBufferInfo = &buffer_info;
+
+ // Set pImageInfo and pTexelBufferView to invalid values, which should be ignored for
+ // descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER. This will most likely produce a crash
+ // if the parameter_validation layer does not correctly ignore pImageInfo.
+ descriptor_write.pImageInfo = reinterpret_cast<const VkDescriptorImageInfo *>(invalid_ptr);
+ descriptor_write.pTexelBufferView = reinterpret_cast<const VkBufferView *>(invalid_ptr);
+
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+
+ m_errorMonitor->VerifyNotFound();
+
+ vkDestroyBuffer(m_device->device(), buffer, NULL);
+ vkFreeMemory(m_device->device(), buffer_memory, NULL);
+ }
+
+ // Texel Buffer Case
+ {
+ m_errorMonitor->ExpectSuccess();
+
+ VkBuffer buffer;
+ uint32_t queue_family_index = 0;
+ VkBufferCreateInfo buffer_create_info = {};
+ buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+ buffer_create_info.size = 1024;
+ buffer_create_info.usage = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT;
+ buffer_create_info.queueFamilyIndexCount = 1;
+ buffer_create_info.pQueueFamilyIndices = &queue_family_index;
+
+ VkResult err = vkCreateBuffer(m_device->device(), &buffer_create_info, NULL, &buffer);
+ ASSERT_VK_SUCCESS(err);
+
+ VkMemoryRequirements memory_reqs;
+ VkDeviceMemory buffer_memory;
+ bool pass;
+ VkMemoryAllocateInfo memory_info = {};
+ memory_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ memory_info.pNext = NULL;
+ memory_info.allocationSize = 0;
+ memory_info.memoryTypeIndex = 0;
+
+ vkGetBufferMemoryRequirements(m_device->device(), buffer, &memory_reqs);
+ memory_info.allocationSize = memory_reqs.size;
+ pass = m_device->phy().set_memory_type(memory_reqs.memoryTypeBits, &memory_info, 0);
+ ASSERT_TRUE(pass);
+
+ err = vkAllocateMemory(m_device->device(), &memory_info, NULL, &buffer_memory);
+ ASSERT_VK_SUCCESS(err);
+ err = vkBindBufferMemory(m_device->device(), buffer, buffer_memory, 0);
+ ASSERT_VK_SUCCESS(err);
+
+ VkBufferViewCreateInfo buff_view_ci = {};
+ buff_view_ci.sType = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO;
+ buff_view_ci.buffer = buffer;
+ buff_view_ci.format = format_texel_case;
+ buff_view_ci.range = VK_WHOLE_SIZE;
+ VkBufferView buffer_view;
+ err = vkCreateBufferView(m_device->device(), &buff_view_ci, NULL, &buffer_view);
+
+ OneOffDescriptorSet ds(m_device, {
+ {0, VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+ });
+
+ VkWriteDescriptorSet descriptor_write;
+ memset(&descriptor_write, 0, sizeof(descriptor_write));
+ descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ descriptor_write.dstSet = ds.set_;
+ descriptor_write.dstBinding = 0;
+ descriptor_write.descriptorCount = 1;
+ descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER;
+ descriptor_write.pTexelBufferView = &buffer_view;
+
+ // Set pImageInfo and pBufferInfo to invalid values, which should be ignored for
+ // descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER. This will most likely produce a
+ // crash if the parameter_validation layer does not correctly ignore pImageInfo and pBufferInfo.
+ descriptor_write.pImageInfo = reinterpret_cast<const VkDescriptorImageInfo *>(invalid_ptr);
+ descriptor_write.pBufferInfo = reinterpret_cast<const VkDescriptorBufferInfo *>(invalid_ptr);
+
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+
+ m_errorMonitor->VerifyNotFound();
+
+ vkDestroyBufferView(m_device->device(), buffer_view, NULL);
+ vkDestroyBuffer(m_device->device(), buffer, NULL);
+ vkFreeMemory(m_device->device(), buffer_memory, NULL);
+ }
}
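+
+// Illustrative sketch (hypothetical helper, not used by the tests above): which VkWriteDescriptorSet
+// member is actually read depends solely on descriptorType; the remaining pointers are ignored, which
+// is exactly the behavior the test exercises with deliberately invalid values.
+inline const char *RelevantWriteMemberSketch(VkDescriptorType type) {
+    switch (type) {
+        case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
+            return "pImageInfo";
+        case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
+            return "pBufferInfo";
+        case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
+            return "pTexelBufferView";
+        default:
+            return "see the Vulkan spec's descriptor-type table for the remaining types";
+    }
+}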
-ErrorMonitor::~ErrorMonitor() { test_platform_thread_delete_mutex(&mutex_); }
+TEST_F(VkPositiveLayerTest, ImmutableSamplerOnlyDescriptor) {
+ TEST_DESCRIPTION("Bind a DescriptorSet with only an immutable sampler and make sure that we don't warn for no update.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ OneOffDescriptorSet ds(m_device, {
+ {0, VK_DESCRIPTOR_TYPE_SAMPLER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
+ });
+
+ VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
+ VkSampler sampler;
+ VkResult err = vkCreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
+ ASSERT_VK_SUCCESS(err);
-void ErrorMonitor::Reset() {
- message_flags_ = VK_DEBUG_REPORT_ERROR_BIT_EXT;
- bailout_ = NULL;
- message_found_ = VK_FALSE;
- failure_message_strings_.clear();
- desired_message_strings_.clear();
- ignore_message_strings_.clear();
- other_messages_.clear();
+ const VkPipelineLayoutObj pipeline_layout(m_device, {&ds.layout_});
+
+ m_errorMonitor->ExpectSuccess();
+ m_commandBuffer->begin();
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
+ vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1, &ds.set_, 0,
+ nullptr);
+ m_errorMonitor->VerifyNotFound();
+
+ vkDestroySampler(m_device->device(), sampler, NULL);
+
+ m_commandBuffer->EndRenderPass();
+ m_commandBuffer->end();
}
-void ErrorMonitor::SetDesiredFailureMsg(const VkFlags msgFlags, const std::string msg) {
- SetDesiredFailureMsg(msgFlags, msg.c_str());
+TEST_F(VkLayerTest, DuplicateDescriptorBinding) {
+ TEST_DESCRIPTION("Create a descriptor set layout with a duplicate binding number.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ // Create layout where two binding #s are "1"
+ static const uint32_t NUM_BINDINGS = 3;
+ VkDescriptorSetLayoutBinding dsl_binding[NUM_BINDINGS] = {};
+ dsl_binding[0].binding = 1;
+ dsl_binding[0].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+ dsl_binding[0].descriptorCount = 1;
+ dsl_binding[0].stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
+ dsl_binding[0].pImmutableSamplers = NULL;
+ dsl_binding[1].binding = 0;
+ dsl_binding[1].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+ dsl_binding[1].descriptorCount = 1;
+ dsl_binding[1].stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
+ dsl_binding[1].pImmutableSamplers = NULL;
+ dsl_binding[2].binding = 1; // Duplicate binding should cause error
+ dsl_binding[2].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+ dsl_binding[2].descriptorCount = 1;
+ dsl_binding[2].stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
+ dsl_binding[2].pImmutableSamplers = NULL;
+
+ VkDescriptorSetLayoutCreateInfo ds_layout_ci = {};
+ ds_layout_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
+ ds_layout_ci.pNext = NULL;
+ ds_layout_ci.bindingCount = NUM_BINDINGS;
+ ds_layout_ci.pBindings = dsl_binding;
+ VkDescriptorSetLayout ds_layout;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDescriptorSetLayoutCreateInfo-binding-00279");
+ vkCreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
+ m_errorMonitor->VerifyFound();
}
-void ErrorMonitor::SetDesiredFailureMsg(const VkFlags msgFlags, const char *const msgString) {
- test_platform_thread_lock_mutex(&mutex_);
- desired_message_strings_.insert(msgString);
- message_flags_ |= msgFlags;
- test_platform_thread_unlock_mutex(&mutex_);
+TEST_F(VkLayerTest, InvalidPushDescriptorSetLayout) {
+ TEST_DESCRIPTION("Create a push descriptor set layout with invalid bindings.");
+
+ if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ } else {
+ printf("%s Did not find VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME; skipped.\n", kSkipPrefix);
+ return;
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME)) {
+ m_device_extension_names.push_back(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
+ } else {
+ printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
+ return;
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ // Get the push descriptor limits
+ auto push_descriptor_prop = GetPushDescriptorProperties(instance(), gpu());
+ if (push_descriptor_prop.maxPushDescriptors < 1) {
+ // Some implementations report an invalid maxPushDescriptors of 0
+ printf("%s maxPushDescriptors is zero, skipping tests\n", kSkipPrefix);
+ return;
+ }
+
+ VkDescriptorSetLayoutBinding binding = {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr};
+
+ auto ds_layout_ci = lvl_init_struct<VkDescriptorSetLayoutCreateInfo>();
+ ds_layout_ci.flags = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR;
+ ds_layout_ci.bindingCount = 1;
+ ds_layout_ci.pBindings = &binding;
+
+ // Note that as binding is referenced in ds_layout_ci, it is effectively in the closure by reference as well.
+ auto test_create_ds_layout = [&ds_layout_ci, this](std::string error) {
+ VkDescriptorSetLayout ds_layout = VK_NULL_HANDLE;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, error);
+ vkCreateDescriptorSetLayout(m_device->handle(), &ds_layout_ci, nullptr, &ds_layout);
+ m_errorMonitor->VerifyFound();
+ vkDestroyDescriptorSetLayout(m_device->handle(), ds_layout, nullptr);
+ };
+
+ // Start with the VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC type set above.
+ test_create_ds_layout("VUID-VkDescriptorSetLayoutCreateInfo-flags-00280");
+
+ binding.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC;
+ test_create_ds_layout(
+ "VUID-VkDescriptorSetLayoutCreateInfo-flags-00280"); // This is the same VUID as above, just a second error condition.
+
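+ // Bumping descriptorCount one past maxPushDescriptors should trigger 00281; when the limit is already
+ // UINT32_MAX, the "+ 1" below would wrap around, so that sub-test is skipped.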
+ if (!(push_descriptor_prop.maxPushDescriptors == std::numeric_limits<uint32_t>::max())) {
+ binding.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
+ binding.descriptorCount = push_descriptor_prop.maxPushDescriptors + 1;
+ test_create_ds_layout("VUID-VkDescriptorSetLayoutCreateInfo-flags-00281");
+ } else {
+ printf("%s maxPushDescriptors is set to maximum unit32_t value, skipping 'out of range test'.\n", kSkipPrefix);
+ }
}
-void ErrorMonitor::SetUnexpectedError(const char *const msg) {
- test_platform_thread_lock_mutex(&mutex_);
+TEST_F(VkLayerTest, PushDescriptorSetLayoutWithoutExtension) {
+ TEST_DESCRIPTION("Create a push descriptor set layout without loading the needed extension.");
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ VkDescriptorSetLayoutBinding binding = {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr};
- ignore_message_strings_.emplace_back(msg);
+ auto ds_layout_ci = lvl_init_struct<VkDescriptorSetLayoutCreateInfo>();
+ ds_layout_ci.flags = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR;
+ ds_layout_ci.bindingCount = 1;
+ ds_layout_ci.pBindings = &binding;
- test_platform_thread_unlock_mutex(&mutex_);
+ std::string error = "Attempted to use VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR in ";
+ error = error + "VkDescriptorSetLayoutCreateInfo::flags but its required extension ";
+ error = error + VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME;
+ error = error + " has not been enabled.";
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, error.c_str());
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDescriptorSetLayoutCreateInfo-flags-00281");
+ VkDescriptorSetLayout ds_layout = VK_NULL_HANDLE;
+ vkCreateDescriptorSetLayout(m_device->handle(), &ds_layout_ci, nullptr, &ds_layout);
+ m_errorMonitor->VerifyFound();
+ vkDestroyDescriptorSetLayout(m_device->handle(), ds_layout, nullptr);
}
-VkBool32 ErrorMonitor::CheckForDesiredMsg(const char *const msgString) {
- VkBool32 result = VK_FALSE;
- test_platform_thread_lock_mutex(&mutex_);
- if (bailout_ != nullptr) {
- *bailout_ = true;
- }
- string errorString(msgString);
- bool found_expected = false;
+TEST_F(VkLayerTest, DescriptorIndexingSetLayoutWithoutExtension) {
+ TEST_DESCRIPTION("Create an update_after_bind set layout without loading the needed extension.");
+ ASSERT_NO_FATAL_FAILURE(Init());
- if (!IgnoreMessage(errorString)) {
- for (auto desired_msg_it = desired_message_strings_.begin(); desired_msg_it != desired_message_strings_.end();
- ++desired_msg_it) {
- if ((*desired_msg_it).length() == 0) {
- // An empty desired_msg string "" indicates a positive test - not expecting an error.
- // Return true to avoid calling layers/driver with this error.
- // And don't erase the "" string, so it remains if another error is found.
- result = VK_TRUE;
- found_expected = true;
- message_found_ = true;
- failure_message_strings_.insert(errorString);
- } else if (errorString.find(*desired_msg_it) != string::npos) {
- found_expected = true;
- failure_message_strings_.insert(errorString);
- message_found_ = true;
- result = VK_TRUE;
- // Remove a maximum of one failure message from the set
- // Multiset mutation is acceptable because `break` causes flow of control to exit the for loop
- desired_message_strings_.erase(desired_msg_it);
- break;
- }
- }
+ auto ds_layout_ci = lvl_init_struct<VkDescriptorSetLayoutCreateInfo>();
+ ds_layout_ci.flags = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT;
- if (!found_expected) {
- printf("Unexpected: %s\n", msgString);
- other_messages_.push_back(errorString);
+ std::string error = "Attemped to use VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT in ";
+ error = error + "VkDescriptorSetLayoutCreateInfo::flags but its required extension ";
+ error = error + VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME;
+ error = error + " has not been enabled.";
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, error.c_str());
+ VkDescriptorSetLayout ds_layout = VK_NULL_HANDLE;
+ vkCreateDescriptorSetLayout(m_device->handle(), &ds_layout_ci, nullptr, &ds_layout);
+ m_errorMonitor->VerifyFound();
+ vkDestroyDescriptorSetLayout(m_device->handle(), ds_layout, nullptr);
+}
+
+TEST_F(VkLayerTest, DescriptorIndexingSetLayout) {
+ TEST_DESCRIPTION("Exercise various create/allocate-time errors related to VK_EXT_descriptor_indexing.");
+
+ if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ } else {
+ printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
+ VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ std::array<const char *, 2> required_device_extensions = {
+ {VK_KHR_MAINTENANCE3_EXTENSION_NAME, VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME}};
+ for (auto device_extension : required_device_extensions) {
+ if (DeviceExtensionSupported(gpu(), nullptr, device_extension)) {
+ m_device_extension_names.push_back(device_extension);
+ } else {
+ printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, device_extension);
+ return;
}
}
- test_platform_thread_unlock_mutex(&mutex_);
- return result;
+ PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+ (PFN_vkGetPhysicalDeviceFeatures2KHR)vkGetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+ ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
+
+ // Create a device that enables all supported indexing features except descriptorBindingUniformBufferUpdateAfterBind
+ auto indexing_features = lvl_init_struct<VkPhysicalDeviceDescriptorIndexingFeaturesEXT>();
+ auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&indexing_features);
+ vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
+
+ indexing_features.descriptorBindingUniformBufferUpdateAfterBind = VK_FALSE;
+
+ ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
+
+ std::array<VkDescriptorBindingFlagsEXT, 2> flags = {VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT,
+ VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT};
+ auto flags_create_info = lvl_init_struct<VkDescriptorSetLayoutBindingFlagsCreateInfoEXT>();
+ flags_create_info.bindingCount = (uint32_t)flags.size();
+ flags_create_info.pBindingFlags = flags.data();
+
+ VkDescriptorSetLayoutBinding binding = {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr};
+ auto ds_layout_ci = lvl_init_struct<VkDescriptorSetLayoutCreateInfo>(&flags_create_info);
+ ds_layout_ci.bindingCount = 1;
+ ds_layout_ci.pBindings = &binding;
+ VkDescriptorSetLayout ds_layout = VK_NULL_HANDLE;
+
+ // VU for VkDescriptorSetLayoutBindingFlagsCreateInfoEXT::bindingCount
+ flags_create_info.bindingCount = 2;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-bindingCount-03002");
+ VkResult err = vkCreateDescriptorSetLayout(m_device->handle(), &ds_layout_ci, nullptr, &ds_layout);
+ m_errorMonitor->VerifyFound();
+ vkDestroyDescriptorSetLayout(m_device->handle(), ds_layout, nullptr);
+
+ flags_create_info.bindingCount = 1;
+
+ // set is missing UPDATE_AFTER_BIND_POOL flag.
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDescriptorSetLayoutCreateInfo-flags-03000");
+ // binding uses a feature we disabled
+ m_errorMonitor->SetDesiredFailureMsg(
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingUniformBufferUpdateAfterBind-03005");
+ err = vkCreateDescriptorSetLayout(m_device->handle(), &ds_layout_ci, nullptr, &ds_layout);
+ m_errorMonitor->VerifyFound();
+ vkDestroyDescriptorSetLayout(m_device->handle(), ds_layout, nullptr);
+
+ ds_layout_ci.flags = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT;
+ ds_layout_ci.bindingCount = 0;
+ flags_create_info.bindingCount = 0;
+ err = vkCreateDescriptorSetLayout(m_device->handle(), &ds_layout_ci, nullptr, &ds_layout);
+ ASSERT_VK_SUCCESS(err);
+
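+ // The pool below is intentionally created without VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT,
+ // so allocating a set whose layout uses UPDATE_AFTER_BIND_POOL from it should fail with 03044.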
+ VkDescriptorPoolSize pool_size = {binding.descriptorType, binding.descriptorCount};
+ auto dspci = lvl_init_struct<VkDescriptorPoolCreateInfo>();
+ dspci.poolSizeCount = 1;
+ dspci.pPoolSizes = &pool_size;
+ dspci.maxSets = 1;
+ VkDescriptorPool pool;
+ err = vkCreateDescriptorPool(m_device->handle(), &dspci, nullptr, &pool);
+ ASSERT_VK_SUCCESS(err);
+
+ auto ds_alloc_info = lvl_init_struct<VkDescriptorSetAllocateInfo>();
+ ds_alloc_info.descriptorPool = pool;
+ ds_alloc_info.descriptorSetCount = 1;
+ ds_alloc_info.pSetLayouts = &ds_layout;
+
+ VkDescriptorSet ds = VK_NULL_HANDLE;
+ // mismatch between descriptor set and pool
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDescriptorSetAllocateInfo-pSetLayouts-03044");
+ vkAllocateDescriptorSets(m_device->handle(), &ds_alloc_info, &ds);
+ m_errorMonitor->VerifyFound();
+
+ vkDestroyDescriptorSetLayout(m_device->handle(), ds_layout, nullptr);
+ vkDestroyDescriptorPool(m_device->handle(), pool, nullptr);
+
+ if (indexing_features.descriptorBindingVariableDescriptorCount) {
+ ds_layout_ci.flags = 0;
+ ds_layout_ci.bindingCount = 1;
+ flags_create_info.bindingCount = 1;
+ flags[0] = VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT;
+ err = vkCreateDescriptorSetLayout(m_device->handle(), &ds_layout_ci, nullptr, &ds_layout);
+ ASSERT_VK_SUCCESS(err);
+
+ pool_size = {binding.descriptorType, binding.descriptorCount};
+ dspci = lvl_init_struct<VkDescriptorPoolCreateInfo>();
+ dspci.poolSizeCount = 1;
+ dspci.pPoolSizes = &pool_size;
+ dspci.maxSets = 1;
+ err = vkCreateDescriptorPool(m_device->handle(), &dspci, nullptr, &pool);
+ ASSERT_VK_SUCCESS(err);
+
+ auto count_alloc_info = lvl_init_struct<VkDescriptorSetVariableDescriptorCountAllocateInfoEXT>();
+ count_alloc_info.descriptorSetCount = 1;
+ // Set variable count larger than what was in the descriptor binding
+ uint32_t variable_count = 2;
+ count_alloc_info.pDescriptorCounts = &variable_count;
+
+ ds_alloc_info = lvl_init_struct<VkDescriptorSetAllocateInfo>(&count_alloc_info);
+ ds_alloc_info.descriptorPool = pool;
+ ds_alloc_info.descriptorSetCount = 1;
+ ds_alloc_info.pSetLayouts = &ds_layout;
+
+ ds = VK_NULL_HANDLE;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkDescriptorSetVariableDescriptorCountAllocateInfoEXT-pSetLayouts-03046");
+ vkAllocateDescriptorSets(m_device->handle(), &ds_alloc_info, &ds);
+ m_errorMonitor->VerifyFound();
+
+ vkDestroyDescriptorSetLayout(m_device->handle(), ds_layout, nullptr);
+ vkDestroyDescriptorPool(m_device->handle(), pool, nullptr);
+ }
}
-vector<string> ErrorMonitor::GetOtherFailureMsgs() const { return other_messages_; }
+TEST_F(VkLayerTest, DescriptorIndexingUpdateAfterBind) {
+ TEST_DESCRIPTION("Exercise errors for updating a descriptor set after it is bound.");
-VkDebugReportFlagsEXT ErrorMonitor::GetMessageFlags() const { return message_flags_; }
+ if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ } else {
+ printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix,
+ VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ return;
+ }
-bool ErrorMonitor::AnyDesiredMsgFound() const { return message_found_; }
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ if (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME) &&
+ DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE3_EXTENSION_NAME)) {
+ m_device_extension_names.push_back(VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_MAINTENANCE3_EXTENSION_NAME);
+ } else {
+ printf("%s Descriptor Indexing or Maintenance3 Extension not supported, skipping tests\n", kSkipPrefix);
+ return;
+ }
-bool ErrorMonitor::AllDesiredMsgsFound() const { return desired_message_strings_.empty(); }
+ PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+ (PFN_vkGetPhysicalDeviceFeatures2KHR)vkGetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+ ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
-void ErrorMonitor::SetError(const char *const errorString) {
- message_found_ = true;
- failure_message_strings_.insert(errorString);
-}
+ // Create a device that enables all supported indexing features except descriptorBindingUniformBufferUpdateAfterBind
+ auto indexing_features = lvl_init_struct<VkPhysicalDeviceDescriptorIndexingFeaturesEXT>();
+ auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&indexing_features);
+ vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
-void ErrorMonitor::SetBailout(bool *bailout) { bailout_ = bailout; }
+ indexing_features.descriptorBindingUniformBufferUpdateAfterBind = VK_FALSE;
-void ErrorMonitor::DumpFailureMsgs() const {
- vector<string> otherMsgs = GetOtherFailureMsgs();
- if (otherMsgs.size()) {
- cout << "Other error messages logged for this test were:" << endl;
- for (auto iter = otherMsgs.begin(); iter != otherMsgs.end(); iter++) {
- cout << " " << *iter << endl;
+ if (VK_FALSE == indexing_features.descriptorBindingStorageBufferUpdateAfterBind) {
+ printf("%s Test requires (unsupported) descriptorBindingStorageBufferUpdateAfterBind, skipping\n", kSkipPrefix);
+ return;
+ }
+ if (VK_FALSE == features2.features.fragmentStoresAndAtomics) {
+ printf("%s Test requires (unsupported) fragmentStoresAndAtomics, skipping\n", kSkipPrefix);
+ return;
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+ ASSERT_NO_FATAL_FAILURE(InitViewport());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkDescriptorBindingFlagsEXT flags[2] = {0, VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT};
+ auto flags_create_info = lvl_init_struct<VkDescriptorSetLayoutBindingFlagsCreateInfoEXT>();
+ flags_create_info.bindingCount = 2;
+ flags_create_info.pBindingFlags = &flags[0];
+
+ // Descriptor set has two bindings - only the second is update_after_bind
+ VkDescriptorSetLayoutBinding binding[2] = {
+ {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
+ {1, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
+ };
+ auto ds_layout_ci = lvl_init_struct<VkDescriptorSetLayoutCreateInfo>(&flags_create_info);
+ ds_layout_ci.flags = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT;
+ ds_layout_ci.bindingCount = 2;
+ ds_layout_ci.pBindings = &binding[0];
+ VkDescriptorSetLayout ds_layout = VK_NULL_HANDLE;
+
+ VkResult err = vkCreateDescriptorSetLayout(m_device->handle(), &ds_layout_ci, nullptr, &ds_layout);
+
+ VkDescriptorPoolSize pool_sizes[2] = {
+ {binding[0].descriptorType, binding[0].descriptorCount},
+ {binding[1].descriptorType, binding[1].descriptorCount},
+ };
+ auto dspci = lvl_init_struct<VkDescriptorPoolCreateInfo>();
+ dspci.flags = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT;
+ dspci.poolSizeCount = 2;
+ dspci.pPoolSizes = &pool_sizes[0];
+ dspci.maxSets = 1;
+ VkDescriptorPool pool;
+ err = vkCreateDescriptorPool(m_device->handle(), &dspci, nullptr, &pool);
+ ASSERT_VK_SUCCESS(err);
+
+ auto ds_alloc_info = lvl_init_struct<VkDescriptorSetAllocateInfo>();
+ ds_alloc_info.descriptorPool = pool;
+ ds_alloc_info.descriptorSetCount = 1;
+ ds_alloc_info.pSetLayouts = &ds_layout;
+
+ VkDescriptorSet ds = VK_NULL_HANDLE;
+ err = vkAllocateDescriptorSets(m_device->handle(), &ds_alloc_info, &ds);
+ ASSERT_VK_SUCCESS(err);
+
+ VkBufferCreateInfo buffCI = {};
+ buffCI.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+ buffCI.size = 1024;
+ buffCI.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
+
+ VkBuffer dyub;
+ err = vkCreateBuffer(m_device->device(), &buffCI, NULL, &dyub);
+ ASSERT_VK_SUCCESS(err);
+
+ VkDeviceMemory mem;
+ VkMemoryRequirements mem_reqs;
+ vkGetBufferMemoryRequirements(m_device->device(), dyub, &mem_reqs);
+
+ VkMemoryAllocateInfo mem_alloc_info = {};
+ mem_alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ mem_alloc_info.allocationSize = mem_reqs.size;
+ m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &mem_alloc_info, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
+ err = vkAllocateMemory(m_device->device(), &mem_alloc_info, NULL, &mem);
+ ASSERT_VK_SUCCESS(err);
+
+ err = vkBindBufferMemory(m_device->device(), dyub, mem, 0);
+ ASSERT_VK_SUCCESS(err);
+
+ VkDescriptorBufferInfo buffInfo[2] = {};
+ buffInfo[0].buffer = dyub;
+ buffInfo[0].offset = 0;
+ buffInfo[0].range = 1024;
+
+ VkWriteDescriptorSet descriptor_write[2] = {};
+ descriptor_write[0].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ descriptor_write[0].dstSet = ds;
+ descriptor_write[0].dstBinding = 0;
+ descriptor_write[0].descriptorCount = 1;
+ descriptor_write[0].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+ descriptor_write[0].pBufferInfo = buffInfo;
+ descriptor_write[1] = descriptor_write[0];
+ descriptor_write[1].dstBinding = 1;
+ descriptor_write[1].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
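+ // descriptor_write[0] targets binding 0 (no binding flags); descriptor_write[1] targets binding 1,
+ // the only binding created with VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT.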
+
+ VkPipelineLayout pipeline_layout;
+ VkPipelineLayoutCreateInfo pipeline_layout_ci = {};
+ pipeline_layout_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
+ pipeline_layout_ci.setLayoutCount = 1;
+ pipeline_layout_ci.pSetLayouts = &ds_layout;
+
+ vkCreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
+
+ // Create a dummy pipeline, since VL inspects which bindings are actually used at draw time
+ char const *vsSource =
+ "#version 450\n"
+ "void main(){\n"
+ " gl_Position = vec4(0);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) out vec4 color;\n"
+ "layout(set=0, binding=0) uniform foo0 { float x0; } bar0;\n"
+ "layout(set=0, binding=1) buffer foo1 { float x1; } bar1;\n"
+ "void main(){\n"
+ " color = vec4(bar0.x0 + bar1.x1);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.SetViewport(m_viewports);
+ pipe.SetScissor(m_scissors);
+ pipe.AddDefaultColorAttachment();
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+ pipe.CreateVKPipeline(pipeline_layout, m_renderPass);
+
+ // Make both bindings valid before binding to the command buffer
+ vkUpdateDescriptorSets(m_device->device(), 2, &descriptor_write[0], 0, NULL);
+ m_errorMonitor->VerifyNotFound();
+
+ // Two subtests. First only updates the update_after_bind binding and expects
+ // no error. Second updates the other binding and expects an error when the
+ // command buffer is ended.
+ for (uint32_t i = 0; i < 2; ++i) {
+ m_commandBuffer->begin();
+
+ vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout, 0, 1, &ds, 0, NULL);
+
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+ vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+ vkCmdDraw(m_commandBuffer->handle(), 0, 0, 0, 0);
+ vkCmdEndRenderPass(m_commandBuffer->handle());
+
+ m_errorMonitor->VerifyNotFound();
+ // Valid to update binding 1 after being bound
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write[1], 0, NULL);
+ m_errorMonitor->VerifyNotFound();
+
+ if (i == 0) {
+ // expect no errors
+ m_commandBuffer->end();
+ m_errorMonitor->VerifyNotFound();
+ } else {
+ // Invalid to update binding 0 after being bound. But the error is actually
+ // generated during vkEndCommandBuffer
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write[0], 0, NULL);
+ m_errorMonitor->VerifyNotFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "is invalid because bound DescriptorSet");
+
+ vkEndCommandBuffer(m_commandBuffer->handle());
+ m_errorMonitor->VerifyFound();
}
}
+
+ vkDestroyDescriptorSetLayout(m_device->handle(), ds_layout, nullptr);
+ vkDestroyDescriptorPool(m_device->handle(), pool, nullptr);
+ vkDestroyBuffer(m_device->handle(), dyub, NULL);
+ vkFreeMemory(m_device->handle(), mem, NULL);
+ vkDestroyPipelineLayout(m_device->handle(), pipeline_layout, NULL);
}
-void ErrorMonitor::ExpectSuccess(VkDebugReportFlagsEXT const message_flag_mask) {
- // Match ANY message matching specified type
- SetDesiredFailureMsg(message_flag_mask, "");
- message_flags_ = message_flag_mask; // override mask handling in SetDesired...
+TEST_F(VkLayerTest, AllocatePushDescriptorSet) {
+ TEST_DESCRIPTION("Attempt to allocate a push descriptor set.");
+ if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ } else {
+ printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix,
+ VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ return;
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME)) {
+ m_device_extension_names.push_back(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
+ } else {
+ printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ auto push_descriptor_prop = GetPushDescriptorProperties(instance(), gpu());
+ if (push_descriptor_prop.maxPushDescriptors < 1) {
+ // Some implementations report an invalid maxPushDescriptors of 0
+ printf("%s maxPushDescriptors is zero, skipping tests\n", kSkipPrefix);
+ return;
+ }
+
+ VkDescriptorSetLayoutBinding binding = {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr};
+ auto ds_layout_ci = lvl_init_struct<VkDescriptorSetLayoutCreateInfo>();
+ ds_layout_ci.flags = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR;
+ ds_layout_ci.bindingCount = 1;
+ ds_layout_ci.pBindings = &binding;
+ VkDescriptorSetLayout ds_layout = VK_NULL_HANDLE;
+ VkResult err = vkCreateDescriptorSetLayout(m_device->handle(), &ds_layout_ci, nullptr, &ds_layout);
+ ASSERT_VK_SUCCESS(err);
+
+ VkDescriptorPoolSize pool_size = {binding.descriptorType, binding.descriptorCount};
+ auto dspci = lvl_init_struct<VkDescriptorPoolCreateInfo>();
+ dspci.poolSizeCount = 1;
+ dspci.pPoolSizes = &pool_size;
+ dspci.maxSets = 1;
+ VkDescriptorPool pool;
+ err = vkCreateDescriptorPool(m_device->handle(), &dspci, nullptr, &pool);
+ ASSERT_VK_SUCCESS(err);
+
+ auto ds_alloc_info = lvl_init_struct<VkDescriptorSetAllocateInfo>();
+ ds_alloc_info.descriptorPool = pool;
+ ds_alloc_info.descriptorSetCount = 1;
+ ds_alloc_info.pSetLayouts = &ds_layout;
+
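+ // Layouts created with VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR are only used via
+ // vkCmdPushDescriptorSetKHR; allocating a set from one should be rejected with 00308.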
+ VkDescriptorSet ds = VK_NULL_HANDLE;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDescriptorSetAllocateInfo-pSetLayouts-00308");
+ vkAllocateDescriptorSets(m_device->handle(), &ds_alloc_info, &ds);
+ m_errorMonitor->VerifyFound();
+
+ vkDestroyDescriptorPool(m_device->handle(), pool, nullptr);
+ vkDestroyDescriptorSetLayout(m_device->handle(), ds_layout, nullptr);
}
-void ErrorMonitor::VerifyFound() {
- // Not receiving expected message(s) is a failure. /Before/ throwing, dump any other messages
- if (!AllDesiredMsgsFound()) {
- DumpFailureMsgs();
- for (const auto desired_msg : desired_message_strings_) {
- ADD_FAILURE() << "Did not receive expected error '" << desired_msg << "'";
+TEST_F(VkLayerTest, PushDescriptorSetCmdPushBadArgs) {
+ TEST_DESCRIPTION("Attempt to push a push descriptor set with incorrect arguments.");
+ if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ } else {
+ printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix,
+ VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ return;
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME)) {
+ m_device_extension_names.push_back(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
+ } else {
+ printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ auto push_descriptor_prop = GetPushDescriptorProperties(instance(), gpu());
+ if (push_descriptor_prop.maxPushDescriptors < 1) {
+ // Some implementations report an invalid maxPushDescriptors of 0
+ printf("%s maxPushDescriptors is zero, skipping tests\n", kSkipPrefix);
+ return;
+ }
+
+ // Create ordinary and push descriptor set layout
+ VkDescriptorSetLayoutBinding binding = {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr};
+ const VkDescriptorSetLayoutObj ds_layout(m_device, {binding});
+ ASSERT_TRUE(ds_layout.initialized());
+ const VkDescriptorSetLayoutObj push_ds_layout(m_device, {binding}, VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR);
+ ASSERT_TRUE(push_ds_layout.initialized());
+
+ // Now use the descriptor set layouts to create a pipeline layout
+ const VkPipelineLayoutObj pipeline_layout(m_device, {&push_ds_layout, &ds_layout});
+ ASSERT_TRUE(pipeline_layout.initialized());
+
+ // Create a descriptor to push
+ const uint32_t buffer_data[4] = {4, 5, 6, 7};
+ VkConstantBufferObj buffer_obj(m_device, sizeof(buffer_data), &buffer_data);
+ ASSERT_TRUE(buffer_obj.initialized());
+
+ // Create a "write" struct, noting that the buffer_info cannot be a temporary arg (the return from write_descriptor_set
+ // references its data), and the DescriptorSet() can be temporary, because the value is ignored
+ VkDescriptorBufferInfo buffer_info = {buffer_obj.handle(), 0, VK_WHOLE_SIZE};
+
+ VkWriteDescriptorSet descriptor_write = vk_testing::Device::write_descriptor_set(
+ vk_testing::DescriptorSet(), 0, 0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, &buffer_info);
+
+ // Find address of extension call and make the call
+ PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR =
+ (PFN_vkCmdPushDescriptorSetKHR)vkGetDeviceProcAddr(m_device->device(), "vkCmdPushDescriptorSetKHR");
+ ASSERT_TRUE(vkCmdPushDescriptorSetKHR != nullptr);
+
+ // Section 1: Queue family matching/capabilities.
+ // Create command pool on a non-graphics queue
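+ // Look for a compute-capable family without graphics support, and a transfer-only family with neither;
+ // either one lets us record the push on a queue that cannot satisfy the graphics bind point.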
+ const uint32_t no_gfx_qfi = m_device->QueueFamilyMatching(VK_QUEUE_COMPUTE_BIT, VK_QUEUE_GRAPHICS_BIT);
+ const uint32_t transfer_only_qfi =
+ m_device->QueueFamilyMatching(VK_QUEUE_TRANSFER_BIT, (VK_QUEUE_COMPUTE_BIT | VK_QUEUE_GRAPHICS_BIT));
+ if ((UINT32_MAX == transfer_only_qfi) && (UINT32_MAX == no_gfx_qfi)) {
+ printf("%s No compute or transfer only queue family, skipping bindpoint and queue tests.", kSkipPrefix);
+ } else {
+ const uint32_t err_qfi = (UINT32_MAX == no_gfx_qfi) ? transfer_only_qfi : no_gfx_qfi;
+
+ VkCommandPoolObj command_pool(m_device, err_qfi);
+ ASSERT_TRUE(command_pool.initialized());
+ VkCommandBufferObj command_buffer(m_device, &command_pool);
+ ASSERT_TRUE(command_buffer.initialized());
+ command_buffer.begin();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdPushDescriptorSetKHR-pipelineBindPoint-00363");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-descriptorType-00330");
+ if (err_qfi == transfer_only_qfi) {
+ // Since this queue supports neither the gfx nor the compute bindpoint, we'll also get the cmdpool error
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdPushDescriptorSetKHR-commandBuffer-cmdpool");
}
- } else if (GetOtherFailureMsgs().size() > 0) {
- // Fail test case for any unexpected errors
-#if defined(ANDROID)
- // This will get unexpected errors into the adb log
- for (auto msg : other_messages_) {
- __android_log_print(ANDROID_LOG_INFO, "VulkanLayerValidationTests", "[ UNEXPECTED_ERR ] '%s'", msg.c_str());
+ vkCmdPushDescriptorSetKHR(command_buffer.handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
+ &descriptor_write);
+ m_errorMonitor->VerifyFound();
+ command_buffer.end();
+
+ // If we succeed in testing only one condition above, we need to test the other below.
+ if ((UINT32_MAX != transfer_only_qfi) && (err_qfi != transfer_only_qfi)) {
+ // Need to test the neither compute/gfx supported case separately.
+ VkCommandPoolObj tran_command_pool(m_device, transfer_only_qfi);
+ ASSERT_TRUE(tran_command_pool.initialized());
+ VkCommandBufferObj tran_command_buffer(m_device, &tran_command_pool);
+ ASSERT_TRUE(tran_command_buffer.initialized());
+ tran_command_buffer.begin();
+
+ // We can't avoid getting *both* errors in this case
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdPushDescriptorSetKHR-pipelineBindPoint-00363");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-descriptorType-00330");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdPushDescriptorSetKHR-commandBuffer-cmdpool");
+ vkCmdPushDescriptorSetKHR(tran_command_buffer.handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
+ &descriptor_write);
+ m_errorMonitor->VerifyFound();
+ tran_command_buffer.end();
}
-#else
- ADD_FAILURE() << "Received unexpected error(s).";
-#endif
}
- Reset();
+
+ // Push to the non-push binding
+ m_commandBuffer->begin();
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPushDescriptorSetKHR-set-00365");
+ vkCmdPushDescriptorSetKHR(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 1, 1,
+ &descriptor_write);
+ m_errorMonitor->VerifyFound();
+
+ // Specify set out of bounds
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPushDescriptorSetKHR-set-00364");
+ vkCmdPushDescriptorSetKHR(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 2, 1,
+ &descriptor_write);
+ m_errorMonitor->VerifyFound();
+ m_commandBuffer->end();
+
+ // This is a test for VUID-vkCmdPushDescriptorSetKHR-commandBuffer-recording
+ // TODO: Add VALIDATION_ERROR_ code support to core_validation::ValidateCmd
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "You must call vkBeginCommandBuffer() before this call to vkCmdPushDescriptorSetKHR()");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-descriptorType-00330");
+ vkCmdPushDescriptorSetKHR(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
+ &descriptor_write);
+ m_errorMonitor->VerifyFound();
}
-void ErrorMonitor::VerifyNotFound() {
- // ExpectSuccess() configured us to match anything. Any error is a failure.
- if (AnyDesiredMsgFound()) {
- DumpFailureMsgs();
- for (const auto msg : failure_message_strings_) {
- ADD_FAILURE() << "Expected to succeed but got error: " << msg;
- }
- } else if (GetOtherFailureMsgs().size() > 0) {
- // Fail test case for any unexpected errors
-#if defined(ANDROID)
- // This will get unexpected errors into the adb log
- for (auto msg : other_messages_) {
- __android_log_print(ANDROID_LOG_INFO, "VulkanLayerValidationTests", "[ UNEXPECTED_ERR ] '%s'", msg.c_str());
- }
-#else
- ADD_FAILURE() << "Received unexpected error(s).";
-#endif
+TEST_F(VkLayerTest, SetDynScissorParamTests) {
+ TEST_DESCRIPTION("Test parameters of vkCmdSetScissor without multiViewport feature");
+
+ VkPhysicalDeviceFeatures features{};
+ ASSERT_NO_FATAL_FAILURE(Init(&features));
+
+ const VkRect2D scissor = {{0, 0}, {16, 16}};
+ const VkRect2D scissors[] = {scissor, scissor};
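+ // Without the multiViewport feature, only scissor index 0 exists, so firstScissor must be 0 and
+ // scissorCount must be 1; the calls below violate one or both of those rules.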
+
+ m_commandBuffer->begin();
+
+ // array tests
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetScissor-firstScissor-00593");
+ vkCmdSetScissor(m_commandBuffer->handle(), 1, 1, scissors);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetScissor-scissorCount-arraylength");
+ vkCmdSetScissor(m_commandBuffer->handle(), 0, 0, nullptr);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetScissor-scissorCount-00594");
+ vkCmdSetScissor(m_commandBuffer->handle(), 0, 2, scissors);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetScissor-firstScissor-00593");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetScissor-scissorCount-arraylength");
+ vkCmdSetScissor(m_commandBuffer->handle(), 1, 0, scissors);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetScissor-firstScissor-00593");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetScissor-scissorCount-00594");
+ vkCmdSetScissor(m_commandBuffer->handle(), 1, 2, scissors);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetScissor-pScissors-parameter");
+ vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, nullptr);
+ m_errorMonitor->VerifyFound();
+
+ struct TestCase {
+ VkRect2D scissor;
+ std::string vuid;
+ };
+
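+ // Negative offsets violate VUID-vkCmdSetScissor-x-00595; offset + extent sums that exceed INT32_MAX
+ // violate the -offset-00596 (x) and -offset-00597 (y) rules.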
+ std::vector<TestCase> test_cases = {{{{-1, 0}, {16, 16}}, "VUID-vkCmdSetScissor-x-00595"},
+ {{{0, -1}, {16, 16}}, "VUID-vkCmdSetScissor-x-00595"},
+ {{{1, 0}, {INT32_MAX, 16}}, "VUID-vkCmdSetScissor-offset-00596"},
+ {{{INT32_MAX, 0}, {1, 16}}, "VUID-vkCmdSetScissor-offset-00596"},
+ {{{0, 0}, {uint32_t{INT32_MAX} + 1, 16}}, "VUID-vkCmdSetScissor-offset-00596"},
+ {{{0, 1}, {16, INT32_MAX}}, "VUID-vkCmdSetScissor-offset-00597"},
+ {{{0, INT32_MAX}, {16, 1}}, "VUID-vkCmdSetScissor-offset-00597"},
+ {{{0, 0}, {16, uint32_t{INT32_MAX} + 1}}, "VUID-vkCmdSetScissor-offset-00597"}};
+
+ for (const auto &test_case : test_cases) {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.vuid);
+ vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &test_case.scissor);
+ m_errorMonitor->VerifyFound();
}
- Reset();
+
+ m_commandBuffer->end();
}
-bool ErrorMonitor::IgnoreMessage(std::string const &msg) const {
- if (ignore_message_strings_.empty()) {
- return false;
+TEST_F(VkLayerTest, SetDynScissorParamMultiviewportTests) {
+ TEST_DESCRIPTION("Test parameters of vkCmdSetScissor with multiViewport feature enabled");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ if (!m_device->phy().features().multiViewport) {
+ printf("%s VkPhysicalDeviceFeatures::multiViewport is not supported -- skipping test.\n", kSkipPrefix);
+ return;
}
- return std::find_if(ignore_message_strings_.begin(), ignore_message_strings_.end(), [&msg](std::string const &str) {
- return msg.find(str) != std::string::npos;
- }) != ignore_message_strings_.end();
+ const auto max_scissors = m_device->props.limits.maxViewports;
+ const uint32_t too_many_scissors = 65536 + 1; // let's say this is too much to allocate pScissors for
+
+ m_commandBuffer->begin();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetScissor-scissorCount-arraylength");
+ vkCmdSetScissor(m_commandBuffer->handle(), 0, 0, nullptr);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetScissor-pScissors-parameter");
+ vkCmdSetScissor(m_commandBuffer->handle(), 0, max_scissors, nullptr);
+ m_errorMonitor->VerifyFound();
+
+ if (max_scissors >= too_many_scissors) {
+ printf("%s VkPhysicalDeviceLimits::maxViewports is too large to practically test against -- skipping part of test.\n",
+ kSkipPrefix);
+ return;
+ }
+
+ const VkRect2D scissor = {{0, 0}, {16, 16}};
+ const std::vector<VkRect2D> scissors(max_scissors + 1, scissor);
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetScissor-firstScissor-00592");
+ vkCmdSetScissor(m_commandBuffer->handle(), 0, max_scissors + 1, scissors.data());
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetScissor-firstScissor-00592");
+ vkCmdSetScissor(m_commandBuffer->handle(), max_scissors, 1, scissors.data());
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetScissor-firstScissor-00592");
+ vkCmdSetScissor(m_commandBuffer->handle(), 1, max_scissors, scissors.data());
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetScissor-scissorCount-arraylength");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetScissor-firstScissor-00592");
+ vkCmdSetScissor(m_commandBuffer->handle(), max_scissors + 1, 0, scissors.data());
+ m_errorMonitor->VerifyFound();
}
-void VkLayerTest::VKTriangleTest(BsoFailSelect failCase) {
- ASSERT_TRUE(m_device && m_device->initialized()); // VKTriangleTest assumes Init() has finished
+// This is a positive test. No failures are expected.
+TEST_F(VkPositiveLayerTest, EmptyDescriptorUpdateTest) {
+ TEST_DESCRIPTION("Update last descriptor in a set that includes an empty binding");
+ VkResult err;
- ASSERT_NO_FATAL_FAILURE(InitViewport());
+ ASSERT_NO_FATAL_FAILURE(Init());
+ m_errorMonitor->ExpectSuccess();
+
+ // Create layout with two uniform buffer descriptors w/ empty binding between them
+ OneOffDescriptorSet ds(m_device, {
+ {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+ {1, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 0 /*!*/, 0, nullptr},
+ {2, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+ });
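+ // Binding 1 deliberately has descriptorCount == 0 (an "empty" binding): it consumes the binding number
+ // but holds no descriptors; the update below targets binding 2, the last binding in the set.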
+
+ // Create a buffer to be used for update
+ VkBufferCreateInfo buff_ci = {};
+ buff_ci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+ buff_ci.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
+ buff_ci.size = 256;
+ buff_ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+ VkBuffer buffer;
+ err = vkCreateBuffer(m_device->device(), &buff_ci, NULL, &buffer);
+ ASSERT_VK_SUCCESS(err);
+ // Have to bind memory to buffer before descriptor update
+ VkMemoryAllocateInfo mem_alloc = {};
+ mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ mem_alloc.pNext = NULL;
+ mem_alloc.allocationSize = 512; // one allocation for both buffers
+ mem_alloc.memoryTypeIndex = 0;
+
+ VkMemoryRequirements mem_reqs;
+ vkGetBufferMemoryRequirements(m_device->device(), buffer, &mem_reqs);
+ bool pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &mem_alloc, 0);
+ if (!pass) {
+ printf("%s Failed to allocate memory.\n", kSkipPrefix);
+ vkDestroyBuffer(m_device->device(), buffer, NULL);
+ return;
+ }
+ // Make sure allocation is sufficiently large to accommodate buffer requirements
+ if (mem_reqs.size > mem_alloc.allocationSize) {
+ mem_alloc.allocationSize = mem_reqs.size;
+ }
- VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj ps(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+ VkDeviceMemory mem;
+ err = vkAllocateMemory(m_device->device(), &mem_alloc, NULL, &mem);
+ ASSERT_VK_SUCCESS(err);
+ err = vkBindBufferMemory(m_device->device(), buffer, mem, 0);
+ ASSERT_VK_SUCCESS(err);
- VkPipelineObj pipelineobj(m_device);
- pipelineobj.AddDefaultColorAttachment();
- pipelineobj.AddShader(&vs);
- pipelineobj.AddShader(&ps);
+ // Only update the descriptor at binding 2
+ VkDescriptorBufferInfo buff_info = {};
+ buff_info.buffer = buffer;
+ buff_info.offset = 0;
+ buff_info.range = VK_WHOLE_SIZE;
+ VkWriteDescriptorSet descriptor_write = {};
+ descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ descriptor_write.dstBinding = 2;
+ descriptor_write.descriptorCount = 1;
+ descriptor_write.pTexelBufferView = nullptr;
+ descriptor_write.pBufferInfo = &buff_info;
+ descriptor_write.pImageInfo = nullptr;
+ descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+ descriptor_write.dstSet = ds.set_;
- bool failcase_needs_depth = false; // to mark cases that need depth attachment
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
- VkBufferObj index_buffer;
+ m_errorMonitor->VerifyNotFound();
+ // Cleanup
+ vkFreeMemory(m_device->device(), mem, NULL);
+ vkDestroyBuffer(m_device->device(), buffer, NULL);
+}
- switch (failCase) {
- case BsoFailLineWidth: {
- pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_LINE_WIDTH);
- VkPipelineInputAssemblyStateCreateInfo ia_state = {};
- ia_state.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
- ia_state.topology = VK_PRIMITIVE_TOPOLOGY_LINE_LIST;
- pipelineobj.SetInputAssembly(&ia_state);
- break;
- }
- case BsoFailLineStipple: {
- pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_LINE_STIPPLE_EXT);
- VkPipelineInputAssemblyStateCreateInfo ia_state = {};
- ia_state.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
- ia_state.topology = VK_PRIMITIVE_TOPOLOGY_LINE_LIST;
- pipelineobj.SetInputAssembly(&ia_state);
+TEST_F(VkLayerTest, MultiplePushDescriptorSets) {
+ TEST_DESCRIPTION("Verify an error message for multiple push descriptor sets.");
- VkPipelineRasterizationLineStateCreateInfoEXT line_state = {};
- line_state.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT;
- line_state.lineRasterizationMode = VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT;
- line_state.stippledLineEnable = VK_TRUE;
- line_state.lineStippleFactor = 0;
- line_state.lineStipplePattern = 0;
- pipelineobj.SetLineState(&line_state);
- break;
- }
- case BsoFailDepthBias: {
- pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_DEPTH_BIAS);
- VkPipelineRasterizationStateCreateInfo rs_state = {};
- rs_state.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
- rs_state.depthBiasEnable = VK_TRUE;
- rs_state.lineWidth = 1.0f;
- pipelineobj.SetRasterization(&rs_state);
- break;
- }
- case BsoFailViewport: {
- pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_VIEWPORT);
- break;
- }
- case BsoFailScissor: {
- pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_SCISSOR);
- break;
- }
- case BsoFailBlend: {
- pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_BLEND_CONSTANTS);
- VkPipelineColorBlendAttachmentState att_state = {};
- att_state.dstAlphaBlendFactor = VK_BLEND_FACTOR_CONSTANT_COLOR;
- att_state.blendEnable = VK_TRUE;
- pipelineobj.AddColorAttachment(0, att_state);
- break;
- }
- case BsoFailDepthBounds: {
- failcase_needs_depth = true;
- pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_DEPTH_BOUNDS);
- break;
- }
- case BsoFailStencilReadMask: {
- failcase_needs_depth = true;
- pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK);
- break;
- }
- case BsoFailStencilWriteMask: {
- failcase_needs_depth = true;
- pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_STENCIL_WRITE_MASK);
- break;
- }
- case BsoFailStencilReference: {
- failcase_needs_depth = true;
- pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_STENCIL_REFERENCE);
- break;
- }
+ if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ } else {
+ printf("%s Did not find VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME; skipped.\n", kSkipPrefix);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME)) {
+ m_device_extension_names.push_back(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
+ } else {
+ printf("%s Push Descriptors Extension not supported, skipping tests\n", kSkipPrefix);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitState());
- case BsoFailIndexBuffer:
- break;
- case BsoFailIndexBufferBadSize:
- case BsoFailIndexBufferBadOffset:
- case BsoFailIndexBufferBadMapSize:
- case BsoFailIndexBufferBadMapOffset: {
- // Create an index buffer for these tests.
- // There is no need to populate it because we should bail before trying to draw.
- uint32_t const indices[] = {0};
- VkBufferCreateInfo buffer_info = {};
- buffer_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
- buffer_info.size = 1024;
- buffer_info.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
- buffer_info.queueFamilyIndexCount = 1;
- buffer_info.pQueueFamilyIndices = indices;
- index_buffer.init(*m_device, buffer_info, (VkMemoryPropertyFlags)VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
- } break;
- case BsoFailCmdClearAttachments:
- break;
- case BsoFailNone:
- break;
- default:
- break;
+ auto push_descriptor_prop = GetPushDescriptorProperties(instance(), gpu());
+ if (push_descriptor_prop.maxPushDescriptors < 1) {
+ // Some implementations report an invalid maxPushDescriptors of 0
+ printf("%s maxPushDescriptors is zero, skipping tests\n", kSkipPrefix);
+ return;
}
- VkDescriptorSetObj descriptorSet(m_device);
+ VkDescriptorSetLayoutBinding dsl_binding = {};
+ dsl_binding.binding = 0;
+ dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+ dsl_binding.descriptorCount = 1;
+ dsl_binding.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
+ dsl_binding.pImmutableSamplers = NULL;
+
+ const unsigned int descriptor_set_layout_count = 2;
+ std::vector<VkDescriptorSetLayoutObj> ds_layouts;
+ for (uint32_t i = 0; i < descriptor_set_layout_count; ++i) {
+ dsl_binding.binding = i;
+ ds_layouts.emplace_back(m_device, std::vector<VkDescriptorSetLayoutBinding>(1, dsl_binding),
+ VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR);
+ }
+ const auto &ds_vk_layouts = MakeVkHandles<VkDescriptorSetLayout>(ds_layouts);
+
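+ // A pipeline layout may contain at most one push descriptor set layout; supplying two of them should
+ // trigger VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00293 below.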
+ VkPipelineLayout pipeline_layout;
+ VkPipelineLayoutCreateInfo pipeline_layout_ci = {};
+ pipeline_layout_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
+ pipeline_layout_ci.pNext = NULL;
+ pipeline_layout_ci.pushConstantRangeCount = 0;
+ pipeline_layout_ci.pPushConstantRanges = NULL;
+ pipeline_layout_ci.setLayoutCount = ds_vk_layouts.size();
+ pipeline_layout_ci.pSetLayouts = ds_vk_layouts.data();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00293");
+ vkCreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
+ m_errorMonitor->VerifyFound();
+}
- VkImageView *depth_attachment = nullptr;
- if (failcase_needs_depth) {
- m_depth_stencil_fmt = FindSupportedDepthStencilFormat(gpu());
- ASSERT_TRUE(m_depth_stencil_fmt != VK_FORMAT_UNDEFINED);
+TEST_F(VkLayerTest, CreateDescriptorUpdateTemplate) {
+ TEST_DESCRIPTION("Verify error messages for invalid vkCreateDescriptorUpdateTemplate calls.");
- m_depthStencil->Init(m_device, static_cast<uint32_t>(m_width), static_cast<uint32_t>(m_height), m_depth_stencil_fmt,
- VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT);
- depth_attachment = m_depthStencil->BindInfo();
+ if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ } else {
+ printf("%s Did not find VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME; skipped.\n", kSkipPrefix);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ // Note: Includes workaround for some implementations which incorrectly return 0 maxPushDescriptors
+ if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME) &&
+ DeviceExtensionSupported(gpu(), nullptr, VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME) &&
+ (GetPushDescriptorProperties(instance(), gpu()).maxPushDescriptors > 0)) {
+ m_device_extension_names.push_back(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME);
+ } else {
+ printf("%s Push Descriptors and Descriptor Update Template Extensions not supported, skipping tests\n", kSkipPrefix);
+ return;
}
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ VkDescriptorSetLayoutBinding dsl_binding = {};
+ dsl_binding.binding = 0;
+ dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+ dsl_binding.descriptorCount = 1;
+ dsl_binding.stageFlags = VK_SHADER_STAGE_ALL;
+ dsl_binding.pImmutableSamplers = NULL;
+
+ const VkDescriptorSetLayoutObj ds_layout_ub(m_device, {dsl_binding});
+ const VkDescriptorSetLayoutObj ds_layout_ub1(m_device, {dsl_binding});
+ const VkDescriptorSetLayoutObj ds_layout_ub_push(m_device, {dsl_binding},
+ VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR);
+ const VkPipelineLayoutObj pipeline_layout(m_device, {{&ds_layout_ub, &ds_layout_ub1, &ds_layout_ub_push}});
+ PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR =
+ (PFN_vkCreateDescriptorUpdateTemplateKHR)vkGetDeviceProcAddr(m_device->device(), "vkCreateDescriptorUpdateTemplateKHR");
+ ASSERT_NE(vkCreateDescriptorUpdateTemplateKHR, nullptr);
+ PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR =
+ (PFN_vkDestroyDescriptorUpdateTemplateKHR)vkGetDeviceProcAddr(m_device->device(), "vkDestroyDescriptorUpdateTemplateKHR");
+ ASSERT_NE(vkDestroyDescriptorUpdateTemplateKHR, nullptr);
+
+ VkDescriptorUpdateTemplateEntry entries = {0, 0, 1, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 0, sizeof(VkBuffer)};
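+ // Entry fields in order: dstBinding 0, dstArrayElement 0, descriptorCount 1, UNIFORM_BUFFER type,
+ // offset 0, stride sizeof(VkBuffer).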
+ VkDescriptorUpdateTemplateCreateInfo create_info = {};
+ create_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO;
+ create_info.pNext = nullptr;
+ create_info.flags = 0;
+ create_info.descriptorUpdateEntryCount = 1;
+ create_info.pDescriptorUpdateEntries = &entries;
+
+ auto do_test = [&](std::string err) {
+ VkDescriptorUpdateTemplateKHR dut = VK_NULL_HANDLE;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, err);
+ if (VK_SUCCESS == vkCreateDescriptorUpdateTemplateKHR(m_device->handle(), &create_info, nullptr, &dut)) {
+ vkDestroyDescriptorUpdateTemplateKHR(m_device->handle(), dut, nullptr);
+ }
+ m_errorMonitor->VerifyFound();
+ };
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget(1, depth_attachment));
+ // Descriptor set type template
+ create_info.templateType = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET;
+ // descriptorSetLayout is NULL
+ do_test("VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00350");
+
+ // Push descriptor type template
+ create_info.templateType = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR;
+ create_info.pipelineBindPoint = VK_PIPELINE_BIND_POINT_COMPUTE;
+ create_info.pipelineLayout = pipeline_layout.handle();
+ create_info.set = 2;
+
+ // Bad bindpoint -- force fuzz the bind point
+ memset(&create_info.pipelineBindPoint, 0xFE, sizeof(create_info.pipelineBindPoint));
+ do_test("VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00351");
+ create_info.pipelineBindPoint = VK_PIPELINE_BIND_POINT_COMPUTE;
+
+ // Bad pipeline layout
+ create_info.pipelineLayout = VK_NULL_HANDLE;
+ do_test("VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00352");
+ create_info.pipelineLayout = pipeline_layout.handle();
+
+ // Wrong set #
+ create_info.set = 0;
+ do_test("VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00353");
+
+ // Invalid set #
+ create_info.set = 42;
+ do_test("VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00353");
+}
+
+// This is a positive test. No failures are expected.
+TEST_F(VkPositiveLayerTest, PushDescriptorNullDstSetTest) {
+ TEST_DESCRIPTION("Use null dstSet in CmdPushDescriptorSetKHR");
+
+ if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ } else {
+ printf("%s Did not find VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME; skipped.\n", kSkipPrefix);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME)) {
+ m_device_extension_names.push_back(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
+ } else {
+ printf("%s Push Descriptors Extension not supported, skipping tests\n", kSkipPrefix);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitState());
+ m_errorMonitor->ExpectSuccess();
+
+ auto push_descriptor_prop = GetPushDescriptorProperties(instance(), gpu());
+ if (push_descriptor_prop.maxPushDescriptors < 1) {
+ // Some implementations report an invalid maxPushDescriptors of 0
+ printf("%s maxPushDescriptors is zero, skipping tests\n", kSkipPrefix);
+ return;
+ }
+
+ VkDescriptorSetLayoutBinding dsl_binding = {};
+ dsl_binding.binding = 2;
+ dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+ dsl_binding.descriptorCount = 1;
+ dsl_binding.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
+ dsl_binding.pImmutableSamplers = NULL;
+
+ const VkDescriptorSetLayoutObj ds_layout(m_device, {dsl_binding}, VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR);
+
+ // Now use the descriptor layout to create a pipeline layout
+ const VkPipelineLayoutObj pipeline_layout(m_device, {&ds_layout});
+
+ static const float vbo_data[3] = {1.f, 0.f, 1.f};
+ VkConstantBufferObj vbo(m_device, sizeof(vbo_data), (const void *)&vbo_data, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT);
+
+ VkDescriptorBufferInfo buff_info;
+ buff_info.buffer = vbo.handle();
+ buff_info.offset = 0;
+ buff_info.range = sizeof(vbo_data);
+ VkWriteDescriptorSet descriptor_write = {};
+ descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ descriptor_write.dstBinding = 2;
+ descriptor_write.descriptorCount = 1;
+ descriptor_write.pTexelBufferView = nullptr;
+ descriptor_write.pBufferInfo = &buff_info;
+ descriptor_write.pImageInfo = nullptr;
+ descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+ descriptor_write.dstSet = 0; // Should not cause a validation error
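+ // vkCmdPushDescriptorSetKHR ignores VkWriteDescriptorSet::dstSet, so leaving it null here is valid.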
+
+ // Find address of extension call and make the call
+ PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR =
+ (PFN_vkCmdPushDescriptorSetKHR)vkGetDeviceProcAddr(m_device->device(), "vkCmdPushDescriptorSetKHR");
+ assert(vkCmdPushDescriptorSetKHR != nullptr);
m_commandBuffer->begin();
+ vkCmdPushDescriptorSetKHR(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
+ &descriptor_write);
- GenericDrawPreparation(m_commandBuffer, pipelineobj, descriptorSet, failCase);
+ m_errorMonitor->VerifyNotFound();
+}
+
+// This is a positive test. No failures are expected.
+TEST_F(VkPositiveLayerTest, PushDescriptorUnboundSetTest) {
+ TEST_DESCRIPTION("Ensure that no validation errors are produced for not bound push descriptor sets");
+ VkResult err;
+ if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ } else {
+ printf("%s Did not find VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME; skipped.\n", kSkipPrefix);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME)) {
+ m_device_extension_names.push_back(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
+ } else {
+ printf("%s Push Descriptors Extension not supported, skipping tests\n", kSkipPrefix);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ auto push_descriptor_prop = GetPushDescriptorProperties(instance(), gpu());
+ if (push_descriptor_prop.maxPushDescriptors < 1) {
+ // Some implementations report an invalid maxPushDescriptors of 0
+ printf("%s maxPushDescriptors is zero, skipping tests\n", kSkipPrefix);
+ return;
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitViewport());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+ m_errorMonitor->ExpectSuccess();
+
+ VkDescriptorPoolSize ds_type_count = {};
+ ds_type_count.type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+ ds_type_count.descriptorCount = 1;
+
+ VkDescriptorPoolCreateInfo ds_pool_ci = {};
+ ds_pool_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
+ ds_pool_ci.pNext = NULL;
+ ds_pool_ci.maxSets = 1;
+ ds_pool_ci.poolSizeCount = 1;
+ ds_pool_ci.flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT;
+ ds_pool_ci.pPoolSizes = &ds_type_count;
+
+ VkDescriptorPool ds_pool;
+ err = vkCreateDescriptorPool(m_device->device(), &ds_pool_ci, NULL, &ds_pool);
+ ASSERT_VK_SUCCESS(err);
+
+ // Create descriptor set layout
+ VkDescriptorSetLayoutBinding dsl_binding = {};
+ dsl_binding.binding = 2;
+ dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+ dsl_binding.descriptorCount = 1;
+ dsl_binding.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
+ dsl_binding.pImmutableSamplers = NULL;
+
+ const VkDescriptorSetLayoutObj ds_layout(m_device, {dsl_binding});
+
+ // Create push descriptor set layout
+ const VkDescriptorSetLayoutObj push_ds_layout(m_device, {dsl_binding}, VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR);
+
+ // Allocate descriptor set
+ VkDescriptorSetAllocateInfo alloc_info = {};
+ alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
+ alloc_info.pNext = NULL;
+ alloc_info.descriptorPool = ds_pool;
+ alloc_info.descriptorSetCount = 1;
+ alloc_info.pSetLayouts = &ds_layout.handle();
+ VkDescriptorSet descriptor_set;
+ err = vkAllocateDescriptorSets(m_device->device(), &alloc_info, &descriptor_set);
+ ASSERT_VK_SUCCESS(err);
+
+ // Now use the descriptor layouts to create a pipeline layout
+ const VkPipelineLayoutObj pipeline_layout(m_device, {&push_ds_layout, &ds_layout});
+ // Create PSO
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "void main(){\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) out vec4 x;\n"
+ "layout(set=0) layout(binding=2) uniform foo1 { float x; } bar1;\n"
+ "layout(set=1) layout(binding=2) uniform foo2 { float y; } bar2;\n"
+ "void main(){\n"
+ " x = vec4(bar1.x) + vec4(bar2.y);\n"
+ "}\n";
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+ VkPipelineObj pipe(m_device);
+ pipe.SetViewport(m_viewports);
+ pipe.SetScissor(m_scissors);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+ pipe.AddDefaultColorAttachment();
+ pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());
+
+ static const float bo_data[1] = {1.f};
+ VkConstantBufferObj buffer(m_device, sizeof(bo_data), (const void *)&bo_data, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT);
+
+ // Update descriptor set
+ VkDescriptorBufferInfo buff_info;
+ buff_info.buffer = buffer.handle();
+ buff_info.offset = 0;
+ buff_info.range = sizeof(bo_data);
+ VkWriteDescriptorSet descriptor_write = {};
+ descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ descriptor_write.dstBinding = 2;
+ descriptor_write.descriptorCount = 1;
+ descriptor_write.pTexelBufferView = nullptr;
+ descriptor_write.pBufferInfo = &buff_info;
+ descriptor_write.pImageInfo = nullptr;
+ descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+ descriptor_write.dstSet = descriptor_set;
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+
+ PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR =
+ (PFN_vkCmdPushDescriptorSetKHR)vkGetDeviceProcAddr(m_device->device(), "vkCmdPushDescriptorSetKHR");
+ assert(vkCmdPushDescriptorSetKHR != nullptr);
+
+ m_commandBuffer->begin();
m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+ vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
- // render triangle
- if (failCase == BsoFailIndexBuffer) {
- // Use DrawIndexed w/o an index buffer bound
- m_commandBuffer->DrawIndexed(3, 1, 0, 0, 0);
- } else if (failCase == BsoFailIndexBufferBadSize) {
- // Bind the index buffer and draw one too many indices
- m_commandBuffer->BindIndexBuffer(&index_buffer, 0, VK_INDEX_TYPE_UINT16);
- m_commandBuffer->DrawIndexed(513, 1, 0, 0, 0);
- } else if (failCase == BsoFailIndexBufferBadOffset) {
- // Bind the index buffer and draw one past the end of the buffer using the offset
- m_commandBuffer->BindIndexBuffer(&index_buffer, 0, VK_INDEX_TYPE_UINT16);
- m_commandBuffer->DrawIndexed(512, 1, 1, 0, 0);
- } else if (failCase == BsoFailIndexBufferBadMapSize) {
- // Bind the index buffer at the middle point and draw one too many indices
- m_commandBuffer->BindIndexBuffer(&index_buffer, 512, VK_INDEX_TYPE_UINT16);
- m_commandBuffer->DrawIndexed(257, 1, 0, 0, 0);
- } else if (failCase == BsoFailIndexBufferBadMapOffset) {
- // Bind the index buffer at the middle point and draw one past the end of the buffer
- m_commandBuffer->BindIndexBuffer(&index_buffer, 512, VK_INDEX_TYPE_UINT16);
- m_commandBuffer->DrawIndexed(256, 1, 1, 0, 0);
- } else {
- m_commandBuffer->Draw(3, 1, 0, 0);
+ // Push descriptors and bind descriptor set
+ vkCmdPushDescriptorSetKHR(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
+ &descriptor_write);
+ vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 1, 1,
+ &descriptor_set, 0, NULL);
+
+ // No errors should be generated.
+ vkCmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
+
+ m_errorMonitor->VerifyNotFound();
+
+ m_commandBuffer->EndRenderPass();
+ m_commandBuffer->end();
+
+ vkDestroyDescriptorPool(m_device->device(), ds_pool, NULL);
+}
+
+// This is a positive test. No failures are expected.
+TEST_F(VkPositiveLayerTest, TestAliasedMemoryTracking) {
+ VkResult err;
+ bool pass;
+
+ TEST_DESCRIPTION(
+ "Create a buffer, allocate memory, bind memory, destroy the buffer, create an image, and bind the same memory to it");
+
+ m_errorMonitor->ExpectSuccess();
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ VkBuffer buffer;
+ VkImage image;
+ VkDeviceMemory mem;
+ VkMemoryRequirements mem_reqs;
+
+ VkBufferCreateInfo buf_info = {};
+ buf_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+ buf_info.pNext = NULL;
+ buf_info.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
+ buf_info.size = 256;
+ buf_info.queueFamilyIndexCount = 0;
+ buf_info.pQueueFamilyIndices = NULL;
+ buf_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+ buf_info.flags = 0;
+ err = vkCreateBuffer(m_device->device(), &buf_info, NULL, &buffer);
+ ASSERT_VK_SUCCESS(err);
+
+ vkGetBufferMemoryRequirements(m_device->device(), buffer, &mem_reqs);
+
+ VkMemoryAllocateInfo alloc_info = {};
+ alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ alloc_info.pNext = NULL;
+ alloc_info.memoryTypeIndex = 0;
+
+ // Ensure memory is big enough for both bindings
+ alloc_info.allocationSize = 0x10000;
+
+ pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &alloc_info, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
+ if (!pass) {
+ printf("%s Failed to allocate memory.\n", kSkipPrefix);
+ vkDestroyBuffer(m_device->device(), buffer, NULL);
+ return;
}
- if (failCase == BsoFailCmdClearAttachments) {
- VkClearAttachment color_attachment = {};
- color_attachment.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- color_attachment.colorAttachment = 2000000000; // Someone who knew what they were doing would use 0 for the index;
- VkClearRect clear_rect = {{{0, 0}, {static_cast<uint32_t>(m_width), static_cast<uint32_t>(m_height)}}, 0, 1};
+ err = vkAllocateMemory(m_device->device(), &alloc_info, NULL, &mem);
+ ASSERT_VK_SUCCESS(err);
- vkCmdClearAttachments(m_commandBuffer->handle(), 1, &color_attachment, 1, &clear_rect);
+ uint8_t *pData;
+ err = vkMapMemory(m_device->device(), mem, 0, mem_reqs.size, 0, (void **)&pData);
+ ASSERT_VK_SUCCESS(err);
+
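+    // Fill the mapped allocation with a pattern; note that memset uses only the low byte of 0xCADECADE (0xDE)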
+ memset(pData, 0xCADECADE, static_cast<size_t>(mem_reqs.size));
+
+ vkUnmapMemory(m_device->device(), mem);
+
+ err = vkBindBufferMemory(m_device->device(), buffer, mem, 0);
+ ASSERT_VK_SUCCESS(err);
+
+    // Now destroy the buffer. The resource no longer occupies this memory;
+    // in fact, it was never used by the GPU.
+    // Just to be sure, wait for idle.
+ vkDestroyBuffer(m_device->device(), buffer, NULL);
+ vkDeviceWaitIdle(m_device->device());
+
+ // Use optimal as some platforms report linear support but then fail image creation
+ VkImageTiling image_tiling = VK_IMAGE_TILING_OPTIMAL;
+ VkImageFormatProperties image_format_properties;
+ vkGetPhysicalDeviceImageFormatProperties(gpu(), VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_TYPE_2D, image_tiling,
+ VK_IMAGE_USAGE_TRANSFER_SRC_BIT, 0, &image_format_properties);
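+    // Treat a zero maxExtent.width as the format/usage combination being unsupported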
+ if (image_format_properties.maxExtent.width == 0) {
+ printf("%s Image format not supported; skipped.\n", kSkipPrefix);
+ vkFreeMemory(m_device->device(), mem, NULL);
+ return;
}
+ VkImageCreateInfo image_create_info = {};
+ image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ image_create_info.pNext = NULL;
+ image_create_info.imageType = VK_IMAGE_TYPE_2D;
+ image_create_info.format = VK_FORMAT_R8G8B8A8_UNORM;
+ image_create_info.extent.width = 64;
+ image_create_info.extent.height = 64;
+ image_create_info.extent.depth = 1;
+ image_create_info.mipLevels = 1;
+ image_create_info.arrayLayers = 1;
+ image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+ image_create_info.tiling = image_tiling;
+ image_create_info.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
+ image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
+ image_create_info.queueFamilyIndexCount = 0;
+ image_create_info.pQueueFamilyIndices = NULL;
+ image_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+ image_create_info.flags = 0;
+
+    /* Create an image that will be bound to the same memory allocation that
+     * previously backed the (now destroyed) buffer.
+     */
+ err = vkCreateImage(m_device->device(), &image_create_info, NULL, &image);
+ ASSERT_VK_SUCCESS(err);
+
+ vkGetImageMemoryRequirements(m_device->device(), image, &mem_reqs);
+
+ VkMemoryAllocateInfo mem_alloc = {};
+ mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ mem_alloc.pNext = NULL;
+ mem_alloc.allocationSize = 0;
+ mem_alloc.memoryTypeIndex = 0;
+ mem_alloc.allocationSize = mem_reqs.size;
+
+ pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &mem_alloc, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
+ if (!pass) {
+ printf("%s Failed to allocate memory.\n", kSkipPrefix);
+ vkFreeMemory(m_device->device(), mem, NULL);
+ vkDestroyImage(m_device->device(), image, NULL);
+ return;
+ }
+
+    // Binding the image to the memory that previously backed the destroyed buffer should not generate errors
+ err = vkBindImageMemory(m_device->device(), image, mem, 0);
+ ASSERT_VK_SUCCESS(err);
+
+ m_errorMonitor->VerifyNotFound();
+
+ vkFreeMemory(m_device->device(), mem, NULL);
+ vkDestroyImage(m_device->device(), image, NULL);
+}
+
+// This is a positive test. No failures are expected.
+TEST_F(VkPositiveLayerTest, TestDestroyFreeNullHandles) {
+ VkResult err;
+
+ TEST_DESCRIPTION("Call all applicable destroy and free routines with NULL handles, expecting no validation errors");
+
+ m_errorMonitor->ExpectSuccess();
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ vkDestroyBuffer(m_device->device(), VK_NULL_HANDLE, NULL);
+ vkDestroyBufferView(m_device->device(), VK_NULL_HANDLE, NULL);
+ vkDestroyCommandPool(m_device->device(), VK_NULL_HANDLE, NULL);
+ vkDestroyDescriptorPool(m_device->device(), VK_NULL_HANDLE, NULL);
+ vkDestroyDescriptorSetLayout(m_device->device(), VK_NULL_HANDLE, NULL);
+ vkDestroyDevice(VK_NULL_HANDLE, NULL);
+ vkDestroyEvent(m_device->device(), VK_NULL_HANDLE, NULL);
+ vkDestroyFence(m_device->device(), VK_NULL_HANDLE, NULL);
+ vkDestroyFramebuffer(m_device->device(), VK_NULL_HANDLE, NULL);
+ vkDestroyImage(m_device->device(), VK_NULL_HANDLE, NULL);
+ vkDestroyImageView(m_device->device(), VK_NULL_HANDLE, NULL);
+ vkDestroyInstance(VK_NULL_HANDLE, NULL);
+ vkDestroyPipeline(m_device->device(), VK_NULL_HANDLE, NULL);
+ vkDestroyPipelineCache(m_device->device(), VK_NULL_HANDLE, NULL);
+ vkDestroyPipelineLayout(m_device->device(), VK_NULL_HANDLE, NULL);
+ vkDestroyQueryPool(m_device->device(), VK_NULL_HANDLE, NULL);
+ vkDestroyRenderPass(m_device->device(), VK_NULL_HANDLE, NULL);
+ vkDestroySampler(m_device->device(), VK_NULL_HANDLE, NULL);
+ vkDestroySemaphore(m_device->device(), VK_NULL_HANDLE, NULL);
+ vkDestroyShaderModule(m_device->device(), VK_NULL_HANDLE, NULL);
+
+ VkCommandPool command_pool;
+ VkCommandPoolCreateInfo pool_create_info{};
+ pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+ pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
+ pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
+ vkCreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool);
+ VkCommandBuffer command_buffers[3] = {};
+ VkCommandBufferAllocateInfo command_buffer_allocate_info{};
+ command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+ command_buffer_allocate_info.commandPool = command_pool;
+ command_buffer_allocate_info.commandBufferCount = 1;
+ command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+ vkAllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, &command_buffers[1]);
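+    // Only command_buffers[1] was allocated; the other two entries are VK_NULL_HANDLE and freeing them should be ignored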
+ vkFreeCommandBuffers(m_device->device(), command_pool, 3, command_buffers);
+ vkDestroyCommandPool(m_device->device(), command_pool, NULL);
+
+ VkDescriptorPoolSize ds_type_count = {};
+ ds_type_count.type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
+ ds_type_count.descriptorCount = 1;
+
+ VkDescriptorPoolCreateInfo ds_pool_ci = {};
+ ds_pool_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
+ ds_pool_ci.pNext = NULL;
+ ds_pool_ci.maxSets = 1;
+ ds_pool_ci.poolSizeCount = 1;
+ ds_pool_ci.flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT;
+ ds_pool_ci.pPoolSizes = &ds_type_count;
+
+ VkDescriptorPool ds_pool;
+ err = vkCreateDescriptorPool(m_device->device(), &ds_pool_ci, NULL, &ds_pool);
+ ASSERT_VK_SUCCESS(err);
+
+ VkDescriptorSetLayoutBinding dsl_binding = {};
+ dsl_binding.binding = 2;
+ dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
+ dsl_binding.descriptorCount = 1;
+ dsl_binding.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
+ dsl_binding.pImmutableSamplers = NULL;
+
+ const VkDescriptorSetLayoutObj ds_layout(m_device, {dsl_binding});
+
+ VkDescriptorSet descriptor_sets[3] = {};
+ VkDescriptorSetAllocateInfo alloc_info = {};
+ alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
+ alloc_info.descriptorSetCount = 1;
+ alloc_info.descriptorPool = ds_pool;
+ alloc_info.pSetLayouts = &ds_layout.handle();
+ err = vkAllocateDescriptorSets(m_device->device(), &alloc_info, &descriptor_sets[1]);
+ ASSERT_VK_SUCCESS(err);
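+    // descriptor_sets[0] and descriptor_sets[2] remain VK_NULL_HANDLE; freeing them alongside the valid set should not produce errors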
+ vkFreeDescriptorSets(m_device->device(), ds_pool, 3, descriptor_sets);
+ vkDestroyDescriptorPool(m_device->device(), ds_pool, NULL);
+
+ vkFreeMemory(m_device->device(), VK_NULL_HANDLE, NULL);
+
+ m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, QueueSubmitSemaphoresAndLayoutTracking) {
+ TEST_DESCRIPTION("Submit multiple command buffers with chained semaphore signals and layout transitions");
+
+ m_errorMonitor->ExpectSuccess();
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ VkCommandBuffer cmd_bufs[4];
+ VkCommandBufferAllocateInfo alloc_info;
+ alloc_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+ alloc_info.pNext = NULL;
+ alloc_info.commandBufferCount = 4;
+ alloc_info.commandPool = m_commandPool->handle();
+ alloc_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+ vkAllocateCommandBuffers(m_device->device(), &alloc_info, cmd_bufs);
+ VkImageObj image(m_device);
+ image.Init(128, 128, 1, VK_FORMAT_B8G8R8A8_UNORM,
+ (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT),
+ VK_IMAGE_TILING_OPTIMAL, 0);
+ ASSERT_TRUE(image.initialized());
+ VkCommandBufferBeginInfo cb_binfo;
+ cb_binfo.pNext = NULL;
+ cb_binfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+ cb_binfo.pInheritanceInfo = VK_NULL_HANDLE;
+ cb_binfo.flags = 0;
+ // Use 4 command buffers, each with an image layout transition, ColorAO->General->ColorAO->TransferSrc->TransferDst
+ vkBeginCommandBuffer(cmd_bufs[0], &cb_binfo);
+ VkImageMemoryBarrier img_barrier = {};
+ img_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+ img_barrier.pNext = NULL;
+ img_barrier.srcAccessMask = VK_ACCESS_HOST_WRITE_BIT;
+ img_barrier.dstAccessMask = VK_ACCESS_HOST_WRITE_BIT;
+ img_barrier.oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+ img_barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
+ img_barrier.image = image.handle();
+ img_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+ img_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+ img_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ img_barrier.subresourceRange.baseArrayLayer = 0;
+ img_barrier.subresourceRange.baseMipLevel = 0;
+ img_barrier.subresourceRange.layerCount = 1;
+ img_barrier.subresourceRange.levelCount = 1;
+ vkCmdPipelineBarrier(cmd_bufs[0], VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_HOST_BIT, 0, 0, nullptr, 0, nullptr, 1,
+ &img_barrier);
+ vkEndCommandBuffer(cmd_bufs[0]);
+ vkBeginCommandBuffer(cmd_bufs[1], &cb_binfo);
+ img_barrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
+ img_barrier.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+ vkCmdPipelineBarrier(cmd_bufs[1], VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_HOST_BIT, 0, 0, nullptr, 0, nullptr, 1,
+ &img_barrier);
+ vkEndCommandBuffer(cmd_bufs[1]);
+ vkBeginCommandBuffer(cmd_bufs[2], &cb_binfo);
+ img_barrier.oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+ img_barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
+ vkCmdPipelineBarrier(cmd_bufs[2], VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_HOST_BIT, 0, 0, nullptr, 0, nullptr, 1,
+ &img_barrier);
+ vkEndCommandBuffer(cmd_bufs[2]);
+ vkBeginCommandBuffer(cmd_bufs[3], &cb_binfo);
+ img_barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
+ img_barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
+ vkCmdPipelineBarrier(cmd_bufs[3], VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_HOST_BIT, 0, 0, nullptr, 0, nullptr, 1,
+ &img_barrier);
+ vkEndCommandBuffer(cmd_bufs[3]);
+
+ // Submit 4 command buffers in 3 submits, with submits 2 and 3 waiting for semaphores from submits 1 and 2
+ VkSemaphore semaphore1, semaphore2;
+ VkSemaphoreCreateInfo semaphore_create_info{};
+ semaphore_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
+ vkCreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore1);
+ vkCreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore2);
+ VkPipelineStageFlags flags[]{VK_PIPELINE_STAGE_ALL_COMMANDS_BIT};
+ VkSubmitInfo submit_info[3];
+ submit_info[0].sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info[0].pNext = nullptr;
+ submit_info[0].commandBufferCount = 1;
+ submit_info[0].pCommandBuffers = &cmd_bufs[0];
+ submit_info[0].signalSemaphoreCount = 1;
+ submit_info[0].pSignalSemaphores = &semaphore1;
+ submit_info[0].waitSemaphoreCount = 0;
+    submit_info[0].pWaitSemaphores = nullptr;
+    submit_info[0].pWaitDstStageMask = flags;
+ submit_info[1].sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info[1].pNext = nullptr;
+ submit_info[1].commandBufferCount = 1;
+ submit_info[1].pCommandBuffers = &cmd_bufs[1];
+ submit_info[1].waitSemaphoreCount = 1;
+ submit_info[1].pWaitSemaphores = &semaphore1;
+ submit_info[1].signalSemaphoreCount = 1;
+ submit_info[1].pSignalSemaphores = &semaphore2;
+ submit_info[1].pWaitDstStageMask = flags;
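+    // The third submit consumes the two remaining command buffers (cmd_bufs[2] and cmd_bufs[3])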
+ submit_info[2].sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info[2].pNext = nullptr;
+ submit_info[2].commandBufferCount = 2;
+ submit_info[2].pCommandBuffers = &cmd_bufs[2];
+ submit_info[2].waitSemaphoreCount = 1;
+ submit_info[2].pWaitSemaphores = &semaphore2;
+ submit_info[2].signalSemaphoreCount = 0;
+ submit_info[2].pSignalSemaphores = nullptr;
+ submit_info[2].pWaitDstStageMask = flags;
+ vkQueueSubmit(m_device->m_queue, 3, submit_info, VK_NULL_HANDLE);
+ vkQueueWaitIdle(m_device->m_queue);
+
+ vkDestroySemaphore(m_device->device(), semaphore1, NULL);
+ vkDestroySemaphore(m_device->device(), semaphore2, NULL);
+ m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, DynamicOffsetWithInactiveBinding) {
+ // Create a descriptorSet w/ dynamic descriptors where 1 binding is inactive
+ // We previously had a bug where dynamic offset of inactive bindings was still being used
+ VkResult err;
+ m_errorMonitor->ExpectSuccess();
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitViewport());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ OneOffDescriptorSet ds(m_device, {
+ {2, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
+ {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
+ {1, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
+ });
+
+ const VkPipelineLayoutObj pipeline_layout(m_device, {&ds.layout_});
+
+ // Create two buffers to update the descriptors with
+ // The first will be 2k and used for bindings 0 & 1, the second is 1k for binding 2
+ uint32_t qfi = 0;
+ VkBufferCreateInfo buffCI = {};
+ buffCI.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+ buffCI.size = 2048;
+ buffCI.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
+ buffCI.queueFamilyIndexCount = 1;
+ buffCI.pQueueFamilyIndices = &qfi;
+
+ VkBuffer dyub1;
+ err = vkCreateBuffer(m_device->device(), &buffCI, NULL, &dyub1);
+ ASSERT_VK_SUCCESS(err);
+ // buffer2
+ buffCI.size = 1024;
+ VkBuffer dyub2;
+ err = vkCreateBuffer(m_device->device(), &buffCI, NULL, &dyub2);
+ ASSERT_VK_SUCCESS(err);
+ // Allocate memory and bind to buffers
+ VkMemoryAllocateInfo mem_alloc[2] = {};
+ mem_alloc[0].sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ mem_alloc[0].pNext = NULL;
+ mem_alloc[0].memoryTypeIndex = 0;
+ mem_alloc[1].sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ mem_alloc[1].pNext = NULL;
+ mem_alloc[1].memoryTypeIndex = 0;
+
+ VkMemoryRequirements mem_reqs1;
+ vkGetBufferMemoryRequirements(m_device->device(), dyub1, &mem_reqs1);
+ VkMemoryRequirements mem_reqs2;
+ vkGetBufferMemoryRequirements(m_device->device(), dyub2, &mem_reqs2);
+ mem_alloc[0].allocationSize = mem_reqs1.size;
+ bool pass = m_device->phy().set_memory_type(mem_reqs1.memoryTypeBits, &mem_alloc[0], 0);
+ mem_alloc[1].allocationSize = mem_reqs2.size;
+ pass &= m_device->phy().set_memory_type(mem_reqs2.memoryTypeBits, &mem_alloc[1], 0);
+ if (!pass) {
+ printf("%s Failed to allocate memory.\n", kSkipPrefix);
+ vkDestroyBuffer(m_device->device(), dyub1, NULL);
+ vkDestroyBuffer(m_device->device(), dyub2, NULL);
+ return;
+ }
+
+ VkDeviceMemory mem1;
+ err = vkAllocateMemory(m_device->device(), &mem_alloc[0], NULL, &mem1);
+ ASSERT_VK_SUCCESS(err);
+ err = vkBindBufferMemory(m_device->device(), dyub1, mem1, 0);
+ ASSERT_VK_SUCCESS(err);
+ VkDeviceMemory mem2;
+ err = vkAllocateMemory(m_device->device(), &mem_alloc[1], NULL, &mem2);
+ ASSERT_VK_SUCCESS(err);
+ err = vkBindBufferMemory(m_device->device(), dyub2, mem2, 0);
+ ASSERT_VK_SUCCESS(err);
+ // Update descriptors
+ const uint32_t BINDING_COUNT = 3;
+ VkDescriptorBufferInfo buff_info[BINDING_COUNT] = {};
+ buff_info[0].buffer = dyub1;
+ buff_info[0].offset = 0;
+ buff_info[0].range = 256;
+ buff_info[1].buffer = dyub1;
+ buff_info[1].offset = 256;
+ buff_info[1].range = 512;
+ buff_info[2].buffer = dyub2;
+ buff_info[2].offset = 0;
+ buff_info[2].range = 512;
+
+ VkWriteDescriptorSet descriptor_write;
+ memset(&descriptor_write, 0, sizeof(descriptor_write));
+ descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ descriptor_write.dstSet = ds.set_;
+ descriptor_write.dstBinding = 0;
+ descriptor_write.descriptorCount = BINDING_COUNT;
+ descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
+ descriptor_write.pBufferInfo = buff_info;
+
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+
+ m_commandBuffer->begin();
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
+ // Create PSO to be used for draw-time errors below
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "void main(){\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) out vec4 x;\n"
+ "layout(set=0) layout(binding=0) uniform foo1 { int x; int y; } bar1;\n"
+ "layout(set=0) layout(binding=2) uniform foo2 { int x; int y; } bar2;\n"
+ "void main(){\n"
+ " x = vec4(bar1.y) + vec4(bar2.y);\n"
+ "}\n";
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+ VkPipelineObj pipe(m_device);
+ pipe.SetViewport(m_viewports);
+ pipe.SetScissor(m_scissors);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+ pipe.AddDefaultColorAttachment();
+ pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());
+
+ vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+    // This bind should succeed, even though the dynamic offset of inactive binding 1 oversteps the binding 2 buffer size;
+    // validation used to have a bug in this case.
+ uint32_t dyn_off[BINDING_COUNT] = {0, 1024, 256};
+ vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1, &ds.set_,
+ BINDING_COUNT, dyn_off);
+ m_commandBuffer->Draw(1, 0, 0, 0);
+ m_errorMonitor->VerifyNotFound();
- // finalize recording of the command buffer
m_commandBuffer->EndRenderPass();
m_commandBuffer->end();
- m_commandBuffer->QueueCommandBuffer(true);
- DestroyRenderTarget();
+
+ vkDestroyBuffer(m_device->device(), dyub1, NULL);
+ vkDestroyBuffer(m_device->device(), dyub2, NULL);
+ vkFreeMemory(m_device->device(), mem1, NULL);
+ vkFreeMemory(m_device->device(), mem2, NULL);
}
-void VkLayerTest::GenericDrawPreparation(VkCommandBufferObj *commandBuffer, VkPipelineObj &pipelineobj,
- VkDescriptorSetObj &descriptorSet, BsoFailSelect failCase) {
- commandBuffer->ClearAllBuffers(m_renderTargets, m_clear_color, m_depthStencil, m_depth_clear_color, m_stencil_clear_color);
+TEST_F(VkPositiveLayerTest, NonCoherentMemoryMapping) {
+    TEST_DESCRIPTION(
+        "Ensure that validation's handling of non-coherent memory mapping while using VK_WHOLE_SIZE does not cause access "
+        "violations");
+ VkResult err;
+ uint8_t *pData;
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ VkDeviceMemory mem;
+ VkMemoryRequirements mem_reqs;
+ mem_reqs.memoryTypeBits = 0xFFFFFFFF;
+ const VkDeviceSize atom_size = m_device->props.limits.nonCoherentAtomSize;
+ VkMemoryAllocateInfo alloc_info = {};
+ alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ alloc_info.pNext = NULL;
+ alloc_info.memoryTypeIndex = 0;
+
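+    // Allocate a multiple of the non-coherent atom size so the mapped offsets and flush ranges below stay atom-aligned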
+ static const VkDeviceSize allocation_size = 32 * atom_size;
+ alloc_info.allocationSize = allocation_size;
+
+    // Find a memory configuration WITHOUT a COHERENT bit, otherwise exit
+ bool pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &alloc_info, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT,
+ VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
+ if (!pass) {
+ pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &alloc_info,
+ VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
+ VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
+ if (!pass) {
+ pass = m_device->phy().set_memory_type(
+ mem_reqs.memoryTypeBits, &alloc_info,
+ VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT,
+ VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
+ if (!pass) {
+ printf("%s Couldn't find a memory type wihtout a COHERENT bit.\n", kSkipPrefix);
+ return;
+ }
+ }
+ }
- commandBuffer->PrepareAttachments(m_renderTargets, m_depthStencil);
- // Make sure depthWriteEnable is set so that Depth fail test will work
- // correctly
- // Make sure stencilTestEnable is set so that Stencil fail test will work
- // correctly
- VkStencilOpState stencil = {};
- stencil.failOp = VK_STENCIL_OP_KEEP;
- stencil.passOp = VK_STENCIL_OP_KEEP;
- stencil.depthFailOp = VK_STENCIL_OP_KEEP;
- stencil.compareOp = VK_COMPARE_OP_NEVER;
+ err = vkAllocateMemory(m_device->device(), &alloc_info, NULL, &mem);
+ ASSERT_VK_SUCCESS(err);
- VkPipelineDepthStencilStateCreateInfo ds_ci = {};
- ds_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;
- ds_ci.pNext = NULL;
- ds_ci.depthTestEnable = VK_FALSE;
- ds_ci.depthWriteEnable = VK_TRUE;
- ds_ci.depthCompareOp = VK_COMPARE_OP_NEVER;
- ds_ci.depthBoundsTestEnable = VK_FALSE;
- if (failCase == BsoFailDepthBounds) {
- ds_ci.depthBoundsTestEnable = VK_TRUE;
- ds_ci.maxDepthBounds = 0.0f;
- ds_ci.minDepthBounds = 0.0f;
- }
- ds_ci.stencilTestEnable = VK_TRUE;
- ds_ci.front = stencil;
- ds_ci.back = stencil;
+ // Map/Flush/Invalidate using WHOLE_SIZE and zero offsets and entire mapped range
+ m_errorMonitor->ExpectSuccess();
+ err = vkMapMemory(m_device->device(), mem, 0, VK_WHOLE_SIZE, 0, (void **)&pData);
+ ASSERT_VK_SUCCESS(err);
+ VkMappedMemoryRange mmr = {};
+ mmr.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
+ mmr.memory = mem;
+ mmr.offset = 0;
+ mmr.size = VK_WHOLE_SIZE;
+ err = vkFlushMappedMemoryRanges(m_device->device(), 1, &mmr);
+ ASSERT_VK_SUCCESS(err);
+ err = vkInvalidateMappedMemoryRanges(m_device->device(), 1, &mmr);
+ ASSERT_VK_SUCCESS(err);
+ m_errorMonitor->VerifyNotFound();
+ vkUnmapMemory(m_device->device(), mem);
- pipelineobj.SetDepthStencil(&ds_ci);
- pipelineobj.SetViewport(m_viewports);
- pipelineobj.SetScissor(m_scissors);
- descriptorSet.CreateVKDescriptorSet(commandBuffer);
- VkResult err = pipelineobj.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+ // Map/Flush/Invalidate using WHOLE_SIZE and an offset and entire mapped range
+ m_errorMonitor->ExpectSuccess();
+ err = vkMapMemory(m_device->device(), mem, 5 * atom_size, VK_WHOLE_SIZE, 0, (void **)&pData);
ASSERT_VK_SUCCESS(err);
- vkCmdBindPipeline(commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipelineobj.handle());
- commandBuffer->BindDescriptorSet(descriptorSet);
+ mmr.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
+ mmr.memory = mem;
+ mmr.offset = 6 * atom_size;
+ mmr.size = VK_WHOLE_SIZE;
+ err = vkFlushMappedMemoryRanges(m_device->device(), 1, &mmr);
+ ASSERT_VK_SUCCESS(err);
+ err = vkInvalidateMappedMemoryRanges(m_device->device(), 1, &mmr);
+ ASSERT_VK_SUCCESS(err);
+ m_errorMonitor->VerifyNotFound();
+ vkUnmapMemory(m_device->device(), mem);
+
+ // Map with offset and size
+ // Flush/Invalidate subrange of mapped area with offset and size
+ m_errorMonitor->ExpectSuccess();
+ err = vkMapMemory(m_device->device(), mem, 3 * atom_size, 9 * atom_size, 0, (void **)&pData);
+ ASSERT_VK_SUCCESS(err);
+ mmr.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
+ mmr.memory = mem;
+ mmr.offset = 4 * atom_size;
+ mmr.size = 2 * atom_size;
+ err = vkFlushMappedMemoryRanges(m_device->device(), 1, &mmr);
+ ASSERT_VK_SUCCESS(err);
+ err = vkInvalidateMappedMemoryRanges(m_device->device(), 1, &mmr);
+ ASSERT_VK_SUCCESS(err);
+ m_errorMonitor->VerifyNotFound();
+ vkUnmapMemory(m_device->device(), mem);
+
+ // Map without offset and flush WHOLE_SIZE with two separate offsets
+ m_errorMonitor->ExpectSuccess();
+ err = vkMapMemory(m_device->device(), mem, 0, VK_WHOLE_SIZE, 0, (void **)&pData);
+ ASSERT_VK_SUCCESS(err);
+ mmr.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
+ mmr.memory = mem;
+ mmr.offset = allocation_size - (4 * atom_size);
+ mmr.size = VK_WHOLE_SIZE;
+ err = vkFlushMappedMemoryRanges(m_device->device(), 1, &mmr);
+ ASSERT_VK_SUCCESS(err);
+ mmr.offset = allocation_size - (6 * atom_size);
+ mmr.size = VK_WHOLE_SIZE;
+ err = vkFlushMappedMemoryRanges(m_device->device(), 1, &mmr);
+ ASSERT_VK_SUCCESS(err);
+ m_errorMonitor->VerifyNotFound();
+ vkUnmapMemory(m_device->device(), mem);
+
+ vkFreeMemory(m_device->device(), mem, NULL);
}
-void VkLayerTest::Init(VkPhysicalDeviceFeatures *features, VkPhysicalDeviceFeatures2 *features2,
- const VkCommandPoolCreateFlags flags, void *instance_pnext) {
- InitFramework(myDbgFunc, m_errorMonitor, instance_pnext);
- InitState(features, features2, flags);
+// This is a positive test. We used to expect an error in this case, but the spec now allows it.
+TEST_F(VkPositiveLayerTest, ResetUnsignaledFence) {
+ m_errorMonitor->ExpectSuccess();
+ vk_testing::Fence testFence;
+ VkFenceCreateInfo fenceInfo = {};
+ fenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
+ fenceInfo.pNext = NULL;
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ testFence.init(*m_device, fenceInfo);
+ VkFence fences[1] = {testFence.handle()};
+ VkResult result = vkResetFences(m_device->device(), 1, fences);
+ ASSERT_VK_SUCCESS(result);
+
+ m_errorMonitor->VerifyNotFound();
}
-ErrorMonitor *VkLayerTest::Monitor() { return m_errorMonitor; }
+TEST_F(VkPositiveLayerTest, CommandBufferSimultaneousUseSync) {
+ m_errorMonitor->ExpectSuccess();
-VkCommandBufferObj *VkLayerTest::CommandBuffer() { return m_commandBuffer; }
+ ASSERT_NO_FATAL_FAILURE(Init());
+ VkResult err;
-VkLayerTest::VkLayerTest() {
- m_enableWSI = false;
+ // Record (empty!) command buffer that can be submitted multiple times
+ // simultaneously.
+ VkCommandBufferBeginInfo cbbi = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, nullptr,
+ VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT, nullptr};
+ m_commandBuffer->begin(&cbbi);
+ m_commandBuffer->end();
- m_instance_layer_names.clear();
- m_instance_extension_names.clear();
- m_device_extension_names.clear();
+ VkFenceCreateInfo fci = {VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, nullptr, 0};
+ VkFence fence;
+ err = vkCreateFence(m_device->device(), &fci, nullptr, &fence);
+ ASSERT_VK_SUCCESS(err);
- // Add default instance extensions to the list
- m_instance_extension_names.push_back(VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
+ VkSemaphoreCreateInfo sci = {VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO, nullptr, 0};
+ VkSemaphore s1, s2;
+ err = vkCreateSemaphore(m_device->device(), &sci, nullptr, &s1);
+ ASSERT_VK_SUCCESS(err);
+ err = vkCreateSemaphore(m_device->device(), &sci, nullptr, &s2);
+ ASSERT_VK_SUCCESS(err);
- if (VkTestFramework::m_khronos_layer_disable) {
- m_instance_layer_names.push_back("VK_LAYER_GOOGLE_threading");
- m_instance_layer_names.push_back("VK_LAYER_LUNARG_parameter_validation");
- m_instance_layer_names.push_back("VK_LAYER_LUNARG_object_tracker");
- m_instance_layer_names.push_back("VK_LAYER_LUNARG_core_validation");
- m_instance_layer_names.push_back("VK_LAYER_GOOGLE_unique_objects");
- } else {
- m_instance_layer_names.push_back("VK_LAYER_KHRONOS_validation");
+ // Submit CB once signaling s1, with fence so we can roll forward to its retirement.
+ VkSubmitInfo si = {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 0, nullptr, nullptr, 1, &m_commandBuffer->handle(), 1, &s1};
+ err = vkQueueSubmit(m_device->m_queue, 1, &si, fence);
+ ASSERT_VK_SUCCESS(err);
+
+ // Submit CB again, signaling s2.
+ si.pSignalSemaphores = &s2;
+ err = vkQueueSubmit(m_device->m_queue, 1, &si, VK_NULL_HANDLE);
+ ASSERT_VK_SUCCESS(err);
+
+ // Wait for fence.
+ err = vkWaitForFences(m_device->device(), 1, &fence, VK_TRUE, UINT64_MAX);
+ ASSERT_VK_SUCCESS(err);
+
+    // The command buffer is still in flight from the second submission, but semaphore s1 is no
+    // longer in flight. Delete it.
+ vkDestroySemaphore(m_device->device(), s1, nullptr);
+
+ m_errorMonitor->VerifyNotFound();
+
+ // Force device idle and clean up remaining objects
+ vkDeviceWaitIdle(m_device->device());
+ vkDestroySemaphore(m_device->device(), s2, nullptr);
+ vkDestroyFence(m_device->device(), fence, nullptr);
+}
+
+TEST_F(VkPositiveLayerTest, FenceCreateSignaledWaitHandling) {
+ m_errorMonitor->ExpectSuccess();
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ VkResult err;
+
+ // A fence created signaled
+ VkFenceCreateInfo fci1 = {VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, nullptr, VK_FENCE_CREATE_SIGNALED_BIT};
+ VkFence f1;
+ err = vkCreateFence(m_device->device(), &fci1, nullptr, &f1);
+ ASSERT_VK_SUCCESS(err);
+
+    // A fence created unsignaled
+ VkFenceCreateInfo fci2 = {VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, nullptr, 0};
+ VkFence f2;
+ err = vkCreateFence(m_device->device(), &fci2, nullptr, &f2);
+ ASSERT_VK_SUCCESS(err);
+
+ // Submit the unsignaled fence
+ VkSubmitInfo si = {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 0, nullptr, nullptr, 0, nullptr, 0, nullptr};
+ err = vkQueueSubmit(m_device->m_queue, 1, &si, f2);
+
+ // Wait on both fences, with signaled first.
+ VkFence fences[] = {f1, f2};
+ vkWaitForFences(m_device->device(), 2, fences, VK_TRUE, UINT64_MAX);
+
+ // Should have both retired!
+ vkDestroyFence(m_device->device(), f1, nullptr);
+ vkDestroyFence(m_device->device(), f2, nullptr);
+
+ m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, CreateImageViewFollowsParameterCompatibilityRequirements) {
+ TEST_DESCRIPTION("Verify that creating an ImageView with valid usage does not generate validation errors.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ m_errorMonitor->ExpectSuccess();
+
+ VkImageCreateInfo imgInfo = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+ nullptr,
+ VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT,
+ VK_IMAGE_TYPE_2D,
+ VK_FORMAT_R8G8B8A8_UNORM,
+ {128, 128, 1},
+ 1,
+ 1,
+ VK_SAMPLE_COUNT_1_BIT,
+ VK_IMAGE_TILING_OPTIMAL,
+ VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
+ VK_SHARING_MODE_EXCLUSIVE,
+ 0,
+ nullptr,
+ VK_IMAGE_LAYOUT_UNDEFINED};
+ VkImageObj image(m_device);
+ image.init(&imgInfo);
+ ASSERT_TRUE(image.initialized());
+ VkImageView imageView;
+ VkImageViewCreateInfo ivci = {};
+ ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+ ivci.image = image.handle();
+ ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+ ivci.format = VK_FORMAT_R8G8B8A8_UNORM;
+ ivci.subresourceRange.layerCount = 1;
+ ivci.subresourceRange.baseMipLevel = 0;
+ ivci.subresourceRange.levelCount = 1;
+ ivci.subresourceRange.baseArrayLayer = 0;
+ ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+
+ vkCreateImageView(m_device->device(), &ivci, NULL, &imageView);
+ m_errorMonitor->VerifyNotFound();
+ vkDestroyImageView(m_device->device(), imageView, NULL);
+}
+
+TEST_F(VkPositiveLayerTest, ValidUsage) {
+ TEST_DESCRIPTION("Verify that creating an image view from an image with valid usage doesn't generate validation errors");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ m_errorMonitor->ExpectSuccess();
+ // Verify that we can create a view with usage INPUT_ATTACHMENT
+ VkImageObj image(m_device);
+ image.Init(128, 128, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+ ASSERT_TRUE(image.initialized());
+ VkImageView imageView;
+ VkImageViewCreateInfo ivci = {};
+ ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+ ivci.image = image.handle();
+ ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+ ivci.format = VK_FORMAT_R8G8B8A8_UNORM;
+ ivci.subresourceRange.layerCount = 1;
+ ivci.subresourceRange.baseMipLevel = 0;
+ ivci.subresourceRange.levelCount = 1;
+ ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+
+ vkCreateImageView(m_device->device(), &ivci, NULL, &imageView);
+ m_errorMonitor->VerifyNotFound();
+ vkDestroyImageView(m_device->device(), imageView, NULL);
+}
+
+// This is a positive test. No failures are expected.
+TEST_F(VkPositiveLayerTest, BindSparse) {
+ TEST_DESCRIPTION("Bind 2 memory ranges to one image using vkQueueBindSparse, destroy the image and then free the memory");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ auto index = m_device->graphics_queue_node_index_;
+ if (!(m_device->queue_props[index].queueFlags & VK_QUEUE_SPARSE_BINDING_BIT)) {
+ printf("%s Graphics queue does not have sparse binding bit.\n", kSkipPrefix);
+ return;
}
- if (VkTestFramework::m_devsim_layer) {
- if (InstanceLayerSupported("VK_LAYER_LUNARG_device_simulation")) {
- m_instance_layer_names.push_back("VK_LAYER_LUNARG_device_simulation");
- } else {
- VkTestFramework::m_devsim_layer = false;
- printf(" Did not find VK_LAYER_LUNARG_device_simulation layer so it will not be enabled.\n");
- }
+ if (!m_device->phy().features().sparseBinding) {
+ printf("%s Device does not support sparse bindings.\n", kSkipPrefix);
+ return;
+ }
+
+ m_errorMonitor->ExpectSuccess(VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT);
+
+ VkImage image;
+ VkImageCreateInfo image_create_info = {};
+ image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ image_create_info.pNext = NULL;
+ image_create_info.imageType = VK_IMAGE_TYPE_2D;
+ image_create_info.format = VK_FORMAT_B8G8R8A8_UNORM;
+ image_create_info.extent.width = 64;
+ image_create_info.extent.height = 64;
+ image_create_info.extent.depth = 1;
+ image_create_info.mipLevels = 1;
+ image_create_info.arrayLayers = 1;
+ image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+ image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+ image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
+ image_create_info.flags = VK_IMAGE_CREATE_SPARSE_BINDING_BIT;
+ VkResult err = vkCreateImage(m_device->device(), &image_create_info, NULL, &image);
+ ASSERT_VK_SUCCESS(err);
+
+ VkMemoryRequirements memory_reqs;
+ VkDeviceMemory memory_one, memory_two;
+ bool pass;
+ VkMemoryAllocateInfo memory_info = {};
+ memory_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ memory_info.pNext = NULL;
+ memory_info.allocationSize = 0;
+ memory_info.memoryTypeIndex = 0;
+ vkGetImageMemoryRequirements(m_device->device(), image, &memory_reqs);
+    // Find an image size big enough to allow sparse mapping of 2 memory regions
+ // Increase the image size until it is at least twice the
+ // size of the required alignment, to ensure we can bind both
+ // allocated memory blocks to the image on aligned offsets.
+ while (memory_reqs.size < (memory_reqs.alignment * 2)) {
+ vkDestroyImage(m_device->device(), image, nullptr);
+ image_create_info.extent.width *= 2;
+ image_create_info.extent.height *= 2;
+ err = vkCreateImage(m_device->device(), &image_create_info, nullptr, &image);
+ ASSERT_VK_SUCCESS(err);
+ vkGetImageMemoryRequirements(m_device->device(), image, &memory_reqs);
}
+ // Allocate 2 memory regions of minimum alignment size, bind one at 0, the other
+ // at the end of the first
+ memory_info.allocationSize = memory_reqs.alignment;
+ pass = m_device->phy().set_memory_type(memory_reqs.memoryTypeBits, &memory_info, 0);
+ ASSERT_TRUE(pass);
+ err = vkAllocateMemory(m_device->device(), &memory_info, NULL, &memory_one);
+ ASSERT_VK_SUCCESS(err);
+ err = vkAllocateMemory(m_device->device(), &memory_info, NULL, &memory_two);
+ ASSERT_VK_SUCCESS(err);
+ VkSparseMemoryBind binds[2];
+ binds[0].flags = 0;
+ binds[0].memory = memory_one;
+ binds[0].memoryOffset = 0;
+ binds[0].resourceOffset = 0;
+ binds[0].size = memory_info.allocationSize;
+ binds[1].flags = 0;
+ binds[1].memory = memory_two;
+ binds[1].memoryOffset = 0;
+ binds[1].resourceOffset = memory_info.allocationSize;
+ binds[1].size = memory_info.allocationSize;
+
+ VkSparseImageOpaqueMemoryBindInfo opaqueBindInfo;
+ opaqueBindInfo.image = image;
+ opaqueBindInfo.bindCount = 2;
+ opaqueBindInfo.pBinds = binds;
+
+ VkFence fence = VK_NULL_HANDLE;
+ VkBindSparseInfo bindSparseInfo = {};
+ bindSparseInfo.sType = VK_STRUCTURE_TYPE_BIND_SPARSE_INFO;
+ bindSparseInfo.imageOpaqueBindCount = 1;
+ bindSparseInfo.pImageOpaqueBinds = &opaqueBindInfo;
+
+ vkQueueBindSparse(m_device->m_queue, 1, &bindSparseInfo, fence);
+ vkQueueWaitIdle(m_device->m_queue);
+ vkDestroyImage(m_device->device(), image, NULL);
+ vkFreeMemory(m_device->device(), memory_one, NULL);
+ vkFreeMemory(m_device->device(), memory_two, NULL);
+ m_errorMonitor->VerifyNotFound();
+}
- this->app_info.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
- this->app_info.pNext = NULL;
- this->app_info.pApplicationName = "layer_tests";
- this->app_info.applicationVersion = 1;
- this->app_info.pEngineName = "unittest";
- this->app_info.engineVersion = 1;
- this->app_info.apiVersion = VK_API_VERSION_1_0;
+TEST_F(VkPositiveLayerTest, BindSparseMetadata) {
+ TEST_DESCRIPTION("Bind memory for the metadata aspect of a sparse image");
- m_errorMonitor = new ErrorMonitor;
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ auto index = m_device->graphics_queue_node_index_;
+ if (!(m_device->queue_props[index].queueFlags & VK_QUEUE_SPARSE_BINDING_BIT)) {
+ printf("%s Graphics queue does not have sparse binding bit.\n", kSkipPrefix);
+ return;
+ }
+ if (!m_device->phy().features().sparseResidencyImage2D) {
+ printf("%s Device does not support sparse residency for images.\n", kSkipPrefix);
+ return;
+ }
+
+ m_errorMonitor->ExpectSuccess(VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT);
+
+ // Create a sparse image
+ VkImage image;
+ VkImageCreateInfo image_create_info = {};
+ image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ image_create_info.pNext = NULL;
+ image_create_info.imageType = VK_IMAGE_TYPE_2D;
+ image_create_info.format = VK_FORMAT_B8G8R8A8_UNORM;
+ image_create_info.extent.width = 64;
+ image_create_info.extent.height = 64;
+ image_create_info.extent.depth = 1;
+ image_create_info.mipLevels = 1;
+ image_create_info.arrayLayers = 1;
+ image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+ image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+ image_create_info.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+ image_create_info.flags = VK_IMAGE_CREATE_SPARSE_BINDING_BIT | VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT;
+ VkResult err = vkCreateImage(m_device->device(), &image_create_info, NULL, &image);
+ ASSERT_VK_SUCCESS(err);
+
+ // Query image memory requirements
+ VkMemoryRequirements memory_reqs;
+ vkGetImageMemoryRequirements(m_device->device(), image, &memory_reqs);
+
+ // Query sparse memory requirements
+ uint32_t sparse_reqs_count = 0;
+ vkGetImageSparseMemoryRequirements(m_device->device(), image, &sparse_reqs_count, nullptr);
+ std::vector<VkSparseImageMemoryRequirements> sparse_reqs(sparse_reqs_count);
+ vkGetImageSparseMemoryRequirements(m_device->device(), image, &sparse_reqs_count, sparse_reqs.data());
+
+ // Find requirements for metadata aspect
+ const VkSparseImageMemoryRequirements *metadata_reqs = nullptr;
+ for (auto const &aspect_sparse_reqs : sparse_reqs) {
+ if (aspect_sparse_reqs.formatProperties.aspectMask == VK_IMAGE_ASPECT_METADATA_BIT) {
+ metadata_reqs = &aspect_sparse_reqs;
+ }
+ }
- // Find out what version the instance supports and record the default target instance
- auto enumerateInstanceVersion = (PFN_vkEnumerateInstanceVersion)vkGetInstanceProcAddr(nullptr, "vkEnumerateInstanceVersion");
- if (enumerateInstanceVersion) {
- enumerateInstanceVersion(&m_instance_api_version);
+ if (!metadata_reqs) {
+ printf("%s Sparse image does not require memory for metadata.\n", kSkipPrefix);
} else {
- m_instance_api_version = VK_API_VERSION_1_0;
+ // Allocate memory for the metadata
+ VkDeviceMemory metadata_memory = VK_NULL_HANDLE;
+ VkMemoryAllocateInfo metadata_memory_info = {};
+ metadata_memory_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ metadata_memory_info.allocationSize = metadata_reqs->imageMipTailSize;
+ m_device->phy().set_memory_type(memory_reqs.memoryTypeBits, &metadata_memory_info, 0);
+ err = vkAllocateMemory(m_device->device(), &metadata_memory_info, NULL, &metadata_memory);
+ ASSERT_VK_SUCCESS(err);
+
+ // Bind metadata
+ VkSparseMemoryBind sparse_bind = {};
+ sparse_bind.resourceOffset = metadata_reqs->imageMipTailOffset;
+ sparse_bind.size = metadata_reqs->imageMipTailSize;
+ sparse_bind.memory = metadata_memory;
+ sparse_bind.memoryOffset = 0;
+ sparse_bind.flags = VK_SPARSE_MEMORY_BIND_METADATA_BIT;
+
+ VkSparseImageOpaqueMemoryBindInfo opaque_bind_info = {};
+ opaque_bind_info.image = image;
+ opaque_bind_info.bindCount = 1;
+ opaque_bind_info.pBinds = &sparse_bind;
+
+ VkBindSparseInfo bind_info = {};
+ bind_info.sType = VK_STRUCTURE_TYPE_BIND_SPARSE_INFO;
+ bind_info.imageOpaqueBindCount = 1;
+ bind_info.pImageOpaqueBinds = &opaque_bind_info;
+
+ vkQueueBindSparse(m_device->m_queue, 1, &bind_info, VK_NULL_HANDLE);
+ m_errorMonitor->VerifyNotFound();
+
+ // Cleanup
+ vkQueueWaitIdle(m_device->m_queue);
+ vkFreeMemory(m_device->device(), metadata_memory, NULL);
}
- m_target_api_version = app_info.apiVersion;
+
+ vkDestroyImage(m_device->device(), image, NULL);
}
-bool VkLayerTest::AddSurfaceInstanceExtension() {
- m_enableWSI = true;
- if (!InstanceExtensionSupported(VK_KHR_SURFACE_EXTENSION_NAME)) {
- printf("%s VK_KHR_SURFACE_EXTENSION_NAME extension not supported\n", kSkipPrefix);
- return false;
+TEST_F(VkPositiveLayerTest, FramebufferBindingDestroyCommandPool) {
+ TEST_DESCRIPTION(
+ "This test should pass. Create a Framebuffer and command buffer, bind them together, then destroy command pool and "
+ "framebuffer and verify there are no errors.");
+
+ m_errorMonitor->ExpectSuccess();
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ // A renderpass with one color attachment.
+ VkAttachmentDescription attachment = {0,
+ VK_FORMAT_R8G8B8A8_UNORM,
+ VK_SAMPLE_COUNT_1_BIT,
+ VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+ VK_ATTACHMENT_STORE_OP_STORE,
+ VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+ VK_ATTACHMENT_STORE_OP_DONT_CARE,
+ VK_IMAGE_LAYOUT_UNDEFINED,
+ VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
+
+ VkAttachmentReference att_ref = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
+
+ VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, &att_ref, nullptr, nullptr, 0, nullptr};
+
+ VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, &attachment, 1, &subpass, 0, nullptr};
+
+ VkRenderPass rp;
+ VkResult err = vkCreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
+ ASSERT_VK_SUCCESS(err);
+
+ // A compatible framebuffer.
+ VkImageObj image(m_device);
+ image.Init(32, 32, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+ ASSERT_TRUE(image.initialized());
+
+ VkImageView view = image.targetView(VK_FORMAT_R8G8B8A8_UNORM);
+
+ VkFramebufferCreateInfo fci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 1, &view, 32, 32, 1};
+ VkFramebuffer fb;
+ err = vkCreateFramebuffer(m_device->device(), &fci, nullptr, &fb);
+ ASSERT_VK_SUCCESS(err);
+
+ // Explicitly create a command buffer to bind the FB to so that we can then
+ // destroy the command pool in order to implicitly free command buffer
+ VkCommandPool command_pool;
+ VkCommandPoolCreateInfo pool_create_info{};
+ pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+ pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
+ pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
+ vkCreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool);
+
+ VkCommandBuffer command_buffer;
+ VkCommandBufferAllocateInfo command_buffer_allocate_info{};
+ command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+ command_buffer_allocate_info.commandPool = command_pool;
+ command_buffer_allocate_info.commandBufferCount = 1;
+ command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+ vkAllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, &command_buffer);
+
+ // Begin our cmd buffer with renderpass using our framebuffer
+ VkRenderPassBeginInfo rpbi = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, nullptr, rp, fb, {{0, 0}, {32, 32}}, 0, nullptr};
+ VkCommandBufferBeginInfo begin_info{};
+ begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+ vkBeginCommandBuffer(command_buffer, &begin_info);
+
+ vkCmdBeginRenderPass(command_buffer, &rpbi, VK_SUBPASS_CONTENTS_INLINE);
+ vkCmdEndRenderPass(command_buffer);
+ vkEndCommandBuffer(command_buffer);
+ // Destroy command pool to implicitly free command buffer
+ vkDestroyCommandPool(m_device->device(), command_pool, NULL);
+ vkDestroyFramebuffer(m_device->device(), fb, nullptr);
+ vkDestroyRenderPass(m_device->device(), rp, nullptr);
+ m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, FramebufferCreateDepthStencilLayoutTransitionForDepthOnlyImageView) {
+ TEST_DESCRIPTION(
+ "Validate that when an imageView of a depth/stencil image is used as a depth/stencil framebuffer attachment, the "
+ "aspectMask is ignored and both depth and stencil image subresources are used.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ VkFormatProperties format_properties;
+ vkGetPhysicalDeviceFormatProperties(gpu(), VK_FORMAT_D32_SFLOAT_S8_UINT, &format_properties);
+ if (!(format_properties.optimalTilingFeatures & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT)) {
+ printf("%s Image format does not support sampling.\n", kSkipPrefix);
+ return;
}
- m_instance_extension_names.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
- bool bSupport = false;
-#if defined(VK_USE_PLATFORM_WIN32_KHR)
- if (!InstanceExtensionSupported(VK_KHR_WIN32_SURFACE_EXTENSION_NAME)) {
- printf("%s VK_KHR_WIN32_SURFACE_EXTENSION_NAME extension not supported\n", kSkipPrefix);
- return false;
+ m_errorMonitor->ExpectSuccess();
+
+ VkAttachmentDescription attachment = {0,
+ VK_FORMAT_D32_SFLOAT_S8_UINT,
+ VK_SAMPLE_COUNT_1_BIT,
+ VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+ VK_ATTACHMENT_STORE_OP_STORE,
+ VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+ VK_ATTACHMENT_STORE_OP_DONT_CARE,
+ VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
+ VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL};
+
+ VkAttachmentReference att_ref = {0, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL};
+
+ VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 0, nullptr, nullptr, &att_ref, 0, nullptr};
+
+ VkSubpassDependency dep = {0,
+ 0,
+ VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+ VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+ VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
+ VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
+ VK_DEPENDENCY_BY_REGION_BIT};
+
+ VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, &attachment, 1, &subpass, 1, &dep};
+
+ VkResult err;
+ VkRenderPass rp;
+ err = vkCreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
+ ASSERT_VK_SUCCESS(err);
+
+ VkImageObj image(m_device);
+ image.InitNoLayout(32, 32, 1, VK_FORMAT_D32_SFLOAT_S8_UINT,
+                       0x26,  // usage: DEPTH_STENCIL_ATTACHMENT | SAMPLED | TRANSFER_DST
+ VK_IMAGE_TILING_OPTIMAL, 0);
+ ASSERT_TRUE(image.initialized());
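+    // 0x6 == VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT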
+ image.SetLayout(0x6, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL);
+
+ VkImageViewCreateInfo ivci = {
+ VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
+ nullptr,
+ 0,
+ image.handle(),
+ VK_IMAGE_VIEW_TYPE_2D,
+ VK_FORMAT_D32_SFLOAT_S8_UINT,
+ {VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_G, VK_COMPONENT_SWIZZLE_B, VK_COMPONENT_SWIZZLE_A},
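+        // Depth-only aspect (0x2 == VK_IMAGE_ASPECT_DEPTH_BIT); the stencil aspect is intentionally omitted from the view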
+ {0x2, 0, 1, 0, 1},
+ };
+ VkImageView view;
+ err = vkCreateImageView(m_device->device(), &ivci, nullptr, &view);
+ ASSERT_VK_SUCCESS(err);
+
+ VkFramebufferCreateInfo fci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 1, &view, 32, 32, 1};
+ VkFramebuffer fb;
+ err = vkCreateFramebuffer(m_device->device(), &fci, nullptr, &fb);
+ ASSERT_VK_SUCCESS(err);
+
+ m_commandBuffer->begin();
+
+ VkImageMemoryBarrier imb = {};
+ imb.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+ imb.pNext = nullptr;
+ imb.srcAccessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
+ imb.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
+ imb.oldLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
+ imb.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+ imb.srcQueueFamilyIndex = 0;
+ imb.dstQueueFamilyIndex = 0;
+ imb.image = image.handle();
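+    // A combined depth/stencil image requires both aspects in the barrier (0x6 == DEPTH_BIT | STENCIL_BIT)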
+ imb.subresourceRange.aspectMask = 0x6;
+ imb.subresourceRange.baseMipLevel = 0;
+ imb.subresourceRange.levelCount = 0x1;
+ imb.subresourceRange.baseArrayLayer = 0;
+ imb.subresourceRange.layerCount = 0x1;
+
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
+ VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1, &imb);
+
+ m_commandBuffer->end();
+ m_commandBuffer->QueueCommandBuffer(false);
+ m_errorMonitor->VerifyNotFound();
+
+ vkDestroyFramebuffer(m_device->device(), fb, nullptr);
+ vkDestroyRenderPass(m_device->device(), rp, nullptr);
+ vkDestroyImageView(m_device->device(), view, nullptr);
+}
+
+// This is a positive test. No errors should be generated.
+TEST_F(VkPositiveLayerTest, BarrierLayoutToImageUsage) {
+ TEST_DESCRIPTION("Ensure barriers' new and old VkImageLayout are compatible with their images' VkImageUsageFlags");
+
+ m_errorMonitor->ExpectSuccess();
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ auto depth_format = FindSupportedDepthStencilFormat(gpu());
+ if (!depth_format) {
+ printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
+ return;
}
- m_instance_extension_names.push_back(VK_KHR_WIN32_SURFACE_EXTENSION_NAME);
- bSupport = true;
-#endif
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkImageMemoryBarrier img_barrier = {};
+ img_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+ img_barrier.pNext = NULL;
+ img_barrier.srcAccessMask = VK_ACCESS_HOST_WRITE_BIT;
+ img_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
+ img_barrier.oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+ img_barrier.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+ img_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+ img_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+ img_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ img_barrier.subresourceRange.baseArrayLayer = 0;
+ img_barrier.subresourceRange.baseMipLevel = 0;
+ img_barrier.subresourceRange.layerCount = 1;
+ img_barrier.subresourceRange.levelCount = 1;
-#if defined(VK_USE_PLATFORM_ANDROID_KHR) && defined(VALIDATION_APK)
- if (!InstanceExtensionSupported(VK_KHR_ANDROID_SURFACE_EXTENSION_NAME)) {
- printf("%s VK_KHR_ANDROID_SURFACE_EXTENSION_NAME extension not supported\n", kSkipPrefix);
- return false;
+ {
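+ // Create one image per usage class so each layout below can be matched against its image's usage.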
+ VkImageObj img_color(m_device);
+ img_color.Init(128, 128, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL);
+ ASSERT_TRUE(img_color.initialized());
+
+ VkImageObj img_ds1(m_device);
+ img_ds1.Init(128, 128, 1, depth_format, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL);
+ ASSERT_TRUE(img_ds1.initialized());
+
+ VkImageObj img_ds2(m_device);
+ img_ds2.Init(128, 128, 1, depth_format, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL);
+ ASSERT_TRUE(img_ds2.initialized());
+
+ VkImageObj img_xfer_src(m_device);
+ img_xfer_src.Init(128, 128, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, VK_IMAGE_TILING_OPTIMAL);
+ ASSERT_TRUE(img_xfer_src.initialized());
+
+ VkImageObj img_xfer_dst(m_device);
+ img_xfer_dst.Init(128, 128, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL);
+ ASSERT_TRUE(img_xfer_dst.initialized());
+
+ VkImageObj img_sampled(m_device);
+ img_sampled.Init(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_TILING_OPTIMAL);
+ ASSERT_TRUE(img_sampled.initialized());
+
+ VkImageObj img_input(m_device);
+ img_input.Init(128, 128, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL);
+ ASSERT_TRUE(img_input.initialized());
+
+ const struct {
+ VkImageObj &image_obj;
+ VkImageLayout old_layout;
+ VkImageLayout new_layout;
+ } buffer_layouts[] = {
+ // clang-format off
+ {img_color, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL},
+ {img_ds1, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL},
+ {img_ds2, VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL},
+ {img_sampled, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL},
+ {img_input, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL},
+ {img_xfer_src, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL},
+ {img_xfer_dst, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL},
+ // clang-format on
+ };
+ const uint32_t layout_count = sizeof(buffer_layouts) / sizeof(buffer_layouts[0]);
+
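+ // Transition every image from its usage-matched layout to GENERAL and back; none of these barriers should produce errors.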
+ m_commandBuffer->begin();
+ for (uint32_t i = 0; i < layout_count; ++i) {
+ img_barrier.image = buffer_layouts[i].image_obj.handle();
+ const VkImageUsageFlags usage = buffer_layouts[i].image_obj.usage();
+ img_barrier.subresourceRange.aspectMask = (usage == VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)
+ ? (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)
+ : VK_IMAGE_ASPECT_COLOR_BIT;
+
+ img_barrier.oldLayout = buffer_layouts[i].old_layout;
+ img_barrier.newLayout = buffer_layouts[i].new_layout;
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, 0, 0,
+ nullptr, 0, nullptr, 1, &img_barrier);
+
+ img_barrier.oldLayout = buffer_layouts[i].new_layout;
+ img_barrier.newLayout = buffer_layouts[i].old_layout;
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, 0, 0,
+ nullptr, 0, nullptr, 1, &img_barrier);
+ }
+ m_commandBuffer->end();
+
+ img_barrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
+ img_barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
}
- m_instance_extension_names.push_back(VK_KHR_ANDROID_SURFACE_EXTENSION_NAME);
- bSupport = true;
-#endif
+ m_errorMonitor->VerifyNotFound();
+}
-#if defined(VK_USE_PLATFORM_XLIB_KHR)
- if (!InstanceExtensionSupported(VK_KHR_XLIB_SURFACE_EXTENSION_NAME)) {
- printf("%s VK_KHR_XLIB_SURFACE_EXTENSION_NAME extension not supported\n", kSkipPrefix);
- return false;
+// This is a positive test. No errors should be generated.
+TEST_F(VkPositiveLayerTest, WaitEventThenSet) {
+ TEST_DESCRIPTION("Wait on a event then set it after the wait has been submitted.");
+
+ m_errorMonitor->ExpectSuccess();
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ VkEvent event;
+ VkEventCreateInfo event_create_info{};
+ event_create_info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
+ vkCreateEvent(m_device->device(), &event_create_info, nullptr, &event);
+
+ VkCommandPool command_pool;
+ VkCommandPoolCreateInfo pool_create_info{};
+ pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+ pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
+ pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
+ vkCreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool);
+
+ VkCommandBuffer command_buffer;
+ VkCommandBufferAllocateInfo command_buffer_allocate_info{};
+ command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+ command_buffer_allocate_info.commandPool = command_pool;
+ command_buffer_allocate_info.commandBufferCount = 1;
+ command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+ vkAllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, &command_buffer);
+
+ VkQueue queue = VK_NULL_HANDLE;
+ vkGetDeviceQueue(m_device->device(), m_device->graphics_queue_node_index_, 0, &queue);
+
+ {
+ VkCommandBufferBeginInfo begin_info{};
+ begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+ vkBeginCommandBuffer(command_buffer, &begin_info);
+
+ vkCmdWaitEvents(command_buffer, 1, &event, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, nullptr, 0,
+ nullptr, 0, nullptr);
+ vkCmdResetEvent(command_buffer, event, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT);
+ vkEndCommandBuffer(command_buffer);
}
- if (XOpenDisplay(NULL)) {
- m_instance_extension_names.push_back(VK_KHR_XLIB_SURFACE_EXTENSION_NAME);
- bSupport = true;
+ {
+ VkSubmitInfo submit_info{};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &command_buffer;
+ submit_info.signalSemaphoreCount = 0;
+ submit_info.pSignalSemaphores = nullptr;
+ vkQueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);
}
-#endif
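+ // The wait has been submitted, so the host can now set the event to unblock it.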
+ { vkSetEvent(m_device->device(), event); }
-#if defined(VK_USE_PLATFORM_XCB_KHR)
- if (!InstanceExtensionSupported(VK_KHR_XCB_SURFACE_EXTENSION_NAME)) {
- printf("%s VK_KHR_XCB_SURFACE_EXTENSION_NAME extension not supported\n", kSkipPrefix);
- return false;
+ vkQueueWaitIdle(queue);
+
+ vkDestroyEvent(m_device->device(), event, nullptr);
+ vkFreeCommandBuffers(m_device->device(), command_pool, 1, &command_buffer);
+ vkDestroyCommandPool(m_device->device(), command_pool, NULL);
+
+ m_errorMonitor->VerifyNotFound();
+}
+
+// This is a positive test. No errors should be generated.
+TEST_F(VkPositiveLayerTest, QueryAndCopySecondaryCommandBuffers) {
+ TEST_DESCRIPTION("Issue a query on a secondary command buffer and copy it on a primary.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ if ((m_device->queue_props.empty()) || (m_device->queue_props[0].queueCount < 2)) {
+ printf("%s Queue family needs to have multiple queues to run this test.\n", kSkipPrefix);
+ return;
}
- if (!bSupport && xcb_connect(NULL, NULL)) {
- m_instance_extension_names.push_back(VK_KHR_XCB_SURFACE_EXTENSION_NAME);
- bSupport = true;
+
+ m_errorMonitor->ExpectSuccess();
+
+ VkQueryPool query_pool;
+ VkQueryPoolCreateInfo query_pool_create_info{};
+ query_pool_create_info.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
+ query_pool_create_info.queryType = VK_QUERY_TYPE_TIMESTAMP;
+ query_pool_create_info.queryCount = 1;
+ vkCreateQueryPool(m_device->device(), &query_pool_create_info, nullptr, &query_pool);
+
+ VkCommandPoolObj command_pool(m_device, m_device->graphics_queue_node_index_, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
+ VkCommandBufferObj primary_buffer(m_device, &command_pool);
+ VkCommandBufferObj secondary_buffer(m_device, &command_pool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
+
+ VkQueue queue = VK_NULL_HANDLE;
+ vkGetDeviceQueue(m_device->device(), m_device->graphics_queue_node_index_, 1, &queue);
+
+ uint32_t qfi = 0;
+ VkBufferCreateInfo buff_create_info = {};
+ buff_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+ buff_create_info.size = 1024;
+ buff_create_info.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+ buff_create_info.queueFamilyIndexCount = 1;
+ buff_create_info.pQueueFamilyIndices = &qfi;
+
+ VkResult err;
+ VkBuffer buffer;
+ err = vkCreateBuffer(m_device->device(), &buff_create_info, NULL, &buffer);
+ ASSERT_VK_SUCCESS(err);
+
+ VkMemoryRequirements memReqs;
+ vkGetBufferMemoryRequirements(m_device->device(), buffer, &memReqs);
+ VkMemoryAllocateInfo mem_alloc = {};
+ mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ mem_alloc.pNext = NULL;
+ mem_alloc.allocationSize = memReqs.size;
+ mem_alloc.memoryTypeIndex = 0;
+ bool pass = m_device->phy().set_memory_type(memReqs.memoryTypeBits, &mem_alloc, 0);
+ if (!pass) {
+ printf("%s Failed to allocate memory.\n", kSkipPrefix);
+ vkDestroyBuffer(m_device->device(), buffer, NULL);
+ return;
}
-#endif
- if (bSupport) return true;
- printf("%s No platform's surface extension supported\n", kSkipPrefix);
- return false;
-}
+ VkDeviceMemory mem;
+ err = vkAllocateMemory(m_device->device(), &mem_alloc, NULL, &mem);
+ ASSERT_VK_SUCCESS(err);
+ err = vkBindBufferMemory(m_device->device(), buffer, mem, 0);
+ ASSERT_VK_SUCCESS(err);
-bool VkLayerTest::AddSwapchainDeviceExtension() {
- if (!DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SWAPCHAIN_EXTENSION_NAME)) {
- printf("%s VK_KHR_SWAPCHAIN_EXTENSION_NAME extension not supported\n", kSkipPrefix);
- return false;
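+ // Secondary command buffers must be given inheritance info when recording begins.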
+ VkCommandBufferInheritanceInfo hinfo = {};
+ hinfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
+ hinfo.renderPass = VK_NULL_HANDLE;
+ hinfo.subpass = 0;
+ hinfo.framebuffer = VK_NULL_HANDLE;
+ hinfo.occlusionQueryEnable = VK_FALSE;
+ hinfo.queryFlags = 0;
+ hinfo.pipelineStatistics = 0;
+
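+ // Record the query on the secondary command buffer, then execute it from the primary and copy the result into the buffer.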
+ {
+ VkCommandBufferBeginInfo begin_info{};
+ begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+ begin_info.pInheritanceInfo = &hinfo;
+ secondary_buffer.begin(&begin_info);
+ vkCmdResetQueryPool(secondary_buffer.handle(), query_pool, 0, 1);
+ vkCmdWriteTimestamp(secondary_buffer.handle(), VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, query_pool, 0);
+ secondary_buffer.end();
+
+ primary_buffer.begin();
+ vkCmdExecuteCommands(primary_buffer.handle(), 1, &secondary_buffer.handle());
+ vkCmdCopyQueryPoolResults(primary_buffer.handle(), query_pool, 0, 1, buffer, 0, 0, 0);
+ primary_buffer.end();
}
- m_device_extension_names.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);
- return true;
+
+ primary_buffer.QueueCommandBuffer();
+ vkQueueWaitIdle(queue);
+
+ vkDestroyQueryPool(m_device->device(), query_pool, nullptr);
+ vkDestroyBuffer(m_device->device(), buffer, NULL);
+ vkFreeMemory(m_device->device(), mem, NULL);
+
+ m_errorMonitor->VerifyNotFound();
}
-uint32_t VkLayerTest::SetTargetApiVersion(uint32_t target_api_version) {
- if (target_api_version == 0) target_api_version = VK_API_VERSION_1_0;
- if (target_api_version <= m_instance_api_version) {
- m_target_api_version = target_api_version;
- app_info.apiVersion = m_target_api_version;
+// This is a positive test. No errors should be generated.
+TEST_F(VkPositiveLayerTest, QueryAndCopyMultipleCommandBuffers) {
+ TEST_DESCRIPTION("Issue a query and copy from it on a second command buffer.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ if ((m_device->queue_props.empty()) || (m_device->queue_props[0].queueCount < 2)) {
+ printf("%s Queue family needs to have multiple queues to run this test.\n", kSkipPrefix);
+ return;
+ }
+
+ m_errorMonitor->ExpectSuccess();
+
+ VkQueryPool query_pool;
+ VkQueryPoolCreateInfo query_pool_create_info{};
+ query_pool_create_info.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
+ query_pool_create_info.queryType = VK_QUERY_TYPE_TIMESTAMP;
+ query_pool_create_info.queryCount = 1;
+ vkCreateQueryPool(m_device->device(), &query_pool_create_info, nullptr, &query_pool);
+
+ VkCommandPool command_pool;
+ VkCommandPoolCreateInfo pool_create_info{};
+ pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+ pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
+ pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
+ vkCreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool);
+
+ VkCommandBuffer command_buffer[2];
+ VkCommandBufferAllocateInfo command_buffer_allocate_info{};
+ command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+ command_buffer_allocate_info.commandPool = command_pool;
+ command_buffer_allocate_info.commandBufferCount = 2;
+ command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+ vkAllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, command_buffer);
+
+ VkQueue queue = VK_NULL_HANDLE;
+ vkGetDeviceQueue(m_device->device(), m_device->graphics_queue_node_index_, 1, &queue);
+
+ uint32_t qfi = 0;
+ VkBufferCreateInfo buff_create_info = {};
+ buff_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+ buff_create_info.size = 1024;
+ buff_create_info.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+ buff_create_info.queueFamilyIndexCount = 1;
+ buff_create_info.pQueueFamilyIndices = &qfi;
+
+ VkResult err;
+ VkBuffer buffer;
+ err = vkCreateBuffer(m_device->device(), &buff_create_info, NULL, &buffer);
+ ASSERT_VK_SUCCESS(err);
+
+ VkMemoryRequirements memReqs;
+ vkGetBufferMemoryRequirements(m_device->device(), buffer, &memReqs);
+ VkMemoryAllocateInfo mem_alloc = {};
+ mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ mem_alloc.pNext = NULL;
+ mem_alloc.allocationSize = memReqs.size;
+ mem_alloc.memoryTypeIndex = 0;
+ bool pass = m_device->phy().set_memory_type(memReqs.memoryTypeBits, &mem_alloc, 0);
+ if (!pass) {
+ printf("%s Failed to allocate memory.\n", kSkipPrefix);
+ vkDestroyBuffer(m_device->device(), buffer, NULL);
+ return;
+ }
+
+ VkDeviceMemory mem;
+ err = vkAllocateMemory(m_device->device(), &mem_alloc, NULL, &mem);
+ ASSERT_VK_SUCCESS(err);
+ err = vkBindBufferMemory(m_device->device(), buffer, mem, 0);
+ ASSERT_VK_SUCCESS(err);
+
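+ // The first command buffer writes the timestamp and the second copies the result; both are submitted in one batch.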
+ {
+ VkCommandBufferBeginInfo begin_info{};
+ begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+ vkBeginCommandBuffer(command_buffer[0], &begin_info);
+
+ vkCmdResetQueryPool(command_buffer[0], query_pool, 0, 1);
+ vkCmdWriteTimestamp(command_buffer[0], VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, query_pool, 0);
+
+ vkEndCommandBuffer(command_buffer[0]);
+
+ vkBeginCommandBuffer(command_buffer[1], &begin_info);
+
+ vkCmdCopyQueryPoolResults(command_buffer[1], query_pool, 0, 1, buffer, 0, 0, 0);
+
+ vkEndCommandBuffer(command_buffer[1]);
}
- return m_target_api_version;
+ {
+ VkSubmitInfo submit_info{};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 2;
+ submit_info.pCommandBuffers = command_buffer;
+ submit_info.signalSemaphoreCount = 0;
+ submit_info.pSignalSemaphores = nullptr;
+ vkQueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);
+ }
+
+ vkQueueWaitIdle(queue);
+
+ vkDestroyQueryPool(m_device->device(), query_pool, nullptr);
+ vkFreeCommandBuffers(m_device->device(), command_pool, 2, command_buffer);
+ vkDestroyCommandPool(m_device->device(), command_pool, NULL);
+ vkDestroyBuffer(m_device->device(), buffer, NULL);
+ vkFreeMemory(m_device->device(), mem, NULL);
+
+ m_errorMonitor->VerifyNotFound();
}
-uint32_t VkLayerTest::DeviceValidationVersion() {
- // The validation layers, assume the version we are validating to is the apiVersion unless the device apiVersion is lower
- VkPhysicalDeviceProperties props;
- GetPhysicalDeviceProperties(&props);
- return std::min(m_target_api_version, props.apiVersion);
+
+TEST_F(VkLayerTest, ResetEventThenSet) {
+ TEST_DESCRIPTION("Reset an event then set it after the reset has been submitted.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ VkEvent event;
+ VkEventCreateInfo event_create_info{};
+ event_create_info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
+ vkCreateEvent(m_device->device(), &event_create_info, nullptr, &event);
+
+ VkCommandPool command_pool;
+ VkCommandPoolCreateInfo pool_create_info{};
+ pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+ pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
+ pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
+ vkCreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool);
+
+ VkCommandBuffer command_buffer;
+ VkCommandBufferAllocateInfo command_buffer_allocate_info{};
+ command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+ command_buffer_allocate_info.commandPool = command_pool;
+ command_buffer_allocate_info.commandBufferCount = 1;
+ command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+ vkAllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, &command_buffer);
+
+ VkQueue queue = VK_NULL_HANDLE;
+ vkGetDeviceQueue(m_device->device(), m_device->graphics_queue_node_index_, 0, &queue);
+
+ {
+ VkCommandBufferBeginInfo begin_info{};
+ begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+ vkBeginCommandBuffer(command_buffer, &begin_info);
+
+ vkCmdResetEvent(command_buffer, event, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT);
+ vkEndCommandBuffer(command_buffer);
+ }
+ {
+ VkSubmitInfo submit_info{};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &command_buffer;
+ submit_info.signalSemaphoreCount = 0;
+ submit_info.pSignalSemaphores = nullptr;
+ vkQueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);
+ }
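+ // Setting the event from the host while the submitted command buffer still uses it should be flagged.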
+ {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "that is already in use by a command buffer.");
+ vkSetEvent(m_device->device(), event);
+ m_errorMonitor->VerifyFound();
+ }
+
+ vkQueueWaitIdle(queue);
+
+ vkDestroyEvent(m_device->device(), event, nullptr);
+ vkFreeCommandBuffers(m_device->device(), command_pool, 1, &command_buffer);
+ vkDestroyCommandPool(m_device->device(), command_pool, NULL);
}
-bool VkLayerTest::LoadDeviceProfileLayer(
- PFN_vkSetPhysicalDeviceFormatPropertiesEXT &fpvkSetPhysicalDeviceFormatPropertiesEXT,
- PFN_vkGetOriginalPhysicalDeviceFormatPropertiesEXT &fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT) {
- // Load required functions
- fpvkSetPhysicalDeviceFormatPropertiesEXT =
- (PFN_vkSetPhysicalDeviceFormatPropertiesEXT)vkGetInstanceProcAddr(instance(), "vkSetPhysicalDeviceFormatPropertiesEXT");
- fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT = (PFN_vkGetOriginalPhysicalDeviceFormatPropertiesEXT)vkGetInstanceProcAddr(
- instance(), "vkGetOriginalPhysicalDeviceFormatPropertiesEXT");
+// This is a positive test. No errors should be generated.
+TEST_F(VkPositiveLayerTest, TwoFencesThreeFrames) {
+ TEST_DESCRIPTION(
+ "Two command buffers with two separate fences are each run through a Submit & WaitForFences cycle 3 times. This previously "
+ "revealed a bug so running this positive test to prevent a regression.");
+ m_errorMonitor->ExpectSuccess();
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ VkQueue queue = VK_NULL_HANDLE;
+ vkGetDeviceQueue(m_device->device(), m_device->graphics_queue_node_index_, 0, &queue);
+
+ static const uint32_t NUM_OBJECTS = 2;
+ static const uint32_t NUM_FRAMES = 3;
+ VkCommandBuffer cmd_buffers[NUM_OBJECTS] = {};
+ VkFence fences[NUM_OBJECTS] = {};
+
+ VkCommandPool cmd_pool;
+ VkCommandPoolCreateInfo cmd_pool_ci = {};
+ cmd_pool_ci.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+ cmd_pool_ci.queueFamilyIndex = m_device->graphics_queue_node_index_;
+ cmd_pool_ci.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
+ VkResult err = vkCreateCommandPool(m_device->device(), &cmd_pool_ci, nullptr, &cmd_pool);
+ ASSERT_VK_SUCCESS(err);
- if (!(fpvkSetPhysicalDeviceFormatPropertiesEXT) || !(fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT)) {
- printf("%s Can't find device_profile_api functions; skipped.\n", kSkipPrefix);
- return 0;
+ VkCommandBufferAllocateInfo cmd_buf_info = {};
+ cmd_buf_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+ cmd_buf_info.commandPool = cmd_pool;
+ cmd_buf_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+ cmd_buf_info.commandBufferCount = 1;
+
+ VkFenceCreateInfo fence_ci = {};
+ fence_ci.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
+ fence_ci.pNext = nullptr;
+ fence_ci.flags = 0;
+
+ for (uint32_t i = 0; i < NUM_OBJECTS; ++i) {
+ err = vkAllocateCommandBuffers(m_device->device(), &cmd_buf_info, &cmd_buffers[i]);
+ ASSERT_VK_SUCCESS(err);
+ err = vkCreateFence(m_device->device(), &fence_ci, nullptr, &fences[i]);
+ ASSERT_VK_SUCCESS(err);
}
- return 1;
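+ // Cycle each command buffer/fence pair through submit, wait, and reset for three frames.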
+ for (uint32_t frame = 0; frame < NUM_FRAMES; ++frame) {
+ for (uint32_t obj = 0; obj < NUM_OBJECTS; ++obj) {
+ // Create empty cmd buffer
+ VkCommandBufferBeginInfo cmdBufBeginDesc = {};
+ cmdBufBeginDesc.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+
+ err = vkBeginCommandBuffer(cmd_buffers[obj], &cmdBufBeginDesc);
+ ASSERT_VK_SUCCESS(err);
+ err = vkEndCommandBuffer(cmd_buffers[obj]);
+ ASSERT_VK_SUCCESS(err);
+
+ VkSubmitInfo submit_info = {};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &cmd_buffers[obj];
+ // Submit cmd buffer and wait for fence
+ err = vkQueueSubmit(queue, 1, &submit_info, fences[obj]);
+ ASSERT_VK_SUCCESS(err);
+ err = vkWaitForFences(m_device->device(), 1, &fences[obj], VK_TRUE, UINT64_MAX);
+ ASSERT_VK_SUCCESS(err);
+ err = vkResetFences(m_device->device(), 1, &fences[obj]);
+ ASSERT_VK_SUCCESS(err);
+ }
+ }
+ m_errorMonitor->VerifyNotFound();
+ vkDestroyCommandPool(m_device->device(), cmd_pool, NULL);
+ for (uint32_t i = 0; i < NUM_OBJECTS; ++i) {
+ vkDestroyFence(m_device->device(), fences[i], nullptr);
+ }
}
+// This is a positive test. No errors should be generated.
+TEST_F(VkPositiveLayerTest, TwoQueueSubmitsSeparateQueuesWithSemaphoreAndOneFenceQWI) {
+ TEST_DESCRIPTION(
+ "Two command buffers, each in a separate QueueSubmit call submitted on separate queues followed by a QueueWaitIdle.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ if ((m_device->queue_props.empty()) || (m_device->queue_props[0].queueCount < 2)) {
+ printf("%s Queue family needs to have multiple queues to run this test.\n", kSkipPrefix);
+ return;
+ }
+
+ m_errorMonitor->ExpectSuccess();
+
+ VkSemaphore semaphore;
+ VkSemaphoreCreateInfo semaphore_create_info{};
+ semaphore_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
+ vkCreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore);
+
+ VkCommandPool command_pool;
+ VkCommandPoolCreateInfo pool_create_info{};
+ pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+ pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
+ pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
+ vkCreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool);
+
+ VkCommandBuffer command_buffer[2];
+ VkCommandBufferAllocateInfo command_buffer_allocate_info{};
+ command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+ command_buffer_allocate_info.commandPool = command_pool;
+ command_buffer_allocate_info.commandBufferCount = 2;
+ command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+ vkAllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, command_buffer);
+
+ VkQueue queue = VK_NULL_HANDLE;
+ vkGetDeviceQueue(m_device->device(), m_device->graphics_queue_node_index_, 1, &queue);
+
+ {
+ VkCommandBufferBeginInfo begin_info{};
+ begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+ vkBeginCommandBuffer(command_buffer[0], &begin_info);
+
+ vkCmdPipelineBarrier(command_buffer[0], VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
+ nullptr, 0, nullptr, 0, nullptr);
+
+ VkViewport viewport{};
+ viewport.maxDepth = 1.0f;
+ viewport.minDepth = 0.0f;
+ viewport.width = 512;
+ viewport.height = 512;
+ viewport.x = 0;
+ viewport.y = 0;
+ vkCmdSetViewport(command_buffer[0], 0, 1, &viewport);
+ vkEndCommandBuffer(command_buffer[0]);
+ }
+ {
+ VkCommandBufferBeginInfo begin_info{};
+ begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+ vkBeginCommandBuffer(command_buffer[1], &begin_info);
+
+ VkViewport viewport{};
+ viewport.maxDepth = 1.0f;
+ viewport.minDepth = 0.0f;
+ viewport.width = 512;
+ viewport.height = 512;
+ viewport.x = 0;
+ viewport.y = 0;
+ vkCmdSetViewport(command_buffer[1], 0, 1, &viewport);
+ vkEndCommandBuffer(command_buffer[1]);
+ }
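+ // The first submission (on queue index 1) signals the semaphore; the second, on the default queue, waits on it.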
+ {
+ VkSubmitInfo submit_info{};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &command_buffer[0];
+ submit_info.signalSemaphoreCount = 1;
+ submit_info.pSignalSemaphores = &semaphore;
+ vkQueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);
+ }
+ {
+ VkPipelineStageFlags flags[]{VK_PIPELINE_STAGE_ALL_COMMANDS_BIT};
+ VkSubmitInfo submit_info{};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &command_buffer[1];
+ submit_info.waitSemaphoreCount = 1;
+ submit_info.pWaitSemaphores = &semaphore;
+ submit_info.pWaitDstStageMask = flags;
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+ }
+
+ vkQueueWaitIdle(m_device->m_queue);
-VkLayerTest::~VkLayerTest() {
- // Clean up resources before we reset
- delete m_errorMonitor;
+ vkDestroySemaphore(m_device->device(), semaphore, nullptr);
+ vkFreeCommandBuffers(m_device->device(), command_pool, 2, &command_buffer[0]);
+ vkDestroyCommandPool(m_device->device(), command_pool, NULL);
+
+ m_errorMonitor->VerifyNotFound();
}
-bool VkBufferTest::GetTestConditionValid(VkDeviceObj *aVulkanDevice, eTestEnFlags aTestFlag, VkBufferUsageFlags aBufferUsage) {
- if (eInvalidDeviceOffset != aTestFlag && eInvalidMemoryOffset != aTestFlag) {
- return true;
+// This is a positive test. No errors should be generated.
+TEST_F(VkPositiveLayerTest, TwoQueueSubmitsSeparateQueuesWithSemaphoreAndOneFenceQWIFence) {
+ TEST_DESCRIPTION(
+ "Two command buffers, each in a separate QueueSubmit call submitted on separate queues, the second having a fence followed "
+ "by a QueueWaitIdle.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ if ((m_device->queue_props.empty()) || (m_device->queue_props[0].queueCount < 2)) {
+ printf("%s Queue family needs to have multiple queues to run this test.\n", kSkipPrefix);
+ return;
}
- VkDeviceSize offset_limit = 0;
- if (eInvalidMemoryOffset == aTestFlag) {
- VkBuffer vulkanBuffer;
- VkBufferCreateInfo buffer_create_info = {};
- buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
- buffer_create_info.size = 32;
- buffer_create_info.usage = aBufferUsage;
-
- vkCreateBuffer(aVulkanDevice->device(), &buffer_create_info, nullptr, &vulkanBuffer);
- VkMemoryRequirements memory_reqs = {};
-
- vkGetBufferMemoryRequirements(aVulkanDevice->device(), vulkanBuffer, &memory_reqs);
- vkDestroyBuffer(aVulkanDevice->device(), vulkanBuffer, nullptr);
- offset_limit = memory_reqs.alignment;
- } else if ((VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT | VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT) & aBufferUsage) {
- offset_limit = aVulkanDevice->props.limits.minTexelBufferOffsetAlignment;
- } else if (VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT & aBufferUsage) {
- offset_limit = aVulkanDevice->props.limits.minUniformBufferOffsetAlignment;
- } else if (VK_BUFFER_USAGE_STORAGE_BUFFER_BIT & aBufferUsage) {
- offset_limit = aVulkanDevice->props.limits.minStorageBufferOffsetAlignment;
- }
- return eOffsetAlignment < offset_limit;
-}
-
-VkBufferTest::VkBufferTest(VkDeviceObj *aVulkanDevice, VkBufferUsageFlags aBufferUsage, eTestEnFlags aTestFlag)
- : AllocateCurrent(true),
- BoundCurrent(false),
- CreateCurrent(false),
- InvalidDeleteEn(false),
- VulkanDevice(aVulkanDevice->device()) {
- if (eBindNullBuffer == aTestFlag || eBindFakeBuffer == aTestFlag) {
- VkMemoryAllocateInfo memory_allocate_info = {};
- memory_allocate_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
- memory_allocate_info.allocationSize = 1; // fake size -- shouldn't matter for the test
- memory_allocate_info.memoryTypeIndex = 0; // fake type -- shouldn't matter for the test
- vkAllocateMemory(VulkanDevice, &memory_allocate_info, nullptr, &VulkanMemory);
-
- VulkanBuffer = (aTestFlag == eBindNullBuffer) ? VK_NULL_HANDLE : (VkBuffer)0xCDCDCDCDCDCDCDCD;
-
- vkBindBufferMemory(VulkanDevice, VulkanBuffer, VulkanMemory, 0);
- } else {
- VkBufferCreateInfo buffer_create_info = {};
- buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
- buffer_create_info.size = 32;
- buffer_create_info.usage = aBufferUsage;
- vkCreateBuffer(VulkanDevice, &buffer_create_info, nullptr, &VulkanBuffer);
+ m_errorMonitor->ExpectSuccess();
- CreateCurrent = true;
+ VkFence fence;
+ VkFenceCreateInfo fence_create_info{};
+ fence_create_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
+ vkCreateFence(m_device->device(), &fence_create_info, nullptr, &fence);
- VkMemoryRequirements memory_requirements;
- vkGetBufferMemoryRequirements(VulkanDevice, VulkanBuffer, &memory_requirements);
+ VkSemaphore semaphore;
+ VkSemaphoreCreateInfo semaphore_create_info{};
+ semaphore_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
+ vkCreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore);
- VkMemoryAllocateInfo memory_allocate_info = {};
- memory_allocate_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
- memory_allocate_info.allocationSize = memory_requirements.size + eOffsetAlignment;
- bool pass = aVulkanDevice->phy().set_memory_type(memory_requirements.memoryTypeBits, &memory_allocate_info,
- VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
- if (!pass) {
- CreateCurrent = false;
- vkDestroyBuffer(VulkanDevice, VulkanBuffer, nullptr);
- return;
- }
+ VkCommandPool command_pool;
+ VkCommandPoolCreateInfo pool_create_info{};
+ pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+ pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
+ pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
+ vkCreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool);
- vkAllocateMemory(VulkanDevice, &memory_allocate_info, NULL, &VulkanMemory);
- // NB: 1 is intentionally an invalid offset value
- const bool offset_en = eInvalidDeviceOffset == aTestFlag || eInvalidMemoryOffset == aTestFlag;
- vkBindBufferMemory(VulkanDevice, VulkanBuffer, VulkanMemory, offset_en ? eOffsetAlignment : 0);
- BoundCurrent = true;
+ VkCommandBuffer command_buffer[2];
+ VkCommandBufferAllocateInfo command_buffer_allocate_info{};
+ command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+ command_buffer_allocate_info.commandPool = command_pool;
+ command_buffer_allocate_info.commandBufferCount = 2;
+ command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+ vkAllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, command_buffer);
+
+ VkQueue queue = VK_NULL_HANDLE;
+ vkGetDeviceQueue(m_device->device(), m_device->graphics_queue_node_index_, 1, &queue);
- InvalidDeleteEn = (eFreeInvalidHandle == aTestFlag);
+ {
+ VkCommandBufferBeginInfo begin_info{};
+ begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+ vkBeginCommandBuffer(command_buffer[0], &begin_info);
+
+ vkCmdPipelineBarrier(command_buffer[0], VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
+ nullptr, 0, nullptr, 0, nullptr);
+
+ VkViewport viewport{};
+ viewport.maxDepth = 1.0f;
+ viewport.minDepth = 0.0f;
+ viewport.width = 512;
+ viewport.height = 512;
+ viewport.x = 0;
+ viewport.y = 0;
+ vkCmdSetViewport(command_buffer[0], 0, 1, &viewport);
+ vkEndCommandBuffer(command_buffer[0]);
}
+ {
+ VkCommandBufferBeginInfo begin_info{};
+ begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+ vkBeginCommandBuffer(command_buffer[1], &begin_info);
+
+ VkViewport viewport{};
+ viewport.maxDepth = 1.0f;
+ viewport.minDepth = 0.0f;
+ viewport.width = 512;
+ viewport.height = 512;
+ viewport.x = 0;
+ viewport.y = 0;
+ vkCmdSetViewport(command_buffer[1], 0, 1, &viewport);
+ vkEndCommandBuffer(command_buffer[1]);
+ }
+ {
+ VkSubmitInfo submit_info{};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &command_buffer[0];
+ submit_info.signalSemaphoreCount = 1;
+ submit_info.pSignalSemaphores = &semaphore;
+ vkQueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);
+ }
+ {
+ VkPipelineStageFlags flags[]{VK_PIPELINE_STAGE_ALL_COMMANDS_BIT};
+ VkSubmitInfo submit_info{};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &command_buffer[1];
+ submit_info.waitSemaphoreCount = 1;
+ submit_info.pWaitSemaphores = &semaphore;
+ submit_info.pWaitDstStageMask = flags;
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, fence);
+ }
+
+ vkQueueWaitIdle(m_device->m_queue);
+
+ vkDestroyFence(m_device->device(), fence, nullptr);
+ vkDestroySemaphore(m_device->device(), semaphore, nullptr);
+ vkFreeCommandBuffers(m_device->device(), command_pool, 2, &command_buffer[0]);
+ vkDestroyCommandPool(m_device->device(), command_pool, NULL);
+
+ m_errorMonitor->VerifyNotFound();
}
-VkBufferTest::~VkBufferTest() {
- if (CreateCurrent) {
- vkDestroyBuffer(VulkanDevice, VulkanBuffer, nullptr);
+// This is a positive test. No errors should be generated.
+TEST_F(VkPositiveLayerTest, TwoQueueSubmitsSeparateQueuesWithSemaphoreAndOneFenceTwoWFF) {
+ TEST_DESCRIPTION(
+ "Two command buffers, each in a separate QueueSubmit call submitted on separate queues, the second having a fence followed "
+ "by two consecutive WaitForFences calls on the same fence.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ if ((m_device->queue_props.empty()) || (m_device->queue_props[0].queueCount < 2)) {
+ printf("%s Queue family needs to have multiple queues to run this test.\n", kSkipPrefix);
+ return;
}
- if (AllocateCurrent) {
- if (InvalidDeleteEn) {
- auto bad_memory = CastFromUint64<VkDeviceMemory>(CastToUint64(VulkanMemory) + 1);
- vkFreeMemory(VulkanDevice, bad_memory, nullptr);
- }
- vkFreeMemory(VulkanDevice, VulkanMemory, nullptr);
+
+ m_errorMonitor->ExpectSuccess();
+
+ VkFence fence;
+ VkFenceCreateInfo fence_create_info{};
+ fence_create_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
+ vkCreateFence(m_device->device(), &fence_create_info, nullptr, &fence);
+
+ VkSemaphore semaphore;
+ VkSemaphoreCreateInfo semaphore_create_info{};
+ semaphore_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
+ vkCreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore);
+
+ VkCommandPool command_pool;
+ VkCommandPoolCreateInfo pool_create_info{};
+ pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+ pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
+ pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
+ vkCreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool);
+
+ VkCommandBuffer command_buffer[2];
+ VkCommandBufferAllocateInfo command_buffer_allocate_info{};
+ command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+ command_buffer_allocate_info.commandPool = command_pool;
+ command_buffer_allocate_info.commandBufferCount = 2;
+ command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+ vkAllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, command_buffer);
+
+ VkQueue queue = VK_NULL_HANDLE;
+ vkGetDeviceQueue(m_device->device(), m_device->graphics_queue_node_index_, 1, &queue);
+
+ {
+ VkCommandBufferBeginInfo begin_info{};
+ begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+ vkBeginCommandBuffer(command_buffer[0], &begin_info);
+
+ vkCmdPipelineBarrier(command_buffer[0], VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
+ nullptr, 0, nullptr, 0, nullptr);
+
+ VkViewport viewport{};
+ viewport.maxDepth = 1.0f;
+ viewport.minDepth = 0.0f;
+ viewport.width = 512;
+ viewport.height = 512;
+ viewport.x = 0;
+ viewport.y = 0;
+ vkCmdSetViewport(command_buffer[0], 0, 1, &viewport);
+ vkEndCommandBuffer(command_buffer[0]);
+ }
+ {
+ VkCommandBufferBeginInfo begin_info{};
+ begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+ vkBeginCommandBuffer(command_buffer[1], &begin_info);
+
+ VkViewport viewport{};
+ viewport.maxDepth = 1.0f;
+ viewport.minDepth = 0.0f;
+ viewport.width = 512;
+ viewport.height = 512;
+ viewport.x = 0;
+ viewport.y = 0;
+ vkCmdSetViewport(command_buffer[1], 0, 1, &viewport);
+ vkEndCommandBuffer(command_buffer[1]);
+ }
+ {
+ VkSubmitInfo submit_info{};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &command_buffer[0];
+ submit_info.signalSemaphoreCount = 1;
+ submit_info.pSignalSemaphores = &semaphore;
+ vkQueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);
+ }
+ {
+ VkPipelineStageFlags flags[]{VK_PIPELINE_STAGE_ALL_COMMANDS_BIT};
+ VkSubmitInfo submit_info{};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &command_buffer[1];
+ submit_info.waitSemaphoreCount = 1;
+ submit_info.pWaitSemaphores = &semaphore;
+ submit_info.pWaitDstStageMask = flags;
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, fence);
}
-}
-bool VkBufferTest::GetBufferCurrent() { return AllocateCurrent && BoundCurrent && CreateCurrent; }
+ vkWaitForFences(m_device->device(), 1, &fence, VK_TRUE, UINT64_MAX);
+ vkWaitForFences(m_device->device(), 1, &fence, VK_TRUE, UINT64_MAX);
-const VkBuffer &VkBufferTest::GetBuffer() { return VulkanBuffer; }
+ vkDestroyFence(m_device->device(), fence, nullptr);
+ vkDestroySemaphore(m_device->device(), semaphore, nullptr);
+ vkFreeCommandBuffers(m_device->device(), command_pool, 2, &command_buffer[0]);
+ vkDestroyCommandPool(m_device->device(), command_pool, NULL);
-void VkBufferTest::TestDoubleDestroy() {
- // Destroy the buffer but leave the flag set, which will cause
- // the buffer to be destroyed again in the destructor.
- vkDestroyBuffer(VulkanDevice, VulkanBuffer, nullptr);
+ m_errorMonitor->VerifyNotFound();
}
-uint32_t VkVerticesObj::BindIdGenerator;
+TEST_F(VkPositiveLayerTest, TwoQueuesEnsureCorrectRetirementWithWorkStolen) {
+ ASSERT_NO_FATAL_FAILURE(Init());
+ if ((m_device->queue_props.empty()) || (m_device->queue_props[0].queueCount < 2)) {
+ printf("%s Test requires two queues, skipping\n", kSkipPrefix);
+ return;
+ }
-VkVerticesObj::VkVerticesObj(VkDeviceObj *aVulkanDevice, unsigned aAttributeCount, unsigned aBindingCount, unsigned aByteStride,
- VkDeviceSize aVertexCount, const float *aVerticies)
- : BoundCurrent(false),
- AttributeCount(aAttributeCount),
- BindingCount(aBindingCount),
- BindId(BindIdGenerator),
- PipelineVertexInputStateCreateInfo(),
- VulkanMemoryBuffer(aVulkanDevice, static_cast<int>(aByteStride * aVertexCount), reinterpret_cast<const void *>(aVerticies),
- VK_BUFFER_USAGE_VERTEX_BUFFER_BIT) {
- BindIdGenerator++; // NB: This can wrap w/misuse
-
- VertexInputAttributeDescription = new VkVertexInputAttributeDescription[AttributeCount];
- VertexInputBindingDescription = new VkVertexInputBindingDescription[BindingCount];
-
- PipelineVertexInputStateCreateInfo.pVertexAttributeDescriptions = VertexInputAttributeDescription;
- PipelineVertexInputStateCreateInfo.vertexAttributeDescriptionCount = AttributeCount;
- PipelineVertexInputStateCreateInfo.pVertexBindingDescriptions = VertexInputBindingDescription;
- PipelineVertexInputStateCreateInfo.vertexBindingDescriptionCount = BindingCount;
- PipelineVertexInputStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
-
- unsigned i = 0;
- do {
- VertexInputAttributeDescription[i].binding = BindId;
- VertexInputAttributeDescription[i].location = i;
- VertexInputAttributeDescription[i].format = VK_FORMAT_R32G32B32_SFLOAT;
- VertexInputAttributeDescription[i].offset = sizeof(float) * aByteStride;
- i++;
- } while (AttributeCount < i);
-
- i = 0;
- do {
- VertexInputBindingDescription[i].binding = BindId;
- VertexInputBindingDescription[i].stride = aByteStride;
- VertexInputBindingDescription[i].inputRate = VK_VERTEX_INPUT_RATE_VERTEX;
- i++;
- } while (BindingCount < i);
-}
-
-VkVerticesObj::~VkVerticesObj() {
- if (VertexInputAttributeDescription) {
- delete[] VertexInputAttributeDescription;
- }
- if (VertexInputBindingDescription) {
- delete[] VertexInputBindingDescription;
- }
-}
-
-bool VkVerticesObj::AddVertexInputToPipe(VkPipelineObj &aPipelineObj) {
- aPipelineObj.AddVertexInputAttribs(VertexInputAttributeDescription, AttributeCount);
- aPipelineObj.AddVertexInputBindings(VertexInputBindingDescription, BindingCount);
- return true;
+ VkResult err;
+
+ m_errorMonitor->ExpectSuccess();
+
+ VkQueue q0 = m_device->m_queue;
+ VkQueue q1 = nullptr;
+ vkGetDeviceQueue(m_device->device(), m_device->graphics_queue_node_index_, 1, &q1);
+ ASSERT_NE(q1, nullptr);
+
+ // An (empty) command buffer. We must have work in the first submission --
+ // the layer treats unfenced work differently from fenced work.
+ VkCommandPoolCreateInfo cpci = {VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO, nullptr, 0, 0};
+ VkCommandPool pool;
+ err = vkCreateCommandPool(m_device->device(), &cpci, nullptr, &pool);
+ ASSERT_VK_SUCCESS(err);
+ VkCommandBufferAllocateInfo cbai = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, nullptr, pool,
+ VK_COMMAND_BUFFER_LEVEL_PRIMARY, 1};
+ VkCommandBuffer cb;
+ err = vkAllocateCommandBuffers(m_device->device(), &cbai, &cb);
+ ASSERT_VK_SUCCESS(err);
+ VkCommandBufferBeginInfo cbbi = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, nullptr, 0, nullptr};
+ err = vkBeginCommandBuffer(cb, &cbbi);
+ ASSERT_VK_SUCCESS(err);
+ err = vkEndCommandBuffer(cb);
+ ASSERT_VK_SUCCESS(err);
+
+ // A semaphore
+ VkSemaphoreCreateInfo sci = {VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO, nullptr, 0};
+ VkSemaphore s;
+ err = vkCreateSemaphore(m_device->device(), &sci, nullptr, &s);
+ ASSERT_VK_SUCCESS(err);
+
+ // First submission, to q0
+ VkSubmitInfo s0 = {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 0, nullptr, nullptr, 1, &cb, 1, &s};
+
+ err = vkQueueSubmit(q0, 1, &s0, VK_NULL_HANDLE);
+ ASSERT_VK_SUCCESS(err);
+
+ // Second submission, to q1, waiting on s
+ VkFlags waitmask = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT; // doesn't really matter what this value is.
+ VkSubmitInfo s1 = {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 1, &s, &waitmask, 0, nullptr, 0, nullptr};
+
+ err = vkQueueSubmit(q1, 1, &s1, VK_NULL_HANDLE);
+ ASSERT_VK_SUCCESS(err);
+
+ // Wait for q0 idle
+ err = vkQueueWaitIdle(q0);
+ ASSERT_VK_SUCCESS(err);
+
+ // The command buffer has completed (it was submitted to q0), so it can be freed.
+ vkFreeCommandBuffers(m_device->device(), pool, 1, &cb);
+
+ m_errorMonitor->VerifyNotFound();
+
+ // Force device completely idle and clean up resources
+ vkDeviceWaitIdle(m_device->device());
+ vkDestroyCommandPool(m_device->device(), pool, nullptr);
+ vkDestroySemaphore(m_device->device(), s, nullptr);
}
-bool VkVerticesObj::AddVertexInputToPipeHelpr(CreatePipelineHelper *pipelineHelper) {
- pipelineHelper->vi_ci_.pVertexBindingDescriptions = VertexInputBindingDescription;
- pipelineHelper->vi_ci_.vertexBindingDescriptionCount = BindingCount;
- pipelineHelper->vi_ci_.pVertexAttributeDescriptions = VertexInputAttributeDescription;
- pipelineHelper->vi_ci_.vertexAttributeDescriptionCount = AttributeCount;
- return true;
+// This is a positive test. No errors should be generated.
+TEST_F(VkPositiveLayerTest, TwoQueueSubmitsSeparateQueuesWithSemaphoreAndOneFence) {
+ TEST_DESCRIPTION(
+ "Two command buffers, each in a separate QueueSubmit call submitted on separate queues, the second having a fence, "
+ "followed by a WaitForFences call.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ if ((m_device->queue_props.empty()) || (m_device->queue_props[0].queueCount < 2)) {
+ printf("%s Queue family needs to have multiple queues to run this test.\n", kSkipPrefix);
+ return;
+ }
+
+ m_errorMonitor->ExpectSuccess();
+
+ VkFence fence;
+ VkFenceCreateInfo fence_create_info{};
+ fence_create_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
+ vkCreateFence(m_device->device(), &fence_create_info, nullptr, &fence);
+
+ VkSemaphore semaphore;
+ VkSemaphoreCreateInfo semaphore_create_info{};
+ semaphore_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
+ vkCreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore);
+
+ VkCommandPool command_pool;
+ VkCommandPoolCreateInfo pool_create_info{};
+ pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+ pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
+ pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
+ vkCreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool);
+
+ VkCommandBuffer command_buffer[2];
+ VkCommandBufferAllocateInfo command_buffer_allocate_info{};
+ command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+ command_buffer_allocate_info.commandPool = command_pool;
+ command_buffer_allocate_info.commandBufferCount = 2;
+ command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+ vkAllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, command_buffer);
+
+ VkQueue queue = VK_NULL_HANDLE;
+ vkGetDeviceQueue(m_device->device(), m_device->graphics_queue_node_index_, 1, &queue);
+
+ {
+ VkCommandBufferBeginInfo begin_info{};
+ begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+ vkBeginCommandBuffer(command_buffer[0], &begin_info);
+
+ vkCmdPipelineBarrier(command_buffer[0], VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
+ nullptr, 0, nullptr, 0, nullptr);
+
+ VkViewport viewport{};
+ viewport.maxDepth = 1.0f;
+ viewport.minDepth = 0.0f;
+ viewport.width = 512;
+ viewport.height = 512;
+ viewport.x = 0;
+ viewport.y = 0;
+ vkCmdSetViewport(command_buffer[0], 0, 1, &viewport);
+ vkEndCommandBuffer(command_buffer[0]);
+ }
+ {
+ VkCommandBufferBeginInfo begin_info{};
+ begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+ vkBeginCommandBuffer(command_buffer[1], &begin_info);
+
+ VkViewport viewport{};
+ viewport.maxDepth = 1.0f;
+ viewport.minDepth = 0.0f;
+ viewport.width = 512;
+ viewport.height = 512;
+ viewport.x = 0;
+ viewport.y = 0;
+ vkCmdSetViewport(command_buffer[1], 0, 1, &viewport);
+ vkEndCommandBuffer(command_buffer[1]);
+ }
+ {
+ VkSubmitInfo submit_info{};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &command_buffer[0];
+ submit_info.signalSemaphoreCount = 1;
+ submit_info.pSignalSemaphores = &semaphore;
+ vkQueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);
+ }
+ {
+ VkPipelineStageFlags flags[]{VK_PIPELINE_STAGE_ALL_COMMANDS_BIT};
+ VkSubmitInfo submit_info{};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &command_buffer[1];
+ submit_info.waitSemaphoreCount = 1;
+ submit_info.pWaitSemaphores = &semaphore;
+ submit_info.pWaitDstStageMask = flags;
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, fence);
+ }
+
+ vkWaitForFences(m_device->device(), 1, &fence, VK_TRUE, UINT64_MAX);
+
+ vkDestroyFence(m_device->device(), fence, nullptr);
+ vkDestroySemaphore(m_device->device(), semaphore, nullptr);
+ vkFreeCommandBuffers(m_device->device(), command_pool, 2, &command_buffer[0]);
+ vkDestroyCommandPool(m_device->device(), command_pool, NULL);
+
+ m_errorMonitor->VerifyNotFound();
}
-void VkVerticesObj::BindVertexBuffers(VkCommandBuffer aCommandBuffer, unsigned aOffsetCount, VkDeviceSize *aOffsetList) {
- VkDeviceSize *offsetList;
- unsigned offsetCount;
+// This is a positive test. No errors should be generated.
+TEST_F(VkPositiveLayerTest, TwoQueueSubmitsOneQueueWithSemaphoreAndOneFence) {
+ TEST_DESCRIPTION(
+ "Two command buffers, each in a separate QueueSubmit call on the same queue, sharing a signal/wait semaphore, the second "
+ "having a fence, followed by a WaitForFences call.");
+
+ m_errorMonitor->ExpectSuccess();
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ VkFence fence;
+ VkFenceCreateInfo fence_create_info{};
+ fence_create_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
+ vkCreateFence(m_device->device(), &fence_create_info, nullptr, &fence);
+
+ VkSemaphore semaphore;
+ VkSemaphoreCreateInfo semaphore_create_info{};
+ semaphore_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
+ vkCreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore);
+
+ VkCommandPool command_pool;
+ VkCommandPoolCreateInfo pool_create_info{};
+ pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+ pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
+ pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
+ vkCreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool);
+
+ VkCommandBuffer command_buffer[2];
+ VkCommandBufferAllocateInfo command_buffer_allocate_info{};
+ command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+ command_buffer_allocate_info.commandPool = command_pool;
+ command_buffer_allocate_info.commandBufferCount = 2;
+ command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+ vkAllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, command_buffer);
- if (aOffsetCount) {
- offsetList = aOffsetList;
- offsetCount = aOffsetCount;
- } else {
- offsetList = new VkDeviceSize[1]();
- offsetCount = 1;
+ {
+ VkCommandBufferBeginInfo begin_info{};
+ begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+ vkBeginCommandBuffer(command_buffer[0], &begin_info);
+
+ vkCmdPipelineBarrier(command_buffer[0], VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
+ nullptr, 0, nullptr, 0, nullptr);
+
+ VkViewport viewport{};
+ viewport.maxDepth = 1.0f;
+ viewport.minDepth = 0.0f;
+ viewport.width = 512;
+ viewport.height = 512;
+ viewport.x = 0;
+ viewport.y = 0;
+ vkCmdSetViewport(command_buffer[0], 0, 1, &viewport);
+ vkEndCommandBuffer(command_buffer[0]);
+ }
+ {
+ VkCommandBufferBeginInfo begin_info{};
+ begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+ vkBeginCommandBuffer(command_buffer[1], &begin_info);
+
+ VkViewport viewport{};
+ viewport.maxDepth = 1.0f;
+ viewport.minDepth = 0.0f;
+ viewport.width = 512;
+ viewport.height = 512;
+ viewport.x = 0;
+ viewport.y = 0;
+ vkCmdSetViewport(command_buffer[1], 0, 1, &viewport);
+ vkEndCommandBuffer(command_buffer[1]);
+ }
+ {
+ VkSubmitInfo submit_info{};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &command_buffer[0];
+ submit_info.signalSemaphoreCount = 1;
+ submit_info.pSignalSemaphores = &semaphore;
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+ }
+ {
+ VkPipelineStageFlags flags[]{VK_PIPELINE_STAGE_ALL_COMMANDS_BIT};
+ VkSubmitInfo submit_info{};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &command_buffer[1];
+ submit_info.waitSemaphoreCount = 1;
+ submit_info.pWaitSemaphores = &semaphore;
+ submit_info.pWaitDstStageMask = flags;
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, fence);
}
- vkCmdBindVertexBuffers(aCommandBuffer, BindId, offsetCount, &VulkanMemoryBuffer.handle(), offsetList);
- BoundCurrent = true;
+ vkWaitForFences(m_device->device(), 1, &fence, VK_TRUE, UINT64_MAX);
- if (!aOffsetCount) {
- delete[] offsetList;
+ vkDestroyFence(m_device->device(), fence, nullptr);
+ vkDestroySemaphore(m_device->device(), semaphore, nullptr);
+ vkFreeCommandBuffers(m_device->device(), command_pool, 2, &command_buffer[0]);
+ vkDestroyCommandPool(m_device->device(), command_pool, NULL);
+
+ m_errorMonitor->VerifyNotFound();
+}
+
+// This is a positive test. No errors should be generated.
+TEST_F(VkPositiveLayerTest, TwoQueueSubmitsOneQueueNullQueueSubmitWithFence) {
+ TEST_DESCRIPTION(
+ "Two command buffers, each in a separate QueueSubmit call on the same queue, no fences, followed by a third QueueSubmit "
+ "with NO SubmitInfos but with a fence, followed by a WaitForFences call.");
+
+ m_errorMonitor->ExpectSuccess();
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ VkFence fence;
+ VkFenceCreateInfo fence_create_info{};
+ fence_create_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
+ vkCreateFence(m_device->device(), &fence_create_info, nullptr, &fence);
+
+ VkCommandPool command_pool;
+ VkCommandPoolCreateInfo pool_create_info{};
+ pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+ pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
+ pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
+ vkCreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool);
+
+ VkCommandBuffer command_buffer[2];
+ VkCommandBufferAllocateInfo command_buffer_allocate_info{};
+ command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+ command_buffer_allocate_info.commandPool = command_pool;
+ command_buffer_allocate_info.commandBufferCount = 2;
+ command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+ vkAllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, command_buffer);
+
+ {
+ VkCommandBufferBeginInfo begin_info{};
+ begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+ vkBeginCommandBuffer(command_buffer[0], &begin_info);
+
+ vkCmdPipelineBarrier(command_buffer[0], VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
+ nullptr, 0, nullptr, 0, nullptr);
+
+ VkViewport viewport{};
+ viewport.maxDepth = 1.0f;
+ viewport.minDepth = 0.0f;
+ viewport.width = 512;
+ viewport.height = 512;
+ viewport.x = 0;
+ viewport.y = 0;
+ vkCmdSetViewport(command_buffer[0], 0, 1, &viewport);
+ vkEndCommandBuffer(command_buffer[0]);
+ }
+ {
+ VkCommandBufferBeginInfo begin_info{};
+ begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+ vkBeginCommandBuffer(command_buffer[1], &begin_info);
+
+ VkViewport viewport{};
+ viewport.maxDepth = 1.0f;
+ viewport.minDepth = 0.0f;
+ viewport.width = 512;
+ viewport.height = 512;
+ viewport.x = 0;
+ viewport.y = 0;
+ vkCmdSetViewport(command_buffer[1], 0, 1, &viewport);
+ vkEndCommandBuffer(command_buffer[1]);
}
+ {
+ VkSubmitInfo submit_info{};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &command_buffer[0];
+ submit_info.signalSemaphoreCount = 0;
+ submit_info.pSignalSemaphores = nullptr;
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+ }
+ {
+ VkPipelineStageFlags flags[]{VK_PIPELINE_STAGE_ALL_COMMANDS_BIT};
+ VkSubmitInfo submit_info{};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &command_buffer[1];
+ submit_info.waitSemaphoreCount = 0;
+ submit_info.pWaitSemaphores = nullptr;
+ submit_info.pWaitDstStageMask = flags;
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+ }
+
+ vkQueueSubmit(m_device->m_queue, 0, NULL, fence);
+
+ VkResult err = vkWaitForFences(m_device->device(), 1, &fence, VK_TRUE, UINT64_MAX);
+ ASSERT_VK_SUCCESS(err);
+
+ vkDestroyFence(m_device->device(), fence, nullptr);
+ vkFreeCommandBuffers(m_device->device(), command_pool, 2, &command_buffer[0]);
+ vkDestroyCommandPool(m_device->device(), command_pool, NULL);
+
+ m_errorMonitor->VerifyNotFound();
}
-OneOffDescriptorSet::OneOffDescriptorSet(VkDeviceObj *device, const Bindings &bindings,
- VkDescriptorSetLayoutCreateFlags layout_flags, void *layout_pnext,
- VkDescriptorPoolCreateFlags poolFlags, void *allocate_pnext)
- : device_{device}, pool_{}, layout_(device, bindings, layout_flags, layout_pnext), set_{} {
- VkResult err;
+// This is a positive test. No errors should be generated.
+TEST_F(VkPositiveLayerTest, TwoQueueSubmitsOneQueueOneFence) {
+ TEST_DESCRIPTION(
+ "Two command buffers, each in a separate QueueSubmit call on the same queue, the second having a fence, followed by a "
+ "WaitForFences call.");
+
+ m_errorMonitor->ExpectSuccess();
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ VkFence fence;
+ VkFenceCreateInfo fence_create_info{};
+ fence_create_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
+ vkCreateFence(m_device->device(), &fence_create_info, nullptr, &fence);
+
+ VkCommandPool command_pool;
+ VkCommandPoolCreateInfo pool_create_info{};
+ pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+ pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
+ pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
+ vkCreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool);
+
+ VkCommandBuffer command_buffer[2];
+ VkCommandBufferAllocateInfo command_buffer_allocate_info{};
+ command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+ command_buffer_allocate_info.commandPool = command_pool;
+ command_buffer_allocate_info.commandBufferCount = 2;
+ command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+ vkAllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, command_buffer);
- std::vector<VkDescriptorPoolSize> sizes;
- for (const auto &b : bindings) sizes.push_back({b.descriptorType, std::max(1u, b.descriptorCount)});
+ {
+ VkCommandBufferBeginInfo begin_info{};
+ begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+ vkBeginCommandBuffer(command_buffer[0], &begin_info);
+
+ vkCmdPipelineBarrier(command_buffer[0], VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
+ nullptr, 0, nullptr, 0, nullptr);
+
+ VkViewport viewport{};
+ viewport.maxDepth = 1.0f;
+ viewport.minDepth = 0.0f;
+ viewport.width = 512;
+ viewport.height = 512;
+ viewport.x = 0;
+ viewport.y = 0;
+ vkCmdSetViewport(command_buffer[0], 0, 1, &viewport);
+ vkEndCommandBuffer(command_buffer[0]);
+ }
+ {
+ VkCommandBufferBeginInfo begin_info{};
+ begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+ vkBeginCommandBuffer(command_buffer[1], &begin_info);
+
+ VkViewport viewport{};
+ viewport.maxDepth = 1.0f;
+ viewport.minDepth = 0.0f;
+ viewport.width = 512;
+ viewport.height = 512;
+ viewport.x = 0;
+ viewport.y = 0;
+ vkCmdSetViewport(command_buffer[1], 0, 1, &viewport);
+ vkEndCommandBuffer(command_buffer[1]);
+ }
+ {
+ VkSubmitInfo submit_info{};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &command_buffer[0];
+ submit_info.signalSemaphoreCount = 0;
+ submit_info.pSignalSemaphores = VK_NULL_HANDLE;
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+ }
+ {
+ VkPipelineStageFlags flags[]{VK_PIPELINE_STAGE_ALL_COMMANDS_BIT};
+ VkSubmitInfo submit_info{};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &command_buffer[1];
+ submit_info.waitSemaphoreCount = 0;
+ submit_info.pWaitSemaphores = VK_NULL_HANDLE;
+ submit_info.pWaitDstStageMask = flags;
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, fence);
+ }
- VkDescriptorPoolCreateInfo dspci = {
- VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO, nullptr, poolFlags, 1, uint32_t(sizes.size()), sizes.data()};
- err = vkCreateDescriptorPool(device_->handle(), &dspci, nullptr, &pool_);
- if (err != VK_SUCCESS) return;
+ vkWaitForFences(m_device->device(), 1, &fence, VK_TRUE, UINT64_MAX);
- VkDescriptorSetAllocateInfo alloc_info = {VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, allocate_pnext, pool_, 1,
- &layout_.handle()};
- err = vkAllocateDescriptorSets(device_->handle(), &alloc_info, &set_);
+ vkDestroyFence(m_device->device(), fence, nullptr);
+ vkFreeCommandBuffers(m_device->device(), command_pool, 2, &command_buffer[0]);
+ vkDestroyCommandPool(m_device->device(), command_pool, NULL);
+
+ m_errorMonitor->VerifyNotFound();
}
-OneOffDescriptorSet::~OneOffDescriptorSet() {
- // No need to destroy set-- it's going away with the pool.
- vkDestroyDescriptorPool(device_->handle(), pool_, nullptr);
+// This is a positive test. No errors should be generated.
+TEST_F(VkPositiveLayerTest, TwoSubmitInfosWithSemaphoreOneQueueSubmitsOneFence) {
+ TEST_DESCRIPTION(
+ "Two command buffers, each in a separate SubmitInfo, sent in a single QueueSubmit call, followed by a WaitForFences call.");
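+ // The first SubmitInfo signals a semaphore that the second one waits on, so the
+ // two batches form a dependency chain inside a single vkQueueSubmit call.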
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ m_errorMonitor->ExpectSuccess();
+
+ VkFence fence;
+ VkFenceCreateInfo fence_create_info{};
+ fence_create_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
+ vkCreateFence(m_device->device(), &fence_create_info, nullptr, &fence);
+
+ VkSemaphore semaphore;
+ VkSemaphoreCreateInfo semaphore_create_info{};
+ semaphore_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
+ vkCreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore);
+
+ VkCommandPool command_pool;
+ VkCommandPoolCreateInfo pool_create_info{};
+ pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+ pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
+ pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
+ vkCreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool);
+
+ VkCommandBuffer command_buffer[2];
+ VkCommandBufferAllocateInfo command_buffer_allocate_info{};
+ command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+ command_buffer_allocate_info.commandPool = command_pool;
+ command_buffer_allocate_info.commandBufferCount = 2;
+ command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+ vkAllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, command_buffer);
+
+ {
+ VkCommandBufferBeginInfo begin_info{};
+ begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+ vkBeginCommandBuffer(command_buffer[0], &begin_info);
+
+ vkCmdPipelineBarrier(command_buffer[0], VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
+ nullptr, 0, nullptr, 0, nullptr);
+
+ VkViewport viewport{};
+ viewport.maxDepth = 1.0f;
+ viewport.minDepth = 0.0f;
+ viewport.width = 512;
+ viewport.height = 512;
+ viewport.x = 0;
+ viewport.y = 0;
+ vkCmdSetViewport(command_buffer[0], 0, 1, &viewport);
+ vkEndCommandBuffer(command_buffer[0]);
+ }
+ {
+ VkCommandBufferBeginInfo begin_info{};
+ begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+ vkBeginCommandBuffer(command_buffer[1], &begin_info);
+
+ VkViewport viewport{};
+ viewport.maxDepth = 1.0f;
+ viewport.minDepth = 0.0f;
+ viewport.width = 512;
+ viewport.height = 512;
+ viewport.x = 0;
+ viewport.y = 0;
+ vkCmdSetViewport(command_buffer[1], 0, 1, &viewport);
+ vkEndCommandBuffer(command_buffer[1]);
+ }
+ {
+ VkSubmitInfo submit_info[2];
+ VkPipelineStageFlags flags[]{VK_PIPELINE_STAGE_ALL_COMMANDS_BIT};
+
+ submit_info[0].sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info[0].pNext = NULL;
+ submit_info[0].commandBufferCount = 1;
+ submit_info[0].pCommandBuffers = &command_buffer[0];
+ submit_info[0].signalSemaphoreCount = 1;
+ submit_info[0].pSignalSemaphores = &semaphore;
+ submit_info[0].waitSemaphoreCount = 0;
+ submit_info[0].pWaitSemaphores = NULL;
+ submit_info[0].pWaitDstStageMask = 0;
+
+ submit_info[1].sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info[1].pNext = NULL;
+ submit_info[1].commandBufferCount = 1;
+ submit_info[1].pCommandBuffers = &command_buffer[1];
+ submit_info[1].waitSemaphoreCount = 1;
+ submit_info[1].pWaitSemaphores = &semaphore;
+ submit_info[1].pWaitDstStageMask = flags;
+ submit_info[1].signalSemaphoreCount = 0;
+ submit_info[1].pSignalSemaphores = NULL;
+ vkQueueSubmit(m_device->m_queue, 2, &submit_info[0], fence);
+ }
+
+ vkWaitForFences(m_device->device(), 1, &fence, VK_TRUE, UINT64_MAX);
+
+ vkDestroyFence(m_device->device(), fence, nullptr);
+ vkFreeCommandBuffers(m_device->device(), command_pool, 2, &command_buffer[0]);
+ vkDestroyCommandPool(m_device->device(), command_pool, NULL);
+ vkDestroySemaphore(m_device->device(), semaphore, nullptr);
+
+ m_errorMonitor->VerifyNotFound();
}
-bool OneOffDescriptorSet::Initialized() { return pool_ != VK_NULL_HANDLE && layout_.initialized() && set_ != VK_NULL_HANDLE; }
+TEST_F(VkPositiveLayerTest, CreatePipelineAttribMatrixType) {
+ TEST_DESCRIPTION("Test that pipeline validation accepts matrices passed as vertex attributes");
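+ // A mat2x4 vertex input spans two consecutive locations, so it is backed by two
+ // R32G32B32A32_SFLOAT attributes at locations 0 and 1.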
+ m_errorMonitor->ExpectSuccess();
-void OneOffDescriptorSet::WriteDescriptorBufferInfo(int blinding, VkBuffer buffer, VkDeviceSize size,
- VkDescriptorType descriptorType) {
- VkDescriptorBufferInfo buffer_info = {};
- buffer_info.buffer = buffer;
- buffer_info.offset = 0;
- buffer_info.range = size;
- buffer_infos.emplace_back(buffer_info);
- size_t index = buffer_infos.size() - 1;
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
- VkWriteDescriptorSet descriptor_write;
- memset(&descriptor_write, 0, sizeof(descriptor_write));
- descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
- descriptor_write.dstSet = set_;
- descriptor_write.dstBinding = blinding;
- descriptor_write.descriptorCount = 1;
- descriptor_write.descriptorType = descriptorType;
- descriptor_write.pBufferInfo = &buffer_infos[index];
- descriptor_write.pImageInfo = nullptr;
- descriptor_write.pTexelBufferView = nullptr;
+ VkVertexInputBindingDescription input_binding;
+ memset(&input_binding, 0, sizeof(input_binding));
+
+ VkVertexInputAttributeDescription input_attribs[2];
+ memset(input_attribs, 0, sizeof(input_attribs));
- descriptor_writes.emplace_back(descriptor_write);
+ for (int i = 0; i < 2; i++) {
+ input_attribs[i].format = VK_FORMAT_R32G32B32A32_SFLOAT;
+ input_attribs[i].location = i;
+ }
+
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) in mat2x4 x;\n"
+ "void main(){\n"
+ " gl_Position = x[0] + x[1];\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) out vec4 color;\n"
+ "void main(){\n"
+ " color = vec4(1);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddDefaultColorAttachment();
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+
+ pipe.AddVertexInputBindings(&input_binding, 1);
+ pipe.AddVertexInputAttribs(input_attribs, 2);
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.AppendDummy();
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+
+ /* expect success */
+ m_errorMonitor->VerifyNotFound();
}
-void OneOffDescriptorSet::WriteDescriptorBufferView(int blinding, VkBufferView &buffer_view, VkDescriptorType descriptorType) {
- VkWriteDescriptorSet descriptor_write;
- memset(&descriptor_write, 0, sizeof(descriptor_write));
- descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
- descriptor_write.dstSet = set_;
- descriptor_write.dstBinding = blinding;
- descriptor_write.descriptorCount = 1;
- descriptor_write.descriptorType = descriptorType;
- descriptor_write.pTexelBufferView = &buffer_view;
- descriptor_write.pImageInfo = nullptr;
- descriptor_write.pBufferInfo = nullptr;
+TEST_F(VkPositiveLayerTest, CreatePipelineAttribArrayType) {
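+ // Same setup as the matrix case above: a vec4 x[2] input also occupies
+ // locations 0 and 1.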
+ m_errorMonitor->ExpectSuccess();
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkVertexInputBindingDescription input_binding;
+ memset(&input_binding, 0, sizeof(input_binding));
+
+ VkVertexInputAttributeDescription input_attribs[2];
+ memset(input_attribs, 0, sizeof(input_attribs));
+
+ for (int i = 0; i < 2; i++) {
+ input_attribs[i].format = VK_FORMAT_R32G32B32A32_SFLOAT;
+ input_attribs[i].location = i;
+ }
+
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) in vec4 x[2];\n"
+ "void main(){\n"
+ " gl_Position = x[0] + x[1];\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) out vec4 color;\n"
+ "void main(){\n"
+ " color = vec4(1);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddDefaultColorAttachment();
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+
+ pipe.AddVertexInputBindings(&input_binding, 1);
+ pipe.AddVertexInputAttribs(input_attribs, 2);
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.AppendDummy();
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
- descriptor_writes.emplace_back(descriptor_write);
+ m_errorMonitor->VerifyNotFound();
}
-void OneOffDescriptorSet::WriteDescriptorImageInfo(int blinding, VkImageView image_view, VkSampler sampler,
- VkDescriptorType descriptorType) {
- VkDescriptorImageInfo image_info = {};
- image_info.imageView = image_view;
- image_info.sampler = sampler;
- image_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
- image_infos.emplace_back(image_info);
- size_t index = image_infos.size() - 1;
+TEST_F(VkPositiveLayerTest, CreatePipelineAttribComponents) {
+ TEST_DESCRIPTION(
+ "Test that pipeline validation accepts consuming a vertex attribute through multiple vertex shader inputs, each consuming "
+ "a different subset of the components, and that fragment shader-attachment validation tolerates multiple duplicate "
+ "location outputs");
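+ // The component= qualifiers let multiple variables share a location: y1/y2 split
+ // location 1 on the vertex input side, and the fragment outputs split the
+ // components of locations 0 and 1 without overlapping.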
+ m_errorMonitor->ExpectSuccess(VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT);
- VkWriteDescriptorSet descriptor_write;
- memset(&descriptor_write, 0, sizeof(descriptor_write));
- descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
- descriptor_write.dstSet = set_;
- descriptor_write.dstBinding = blinding;
- descriptor_write.descriptorCount = 1;
- descriptor_write.descriptorType = descriptorType;
- descriptor_write.pImageInfo = &image_infos[index];
- descriptor_write.pBufferInfo = nullptr;
- descriptor_write.pTexelBufferView = nullptr;
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkVertexInputBindingDescription input_binding;
+ memset(&input_binding, 0, sizeof(input_binding));
+
+ VkVertexInputAttributeDescription input_attribs[3];
+ memset(input_attribs, 0, sizeof(input_attribs));
+
+ for (int i = 0; i < 3; i++) {
+ input_attribs[i].format = VK_FORMAT_R32G32B32A32_SFLOAT;
+ input_attribs[i].location = i;
+ }
+
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) in vec4 x;\n"
+ "layout(location=1) in vec3 y1;\n"
+ "layout(location=1, component=3) in float y2;\n"
+ "layout(location=2) in vec4 z;\n"
+ "void main(){\n"
+ " gl_Position = x + vec4(y1, y2) + z;\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location=0, component=0) out float color0;\n"
+ "layout(location=0, component=1) out float color1;\n"
+ "layout(location=0, component=2) out float color2;\n"
+ "layout(location=0, component=3) out float color3;\n"
+ "layout(location=1, component=0) out vec2 second_color0;\n"
+ "layout(location=1, component=2) out vec2 second_color1;\n"
+ "void main(){\n"
+ " color0 = float(1);\n"
+ " second_color0 = vec2(1);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
- descriptor_writes.emplace_back(descriptor_write);
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.AppendDummy();
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ // Create a renderPass with two color attachments
+ VkAttachmentReference attachments[2] = {};
+ attachments[0].layout = VK_IMAGE_LAYOUT_GENERAL;
+ attachments[1].attachment = 1;
+ attachments[1].layout = VK_IMAGE_LAYOUT_GENERAL;
+
+ VkSubpassDescription subpass = {};
+ subpass.pColorAttachments = attachments;
+ subpass.colorAttachmentCount = 2;
+
+ VkRenderPassCreateInfo rpci = {};
+ rpci.subpassCount = 1;
+ rpci.pSubpasses = &subpass;
+ rpci.attachmentCount = 2;
+
+ VkAttachmentDescription attach_desc[2] = {};
+ attach_desc[0].format = VK_FORMAT_B8G8R8A8_UNORM;
+ attach_desc[0].samples = VK_SAMPLE_COUNT_1_BIT;
+ attach_desc[0].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+ attach_desc[0].finalLayout = VK_IMAGE_LAYOUT_GENERAL;
+ attach_desc[0].loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
+ attach_desc[1].format = VK_FORMAT_B8G8R8A8_UNORM;
+ attach_desc[1].samples = VK_SAMPLE_COUNT_1_BIT;
+ attach_desc[1].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+ attach_desc[1].finalLayout = VK_IMAGE_LAYOUT_GENERAL;
+ attach_desc[1].loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
+
+ rpci.pAttachments = attach_desc;
+ rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+
+ VkRenderPass renderpass;
+ vkCreateRenderPass(m_device->device(), &rpci, NULL, &renderpass);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+
+ VkPipelineColorBlendAttachmentState att_state1 = {};
+ att_state1.dstAlphaBlendFactor = VK_BLEND_FACTOR_CONSTANT_COLOR;
+ att_state1.blendEnable = VK_FALSE;
+
+ pipe.AddColorAttachment(0, att_state1);
+ pipe.AddColorAttachment(1, att_state1);
+ pipe.AddVertexInputBindings(&input_binding, 1);
+ pipe.AddVertexInputAttribs(input_attribs, 3);
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderpass);
+ vkDestroyRenderPass(m_device->device(), renderpass, nullptr);
+
+ m_errorMonitor->VerifyNotFound();
}
-void OneOffDescriptorSet::UpdateDescriptorSets() {
- vkUpdateDescriptorSets(device_->handle(), descriptor_writes.size(), descriptor_writes.data(), 0, NULL);
+TEST_F(VkPositiveLayerTest, CreatePipelineSimplePositive) {
+ m_errorMonitor->ExpectSuccess();
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ char const *vsSource =
+ "#version 450\n"
+ "void main(){\n"
+ " gl_Position = vec4(0);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) out vec4 color;\n"
+ "void main(){\n"
+ " color = vec4(1);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddDefaultColorAttachment();
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.AppendDummy();
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+
+ m_errorMonitor->VerifyNotFound();
}
-CreatePipelineHelper::CreatePipelineHelper(VkLayerTest &test) : layer_test_(test) {}
+TEST_F(VkPositiveLayerTest, CreatePipelineRelaxedTypeMatch) {
+ TEST_DESCRIPTION(
+ "Test that pipeline validation accepts the relaxed type matching rules set out in 14.1.3: fundamental type must match, and "
+ "producer side must have at least as many components");
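+ // The vertex stage writes vec3/ivec3/vec3 while the fragment stage reads
+ // float/int/vec2: the fundamental types match and the producer provides at least
+ // as many components, so the pipeline should be accepted.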
+ m_errorMonitor->ExpectSuccess();
+
+ // VK 1.0.8 Specification, 14.1.3 "Additionally,..." block
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ char const *vsSource =
+ "#version 450\n"
+ "layout(location=0) out vec3 x;\n"
+ "layout(location=1) out ivec3 y;\n"
+ "layout(location=2) out vec3 z;\n"
+ "void main(){\n"
+ " gl_Position = vec4(0);\n"
+ " x = vec3(0); y = ivec3(0); z = vec3(0);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) out vec4 color;\n"
+ "layout(location=0) in float x;\n"
+ "layout(location=1) flat in int y;\n"
+ "layout(location=2) in vec2 z;\n"
+ "void main(){\n"
+ " color = vec4(1 + x + y + z.x);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddDefaultColorAttachment();
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.AppendDummy();
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ VkResult err = VK_SUCCESS;
+ err = pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+ ASSERT_VK_SUCCESS(err);
-CreatePipelineHelper::~CreatePipelineHelper() {
- VkDevice device = layer_test_.device();
- vkDestroyPipelineCache(device, pipeline_cache_, nullptr);
- vkDestroyPipeline(device, pipeline_, nullptr);
+ m_errorMonitor->VerifyNotFound();
}
-void CreatePipelineHelper::InitDescriptorSetInfo() {
- dsl_bindings_ = {{0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr}};
+TEST_F(VkPositiveLayerTest, CreatePipelineTessPerVertex) {
+ TEST_DESCRIPTION("Test that pipeline validation accepts per-vertex variables passed between the TCS and TES stages");
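+ // The TCS writes a per-vertex int array that the TES reads back through an
+ // implicitly sized per-vertex input; the matching declarations should pass.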
+ m_errorMonitor->ExpectSuccess();
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ if (!m_device->phy().features().tessellationShader) {
+ printf("%s Device does not support tessellation shaders; skipped.\n", kSkipPrefix);
+ return;
+ }
+
+ char const *vsSource =
+ "#version 450\n"
+ "void main(){}\n";
+ char const *tcsSource =
+ "#version 450\n"
+ "layout(location=0) out int x[];\n"
+ "layout(vertices=3) out;\n"
+ "void main(){\n"
+ " gl_TessLevelOuter[0] = gl_TessLevelOuter[1] = gl_TessLevelOuter[2] = 1;\n"
+ " gl_TessLevelInner[0] = 1;\n"
+ " x[gl_InvocationID] = gl_InvocationID;\n"
+ "}\n";
+ char const *tesSource =
+ "#version 450\n"
+ "layout(triangles, equal_spacing, cw) in;\n"
+ "layout(location=0) in int x[];\n"
+ "void main(){\n"
+ " gl_Position.xyz = gl_TessCoord;\n"
+ " gl_Position.w = x[0] + x[1] + x[2];\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "layout(location=0) out vec4 color;\n"
+ "void main(){\n"
+ " color = vec4(1);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj tcs(m_device, tcsSource, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, this);
+ VkShaderObj tes(m_device, tesSource, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineInputAssemblyStateCreateInfo iasci{VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO, nullptr, 0,
+ VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, VK_FALSE};
+
+ VkPipelineTessellationStateCreateInfo tsci{VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO, nullptr, 0, 3};
+
+ VkPipelineObj pipe(m_device);
+ pipe.SetInputAssembly(&iasci);
+ pipe.SetTessellation(&tsci);
+ pipe.AddDefaultColorAttachment();
+ pipe.AddShader(&vs);
+ pipe.AddShader(&tcs);
+ pipe.AddShader(&tes);
+ pipe.AddShader(&fs);
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.AppendDummy();
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+
+ m_errorMonitor->VerifyNotFound();
}
-void CreatePipelineHelper::InitInputAndVertexInfo() {
- vi_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
+TEST_F(VkPositiveLayerTest, CreatePipelineGeometryInputBlockPositive) {
+ TEST_DESCRIPTION(
+ "Test that pipeline validation accepts a user-defined interface block passed into the geometry shader. This is interesting "
+ "because the 'extra' array level is not present on the member type, but on the block instance.");
+ m_errorMonitor->ExpectSuccess();
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ if (!m_device->phy().features().geometryShader) {
+ printf("%s Device does not support geometry shaders; skipped.\n", kSkipPrefix);
+ return;
+ }
+
+ char const *vsSource =
+ "#version 450\n"
+ "layout(location=0) out VertexData { vec4 x; } vs_out;\n"
+ "void main(){\n"
+ " vs_out.x = vec4(1);\n"
+ "}\n";
+ char const *gsSource =
+ "#version 450\n"
+ "layout(triangles) in;\n"
+ "layout(triangle_strip, max_vertices=3) out;\n"
+ "layout(location=0) in VertexData { vec4 x; } gs_in[];\n"
+ "void main() {\n"
+ " gl_Position = gs_in[0].x;\n"
+ " EmitVertex();\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "layout(location=0) out vec4 color;\n"
+ "void main(){\n"
+ " color = vec4(1);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj gs(m_device, gsSource, VK_SHADER_STAGE_GEOMETRY_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddDefaultColorAttachment();
+ pipe.AddShader(&vs);
+ pipe.AddShader(&gs);
+ pipe.AddShader(&fs);
- ia_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
- ia_ci_.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.AppendDummy();
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+
+ m_errorMonitor->VerifyNotFound();
}
-void CreatePipelineHelper::InitMultisampleInfo() {
- pipe_ms_state_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
- pipe_ms_state_ci_.pNext = nullptr;
- pipe_ms_state_ci_.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
- pipe_ms_state_ci_.sampleShadingEnable = VK_FALSE;
- pipe_ms_state_ci_.minSampleShading = 1.0;
- pipe_ms_state_ci_.pSampleMask = NULL;
+TEST_F(VkPositiveLayerTest, CreatePipeline64BitAttributesPositive) {
+ TEST_DESCRIPTION(
+ "Test that pipeline validation accepts basic use of 64bit vertex attributes. This is interesting because they consume "
+ "multiple locations.");
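+ // Each dvec4 column of the dmat4 occupies two locations, hence the four
+ // R64G64B64A64_SFLOAT attributes at locations 0, 2, 4 and 6.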
+ m_errorMonitor->ExpectSuccess();
+
+ if (!EnableDeviceProfileLayer()) {
+ printf("%s Failed to enable device profile layer.\n", kSkipPrefix);
+ return;
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ ASSERT_NO_FATAL_FAILURE(InitState());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ if (!m_device->phy().features().shaderFloat64) {
+ printf("%s Device does not support 64bit vertex attributes; skipped.\n", kSkipPrefix);
+ return;
+ }
+ // Mark the 64-bit format as supporting the vertex buffer feature
+ PFN_vkSetPhysicalDeviceFormatPropertiesEXT fpvkSetPhysicalDeviceFormatPropertiesEXT = nullptr;
+ PFN_vkGetOriginalPhysicalDeviceFormatPropertiesEXT fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT = nullptr;
+
+ // Load required functions
+ if (!LoadDeviceProfileLayer(fpvkSetPhysicalDeviceFormatPropertiesEXT, fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT)) {
+ return;
+ }
+ VkFormatProperties format_props;
+ fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_R64G64B64A64_SFLOAT, &format_props);
+ format_props.bufferFeatures |= VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT;
+ fpvkSetPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_R64G64B64A64_SFLOAT, format_props);
+
+ VkVertexInputBindingDescription input_bindings[1];
+ memset(input_bindings, 0, sizeof(input_bindings));
+
+ VkVertexInputAttributeDescription input_attribs[4];
+ memset(input_attribs, 0, sizeof(input_attribs));
+ input_attribs[0].location = 0;
+ input_attribs[0].offset = 0;
+ input_attribs[0].format = VK_FORMAT_R64G64B64A64_SFLOAT;
+ input_attribs[1].location = 2;
+ input_attribs[1].offset = 32;
+ input_attribs[1].format = VK_FORMAT_R64G64B64A64_SFLOAT;
+ input_attribs[2].location = 4;
+ input_attribs[2].offset = 64;
+ input_attribs[2].format = VK_FORMAT_R64G64B64A64_SFLOAT;
+ input_attribs[3].location = 6;
+ input_attribs[3].offset = 96;
+ input_attribs[3].format = VK_FORMAT_R64G64B64A64_SFLOAT;
+
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) in dmat4 x;\n"
+ "void main(){\n"
+ " gl_Position = vec4(x[0][0]);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) out vec4 color;\n"
+ "void main(){\n"
+ " color = vec4(1);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddDefaultColorAttachment();
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+
+ pipe.AddVertexInputBindings(input_bindings, 1);
+ pipe.AddVertexInputAttribs(input_attribs, 4);
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.AppendDummy();
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+
+ m_errorMonitor->VerifyNotFound();
}
-void CreatePipelineHelper::InitPipelineLayoutInfo() {
- pipeline_layout_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
- pipeline_layout_ci_.setLayoutCount = 1; // Not really changeable because InitState() sets exactly one pSetLayout
- pipeline_layout_ci_.pSetLayouts = nullptr; // must bound after it is created
+TEST_F(VkPositiveLayerTest, CreatePipelineInputAttachmentPositive) {
+ TEST_DESCRIPTION("Positive test for a correctly matched input attachment");
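+ // The fragment shader's subpassInput at input_attachment_index=0 should line up
+ // with the subpass's single input attachment (attachment 1, GENERAL layout) and
+ // with the INPUT_ATTACHMENT descriptor at set=0, binding=0 created below.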
+ m_errorMonitor->ExpectSuccess();
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "void main(){\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(input_attachment_index=0, set=0, binding=0) uniform subpassInput x;\n"
+ "layout(location=0) out vec4 color;\n"
+ "void main() {\n"
+ " color = subpassLoad(x);\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+ pipe.AddDefaultColorAttachment();
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkDescriptorSetLayoutBinding dslb = {0, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr};
+ const VkDescriptorSetLayoutObj dsl(m_device, {dslb});
+ const VkPipelineLayoutObj pl(m_device, {&dsl});
+
+ VkAttachmentDescription descs[2] = {
+ {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_LOAD, VK_ATTACHMENT_STORE_OP_STORE,
+ VK_ATTACHMENT_LOAD_OP_LOAD, VK_ATTACHMENT_STORE_OP_STORE, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+ VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
+ {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_LOAD, VK_ATTACHMENT_STORE_OP_STORE,
+ VK_ATTACHMENT_LOAD_OP_LOAD, VK_ATTACHMENT_STORE_OP_STORE, VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL},
+ };
+ VkAttachmentReference color = {
+ 0,
+ VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+ };
+ VkAttachmentReference input = {
+ 1,
+ VK_IMAGE_LAYOUT_GENERAL,
+ };
+
+ VkSubpassDescription sd = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 1, &input, 1, &color, nullptr, nullptr, 0, nullptr};
+
+ VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 2, descs, 1, &sd, 0, nullptr};
+ VkRenderPass rp;
+ VkResult err = vkCreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
+ ASSERT_VK_SUCCESS(err);
+
+ // Should be OK; if the input attachment matching were going to fail, it would fail here.
+ pipe.CreateVKPipeline(pl.handle(), rp);
+
+ m_errorMonitor->VerifyNotFound();
+
+ vkDestroyRenderPass(m_device->device(), rp, nullptr);
}
-void CreatePipelineHelper::InitViewportInfo() {
- viewport_ = {0.0f, 0.0f, 64.0f, 64.0f, 0.0f, 1.0f};
- scissor_ = {{0, 0}, {64, 64}};
+TEST_F(VkPositiveLayerTest, CreateComputePipelineMissingDescriptorUnusedPositive) {
+ TEST_DESCRIPTION(
+ "Test that pipeline validation accepts a compute pipeline which declares a descriptor-backed resource which is not "
+ "provided, but the shader does not statically use it. This is interesting because it requires compute pipelines to have a "
+ "proper descriptor use walk, which they didn't for some time.");
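+ // The pipeline layout created below provides no descriptor bindings; since the
+ // shader never statically uses the declared buffer, creation should still succeed.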
+ m_errorMonitor->ExpectSuccess();
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ char const *csSource =
+ "#version 450\n"
+ "\n"
+ "layout(local_size_x=1) in;\n"
+ "layout(set=0, binding=0) buffer block { vec4 x; };\n"
+ "void main(){\n"
+ " // x is not used.\n"
+ "}\n";
+
+ VkShaderObj cs(m_device, csSource, VK_SHADER_STAGE_COMPUTE_BIT, this);
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ VkComputePipelineCreateInfo cpci = {VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
+ nullptr,
+ 0,
+ {VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO, nullptr, 0,
+ VK_SHADER_STAGE_COMPUTE_BIT, cs.handle(), "main", nullptr},
+ descriptorSet.GetPipelineLayout(),
+ VK_NULL_HANDLE,
+ -1};
+
+ VkPipeline pipe;
+ VkResult err = vkCreateComputePipelines(m_device->device(), VK_NULL_HANDLE, 1, &cpci, nullptr, &pipe);
+
+ m_errorMonitor->VerifyNotFound();
- vp_state_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
- vp_state_ci_.pNext = nullptr;
- vp_state_ci_.viewportCount = 1;
- vp_state_ci_.pViewports = &viewport_; // ignored if dynamic
- vp_state_ci_.scissorCount = 1;
- vp_state_ci_.pScissors = &scissor_; // ignored if dynamic
+ if (err == VK_SUCCESS) {
+ vkDestroyPipeline(m_device->device(), pipe, nullptr);
+ }
}
-void CreatePipelineHelper::InitDynamicStateInfo() {
- // Use a "validity" check on the {} initialized structure to detect initialization
- // during late bind
+TEST_F(VkPositiveLayerTest, CreateComputePipelineCombinedImageSamplerConsumedAsSampler) {
+ TEST_DESCRIPTION(
+ "Test that pipeline validation accepts a shader consuming only the sampler portion of a combined image + sampler");
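+ // Binding 0 is a COMBINED_IMAGE_SAMPLER, but the shader declares only a plain
+ // sampler there and pairs it with the separate image at binding 1; this partial
+ // use should be accepted.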
+ m_errorMonitor->ExpectSuccess();
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ std::vector<VkDescriptorSetLayoutBinding> bindings = {
+ {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
+ {1, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
+ {2, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
+ };
+
+ const VkDescriptorSetLayoutObj dsl(m_device, bindings);
+ const VkPipelineLayoutObj pl(m_device, {&dsl});
+
+ char const *csSource =
+ "#version 450\n"
+ "\n"
+ "layout(local_size_x=1) in;\n"
+ "layout(set=0, binding=0) uniform sampler s;\n"
+ "layout(set=0, binding=1) uniform texture2D t;\n"
+ "layout(set=0, binding=2) buffer block { vec4 x; };\n"
+ "void main() {\n"
+ " x = texture(sampler2D(t, s), vec2(0));\n"
+ "}\n";
+ VkShaderObj cs(m_device, csSource, VK_SHADER_STAGE_COMPUTE_BIT, this);
+
+ VkComputePipelineCreateInfo cpci = {VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
+ nullptr,
+ 0,
+ {VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO, nullptr, 0,
+ VK_SHADER_STAGE_COMPUTE_BIT, cs.handle(), "main", nullptr},
+ pl.handle(),
+ VK_NULL_HANDLE,
+ -1};
+
+ VkPipeline pipe;
+ VkResult err = vkCreateComputePipelines(m_device->device(), VK_NULL_HANDLE, 1, &cpci, nullptr, &pipe);
+
+ m_errorMonitor->VerifyNotFound();
+
+ if (err == VK_SUCCESS) {
+ vkDestroyPipeline(m_device->device(), pipe, nullptr);
+ }
}
-void CreatePipelineHelper::InitShaderInfo() {
- vs_.reset(new VkShaderObj(layer_test_.DeviceObj(), bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, &layer_test_));
- fs_.reset(new VkShaderObj(layer_test_.DeviceObj(), bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, &layer_test_));
- // We shouldn't need a fragment shader but add it to be able to run on more devices
- shader_stages_ = {vs_->GetStageCreateInfo(), fs_->GetStageCreateInfo()};
+TEST_F(VkPositiveLayerTest, CreateComputePipelineCombinedImageSamplerConsumedAsImage) {
+ TEST_DESCRIPTION(
+ "Test that pipeline validation accepts a shader consuming only the image portion of a combined image + sampler");
+ m_errorMonitor->ExpectSuccess();
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ std::vector<VkDescriptorSetLayoutBinding> bindings = {
+ {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
+ {1, VK_DESCRIPTOR_TYPE_SAMPLER, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
+ {2, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
+ };
+
+ const VkDescriptorSetLayoutObj dsl(m_device, bindings);
+ const VkPipelineLayoutObj pl(m_device, {&dsl});
+
+ char const *csSource =
+ "#version 450\n"
+ "\n"
+ "layout(local_size_x=1) in;\n"
+ "layout(set=0, binding=0) uniform texture2D t;\n"
+ "layout(set=0, binding=1) uniform sampler s;\n"
+ "layout(set=0, binding=2) buffer block { vec4 x; };\n"
+ "void main() {\n"
+ " x = texture(sampler2D(t, s), vec2(0));\n"
+ "}\n";
+ VkShaderObj cs(m_device, csSource, VK_SHADER_STAGE_COMPUTE_BIT, this);
+
+ VkComputePipelineCreateInfo cpci = {VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
+ nullptr,
+ 0,
+ {VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO, nullptr, 0,
+ VK_SHADER_STAGE_COMPUTE_BIT, cs.handle(), "main", nullptr},
+ pl.handle(),
+ VK_NULL_HANDLE,
+ -1};
+
+ VkPipeline pipe;
+ VkResult err = vkCreateComputePipelines(m_device->device(), VK_NULL_HANDLE, 1, &cpci, nullptr, &pipe);
+
+ m_errorMonitor->VerifyNotFound();
+
+ if (err == VK_SUCCESS) {
+ vkDestroyPipeline(m_device->device(), pipe, nullptr);
+ }
}
-void CreatePipelineHelper::InitRasterizationInfo() {
- rs_state_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
- rs_state_ci_.pNext = &line_state_ci_;
- rs_state_ci_.flags = 0;
- rs_state_ci_.depthClampEnable = VK_FALSE;
- rs_state_ci_.rasterizerDiscardEnable = VK_FALSE;
- rs_state_ci_.polygonMode = VK_POLYGON_MODE_FILL;
- rs_state_ci_.cullMode = VK_CULL_MODE_BACK_BIT;
- rs_state_ci_.frontFace = VK_FRONT_FACE_COUNTER_CLOCKWISE;
- rs_state_ci_.depthBiasEnable = VK_FALSE;
- rs_state_ci_.lineWidth = 1.0F;
+TEST_F(VkPositiveLayerTest, CreateComputePipelineCombinedImageSamplerConsumedAsBoth) {
+ TEST_DESCRIPTION(
+ "Test that pipeline validation accepts a shader consuming both the sampler and the image of a combined image+sampler but "
+ "via separate variables");
+ m_errorMonitor->ExpectSuccess();
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ std::vector<VkDescriptorSetLayoutBinding> bindings = {
+ {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
+ {1, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
+ };
+
+ const VkDescriptorSetLayoutObj dsl(m_device, bindings);
+ const VkPipelineLayoutObj pl(m_device, {&dsl});
+
+ char const *csSource =
+ "#version 450\n"
+ "\n"
+ "layout(local_size_x=1) in;\n"
+ "layout(set=0, binding=0) uniform texture2D t;\n"
+ "layout(set=0, binding=0) uniform sampler s; // both binding 0!\n"
+ "layout(set=0, binding=1) buffer block { vec4 x; };\n"
+ "void main() {\n"
+ " x = texture(sampler2D(t, s), vec2(0));\n"
+ "}\n";
+ VkShaderObj cs(m_device, csSource, VK_SHADER_STAGE_COMPUTE_BIT, this);
+
+ VkComputePipelineCreateInfo cpci = {VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
+ nullptr,
+ 0,
+ {VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO, nullptr, 0,
+ VK_SHADER_STAGE_COMPUTE_BIT, cs.handle(), "main", nullptr},
+ pl.handle(),
+ VK_NULL_HANDLE,
+ -1};
+
+ VkPipeline pipe;
+ VkResult err = vkCreateComputePipelines(m_device->device(), VK_NULL_HANDLE, 1, &cpci, nullptr, &pipe);
+
+ m_errorMonitor->VerifyNotFound();
+
+ if (err == VK_SUCCESS) {
+ vkDestroyPipeline(m_device->device(), pipe, nullptr);
+ }
}
-void CreatePipelineHelper::InitLineRasterizationInfo() {
- line_state_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT;
- line_state_ci_.pNext = nullptr;
- line_state_ci_.lineRasterizationMode = VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT;
- line_state_ci_.stippledLineEnable = VK_FALSE;
- line_state_ci_.lineStippleFactor = 0;
- line_state_ci_.lineStipplePattern = 0;
+TEST_F(VkPositiveLayerTest, CreateDescriptorSetBindingWithIgnoredSamplers) {
+ TEST_DESCRIPTION("Test that layers conditionally do ignore the pImmutableSamplers on vkCreateDescriptorSetLayout");
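+ // pImmutableSamplers is only meaningful for SAMPLER and COMBINED_IMAGE_SAMPLER
+ // bindings, so the deliberately bogus pointer used below should never be
+ // dereferenced for the other descriptor types.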
+
+ bool prop2_found = false;
+ if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ prop2_found = true;
+ } else {
+ printf("%s %s Extension not supported, skipping push descriptor sub-tests\n", kSkipPrefix,
+ VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ bool push_descriptor_found = false;
+ if (prop2_found && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME)) {
+ m_device_extension_names.push_back(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
+
+ // In addition to the extension being supported we need to have at least one available
+ // Some implementations report an invalid maxPushDescriptors of 0
+ push_descriptor_found = GetPushDescriptorProperties(instance(), gpu()).maxPushDescriptors > 0;
+ } else {
+ printf("%s %s Extension not supported, skipping push descriptor sub-tests\n", kSkipPrefix,
+ VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitState());
+ const uint64_t fake_address_64 = 0xCDCDCDCDCDCDCDCD;
+ const uint64_t fake_address_32 = 0xCDCDCDCD;
+ const void *fake_pointer =
+ sizeof(void *) == 8 ? reinterpret_cast<void *>(fake_address_64) : reinterpret_cast<void *>(fake_address_32);
+ const VkSampler *hopefully_undereferencable_pointer = reinterpret_cast<const VkSampler *>(fake_pointer);
+
+ // regular descriptors
+ m_errorMonitor->ExpectSuccess();
+ {
+ const VkDescriptorSetLayoutBinding non_sampler_bindings[] = {
+ {0, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
+ {1, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
+ {2, VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
+ {3, VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
+ {4, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
+ {5, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
+ {6, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
+ {7, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
+ {8, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
+ };
+ const VkDescriptorSetLayoutCreateInfo dslci = {VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, nullptr, 0,
+ static_cast<uint32_t>(size(non_sampler_bindings)), non_sampler_bindings};
+ VkDescriptorSetLayout dsl;
+ const VkResult err = vkCreateDescriptorSetLayout(m_device->device(), &dslci, nullptr, &dsl);
+ ASSERT_VK_SUCCESS(err);
+ vkDestroyDescriptorSetLayout(m_device->device(), dsl, nullptr);
+ }
+ m_errorMonitor->VerifyNotFound();
+
+ if (push_descriptor_found) {
+ // push descriptors
+ m_errorMonitor->ExpectSuccess();
+ {
+ const VkDescriptorSetLayoutBinding non_sampler_bindings[] = {
+ {0, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
+ {1, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
+ {2, VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
+ {3, VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
+ {4, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
+ {5, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
+ {6, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
+ };
+ const VkDescriptorSetLayoutCreateInfo dslci = {VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, nullptr,
+ VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR,
+ static_cast<uint32_t>(size(non_sampler_bindings)), non_sampler_bindings};
+ VkDescriptorSetLayout dsl;
+ const VkResult err = vkCreateDescriptorSetLayout(m_device->device(), &dslci, nullptr, &dsl);
+ ASSERT_VK_SUCCESS(err);
+ vkDestroyDescriptorSetLayout(m_device->device(), dsl, nullptr);
+ }
+ m_errorMonitor->VerifyNotFound();
+ }
}
-void CreatePipelineHelper::InitBlendStateInfo() {
- cb_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
- cb_ci_.logicOpEnable = VK_FALSE;
- cb_ci_.logicOp = VK_LOGIC_OP_COPY; // ignored if enable is VK_FALSE above
- cb_ci_.attachmentCount = layer_test_.RenderPassInfo().subpassCount;
- ASSERT_TRUE(IsValidVkStruct(layer_test_.RenderPassInfo()));
- cb_ci_.pAttachments = &cb_attachments_;
- for (int i = 0; i < 4; i++) {
- cb_ci_.blendConstants[0] = 1.0F;
+TEST_F(VkPositiveLayerTest, Maintenance1Tests) {
+ TEST_DESCRIPTION("Validate various special cases for the Maintenance1_KHR extension");
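+ // With VK_KHR_maintenance1 enabled, a negative viewport height (y-flip) is valid
+ // and recording it should not produce any errors.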
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME)) {
+ m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+ } else {
+ printf("%s Maintenance1 Extension not supported, skipping tests\n", kSkipPrefix);
+ return;
}
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ m_errorMonitor->ExpectSuccess();
+
+ VkCommandBufferObj cmd_buf(m_device, m_commandPool);
+ cmd_buf.begin();
+ // Set a negative viewport height; this would be an error if Maintenance1 were not enabled
+ VkViewport viewport = {0, 0, 16, -16, 0, 1};
+ vkCmdSetViewport(cmd_buf.handle(), 0, 1, &viewport);
+ cmd_buf.end();
+
+ m_errorMonitor->VerifyNotFound();
}
-void CreatePipelineHelper::InitGraphicsPipelineInfo() {
- // Color-only rendering in a subpass with no depth/stencil attachment
- // Active Pipeline Shader Stages
- // Vertex Shader
- // Fragment Shader
- // Required: Fixed-Function Pipeline Stages
- // VkPipelineVertexInputStateCreateInfo
- // VkPipelineInputAssemblyStateCreateInfo
- // VkPipelineViewportStateCreateInfo
- // VkPipelineRasterizationStateCreateInfo
- // VkPipelineMultisampleStateCreateInfo
- // VkPipelineColorBlendStateCreateInfo
- gp_ci_.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
- gp_ci_.pNext = nullptr;
- gp_ci_.flags = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT;
- gp_ci_.pVertexInputState = &vi_ci_;
- gp_ci_.pInputAssemblyState = &ia_ci_;
- gp_ci_.pTessellationState = nullptr;
- gp_ci_.pViewportState = &vp_state_ci_;
- gp_ci_.pRasterizationState = &rs_state_ci_;
- gp_ci_.pMultisampleState = &pipe_ms_state_ci_;
- gp_ci_.pDepthStencilState = nullptr;
- gp_ci_.pColorBlendState = &cb_ci_;
- gp_ci_.pDynamicState = nullptr;
- gp_ci_.renderPass = layer_test_.renderPass();
+TEST_F(VkLayerTest, DuplicateValidPNextStructures) {
+ TEST_DESCRIPTION("Create a pNext chain containing valid structures, but with a duplicate structure type");
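+ // Each VkDedicatedAllocationBufferCreateInfoNV is valid on its own, but chaining
+ // two of them duplicates the structure type, which parameter validation should
+ // report.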
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ if (DeviceExtensionSupported(gpu(), nullptr, VK_NV_DEDICATED_ALLOCATION_EXTENSION_NAME)) {
+ m_device_extension_names.push_back(VK_NV_DEDICATED_ALLOCATION_EXTENSION_NAME);
+ } else {
+ printf("%s VK_NV_dedicated_allocation extension not supported, skipping test\n", kSkipPrefix);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ // Create two pNext structures which by themselves would be valid
+ VkDedicatedAllocationBufferCreateInfoNV dedicated_buffer_create_info = {};
+ VkDedicatedAllocationBufferCreateInfoNV dedicated_buffer_create_info_2 = {};
+ dedicated_buffer_create_info.sType = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV;
+ dedicated_buffer_create_info.pNext = &dedicated_buffer_create_info_2;
+ dedicated_buffer_create_info.dedicatedAllocation = VK_TRUE;
+
+ dedicated_buffer_create_info_2.sType = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV;
+ dedicated_buffer_create_info_2.pNext = nullptr;
+ dedicated_buffer_create_info_2.dedicatedAllocation = VK_TRUE;
+
+ uint32_t queue_family_index = 0;
+ VkBufferCreateInfo buffer_create_info = {};
+ buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+ buffer_create_info.pNext = &dedicated_buffer_create_info;
+ buffer_create_info.size = 1024;
+ buffer_create_info.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
+ buffer_create_info.queueFamilyIndexCount = 1;
+ buffer_create_info.pQueueFamilyIndices = &queue_family_index;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "chain contains duplicate structure types");
+ VkBuffer buffer;
+ vkCreateBuffer(m_device->device(), &buffer_create_info, NULL, &buffer);
+ m_errorMonitor->VerifyFound();
}
-void CreatePipelineHelper::InitPipelineCacheInfo() {
- pc_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
- pc_ci_.pNext = nullptr;
- pc_ci_.flags = 0;
- pc_ci_.initialDataSize = 0;
- pc_ci_.pInitialData = nullptr;
+TEST_F(VkLayerTest, DedicatedAllocation) {
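+ // Dedicated allocations must be bound to exactly the resource named in
+ // VkMemoryDedicatedAllocateInfoKHR and at offset zero; the negative cases below
+ // exercise both rules before performing a correct bind.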
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME)) {
+ m_device_extension_names.push_back(VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+ } else {
+ printf("%s Dedicated allocation extension not supported, skipping test\n", kSkipPrefix);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ VkMemoryPropertyFlags mem_flags = 0;
+ const VkDeviceSize resource_size = 1024;
+ auto buffer_info = VkBufferObj::create_info(resource_size, VK_BUFFER_USAGE_TRANSFER_DST_BIT);
+ VkBufferObj buffer;
+ buffer.init_no_mem(*m_device, buffer_info);
+ auto buffer_alloc_info = vk_testing::DeviceMemory::get_resource_alloc_info(*m_device, buffer.memory_requirements(), mem_flags);
+ auto buffer_dedicated_info = lvl_init_struct<VkMemoryDedicatedAllocateInfoKHR>();
+ buffer_dedicated_info.buffer = buffer.handle();
+ buffer_alloc_info.pNext = &buffer_dedicated_info;
+ vk_testing::DeviceMemory dedicated_buffer_memory;
+ dedicated_buffer_memory.init(*m_device, buffer_alloc_info);
+
+ VkBufferObj wrong_buffer;
+ wrong_buffer.init_no_mem(*m_device, buffer_info);
+
+ // Bind with wrong buffer
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindBufferMemory-memory-01508");
+ vkBindBufferMemory(m_device->handle(), wrong_buffer.handle(), dedicated_buffer_memory.handle(), 0);
+ m_errorMonitor->VerifyFound();
+
+ // Bind with non-zero offset (same VUID)
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkBindBufferMemory-memory-01508"); // offset must be zero
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkBindBufferMemory-size-01037"); // offset pushes us past size
+ auto offset = buffer.memory_requirements().alignment;
+ vkBindBufferMemory(m_device->handle(), buffer.handle(), dedicated_buffer_memory.handle(), offset);
+ m_errorMonitor->VerifyFound();
+
+ // Bind correctly (depends on the "skip" above)
+ m_errorMonitor->ExpectSuccess();
+ vkBindBufferMemory(m_device->handle(), buffer.handle(), dedicated_buffer_memory.handle(), 0);
+ m_errorMonitor->VerifyNotFound();
+
+ // And for images...
+ vk_testing::Image image;
+ vk_testing::Image wrong_image;
+ auto image_info = vk_testing::Image::create_info();
+ image_info.extent.width = resource_size;
+ image_info.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+ image_info.format = VK_FORMAT_R8G8B8A8_UNORM;
+ image.init_no_mem(*m_device, image_info);
+ wrong_image.init_no_mem(*m_device, image_info);
+
+ auto image_dedicated_info = lvl_init_struct<VkMemoryDedicatedAllocateInfoKHR>();
+ image_dedicated_info.image = image.handle();
+ auto image_alloc_info = vk_testing::DeviceMemory::get_resource_alloc_info(*m_device, image.memory_requirements(), mem_flags);
+ image_alloc_info.pNext = &image_dedicated_info;
+ vk_testing::DeviceMemory dedicated_image_memory;
+ dedicated_image_memory.init(*m_device, image_alloc_info);
+
+ // Bind with wrong image
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindImageMemory-memory-01509");
+ vkBindImageMemory(m_device->handle(), wrong_image.handle(), dedicated_image_memory.handle(), 0);
+ m_errorMonitor->VerifyFound();
+
+ // Bind with non-zero offset (same VUID)
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkBindImageMemory-memory-01509"); // offset must be zero
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkBindImageMemory-size-01049"); // offset pushes us past size
+ auto image_offset = image.memory_requirements().alignment;
+ vkBindImageMemory(m_device->handle(), image.handle(), dedicated_image_memory.handle(), image_offset);
+ m_errorMonitor->VerifyFound();
+
+ // Bind correctly (depends on the "skip" above)
+ m_errorMonitor->ExpectSuccess();
+ vkBindImageMemory(m_device->handle(), image.handle(), dedicated_image_memory.handle(), 0);
+ m_errorMonitor->VerifyNotFound();
}
-void CreatePipelineHelper::InitTesselationState() {
- // TBD -- add shaders and create_info
+TEST_F(VkPositiveLayerTest, ValidStructPNext) {
+ TEST_DESCRIPTION("Verify that a valid pNext value is handled correctly");
+
+ // Positive test to check parameter_validation and unique_objects support for NV_dedicated_allocation
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ if (DeviceExtensionSupported(gpu(), nullptr, VK_NV_DEDICATED_ALLOCATION_EXTENSION_NAME)) {
+ m_device_extension_names.push_back(VK_NV_DEDICATED_ALLOCATION_EXTENSION_NAME);
+ } else {
+ printf("%s VK_NV_DEDICATED_ALLOCATION_EXTENSION_NAME Extension not supported, skipping test\n", kSkipPrefix);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ m_errorMonitor->ExpectSuccess();
+
+ VkDedicatedAllocationBufferCreateInfoNV dedicated_buffer_create_info = {};
+ dedicated_buffer_create_info.sType = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV;
+ dedicated_buffer_create_info.pNext = nullptr;
+ dedicated_buffer_create_info.dedicatedAllocation = VK_TRUE;
+
+ uint32_t queue_family_index = 0;
+ VkBufferCreateInfo buffer_create_info = {};
+ buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+ buffer_create_info.pNext = &dedicated_buffer_create_info;
+ buffer_create_info.size = 1024;
+ buffer_create_info.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
+ buffer_create_info.queueFamilyIndexCount = 1;
+ buffer_create_info.pQueueFamilyIndices = &queue_family_index;
+
+ VkBuffer buffer;
+ VkResult err = vkCreateBuffer(m_device->device(), &buffer_create_info, NULL, &buffer);
+ ASSERT_VK_SUCCESS(err);
+
+ VkMemoryRequirements memory_reqs;
+ vkGetBufferMemoryRequirements(m_device->device(), buffer, &memory_reqs);
+
+ VkDedicatedAllocationMemoryAllocateInfoNV dedicated_memory_info = {};
+ dedicated_memory_info.sType = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV;
+ dedicated_memory_info.pNext = nullptr;
+ dedicated_memory_info.buffer = buffer;
+ dedicated_memory_info.image = VK_NULL_HANDLE;
+
+ VkMemoryAllocateInfo memory_info = {};
+ memory_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ memory_info.pNext = &dedicated_memory_info;
+ memory_info.allocationSize = memory_reqs.size;
+
+ bool pass;
+ pass = m_device->phy().set_memory_type(memory_reqs.memoryTypeBits, &memory_info, 0);
+ ASSERT_TRUE(pass);
+
+ VkDeviceMemory buffer_memory;
+ err = vkAllocateMemory(m_device->device(), &memory_info, NULL, &buffer_memory);
+ ASSERT_VK_SUCCESS(err);
+
+ err = vkBindBufferMemory(m_device->device(), buffer, buffer_memory, 0);
+ ASSERT_VK_SUCCESS(err);
+
+ vkDestroyBuffer(m_device->device(), buffer, NULL);
+ vkFreeMemory(m_device->device(), buffer_memory, NULL);
+
+ m_errorMonitor->VerifyNotFound();
}
-void CreatePipelineHelper::InitInfo() {
- InitDescriptorSetInfo();
- InitInputAndVertexInfo();
- InitMultisampleInfo();
- InitPipelineLayoutInfo();
- InitViewportInfo();
- InitDynamicStateInfo();
- InitShaderInfo();
- InitRasterizationInfo();
- InitLineRasterizationInfo();
- InitBlendStateInfo();
- InitGraphicsPipelineInfo();
- InitPipelineCacheInfo();
+TEST_F(VkPositiveLayerTest, PSOPolygonModeValid) {
+ TEST_DESCRIPTION("Verify that using a solid polygon fill mode works correctly.");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ std::vector<const char *> device_extension_names;
+ auto features = m_device->phy().features();
+ // Artificially disable support for non-solid fill modes
+ features.fillModeNonSolid = false;
+ // The sacrificial device object
+ VkDeviceObj test_device(0, gpu(), device_extension_names, &features);
+
+ VkRenderpassObj render_pass(&test_device);
+
+ const VkPipelineLayoutObj pipeline_layout(&test_device);
+
+ VkPipelineRasterizationStateCreateInfo rs_ci = {};
+ rs_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
+ rs_ci.pNext = nullptr;
+ rs_ci.lineWidth = 1.0f;
+ rs_ci.rasterizerDiscardEnable = false;
+
+ VkShaderObj vs(&test_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(&test_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ // Set polygonMode=FILL. No error is expected
+ m_errorMonitor->ExpectSuccess();
+ {
+ VkPipelineObj pipe(&test_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+ pipe.AddDefaultColorAttachment();
+ // Set polygonMode to a good value
+ rs_ci.polygonMode = VK_POLYGON_MODE_FILL;
+ pipe.SetRasterization(&rs_ci);
+ pipe.CreateVKPipeline(pipeline_layout.handle(), render_pass.handle());
+ }
+ m_errorMonitor->VerifyNotFound();
}
-void CreatePipelineHelper::InitState() {
+TEST_F(VkPositiveLayerTest, LongSemaphoreChain) {
+ m_errorMonitor->ExpectSuccess();
+
+ ASSERT_NO_FATAL_FAILURE(Init());
VkResult err;
- descriptor_set_.reset(new OneOffDescriptorSet(layer_test_.DeviceObj(), dsl_bindings_));
- ASSERT_TRUE(descriptor_set_->Initialized());
- const std::vector<VkPushConstantRange> push_ranges(
- pipeline_layout_ci_.pPushConstantRanges,
- pipeline_layout_ci_.pPushConstantRanges + pipeline_layout_ci_.pushConstantRangeCount);
- pipeline_layout_ = VkPipelineLayoutObj(layer_test_.DeviceObj(), {&descriptor_set_->layout_}, push_ranges);
+ std::vector<VkSemaphore> semaphores;
+
+ const int chainLength = 32768;
+ VkPipelineStageFlags flags = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
+
+ for (int i = 0; i < chainLength; i++) {
+ VkSemaphoreCreateInfo sci = {VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO, nullptr, 0};
+ VkSemaphore semaphore;
+ err = vkCreateSemaphore(m_device->device(), &sci, nullptr, &semaphore);
+ ASSERT_VK_SUCCESS(err);
+
+ semaphores.push_back(semaphore);
+
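+        // After the first submit, each new submit waits on the semaphore signaled by the
+        // previous iteration and signals the semaphore created this iteration, extending the chain.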
+ VkSubmitInfo si = {VK_STRUCTURE_TYPE_SUBMIT_INFO,
+ nullptr,
+ semaphores.size() > 1 ? 1u : 0u,
+ semaphores.size() > 1 ? &semaphores[semaphores.size() - 2] : nullptr,
+ &flags,
+ 0,
+ nullptr,
+ 1,
+ &semaphores[semaphores.size() - 1]};
+ err = vkQueueSubmit(m_device->m_queue, 1, &si, VK_NULL_HANDLE);
+ ASSERT_VK_SUCCESS(err);
+ }
- err = vkCreatePipelineCache(layer_test_.device(), &pc_ci_, NULL, &pipeline_cache_);
+ VkFenceCreateInfo fci = {VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, nullptr, 0};
+ VkFence fence;
+ err = vkCreateFence(m_device->device(), &fci, nullptr, &fence);
+ ASSERT_VK_SUCCESS(err);
+ VkSubmitInfo si = {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 1, &semaphores.back(), &flags, 0, nullptr, 0, nullptr};
+ err = vkQueueSubmit(m_device->m_queue, 1, &si, fence);
ASSERT_VK_SUCCESS(err);
+
+ vkWaitForFences(m_device->device(), 1, &fence, VK_TRUE, UINT64_MAX);
+
+ for (auto semaphore : semaphores) vkDestroySemaphore(m_device->device(), semaphore, nullptr);
+
+ vkDestroyFence(m_device->device(), fence, nullptr);
+
+ m_errorMonitor->VerifyNotFound();
}
-void CreatePipelineHelper::LateBindPipelineInfo() {
- // By value or dynamically located items must be late bound
- gp_ci_.layout = pipeline_layout_.handle();
- gp_ci_.stageCount = shader_stages_.size();
- gp_ci_.pStages = shader_stages_.data();
- if ((gp_ci_.pTessellationState == nullptr) && IsValidVkStruct(tess_ci_)) {
- gp_ci_.pTessellationState = &tess_ci_;
+TEST_F(VkPositiveLayerTest, ExternalSemaphore) {
+#ifdef _WIN32
+ const auto extension_name = VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME;
+ const auto handle_type = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR;
+#else
+ const auto extension_name = VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME;
+ const auto handle_type = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR;
+#endif
+ // Check for external semaphore instance extensions
+ if (InstanceExtensionSupported(VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME);
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ } else {
+ printf("%s External semaphore extension not supported, skipping test\n", kSkipPrefix);
+ return;
}
- if ((gp_ci_.pDynamicState == nullptr) && IsValidVkStruct(dyn_state_ci_)) {
- gp_ci_.pDynamicState = &dyn_state_ci_;
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+ // Check for external semaphore device extensions
+ if (DeviceExtensionSupported(gpu(), nullptr, extension_name)) {
+ m_device_extension_names.push_back(extension_name);
+ m_device_extension_names.push_back(VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME);
+ } else {
+ printf("%s External semaphore extension not supported, skipping test\n", kSkipPrefix);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ // Check for external semaphore import and export capability
+ VkPhysicalDeviceExternalSemaphoreInfoKHR esi = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR, nullptr,
+ handle_type};
+ VkExternalSemaphorePropertiesKHR esp = {VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR, nullptr};
+ auto vkGetPhysicalDeviceExternalSemaphorePropertiesKHR =
+ (PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR)vkGetInstanceProcAddr(
+ instance(), "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR");
+ vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(gpu(), &esi, &esp);
+
+ if (!(esp.externalSemaphoreFeatures & VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT_KHR) ||
+ !(esp.externalSemaphoreFeatures & VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT_KHR)) {
+ printf("%s External semaphore does not support importing and exporting, skipping test\n", kSkipPrefix);
+ return;
}
+
+ VkResult err;
+ m_errorMonitor->ExpectSuccess();
+
+ // Create a semaphore to export payload from
+ VkExportSemaphoreCreateInfoKHR esci = {VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR, nullptr, handle_type};
+ VkSemaphoreCreateInfo sci = {VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO, &esci, 0};
+
+ VkSemaphore export_semaphore;
+ err = vkCreateSemaphore(m_device->device(), &sci, nullptr, &export_semaphore);
+ ASSERT_VK_SUCCESS(err);
+
+ // Create a semaphore to import payload into
+ sci.pNext = nullptr;
+ VkSemaphore import_semaphore;
+ err = vkCreateSemaphore(m_device->device(), &sci, nullptr, &import_semaphore);
+ ASSERT_VK_SUCCESS(err);
+
+#ifdef _WIN32
+ // Export semaphore payload to an opaque handle
+ HANDLE handle = nullptr;
+ VkSemaphoreGetWin32HandleInfoKHR ghi = {VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR, nullptr, export_semaphore,
+ handle_type};
+ auto vkGetSemaphoreWin32HandleKHR =
+ (PFN_vkGetSemaphoreWin32HandleKHR)vkGetDeviceProcAddr(m_device->device(), "vkGetSemaphoreWin32HandleKHR");
+ err = vkGetSemaphoreWin32HandleKHR(m_device->device(), &ghi, &handle);
+ ASSERT_VK_SUCCESS(err);
+
+ // Import opaque handle exported above
+ VkImportSemaphoreWin32HandleInfoKHR ihi = {
+ VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR, nullptr, import_semaphore, 0, handle_type, handle, nullptr};
+ auto vkImportSemaphoreWin32HandleKHR =
+ (PFN_vkImportSemaphoreWin32HandleKHR)vkGetDeviceProcAddr(m_device->device(), "vkImportSemaphoreWin32HandleKHR");
+ err = vkImportSemaphoreWin32HandleKHR(m_device->device(), &ihi);
+ ASSERT_VK_SUCCESS(err);
+#else
+ // Export semaphore payload to an opaque handle
+ int fd = 0;
+ VkSemaphoreGetFdInfoKHR ghi = {VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR, nullptr, export_semaphore, handle_type};
+ auto vkGetSemaphoreFdKHR = (PFN_vkGetSemaphoreFdKHR)vkGetDeviceProcAddr(m_device->device(), "vkGetSemaphoreFdKHR");
+ err = vkGetSemaphoreFdKHR(m_device->device(), &ghi, &fd);
+ ASSERT_VK_SUCCESS(err);
+
+ // Import opaque handle exported above
+ VkImportSemaphoreFdInfoKHR ihi = {
+ VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR, nullptr, import_semaphore, 0, handle_type, fd};
+ auto vkImportSemaphoreFdKHR = (PFN_vkImportSemaphoreFdKHR)vkGetDeviceProcAddr(m_device->device(), "vkImportSemaphoreFdKHR");
+ err = vkImportSemaphoreFdKHR(m_device->device(), &ihi);
+ ASSERT_VK_SUCCESS(err);
+#endif
+
+ // Signal the exported semaphore and wait on the imported semaphore
+ VkPipelineStageFlags flags = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
+ VkSubmitInfo si[] = {
+ {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 0, nullptr, &flags, 0, nullptr, 1, &export_semaphore},
+ {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 1, &import_semaphore, &flags, 0, nullptr, 0, nullptr},
+ {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 0, nullptr, &flags, 0, nullptr, 1, &export_semaphore},
+ {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 1, &import_semaphore, &flags, 0, nullptr, 0, nullptr},
+ };
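+    // Because importing transfers a reference to the exported payload, the waits on
+    // import_semaphore are expected to be satisfied by the signals on export_semaphore.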
+ err = vkQueueSubmit(m_device->m_queue, 4, si, VK_NULL_HANDLE);
+ ASSERT_VK_SUCCESS(err);
+
+ if (m_device->phy().features().sparseBinding) {
+ // Signal the imported semaphore and wait on the exported semaphore
+ VkBindSparseInfo bi[] = {
+ {VK_STRUCTURE_TYPE_BIND_SPARSE_INFO, nullptr, 0, nullptr, 0, nullptr, 0, nullptr, 0, nullptr, 1, &import_semaphore},
+ {VK_STRUCTURE_TYPE_BIND_SPARSE_INFO, nullptr, 1, &export_semaphore, 0, nullptr, 0, nullptr, 0, nullptr, 0, nullptr},
+ {VK_STRUCTURE_TYPE_BIND_SPARSE_INFO, nullptr, 0, nullptr, 0, nullptr, 0, nullptr, 0, nullptr, 1, &import_semaphore},
+ {VK_STRUCTURE_TYPE_BIND_SPARSE_INFO, nullptr, 1, &export_semaphore, 0, nullptr, 0, nullptr, 0, nullptr, 0, nullptr},
+ };
+ err = vkQueueBindSparse(m_device->m_queue, 4, bi, VK_NULL_HANDLE);
+ ASSERT_VK_SUCCESS(err);
+ }
+
+ // Cleanup
+ err = vkQueueWaitIdle(m_device->m_queue);
+ ASSERT_VK_SUCCESS(err);
+ vkDestroySemaphore(m_device->device(), export_semaphore, nullptr);
+ vkDestroySemaphore(m_device->device(), import_semaphore, nullptr);
+
+ m_errorMonitor->VerifyNotFound();
}
-VkResult CreatePipelineHelper::CreateGraphicsPipeline(bool implicit_destroy, bool do_late_bind) {
+TEST_F(VkPositiveLayerTest, ExternalFence) {
+#ifdef _WIN32
+ const auto extension_name = VK_KHR_EXTERNAL_FENCE_WIN32_EXTENSION_NAME;
+ const auto handle_type = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR;
+#else
+ const auto extension_name = VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME;
+ const auto handle_type = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR;
+#endif
+ // Check for external fence instance extensions
+ if (InstanceExtensionSupported(VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME);
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ } else {
+ printf("%s External fence extension not supported, skipping test\n", kSkipPrefix);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+ // Check for external fence device extensions
+ if (DeviceExtensionSupported(gpu(), nullptr, extension_name)) {
+ m_device_extension_names.push_back(extension_name);
+ m_device_extension_names.push_back(VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME);
+ } else {
+ printf("%s External fence extension not supported, skipping test\n", kSkipPrefix);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ // Check for external fence import and export capability
+ VkPhysicalDeviceExternalFenceInfoKHR efi = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR, nullptr, handle_type};
+ VkExternalFencePropertiesKHR efp = {VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR, nullptr};
+ auto vkGetPhysicalDeviceExternalFencePropertiesKHR = (PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR)vkGetInstanceProcAddr(
+ instance(), "vkGetPhysicalDeviceExternalFencePropertiesKHR");
+ vkGetPhysicalDeviceExternalFencePropertiesKHR(gpu(), &efi, &efp);
+
+ if (!(efp.externalFenceFeatures & VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT_KHR) ||
+ !(efp.externalFenceFeatures & VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT_KHR)) {
+ printf("%s External fence does not support importing and exporting, skipping test\n", kSkipPrefix);
+ return;
+ }
+
VkResult err;
- if (do_late_bind) {
- LateBindPipelineInfo();
+ m_errorMonitor->ExpectSuccess();
+
+ // Create a fence to export payload from
+ VkFence export_fence;
+ {
+ VkExportFenceCreateInfoKHR efci = {VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR, nullptr, handle_type};
+ VkFenceCreateInfo fci = {VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, &efci, 0};
+ err = vkCreateFence(m_device->device(), &fci, nullptr, &export_fence);
+ ASSERT_VK_SUCCESS(err);
+ }
+
+ // Create a fence to import payload into
+ VkFence import_fence;
+ {
+ VkFenceCreateInfo fci = {VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, nullptr, 0};
+ err = vkCreateFence(m_device->device(), &fci, nullptr, &import_fence);
+ ASSERT_VK_SUCCESS(err);
+ }
+
+#ifdef _WIN32
+ // Export fence payload to an opaque handle
+ HANDLE handle = nullptr;
+ {
+ VkFenceGetWin32HandleInfoKHR ghi = {VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR, nullptr, export_fence, handle_type};
+ auto vkGetFenceWin32HandleKHR =
+ (PFN_vkGetFenceWin32HandleKHR)vkGetDeviceProcAddr(m_device->device(), "vkGetFenceWin32HandleKHR");
+ err = vkGetFenceWin32HandleKHR(m_device->device(), &ghi, &handle);
+ ASSERT_VK_SUCCESS(err);
+ }
+
+ // Import opaque handle exported above
+ {
+ VkImportFenceWin32HandleInfoKHR ifi = {
+ VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR, nullptr, import_fence, 0, handle_type, handle, nullptr};
+ auto vkImportFenceWin32HandleKHR =
+ (PFN_vkImportFenceWin32HandleKHR)vkGetDeviceProcAddr(m_device->device(), "vkImportFenceWin32HandleKHR");
+ err = vkImportFenceWin32HandleKHR(m_device->device(), &ifi);
+ ASSERT_VK_SUCCESS(err);
}
- if (implicit_destroy && (pipeline_ != VK_NULL_HANDLE)) {
- vkDestroyPipeline(layer_test_.device(), pipeline_, nullptr);
- pipeline_ = VK_NULL_HANDLE;
+#else
+ // Export fence payload to an opaque handle
+ int fd = 0;
+ {
+ VkFenceGetFdInfoKHR gfi = {VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR, nullptr, export_fence, handle_type};
+ auto vkGetFenceFdKHR = (PFN_vkGetFenceFdKHR)vkGetDeviceProcAddr(m_device->device(), "vkGetFenceFdKHR");
+ err = vkGetFenceFdKHR(m_device->device(), &gfi, &fd);
+ ASSERT_VK_SUCCESS(err);
+ }
+
+ // Import opaque handle exported above
+ {
+ VkImportFenceFdInfoKHR ifi = {VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR, nullptr, import_fence, 0, handle_type, fd};
+ auto vkImportFenceFdKHR = (PFN_vkImportFenceFdKHR)vkGetDeviceProcAddr(m_device->device(), "vkImportFenceFdKHR");
+ err = vkImportFenceFdKHR(m_device->device(), &ifi);
+ ASSERT_VK_SUCCESS(err);
}
- err = vkCreateGraphicsPipelines(layer_test_.device(), pipeline_cache_, 1, &gp_ci_, NULL, &pipeline_);
- return err;
+#endif
+
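+    // After import, export_fence and import_fence refer to the same underlying payload,
+    // so a signal on one handle should be observable by waiting on the other (both directions below).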
+ // Signal the exported fence and wait on the imported fence
+ vkQueueSubmit(m_device->m_queue, 0, nullptr, export_fence);
+ vkWaitForFences(m_device->device(), 1, &import_fence, VK_TRUE, 1000000000);
+ vkResetFences(m_device->device(), 1, &import_fence);
+ vkQueueSubmit(m_device->m_queue, 0, nullptr, export_fence);
+ vkWaitForFences(m_device->device(), 1, &import_fence, VK_TRUE, 1000000000);
+ vkResetFences(m_device->device(), 1, &import_fence);
+
+ // Signal the imported fence and wait on the exported fence
+ vkQueueSubmit(m_device->m_queue, 0, nullptr, import_fence);
+ vkWaitForFences(m_device->device(), 1, &export_fence, VK_TRUE, 1000000000);
+ vkResetFences(m_device->device(), 1, &export_fence);
+ vkQueueSubmit(m_device->m_queue, 0, nullptr, import_fence);
+ vkWaitForFences(m_device->device(), 1, &export_fence, VK_TRUE, 1000000000);
+ vkResetFences(m_device->device(), 1, &export_fence);
+
+ // Cleanup
+ err = vkQueueWaitIdle(m_device->m_queue);
+ ASSERT_VK_SUCCESS(err);
+ vkDestroyFence(m_device->device(), export_fence, nullptr);
+ vkDestroyFence(m_device->device(), import_fence, nullptr);
+
+ m_errorMonitor->VerifyNotFound();
}
-CreateComputePipelineHelper::CreateComputePipelineHelper(VkLayerTest &test) : layer_test_(test) {}
+extern "C" void *ReleaseNullFence(void *arg) {
+ struct thread_data_struct *data = (struct thread_data_struct *)arg;
-CreateComputePipelineHelper::~CreateComputePipelineHelper() {
- VkDevice device = layer_test_.device();
- vkDestroyPipelineCache(device, pipeline_cache_, nullptr);
- vkDestroyPipeline(device, pipeline_, nullptr);
+ for (int i = 0; i < 40000; i++) {
+ vkDestroyFence(data->device, VK_NULL_HANDLE, NULL);
+ if (data->bailout) {
+ break;
+ }
+ }
+ return NULL;
}
-void CreateComputePipelineHelper::InitDescriptorSetInfo() {
- dsl_bindings_ = {{0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr}};
+TEST_F(VkPositiveLayerTest, ThreadNullFenceCollision) {
+ test_platform_thread thread;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "THREADING ERROR");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ struct thread_data_struct data;
+ data.device = m_device->device();
+ data.bailout = false;
+ m_errorMonitor->SetBailout(&data.bailout);
+
+ // Call vkDestroyFence of VK_NULL_HANDLE repeatedly using multiple threads.
+ // There should be no validation error from collision of that non-object.
+ test_platform_thread_create(&thread, ReleaseNullFence, (void *)&data);
+ for (int i = 0; i < 40000; i++) {
+ vkDestroyFence(m_device->device(), VK_NULL_HANDLE, NULL);
+ }
+ test_platform_thread_join(thread, NULL);
+
+ m_errorMonitor->SetBailout(NULL);
+
+ m_errorMonitor->VerifyNotFound();
}
-void CreateComputePipelineHelper::InitPipelineLayoutInfo() {
- pipeline_layout_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
- pipeline_layout_ci_.setLayoutCount = 1; // Not really changeable because InitState() sets exactly one pSetLayout
- pipeline_layout_ci_.pSetLayouts = nullptr; // must bound after it is created
+TEST_F(VkPositiveLayerTest, ClearColorImageWithValidRange) {
+ TEST_DESCRIPTION("Record clear color with a valid VkImageSubresourceRange");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkImageObj image(m_device);
+ image.Init(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL);
+ ASSERT_TRUE(image.create_info().arrayLayers == 1);
+ ASSERT_TRUE(image.initialized());
+ image.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
+
+ const VkClearColorValue clear_color = {{0.0f, 0.0f, 0.0f, 1.0f}};
+
+ m_commandBuffer->begin();
+ const auto cb_handle = m_commandBuffer->handle();
+
+ // Try good case
+ {
+ m_errorMonitor->ExpectSuccess();
+ VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
+ vkCmdClearColorImage(cb_handle, image.handle(), image.Layout(), &clear_color, 1, &range);
+ m_errorMonitor->VerifyNotFound();
+ }
+
+ // Try good case with VK_REMAINING
+ {
+ m_errorMonitor->ExpectSuccess();
+ VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, VK_REMAINING_MIP_LEVELS, 0, VK_REMAINING_ARRAY_LAYERS};
+ vkCmdClearColorImage(cb_handle, image.handle(), image.Layout(), &clear_color, 1, &range);
+ m_errorMonitor->VerifyNotFound();
+ }
}
-void CreateComputePipelineHelper::InitShaderInfo() {
- cs_.reset(new VkShaderObj(layer_test_.DeviceObj(), bindStateMinimalShaderText, VK_SHADER_STAGE_COMPUTE_BIT, &layer_test_));
- // We shouldn't need a fragment shader but add it to be able to run on more devices
+TEST_F(VkPositiveLayerTest, ClearDepthStencilWithValidRange) {
+ TEST_DESCRIPTION("Record clear depth with a valid VkImageSubresourceRange");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ auto depth_format = FindSupportedDepthStencilFormat(gpu());
+ if (!depth_format) {
+ printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
+ return;
+ }
+
+ VkImageObj image(m_device);
+ image.Init(32, 32, 1, depth_format, VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL);
+ ASSERT_TRUE(image.create_info().arrayLayers == 1);
+ ASSERT_TRUE(image.initialized());
+ const VkImageAspectFlags ds_aspect = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
+ image.SetLayout(ds_aspect, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
+
+ const VkClearDepthStencilValue clear_value = {};
+
+ m_commandBuffer->begin();
+ const auto cb_handle = m_commandBuffer->handle();
+
+ // Try good case
+ {
+ m_errorMonitor->ExpectSuccess();
+ VkImageSubresourceRange range = {ds_aspect, 0, 1, 0, 1};
+ vkCmdClearDepthStencilImage(cb_handle, image.handle(), image.Layout(), &clear_value, 1, &range);
+ m_errorMonitor->VerifyNotFound();
+ }
+
+ // Try good case with VK_REMAINING
+ {
+ m_errorMonitor->ExpectSuccess();
+ VkImageSubresourceRange range = {ds_aspect, 0, VK_REMAINING_MIP_LEVELS, 0, VK_REMAINING_ARRAY_LAYERS};
+ vkCmdClearDepthStencilImage(cb_handle, image.handle(), image.Layout(), &clear_value, 1, &range);
+ m_errorMonitor->VerifyNotFound();
+ }
}
-void CreateComputePipelineHelper::InitComputePipelineInfo() {
- cp_ci_.sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO;
- cp_ci_.pNext = nullptr;
- cp_ci_.flags = 0;
+TEST_F(VkPositiveLayerTest, CreateGraphicsPipelineWithIgnoredPointers) {
+ TEST_DESCRIPTION("Create Graphics Pipeline with pointers that must be ignored by layers");
+
+ ASSERT_NO_FATAL_FAILURE(Init());
+
+ m_depth_stencil_fmt = FindSupportedDepthStencilFormat(gpu());
+ ASSERT_TRUE(m_depth_stencil_fmt != 0);
+
+ m_depthStencil->Init(m_device, static_cast<int32_t>(m_width), static_cast<int32_t>(m_height), m_depth_stencil_fmt);
+
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget(m_depthStencil->BindInfo()));
+
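+    // Deliberately bogus pointers: if a layer dereferences state it is required to ignore
+    // for these pipeline configurations, the test will most likely crash.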
+ const uint64_t fake_address_64 = 0xCDCDCDCDCDCDCDCD;
+ const uint64_t fake_address_32 = 0xCDCDCDCD;
+ void *hopefully_undereferencable_pointer =
+ sizeof(void *) == 8 ? reinterpret_cast<void *>(fake_address_64) : reinterpret_cast<void *>(fake_address_32);
+
+ VkShaderObj vs(m_device, "#version 450\nvoid main(){gl_Position = vec4(0.0, 0.0, 0.0, 1.0);}\n", VK_SHADER_STAGE_VERTEX_BIT,
+ this);
+
+ const VkPipelineVertexInputStateCreateInfo pipeline_vertex_input_state_create_info{
+ VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
+ nullptr, // pNext
+ 0, // flags
+ 0,
+ nullptr, // bindings
+ 0,
+ nullptr // attributes
+ };
+
+ const VkPipelineInputAssemblyStateCreateInfo pipeline_input_assembly_state_create_info{
+ VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
+ nullptr, // pNext
+ 0, // flags
+ VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,
+ VK_FALSE // primitive restart
+ };
+
+ const VkPipelineRasterizationStateCreateInfo pipeline_rasterization_state_create_info_template{
+ VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
+ nullptr, // pNext
+ 0, // flags
+ VK_FALSE, // depthClamp
+ VK_FALSE, // rasterizerDiscardEnable
+ VK_POLYGON_MODE_FILL,
+ VK_CULL_MODE_NONE,
+ VK_FRONT_FACE_COUNTER_CLOCKWISE,
+ VK_FALSE, // depthBias
+ 0.0f,
+ 0.0f,
+ 0.0f, // depthBias params
+ 1.0f // lineWidth
+ };
+
+ VkPipelineLayout pipeline_layout;
+ {
+ VkPipelineLayoutCreateInfo pipeline_layout_create_info{
+ VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
+ nullptr, // pNext
+ 0, // flags
+ 0,
+ nullptr, // layouts
+ 0,
+ nullptr // push constants
+ };
+
+ VkResult err = vkCreatePipelineLayout(m_device->device(), &pipeline_layout_create_info, nullptr, &pipeline_layout);
+ ASSERT_VK_SUCCESS(err);
+ }
+
+ // try disabled rasterizer and no tessellation
+ {
+ m_errorMonitor->ExpectSuccess();
+
+ VkPipelineRasterizationStateCreateInfo pipeline_rasterization_state_create_info =
+ pipeline_rasterization_state_create_info_template;
+ pipeline_rasterization_state_create_info.rasterizerDiscardEnable = VK_TRUE;
+
+ VkGraphicsPipelineCreateInfo graphics_pipeline_create_info{
+ VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
+ nullptr, // pNext
+ 0, // flags
+ 1, // stageCount
+ &vs.GetStageCreateInfo(),
+ &pipeline_vertex_input_state_create_info,
+ &pipeline_input_assembly_state_create_info,
+ reinterpret_cast<const VkPipelineTessellationStateCreateInfo *>(hopefully_undereferencable_pointer),
+ reinterpret_cast<const VkPipelineViewportStateCreateInfo *>(hopefully_undereferencable_pointer),
+ &pipeline_rasterization_state_create_info,
+ reinterpret_cast<const VkPipelineMultisampleStateCreateInfo *>(hopefully_undereferencable_pointer),
+ reinterpret_cast<const VkPipelineDepthStencilStateCreateInfo *>(hopefully_undereferencable_pointer),
+ reinterpret_cast<const VkPipelineColorBlendStateCreateInfo *>(hopefully_undereferencable_pointer),
+ nullptr, // dynamic states
+ pipeline_layout,
+ m_renderPass,
+ 0, // subpass
+ VK_NULL_HANDLE,
+ 0};
+
+ VkPipeline pipeline;
+ vkCreateGraphicsPipelines(m_device->handle(), VK_NULL_HANDLE, 1, &graphics_pipeline_create_info, nullptr, &pipeline);
+
+ m_errorMonitor->VerifyNotFound();
+
+ vkDestroyPipeline(m_device->handle(), pipeline, nullptr);
+ }
+
+ const VkPipelineMultisampleStateCreateInfo pipeline_multisample_state_create_info{
+ VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
+ nullptr, // pNext
+ 0, // flags
+ VK_SAMPLE_COUNT_1_BIT,
+ VK_FALSE, // sample shading
+ 0.0f, // minSampleShading
+ nullptr, // pSampleMask
+ VK_FALSE, // alphaToCoverageEnable
+ VK_FALSE // alphaToOneEnable
+ };
+
+ // try enabled rasterizer but no subpass attachments
+ {
+ m_errorMonitor->ExpectSuccess();
+
+ VkPipelineRasterizationStateCreateInfo pipeline_rasterization_state_create_info =
+ pipeline_rasterization_state_create_info_template;
+ pipeline_rasterization_state_create_info.rasterizerDiscardEnable = VK_FALSE;
+
+ VkViewport viewport = {0.0f, 0.0f, 1.0f, 1.0f, 0.0f, 1.0f};
+ VkRect2D scissor = {{0, 0}, {static_cast<uint32_t>(m_width), static_cast<uint32_t>(m_height)}};
+
+ const VkPipelineViewportStateCreateInfo pipeline_viewport_state_create_info{
+ VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
+ nullptr, // pNext
+ 0, // flags
+ 1,
+ &viewport,
+ 1,
+ &scissor};
+
+ VkRenderPass render_pass;
+ {
+ VkSubpassDescription subpass_desc = {};
+
+ VkRenderPassCreateInfo render_pass_create_info{
+ VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
+ nullptr, // pNext
+ 0, // flags
+ 0,
+ nullptr, // attachments
+ 1,
+ &subpass_desc,
+ 0,
+ nullptr // subpass dependencies
+ };
+
+ VkResult err = vkCreateRenderPass(m_device->handle(), &render_pass_create_info, nullptr, &render_pass);
+ ASSERT_VK_SUCCESS(err);
+ }
+
+ VkGraphicsPipelineCreateInfo graphics_pipeline_create_info{
+ VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
+ nullptr, // pNext
+ 0, // flags
+ 1, // stageCount
+ &vs.GetStageCreateInfo(),
+ &pipeline_vertex_input_state_create_info,
+ &pipeline_input_assembly_state_create_info,
+ nullptr,
+ &pipeline_viewport_state_create_info,
+ &pipeline_rasterization_state_create_info,
+ &pipeline_multisample_state_create_info,
+ reinterpret_cast<const VkPipelineDepthStencilStateCreateInfo *>(hopefully_undereferencable_pointer),
+ reinterpret_cast<const VkPipelineColorBlendStateCreateInfo *>(hopefully_undereferencable_pointer),
+ nullptr, // dynamic states
+ pipeline_layout,
+ render_pass,
+ 0, // subpass
+ VK_NULL_HANDLE,
+ 0};
+
+ VkPipeline pipeline;
+ vkCreateGraphicsPipelines(m_device->handle(), VK_NULL_HANDLE, 1, &graphics_pipeline_create_info, nullptr, &pipeline);
+
+ m_errorMonitor->VerifyNotFound();
+
+ vkDestroyPipeline(m_device->handle(), pipeline, nullptr);
+ vkDestroyRenderPass(m_device->handle(), render_pass, nullptr);
+ }
+
+ // try dynamic viewport and scissor
+ {
+ m_errorMonitor->ExpectSuccess();
+
+ VkPipelineRasterizationStateCreateInfo pipeline_rasterization_state_create_info =
+ pipeline_rasterization_state_create_info_template;
+ pipeline_rasterization_state_create_info.rasterizerDiscardEnable = VK_FALSE;
+
+ const VkPipelineViewportStateCreateInfo pipeline_viewport_state_create_info{
+ VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
+ nullptr, // pNext
+ 0, // flags
+ 1,
+ reinterpret_cast<const VkViewport *>(hopefully_undereferencable_pointer),
+ 1,
+ reinterpret_cast<const VkRect2D *>(hopefully_undereferencable_pointer)};
+
+ const VkPipelineDepthStencilStateCreateInfo pipeline_depth_stencil_state_create_info{
+ VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
+ nullptr, // pNext
+ 0, // flags
+ };
+
+ const VkPipelineColorBlendAttachmentState pipeline_color_blend_attachment_state = {};
+
+ const VkPipelineColorBlendStateCreateInfo pipeline_color_blend_state_create_info{
+ VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
+ nullptr, // pNext
+ 0, // flags
+ VK_FALSE,
+ VK_LOGIC_OP_CLEAR,
+ 1,
+ &pipeline_color_blend_attachment_state,
+ {0.0f, 0.0f, 0.0f, 0.0f}};
+
+ const VkDynamicState dynamic_states[2] = {VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR};
+
+ const VkPipelineDynamicStateCreateInfo pipeline_dynamic_state_create_info{
+ VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,
+ nullptr, // pNext
+ 0, // flags
+ 2, dynamic_states};
+
+ VkGraphicsPipelineCreateInfo graphics_pipeline_create_info{VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
+ nullptr, // pNext
+ 0, // flags
+ 1, // stageCount
+ &vs.GetStageCreateInfo(),
+ &pipeline_vertex_input_state_create_info,
+ &pipeline_input_assembly_state_create_info,
+ nullptr,
+ &pipeline_viewport_state_create_info,
+ &pipeline_rasterization_state_create_info,
+ &pipeline_multisample_state_create_info,
+ &pipeline_depth_stencil_state_create_info,
+ &pipeline_color_blend_state_create_info,
+ &pipeline_dynamic_state_create_info, // dynamic states
+ pipeline_layout,
+ m_renderPass,
+ 0, // subpass
+ VK_NULL_HANDLE,
+ 0};
+
+ VkPipeline pipeline;
+ vkCreateGraphicsPipelines(m_device->handle(), VK_NULL_HANDLE, 1, &graphics_pipeline_create_info, nullptr, &pipeline);
+
+ m_errorMonitor->VerifyNotFound();
+
+ vkDestroyPipeline(m_device->handle(), pipeline, nullptr);
+ }
+
+ vkDestroyPipelineLayout(m_device->handle(), pipeline_layout, nullptr);
}
-void CreateComputePipelineHelper::InitPipelineCacheInfo() {
- pc_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
- pc_ci_.pNext = nullptr;
- pc_ci_.flags = 0;
- pc_ci_.initialDataSize = 0;
- pc_ci_.pInitialData = nullptr;
+TEST_F(VkPositiveLayerTest, ExternalMemory) {
+ TEST_DESCRIPTION("Perform a copy through a pair of buffers linked by external memory");
+
+#ifdef _WIN32
+ const auto ext_mem_extension_name = VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME;
+ const auto handle_type = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR;
+#else
+ const auto ext_mem_extension_name = VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME;
+ const auto handle_type = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR;
+#endif
+
+ // Check for external memory instance extensions
+ std::vector<const char *> reqd_instance_extensions = {
+ {VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME, VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME}};
+ for (auto extension_name : reqd_instance_extensions) {
+ if (InstanceExtensionSupported(extension_name)) {
+ m_instance_extension_names.push_back(extension_name);
+ } else {
+ printf("%s Required instance extension %s not supported, skipping test\n", kSkipPrefix, extension_name);
+ return;
+ }
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+ // Check for import/export capability
+ VkPhysicalDeviceExternalBufferInfoKHR ebi = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR, nullptr, 0,
+ VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT, handle_type};
+ VkExternalBufferPropertiesKHR ebp = {VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES_KHR, nullptr, {0, 0, 0}};
+ auto vkGetPhysicalDeviceExternalBufferPropertiesKHR = (PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR)vkGetInstanceProcAddr(
+ instance(), "vkGetPhysicalDeviceExternalBufferPropertiesKHR");
+ ASSERT_TRUE(vkGetPhysicalDeviceExternalBufferPropertiesKHR != nullptr);
+ vkGetPhysicalDeviceExternalBufferPropertiesKHR(gpu(), &ebi, &ebp);
+ if (!(ebp.externalMemoryProperties.compatibleHandleTypes & handle_type) ||
+ !(ebp.externalMemoryProperties.externalMemoryFeatures & VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_KHR) ||
+ !(ebp.externalMemoryProperties.externalMemoryFeatures & VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_KHR)) {
+ printf("%s External buffer does not support importing and exporting, skipping test\n", kSkipPrefix);
+ return;
+ }
+
+ // Check if dedicated allocation is required
+ bool dedicated_allocation =
+ ebp.externalMemoryProperties.externalMemoryFeatures & VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_KHR;
+ if (dedicated_allocation) {
+ if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME)) {
+ m_device_extension_names.push_back(VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+ } else {
+ printf("%s Dedicated allocation extension not supported, skipping test\n", kSkipPrefix);
+ return;
+ }
+ }
+
+ // Check for external memory device extensions
+ if (DeviceExtensionSupported(gpu(), nullptr, ext_mem_extension_name)) {
+ m_device_extension_names.push_back(ext_mem_extension_name);
+ m_device_extension_names.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
+ } else {
+ printf("%s External memory extension not supported, skipping test\n", kSkipPrefix);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ m_errorMonitor->ExpectSuccess(VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT);
+
+ VkMemoryPropertyFlags mem_flags = 0;
+ const VkDeviceSize buffer_size = 1024;
+
+ // Create export and import buffers
+ const VkExternalMemoryBufferCreateInfoKHR external_buffer_info = {VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR,
+ nullptr, handle_type};
+ auto buffer_info = VkBufferObj::create_info(buffer_size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT);
+ buffer_info.pNext = &external_buffer_info;
+ VkBufferObj buffer_export;
+ buffer_export.init_no_mem(*m_device, buffer_info);
+ VkBufferObj buffer_import;
+ buffer_import.init_no_mem(*m_device, buffer_info);
+
+ // Allocation info
+ auto alloc_info = vk_testing::DeviceMemory::get_resource_alloc_info(*m_device, buffer_export.memory_requirements(), mem_flags);
+
+ // Add export allocation info to pNext chain
+ VkExportMemoryAllocateInfoKHR export_info = {VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR, nullptr, handle_type};
+ alloc_info.pNext = &export_info;
+
+ // Add dedicated allocation info to pNext chain if required
+ VkMemoryDedicatedAllocateInfoKHR dedicated_info = {VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR, nullptr,
+ VK_NULL_HANDLE, buffer_export.handle()};
+ if (dedicated_allocation) {
+ export_info.pNext = &dedicated_info;
+ }
+
+ // Allocate memory to be exported
+ vk_testing::DeviceMemory memory_export;
+ memory_export.init(*m_device, alloc_info);
+
+ // Bind exported memory
+ buffer_export.bind_memory(memory_export, 0);
+
+#ifdef _WIN32
+ // Export memory to handle
+ auto vkGetMemoryWin32HandleKHR = (PFN_vkGetMemoryWin32HandleKHR)vkGetInstanceProcAddr(instance(), "vkGetMemoryWin32HandleKHR");
+ ASSERT_TRUE(vkGetMemoryWin32HandleKHR != nullptr);
+ VkMemoryGetWin32HandleInfoKHR mghi = {VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR, nullptr, memory_export.handle(),
+ handle_type};
+ HANDLE handle;
+ ASSERT_VK_SUCCESS(vkGetMemoryWin32HandleKHR(m_device->device(), &mghi, &handle));
+
+ VkImportMemoryWin32HandleInfoKHR import_info = {VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR, nullptr, handle_type,
+ handle};
+#else
+ // Export memory to fd
+ auto vkGetMemoryFdKHR = (PFN_vkGetMemoryFdKHR)vkGetInstanceProcAddr(instance(), "vkGetMemoryFdKHR");
+ ASSERT_TRUE(vkGetMemoryFdKHR != nullptr);
+ VkMemoryGetFdInfoKHR mgfi = {VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR, nullptr, memory_export.handle(), handle_type};
+ int fd;
+ ASSERT_VK_SUCCESS(vkGetMemoryFdKHR(m_device->device(), &mgfi, &fd));
+
+ VkImportMemoryFdInfoKHR import_info = {VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR, nullptr, handle_type, fd};
+#endif
+
+ // Import memory
+ alloc_info = vk_testing::DeviceMemory::get_resource_alloc_info(*m_device, buffer_import.memory_requirements(), mem_flags);
+ alloc_info.pNext = &import_info;
+ vk_testing::DeviceMemory memory_import;
+ memory_import.init(*m_device, alloc_info);
+
+ // Bind imported memory
+ buffer_import.bind_memory(memory_import, 0);
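+    // buffer_export and buffer_import are now backed by the same underlying allocation, so data
+    // copied into the export buffer below should be readable back through the import buffer.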
+
+ // Create test buffers and fill input buffer
+ VkMemoryPropertyFlags mem_prop = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
+ VkBufferObj buffer_input;
+ buffer_input.init_as_src_and_dst(*m_device, buffer_size, mem_prop);
+ auto input_mem = (uint8_t *)buffer_input.memory().map();
+ for (uint32_t i = 0; i < buffer_size; i++) {
+ input_mem[i] = (i & 0xFF);
+ }
+ buffer_input.memory().unmap();
+ VkBufferObj buffer_output;
+ buffer_output.init_as_src_and_dst(*m_device, buffer_size, mem_prop);
+
+ // Copy from input buffer to output buffer through the exported/imported memory
+ m_commandBuffer->begin();
+ VkBufferCopy copy_info = {0, 0, buffer_size};
+ vkCmdCopyBuffer(m_commandBuffer->handle(), buffer_input.handle(), buffer_export.handle(), 1, &copy_info);
+ // Insert memory barrier to guarantee copy order
+ VkMemoryBarrier mem_barrier = {VK_STRUCTURE_TYPE_MEMORY_BARRIER, nullptr, VK_ACCESS_TRANSFER_WRITE_BIT,
+ VK_ACCESS_TRANSFER_READ_BIT};
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1,
+ &mem_barrier, 0, nullptr, 0, nullptr);
+ vkCmdCopyBuffer(m_commandBuffer->handle(), buffer_import.handle(), buffer_output.handle(), 1, &copy_info);
+ m_commandBuffer->end();
+ m_commandBuffer->QueueCommandBuffer();
+
+ m_errorMonitor->VerifyNotFound();
}
-void CreateComputePipelineHelper::InitInfo() {
- InitDescriptorSetInfo();
- InitPipelineLayoutInfo();
- InitShaderInfo();
- InitComputePipelineInfo();
- InitPipelineCacheInfo();
+TEST_F(VkLayerTest, AMDMixedAttachmentSamplesValidateGraphicsPipeline) {
+ TEST_DESCRIPTION("Verify an error message for an incorrect graphics pipeline rasterization sample count.");
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ if (DeviceExtensionSupported(gpu(), nullptr, VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME)) {
+ m_device_extension_names.push_back(VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME);
+ } else {
+ printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ VkRenderpassObj render_pass(m_device);
+
+ const VkPipelineLayoutObj pipeline_layout(m_device);
+
+ VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ // Set a mismatched sample count
+
+ VkPipelineMultisampleStateCreateInfo ms_state_ci = {};
+ ms_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
+ ms_state_ci.rasterizationSamples = VK_SAMPLE_COUNT_4_BIT;
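+    // The render pass color attachment here is single-sampled, so a 4-sample rasterization
+    // state should trigger VUID-VkGraphicsPipelineCreateInfo-subpass-01505 below.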
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+ pipe.AddDefaultColorAttachment();
+ pipe.SetMSAA(&ms_state_ci);
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkGraphicsPipelineCreateInfo-subpass-01505");
+
+ pipe.CreateVKPipeline(pipeline_layout.handle(), render_pass.handle());
+
+ m_errorMonitor->VerifyFound();
}
-void CreateComputePipelineHelper::InitState() {
- VkResult err;
- descriptor_set_.reset(new OneOffDescriptorSet(layer_test_.DeviceObj(), dsl_bindings_));
- ASSERT_TRUE(descriptor_set_->Initialized());
+TEST_F(VkPositiveLayerTest, ParameterLayerFeatures2Capture) {
+ TEST_DESCRIPTION("Ensure parameter_validation_layer correctly captures physical device features");
+ if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ } else {
+ printf("%s Did not find VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME; skipped.\n", kSkipPrefix);
+ return;
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- const std::vector<VkPushConstantRange> push_ranges(
- pipeline_layout_ci_.pPushConstantRanges,
- pipeline_layout_ci_.pPushConstantRanges + pipeline_layout_ci_.pushConstantRangeCount);
- pipeline_layout_ = VkPipelineLayoutObj(layer_test_.DeviceObj(), {&descriptor_set_->layout_}, push_ranges);
+ PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+ (PFN_vkGetPhysicalDeviceFeatures2KHR)vkGetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+ ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
- err = vkCreatePipelineCache(layer_test_.device(), &pc_ci_, NULL, &pipeline_cache_);
+ VkResult err;
+ m_errorMonitor->ExpectSuccess();
+
+ VkPhysicalDeviceFeatures2KHR features2;
+ features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR;
+ features2.pNext = nullptr;
+
+ vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
+
+ // We're not creating a valid m_device, but the phy wrapper is useful
+ vk_testing::PhysicalDevice physical_device(gpu());
+ vk_testing::QueueCreateInfoArray queue_info(physical_device.queue_properties());
+    // Only request creation with queue families that have at least one queue
+ std::vector<VkDeviceQueueCreateInfo> create_queue_infos;
+ auto qci = queue_info.data();
+ for (uint32_t i = 0; i < queue_info.size(); ++i) {
+ if (qci[i].queueCount) {
+ create_queue_infos.push_back(qci[i]);
+ }
+ }
+
+ VkDeviceCreateInfo dev_info = {};
+ dev_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
+ dev_info.pNext = &features2;
+ dev_info.flags = 0;
+ dev_info.queueCreateInfoCount = create_queue_infos.size();
+ dev_info.pQueueCreateInfos = create_queue_infos.data();
+ dev_info.enabledLayerCount = 0;
+ dev_info.ppEnabledLayerNames = nullptr;
+ dev_info.enabledExtensionCount = 0;
+ dev_info.ppEnabledExtensionNames = nullptr;
+ dev_info.pEnabledFeatures = nullptr;
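+    // Features are supplied through the VkPhysicalDeviceFeatures2KHR in the pNext chain rather
+    // than through pEnabledFeatures; the layers must capture them from there.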
+
+ VkDevice device;
+ err = vkCreateDevice(gpu(), &dev_info, nullptr, &device);
ASSERT_VK_SUCCESS(err);
+
+ if (features2.features.samplerAnisotropy) {
+ // Test that the parameter layer is caching the features correctly using CreateSampler
+ VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
+ // If the features were not captured correctly, this should cause an error
+ sampler_ci.anisotropyEnable = VK_TRUE;
+ sampler_ci.maxAnisotropy = physical_device.properties().limits.maxSamplerAnisotropy;
+
+ VkSampler sampler = VK_NULL_HANDLE;
+ err = vkCreateSampler(device, &sampler_ci, nullptr, &sampler);
+ ASSERT_VK_SUCCESS(err);
+ vkDestroySampler(device, sampler, nullptr);
+ } else {
+ printf("%s Feature samplerAnisotropy not enabled; parameter_layer check skipped.\n", kSkipPrefix);
+ }
+
+    // Verify the core validation layer has captured the physical device features by creating a query pool.
+ if (features2.features.pipelineStatisticsQuery) {
+ VkQueryPool query_pool;
+ VkQueryPoolCreateInfo qpci{};
+ qpci.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
+ qpci.queryType = VK_QUERY_TYPE_PIPELINE_STATISTICS;
+ qpci.queryCount = 1;
+ err = vkCreateQueryPool(device, &qpci, nullptr, &query_pool);
+ ASSERT_VK_SUCCESS(err);
+
+ vkDestroyQueryPool(device, query_pool, nullptr);
+ } else {
+ printf("%s Feature pipelineStatisticsQuery not enabled; core_validation_layer check skipped.\n", kSkipPrefix);
+ }
+
+ vkDestroyDevice(device, nullptr);
+
+ m_errorMonitor->VerifyNotFound();
}
-void CreateComputePipelineHelper::LateBindPipelineInfo() {
- // By value or dynamically located items must be late bound
- cp_ci_.layout = pipeline_layout_.handle();
- cp_ci_.stage = cs_.get()->GetStageCreateInfo();
+TEST_F(VkPositiveLayerTest, GetMemoryRequirements2) {
+ TEST_DESCRIPTION(
+ "Get memory requirements with VK_KHR_get_memory_requirements2 instead of core entry points and verify layers do not emit "
+ "errors when objects are bound and used");
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    // Check for the VK_KHR_get_memory_requirements2 extension
+ if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME)) {
+ m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+ } else {
+ printf("%s %s not supported, skipping test\n", kSkipPrefix, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+ return;
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ m_errorMonitor->ExpectSuccess(VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT);
+
+ // Create a test buffer
+ VkBufferObj buffer;
+ buffer.init_no_mem(*m_device,
+ VkBufferObj::create_info(1024, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT));
+
+ // Use extension to get buffer memory requirements
+ auto vkGetBufferMemoryRequirements2KHR = reinterpret_cast<PFN_vkGetBufferMemoryRequirements2KHR>(
+ vkGetDeviceProcAddr(m_device->device(), "vkGetBufferMemoryRequirements2KHR"));
+ ASSERT_TRUE(vkGetBufferMemoryRequirements2KHR != nullptr);
+ VkBufferMemoryRequirementsInfo2KHR buffer_info = {VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR, nullptr,
+ buffer.handle()};
+ VkMemoryRequirements2KHR buffer_reqs = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR};
+ vkGetBufferMemoryRequirements2KHR(m_device->device(), &buffer_info, &buffer_reqs);
+
+ // Allocate and bind buffer memory
+ vk_testing::DeviceMemory buffer_memory;
+ buffer_memory.init(*m_device, vk_testing::DeviceMemory::get_resource_alloc_info(*m_device, buffer_reqs.memoryRequirements, 0));
+ vkBindBufferMemory(m_device->device(), buffer.handle(), buffer_memory.handle(), 0);
+
+ // Create a test image
+ auto image_ci = vk_testing::Image::create_info();
+ image_ci.imageType = VK_IMAGE_TYPE_2D;
+ image_ci.extent.width = 32;
+ image_ci.extent.height = 32;
+ image_ci.format = VK_FORMAT_R8G8B8A8_UNORM;
+ image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
+ image_ci.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+ vk_testing::Image image;
+ image.init_no_mem(*m_device, image_ci);
+
+ // Use extension to get image memory requirements
+ auto vkGetImageMemoryRequirements2KHR = reinterpret_cast<PFN_vkGetImageMemoryRequirements2KHR>(
+ vkGetDeviceProcAddr(m_device->device(), "vkGetImageMemoryRequirements2KHR"));
+ ASSERT_TRUE(vkGetImageMemoryRequirements2KHR != nullptr);
+ VkImageMemoryRequirementsInfo2KHR image_info = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR, nullptr,
+ image.handle()};
+ VkMemoryRequirements2KHR image_reqs = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR};
+ vkGetImageMemoryRequirements2KHR(m_device->device(), &image_info, &image_reqs);
+
+ // Allocate and bind image memory
+ vk_testing::DeviceMemory image_memory;
+ image_memory.init(*m_device, vk_testing::DeviceMemory::get_resource_alloc_info(*m_device, image_reqs.memoryRequirements, 0));
+ vkBindImageMemory(m_device->device(), image.handle(), image_memory.handle(), 0);
+
+ // Now execute arbitrary commands that use the test buffer and image
+ m_commandBuffer->begin();
+
+ // Fill buffer with 0
+ vkCmdFillBuffer(m_commandBuffer->handle(), buffer.handle(), 0, VK_WHOLE_SIZE, 0);
+
+ // Transition and clear image
+ const auto subresource_range = image.subresource_range(VK_IMAGE_ASPECT_COLOR_BIT);
+ const auto barrier = image.image_memory_barrier(0, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED,
+ VK_IMAGE_LAYOUT_GENERAL, subresource_range);
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
+ nullptr, 0, nullptr, 1, &barrier);
+ const VkClearColorValue color = {};
+ vkCmdClearColorImage(m_commandBuffer->handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, &color, 1, &subresource_range);
+
+ // Submit and verify no validation errors
+ m_commandBuffer->end();
+ m_commandBuffer->QueueCommandBuffer();
+ m_errorMonitor->VerifyNotFound();
}
-VkResult CreateComputePipelineHelper::CreateComputePipeline(bool implicit_destroy, bool do_late_bind) {
- VkResult err;
- if (do_late_bind) {
- LateBindPipelineInfo();
+TEST_F(VkPositiveLayerTest, BindMemory2) {
+ TEST_DESCRIPTION(
+ "Bind memory with VK_KHR_bind_memory2 instead of core entry points and verify layers do not emit errors when objects are "
+ "used");
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    // Check for the VK_KHR_bind_memory2 extension
+ if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME)) {
+ m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+ } else {
+ printf("%s %s not supported, skipping test\n", kSkipPrefix, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+ return;
}
- if (implicit_destroy && (pipeline_ != VK_NULL_HANDLE)) {
- vkDestroyPipeline(layer_test_.device(), pipeline_, nullptr);
- pipeline_ = VK_NULL_HANDLE;
+
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ m_errorMonitor->ExpectSuccess(VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT);
+
+ // Create a test buffer
+ VkBufferObj buffer;
+ buffer.init_no_mem(*m_device, VkBufferObj::create_info(1024, VK_BUFFER_USAGE_TRANSFER_DST_BIT));
+
+ // Allocate buffer memory
+ vk_testing::DeviceMemory buffer_memory;
+ buffer_memory.init(*m_device, vk_testing::DeviceMemory::get_resource_alloc_info(*m_device, buffer.memory_requirements(), 0));
+
+ // Bind buffer memory with extension
+ auto vkBindBufferMemory2KHR =
+ reinterpret_cast<PFN_vkBindBufferMemory2KHR>(vkGetDeviceProcAddr(m_device->device(), "vkBindBufferMemory2KHR"));
+ ASSERT_TRUE(vkBindBufferMemory2KHR != nullptr);
+ VkBindBufferMemoryInfoKHR buffer_bind_info = {VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR, nullptr, buffer.handle(),
+ buffer_memory.handle(), 0};
+ vkBindBufferMemory2KHR(m_device->device(), 1, &buffer_bind_info);
+
+ // Create a test image
+ auto image_ci = vk_testing::Image::create_info();
+ image_ci.imageType = VK_IMAGE_TYPE_2D;
+ image_ci.extent.width = 32;
+ image_ci.extent.height = 32;
+ image_ci.format = VK_FORMAT_R8G8B8A8_UNORM;
+ image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
+ image_ci.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+ vk_testing::Image image;
+ image.init_no_mem(*m_device, image_ci);
+
+ // Allocate image memory
+ vk_testing::DeviceMemory image_memory;
+ image_memory.init(*m_device, vk_testing::DeviceMemory::get_resource_alloc_info(*m_device, image.memory_requirements(), 0));
+
+ // Bind image memory with extension
+ auto vkBindImageMemory2KHR =
+ reinterpret_cast<PFN_vkBindImageMemory2KHR>(vkGetDeviceProcAddr(m_device->device(), "vkBindImageMemory2KHR"));
+ ASSERT_TRUE(vkBindImageMemory2KHR != nullptr);
+ VkBindImageMemoryInfoKHR image_bind_info = {VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR, nullptr, image.handle(),
+ image_memory.handle(), 0};
+ vkBindImageMemory2KHR(m_device->device(), 1, &image_bind_info);
+
+ // Now execute arbitrary commands that use the test buffer and image
+ m_commandBuffer->begin();
+
+ // Fill buffer with 0
+ vkCmdFillBuffer(m_commandBuffer->handle(), buffer.handle(), 0, VK_WHOLE_SIZE, 0);
+
+ // Transition and clear image
+ const auto subresource_range = image.subresource_range(VK_IMAGE_ASPECT_COLOR_BIT);
+ const auto barrier = image.image_memory_barrier(0, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED,
+ VK_IMAGE_LAYOUT_GENERAL, subresource_range);
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
+ nullptr, 0, nullptr, 1, &barrier);
+ const VkClearColorValue color = {};
+ vkCmdClearColorImage(m_commandBuffer->handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, &color, 1, &subresource_range);
+
+ // Submit and verify no validation errors
+ m_commandBuffer->end();
+ m_commandBuffer->QueueCommandBuffer();
+ m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, MultiplaneImageTests) {
+ TEST_DESCRIPTION("Positive test of multiplane image operations");
+
+ // Enable KHR multiplane req'd extensions
+ bool mp_extensions = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
+ VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION);
+ if (mp_extensions) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ }
+ SetTargetApiVersion(VK_API_VERSION_1_1);
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+ mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+ mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+ mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+ if (mp_extensions) {
+ m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+ } else {
+ printf("%s test requires KHR multiplane extensions, not available. Skipping.\n", kSkipPrefix);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkImageCreateInfo ci = {};
+ ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ ci.pNext = NULL;
+ ci.flags = 0;
+ ci.imageType = VK_IMAGE_TYPE_2D;
+ ci.format = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR; // All planes of equal extent
+ ci.tiling = VK_IMAGE_TILING_OPTIMAL;
+ ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
+ ci.extent = {128, 128, 1};
+ ci.mipLevels = 1;
+ ci.arrayLayers = 1;
+ ci.samples = VK_SAMPLE_COUNT_1_BIT;
+ ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+ ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+
+ // Verify format
+ VkFormatFeatureFlags features = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT | VK_FORMAT_FEATURE_TRANSFER_DST_BIT;
+ bool supported = ImageFormatAndFeaturesSupported(instance(), gpu(), ci, features);
+ if (!supported) {
+ printf("%s Multiplane image format not supported. Skipping test.\n", kSkipPrefix);
+ return; // Assume there's low ROI on searching for different mp formats
+ }
+
+ VkImage image;
+ ASSERT_VK_SUCCESS(vkCreateImage(device(), &ci, NULL, &image));
+
+ // Allocate & bind memory
+ VkPhysicalDeviceMemoryProperties phys_mem_props;
+ vkGetPhysicalDeviceMemoryProperties(gpu(), &phys_mem_props);
+ VkMemoryRequirements mem_reqs;
+ vkGetImageMemoryRequirements(device(), image, &mem_reqs);
+ VkDeviceMemory mem_obj = VK_NULL_HANDLE;
+ VkMemoryPropertyFlagBits mem_props = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
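+    // Pick the first DEVICE_LOCAL memory type that is compatible with the image's memoryTypeBits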
+ for (uint32_t type = 0; type < phys_mem_props.memoryTypeCount; type++) {
+ if ((mem_reqs.memoryTypeBits & (1 << type)) &&
+ ((phys_mem_props.memoryTypes[type].propertyFlags & mem_props) == mem_props)) {
+ VkMemoryAllocateInfo alloc_info = {};
+ alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ alloc_info.allocationSize = mem_reqs.size;
+ alloc_info.memoryTypeIndex = type;
+ ASSERT_VK_SUCCESS(vkAllocateMemory(device(), &alloc_info, NULL, &mem_obj));
+ break;
+ }
}
- err = vkCreateComputePipelines(layer_test_.device(), pipeline_cache_, 1, &cp_ci_, NULL, &pipeline_);
- return err;
+
+ if (VK_NULL_HANDLE == mem_obj) {
+ printf("%s Unable to allocate image memory. Skipping test.\n", kSkipPrefix);
+ vkDestroyImage(device(), image, NULL);
+ return;
+ }
+ ASSERT_VK_SUCCESS(vkBindImageMemory(device(), image, mem_obj, 0));
+
+ // Copy plane 0 to plane 2
+ VkImageCopy copyRegion = {};
+ copyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_PLANE_0_BIT_KHR;
+ copyRegion.srcSubresource.mipLevel = 0;
+ copyRegion.srcSubresource.baseArrayLayer = 0;
+ copyRegion.srcSubresource.layerCount = 1;
+ copyRegion.srcOffset = {0, 0, 0};
+ copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_PLANE_2_BIT_KHR;
+ copyRegion.dstSubresource.mipLevel = 0;
+ copyRegion.dstSubresource.baseArrayLayer = 0;
+ copyRegion.dstSubresource.layerCount = 1;
+ copyRegion.dstOffset = {0, 0, 0};
+ copyRegion.extent.width = 128;
+ copyRegion.extent.height = 128;
+ copyRegion.extent.depth = 1;
+
+ m_errorMonitor->ExpectSuccess();
+ m_commandBuffer->begin();
+ m_commandBuffer->CopyImage(image, VK_IMAGE_LAYOUT_GENERAL, image, VK_IMAGE_LAYOUT_GENERAL, 1, &copyRegion);
+ m_commandBuffer->end();
+ m_errorMonitor->VerifyNotFound();
+
+ vkFreeMemory(device(), mem_obj, NULL);
+ vkDestroyImage(device(), image, NULL);
+
+ // Repeat bind test on a DISJOINT multi-planar image, with per-plane memory objects, using API2 variants
+ //
+ features |= VK_FORMAT_FEATURE_DISJOINT_BIT;
+ ci.flags = VK_IMAGE_CREATE_DISJOINT_BIT;
+ if (ImageFormatAndFeaturesSupported(instance(), gpu(), ci, features)) {
+ ASSERT_VK_SUCCESS(vkCreateImage(device(), &ci, NULL, &image));
+
+ // Allocate & bind memory
+ VkPhysicalDeviceMemoryProperties2 phys_mem_props2 = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2};
+ vkGetPhysicalDeviceMemoryProperties2(gpu(), &phys_mem_props2);
+ VkImagePlaneMemoryRequirementsInfo image_plane_req = {VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO};
+ VkImageMemoryRequirementsInfo2 mem_req_info2 = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2};
+ mem_req_info2.pNext = &image_plane_req;
+ mem_req_info2.image = image;
+ VkMemoryRequirements2 mem_reqs2 = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2};
+
+ VkDeviceMemory p0_mem, p1_mem, p2_mem;
+ mem_props = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
+ VkMemoryAllocateInfo alloc_info = {VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO};
+
+ // Plane 0
+ image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_0_BIT;
+ vkGetImageMemoryRequirements2(device(), &mem_req_info2, &mem_reqs2);
+ uint32_t mem_type = 0;
+ for (mem_type = 0; mem_type < phys_mem_props2.memoryProperties.memoryTypeCount; mem_type++) {
+ if ((mem_reqs2.memoryRequirements.memoryTypeBits & (1 << mem_type)) &&
+ ((phys_mem_props2.memoryProperties.memoryTypes[mem_type].propertyFlags & mem_props) == mem_props)) {
+ alloc_info.memoryTypeIndex = mem_type;
+ break;
+ }
+ }
+ alloc_info.allocationSize = mem_reqs2.memoryRequirements.size;
+ ASSERT_VK_SUCCESS(vkAllocateMemory(device(), &alloc_info, NULL, &p0_mem));
+
+        // Planes 1 and 2 reuse the memory type index chosen for plane 0; only the allocation sizes differ
+ image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_1_BIT;
+ vkGetImageMemoryRequirements2(device(), &mem_req_info2, &mem_reqs2);
+ alloc_info.allocationSize = mem_reqs2.memoryRequirements.size;
+ ASSERT_VK_SUCCESS(vkAllocateMemory(device(), &alloc_info, NULL, &p1_mem));
+
+ image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_2_BIT;
+ vkGetImageMemoryRequirements2(device(), &mem_req_info2, &mem_reqs2);
+ alloc_info.allocationSize = mem_reqs2.memoryRequirements.size;
+ ASSERT_VK_SUCCESS(vkAllocateMemory(device(), &alloc_info, NULL, &p2_mem));
+
+ // Set up 3-plane binding
+ VkBindImageMemoryInfo bind_info[3];
+ for (int plane = 0; plane < 3; plane++) {
+ bind_info[plane].sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
+ bind_info[plane].pNext = nullptr;
+ bind_info[plane].image = image;
+ bind_info[plane].memoryOffset = 0;
+ }
+ bind_info[0].memory = p0_mem;
+ bind_info[1].memory = p1_mem;
+ bind_info[2].memory = p2_mem;
+
+ m_errorMonitor->ExpectSuccess();
+ vkBindImageMemory2(device(), 3, bind_info);
+ m_errorMonitor->VerifyNotFound();
+
+ vkFreeMemory(device(), p0_mem, NULL);
+ vkFreeMemory(device(), p1_mem, NULL);
+ vkFreeMemory(device(), p2_mem, NULL);
+ vkDestroyImage(device(), image, NULL);
+ }
+
+ // Test that changing the layout of ASPECT_COLOR also changes the layout of the individual planes
+ VkBufferObj buffer;
+ VkMemoryPropertyFlags reqs = 0;
+ buffer.init_as_src(*m_device, (VkDeviceSize)128 * 128 * 3, reqs);
+ VkImageObj mpimage(m_device);
+ mpimage.Init(256, 256, 1, VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR, VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT,
+ VK_IMAGE_TILING_OPTIMAL, 0);
+ VkBufferImageCopy copy_region = {};
+ copy_region.bufferRowLength = 128;
+ copy_region.bufferImageHeight = 128;
+ copy_region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_PLANE_1_BIT_KHR;
+ copy_region.imageSubresource.layerCount = 1;
+ copy_region.imageExtent.height = 64;
+ copy_region.imageExtent.width = 64;
+ copy_region.imageExtent.depth = 1;
+
+ vkResetCommandBuffer(m_commandBuffer->handle(), 0);
+ m_commandBuffer->begin();
+ mpimage.ImageMemoryBarrier(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
+ vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer.handle(), mpimage.handle(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1,
+ &copy_region);
+ m_commandBuffer->end();
+ m_commandBuffer->QueueCommandBuffer(false);
+ m_errorMonitor->VerifyNotFound();
+
+ // Test to verify that views of multiplanar images have layouts tracked correctly
+ // by changing the image's layout then using a view of that image
+ VkImageView view;
+ VkImageViewCreateInfo ivci = {};
+ ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+ ivci.image = mpimage.handle();
+ ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+ ivci.format = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR;
+ ivci.subresourceRange.layerCount = 1;
+ ivci.subresourceRange.baseMipLevel = 0;
+ ivci.subresourceRange.levelCount = 1;
+ ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ vkCreateImageView(m_device->device(), &ivci, nullptr, &view);
+
+ OneOffDescriptorSet ds(m_device, {
+ {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
+ });
+
+ VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
+ VkSampler sampler;
+
+ VkResult err;
+ err = vkCreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
+ ASSERT_VK_SUCCESS(err);
+
+ const VkPipelineLayoutObj pipeline_layout(m_device, {&ds.layout_});
+
+ VkDescriptorImageInfo image_info{};
+ image_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+ image_info.imageView = view;
+ image_info.sampler = sampler;
+
+ VkWriteDescriptorSet descriptor_write = {};
+ descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ descriptor_write.dstSet = ds.set_;
+ descriptor_write.dstBinding = 0;
+ descriptor_write.descriptorCount = 1;
+ descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+ descriptor_write.pImageInfo = &image_info;
+
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "void main(){\n"
+ " gl_Position = vec4(1);\n"
+ "}\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(set=0, binding=0) uniform sampler2D s;\n"
+ "layout(location=0) out vec4 x;\n"
+ "void main(){\n"
+ " x = texture(s, vec2(1));\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+ VkPipelineObj pipe(m_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+ pipe.AddDefaultColorAttachment();
+ pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());
+
+ m_errorMonitor->ExpectSuccess();
+ m_commandBuffer->begin();
+ VkImageMemoryBarrier img_barrier = {};
+ img_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+ img_barrier.srcAccessMask = VK_ACCESS_SHADER_WRITE_BIT;
+ img_barrier.dstAccessMask = VK_ACCESS_SHADER_WRITE_BIT;
+ img_barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
+ img_barrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+ img_barrier.image = mpimage.handle();
+ img_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+ img_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+ img_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ img_barrier.subresourceRange.baseArrayLayer = 0;
+ img_barrier.subresourceRange.baseMipLevel = 0;
+ img_barrier.subresourceRange.layerCount = 1;
+ img_barrier.subresourceRange.levelCount = 1;
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
+ VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1, &img_barrier);
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+ vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+ vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1, &ds.set_, 0,
+ nullptr);
+
+ VkViewport viewport = {0, 0, 16, 16, 0, 1};
+ VkRect2D scissor = {{0, 0}, {16, 16}};
+ vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+ vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
+
+ m_commandBuffer->Draw(1, 0, 0, 0);
+ m_commandBuffer->EndRenderPass();
+ m_commandBuffer->end();
+ VkSubmitInfo submit_info = {};
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.commandBufferCount = 1;
+ submit_info.pCommandBuffers = &m_commandBuffer->handle();
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+ m_errorMonitor->VerifyNotFound();
+
+ vkQueueWaitIdle(m_device->m_queue);
+ vkDestroyImageView(m_device->device(), view, NULL);
+ vkDestroySampler(m_device->device(), sampler, nullptr);
+}
+
+TEST_F(VkPositiveLayerTest, ApiVersionZero) {
+ TEST_DESCRIPTION("Check that apiVersion = 0 is valid.");
+ m_errorMonitor->ExpectSuccess();
+ app_info.apiVersion = 0U;
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkLayerTest, DrawIndirectCountKHR) {
+ TEST_DESCRIPTION("Test covered valid usage for vkCmdDrawIndirectCountKHR");
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME)) {
+ m_device_extension_names.push_back(VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME);
+ } else {
+        printf("%s VK_KHR_draw_indirect_count Extension not supported, skipping test\n", kSkipPrefix);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitState());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkMemoryRequirements memory_requirements;
+ VkMemoryAllocateInfo memory_allocate_info = {VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO};
+
+ auto vkCmdDrawIndirectCountKHR =
+ (PFN_vkCmdDrawIndirectCountKHR)vkGetDeviceProcAddr(m_device->device(), "vkCmdDrawIndirectCountKHR");
+
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "void main() { gl_Position = vec4(0); }\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) out vec4 color;\n"
+ "void main() {\n"
+ " color = vec4(1, 0, 0, 1);\n"
+ "}\n";
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+ pipe.AddDefaultColorAttachment();
+
+ VkDescriptorSetObj descriptor_set(m_device);
+ descriptor_set.AppendDummy();
+ descriptor_set.CreateVKDescriptorSet(m_commandBuffer);
+
+ VkResult err = pipe.CreateVKPipeline(descriptor_set.GetPipelineLayout(), renderPass());
+ ASSERT_VK_SUCCESS(err);
+
+ m_commandBuffer->begin();
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
+ vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+ m_commandBuffer->BindDescriptorSet(descriptor_set);
+
+ VkViewport viewport = {0, 0, 16, 16, 0, 1};
+ vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+ VkRect2D scissor = {{0, 0}, {16, 16}};
+ vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
+
+ VkBufferCreateInfo buffer_create_info = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO};
+ buffer_create_info.size = sizeof(VkDrawIndirectCommand);
+ buffer_create_info.usage = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT;
+ VkBuffer draw_buffer;
+ vkCreateBuffer(m_device->device(), &buffer_create_info, nullptr, &draw_buffer);
+
+ VkBufferCreateInfo count_buffer_create_info = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO};
+ count_buffer_create_info.size = sizeof(uint32_t);
+ count_buffer_create_info.usage = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT;
+ VkBuffer count_buffer;
+ vkCreateBuffer(m_device->device(), &count_buffer_create_info, nullptr, &count_buffer);
+ vkGetBufferMemoryRequirements(m_device->device(), count_buffer, &memory_requirements);
+ memory_allocate_info.allocationSize = memory_requirements.size;
+ m_device->phy().set_memory_type(memory_requirements.memoryTypeBits, &memory_allocate_info, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
+ VkDeviceMemory count_buffer_memory;
+ vkAllocateMemory(m_device->device(), &memory_allocate_info, NULL, &count_buffer_memory);
+ vkBindBufferMemory(m_device->device(), count_buffer, count_buffer_memory, 0);
+
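+    // Note: draw_buffer deliberately has no memory bound yet; the first case below relies on that.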
+ // VUID-vkCmdDrawIndirectCountKHR-buffer-03104
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndirectCountKHR-buffer-03104");
+ vkCmdDrawIndirectCountKHR(m_commandBuffer->handle(), draw_buffer, 0, count_buffer, 0, 1, sizeof(VkDrawIndirectCommand));
+ m_errorMonitor->VerifyFound();
+
+ vkGetBufferMemoryRequirements(m_device->device(), draw_buffer, &memory_requirements);
+ memory_allocate_info.allocationSize = memory_requirements.size;
+ m_device->phy().set_memory_type(memory_requirements.memoryTypeBits, &memory_allocate_info, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
+ VkDeviceMemory draw_buffer_memory;
+ vkAllocateMemory(m_device->device(), &memory_allocate_info, NULL, &draw_buffer_memory);
+ vkBindBufferMemory(m_device->device(), draw_buffer, draw_buffer_memory, 0);
+
+ VkBuffer count_buffer_unbound;
+ vkCreateBuffer(m_device->device(), &count_buffer_create_info, nullptr, &count_buffer_unbound);
+
+ // VUID-vkCmdDrawIndirectCountKHR-countBuffer-03106
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndirectCountKHR-countBuffer-03106");
+ vkCmdDrawIndirectCountKHR(m_commandBuffer->handle(), draw_buffer, 0, count_buffer_unbound, 0, 1, sizeof(VkDrawIndirectCommand));
+ m_errorMonitor->VerifyFound();
+
+ // VUID-vkCmdDrawIndirectCountKHR-offset-03108
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndirectCountKHR-offset-03108");
+ vkCmdDrawIndirectCountKHR(m_commandBuffer->handle(), draw_buffer, 1, count_buffer, 0, 1, sizeof(VkDrawIndirectCommand));
+ m_errorMonitor->VerifyFound();
+
+ // VUID-vkCmdDrawIndirectCountKHR-countBufferOffset-03109
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndirectCountKHR-countBufferOffset-03109");
+ vkCmdDrawIndirectCountKHR(m_commandBuffer->handle(), draw_buffer, 0, count_buffer, 1, 1, sizeof(VkDrawIndirectCommand));
+ m_errorMonitor->VerifyFound();
+
+ // VUID-vkCmdDrawIndirectCountKHR-stride-03110
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndirectCountKHR-stride-03110");
+ vkCmdDrawIndirectCountKHR(m_commandBuffer->handle(), draw_buffer, 0, count_buffer, 0, 1, 1);
+ m_errorMonitor->VerifyFound();
+
+    // TODO: These covered VUIDs aren't tested. There is also no test coverage for the core Vulkan 1.0 vkCmdDraw*
+    // equivalents of these:
+ // VUID-vkCmdDrawIndirectCountKHR-renderPass-03113
+ // VUID-vkCmdDrawIndirectCountKHR-subpass-03114
+ // VUID-vkCmdDrawIndirectCountKHR-None-03120
+
+ m_commandBuffer->EndRenderPass();
+ m_commandBuffer->end();
+
+ vkDestroyBuffer(m_device->device(), draw_buffer, 0);
+ vkDestroyBuffer(m_device->device(), count_buffer, 0);
+ vkDestroyBuffer(m_device->device(), count_buffer_unbound, 0);
+
+ vkFreeMemory(m_device->device(), draw_buffer_memory, 0);
+ vkFreeMemory(m_device->device(), count_buffer_memory, 0);
}
-CreateNVRayTracingPipelineHelper::CreateNVRayTracingPipelineHelper(VkLayerTest &test) : layer_test_(test) {}
-CreateNVRayTracingPipelineHelper::~CreateNVRayTracingPipelineHelper() {
- VkDevice device = layer_test_.device();
- vkDestroyPipelineCache(device, pipeline_cache_, nullptr);
- vkDestroyPipeline(device, pipeline_, nullptr);
+TEST_F(VkLayerTest, DrawIndexedIndirectCountKHR) {
+ TEST_DESCRIPTION("Test covered valid usage for vkCmdDrawIndexedIndirectCountKHR");
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME)) {
+ m_device_extension_names.push_back(VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME);
+ } else {
+        printf("%s VK_KHR_draw_indirect_count Extension not supported, skipping test\n", kSkipPrefix);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitState());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkMemoryRequirements memory_requirements;
+ VkMemoryAllocateInfo memory_allocate_info = {VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO};
+
+ auto vkCmdDrawIndexedIndirectCountKHR =
+ (PFN_vkCmdDrawIndexedIndirectCountKHR)vkGetDeviceProcAddr(m_device->device(), "vkCmdDrawIndexedIndirectCountKHR");
+
+ char const *vsSource =
+ "#version 450\n"
+ "\n"
+ "void main() { gl_Position = vec4(0); }\n";
+ char const *fsSource =
+ "#version 450\n"
+ "\n"
+ "layout(location=0) out vec4 color;\n"
+ "void main() {\n"
+ " color = vec4(1, 0, 0, 1);\n"
+ "}\n";
+ VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddShader(&vs);
+ pipe.AddShader(&fs);
+ pipe.AddDefaultColorAttachment();
+
+ VkDescriptorSetObj descriptorSet(m_device);
+ descriptorSet.AppendDummy();
+ descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+ VkResult err = pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+ ASSERT_VK_SUCCESS(err);
+
+ m_commandBuffer->begin();
+ m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
+ vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+ m_commandBuffer->BindDescriptorSet(descriptorSet);
+
+ VkViewport viewport = {0, 0, 16, 16, 0, 1};
+ vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+ VkRect2D scissor = {{0, 0}, {16, 16}};
+ vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
+
+ VkBufferCreateInfo buffer_create_info = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO};
+ buffer_create_info.size = sizeof(VkDrawIndexedIndirectCommand);
+ buffer_create_info.usage = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT;
+ VkBuffer draw_buffer;
+ vkCreateBuffer(m_device->device(), &buffer_create_info, nullptr, &draw_buffer);
+ vkGetBufferMemoryRequirements(m_device->device(), draw_buffer, &memory_requirements);
+ memory_allocate_info.allocationSize = memory_requirements.size;
+ m_device->phy().set_memory_type(memory_requirements.memoryTypeBits, &memory_allocate_info, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
+ VkDeviceMemory draw_buffer_memory;
+ vkAllocateMemory(m_device->device(), &memory_allocate_info, NULL, &draw_buffer_memory);
+ vkBindBufferMemory(m_device->device(), draw_buffer, draw_buffer_memory, 0);
+
+ VkBufferCreateInfo count_buffer_create_info = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO};
+ count_buffer_create_info.size = sizeof(uint32_t);
+ count_buffer_create_info.usage = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT;
+ VkBuffer count_buffer;
+ vkCreateBuffer(m_device->device(), &count_buffer_create_info, nullptr, &count_buffer);
+ vkGetBufferMemoryRequirements(m_device->device(), count_buffer, &memory_requirements);
+ memory_allocate_info.allocationSize = memory_requirements.size;
+ m_device->phy().set_memory_type(memory_requirements.memoryTypeBits, &memory_allocate_info, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
+ VkDeviceMemory count_buffer_memory;
+ vkAllocateMemory(m_device->device(), &memory_allocate_info, NULL, &count_buffer_memory);
+ vkBindBufferMemory(m_device->device(), count_buffer, count_buffer_memory, 0);
+
+ VkBufferCreateInfo index_buffer_create_info = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO};
+ index_buffer_create_info.size = sizeof(uint32_t);
+ index_buffer_create_info.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
+ VkBuffer index_buffer;
+ vkCreateBuffer(m_device->device(), &index_buffer_create_info, nullptr, &index_buffer);
+ vkGetBufferMemoryRequirements(m_device->device(), index_buffer, &memory_requirements);
+ memory_allocate_info.allocationSize = memory_requirements.size;
+ m_device->phy().set_memory_type(memory_requirements.memoryTypeBits, &memory_allocate_info, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
+ VkDeviceMemory index_buffer_memory;
+ vkAllocateMemory(m_device->device(), &memory_allocate_info, NULL, &index_buffer_memory);
+ vkBindBufferMemory(m_device->device(), index_buffer, index_buffer_memory, 0);
+
+ // VUID-vkCmdDrawIndexedIndirectCountKHR-None-03152 (partial - only tests whether the index buffer is bound)
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndexedIndirectCountKHR-None-03152");
+ vkCmdDrawIndexedIndirectCountKHR(m_commandBuffer->handle(), draw_buffer, 0, count_buffer, 0, 1,
+ sizeof(VkDrawIndexedIndirectCommand));
+ m_errorMonitor->VerifyFound();
+
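+    // Bind a valid index buffer so the remaining cases exercise their specific VUIDs rather than 03152.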
+ vkCmdBindIndexBuffer(m_commandBuffer->handle(), index_buffer, 0, VK_INDEX_TYPE_UINT32);
+
+ VkBuffer draw_buffer_unbound;
+ vkCreateBuffer(m_device->device(), &count_buffer_create_info, nullptr, &draw_buffer_unbound);
+
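+    // draw_buffer_unbound is never given backing memory; the buffer-03136 case below depends on that.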
+ // VUID-vkCmdDrawIndexedIndirectCountKHR-buffer-03136
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndexedIndirectCountKHR-buffer-03136");
+ vkCmdDrawIndexedIndirectCountKHR(m_commandBuffer->handle(), draw_buffer_unbound, 0, count_buffer, 0, 1,
+ sizeof(VkDrawIndexedIndirectCommand));
+ m_errorMonitor->VerifyFound();
+
+ VkBuffer count_buffer_unbound;
+ vkCreateBuffer(m_device->device(), &count_buffer_create_info, nullptr, &count_buffer_unbound);
+
+ // VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-03138
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-03138");
+ vkCmdDrawIndexedIndirectCountKHR(m_commandBuffer->handle(), draw_buffer, 0, count_buffer_unbound, 0, 1,
+ sizeof(VkDrawIndexedIndirectCommand));
+ m_errorMonitor->VerifyFound();
+
+ // VUID-vkCmdDrawIndexedIndirectCountKHR-offset-03140
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndexedIndirectCountKHR-offset-03140");
+ vkCmdDrawIndexedIndirectCountKHR(m_commandBuffer->handle(), draw_buffer, 1, count_buffer, 0, 1,
+ sizeof(VkDrawIndexedIndirectCommand));
+ m_errorMonitor->VerifyFound();
+
+ // VUID-vkCmdDrawIndexedIndirectCountKHR-countBufferOffset-03141
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdDrawIndexedIndirectCountKHR-countBufferOffset-03141");
+ vkCmdDrawIndexedIndirectCountKHR(m_commandBuffer->handle(), draw_buffer, 0, count_buffer, 1, 1,
+ sizeof(VkDrawIndexedIndirectCommand));
+ m_errorMonitor->VerifyFound();
+
+ // VUID-vkCmdDrawIndexedIndirectCountKHR-stride-03142
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndexedIndirectCountKHR-stride-03142");
+ vkCmdDrawIndexedIndirectCountKHR(m_commandBuffer->handle(), draw_buffer, 0, count_buffer, 0, 1, 1);
+ m_errorMonitor->VerifyFound();
+
+    // TODO: These covered VUIDs aren't tested. There is also no test coverage for the core Vulkan 1.0 vkCmdDraw*
+    // equivalents of these:
+ // VUID-vkCmdDrawIndexedIndirectCountKHR-renderPass-03145
+ // VUID-vkCmdDrawIndexedIndirectCountKHR-subpass-03146
+ // VUID-vkCmdDrawIndexedIndirectCountKHR-None-03152 (partial)
+
+ m_commandBuffer->EndRenderPass();
+ m_commandBuffer->end();
+
+ vkDestroyBuffer(m_device->device(), draw_buffer, 0);
+ vkDestroyBuffer(m_device->device(), draw_buffer_unbound, 0);
+ vkDestroyBuffer(m_device->device(), count_buffer, 0);
+ vkDestroyBuffer(m_device->device(), count_buffer_unbound, 0);
+ vkDestroyBuffer(m_device->device(), index_buffer, 0);
+
+ vkFreeMemory(m_device->device(), draw_buffer_memory, 0);
+ vkFreeMemory(m_device->device(), count_buffer_memory, 0);
+ vkFreeMemory(m_device->device(), index_buffer_memory, 0);
}
-bool CreateNVRayTracingPipelineHelper::InitInstanceExtensions(VkLayerTest &test,
- std::vector<const char *> &instance_extension_names) {
- if (test.InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+TEST_F(VkLayerTest, ExclusiveScissorNV) {
+ TEST_DESCRIPTION("Test VK_NV_scissor_exclusive with multiViewport disabled.");
+
+ if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
} else {
printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- return false;
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ std::array<const char *, 1> required_device_extensions = {{VK_NV_SCISSOR_EXCLUSIVE_EXTENSION_NAME}};
+ for (auto device_extension : required_device_extensions) {
+ if (DeviceExtensionSupported(gpu(), nullptr, device_extension)) {
+ m_device_extension_names.push_back(device_extension);
+ } else {
+ printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, device_extension);
+ return;
+ }
+ }
+
+ PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+ (PFN_vkGetPhysicalDeviceFeatures2KHR)vkGetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+ ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
+
+ // Create a device that enables exclusive scissor but disables multiViewport
+ auto exclusive_scissor_features = lvl_init_struct<VkPhysicalDeviceExclusiveScissorFeaturesNV>();
+ auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&exclusive_scissor_features);
+ vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
+
+ features2.features.multiViewport = VK_FALSE;
+
+ ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ // Based on PSOViewportStateTests
+ {
+ VkViewport viewport = {0.0f, 0.0f, 64.0f, 64.0f, 0.0f, 1.0f};
+ VkViewport viewports[] = {viewport, viewport};
+ VkRect2D scissor = {{0, 0}, {64, 64}};
+ VkRect2D scissors[100] = {scissor, scissor};
+
+ using std::vector;
+ struct TestCase {
+ uint32_t viewport_count;
+ VkViewport *viewports;
+ uint32_t scissor_count;
+ VkRect2D *scissors;
+ uint32_t exclusive_scissor_count;
+ VkRect2D *exclusive_scissors;
+
+ vector<std::string> vuids;
+ };
+
+ vector<TestCase> test_cases = {
+ {1,
+ viewports,
+ 1,
+ scissors,
+ 2,
+ scissors,
+ {"VUID-VkPipelineViewportExclusiveScissorStateCreateInfoNV-exclusiveScissorCount-02027",
+ "VUID-VkPipelineViewportExclusiveScissorStateCreateInfoNV-exclusiveScissorCount-02029"}},
+ {1,
+ viewports,
+ 1,
+ scissors,
+ 100,
+ scissors,
+ {"VUID-VkPipelineViewportExclusiveScissorStateCreateInfoNV-exclusiveScissorCount-02027",
+ "VUID-VkPipelineViewportExclusiveScissorStateCreateInfoNV-exclusiveScissorCount-02028",
+ "VUID-VkPipelineViewportExclusiveScissorStateCreateInfoNV-exclusiveScissorCount-02029"}},
+ {1,
+ viewports,
+ 1,
+ scissors,
+ 1,
+ nullptr,
+ {"VUID-VkPipelineViewportExclusiveScissorStateCreateInfoNV-pDynamicStates-02030"}},
+ };
+
+ for (const auto &test_case : test_cases) {
+ VkPipelineViewportExclusiveScissorStateCreateInfoNV exc = {
+ VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV};
+
+ const auto break_vp = [&test_case, &exc](CreatePipelineHelper &helper) {
+ helper.vp_state_ci_.viewportCount = test_case.viewport_count;
+ helper.vp_state_ci_.pViewports = test_case.viewports;
+ helper.vp_state_ci_.scissorCount = test_case.scissor_count;
+ helper.vp_state_ci_.pScissors = test_case.scissors;
+ helper.vp_state_ci_.pNext = &exc;
+
+ exc.exclusiveScissorCount = test_case.exclusive_scissor_count;
+ exc.pExclusiveScissors = test_case.exclusive_scissors;
+ };
+ CreatePipelineHelper::OneshotTest(*this, break_vp, VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.vuids);
+ }
+ }
+
+ // Based on SetDynScissorParamTests
+ {
+ auto vkCmdSetExclusiveScissorNV =
+ (PFN_vkCmdSetExclusiveScissorNV)vkGetDeviceProcAddr(m_device->device(), "vkCmdSetExclusiveScissorNV");
+
+ const VkRect2D scissor = {{0, 0}, {16, 16}};
+ const VkRect2D scissors[] = {scissor, scissor};
+
+ m_commandBuffer->begin();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdSetExclusiveScissorNV-firstExclusiveScissor-02035");
+ vkCmdSetExclusiveScissorNV(m_commandBuffer->handle(), 1, 1, scissors);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "vkCmdSetExclusiveScissorNV: parameter exclusiveScissorCount must be greater than 0");
+ vkCmdSetExclusiveScissorNV(m_commandBuffer->handle(), 0, 0, nullptr);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdSetExclusiveScissorNV-exclusiveScissorCount-02036");
+ vkCmdSetExclusiveScissorNV(m_commandBuffer->handle(), 0, 2, scissors);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "vkCmdSetExclusiveScissorNV: parameter exclusiveScissorCount must be greater than 0");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdSetExclusiveScissorNV-firstExclusiveScissor-02035");
+ vkCmdSetExclusiveScissorNV(m_commandBuffer->handle(), 1, 0, scissors);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdSetExclusiveScissorNV-firstExclusiveScissor-02035");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdSetExclusiveScissorNV-exclusiveScissorCount-02036");
+ vkCmdSetExclusiveScissorNV(m_commandBuffer->handle(), 1, 2, scissors);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "vkCmdSetExclusiveScissorNV: required parameter pExclusiveScissors specified as NULL");
+ vkCmdSetExclusiveScissorNV(m_commandBuffer->handle(), 0, 1, nullptr);
+ m_errorMonitor->VerifyFound();
+
+ struct TestCase {
+ VkRect2D scissor;
+ std::string vuid;
+ };
+
+ std::vector<TestCase> test_cases = {
+ {{{-1, 0}, {16, 16}}, "VUID-vkCmdSetExclusiveScissorNV-x-02037"},
+ {{{0, -1}, {16, 16}}, "VUID-vkCmdSetExclusiveScissorNV-x-02037"},
+ {{{1, 0}, {INT32_MAX, 16}}, "VUID-vkCmdSetExclusiveScissorNV-offset-02038"},
+ {{{INT32_MAX, 0}, {1, 16}}, "VUID-vkCmdSetExclusiveScissorNV-offset-02038"},
+ {{{0, 0}, {uint32_t{INT32_MAX} + 1, 16}}, "VUID-vkCmdSetExclusiveScissorNV-offset-02038"},
+ {{{0, 1}, {16, INT32_MAX}}, "VUID-vkCmdSetExclusiveScissorNV-offset-02039"},
+ {{{0, INT32_MAX}, {16, 1}}, "VUID-vkCmdSetExclusiveScissorNV-offset-02039"},
+ {{{0, 0}, {16, uint32_t{INT32_MAX} + 1}}, "VUID-vkCmdSetExclusiveScissorNV-offset-02039"}};
+
+ for (const auto &test_case : test_cases) {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.vuid);
+ vkCmdSetExclusiveScissorNV(m_commandBuffer->handle(), 0, 1, &test_case.scissor);
+ m_errorMonitor->VerifyFound();
+ }
+
+ m_commandBuffer->end();
}
- return true;
}
-bool CreateNVRayTracingPipelineHelper::InitDeviceExtensions(VkLayerTest &test, std::vector<const char *> &device_extension_names) {
- std::array<const char *, 2> required_device_extensions = {
- {VK_NV_RAY_TRACING_EXTENSION_NAME, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME}};
+TEST_F(VkLayerTest, ShadingRateImageNV) {
+ TEST_DESCRIPTION("Test VK_NV_shading_rate_image.");
+
+ if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ } else {
+ printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
+ VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ std::array<const char *, 1> required_device_extensions = {{VK_NV_SHADING_RATE_IMAGE_EXTENSION_NAME}};
for (auto device_extension : required_device_extensions) {
- if (test.DeviceExtensionSupported(test.gpu(), nullptr, device_extension)) {
- device_extension_names.push_back(device_extension);
+ if (DeviceExtensionSupported(gpu(), nullptr, device_extension)) {
+ m_device_extension_names.push_back(device_extension);
} else {
printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, device_extension);
- return false;
+ return;
}
}
- return true;
-}
-void CreateNVRayTracingPipelineHelper::InitShaderGroups() {
+ PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+ (PFN_vkGetPhysicalDeviceFeatures2KHR)vkGetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+ ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
+
+ // Create a device that enables shading_rate_image but disables multiViewport
+ auto shading_rate_image_features = lvl_init_struct<VkPhysicalDeviceShadingRateImageFeaturesNV>();
+ auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&shading_rate_image_features);
+ vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
+
+ features2.features.multiViewport = VK_FALSE;
+
+ ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ // Test shading rate image creation
+ VkImage image = VK_NULL_HANDLE;
+ VkResult result = VK_RESULT_MAX_ENUM;
+ VkImageCreateInfo image_create_info = {};
+ image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ image_create_info.pNext = NULL;
+ image_create_info.imageType = VK_IMAGE_TYPE_2D;
+ image_create_info.format = VK_FORMAT_R8_UINT;
+ image_create_info.extent.width = 4;
+ image_create_info.extent.height = 4;
+ image_create_info.extent.depth = 1;
+ image_create_info.mipLevels = 1;
+ image_create_info.arrayLayers = 1;
+ image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+ image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+ image_create_info.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+ image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV;
+ image_create_info.queueFamilyIndexCount = 0;
+ image_create_info.pQueueFamilyIndices = NULL;
+ image_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+ image_create_info.flags = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
+
+ // image type must be 2D
+ image_create_info.imageType = VK_IMAGE_TYPE_3D;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-imageType-02082");
+ result = vkCreateImage(m_device->device(), &image_create_info, NULL, &image);
+ m_errorMonitor->VerifyFound();
+ if (VK_SUCCESS == result) {
+ vkDestroyImage(m_device->device(), image, NULL);
+ image = VK_NULL_HANDLE;
+ }
+ image_create_info.imageType = VK_IMAGE_TYPE_2D;
+
+ // must be single sample
+ image_create_info.samples = VK_SAMPLE_COUNT_2_BIT;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-samples-02083");
+ result = vkCreateImage(m_device->device(), &image_create_info, NULL, &image);
+ m_errorMonitor->VerifyFound();
+ if (VK_SUCCESS == result) {
+ vkDestroyImage(m_device->device(), image, NULL);
+ image = VK_NULL_HANDLE;
+ }
+ image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+
+ // tiling must be optimal
+ image_create_info.tiling = VK_IMAGE_TILING_LINEAR;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-tiling-02084");
+ result = vkCreateImage(m_device->device(), &image_create_info, NULL, &image);
+ m_errorMonitor->VerifyFound();
+ if (VK_SUCCESS == result) {
+ vkDestroyImage(m_device->device(), image, NULL);
+ image = VK_NULL_HANDLE;
+ }
+ image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+
+ // Should succeed.
+ result = vkCreateImage(m_device->device(), &image_create_info, NULL, &image);
+ m_errorMonitor->VerifyNotFound();
+
+ // bind memory to the image
+ VkMemoryRequirements memory_reqs;
+ VkDeviceMemory image_memory;
+ bool pass;
+ VkMemoryAllocateInfo memory_info = {};
+ memory_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ memory_info.pNext = NULL;
+ memory_info.allocationSize = 0;
+ memory_info.memoryTypeIndex = 0;
+ vkGetImageMemoryRequirements(m_device->device(), image, &memory_reqs);
+ memory_info.allocationSize = memory_reqs.size;
+ pass = m_device->phy().set_memory_type(memory_reqs.memoryTypeBits, &memory_info, 0);
+ ASSERT_TRUE(pass);
+ result = vkAllocateMemory(m_device->device(), &memory_info, NULL, &image_memory);
+ ASSERT_VK_SUCCESS(result);
+ result = vkBindImageMemory(m_device->device(), image, image_memory, 0);
+ ASSERT_VK_SUCCESS(result);
+
+ // Test image view creation
+ VkImageView view;
+ VkImageViewCreateInfo ivci = {};
+ ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+ ivci.image = image;
+ ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+ ivci.format = VK_FORMAT_R8_UINT;
+ ivci.subresourceRange.layerCount = 1;
+ ivci.subresourceRange.baseMipLevel = 0;
+ ivci.subresourceRange.levelCount = 1;
+ ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+
+ // view type must be 2D or 2D_ARRAY
+ ivci.viewType = VK_IMAGE_VIEW_TYPE_CUBE;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageViewCreateInfo-image-02086");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageViewCreateInfo-image-01003");
+ result = vkCreateImageView(m_device->device(), &ivci, nullptr, &view);
+ m_errorMonitor->VerifyFound();
+ if (VK_SUCCESS == result) {
+ vkDestroyImageView(m_device->device(), view, NULL);
+ view = VK_NULL_HANDLE;
+ }
+ ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+
+ // format must be R8_UINT
+ ivci.format = VK_FORMAT_R8_UNORM;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageViewCreateInfo-image-02087");
+ result = vkCreateImageView(m_device->device(), &ivci, nullptr, &view);
+ m_errorMonitor->VerifyFound();
+ if (VK_SUCCESS == result) {
+ vkDestroyImageView(m_device->device(), view, NULL);
+ view = VK_NULL_HANDLE;
+ }
+ ivci.format = VK_FORMAT_R8_UINT;
+
+ vkCreateImageView(m_device->device(), &ivci, nullptr, &view);
+ m_errorMonitor->VerifyNotFound();
+
+ // Test pipeline creation
+ VkPipelineViewportShadingRateImageStateCreateInfoNV vsrisci = {
+ VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV};
+
+ VkViewport viewport = {0.0f, 0.0f, 64.0f, 64.0f, 0.0f, 1.0f};
+ VkViewport viewports[20] = {viewport, viewport};
+ VkRect2D scissor = {{0, 0}, {64, 64}};
+ VkRect2D scissors[20] = {scissor, scissor};
+ VkDynamicState dynPalette = VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV;
+ VkPipelineDynamicStateCreateInfo dyn = {VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO, nullptr, 0, 1, &dynPalette};
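+    // Making the shading rate palette dynamic lets the first two cases below omit pShadingRatePalettes;
+    // the last case leaves the dynamic state out to provoke the pDynamicStates VUID.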
+
+ // viewportCount must be 0 or 1 when multiViewport is disabled
{
- VkRayTracingShaderGroupCreateInfoNV group = {};
- group.sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
- group.type = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV;
- group.generalShader = 0;
- group.closestHitShader = VK_SHADER_UNUSED_NV;
- group.anyHitShader = VK_SHADER_UNUSED_NV;
- group.intersectionShader = VK_SHADER_UNUSED_NV;
- groups_.push_back(group);
+ const auto break_vp = [&](CreatePipelineHelper &helper) {
+ helper.vp_state_ci_.viewportCount = 2;
+ helper.vp_state_ci_.pViewports = viewports;
+ helper.vp_state_ci_.scissorCount = 2;
+ helper.vp_state_ci_.pScissors = scissors;
+ helper.vp_state_ci_.pNext = &vsrisci;
+ helper.dyn_state_ci_ = dyn;
+
+ vsrisci.shadingRateImageEnable = VK_TRUE;
+ vsrisci.viewportCount = 2;
+ };
+ CreatePipelineHelper::OneshotTest(
+ *this, break_vp, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ vector<std::string>({"VUID-VkPipelineViewportShadingRateImageStateCreateInfoNV-viewportCount-02054",
+ "VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216",
+ "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217"}));
}
+
+ // viewportCounts must match
{
- VkRayTracingShaderGroupCreateInfoNV group = {};
- group.sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
- group.type = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV;
- group.generalShader = VK_SHADER_UNUSED_NV;
- group.closestHitShader = 1;
- group.anyHitShader = VK_SHADER_UNUSED_NV;
- group.intersectionShader = VK_SHADER_UNUSED_NV;
- groups_.push_back(group);
+ const auto break_vp = [&](CreatePipelineHelper &helper) {
+ helper.vp_state_ci_.viewportCount = 1;
+ helper.vp_state_ci_.pViewports = viewports;
+ helper.vp_state_ci_.scissorCount = 1;
+ helper.vp_state_ci_.pScissors = scissors;
+ helper.vp_state_ci_.pNext = &vsrisci;
+ helper.dyn_state_ci_ = dyn;
+
+ vsrisci.shadingRateImageEnable = VK_TRUE;
+ vsrisci.viewportCount = 0;
+ };
+ CreatePipelineHelper::OneshotTest(
+ *this, break_vp, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ vector<std::string>({"VUID-VkPipelineViewportShadingRateImageStateCreateInfoNV-shadingRateImageEnable-02056"}));
}
+
+ // pShadingRatePalettes must not be NULL.
+ {
+ const auto break_vp = [&](CreatePipelineHelper &helper) {
+ helper.vp_state_ci_.viewportCount = 1;
+ helper.vp_state_ci_.pViewports = viewports;
+ helper.vp_state_ci_.scissorCount = 1;
+ helper.vp_state_ci_.pScissors = scissors;
+ helper.vp_state_ci_.pNext = &vsrisci;
+
+ vsrisci.shadingRateImageEnable = VK_TRUE;
+ vsrisci.viewportCount = 1;
+ };
+ CreatePipelineHelper::OneshotTest(
+ *this, break_vp, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ vector<std::string>({"VUID-VkPipelineViewportShadingRateImageStateCreateInfoNV-pDynamicStates-02057"}));
+ }
+
+ // Create an image without the SRI bit
+ VkImageObj nonSRIimage(m_device);
+ nonSRIimage.Init(256, 256, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+ ASSERT_TRUE(nonSRIimage.initialized());
+ VkImageView nonSRIview = nonSRIimage.targetView(VK_FORMAT_B8G8R8A8_UNORM);
+
+ // Test SRI layout on non-SRI image
+ VkImageMemoryBarrier img_barrier = {};
+ img_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+ img_barrier.pNext = nullptr;
+ img_barrier.srcAccessMask = 0;
+ img_barrier.dstAccessMask = 0;
+ img_barrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
+ img_barrier.newLayout = VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV;
+ img_barrier.image = nonSRIimage.handle();
+ img_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+ img_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+ img_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ img_barrier.subresourceRange.baseArrayLayer = 0;
+ img_barrier.subresourceRange.baseMipLevel = 0;
+ img_barrier.subresourceRange.layerCount = 1;
+ img_barrier.subresourceRange.levelCount = 1;
+
+ m_commandBuffer->begin();
+
+ // Error trying to convert it to SRI layout
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-oldLayout-02088");
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, 0, 0,
+ nullptr, 0, nullptr, 1, &img_barrier);
+ m_errorMonitor->VerifyFound();
+
+ // succeed converting it to GENERAL
+ img_barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, 0, 0,
+ nullptr, 0, nullptr, 1, &img_barrier);
+ m_errorMonitor->VerifyNotFound();
+
+ // Test vkCmdBindShadingRateImageNV errors
+ auto vkCmdBindShadingRateImageNV =
+ (PFN_vkCmdBindShadingRateImageNV)vkGetDeviceProcAddr(m_device->device(), "vkCmdBindShadingRateImageNV");
+
+ // if the view is non-NULL, it must be R8_UINT, USAGE_SRI, image layout must match, layout must be valid
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBindShadingRateImageNV-imageView-02060");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBindShadingRateImageNV-imageView-02061");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBindShadingRateImageNV-imageView-02062");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBindShadingRateImageNV-imageLayout-02063");
+ vkCmdBindShadingRateImageNV(m_commandBuffer->handle(), nonSRIview, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL);
+ m_errorMonitor->VerifyFound();
+
+ // Test vkCmdSetViewportShadingRatePaletteNV errors
+ auto vkCmdSetViewportShadingRatePaletteNV =
+ (PFN_vkCmdSetViewportShadingRatePaletteNV)vkGetDeviceProcAddr(m_device->device(), "vkCmdSetViewportShadingRatePaletteNV");
+
+ VkShadingRatePaletteEntryNV paletteEntries[100] = {};
+ VkShadingRatePaletteNV palette = {100, paletteEntries};
+ VkShadingRatePaletteNV palettes[] = {palette, palette};
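+    // 100 palette entries are presumed to exceed the device's shadingRatePaletteSize limit,
+    // which is what the entry-count VUID below relies on.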
+
+ // errors on firstViewport/viewportCount
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdSetViewportShadingRatePaletteNV-firstViewport-02066");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdSetViewportShadingRatePaletteNV-firstViewport-02067");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdSetViewportShadingRatePaletteNV-firstViewport-02068");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdSetViewportShadingRatePaletteNV-viewportCount-02069");
+ vkCmdSetViewportShadingRatePaletteNV(m_commandBuffer->handle(), 20, 2, palettes);
+ m_errorMonitor->VerifyFound();
+
+ // shadingRatePaletteEntryCount must be in range
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkShadingRatePaletteNV-shadingRatePaletteEntryCount-02071");
+ vkCmdSetViewportShadingRatePaletteNV(m_commandBuffer->handle(), 0, 1, palettes);
+ m_errorMonitor->VerifyFound();
+
+ VkCoarseSampleLocationNV locations[100] = {
+ {0, 0, 0}, {0, 0, 1}, {0, 1, 0}, {0, 1, 1}, {0, 1, 1}, // duplicate
+ {1000, 0, 0}, // pixelX too large
+ {0, 1000, 0}, // pixelY too large
+ {0, 0, 1000}, // sample too large
+ };
+
+ // Test custom sample orders, both via pipeline state and via dynamic state
{
- VkRayTracingShaderGroupCreateInfoNV group = {};
- group.sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
- group.type = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV;
- group.generalShader = 2;
- group.closestHitShader = VK_SHADER_UNUSED_NV;
- group.anyHitShader = VK_SHADER_UNUSED_NV;
- group.intersectionShader = VK_SHADER_UNUSED_NV;
- groups_.push_back(group);
+ VkCoarseSampleOrderCustomNV sampOrdBadShadingRate = {VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_PIXEL_NV, 1, 1,
+ locations};
+ VkCoarseSampleOrderCustomNV sampOrdBadSampleCount = {VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV, 3, 1,
+ locations};
+ VkCoarseSampleOrderCustomNV sampOrdBadSampleLocationCount = {VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV,
+ 2, 2, locations};
+ VkCoarseSampleOrderCustomNV sampOrdDuplicateLocations = {VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV, 2,
+ 1 * 2 * 2, &locations[1]};
+ VkCoarseSampleOrderCustomNV sampOrdOutOfRangeLocations = {VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV, 2,
+ 1 * 2 * 2, &locations[4]};
+ VkCoarseSampleOrderCustomNV sampOrdTooLargeSampleLocationCount = {
+ VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV, 4, 64, &locations[8]};
+ VkCoarseSampleOrderCustomNV sampOrdGood = {VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV, 2, 1 * 2 * 2,
+ &locations[0]};
+
+ VkPipelineViewportCoarseSampleOrderStateCreateInfoNV csosci = {
+ VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV};
+ csosci.sampleOrderType = VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV;
+ csosci.customSampleOrderCount = 1;
+
+ using std::vector;
+ struct TestCase {
+ const VkCoarseSampleOrderCustomNV *order;
+ vector<std::string> vuids;
+ };
+
+ vector<TestCase> test_cases = {
+ {&sampOrdBadShadingRate, {"VUID-VkCoarseSampleOrderCustomNV-shadingRate-02073"}},
+ {&sampOrdBadSampleCount,
+ {"VUID-VkCoarseSampleOrderCustomNV-sampleCount-02074", "VUID-VkCoarseSampleOrderCustomNV-sampleLocationCount-02075"}},
+ {&sampOrdBadSampleLocationCount, {"VUID-VkCoarseSampleOrderCustomNV-sampleLocationCount-02075"}},
+ {&sampOrdDuplicateLocations, {"VUID-VkCoarseSampleOrderCustomNV-pSampleLocations-02077"}},
+ {&sampOrdOutOfRangeLocations,
+ {"VUID-VkCoarseSampleOrderCustomNV-pSampleLocations-02077", "VUID-VkCoarseSampleLocationNV-pixelX-02078",
+ "VUID-VkCoarseSampleLocationNV-pixelY-02079", "VUID-VkCoarseSampleLocationNV-sample-02080"}},
+ {&sampOrdTooLargeSampleLocationCount,
+ {"VUID-VkCoarseSampleOrderCustomNV-sampleLocationCount-02076",
+ "VUID-VkCoarseSampleOrderCustomNV-pSampleLocations-02077"}},
+ {&sampOrdGood, {}},
+ };
+
+ for (const auto &test_case : test_cases) {
+ const auto break_vp = [&](CreatePipelineHelper &helper) {
+ helper.vp_state_ci_.pNext = &csosci;
+ csosci.pCustomSampleOrders = test_case.order;
+ };
+ CreatePipelineHelper::OneshotTest(*this, break_vp, VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.vuids);
+ }
+
+ // Test vkCmdSetCoarseSampleOrderNV errors
+ auto vkCmdSetCoarseSampleOrderNV =
+ (PFN_vkCmdSetCoarseSampleOrderNV)vkGetDeviceProcAddr(m_device->device(), "vkCmdSetCoarseSampleOrderNV");
+
+ for (const auto &test_case : test_cases) {
+ for (uint32_t i = 0; i < test_case.vuids.size(); ++i) {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.vuids[i]);
+ }
+ vkCmdSetCoarseSampleOrderNV(m_commandBuffer->handle(), VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV, 1, test_case.order);
+ if (test_case.vuids.size()) {
+ m_errorMonitor->VerifyFound();
+ } else {
+ m_errorMonitor->VerifyNotFound();
+ }
+ }
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkCmdSetCoarseSampleOrderNV-sampleOrderType-02081");
+ vkCmdSetCoarseSampleOrderNV(m_commandBuffer->handle(), VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV, 1, &sampOrdGood);
+ m_errorMonitor->VerifyFound();
+ }
+
+ m_commandBuffer->end();
+
+ vkDestroyImageView(m_device->device(), view, NULL);
+ vkDestroyImage(m_device->device(), image, NULL);
+ vkFreeMemory(m_device->device(), image_memory, NULL);
+}
+
+TEST_F(VkLayerTest, CornerSampledImageNV) {
+ TEST_DESCRIPTION("Test VK_NV_corner_sampled_image.");
+
+ if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ } else {
+ printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
+ VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ std::array<const char *, 1> required_device_extensions = {{VK_NV_CORNER_SAMPLED_IMAGE_EXTENSION_NAME}};
+ for (auto device_extension : required_device_extensions) {
+ if (DeviceExtensionSupported(gpu(), nullptr, device_extension)) {
+ m_device_extension_names.push_back(device_extension);
+ } else {
+ printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, device_extension);
+ return;
+ }
+ }
+
+ PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+ (PFN_vkGetPhysicalDeviceFeatures2KHR)vkGetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+ ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
+
+    // Create a device that enables corner_sampled_image
+ auto corner_sampled_image_features = lvl_init_struct<VkPhysicalDeviceCornerSampledImageFeaturesNV>();
+ auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&corner_sampled_image_features);
+ vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
+
+ ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
+
+ VkImage image = VK_NULL_HANDLE;
+ VkResult result = VK_RESULT_MAX_ENUM;
+ VkImageCreateInfo image_create_info = {};
+ image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ image_create_info.pNext = NULL;
+ image_create_info.imageType = VK_IMAGE_TYPE_1D;
+ image_create_info.format = VK_FORMAT_R8G8B8A8_UNORM;
+ image_create_info.extent.width = 2;
+ image_create_info.extent.height = 1;
+ image_create_info.extent.depth = 1;
+ image_create_info.mipLevels = 1;
+ image_create_info.arrayLayers = 1;
+ image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+ image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+ image_create_info.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+ image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+ image_create_info.queueFamilyIndexCount = 0;
+ image_create_info.pQueueFamilyIndices = NULL;
+ image_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+ image_create_info.flags = VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV;
+
+ // image type must be 2D or 3D
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-flags-02050");
+ result = vkCreateImage(m_device->device(), &image_create_info, NULL, &image);
+ m_errorMonitor->VerifyFound();
+ if (VK_SUCCESS == result) {
+ vkDestroyImage(m_device->device(), image, NULL);
+ image = VK_NULL_HANDLE;
+ }
+
+ // cube/depth not supported
+ image_create_info.imageType = VK_IMAGE_TYPE_2D;
+ image_create_info.extent.height = 2;
+ image_create_info.format = VK_FORMAT_D24_UNORM_S8_UINT;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-flags-02051");
+ result = vkCreateImage(m_device->device(), &image_create_info, NULL, &image);
+ m_errorMonitor->VerifyFound();
+ if (VK_SUCCESS == result) {
+ vkDestroyImage(m_device->device(), image, NULL);
+ image = VK_NULL_HANDLE;
+ }
+ image_create_info.format = VK_FORMAT_R8G8B8A8_UNORM;
+
+ // 2D width/height must be > 1
+ image_create_info.imageType = VK_IMAGE_TYPE_2D;
+ image_create_info.extent.height = 1;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-flags-02052");
+ result = vkCreateImage(m_device->device(), &image_create_info, NULL, &image);
+ m_errorMonitor->VerifyFound();
+ if (VK_SUCCESS == result) {
+ vkDestroyImage(m_device->device(), image, NULL);
+ image = VK_NULL_HANDLE;
+ }
+
+ // 3D width/height/depth must be > 1
+ image_create_info.imageType = VK_IMAGE_TYPE_3D;
+ image_create_info.extent.height = 2;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-flags-02053");
+ result = vkCreateImage(m_device->device(), &image_create_info, NULL, &image);
+ m_errorMonitor->VerifyFound();
+ if (VK_SUCCESS == result) {
+ vkDestroyImage(m_device->device(), image, NULL);
+ image = VK_NULL_HANDLE;
+ }
+ image_create_info.imageType = VK_IMAGE_TYPE_2D;
+
+ // Valid # of mip levels
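+    // (for corner-sampled images the complete mip chain is expected to have ceil(log2(max dimension))
+    //  levels rather than the usual floor(log2(max dimension)) + 1, per the comments on each case)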
+ image_create_info.extent = {7, 7, 1};
+ image_create_info.mipLevels = 3; // 3 = ceil(log2(7))
+ result = vkCreateImage(m_device->device(), &image_create_info, NULL, &image);
+ m_errorMonitor->VerifyNotFound();
+ if (VK_SUCCESS == result) {
+ vkDestroyImage(m_device->device(), image, NULL);
+ image = VK_NULL_HANDLE;
+ }
+
+ image_create_info.extent = {8, 8, 1};
+ image_create_info.mipLevels = 3; // 3 = ceil(log2(8))
+ result = vkCreateImage(m_device->device(), &image_create_info, NULL, &image);
+ m_errorMonitor->VerifyNotFound();
+ if (VK_SUCCESS == result) {
+ vkDestroyImage(m_device->device(), image, NULL);
+ image = VK_NULL_HANDLE;
+ }
+
+ image_create_info.extent = {9, 9, 1};
+    image_create_info.mipLevels = 3;  // still valid; the maximum for 9x9 is 4 = ceil(log2(9))
+ result = vkCreateImage(m_device->device(), &image_create_info, NULL, &image);
+ m_errorMonitor->VerifyNotFound();
+ if (VK_SUCCESS == result) {
+ vkDestroyImage(m_device->device(), image, NULL);
+ image = VK_NULL_HANDLE;
+ }
+
+ // Invalid # of mip levels
+ image_create_info.extent = {8, 8, 1};
+    image_create_info.mipLevels = 4;  // invalid; the maximum for 8x8 is 3 = ceil(log2(8))
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-mipLevels-00958");
+ result = vkCreateImage(m_device->device(), &image_create_info, NULL, &image);
+ m_errorMonitor->VerifyFound();
+ if (VK_SUCCESS == result) {
+ vkDestroyImage(m_device->device(), image, NULL);
+ image = VK_NULL_HANDLE;
+ }
+}
+
+TEST_F(VkLayerTest, MeshShaderNV) {
+ TEST_DESCRIPTION("Test VK_NV_mesh_shader.");
+
+ if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ } else {
+ printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
+ VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ std::array<const char *, 1> required_device_extensions = {{VK_NV_MESH_SHADER_EXTENSION_NAME}};
+ for (auto device_extension : required_device_extensions) {
+ if (DeviceExtensionSupported(gpu(), nullptr, device_extension)) {
+ m_device_extension_names.push_back(device_extension);
+ } else {
+ printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, device_extension);
+ return;
+ }
+ }
+
+ PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+ (PFN_vkGetPhysicalDeviceFeatures2KHR)vkGetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+ ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
+
+    // Create a device that enables mesh_shader and disables multiDrawIndirect
+ auto mesh_shader_features = lvl_init_struct<VkPhysicalDeviceMeshShaderFeaturesNV>();
+ auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&mesh_shader_features);
+ vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
+ features2.features.multiDrawIndirect = VK_FALSE;
+
+ ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ static const char vertShaderText[] =
+ "#version 450\n"
+ "vec2 vertices[3];\n"
+ "void main() {\n"
+ " vertices[0] = vec2(-1.0, -1.0);\n"
+ " vertices[1] = vec2( 1.0, -1.0);\n"
+ " vertices[2] = vec2( 0.0, 1.0);\n"
+ " gl_Position = vec4(vertices[gl_VertexIndex % 3], 0.0, 1.0);\n"
+ " gl_PointSize = 1.0f;\n"
+ "}\n";
+
+ static const char meshShaderText[] =
+ "#version 450\n"
+ "#extension GL_NV_mesh_shader : require\n"
+ "layout(local_size_x = 1) in;\n"
+ "layout(max_vertices = 3) out;\n"
+ "layout(max_primitives = 1) out;\n"
+ "layout(triangles) out;\n"
+ "void main() {\n"
+ " gl_MeshVerticesNV[0].gl_Position = vec4(-1.0, -1.0, 0, 1);\n"
+ " gl_MeshVerticesNV[1].gl_Position = vec4( 1.0, -1.0, 0, 1);\n"
+ " gl_MeshVerticesNV[2].gl_Position = vec4( 0.0, 1.0, 0, 1);\n"
+ " gl_PrimitiveIndicesNV[0] = 0;\n"
+ " gl_PrimitiveIndicesNV[1] = 1;\n"
+ " gl_PrimitiveIndicesNV[2] = 2;\n"
+ " gl_PrimitiveCountNV = 1;\n"
+ "}\n";
+
+ VkShaderObj vs(m_device, vertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkShaderObj ms(m_device, meshShaderText, VK_SHADER_STAGE_MESH_BIT_NV, this);
+ VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+ // Test pipeline creation
+ {
+ // can't mix mesh with vertex
+ const auto break_vp = [&](CreatePipelineHelper &helper) {
+ helper.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo(), ms.GetStageCreateInfo()};
+ };
+ CreatePipelineHelper::OneshotTest(*this, break_vp, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ vector<std::string>({"VUID-VkGraphicsPipelineCreateInfo-pStages-02095"}));
+
+ // vertex or mesh must be present
+ const auto break_vp2 = [&](CreatePipelineHelper &helper) { helper.shader_stages_ = {fs.GetStageCreateInfo()}; };
+ CreatePipelineHelper::OneshotTest(*this, break_vp2, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ vector<std::string>({"VUID-VkGraphicsPipelineCreateInfo-stage-02096"}));
+
+        // pVertexInputState and pInputAssemblyState must be valid when a vertex stage is present
+ const auto break_vp3 = [&](CreatePipelineHelper &helper) {
+ helper.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
+ helper.gp_ci_.pVertexInputState = nullptr;
+ helper.gp_ci_.pInputAssemblyState = nullptr;
+ };
+ CreatePipelineHelper::OneshotTest(*this, break_vp3, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ vector<std::string>({"VUID-VkGraphicsPipelineCreateInfo-pStages-02097",
+ "VUID-VkGraphicsPipelineCreateInfo-pStages-02098"}));
+ }
+
+ PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV =
+ (PFN_vkCmdDrawMeshTasksIndirectNV)vkGetInstanceProcAddr(instance(), "vkCmdDrawMeshTasksIndirectNV");
+
+ VkBufferCreateInfo buffer_create_info = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO};
+ buffer_create_info.size = sizeof(uint32_t);
+ buffer_create_info.usage = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT;
+ VkBuffer buffer;
+ VkResult result = vkCreateBuffer(m_device->device(), &buffer_create_info, nullptr, &buffer);
+ ASSERT_VK_SUCCESS(result);
+
+ m_commandBuffer->begin();
+
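+    // With multiDrawIndirect disabled and a stride of zero, a drawCount of 2 should trigger both VUIDs below.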
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawMeshTasksIndirectNV-drawCount-02146");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawMeshTasksIndirectNV-drawCount-02147");
+ vkCmdDrawMeshTasksIndirectNV(m_commandBuffer->handle(), buffer, 0, 2, 0);
+ m_errorMonitor->VerifyFound();
+
+ m_commandBuffer->end();
+
+ vkDestroyBuffer(m_device->device(), buffer, 0);
+}
+
+TEST_F(VkLayerTest, MeshShaderDisabledNV) {
+ TEST_DESCRIPTION("Test VK_NV_mesh_shader VUs with NV_mesh_shader disabled.");
+ ASSERT_NO_FATAL_FAILURE(Init());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkEvent event;
+ VkEventCreateInfo event_create_info{};
+ event_create_info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
+ vkCreateEvent(m_device->device(), &event_create_info, nullptr, &event);
+
+ m_commandBuffer->begin();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetEvent-stageMask-02107");
+ vkCmdSetEvent(m_commandBuffer->handle(), event, VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetEvent-stageMask-02108");
+ vkCmdSetEvent(m_commandBuffer->handle(), event, VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdResetEvent-stageMask-02109");
+ vkCmdResetEvent(m_commandBuffer->handle(), event, VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdResetEvent-stageMask-02110");
+ vkCmdResetEvent(m_commandBuffer->handle(), event, VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdWaitEvents-srcStageMask-02111");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdWaitEvents-dstStageMask-02113");
+ vkCmdWaitEvents(m_commandBuffer->handle(), 1, &event, VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV,
+ VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV, 0, nullptr, 0, nullptr, 0, nullptr);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdWaitEvents-srcStageMask-02112");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdWaitEvents-dstStageMask-02114");
+ vkCmdWaitEvents(m_commandBuffer->handle(), 1, &event, VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV,
+ VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV, 0, nullptr, 0, nullptr, 0, nullptr);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-srcStageMask-02115");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-dstStageMask-02117");
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV, VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV, 0,
+ 0, nullptr, 0, nullptr, 0, nullptr);
+ m_errorMonitor->VerifyFound();
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-srcStageMask-02116");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-dstStageMask-02118");
+ vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV, VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV, 0,
+ 0, nullptr, 0, nullptr, 0, nullptr);
+ m_errorMonitor->VerifyFound();
+
+ m_commandBuffer->end();
+
+ VkSemaphoreCreateInfo semaphore_create_info = {};
+ semaphore_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
+ VkSemaphore semaphore;
+ ASSERT_VK_SUCCESS(vkCreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore));
+
+ VkPipelineStageFlags stage_flags = VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV | VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV;
+ VkSubmitInfo submit_info = {};
+
+    // Signal the semaphore so the following submission can wait on it.
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.signalSemaphoreCount = 1;
+ submit_info.pSignalSemaphores = &semaphore;
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+ m_errorMonitor->VerifyNotFound();
+
+ submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+ submit_info.signalSemaphoreCount = 0;
+ submit_info.pSignalSemaphores = nullptr;
+ submit_info.waitSemaphoreCount = 1;
+ submit_info.pWaitSemaphores = &semaphore;
+ submit_info.pWaitDstStageMask = &stage_flags;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkSubmitInfo-pWaitDstStageMask-02089");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkSubmitInfo-pWaitDstStageMask-02090");
+ vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+ m_errorMonitor->VerifyFound();
+
+ vkQueueWaitIdle(m_device->m_queue);
+
+ VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+ VkPipelineShaderStageCreateInfo meshStage = {VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO};
+ meshStage = vs.GetStageCreateInfo();
+ meshStage.stage = VK_SHADER_STAGE_MESH_BIT_NV;
+ VkPipelineShaderStageCreateInfo taskStage = {VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO};
+ taskStage = vs.GetStageCreateInfo();
+ taskStage.stage = VK_SHADER_STAGE_TASK_BIT_NV;
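+    // Both stages reuse the vertex shader module while claiming mesh/task stage bits.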
+
+ // mesh and task shaders not supported
+ const auto break_vp = [&](CreatePipelineHelper &helper) {
+ helper.shader_stages_ = {meshStage, taskStage, vs.GetStageCreateInfo()};
+ };
+ CreatePipelineHelper::OneshotTest(
+ *this, break_vp, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ vector<std::string>({"VUID-VkPipelineShaderStageCreateInfo-pName-00707", "VUID-VkPipelineShaderStageCreateInfo-pName-00707",
+ "VUID-VkPipelineShaderStageCreateInfo-stage-02091",
+ "VUID-VkPipelineShaderStageCreateInfo-stage-02092"}));
+
+ vkDestroyEvent(m_device->device(), event, nullptr);
+ vkDestroySemaphore(m_device->device(), semaphore, nullptr);
+}
+
+TEST_F(VkLayerTest, InlineUniformBlockEXT) {
+ TEST_DESCRIPTION("Test VK_EXT_inline_uniform_block.");
+
+ if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ } else {
+ printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
+ VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ std::array<const char *, 2> required_device_extensions = {VK_KHR_MAINTENANCE1_EXTENSION_NAME,
+ VK_EXT_INLINE_UNIFORM_BLOCK_EXTENSION_NAME};
+ for (auto device_extension : required_device_extensions) {
+ if (DeviceExtensionSupported(gpu(), nullptr, device_extension)) {
+ m_device_extension_names.push_back(device_extension);
+ } else {
+ printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, device_extension);
+ return;
+ }
+ }
+
+ // Enable descriptor indexing if supported, but don't require it.
+ bool supportsDescriptorIndexing = true;
+ required_device_extensions = {VK_KHR_MAINTENANCE3_EXTENSION_NAME, VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME};
+ for (auto device_extension : required_device_extensions) {
+ if (DeviceExtensionSupported(gpu(), nullptr, device_extension)) {
+ m_device_extension_names.push_back(device_extension);
+ } else {
+ printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, device_extension);
+ supportsDescriptorIndexing = false;
+ return;
+ }
+ }
+
+ PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+ (PFN_vkGetPhysicalDeviceFeatures2KHR)vkGetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+ ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
+
+ auto descriptor_indexing_features = lvl_init_struct<VkPhysicalDeviceDescriptorIndexingFeaturesEXT>();
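+    // Chain the descriptor indexing features only when those extensions were actually enabled.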
+ void *pNext = supportsDescriptorIndexing ? &descriptor_indexing_features : nullptr;
+ // Create a device that enables inline_uniform_block
+ auto inline_uniform_block_features = lvl_init_struct<VkPhysicalDeviceInlineUniformBlockFeaturesEXT>(pNext);
+ auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&inline_uniform_block_features);
+ vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
+
+ PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR =
+ (PFN_vkGetPhysicalDeviceProperties2KHR)vkGetInstanceProcAddr(instance(), "vkGetPhysicalDeviceProperties2KHR");
+ assert(vkGetPhysicalDeviceProperties2KHR != nullptr);
+
+ // Get the inline uniform block limits
+ auto inline_uniform_props = lvl_init_struct<VkPhysicalDeviceInlineUniformBlockPropertiesEXT>();
+ auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&inline_uniform_props);
+ vkGetPhysicalDeviceProperties2KHR(gpu(), &prop2);
+
+ ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
+
+ VkDescriptorSetLayoutBinding dslb = {};
+ std::vector<VkDescriptorSetLayoutBinding> dslb_vec = {};
+ VkDescriptorSetLayoutCreateInfo ds_layout_ci = {};
+ ds_layout_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
+ VkDescriptorSetLayout ds_layout = {};
+
+ // Test too many bindings
+ dslb_vec.clear();
+ dslb.binding = 0;
+ dslb.descriptorType = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT;
+ dslb.descriptorCount = 4;
+ dslb.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
+
+ uint32_t maxBlocks = std::max(inline_uniform_props.maxPerStageDescriptorInlineUniformBlocks,
+ inline_uniform_props.maxDescriptorSetInlineUniformBlocks);
+ for (uint32_t i = 0; i < 1 + maxBlocks; ++i) {
+ dslb.binding = i;
+ dslb_vec.push_back(dslb);
+ }
+
+ ds_layout_ci.bindingCount = dslb_vec.size();
+ ds_layout_ci.pBindings = dslb_vec.data();
+ VkResult err = vkCreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
+ ASSERT_VK_SUCCESS(err);
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-descriptorType-02214");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-descriptorType-02216");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-descriptorType-02215");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-descriptorType-02217");
+
+ VkPipelineLayoutCreateInfo pipeline_layout_ci = {};
+ pipeline_layout_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
+ pipeline_layout_ci.pNext = NULL;
+ pipeline_layout_ci.setLayoutCount = 1;
+ pipeline_layout_ci.pSetLayouts = &ds_layout;
+ VkPipelineLayout pipeline_layout = VK_NULL_HANDLE;
+
+ err = vkCreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
+ m_errorMonitor->VerifyFound();
+ vkDestroyPipelineLayout(m_device->device(), pipeline_layout, NULL);
+ pipeline_layout = VK_NULL_HANDLE;
+ vkDestroyDescriptorSetLayout(m_device->device(), ds_layout, nullptr);
+ ds_layout = VK_NULL_HANDLE;
+
+ // Single binding that's too large and is not a multiple of 4
+ dslb.binding = 0;
+ dslb.descriptorCount = inline_uniform_props.maxInlineUniformBlockSize + 1;
+
+ ds_layout_ci.bindingCount = 1;
+ ds_layout_ci.pBindings = &dslb;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDescriptorSetLayoutBinding-descriptorType-02209");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDescriptorSetLayoutBinding-descriptorType-02210");
+ err = vkCreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
+ m_errorMonitor->VerifyFound();
+ vkDestroyDescriptorSetLayout(m_device->device(), ds_layout, nullptr);
+ ds_layout = VK_NULL_HANDLE;
+
+ // Pool size must be a multiple of 4
+ VkDescriptorPoolSize ds_type_count = {};
+ ds_type_count.type = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT;
+ ds_type_count.descriptorCount = 33;
+
+ VkDescriptorPoolCreateInfo ds_pool_ci = {};
+ ds_pool_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
+ ds_pool_ci.pNext = NULL;
+ ds_pool_ci.flags = 0;
+ ds_pool_ci.maxSets = 2;
+ ds_pool_ci.poolSizeCount = 1;
+ ds_pool_ci.pPoolSizes = &ds_type_count;
+
+ VkDescriptorPool ds_pool = VK_NULL_HANDLE;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDescriptorPoolSize-type-02218");
+ err = vkCreateDescriptorPool(m_device->device(), &ds_pool_ci, NULL, &ds_pool);
+ m_errorMonitor->VerifyFound();
+ if (ds_pool) {
+ vkDestroyDescriptorPool(m_device->handle(), ds_pool, nullptr);
+ ds_pool = VK_NULL_HANDLE;
+ }
+
+ // Create a valid pool
+ ds_type_count.descriptorCount = 32;
+ err = vkCreateDescriptorPool(m_device->device(), &ds_pool_ci, NULL, &ds_pool);
+ m_errorMonitor->VerifyNotFound();
+
+ // Create two valid sets with 8 bytes each
+ dslb_vec.clear();
+ dslb.binding = 0;
+ dslb.descriptorType = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT;
+ dslb.descriptorCount = 8;
+ dslb.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
+ dslb_vec.push_back(dslb);
+ dslb.binding = 1;
+ dslb_vec.push_back(dslb);
+
+ ds_layout_ci.bindingCount = dslb_vec.size();
+ ds_layout_ci.pBindings = &dslb_vec[0];
+
+ err = vkCreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
+ m_errorMonitor->VerifyNotFound();
+
+ VkDescriptorSet descriptor_sets[2];
+ VkDescriptorSetLayout set_layouts[2] = {ds_layout, ds_layout};
+ VkDescriptorSetAllocateInfo alloc_info = {};
+ alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
+ alloc_info.descriptorSetCount = 2;
+ alloc_info.descriptorPool = ds_pool;
+ alloc_info.pSetLayouts = set_layouts;
+ err = vkAllocateDescriptorSets(m_device->device(), &alloc_info, descriptor_sets);
+ m_errorMonitor->VerifyNotFound();
+
+ // Test invalid VkWriteDescriptorSet parameters (array element and size must be multiple of 4)
+ VkWriteDescriptorSet descriptor_write = {};
+ descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ descriptor_write.dstSet = descriptor_sets[0];
+ descriptor_write.dstBinding = 0;
+ descriptor_write.dstArrayElement = 0;
+ descriptor_write.descriptorCount = 3;
+ descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT;
+
+ uint32_t dummyData[8] = {};
+ VkWriteDescriptorSetInlineUniformBlockEXT write_inline_uniform = {};
+ write_inline_uniform.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT;
+ write_inline_uniform.dataSize = 3;
+ write_inline_uniform.pData = &dummyData[0];
+ descriptor_write.pNext = &write_inline_uniform;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-descriptorType-02220");
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+ m_errorMonitor->VerifyFound();
+
+ descriptor_write.dstArrayElement = 1;
+ descriptor_write.descriptorCount = 4;
+ write_inline_uniform.dataSize = 4;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-descriptorType-02219");
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+ m_errorMonitor->VerifyFound();
+
+ descriptor_write.pNext = nullptr;
+ descriptor_write.dstArrayElement = 0;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-descriptorType-02221");
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+ m_errorMonitor->VerifyFound();
+
+ descriptor_write.pNext = &write_inline_uniform;
+ vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+ m_errorMonitor->VerifyNotFound();
+
+ // Test invalid VkCopyDescriptorSet parameters (array element and size must be multiple of 4)
+ VkCopyDescriptorSet copy_ds_update = {};
+ copy_ds_update.sType = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET;
+ copy_ds_update.srcSet = descriptor_sets[0];
+ copy_ds_update.srcBinding = 0;
+ copy_ds_update.srcArrayElement = 0;
+ copy_ds_update.dstSet = descriptor_sets[1];
+ copy_ds_update.dstBinding = 0;
+ copy_ds_update.dstArrayElement = 0;
+ copy_ds_update.descriptorCount = 4;
+
+ copy_ds_update.srcArrayElement = 1;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkCopyDescriptorSet-srcBinding-02223");
+ vkUpdateDescriptorSets(m_device->device(), 0, NULL, 1, &copy_ds_update);
+ m_errorMonitor->VerifyFound();
+
+ copy_ds_update.srcArrayElement = 0;
+ copy_ds_update.dstArrayElement = 1;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkCopyDescriptorSet-dstBinding-02224");
+ vkUpdateDescriptorSets(m_device->device(), 0, NULL, 1, &copy_ds_update);
+ m_errorMonitor->VerifyFound();
+
+ copy_ds_update.dstArrayElement = 0;
+ copy_ds_update.descriptorCount = 5;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkCopyDescriptorSet-srcBinding-02225");
+ vkUpdateDescriptorSets(m_device->device(), 0, NULL, 1, &copy_ds_update);
+ m_errorMonitor->VerifyFound();
+
+ copy_ds_update.descriptorCount = 4;
+ vkUpdateDescriptorSets(m_device->device(), 0, NULL, 1, &copy_ds_update);
+ m_errorMonitor->VerifyNotFound();
+
+ vkDestroyDescriptorPool(m_device->handle(), ds_pool, nullptr);
+ vkDestroyDescriptorSetLayout(m_device->device(), ds_layout, nullptr);
+}
+
+TEST_F(VkLayerTest, FramebufferMixedSamplesNV) {
+ TEST_DESCRIPTION("Verify VK_NV_framebuffer_mixed_samples.");
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+ if (DeviceExtensionSupported(gpu(), nullptr, VK_NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME)) {
+ m_device_extension_names.push_back(VK_NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME);
+ } else {
+ printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME);
+ return;
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitState());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ struct TestCase {
+ VkSampleCountFlagBits color_samples;
+ VkSampleCountFlagBits depth_samples;
+ VkSampleCountFlagBits raster_samples;
+ VkBool32 depth_test;
+ VkBool32 sample_shading;
+ uint32_t table_count;
+ bool positiveTest;
+ std::string vuid;
+ };
+
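+    // Each case lists the VUID it exercises; positiveTest marks whether pipeline creation is expected to succeed.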
+ std::vector<TestCase> test_cases = {
+ {VK_SAMPLE_COUNT_4_BIT, VK_SAMPLE_COUNT_4_BIT, VK_SAMPLE_COUNT_4_BIT, VK_FALSE, VK_FALSE, 1, true,
+ "VUID-VkGraphicsPipelineCreateInfo-subpass-00757"},
+ {VK_SAMPLE_COUNT_4_BIT, VK_SAMPLE_COUNT_1_BIT, VK_SAMPLE_COUNT_8_BIT, VK_FALSE, VK_FALSE, 4, false,
+ "VUID-VkPipelineCoverageModulationStateCreateInfoNV-coverageModulationTableEnable-01405"},
+ {VK_SAMPLE_COUNT_4_BIT, VK_SAMPLE_COUNT_1_BIT, VK_SAMPLE_COUNT_8_BIT, VK_FALSE, VK_FALSE, 2, true,
+ "VUID-VkPipelineCoverageModulationStateCreateInfoNV-coverageModulationTableEnable-01405"},
+ {VK_SAMPLE_COUNT_1_BIT, VK_SAMPLE_COUNT_4_BIT, VK_SAMPLE_COUNT_8_BIT, VK_TRUE, VK_FALSE, 1, false,
+ "VUID-VkGraphicsPipelineCreateInfo-subpass-01411"},
+ {VK_SAMPLE_COUNT_1_BIT, VK_SAMPLE_COUNT_8_BIT, VK_SAMPLE_COUNT_8_BIT, VK_TRUE, VK_FALSE, 1, true,
+ "VUID-VkGraphicsPipelineCreateInfo-subpass-01411"},
+ {VK_SAMPLE_COUNT_4_BIT, VK_SAMPLE_COUNT_1_BIT, VK_SAMPLE_COUNT_1_BIT, VK_FALSE, VK_FALSE, 1, false,
+ "VUID-VkGraphicsPipelineCreateInfo-subpass-01412"},
+ {VK_SAMPLE_COUNT_4_BIT, VK_SAMPLE_COUNT_1_BIT, VK_SAMPLE_COUNT_4_BIT, VK_FALSE, VK_FALSE, 1, true,
+ "VUID-VkGraphicsPipelineCreateInfo-subpass-01412"},
+ {VK_SAMPLE_COUNT_1_BIT, VK_SAMPLE_COUNT_4_BIT, VK_SAMPLE_COUNT_4_BIT, VK_FALSE, VK_TRUE, 1, false,
+ "VUID-VkPipelineMultisampleStateCreateInfo-rasterizationSamples-01415"},
+ {VK_SAMPLE_COUNT_1_BIT, VK_SAMPLE_COUNT_4_BIT, VK_SAMPLE_COUNT_4_BIT, VK_FALSE, VK_FALSE, 1, true,
+ "VUID-VkPipelineMultisampleStateCreateInfo-rasterizationSamples-01415"},
+ {VK_SAMPLE_COUNT_1_BIT, VK_SAMPLE_COUNT_4_BIT, VK_SAMPLE_COUNT_8_BIT, VK_FALSE, VK_FALSE, 1, true,
+ "VUID-VkGraphicsPipelineCreateInfo-subpass-00757"}};
+
+ for (const auto &test_case : test_cases) {
+ VkAttachmentDescription att[2] = {{}, {}};
+ att[0].format = VK_FORMAT_R8G8B8A8_UNORM;
+ att[0].samples = test_case.color_samples;
+ att[0].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+ att[0].finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+
+ att[1].format = VK_FORMAT_D24_UNORM_S8_UINT;
+ att[1].samples = test_case.depth_samples;
+ att[1].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+ att[1].finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
+
+ VkAttachmentReference cr = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
+ VkAttachmentReference dr = {1, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL};
+
+ VkSubpassDescription sp = {};
+ sp.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
+ sp.colorAttachmentCount = 1;
+ sp.pColorAttachments = &cr;
+ sp.pResolveAttachments = NULL;
+ sp.pDepthStencilAttachment = &dr;
+
+ VkRenderPassCreateInfo rpi = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO};
+ rpi.attachmentCount = 2;
+ rpi.pAttachments = att;
+ rpi.subpassCount = 1;
+ rpi.pSubpasses = &sp;
+
+ VkRenderPass rp;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkSubpassDescription-pDepthStencilAttachment-01418");
+ VkResult err = vkCreateRenderPass(m_device->device(), &rpi, nullptr, &rp);
+ m_errorMonitor->VerifyNotFound();
+
+ ASSERT_VK_SUCCESS(err);
+
+ VkPipelineDepthStencilStateCreateInfo ds = {VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO};
+ VkPipelineCoverageModulationStateCreateInfoNV cmi = {VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV};
+
+ // Create a dummy modulation table that can be used for the positive
+ // coverageModulationTableCount test.
+ std::vector<float> cm_table{};
+
+ const auto break_samples = [&cmi, &rp, &ds, &cm_table, &test_case](CreatePipelineHelper &helper) {
+ cm_table.resize(test_case.raster_samples / test_case.color_samples);
+
+ cmi.flags = 0;
+ cmi.coverageModulationTableEnable = (test_case.table_count > 1);
+ cmi.coverageModulationTableCount = test_case.table_count;
+ cmi.pCoverageModulationTable = cm_table.data();
+
+ ds.depthTestEnable = test_case.depth_test;
+
+ helper.pipe_ms_state_ci_.pNext = &cmi;
+ helper.pipe_ms_state_ci_.rasterizationSamples = test_case.raster_samples;
+ helper.pipe_ms_state_ci_.sampleShadingEnable = test_case.sample_shading;
+
+ helper.gp_ci_.renderPass = rp;
+ helper.gp_ci_.pDepthStencilState = &ds;
+ };
+
+ CreatePipelineHelper::OneshotTest(*this, break_samples, VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.vuid,
+ test_case.positiveTest);
+
+ vkDestroyRenderPass(m_device->device(), rp, nullptr);
}
}
-void CreateNVRayTracingPipelineHelper::InitDescriptorSetInfo() {
- dsl_bindings_ = {
- {0, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, 1, VK_SHADER_STAGE_RAYGEN_BIT_NV, nullptr},
- {1, VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV, 1, VK_SHADER_STAGE_RAYGEN_BIT_NV, nullptr},
+TEST_F(VkLayerTest, FramebufferMixedSamples) {
+ TEST_DESCRIPTION("Verify that the expected VUIds are hits when VK_NV_framebuffer_mixed_samples is disabled.");
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ ASSERT_NO_FATAL_FAILURE(InitState());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ struct TestCase {
+ VkSampleCountFlagBits color_samples;
+ VkSampleCountFlagBits depth_samples;
+ VkSampleCountFlagBits raster_samples;
+ bool positiveTest;
};
+
+ std::vector<TestCase> test_cases = {
+ {VK_SAMPLE_COUNT_2_BIT, VK_SAMPLE_COUNT_4_BIT, VK_SAMPLE_COUNT_8_BIT,
+ false}, // Fails vkCreateRenderPass and vkCreateGraphicsPipeline
+ {VK_SAMPLE_COUNT_4_BIT, VK_SAMPLE_COUNT_4_BIT, VK_SAMPLE_COUNT_8_BIT, false}, // Fails vkCreateGraphicsPipeline
+ {VK_SAMPLE_COUNT_4_BIT, VK_SAMPLE_COUNT_4_BIT, VK_SAMPLE_COUNT_4_BIT, true} // Pass
+ };
+
+ for (const auto &test_case : test_cases) {
+ VkAttachmentDescription att[2] = {{}, {}};
+ att[0].format = VK_FORMAT_R8G8B8A8_UNORM;
+ att[0].samples = test_case.color_samples;
+ att[0].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+ att[0].finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+
+ att[1].format = VK_FORMAT_D24_UNORM_S8_UINT;
+ att[1].samples = test_case.depth_samples;
+ att[1].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+ att[1].finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
+
+ VkAttachmentReference cr = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
+ VkAttachmentReference dr = {1, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL};
+
+ VkSubpassDescription sp = {};
+ sp.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
+ sp.colorAttachmentCount = 1;
+ sp.pColorAttachments = &cr;
+ sp.pResolveAttachments = NULL;
+ sp.pDepthStencilAttachment = &dr;
+
+ VkRenderPassCreateInfo rpi = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO};
+ rpi.attachmentCount = 2;
+ rpi.pAttachments = att;
+ rpi.subpassCount = 1;
+ rpi.pSubpasses = &sp;
+
+ VkRenderPass rp;
+
+ if (test_case.color_samples == test_case.depth_samples) {
+ m_errorMonitor->ExpectSuccess();
+ } else {
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkSubpassDescription-pDepthStencilAttachment-01418");
+ }
+
+ VkResult err = vkCreateRenderPass(m_device->device(), &rpi, nullptr, &rp);
+
+ if (test_case.color_samples == test_case.depth_samples) {
+ m_errorMonitor->VerifyNotFound();
+ } else {
+ m_errorMonitor->VerifyFound();
+ continue;
+ }
+
+ ASSERT_VK_SUCCESS(err);
+
+ VkPipelineDepthStencilStateCreateInfo ds = {VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO};
+
+ const auto break_samples = [&rp, &ds, &test_case](CreatePipelineHelper &helper) {
+ helper.pipe_ms_state_ci_.rasterizationSamples = test_case.raster_samples;
+
+ helper.gp_ci_.renderPass = rp;
+ helper.gp_ci_.pDepthStencilState = &ds;
+ };
+
+ CreatePipelineHelper::OneshotTest(*this, break_samples, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkGraphicsPipelineCreateInfo-subpass-00757", test_case.positiveTest);
+
+ vkDestroyRenderPass(m_device->device(), rp, nullptr);
+ }
}
-void CreateNVRayTracingPipelineHelper::InitPipelineLayoutInfo() {
- pipeline_layout_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
- pipeline_layout_ci_.setLayoutCount = 1; // Not really changeable because InitState() sets exactly one pSetLayout
- pipeline_layout_ci_.pSetLayouts = nullptr; // must bound after it is created
+TEST_F(VkLayerTest, FragmentCoverageToColorNV) {
+ TEST_DESCRIPTION("Verify VK_NV_fragment_coverage_to_color.");
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+ if (DeviceExtensionSupported(gpu(), nullptr, VK_NV_FRAGMENT_COVERAGE_TO_COLOR_EXTENSION_NAME)) {
+ m_device_extension_names.push_back(VK_NV_FRAGMENT_COVERAGE_TO_COLOR_EXTENSION_NAME);
+ } else {
+ printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_NV_FRAGMENT_COVERAGE_TO_COLOR_EXTENSION_NAME);
+ return;
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitState());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ struct TestCase {
+ VkFormat format;
+ VkBool32 enabled;
+ uint32_t location;
+ bool positive;
+ };
+
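+    // Coverage-to-color requires coverageToColorLocation to reference an existing color attachment
+    // with a single-component integer format; the last three cases each violate one of those rules.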
+ const std::array<TestCase, 9> test_cases = {{
+ {VK_FORMAT_R8G8B8A8_UNORM, VK_FALSE, 0, true},
+ {VK_FORMAT_R8_UINT, VK_TRUE, 1, true},
+ {VK_FORMAT_R16_UINT, VK_TRUE, 1, true},
+ {VK_FORMAT_R16_SINT, VK_TRUE, 1, true},
+ {VK_FORMAT_R32_UINT, VK_TRUE, 1, true},
+ {VK_FORMAT_R32_SINT, VK_TRUE, 1, true},
+ {VK_FORMAT_R32_SINT, VK_TRUE, 2, false},
+ {VK_FORMAT_R8_SINT, VK_TRUE, 3, false},
+ {VK_FORMAT_R8G8B8A8_UNORM, VK_TRUE, 1, false},
+ }};
+
+ for (const auto &test_case : test_cases) {
+ std::array<VkAttachmentDescription, 2> att = {{{}, {}}};
+ att[0].format = VK_FORMAT_R8G8B8A8_UNORM;
+ att[0].samples = VK_SAMPLE_COUNT_1_BIT;
+ att[0].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+ att[0].finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+
+ att[1].format = VK_FORMAT_R8G8B8A8_UNORM;
+ att[1].samples = VK_SAMPLE_COUNT_1_BIT;
+ att[1].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+ att[1].finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+
+ if (test_case.location < att.size()) {
+ att[test_case.location].format = test_case.format;
+ }
+
+ const std::array<VkAttachmentReference, 3> cr = {{{0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
+ {1, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
+ {VK_ATTACHMENT_UNUSED, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL}}};
+
+ VkSubpassDescription sp = {};
+ sp.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
+ sp.colorAttachmentCount = cr.size();
+ sp.pColorAttachments = cr.data();
+
+ VkRenderPassCreateInfo rpi = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO};
+ rpi.attachmentCount = att.size();
+ rpi.pAttachments = att.data();
+ rpi.subpassCount = 1;
+ rpi.pSubpasses = &sp;
+
+ const std::array<VkPipelineColorBlendAttachmentState, 3> cba = {{{}, {}, {}}};
+
+ VkPipelineColorBlendStateCreateInfo cbi = {VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO};
+ cbi.attachmentCount = cba.size();
+ cbi.pAttachments = cba.data();
+
+ VkRenderPass rp;
+ VkResult err = vkCreateRenderPass(m_device->device(), &rpi, nullptr, &rp);
+ ASSERT_VK_SUCCESS(err);
+
+ VkPipelineCoverageToColorStateCreateInfoNV cci = {VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV};
+
+ const auto break_samples = [&cci, &cbi, &rp, &test_case](CreatePipelineHelper &helper) {
+ cci.coverageToColorEnable = test_case.enabled;
+ cci.coverageToColorLocation = test_case.location;
+
+ helper.pipe_ms_state_ci_.pNext = &cci;
+ helper.gp_ci_.renderPass = rp;
+ helper.gp_ci_.pColorBlendState = &cbi;
+ };
+
+ CreatePipelineHelper::OneshotTest(*this, break_samples, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkPipelineCoverageToColorStateCreateInfoNV-coverageToColorEnable-01404",
+ test_case.positive);
+
+ vkDestroyRenderPass(m_device->device(), rp, nullptr);
+ }
}
-void CreateNVRayTracingPipelineHelper::InitShaderInfo() { // DONE
+TEST_F(VkPositiveLayerTest, RayTracingPipelineNV) {
+ TEST_DESCRIPTION("Test VK_NV_ray_tracing.");
+
+ if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ } else {
+ printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
+ VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ std::array<const char *, 2> required_device_extensions = {
+ {VK_NV_RAY_TRACING_EXTENSION_NAME, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME}};
+ for (auto device_extension : required_device_extensions) {
+ if (DeviceExtensionSupported(gpu(), nullptr, device_extension)) {
+ m_device_extension_names.push_back(device_extension);
+ } else {
+ printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, device_extension);
+ return;
+ }
+ }
+
+ PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+ (PFN_vkGetPhysicalDeviceFeatures2KHR)vkGetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+ ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
+
+ ASSERT_NO_FATAL_FAILURE(InitState());
+
+ m_errorMonitor->ExpectSuccess();
+
static const char rayGenShaderText[] =
"#version 460 core \n"
"#extension GL_NV_ray_tracing : require \n"
@@ -1772,8 +36233,7 @@ void CreateNVRayTracingPipelineHelper::InitShaderInfo() { // DONE
"{ \n"
" vec4 col = vec4(0, 0, 0, 1); \n"
" \n"
- " vec3 origin = vec3(float(gl_LaunchIDNV.x)/float(gl_LaunchSizeNV.x), "
- "float(gl_LaunchIDNV.y)/float(gl_LaunchSizeNV.y), "
+ " vec3 origin = vec3(float(gl_LaunchIDNV.x)/float(gl_LaunchSizeNV.x), float(gl_LaunchIDNV.y)/float(gl_LaunchSizeNV.y), "
"1.0); \n"
" vec3 dir = vec3(0.0, 0.0, -1.0); \n"
" \n"
@@ -1803,193 +36263,1384 @@ void CreateNVRayTracingPipelineHelper::InitShaderInfo() { // DONE
" hitValue = 0.0; \n"
"} \n";
- rgs_.reset(new VkShaderObj(layer_test_.DeviceObj(), rayGenShaderText, VK_SHADER_STAGE_RAYGEN_BIT_NV, &layer_test_));
- chs_.reset(new VkShaderObj(layer_test_.DeviceObj(), closestHitShaderText, VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV, &layer_test_));
- mis_.reset(new VkShaderObj(layer_test_.DeviceObj(), missShaderText, VK_SHADER_STAGE_MISS_BIT_NV, &layer_test_));
+ VkShaderObj rgs(m_device, rayGenShaderText, VK_SHADER_STAGE_RAYGEN_BIT_NV, this);
+ VkShaderObj chs(m_device, closestHitShaderText, VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV, this);
+ VkShaderObj mis(m_device, missShaderText, VK_SHADER_STAGE_MISS_BIT_NV, this);
+
+ VkPipelineShaderStageCreateInfo rayStages[3];
+ memset(&rayStages[0], 0, sizeof(rayStages));
+
+ rayStages[0] = rgs.GetStageCreateInfo();
+ rayStages[0].stage = VK_SHADER_STAGE_RAYGEN_BIT_NV;
+ rayStages[1] = chs.GetStageCreateInfo();
+ rayStages[1].stage = VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV;
+ rayStages[2] = mis.GetStageCreateInfo();
+ rayStages[2].stage = VK_SHADER_STAGE_MISS_BIT_NV;
+
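+    // Three shader groups: a general group for the raygen shader, a triangle hit group for the
+    // closest-hit shader, and a general group for the miss shader.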
+ VkRayTracingShaderGroupCreateInfoNV groups[3];
+ memset(&groups[0], 0, sizeof(groups));
+
+ groups[0].sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
+ groups[0].type = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV;
+ groups[0].generalShader = 0;
+ groups[0].closestHitShader = VK_SHADER_UNUSED_NV;
+ groups[0].anyHitShader = VK_SHADER_UNUSED_NV;
+ groups[0].intersectionShader = VK_SHADER_UNUSED_NV;
+
+ groups[1].sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
+ groups[1].type = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV;
+ groups[1].generalShader = VK_SHADER_UNUSED_NV;
+ groups[1].closestHitShader = 1;
+ groups[1].anyHitShader = VK_SHADER_UNUSED_NV;
+ groups[1].intersectionShader = VK_SHADER_UNUSED_NV;
+
+ groups[2].sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
+ groups[2].type = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV;
+ groups[2].generalShader = 2;
+ groups[2].closestHitShader = VK_SHADER_UNUSED_NV;
+ groups[2].anyHitShader = VK_SHADER_UNUSED_NV;
+ groups[2].intersectionShader = VK_SHADER_UNUSED_NV;
+
+ const uint32_t bindingCount = 2;
+ VkDescriptorSetLayoutBinding binding[bindingCount] = {};
+ binding[0].binding = 0;
+ binding[0].descriptorCount = 1;
+ binding[0].stageFlags = VK_SHADER_STAGE_RAYGEN_BIT_NV;
+ binding[0].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
+ binding[1].binding = 1;
+ binding[1].descriptorCount = 1;
+ binding[1].stageFlags = VK_SHADER_STAGE_RAYGEN_BIT_NV;
+ binding[1].descriptorType = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV;
+
+ VkDescriptorSetLayoutCreateInfo descriptorSetEntry = {VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO};
+ descriptorSetEntry.bindingCount = bindingCount;
+ descriptorSetEntry.pBindings = binding;
+
+ VkDescriptorSetLayout descriptorSetLayout;
+ VkResult err = vkCreateDescriptorSetLayout(m_device->device(), &descriptorSetEntry, 0, &descriptorSetLayout);
+ ASSERT_VK_SUCCESS(err);
+ VkPipelineLayoutCreateInfo pipelineLayoutCreateInfo = {VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO};
+ pipelineLayoutCreateInfo.setLayoutCount = 1;
+ pipelineLayoutCreateInfo.pSetLayouts = &descriptorSetLayout;
+ VkPipelineLayout pipelineLayout;
+ err = vkCreatePipelineLayout(m_device->device(), &pipelineLayoutCreateInfo, 0, &pipelineLayout);
+ ASSERT_VK_SUCCESS(err);
- shader_stages_ = {rgs_->GetStageCreateInfo(), chs_->GetStageCreateInfo(), mis_->GetStageCreateInfo()};
-}
+ PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV =
+ (PFN_vkCreateRayTracingPipelinesNV)vkGetInstanceProcAddr(instance(), "vkCreateRayTracingPipelinesNV");
-void CreateNVRayTracingPipelineHelper::InitNVRayTracingPipelineInfo() {
- rp_ci_.sType = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV;
+ VkRayTracingPipelineCreateInfoNV rayPipelineInfo = {VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV};
+ rayPipelineInfo.layout = pipelineLayout;
- rp_ci_.stageCount = shader_stages_.size();
- rp_ci_.pStages = shader_stages_.data();
- rp_ci_.groupCount = groups_.size();
- rp_ci_.pGroups = groups_.data();
-}
+ rayPipelineInfo.stageCount = 3;
+ rayPipelineInfo.pStages = &rayStages[0];
+ rayPipelineInfo.groupCount = 3;
+ rayPipelineInfo.pGroups = &groups[0];
-void CreateNVRayTracingPipelineHelper::InitPipelineCacheInfo() {
- pc_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
- pc_ci_.pNext = nullptr;
- pc_ci_.flags = 0;
- pc_ci_.initialDataSize = 0;
- pc_ci_.pInitialData = nullptr;
-}
+ VkPipeline rayPipeline;
+ err = vkCreateRayTracingPipelinesNV(m_device->device(), VK_NULL_HANDLE, 1, &rayPipelineInfo, 0, &rayPipeline);
+ ASSERT_VK_SUCCESS(err);
-void CreateNVRayTracingPipelineHelper::InitInfo() {
- InitShaderGroups();
- InitDescriptorSetInfo();
- InitPipelineLayoutInfo();
- InitShaderInfo();
- InitNVRayTracingPipelineInfo();
- InitPipelineCacheInfo();
+ vkDestroyPipeline(m_device->device(), rayPipeline, 0);
+ vkDestroyPipelineLayout(m_device->device(), pipelineLayout, 0);
+ vkDestroyDescriptorSetLayout(m_device->device(), descriptorSetLayout, 0);
+ m_errorMonitor->VerifyNotFound();
}
-void CreateNVRayTracingPipelineHelper::InitState() {
- VkResult err;
- descriptor_set_.reset(new OneOffDescriptorSet(layer_test_.DeviceObj(), dsl_bindings_));
- ASSERT_TRUE(descriptor_set_->Initialized());
+TEST_F(VkLayerTest, CreateYCbCrSampler) {
+ TEST_DESCRIPTION("Verify YCbCr sampler creation.");
- pipeline_layout_ = VkPipelineLayoutObj(layer_test_.DeviceObj(), {&descriptor_set_->layout_});
+ // Test requires API 1.1 or (API 1.0 + SamplerYCbCr extension). Request API 1.1
+ SetTargetApiVersion(VK_API_VERSION_1_1);
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- err = vkCreatePipelineCache(layer_test_.device(), &pc_ci_, NULL, &pipeline_cache_);
- ASSERT_VK_SUCCESS(err);
-}
+    // In case we don't have API 1.1+, try enabling the extension directly (and its dependencies)
+ if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME)) {
+ m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitState());
+ VkDevice dev = m_device->device();
-void CreateNVRayTracingPipelineHelper::LateBindPipelineInfo() {
- // By value or dynamically located items must be late bound
- rp_ci_.layout = pipeline_layout_.handle();
- rp_ci_.stageCount = shader_stages_.size();
- rp_ci_.pStages = shader_stages_.data();
+ // Verify we have the requested support
+ bool ycbcr_support = (DeviceExtensionEnabled(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME) ||
+ (DeviceValidationVersion() >= VK_API_VERSION_1_1));
+ if (!ycbcr_support) {
+ printf("%s Did not find required device extension %s; test skipped.\n", kSkipPrefix,
+ VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+ return;
+ }
+
+ VkSamplerYcbcrConversion ycbcr_conv = VK_NULL_HANDLE;
+ VkSamplerYcbcrConversionCreateInfo sycci = {};
+ sycci.sType = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO;
+ sycci.format = VK_FORMAT_UNDEFINED;
+ sycci.ycbcrModel = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY;
+ sycci.ycbcrRange = VK_SAMPLER_YCBCR_RANGE_ITU_FULL;
+
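+    // VK_FORMAT_UNDEFINED is not a valid conversion format here, so creation is expected to fail.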
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkSamplerYcbcrConversionCreateInfo-format-01649");
+ vkCreateSamplerYcbcrConversion(dev, &sycci, NULL, &ycbcr_conv);
+ m_errorMonitor->VerifyFound();
}
-VkResult CreateNVRayTracingPipelineHelper::CreateNVRayTracingPipeline(bool implicit_destroy, bool do_late_bind) {
- VkResult err;
- if (do_late_bind) {
- LateBindPipelineInfo();
+TEST_F(VkPositiveLayerTest, ViewportArray2NV) {
+ TEST_DESCRIPTION("Test to validate VK_NV_viewport_array2");
+
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+ VkPhysicalDeviceFeatures available_features = {};
+ ASSERT_NO_FATAL_FAILURE(GetPhysicalDeviceFeatures(&available_features));
+
+ if (!available_features.multiViewport) {
+ printf("VkPhysicalDeviceFeatures::multiViewport is not supported, skipping tests\n");
+ return;
+ }
+ if (!available_features.tessellationShader) {
+ printf("VkPhysicalDeviceFeatures::tessellationShader is not supported, skipping tests\n");
+ return;
}
- if (implicit_destroy && (pipeline_ != VK_NULL_HANDLE)) {
- vkDestroyPipeline(layer_test_.device(), pipeline_, nullptr);
- pipeline_ = VK_NULL_HANDLE;
+ if (!available_features.geometryShader) {
+ printf("VkPhysicalDeviceFeatures::geometryShader is not supported, skipping tests\n");
+ return;
}
- PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV =
- (PFN_vkCreateRayTracingPipelinesNV)vkGetInstanceProcAddr(layer_test_.instance(), "vkCreateRayTracingPipelinesNV");
- err = vkCreateRayTracingPipelinesNV(layer_test_.device(), pipeline_cache_, 1, &rp_ci_, nullptr, &pipeline_);
- return err;
+ if (DeviceExtensionSupported(gpu(), nullptr, VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME)) {
+ m_device_extension_names.push_back(VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME);
+ } else {
+ printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME);
+ return;
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitState());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ const char tcs_src[] = R"(
+ #version 450
+ layout(vertices = 3) out;
+
+ void main() {
+ gl_TessLevelOuter[0] = 4.0f;
+ gl_TessLevelOuter[1] = 4.0f;
+ gl_TessLevelOuter[2] = 4.0f;
+ gl_TessLevelInner[0] = 3.0f;
+
+ gl_out[gl_InvocationID].gl_Position = gl_in[gl_InvocationID].gl_Position;
+ })";
+
+ const char fs_src[] = R"(
+ #version 450
+ layout(location = 0) out vec4 outColor;
+ void main() {
+ outColor = vec4(1.0f);
+ })";
+
+    // Create the tessellation control and fragment shaders here since they will not be
+    // modified by the different test cases.
+ VkShaderObj tcs(m_device, tcs_src, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, this);
+ VkShaderObj fs(m_device, fs_src, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
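+    // Two viewports and scissors splitting the render target into left and right halves.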
+ std::vector<VkViewport> vps = {{0.0f, 0.0f, m_width / 2.0f, m_height}, {m_width / 2.0f, 0.0f, m_width / 2.0f, m_height}};
+ std::vector<VkRect2D> scs = {
+ {{0, 0}, {static_cast<uint32_t>(m_width) / 2, static_cast<uint32_t>(m_height)}},
+ {{static_cast<int32_t>(m_width) / 2, 0}, {static_cast<uint32_t>(m_width) / 2, static_cast<uint32_t>(m_height)}}};
+
+ enum class TestStage { VERTEX = 0, TESSELLATION_EVAL = 1, GEOMETRY = 2 };
+ std::array<TestStage, 3> vertex_stages = {{TestStage::VERTEX, TestStage::TESSELLATION_EVAL, TestStage::GEOMETRY}};
+
+ // Verify that the usage of gl_ViewportMask[] in the allowed vertex processing
+ // stages does not cause any errors.
+ for (auto stage : vertex_stages) {
+ m_errorMonitor->ExpectSuccess();
+
+ VkPipelineInputAssemblyStateCreateInfo iaci = {VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO};
+ iaci.topology = (stage != TestStage::VERTEX) ? VK_PRIMITIVE_TOPOLOGY_PATCH_LIST : VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
+
+ VkPipelineTessellationStateCreateInfo tsci = {VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO};
+ tsci.patchControlPoints = 3;
+
+ const VkPipelineLayoutObj pl(m_device);
+
+ VkPipelineObj pipe(m_device);
+ pipe.AddDefaultColorAttachment();
+ pipe.SetInputAssembly(&iaci);
+ pipe.SetViewport(vps);
+ pipe.SetScissor(scs);
+ pipe.AddShader(&fs);
+
+ std::stringstream vs_src, tes_src, geom_src;
+
+ vs_src << R"(
+ #version 450
+ #extension GL_NV_viewport_array2 : require
+
+ vec2 positions[3] = { vec2( 0.0f, -0.5f),
+ vec2( 0.5f, 0.5f),
+ vec2(-0.5f, 0.5f)
+ };
+ void main() {)";
+ // Write viewportMask if the vertex shader is the last vertex processing stage.
+ if (stage == TestStage::VERTEX) {
+ vs_src << "gl_ViewportMask[0] = 3;\n";
+ }
+ vs_src << R"(
+ gl_Position = vec4(positions[gl_VertexIndex % 3], 0.0, 1.0);
+ })";
+
+ VkShaderObj vs(m_device, vs_src.str().c_str(), VK_SHADER_STAGE_VERTEX_BIT, this);
+ pipe.AddShader(&vs);
+
+ std::unique_ptr<VkShaderObj> tes, geom;
+
+ if (stage >= TestStage::TESSELLATION_EVAL) {
+ tes_src << R"(
+ #version 450
+ #extension GL_NV_viewport_array2 : require
+ layout(triangles) in;
+
+ void main() {
+ gl_Position = (gl_in[0].gl_Position * gl_TessCoord.x +
+ gl_in[1].gl_Position * gl_TessCoord.y +
+ gl_in[2].gl_Position * gl_TessCoord.z);)";
+ // Write viewportMask if the tess eval shader is the last vertex processing stage.
+ if (stage == TestStage::TESSELLATION_EVAL) {
+ tes_src << "gl_ViewportMask[0] = 3;\n";
+ }
+ tes_src << "}";
+
+ tes = std::unique_ptr<VkShaderObj>(
+ new VkShaderObj(m_device, tes_src.str().c_str(), VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, this));
+ pipe.AddShader(tes.get());
+ pipe.AddShader(&tcs);
+ pipe.SetTessellation(&tsci);
+ }
+
+ if (stage >= TestStage::GEOMETRY) {
+ geom_src << R"(
+ #version 450
+ #extension GL_NV_viewport_array2 : require
+ layout(triangles) in;
+ layout(triangle_strip, max_vertices = 3) out;
+
+ void main() {
+ gl_ViewportMask[0] = 3;
+ for(int i = 0; i < 3; ++i) {
+ gl_Position = gl_in[i].gl_Position;
+ EmitVertex();
+ }
+ })";
+
+ geom =
+ std::unique_ptr<VkShaderObj>(new VkShaderObj(m_device, geom_src.str().c_str(), VK_SHADER_STAGE_GEOMETRY_BIT, this));
+ pipe.AddShader(geom.get());
+ }
+
+ pipe.CreateVKPipeline(pl.handle(), renderPass());
+ m_errorMonitor->VerifyNotFound();
+ }
}
-namespace chain_util {
-const void *ExtensionChain::Head() const { return head_; }
-} // namespace chain_util
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+#include "android_ndk_types.h"
+
+TEST_F(VkLayerTest, AndroidHardwareBufferImageCreate) {
+ TEST_DESCRIPTION("Verify AndroidHardwareBuffer image create info.");
+
+ SetTargetApiVersion(VK_API_VERSION_1_1);
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+ if ((DeviceExtensionSupported(gpu(), nullptr, VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME)) &&
+ // Also skip on devices that advertise AHB, but not the pre-requisite foreign_queue extension
+ (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME))) {
+ m_device_extension_names.push_back(VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME);
+ } else {
+ printf("%s %s extension not supported, skipping tests\n", kSkipPrefix,
+ VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
+ return;
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitState());
+ VkDevice dev = m_device->device();
+
+ VkImage img = VK_NULL_HANDLE;
+ auto reset_img = [&img, dev]() {
+ if (VK_NULL_HANDLE != img) vkDestroyImage(dev, img, NULL);
+ img = VK_NULL_HANDLE;
+ };
-BarrierQueueFamilyTestHelper::QueueFamilyObjs::~QueueFamilyObjs() {
- delete command_buffer2;
- delete command_buffer;
- delete command_pool;
- delete queue;
+ VkImageCreateInfo ici = {};
+ ici.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ ici.pNext = nullptr;
+ ici.imageType = VK_IMAGE_TYPE_2D;
+ ici.arrayLayers = 1;
+ ici.extent = {64, 64, 1};
+ ici.format = VK_FORMAT_UNDEFINED;
+ ici.mipLevels = 1;
+ ici.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+ ici.samples = VK_SAMPLE_COUNT_1_BIT;
+ ici.tiling = VK_IMAGE_TILING_OPTIMAL;
+ ici.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
+
+ // undefined format
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-pNext-01975");
+ vkCreateImage(dev, &ici, NULL, &img);
+ m_errorMonitor->VerifyFound();
+ reset_img();
+
+ // also undefined format
+ VkExternalFormatANDROID efa = {};
+ efa.sType = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID;
+ efa.externalFormat = 0;
+ ici.pNext = &efa;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-pNext-01975");
+ vkCreateImage(dev, &ici, NULL, &img);
+ m_errorMonitor->VerifyFound();
+ reset_img();
+
+ // undefined format with an unknown external format
+ efa.externalFormat = 0xBADC0DE;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkExternalFormatANDROID-externalFormat-01894");
+ vkCreateImage(dev, &ici, NULL, &img);
+ m_errorMonitor->VerifyFound();
+ reset_img();
+
+ AHardwareBuffer *ahb;
+ AHardwareBuffer_Desc ahb_desc = {};
+ ahb_desc.format = AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM;
+ ahb_desc.usage = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
+ ahb_desc.width = 64;
+ ahb_desc.height = 64;
+ ahb_desc.layers = 1;
+ // Allocate an AHardwareBuffer
+ AHardwareBuffer_allocate(&ahb_desc, &ahb);
+
+    // Retrieve its properties to make its external format 'known' (AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM)
+ VkAndroidHardwareBufferFormatPropertiesANDROID ahb_fmt_props = {};
+ ahb_fmt_props.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID;
+ VkAndroidHardwareBufferPropertiesANDROID ahb_props = {};
+ ahb_props.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
+ ahb_props.pNext = &ahb_fmt_props;
+ PFN_vkGetAndroidHardwareBufferPropertiesANDROID pfn_GetAHBProps =
+ (PFN_vkGetAndroidHardwareBufferPropertiesANDROID)vkGetDeviceProcAddr(dev, "vkGetAndroidHardwareBufferPropertiesANDROID");
+ ASSERT_TRUE(pfn_GetAHBProps != nullptr);
+ pfn_GetAHBProps(dev, ahb, &ahb_props);
+
+ // a defined image format with a non-zero external format
+ ici.format = VK_FORMAT_R8G8B8A8_UNORM;
+ efa.externalFormat = AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-pNext-01974");
+ vkCreateImage(dev, &ici, NULL, &img);
+ m_errorMonitor->VerifyFound();
+ reset_img();
+ ici.format = VK_FORMAT_UNDEFINED;
+
+ // external format while MUTABLE
+ ici.flags = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-pNext-02396");
+ vkCreateImage(dev, &ici, NULL, &img);
+ m_errorMonitor->VerifyFound();
+ reset_img();
+ ici.flags = 0;
+
+ // external format while usage other than SAMPLED
+ ici.usage |= VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-pNext-02397");
+ vkCreateImage(dev, &ici, NULL, &img);
+ m_errorMonitor->VerifyFound();
+ reset_img();
+ ici.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
+
+    // external format while tiling other than OPTIMAL
+ ici.tiling = VK_IMAGE_TILING_LINEAR;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-pNext-02398");
+ vkCreateImage(dev, &ici, NULL, &img);
+ m_errorMonitor->VerifyFound();
+ reset_img();
+ ici.tiling = VK_IMAGE_TILING_OPTIMAL;
+
+    // imageType must be VK_IMAGE_TYPE_2D when the image is backed by an AndroidHardwareBuffer
+ VkExternalMemoryImageCreateInfo emici = {};
+ emici.sType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO;
+ emici.handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
+ ici.pNext = &emici; // remove efa from chain, insert emici
+ ici.format = VK_FORMAT_R8G8B8A8_UNORM;
+ ici.imageType = VK_IMAGE_TYPE_3D;
+ ici.extent = {64, 64, 64};
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-pNext-02393");
+ vkCreateImage(dev, &ici, NULL, &img);
+ m_errorMonitor->VerifyFound();
+ reset_img();
+
+ // wrong mipLevels
+ ici.imageType = VK_IMAGE_TYPE_2D;
+ ici.extent = {64, 64, 1};
+ ici.mipLevels = 6; // should be 7
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-pNext-02394");
+ vkCreateImage(dev, &ici, NULL, &img);
+ m_errorMonitor->VerifyFound();
+ reset_img();
}
-void BarrierQueueFamilyTestHelper::QueueFamilyObjs::Init(VkDeviceObj *device, uint32_t qf_index, VkQueue qf_queue,
- VkCommandPoolCreateFlags cp_flags) {
- index = qf_index;
- queue = new VkQueueObj(qf_queue, qf_index);
- command_pool = new VkCommandPoolObj(device, qf_index, cp_flags);
- command_buffer = new VkCommandBufferObj(device, command_pool, VK_COMMAND_BUFFER_LEVEL_PRIMARY, queue);
- command_buffer2 = new VkCommandBufferObj(device, command_pool, VK_COMMAND_BUFFER_LEVEL_PRIMARY, queue);
-};
+TEST_F(VkLayerTest, AndroidHardwareBufferFetchUnboundImageInfo) {
+ TEST_DESCRIPTION("Verify AndroidHardwareBuffer retreive image properties while memory unbound.");
+
+ SetTargetApiVersion(VK_API_VERSION_1_1);
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+ if ((DeviceExtensionSupported(gpu(), nullptr, VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME)) &&
+ // Also skip on devices that advertise AHB, but not the pre-requisite foreign_queue extension
+ (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME))) {
+ m_device_extension_names.push_back(VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME);
+ } else {
+ printf("%s %s extension not supported, skipping tests\n", kSkipPrefix,
+ VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
+ return;
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitState());
+ VkDevice dev = m_device->device();
+
+ VkImage img = VK_NULL_HANDLE;
+ auto reset_img = [&img, dev]() {
+ if (VK_NULL_HANDLE != img) vkDestroyImage(dev, img, NULL);
+ img = VK_NULL_HANDLE;
+ };
-BarrierQueueFamilyTestHelper::Context::Context(VkLayerTest *test, const std::vector<uint32_t> &queue_family_indices)
- : layer_test(test) {
- if (0 == queue_family_indices.size()) {
- return; // This is invalid
+ VkImageCreateInfo ici = {};
+ ici.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ ici.pNext = nullptr;
+ ici.imageType = VK_IMAGE_TYPE_2D;
+ ici.arrayLayers = 1;
+ ici.extent = {64, 64, 1};
+ ici.format = VK_FORMAT_R8G8B8A8_UNORM;
+ ici.mipLevels = 1;
+ ici.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+ ici.samples = VK_SAMPLE_COUNT_1_BIT;
+ ici.tiling = VK_IMAGE_TILING_LINEAR;
+ ici.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
+
+ VkExternalMemoryImageCreateInfo emici = {};
+ emici.sType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO;
+ emici.handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
+ ici.pNext = &emici;
+
+ m_errorMonitor->ExpectSuccess();
+ vkCreateImage(dev, &ici, NULL, &img);
+ m_errorMonitor->VerifyNotFound();
+
+ // attempt to fetch layout from unbound image
+ VkImageSubresource sub_rsrc = {};
+ sub_rsrc.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ VkSubresourceLayout sub_layout = {};
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkGetImageSubresourceLayout-image-01895");
+ vkGetImageSubresourceLayout(dev, img, &sub_rsrc, &sub_layout);
+ m_errorMonitor->VerifyFound();
+
+ // attempt to get memory reqs from unbound image
+ VkImageMemoryRequirementsInfo2 imri = {};
+ imri.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2;
+ imri.image = img;
+ VkMemoryRequirements2 mem_reqs = {};
+ mem_reqs.sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryRequirementsInfo2-image-01897");
+ vkGetImageMemoryRequirements2(dev, &imri, &mem_reqs);
+ m_errorMonitor->VerifyFound();
+
+ reset_img();
+}
+
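+// Illustrative sketch (not part of the checks above): the valid counterpart of the
+// unbound-image cases -- once AHB-backed memory is bound, the same queries are legal.
+// Assumes a linear-tiled image and an already-imported allocation `ahb_mem`.
+static inline void SketchQueryBoundAhbImage(VkDevice dev, VkImage img, VkDeviceMemory ahb_mem) {
+    vkBindImageMemory(dev, img, ahb_mem, 0);  // binding removes VUIDs 01895/01897 from play
+    VkImageSubresource sub_rsrc = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 0};
+    VkSubresourceLayout sub_layout = {};
+    vkGetImageSubresourceLayout(dev, img, &sub_rsrc, &sub_layout);
+    VkImageMemoryRequirementsInfo2 imri = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2, nullptr, img};
+    VkMemoryRequirements2 mem_reqs = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2};
+    vkGetImageMemoryRequirements2(dev, &imri, &mem_reqs);
+    (void)sub_layout;
+}
+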
+TEST_F(VkLayerTest, AndroidHardwareBufferMemoryAllocation) {
+ TEST_DESCRIPTION("Verify AndroidHardwareBuffer memory allocation.");
+
+ SetTargetApiVersion(VK_API_VERSION_1_1);
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+ if ((DeviceExtensionSupported(gpu(), nullptr, VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME)) &&
+ // Also skip on devices that advertise AHB, but not the pre-requisite foreign_queue extension
+ (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME))) {
+ m_device_extension_names.push_back(VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME);
+ } else {
+ printf("%s %s extension not supported, skipping tests\n", kSkipPrefix,
+ VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
+ return;
}
- VkDeviceObj *device_obj = layer_test->DeviceObj();
- queue_families.reserve(queue_family_indices.size());
- default_index = queue_family_indices[0];
- for (auto qfi : queue_family_indices) {
- VkQueue queue = device_obj->queue_family_queues(qfi)[0]->handle();
- queue_families.emplace(std::make_pair(qfi, QueueFamilyObjs()));
- queue_families[qfi].Init(device_obj, qfi, queue, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
+
+ ASSERT_NO_FATAL_FAILURE(InitState());
+ VkDevice dev = m_device->device();
+
+ VkImage img = VK_NULL_HANDLE;
+ auto reset_img = [&img, dev]() {
+ if (VK_NULL_HANDLE != img) vkDestroyImage(dev, img, NULL);
+ img = VK_NULL_HANDLE;
+ };
+ VkDeviceMemory mem_handle = VK_NULL_HANDLE;
+ auto reset_mem = [&mem_handle, dev]() {
+ if (VK_NULL_HANDLE != mem_handle) vkFreeMemory(dev, mem_handle, NULL);
+ mem_handle = VK_NULL_HANDLE;
+ };
+
+ PFN_vkGetAndroidHardwareBufferPropertiesANDROID pfn_GetAHBProps =
+ (PFN_vkGetAndroidHardwareBufferPropertiesANDROID)vkGetDeviceProcAddr(dev, "vkGetAndroidHardwareBufferPropertiesANDROID");
+ ASSERT_TRUE(pfn_GetAHBProps != nullptr);
+
+ // AHB structs
+ AHardwareBuffer *ahb = nullptr;
+ AHardwareBuffer_Desc ahb_desc = {};
+ VkAndroidHardwareBufferFormatPropertiesANDROID ahb_fmt_props = {};
+ ahb_fmt_props.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID;
+ VkAndroidHardwareBufferPropertiesANDROID ahb_props = {};
+ ahb_props.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
+ ahb_props.pNext = &ahb_fmt_props;
+ VkImportAndroidHardwareBufferInfoANDROID iahbi = {};
+ iahbi.sType = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID;
+
+    // Destroy and re-acquire an AHB, and fetch its properties
+ auto recreate_ahb = [&ahb, &iahbi, &ahb_desc, &ahb_props, dev, pfn_GetAHBProps]() {
+ if (ahb) AHardwareBuffer_release(ahb);
+ ahb = nullptr;
+ AHardwareBuffer_allocate(&ahb_desc, &ahb);
+ pfn_GetAHBProps(dev, ahb, &ahb_props);
+ iahbi.buffer = ahb;
+ };
+
+ // Allocate an AHardwareBuffer
+ ahb_desc.format = AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM;
+ ahb_desc.usage = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
+ ahb_desc.width = 64;
+ ahb_desc.height = 64;
+ ahb_desc.layers = 1;
+ recreate_ahb();
+
+ // Create an image w/ external format
+ VkExternalFormatANDROID efa = {};
+ efa.sType = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID;
+ efa.externalFormat = AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM;
+
+ VkImageCreateInfo ici = {};
+ ici.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ ici.pNext = &efa;
+ ici.imageType = VK_IMAGE_TYPE_2D;
+ ici.arrayLayers = 1;
+ ici.extent = {64, 64, 1};
+ ici.format = VK_FORMAT_UNDEFINED;
+ ici.mipLevels = 1;
+ ici.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+ ici.samples = VK_SAMPLE_COUNT_1_BIT;
+ ici.tiling = VK_IMAGE_TILING_OPTIMAL;
+ ici.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
+ VkResult res = vkCreateImage(dev, &ici, NULL, &img);
+ ASSERT_VK_SUCCESS(res);
+
+ VkMemoryAllocateInfo mai = {};
+ mai.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ mai.pNext = &iahbi; // Chained import struct
+ mai.allocationSize = ahb_props.allocationSize;
+ mai.memoryTypeIndex = 32;
+ // Set index to match one of the bits in ahb_props
+ for (int i = 0; i < 32; i++) {
+ if (ahb_props.memoryTypeBits & (1 << i)) {
+ mai.memoryTypeIndex = i;
+ break;
+ }
}
- Reset();
+ ASSERT_NE(32, mai.memoryTypeIndex);
+
+ // Import w/ non-dedicated memory allocation
+
+ // Import requires format AHB_FMT_BLOB and usage AHB_USAGE_GPU_DATA_BUFFER
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMemoryAllocateInfo-pNext-02384");
+ vkAllocateMemory(dev, &mai, NULL, &mem_handle);
+ m_errorMonitor->VerifyFound();
+ reset_mem();
+
+ // Allocation size mismatch
+ ahb_desc.format = AHARDWAREBUFFER_FORMAT_BLOB;
+ ahb_desc.usage = AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER | AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
+ recreate_ahb();
+ mai.allocationSize = ahb_props.allocationSize + 1;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMemoryAllocateInfo-allocationSize-02383");
+ vkAllocateMemory(dev, &mai, NULL, &mem_handle);
+ m_errorMonitor->VerifyFound();
+ mai.allocationSize = ahb_props.allocationSize;
+ reset_mem();
+
+ // memoryTypeIndex mismatch
+ mai.memoryTypeIndex++;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMemoryAllocateInfo-memoryTypeIndex-02385");
+ vkAllocateMemory(dev, &mai, NULL, &mem_handle);
+ m_errorMonitor->VerifyFound();
+ mai.memoryTypeIndex--;
+ reset_mem();
+
+ // Insert dedicated image memory allocation to mai chain
+ VkMemoryDedicatedAllocateInfo mdai = {};
+ mdai.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO;
+ mdai.image = img;
+ mdai.buffer = VK_NULL_HANDLE;
+ mdai.pNext = mai.pNext;
+ mai.pNext = &mdai;
+
+ // Dedicated allocation with unmatched usage bits
+ ahb_desc.format = AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM;
+ ahb_desc.usage = AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT;
+ recreate_ahb();
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMemoryAllocateInfo-pNext-02390");
+ vkAllocateMemory(dev, &mai, NULL, &mem_handle);
+ m_errorMonitor->VerifyFound();
+ reset_mem();
+
+ // Dedicated allocation with incomplete mip chain
+ reset_img();
+ ici.mipLevels = 2;
+ vkCreateImage(dev, &ici, NULL, &img);
+ mdai.image = img;
+ ahb_desc.usage = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE | AHARDWAREBUFFER_USAGE_GPU_MIPMAP_COMPLETE;
+ recreate_ahb();
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMemoryAllocateInfo-pNext-02389");
+ vkAllocateMemory(dev, &mai, NULL, &mem_handle);
+ m_errorMonitor->VerifyFound();
+ reset_mem();
+
+    // Dedicated allocation with mismatched dimensions
+ ahb_desc.usage = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
+ ahb_desc.height = 32;
+ ahb_desc.width = 128;
+ recreate_ahb();
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMemoryAllocateInfo-pNext-02388");
+ vkAllocateMemory(dev, &mai, NULL, &mem_handle);
+ m_errorMonitor->VerifyFound();
+ reset_mem();
+
+    // Dedicated allocation with mismatched VkFormat
+ ahb_desc.usage = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
+ ahb_desc.height = 64;
+ ahb_desc.width = 64;
+ recreate_ahb();
+ ici.mipLevels = 1;
+ ici.format = VK_FORMAT_B8G8R8A8_UNORM;
+ ici.pNext = NULL;
+ VkImage img2;
+ vkCreateImage(dev, &ici, NULL, &img2);
+ mdai.image = img2;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMemoryAllocateInfo-pNext-02387");
+ vkAllocateMemory(dev, &mai, NULL, &mem_handle);
+ m_errorMonitor->VerifyFound();
+ vkDestroyImage(dev, img2, NULL);
+ mdai.image = img;
+ reset_mem();
+
+ // Missing required ahb usage
+ ahb_desc.usage = AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkGetAndroidHardwareBufferPropertiesANDROID-buffer-01884");
+ recreate_ahb();
+ m_errorMonitor->VerifyFound();
+
+ // Dedicated allocation with missing usage bits
+ // Setting up this test also triggers a slew of others
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMemoryAllocateInfo-pNext-02390");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMemoryAllocateInfo-memoryTypeIndex-02385");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMemoryAllocateInfo-allocationSize-02383");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkGetAndroidHardwareBufferPropertiesANDROID-buffer-01884");
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMemoryAllocateInfo-pNext-02386");
+ vkAllocateMemory(dev, &mai, NULL, &mem_handle);
+ m_errorMonitor->VerifyFound();
+ reset_mem();
+
+ // Non-import allocation - replace import struct in chain with export struct
+ VkExportMemoryAllocateInfo emai = {};
+ emai.sType = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO;
+ emai.handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
+ mai.pNext = &emai;
+ emai.pNext = &mdai; // still dedicated
+ mdai.pNext = nullptr;
+
+ // Export with allocation size non-zero
+ ahb_desc.usage = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
+ recreate_ahb();
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMemoryAllocateInfo-pNext-01874");
+ vkAllocateMemory(dev, &mai, NULL, &mem_handle);
+ m_errorMonitor->VerifyFound();
+ reset_mem();
+
+ AHardwareBuffer_release(ahb);
+ reset_mem();
+ reset_img();
}
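+
+// Illustrative sketch of the well-formed dedicated import that the negative cases above
+// keep perturbing: size, memoryTypeIndex, format, dimensions, and usage all taken from the
+// queried AHB properties. Assumes a compatible image `img` and a queried `ahb_props`.
+static inline VkResult SketchDedicatedAhbImport(VkDevice dev, VkImage img, AHardwareBuffer *ahb,
+                                                const VkAndroidHardwareBufferPropertiesANDROID &ahb_props,
+                                                VkDeviceMemory *out_mem) {
+    VkImportAndroidHardwareBufferInfoANDROID import_info = {VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID};
+    import_info.buffer = ahb;
+    VkMemoryDedicatedAllocateInfo dedicated = {VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO};
+    dedicated.pNext = &import_info;
+    dedicated.image = img;  // image must match the AHB's format, dimensions, mip chain, and usage
+    VkMemoryAllocateInfo alloc_info = {VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO};
+    alloc_info.pNext = &dedicated;
+    alloc_info.allocationSize = ahb_props.allocationSize;  // must equal the reported size (02383)
+    uint32_t index = 0;
+    while (index < 32 && !(ahb_props.memoryTypeBits & (1u << index))) ++index;  // pick a reported type (02385)
+    alloc_info.memoryTypeIndex = index;
+    return vkAllocateMemory(dev, &alloc_info, nullptr, out_mem);
+}
+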
-void BarrierQueueFamilyTestHelper::Context::Reset() {
- layer_test->DeviceObj()->wait();
- for (auto &qf : queue_families) {
- vkResetCommandPool(layer_test->device(), qf.second.command_pool->handle(), 0);
+TEST_F(VkLayerTest, AndroidHardwareBufferCreateYCbCrSampler) {
+ TEST_DESCRIPTION("Verify AndroidHardwareBuffer YCbCr sampler creation.");
+
+ SetTargetApiVersion(VK_API_VERSION_1_1);
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+ if ((DeviceExtensionSupported(gpu(), nullptr, VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME)) &&
+ // Also skip on devices that advertise AHB, but not the pre-requisite foreign_queue extension
+ (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME))) {
+ m_device_extension_names.push_back(VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME);
+ } else {
+ printf("%s %s extension not supported, skipping tests\n", kSkipPrefix,
+ VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
+ return;
}
+
+ ASSERT_NO_FATAL_FAILURE(InitState());
+ VkDevice dev = m_device->device();
+
+ VkSamplerYcbcrConversion ycbcr_conv = VK_NULL_HANDLE;
+ VkSamplerYcbcrConversionCreateInfo sycci = {};
+ sycci.sType = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO;
+ sycci.format = VK_FORMAT_UNDEFINED;
+ sycci.ycbcrModel = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY;
+ sycci.ycbcrRange = VK_SAMPLER_YCBCR_RANGE_ITU_FULL;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkSamplerYcbcrConversionCreateInfo-format-01904");
+ vkCreateSamplerYcbcrConversion(dev, &sycci, NULL, &ycbcr_conv);
+ m_errorMonitor->VerifyFound();
+
+ VkExternalFormatANDROID efa = {};
+ efa.sType = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID;
+ efa.externalFormat = AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM;
+ sycci.format = VK_FORMAT_R8G8B8A8_UNORM;
+ sycci.pNext = &efa;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkSamplerYcbcrConversionCreateInfo-format-01904");
+ vkCreateSamplerYcbcrConversion(dev, &sycci, NULL, &ycbcr_conv);
+ m_errorMonitor->VerifyFound();
}
-BarrierQueueFamilyTestHelper::BarrierQueueFamilyTestHelper(Context *context)
- : context_(context), image_(context->layer_test->DeviceObj()) {}
+TEST_F(VkLayerTest, AndroidHardwareBufferPhysDevImageFormatProp2) {
+ TEST_DESCRIPTION("Verify AndroidHardwareBuffer GetPhysicalDeviceImageFormatProperties.");
+
+ SetTargetApiVersion(VK_API_VERSION_1_1);
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+ if ((DeviceExtensionSupported(gpu(), nullptr, VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME)) &&
+ // Also skip on devices that advertise AHB, but not the pre-requisite foreign_queue extension
+ (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME))) {
+ m_device_extension_names.push_back(VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME);
+ } else {
+ printf("%s %s extension not supported, skipping test\n", kSkipPrefix,
+ VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
+ return;
+ }
-void BarrierQueueFamilyTestHelper::Init(std::vector<uint32_t> *families, bool image_memory, bool buffer_memory) {
- VkDeviceObj *device_obj = context_->layer_test->DeviceObj();
+ ASSERT_NO_FATAL_FAILURE(InitState());
- image_.Init(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0, families,
- image_memory);
+ if ((m_instance_api_version < VK_API_VERSION_1_1) &&
+ !InstanceExtensionEnabled(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+ printf("%s %s extension not supported, skipping test\n", kSkipPrefix,
+ VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ return;
+ }
- ASSERT_TRUE(image_.initialized());
+ VkImageFormatProperties2 ifp = {};
+ ifp.sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2;
+ VkPhysicalDeviceImageFormatInfo2 pdifi = {};
+ pdifi.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2;
+ pdifi.format = VK_FORMAT_R8G8B8A8_UNORM;
+ pdifi.tiling = VK_IMAGE_TILING_OPTIMAL;
+ pdifi.type = VK_IMAGE_TYPE_2D;
+ pdifi.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+ VkAndroidHardwareBufferUsageANDROID ahbu = {};
+ ahbu.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID;
+ ahbu.androidHardwareBufferUsage = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
+ ifp.pNext = &ahbu;
+
+    // AHB_usage chained to input without a matching external image format struct chained to output
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkGetPhysicalDeviceImageFormatProperties2-pNext-01868");
+ vkGetPhysicalDeviceImageFormatProperties2(m_device->phy().handle(), &pdifi, &ifp);
+ m_errorMonitor->VerifyFound();
+
+    // External image format info chained to the input, but its handleType is not the AHB handle type
+ VkPhysicalDeviceExternalImageFormatInfo pdeifi = {};
+ pdeifi.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO;
+ pdeifi.handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT;
+ pdifi.pNext = &pdeifi;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-vkGetPhysicalDeviceImageFormatProperties2-pNext-01868");
+ vkGetPhysicalDeviceImageFormatProperties2(m_device->phy().handle(), &pdifi, &ifp);
+ m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, AndroidHardwareBufferCreateImageView) {
+ TEST_DESCRIPTION("Verify AndroidHardwareBuffer image view creation.");
+
+ SetTargetApiVersion(VK_API_VERSION_1_1);
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+ if ((DeviceExtensionSupported(gpu(), nullptr, VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME)) &&
+ // Also skip on devices that advertise AHB, but not the pre-requisite foreign_queue extension
+ (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME))) {
+ m_device_extension_names.push_back(VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME);
+ } else {
+ printf("%s %s extension not supported, skipping tests\n", kSkipPrefix,
+ VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
+ return;
+ }
- image_barrier_ = image_.image_memory_barrier(VK_ACCESS_TRANSFER_READ_BIT, VK_ACCESS_TRANSFER_READ_BIT, image_.Layout(),
- image_.Layout(), image_.subresource_range(VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1));
+ ASSERT_NO_FATAL_FAILURE(InitState());
+ VkDevice dev = m_device->device();
+
+ // Expect no validation errors during setup
+ m_errorMonitor->ExpectSuccess();
+
+ // Allocate an AHB and fetch its properties
+ AHardwareBuffer *ahb = nullptr;
+ AHardwareBuffer_Desc ahb_desc = {};
+ ahb_desc.format = AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM;
+ ahb_desc.usage = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
+ ahb_desc.width = 64;
+ ahb_desc.height = 64;
+ ahb_desc.layers = 1;
+ AHardwareBuffer_allocate(&ahb_desc, &ahb);
+
+    // Retrieve AHB properties to make its external format 'known'
+ VkAndroidHardwareBufferFormatPropertiesANDROID ahb_fmt_props = {};
+ ahb_fmt_props.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID;
+ VkAndroidHardwareBufferPropertiesANDROID ahb_props = {};
+ ahb_props.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
+ ahb_props.pNext = &ahb_fmt_props;
+ PFN_vkGetAndroidHardwareBufferPropertiesANDROID pfn_GetAHBProps =
+ (PFN_vkGetAndroidHardwareBufferPropertiesANDROID)vkGetDeviceProcAddr(dev, "vkGetAndroidHardwareBufferPropertiesANDROID");
+ ASSERT_TRUE(pfn_GetAHBProps != nullptr);
+ pfn_GetAHBProps(dev, ahb, &ahb_props);
+ AHardwareBuffer_release(ahb);
+
+ // Give image an external format
+ VkExternalFormatANDROID efa = {};
+ efa.sType = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID;
+ efa.externalFormat = AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM;
+
+ // Create the image
+ VkImage img = VK_NULL_HANDLE;
+ VkImageCreateInfo ici = {};
+ ici.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ ici.pNext = &efa;
+ ici.imageType = VK_IMAGE_TYPE_2D;
+ ici.arrayLayers = 1;
+ ici.extent = {64, 64, 1};
+ ici.format = VK_FORMAT_UNDEFINED;
+ ici.mipLevels = 1;
+ ici.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+ ici.samples = VK_SAMPLE_COUNT_1_BIT;
+ ici.tiling = VK_IMAGE_TILING_OPTIMAL;
+ ici.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
+ vkCreateImage(dev, &ici, NULL, &img);
+
+ // Set up memory allocation
+ VkDeviceMemory img_mem = VK_NULL_HANDLE;
+ VkMemoryAllocateInfo mai = {};
+ mai.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ mai.allocationSize = 64 * 64 * 4;
+ mai.memoryTypeIndex = 0;
+ vkAllocateMemory(dev, &mai, NULL, &img_mem);
+
+ // Bind image to memory
+ vkBindImageMemory(dev, img, img_mem, 0);
+
+ // Create a YCbCr conversion, with different external format, chain to view
+ VkSamplerYcbcrConversion ycbcr_conv = VK_NULL_HANDLE;
+ VkSamplerYcbcrConversionCreateInfo sycci = {};
+ efa.externalFormat = AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM;
+ sycci.sType = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO;
+ sycci.pNext = &efa;
+ sycci.format = VK_FORMAT_UNDEFINED;
+ sycci.ycbcrModel = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY;
+ sycci.ycbcrRange = VK_SAMPLER_YCBCR_RANGE_ITU_FULL;
+ vkCreateSamplerYcbcrConversion(dev, &sycci, NULL, &ycbcr_conv);
+ VkSamplerYcbcrConversionInfo syci = {};
+ syci.sType = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO;
+ syci.conversion = ycbcr_conv;
+
+ // Create a view
+ VkImageView image_view = VK_NULL_HANDLE;
+ VkImageViewCreateInfo ivci = {};
+ ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+ ivci.pNext = &syci;
+ ivci.image = img;
+ ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+ ivci.format = VK_FORMAT_UNDEFINED;
+ ivci.subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
+
+ auto reset_view = [&image_view, dev]() {
+ if (VK_NULL_HANDLE != image_view) vkDestroyImageView(dev, image_view, NULL);
+ image_view = VK_NULL_HANDLE;
+ };
- VkMemoryPropertyFlags mem_prop = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
- buffer_.init_as_src_and_dst(*device_obj, 256, mem_prop, families, buffer_memory);
- ASSERT_TRUE(buffer_.initialized());
- buffer_barrier_ = buffer_.buffer_memory_barrier(VK_ACCESS_TRANSFER_READ_BIT, VK_ACCESS_TRANSFER_READ_BIT, 0, VK_WHOLE_SIZE);
+ // Up to this point, no errors expected
+ m_errorMonitor->VerifyNotFound();
+
+ // Chained ycbcr conversion has different (external) format than image
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageViewCreateInfo-image-02400");
+ // Also causes "unsupported format" - should be removed in future spec update
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageViewCreateInfo-None-02273");
+ vkCreateImageView(dev, &ivci, NULL, &image_view);
+ m_errorMonitor->VerifyFound();
+
+ reset_view();
+ vkDestroySamplerYcbcrConversion(dev, ycbcr_conv, NULL);
+ efa.externalFormat = AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM;
+ vkCreateSamplerYcbcrConversion(dev, &sycci, NULL, &ycbcr_conv);
+ syci.conversion = ycbcr_conv;
+
+ // View component swizzle not IDENTITY
+ ivci.components.r = VK_COMPONENT_SWIZZLE_B;
+ ivci.components.b = VK_COMPONENT_SWIZZLE_R;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageViewCreateInfo-image-02401");
+ // Also causes "unsupported format" - should be removed in future spec update
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageViewCreateInfo-None-02273");
+ vkCreateImageView(dev, &ivci, NULL, &image_view);
+ m_errorMonitor->VerifyFound();
+
+ reset_view();
+ ivci.components.r = VK_COMPONENT_SWIZZLE_IDENTITY;
+ ivci.components.b = VK_COMPONENT_SWIZZLE_IDENTITY;
+
+ // View with external format, when format is not UNDEFINED
+ ivci.format = VK_FORMAT_R5G6B5_UNORM_PACK16;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageViewCreateInfo-image-02399");
+ // Also causes "view format different from image format"
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageViewCreateInfo-image-01019");
+ vkCreateImageView(dev, &ivci, NULL, &image_view);
+ m_errorMonitor->VerifyFound();
+
+ reset_view();
+ vkDestroySamplerYcbcrConversion(dev, ycbcr_conv, NULL);
+ vkDestroyImageView(dev, image_view, NULL);
+ vkDestroyImage(dev, img, NULL);
+ vkFreeMemory(dev, img_mem, NULL);
}
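+
+// Illustrative sketch of a view that satisfies the three checks above: the conversion's
+// external format matches the image, all component swizzles are IDENTITY, and the view
+// format stays VK_FORMAT_UNDEFINED. Assumes `conv` was created with the image's external format.
+static inline VkResult SketchExternalFormatView(VkDevice dev, VkImage img, VkSamplerYcbcrConversion conv,
+                                                VkImageView *out_view) {
+    VkSamplerYcbcrConversionInfo conv_info = {VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO};
+    conv_info.conversion = conv;
+    VkImageViewCreateInfo view_ci = {VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO};
+    view_ci.pNext = &conv_info;
+    view_ci.image = img;
+    view_ci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+    view_ci.format = VK_FORMAT_UNDEFINED;  // external-format images require VK_FORMAT_UNDEFINED (02399)
+    view_ci.components = {VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY,
+                          VK_COMPONENT_SWIZZLE_IDENTITY};  // non-identity swizzle is rejected (02401)
+    view_ci.subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
+    return vkCreateImageView(dev, &view_ci, nullptr, out_view);
+}
+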
-BarrierQueueFamilyTestHelper::QueueFamilyObjs *BarrierQueueFamilyTestHelper::GetQueueFamilyInfo(Context *context, uint32_t qfi) {
- QueueFamilyObjs *qf;
+TEST_F(VkLayerTest, AndroidHardwareBufferImportBuffer) {
+ TEST_DESCRIPTION("Verify AndroidHardwareBuffer import as buffer.");
+
+ SetTargetApiVersion(VK_API_VERSION_1_1);
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+ if ((DeviceExtensionSupported(gpu(), nullptr, VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME)) &&
+ // Also skip on devices that advertise AHB, but not the pre-requisite foreign_queue extension
+ (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME))) {
+ m_device_extension_names.push_back(VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME);
+ } else {
+ printf("%s %s extension not supported, skipping tests\n", kSkipPrefix,
+ VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
+ return;
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitState());
+ VkDevice dev = m_device->device();
- auto qf_it = context->queue_families.find(qfi);
- if (qf_it != context->queue_families.end()) {
- qf = &(qf_it->second);
+ VkDeviceMemory mem_handle = VK_NULL_HANDLE;
+ auto reset_mem = [&mem_handle, dev]() {
+ if (VK_NULL_HANDLE != mem_handle) vkFreeMemory(dev, mem_handle, NULL);
+ mem_handle = VK_NULL_HANDLE;
+ };
+
+ PFN_vkGetAndroidHardwareBufferPropertiesANDROID pfn_GetAHBProps =
+ (PFN_vkGetAndroidHardwareBufferPropertiesANDROID)vkGetDeviceProcAddr(dev, "vkGetAndroidHardwareBufferPropertiesANDROID");
+ ASSERT_TRUE(pfn_GetAHBProps != nullptr);
+
+ // AHB structs
+ AHardwareBuffer *ahb = nullptr;
+ AHardwareBuffer_Desc ahb_desc = {};
+ VkAndroidHardwareBufferPropertiesANDROID ahb_props = {};
+ ahb_props.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
+ VkImportAndroidHardwareBufferInfoANDROID iahbi = {};
+ iahbi.sType = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID;
+
+ // Allocate an AHardwareBuffer
+ ahb_desc.format = AHARDWAREBUFFER_FORMAT_BLOB;
+ ahb_desc.usage = AHARDWAREBUFFER_USAGE_GPU_MIPMAP_COMPLETE;
+ ahb_desc.width = 512;
+ ahb_desc.height = 1;
+ ahb_desc.layers = 1;
+ AHardwareBuffer_allocate(&ahb_desc, &ahb);
+ pfn_GetAHBProps(dev, ahb, &ahb_props);
+ iahbi.buffer = ahb;
+
+ // Create export and import buffers
+ VkExternalMemoryBufferCreateInfo ext_buf_info = {};
+ ext_buf_info.sType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR;
+ ext_buf_info.handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT;
+
+ VkBufferCreateInfo bci = {};
+ bci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+ bci.pNext = &ext_buf_info;
+ bci.size = ahb_props.allocationSize;
+ bci.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
+
+ VkBuffer buf = VK_NULL_HANDLE;
+ vkCreateBuffer(dev, &bci, NULL, &buf);
+ VkMemoryRequirements mem_reqs;
+ vkGetBufferMemoryRequirements(dev, buf, &mem_reqs);
+
+ // Allocation info
+ VkMemoryAllocateInfo mai = vk_testing::DeviceMemory::get_resource_alloc_info(*m_device, mem_reqs, 0);
+ mai.pNext = &iahbi; // Chained import struct
+
+ // Import as buffer requires format AHB_FMT_BLOB and usage AHB_USAGE_GPU_DATA_BUFFER
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkImportAndroidHardwareBufferInfoANDROID-buffer-01881");
+ // Also causes "non-dedicated allocation format/usage" error
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMemoryAllocateInfo-pNext-02384");
+ vkAllocateMemory(dev, &mai, NULL, &mem_handle);
+ m_errorMonitor->VerifyFound();
+
+ AHardwareBuffer_release(ahb);
+ reset_mem();
+ vkDestroyBuffer(dev, buf, NULL);
+}
+
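+// Illustrative sketch of the AHB-as-buffer combination the 01881 check above demands:
+// the AHardwareBuffer must be allocated with format BLOB and usage GPU_DATA_BUFFER before
+// its memory can be imported for a VkBuffer. The subsequent import mirrors the setup above.
+static inline void SketchBlobAhbDesc(AHardwareBuffer_Desc *desc, uint32_t byte_size) {
+    *desc = {};
+    desc->format = AHARDWAREBUFFER_FORMAT_BLOB;           // required for buffer import (01881)
+    desc->usage = AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER;  // required for buffer import (01881)
+    desc->width = byte_size;                              // BLOB buffers are sized by width
+    desc->height = 1;
+    desc->layers = 1;
+}
+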
+TEST_F(VkLayerTest, AndroidHardwareBufferExportBuffer) {
+    TEST_DESCRIPTION("Verify exporting device memory as an AndroidHardwareBuffer.");
+
+ SetTargetApiVersion(VK_API_VERSION_1_1);
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+ if ((DeviceExtensionSupported(gpu(), nullptr, VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME)) &&
+ // Also skip on devices that advertise AHB, but not the pre-requisite foreign_queue extension
+ (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME))) {
+ m_device_extension_names.push_back(VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
+ m_device_extension_names.push_back(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME);
} else {
- qf = &(context->queue_families[context->default_index]);
+ printf("%s %s extension not supported, skipping tests\n", kSkipPrefix,
+ VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
+ return;
}
- return qf;
+
+ ASSERT_NO_FATAL_FAILURE(InitState());
+ VkDevice dev = m_device->device();
+
+ VkDeviceMemory mem_handle = VK_NULL_HANDLE;
+
+ // Allocate device memory, no linked export struct indicating AHB handle type
+ VkMemoryAllocateInfo mai = {};
+ mai.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+ mai.allocationSize = 65536;
+ mai.memoryTypeIndex = 0;
+ vkAllocateMemory(dev, &mai, NULL, &mem_handle);
+
+ PFN_vkGetMemoryAndroidHardwareBufferANDROID pfn_GetMemAHB =
+ (PFN_vkGetMemoryAndroidHardwareBufferANDROID)vkGetDeviceProcAddr(dev, "vkGetMemoryAndroidHardwareBufferANDROID");
+ ASSERT_TRUE(pfn_GetMemAHB != nullptr);
+
+ VkMemoryGetAndroidHardwareBufferInfoANDROID mgahbi = {};
+ mgahbi.sType = VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID;
+ mgahbi.memory = mem_handle;
+ AHardwareBuffer *ahb = nullptr;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-handleTypes-01882");
+ pfn_GetMemAHB(dev, &mgahbi, &ahb);
+ m_errorMonitor->VerifyFound();
+
+ if (ahb) AHardwareBuffer_release(ahb);
+ ahb = nullptr;
+ if (VK_NULL_HANDLE != mem_handle) vkFreeMemory(dev, mem_handle, NULL);
+ mem_handle = VK_NULL_HANDLE;
+
+ // Add an export struct with AHB handle type to allocation info
+ VkExportMemoryAllocateInfo emai = {};
+ emai.sType = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO;
+ emai.handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
+ mai.pNext = &emai;
+
+ // Create an image, do not bind memory
+ VkImage img = VK_NULL_HANDLE;
+ VkImageCreateInfo ici = {};
+ ici.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ ici.imageType = VK_IMAGE_TYPE_2D;
+ ici.arrayLayers = 1;
+ ici.extent = {128, 128, 1};
+ ici.format = VK_FORMAT_R8G8B8A8_UNORM;
+ ici.mipLevels = 1;
+ ici.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+ ici.samples = VK_SAMPLE_COUNT_1_BIT;
+ ici.tiling = VK_IMAGE_TILING_OPTIMAL;
+ ici.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
+ vkCreateImage(dev, &ici, NULL, &img);
+ ASSERT_TRUE(VK_NULL_HANDLE != img);
+
+ // Add image to allocation chain as dedicated info, re-allocate
+ VkMemoryDedicatedAllocateInfo mdai = {VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO};
+ mdai.image = img;
+ emai.pNext = &mdai;
+ mai.allocationSize = 0;
+ vkAllocateMemory(dev, &mai, NULL, &mem_handle);
+ mgahbi.memory = mem_handle;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-pNext-01883");
+ pfn_GetMemAHB(dev, &mgahbi, &ahb);
+ m_errorMonitor->VerifyFound();
+
+ if (ahb) AHardwareBuffer_release(ahb);
+ if (VK_NULL_HANDLE != mem_handle) vkFreeMemory(dev, mem_handle, NULL);
+ vkDestroyImage(dev, img, NULL);
}
-void BarrierQueueFamilyTestHelper::operator()(std::string img_err, std::string buf_err, uint32_t src, uint32_t dst, bool positive,
- uint32_t queue_family_index, Modifier mod) {
- auto monitor = context_->layer_test->Monitor();
- if (img_err.length()) monitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT, img_err);
- if (buf_err.length()) monitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT, buf_err);
+#endif // VK_USE_PLATFORM_ANDROID_KHR
- image_barrier_.srcQueueFamilyIndex = src;
- image_barrier_.dstQueueFamilyIndex = dst;
- buffer_barrier_.srcQueueFamilyIndex = src;
- buffer_barrier_.dstQueueFamilyIndex = dst;
+TEST_F(VkLayerTest, ViewportSwizzleNV) {
+    TEST_DESCRIPTION("Verify VK_NV_viewport_swizzle.");
- QueueFamilyObjs *qf = GetQueueFamilyInfo(context_, queue_family_index);
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- VkCommandBufferObj *command_buffer = qf->command_buffer;
- for (int cb_repeat = 0; cb_repeat < (mod == Modifier::DOUBLE_COMMAND_BUFFER ? 2 : 1); cb_repeat++) {
- command_buffer->begin();
- for (int repeat = 0; repeat < (mod == Modifier::DOUBLE_RECORD ? 2 : 1); repeat++) {
- vkCmdPipelineBarrier(command_buffer->handle(), VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
- VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 1, &buffer_barrier_, 1, &image_barrier_);
- }
- command_buffer->end();
- command_buffer = qf->command_buffer2; // Second pass (if any) goes to the secondary command_buffer.
+ if (DeviceExtensionSupported(gpu(), nullptr, VK_NV_VIEWPORT_SWIZZLE_EXTENSION_NAME)) {
+ m_device_extension_names.push_back(VK_NV_VIEWPORT_SWIZZLE_EXTENSION_NAME);
+ } else {
+ printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_NV_VIEWPORT_SWIZZLE_EXTENSION_NAME);
+ return;
+ }
+
+ ASSERT_NO_FATAL_FAILURE(InitState());
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ VkViewportSwizzleNV invalid_swizzles = {
+ VkViewportCoordinateSwizzleNV(-1),
+ VkViewportCoordinateSwizzleNV(-1),
+ VkViewportCoordinateSwizzleNV(-1),
+ VkViewportCoordinateSwizzleNV(-1),
+ };
+
+ VkPipelineViewportSwizzleStateCreateInfoNV vp_swizzle_state = {
+ VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV};
+ vp_swizzle_state.viewportCount = 1;
+ vp_swizzle_state.pViewportSwizzles = &invalid_swizzles;
+
+ const std::vector<std::string> expected_vuids = {"VUID-VkViewportSwizzleNV-x-parameter", "VUID-VkViewportSwizzleNV-y-parameter",
+ "VUID-VkViewportSwizzleNV-z-parameter",
+ "VUID-VkViewportSwizzleNV-w-parameter"};
+
+ auto break_swizzles = [&vp_swizzle_state](CreatePipelineHelper &helper) { helper.vp_state_ci_.pNext = &vp_swizzle_state; };
+
+ CreatePipelineHelper::OneshotTest(*this, break_swizzles, VK_DEBUG_REPORT_ERROR_BIT_EXT, expected_vuids);
+
+ struct TestCase {
+ VkBool32 rasterizerDiscardEnable;
+ uint32_t vp_count;
+        uint32_t swizzle_vp_count;
+ bool positive;
+ };
+
+ const std::array<TestCase, 3> test_cases = {{{VK_TRUE, 1, 2, true}, {VK_FALSE, 1, 1, true}, {VK_FALSE, 1, 2, false}}};
+
+ std::array<VkViewportSwizzleNV, 2> swizzles = {
+ {{VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV, VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV,
+ VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV, VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV},
+ {VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV, VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV,
+ VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV, VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV}}};
+
+ for (const auto &test_case : test_cases) {
+ assert(test_case.vp_count <= swizzles.size());
+
+        vp_swizzle_state.viewportCount = test_case.swizzle_vp_count;
+ vp_swizzle_state.pViewportSwizzles = swizzles.data();
+
+ auto break_vp_count = [&vp_swizzle_state, &test_case](CreatePipelineHelper &helper) {
+ helper.rs_state_ci_.rasterizerDiscardEnable = test_case.rasterizerDiscardEnable;
+ helper.vp_state_ci_.viewportCount = test_case.vp_count;
+
+ helper.vp_state_ci_.pNext = &vp_swizzle_state;
+ };
+
+ CreatePipelineHelper::OneshotTest(*this, break_vp_count, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ "VUID-VkPipelineViewportSwizzleStateCreateInfoNV-viewportCount-01215",
+ test_case.positive);
}
+}
+
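+// Illustrative sketch of the well-formed swizzle state the loop above perturbs: every
+// member is a valid VkViewportCoordinateSwizzleNV and viewportCount matches the pipeline's
+// viewport state (unless rasterization is discarded), which is what 01215 requires.
+static inline VkPipelineViewportSwizzleStateCreateInfoNV SketchSwizzleState(const VkViewportSwizzleNV *swizzles,
+                                                                            uint32_t matching_viewport_count) {
+    VkPipelineViewportSwizzleStateCreateInfoNV state = {VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV};
+    state.viewportCount = matching_viewport_count;  // must equal VkPipelineViewportStateCreateInfo::viewportCount
+    state.pViewportSwizzles = swizzles;             // one valid swizzle per viewport
+    return state;
+}
+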
+TEST_F(VkLayerTest, BufferDeviceAddressEXT) {
+ TEST_DESCRIPTION("Test VK_EXT_buffer_device_address.");
- if (queue_family_index != kInvalidQueueFamily) {
- if (mod == Modifier::DOUBLE_COMMAND_BUFFER) {
- // the Fence resolves to VK_NULL_HANLE... i.e. no fence
- qf->queue->submit({{qf->command_buffer, qf->command_buffer2}}, vk_testing::Fence(), positive);
+ if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ } else {
+ printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
+ VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ std::array<const char *, 1> required_device_extensions = {{VK_EXT_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME}};
+ for (auto device_extension : required_device_extensions) {
+ if (DeviceExtensionSupported(gpu(), nullptr, device_extension)) {
+ m_device_extension_names.push_back(device_extension);
} else {
- qf->command_buffer->QueueCommandBuffer(positive); // Check for success on positive tests only
+ printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, device_extension);
+ return;
}
}
- if (positive) {
- monitor->VerifyNotFound();
+ PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+ (PFN_vkGetPhysicalDeviceFeatures2KHR)vkGetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+ ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
+
+ // Create a device that enables buffer_device_address
+ auto buffer_device_address_features = lvl_init_struct<VkPhysicalDeviceBufferAddressFeaturesEXT>();
+ auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&buffer_device_address_features);
+ vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
+ buffer_device_address_features.bufferDeviceAddressCaptureReplay = VK_FALSE;
+
+ ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT =
+ (PFN_vkGetBufferDeviceAddressEXT)vkGetInstanceProcAddr(instance(), "vkGetBufferDeviceAddressEXT");
+
+ VkBufferCreateInfo buffer_create_info = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO};
+ buffer_create_info.size = sizeof(uint32_t);
+ buffer_create_info.usage = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT;
+ buffer_create_info.flags = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT;
+ VkBuffer buffer;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferCreateInfo-flags-02605");
+ VkResult result = vkCreateBuffer(m_device->device(), &buffer_create_info, nullptr, &buffer);
+ m_errorMonitor->VerifyFound();
+ if (result == VK_SUCCESS) {
+ vkDestroyBuffer(m_device->device(), buffer, NULL);
+ }
+
+ buffer_create_info.flags = 0;
+ VkBufferDeviceAddressCreateInfoEXT addr_ci = {VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT};
+ addr_ci.deviceAddress = 1;
+ buffer_create_info.pNext = &addr_ci;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferCreateInfo-deviceAddress-02604");
+ result = vkCreateBuffer(m_device->device(), &buffer_create_info, nullptr, &buffer);
+ m_errorMonitor->VerifyFound();
+ if (result == VK_SUCCESS) {
+ vkDestroyBuffer(m_device->device(), buffer, NULL);
+ }
+
+ buffer_create_info.pNext = nullptr;
+ result = vkCreateBuffer(m_device->device(), &buffer_create_info, nullptr, &buffer);
+ ASSERT_VK_SUCCESS(result);
+
+ VkBufferDeviceAddressInfoEXT info = {VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT};
+ info.buffer = buffer;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferDeviceAddressInfoEXT-buffer-02600");
+ vkGetBufferDeviceAddressEXT(m_device->device(), &info);
+ m_errorMonitor->VerifyFound();
+
+ vkDestroyBuffer(m_device->device(), buffer, NULL);
+}
+
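+// Illustrative sketch of the query the 02600 case above forecloses: the buffer carries the
+// device-address usage bit and is bound to memory before its address is requested.
+// Assumes `pfn_get_address` was fetched via vkGetInstanceProcAddr or vkGetDeviceProcAddr.
+static inline VkDeviceAddress SketchGetBufferAddress(VkDevice device, VkBuffer bound_addressable_buffer,
+                                                     PFN_vkGetBufferDeviceAddressEXT pfn_get_address) {
+    VkBufferDeviceAddressInfoEXT info = {VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT};
+    info.buffer = bound_addressable_buffer;  // created with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT
+    return pfn_get_address(device, &info);
+}
+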
+TEST_F(VkLayerTest, BufferDeviceAddressEXTDisabled) {
+    TEST_DESCRIPTION("Test VK_EXT_buffer_device_address with the bufferDeviceAddress feature disabled.");
+
+ if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+ m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
} else {
- monitor->VerifyFound();
+ printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
+ VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+ return;
+ }
+ ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+ std::array<const char *, 1> required_device_extensions = {{VK_EXT_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME}};
+ for (auto device_extension : required_device_extensions) {
+ if (DeviceExtensionSupported(gpu(), nullptr, device_extension)) {
+ m_device_extension_names.push_back(device_extension);
+ } else {
+ printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, device_extension);
+ return;
+ }
}
- context_->Reset();
-};
-void print_android(const char *c) {
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
- __android_log_print(ANDROID_LOG_INFO, "VulkanLayerValidationTests", "%s", c);
-#endif // VK_USE_PLATFORM_ANDROID_KHR
+ PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+ (PFN_vkGetPhysicalDeviceFeatures2KHR)vkGetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+ ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
+
+ // Create a device that disables buffer_device_address
+ auto buffer_device_address_features = lvl_init_struct<VkPhysicalDeviceBufferAddressFeaturesEXT>();
+ auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&buffer_device_address_features);
+ vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
+ buffer_device_address_features.bufferDeviceAddress = VK_FALSE;
+ buffer_device_address_features.bufferDeviceAddressCaptureReplay = VK_FALSE;
+
+ ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
+ ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+ PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT =
+ (PFN_vkGetBufferDeviceAddressEXT)vkGetInstanceProcAddr(instance(), "vkGetBufferDeviceAddressEXT");
+
+ VkBufferCreateInfo buffer_create_info = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO};
+ buffer_create_info.size = sizeof(uint32_t);
+ buffer_create_info.usage = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT;
+ VkBuffer buffer;
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferCreateInfo-usage-02606");
+ VkResult result = vkCreateBuffer(m_device->device(), &buffer_create_info, nullptr, &buffer);
+ m_errorMonitor->VerifyFound();
+ if (result == VK_SUCCESS) {
+ vkDestroyBuffer(m_device->device(), buffer, NULL);
+ }
+
+ buffer_create_info.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
+ result = vkCreateBuffer(m_device->device(), &buffer_create_info, nullptr, &buffer);
+ ASSERT_VK_SUCCESS(result);
+
+ VkBufferDeviceAddressInfoEXT info = {VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT};
+ info.buffer = buffer;
+
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkGetBufferDeviceAddressEXT-None-02598");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferDeviceAddressInfoEXT-buffer-02601");
+ m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferDeviceAddressInfoEXT-buffer-02600");
+ vkGetBufferDeviceAddressEXT(m_device->device(), &info);
+ m_errorMonitor->VerifyFound();
+
+ vkDestroyBuffer(m_device->device(), buffer, NULL);
}
#if defined(ANDROID) && defined(VALIDATION_APK)
@@ -2097,7 +37748,6 @@ static void processCommand(struct android_app *app, int32_t cmd) {
case APP_CMD_INIT_WINDOW: {
if (app->window) {
initialized = true;
- VkTestFramework::window = app->window;
}
break;
}
diff --git a/tests/layer_validation_tests.h b/tests/layer_validation_tests.h
deleted file mode 100644
index 5ef980370..000000000
--- a/tests/layer_validation_tests.h
+++ /dev/null
@@ -1,786 +0,0 @@
-/*
- * Copyright (c) 2015-2019 The Khronos Group Inc.
- * Copyright (c) 2015-2019 Valve Corporation
- * Copyright (c) 2015-2019 LunarG, Inc.
- * Copyright (c) 2015-2019 Google, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Author: Chia-I Wu <olvaffe@gmail.com>
- * Author: Chris Forbes <chrisf@ijw.co.nz>
- * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
- * Author: Mark Lobodzinski <mark@lunarg.com>
- * Author: Mike Stroyan <mike@LunarG.com>
- * Author: Tobin Ehlis <tobine@google.com>
- * Author: Tony Barbour <tony@LunarG.com>
- * Author: Cody Northrop <cnorthrop@google.com>
- * Author: Dave Houlton <daveh@lunarg.com>
- * Author: Jeremy Kniager <jeremyk@lunarg.com>
- * Author: Shannon McPherson <shannon@lunarg.com>
- * Author: John Zulauf <jzulauf@lunarg.com>
- */
-
-#ifndef VKLAYERTEST_H
-#define VKLAYERTEST_H
-
-#ifdef ANDROID
-#include "vulkan_wrapper.h"
-#else
-#define NOMINMAX
-#include <vulkan/vulkan.h>
-#endif
-
-#include "layers/vk_device_profile_api_layer.h"
-
-#if defined(ANDROID)
-#include <android/log.h>
-#if defined(VALIDATION_APK)
-#include <android_native_app_glue.h>
-#endif
-#endif
-
-#include "icd-spv.h"
-#include "test_common.h"
-#include "vk_layer_config.h"
-#include "vk_format_utils.h"
-#include "vkrenderframework.h"
-#include "vk_typemap_helper.h"
-#include "convert_to_renderpass2.h"
-
-#include <algorithm>
-#include <cmath>
-#include <functional>
-#include <limits>
-#include <memory>
-#include <unordered_set>
-
-//--------------------------------------------------------------------------------------
-// Mesh and VertexFormat Data
-//--------------------------------------------------------------------------------------
-
-static const char kSkipPrefix[] = " TEST SKIPPED:";
-
-enum BsoFailSelect {
- BsoFailNone,
- BsoFailLineWidth,
- BsoFailDepthBias,
- BsoFailViewport,
- BsoFailScissor,
- BsoFailBlend,
- BsoFailDepthBounds,
- BsoFailStencilReadMask,
- BsoFailStencilWriteMask,
- BsoFailStencilReference,
- BsoFailCmdClearAttachments,
- BsoFailIndexBuffer,
- BsoFailIndexBufferBadSize,
- BsoFailIndexBufferBadOffset,
- BsoFailIndexBufferBadMapSize,
- BsoFailIndexBufferBadMapOffset,
- BsoFailLineStipple,
-};
-
-static const char bindStateMinimalShaderText[] = "#version 450\nvoid main() {}\n";
-
-static const char bindStateVertShaderText[] =
- "#version 450\n"
- "void main() {\n"
- " gl_Position = vec4(1);\n"
- "}\n";
-
-static const char bindStateVertPointSizeShaderText[] =
- "#version 450\n"
- "out gl_PerVertex {\n"
- " vec4 gl_Position;\n"
- " float gl_PointSize;\n"
- "};\n"
- "void main() {\n"
- " gl_Position = vec4(1);\n"
- " gl_PointSize = 1.0;\n"
- "}\n";
-
-static char const bindStateGeomShaderText[] =
- "#version 450\n"
- "layout(triangles) in;\n"
- "layout(triangle_strip, max_vertices=3) out;\n"
- "void main() {\n"
- " gl_Position = vec4(1);\n"
- " EmitVertex();\n"
- "}\n";
-
-static char const bindStateGeomPointSizeShaderText[] =
- "#version 450\n"
- "layout (points) in;\n"
- "layout (points) out;\n"
- "layout (max_vertices = 1) out;\n"
- "void main() {\n"
- " gl_Position = vec4(1);\n"
- " gl_PointSize = 1.0;\n"
- " EmitVertex();\n"
- "}\n";
-
-static const char bindStateTscShaderText[] =
- "#version 450\n"
- "layout(vertices=3) out;\n"
- "void main() {\n"
- " gl_TessLevelOuter[0] = gl_TessLevelOuter[1] = gl_TessLevelOuter[2] = 1;\n"
- " gl_TessLevelInner[0] = 1;\n"
- "}\n";
-
-static const char bindStateTeshaderText[] =
- "#version 450\n"
- "layout(triangles, equal_spacing, cw) in;\n"
- "void main() { gl_Position = vec4(1); }\n";
-
-static const char bindStateFragShaderText[] =
- "#version 450\n"
- "layout(location = 0) out vec4 uFragColor;\n"
- "void main(){\n"
- " uFragColor = vec4(0,1,0,1);\n"
- "}\n";
-
-static const char bindStateFragSamplerShaderText[] =
- "#version 450\n"
- "layout(set=0, binding=0) uniform sampler2D s;\n"
- "layout(location=0) out vec4 x;\n"
- "void main(){\n"
- " x = texture(s, vec2(1));\n"
- "}\n";
-
-static const char bindStateFragUniformShaderText[] =
- "#version 450\n"
- "layout(set=0) layout(binding=0) uniform foo { int x; int y; } bar;\n"
- "layout(location=0) out vec4 x;\n"
- "void main(){\n"
- " x = vec4(bar.y);\n"
- "}\n";
-
-// Static arrays helper
-template <class ElementT, size_t array_size>
-size_t size(ElementT (&)[array_size]) {
- return array_size;
-}
-
-// Format search helper
-VkFormat FindSupportedDepthStencilFormat(VkPhysicalDevice phy);
-
-// Returns true if *any* requested features are available.
-// Assumption is that the framework can successfully create an image as
-// long as at least one of the feature bits is present (excepting VTX_BUF).
-bool ImageFormatIsSupported(VkPhysicalDevice phy, VkFormat format, VkImageTiling tiling = VK_IMAGE_TILING_OPTIMAL,
- VkFormatFeatureFlags features = ~VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT);
-
-// Returns true if format and *all* requested features are available.
-bool ImageFormatAndFeaturesSupported(VkPhysicalDevice phy, VkFormat format, VkImageTiling tiling, VkFormatFeatureFlags features);
-
-// Returns true if format and *all* requested features are available.
-bool ImageFormatAndFeaturesSupported(const VkInstance inst, const VkPhysicalDevice phy, const VkImageCreateInfo info,
- const VkFormatFeatureFlags features);
-
-// Validation report callback prototype
-VKAPI_ATTR VkBool32 VKAPI_CALL myDbgFunc(VkFlags msgFlags, VkDebugReportObjectTypeEXT objType, uint64_t srcObject, size_t location,
- int32_t msgCode, const char *pLayerPrefix, const char *pMsg, void *pUserData);
-
-// Simple sane SamplerCreateInfo boilerplate
-VkSamplerCreateInfo SafeSaneSamplerCreateInfo();
-
-VkImageViewCreateInfo SafeSaneImageViewCreateInfo(VkImage image, VkFormat format, VkImageAspectFlags aspect_mask);
-
-VkImageViewCreateInfo SafeSaneImageViewCreateInfo(const VkImageObj &image, VkFormat format, VkImageAspectFlags aspect_mask);
-
-// Helper for checking createRenderPass2 support and adding related extensions.
-bool CheckCreateRenderPass2Support(VkRenderFramework *renderFramework, std::vector<const char *> &device_extension_names);
-
-// Helper for checking descriptor_indexing support and adding related extensions.
-bool CheckDescriptorIndexingSupportAndInitFramework(VkRenderFramework *renderFramework,
- std::vector<const char *> &instance_extension_names,
- std::vector<const char *> &device_extension_names,
- VkValidationFeaturesEXT *features, void *userData);
-
-// Dependent "false" type for the static assert, as GCC will evaluate
-// non-dependent static_asserts even for non-instantiated templates
-template <typename T>
-struct AlwaysFalse : std::false_type {};
-
-// Helpers to get nearest greater or smaller value (of float) -- useful for testing the boundary cases of Vulkan limits
-template <typename T>
-T NearestGreater(const T from) {
- using Lim = std::numeric_limits<T>;
- const auto positive_direction = Lim::has_infinity ? Lim::infinity() : Lim::max();
-
- return std::nextafter(from, positive_direction);
-}
-
-template <typename T>
-T NearestSmaller(const T from) {
- using Lim = std::numeric_limits<T>;
- const auto negative_direction = Lim::has_infinity ? -Lim::infinity() : Lim::lowest();
-
- return std::nextafter(from, negative_direction);
-}
-
-// ErrorMonitor Usage:
-//
-// Call SetDesiredFailureMsg with a string to be compared against all
-// encountered log messages, or a validation error enum identifying
-// desired error message. Passing NULL or VALIDATION_ERROR_MAX_ENUM
-// will match all log messages. logMsg will return true for skipCall
-// only if msg is matched or NULL.
-//
-// Call VerifyFound to determine if all desired failure messages
-// were encountered. Call VerifyNotFound to determine if any unexpected
-// failure was encountered.
-class ErrorMonitor {
- public:
- ErrorMonitor();
-
- ~ErrorMonitor();
-
- // Set monitor to pristine state
- void Reset();
-
- // ErrorMonitor will look for an error message containing the specified string(s)
- void SetDesiredFailureMsg(const VkFlags msgFlags, const std::string msg);
- void SetDesiredFailureMsg(const VkFlags msgFlags, const char *const msgString);
-
- // ErrorMonitor will look for an error message containing the specified string(s)
- template <typename Iter>
- void SetDesiredFailureMsg(const VkFlags msgFlags, Iter iter, const Iter end) {
- for (; iter != end; ++iter) {
- SetDesiredFailureMsg(msgFlags, *iter);
- }
- }
-
- // Set an error that the error monitor will ignore. Do not use this function if you are creating a new test.
- // TODO: This is stopgap to block new unexpected errors from being introduced. The long-term goal is to remove the use of this
- // function and its definition.
- void SetUnexpectedError(const char *const msg);
-
- VkBool32 CheckForDesiredMsg(const char *const msgString);
- vector<string> GetOtherFailureMsgs() const;
- VkDebugReportFlagsEXT GetMessageFlags() const;
- bool AnyDesiredMsgFound() const;
- bool AllDesiredMsgsFound() const;
- void SetError(const char *const errorString);
- void SetBailout(bool *bailout);
- void DumpFailureMsgs() const;
-
- // Helpers
-
- // ExpectSuccess now takes an optional argument allowing a custom combination of debug flags
- void ExpectSuccess(VkDebugReportFlagsEXT const message_flag_mask = VK_DEBUG_REPORT_ERROR_BIT_EXT);
-
- void VerifyFound();
- void VerifyNotFound();
-
- private:
- // TODO: This is stopgap to block new unexpected errors from being introduced. The long-term goal is to remove the use of this
- // function and its definition.
- bool IgnoreMessage(std::string const &msg) const;
-
- VkFlags message_flags_;
- std::unordered_multiset<std::string> desired_message_strings_;
- std::unordered_multiset<std::string> failure_message_strings_;
- std::vector<std::string> ignore_message_strings_;
- vector<string> other_messages_;
- test_platform_thread_mutex mutex_;
- bool *bailout_;
- bool message_found_;
-};
-
-class VkLayerTest : public VkRenderFramework {
- public:
- void VKTriangleTest(BsoFailSelect failCase);
-
- void GenericDrawPreparation(VkCommandBufferObj *commandBuffer, VkPipelineObj &pipelineobj, VkDescriptorSetObj &descriptorSet,
- BsoFailSelect failCase);
-
- void Init(VkPhysicalDeviceFeatures *features = nullptr, VkPhysicalDeviceFeatures2 *features2 = nullptr,
- const VkCommandPoolCreateFlags flags = 0, void *instance_pnext = nullptr);
- bool AddSurfaceInstanceExtension();
- bool AddSwapchainDeviceExtension();
- ErrorMonitor *Monitor();
- VkCommandBufferObj *CommandBuffer();
-
- protected:
- ErrorMonitor *m_errorMonitor;
- uint32_t m_instance_api_version = 0;
- uint32_t m_target_api_version = 0;
- bool m_enableWSI;
-
- uint32_t SetTargetApiVersion(uint32_t target_api_version);
- uint32_t DeviceValidationVersion();
- bool LoadDeviceProfileLayer(
- PFN_vkSetPhysicalDeviceFormatPropertiesEXT &fpvkSetPhysicalDeviceFormatPropertiesEXT,
- PFN_vkGetOriginalPhysicalDeviceFormatPropertiesEXT &fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT);
-
- VkLayerTest();
- ~VkLayerTest();
-};
-
-class VkPositiveLayerTest : public VkLayerTest {
- public:
- protected:
-};
-
-class VkWsiEnabledLayerTest : public VkLayerTest {
- public:
- protected:
- VkWsiEnabledLayerTest() { m_enableWSI = true; }
-};
-
-class VkBufferTest {
- public:
- enum eTestEnFlags {
- eDoubleDelete,
- eInvalidDeviceOffset,
- eInvalidMemoryOffset,
- eBindNullBuffer,
- eBindFakeBuffer,
- eFreeInvalidHandle,
- eNone,
- };
-
- enum eTestConditions { eOffsetAlignment = 1 };
-
- static bool GetTestConditionValid(VkDeviceObj *aVulkanDevice, eTestEnFlags aTestFlag, VkBufferUsageFlags aBufferUsage = 0);
- // A constructor that performs validation tests during construction.
- VkBufferTest(VkDeviceObj *aVulkanDevice, VkBufferUsageFlags aBufferUsage, eTestEnFlags aTestFlag = eNone);
- ~VkBufferTest();
- bool GetBufferCurrent();
- const VkBuffer &GetBuffer();
- void TestDoubleDestroy();
-
- protected:
- bool AllocateCurrent;
- bool BoundCurrent;
- bool CreateCurrent;
- bool InvalidDeleteEn;
-
- VkBuffer VulkanBuffer;
- VkDevice VulkanDevice;
- VkDeviceMemory VulkanMemory;
-};
-
-struct CreatePipelineHelper;
-class VkVerticesObj {
- public:
- VkVerticesObj(VkDeviceObj *aVulkanDevice, unsigned aAttributeCount, unsigned aBindingCount, unsigned aByteStride,
- VkDeviceSize aVertexCount, const float *aVerticies);
- ~VkVerticesObj();
- bool AddVertexInputToPipe(VkPipelineObj &aPipelineObj);
- bool AddVertexInputToPipeHelpr(CreatePipelineHelper *pipelineHelper);
- void BindVertexBuffers(VkCommandBuffer aCommandBuffer, unsigned aOffsetCount = 0, VkDeviceSize *aOffsetList = nullptr);
-
- protected:
- static uint32_t BindIdGenerator;
-
- bool BoundCurrent;
- unsigned AttributeCount;
- unsigned BindingCount;
- uint32_t BindId;
-
- VkPipelineVertexInputStateCreateInfo PipelineVertexInputStateCreateInfo;
- VkVertexInputAttributeDescription *VertexInputAttributeDescription;
- VkVertexInputBindingDescription *VertexInputBindingDescription;
- VkConstantBufferObj VulkanMemoryBuffer;
-};
-
-struct OneOffDescriptorSet {
- VkDeviceObj *device_;
- VkDescriptorPool pool_;
- VkDescriptorSetLayoutObj layout_;
- VkDescriptorSet set_;
- typedef std::vector<VkDescriptorSetLayoutBinding> Bindings;
- std::vector<VkDescriptorBufferInfo> buffer_infos;
- std::vector<VkDescriptorImageInfo> image_infos;
- std::vector<VkWriteDescriptorSet> descriptor_writes;
-
- OneOffDescriptorSet(VkDeviceObj *device, const Bindings &bindings, VkDescriptorSetLayoutCreateFlags layout_flags = 0,
- void *layout_pnext = NULL, VkDescriptorPoolCreateFlags poolFlags = 0, void *allocate_pnext = NULL);
- ~OneOffDescriptorSet();
- bool Initialized();
- void WriteDescriptorBufferInfo(int blinding, VkBuffer buffer, VkDeviceSize size,
- VkDescriptorType descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER);
- void WriteDescriptorBufferView(int blinding, VkBufferView &buffer_view,
- VkDescriptorType descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER);
- void WriteDescriptorImageInfo(int blinding, VkImageView image_view, VkSampler sampler,
- VkDescriptorType descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
- void UpdateDescriptorSets();
-};
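-
-// Minimal usage sketch (the binding layout and buffer below are illustrative, not part of this header):
-//
-//     OneOffDescriptorSet ds(m_device, {{0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr}});
-//     ds.WriteDescriptorBufferInfo(0, buffer.handle(), 256);  // descriptorType defaults to VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER
-//     ds.UpdateDescriptorSets();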
-
-template <typename T>
-bool IsValidVkStruct(const T &s) {
- return LvlTypeMap<T>::kSType == s.sType;
-}
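-
-// Sketch: IsValidVkStruct() only checks that sType carries the canonical value for T, e.g.
-//
-//     VkBufferCreateInfo ci = {};
-//     ci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
-//     IsValidVkStruct(ci);  // true; with a zero-initialized sType it would be false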
-
-// Helper class for tersely writing CreatePipeline tests
-//
-// Designed with minimal error checking so that invalid (error) states are easy to construct
-// See OneshotTest below for typical usage
-struct CreatePipelineHelper {
- public:
- std::vector<VkDescriptorSetLayoutBinding> dsl_bindings_;
- std::unique_ptr<OneOffDescriptorSet> descriptor_set_;
- std::vector<VkPipelineShaderStageCreateInfo> shader_stages_;
- VkPipelineVertexInputStateCreateInfo vi_ci_ = {};
- VkPipelineInputAssemblyStateCreateInfo ia_ci_ = {};
- VkPipelineTessellationStateCreateInfo tess_ci_ = {};
- VkViewport viewport_ = {};
- VkRect2D scissor_ = {};
- VkPipelineViewportStateCreateInfo vp_state_ci_ = {};
- VkPipelineMultisampleStateCreateInfo pipe_ms_state_ci_ = {};
- VkPipelineLayoutCreateInfo pipeline_layout_ci_ = {};
- VkPipelineLayoutObj pipeline_layout_;
- VkPipelineDynamicStateCreateInfo dyn_state_ci_ = {};
- VkPipelineRasterizationStateCreateInfo rs_state_ci_ = {};
- VkPipelineRasterizationLineStateCreateInfoEXT line_state_ci_ = {};
- VkPipelineColorBlendAttachmentState cb_attachments_ = {};
- VkPipelineColorBlendStateCreateInfo cb_ci_ = {};
- VkGraphicsPipelineCreateInfo gp_ci_ = {};
- VkPipelineCacheCreateInfo pc_ci_ = {};
- VkPipeline pipeline_ = VK_NULL_HANDLE;
- VkPipelineCache pipeline_cache_ = VK_NULL_HANDLE;
- std::unique_ptr<VkShaderObj> vs_;
- std::unique_ptr<VkShaderObj> fs_;
- VkLayerTest &layer_test_;
- CreatePipelineHelper(VkLayerTest &test);
- ~CreatePipelineHelper();
-
- void InitDescriptorSetInfo();
- void InitInputAndVertexInfo();
- void InitMultisampleInfo();
- void InitPipelineLayoutInfo();
- void InitViewportInfo();
- void InitDynamicStateInfo();
- void InitShaderInfo();
- void InitRasterizationInfo();
- void InitLineRasterizationInfo();
- void InitBlendStateInfo();
- void InitGraphicsPipelineInfo();
- void InitPipelineCacheInfo();
-
- // Not called by default during init_info
- void InitTesselationState();
-
- // TBD -- add control for optional and/or additional initialization
- void InitInfo();
- void InitState();
- void LateBindPipelineInfo();
- VkResult CreateGraphicsPipeline(bool implicit_destroy = true, bool do_late_bind = true);
-
- // Helper function to create a simple test case (positive or negative)
- //
- // info_override can be any callable that takes a CreatePipelineHelper &;
- // flags and error can be any args accepted by "SetDesiredFailureMsg".
- template <typename Test, typename OverrideFunc, typename Error>
- static void OneshotTest(Test &test, const OverrideFunc &info_override, const VkFlags flags, const std::vector<Error> &errors,
- bool positive_test = false) {
- CreatePipelineHelper helper(test);
- helper.InitInfo();
- info_override(helper);
- helper.InitState();
-
- for (const auto &error : errors) test.Monitor()->SetDesiredFailureMsg(flags, error);
- helper.CreateGraphicsPipeline();
-
- if (positive_test) {
- test.Monitor()->VerifyNotFound();
- } else {
- test.Monitor()->VerifyFound();
- }
- }
-
- template <typename Test, typename OverrideFunc, typename Error>
- static void OneshotTest(Test &test, const OverrideFunc &info_override, const VkFlags flags, Error error,
- bool positive_test = false) {
- OneshotTest(test, info_override, flags, std::vector<Error>(1, error), positive_test);
- }
-};
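-
-// Minimal OneshotTest sketch (the override lambda and error substring are illustrative,
-// not taken from this header; real tests usually pass a VUID string):
-//
-//     const auto bad_line_width = [](CreatePipelineHelper &helper) { helper.rs_state_ci_.lineWidth = 0.0f; };
-//     CreatePipelineHelper::OneshotTest(*this, bad_line_width, VK_DEBUG_REPORT_ERROR_BIT_EXT,
-//                                       "expected error message substring");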
-
-struct CreateComputePipelineHelper {
- public:
- std::vector<VkDescriptorSetLayoutBinding> dsl_bindings_;
- std::unique_ptr<OneOffDescriptorSet> descriptor_set_;
- VkPipelineLayoutCreateInfo pipeline_layout_ci_ = {};
- VkPipelineLayoutObj pipeline_layout_;
- VkComputePipelineCreateInfo cp_ci_ = {};
- VkPipelineCacheCreateInfo pc_ci_ = {};
- VkPipeline pipeline_ = VK_NULL_HANDLE;
- VkPipelineCache pipeline_cache_ = VK_NULL_HANDLE;
- std::unique_ptr<VkShaderObj> cs_;
- VkLayerTest &layer_test_;
- CreateComputePipelineHelper(VkLayerTest &test);
- ~CreateComputePipelineHelper();
-
- void InitDescriptorSetInfo();
- void InitPipelineLayoutInfo();
- void InitShaderInfo();
- void InitComputePipelineInfo();
- void InitPipelineCacheInfo();
-
- // TBD -- add control for optional and/or additional initialization
- void InitInfo();
- void InitState();
- void LateBindPipelineInfo();
- VkResult CreateComputePipeline(bool implicit_destroy = true, bool do_late_bind = true);
-
- // Helper function to create a simple test case (positive or negative)
- //
- // info_override can be any callable that takes a CreateComputePipelineHelper &;
- // flags and error can be any args accepted by "SetDesiredFailureMsg".
- template <typename Test, typename OverrideFunc, typename Error>
- static void OneshotTest(Test &test, const OverrideFunc &info_override, const VkFlags flags, const std::vector<Error> &errors,
- bool positive_test = false) {
- CreateComputePipelineHelper helper(test);
- helper.InitInfo();
- info_override(helper);
- helper.InitState();
-
- for (const auto &error : errors) test.Monitor()->SetDesiredFailureMsg(flags, error);
- helper.CreateComputePipeline();
-
- if (positive_test) {
- test.Monitor()->VerifyNotFound();
- } else {
- test.Monitor()->VerifyFound();
- }
- }
-
- template <typename Test, typename OverrideFunc, typename Error>
- static void OneshotTest(Test &test, const OverrideFunc &info_override, const VkFlags flags, Error error,
- bool positive_test = false) {
- OneshotTest(test, info_override, flags, std::vector<Error>(1, error), positive_test);
- }
-};
-
-// Helper class for tersely writing ray tracing pipeline creation tests
-//
-// Designed with minimal error checking to ensure easy error state creation
-// See OneshotTest for typical usage
-struct CreateNVRayTracingPipelineHelper {
- public:
- std::vector<VkDescriptorSetLayoutBinding> dsl_bindings_;
- std::unique_ptr<OneOffDescriptorSet> descriptor_set_;
- std::vector<VkPipelineShaderStageCreateInfo> shader_stages_;
- VkPipelineLayoutCreateInfo pipeline_layout_ci_ = {};
- VkPipelineLayoutObj pipeline_layout_;
- VkRayTracingPipelineCreateInfoNV rp_ci_ = {};
- VkPipelineCacheCreateInfo pc_ci_ = {};
- VkPipeline pipeline_ = VK_NULL_HANDLE;
- VkPipelineCache pipeline_cache_ = VK_NULL_HANDLE;
- std::vector<VkRayTracingShaderGroupCreateInfoNV> groups_;
- std::unique_ptr<VkShaderObj> rgs_;
- std::unique_ptr<VkShaderObj> chs_;
- std::unique_ptr<VkShaderObj> mis_;
- VkLayerTest &layer_test_;
- CreateNVRayTracingPipelineHelper(VkLayerTest &test);
- ~CreateNVRayTracingPipelineHelper();
-
- static bool InitInstanceExtensions(VkLayerTest &test, std::vector<const char *> &instance_extension_names);
- static bool InitDeviceExtensions(VkLayerTest &test, std::vector<const char *> &device_extension_names);
- void InitShaderGroups();
- void InitDescriptorSetInfo();
- void InitPipelineLayoutInfo();
- void InitShaderInfo();
- void InitNVRayTracingPipelineInfo();
- void InitPipelineCacheInfo();
- void InitInfo();
- void InitState();
- void LateBindPipelineInfo();
- VkResult CreateNVRayTracingPipeline(bool implicit_destroy = true, bool do_late_bind = true);
-
- // Helper function to create a simple test case (positive or negative)
- //
- // info_override can be any callable that takes a CreateNVRayTracingPipelineHelper &
- // flags and error can be any args accepted by "SetDesiredFailureMsg".
- template <typename Test, typename OverrideFunc, typename Error>
- static void OneshotTest(Test &test, const OverrideFunc &info_override, const std::vector<Error> &errors,
- const VkFlags flags = VK_DEBUG_REPORT_ERROR_BIT_EXT) {
- CreateNVRayTracingPipelineHelper helper(test);
- helper.InitInfo();
- info_override(helper);
- helper.InitState();
-
- for (const auto &error : errors) test.Monitor()->SetDesiredFailureMsg(flags, error);
- helper.CreateNVRayTracingPipeline();
- test.Monitor()->VerifyFound();
- }
-
- template <typename Test, typename OverrideFunc, typename Error>
- static void OneshotTest(Test &test, const OverrideFunc &info_override, Error error,
- const VkFlags flags = VK_DEBUG_REPORT_ERROR_BIT_EXT) {
- OneshotTest(test, info_override, std::vector<Error>(1, error), flags);
- }
-
- template <typename Test, typename OverrideFunc>
- static void OneshotPositiveTest(Test &test, const OverrideFunc &info_override,
- const VkDebugReportFlagsEXT message_flag_mask = VK_DEBUG_REPORT_ERROR_BIT_EXT) {
- CreateNVRayTracingPipelineHelper helper(test);
- helper.InitInfo();
- info_override(helper);
- helper.InitState();
-
- test.Monitor()->ExpectSuccess(message_flag_mask);
- ASSERT_VK_SUCCESS(helper.CreateNVRayTracingPipeline());
- test.Monitor()->VerifyNotFound();
- }
-};
-
-namespace chain_util {
-template <typename T>
-T Init(const void *pnext_in = nullptr) {
- T pnext_obj = {};
- pnext_obj.sType = LvlTypeMap<T>::kSType;
- pnext_obj.pNext = pnext_in;
- return pnext_obj;
-}
-
-class ExtensionChain {
- const void *head_ = nullptr;
- typedef std::function<bool(const char *)> AddIfFunction;
- AddIfFunction add_if_;
- typedef std::vector<const char *> List;
- List *list_;
-
- public:
- template <typename F>
- ExtensionChain(F &add_if, List *list) : add_if_(add_if), list_(list) {}
-
- template <typename T>
- void Add(const char *name, T &obj) {
- if (add_if_(name)) {
- if (list_) {
- list_->push_back(name);
- }
- obj.pNext = head_;
- head_ = &obj;
- }
- }
-
- const void *Head() const;
-};
-} // namespace chain_util
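-
-// Minimal chain_util sketch (the structs below are illustrative): Init<T>() fills in sType from
-// LvlTypeMap<T> and links pNext, so chains can be built back to front:
-//
-//     auto ycbcr_info = chain_util::Init<VkSamplerYcbcrConversionInfo>();
-//     auto sampler_ci = chain_util::Init<VkSamplerCreateInfo>(&ycbcr_info);  // sampler_ci.pNext == &ycbcr_info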
-
-// PushDescriptorProperties helper
-VkPhysicalDevicePushDescriptorPropertiesKHR GetPushDescriptorProperties(VkInstance instance, VkPhysicalDevice gpu);
-
-// Subgroup properties helper
-VkPhysicalDeviceSubgroupProperties GetSubgroupProperties(VkInstance instance, VkPhysicalDevice gpu);
-
-class BarrierQueueFamilyTestHelper {
- public:
- struct QueueFamilyObjs {
- uint32_t index;
- // We would use std::unique_ptr, but this triggers a compiler error on older compilers
- VkQueueObj *queue = nullptr;
- VkCommandPoolObj *command_pool = nullptr;
- VkCommandBufferObj *command_buffer = nullptr;
- VkCommandBufferObj *command_buffer2 = nullptr;
- ~QueueFamilyObjs();
- void Init(VkDeviceObj *device, uint32_t qf_index, VkQueue qf_queue, VkCommandPoolCreateFlags cp_flags);
- };
-
- struct Context {
- VkLayerTest *layer_test;
- uint32_t default_index;
- std::unordered_map<uint32_t, QueueFamilyObjs> queue_families;
- Context(VkLayerTest *test, const std::vector<uint32_t> &queue_family_indices);
- void Reset();
- };
-
- BarrierQueueFamilyTestHelper(Context *context);
- // Init with queue families non-null for CONCURRENT sharing mode (which requires them)
- void Init(std::vector<uint32_t> *families, bool image_memory = true, bool buffer_memory = true);
-
- QueueFamilyObjs *GetQueueFamilyInfo(Context *context, uint32_t qfi);
-
- enum Modifier {
- NONE,
- DOUBLE_RECORD,
- DOUBLE_COMMAND_BUFFER,
- };
-
- void operator()(std::string img_err, std::string buf_err = "", uint32_t src = VK_QUEUE_FAMILY_IGNORED,
- uint32_t dst = VK_QUEUE_FAMILY_IGNORED, bool positive = false,
- uint32_t queue_family_index = kInvalidQueueFamily, Modifier mod = Modifier::NONE);
-
- static const uint32_t kInvalidQueueFamily = UINT32_MAX;
- Context *context_;
- VkImageObj image_;
- VkImageMemoryBarrier image_barrier_;
- VkBufferObj buffer_;
- VkBufferMemoryBarrier buffer_barrier_;
-};
-
-struct DebugUtilsLabelCheckData {
- std::function<void(const VkDebugUtilsMessengerCallbackDataEXT *pCallbackData, DebugUtilsLabelCheckData *)> callback;
- size_t count;
-};
-
-bool operator==(const VkDebugUtilsLabelEXT &rhs, const VkDebugUtilsLabelEXT &lhs);
-
-VKAPI_ATTR VkBool32 VKAPI_CALL DebugUtilsCallback(VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
- VkDebugUtilsMessageTypeFlagsEXT messageTypes,
- const VkDebugUtilsMessengerCallbackDataEXT *pCallbackData, void *pUserData);
-
-#if GTEST_IS_THREADSAFE
-struct thread_data_struct {
- VkCommandBuffer commandBuffer;
- VkDevice device;
- VkEvent event;
- bool bailout;
-};
-
-extern "C" void *AddToCommandBuffer(void *arg);
-#endif // GTEST_IS_THREADSAFE
-
-extern "C" void *ReleaseNullFence(void *arg);
-
-void TestRenderPassCreate(ErrorMonitor *error_monitor, const VkDevice device, const VkRenderPassCreateInfo *create_info,
- bool rp2_supported, const char *rp1_vuid, const char *rp2_vuid);
-void PositiveTestRenderPassCreate(ErrorMonitor *error_monitor, const VkDevice device, const VkRenderPassCreateInfo *create_info,
- bool rp2_supported);
-void TestRenderPass2KHRCreate(ErrorMonitor *error_monitor, const VkDevice device, const VkRenderPassCreateInfo2KHR *create_info,
- const char *rp2_vuid);
-void TestRenderPassBegin(ErrorMonitor *error_monitor, const VkDevice device, const VkCommandBuffer command_buffer,
- const VkRenderPassBeginInfo *begin_info, bool rp2Supported, const char *rp1_vuid, const char *rp2_vuid);
-
-// Helpers for the tests below
-void ValidOwnershipTransferOp(ErrorMonitor *monitor, VkCommandBufferObj *cb, VkPipelineStageFlags src_stages,
- VkPipelineStageFlags dst_stages, const VkBufferMemoryBarrier *buf_barrier,
- const VkImageMemoryBarrier *img_barrier);
-
-void ValidOwnershipTransfer(ErrorMonitor *monitor, VkCommandBufferObj *cb_from, VkCommandBufferObj *cb_to,
- VkPipelineStageFlags src_stages, VkPipelineStageFlags dst_stages,
- const VkBufferMemoryBarrier *buf_barrier, const VkImageMemoryBarrier *img_barrier);
-
-VkResult GPDIFPHelper(VkPhysicalDevice dev, const VkImageCreateInfo *ci, VkImageFormatProperties *limits = nullptr);
-
-VkFormat FindFormatLinearWithoutMips(VkPhysicalDevice gpu, VkImageCreateInfo image_ci);
-
-bool FindFormatWithoutSamples(VkPhysicalDevice gpu, VkImageCreateInfo &image_ci);
-
-bool FindUnsupportedImage(VkPhysicalDevice gpu, VkImageCreateInfo &image_ci);
-
-VkFormat FindFormatWithoutFeatures(VkPhysicalDevice gpu, VkImageTiling tiling,
- VkFormatFeatureFlags undesired_features = UINT32_MAX);
-
-void NegHeightViewportTests(VkDeviceObj *m_device, VkCommandBufferObj *m_commandBuffer, ErrorMonitor *m_errorMonitor);
-
-void CreateSamplerTest(VkLayerTest &test, const VkSamplerCreateInfo *pCreateInfo, std::string code = "");
-
-void CreateBufferTest(VkLayerTest &test, const VkBufferCreateInfo *pCreateInfo, std::string code = "");
-
-void CreateImageTest(VkLayerTest &test, const VkImageCreateInfo *pCreateInfo, std::string code = "");
-
-void CreateBufferViewTest(VkLayerTest &test, const VkBufferViewCreateInfo *pCreateInfo, const std::vector<std::string> &codes);
-
-void CreateImageViewTest(VkLayerTest &test, const VkImageViewCreateInfo *pCreateInfo, std::string code = "");
-
-void print_android(const char *c);
-#endif // VKLAYERTEST_H
diff --git a/tests/layers/CMakeLists.txt b/tests/layers/CMakeLists.txt
index bf0fea01b..bd66b6d4f 100644
--- a/tests/layers/CMakeLists.txt
+++ b/tests/layers/CMakeLists.txt
@@ -1,6 +1,6 @@
# ~~~
-# Copyright (c) 2016-2019 Valve Corporation
-# Copyright (c) 2016-2019 LunarG, Inc.
+# Copyright (c) 2016-2018 Valve Corporation
+# Copyright (c) 2016-2018 LunarG, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -69,7 +69,7 @@ if(WIN32)
set_target_properties(copy-${target}-def-file PROPERTIES FOLDER ${LAYERS_HELPER_FOLDER})
add_library(VkLayer_${target} SHARED ${ARGN} VkLayer_${target}.def)
- add_dependencies(VkLayer_${target} VkLayer_utils)
+ add_dependencies(VkLayer_${target} VulkanVL_generate_helper_files VkLayer_utils)
target_link_libraries(VkLayer_${target} PRIVATE VkLayer_utils)
target_compile_definitions(VkLayer_${target} PRIVATE "_CRT_SECURE_NO_WARNINGS")
target_compile_options(VkLayer_${target} PRIVATE $<$<CONFIG:Debug>:/bigobj>)
@@ -77,7 +77,7 @@ if(WIN32)
elseif(APPLE)
macro(AddVkLayer target)
add_library(VkLayer_${target} SHARED ${ARGN})
- add_dependencies(VkLayer_${target} VkLayer_utils)
+ add_dependencies(VkLayer_${target} VulkanVL_generate_helper_files VkLayer_utils)
set_target_properties(VkLayer_${target} PROPERTIES LINK_FLAGS "-Wl")
target_link_libraries(VkLayer_${target} PRIVATE VkLayer_utils)
target_compile_options(VkLayer_${target} PRIVATE "-Wpointer-arith" "-Wno-unused-function")
@@ -85,8 +85,8 @@ elseif(APPLE)
else(UNIX AND NOT APPLE) # i.e.: Linux
macro(AddVkLayer target)
add_library(VkLayer_${target} SHARED ${ARGN})
- add_dependencies(VkLayer_${target} VkLayer_utils)
- set_target_properties(VkLayer_${target} PROPERTIES LINK_FLAGS "-Wl,--version-script=${CMAKE_CURRENT_SOURCE_DIR}/libVkLayer_${target}.map,-Bsymbolic")
+ add_dependencies(VkLayer_${target} VulkanVL_generate_helper_files VkLayer_utils)
+ set_target_properties(VkLayer_${target} PROPERTIES LINK_FLAGS "-Wl,-Bsymbolic")
target_link_libraries(VkLayer_${target} PRIVATE VkLayer_utils)
target_compile_options(VkLayer_${target} PRIVATE "-Wpointer-arith" "-Wno-unused-function")
endmacro()
@@ -96,31 +96,29 @@ AddVkLayer(device_profile_api device_profile_api.cpp ${PROJECT_SOURCE_DIR}/layer
# --------------------------------------------------------------------------------------------------------------------------------
+# TODO: Clear out this list
target_include_directories(${TEST_LAYER_NAME}
- PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}
+ PRIVATE ${VulkanHeaders_INCLUDE_DIR}
+ ${CMAKE_CURRENT_SOURCE_DIR}
${PROJECT_SOURCE_DIR}/layers
${CMAKE_CURRENT_BINARY_DIR}
${PROJECT_BINARY_DIR}
${PROJECT_BINARY_DIR}/layers
- ${CMAKE_BINARY_DIR}
- ${VulkanHeaders_INCLUDE_DIR})
+ ${CMAKE_BINARY_DIR})
if(WIN32)
- # For Windows, copy necessary device_profile_api layer files for the layer_tests
+ # For Windows, copy necessary gtest DLLs to the right spot for the vk_layer_tests...
+ if (MSVC_IDE)
+ file(TO_NATIVE_PATH ${CMAKE_CURRENT_BINARY_DIR}/$<CONFIG>/*device_profile_api.* SRC_LAYER)
+ else()
+ file(TO_NATIVE_PATH ${CMAKE_CURRENT_BINARY_DIR}/*device_profile_api.* SRC_LAYER)
+ endif()
+
file(TO_NATIVE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/windows/${TEST_LAYER_NAME}.json SRC_JSON)
- file(TO_NATIVE_PATH ${PROJECT_BINARY_DIR}/layers/$<CONFIG>/${TEST_LAYER_NAME}.json DST_JSON)
- add_custom_command(TARGET ${TEST_LAYER_NAME} POST_BUILD
- COMMAND ${CMAKE_COMMAND} -E copy ${SRC_JSON} ${DST_JSON})
- SET(DEVICE_PROFILE_LAYER_FILES
- VkLayer_device_profile_api.dll
- VkLayer_device_profile_api.exp
- VkLayer_device_profile_api.lib)
file(TO_NATIVE_PATH ${PROJECT_BINARY_DIR}/layers/$<CONFIG> DST_LAYER)
- foreach(DEV_PROF_FILE ${DEVICE_PROFILE_LAYER_FILES})
- file(TO_NATIVE_PATH ${CMAKE_CURRENT_BINARY_DIR}/$<CONFIG>/${DEV_PROF_FILE} SRC_LAYER)
- add_custom_command(TARGET ${TEST_LAYER_NAME} POST_BUILD
- COMMAND ${CMAKE_COMMAND} -E copy_if_different ${SRC_LAYER} ${DST_LAYER})
- endforeach()
+ add_custom_command(TARGET ${TEST_LAYER_NAME} POST_BUILD
+ COMMAND xcopy /Y /I ${SRC_LAYER} ${DST_LAYER}
+ COMMAND xcopy /Y /I ${SRC_JSON} ${DST_LAYER})
elseif(APPLE)
if(CMAKE_GENERATOR MATCHES "^Xcode.*")
add_custom_command(TARGET ${TEST_LAYER_NAME} POST_BUILD
diff --git a/tests/layers/libVkLayer_device_profile_api.map b/tests/layers/libVkLayer_device_profile_api.map
deleted file mode 100644
index e39706ba4..000000000
--- a/tests/layers/libVkLayer_device_profile_api.map
+++ /dev/null
@@ -1,9 +0,0 @@
-{
- global:
- vkGetInstanceProcAddr;
- vkEnumerateInstanceLayerProperties;
- vkEnumerateInstanceExtensionProperties;
- vkNegotiateLoaderLayerInterfaceVersion;
- local:
- *;
-};
diff --git a/tests/vklayertests_buffer_image_memory_sampler.cpp b/tests/vklayertests_buffer_image_memory_sampler.cpp
deleted file mode 100644
index b8e7817df..000000000
--- a/tests/vklayertests_buffer_image_memory_sampler.cpp
+++ /dev/null
@@ -1,7169 +0,0 @@
-/*
- * Copyright (c) 2015-2019 The Khronos Group Inc.
- * Copyright (c) 2015-2019 Valve Corporation
- * Copyright (c) 2015-2019 LunarG, Inc.
- * Copyright (c) 2015-2019 Google, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Author: Chia-I Wu <olvaffe@gmail.com>
- * Author: Chris Forbes <chrisf@ijw.co.nz>
- * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
- * Author: Mark Lobodzinski <mark@lunarg.com>
- * Author: Mike Stroyan <mike@LunarG.com>
- * Author: Tobin Ehlis <tobine@google.com>
- * Author: Tony Barbour <tony@LunarG.com>
- * Author: Cody Northrop <cnorthrop@google.com>
- * Author: Dave Houlton <daveh@lunarg.com>
- * Author: Jeremy Kniager <jeremyk@lunarg.com>
- * Author: Shannon McPherson <shannon@lunarg.com>
- * Author: John Zulauf <jzulauf@lunarg.com>
- */
-
-#include "cast_utils.h"
-#include "layer_validation_tests.h"
-
-TEST_F(VkLayerTest, MirrorClampToEdgeNotEnabled) {
- TEST_DESCRIPTION("Validation should catch using CLAMP_TO_EDGE addressing mode if the extension is not enabled.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkSamplerCreateInfo-addressModeU-01079");
- VkSampler sampler = VK_NULL_HANDLE;
- VkSamplerCreateInfo sampler_info = SafeSaneSamplerCreateInfo();
- // Set the modes to cause the error
- sampler_info.addressModeU = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE;
- sampler_info.addressModeV = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE;
- sampler_info.addressModeW = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE;
-
- vkCreateSampler(m_device->device(), &sampler_info, NULL, &sampler);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, AnisotropyFeatureDisabled) {
- TEST_DESCRIPTION("Validation should check anisotropy parameters are correct with samplerAnisotropy disabled.");
-
- // Determine if required device features are available
- VkPhysicalDeviceFeatures device_features = {};
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- ASSERT_NO_FATAL_FAILURE(GetPhysicalDeviceFeatures(&device_features));
- device_features.samplerAnisotropy = VK_FALSE; // force anisotropy off
- ASSERT_NO_FATAL_FAILURE(InitState(&device_features));
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkSamplerCreateInfo-anisotropyEnable-01070");
- VkSamplerCreateInfo sampler_info = SafeSaneSamplerCreateInfo();
- // With samplerAnisotropy disabled, the sampler must not enable it.
- sampler_info.anisotropyEnable = VK_TRUE;
- VkSampler sampler = VK_NULL_HANDLE;
-
- VkResult err;
- err = vkCreateSampler(m_device->device(), &sampler_info, NULL, &sampler);
- m_errorMonitor->VerifyFound();
- if (VK_SUCCESS == err) {
- vkDestroySampler(m_device->device(), sampler, NULL);
- }
- sampler = VK_NULL_HANDLE;
-}
-
-TEST_F(VkLayerTest, AnisotropyFeatureEnabled) {
- TEST_DESCRIPTION("Validation must check several conditions that apply only when Anisotropy is enabled.");
-
- // Determine if required device features are available
- VkPhysicalDeviceFeatures device_features = {};
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- ASSERT_NO_FATAL_FAILURE(GetPhysicalDeviceFeatures(&device_features));
-
- // These tests require that the device support anisotropic filtering
- if (VK_TRUE != device_features.samplerAnisotropy) {
- printf("%s Test requires unsupported samplerAnisotropy feature. Skipped.\n", kSkipPrefix);
- return;
- }
-
- bool cubic_support = false;
- if (DeviceExtensionSupported(gpu(), nullptr, "VK_IMG_filter_cubic")) {
- m_device_extension_names.push_back("VK_IMG_filter_cubic");
- cubic_support = true;
- }
-
- VkSamplerCreateInfo sampler_info_ref = SafeSaneSamplerCreateInfo();
- sampler_info_ref.anisotropyEnable = VK_TRUE;
- VkSamplerCreateInfo sampler_info = sampler_info_ref;
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- // maxAnisotropy out-of-bounds low.
- sampler_info.maxAnisotropy = NearestSmaller(1.0F);
- CreateSamplerTest(*this, &sampler_info, "VUID-VkSamplerCreateInfo-anisotropyEnable-01071");
- sampler_info.maxAnisotropy = sampler_info_ref.maxAnisotropy;
-
- // maxAnisotropy out-of-bounds high.
- sampler_info.maxAnisotropy = NearestGreater(m_device->phy().properties().limits.maxSamplerAnisotropy);
- CreateSamplerTest(*this, &sampler_info, "VUID-VkSamplerCreateInfo-anisotropyEnable-01071");
- sampler_info.maxAnisotropy = sampler_info_ref.maxAnisotropy;
-
- // Both anisotropy and unnormalized coords enabled
- sampler_info.unnormalizedCoordinates = VK_TRUE;
- // If unnormalizedCoordinates is VK_TRUE, minLod and maxLod must be zero
- sampler_info.minLod = 0;
- sampler_info.maxLod = 0;
- CreateSamplerTest(*this, &sampler_info, "VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01076");
- sampler_info.unnormalizedCoordinates = sampler_info_ref.unnormalizedCoordinates;
-
- // Both anisotropy and cubic filtering enabled
- if (cubic_support) {
- sampler_info.minFilter = VK_FILTER_CUBIC_IMG;
- CreateSamplerTest(*this, &sampler_info, "VUID-VkSamplerCreateInfo-magFilter-01081");
- sampler_info.minFilter = sampler_info_ref.minFilter;
-
- sampler_info.magFilter = VK_FILTER_CUBIC_IMG;
- CreateSamplerTest(*this, &sampler_info, "VUID-VkSamplerCreateInfo-magFilter-01081");
- sampler_info.magFilter = sampler_info_ref.magFilter;
- } else {
- printf("%s Test requires unsupported extension \"VK_IMG_filter_cubic\". Skipped.\n", kSkipPrefix);
- }
-}
-
-TEST_F(VkLayerTest, UnnormalizedCoordinatesEnabled) {
- TEST_DESCRIPTION("Validate restrictions on sampler parameters when unnormalizedCoordinates is true.");
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- VkSamplerCreateInfo sampler_info_ref = SafeSaneSamplerCreateInfo();
- sampler_info_ref.unnormalizedCoordinates = VK_TRUE;
- sampler_info_ref.minLod = 0.0f;
- sampler_info_ref.maxLod = 0.0f;
- VkSamplerCreateInfo sampler_info = sampler_info_ref;
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- // min and mag filters must be the same
- sampler_info.minFilter = VK_FILTER_NEAREST;
- sampler_info.magFilter = VK_FILTER_LINEAR;
- CreateSamplerTest(*this, &sampler_info, "VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01072");
- std::swap(sampler_info.minFilter, sampler_info.magFilter);
- CreateSamplerTest(*this, &sampler_info, "VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01072");
- sampler_info = sampler_info_ref;
-
- // mipmapMode must be NEAREST
- sampler_info.mipmapMode = VK_SAMPLER_MIPMAP_MODE_LINEAR;
- CreateSamplerTest(*this, &sampler_info, "VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01073");
- sampler_info = sampler_info_ref;
-
- // minLod and maxLod must be zero
- sampler_info.maxLod = 3.14159f;
- CreateSamplerTest(*this, &sampler_info, "VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01074");
- sampler_info.minLod = 2.71828f;
- CreateSamplerTest(*this, &sampler_info, "VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01074");
- sampler_info = sampler_info_ref;
-
- // addressModeU and addressModeV must both be CLAMP_TO_EDGE or CLAMP_TO_BORDER
- // checks all 12 invalid combinations out of 16 total combinations
- const std::array<VkSamplerAddressMode, 4> kAddressModes = {{
- VK_SAMPLER_ADDRESS_MODE_REPEAT,
- VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,
- VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
- VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,
- }};
- for (const auto umode : kAddressModes) {
- for (const auto vmode : kAddressModes) {
- if ((umode != VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE && umode != VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER) ||
- (vmode != VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE && vmode != VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER)) {
- sampler_info.addressModeU = umode;
- sampler_info.addressModeV = vmode;
- CreateSamplerTest(*this, &sampler_info, "VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01075");
- }
- }
- }
- sampler_info = sampler_info_ref;
-
- // VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01076 is tested in AnisotropyFeatureEnabled above
- // Since it requires checking/enabling the anisotropic filtering feature, it's easier to do it
- // with the other anisotropic tests.
-
- // compareEnable must be VK_FALSE
- sampler_info.compareEnable = VK_TRUE;
- CreateSamplerTest(*this, &sampler_info, "VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01077");
- sampler_info = sampler_info_ref;
-}
-
-TEST_F(VkLayerTest, UpdateBufferAlignment) {
- TEST_DESCRIPTION("Check alignment parameters for vkCmdUpdateBuffer");
- uint32_t updateData[] = {1, 2, 3, 4, 5, 6, 7, 8};
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- VkMemoryPropertyFlags reqs = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
- VkBufferObj buffer;
- buffer.init_as_dst(*m_device, (VkDeviceSize)20, reqs);
-
- m_commandBuffer->begin();
- // Introduce failure by using a dstOffset that is not a multiple of 4
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " is not a multiple of 4");
- m_commandBuffer->UpdateBuffer(buffer.handle(), 1, 4, updateData);
- m_errorMonitor->VerifyFound();
-
- // Introduce failure by using a dataSize that is not a multiple of 4
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " is not a multiple of 4");
- m_commandBuffer->UpdateBuffer(buffer.handle(), 0, 6, updateData);
- m_errorMonitor->VerifyFound();
-
- // Introduce failure by using dataSize that is < 0
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "must be greater than zero and less than or equal to 65536");
- m_commandBuffer->UpdateBuffer(buffer.handle(), 0, (VkDeviceSize)-44, updateData);
- m_errorMonitor->VerifyFound();
-
- // Introduce failure by using dataSize that is > 65536
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "must be greater than zero and less than or equal to 65536");
- m_commandBuffer->UpdateBuffer(buffer.handle(), 0, (VkDeviceSize)80000, updateData);
- m_errorMonitor->VerifyFound();
-
- m_commandBuffer->end();
-}
-
-TEST_F(VkLayerTest, FillBufferAlignment) {
- TEST_DESCRIPTION("Check alignment parameters for vkCmdFillBuffer");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- VkMemoryPropertyFlags reqs = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
- VkBufferObj buffer;
- buffer.init_as_dst(*m_device, (VkDeviceSize)20, reqs);
-
- m_commandBuffer->begin();
-
- // Introduce failure by using a dstOffset that is not a multiple of 4
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " is not a multiple of 4");
- m_commandBuffer->FillBuffer(buffer.handle(), 1, 4, 0x11111111);
- m_errorMonitor->VerifyFound();
-
- // Introduce failure by using a size that is not a multiple of 4
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " is not a multiple of 4");
- m_commandBuffer->FillBuffer(buffer.handle(), 0, 6, 0x11111111);
- m_errorMonitor->VerifyFound();
-
- // Introduce failure by using size that is zero
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "must be greater than zero");
- m_commandBuffer->FillBuffer(buffer.handle(), 0, 0, 0x11111111);
- m_errorMonitor->VerifyFound();
-
- m_commandBuffer->end();
-}
-
-TEST_F(VkLayerTest, SparseBindingImageBufferCreate) {
- TEST_DESCRIPTION("Create buffer/image with sparse attributes but without the sparse_binding bit set");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- VkBufferCreateInfo buf_info = {};
- buf_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
- buf_info.pNext = NULL;
- buf_info.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
- buf_info.size = 2048;
- buf_info.queueFamilyIndexCount = 0;
- buf_info.pQueueFamilyIndices = NULL;
- buf_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
-
- if (m_device->phy().features().sparseResidencyBuffer) {
- buf_info.flags = VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT;
- CreateBufferTest(*this, &buf_info, "VUID-VkBufferCreateInfo-flags-00918");
- } else {
- printf("%s Test requires unsupported sparseResidencyBuffer feature. Skipped.\n", kSkipPrefix);
- return;
- }
-
- if (m_device->phy().features().sparseResidencyAliased) {
- buf_info.flags = VK_BUFFER_CREATE_SPARSE_ALIASED_BIT;
- CreateBufferTest(*this, &buf_info, "VUID-VkBufferCreateInfo-flags-00918");
- } else {
- printf("%s Test requires unsupported sparseResidencyAliased feature. Skipped.\n", kSkipPrefix);
- return;
- }
-
- VkImageCreateInfo image_create_info = {};
- image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- image_create_info.pNext = NULL;
- image_create_info.imageType = VK_IMAGE_TYPE_2D;
- image_create_info.format = VK_FORMAT_R8G8B8A8_UNORM;
- image_create_info.extent.width = 512;
- image_create_info.extent.height = 64;
- image_create_info.extent.depth = 1;
- image_create_info.mipLevels = 1;
- image_create_info.arrayLayers = 1;
- image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
- image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
- image_create_info.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
- image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
- image_create_info.queueFamilyIndexCount = 0;
- image_create_info.pQueueFamilyIndices = NULL;
- image_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
-
- if (m_device->phy().features().sparseResidencyImage2D) {
- image_create_info.flags = VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT;
- CreateImageTest(*this, &image_create_info, "VUID-VkImageCreateInfo-flags-00987");
- } else {
- printf("%s Test requires unsupported sparseResidencyImage2D feature. Skipped.\n", kSkipPrefix);
- return;
- }
-
- if (m_device->phy().features().sparseResidencyAliased) {
- image_create_info.flags = VK_IMAGE_CREATE_SPARSE_ALIASED_BIT;
- CreateImageTest(*this, &image_create_info, "VUID-VkImageCreateInfo-flags-00987");
- } else {
- printf("%s Test requires unsupported sparseResidencyAliased feature. Skipped.\n", kSkipPrefix);
- return;
- }
-}
-
-TEST_F(VkLayerTest, SparseResidencyImageCreateUnsupportedTypes) {
- TEST_DESCRIPTION("Create images with sparse residency with unsupported types");
-
- // Determine which device features are available
- VkPhysicalDeviceFeatures device_features = {};
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- ASSERT_NO_FATAL_FAILURE(GetPhysicalDeviceFeatures(&device_features));
-
- // Mask out device features we don't want and initialize device state
- device_features.sparseResidencyImage2D = VK_FALSE;
- device_features.sparseResidencyImage3D = VK_FALSE;
- ASSERT_NO_FATAL_FAILURE(InitState(&device_features));
-
- if (!m_device->phy().features().sparseBinding) {
- printf("%s Test requires unsupported sparseBinding feature. Skipped.\n", kSkipPrefix);
- return;
- }
-
- VkImageCreateInfo image_create_info = {};
- image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- image_create_info.pNext = NULL;
- image_create_info.imageType = VK_IMAGE_TYPE_1D;
- image_create_info.format = VK_FORMAT_R8G8B8A8_UNORM;
- image_create_info.extent.width = 512;
- image_create_info.extent.height = 1;
- image_create_info.extent.depth = 1;
- image_create_info.mipLevels = 1;
- image_create_info.arrayLayers = 1;
- image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
- image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
- image_create_info.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
- image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
- image_create_info.queueFamilyIndexCount = 0;
- image_create_info.pQueueFamilyIndices = NULL;
- image_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
- image_create_info.flags = VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT | VK_IMAGE_CREATE_SPARSE_BINDING_BIT;
-
- // 1D image w/ sparse residency is an error
- CreateImageTest(*this, &image_create_info, "VUID-VkImageCreateInfo-imageType-00970");
-
- // 2D image w/ sparse residency when feature isn't available
- image_create_info.imageType = VK_IMAGE_TYPE_2D;
- image_create_info.extent.height = 64;
- CreateImageTest(*this, &image_create_info, "VUID-VkImageCreateInfo-imageType-00971");
-
- // 3D image w/ sparse residency when feature isn't available
- image_create_info.imageType = VK_IMAGE_TYPE_3D;
- image_create_info.extent.depth = 8;
- CreateImageTest(*this, &image_create_info, "VUID-VkImageCreateInfo-imageType-00972");
-}
-
-TEST_F(VkLayerTest, SparseResidencyImageCreateUnsupportedSamples) {
- TEST_DESCRIPTION("Create images with sparse residency with unsupported tiling or sample counts");
-
- // Determine which device features are available
- VkPhysicalDeviceFeatures device_features = {};
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- ASSERT_NO_FATAL_FAILURE(GetPhysicalDeviceFeatures(&device_features));
-
- // These tests require that the device support sparse residency for 2D images
- if (VK_TRUE != device_features.sparseResidencyImage2D) {
- printf("%s Test requires unsupported SparseResidencyImage2D feature. Skipped.\n", kSkipPrefix);
- return;
- }
-
- // Mask out device features we don't want and initialize device state
- device_features.sparseResidency2Samples = VK_FALSE;
- device_features.sparseResidency4Samples = VK_FALSE;
- device_features.sparseResidency8Samples = VK_FALSE;
- device_features.sparseResidency16Samples = VK_FALSE;
- ASSERT_NO_FATAL_FAILURE(InitState(&device_features));
-
- VkImageCreateInfo image_create_info = {};
- image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- image_create_info.pNext = NULL;
- image_create_info.imageType = VK_IMAGE_TYPE_2D;
- image_create_info.format = VK_FORMAT_R8G8B8A8_UNORM;
- image_create_info.extent.width = 64;
- image_create_info.extent.height = 64;
- image_create_info.extent.depth = 1;
- image_create_info.mipLevels = 1;
- image_create_info.arrayLayers = 1;
- image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
- image_create_info.tiling = VK_IMAGE_TILING_LINEAR;
- image_create_info.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
- image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
- image_create_info.queueFamilyIndexCount = 0;
- image_create_info.pQueueFamilyIndices = NULL;
- image_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
- image_create_info.flags = VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT | VK_IMAGE_CREATE_SPARSE_BINDING_BIT;
-
- // 2D image w/ sparse residency and linear tiling is an error
- CreateImageTest(*this, &image_create_info,
- "VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT then image tiling of VK_IMAGE_TILING_LINEAR is not supported");
- image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
-
- // Multi-sample image w/ sparse residency when feature isn't available (4 flavors)
- image_create_info.samples = VK_SAMPLE_COUNT_2_BIT;
- CreateImageTest(*this, &image_create_info, "VUID-VkImageCreateInfo-imageType-00973");
-
- image_create_info.samples = VK_SAMPLE_COUNT_4_BIT;
- CreateImageTest(*this, &image_create_info, "VUID-VkImageCreateInfo-imageType-00974");
-
- image_create_info.samples = VK_SAMPLE_COUNT_8_BIT;
- CreateImageTest(*this, &image_create_info, "VUID-VkImageCreateInfo-imageType-00975");
-
- image_create_info.samples = VK_SAMPLE_COUNT_16_BIT;
- CreateImageTest(*this, &image_create_info, "VUID-VkImageCreateInfo-imageType-00976");
-}
-
-TEST_F(VkLayerTest, InvalidMemoryMapping) {
- TEST_DESCRIPTION("Attempt to map memory in a number of incorrect ways");
- VkResult err;
- bool pass;
- ASSERT_NO_FATAL_FAILURE(Init());
-
- VkBuffer buffer;
- VkDeviceMemory mem;
- VkMemoryRequirements mem_reqs;
-
- const VkDeviceSize atom_size = m_device->props.limits.nonCoherentAtomSize;
-
- VkBufferCreateInfo buf_info = {};
- buf_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
- buf_info.pNext = NULL;
- buf_info.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
- buf_info.size = 256;
- buf_info.queueFamilyIndexCount = 0;
- buf_info.pQueueFamilyIndices = NULL;
- buf_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
- buf_info.flags = 0;
- err = vkCreateBuffer(m_device->device(), &buf_info, NULL, &buffer);
- ASSERT_VK_SUCCESS(err);
-
- vkGetBufferMemoryRequirements(m_device->device(), buffer, &mem_reqs);
- VkMemoryAllocateInfo alloc_info = {};
- alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
- alloc_info.pNext = NULL;
- alloc_info.memoryTypeIndex = 0;
-
- // Ensure memory is big enough for both bindings
- static const VkDeviceSize allocation_size = 0x10000;
- alloc_info.allocationSize = allocation_size;
- pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &alloc_info, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
- if (!pass) {
- printf("%s Failed to set memory type.\n", kSkipPrefix);
- vkDestroyBuffer(m_device->device(), buffer, NULL);
- return;
- }
- err = vkAllocateMemory(m_device->device(), &alloc_info, NULL, &mem);
- ASSERT_VK_SUCCESS(err);
-
- uint8_t *pData;
- // Attempting to map a memory range of size zero is invalid
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VkMapMemory: Attempting to map memory range of size zero");
- err = vkMapMemory(m_device->device(), mem, 0, 0, 0, (void **)&pData);
- m_errorMonitor->VerifyFound();
- // Map memory twice
- err = vkMapMemory(m_device->device(), mem, 0, mem_reqs.size, 0, (void **)&pData);
- ASSERT_VK_SUCCESS(err);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "UNASSIGNED-CoreValidation-MemTrack-InvalidMap");
- err = vkMapMemory(m_device->device(), mem, 0, mem_reqs.size, 0, (void **)&pData);
- m_errorMonitor->VerifyFound();
-
- // Unmap the memory to avoid re-map error
- vkUnmapMemory(m_device->device(), mem);
- // overstep allocation with VK_WHOLE_SIZE
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- " with size of VK_WHOLE_SIZE oversteps total array size 0x");
- err = vkMapMemory(m_device->device(), mem, allocation_size + 1, VK_WHOLE_SIZE, 0, (void **)&pData);
- m_errorMonitor->VerifyFound();
- // overstep allocation w/o VK_WHOLE_SIZE
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " oversteps total array size 0x");
- err = vkMapMemory(m_device->device(), mem, 1, allocation_size, 0, (void **)&pData);
- m_errorMonitor->VerifyFound();
- // Now error due to unmapping memory that's not mapped
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Unmapping Memory without memory being mapped: ");
- vkUnmapMemory(m_device->device(), mem);
- m_errorMonitor->VerifyFound();
-
- // Now map memory and cause errors due to flushing invalid ranges
- err = vkMapMemory(m_device->device(), mem, 4 * atom_size, VK_WHOLE_SIZE, 0, (void **)&pData);
- ASSERT_VK_SUCCESS(err);
- VkMappedMemoryRange mmr = {};
- mmr.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
- mmr.memory = mem;
- mmr.offset = atom_size; // Error b/c offset less than offset of mapped mem
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMappedMemoryRange-size-00685");
- vkFlushMappedMemoryRanges(m_device->device(), 1, &mmr);
- m_errorMonitor->VerifyFound();
-
- // Now flush range that oversteps mapped range
- vkUnmapMemory(m_device->device(), mem);
- err = vkMapMemory(m_device->device(), mem, 0, 4 * atom_size, 0, (void **)&pData);
- ASSERT_VK_SUCCESS(err);
- mmr.offset = atom_size;
- mmr.size = 4 * atom_size; // Flushing bounds exceed mapped bounds
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMappedMemoryRange-size-00685");
- vkFlushMappedMemoryRanges(m_device->device(), 1, &mmr);
- m_errorMonitor->VerifyFound();
-
- // Now flush range with VK_WHOLE_SIZE that oversteps offset
- vkUnmapMemory(m_device->device(), mem);
- err = vkMapMemory(m_device->device(), mem, 2 * atom_size, 4 * atom_size, 0, (void **)&pData);
- ASSERT_VK_SUCCESS(err);
- mmr.offset = atom_size;
- mmr.size = VK_WHOLE_SIZE;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMappedMemoryRange-size-00686");
- vkFlushMappedMemoryRanges(m_device->device(), 1, &mmr);
- m_errorMonitor->VerifyFound();
-
- // Some platforms have an atom size of 1, which makes this part of the test meaningless
- if (atom_size > 3) {
- // Now with an offset NOT a multiple of the device limit
- vkUnmapMemory(m_device->device(), mem);
- err = vkMapMemory(m_device->device(), mem, 0, 4 * atom_size, 0, (void **)&pData);
- ASSERT_VK_SUCCESS(err);
- mmr.offset = 3; // Not a multiple of atom_size
- mmr.size = VK_WHOLE_SIZE;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMappedMemoryRange-offset-00687");
- vkFlushMappedMemoryRanges(m_device->device(), 1, &mmr);
- m_errorMonitor->VerifyFound();
-
- // Now with a size NOT a multiple of the device limit
- vkUnmapMemory(m_device->device(), mem);
- err = vkMapMemory(m_device->device(), mem, 0, 4 * atom_size, 0, (void **)&pData);
- ASSERT_VK_SUCCESS(err);
- mmr.offset = atom_size;
- mmr.size = 2 * atom_size + 1; // Not a multiple of atom_size
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMappedMemoryRange-size-01390");
- vkFlushMappedMemoryRanges(m_device->device(), 1, &mmr);
- m_errorMonitor->VerifyFound();
- }
-
- pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &alloc_info, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT,
- VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
- if (!pass) {
- printf("%s Failed to set memory type.\n", kSkipPrefix);
- vkFreeMemory(m_device->device(), mem, NULL);
- vkDestroyBuffer(m_device->device(), buffer, NULL);
- return;
- }
- // TODO : If we can get HOST_VISIBLE w/o HOST_COHERENT we can test cases of
- // kVUID_Core_MemTrack_InvalidMap in validateAndCopyNoncoherentMemoryToDriver()
-
- vkDestroyBuffer(m_device->device(), buffer, NULL);
- vkFreeMemory(m_device->device(), mem, NULL);
-}
-
-TEST_F(VkLayerTest, MapMemWithoutHostVisibleBit) {
- TEST_DESCRIPTION("Allocate memory that is not mappable and then attempt to map it.");
- VkResult err;
- bool pass;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkMapMemory-memory-00682");
- ASSERT_NO_FATAL_FAILURE(Init());
-
- VkMemoryAllocateInfo mem_alloc = {};
- mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
- mem_alloc.pNext = NULL;
- mem_alloc.allocationSize = 1024;
-
- pass = m_device->phy().set_memory_type(0xFFFFFFFF, &mem_alloc, 0, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
- if (!pass) { // If we can't find any unmappable memory this test doesn't
- // make sense
- printf("%s No unmappable memory types found, skipping test\n", kSkipPrefix);
- return;
- }
-
- VkDeviceMemory mem;
- err = vkAllocateMemory(m_device->device(), &mem_alloc, NULL, &mem);
- ASSERT_VK_SUCCESS(err);
-
- void *mappedAddress = NULL;
- err = vkMapMemory(m_device->device(), mem, 0, VK_WHOLE_SIZE, 0, &mappedAddress);
- m_errorMonitor->VerifyFound();
-
- vkFreeMemory(m_device->device(), mem, NULL);
-}
-
-TEST_F(VkLayerTest, RebindMemory) {
- VkResult err;
- bool pass;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindImageMemory-image-01044");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- // Create an image, bind it to one memory object, and then try to bind it to a second one
- VkImage image;
- VkDeviceMemory mem1;
- VkDeviceMemory mem2;
- VkMemoryRequirements mem_reqs;
-
- const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
- const int32_t tex_width = 32;
- const int32_t tex_height = 32;
-
- VkImageCreateInfo image_create_info = {};
- image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- image_create_info.pNext = NULL;
- image_create_info.imageType = VK_IMAGE_TYPE_2D;
- image_create_info.format = tex_format;
- image_create_info.extent.width = tex_width;
- image_create_info.extent.height = tex_height;
- image_create_info.extent.depth = 1;
- image_create_info.mipLevels = 1;
- image_create_info.arrayLayers = 1;
- image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
- image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
- image_create_info.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
- image_create_info.flags = 0;
-
- VkMemoryAllocateInfo mem_alloc = {};
- mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
- mem_alloc.pNext = NULL;
- mem_alloc.allocationSize = 0;
- mem_alloc.memoryTypeIndex = 0;
-
- // memoryTypeIndex is set arbitrarily here; set_memory_type() below picks a valid index
- mem_alloc.memoryTypeIndex = 1;
- err = vkCreateImage(m_device->device(), &image_create_info, NULL, &image);
- ASSERT_VK_SUCCESS(err);
-
- vkGetImageMemoryRequirements(m_device->device(), image, &mem_reqs);
-
- mem_alloc.allocationSize = mem_reqs.size;
- pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &mem_alloc, 0);
- ASSERT_TRUE(pass);
-
- // allocate 2 memory objects
- err = vkAllocateMemory(m_device->device(), &mem_alloc, NULL, &mem1);
- ASSERT_VK_SUCCESS(err);
- err = vkAllocateMemory(m_device->device(), &mem_alloc, NULL, &mem2);
- ASSERT_VK_SUCCESS(err);
-
- // Bind first memory object to Image object
- err = vkBindImageMemory(m_device->device(), image, mem1, 0);
- ASSERT_VK_SUCCESS(err);
-
- // Introduce validation failure, try to bind a different memory object to
- // the same image object
- err = vkBindImageMemory(m_device->device(), image, mem2, 0);
-
- m_errorMonitor->VerifyFound();
-
- vkDestroyImage(m_device->device(), image, NULL);
- vkFreeMemory(m_device->device(), mem1, NULL);
- vkFreeMemory(m_device->device(), mem2, NULL);
-}
-
-TEST_F(VkLayerTest, QueryMemoryCommitmentWithoutLazyProperty) {
- TEST_DESCRIPTION("Attempt to query memory commitment on memory without lazy allocation");
- ASSERT_NO_FATAL_FAILURE(Init());
-
- auto image_ci = vk_testing::Image::create_info();
- image_ci.imageType = VK_IMAGE_TYPE_2D;
- image_ci.format = VK_FORMAT_B8G8R8A8_UNORM;
- image_ci.extent.width = 32;
- image_ci.extent.height = 32;
- image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
- image_ci.usage = VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
- VkImageObj image(m_device);
- image.init_no_mem(*m_device, image_ci);
-
- auto mem_reqs = image.memory_requirements();
- // memory_type_index is set to 0 here, but is set properly below
- auto image_alloc_info = vk_testing::DeviceMemory::alloc_info(mem_reqs.size, 0);
-
- bool pass;
- // the last argument is the "forbid" argument for set_memory_type, disallowing
- // that particular memory type rather than requiring it
- pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &image_alloc_info, 0, VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT);
- if (!pass) {
- printf("%s Failed to set memory type.\n", kSkipPrefix);
- return;
- }
- vk_testing::DeviceMemory mem;
- mem.init(*m_device, image_alloc_info);
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkGetDeviceMemoryCommitment-memory-00690");
- VkDeviceSize size;
- vkGetDeviceMemoryCommitment(m_device->device(), mem.handle(), &size);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, InvalidUsageBits) {
- TEST_DESCRIPTION(
- "Specify wrong usage for image then create conflicting view of image Initialize buffer with wrong usage then perform copy "
- "expecting errors from both the image and the buffer (2 calls)");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- auto format = FindSupportedDepthStencilFormat(gpu());
- if (!format) {
- printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
- return;
- }
-
- VkImageObj image(m_device);
- // Initialize image with transfer source usage
- image.Init(128, 128, 1, format, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
- ASSERT_TRUE(image.initialized());
-
- VkImageView dsv;
- VkImageViewCreateInfo dsvci = {};
- dsvci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
- dsvci.image = image.handle();
- dsvci.viewType = VK_IMAGE_VIEW_TYPE_2D;
- dsvci.format = format;
- dsvci.subresourceRange.layerCount = 1;
- dsvci.subresourceRange.baseMipLevel = 0;
- dsvci.subresourceRange.levelCount = 1;
- dsvci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
-
- // Create a view with depth / stencil aspect for image with different usage
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "UNASSIGNED-CoreValidation-MemTrack-InvalidUsageFlag");
- vkCreateImageView(m_device->device(), &dsvci, NULL, &dsv);
- m_errorMonitor->VerifyFound();
-
- // Initialize buffer with TRANSFER_DST usage
- VkBufferObj buffer;
- VkMemoryPropertyFlags reqs = 0;
- buffer.init_as_dst(*m_device, 128 * 128, reqs);
- VkBufferImageCopy region = {};
- region.bufferRowLength = 128;
- region.bufferImageHeight = 128;
- region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
- region.imageSubresource.layerCount = 1;
- region.imageExtent.height = 16;
- region.imageExtent.width = 16;
- region.imageExtent.depth = 1;
-
- // Buffer usage not set to TRANSFER_SRC and image usage not set to TRANSFER_DST
- m_commandBuffer->begin();
-
- // two separate errors from this call:
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyBufferToImage-dstImage-00177");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyBufferToImage-srcBuffer-00174");
-
- vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer.handle(), image.handle(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1,
- &region);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, CopyBufferToCompressedImage) {
- TEST_DESCRIPTION("Copy buffer to compressed image when buffer is larger than image.");
- ASSERT_NO_FATAL_FAILURE(Init());
-
- // Verify format support
- if (!ImageFormatAndFeaturesSupported(gpu(), VK_FORMAT_BC1_RGBA_SRGB_BLOCK, VK_IMAGE_TILING_OPTIMAL,
- VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR)) {
- printf("%s Required formats/features not supported - CopyBufferToCompressedImage skipped.\n", kSkipPrefix);
- return;
- }
-
- VkImageObj width_image(m_device);
- VkImageObj height_image(m_device);
- VkBufferObj buffer;
- VkMemoryPropertyFlags reqs = 0;
- buffer.init_as_src(*m_device, 8 * 4 * 2, reqs);
- VkBufferImageCopy region = {};
- region.bufferRowLength = 0;
- region.bufferImageHeight = 0;
- region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- region.imageSubresource.layerCount = 1;
- region.imageExtent.width = 8;
- region.imageExtent.height = 4;
- region.imageExtent.depth = 1;
-
- width_image.Init(5, 4, 1, VK_FORMAT_BC1_RGBA_SRGB_BLOCK, VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL);
- height_image.Init(8, 3, 1, VK_FORMAT_BC1_RGBA_SRGB_BLOCK, VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL);
- if (!width_image.initialized() || (!height_image.initialized())) {
- printf("%s Unable to initialize surfaces - UncompressedToCompressedImageCopy skipped.\n", kSkipPrefix);
- return;
- }
- m_commandBuffer->begin();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferImageCopy-imageOffset-00197");
- vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer.handle(), width_image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferImageCopy-imageOffset-00200");
- m_errorMonitor->SetUnexpectedError("VUID-vkCmdCopyBufferToImage-pRegions-00172");
-
- VkResult err;
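-    // A 3D compressed image created with extent.depth = 1: the copy below sets imageExtent.depth = 2,
-    // exceeding the subresource depth and triggering VUID-VkBufferImageCopy-imageOffset-00200.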
- VkImageCreateInfo depth_image_create_info = {};
- depth_image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- depth_image_create_info.pNext = NULL;
- depth_image_create_info.imageType = VK_IMAGE_TYPE_3D;
- depth_image_create_info.format = VK_FORMAT_BC1_RGBA_SRGB_BLOCK;
- depth_image_create_info.extent.width = 8;
- depth_image_create_info.extent.height = 4;
- depth_image_create_info.extent.depth = 1;
- depth_image_create_info.mipLevels = 1;
- depth_image_create_info.arrayLayers = 1;
- depth_image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
- depth_image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
- depth_image_create_info.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
- depth_image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
- depth_image_create_info.queueFamilyIndexCount = 0;
- depth_image_create_info.pQueueFamilyIndices = NULL;
-
- VkImage depth_image = VK_NULL_HANDLE;
- err = vkCreateImage(m_device->handle(), &depth_image_create_info, NULL, &depth_image);
- ASSERT_VK_SUCCESS(err);
-
-    VkDeviceMemory mem1;
-    VkMemoryRequirements mem_reqs;
-    VkMemoryAllocateInfo mem_alloc = {};
-    mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
-    mem_alloc.pNext = NULL;
-    mem_alloc.allocationSize = 0;
-    mem_alloc.memoryTypeIndex = 0;
- vkGetImageMemoryRequirements(m_device->device(), depth_image, &mem_reqs);
- mem_alloc.allocationSize = mem_reqs.size;
- bool pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &mem_alloc, 0);
- ASSERT_TRUE(pass);
- err = vkAllocateMemory(m_device->device(), &mem_alloc, NULL, &mem1);
- ASSERT_VK_SUCCESS(err);
-    err = vkBindImageMemory(m_device->device(), depth_image, mem1, 0);
-    ASSERT_VK_SUCCESS(err);
-
- region.imageExtent.depth = 2;
- vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer.handle(), depth_image, VK_IMAGE_LAYOUT_GENERAL, 1, &region);
- m_errorMonitor->VerifyFound();
-
- vkDestroyImage(m_device->device(), depth_image, NULL);
- vkFreeMemory(m_device->device(), mem1, NULL);
- m_commandBuffer->end();
-}
-
-TEST_F(VkLayerTest, CreateUnknownObject) {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkGetImageMemoryRequirements-image-parameter");
-
- TEST_DESCRIPTION("Pass an invalid image object handle into a Vulkan API call.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- // Pass bogus handle into GetImageMemoryRequirements
- VkMemoryRequirements mem_reqs;
- uint64_t fakeImageHandle = 0xCADECADE;
- VkImage fauxImage = reinterpret_cast<VkImage &>(fakeImageHandle);
-
- vkGetImageMemoryRequirements(m_device->device(), fauxImage, &mem_reqs);
-
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, BindImageInvalidMemoryType) {
- VkResult err;
-
- TEST_DESCRIPTION("Test validation check for an invalid memory type index during bind[Buffer|Image]Memory time");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- // Create an image, allocate memory, set a bad typeIndex and then try to
- // bind it
- VkImage image;
- VkDeviceMemory mem;
- VkMemoryRequirements mem_reqs;
- const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
- const int32_t tex_width = 32;
- const int32_t tex_height = 32;
-
- VkImageCreateInfo image_create_info = {};
- image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- image_create_info.pNext = NULL;
- image_create_info.imageType = VK_IMAGE_TYPE_2D;
- image_create_info.format = tex_format;
- image_create_info.extent.width = tex_width;
- image_create_info.extent.height = tex_height;
- image_create_info.extent.depth = 1;
- image_create_info.mipLevels = 1;
- image_create_info.arrayLayers = 1;
- image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
- image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
- image_create_info.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
- image_create_info.flags = 0;
-
- VkMemoryAllocateInfo mem_alloc = {};
- mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
- mem_alloc.pNext = NULL;
- mem_alloc.allocationSize = 0;
- mem_alloc.memoryTypeIndex = 0;
-
- err = vkCreateImage(m_device->device(), &image_create_info, NULL, &image);
- ASSERT_VK_SUCCESS(err);
-
- vkGetImageMemoryRequirements(m_device->device(), image, &mem_reqs);
- mem_alloc.allocationSize = mem_reqs.size;
-
- // Introduce Failure, select invalid TypeIndex
- VkPhysicalDeviceMemoryProperties memory_info;
-
- vkGetPhysicalDeviceMemoryProperties(gpu(), &memory_info);
- unsigned int i;
- for (i = 0; i < memory_info.memoryTypeCount; i++) {
- if ((mem_reqs.memoryTypeBits & (1 << i)) == 0) {
- mem_alloc.memoryTypeIndex = i;
- break;
- }
- }
- if (i >= memory_info.memoryTypeCount) {
- printf("%s No invalid memory type index could be found; skipped.\n", kSkipPrefix);
- vkDestroyImage(m_device->device(), image, NULL);
- return;
- }
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "for this object type are not compatible with the memory");
-
- err = vkAllocateMemory(m_device->device(), &mem_alloc, NULL, &mem);
- ASSERT_VK_SUCCESS(err);
-
- err = vkBindImageMemory(m_device->device(), image, mem, 0);
- (void)err;
-
- m_errorMonitor->VerifyFound();
-
- vkDestroyImage(m_device->device(), image, NULL);
- vkFreeMemory(m_device->device(), mem, NULL);
-}
-
-TEST_F(VkLayerTest, BindInvalidMemory) {
- VkResult err;
- bool pass;
-
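-    TEST_DESCRIPTION("Bind memory to images and buffers using freed memory, duplicate bindings, invalid offsets, incompatible memory types, and sparse-binding resources.");
-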
- ASSERT_NO_FATAL_FAILURE(Init());
-
- const VkFormat tex_format = VK_FORMAT_R8G8B8A8_UNORM;
- const int32_t tex_width = 256;
- const int32_t tex_height = 256;
-
- VkImageCreateInfo image_create_info = {};
- image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- image_create_info.pNext = NULL;
- image_create_info.imageType = VK_IMAGE_TYPE_2D;
- image_create_info.format = tex_format;
- image_create_info.extent.width = tex_width;
- image_create_info.extent.height = tex_height;
- image_create_info.extent.depth = 1;
- image_create_info.mipLevels = 1;
- image_create_info.arrayLayers = 1;
- image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
- image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
- image_create_info.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
- image_create_info.flags = 0;
-
- VkBufferCreateInfo buffer_create_info = {};
- buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
- buffer_create_info.pNext = NULL;
- buffer_create_info.flags = 0;
- buffer_create_info.size = 4 * 1024 * 1024;
- buffer_create_info.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
- buffer_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
-
- // Create an image/buffer, allocate memory, free it, and then try to bind it
- {
- VkImage image = VK_NULL_HANDLE;
- VkBuffer buffer = VK_NULL_HANDLE;
- err = vkCreateImage(device(), &image_create_info, NULL, &image);
- ASSERT_VK_SUCCESS(err);
- err = vkCreateBuffer(device(), &buffer_create_info, NULL, &buffer);
- ASSERT_VK_SUCCESS(err);
- VkMemoryRequirements image_mem_reqs = {}, buffer_mem_reqs = {};
- vkGetImageMemoryRequirements(device(), image, &image_mem_reqs);
- vkGetBufferMemoryRequirements(device(), buffer, &buffer_mem_reqs);
-
- VkMemoryAllocateInfo image_mem_alloc = {}, buffer_mem_alloc = {};
- image_mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
- image_mem_alloc.allocationSize = image_mem_reqs.size;
- pass = m_device->phy().set_memory_type(image_mem_reqs.memoryTypeBits, &image_mem_alloc, 0);
- ASSERT_TRUE(pass);
- buffer_mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
- buffer_mem_alloc.allocationSize = buffer_mem_reqs.size;
- pass = m_device->phy().set_memory_type(buffer_mem_reqs.memoryTypeBits, &buffer_mem_alloc, 0);
- ASSERT_TRUE(pass);
-
- VkDeviceMemory image_mem = VK_NULL_HANDLE, buffer_mem = VK_NULL_HANDLE;
- err = vkAllocateMemory(device(), &image_mem_alloc, NULL, &image_mem);
- ASSERT_VK_SUCCESS(err);
- err = vkAllocateMemory(device(), &buffer_mem_alloc, NULL, &buffer_mem);
- ASSERT_VK_SUCCESS(err);
-
- vkFreeMemory(device(), image_mem, NULL);
- vkFreeMemory(device(), buffer_mem, NULL);
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindImageMemory-memory-parameter");
- err = vkBindImageMemory(device(), image, image_mem, 0);
- (void)err; // This may very well return an error.
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindBufferMemory-memory-parameter");
- err = vkBindBufferMemory(device(), buffer, buffer_mem, 0);
- (void)err; // This may very well return an error.
- m_errorMonitor->VerifyFound();
-
- vkDestroyImage(m_device->device(), image, NULL);
- vkDestroyBuffer(m_device->device(), buffer, NULL);
- }
-
- // Try to bind memory to an object that already has a memory binding
- {
- VkImage image = VK_NULL_HANDLE;
- err = vkCreateImage(device(), &image_create_info, NULL, &image);
- ASSERT_VK_SUCCESS(err);
- VkBuffer buffer = VK_NULL_HANDLE;
- err = vkCreateBuffer(device(), &buffer_create_info, NULL, &buffer);
- ASSERT_VK_SUCCESS(err);
- VkMemoryRequirements image_mem_reqs = {}, buffer_mem_reqs = {};
- vkGetImageMemoryRequirements(device(), image, &image_mem_reqs);
- vkGetBufferMemoryRequirements(device(), buffer, &buffer_mem_reqs);
- VkMemoryAllocateInfo image_alloc_info = {}, buffer_alloc_info = {};
- image_alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
- image_alloc_info.allocationSize = image_mem_reqs.size;
- buffer_alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
- buffer_alloc_info.allocationSize = buffer_mem_reqs.size;
- pass = m_device->phy().set_memory_type(image_mem_reqs.memoryTypeBits, &image_alloc_info, 0);
- ASSERT_TRUE(pass);
- pass = m_device->phy().set_memory_type(buffer_mem_reqs.memoryTypeBits, &buffer_alloc_info, 0);
- ASSERT_TRUE(pass);
- VkDeviceMemory image_mem, buffer_mem;
- err = vkAllocateMemory(device(), &image_alloc_info, NULL, &image_mem);
- ASSERT_VK_SUCCESS(err);
- err = vkAllocateMemory(device(), &buffer_alloc_info, NULL, &buffer_mem);
- ASSERT_VK_SUCCESS(err);
-
- err = vkBindImageMemory(device(), image, image_mem, 0);
- ASSERT_VK_SUCCESS(err);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindImageMemory-image-01044");
- err = vkBindImageMemory(device(), image, image_mem, 0);
- (void)err; // This may very well return an error.
- m_errorMonitor->VerifyFound();
-
- err = vkBindBufferMemory(device(), buffer, buffer_mem, 0);
- ASSERT_VK_SUCCESS(err);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindBufferMemory-buffer-01029");
- err = vkBindBufferMemory(device(), buffer, buffer_mem, 0);
- (void)err; // This may very well return an error.
- m_errorMonitor->VerifyFound();
-
- vkFreeMemory(device(), image_mem, NULL);
- vkFreeMemory(device(), buffer_mem, NULL);
- vkDestroyImage(device(), image, NULL);
- vkDestroyBuffer(device(), buffer, NULL);
- }
-
- // Try to bind memory to an object with an invalid memoryOffset
- {
- VkImage image = VK_NULL_HANDLE;
- err = vkCreateImage(device(), &image_create_info, NULL, &image);
- ASSERT_VK_SUCCESS(err);
- VkBuffer buffer = VK_NULL_HANDLE;
- err = vkCreateBuffer(device(), &buffer_create_info, NULL, &buffer);
- ASSERT_VK_SUCCESS(err);
- VkMemoryRequirements image_mem_reqs = {}, buffer_mem_reqs = {};
- vkGetImageMemoryRequirements(device(), image, &image_mem_reqs);
- vkGetBufferMemoryRequirements(device(), buffer, &buffer_mem_reqs);
- VkMemoryAllocateInfo image_alloc_info = {}, buffer_alloc_info = {};
- image_alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
- // Leave some extra space for alignment wiggle room
- image_alloc_info.allocationSize = image_mem_reqs.size + image_mem_reqs.alignment;
- buffer_alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
- buffer_alloc_info.allocationSize = buffer_mem_reqs.size + buffer_mem_reqs.alignment;
- pass = m_device->phy().set_memory_type(image_mem_reqs.memoryTypeBits, &image_alloc_info, 0);
- ASSERT_TRUE(pass);
- pass = m_device->phy().set_memory_type(buffer_mem_reqs.memoryTypeBits, &buffer_alloc_info, 0);
- ASSERT_TRUE(pass);
- VkDeviceMemory image_mem, buffer_mem;
- err = vkAllocateMemory(device(), &image_alloc_info, NULL, &image_mem);
- ASSERT_VK_SUCCESS(err);
- err = vkAllocateMemory(device(), &buffer_alloc_info, NULL, &buffer_mem);
- ASSERT_VK_SUCCESS(err);
-
- // Test unaligned memory offset
- {
- if (image_mem_reqs.alignment > 1) {
- VkDeviceSize image_offset = 1;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindImageMemory-memoryOffset-01048");
- err = vkBindImageMemory(device(), image, image_mem, image_offset);
- (void)err; // This may very well return an error.
- m_errorMonitor->VerifyFound();
- }
-
- if (buffer_mem_reqs.alignment > 1) {
- VkDeviceSize buffer_offset = 1;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindBufferMemory-memoryOffset-01036");
- err = vkBindBufferMemory(device(), buffer, buffer_mem, buffer_offset);
- (void)err; // This may very well return an error.
- m_errorMonitor->VerifyFound();
- }
- }
-
- // Test memory offsets outside the memory allocation
- {
- VkDeviceSize image_offset =
- (image_alloc_info.allocationSize + image_mem_reqs.alignment) & ~(image_mem_reqs.alignment - 1);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindImageMemory-memoryOffset-01046");
- err = vkBindImageMemory(device(), image, image_mem, image_offset);
- (void)err; // This may very well return an error.
- m_errorMonitor->VerifyFound();
-
- VkDeviceSize buffer_offset =
- (buffer_alloc_info.allocationSize + buffer_mem_reqs.alignment) & ~(buffer_mem_reqs.alignment - 1);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindBufferMemory-memoryOffset-01031");
- err = vkBindBufferMemory(device(), buffer, buffer_mem, buffer_offset);
- (void)err; // This may very well return an error.
- m_errorMonitor->VerifyFound();
- }
-
- // Test memory offsets within the memory allocation, but which leave too little memory for
- // the resource.
- {
-            VkDeviceSize image_offset = (image_mem_reqs.size - 1) & ~(image_mem_reqs.alignment - 1);
-            // Only expect the error when the offset leaves fewer than the required bytes in the allocation
-            if ((image_offset > 0) && ((image_alloc_info.allocationSize - image_offset) < image_mem_reqs.size)) {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindImageMemory-size-01049");
- err = vkBindImageMemory(device(), image, image_mem, image_offset);
- (void)err; // This may very well return an error.
- m_errorMonitor->VerifyFound();
- }
-
-            VkDeviceSize buffer_offset = (buffer_mem_reqs.size - 1) & ~(buffer_mem_reqs.alignment - 1);
-            if ((buffer_offset > 0) && ((buffer_alloc_info.allocationSize - buffer_offset) < buffer_mem_reqs.size)) {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindBufferMemory-size-01037");
- err = vkBindBufferMemory(device(), buffer, buffer_mem, buffer_offset);
- (void)err; // This may very well return an error.
- m_errorMonitor->VerifyFound();
- }
- }
-
- vkFreeMemory(device(), image_mem, NULL);
- vkFreeMemory(device(), buffer_mem, NULL);
- vkDestroyImage(device(), image, NULL);
- vkDestroyBuffer(device(), buffer, NULL);
- }
-
- // Try to bind memory to an object with an invalid memory type
- {
- VkImage image = VK_NULL_HANDLE;
- err = vkCreateImage(device(), &image_create_info, NULL, &image);
- ASSERT_VK_SUCCESS(err);
- VkBuffer buffer = VK_NULL_HANDLE;
- err = vkCreateBuffer(device(), &buffer_create_info, NULL, &buffer);
- ASSERT_VK_SUCCESS(err);
- VkMemoryRequirements image_mem_reqs = {}, buffer_mem_reqs = {};
- vkGetImageMemoryRequirements(device(), image, &image_mem_reqs);
- vkGetBufferMemoryRequirements(device(), buffer, &buffer_mem_reqs);
- VkMemoryAllocateInfo image_alloc_info = {}, buffer_alloc_info = {};
- image_alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
- image_alloc_info.allocationSize = image_mem_reqs.size;
- buffer_alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
- buffer_alloc_info.allocationSize = buffer_mem_reqs.size;
- // Create a mask of available memory types *not* supported by these resources,
- // and try to use one of them.
- VkPhysicalDeviceMemoryProperties memory_properties = {};
- vkGetPhysicalDeviceMemoryProperties(m_device->phy().handle(), &memory_properties);
- VkDeviceMemory image_mem, buffer_mem;
-
-        uint32_t image_unsupported_mem_type_bits =
-            static_cast<uint32_t>((1ULL << memory_properties.memoryTypeCount) - 1) & ~image_mem_reqs.memoryTypeBits;
- if (image_unsupported_mem_type_bits != 0) {
- pass = m_device->phy().set_memory_type(image_unsupported_mem_type_bits, &image_alloc_info, 0);
- ASSERT_TRUE(pass);
- err = vkAllocateMemory(device(), &image_alloc_info, NULL, &image_mem);
- ASSERT_VK_SUCCESS(err);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindImageMemory-memory-01047");
- err = vkBindImageMemory(device(), image, image_mem, 0);
- (void)err; // This may very well return an error.
- m_errorMonitor->VerifyFound();
- vkFreeMemory(device(), image_mem, NULL);
- }
-
-        uint32_t buffer_unsupported_mem_type_bits =
-            static_cast<uint32_t>((1ULL << memory_properties.memoryTypeCount) - 1) & ~buffer_mem_reqs.memoryTypeBits;
- if (buffer_unsupported_mem_type_bits != 0) {
- pass = m_device->phy().set_memory_type(buffer_unsupported_mem_type_bits, &buffer_alloc_info, 0);
- ASSERT_TRUE(pass);
- err = vkAllocateMemory(device(), &buffer_alloc_info, NULL, &buffer_mem);
- ASSERT_VK_SUCCESS(err);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindBufferMemory-memory-01035");
- err = vkBindBufferMemory(device(), buffer, buffer_mem, 0);
- (void)err; // This may very well return an error.
- m_errorMonitor->VerifyFound();
- vkFreeMemory(device(), buffer_mem, NULL);
- }
-
- vkDestroyImage(device(), image, NULL);
- vkDestroyBuffer(device(), buffer, NULL);
- }
-
- // Try to bind memory to an image created with sparse memory flags
- {
- VkImageCreateInfo sparse_image_create_info = image_create_info;
- sparse_image_create_info.flags |= VK_IMAGE_CREATE_SPARSE_BINDING_BIT;
- VkImageFormatProperties image_format_properties = {};
- err = vkGetPhysicalDeviceImageFormatProperties(m_device->phy().handle(), sparse_image_create_info.format,
- sparse_image_create_info.imageType, sparse_image_create_info.tiling,
- sparse_image_create_info.usage, sparse_image_create_info.flags,
- &image_format_properties);
- if (!m_device->phy().features().sparseResidencyImage2D || err == VK_ERROR_FORMAT_NOT_SUPPORTED) {
- // most likely means sparse formats aren't supported here; skip this test.
- } else {
- ASSERT_VK_SUCCESS(err);
- if (image_format_properties.maxExtent.width == 0) {
- printf("%s Sparse image format not supported; skipped.\n", kSkipPrefix);
- return;
- } else {
- VkImage sparse_image = VK_NULL_HANDLE;
- err = vkCreateImage(m_device->device(), &sparse_image_create_info, NULL, &sparse_image);
- ASSERT_VK_SUCCESS(err);
- VkMemoryRequirements sparse_mem_reqs = {};
- vkGetImageMemoryRequirements(m_device->device(), sparse_image, &sparse_mem_reqs);
- if (sparse_mem_reqs.memoryTypeBits != 0) {
- VkMemoryAllocateInfo sparse_mem_alloc = {};
- sparse_mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
- sparse_mem_alloc.pNext = NULL;
- sparse_mem_alloc.allocationSize = sparse_mem_reqs.size;
- sparse_mem_alloc.memoryTypeIndex = 0;
- pass = m_device->phy().set_memory_type(sparse_mem_reqs.memoryTypeBits, &sparse_mem_alloc, 0);
- ASSERT_TRUE(pass);
- VkDeviceMemory sparse_mem = VK_NULL_HANDLE;
- err = vkAllocateMemory(m_device->device(), &sparse_mem_alloc, NULL, &sparse_mem);
- ASSERT_VK_SUCCESS(err);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindImageMemory-image-01045");
- err = vkBindImageMemory(m_device->device(), sparse_image, sparse_mem, 0);
- // This may very well return an error.
- (void)err;
- m_errorMonitor->VerifyFound();
- vkFreeMemory(m_device->device(), sparse_mem, NULL);
- }
- vkDestroyImage(m_device->device(), sparse_image, NULL);
- }
- }
- }
-
- // Try to bind memory to a buffer created with sparse memory flags
- {
- VkBufferCreateInfo sparse_buffer_create_info = buffer_create_info;
-        sparse_buffer_create_info.flags |= VK_BUFFER_CREATE_SPARSE_BINDING_BIT;
- if (!m_device->phy().features().sparseResidencyBuffer) {
- // most likely means sparse formats aren't supported here; skip this test.
- } else {
- VkBuffer sparse_buffer = VK_NULL_HANDLE;
- err = vkCreateBuffer(m_device->device(), &sparse_buffer_create_info, NULL, &sparse_buffer);
- ASSERT_VK_SUCCESS(err);
- VkMemoryRequirements sparse_mem_reqs = {};
- vkGetBufferMemoryRequirements(m_device->device(), sparse_buffer, &sparse_mem_reqs);
- if (sparse_mem_reqs.memoryTypeBits != 0) {
- VkMemoryAllocateInfo sparse_mem_alloc = {};
- sparse_mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
- sparse_mem_alloc.pNext = NULL;
- sparse_mem_alloc.allocationSize = sparse_mem_reqs.size;
- sparse_mem_alloc.memoryTypeIndex = 0;
- pass = m_device->phy().set_memory_type(sparse_mem_reqs.memoryTypeBits, &sparse_mem_alloc, 0);
- ASSERT_TRUE(pass);
- VkDeviceMemory sparse_mem = VK_NULL_HANDLE;
- err = vkAllocateMemory(m_device->device(), &sparse_mem_alloc, NULL, &sparse_mem);
- ASSERT_VK_SUCCESS(err);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindBufferMemory-buffer-01030");
- err = vkBindBufferMemory(m_device->device(), sparse_buffer, sparse_mem, 0);
- // This may very well return an error.
- (void)err;
- m_errorMonitor->VerifyFound();
- vkFreeMemory(m_device->device(), sparse_mem, NULL);
- }
- vkDestroyBuffer(m_device->device(), sparse_buffer, NULL);
- }
- }
-}
-
-TEST_F(VkLayerTest, BindMemoryToDestroyedObject) {
- VkResult err;
- bool pass;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindImageMemory-image-parameter");
-
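-    TEST_DESCRIPTION("Bind memory to an image that was destroyed before the bind call.");
-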
- ASSERT_NO_FATAL_FAILURE(Init());
-
- // Create an image object, allocate memory, destroy the object and then try
- // to bind it
- VkImage image;
- VkDeviceMemory mem;
- VkMemoryRequirements mem_reqs;
-
- const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
- const int32_t tex_width = 32;
- const int32_t tex_height = 32;
-
- VkImageCreateInfo image_create_info = {};
- image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- image_create_info.pNext = NULL;
- image_create_info.imageType = VK_IMAGE_TYPE_2D;
- image_create_info.format = tex_format;
- image_create_info.extent.width = tex_width;
- image_create_info.extent.height = tex_height;
- image_create_info.extent.depth = 1;
- image_create_info.mipLevels = 1;
- image_create_info.arrayLayers = 1;
- image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
- image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
- image_create_info.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
- image_create_info.flags = 0;
-
- VkMemoryAllocateInfo mem_alloc = {};
- mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
- mem_alloc.pNext = NULL;
- mem_alloc.allocationSize = 0;
- mem_alloc.memoryTypeIndex = 0;
-
- err = vkCreateImage(m_device->device(), &image_create_info, NULL, &image);
- ASSERT_VK_SUCCESS(err);
-
- vkGetImageMemoryRequirements(m_device->device(), image, &mem_reqs);
-
- mem_alloc.allocationSize = mem_reqs.size;
- pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &mem_alloc, 0);
- ASSERT_TRUE(pass);
-
- // Allocate memory
- err = vkAllocateMemory(m_device->device(), &mem_alloc, NULL, &mem);
- ASSERT_VK_SUCCESS(err);
-
-    // Introduce validation failure, destroy Image object before binding
-    vkDestroyImage(m_device->device(), image, NULL);
-
- // Now Try to bind memory to this destroyed object
- err = vkBindImageMemory(m_device->device(), image, mem, 0);
- // This may very well return an error.
- (void)err;
-
- m_errorMonitor->VerifyFound();
-
- vkFreeMemory(m_device->device(), mem, NULL);
-}
-
-TEST_F(VkLayerTest, ExceedMemoryAllocationCount) {
- VkResult err = VK_SUCCESS;
- const int max_mems = 32;
- VkDeviceMemory mems[max_mems + 1];
-
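-    TEST_DESCRIPTION("Allocate more device memory objects than maxMemoryAllocationCount allows, using the device_profile_api layer to lower the limit.");
-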
- if (!EnableDeviceProfileLayer()) {
- printf("%s Failed to enable device profile layer.\n", kSkipPrefix);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- PFN_vkSetPhysicalDeviceLimitsEXT fpvkSetPhysicalDeviceLimitsEXT =
- (PFN_vkSetPhysicalDeviceLimitsEXT)vkGetInstanceProcAddr(instance(), "vkSetPhysicalDeviceLimitsEXT");
- PFN_vkGetOriginalPhysicalDeviceLimitsEXT fpvkGetOriginalPhysicalDeviceLimitsEXT =
- (PFN_vkGetOriginalPhysicalDeviceLimitsEXT)vkGetInstanceProcAddr(instance(), "vkGetOriginalPhysicalDeviceLimitsEXT");
-
- if (!(fpvkSetPhysicalDeviceLimitsEXT) || !(fpvkGetOriginalPhysicalDeviceLimitsEXT)) {
- printf("%s Can't find device_profile_api functions; skipped.\n", kSkipPrefix);
- return;
- }
- VkPhysicalDeviceProperties props;
- fpvkGetOriginalPhysicalDeviceLimitsEXT(gpu(), &props.limits);
- if (props.limits.maxMemoryAllocationCount > max_mems) {
- props.limits.maxMemoryAllocationCount = max_mems;
- fpvkSetPhysicalDeviceLimitsEXT(gpu(), &props.limits);
- }
- ASSERT_NO_FATAL_FAILURE(InitState());
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "Number of currently valid memory objects is not less than the maximum allowed");
-
- VkMemoryAllocateInfo mem_alloc = {};
- mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
- mem_alloc.pNext = NULL;
- mem_alloc.memoryTypeIndex = 0;
- mem_alloc.allocationSize = 4;
-
- int i;
- for (i = 0; i <= max_mems; i++) {
- err = vkAllocateMemory(m_device->device(), &mem_alloc, NULL, &mems[i]);
- if (err != VK_SUCCESS) {
- break;
- }
- }
- m_errorMonitor->VerifyFound();
-
- for (int j = 0; j < i; j++) {
- vkFreeMemory(m_device->device(), mems[j], NULL);
- }
-}
-
-TEST_F(VkLayerTest, ImageSampleCounts) {
- TEST_DESCRIPTION("Use bad sample counts in image transfer calls to trigger validation errors.");
- ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
-
- VkMemoryPropertyFlags reqs = 0;
- VkImageCreateInfo image_create_info = {};
- image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- image_create_info.pNext = NULL;
- image_create_info.imageType = VK_IMAGE_TYPE_2D;
- image_create_info.format = VK_FORMAT_B8G8R8A8_UNORM;
- image_create_info.extent.width = 256;
- image_create_info.extent.height = 256;
- image_create_info.extent.depth = 1;
- image_create_info.mipLevels = 1;
- image_create_info.arrayLayers = 1;
- image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
- image_create_info.flags = 0;
-
- VkImageBlit blit_region = {};
- blit_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- blit_region.srcSubresource.baseArrayLayer = 0;
- blit_region.srcSubresource.layerCount = 1;
- blit_region.srcSubresource.mipLevel = 0;
- blit_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- blit_region.dstSubresource.baseArrayLayer = 0;
- blit_region.dstSubresource.layerCount = 1;
- blit_region.dstSubresource.mipLevel = 0;
- blit_region.srcOffsets[0] = {0, 0, 0};
- blit_region.srcOffsets[1] = {256, 256, 1};
- blit_region.dstOffsets[0] = {0, 0, 0};
- blit_region.dstOffsets[1] = {128, 128, 1};
-
- // Create two images, the source with sampleCount = 4, and attempt to blit
- // between them
- {
- image_create_info.samples = VK_SAMPLE_COUNT_4_BIT;
- image_create_info.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
- VkImageObj src_image(m_device);
- src_image.init(&image_create_info);
- src_image.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
- image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
- image_create_info.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
- VkImageObj dst_image(m_device);
- dst_image.init(&image_create_info);
- dst_image.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
- m_commandBuffer->begin();
- // TODO: These 2 VUs are redundant - expect one of them to go away
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-00233");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-00228");
- vkCmdBlitImage(m_commandBuffer->handle(), src_image.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, dst_image.handle(),
- VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &blit_region, VK_FILTER_NEAREST);
- m_errorMonitor->VerifyFound();
- m_commandBuffer->end();
- }
-
- // Create two images, the dest with sampleCount = 4, and attempt to blit
- // between them
- {
- image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
- image_create_info.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
- VkImageObj src_image(m_device);
- src_image.init(&image_create_info);
- src_image.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
- image_create_info.samples = VK_SAMPLE_COUNT_4_BIT;
- image_create_info.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
- VkImageObj dst_image(m_device);
- dst_image.init(&image_create_info);
- dst_image.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
- m_commandBuffer->begin();
- // TODO: These 2 VUs are redundant - expect one of them to go away
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-dstImage-00234");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-00228");
- vkCmdBlitImage(m_commandBuffer->handle(), src_image.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, dst_image.handle(),
- VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &blit_region, VK_FILTER_NEAREST);
- m_errorMonitor->VerifyFound();
- m_commandBuffer->end();
- }
-
- VkBufferImageCopy copy_region = {};
- copy_region.bufferRowLength = 128;
- copy_region.bufferImageHeight = 128;
- copy_region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- copy_region.imageSubresource.layerCount = 1;
- copy_region.imageExtent.height = 64;
- copy_region.imageExtent.width = 64;
- copy_region.imageExtent.depth = 1;
-
- // Create src buffer and dst image with sampleCount = 4 and attempt to copy
- // buffer to image
- {
- VkBufferObj src_buffer;
- src_buffer.init_as_src(*m_device, 128 * 128 * 4, reqs);
- image_create_info.samples = VK_SAMPLE_COUNT_4_BIT;
- image_create_info.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
- VkImageObj dst_image(m_device);
- dst_image.init(&image_create_info);
- dst_image.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
- m_commandBuffer->begin();
- m_errorMonitor->SetDesiredFailureMsg(
- VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "was created with a sample count of VK_SAMPLE_COUNT_4_BIT but must be VK_SAMPLE_COUNT_1_BIT");
- vkCmdCopyBufferToImage(m_commandBuffer->handle(), src_buffer.handle(), dst_image.handle(),
- VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &copy_region);
- m_errorMonitor->VerifyFound();
- m_commandBuffer->end();
- }
-
- // Create dst buffer and src image with sampleCount = 4 and attempt to copy
- // image to buffer
- {
- VkBufferObj dst_buffer;
- dst_buffer.init_as_dst(*m_device, 128 * 128 * 4, reqs);
- image_create_info.samples = VK_SAMPLE_COUNT_4_BIT;
- image_create_info.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
- vk_testing::Image src_image;
- src_image.init(*m_device, (const VkImageCreateInfo &)image_create_info, reqs);
- m_commandBuffer->begin();
- m_errorMonitor->SetDesiredFailureMsg(
- VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "was created with a sample count of VK_SAMPLE_COUNT_4_BIT but must be VK_SAMPLE_COUNT_1_BIT");
- vkCmdCopyImageToBuffer(m_commandBuffer->handle(), src_image.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
- dst_buffer.handle(), 1, &copy_region);
- m_errorMonitor->VerifyFound();
- m_commandBuffer->end();
- }
-}
-
-TEST_F(VkLayerTest, BlitImageFormatTypes) {
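-    TEST_DESCRIPTION("Blit between images whose formats have incompatible numeric types (unsigned/signed int, float, mismatched depth).");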
- ASSERT_NO_FATAL_FAILURE(Init());
-
- VkFormat f_unsigned = VK_FORMAT_R8G8B8A8_UINT;
- VkFormat f_signed = VK_FORMAT_R8G8B8A8_SINT;
- VkFormat f_float = VK_FORMAT_R32_SFLOAT;
- VkFormat f_depth = VK_FORMAT_D32_SFLOAT_S8_UINT;
- VkFormat f_depth2 = VK_FORMAT_D32_SFLOAT;
-
- if (!ImageFormatIsSupported(gpu(), f_unsigned, VK_IMAGE_TILING_OPTIMAL) ||
- !ImageFormatIsSupported(gpu(), f_signed, VK_IMAGE_TILING_OPTIMAL) ||
- !ImageFormatIsSupported(gpu(), f_float, VK_IMAGE_TILING_OPTIMAL) ||
- !ImageFormatIsSupported(gpu(), f_depth, VK_IMAGE_TILING_OPTIMAL) ||
- !ImageFormatIsSupported(gpu(), f_depth2, VK_IMAGE_TILING_OPTIMAL)) {
- printf("%s Requested formats not supported - BlitImageFormatTypes skipped.\n", kSkipPrefix);
- return;
- }
-
- // Note any missing feature bits
- bool usrc = !ImageFormatAndFeaturesSupported(gpu(), f_unsigned, VK_IMAGE_TILING_OPTIMAL, VK_FORMAT_FEATURE_BLIT_SRC_BIT);
- bool udst = !ImageFormatAndFeaturesSupported(gpu(), f_unsigned, VK_IMAGE_TILING_OPTIMAL, VK_FORMAT_FEATURE_BLIT_DST_BIT);
- bool ssrc = !ImageFormatAndFeaturesSupported(gpu(), f_signed, VK_IMAGE_TILING_OPTIMAL, VK_FORMAT_FEATURE_BLIT_SRC_BIT);
- bool sdst = !ImageFormatAndFeaturesSupported(gpu(), f_signed, VK_IMAGE_TILING_OPTIMAL, VK_FORMAT_FEATURE_BLIT_DST_BIT);
- bool fsrc = !ImageFormatAndFeaturesSupported(gpu(), f_float, VK_IMAGE_TILING_OPTIMAL, VK_FORMAT_FEATURE_BLIT_SRC_BIT);
- bool fdst = !ImageFormatAndFeaturesSupported(gpu(), f_float, VK_IMAGE_TILING_OPTIMAL, VK_FORMAT_FEATURE_BLIT_DST_BIT);
- bool d1dst = !ImageFormatAndFeaturesSupported(gpu(), f_depth, VK_IMAGE_TILING_OPTIMAL, VK_FORMAT_FEATURE_BLIT_DST_BIT);
- bool d2src = !ImageFormatAndFeaturesSupported(gpu(), f_depth2, VK_IMAGE_TILING_OPTIMAL, VK_FORMAT_FEATURE_BLIT_SRC_BIT);
-
- VkImageObj unsigned_image(m_device);
- unsigned_image.Init(64, 64, 1, f_unsigned, VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
- VK_IMAGE_TILING_OPTIMAL, 0);
- ASSERT_TRUE(unsigned_image.initialized());
- unsigned_image.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
-
- VkImageObj signed_image(m_device);
- signed_image.Init(64, 64, 1, f_signed, VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
- VK_IMAGE_TILING_OPTIMAL, 0);
- ASSERT_TRUE(signed_image.initialized());
- signed_image.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
-
- VkImageObj float_image(m_device);
- float_image.Init(64, 64, 1, f_float, VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL,
- 0);
- ASSERT_TRUE(float_image.initialized());
- float_image.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
-
- VkImageObj depth_image(m_device);
- depth_image.Init(64, 64, 1, f_depth, VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL,
- 0);
- ASSERT_TRUE(depth_image.initialized());
- depth_image.SetLayout(VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_DEPTH_BIT, VK_IMAGE_LAYOUT_GENERAL);
-
- VkImageObj depth_image2(m_device);
- depth_image2.Init(64, 64, 1, f_depth2, VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
- VK_IMAGE_TILING_OPTIMAL, 0);
- ASSERT_TRUE(depth_image2.initialized());
- depth_image2.SetLayout(VK_IMAGE_ASPECT_DEPTH_BIT, VK_IMAGE_LAYOUT_GENERAL);
-
- VkImageBlit blitRegion = {};
- blitRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- blitRegion.srcSubresource.baseArrayLayer = 0;
- blitRegion.srcSubresource.layerCount = 1;
- blitRegion.srcSubresource.mipLevel = 0;
- blitRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- blitRegion.dstSubresource.baseArrayLayer = 0;
- blitRegion.dstSubresource.layerCount = 1;
- blitRegion.dstSubresource.mipLevel = 0;
- blitRegion.srcOffsets[0] = {0, 0, 0};
- blitRegion.srcOffsets[1] = {64, 64, 1};
- blitRegion.dstOffsets[0] = {0, 0, 0};
- blitRegion.dstOffsets[1] = {32, 32, 1};
-
- m_commandBuffer->begin();
-
- // Unsigned int vs not an int
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-00230");
- if (usrc) m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-01999");
- if (fdst) m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-dstImage-02000");
- vkCmdBlitImage(m_commandBuffer->handle(), unsigned_image.image(), unsigned_image.Layout(), float_image.image(),
- float_image.Layout(), 1, &blitRegion, VK_FILTER_NEAREST);
- m_errorMonitor->VerifyFound();
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-00230");
- if (fsrc) m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-01999");
- if (udst) m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-dstImage-02000");
- vkCmdBlitImage(m_commandBuffer->handle(), float_image.image(), float_image.Layout(), unsigned_image.image(),
- unsigned_image.Layout(), 1, &blitRegion, VK_FILTER_NEAREST);
- m_errorMonitor->VerifyFound();
-
- // Signed int vs not an int,
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-00229");
- if (ssrc) m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-01999");
- if (fdst) m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-dstImage-02000");
- vkCmdBlitImage(m_commandBuffer->handle(), signed_image.image(), signed_image.Layout(), float_image.image(),
- float_image.Layout(), 1, &blitRegion, VK_FILTER_NEAREST);
- m_errorMonitor->VerifyFound();
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-00229");
- if (fsrc) m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-01999");
- if (sdst) m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-dstImage-02000");
- vkCmdBlitImage(m_commandBuffer->handle(), float_image.image(), float_image.Layout(), signed_image.image(),
- signed_image.Layout(), 1, &blitRegion, VK_FILTER_NEAREST);
- m_errorMonitor->VerifyFound();
-
- // Signed vs Unsigned int - generates both VUs
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-00229");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-00230");
- if (ssrc) m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-01999");
- if (udst) m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-dstImage-02000");
- vkCmdBlitImage(m_commandBuffer->handle(), signed_image.image(), signed_image.Layout(), unsigned_image.image(),
- unsigned_image.Layout(), 1, &blitRegion, VK_FILTER_NEAREST);
- m_errorMonitor->VerifyFound();
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-00229");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-00230");
- if (usrc) m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-01999");
- if (sdst) m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-dstImage-02000");
- vkCmdBlitImage(m_commandBuffer->handle(), unsigned_image.image(), unsigned_image.Layout(), signed_image.image(),
- signed_image.Layout(), 1, &blitRegion, VK_FILTER_NEAREST);
- m_errorMonitor->VerifyFound();
-
- // Depth vs any non-identical depth format
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-00231");
- blitRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
- blitRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
- if (d2src) m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-01999");
- if (d1dst) m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-dstImage-02000");
- vkCmdBlitImage(m_commandBuffer->handle(), depth_image2.image(), depth_image2.Layout(), depth_image.image(),
- depth_image.Layout(), 1, &blitRegion, VK_FILTER_NEAREST);
- m_errorMonitor->VerifyFound();
-
- m_commandBuffer->end();
-}
-
-TEST_F(VkLayerTest, BlitImageFilters) {
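-    TEST_DESCRIPTION("Use vkCmdBlitImage with filters the source format or image type does not support.");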
- bool cubic_support = false;
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- if (DeviceExtensionSupported(gpu(), nullptr, "VK_IMG_filter_cubic")) {
- m_device_extension_names.push_back("VK_IMG_filter_cubic");
- cubic_support = true;
- }
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- VkFormat fmt = VK_FORMAT_R8_UINT;
- if (!ImageFormatIsSupported(gpu(), fmt, VK_IMAGE_TILING_OPTIMAL)) {
- printf("%s No R8_UINT format support - BlitImageFilters skipped.\n", kSkipPrefix);
- return;
- }
-
- // Create 2D images
- VkImageObj src2D(m_device);
- VkImageObj dst2D(m_device);
- src2D.Init(64, 64, 1, fmt, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
- dst2D.Init(64, 64, 1, fmt, VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
- ASSERT_TRUE(src2D.initialized());
- ASSERT_TRUE(dst2D.initialized());
- src2D.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
- dst2D.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
-
- // Create 3D image
- VkImageCreateInfo ci;
- ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- ci.pNext = NULL;
- ci.flags = 0;
- ci.imageType = VK_IMAGE_TYPE_3D;
- ci.format = fmt;
- ci.extent = {64, 64, 4};
- ci.mipLevels = 1;
- ci.arrayLayers = 1;
- ci.samples = VK_SAMPLE_COUNT_1_BIT;
- ci.tiling = VK_IMAGE_TILING_OPTIMAL;
- ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
- ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
- ci.queueFamilyIndexCount = 0;
- ci.pQueueFamilyIndices = NULL;
- ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
-
- VkImageObj src3D(m_device);
- src3D.init(&ci);
- ASSERT_TRUE(src3D.initialized());
-
- VkImageBlit blitRegion = {};
- blitRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- blitRegion.srcSubresource.baseArrayLayer = 0;
- blitRegion.srcSubresource.layerCount = 1;
- blitRegion.srcSubresource.mipLevel = 0;
- blitRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- blitRegion.dstSubresource.baseArrayLayer = 0;
- blitRegion.dstSubresource.layerCount = 1;
- blitRegion.dstSubresource.mipLevel = 0;
- blitRegion.srcOffsets[0] = {0, 0, 0};
- blitRegion.srcOffsets[1] = {48, 48, 1};
- blitRegion.dstOffsets[0] = {0, 0, 0};
- blitRegion.dstOffsets[1] = {64, 64, 1};
-
- m_commandBuffer->begin();
-
- // UINT format should not support linear filtering, but check to be sure
- if (!ImageFormatAndFeaturesSupported(gpu(), fmt, VK_IMAGE_TILING_OPTIMAL, VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT)) {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-filter-02001");
- vkCmdBlitImage(m_commandBuffer->handle(), src2D.image(), src2D.Layout(), dst2D.image(), dst2D.Layout(), 1, &blitRegion,
- VK_FILTER_LINEAR);
- m_errorMonitor->VerifyFound();
- }
-
- if (cubic_support && !ImageFormatAndFeaturesSupported(gpu(), fmt, VK_IMAGE_TILING_OPTIMAL,
- VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG)) {
- // Invalid filter CUBIC_IMG
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-filter-02002");
- vkCmdBlitImage(m_commandBuffer->handle(), src3D.image(), src3D.Layout(), dst2D.image(), dst2D.Layout(), 1, &blitRegion,
- VK_FILTER_CUBIC_IMG);
- m_errorMonitor->VerifyFound();
-
- // Invalid filter CUBIC_IMG + invalid 2D source image
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-filter-02002");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-filter-00237");
- vkCmdBlitImage(m_commandBuffer->handle(), src2D.image(), src2D.Layout(), dst2D.image(), dst2D.Layout(), 1, &blitRegion,
- VK_FILTER_CUBIC_IMG);
- m_errorMonitor->VerifyFound();
- }
-
- m_commandBuffer->end();
-}
-
-TEST_F(VkLayerTest, BlitImageLayout) {
- TEST_DESCRIPTION("Incorrect vkCmdBlitImage layouts");
-
- ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
-
- VkResult err;
- VkFormat fmt = VK_FORMAT_R8G8B8A8_UNORM;
-
- VkSubmitInfo submit_info = {};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &m_commandBuffer->handle();
-
- // Create images
- VkImageObj img_src_transfer(m_device);
- VkImageObj img_dst_transfer(m_device);
- VkImageObj img_general(m_device);
- VkImageObj img_color(m_device);
-
- img_src_transfer.InitNoLayout(64, 64, 1, fmt, VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
- VK_IMAGE_TILING_OPTIMAL, 0);
- img_dst_transfer.InitNoLayout(64, 64, 1, fmt, VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
- VK_IMAGE_TILING_OPTIMAL, 0);
- img_general.InitNoLayout(64, 64, 1, fmt, VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
- VK_IMAGE_TILING_OPTIMAL, 0);
- img_color.InitNoLayout(64, 64, 1, fmt,
- VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
- VK_IMAGE_TILING_OPTIMAL, 0);
-
- ASSERT_TRUE(img_src_transfer.initialized());
- ASSERT_TRUE(img_dst_transfer.initialized());
- ASSERT_TRUE(img_general.initialized());
- ASSERT_TRUE(img_color.initialized());
-
- img_src_transfer.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
- img_dst_transfer.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
- img_general.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
- img_color.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL);
-
- VkImageBlit blit_region = {};
- blit_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- blit_region.srcSubresource.baseArrayLayer = 0;
- blit_region.srcSubresource.layerCount = 1;
- blit_region.srcSubresource.mipLevel = 0;
- blit_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- blit_region.dstSubresource.baseArrayLayer = 0;
- blit_region.dstSubresource.layerCount = 1;
- blit_region.dstSubresource.mipLevel = 0;
- blit_region.srcOffsets[0] = {0, 0, 0};
- blit_region.srcOffsets[1] = {48, 48, 1};
- blit_region.dstOffsets[0] = {0, 0, 0};
- blit_region.dstOffsets[1] = {64, 64, 1};
-
- m_commandBuffer->begin();
-
- // Illegal srcImageLayout
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImageLayout-00222");
- vkCmdBlitImage(m_commandBuffer->handle(), img_src_transfer.image(), VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
- img_dst_transfer.image(), img_dst_transfer.Layout(), 1, &blit_region, VK_FILTER_LINEAR);
- m_errorMonitor->VerifyFound();
-
- // Illegal destImageLayout
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-dstImageLayout-00227");
- vkCmdBlitImage(m_commandBuffer->handle(), img_src_transfer.image(), img_src_transfer.Layout(), img_dst_transfer.image(),
- VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, 1, &blit_region, VK_FILTER_LINEAR);
-
- m_commandBuffer->end();
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
- m_errorMonitor->VerifyFound();
-
- err = vkQueueWaitIdle(m_device->m_queue);
- ASSERT_VK_SUCCESS(err);
-
- m_commandBuffer->reset(0);
- m_commandBuffer->begin();
-
- // Source image in invalid layout at start of the CB
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "UNASSIGNED-CoreValidation-DrawState-InvalidImageLayout");
- vkCmdBlitImage(m_commandBuffer->handle(), img_src_transfer.image(), img_src_transfer.Layout(), img_color.image(),
- VK_IMAGE_LAYOUT_GENERAL, 1, &blit_region, VK_FILTER_LINEAR);
-
- m_commandBuffer->end();
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
- m_errorMonitor->VerifyFound();
- err = vkQueueWaitIdle(m_device->m_queue);
- ASSERT_VK_SUCCESS(err);
-
- m_commandBuffer->reset(0);
- m_commandBuffer->begin();
-
- // Destination image in invalid layout at start of the CB
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "UNASSIGNED-CoreValidation-DrawState-InvalidImageLayout");
- vkCmdBlitImage(m_commandBuffer->handle(), img_color.image(), VK_IMAGE_LAYOUT_GENERAL, img_dst_transfer.image(),
- img_dst_transfer.Layout(), 1, &blit_region, VK_FILTER_LINEAR);
-
- m_commandBuffer->end();
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
- m_errorMonitor->VerifyFound();
- err = vkQueueWaitIdle(m_device->m_queue);
- ASSERT_VK_SUCCESS(err);
-
- // Source image in invalid layout in the middle of CB
- m_commandBuffer->reset(0);
- m_commandBuffer->begin();
-
- VkImageMemoryBarrier img_barrier = {};
- img_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
- img_barrier.pNext = nullptr;
- img_barrier.srcAccessMask = 0;
- img_barrier.dstAccessMask = 0;
- img_barrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
- img_barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
- img_barrier.image = img_general.handle();
- img_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
- img_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
- img_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- img_barrier.subresourceRange.baseArrayLayer = 0;
- img_barrier.subresourceRange.baseMipLevel = 0;
- img_barrier.subresourceRange.layerCount = 1;
- img_barrier.subresourceRange.levelCount = 1;
-
- vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, 0, 0,
- nullptr, 0, nullptr, 1, &img_barrier);
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImageLayout-00221");
- vkCmdBlitImage(m_commandBuffer->handle(), img_general.image(), VK_IMAGE_LAYOUT_GENERAL, img_dst_transfer.image(),
- img_dst_transfer.Layout(), 1, &blit_region, VK_FILTER_LINEAR);
-
- m_commandBuffer->end();
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
- m_errorMonitor->VerifyFound();
- err = vkQueueWaitIdle(m_device->m_queue);
- ASSERT_VK_SUCCESS(err);
-
- // Destination image in invalid layout in the middle of CB
- m_commandBuffer->reset(0);
- m_commandBuffer->begin();
-
- img_barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
- img_barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
- img_barrier.image = img_dst_transfer.handle();
-
- vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, 0, 0,
- nullptr, 0, nullptr, 1, &img_barrier);
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-dstImageLayout-00226");
- vkCmdBlitImage(m_commandBuffer->handle(), img_src_transfer.image(), img_src_transfer.Layout(), img_dst_transfer.image(),
- VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &blit_region, VK_FILTER_LINEAR);
-
- m_commandBuffer->end();
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
- m_errorMonitor->VerifyFound();
- err = vkQueueWaitIdle(m_device->m_queue);
- ASSERT_VK_SUCCESS(err);
-}
-
-TEST_F(VkLayerTest, BlitImageOffsets) {
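-    TEST_DESCRIPTION("Use vkCmdBlitImage with invalid or out-of-bounds src/dst offsets.");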
- ASSERT_NO_FATAL_FAILURE(Init());
-
- VkFormat fmt = VK_FORMAT_R8G8B8A8_UNORM;
- if (!ImageFormatAndFeaturesSupported(gpu(), fmt, VK_IMAGE_TILING_OPTIMAL,
- VK_FORMAT_FEATURE_BLIT_SRC_BIT | VK_FORMAT_FEATURE_BLIT_DST_BIT)) {
- printf("%s No blit feature bits - BlitImageOffsets skipped.\n", kSkipPrefix);
- return;
- }
-
- VkImageCreateInfo ci;
- ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- ci.pNext = NULL;
- ci.flags = 0;
- ci.imageType = VK_IMAGE_TYPE_1D;
- ci.format = fmt;
- ci.extent = {64, 1, 1};
- ci.mipLevels = 1;
- ci.arrayLayers = 1;
- ci.samples = VK_SAMPLE_COUNT_1_BIT;
- ci.tiling = VK_IMAGE_TILING_OPTIMAL;
- ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
- ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
- ci.queueFamilyIndexCount = 0;
- ci.pQueueFamilyIndices = NULL;
- ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
-
- VkImageObj image_1D(m_device);
- image_1D.init(&ci);
- ASSERT_TRUE(image_1D.initialized());
-
- ci.imageType = VK_IMAGE_TYPE_2D;
- ci.extent = {64, 64, 1};
- VkImageObj image_2D(m_device);
- image_2D.init(&ci);
- ASSERT_TRUE(image_2D.initialized());
-
- ci.imageType = VK_IMAGE_TYPE_3D;
- ci.extent = {64, 64, 64};
- VkImageObj image_3D(m_device);
- image_3D.init(&ci);
- ASSERT_TRUE(image_3D.initialized());
-
- VkImageBlit blit_region = {};
- blit_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- blit_region.srcSubresource.baseArrayLayer = 0;
- blit_region.srcSubresource.layerCount = 1;
- blit_region.srcSubresource.mipLevel = 0;
- blit_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- blit_region.dstSubresource.baseArrayLayer = 0;
- blit_region.dstSubresource.layerCount = 1;
- blit_region.dstSubresource.mipLevel = 0;
-
- m_commandBuffer->begin();
-
- // 1D, with src/dest y offsets other than (0,1)
- blit_region.srcOffsets[0] = {0, 1, 0};
- blit_region.srcOffsets[1] = {30, 1, 1};
- blit_region.dstOffsets[0] = {32, 0, 0};
- blit_region.dstOffsets[1] = {64, 1, 1};
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageBlit-srcImage-00245");
- vkCmdBlitImage(m_commandBuffer->handle(), image_1D.image(), image_1D.Layout(), image_1D.image(), image_1D.Layout(), 1,
- &blit_region, VK_FILTER_NEAREST);
- m_errorMonitor->VerifyFound();
-
- blit_region.srcOffsets[0] = {0, 0, 0};
- blit_region.dstOffsets[0] = {32, 1, 0};
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageBlit-dstImage-00250");
- vkCmdBlitImage(m_commandBuffer->handle(), image_1D.image(), image_1D.Layout(), image_1D.image(), image_1D.Layout(), 1,
- &blit_region, VK_FILTER_NEAREST);
- m_errorMonitor->VerifyFound();
-
- // 2D, with src/dest z offsets other than (0,1)
- blit_region.srcOffsets[0] = {0, 0, 1};
- blit_region.srcOffsets[1] = {24, 31, 1};
- blit_region.dstOffsets[0] = {32, 32, 0};
- blit_region.dstOffsets[1] = {64, 64, 1};
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageBlit-srcImage-00247");
- vkCmdBlitImage(m_commandBuffer->handle(), image_2D.image(), image_2D.Layout(), image_2D.image(), image_2D.Layout(), 1,
- &blit_region, VK_FILTER_NEAREST);
- m_errorMonitor->VerifyFound();
-
- blit_region.srcOffsets[0] = {0, 0, 0};
- blit_region.dstOffsets[0] = {32, 32, 1};
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageBlit-dstImage-00252");
- vkCmdBlitImage(m_commandBuffer->handle(), image_2D.image(), image_2D.Layout(), image_2D.image(), image_2D.Layout(), 1,
- &blit_region, VK_FILTER_NEAREST);
- m_errorMonitor->VerifyFound();
-
- // Source offsets exceeding source image dimensions
- blit_region.srcOffsets[0] = {0, 0, 0};
- blit_region.srcOffsets[1] = {65, 64, 1}; // src x
- blit_region.dstOffsets[0] = {0, 0, 0};
- blit_region.dstOffsets[1] = {64, 64, 1};
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageBlit-srcOffset-00243"); // x
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-pRegions-00215"); // src region
- vkCmdBlitImage(m_commandBuffer->handle(), image_3D.image(), image_3D.Layout(), image_2D.image(), image_2D.Layout(), 1,
- &blit_region, VK_FILTER_NEAREST);
- m_errorMonitor->VerifyFound();
-
- blit_region.srcOffsets[1] = {64, 65, 1}; // src y
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageBlit-srcOffset-00244"); // y
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-pRegions-00215"); // src region
- vkCmdBlitImage(m_commandBuffer->handle(), image_3D.image(), image_3D.Layout(), image_2D.image(), image_2D.Layout(), 1,
- &blit_region, VK_FILTER_NEAREST);
- m_errorMonitor->VerifyFound();
-
- blit_region.srcOffsets[0] = {0, 0, 65}; // src z
- blit_region.srcOffsets[1] = {64, 64, 64};
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageBlit-srcOffset-00246"); // z
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-pRegions-00215"); // src region
- vkCmdBlitImage(m_commandBuffer->handle(), image_3D.image(), image_3D.Layout(), image_2D.image(), image_2D.Layout(), 1,
- &blit_region, VK_FILTER_NEAREST);
- m_errorMonitor->VerifyFound();
-
-    // Dest offsets exceeding destination image dimensions
- blit_region.srcOffsets[0] = {0, 0, 0};
- blit_region.srcOffsets[1] = {64, 64, 1};
- blit_region.dstOffsets[0] = {96, 64, 32}; // dst x
- blit_region.dstOffsets[1] = {64, 0, 33};
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageBlit-dstOffset-00248"); // x
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-pRegions-00216"); // dst region
- vkCmdBlitImage(m_commandBuffer->handle(), image_2D.image(), image_2D.Layout(), image_3D.image(), image_3D.Layout(), 1,
- &blit_region, VK_FILTER_NEAREST);
- m_errorMonitor->VerifyFound();
-
- blit_region.dstOffsets[0] = {0, 65, 32}; // dst y
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageBlit-dstOffset-00249"); // y
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-pRegions-00216"); // dst region
- vkCmdBlitImage(m_commandBuffer->handle(), image_2D.image(), image_2D.Layout(), image_3D.image(), image_3D.Layout(), 1,
- &blit_region, VK_FILTER_NEAREST);
- m_errorMonitor->VerifyFound();
-
- blit_region.dstOffsets[0] = {0, 64, 65}; // dst z
- blit_region.dstOffsets[1] = {64, 0, 64};
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageBlit-dstOffset-00251"); // z
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-pRegions-00216"); // dst region
- vkCmdBlitImage(m_commandBuffer->handle(), image_2D.image(), image_2D.Layout(), image_3D.image(), image_3D.Layout(), 1,
- &blit_region, VK_FILTER_NEAREST);
- m_errorMonitor->VerifyFound();
-
- m_commandBuffer->end();
-}
-
-TEST_F(VkLayerTest, MiscBlitImageTests) {
- ASSERT_NO_FATAL_FAILURE(Init());
-
- VkFormat f_color = VK_FORMAT_R32_SFLOAT; // Need features ..BLIT_SRC_BIT & ..BLIT_DST_BIT
-
- if (!ImageFormatAndFeaturesSupported(gpu(), f_color, VK_IMAGE_TILING_OPTIMAL,
- VK_FORMAT_FEATURE_BLIT_SRC_BIT | VK_FORMAT_FEATURE_BLIT_DST_BIT)) {
- printf("%s Requested format features unavailable - MiscBlitImageTests skipped.\n", kSkipPrefix);
- return;
- }
-
- VkImageCreateInfo ci;
- ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- ci.pNext = NULL;
- ci.flags = 0;
- ci.imageType = VK_IMAGE_TYPE_2D;
- ci.format = f_color;
- ci.extent = {64, 64, 1};
- ci.mipLevels = 1;
- ci.arrayLayers = 1;
- ci.samples = VK_SAMPLE_COUNT_1_BIT;
- ci.tiling = VK_IMAGE_TILING_OPTIMAL;
- ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
- ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
- ci.queueFamilyIndexCount = 0;
- ci.pQueueFamilyIndices = NULL;
- ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
-
- // 2D color image
- VkImageObj color_img(m_device);
- color_img.init(&ci);
- ASSERT_TRUE(color_img.initialized());
-
- // 2D multi-sample image
- ci.samples = VK_SAMPLE_COUNT_4_BIT;
- VkImageObj ms_img(m_device);
- ms_img.init(&ci);
- ASSERT_TRUE(ms_img.initialized());
-
- // 3D color image
- ci.samples = VK_SAMPLE_COUNT_1_BIT;
- ci.imageType = VK_IMAGE_TYPE_3D;
- ci.extent = {64, 64, 8};
- VkImageObj color_3D_img(m_device);
- color_3D_img.init(&ci);
- ASSERT_TRUE(color_3D_img.initialized());
-
- VkImageBlit blitRegion = {};
- blitRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- blitRegion.srcSubresource.baseArrayLayer = 0;
- blitRegion.srcSubresource.layerCount = 1;
- blitRegion.srcSubresource.mipLevel = 0;
- blitRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- blitRegion.dstSubresource.baseArrayLayer = 0;
- blitRegion.dstSubresource.layerCount = 1;
- blitRegion.dstSubresource.mipLevel = 0;
- blitRegion.srcOffsets[0] = {0, 0, 0};
- blitRegion.srcOffsets[1] = {16, 16, 1};
- blitRegion.dstOffsets[0] = {32, 32, 0};
- blitRegion.dstOffsets[1] = {64, 64, 1};
-
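- // Each negative case below follows the same error-monitor pattern: SetDesiredFailureMsg()
- // registers the VUID string(s) the validation layer is expected to emit, the deliberately
- // invalid vkCmdBlitImage() call is recorded, and VerifyFound() then asserts that every
- // registered message was actually observed.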
- m_commandBuffer->begin();
-
- // Blit with aspectMask errors
- blitRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
- blitRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageBlit-aspectMask-00241");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageBlit-aspectMask-00242");
- vkCmdBlitImage(m_commandBuffer->handle(), color_img.image(), color_img.Layout(), color_img.image(), color_img.Layout(), 1,
- &blitRegion, VK_FILTER_NEAREST);
- m_errorMonitor->VerifyFound();
-
- // Blit with invalid src mip level
- blitRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- blitRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- blitRegion.srcSubresource.mipLevel = ci.mipLevels;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdBlitImage-srcSubresource-01705"); // invalid srcSubresource.mipLevel
- // Redundant unavoidable errors
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkImageBlit-srcOffset-00243"); // out-of-bounds srcOffset.x
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkImageBlit-srcOffset-00244"); // out-of-bounds srcOffset.y
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkImageBlit-srcOffset-00246"); // out-of-bounds srcOffset.z
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdBlitImage-pRegions-00215"); // region not contained within src image
- vkCmdBlitImage(m_commandBuffer->handle(), color_img.image(), color_img.Layout(), color_img.image(), color_img.Layout(), 1,
- &blitRegion, VK_FILTER_NEAREST);
- m_errorMonitor->VerifyFound();
-
- // Blit with invalid dst mip level
- blitRegion.srcSubresource.mipLevel = 0;
- blitRegion.dstSubresource.mipLevel = ci.mipLevels;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdBlitImage-dstSubresource-01706"); // invalid dstSubresource.mipLevel
- // Redundant unavoidable errors
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkImageBlit-dstOffset-00248"); // out-of-bounds dstOffset.x
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkImageBlit-dstOffset-00249"); // out-of-bounds dstOffset.y
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkImageBlit-dstOffset-00251"); // out-of-bounds dstOffset.z
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdBlitImage-pRegions-00216"); // region not contained within dst image
- vkCmdBlitImage(m_commandBuffer->handle(), color_img.image(), color_img.Layout(), color_img.image(), color_img.Layout(), 1,
- &blitRegion, VK_FILTER_NEAREST);
- m_errorMonitor->VerifyFound();
-
- // Blit with invalid src array layer
- blitRegion.dstSubresource.mipLevel = 0;
- blitRegion.srcSubresource.baseArrayLayer = ci.arrayLayers;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdBlitImage-srcSubresource-01707"); // invalid srcSubresource layer range
- vkCmdBlitImage(m_commandBuffer->handle(), color_img.image(), color_img.Layout(), color_img.image(), color_img.Layout(), 1,
- &blitRegion, VK_FILTER_NEAREST);
- m_errorMonitor->VerifyFound();
-
- // Blit with invalid dst array layer
- blitRegion.srcSubresource.baseArrayLayer = 0;
- blitRegion.dstSubresource.baseArrayLayer = ci.arrayLayers;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdBlitImage-dstSubresource-01708"); // invalid dstSubresource layer range
- // Redundant unavoidable errors
- vkCmdBlitImage(m_commandBuffer->handle(), color_img.image(), color_img.Layout(), color_img.image(), color_img.Layout(), 1,
- &blitRegion, VK_FILTER_NEAREST);
- m_errorMonitor->VerifyFound();
-
- blitRegion.dstSubresource.baseArrayLayer = 0;
-
- // Blit multi-sample image
- // TODO: redundant VUs, one (1c8) or two (1d2 & 1d4) should be eliminated.
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-00228");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-00233");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-dstImage-00234");
- vkCmdBlitImage(m_commandBuffer->handle(), ms_img.image(), ms_img.Layout(), ms_img.image(), ms_img.Layout(), 1, &blitRegion,
- VK_FILTER_NEAREST);
- m_errorMonitor->VerifyFound();
-
- // Blit 3D with baseArrayLayer != 0 or layerCount != 1
- blitRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- blitRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- blitRegion.srcSubresource.baseArrayLayer = 1;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageBlit-srcImage-00240");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdBlitImage-srcSubresource-01707"); // base+count > total layer count
- vkCmdBlitImage(m_commandBuffer->handle(), color_3D_img.image(), color_3D_img.Layout(), color_3D_img.image(),
- color_3D_img.Layout(), 1, &blitRegion, VK_FILTER_NEAREST);
- m_errorMonitor->VerifyFound();
- blitRegion.srcSubresource.baseArrayLayer = 0;
- blitRegion.srcSubresource.layerCount = 0;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageBlit-srcImage-00240");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkImageSubresourceLayers-layerCount-01700"); // layer count == 0 (src)
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkImageBlit-layerCount-00239"); // src/dst layer count mismatch
- vkCmdBlitImage(m_commandBuffer->handle(), color_3D_img.image(), color_3D_img.Layout(), color_3D_img.image(),
- color_3D_img.Layout(), 1, &blitRegion, VK_FILTER_NEAREST);
- m_errorMonitor->VerifyFound();
-
- m_commandBuffer->end();
-}
-
-TEST_F(VkLayerTest, BlitToDepthImageTests) {
- ASSERT_NO_FATAL_FAILURE(Init());
-
- // Need feature ..BLIT_SRC_BIT but not ..BLIT_DST_BIT
- // TODO: provide more choices here; supporting D32_SFLOAT as BLIT_DST isn't unheard of.
- VkFormat f_depth = VK_FORMAT_D32_SFLOAT;
-
- if (!ImageFormatAndFeaturesSupported(gpu(), f_depth, VK_IMAGE_TILING_OPTIMAL, VK_FORMAT_FEATURE_BLIT_SRC_BIT) ||
- ImageFormatAndFeaturesSupported(gpu(), f_depth, VK_IMAGE_TILING_OPTIMAL, VK_FORMAT_FEATURE_BLIT_DST_BIT)) {
- printf("%s Requested format features unavailable - BlitToDepthImageTests skipped.\n", kSkipPrefix);
- return;
- }
-
- VkImageCreateInfo ci;
- ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- ci.pNext = NULL;
- ci.flags = 0;
- ci.imageType = VK_IMAGE_TYPE_2D;
- ci.format = f_depth;
- ci.extent = {64, 64, 1};
- ci.mipLevels = 1;
- ci.arrayLayers = 1;
- ci.samples = VK_SAMPLE_COUNT_1_BIT;
- ci.tiling = VK_IMAGE_TILING_OPTIMAL;
- ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
- ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
- ci.queueFamilyIndexCount = 0;
- ci.pQueueFamilyIndices = NULL;
- ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
-
- // 2D depth image
- VkImageObj depth_img(m_device);
- depth_img.init(&ci);
- ASSERT_TRUE(depth_img.initialized());
-
- VkImageBlit blitRegion = {};
- blitRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- blitRegion.srcSubresource.baseArrayLayer = 0;
- blitRegion.srcSubresource.layerCount = 1;
- blitRegion.srcSubresource.mipLevel = 0;
- blitRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- blitRegion.dstSubresource.baseArrayLayer = 0;
- blitRegion.dstSubresource.layerCount = 1;
- blitRegion.dstSubresource.mipLevel = 0;
- blitRegion.srcOffsets[0] = {0, 0, 0};
- blitRegion.srcOffsets[1] = {16, 16, 1};
- blitRegion.dstOffsets[0] = {32, 32, 0};
- blitRegion.dstOffsets[1] = {64, 64, 1};
-
- m_commandBuffer->begin();
-
- // Blit depth image - has SRC_BIT but not DST_BIT
- blitRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
- blitRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-dstImage-02000");
- vkCmdBlitImage(m_commandBuffer->handle(), depth_img.image(), depth_img.Layout(), depth_img.image(), depth_img.Layout(), 1,
- &blitRegion, VK_FILTER_NEAREST);
- m_errorMonitor->VerifyFound();
-
- m_commandBuffer->end();
-}
-
-TEST_F(VkLayerTest, MinImageTransferGranularity) {
- TEST_DESCRIPTION("Tests for validation of Queue Family property minImageTransferGranularity.");
- ASSERT_NO_FATAL_FAILURE(Init());
-
- auto queue_family_properties = m_device->phy().queue_properties();
- auto large_granularity_family =
- std::find_if(queue_family_properties.begin(), queue_family_properties.end(), [](VkQueueFamilyProperties family_properties) {
- VkExtent3D family_granularity = family_properties.minImageTransferGranularity;
- // We need a queue family that supports copy operations and has a large enough minImageTransferGranularity for the tests
- // below to make sense.
- return (family_properties.queueFlags & VK_QUEUE_TRANSFER_BIT || family_properties.queueFlags & VK_QUEUE_GRAPHICS_BIT ||
- family_properties.queueFlags & VK_QUEUE_COMPUTE_BIT) &&
- family_granularity.depth >= 4 && family_granularity.width >= 4 && family_granularity.height >= 4;
- });
-
- if (large_granularity_family == queue_family_properties.end()) {
- printf("%s No queue family has a large enough granularity for this test to be meaningful, skipping test\n", kSkipPrefix);
- return;
- }
- const size_t queue_family_index = std::distance(queue_family_properties.begin(), large_granularity_family);
- VkExtent3D granularity = queue_family_properties[queue_family_index].minImageTransferGranularity;
- VkCommandPoolObj command_pool(m_device, queue_family_index, 0);
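- // On this queue family, copy offsets and extents must be integer multiples of
- // minImageTransferGranularity (an extent may fall short of a multiple only where it reaches the
- // edge of the image). The images below are sized at twice the granularity so that deliberately
- // misaligned values such as 3 still land inside the image and only the granularity VUIDs fire.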
-
- // Create source and destination images (differing only in usage) and copy between them
- VkImage srcImage;
- VkImage dstImage;
-
- VkImageCreateInfo image_create_info = {};
- image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- image_create_info.pNext = NULL;
- image_create_info.imageType = VK_IMAGE_TYPE_3D;
- image_create_info.format = VK_FORMAT_B8G8R8A8_UNORM;
- image_create_info.extent.width = granularity.width * 2;
- image_create_info.extent.height = granularity.height * 2;
- image_create_info.extent.depth = granularity.depth * 2;
- image_create_info.mipLevels = 1;
- image_create_info.arrayLayers = 1;
- image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
- image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
- image_create_info.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
- image_create_info.flags = 0;
-
- VkImageObj src_image_obj(m_device);
- src_image_obj.init(&image_create_info);
- ASSERT_TRUE(src_image_obj.initialized());
- srcImage = src_image_obj.handle();
-
- image_create_info.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
-
- VkImageObj dst_image_obj(m_device);
- dst_image_obj.init(&image_create_info);
- ASSERT_TRUE(dst_image_obj.initialized());
- dstImage = dst_image_obj.handle();
-
- VkCommandBufferObj command_buffer(m_device, &command_pool);
- ASSERT_TRUE(command_buffer.initialized());
- command_buffer.begin();
-
- VkImageCopy copyRegion;
- copyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- copyRegion.srcSubresource.mipLevel = 0;
- copyRegion.srcSubresource.baseArrayLayer = 0;
- copyRegion.srcSubresource.layerCount = 1;
- copyRegion.srcOffset.x = 0;
- copyRegion.srcOffset.y = 0;
- copyRegion.srcOffset.z = 0;
- copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- copyRegion.dstSubresource.mipLevel = 0;
- copyRegion.dstSubresource.baseArrayLayer = 0;
- copyRegion.dstSubresource.layerCount = 1;
- copyRegion.dstOffset.x = 0;
- copyRegion.dstOffset.y = 0;
- copyRegion.dstOffset.z = 0;
- copyRegion.extent.width = granularity.width;
- copyRegion.extent.height = granularity.height;
- copyRegion.extent.depth = granularity.depth;
-
- // Introduce failure by setting srcOffset to a bad granularity value
- copyRegion.srcOffset.y = 3;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdCopyImage-srcOffset-01783"); // srcOffset image transfer granularity
- command_buffer.CopyImage(srcImage, VK_IMAGE_LAYOUT_GENERAL, dstImage, VK_IMAGE_LAYOUT_GENERAL, 1, &copyRegion);
- m_errorMonitor->VerifyFound();
-
- // Introduce failure by setting extent to a granularity value that is bad
- // for both the source and destination image.
- copyRegion.srcOffset.y = 0;
- copyRegion.extent.width = 3;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdCopyImage-srcOffset-01783"); // src extent image transfer granularity
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdCopyImage-dstOffset-01784"); // dst extent image transfer granularity
- command_buffer.CopyImage(srcImage, VK_IMAGE_LAYOUT_GENERAL, dstImage, VK_IMAGE_LAYOUT_GENERAL, 1, &copyRegion);
- m_errorMonitor->VerifyFound();
-
- // Now do some buffer/image copies
- VkBufferObj buffer;
- VkMemoryPropertyFlags reqs = 0;
- buffer.init_as_src_and_dst(*m_device, 8 * granularity.height * granularity.width * granularity.depth, reqs);
- VkBufferImageCopy region = {};
- region.bufferOffset = 0;
- region.bufferRowLength = 0;
- region.bufferImageHeight = 0;
- region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- region.imageSubresource.layerCount = 1;
- region.imageExtent.height = granularity.height;
- region.imageExtent.width = granularity.width;
- region.imageExtent.depth = granularity.depth;
- region.imageOffset.x = 0;
- region.imageOffset.y = 0;
- region.imageOffset.z = 0;
-
- // Introduce failure by setting imageExtent to a bad granularity value
- region.imageExtent.width = 3;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdCopyImageToBuffer-imageOffset-01794"); // image transfer granularity
- vkCmdCopyImageToBuffer(command_buffer.handle(), srcImage, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, buffer.handle(), 1, &region);
- m_errorMonitor->VerifyFound();
- region.imageExtent.width = granularity.width;
-
- // Introduce failure by setting imageOffset to a bad granularity value
- region.imageOffset.z = 3;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdCopyBufferToImage-imageOffset-01793"); // image transfer granularity
- vkCmdCopyBufferToImage(command_buffer.handle(), buffer.handle(), dstImage, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region);
- m_errorMonitor->VerifyFound();
-
- command_buffer.end();
-}
-
-TEST_F(VkLayerTest, ImageBarrierSubpassConflicts) {
- TEST_DESCRIPTION("Add a pipeline barrier within a subpass that has conflicting state");
- ASSERT_NO_FATAL_FAILURE(Init());
-
- // A renderpass with a single subpass that declares a self-dependency
- VkAttachmentDescription attach[] = {
- {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
- VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_UNDEFINED,
- VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
- };
- VkAttachmentReference ref = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
- VkSubpassDescription subpasses[] = {
- {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, &ref, nullptr, nullptr, 0, nullptr},
- };
- VkSubpassDependency dep = {0,
- 0,
- VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
- VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
- VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
- VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
- VK_DEPENDENCY_BY_REGION_BIT};
- VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, attach, 1, subpasses, 1, &dep};
- VkRenderPass rp;
- VkRenderPass rp_noselfdep;
-
- VkResult err = vkCreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
- ASSERT_VK_SUCCESS(err);
- rpci.dependencyCount = 0;
- rpci.pDependencies = nullptr;
- err = vkCreateRenderPass(m_device->device(), &rpci, nullptr, &rp_noselfdep);
- ASSERT_VK_SUCCESS(err);
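- // rp declares a self-dependency (subpass 0 -> subpass 0), so barriers recorded inside its
- // subpass must stay within that dependency's stage masks, access masks, and dependency flags.
- // rp_noselfdep has no self-dependency at all, so any vkCmdPipelineBarrier() recorded inside it
- // violates VUID-vkCmdPipelineBarrier-pDependencies-02285.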
-
- VkImageObj image(m_device);
- image.InitNoLayout(32, 32, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
- VkImageView imageView = image.targetView(VK_FORMAT_R8G8B8A8_UNORM);
-
- VkFramebufferCreateInfo fbci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 1, &imageView, 32, 32, 1};
- VkFramebuffer fb;
- err = vkCreateFramebuffer(m_device->device(), &fbci, nullptr, &fb);
- ASSERT_VK_SUCCESS(err);
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-pDependencies-02285");
- m_commandBuffer->begin();
- VkRenderPassBeginInfo rpbi = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
- nullptr,
- rp_noselfdep,
- fb,
- {{
- 0,
- 0,
- },
- {32, 32}},
- 0,
- nullptr};
-
- vkCmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
- VkMemoryBarrier mem_barrier = {};
- mem_barrier.sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER;
- mem_barrier.pNext = NULL;
- mem_barrier.srcAccessMask = VK_ACCESS_HOST_WRITE_BIT;
- mem_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
- vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, 0, 1,
- &mem_barrier, 0, nullptr, 0, nullptr);
- m_errorMonitor->VerifyFound();
- vkCmdEndRenderPass(m_commandBuffer->handle());
-
- rpbi.renderPass = rp;
- vkCmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
- VkImageMemoryBarrier img_barrier = {};
- img_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
- img_barrier.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
- img_barrier.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
- img_barrier.oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
- img_barrier.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
- img_barrier.image = image.handle();
- img_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
- img_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
- img_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- img_barrier.subresourceRange.baseArrayLayer = 0;
- img_barrier.subresourceRange.baseMipLevel = 0;
- img_barrier.subresourceRange.layerCount = 1;
- img_barrier.subresourceRange.levelCount = 1;
- // Mis-match src stage mask
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-pDependencies-02285");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-pDependencies-02285");
- vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
- VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1, &img_barrier);
- m_errorMonitor->VerifyFound();
- // Now mis-match dst stage mask
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-pDependencies-02285");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-pDependencies-02285");
- vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_PIPELINE_STAGE_HOST_BIT,
- VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1, &img_barrier);
- m_errorMonitor->VerifyFound();
- // Set srcQueueFamilyIndex to something other than IGNORED
- img_barrier.srcQueueFamilyIndex = 0;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-srcQueueFamilyIndex-01182");
- vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
- VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1,
- &img_barrier);
- m_errorMonitor->VerifyFound();
- img_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
- // Mis-match mem barrier src access mask
- mem_barrier = {};
- mem_barrier.sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER;
- mem_barrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
- mem_barrier.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-pDependencies-02285");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-pDependencies-02285");
- vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
- VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_DEPENDENCY_BY_REGION_BIT, 1, &mem_barrier, 0, nullptr, 0,
- nullptr);
- m_errorMonitor->VerifyFound();
- // Mis-match mem barrier dst access mask. Also set srcAccessMask to 0 which should not cause an error
- mem_barrier.srcAccessMask = 0;
- mem_barrier.dstAccessMask = VK_ACCESS_HOST_WRITE_BIT;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-pDependencies-02285");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-pDependencies-02285");
- vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
- VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_DEPENDENCY_BY_REGION_BIT, 1, &mem_barrier, 0, nullptr, 0,
- nullptr);
- m_errorMonitor->VerifyFound();
- // Mis-match image barrier src access mask
- img_barrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-pDependencies-02285");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-pDependencies-02285");
- vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
- VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1,
- &img_barrier);
- m_errorMonitor->VerifyFound();
- // Mis-match image barrier dst access mask
- img_barrier.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
- img_barrier.dstAccessMask = VK_ACCESS_HOST_WRITE_BIT;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-pDependencies-02285");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-pDependencies-02285");
- vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
- VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1,
- &img_barrier);
- m_errorMonitor->VerifyFound();
- // Mis-match dependencyFlags
- img_barrier.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-pDependencies-02285");
- vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
- VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, 0 /* wrong */, 0, nullptr, 0, nullptr, 1, &img_barrier);
- m_errorMonitor->VerifyFound();
- // Send non-zero bufferMemoryBarrierCount
- // Construct a valid BufferMemoryBarrier to avoid any parameter errors
- // First we need a valid buffer to reference
- VkBufferObj buffer;
- VkMemoryPropertyFlags mem_reqs = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
- buffer.init_as_src_and_dst(*m_device, 256, mem_reqs);
- VkBufferMemoryBarrier bmb = {};
- bmb.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
- bmb.srcAccessMask = VK_ACCESS_HOST_WRITE_BIT;
- bmb.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
- bmb.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
- bmb.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
- bmb.buffer = buffer.handle();
- bmb.offset = 0;
- bmb.size = VK_WHOLE_SIZE;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-bufferMemoryBarrierCount-01178");
- vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
- VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 1, &bmb, 0,
- nullptr);
- m_errorMonitor->VerifyFound();
- // Add image barrier w/ image handle that's not in framebuffer
- VkImageObj lone_image(m_device);
- lone_image.InitNoLayout(32, 32, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
- img_barrier.image = lone_image.handle();
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-image-02635");
- vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
- VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1,
- &img_barrier);
- m_errorMonitor->VerifyFound();
- // Have image barrier with mis-matched layouts
- img_barrier.image = image.handle();
- img_barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-oldLayout-01181");
- vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
- VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1,
- &img_barrier);
- m_errorMonitor->VerifyFound();
-
- img_barrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
- img_barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-oldLayout-02636");
- vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
- VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1,
- &img_barrier);
- m_errorMonitor->VerifyFound();
- vkCmdEndRenderPass(m_commandBuffer->handle());
-
- vkDestroyFramebuffer(m_device->device(), fb, nullptr);
- vkDestroyRenderPass(m_device->device(), rp, nullptr);
- vkDestroyRenderPass(m_device->device(), rp_noselfdep, nullptr);
-}
-
-TEST_F(VkLayerTest, InvalidCmdBufferBufferDestroyed) {
- TEST_DESCRIPTION("Attempt to draw with a command buffer that is invalid due to a buffer dependency being destroyed.");
- ASSERT_NO_FATAL_FAILURE(Init());
-
- VkBuffer buffer;
- VkDeviceMemory mem;
- VkMemoryRequirements mem_reqs;
-
- VkBufferCreateInfo buf_info = {};
- buf_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
- buf_info.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
- buf_info.size = 256;
- buf_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
- VkResult err = vkCreateBuffer(m_device->device(), &buf_info, NULL, &buffer);
- ASSERT_VK_SUCCESS(err);
-
- vkGetBufferMemoryRequirements(m_device->device(), buffer, &mem_reqs);
-
- VkMemoryAllocateInfo alloc_info = {};
- alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
- alloc_info.allocationSize = mem_reqs.size;
- bool pass = false;
- pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &alloc_info, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
- if (!pass) {
- printf("%s Failed to set memory type.\n", kSkipPrefix);
- vkDestroyBuffer(m_device->device(), buffer, NULL);
- return;
- }
- err = vkAllocateMemory(m_device->device(), &alloc_info, NULL, &mem);
- ASSERT_VK_SUCCESS(err);
-
- err = vkBindBufferMemory(m_device->device(), buffer, mem, 0);
- ASSERT_VK_SUCCESS(err);
-
- m_commandBuffer->begin();
- vkCmdFillBuffer(m_commandBuffer->handle(), buffer, 0, VK_WHOLE_SIZE, 0);
- m_commandBuffer->end();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBuffer-VkBuffer");
- // Destroy buffer dependency prior to submit to cause ERROR
- vkDestroyBuffer(m_device->device(), buffer, NULL);
-
- VkSubmitInfo submit_info = {};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &m_commandBuffer->handle();
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
-
- m_errorMonitor->VerifyFound();
- vkQueueWaitIdle(m_device->m_queue);
- vkFreeMemory(m_device->handle(), mem, NULL);
-}
-
-TEST_F(VkLayerTest, InvalidCmdBufferBufferViewDestroyed) {
- TEST_DESCRIPTION("Delete bufferView bound to cmd buffer, then attempt to submit cmd buffer.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- OneOffDescriptorSet descriptor_set(m_device,
- {
- {0, VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
- });
- CreatePipelineHelper pipe(*this);
- VkBufferCreateInfo buffer_create_info = {};
- VkBufferViewCreateInfo bvci = {};
- VkBufferView view;
-
- {
- uint32_t queue_family_index = 0;
- buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
- buffer_create_info.size = 1024;
- buffer_create_info.usage = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT;
- buffer_create_info.queueFamilyIndexCount = 1;
- buffer_create_info.pQueueFamilyIndices = &queue_family_index;
- VkBufferObj buffer;
- buffer.init(*m_device, buffer_create_info);
-
- bvci.sType = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO;
- bvci.buffer = buffer.handle();
- bvci.format = VK_FORMAT_R32_SFLOAT;
- bvci.range = VK_WHOLE_SIZE;
-
- VkResult err = vkCreateBufferView(m_device->device(), &bvci, NULL, &view);
- ASSERT_VK_SUCCESS(err);
-
- descriptor_set.WriteDescriptorBufferView(0, view);
- descriptor_set.UpdateDescriptorSets();
-
- char const *fsSource =
- "#version 450\n"
- "\n"
- "layout(set=0, binding=0, r32f) uniform readonly imageBuffer s;\n"
- "layout(location=0) out vec4 x;\n"
- "void main(){\n"
- " x = imageLoad(s, 0);\n"
- "}\n";
- VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- pipe.InitInfo();
- pipe.InitState();
- pipe.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
- pipe.pipeline_layout_ = VkPipelineLayoutObj(m_device, {&descriptor_set.layout_});
- pipe.CreateGraphicsPipeline();
-
- m_commandBuffer->begin();
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
-
- VkViewport viewport = {0, 0, 16, 16, 0, 1};
- vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
- VkRect2D scissor = {{0, 0}, {16, 16}};
- vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
- // Bind pipeline to cmd buffer - This causes crash on Mali
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
- vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_layout_.handle(), 0, 1,
- &descriptor_set.set_, 0, nullptr);
- }
- // The buffer went out of scope above and was destroyed, but the view and descriptor still reference it.
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Descriptor in binding #0 index 0 is using buffer");
- m_commandBuffer->Draw(1, 0, 0, 0);
- m_errorMonitor->VerifyFound();
-
- vkDestroyBufferView(m_device->device(), view, NULL);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Descriptor in binding #0 index 0 is using bufferView");
- m_commandBuffer->Draw(1, 0, 0, 0);
- m_errorMonitor->VerifyFound();
-
- VkBufferObj buffer;
- buffer.init(*m_device, buffer_create_info);
-
- bvci.buffer = buffer.handle();
- VkResult err = vkCreateBufferView(m_device->device(), &bvci, NULL, &view);
- ASSERT_VK_SUCCESS(err);
- descriptor_set.descriptor_writes.clear();
- descriptor_set.WriteDescriptorBufferView(0, view);
- descriptor_set.UpdateDescriptorSets();
-
- vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_layout_.handle(), 0, 1,
- &descriptor_set.set_, 0, nullptr);
- m_commandBuffer->Draw(1, 0, 0, 0);
- m_commandBuffer->EndRenderPass();
- m_commandBuffer->end();
-
- // Delete BufferView in order to invalidate cmd buffer
- vkDestroyBufferView(m_device->device(), view, NULL);
- // Now attempt submit of cmd buffer
- VkSubmitInfo submit_info = {};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &m_commandBuffer->handle();
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBuffer-VkBufferView");
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, InvalidCmdBufferImageDestroyed) {
- TEST_DESCRIPTION("Attempt to draw with a command buffer that is invalid due to an image dependency being destroyed.");
- ASSERT_NO_FATAL_FAILURE(Init());
- {
- const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
- VkImageCreateInfo image_create_info = {};
- image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- image_create_info.pNext = NULL;
- image_create_info.imageType = VK_IMAGE_TYPE_2D;
- image_create_info.format = tex_format;
- image_create_info.extent.width = 32;
- image_create_info.extent.height = 32;
- image_create_info.extent.depth = 1;
- image_create_info.mipLevels = 1;
- image_create_info.arrayLayers = 1;
- image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
- image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
- image_create_info.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
- image_create_info.flags = 0;
- VkImageObj image(m_device);
- image.init(&image_create_info);
-
- m_commandBuffer->begin();
- VkClearColorValue ccv;
- ccv.float32[0] = 1.0f;
- ccv.float32[1] = 1.0f;
- ccv.float32[2] = 1.0f;
- ccv.float32[3] = 1.0f;
- VkImageSubresourceRange isr = {};
- isr.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- isr.baseArrayLayer = 0;
- isr.baseMipLevel = 0;
- isr.layerCount = 1;
- isr.levelCount = 1;
- vkCmdClearColorImage(m_commandBuffer->handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, &ccv, 1, &isr);
- m_commandBuffer->end();
- }
- // Destroy image dependency prior to submit to cause ERROR
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBuffer-VkImage");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBuffer-VkDeviceMemory");
-
- VkSubmitInfo submit_info = {};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &m_commandBuffer->handle();
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
-
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, InvalidCmdBufferFramebufferImageDestroyed) {
- TEST_DESCRIPTION(
- "Attempt to draw with a command buffer that is invalid due to a framebuffer image dependency being destroyed.");
- ASSERT_NO_FATAL_FAILURE(Init());
- VkFormatProperties format_properties;
- VkResult err = VK_SUCCESS;
- vkGetPhysicalDeviceFormatProperties(gpu(), VK_FORMAT_B8G8R8A8_UNORM, &format_properties);
- if (!(format_properties.optimalTilingFeatures & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT)) {
- printf("%s Image format doesn't support required features.\n", kSkipPrefix);
- return;
- }
- VkFramebuffer fb;
- VkImageView view;
-
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
- {
- VkImageCreateInfo image_ci = {};
- image_ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- image_ci.pNext = NULL;
- image_ci.imageType = VK_IMAGE_TYPE_2D;
- image_ci.format = VK_FORMAT_B8G8R8A8_UNORM;
- image_ci.extent.width = 32;
- image_ci.extent.height = 32;
- image_ci.extent.depth = 1;
- image_ci.mipLevels = 1;
- image_ci.arrayLayers = 1;
- image_ci.samples = VK_SAMPLE_COUNT_1_BIT;
- image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
- image_ci.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
- image_ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
- image_ci.flags = 0;
- VkImageObj image(m_device);
- image.init(&image_ci);
-
- VkImageViewCreateInfo ivci = {
- VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
- nullptr,
- 0,
- image.handle(),
- VK_IMAGE_VIEW_TYPE_2D,
- VK_FORMAT_B8G8R8A8_UNORM,
- {VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_G, VK_COMPONENT_SWIZZLE_B, VK_COMPONENT_SWIZZLE_A},
- {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1},
- };
- err = vkCreateImageView(m_device->device(), &ivci, nullptr, &view);
- ASSERT_VK_SUCCESS(err);
-
- VkFramebufferCreateInfo fci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, m_renderPass, 1, &view, 32, 32, 1};
- err = vkCreateFramebuffer(m_device->device(), &fci, nullptr, &fb);
- ASSERT_VK_SUCCESS(err);
-
- // Just use default renderpass with our framebuffer
- m_renderPassBeginInfo.framebuffer = fb;
- m_renderPassBeginInfo.renderArea.extent.width = 32;
- m_renderPassBeginInfo.renderArea.extent.height = 32;
- // Create Null cmd buffer for submit
- m_commandBuffer->begin();
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
- m_commandBuffer->EndRenderPass();
- m_commandBuffer->end();
- }
- // Destroy image attached to framebuffer to invalidate cmd buffer
- // Now attempt to submit cmd buffer and verify error
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBuffer-VkImage");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBuffer-VkDeviceMemory");
- m_commandBuffer->QueueCommandBuffer(false);
- m_errorMonitor->VerifyFound();
-
- vkDestroyFramebuffer(m_device->device(), fb, nullptr);
- vkDestroyImageView(m_device->device(), view, nullptr);
-}
-
-TEST_F(VkLayerTest, ImageMemoryNotBound) {
- TEST_DESCRIPTION("Attempt to draw with an image which has not had memory bound to it.");
- ASSERT_NO_FATAL_FAILURE(Init());
-
- VkImage image;
- const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
- VkImageCreateInfo image_create_info = {};
- image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- image_create_info.pNext = NULL;
- image_create_info.imageType = VK_IMAGE_TYPE_2D;
- image_create_info.format = tex_format;
- image_create_info.extent.width = 32;
- image_create_info.extent.height = 32;
- image_create_info.extent.depth = 1;
- image_create_info.mipLevels = 1;
- image_create_info.arrayLayers = 1;
- image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
- image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
- image_create_info.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
- image_create_info.flags = 0;
- VkResult err = vkCreateImage(m_device->device(), &image_create_info, NULL, &image);
- ASSERT_VK_SUCCESS(err);
- // Memory must be bound to an image before recording a command that uses it in a cmd buffer
- VkMemoryRequirements mem_reqs;
- VkDeviceMemory image_mem;
- bool pass;
- VkMemoryAllocateInfo mem_alloc = {};
- mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
- mem_alloc.pNext = NULL;
- mem_alloc.memoryTypeIndex = 0;
- vkGetImageMemoryRequirements(m_device->device(), image, &mem_reqs);
- mem_alloc.allocationSize = mem_reqs.size;
- pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &mem_alloc, 0);
- ASSERT_TRUE(pass);
- err = vkAllocateMemory(m_device->device(), &mem_alloc, NULL, &image_mem);
- ASSERT_VK_SUCCESS(err);
-
- // Introduce error, do not call vkBindImageMemory(m_device->device(), image, image_mem, 0);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- " used with no memory bound. Memory should be bound by calling vkBindImageMemory().");
-
- m_commandBuffer->begin();
- VkClearColorValue ccv;
- ccv.float32[0] = 1.0f;
- ccv.float32[1] = 1.0f;
- ccv.float32[2] = 1.0f;
- ccv.float32[3] = 1.0f;
- VkImageSubresourceRange isr = {};
- isr.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- isr.baseArrayLayer = 0;
- isr.baseMipLevel = 0;
- isr.layerCount = 1;
- isr.levelCount = 1;
- vkCmdClearColorImage(m_commandBuffer->handle(), image, VK_IMAGE_LAYOUT_GENERAL, &ccv, 1, &isr);
- m_commandBuffer->end();
-
- m_errorMonitor->VerifyFound();
- vkDestroyImage(m_device->device(), image, NULL);
- vkFreeMemory(m_device->device(), image_mem, nullptr);
-}
-
-TEST_F(VkLayerTest, BufferMemoryNotBound) {
- TEST_DESCRIPTION("Attempt to copy from a buffer which has not had memory bound to it.");
- ASSERT_NO_FATAL_FAILURE(Init());
-
- VkImageObj image(m_device);
- image.Init(128, 128, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
- VK_IMAGE_TILING_OPTIMAL, 0);
- ASSERT_TRUE(image.initialized());
-
- VkBuffer buffer;
- VkDeviceMemory mem;
- VkMemoryRequirements mem_reqs;
-
- VkBufferCreateInfo buf_info = {};
- buf_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
- buf_info.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
- buf_info.size = 1024;
- buf_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
- VkResult err = vkCreateBuffer(m_device->device(), &buf_info, NULL, &buffer);
- ASSERT_VK_SUCCESS(err);
-
- vkGetBufferMemoryRequirements(m_device->device(), buffer, &mem_reqs);
-
- VkMemoryAllocateInfo alloc_info = {};
- alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
- alloc_info.allocationSize = 1024;
- bool pass = false;
- pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &alloc_info, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
- if (!pass) {
- printf("%s Failed to set memory type.\n", kSkipPrefix);
- vkDestroyBuffer(m_device->device(), buffer, NULL);
- return;
- }
- err = vkAllocateMemory(m_device->device(), &alloc_info, NULL, &mem);
- ASSERT_VK_SUCCESS(err);
-
- // Introduce failure by not calling vkBindBufferMemory(m_device->device(), buffer, mem, 0);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- " used with no memory bound. Memory should be bound by calling vkBindBufferMemory().");
- VkBufferImageCopy region = {};
- region.bufferRowLength = 16;
- region.bufferImageHeight = 16;
- region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
-
- region.imageSubresource.layerCount = 1;
- region.imageExtent.height = 4;
- region.imageExtent.width = 4;
- region.imageExtent.depth = 1;
- m_commandBuffer->begin();
- vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer, image.handle(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region);
- m_commandBuffer->end();
-
- m_errorMonitor->VerifyFound();
-
- vkDestroyBuffer(m_device->device(), buffer, NULL);
- vkFreeMemory(m_device->handle(), mem, NULL);
-}
-
-TEST_F(VkLayerTest, MultiplaneImageLayoutBadAspectFlags) {
- TEST_DESCRIPTION("Query layout of a multiplane image using illegal aspect flag masks");
-
- // Enable KHR multiplane req'd extensions
- bool mp_extensions = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
- VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION);
- if (mp_extensions) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- }
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME);
- mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
- mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
- if (mp_extensions) {
- m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
- } else {
- printf("%s test requires KHR multiplane extensions, not available. Skipping.\n", kSkipPrefix);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- VkImageCreateInfo ci = {};
- ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- ci.pNext = NULL;
- ci.flags = 0;
- ci.imageType = VK_IMAGE_TYPE_2D;
- ci.format = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR;
- ci.extent = {128, 128, 1};
- ci.mipLevels = 1;
- ci.arrayLayers = 1;
- ci.samples = VK_SAMPLE_COUNT_1_BIT;
- ci.tiling = VK_IMAGE_TILING_LINEAR;
- ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
- ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
- ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
-
- // Verify formats
- bool supported = ImageFormatAndFeaturesSupported(instance(), gpu(), ci, VK_FORMAT_FEATURE_TRANSFER_SRC_BIT);
- ci.format = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR;
- supported = supported && ImageFormatAndFeaturesSupported(instance(), gpu(), ci, VK_FORMAT_FEATURE_TRANSFER_SRC_BIT);
- if (!supported) {
- printf("%s Multiplane image format not supported. Skipping test.\n", kSkipPrefix);
- return; // Assume there's low ROI on searching for different mp formats
- }
-
- VkImage image_2plane, image_3plane;
- ci.format = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR;
- VkResult err = vkCreateImage(device(), &ci, NULL, &image_2plane);
- ASSERT_VK_SUCCESS(err);
-
- ci.format = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR;
- err = vkCreateImage(device(), &ci, NULL, &image_3plane);
- ASSERT_VK_SUCCESS(err);
-
- // Query layout of 3rd plane, for a 2-plane image
- VkImageSubresource subres = {};
- subres.aspectMask = VK_IMAGE_ASPECT_PLANE_2_BIT_KHR;
- subres.mipLevel = 0;
- subres.arrayLayer = 0;
- VkSubresourceLayout layout = {};
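- // vkGetImageSubresourceLayout() on a multi-planar image must be queried with a plane aspect
- // that exists in the format: a 2-plane format may only be queried with PLANE_0 or PLANE_1
- // (so PLANE_2 is rejected below), and a 3-plane format may only be queried with one of its
- // three plane aspects (so COLOR is rejected).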
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkGetImageSubresourceLayout-format-01581");
- vkGetImageSubresourceLayout(device(), image_2plane, &subres, &layout);
- m_errorMonitor->VerifyFound();
-
- // Query layout using color aspect, for a 3-plane image
- subres.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkGetImageSubresourceLayout-format-01582");
- vkGetImageSubresourceLayout(device(), image_3plane, &subres, &layout);
- m_errorMonitor->VerifyFound();
-
- // Clean up
- vkDestroyImage(device(), image_2plane, NULL);
- vkDestroyImage(device(), image_3plane, NULL);
-}
-
-TEST_F(VkLayerTest, InvalidBufferViewObject) {
- // Create a single TEXEL_BUFFER descriptor and send it an invalid bufferView
- // First, cause the bufferView to be invalid due to underlying buffer being destroyed
- // Then destroy view itself and verify that same error is hit
- VkResult err;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-descriptorType-00323");
- ASSERT_NO_FATAL_FAILURE(Init());
-
- OneOffDescriptorSet descriptor_set(m_device, {
- {0, VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
- });
- VkBufferView view;
- {
- // Create a valid bufferView to start with
- uint32_t queue_family_index = 0;
- VkBufferCreateInfo buffer_create_info = {};
- buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
- buffer_create_info.size = 1024;
- buffer_create_info.usage = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT;
- buffer_create_info.queueFamilyIndexCount = 1;
- buffer_create_info.pQueueFamilyIndices = &queue_family_index;
- VkBufferObj buffer;
- buffer.init(*m_device, buffer_create_info);
-
- VkBufferViewCreateInfo bvci = {};
- bvci.sType = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO;
- bvci.buffer = buffer.handle();
- bvci.format = VK_FORMAT_R32_SFLOAT;
- bvci.range = VK_WHOLE_SIZE;
-
- err = vkCreateBufferView(m_device->device(), &bvci, NULL, &view);
- ASSERT_VK_SUCCESS(err);
- }
- // The buffer underlying the view was destroyed when it went out of scope above; updating the descriptor should now hit an error in CV (core validation)
-
- VkWriteDescriptorSet descriptor_write;
- memset(&descriptor_write, 0, sizeof(descriptor_write));
- descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
- descriptor_write.dstSet = descriptor_set.set_;
- descriptor_write.dstBinding = 0;
- descriptor_write.descriptorCount = 1;
- descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
- descriptor_write.pTexelBufferView = &view;
-
- vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
- m_errorMonitor->VerifyFound();
-
- // Now destroy the view itself and verify the same error, which is hit in PV (parameter validation) this time
- vkDestroyBufferView(m_device->device(), view, NULL);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-descriptorType-00323");
- vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, CreateBufferViewNoMemoryBoundToBuffer) {
- TEST_DESCRIPTION("Attempt to create a buffer view with a buffer that has no memory bound to it.");
-
- VkResult err;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- " used with no memory bound. Memory should be bound by calling vkBindBufferMemory().");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- // Create a buffer with no bound memory and then attempt to create
- // a buffer view.
- VkBufferCreateInfo buff_ci = {};
- buff_ci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
- buff_ci.usage = VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT;
- buff_ci.size = 256;
- buff_ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
- VkBuffer buffer;
- err = vkCreateBuffer(m_device->device(), &buff_ci, NULL, &buffer);
- ASSERT_VK_SUCCESS(err);
-
- VkBufferViewCreateInfo buff_view_ci = {};
- buff_view_ci.sType = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO;
- buff_view_ci.buffer = buffer;
- buff_view_ci.format = VK_FORMAT_R8_UNORM;
- buff_view_ci.range = VK_WHOLE_SIZE;
- VkBufferView buff_view;
- err = vkCreateBufferView(m_device->device(), &buff_view_ci, NULL, &buff_view);
-
- m_errorMonitor->VerifyFound();
- vkDestroyBuffer(m_device->device(), buffer, NULL);
- // If last error is success, it still created the view, so delete it.
- if (err == VK_SUCCESS) {
- vkDestroyBufferView(m_device->device(), buff_view, NULL);
- }
-}
-
-TEST_F(VkLayerTest, InvalidBufferViewCreateInfoEntries) {
- TEST_DESCRIPTION("Attempt to create a buffer view with invalid create info.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- const VkPhysicalDeviceLimits &dev_limits = m_device->props.limits;
- const VkDeviceSize minTexelBufferOffsetAlignment = dev_limits.minTexelBufferOffsetAlignment;
- if (minTexelBufferOffsetAlignment == 1) {
- printf("%s Test requires minTexelOffsetAlignment to not be equal to 1. \n", kSkipPrefix);
- return;
- }
-
- const VkFormat format_with_uniform_texel_support = VK_FORMAT_R8G8B8A8_UNORM;
- const char *format_with_uniform_texel_support_string = "VK_FORMAT_R8G8B8A8_UNORM";
- const VkFormat format_without_texel_support = VK_FORMAT_R8G8B8_UNORM;
- const char *format_without_texel_support_string = "VK_FORMAT_R8G8B8_UNORM";
- VkFormatProperties format_properties;
- vkGetPhysicalDeviceFormatProperties(gpu(), format_with_uniform_texel_support, &format_properties);
- if (!(format_properties.bufferFeatures & VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT)) {
- printf("%s Test requires %s to support VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT\n", kSkipPrefix,
- format_with_uniform_texel_support_string);
- return;
- }
- vkGetPhysicalDeviceFormatProperties(gpu(), format_without_texel_support, &format_properties);
- if ((format_properties.bufferFeatures & VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT) ||
- (format_properties.bufferFeatures & VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT)) {
- printf(
- "%s Test requires %s to not support VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT nor "
- "VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT\n",
- kSkipPrefix, format_without_texel_support_string);
- return;
- }
-
- // Create a test buffer--buffer must have been created using VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT or
- // VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT, so use a different usage value instead to cause an error
- const VkDeviceSize resource_size = 1024;
- const VkBufferCreateInfo bad_buffer_info = VkBufferObj::create_info(resource_size, VK_BUFFER_USAGE_INDEX_BUFFER_BIT);
- VkBufferObj bad_buffer;
- bad_buffer.init(*m_device, bad_buffer_info, (VkMemoryPropertyFlags)VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
-
- // Create a test buffer view
- VkBufferViewCreateInfo buff_view_ci = {};
- buff_view_ci.sType = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO;
- buff_view_ci.buffer = bad_buffer.handle();
- buff_view_ci.format = format_with_uniform_texel_support;
- buff_view_ci.range = VK_WHOLE_SIZE;
- CreateBufferViewTest(*this, &buff_view_ci, {"VUID-VkBufferViewCreateInfo-buffer-00932"});
-
- // Create a better test buffer
- const VkBufferCreateInfo buffer_info = VkBufferObj::create_info(resource_size, VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT);
- VkBufferObj buffer;
- buffer.init(*m_device, buffer_info, (VkMemoryPropertyFlags)VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
-
- // Offset must be less than the size of the buffer, so set it equal to the buffer size to cause an error
- buff_view_ci.buffer = buffer.handle();
- buff_view_ci.offset = buffer.create_info().size;
- CreateBufferViewTest(*this, &buff_view_ci, {"VUID-VkBufferViewCreateInfo-offset-00925"});
-
- // Offset must be a multiple of VkPhysicalDeviceLimits::minTexelBufferOffsetAlignment so add 1 to ensure it is not
- buff_view_ci.offset = minTexelBufferOffsetAlignment + 1;
- CreateBufferViewTest(*this, &buff_view_ci, {"VUID-VkBufferViewCreateInfo-offset-02749"});
-
- // Set offset to acceptable value for range tests
- buff_view_ci.offset = minTexelBufferOffsetAlignment;
- // Setting range equal to 0 will cause an error to occur
- buff_view_ci.range = 0;
- CreateBufferViewTest(*this, &buff_view_ci, {"VUID-VkBufferViewCreateInfo-range-00928"});
-
- uint32_t format_size = FormatElementSize(buff_view_ci.format);
- // Range must be a multiple of the element size of format, so add one to ensure it is not
- buff_view_ci.range = format_size + 1;
- CreateBufferViewTest(*this, &buff_view_ci, {"VUID-VkBufferViewCreateInfo-range-00929"});
-
- // Twice the element size of format multiplied by VkPhysicalDeviceLimits::maxTexelBufferElements guarantees range divided by the
- // element size is greater than maxTexelBufferElements, causing failure
- buff_view_ci.range = 2 * format_size * dev_limits.maxTexelBufferElements;
- CreateBufferViewTest(*this, &buff_view_ci,
- {"VUID-VkBufferViewCreateInfo-range-00930", "VUID-VkBufferViewCreateInfo-offset-00931"});
-
-    // Set range to an acceptable value for the buffer usage tests
- buff_view_ci.format = format_without_texel_support;
- buff_view_ci.range = VK_WHOLE_SIZE;
-
- // `buffer` was created using VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT so we can use that for the first buffer test
- CreateBufferViewTest(*this, &buff_view_ci, {"VUID-VkBufferViewCreateInfo-buffer-00933"});
-
- // Create a new buffer using VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT
- const VkBufferCreateInfo storage_buffer_info =
- VkBufferObj::create_info(resource_size, VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT);
- VkBufferObj storage_buffer;
- storage_buffer.init(*m_device, storage_buffer_info, (VkMemoryPropertyFlags)VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
-
- buff_view_ci.buffer = storage_buffer.handle();
- CreateBufferViewTest(*this, &buff_view_ci, {"VUID-VkBufferViewCreateInfo-buffer-00934"});
-}
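For contrast with the failure cases above, a minimal sketch of a VkBufferView that satisfies these rules: the buffer carries a texel-buffer usage bit, the offset is a multiple of minTexelBufferOffsetAlignment, and the range is a non-zero multiple of the format's texel size. The `device`, `buffer`, and `limits` names are assumed to be in scope; this snippet is not part of the test suite.

    VkBufferViewCreateInfo view_ci = {};
    view_ci.sType = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO;
    view_ci.buffer = buffer;                                // created with VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT
    view_ci.format = VK_FORMAT_R8G8B8A8_UNORM;              // 4-byte texel
    view_ci.offset = limits.minTexelBufferOffsetAlignment;  // multiple of the required alignment
    view_ci.range = 64 * 4;                                 // non-zero, a multiple of the texel size, within the buffer
    VkBufferView view = VK_NULL_HANDLE;
    VkResult result = vkCreateBufferView(device, &view_ci, nullptr, &view);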
-
-TEST_F(VkLayerTest, InvalidTexelBufferAlignment) {
- TEST_DESCRIPTION("Test VK_EXT_texel_buffer_alignment.");
-
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- } else {
- printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
- VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- std::array<const char *, 1> required_device_extensions = {{VK_EXT_TEXEL_BUFFER_ALIGNMENT_EXTENSION_NAME}};
- for (auto device_extension : required_device_extensions) {
- if (DeviceExtensionSupported(gpu(), nullptr, device_extension)) {
- m_device_extension_names.push_back(device_extension);
- } else {
- printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, device_extension);
- return;
- }
- }
-
- if (DeviceIsMockICD() || DeviceSimulation()) {
- printf("%s MockICD does not support this feature, skipping tests\n", kSkipPrefix);
- return;
- }
-
- PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
- (PFN_vkGetPhysicalDeviceFeatures2KHR)vkGetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
- ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
-
- // Create a device that enables texel_buffer_alignment
- auto texel_buffer_alignment_features = lvl_init_struct<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT>();
- auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&texel_buffer_alignment_features);
- vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
- texel_buffer_alignment_features.texelBufferAlignment = VK_TRUE;
-
- VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT align_props = {};
- align_props.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT;
- VkPhysicalDeviceProperties2 pd_props2 = {};
- pd_props2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
- pd_props2.pNext = &align_props;
- vkGetPhysicalDeviceProperties2(gpu(), &pd_props2);
-
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- const VkFormat format_with_uniform_texel_support = VK_FORMAT_R8G8B8A8_UNORM;
-
- const VkDeviceSize resource_size = 1024;
- VkBufferCreateInfo buffer_info = VkBufferObj::create_info(
- resource_size, VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT | VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT);
- VkBufferObj buffer;
- buffer.init(*m_device, buffer_info, (VkMemoryPropertyFlags)VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
-
- // Create a test buffer view
- VkBufferViewCreateInfo buff_view_ci = {};
- buff_view_ci.sType = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO;
- buff_view_ci.buffer = buffer.handle();
- buff_view_ci.format = format_with_uniform_texel_support;
- buff_view_ci.range = VK_WHOLE_SIZE;
-
- buff_view_ci.offset = 1;
- std::vector<std::string> expectedErrors;
- if (buff_view_ci.offset < align_props.storageTexelBufferOffsetAlignmentBytes) {
- expectedErrors.push_back("VUID-VkBufferViewCreateInfo-buffer-02750");
- }
- if (buff_view_ci.offset < align_props.uniformTexelBufferOffsetAlignmentBytes) {
- expectedErrors.push_back("VUID-VkBufferViewCreateInfo-buffer-02751");
- }
- CreateBufferViewTest(*this, &buff_view_ci, expectedErrors);
- expectedErrors.clear();
-
- buff_view_ci.offset = 4;
- if (buff_view_ci.offset < align_props.storageTexelBufferOffsetAlignmentBytes &&
- !align_props.storageTexelBufferOffsetSingleTexelAlignment) {
- expectedErrors.push_back("VUID-VkBufferViewCreateInfo-buffer-02750");
- }
- if (buff_view_ci.offset < align_props.uniformTexelBufferOffsetAlignmentBytes &&
- !align_props.uniformTexelBufferOffsetSingleTexelAlignment) {
- expectedErrors.push_back("VUID-VkBufferViewCreateInfo-buffer-02751");
- }
- CreateBufferViewTest(*this, &buff_view_ci, expectedErrors);
- expectedErrors.clear();
-
- // Test a 3-component format
- VkFormatProperties format_properties;
- vkGetPhysicalDeviceFormatProperties(gpu(), VK_FORMAT_R32G32B32_SFLOAT, &format_properties);
- if (format_properties.bufferFeatures & VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT) {
- buffer_info.usage = VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT;
- VkBufferObj buffer2;
- buffer2.init(*m_device, buffer_info, (VkMemoryPropertyFlags)VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
-
- // Create a test buffer view
- buff_view_ci.buffer = buffer2.handle();
-
- buff_view_ci.format = VK_FORMAT_R32G32B32_SFLOAT;
- buff_view_ci.offset = 1;
- if (buff_view_ci.offset < align_props.uniformTexelBufferOffsetAlignmentBytes) {
- expectedErrors.push_back("VUID-VkBufferViewCreateInfo-buffer-02751");
- }
- CreateBufferViewTest(*this, &buff_view_ci, expectedErrors);
- expectedErrors.clear();
-
- buff_view_ci.offset = 4;
- if (buff_view_ci.offset < align_props.uniformTexelBufferOffsetAlignmentBytes &&
- !align_props.uniformTexelBufferOffsetSingleTexelAlignment) {
- expectedErrors.push_back("VUID-VkBufferViewCreateInfo-buffer-02751");
- }
- CreateBufferViewTest(*this, &buff_view_ci, expectedErrors);
- expectedErrors.clear();
- }
-}
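As a hedged aside, an offset rejected by the alignment checks above can be made legal by rounding it up to the larger of the two reported alignments; `align_props` is assumed to have been queried as in this test.

    VkDeviceSize required = (align_props.uniformTexelBufferOffsetAlignmentBytes > align_props.storageTexelBufferOffsetAlignmentBytes)
                                ? align_props.uniformTexelBufferOffsetAlignmentBytes
                                : align_props.storageTexelBufferOffsetAlignmentBytes;
    VkDeviceSize raw_offset = 1;
    VkDeviceSize aligned_offset = ((raw_offset + required - 1) / required) * required;  // smallest legal offset >= raw_offset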
-
-TEST_F(VkLayerTest, FillBufferWithinRenderPass) {
- // Call CmdFillBuffer within an active renderpass
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdFillBuffer-renderpass");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- m_commandBuffer->begin();
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
-
- VkMemoryPropertyFlags reqs = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
- VkBufferObj dstBuffer;
- dstBuffer.init_as_dst(*m_device, (VkDeviceSize)1024, reqs);
-
- m_commandBuffer->FillBuffer(dstBuffer.handle(), 0, 4, 0x11111111);
-
- m_errorMonitor->VerifyFound();
-
- m_commandBuffer->EndRenderPass();
- m_commandBuffer->end();
-}
-
-TEST_F(VkLayerTest, UpdateBufferWithinRenderPass) {
- // Call CmdUpdateBuffer within an active renderpass
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdUpdateBuffer-renderpass");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- m_commandBuffer->begin();
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
-
- VkMemoryPropertyFlags reqs = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
- VkBufferObj dstBuffer;
- dstBuffer.init_as_dst(*m_device, (VkDeviceSize)1024, reqs);
-
- VkDeviceSize dstOffset = 0;
- uint32_t Data[] = {1, 2, 3, 4, 5, 6, 7, 8};
-    VkDeviceSize dataSize = sizeof(Data);
-    vkCmdUpdateBuffer(m_commandBuffer->handle(), dstBuffer.handle(), dstOffset, dataSize, Data);
-
- m_errorMonitor->VerifyFound();
-
- m_commandBuffer->EndRenderPass();
- m_commandBuffer->end();
-}
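A sketch of the ordering both of these tests imply: vkCmdFillBuffer and vkCmdUpdateBuffer are outside-render-pass commands, so they must be recorded before vkCmdBeginRenderPass or after vkCmdEndRenderPass. The `cb`, `dst`, `begin_info`, and `rp_begin` names are assumptions, not framework objects.

    vkBeginCommandBuffer(cb, &begin_info);
    const uint32_t data[4] = {1, 2, 3, 4};
    vkCmdFillBuffer(cb, dst, 0 /*dstOffset*/, 16 /*size in bytes*/, 0x11111111);
    vkCmdUpdateBuffer(cb, dst, 0 /*dstOffset*/, sizeof(data) /*size in bytes*/, data);
    vkCmdBeginRenderPass(cb, &rp_begin, VK_SUBPASS_CONTENTS_INLINE);
    // ... draws belong inside the render pass ...
    vkCmdEndRenderPass(cb);
    vkEndCommandBuffer(cb);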
-
-TEST_F(VkLayerTest, ClearColorImageWithBadRange) {
- TEST_DESCRIPTION("Record clear color with an invalid VkImageSubresourceRange");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkImageObj image(m_device);
- image.Init(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL);
- ASSERT_TRUE(image.create_info().arrayLayers == 1);
- ASSERT_TRUE(image.initialized());
- image.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
-
- const VkClearColorValue clear_color = {{0.0f, 0.0f, 0.0f, 1.0f}};
-
- m_commandBuffer->begin();
- const auto cb_handle = m_commandBuffer->handle();
-
- // Try baseMipLevel >= image.mipLevels with VK_REMAINING_MIP_LEVELS
- {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearColorImage-baseMipLevel-01470");
- const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 1, VK_REMAINING_MIP_LEVELS, 0, 1};
- vkCmdClearColorImage(cb_handle, image.handle(), image.Layout(), &clear_color, 1, &range);
- m_errorMonitor->VerifyFound();
- }
-
- // Try baseMipLevel >= image.mipLevels without VK_REMAINING_MIP_LEVELS
- {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearColorImage-baseMipLevel-01470");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearColorImage-pRanges-01692");
- const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 1, 1, 0, 1};
- vkCmdClearColorImage(cb_handle, image.handle(), image.Layout(), &clear_color, 1, &range);
- m_errorMonitor->VerifyFound();
- }
-
- // Try levelCount = 0
- {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearColorImage-pRanges-01692");
- const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 0, 1};
- vkCmdClearColorImage(cb_handle, image.handle(), image.Layout(), &clear_color, 1, &range);
- m_errorMonitor->VerifyFound();
- }
-
- // Try baseMipLevel + levelCount > image.mipLevels
- {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearColorImage-pRanges-01692");
- const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 2, 0, 1};
- vkCmdClearColorImage(cb_handle, image.handle(), image.Layout(), &clear_color, 1, &range);
- m_errorMonitor->VerifyFound();
- }
-
- // Try baseArrayLayer >= image.arrayLayers with VK_REMAINING_ARRAY_LAYERS
- {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearColorImage-baseArrayLayer-01472");
- const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 1, VK_REMAINING_ARRAY_LAYERS};
- vkCmdClearColorImage(cb_handle, image.handle(), image.Layout(), &clear_color, 1, &range);
- m_errorMonitor->VerifyFound();
- }
-
- // Try baseArrayLayer >= image.arrayLayers without VK_REMAINING_ARRAY_LAYERS
- {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearColorImage-baseArrayLayer-01472");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearColorImage-pRanges-01693");
- const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 1, 1};
- vkCmdClearColorImage(cb_handle, image.handle(), image.Layout(), &clear_color, 1, &range);
- m_errorMonitor->VerifyFound();
- }
-
- // Try layerCount = 0
- {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearColorImage-pRanges-01693");
- const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 0};
- vkCmdClearColorImage(cb_handle, image.handle(), image.Layout(), &clear_color, 1, &range);
- m_errorMonitor->VerifyFound();
- }
-
- // Try baseArrayLayer + layerCount > image.arrayLayers
- {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearColorImage-pRanges-01693");
- const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 2};
- vkCmdClearColorImage(cb_handle, image.handle(), image.Layout(), &clear_color, 1, &range);
- m_errorMonitor->VerifyFound();
- }
-}
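Every invalid range above breaks one of a few rules; a small hypothetical checker (not part of the framework) spells them out, assuming `ci` is the VkImageCreateInfo the image was created with.

    bool SubresourceRangeIsValid(const VkImageCreateInfo &ci, const VkImageSubresourceRange &r) {
        if (r.baseMipLevel >= ci.mipLevels || r.baseArrayLayer >= ci.arrayLayers) return false;
        if (r.levelCount != VK_REMAINING_MIP_LEVELS &&
            (r.levelCount == 0 || r.baseMipLevel + r.levelCount > ci.mipLevels)) return false;
        if (r.layerCount != VK_REMAINING_ARRAY_LAYERS &&
            (r.layerCount == 0 || r.baseArrayLayer + r.layerCount > ci.arrayLayers)) return false;
        return true;  // aspectMask compatibility with the format is checked separately
    }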
-
-TEST_F(VkLayerTest, ClearDepthStencilWithBadRange) {
- TEST_DESCRIPTION("Record clear depth with an invalid VkImageSubresourceRange");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- const auto depth_format = FindSupportedDepthStencilFormat(gpu());
- if (!depth_format) {
- printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
- return;
- }
-
- VkImageObj image(m_device);
- image.Init(32, 32, 1, depth_format, VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL);
- ASSERT_TRUE(image.create_info().arrayLayers == 1);
- ASSERT_TRUE(image.initialized());
- const VkImageAspectFlags ds_aspect = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
- image.SetLayout(ds_aspect, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
-
- const VkClearDepthStencilValue clear_value = {};
-
- m_commandBuffer->begin();
- const auto cb_handle = m_commandBuffer->handle();
-
- // Try baseMipLevel >= image.mipLevels with VK_REMAINING_MIP_LEVELS
- {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearDepthStencilImage-baseMipLevel-01474");
- const VkImageSubresourceRange range = {ds_aspect, 1, VK_REMAINING_MIP_LEVELS, 0, 1};
- vkCmdClearDepthStencilImage(cb_handle, image.handle(), image.Layout(), &clear_value, 1, &range);
- m_errorMonitor->VerifyFound();
- }
-
- // Try baseMipLevel >= image.mipLevels without VK_REMAINING_MIP_LEVELS
- {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearDepthStencilImage-baseMipLevel-01474");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearDepthStencilImage-pRanges-01694");
- const VkImageSubresourceRange range = {ds_aspect, 1, 1, 0, 1};
- vkCmdClearDepthStencilImage(cb_handle, image.handle(), image.Layout(), &clear_value, 1, &range);
- m_errorMonitor->VerifyFound();
- }
-
- // Try levelCount = 0
- {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearDepthStencilImage-pRanges-01694");
- const VkImageSubresourceRange range = {ds_aspect, 0, 0, 0, 1};
- vkCmdClearDepthStencilImage(cb_handle, image.handle(), image.Layout(), &clear_value, 1, &range);
- m_errorMonitor->VerifyFound();
- }
-
- // Try baseMipLevel + levelCount > image.mipLevels
- {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearDepthStencilImage-pRanges-01694");
- const VkImageSubresourceRange range = {ds_aspect, 0, 2, 0, 1};
- vkCmdClearDepthStencilImage(cb_handle, image.handle(), image.Layout(), &clear_value, 1, &range);
- m_errorMonitor->VerifyFound();
- }
-
- // Try baseArrayLayer >= image.arrayLayers with VK_REMAINING_ARRAY_LAYERS
- {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdClearDepthStencilImage-baseArrayLayer-01476");
- const VkImageSubresourceRange range = {ds_aspect, 0, 1, 1, VK_REMAINING_ARRAY_LAYERS};
- vkCmdClearDepthStencilImage(cb_handle, image.handle(), image.Layout(), &clear_value, 1, &range);
- m_errorMonitor->VerifyFound();
- }
-
- // Try baseArrayLayer >= image.arrayLayers without VK_REMAINING_ARRAY_LAYERS
- {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdClearDepthStencilImage-baseArrayLayer-01476");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearDepthStencilImage-pRanges-01695");
- const VkImageSubresourceRange range = {ds_aspect, 0, 1, 1, 1};
- vkCmdClearDepthStencilImage(cb_handle, image.handle(), image.Layout(), &clear_value, 1, &range);
- m_errorMonitor->VerifyFound();
- }
-
- // Try layerCount = 0
- {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearDepthStencilImage-pRanges-01695");
- const VkImageSubresourceRange range = {ds_aspect, 0, 1, 0, 0};
- vkCmdClearDepthStencilImage(cb_handle, image.handle(), image.Layout(), &clear_value, 1, &range);
- m_errorMonitor->VerifyFound();
- }
-
- // Try baseArrayLayer + layerCount > image.arrayLayers
- {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearDepthStencilImage-pRanges-01695");
- const VkImageSubresourceRange range = {ds_aspect, 0, 1, 0, 2};
- vkCmdClearDepthStencilImage(cb_handle, image.handle(), image.Layout(), &clear_value, 1, &range);
- m_errorMonitor->VerifyFound();
- }
-}
-
-TEST_F(VkLayerTest, ClearColorImageWithinRenderPass) {
- // Call CmdClearColorImage within an active RenderPass
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearColorImage-renderpass");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- m_commandBuffer->begin();
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
-
- VkClearColorValue clear_color;
- memset(clear_color.uint32, 0, sizeof(uint32_t) * 4);
- const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
- const int32_t tex_width = 32;
- const int32_t tex_height = 32;
- VkImageCreateInfo image_create_info = {};
- image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- image_create_info.pNext = NULL;
- image_create_info.imageType = VK_IMAGE_TYPE_2D;
- image_create_info.format = tex_format;
- image_create_info.extent.width = tex_width;
- image_create_info.extent.height = tex_height;
- image_create_info.extent.depth = 1;
- image_create_info.mipLevels = 1;
- image_create_info.arrayLayers = 1;
- image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
- image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
- image_create_info.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
-
- VkImageObj dstImage(m_device);
- dstImage.init(&image_create_info);
-
- const VkImageSubresourceRange range = VkImageObj::subresource_range(image_create_info, VK_IMAGE_ASPECT_COLOR_BIT);
-
- vkCmdClearColorImage(m_commandBuffer->handle(), dstImage.handle(), VK_IMAGE_LAYOUT_GENERAL, &clear_color, 1, &range);
-
- m_errorMonitor->VerifyFound();
-
- m_commandBuffer->EndRenderPass();
- m_commandBuffer->end();
-}
-
-TEST_F(VkLayerTest, ClearDepthStencilImageErrors) {
- // Hit errors related to vkCmdClearDepthStencilImage()
- // 1. Use an image that doesn't have VK_IMAGE_USAGE_TRANSFER_DST_BIT set
- // 2. Call CmdClearDepthStencilImage within an active RenderPass
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- auto depth_format = FindSupportedDepthStencilFormat(gpu());
- if (!depth_format) {
- printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
- return;
- }
-
- VkClearDepthStencilValue clear_value = {0};
- VkImageCreateInfo image_create_info = VkImageObj::create_info();
- image_create_info.imageType = VK_IMAGE_TYPE_2D;
- image_create_info.format = depth_format;
- image_create_info.extent.width = 64;
- image_create_info.extent.height = 64;
- image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
-    // The error here is that VK_IMAGE_USAGE_TRANSFER_DST_BIT is excluded from the DS image that we'll call Clear on below
- image_create_info.usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
-
- VkImageObj dst_image_bad_usage(m_device);
- dst_image_bad_usage.init(&image_create_info);
- const VkImageSubresourceRange range = VkImageObj::subresource_range(image_create_info, VK_IMAGE_ASPECT_DEPTH_BIT);
-
- m_commandBuffer->begin();
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearDepthStencilImage-image-00009");
- vkCmdClearDepthStencilImage(m_commandBuffer->handle(), dst_image_bad_usage.handle(), VK_IMAGE_LAYOUT_GENERAL, &clear_value, 1,
- &range);
- m_errorMonitor->VerifyFound();
-
- // Fix usage for next test case
- image_create_info.usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
- VkImageObj dst_image(m_device);
- dst_image.init(&image_create_info);
-
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearDepthStencilImage-renderpass");
- vkCmdClearDepthStencilImage(m_commandBuffer->handle(), dst_image.handle(), VK_IMAGE_LAYOUT_GENERAL, &clear_value, 1, &range);
- m_errorMonitor->VerifyFound();
-
- m_commandBuffer->EndRenderPass();
- m_commandBuffer->end();
-}
-
-TEST_F(VkLayerTest, BufferMemoryBarrierNoBuffer) {
- // Try to add a buffer memory barrier with no buffer.
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "required parameter pBufferMemoryBarriers[0].buffer specified as VK_NULL_HANDLE");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- m_commandBuffer->begin();
-
- VkBufferMemoryBarrier buf_barrier = {};
- buf_barrier.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
- buf_barrier.srcAccessMask = VK_ACCESS_HOST_WRITE_BIT;
- buf_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
- buf_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
- buf_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
- buf_barrier.buffer = VK_NULL_HANDLE;
- buf_barrier.offset = 0;
- buf_barrier.size = VK_WHOLE_SIZE;
- vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, 0, 0, nullptr,
- 1, &buf_barrier, 0, nullptr);
-
- m_errorMonitor->VerifyFound();
-}
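For reference, a sketch of the same barrier done legally; `cb` is assumed to be a recording command buffer and `buffer` a VkBuffer with memory bound.

    VkBufferMemoryBarrier ok_barrier = {};
    ok_barrier.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
    ok_barrier.srcAccessMask = VK_ACCESS_HOST_WRITE_BIT;
    ok_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
    ok_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    ok_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    ok_barrier.buffer = buffer;  // must not be VK_NULL_HANDLE
    ok_barrier.offset = 0;
    ok_barrier.size = VK_WHOLE_SIZE;
    vkCmdPipelineBarrier(cb, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, 0, 0, nullptr, 1, &ok_barrier, 0,
                         nullptr);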
-
-TEST_F(VkLayerTest, InvalidBarriers) {
- TEST_DESCRIPTION("A variety of ways to get VK_INVALID_BARRIER ");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- auto depth_format = FindSupportedDepthStencilFormat(gpu());
- if (!depth_format) {
- printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
- return;
- }
- // Add a token self-dependency for this test to avoid unexpected errors
- m_addRenderPassSelfDependency = true;
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- const uint32_t submit_family = m_device->graphics_queue_node_index_;
- const uint32_t invalid = static_cast<uint32_t>(m_device->queue_props.size());
- const uint32_t other_family = submit_family != 0 ? 0 : 1;
- const bool only_one_family = (invalid == 1) || (m_device->queue_props[other_family].queueCount == 0);
- std::vector<uint32_t> qf_indices{{submit_family, other_family}};
- if (only_one_family) {
- qf_indices.resize(1);
- }
- BarrierQueueFamilyTestHelper::Context test_context(this, qf_indices);
-
- // Use image unbound to memory in barrier
- // Use buffer unbound to memory in barrier
- BarrierQueueFamilyTestHelper conc_test(&test_context);
- conc_test.Init(nullptr, false, false);
-
- conc_test.image_barrier_.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
- conc_test(" used with no memory bound. Memory should be bound by calling vkBindImageMemory()",
- " used with no memory bound. Memory should be bound by calling vkBindBufferMemory()");
-
- VkBufferObj buffer;
- VkMemoryPropertyFlags mem_reqs = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
- buffer.init_as_src_and_dst(*m_device, 256, mem_reqs);
- conc_test.buffer_barrier_.buffer = buffer.handle();
-
- VkImageObj image(m_device);
- image.Init(128, 128, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
- conc_test.image_barrier_.image = image.handle();
-
- // New layout can't be UNDEFINED
- conc_test.image_barrier_.newLayout = VK_IMAGE_LAYOUT_UNDEFINED;
- conc_test("VUID-VkImageMemoryBarrier-newLayout-01198", "");
-
- // Transition image to color attachment optimal
- conc_test.image_barrier_.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
- conc_test("");
-
-    // TODO: this looks vestigial or incomplete...
- m_commandBuffer->begin();
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
-
- // Can't send buffer memory barrier during a render pass
- vkCmdEndRenderPass(m_commandBuffer->handle());
-
- // Duplicate barriers that change layout
- VkImageMemoryBarrier img_barrier = {};
- img_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
- img_barrier.pNext = NULL;
- img_barrier.image = image.handle();
- img_barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
- img_barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
- img_barrier.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
- img_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
- img_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
- img_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
- img_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- img_barrier.subresourceRange.baseArrayLayer = 0;
- img_barrier.subresourceRange.baseMipLevel = 0;
- img_barrier.subresourceRange.layerCount = 1;
- img_barrier.subresourceRange.levelCount = 1;
- VkImageMemoryBarrier img_barriers[2] = {img_barrier, img_barrier};
-
- // Transitions from UNDEFINED are valid, even if duplicated
- m_errorMonitor->ExpectSuccess();
- vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
- VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 2,
- img_barriers);
- m_errorMonitor->VerifyNotFound();
-
-    // Duplication of layout transitions (not from undefined) is not valid
- img_barriers[0].oldLayout = VK_IMAGE_LAYOUT_GENERAL;
- img_barriers[0].newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
- img_barriers[1].oldLayout = img_barriers[0].oldLayout;
- img_barriers[1].newLayout = img_barriers[0].newLayout;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-oldLayout-01197");
- vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
- VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 2,
- img_barriers);
- m_errorMonitor->VerifyFound();
-
- // Exceed the buffer size
- conc_test.buffer_barrier_.offset = conc_test.buffer_.create_info().size + 1;
- conc_test("", "VUID-VkBufferMemoryBarrier-offset-01187");
-
- conc_test.buffer_barrier_.offset = 0;
- conc_test.buffer_barrier_.size = conc_test.buffer_.create_info().size + 1;
- // Size greater than total size
- conc_test("", "VUID-VkBufferMemoryBarrier-size-01189");
-
- conc_test.buffer_barrier_.size = VK_WHOLE_SIZE;
-
- // Now exercise barrier aspect bit errors, first DS
- VkDepthStencilObj ds_image(m_device);
- ds_image.Init(m_device, 128, 128, depth_format);
- ASSERT_TRUE(ds_image.initialized());
-
- conc_test.image_barrier_.oldLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
- conc_test.image_barrier_.newLayout = VK_IMAGE_LAYOUT_GENERAL;
- conc_test.image_barrier_.image = ds_image.handle();
-
- // Not having DEPTH or STENCIL set is an error
- conc_test.image_barrier_.subresourceRange.aspectMask = VK_IMAGE_ASPECT_METADATA_BIT;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageSubresource-aspectMask-parameter");
- conc_test("VUID-VkImageMemoryBarrier-image-01207");
-
- // Having only one of depth or stencil set for DS image is an error
- conc_test.image_barrier_.subresourceRange.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
- conc_test("VUID-VkImageMemoryBarrier-image-01207");
-
- // Having anything other than DEPTH and STENCIL is an error
- conc_test.image_barrier_.subresourceRange.aspectMask =
- VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_COLOR_BIT;
- conc_test("VUID-VkImageSubresource-aspectMask-parameter");
-
- // Now test depth-only
- VkFormatProperties format_props;
- vkGetPhysicalDeviceFormatProperties(m_device->phy().handle(), VK_FORMAT_D16_UNORM, &format_props);
- if (format_props.optimalTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT) {
- VkDepthStencilObj d_image(m_device);
- d_image.Init(m_device, 128, 128, VK_FORMAT_D16_UNORM);
- ASSERT_TRUE(d_image.initialized());
-
- conc_test.image_barrier_.oldLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
- conc_test.image_barrier_.newLayout = VK_IMAGE_LAYOUT_GENERAL;
- conc_test.image_barrier_.image = d_image.handle();
-
- // DEPTH bit must be set
- conc_test.image_barrier_.subresourceRange.aspectMask = VK_IMAGE_ASPECT_METADATA_BIT;
- conc_test("Depth-only image formats must have the VK_IMAGE_ASPECT_DEPTH_BIT set.");
-
- // No bits other than DEPTH may be set
- conc_test.image_barrier_.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_COLOR_BIT;
- conc_test("Depth-only image formats can have only the VK_IMAGE_ASPECT_DEPTH_BIT set.");
- }
-
- // Now test stencil-only
- vkGetPhysicalDeviceFormatProperties(m_device->phy().handle(), VK_FORMAT_S8_UINT, &format_props);
- if (format_props.optimalTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT) {
- VkDepthStencilObj s_image(m_device);
- s_image.Init(m_device, 128, 128, VK_FORMAT_S8_UINT);
- ASSERT_TRUE(s_image.initialized());
-
- conc_test.image_barrier_.oldLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
- conc_test.image_barrier_.newLayout = VK_IMAGE_LAYOUT_GENERAL;
- conc_test.image_barrier_.image = s_image.handle();
-
-        // Use of COLOR aspect on a stencil-only image is an error
- conc_test.image_barrier_.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- conc_test("Stencil-only image formats must have the VK_IMAGE_ASPECT_STENCIL_BIT set.");
- }
-
- // Finally test color
- VkImageObj c_image(m_device);
- c_image.Init(128, 128, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
- ASSERT_TRUE(c_image.initialized());
- conc_test.image_barrier_.oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
- conc_test.image_barrier_.newLayout = VK_IMAGE_LAYOUT_GENERAL;
- conc_test.image_barrier_.image = c_image.handle();
-
- // COLOR bit must be set
- conc_test.image_barrier_.subresourceRange.aspectMask = VK_IMAGE_ASPECT_METADATA_BIT;
- conc_test("Color image formats must have the VK_IMAGE_ASPECT_COLOR_BIT set.");
-
- // No bits other than COLOR may be set
- conc_test.image_barrier_.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT;
- conc_test("Color image formats must have ONLY the VK_IMAGE_ASPECT_COLOR_BIT set.");
-
- // A barrier's new and old VkImageLayout must be compatible with an image's VkImageUsageFlags.
- {
- VkImageObj img_color(m_device);
- img_color.Init(128, 128, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL);
- ASSERT_TRUE(img_color.initialized());
-
- VkImageObj img_ds(m_device);
- img_ds.Init(128, 128, 1, depth_format, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL);
- ASSERT_TRUE(img_ds.initialized());
-
- VkImageObj img_xfer_src(m_device);
- img_xfer_src.Init(128, 128, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, VK_IMAGE_TILING_OPTIMAL);
- ASSERT_TRUE(img_xfer_src.initialized());
-
- VkImageObj img_xfer_dst(m_device);
- img_xfer_dst.Init(128, 128, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL);
- ASSERT_TRUE(img_xfer_dst.initialized());
-
- VkImageObj img_sampled(m_device);
- img_sampled.Init(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_TILING_OPTIMAL);
- ASSERT_TRUE(img_sampled.initialized());
-
- VkImageObj img_input(m_device);
- img_input.Init(128, 128, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL);
- ASSERT_TRUE(img_input.initialized());
-
- const struct {
- VkImageObj &image_obj;
- VkImageLayout bad_layout;
- std::string msg_code;
- } bad_buffer_layouts[] = {
- // clang-format off
- // images _without_ VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT
- {img_ds, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01208"},
- {img_xfer_src, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01208"},
- {img_xfer_dst, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01208"},
- {img_sampled, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01208"},
- {img_input, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01208"},
- // images _without_ VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT
- {img_color, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01209"},
- {img_xfer_src, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01209"},
- {img_xfer_dst, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01209"},
- {img_sampled, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01209"},
- {img_input, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01209"},
- {img_color, VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01210"},
- {img_xfer_src, VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01210"},
- {img_xfer_dst, VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01210"},
- {img_sampled, VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01210"},
- {img_input, VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01210"},
- // images _without_ VK_IMAGE_USAGE_SAMPLED_BIT or VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT
- {img_color, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01211"},
- {img_ds, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01211"},
- {img_xfer_src, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01211"},
- {img_xfer_dst, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01211"},
- // images _without_ VK_IMAGE_USAGE_TRANSFER_SRC_BIT
- {img_color, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01212"},
- {img_ds, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01212"},
- {img_xfer_dst, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01212"},
- {img_sampled, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01212"},
- {img_input, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01212"},
- // images _without_ VK_IMAGE_USAGE_TRANSFER_DST_BIT
- {img_color, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01213"},
- {img_ds, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01213"},
- {img_xfer_src, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01213"},
- {img_sampled, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01213"},
- {img_input, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01213"},
- // clang-format on
- };
- const uint32_t layout_count = sizeof(bad_buffer_layouts) / sizeof(bad_buffer_layouts[0]);
-
- for (uint32_t i = 0; i < layout_count; ++i) {
- conc_test.image_barrier_.image = bad_buffer_layouts[i].image_obj.handle();
- const VkImageUsageFlags usage = bad_buffer_layouts[i].image_obj.usage();
- conc_test.image_barrier_.subresourceRange.aspectMask = (usage == VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)
- ? (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)
- : VK_IMAGE_ASPECT_COLOR_BIT;
-
- conc_test.image_barrier_.oldLayout = bad_buffer_layouts[i].bad_layout;
- conc_test.image_barrier_.newLayout = VK_IMAGE_LAYOUT_GENERAL;
- conc_test(bad_buffer_layouts[i].msg_code);
-
- conc_test.image_barrier_.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
- conc_test.image_barrier_.newLayout = bad_buffer_layouts[i].bad_layout;
- conc_test(bad_buffer_layouts[i].msg_code);
- }
-
- conc_test.image_barrier_.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
- conc_test.image_barrier_.newLayout = VK_IMAGE_LAYOUT_GENERAL;
- conc_test.image_barrier_.image = image.handle();
- }
-
- // Attempt barrier where srcAccessMask is not supported by srcStageMask
-    // Include a lower-order bit that is supported (shader write) and a higher-order one that is not, to verify multi-bit validation
- conc_test.buffer_barrier_.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_SHADER_WRITE_BIT;
- conc_test.buffer_barrier_.offset = 0;
- conc_test.buffer_barrier_.size = VK_WHOLE_SIZE;
- conc_test("", "VUID-vkCmdPipelineBarrier-pMemoryBarriers-01184");
-
- // Attempt barrier where dstAccessMask is not supported by dstStageMask
- conc_test.buffer_barrier_.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
- conc_test.buffer_barrier_.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
- conc_test("", "VUID-vkCmdPipelineBarrier-pMemoryBarriers-01185");
-
- // Attempt to mismatch barriers/waitEvents calls with incompatible queues
-    // Create a command pool with incompatible queue flags
- const std::vector<VkQueueFamilyProperties> queue_props = m_device->queue_props;
- uint32_t queue_family_index = m_device->QueueFamilyMatching(VK_QUEUE_GRAPHICS_BIT, VK_QUEUE_COMPUTE_BIT);
- if (queue_family_index == UINT32_MAX) {
- printf("%s No non-compute queue supporting graphics found; skipped.\n", kSkipPrefix);
- return; // NOTE: this exits the test function!
- }
-
- VkBufferMemoryBarrier buf_barrier = {};
- buf_barrier.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
- buf_barrier.pNext = NULL;
- buf_barrier.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
- buf_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
- buf_barrier.buffer = buffer.handle();
- buf_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
- buf_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
- buf_barrier.offset = 0;
- buf_barrier.size = VK_WHOLE_SIZE;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-srcStageMask-01183");
-
- VkCommandPoolObj command_pool(m_device, queue_family_index, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
- VkCommandBufferObj bad_command_buffer(m_device, &command_pool);
-
- bad_command_buffer.begin();
- buf_barrier.srcAccessMask = VK_ACCESS_SHADER_WRITE_BIT;
- // Set two bits that should both be supported as a bonus positive check
- buf_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT | VK_ACCESS_TRANSFER_READ_BIT;
- vkCmdPipelineBarrier(bad_command_buffer.handle(), VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
- VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 1, &buf_barrier, 0, nullptr);
- m_errorMonitor->VerifyFound();
-
-    // Check for the error from trying to wait on a pipeline stage not supported by this queue. Specifically, since our queue is
-    // not a compute queue, vkCmdWaitEvents cannot have its source stage mask be VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdWaitEvents-srcStageMask-01164");
- VkEvent event;
- VkEventCreateInfo event_create_info{};
- event_create_info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
- vkCreateEvent(m_device->device(), &event_create_info, nullptr, &event);
- vkCmdWaitEvents(bad_command_buffer.handle(), 1, &event, /*source stage mask*/ VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
- VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, 0, nullptr, 0, nullptr, 0, nullptr);
- m_errorMonitor->VerifyFound();
- bad_command_buffer.end();
-
- vkDestroyEvent(m_device->device(), event, nullptr);
-}
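Most of the aspect-bit cases above reduce to one rule: the barrier's aspectMask has to match the image's format. A hypothetical (non-exhaustive) helper sketches that mapping.

    VkImageAspectFlags AspectMaskForFormat(VkFormat format) {
        switch (format) {
            case VK_FORMAT_D16_UNORM:
            case VK_FORMAT_X8_D24_UNORM_PACK32:
            case VK_FORMAT_D32_SFLOAT:
                return VK_IMAGE_ASPECT_DEPTH_BIT;  // depth-only formats
            case VK_FORMAT_S8_UINT:
                return VK_IMAGE_ASPECT_STENCIL_BIT;  // stencil-only format
            case VK_FORMAT_D16_UNORM_S8_UINT:
            case VK_FORMAT_D24_UNORM_S8_UINT:
            case VK_FORMAT_D32_SFLOAT_S8_UINT:
                return VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;  // combined depth/stencil
            default:
                return VK_IMAGE_ASPECT_COLOR_BIT;  // treat everything else as color here
        }
    }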
-
-TEST_F(VkLayerTest, InvalidBarrierQueueFamily) {
- TEST_DESCRIPTION("Create and submit barriers with invalid queue families");
- ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
-
- // Find queues of two families
- const uint32_t submit_family = m_device->graphics_queue_node_index_;
- const uint32_t invalid = static_cast<uint32_t>(m_device->queue_props.size());
- const uint32_t other_family = submit_family != 0 ? 0 : 1;
- const bool only_one_family = (invalid == 1) || (m_device->queue_props[other_family].queueCount == 0);
-
- std::vector<uint32_t> qf_indices{{submit_family, other_family}};
- if (only_one_family) {
- qf_indices.resize(1);
- }
- BarrierQueueFamilyTestHelper::Context test_context(this, qf_indices);
-
- if (m_device->props.apiVersion >= VK_API_VERSION_1_1) {
-        printf("%s Device has apiVersion greater than 1.0 -- skipping test cases that require external memory to be disabled.\n",
-               kSkipPrefix);
- } else {
- if (only_one_family) {
- printf("%s Single queue family found -- VK_SHARING_MODE_CONCURRENT testcases skipped.\n", kSkipPrefix);
- } else {
- std::vector<uint32_t> families = {submit_family, other_family};
- BarrierQueueFamilyTestHelper conc_test(&test_context);
- conc_test.Init(&families);
- // core_validation::barrier_queue_families::kSrcAndDestMustBeIgnore
- conc_test("VUID-VkImageMemoryBarrier-image-01199", "VUID-VkBufferMemoryBarrier-buffer-01190", VK_QUEUE_FAMILY_IGNORED,
- submit_family);
- conc_test("VUID-VkImageMemoryBarrier-image-01199", "VUID-VkBufferMemoryBarrier-buffer-01190", submit_family,
- VK_QUEUE_FAMILY_IGNORED);
- conc_test("VUID-VkImageMemoryBarrier-image-01199", "VUID-VkBufferMemoryBarrier-buffer-01190", submit_family,
- submit_family);
- // true -> positive test
- conc_test("VUID-VkImageMemoryBarrier-image-01199", "VUID-VkBufferMemoryBarrier-buffer-01190", VK_QUEUE_FAMILY_IGNORED,
- VK_QUEUE_FAMILY_IGNORED, true);
- }
-
- BarrierQueueFamilyTestHelper excl_test(&test_context);
- excl_test.Init(nullptr); // no queue families means *exclusive* sharing mode.
-
- // core_validation::barrier_queue_families::kBothIgnoreOrBothValid
- excl_test("VUID-VkImageMemoryBarrier-image-01200", "VUID-VkBufferMemoryBarrier-buffer-01192", VK_QUEUE_FAMILY_IGNORED,
- submit_family);
- excl_test("VUID-VkImageMemoryBarrier-image-01200", "VUID-VkBufferMemoryBarrier-buffer-01192", submit_family,
- VK_QUEUE_FAMILY_IGNORED);
- // true -> positive test
- excl_test("VUID-VkImageMemoryBarrier-image-01200", "VUID-VkBufferMemoryBarrier-buffer-01192", submit_family, submit_family,
- true);
- excl_test("VUID-VkImageMemoryBarrier-image-01200", "VUID-VkBufferMemoryBarrier-buffer-01192", VK_QUEUE_FAMILY_IGNORED,
- VK_QUEUE_FAMILY_IGNORED, true);
- }
-
- if (only_one_family) {
- printf("%s Single queue family found -- VK_SHARING_MODE_EXCLUSIVE submit testcases skipped.\n", kSkipPrefix);
- } else {
- BarrierQueueFamilyTestHelper excl_test(&test_context);
- excl_test.Init(nullptr);
-
- // core_validation::barrier_queue_families::kSubmitQueueMustMatchSrcOrDst
- excl_test("VUID-VkImageMemoryBarrier-image-01205", "VUID-VkBufferMemoryBarrier-buffer-01196", other_family, other_family,
- false, submit_family);
-
-        // true -> positive test (testing both the index logic and the QFO transfer tracking).
- excl_test("POSITIVE_TEST", "POSITIVE_TEST", submit_family, other_family, true, submit_family);
- excl_test("POSITIVE_TEST", "POSITIVE_TEST", submit_family, other_family, true, other_family);
- excl_test("POSITIVE_TEST", "POSITIVE_TEST", other_family, submit_family, true, other_family);
- excl_test("POSITIVE_TEST", "POSITIVE_TEST", other_family, submit_family, true, submit_family);
-
- // negative testing for QFO transfer tracking
- // Duplicate release in one CB
- excl_test("UNASSIGNED-VkImageMemoryBarrier-image-00001", "UNASSIGNED-VkBufferMemoryBarrier-buffer-00001", submit_family,
- other_family, false, submit_family, BarrierQueueFamilyTestHelper::DOUBLE_RECORD);
- // Duplicate pending release
- excl_test("UNASSIGNED-VkImageMemoryBarrier-image-00003", "UNASSIGNED-VkBufferMemoryBarrier-buffer-00003", submit_family,
- other_family, false, submit_family);
- // Duplicate acquire in one CB
- excl_test("UNASSIGNED-VkImageMemoryBarrier-image-00001", "UNASSIGNED-VkBufferMemoryBarrier-buffer-00001", submit_family,
- other_family, false, other_family, BarrierQueueFamilyTestHelper::DOUBLE_RECORD);
- // No pending release
- excl_test("UNASSIGNED-VkImageMemoryBarrier-image-00004", "UNASSIGNED-VkBufferMemoryBarrier-buffer-00004", submit_family,
- other_family, false, other_family);
- // Duplicate release in two CB
- excl_test("UNASSIGNED-VkImageMemoryBarrier-image-00002", "UNASSIGNED-VkBufferMemoryBarrier-buffer-00002", submit_family,
- other_family, false, submit_family, BarrierQueueFamilyTestHelper::DOUBLE_COMMAND_BUFFER);
- // Duplicate acquire in two CB
- excl_test("POSITIVE_TEST", "POSITIVE_TEST", submit_family, other_family, true, submit_family); // need a succesful release
- excl_test("UNASSIGNED-VkImageMemoryBarrier-image-00002", "UNASSIGNED-VkBufferMemoryBarrier-buffer-00002", submit_family,
- other_family, false, other_family, BarrierQueueFamilyTestHelper::DOUBLE_COMMAND_BUFFER);
- }
-}
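The positive cases above are the two halves of an exclusive-mode queue family ownership transfer; a rough sketch, assuming `barrier` is a filled-in VkImageMemoryBarrier and `release_cb`/`acquire_cb` come from command pools of `src_family` and `dst_family`.

    barrier.srcQueueFamilyIndex = src_family;
    barrier.dstQueueFamilyIndex = dst_family;
    // Release: recorded and submitted on a queue from src_family
    vkCmdPipelineBarrier(release_cb, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0, nullptr, 0,
                         nullptr, 1, &barrier);
    // Acquire: the matching barrier, recorded and submitted on a queue from dst_family
    vkCmdPipelineBarrier(acquire_cb, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0, nullptr, 0,
                         nullptr, 1, &barrier);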
-
-TEST_F(VkLayerTest, InvalidBarrierQueueFamilyWithMemExt) {
- TEST_DESCRIPTION("Create and submit barriers with invalid queue families when memory extension is enabled ");
- std::vector<const char *> reqd_instance_extensions = {
- {VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME, VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME}};
- for (auto extension_name : reqd_instance_extensions) {
- if (InstanceExtensionSupported(extension_name)) {
- m_instance_extension_names.push_back(extension_name);
- } else {
- printf("%s Required instance extension %s not supported, skipping test\n", kSkipPrefix, extension_name);
- return;
- }
- }
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- // Check for external memory device extensions
- if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
- } else {
- printf("%s External memory extension not supported, skipping test\n", kSkipPrefix);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
-
- // Find queues of two families
- const uint32_t submit_family = m_device->graphics_queue_node_index_;
- const uint32_t invalid = static_cast<uint32_t>(m_device->queue_props.size());
- const uint32_t other_family = submit_family != 0 ? 0 : 1;
- const bool only_one_family = (invalid == 1) || (m_device->queue_props[other_family].queueCount == 0);
-
- std::vector<uint32_t> qf_indices{{submit_family, other_family}};
- if (only_one_family) {
- qf_indices.resize(1);
- }
- BarrierQueueFamilyTestHelper::Context test_context(this, qf_indices);
-
- if (only_one_family) {
- printf("%s Single queue family found -- VK_SHARING_MODE_CONCURRENT testcases skipped.\n", kSkipPrefix);
- } else {
- std::vector<uint32_t> families = {submit_family, other_family};
- BarrierQueueFamilyTestHelper conc_test(&test_context);
-
- // core_validation::barrier_queue_families::kSrcOrDstMustBeIgnore
- conc_test.Init(&families);
- conc_test("VUID-VkImageMemoryBarrier-image-01381", "VUID-VkBufferMemoryBarrier-buffer-01191", submit_family, submit_family);
- // true -> positive test
- conc_test("VUID-VkImageMemoryBarrier-image-01381", "VUID-VkBufferMemoryBarrier-buffer-01191", VK_QUEUE_FAMILY_IGNORED,
- VK_QUEUE_FAMILY_IGNORED, true);
- conc_test("VUID-VkImageMemoryBarrier-image-01381", "VUID-VkBufferMemoryBarrier-buffer-01191", VK_QUEUE_FAMILY_IGNORED,
- VK_QUEUE_FAMILY_EXTERNAL_KHR, true);
- conc_test("VUID-VkImageMemoryBarrier-image-01381", "VUID-VkBufferMemoryBarrier-buffer-01191", VK_QUEUE_FAMILY_EXTERNAL_KHR,
- VK_QUEUE_FAMILY_IGNORED, true);
-
- // core_validation::barrier_queue_families::kSpecialOrIgnoreOnly
- conc_test("VUID-VkImageMemoryBarrier-image-01766", "VUID-VkBufferMemoryBarrier-buffer-01763", submit_family,
- VK_QUEUE_FAMILY_IGNORED);
- conc_test("VUID-VkImageMemoryBarrier-image-01766", "VUID-VkBufferMemoryBarrier-buffer-01763", VK_QUEUE_FAMILY_IGNORED,
- submit_family);
- // This is to flag the errors that would be considered only "unexpected" in the parallel case above
- // true -> positive test
- conc_test("VUID-VkImageMemoryBarrier-image-01766", "VUID-VkBufferMemoryBarrier-buffer-01763", VK_QUEUE_FAMILY_IGNORED,
- VK_QUEUE_FAMILY_EXTERNAL_KHR, true);
- conc_test("VUID-VkImageMemoryBarrier-image-01766", "VUID-VkBufferMemoryBarrier-buffer-01763", VK_QUEUE_FAMILY_EXTERNAL_KHR,
- VK_QUEUE_FAMILY_IGNORED, true);
- }
-
- BarrierQueueFamilyTestHelper excl_test(&test_context);
- excl_test.Init(nullptr); // no queue families means *exclusive* sharing mode.
-
- // core_validation::barrier_queue_families::kSrcIgnoreRequiresDstIgnore
- excl_test("VUID-VkImageMemoryBarrier-image-01201", "VUID-VkBufferMemoryBarrier-buffer-01193", VK_QUEUE_FAMILY_IGNORED,
- submit_family);
- excl_test("VUID-VkImageMemoryBarrier-image-01201", "VUID-VkBufferMemoryBarrier-buffer-01193", VK_QUEUE_FAMILY_IGNORED,
- VK_QUEUE_FAMILY_EXTERNAL_KHR);
- // true -> positive test
- excl_test("VUID-VkImageMemoryBarrier-image-01201", "VUID-VkBufferMemoryBarrier-buffer-01193", VK_QUEUE_FAMILY_IGNORED,
- VK_QUEUE_FAMILY_IGNORED, true);
-
- // core_validation::barrier_queue_families::kDstValidOrSpecialIfNotIgnore
- excl_test("VUID-VkImageMemoryBarrier-image-01768", "VUID-VkBufferMemoryBarrier-buffer-01765", submit_family, invalid);
- // true -> positive test
- excl_test("VUID-VkImageMemoryBarrier-image-01768", "VUID-VkBufferMemoryBarrier-buffer-01765", submit_family, submit_family,
- true);
- excl_test("VUID-VkImageMemoryBarrier-image-01768", "VUID-VkBufferMemoryBarrier-buffer-01765", submit_family,
- VK_QUEUE_FAMILY_IGNORED, true);
- excl_test("VUID-VkImageMemoryBarrier-image-01768", "VUID-VkBufferMemoryBarrier-buffer-01765", submit_family,
- VK_QUEUE_FAMILY_EXTERNAL_KHR, true);
-
- // core_validation::barrier_queue_families::kSrcValidOrSpecialIfNotIgnore
- excl_test("VUID-VkImageMemoryBarrier-image-01767", "VUID-VkBufferMemoryBarrier-buffer-01764", invalid, submit_family);
- // true -> positive test
- excl_test("VUID-VkImageMemoryBarrier-image-01767", "VUID-VkBufferMemoryBarrier-buffer-01764", submit_family, submit_family,
- true);
- excl_test("VUID-VkImageMemoryBarrier-image-01767", "VUID-VkBufferMemoryBarrier-buffer-01764", VK_QUEUE_FAMILY_IGNORED,
- VK_QUEUE_FAMILY_IGNORED, true);
- excl_test("VUID-VkImageMemoryBarrier-image-01767", "VUID-VkBufferMemoryBarrier-buffer-01764", VK_QUEUE_FAMILY_EXTERNAL_KHR,
- submit_family, true);
-}
-
-TEST_F(VkLayerTest, ImageBarrierWithBadRange) {
- TEST_DESCRIPTION("VkImageMemoryBarrier with an invalid subresourceRange");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkImageMemoryBarrier img_barrier_template = {};
- img_barrier_template.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
- img_barrier_template.pNext = NULL;
- img_barrier_template.srcAccessMask = 0;
- img_barrier_template.dstAccessMask = 0;
- img_barrier_template.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
- img_barrier_template.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
- img_barrier_template.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
- img_barrier_template.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
-    // subresourceRange to be set later for the purposes of this test
- img_barrier_template.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- img_barrier_template.subresourceRange.baseArrayLayer = 0;
- img_barrier_template.subresourceRange.baseMipLevel = 0;
- img_barrier_template.subresourceRange.layerCount = 0;
- img_barrier_template.subresourceRange.levelCount = 0;
-
- const uint32_t submit_family = m_device->graphics_queue_node_index_;
- const uint32_t invalid = static_cast<uint32_t>(m_device->queue_props.size());
- const uint32_t other_family = submit_family != 0 ? 0 : 1;
- const bool only_one_family = (invalid == 1) || (m_device->queue_props[other_family].queueCount == 0);
- std::vector<uint32_t> qf_indices{{submit_family, other_family}};
- if (only_one_family) {
- qf_indices.resize(1);
- }
- BarrierQueueFamilyTestHelper::Context test_context(this, qf_indices);
-
- // Use image unbound to memory in barrier
- // Use buffer unbound to memory in barrier
- BarrierQueueFamilyTestHelper conc_test(&test_context);
- conc_test.Init(nullptr);
- img_barrier_template.image = conc_test.image_.handle();
- conc_test.image_barrier_ = img_barrier_template;
- // Nested scope here confuses clang-format, somehow
- // clang-format off
-
- // try for vkCmdPipelineBarrier
- {
- // Try baseMipLevel >= image.mipLevels with VK_REMAINING_MIP_LEVELS
- {
- conc_test.image_barrier_.subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 1, VK_REMAINING_MIP_LEVELS, 0, 1};
- conc_test("VUID-VkImageMemoryBarrier-subresourceRange-01486");
- }
-
- // Try baseMipLevel >= image.mipLevels without VK_REMAINING_MIP_LEVELS
- {
- conc_test.image_barrier_.subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 1, 1, 0, 1};
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-subresourceRange-01724");
- conc_test("VUID-VkImageMemoryBarrier-subresourceRange-01486");
- }
-
- // Try levelCount = 0
- {
- conc_test.image_barrier_.subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 0, 1};
- conc_test("VUID-VkImageMemoryBarrier-subresourceRange-01724");
- }
-
- // Try baseMipLevel + levelCount > image.mipLevels
- {
- conc_test.image_barrier_.subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 2, 0, 1};
- conc_test("VUID-VkImageMemoryBarrier-subresourceRange-01724");
- }
-
- // Try baseArrayLayer >= image.arrayLayers with VK_REMAINING_ARRAY_LAYERS
- {
- conc_test.image_barrier_.subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 1, VK_REMAINING_ARRAY_LAYERS};
- conc_test("VUID-VkImageMemoryBarrier-subresourceRange-01488");
- }
-
- // Try baseArrayLayer >= image.arrayLayers without VK_REMAINING_ARRAY_LAYERS
- {
- conc_test.image_barrier_.subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 1, 1};
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-subresourceRange-01725");
- conc_test("VUID-VkImageMemoryBarrier-subresourceRange-01488");
- }
-
- // Try layerCount = 0
- {
- conc_test.image_barrier_.subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 0};
- conc_test("VUID-VkImageMemoryBarrier-subresourceRange-01725");
- }
-
- // Try baseArrayLayer + layerCount > image.arrayLayers
- {
- conc_test.image_barrier_.subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 2};
- conc_test("VUID-VkImageMemoryBarrier-subresourceRange-01725");
- }
- }
-
- m_commandBuffer->begin();
- // try for vkCmdWaitEvents
- {
- VkEvent event;
- VkEventCreateInfo eci{VK_STRUCTURE_TYPE_EVENT_CREATE_INFO, NULL, 0};
- VkResult err = vkCreateEvent(m_device->handle(), &eci, nullptr, &event);
- ASSERT_VK_SUCCESS(err);
-
- // Try baseMipLevel >= image.mipLevels with VK_REMAINING_MIP_LEVELS
- {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-subresourceRange-01486");
- const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 1, VK_REMAINING_MIP_LEVELS, 0, 1};
- VkImageMemoryBarrier img_barrier = img_barrier_template;
- img_barrier.subresourceRange = range;
- vkCmdWaitEvents(m_commandBuffer->handle(), 1, &event, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
- VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, nullptr, 0, nullptr, 1, &img_barrier);
- m_errorMonitor->VerifyFound();
- }
-
- // Try baseMipLevel >= image.mipLevels without VK_REMAINING_MIP_LEVELS
- {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-subresourceRange-01486");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-subresourceRange-01724");
- const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 1, 1, 0, 1};
- VkImageMemoryBarrier img_barrier = img_barrier_template;
- img_barrier.subresourceRange = range;
- vkCmdWaitEvents(m_commandBuffer->handle(), 1, &event, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
- VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, nullptr, 0, nullptr, 1, &img_barrier);
- m_errorMonitor->VerifyFound();
- }
-
- // Try levelCount = 0
- {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-subresourceRange-01724");
- const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 0, 1};
- VkImageMemoryBarrier img_barrier = img_barrier_template;
- img_barrier.subresourceRange = range;
- vkCmdWaitEvents(m_commandBuffer->handle(), 1, &event, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
- VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, nullptr, 0, nullptr, 1, &img_barrier);
- m_errorMonitor->VerifyFound();
- }
-
- // Try baseMipLevel + levelCount > image.mipLevels
- {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-subresourceRange-01724");
- const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 2, 0, 1};
- VkImageMemoryBarrier img_barrier = img_barrier_template;
- img_barrier.subresourceRange = range;
- vkCmdWaitEvents(m_commandBuffer->handle(), 1, &event, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
- VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, nullptr, 0, nullptr, 1, &img_barrier);
- m_errorMonitor->VerifyFound();
- }
-
- // Try baseArrayLayer >= image.arrayLayers with VK_REMAINING_ARRAY_LAYERS
- {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-subresourceRange-01488");
- const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 1, VK_REMAINING_ARRAY_LAYERS};
- VkImageMemoryBarrier img_barrier = img_barrier_template;
- img_barrier.subresourceRange = range;
- vkCmdWaitEvents(m_commandBuffer->handle(), 1, &event, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
- VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, nullptr, 0, nullptr, 1, &img_barrier);
- m_errorMonitor->VerifyFound();
- }
-
- // Try baseArrayLayer >= image.arrayLayers without VK_REMAINING_ARRAY_LAYERS
- {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-subresourceRange-01488");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-subresourceRange-01725");
- const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 1, 1};
- VkImageMemoryBarrier img_barrier = img_barrier_template;
- img_barrier.subresourceRange = range;
- vkCmdWaitEvents(m_commandBuffer->handle(), 1, &event, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
- VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, nullptr, 0, nullptr, 1, &img_barrier);
- m_errorMonitor->VerifyFound();
- }
-
- // Try layerCount = 0
- {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-subresourceRange-01725");
- const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 0};
- VkImageMemoryBarrier img_barrier = img_barrier_template;
- img_barrier.subresourceRange = range;
- vkCmdWaitEvents(m_commandBuffer->handle(), 1, &event, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
- VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, nullptr, 0, nullptr, 1, &img_barrier);
- m_errorMonitor->VerifyFound();
- }
-
- // Try baseArrayLayer + layerCount > image.arrayLayers
- {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-subresourceRange-01725");
- const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 2};
- VkImageMemoryBarrier img_barrier = img_barrier_template;
- img_barrier.subresourceRange = range;
- vkCmdWaitEvents(m_commandBuffer->handle(), 1, &event, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
- VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, nullptr, 0, nullptr, 1, &img_barrier);
- m_errorMonitor->VerifyFound();
- }
-
- vkDestroyEvent(m_device->handle(), event, nullptr);
- }
- // clang-format on
-}
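-
-// Editorial sketch (not part of the original test source): a subresourceRange that satisfies
-// the barrier VUs exercised above for an image with one mip level and one array layer.
-// levelCount/layerCount must be non-zero (or VK_REMAINING_*), and base + count must stay
-// within the image's mip/layer ranges. Core Vulkan API only; no test-framework helpers.
-static VkImageSubresourceRange FullColorRangeSketch() {
- VkImageSubresourceRange range = {};
- range.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- range.baseMipLevel = 0;
- range.levelCount = VK_REMAINING_MIP_LEVELS;  // covers every mip from baseMipLevel on
- range.baseArrayLayer = 0;
- range.layerCount = VK_REMAINING_ARRAY_LAYERS;  // covers every layer from baseArrayLayer on
- return range;
-}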
-
-TEST_F(VkLayerTest, IdxBufferAlignmentError) {
- // Bind an index buffer at an offset that is not a multiple of the index type size
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- uint32_t const indices[] = {0};
- VkBufferCreateInfo buf_info = {};
- buf_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
- buf_info.size = 1024;
- buf_info.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
- buf_info.queueFamilyIndexCount = 1;
- buf_info.pQueueFamilyIndices = indices;
-
- VkBufferObj buffer;
- buffer.init(*m_device, buf_info);
-
- m_commandBuffer->begin();
-
- // vkCmdBindPipeline(m_commandBuffer->handle(),
- // VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
- // Should error before calling down to the driver, so the actual buffer contents don't matter
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "vkCmdBindIndexBuffer() offset (0x7) does not fall on ");
- vkCmdBindIndexBuffer(m_commandBuffer->handle(), buffer.handle(), 7, VK_INDEX_TYPE_UINT16);
- m_errorMonitor->VerifyFound();
-}
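-
-// Editorial sketch (not part of the original test source): vkCmdBindIndexBuffer requires the
-// offset to be a multiple of the index type size, so an offset such as 8 is valid for
-// VK_INDEX_TYPE_UINT16 where the offset of 7 above is not. The command buffer and index
-// buffer handles are assumed to come from a setup like the test above.
-static void BindIndexBufferAlignedSketch(VkCommandBuffer cb, VkBuffer index_buffer) {
- const VkDeviceSize aligned_offset = 8;  // multiple of sizeof(uint16_t)
- vkCmdBindIndexBuffer(cb, index_buffer, aligned_offset, VK_INDEX_TYPE_UINT16);
-}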
-
-TEST_F(VkLayerTest, Bad2DArrayImageType) {
- TEST_DESCRIPTION("Create an image with a flag specifying 2D_ARRAY_COMPATIBLE but not of imageType 3D.");
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
- } else {
- printf("%s %s is not supported; skipping\n", kSkipPrefix, VK_KHR_MAINTENANCE1_EXTENSION_NAME);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- // Trigger the check by setting the 2D_ARRAY_COMPATIBLE create flag while imageType is 2D
- VkImageCreateInfo ici = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
- nullptr,
- VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR,
- VK_IMAGE_TYPE_2D,
- VK_FORMAT_R8G8B8A8_UNORM,
- {32, 32, 1},
- 1,
- 1,
- VK_SAMPLE_COUNT_1_BIT,
- VK_IMAGE_TILING_OPTIMAL,
- VK_IMAGE_USAGE_SAMPLED_BIT,
- VK_SHARING_MODE_EXCLUSIVE,
- 0,
- nullptr,
- VK_IMAGE_LAYOUT_UNDEFINED};
- CreateImageTest(*this, &ici, "VUID-VkImageCreateInfo-flags-00950");
-}
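-
-// Editorial sketch (not part of the original test source): the 2D_ARRAY_COMPATIBLE flag from
-// VK_KHR_maintenance1 is legal when imageType is VK_IMAGE_TYPE_3D; only the 2D case above is
-// an error. Values below are illustrative, not taken from the original test.
-static VkImageCreateInfo ArrayCompatible3DImageSketch() {
- VkImageCreateInfo ci = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
-                         nullptr,
-                         VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR,
-                         VK_IMAGE_TYPE_3D,  // 3D image type, as the flag requires
-                         VK_FORMAT_R8G8B8A8_UNORM,
-                         {32, 32, 8},
-                         1,
-                         1,  // arrayLayers must be 1 for a 3D image
-                         VK_SAMPLE_COUNT_1_BIT,
-                         VK_IMAGE_TILING_OPTIMAL,
-                         VK_IMAGE_USAGE_SAMPLED_BIT,
-                         VK_SHARING_MODE_EXCLUSIVE,
-                         0,
-                         nullptr,
-                         VK_IMAGE_LAYOUT_UNDEFINED};
- return ci;
-}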
-
-TEST_F(VkLayerTest, VertexBufferInvalid) {
- TEST_DESCRIPTION(
- "Submit a command buffer using deleted vertex buffer, delete a buffer twice, use an invalid offset for each buffer type, "
- "and attempt to bind a null buffer");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitViewport());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- CreatePipelineHelper pipe(*this);
- pipe.InitInfo();
- pipe.InitState();
- pipe.CreateGraphicsPipeline();
-
- m_commandBuffer->begin();
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "CoreValidation-DrawState-InvalidCommandBuffer-VkBuffer");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "CoreValidation-DrawState-InvalidCommandBuffer-VkDeviceMemory");
-
- {
- // Create and bind a vertex buffer in a reduced scope, which will cause it to be deleted upon leaving this scope
- const float vbo_data[3] = {1.f, 0.f, 1.f};
- VkVerticesObj draw_verticies(m_device, 1, 1, sizeof(vbo_data[0]), sizeof(vbo_data) / sizeof(vbo_data[0]), vbo_data);
- draw_verticies.BindVertexBuffers(m_commandBuffer->handle());
- draw_verticies.AddVertexInputToPipeHelpr(&pipe);
-
- m_commandBuffer->Draw(1, 0, 0, 0);
-
- m_commandBuffer->EndRenderPass();
- }
-
- vkEndCommandBuffer(m_commandBuffer->handle());
- m_errorMonitor->VerifyFound();
-
- {
- // Create and bind a buffer in a reduced scope, and delete it
- // twice, the second time through the destructor
- VkBufferTest buffer_test(m_device, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT, VkBufferTest::eDoubleDelete);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkDestroyBuffer-buffer-parameter");
- buffer_test.TestDoubleDestroy();
- }
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetUnexpectedError("value of pCreateInfo->usage must not be 0");
- if (VkBufferTest::GetTestConditionValid(m_device, VkBufferTest::eInvalidMemoryOffset)) {
- // Bind a buffer's memory with an invalid (misaligned) offset.
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindBufferMemory-memoryOffset-01036");
- m_errorMonitor->SetUnexpectedError(
- "If buffer was created with the VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT or VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT, "
- "memoryOffset must be a multiple of VkPhysicalDeviceLimits::minTexelBufferOffsetAlignment");
- VkBufferTest buffer_test(m_device, VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT, VkBufferTest::eInvalidMemoryOffset);
- (void)buffer_test;
- m_errorMonitor->VerifyFound();
- }
-
- {
- // Attempt to bind a null buffer.
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "vkBindBufferMemory: required parameter buffer specified as VK_NULL_HANDLE");
- VkBufferTest buffer_test(m_device, 0, VkBufferTest::eBindNullBuffer);
- (void)buffer_test;
- m_errorMonitor->VerifyFound();
- }
-
- {
- // Attempt to bind a fake buffer.
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindBufferMemory-buffer-parameter");
- VkBufferTest buffer_test(m_device, 0, VkBufferTest::eBindFakeBuffer);
- (void)buffer_test;
- m_errorMonitor->VerifyFound();
- }
-
- {
- // Attempt to use an invalid handle to delete a buffer.
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkFreeMemory-memory-parameter");
- VkBufferTest buffer_test(m_device, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT, VkBufferTest::eFreeInvalidHandle);
- (void)buffer_test;
- }
- m_errorMonitor->VerifyFound();
-}
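-
-// Editorial sketch (not part of the original test source): binding buffer memory at an offset
-// that is a multiple of VkMemoryRequirements::alignment avoids the memoryOffset error exercised
-// above. Memory-type selection is elided; the allocation is assumed large enough to hold the
-// buffer at that offset.
-static VkResult BindBufferAtAlignedOffsetSketch(VkDevice device, VkBuffer buffer, VkDeviceMemory memory) {
- VkMemoryRequirements reqs = {};
- vkGetBufferMemoryRequirements(device, buffer, &reqs);
- const VkDeviceSize offset = reqs.alignment;  // any multiple of the required alignment is valid
- return vkBindBufferMemory(device, buffer, memory, offset);
-}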
-
-TEST_F(VkLayerTest, BadVertexBufferOffset) {
- TEST_DESCRIPTION("Submit an offset past the end of a vertex buffer");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
- static const float vbo_data[3] = {1.f, 0.f, 1.f};
- VkConstantBufferObj vbo(m_device, sizeof(vbo_data), (const void *)&vbo_data, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT);
- m_commandBuffer->begin();
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBindVertexBuffers-pOffsets-00626");
- m_commandBuffer->BindVertexBuffer(&vbo, (VkDeviceSize)(3 * sizeof(float)), 1); // Offset at the end of the buffer
- m_errorMonitor->VerifyFound();
-
- m_commandBuffer->EndRenderPass();
- m_commandBuffer->end();
-}
-
-// INVALID_IMAGE_LAYOUT tests (one other case is hit by MapMemWithoutHostVisibleBit and not here)
-TEST_F(VkLayerTest, InvalidImageLayout) {
- TEST_DESCRIPTION(
- "Hit all possible validation checks associated with the UNASSIGNED-CoreValidation-DrawState-InvalidImageLayout error. "
- "Generally these involve having images in the wrong layout when they're copied or transitioned.");
- // 3 in ValidateCmdBufImageLayouts
- // * -1 Attempt to submit cmd buf w/ deleted image
- // * -2 Cmd buf submit of image w/ layout not matching first use w/ subresource
- // * -3 Cmd buf submit of image w/ layout not matching first use w/o subresource
-
- ASSERT_NO_FATAL_FAILURE(Init());
- auto depth_format = FindSupportedDepthStencilFormat(gpu());
- if (!depth_format) {
- printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
- return;
- }
- // Create src & dst images to use for copy operations
- VkImageObj src_image(m_device);
- VkImageObj dst_image(m_device);
- VkImageObj depth_image(m_device);
-
- const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
- const int32_t tex_width = 32;
- const int32_t tex_height = 32;
-
- VkImageCreateInfo image_create_info = {};
- image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- image_create_info.pNext = NULL;
- image_create_info.imageType = VK_IMAGE_TYPE_2D;
- image_create_info.format = tex_format;
- image_create_info.extent.width = tex_width;
- image_create_info.extent.height = tex_height;
- image_create_info.extent.depth = 1;
- image_create_info.mipLevels = 1;
- image_create_info.arrayLayers = 4;
- image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
- image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
- image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
- image_create_info.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
- image_create_info.flags = 0;
-
- src_image.init(&image_create_info);
-
- image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
- dst_image.init(&image_create_info);
-
- image_create_info.format = VK_FORMAT_D16_UNORM;
- image_create_info.usage |= VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
- depth_image.init(&image_create_info);
-
- m_commandBuffer->begin();
- VkImageCopy copy_region;
- copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- copy_region.srcSubresource.mipLevel = 0;
- copy_region.srcSubresource.baseArrayLayer = 0;
- copy_region.srcSubresource.layerCount = 1;
- copy_region.srcOffset.x = 0;
- copy_region.srcOffset.y = 0;
- copy_region.srcOffset.z = 0;
- copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- copy_region.dstSubresource.mipLevel = 0;
- copy_region.dstSubresource.baseArrayLayer = 0;
- copy_region.dstSubresource.layerCount = 1;
- copy_region.dstOffset.x = 0;
- copy_region.dstOffset.y = 0;
- copy_region.dstOffset.z = 0;
- copy_region.extent.width = 1;
- copy_region.extent.height = 1;
- copy_region.extent.depth = 1;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
- "layout should be VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL instead of GENERAL.");
- m_errorMonitor->SetUnexpectedError("layout should be VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL instead of GENERAL.");
-
- m_commandBuffer->CopyImage(src_image.handle(), VK_IMAGE_LAYOUT_GENERAL, dst_image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
- &copy_region);
- m_errorMonitor->VerifyFound();
- // The first call hits the expected WARNING and is skipped before reaching the driver, so call a second time to go down
- // the chain and update the layer's layout state
- m_errorMonitor->SetUnexpectedError("layout should be VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL instead of GENERAL.");
- m_errorMonitor->SetUnexpectedError("layout should be VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL instead of GENERAL.");
- m_commandBuffer->CopyImage(src_image.handle(), VK_IMAGE_LAYOUT_GENERAL, dst_image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
- &copy_region);
- // Now cause error due to src image layout changing
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-srcImageLayout-00128");
- m_errorMonitor->SetUnexpectedError("is VK_IMAGE_LAYOUT_UNDEFINED but can only be VK_IMAGE_LAYOUT");
- m_commandBuffer->CopyImage(src_image.handle(), VK_IMAGE_LAYOUT_UNDEFINED, dst_image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
- &copy_region);
- m_errorMonitor->VerifyFound();
- // Final src error is due to bad layout type
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-srcImageLayout-00129");
- m_errorMonitor->SetUnexpectedError(
- "with specific layout VK_IMAGE_LAYOUT_UNDEFINED that doesn't match the previously used layout VK_IMAGE_LAYOUT_GENERAL.");
- m_commandBuffer->CopyImage(src_image.handle(), VK_IMAGE_LAYOUT_UNDEFINED, dst_image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
- &copy_region);
- m_errorMonitor->VerifyFound();
- // Now verify same checks for dst
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
- "layout should be VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL instead of GENERAL.");
- m_errorMonitor->SetUnexpectedError("layout should be VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL instead of GENERAL.");
- m_commandBuffer->CopyImage(src_image.handle(), VK_IMAGE_LAYOUT_GENERAL, dst_image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
- &copy_region);
- m_errorMonitor->VerifyFound();
- // Now cause error due to dst image layout changing
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-dstImageLayout-00133");
- m_errorMonitor->SetUnexpectedError(
- "is VK_IMAGE_LAYOUT_UNDEFINED but can only be VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL.");
- m_commandBuffer->CopyImage(src_image.handle(), VK_IMAGE_LAYOUT_GENERAL, dst_image.handle(), VK_IMAGE_LAYOUT_UNDEFINED, 1,
- &copy_region);
- m_errorMonitor->VerifyFound();
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-dstImageLayout-00134");
- m_errorMonitor->SetUnexpectedError(
- "with specific layout VK_IMAGE_LAYOUT_UNDEFINED that doesn't match the previously used layout VK_IMAGE_LAYOUT_GENERAL.");
- m_commandBuffer->CopyImage(src_image.handle(), VK_IMAGE_LAYOUT_GENERAL, dst_image.handle(), VK_IMAGE_LAYOUT_UNDEFINED, 1,
- &copy_region);
- m_errorMonitor->VerifyFound();
-
- // Convert dst and depth images to TRANSFER_DST for subsequent tests
- VkImageMemoryBarrier transfer_dst_image_barrier[1] = {};
- transfer_dst_image_barrier[0].sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
- transfer_dst_image_barrier[0].oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
- transfer_dst_image_barrier[0].newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
- transfer_dst_image_barrier[0].srcAccessMask = 0;
- transfer_dst_image_barrier[0].dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
- transfer_dst_image_barrier[0].image = dst_image.handle();
- transfer_dst_image_barrier[0].subresourceRange.layerCount = image_create_info.arrayLayers;
- transfer_dst_image_barrier[0].subresourceRange.levelCount = image_create_info.mipLevels;
- transfer_dst_image_barrier[0].subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
- NULL, 0, NULL, 1, transfer_dst_image_barrier);
- transfer_dst_image_barrier[0].image = depth_image.handle();
- transfer_dst_image_barrier[0].subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
- vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
- NULL, 0, NULL, 1, transfer_dst_image_barrier);
-
- // Cause errors due to clearing with invalid image layouts
- VkClearColorValue color_clear_value = {};
- VkImageSubresourceRange clear_range;
- clear_range.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- clear_range.baseMipLevel = 0;
- clear_range.baseArrayLayer = 0;
- clear_range.layerCount = 1;
- clear_range.levelCount = 1;
-
- // Fail due to explicitly prohibited layout for color clear (only GENERAL and TRANSFER_DST are permitted).
- // Since the image is currently not in UNDEFINED layout, this will emit two errors.
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearColorImage-imageLayout-00005");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearColorImage-imageLayout-00004");
- m_commandBuffer->ClearColorImage(dst_image.handle(), VK_IMAGE_LAYOUT_UNDEFINED, &color_clear_value, 1, &clear_range);
- m_errorMonitor->VerifyFound();
- // Fail due to provided layout not matching actual current layout for color clear.
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearColorImage-imageLayout-00004");
- m_commandBuffer->ClearColorImage(dst_image.handle(), VK_IMAGE_LAYOUT_GENERAL, &color_clear_value, 1, &clear_range);
- m_errorMonitor->VerifyFound();
-
- VkClearDepthStencilValue depth_clear_value = {};
- clear_range.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
-
- // Fail due to explicitly prohibited layout for depth clear (only GENERAL and TRANSFER_DST are permitted).
- // Since the image is currently not in UNDEFINED layout, this will emit two errors.
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearDepthStencilImage-imageLayout-00012");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearDepthStencilImage-imageLayout-00011");
- m_commandBuffer->ClearDepthStencilImage(depth_image.handle(), VK_IMAGE_LAYOUT_UNDEFINED, &depth_clear_value, 1, &clear_range);
- m_errorMonitor->VerifyFound();
- // Fail due to provided layout not matching actual current layout for depth clear.
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearDepthStencilImage-imageLayout-00011");
- m_commandBuffer->ClearDepthStencilImage(depth_image.handle(), VK_IMAGE_LAYOUT_GENERAL, &depth_clear_value, 1, &clear_range);
- m_errorMonitor->VerifyFound();
-
- // Now cause error due to bad image layout transition in PipelineBarrier
- VkImageMemoryBarrier image_barrier[1] = {};
- image_barrier[0].sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
- image_barrier[0].oldLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL;
- image_barrier[0].newLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
- image_barrier[0].image = src_image.handle();
- image_barrier[0].subresourceRange.layerCount = image_create_info.arrayLayers;
- image_barrier[0].subresourceRange.levelCount = image_create_info.mipLevels;
- image_barrier[0].subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-oldLayout-01197");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-oldLayout-01210");
- vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
- NULL, 0, NULL, 1, image_barrier);
- m_errorMonitor->VerifyFound();
-
- // Finally some layout errors at RenderPass create time
- // Just hacking in specific state to get to the errors we want so don't copy this unless you know what you're doing.
- VkAttachmentReference attach = {};
- // perf warning for GENERAL layout w/ non-DS input attachment
- attach.layout = VK_IMAGE_LAYOUT_GENERAL;
- VkSubpassDescription subpass = {};
- subpass.inputAttachmentCount = 1;
- subpass.pInputAttachments = &attach;
- VkRenderPassCreateInfo rpci = {};
- rpci.subpassCount = 1;
- rpci.pSubpasses = &subpass;
- rpci.attachmentCount = 1;
- VkAttachmentDescription attach_desc = {};
- attach_desc.format = VK_FORMAT_UNDEFINED;
- attach_desc.samples = VK_SAMPLE_COUNT_1_BIT;
- attach_desc.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
- rpci.pAttachments = &attach_desc;
- rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
- VkRenderPass rp;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
- "Layout for input attachment is GENERAL but should be READ_ONLY_OPTIMAL.");
- vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp);
- m_errorMonitor->VerifyFound();
- // error w/ non-general layout
- attach.layout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
-
- m_errorMonitor->SetDesiredFailureMsg(
- VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "Layout for input attachment is VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL but can only be READ_ONLY_OPTIMAL or GENERAL.");
- vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp);
- m_errorMonitor->VerifyFound();
- subpass.inputAttachmentCount = 0;
- subpass.colorAttachmentCount = 1;
- subpass.pColorAttachments = &attach;
- attach.layout = VK_IMAGE_LAYOUT_GENERAL;
- // perf warning for GENERAL layout on color attachment
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
- "Layout for color attachment is GENERAL but should be COLOR_ATTACHMENT_OPTIMAL.");
- vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp);
- m_errorMonitor->VerifyFound();
- // error w/ non-color opt or GENERAL layout for color attachment
- attach.layout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
- m_errorMonitor->SetDesiredFailureMsg(
- VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "Layout for color attachment is VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL but can only be COLOR_ATTACHMENT_OPTIMAL or GENERAL.");
- vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp);
- m_errorMonitor->VerifyFound();
- subpass.colorAttachmentCount = 0;
- subpass.pDepthStencilAttachment = &attach;
- attach.layout = VK_IMAGE_LAYOUT_GENERAL;
- // perf warning for GENERAL layout on DS attachment
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
- "GENERAL layout for depth attachment may not give optimal performance.");
- vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp);
- m_errorMonitor->VerifyFound();
- // error w/ non-ds opt or GENERAL layout for depth/stencil attachment
- attach.layout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "Layout for depth attachment is VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL but can only be "
- "DEPTH_STENCIL_ATTACHMENT_OPTIMAL, DEPTH_STENCIL_READ_ONLY_OPTIMAL or GENERAL.");
- vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp);
- m_errorMonitor->VerifyFound();
- // For this error we need a valid renderpass so create default one
- attach.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL;
- attach.attachment = 0;
- attach_desc.format = depth_format;
- attach_desc.samples = VK_SAMPLE_COUNT_1_BIT;
- attach_desc.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
- attach_desc.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
- attach_desc.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
- // Can't do a CLEAR load on READ_ONLY initialLayout
- attach_desc.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
- attach_desc.initialLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL;
- attach_desc.finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "with invalid first layout VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL");
- vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp);
- m_errorMonitor->VerifyFound();
-}
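-
-// Editorial sketch (not part of the original test source): the clear-time layout errors above go
-// away when the image is first transitioned to TRANSFER_DST_OPTIMAL and that same layout is then
-// passed to vkCmdClearColorImage. Assumes a single-mip, single-layer color image and an open
-// command buffer; handles are placeholders.
-static void ClearAfterTransitionSketch(VkCommandBuffer cb, VkImage image) {
- VkImageMemoryBarrier barrier = {};
- barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
- barrier.srcAccessMask = 0;
- barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
- barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
- barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
- barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
- barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
- barrier.image = image;
- barrier.subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
- vkCmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, 1,
-                      &barrier);
- const VkClearColorValue clear_value = {};
- const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
- vkCmdClearColorImage(cb, image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &clear_value, 1, &range);
-}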
-
-TEST_F(VkLayerTest, InvalidStorageImageLayout) {
- TEST_DESCRIPTION("Attempt to update a STORAGE_IMAGE descriptor w/o GENERAL layout.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- const VkFormat tex_format = VK_FORMAT_R8G8B8A8_UNORM;
- VkImageTiling tiling;
- VkFormatProperties format_properties;
- vkGetPhysicalDeviceFormatProperties(gpu(), tex_format, &format_properties);
- if (format_properties.linearTilingFeatures & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT) {
- tiling = VK_IMAGE_TILING_LINEAR;
- } else if (format_properties.optimalTilingFeatures & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT) {
- tiling = VK_IMAGE_TILING_OPTIMAL;
- } else {
- printf("%s Device does not support VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT; skipped.\n", kSkipPrefix);
- return;
- }
-
- OneOffDescriptorSet descriptor_set(m_device,
- {
- {0, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
- });
-
- VkImageObj image(m_device);
- image.Init(32, 32, 1, tex_format, VK_IMAGE_USAGE_STORAGE_BIT, tiling, 0);
- ASSERT_TRUE(image.initialized());
- VkImageView view = image.targetView(tex_format);
-
- descriptor_set.WriteDescriptorImageInfo(0, view, VK_NULL_HANDLE, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE);
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- " of VK_DESCRIPTOR_TYPE_STORAGE_IMAGE type is being updated with layout "
- "VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL but according to spec ");
- descriptor_set.UpdateDescriptorSets();
- m_errorMonitor->VerifyFound();
-}
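-
-// Editorial sketch (not part of the original test source): a STORAGE_IMAGE descriptor must be
-// written with VK_IMAGE_LAYOUT_GENERAL, which is what the failing update above omits. The
-// descriptor set and binding 0 are assumed to match a layout like the one used in the test.
-static void WriteStorageImageGeneralSketch(VkDevice device, VkDescriptorSet set, VkImageView view) {
- VkDescriptorImageInfo image_info = {};
- image_info.imageView = view;
- image_info.imageLayout = VK_IMAGE_LAYOUT_GENERAL;  // required layout for storage image access
- VkWriteDescriptorSet write = {};
- write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
- write.dstSet = set;
- write.dstBinding = 0;
- write.descriptorCount = 1;
- write.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
- write.pImageInfo = &image_info;
- vkUpdateDescriptorSets(device, 1, &write, 0, nullptr);
-}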
-
-TEST_F(VkLayerTest, CreateImageViewBreaksParameterCompatibilityRequirements) {
- TEST_DESCRIPTION(
- "Attempts to create an Image View with a view type that does not match the image type it is being created from.");
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
- }
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- VkPhysicalDeviceMemoryProperties memProps;
- vkGetPhysicalDeviceMemoryProperties(m_device->phy().handle(), &memProps);
-
- // Test mismatch detection for image of type VK_IMAGE_TYPE_1D
- VkImageCreateInfo imgInfo = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
- nullptr,
- VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT,
- VK_IMAGE_TYPE_1D,
- VK_FORMAT_R8G8B8A8_UNORM,
- {1, 1, 1},
- 1,
- 1,
- VK_SAMPLE_COUNT_1_BIT,
- VK_IMAGE_TILING_OPTIMAL,
- VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
- VK_SHARING_MODE_EXCLUSIVE,
- 0,
- nullptr,
- VK_IMAGE_LAYOUT_UNDEFINED};
- VkImageObj image1D(m_device);
- image1D.init(&imgInfo);
- ASSERT_TRUE(image1D.initialized());
-
- // Initialize VkImageViewCreateInfo with mismatched viewType
- VkImageViewCreateInfo ivci = {};
- ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
- ivci.image = image1D.handle();
- ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
- ivci.format = VK_FORMAT_R8G8B8A8_UNORM;
- ivci.subresourceRange.layerCount = 1;
- ivci.subresourceRange.baseMipLevel = 0;
- ivci.subresourceRange.levelCount = 1;
- ivci.subresourceRange.baseArrayLayer = 0;
- ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
-
- // Test for error message
- CreateImageViewTest(*this, &ivci,
- "vkCreateImageView(): pCreateInfo->viewType VK_IMAGE_VIEW_TYPE_2D is not compatible with image");
-
- // Test mismatch detection for image of type VK_IMAGE_TYPE_2D
- imgInfo = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
- nullptr,
- VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT,
- VK_IMAGE_TYPE_2D,
- VK_FORMAT_R8G8B8A8_UNORM,
- {1, 1, 1},
- 1,
- 6,
- VK_SAMPLE_COUNT_1_BIT,
- VK_IMAGE_TILING_OPTIMAL,
- VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
- VK_SHARING_MODE_EXCLUSIVE,
- 0,
- nullptr,
- VK_IMAGE_LAYOUT_UNDEFINED};
- VkImageObj image2D(m_device);
- image2D.init(&imgInfo);
- ASSERT_TRUE(image2D.initialized());
-
- // Initialize VkImageViewCreateInfo with mismatched viewType
- ivci = {};
- ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
- ivci.image = image2D.handle();
- ivci.viewType = VK_IMAGE_VIEW_TYPE_3D;
- ivci.format = VK_FORMAT_R8G8B8A8_UNORM;
- ivci.subresourceRange.layerCount = 1;
- ivci.subresourceRange.baseMipLevel = 0;
- ivci.subresourceRange.levelCount = 1;
- ivci.subresourceRange.baseArrayLayer = 0;
- ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
-
- // Test for error message
- CreateImageViewTest(*this, &ivci,
- "vkCreateImageView(): pCreateInfo->viewType VK_IMAGE_VIEW_TYPE_3D is not compatible with image");
-
- // Change VkImageViewCreateInfo to different mismatched viewType
- ivci.viewType = VK_IMAGE_VIEW_TYPE_CUBE;
- ivci.subresourceRange.layerCount = 6;
-
- // Test for error message
- CreateImageViewTest(*this, &ivci, "VUID-VkImageViewCreateInfo-image-01003");
-
- // Test mismatch detection for image of type VK_IMAGE_TYPE_3D
- imgInfo = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
- nullptr,
- VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT,
- VK_IMAGE_TYPE_3D,
- VK_FORMAT_R8G8B8A8_UNORM,
- {1, 1, 1},
- 1,
- 1,
- VK_SAMPLE_COUNT_1_BIT,
- VK_IMAGE_TILING_OPTIMAL,
- VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
- VK_SHARING_MODE_EXCLUSIVE,
- 0,
- nullptr,
- VK_IMAGE_LAYOUT_UNDEFINED};
- VkImageObj image3D(m_device);
- image3D.init(&imgInfo);
- ASSERT_TRUE(image3D.initialized());
-
- // Initialize VkImageViewCreateInfo with mismatched viewType
- ivci = {};
- ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
- ivci.image = image3D.handle();
- ivci.viewType = VK_IMAGE_VIEW_TYPE_1D;
- ivci.format = VK_FORMAT_R8G8B8A8_UNORM;
- ivci.subresourceRange.layerCount = 1;
- ivci.subresourceRange.baseMipLevel = 0;
- ivci.subresourceRange.levelCount = 1;
- ivci.subresourceRange.baseArrayLayer = 0;
- ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
-
- // Test for error message
- CreateImageViewTest(*this, &ivci,
- "vkCreateImageView(): pCreateInfo->viewType VK_IMAGE_VIEW_TYPE_1D is not compatible with image");
-
- // Change VkImageViewCreateInfo to different mismatched viewType
- ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
-
- // Test for error message
- if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME)) {
- CreateImageViewTest(*this, &ivci, "VUID-VkImageViewCreateInfo-image-01005");
- } else {
- CreateImageViewTest(*this, &ivci, "VUID-VkImageViewCreateInfo-subResourceRange-01021");
- }
-
- // Check if the device can make the image required for this test case.
- VkImageFormatProperties formProps = {{0, 0, 0}, 0, 0, 0, 0};
- VkResult res = vkGetPhysicalDeviceImageFormatProperties(
- m_device->phy().handle(), VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_TYPE_3D, VK_IMAGE_TILING_OPTIMAL,
- VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
- VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR | VK_IMAGE_CREATE_SPARSE_BINDING_BIT,
- &formProps);
-
- // If not, skip this part of the test.
- if (res || !m_device->phy().features().sparseBinding ||
- !DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME)) {
- printf("%s Sparse 3D image or %s not supported; skipping remainder of test.\n", kSkipPrefix, VK_KHR_MAINTENANCE1_EXTENSION_NAME);
- return;
- }
-
- // Initialize VkImageCreateInfo with VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR and VK_IMAGE_CREATE_SPARSE_BINDING_BIT which
- // are incompatible create flags.
- imgInfo = {
- VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
- nullptr,
- VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR | VK_IMAGE_CREATE_SPARSE_BINDING_BIT,
- VK_IMAGE_TYPE_3D,
- VK_FORMAT_R8G8B8A8_UNORM,
- {1, 1, 1},
- 1,
- 1,
- VK_SAMPLE_COUNT_1_BIT,
- VK_IMAGE_TILING_OPTIMAL,
- VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
- VK_SHARING_MODE_EXCLUSIVE,
- 0,
- nullptr,
- VK_IMAGE_LAYOUT_UNDEFINED};
- VkImage imageSparse;
-
- // Creating a sparse image means we should not bind memory to it.
- res = vkCreateImage(m_device->device(), &imgInfo, NULL, &imageSparse);
- ASSERT_FALSE(res);
-
- // Initialize VkImageViewCreateInfo to create a view that will attempt to utilize VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR.
- ivci = {};
- ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
- ivci.image = imageSparse;
- ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
- ivci.format = VK_FORMAT_R8G8B8A8_UNORM;
- ivci.subresourceRange.layerCount = 1;
- ivci.subresourceRange.baseMipLevel = 0;
- ivci.subresourceRange.levelCount = 1;
- ivci.subresourceRange.baseArrayLayer = 0;
- ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
-
- // Test for error message
- CreateImageViewTest(*this, &ivci,
- " when the VK_IMAGE_CREATE_SPARSE_BINDING_BIT, VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT, or "
- "VK_IMAGE_CREATE_SPARSE_ALIASED_BIT flags are enabled.");
-
- // Clean up
- vkDestroyImage(m_device->device(), imageSparse, nullptr);
-}
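-
-// Editorial sketch (not part of the original test source): a viewType that matches the image type
-// creates cleanly, e.g. a 2D_ARRAY view of a VK_IMAGE_TYPE_2D image with layerCount inside the
-// image's range. The image handle and format are assumed to match the image2D setup above.
-static VkImageViewCreateInfo MatchingViewTypeSketch(VkImage image_2d) {
- VkImageViewCreateInfo ivci = {};
- ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
- ivci.image = image_2d;
- ivci.viewType = VK_IMAGE_VIEW_TYPE_2D_ARRAY;  // compatible with VK_IMAGE_TYPE_2D
- ivci.format = VK_FORMAT_R8G8B8A8_UNORM;
- ivci.subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
- return ivci;
-}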
-
-TEST_F(VkLayerTest, CreateImageViewFormatFeatureMismatch) {
- TEST_DESCRIPTION("Create view with a format that does not have the same features as the image format.");
-
- if (!EnableDeviceProfileLayer()) {
- printf("%s Failed to enable device profile layer.\n", kSkipPrefix);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- PFN_vkSetPhysicalDeviceFormatPropertiesEXT fpvkSetPhysicalDeviceFormatPropertiesEXT = nullptr;
- PFN_vkGetOriginalPhysicalDeviceFormatPropertiesEXT fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT = nullptr;
-
- // Load required functions
- if (!LoadDeviceProfileLayer(fpvkSetPhysicalDeviceFormatPropertiesEXT, fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT)) {
- printf("%s Failed to load device profile layer.\n", kSkipPrefix);
- return;
- }
-
- // List of features to be tested
- VkFormatFeatureFlagBits features[] = {VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT, VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT,
- VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT, VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT};
- uint32_t feature_count = 4;
- // List of usage cases for each feature test
- VkImageUsageFlags usages[] = {VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_USAGE_STORAGE_BIT, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
- VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT};
- // List of errors that will be thrown in order of tests run
- std::string optimal_error_codes[] = {
- "VUID-VkImageViewCreateInfo-usage-02274",
- "VUID-VkImageViewCreateInfo-usage-02275",
- "VUID-VkImageViewCreateInfo-usage-02276",
- "VUID-VkImageViewCreateInfo-usage-02277",
- };
-
- VkFormatProperties formatProps;
-
- // First three tests
- uint32_t i = 0;
- for (i = 0; i < (feature_count - 1); i++) {
- // Modify formats to have mismatched features
-
- // Format for image
- fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_R32G32B32A32_UINT, &formatProps);
- formatProps.optimalTilingFeatures |= features[i];
- fpvkSetPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_R32G32B32A32_UINT, formatProps);
-
- memset(&formatProps, 0, sizeof(formatProps));
-
- // Format for view
- fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_R32G32B32A32_SINT, &formatProps);
- formatProps.optimalTilingFeatures = features[(i + 1) % feature_count];
- fpvkSetPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_R32G32B32A32_SINT, formatProps);
-
- // Create image with modified format
- VkImageCreateInfo imgInfo = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
- nullptr,
- VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT,
- VK_IMAGE_TYPE_2D,
- VK_FORMAT_R32G32B32A32_UINT,
- {1, 1, 1},
- 1,
- 1,
- VK_SAMPLE_COUNT_1_BIT,
- VK_IMAGE_TILING_OPTIMAL,
- usages[i],
- VK_SHARING_MODE_EXCLUSIVE,
- 0,
- nullptr,
- VK_IMAGE_LAYOUT_UNDEFINED};
- VkImageObj image(m_device);
- image.init(&imgInfo);
- ASSERT_TRUE(image.initialized());
-
- // Initialize VkImageViewCreateInfo with modified format
- VkImageViewCreateInfo ivci = {};
- ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
- ivci.image = image.handle();
- ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
- ivci.format = VK_FORMAT_R32G32B32A32_SINT;
- ivci.subresourceRange.layerCount = 1;
- ivci.subresourceRange.baseMipLevel = 0;
- ivci.subresourceRange.levelCount = 1;
- ivci.subresourceRange.baseArrayLayer = 0;
- ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
-
- // Test for error message
- CreateImageViewTest(*this, &ivci, optimal_error_codes[i]);
- }
-
- // Test for VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT. Needs special formats
-
- // Only run this test if format supported
- if (!ImageFormatIsSupported(gpu(), VK_FORMAT_D24_UNORM_S8_UINT, VK_IMAGE_TILING_OPTIMAL)) {
- printf("%s VK_FORMAT_D24_UNORM_S8_UINT format not supported - skipped.\n", kSkipPrefix);
- return;
- }
- // Modify formats to have mismatched features
-
- // Format for image
- fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_D24_UNORM_S8_UINT, &formatProps);
- formatProps.optimalTilingFeatures |= features[i];
- fpvkSetPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_D24_UNORM_S8_UINT, formatProps);
-
- memset(&formatProps, 0, sizeof(formatProps));
-
- // Format for view
- fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_D32_SFLOAT_S8_UINT, &formatProps);
- formatProps.optimalTilingFeatures = features[(i + 1) % feature_count];
- fpvkSetPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_D32_SFLOAT_S8_UINT, formatProps);
-
- // Create image with modified format
- VkImageCreateInfo imgInfo = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
- nullptr,
- VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT,
- VK_IMAGE_TYPE_2D,
- VK_FORMAT_D24_UNORM_S8_UINT,
- {1, 1, 1},
- 1,
- 1,
- VK_SAMPLE_COUNT_1_BIT,
- VK_IMAGE_TILING_OPTIMAL,
- usages[i],
- VK_SHARING_MODE_EXCLUSIVE,
- 0,
- nullptr,
- VK_IMAGE_LAYOUT_UNDEFINED};
- VkImageObj image(m_device);
- image.init(&imgInfo);
- ASSERT_TRUE(image.initialized());
-
- // Initialize VkImageViewCreateInfo with modified format
- VkImageViewCreateInfo ivci = {};
- ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
- ivci.image = image.handle();
- ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
- ivci.format = VK_FORMAT_D32_SFLOAT_S8_UINT;
- ivci.subresourceRange.layerCount = 1;
- ivci.subresourceRange.baseMipLevel = 0;
- ivci.subresourceRange.levelCount = 1;
- ivci.subresourceRange.baseArrayLayer = 0;
- ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
-
- // Test for error message
- CreateImageViewTest(*this, &ivci, optimal_error_codes[i]);
-}
-
-TEST_F(VkLayerTest, InvalidImageViewUsageCreateInfo) {
- TEST_DESCRIPTION("Usage modification via a chained VkImageViewUsageCreateInfo struct");
-
- if (!EnableDeviceProfileLayer()) {
- printf("%s Test requires DeviceProfileLayer, unavailable - skipped.\n", kSkipPrefix);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- if (!DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE2_EXTENSION_NAME)) {
- printf("%s Test requires API >= 1.1 or KHR_MAINTENANCE2 extension, unavailable - skipped.\n", kSkipPrefix);
- return;
- }
- m_device_extension_names.push_back(VK_KHR_MAINTENANCE2_EXTENSION_NAME);
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- PFN_vkSetPhysicalDeviceFormatPropertiesEXT fpvkSetPhysicalDeviceFormatPropertiesEXT = nullptr;
- PFN_vkGetOriginalPhysicalDeviceFormatPropertiesEXT fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT = nullptr;
-
- // Load required functions
- if (!LoadDeviceProfileLayer(fpvkSetPhysicalDeviceFormatPropertiesEXT, fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT)) {
- printf("%s Required extensions are not available.\n", kSkipPrefix);
- return;
- }
-
- VkFormatProperties formatProps;
-
- // Ensure image format claims support for sampled and storage, excludes color attachment
- memset(&formatProps, 0, sizeof(formatProps));
- fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_R32G32B32A32_UINT, &formatProps);
- formatProps.optimalTilingFeatures |= (VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT | VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT);
- formatProps.optimalTilingFeatures = formatProps.optimalTilingFeatures & ~VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT;
- fpvkSetPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_R32G32B32A32_UINT, formatProps);
-
- // Create image with sampled and storage usages
- VkImageCreateInfo imgInfo = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
- nullptr,
- VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT,
- VK_IMAGE_TYPE_2D,
- VK_FORMAT_R32G32B32A32_UINT,
- {1, 1, 1},
- 1,
- 1,
- VK_SAMPLE_COUNT_1_BIT,
- VK_IMAGE_TILING_OPTIMAL,
- VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT,
- VK_SHARING_MODE_EXCLUSIVE,
- 0,
- nullptr,
- VK_IMAGE_LAYOUT_UNDEFINED};
- VkImageObj image(m_device);
- image.init(&imgInfo);
- ASSERT_TRUE(image.initialized());
-
- // Force the imageview format to exclude storage feature, include color attachment
- memset(&formatProps, 0, sizeof(formatProps));
- fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_R32G32B32A32_SINT, &formatProps);
- formatProps.optimalTilingFeatures |= VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT;
- formatProps.optimalTilingFeatures = (formatProps.optimalTilingFeatures & ~VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT);
- fpvkSetPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_R32G32B32A32_SINT, formatProps);
-
- VkImageViewCreateInfo ivci = {};
- ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
- ivci.image = image.handle();
- ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
- ivci.format = VK_FORMAT_R32G32B32A32_SINT;
- ivci.subresourceRange.layerCount = 1;
- ivci.subresourceRange.baseMipLevel = 0;
- ivci.subresourceRange.levelCount = 1;
- ivci.subresourceRange.baseArrayLayer = 0;
- ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
-
- // ImageView creation should fail because view format doesn't support all the underlying image's usages
- CreateImageViewTest(*this, &ivci, "VUID-VkImageViewCreateInfo-usage-02275");
-
- // Add a chained VkImageViewUsageCreateInfo to override original image usage bits, removing storage
- VkImageViewUsageCreateInfo usage_ci = {VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO, nullptr, VK_IMAGE_USAGE_SAMPLED_BIT};
- // Link the VkImageViewUsageCreateInfo struct into the view's create info pNext chain
- ivci.pNext = &usage_ci;
-
- // ImageView should now succeed without error
- CreateImageViewTest(*this, &ivci);
-
- // Try a zero usage field
- usage_ci.usage = 0;
- CreateImageViewTest(*this, &ivci, "VUID-VkImageViewUsageCreateInfo-usage-requiredbitmask");
-
- // Try an illegal bit in usage field
- usage_ci.usage = 0x10000000 | VK_IMAGE_USAGE_SAMPLED_BIT;
- CreateImageViewTest(*this, &ivci, "VUID-VkImageViewUsageCreateInfo-usage-parameter");
-}
-
-TEST_F(VkLayerTest, CreateImageViewNoMemoryBoundToImage) {
- VkResult err;
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- // Create an image and try to create a view with no memory backing the image
- VkImage image;
-
- const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
- const int32_t tex_width = 32;
- const int32_t tex_height = 32;
-
- VkImageCreateInfo image_create_info = {};
- image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- image_create_info.pNext = NULL;
- image_create_info.imageType = VK_IMAGE_TYPE_2D;
- image_create_info.format = tex_format;
- image_create_info.extent.width = tex_width;
- image_create_info.extent.height = tex_height;
- image_create_info.extent.depth = 1;
- image_create_info.mipLevels = 1;
- image_create_info.arrayLayers = 1;
- image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
- image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
- image_create_info.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
- image_create_info.flags = 0;
-
- err = vkCreateImage(m_device->device(), &image_create_info, NULL, &image);
- ASSERT_VK_SUCCESS(err);
-
- VkImageViewCreateInfo image_view_create_info = {};
- image_view_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
- image_view_create_info.image = image;
- image_view_create_info.viewType = VK_IMAGE_VIEW_TYPE_2D;
- image_view_create_info.format = tex_format;
- image_view_create_info.subresourceRange.layerCount = 1;
- image_view_create_info.subresourceRange.baseMipLevel = 0;
- image_view_create_info.subresourceRange.levelCount = 1;
- image_view_create_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
-
- CreateImageViewTest(*this, &image_view_create_info,
- " used with no memory bound. Memory should be bound by calling vkBindImageMemory().");
- vkDestroyImage(m_device->device(), image, NULL);
-}
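-
-// Editorial sketch (not part of the original test source): allocating and binding device memory
-// before creating the view avoids the "no memory bound" error above. Memory-type selection is
-// reduced to the first type allowed by the requirements; production code would also check the
-// memory property flags it needs.
-static VkResult BindImageMemorySketch(VkDevice device, VkImage image, VkDeviceMemory *out_memory) {
- VkMemoryRequirements reqs = {};
- vkGetImageMemoryRequirements(device, image, &reqs);
- uint32_t type_index = 0;
- while (((reqs.memoryTypeBits >> type_index) & 1u) == 0) ++type_index;
- VkMemoryAllocateInfo alloc_info = {VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, nullptr, reqs.size, type_index};
- VkResult result = vkAllocateMemory(device, &alloc_info, nullptr, out_memory);
- if (result != VK_SUCCESS) return result;
- return vkBindImageMemory(device, image, *out_memory, 0);
-}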
-
-TEST_F(VkLayerTest, InvalidImageViewAspect) {
- TEST_DESCRIPTION("Create an image and try to create a view with an invalid aspectMask");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
- VkImageObj image(m_device);
- image.Init(32, 32, 1, tex_format, VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_TILING_LINEAR, 0);
- ASSERT_TRUE(image.initialized());
-
- VkImageViewCreateInfo image_view_create_info = {};
- image_view_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
- image_view_create_info.image = image.handle();
- image_view_create_info.viewType = VK_IMAGE_VIEW_TYPE_2D;
- image_view_create_info.format = tex_format;
- image_view_create_info.subresourceRange.baseMipLevel = 0;
- image_view_create_info.subresourceRange.levelCount = 1;
- image_view_create_info.subresourceRange.layerCount = 1;
- // Cause an error by setting an invalid image aspect
- image_view_create_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_METADATA_BIT;
-
- CreateImageViewTest(*this, &image_view_create_info, "VUID-VkImageSubresource-aspectMask-parameter");
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, ExerciseGetImageSubresourceLayout) {
- TEST_DESCRIPTION("Test vkGetImageSubresourceLayout() valid usages");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- VkSubresourceLayout subres_layout = {};
-
- // VU 00732: image must have been created with tiling equal to VK_IMAGE_TILING_LINEAR
- {
- const VkImageTiling tiling = VK_IMAGE_TILING_OPTIMAL; // ERROR: violates VU 00732
- VkImageObj img(m_device);
- img.InitNoLayout(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, tiling);
- ASSERT_TRUE(img.initialized());
-
- VkImageSubresource subres = {};
- subres.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- subres.mipLevel = 0;
- subres.arrayLayer = 0;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkGetImageSubresourceLayout-image-00996");
- vkGetImageSubresourceLayout(m_device->device(), img.image(), &subres, &subres_layout);
- m_errorMonitor->VerifyFound();
- }
-
- // VU 00733: The aspectMask member of pSubresource must only have a single bit set
- {
- VkImageObj img(m_device);
- img.InitNoLayout(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_TRANSFER_SRC_BIT);
- ASSERT_TRUE(img.initialized());
-
- VkImageSubresource subres = {};
- subres.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_METADATA_BIT; // ERROR: triggers VU 00733
- subres.mipLevel = 0;
- subres.arrayLayer = 0;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkGetImageSubresourceLayout-aspectMask-00997");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageSubresource-aspectMask-parameter");
- vkGetImageSubresourceLayout(m_device->device(), img.image(), &subres, &subres_layout);
- m_errorMonitor->VerifyFound();
- }
-
- // 00739 mipLevel must be less than the mipLevels specified in VkImageCreateInfo when the image was created
- {
- VkImageObj img(m_device);
- img.InitNoLayout(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_TRANSFER_SRC_BIT);
- ASSERT_TRUE(img.initialized());
-
- VkImageSubresource subres = {};
- subres.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- subres.mipLevel = 1; // ERROR: triggers VU 00739
- subres.arrayLayer = 0;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkGetImageSubresourceLayout-mipLevel-01716");
- vkGetImageSubresourceLayout(m_device->device(), img.image(), &subres, &subres_layout);
- m_errorMonitor->VerifyFound();
- }
-
- // 00740 arrayLayer must be less than the arrayLayers specified in VkImageCreateInfo when the image was created
- {
- VkImageObj img(m_device);
- img.InitNoLayout(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_TRANSFER_SRC_BIT);
- ASSERT_TRUE(img.initialized());
-
- VkImageSubresource subres = {};
- subres.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- subres.mipLevel = 0;
- subres.arrayLayer = 1; // ERROR: triggers VU 00740
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkGetImageSubresourceLayout-arrayLayer-01717");
- vkGetImageSubresourceLayout(m_device->device(), img.image(), &subres, &subres_layout);
- m_errorMonitor->VerifyFound();
- }
-}
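-
-// Editorial sketch (not part of the original test source): the query is valid when the image was
-// created with VK_IMAGE_TILING_LINEAR, exactly one aspect bit is set, and mipLevel/arrayLayer lie
-// inside the image's ranges. The handle is assumed to be a single-mip, single-layer linear image.
-static VkSubresourceLayout QueryLinearLayoutSketch(VkDevice device, VkImage linear_image) {
- VkImageSubresource subres = {};
- subres.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;  // a single aspect bit
- subres.mipLevel = 0;                            // < mipLevels
- subres.arrayLayer = 0;                          // < arrayLayers
- VkSubresourceLayout layout = {};
- vkGetImageSubresourceLayout(device, linear_image, &subres, &layout);
- return layout;
-}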
-
-TEST_F(VkLayerTest, ImageLayerUnsupportedFormat) {
- TEST_DESCRIPTION("Creating images with unsupported formats");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- // Create image with unsupported format - Expect FORMAT_UNSUPPORTED
- VkImageCreateInfo image_create_info = {};
- image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- image_create_info.imageType = VK_IMAGE_TYPE_2D;
- image_create_info.format = VK_FORMAT_UNDEFINED;
- image_create_info.extent.width = 32;
- image_create_info.extent.height = 32;
- image_create_info.extent.depth = 1;
- image_create_info.mipLevels = 1;
- image_create_info.arrayLayers = 1;
- image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
- image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
- image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
-
- CreateImageTest(*this, &image_create_info, "VUID-VkImageCreateInfo-format-00943");
-}
-
-TEST_F(VkLayerTest, CreateImageViewFormatMismatchUnrelated) {
- TEST_DESCRIPTION("Create an image with a color format, then try to create a depth view of it");
-
- if (!EnableDeviceProfileLayer()) {
- printf("%s Failed to enable device profile layer.\n", kSkipPrefix);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- // Load required functions
- PFN_vkSetPhysicalDeviceFormatPropertiesEXT fpvkSetPhysicalDeviceFormatPropertiesEXT =
- (PFN_vkSetPhysicalDeviceFormatPropertiesEXT)vkGetInstanceProcAddr(instance(), "vkSetPhysicalDeviceFormatPropertiesEXT");
- PFN_vkGetOriginalPhysicalDeviceFormatPropertiesEXT fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT =
- (PFN_vkGetOriginalPhysicalDeviceFormatPropertiesEXT)vkGetInstanceProcAddr(instance(),
- "vkGetOriginalPhysicalDeviceFormatPropertiesEXT");
-
- if (!(fpvkSetPhysicalDeviceFormatPropertiesEXT) || !(fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT)) {
- printf("%s Can't find device_profile_api functions; skipped.\n", kSkipPrefix);
- return;
- }
-
- auto depth_format = FindSupportedDepthStencilFormat(gpu());
- if (!depth_format) {
- printf("%s Couldn't find depth stencil image format.\n", kSkipPrefix);
- return;
- }
-
- VkFormatProperties formatProps;
-
- fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT(gpu(), depth_format, &formatProps);
- formatProps.optimalTilingFeatures |= VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT;
- fpvkSetPhysicalDeviceFormatPropertiesEXT(gpu(), depth_format, formatProps);
-
- VkImageObj image(m_device);
- image.Init(128, 128, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
- ASSERT_TRUE(image.initialized());
-
- VkImageViewCreateInfo imgViewInfo = {};
- imgViewInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
- imgViewInfo.image = image.handle();
- imgViewInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
- imgViewInfo.format = depth_format;
- imgViewInfo.subresourceRange.layerCount = 1;
- imgViewInfo.subresourceRange.baseMipLevel = 0;
- imgViewInfo.subresourceRange.levelCount = 1;
- imgViewInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
-
- // Can't use depth format for view into color image - Expect INVALID_FORMAT
- CreateImageViewTest(*this, &imgViewInfo,
- "Formats MUST be IDENTICAL unless VK_IMAGE_CREATE_MUTABLE_FORMAT BIT was set on image creation.");
-}
-
-TEST_F(VkLayerTest, CreateImageViewNoMutableFormatBit) {
- TEST_DESCRIPTION("Create an image view with a different format, when the image does not have MUTABLE_FORMAT bit");
-
- if (!EnableDeviceProfileLayer()) {
- printf("%s Couldn't enable device profile layer.\n", kSkipPrefix);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- PFN_vkSetPhysicalDeviceFormatPropertiesEXT fpvkSetPhysicalDeviceFormatPropertiesEXT = nullptr;
- PFN_vkGetOriginalPhysicalDeviceFormatPropertiesEXT fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT = nullptr;
-
- // Load required functions
- if (!LoadDeviceProfileLayer(fpvkSetPhysicalDeviceFormatPropertiesEXT, fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT)) {
- printf("%s Required extensions are not present.\n", kSkipPrefix);
- return;
- }
-
- VkImageObj image(m_device);
- image.Init(128, 128, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
- ASSERT_TRUE(image.initialized());
-
- VkFormatProperties formatProps;
-
- fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_B8G8R8A8_UINT, &formatProps);
- formatProps.optimalTilingFeatures |= VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT;
- fpvkSetPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_B8G8R8A8_UINT, formatProps);
-
- VkImageViewCreateInfo imgViewInfo = {};
- imgViewInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
- imgViewInfo.image = image.handle();
- imgViewInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
- imgViewInfo.format = VK_FORMAT_B8G8R8A8_UINT;
- imgViewInfo.subresourceRange.layerCount = 1;
- imgViewInfo.subresourceRange.baseMipLevel = 0;
- imgViewInfo.subresourceRange.levelCount = 1;
- imgViewInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
-
- // Same compatibility class but no MUTABLE_FORMAT bit - Expect
- // VIEW_CREATE_ERROR
- CreateImageViewTest(*this, &imgViewInfo, "VUID-VkImageViewCreateInfo-image-01019");
-}
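-
-// Editorial sketch (not part of the original test source): the B8G8R8A8_UINT view format becomes
-// legal when the image itself is created with VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT, since the two
-// formats share a compatibility class. Values are illustrative, not taken from the original test.
-static VkImageCreateInfo MutableFormatImageSketch() {
- VkImageCreateInfo ci = {};
- ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- ci.flags = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;  // permits views using other same-class formats
- ci.imageType = VK_IMAGE_TYPE_2D;
- ci.format = VK_FORMAT_B8G8R8A8_UNORM;
- ci.extent = {128, 128, 1};
- ci.mipLevels = 1;
- ci.arrayLayers = 1;
- ci.samples = VK_SAMPLE_COUNT_1_BIT;
- ci.tiling = VK_IMAGE_TILING_OPTIMAL;
- ci.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
- ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
- ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
- return ci;
-}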
-
-TEST_F(VkLayerTest, CreateImageViewDifferentClass) {
- TEST_DESCRIPTION("Passing bad parameters to CreateImageView");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- if (!(m_device->format_properties(VK_FORMAT_R8_UINT).optimalTilingFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT)) {
- printf("%s Device does not support R8_UINT as color attachment; skipped.\n", kSkipPrefix);
- return;
- }
-
- VkImageCreateInfo mutImgInfo = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
- nullptr,
- VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT,
- VK_IMAGE_TYPE_2D,
- VK_FORMAT_R8_UINT,
- {128, 128, 1},
- 1,
- 1,
- VK_SAMPLE_COUNT_1_BIT,
- VK_IMAGE_TILING_OPTIMAL,
- VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
- VK_SHARING_MODE_EXCLUSIVE,
- 0,
- nullptr,
- VK_IMAGE_LAYOUT_UNDEFINED};
- VkImageObj mutImage(m_device);
- mutImage.init(&mutImgInfo);
- ASSERT_TRUE(mutImage.initialized());
-
- VkImageViewCreateInfo imgViewInfo = {};
- imgViewInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
- imgViewInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
- imgViewInfo.format = VK_FORMAT_B8G8R8A8_UNORM;
- imgViewInfo.subresourceRange.layerCount = 1;
- imgViewInfo.subresourceRange.baseMipLevel = 0;
- imgViewInfo.subresourceRange.levelCount = 1;
- imgViewInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- imgViewInfo.image = mutImage.handle();
-
- CreateImageViewTest(*this, &imgViewInfo, "VUID-VkImageViewCreateInfo-image-01018");
-}
-
-TEST_F(VkLayerTest, MultiplaneIncompatibleViewFormat) {
- TEST_DESCRIPTION("Positive/negative tests of multiplane imageview format compatibility");
-
- // Enable KHR multiplane req'd extensions
- bool mp_extensions = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
- VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION);
- if (mp_extensions) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- }
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME);
- mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
- mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
- if (mp_extensions) {
- m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
- } else {
- printf("%s test requires KHR multiplane extensions, not available. Skipping.\n", kSkipPrefix);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- VkImageCreateInfo ci = {};
- ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- ci.pNext = NULL;
- ci.flags = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
- ci.imageType = VK_IMAGE_TYPE_2D;
- ci.format = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM;
- ci.tiling = VK_IMAGE_TILING_OPTIMAL;
- ci.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
- ci.extent = {128, 128, 1};
- ci.mipLevels = 1;
- ci.arrayLayers = 1;
- ci.samples = VK_SAMPLE_COUNT_1_BIT;
- ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
- ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
-
- // Verify format
- VkFormatFeatureFlags features = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT;
- bool supported = ImageFormatAndFeaturesSupported(instance(), gpu(), ci, features);
- if (!supported) {
- printf("%s Multiplane image format not supported. Skipping test.\n", kSkipPrefix);
- return;
- }
-
- VkImageObj image_obj(m_device);
- image_obj.init(&ci);
- ASSERT_TRUE(image_obj.initialized());
-
- VkImageViewCreateInfo ivci = {};
- ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
- ivci.image = image_obj.image();
- ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
- ivci.format = VK_FORMAT_R8_SNORM; // Compat is VK_FORMAT_R8_UNORM
- ivci.subresourceRange.layerCount = 1;
- ivci.subresourceRange.baseMipLevel = 0;
- ivci.subresourceRange.levelCount = 1;
- ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_PLANE_1_BIT;
-
- // Incompatible format error
- CreateImageViewTest(*this, &ivci, "VUID-VkImageViewCreateInfo-image-01586");
-
- // Correct format succeeds
- ivci.format = VK_FORMAT_R8_UNORM;
- CreateImageViewTest(*this, &ivci);
-
- // Try a multiplane imageview
- ivci.format = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM;
- ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- CreateImageViewTest(*this, &ivci);
-}
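-
- // A minimal sketch (an assumed helper, not taken from the validation layer sources) of the per-plane
- // compatibility rule the test above exercises: each plane aspect of VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM
- // must be viewed with its single-plane compatible format, which is VK_FORMAT_R8_UNORM for all three planes.
- static VkFormat CompatiblePlaneFormatFor3Plane420Unorm(VkImageAspectFlagBits plane_aspect) {
-     switch (plane_aspect) {
-         case VK_IMAGE_ASPECT_PLANE_0_BIT:
-         case VK_IMAGE_ASPECT_PLANE_1_BIT:
-         case VK_IMAGE_ASPECT_PLANE_2_BIT:
-             // Viewing a plane with anything else (e.g. VK_FORMAT_R8_SNORM above) triggers
-             // VUID-VkImageViewCreateInfo-image-01586.
-             return VK_FORMAT_R8_UNORM;
-         default:
-             return VK_FORMAT_UNDEFINED;
-     }
- }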
-
-TEST_F(VkLayerTest, CreateImageViewInvalidSubresourceRange) {
- TEST_DESCRIPTION("Passing bad image subrange to CreateImageView");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- VkImageObj image(m_device);
- image.Init(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL);
- ASSERT_TRUE(image.create_info().arrayLayers == 1);
- ASSERT_TRUE(image.initialized());
-
- VkImageViewCreateInfo img_view_info_template = {};
- img_view_info_template.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
- img_view_info_template.image = image.handle();
- img_view_info_template.viewType = VK_IMAGE_VIEW_TYPE_2D_ARRAY;
- img_view_info_template.format = image.format();
- // subresourceRange to be filled later for the purposes of this test
- img_view_info_template.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- img_view_info_template.subresourceRange.baseMipLevel = 0;
- img_view_info_template.subresourceRange.levelCount = 0;
- img_view_info_template.subresourceRange.baseArrayLayer = 0;
- img_view_info_template.subresourceRange.layerCount = 0;
-
- // Try baseMipLevel >= image.mipLevels with VK_REMAINING_MIP_LEVELS
- {
- const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 1, VK_REMAINING_MIP_LEVELS, 0, 1};
- VkImageViewCreateInfo img_view_info = img_view_info_template;
- img_view_info.subresourceRange = range;
- CreateImageViewTest(*this, &img_view_info, "VUID-VkImageViewCreateInfo-subresourceRange-01478");
- }
-
- // Try baseMipLevel >= image.mipLevels without VK_REMAINING_MIP_LEVELS
- {
- const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 1, 1, 0, 1};
- VkImageViewCreateInfo img_view_info = img_view_info_template;
- img_view_info.subresourceRange = range;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageViewCreateInfo-subresourceRange-01718");
- CreateImageViewTest(*this, &img_view_info, "VUID-VkImageViewCreateInfo-subresourceRange-01478");
- }
-
- // Try levelCount = 0
- {
- const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 0, 1};
- VkImageViewCreateInfo img_view_info = img_view_info_template;
- img_view_info.subresourceRange = range;
- CreateImageViewTest(*this, &img_view_info, "VUID-VkImageViewCreateInfo-subresourceRange-01718");
- }
-
- // Try baseMipLevel + levelCount > image.mipLevels
- {
- const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 2, 0, 1};
- VkImageViewCreateInfo img_view_info = img_view_info_template;
- img_view_info.subresourceRange = range;
- CreateImageViewTest(*this, &img_view_info, "VUID-VkImageViewCreateInfo-subresourceRange-01718");
- }
-
- // These tests rely on the Maintenance1 extension not being enabled, so they are only valid for Vulkan 1.0
- if (m_device->props.apiVersion < VK_API_VERSION_1_1) {
- // Try baseArrayLayer >= image.arrayLayers with VK_REMAINING_ARRAY_LAYERS
- {
- const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 1, VK_REMAINING_ARRAY_LAYERS};
- VkImageViewCreateInfo img_view_info = img_view_info_template;
- img_view_info.subresourceRange = range;
- CreateImageViewTest(*this, &img_view_info, "VUID-VkImageViewCreateInfo-subresourceRange-01480");
- }
-
- // Try baseArrayLayer >= image.arrayLayers without VK_REMAINING_ARRAY_LAYERS
- {
- const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 1, 1};
- VkImageViewCreateInfo img_view_info = img_view_info_template;
- img_view_info.subresourceRange = range;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkImageViewCreateInfo-subresourceRange-01719");
- CreateImageViewTest(*this, &img_view_info, "VUID-VkImageViewCreateInfo-subresourceRange-01480");
- }
-
- // Try layerCount = 0
- {
- const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 0};
- VkImageViewCreateInfo img_view_info = img_view_info_template;
- img_view_info.subresourceRange = range;
- CreateImageViewTest(*this, &img_view_info, "VUID-VkImageViewCreateInfo-subresourceRange-01719");
- }
-
- // Try baseArrayLayer + layerCount > image.arrayLayers
- {
- const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 2};
- VkImageViewCreateInfo img_view_info = img_view_info_template;
- img_view_info.subresourceRange = range;
- CreateImageViewTest(*this, &img_view_info, "VUID-VkImageViewCreateInfo-subresourceRange-01719");
- }
- }
-}
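-
- // A minimal sketch, assuming only the VkImageCreateInfo fields used above, of the subresource range
- // checks this test exercises: the base level/layer must lie inside the image, and an explicit count must
- // be non-zero and must not run past the image's mipLevels/arrayLayers.
- static bool SubresourceRangeFitsImage(const VkImageSubresourceRange &range, const VkImageCreateInfo &image_ci) {
-     if (range.baseMipLevel >= image_ci.mipLevels) return false;  // subresourceRange-01478
-     if (range.levelCount != VK_REMAINING_MIP_LEVELS &&
-         (range.levelCount == 0 || range.baseMipLevel + range.levelCount > image_ci.mipLevels))
-         return false;  // subresourceRange-01718
-     if (range.baseArrayLayer >= image_ci.arrayLayers) return false;  // subresourceRange-01480
-     if (range.layerCount != VK_REMAINING_ARRAY_LAYERS &&
-         (range.layerCount == 0 || range.baseArrayLayer + range.layerCount > image_ci.arrayLayers))
-         return false;  // subresourceRange-01719
-     return true;
- }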
-
-TEST_F(VkLayerTest, CreateImageMiscErrors) {
- TEST_DESCRIPTION("Misc leftover valid usage errors in VkImageCreateInfo struct");
-
- VkPhysicalDeviceFeatures features{};
- ASSERT_NO_FATAL_FAILURE(Init(&features));
-
- VkImageCreateInfo tmp_img_ci = {};
- tmp_img_ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- tmp_img_ci.flags = 0; // presumably any is supported
- tmp_img_ci.imageType = VK_IMAGE_TYPE_2D; // any is supported
- tmp_img_ci.format = VK_FORMAT_R8G8B8A8_UNORM; // has mandatory support for all usages
- tmp_img_ci.extent = {64, 64, 1}; // limit is 256 for 3D, or 4096
- tmp_img_ci.mipLevels = 1; // any is supported
- tmp_img_ci.arrayLayers = 1; // limit is 256
- tmp_img_ci.samples = VK_SAMPLE_COUNT_1_BIT; // needs to be 1 if TILING_LINEAR
- // if tiling is VK_IMAGE_TILING_LINEAR, imageType must be 2D, usage must be TRANSFER, and levels, layers, and samples must all be 1
- tmp_img_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
- tmp_img_ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT; // depends on format
- tmp_img_ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
- const VkImageCreateInfo safe_image_ci = tmp_img_ci;
-
- ASSERT_VK_SUCCESS(GPDIFPHelper(gpu(), &safe_image_ci));
-
- {
- VkImageCreateInfo image_ci = safe_image_ci;
- image_ci.sharingMode = VK_SHARING_MODE_CONCURRENT;
- image_ci.queueFamilyIndexCount = 2;
- image_ci.pQueueFamilyIndices = nullptr;
- CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-sharingMode-00941");
- }
-
- {
- VkImageCreateInfo image_ci = safe_image_ci;
- image_ci.sharingMode = VK_SHARING_MODE_CONCURRENT;
- image_ci.queueFamilyIndexCount = 1;
- const uint32_t queue_family = 0;
- image_ci.pQueueFamilyIndices = &queue_family;
- CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-sharingMode-00942");
- }
-
- {
- VkImageCreateInfo image_ci = safe_image_ci;
- image_ci.format = VK_FORMAT_UNDEFINED;
- CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-format-00943");
- }
-
- {
- VkImageCreateInfo image_ci = safe_image_ci;
- image_ci.flags = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;
- image_ci.arrayLayers = 6;
- image_ci.imageType = VK_IMAGE_TYPE_1D;
- m_errorMonitor->SetUnexpectedError("VUID-VkImageCreateInfo-imageType-00954");
- image_ci.extent = {64, 1, 1};
- CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-flags-00949");
-
- image_ci = safe_image_ci;
- image_ci.flags = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;
- image_ci.imageType = VK_IMAGE_TYPE_3D;
- m_errorMonitor->SetUnexpectedError("VUID-VkImageCreateInfo-imageType-00954");
- image_ci.extent = {4, 4, 4};
- CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-flags-00949");
-
- image_ci = safe_image_ci;
- image_ci.flags = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;
- image_ci.imageType = VK_IMAGE_TYPE_2D;
- image_ci.extent = {8, 6, 1};
- image_ci.arrayLayers = 6;
- CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-imageType-00954");
-
- image_ci = safe_image_ci;
- image_ci.flags = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;
- image_ci.imageType = VK_IMAGE_TYPE_2D;
- image_ci.extent = {8, 8, 1};
- image_ci.arrayLayers = 4;
- CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-imageType-00954");
- }
-
- {
- VkImageCreateInfo image_ci = safe_image_ci;
- image_ci.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT; // always has 4 samples support
- image_ci.samples = VK_SAMPLE_COUNT_4_BIT;
- image_ci.imageType = VK_IMAGE_TYPE_3D;
- image_ci.extent = {4, 4, 4};
- CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-samples-02257");
-
- image_ci = safe_image_ci;
- image_ci.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT; // always has 4 samples support
- image_ci.samples = VK_SAMPLE_COUNT_4_BIT;
- image_ci.flags = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;
- image_ci.arrayLayers = 6;
- CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-samples-02257");
-
- image_ci = safe_image_ci;
- image_ci.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT; // always has 4 samples support
- image_ci.samples = VK_SAMPLE_COUNT_4_BIT;
- image_ci.tiling = VK_IMAGE_TILING_LINEAR;
- CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-samples-02257");
-
- image_ci = safe_image_ci;
- image_ci.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT; // always has 4 samples support
- image_ci.samples = VK_SAMPLE_COUNT_4_BIT;
- image_ci.mipLevels = 2;
- CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-samples-02257");
- }
-
- {
- VkImageCreateInfo image_ci = safe_image_ci;
- image_ci.usage = VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
- image_ci.usage |= VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
- CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-usage-00963");
-
- image_ci.usage = VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT;
- CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-usage-00966");
-
- image_ci.usage = VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT;
- image_ci.usage |= VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-usage-00963");
- CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-usage-00966");
- }
-
- {
- VkImageCreateInfo image_ci = safe_image_ci;
- image_ci.flags = VK_IMAGE_CREATE_SPARSE_BINDING_BIT;
- CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-flags-00969");
- }
-
- // InitialLayout not VK_IMAGE_LAYOUT_UNDEFINED or VK_IMAGE_LAYOUT_PREINITIALIZED
- {
- VkImageCreateInfo image_ci = safe_image_ci;
- image_ci.initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
- CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-initialLayout-00993");
- }
-}
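-
- // A minimal sketch (an assumed helper, not the layer's implementation) of the CUBE_COMPATIBLE requirements
- // that the flags/imageType cases above probe: a cube-compatible image must be 2D, square, and have at
- // least 6 array layers.
- static bool CubeCompatibleImageIsLegal(const VkImageCreateInfo &ci) {
-     if ((ci.flags & VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT) == 0) return true;
-     if (ci.imageType != VK_IMAGE_TYPE_2D) return false;  // VUID-VkImageCreateInfo-flags-00949
-     return ci.extent.width == ci.extent.height && ci.arrayLayers >= 6;  // VUID-VkImageCreateInfo-imageType-00954
- }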
-
-TEST_F(VkLayerTest, CreateImageMinLimitsViolation) {
- TEST_DESCRIPTION("Create invalid image with invalid parameters violation minimum limit, such as being zero.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- VkImage null_image; // throwaway target for all the vkCreateImage
-
- VkImageCreateInfo tmp_img_ci = {};
- tmp_img_ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- tmp_img_ci.flags = 0; // presumably any is supported
- tmp_img_ci.imageType = VK_IMAGE_TYPE_2D; // any is supported
- tmp_img_ci.format = VK_FORMAT_R8G8B8A8_UNORM; // has mandatory support for all usages
- tmp_img_ci.extent = {1, 1, 1}; // limit is 256 for 3D, or 4096
- tmp_img_ci.mipLevels = 1; // any is supported
- tmp_img_ci.arrayLayers = 1; // limit is 256
- tmp_img_ci.samples = VK_SAMPLE_COUNT_1_BIT; // needs to be 1 if TILING_LINEAR
- // if tiling is VK_IMAGE_TILING_LINEAR, imageType must be 2D, usage must be TRANSFER, and levels, layers, and samples must all be 1
- tmp_img_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
- tmp_img_ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT; // depends on format
- tmp_img_ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
- const VkImageCreateInfo safe_image_ci = tmp_img_ci;
-
- enum Dimension { kWidth = 0x1, kHeight = 0x2, kDepth = 0x4 };
-
- for (underlying_type<Dimension>::type bad_dimensions = 0x1; bad_dimensions < 0x8; ++bad_dimensions) {
- VkExtent3D extent = {1, 1, 1};
-
- if (bad_dimensions & kWidth) {
- extent.width = 0;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-extent-00944");
- }
-
- if (bad_dimensions & kHeight) {
- extent.height = 0;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-extent-00945");
- }
-
- if (bad_dimensions & kDepth) {
- extent.depth = 0;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-extent-00946");
- }
-
- VkImageCreateInfo bad_image_ci = safe_image_ci;
- bad_image_ci.imageType = VK_IMAGE_TYPE_3D; // has to be 3D otherwise it might trigger the non-1 error instead
- bad_image_ci.extent = extent;
-
- vkCreateImage(m_device->device(), &bad_image_ci, NULL, &null_image);
-
- m_errorMonitor->VerifyFound();
- }
-
- {
- VkImageCreateInfo bad_image_ci = safe_image_ci;
- bad_image_ci.mipLevels = 0;
- CreateImageTest(*this, &bad_image_ci, "VUID-VkImageCreateInfo-mipLevels-00947");
- }
-
- {
- VkImageCreateInfo bad_image_ci = safe_image_ci;
- bad_image_ci.arrayLayers = 0;
- CreateImageTest(*this, &bad_image_ci, "VUID-VkImageCreateInfo-arrayLayers-00948");
- }
-
- {
- VkImageCreateInfo bad_image_ci = safe_image_ci;
- bad_image_ci.flags = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;
- bad_image_ci.arrayLayers = 5;
- CreateImageTest(*this, &bad_image_ci, "VUID-VkImageCreateInfo-imageType-00954");
-
- bad_image_ci.arrayLayers = 6;
- bad_image_ci.extent = {64, 63, 1};
- CreateImageTest(*this, &bad_image_ci, "VUID-VkImageCreateInfo-imageType-00954");
- }
-
- {
- VkImageCreateInfo bad_image_ci = safe_image_ci;
- bad_image_ci.imageType = VK_IMAGE_TYPE_1D;
- bad_image_ci.extent = {64, 2, 1};
- CreateImageTest(*this, &bad_image_ci, "VUID-VkImageCreateInfo-imageType-00956");
-
- bad_image_ci.imageType = VK_IMAGE_TYPE_1D;
- bad_image_ci.extent = {64, 1, 2};
- CreateImageTest(*this, &bad_image_ci, "VUID-VkImageCreateInfo-imageType-00956");
-
- bad_image_ci.imageType = VK_IMAGE_TYPE_2D;
- bad_image_ci.extent = {64, 64, 2};
- CreateImageTest(*this, &bad_image_ci, "VUID-VkImageCreateInfo-imageType-00957");
-
- bad_image_ci.imageType = VK_IMAGE_TYPE_2D;
- bad_image_ci.flags = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;
- bad_image_ci.arrayLayers = 6;
- bad_image_ci.extent = {64, 64, 2};
- CreateImageTest(*this, &bad_image_ci, "VUID-VkImageCreateInfo-imageType-00957");
- }
-
- {
- VkImageCreateInfo bad_image_ci = safe_image_ci;
- bad_image_ci.imageType = VK_IMAGE_TYPE_3D;
- bad_image_ci.arrayLayers = 2;
- CreateImageTest(*this, &bad_image_ci, "VUID-VkImageCreateInfo-imageType-00961");
- }
-}
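-
- // A minimal sketch, restating the dimensionality rules probed above under the usual reading of
- // VUIDs 00956/00957/00961: a 1D image needs height == depth == 1, a 2D image needs depth == 1,
- // and a 3D image needs arrayLayers == 1.
- static bool ExtentMatchesImageType(const VkImageCreateInfo &ci) {
-     switch (ci.imageType) {
-         case VK_IMAGE_TYPE_1D:
-             return ci.extent.height == 1 && ci.extent.depth == 1;  // imageType-00956
-         case VK_IMAGE_TYPE_2D:
-             return ci.extent.depth == 1;  // imageType-00957
-         case VK_IMAGE_TYPE_3D:
-             return ci.arrayLayers == 1;  // imageType-00961
-         default:
-             return false;
-     }
- }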
-
-TEST_F(VkLayerTest, CreateImageMaxLimitsViolation) {
- TEST_DESCRIPTION("Create invalid image with invalid parameters exceeding physical device limits.");
-
- // Check for VK_KHR_get_physical_device_properties2
- bool push_physical_device_properties_2_support =
- InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (push_physical_device_properties_2_support) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- }
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- bool push_fragment_density_support = false;
-
- if (push_physical_device_properties_2_support) {
- push_fragment_density_support = DeviceExtensionSupported(gpu(), nullptr, VK_EXT_FRAGMENT_DENSITY_MAP_EXTENSION_NAME);
- if (push_fragment_density_support) m_device_extension_names.push_back(VK_EXT_FRAGMENT_DENSITY_MAP_EXTENSION_NAME);
- }
-
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, 0));
-
- VkImageCreateInfo tmp_img_ci = {};
- tmp_img_ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- tmp_img_ci.flags = 0; // presumably any is supported
- tmp_img_ci.imageType = VK_IMAGE_TYPE_2D; // any is supported
- tmp_img_ci.format = VK_FORMAT_R8G8B8A8_UNORM; // has mandatory support for all usages
- tmp_img_ci.extent = {1, 1, 1}; // limit is 256 for 3D, or 4096
- tmp_img_ci.mipLevels = 1; // any is supported
- tmp_img_ci.arrayLayers = 1; // limit is 256
- tmp_img_ci.samples = VK_SAMPLE_COUNT_1_BIT; // needs to be 1 if TILING_LINEAR
- // if tiling is VK_IMAGE_TILING_LINEAR, imageType must be 2D, usage must be TRANSFER, and levels, layers, and samples must all be 1
- tmp_img_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
- tmp_img_ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT; // depends on format
- tmp_img_ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
- const VkImageCreateInfo safe_image_ci = tmp_img_ci;
-
- ASSERT_VK_SUCCESS(GPDIFPHelper(gpu(), &safe_image_ci));
-
- const VkPhysicalDeviceLimits &dev_limits = m_device->props.limits;
-
- {
- VkImageCreateInfo image_ci = safe_image_ci;
- image_ci.extent = {8, 8, 1};
- image_ci.mipLevels = 4 + 1; // max is 4 = floor(log2(8)) + 1
- CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-mipLevels-00958");
-
- image_ci.extent = {8, 15, 1};
- image_ci.mipLevels = 4 + 1; // 4 = floor(log2(15)) + 1
- CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-mipLevels-00958");
- }
-
- {
- VkImageCreateInfo image_ci = safe_image_ci;
- image_ci.tiling = VK_IMAGE_TILING_LINEAR;
- image_ci.extent = {64, 64, 1};
- image_ci.format = FindFormatLinearWithoutMips(gpu(), image_ci);
- image_ci.mipLevels = 2;
-
- if (image_ci.format != VK_FORMAT_UNDEFINED) {
- CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-mipLevels-02255");
- } else {
- printf("%s Cannot find a format to test maxMipLevels limit; skipping part of test.\n", kSkipPrefix);
- }
- }
-
- {
- VkImageCreateInfo image_ci = safe_image_ci;
-
- VkImageFormatProperties img_limits;
- ASSERT_VK_SUCCESS(GPDIFPHelper(gpu(), &image_ci, &img_limits));
-
- if (img_limits.maxArrayLayers != UINT32_MAX) {
- image_ci.arrayLayers = img_limits.maxArrayLayers + 1;
- CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-arrayLayers-02256");
- } else {
- printf("%s VkImageFormatProperties::maxArrayLayers is already UINT32_MAX; skipping part of test.\n", kSkipPrefix);
- }
- }
-
- {
- VkImageCreateInfo image_ci = safe_image_ci;
- bool found = FindFormatWithoutSamples(gpu(), image_ci);
-
- if (found) {
- CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-samples-02258");
- } else {
- printf("%s Could not find a format with some unsupported samples; skipping part of test.\n", kSkipPrefix);
- }
- }
-
- {
- VkImageCreateInfo image_ci = safe_image_ci;
- image_ci.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT; // (any attachment bit)
-
- VkImageFormatProperties img_limits;
- ASSERT_VK_SUCCESS(GPDIFPHelper(gpu(), &image_ci, &img_limits));
-
- if (dev_limits.maxFramebufferWidth != UINT32_MAX) {
- image_ci.extent = {dev_limits.maxFramebufferWidth + 1, 64, 1};
- CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-usage-00964");
- } else {
- printf("%s VkPhysicalDeviceLimits::maxFramebufferWidth is already UINT32_MAX; skipping part of test.\n", kSkipPrefix);
- }
-
- if (dev_limits.maxFramebufferHeight != UINT32_MAX) {
- image_ci.usage = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT; // try different one too
- image_ci.extent = {64, dev_limits.maxFramebufferHeight + 1, 1};
- CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-usage-00965");
- } else {
- printf("%s VkPhysicalDeviceLimits::maxFramebufferHeight is already UINT32_MAX; skipping part of test.\n", kSkipPrefix);
- }
- }
-
- {
- if (!push_fragment_density_support) {
- printf("%s VK_EXT_fragment_density_map Extension not supported, skipping tests\n", kSkipPrefix);
- } else {
- VkImageCreateInfo image_ci = safe_image_ci;
- image_ci.usage = VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT;
- VkImageFormatProperties img_limits;
- ASSERT_VK_SUCCESS(GPDIFPHelper(gpu(), &image_ci, &img_limits));
-
- image_ci.extent = {dev_limits.maxFramebufferWidth + 1, 64, 1};
- CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-usage-02559");
-
- image_ci.extent = {64, dev_limits.maxFramebufferHeight + 1, 1};
- CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-usage-02560");
- }
- }
-}
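-
- // A minimal sketch, assuming only the spec formula, of the maxMipLevels limit exercised above: a full
- // mip chain has floor(log2(max(width, height, depth))) + 1 levels, so both {8, 8, 1} and {8, 15, 1}
- // cap out at 4 and the requests for 5 levels trigger VUID-VkImageCreateInfo-mipLevels-00958.
- static uint32_t FullMipChainLevels(const VkExtent3D &extent) {
-     uint32_t max_dim = extent.width;
-     if (extent.height > max_dim) max_dim = extent.height;
-     if (extent.depth > max_dim) max_dim = extent.depth;
-     uint32_t levels = 1;
-     while (max_dim > 1) {
-         max_dim >>= 1;
-         ++levels;
-     }
-     return levels;
- }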
-
-TEST_F(VkLayerTest, MultiplaneImageSamplerConversionMismatch) {
- TEST_DESCRIPTION(
- "Create sampler with ycbcr conversion and use with an image created without ycrcb conversion or immutable sampler");
-
- // Enable KHR multiplane req'd extensions
- bool mp_extensions = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
- VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION);
- if (mp_extensions) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- }
- SetTargetApiVersion(VK_API_VERSION_1_1);
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME);
- mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
- mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
- if (mp_extensions) {
- m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
- } else {
- printf("%s test requires KHR multiplane extensions, not available. Skipping.\n", kSkipPrefix);
- return;
- }
-
- // Enable Ycbcr Conversion Features
- VkPhysicalDeviceSamplerYcbcrConversionFeatures ycbcr_features = {};
- ycbcr_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES;
- ycbcr_features.samplerYcbcrConversion = VK_TRUE;
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &ycbcr_features));
-
- PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionFunction = nullptr;
- PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionFunction = nullptr;
-
- if (DeviceValidationVersion() >= VK_API_VERSION_1_1) {
- vkCreateSamplerYcbcrConversionFunction = vkCreateSamplerYcbcrConversion;
- vkDestroySamplerYcbcrConversionFunction = vkDestroySamplerYcbcrConversion;
- } else {
- vkCreateSamplerYcbcrConversionFunction =
- (PFN_vkCreateSamplerYcbcrConversionKHR)vkGetDeviceProcAddr(m_device->handle(), "vkCreateSamplerYcbcrConversionKHR");
- vkDestroySamplerYcbcrConversionFunction =
- (PFN_vkDestroySamplerYcbcrConversionKHR)vkGetDeviceProcAddr(m_device->handle(), "vkDestroySamplerYcbcrConversionKHR");
- }
-
- if (!vkCreateSamplerYcbcrConversionFunction || !vkDestroySamplerYcbcrConversionFunction) {
- printf("%s Did not find required device extension %s; test skipped.\n", kSkipPrefix,
- VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitViewport());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- const VkImageCreateInfo ci = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
- NULL,
- 0,
- VK_IMAGE_TYPE_2D,
- VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR,
- {128, 128, 1},
- 1,
- 1,
- VK_SAMPLE_COUNT_1_BIT,
- VK_IMAGE_TILING_LINEAR,
- VK_IMAGE_USAGE_SAMPLED_BIT,
- VK_SHARING_MODE_EXCLUSIVE,
- 0,
- NULL,
- VK_IMAGE_LAYOUT_UNDEFINED};
-
- // Verify formats
- bool supported = ImageFormatAndFeaturesSupported(instance(), gpu(), ci, VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT);
- if (!supported) {
- printf("%s Multiplane image format not supported. Skipping test.\n", kSkipPrefix);
- return;
- }
-
- // Create Ycbcr conversion
- VkSamplerYcbcrConversionCreateInfo ycbcr_create_info = {VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO,
- NULL,
- VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR,
- VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY,
- VK_SAMPLER_YCBCR_RANGE_ITU_FULL,
- {VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY,
- VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY},
- VK_CHROMA_LOCATION_COSITED_EVEN,
- VK_CHROMA_LOCATION_COSITED_EVEN,
- VK_FILTER_NEAREST,
- false};
- VkSamplerYcbcrConversion conversions[2];
- vkCreateSamplerYcbcrConversionFunction(m_device->handle(), &ycbcr_create_info, nullptr, &conversions[0]);
- ycbcr_create_info.components.r = VK_COMPONENT_SWIZZLE_ZERO; // Just anything different than above
- vkCreateSamplerYcbcrConversionFunction(m_device->handle(), &ycbcr_create_info, nullptr, &conversions[1]);
-
- VkSamplerYcbcrConversionInfo ycbcr_info = {};
- ycbcr_info.sType = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO;
- ycbcr_info.conversion = conversions[0];
-
- // Create a sampler using conversion
- VkSamplerCreateInfo sci = SafeSaneSamplerCreateInfo();
- sci.pNext = &ycbcr_info;
- // Create two samplers with two different conversions, so that one of them will mismatch.
- // The second sampler is the one made to fail, which lets us check whether the log reports the second sampler rather than the first.
- VkSampler samplers[2];
- VkResult err = vkCreateSampler(m_device->device(), &sci, NULL, &samplers[0]);
- ASSERT_VK_SUCCESS(err);
- ycbcr_info.conversion = conversions[1]; // Need two samplers with different conversions
- err = vkCreateSampler(m_device->device(), &sci, NULL, &samplers[1]);
- ASSERT_VK_SUCCESS(err);
-
- // Create an image without a Ycbcr conversion
- VkImageObj mpimage(m_device);
- mpimage.init(&ci);
-
- VkImageView view;
- VkImageViewCreateInfo ivci = {};
- ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
- ycbcr_info.conversion = conversions[0]; // Need two samplers with different conversions
- ivci.pNext = &ycbcr_info;
- ivci.image = mpimage.handle();
- ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
- ivci.format = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR;
- ivci.subresourceRange.layerCount = 1;
- ivci.subresourceRange.baseMipLevel = 0;
- ivci.subresourceRange.levelCount = 1;
- ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- vkCreateImageView(m_device->device(), &ivci, nullptr, &view);
-
- // Use the image and sampler together in a descriptor set
- OneOffDescriptorSet descriptor_set(m_device,
- {
- {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 2, VK_SHADER_STAGE_ALL, samplers},
- });
-
- // Use the same image view twice, using the same sampler, with the *second* mismatched with the *second* immutable sampler
- VkDescriptorImageInfo image_infos[2];
- image_infos[0] = {};
- image_infos[0].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
- image_infos[0].imageView = view;
- image_infos[0].sampler = samplers[0];
- image_infos[1] = image_infos[0];
-
- // Update the descriptor set expecting to get an error
- VkWriteDescriptorSet descriptor_write = {};
- descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
- descriptor_write.dstSet = descriptor_set.set_;
- descriptor_write.dstBinding = 0;
- descriptor_write.descriptorCount = 2;
- descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
- descriptor_write.pImageInfo = image_infos;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-descriptorType-01948");
- vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
- m_errorMonitor->VerifyFound();
-
- // pImmutableSamplers = nullptr causes an error, VUID-VkWriteDescriptorSet-descriptorType-02738,
- // because if the pNext chains a VkSamplerYcbcrConversionInfo, the sampler has to be an immutable sampler.
- OneOffDescriptorSet descriptor_set_1947(m_device,
- {
- {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
- });
- descriptor_write.dstSet = descriptor_set_1947.set_;
- descriptor_write.descriptorCount = 1;
- descriptor_write.pImageInfo = &image_infos[0];
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-descriptorType-02738");
- vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
- m_errorMonitor->VerifyFound();
-
- vkDestroySamplerYcbcrConversionFunction(m_device->device(), conversions[0], nullptr);
- vkDestroySamplerYcbcrConversionFunction(m_device->device(), conversions[1], nullptr);
- vkDestroyImageView(m_device->device(), view, NULL);
- vkDestroySampler(m_device->device(), samplers[0], nullptr);
- vkDestroySampler(m_device->device(), samplers[1], nullptr);
-}
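-
- // A minimal sketch of the matching rule behind the first error above, assuming a handle-level reading of
- // VUID-VkWriteDescriptorSet-descriptorType-01948: when a combined image sampler binding uses immutable
- // samplers created with a Ycbcr conversion, the conversion chained into the written image view must be
- // the very same conversion object; conversions[0] versus conversions[1] above is what mismatches.
- static bool YcbcrConversionsMatch(VkSamplerYcbcrConversion view_conversion,
-                                   VkSamplerYcbcrConversion immutable_sampler_conversion) {
-     return view_conversion == immutable_sampler_conversion;
- }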
-
-TEST_F(VkLayerTest, DepthStencilImageViewWithColorAspectBitError) {
- // Create a single image descriptor and cause it to first hit an error due to using a DS format,
- // then cause it to hit an error due to COLOR_BIT not being set in the aspect.
- // The image format check comes second in validation, so we trigger it first; then, when we cause
- // the aspect failure next, the bad format check will be preempted.
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "Combination depth/stencil image formats can have only the ");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- auto depth_format = FindSupportedDepthStencilFormat(gpu());
- if (!depth_format) {
- printf("%s Couldn't find depth stencil format.\n", kSkipPrefix);
- return;
- }
-
- VkImageObj image_bad(m_device);
- VkImageObj image_good(m_device);
- // One bad format and one good format for Color attachment
- const VkFormat tex_format_bad = depth_format;
- const VkFormat tex_format_good = VK_FORMAT_B8G8R8A8_UNORM;
- const int32_t tex_width = 32;
- const int32_t tex_height = 32;
-
- VkImageCreateInfo image_create_info = {};
- image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- image_create_info.pNext = NULL;
- image_create_info.imageType = VK_IMAGE_TYPE_2D;
- image_create_info.format = tex_format_bad;
- image_create_info.extent.width = tex_width;
- image_create_info.extent.height = tex_height;
- image_create_info.extent.depth = 1;
- image_create_info.mipLevels = 1;
- image_create_info.arrayLayers = 1;
- image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
- image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
- image_create_info.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
- image_create_info.flags = 0;
-
- image_bad.init(&image_create_info);
-
- image_create_info.format = tex_format_good;
- image_create_info.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
- image_good.init(&image_create_info);
-
- VkImageViewCreateInfo image_view_create_info = {};
- image_view_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
- image_view_create_info.image = image_bad.handle();
- image_view_create_info.viewType = VK_IMAGE_VIEW_TYPE_2D;
- image_view_create_info.format = tex_format_bad;
- image_view_create_info.subresourceRange.baseArrayLayer = 0;
- image_view_create_info.subresourceRange.baseMipLevel = 0;
- image_view_create_info.subresourceRange.layerCount = 1;
- image_view_create_info.subresourceRange.levelCount = 1;
- image_view_create_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT;
-
- VkImageView view;
- vkCreateImageView(m_device->device(), &image_view_create_info, NULL, &view);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, ExtensionNotEnabled) {
- TEST_DESCRIPTION("Validate that using an API from an unenabled extension returns an error");
-
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- } else {
- printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
- VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- // Required extensions except VK_KHR_GET_MEMORY_REQUIREMENTS_2 -- to create the needed error
- std::vector<const char *> required_device_extensions = {VK_KHR_MAINTENANCE1_EXTENSION_NAME, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME,
- VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME};
- for (auto dev_ext : required_device_extensions) {
- if (DeviceExtensionSupported(gpu(), nullptr, dev_ext)) {
- m_device_extension_names.push_back(dev_ext);
- } else {
- printf("%s Did not find required device extension %s; skipped.\n", kSkipPrefix, dev_ext);
- break;
- }
- }
-
- // Need to ignore this error to get to the one we're testing
- m_errorMonitor->SetUnexpectedError("VUID-vkCreateDevice-ppEnabledExtensionNames-01387");
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- // Find address of extension API
- auto vkCreateSamplerYcbcrConversionKHR =
- (PFN_vkCreateSamplerYcbcrConversionKHR)vkGetDeviceProcAddr(m_device->handle(), "vkCreateSamplerYcbcrConversionKHR");
- if (vkCreateSamplerYcbcrConversionKHR == nullptr) {
- printf("%s VK_KHR_sampler_ycbcr_conversion not supported by device; skipped.\n", kSkipPrefix);
- return;
- }
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "UNASSIGNED-GeneralParameterError-ExtensionNotEnabled");
- VkSamplerYcbcrConversionCreateInfo ycbcr_info = {VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO,
- NULL,
- VK_FORMAT_UNDEFINED,
- VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY,
- VK_SAMPLER_YCBCR_RANGE_ITU_FULL,
- {VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY,
- VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY},
- VK_CHROMA_LOCATION_COSITED_EVEN,
- VK_CHROMA_LOCATION_COSITED_EVEN,
- VK_FILTER_NEAREST,
- false};
- VkSamplerYcbcrConversion conversion;
- vkCreateSamplerYcbcrConversionKHR(m_device->handle(), &ycbcr_info, nullptr, &conversion);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, InvalidCreateBufferSize) {
- TEST_DESCRIPTION("Attempt to create VkBuffer with size of zero");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- VkBufferCreateInfo info = {};
- info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
- info.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
- info.size = 0;
- CreateBufferTest(*this, &info, "VUID-VkBufferCreateInfo-size-00912");
-}
-
-TEST_F(VkLayerTest, DuplicateValidPNextStructures) {
- TEST_DESCRIPTION("Create a pNext chain containing valid structures, but with a duplicate structure type");
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- if (DeviceExtensionSupported(gpu(), nullptr, VK_NV_DEDICATED_ALLOCATION_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_NV_DEDICATED_ALLOCATION_EXTENSION_NAME);
- } else {
- printf("%s VK_NV_dedicated_allocation extension not supported, skipping test\n", kSkipPrefix);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- // Create two pNext structures which by themselves would be valid
- VkDedicatedAllocationBufferCreateInfoNV dedicated_buffer_create_info = {};
- VkDedicatedAllocationBufferCreateInfoNV dedicated_buffer_create_info_2 = {};
- dedicated_buffer_create_info.sType = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV;
- dedicated_buffer_create_info.pNext = &dedicated_buffer_create_info_2;
- dedicated_buffer_create_info.dedicatedAllocation = VK_TRUE;
-
- dedicated_buffer_create_info_2.sType = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV;
- dedicated_buffer_create_info_2.pNext = nullptr;
- dedicated_buffer_create_info_2.dedicatedAllocation = VK_TRUE;
-
- uint32_t queue_family_index = 0;
- VkBufferCreateInfo buffer_create_info = {};
- buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
- buffer_create_info.pNext = &dedicated_buffer_create_info;
- buffer_create_info.size = 1024;
- buffer_create_info.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
- buffer_create_info.queueFamilyIndexCount = 1;
- buffer_create_info.pQueueFamilyIndices = &queue_family_index;
-
- CreateBufferTest(*this, &buffer_create_info, "chain contains duplicate structure types");
-}
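-
- // A minimal sketch, assuming only the standard VkBaseInStructure layout, of the duplicate detection the
- // test above triggers: walk the pNext chain and flag any sType that appears more than once.
- static bool PNextChainHasDuplicateSType(const void *pNext) {
-     for (auto outer = reinterpret_cast<const VkBaseInStructure *>(pNext); outer; outer = outer->pNext) {
-         for (auto inner = outer->pNext; inner; inner = inner->pNext) {
-             if (inner->sType == outer->sType) return true;
-         }
-     }
-     return false;
- }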
-
-TEST_F(VkLayerTest, DedicatedAllocation) {
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- } else {
- printf("%s Dedicated allocation extension not supported, skipping test\n", kSkipPrefix);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- VkMemoryPropertyFlags mem_flags = 0;
- const VkDeviceSize resource_size = 1024;
- auto buffer_info = VkBufferObj::create_info(resource_size, VK_BUFFER_USAGE_TRANSFER_DST_BIT);
- VkBufferObj buffer;
- buffer.init_no_mem(*m_device, buffer_info);
- auto buffer_alloc_info = vk_testing::DeviceMemory::get_resource_alloc_info(*m_device, buffer.memory_requirements(), mem_flags);
- auto buffer_dedicated_info = lvl_init_struct<VkMemoryDedicatedAllocateInfoKHR>();
- buffer_dedicated_info.buffer = buffer.handle();
- buffer_alloc_info.pNext = &buffer_dedicated_info;
- vk_testing::DeviceMemory dedicated_buffer_memory;
- dedicated_buffer_memory.init(*m_device, buffer_alloc_info);
-
- VkBufferObj wrong_buffer;
- wrong_buffer.init_no_mem(*m_device, buffer_info);
-
- // Bind with wrong buffer
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindBufferMemory-memory-01508");
- vkBindBufferMemory(m_device->handle(), wrong_buffer.handle(), dedicated_buffer_memory.handle(), 0);
- m_errorMonitor->VerifyFound();
-
- // Bind with non-zero offset (same VUID)
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkBindBufferMemory-memory-01508"); // offset must be zero
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkBindBufferMemory-size-01037"); // offset pushes us past size
- auto offset = buffer.memory_requirements().alignment;
- vkBindBufferMemory(m_device->handle(), buffer.handle(), dedicated_buffer_memory.handle(), offset);
- m_errorMonitor->VerifyFound();
-
- // Bind correctly (depends on the "skip" above)
- m_errorMonitor->ExpectSuccess();
- vkBindBufferMemory(m_device->handle(), buffer.handle(), dedicated_buffer_memory.handle(), 0);
- m_errorMonitor->VerifyNotFound();
-
- // And for images...
- VkImageObj image(m_device);
- VkImageObj wrong_image(m_device);
- auto image_info = VkImageObj::create_info();
- image_info.extent.width = resource_size;
- image_info.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
- image_info.format = VK_FORMAT_R8G8B8A8_UNORM;
- image.init_no_mem(*m_device, image_info);
- wrong_image.init_no_mem(*m_device, image_info);
-
- auto image_dedicated_info = lvl_init_struct<VkMemoryDedicatedAllocateInfoKHR>();
- image_dedicated_info.image = image.handle();
- auto image_alloc_info = vk_testing::DeviceMemory::get_resource_alloc_info(*m_device, image.memory_requirements(), mem_flags);
- image_alloc_info.pNext = &image_dedicated_info;
- vk_testing::DeviceMemory dedicated_image_memory;
- dedicated_image_memory.init(*m_device, image_alloc_info);
-
- // Bind with wrong image
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindImageMemory-memory-01509");
- vkBindImageMemory(m_device->handle(), wrong_image.handle(), dedicated_image_memory.handle(), 0);
- m_errorMonitor->VerifyFound();
-
- // Bind with non-zero offset (same VUID)
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkBindImageMemory-memory-01509"); // offset must be zero
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkBindImageMemory-size-01049"); // offset pushes us past size
- auto image_offset = image.memory_requirements().alignment;
- vkBindImageMemory(m_device->handle(), image.handle(), dedicated_image_memory.handle(), image_offset);
- m_errorMonitor->VerifyFound();
-
- // Bind correctly (depends on the "skip" above)
- m_errorMonitor->ExpectSuccess();
- vkBindImageMemory(m_device->handle(), image.handle(), dedicated_image_memory.handle(), 0);
- m_errorMonitor->VerifyNotFound();
-}
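-
- // A minimal sketch (an assumed restatement, not the layer's code) of the dedicated-allocation bind rules
- // the negative cases above rely on, per VUID-vkBindBufferMemory-memory-01508 and
- // VUID-vkBindImageMemory-memory-01509: memory allocated with VkMemoryDedicatedAllocateInfo may only be
- // bound to the exact resource named in that structure, and only at offset zero.
- static bool DedicatedBufferBindIsLegal(VkBuffer dedicated_buffer, VkBuffer bound_buffer, VkDeviceSize memory_offset) {
-     return bound_buffer == dedicated_buffer && memory_offset == 0;
- }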
-
-TEST_F(VkLayerTest, CornerSampledImageNV) {
- TEST_DESCRIPTION("Test VK_NV_corner_sampled_image.");
-
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- } else {
- printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
- VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- std::array<const char *, 1> required_device_extensions = {{VK_NV_CORNER_SAMPLED_IMAGE_EXTENSION_NAME}};
- for (auto device_extension : required_device_extensions) {
- if (DeviceExtensionSupported(gpu(), nullptr, device_extension)) {
- m_device_extension_names.push_back(device_extension);
- } else {
- printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, device_extension);
- return;
- }
- }
-
- PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
- (PFN_vkGetPhysicalDeviceFeatures2KHR)vkGetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
- ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
-
- // Create a device that enables the corner-sampled image feature
- auto corner_sampled_image_features = lvl_init_struct<VkPhysicalDeviceCornerSampledImageFeaturesNV>();
- auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&corner_sampled_image_features);
- vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
-
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
-
- VkImageCreateInfo image_create_info = {};
- image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- image_create_info.pNext = NULL;
- image_create_info.imageType = VK_IMAGE_TYPE_1D;
- image_create_info.format = VK_FORMAT_R8G8B8A8_UNORM;
- image_create_info.extent.width = 2;
- image_create_info.extent.height = 1;
- image_create_info.extent.depth = 1;
- image_create_info.mipLevels = 1;
- image_create_info.arrayLayers = 1;
- image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
- image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
- image_create_info.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
- image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
- image_create_info.queueFamilyIndexCount = 0;
- image_create_info.pQueueFamilyIndices = NULL;
- image_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
- image_create_info.flags = VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV;
-
- // image type must be 2D or 3D
- CreateImageTest(*this, &image_create_info, "VUID-VkImageCreateInfo-flags-02050");
-
- // cube/depth not supported
- image_create_info.imageType = VK_IMAGE_TYPE_2D;
- image_create_info.extent.height = 2;
- image_create_info.format = VK_FORMAT_D24_UNORM_S8_UINT;
- CreateImageTest(*this, &image_create_info, "VUID-VkImageCreateInfo-flags-02051");
-
- image_create_info.format = VK_FORMAT_R8G8B8A8_UNORM;
-
- // 2D width/height must be > 1
- image_create_info.imageType = VK_IMAGE_TYPE_2D;
- image_create_info.extent.height = 1;
- CreateImageTest(*this, &image_create_info, "VUID-VkImageCreateInfo-flags-02052");
-
- // 3D width/height/depth must be > 1
- image_create_info.imageType = VK_IMAGE_TYPE_3D;
- image_create_info.extent.height = 2;
- CreateImageTest(*this, &image_create_info, "VUID-VkImageCreateInfo-flags-02053");
-
- image_create_info.imageType = VK_IMAGE_TYPE_2D;
-
- // Valid # of mip levels
- image_create_info.extent = {7, 7, 1};
- image_create_info.mipLevels = 3; // 3 = ceil(log2(7))
- CreateImageTest(*this, &image_create_info);
-
- image_create_info.extent = {8, 8, 1};
- image_create_info.mipLevels = 3; // 3 = ceil(log2(8))
- CreateImageTest(*this, &image_create_info);
-
- image_create_info.extent = {9, 9, 1};
- image_create_info.mipLevels = 3; // max is 4 = ceil(log2(9))
- CreateImageTest(*this, &image_create_info);
-
- // Invalid # of mip levels
- image_create_info.extent = {8, 8, 1};
- image_create_info.mipLevels = 4; // max is 3 = ceil(log2(8))
- CreateImageTest(*this, &image_create_info, "VUID-VkImageCreateInfo-mipLevels-00958");
-}
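-
- // A minimal sketch of the corner-sampled mip count used in the comments above, assuming the
- // VK_NV_corner_sampled_image rule: the full chain has ceil(log2(max dimension)) levels instead of the
- // standard floor(log2(max dimension)) + 1, so 7x7, 8x8 and 9x9 images allow at most 3, 3 and 4 levels.
- static uint32_t CornerSampledMaxMipLevels(uint32_t max_dim) {
-     uint32_t levels = 0;
-     while ((1u << levels) < max_dim) ++levels;  // ceil(log2(max_dim)) for max_dim > 1
-     return levels;
- }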
-
-TEST_F(VkLayerTest, CreateYCbCrSampler) {
- TEST_DESCRIPTION("Verify YCbCr sampler creation.");
-
- // Test requires API 1.1 or (API 1.0 + SamplerYCbCr extension). Request API 1.1
- SetTargetApiVersion(VK_API_VERSION_1_1);
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- // In case we don't have API 1.1+, try enabling the extension directly (and its dependencies)
- if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- }
-
- ASSERT_NO_FATAL_FAILURE(InitState());
- VkDevice dev = m_device->device();
-
- PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionFunction = nullptr;
- if (DeviceValidationVersion() >= VK_API_VERSION_1_1) {
- vkCreateSamplerYcbcrConversionFunction = vkCreateSamplerYcbcrConversion;
- } else {
- vkCreateSamplerYcbcrConversionFunction =
- (PFN_vkCreateSamplerYcbcrConversionKHR)vkGetDeviceProcAddr(m_device->handle(), "vkCreateSamplerYcbcrConversionKHR");
- }
-
- if (!vkCreateSamplerYcbcrConversionFunction) {
- printf("%s Did not find required device support for YcbcrSamplerConversion; test skipped.\n", kSkipPrefix);
- return;
- }
-
- // Verify we have the requested support
- bool ycbcr_support = (DeviceExtensionEnabled(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME) ||
- (DeviceValidationVersion() >= VK_API_VERSION_1_1));
- if (!ycbcr_support) {
- printf("%s Did not find required device extension %s; test skipped.\n", kSkipPrefix,
- VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
- return;
- }
-
- VkSamplerYcbcrConversion ycbcr_conv = VK_NULL_HANDLE;
- VkSamplerYcbcrConversionCreateInfo sycci = {};
- sycci.sType = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO;
- sycci.format = VK_FORMAT_UNDEFINED;
- sycci.ycbcrModel = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY;
- sycci.ycbcrRange = VK_SAMPLER_YCBCR_RANGE_ITU_FULL;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkSamplerYcbcrConversionCreateInfo-format-01649");
- vkCreateSamplerYcbcrConversionFunction(dev, &sycci, NULL, &ycbcr_conv);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, BufferDeviceAddressEXT) {
- TEST_DESCRIPTION("Test VK_EXT_buffer_device_address.");
-
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- } else {
- printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
- VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- std::array<const char *, 1> required_device_extensions = {{VK_EXT_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME}};
- for (auto device_extension : required_device_extensions) {
- if (DeviceExtensionSupported(gpu(), nullptr, device_extension)) {
- m_device_extension_names.push_back(device_extension);
- } else {
- printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, device_extension);
- return;
- }
- }
-
- if (DeviceIsMockICD() || DeviceSimulation()) {
- printf("%s MockICD does not support this feature, skipping tests\n", kSkipPrefix);
- return;
- }
-
- PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
- (PFN_vkGetPhysicalDeviceFeatures2KHR)vkGetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
- ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
-
- // Create a device that enables buffer_device_address
- auto buffer_device_address_features = lvl_init_struct<VkPhysicalDeviceBufferAddressFeaturesEXT>();
- auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&buffer_device_address_features);
- vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
- buffer_device_address_features.bufferDeviceAddressCaptureReplay = VK_FALSE;
-
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT =
- (PFN_vkGetBufferDeviceAddressEXT)vkGetInstanceProcAddr(instance(), "vkGetBufferDeviceAddressEXT");
-
- VkBufferCreateInfo buffer_create_info = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO};
- buffer_create_info.size = sizeof(uint32_t);
- buffer_create_info.usage = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT;
- buffer_create_info.flags = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT;
- CreateBufferTest(*this, &buffer_create_info, "VUID-VkBufferCreateInfo-flags-02605");
-
- buffer_create_info.flags = 0;
- VkBufferDeviceAddressCreateInfoEXT addr_ci = {VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT};
- addr_ci.deviceAddress = 1;
- buffer_create_info.pNext = &addr_ci;
- CreateBufferTest(*this, &buffer_create_info, "VUID-VkBufferCreateInfo-deviceAddress-02604");
-
- buffer_create_info.pNext = nullptr;
- VkBuffer buffer;
- VkResult err = vkCreateBuffer(m_device->device(), &buffer_create_info, NULL, &buffer);
- ASSERT_VK_SUCCESS(err);
-
- VkBufferDeviceAddressInfoEXT info = {VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT};
- info.buffer = buffer;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferDeviceAddressInfoEXT-buffer-02600");
- vkGetBufferDeviceAddressEXT(m_device->device(), &info);
- m_errorMonitor->VerifyFound();
-
- vkDestroyBuffer(m_device->device(), buffer, NULL);
-}
-
-TEST_F(VkLayerTest, BufferDeviceAddressEXTDisabled) {
- TEST_DESCRIPTION("Test VK_EXT_buffer_device_address.");
-
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- } else {
- printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
- VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- std::array<const char *, 1> required_device_extensions = {{VK_EXT_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME}};
- for (auto device_extension : required_device_extensions) {
- if (DeviceExtensionSupported(gpu(), nullptr, device_extension)) {
- m_device_extension_names.push_back(device_extension);
- } else {
- printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, device_extension);
- return;
- }
- }
-
- if (DeviceIsMockICD() || DeviceSimulation()) {
- printf("%s MockICD does not support this feature, skipping tests\n", kSkipPrefix);
- return;
- }
-
- PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
- (PFN_vkGetPhysicalDeviceFeatures2KHR)vkGetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
- ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
-
- // Create a device that disables buffer_device_address
- auto buffer_device_address_features = lvl_init_struct<VkPhysicalDeviceBufferAddressFeaturesEXT>();
- auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&buffer_device_address_features);
- vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
- buffer_device_address_features.bufferDeviceAddress = VK_FALSE;
- buffer_device_address_features.bufferDeviceAddressCaptureReplay = VK_FALSE;
-
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT =
- (PFN_vkGetBufferDeviceAddressEXT)vkGetInstanceProcAddr(instance(), "vkGetBufferDeviceAddressEXT");
-
- VkBufferCreateInfo buffer_create_info = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO};
- buffer_create_info.size = sizeof(uint32_t);
- buffer_create_info.usage = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT;
- CreateBufferTest(*this, &buffer_create_info, "VUID-VkBufferCreateInfo-usage-02606");
-
- buffer_create_info.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
- VkBuffer buffer;
- VkResult err = vkCreateBuffer(m_device->device(), &buffer_create_info, NULL, &buffer);
- ASSERT_VK_SUCCESS(err);
-
- VkBufferDeviceAddressInfoEXT info = {VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT};
- info.buffer = buffer;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkGetBufferDeviceAddressEXT-None-02598");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferDeviceAddressInfoEXT-buffer-02601");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferDeviceAddressInfoEXT-buffer-02600");
- vkGetBufferDeviceAddressEXT(m_device->device(), &info);
- m_errorMonitor->VerifyFound();
-
- vkDestroyBuffer(m_device->device(), buffer, NULL);
-}
-
-TEST_F(VkLayerTest, CreateImageYcbcrArrayLayers) {
- TEST_DESCRIPTION("Creating images with out-of-range arrayLayers ");
-
- // Enable KHR multiplane req'd extensions
- bool mp_extensions = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
- VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION);
- if (mp_extensions) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- }
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME);
- mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
- mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
- if (mp_extensions) {
- m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
- } else {
- printf("%s test requires KHR multiplane extensions, not available. Skipping.\n", kSkipPrefix);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitState());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- // Create ycbcr image with unsupported arrayLayers
- VkImageCreateInfo image_create_info = {};
- image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- image_create_info.imageType = VK_IMAGE_TYPE_2D;
- image_create_info.format = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM;
- image_create_info.extent.width = 32;
- image_create_info.extent.height = 32;
- image_create_info.extent.depth = 1;
- image_create_info.mipLevels = 1;
- image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
- image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
- image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
-
- bool supported = ImageFormatAndFeaturesSupported(instance(), gpu(), image_create_info, VK_FORMAT_FEATURE_TRANSFER_SRC_BIT);
- if (!supported) {
- printf("%s Multiplane image format not supported. Skipping test.\n", kSkipPrefix);
- return;
- }
-
- VkImageFormatProperties img_limits;
- ASSERT_VK_SUCCESS(GPDIFPHelper(gpu(), &image_create_info, &img_limits));
- if (img_limits.maxArrayLayers == 1) {
- return;
- }
- image_create_info.arrayLayers = img_limits.maxArrayLayers;
-
- CreateImageTest(*this, &image_create_info, "VUID-VkImageCreateInfo-format-02653");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-format-02653");
-}
-
-TEST_F(VkLayerTest, BindImageMemorySwapchain) {
- TEST_DESCRIPTION("Invalid bind image with a swapchain");
- SetTargetApiVersion(VK_API_VERSION_1_1);
-
- if (!AddSurfaceInstanceExtension()) {
- printf("%s surface extensions not supported, skipping BindSwapchainImageMemory test\n", kSkipPrefix);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- if (!AddSwapchainDeviceExtension()) {
- printf("%s swapchain extensions not supported, skipping BindSwapchainImageMemory test\n", kSkipPrefix);
- return;
- }
-
- if (DeviceValidationVersion() < VK_API_VERSION_1_1) {
- printf("%s VkBindImageMemoryInfo requires Vulkan 1.1+, skipping test\n", kSkipPrefix);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitState());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
- if (!InitSwapchain()) {
- printf("%s Cannot create surface or swapchain, skipping BindSwapchainImageMemory test\n", kSkipPrefix);
- return;
- }
-
- auto image_create_info = lvl_init_struct<VkImageCreateInfo>();
- image_create_info.imageType = VK_IMAGE_TYPE_2D;
- image_create_info.format = VK_FORMAT_R8G8B8A8_UNORM;
- image_create_info.extent.width = 64;
- image_create_info.extent.height = 64;
- image_create_info.extent.depth = 1;
- image_create_info.mipLevels = 1;
- image_create_info.arrayLayers = 1;
- image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
- image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
- image_create_info.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
- image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
- image_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
-
- auto image_swapchain_create_info = lvl_init_struct<VkImageSwapchainCreateInfoKHR>();
- image_swapchain_create_info.swapchain = m_swapchain;
- image_create_info.pNext = &image_swapchain_create_info;
-
- VkImage image_from_swapchain;
- vkCreateImage(device(), &image_create_info, NULL, &image_from_swapchain);
-
- VkMemoryRequirements mem_reqs = {};
- vkGetImageMemoryRequirements(device(), image_from_swapchain, &mem_reqs);
-
- auto alloc_info = lvl_init_struct<VkMemoryAllocateInfo>();
- alloc_info.memoryTypeIndex = 0;
- alloc_info.allocationSize = mem_reqs.size;
-
- bool pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &alloc_info, 0);
- ASSERT_TRUE(pass);
-
- VkDeviceMemory mem;
- VkResult err = vkAllocateMemory(m_device->device(), &alloc_info, NULL, &mem);
- ASSERT_VK_SUCCESS(err);
-
- auto bind_info = lvl_init_struct<VkBindImageMemoryInfo>();
- bind_info.image = image_from_swapchain;
- bind_info.memory = VK_NULL_HANDLE;
- bind_info.memoryOffset = 0;
-
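-    // The image was created from the swapchain, so the bind must chain VkBindImageMemorySwapchainInfoKHR (01630);
-    // without that struct, memory must be a valid VkDeviceMemory handle (01632)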
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBindImageMemoryInfo-image-01630");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBindImageMemoryInfo-pNext-01632");
- vkBindImageMemory2(m_device->device(), 1, &bind_info);
- m_errorMonitor->VerifyFound();
-
- auto bind_swapchain_info = lvl_init_struct<VkBindImageMemorySwapchainInfoKHR>();
- bind_swapchain_info.swapchain = VK_NULL_HANDLE;
- bind_swapchain_info.imageIndex = 0;
- bind_info.pNext = &bind_swapchain_info;
-
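-    // The swapchain handle in the bind info is VK_NULL_HANDLE, which parameter validation flags as a missing required handle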
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "UNASSIGNED-GeneralParameterError-RequiredParameter");
- vkBindImageMemory2(m_device->device(), 1, &bind_info);
- m_errorMonitor->VerifyFound();
-
- bind_info.memory = mem;
- bind_swapchain_info.swapchain = m_swapchain;
- bind_swapchain_info.imageIndex = UINT32_MAX;
-
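-    // With a swapchain provided, memory must be VK_NULL_HANDLE (01631) and
-    // imageIndex must be less than the swapchain's image count (01644)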
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBindImageMemoryInfo-pNext-01631");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBindImageMemorySwapchainInfoKHR-imageIndex-01644");
- vkBindImageMemory2(m_device->device(), 1, &bind_info);
- m_errorMonitor->VerifyFound();
-
- vkDestroyImage(m_device->device(), image_from_swapchain, NULL);
- vkFreeMemory(m_device->device(), mem, NULL);
- DestroySwapchain();
-}
-
-TEST_F(VkLayerTest, TransferImageToSwapchainWithInvalidLayoutDeviceGroup) {
-    TEST_DESCRIPTION("Transfer an image to a swapchain image with an invalid layout within a device group");
-
-#if defined(VK_USE_PLATFORM_ANDROID_KHR)
-    printf(
-        "%s According to VUID-VkBindImageMemoryInfo-pNext-01631, memory should be VK_NULL_HANDLE, but Android crashes when "
-        "memory is NULL, skipping test\n",
-        kSkipPrefix);
- return;
-#endif
-
- SetTargetApiVersion(VK_API_VERSION_1_1);
-
- if (!AddSurfaceInstanceExtension()) {
- printf("%s surface extensions not supported, skipping test\n", kSkipPrefix);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- if (!AddSwapchainDeviceExtension()) {
- printf("%s swapchain extensions not supported, skipping test\n", kSkipPrefix);
- return;
- }
-
- if (DeviceValidationVersion() < VK_API_VERSION_1_1) {
- printf("%s VkBindImageMemoryInfo requires Vulkan 1.1+, skipping test\n", kSkipPrefix);
- return;
- }
- uint32_t physical_device_group_count = 0;
- vkEnumeratePhysicalDeviceGroups(instance(), &physical_device_group_count, nullptr);
-
- if (physical_device_group_count == 0) {
- printf("%s physical_device_group_count is 0, skipping test\n", kSkipPrefix);
- return;
- }
-
- std::vector<VkPhysicalDeviceGroupProperties> physical_device_group(physical_device_group_count,
- {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES});
- vkEnumeratePhysicalDeviceGroups(instance(), &physical_device_group_count, physical_device_group.data());
- VkDeviceGroupDeviceCreateInfo create_device_pnext = {};
- create_device_pnext.sType = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO;
- create_device_pnext.physicalDeviceCount = physical_device_group[0].physicalDeviceCount;
- create_device_pnext.pPhysicalDevices = physical_device_group[0].physicalDevices;
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &create_device_pnext));
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
- if (!InitSwapchain(VK_IMAGE_USAGE_TRANSFER_DST_BIT)) {
- printf("%s Cannot create surface or swapchain, skipping test\n", kSkipPrefix);
- return;
- }
-
- auto image_create_info = lvl_init_struct<VkImageCreateInfo>();
- image_create_info.imageType = VK_IMAGE_TYPE_2D;
- image_create_info.format = VK_FORMAT_R8G8B8A8_UNORM;
- image_create_info.extent.width = 64;
- image_create_info.extent.height = 64;
- image_create_info.extent.depth = 1;
- image_create_info.mipLevels = 1;
- image_create_info.arrayLayers = 1;
- image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
- image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
- image_create_info.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
- image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
- image_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
-
- VkImageObj src_Image(m_device);
- src_Image.init(&image_create_info);
-
- image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
- image_create_info.flags = VK_IMAGE_CREATE_ALIAS_BIT;
-
- auto image_swapchain_create_info = lvl_init_struct<VkImageSwapchainCreateInfoKHR>();
- image_swapchain_create_info.swapchain = m_swapchain;
- image_create_info.pNext = &image_swapchain_create_info;
-
- VkImage peer_image;
- vkCreateImage(device(), &image_create_info, NULL, &peer_image);
-
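-    // Bind the peer image to the memory of swapchain image 0; both device indices reference the same physical device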
- auto bind_devicegroup_info = lvl_init_struct<VkBindImageMemoryDeviceGroupInfo>();
- bind_devicegroup_info.deviceIndexCount = 2;
- std::array<uint32_t, 2> deviceIndices = {0, 0};
- bind_devicegroup_info.pDeviceIndices = deviceIndices.data();
- bind_devicegroup_info.splitInstanceBindRegionCount = 0;
- bind_devicegroup_info.pSplitInstanceBindRegions = nullptr;
-
- auto bind_swapchain_info = lvl_init_struct<VkBindImageMemorySwapchainInfoKHR>(&bind_devicegroup_info);
- bind_swapchain_info.swapchain = m_swapchain;
- bind_swapchain_info.imageIndex = 0;
-
- auto bind_info = lvl_init_struct<VkBindImageMemoryInfo>(&bind_swapchain_info);
- bind_info.image = peer_image;
- bind_info.memory = VK_NULL_HANDLE;
- bind_info.memoryOffset = 0;
-
- vkBindImageMemory2(m_device->device(), 1, &bind_info);
-
- uint32_t swapchain_images_count = 0;
- vkGetSwapchainImagesKHR(device(), m_swapchain, &swapchain_images_count, nullptr);
- std::vector<VkImage> swapchain_images;
- swapchain_images.resize(swapchain_images_count);
- vkGetSwapchainImagesKHR(device(), m_swapchain, &swapchain_images_count, swapchain_images.data());
-
- m_commandBuffer->begin();
-
- VkImageCopy copy_region = {};
- copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- copy_region.srcSubresource.mipLevel = 0;
- copy_region.dstSubresource.mipLevel = 0;
- copy_region.srcSubresource.baseArrayLayer = 0;
- copy_region.dstSubresource.baseArrayLayer = 0;
- copy_region.srcSubresource.layerCount = 1;
- copy_region.dstSubresource.layerCount = 1;
- copy_region.srcOffset = {0, 0, 0};
- copy_region.dstOffset = {0, 0, 0};
- copy_region.extent = {10, 10, 1};
- vkCmdCopyImage(m_commandBuffer->handle(), src_Image.handle(), VK_IMAGE_LAYOUT_GENERAL, peer_image,
- VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &copy_region);
-
- m_commandBuffer->end();
-
- VkSubmitInfo submit_info = {};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &m_commandBuffer->handle();
-
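-    // The images were never transitioned to the layouts the copy was recorded with, so layout validation fails at submit time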
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "UNASSIGNED-CoreValidation-DrawState-InvalidImageLayout");
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
- m_errorMonitor->VerifyFound();
-
- vkDestroyImage(m_device->device(), peer_image, NULL);
- DestroySwapchain();
-}
diff --git a/tests/vklayertests_command.cpp b/tests/vklayertests_command.cpp
deleted file mode 100644
index 3c44562bd..000000000
--- a/tests/vklayertests_command.cpp
+++ /dev/null
@@ -1,5016 +0,0 @@
-/*
- * Copyright (c) 2015-2019 The Khronos Group Inc.
- * Copyright (c) 2015-2019 Valve Corporation
- * Copyright (c) 2015-2019 LunarG, Inc.
- * Copyright (c) 2015-2019 Google, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Author: Chia-I Wu <olvaffe@gmail.com>
- * Author: Chris Forbes <chrisf@ijw.co.nz>
- * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
- * Author: Mark Lobodzinski <mark@lunarg.com>
- * Author: Mike Stroyan <mike@LunarG.com>
- * Author: Tobin Ehlis <tobine@google.com>
- * Author: Tony Barbour <tony@LunarG.com>
- * Author: Cody Northrop <cnorthrop@google.com>
- * Author: Dave Houlton <daveh@lunarg.com>
- * Author: Jeremy Kniager <jeremyk@lunarg.com>
- * Author: Shannon McPherson <shannon@lunarg.com>
- * Author: John Zulauf <jzulauf@lunarg.com>
- */
-
-#include "cast_utils.h"
-#include "layer_validation_tests.h"
-
-TEST_F(VkLayerTest, InvalidCommandPoolConsistency) {
- TEST_DESCRIPTION("Allocate command buffers from one command pool and attempt to delete them from another.");
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkFreeCommandBuffers-pCommandBuffers-parent");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- VkCommandPool command_pool_one;
- VkCommandPool command_pool_two;
-
- VkCommandPoolCreateInfo pool_create_info{};
- pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
- pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
- pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
-
- vkCreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool_one);
-
- vkCreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool_two);
-
- VkCommandBuffer cb;
- VkCommandBufferAllocateInfo command_buffer_allocate_info{};
- command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
- command_buffer_allocate_info.commandPool = command_pool_one;
- command_buffer_allocate_info.commandBufferCount = 1;
- command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
- vkAllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, &cb);
-
- vkFreeCommandBuffers(m_device->device(), command_pool_two, 1, &cb);
-
- m_errorMonitor->VerifyFound();
-
- vkDestroyCommandPool(m_device->device(), command_pool_one, NULL);
- vkDestroyCommandPool(m_device->device(), command_pool_two, NULL);
-}
-
-TEST_F(VkLayerTest, InvalidSecondaryCommandBufferBarrier) {
- TEST_DESCRIPTION("Add an invalid image barrier in a secondary command buffer");
- ASSERT_NO_FATAL_FAILURE(Init());
-
-    // A renderpass with a single subpass that declares a self-dependency
- VkAttachmentDescription attach[] = {
- {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
- VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_UNDEFINED,
- VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
- };
- VkAttachmentReference ref = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
- VkSubpassDescription subpasses[] = {
- {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, &ref, nullptr, nullptr, 0, nullptr},
- };
- VkSubpassDependency dep = {0,
- 0,
- VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
- VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
- VK_ACCESS_SHADER_WRITE_BIT,
- VK_ACCESS_SHADER_WRITE_BIT,
- VK_DEPENDENCY_BY_REGION_BIT};
- VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, attach, 1, subpasses, 1, &dep};
- VkRenderPass rp;
-
- VkResult err = vkCreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
- ASSERT_VK_SUCCESS(err);
-
- VkImageObj image(m_device);
- image.Init(32, 32, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
- VkImageView imageView = image.targetView(VK_FORMAT_R8G8B8A8_UNORM);
- // Second image that img_barrier will incorrectly use
- VkImageObj image2(m_device);
- image2.Init(32, 32, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
-
- VkFramebufferCreateInfo fbci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 1, &imageView, 32, 32, 1};
- VkFramebuffer fb;
- err = vkCreateFramebuffer(m_device->device(), &fbci, nullptr, &fb);
- ASSERT_VK_SUCCESS(err);
-
- m_commandBuffer->begin();
-
- VkRenderPassBeginInfo rpbi = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
- nullptr,
- rp,
- fb,
- {{
- 0,
- 0,
- },
- {32, 32}},
- 0,
- nullptr};
-
- vkCmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS);
-
- VkCommandPoolObj pool(m_device, m_device->graphics_queue_node_index_, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
- VkCommandBufferObj secondary(m_device, &pool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
-
- VkCommandBufferInheritanceInfo cbii = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
- nullptr,
- rp,
- 0,
- VK_NULL_HANDLE, // Set to NULL FB handle intentionally to flesh out any errors
- VK_FALSE,
- 0,
- 0};
- VkCommandBufferBeginInfo cbbi = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, nullptr,
- VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT | VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT,
- &cbii};
- vkBeginCommandBuffer(secondary.handle(), &cbbi);
- VkImageMemoryBarrier img_barrier = {};
- img_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
- img_barrier.srcAccessMask = VK_ACCESS_SHADER_WRITE_BIT;
- img_barrier.dstAccessMask = VK_ACCESS_SHADER_WRITE_BIT;
- img_barrier.oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
- img_barrier.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
- img_barrier.image = image2.handle(); // Image mis-matches with FB image
- img_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
- img_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
- img_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- img_barrier.subresourceRange.baseArrayLayer = 0;
- img_barrier.subresourceRange.baseMipLevel = 0;
- img_barrier.subresourceRange.layerCount = 1;
- img_barrier.subresourceRange.levelCount = 1;
- vkCmdPipelineBarrier(secondary.handle(), VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
- VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1, &img_barrier);
- secondary.end();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-image-02635");
- vkCmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary.handle());
- m_errorMonitor->VerifyFound();
-
- vkDestroyFramebuffer(m_device->device(), fb, nullptr);
- vkDestroyRenderPass(m_device->device(), rp, nullptr);
-}
-
-TEST_F(VkLayerTest, DynamicDepthBiasNotBound) {
- TEST_DESCRIPTION(
-        "Run a simple draw call to validate failure when Depth Bias dynamic state is required but not correctly bound.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- // Dynamic depth bias
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Dynamic depth bias state not set for this command buffer");
- VKTriangleTest(BsoFailDepthBias);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, DynamicLineWidthNotBound) {
- TEST_DESCRIPTION(
-        "Run a simple draw call to validate failure when Line Width dynamic state is required but not correctly bound.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- // Dynamic line width
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Dynamic line width state not set for this command buffer");
- VKTriangleTest(BsoFailLineWidth);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, DynamicLineStippleNotBound) {
- TEST_DESCRIPTION(
-        "Run a simple draw call to validate failure when Line Stipple dynamic state is required but not correctly bound.");
-
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- } else {
- printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
- VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- std::array<const char *, 1> required_device_extensions = {{VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME}};
- for (auto device_extension : required_device_extensions) {
- if (DeviceExtensionSupported(gpu(), nullptr, device_extension)) {
- m_device_extension_names.push_back(device_extension);
- } else {
- printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, device_extension);
- return;
- }
- }
-
- PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
- (PFN_vkGetPhysicalDeviceFeatures2KHR)vkGetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
- ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
-
- auto line_rasterization_features = lvl_init_struct<VkPhysicalDeviceLineRasterizationFeaturesEXT>();
- auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&line_rasterization_features);
- vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
-
- if (!line_rasterization_features.stippledBresenhamLines || !line_rasterization_features.bresenhamLines) {
-        printf("%s Stippled Bresenham lines not supported; skipped.\n", kSkipPrefix);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "Dynamic line stipple state not set for this command buffer");
- VKTriangleTest(BsoFailLineStipple);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, DynamicViewportNotBound) {
- TEST_DESCRIPTION(
-        "Run a simple draw call to validate failure when Viewport dynamic state is required but not correctly bound.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- // Dynamic viewport state
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "Dynamic viewport(s) 0 are used by pipeline state object, but were not provided");
- VKTriangleTest(BsoFailViewport);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, DynamicScissorNotBound) {
-    TEST_DESCRIPTION("Run a simple draw call to validate failure when Scissor dynamic state is required but not correctly bound.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- // Dynamic scissor state
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "Dynamic scissor(s) 0 are used by pipeline state object, but were not provided");
- VKTriangleTest(BsoFailScissor);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, DynamicBlendConstantsNotBound) {
- TEST_DESCRIPTION(
-        "Run a simple draw call to validate failure when Blend Constants dynamic state is required but not correctly bound.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- // Dynamic blend constant state
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "Dynamic blend constants state not set for this command buffer");
- VKTriangleTest(BsoFailBlend);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, DynamicDepthBoundsNotBound) {
- TEST_DESCRIPTION(
-        "Run a simple draw call to validate failure when Depth Bounds dynamic state is required but not correctly bound.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- if (!m_device->phy().features().depthBounds) {
- printf("%s Device does not support depthBounds test; skipped.\n", kSkipPrefix);
- return;
- }
- // Dynamic depth bounds
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "Dynamic depth bounds state not set for this command buffer");
- VKTriangleTest(BsoFailDepthBounds);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, DynamicStencilReadNotBound) {
- TEST_DESCRIPTION(
-        "Run a simple draw call to validate failure when Stencil Read dynamic state is required but not correctly bound.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- // Dynamic stencil read mask
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "Dynamic stencil read mask state not set for this command buffer");
- VKTriangleTest(BsoFailStencilReadMask);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, DynamicStencilWriteNotBound) {
- TEST_DESCRIPTION(
-        "Run a simple draw call to validate failure when Stencil Write dynamic state is required but not correctly bound.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- // Dynamic stencil write mask
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "Dynamic stencil write mask state not set for this command buffer");
- VKTriangleTest(BsoFailStencilWriteMask);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, DynamicStencilRefNotBound) {
- TEST_DESCRIPTION(
-        "Run a simple draw call to validate failure when Stencil Ref dynamic state is required but not correctly bound.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- // Dynamic stencil reference
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "Dynamic stencil reference state not set for this command buffer");
- VKTriangleTest(BsoFailStencilReference);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, IndexBufferNotBound) {
- TEST_DESCRIPTION("Run an indexed draw call without an index buffer bound.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "Index buffer object not bound to this command buffer when Indexed ");
- VKTriangleTest(BsoFailIndexBuffer);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, IndexBufferBadSize) {
- TEST_DESCRIPTION("Run indexed draw call with bad index buffer size.");
-
- ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "vkCmdDrawIndexed() index size ");
- VKTriangleTest(BsoFailIndexBufferBadSize);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, IndexBufferBadOffset) {
- TEST_DESCRIPTION("Run indexed draw call with bad index buffer offset.");
-
- ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "vkCmdDrawIndexed() index size ");
- VKTriangleTest(BsoFailIndexBufferBadOffset);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, IndexBufferBadBindSize) {
- TEST_DESCRIPTION("Run bind index buffer with a size greater than the index buffer.");
-
- ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "vkCmdDrawIndexed() index size ");
- VKTriangleTest(BsoFailIndexBufferBadMapSize);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, IndexBufferBadBindOffset) {
- TEST_DESCRIPTION("Run bind index buffer with an offset greater than the size of the index buffer.");
-
- ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "vkCmdDrawIndexed() index size ");
- VKTriangleTest(BsoFailIndexBufferBadMapOffset);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, MissingClearAttachment) {
- TEST_DESCRIPTION("Points to a wrong colorAttachment index in a VkClearAttachment structure passed to vkCmdClearAttachments");
- ASSERT_NO_FATAL_FAILURE(Init());
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearAttachments-aspectMask-02501");
-
- VKTriangleTest(BsoFailCmdClearAttachments);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, CommandBufferTwoSubmits) {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "was begun w/ VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT set, but has been submitted");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitViewport());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- // We luck out b/c by default the framework creates CB w/ the
- // VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT set
- m_commandBuffer->begin();
- m_commandBuffer->ClearAllBuffers(m_renderTargets, m_clear_color, nullptr, m_depth_clear_color, m_stencil_clear_color);
- m_commandBuffer->end();
-
- // Bypass framework since it does the waits automatically
- VkResult err = VK_SUCCESS;
- VkSubmitInfo submit_info;
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.pNext = NULL;
- submit_info.waitSemaphoreCount = 0;
- submit_info.pWaitSemaphores = NULL;
- submit_info.pWaitDstStageMask = NULL;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &m_commandBuffer->handle();
- submit_info.signalSemaphoreCount = 0;
- submit_info.pSignalSemaphores = NULL;
-
- err = vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
- ASSERT_VK_SUCCESS(err);
- vkQueueWaitIdle(m_device->m_queue);
-
- // Cause validation error by re-submitting cmd buffer that should only be
- // submitted once
- err = vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
- vkQueueWaitIdle(m_device->m_queue);
-
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, InvalidPushConstants) {
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitViewport());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkPipelineLayout pipeline_layout;
- VkPushConstantRange pc_range = {};
- VkPipelineLayoutCreateInfo pipeline_layout_ci = {};
- pipeline_layout_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
- pipeline_layout_ci.pushConstantRangeCount = 1;
- pipeline_layout_ci.pPushConstantRanges = &pc_range;
-
- //
- // Check for invalid push constant ranges in pipeline layouts.
- //
- struct PipelineLayoutTestCase {
- VkPushConstantRange const range;
- char const *msg;
- };
-
- const uint32_t too_big = m_device->props.limits.maxPushConstantsSize + 0x4;
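-    // Push constant offset and size must each be a multiple of 4, and the range must fit within maxPushConstantsSize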
- const std::array<PipelineLayoutTestCase, 10> range_tests = {{
- {{VK_SHADER_STAGE_VERTEX_BIT, 0, 0}, "vkCreatePipelineLayout() call has push constants index 0 with size 0."},
- {{VK_SHADER_STAGE_VERTEX_BIT, 0, 1}, "vkCreatePipelineLayout() call has push constants index 0 with size 1."},
- {{VK_SHADER_STAGE_VERTEX_BIT, 4, 1}, "vkCreatePipelineLayout() call has push constants index 0 with size 1."},
- {{VK_SHADER_STAGE_VERTEX_BIT, 4, 0}, "vkCreatePipelineLayout() call has push constants index 0 with size 0."},
- {{VK_SHADER_STAGE_VERTEX_BIT, 1, 4}, "vkCreatePipelineLayout() call has push constants index 0 with offset 1. Offset must"},
- {{VK_SHADER_STAGE_VERTEX_BIT, 0, too_big}, "vkCreatePipelineLayout() call has push constants index 0 with offset "},
- {{VK_SHADER_STAGE_VERTEX_BIT, too_big, too_big}, "vkCreatePipelineLayout() call has push constants index 0 with offset "},
- {{VK_SHADER_STAGE_VERTEX_BIT, too_big, 4}, "vkCreatePipelineLayout() call has push constants index 0 with offset "},
- {{VK_SHADER_STAGE_VERTEX_BIT, 0xFFFFFFF0, 0x00000020},
- "vkCreatePipelineLayout() call has push constants index 0 with offset "},
- {{VK_SHADER_STAGE_VERTEX_BIT, 0x00000020, 0xFFFFFFF0},
- "vkCreatePipelineLayout() call has push constants index 0 with offset "},
- }};
-
- // Check for invalid offset and size
- for (const auto &iter : range_tests) {
- pc_range = iter.range;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, iter.msg);
- vkCreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
- m_errorMonitor->VerifyFound();
- }
-
- // Check for invalid stage flag
- pc_range.offset = 0;
- pc_range.size = 16;
- pc_range.stageFlags = 0;
- m_errorMonitor->SetDesiredFailureMsg(
- VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "vkCreatePipelineLayout: value of pCreateInfo->pPushConstantRanges[0].stageFlags must not be 0");
- vkCreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
- m_errorMonitor->VerifyFound();
-
- // Check for duplicate stage flags in a list of push constant ranges.
- // A shader can only have one push constant block and that block is mapped
- // to the push constant range that has that shader's stage flag set.
- // The shader's stage flag can only appear once in all the ranges, so the
- // implementation can find the one and only range to map it to.
- const uint32_t ranges_per_test = 5;
- struct DuplicateStageFlagsTestCase {
- VkPushConstantRange const ranges[ranges_per_test];
- std::vector<char const *> const msg;
- };
- // Overlapping ranges are OK, but a stage flag can appear only once.
- const std::array<DuplicateStageFlagsTestCase, 3> duplicate_stageFlags_tests = {
- {
- {{{VK_SHADER_STAGE_VERTEX_BIT, 0, 4},
- {VK_SHADER_STAGE_VERTEX_BIT, 0, 4},
- {VK_SHADER_STAGE_VERTEX_BIT, 0, 4},
- {VK_SHADER_STAGE_VERTEX_BIT, 0, 4},
- {VK_SHADER_STAGE_VERTEX_BIT, 0, 4}},
- {
- "vkCreatePipelineLayout() Duplicate stage flags found in ranges 0 and 1.",
- "vkCreatePipelineLayout() Duplicate stage flags found in ranges 0 and 2.",
- "vkCreatePipelineLayout() Duplicate stage flags found in ranges 0 and 3.",
- "vkCreatePipelineLayout() Duplicate stage flags found in ranges 0 and 4.",
- "vkCreatePipelineLayout() Duplicate stage flags found in ranges 1 and 2.",
- "vkCreatePipelineLayout() Duplicate stage flags found in ranges 1 and 3.",
- "vkCreatePipelineLayout() Duplicate stage flags found in ranges 1 and 4.",
- "vkCreatePipelineLayout() Duplicate stage flags found in ranges 2 and 3.",
- "vkCreatePipelineLayout() Duplicate stage flags found in ranges 2 and 4.",
- "vkCreatePipelineLayout() Duplicate stage flags found in ranges 3 and 4.",
- }},
- {{{VK_SHADER_STAGE_VERTEX_BIT, 0, 4},
- {VK_SHADER_STAGE_GEOMETRY_BIT, 0, 4},
- {VK_SHADER_STAGE_FRAGMENT_BIT, 0, 4},
- {VK_SHADER_STAGE_VERTEX_BIT, 0, 4},
- {VK_SHADER_STAGE_GEOMETRY_BIT, 0, 4}},
- {
- "vkCreatePipelineLayout() Duplicate stage flags found in ranges 0 and 3.",
- "vkCreatePipelineLayout() Duplicate stage flags found in ranges 1 and 4.",
- }},
- {{{VK_SHADER_STAGE_FRAGMENT_BIT, 0, 4},
- {VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, 0, 4},
- {VK_SHADER_STAGE_VERTEX_BIT, 0, 4},
- {VK_SHADER_STAGE_VERTEX_BIT, 0, 4},
- {VK_SHADER_STAGE_GEOMETRY_BIT, 0, 4}},
- {
- "vkCreatePipelineLayout() Duplicate stage flags found in ranges 2 and 3.",
- }},
- },
- };
-
- for (const auto &iter : duplicate_stageFlags_tests) {
- pipeline_layout_ci.pPushConstantRanges = iter.ranges;
- pipeline_layout_ci.pushConstantRangeCount = ranges_per_test;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, iter.msg.begin(), iter.msg.end());
- vkCreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
- m_errorMonitor->VerifyFound();
- }
-
- //
- // CmdPushConstants tests
- //
-
- // Setup a pipeline layout with ranges: [0,32) [16,80)
- const std::vector<VkPushConstantRange> pc_range2 = {{VK_SHADER_STAGE_VERTEX_BIT, 16, 64},
- {VK_SHADER_STAGE_FRAGMENT_BIT, 0, 32}};
- const VkPipelineLayoutObj pipeline_layout_obj(m_device, {}, pc_range2);
-
- const uint8_t dummy_values[100] = {};
-
- m_commandBuffer->begin();
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
-
- // Check for invalid stage flag
- // Note that VU 00996 isn't reached due to parameter validation
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "vkCmdPushConstants: value of stageFlags must not be 0");
- vkCmdPushConstants(m_commandBuffer->handle(), pipeline_layout_obj.handle(), 0, 0, 16, dummy_values);
- m_errorMonitor->VerifyFound();
-
- // Positive tests for the overlapping ranges
- m_errorMonitor->ExpectSuccess();
- vkCmdPushConstants(m_commandBuffer->handle(), pipeline_layout_obj.handle(), VK_SHADER_STAGE_FRAGMENT_BIT, 0, 16, dummy_values);
- m_errorMonitor->VerifyNotFound();
- m_errorMonitor->ExpectSuccess();
- vkCmdPushConstants(m_commandBuffer->handle(), pipeline_layout_obj.handle(), VK_SHADER_STAGE_VERTEX_BIT, 32, 48, dummy_values);
- m_errorMonitor->VerifyNotFound();
- m_errorMonitor->ExpectSuccess();
- vkCmdPushConstants(m_commandBuffer->handle(), pipeline_layout_obj.handle(),
- VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT, 16, 16, dummy_values);
- m_errorMonitor->VerifyNotFound();
-
- // Wrong cmd stages for extant range
-    // No range for all cmd stages -- VUID-vkCmdPushConstants-offset-01795
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPushConstants-offset-01795");
-    // Missing cmd stages for found overlapping range -- VUID-vkCmdPushConstants-offset-01796
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPushConstants-offset-01796");
- vkCmdPushConstants(m_commandBuffer->handle(), pipeline_layout_obj.handle(), VK_SHADER_STAGE_GEOMETRY_BIT, 0, 16, dummy_values);
- m_errorMonitor->VerifyFound();
-
-    // No extant range covers this offset
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPushConstants-offset-01795");
- vkCmdPushConstants(m_commandBuffer->handle(), pipeline_layout_obj.handle(), VK_SHADER_STAGE_FRAGMENT_BIT, 80, 4, dummy_values);
- m_errorMonitor->VerifyFound();
-
-    // Extent not fully covered by a matching range for every stage flag
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPushConstants-offset-01795");
- vkCmdPushConstants(m_commandBuffer->handle(), pipeline_layout_obj.handle(),
- VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT, 0, 20, dummy_values);
- m_errorMonitor->VerifyFound();
-
- // Wrong stage flags for valid overlapping range
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPushConstants-offset-01796");
- vkCmdPushConstants(m_commandBuffer->handle(), pipeline_layout_obj.handle(), VK_SHADER_STAGE_VERTEX_BIT, 16, 16, dummy_values);
- m_errorMonitor->VerifyFound();
-
- m_commandBuffer->EndRenderPass();
- m_commandBuffer->end();
-}
-
-TEST_F(VkLayerTest, NoBeginCommandBuffer) {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "You must call vkBeginCommandBuffer() before this call to ");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- VkCommandBufferObj commandBuffer(m_device, m_commandPool);
- // Call EndCommandBuffer() w/o calling BeginCommandBuffer()
- vkEndCommandBuffer(commandBuffer.handle());
-
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, SecondaryCommandBufferNullRenderpass) {
- ASSERT_NO_FATAL_FAILURE(Init());
-
- VkCommandBufferObj cb(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
-
- // Force the failure by not setting the Renderpass and Framebuffer fields
- VkCommandBufferInheritanceInfo cmd_buf_hinfo = {};
- cmd_buf_hinfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
-
- VkCommandBufferBeginInfo cmd_buf_info = {};
- cmd_buf_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
- cmd_buf_info.pNext = NULL;
- cmd_buf_info.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT | VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT;
- cmd_buf_info.pInheritanceInfo = &cmd_buf_hinfo;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkCommandBufferBeginInfo-flags-00053");
- vkBeginCommandBuffer(cb.handle(), &cmd_buf_info);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, SecondaryCommandBufferRerecordedExplicitReset) {
- ASSERT_NO_FATAL_FAILURE(Init());
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "was destroyed or rerecorded");
-
- // A pool we can reset in.
- VkCommandPoolObj pool(m_device, m_device->graphics_queue_node_index_, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
- VkCommandBufferObj secondary(m_device, &pool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
-
- secondary.begin();
- secondary.end();
-
- m_commandBuffer->begin();
- vkCmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary.handle());
-
- // rerecording of secondary
- secondary.reset(); // explicit reset here.
- secondary.begin();
- secondary.end();
-
- vkCmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary.handle());
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, SecondaryCommandBufferRerecordedNoReset) {
- ASSERT_NO_FATAL_FAILURE(Init());
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "was destroyed or rerecorded");
-
- // A pool we can reset in.
- VkCommandPoolObj pool(m_device, m_device->graphics_queue_node_index_, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
- VkCommandBufferObj secondary(m_device, &pool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
-
- secondary.begin();
- secondary.end();
-
- m_commandBuffer->begin();
- vkCmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary.handle());
-
- // rerecording of secondary
- secondary.begin(); // implicit reset in begin
- secondary.end();
-
- vkCmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary.handle());
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, CascadedInvalidation) {
- ASSERT_NO_FATAL_FAILURE(Init());
-
- VkEventCreateInfo eci = {VK_STRUCTURE_TYPE_EVENT_CREATE_INFO, nullptr, 0};
- VkEvent event;
- vkCreateEvent(m_device->device(), &eci, nullptr, &event);
-
- VkCommandBufferObj secondary(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
- secondary.begin();
- vkCmdSetEvent(secondary.handle(), event, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT);
- secondary.end();
-
- m_commandBuffer->begin();
- vkCmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary.handle());
- m_commandBuffer->end();
-
- // destroying the event should invalidate both primary and secondary CB
- vkDestroyEvent(m_device->device(), event, nullptr);
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBuffer-VkEvent");
- m_commandBuffer->QueueCommandBuffer(false);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, CommandBufferResetErrors) {
- // Cause error due to Begin while recording CB
- // Then cause 2 errors for attempting to reset CB w/o having
- // VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT set for the pool from
- // which CBs were allocated. Note that this bit is off by default.
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBeginCommandBuffer-commandBuffer-00049");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- // Calls AllocateCommandBuffers
- VkCommandBufferObj commandBuffer(m_device, m_commandPool);
-
- // Force the failure by setting the Renderpass and Framebuffer fields with (fake) data
- VkCommandBufferInheritanceInfo cmd_buf_hinfo = {};
- cmd_buf_hinfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
- VkCommandBufferBeginInfo cmd_buf_info = {};
- cmd_buf_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
- cmd_buf_info.pNext = NULL;
- cmd_buf_info.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
- cmd_buf_info.pInheritanceInfo = &cmd_buf_hinfo;
-
- // Begin CB to transition to recording state
- vkBeginCommandBuffer(commandBuffer.handle(), &cmd_buf_info);
- // Can't re-begin. This should trigger error
- vkBeginCommandBuffer(commandBuffer.handle(), &cmd_buf_info);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkResetCommandBuffer-commandBuffer-00046");
- VkCommandBufferResetFlags flags = 0; // Don't care about flags for this test
- // Reset attempt will trigger error due to incorrect CommandPool state
- vkResetCommandBuffer(commandBuffer.handle(), flags);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBeginCommandBuffer-commandBuffer-00050");
- // Transition CB to RECORDED state
- vkEndCommandBuffer(commandBuffer.handle());
- // Now attempting to Begin will implicitly reset, which triggers error
- vkBeginCommandBuffer(commandBuffer.handle(), &cmd_buf_info);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, ClearColorAttachmentsOutsideRenderPass) {
-    // Call CmdClearAttachments outside of an active RenderPass
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "vkCmdClearAttachments(): This call must be issued inside an active render pass");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- // Start no RenderPass
- m_commandBuffer->begin();
-
- VkClearAttachment color_attachment;
- color_attachment.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- color_attachment.clearValue.color.float32[0] = 0;
- color_attachment.clearValue.color.float32[1] = 0;
- color_attachment.clearValue.color.float32[2] = 0;
- color_attachment.clearValue.color.float32[3] = 0;
- color_attachment.colorAttachment = 0;
- VkClearRect clear_rect = {{{0, 0}, {32, 32}}, 0, 1};
- vkCmdClearAttachments(m_commandBuffer->handle(), 1, &color_attachment, 1, &clear_rect);
-
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, ClearColorAttachmentsZeroLayercount) {
- TEST_DESCRIPTION("Call CmdClearAttachments with a pRect having a layerCount of zero.");
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearAttachments-layerCount-01934");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- m_commandBuffer->begin();
- vkCmdBeginRenderPass(m_commandBuffer->handle(), &renderPassBeginInfo(), VK_SUBPASS_CONTENTS_INLINE);
-
- VkClearAttachment color_attachment;
- color_attachment.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- color_attachment.clearValue.color.float32[0] = 0;
- color_attachment.clearValue.color.float32[1] = 0;
- color_attachment.clearValue.color.float32[2] = 0;
- color_attachment.clearValue.color.float32[3] = 0;
- color_attachment.colorAttachment = 0;
- VkClearRect clear_rect = {{{0, 0}, {32, 32}}};
- vkCmdClearAttachments(m_commandBuffer->handle(), 1, &color_attachment, 1, &clear_rect);
-
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, ExecuteCommandsPrimaryCB) {
- TEST_DESCRIPTION("Attempt vkCmdExecuteCommands with a primary command buffer (should only be secondary)");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- // An empty primary command buffer
- VkCommandBufferObj cb(m_device, m_commandPool);
- cb.begin();
- cb.end();
-
- m_commandBuffer->begin();
- vkCmdBeginRenderPass(m_commandBuffer->handle(), &renderPassBeginInfo(), VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS);
- VkCommandBuffer handle = cb.handle();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdExecuteCommands-pCommandBuffers-00088");
- vkCmdExecuteCommands(m_commandBuffer->handle(), 1, &handle);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetUnexpectedError("All elements of pCommandBuffers must not be in the pending state");
-
- m_commandBuffer->EndRenderPass();
- m_commandBuffer->end();
-}
-
-TEST_F(VkLayerTest, InvalidVertexAttributeAlignment) {
-    TEST_DESCRIPTION("Check for proper alignment of attribAddress, which depends on a bound pipeline and on a bound vertex buffer");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitViewport());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- const VkPipelineLayoutObj pipeline_layout(m_device);
-
- struct VboEntry {
- uint16_t input0[2];
- uint32_t input1;
- float input2[4];
- };
-
- const unsigned vbo_entry_count = 3;
- const VboEntry vbo_data[vbo_entry_count] = {};
-
- VkConstantBufferObj vbo(m_device, static_cast<int>(sizeof(VboEntry) * vbo_entry_count),
- reinterpret_cast<const void *>(vbo_data), VK_BUFFER_USAGE_VERTEX_BUFFER_BIT);
-
- VkVertexInputBindingDescription input_binding;
- input_binding.binding = 0;
- input_binding.stride = sizeof(VboEntry);
- input_binding.inputRate = VK_VERTEX_INPUT_RATE_VERTEX;
-
- VkVertexInputAttributeDescription input_attribs[3];
-
- input_attribs[0].binding = 0;
- // Location switch between attrib[0] and attrib[1] is intentional
- input_attribs[0].location = 1;
- input_attribs[0].format = VK_FORMAT_A8B8G8R8_UNORM_PACK32;
- input_attribs[0].offset = offsetof(VboEntry, input1);
-
- input_attribs[1].binding = 0;
- input_attribs[1].location = 0;
- input_attribs[1].format = VK_FORMAT_R16G16_UNORM;
- input_attribs[1].offset = offsetof(VboEntry, input0);
-
- input_attribs[2].binding = 0;
- input_attribs[2].location = 2;
- input_attribs[2].format = VK_FORMAT_R32G32B32A32_SFLOAT;
- input_attribs[2].offset = offsetof(VboEntry, input2);
-
- char const *vsSource =
- "#version 450\n"
- "\n"
- "layout(location = 0) in vec2 input0;"
- "layout(location = 1) in vec4 input1;"
- "layout(location = 2) in vec4 input2;"
- "\n"
- "void main(){\n"
- " gl_Position = input1 + input2;\n"
- " gl_Position.xy += input0;\n"
- "}\n";
-
- VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- VkPipelineObj pipe1(m_device);
- pipe1.AddDefaultColorAttachment();
- pipe1.AddShader(&vs);
- pipe1.AddShader(&fs);
- pipe1.AddVertexInputBindings(&input_binding, 1);
- pipe1.AddVertexInputAttribs(&input_attribs[0], 3);
- pipe1.SetViewport(m_viewports);
- pipe1.SetScissor(m_scissors);
- pipe1.CreateVKPipeline(pipeline_layout.handle(), renderPass());
-
- input_binding.stride = 6;
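-    // A 6-byte stride misaligns attributes 0 and 2 (4-byte alignment) for every vertex after the first;
-    // attribute 1 only needs 2-byte alignment and stays valid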
-
- VkPipelineObj pipe2(m_device);
- pipe2.AddDefaultColorAttachment();
- pipe2.AddShader(&vs);
- pipe2.AddShader(&fs);
- pipe2.AddVertexInputBindings(&input_binding, 1);
- pipe2.AddVertexInputAttribs(&input_attribs[0], 3);
- pipe2.SetViewport(m_viewports);
- pipe2.SetScissor(m_scissors);
- pipe2.CreateVKPipeline(pipeline_layout.handle(), renderPass());
-
- m_commandBuffer->begin();
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
-
- // Test with invalid buffer offset
- VkDeviceSize offset = 1;
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe1.handle());
- vkCmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Invalid attribAddress alignment for vertex attribute 0");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Invalid attribAddress alignment for vertex attribute 1");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Invalid attribAddress alignment for vertex attribute 2");
- m_commandBuffer->Draw(1, 0, 0, 0);
- m_errorMonitor->VerifyFound();
-
- // Test with invalid buffer stride
- offset = 0;
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe2.handle());
- vkCmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Invalid attribAddress alignment for vertex attribute 0");
- // Attribute[1] is aligned properly even with a wrong stride
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Invalid attribAddress alignment for vertex attribute 2");
- m_commandBuffer->Draw(1, 0, 0, 0);
- m_errorMonitor->VerifyFound();
-
- m_commandBuffer->EndRenderPass();
- m_commandBuffer->end();
-}
-
-TEST_F(VkLayerTest, NonSimultaneousSecondaryMarksPrimary) {
- ASSERT_NO_FATAL_FAILURE(Init());
- const char *simultaneous_use_message = "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBufferSimultaneousUse";
-
- VkCommandBufferObj secondary(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
-
- secondary.begin();
- secondary.end();
-
- VkCommandBufferBeginInfo cbbi = {
- VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
- nullptr,
- VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT,
- nullptr,
- };
-
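-    // Executing a non-simultaneous-use secondary strips SIMULTANEOUS_USE from the primary, which is reported as a warning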
- m_commandBuffer->begin(&cbbi);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_WARNING_BIT_EXT, simultaneous_use_message);
- vkCmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary.handle());
- m_errorMonitor->VerifyFound();
- m_commandBuffer->end();
-}
-
-TEST_F(VkLayerTest, SimultaneousUseSecondaryTwoExecutes) {
- ASSERT_NO_FATAL_FAILURE(Init());
-
- const char *simultaneous_use_message = "VUID-vkCmdExecuteCommands-pCommandBuffers-00092";
-
- VkCommandBufferObj secondary(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
-
- VkCommandBufferInheritanceInfo inh = {
- VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
- nullptr,
- };
- VkCommandBufferBeginInfo cbbi = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, nullptr, 0, &inh};
-
- secondary.begin(&cbbi);
- secondary.end();
-
- m_commandBuffer->begin();
- vkCmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary.handle());
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, simultaneous_use_message);
- vkCmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary.handle());
- m_errorMonitor->VerifyFound();
- m_commandBuffer->end();
-}
-
-TEST_F(VkLayerTest, SimultaneousUseSecondarySingleExecute) {
- ASSERT_NO_FATAL_FAILURE(Init());
-
- // variation on previous test executing the same CB twice in the same
- // CmdExecuteCommands call
-
- const char *simultaneous_use_message = "VUID-vkCmdExecuteCommands-pCommandBuffers-00093";
-
- VkCommandBufferObj secondary(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
-
- VkCommandBufferInheritanceInfo inh = {
- VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
- nullptr,
- };
- VkCommandBufferBeginInfo cbbi = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, nullptr, 0, &inh};
-
- secondary.begin(&cbbi);
- secondary.end();
-
- m_commandBuffer->begin();
- VkCommandBuffer cbs[] = {secondary.handle(), secondary.handle()};
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, simultaneous_use_message);
- vkCmdExecuteCommands(m_commandBuffer->handle(), 2, cbs);
- m_errorMonitor->VerifyFound();
- m_commandBuffer->end();
-}
-
-TEST_F(VkLayerTest, SimultaneousUseOneShot) {
-    TEST_DESCRIPTION("Submit the same command buffer twice in one submit, looking for simultaneous-use and one-time-submit errors");
- const char *simultaneous_use_message = "is already in use and is not marked for simultaneous use";
- const char *one_shot_message = "VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT set, but has been submitted";
- ASSERT_NO_FATAL_FAILURE(Init());
-
- VkCommandBuffer cmd_bufs[2];
- VkCommandBufferAllocateInfo alloc_info;
- alloc_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
- alloc_info.pNext = NULL;
- alloc_info.commandBufferCount = 2;
- alloc_info.commandPool = m_commandPool->handle();
- alloc_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
- vkAllocateCommandBuffers(m_device->device(), &alloc_info, cmd_bufs);
-
- VkCommandBufferBeginInfo cb_binfo;
- cb_binfo.pNext = NULL;
- cb_binfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
- cb_binfo.pInheritanceInfo = VK_NULL_HANDLE;
- cb_binfo.flags = 0;
- vkBeginCommandBuffer(cmd_bufs[0], &cb_binfo);
- VkViewport viewport = {0, 0, 16, 16, 0, 1};
- vkCmdSetViewport(cmd_bufs[0], 0, 1, &viewport);
- vkEndCommandBuffer(cmd_bufs[0]);
- VkCommandBuffer duplicates[2] = {cmd_bufs[0], cmd_bufs[0]};
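-    // Submit the same non-simultaneous-use command buffer twice in a single submission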
-
- VkSubmitInfo submit_info = {};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 2;
- submit_info.pCommandBuffers = duplicates;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, simultaneous_use_message);
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
- m_errorMonitor->VerifyFound();
- vkQueueWaitIdle(m_device->m_queue);
-
- // Set one time use and now look for one time submit
- duplicates[0] = duplicates[1] = cmd_bufs[1];
- cb_binfo.flags = VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT | VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
- vkBeginCommandBuffer(cmd_bufs[1], &cb_binfo);
- vkCmdSetViewport(cmd_bufs[1], 0, 1, &viewport);
- vkEndCommandBuffer(cmd_bufs[1]);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, one_shot_message);
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
- m_errorMonitor->VerifyFound();
- vkQueueWaitIdle(m_device->m_queue);
-}
-
-TEST_F(VkLayerTest, DrawTimeImageViewTypeMismatchWithPipeline) {
- TEST_DESCRIPTION(
- "Test that an error is produced when an image view type does not match the dimensionality declared in the shader");
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "requires an image view of type VK_IMAGE_VIEW_TYPE_3D");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- char const *fsSource =
- "#version 450\n"
- "\n"
- "layout(set=0, binding=0) uniform sampler3D s;\n"
- "layout(location=0) out vec4 color;\n"
- "void main() {\n"
- " color = texture(s, vec3(0));\n"
- "}\n";
- VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- VkPipelineObj pipe(m_device);
- pipe.AddShader(&vs);
- pipe.AddShader(&fs);
- pipe.AddDefaultColorAttachment();
-
- VkTextureObj texture(m_device, nullptr);
- VkSamplerObj sampler(m_device);
-
- VkDescriptorSetObj descriptorSet(m_device);
- descriptorSet.AppendSamplerTexture(&sampler, &texture);
- descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
-
- VkResult err = pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
- ASSERT_VK_SUCCESS(err);
-
- m_commandBuffer->begin();
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
-
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
- m_commandBuffer->BindDescriptorSet(descriptorSet);
-
- VkViewport viewport = {0, 0, 16, 16, 0, 1};
- vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
- VkRect2D scissor = {{0, 0}, {16, 16}};
- vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
-
- // error produced here.
- vkCmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
-
- m_errorMonitor->VerifyFound();
-
- m_commandBuffer->EndRenderPass();
- m_commandBuffer->end();
-}
-
-TEST_F(VkLayerTest, DrawTimeImageMultisampleMismatchWithPipeline) {
- TEST_DESCRIPTION(
-        "Test that an error is produced when multisampled images are consumed via single-sample image types in the shader, or "
- "vice versa.");
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "requires bound image to have multiple samples");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- char const *fsSource =
- "#version 450\n"
- "\n"
- "layout(set=0, binding=0) uniform sampler2DMS s;\n"
- "layout(location=0) out vec4 color;\n"
- "void main() {\n"
- " color = texelFetch(s, ivec2(0), 0);\n"
- "}\n";
- VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- VkPipelineObj pipe(m_device);
- pipe.AddShader(&vs);
- pipe.AddShader(&fs);
- pipe.AddDefaultColorAttachment();
-
- VkTextureObj texture(m_device, nullptr); // THIS LINE CAUSES CRASH ON MALI
- VkSamplerObj sampler(m_device);
-
- VkDescriptorSetObj descriptorSet(m_device);
- descriptorSet.AppendSamplerTexture(&sampler, &texture);
- descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
-
- VkResult err = pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
- ASSERT_VK_SUCCESS(err);
-
- m_commandBuffer->begin();
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
-
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
- m_commandBuffer->BindDescriptorSet(descriptorSet);
-
- VkViewport viewport = {0, 0, 16, 16, 0, 1};
- vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
- VkRect2D scissor = {{0, 0}, {16, 16}};
- vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
-
- // error produced here.
- vkCmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
-
- m_errorMonitor->VerifyFound();
-
- m_commandBuffer->EndRenderPass();
- m_commandBuffer->end();
-}
-
-TEST_F(VkLayerTest, DrawTimeImageComponentTypeMismatchWithPipeline) {
- TEST_DESCRIPTION(
-        "Test that an error is produced when the component type of an image view disagrees with the type in the shader.");
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SINT component type, but bound descriptor");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- char const *fsSource =
- "#version 450\n"
- "\n"
- "layout(set=0, binding=0) uniform isampler2D s;\n"
- "layout(location=0) out vec4 color;\n"
- "void main() {\n"
- " color = texelFetch(s, ivec2(0), 0);\n"
- "}\n";
- VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- VkPipelineObj pipe(m_device);
- pipe.AddShader(&vs);
- pipe.AddShader(&fs);
- pipe.AddDefaultColorAttachment();
-
- VkTextureObj texture(m_device, nullptr); // UNORM texture by default, incompatible with isampler2D
- VkSamplerObj sampler(m_device);
-
- VkDescriptorSetObj descriptorSet(m_device);
- descriptorSet.AppendSamplerTexture(&sampler, &texture);
- descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
-
- VkResult err = pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
- ASSERT_VK_SUCCESS(err);
-
- m_commandBuffer->begin();
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
-
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
- m_commandBuffer->BindDescriptorSet(descriptorSet);
-
- VkViewport viewport = {0, 0, 16, 16, 0, 1};
- vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
- VkRect2D scissor = {{0, 0}, {16, 16}};
- vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
-
- // error produced here.
- vkCmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
-
- m_errorMonitor->VerifyFound();
-
- m_commandBuffer->EndRenderPass();
- m_commandBuffer->end();
-}
-
-TEST_F(VkLayerTest, CopyImageLayerCountMismatch) {
- TEST_DESCRIPTION(
- "Try to copy between images with the source subresource having a different layerCount than the destination subresource");
- ASSERT_NO_FATAL_FAILURE(Init());
-
- // Create two images to copy between
- VkImageObj src_image_obj(m_device);
- VkImageObj dst_image_obj(m_device);
-
- VkImageCreateInfo image_create_info = {};
- image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- image_create_info.pNext = NULL;
- image_create_info.imageType = VK_IMAGE_TYPE_2D;
- image_create_info.format = VK_FORMAT_B8G8R8A8_UNORM;
- image_create_info.extent.width = 32;
- image_create_info.extent.height = 32;
- image_create_info.extent.depth = 1;
- image_create_info.mipLevels = 1;
- image_create_info.arrayLayers = 4;
- image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
- image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
- image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
- image_create_info.flags = 0;
-
- src_image_obj.init(&image_create_info);
- ASSERT_TRUE(src_image_obj.initialized());
-
- image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
- dst_image_obj.init(&image_create_info);
- ASSERT_TRUE(dst_image_obj.initialized());
-
- m_commandBuffer->begin();
- VkImageCopy copyRegion;
- copyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- copyRegion.srcSubresource.mipLevel = 0;
- copyRegion.srcSubresource.baseArrayLayer = 0;
- copyRegion.srcSubresource.layerCount = 1;
- copyRegion.srcOffset.x = 0;
- copyRegion.srcOffset.y = 0;
- copyRegion.srcOffset.z = 0;
- copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- copyRegion.dstSubresource.mipLevel = 0;
- copyRegion.dstSubresource.baseArrayLayer = 0;
- // Introduce failure by forcing the dst layerCount to differ from src
- copyRegion.dstSubresource.layerCount = 3;
- copyRegion.dstOffset.x = 0;
- copyRegion.dstOffset.y = 0;
- copyRegion.dstOffset.z = 0;
- copyRegion.extent.width = 1;
- copyRegion.extent.height = 1;
- copyRegion.extent.depth = 1;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-extent-00140");
- m_commandBuffer->CopyImage(src_image_obj.image(), VK_IMAGE_LAYOUT_GENERAL, dst_image_obj.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
- &copyRegion);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, CompressedImageMipCopyTests) {
- TEST_DESCRIPTION("Image/Buffer copies for higher mip levels");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- VkPhysicalDeviceFeatures device_features = {};
- ASSERT_NO_FATAL_FAILURE(GetPhysicalDeviceFeatures(&device_features));
- VkFormat compressed_format = VK_FORMAT_UNDEFINED;
- if (device_features.textureCompressionBC) {
- compressed_format = VK_FORMAT_BC3_SRGB_BLOCK;
- } else if (device_features.textureCompressionETC2) {
- compressed_format = VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK;
- } else if (device_features.textureCompressionASTC_LDR) {
- compressed_format = VK_FORMAT_ASTC_4x4_UNORM_BLOCK;
- } else {
- printf("%s No compressed formats supported - CompressedImageMipCopyTests skipped.\n", kSkipPrefix);
- return;
- }
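- // Whichever path was taken, the selected format uses 4x4-texel blocks of 16 bytes each,
- // which is what the buffer-size arithmetic below relies on.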
-
- VkImageCreateInfo ci;
- ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- ci.pNext = NULL;
- ci.flags = 0;
- ci.imageType = VK_IMAGE_TYPE_2D;
- ci.format = compressed_format;
- ci.extent = {32, 32, 1};
- ci.mipLevels = 6;
- ci.arrayLayers = 1;
- ci.samples = VK_SAMPLE_COUNT_1_BIT;
- ci.tiling = VK_IMAGE_TILING_OPTIMAL;
- ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
- ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
- ci.queueFamilyIndexCount = 0;
- ci.pQueueFamilyIndices = NULL;
- ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
-
- VkImageObj image(m_device);
- image.init(&ci);
- ASSERT_TRUE(image.initialized());
-
- VkImageObj odd_image(m_device);
- ci.extent = {31, 32, 1}; // Mips are [31,32] [15,16] [7,8] [3,4] [1,2] [1,1]
- odd_image.init(&ci);
- ASSERT_TRUE(odd_image.initialized());
-
- // Allocate buffers
- VkMemoryPropertyFlags reqs = 0;
- VkBufferObj buffer_1024, buffer_64, buffer_16, buffer_8;
- buffer_1024.init_as_src_and_dst(*m_device, 1024, reqs);
- buffer_64.init_as_src_and_dst(*m_device, 64, reqs);
- buffer_16.init_as_src_and_dst(*m_device, 16, reqs);
- buffer_8.init_as_src_and_dst(*m_device, 8, reqs);
-
- VkBufferImageCopy region = {};
- region.bufferRowLength = 0;
- region.bufferImageHeight = 0;
- region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- region.imageSubresource.layerCount = 1;
- region.imageOffset = {0, 0, 0};
- region.bufferOffset = 0;
-
- // start recording
- m_commandBuffer->begin();
-
- // Mip level copies that work - 5 levels
- m_errorMonitor->ExpectSuccess();
-
- // Mip 0 should fit in 1k buffer - 1k texels @ 1b each
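- // (32x32 texels = 8x8 blocks x 16 bytes/block = 1024 bytes)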
- region.imageExtent = {32, 32, 1};
- region.imageSubresource.mipLevel = 0;
- vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_1024.handle(), 1, &region);
- vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer_1024.handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
-
- // Mip 2 should fit in 64b buffer - 64 texels @ 1b each
- region.imageExtent = {8, 8, 1};
- region.imageSubresource.mipLevel = 2;
- vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_64.handle(), 1, &region);
- vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer_64.handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
-
- // Mip 3 should fit in 16b buffer - 16 texels @ 1b each
- region.imageExtent = {4, 4, 1};
- region.imageSubresource.mipLevel = 3;
- vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16.handle(), 1, &region);
- vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer_16.handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
-
- // Mip 4&5 should fit in 16b buffer with no complaint - 4 & 1 texels @ 1b each
- region.imageExtent = {2, 2, 1};
- region.imageSubresource.mipLevel = 4;
- vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16.handle(), 1, &region);
- vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer_16.handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
-
- region.imageExtent = {1, 1, 1};
- region.imageSubresource.mipLevel = 5;
- vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16.handle(), 1, &region);
- vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer_16.handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
- m_errorMonitor->VerifyNotFound();
-
- // Buffer must accommodate a full compressed block, regardless of texel count
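- // The 1x1 region at mip 5 still occupies one full 16-byte compressed block, so the 8-byte buffer is too small.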
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImageToBuffer-pRegions-00183");
- vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_8.handle(), 1, &region);
- m_errorMonitor->VerifyFound();
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyBufferToImage-pRegions-00171");
- vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer_8.handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
- m_errorMonitor->VerifyFound();
-
- // Copy width < compressed block size, but not the full mip width
- region.imageExtent = {1, 2, 1};
- region.imageSubresource.mipLevel = 4;
- m_errorMonitor->SetDesiredFailureMsg(
- VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkBufferImageCopy-imageExtent-00207"); // width not a multiple of compressed block width
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdCopyImageToBuffer-imageOffset-01794"); // image transfer granularity
- vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16.handle(), 1, &region);
- m_errorMonitor->VerifyFound();
- m_errorMonitor->SetDesiredFailureMsg(
- VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkBufferImageCopy-imageExtent-00207"); // width not a multiple of compressed block width
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdCopyBufferToImage-imageOffset-01793"); // image transfer granularity
- vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer_16.handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
- m_errorMonitor->VerifyFound();
-
- // Copy height < compressed block size but not the full mip height
- region.imageExtent = {2, 1, 1};
- m_errorMonitor->SetDesiredFailureMsg(
- VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkBufferImageCopy-imageExtent-00208"); // height not a multiple of compressed block width
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdCopyImageToBuffer-imageOffset-01794"); // image transfer granularity
- vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16.handle(), 1, &region);
- m_errorMonitor->VerifyFound();
- m_errorMonitor->SetDesiredFailureMsg(
- VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkBufferImageCopy-imageExtent-00208"); // height not a multiple of compressed block width
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdCopyBufferToImage-imageOffset-01793"); // image transfer granularity
- vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer_16.handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
- m_errorMonitor->VerifyFound();
-
- // Offsets must be multiple of compressed block size
- region.imageOffset = {1, 1, 0};
- region.imageExtent = {1, 1, 1};
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkBufferImageCopy-imageOffset-00205"); // imageOffset not a multiple of block size
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdCopyImageToBuffer-imageOffset-01794"); // image transfer granularity
- vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16.handle(), 1, &region);
- m_errorMonitor->VerifyFound();
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkBufferImageCopy-imageOffset-00205"); // imageOffset not a multiple of block size
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdCopyBufferToImage-imageOffset-01793"); // image transfer granularity
- vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer_16.handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
- m_errorMonitor->VerifyFound();
-
- // Offset + extent width = mip width - should succeed
- region.imageOffset = {4, 4, 0};
- region.imageExtent = {3, 4, 1};
- region.imageSubresource.mipLevel = 2;
- m_errorMonitor->ExpectSuccess();
- vkCmdCopyImageToBuffer(m_commandBuffer->handle(), odd_image.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16.handle(), 1, &region);
- vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer_16.handle(), odd_image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
- m_errorMonitor->VerifyNotFound();
-
- // Offset + extent width < mip width and not a multiple of block width - should fail
- region.imageExtent = {3, 3, 1};
- m_errorMonitor->SetDesiredFailureMsg(
- VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkBufferImageCopy-imageExtent-00208"); // offset+extent not a multiple of block width
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdCopyImageToBuffer-imageOffset-01794"); // image transfer granularity
- vkCmdCopyImageToBuffer(m_commandBuffer->handle(), odd_image.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16.handle(), 1, &region);
- m_errorMonitor->VerifyFound();
- m_errorMonitor->SetDesiredFailureMsg(
- VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkBufferImageCopy-imageExtent-00208"); // offset+extent not a multiple of block width
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdCopyBufferToImage-imageOffset-01793"); // image transfer granularity
- vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer_16.handle(), odd_image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, ImageBufferCopyTests) {
- TEST_DESCRIPTION("Image to buffer and buffer to image tests");
- ASSERT_NO_FATAL_FAILURE(Init());
-
- // Bail if any dimension of transfer granularity is 0.
- auto index = m_device->graphics_queue_node_index_;
- auto queue_family_properties = m_device->phy().queue_properties();
- if ((queue_family_properties[index].minImageTransferGranularity.depth == 0) ||
- (queue_family_properties[index].minImageTransferGranularity.width == 0) ||
- (queue_family_properties[index].minImageTransferGranularity.height == 0)) {
- printf("%s Subresource copies are disallowed when xfer granularity (x|y|z) is 0. Skipped.\n", kSkipPrefix);
- return;
- }
-
- VkImageObj image_64k(m_device); // 128^2 texels, 64k
- VkImageObj image_16k(m_device); // 64^2 texels, 16k
- VkImageObj image_16k_depth(m_device); // 64^2 texels, depth, 16k
- VkImageObj ds_image_4D_1S(m_device); // 256^2 texels, 512kb (256k depth, 64k stencil, 192k pack)
- VkImageObj ds_image_3D_1S(m_device); // 256^2 texels, 256kb (192k depth, 64k stencil)
- VkImageObj ds_image_2D(m_device); // 256^2 texels, 128k (128k depth)
- VkImageObj ds_image_1S(m_device); // 256^2 texels, 64k (64k stencil)
-
- image_64k.Init(128, 128, 1, VK_FORMAT_R8G8B8A8_UINT,
- VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
- VK_IMAGE_TILING_OPTIMAL, 0);
- image_16k.Init(64, 64, 1, VK_FORMAT_R8G8B8A8_UINT,
- VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
- VK_IMAGE_TILING_OPTIMAL, 0);
- ASSERT_TRUE(image_64k.initialized());
- ASSERT_TRUE(image_16k.initialized());
-
- // Verify all needed Depth/Stencil formats are supported
- bool missing_ds_support = false;
- VkFormatProperties props = {0, 0, 0};
- vkGetPhysicalDeviceFormatProperties(m_device->phy().handle(), VK_FORMAT_D32_SFLOAT_S8_UINT, &props);
- missing_ds_support |= (props.bufferFeatures == 0 && props.linearTilingFeatures == 0 && props.optimalTilingFeatures == 0);
- missing_ds_support |= (props.optimalTilingFeatures & VK_FORMAT_FEATURE_TRANSFER_SRC_BIT) == 0;
- missing_ds_support |= (props.optimalTilingFeatures & VK_FORMAT_FEATURE_TRANSFER_DST_BIT) == 0;
- vkGetPhysicalDeviceFormatProperties(m_device->phy().handle(), VK_FORMAT_D24_UNORM_S8_UINT, &props);
- missing_ds_support |= (props.bufferFeatures == 0 && props.linearTilingFeatures == 0 && props.optimalTilingFeatures == 0);
- missing_ds_support |= (props.optimalTilingFeatures & VK_FORMAT_FEATURE_TRANSFER_SRC_BIT) == 0;
- missing_ds_support |= (props.optimalTilingFeatures & VK_FORMAT_FEATURE_TRANSFER_DST_BIT) == 0;
- vkGetPhysicalDeviceFormatProperties(m_device->phy().handle(), VK_FORMAT_D16_UNORM, &props);
- missing_ds_support |= (props.bufferFeatures == 0 && props.linearTilingFeatures == 0 && props.optimalTilingFeatures == 0);
- missing_ds_support |= (props.optimalTilingFeatures & VK_FORMAT_FEATURE_TRANSFER_SRC_BIT) == 0;
- missing_ds_support |= (props.optimalTilingFeatures & VK_FORMAT_FEATURE_TRANSFER_DST_BIT) == 0;
- vkGetPhysicalDeviceFormatProperties(m_device->phy().handle(), VK_FORMAT_S8_UINT, &props);
- missing_ds_support |= (props.bufferFeatures == 0 && props.linearTilingFeatures == 0 && props.optimalTilingFeatures == 0);
- missing_ds_support |= (props.optimalTilingFeatures & VK_FORMAT_FEATURE_TRANSFER_SRC_BIT) == 0;
- missing_ds_support |= (props.optimalTilingFeatures & VK_FORMAT_FEATURE_TRANSFER_DST_BIT) == 0;
-
- if (!missing_ds_support) {
- image_16k_depth.Init(64, 64, 1, VK_FORMAT_D24_UNORM_S8_UINT,
- VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
- ASSERT_TRUE(image_16k_depth.initialized());
-
- ds_image_4D_1S.Init(
- 256, 256, 1, VK_FORMAT_D32_SFLOAT_S8_UINT,
- VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
- VK_IMAGE_TILING_OPTIMAL, 0);
- ASSERT_TRUE(ds_image_4D_1S.initialized());
-
- ds_image_3D_1S.Init(
- 256, 256, 1, VK_FORMAT_D24_UNORM_S8_UINT,
- VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
- VK_IMAGE_TILING_OPTIMAL, 0);
- ASSERT_TRUE(ds_image_3D_1S.initialized());
-
- ds_image_2D.Init(
- 256, 256, 1, VK_FORMAT_D16_UNORM,
- VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
- VK_IMAGE_TILING_OPTIMAL, 0);
- ASSERT_TRUE(ds_image_2D.initialized());
-
- ds_image_1S.Init(
- 256, 256, 1, VK_FORMAT_S8_UINT,
- VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
- VK_IMAGE_TILING_OPTIMAL, 0);
- ASSERT_TRUE(ds_image_1S.initialized());
- }
-
- // Allocate buffers
- VkBufferObj buffer_256k, buffer_128k, buffer_64k, buffer_16k;
- VkMemoryPropertyFlags reqs = 0;
- buffer_256k.init_as_src_and_dst(*m_device, 262144, reqs); // 256k
- buffer_128k.init_as_src_and_dst(*m_device, 131072, reqs); // 128k
- buffer_64k.init_as_src_and_dst(*m_device, 65536, reqs); // 64k
- buffer_16k.init_as_src_and_dst(*m_device, 16384, reqs); // 16k
-
- VkBufferImageCopy region = {};
- region.bufferRowLength = 0;
- region.bufferImageHeight = 0;
- region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- region.imageSubresource.layerCount = 1;
- region.imageOffset = {0, 0, 0};
- region.imageExtent = {64, 64, 1};
- region.bufferOffset = 0;
-
- // attempt copies before putting command buffer in recording state
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyBufferToImage-commandBuffer-recording");
- vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer_64k.handle(), image_64k.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImageToBuffer-commandBuffer-recording");
- vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image_64k.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_64k.handle(), 1, &region);
- m_errorMonitor->VerifyFound();
-
- // start recording
- m_commandBuffer->begin();
-
- // successful copies
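- // image_16k is 64x64 R8G8B8A8_UINT at 4 bytes/texel = 16384 bytes, which exactly fills buffer_16k.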
- m_errorMonitor->ExpectSuccess();
- vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16k.handle(), 1, &region);
- vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer_16k.handle(), image_16k.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
- region.imageOffset.x = 16; // 16k copy, offset requires larger image
- vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image_64k.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16k.handle(), 1, &region);
- region.imageExtent.height = 78; // > 16k copy requires larger buffer & image
- vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer_64k.handle(), image_64k.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
- region.imageOffset.x = 0;
- region.imageExtent.height = 64;
- region.bufferOffset = 256; // 16k copy with buffer offset, requires larger buffer
- vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_64k.handle(), 1, &region);
- m_errorMonitor->VerifyNotFound();
-
- // image/buffer too small (extent too large) on copy to image
- region.imageExtent = {65, 64, 1};
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdCopyBufferToImage-pRegions-00171"); // buffer too small
- vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer_16k.handle(), image_64k.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetUnexpectedError("VUID-VkBufferImageCopy-imageOffset-00197");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdCopyBufferToImage-pRegions-00172"); // image too small
- vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer_64k.handle(), image_16k.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
- m_errorMonitor->VerifyFound();
-
- // image/buffer too small (offset) on copy to image
- region.imageExtent = {64, 64, 1};
- region.imageOffset = {0, 4, 0};
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdCopyBufferToImage-pRegions-00171"); // buffer too small
- vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer_16k.handle(), image_64k.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetUnexpectedError("VUID-VkBufferImageCopy-imageOffset-00197");
- m_errorMonitor->SetUnexpectedError("VUID-VkBufferImageCopy-imageOffset-00198");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdCopyBufferToImage-pRegions-00172"); // image too small
- vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer_64k.handle(), image_16k.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
- m_errorMonitor->VerifyFound();
-
- // image/buffer too small on copy to buffer
- region.imageExtent = {64, 64, 1};
- region.imageOffset = {0, 0, 0};
- region.bufferOffset = 4;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdCopyImageToBuffer-pRegions-00183"); // buffer too small
- vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image_64k.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16k.handle(), 1, &region);
- m_errorMonitor->VerifyFound();
-
- region.imageExtent = {64, 65, 1};
- region.bufferOffset = 0;
- m_errorMonitor->SetUnexpectedError("VUID-VkBufferImageCopy-imageOffset-00198");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdCopyImageToBuffer-pRegions-00182"); // image too small
- vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_64k.handle(), 1, &region);
- m_errorMonitor->VerifyFound();
-
- // buffer size OK but rowlength causes loose packing
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImageToBuffer-pRegions-00183");
- region.imageExtent = {64, 64, 1};
- region.bufferRowLength = 68;
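- // With bufferRowLength 68, the 64x64 region spans (63*68 + 64) texels * 4 bytes = 17392 bytes > 16384.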
- vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16k.handle(), 1, &region);
- m_errorMonitor->VerifyFound();
-
- // An extent with zero area should produce a warning, but no error
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_WARNING_BIT_EXT | VK_DEBUG_REPORT_ERROR_BIT_EXT, "} has zero area");
- region.imageExtent.width = 0;
- vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16k.handle(), 1, &region);
- m_errorMonitor->VerifyFound();
-
- // aspect bits
- region.imageExtent = {64, 64, 1};
- region.bufferRowLength = 0;
- region.bufferImageHeight = 0;
- if (!missing_ds_support) {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkBufferImageCopy-aspectMask-00212"); // more than 1 aspect bit set
- region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
- vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k_depth.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16k.handle(), 1,
- &region);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkBufferImageCopy-aspectMask-00211"); // different mis-matched aspect
- region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k_depth.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16k.handle(), 1,
- &region);
- m_errorMonitor->VerifyFound();
- }
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkBufferImageCopy-aspectMask-00211"); // mis-matched aspect
- region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
- vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16k.handle(), 1, &region);
- m_errorMonitor->VerifyFound();
- region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
-
- // Out-of-range mip levels should fail
- region.imageSubresource.mipLevel = image_16k.create_info().mipLevels + 1;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImageToBuffer-imageSubresource-01703");
- m_errorMonitor->SetUnexpectedError("VUID-VkBufferImageCopy-imageOffset-00197");
- m_errorMonitor->SetUnexpectedError("VUID-VkBufferImageCopy-imageOffset-00198");
- m_errorMonitor->SetUnexpectedError("VUID-VkBufferImageCopy-imageOffset-00200");
- m_errorMonitor->SetDesiredFailureMsg(
- VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdCopyImageToBuffer-pRegions-00182"); // unavoidable "region exceeds image bounds" for non-existent mip
- vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16k.handle(), 1, &region);
- m_errorMonitor->VerifyFound();
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyBufferToImage-imageSubresource-01701");
- m_errorMonitor->SetUnexpectedError("VUID-VkBufferImageCopy-imageOffset-00197");
- m_errorMonitor->SetUnexpectedError("VUID-VkBufferImageCopy-imageOffset-00198");
- m_errorMonitor->SetUnexpectedError("VUID-VkBufferImageCopy-imageOffset-00200");
- m_errorMonitor->SetDesiredFailureMsg(
- VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdCopyBufferToImage-pRegions-00172"); // unavoidable "region exceeds image bounds" for non-existent mip
- vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer_16k.handle(), image_16k.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
- m_errorMonitor->VerifyFound();
- region.imageSubresource.mipLevel = 0;
-
- // Out-of-range array layers should fail
- region.imageSubresource.baseArrayLayer = image_16k.create_info().arrayLayers;
- region.imageSubresource.layerCount = 1;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImageToBuffer-imageSubresource-01704");
- vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16k.handle(), 1, &region);
- m_errorMonitor->VerifyFound();
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyBufferToImage-imageSubresource-01702");
- vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer_16k.handle(), image_16k.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
- m_errorMonitor->VerifyFound();
- region.imageSubresource.baseArrayLayer = 0;
-
- // Layout mismatch should fail
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImageToBuffer-srcImageLayout-00189");
- vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, buffer_16k.handle(),
- 1, &region);
- m_errorMonitor->VerifyFound();
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyBufferToImage-dstImageLayout-00180");
- vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer_16k.handle(), image_16k.handle(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
- 1, &region);
- m_errorMonitor->VerifyFound();
-
- // Test Depth/Stencil copies
- if (missing_ds_support) {
- printf("%s Depth / Stencil formats unsupported - skipping D/S tests.\n", kSkipPrefix);
- } else {
- VkBufferImageCopy ds_region = {};
- ds_region.bufferOffset = 0;
- ds_region.bufferRowLength = 0;
- ds_region.bufferImageHeight = 0;
- ds_region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
- ds_region.imageSubresource.mipLevel = 0;
- ds_region.imageSubresource.baseArrayLayer = 0;
- ds_region.imageSubresource.layerCount = 1;
- ds_region.imageOffset = {0, 0, 0};
- ds_region.imageExtent = {256, 256, 1};
-
- // Depth copies that should succeed
- m_errorMonitor->ExpectSuccess(); // Extract 4b depth per texel, pack into 256k buffer
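- // (256x256 texels x 4 bytes of depth data = 262144 bytes, exactly the size of buffer_256k)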
- vkCmdCopyImageToBuffer(m_commandBuffer->handle(), ds_image_4D_1S.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
- buffer_256k.handle(), 1, &ds_region);
- m_errorMonitor->VerifyNotFound();
-
- m_errorMonitor->ExpectSuccess(); // Extract 3b depth per texel, pack (loose) into 256k buffer
- vkCmdCopyImageToBuffer(m_commandBuffer->handle(), ds_image_3D_1S.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
- buffer_256k.handle(), 1, &ds_region);
- m_errorMonitor->VerifyNotFound();
-
- m_errorMonitor->ExpectSuccess(); // Copy 2b depth per texel, into 128k buffer
- vkCmdCopyImageToBuffer(m_commandBuffer->handle(), ds_image_2D.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
- buffer_128k.handle(), 1, &ds_region);
- m_errorMonitor->VerifyNotFound();
-
- // Depth copies that should fail
- ds_region.bufferOffset = 4;
- m_errorMonitor->SetDesiredFailureMsg(
- VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdCopyImageToBuffer-pRegions-00183"); // Extract 4b depth per texel, pack into 256k buffer
- vkCmdCopyImageToBuffer(m_commandBuffer->handle(), ds_image_4D_1S.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
- buffer_256k.handle(), 1, &ds_region);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(
- VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdCopyImageToBuffer-pRegions-00183"); // Extract 3b depth per texel, pack (loose) into 256k buffer
- vkCmdCopyImageToBuffer(m_commandBuffer->handle(), ds_image_3D_1S.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
- buffer_256k.handle(), 1, &ds_region);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(
- VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdCopyImageToBuffer-pRegions-00183"); // Copy 2b depth per texel, into 128k buffer
- vkCmdCopyImageToBuffer(m_commandBuffer->handle(), ds_image_2D.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
- buffer_128k.handle(), 1, &ds_region);
- m_errorMonitor->VerifyFound();
-
- // Stencil copies that should succeed
- ds_region.bufferOffset = 0;
- ds_region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
- m_errorMonitor->ExpectSuccess(); // Extract 1b stencil per texel, pack into 64k buffer
- vkCmdCopyImageToBuffer(m_commandBuffer->handle(), ds_image_4D_1S.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
- buffer_64k.handle(), 1, &ds_region);
- m_errorMonitor->VerifyNotFound();
-
- m_errorMonitor->ExpectSuccess(); // Extract 1b stencil per texel, pack into 64k buffer
- vkCmdCopyImageToBuffer(m_commandBuffer->handle(), ds_image_3D_1S.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
- buffer_64k.handle(), 1, &ds_region);
- m_errorMonitor->VerifyNotFound();
-
- m_errorMonitor->ExpectSuccess(); // Copy 1b stencil per texel, into 64k buffer
- vkCmdCopyImageToBuffer(m_commandBuffer->handle(), ds_image_1S.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
- buffer_64k.handle(), 1, &ds_region);
- m_errorMonitor->VerifyNotFound();
-
- // Stencil copies that should fail
- m_errorMonitor->SetDesiredFailureMsg(
- VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdCopyImageToBuffer-pRegions-00183"); // Extract 1b stencil per texel, pack into 64k buffer
- vkCmdCopyImageToBuffer(m_commandBuffer->handle(), ds_image_4D_1S.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
- buffer_16k.handle(), 1, &ds_region);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(
- VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdCopyImageToBuffer-pRegions-00183"); // Extract 1b stencil per texel, pack into 64k buffer
- ds_region.bufferRowLength = 260;
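- // Row length 260 makes the 256x256 stencil region span (255*260 + 256) bytes = 66556 > 65536.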
- vkCmdCopyImageToBuffer(m_commandBuffer->handle(), ds_image_3D_1S.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
- buffer_64k.handle(), 1, &ds_region);
- m_errorMonitor->VerifyFound();
-
- ds_region.bufferRowLength = 0;
- ds_region.bufferOffset = 4;
- m_errorMonitor->SetDesiredFailureMsg(
- VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdCopyImageToBuffer-pRegions-00183"); // Copy 1b depth per texel, into 64k buffer
- vkCmdCopyImageToBuffer(m_commandBuffer->handle(), ds_image_1S.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
- buffer_64k.handle(), 1, &ds_region);
- m_errorMonitor->VerifyFound();
- }
-
- // Test compressed formats, if supported
- VkPhysicalDeviceFeatures device_features = {};
- ASSERT_NO_FATAL_FAILURE(GetPhysicalDeviceFeatures(&device_features));
- if (!(device_features.textureCompressionBC || device_features.textureCompressionETC2 ||
- device_features.textureCompressionASTC_LDR)) {
- printf("%s No compressed formats supported - block compression tests skipped.\n", kSkipPrefix);
- } else {
- VkImageObj image_16k_4x4comp(m_device); // 128^2 texels as 32^2 compressed (4x4) blocks, 16k
- VkImageObj image_NPOT_4x4comp(m_device); // 130^2 texels as 33^2 compressed (4x4) blocks
- if (device_features.textureCompressionBC) {
- image_16k_4x4comp.Init(128, 128, 1, VK_FORMAT_BC3_SRGB_BLOCK, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, VK_IMAGE_TILING_OPTIMAL,
- 0);
- image_NPOT_4x4comp.Init(130, 130, 1, VK_FORMAT_BC3_SRGB_BLOCK, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, VK_IMAGE_TILING_OPTIMAL,
- 0);
- } else if (device_features.textureCompressionETC2) {
- image_16k_4x4comp.Init(128, 128, 1, VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK, VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
- VK_IMAGE_TILING_OPTIMAL, 0);
- image_NPOT_4x4comp.Init(130, 130, 1, VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK, VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
- VK_IMAGE_TILING_OPTIMAL, 0);
- } else {
- image_16k_4x4comp.Init(128, 128, 1, VK_FORMAT_ASTC_4x4_UNORM_BLOCK, VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
- VK_IMAGE_TILING_OPTIMAL, 0);
- image_NPOT_4x4comp.Init(130, 130, 1, VK_FORMAT_ASTC_4x4_UNORM_BLOCK, VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
- VK_IMAGE_TILING_OPTIMAL, 0);
- }
- ASSERT_TRUE(image_16k_4x4comp.initialized());
-
- // Just fits
- m_errorMonitor->ExpectSuccess();
- region.imageExtent = {128, 128, 1};
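- // 128x128 texels = 32x32 blocks x 16 bytes = 16384 bytes, exactly the size of buffer_16k.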
- vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k_4x4comp.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16k.handle(),
- 1, &region);
- m_errorMonitor->VerifyNotFound();
-
- // with offset, too big for buffer
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImageToBuffer-pRegions-00183");
- region.bufferOffset = 16;
- vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k_4x4comp.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16k.handle(),
- 1, &region);
- m_errorMonitor->VerifyFound();
- region.bufferOffset = 0;
-
- // extents that are not a multiple of compressed block size
- m_errorMonitor->SetDesiredFailureMsg(
- VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkBufferImageCopy-imageExtent-00207"); // extent width not a multiple of block size
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdCopyImageToBuffer-imageOffset-01794"); // image transfer granularity
- region.imageExtent.width = 66;
- vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image_NPOT_4x4comp.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16k.handle(),
- 1, &region);
- m_errorMonitor->VerifyFound();
- region.imageExtent.width = 128;
-
- m_errorMonitor->SetDesiredFailureMsg(
- VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkBufferImageCopy-imageExtent-00208"); // extent height not a multiple of block size
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdCopyImageToBuffer-imageOffset-01794"); // image transfer granularity
- region.imageExtent.height = 2;
- vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image_NPOT_4x4comp.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16k.handle(),
- 1, &region);
- m_errorMonitor->VerifyFound();
- region.imageExtent.height = 128;
-
- // TODO: All available compressed formats are 2D, with block depth of 1. Unable to provoke VU_01277.
-
- // non-multiple extents are allowed if at the far edge of a non-block-multiple image - these should pass
- m_errorMonitor->ExpectSuccess();
- region.imageExtent.width = 66;
- region.imageOffset.x = 64;
- vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image_NPOT_4x4comp.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16k.handle(),
- 1, &region);
- region.imageExtent.width = 16;
- region.imageOffset.x = 0;
- region.imageExtent.height = 2;
- region.imageOffset.y = 128;
- vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image_NPOT_4x4comp.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16k.handle(),
- 1, &region);
- m_errorMonitor->VerifyNotFound();
- region.imageOffset = {0, 0, 0};
-
- // buffer offset must be a multiple of texel block size (16)
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferImageCopy-bufferOffset-00206");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferImageCopy-bufferOffset-00193");
- region.imageExtent = {64, 64, 1};
- region.bufferOffset = 24;
- vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k_4x4comp.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16k.handle(),
- 1, &region);
- m_errorMonitor->VerifyFound();
-
- // bufferRowLength not a multiple of block width (4)
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferImageCopy-bufferRowLength-00203");
- region.bufferOffset = 0;
- region.bufferRowLength = 130;
- region.bufferImageHeight = 0;
- vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k_4x4comp.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_64k.handle(),
- 1, &region);
- m_errorMonitor->VerifyFound();
-
- // bufferImageHeight not a multiple of block height (4)
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferImageCopy-bufferImageHeight-00204");
- region.bufferRowLength = 0;
- region.bufferImageHeight = 130;
- vkCmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k_4x4comp.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_64k.handle(),
- 1, &region);
- m_errorMonitor->VerifyFound();
- }
-}
-
-TEST_F(VkLayerTest, MiscImageLayerTests) {
- TEST_DESCRIPTION("Image-related tests that don't belong elsewhere");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- // TODO: Ideally we should check whether a format is supported before using it.
- VkImageObj image(m_device);
- image.Init(128, 128, 1, VK_FORMAT_R16G16B16A16_UINT, VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL, 0); // 64bpp
- ASSERT_TRUE(image.initialized());
- VkBufferObj buffer;
- VkMemoryPropertyFlags reqs = 0;
- buffer.init_as_src(*m_device, 128 * 128 * 8, reqs);
- VkBufferImageCopy region = {};
- region.bufferRowLength = 128;
- region.bufferImageHeight = 128;
- region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- // layerCount must not be 0
- region.imageSubresource.layerCount = 1;
- region.imageExtent.height = 4;
- region.imageExtent.width = 4;
- region.imageExtent.depth = 1;
-
- VkImageObj image2(m_device);
- image2.Init(128, 128, 1, VK_FORMAT_R8G8_UNORM, VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL, 0); // 16bpp
- ASSERT_TRUE(image2.initialized());
- VkBufferObj buffer2;
- VkMemoryPropertyFlags reqs2 = 0;
- buffer2.init_as_src(*m_device, 128 * 128 * 2, reqs2);
- VkBufferImageCopy region2 = {};
- region2.bufferRowLength = 128;
- region2.bufferImageHeight = 128;
- region2.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- // layerCount must not be 0
- region2.imageSubresource.layerCount = 1;
- region2.imageExtent.height = 4;
- region2.imageExtent.width = 4;
- region2.imageExtent.depth = 1;
- m_commandBuffer->begin();
-
- // Image must have offset.z of 0 and extent.depth of 1
- // Introduce failure by setting imageExtent.depth to 0
- region.imageExtent.depth = 0;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferImageCopy-srcImage-00201");
- vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer.handle(), image.handle(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1,
- &region);
- m_errorMonitor->VerifyFound();
-
- region.imageExtent.depth = 1;
-
- // Image must have offset.z of 0 and extent.depth of 1
- // Introduce failure by setting imageOffset.z to 4
- // Note: Also (unavoidably) triggers 'region exceeds image' #1228
- region.imageOffset.z = 4;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferImageCopy-srcImage-00201");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferImageCopy-imageOffset-00200");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyBufferToImage-pRegions-00172");
- vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer.handle(), image.handle(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1,
- &region);
- m_errorMonitor->VerifyFound();
-
- region.imageOffset.z = 0;
- // BufferOffset must be a multiple of the calling command's VkImage parameter's texel size
- // Introduce failure by setting bufferOffset to half a texel (4 bytes, with 8 bytes per texel)
- region.bufferOffset = 4;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferImageCopy-bufferOffset-00193");
- vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer.handle(), image.handle(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1,
- &region);
- m_errorMonitor->VerifyFound();
-
- // BufferOffset must be a multiple of 4
- // Introduce failure by setting bufferOffset to a value not divisible by 4
- region2.bufferOffset = 6;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferImageCopy-bufferOffset-00194");
- vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer2.handle(), image2.handle(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1,
- &region2);
- m_errorMonitor->VerifyFound();
-
- // BufferRowLength must be 0, or greater than or equal to the width member of imageExtent
- region.bufferOffset = 0;
- region.imageExtent.height = 128;
- region.imageExtent.width = 128;
- // Introduce failure by setting bufferRowLength > 0 but less than width
- region.bufferRowLength = 64;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferImageCopy-bufferRowLength-00195");
- vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer.handle(), image.handle(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1,
- &region);
- m_errorMonitor->VerifyFound();
-
- // BufferImageHeight must be 0, or greater than or equal to the height member of imageExtent
- region.bufferRowLength = 128;
- // Introduce failure by setting bufferImageHeight > 0 but less than height
- region.bufferImageHeight = 64;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferImageCopy-bufferImageHeight-00196");
- vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer.handle(), image.handle(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1,
- &region);
- m_errorMonitor->VerifyFound();
-
- region.bufferImageHeight = 128;
- VkImageObj intImage1(m_device);
- intImage1.Init(128, 128, 1, VK_FORMAT_R8_UNORM, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
- intImage1.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
- VkImageObj intImage2(m_device);
- intImage2.Init(128, 128, 1, VK_FORMAT_R8_UNORM, VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
- intImage2.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
- VkImageBlit blitRegion = {};
- blitRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- blitRegion.srcSubresource.baseArrayLayer = 0;
- blitRegion.srcSubresource.layerCount = 1;
- blitRegion.srcSubresource.mipLevel = 0;
- blitRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- blitRegion.dstSubresource.baseArrayLayer = 0;
- blitRegion.dstSubresource.layerCount = 1;
- blitRegion.dstSubresource.mipLevel = 0;
- blitRegion.srcOffsets[0] = {128, 0, 0};
- blitRegion.srcOffsets[1] = {128, 128, 1};
- blitRegion.dstOffsets[0] = {0, 128, 0};
- blitRegion.dstOffsets[1] = {128, 128, 1};
-
- // Look for the zero-volume blit warnings
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_WARNING_BIT_EXT,
- "vkCmdBlitImage(): pRegions[0].srcOffsets specify a zero-volume area.");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_WARNING_BIT_EXT,
- "vkCmdBlitImage(): pRegions[0].dstOffsets specify a zero-volume area.");
- vkCmdBlitImage(m_commandBuffer->handle(), intImage1.handle(), intImage1.Layout(), intImage2.handle(), intImage2.Layout(), 1,
- &blitRegion, VK_FILTER_LINEAR);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, CopyImageTypeExtentMismatch) {
- // Image copy tests where format type and extents don't match
- ASSERT_NO_FATAL_FAILURE(Init());
-
- VkImageCreateInfo ci;
- ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- ci.pNext = NULL;
- ci.flags = 0;
- ci.imageType = VK_IMAGE_TYPE_1D;
- ci.format = VK_FORMAT_R8G8B8A8_UNORM;
- ci.extent = {32, 1, 1};
- ci.mipLevels = 1;
- ci.arrayLayers = 1;
- ci.samples = VK_SAMPLE_COUNT_1_BIT;
- ci.tiling = VK_IMAGE_TILING_OPTIMAL;
- ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
- ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
- ci.queueFamilyIndexCount = 0;
- ci.pQueueFamilyIndices = NULL;
- ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
-
- // Create 1D image
- VkImageObj image_1D(m_device);
- image_1D.init(&ci);
- ASSERT_TRUE(image_1D.initialized());
-
- // 2D image
- ci.imageType = VK_IMAGE_TYPE_2D;
- ci.extent = {32, 32, 1};
- VkImageObj image_2D(m_device);
- image_2D.init(&ci);
- ASSERT_TRUE(image_2D.initialized());
-
- // 3D image
- ci.imageType = VK_IMAGE_TYPE_3D;
- ci.extent = {32, 32, 8};
- VkImageObj image_3D(m_device);
- image_3D.init(&ci);
- ASSERT_TRUE(image_3D.initialized());
-
- // 2D image array
- ci.imageType = VK_IMAGE_TYPE_2D;
- ci.extent = {32, 32, 1};
- ci.arrayLayers = 8;
- VkImageObj image_2D_array(m_device);
- image_2D_array.init(&ci);
- ASSERT_TRUE(image_2D_array.initialized());
-
- m_commandBuffer->begin();
-
- VkImageCopy copy_region;
- copy_region.extent = {32, 1, 1};
- copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- copy_region.srcSubresource.mipLevel = 0;
- copy_region.dstSubresource.mipLevel = 0;
- copy_region.srcSubresource.baseArrayLayer = 0;
- copy_region.dstSubresource.baseArrayLayer = 0;
- copy_region.srcSubresource.layerCount = 1;
- copy_region.dstSubresource.layerCount = 1;
- copy_region.srcOffset = {0, 0, 0};
- copy_region.dstOffset = {0, 0, 0};
-
- // Sanity check
- m_errorMonitor->ExpectSuccess();
- m_commandBuffer->CopyImage(image_1D.image(), VK_IMAGE_LAYOUT_GENERAL, image_2D.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
- &copy_region);
- m_errorMonitor->VerifyNotFound();
-
- // 1D texture w/ offset.y > 0. Source = VU 09c00124, dest = 09c00130
- copy_region.srcOffset.y = 1;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcImage-00146");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcOffset-00145"); // also y-dim overrun
- m_commandBuffer->CopyImage(image_1D.image(), VK_IMAGE_LAYOUT_GENERAL, image_2D.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
- &copy_region);
- m_errorMonitor->VerifyFound();
- copy_region.srcOffset.y = 0;
- copy_region.dstOffset.y = 1;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstImage-00152");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstOffset-00151"); // also y-dim overrun
- m_commandBuffer->CopyImage(image_2D.image(), VK_IMAGE_LAYOUT_GENERAL, image_1D.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
- &copy_region);
- m_errorMonitor->VerifyFound();
- copy_region.dstOffset.y = 0;
-
- // 1D texture w/ extent.height > 1. Source = VU 09c00124, dest = 09c00130
- copy_region.extent.height = 2;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcImage-00146");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcOffset-00145"); // also y-dim overrun
- m_commandBuffer->CopyImage(image_1D.image(), VK_IMAGE_LAYOUT_GENERAL, image_2D.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
- &copy_region);
- m_errorMonitor->VerifyFound();
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstImage-00152");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstOffset-00151"); // also y-dim overrun
- m_commandBuffer->CopyImage(image_2D.image(), VK_IMAGE_LAYOUT_GENERAL, image_1D.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
- &copy_region);
- m_errorMonitor->VerifyFound();
- copy_region.extent.height = 1;
-
- // 1D texture w/ offset.z > 0. Source = VU 09c00df2, dest = 09c00df4
- copy_region.srcOffset.z = 1;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcImage-01785");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcOffset-00147"); // also z-dim overrun
- m_commandBuffer->CopyImage(image_1D.image(), VK_IMAGE_LAYOUT_GENERAL, image_2D.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
- &copy_region);
- m_errorMonitor->VerifyFound();
- copy_region.srcOffset.z = 0;
- copy_region.dstOffset.z = 1;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstImage-01786");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstOffset-00153"); // also z-dim overrun
- m_commandBuffer->CopyImage(image_2D.image(), VK_IMAGE_LAYOUT_GENERAL, image_1D.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
- &copy_region);
- m_errorMonitor->VerifyFound();
- copy_region.dstOffset.z = 0;
-
- // 1D texture w/ extent.depth > 1. Source = VU 09c00df2, dest = 09c00df4
- copy_region.extent.depth = 2;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcImage-01785");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkImageCopy-srcOffset-00147"); // also z-dim overrun (src)
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkImageCopy-dstOffset-00153"); // also z-dim overrun (dst)
- m_commandBuffer->CopyImage(image_1D.image(), VK_IMAGE_LAYOUT_GENERAL, image_2D.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
- &copy_region);
- m_errorMonitor->VerifyFound();
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstImage-01786");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkImageCopy-srcOffset-00147"); // also z-dim overrun (src)
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkImageCopy-dstOffset-00153"); // also z-dim overrun (dst)
- m_commandBuffer->CopyImage(image_2D.image(), VK_IMAGE_LAYOUT_GENERAL, image_1D.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
- &copy_region);
- m_errorMonitor->VerifyFound();
- copy_region.extent.depth = 1;
-
- // 2D texture w/ offset.z > 0. Source = VU 09c00df6, dest = 09c00df8
- copy_region.extent = {16, 16, 1};
- copy_region.srcOffset.z = 4;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcImage-01787");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkImageCopy-srcOffset-00147"); // also z-dim overrun (src)
- m_commandBuffer->CopyImage(image_2D.image(), VK_IMAGE_LAYOUT_GENERAL, image_3D.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
- &copy_region);
- m_errorMonitor->VerifyFound();
- copy_region.srcOffset.z = 0;
- copy_region.dstOffset.z = 1;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstImage-01788");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkImageCopy-dstOffset-00153"); // also z-dim overrun (dst)
- m_commandBuffer->CopyImage(image_3D.image(), VK_IMAGE_LAYOUT_GENERAL, image_2D.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
- &copy_region);
- m_errorMonitor->VerifyFound();
- copy_region.dstOffset.z = 0;
-
- // 3D texture accessing an array layer other than 0. VU 09c0011a
- copy_region.extent = {4, 4, 1};
- copy_region.srcSubresource.baseArrayLayer = 1;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcImage-00141");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdCopyImage-srcSubresource-01698"); // also 'too many layers'
- m_commandBuffer->CopyImage(image_3D.image(), VK_IMAGE_LAYOUT_GENERAL, image_2D.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
- &copy_region);
- m_errorMonitor->VerifyFound();
- m_commandBuffer->end();
-}
-
-TEST_F(VkLayerTest, CopyImageTypeExtentMismatchMaintenance1) {
- // Image copy tests where format type and extents don't match and the Maintenance1 extension is enabled
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
- } else {
- printf("%s Maintenance1 extension cannot be enabled, test skipped.\n", kSkipPrefix);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- VkFormat image_format = VK_FORMAT_R8G8B8A8_UNORM;
- VkFormatProperties format_props;
- // TODO: Remove this check if or when devsim handles extensions.
- // The chosen format has mandatory support for the transfer src and dst format features when Maintenance1 is enabled. However,
- // our use of devsim and the mock ICD violates this guarantee.
- vkGetPhysicalDeviceFormatProperties(m_device->phy().handle(), image_format, &format_props);
- if (!(format_props.optimalTilingFeatures & VK_FORMAT_FEATURE_TRANSFER_SRC_BIT)) {
- printf("%s Maintenance1 extension is not supported.\n", kSkipPrefix);
- return;
- }
-
- VkImageCreateInfo ci;
- ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- ci.pNext = NULL;
- ci.flags = 0;
- ci.imageType = VK_IMAGE_TYPE_1D;
- ci.format = image_format;
- ci.extent = {32, 1, 1};
- ci.mipLevels = 1;
- ci.arrayLayers = 1;
- ci.samples = VK_SAMPLE_COUNT_1_BIT;
- ci.tiling = VK_IMAGE_TILING_OPTIMAL;
- ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
- ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
- ci.queueFamilyIndexCount = 0;
- ci.pQueueFamilyIndices = NULL;
- ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
-
- // Create 1D image
- VkImageObj image_1D(m_device);
- image_1D.init(&ci);
- ASSERT_TRUE(image_1D.initialized());
-
- // 2D image
- ci.imageType = VK_IMAGE_TYPE_2D;
- ci.extent = {32, 32, 1};
- VkImageObj image_2D(m_device);
- image_2D.init(&ci);
- ASSERT_TRUE(image_2D.initialized());
-
- // 3D image
- ci.imageType = VK_IMAGE_TYPE_3D;
- ci.extent = {32, 32, 8};
- VkImageObj image_3D(m_device);
- image_3D.init(&ci);
- ASSERT_TRUE(image_3D.initialized());
-
- // 2D image array
- ci.imageType = VK_IMAGE_TYPE_2D;
- ci.extent = {32, 32, 1};
- ci.arrayLayers = 8;
- VkImageObj image_2D_array(m_device);
- image_2D_array.init(&ci);
- ASSERT_TRUE(image_2D_array.initialized());
-
- m_commandBuffer->begin();
-
- VkImageCopy copy_region;
- copy_region.extent = {32, 1, 1};
- copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- copy_region.srcSubresource.mipLevel = 0;
- copy_region.dstSubresource.mipLevel = 0;
- copy_region.srcSubresource.baseArrayLayer = 0;
- copy_region.dstSubresource.baseArrayLayer = 0;
- copy_region.srcSubresource.layerCount = 1;
- copy_region.dstSubresource.layerCount = 1;
- copy_region.srcOffset = {0, 0, 0};
- copy_region.dstOffset = {0, 0, 0};
-
- // Copy from layer not present
- copy_region.srcSubresource.baseArrayLayer = 4;
- copy_region.srcSubresource.layerCount = 6;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-srcSubresource-01698");
- m_commandBuffer->CopyImage(image_2D_array.image(), VK_IMAGE_LAYOUT_GENERAL, image_3D.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
- &copy_region);
- m_errorMonitor->VerifyFound();
- copy_region.srcSubresource.baseArrayLayer = 0;
- copy_region.srcSubresource.layerCount = 1;
-
- // Copy to layer not present
- copy_region.dstSubresource.baseArrayLayer = 1;
- copy_region.dstSubresource.layerCount = 8;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-dstSubresource-01699");
- m_commandBuffer->CopyImage(image_3D.image(), VK_IMAGE_LAYOUT_GENERAL, image_2D_array.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
- &copy_region);
- m_errorMonitor->VerifyFound();
- copy_region.dstSubresource.layerCount = 1;
-
- m_commandBuffer->end();
-}
-
-TEST_F(VkLayerTest, CopyImageCompressedBlockAlignment) {
- // Image copy tests on compressed images with block alignment errors
- SetTargetApiVersion(VK_API_VERSION_1_1);
- ASSERT_NO_FATAL_FAILURE(Init());
-
- // Select a compressed format and verify support
- VkPhysicalDeviceFeatures device_features = {};
- ASSERT_NO_FATAL_FAILURE(GetPhysicalDeviceFeatures(&device_features));
- VkFormat compressed_format = VK_FORMAT_UNDEFINED;
- if (device_features.textureCompressionBC) {
- compressed_format = VK_FORMAT_BC3_SRGB_BLOCK;
- } else if (device_features.textureCompressionETC2) {
- compressed_format = VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK;
- } else if (device_features.textureCompressionASTC_LDR) {
- compressed_format = VK_FORMAT_ASTC_4x4_UNORM_BLOCK;
- }
-
- VkImageCreateInfo ci;
- ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- ci.pNext = NULL;
- ci.flags = 0;
- ci.imageType = VK_IMAGE_TYPE_2D;
- ci.format = compressed_format;
- ci.extent = {64, 64, 1};
- ci.mipLevels = 1;
- ci.arrayLayers = 1;
- ci.samples = VK_SAMPLE_COUNT_1_BIT;
- ci.tiling = VK_IMAGE_TILING_OPTIMAL;
- ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
- ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
- ci.queueFamilyIndexCount = 0;
- ci.pQueueFamilyIndices = NULL;
- ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
-
- VkImageFormatProperties img_prop = {};
- if (VK_SUCCESS != vkGetPhysicalDeviceImageFormatProperties(m_device->phy().handle(), ci.format, ci.imageType, ci.tiling,
- ci.usage, ci.flags, &img_prop)) {
- printf("%s No compressed formats supported - CopyImageCompressedBlockAlignment skipped.\n", kSkipPrefix);
- return;
- }
-
- // Create images
- VkImageObj image_1(m_device);
- image_1.init(&ci);
- ASSERT_TRUE(image_1.initialized());
-
- ci.extent = {62, 62, 1}; // slightly smaller and not divisible by block size
- VkImageObj image_2(m_device);
- image_2.init(&ci);
- ASSERT_TRUE(image_2.initialized());
-
- m_commandBuffer->begin();
-
- VkImageCopy copy_region;
- copy_region.extent = {48, 48, 1};
- copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- copy_region.srcSubresource.mipLevel = 0;
- copy_region.dstSubresource.mipLevel = 0;
- copy_region.srcSubresource.baseArrayLayer = 0;
- copy_region.dstSubresource.baseArrayLayer = 0;
- copy_region.srcSubresource.layerCount = 1;
- copy_region.dstSubresource.layerCount = 1;
- copy_region.srcOffset = {0, 0, 0};
- copy_region.dstOffset = {0, 0, 0};
-
- // Sanity check
- m_errorMonitor->ExpectSuccess();
- m_commandBuffer->CopyImage(image_1.image(), VK_IMAGE_LAYOUT_GENERAL, image_2.image(), VK_IMAGE_LAYOUT_GENERAL, 1, &copy_region);
- m_errorMonitor->VerifyNotFound();
-
- std::string vuid;
- bool ycbcr = (DeviceExtensionEnabled(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME) ||
- (DeviceValidationVersion() >= VK_API_VERSION_1_1));
-
- // Src, Dest offsets must be multiples of compressed block sizes {4, 4, 1}
- // Image transfer granularity gets set to compressed block size, so an ITG error is also (unavoidably) triggered.
- vuid = ycbcr ? "VUID-VkImageCopy-srcImage-01727" : "VUID-VkImageCopy-srcOffset-00157";
- copy_region.srcOffset = {2, 4, 0}; // source width
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, vuid);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdCopyImage-srcOffset-01783"); // srcOffset image transfer granularity
- m_commandBuffer->CopyImage(image_1.image(), VK_IMAGE_LAYOUT_GENERAL, image_2.image(), VK_IMAGE_LAYOUT_GENERAL, 1, &copy_region);
- m_errorMonitor->VerifyFound();
- copy_region.srcOffset = {12, 1, 0}; // source height
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, vuid);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdCopyImage-srcOffset-01783"); // srcOffset image transfer granularity
- m_commandBuffer->CopyImage(image_1.image(), VK_IMAGE_LAYOUT_GENERAL, image_2.image(), VK_IMAGE_LAYOUT_GENERAL, 1, &copy_region);
- m_errorMonitor->VerifyFound();
- copy_region.srcOffset = {0, 0, 0};
-
- vuid = ycbcr ? "VUID-VkImageCopy-dstImage-01731" : "VUID-VkImageCopy-dstOffset-00162";
- copy_region.dstOffset = {1, 0, 0}; // dest width
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, vuid);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdCopyImage-dstOffset-01784"); // dstOffset image transfer granularity
- m_commandBuffer->CopyImage(image_1.image(), VK_IMAGE_LAYOUT_GENERAL, image_2.image(), VK_IMAGE_LAYOUT_GENERAL, 1, &copy_region);
- m_errorMonitor->VerifyFound();
- copy_region.dstOffset = {4, 1, 0}; // dest height
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, vuid);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdCopyImage-dstOffset-01784"); // dstOffset image transfer granularity
- m_commandBuffer->CopyImage(image_1.image(), VK_IMAGE_LAYOUT_GENERAL, image_2.image(), VK_IMAGE_LAYOUT_GENERAL, 1, &copy_region);
- m_errorMonitor->VerifyFound();
- copy_region.dstOffset = {0, 0, 0};
-
- // Copy extent must be multiples of compressed block sizes {4, 4, 1} if not full width/height
- vuid = ycbcr ? "VUID-VkImageCopy-srcImage-01728" : "VUID-VkImageCopy-extent-00158";
- copy_region.extent = {62, 60, 1}; // source width
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, vuid);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdCopyImage-srcOffset-01783"); // src extent image transfer granularity
- m_commandBuffer->CopyImage(image_1.image(), VK_IMAGE_LAYOUT_GENERAL, image_2.image(), VK_IMAGE_LAYOUT_GENERAL, 1, &copy_region);
- m_errorMonitor->VerifyFound();
- vuid = ycbcr ? "VUID-VkImageCopy-srcImage-01729" : "VUID-VkImageCopy-extent-00159";
- copy_region.extent = {60, 62, 1}; // source height
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, vuid);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdCopyImage-srcOffset-01783"); // src extent image transfer granularity
- m_commandBuffer->CopyImage(image_1.image(), VK_IMAGE_LAYOUT_GENERAL, image_2.image(), VK_IMAGE_LAYOUT_GENERAL, 1, &copy_region);
- m_errorMonitor->VerifyFound();
-
- vuid = ycbcr ? "VUID-VkImageCopy-dstImage-01732" : "VUID-VkImageCopy-extent-00163";
- copy_region.extent = {62, 60, 1}; // dest width
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, vuid);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdCopyImage-dstOffset-01784"); // dst extent image transfer granularity
- m_commandBuffer->CopyImage(image_2.image(), VK_IMAGE_LAYOUT_GENERAL, image_1.image(), VK_IMAGE_LAYOUT_GENERAL, 1, &copy_region);
- m_errorMonitor->VerifyFound();
- vuid = ycbcr ? "VUID-VkImageCopy-dstImage-01733" : "VUID-VkImageCopy-extent-00164";
- copy_region.extent = {60, 62, 1}; // dest height
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, vuid);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdCopyImage-dstOffset-01784"); // dst extent image transfer granularity
- m_commandBuffer->CopyImage(image_2.image(), VK_IMAGE_LAYOUT_GENERAL, image_1.image(), VK_IMAGE_LAYOUT_GENERAL, 1, &copy_region);
- m_errorMonitor->VerifyFound();
-
- // Note: "VUID-VkImageCopy-extent-00160", "VUID-VkImageCopy-extent-00165", "VUID-VkImageCopy-srcImage-01730",
- // "VUID-VkImageCopy-dstImage-01734"
- // There are currently no supported compressed formats with a block depth other than 1,
- // so impossible to create a 'not a multiple' condition for depth.
- m_commandBuffer->end();
-}
-
-TEST_F(VkLayerTest, CopyImageSinglePlane422Alignment) {
- // Image copy tests on single-plane _422 formats with block alignment errors
-
- // Enable KHR multiplane req'd extensions
- bool mp_extensions = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
- VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION);
- if (mp_extensions) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- }
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME);
- mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
- mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
- if (mp_extensions) {
- m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
- } else {
- printf("%s test requires KHR multiplane extensions, not available. Skipping.\n", kSkipPrefix);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- // Select a _422 format and verify support
- VkImageCreateInfo ci = {};
- ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- ci.pNext = NULL;
- ci.flags = 0;
- ci.imageType = VK_IMAGE_TYPE_2D;
- ci.format = VK_FORMAT_G8B8G8R8_422_UNORM_KHR;
- ci.tiling = VK_IMAGE_TILING_OPTIMAL;
- ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
- ci.mipLevels = 1;
- ci.arrayLayers = 1;
- ci.samples = VK_SAMPLE_COUNT_1_BIT;
- ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
- ci.queueFamilyIndexCount = 0;
- ci.pQueueFamilyIndices = NULL;
- ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
-
- // Verify formats
- VkFormatFeatureFlags features = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT | VK_FORMAT_FEATURE_TRANSFER_DST_BIT;
- bool supported = ImageFormatAndFeaturesSupported(instance(), gpu(), ci, features);
- if (!supported) {
- printf("%s Single-plane _422 image format not supported. Skipping test.\n", kSkipPrefix);
- return; // Assume there's low ROI on searching for different mp formats
- }
-
- // Create images
- ci.extent = {64, 64, 1};
- VkImageObj image_422(m_device);
- image_422.init(&ci);
- ASSERT_TRUE(image_422.initialized());
-
- ci.extent = {64, 64, 1};
- ci.format = VK_FORMAT_R8G8B8A8_UNORM;
- VkImageObj image_ucmp(m_device);
- image_ucmp.init(&ci);
- ASSERT_TRUE(image_ucmp.initialized());
-
- m_commandBuffer->begin();
-
- VkImageCopy copy_region;
- copy_region.extent = {48, 48, 1};
- copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- copy_region.srcSubresource.mipLevel = 0;
- copy_region.dstSubresource.mipLevel = 0;
- copy_region.srcSubresource.baseArrayLayer = 0;
- copy_region.dstSubresource.baseArrayLayer = 0;
- copy_region.srcSubresource.layerCount = 1;
- copy_region.dstSubresource.layerCount = 1;
- copy_region.srcOffset = {0, 0, 0};
- copy_region.dstOffset = {0, 0, 0};
-
- // Src offsets must be multiples of compressed block sizes
- copy_region.srcOffset = {3, 4, 0}; // source offset x
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcImage-01727");
- m_commandBuffer->CopyImage(image_422.image(), VK_IMAGE_LAYOUT_GENERAL, image_ucmp.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
- &copy_region);
- m_errorMonitor->VerifyFound();
- copy_region.srcOffset = {0, 0, 0};
-
- // Dst offsets must be multiples of compressed block sizes
- copy_region.dstOffset = {1, 0, 0};
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstImage-01731");
- m_commandBuffer->CopyImage(image_ucmp.image(), VK_IMAGE_LAYOUT_GENERAL, image_422.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
- &copy_region);
- m_errorMonitor->VerifyFound();
- copy_region.dstOffset = {0, 0, 0};
-
- // Copy extent must be multiples of compressed block sizes if not full width/height
- copy_region.extent = {31, 60, 1}; // 422 source, extent.x
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcImage-01728");
- m_commandBuffer->CopyImage(image_422.image(), VK_IMAGE_LAYOUT_GENERAL, image_ucmp.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
- &copy_region);
- m_errorMonitor->VerifyFound();
-
- // 422 dest, extent.x
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstImage-01732");
- m_commandBuffer->CopyImage(image_ucmp.image(), VK_IMAGE_LAYOUT_GENERAL, image_422.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
- &copy_region);
- m_errorMonitor->VerifyFound();
- copy_region.dstOffset = {0, 0, 0};
-
- m_commandBuffer->end();
-}
-
-TEST_F(VkLayerTest, CopyImageMultiplaneAspectBits) {
- // Image copy tests on multiplane images with aspect errors
-
- // Enable KHR multiplane req'd extensions
- bool mp_extensions = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
- VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION);
- if (mp_extensions) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- }
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME);
- mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
- mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
- if (mp_extensions) {
- m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
- } else {
- printf("%s test requires KHR multiplane extensions, not available. Skipping.\n", kSkipPrefix);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- // Select multi-plane formats and verify support
- VkFormat mp3_format = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR;
- VkFormat mp2_format = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR;
-
- VkImageCreateInfo ci = {};
- ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- ci.pNext = NULL;
- ci.flags = 0;
- ci.imageType = VK_IMAGE_TYPE_2D;
- ci.format = mp2_format;
- ci.extent = {256, 256, 1};
- ci.tiling = VK_IMAGE_TILING_OPTIMAL;
- ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
- ci.mipLevels = 1;
- ci.arrayLayers = 1;
- ci.samples = VK_SAMPLE_COUNT_1_BIT;
- ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
- ci.queueFamilyIndexCount = 0;
- ci.pQueueFamilyIndices = NULL;
- ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
-
- // Verify formats
- VkFormatFeatureFlags features = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT | VK_FORMAT_FEATURE_TRANSFER_DST_BIT;
- bool supported = ImageFormatAndFeaturesSupported(instance(), gpu(), ci, features);
- ci.format = VK_FORMAT_D24_UNORM_S8_UINT;
- supported = supported && ImageFormatAndFeaturesSupported(instance(), gpu(), ci, features);
- ci.format = mp3_format;
- supported = supported && ImageFormatAndFeaturesSupported(instance(), gpu(), ci, features);
- if (!supported) {
- printf("%s Multiplane image formats or optimally tiled depth-stencil buffers not supported. Skipping test.\n",
- kSkipPrefix);
- return; // Assume there's low ROI on searching for different mp formats
- }
-
- // Create images
- VkImageObj mp3_image(m_device);
- mp3_image.init(&ci);
- ASSERT_TRUE(mp3_image.initialized());
-
- ci.format = mp2_format;
- VkImageObj mp2_image(m_device);
- mp2_image.init(&ci);
- ASSERT_TRUE(mp2_image.initialized());
-
- ci.format = VK_FORMAT_D24_UNORM_S8_UINT;
- VkImageObj sp_image(m_device);
- sp_image.init(&ci);
- ASSERT_TRUE(sp_image.initialized());
-
- m_commandBuffer->begin();
-
- VkImageCopy copy_region;
- copy_region.extent = {128, 128, 1};
- copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_PLANE_2_BIT_KHR;
- copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_PLANE_2_BIT_KHR;
- copy_region.srcSubresource.mipLevel = 0;
- copy_region.dstSubresource.mipLevel = 0;
- copy_region.srcSubresource.baseArrayLayer = 0;
- copy_region.dstSubresource.baseArrayLayer = 0;
- copy_region.srcSubresource.layerCount = 1;
- copy_region.dstSubresource.layerCount = 1;
- copy_region.srcOffset = {0, 0, 0};
- copy_region.dstOffset = {0, 0, 0};
-
- m_errorMonitor->SetUnexpectedError("VUID-vkCmdCopyImage-srcImage-00135");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcImage-01552");
- m_commandBuffer->CopyImage(mp2_image.image(), VK_IMAGE_LAYOUT_GENERAL, mp3_image.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
- &copy_region);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetUnexpectedError("VUID-vkCmdCopyImage-srcImage-00135");
- copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_PLANE_0_BIT_KHR;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcImage-01553");
- m_commandBuffer->CopyImage(mp3_image.image(), VK_IMAGE_LAYOUT_GENERAL, mp2_image.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
- &copy_region);
- m_errorMonitor->VerifyFound();
-
- copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_PLANE_1_BIT_KHR;
- copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_PLANE_2_BIT_KHR;
- m_errorMonitor->SetUnexpectedError("VUID-vkCmdCopyImage-srcImage-00135");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstImage-01554");
- m_commandBuffer->CopyImage(mp3_image.image(), VK_IMAGE_LAYOUT_GENERAL, mp2_image.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
- &copy_region);
- m_errorMonitor->VerifyFound();
-
- copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- m_errorMonitor->SetUnexpectedError("VUID-vkCmdCopyImage-srcImage-00135");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstImage-01555");
- m_commandBuffer->CopyImage(mp2_image.image(), VK_IMAGE_LAYOUT_GENERAL, mp3_image.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
- &copy_region);
- m_errorMonitor->VerifyFound();
-
- copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcImage-01556");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "dest image depth/stencil formats"); // also
- m_commandBuffer->CopyImage(mp2_image.image(), VK_IMAGE_LAYOUT_GENERAL, sp_image.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
- &copy_region);
- m_errorMonitor->VerifyFound();
-
- copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
- copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_PLANE_2_BIT_KHR;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstImage-01557");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "dest image depth/stencil formats"); // also
- m_commandBuffer->CopyImage(sp_image.image(), VK_IMAGE_LAYOUT_GENERAL, mp3_image.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
- &copy_region);
- m_errorMonitor->VerifyFound();
-
- m_commandBuffer->end();
-}
-
-TEST_F(VkLayerTest, CopyImageSrcSizeExceeded) {
- // Image copy with source region specified greater than src image size
- ASSERT_NO_FATAL_FAILURE(Init());
-
- // Create images with full mip chain
- VkImageCreateInfo ci;
- ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- ci.pNext = NULL;
- ci.flags = 0;
- ci.imageType = VK_IMAGE_TYPE_3D;
- ci.format = VK_FORMAT_R8G8B8A8_UNORM;
- ci.extent = {32, 32, 8};
- ci.mipLevels = 6;
- ci.arrayLayers = 1;
- ci.samples = VK_SAMPLE_COUNT_1_BIT;
- ci.tiling = VK_IMAGE_TILING_OPTIMAL;
- ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
- ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
- ci.queueFamilyIndexCount = 0;
- ci.pQueueFamilyIndices = NULL;
- ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
-
- VkImageObj src_image(m_device);
- src_image.init(&ci);
- ASSERT_TRUE(src_image.initialized());
-
- // Dest image with one more mip level
- ci.extent = {64, 64, 16};
- ci.mipLevels = 7;
- ci.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
- VkImageObj dst_image(m_device);
- dst_image.init(&ci);
- ASSERT_TRUE(dst_image.initialized());
-
- m_commandBuffer->begin();
-
- VkImageCopy copy_region;
- copy_region.extent = {32, 32, 8};
- copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- copy_region.srcSubresource.mipLevel = 0;
- copy_region.dstSubresource.mipLevel = 0;
- copy_region.srcSubresource.baseArrayLayer = 0;
- copy_region.dstSubresource.baseArrayLayer = 0;
- copy_region.srcSubresource.layerCount = 1;
- copy_region.dstSubresource.layerCount = 1;
- copy_region.srcOffset = {0, 0, 0};
- copy_region.dstOffset = {0, 0, 0};
-
- m_errorMonitor->ExpectSuccess();
- m_commandBuffer->CopyImage(src_image.image(), VK_IMAGE_LAYOUT_GENERAL, dst_image.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
- &copy_region);
- m_errorMonitor->VerifyNotFound();
-
- // Source exceeded in x-dim, VU 01202
- copy_region.srcOffset.x = 4;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdCopyImage-pRegions-00122"); // General "contained within" VU
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcOffset-00144");
- m_commandBuffer->CopyImage(src_image.image(), VK_IMAGE_LAYOUT_GENERAL, dst_image.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
- &copy_region);
- m_errorMonitor->VerifyFound();
-
- // Source exceeded in y-dim, VU 01203
- copy_region.srcOffset.x = 0;
- copy_region.extent.height = 48;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-pRegions-00122");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcOffset-00145");
- m_commandBuffer->CopyImage(src_image.image(), VK_IMAGE_LAYOUT_GENERAL, dst_image.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
- &copy_region);
- m_errorMonitor->VerifyFound();
-
- // Source exceeded in z-dim, VU 01204
- copy_region.extent = {4, 4, 4};
- copy_region.srcSubresource.mipLevel = 2;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-pRegions-00122");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcOffset-00147");
- m_commandBuffer->CopyImage(src_image.image(), VK_IMAGE_LAYOUT_GENERAL, dst_image.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
- &copy_region);
- m_errorMonitor->VerifyFound();
-
- m_commandBuffer->end();
-}
-
-TEST_F(VkLayerTest, CopyImageDstSizeExceeded) {
- // Image copy with dest region specified greater than dest image size
- ASSERT_NO_FATAL_FAILURE(Init());
-
- // Create images with full mip chain
- VkImageCreateInfo ci;
- ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- ci.pNext = NULL;
- ci.flags = 0;
- ci.imageType = VK_IMAGE_TYPE_3D;
- ci.format = VK_FORMAT_R8G8B8A8_UNORM;
- ci.extent = {32, 32, 8};
- ci.mipLevels = 6;
- ci.arrayLayers = 1;
- ci.samples = VK_SAMPLE_COUNT_1_BIT;
- ci.tiling = VK_IMAGE_TILING_OPTIMAL;
- ci.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
- ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
- ci.queueFamilyIndexCount = 0;
- ci.pQueueFamilyIndices = NULL;
- ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
-
- VkImageObj dst_image(m_device);
- dst_image.init(&ci);
- ASSERT_TRUE(dst_image.initialized());
-
- // Src image with one more mip level
- ci.extent = {64, 64, 16};
- ci.mipLevels = 7;
- ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
- VkImageObj src_image(m_device);
- src_image.init(&ci);
- ASSERT_TRUE(src_image.initialized());
-
- m_commandBuffer->begin();
-
- VkImageCopy copy_region;
- copy_region.extent = {32, 32, 8};
- copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- copy_region.srcSubresource.mipLevel = 0;
- copy_region.dstSubresource.mipLevel = 0;
- copy_region.srcSubresource.baseArrayLayer = 0;
- copy_region.dstSubresource.baseArrayLayer = 0;
- copy_region.srcSubresource.layerCount = 1;
- copy_region.dstSubresource.layerCount = 1;
- copy_region.srcOffset = {0, 0, 0};
- copy_region.dstOffset = {0, 0, 0};
-
- m_errorMonitor->ExpectSuccess();
- m_commandBuffer->CopyImage(src_image.image(), VK_IMAGE_LAYOUT_GENERAL, dst_image.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
- &copy_region);
- m_errorMonitor->VerifyNotFound();
-
- // Dest exceeded in x-dim, VU 01205
- copy_region.dstOffset.x = 4;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdCopyImage-pRegions-00123"); // General "contained within" VU
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstOffset-00150");
- m_commandBuffer->CopyImage(src_image.image(), VK_IMAGE_LAYOUT_GENERAL, dst_image.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
- &copy_region);
- m_errorMonitor->VerifyFound();
-
- // Dest exceeded in y-dim, VU 01206
- copy_region.dstOffset.x = 0;
- copy_region.extent.height = 48;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-pRegions-00123");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstOffset-00151");
- m_commandBuffer->CopyImage(src_image.image(), VK_IMAGE_LAYOUT_GENERAL, dst_image.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
- &copy_region);
- m_errorMonitor->VerifyFound();
-
- // Dest exceeded in z-dim, VU 01207
- copy_region.extent = {4, 4, 4};
- copy_region.dstSubresource.mipLevel = 2;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-pRegions-00123");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstOffset-00153");
- m_commandBuffer->CopyImage(src_image.image(), VK_IMAGE_LAYOUT_GENERAL, dst_image.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
- &copy_region);
- m_errorMonitor->VerifyFound();
-
- m_commandBuffer->end();
-}
-
-TEST_F(VkLayerTest, CopyImageFormatSizeMismatch) {
- VkResult err;
- bool pass;
-
- // Create color images with different format sizes and try to copy between them
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-srcImage-00135");
-
- SetTargetApiVersion(VK_API_VERSION_1_1);
- ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
-
- // Create two images with different-sized formats and try to copy between them
- VkImage srcImage;
- VkImage dstImage;
- VkDeviceMemory srcMem;
- VkDeviceMemory destMem;
- VkMemoryRequirements memReqs;
-
- VkImageCreateInfo image_create_info = {};
- image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- image_create_info.pNext = NULL;
- image_create_info.imageType = VK_IMAGE_TYPE_2D;
- image_create_info.format = VK_FORMAT_B8G8R8A8_UNORM;
- image_create_info.extent.width = 32;
- image_create_info.extent.height = 32;
- image_create_info.extent.depth = 1;
- image_create_info.mipLevels = 1;
- image_create_info.arrayLayers = 1;
- image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
- image_create_info.tiling = VK_IMAGE_TILING_LINEAR;
- image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
- image_create_info.flags = 0;
-
- err = vkCreateImage(m_device->device(), &image_create_info, NULL, &srcImage);
- ASSERT_VK_SUCCESS(err);
-
- image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
- // Introduce failure by creating second image with a different-sized format.
- image_create_info.format = VK_FORMAT_R5G5B5A1_UNORM_PACK16;
- VkFormatProperties properties;
- vkGetPhysicalDeviceFormatProperties(m_device->phy().handle(), image_create_info.format, &properties);
- if (properties.optimalTilingFeatures == 0) {
- vkDestroyImage(m_device->device(), srcImage, NULL);
- printf("%s Image format not supported; skipped.\n", kSkipPrefix);
- return;
- }
-
- err = vkCreateImage(m_device->device(), &image_create_info, NULL, &dstImage);
- ASSERT_VK_SUCCESS(err);
-
- // Allocate memory
- VkMemoryAllocateInfo memAlloc = {};
- memAlloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
- memAlloc.pNext = NULL;
- memAlloc.allocationSize = 0;
- memAlloc.memoryTypeIndex = 0;
-
- vkGetImageMemoryRequirements(m_device->device(), srcImage, &memReqs);
- memAlloc.allocationSize = memReqs.size;
- pass = m_device->phy().set_memory_type(memReqs.memoryTypeBits, &memAlloc, 0);
- ASSERT_TRUE(pass);
- err = vkAllocateMemory(m_device->device(), &memAlloc, NULL, &srcMem);
- ASSERT_VK_SUCCESS(err);
-
- vkGetImageMemoryRequirements(m_device->device(), dstImage, &memReqs);
- memAlloc.allocationSize = memReqs.size;
- pass = m_device->phy().set_memory_type(memReqs.memoryTypeBits, &memAlloc, 0);
- ASSERT_TRUE(pass);
- err = vkAllocateMemory(m_device->device(), &memAlloc, NULL, &destMem);
- ASSERT_VK_SUCCESS(err);
-
- err = vkBindImageMemory(m_device->device(), srcImage, srcMem, 0);
- ASSERT_VK_SUCCESS(err);
- err = vkBindImageMemory(m_device->device(), dstImage, destMem, 0);
- ASSERT_VK_SUCCESS(err);
-
- m_commandBuffer->begin();
- VkImageCopy copyRegion;
- copyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- copyRegion.srcSubresource.mipLevel = 0;
- copyRegion.srcSubresource.baseArrayLayer = 0;
- copyRegion.srcSubresource.layerCount = 1;
- copyRegion.srcOffset.x = 0;
- copyRegion.srcOffset.y = 0;
- copyRegion.srcOffset.z = 0;
- copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- copyRegion.dstSubresource.mipLevel = 0;
- copyRegion.dstSubresource.baseArrayLayer = 0;
- copyRegion.dstSubresource.layerCount = 1;
- copyRegion.dstOffset.x = 0;
- copyRegion.dstOffset.y = 0;
- copyRegion.dstOffset.z = 0;
- copyRegion.extent.width = 1;
- copyRegion.extent.height = 1;
- copyRegion.extent.depth = 1;
- m_commandBuffer->CopyImage(srcImage, VK_IMAGE_LAYOUT_GENERAL, dstImage, VK_IMAGE_LAYOUT_GENERAL, 1, &copyRegion);
- m_commandBuffer->end();
-
- m_errorMonitor->VerifyFound();
-
- vkDestroyImage(m_device->device(), dstImage, NULL);
- vkFreeMemory(m_device->device(), destMem, NULL);
-
- // Copy to multiplane image with mismatched sizes
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-srcImage-00135");
-
- VkImageCreateInfo ci;
- ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- ci.pNext = NULL;
- ci.flags = 0;
- ci.imageType = VK_IMAGE_TYPE_2D;
- ci.format = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM;
- ci.extent = {32, 32, 1};
- ci.mipLevels = 1;
- ci.arrayLayers = 1;
- ci.samples = VK_SAMPLE_COUNT_1_BIT;
- ci.tiling = VK_IMAGE_TILING_LINEAR;
- ci.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
- ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
- ci.queueFamilyIndexCount = 0;
- ci.pQueueFamilyIndices = NULL;
- ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
-
- VkFormatFeatureFlags features = VK_FORMAT_FEATURE_TRANSFER_DST_BIT;
- bool supported = ImageFormatAndFeaturesSupported(instance(), gpu(), ci, features);
- bool ycbcr = (DeviceExtensionEnabled(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME) ||
- (DeviceValidationVersion() >= VK_API_VERSION_1_1));
- if (!supported || !ycbcr) {
- printf("%s Image format not supported; skipped multiplanar copy test.\n", kSkipPrefix);
- vkDestroyImage(m_device->device(), srcImage, NULL);
- vkFreeMemory(m_device->device(), srcMem, NULL);
- return;
- }
-
- VkImageObj mpImage(m_device);
- mpImage.init(&ci);
- ASSERT_TRUE(mpImage.initialized());
- copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_PLANE_0_BIT;
- vkResetCommandBuffer(m_commandBuffer->handle(), 0);
- m_commandBuffer->begin();
- m_commandBuffer->CopyImage(srcImage, VK_IMAGE_LAYOUT_GENERAL, mpImage.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &copyRegion);
- m_commandBuffer->end();
-
- m_errorMonitor->VerifyFound();
-
- vkDestroyImage(m_device->device(), srcImage, NULL);
- vkFreeMemory(m_device->device(), srcMem, NULL);
-}
-
-TEST_F(VkLayerTest, CopyImageDepthStencilFormatMismatch) {
- ASSERT_NO_FATAL_FAILURE(Init());
- auto depth_format = FindSupportedDepthStencilFormat(gpu());
- if (!depth_format) {
- printf("%s Couldn't depth stencil image format.\n", kSkipPrefix);
- return;
- }
-
- VkFormatProperties properties;
- vkGetPhysicalDeviceFormatProperties(m_device->phy().handle(), VK_FORMAT_D32_SFLOAT, &properties);
- if (properties.optimalTilingFeatures == 0) {
- printf("%s Image format not supported; skipped.\n", kSkipPrefix);
- return;
- }
-
- VkImageObj srcImage(m_device);
- srcImage.Init(32, 32, 1, VK_FORMAT_D32_SFLOAT, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, VK_IMAGE_TILING_OPTIMAL);
- ASSERT_TRUE(srcImage.initialized());
- VkImageObj dstImage(m_device);
- dstImage.Init(32, 32, 1, depth_format, VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL);
- ASSERT_TRUE(dstImage.initialized());
-
- // Create two depth/stencil images with different formats and try to copy between them
-
- m_commandBuffer->begin();
- VkImageCopy copyRegion;
- copyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
- copyRegion.srcSubresource.mipLevel = 0;
- copyRegion.srcSubresource.baseArrayLayer = 0;
- copyRegion.srcSubresource.layerCount = 1;
- copyRegion.srcOffset.x = 0;
- copyRegion.srcOffset.y = 0;
- copyRegion.srcOffset.z = 0;
- copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
- copyRegion.dstSubresource.mipLevel = 0;
- copyRegion.dstSubresource.baseArrayLayer = 0;
- copyRegion.dstSubresource.layerCount = 1;
- copyRegion.dstOffset.x = 0;
- copyRegion.dstOffset.y = 0;
- copyRegion.dstOffset.z = 0;
- copyRegion.extent.width = 1;
- copyRegion.extent.height = 1;
- copyRegion.extent.depth = 1;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "vkCmdCopyImage called with unmatched source and dest image depth");
- m_commandBuffer->CopyImage(srcImage.handle(), VK_IMAGE_LAYOUT_GENERAL, dstImage.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
- &copyRegion);
- m_commandBuffer->end();
-
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, CopyImageSampleCountMismatch) {
- TEST_DESCRIPTION("Image copies with sample count mis-matches");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- VkImageFormatProperties image_format_properties;
- vkGetPhysicalDeviceImageFormatProperties(gpu(), VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_TYPE_2D, VK_IMAGE_TILING_OPTIMAL,
- VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT, 0,
- &image_format_properties);
-
- if ((0 == (VK_SAMPLE_COUNT_2_BIT & image_format_properties.sampleCounts)) ||
- (0 == (VK_SAMPLE_COUNT_4_BIT & image_format_properties.sampleCounts))) {
- printf("%s Image multi-sample support not found; skipped.\n", kSkipPrefix);
- return;
- }
-
- VkImageCreateInfo ci;
- ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- ci.pNext = NULL;
- ci.flags = 0;
- ci.imageType = VK_IMAGE_TYPE_2D;
- ci.format = VK_FORMAT_R8G8B8A8_UNORM;
- ci.extent = {128, 128, 1};
- ci.mipLevels = 1;
- ci.arrayLayers = 1;
- ci.samples = VK_SAMPLE_COUNT_1_BIT;
- ci.tiling = VK_IMAGE_TILING_OPTIMAL;
- ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
- ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
- ci.queueFamilyIndexCount = 0;
- ci.pQueueFamilyIndices = NULL;
- ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
-
- VkImageObj image1(m_device);
- image1.init(&ci);
- ASSERT_TRUE(image1.initialized());
-
- ci.samples = VK_SAMPLE_COUNT_2_BIT;
- VkImageObj image2(m_device);
- image2.init(&ci);
- ASSERT_TRUE(image2.initialized());
-
- ci.samples = VK_SAMPLE_COUNT_4_BIT;
- VkImageObj image4(m_device);
- image4.init(&ci);
- ASSERT_TRUE(image4.initialized());
-
- m_commandBuffer->begin();
-
- VkImageCopy copyRegion;
- copyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- copyRegion.srcSubresource.mipLevel = 0;
- copyRegion.srcSubresource.baseArrayLayer = 0;
- copyRegion.srcSubresource.layerCount = 1;
- copyRegion.srcOffset = {0, 0, 0};
- copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- copyRegion.dstSubresource.mipLevel = 0;
- copyRegion.dstSubresource.baseArrayLayer = 0;
- copyRegion.dstSubresource.layerCount = 1;
- copyRegion.dstOffset = {0, 0, 0};
- copyRegion.extent = {128, 128, 1};
-
- // Copy a single sample image to/from a multi-sample image
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-srcImage-00136");
- vkCmdCopyImage(m_commandBuffer->handle(), image1.handle(), VK_IMAGE_LAYOUT_GENERAL, image4.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
- &copyRegion);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-srcImage-00136");
- vkCmdCopyImage(m_commandBuffer->handle(), image2.handle(), VK_IMAGE_LAYOUT_GENERAL, image1.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
- &copyRegion);
- m_errorMonitor->VerifyFound();
-
- // Copy between multi-sample images with different sample counts
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-srcImage-00136");
- vkCmdCopyImage(m_commandBuffer->handle(), image2.handle(), VK_IMAGE_LAYOUT_GENERAL, image4.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
- &copyRegion);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-srcImage-00136");
- vkCmdCopyImage(m_commandBuffer->handle(), image4.handle(), VK_IMAGE_LAYOUT_GENERAL, image2.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
- &copyRegion);
- m_errorMonitor->VerifyFound();
-
- m_commandBuffer->end();
-}
-
-TEST_F(VkLayerTest, CopyImageAspectMismatch) {
- TEST_DESCRIPTION("Image copies with aspect mask errors");
- SetTargetApiVersion(VK_API_VERSION_1_1);
- ASSERT_NO_FATAL_FAILURE(Init());
- auto ds_format = FindSupportedDepthStencilFormat(gpu());
- if (!ds_format) {
- printf("%s Couldn't find depth stencil format.\n", kSkipPrefix);
- return;
- }
-
- VkFormatProperties properties;
- vkGetPhysicalDeviceFormatProperties(m_device->phy().handle(), VK_FORMAT_D32_SFLOAT, &properties);
- if (properties.optimalTilingFeatures == 0) {
- printf("%s Image format VK_FORMAT_D32_SFLOAT not supported; skipped.\n", kSkipPrefix);
- return;
- }
- VkImageObj color_image(m_device), ds_image(m_device), depth_image(m_device);
- color_image.Init(128, 128, 1, VK_FORMAT_R32_SFLOAT, VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT);
- depth_image.Init(128, 128, 1, VK_FORMAT_D32_SFLOAT, VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
- VK_IMAGE_TILING_OPTIMAL, 0);
- ds_image.Init(128, 128, 1, ds_format, VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
- VK_IMAGE_TILING_OPTIMAL, 0);
- ASSERT_TRUE(color_image.initialized());
- ASSERT_TRUE(depth_image.initialized());
- ASSERT_TRUE(ds_image.initialized());
-
- VkImageCopy copyRegion;
- copyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
- copyRegion.srcSubresource.mipLevel = 0;
- copyRegion.srcSubresource.baseArrayLayer = 0;
- copyRegion.srcSubresource.layerCount = 1;
- copyRegion.srcOffset = {0, 0, 0};
- copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
- copyRegion.dstSubresource.mipLevel = 0;
- copyRegion.dstSubresource.baseArrayLayer = 0;
- copyRegion.dstSubresource.layerCount = 1;
- copyRegion.dstOffset = {64, 0, 0};
- copyRegion.extent = {64, 128, 1};
-
- // Record a command before the command buffer is in the recording state
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "You must call vkBeginCommandBuffer"); // "VUID-vkCmdCopyImage-commandBuffer-recording");
- vkCmdCopyImage(m_commandBuffer->handle(), depth_image.handle(), VK_IMAGE_LAYOUT_GENERAL, depth_image.handle(),
- VK_IMAGE_LAYOUT_GENERAL, 1, &copyRegion);
- m_errorMonitor->VerifyFound();
-
- m_commandBuffer->begin();
-
- // Src and dest aspect masks don't match
- copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
- bool ycbcr = (DeviceExtensionEnabled(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME) ||
- (DeviceValidationVersion() >= VK_API_VERSION_1_1));
- std::string vuid = (ycbcr ? "VUID-VkImageCopy-srcImage-01551" : "VUID-VkImageCopy-aspectMask-00137");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, vuid);
- vkCmdCopyImage(m_commandBuffer->handle(), ds_image.handle(), VK_IMAGE_LAYOUT_GENERAL, ds_image.handle(),
- VK_IMAGE_LAYOUT_GENERAL, 1, &copyRegion);
- m_errorMonitor->VerifyFound();
- copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
-
- // Illegal combinations of aspect bits
- copyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT; // color must be alone
- copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageSubresourceLayers-aspectMask-00167");
- // These aspect/format mismatches are redundant but unavoidable here
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-aspectMask-00142");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, vuid);
- vkCmdCopyImage(m_commandBuffer->handle(), color_image.handle(), VK_IMAGE_LAYOUT_GENERAL, color_image.handle(),
- VK_IMAGE_LAYOUT_GENERAL, 1, &copyRegion);
- m_errorMonitor->VerifyFound();
- // same test for dstSubresource
- copyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT; // color must be alone
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageSubresourceLayers-aspectMask-00167");
- // These aspect/format mismatches are redundant but unavoidable here
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-aspectMask-00143");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, vuid);
- vkCmdCopyImage(m_commandBuffer->handle(), color_image.handle(), VK_IMAGE_LAYOUT_GENERAL, color_image.handle(),
- VK_IMAGE_LAYOUT_GENERAL, 1, &copyRegion);
- m_errorMonitor->VerifyFound();
-
- // Metadata aspect is illegal
- copyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_METADATA_BIT;
- copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageSubresourceLayers-aspectMask-00168");
- // These aspect/format mismatches are redundant but unavoidable here
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, vuid);
- vkCmdCopyImage(m_commandBuffer->handle(), color_image.handle(), VK_IMAGE_LAYOUT_GENERAL, color_image.handle(),
- VK_IMAGE_LAYOUT_GENERAL, 1, &copyRegion);
- m_errorMonitor->VerifyFound();
- // same test for dstSubresource
- copyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_METADATA_BIT;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageSubresourceLayers-aspectMask-00168");
- // These aspect/format mismatches are redundant but unavoidable here
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, vuid);
- vkCmdCopyImage(m_commandBuffer->handle(), color_image.handle(), VK_IMAGE_LAYOUT_GENERAL, color_image.handle(),
- VK_IMAGE_LAYOUT_GENERAL, 1, &copyRegion);
- m_errorMonitor->VerifyFound();
-
- copyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
- copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
-
- // Aspect mask doesn't match source image format
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-aspectMask-00142");
- // Again redundant but unavoidable
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "unmatched source and dest image depth/stencil formats");
- vkCmdCopyImage(m_commandBuffer->handle(), color_image.handle(), VK_IMAGE_LAYOUT_GENERAL, depth_image.handle(),
- VK_IMAGE_LAYOUT_GENERAL, 1, &copyRegion);
- m_errorMonitor->VerifyFound();
-
- // Aspect mask doesn't match dest image format
- copyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-aspectMask-00143");
- // Again redundant but unavoidable
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "unmatched source and dest image depth/stencil formats");
- vkCmdCopyImage(m_commandBuffer->handle(), color_image.handle(), VK_IMAGE_LAYOUT_GENERAL, depth_image.handle(),
- VK_IMAGE_LAYOUT_GENERAL, 1, &copyRegion);
- m_errorMonitor->VerifyFound();
-
- m_commandBuffer->end();
-}
-
-TEST_F(VkLayerTest, ResolveImageLowSampleCount) {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "vkCmdResolveImage called with source sample count less than 2.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- // Create two images of sample count 1 and try to Resolve between them
-
- VkImageCreateInfo image_create_info = {};
- image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- image_create_info.pNext = NULL;
- image_create_info.imageType = VK_IMAGE_TYPE_2D;
- image_create_info.format = VK_FORMAT_B8G8R8A8_UNORM;
- image_create_info.extent.width = 32;
- image_create_info.extent.height = 1;
- image_create_info.extent.depth = 1;
- image_create_info.mipLevels = 1;
- image_create_info.arrayLayers = 1;
- image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
- image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
- image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
- image_create_info.flags = 0;
-
- VkImageObj srcImage(m_device);
- srcImage.init(&image_create_info);
- ASSERT_TRUE(srcImage.initialized());
-
- VkImageObj dstImage(m_device);
- dstImage.init(&image_create_info);
- ASSERT_TRUE(dstImage.initialized());
-
- m_commandBuffer->begin();
- VkImageResolve resolveRegion;
- resolveRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- resolveRegion.srcSubresource.mipLevel = 0;
- resolveRegion.srcSubresource.baseArrayLayer = 0;
- resolveRegion.srcSubresource.layerCount = 1;
- resolveRegion.srcOffset.x = 0;
- resolveRegion.srcOffset.y = 0;
- resolveRegion.srcOffset.z = 0;
- resolveRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- resolveRegion.dstSubresource.mipLevel = 0;
- resolveRegion.dstSubresource.baseArrayLayer = 0;
- resolveRegion.dstSubresource.layerCount = 1;
- resolveRegion.dstOffset.x = 0;
- resolveRegion.dstOffset.y = 0;
- resolveRegion.dstOffset.z = 0;
- resolveRegion.extent.width = 1;
- resolveRegion.extent.height = 1;
- resolveRegion.extent.depth = 1;
- m_commandBuffer->ResolveImage(srcImage.handle(), VK_IMAGE_LAYOUT_GENERAL, dstImage.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
- &resolveRegion);
- m_commandBuffer->end();
-
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, ResolveImageHighSampleCount) {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "vkCmdResolveImage called with dest sample count greater than 1.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- // Create two images of sample count 4 and try to Resolve between them
-
- VkImageCreateInfo image_create_info = {};
- image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- image_create_info.pNext = NULL;
- image_create_info.imageType = VK_IMAGE_TYPE_2D;
- image_create_info.format = VK_FORMAT_B8G8R8A8_UNORM;
- image_create_info.extent.width = 32;
- image_create_info.extent.height = 1;
- image_create_info.extent.depth = 1;
- image_create_info.mipLevels = 1;
- image_create_info.arrayLayers = 1;
- image_create_info.samples = VK_SAMPLE_COUNT_4_BIT;
- image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
- // Note: Some implementations expect color attachment usage for any
- // multisample surface
- image_create_info.usage =
- VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
- image_create_info.flags = 0;
-
- VkImageObj srcImage(m_device);
- srcImage.init(&image_create_info);
- ASSERT_TRUE(srcImage.initialized());
-
- VkImageObj dstImage(m_device);
- dstImage.init(&image_create_info);
- ASSERT_TRUE(dstImage.initialized());
-
- m_commandBuffer->begin();
- // Need memory barrier to VK_IMAGE_LAYOUT_GENERAL for source and dest?
- // VK_IMAGE_LAYOUT_UNDEFINED = 0,
- // VK_IMAGE_LAYOUT_GENERAL = 1,
- VkImageResolve resolveRegion;
- resolveRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- resolveRegion.srcSubresource.mipLevel = 0;
- resolveRegion.srcSubresource.baseArrayLayer = 0;
- resolveRegion.srcSubresource.layerCount = 1;
- resolveRegion.srcOffset.x = 0;
- resolveRegion.srcOffset.y = 0;
- resolveRegion.srcOffset.z = 0;
- resolveRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- resolveRegion.dstSubresource.mipLevel = 0;
- resolveRegion.dstSubresource.baseArrayLayer = 0;
- resolveRegion.dstSubresource.layerCount = 1;
- resolveRegion.dstOffset.x = 0;
- resolveRegion.dstOffset.y = 0;
- resolveRegion.dstOffset.z = 0;
- resolveRegion.extent.width = 1;
- resolveRegion.extent.height = 1;
- resolveRegion.extent.depth = 1;
- m_commandBuffer->ResolveImage(srcImage.handle(), VK_IMAGE_LAYOUT_GENERAL, dstImage.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
- &resolveRegion);
- m_commandBuffer->end();
-
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, ResolveImageFormatMismatch) {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_WARNING_BIT_EXT,
- "vkCmdResolveImage called with unmatched source and dest formats.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- // Create two images with different formats and try to resolve between them
- VkImageObj srcImage(m_device);
- VkImageObj dstImage(m_device);
-
- VkImageCreateInfo image_create_info = {};
- image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- image_create_info.pNext = NULL;
- image_create_info.imageType = VK_IMAGE_TYPE_2D;
- image_create_info.format = VK_FORMAT_B8G8R8A8_UNORM;
- image_create_info.extent.width = 32;
- image_create_info.extent.height = 1;
- image_create_info.extent.depth = 1;
- image_create_info.mipLevels = 1;
- image_create_info.arrayLayers = 1;
- image_create_info.samples = VK_SAMPLE_COUNT_2_BIT;
- image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
- // Note: Some implementations expect color attachment usage for any
- // multisample surface
- image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
- image_create_info.flags = 0;
- srcImage.init(&image_create_info);
-
- // Set format to something other than source image
- image_create_info.format = VK_FORMAT_R32_SFLOAT;
- // Note: Some implementations expect color attachment usage for any
- // multisample surface
- image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
- image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
- dstImage.init(&image_create_info);
-
- m_commandBuffer->begin();
- // Need memory barrier to VK_IMAGE_LAYOUT_GENERAL for source and dest?
- // VK_IMAGE_LAYOUT_UNDEFINED = 0,
- // VK_IMAGE_LAYOUT_GENERAL = 1,
- VkImageResolve resolveRegion;
- resolveRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- resolveRegion.srcSubresource.mipLevel = 0;
- resolveRegion.srcSubresource.baseArrayLayer = 0;
- resolveRegion.srcSubresource.layerCount = 1;
- resolveRegion.srcOffset.x = 0;
- resolveRegion.srcOffset.y = 0;
- resolveRegion.srcOffset.z = 0;
- resolveRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- resolveRegion.dstSubresource.mipLevel = 0;
- resolveRegion.dstSubresource.baseArrayLayer = 0;
- resolveRegion.dstSubresource.layerCount = 1;
- resolveRegion.dstOffset.x = 0;
- resolveRegion.dstOffset.y = 0;
- resolveRegion.dstOffset.z = 0;
- resolveRegion.extent.width = 1;
- resolveRegion.extent.height = 1;
- resolveRegion.extent.depth = 1;
- m_commandBuffer->ResolveImage(srcImage.handle(), VK_IMAGE_LAYOUT_GENERAL, dstImage.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
- &resolveRegion);
- m_commandBuffer->end();
-
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, ResolveImageTypeMismatch) {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_WARNING_BIT_EXT,
- "vkCmdResolveImage called with unmatched source and dest image types.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- // Create two images of different types and try to resolve between them
- VkImageObj srcImage(m_device);
- VkImageObj dstImage(m_device);
-
- VkImageCreateInfo image_create_info = {};
- image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- image_create_info.pNext = NULL;
- image_create_info.imageType = VK_IMAGE_TYPE_2D;
- image_create_info.format = VK_FORMAT_B8G8R8A8_UNORM;
- image_create_info.extent.width = 32;
- image_create_info.extent.height = 1;
- image_create_info.extent.depth = 1;
- image_create_info.mipLevels = 1;
- image_create_info.arrayLayers = 1;
- image_create_info.samples = VK_SAMPLE_COUNT_2_BIT;
- image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
- // Note: Some implementations expect color attachment usage for any
- // multisample surface
- image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
- image_create_info.flags = 0;
- srcImage.init(&image_create_info);
-
- image_create_info.imageType = VK_IMAGE_TYPE_1D;
- // Note: Some implementations expect color attachment usage for any
- // multisample surface
- image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
- image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
- dstImage.init(&image_create_info);
-
- m_commandBuffer->begin();
- // Need memory barrier to VK_IMAGE_LAYOUT_GENERAL for source and dest?
- // VK_IMAGE_LAYOUT_UNDEFINED = 0,
- // VK_IMAGE_LAYOUT_GENERAL = 1,
- VkImageResolve resolveRegion;
- resolveRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- resolveRegion.srcSubresource.mipLevel = 0;
- resolveRegion.srcSubresource.baseArrayLayer = 0;
- resolveRegion.srcSubresource.layerCount = 1;
- resolveRegion.srcOffset.x = 0;
- resolveRegion.srcOffset.y = 0;
- resolveRegion.srcOffset.z = 0;
- resolveRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- resolveRegion.dstSubresource.mipLevel = 0;
- resolveRegion.dstSubresource.baseArrayLayer = 0;
- resolveRegion.dstSubresource.layerCount = 1;
- resolveRegion.dstOffset.x = 0;
- resolveRegion.dstOffset.y = 0;
- resolveRegion.dstOffset.z = 0;
- resolveRegion.extent.width = 1;
- resolveRegion.extent.height = 1;
- resolveRegion.extent.depth = 1;
- m_commandBuffer->ResolveImage(srcImage.handle(), VK_IMAGE_LAYOUT_GENERAL, dstImage.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
- &resolveRegion);
- m_commandBuffer->end();
-
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, ResolveImageLayoutMismatch) {
- ASSERT_NO_FATAL_FAILURE(Init());
-
- // Create multisample source and single-sample dest images and try to resolve between them
- VkImageObj srcImage(m_device);
- VkImageObj dstImage(m_device);
-
- VkImageCreateInfo image_create_info = {};
- image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- image_create_info.pNext = NULL;
- image_create_info.imageType = VK_IMAGE_TYPE_2D;
- image_create_info.format = VK_FORMAT_B8G8R8A8_UNORM;
- image_create_info.extent.width = 32;
- image_create_info.extent.height = 32;
- image_create_info.extent.depth = 1;
- image_create_info.mipLevels = 1;
- image_create_info.arrayLayers = 1;
- image_create_info.samples = VK_SAMPLE_COUNT_2_BIT;
- image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
- image_create_info.usage =
- VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
- // Note: Some implementations expect color attachment usage for any
- // multisample surface
- image_create_info.flags = 0;
- srcImage.init(&image_create_info);
- ASSERT_TRUE(srcImage.initialized());
-
- // Note: Some implementations expect color attachment usage for any
- // multisample surface
- image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
- dstImage.init(&image_create_info);
- ASSERT_TRUE(dstImage.initialized());
-
- m_commandBuffer->begin();
- // source image must have valid contents before resolve
- VkClearColorValue clear_color = {{0, 0, 0, 0}};
- VkImageSubresourceRange subresource = {};
- subresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- subresource.layerCount = 1;
- subresource.levelCount = 1;
- srcImage.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
- m_commandBuffer->ClearColorImage(srcImage.image(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &clear_color, 1, &subresource);
- srcImage.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
- dstImage.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
-
- VkImageResolve resolveRegion;
- resolveRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- resolveRegion.srcSubresource.mipLevel = 0;
- resolveRegion.srcSubresource.baseArrayLayer = 0;
- resolveRegion.srcSubresource.layerCount = 1;
- resolveRegion.srcOffset.x = 0;
- resolveRegion.srcOffset.y = 0;
- resolveRegion.srcOffset.z = 0;
- resolveRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- resolveRegion.dstSubresource.mipLevel = 0;
- resolveRegion.dstSubresource.baseArrayLayer = 0;
- resolveRegion.dstSubresource.layerCount = 1;
- resolveRegion.dstOffset.x = 0;
- resolveRegion.dstOffset.y = 0;
- resolveRegion.dstOffset.z = 0;
- resolveRegion.extent.width = 1;
- resolveRegion.extent.height = 1;
- resolveRegion.extent.depth = 1;
- // source image layout mismatch
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdResolveImage-srcImageLayout-00260");
- m_commandBuffer->ResolveImage(srcImage.image(), VK_IMAGE_LAYOUT_GENERAL, dstImage.image(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
- 1, &resolveRegion);
- m_errorMonitor->VerifyFound();
- // dst image layout mismatch
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdResolveImage-dstImageLayout-00262");
- m_commandBuffer->ResolveImage(srcImage.image(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, dstImage.image(), VK_IMAGE_LAYOUT_GENERAL,
- 1, &resolveRegion);
- m_errorMonitor->VerifyFound();
- m_commandBuffer->end();
-}
-
-TEST_F(VkLayerTest, ResolveInvalidSubresource) {
- ASSERT_NO_FATAL_FAILURE(Init());
-
- // Create a multisampled source image and a single-sample destination image, then resolve with out-of-range subresources
- VkImageObj srcImage(m_device);
- VkImageObj dstImage(m_device);
-
- VkImageCreateInfo image_create_info = {};
- image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- image_create_info.pNext = NULL;
- image_create_info.imageType = VK_IMAGE_TYPE_2D;
- image_create_info.format = VK_FORMAT_B8G8R8A8_UNORM;
- image_create_info.extent.width = 32;
- image_create_info.extent.height = 32;
- image_create_info.extent.depth = 1;
- image_create_info.mipLevels = 1;
- image_create_info.arrayLayers = 1;
- image_create_info.samples = VK_SAMPLE_COUNT_2_BIT;
- image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
- image_create_info.usage =
- VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
- // Note: Some implementations expect color attachment usage for any
- // multisample surface
- image_create_info.flags = 0;
- srcImage.init(&image_create_info);
- ASSERT_TRUE(srcImage.initialized());
-
- // Note: Some implementations expect color attachment usage for any
- // multisample surface
- image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
- dstImage.init(&image_create_info);
- ASSERT_TRUE(dstImage.initialized());
-
- m_commandBuffer->begin();
- // source image must have valid contents before resolve
- VkClearColorValue clear_color = {{0, 0, 0, 0}};
- VkImageSubresourceRange subresource = {};
- subresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- subresource.layerCount = 1;
- subresource.levelCount = 1;
- srcImage.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
- m_commandBuffer->ClearColorImage(srcImage.image(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &clear_color, 1, &subresource);
- srcImage.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
- dstImage.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
-
- VkImageResolve resolveRegion;
- resolveRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- resolveRegion.srcSubresource.mipLevel = 0;
- resolveRegion.srcSubresource.baseArrayLayer = 0;
- resolveRegion.srcSubresource.layerCount = 1;
- resolveRegion.srcOffset.x = 0;
- resolveRegion.srcOffset.y = 0;
- resolveRegion.srcOffset.z = 0;
- resolveRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- resolveRegion.dstSubresource.mipLevel = 0;
- resolveRegion.dstSubresource.baseArrayLayer = 0;
- resolveRegion.dstSubresource.layerCount = 1;
- resolveRegion.dstOffset.x = 0;
- resolveRegion.dstOffset.y = 0;
- resolveRegion.dstOffset.z = 0;
- resolveRegion.extent.width = 1;
- resolveRegion.extent.height = 1;
- resolveRegion.extent.depth = 1;
- // invalid source mip level
- resolveRegion.srcSubresource.mipLevel = image_create_info.mipLevels;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdResolveImage-srcSubresource-01709");
- m_commandBuffer->ResolveImage(srcImage.image(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, dstImage.image(),
- VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &resolveRegion);
- m_errorMonitor->VerifyFound();
- resolveRegion.srcSubresource.mipLevel = 0;
- // invalid dest mip level
- resolveRegion.dstSubresource.mipLevel = image_create_info.mipLevels;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdResolveImage-dstSubresource-01710");
- m_commandBuffer->ResolveImage(srcImage.image(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, dstImage.image(),
- VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &resolveRegion);
- m_errorMonitor->VerifyFound();
- resolveRegion.dstSubresource.mipLevel = 0;
- // invalid source array layer range
- resolveRegion.srcSubresource.baseArrayLayer = image_create_info.arrayLayers;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdResolveImage-srcSubresource-01711");
- m_commandBuffer->ResolveImage(srcImage.image(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, dstImage.image(),
- VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &resolveRegion);
- m_errorMonitor->VerifyFound();
- resolveRegion.srcSubresource.baseArrayLayer = 0;
- // invalid dest array layer range
- resolveRegion.dstSubresource.baseArrayLayer = image_create_info.arrayLayers;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdResolveImage-dstSubresource-01712");
- m_commandBuffer->ResolveImage(srcImage.image(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, dstImage.image(),
- VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &resolveRegion);
- m_errorMonitor->VerifyFound();
- resolveRegion.dstSubresource.baseArrayLayer = 0;
-
- m_commandBuffer->end();
-}
-
-TEST_F(VkLayerTest, ClearImageErrors) {
- TEST_DESCRIPTION("Call ClearColorImage w/ a depth|stencil image and ClearDepthStencilImage with a color image.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- m_commandBuffer->begin();
-
- // Color image
- VkClearColorValue clear_color;
- memset(clear_color.uint32, 0, sizeof(uint32_t) * 4);
- const VkFormat color_format = VK_FORMAT_B8G8R8A8_UNORM;
- const int32_t img_width = 32;
- const int32_t img_height = 32;
- VkImageCreateInfo image_create_info = {};
- image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- image_create_info.pNext = NULL;
- image_create_info.imageType = VK_IMAGE_TYPE_2D;
- image_create_info.format = color_format;
- image_create_info.extent.width = img_width;
- image_create_info.extent.height = img_height;
- image_create_info.extent.depth = 1;
- image_create_info.mipLevels = 1;
- image_create_info.arrayLayers = 1;
- image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
- image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
-
- image_create_info.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
- vk_testing::Image color_image_no_transfer;
- color_image_no_transfer.init(*m_device, image_create_info);
-
- image_create_info.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
- vk_testing::Image color_image;
- color_image.init(*m_device, image_create_info);
-
- const VkImageSubresourceRange color_range = vk_testing::Image::subresource_range(image_create_info, VK_IMAGE_ASPECT_COLOR_BIT);
-
- // Depth/Stencil image
- VkClearDepthStencilValue clear_value = {0};
- VkImageCreateInfo ds_image_create_info = vk_testing::Image::create_info();
- ds_image_create_info.imageType = VK_IMAGE_TYPE_2D;
- ds_image_create_info.format = VK_FORMAT_D16_UNORM;
- ds_image_create_info.extent.width = 64;
- ds_image_create_info.extent.height = 64;
- ds_image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
- ds_image_create_info.usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
-
- vk_testing::Image ds_image;
- ds_image.init(*m_device, ds_image_create_info);
-
- const VkImageSubresourceRange ds_range = vk_testing::Image::subresource_range(ds_image_create_info, VK_IMAGE_ASPECT_DEPTH_BIT);
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "vkCmdClearColorImage called with depth/stencil image.");
-
- vkCmdClearColorImage(m_commandBuffer->handle(), ds_image.handle(), VK_IMAGE_LAYOUT_GENERAL, &clear_color, 1, &color_range);
-
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "vkCmdClearColorImage called with image created without VK_IMAGE_USAGE_TRANSFER_DST_BIT");
-
- vkCmdClearColorImage(m_commandBuffer->handle(), color_image_no_transfer.handle(), VK_IMAGE_LAYOUT_GENERAL, &clear_color, 1,
- &color_range);
-
- m_errorMonitor->VerifyFound();
-
- // Call CmdClearDepthStencilImage with color image
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "vkCmdClearDepthStencilImage called without a depth/stencil image.");
-
- vkCmdClearDepthStencilImage(m_commandBuffer->handle(), color_image.handle(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &clear_value,
- 1, &ds_range);
-
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, CommandQueueFlags) {
- TEST_DESCRIPTION(
- "Allocate a command buffer on a queue that does not support graphics and try to issue a graphics-only command");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- uint32_t queueFamilyIndex = m_device->QueueFamilyWithoutCapabilities(VK_QUEUE_GRAPHICS_BIT);
- if (queueFamilyIndex == UINT32_MAX) {
- printf("%s Non-graphics queue family not found; skipped.\n", kSkipPrefix);
- return;
- } else {
- // Create command pool on a non-graphics queue
- VkCommandPoolObj command_pool(m_device, queueFamilyIndex);
-
- // Setup command buffer on pool
- VkCommandBufferObj command_buffer(m_device, &command_pool);
- command_buffer.begin();
-
- // Issue a graphics only command
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-commandBuffer-cmdpool");
- VkViewport viewport = {0, 0, 16, 16, 0, 1};
- command_buffer.SetViewport(0, 1, &viewport);
- m_errorMonitor->VerifyFound();
- }
-}
-
-TEST_F(VkLayerTest, ExecuteUnrecordedSecondaryCB) {
- TEST_DESCRIPTION("Attempt vkCmdExecuteCommands with a CB in the initial state");
- ASSERT_NO_FATAL_FAILURE(Init());
- VkCommandBufferObj secondary(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
- // never record secondary
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdExecuteCommands-pCommandBuffers-00089");
- m_commandBuffer->begin();
- vkCmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary.handle());
- m_errorMonitor->VerifyFound();
- m_commandBuffer->end();
-}
-
-TEST_F(VkLayerTest, ExecuteSecondaryCBWithLayoutMismatch) {
- TEST_DESCRIPTION("Attempt vkCmdExecuteCommands with a CB with incorrect initial layout.");
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
-
- VkImageCreateInfo image_create_info = {};
- image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- image_create_info.pNext = NULL;
- image_create_info.imageType = VK_IMAGE_TYPE_2D;
- image_create_info.format = VK_FORMAT_B8G8R8A8_UNORM;
- image_create_info.extent.width = 32;
- image_create_info.extent.height = 1;
- image_create_info.extent.depth = 1;
- image_create_info.mipLevels = 1;
- image_create_info.arrayLayers = 1;
- image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
- image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
- image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
- image_create_info.flags = 0;
-
- VkImageSubresource image_sub = VkImageObj::subresource(VK_IMAGE_ASPECT_COLOR_BIT, 0, 0);
- VkImageSubresourceRange image_sub_range = VkImageObj::subresource_range(image_sub);
-
- VkImageObj image(m_device);
- image.init(&image_create_info);
- ASSERT_TRUE(image.initialized());
- VkImageMemoryBarrier image_barrier =
- image.image_memory_barrier(0, 0, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, image_sub_range);
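- // Access masks in the barrier template are left zero; only the oldLayout/newLayout transition matters for this layout-tracking test.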
-
- auto pipeline = [&image_barrier](const VkCommandBufferObj &cb, VkImageLayout old_layout, VkImageLayout new_layout) {
- image_barrier.oldLayout = old_layout;
- image_barrier.newLayout = new_layout;
- vkCmdPipelineBarrier(cb.handle(), VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0, nullptr, 0,
- nullptr, 1, &image_barrier);
- };
-
- // Validate that mismatched use of image layout in secondary command buffer is caught at record time
- VkCommandBufferObj secondary(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
- secondary.begin();
- pipeline(secondary, VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
- secondary.end();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "UNASSIGNED-vkCmdExecuteCommands-commandBuffer-00001");
- m_commandBuffer->begin();
- pipeline(*m_commandBuffer, VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
- vkCmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary.handle());
- m_errorMonitor->VerifyFound();
-
- // Validate that we've tracked the changes from the secondary CB correctly
- m_errorMonitor->ExpectSuccess();
- pipeline(*m_commandBuffer, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL);
- m_errorMonitor->VerifyNotFound();
- m_commandBuffer->end();
-
- m_commandBuffer->reset();
- secondary.reset();
-
- // Validate that UNDEFINED doesn't false positive on us
- secondary.begin();
- pipeline(secondary, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
- secondary.end();
- m_commandBuffer->begin();
- pipeline(*m_commandBuffer, VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
- m_errorMonitor->ExpectSuccess();
- vkCmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary.handle());
- m_errorMonitor->VerifyNotFound();
- m_commandBuffer->end();
-}
-
-TEST_F(VkLayerTest, SetDynViewportParamTests) {
- TEST_DESCRIPTION("Test parameters of vkCmdSetViewport without multiViewport feature");
-
- SetTargetApiVersion(VK_API_VERSION_1_1);
- VkPhysicalDeviceFeatures features{};
- ASSERT_NO_FATAL_FAILURE(Init(&features));
-
- const VkViewport vp = {0.0, 0.0, 64.0, 64.0, 0.0, 1.0};
- const VkViewport viewports[] = {vp, vp};
-
- m_commandBuffer->begin();
-
- // array tests
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-firstViewport-01224");
- vkCmdSetViewport(m_commandBuffer->handle(), 1, 1, viewports);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-viewportCount-arraylength");
- vkCmdSetViewport(m_commandBuffer->handle(), 0, 0, nullptr);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-viewportCount-01225");
- vkCmdSetViewport(m_commandBuffer->handle(), 0, 2, viewports);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-firstViewport-01224");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-viewportCount-01225");
- vkCmdSetViewport(m_commandBuffer->handle(), 1, 2, viewports);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-pViewports-parameter");
- vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, nullptr);
- m_errorMonitor->VerifyFound();
-
- // core viewport tests
- using std::vector;
- struct TestCase {
- VkViewport vp;
- std::string vuid;
- };
-
- // Not necessarily the exact boundary values (cast rounding behavior is unspecified), but guaranteed to exceed the limit
- const auto one_past_max_w = NearestGreater(static_cast<float>(m_device->props.limits.maxViewportDimensions[0]));
- const auto one_past_max_h = NearestGreater(static_cast<float>(m_device->props.limits.maxViewportDimensions[1]));
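- // NearestGreater/NearestSmaller are test-framework helpers (assumed to step to the adjacent representable float above/below the given value), so these sit just past the device limits.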
-
- const auto min_bound = m_device->props.limits.viewportBoundsRange[0];
- const auto max_bound = m_device->props.limits.viewportBoundsRange[1];
- const auto one_before_min_bounds = NearestSmaller(min_bound);
- const auto one_past_max_bounds = NearestGreater(max_bound);
-
- const auto below_zero = NearestSmaller(0.0f);
- const auto past_one = NearestGreater(1.0f);
-
- vector<TestCase> test_cases = {
- {{0.0, 0.0, 0.0, 64.0, 0.0, 1.0}, "VUID-VkViewport-width-01770"},
- {{0.0, 0.0, one_past_max_w, 64.0, 0.0, 1.0}, "VUID-VkViewport-width-01771"},
- {{0.0, 0.0, NAN, 64.0, 0.0, 1.0}, "VUID-VkViewport-width-01770"},
- {{0.0, 0.0, 64.0, one_past_max_h, 0.0, 1.0}, "VUID-VkViewport-height-01773"},
- {{one_before_min_bounds, 0.0, 64.0, 64.0, 0.0, 1.0}, "VUID-VkViewport-x-01774"},
- {{one_past_max_bounds, 0.0, 64.0, 64.0, 0.0, 1.0}, "VUID-VkViewport-x-01232"},
- {{NAN, 0.0, 64.0, 64.0, 0.0, 1.0}, "VUID-VkViewport-x-01774"},
- {{0.0, one_before_min_bounds, 64.0, 64.0, 0.0, 1.0}, "VUID-VkViewport-y-01775"},
- {{0.0, NAN, 64.0, 64.0, 0.0, 1.0}, "VUID-VkViewport-y-01775"},
- {{max_bound, 0.0, 1.0, 64.0, 0.0, 1.0}, "VUID-VkViewport-x-01232"},
- {{0.0, max_bound, 64.0, 1.0, 0.0, 1.0}, "VUID-VkViewport-y-01233"},
- {{0.0, 0.0, 64.0, 64.0, below_zero, 1.0}, "VUID-VkViewport-minDepth-01234"},
- {{0.0, 0.0, 64.0, 64.0, past_one, 1.0}, "VUID-VkViewport-minDepth-01234"},
- {{0.0, 0.0, 64.0, 64.0, NAN, 1.0}, "VUID-VkViewport-minDepth-01234"},
- {{0.0, 0.0, 64.0, 64.0, 0.0, below_zero}, "VUID-VkViewport-maxDepth-01235"},
- {{0.0, 0.0, 64.0, 64.0, 0.0, past_one}, "VUID-VkViewport-maxDepth-01235"},
- {{0.0, 0.0, 64.0, 64.0, 0.0, NAN}, "VUID-VkViewport-maxDepth-01235"},
- };
-
- if (DeviceValidationVersion() < VK_API_VERSION_1_1) {
- test_cases.push_back({{0.0, 0.0, 64.0, 0.0, 0.0, 1.0}, "VUID-VkViewport-height-01772"});
- test_cases.push_back({{0.0, 0.0, 64.0, NAN, 0.0, 1.0}, "VUID-VkViewport-height-01772"});
- } else {
- test_cases.push_back({{0.0, 0.0, 64.0, NAN, 0.0, 1.0}, "VUID-VkViewport-height-01773"});
- }
-
- for (const auto &test_case : test_cases) {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.vuid);
- vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &test_case.vp);
- m_errorMonitor->VerifyFound();
- }
-}
-
-TEST_F(VkLayerTest, SetDynViewportParamMaintenance1Tests) {
- TEST_DESCRIPTION("Verify errors are detected on misuse of SetViewport with a negative viewport extension enabled.");
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
- } else {
- printf("%s VK_KHR_maintenance1 extension not supported -- skipping test\n", kSkipPrefix);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- NegHeightViewportTests(m_device, m_commandBuffer, m_errorMonitor);
-}
-
-TEST_F(VkLayerTest, SetDynViewportParamMultiviewportTests) {
- TEST_DESCRIPTION("Test parameters of vkCmdSetViewport with multiViewport feature enabled");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- if (!m_device->phy().features().multiViewport) {
- printf("%s VkPhysicalDeviceFeatures::multiViewport is not supported -- skipping test.\n", kSkipPrefix);
- return;
- }
-
- const auto max_viewports = m_device->props.limits.maxViewports;
- const uint32_t too_many_viewports = 65536 + 1; // let's say this is too much to allocate pViewports for
-
- m_commandBuffer->begin();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-viewportCount-arraylength");
- vkCmdSetViewport(m_commandBuffer->handle(), 0, 0, nullptr);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-pViewports-parameter");
- vkCmdSetViewport(m_commandBuffer->handle(), 0, max_viewports, nullptr);
- m_errorMonitor->VerifyFound();
-
- if (max_viewports >= too_many_viewports) {
- printf("%s VkPhysicalDeviceLimits::maxViewports is too large to practically test against -- skipping part of test.\n",
- kSkipPrefix);
- return;
- }
-
- const VkViewport vp = {0.0, 0.0, 64.0, 64.0, 0.0, 1.0};
- const std::vector<VkViewport> viewports(max_viewports + 1, vp);
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-firstViewport-01223");
- vkCmdSetViewport(m_commandBuffer->handle(), 0, max_viewports + 1, viewports.data());
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-firstViewport-01223");
- vkCmdSetViewport(m_commandBuffer->handle(), max_viewports, 1, viewports.data());
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-firstViewport-01223");
- vkCmdSetViewport(m_commandBuffer->handle(), 1, max_viewports, viewports.data());
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-viewportCount-arraylength");
- vkCmdSetViewport(m_commandBuffer->handle(), 1, 0, viewports.data());
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, BadRenderPassScopeSecondaryCmdBuffer) {
- TEST_DESCRIPTION(
- "Test secondary buffers executed in wrong render pass scope wrt VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkCommandBufferObj sec_cmdbuff_inside_rp(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
- VkCommandBufferObj sec_cmdbuff_outside_rp(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
-
- const VkCommandBufferInheritanceInfo cmdbuff_ii = {
- VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
- nullptr, // pNext
- m_renderPass,
- 0, // subpass
- m_framebuffer,
- };
- const VkCommandBufferBeginInfo cmdbuff_bi_tmpl = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
- nullptr, // pNext
- VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT, &cmdbuff_ii};
-
- VkCommandBufferBeginInfo cmdbuff_inside_rp_bi = cmdbuff_bi_tmpl;
- cmdbuff_inside_rp_bi.flags |= VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT;
- sec_cmdbuff_inside_rp.begin(&cmdbuff_inside_rp_bi);
- sec_cmdbuff_inside_rp.end();
-
- VkCommandBufferBeginInfo cmdbuff_outside_rp_bi = cmdbuff_bi_tmpl;
- cmdbuff_outside_rp_bi.flags &= ~VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT;
- sec_cmdbuff_outside_rp.begin(&cmdbuff_outside_rp_bi);
- sec_cmdbuff_outside_rp.end();
-
- m_commandBuffer->begin();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdExecuteCommands-pCommandBuffers-00100");
- vkCmdExecuteCommands(m_commandBuffer->handle(), 1, &sec_cmdbuff_inside_rp.handle());
- m_errorMonitor->VerifyFound();
-
- const VkRenderPassBeginInfo rp_bi{VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
- nullptr, // pNext
- m_renderPass,
- m_framebuffer,
- {{0, 0}, {32, 32}},
- static_cast<uint32_t>(m_renderPassClearValues.size()),
- m_renderPassClearValues.data()};
- vkCmdBeginRenderPass(m_commandBuffer->handle(), &rp_bi, VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS);
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdExecuteCommands-pCommandBuffers-00096");
- vkCmdExecuteCommands(m_commandBuffer->handle(), 1, &sec_cmdbuff_outside_rp.handle());
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, SecondaryCommandBufferClearColorAttachmentsRenderArea) {
- TEST_DESCRIPTION(
- "Create a secondary command buffer with CmdClearAttachments call that has a rect outside of renderPass renderArea");
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkCommandBufferAllocateInfo command_buffer_allocate_info = {};
- command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
- command_buffer_allocate_info.commandPool = m_commandPool->handle();
- command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_SECONDARY;
- command_buffer_allocate_info.commandBufferCount = 1;
-
- VkCommandBuffer secondary_command_buffer;
- ASSERT_VK_SUCCESS(vkAllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, &secondary_command_buffer));
- VkCommandBufferBeginInfo command_buffer_begin_info = {};
- VkCommandBufferInheritanceInfo command_buffer_inheritance_info = {};
- command_buffer_inheritance_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
- command_buffer_inheritance_info.renderPass = m_renderPass;
- command_buffer_inheritance_info.framebuffer = m_framebuffer;
-
- command_buffer_begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
- command_buffer_begin_info.flags =
- VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT | VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT;
- command_buffer_begin_info.pInheritanceInfo = &command_buffer_inheritance_info;
-
- vkBeginCommandBuffer(secondary_command_buffer, &command_buffer_begin_info);
- VkClearAttachment color_attachment;
- color_attachment.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- color_attachment.clearValue.color.float32[0] = 0;
- color_attachment.clearValue.color.float32[1] = 0;
- color_attachment.clearValue.color.float32[2] = 0;
- color_attachment.clearValue.color.float32[3] = 0;
- color_attachment.colorAttachment = 0;
- // x extent of 257 exceeds render area of 256
- VkClearRect clear_rect = {{{0, 0}, {257, 32}}, 0, 1};
- vkCmdClearAttachments(secondary_command_buffer, 1, &color_attachment, 1, &clear_rect);
- vkEndCommandBuffer(secondary_command_buffer);
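- // The out-of-bounds rect is expected to be reported at vkCmdExecuteCommands time, when the secondary's clear rect can be checked against the primary's render area.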
- m_commandBuffer->begin();
- vkCmdBeginRenderPass(m_commandBuffer->handle(), &m_renderPassBeginInfo, VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS);
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearAttachments-pRects-00016");
- vkCmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary_command_buffer);
- m_errorMonitor->VerifyFound();
-
- vkCmdEndRenderPass(m_commandBuffer->handle());
- m_commandBuffer->end();
-}
-
-TEST_F(VkLayerTest, PushDescriptorSetCmdPushBadArgs) {
- TEST_DESCRIPTION("Attempt to push a push descriptor set with incorrect arguments.");
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- } else {
- printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix,
- VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
- } else {
- printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- auto push_descriptor_prop = GetPushDescriptorProperties(instance(), gpu());
- if (push_descriptor_prop.maxPushDescriptors < 1) {
- // Some implementations report an invalid maxPushDescriptors of 0
- printf("%s maxPushDescriptors is zero, skipping tests\n", kSkipPrefix);
- return;
- }
-
- // Create ordinary and push descriptor set layout
- VkDescriptorSetLayoutBinding binding = {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr};
- const VkDescriptorSetLayoutObj ds_layout(m_device, {binding});
- ASSERT_TRUE(ds_layout.initialized());
- const VkDescriptorSetLayoutObj push_ds_layout(m_device, {binding}, VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR);
- ASSERT_TRUE(push_ds_layout.initialized());
-
- // Now use the descriptor set layouts to create a pipeline layout
- const VkPipelineLayoutObj pipeline_layout(m_device, {&push_ds_layout, &ds_layout});
- ASSERT_TRUE(pipeline_layout.initialized());
-
- // Create a descriptor to push
- const uint32_t buffer_data[4] = {4, 5, 6, 7};
- VkConstantBufferObj buffer_obj(m_device, sizeof(buffer_data), &buffer_data);
- ASSERT_TRUE(buffer_obj.initialized());
-
- // Create a "write" struct, noting that the buffer_info cannot be a temporary arg (the return from write_descriptor_set
- // references its data), and the DescriptorSet() can be temporary, because the value is ignored
- VkDescriptorBufferInfo buffer_info = {buffer_obj.handle(), 0, VK_WHOLE_SIZE};
-
- VkWriteDescriptorSet descriptor_write = vk_testing::Device::write_descriptor_set(
- vk_testing::DescriptorSet(), 0, 0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, &buffer_info);
-
- // Find address of extension call and make the call
- PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR =
- (PFN_vkCmdPushDescriptorSetKHR)vkGetDeviceProcAddr(m_device->device(), "vkCmdPushDescriptorSetKHR");
- ASSERT_TRUE(vkCmdPushDescriptorSetKHR != nullptr);
-
- // Section 1: Queue family matching/capabilities.
- // Create command pool on a non-graphics queue
- const uint32_t no_gfx_qfi = m_device->QueueFamilyMatching(VK_QUEUE_COMPUTE_BIT, VK_QUEUE_GRAPHICS_BIT);
- const uint32_t transfer_only_qfi =
- m_device->QueueFamilyMatching(VK_QUEUE_TRANSFER_BIT, (VK_QUEUE_COMPUTE_BIT | VK_QUEUE_GRAPHICS_BIT));
- if ((UINT32_MAX == transfer_only_qfi) && (UINT32_MAX == no_gfx_qfi)) {
- printf("%s No compute or transfer only queue family, skipping bindpoint and queue tests.\n", kSkipPrefix);
- } else {
- const uint32_t err_qfi = (UINT32_MAX == no_gfx_qfi) ? transfer_only_qfi : no_gfx_qfi;
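- // Prefer a compute-capable, non-graphics family if one exists; otherwise fall back to transfer-only. Either way the pool below cannot support the graphics bind point used in the push.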
-
- VkCommandPoolObj command_pool(m_device, err_qfi);
- ASSERT_TRUE(command_pool.initialized());
- VkCommandBufferObj command_buffer(m_device, &command_pool);
- ASSERT_TRUE(command_buffer.initialized());
- command_buffer.begin();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdPushDescriptorSetKHR-pipelineBindPoint-00363");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-descriptorType-00330");
- if (err_qfi == transfer_only_qfi) {
- // Since this queue supports neither the graphics nor the compute bind point, we will also get the cmdpool error
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdPushDescriptorSetKHR-commandBuffer-cmdpool");
- }
- vkCmdPushDescriptorSetKHR(command_buffer.handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
- &descriptor_write);
- m_errorMonitor->VerifyFound();
- command_buffer.end();
-
- // If we succeed in testing only one condition above, we need to test the other below.
- if ((UINT32_MAX != transfer_only_qfi) && (err_qfi != transfer_only_qfi)) {
- // Need to test the neither compute/gfx supported case separately.
- VkCommandPoolObj tran_command_pool(m_device, transfer_only_qfi);
- ASSERT_TRUE(tran_command_pool.initialized());
- VkCommandBufferObj tran_command_buffer(m_device, &tran_command_pool);
- ASSERT_TRUE(tran_command_buffer.initialized());
- tran_command_buffer.begin();
-
- // On a transfer-only queue we can't avoid triggering all of these errors at once
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdPushDescriptorSetKHR-pipelineBindPoint-00363");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-descriptorType-00330");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdPushDescriptorSetKHR-commandBuffer-cmdpool");
- vkCmdPushDescriptorSetKHR(tran_command_buffer.handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
- &descriptor_write);
- m_errorMonitor->VerifyFound();
- tran_command_buffer.end();
- }
- }
-
- // Push to the non-push binding
- m_commandBuffer->begin();
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPushDescriptorSetKHR-set-00365");
- vkCmdPushDescriptorSetKHR(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 1, 1,
- &descriptor_write);
- m_errorMonitor->VerifyFound();
-
- // Specify set out of bounds
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPushDescriptorSetKHR-set-00364");
- vkCmdPushDescriptorSetKHR(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 2, 1,
- &descriptor_write);
- m_errorMonitor->VerifyFound();
- m_commandBuffer->end();
-
- // This is a test for VUID-vkCmdPushDescriptorSetKHR-commandBuffer-recording
- // TODO: Add VALIDATION_ERROR_ code support to core_validation::ValidateCmd
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "You must call vkBeginCommandBuffer() before this call to vkCmdPushDescriptorSetKHR()");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-descriptorType-00330");
- vkCmdPushDescriptorSetKHR(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
- &descriptor_write);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, SetDynScissorParamTests) {
- TEST_DESCRIPTION("Test parameters of vkCmdSetScissor without multiViewport feature");
-
- VkPhysicalDeviceFeatures features{};
- ASSERT_NO_FATAL_FAILURE(Init(&features));
-
- const VkRect2D scissor = {{0, 0}, {16, 16}};
- const VkRect2D scissors[] = {scissor, scissor};
-
- m_commandBuffer->begin();
-
- // array tests
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetScissor-firstScissor-00593");
- vkCmdSetScissor(m_commandBuffer->handle(), 1, 1, scissors);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetScissor-scissorCount-arraylength");
- vkCmdSetScissor(m_commandBuffer->handle(), 0, 0, nullptr);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetScissor-scissorCount-00594");
- vkCmdSetScissor(m_commandBuffer->handle(), 0, 2, scissors);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetScissor-firstScissor-00593");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetScissor-scissorCount-00594");
- vkCmdSetScissor(m_commandBuffer->handle(), 1, 2, scissors);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetScissor-pScissors-parameter");
- vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, nullptr);
- m_errorMonitor->VerifyFound();
-
- struct TestCase {
- VkRect2D scissor;
- std::string vuid;
- };
-
- std::vector<TestCase> test_cases = {{{{-1, 0}, {16, 16}}, "VUID-vkCmdSetScissor-x-00595"},
- {{{0, -1}, {16, 16}}, "VUID-vkCmdSetScissor-x-00595"},
- {{{1, 0}, {INT32_MAX, 16}}, "VUID-vkCmdSetScissor-offset-00596"},
- {{{INT32_MAX, 0}, {1, 16}}, "VUID-vkCmdSetScissor-offset-00596"},
- {{{0, 0}, {uint32_t{INT32_MAX} + 1, 16}}, "VUID-vkCmdSetScissor-offset-00596"},
- {{{0, 1}, {16, INT32_MAX}}, "VUID-vkCmdSetScissor-offset-00597"},
- {{{0, INT32_MAX}, {16, 1}}, "VUID-vkCmdSetScissor-offset-00597"},
- {{{0, 0}, {16, uint32_t{INT32_MAX} + 1}}, "VUID-vkCmdSetScissor-offset-00597"}};
-
- for (const auto &test_case : test_cases) {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.vuid);
- vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &test_case.scissor);
- m_errorMonitor->VerifyFound();
- }
-
- m_commandBuffer->end();
-}
-
-TEST_F(VkLayerTest, SetDynScissorParamMultiviewportTests) {
- TEST_DESCRIPTION("Test parameters of vkCmdSetScissor with multiViewport feature enabled");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- if (!m_device->phy().features().multiViewport) {
- printf("%s VkPhysicalDeviceFeatures::multiViewport is not supported -- skipping test.\n", kSkipPrefix);
- return;
- }
-
- const auto max_scissors = m_device->props.limits.maxViewports;
- const uint32_t too_many_scissors = 65536 + 1; // let's say this is too much to allocate pScissors for
-
- m_commandBuffer->begin();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetScissor-scissorCount-arraylength");
- vkCmdSetScissor(m_commandBuffer->handle(), 0, 0, nullptr);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetScissor-pScissors-parameter");
- vkCmdSetScissor(m_commandBuffer->handle(), 0, max_scissors, nullptr);
- m_errorMonitor->VerifyFound();
-
- if (max_scissors >= too_many_scissors) {
- printf("%s VkPhysicalDeviceLimits::maxViewports is too large to practically test against -- skipping part of test.\n",
- kSkipPrefix);
- return;
- }
-
- const VkRect2D scissor = {{0, 0}, {16, 16}};
- const std::vector<VkRect2D> scissors(max_scissors + 1, scissor);
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetScissor-firstScissor-00592");
- vkCmdSetScissor(m_commandBuffer->handle(), 0, max_scissors + 1, scissors.data());
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetScissor-firstScissor-00592");
- vkCmdSetScissor(m_commandBuffer->handle(), max_scissors, 1, scissors.data());
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetScissor-firstScissor-00592");
- vkCmdSetScissor(m_commandBuffer->handle(), 1, max_scissors, scissors.data());
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetScissor-scissorCount-arraylength");
- vkCmdSetScissor(m_commandBuffer->handle(), 1, 0, scissors.data());
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, DrawIndirect) {
- TEST_DESCRIPTION("Test covered valid usage for vkCmdDrawIndirect");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- CreatePipelineHelper pipe(*this);
- pipe.InitInfo();
- const VkDynamicState dyn_states[] = {VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR};
- VkPipelineDynamicStateCreateInfo dyn_state_ci = {};
- dyn_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
- dyn_state_ci.dynamicStateCount = size(dyn_states);
- dyn_state_ci.pDynamicStates = dyn_states;
- pipe.dyn_state_ci_ = dyn_state_ci;
- pipe.InitState();
- pipe.CreateGraphicsPipeline();
-
- m_commandBuffer->begin();
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
-
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
- vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_layout_.handle(), 0, 1,
- &pipe.descriptor_set_->set_, 0, NULL);
-
- VkViewport viewport = {0, 0, 16, 16, 0, 1};
- vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
- VkRect2D scissor = {{0, 0}, {16, 16}};
- vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
-
- VkBufferCreateInfo buffer_create_info = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO};
- buffer_create_info.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
- buffer_create_info.size = sizeof(VkDrawIndirectCommand);
- VkBufferObj draw_buffer;
- draw_buffer.init(*m_device, buffer_create_info);
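- // The buffer is intentionally created without VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT so the usage-flag check below fires.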
-
- // VUID-vkCmdDrawIndirect-buffer-02709
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndirect-buffer-02709");
- vkCmdDrawIndirect(m_commandBuffer->handle(), draw_buffer.handle(), 0, 1, sizeof(VkDrawIndirectCommand));
- m_errorMonitor->VerifyFound();
-
- m_commandBuffer->EndRenderPass();
- m_commandBuffer->end();
-}
-
-TEST_F(VkLayerTest, DrawIndirectCountKHR) {
- TEST_DESCRIPTION("Test covered valid usage for vkCmdDrawIndirectCountKHR");
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME);
- } else {
- printf(" VK_KHR_draw_indirect_count Extension not supported, skipping test\n");
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitState());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkMemoryRequirements memory_requirements;
- VkMemoryAllocateInfo memory_allocate_info = {VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO};
-
- auto vkCmdDrawIndirectCountKHR =
- (PFN_vkCmdDrawIndirectCountKHR)vkGetDeviceProcAddr(m_device->device(), "vkCmdDrawIndirectCountKHR");
-
- CreatePipelineHelper pipe(*this);
- pipe.InitInfo();
- const VkDynamicState dyn_states[] = {VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR};
- VkPipelineDynamicStateCreateInfo dyn_state_ci = {};
- dyn_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
- dyn_state_ci.dynamicStateCount = size(dyn_states);
- dyn_state_ci.pDynamicStates = dyn_states;
- pipe.dyn_state_ci_ = dyn_state_ci;
- pipe.InitState();
- pipe.CreateGraphicsPipeline();
-
- m_commandBuffer->begin();
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
-
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
- vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_layout_.handle(), 0, 1,
- &pipe.descriptor_set_->set_, 0, NULL);
-
- VkViewport viewport = {0, 0, 16, 16, 0, 1};
- vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
- VkRect2D scissor = {{0, 0}, {16, 16}};
- vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
-
- VkBufferCreateInfo buffer_create_info = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO};
- buffer_create_info.size = sizeof(VkDrawIndirectCommand);
- buffer_create_info.usage = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT;
- VkBuffer draw_buffer;
- vkCreateBuffer(m_device->device(), &buffer_create_info, nullptr, &draw_buffer);
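- // draw_buffer is a raw VkBuffer with no memory bound yet, so the first draw should trigger the "bound to memory" check; memory is bound further down for the remaining cases.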
-
- VkBufferCreateInfo count_buffer_create_info = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO};
- count_buffer_create_info.size = sizeof(uint32_t);
- count_buffer_create_info.usage = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT;
- VkBufferObj count_buffer;
- count_buffer.init(*m_device, count_buffer_create_info);
-
- // VUID-vkCmdDrawIndirectCountKHR-buffer-02708
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndirectCountKHR-buffer-02708");
- vkCmdDrawIndirectCountKHR(m_commandBuffer->handle(), draw_buffer, 0, count_buffer.handle(), 0, 1,
- sizeof(VkDrawIndirectCommand));
- m_errorMonitor->VerifyFound();
-
- vkGetBufferMemoryRequirements(m_device->device(), draw_buffer, &memory_requirements);
- memory_allocate_info.allocationSize = memory_requirements.size;
- m_device->phy().set_memory_type(memory_requirements.memoryTypeBits, &memory_allocate_info, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
- VkDeviceMemory draw_buffer_memory;
- vkAllocateMemory(m_device->device(), &memory_allocate_info, NULL, &draw_buffer_memory);
- vkBindBufferMemory(m_device->device(), draw_buffer, draw_buffer_memory, 0);
-
- VkBuffer count_buffer_unbound;
- vkCreateBuffer(m_device->device(), &count_buffer_create_info, nullptr, &count_buffer_unbound);
-
- // VUID-vkCmdDrawIndirectCountKHR-countBuffer-02714
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndirectCountKHR-countBuffer-02714");
- vkCmdDrawIndirectCountKHR(m_commandBuffer->handle(), draw_buffer, 0, count_buffer_unbound, 0, 1, sizeof(VkDrawIndirectCommand));
- m_errorMonitor->VerifyFound();
-
- // VUID-vkCmdDrawIndirectCountKHR-offset-02710
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndirectCountKHR-offset-02710");
- vkCmdDrawIndirectCountKHR(m_commandBuffer->handle(), draw_buffer, 1, count_buffer.handle(), 0, 1,
- sizeof(VkDrawIndirectCommand));
- m_errorMonitor->VerifyFound();
-
- // VUID-vkCmdDrawIndirectCountKHR-countBufferOffset-02716
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndirectCountKHR-countBufferOffset-02716");
- vkCmdDrawIndirectCountKHR(m_commandBuffer->handle(), draw_buffer, 0, count_buffer.handle(), 1, 1,
- sizeof(VkDrawIndirectCommand));
- m_errorMonitor->VerifyFound();
-
- // VUID-vkCmdDrawIndirectCountKHR-stride-03110
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndirectCountKHR-stride-03110");
- vkCmdDrawIndirectCountKHR(m_commandBuffer->handle(), draw_buffer, 0, count_buffer.handle(), 0, 1, 1);
- m_errorMonitor->VerifyFound();
-
- // TODO: These covered VUIDs aren't tested. There is also no test coverage for the core Vulkan 1.0 vkCmdDraw* equivalent of
- // these:
- // VUID-vkCmdDrawIndirectCountKHR-renderPass-02684
- // VUID-vkCmdDrawIndirectCountKHR-subpass-02685
- // VUID-vkCmdDrawIndirectCountKHR-commandBuffer-02701
-
- m_commandBuffer->EndRenderPass();
- m_commandBuffer->end();
-
- vkDestroyBuffer(m_device->device(), draw_buffer, 0);
- vkDestroyBuffer(m_device->device(), count_buffer_unbound, 0);
-
- vkFreeMemory(m_device->device(), draw_buffer_memory, 0);
-}
-
-TEST_F(VkLayerTest, DrawIndexedIndirectCountKHR) {
- TEST_DESCRIPTION("Test covered valid usage for vkCmdDrawIndexedIndirectCountKHR");
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME);
- } else {
- printf(" VK_KHR_draw_indirect_count Extension not supported, skipping test\n");
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitState());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- auto vkCmdDrawIndexedIndirectCountKHR =
- (PFN_vkCmdDrawIndexedIndirectCountKHR)vkGetDeviceProcAddr(m_device->device(), "vkCmdDrawIndexedIndirectCountKHR");
-
- CreatePipelineHelper pipe(*this);
- pipe.InitInfo();
- const VkDynamicState dyn_states[] = {VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR};
- VkPipelineDynamicStateCreateInfo dyn_state_ci = {};
- dyn_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
- dyn_state_ci.dynamicStateCount = size(dyn_states);
- dyn_state_ci.pDynamicStates = dyn_states;
- pipe.dyn_state_ci_ = dyn_state_ci;
- pipe.InitState();
- pipe.CreateGraphicsPipeline();
-
- m_commandBuffer->begin();
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
-
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
- vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_layout_.handle(), 0, 1,
- &pipe.descriptor_set_->set_, 0, NULL);
-
- VkViewport viewport = {0, 0, 16, 16, 0, 1};
- vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
- VkRect2D scissor = {{0, 0}, {16, 16}};
- vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
-
- VkBufferCreateInfo buffer_create_info = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO};
- buffer_create_info.size = sizeof(VkDrawIndexedIndirectCommand);
- buffer_create_info.usage = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT;
- VkBufferObj draw_buffer;
- draw_buffer.init(*m_device, buffer_create_info);
-
- VkBufferCreateInfo count_buffer_create_info = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO};
- count_buffer_create_info.size = sizeof(uint32_t);
- count_buffer_create_info.usage = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT;
- VkBufferObj count_buffer;
- count_buffer.init(*m_device, count_buffer_create_info);
-
- VkBufferCreateInfo index_buffer_create_info = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO};
- index_buffer_create_info.size = sizeof(uint32_t);
- index_buffer_create_info.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
- VkBufferObj index_buffer;
- index_buffer.init(*m_device, index_buffer_create_info);
-
- // VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-02701 (partial - only tests whether the index buffer is bound)
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-02701");
- vkCmdDrawIndexedIndirectCountKHR(m_commandBuffer->handle(), draw_buffer.handle(), 0, count_buffer.handle(), 0, 1,
- sizeof(VkDrawIndexedIndirectCommand));
- m_errorMonitor->VerifyFound();
-
- vkCmdBindIndexBuffer(m_commandBuffer->handle(), index_buffer.handle(), 0, VK_INDEX_TYPE_UINT32);
-
- VkBuffer draw_buffer_unbound;
- vkCreateBuffer(m_device->device(), &count_buffer_create_info, nullptr, &draw_buffer_unbound);
-
- // VUID-vkCmdDrawIndexedIndirectCountKHR-buffer-02708
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndexedIndirectCountKHR-buffer-02708");
- vkCmdDrawIndexedIndirectCountKHR(m_commandBuffer->handle(), draw_buffer_unbound, 0, count_buffer.handle(), 0, 1,
- sizeof(VkDrawIndexedIndirectCommand));
- m_errorMonitor->VerifyFound();
-
- VkBuffer count_buffer_unbound;
- vkCreateBuffer(m_device->device(), &count_buffer_create_info, nullptr, &count_buffer_unbound);
-
- // VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-02714
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-02714");
- vkCmdDrawIndexedIndirectCountKHR(m_commandBuffer->handle(), draw_buffer.handle(), 0, count_buffer_unbound, 0, 1,
- sizeof(VkDrawIndexedIndirectCommand));
- m_errorMonitor->VerifyFound();
-
- // VUID-vkCmdDrawIndexedIndirectCountKHR-offset-02710
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndexedIndirectCountKHR-offset-02710");
- vkCmdDrawIndexedIndirectCountKHR(m_commandBuffer->handle(), draw_buffer.handle(), 1, count_buffer.handle(), 0, 1,
- sizeof(VkDrawIndexedIndirectCommand));
- m_errorMonitor->VerifyFound();
-
- // VUID-vkCmdDrawIndexedIndirectCountKHR-countBufferOffset-02716
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdDrawIndexedIndirectCountKHR-countBufferOffset-02716");
- vkCmdDrawIndexedIndirectCountKHR(m_commandBuffer->handle(), draw_buffer.handle(), 0, count_buffer.handle(), 1, 1,
- sizeof(VkDrawIndexedIndirectCommand));
- m_errorMonitor->VerifyFound();
-
- // VUID-vkCmdDrawIndexedIndirectCountKHR-stride-03142
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndexedIndirectCountKHR-stride-03142");
- vkCmdDrawIndexedIndirectCountKHR(m_commandBuffer->handle(), draw_buffer.handle(), 0, count_buffer.handle(), 0, 1, 1);
- m_errorMonitor->VerifyFound();
-
- // TODO: These covered VUIDs aren't tested. There is also no test coverage for the core Vulkan 1.0 vkCmdDraw* equivalent of
- // these:
- // VUID-vkCmdDrawIndexedIndirectCountKHR-renderPass-02684
- // VUID-vkCmdDrawIndexedIndirectCountKHR-subpass-02685
- // VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-02701 (partial)
-
- m_commandBuffer->EndRenderPass();
- m_commandBuffer->end();
-
- vkDestroyBuffer(m_device->device(), draw_buffer_unbound, 0);
- vkDestroyBuffer(m_device->device(), count_buffer_unbound, 0);
-}
-
-TEST_F(VkLayerTest, ExclusiveScissorNV) {
- TEST_DESCRIPTION("Test VK_NV_scissor_exclusive with multiViewport disabled.");
-
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- } else {
- printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
- VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- std::array<const char *, 1> required_device_extensions = {{VK_NV_SCISSOR_EXCLUSIVE_EXTENSION_NAME}};
- for (auto device_extension : required_device_extensions) {
- if (DeviceExtensionSupported(gpu(), nullptr, device_extension)) {
- m_device_extension_names.push_back(device_extension);
- } else {
- printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, device_extension);
- return;
- }
- }
-
- PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
- (PFN_vkGetPhysicalDeviceFeatures2KHR)vkGetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
- ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
-
- // Create a device that enables exclusive scissor but disables multiViewport
- auto exclusive_scissor_features = lvl_init_struct<VkPhysicalDeviceExclusiveScissorFeaturesNV>();
- auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&exclusive_scissor_features);
- vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
-
- features2.features.multiViewport = VK_FALSE;
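- // Query the supported features, then force multiViewport off while leaving exclusive scissor enabled; several of the VUIDs under test only apply when multiViewport is disabled.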
-
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- if (!m_device->phy().properties().limits.maxViewports) {
- printf("%s Device doesn't support the necessary number of viewports, skipping test.\n", kSkipPrefix);
- return;
- }
-
- // Based on PSOViewportStateTests
- {
- VkViewport viewport = {0.0f, 0.0f, 64.0f, 64.0f, 0.0f, 1.0f};
- VkViewport viewports[] = {viewport, viewport};
- VkRect2D scissor = {{0, 0}, {64, 64}};
- VkRect2D scissors[100] = {scissor, scissor};
-
- using std::vector;
- struct TestCase {
- uint32_t viewport_count;
- VkViewport *viewports;
- uint32_t scissor_count;
- VkRect2D *scissors;
- uint32_t exclusive_scissor_count;
- VkRect2D *exclusive_scissors;
-
- vector<std::string> vuids;
- };
-
- vector<TestCase> test_cases = {
- {1,
- viewports,
- 1,
- scissors,
- 2,
- scissors,
- {"VUID-VkPipelineViewportExclusiveScissorStateCreateInfoNV-exclusiveScissorCount-02027",
- "VUID-VkPipelineViewportExclusiveScissorStateCreateInfoNV-exclusiveScissorCount-02029"}},
- {1,
- viewports,
- 1,
- scissors,
- 100,
- scissors,
- {"VUID-VkPipelineViewportExclusiveScissorStateCreateInfoNV-exclusiveScissorCount-02027",
- "VUID-VkPipelineViewportExclusiveScissorStateCreateInfoNV-exclusiveScissorCount-02028",
- "VUID-VkPipelineViewportExclusiveScissorStateCreateInfoNV-exclusiveScissorCount-02029"}},
- {1,
- viewports,
- 1,
- scissors,
- 1,
- nullptr,
- {"VUID-VkPipelineViewportExclusiveScissorStateCreateInfoNV-pDynamicStates-02030"}},
- };
-
- for (const auto &test_case : test_cases) {
- VkPipelineViewportExclusiveScissorStateCreateInfoNV exc = {
- VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV};
-
- const auto break_vp = [&test_case, &exc](CreatePipelineHelper &helper) {
- helper.vp_state_ci_.viewportCount = test_case.viewport_count;
- helper.vp_state_ci_.pViewports = test_case.viewports;
- helper.vp_state_ci_.scissorCount = test_case.scissor_count;
- helper.vp_state_ci_.pScissors = test_case.scissors;
- helper.vp_state_ci_.pNext = &exc;
-
- exc.exclusiveScissorCount = test_case.exclusive_scissor_count;
- exc.pExclusiveScissors = test_case.exclusive_scissors;
- };
- CreatePipelineHelper::OneshotTest(*this, break_vp, VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.vuids);
- }
- }
-
- // Based on SetDynScissorParamTests
- {
- auto vkCmdSetExclusiveScissorNV =
- (PFN_vkCmdSetExclusiveScissorNV)vkGetDeviceProcAddr(m_device->device(), "vkCmdSetExclusiveScissorNV");
-
- const VkRect2D scissor = {{0, 0}, {16, 16}};
- const VkRect2D scissors[] = {scissor, scissor};
-
- m_commandBuffer->begin();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdSetExclusiveScissorNV-firstExclusiveScissor-02035");
- vkCmdSetExclusiveScissorNV(m_commandBuffer->handle(), 1, 1, scissors);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "vkCmdSetExclusiveScissorNV: parameter exclusiveScissorCount must be greater than 0");
- vkCmdSetExclusiveScissorNV(m_commandBuffer->handle(), 0, 0, nullptr);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdSetExclusiveScissorNV-exclusiveScissorCount-02036");
- vkCmdSetExclusiveScissorNV(m_commandBuffer->handle(), 0, 2, scissors);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "vkCmdSetExclusiveScissorNV: parameter exclusiveScissorCount must be greater than 0");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdSetExclusiveScissorNV-firstExclusiveScissor-02035");
- vkCmdSetExclusiveScissorNV(m_commandBuffer->handle(), 1, 0, scissors);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdSetExclusiveScissorNV-firstExclusiveScissor-02035");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdSetExclusiveScissorNV-exclusiveScissorCount-02036");
- vkCmdSetExclusiveScissorNV(m_commandBuffer->handle(), 1, 2, scissors);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "vkCmdSetExclusiveScissorNV: required parameter pExclusiveScissors specified as NULL");
- vkCmdSetExclusiveScissorNV(m_commandBuffer->handle(), 0, 1, nullptr);
- m_errorMonitor->VerifyFound();
-
- struct TestCase {
- VkRect2D scissor;
- std::string vuid;
- };
-
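-        // Each case supplies an invalid scissor rect (negative offset, or offset + extent overflowing int32) paired with the VUID it should trigger.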
- std::vector<TestCase> test_cases = {
- {{{-1, 0}, {16, 16}}, "VUID-vkCmdSetExclusiveScissorNV-x-02037"},
- {{{0, -1}, {16, 16}}, "VUID-vkCmdSetExclusiveScissorNV-x-02037"},
- {{{1, 0}, {INT32_MAX, 16}}, "VUID-vkCmdSetExclusiveScissorNV-offset-02038"},
- {{{INT32_MAX, 0}, {1, 16}}, "VUID-vkCmdSetExclusiveScissorNV-offset-02038"},
- {{{0, 0}, {uint32_t{INT32_MAX} + 1, 16}}, "VUID-vkCmdSetExclusiveScissorNV-offset-02038"},
- {{{0, 1}, {16, INT32_MAX}}, "VUID-vkCmdSetExclusiveScissorNV-offset-02039"},
- {{{0, INT32_MAX}, {16, 1}}, "VUID-vkCmdSetExclusiveScissorNV-offset-02039"},
- {{{0, 0}, {16, uint32_t{INT32_MAX} + 1}}, "VUID-vkCmdSetExclusiveScissorNV-offset-02039"}};
-
- for (const auto &test_case : test_cases) {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.vuid);
- vkCmdSetExclusiveScissorNV(m_commandBuffer->handle(), 0, 1, &test_case.scissor);
- m_errorMonitor->VerifyFound();
- }
-
- m_commandBuffer->end();
- }
-}
-
-TEST_F(VkLayerTest, MeshShaderNV) {
- TEST_DESCRIPTION("Test VK_NV_mesh_shader.");
-
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- } else {
- printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
- VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- std::array<const char *, 1> required_device_extensions = {{VK_NV_MESH_SHADER_EXTENSION_NAME}};
- for (auto device_extension : required_device_extensions) {
- if (DeviceExtensionSupported(gpu(), nullptr, device_extension)) {
- m_device_extension_names.push_back(device_extension);
- } else {
- printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, device_extension);
- return;
- }
- }
-
- if (DeviceIsMockICD() || DeviceSimulation()) {
-        printf("%s Not supported by MockICD, skipping tests\n", kSkipPrefix);
- return;
- }
-
- PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
- (PFN_vkGetPhysicalDeviceFeatures2KHR)vkGetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
- ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
-
- // Create a device that enables mesh_shader
- auto mesh_shader_features = lvl_init_struct<VkPhysicalDeviceMeshShaderFeaturesNV>();
- auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&mesh_shader_features);
- vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
- features2.features.multiDrawIndirect = VK_FALSE;
-
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- static const char vertShaderText[] =
- "#version 450\n"
- "vec2 vertices[3];\n"
- "void main() {\n"
- " vertices[0] = vec2(-1.0, -1.0);\n"
- " vertices[1] = vec2( 1.0, -1.0);\n"
- " vertices[2] = vec2( 0.0, 1.0);\n"
- " gl_Position = vec4(vertices[gl_VertexIndex % 3], 0.0, 1.0);\n"
- " gl_PointSize = 1.0f;\n"
- "}\n";
-
- static const char meshShaderText[] =
- "#version 450\n"
- "#extension GL_NV_mesh_shader : require\n"
- "layout(local_size_x = 1) in;\n"
- "layout(max_vertices = 3) out;\n"
- "layout(max_primitives = 1) out;\n"
- "layout(triangles) out;\n"
- "void main() {\n"
- " gl_MeshVerticesNV[0].gl_Position = vec4(-1.0, -1.0, 0, 1);\n"
- " gl_MeshVerticesNV[1].gl_Position = vec4( 1.0, -1.0, 0, 1);\n"
- " gl_MeshVerticesNV[2].gl_Position = vec4( 0.0, 1.0, 0, 1);\n"
- " gl_PrimitiveIndicesNV[0] = 0;\n"
- " gl_PrimitiveIndicesNV[1] = 1;\n"
- " gl_PrimitiveIndicesNV[2] = 2;\n"
- " gl_PrimitiveCountNV = 1;\n"
- "}\n";
-
- VkShaderObj vs(m_device, vertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj ms(m_device, meshShaderText, VK_SHADER_STAGE_MESH_BIT_NV, this);
- VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- // Test pipeline creation
- {
- // can't mix mesh with vertex
- const auto break_vp = [&](CreatePipelineHelper &helper) {
- helper.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo(), ms.GetStageCreateInfo()};
- };
- CreatePipelineHelper::OneshotTest(*this, break_vp, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- vector<std::string>({"VUID-VkGraphicsPipelineCreateInfo-pStages-02095"}));
-
- // vertex or mesh must be present
- const auto break_vp2 = [&](CreatePipelineHelper &helper) { helper.shader_stages_ = {fs.GetStageCreateInfo()}; };
- CreatePipelineHelper::OneshotTest(*this, break_vp2, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- vector<std::string>({"VUID-VkGraphicsPipelineCreateInfo-stage-02096"}));
-
-        // vertex input and input assembly state must be valid when a vertex stage is present
- const auto break_vp3 = [&](CreatePipelineHelper &helper) {
- helper.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
- helper.gp_ci_.pVertexInputState = nullptr;
- helper.gp_ci_.pInputAssemblyState = nullptr;
- };
- CreatePipelineHelper::OneshotTest(*this, break_vp3, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- vector<std::string>({"VUID-VkGraphicsPipelineCreateInfo-pStages-02097",
- "VUID-VkGraphicsPipelineCreateInfo-pStages-02098"}));
- }
-
- PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV =
- (PFN_vkCmdDrawMeshTasksIndirectNV)vkGetInstanceProcAddr(instance(), "vkCmdDrawMeshTasksIndirectNV");
-
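-    // A buffer holding a single uint with drawCount = 2 should trigger both the buffer-size error and the multiDrawIndirect-disabled error below.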
- VkBufferCreateInfo buffer_create_info = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO};
- buffer_create_info.size = sizeof(uint32_t);
- buffer_create_info.usage = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT;
- VkBuffer buffer;
- VkResult result = vkCreateBuffer(m_device->device(), &buffer_create_info, nullptr, &buffer);
- ASSERT_VK_SUCCESS(result);
-
- m_commandBuffer->begin();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawMeshTasksIndirectNV-drawCount-02146");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawMeshTasksIndirectNV-drawCount-02718");
- vkCmdDrawMeshTasksIndirectNV(m_commandBuffer->handle(), buffer, 0, 2, 0);
- m_errorMonitor->VerifyFound();
-
- m_commandBuffer->end();
-
- vkDestroyBuffer(m_device->device(), buffer, 0);
-}
-
-TEST_F(VkLayerTest, MeshShaderDisabledNV) {
- TEST_DESCRIPTION("Test VK_NV_mesh_shader VUs with NV_mesh_shader disabled.");
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkEvent event;
- VkEventCreateInfo event_create_info{};
- event_create_info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
- vkCreateEvent(m_device->device(), &event_create_info, nullptr, &event);
-
- m_commandBuffer->begin();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetEvent-stageMask-02107");
- vkCmdSetEvent(m_commandBuffer->handle(), event, VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetEvent-stageMask-02108");
- vkCmdSetEvent(m_commandBuffer->handle(), event, VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdResetEvent-stageMask-02109");
- vkCmdResetEvent(m_commandBuffer->handle(), event, VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdResetEvent-stageMask-02110");
- vkCmdResetEvent(m_commandBuffer->handle(), event, VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdWaitEvents-srcStageMask-02111");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdWaitEvents-dstStageMask-02113");
- vkCmdWaitEvents(m_commandBuffer->handle(), 1, &event, VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV,
- VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV, 0, nullptr, 0, nullptr, 0, nullptr);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdWaitEvents-srcStageMask-02112");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdWaitEvents-dstStageMask-02114");
- vkCmdWaitEvents(m_commandBuffer->handle(), 1, &event, VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV,
- VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV, 0, nullptr, 0, nullptr, 0, nullptr);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-srcStageMask-02115");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-dstStageMask-02117");
- vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV, VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV, 0,
- 0, nullptr, 0, nullptr, 0, nullptr);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-srcStageMask-02116");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-dstStageMask-02118");
- vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV, VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV, 0,
- 0, nullptr, 0, nullptr, 0, nullptr);
- m_errorMonitor->VerifyFound();
-
- m_commandBuffer->end();
-
- VkSemaphoreCreateInfo semaphore_create_info = {};
- semaphore_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
- VkSemaphore semaphore;
- ASSERT_VK_SUCCESS(vkCreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore));
-
- VkPipelineStageFlags stage_flags = VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV | VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV;
- VkSubmitInfo submit_info = {};
-
-    // Signal the semaphore so the next submission can wait on it.
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.signalSemaphoreCount = 1;
- submit_info.pSignalSemaphores = &semaphore;
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
- m_errorMonitor->VerifyNotFound();
-
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.signalSemaphoreCount = 0;
- submit_info.pSignalSemaphores = nullptr;
- submit_info.waitSemaphoreCount = 1;
- submit_info.pWaitSemaphores = &semaphore;
- submit_info.pWaitDstStageMask = &stage_flags;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkSubmitInfo-pWaitDstStageMask-02089");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkSubmitInfo-pWaitDstStageMask-02090");
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
- m_errorMonitor->VerifyFound();
-
- vkQueueWaitIdle(m_device->m_queue);
-
- VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkPipelineShaderStageCreateInfo meshStage = {VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO};
- meshStage = vs.GetStageCreateInfo();
- meshStage.stage = VK_SHADER_STAGE_MESH_BIT_NV;
- VkPipelineShaderStageCreateInfo taskStage = {VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO};
- taskStage = vs.GetStageCreateInfo();
- taskStage.stage = VK_SHADER_STAGE_TASK_BIT_NV;
-
- // mesh and task shaders not supported
- const auto break_vp = [&](CreatePipelineHelper &helper) {
- helper.shader_stages_ = {meshStage, taskStage, vs.GetStageCreateInfo()};
- };
- CreatePipelineHelper::OneshotTest(
- *this, break_vp, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- vector<std::string>({"VUID-VkPipelineShaderStageCreateInfo-pName-00707", "VUID-VkPipelineShaderStageCreateInfo-pName-00707",
- "VUID-VkPipelineShaderStageCreateInfo-stage-02091",
- "VUID-VkPipelineShaderStageCreateInfo-stage-02092"}));
-
- vkDestroyEvent(m_device->device(), event, nullptr);
- vkDestroySemaphore(m_device->device(), semaphore, nullptr);
-}
diff --git a/tests/vklayertests_descriptor_renderpass_framebuffer.cpp b/tests/vklayertests_descriptor_renderpass_framebuffer.cpp
deleted file mode 100644
index c765ea411..000000000
--- a/tests/vklayertests_descriptor_renderpass_framebuffer.cpp
+++ /dev/null
@@ -1,7242 +0,0 @@
-/*
- * Copyright (c) 2015-2019 The Khronos Group Inc.
- * Copyright (c) 2015-2019 Valve Corporation
- * Copyright (c) 2015-2019 LunarG, Inc.
- * Copyright (c) 2015-2019 Google, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Author: Chia-I Wu <olvaffe@gmail.com>
- * Author: Chris Forbes <chrisf@ijw.co.nz>
- * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
- * Author: Mark Lobodzinski <mark@lunarg.com>
- * Author: Mike Stroyan <mike@LunarG.com>
- * Author: Tobin Ehlis <tobine@google.com>
- * Author: Tony Barbour <tony@LunarG.com>
- * Author: Cody Northrop <cnorthrop@google.com>
- * Author: Dave Houlton <daveh@lunarg.com>
- * Author: Jeremy Kniager <jeremyk@lunarg.com>
- * Author: Shannon McPherson <shannon@lunarg.com>
- * Author: John Zulauf <jzulauf@lunarg.com>
- */
-
-#include "cast_utils.h"
-#include "layer_validation_tests.h"
-
-TEST_F(VkLayerTest, GpuValidationArrayOOBGraphicsShaders) {
- TEST_DESCRIPTION(
- "GPU validation: Verify detection of out-of-bounds descriptor array indexing and use of uninitialized descriptors.");
- if (!VkRenderFramework::DeviceCanDraw()) {
- printf("%s GPU-Assisted validation test requires a driver that can draw.\n", kSkipPrefix);
- return;
- }
-
- VkValidationFeatureEnableEXT enables[] = {VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT};
- VkValidationFeaturesEXT features = {};
- features.sType = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT;
- features.enabledValidationFeatureCount = 1;
- features.pEnabledValidationFeatures = enables;
- bool descriptor_indexing = CheckDescriptorIndexingSupportAndInitFramework(this, m_instance_extension_names,
- m_device_extension_names, &features, m_errorMonitor);
- VkPhysicalDeviceFeatures2KHR features2 = {};
- auto indexing_features = lvl_init_struct<VkPhysicalDeviceDescriptorIndexingFeaturesEXT>();
- if (descriptor_indexing) {
- PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
- (PFN_vkGetPhysicalDeviceFeatures2KHR)vkGetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
- ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
-
- features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&indexing_features);
- vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
-
- if (!indexing_features.runtimeDescriptorArray || !indexing_features.descriptorBindingSampledImageUpdateAfterBind ||
- !indexing_features.descriptorBindingPartiallyBound || !indexing_features.descriptorBindingVariableDescriptorCount ||
- !indexing_features.shaderSampledImageArrayNonUniformIndexing ||
- !indexing_features.shaderStorageBufferArrayNonUniformIndexing) {
- printf("Not all descriptor indexing features supported, skipping descriptor indexing tests\n");
- descriptor_indexing = false;
- }
- }
-
- VkCommandPoolCreateFlags pool_flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2, pool_flags));
- if (m_device->props.apiVersion < VK_API_VERSION_1_1) {
- printf("%s GPU-Assisted validation test requires Vulkan 1.1+.\n", kSkipPrefix);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitViewport());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
-    // Make a uniform buffer, passed to the shader, that holds the invalid array index.
- uint32_t qfi = 0;
- VkBufferCreateInfo bci = {};
- bci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
- bci.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
- bci.size = 1024;
- bci.queueFamilyIndexCount = 1;
- bci.pQueueFamilyIndices = &qfi;
- VkBufferObj buffer0;
- VkMemoryPropertyFlags mem_props = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
- buffer0.init(*m_device, bci, mem_props);
-
- bci.usage = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
- // Make another buffer to populate the buffer array to be indexed
- VkBufferObj buffer1;
- buffer1.init(*m_device, bci, mem_props);
-
- void *layout_pnext = nullptr;
- void *allocate_pnext = nullptr;
- auto pool_create_flags = 0;
- auto layout_create_flags = 0;
- VkDescriptorBindingFlagsEXT ds_binding_flags[2] = {};
- VkDescriptorSetLayoutBindingFlagsCreateInfoEXT layout_createinfo_binding_flags[1] = {};
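-    // When descriptor indexing is available, mark binding 1 partially bound and update-after-bind so one array element can intentionally be left unwritten.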
- if (descriptor_indexing) {
- ds_binding_flags[0] = 0;
- ds_binding_flags[1] = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT | VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT;
-
- layout_createinfo_binding_flags[0].sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT;
- layout_createinfo_binding_flags[0].pNext = NULL;
- layout_createinfo_binding_flags[0].bindingCount = 2;
- layout_createinfo_binding_flags[0].pBindingFlags = ds_binding_flags;
- layout_create_flags = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT;
- pool_create_flags = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT;
- layout_pnext = layout_createinfo_binding_flags;
- }
-
- // Prepare descriptors
- OneOffDescriptorSet descriptor_set(m_device,
- {
- {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
- {1, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 6, VK_SHADER_STAGE_ALL, nullptr},
- },
- layout_create_flags, layout_pnext, pool_create_flags);
-
- VkDescriptorSetVariableDescriptorCountAllocateInfoEXT variable_count = {};
- uint32_t desc_counts;
- if (descriptor_indexing) {
- layout_create_flags = 0;
- pool_create_flags = 0;
- ds_binding_flags[1] =
- VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT | VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT;
- desc_counts = 6; // We'll reserve 8 spaces in the layout, but the descriptor will only use 6
- variable_count.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT;
- variable_count.descriptorSetCount = 1;
- variable_count.pDescriptorCounts = &desc_counts;
- allocate_pnext = &variable_count;
- }
-
- OneOffDescriptorSet descriptor_set_variable(m_device,
- {
- {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
- {1, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 8, VK_SHADER_STAGE_ALL, nullptr},
- },
- layout_create_flags, layout_pnext, pool_create_flags, allocate_pnext);
-
- const VkPipelineLayoutObj pipeline_layout(m_device, {&descriptor_set.layout_});
- const VkPipelineLayoutObj pipeline_layout_variable(m_device, {&descriptor_set_variable.layout_});
- VkTextureObj texture(m_device, nullptr);
- VkSamplerObj sampler(m_device);
-
- VkDescriptorBufferInfo buffer_info[1] = {};
- buffer_info[0].buffer = buffer0.handle();
- buffer_info[0].offset = 0;
- buffer_info[0].range = sizeof(uint32_t);
-
- VkDescriptorImageInfo image_info[6] = {};
- for (int i = 0; i < 6; i++) {
- image_info[i] = texture.DescriptorImageInfo();
- image_info[i].sampler = sampler.handle();
- image_info[i].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
- }
-
- VkWriteDescriptorSet descriptor_writes[2] = {};
- descriptor_writes[0].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
- descriptor_writes[0].dstSet = descriptor_set.set_; // descriptor_set;
- descriptor_writes[0].dstBinding = 0;
- descriptor_writes[0].descriptorCount = 1;
- descriptor_writes[0].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
- descriptor_writes[0].pBufferInfo = buffer_info;
- descriptor_writes[1].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
- descriptor_writes[1].dstSet = descriptor_set.set_; // descriptor_set;
- descriptor_writes[1].dstBinding = 1;
- if (descriptor_indexing)
- descriptor_writes[1].descriptorCount = 5; // Intentionally don't write index 5
- else
- descriptor_writes[1].descriptorCount = 6;
- descriptor_writes[1].descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
- descriptor_writes[1].pImageInfo = image_info;
- vkUpdateDescriptorSets(m_device->device(), 2, descriptor_writes, 0, NULL);
- if (descriptor_indexing) {
- descriptor_writes[0].dstSet = descriptor_set_variable.set_;
- descriptor_writes[1].dstSet = descriptor_set_variable.set_;
- vkUpdateDescriptorSets(m_device->device(), 2, descriptor_writes, 0, NULL);
- }
-
- ds_binding_flags[0] = 0;
- ds_binding_flags[1] = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT;
-
- // Resources for buffer tests
- OneOffDescriptorSet descriptor_set_buffer(m_device,
- {
- {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
- {1, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 6, VK_SHADER_STAGE_ALL, nullptr},
- },
- 0, layout_pnext, 0);
-
- const VkPipelineLayoutObj pipeline_layout_buffer(m_device, {&descriptor_set_buffer.layout_});
-
- VkDescriptorBufferInfo buffer_test_buffer_info[7] = {};
- buffer_test_buffer_info[0].buffer = buffer0.handle();
- buffer_test_buffer_info[0].offset = 0;
- buffer_test_buffer_info[0].range = sizeof(uint32_t);
-
- for (int i = 1; i < 7; i++) {
- buffer_test_buffer_info[i].buffer = buffer1.handle();
- buffer_test_buffer_info[i].offset = 0;
- buffer_test_buffer_info[i].range = 4 * sizeof(float);
- }
-
- if (descriptor_indexing) {
- VkWriteDescriptorSet buffer_descriptor_writes[2] = {};
- buffer_descriptor_writes[0].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
- buffer_descriptor_writes[0].dstSet = descriptor_set_buffer.set_; // descriptor_set;
- buffer_descriptor_writes[0].dstBinding = 0;
- buffer_descriptor_writes[0].descriptorCount = 1;
- buffer_descriptor_writes[0].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
- buffer_descriptor_writes[0].pBufferInfo = buffer_test_buffer_info;
- buffer_descriptor_writes[1].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
- buffer_descriptor_writes[1].dstSet = descriptor_set_buffer.set_; // descriptor_set;
- buffer_descriptor_writes[1].dstBinding = 1;
- buffer_descriptor_writes[1].descriptorCount = 5; // Intentionally don't write index 5
- buffer_descriptor_writes[1].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
- buffer_descriptor_writes[1].pBufferInfo = &buffer_test_buffer_info[1];
- vkUpdateDescriptorSets(m_device->device(), 2, buffer_descriptor_writes, 0, NULL);
- }
-
- // Shader programs for array OOB test in vertex stage:
- // - The vertex shader fetches the invalid index from the uniform buffer and uses it to make an invalid index into another
- // array.
- char const *vsSource_vert =
- "#version 450\n"
- "\n"
- "layout(std140, set = 0, binding = 0) uniform foo { uint tex_index[1]; } uniform_index_buffer;\n"
- "layout(set = 0, binding = 1) uniform sampler2D tex[6];\n"
- "vec2 vertices[3];\n"
- "void main(){\n"
- " vertices[0] = vec2(-1.0, -1.0);\n"
- " vertices[1] = vec2( 1.0, -1.0);\n"
- " vertices[2] = vec2( 0.0, 1.0);\n"
- " gl_Position = vec4(vertices[gl_VertexIndex % 3], 0.0, 1.0);\n"
- " gl_Position += 1e-30 * texture(tex[uniform_index_buffer.tex_index[0]], vec2(0, 0));\n"
- "}\n";
- char const *fsSource_vert =
- "#version 450\n"
- "\n"
- "layout(set = 0, binding = 1) uniform sampler2D tex[6];\n"
- "layout(location = 0) out vec4 uFragColor;\n"
- "void main(){\n"
- " uFragColor = texture(tex[0], vec2(0, 0));\n"
- "}\n";
-
- // Shader programs for array OOB test in fragment stage:
- // - The vertex shader fetches the invalid index from the uniform buffer and passes it to the fragment shader.
- // - The fragment shader makes the invalid array access.
- char const *vsSource_frag =
- "#version 450\n"
- "\n"
- "layout(std140, binding = 0) uniform foo { uint tex_index[1]; } uniform_index_buffer;\n"
- "layout(location = 0) out flat uint index;\n"
- "vec2 vertices[3];\n"
- "void main(){\n"
- " vertices[0] = vec2(-1.0, -1.0);\n"
- " vertices[1] = vec2( 1.0, -1.0);\n"
- " vertices[2] = vec2( 0.0, 1.0);\n"
- " gl_Position = vec4(vertices[gl_VertexIndex % 3], 0.0, 1.0);\n"
- " index = uniform_index_buffer.tex_index[0];\n"
- "}\n";
- char const *fsSource_frag =
- "#version 450\n"
- "\n"
- "layout(set = 0, binding = 1) uniform sampler2D tex[6];\n"
- "layout(location = 0) out vec4 uFragColor;\n"
- "layout(location = 0) in flat uint index;\n"
- "void main(){\n"
- " uFragColor = texture(tex[index], vec2(0, 0));\n"
- "}\n";
- char const *fsSource_frag_runtime =
- "#version 450\n"
- "#extension GL_EXT_nonuniform_qualifier : enable\n"
- "\n"
- "layout(set = 0, binding = 1) uniform sampler2D tex[];\n"
- "layout(location = 0) out vec4 uFragColor;\n"
- "layout(location = 0) in flat uint index;\n"
- "void main(){\n"
- " uFragColor = texture(tex[index], vec2(0, 0));\n"
- "}\n";
- char const *fsSource_buffer =
- "#version 450\n"
- "#extension GL_EXT_nonuniform_qualifier : enable\n "
- "\n"
- "layout(set = 0, binding = 1) buffer foo { vec4 val; } colors[];\n"
- "layout(location = 0) out vec4 uFragColor;\n"
- "layout(location = 0) in flat uint index;\n"
- "void main(){\n"
- " uFragColor = colors[index].val;\n"
- "}\n";
- char const *gsSource =
- "#version 450\n"
- "#extension GL_EXT_nonuniform_qualifier : enable\n "
- "layout(triangles) in;\n"
- "layout(triangle_strip, max_vertices=3) out;\n"
- "layout(location=0) in VertexData { vec4 x; } gs_in[];\n"
- "layout(std140, set = 0, binding = 0) uniform ufoo { uint index; } uniform_index_buffer;\n"
- "layout(set = 0, binding = 1) buffer bfoo { vec4 val; } adds[];\n"
- "void main() {\n"
- " gl_Position = gs_in[0].x + adds[uniform_index_buffer.index].val.x;\n"
- " EmitVertex();\n"
- "}\n";
- static const char *tesSource =
- "#version 450\n"
- "#extension GL_EXT_nonuniform_qualifier : enable\n "
- "layout(std140, set = 0, binding = 0) uniform ufoo { uint index; } uniform_index_buffer;\n"
- "layout(set = 0, binding = 1) buffer bfoo { vec4 val; } adds[];\n"
- "layout(triangles, equal_spacing, cw) in;\n"
- "void main() {\n"
- " gl_Position = adds[uniform_index_buffer.index].val;\n"
- "}\n";
-
- struct TestCase {
- char const *vertex_source;
- char const *fragment_source;
- char const *geometry_source;
- char const *tess_ctrl_source;
- char const *tess_eval_source;
- bool debug;
- const VkPipelineLayoutObj *pipeline_layout;
- const OneOffDescriptorSet *descriptor_set;
- uint32_t index;
- char const *expected_error;
- };
-
- std::vector<TestCase> tests;
- tests.push_back({vsSource_vert, fsSource_vert, nullptr, nullptr, nullptr, false, &pipeline_layout, &descriptor_set, 25,
- "Index of 25 used to index descriptor array of length 6."});
- tests.push_back({vsSource_frag, fsSource_frag, nullptr, nullptr, nullptr, false, &pipeline_layout, &descriptor_set, 25,
- "Index of 25 used to index descriptor array of length 6."});
-#if !defined(ANDROID)
- // The Android test framework uses shaderc for online compilations. Even when configured to compile with debug info,
- // shaderc seems to drop the OpLine instructions from the shader binary. This causes the following two tests to fail
- // on Android platforms. Skip these tests until the shaderc issue is understood/resolved.
- tests.push_back({vsSource_vert, fsSource_vert, nullptr, nullptr, nullptr, true, &pipeline_layout, &descriptor_set, 25,
- "gl_Position += 1e-30 * texture(tex[uniform_index_buffer.tex_index[0]], vec2(0, 0));"});
- tests.push_back({vsSource_frag, fsSource_frag, nullptr, nullptr, nullptr, true, &pipeline_layout, &descriptor_set, 25,
- "uFragColor = texture(tex[index], vec2(0, 0));"});
-#endif
- if (descriptor_indexing) {
- tests.push_back({vsSource_frag, fsSource_frag_runtime, nullptr, nullptr, nullptr, false, &pipeline_layout, &descriptor_set,
- 25, "Index of 25 used to index descriptor array of length 6."});
- tests.push_back({vsSource_frag, fsSource_frag_runtime, nullptr, nullptr, nullptr, false, &pipeline_layout, &descriptor_set,
- 5, "Descriptor index 5 is uninitialized"});
-        // Pick 6 below because it is less than the maximum (8) declared in the layout, but out of bounds for the variable count (6) actually allocated
- tests.push_back({vsSource_frag, fsSource_frag_runtime, nullptr, nullptr, nullptr, false, &pipeline_layout_variable,
- &descriptor_set_variable, 6, "Index of 6 used to index descriptor array of length 6."});
- tests.push_back({vsSource_frag, fsSource_frag_runtime, nullptr, nullptr, nullptr, false, &pipeline_layout_variable,
- &descriptor_set_variable, 5, "Descriptor index 5 is uninitialized"});
- tests.push_back({vsSource_frag, fsSource_buffer, nullptr, nullptr, nullptr, false, &pipeline_layout_buffer,
- &descriptor_set_buffer, 25, "Index of 25 used to index descriptor array of length 6."});
- tests.push_back({vsSource_frag, fsSource_buffer, nullptr, nullptr, nullptr, false, &pipeline_layout_buffer,
- &descriptor_set_buffer, 5, "Descriptor index 5 is uninitialized"});
- if (m_device->phy().features().geometryShader) {
- // OOB Geometry
- tests.push_back({bindStateVertShaderText, bindStateFragShaderText, gsSource, nullptr, nullptr, false,
- &pipeline_layout_buffer, &descriptor_set_buffer, 25, "Stage = Geometry"});
- // Uninitialized Geometry
- tests.push_back({bindStateVertShaderText, bindStateFragShaderText, gsSource, nullptr, nullptr, false,
- &pipeline_layout_buffer, &descriptor_set_buffer, 5, "Stage = Geometry"});
- }
- if (m_device->phy().features().tessellationShader) {
- tests.push_back({bindStateVertShaderText, bindStateFragShaderText, nullptr, bindStateTscShaderText, tesSource, false,
- &pipeline_layout_buffer, &descriptor_set_buffer, 25, "Stage = Tessellation Eval"});
- tests.push_back({bindStateVertShaderText, bindStateFragShaderText, nullptr, bindStateTscShaderText, tesSource, false,
- &pipeline_layout_buffer, &descriptor_set_buffer, 5, "Stage = Tessellation Eval"});
- }
- }
-
- VkViewport viewport = m_viewports[0];
- VkRect2D scissors = m_scissors[0];
-
- VkSubmitInfo submit_info = {};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &m_commandBuffer->handle();
-
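-    // For each case: build the pipeline, record a draw, write the offending index into buffer0, submit, and expect the GPU-AV message.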
- for (const auto &iter : tests) {
- VkResult err;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, iter.expected_error);
- VkShaderObj vs(m_device, iter.vertex_source, VK_SHADER_STAGE_VERTEX_BIT, this, "main", iter.debug);
- VkShaderObj fs(m_device, iter.fragment_source, VK_SHADER_STAGE_FRAGMENT_BIT, this, "main", iter.debug);
- VkShaderObj *gs = nullptr;
- VkShaderObj *tcs = nullptr;
- VkShaderObj *tes = nullptr;
- VkPipelineObj pipe(m_device);
- pipe.AddShader(&vs);
- pipe.AddShader(&fs);
- if (iter.geometry_source) {
- gs = new VkShaderObj(m_device, iter.geometry_source, VK_SHADER_STAGE_GEOMETRY_BIT, this, "main", iter.debug);
- pipe.AddShader(gs);
- }
- if (iter.tess_ctrl_source && iter.tess_eval_source) {
- tcs = new VkShaderObj(m_device, iter.tess_ctrl_source, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, this, "main",
- iter.debug);
- tes = new VkShaderObj(m_device, iter.tess_eval_source, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, this, "main",
- iter.debug);
- pipe.AddShader(tcs);
- pipe.AddShader(tes);
- VkPipelineInputAssemblyStateCreateInfo iasci{VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO, nullptr, 0,
- VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, VK_FALSE};
- VkPipelineTessellationDomainOriginStateCreateInfo tessellationDomainOriginStateInfo = {
- VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO, VK_NULL_HANDLE,
- VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT};
-
- VkPipelineTessellationStateCreateInfo tsci{VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO,
- &tessellationDomainOriginStateInfo, 0, 3};
- pipe.SetTessellation(&tsci);
- pipe.SetInputAssembly(&iasci);
- }
- pipe.AddDefaultColorAttachment();
- err = pipe.CreateVKPipeline(iter.pipeline_layout->handle(), renderPass());
- ASSERT_VK_SUCCESS(err);
- m_commandBuffer->begin();
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
- vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, iter.pipeline_layout->handle(), 0, 1,
- &iter.descriptor_set->set_, 0, nullptr);
- vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
- vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissors);
- vkCmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
- vkCmdEndRenderPass(m_commandBuffer->handle());
- m_commandBuffer->end();
- uint32_t *data = (uint32_t *)buffer0.memory().map();
- data[0] = iter.index;
- buffer0.memory().unmap();
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
- vkQueueWaitIdle(m_device->m_queue);
- m_errorMonitor->VerifyFound();
- if (gs) {
- delete gs;
- }
- if (tcs && tes) {
- delete tcs;
- delete tes;
- }
- }
- auto c_queue = m_device->GetDefaultComputeQueue();
- if (c_queue && descriptor_indexing) {
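-        // Repeat the out-of-bounds and uninitialized-descriptor checks on a compute pipeline using the storage-buffer descriptor set.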
- char const *csSource =
- "#version 450\n"
- "#extension GL_EXT_nonuniform_qualifier : enable\n "
- "layout(set = 0, binding = 0) uniform ufoo { uint index; } u_index;"
- "layout(set = 0, binding = 1) buffer StorageBuffer {\n"
- " uint data;\n"
- "} Data[];\n"
- "void main() {\n"
- " Data[(u_index.index - 1)].data = Data[u_index.index].data;\n"
- "}\n";
-
- auto shader_module = new VkShaderObj(m_device, csSource, VK_SHADER_STAGE_COMPUTE_BIT, this);
-
- VkPipelineShaderStageCreateInfo stage;
- stage.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
- stage.pNext = nullptr;
- stage.flags = 0;
- stage.stage = VK_SHADER_STAGE_COMPUTE_BIT;
- stage.module = shader_module->handle();
- stage.pName = "main";
- stage.pSpecializationInfo = nullptr;
-
- // CreateComputePipelines
- VkComputePipelineCreateInfo pipeline_info = {};
- pipeline_info.sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO;
- pipeline_info.pNext = nullptr;
- pipeline_info.flags = 0;
- pipeline_info.layout = pipeline_layout_buffer.handle();
- pipeline_info.basePipelineHandle = VK_NULL_HANDLE;
- pipeline_info.basePipelineIndex = -1;
- pipeline_info.stage = stage;
-
- VkPipeline c_pipeline;
- vkCreateComputePipelines(device(), VK_NULL_HANDLE, 1, &pipeline_info, nullptr, &c_pipeline);
- VkCommandBufferBeginInfo begin_info = {};
- VkCommandBufferInheritanceInfo hinfo = {};
- hinfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
- begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
- begin_info.pInheritanceInfo = &hinfo;
-
- m_commandBuffer->begin(&begin_info);
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_COMPUTE, c_pipeline);
- vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_COMPUTE, pipeline_layout_buffer.handle(), 0, 1,
- &descriptor_set_buffer.set_, 0, nullptr);
- vkCmdDispatch(m_commandBuffer->handle(), 1, 1, 1);
- m_commandBuffer->end();
-
- // Uninitialized
- uint32_t *data = (uint32_t *)buffer0.memory().map();
- data[0] = 5;
- buffer0.memory().unmap();
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Stage = Compute");
- vkQueueSubmit(c_queue->handle(), 1, &submit_info, VK_NULL_HANDLE);
- vkQueueWaitIdle(m_device->m_queue);
- m_errorMonitor->VerifyFound();
- // Out of Bounds
- data = (uint32_t *)buffer0.memory().map();
- data[0] = 25;
- buffer0.memory().unmap();
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Stage = Compute");
- vkQueueSubmit(c_queue->handle(), 1, &submit_info, VK_NULL_HANDLE);
- vkQueueWaitIdle(m_device->m_queue);
- m_errorMonitor->VerifyFound();
- vkDestroyPipeline(m_device->handle(), c_pipeline, NULL);
- vkDestroyShaderModule(m_device->handle(), shader_module->handle(), NULL);
- }
- return;
-}
-
-TEST_F(VkLayerTest, GpuValidationArrayOOBRayTracingShaders) {
- TEST_DESCRIPTION(
- "GPU validation: Verify detection of out-of-bounds descriptor array indexing and use of uninitialized descriptors for "
- "ray tracing shaders.");
-
- std::array<const char *, 1> required_instance_extensions = {VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME};
- for (auto instance_extension : required_instance_extensions) {
- if (InstanceExtensionSupported(instance_extension)) {
- m_instance_extension_names.push_back(instance_extension);
- } else {
- printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix, instance_extension);
- return;
- }
- }
-
- VkValidationFeatureEnableEXT validation_feature_enables[] = {VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT};
- VkValidationFeaturesEXT validation_features = {};
- validation_features.sType = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT;
- validation_features.enabledValidationFeatureCount = 1;
- validation_features.pEnabledValidationFeatures = validation_feature_enables;
- bool descriptor_indexing = CheckDescriptorIndexingSupportAndInitFramework(
- this, m_instance_extension_names, m_device_extension_names, &validation_features, m_errorMonitor);
-
- if (DeviceIsMockICD() || DeviceSimulation()) {
- printf("%s Test not supported by MockICD, skipping tests\n", kSkipPrefix);
- return;
- }
-
- std::array<const char *, 2> required_device_extensions = {VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME,
- VK_NV_RAY_TRACING_EXTENSION_NAME};
- for (auto device_extension : required_device_extensions) {
- if (DeviceExtensionSupported(gpu(), nullptr, device_extension)) {
- m_device_extension_names.push_back(device_extension);
- } else {
- printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, device_extension);
- return;
- }
- }
-
- VkPhysicalDeviceFeatures2KHR features2 = {};
- auto indexing_features = lvl_init_struct<VkPhysicalDeviceDescriptorIndexingFeaturesEXT>();
- if (descriptor_indexing) {
- PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
- (PFN_vkGetPhysicalDeviceFeatures2KHR)vkGetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
- ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
-
- features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&indexing_features);
- vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
-
- if (!indexing_features.runtimeDescriptorArray || !indexing_features.descriptorBindingPartiallyBound ||
- !indexing_features.descriptorBindingSampledImageUpdateAfterBind ||
- !indexing_features.descriptorBindingVariableDescriptorCount) {
- printf("Not all descriptor indexing features supported, skipping descriptor indexing tests\n");
- descriptor_indexing = false;
- }
- }
- VkCommandPoolCreateFlags pool_flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2, pool_flags));
-
- PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR =
- (PFN_vkGetPhysicalDeviceProperties2KHR)vkGetInstanceProcAddr(instance(), "vkGetPhysicalDeviceProperties2KHR");
- ASSERT_TRUE(vkGetPhysicalDeviceProperties2KHR != nullptr);
-
- auto ray_tracing_properties = lvl_init_struct<VkPhysicalDeviceRayTracingPropertiesNV>();
- auto properties2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&ray_tracing_properties);
- vkGetPhysicalDeviceProperties2KHR(gpu(), &properties2);
- if (ray_tracing_properties.maxTriangleCount == 0) {
- printf("%s Did not find required ray tracing properties; skipped.\n", kSkipPrefix);
- return;
- }
-
- VkQueue ray_tracing_queue = m_device->m_queue;
- uint32_t ray_tracing_queue_family_index = 0;
-
- // If supported, run on the compute only queue.
- uint32_t compute_only_queue_family_index = m_device->QueueFamilyMatching(VK_QUEUE_COMPUTE_BIT, VK_QUEUE_GRAPHICS_BIT);
- if (compute_only_queue_family_index != UINT32_MAX) {
- const auto &compute_only_queues = m_device->queue_family_queues(compute_only_queue_family_index);
- if (!compute_only_queues.empty()) {
- ray_tracing_queue = compute_only_queues[0]->handle();
- ray_tracing_queue_family_index = compute_only_queue_family_index;
- }
- }
-
- VkCommandPoolObj ray_tracing_command_pool(m_device, ray_tracing_queue_family_index,
- VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
- VkCommandBufferObj ray_tracing_command_buffer(m_device, &ray_tracing_command_pool);
-
- struct AABB {
- float min_x;
- float min_y;
- float min_z;
- float max_x;
- float max_y;
- float max_z;
- };
-
- const std::vector<AABB> aabbs = {{-1.0f, -1.0f, -1.0f, +1.0f, +1.0f, +1.0f}};
-
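-    // Host-side layout of the instance record consumed by VK_NV_ray_tracing when building the top-level acceleration structure.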
- struct VkGeometryInstanceNV {
- float transform[12];
- uint32_t instanceCustomIndex : 24;
- uint32_t mask : 8;
- uint32_t instanceOffset : 24;
- uint32_t flags : 8;
- uint64_t accelerationStructureHandle;
- };
-
- VkDeviceSize aabb_buffer_size = sizeof(AABB) * aabbs.size();
- VkBufferObj aabb_buffer;
- aabb_buffer.init(*m_device, aabb_buffer_size, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
- VK_BUFFER_USAGE_RAY_TRACING_BIT_NV, {ray_tracing_queue_family_index});
-
- uint8_t *mapped_aabb_buffer_data = (uint8_t *)aabb_buffer.memory().map();
- std::memcpy(mapped_aabb_buffer_data, (uint8_t *)aabbs.data(), static_cast<std::size_t>(aabb_buffer_size));
- aabb_buffer.memory().unmap();
-
- VkGeometryNV geometry = {};
- geometry.sType = VK_STRUCTURE_TYPE_GEOMETRY_NV;
- geometry.geometryType = VK_GEOMETRY_TYPE_AABBS_NV;
- geometry.geometry.triangles = {};
- geometry.geometry.triangles.sType = VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV;
- geometry.geometry.aabbs = {};
- geometry.geometry.aabbs.sType = VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV;
- geometry.geometry.aabbs.aabbData = aabb_buffer.handle();
- geometry.geometry.aabbs.numAABBs = static_cast<uint32_t>(aabbs.size());
- geometry.geometry.aabbs.offset = 0;
- geometry.geometry.aabbs.stride = static_cast<VkDeviceSize>(sizeof(AABB));
- geometry.flags = 0;
-
- VkAccelerationStructureInfoNV bot_level_as_info = {};
- bot_level_as_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV;
- bot_level_as_info.type = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV;
- bot_level_as_info.instanceCount = 0;
- bot_level_as_info.geometryCount = 1;
- bot_level_as_info.pGeometries = &geometry;
-
- VkAccelerationStructureCreateInfoNV bot_level_as_create_info = {};
- bot_level_as_create_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV;
- bot_level_as_create_info.info = bot_level_as_info;
-
- VkAccelerationStructureObj bot_level_as(*m_device, bot_level_as_create_info);
-
- const std::vector<VkGeometryInstanceNV> instances = {
- VkGeometryInstanceNV{
- {
- // clang-format off
- 1.0f, 0.0f, 0.0f, 0.0f,
- 0.0f, 1.0f, 0.0f, 0.0f,
- 0.0f, 0.0f, 1.0f, 0.0f,
- // clang-format on
- },
- 0,
- 0xFF,
- 0,
- VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV,
- bot_level_as.opaque_handle(),
- },
- };
-
- VkDeviceSize instance_buffer_size = sizeof(VkGeometryInstanceNV) * instances.size();
- VkBufferObj instance_buffer;
- instance_buffer.init(*m_device, instance_buffer_size,
- VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
- VK_BUFFER_USAGE_RAY_TRACING_BIT_NV, {ray_tracing_queue_family_index});
-
- uint8_t *mapped_instance_buffer_data = (uint8_t *)instance_buffer.memory().map();
- std::memcpy(mapped_instance_buffer_data, (uint8_t *)instances.data(), static_cast<std::size_t>(instance_buffer_size));
- instance_buffer.memory().unmap();
-
- VkAccelerationStructureInfoNV top_level_as_info = {};
- top_level_as_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV;
- top_level_as_info.type = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV;
- top_level_as_info.instanceCount = 1;
- top_level_as_info.geometryCount = 0;
-
- VkAccelerationStructureCreateInfoNV top_level_as_create_info = {};
- top_level_as_create_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV;
- top_level_as_create_info.info = top_level_as_info;
-
- VkAccelerationStructureObj top_level_as(*m_device, top_level_as_create_info);
-
- VkDeviceSize scratch_buffer_size = std::max(bot_level_as.build_scratch_memory_requirements().memoryRequirements.size,
- top_level_as.build_scratch_memory_requirements().memoryRequirements.size);
- VkBufferObj scratch_buffer;
- scratch_buffer.init(*m_device, scratch_buffer_size, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, VK_BUFFER_USAGE_RAY_TRACING_BIT_NV);
-
- ray_tracing_command_buffer.begin();
-
- // Build bot level acceleration structure
- ray_tracing_command_buffer.BuildAccelerationStructure(&bot_level_as, scratch_buffer.handle());
-
- // Barrier to prevent using scratch buffer for top level build before bottom level build finishes
- VkMemoryBarrier memory_barrier = {};
- memory_barrier.sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER;
- memory_barrier.srcAccessMask = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV | VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV;
- memory_barrier.dstAccessMask = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV | VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV;
- ray_tracing_command_buffer.PipelineBarrier(VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV,
- VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV, 0, 1, &memory_barrier, 0,
- nullptr, 0, nullptr);
-
- // Build top level acceleration structure
- ray_tracing_command_buffer.BuildAccelerationStructure(&top_level_as, scratch_buffer.handle(), instance_buffer.handle());
-
- ray_tracing_command_buffer.end();
-
- VkSubmitInfo submit_info = {};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &ray_tracing_command_buffer.handle();
- vkQueueSubmit(ray_tracing_queue, 1, &submit_info, VK_NULL_HANDLE);
- vkQueueWaitIdle(ray_tracing_queue);
- m_errorMonitor->VerifyNotFound();
-
- VkTextureObj texture(m_device, nullptr);
- VkSamplerObj sampler(m_device);
-
- VkDeviceSize storage_buffer_size = 1024;
- VkBufferObj storage_buffer;
- storage_buffer.init(*m_device, storage_buffer_size, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
- VK_BUFFER_USAGE_STORAGE_BUFFER_BIT, {ray_tracing_queue_family_index});
-
- VkDeviceSize shader_binding_table_buffer_size = ray_tracing_properties.shaderGroupHandleSize * 4ull;
- VkBufferObj shader_binding_table_buffer;
- shader_binding_table_buffer.init(*m_device, shader_binding_table_buffer_size,
- VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
- VK_BUFFER_USAGE_RAY_TRACING_BIT_NV, {ray_tracing_queue_family_index});
-
-    // Set up descriptors
- const VkShaderStageFlags kAllRayTracingStages = VK_SHADER_STAGE_RAYGEN_BIT_NV | VK_SHADER_STAGE_ANY_HIT_BIT_NV |
- VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV | VK_SHADER_STAGE_MISS_BIT_NV |
- VK_SHADER_STAGE_INTERSECTION_BIT_NV | VK_SHADER_STAGE_CALLABLE_BIT_NV;
-
- void *layout_pnext = nullptr;
- void *allocate_pnext = nullptr;
- VkDescriptorPoolCreateFlags pool_create_flags = 0;
- VkDescriptorSetLayoutCreateFlags layout_create_flags = 0;
- VkDescriptorBindingFlagsEXT ds_binding_flags[3] = {};
- VkDescriptorSetLayoutBindingFlagsCreateInfoEXT layout_createinfo_binding_flags[1] = {};
- if (descriptor_indexing) {
- ds_binding_flags[0] = 0;
- ds_binding_flags[1] = 0;
- ds_binding_flags[2] = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT | VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT;
-
- layout_createinfo_binding_flags[0].sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT;
- layout_createinfo_binding_flags[0].pNext = NULL;
- layout_createinfo_binding_flags[0].bindingCount = 3;
- layout_createinfo_binding_flags[0].pBindingFlags = ds_binding_flags;
- layout_create_flags = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT;
- pool_create_flags = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT;
- layout_pnext = layout_createinfo_binding_flags;
- }
-
- // Prepare descriptors
- OneOffDescriptorSet ds(m_device,
- {
- {0, VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV, 1, kAllRayTracingStages, nullptr},
- {1, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, kAllRayTracingStages, nullptr},
- {2, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 6, kAllRayTracingStages, nullptr},
- },
- layout_create_flags, layout_pnext, pool_create_flags);
-
- VkDescriptorSetVariableDescriptorCountAllocateInfoEXT variable_count = {};
- uint32_t desc_counts;
- if (descriptor_indexing) {
- layout_create_flags = 0;
- pool_create_flags = 0;
- ds_binding_flags[2] =
- VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT | VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT;
- desc_counts = 6; // We'll reserve 8 spaces in the layout, but the descriptor will only use 6
- variable_count.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT;
- variable_count.descriptorSetCount = 1;
- variable_count.pDescriptorCounts = &desc_counts;
- allocate_pnext = &variable_count;
- }
-
- OneOffDescriptorSet ds_variable(m_device,
- {
- {0, VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV, 1, kAllRayTracingStages, nullptr},
- {1, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, kAllRayTracingStages, nullptr},
- {2, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 8, kAllRayTracingStages, nullptr},
- },
- layout_create_flags, layout_pnext, pool_create_flags, allocate_pnext);
-
- VkAccelerationStructureNV top_level_as_handle = top_level_as.handle();
- VkWriteDescriptorSetAccelerationStructureNV write_descript_set_as = {};
- write_descript_set_as.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV;
- write_descript_set_as.accelerationStructureCount = 1;
- write_descript_set_as.pAccelerationStructures = &top_level_as_handle;
-
- VkDescriptorBufferInfo descriptor_buffer_info = {};
- descriptor_buffer_info.buffer = storage_buffer.handle();
- descriptor_buffer_info.offset = 0;
- descriptor_buffer_info.range = storage_buffer_size;
-
- VkDescriptorImageInfo descriptor_image_infos[6] = {};
- for (int i = 0; i < 6; i++) {
- descriptor_image_infos[i] = texture.DescriptorImageInfo();
- descriptor_image_infos[i].sampler = sampler.handle();
- descriptor_image_infos[i].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
- }
-
- VkWriteDescriptorSet descriptor_writes[3] = {};
- descriptor_writes[0].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
- descriptor_writes[0].dstSet = ds.set_;
- descriptor_writes[0].dstBinding = 0;
- descriptor_writes[0].descriptorCount = 1;
- descriptor_writes[0].descriptorType = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV;
- descriptor_writes[0].pNext = &write_descript_set_as;
-
- descriptor_writes[1].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
- descriptor_writes[1].dstSet = ds.set_;
- descriptor_writes[1].dstBinding = 1;
- descriptor_writes[1].descriptorCount = 1;
- descriptor_writes[1].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
- descriptor_writes[1].pBufferInfo = &descriptor_buffer_info;
-
- descriptor_writes[2].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
- descriptor_writes[2].dstSet = ds.set_;
- descriptor_writes[2].dstBinding = 2;
- if (descriptor_indexing) {
- descriptor_writes[2].descriptorCount = 5; // Intentionally don't write index 5
- } else {
- descriptor_writes[2].descriptorCount = 6;
- }
- descriptor_writes[2].descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
- descriptor_writes[2].pImageInfo = descriptor_image_infos;
- vkUpdateDescriptorSets(m_device->device(), 3, descriptor_writes, 0, NULL);
- if (descriptor_indexing) {
- descriptor_writes[0].dstSet = ds_variable.set_;
- descriptor_writes[1].dstSet = ds_variable.set_;
- descriptor_writes[2].dstSet = ds_variable.set_;
- vkUpdateDescriptorSets(m_device->device(), 3, descriptor_writes, 0, NULL);
- }
-
- const VkPipelineLayoutObj pipeline_layout(m_device, {&ds.layout_});
- const VkPipelineLayoutObj pipeline_layout_variable(m_device, {&ds_variable.layout_});
-
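-    // Replaces the IMAGES_ARRAY_LENGTH placeholder in a shader template; an empty length string produces a runtime-sized array.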
- const auto SetImagesArrayLength = [](const std::string &shader_template, const std::string &length_str) {
- const std::string to_replace = "IMAGES_ARRAY_LENGTH";
-
- std::string result = shader_template;
- auto position = result.find(to_replace);
- assert(position != std::string::npos);
- result.replace(position, to_replace.length(), length_str);
- return result;
- };
-
- const std::string rgen_source_template = R"(#version 460
- #extension GL_EXT_nonuniform_qualifier : require
- #extension GL_EXT_samplerless_texture_functions : require
- #extension GL_NV_ray_tracing : require
-
- layout(set = 0, binding = 0) uniform accelerationStructureNV topLevelAS;
- layout(set = 0, binding = 1, std430) buffer RayTracingSbo {
- uint rgen_index;
- uint ahit_index;
- uint chit_index;
- uint miss_index;
- uint intr_index;
- uint call_index;
-
- uint rgen_ran;
- uint ahit_ran;
- uint chit_ran;
- uint miss_ran;
- uint intr_ran;
- uint call_ran;
-
- float result1;
- float result2;
- float result3;
- } sbo;
- layout(set = 0, binding = 2) uniform texture2D textures[IMAGES_ARRAY_LENGTH];
-
- layout(location = 0) rayPayloadNV vec3 payload;
- layout(location = 3) callableDataNV vec3 callableData;
-
- void main() {
- sbo.rgen_ran = 1;
-
- executeCallableNV(0, 3);
- sbo.result1 = callableData.x;
-
- vec3 origin = vec3(0.0f, 0.0f, -2.0f);
- vec3 direction = vec3(0.0f, 0.0f, 1.0f);
-
- traceNV(topLevelAS, gl_RayFlagsNoneNV, 0xFF, 0, 1, 0, origin, 0.001, direction, 10000.0, 0);
- sbo.result2 = payload.x;
-
- traceNV(topLevelAS, gl_RayFlagsNoneNV, 0xFF, 0, 1, 0, origin, 0.001, -direction, 10000.0, 0);
- sbo.result3 = payload.x;
-
- if (sbo.rgen_index > 0) {
- // OOB here:
- sbo.result3 = texelFetch(textures[sbo.rgen_index], ivec2(0, 0), 0).x;
- }
- }
- )";
-
- const std::string rgen_source = SetImagesArrayLength(rgen_source_template, "6");
- const std::string rgen_source_runtime = SetImagesArrayLength(rgen_source_template, "");
-
- const std::string ahit_source_template = R"(#version 460
- #extension GL_EXT_nonuniform_qualifier : require
- #extension GL_EXT_samplerless_texture_functions : require
- #extension GL_NV_ray_tracing : require
-
- layout(set = 0, binding = 1, std430) buffer StorageBuffer {
- uint rgen_index;
- uint ahit_index;
- uint chit_index;
- uint miss_index;
- uint intr_index;
- uint call_index;
-
- uint rgen_ran;
- uint ahit_ran;
- uint chit_ran;
- uint miss_ran;
- uint intr_ran;
- uint call_ran;
-
- float result1;
- float result2;
- float result3;
- } sbo;
- layout(set = 0, binding = 2) uniform texture2D textures[IMAGES_ARRAY_LENGTH];
-
- hitAttributeNV vec3 hitValue;
-
- layout(location = 0) rayPayloadInNV vec3 payload;
-
- void main() {
- sbo.ahit_ran = 2;
-
- payload = vec3(0.1234f);
-
- if (sbo.ahit_index > 0) {
- // OOB here:
- payload.x = texelFetch(textures[sbo.ahit_index], ivec2(0, 0), 0).x;
- }
- }
- )";
- const std::string ahit_source = SetImagesArrayLength(ahit_source_template, "6");
- const std::string ahit_source_runtime = SetImagesArrayLength(ahit_source_template, "");
-
- const std::string chit_source_template = R"(#version 460
- #extension GL_EXT_nonuniform_qualifier : require
- #extension GL_EXT_samplerless_texture_functions : require
- #extension GL_NV_ray_tracing : require
-
- layout(set = 0, binding = 1, std430) buffer RayTracingSbo {
- uint rgen_index;
- uint ahit_index;
- uint chit_index;
- uint miss_index;
- uint intr_index;
- uint call_index;
-
- uint rgen_ran;
- uint ahit_ran;
- uint chit_ran;
- uint miss_ran;
- uint intr_ran;
- uint call_ran;
-
- float result1;
- float result2;
- float result3;
- } sbo;
- layout(set = 0, binding = 2) uniform texture2D textures[IMAGES_ARRAY_LENGTH];
-
- layout(location = 0) rayPayloadInNV vec3 payload;
-
- hitAttributeNV vec3 attribs;
-
- void main() {
- sbo.chit_ran = 3;
-
- payload = attribs;
- if (sbo.chit_index > 0) {
- // OOB here:
- payload.x = texelFetch(textures[sbo.chit_index], ivec2(0, 0), 0).x;
- }
- }
- )";
- const std::string chit_source = SetImagesArrayLength(chit_source_template, "6");
- const std::string chit_source_runtime = SetImagesArrayLength(chit_source_template, "");
-
- const std::string miss_source_template = R"(#version 460
- #extension GL_EXT_nonuniform_qualifier : enable
- #extension GL_EXT_samplerless_texture_functions : require
- #extension GL_NV_ray_tracing : require
-
- layout(set = 0, binding = 1, std430) buffer RayTracingSbo {
- uint rgen_index;
- uint ahit_index;
- uint chit_index;
- uint miss_index;
- uint intr_index;
- uint call_index;
-
- uint rgen_ran;
- uint ahit_ran;
- uint chit_ran;
- uint miss_ran;
- uint intr_ran;
- uint call_ran;
-
- float result1;
- float result2;
- float result3;
- } sbo;
- layout(set = 0, binding = 2) uniform texture2D textures[IMAGES_ARRAY_LENGTH];
-
- layout(location = 0) rayPayloadInNV vec3 payload;
-
- void main() {
- sbo.miss_ran = 4;
-
- payload = vec3(1.0, 0.0, 0.0);
-
- if (sbo.miss_index > 0) {
- // OOB here:
- payload.x = texelFetch(textures[sbo.miss_index], ivec2(0, 0), 0).x;
- }
- }
- )";
- const std::string miss_source = SetImagesArrayLength(miss_source_template, "6");
- const std::string miss_source_runtime = SetImagesArrayLength(miss_source_template, "");
-
- const std::string intr_source_template = R"(#version 460
- #extension GL_EXT_nonuniform_qualifier : require
- #extension GL_EXT_samplerless_texture_functions : require
- #extension GL_NV_ray_tracing : require
-
- layout(set = 0, binding = 1, std430) buffer StorageBuffer {
- uint rgen_index;
- uint ahit_index;
- uint chit_index;
- uint miss_index;
- uint intr_index;
- uint call_index;
-
- uint rgen_ran;
- uint ahit_ran;
- uint chit_ran;
- uint miss_ran;
- uint intr_ran;
- uint call_ran;
-
- float result1;
- float result2;
- float result3;
- } sbo;
- layout(set = 0, binding = 2) uniform texture2D textures[IMAGES_ARRAY_LENGTH];
-
- hitAttributeNV vec3 hitValue;
-
- void main() {
- sbo.intr_ran = 5;
-
- hitValue = vec3(0.0f, 0.5f, 0.0f);
-
- reportIntersectionNV(1.0f, 0);
-
- if (sbo.intr_index > 0) {
- // OOB here:
- hitValue.x = texelFetch(textures[sbo.intr_index], ivec2(0, 0), 0).x;
- }
- }
- )";
- const std::string intr_source = SetImagesArrayLength(intr_source_template, "6");
- const std::string intr_source_runtime = SetImagesArrayLength(intr_source_template, "");
-
- const std::string call_source_template = R"(#version 460
- #extension GL_EXT_nonuniform_qualifier : require
- #extension GL_EXT_samplerless_texture_functions : require
- #extension GL_NV_ray_tracing : require
-
- layout(set = 0, binding = 1, std430) buffer StorageBuffer {
- uint rgen_index;
- uint ahit_index;
- uint chit_index;
- uint miss_index;
- uint intr_index;
- uint call_index;
-
- uint rgen_ran;
- uint ahit_ran;
- uint chit_ran;
- uint miss_ran;
- uint intr_ran;
- uint call_ran;
-
- float result1;
- float result2;
- float result3;
- } sbo;
- layout(set = 0, binding = 2) uniform texture2D textures[IMAGES_ARRAY_LENGTH];
-
- layout(location = 3) callableDataInNV vec3 callableData;
-
- void main() {
- sbo.call_ran = 6;
-
- callableData = vec3(0.1234f);
-
- if (sbo.call_index > 0) {
- // OOB here:
- callableData.x = texelFetch(textures[sbo.call_index], ivec2(0, 0), 0).x;
- }
- }
- )";
- const std::string call_source = SetImagesArrayLength(call_source_template, "6");
- const std::string call_source_runtime = SetImagesArrayLength(call_source_template, "");
-
- struct TestCase {
- const std::string &rgen_shader_source;
- const std::string &ahit_shader_source;
- const std::string &chit_shader_source;
- const std::string &miss_shader_source;
- const std::string &intr_shader_source;
- const std::string &call_shader_source;
- bool variable_length;
- uint32_t rgen_index;
- uint32_t ahit_index;
- uint32_t chit_index;
- uint32_t miss_index;
- uint32_t intr_index;
- uint32_t call_index;
- const char *expected_error;
- };
-
- std::vector<TestCase> tests;
- tests.push_back({rgen_source, ahit_source, chit_source, miss_source, intr_source, call_source, false, 25, 0, 0, 0, 0, 0,
- "Index of 25 used to index descriptor array of length 6."});
- tests.push_back({rgen_source, ahit_source, chit_source, miss_source, intr_source, call_source, false, 0, 25, 0, 0, 0, 0,
- "Index of 25 used to index descriptor array of length 6."});
- tests.push_back({rgen_source, ahit_source, chit_source, miss_source, intr_source, call_source, false, 0, 0, 25, 0, 0, 0,
- "Index of 25 used to index descriptor array of length 6."});
- tests.push_back({rgen_source, ahit_source, chit_source, miss_source, intr_source, call_source, false, 0, 0, 0, 25, 0, 0,
- "Index of 25 used to index descriptor array of length 6."});
- tests.push_back({rgen_source, ahit_source, chit_source, miss_source, intr_source, call_source, false, 0, 0, 0, 0, 25, 0,
- "Index of 25 used to index descriptor array of length 6."});
- tests.push_back({rgen_source, ahit_source, chit_source, miss_source, intr_source, call_source, false, 0, 0, 0, 0, 0, 25,
- "Index of 25 used to index descriptor array of length 6."});
-
- if (descriptor_indexing) {
- tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
- call_source_runtime, true, 25, 0, 0, 0, 0, 0, "Index of 25 used to index descriptor array of length 6."});
- tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
- call_source_runtime, true, 0, 25, 0, 0, 0, 0, "Index of 25 used to index descriptor array of length 6."});
- tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
- call_source_runtime, true, 0, 0, 25, 0, 0, 0, "Index of 25 used to index descriptor array of length 6."});
- tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
- call_source_runtime, true, 0, 0, 0, 25, 0, 0, "Index of 25 used to index descriptor array of length 6."});
- tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
- call_source_runtime, true, 0, 0, 0, 0, 25, 0, "Index of 25 used to index descriptor array of length 6."});
- tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
- call_source_runtime, true, 0, 0, 0, 0, 0, 25, "Index of 25 used to index descriptor array of length 6."});
-
- // For this group, the index 6 is below the maximum count declared in the layout (8) but above the count actually specified (5)
- tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
- call_source_runtime, true, 6, 0, 0, 0, 0, 0, "Index of 6 used to index descriptor array of length 6."});
- tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
- call_source_runtime, true, 0, 6, 0, 0, 0, 0, "Index of 6 used to index descriptor array of length 6."});
- tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
- call_source_runtime, true, 0, 0, 6, 0, 0, 0, "Index of 6 used to index descriptor array of length 6."});
- tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
- call_source_runtime, true, 0, 0, 0, 6, 0, 0, "Index of 6 used to index descriptor array of length 6."});
- tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
- call_source_runtime, true, 0, 0, 0, 0, 6, 0, "Index of 6 used to index descriptor array of length 6."});
- tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
- call_source_runtime, true, 0, 0, 0, 0, 0, 6, "Index of 6 used to index descriptor array of length 6."});
-
- tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
- call_source_runtime, true, 5, 0, 0, 0, 0, 0, "Descriptor index 5 is uninitialized."});
- tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
- call_source_runtime, true, 0, 5, 0, 0, 0, 0, "Descriptor index 5 is uninitialized."});
- tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
- call_source_runtime, true, 0, 0, 5, 0, 0, 0, "Descriptor index 5 is uninitialized."});
- tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
- call_source_runtime, true, 0, 0, 0, 5, 0, 0, "Descriptor index 5 is uninitialized."});
- tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
- call_source_runtime, true, 0, 0, 0, 0, 5, 0, "Descriptor index 5 is uninitialized."});
- tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
- call_source_runtime, true, 0, 0, 0, 0, 0, 5, "Descriptor index 5 is uninitialized."});
- }
-
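- // The VK_NV_ray_tracing entry points are extension commands, so they are fetched through vkGetDeviceProcAddr.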
- PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV = reinterpret_cast<PFN_vkCreateRayTracingPipelinesNV>(
- vkGetDeviceProcAddr(m_device->handle(), "vkCreateRayTracingPipelinesNV"));
- ASSERT_TRUE(vkCreateRayTracingPipelinesNV != nullptr);
-
- PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV =
- reinterpret_cast<PFN_vkGetRayTracingShaderGroupHandlesNV>(
- vkGetDeviceProcAddr(m_device->handle(), "vkGetRayTracingShaderGroupHandlesNV"));
- ASSERT_TRUE(vkGetRayTracingShaderGroupHandlesNV != nullptr);
-
- PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV =
- reinterpret_cast<PFN_vkCmdTraceRaysNV>(vkGetDeviceProcAddr(m_device->handle(), "vkCmdTraceRaysNV"));
- ASSERT_TRUE(vkCmdTraceRaysNV != nullptr);
-
- for (const auto &test : tests) {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, test.expected_error);
-
- VkShaderObj rgen_shader(m_device, test.rgen_shader_source.c_str(), VK_SHADER_STAGE_RAYGEN_BIT_NV, this, "main");
- VkShaderObj ahit_shader(m_device, test.ahit_shader_source.c_str(), VK_SHADER_STAGE_ANY_HIT_BIT_NV, this, "main");
- VkShaderObj chit_shader(m_device, test.chit_shader_source.c_str(), VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV, this, "main");
- VkShaderObj miss_shader(m_device, test.miss_shader_source.c_str(), VK_SHADER_STAGE_MISS_BIT_NV, this, "main");
- VkShaderObj intr_shader(m_device, test.intr_shader_source.c_str(), VK_SHADER_STAGE_INTERSECTION_BIT_NV, this, "main");
- VkShaderObj call_shader(m_device, test.call_shader_source.c_str(), VK_SHADER_STAGE_CALLABLE_BIT_NV, this, "main");
-
- VkPipelineShaderStageCreateInfo stage_create_infos[6] = {};
- stage_create_infos[0].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
- stage_create_infos[0].stage = VK_SHADER_STAGE_RAYGEN_BIT_NV;
- stage_create_infos[0].module = rgen_shader.handle();
- stage_create_infos[0].pName = "main";
-
- stage_create_infos[1].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
- stage_create_infos[1].stage = VK_SHADER_STAGE_ANY_HIT_BIT_NV;
- stage_create_infos[1].module = ahit_shader.handle();
- stage_create_infos[1].pName = "main";
-
- stage_create_infos[2].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
- stage_create_infos[2].stage = VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV;
- stage_create_infos[2].module = chit_shader.handle();
- stage_create_infos[2].pName = "main";
-
- stage_create_infos[3].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
- stage_create_infos[3].stage = VK_SHADER_STAGE_MISS_BIT_NV;
- stage_create_infos[3].module = miss_shader.handle();
- stage_create_infos[3].pName = "main";
-
- stage_create_infos[4].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
- stage_create_infos[4].stage = VK_SHADER_STAGE_INTERSECTION_BIT_NV;
- stage_create_infos[4].module = intr_shader.handle();
- stage_create_infos[4].pName = "main";
-
- stage_create_infos[5].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
- stage_create_infos[5].stage = VK_SHADER_STAGE_CALLABLE_BIT_NV;
- stage_create_infos[5].module = call_shader.handle();
- stage_create_infos[5].pName = "main";
-
- VkRayTracingShaderGroupCreateInfoNV group_create_infos[4] = {};
- group_create_infos[0].sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
- group_create_infos[0].type = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV;
- group_create_infos[0].generalShader = 0; // rgen
- group_create_infos[0].closestHitShader = VK_SHADER_UNUSED_NV;
- group_create_infos[0].anyHitShader = VK_SHADER_UNUSED_NV;
- group_create_infos[0].intersectionShader = VK_SHADER_UNUSED_NV;
-
- group_create_infos[1].sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
- group_create_infos[1].type = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV;
- group_create_infos[1].generalShader = 3; // miss
- group_create_infos[1].closestHitShader = VK_SHADER_UNUSED_NV;
- group_create_infos[1].anyHitShader = VK_SHADER_UNUSED_NV;
- group_create_infos[1].intersectionShader = VK_SHADER_UNUSED_NV;
-
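- // Procedural hit group: uses stage indices 2 (closest hit), 1 (any hit) and 4 (intersection) from stage_create_infos above.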
- group_create_infos[2].sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
- group_create_infos[2].type = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV;
- group_create_infos[2].generalShader = VK_SHADER_UNUSED_NV;
- group_create_infos[2].closestHitShader = 2;
- group_create_infos[2].anyHitShader = 1;
- group_create_infos[2].intersectionShader = 4;
-
- group_create_infos[3].sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
- group_create_infos[3].type = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV;
- group_create_infos[3].generalShader = 5; // call
- group_create_infos[3].closestHitShader = VK_SHADER_UNUSED_NV;
- group_create_infos[3].anyHitShader = VK_SHADER_UNUSED_NV;
- group_create_infos[3].intersectionShader = VK_SHADER_UNUSED_NV;
-
- VkRayTracingPipelineCreateInfoNV pipeline_ci = {};
- pipeline_ci.sType = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV;
- pipeline_ci.stageCount = 6;
- pipeline_ci.pStages = stage_create_infos;
- pipeline_ci.groupCount = 4;
- pipeline_ci.pGroups = group_create_infos;
- pipeline_ci.maxRecursionDepth = 2;
- pipeline_ci.layout = test.variable_length ? pipeline_layout_variable.handle() : pipeline_layout.handle();
-
- VkPipeline pipeline = VK_NULL_HANDLE;
- ASSERT_VK_SUCCESS(vkCreateRayTracingPipelinesNV(m_device->handle(), VK_NULL_HANDLE, 1, &pipeline_ci, nullptr, &pipeline));
-
- std::vector<uint8_t> shader_binding_table_data;
- shader_binding_table_data.resize(static_cast<std::size_t>(shader_binding_table_buffer_size), 0);
- ASSERT_VK_SUCCESS(vkGetRayTracingShaderGroupHandlesNV(m_device->handle(), pipeline, 0, 4,
- static_cast<std::size_t>(shader_binding_table_buffer_size),
- shader_binding_table_data.data()));
-
- uint8_t *mapped_shader_binding_table_data = (uint8_t *)shader_binding_table_buffer.memory().map();
- std::memcpy(mapped_shader_binding_table_data, shader_binding_table_data.data(), shader_binding_table_data.size());
- shader_binding_table_buffer.memory().unmap();
-
- ray_tracing_command_buffer.begin();
-
- vkCmdBindPipeline(ray_tracing_command_buffer.handle(), VK_PIPELINE_BIND_POINT_RAY_TRACING_NV, pipeline);
- vkCmdBindDescriptorSets(ray_tracing_command_buffer.handle(), VK_PIPELINE_BIND_POINT_RAY_TRACING_NV,
- test.variable_length ? pipeline_layout_variable.handle() : pipeline_layout.handle(), 0, 1,
- test.variable_length ? &ds_variable.set_ : &ds.set_, 0, nullptr);
-
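- // The four group handles were copied back-to-back into the SBT buffer, so each region offset below is a multiple of
- // shaderGroupHandleSize: group 0 = raygen, group 1 = miss, group 2 = hit, group 3 = callable.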
- vkCmdTraceRaysNV(ray_tracing_command_buffer.handle(), shader_binding_table_buffer.handle(),
- ray_tracing_properties.shaderGroupHandleSize * 0ull, shader_binding_table_buffer.handle(),
- ray_tracing_properties.shaderGroupHandleSize * 1ull, ray_tracing_properties.shaderGroupHandleSize,
- shader_binding_table_buffer.handle(), ray_tracing_properties.shaderGroupHandleSize * 2ull,
- ray_tracing_properties.shaderGroupHandleSize, shader_binding_table_buffer.handle(),
- ray_tracing_properties.shaderGroupHandleSize * 3ull, ray_tracing_properties.shaderGroupHandleSize,
- /*width=*/1, /*height=*/1, /*depth=*/1);
-
- ray_tracing_command_buffer.end();
-
- // Update the indices of the textures that the shaders should read
- uint32_t *mapped_storage_buffer_data = (uint32_t *)storage_buffer.memory().map();
- mapped_storage_buffer_data[0] = test.rgen_index;
- mapped_storage_buffer_data[1] = test.ahit_index;
- mapped_storage_buffer_data[2] = test.chit_index;
- mapped_storage_buffer_data[3] = test.miss_index;
- mapped_storage_buffer_data[4] = test.intr_index;
- mapped_storage_buffer_data[5] = test.call_index;
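- // Reset the *_ran slots so the checks after the submit can confirm that every stage executed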
- mapped_storage_buffer_data[6] = 0;
- mapped_storage_buffer_data[7] = 0;
- mapped_storage_buffer_data[8] = 0;
- mapped_storage_buffer_data[9] = 0;
- mapped_storage_buffer_data[10] = 0;
- mapped_storage_buffer_data[11] = 0;
- storage_buffer.memory().unmap();
-
- vkQueueSubmit(ray_tracing_queue, 1, &submit_info, VK_NULL_HANDLE);
- vkQueueWaitIdle(ray_tracing_queue);
- m_errorMonitor->VerifyFound();
-
- mapped_storage_buffer_data = (uint32_t *)storage_buffer.memory().map();
- ASSERT_TRUE(mapped_storage_buffer_data[6] == 1);
- ASSERT_TRUE(mapped_storage_buffer_data[7] == 2);
- ASSERT_TRUE(mapped_storage_buffer_data[8] == 3);
- ASSERT_TRUE(mapped_storage_buffer_data[9] == 4);
- ASSERT_TRUE(mapped_storage_buffer_data[10] == 5);
- ASSERT_TRUE(mapped_storage_buffer_data[11] == 6);
- storage_buffer.memory().unmap();
-
- vkDestroyPipeline(m_device->handle(), pipeline, nullptr);
- }
-}
-
-TEST_F(VkLayerTest, InvalidDescriptorPoolConsistency) {
- VkResult err;
-
- TEST_DESCRIPTION("Allocate descriptor sets from one DS pool and attempt to delete them from another.");
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkFreeDescriptorSets-pDescriptorSets-parent");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkDescriptorPoolSize ds_type_count = {};
- ds_type_count.type = VK_DESCRIPTOR_TYPE_SAMPLER;
- ds_type_count.descriptorCount = 1;
-
- VkDescriptorPoolCreateInfo ds_pool_ci = {};
- ds_pool_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
- ds_pool_ci.pNext = NULL;
- ds_pool_ci.flags = 0;
- ds_pool_ci.maxSets = 1;
- ds_pool_ci.poolSizeCount = 1;
- ds_pool_ci.pPoolSizes = &ds_type_count;
-
- VkDescriptorPool bad_pool;
- err = vkCreateDescriptorPool(m_device->device(), &ds_pool_ci, NULL, &bad_pool);
- ASSERT_VK_SUCCESS(err);
-
- OneOffDescriptorSet descriptor_set(m_device, {
- {0, VK_DESCRIPTOR_TYPE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
- });
-
- err = vkFreeDescriptorSets(m_device->device(), bad_pool, 1, &descriptor_set.set_);
-
- m_errorMonitor->VerifyFound();
-
- vkDestroyDescriptorPool(m_device->device(), bad_pool, NULL);
-}
-
-TEST_F(VkLayerTest, DrawWithPipelineIncompatibleWithSubpass) {
- TEST_DESCRIPTION("Use a pipeline for the wrong subpass in a render pass instance");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- // A renderpass with two subpasses, both writing the same attachment.
- VkAttachmentDescription attach[] = {
- {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
- VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_UNDEFINED,
- VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
- };
- VkAttachmentReference ref = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
- VkSubpassDescription subpasses[] = {
- {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, &ref, nullptr, nullptr, 0, nullptr},
- {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, &ref, nullptr, nullptr, 0, nullptr},
- };
- VkSubpassDependency dep = {0,
- 1,
- VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
- VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
- VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
- VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
- VK_DEPENDENCY_BY_REGION_BIT};
- VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, attach, 2, subpasses, 1, &dep};
- VkRenderPass rp;
- VkResult err = vkCreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
- ASSERT_VK_SUCCESS(err);
-
- VkImageObj image(m_device);
- image.InitNoLayout(32, 32, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
- VkImageView imageView = image.targetView(VK_FORMAT_R8G8B8A8_UNORM);
-
- VkFramebufferCreateInfo fbci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 1, &imageView, 32, 32, 1};
- VkFramebuffer fb;
- err = vkCreateFramebuffer(m_device->device(), &fbci, nullptr, &fb);
- ASSERT_VK_SUCCESS(err);
-
- VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
- VkPipelineObj pipe(m_device);
- pipe.AddDefaultColorAttachment();
- pipe.AddShader(&vs);
- pipe.AddShader(&fs);
- VkViewport viewport = {0.0f, 0.0f, 64.0f, 64.0f, 0.0f, 1.0f};
- m_viewports.push_back(viewport);
- pipe.SetViewport(m_viewports);
- VkRect2D rect = {};
- m_scissors.push_back(rect);
- pipe.SetScissor(m_scissors);
-
- const VkPipelineLayoutObj pl(m_device);
- pipe.CreateVKPipeline(pl.handle(), rp);
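- // The pipeline is created for subpass 0 of rp; both subtests below then use it in subpass 1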
-
- m_commandBuffer->begin();
-
- VkRenderPassBeginInfo rpbi = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
- nullptr,
- rp,
- fb,
- {{
- 0,
- 0,
- },
- {32, 32}},
- 0,
- nullptr};
-
- // subtest 1: bind in the wrong subpass
- vkCmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
- vkCmdNextSubpass(m_commandBuffer->handle(), VK_SUBPASS_CONTENTS_INLINE);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "built for subpass 0 but used in subpass 1");
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
- vkCmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
- m_errorMonitor->VerifyFound();
-
- vkCmdEndRenderPass(m_commandBuffer->handle());
-
- // subtest 2: bind in correct subpass, then transition to next subpass
- vkCmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
- vkCmdNextSubpass(m_commandBuffer->handle(), VK_SUBPASS_CONTENTS_INLINE);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "built for subpass 0 but used in subpass 1");
- vkCmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
- m_errorMonitor->VerifyFound();
-
- vkCmdEndRenderPass(m_commandBuffer->handle());
-
- m_commandBuffer->end();
-
- vkDestroyFramebuffer(m_device->device(), fb, nullptr);
- vkDestroyRenderPass(m_device->device(), rp, nullptr);
-}
-
-TEST_F(VkLayerTest, ImageBarrierSubpassConflict) {
- TEST_DESCRIPTION("Check case where subpass index references different image from image barrier");
- ASSERT_NO_FATAL_FAILURE(Init());
-
- // Create an RP/FB combo whose subpass references the wrong attachment index; this exercises the 2nd half of "VUID-vkCmdPipelineBarrier-image-02635"
- VkAttachmentDescription attach[] = {
- {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
- VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_UNDEFINED,
- VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
- {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
- VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_UNDEFINED,
- VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
- };
- // ref attachment points to wrong attachment index compared to img_barrier below
- VkAttachmentReference ref = {1, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
- VkSubpassDescription subpasses[] = {
- {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, &ref, nullptr, nullptr, 0, nullptr},
- };
- VkSubpassDependency dep = {0,
- 0,
- VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
- VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
- VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
- VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
- VK_DEPENDENCY_BY_REGION_BIT};
-
- VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 2, attach, 1, subpasses, 1, &dep};
- VkRenderPass rp;
-
- VkResult err = vkCreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
- ASSERT_VK_SUCCESS(err);
-
- VkImageObj image(m_device);
- image.InitNoLayout(32, 32, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
- VkImageView imageView = image.targetView(VK_FORMAT_R8G8B8A8_UNORM);
- VkImageObj image2(m_device);
- image2.InitNoLayout(32, 32, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
- VkImageView imageView2 = image2.targetView(VK_FORMAT_R8G8B8A8_UNORM);
- // re-use imageView created above together with imageView2; the subpass only references attachment 1 (imageView2)
- VkImageView iv_array[2] = {imageView, imageView2};
-
- VkFramebufferCreateInfo fbci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 2, iv_array, 32, 32, 1};
- VkFramebuffer fb;
- err = vkCreateFramebuffer(m_device->device(), &fbci, nullptr, &fb);
- ASSERT_VK_SUCCESS(err);
-
- VkRenderPassBeginInfo rpbi = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
- nullptr,
- rp,
- fb,
- {{
- 0,
- 0,
- },
- {32, 32}},
- 0,
- nullptr};
-
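- // The barrier below references 'image' (attachment 0), while the subpass only uses attachment 1 (image2);
- // this mismatch is what should trigger the VUID.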
- VkImageMemoryBarrier img_barrier = {};
- img_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
- img_barrier.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
- img_barrier.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
- img_barrier.oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
- img_barrier.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
- img_barrier.image = image.handle(); /* barrier references image from attachment index 0 */
- img_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
- img_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
- img_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- img_barrier.subresourceRange.baseArrayLayer = 0;
- img_barrier.subresourceRange.baseMipLevel = 0;
- img_barrier.subresourceRange.layerCount = 1;
- img_barrier.subresourceRange.levelCount = 1;
- m_commandBuffer->begin();
- vkCmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-image-02635");
- vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
- VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1,
- &img_barrier);
- m_errorMonitor->VerifyFound();
-
- vkDestroyFramebuffer(m_device->device(), fb, nullptr);
- vkDestroyRenderPass(m_device->device(), rp, nullptr);
-}
-
-TEST_F(VkLayerTest, RenderPassCreateAttachmentIndexOutOfRange) {
- // Check for VK_KHR_get_physical_device_properties2
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- }
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- // There are no attachments, but refer to attachment 0.
- VkAttachmentReference ref = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
- VkSubpassDescription subpasses[] = {
- {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, &ref, nullptr, nullptr, 0, nullptr},
- };
-
- VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 0, nullptr, 1, subpasses, 0, nullptr};
-
- // "... must be less than the total number of attachments ..."
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkRenderPassCreateInfo-attachment-00834",
- "VUID-VkRenderPassCreateInfo2KHR-attachment-03051");
-}
-
-TEST_F(VkLayerTest, RenderPassCreateAttachmentReadOnlyButCleared) {
- // Check for VK_KHR_get_physical_device_properties2
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- }
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
- bool maintenance2Supported = rp2Supported;
-
- // Check for VK_KHR_maintenance2
- if (!rp2Supported && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE2_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_KHR_MAINTENANCE2_EXTENSION_NAME);
- maintenance2Supported = true;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- if (m_device->props.apiVersion >= VK_API_VERSION_1_1) {
- maintenance2Supported = true;
- }
-
- VkAttachmentDescription description = {0,
- VK_FORMAT_D32_SFLOAT_S8_UINT,
- VK_SAMPLE_COUNT_1_BIT,
- VK_ATTACHMENT_LOAD_OP_CLEAR,
- VK_ATTACHMENT_STORE_OP_DONT_CARE,
- VK_ATTACHMENT_LOAD_OP_CLEAR,
- VK_ATTACHMENT_STORE_OP_DONT_CARE,
- VK_IMAGE_LAYOUT_GENERAL,
- VK_IMAGE_LAYOUT_GENERAL};
-
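- // Referencing the attachment with a read-only depth/stencil layout while its load ops above are CLEAR is the invalid combination under test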
- VkAttachmentReference depth_stencil_ref = {0, VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL};
-
- VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 0, nullptr, nullptr, &depth_stencil_ref, 0,
- nullptr};
-
- VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, &description, 1, &subpass, 0, nullptr};
-
- // VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL but depth cleared
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkRenderPassCreateInfo-pAttachments-00836",
- "VUID-VkRenderPassCreateInfo2KHR-pAttachments-02522");
-
- if (maintenance2Supported) {
- // VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL but depth cleared
- depth_stencil_ref.layout = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL;
-
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
- "VUID-VkRenderPassCreateInfo-pAttachments-01566", nullptr);
-
- // VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL but depth cleared
- depth_stencil_ref.layout = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL;
-
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
- "VUID-VkRenderPassCreateInfo-pAttachments-01567", nullptr);
- }
-}
-
-TEST_F(VkLayerTest, RenderPassCreateAttachmentMismatchingLayoutsColor) {
- TEST_DESCRIPTION("Attachment is used simultaneously as two color attachments with different layouts.");
-
- // Check for VK_KHR_get_physical_device_properties2
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- }
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- VkAttachmentDescription attach[] = {
- {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
- VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_UNDEFINED,
- VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
- };
- VkAttachmentReference refs[] = {
- {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
- {0, VK_IMAGE_LAYOUT_GENERAL},
- };
- VkSubpassDescription subpasses[] = {
- {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 2, refs, nullptr, nullptr, 0, nullptr},
- };
-
- VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, attach, 1, subpasses, 0, nullptr};
-
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
- "subpass 0 already uses attachment 0 with a different image layout",
- "subpass 0 already uses attachment 0 with a different image layout");
-}
-
-TEST_F(VkLayerTest, RenderPassCreateAttachmentDescriptionInvalidFinalLayout) {
- TEST_DESCRIPTION("VkAttachmentDescription's finalLayout must not be UNDEFINED or PREINITIALIZED");
-
- // Check for VK_KHR_get_physical_device_properties2
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- }
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- VkAttachmentDescription attach_desc = {};
- attach_desc.format = VK_FORMAT_R8G8B8A8_UNORM;
- attach_desc.samples = VK_SAMPLE_COUNT_1_BIT;
- attach_desc.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
- attach_desc.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
- attach_desc.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
- attach_desc.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
- attach_desc.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
- attach_desc.finalLayout = VK_IMAGE_LAYOUT_UNDEFINED;
- VkAttachmentReference attach_ref = {};
- attach_ref.attachment = 0;
- attach_ref.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
- VkSubpassDescription subpass = {};
- subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
- subpass.colorAttachmentCount = 1;
- subpass.pColorAttachments = &attach_ref;
- VkRenderPassCreateInfo rpci = {};
- rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
- rpci.attachmentCount = 1;
- rpci.pAttachments = &attach_desc;
- rpci.subpassCount = 1;
- rpci.pSubpasses = &subpass;
-
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkAttachmentDescription-finalLayout-00843",
- "VUID-VkAttachmentDescription2KHR-finalLayout-03061");
-
- attach_desc.finalLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkAttachmentDescription-finalLayout-00843",
- "VUID-VkAttachmentDescription2KHR-finalLayout-03061");
-}
-
-TEST_F(VkLayerTest, RenderPassCreateAttachmentsMisc) {
- TEST_DESCRIPTION(
- "Ensure that CreateRenderPass produces the expected validation errors when a subpass's attachments violate the valid usage "
- "conditions.");
-
- // Check for VK_KHR_get_physical_device_properties2
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- }
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- std::vector<VkAttachmentDescription> attachments = {
- // input attachments
- {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_4_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
- VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL},
- // color attachments
- {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_4_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
- VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
- VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
- {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_4_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
- VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
- VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
- // depth attachment
- {0, VK_FORMAT_D24_UNORM_S8_UINT, VK_SAMPLE_COUNT_4_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
- VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
- VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL},
- // resolve attachment
- {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
- VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
- VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
- // preserve attachments
- {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_4_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
- VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
- VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
- };
-
- std::vector<VkAttachmentReference> input = {
- {0, VK_IMAGE_LAYOUT_GENERAL},
- };
- std::vector<VkAttachmentReference> color = {
- {1, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
- {2, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
- };
- VkAttachmentReference depth = {3, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL};
- std::vector<VkAttachmentReference> resolve = {
- {4, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
- {VK_ATTACHMENT_UNUSED, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
- };
- std::vector<uint32_t> preserve = {5};
-
- VkSubpassDescription subpass = {0,
- VK_PIPELINE_BIND_POINT_GRAPHICS,
- (uint32_t)input.size(),
- input.data(),
- (uint32_t)color.size(),
- color.data(),
- resolve.data(),
- &depth,
- (uint32_t)preserve.size(),
- preserve.data()};
-
- VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
- nullptr,
- 0,
- (uint32_t)attachments.size(),
- attachments.data(),
- 1,
- &subpass,
- 0,
- nullptr};
-
- // Test too many color attachments
- {
- std::vector<VkAttachmentReference> too_many_colors(m_device->props.limits.maxColorAttachments + 1, color[0]);
- subpass.colorAttachmentCount = (uint32_t)too_many_colors.size();
- subpass.pColorAttachments = too_many_colors.data();
- subpass.pResolveAttachments = NULL;
-
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
- "VUID-VkSubpassDescription-colorAttachmentCount-00845",
- "VUID-VkSubpassDescription2KHR-colorAttachmentCount-03063");
-
- subpass.colorAttachmentCount = (uint32_t)color.size();
- subpass.pColorAttachments = color.data();
- subpass.pResolveAttachments = resolve.data();
- }
-
- // Test sample count mismatch between color buffers
- attachments[subpass.pColorAttachments[1].attachment].samples = VK_SAMPLE_COUNT_8_BIT;
- depth.attachment = VK_ATTACHMENT_UNUSED; // Avoids triggering 01418
-
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
- "VUID-VkSubpassDescription-pColorAttachments-01417",
- "VUID-VkSubpassDescription2KHR-pColorAttachments-03069");
-
- depth.attachment = 3;
- attachments[subpass.pColorAttachments[1].attachment].samples = attachments[subpass.pColorAttachments[0].attachment].samples;
-
- // Test sample count mismatch between color buffers and depth buffer
- attachments[subpass.pDepthStencilAttachment->attachment].samples = VK_SAMPLE_COUNT_8_BIT;
- subpass.colorAttachmentCount = 1;
-
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
- "VUID-VkSubpassDescription-pDepthStencilAttachment-01418",
- "VUID-VkSubpassDescription2KHR-pDepthStencilAttachment-03071");
-
- attachments[subpass.pDepthStencilAttachment->attachment].samples = attachments[subpass.pColorAttachments[0].attachment].samples;
- subpass.colorAttachmentCount = (uint32_t)color.size();
-
- // Test resolve attachment with UNUSED color attachment
- color[0].attachment = VK_ATTACHMENT_UNUSED;
-
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
- "VUID-VkSubpassDescription-pResolveAttachments-00847",
- "VUID-VkSubpassDescription2KHR-pResolveAttachments-03065");
-
- color[0].attachment = 1;
-
- // Test resolve from a single-sampled color attachment
- attachments[subpass.pColorAttachments[0].attachment].samples = VK_SAMPLE_COUNT_1_BIT;
- subpass.colorAttachmentCount = 1; // avoid mismatch (00337), and avoid double report
- subpass.pDepthStencilAttachment = nullptr; // avoid mismatch (01418)
-
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
- "VUID-VkSubpassDescription-pResolveAttachments-00848",
- "VUID-VkSubpassDescription2KHR-pResolveAttachments-03066");
-
- attachments[subpass.pColorAttachments[0].attachment].samples = VK_SAMPLE_COUNT_4_BIT;
- subpass.colorAttachmentCount = (uint32_t)color.size();
- subpass.pDepthStencilAttachment = &depth;
-
- // Test resolve to a multi-sampled resolve attachment
- attachments[subpass.pResolveAttachments[0].attachment].samples = VK_SAMPLE_COUNT_4_BIT;
-
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
- "VUID-VkSubpassDescription-pResolveAttachments-00849",
- "VUID-VkSubpassDescription2KHR-pResolveAttachments-03067");
-
- attachments[subpass.pResolveAttachments[0].attachment].samples = VK_SAMPLE_COUNT_1_BIT;
-
- // Test with color/resolve format mismatch
- attachments[subpass.pColorAttachments[0].attachment].format = VK_FORMAT_R8G8B8A8_SRGB;
-
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
- "VUID-VkSubpassDescription-pResolveAttachments-00850",
- "VUID-VkSubpassDescription2KHR-pResolveAttachments-03068");
-
- attachments[subpass.pColorAttachments[0].attachment].format = attachments[subpass.pResolveAttachments[0].attachment].format;
-
- // Test for UNUSED preserve attachments
- preserve[0] = VK_ATTACHMENT_UNUSED;
-
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkSubpassDescription-attachment-00853",
- "VUID-VkSubpassDescription2KHR-attachment-03073");
-
- preserve[0] = 5;
- // Test for preserve attachments used elsewhere in the subpass
- color[0].attachment = preserve[0];
-
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
- "VUID-VkSubpassDescription-pPreserveAttachments-00854",
- "VUID-VkSubpassDescription2KHR-pPreserveAttachments-03074");
-
- color[0].attachment = 1;
- input[0].attachment = 0;
- input[0].layout = VK_IMAGE_LAYOUT_GENERAL;
-
- // Test for attachment used first as input with loadOp=CLEAR
- {
- std::vector<VkSubpassDescription> subpasses = {subpass, subpass, subpass};
- subpasses[0].inputAttachmentCount = 0;
- subpasses[1].inputAttachmentCount = 0;
- attachments[input[0].attachment].loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
- VkRenderPassCreateInfo rpci_multipass = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
- nullptr,
- 0,
- (uint32_t)attachments.size(),
- attachments.data(),
- (uint32_t)subpasses.size(),
- subpasses.data(),
- 0,
- nullptr};
-
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci_multipass, rp2Supported,
- "VUID-VkSubpassDescription-loadOp-00846", "VUID-VkSubpassDescription2KHR-loadOp-03064");
-
- attachments[input[0].attachment].loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
- }
-}
-
-TEST_F(VkLayerTest, RenderPassCreateAttachmentReferenceInvalidLayout) {
- TEST_DESCRIPTION("Attachment reference uses PREINITIALIZED or UNDEFINED layouts");
-
- // Check for VK_KHR_get_physical_device_properties2
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- }
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- VkAttachmentDescription attach[] = {
- {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
- VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_UNDEFINED,
- VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
- };
- VkAttachmentReference refs[] = {
- {0, VK_IMAGE_LAYOUT_UNDEFINED},
- };
- VkSubpassDescription subpasses[] = {
- {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, refs, nullptr, nullptr, 0, nullptr},
- };
-
- VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, attach, 1, subpasses, 0, nullptr};
-
- // Use UNDEFINED layout
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkAttachmentReference-layout-00857",
- "VUID-VkAttachmentReference2KHR-layout-03077");
-
- // Use PREINITIALIZED layout
- refs[0].layout = VK_IMAGE_LAYOUT_PREINITIALIZED;
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkAttachmentReference-layout-00857",
- "VUID-VkAttachmentReference2KHR-layout-03077");
-}
-
-TEST_F(VkLayerTest, RenderPassCreateOverlappingCorrelationMasks) {
- TEST_DESCRIPTION("Create a subpass with overlapping correlation masks");
-
- // Check for VK_KHR_get_physical_device_properties2
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- }
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
-
- if (!rp2Supported) {
- if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MULTIVIEW_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_KHR_MULTIVIEW_EXTENSION_NAME);
- } else {
- printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_KHR_MULTIVIEW_EXTENSION_NAME);
- return;
- }
- }
-
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 0, nullptr, nullptr, nullptr, 0, nullptr};
- uint32_t viewMasks[] = {0x3u};
- uint32_t correlationMasks[] = {0x1u, 0x3u};
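- // 0x1 and 0x3 both include view 0, so these correlation masks overlap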
- VkRenderPassMultiviewCreateInfo rpmvci = {
- VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO, nullptr, 1, viewMasks, 0, nullptr, 2, correlationMasks};
-
- VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, &rpmvci, 0, 0, nullptr, 1, &subpass, 0, nullptr};
-
- // Correlation masks must not overlap
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
- "VUID-VkRenderPassMultiviewCreateInfo-pCorrelationMasks-00841",
- "VUID-VkRenderPassCreateInfo2KHR-pCorrelatedViewMasks-03056");
-
- // Check the more specific rule that correlation masks must not be set when multiview is not enabled
- if (rp2Supported) {
- viewMasks[0] = 0;
- correlationMasks[0] = 0;
- correlationMasks[1] = 0;
- safe_VkRenderPassCreateInfo2KHR safe_rpci2;
- ConvertVkRenderPassCreateInfoToV2KHR(&rpci, &safe_rpci2);
-
- TestRenderPass2KHRCreate(m_errorMonitor, m_device->device(), safe_rpci2.ptr(),
- "VUID-VkRenderPassCreateInfo2KHR-viewMask-03057");
- }
-}
-
-TEST_F(VkLayerTest, RenderPassCreateInvalidViewMasks) {
- TEST_DESCRIPTION("Create a subpass with the wrong number of view masks, or inconsistent setting of view masks");
-
- // Check for VK_KHR_get_physical_device_properties2
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- }
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
-
- if (!rp2Supported) {
- if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MULTIVIEW_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_KHR_MULTIVIEW_EXTENSION_NAME);
- } else {
- printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_KHR_MULTIVIEW_EXTENSION_NAME);
- return;
- }
- }
-
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- VkSubpassDescription subpasses[] = {
- {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 0, nullptr, nullptr, nullptr, 0, nullptr},
- {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 0, nullptr, nullptr, nullptr, 0, nullptr},
- };
- uint32_t viewMasks[] = {0x3u, 0u};
- VkRenderPassMultiviewCreateInfo rpmvci = {
- VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO, nullptr, 1, viewMasks, 0, nullptr, 0, nullptr};
-
- VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, &rpmvci, 0, 0, nullptr, 2, subpasses, 0, nullptr};
-
- // Not enough view masks
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkRenderPassCreateInfo-pNext-01928",
- "VUID-VkRenderPassCreateInfo2KHR-viewMask-03058");
-}
-
-TEST_F(VkLayerTest, RenderPassCreateInvalidInputAttachmentReferences) {
- TEST_DESCRIPTION("Create a subpass with the meta data aspect mask set for an input attachment");
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE2_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_KHR_MAINTENANCE2_EXTENSION_NAME);
- } else {
- printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_KHR_MAINTENANCE2_EXTENSION_NAME);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- VkAttachmentDescription attach = {0,
- VK_FORMAT_R8G8B8A8_UNORM,
- VK_SAMPLE_COUNT_1_BIT,
- VK_ATTACHMENT_LOAD_OP_DONT_CARE,
- VK_ATTACHMENT_STORE_OP_DONT_CARE,
- VK_ATTACHMENT_LOAD_OP_DONT_CARE,
- VK_ATTACHMENT_STORE_OP_DONT_CARE,
- VK_IMAGE_LAYOUT_UNDEFINED,
- VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL};
- VkAttachmentReference ref = {0, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL};
-
- VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 1, &ref, 0, nullptr, nullptr, nullptr, 0, nullptr};
- VkInputAttachmentAspectReference iaar = {0, 0, VK_IMAGE_ASPECT_METADATA_BIT};
- VkRenderPassInputAttachmentAspectCreateInfo rpiaaci = {VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO,
- nullptr, 1, &iaar};
-
- VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, &rpiaaci, 0, 1, &attach, 1, &subpass, 0, nullptr};
-
- // Invalid meta data aspect
- m_errorMonitor->SetDesiredFailureMsg(
- VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkRenderPassCreateInfo-pNext-01963"); // Cannot/should not avoid getting this one too
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, false, "VUID-VkInputAttachmentAspectReference-aspectMask-01964",
- nullptr);
-
- // Aspect not present
- iaar.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, false, "VUID-VkRenderPassCreateInfo-pNext-01963", nullptr);
-
- // Invalid subpass index
- iaar.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- iaar.subpass = 1;
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, false, "VUID-VkRenderPassCreateInfo-pNext-01926", nullptr);
- iaar.subpass = 0;
-
- // Invalid input attachment index
- iaar.inputAttachmentIndex = 1;
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, false, "VUID-VkRenderPassCreateInfo-pNext-01927", nullptr);
-}
-
-TEST_F(VkLayerTest, RenderPassCreateInvalidFragmentDensityMapReferences) {
- TEST_DESCRIPTION("Create a subpass with the wrong attachment information for a fragment density map ");
-
- // Check for VK_KHR_get_physical_device_properties2
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- } else {
- printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_EXT_FRAGMENT_DENSITY_MAP_EXTENSION_NAME);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- if (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_FRAGMENT_DENSITY_MAP_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_EXT_FRAGMENT_DENSITY_MAP_EXTENSION_NAME);
- } else {
- printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_EXT_FRAGMENT_DENSITY_MAP_EXTENSION_NAME);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- VkAttachmentDescription attach = {0,
- VK_FORMAT_R8G8_UNORM,
- VK_SAMPLE_COUNT_1_BIT,
- VK_ATTACHMENT_LOAD_OP_LOAD,
- VK_ATTACHMENT_STORE_OP_DONT_CARE,
- VK_ATTACHMENT_LOAD_OP_DONT_CARE,
- VK_ATTACHMENT_STORE_OP_DONT_CARE,
- VK_IMAGE_LAYOUT_UNDEFINED,
- VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT};
- // Set 1 instead of 0
- VkAttachmentReference ref = {1, VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT};
- VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 1, &ref, 0, nullptr, nullptr, nullptr, 0, nullptr};
- VkRenderPassFragmentDensityMapCreateInfoEXT rpfdmi = {VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT,
- nullptr, ref};
-
- VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, &rpfdmi, 0, 1, &attach, 1, &subpass, 0, nullptr};
-
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, false,
- "VUID-VkRenderPassFragmentDensityMapCreateInfoEXT-fragmentDensityMapAttachment-02547", nullptr);
-
- // Set wrong VkImageLayout
- ref = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
- subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 1, &ref, 0, nullptr, nullptr, nullptr, 0, nullptr};
- rpfdmi = {VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT, nullptr, ref};
- rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, &rpfdmi, 0, 1, &attach, 1, &subpass, 0, nullptr};
-
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, false,
- "VUID-VkRenderPassFragmentDensityMapCreateInfoEXT-fragmentDensityMapAttachment-02549", nullptr);
-
- // Set wrong load operation
- attach = {0,
- VK_FORMAT_R8G8_UNORM,
- VK_SAMPLE_COUNT_1_BIT,
- VK_ATTACHMENT_LOAD_OP_CLEAR,
- VK_ATTACHMENT_STORE_OP_DONT_CARE,
- VK_ATTACHMENT_LOAD_OP_DONT_CARE,
- VK_ATTACHMENT_STORE_OP_DONT_CARE,
- VK_IMAGE_LAYOUT_UNDEFINED,
- VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT};
-
- ref = {0, VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT};
- subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 1, &ref, 0, nullptr, nullptr, nullptr, 0, nullptr};
- rpfdmi = {VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT, nullptr, ref};
- rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, &rpfdmi, 0, 1, &attach, 1, &subpass, 0, nullptr};
-
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, false,
- "VUID-VkRenderPassFragmentDensityMapCreateInfoEXT-fragmentDensityMapAttachment-02550", nullptr);
-
- // Set wrong store operation
- attach = {0,
- VK_FORMAT_R8G8_UNORM,
- VK_SAMPLE_COUNT_1_BIT,
- VK_ATTACHMENT_LOAD_OP_LOAD,
- VK_ATTACHMENT_STORE_OP_STORE,
- VK_ATTACHMENT_LOAD_OP_DONT_CARE,
- VK_ATTACHMENT_STORE_OP_DONT_CARE,
- VK_IMAGE_LAYOUT_UNDEFINED,
- VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT};
-
- ref = {0, VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT};
- subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 1, &ref, 0, nullptr, nullptr, nullptr, 0, nullptr};
- rpfdmi = {VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT, nullptr, ref};
- rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, &rpfdmi, 0, 1, &attach, 1, &subpass, 0, nullptr};
-
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, false,
- "VUID-VkRenderPassFragmentDensityMapCreateInfoEXT-fragmentDensityMapAttachment-02551", nullptr);
-}
-
-TEST_F(VkLayerTest, RenderPassCreateSubpassNonGraphicsPipeline) {
- TEST_DESCRIPTION("Create a subpass with the compute pipeline bind point");
- // Check for VK_KHR_get_physical_device_properties2
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- }
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- VkSubpassDescription subpasses[] = {
- {0, VK_PIPELINE_BIND_POINT_COMPUTE, 0, nullptr, 0, nullptr, nullptr, nullptr, 0, nullptr},
- };
-
- VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 0, nullptr, 1, subpasses, 0, nullptr};
-
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
- "VUID-VkSubpassDescription-pipelineBindPoint-00844",
- "VUID-VkSubpassDescription2KHR-pipelineBindPoint-03062");
-}
-
-TEST_F(VkLayerTest, RenderPassCreateSubpassMissingAttributesBitMultiviewNVX) {
- TEST_DESCRIPTION("Create a subpass with the VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX flag missing");
-
- // Check for VK_KHR_get_physical_device_properties2
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- } else {
- printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- if (DeviceExtensionSupported(gpu(), nullptr, VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME) &&
- DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MULTIVIEW_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_MULTIVIEW_EXTENSION_NAME);
- } else {
- printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME);
- return;
- }
-
- bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- VkSubpassDescription subpasses[] = {
- {VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 0, nullptr, nullptr,
- nullptr, 0, nullptr},
- };
-
- VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 0, nullptr, 1, subpasses, 0, nullptr};
-
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkSubpassDescription-flags-00856",
- "VUID-VkSubpassDescription2KHR-flags-03076");
-}
-
-TEST_F(VkLayerTest, RenderPassCreate2SubpassInvalidInputAttachmentParameters) {
- TEST_DESCRIPTION("Create a subpass with parameters in the input attachment ref which are invalid");
-
- // Check for VK_KHR_get_physical_device_properties2
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- } else {
- printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
-
- if (!rp2Supported) {
- printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- VkAttachmentReference2KHR reference = {VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR, nullptr, VK_ATTACHMENT_UNUSED,
- VK_IMAGE_LAYOUT_UNDEFINED, 0};
- VkSubpassDescription2KHR subpass = {VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR,
- nullptr,
- 0,
- VK_PIPELINE_BIND_POINT_GRAPHICS,
- 0,
- 1,
- &reference,
- 0,
- nullptr,
- nullptr,
- nullptr,
- 0,
- nullptr};
-
- VkRenderPassCreateInfo2KHR rpci2 = {
- VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR, nullptr, 0, 0, nullptr, 1, &subpass, 0, nullptr, 0, nullptr};
-
- // Test for aspect mask of 0
- TestRenderPass2KHRCreate(m_errorMonitor, m_device->device(), &rpci2, "VUID-VkSubpassDescription2KHR-aspectMask-03176");
-
- // Test for invalid aspect mask bits
- reference.aspectMask |= VK_IMAGE_ASPECT_FLAG_BITS_MAX_ENUM;
- TestRenderPass2KHRCreate(m_errorMonitor, m_device->device(), &rpci2, "VUID-VkSubpassDescription2KHR-aspectMask-03175");
-}
-
-TEST_F(VkLayerTest, RenderPassCreateInvalidSubpassDependencies) {
- // Check for VK_KHR_get_physical_device_properties2
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- }
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- bool rp2_supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
- bool multiviewSupported = rp2_supported;
-
- if (!rp2_supported && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MULTIVIEW_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_KHR_MULTIVIEW_EXTENSION_NAME);
- multiviewSupported = true;
- }
-
- // Add a device features struct enabling NO features
- VkPhysicalDeviceFeatures features = {0};
- ASSERT_NO_FATAL_FAILURE(InitState(&features));
-
- if (m_device->props.apiVersion >= VK_API_VERSION_1_1) {
- multiviewSupported = true;
- }
-
- // Create two dummy subpasses
- VkSubpassDescription subpasses[] = {
- {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 0, nullptr, nullptr, nullptr, 0, nullptr},
- {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 0, nullptr, nullptr, nullptr, 0, nullptr},
- };
-
- VkSubpassDependency dependency;
- VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 0, nullptr, 2, subpasses, 1, &dependency};
-
-    // Non-graphics stages in subpass dependency
- dependency = {0, 1, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | VK_PIPELINE_STAGE_TRANSFER_BIT,
- VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT};
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported,
- "VUID-VkRenderPassCreateInfo-pDependencies-00837", "VUID-VkRenderPassCreateInfo2KHR-pDependencies-03054");
-
- dependency = {0, 1, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, 0, 0, 0};
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported,
- "VUID-VkRenderPassCreateInfo-pDependencies-00837", "VUID-VkRenderPassCreateInfo2KHR-pDependencies-03054");
-
- dependency = {0, 1, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, 0, 0, 0};
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported,
- "VUID-VkRenderPassCreateInfo-pDependencies-00837", "VUID-VkRenderPassCreateInfo2KHR-pDependencies-03054");
-
- dependency = {0, 1, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
- VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT};
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported,
- "VUID-VkRenderPassCreateInfo-pDependencies-00838", "VUID-VkRenderPassCreateInfo2KHR-pDependencies-03055");
-
- dependency = {0, 1, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_HOST_BIT, 0, 0, 0};
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported,
- "VUID-VkRenderPassCreateInfo-pDependencies-00838", "VUID-VkRenderPassCreateInfo2KHR-pDependencies-03055");
-
- dependency = {0, VK_SUBPASS_EXTERNAL, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, 0, 0, 0};
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported,
- "VUID-VkRenderPassCreateInfo-pDependencies-00837", "VUID-VkRenderPassCreateInfo2KHR-pDependencies-03054");
-
- dependency = {VK_SUBPASS_EXTERNAL, 0, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, 0, 0, 0};
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported,
- "VUID-VkRenderPassCreateInfo-pDependencies-00838", "VUID-VkRenderPassCreateInfo2KHR-pDependencies-03055");
-
- dependency = {0, 0, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 0, 0, 0};
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported,
- "VUID-VkRenderPassCreateInfo-pDependencies-00837", "VUID-VkRenderPassCreateInfo2KHR-pDependencies-03054");
-
-    // Geometry shaders not enabled (source stage)
- dependency = {0, 1, VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 0, 0, 0};
-
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported, "VUID-VkSubpassDependency-srcStageMask-00860",
- "VUID-VkSubpassDependency2KHR-srcStageMask-03080");
-
-    // Geometry shaders not enabled (destination stage)
- dependency = {0, 1, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT, 0, 0, 0};
-
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported, "VUID-VkSubpassDependency-dstStageMask-00861",
- "VUID-VkSubpassDependency2KHR-dstStageMask-03081");
-
-    // Tessellation shaders not enabled (source stage)
- dependency = {0, 1, VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 0, 0, 0};
-
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported, "VUID-VkSubpassDependency-srcStageMask-00862",
- "VUID-VkSubpassDependency2KHR-srcStageMask-03082");
-
-    // Tessellation shaders not enabled (destination stage)
- dependency = {0, 1, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT, 0, 0, 0};
-
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported, "VUID-VkSubpassDependency-dstStageMask-00863",
- "VUID-VkSubpassDependency2KHR-dstStageMask-03083");
-
- // Potential cyclical dependency
- dependency = {1, 0, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 0, 0, 0};
-
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported, "VUID-VkSubpassDependency-srcSubpass-00864",
- "VUID-VkSubpassDependency2KHR-srcSubpass-03084");
-
- // EXTERNAL to EXTERNAL dependency
- dependency = {
- VK_SUBPASS_EXTERNAL, VK_SUBPASS_EXTERNAL, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 0, 0, 0};
-
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported, "VUID-VkSubpassDependency-srcSubpass-00865",
- "VUID-VkSubpassDependency2KHR-srcSubpass-03085");
-
- // Logically later source stages in self dependency
- dependency = {0, 0, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, 0, 0, 0};
-
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported, "VUID-VkSubpassDependency-srcSubpass-00867",
- "VUID-VkSubpassDependency2KHR-srcSubpass-03087");
-
- // Source access mask mismatch with source stage mask
- dependency = {0, 1, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, VK_ACCESS_UNIFORM_READ_BIT, 0, 0};
-
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported, "VUID-VkSubpassDependency-srcAccessMask-00868",
- "VUID-VkSubpassDependency2KHR-srcAccessMask-03088");
-
- // Destination access mask mismatch with destination stage mask
- dependency = {
- 0, 1, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, 0, VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, 0};
-
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported, "VUID-VkSubpassDependency-dstAccessMask-00869",
- "VUID-VkSubpassDependency2KHR-dstAccessMask-03089");
-
- if (multiviewSupported) {
- // VIEW_LOCAL_BIT but multiview is not enabled
- dependency = {0, 1, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
- 0, 0, VK_DEPENDENCY_VIEW_LOCAL_BIT};
-
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported, nullptr,
- "VUID-VkRenderPassCreateInfo2KHR-viewMask-03059");
-
- // Enable multiview
- uint32_t pViewMasks[2] = {0x3u, 0x3u};
- int32_t pViewOffsets[2] = {0, 0};
- VkRenderPassMultiviewCreateInfo rpmvci = {
- VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO, nullptr, 2, pViewMasks, 0, nullptr, 0, nullptr};
- rpci.pNext = &rpmvci;
-
- // Excessive view offsets
- dependency = {0, 1, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
- 0, 0, VK_DEPENDENCY_VIEW_LOCAL_BIT};
- rpmvci.pViewOffsets = pViewOffsets;
- rpmvci.dependencyCount = 2;
-
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, false, "VUID-VkRenderPassCreateInfo-pNext-01929", nullptr);
-
- rpmvci.dependencyCount = 0;
-
- // View offset with subpass self dependency
- dependency = {0, 0, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
- 0, 0, VK_DEPENDENCY_VIEW_LOCAL_BIT};
- rpmvci.pViewOffsets = pViewOffsets;
- pViewOffsets[0] = 1;
- rpmvci.dependencyCount = 1;
-
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, false, "VUID-VkRenderPassCreateInfo-pNext-01930", nullptr);
-
- rpmvci.dependencyCount = 0;
-
- // View offset with no view local bit
- if (rp2_supported) {
- dependency = {0, VK_SUBPASS_EXTERNAL, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 0, 0, 0};
- rpmvci.pViewOffsets = pViewOffsets;
- pViewOffsets[0] = 1;
- rpmvci.dependencyCount = 1;
-
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported, nullptr,
- "VUID-VkSubpassDependency2KHR-dependencyFlags-03092");
-
- rpmvci.dependencyCount = 0;
- }
-
- // EXTERNAL subpass with VIEW_LOCAL_BIT - source subpass
- dependency = {VK_SUBPASS_EXTERNAL, 1, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 0, 0,
- VK_DEPENDENCY_VIEW_LOCAL_BIT};
-
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported,
- "VUID-VkSubpassDependency-dependencyFlags-02520",
- "VUID-VkSubpassDependency2KHR-dependencyFlags-03090");
-
- // EXTERNAL subpass with VIEW_LOCAL_BIT - destination subpass
- dependency = {0, VK_SUBPASS_EXTERNAL, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 0,
- 0, VK_DEPENDENCY_VIEW_LOCAL_BIT};
-
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported,
- "VUID-VkSubpassDependency-dependencyFlags-02521",
- "VUID-VkSubpassDependency2KHR-dependencyFlags-03091");
-
- // Multiple views but no view local bit in self-dependency
- dependency = {0, 0, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 0, 0, 0};
-
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported, "VUID-VkSubpassDependency-srcSubpass-00872",
- "VUID-VkRenderPassCreateInfo2KHR-pDependencies-03060");
- }
-}
-
-TEST_F(VkLayerTest, RenderPassCreateInvalidMixedAttachmentSamplesAMD) {
- TEST_DESCRIPTION("Verify error messages for supported and unsupported sample counts in render pass attachments.");
-
- // Check for VK_KHR_get_physical_device_properties2
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- }
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- if (DeviceExtensionSupported(gpu(), nullptr, VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME);
- } else {
- printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME);
- return;
- }
-
- bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
-
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- std::vector<VkAttachmentDescription> attachments;
-
- {
- VkAttachmentDescription att = {};
- att.format = VK_FORMAT_R8G8B8A8_UNORM;
- att.samples = VK_SAMPLE_COUNT_1_BIT;
- att.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
- att.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
- att.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
- att.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
- att.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
- att.finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
-
- attachments.push_back(att);
-
- att.format = VK_FORMAT_D16_UNORM;
- att.samples = VK_SAMPLE_COUNT_4_BIT;
- att.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
- att.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
- att.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
- att.stencilStoreOp = VK_ATTACHMENT_STORE_OP_STORE;
- att.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
- att.finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
-
- attachments.push_back(att);
- }
-
- VkAttachmentReference color_ref = {};
- color_ref.attachment = 0;
- color_ref.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
-
- VkAttachmentReference depth_ref = {};
- depth_ref.attachment = 1;
- depth_ref.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
-
- VkSubpassDescription subpass = {};
- subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
- subpass.colorAttachmentCount = 1;
- subpass.pColorAttachments = &color_ref;
- subpass.pDepthStencilAttachment = &depth_ref;
-
- VkRenderPassCreateInfo rpci = {};
- rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
- rpci.attachmentCount = attachments.size();
- rpci.pAttachments = attachments.data();
- rpci.subpassCount = 1;
- rpci.pSubpasses = &subpass;
-
- m_errorMonitor->ExpectSuccess();
-
- VkRenderPass rp;
- VkResult err;
-
- err = vkCreateRenderPass(device(), &rpci, NULL, &rp);
- m_errorMonitor->VerifyNotFound();
- if (err == VK_SUCCESS) vkDestroyRenderPass(m_device->device(), rp, nullptr);
-
- // Expect an error message for invalid sample counts
- attachments[0].samples = VK_SAMPLE_COUNT_4_BIT;
- attachments[1].samples = VK_SAMPLE_COUNT_1_BIT;
-
- TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
- "VUID-VkSubpassDescription-pColorAttachments-01506",
- "VUID-VkSubpassDescription2KHR-pColorAttachments-03070");
-}
-
-TEST_F(VkLayerTest, RenderPassBeginInvalidRenderArea) {
- TEST_DESCRIPTION("Generate INVALID_RENDER_AREA error by beginning renderpass with extent outside of framebuffer");
- // Check for VK_KHR_get_physical_device_properties2
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- }
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
-
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
-    // The framebuffer for the render target is 256x256; exceed that to trigger INVALID_RENDER_AREA
- m_renderPassBeginInfo.renderArea.extent.width = 257;
- m_renderPassBeginInfo.renderArea.extent.height = 257;
-
- TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &m_renderPassBeginInfo, rp2Supported,
- "Cannot execute a render pass with renderArea not within the bound of the framebuffer.",
- "Cannot execute a render pass with renderArea not within the bound of the framebuffer.");
-}
-
-TEST_F(VkLayerTest, RenderPassBeginWithinRenderPass) {
- // Check for VK_KHR_get_physical_device_properties2
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- }
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR = nullptr;
- bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- if (rp2Supported) {
- vkCmdBeginRenderPass2KHR =
- (PFN_vkCmdBeginRenderPass2KHR)vkGetDeviceProcAddr(m_device->device(), "vkCmdBeginRenderPass2KHR");
- }
-
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
-    // Call vkCmdBeginRenderPass within an already active render pass
- m_commandBuffer->begin();
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
-
-    // Just use a dummy render pass
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBeginRenderPass-renderpass");
- vkCmdBeginRenderPass(m_commandBuffer->handle(), &m_renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
-
- m_errorMonitor->VerifyFound();
-
- if (rp2Supported) {
- VkSubpassBeginInfoKHR subpassBeginInfo = {VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR, nullptr, VK_SUBPASS_CONTENTS_INLINE};
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBeginRenderPass2KHR-renderpass");
- vkCmdBeginRenderPass2KHR(m_commandBuffer->handle(), &m_renderPassBeginInfo, &subpassBeginInfo);
- m_errorMonitor->VerifyFound();
- }
-}
-
-TEST_F(VkLayerTest, RenderPassBeginIncompatibleFramebufferRenderPass) {
- TEST_DESCRIPTION("Test that renderpass begin is compatible with the framebuffer renderpass ");
-
- ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
-
- // Create a depth stencil image view
- VkImageObj image(m_device);
-
- image.Init(128, 128, 1, VK_FORMAT_D16_UNORM, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL);
- ASSERT_TRUE(image.initialized());
-
- VkImageView dsv;
- VkImageViewCreateInfo dsvci = {};
- dsvci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
- dsvci.pNext = nullptr;
- dsvci.image = image.handle();
- dsvci.viewType = VK_IMAGE_VIEW_TYPE_2D;
- dsvci.format = VK_FORMAT_D16_UNORM;
- dsvci.subresourceRange.layerCount = 1;
- dsvci.subresourceRange.baseMipLevel = 0;
- dsvci.subresourceRange.levelCount = 1;
- dsvci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
- vkCreateImageView(m_device->device(), &dsvci, NULL, &dsv);
-
-    // Create a render pass with a single depth/stencil attachment
- VkAttachmentDescription description = {0,
- VK_FORMAT_D16_UNORM,
- VK_SAMPLE_COUNT_1_BIT,
- VK_ATTACHMENT_LOAD_OP_LOAD,
- VK_ATTACHMENT_STORE_OP_DONT_CARE,
- VK_ATTACHMENT_LOAD_OP_CLEAR,
- VK_ATTACHMENT_STORE_OP_DONT_CARE,
- VK_IMAGE_LAYOUT_GENERAL,
- VK_IMAGE_LAYOUT_GENERAL};
-
- VkAttachmentReference depth_stencil_ref = {0, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL};
-
- VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 0, nullptr, nullptr, &depth_stencil_ref, 0,
- nullptr};
-
- VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, &description, 1, &subpass, 0, nullptr};
- VkRenderPass rp1, rp2;
-
- vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp1);
- subpass.pDepthStencilAttachment = nullptr;
- vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp2);
-
- // Create a framebuffer
-
- VkFramebufferCreateInfo fbci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp1, 1, &dsv, 128, 128, 1};
- VkFramebuffer fb;
-
- vkCreateFramebuffer(m_device->handle(), &fbci, nullptr, &fb);
-
- VkRenderPassBeginInfo rp_begin = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, nullptr, rp2, fb, {{0, 0}, {128, 128}}, 0, nullptr};
-
- TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &rp_begin, false,
- "VUID-VkRenderPassBeginInfo-renderPass-00904", nullptr);
-
- vkDestroyRenderPass(m_device->device(), rp1, nullptr);
- vkDestroyRenderPass(m_device->device(), rp2, nullptr);
- vkDestroyFramebuffer(m_device->device(), fb, nullptr);
- vkDestroyImageView(m_device->device(), dsv, nullptr);
-}
-
-TEST_F(VkLayerTest, RenderPassBeginLayoutsFramebufferImageUsageMismatches) {
- TEST_DESCRIPTION(
- "Test that renderpass initial/final layouts match up with the usage bits set for each attachment of the framebuffer");
-
- // Check for VK_KHR_get_physical_device_properties2
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- }
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
- bool maintenance2Supported = rp2Supported;
-
- // Check for VK_KHR_maintenance2
- if (!rp2Supported && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE2_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_KHR_MAINTENANCE2_EXTENSION_NAME);
- maintenance2Supported = true;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
-
- if (m_device->props.apiVersion >= VK_API_VERSION_1_1) {
- maintenance2Supported = true;
- }
-
- // Create an input attachment view
- VkImageObj iai(m_device);
-
- iai.InitNoLayout(128, 128, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL);
- ASSERT_TRUE(iai.initialized());
-
- VkImageView iav;
- VkImageViewCreateInfo iavci = {};
- iavci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
- iavci.pNext = nullptr;
- iavci.image = iai.handle();
- iavci.viewType = VK_IMAGE_VIEW_TYPE_2D;
- iavci.format = VK_FORMAT_R8G8B8A8_UNORM;
- iavci.subresourceRange.layerCount = 1;
- iavci.subresourceRange.baseMipLevel = 0;
- iavci.subresourceRange.levelCount = 1;
- iavci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- vkCreateImageView(m_device->device(), &iavci, NULL, &iav);
-
- // Create a color attachment view
- VkImageObj cai(m_device);
-
- cai.InitNoLayout(128, 128, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL);
- ASSERT_TRUE(cai.initialized());
-
- VkImageView cav;
- VkImageViewCreateInfo cavci = {};
- cavci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
- cavci.pNext = nullptr;
- cavci.image = cai.handle();
- cavci.viewType = VK_IMAGE_VIEW_TYPE_2D;
- cavci.format = VK_FORMAT_R8G8B8A8_UNORM;
- cavci.subresourceRange.layerCount = 1;
- cavci.subresourceRange.baseMipLevel = 0;
- cavci.subresourceRange.levelCount = 1;
- cavci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- vkCreateImageView(m_device->device(), &cavci, NULL, &cav);
-
- // Create a renderPass with those attachments
- VkAttachmentDescription descriptions[] = {
- {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
- VK_ATTACHMENT_LOAD_OP_CLEAR, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL},
- {1, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
- VK_ATTACHMENT_LOAD_OP_CLEAR, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL}};
-
- VkAttachmentReference input_ref = {0, VK_IMAGE_LAYOUT_GENERAL};
- VkAttachmentReference color_ref = {1, VK_IMAGE_LAYOUT_GENERAL};
-
- VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 1, &input_ref, 1, &color_ref, nullptr, nullptr, 0, nullptr};
-
- VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 2, descriptions, 1, &subpass, 0, nullptr};
-
- VkRenderPass rp;
-
- vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp);
-
- // Create a framebuffer
-
- VkImageView views[] = {iav, cav};
-
- VkFramebufferCreateInfo fbci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 2, views, 128, 128, 1};
- VkFramebuffer fb;
-
- vkCreateFramebuffer(m_device->handle(), &fbci, nullptr, &fb);
-
- VkRenderPassBeginInfo rp_begin = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, nullptr, rp, fb, {{0, 0}, {128, 128}}, 0, nullptr};
-
- VkRenderPass rp_invalid;
-
- // Initial layout is VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL but attachment doesn't support IMAGE_USAGE_COLOR_ATTACHMENT_BIT
- descriptions[0].initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
- vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp_invalid);
- rp_begin.renderPass = rp_invalid;
- TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &rp_begin, rp2Supported,
- "VUID-vkCmdBeginRenderPass-initialLayout-00895", "VUID-vkCmdBeginRenderPass2KHR-initialLayout-03094");
-
- vkDestroyRenderPass(m_device->handle(), rp_invalid, nullptr);
-
- // Initial layout is VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL but attachment doesn't support VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT
- // / VK_IMAGE_USAGE_SAMPLED_BIT
- descriptions[0].initialLayout = VK_IMAGE_LAYOUT_GENERAL;
- descriptions[1].initialLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
- vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp_invalid);
- rp_begin.renderPass = rp_invalid;
-
- TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &rp_begin, rp2Supported,
- "VUID-vkCmdBeginRenderPass-initialLayout-00897", "VUID-vkCmdBeginRenderPass2KHR-initialLayout-03097");
-
- vkDestroyRenderPass(m_device->handle(), rp_invalid, nullptr);
- descriptions[1].initialLayout = VK_IMAGE_LAYOUT_GENERAL;
-
- // Initial layout is VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL but attachment doesn't support VK_IMAGE_USAGE_TRANSFER_SRC_BIT
- descriptions[0].initialLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
- vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp_invalid);
- rp_begin.renderPass = rp_invalid;
-
- TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &rp_begin, rp2Supported,
- "VUID-vkCmdBeginRenderPass-initialLayout-00898", "VUID-vkCmdBeginRenderPass2KHR-initialLayout-03098");
-
- vkDestroyRenderPass(m_device->handle(), rp_invalid, nullptr);
-
- // Initial layout is VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL but attachment doesn't support VK_IMAGE_USAGE_TRANSFER_DST_BIT
- descriptions[0].initialLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
- vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp_invalid);
- rp_begin.renderPass = rp_invalid;
-
- TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &rp_begin, rp2Supported,
- "VUID-vkCmdBeginRenderPass-initialLayout-00899", "VUID-vkCmdBeginRenderPass2KHR-initialLayout-03099");
-
- vkDestroyRenderPass(m_device->handle(), rp_invalid, nullptr);
-
- // Initial layout is VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL but attachment doesn't support
- // VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT
- descriptions[0].initialLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
- vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp_invalid);
- rp_begin.renderPass = rp_invalid;
- const char *initial_layout_vuid_rp1 =
- maintenance2Supported ? "VUID-vkCmdBeginRenderPass-initialLayout-01758" : "VUID-vkCmdBeginRenderPass-initialLayout-00896";
-
- TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &rp_begin, rp2Supported,
- initial_layout_vuid_rp1, "VUID-vkCmdBeginRenderPass2KHR-initialLayout-03096");
-
- vkDestroyRenderPass(m_device->handle(), rp_invalid, nullptr);
-
- // Initial layout is VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL but attachment doesn't support
- // VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT
- descriptions[0].initialLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL;
- vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp_invalid);
- rp_begin.renderPass = rp_invalid;
-
- TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &rp_begin, rp2Supported,
- initial_layout_vuid_rp1, "VUID-vkCmdBeginRenderPass2KHR-initialLayout-03096");
-
- vkDestroyRenderPass(m_device->handle(), rp_invalid, nullptr);
-
- if (maintenance2Supported || rp2Supported) {
- // Initial layout is VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL but attachment doesn't support
- // VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT
- descriptions[0].initialLayout = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL;
- vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp_invalid);
- rp_begin.renderPass = rp_invalid;
-
- TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &rp_begin, rp2Supported,
- "VUID-vkCmdBeginRenderPass-initialLayout-01758", "VUID-vkCmdBeginRenderPass2KHR-initialLayout-03096");
-
- vkDestroyRenderPass(m_device->handle(), rp_invalid, nullptr);
-
- // Initial layout is VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL but attachment doesn't support
- // VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT
- descriptions[0].initialLayout = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL;
- vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp_invalid);
- rp_begin.renderPass = rp_invalid;
-
- TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &rp_begin, rp2Supported,
- "VUID-vkCmdBeginRenderPass-initialLayout-01758", "VUID-vkCmdBeginRenderPass2KHR-initialLayout-03096");
-
- vkDestroyRenderPass(m_device->handle(), rp_invalid, nullptr);
- }
-
- vkDestroyRenderPass(m_device->device(), rp, nullptr);
- vkDestroyFramebuffer(m_device->device(), fb, nullptr);
- vkDestroyImageView(m_device->device(), iav, nullptr);
- vkDestroyImageView(m_device->device(), cav, nullptr);
-}
-
-TEST_F(VkLayerTest, RenderPassBeginClearOpMismatch) {
- TEST_DESCRIPTION(
- "Begin a renderPass where clearValueCount is less than the number of renderPass attachments that use "
- "loadOp VK_ATTACHMENT_LOAD_OP_CLEAR.");
-
- // Check for VK_KHR_get_physical_device_properties2
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- }
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
-
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- // Create a renderPass with a single attachment that uses loadOp CLEAR
- VkAttachmentReference attach = {};
- attach.layout = VK_IMAGE_LAYOUT_GENERAL;
- VkSubpassDescription subpass = {};
- subpass.colorAttachmentCount = 1;
- subpass.pColorAttachments = &attach;
- VkRenderPassCreateInfo rpci = {};
- rpci.subpassCount = 1;
- rpci.pSubpasses = &subpass;
- rpci.attachmentCount = 1;
- VkAttachmentDescription attach_desc = {};
- attach_desc.format = VK_FORMAT_B8G8R8A8_UNORM;
- // Set loadOp to CLEAR
- attach_desc.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
- attach_desc.samples = VK_SAMPLE_COUNT_1_BIT;
- attach_desc.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
- rpci.pAttachments = &attach_desc;
- rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
- VkRenderPass rp;
- vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp);
-
- VkRenderPassBeginInfo rp_begin = {};
- rp_begin.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
- rp_begin.pNext = NULL;
- rp_begin.renderPass = renderPass();
- rp_begin.framebuffer = framebuffer();
- rp_begin.clearValueCount = 0; // Should be 1
-
- TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &rp_begin, rp2Supported,
- "VUID-VkRenderPassBeginInfo-clearValueCount-00902", "VUID-VkRenderPassBeginInfo-clearValueCount-00902");
-
- vkDestroyRenderPass(m_device->device(), rp, NULL);
-}
-
-TEST_F(VkLayerTest, RenderPassBeginSampleLocationsInvalidIndicesEXT) {
- TEST_DESCRIPTION("Test that attachment indices and subpass indices specifed by sample locations structures are valid");
-
- // Check for VK_KHR_get_physical_device_properties2
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- }
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- if (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_SAMPLE_LOCATIONS_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_EXT_SAMPLE_LOCATIONS_EXTENSION_NAME);
- } else {
- printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_EXT_SAMPLE_LOCATIONS_EXTENSION_NAME);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
-
- // Create a depth stencil image view
- VkImageObj image(m_device);
-
- image.Init(128, 128, 1, VK_FORMAT_D16_UNORM, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL);
- ASSERT_TRUE(image.initialized());
-
- VkImageView dsv;
- VkImageViewCreateInfo dsvci = {};
- dsvci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
- dsvci.pNext = nullptr;
- dsvci.image = image.handle();
- dsvci.viewType = VK_IMAGE_VIEW_TYPE_2D;
- dsvci.format = VK_FORMAT_D16_UNORM;
- dsvci.subresourceRange.layerCount = 1;
- dsvci.subresourceRange.baseMipLevel = 0;
- dsvci.subresourceRange.levelCount = 1;
- dsvci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
- vkCreateImageView(m_device->device(), &dsvci, NULL, &dsv);
-
-    // Create a render pass with a single depth/stencil attachment
- VkAttachmentDescription description = {0,
- VK_FORMAT_D16_UNORM,
- VK_SAMPLE_COUNT_1_BIT,
- VK_ATTACHMENT_LOAD_OP_LOAD,
- VK_ATTACHMENT_STORE_OP_DONT_CARE,
- VK_ATTACHMENT_LOAD_OP_CLEAR,
- VK_ATTACHMENT_STORE_OP_DONT_CARE,
- VK_IMAGE_LAYOUT_GENERAL,
- VK_IMAGE_LAYOUT_GENERAL};
-
- VkAttachmentReference depth_stencil_ref = {0, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL};
-
- VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 0, nullptr, nullptr, &depth_stencil_ref, 0,
- nullptr};
-
- VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, &description, 1, &subpass, 0, nullptr};
- VkRenderPass rp;
-
- vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp);
-
- // Create a framebuffer
-
- VkFramebufferCreateInfo fbci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 1, &dsv, 128, 128, 1};
- VkFramebuffer fb;
-
- vkCreateFramebuffer(m_device->handle(), &fbci, nullptr, &fb);
-
- VkSampleLocationEXT sample_location = {0.5, 0.5};
-
- VkSampleLocationsInfoEXT sample_locations_info = {
- VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT, nullptr, VK_SAMPLE_COUNT_1_BIT, {1, 1}, 1, &sample_location};
-
- VkAttachmentSampleLocationsEXT attachment_sample_locations = {0, sample_locations_info};
- VkSubpassSampleLocationsEXT subpass_sample_locations = {0, sample_locations_info};
-
- VkRenderPassSampleLocationsBeginInfoEXT rp_sl_begin = {VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT,
- nullptr,
- 1,
- &attachment_sample_locations,
- 1,
- &subpass_sample_locations};
-
- VkRenderPassBeginInfo rp_begin = {
- VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, &rp_sl_begin, rp, fb, {{0, 0}, {128, 128}}, 0, nullptr};
-
- attachment_sample_locations.attachmentIndex = 1;
- TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &rp_begin, false,
- "VUID-VkAttachmentSampleLocationsEXT-attachmentIndex-01531", nullptr);
- attachment_sample_locations.attachmentIndex = 0;
-
- subpass_sample_locations.subpassIndex = 1;
- TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &rp_begin, false,
- "VUID-VkSubpassSampleLocationsEXT-subpassIndex-01532", nullptr);
- subpass_sample_locations.subpassIndex = 0;
-
- vkDestroyRenderPass(m_device->device(), rp, nullptr);
- vkDestroyFramebuffer(m_device->device(), fb, nullptr);
- vkDestroyImageView(m_device->device(), dsv, nullptr);
-}
-
-TEST_F(VkLayerTest, RenderPassNextSubpassExcessive) {
- TEST_DESCRIPTION("Test that an error is produced when CmdNextSubpass is called too many times in a renderpass instance");
-
- // Check for VK_KHR_get_physical_device_properties2
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- }
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR = nullptr;
- bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- if (rp2Supported) {
- vkCmdNextSubpass2KHR = (PFN_vkCmdNextSubpass2KHR)vkGetDeviceProcAddr(m_device->device(), "vkCmdNextSubpass2KHR");
- }
-
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- m_commandBuffer->begin();
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdNextSubpass-None-00909");
- vkCmdNextSubpass(m_commandBuffer->handle(), VK_SUBPASS_CONTENTS_INLINE);
- m_errorMonitor->VerifyFound();
-
- if (rp2Supported) {
- VkSubpassBeginInfoKHR subpassBeginInfo = {VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR, nullptr, VK_SUBPASS_CONTENTS_INLINE};
- VkSubpassEndInfoKHR subpassEndInfo = {VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR, nullptr};
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdNextSubpass2KHR-None-03102");
-
- vkCmdNextSubpass2KHR(m_commandBuffer->handle(), &subpassBeginInfo, &subpassEndInfo);
- m_errorMonitor->VerifyFound();
- }
-
- m_commandBuffer->EndRenderPass();
- m_commandBuffer->end();
-}
-
-TEST_F(VkLayerTest, RenderPassEndBeforeFinalSubpass) {
- TEST_DESCRIPTION("Test that an error is produced when CmdEndRenderPass is called before the final subpass has been reached");
-
- // Check for VK_KHR_get_physical_device_properties2
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- }
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR = nullptr;
- bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
-
- if (rp2Supported) {
- vkCmdEndRenderPass2KHR = (PFN_vkCmdEndRenderPass2KHR)vkGetDeviceProcAddr(m_device->device(), "vkCmdEndRenderPass2KHR");
- }
-
- VkSubpassDescription sd[2] = {{0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 0, nullptr, nullptr, nullptr, 0, nullptr},
- {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 0, nullptr, nullptr, nullptr, 0, nullptr}};
-
- VkRenderPassCreateInfo rcpi = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 0, nullptr, 2, sd, 0, nullptr};
-
- VkRenderPass rp;
- VkResult err = vkCreateRenderPass(m_device->device(), &rcpi, nullptr, &rp);
- ASSERT_VK_SUCCESS(err);
-
- VkFramebufferCreateInfo fbci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 0, nullptr, 16, 16, 1};
-
- VkFramebuffer fb;
- err = vkCreateFramebuffer(m_device->device(), &fbci, nullptr, &fb);
- ASSERT_VK_SUCCESS(err);
-
- m_commandBuffer->begin();
-
- VkRenderPassBeginInfo rpbi = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, nullptr, rp, fb, {{0, 0}, {16, 16}}, 0, nullptr};
-
- vkCmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdEndRenderPass-None-00910");
- vkCmdEndRenderPass(m_commandBuffer->handle());
- m_errorMonitor->VerifyFound();
-
- if (rp2Supported) {
- VkSubpassEndInfoKHR subpassEndInfo = {VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR, nullptr};
-
- m_commandBuffer->reset();
- m_commandBuffer->begin();
- vkCmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdEndRenderPass2KHR-None-03103");
- vkCmdEndRenderPass2KHR(m_commandBuffer->handle(), &subpassEndInfo);
- m_errorMonitor->VerifyFound();
- }
-
- // Clean up.
- vkDestroyFramebuffer(m_device->device(), fb, nullptr);
- vkDestroyRenderPass(m_device->device(), rp, nullptr);
-}
-
-TEST_F(VkLayerTest, RenderPassDestroyWhileInUse) {
- TEST_DESCRIPTION("Delete in-use renderPass.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- // Create simple renderpass
- VkAttachmentReference attach = {};
- attach.layout = VK_IMAGE_LAYOUT_GENERAL;
- VkSubpassDescription subpass = {};
- subpass.colorAttachmentCount = 1;
- subpass.pColorAttachments = &attach;
- VkRenderPassCreateInfo rpci = {};
- rpci.subpassCount = 1;
- rpci.pSubpasses = &subpass;
- rpci.attachmentCount = 1;
- VkAttachmentDescription attach_desc = {};
- attach_desc.format = VK_FORMAT_B8G8R8A8_UNORM;
- attach_desc.samples = VK_SAMPLE_COUNT_1_BIT;
- attach_desc.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
- rpci.pAttachments = &attach_desc;
- rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
- VkRenderPass rp;
- VkResult err = vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp);
- ASSERT_VK_SUCCESS(err);
-
- m_errorMonitor->ExpectSuccess();
-
- m_commandBuffer->begin();
- VkRenderPassBeginInfo rpbi = {};
- rpbi.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
- rpbi.framebuffer = m_framebuffer;
- rpbi.renderPass = rp;
- m_commandBuffer->BeginRenderPass(rpbi);
- m_commandBuffer->EndRenderPass();
- m_commandBuffer->end();
-
- VkSubmitInfo submit_info = {};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &m_commandBuffer->handle();
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
- m_errorMonitor->VerifyNotFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkDestroyRenderPass-renderPass-00873");
- vkDestroyRenderPass(m_device->device(), rp, nullptr);
- m_errorMonitor->VerifyFound();
-
- // Wait for queue to complete so we can safely destroy rp
- vkQueueWaitIdle(m_device->m_queue);
- m_errorMonitor->SetUnexpectedError("If renderPass is not VK_NULL_HANDLE, renderPass must be a valid VkRenderPass handle");
- m_errorMonitor->SetUnexpectedError("Was it created? Has it already been destroyed?");
- vkDestroyRenderPass(m_device->device(), rp, nullptr);
-}
-
-TEST_F(VkLayerTest, FramebufferCreateErrors) {
- TEST_DESCRIPTION(
- "Hit errors when attempting to create a framebuffer :\n"
- " 1. Mismatch between framebuffer & renderPass attachmentCount\n"
- " 2. Use a color image as depthStencil attachment\n"
- " 3. Mismatch framebuffer & renderPass attachment formats\n"
- " 4. Mismatch framebuffer & renderPass attachment #samples\n"
- " 5. Framebuffer attachment w/ non-1 mip-levels\n"
- " 6. Framebuffer attachment where dimensions don't match\n"
- " 7. Framebuffer attachment where dimensions don't match\n"
- " 8. Framebuffer attachment w/o identity swizzle\n"
- " 9. framebuffer dimensions exceed physical device limits\n"
- "10. null pAttachments\n");
-
- // Check for VK_KHR_get_physical_device_properties2
- bool push_physical_device_properties_2_support =
- InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (push_physical_device_properties_2_support) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- }
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- bool push_fragment_density_support = false;
-
- if (push_physical_device_properties_2_support) {
- push_fragment_density_support = DeviceExtensionSupported(gpu(), nullptr, VK_EXT_FRAGMENT_DENSITY_MAP_EXTENSION_NAME);
- if (push_fragment_density_support) m_device_extension_names.push_back(VK_EXT_FRAGMENT_DENSITY_MAP_EXTENSION_NAME);
- }
-
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, 0));
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-attachmentCount-00876");
-
- // Create a renderPass with a single color attachment
- VkAttachmentReference attach = {};
- attach.layout = VK_IMAGE_LAYOUT_GENERAL;
- VkSubpassDescription subpass = {};
- subpass.pColorAttachments = &attach;
- VkRenderPassCreateInfo rpci = {};
- rpci.subpassCount = 1;
- rpci.pSubpasses = &subpass;
- rpci.attachmentCount = 1;
- VkAttachmentDescription attach_desc = {};
- attach_desc.format = VK_FORMAT_B8G8R8A8_UNORM;
- attach_desc.samples = VK_SAMPLE_COUNT_1_BIT;
- attach_desc.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
- rpci.pAttachments = &attach_desc;
- rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
- VkRenderPass rp;
- VkResult err = vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp);
- ASSERT_VK_SUCCESS(err);
-
- VkImageView ivs[2];
- ivs[0] = m_renderTargets[0]->targetView(VK_FORMAT_B8G8R8A8_UNORM);
- ivs[1] = m_renderTargets[0]->targetView(VK_FORMAT_B8G8R8A8_UNORM);
- VkFramebufferCreateInfo fb_info = {};
- fb_info.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
- fb_info.pNext = NULL;
- fb_info.renderPass = rp;
- // Set mis-matching attachmentCount
- fb_info.attachmentCount = 2;
- fb_info.pAttachments = ivs;
- fb_info.width = 100;
- fb_info.height = 100;
- fb_info.layers = 1;
-
- VkFramebuffer fb;
- err = vkCreateFramebuffer(device(), &fb_info, NULL, &fb);
-
- m_errorMonitor->VerifyFound();
- if (err == VK_SUCCESS) {
- vkDestroyFramebuffer(m_device->device(), fb, NULL);
- }
- vkDestroyRenderPass(m_device->device(), rp, NULL);
-
- // Create a renderPass with a depth-stencil attachment created with
- // IMAGE_USAGE_COLOR_ATTACHMENT
- // Add our color attachment to pDepthStencilAttachment
- subpass.pDepthStencilAttachment = &attach;
- subpass.pColorAttachments = NULL;
- VkRenderPass rp_ds;
- err = vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp_ds);
- ASSERT_VK_SUCCESS(err);
- // Set correct attachment count, but attachment has COLOR usage bit set
- fb_info.attachmentCount = 1;
- fb_info.renderPass = rp_ds;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-pAttachments-02633");
- err = vkCreateFramebuffer(device(), &fb_info, NULL, &fb);
-
- m_errorMonitor->VerifyFound();
- if (err == VK_SUCCESS) {
- vkDestroyFramebuffer(m_device->device(), fb, NULL);
- }
- vkDestroyRenderPass(m_device->device(), rp_ds, NULL);
-
- // Create new renderpass with alternate attachment format from fb
- attach_desc.format = VK_FORMAT_R8G8B8A8_UNORM;
- subpass.pDepthStencilAttachment = NULL;
- subpass.pColorAttachments = &attach;
- err = vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp);
- ASSERT_VK_SUCCESS(err);
-
- // Cause error due to mis-matched formats between rp & fb
- // rp attachment 0 now has RGBA8 but corresponding fb attach is BGRA8
- fb_info.renderPass = rp;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-pAttachments-00880");
- err = vkCreateFramebuffer(device(), &fb_info, NULL, &fb);
-
- m_errorMonitor->VerifyFound();
- if (err == VK_SUCCESS) {
- vkDestroyFramebuffer(m_device->device(), fb, NULL);
- }
- vkDestroyRenderPass(m_device->device(), rp, NULL);
-
- // Create new renderpass with alternate sample count from fb
- attach_desc.format = VK_FORMAT_B8G8R8A8_UNORM;
- attach_desc.samples = VK_SAMPLE_COUNT_4_BIT;
- err = vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp);
- ASSERT_VK_SUCCESS(err);
-
- // Cause error due to mis-matched sample count between rp & fb
- fb_info.renderPass = rp;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-pAttachments-00881");
- err = vkCreateFramebuffer(device(), &fb_info, NULL, &fb);
-
- m_errorMonitor->VerifyFound();
- if (err == VK_SUCCESS) {
- vkDestroyFramebuffer(m_device->device(), fb, NULL);
- }
-
- vkDestroyRenderPass(m_device->device(), rp, NULL);
-
- {
- // Create an image with 2 mip levels.
- VkImageObj image(m_device);
- image.Init(128, 128, 2, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
- ASSERT_TRUE(image.initialized());
-
-        // Create an image view with two mip levels.
- VkImageView view;
- VkImageViewCreateInfo ivci = {};
- ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
- ivci.image = image.handle();
- ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
- ivci.format = VK_FORMAT_B8G8R8A8_UNORM;
- ivci.subresourceRange.layerCount = 1;
- ivci.subresourceRange.baseMipLevel = 0;
- // Set level count to 2 (only 1 is allowed for FB attachment)
- ivci.subresourceRange.levelCount = 2;
- ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- err = vkCreateImageView(m_device->device(), &ivci, NULL, &view);
- ASSERT_VK_SUCCESS(err);
-
- // Re-create renderpass to have matching sample count
- attach_desc.samples = VK_SAMPLE_COUNT_1_BIT;
- err = vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp);
- ASSERT_VK_SUCCESS(err);
-
- fb_info.renderPass = rp;
- fb_info.pAttachments = &view;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-pAttachments-00883");
- err = vkCreateFramebuffer(device(), &fb_info, NULL, &fb);
-
- m_errorMonitor->VerifyFound();
- if (err == VK_SUCCESS) {
- vkDestroyFramebuffer(m_device->device(), fb, NULL);
- }
- vkDestroyImageView(m_device->device(), view, NULL);
- }
-
-    // Point the attachments back at the original color buffer and grow the FB dimensions beyond it
- fb_info.pAttachments = ivs;
- fb_info.height = 1024;
- fb_info.width = 1024;
- fb_info.layers = 2;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-pAttachments-00882");
- err = vkCreateFramebuffer(device(), &fb_info, NULL, &fb);
-
- m_errorMonitor->VerifyFound();
- if (err == VK_SUCCESS) {
- vkDestroyFramebuffer(m_device->device(), fb, NULL);
- }
-
- {
- if (!push_fragment_density_support) {
- printf("%s VK_EXT_fragment_density_map Extension not supported, skipping tests\n", kSkipPrefix);
- } else {
- uint32_t attachment_width = 512;
- uint32_t attachment_height = 512;
- VkFormat attachment_format = VK_FORMAT_R8G8_UNORM;
- uint32_t frame_width = 512;
- uint32_t frame_height = 512;
-
- // Create a renderPass with a single color attachment for fragment density map
- VkAttachmentReference attach_fragment_density_map = {};
- attach_fragment_density_map.layout = VK_IMAGE_LAYOUT_GENERAL;
- VkSubpassDescription subpass_fragment_density_map = {};
- subpass_fragment_density_map.pColorAttachments = &attach_fragment_density_map;
- VkRenderPassCreateInfo rpci_fragment_density_map = {};
- rpci_fragment_density_map.subpassCount = 1;
- rpci_fragment_density_map.pSubpasses = &subpass_fragment_density_map;
- rpci_fragment_density_map.attachmentCount = 1;
- VkAttachmentDescription attach_desc_fragment_density_map = {};
- attach_desc_fragment_density_map.format = attachment_format;
- attach_desc_fragment_density_map.samples = VK_SAMPLE_COUNT_1_BIT;
- attach_desc_fragment_density_map.finalLayout = VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT;
- rpci_fragment_density_map.pAttachments = &attach_desc_fragment_density_map;
- rpci_fragment_density_map.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
- VkRenderPass rp_fragment_density_map;
-
- err = vkCreateRenderPass(m_device->device(), &rpci_fragment_density_map, NULL, &rp_fragment_density_map);
- ASSERT_VK_SUCCESS(err);
-
- // Create view attachment
- VkImageView view_fragment_density_map;
- VkImageViewCreateInfo ivci = {};
- ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
- ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
- ivci.format = attachment_format;
- ivci.flags = 0;
- ivci.subresourceRange.layerCount = 1;
- ivci.subresourceRange.baseMipLevel = 0;
- ivci.subresourceRange.levelCount = 1;
- ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
-
- VkFramebufferAttachmentImageInfoKHR fb_fdm = {};
- fb_fdm.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR;
- fb_fdm.usage = VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT;
- fb_fdm.width = frame_width;
- fb_fdm.height = frame_height;
- fb_fdm.layerCount = 1;
- fb_fdm.viewFormatCount = 1;
- fb_fdm.pViewFormats = &attachment_format;
- VkFramebufferAttachmentsCreateInfoKHR fb_aci_fdm = {};
- fb_aci_fdm.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR;
- fb_aci_fdm.attachmentImageInfoCount = 1;
- fb_aci_fdm.pAttachmentImageInfos = &fb_fdm;
-
- VkFramebufferCreateInfo fbci = {};
- fbci.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
- fbci.pNext = &fb_aci_fdm;
- fbci.flags = 0;
- fbci.width = frame_width;
- fbci.height = frame_height;
- fbci.layers = 1;
- fbci.renderPass = rp_fragment_density_map;
- fbci.attachmentCount = 1;
- fbci.pAttachments = &view_fragment_density_map;
-
- // Set small width
- VkImageObj image2(m_device);
- image2.Init(16, attachment_height, 1, attachment_format, VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT,
- VK_IMAGE_TILING_LINEAR, 0);
- ASSERT_TRUE(image2.initialized());
-
- ivci.image = image2.handle();
- err = vkCreateImageView(m_device->device(), &ivci, NULL, &view_fragment_density_map);
- ASSERT_VK_SUCCESS(err);
-
- fbci.pAttachments = &view_fragment_density_map;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-pAttachments-02555");
- err = vkCreateFramebuffer(device(), &fbci, NULL, &fb);
-
- m_errorMonitor->VerifyFound();
- if (err == VK_SUCCESS) {
- vkDestroyFramebuffer(m_device->device(), fb, NULL);
- }
-
- vkDestroyImageView(m_device->device(), view_fragment_density_map, NULL);
-
- // Set small height
- VkImageObj image3(m_device);
- image3.Init(attachment_width, 16, 1, attachment_format, VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT,
- VK_IMAGE_TILING_LINEAR, 0);
- ASSERT_TRUE(image3.initialized());
-
- ivci.image = image3.handle();
- err = vkCreateImageView(m_device->device(), &ivci, NULL, &view_fragment_density_map);
- ASSERT_VK_SUCCESS(err);
-
- fbci.pAttachments = &view_fragment_density_map;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-pAttachments-02556");
- err = vkCreateFramebuffer(device(), &fbci, NULL, &fb);
-
- m_errorMonitor->VerifyFound();
- if (err == VK_SUCCESS) {
- vkDestroyFramebuffer(m_device->device(), fb, NULL);
- }
-
- vkDestroyImageView(m_device->device(), view_fragment_density_map, NULL);
-
- vkDestroyRenderPass(m_device->device(), rp_fragment_density_map, NULL);
- }
- }
-
- {
- // Create an image with one mip level.
- VkImageObj image(m_device);
- image.Init(128, 128, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
- ASSERT_TRUE(image.initialized());
-
- // Create view attachment with non-identity swizzle
- VkImageView view;
- VkImageViewCreateInfo ivci = {};
- ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
- ivci.image = image.handle();
- ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
- ivci.format = VK_FORMAT_B8G8R8A8_UNORM;
- ivci.subresourceRange.layerCount = 1;
- ivci.subresourceRange.baseMipLevel = 0;
- ivci.subresourceRange.levelCount = 1;
- ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- ivci.components.r = VK_COMPONENT_SWIZZLE_G;
- ivci.components.g = VK_COMPONENT_SWIZZLE_R;
- ivci.components.b = VK_COMPONENT_SWIZZLE_A;
- ivci.components.a = VK_COMPONENT_SWIZZLE_B;
- err = vkCreateImageView(m_device->device(), &ivci, NULL, &view);
- ASSERT_VK_SUCCESS(err);
-
- fb_info.pAttachments = &view;
- fb_info.height = 100;
- fb_info.width = 100;
- fb_info.layers = 1;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-pAttachments-00884");
- err = vkCreateFramebuffer(device(), &fb_info, NULL, &fb);
-
- m_errorMonitor->VerifyFound();
- if (err == VK_SUCCESS) {
- vkDestroyFramebuffer(m_device->device(), fb, NULL);
- }
- vkDestroyImageView(m_device->device(), view, NULL);
- }
-
- // reset attachment to color attachment
- fb_info.pAttachments = ivs;
-
- // Request fb that exceeds max width
- fb_info.width = m_device->props.limits.maxFramebufferWidth + 1;
- fb_info.height = 100;
- fb_info.layers = 1;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-width-00886");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-pAttachments-00882");
- err = vkCreateFramebuffer(device(), &fb_info, NULL, &fb);
- m_errorMonitor->VerifyFound();
- if (err == VK_SUCCESS) {
- vkDestroyFramebuffer(m_device->device(), fb, NULL);
- }
- // and width=0
- fb_info.width = 0;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-width-00885");
- err = vkCreateFramebuffer(device(), &fb_info, NULL, &fb);
- m_errorMonitor->VerifyFound();
- if (err == VK_SUCCESS) {
- vkDestroyFramebuffer(m_device->device(), fb, NULL);
- }
-
- // Request fb that exceeds max height
- fb_info.width = 100;
- fb_info.height = m_device->props.limits.maxFramebufferHeight + 1;
- fb_info.layers = 1;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-height-00888");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-pAttachments-00882");
- err = vkCreateFramebuffer(device(), &fb_info, NULL, &fb);
- m_errorMonitor->VerifyFound();
- if (err == VK_SUCCESS) {
- vkDestroyFramebuffer(m_device->device(), fb, NULL);
- }
- // and height=0
- fb_info.height = 0;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-height-00887");
- err = vkCreateFramebuffer(device(), &fb_info, NULL, &fb);
- m_errorMonitor->VerifyFound();
- if (err == VK_SUCCESS) {
- vkDestroyFramebuffer(m_device->device(), fb, NULL);
- }
-
- // Request fb that exceeds max layers
- fb_info.width = 100;
- fb_info.height = 100;
- fb_info.layers = m_device->props.limits.maxFramebufferLayers + 1;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-layers-00890");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-pAttachments-00882");
- err = vkCreateFramebuffer(device(), &fb_info, NULL, &fb);
- m_errorMonitor->VerifyFound();
- if (err == VK_SUCCESS) {
- vkDestroyFramebuffer(m_device->device(), fb, NULL);
- }
- // and layers=0
- fb_info.layers = 0;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-layers-00889");
- err = vkCreateFramebuffer(device(), &fb_info, NULL, &fb);
- m_errorMonitor->VerifyFound();
- if (err == VK_SUCCESS) {
- vkDestroyFramebuffer(m_device->device(), fb, NULL);
- }
-
- // Try to create with pAttachments = NULL
- fb_info.layers = 1;
- fb_info.pAttachments = NULL;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID_Undefined");
- err = vkCreateFramebuffer(device(), &fb_info, NULL, &fb);
- m_errorMonitor->VerifyFound();
- if (err == VK_SUCCESS) {
- vkDestroyFramebuffer(m_device->device(), fb, NULL);
- }
-
- vkDestroyRenderPass(m_device->device(), rp, NULL);
-}
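-
-// A minimal sketch (assuming the same fb_info and ivs attachments as above, and attachments of
-// at least 100x100) of a framebuffer that stays inside the device limits exercised above:
-//   fb_info.pAttachments = ivs;
-//   fb_info.width = 100;   // <= m_device->props.limits.maxFramebufferWidth (spec minimum is 4096)
-//   fb_info.height = 100;  // <= m_device->props.limits.maxFramebufferHeight
-//   fb_info.layers = 1;    // in [1, m_device->props.limits.maxFramebufferLayers]
-//   // With all three dimensions in range and non-NULL pAttachments, vkCreateFramebuffer is
-//   // expected to succeed without triggering the width/height/layers VUIDs checked above.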
-
-TEST_F(VkLayerTest, AllocDescriptorFromEmptyPool) {
- TEST_DESCRIPTION("Attempt to allocate more sets and descriptors than descriptor pool has available.");
- VkResult err;
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- // This test is valid for Vulkan 1.0 only -- skip if device has an API version greater than 1.0.
- if (m_device->props.apiVersion >= VK_API_VERSION_1_1) {
- printf("%s Device has apiVersion greater than 1.0 -- skipping Descriptor Set checks.\n", kSkipPrefix);
- return;
- }
-
- // Create a pool with only Sampler descriptors (maxSets = 1), then try to allocate a Uniform Buffer
- // descriptor from it (a pool sized to satisfy both allocations is sketched after this test)
- VkDescriptorPoolSize ds_type_count = {};
- ds_type_count.type = VK_DESCRIPTOR_TYPE_SAMPLER;
- ds_type_count.descriptorCount = 2;
-
- VkDescriptorPoolCreateInfo ds_pool_ci = {};
- ds_pool_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
- ds_pool_ci.pNext = NULL;
- ds_pool_ci.flags = 0;
- ds_pool_ci.maxSets = 1;
- ds_pool_ci.poolSizeCount = 1;
- ds_pool_ci.pPoolSizes = &ds_type_count;
-
- VkDescriptorPool ds_pool;
- err = vkCreateDescriptorPool(m_device->device(), &ds_pool_ci, NULL, &ds_pool);
- ASSERT_VK_SUCCESS(err);
-
- VkDescriptorSetLayoutBinding dsl_binding_samp = {};
- dsl_binding_samp.binding = 0;
- dsl_binding_samp.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
- dsl_binding_samp.descriptorCount = 1;
- dsl_binding_samp.stageFlags = VK_SHADER_STAGE_ALL;
- dsl_binding_samp.pImmutableSamplers = NULL;
-
- const VkDescriptorSetLayoutObj ds_layout_samp(m_device, {dsl_binding_samp});
-
- // Try to allocate 2 sets when pool only has 1 set
- VkDescriptorSet descriptor_sets[2];
- VkDescriptorSetLayout set_layouts[2] = {ds_layout_samp.handle(), ds_layout_samp.handle()};
- VkDescriptorSetAllocateInfo alloc_info = {};
- alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
- alloc_info.descriptorSetCount = 2;
- alloc_info.descriptorPool = ds_pool;
- alloc_info.pSetLayouts = set_layouts;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkDescriptorSetAllocateInfo-descriptorSetCount-00306");
- err = vkAllocateDescriptorSets(m_device->device(), &alloc_info, descriptor_sets);
- m_errorMonitor->VerifyFound();
-
- alloc_info.descriptorSetCount = 1;
- // Create layout w/ descriptor type not available in pool
- VkDescriptorSetLayoutBinding dsl_binding = {};
- dsl_binding.binding = 0;
- dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
- dsl_binding.descriptorCount = 1;
- dsl_binding.stageFlags = VK_SHADER_STAGE_ALL;
- dsl_binding.pImmutableSamplers = NULL;
-
- const VkDescriptorSetLayoutObj ds_layout_ub(m_device, {dsl_binding});
-
- VkDescriptorSet descriptor_set;
- alloc_info.descriptorSetCount = 1;
- alloc_info.pSetLayouts = &ds_layout_ub.handle();
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDescriptorSetAllocateInfo-descriptorPool-00307");
- err = vkAllocateDescriptorSets(m_device->device(), &alloc_info, &descriptor_set);
-
- m_errorMonitor->VerifyFound();
-
- vkDestroyDescriptorPool(m_device->device(), ds_pool, NULL);
-}
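-
-// A minimal sketch (assuming the same ds_pool_ci and layouts as above) of a pool that either
-// allocation above would succeed against: two set slots and pool sizes covering both types.
-//   VkDescriptorPoolSize pool_sizes[2] = {{VK_DESCRIPTOR_TYPE_SAMPLER, 2},
-//                                         {VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1}};
-//   ds_pool_ci.maxSets = 2;
-//   ds_pool_ci.poolSizeCount = 2;
-//   ds_pool_ci.pPoolSizes = pool_sizes;
-//   // Allocating two sampler-layout sets, or one uniform-buffer-layout set, now stays within
-//   // both maxSets and the per-type descriptor counts.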
-
-TEST_F(VkLayerTest, FreeDescriptorFromOneShotPool) {
- VkResult err;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkFreeDescriptorSets-descriptorPool-00312");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkDescriptorPoolSize ds_type_count = {};
- ds_type_count.type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
- ds_type_count.descriptorCount = 1;
-
- VkDescriptorPoolCreateInfo ds_pool_ci = {};
- ds_pool_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
- ds_pool_ci.pNext = NULL;
- ds_pool_ci.maxSets = 1;
- ds_pool_ci.poolSizeCount = 1;
- ds_pool_ci.flags = 0;
- // Not specifying VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT means the
- // app can only call vkResetDescriptorPool on this pool (see the sketch after this test).
- ds_pool_ci.pPoolSizes = &ds_type_count;
-
- VkDescriptorPool ds_pool;
- err = vkCreateDescriptorPool(m_device->device(), &ds_pool_ci, NULL, &ds_pool);
- ASSERT_VK_SUCCESS(err);
-
- VkDescriptorSetLayoutBinding dsl_binding = {};
- dsl_binding.binding = 0;
- dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
- dsl_binding.descriptorCount = 1;
- dsl_binding.stageFlags = VK_SHADER_STAGE_ALL;
- dsl_binding.pImmutableSamplers = NULL;
-
- const VkDescriptorSetLayoutObj ds_layout(m_device, {dsl_binding});
-
- VkDescriptorSet descriptorSet;
- VkDescriptorSetAllocateInfo alloc_info = {};
- alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
- alloc_info.descriptorSetCount = 1;
- alloc_info.descriptorPool = ds_pool;
- alloc_info.pSetLayouts = &ds_layout.handle();
- err = vkAllocateDescriptorSets(m_device->device(), &alloc_info, &descriptorSet);
- ASSERT_VK_SUCCESS(err);
-
- err = vkFreeDescriptorSets(m_device->device(), ds_pool, 1, &descriptorSet);
- m_errorMonitor->VerifyFound();
-
- vkDestroyDescriptorPool(m_device->device(), ds_pool, NULL);
-}
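-
-// A minimal sketch (assuming the same ds_pool_ci as above) of the pool flag that would make the free legal:
-//   ds_pool_ci.flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT;
-//   // With this flag set at pool creation, vkFreeDescriptorSets may return individual sets to the
-//   // pool; without it, only vkResetDescriptorPool (or destroying the pool) reclaims the sets.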
-
-TEST_F(VkLayerTest, InvalidDescriptorPool) {
- // Attempt to clear Descriptor Pool with bad object.
- // ObjectTracker should catch this.
-
- ASSERT_NO_FATAL_FAILURE(Init());
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkResetDescriptorPool-descriptorPool-parameter");
- uint64_t fake_pool_handle = 0xbaad6001;
- VkDescriptorPool bad_pool = reinterpret_cast<VkDescriptorPool &>(fake_pool_handle);
- vkResetDescriptorPool(device(), bad_pool, 0);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, InvalidDescriptorSet) {
- // Attempt to bind an invalid Descriptor Set to a valid Command Buffer
- // ObjectTracker should catch this.
- // Create a valid cmd buffer
- // call vkCmdBindDescriptorSets with an invalid Descriptor Set handle
-
- uint64_t fake_set_handle = 0xbaad6001;
- VkDescriptorSet bad_set = reinterpret_cast<VkDescriptorSet &>(fake_set_handle);
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBindDescriptorSets-pDescriptorSets-parameter");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- VkDescriptorSetLayoutBinding layout_binding = {};
- layout_binding.binding = 0;
- layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
- layout_binding.descriptorCount = 1;
- layout_binding.stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
- layout_binding.pImmutableSamplers = NULL;
-
- const VkDescriptorSetLayoutObj descriptor_set_layout(m_device, {layout_binding});
-
- const VkPipelineLayoutObj pipeline_layout(DeviceObj(), {&descriptor_set_layout});
-
- m_commandBuffer->begin();
- vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1, &bad_set, 0,
- NULL);
- m_errorMonitor->VerifyFound();
- m_commandBuffer->end();
-}
-
-TEST_F(VkLayerTest, InvalidDescriptorSetLayout) {
- // Attempt to create a Pipeline Layout with an invalid Descriptor Set Layout.
- // ObjectTracker should catch this.
- uint64_t fake_layout_handle = 0xbaad6001;
- VkDescriptorSetLayout bad_layout = reinterpret_cast<VkDescriptorSetLayout &>(fake_layout_handle);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-parameter");
- ASSERT_NO_FATAL_FAILURE(Init());
- VkPipelineLayout pipeline_layout;
- VkPipelineLayoutCreateInfo plci = {};
- plci.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
- plci.pNext = NULL;
- plci.setLayoutCount = 1;
- plci.pSetLayouts = &bad_layout;
- vkCreatePipelineLayout(device(), &plci, NULL, &pipeline_layout);
-
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, WriteDescriptorSetIntegrityCheck) {
- TEST_DESCRIPTION(
- "This test verifies some requirements of chapter 13.2.3 of the Vulkan Spec: "
- "1) A uniform buffer update must have a valid buffer index. "
- "2) When using an array of descriptors in a single WriteDescriptorSet, the descriptor types and stageFlags "
- "must all be the same. "
- "3) Immutable Sampler state must match across descriptors. "
- "4) Sampled image descriptors must have the required layouts.");
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-descriptorType-00324");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
- VkSampler sampler;
- VkResult err = vkCreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
- ASSERT_VK_SUCCESS(err);
-
- OneOffDescriptorSet::Bindings bindings = {
- {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, NULL},
- {1, VK_DESCRIPTOR_TYPE_SAMPLER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, NULL},
- {2, VK_DESCRIPTOR_TYPE_SAMPLER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, static_cast<VkSampler *>(&sampler)},
- {3, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1, VK_SHADER_STAGE_FRAGMENT_BIT, NULL}};
- OneOffDescriptorSet descriptor_set(m_device, bindings);
- ASSERT_TRUE(descriptor_set.Initialized());
-
- VkWriteDescriptorSet descriptor_write = {};
- descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
- descriptor_write.dstSet = descriptor_set.set_;
- descriptor_write.dstBinding = 0;
- descriptor_write.descriptorCount = 1;
- descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
-
- // 1) pBufferInfo is intentionally left null here, so the uniform buffer update is invalid
- vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
- m_errorMonitor->VerifyFound();
-
- // Create a buffer to update the descriptor with
- uint32_t qfi = 0;
- VkBufferCreateInfo buffCI = {};
- buffCI.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
- buffCI.size = 1024;
- buffCI.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
- buffCI.queueFamilyIndexCount = 1;
- buffCI.pQueueFamilyIndices = &qfi;
-
- VkBufferObj dynamic_uniform_buffer;
- dynamic_uniform_buffer.init(*m_device, buffCI);
-
- VkDescriptorBufferInfo buffInfo[2] = {};
- buffInfo[0].buffer = dynamic_uniform_buffer.handle();
- buffInfo[0].offset = 0;
- buffInfo[0].range = 1024;
- buffInfo[1].buffer = dynamic_uniform_buffer.handle();
- buffInfo[1].offset = 0;
- buffInfo[1].range = 1024;
- descriptor_write.pBufferInfo = buffInfo;
- descriptor_write.descriptorCount = 2;
-
- // 2) The descriptor types and stageFlags don't match between the first and second binding
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-dstArrayElement-00321");
- vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
- m_errorMonitor->VerifyFound();
-
- // 3) The second binding has a null pImmutableSamplers while
- // the third binding contains an immutable sampler
- descriptor_write.dstBinding = 1;
- descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
-
- // Make pImageInfo non-null to avoid complaints that it is missing
- VkDescriptorImageInfo imageInfo = {};
- imageInfo.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
- descriptor_write.pImageInfo = &imageInfo;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-dstArrayElement-00321");
- vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
- m_errorMonitor->VerifyFound();
-
- // 4) That sampled image descriptors have required layouts
- // Create images to update the descriptor with
- VkImageObj image(m_device);
- const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
- image.Init(32, 32, 1, tex_format, VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
- ASSERT_TRUE(image.initialized());
-
- // Attempt write with incorrect layout for sampled image descriptor
- imageInfo.sampler = VK_NULL_HANDLE;
- imageInfo.imageView = image.targetView(tex_format);
- imageInfo.imageLayout = VK_IMAGE_LAYOUT_UNDEFINED;
-
- descriptor_write.dstBinding = 3;
- descriptor_write.descriptorCount = 1;
- descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-descriptorType-01403");
- vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
- m_errorMonitor->VerifyFound();
-
- vkDestroySampler(m_device->device(), sampler, NULL);
-}
-
-TEST_F(VkLayerTest, WriteDescriptorSetConsecutiveUpdates) {
- TEST_DESCRIPTION(
- "Verifies that updates rolling over to the next binding work correctly, by destroying a buffer that a consecutive update "
- "placed in the descriptor set and verifying that the expected error is flagged.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitViewport());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- OneOffDescriptorSet descriptor_set(m_device, {
- {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 2, VK_SHADER_STAGE_ALL, nullptr},
- {1, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
- });
-
- uint32_t qfi = 0;
- VkBufferCreateInfo bci = {};
- bci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
- bci.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
- bci.size = 2048;
- bci.queueFamilyIndexCount = 1;
- bci.pQueueFamilyIndices = &qfi;
- VkBufferObj buffer0;
- buffer0.init(*m_device, bci);
- CreatePipelineHelper pipe(*this);
- { // Scope 2nd buffer to cause early destruction
- VkBufferObj buffer1;
- bci.size = 1024;
- buffer1.init(*m_device, bci);
-
- VkDescriptorBufferInfo buffer_info[3] = {};
- buffer_info[0].buffer = buffer0.handle();
- buffer_info[0].offset = 0;
- buffer_info[0].range = 1024;
- buffer_info[1].buffer = buffer0.handle();
- buffer_info[1].offset = 1024;
- buffer_info[1].range = 1024;
- buffer_info[2].buffer = buffer1.handle();
- buffer_info[2].offset = 0;
- buffer_info[2].range = 1024;
-
- VkWriteDescriptorSet descriptor_write = {};
- descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
- descriptor_write.dstSet = descriptor_set.set_;
- descriptor_write.dstBinding = 0;
- descriptor_write.descriptorCount = 3;
- descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
- descriptor_write.pBufferInfo = buffer_info;
-
- // Update descriptor
- vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
-
- // Create PSO that uses the uniform buffers
- char const *fsSource =
- "#version 450\n"
- "\n"
- "layout(location=0) out vec4 x;\n"
- "layout(set=0) layout(binding=0) uniform foo { int x; int y; } bar;\n"
- "layout(set=0) layout(binding=1) uniform blah { int x; } duh;\n"
- "void main(){\n"
- " x = vec4(duh.x, bar.y, bar.x, 1);\n"
- "}\n";
- VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- pipe.InitInfo();
- pipe.shader_stages_ = {pipe.vs_->GetStageCreateInfo(), fs.GetStageCreateInfo()};
- const VkDynamicState dyn_states[] = {VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR};
- VkPipelineDynamicStateCreateInfo dyn_state_ci = {};
- dyn_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
- dyn_state_ci.dynamicStateCount = size(dyn_states);
- dyn_state_ci.pDynamicStates = dyn_states;
- pipe.dyn_state_ci_ = dyn_state_ci;
- pipe.InitState();
- pipe.pipeline_layout_ = VkPipelineLayoutObj(m_device, {&descriptor_set.layout_});
- pipe.CreateGraphicsPipeline();
-
- m_commandBuffer->begin();
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
-
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
- vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_layout_.handle(), 0, 1,
- &descriptor_set.set_, 0, nullptr);
-
- VkViewport viewport = {0, 0, 16, 16, 0, 1};
- vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
- VkRect2D scissor = {{0, 0}, {16, 16}};
- vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
- vkCmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
- vkCmdEndRenderPass(m_commandBuffer->handle());
- m_commandBuffer->end();
- }
- // buffer1 just went out of scope and was destroyed along with its memory
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBuffer-VkBuffer");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBuffer-VkDeviceMemory");
-
- VkSubmitInfo submit_info = {};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &m_commandBuffer->handle();
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
- m_errorMonitor->VerifyFound();
-}
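-
-// How the rollover above works (same bindings as this test): binding 0 has descriptorCount 2 and
-// binding 1 has descriptorCount 1, so the single write with descriptorCount = 3 starting at
-// dstBinding 0 / dstArrayElement 0 consumes
-//   buffer_info[0] -> binding 0, element 0
-//   buffer_info[1] -> binding 0, element 1
-//   buffer_info[2] -> binding 1, element 0   (rolls over into the next binding)
-// which is why destroying buffer1 (backing buffer_info[2]) invalidates the recorded command buffer.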
-
-TEST_F(VkLayerTest, InvalidCmdBufferDescriptorSetBufferDestroyed) {
- TEST_DESCRIPTION(
- "Attempt to draw with a command buffer that is invalid due to a bound descriptor set with a buffer dependency being "
- "destroyed.");
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitViewport());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- CreatePipelineHelper pipe(*this);
- {
- // Create a buffer to update the descriptor with
- uint32_t qfi = 0;
- VkBufferCreateInfo buffCI = {};
- buffCI.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
- buffCI.size = 1024;
- buffCI.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
- buffCI.queueFamilyIndexCount = 1;
- buffCI.pQueueFamilyIndices = &qfi;
-
- VkBufferObj buffer;
- buffer.init(*m_device, buffCI);
-
- // Create PSO to be used for draw-time errors below
- char const *fsSource =
- "#version 450\n"
- "\n"
- "layout(location=0) out vec4 x;\n"
- "layout(set=0) layout(binding=0) uniform foo { int x; int y; } bar;\n"
- "void main(){\n"
- " x = vec4(bar.y);\n"
- "}\n";
- VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
- pipe.InitInfo();
- pipe.shader_stages_ = {pipe.vs_->GetStageCreateInfo(), fs.GetStageCreateInfo()};
- const VkDynamicState dyn_states[] = {VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR};
- VkPipelineDynamicStateCreateInfo dyn_state_ci = {};
- dyn_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
- dyn_state_ci.dynamicStateCount = size(dyn_states);
- dyn_state_ci.pDynamicStates = dyn_states;
- pipe.dyn_state_ci_ = dyn_state_ci;
- pipe.InitState();
- pipe.CreateGraphicsPipeline();
-
- // Correctly update descriptor to avoid "NOT_UPDATED" error
- pipe.descriptor_set_->WriteDescriptorBufferInfo(0, buffer.handle(), 1024);
- pipe.descriptor_set_->UpdateDescriptorSets();
-
- m_commandBuffer->begin();
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
- vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_layout_.handle(), 0, 1,
- &pipe.descriptor_set_->set_, 0, NULL);
-
- vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &m_viewports[0]);
- vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &m_scissors[0]);
-
- m_commandBuffer->Draw(1, 0, 0, 0);
- m_commandBuffer->EndRenderPass();
- m_commandBuffer->end();
- }
- // The buffer went out of scope above and was destroyed; that invalidates the cmd buffer, causing an error on submit
-
- // Attempt to submit cmd buffer
- VkSubmitInfo submit_info = {};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &m_commandBuffer->handle();
- // Invalid VkBuffer
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBuffe");
- // Invalid VkDeviceMemory
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " that is invalid because bound ");
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, InvalidCmdBufferDescriptorSetImageSamplerDestroyed) {
- TEST_DESCRIPTION(
- "Attempt to draw with a command buffer that is invalid due to a bound descriptor set with a combined image sampler whose "
- "image, sampler, and descriptor set are each destroyed in turn before submitting the associated cmd "
- "buffer. Also attempt to destroy a DescriptorSet that is in use.");
- ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
- ASSERT_NO_FATAL_FAILURE(InitViewport());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkDescriptorPoolSize ds_type_count = {};
- ds_type_count.type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
- ds_type_count.descriptorCount = 1;
-
- VkDescriptorPoolCreateInfo ds_pool_ci = {};
- ds_pool_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
- ds_pool_ci.pNext = NULL;
- ds_pool_ci.flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT;
- ds_pool_ci.maxSets = 1;
- ds_pool_ci.poolSizeCount = 1;
- ds_pool_ci.pPoolSizes = &ds_type_count;
-
- VkDescriptorPool ds_pool;
- VkResult err = vkCreateDescriptorPool(m_device->device(), &ds_pool_ci, NULL, &ds_pool);
- ASSERT_VK_SUCCESS(err);
-
- VkDescriptorSetLayoutBinding dsl_binding = {};
- dsl_binding.binding = 0;
- dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
- dsl_binding.descriptorCount = 1;
- dsl_binding.stageFlags = VK_SHADER_STAGE_ALL;
- dsl_binding.pImmutableSamplers = NULL;
-
- const VkDescriptorSetLayoutObj ds_layout(m_device, {dsl_binding});
-
- VkDescriptorSet descriptorSet;
- VkDescriptorSetAllocateInfo alloc_info = {};
- alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
- alloc_info.descriptorSetCount = 1;
- alloc_info.descriptorPool = ds_pool;
- alloc_info.pSetLayouts = &ds_layout.handle();
- err = vkAllocateDescriptorSets(m_device->device(), &alloc_info, &descriptorSet);
- ASSERT_VK_SUCCESS(err);
-
- const VkPipelineLayoutObj pipeline_layout(m_device, {&ds_layout});
-
- // Create images to update the descriptor with
- VkImage image;
- VkImage image2;
- const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
- const int32_t tex_width = 32;
- const int32_t tex_height = 32;
- VkImageCreateInfo image_create_info = {};
- image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- image_create_info.pNext = NULL;
- image_create_info.imageType = VK_IMAGE_TYPE_2D;
- image_create_info.format = tex_format;
- image_create_info.extent.width = tex_width;
- image_create_info.extent.height = tex_height;
- image_create_info.extent.depth = 1;
- image_create_info.mipLevels = 1;
- image_create_info.arrayLayers = 1;
- image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
- image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
- image_create_info.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
- image_create_info.flags = 0;
- err = vkCreateImage(m_device->device(), &image_create_info, NULL, &image);
- ASSERT_VK_SUCCESS(err);
- err = vkCreateImage(m_device->device(), &image_create_info, NULL, &image2);
- ASSERT_VK_SUCCESS(err);
-
- VkMemoryRequirements memory_reqs;
- VkDeviceMemory image_memory;
- bool pass;
- VkMemoryAllocateInfo memory_info = {};
- memory_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
- memory_info.pNext = NULL;
- memory_info.allocationSize = 0;
- memory_info.memoryTypeIndex = 0;
- vkGetImageMemoryRequirements(m_device->device(), image, &memory_reqs);
- // Allocate enough memory for both images
- VkDeviceSize align_mod = memory_reqs.size % memory_reqs.alignment;
- VkDeviceSize aligned_size = ((align_mod == 0) ? memory_reqs.size : (memory_reqs.size + memory_reqs.alignment - align_mod));
- memory_info.allocationSize = aligned_size * 2;
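- // For example (hypothetical numbers): if memory_reqs.size were 65540 and alignment 4096, then
- // align_mod = 65540 % 4096 = 4 and aligned_size = 65540 + 4096 - 4 = 69632, the next multiple
- // of 4096, so image2 can legally be bound at offset aligned_size.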
- pass = m_device->phy().set_memory_type(memory_reqs.memoryTypeBits, &memory_info, 0);
- ASSERT_TRUE(pass);
- err = vkAllocateMemory(m_device->device(), &memory_info, NULL, &image_memory);
- ASSERT_VK_SUCCESS(err);
- err = vkBindImageMemory(m_device->device(), image, image_memory, 0);
- ASSERT_VK_SUCCESS(err);
- // Bind second image to memory right after first image
- err = vkBindImageMemory(m_device->device(), image2, image_memory, aligned_size);
- ASSERT_VK_SUCCESS(err);
-
- VkImageViewCreateInfo image_view_create_info = {};
- image_view_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
- image_view_create_info.image = image;
- image_view_create_info.viewType = VK_IMAGE_VIEW_TYPE_2D;
- image_view_create_info.format = tex_format;
- image_view_create_info.subresourceRange.layerCount = 1;
- image_view_create_info.subresourceRange.baseMipLevel = 0;
- image_view_create_info.subresourceRange.levelCount = 1;
- image_view_create_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
-
- VkImageView tmp_view; // First test deletes this view
- VkImageView view;
- VkImageView view2;
- err = vkCreateImageView(m_device->device(), &image_view_create_info, NULL, &tmp_view);
- ASSERT_VK_SUCCESS(err);
- err = vkCreateImageView(m_device->device(), &image_view_create_info, NULL, &view);
- ASSERT_VK_SUCCESS(err);
- image_view_create_info.image = image2;
- err = vkCreateImageView(m_device->device(), &image_view_create_info, NULL, &view2);
- ASSERT_VK_SUCCESS(err);
- // Create Samplers
- VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
- VkSampler sampler;
- VkSampler sampler2;
- err = vkCreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
- ASSERT_VK_SUCCESS(err);
- err = vkCreateSampler(m_device->device(), &sampler_ci, NULL, &sampler2);
- ASSERT_VK_SUCCESS(err);
- // Update descriptor with image and sampler
- VkDescriptorImageInfo img_info = {};
- img_info.sampler = sampler;
- img_info.imageView = tmp_view;
- img_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
-
- VkWriteDescriptorSet descriptor_write;
- memset(&descriptor_write, 0, sizeof(descriptor_write));
- descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
- descriptor_write.dstSet = descriptorSet;
- descriptor_write.dstBinding = 0;
- descriptor_write.descriptorCount = 1;
- descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
- descriptor_write.pImageInfo = &img_info;
-
- vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
-
- // Create PSO to be used for draw-time errors below
- char const *fsSource =
- "#version 450\n"
- "\n"
- "layout(set=0, binding=0) uniform sampler2D s;\n"
- "layout(location=0) out vec4 x;\n"
- "void main(){\n"
- " x = texture(s, vec2(1));\n"
- "}\n";
- VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
- VkPipelineObj pipe(m_device);
- pipe.AddShader(&vs);
- pipe.AddShader(&fs);
- pipe.AddDefaultColorAttachment();
- pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());
-
- // First error case: destroy the image view after a successful submit, then re-record a draw that uses the stale descriptor
- m_commandBuffer->begin();
-
- // Transition image layout from VK_IMAGE_LAYOUT_UNDEFINED to VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
- VkImageMemoryBarrier barrier = {};
- barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
- barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
- barrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
- barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
- barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
- barrier.image = image;
- barrier.srcAccessMask = 0;
- barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
- barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- barrier.subresourceRange.baseMipLevel = 0;
- barrier.subresourceRange.levelCount = 1;
- barrier.subresourceRange.baseArrayLayer = 0;
- barrier.subresourceRange.layerCount = 1;
- vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
- nullptr, 0, nullptr, 1, &barrier);
-
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
- vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
- &descriptorSet, 0, NULL);
- VkViewport viewport = {0, 0, 16, 16, 0, 1};
- VkRect2D scissor = {{0, 0}, {16, 16}};
- vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
- vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
- m_commandBuffer->Draw(1, 0, 0, 0);
- m_commandBuffer->EndRenderPass();
- m_commandBuffer->end();
- VkSubmitInfo submit_info = {};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &m_commandBuffer->handle();
- // This first submit should be successful
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
- vkQueueWaitIdle(m_device->m_queue);
-
- // Now destroy imageview and reset cmdBuffer
- vkDestroyImageView(m_device->device(), tmp_view, NULL);
- m_commandBuffer->reset(0);
- m_commandBuffer->begin();
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
- vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
- &descriptorSet, 0, NULL);
- vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
- vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " that has been destroyed.");
- m_commandBuffer->Draw(1, 0, 0, 0);
- m_errorMonitor->VerifyFound();
- m_commandBuffer->EndRenderPass();
- m_commandBuffer->end();
-
- // Re-update descriptor with new view
- img_info.imageView = view;
- vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
- // Now test destroying sampler prior to cmd buffer submission
- m_commandBuffer->begin();
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
- vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
- &descriptorSet, 0, NULL);
- vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
- vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
- m_commandBuffer->Draw(1, 0, 0, 0);
- m_commandBuffer->EndRenderPass();
- m_commandBuffer->end();
- // Destroying the sampler invalidates the cmd buffer, causing an error on submit
- vkDestroySampler(m_device->device(), sampler, NULL);
- // Attempt to submit cmd buffer
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBuffer-VkSampler");
- submit_info = {};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &m_commandBuffer->handle();
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
- m_errorMonitor->VerifyFound();
-
- // Now re-update descriptor with valid sampler and delete image
- img_info.sampler = sampler2;
- vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
-
- VkCommandBufferBeginInfo info = {};
- info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
- info.flags = VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBuffer-VkImage");
- m_commandBuffer->begin(&info);
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
- vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
- &descriptorSet, 0, NULL);
- vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
- vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
- m_commandBuffer->Draw(1, 0, 0, 0);
- m_commandBuffer->EndRenderPass();
- m_commandBuffer->end();
- // Destroying the image invalidates the cmd buffer, causing an error on submit
- vkDestroyImage(m_device->device(), image, NULL);
- // Attempt to submit cmd buffer
- submit_info = {};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &m_commandBuffer->handle();
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
- m_errorMonitor->VerifyFound();
- // Now update the descriptor to be valid, but then free the descriptor set
- img_info.imageView = view2;
- vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
- m_commandBuffer->begin(&info);
-
- // Transition image2 layout from VK_IMAGE_LAYOUT_UNDEFINED to VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
- barrier.image = image2;
- vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
- nullptr, 0, nullptr, 1, &barrier);
-
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
- vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
- &descriptorSet, 0, NULL);
- vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
- vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
- m_commandBuffer->Draw(1, 0, 0, 0);
- m_commandBuffer->EndRenderPass();
- m_commandBuffer->end();
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
-
- // Immediately try to destroy the descriptor set in the active command buffer - failure expected
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkFreeDescriptorSets-pDescriptorSets-00309");
- vkFreeDescriptorSets(m_device->device(), ds_pool, 1, &descriptorSet);
- m_errorMonitor->VerifyFound();
-
- // Try again once the queue is idle - should succeed w/o error
- // TODO - though the particular error above doesn't re-occur, there are other 'unexpecteds' still to clean up
- vkQueueWaitIdle(m_device->m_queue);
- m_errorMonitor->SetUnexpectedError(
- "pDescriptorSets must be a valid pointer to an array of descriptorSetCount VkDescriptorSet handles, each element of which "
- "must either be a valid handle or VK_NULL_HANDLE");
- m_errorMonitor->SetUnexpectedError("Unable to remove DescriptorSet obj");
- vkFreeDescriptorSets(m_device->device(), ds_pool, 1, &descriptorSet);
-
- // Attempt to submit cmd buffer containing the freed descriptor set
- submit_info = {};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &m_commandBuffer->handle();
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBuffer-VkDescriptorSet");
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
- m_errorMonitor->VerifyFound();
-
- // Cleanup
- vkFreeMemory(m_device->device(), image_memory, NULL);
- vkDestroySampler(m_device->device(), sampler2, NULL);
- vkDestroyImage(m_device->device(), image2, NULL);
- vkDestroyImageView(m_device->device(), view, NULL);
- vkDestroyImageView(m_device->device(), view2, NULL);
- vkDestroyDescriptorPool(m_device->device(), ds_pool, NULL);
-}
-
-TEST_F(VkLayerTest, InvalidDescriptorSetSamplerDestroyed) {
- TEST_DESCRIPTION("Attempt to draw with a bound descriptor sets with a combined image sampler where sampler has been deleted.");
- ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
- ASSERT_NO_FATAL_FAILURE(InitViewport());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- OneOffDescriptorSet descriptor_set(m_device,
- {
- {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
- {1, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
- });
-
- const VkPipelineLayoutObj pipeline_layout(m_device, {&descriptor_set.layout_});
- // Create images to update the descriptor with
- VkImageObj image(m_device);
- const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
- image.Init(32, 32, 1, tex_format, VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
- ASSERT_TRUE(image.initialized());
-
- VkImageViewCreateInfo image_view_create_info = {};
- image_view_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
- image_view_create_info.image = image.handle();
- image_view_create_info.viewType = VK_IMAGE_VIEW_TYPE_2D;
- image_view_create_info.format = tex_format;
- image_view_create_info.subresourceRange.layerCount = 1;
- image_view_create_info.subresourceRange.baseMipLevel = 0;
- image_view_create_info.subresourceRange.levelCount = 1;
- image_view_create_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
-
- VkImageView view;
- VkResult err = vkCreateImageView(m_device->device(), &image_view_create_info, NULL, &view);
- ASSERT_VK_SUCCESS(err);
- // Create Samplers
- VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
- VkSampler sampler;
- err = vkCreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
- ASSERT_VK_SUCCESS(err);
- VkSampler sampler1;
- err = vkCreateSampler(m_device->device(), &sampler_ci, NULL, &sampler1);
- ASSERT_VK_SUCCESS(err);
- // Update descriptor with image and sampler
- VkDescriptorImageInfo img_info = {};
- img_info.sampler = sampler;
- img_info.imageView = view;
- img_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
-
- VkDescriptorImageInfo img_info1 = img_info;
- img_info1.sampler = sampler1;
-
- VkWriteDescriptorSet descriptor_write;
- memset(&descriptor_write, 0, sizeof(descriptor_write));
- descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
- descriptor_write.dstSet = descriptor_set.set_;
- descriptor_write.dstBinding = 0;
- descriptor_write.descriptorCount = 1;
- descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
- descriptor_write.pImageInfo = &img_info;
-
- std::array<VkWriteDescriptorSet, 2> descriptor_writes = {descriptor_write, descriptor_write};
- descriptor_writes[1].dstBinding = 1;
- descriptor_writes[1].pImageInfo = &img_info1;
-
- vkUpdateDescriptorSets(m_device->device(), 2, descriptor_writes.data(), 0, NULL);
-
- // Destroy the sampler before it's bound to the cmd buffer
- vkDestroySampler(m_device->device(), sampler1, NULL);
-
- // Create PSO to be used for draw-time errors below
- char const *fsSource =
- "#version 450\n"
- "\n"
- "layout(set=0, binding=0) uniform sampler2D s;\n"
- "layout(set=0, binding=1) uniform sampler2D s1;\n"
- "layout(location=0) out vec4 x;\n"
- "void main(){\n"
- " x = texture(s, vec2(1));\n"
- " x = texture(s1, vec2(1));\n"
- "}\n";
- VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
- VkPipelineObj pipe(m_device);
- pipe.AddShader(&vs);
- pipe.AddShader(&fs);
- pipe.AddDefaultColorAttachment();
- pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());
-
- // Error case: binding #1 references sampler1, which was destroyed above, so the error fires at draw time
- m_commandBuffer->begin();
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
- vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
- &descriptor_set.set_, 0, NULL);
- VkViewport viewport = {0, 0, 16, 16, 0, 1};
- VkRect2D scissor = {{0, 0}, {16, 16}};
- vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
- vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " Descriptor in binding #1 index 0 is using sampler ");
- m_commandBuffer->Draw(1, 0, 0, 0);
- m_errorMonitor->VerifyFound();
-
- m_commandBuffer->EndRenderPass();
- m_commandBuffer->end();
-
- vkDestroySampler(m_device->device(), sampler, NULL);
- vkDestroyImageView(m_device->device(), view, NULL);
-}
-
-TEST_F(VkLayerTest, ImageDescriptorLayoutMismatch) {
- TEST_DESCRIPTION("Create an image sampler layout->image layout mismatch within/without a command buffer");
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- bool maint2_support = DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE2_EXTENSION_NAME);
- if (maint2_support) {
- m_device_extension_names.push_back(VK_KHR_MAINTENANCE2_EXTENSION_NAME);
- } else {
- printf("%s Relaxed layout matching subtest requires API >= 1.1 or KHR_MAINTENANCE2 extension, unavailable - skipped.\n",
- kSkipPrefix);
- }
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
-
- ASSERT_NO_FATAL_FAILURE(InitViewport());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- OneOffDescriptorSet descriptor_set(m_device,
- {
- {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
- });
- VkDescriptorSet descriptorSet = descriptor_set.set_;
-
- const VkPipelineLayoutObj pipeline_layout(m_device, {&descriptor_set.layout_});
-
- // Create image, view, and sampler
- const VkFormat format = VK_FORMAT_B8G8R8A8_UNORM;
- VkImageObj image(m_device);
- // TRANSFER_SRC usage (not a layout enum) is what the TRANSFER_SRC_OPTIMAL transition in do_test below needs
- image.Init(32, 32, 1, format, VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT, VK_IMAGE_TILING_OPTIMAL,
- 0);
- ASSERT_TRUE(image.initialized());
-
- vk_testing::ImageView view;
- auto image_view_create_info = SafeSaneImageViewCreateInfo(image, format, VK_IMAGE_ASPECT_COLOR_BIT);
- view.init(*m_device, image_view_create_info);
- ASSERT_TRUE(view.initialized());
-
- // Create Sampler
- vk_testing::Sampler sampler;
- VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
- sampler.init(*m_device, sampler_ci);
- ASSERT_TRUE(sampler.initialized());
-
- // Setup structure for descriptor update with sampler, for update in do_test below
- VkDescriptorImageInfo img_info = {};
- img_info.sampler = sampler.handle();
-
- VkWriteDescriptorSet descriptor_write;
- memset(&descriptor_write, 0, sizeof(descriptor_write));
- descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
- descriptor_write.dstSet = descriptorSet;
- descriptor_write.dstBinding = 0;
- descriptor_write.descriptorCount = 1;
- descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
- descriptor_write.pImageInfo = &img_info;
-
- // Create PSO to be used for draw-time errors below
- VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj fs(m_device, bindStateFragSamplerShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
- VkPipelineObj pipe(m_device);
- pipe.AddShader(&vs);
- pipe.AddShader(&fs);
- pipe.AddDefaultColorAttachment();
- pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());
-
- VkViewport viewport = {0, 0, 16, 16, 0, 1};
- VkRect2D scissor = {{0, 0}, {16, 16}};
-
- VkCommandBufferObj cmd_buf(m_device, m_commandPool);
-
- VkSubmitInfo submit_info = {};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &cmd_buf.handle();
-
- enum TestType {
- kInternal, // Image layout mismatch is *within* a given command buffer
- kExternal // Image layout mismatch is with the current state of the image, found at QueueSubmit
- };
- std::array<TestType, 2> test_list = {kInternal, kExternal};
- const std::vector<std::string> internal_errors = {"VUID-VkDescriptorImageInfo-imageLayout-00344",
- "UNASSIGNED-CoreValidation-DrawState-DescriptorSetNotUpdated"};
- const std::vector<std::string> external_errors = {"UNASSIGNED-CoreValidation-DrawState-InvalidImageLayout"};
-
- // Common steps to create the two classes of errors (or two classes of positives)
- auto do_test = [&](VkImageObj *image, vk_testing::ImageView *view, VkImageAspectFlags aspect_mask, VkImageLayout image_layout,
- VkImageLayout descriptor_layout, const bool positive_test) {
- // Set up the descriptor
- img_info.imageView = view->handle();
- img_info.imageLayout = descriptor_layout;
- vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
-
- for (TestType test_type : test_list) {
- cmd_buf.begin();
- // Record an image layout different from the layout specified in the descriptor.
- const VkFlags read_write = VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT;
- auto image_barrier = image->image_memory_barrier(read_write, read_write, VK_IMAGE_LAYOUT_UNDEFINED, image_layout,
- image->subresource_range(aspect_mask));
- cmd_buf.PipelineBarrier(VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, 0, 0, nullptr, 0,
- nullptr, 1, &image_barrier);
-
- if (test_type == kExternal) {
- // The image layout change is external to the command buffer we are recording for the test. Submit it so the new layout becomes part of the image's global (queue-level) state.
- cmd_buf.end();
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
- vkQueueWaitIdle(m_device->m_queue);
- cmd_buf.begin();
- }
-
- cmd_buf.BeginRenderPass(m_renderPassBeginInfo);
- vkCmdBindPipeline(cmd_buf.handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
- vkCmdBindDescriptorSets(cmd_buf.handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
- &descriptorSet, 0, NULL);
- vkCmdSetViewport(cmd_buf.handle(), 0, 1, &viewport);
- vkCmdSetScissor(cmd_buf.handle(), 0, 1, &scissor);
-
- // At draw time the layout in the descriptor update will mismatch the actual image layout
- if (positive_test || (test_type == kExternal)) {
- m_errorMonitor->ExpectSuccess();
- } else {
- for (const auto &err : internal_errors) {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, err.c_str());
- }
- }
- cmd_buf.Draw(1, 0, 0, 0);
- if (positive_test || (test_type == kExternal)) {
- m_errorMonitor->VerifyNotFound();
- } else {
- m_errorMonitor->VerifyFound();
- }
-
- m_errorMonitor->ExpectSuccess();
- cmd_buf.EndRenderPass();
- cmd_buf.end();
- m_errorMonitor->VerifyNotFound();
-
- // Submit cmd buffer
- if (positive_test || (test_type == kInternal)) {
- m_errorMonitor->ExpectSuccess();
- } else {
- for (const auto &err : external_errors) {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, err.c_str());
- }
- }
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
- vkQueueWaitIdle(m_device->m_queue);
- if (positive_test || (test_type == kInternal)) {
- m_errorMonitor->VerifyNotFound();
- } else {
- m_errorMonitor->VerifyFound();
- }
- }
- };
- do_test(&image, &view, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
- VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, /* positive */ false);
-
- // Create depth stencil image and views
- const VkFormat format_ds = m_depth_stencil_fmt = FindSupportedDepthStencilFormat(gpu());
- bool ds_test_support = maint2_support && (format_ds != VK_FORMAT_UNDEFINED);
- VkImageObj image_ds(m_device);
- vk_testing::ImageView stencil_view;
- vk_testing::ImageView depth_view;
- const VkImageLayout ds_image_layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL;
- const VkImageLayout depth_descriptor_layout = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL;
- const VkImageLayout stencil_descriptor_layout = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL;
- const VkImageAspectFlags depth_stencil = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
- if (ds_test_support) {
- image_ds.Init(32, 32, 1, format_ds, VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
- VK_IMAGE_TILING_OPTIMAL, 0);
- ASSERT_TRUE(image_ds.initialized());
- auto ds_view_ci = SafeSaneImageViewCreateInfo(image_ds, format_ds, VK_IMAGE_ASPECT_DEPTH_BIT);
- depth_view.init(*m_device, ds_view_ci);
- ds_view_ci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
- stencil_view.init(*m_device, ds_view_ci);
- do_test(&image_ds, &depth_view, depth_stencil, ds_image_layout, depth_descriptor_layout, /* positive */ true);
- do_test(&image_ds, &depth_view, depth_stencil, ds_image_layout, VK_IMAGE_LAYOUT_GENERAL, /* positive */ false);
- do_test(&image_ds, &stencil_view, depth_stencil, ds_image_layout, stencil_descriptor_layout, /* positive */ true);
- do_test(&image_ds, &stencil_view, depth_stencil, ds_image_layout, VK_IMAGE_LAYOUT_GENERAL, /* positive */ false);
- }
-}
-
-TEST_F(VkLayerTest, DescriptorPoolInUseResetSignaled) {
- TEST_DESCRIPTION("Reset a DescriptorPool with a DescriptorSet that is in use.");
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitViewport());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- OneOffDescriptorSet descriptor_set(m_device,
- {
- {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
- });
-
- const VkPipelineLayoutObj pipeline_layout(m_device, {&descriptor_set.layout_});
-
- // Create image to update the descriptor with
- VkImageObj image(m_device);
- image.Init(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
- ASSERT_TRUE(image.initialized());
-
- VkImageView view = image.targetView(VK_FORMAT_B8G8R8A8_UNORM);
- // Create Sampler
- VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
- VkSampler sampler;
- VkResult err = vkCreateSampler(m_device->device(), &sampler_ci, nullptr, &sampler);
- ASSERT_VK_SUCCESS(err);
- // Update descriptor with image and sampler
- descriptor_set.WriteDescriptorImageInfo(0, view, sampler);
- descriptor_set.UpdateDescriptorSets();
-
- // Create PSO to be used for draw-time errors below
- VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj fs(m_device, bindStateFragSamplerShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
- VkPipelineObj pipe(m_device);
- pipe.AddShader(&vs);
- pipe.AddShader(&fs);
- pipe.AddDefaultColorAttachment();
- pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());
-
- m_commandBuffer->begin();
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
- vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
- &descriptor_set.set_, 0, nullptr);
-
- VkViewport viewport = {0, 0, 16, 16, 0, 1};
- VkRect2D scissor = {{0, 0}, {16, 16}};
- vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
- vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
-
- m_commandBuffer->Draw(1, 0, 0, 0);
- m_commandBuffer->EndRenderPass();
- m_commandBuffer->end();
- // Submit cmd buffer to put pool in-flight
- VkSubmitInfo submit_info = {};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &m_commandBuffer->handle();
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
- // Reset pool while in-flight, causing error
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkResetDescriptorPool-descriptorPool-00313");
- vkResetDescriptorPool(m_device->device(), descriptor_set.pool_, 0);
- m_errorMonitor->VerifyFound();
- vkQueueWaitIdle(m_device->m_queue);
- // Cleanup
- vkDestroySampler(m_device->device(), sampler, nullptr);
- m_errorMonitor->SetUnexpectedError(
- "If descriptorPool is not VK_NULL_HANDLE, descriptorPool must be a valid VkDescriptorPool handle");
- m_errorMonitor->SetUnexpectedError("Unable to remove DescriptorPool obj");
-}
-
-TEST_F(VkLayerTest, DescriptorImageUpdateNoMemoryBound) {
- TEST_DESCRIPTION("Attempt an image descriptor set update where image's bound memory has been freed.");
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitViewport());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- OneOffDescriptorSet descriptor_set(m_device,
- {
- {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
- });
-
- // Create images to update the descriptor with
- VkImage image;
- const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
- const int32_t tex_width = 32;
- const int32_t tex_height = 32;
- VkImageCreateInfo image_create_info = {};
- image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- image_create_info.pNext = NULL;
- image_create_info.imageType = VK_IMAGE_TYPE_2D;
- image_create_info.format = tex_format;
- image_create_info.extent.width = tex_width;
- image_create_info.extent.height = tex_height;
- image_create_info.extent.depth = 1;
- image_create_info.mipLevels = 1;
- image_create_info.arrayLayers = 1;
- image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
- image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
- image_create_info.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
- image_create_info.flags = 0;
- VkResult err = vkCreateImage(m_device->device(), &image_create_info, NULL, &image);
- ASSERT_VK_SUCCESS(err);
- // Initially bind memory to avoid error at bind view time. We'll break binding before update.
- VkMemoryRequirements memory_reqs;
- VkDeviceMemory image_memory;
- bool pass;
- VkMemoryAllocateInfo memory_info = {};
- memory_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
- memory_info.pNext = NULL;
- memory_info.allocationSize = 0;
- memory_info.memoryTypeIndex = 0;
- vkGetImageMemoryRequirements(m_device->device(), image, &memory_reqs);
- // Allocate enough memory for image
- memory_info.allocationSize = memory_reqs.size;
- pass = m_device->phy().set_memory_type(memory_reqs.memoryTypeBits, &memory_info, 0);
- ASSERT_TRUE(pass);
- err = vkAllocateMemory(m_device->device(), &memory_info, NULL, &image_memory);
- ASSERT_VK_SUCCESS(err);
- err = vkBindImageMemory(m_device->device(), image, image_memory, 0);
- ASSERT_VK_SUCCESS(err);
-
- VkImageViewCreateInfo image_view_create_info = {};
- image_view_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
- image_view_create_info.image = image;
- image_view_create_info.viewType = VK_IMAGE_VIEW_TYPE_2D;
- image_view_create_info.format = tex_format;
- image_view_create_info.subresourceRange.layerCount = 1;
- image_view_create_info.subresourceRange.baseMipLevel = 0;
- image_view_create_info.subresourceRange.levelCount = 1;
- image_view_create_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
-
- VkImageView view;
- err = vkCreateImageView(m_device->device(), &image_view_create_info, NULL, &view);
- ASSERT_VK_SUCCESS(err);
- // Create Samplers
- VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
- VkSampler sampler;
- err = vkCreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
- ASSERT_VK_SUCCESS(err);
- // Update descriptor with image and sampler
- descriptor_set.WriteDescriptorImageInfo(0, view, sampler);
- // Break memory binding and attempt update
- vkFreeMemory(m_device->device(), image_memory, nullptr);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- " previously bound memory was freed. Memory must not be freed prior to this operation.");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "vkUpdateDescriptorSets() failed write update validation for ");
- descriptor_set.UpdateDescriptorSets();
- m_errorMonitor->VerifyFound();
- // Cleanup
- vkDestroyImage(m_device->device(), image, NULL);
- vkDestroySampler(m_device->device(), sampler, NULL);
- vkDestroyImageView(m_device->device(), view, NULL);
-}
-
-TEST_F(VkLayerTest, InvalidDynamicOffsetCases) {
- // Create a descriptorSet w/ dynamic descriptor and then hit 3 offset error
- // cases:
- // 1. No dynamicOffset supplied
- // 2. Too many dynamicOffsets supplied
- // 3. Dynamic offset oversteps buffer being updated
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- " requires 1 dynamicOffsets, but only 0 dynamicOffsets are left in pDynamicOffsets ");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitViewport());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- OneOffDescriptorSet descriptor_set(m_device,
- {
- {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, 1, VK_SHADER_STAGE_ALL, nullptr},
- });
-
- const VkPipelineLayoutObj pipeline_layout(m_device, {&descriptor_set.layout_});
-
- // Create a buffer to update the descriptor with
- uint32_t qfi = 0;
- VkBufferCreateInfo buffCI = {};
- buffCI.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
- buffCI.size = 1024;
- buffCI.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
- buffCI.queueFamilyIndexCount = 1;
- buffCI.pQueueFamilyIndices = &qfi;
-
- VkBufferObj dynamic_uniform_buffer;
- dynamic_uniform_buffer.init(*m_device, buffCI);
-
- // Correctly update descriptor to avoid "NOT_UPDATED" error
- descriptor_set.WriteDescriptorBufferInfo(0, dynamic_uniform_buffer.handle(), 1024, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC);
- descriptor_set.UpdateDescriptorSets();
-
- m_commandBuffer->begin();
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
- vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
- &descriptor_set.set_, 0, NULL);
- m_errorMonitor->VerifyFound();
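- // Two offsets: both are supplied to trigger the "too many dynamicOffsets" case below,
- // then only the first (512) is reused for the overstep case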
- uint32_t pDynOff[2] = {512, 756};
- // Now cause error b/c too many dynOffsets in array for # of dyn descriptors
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "Attempting to bind 1 descriptorSets with 1 dynamic descriptors, but ");
- vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
- &descriptor_set.set_, 2, pDynOff);
- m_errorMonitor->VerifyFound();
- // Finally cause error due to dynamicOffset being too big
- m_errorMonitor->SetDesiredFailureMsg(
- VK_DEBUG_REPORT_ERROR_BIT_EXT,
- " dynamic offset 512 combined with offset 0 and range 1024 that oversteps the buffer size of 1024");
- // Create PSO to be used for draw-time errors below
- VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj fs(m_device, bindStateFragUniformShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
- VkPipelineObj pipe(m_device);
- pipe.AddShader(&vs);
- pipe.AddShader(&fs);
- pipe.AddDefaultColorAttachment();
- pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());
-
- VkViewport viewport = {0, 0, 16, 16, 0, 1};
- vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
- VkRect2D scissor = {{0, 0}, {16, 16}};
- vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
-
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
- // This update should succeed, but the offset of 512 will overstep the buffer
- // w/ range 1024 & size 1024
- vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
- &descriptor_set.set_, 1, pDynOff);
- m_commandBuffer->Draw(1, 0, 0, 0);
- m_errorMonitor->VerifyFound();
-
- m_commandBuffer->EndRenderPass();
- m_commandBuffer->end();
-}
-
-TEST_F(VkLayerTest, DescriptorBufferUpdateNoMemoryBound) {
- TEST_DESCRIPTION("Attempt to update a descriptor with a non-sparse buffer that doesn't have memory bound");
- VkResult err;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- " used with no memory bound. Memory should be bound by calling vkBindBufferMemory().");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "vkUpdateDescriptorSets() failed write update validation for ");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitViewport());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- OneOffDescriptorSet descriptor_set(m_device,
- {
- {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, 1, VK_SHADER_STAGE_ALL, nullptr},
- });
-
- // Create a buffer to update the descriptor with
- uint32_t qfi = 0;
- VkBufferCreateInfo buffCI = {};
- buffCI.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
- buffCI.size = 1024;
- buffCI.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
- buffCI.queueFamilyIndexCount = 1;
- buffCI.pQueueFamilyIndices = &qfi;
-
- VkBuffer dynamic_uniform_buffer;
- err = vkCreateBuffer(m_device->device(), &buffCI, NULL, &dynamic_uniform_buffer);
- ASSERT_VK_SUCCESS(err);
-
- // Attempt to update descriptor without binding memory to it
- descriptor_set.WriteDescriptorBufferInfo(0, dynamic_uniform_buffer, 1024, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC);
- descriptor_set.UpdateDescriptorSets();
- m_errorMonitor->VerifyFound();
- vkDestroyBuffer(m_device->device(), dynamic_uniform_buffer, NULL);
-}
-
-TEST_F(VkLayerTest, DescriptorSetCompatibility) {
- // Test various descriptorSet errors with bad binding combinations
- using std::vector;
- VkResult err;
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitViewport());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- static const uint32_t NUM_DESCRIPTOR_TYPES = 5;
- VkDescriptorPoolSize ds_type_count[NUM_DESCRIPTOR_TYPES] = {};
- ds_type_count[0].type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
- ds_type_count[0].descriptorCount = 10;
- ds_type_count[1].type = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
- ds_type_count[1].descriptorCount = 2;
- ds_type_count[2].type = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
- ds_type_count[2].descriptorCount = 2;
- ds_type_count[3].type = VK_DESCRIPTOR_TYPE_SAMPLER;
- ds_type_count[3].descriptorCount = 5;
- // TODO : LunarG ILO driver currently asserts in desc.c w/ INPUT_ATTACHMENT
- // type
- // ds_type_count[4].type = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT;
- ds_type_count[4].type = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER;
- ds_type_count[4].descriptorCount = 2;
-
- VkDescriptorPoolCreateInfo ds_pool_ci = {};
- ds_pool_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
- ds_pool_ci.pNext = NULL;
- ds_pool_ci.maxSets = 5;
- ds_pool_ci.poolSizeCount = NUM_DESCRIPTOR_TYPES;
- ds_pool_ci.pPoolSizes = ds_type_count;
-
- VkDescriptorPool ds_pool;
- err = vkCreateDescriptorPool(m_device->device(), &ds_pool_ci, NULL, &ds_pool);
- ASSERT_VK_SUCCESS(err);
-
- static const uint32_t MAX_DS_TYPES_IN_LAYOUT = 2;
- VkDescriptorSetLayoutBinding dsl_binding[MAX_DS_TYPES_IN_LAYOUT] = {};
- dsl_binding[0].binding = 0;
- dsl_binding[0].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
- dsl_binding[0].descriptorCount = 5;
- dsl_binding[0].stageFlags = VK_SHADER_STAGE_ALL;
- dsl_binding[0].pImmutableSamplers = NULL;
-
- // Create layout identical to set0 layout but w/ different stageFlags
- VkDescriptorSetLayoutBinding dsl_fs_stage_only = {};
- dsl_fs_stage_only.binding = 0;
- dsl_fs_stage_only.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
- dsl_fs_stage_only.descriptorCount = 5;
- dsl_fs_stage_only.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT; // Different stageFlags to cause error at
- // bind time
- dsl_fs_stage_only.pImmutableSamplers = NULL;
-
- vector<VkDescriptorSetLayoutObj> ds_layouts;
- // Create 4 unique layouts for full pipelineLayout, and 1 special fs-only
- // layout for error case
- ds_layouts.emplace_back(m_device, std::vector<VkDescriptorSetLayoutBinding>(1, dsl_binding[0]));
-
- const VkDescriptorSetLayoutObj ds_layout_fs_only(m_device, {dsl_fs_stage_only});
-
- dsl_binding[0].binding = 0;
- dsl_binding[0].descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
- dsl_binding[0].descriptorCount = 2;
- dsl_binding[1].binding = 1;
- dsl_binding[1].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
- dsl_binding[1].descriptorCount = 2;
- dsl_binding[1].stageFlags = VK_SHADER_STAGE_ALL;
- dsl_binding[1].pImmutableSamplers = NULL;
- ds_layouts.emplace_back(m_device, std::vector<VkDescriptorSetLayoutBinding>({dsl_binding[0], dsl_binding[1]}));
-
- dsl_binding[0].binding = 0;
- dsl_binding[0].descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
- dsl_binding[0].descriptorCount = 5;
- ds_layouts.emplace_back(m_device, std::vector<VkDescriptorSetLayoutBinding>(1, dsl_binding[0]));
-
- dsl_binding[0].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER;
- dsl_binding[0].descriptorCount = 2;
- ds_layouts.emplace_back(m_device, std::vector<VkDescriptorSetLayoutBinding>(1, dsl_binding[0]));
-
- const auto &ds_vk_layouts = MakeVkHandles<VkDescriptorSetLayout>(ds_layouts);
-
- static const uint32_t NUM_SETS = 4;
- VkDescriptorSet descriptorSet[NUM_SETS] = {};
- VkDescriptorSetAllocateInfo alloc_info = {};
- alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
- alloc_info.descriptorPool = ds_pool;
- alloc_info.descriptorSetCount = ds_vk_layouts.size();
- alloc_info.pSetLayouts = ds_vk_layouts.data();
- err = vkAllocateDescriptorSets(m_device->device(), &alloc_info, descriptorSet);
- ASSERT_VK_SUCCESS(err);
- VkDescriptorSet ds0_fs_only = {};
- alloc_info.descriptorSetCount = 1;
- alloc_info.pSetLayouts = &ds_layout_fs_only.handle();
- err = vkAllocateDescriptorSets(m_device->device(), &alloc_info, &ds0_fs_only);
- ASSERT_VK_SUCCESS(err);
-
- const VkPipelineLayoutObj pipeline_layout(m_device, {&ds_layouts[0], &ds_layouts[1]});
- // Create pipelineLayout with only one setLayout
- const VkPipelineLayoutObj single_pipe_layout(m_device, {&ds_layouts[0]});
- // Create pipelineLayout whose set 0 layout has 2 descriptors
- const VkPipelineLayoutObj pipe_layout_one_desc(m_device, {&ds_layouts[3]});
- // Create pipelineLayout whose set 0 layout has 5 SAMPLER descriptors
- const VkPipelineLayoutObj pipe_layout_five_samp(m_device, {&ds_layouts[2]});
- // Create pipelineLayout with UB type, but stageFlags for FS only
- VkPipelineLayoutObj pipe_layout_fs_only(m_device, {&ds_layout_fs_only});
- // Create pipelineLayout w/ incompatible set0 layout, but set1 is fine
- const VkPipelineLayoutObj pipe_layout_bad_set0(m_device, {&ds_layout_fs_only, &ds_layouts[1]});
-
- // Add buffer binding for UBO
- uint32_t qfi = 0;
- VkBufferCreateInfo bci = {};
- bci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
- bci.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
- bci.size = 8;
- bci.queueFamilyIndexCount = 1;
- bci.pQueueFamilyIndices = &qfi;
- VkBufferObj buffer;
- buffer.init(*m_device, bci);
- VkDescriptorBufferInfo buffer_info;
- buffer_info.buffer = buffer.handle();
- buffer_info.offset = 0;
- buffer_info.range = VK_WHOLE_SIZE;
- VkWriteDescriptorSet descriptor_write = {};
- descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
- descriptor_write.dstSet = descriptorSet[0];
- descriptor_write.dstBinding = 0;
- descriptor_write.descriptorCount = 1;
- descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
- descriptor_write.pBufferInfo = &buffer_info;
- vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
-
- // Create PSO to be used for draw-time errors below
- VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj fs(m_device, bindStateFragUniformShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
- VkPipelineObj pipe(m_device);
- pipe.AddShader(&vs);
- pipe.AddShader(&fs);
- pipe.AddDefaultColorAttachment();
- pipe.CreateVKPipeline(pipe_layout_fs_only.handle(), renderPass());
-
- m_commandBuffer->begin();
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
-
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
- // TODO : Want to cause various binding incompatibility issues here to test
- // DrawState
- // First cause various verify_layout_compatibility() fails
- // Second disturb early and late sets and verify INFO msgs
- // VerifySetLayoutCompatibility fail cases:
- // 1. invalid VkPipelineLayout (layout) passed into vkCmdBindDescriptorSets
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBindDescriptorSets-layout-parameter");
- vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS,
- CastToHandle<VkPipelineLayout, uintptr_t>(0xbaadb1be), 0, 1, &descriptorSet[0], 0, NULL);
- m_errorMonitor->VerifyFound();
-
- // 2. layoutIndex exceeds # of layouts in layout
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " attempting to bind set to index 1");
- vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, single_pipe_layout.handle(), 0, 2,
- &descriptorSet[0], 0, NULL);
- m_errorMonitor->VerifyFound();
-
- // 3. Pipeline setLayout[0] has 2 descriptors, but set being bound has 5
- // descriptors
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " has 2 descriptors, but DescriptorSetLayout ");
- vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe_layout_one_desc.handle(), 0, 1,
- &descriptorSet[0], 0, NULL);
- m_errorMonitor->VerifyFound();
-
- // 4. same # of descriptors but mismatch in type
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " is type 'VK_DESCRIPTOR_TYPE_SAMPLER' but binding ");
- vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe_layout_five_samp.handle(), 0, 1,
- &descriptorSet[0], 0, NULL);
- m_errorMonitor->VerifyFound();
-
- // 5. same # of descriptors but mismatch in stageFlags
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- " has stageFlags 16 but binding 0 for DescriptorSetLayout ");
- vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe_layout_fs_only.handle(), 0, 1,
- &descriptorSet[0], 0, NULL);
- m_errorMonitor->VerifyFound();
-
- // Now that we're done actively using the pipelineLayout that gfx pipeline
- // was created with, we should be able to delete it. Do that now to verify
- // that validation obeys pipelineLayout lifetime
- pipe_layout_fs_only.Reset();
-
- // Cause draw-time errors due to PSO incompatibilities
- // 1. Error due to not binding required set (we actually use same code as
- // above to disturb set0)
- vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 2,
- &descriptorSet[0], 0, NULL);
- vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe_layout_bad_set0.handle(), 1, 1,
- &descriptorSet[1], 0, NULL);
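- // Binding set #1 with a layout whose set #0 is incompatible disturbs the previously bound set #0,
- // so the draw below should report set #0 as not bound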
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " uses set #0 but that set is not bound.");
-
- VkViewport viewport = {0, 0, 16, 16, 0, 1};
- VkRect2D scissor = {{0, 0}, {16, 16}};
- vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
- vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
-
- m_commandBuffer->Draw(1, 0, 0, 0);
- m_errorMonitor->VerifyFound();
-
- // 2. Error due to bound set not being compatible with PSO's
- // VkPipelineLayout (diff stageFlags in this case)
- vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 2,
- &descriptorSet[0], 0, NULL);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " bound as set #0 is not compatible with ");
- m_commandBuffer->Draw(1, 0, 0, 0);
- m_errorMonitor->VerifyFound();
-
- // Remaining clean-up
- m_commandBuffer->EndRenderPass();
- m_commandBuffer->end();
-
- vkDestroyDescriptorPool(m_device->device(), ds_pool, NULL);
-}
-
-TEST_F(VkLayerTest, NullRenderPass) {
- // Bind a NULL RenderPass
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "vkCmdBeginRenderPass: required parameter pRenderPassBegin specified as NULL");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- m_commandBuffer->begin();
- // Don't care about RenderPass handle b/c error should be flagged before
- // that
- vkCmdBeginRenderPass(m_commandBuffer->handle(), NULL, VK_SUBPASS_CONTENTS_INLINE);
-
- m_errorMonitor->VerifyFound();
-
- m_commandBuffer->end();
-}
-
-TEST_F(VkLayerTest, EndCommandBufferWithinRenderPass) {
- TEST_DESCRIPTION("End a command buffer with an active render pass");
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkEndCommandBuffer-commandBuffer-00060");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- m_commandBuffer->begin();
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
- vkEndCommandBuffer(m_commandBuffer->handle());
-
- m_errorMonitor->VerifyFound();
-
- // End command buffer properly to avoid driver issues. This is safe -- the
- // previous vkEndCommandBuffer should not have reached the driver.
- m_commandBuffer->EndRenderPass();
- m_commandBuffer->end();
-
- // TODO: Add test for VK_COMMAND_BUFFER_LEVEL_SECONDARY
- // TODO: Add test for VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT
-}
-
-TEST_F(VkLayerTest, DSUsageBitsErrors) {
- TEST_DESCRIPTION("Attempt to update descriptor sets for images and buffers that do not have correct usage bits sets.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- const VkFormat buffer_format = VK_FORMAT_R8_UNORM;
- VkFormatProperties format_properties;
- vkGetPhysicalDeviceFormatProperties(gpu(), buffer_format, &format_properties);
- if (!(format_properties.bufferFeatures & VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT)) {
- printf("%s Device does not support VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT for this format; skipped.\n", kSkipPrefix);
- return;
- }
-
- std::array<VkDescriptorPoolSize, VK_DESCRIPTOR_TYPE_RANGE_SIZE> ds_type_count;
- for (uint32_t i = 0; i < ds_type_count.size(); ++i) {
- ds_type_count[i].type = VkDescriptorType(i);
- ds_type_count[i].descriptorCount = 1;
- }
-
- vk_testing::DescriptorPool ds_pool;
- ds_pool.init(*m_device, vk_testing::DescriptorPool::create_info(0, VK_DESCRIPTOR_TYPE_RANGE_SIZE, ds_type_count));
- ASSERT_TRUE(ds_pool.initialized());
-
- std::vector<VkDescriptorSetLayoutBinding> dsl_bindings(1);
- dsl_bindings[0].binding = 0;
- dsl_bindings[0].descriptorType = VkDescriptorType(0);
- dsl_bindings[0].descriptorCount = 1;
- dsl_bindings[0].stageFlags = VK_SHADER_STAGE_ALL;
- dsl_bindings[0].pImmutableSamplers = NULL;
-
- // Create arrays of layout and descriptor objects
- using UpDescriptorSet = std::unique_ptr<vk_testing::DescriptorSet>;
- std::vector<UpDescriptorSet> descriptor_sets;
- using UpDescriptorSetLayout = std::unique_ptr<VkDescriptorSetLayoutObj>;
- std::vector<UpDescriptorSetLayout> ds_layouts;
- descriptor_sets.reserve(VK_DESCRIPTOR_TYPE_RANGE_SIZE);
- ds_layouts.reserve(VK_DESCRIPTOR_TYPE_RANGE_SIZE);
- for (uint32_t i = 0; i < VK_DESCRIPTOR_TYPE_RANGE_SIZE; ++i) {
- dsl_bindings[0].descriptorType = VkDescriptorType(i);
- ds_layouts.push_back(UpDescriptorSetLayout(new VkDescriptorSetLayoutObj(m_device, dsl_bindings)));
- descriptor_sets.push_back(UpDescriptorSet(ds_pool.alloc_sets(*m_device, *ds_layouts.back())));
- ASSERT_TRUE(descriptor_sets.back()->initialized());
- }
-
- // Create a buffer & bufferView to be used for invalid updates
- const VkDeviceSize buffer_size = 256;
- uint8_t data[buffer_size];
- VkConstantBufferObj buffer(m_device, buffer_size, data, VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT);
- VkConstantBufferObj storage_texel_buffer(m_device, buffer_size, data, VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT);
- ASSERT_TRUE(buffer.initialized() && storage_texel_buffer.initialized());
-
- auto buff_view_ci = vk_testing::BufferView::createInfo(buffer.handle(), VK_FORMAT_R8_UNORM);
- vk_testing::BufferView buffer_view_obj, storage_texel_buffer_view_obj;
- buffer_view_obj.init(*m_device, buff_view_ci);
- buff_view_ci.buffer = storage_texel_buffer.handle();
- storage_texel_buffer_view_obj.init(*m_device, buff_view_ci);
- ASSERT_TRUE(buffer_view_obj.initialized() && storage_texel_buffer_view_obj.initialized());
- VkBufferView buffer_view = buffer_view_obj.handle();
- VkBufferView storage_texel_buffer_view = storage_texel_buffer_view_obj.handle();
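- // buffer_view is backed by a buffer with only UNIFORM_TEXEL usage and storage_texel_buffer_view by one
- // with only STORAGE_TEXEL usage, so each is missing the usage bit the other texel-buffer descriptor type requires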
-
- // Create an image to be used for invalid updates
- VkImageObj image_obj(m_device);
- image_obj.InitNoLayout(64, 64, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
- ASSERT_TRUE(image_obj.initialized());
- VkImageView image_view = image_obj.targetView(VK_FORMAT_R8G8B8A8_UNORM);
-
- VkDescriptorBufferInfo buff_info = {};
- buff_info.buffer = buffer.handle();
- VkDescriptorImageInfo img_info = {};
- img_info.imageView = image_view;
- VkWriteDescriptorSet descriptor_write = {};
- descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
- descriptor_write.dstBinding = 0;
- descriptor_write.descriptorCount = 1;
- descriptor_write.pTexelBufferView = &buffer_view;
- descriptor_write.pBufferInfo = &buff_info;
- descriptor_write.pImageInfo = &img_info;
-
- // These error messages align with the VkDescriptorType enum
- std::string error_codes[] = {
- "UNASSIGNED-CoreValidation-DrawState-InvalidImageView", // placeholder, no error for SAMPLER descriptor
- "UNASSIGNED-CoreValidation-DrawState-InvalidImageView", // COMBINED_IMAGE_SAMPLER
- "UNASSIGNED-CoreValidation-DrawState-InvalidImageView", // SAMPLED_IMAGE
- "UNASSIGNED-CoreValidation-DrawState-InvalidImageView", // STORAGE_IMAGE
- "VUID-VkWriteDescriptorSet-descriptorType-00334", // UNIFORM_TEXEL_BUFFER
- "VUID-VkWriteDescriptorSet-descriptorType-00335", // STORAGE_TEXEL_BUFFER
- "VUID-VkWriteDescriptorSet-descriptorType-00330", // UNIFORM_BUFFER
- "VUID-VkWriteDescriptorSet-descriptorType-00331", // STORAGE_BUFFER
- "VUID-VkWriteDescriptorSet-descriptorType-00330", // UNIFORM_BUFFER_DYNAMIC
- "VUID-VkWriteDescriptorSet-descriptorType-00331", // STORAGE_BUFFER_DYNAMIC
- "UNASSIGNED-CoreValidation-DrawState-InvalidImageView" // INPUT_ATTACHMENT
- };
- // Start loop at 1 as SAMPLER desc type has no usage bit error
- for (uint32_t i = 1; i < VK_DESCRIPTOR_TYPE_RANGE_SIZE; ++i) {
- if (VkDescriptorType(i) == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER) {
- // For UNIFORM_TEXEL_BUFFER, swap in storage_texel_buffer_view, whose buffer lacks UNIFORM_TEXEL usage
- descriptor_write.pTexelBufferView = &storage_texel_buffer_view;
- }
- descriptor_write.descriptorType = VkDescriptorType(i);
- descriptor_write.dstSet = descriptor_sets[i]->handle();
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, error_codes[i]);
-
- vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
-
- m_errorMonitor->VerifyFound();
- if (VkDescriptorType(i) == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER) {
- descriptor_write.pTexelBufferView = &buffer_view;
- }
- }
-}
-
-TEST_F(VkLayerTest, DSBufferInfoErrors) {
- TEST_DESCRIPTION(
- "Attempt to update buffer descriptor set that has incorrect parameters in VkDescriptorBufferInfo struct. This includes:\n"
- "1. offset value greater than or equal to buffer size\n"
- "2. range value of 0\n"
- "3. range value greater than buffer (size - offset)");
-
- // GPDDP2 needed for push descriptors support below
- bool gpdp2_support = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
- VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION);
- if (gpdp2_support) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- }
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- bool update_template_support = DeviceExtensionSupported(gpu(), nullptr, VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME);
- if (update_template_support) {
- m_device_extension_names.push_back(VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME);
- } else {
- printf("%s Descriptor Update Template Extensions not supported, template cases skipped.\n", kSkipPrefix);
- }
-
- // Note: Includes workaround for some implementations which incorrectly return 0 maxPushDescriptors
- bool push_descriptor_support = gpdp2_support &&
- DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME) &&
- (GetPushDescriptorProperties(instance(), gpu()).maxPushDescriptors > 0);
- if (push_descriptor_support) {
- m_device_extension_names.push_back(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
- } else {
- printf("%s Push Descriptor Extension not supported, push descriptor cases skipped.\n", kSkipPrefix);
- }
-
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
-
- std::vector<VkDescriptorSetLayoutBinding> ds_bindings = {
- {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr}};
- OneOffDescriptorSet descriptor_set(m_device, ds_bindings);
-
- // Create a buffer to be used for invalid updates
- VkBufferCreateInfo buff_ci = {};
- buff_ci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
- buff_ci.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
- buff_ci.size = m_device->props.limits.minUniformBufferOffsetAlignment;
- buff_ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
- VkBufferObj buffer;
- buffer.init(*m_device, buff_ci);
-
- VkDescriptorBufferInfo buff_info = {};
- buff_info.buffer = buffer.handle();
- VkWriteDescriptorSet descriptor_write = {};
- descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
- descriptor_write.dstBinding = 0;
- descriptor_write.descriptorCount = 1;
- descriptor_write.pTexelBufferView = nullptr;
- descriptor_write.pBufferInfo = &buff_info;
- descriptor_write.pImageInfo = nullptr;
-
- descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
- descriptor_write.dstSet = descriptor_set.set_;
-
- // Relying on the "return nullptr for non-enabled extensions" behavior of vkGetDeviceProcAddr
- auto vkCreateDescriptorUpdateTemplateKHR =
- (PFN_vkCreateDescriptorUpdateTemplateKHR)vkGetDeviceProcAddr(m_device->device(), "vkCreateDescriptorUpdateTemplateKHR");
- auto vkDestroyDescriptorUpdateTemplateKHR =
- (PFN_vkDestroyDescriptorUpdateTemplateKHR)vkGetDeviceProcAddr(m_device->device(), "vkDestroyDescriptorUpdateTemplateKHR");
- auto vkUpdateDescriptorSetWithTemplateKHR =
- (PFN_vkUpdateDescriptorSetWithTemplateKHR)vkGetDeviceProcAddr(m_device->device(), "vkUpdateDescriptorSetWithTemplateKHR");
-
- if (update_template_support) {
- ASSERT_NE(vkCreateDescriptorUpdateTemplateKHR, nullptr);
- ASSERT_NE(vkDestroyDescriptorUpdateTemplateKHR, nullptr);
- ASSERT_NE(vkUpdateDescriptorSetWithTemplateKHR, nullptr);
- }
-
- // Setup for update w/ template tests
- // Create a template of descriptor set updates
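- // The padding around buff_info exercises the template entry's offset/stride handling rather than assuming a packed layout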
- struct SimpleTemplateData {
- uint8_t padding[7];
- VkDescriptorBufferInfo buff_info;
- uint32_t other_padding[4];
- };
- SimpleTemplateData update_template_data = {};
-
- VkDescriptorUpdateTemplateEntry update_template_entry = {};
- update_template_entry.dstBinding = 0;
- update_template_entry.dstArrayElement = 0;
- update_template_entry.descriptorCount = 1;
- update_template_entry.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
- update_template_entry.offset = offsetof(SimpleTemplateData, buff_info);
- update_template_entry.stride = sizeof(SimpleTemplateData);
-
- auto update_template_ci = lvl_init_struct<VkDescriptorUpdateTemplateCreateInfoKHR>();
- update_template_ci.descriptorUpdateEntryCount = 1;
- update_template_ci.pDescriptorUpdateEntries = &update_template_entry;
- update_template_ci.templateType = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET;
- update_template_ci.descriptorSetLayout = descriptor_set.layout_.handle();
-
- VkDescriptorUpdateTemplate update_template = VK_NULL_HANDLE;
- if (update_template_support) {
- auto result = vkCreateDescriptorUpdateTemplateKHR(m_device->device(), &update_template_ci, nullptr, &update_template);
- ASSERT_VK_SUCCESS(result);
- }
-
- // VK_KHR_push_descriptor support
- auto vkCmdPushDescriptorSetKHR =
- (PFN_vkCmdPushDescriptorSetKHR)vkGetDeviceProcAddr(m_device->device(), "vkCmdPushDescriptorSetKHR");
- auto vkCmdPushDescriptorSetWithTemplateKHR =
- (PFN_vkCmdPushDescriptorSetWithTemplateKHR)vkGetDeviceProcAddr(m_device->device(), "vkCmdPushDescriptorSetWithTemplateKHR");
-
- std::unique_ptr<VkDescriptorSetLayoutObj> push_dsl = nullptr;
- std::unique_ptr<VkPipelineLayoutObj> pipeline_layout = nullptr;
- VkDescriptorUpdateTemplate push_template = VK_NULL_HANDLE;
- if (push_descriptor_support) {
- ASSERT_NE(vkCmdPushDescriptorSetKHR, nullptr);
- push_dsl.reset(
- new VkDescriptorSetLayoutObj(m_device, ds_bindings, VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR));
- pipeline_layout.reset(new VkPipelineLayoutObj(m_device, {push_dsl.get()}));
- ASSERT_TRUE(push_dsl->initialized());
-
- if (update_template_support) {
- ASSERT_NE(vkCmdPushDescriptorSetWithTemplateKHR, nullptr);
- auto push_template_ci = lvl_init_struct<VkDescriptorUpdateTemplateCreateInfoKHR>();
- push_template_ci.descriptorUpdateEntryCount = 1;
- push_template_ci.pDescriptorUpdateEntries = &update_template_entry;
- push_template_ci.templateType = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR;
- push_template_ci.descriptorSetLayout = VK_NULL_HANDLE;
- push_template_ci.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
- push_template_ci.pipelineLayout = pipeline_layout->handle();
- push_template_ci.set = 0;
- auto result = vkCreateDescriptorUpdateTemplateKHR(m_device->device(), &push_template_ci, nullptr, &push_template);
- ASSERT_VK_SUCCESS(result);
- }
- }
-
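- // do_test pushes the same (invalid) buff_info through every available update path:
- // vkUpdateDescriptorSets, push descriptors, and descriptor update templates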
- auto do_test = [&](const char *desired_failure) {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, desired_failure);
- vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
- m_errorMonitor->VerifyFound();
-
- if (push_descriptor_support) {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, desired_failure);
- m_commandBuffer->begin();
- vkCmdPushDescriptorSetKHR(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout->handle(), 0, 1,
- &descriptor_write);
- m_commandBuffer->end();
- m_errorMonitor->VerifyFound();
- }
-
- if (update_template_support) {
- update_template_data.buff_info = buff_info; // copy the test case information into our "pData"
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, desired_failure);
- vkUpdateDescriptorSetWithTemplateKHR(m_device->device(), descriptor_set.set_, update_template, &update_template_data);
- m_errorMonitor->VerifyFound();
- if (push_descriptor_support) {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, desired_failure);
- m_commandBuffer->begin();
- vkCmdPushDescriptorSetWithTemplateKHR(m_commandBuffer->handle(), push_template, pipeline_layout->handle(), 0,
- &update_template_data);
- m_commandBuffer->end();
- m_errorMonitor->VerifyFound();
- }
- }
- };
-
- // Cause error due to offset out of range
- buff_info.offset = buff_ci.size;
- buff_info.range = VK_WHOLE_SIZE;
- do_test("VUID-VkDescriptorBufferInfo-offset-00340");
-
- // Now cause error due to range of 0
- buff_info.offset = 0;
- buff_info.range = 0;
- do_test("VUID-VkDescriptorBufferInfo-range-00341");
-
- // Now cause error due to range exceeding buffer size - offset
- buff_info.offset = 0;
- buff_info.range = buff_ci.size + 1;
- do_test("VUID-VkDescriptorBufferInfo-range-00342");
-
- if (update_template_support) {
- vkDestroyDescriptorUpdateTemplateKHR(m_device->device(), update_template, nullptr);
- if (push_descriptor_support) {
- vkDestroyDescriptorUpdateTemplateKHR(m_device->device(), push_template, nullptr);
- }
- }
-}
-
-TEST_F(VkLayerTest, DSBufferLimitErrors) {
- TEST_DESCRIPTION(
- "Attempt to update buffer descriptor set that has VkDescriptorBufferInfo values that violate device limits.\n"
- "Test cases include:\n"
- "1. range of uniform buffer update exceeds maxUniformBufferRange\n"
- "2. offset of uniform buffer update is not multiple of minUniformBufferOffsetAlignment\n"
- "3. using VK_WHOLE_SIZE with uniform buffer size exceeding maxUniformBufferRange\n"
- "4. range of storage buffer update exceeds maxStorageBufferRange\n"
- "5. offset of storage buffer update is not multiple of minStorageBufferOffsetAlignment\n"
- "6. using VK_WHOLE_SIZE with storage buffer size exceeding maxStorageBufferRange");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- struct TestCase {
- VkDescriptorType descriptor_type;
- VkBufferUsageFlagBits buffer_usage;
- VkDeviceSize max_range;
- std::string max_range_vu;
- VkDeviceSize min_align;
- std::string min_align_vu;
- };
-
- for (const auto &test_case : {
- TestCase({VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT,
- m_device->props.limits.maxUniformBufferRange, "VUID-VkWriteDescriptorSet-descriptorType-00332",
- m_device->props.limits.minUniformBufferOffsetAlignment, "VUID-VkWriteDescriptorSet-descriptorType-00327"}),
- TestCase({VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT,
- m_device->props.limits.maxStorageBufferRange, "VUID-VkWriteDescriptorSet-descriptorType-00333",
- m_device->props.limits.minStorageBufferOffsetAlignment, "VUID-VkWriteDescriptorSet-descriptorType-00328"}),
- }) {
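- // Run the same three checks for uniform and storage buffers, each against its own range/alignment limits and VUIDs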
- // Create layout with single buffer
- OneOffDescriptorSet descriptor_set(m_device, {
- {0, test_case.descriptor_type, 1, VK_SHADER_STAGE_ALL, nullptr},
- });
-
- // Create a buffer to be used for invalid updates
- VkBufferCreateInfo bci = {};
- bci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
- bci.usage = test_case.buffer_usage;
- bci.size = test_case.max_range + test_case.min_align; // Make buffer bigger than range limit
- bci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
- VkBuffer buffer;
- VkResult err = vkCreateBuffer(m_device->device(), &bci, NULL, &buffer);
- ASSERT_VK_SUCCESS(err);
-
- // Have to bind memory to buffer before descriptor update
- VkMemoryRequirements mem_reqs;
- vkGetBufferMemoryRequirements(m_device->device(), buffer, &mem_reqs);
-
- VkMemoryAllocateInfo mem_alloc = {};
- mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
- mem_alloc.pNext = NULL;
- mem_alloc.allocationSize = mem_reqs.size;
- bool pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &mem_alloc, 0);
- if (!pass) {
- printf("%s Failed to allocate memory in DSBufferLimitErrors; skipped.\n", kSkipPrefix);
- vkDestroyBuffer(m_device->device(), buffer, NULL);
- continue;
- }
-
- VkDeviceMemory mem;
- err = vkAllocateMemory(m_device->device(), &mem_alloc, NULL, &mem);
- if (VK_SUCCESS != err) {
- printf("%s Failed to allocate memory in DSBufferLimitErrors; skipped.\n", kSkipPrefix);
- vkDestroyBuffer(m_device->device(), buffer, NULL);
- continue;
- }
- err = vkBindBufferMemory(m_device->device(), buffer, mem, 0);
- ASSERT_VK_SUCCESS(err);
-
- VkDescriptorBufferInfo buff_info = {};
- buff_info.buffer = buffer;
- VkWriteDescriptorSet descriptor_write = {};
- descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
- descriptor_write.dstBinding = 0;
- descriptor_write.descriptorCount = 1;
- descriptor_write.pTexelBufferView = nullptr;
- descriptor_write.pBufferInfo = &buff_info;
- descriptor_write.pImageInfo = nullptr;
- descriptor_write.descriptorType = test_case.descriptor_type;
- descriptor_write.dstSet = descriptor_set.set_;
-
- // Exceed range limit
- if (test_case.max_range != UINT32_MAX) {
- buff_info.range = test_case.max_range + 1;
- buff_info.offset = 0;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.max_range_vu);
- vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
- m_errorMonitor->VerifyFound();
- }
-
- // Reduce size of range to acceptable limit and cause offset error
- if (test_case.min_align > 1) {
- buff_info.range = test_case.max_range;
- buff_info.offset = test_case.min_align - 1;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.min_align_vu);
- vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
- m_errorMonitor->VerifyFound();
- }
-
- // Exceed effective range limit by using VK_WHOLE_SIZE
- buff_info.range = VK_WHOLE_SIZE;
- buff_info.offset = 0;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.max_range_vu);
- vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
- m_errorMonitor->VerifyFound();
-
- // Cleanup
- vkFreeMemory(m_device->device(), mem, NULL);
- vkDestroyBuffer(m_device->device(), buffer, NULL);
- }
-}
-
-TEST_F(VkLayerTest, DSAspectBitsErrors) {
- // TODO : Initially only catching case where DEPTH & STENCIL aspect bits
- // are set, but could expand this test to hit more cases.
- TEST_DESCRIPTION("Attempt to update descriptor sets for images that do not have correct aspect bits sets.");
- VkResult err;
-
- ASSERT_NO_FATAL_FAILURE(Init());
- auto depth_format = FindSupportedDepthStencilFormat(gpu());
- if (!depth_format) {
- printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
- return;
- }
-
- OneOffDescriptorSet descriptor_set(m_device, {
- {0, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 1, VK_SHADER_STAGE_ALL, nullptr},
- });
-
- // Create an image to be used for invalid updates
- VkImageObj image_obj(m_device);
- VkFormatProperties fmt_props;
- vkGetPhysicalDeviceFormatProperties(m_device->phy().handle(), depth_format, &fmt_props);
- if (!image_obj.IsCompatible(VK_IMAGE_USAGE_SAMPLED_BIT, fmt_props.linearTilingFeatures) &&
- !image_obj.IsCompatible(VK_IMAGE_USAGE_SAMPLED_BIT, fmt_props.optimalTilingFeatures)) {
- printf("%s Depth + Stencil format cannot be sampled. Skipped.\n", kSkipPrefix);
- return;
- }
- image_obj.Init(64, 64, 1, depth_format, VK_IMAGE_USAGE_SAMPLED_BIT);
- ASSERT_TRUE(image_obj.initialized());
- VkImage image = image_obj.image();
-
- // Now create view for image
- VkImageViewCreateInfo image_view_ci = {};
- image_view_ci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
- image_view_ci.image = image;
- image_view_ci.format = depth_format;
- image_view_ci.viewType = VK_IMAGE_VIEW_TYPE_2D;
- image_view_ci.subresourceRange.layerCount = 1;
- image_view_ci.subresourceRange.baseArrayLayer = 0;
- image_view_ci.subresourceRange.levelCount = 1;
- // Setting both depth & stencil aspect bits is illegal for an imageView used
- // to populate a descriptor set.
- image_view_ci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
-
- VkImageView image_view;
- err = vkCreateImageView(m_device->device(), &image_view_ci, NULL, &image_view);
- ASSERT_VK_SUCCESS(err);
- descriptor_set.WriteDescriptorImageInfo(0, image_view, VK_NULL_HANDLE, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
-
- const char *error_msg = "VUID-VkDescriptorImageInfo-imageView-01976";
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, error_msg);
- descriptor_set.UpdateDescriptorSets();
- m_errorMonitor->VerifyFound();
- vkDestroyImageView(m_device->device(), image_view, NULL);
-}
-
-TEST_F(VkLayerTest, DSTypeMismatch) {
- // Create DS w/ layout of one type and attempt Update w/ mis-matched type
- VkResult err;
-
- m_errorMonitor->SetDesiredFailureMsg(
- VK_DEBUG_REPORT_ERROR_BIT_EXT,
- " binding #0 with type VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER but update type is VK_DESCRIPTOR_TYPE_SAMPLER");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- OneOffDescriptorSet descriptor_set(m_device, {
- {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
- });
-
- VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
- VkSampler sampler;
- err = vkCreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
- ASSERT_VK_SUCCESS(err);
-
- descriptor_set.WriteDescriptorImageInfo(0, VK_NULL_HANDLE, sampler, VK_DESCRIPTOR_TYPE_SAMPLER);
- descriptor_set.UpdateDescriptorSets();
-
- m_errorMonitor->VerifyFound();
-
- vkDestroySampler(m_device->device(), sampler, NULL);
-}
-
-TEST_F(VkLayerTest, DSUpdateOutOfBounds) {
- // Have dstArrayElement exceed the descriptor count of the layout binding
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-dstArrayElement-00321");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- OneOffDescriptorSet descriptor_set(m_device, {
- {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
- });
-
- VkBufferTest buffer_test(m_device, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT);
- if (!buffer_test.GetBufferCurrent()) {
- // Something prevented creation of buffer so abort
- printf("%s Buffer creation failed, skipping test\n", kSkipPrefix);
- return;
- }
-
- // Correctly update descriptor to avoid "NOT_UPDATED" error
- VkDescriptorBufferInfo buff_info = {};
- buff_info.buffer = buffer_test.GetBuffer();
- buff_info.offset = 0;
- buff_info.range = 1024;
-
- VkWriteDescriptorSet descriptor_write;
- memset(&descriptor_write, 0, sizeof(descriptor_write));
- descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
- descriptor_write.dstSet = descriptor_set.set_;
- descriptor_write.dstArrayElement = 1; /* This index out of bounds for the update */
- descriptor_write.descriptorCount = 1;
- descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
- descriptor_write.pBufferInfo = &buff_info;
-
- vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
-
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, InvalidDSUpdateIndex) {
- // Create layout w/ count of 1 and attempt update to that layout w/ binding index 2
- VkResult err;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-dstBinding-00315");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- OneOffDescriptorSet descriptor_set(m_device, {
- {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
- });
-
- VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
- VkSampler sampler;
- err = vkCreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
- ASSERT_VK_SUCCESS(err);
-
- // This is the wrong type, but out of bounds will be flagged first
- descriptor_set.WriteDescriptorImageInfo(2, VK_NULL_HANDLE, sampler, VK_DESCRIPTOR_TYPE_SAMPLER);
- descriptor_set.UpdateDescriptorSets();
- m_errorMonitor->VerifyFound();
-
- vkDestroySampler(m_device->device(), sampler, NULL);
-}
-
-TEST_F(VkLayerTest, DSUpdateEmptyBinding) {
- // Create layout w/ empty binding and attempt to update it
- VkResult err;
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- OneOffDescriptorSet descriptor_set(m_device, {
- {0, VK_DESCRIPTOR_TYPE_SAMPLER, 0 /* !! */, VK_SHADER_STAGE_ALL, nullptr},
- });
-
- VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
- VkSampler sampler;
- err = vkCreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
- ASSERT_VK_SUCCESS(err);
-
- // descriptor_write.descriptorCount = 1, Lie here to avoid parameter_validation error
- // This is the wrong type, but empty binding error will be flagged first
- descriptor_set.WriteDescriptorImageInfo(0, VK_NULL_HANDLE, sampler, VK_DESCRIPTOR_TYPE_SAMPLER);
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-dstBinding-00316");
- descriptor_set.UpdateDescriptorSets();
- m_errorMonitor->VerifyFound();
-
- vkDestroySampler(m_device->device(), sampler, NULL);
-}
-
-TEST_F(VkLayerTest, InvalidDSUpdateStruct) {
- // Call vkUpdateDescriptorSets() w/ a write whose sType is not the valid
- // VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET
- VkResult err;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, ".sType must be VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- OneOffDescriptorSet descriptor_set(m_device, {
- {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
- });
-
- VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
- VkSampler sampler;
- err = vkCreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
- ASSERT_VK_SUCCESS(err);
-
- VkDescriptorImageInfo info = {};
- info.sampler = sampler;
-
- VkWriteDescriptorSet descriptor_write;
- memset(&descriptor_write, 0, sizeof(descriptor_write));
- descriptor_write.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO; /* Intentionally broken struct type */
- descriptor_write.dstSet = descriptor_set.set_;
- descriptor_write.descriptorCount = 1;
- // This is the wrong descriptor type, but the invalid sType will be flagged first
- descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
- descriptor_write.pImageInfo = &info;
-
- vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
-
- m_errorMonitor->VerifyFound();
-
- vkDestroySampler(m_device->device(), sampler, NULL);
-}
-
-TEST_F(VkLayerTest, SampleDescriptorUpdateError) {
- // Create a single Sampler descriptor and send it an invalid Sampler
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-descriptorType-00325");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- OneOffDescriptorSet descriptor_set(m_device, {
- {0, VK_DESCRIPTOR_TYPE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
- });
-
- VkSampler sampler = CastToHandle<VkSampler, uintptr_t>(0xbaadbeef); // Sampler with invalid handle
-
- descriptor_set.WriteDescriptorImageInfo(0, VK_NULL_HANDLE, sampler, VK_DESCRIPTOR_TYPE_SAMPLER);
- descriptor_set.UpdateDescriptorSets();
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, ImageViewDescriptorUpdateError) {
- // Create a single combined Image/Sampler descriptor and send it an invalid
- // imageView
- VkResult err;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-descriptorType-00326");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- OneOffDescriptorSet descriptor_set(m_device,
- {
- {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
- });
-
- VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
- VkSampler sampler;
- err = vkCreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
- ASSERT_VK_SUCCESS(err);
-
- VkImageView view = CastToHandle<VkImageView, uintptr_t>(0xbaadbeef); // invalid imageView object
-
- descriptor_set.WriteDescriptorImageInfo(0, view, sampler, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
- descriptor_set.UpdateDescriptorSets();
- m_errorMonitor->VerifyFound();
-
- vkDestroySampler(m_device->device(), sampler, NULL);
-}
-
-TEST_F(VkLayerTest, CopyDescriptorUpdateErrors) {
- // Create DS w/ layout of 2 types, write update 1 and attempt to copy-update
- // into the other
- VkResult err;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- " binding #1 with type VK_DESCRIPTOR_TYPE_SAMPLER. Types do not match.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- OneOffDescriptorSet descriptor_set(m_device, {
- {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
- {1, VK_DESCRIPTOR_TYPE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
- });
-
- VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
- VkSampler sampler;
- err = vkCreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
- ASSERT_VK_SUCCESS(err);
-
- // SAMPLER binding from layout above
- // This write update should succeed
- descriptor_set.WriteDescriptorImageInfo(1, VK_NULL_HANDLE, sampler, VK_DESCRIPTOR_TYPE_SAMPLER);
- descriptor_set.UpdateDescriptorSets();
- // Now perform a copy update that fails due to type mismatch
- VkCopyDescriptorSet copy_ds_update;
- memset(&copy_ds_update, 0, sizeof(VkCopyDescriptorSet));
- copy_ds_update.sType = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET;
- copy_ds_update.srcSet = descriptor_set.set_;
- copy_ds_update.srcBinding = 1; // Copy from SAMPLER binding
- copy_ds_update.dstSet = descriptor_set.set_;
- copy_ds_update.dstBinding = 0; // ERROR : copy to UNIFORM binding
- copy_ds_update.descriptorCount = 1; // copy 1 descriptor
- vkUpdateDescriptorSets(m_device->device(), 0, NULL, 1, &copy_ds_update);
-
- m_errorMonitor->VerifyFound();
- // Now perform a copy update that fails due to binding out of bounds
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " does not have copy update src binding of 3.");
- memset(&copy_ds_update, 0, sizeof(VkCopyDescriptorSet));
- copy_ds_update.sType = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET;
- copy_ds_update.srcSet = descriptor_set.set_;
- copy_ds_update.srcBinding = 3; // ERROR : Invalid binding for matching layout
- copy_ds_update.dstSet = descriptor_set.set_;
- copy_ds_update.dstBinding = 0;
- copy_ds_update.descriptorCount = 1; // Copy 1 descriptor
- vkUpdateDescriptorSets(m_device->device(), 0, NULL, 1, &copy_ds_update);
-
- m_errorMonitor->VerifyFound();
-
- // Now perform a copy update that fails because copying 5 descriptors oversteps the set's total of 2
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- " binding#1 with offset index of 1 plus update array offset of 0 and update of 5 "
- "descriptors oversteps total number of descriptors in set: 2.");
-
- memset(&copy_ds_update, 0, sizeof(VkCopyDescriptorSet));
- copy_ds_update.sType = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET;
- copy_ds_update.srcSet = descriptor_set.set_;
- copy_ds_update.srcBinding = 1;
- copy_ds_update.dstSet = descriptor_set.set_;
- copy_ds_update.dstBinding = 0;
- copy_ds_update.descriptorCount = 5; // ERROR copy 5 descriptors (out of bounds for layout)
- vkUpdateDescriptorSets(m_device->device(), 0, NULL, 1, &copy_ds_update);
-
- m_errorMonitor->VerifyFound();
-
- vkDestroySampler(m_device->device(), sampler, NULL);
-}
-
-TEST_F(VkLayerTest, DrawWithPipelineIncompatibleWithRenderPass) {
- TEST_DESCRIPTION(
- "Hit RenderPass incompatible cases. Initial case is drawing with an active renderpass that's not compatible with the bound "
- "pipeline state object's creation renderpass");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- OneOffDescriptorSet descriptor_set(m_device, {
- {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
- });
-
- const VkPipelineLayoutObj pipeline_layout(m_device, {&descriptor_set.layout_});
-
- VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this); // We shouldn't need a fragment shader
- // but add it to be able to run on more devices
- // Create a renderpass that will be incompatible with default renderpass
- VkAttachmentReference color_att = {};
- color_att.layout = VK_IMAGE_LAYOUT_GENERAL;
- VkSubpassDescription subpass = {};
- subpass.colorAttachmentCount = 1;
- subpass.pColorAttachments = &color_att;
- VkRenderPassCreateInfo rpci = {};
- rpci.subpassCount = 1;
- rpci.pSubpasses = &subpass;
- rpci.attachmentCount = 1;
- VkAttachmentDescription attach_desc = {};
- attach_desc.samples = VK_SAMPLE_COUNT_1_BIT;
- // Format incompatible with PSO RP color attach format B8G8R8A8_UNORM
- attach_desc.format = VK_FORMAT_R8G8B8A8_UNORM;
- attach_desc.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
- rpci.pAttachments = &attach_desc;
- rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
- VkRenderPass rp;
- vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp);
- VkPipelineObj pipe(m_device);
- pipe.AddShader(&vs);
- pipe.AddShader(&fs);
- pipe.AddDefaultColorAttachment();
- VkViewport viewport = {0.0f, 0.0f, 64.0f, 64.0f, 0.0f, 1.0f};
- m_viewports.push_back(viewport);
- pipe.SetViewport(m_viewports);
- VkRect2D rect = {{0, 0}, {64, 64}};
- m_scissors.push_back(rect);
- pipe.SetScissor(m_scissors);
- pipe.CreateVKPipeline(pipeline_layout.handle(), rp);
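- // The pipeline is created against the R8G8B8A8 renderpass above, while recording below begins the
- // default B8G8R8A8 renderpass, so the draw should be flagged as renderpass-incompatible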
-
- VkCommandBufferInheritanceInfo cbii = {};
- cbii.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
- cbii.renderPass = rp;
- cbii.subpass = 0;
- VkCommandBufferBeginInfo cbbi = {};
- cbbi.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
- cbbi.pInheritanceInfo = &cbii;
- vkBeginCommandBuffer(m_commandBuffer->handle(), &cbbi);
- vkCmdBeginRenderPass(m_commandBuffer->handle(), &m_renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDraw-renderPass-02684");
- // Render triangle (the error should trigger on the attempt to draw).
- m_commandBuffer->Draw(3, 1, 0, 0);
-
- // Finalize recording of the command buffer
- m_commandBuffer->EndRenderPass();
- m_commandBuffer->end();
-
- m_errorMonitor->VerifyFound();
-
- vkDestroyRenderPass(m_device->device(), rp, NULL);
-}
-
-TEST_F(VkLayerTest, Maint1BindingSliceOf3DImage) {
- TEST_DESCRIPTION(
- "Attempt to bind a slice of a 3D texture in a descriptor set. This is explicitly disallowed by KHR_maintenance1 to keep "
- "things simple for drivers.");
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
- } else {
- printf("%s %s is not supported; skipping\n", kSkipPrefix, VK_KHR_MAINTENANCE1_EXTENSION_NAME);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- VkResult err;
-
- OneOffDescriptorSet descriptor_set(m_device,
- {
- {0, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
- });
-
- VkImageCreateInfo ici = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
- nullptr,
- VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR,
- VK_IMAGE_TYPE_3D,
- VK_FORMAT_R8G8B8A8_UNORM,
- {32, 32, 32},
- 1,
- 1,
- VK_SAMPLE_COUNT_1_BIT,
- VK_IMAGE_TILING_OPTIMAL,
- VK_IMAGE_USAGE_SAMPLED_BIT,
- VK_SHARING_MODE_EXCLUSIVE,
- 0,
- nullptr,
- VK_IMAGE_LAYOUT_UNDEFINED};
- VkImageObj image(m_device);
- image.init(&ici);
- ASSERT_TRUE(image.initialized());
-
- VkImageViewCreateInfo ivci = {
- VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
- nullptr,
- 0,
- image.handle(),
- VK_IMAGE_VIEW_TYPE_2D,
- VK_FORMAT_R8G8B8A8_UNORM,
- {VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY,
- VK_COMPONENT_SWIZZLE_IDENTITY},
- {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1},
- };
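- // Creating a 2D view of one slice of the 2D_ARRAY_COMPATIBLE 3D image is legal; the error only
- // fires when the view is written to a descriptor below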
- VkImageView view;
- err = vkCreateImageView(m_device->device(), &ivci, nullptr, &view);
- ASSERT_VK_SUCCESS(err);
-
- // Meat of the test.
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDescriptorImageInfo-imageView-00343");
-
- VkDescriptorImageInfo dii = {VK_NULL_HANDLE, view, VK_IMAGE_LAYOUT_GENERAL};
- VkWriteDescriptorSet write = {VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
- nullptr,
- descriptor_set.set_,
- 0,
- 0,
- 1,
- VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
- &dii,
- nullptr,
- nullptr};
- vkUpdateDescriptorSets(m_device->device(), 1, &write, 0, nullptr);
-
- m_errorMonitor->VerifyFound();
-
- vkDestroyImageView(m_device->device(), view, nullptr);
-}
-
-TEST_F(VkLayerTest, UpdateDestroyDescriptorSetLayout) {
- TEST_DESCRIPTION("Attempt updates to descriptor sets with destroyed descriptor set layouts");
- // TODO: Update to match the descriptor set layout specific VUIDs/VALIDATION_ERROR_* when present
- const auto kWriteDestroyedLayout = "VUID-VkWriteDescriptorSet-dstSet-00320";
- const auto kCopyDstDestroyedLayout = "VUID-VkCopyDescriptorSet-dstSet-parameter";
- const auto kCopySrcDestroyedLayout = "VUID-VkCopyDescriptorSet-srcSet-parameter";
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- // Set up the descriptor (resource) and write/copy operations to use.
- float data[16] = {};
- VkConstantBufferObj buffer(m_device, sizeof(data), data, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT);
- ASSERT_TRUE(buffer.initialized());
-
- VkDescriptorBufferInfo info = {};
- info.buffer = buffer.handle();
- info.range = VK_WHOLE_SIZE;
-
- VkWriteDescriptorSet write_descriptor = {};
- write_descriptor.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
- write_descriptor.dstSet = VK_NULL_HANDLE; // must update this
- write_descriptor.dstBinding = 0;
- write_descriptor.descriptorCount = 1;
- write_descriptor.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
- write_descriptor.pBufferInfo = &info;
-
- VkCopyDescriptorSet copy_descriptor = {};
- copy_descriptor.sType = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET;
- copy_descriptor.srcSet = VK_NULL_HANDLE; // must update
- copy_descriptor.srcBinding = 0;
- copy_descriptor.dstSet = VK_NULL_HANDLE; // must update
- copy_descriptor.dstBinding = 0;
- copy_descriptor.descriptorCount = 1;
-
- // Create valid and invalid source and destination descriptor sets
- std::vector<VkDescriptorSetLayoutBinding> one_uniform_buffer = {
- {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
- };
- OneOffDescriptorSet good_dst(m_device, one_uniform_buffer);
- ASSERT_TRUE(good_dst.Initialized());
-
- OneOffDescriptorSet bad_dst(m_device, one_uniform_buffer);
- // Must assert before invalidating it below
- ASSERT_TRUE(bad_dst.Initialized());
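- // Assigning a default-constructed layout object destroys the original layout handle, leaving the set with a destroyed layout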
- bad_dst.layout_ = VkDescriptorSetLayoutObj();
-
- OneOffDescriptorSet good_src(m_device, one_uniform_buffer);
- ASSERT_TRUE(good_src.Initialized());
-
- // Put valid data in the good and bad sources, simultaneously doing a positive test on write and copy operations
- m_errorMonitor->ExpectSuccess();
- write_descriptor.dstSet = good_src.set_;
- vkUpdateDescriptorSets(m_device->device(), 1, &write_descriptor, 0, NULL);
- m_errorMonitor->VerifyNotFound();
-
- OneOffDescriptorSet bad_src(m_device, one_uniform_buffer);
- ASSERT_TRUE(bad_src.Initialized());
-
- // To complete our positive testing, use copy where above we used write.
- copy_descriptor.srcSet = good_src.set_;
- copy_descriptor.dstSet = bad_src.set_;
- vkUpdateDescriptorSets(m_device->device(), 0, nullptr, 1, &copy_descriptor);
- bad_src.layout_ = VkDescriptorSetLayoutObj();
- m_errorMonitor->VerifyNotFound();
-
- // Trigger the three invalid use errors
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, kWriteDestroyedLayout);
- write_descriptor.dstSet = bad_dst.set_;
- vkUpdateDescriptorSets(m_device->device(), 1, &write_descriptor, 0, NULL);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, kCopyDstDestroyedLayout);
- copy_descriptor.dstSet = bad_dst.set_;
- vkUpdateDescriptorSets(m_device->device(), 0, nullptr, 1, &copy_descriptor);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, kCopySrcDestroyedLayout);
- copy_descriptor.srcSet = bad_src.set_;
- copy_descriptor.dstSet = good_dst.set_;
- vkUpdateDescriptorSets(m_device->device(), 0, nullptr, 1, &copy_descriptor);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, FramebufferIncompatible) {
- TEST_DESCRIPTION(
- "Bind a secondary command buffer with a framebuffer that does not match the framebuffer for the active renderpass.");
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- // A renderpass with one color attachment.
- VkAttachmentDescription attachment = {0,
- VK_FORMAT_B8G8R8A8_UNORM,
- VK_SAMPLE_COUNT_1_BIT,
- VK_ATTACHMENT_LOAD_OP_DONT_CARE,
- VK_ATTACHMENT_STORE_OP_STORE,
- VK_ATTACHMENT_LOAD_OP_DONT_CARE,
- VK_ATTACHMENT_STORE_OP_DONT_CARE,
- VK_IMAGE_LAYOUT_UNDEFINED,
- VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
-
- VkAttachmentReference att_ref = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
-
- VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, &att_ref, nullptr, nullptr, 0, nullptr};
-
- VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, &attachment, 1, &subpass, 0, nullptr};
-
- VkRenderPass rp;
- VkResult err = vkCreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
- ASSERT_VK_SUCCESS(err);
-
- // A compatible framebuffer.
- VkImageObj image(m_device);
- image.Init(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
- ASSERT_TRUE(image.initialized());
-
- VkImageViewCreateInfo ivci = {
- VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
- nullptr,
- 0,
- image.handle(),
- VK_IMAGE_VIEW_TYPE_2D,
- VK_FORMAT_B8G8R8A8_UNORM,
- {VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY,
- VK_COMPONENT_SWIZZLE_IDENTITY},
- {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1},
- };
- VkImageView view;
- err = vkCreateImageView(m_device->device(), &ivci, nullptr, &view);
- ASSERT_VK_SUCCESS(err);
-
- VkFramebufferCreateInfo fci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 1, &view, 32, 32, 1};
- VkFramebuffer fb;
- err = vkCreateFramebuffer(m_device->device(), &fci, nullptr, &fb);
- ASSERT_VK_SUCCESS(err);
-
- VkCommandBufferAllocateInfo cbai = {};
- cbai.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
- cbai.commandPool = m_commandPool->handle();
- cbai.level = VK_COMMAND_BUFFER_LEVEL_SECONDARY;
- cbai.commandBufferCount = 1;
-
- VkCommandBuffer sec_cb;
- err = vkAllocateCommandBuffers(m_device->device(), &cbai, &sec_cb);
- ASSERT_VK_SUCCESS(err);
- VkCommandBufferBeginInfo cbbi = {};
- VkCommandBufferInheritanceInfo cbii = {};
- cbii.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
- cbii.renderPass = renderPass();
- cbii.framebuffer = fb;
- cbbi.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
- cbbi.pNext = NULL;
- cbbi.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT | VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT;
- cbbi.pInheritanceInfo = &cbii;
- vkBeginCommandBuffer(sec_cb, &cbbi);
- vkEndCommandBuffer(sec_cb);
-
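- // The primary command buffer begins the default render pass/framebuffer, while the secondary inherited 'fb' above, so executing it should trigger the framebuffer mismatch error.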
- VkCommandBufferBeginInfo cbbi2 = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, nullptr, 0, nullptr};
- vkBeginCommandBuffer(m_commandBuffer->handle(), &cbbi2);
- vkCmdBeginRenderPass(m_commandBuffer->handle(), &m_renderPassBeginInfo, VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS);
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdExecuteCommands-pCommandBuffers-00099");
- vkCmdExecuteCommands(m_commandBuffer->handle(), 1, &sec_cb);
- m_errorMonitor->VerifyFound();
- // Cleanup
-
- vkCmdEndRenderPass(m_commandBuffer->handle());
- vkEndCommandBuffer(m_commandBuffer->handle());
-
- vkDestroyImageView(m_device->device(), view, NULL);
- vkDestroyRenderPass(m_device->device(), rp, NULL);
- vkDestroyFramebuffer(m_device->device(), fb, NULL);
-}
-
-TEST_F(VkLayerTest, RenderPassMissingAttachment) {
- TEST_DESCRIPTION("Begin render pass with missing framebuffer attachment");
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- // Create a renderPass with a single color attachment
- VkAttachmentReference attach = {};
- attach.layout = VK_IMAGE_LAYOUT_GENERAL;
- VkSubpassDescription subpass = {};
- subpass.pColorAttachments = &attach;
- VkRenderPassCreateInfo rpci = {};
- rpci.subpassCount = 1;
- rpci.pSubpasses = &subpass;
- rpci.attachmentCount = 1;
- VkAttachmentDescription attach_desc = {};
- attach_desc.format = VK_FORMAT_B8G8R8A8_UNORM;
- attach_desc.samples = VK_SAMPLE_COUNT_1_BIT;
- attach_desc.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
- rpci.pAttachments = &attach_desc;
- rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
- VkRenderPass rp;
- VkResult err = vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp);
- ASSERT_VK_SUCCESS(err);
-
- auto createView = lvl_init_struct<VkImageViewCreateInfo>();
- createView.image = m_renderTargets[0]->handle();
- createView.viewType = VK_IMAGE_VIEW_TYPE_2D;
- createView.format = VK_FORMAT_B8G8R8A8_UNORM;
- createView.components.r = VK_COMPONENT_SWIZZLE_R;
- createView.components.g = VK_COMPONENT_SWIZZLE_G;
- createView.components.b = VK_COMPONENT_SWIZZLE_B;
- createView.components.a = VK_COMPONENT_SWIZZLE_A;
- createView.subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
- createView.flags = 0;
-
- VkImageView iv;
- vkCreateImageView(m_device->handle(), &createView, nullptr, &iv);
-
- auto fb_info = lvl_init_struct<VkFramebufferCreateInfo>();
- fb_info.renderPass = rp;
- fb_info.attachmentCount = 1;
- fb_info.pAttachments = &iv;
- fb_info.width = 100;
- fb_info.height = 100;
- fb_info.layers = 1;
-
- // Create the framebuffer, then destroy the view it uses.
- VkFramebuffer fb;
- err = vkCreateFramebuffer(device(), &fb_info, NULL, &fb);
- vkDestroyImageView(device(), iv, NULL);
- ASSERT_VK_SUCCESS(err);
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkRenderPassBeginInfo-framebuffer-parameter");
-
- auto rpbi = lvl_init_struct<VkRenderPassBeginInfo>();
- rpbi.renderPass = rp;
- rpbi.framebuffer = fb;
- rpbi.renderArea = {{0, 0}, {32, 32}};
-
- m_commandBuffer->begin();
- vkCmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
- // Don't call vkCmdEndRenderPass, as the begin has been "skipped" due to the error condition.
- m_errorMonitor->VerifyFound();
- m_commandBuffer->end();
-
- vkDestroyFramebuffer(m_device->device(), fb, NULL);
- vkDestroyRenderPass(m_device->device(), rp, NULL);
-}
-
-TEST_F(VkLayerTest, AttachmentDescriptionUndefinedFormat) {
- TEST_DESCRIPTION("Create a render pass with an attachment description format set to VK_FORMAT_UNDEFINED");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
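- // An undefined attachment format is reported as a warning rather than an error, so creation may still succeed; the handle is destroyed conditionally below.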
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_WARNING_BIT_EXT, "format is VK_FORMAT_UNDEFINED");
-
- VkAttachmentReference color_attach = {};
- color_attach.layout = VK_IMAGE_LAYOUT_GENERAL;
- color_attach.attachment = 0;
- VkSubpassDescription subpass = {};
- subpass.colorAttachmentCount = 1;
- subpass.pColorAttachments = &color_attach;
-
- VkRenderPassCreateInfo rpci = {};
- rpci.subpassCount = 1;
- rpci.pSubpasses = &subpass;
- rpci.attachmentCount = 1;
- VkAttachmentDescription attach_desc = {};
- attach_desc.format = VK_FORMAT_UNDEFINED;
- attach_desc.samples = VK_SAMPLE_COUNT_1_BIT;
- attach_desc.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
- rpci.pAttachments = &attach_desc;
- rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
- VkRenderPass rp;
- VkResult result = vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp);
-
- m_errorMonitor->VerifyFound();
-
- if (result == VK_SUCCESS) {
- vkDestroyRenderPass(m_device->device(), rp, NULL);
- }
-}
-
-TEST_F(VkLayerTest, InvalidCreateDescriptorPool) {
- TEST_DESCRIPTION("Attempt to create descriptor pool with invalid parameters");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- const uint32_t default_descriptor_count = 1;
- const VkDescriptorPoolSize dp_size_template{VK_DESCRIPTOR_TYPE_SAMPLER, default_descriptor_count};
-
- const VkDescriptorPoolCreateInfo dp_ci_template{VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
- nullptr, // pNext
- 0, // flags
- 1, // maxSets
- 1, // poolSizeCount
- &dp_size_template};
-
- // try maxSets = 0
- {
- VkDescriptorPoolCreateInfo invalid_dp_ci = dp_ci_template;
- invalid_dp_ci.maxSets = 0; // invalid maxSets value
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDescriptorPoolCreateInfo-maxSets-00301");
- {
- VkDescriptorPool pool;
- vkCreateDescriptorPool(m_device->device(), &invalid_dp_ci, nullptr, &pool);
- }
- m_errorMonitor->VerifyFound();
- }
-
- // try descriptorCount = 0
- {
- VkDescriptorPoolSize invalid_dp_size = dp_size_template;
- invalid_dp_size.descriptorCount = 0; // invalid descriptorCount value
-
- VkDescriptorPoolCreateInfo dp_ci = dp_ci_template;
- dp_ci.pPoolSizes = &invalid_dp_size;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDescriptorPoolSize-descriptorCount-00302");
- {
- VkDescriptorPool pool;
- vkCreateDescriptorPool(m_device->device(), &dp_ci, nullptr, &pool);
- }
- m_errorMonitor->VerifyFound();
- }
-}
-
-TEST_F(VkLayerTest, DuplicateDescriptorBinding) {
- TEST_DESCRIPTION("Create a descriptor set layout with a duplicate binding number.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- // Create layout where two binding #s are "1"
- static const uint32_t NUM_BINDINGS = 3;
- VkDescriptorSetLayoutBinding dsl_binding[NUM_BINDINGS] = {};
- dsl_binding[0].binding = 1;
- dsl_binding[0].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
- dsl_binding[0].descriptorCount = 1;
- dsl_binding[0].stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
- dsl_binding[0].pImmutableSamplers = NULL;
- dsl_binding[1].binding = 0;
- dsl_binding[1].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
- dsl_binding[1].descriptorCount = 1;
- dsl_binding[1].stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
- dsl_binding[1].pImmutableSamplers = NULL;
- dsl_binding[2].binding = 1; // Duplicate binding should cause error
- dsl_binding[2].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
- dsl_binding[2].descriptorCount = 1;
- dsl_binding[2].stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
- dsl_binding[2].pImmutableSamplers = NULL;
-
- VkDescriptorSetLayoutCreateInfo ds_layout_ci = {};
- ds_layout_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
- ds_layout_ci.pNext = NULL;
- ds_layout_ci.bindingCount = NUM_BINDINGS;
- ds_layout_ci.pBindings = dsl_binding;
- VkDescriptorSetLayout ds_layout;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDescriptorSetLayoutCreateInfo-binding-00279");
- vkCreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, InvalidPushDescriptorSetLayout) {
- TEST_DESCRIPTION("Create a push descriptor set layout with invalid bindings.");
-
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- } else {
- printf("%s Did not find VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME; skipped.\n", kSkipPrefix);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
- } else {
- printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- // Get the push descriptor limits
- auto push_descriptor_prop = GetPushDescriptorProperties(instance(), gpu());
- if (push_descriptor_prop.maxPushDescriptors < 1) {
- // Some implementations report an invalid maxPushDescriptors of 0
- printf("%s maxPushDescriptors is zero, skipping tests\n", kSkipPrefix);
- return;
- }
-
- VkDescriptorSetLayoutBinding binding = {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr};
-
- auto ds_layout_ci = lvl_init_struct<VkDescriptorSetLayoutCreateInfo>();
- ds_layout_ci.flags = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR;
- ds_layout_ci.bindingCount = 1;
- ds_layout_ci.pBindings = &binding;
-
- // Note that as binding is referenced in ds_layout_ci, it is effectively in the closure by reference as well.
- auto test_create_ds_layout = [&ds_layout_ci, this](std::string error) {
- VkDescriptorSetLayout ds_layout = VK_NULL_HANDLE;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, error);
- vkCreateDescriptorSetLayout(m_device->handle(), &ds_layout_ci, nullptr, &ds_layout);
- m_errorMonitor->VerifyFound();
- vkDestroyDescriptorSetLayout(m_device->handle(), ds_layout, nullptr);
- };
-
- // Starting with the initial VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC type set above...
- test_create_ds_layout("VUID-VkDescriptorSetLayoutCreateInfo-flags-00280");
-
- binding.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC;
- test_create_ds_layout(
- "VUID-VkDescriptorSetLayoutCreateInfo-flags-00280"); // This is the same VUID as above, just a second error condition.
-
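- // Guard against overflow: descriptorCount is set to maxPushDescriptors + 1, which would wrap if the limit were UINT32_MAX.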
- if (!(push_descriptor_prop.maxPushDescriptors == std::numeric_limits<uint32_t>::max())) {
- binding.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
- binding.descriptorCount = push_descriptor_prop.maxPushDescriptors + 1;
- test_create_ds_layout("VUID-VkDescriptorSetLayoutCreateInfo-flags-00281");
- } else {
- printf("%s maxPushDescriptors is set to maximum unit32_t value, skipping 'out of range test'.\n", kSkipPrefix);
- }
-}
-
-TEST_F(VkLayerTest, PushDescriptorSetLayoutWithoutExtension) {
- TEST_DESCRIPTION("Create a push descriptor set layout without loading the needed extension.");
- ASSERT_NO_FATAL_FAILURE(Init());
-
- VkDescriptorSetLayoutBinding binding = {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr};
-
- auto ds_layout_ci = lvl_init_struct<VkDescriptorSetLayoutCreateInfo>();
- ds_layout_ci.flags = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR;
- ds_layout_ci.bindingCount = 1;
- ds_layout_ci.pBindings = &binding;
-
- std::string error = "Attempted to use VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR in ";
- error = error + "VkDescriptorSetLayoutCreateInfo::flags but its required extension ";
- error = error + VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME;
- error = error + " has not been enabled.";
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, error.c_str());
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDescriptorSetLayoutCreateInfo-flags-00281");
- VkDescriptorSetLayout ds_layout = VK_NULL_HANDLE;
- vkCreateDescriptorSetLayout(m_device->handle(), &ds_layout_ci, nullptr, &ds_layout);
- m_errorMonitor->VerifyFound();
- vkDestroyDescriptorSetLayout(m_device->handle(), ds_layout, nullptr);
-}
-
-TEST_F(VkLayerTest, DescriptorIndexingSetLayoutWithoutExtension) {
- TEST_DESCRIPTION("Create an update_after_bind set layout without loading the needed extension.");
- ASSERT_NO_FATAL_FAILURE(Init());
-
- auto ds_layout_ci = lvl_init_struct<VkDescriptorSetLayoutCreateInfo>();
- ds_layout_ci.flags = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT;
-
- std::string error = "Attemped to use VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT in ";
- error = error + "VkDescriptorSetLayoutCreateInfo::flags but its required extension ";
- error = error + VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME;
- error = error + " has not been enabled.";
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, error.c_str());
- VkDescriptorSetLayout ds_layout = VK_NULL_HANDLE;
- vkCreateDescriptorSetLayout(m_device->handle(), &ds_layout_ci, nullptr, &ds_layout);
- m_errorMonitor->VerifyFound();
- vkDestroyDescriptorSetLayout(m_device->handle(), ds_layout, nullptr);
-}
-
-TEST_F(VkLayerTest, DescriptorIndexingSetLayout) {
- TEST_DESCRIPTION("Exercise various create/allocate-time errors related to VK_EXT_descriptor_indexing.");
-
- if (!(CheckDescriptorIndexingSupportAndInitFramework(this, m_instance_extension_names, m_device_extension_names, NULL,
- m_errorMonitor))) {
- printf("%s Descriptor indexing or one of its dependencies not supported, skipping tests\n.", kSkipPrefix);
- return;
- }
-
- PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
- (PFN_vkGetPhysicalDeviceFeatures2KHR)vkGetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
- ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
-
- // Create a device that enables all supported indexing features except descriptorBindingUniformBufferUpdateAfterBind
- auto indexing_features = lvl_init_struct<VkPhysicalDeviceDescriptorIndexingFeaturesEXT>();
- auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&indexing_features);
- vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
-
- indexing_features.descriptorBindingUniformBufferUpdateAfterBind = VK_FALSE;
-
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
-
- std::array<VkDescriptorBindingFlagsEXT, 2> flags = {VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT,
- VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT};
- auto flags_create_info = lvl_init_struct<VkDescriptorSetLayoutBindingFlagsCreateInfoEXT>();
- flags_create_info.bindingCount = (uint32_t)flags.size();
- flags_create_info.pBindingFlags = flags.data();
-
- VkDescriptorSetLayoutBinding binding = {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr};
- auto ds_layout_ci = lvl_init_struct<VkDescriptorSetLayoutCreateInfo>(&flags_create_info);
- ds_layout_ci.bindingCount = 1;
- ds_layout_ci.pBindings = &binding;
- VkDescriptorSetLayout ds_layout = VK_NULL_HANDLE;
-
- // VU for VkDescriptorSetLayoutBindingFlagsCreateInfoEXT::bindingCount
- flags_create_info.bindingCount = 2;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-bindingCount-03002");
- VkResult err = vkCreateDescriptorSetLayout(m_device->handle(), &ds_layout_ci, nullptr, &ds_layout);
- m_errorMonitor->VerifyFound();
- vkDestroyDescriptorSetLayout(m_device->handle(), ds_layout, nullptr);
-
- flags_create_info.bindingCount = 1;
-
- // set is missing UPDATE_AFTER_BIND_POOL flag.
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDescriptorSetLayoutCreateInfo-flags-03000");
- // binding uses a feature we disabled
- m_errorMonitor->SetDesiredFailureMsg(
- VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingUniformBufferUpdateAfterBind-03005");
- err = vkCreateDescriptorSetLayout(m_device->handle(), &ds_layout_ci, nullptr, &ds_layout);
- m_errorMonitor->VerifyFound();
- vkDestroyDescriptorSetLayout(m_device->handle(), ds_layout, nullptr);
-
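- // Now create a valid update-after-bind layout with no bindings, so allocating it from a pool created without VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT can be tested below.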
- ds_layout_ci.flags = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT;
- ds_layout_ci.bindingCount = 0;
- flags_create_info.bindingCount = 0;
- err = vkCreateDescriptorSetLayout(m_device->handle(), &ds_layout_ci, nullptr, &ds_layout);
- ASSERT_VK_SUCCESS(err);
-
- VkDescriptorPoolSize pool_size = {binding.descriptorType, binding.descriptorCount};
- auto dspci = lvl_init_struct<VkDescriptorPoolCreateInfo>();
- dspci.poolSizeCount = 1;
- dspci.pPoolSizes = &pool_size;
- dspci.maxSets = 1;
- VkDescriptorPool pool;
- err = vkCreateDescriptorPool(m_device->handle(), &dspci, nullptr, &pool);
- ASSERT_VK_SUCCESS(err);
-
- auto ds_alloc_info = lvl_init_struct<VkDescriptorSetAllocateInfo>();
- ds_alloc_info.descriptorPool = pool;
- ds_alloc_info.descriptorSetCount = 1;
- ds_alloc_info.pSetLayouts = &ds_layout;
-
- VkDescriptorSet ds = VK_NULL_HANDLE;
- // mismatch between descriptor set and pool
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDescriptorSetAllocateInfo-pSetLayouts-03044");
- vkAllocateDescriptorSets(m_device->handle(), &ds_alloc_info, &ds);
- m_errorMonitor->VerifyFound();
-
- vkDestroyDescriptorSetLayout(m_device->handle(), ds_layout, nullptr);
- vkDestroyDescriptorPool(m_device->handle(), pool, nullptr);
-
- if (indexing_features.descriptorBindingVariableDescriptorCount) {
- ds_layout_ci.flags = 0;
- ds_layout_ci.bindingCount = 1;
- flags_create_info.bindingCount = 1;
- flags[0] = VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT;
- err = vkCreateDescriptorSetLayout(m_device->handle(), &ds_layout_ci, nullptr, &ds_layout);
- ASSERT_VK_SUCCESS(err);
-
- pool_size = {binding.descriptorType, binding.descriptorCount};
- dspci = lvl_init_struct<VkDescriptorPoolCreateInfo>();
- dspci.poolSizeCount = 1;
- dspci.pPoolSizes = &pool_size;
- dspci.maxSets = 1;
- err = vkCreateDescriptorPool(m_device->handle(), &dspci, nullptr, &pool);
- ASSERT_VK_SUCCESS(err);
-
- auto count_alloc_info = lvl_init_struct<VkDescriptorSetVariableDescriptorCountAllocateInfoEXT>();
- count_alloc_info.descriptorSetCount = 1;
- // Set variable count larger than what was in the descriptor binding
- uint32_t variable_count = 2;
- count_alloc_info.pDescriptorCounts = &variable_count;
-
- ds_alloc_info = lvl_init_struct<VkDescriptorSetAllocateInfo>(&count_alloc_info);
- ds_alloc_info.descriptorPool = pool;
- ds_alloc_info.descriptorSetCount = 1;
- ds_alloc_info.pSetLayouts = &ds_layout;
-
- ds = VK_NULL_HANDLE;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkDescriptorSetVariableDescriptorCountAllocateInfoEXT-pSetLayouts-03046");
- vkAllocateDescriptorSets(m_device->handle(), &ds_alloc_info, &ds);
- m_errorMonitor->VerifyFound();
-
- vkDestroyDescriptorSetLayout(m_device->handle(), ds_layout, nullptr);
- vkDestroyDescriptorPool(m_device->handle(), pool, nullptr);
- }
-}
-
-TEST_F(VkLayerTest, DescriptorIndexingUpdateAfterBind) {
- TEST_DESCRIPTION("Exercise errors for updating a descriptor set after it is bound.");
-
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- } else {
- printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix,
- VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- if (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME) &&
- DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE3_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_MAINTENANCE3_EXTENSION_NAME);
- } else {
- printf("%s Descriptor Indexing or Maintenance3 Extension not supported, skipping tests\n", kSkipPrefix);
- return;
- }
-
- PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
- (PFN_vkGetPhysicalDeviceFeatures2KHR)vkGetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
- ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
-
- // Create a device that enables all supported indexing features except descriptorBindingUniformBufferUpdateAfterBind
- auto indexing_features = lvl_init_struct<VkPhysicalDeviceDescriptorIndexingFeaturesEXT>();
- auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&indexing_features);
- vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
-
- indexing_features.descriptorBindingUniformBufferUpdateAfterBind = VK_FALSE;
-
- if (VK_FALSE == indexing_features.descriptorBindingStorageBufferUpdateAfterBind) {
- printf("%s Test requires (unsupported) descriptorBindingStorageBufferUpdateAfterBind, skipping\n", kSkipPrefix);
- return;
- }
- if (VK_FALSE == features2.features.fragmentStoresAndAtomics) {
- printf("%s Test requires (unsupported) fragmentStoresAndAtomics, skipping\n", kSkipPrefix);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
- ASSERT_NO_FATAL_FAILURE(InitViewport());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkDescriptorBindingFlagsEXT flags[2] = {0, VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT};
- auto flags_create_info = lvl_init_struct<VkDescriptorSetLayoutBindingFlagsCreateInfoEXT>();
- flags_create_info.bindingCount = 2;
- flags_create_info.pBindingFlags = &flags[0];
-
- // Descriptor set has two bindings - only the second is update_after_bind
- VkDescriptorSetLayoutBinding binding[2] = {
- {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
- {1, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
- };
- auto ds_layout_ci = lvl_init_struct<VkDescriptorSetLayoutCreateInfo>(&flags_create_info);
- ds_layout_ci.flags = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT;
- ds_layout_ci.bindingCount = 2;
- ds_layout_ci.pBindings = &binding[0];
- VkDescriptorSetLayout ds_layout = VK_NULL_HANDLE;
-
- VkResult err = vkCreateDescriptorSetLayout(m_device->handle(), &ds_layout_ci, nullptr, &ds_layout);
-
- VkDescriptorPoolSize pool_sizes[2] = {
- {binding[0].descriptorType, binding[0].descriptorCount},
- {binding[1].descriptorType, binding[1].descriptorCount},
- };
- auto dspci = lvl_init_struct<VkDescriptorPoolCreateInfo>();
- dspci.flags = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT;
- dspci.poolSizeCount = 2;
- dspci.pPoolSizes = &pool_sizes[0];
- dspci.maxSets = 1;
- VkDescriptorPool pool;
- err = vkCreateDescriptorPool(m_device->handle(), &dspci, nullptr, &pool);
- ASSERT_VK_SUCCESS(err);
-
- auto ds_alloc_info = lvl_init_struct<VkDescriptorSetAllocateInfo>();
- ds_alloc_info.descriptorPool = pool;
- ds_alloc_info.descriptorSetCount = 1;
- ds_alloc_info.pSetLayouts = &ds_layout;
-
- VkDescriptorSet ds = VK_NULL_HANDLE;
- err = vkAllocateDescriptorSets(m_device->handle(), &ds_alloc_info, &ds);
- ASSERT_VK_SUCCESS(err);
-
- VkBufferCreateInfo buffCI = {};
- buffCI.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
- buffCI.size = 1024;
- buffCI.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
-
- VkBuffer dynamic_uniform_buffer;
- err = vkCreateBuffer(m_device->device(), &buffCI, NULL, &dynamic_uniform_buffer);
- ASSERT_VK_SUCCESS(err);
-
- VkDeviceMemory mem;
- VkMemoryRequirements mem_reqs;
- vkGetBufferMemoryRequirements(m_device->device(), dynamic_uniform_buffer, &mem_reqs);
-
- VkMemoryAllocateInfo mem_alloc_info = {};
- mem_alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
- mem_alloc_info.allocationSize = mem_reqs.size;
- m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &mem_alloc_info, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
- err = vkAllocateMemory(m_device->device(), &mem_alloc_info, NULL, &mem);
- ASSERT_VK_SUCCESS(err);
-
- err = vkBindBufferMemory(m_device->device(), dynamic_uniform_buffer, mem, 0);
- ASSERT_VK_SUCCESS(err);
-
- VkDescriptorBufferInfo buffInfo[2] = {};
- buffInfo[0].buffer = dynamic_uniform_buffer;
- buffInfo[0].offset = 0;
- buffInfo[0].range = 1024;
-
- VkWriteDescriptorSet descriptor_write[2] = {};
- descriptor_write[0].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
- descriptor_write[0].dstSet = ds;
- descriptor_write[0].dstBinding = 0;
- descriptor_write[0].descriptorCount = 1;
- descriptor_write[0].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
- descriptor_write[0].pBufferInfo = buffInfo;
- descriptor_write[1] = descriptor_write[0];
- descriptor_write[1].dstBinding = 1;
- descriptor_write[1].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
-
- VkPipelineLayout pipeline_layout;
- VkPipelineLayoutCreateInfo pipeline_layout_ci = {};
- pipeline_layout_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
- pipeline_layout_ci.setLayoutCount = 1;
- pipeline_layout_ci.pSetLayouts = &ds_layout;
-
- vkCreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
-
- // Create a dummy pipeline, since VL inspects which bindings are actually used at draw time
- char const *fsSource =
- "#version 450\n"
- "\n"
- "layout(location=0) out vec4 color;\n"
- "layout(set=0, binding=0) uniform foo0 { float x0; } bar0;\n"
- "layout(set=0, binding=1) buffer foo1 { float x1; } bar1;\n"
- "void main(){\n"
- " color = vec4(bar0.x0 + bar1.x1);\n"
- "}\n";
-
- VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- VkPipelineObj pipe(m_device);
- pipe.SetViewport(m_viewports);
- pipe.SetScissor(m_scissors);
- pipe.AddDefaultColorAttachment();
- pipe.AddShader(&vs);
- pipe.AddShader(&fs);
- pipe.CreateVKPipeline(pipeline_layout, m_renderPass);
-
- // Make both bindings valid before binding to the command buffer
- vkUpdateDescriptorSets(m_device->device(), 2, &descriptor_write[0], 0, NULL);
- m_errorMonitor->VerifyNotFound();
-
- // Two subtests. First only updates the update_after_bind binding and expects
- // no error. Second updates the other binding and expects an error when the
- // command buffer is ended.
- for (uint32_t i = 0; i < 2; ++i) {
- m_commandBuffer->begin();
-
- vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout, 0, 1, &ds, 0, NULL);
-
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
- vkCmdDraw(m_commandBuffer->handle(), 0, 0, 0, 0);
- vkCmdEndRenderPass(m_commandBuffer->handle());
-
- m_errorMonitor->VerifyNotFound();
- // Valid to update binding 1 after being bound
- vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write[1], 0, NULL);
- m_errorMonitor->VerifyNotFound();
-
- if (i == 0) {
- // expect no errors
- m_commandBuffer->end();
- m_errorMonitor->VerifyNotFound();
- } else {
- // Invalid to update binding 0 after being bound. But the error is actually
- // generated during vkEndCommandBuffer
- vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write[0], 0, NULL);
- m_errorMonitor->VerifyNotFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBuffer-VkDescriptorSet");
-
- vkEndCommandBuffer(m_commandBuffer->handle());
- m_errorMonitor->VerifyFound();
- }
- }
-
- vkDestroyDescriptorSetLayout(m_device->handle(), ds_layout, nullptr);
- vkDestroyDescriptorPool(m_device->handle(), pool, nullptr);
- vkDestroyBuffer(m_device->handle(), dynamic_uniform_buffer, NULL);
- vkFreeMemory(m_device->handle(), mem, NULL);
- vkDestroyPipelineLayout(m_device->handle(), pipeline_layout, NULL);
-}
-
-TEST_F(VkLayerTest, AllocatePushDescriptorSet) {
- TEST_DESCRIPTION("Attempt to allocate a push descriptor set.");
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- } else {
- printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix,
- VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
- } else {
- printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- auto push_descriptor_prop = GetPushDescriptorProperties(instance(), gpu());
- if (push_descriptor_prop.maxPushDescriptors < 1) {
- // Some implementations report an invalid maxPushDescriptors of 0
- printf("%s maxPushDescriptors is zero, skipping tests\n", kSkipPrefix);
- return;
- }
-
- VkDescriptorSetLayoutBinding binding = {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr};
- auto ds_layout_ci = lvl_init_struct<VkDescriptorSetLayoutCreateInfo>();
- ds_layout_ci.flags = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR;
- ds_layout_ci.bindingCount = 1;
- ds_layout_ci.pBindings = &binding;
- VkDescriptorSetLayout ds_layout = VK_NULL_HANDLE;
- VkResult err = vkCreateDescriptorSetLayout(m_device->handle(), &ds_layout_ci, nullptr, &ds_layout);
- ASSERT_VK_SUCCESS(err);
-
- VkDescriptorPoolSize pool_size = {binding.descriptorType, binding.descriptorCount};
- auto dspci = lvl_init_struct<VkDescriptorPoolCreateInfo>();
- dspci.poolSizeCount = 1;
- dspci.pPoolSizes = &pool_size;
- dspci.maxSets = 1;
- VkDescriptorPool pool;
- err = vkCreateDescriptorPool(m_device->handle(), &dspci, nullptr, &pool);
- ASSERT_VK_SUCCESS(err);
-
- auto ds_alloc_info = lvl_init_struct<VkDescriptorSetAllocateInfo>();
- ds_alloc_info.descriptorPool = pool;
- ds_alloc_info.descriptorSetCount = 1;
- ds_alloc_info.pSetLayouts = &ds_layout;
-
- VkDescriptorSet ds = VK_NULL_HANDLE;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDescriptorSetAllocateInfo-pSetLayouts-00308");
- vkAllocateDescriptorSets(m_device->handle(), &ds_alloc_info, &ds);
- m_errorMonitor->VerifyFound();
-
- vkDestroyDescriptorPool(m_device->handle(), pool, nullptr);
- vkDestroyDescriptorSetLayout(m_device->handle(), ds_layout, nullptr);
-}
-
-TEST_F(VkLayerTest, CreateDescriptorUpdateTemplate) {
- TEST_DESCRIPTION("Verify error messages for invalid vkCreateDescriptorUpdateTemplate calls.");
-
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- } else {
- printf("%s Did not find VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME; skipped.\n", kSkipPrefix);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- // Note: Includes workaround for some implementations which incorrectly return 0 maxPushDescriptors
- if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME) &&
- DeviceExtensionSupported(gpu(), nullptr, VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME) &&
- (GetPushDescriptorProperties(instance(), gpu()).maxPushDescriptors > 0)) {
- m_device_extension_names.push_back(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME);
- } else {
- printf("%s Push Descriptors and Descriptor Update Template Extensions not supported, skipping tests\n", kSkipPrefix);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- VkDescriptorSetLayoutBinding dsl_binding = {};
- dsl_binding.binding = 0;
- dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
- dsl_binding.descriptorCount = 1;
- dsl_binding.stageFlags = VK_SHADER_STAGE_ALL;
- dsl_binding.pImmutableSamplers = NULL;
-
- const VkDescriptorSetLayoutObj ds_layout_ub(m_device, {dsl_binding});
- const VkDescriptorSetLayoutObj ds_layout_ub1(m_device, {dsl_binding});
- const VkDescriptorSetLayoutObj ds_layout_ub_push(m_device, {dsl_binding},
- VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR);
- const VkPipelineLayoutObj pipeline_layout(m_device, {{&ds_layout_ub, &ds_layout_ub1, &ds_layout_ub_push}});
- PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR =
- (PFN_vkCreateDescriptorUpdateTemplateKHR)vkGetDeviceProcAddr(m_device->device(), "vkCreateDescriptorUpdateTemplateKHR");
- ASSERT_NE(vkCreateDescriptorUpdateTemplateKHR, nullptr);
- PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR =
- (PFN_vkDestroyDescriptorUpdateTemplateKHR)vkGetDeviceProcAddr(m_device->device(), "vkDestroyDescriptorUpdateTemplateKHR");
- ASSERT_NE(vkDestroyDescriptorUpdateTemplateKHR, nullptr);
-
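- // One template entry: dstBinding 0, dstArrayElement 0, one UNIFORM_BUFFER descriptor, data offset 0, stride sizeof(VkBuffer).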
- VkDescriptorUpdateTemplateEntry entries = {0, 0, 1, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 0, sizeof(VkBuffer)};
- VkDescriptorUpdateTemplateCreateInfo create_info = {};
- create_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO;
- create_info.pNext = nullptr;
- create_info.flags = 0;
- create_info.descriptorUpdateEntryCount = 1;
- create_info.pDescriptorUpdateEntries = &entries;
-
- auto do_test = [&](std::string err) {
- VkDescriptorUpdateTemplateKHR dut = VK_NULL_HANDLE;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, err);
- if (VK_SUCCESS == vkCreateDescriptorUpdateTemplateKHR(m_device->handle(), &create_info, nullptr, &dut)) {
- vkDestroyDescriptorUpdateTemplateKHR(m_device->handle(), dut, nullptr);
- }
- m_errorMonitor->VerifyFound();
- };
-
- // Descriptor set type template
- create_info.templateType = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET;
- // descriptorSetLayout is NULL
- do_test("VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00350");
-
- // Push descriptor type template
- create_info.templateType = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR;
- create_info.pipelineBindPoint = VK_PIPELINE_BIND_POINT_COMPUTE;
- create_info.pipelineLayout = pipeline_layout.handle();
- create_info.set = 2;
-
- // Bad bindpoint -- force fuzz the bind point
- memset(&create_info.pipelineBindPoint, 0xFE, sizeof(create_info.pipelineBindPoint));
- do_test("VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00351");
- create_info.pipelineBindPoint = VK_PIPELINE_BIND_POINT_COMPUTE;
-
- // Bad pipeline layout
- create_info.pipelineLayout = VK_NULL_HANDLE;
- do_test("VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00352");
- create_info.pipelineLayout = pipeline_layout.handle();
-
- // Wrong set #
- create_info.set = 0;
- do_test("VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00353");
-
- // Invalid set #
- create_info.set = 42;
- do_test("VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00353");
-}
-
-TEST_F(VkLayerTest, InlineUniformBlockEXT) {
- TEST_DESCRIPTION("Test VK_EXT_inline_uniform_block.");
-
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- } else {
- printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
- VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- std::array<const char *, 2> required_device_extensions = {VK_KHR_MAINTENANCE1_EXTENSION_NAME,
- VK_EXT_INLINE_UNIFORM_BLOCK_EXTENSION_NAME};
- for (auto device_extension : required_device_extensions) {
- if (DeviceExtensionSupported(gpu(), nullptr, device_extension)) {
- m_device_extension_names.push_back(device_extension);
- } else {
- printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, device_extension);
- return;
- }
- }
-
- // Enable descriptor indexing if supported, but don't require it.
- bool supportsDescriptorIndexing = true;
- required_device_extensions = {VK_KHR_MAINTENANCE3_EXTENSION_NAME, VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME};
- for (auto device_extension : required_device_extensions) {
- if (DeviceExtensionSupported(gpu(), nullptr, device_extension)) {
- m_device_extension_names.push_back(device_extension);
- } else {
- printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, device_extension);
- supportsDescriptorIndexing = false;
- return;
- }
- }
-
- PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
- (PFN_vkGetPhysicalDeviceFeatures2KHR)vkGetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
- ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
-
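- // Chain the descriptor indexing features struct only when its extensions were found above.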
- auto descriptor_indexing_features = lvl_init_struct<VkPhysicalDeviceDescriptorIndexingFeaturesEXT>();
- void *pNext = supportsDescriptorIndexing ? &descriptor_indexing_features : nullptr;
- // Create a device that enables inline_uniform_block
- auto inline_uniform_block_features = lvl_init_struct<VkPhysicalDeviceInlineUniformBlockFeaturesEXT>(pNext);
- auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&inline_uniform_block_features);
- vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
-
- PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR =
- (PFN_vkGetPhysicalDeviceProperties2KHR)vkGetInstanceProcAddr(instance(), "vkGetPhysicalDeviceProperties2KHR");
- assert(vkGetPhysicalDeviceProperties2KHR != nullptr);
-
- // Get the inline uniform block limits
- auto inline_uniform_props = lvl_init_struct<VkPhysicalDeviceInlineUniformBlockPropertiesEXT>();
- auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&inline_uniform_props);
- vkGetPhysicalDeviceProperties2KHR(gpu(), &prop2);
-
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
-
- VkDescriptorSetLayoutBinding dslb = {};
- std::vector<VkDescriptorSetLayoutBinding> dslb_vec = {};
- VkDescriptorSetLayoutCreateInfo ds_layout_ci = {};
- ds_layout_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
- VkDescriptorSetLayout ds_layout = {};
-
- // Test too many bindings
- dslb_vec.clear();
- dslb.binding = 0;
- dslb.descriptorType = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT;
- dslb.descriptorCount = 4;
- dslb.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
-
- if (inline_uniform_props.maxInlineUniformBlockSize < dslb.descriptorCount) {
- printf("%sDescriptorCount exceeds InlineUniformBlockSize limit, skipping tests\n", kSkipPrefix);
- return;
- }
-
- uint32_t maxBlocks = std::max(inline_uniform_props.maxPerStageDescriptorInlineUniformBlocks,
- inline_uniform_props.maxDescriptorSetInlineUniformBlocks);
- for (uint32_t i = 0; i < 1 + maxBlocks; ++i) {
- dslb.binding = i;
- dslb_vec.push_back(dslb);
- }
-
- ds_layout_ci.bindingCount = dslb_vec.size();
- ds_layout_ci.pBindings = dslb_vec.data();
- VkResult err = vkCreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
- ASSERT_VK_SUCCESS(err);
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-descriptorType-02214");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-descriptorType-02216");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-descriptorType-02215");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-descriptorType-02217");
-
- VkPipelineLayoutCreateInfo pipeline_layout_ci = {};
- pipeline_layout_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
- pipeline_layout_ci.pNext = NULL;
- pipeline_layout_ci.setLayoutCount = 1;
- pipeline_layout_ci.pSetLayouts = &ds_layout;
- VkPipelineLayout pipeline_layout = VK_NULL_HANDLE;
-
- err = vkCreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
- m_errorMonitor->VerifyFound();
- vkDestroyPipelineLayout(m_device->device(), pipeline_layout, NULL);
- pipeline_layout = VK_NULL_HANDLE;
- vkDestroyDescriptorSetLayout(m_device->device(), ds_layout, nullptr);
- ds_layout = VK_NULL_HANDLE;
-
- // Single binding that's too large and is not a multiple of 4
- dslb.binding = 0;
- dslb.descriptorCount = inline_uniform_props.maxInlineUniformBlockSize + 1;
-
- ds_layout_ci.bindingCount = 1;
- ds_layout_ci.pBindings = &dslb;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDescriptorSetLayoutBinding-descriptorType-02209");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDescriptorSetLayoutBinding-descriptorType-02210");
- err = vkCreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
- m_errorMonitor->VerifyFound();
- vkDestroyDescriptorSetLayout(m_device->device(), ds_layout, nullptr);
- ds_layout = VK_NULL_HANDLE;
-
- // Pool size must be a multiple of 4
- VkDescriptorPoolSize ds_type_count = {};
- ds_type_count.type = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT;
- ds_type_count.descriptorCount = 33;
-
- VkDescriptorPoolCreateInfo ds_pool_ci = {};
- ds_pool_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
- ds_pool_ci.pNext = NULL;
- ds_pool_ci.flags = 0;
- ds_pool_ci.maxSets = 2;
- ds_pool_ci.poolSizeCount = 1;
- ds_pool_ci.pPoolSizes = &ds_type_count;
-
- VkDescriptorPool ds_pool = VK_NULL_HANDLE;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDescriptorPoolSize-type-02218");
- err = vkCreateDescriptorPool(m_device->device(), &ds_pool_ci, NULL, &ds_pool);
- m_errorMonitor->VerifyFound();
- if (ds_pool) {
- vkDestroyDescriptorPool(m_device->handle(), ds_pool, nullptr);
- ds_pool = VK_NULL_HANDLE;
- }
-
- // Create a valid pool
- ds_type_count.descriptorCount = 32;
- err = vkCreateDescriptorPool(m_device->device(), &ds_pool_ci, NULL, &ds_pool);
- m_errorMonitor->VerifyNotFound();
-
- // Create two valid sets with 8 bytes each
- dslb_vec.clear();
- dslb.binding = 0;
- dslb.descriptorType = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT;
- dslb.descriptorCount = 8;
- dslb.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
- dslb_vec.push_back(dslb);
- dslb.binding = 1;
- dslb_vec.push_back(dslb);
-
- ds_layout_ci.bindingCount = dslb_vec.size();
- ds_layout_ci.pBindings = &dslb_vec[0];
-
- err = vkCreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
- m_errorMonitor->VerifyNotFound();
-
- VkDescriptorSet descriptor_sets[2];
- VkDescriptorSetLayout set_layouts[2] = {ds_layout, ds_layout};
- VkDescriptorSetAllocateInfo alloc_info = {};
- alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
- alloc_info.descriptorSetCount = 2;
- alloc_info.descriptorPool = ds_pool;
- alloc_info.pSetLayouts = set_layouts;
- err = vkAllocateDescriptorSets(m_device->device(), &alloc_info, descriptor_sets);
- m_errorMonitor->VerifyNotFound();
-
- // Test invalid VkWriteDescriptorSet parameters (array element and size must be multiple of 4)
- VkWriteDescriptorSet descriptor_write = {};
- descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
- descriptor_write.dstSet = descriptor_sets[0];
- descriptor_write.dstBinding = 0;
- descriptor_write.dstArrayElement = 0;
- descriptor_write.descriptorCount = 3;
- descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT;
-
- uint32_t dummyData[8] = {};
- VkWriteDescriptorSetInlineUniformBlockEXT write_inline_uniform = {};
- write_inline_uniform.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT;
- write_inline_uniform.dataSize = 3;
- write_inline_uniform.pData = &dummyData[0];
- descriptor_write.pNext = &write_inline_uniform;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-descriptorType-02220");
- vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
- m_errorMonitor->VerifyFound();
-
- descriptor_write.dstArrayElement = 1;
- descriptor_write.descriptorCount = 4;
- write_inline_uniform.dataSize = 4;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-descriptorType-02219");
- vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
- m_errorMonitor->VerifyFound();
-
- descriptor_write.pNext = nullptr;
- descriptor_write.dstArrayElement = 0;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-descriptorType-02221");
- vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
- m_errorMonitor->VerifyFound();
-
- descriptor_write.pNext = &write_inline_uniform;
- vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
- m_errorMonitor->VerifyNotFound();
-
- // Test invalid VkCopyDescriptorSet parameters (array element and size must be multiple of 4)
- VkCopyDescriptorSet copy_ds_update = {};
- copy_ds_update.sType = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET;
- copy_ds_update.srcSet = descriptor_sets[0];
- copy_ds_update.srcBinding = 0;
- copy_ds_update.srcArrayElement = 0;
- copy_ds_update.dstSet = descriptor_sets[1];
- copy_ds_update.dstBinding = 0;
- copy_ds_update.dstArrayElement = 0;
- copy_ds_update.descriptorCount = 4;
-
- copy_ds_update.srcArrayElement = 1;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkCopyDescriptorSet-srcBinding-02223");
- vkUpdateDescriptorSets(m_device->device(), 0, NULL, 1, &copy_ds_update);
- m_errorMonitor->VerifyFound();
-
- copy_ds_update.srcArrayElement = 0;
- copy_ds_update.dstArrayElement = 1;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkCopyDescriptorSet-dstBinding-02224");
- vkUpdateDescriptorSets(m_device->device(), 0, NULL, 1, &copy_ds_update);
- m_errorMonitor->VerifyFound();
-
- copy_ds_update.dstArrayElement = 0;
- copy_ds_update.descriptorCount = 5;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkCopyDescriptorSet-srcBinding-02225");
- vkUpdateDescriptorSets(m_device->device(), 0, NULL, 1, &copy_ds_update);
- m_errorMonitor->VerifyFound();
-
- copy_ds_update.descriptorCount = 4;
- vkUpdateDescriptorSets(m_device->device(), 0, NULL, 1, &copy_ds_update);
- m_errorMonitor->VerifyNotFound();
-
- vkDestroyDescriptorPool(m_device->handle(), ds_pool, nullptr);
- vkDestroyDescriptorSetLayout(m_device->device(), ds_layout, nullptr);
-} \ No newline at end of file
diff --git a/tests/vklayertests_imageless_framebuffer.cpp b/tests/vklayertests_imageless_framebuffer.cpp
deleted file mode 100644
index 1139f62d9..000000000
--- a/tests/vklayertests_imageless_framebuffer.cpp
+++ /dev/null
@@ -1,1084 +0,0 @@
-/*
- * Copyright (c) 2015-2019 The Khronos Group Inc.
- * Copyright (c) 2015-2019 Valve Corporation
- * Copyright (c) 2015-2019 LunarG, Inc.
- * Copyright (c) 2015-2019 Google, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Author: Chia-I Wu <olvaffe@gmail.com>
- * Author: Chris Forbes <chrisf@ijw.co.nz>
- * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
- * Author: Mark Lobodzinski <mark@lunarg.com>
- * Author: Mike Stroyan <mike@LunarG.com>
- * Author: Tobin Ehlis <tobine@google.com>
- * Author: Tony Barbour <tony@LunarG.com>
- * Author: Cody Northrop <cnorthrop@google.com>
- * Author: Dave Houlton <daveh@lunarg.com>
- * Author: Jeremy Kniager <jeremyk@lunarg.com>
- * Author: Shannon McPherson <shannon@lunarg.com>
- * Author: John Zulauf <jzulauf@lunarg.com>
- * Author: Tobias Hector <tobias.hector@amd.com>
- */
-
-#include "cast_utils.h"
-#include "layer_validation_tests.h"
-
-TEST_F(VkLayerTest, ImagelessFramebufferRenderPassBeginImageViewMismatchTests) {
- TEST_DESCRIPTION(
- "Begin a renderPass where the image views specified do not match the parameters used to create the framebuffer and render "
- "pass.");
-
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- } else {
- printf("%s Did not find required device extension %s; skipped.\n", kSkipPrefix,
- VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
-
- if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_KHR_MAINTENANCE2_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME);
- } else {
- printf("%s test requires VK_KHR_imageless_framebuffer, not available. Skipping.\n", kSkipPrefix);
- return;
- }
-
- VkPhysicalDeviceImagelessFramebufferFeaturesKHR physicalDeviceImagelessFramebufferFeatures = {};
- physicalDeviceImagelessFramebufferFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR;
- physicalDeviceImagelessFramebufferFeatures.imagelessFramebuffer = VK_TRUE;
- VkPhysicalDeviceFeatures2 physicalDeviceFeatures2 = {};
- physicalDeviceFeatures2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
- physicalDeviceFeatures2.pNext = &physicalDeviceImagelessFramebufferFeatures;
-
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &physicalDeviceFeatures2, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
-
- uint32_t attachmentWidth = 512;
- uint32_t attachmentHeight = 512;
- VkFormat attachmentFormats[2] = {VK_FORMAT_R8G8B8A8_UNORM, VK_FORMAT_B8G8R8A8_UNORM};
- VkFormat framebufferAttachmentFormats[3] = {VK_FORMAT_R8G8B8A8_UNORM, VK_FORMAT_B8G8R8A8_UNORM, VK_FORMAT_B8G8R8A8_UNORM};
-
- // Create a renderPass with a single attachment
- VkAttachmentDescription attachmentDescription = {};
- attachmentDescription.format = attachmentFormats[0];
- attachmentDescription.samples = VK_SAMPLE_COUNT_1_BIT;
- attachmentDescription.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
- VkAttachmentReference attachmentReference = {};
- attachmentReference.layout = VK_IMAGE_LAYOUT_GENERAL;
- VkSubpassDescription subpassDescription = {};
- subpassDescription.colorAttachmentCount = 1;
- subpassDescription.pColorAttachments = &attachmentReference;
- VkRenderPassCreateInfo renderPassCreateInfo = {};
- renderPassCreateInfo.subpassCount = 1;
- renderPassCreateInfo.pSubpasses = &subpassDescription;
- renderPassCreateInfo.attachmentCount = 1;
- renderPassCreateInfo.pAttachments = &attachmentDescription;
- renderPassCreateInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
- VkRenderPass renderPass;
- vkCreateRenderPass(m_device->device(), &renderPassCreateInfo, NULL, &renderPass);
-
- VkFramebufferAttachmentImageInfoKHR framebufferAttachmentImageInfo = {};
- framebufferAttachmentImageInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR;
- framebufferAttachmentImageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
- framebufferAttachmentImageInfo.width = attachmentWidth;
- framebufferAttachmentImageInfo.height = attachmentHeight;
- framebufferAttachmentImageInfo.layerCount = 1;
- framebufferAttachmentImageInfo.viewFormatCount = 2;
- framebufferAttachmentImageInfo.pViewFormats = framebufferAttachmentFormats;
- VkFramebufferAttachmentsCreateInfoKHR framebufferAttachmentsCreateInfo = {};
- framebufferAttachmentsCreateInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR;
- framebufferAttachmentsCreateInfo.attachmentImageInfoCount = 1;
- framebufferAttachmentsCreateInfo.pAttachmentImageInfos = &framebufferAttachmentImageInfo;
- VkFramebufferCreateInfo framebufferCreateInfo = {};
- framebufferCreateInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
- framebufferCreateInfo.pNext = &framebufferAttachmentsCreateInfo;
- framebufferCreateInfo.flags = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR;
- framebufferCreateInfo.width = attachmentWidth;
- framebufferCreateInfo.height = attachmentHeight;
- framebufferCreateInfo.layers = 1;
- framebufferCreateInfo.attachmentCount = 1;
- framebufferCreateInfo.pAttachments = nullptr;
- framebufferCreateInfo.renderPass = renderPass;
- VkFramebuffer framebuffer;
-
- VkImageFormatListCreateInfoKHR imageFormatListCreateInfo = {};
- imageFormatListCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR;
- imageFormatListCreateInfo.viewFormatCount = 2;
- imageFormatListCreateInfo.pViewFormats = attachmentFormats;
- VkImageCreateInfo imageCreateInfo = {};
- imageCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- imageCreateInfo.pNext = &imageFormatListCreateInfo;
- imageCreateInfo.flags = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
- imageCreateInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
- imageCreateInfo.extent.width = attachmentWidth;
- imageCreateInfo.extent.height = attachmentHeight;
- imageCreateInfo.extent.depth = 1;
- imageCreateInfo.arrayLayers = 1;
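- // A 512x512 image supports a full mip chain of 10 levels (log2(512) + 1).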
- imageCreateInfo.mipLevels = 10;
- imageCreateInfo.imageType = VK_IMAGE_TYPE_2D;
- imageCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
- imageCreateInfo.format = attachmentFormats[0];
-
- VkImageObj imageObject(m_device);
- imageObject.init(&imageCreateInfo);
- VkImage image = imageObject.image();
-
- VkImageViewCreateInfo imageViewCreateInfo = {};
- imageViewCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
- imageViewCreateInfo.image = image;
- imageViewCreateInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
- imageViewCreateInfo.format = attachmentFormats[0];
- imageViewCreateInfo.subresourceRange.layerCount = 1;
- imageViewCreateInfo.subresourceRange.levelCount = 1;
- imageViewCreateInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- VkImageView imageView;
- vkCreateImageView(m_device->device(), &imageViewCreateInfo, NULL, &imageView);
-
- VkRenderPassAttachmentBeginInfoKHR renderPassAttachmentBeginInfo = {};
- renderPassAttachmentBeginInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR;
- renderPassAttachmentBeginInfo.pNext = nullptr;
- renderPassAttachmentBeginInfo.attachmentCount = 1;
- renderPassAttachmentBeginInfo.pAttachments = &imageView;
- VkRenderPassBeginInfo renderPassBeginInfo = {};
- renderPassBeginInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
- renderPassBeginInfo.pNext = &renderPassAttachmentBeginInfo;
- renderPassBeginInfo.renderPass = renderPass;
- renderPassBeginInfo.renderArea.extent.width = attachmentWidth;
- renderPassBeginInfo.renderArea.extent.height = attachmentHeight;
-
- // Imageless framebuffer creation bit not present
- framebufferCreateInfo.pAttachments = &imageView;
- framebufferCreateInfo.flags = 0;
- vkCreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
- renderPassBeginInfo.framebuffer = framebuffer;
- TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &renderPassBeginInfo, rp2Supported,
- "VUID-VkRenderPassBeginInfo-framebuffer-03207", "VUID-VkRenderPassBeginInfo-framebuffer-03207");
- vkDestroyFramebuffer(m_device->device(), framebuffer, nullptr);
- framebufferCreateInfo.pAttachments = nullptr;
- framebufferCreateInfo.flags = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR;
-
- // Mismatched number of attachments
- vkCreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
- renderPassAttachmentBeginInfo.attachmentCount = 2;
- renderPassBeginInfo.framebuffer = framebuffer;
- TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &renderPassBeginInfo, rp2Supported,
- "VUID-VkRenderPassBeginInfo-framebuffer-03208", "VUID-VkRenderPassBeginInfo-framebuffer-03208");
- renderPassAttachmentBeginInfo.attachmentCount = 1;
- vkDestroyFramebuffer(m_device->device(), framebuffer, nullptr);
-
- // Mismatched flags
- framebufferAttachmentImageInfo.flags = 0;
- vkCreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
- renderPassBeginInfo.framebuffer = framebuffer;
- TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &renderPassBeginInfo, rp2Supported,
- "VUID-VkRenderPassBeginInfo-framebuffer-03209", "VUID-VkRenderPassBeginInfo-framebuffer-03209");
- vkDestroyFramebuffer(m_device->device(), framebuffer, nullptr);
- framebufferAttachmentImageInfo.flags = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
-
- // Mismatched usage
- framebufferAttachmentImageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
- vkCreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
- renderPassBeginInfo.framebuffer = framebuffer;
- TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &renderPassBeginInfo, rp2Supported,
- "VUID-VkRenderPassBeginInfo-framebuffer-03210", "VUID-VkRenderPassBeginInfo-framebuffer-03210");
- vkDestroyFramebuffer(m_device->device(), framebuffer, nullptr);
- framebufferAttachmentImageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
-
- // Mismatched width
- framebufferAttachmentImageInfo.width += 1;
- vkCreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
- renderPassBeginInfo.framebuffer = framebuffer;
- TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &renderPassBeginInfo, rp2Supported,
- "VUID-VkRenderPassBeginInfo-framebuffer-03211", "VUID-VkRenderPassBeginInfo-framebuffer-03211");
- vkDestroyFramebuffer(m_device->device(), framebuffer, nullptr);
- framebufferAttachmentImageInfo.width -= 1;
-
- // Mismatched height
- framebufferAttachmentImageInfo.height += 1;
- vkCreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
- renderPassBeginInfo.framebuffer = framebuffer;
- TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &renderPassBeginInfo, rp2Supported,
- "VUID-VkRenderPassBeginInfo-framebuffer-03212", "VUID-VkRenderPassBeginInfo-framebuffer-03212");
- vkDestroyFramebuffer(m_device->device(), framebuffer, nullptr);
- framebufferAttachmentImageInfo.height -= 1;
-
- // Mismatched layer count
- framebufferAttachmentImageInfo.layerCount += 1;
- vkCreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
- renderPassBeginInfo.framebuffer = framebuffer;
- TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &renderPassBeginInfo, rp2Supported,
- "VUID-VkRenderPassBeginInfo-framebuffer-03213", "VUID-VkRenderPassBeginInfo-framebuffer-03213");
- vkDestroyFramebuffer(m_device->device(), framebuffer, nullptr);
- framebufferAttachmentImageInfo.layerCount -= 1;
-
- // Mismatched view format count
- framebufferAttachmentImageInfo.viewFormatCount = 3;
- vkCreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
- renderPassBeginInfo.framebuffer = framebuffer;
- TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &renderPassBeginInfo, rp2Supported,
- "VUID-VkRenderPassBeginInfo-framebuffer-03214", "VUID-VkRenderPassBeginInfo-framebuffer-03214");
- vkDestroyFramebuffer(m_device->device(), framebuffer, nullptr);
- framebufferAttachmentImageInfo.viewFormatCount = 2;
-
- // Mismatched format lists
- framebufferAttachmentFormats[1] = VK_FORMAT_B8G8R8A8_SRGB;
- vkCreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
- renderPassBeginInfo.framebuffer = framebuffer;
- TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &renderPassBeginInfo, rp2Supported,
- "VUID-VkRenderPassBeginInfo-framebuffer-03215", "VUID-VkRenderPassBeginInfo-framebuffer-03215");
- vkDestroyFramebuffer(m_device->device(), framebuffer, nullptr);
- framebufferAttachmentFormats[1] = VK_FORMAT_B8G8R8A8_UNORM;
-
- // Mismatched formats
- VkImageView imageView2;
- imageViewCreateInfo.format = attachmentFormats[1];
- vkCreateImageView(m_device->device(), &imageViewCreateInfo, nullptr, &imageView2);
- renderPassAttachmentBeginInfo.pAttachments = &imageView2;
- vkCreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
- renderPassBeginInfo.framebuffer = framebuffer;
- TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &renderPassBeginInfo, rp2Supported,
- "VUID-VkRenderPassBeginInfo-framebuffer-03216", "VUID-VkRenderPassBeginInfo-framebuffer-03216");
- vkDestroyFramebuffer(m_device->device(), framebuffer, nullptr);
- vkDestroyImageView(m_device->device(), imageView2, nullptr);
- renderPassAttachmentBeginInfo.pAttachments = &imageView;
- imageViewCreateInfo.format = attachmentFormats[0];
-
- // Mismatched sample counts
- imageCreateInfo.samples = VK_SAMPLE_COUNT_4_BIT;
- imageCreateInfo.mipLevels = 1;
- VkImageObj imageObject2(m_device);
- imageObject2.init(&imageCreateInfo);
- imageViewCreateInfo.image = imageObject2.image();
- vkCreateImageView(m_device->device(), &imageViewCreateInfo, nullptr, &imageView2);
- renderPassAttachmentBeginInfo.pAttachments = &imageView2;
- vkCreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
- renderPassBeginInfo.framebuffer = framebuffer;
- TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &renderPassBeginInfo, rp2Supported,
- "VUID-VkRenderPassBeginInfo-framebuffer-03217", "VUID-VkRenderPassBeginInfo-framebuffer-03217");
- vkDestroyFramebuffer(m_device->device(), framebuffer, nullptr);
- vkDestroyImageView(m_device->device(), imageView2, nullptr);
- renderPassAttachmentBeginInfo.pAttachments = &imageView;
- imageViewCreateInfo.image = imageObject.image();
- imageCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
- imageCreateInfo.mipLevels = 10;
-
- // Mismatched level counts
- imageViewCreateInfo.subresourceRange.levelCount = 2;
- vkCreateImageView(m_device->device(), &imageViewCreateInfo, nullptr, &imageView2);
- renderPassAttachmentBeginInfo.pAttachments = &imageView2;
- vkCreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
- renderPassBeginInfo.framebuffer = framebuffer;
- TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &renderPassBeginInfo, rp2Supported,
- "VUID-VkRenderPassAttachmentBeginInfoKHR-pAttachments-03218",
- "VUID-VkRenderPassAttachmentBeginInfoKHR-pAttachments-03218");
- vkDestroyFramebuffer(m_device->device(), framebuffer, nullptr);
- vkDestroyImageView(m_device->device(), imageView2, nullptr);
- renderPassAttachmentBeginInfo.pAttachments = &imageView;
- imageViewCreateInfo.subresourceRange.levelCount = 1;
-
- // Non-identity component swizzle
- imageViewCreateInfo.components.r = VK_COMPONENT_SWIZZLE_A;
- vkCreateImageView(m_device->device(), &imageViewCreateInfo, nullptr, &imageView2);
- renderPassAttachmentBeginInfo.pAttachments = &imageView2;
- vkCreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
- renderPassBeginInfo.framebuffer = framebuffer;
- TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &renderPassBeginInfo, rp2Supported,
- "VUID-VkRenderPassAttachmentBeginInfoKHR-pAttachments-03219",
- "VUID-VkRenderPassAttachmentBeginInfoKHR-pAttachments-03219");
- vkDestroyFramebuffer(m_device->device(), framebuffer, nullptr);
- vkDestroyImageView(m_device->device(), imageView2, nullptr);
- renderPassAttachmentBeginInfo.pAttachments = &imageView;
- imageViewCreateInfo.components.r = VK_COMPONENT_SWIZZLE_IDENTITY;
-
- vkDestroyRenderPass(m_device->device(), renderPass, nullptr);
- // vkDestroyFramebuffer(m_device->device(), framebuffer, nullptr);
- vkDestroyImageView(m_device->device(), imageView, nullptr);
-}
-
-TEST_F(VkLayerTest, ImagelessFramebufferFeatureEnableTest) {
- TEST_DESCRIPTION("Use imageless framebuffer functionality without enabling the feature");
-
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- } else {
- printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
- VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_KHR_MAINTENANCE2_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME);
- } else {
- printf("%s Did not find required device extension %s; skipped.\n", kSkipPrefix,
- VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
-
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- uint32_t attachmentWidth = 512;
- uint32_t attachmentHeight = 512;
- VkFormat attachmentFormat = VK_FORMAT_R8G8B8A8_UNORM;
-
- // Create a renderPass with a single attachment
- VkAttachmentDescription attachmentDescription = {};
- attachmentDescription.format = attachmentFormat;
- attachmentDescription.samples = VK_SAMPLE_COUNT_1_BIT;
- attachmentDescription.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
- VkAttachmentReference attachmentReference = {};
- attachmentReference.layout = VK_IMAGE_LAYOUT_GENERAL;
- VkSubpassDescription subpassDescription = {};
- subpassDescription.colorAttachmentCount = 1;
- subpassDescription.pColorAttachments = &attachmentReference;
- VkRenderPassCreateInfo renderPassCreateInfo = {};
- renderPassCreateInfo.subpassCount = 1;
- renderPassCreateInfo.pSubpasses = &subpassDescription;
- renderPassCreateInfo.attachmentCount = 1;
- renderPassCreateInfo.pAttachments = &attachmentDescription;
- renderPassCreateInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
- VkRenderPass renderPass;
- vkCreateRenderPass(m_device->device(), &renderPassCreateInfo, NULL, &renderPass);
-
- VkFramebufferAttachmentImageInfoKHR framebufferAttachmentImageInfo = {};
- framebufferAttachmentImageInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR;
- framebufferAttachmentImageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
- framebufferAttachmentImageInfo.width = attachmentWidth;
- framebufferAttachmentImageInfo.height = attachmentHeight;
- framebufferAttachmentImageInfo.layerCount = 1;
- framebufferAttachmentImageInfo.viewFormatCount = 1;
- framebufferAttachmentImageInfo.pViewFormats = &attachmentFormat;
- VkFramebufferAttachmentsCreateInfoKHR framebufferAttachmentsCreateInfo = {};
- framebufferAttachmentsCreateInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR;
- framebufferAttachmentsCreateInfo.attachmentImageInfoCount = 1;
- framebufferAttachmentsCreateInfo.pAttachmentImageInfos = &framebufferAttachmentImageInfo;
- VkFramebufferCreateInfo framebufferCreateInfo = {};
- framebufferCreateInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
- framebufferCreateInfo.pNext = &framebufferAttachmentsCreateInfo;
- framebufferCreateInfo.flags = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR;
- framebufferCreateInfo.width = attachmentWidth;
- framebufferCreateInfo.height = attachmentHeight;
- framebufferCreateInfo.layers = 1;
- framebufferCreateInfo.renderPass = renderPass;
- framebufferCreateInfo.attachmentCount = 1;
- VkFramebuffer framebuffer = VK_NULL_HANDLE;
-
- // Imageless framebuffer flag specified without enabling the imagelessFramebuffer feature
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-flags-03189");
- vkCreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
- m_errorMonitor->VerifyFound();
-
- if (framebuffer != VK_NULL_HANDLE) {
- vkDestroyFramebuffer(m_device->device(), framebuffer, nullptr);
- }
- vkDestroyRenderPass(m_device->device(), renderPass, nullptr);
-}
-
-TEST_F(VkLayerTest, ImagelessFramebufferCreationTests) {
- TEST_DESCRIPTION("Create an imageless framebuffer in various invalid ways");
-
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- } else {
- printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
- VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
-
- bool multiviewSupported = rp2Supported;
- if (!rp2Supported) {
- if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MULTIVIEW_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_KHR_MULTIVIEW_EXTENSION_NAME);
- multiviewSupported = true;
- }
- }
-
- if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_KHR_MAINTENANCE2_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME);
- } else {
- printf("%s Did not find required device extension %s; skipped.\n", kSkipPrefix,
- VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME);
- return;
- }
-
- VkPhysicalDeviceImagelessFramebufferFeaturesKHR physicalDeviceImagelessFramebufferFeatures = {};
- physicalDeviceImagelessFramebufferFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR;
- physicalDeviceImagelessFramebufferFeatures.imagelessFramebuffer = VK_TRUE;
- VkPhysicalDeviceFeatures2 physicalDeviceFeatures2 = {};
- physicalDeviceFeatures2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
- physicalDeviceFeatures2.pNext = &physicalDeviceImagelessFramebufferFeatures;
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &physicalDeviceFeatures2, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
-
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- uint32_t attachmentWidth = 512;
- uint32_t attachmentHeight = 512;
- VkFormat attachmentFormat = VK_FORMAT_R8G8B8A8_UNORM;
-
- // Create a renderPass with a single attachment
- VkAttachmentDescription attachmentDescription = {};
- attachmentDescription.format = attachmentFormat;
- attachmentDescription.samples = VK_SAMPLE_COUNT_1_BIT;
- attachmentDescription.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
- VkAttachmentReference attachmentReference = {};
- attachmentReference.layout = VK_IMAGE_LAYOUT_GENERAL;
- VkSubpassDescription subpassDescription = {};
- subpassDescription.colorAttachmentCount = 1;
- subpassDescription.pColorAttachments = &attachmentReference;
- VkRenderPassCreateInfo renderPassCreateInfo = {};
- renderPassCreateInfo.subpassCount = 1;
- renderPassCreateInfo.pSubpasses = &subpassDescription;
- renderPassCreateInfo.attachmentCount = 1;
- renderPassCreateInfo.pAttachments = &attachmentDescription;
- renderPassCreateInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
- VkRenderPass renderPass;
- vkCreateRenderPass(m_device->device(), &renderPassCreateInfo, NULL, &renderPass);
-
- VkFramebufferAttachmentImageInfoKHR framebufferAttachmentImageInfo = {};
- framebufferAttachmentImageInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR;
- framebufferAttachmentImageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
- framebufferAttachmentImageInfo.width = attachmentWidth;
- framebufferAttachmentImageInfo.height = attachmentHeight;
- framebufferAttachmentImageInfo.layerCount = 1;
- framebufferAttachmentImageInfo.viewFormatCount = 1;
- framebufferAttachmentImageInfo.pViewFormats = &attachmentFormat;
- VkFramebufferAttachmentsCreateInfoKHR framebufferAttachmentsCreateInfo = {};
- framebufferAttachmentsCreateInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR;
- framebufferAttachmentsCreateInfo.attachmentImageInfoCount = 1;
- framebufferAttachmentsCreateInfo.pAttachmentImageInfos = &framebufferAttachmentImageInfo;
- VkFramebufferCreateInfo framebufferCreateInfo = {};
- framebufferCreateInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
- framebufferCreateInfo.pNext = &framebufferAttachmentsCreateInfo;
- framebufferCreateInfo.flags = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR;
- framebufferCreateInfo.width = attachmentWidth;
- framebufferCreateInfo.height = attachmentHeight;
- framebufferCreateInfo.layers = 1;
- framebufferCreateInfo.renderPass = renderPass;
- framebufferCreateInfo.attachmentCount = 1;
- VkFramebuffer framebuffer = VK_NULL_HANDLE;
-
- // Attachments info not present
- framebufferCreateInfo.pNext = nullptr;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-flags-03190");
- vkCreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
- m_errorMonitor->VerifyFound();
- if (framebuffer != VK_NULL_HANDLE) {
- vkDestroyFramebuffer(m_device->device(), framebuffer, nullptr);
- }
- framebufferCreateInfo.pNext = &framebufferAttachmentsCreateInfo;
-
- // Mismatched attachment counts
- framebufferAttachmentsCreateInfo.attachmentImageInfoCount = 2;
- VkFramebufferAttachmentImageInfoKHR framebufferAttachmentImageInfos[2] = {framebufferAttachmentImageInfo,
- framebufferAttachmentImageInfo};
- framebufferAttachmentsCreateInfo.pAttachmentImageInfos = framebufferAttachmentImageInfos;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-flags-03191");
- vkCreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
- m_errorMonitor->VerifyFound();
- if (framebuffer != VK_NULL_HANDLE) {
- vkDestroyFramebuffer(m_device->device(), framebuffer, nullptr);
- }
- framebufferAttachmentsCreateInfo.pAttachmentImageInfos = &framebufferAttachmentImageInfo;
- framebufferAttachmentsCreateInfo.attachmentImageInfoCount = 1;
-
- // Mismatched format list
- attachmentFormat = VK_FORMAT_B8G8R8A8_UNORM;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-flags-03205");
- vkCreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
- m_errorMonitor->VerifyFound();
- if (framebuffer != VK_NULL_HANDLE) {
- vkDestroyFramebuffer(m_device->device(), framebuffer, nullptr);
- }
- attachmentFormat = VK_FORMAT_R8G8B8A8_UNORM;
-
- // Mismatched layer count, multiview disabled
- framebufferCreateInfo.layers = 2;
- const char* mismatchedLayersNoMultiviewVuid =
- multiviewSupported ? "VUID-VkFramebufferCreateInfo-renderPass-03199" : "VUID-VkFramebufferCreateInfo-flags-03200";
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, mismatchedLayersNoMultiviewVuid);
- vkCreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
- m_errorMonitor->VerifyFound();
- if (framebuffer != VK_NULL_HANDLE) {
- vkDestroyFramebuffer(m_device->device(), framebuffer, nullptr);
- }
- framebufferCreateInfo.layers = 1;
-
- // Mismatched width
- framebufferCreateInfo.width += 1;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-flags-03192");
- vkCreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
- m_errorMonitor->VerifyFound();
- if (framebuffer != VK_NULL_HANDLE) {
- vkDestroyFramebuffer(m_device->device(), framebuffer, nullptr);
- }
- framebufferCreateInfo.width -= 1;
-
- // Mismatched height
- framebufferCreateInfo.height += 1;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-flags-03193");
- vkCreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
- m_errorMonitor->VerifyFound();
- if (framebuffer != VK_NULL_HANDLE) {
- vkDestroyFramebuffer(m_device->device(), framebuffer, nullptr);
- }
- framebufferCreateInfo.height -= 1;
-
- vkDestroyRenderPass(m_device->device(), renderPass, nullptr);
-}
-
-TEST_F(VkLayerTest, ImagelessFramebufferAttachmentImageUsageMismatchTests) {
- TEST_DESCRIPTION("Create an imageless framebuffer with mismatched attachment image usage");
-
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- } else {
- printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
- VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_KHR_MAINTENANCE2_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME);
- } else {
- printf("%s Did not find required device extension %s; skipped.\n", kSkipPrefix,
- VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME);
- return;
- }
-
- VkPhysicalDeviceImagelessFramebufferFeaturesKHR physicalDeviceImagelessFramebufferFeatures = {};
- physicalDeviceImagelessFramebufferFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR;
- physicalDeviceImagelessFramebufferFeatures.imagelessFramebuffer = VK_TRUE;
- VkPhysicalDeviceFeatures2 physicalDeviceFeatures2 = {};
- physicalDeviceFeatures2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
- physicalDeviceFeatures2.pNext = &physicalDeviceImagelessFramebufferFeatures;
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &physicalDeviceFeatures2, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
-
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- uint32_t attachmentWidth = 512;
- uint32_t attachmentHeight = 512;
- VkFormat colorAndInputAttachmentFormat = VK_FORMAT_R8G8B8A8_UNORM;
- VkFormat depthStencilAttachmentFormat = VK_FORMAT_D32_SFLOAT_S8_UINT;
-
- VkAttachmentDescription attachmentDescriptions[4] = {};
- // Color attachment
- attachmentDescriptions[0].format = colorAndInputAttachmentFormat;
- attachmentDescriptions[0].samples = VK_SAMPLE_COUNT_4_BIT;
- attachmentDescriptions[0].finalLayout = VK_IMAGE_LAYOUT_GENERAL;
- // Color resolve attachment
- attachmentDescriptions[1].format = colorAndInputAttachmentFormat;
- attachmentDescriptions[1].samples = VK_SAMPLE_COUNT_1_BIT;
- attachmentDescriptions[1].finalLayout = VK_IMAGE_LAYOUT_GENERAL;
- // Depth stencil attachment
- attachmentDescriptions[2].format = depthStencilAttachmentFormat;
- attachmentDescriptions[2].samples = VK_SAMPLE_COUNT_4_BIT;
- attachmentDescriptions[2].finalLayout = VK_IMAGE_LAYOUT_GENERAL;
- // Input attachment
- attachmentDescriptions[3].format = colorAndInputAttachmentFormat;
- attachmentDescriptions[3].samples = VK_SAMPLE_COUNT_1_BIT;
- attachmentDescriptions[3].finalLayout = VK_IMAGE_LAYOUT_GENERAL;
-
- VkAttachmentReference colorAttachmentReference = {};
- colorAttachmentReference.layout = VK_IMAGE_LAYOUT_GENERAL;
- colorAttachmentReference.attachment = 0;
- VkAttachmentReference colorResolveAttachmentReference = {};
- colorResolveAttachmentReference.layout = VK_IMAGE_LAYOUT_GENERAL;
- colorResolveAttachmentReference.attachment = 1;
- VkAttachmentReference depthStencilAttachmentReference = {};
- depthStencilAttachmentReference.layout = VK_IMAGE_LAYOUT_GENERAL;
- depthStencilAttachmentReference.attachment = 2;
- VkAttachmentReference inputAttachmentReference = {};
- inputAttachmentReference.layout = VK_IMAGE_LAYOUT_GENERAL;
- inputAttachmentReference.attachment = 3;
- VkSubpassDescription subpassDescription = {};
- subpassDescription.colorAttachmentCount = 1;
- subpassDescription.pColorAttachments = &colorAttachmentReference;
- subpassDescription.pResolveAttachments = &colorResolveAttachmentReference;
- subpassDescription.pDepthStencilAttachment = &depthStencilAttachmentReference;
- subpassDescription.inputAttachmentCount = 1;
- subpassDescription.pInputAttachments = &inputAttachmentReference;
-
- VkRenderPassCreateInfo renderPassCreateInfo = {};
- renderPassCreateInfo.attachmentCount = 4;
- renderPassCreateInfo.subpassCount = 1;
- renderPassCreateInfo.pSubpasses = &subpassDescription;
- renderPassCreateInfo.pAttachments = attachmentDescriptions;
- renderPassCreateInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
- VkRenderPass renderPass;
- vkCreateRenderPass(m_device->device(), &renderPassCreateInfo, nullptr, &renderPass);
-
- VkFramebufferAttachmentImageInfoKHR framebufferAttachmentImageInfos[4] = {};
- // Color attachment
- framebufferAttachmentImageInfos[0].sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR;
- framebufferAttachmentImageInfos[0].width = attachmentWidth;
- framebufferAttachmentImageInfos[0].height = attachmentHeight;
- framebufferAttachmentImageInfos[0].usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
- framebufferAttachmentImageInfos[0].layerCount = 1;
- framebufferAttachmentImageInfos[0].viewFormatCount = 1;
- framebufferAttachmentImageInfos[0].pViewFormats = &colorAndInputAttachmentFormat;
- // Color resolve attachment
- framebufferAttachmentImageInfos[1].sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR;
- framebufferAttachmentImageInfos[1].width = attachmentWidth;
- framebufferAttachmentImageInfos[1].height = attachmentHeight;
- framebufferAttachmentImageInfos[1].usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
- framebufferAttachmentImageInfos[1].layerCount = 1;
- framebufferAttachmentImageInfos[1].viewFormatCount = 1;
- framebufferAttachmentImageInfos[1].pViewFormats = &colorAndInputAttachmentFormat;
- // Depth stencil attachment
- framebufferAttachmentImageInfos[2].sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR;
- framebufferAttachmentImageInfos[2].width = attachmentWidth;
- framebufferAttachmentImageInfos[2].height = attachmentHeight;
- framebufferAttachmentImageInfos[2].usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
- framebufferAttachmentImageInfos[2].layerCount = 1;
- framebufferAttachmentImageInfos[2].viewFormatCount = 1;
- framebufferAttachmentImageInfos[2].pViewFormats = &depthStencilAttachmentFormat;
- // Input attachment
- framebufferAttachmentImageInfos[3].sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR;
- framebufferAttachmentImageInfos[3].width = attachmentWidth;
- framebufferAttachmentImageInfos[3].height = attachmentHeight;
- framebufferAttachmentImageInfos[3].usage = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
- framebufferAttachmentImageInfos[3].layerCount = 1;
- framebufferAttachmentImageInfos[3].viewFormatCount = 1;
- framebufferAttachmentImageInfos[3].pViewFormats = &colorAndInputAttachmentFormat;
- VkFramebufferAttachmentsCreateInfoKHR framebufferAttachmentsCreateInfo = {};
- framebufferAttachmentsCreateInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR;
- framebufferAttachmentsCreateInfo.attachmentImageInfoCount = 4;
- framebufferAttachmentsCreateInfo.pAttachmentImageInfos = framebufferAttachmentImageInfos;
- VkFramebufferCreateInfo framebufferCreateInfo = {};
- framebufferCreateInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
- framebufferCreateInfo.pNext = &framebufferAttachmentsCreateInfo;
- framebufferCreateInfo.flags = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR;
- framebufferCreateInfo.width = attachmentWidth;
- framebufferCreateInfo.height = attachmentHeight;
- framebufferCreateInfo.layers = 1;
- framebufferCreateInfo.renderPass = renderPass;
- framebufferCreateInfo.attachmentCount = 4;
- VkFramebuffer framebuffer = VK_NULL_HANDLE;
-
- // Color attachment, mismatched usage
- framebufferAttachmentImageInfos[0].usage = VK_IMAGE_USAGE_SAMPLED_BIT;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-flags-03201");
- vkCreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
- m_errorMonitor->VerifyFound();
- if (framebuffer != VK_NULL_HANDLE) {
- vkDestroyFramebuffer(m_device->device(), framebuffer, nullptr);
- }
- framebufferAttachmentImageInfos[0].usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
-
- // Color resolve attachment, mismatched usage
- framebufferAttachmentImageInfos[1].usage = VK_IMAGE_USAGE_SAMPLED_BIT;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-flags-03201");
- vkCreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
- m_errorMonitor->VerifyFound();
- if (framebuffer != VK_NULL_HANDLE) {
- vkDestroyFramebuffer(m_device->device(), framebuffer, nullptr);
- }
- framebufferAttachmentImageInfos[1].usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
-
- // Depth stencil attachment, mismatched usage
- framebufferAttachmentImageInfos[2].usage = VK_IMAGE_USAGE_SAMPLED_BIT;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-flags-03202");
- vkCreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
- m_errorMonitor->VerifyFound();
- if (framebuffer != VK_NULL_HANDLE) {
- vkDestroyFramebuffer(m_device->device(), framebuffer, nullptr);
- }
- framebufferAttachmentImageInfos[2].usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
-
- // Input attachment, mismatched usage
- framebufferAttachmentImageInfos[3].usage = VK_IMAGE_USAGE_SAMPLED_BIT;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-flags-03204");
- vkCreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
- m_errorMonitor->VerifyFound();
- if (framebuffer != VK_NULL_HANDLE) {
- vkDestroyFramebuffer(m_device->device(), framebuffer, nullptr);
- }
- framebufferAttachmentImageInfos[3].usage = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
-
- vkDestroyRenderPass(m_device->device(), renderPass, nullptr);
-}
-
-TEST_F(VkLayerTest, ImagelessFramebufferAttachmentMultiviewImageLayerCountMismatchTests) {
- TEST_DESCRIPTION("Create an imageless framebuffer against a multiview-enabled render pass with mismatched layer counts");
-
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- } else {
- printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
- VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MULTIVIEW_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_KHR_MULTIVIEW_EXTENSION_NAME);
- } else {
- printf("%s Did not find required device extension %s; skipped.\n", kSkipPrefix, VK_KHR_MULTIVIEW_EXTENSION_NAME);
- return;
- }
-
- if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_KHR_MAINTENANCE2_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME);
- } else {
- printf("%s Did not find required device extension %s; skipped.\n", kSkipPrefix,
- VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME);
- return;
- }
-
- VkPhysicalDeviceImagelessFramebufferFeaturesKHR physicalDeviceImagelessFramebufferFeatures = {};
- physicalDeviceImagelessFramebufferFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR;
- physicalDeviceImagelessFramebufferFeatures.imagelessFramebuffer = VK_TRUE;
- VkPhysicalDeviceFeatures2 physicalDeviceFeatures2 = {};
- physicalDeviceFeatures2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
- physicalDeviceFeatures2.pNext = &physicalDeviceImagelessFramebufferFeatures;
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &physicalDeviceFeatures2, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
-
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- uint32_t attachmentWidth = 512;
- uint32_t attachmentHeight = 512;
- VkFormat colorAndInputAttachmentFormat = VK_FORMAT_R8G8B8A8_UNORM;
- VkFormat depthStencilAttachmentFormat = VK_FORMAT_D32_SFLOAT_S8_UINT;
-
- VkAttachmentDescription attachmentDescriptions[4] = {};
- // Color attachment
- attachmentDescriptions[0].format = colorAndInputAttachmentFormat;
- attachmentDescriptions[0].samples = VK_SAMPLE_COUNT_4_BIT;
- attachmentDescriptions[0].finalLayout = VK_IMAGE_LAYOUT_GENERAL;
- // Color resolve attachment
- attachmentDescriptions[1].format = colorAndInputAttachmentFormat;
- attachmentDescriptions[1].samples = VK_SAMPLE_COUNT_1_BIT;
- attachmentDescriptions[1].finalLayout = VK_IMAGE_LAYOUT_GENERAL;
- // Depth stencil attachment
- attachmentDescriptions[2].format = depthStencilAttachmentFormat;
- attachmentDescriptions[2].samples = VK_SAMPLE_COUNT_4_BIT;
- attachmentDescriptions[2].finalLayout = VK_IMAGE_LAYOUT_GENERAL;
- // Input attachment
- attachmentDescriptions[3].format = colorAndInputAttachmentFormat;
- attachmentDescriptions[3].samples = VK_SAMPLE_COUNT_1_BIT;
- attachmentDescriptions[3].finalLayout = VK_IMAGE_LAYOUT_GENERAL;
-
- VkAttachmentReference colorAttachmentReference = {};
- colorAttachmentReference.layout = VK_IMAGE_LAYOUT_GENERAL;
- colorAttachmentReference.attachment = 0;
- VkAttachmentReference colorResolveAttachmentReference = {};
- colorResolveAttachmentReference.layout = VK_IMAGE_LAYOUT_GENERAL;
- colorResolveAttachmentReference.attachment = 1;
- VkAttachmentReference depthStencilAttachmentReference = {};
- depthStencilAttachmentReference.layout = VK_IMAGE_LAYOUT_GENERAL;
- depthStencilAttachmentReference.attachment = 2;
- VkAttachmentReference inputAttachmentReference = {};
- inputAttachmentReference.layout = VK_IMAGE_LAYOUT_GENERAL;
- inputAttachmentReference.attachment = 3;
- VkSubpassDescription subpassDescription = {};
- subpassDescription.colorAttachmentCount = 1;
- subpassDescription.pColorAttachments = &colorAttachmentReference;
- subpassDescription.pResolveAttachments = &colorResolveAttachmentReference;
- subpassDescription.pDepthStencilAttachment = &depthStencilAttachmentReference;
- subpassDescription.inputAttachmentCount = 1;
- subpassDescription.pInputAttachments = &inputAttachmentReference;
-
- uint32_t viewMask = 0x3u;
- VkRenderPassMultiviewCreateInfo renderPassMultiviewCreateInfo = {};
- renderPassMultiviewCreateInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO;
- renderPassMultiviewCreateInfo.subpassCount = 1;
- renderPassMultiviewCreateInfo.pViewMasks = &viewMask;
- VkRenderPassCreateInfo renderPassCreateInfo = {};
- renderPassCreateInfo.pNext = &renderPassMultiviewCreateInfo;
- renderPassCreateInfo.attachmentCount = 4;
- renderPassCreateInfo.subpassCount = 1;
- renderPassCreateInfo.pSubpasses = &subpassDescription;
- renderPassCreateInfo.pAttachments = attachmentDescriptions;
- renderPassCreateInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
- VkRenderPass renderPass;
- vkCreateRenderPass(m_device->device(), &renderPassCreateInfo, nullptr, &renderPass);
-
- VkFramebufferAttachmentImageInfoKHR framebufferAttachmentImageInfos[4] = {};
- // Color attachment
- framebufferAttachmentImageInfos[0].sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR;
- framebufferAttachmentImageInfos[0].width = attachmentWidth;
- framebufferAttachmentImageInfos[0].height = attachmentHeight;
- framebufferAttachmentImageInfos[0].usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
- framebufferAttachmentImageInfos[0].layerCount = 2;
- framebufferAttachmentImageInfos[0].viewFormatCount = 1;
- framebufferAttachmentImageInfos[0].pViewFormats = &colorAndInputAttachmentFormat;
- // Color resolve attachment
- framebufferAttachmentImageInfos[1].sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR;
- framebufferAttachmentImageInfos[1].width = attachmentWidth;
- framebufferAttachmentImageInfos[1].height = attachmentHeight;
- framebufferAttachmentImageInfos[1].usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
- framebufferAttachmentImageInfos[1].layerCount = 2;
- framebufferAttachmentImageInfos[1].viewFormatCount = 1;
- framebufferAttachmentImageInfos[1].pViewFormats = &colorAndInputAttachmentFormat;
- // Depth stencil attachment
- framebufferAttachmentImageInfos[2].sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR;
- framebufferAttachmentImageInfos[2].width = attachmentWidth;
- framebufferAttachmentImageInfos[2].height = attachmentHeight;
- framebufferAttachmentImageInfos[2].usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
- framebufferAttachmentImageInfos[2].layerCount = 2;
- framebufferAttachmentImageInfos[2].viewFormatCount = 1;
- framebufferAttachmentImageInfos[2].pViewFormats = &depthStencilAttachmentFormat;
- // Input attachment
- framebufferAttachmentImageInfos[3].sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR;
- framebufferAttachmentImageInfos[3].width = attachmentWidth;
- framebufferAttachmentImageInfos[3].height = attachmentHeight;
- framebufferAttachmentImageInfos[3].usage = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
- framebufferAttachmentImageInfos[3].layerCount = 2;
- framebufferAttachmentImageInfos[3].viewFormatCount = 1;
- framebufferAttachmentImageInfos[3].pViewFormats = &colorAndInputAttachmentFormat;
- VkFramebufferAttachmentsCreateInfoKHR framebufferAttachmentsCreateInfo = {};
- framebufferAttachmentsCreateInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR;
- framebufferAttachmentsCreateInfo.attachmentImageInfoCount = 4;
- framebufferAttachmentsCreateInfo.pAttachmentImageInfos = framebufferAttachmentImageInfos;
- VkFramebufferCreateInfo framebufferCreateInfo = {};
- framebufferCreateInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
- framebufferCreateInfo.pNext = &framebufferAttachmentsCreateInfo;
- framebufferCreateInfo.flags = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR;
- framebufferCreateInfo.width = attachmentWidth;
- framebufferCreateInfo.height = attachmentHeight;
- framebufferCreateInfo.layers = 1;
- framebufferCreateInfo.renderPass = renderPass;
- framebufferCreateInfo.attachmentCount = 4;
- VkFramebuffer framebuffer = VK_NULL_HANDLE;
-
- // Color attachment, mismatched layer count
- framebufferAttachmentImageInfos[0].layerCount = 1;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-renderPass-03198");
- vkCreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
- m_errorMonitor->VerifyFound();
- if (framebuffer != VK_NULL_HANDLE) {
- vkDestroyFramebuffer(m_device->device(), framebuffer, nullptr);
- }
- framebufferAttachmentImageInfos[0].layerCount = 2;
-
- // Color resolve attachment, mismatched layer count
- framebufferAttachmentImageInfos[1].layerCount = 1;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-renderPass-03198");
- vkCreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
- m_errorMonitor->VerifyFound();
- if (framebuffer != VK_NULL_HANDLE) {
- vkDestroyFramebuffer(m_device->device(), framebuffer, nullptr);
- }
- framebufferAttachmentImageInfos[1].layerCount = 2;
-
- // Depth stencil attachment, mismatched layer count
- framebufferAttachmentImageInfos[2].layerCount = 1;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-renderPass-03198");
- vkCreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
- m_errorMonitor->VerifyFound();
- if (framebuffer != VK_NULL_HANDLE) {
- vkDestroyFramebuffer(m_device->device(), framebuffer, nullptr);
- }
- framebufferAttachmentImageInfos[2].layerCount = 2;
-
- // Input attachment, mismatched layer count
- framebufferAttachmentImageInfos[3].layerCount = 1;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-renderPass-03198");
- vkCreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
- m_errorMonitor->VerifyFound();
- if (framebuffer != VK_NULL_HANDLE) {
- vkDestroyFramebuffer(m_device->device(), framebuffer, nullptr);
- }
- framebufferAttachmentImageInfos[3].layerCount = 2;
-
- vkDestroyRenderPass(m_device->device(), renderPass, nullptr);
-}
-
-TEST_F(VkLayerTest, ImagelessFramebufferDepthStencilResolveAttachmentTests) {
- TEST_DESCRIPTION(
- "Create an imageless framebuffer against a render pass using depth stencil resolve, with mismatched information");
-
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- } else {
- printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
- VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
- if (!rp2Supported) {
- printf("%s Did not find required device extension %s; skipped.\n", kSkipPrefix, VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME);
- return;
- }
-
- if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_DEPTH_STENCIL_RESOLVE_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_KHR_DEPTH_STENCIL_RESOLVE_EXTENSION_NAME);
- } else {
- printf("%s Did not find required device extension %s; skipped.\n", kSkipPrefix,
- VK_KHR_DEPTH_STENCIL_RESOLVE_EXTENSION_NAME);
- return;
- }
-
- if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_KHR_MAINTENANCE2_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME);
- } else {
- printf("%s Did not find required device extension %s; skipped.\n", kSkipPrefix,
- VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME);
- return;
- }
-
- VkPhysicalDeviceImagelessFramebufferFeaturesKHR physicalDeviceImagelessFramebufferFeatures = {};
- physicalDeviceImagelessFramebufferFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR;
- physicalDeviceImagelessFramebufferFeatures.imagelessFramebuffer = VK_TRUE;
- VkPhysicalDeviceFeatures2 physicalDeviceFeatures2 = {};
- physicalDeviceFeatures2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
- physicalDeviceFeatures2.pNext = &physicalDeviceImagelessFramebufferFeatures;
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &physicalDeviceFeatures2, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
-
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- uint32_t attachmentWidth = 512;
- uint32_t attachmentHeight = 512;
- VkFormat attachmentFormat = VK_FORMAT_D32_SFLOAT_S8_UINT;
-
- VkAttachmentDescription2KHR attachmentDescriptions[2] = {};
- // Depth/stencil attachment
- attachmentDescriptions[0].sType = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR;
- attachmentDescriptions[0].format = attachmentFormat;
- attachmentDescriptions[0].samples = VK_SAMPLE_COUNT_4_BIT;
- attachmentDescriptions[0].finalLayout = VK_IMAGE_LAYOUT_GENERAL;
- // Depth/stencil resolve attachment
- attachmentDescriptions[1].sType = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR;
- attachmentDescriptions[1].format = attachmentFormat;
- attachmentDescriptions[1].samples = VK_SAMPLE_COUNT_1_BIT;
- attachmentDescriptions[1].finalLayout = VK_IMAGE_LAYOUT_GENERAL;
-
- VkAttachmentReference2KHR depthStencilAttachmentReference = {};
- depthStencilAttachmentReference.sType = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR;
- depthStencilAttachmentReference.layout = VK_IMAGE_LAYOUT_GENERAL;
- depthStencilAttachmentReference.attachment = 0;
- VkAttachmentReference2KHR depthStencilResolveAttachmentReference = {};
- depthStencilResolveAttachmentReference.sType = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR;
- depthStencilResolveAttachmentReference.layout = VK_IMAGE_LAYOUT_GENERAL;
- depthStencilResolveAttachmentReference.attachment = 1;
- VkSubpassDescriptionDepthStencilResolveKHR subpassDescriptionDepthStencilResolve = {};
- subpassDescriptionDepthStencilResolve.sType = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR;
- subpassDescriptionDepthStencilResolve.pDepthStencilResolveAttachment = &depthStencilResolveAttachmentReference;
- subpassDescriptionDepthStencilResolve.depthResolveMode = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR;
- subpassDescriptionDepthStencilResolve.stencilResolveMode = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR;
- VkSubpassDescription2KHR subpassDescription = {};
- subpassDescription.sType = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR;
- subpassDescription.pNext = &subpassDescriptionDepthStencilResolve;
- subpassDescription.pDepthStencilAttachment = &depthStencilAttachmentReference;
- subpassDescription.viewMask = 0x3u;
-
- VkRenderPassCreateInfo2KHR renderPassCreateInfo = {};
- renderPassCreateInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR;
- renderPassCreateInfo.pNext = nullptr;
- renderPassCreateInfo.attachmentCount = 2;
- renderPassCreateInfo.subpassCount = 1;
- renderPassCreateInfo.pSubpasses = &subpassDescription;
- renderPassCreateInfo.pAttachments = attachmentDescriptions;
- VkRenderPass renderPass;
- PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR =
- (PFN_vkCreateRenderPass2KHR)vkGetDeviceProcAddr(m_device->device(), "vkCreateRenderPass2KHR");
- vkCreateRenderPass2KHR(m_device->device(), &renderPassCreateInfo, nullptr, &renderPass);
-
- VkFramebufferAttachmentImageInfoKHR framebufferAttachmentImageInfos[2] = {};
- // Depth/stencil attachment
- framebufferAttachmentImageInfos[0].sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR;
- framebufferAttachmentImageInfos[0].width = attachmentWidth;
- framebufferAttachmentImageInfos[0].height = attachmentHeight;
- framebufferAttachmentImageInfos[0].usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
- framebufferAttachmentImageInfos[0].layerCount = 2;
- framebufferAttachmentImageInfos[0].viewFormatCount = 1;
- framebufferAttachmentImageInfos[0].pViewFormats = &attachmentFormat;
- // Depth/stencil resolve attachment
- framebufferAttachmentImageInfos[1].sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR;
- framebufferAttachmentImageInfos[1].width = attachmentWidth;
- framebufferAttachmentImageInfos[1].height = attachmentHeight;
- framebufferAttachmentImageInfos[1].usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
- framebufferAttachmentImageInfos[1].layerCount = 2;
- framebufferAttachmentImageInfos[1].viewFormatCount = 1;
- framebufferAttachmentImageInfos[1].pViewFormats = &attachmentFormat;
- VkFramebufferAttachmentsCreateInfoKHR framebufferAttachmentsCreateInfo = {};
- framebufferAttachmentsCreateInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR;
- framebufferAttachmentsCreateInfo.attachmentImageInfoCount = 2;
- framebufferAttachmentsCreateInfo.pAttachmentImageInfos = framebufferAttachmentImageInfos;
- VkFramebufferCreateInfo framebufferCreateInfo = {};
- framebufferCreateInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
- framebufferCreateInfo.pNext = &framebufferAttachmentsCreateInfo;
- framebufferCreateInfo.flags = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR;
- framebufferCreateInfo.width = attachmentWidth;
- framebufferCreateInfo.height = attachmentHeight;
- framebufferCreateInfo.layers = 1;
- framebufferCreateInfo.renderPass = renderPass;
- framebufferCreateInfo.attachmentCount = 2;
- VkFramebuffer framebuffer = VK_NULL_HANDLE;
-
- // Depth/stencil attachment, mismatched layer count
- framebufferAttachmentImageInfos[0].layerCount = 1;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-renderPass-03198");
- vkCreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
- m_errorMonitor->VerifyFound();
- if (framebuffer != VK_NULL_HANDLE) {
- vkDestroyFramebuffer(m_device->device(), framebuffer, nullptr);
- }
- framebufferAttachmentImageInfos[0].layerCount = 2;
-
- // Depth resolve attachment, mismatched image usage
- framebufferAttachmentImageInfos[1].usage = VK_IMAGE_USAGE_SAMPLED_BIT;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-flags-03203");
- vkCreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
- m_errorMonitor->VerifyFound();
- if (framebuffer != VK_NULL_HANDLE) {
- vkDestroyFramebuffer(m_device->device(), framebuffer, nullptr);
- }
- framebufferAttachmentImageInfos[1].usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
-
- // Depth resolve attachment, mismatched layer count
- framebufferAttachmentImageInfos[1].layerCount = 1;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-renderPass-03198");
- vkCreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
- m_errorMonitor->VerifyFound();
- if (framebuffer != VK_NULL_HANDLE) {
- vkDestroyFramebuffer(m_device->device(), framebuffer, nullptr);
- }
- framebufferAttachmentImageInfos[1].layerCount = 2;
-
- vkDestroyRenderPass(m_device->device(), renderPass, nullptr);
-}
diff --git a/tests/vklayertests_others.cpp b/tests/vklayertests_others.cpp
deleted file mode 100644
index f05426976..000000000
--- a/tests/vklayertests_others.cpp
+++ /dev/null
@@ -1,4938 +0,0 @@
-/*
- * Copyright (c) 2015-2019 The Khronos Group Inc.
- * Copyright (c) 2015-2019 Valve Corporation
- * Copyright (c) 2015-2019 LunarG, Inc.
- * Copyright (c) 2015-2019 Google, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Author: Chia-I Wu <olvaffe@gmail.com>
- * Author: Chris Forbes <chrisf@ijw.co.nz>
- * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
- * Author: Mark Lobodzinski <mark@lunarg.com>
- * Author: Mike Stroyan <mike@LunarG.com>
- * Author: Tobin Ehlis <tobine@google.com>
- * Author: Tony Barbour <tony@LunarG.com>
- * Author: Cody Northrop <cnorthrop@google.com>
- * Author: Dave Houlton <daveh@lunarg.com>
- * Author: Jeremy Kniager <jeremyk@lunarg.com>
- * Author: Shannon McPherson <shannon@lunarg.com>
- * Author: John Zulauf <jzulauf@lunarg.com>
- */
-
-#include "cast_utils.h"
-#include "layer_validation_tests.h"
-
-TEST_F(VkLayerTest, RequiredParameter) {
- TEST_DESCRIPTION("Specify VK_NULL_HANDLE, NULL, and 0 for required handle, pointer, array, and array count parameters");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "required parameter pFeatures specified as NULL");
- // Specify NULL for a pointer to a handle
- // Expected to trigger an error with
- // parameter_validation::validate_required_pointer
- vkGetPhysicalDeviceFeatures(gpu(), NULL);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "required parameter pQueueFamilyPropertyCount specified as NULL");
- // Specify NULL for pointer to array count
- // Expected to trigger an error with parameter_validation::validate_array
- vkGetPhysicalDeviceQueueFamilyProperties(gpu(), NULL, NULL);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-viewportCount-arraylength");
- // Specify 0 for a required array count
- // Expected to trigger an error with parameter_validation::validate_array
- VkViewport viewport = {0.0f, 0.0f, 64.0f, 64.0f, 0.0f, 1.0f};
- m_commandBuffer->SetViewport(0, 0, &viewport);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCreateImage-pCreateInfo-parameter");
- // Specify a null pImageCreateInfo struct pointer
- VkImage test_image;
- vkCreateImage(device(), NULL, NULL, &test_image);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-pViewports-parameter");
- // Specify NULL for a required array
- // Expected to trigger an error with parameter_validation::validate_array
- m_commandBuffer->SetViewport(0, 1, NULL);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "required parameter memory specified as VK_NULL_HANDLE");
- // Specify VK_NULL_HANDLE for a required handle
- // Expected to trigger an error with
- // parameter_validation::validate_required_handle
- vkUnmapMemory(device(), VK_NULL_HANDLE);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "required parameter pFences[0] specified as VK_NULL_HANDLE");
- // Specify VK_NULL_HANDLE for a required handle array entry
- // Expected to trigger an error with
- // parameter_validation::validate_required_handle_array
- VkFence fence = VK_NULL_HANDLE;
- vkResetFences(device(), 1, &fence);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "required parameter pAllocateInfo specified as NULL");
- // Specify NULL for a required struct pointer
- // Expected to trigger an error with
- // parameter_validation::validate_struct_type
- VkDeviceMemory memory = VK_NULL_HANDLE;
- vkAllocateMemory(device(), NULL, NULL, &memory);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "value of faceMask must not be 0");
- // Specify 0 for a required VkFlags parameter
- // Expected to trigger an error with parameter_validation::validate_flags
- m_commandBuffer->SetStencilReference(0, 0);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "value of pSubmits[0].pWaitDstStageMask[0] must not be 0");
- // Specify 0 for a required VkFlags array entry
- // Expected to trigger an error with
- // parameter_validation::validate_flags_array
- VkSemaphore semaphore = VK_NULL_HANDLE;
- VkPipelineStageFlags stageFlags = 0;
- VkSubmitInfo submitInfo = {};
- submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submitInfo.waitSemaphoreCount = 1;
- submitInfo.pWaitSemaphores = &semaphore;
- submitInfo.pWaitDstStageMask = &stageFlags;
- vkQueueSubmit(m_device->m_queue, 1, &submitInfo, VK_NULL_HANDLE);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkSubmitInfo-sType-sType");
- stageFlags = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
- // Set a bogus sType and see what happens
- submitInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
- submitInfo.waitSemaphoreCount = 1;
- submitInfo.pWaitSemaphores = &semaphore;
- submitInfo.pWaitDstStageMask = &stageFlags;
- vkQueueSubmit(m_device->m_queue, 1, &submitInfo, VK_NULL_HANDLE);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkSubmitInfo-pWaitSemaphores-parameter");
- stageFlags = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
- submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submitInfo.waitSemaphoreCount = 1;
- // Set a null pointer for pWaitSemaphores
- submitInfo.pWaitSemaphores = NULL;
- submitInfo.pWaitDstStageMask = &stageFlags;
- vkQueueSubmit(m_device->m_queue, 1, &submitInfo, VK_NULL_HANDLE);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCreateRenderPass-pCreateInfo-parameter");
- VkRenderPass render_pass;
- vkCreateRenderPass(device(), nullptr, nullptr, &render_pass);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, PnextOnlyStructValidation) {
- TEST_DESCRIPTION("See if checks occur on structs ONLY used in pnext chains.");
-
- if (!(CheckDescriptorIndexingSupportAndInitFramework(this, m_instance_extension_names, m_device_extension_names, NULL,
- m_errorMonitor))) {
- printf("Descriptor indexing or one of its dependencies not supported, skipping tests\n");
- return;
- }
-
- PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
- (PFN_vkGetPhysicalDeviceFeatures2KHR)vkGetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
- ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
-
- // Create a device, passing in a VkPhysicalDeviceFeatures2 chain that contains an invalid feature value
- auto indexing_features = lvl_init_struct<VkPhysicalDeviceDescriptorIndexingFeaturesEXT>();
- auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&indexing_features);
- vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
- // Set one of the features values to an invalid boolean value
- indexing_features.descriptorBindingUniformBufferUpdateAfterBind = 800;
-
- uint32_t queue_node_count;
- vkGetPhysicalDeviceQueueFamilyProperties(gpu(), &queue_node_count, NULL);
- VkQueueFamilyProperties *queue_props = new VkQueueFamilyProperties[queue_node_count];
- vkGetPhysicalDeviceQueueFamilyProperties(gpu(), &queue_node_count, queue_props);
- float priorities[] = {1.0f};
- VkDeviceQueueCreateInfo queue_info{};
- queue_info.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
- queue_info.pNext = NULL;
- queue_info.flags = 0;
- queue_info.queueFamilyIndex = 0;
- queue_info.queueCount = 1;
- queue_info.pQueuePriorities = &priorities[0];
- VkDeviceCreateInfo dev_info = {};
- dev_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
- dev_info.pNext = NULL;
- dev_info.queueCreateInfoCount = 1;
- dev_info.pQueueCreateInfos = &queue_info;
- dev_info.enabledLayerCount = 0;
- dev_info.ppEnabledLayerNames = NULL;
- dev_info.enabledExtensionCount = m_device_extension_names.size();
- dev_info.ppEnabledExtensionNames = m_device_extension_names.data();
- dev_info.pNext = &features2;
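- // The invalid VkBool32 lives in a struct that can only appear on a pNext chain, so parameter
- // validation must walk the chain when vkCreateDevice consumes it in order to flag the value.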
- VkDevice dev;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_WARNING_BIT_EXT, "is neither VK_TRUE nor VK_FALSE");
- m_errorMonitor->SetUnexpectedError("Failed to create");
- vkCreateDevice(gpu(), &dev_info, NULL, &dev);
- m_errorMonitor->VerifyFound();
-
- delete[] queue_props;  // free the queue family properties queried above
-}
-
-TEST_F(VkLayerTest, ReservedParameter) {
- TEST_DESCRIPTION("Specify a non-zero value for a reserved parameter");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " must be 0");
- // Specify 0 for a reserved VkFlags parameter
- // Expected to trigger an error with
- // parameter_validation::validate_reserved_flags
- VkEvent event_handle = VK_NULL_HANDLE;
- VkEventCreateInfo event_info = {};
- event_info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
- event_info.flags = 1;
- vkCreateEvent(device(), &event_info, NULL, &event_handle);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, DebugMarkerNameTest) {
- TEST_DESCRIPTION("Ensure debug marker object names are printed in debug report output");
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- if (DeviceExtensionSupported(gpu(), "VK_LAYER_LUNARG_core_validation", VK_EXT_DEBUG_MARKER_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_EXT_DEBUG_MARKER_EXTENSION_NAME);
- } else {
- printf("%s Debug Marker Extension not supported, skipping test\n", kSkipPrefix);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- PFN_vkDebugMarkerSetObjectNameEXT fpvkDebugMarkerSetObjectNameEXT =
- (PFN_vkDebugMarkerSetObjectNameEXT)vkGetInstanceProcAddr(instance(), "vkDebugMarkerSetObjectNameEXT");
- if (!fpvkDebugMarkerSetObjectNameEXT) {
- printf("%s Can't find vkDebugMarkerSetObjectNameEXT; skipped.\n", kSkipPrefix);
- return;
- }
-
- if (DeviceSimulation()) {
- printf("%sSkipping object naming test.\n", kSkipPrefix);
- return;
- }
-
- VkBuffer buffer;
- VkDeviceMemory memory_1, memory_2;
- std::string memory_name = "memory_name";
-
- VkBufferCreateInfo buffer_create_info = {};
- buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
- buffer_create_info.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
- buffer_create_info.size = 1;
-
- vkCreateBuffer(device(), &buffer_create_info, nullptr, &buffer);
-
- VkMemoryRequirements memRequirements;
- vkGetBufferMemoryRequirements(device(), buffer, &memRequirements);
-
- VkMemoryAllocateInfo memory_allocate_info = {};
- memory_allocate_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
- memory_allocate_info.allocationSize = memRequirements.size;
- memory_allocate_info.memoryTypeIndex = 0;
-
- vkAllocateMemory(device(), &memory_allocate_info, nullptr, &memory_1);
- vkAllocateMemory(device(), &memory_allocate_info, nullptr, &memory_2);
-
- VkDebugMarkerObjectNameInfoEXT name_info = {};
- name_info.sType = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT;
- name_info.pNext = nullptr;
- name_info.object = (uint64_t)memory_2;
- name_info.objectType = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT;
- name_info.pObjectName = memory_name.c_str();
- fpvkDebugMarkerSetObjectNameEXT(device(), &name_info);
-
- vkBindBufferMemory(device(), buffer, memory_1, 0);
-
- // Test core_validation layer
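- // Binding a second allocation to a buffer that is already bound is invalid; the resulting error
- // message should include the name given to memory_2 via vkDebugMarkerSetObjectNameEXT.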
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, memory_name);
- vkBindBufferMemory(device(), buffer, memory_2, 0);
- m_errorMonitor->VerifyFound();
-
- vkFreeMemory(device(), memory_1, nullptr);
- memory_1 = VK_NULL_HANDLE;
- vkFreeMemory(device(), memory_2, nullptr);
- memory_2 = VK_NULL_HANDLE;
- vkDestroyBuffer(device(), buffer, nullptr);
- buffer = VK_NULL_HANDLE;
-
- VkCommandBuffer commandBuffer;
- std::string commandBuffer_name = "command_buffer_name";
- VkCommandPool commandpool_1;
- VkCommandPool commandpool_2;
- VkCommandPoolCreateInfo pool_create_info{};
- pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
- pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
- pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
- vkCreateCommandPool(device(), &pool_create_info, nullptr, &commandpool_1);
- vkCreateCommandPool(device(), &pool_create_info, nullptr, &commandpool_2);
-
- VkCommandBufferAllocateInfo command_buffer_allocate_info{};
- command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
- command_buffer_allocate_info.commandPool = commandpool_1;
- command_buffer_allocate_info.commandBufferCount = 1;
- command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
- vkAllocateCommandBuffers(device(), &command_buffer_allocate_info, &commandBuffer);
-
- name_info.object = (uint64_t)commandBuffer;
- name_info.objectType = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT;
- name_info.pObjectName = commandBuffer_name.c_str();
- fpvkDebugMarkerSetObjectNameEXT(device(), &name_info);
-
- VkCommandBufferBeginInfo cb_begin_Info = {};
- cb_begin_Info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
- cb_begin_Info.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
- vkBeginCommandBuffer(commandBuffer, &cb_begin_Info);
-
- const VkRect2D scissor = {{-1, 0}, {16, 16}};
- const VkRect2D scissors[] = {scissor, scissor};
-
- // Test parameter_validation layer
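- // The negative scissor offset is an intentionally invalid parameter; the error message produced by
- // parameter validation should include the command buffer's debug-marker name.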
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, commandBuffer_name);
- vkCmdSetScissor(commandBuffer, 1, 1, scissors);
- m_errorMonitor->VerifyFound();
-
- // Test object_tracker layer
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, commandBuffer_name);
- vkFreeCommandBuffers(device(), commandpool_2, 1, &commandBuffer);
- m_errorMonitor->VerifyFound();
-
- vkDestroyCommandPool(device(), commandpool_1, NULL);
- vkDestroyCommandPool(device(), commandpool_2, NULL);
-}
-
-TEST_F(VkLayerTest, DebugUtilsNameTest) {
- TEST_DESCRIPTION("Ensure debug utils object names are printed in debug messenger output");
-
- // Skip test if extension not supported
- if (InstanceExtensionSupported(VK_EXT_DEBUG_UTILS_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
- } else {
- printf("%s Debug Utils Extension not supported, skipping test\n", kSkipPrefix);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- PFN_vkSetDebugUtilsObjectNameEXT fpvkSetDebugUtilsObjectNameEXT =
- (PFN_vkSetDebugUtilsObjectNameEXT)vkGetInstanceProcAddr(instance(), "vkSetDebugUtilsObjectNameEXT");
- ASSERT_TRUE(fpvkSetDebugUtilsObjectNameEXT); // Must be extant if extension is enabled
- PFN_vkCreateDebugUtilsMessengerEXT fpvkCreateDebugUtilsMessengerEXT =
- (PFN_vkCreateDebugUtilsMessengerEXT)vkGetInstanceProcAddr(instance(), "vkCreateDebugUtilsMessengerEXT");
- ASSERT_TRUE(fpvkCreateDebugUtilsMessengerEXT); // Must be extant if extension is enabled
- PFN_vkDestroyDebugUtilsMessengerEXT fpvkDestroyDebugUtilsMessengerEXT =
- (PFN_vkDestroyDebugUtilsMessengerEXT)vkGetInstanceProcAddr(instance(), "vkDestroyDebugUtilsMessengerEXT");
- ASSERT_TRUE(fpvkDestroyDebugUtilsMessengerEXT); // Must be extant if extension is enabled
- PFN_vkCmdInsertDebugUtilsLabelEXT fpvkCmdInsertDebugUtilsLabelEXT =
- (PFN_vkCmdInsertDebugUtilsLabelEXT)vkGetInstanceProcAddr(instance(), "vkCmdInsertDebugUtilsLabelEXT");
- ASSERT_TRUE(fpvkCmdInsertDebugUtilsLabelEXT); // Must be extant if extension is enabled
-
- if (DeviceSimulation()) {
- printf("%sSkipping object naming test.\n", kSkipPrefix);
- return;
- }
-
- DebugUtilsLabelCheckData callback_data;
- auto empty_callback = [](const VkDebugUtilsMessengerCallbackDataEXT *pCallbackData, DebugUtilsLabelCheckData *data) {
- data->count++;
- };
- callback_data.count = 0;
- callback_data.callback = empty_callback;
-
- auto callback_create_info = lvl_init_struct<VkDebugUtilsMessengerCreateInfoEXT>();
- callback_create_info.messageSeverity =
- VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT;
- callback_create_info.messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT;
- callback_create_info.pfnUserCallback = DebugUtilsCallback;
- callback_create_info.pUserData = &callback_data;
- VkDebugUtilsMessengerEXT my_messenger = VK_NULL_HANDLE;
- fpvkCreateDebugUtilsMessengerEXT(instance(), &callback_create_info, nullptr, &my_messenger);
-
- VkBuffer buffer;
- VkDeviceMemory memory_1, memory_2;
- std::string memory_name = "memory_name";
-
- VkBufferCreateInfo buffer_create_info = {};
- buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
- buffer_create_info.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
- buffer_create_info.size = 1;
-
- vkCreateBuffer(device(), &buffer_create_info, nullptr, &buffer);
-
- VkMemoryRequirements memRequirements;
- vkGetBufferMemoryRequirements(device(), buffer, &memRequirements);
-
- VkMemoryAllocateInfo memory_allocate_info = {};
- memory_allocate_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
- memory_allocate_info.allocationSize = memRequirements.size;
- memory_allocate_info.memoryTypeIndex = 0;
-
- vkAllocateMemory(device(), &memory_allocate_info, nullptr, &memory_1);
- vkAllocateMemory(device(), &memory_allocate_info, nullptr, &memory_2);
-
- VkDebugUtilsObjectNameInfoEXT name_info = {};
- name_info.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT;
- name_info.pNext = nullptr;
- name_info.objectHandle = (uint64_t)memory_2;
- name_info.objectType = VK_OBJECT_TYPE_DEVICE_MEMORY;
- name_info.pObjectName = memory_name.c_str();
- fpvkSetDebugUtilsObjectNameEXT(device(), &name_info);
-
- vkBindBufferMemory(device(), buffer, memory_1, 0);
-
- // Test core_validation layer
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, memory_name);
- vkBindBufferMemory(device(), buffer, memory_2, 0);
- m_errorMonitor->VerifyFound();
-
- vkFreeMemory(device(), memory_1, nullptr);
- memory_1 = VK_NULL_HANDLE;
- vkFreeMemory(device(), memory_2, nullptr);
- memory_2 = VK_NULL_HANDLE;
- vkDestroyBuffer(device(), buffer, nullptr);
- buffer = VK_NULL_HANDLE;
-
- VkCommandBuffer commandBuffer;
- std::string commandBuffer_name = "command_buffer_name";
- VkCommandPool commandpool_1;
- VkCommandPool commandpool_2;
- VkCommandPoolCreateInfo pool_create_info{};
- pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
- pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
- pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
- vkCreateCommandPool(device(), &pool_create_info, nullptr, &commandpool_1);
- vkCreateCommandPool(device(), &pool_create_info, nullptr, &commandpool_2);
-
- VkCommandBufferAllocateInfo command_buffer_allocate_info{};
- command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
- command_buffer_allocate_info.commandPool = commandpool_1;
- command_buffer_allocate_info.commandBufferCount = 1;
- command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
- vkAllocateCommandBuffers(device(), &command_buffer_allocate_info, &commandBuffer);
-
- name_info.objectHandle = (uint64_t)commandBuffer;
- name_info.objectType = VK_OBJECT_TYPE_COMMAND_BUFFER;
- name_info.pObjectName = commandBuffer_name.c_str();
- fpvkSetDebugUtilsObjectNameEXT(device(), &name_info);
-
- VkCommandBufferBeginInfo cb_begin_Info = {};
- cb_begin_Info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
- cb_begin_Info.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
- vkBeginCommandBuffer(commandBuffer, &cb_begin_Info);
-
- const VkRect2D scissor = {{-1, 0}, {16, 16}};
- const VkRect2D scissors[] = {scissor, scissor};
-
- auto command_label = lvl_init_struct<VkDebugUtilsLabelEXT>();
- command_label.pLabelName = "Command Label 0123";
- command_label.color[0] = 0.;
- command_label.color[1] = 1.;
- command_label.color[2] = 2.;
- command_label.color[3] = 3.0;
- bool command_label_test = false;
- auto command_label_callback = [command_label, &command_label_test](const VkDebugUtilsMessengerCallbackDataEXT *pCallbackData,
- DebugUtilsLabelCheckData *data) {
- data->count++;
- command_label_test = false;
- if (pCallbackData->cmdBufLabelCount == 1) {
- command_label_test = pCallbackData->pCmdBufLabels[0] == command_label;
- }
- };
- callback_data.callback = command_label_callback;
-
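- // When the scissor error below is reported, the messenger callback above checks that the label
- // inserted with vkCmdInsertDebugUtilsLabelEXT is passed back through pCmdBufLabels.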
- fpvkCmdInsertDebugUtilsLabelEXT(commandBuffer, &command_label);
- // Test parameter_validation layer
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, commandBuffer_name);
- vkCmdSetScissor(commandBuffer, 1, 1, scissors);
- m_errorMonitor->VerifyFound();
-
- // Check the label test
- if (!command_label_test) {
- ADD_FAILURE() << "Command label '" << command_label.pLabelName << "' not passed to callback.";
- }
-
- // Test object_tracker layer
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, commandBuffer_name);
- vkFreeCommandBuffers(device(), commandpool_2, 1, &commandBuffer);
- m_errorMonitor->VerifyFound();
-
- vkDestroyCommandPool(device(), commandpool_1, NULL);
- vkDestroyCommandPool(device(), commandpool_2, NULL);
- fpvkDestroyDebugUtilsMessengerEXT(instance(), my_messenger, nullptr);
-}
-
-TEST_F(VkLayerTest, InvalidStructSType) {
- TEST_DESCRIPTION("Specify an invalid VkStructureType for a Vulkan structure's sType field");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "parameter pAllocateInfo->sType must be");
- // Zero struct memory, effectively setting sType to
- // VK_STRUCTURE_TYPE_APPLICATION_INFO
- // Expected to trigger an error with
- // parameter_validation::validate_struct_type
- VkMemoryAllocateInfo alloc_info = {};
- VkDeviceMemory memory = VK_NULL_HANDLE;
- vkAllocateMemory(device(), &alloc_info, NULL, &memory);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "parameter pSubmits[0].sType must be");
- // Zero struct memory, effectively setting sType to
- // VK_STRUCTURE_TYPE_APPLICATION_INFO
- // Expected to trigger an error with
- // parameter_validation::validate_struct_type_array
- VkSubmitInfo submit_info = {};
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, InvalidStructPNext) {
- TEST_DESCRIPTION("Specify an invalid value for a Vulkan structure's pNext field");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_WARNING_BIT_EXT, "value of pCreateInfo->pNext must be NULL");
- // Set VkEventCreateInfo::pNext to a non-NULL value, when pNext must be NULL.
- // Need to pick a function that has no allowed pNext structure types.
- // Expected to trigger an error with parameter_validation::validate_struct_pnext
- VkEvent event = VK_NULL_HANDLE;
- VkEventCreateInfo event_alloc_info = {};
- // Zero-initialization will provide the correct sType
- VkApplicationInfo app_info = {};
- event_alloc_info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
- event_alloc_info.pNext = &app_info;
- vkCreateEvent(device(), &event_alloc_info, NULL, &event);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_WARNING_BIT_EXT,
- " chain includes a structure with unexpected VkStructureType ");
- // Set VkMemoryAllocateInfo::pNext to a non-NULL value, but use
- // a function that has allowed pNext structure types and specify
- // a structure type that is not allowed.
- // Expected to trigger an error with parameter_validation::validate_struct_pnext
- VkDeviceMemory memory = VK_NULL_HANDLE;
- VkMemoryAllocateInfo memory_alloc_info = {};
- memory_alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
- memory_alloc_info.pNext = &app_info;
- vkAllocateMemory(device(), &memory_alloc_info, NULL, &memory);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, UnrecognizedValueOutOfRange) {
- ASSERT_NO_FATAL_FAILURE(Init());
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "does not fall within the begin..end range of the core VkFormat enumeration tokens");
- // Specify an invalid VkFormat value
- // Expected to trigger an error with
- // parameter_validation::validate_ranged_enum
- VkFormatProperties format_properties;
- vkGetPhysicalDeviceFormatProperties(gpu(), static_cast<VkFormat>(8000), &format_properties);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, UnrecognizedValueBadMask) {
- ASSERT_NO_FATAL_FAILURE(Init());
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "contains flag bits that are not recognized members of");
- // Specify an invalid VkFlags bitmask value
- // Expected to trigger an error with parameter_validation::validate_flags
- VkImageFormatProperties image_format_properties;
- vkGetPhysicalDeviceImageFormatProperties(gpu(), VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_TYPE_2D, VK_IMAGE_TILING_OPTIMAL,
- static_cast<VkImageUsageFlags>(1 << 25), 0, &image_format_properties);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, UnrecognizedValueBadFlag) {
- ASSERT_NO_FATAL_FAILURE(Init());
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "contains flag bits that are not recognized members of");
- // Specify an invalid VkFlags array entry
- // Expected to trigger an error with parameter_validation::validate_flags_array
- VkSemaphore semaphore;
- VkSemaphoreCreateInfo semaphore_create_info{};
- semaphore_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
- vkCreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore);
- // `stage_flags` is set to a value which, currently, is not a defined stage flag
- // `VK_IMAGE_ASPECT_FLAG_BITS_MAX_ENUM` works well for this
- VkPipelineStageFlags stage_flags = VK_IMAGE_ASPECT_FLAG_BITS_MAX_ENUM;
- // `waitSemaphoreCount` *must* be greater than 0 to perform this check
- VkSubmitInfo submit_info = {};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.waitSemaphoreCount = 1;
- submit_info.pWaitSemaphores = &semaphore;
- submit_info.pWaitDstStageMask = &stage_flags;
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
- vkDestroySemaphore(m_device->device(), semaphore, nullptr);
-
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, UnrecognizedValueBadBool) {
- // Make sure using VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE doesn't trigger a false positive.
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME);
- } else {
- printf("%s VK_KHR_sampler_mirror_clamp_to_edge extension not supported, skipping test\n", kSkipPrefix);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- // Specify an invalid VkBool32 value, expecting a warning with parameter_validation::validate_bool32
- VkSamplerCreateInfo sampler_info = SafeSaneSamplerCreateInfo();
- sampler_info.addressModeU = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE;
- sampler_info.addressModeV = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE;
- sampler_info.addressModeW = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE;
-
- // Not VK_TRUE or VK_FALSE
- sampler_info.anisotropyEnable = 3;
- CreateSamplerTest(*this, &sampler_info, "is neither VK_TRUE nor VK_FALSE");
-}
-
-TEST_F(VkLayerTest, UnrecognizedValueMaxEnum) {
- ASSERT_NO_FATAL_FAILURE(Init());
-
- // Specify MAX_ENUM
- VkFormatProperties format_properties;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "does not fall within the begin..end range");
- vkGetPhysicalDeviceFormatProperties(gpu(), VK_FORMAT_MAX_ENUM, &format_properties);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, SubmitSignaledFence) {
- vk_testing::Fence testFence;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "submitted in SIGNALED state. Fences must be reset before being submitted");
-
- VkFenceCreateInfo fenceInfo = {};
- fenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
- fenceInfo.pNext = NULL;
- fenceInfo.flags = VK_FENCE_CREATE_SIGNALED_BIT;
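- // Submitting this fence while it is still in the signaled state (without resetting it first)
- // should trigger the error above.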
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitViewport());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- m_commandBuffer->begin();
- m_commandBuffer->ClearAllBuffers(m_renderTargets, m_clear_color, nullptr, m_depth_clear_color, m_stencil_clear_color);
- m_commandBuffer->end();
-
- testFence.init(*m_device, fenceInfo);
-
- VkSubmitInfo submit_info;
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.pNext = NULL;
- submit_info.waitSemaphoreCount = 0;
- submit_info.pWaitSemaphores = NULL;
- submit_info.pWaitDstStageMask = NULL;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &m_commandBuffer->handle();
- submit_info.signalSemaphoreCount = 0;
- submit_info.pSignalSemaphores = NULL;
-
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, testFence.handle());
- vkQueueWaitIdle(m_device->m_queue);
-
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, LeakAnObject) {
- TEST_DESCRIPTION("Create a fence and destroy its device without first destroying the fence.");
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- // Workaround for overzealous layers checking even the guaranteed 0th queue family
- const auto q_props = vk_testing::PhysicalDevice(gpu()).queue_properties();
- ASSERT_TRUE(q_props.size() > 0);
- ASSERT_TRUE(q_props[0].queueCount > 0);
-
- const float q_priority[] = {1.0f};
- VkDeviceQueueCreateInfo queue_ci = {};
- queue_ci.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
- queue_ci.queueFamilyIndex = 0;
- queue_ci.queueCount = 1;
- queue_ci.pQueuePriorities = q_priority;
-
- VkDeviceCreateInfo device_ci = {};
- device_ci.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
- device_ci.queueCreateInfoCount = 1;
- device_ci.pQueueCreateInfos = &queue_ci;
-
- VkDevice leaky_device;
- ASSERT_VK_SUCCESS(vkCreateDevice(gpu(), &device_ci, nullptr, &leaky_device));
-
- const VkFenceCreateInfo fence_ci = {VK_STRUCTURE_TYPE_FENCE_CREATE_INFO};
- VkFence leaked_fence;
- ASSERT_VK_SUCCESS(vkCreateFence(leaky_device, &fence_ci, nullptr, &leaked_fence));
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkDestroyDevice-device-00378");
- vkDestroyDevice(leaky_device, nullptr);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, UseObjectWithWrongDevice) {
- TEST_DESCRIPTION(
- "Try to destroy a render pass object using a device other than the one it was created on. This should generate a distinct "
- "error from the invalid handle error.");
- // Create first device and renderpass
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- // Create second device
- float priorities[] = {1.0f};
- VkDeviceQueueCreateInfo queue_info{};
- queue_info.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
- queue_info.pNext = NULL;
- queue_info.flags = 0;
- queue_info.queueFamilyIndex = 0;
- queue_info.queueCount = 1;
- queue_info.pQueuePriorities = &priorities[0];
-
- VkDeviceCreateInfo device_create_info = {};
- auto features = m_device->phy().features();
- device_create_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
- device_create_info.pNext = NULL;
- device_create_info.queueCreateInfoCount = 1;
- device_create_info.pQueueCreateInfos = &queue_info;
- device_create_info.enabledLayerCount = 0;
- device_create_info.ppEnabledLayerNames = NULL;
- device_create_info.pEnabledFeatures = &features;
-
- VkDevice second_device;
- ASSERT_VK_SUCCESS(vkCreateDevice(gpu(), &device_create_info, NULL, &second_device));
-
- // Try to destroy the renderpass from the first device using the second device
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkDestroyRenderPass-renderPass-parent");
- vkDestroyRenderPass(second_device, m_renderPass, NULL);
- m_errorMonitor->VerifyFound();
-
- vkDestroyDevice(second_device, NULL);
-}
-
-TEST_F(VkLayerTest, InvalidAllocationCallbacks) {
- TEST_DESCRIPTION("Test with invalid VkAllocationCallbacks");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- // vkCreateInstance and vkCreateDevice currently tend to crash in the loader trampoline, so use vkCreateCommandPool instead
- const VkCommandPoolCreateInfo cpci = {VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO, nullptr, 0,
- DeviceObj()->QueueFamilyMatching(0, 0, true)};
- VkCommandPool cmdPool;
-
- struct Alloc {
- static VKAPI_ATTR void *VKAPI_CALL alloc(void *, size_t, size_t, VkSystemAllocationScope) { return nullptr; };
- static VKAPI_ATTR void *VKAPI_CALL realloc(void *, void *, size_t, size_t, VkSystemAllocationScope) { return nullptr; };
- static VKAPI_ATTR void VKAPI_CALL free(void *, void *){};
- static VKAPI_ATTR void VKAPI_CALL internalAlloc(void *, size_t, VkInternalAllocationType, VkSystemAllocationScope){};
- static VKAPI_ATTR void VKAPI_CALL internalFree(void *, size_t, VkInternalAllocationType, VkSystemAllocationScope){};
- };
-
- {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkAllocationCallbacks-pfnAllocation-00632");
- const VkAllocationCallbacks allocator = {nullptr, nullptr, Alloc::realloc, Alloc::free, nullptr, nullptr};
- vkCreateCommandPool(device(), &cpci, &allocator, &cmdPool);
- m_errorMonitor->VerifyFound();
- }
-
- {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkAllocationCallbacks-pfnReallocation-00633");
- const VkAllocationCallbacks allocator = {nullptr, Alloc::alloc, nullptr, Alloc::free, nullptr, nullptr};
- vkCreateCommandPool(device(), &cpci, &allocator, &cmdPool);
- m_errorMonitor->VerifyFound();
- }
-
- {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkAllocationCallbacks-pfnFree-00634");
- const VkAllocationCallbacks allocator = {nullptr, Alloc::alloc, Alloc::realloc, nullptr, nullptr, nullptr};
- vkCreateCommandPool(device(), &cpci, &allocator, &cmdPool);
- m_errorMonitor->VerifyFound();
- }
-
- {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
- const VkAllocationCallbacks allocator = {nullptr, Alloc::alloc, Alloc::realloc, Alloc::free, nullptr, Alloc::internalFree};
- vkCreateCommandPool(device(), &cpci, &allocator, &cmdPool);
- m_errorMonitor->VerifyFound();
- }
-
- {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
- const VkAllocationCallbacks allocator = {nullptr, Alloc::alloc, Alloc::realloc, Alloc::free, Alloc::internalAlloc, nullptr};
- vkCreateCommandPool(device(), &cpci, &allocator, &cmdPool);
- m_errorMonitor->VerifyFound();
- }
-}
-
-TEST_F(VkLayerTest, MismatchedQueueFamiliesOnSubmit) {
- TEST_DESCRIPTION(
- "Submit command buffer created using one queue family and attempt to submit them on a queue created in a different queue "
- "family.");
-
- ASSERT_NO_FATAL_FAILURE(Init()); // assumes it initializes all queue families on vkCreateDevice
-
- // This test is meaningless unless we have multiple queue families
- auto queue_family_properties = m_device->phy().queue_properties();
- std::vector<uint32_t> queue_families;
- for (uint32_t i = 0; i < queue_family_properties.size(); ++i)
- if (queue_family_properties[i].queueCount > 0) queue_families.push_back(i);
-
- if (queue_families.size() < 2) {
- printf("%s Device only has one queue family; skipped.\n", kSkipPrefix);
- return;
- }
-
- const uint32_t queue_family = queue_families[0];
-
- const uint32_t other_queue_family = queue_families[1];
- VkQueue other_queue;
- vkGetDeviceQueue(m_device->device(), other_queue_family, 0, &other_queue);
-
- VkCommandPoolObj cmd_pool(m_device, queue_family);
- VkCommandBufferObj cmd_buff(m_device, &cmd_pool);
-
- cmd_buff.begin();
- cmd_buff.end();
-
- // Submit on the wrong queue
- VkSubmitInfo submit_info = {};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &cmd_buff.handle();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkQueueSubmit-pCommandBuffers-00074");
- vkQueueSubmit(other_queue, 1, &submit_info, VK_NULL_HANDLE);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, TemporaryExternalSemaphore) {
-#ifdef _WIN32
- const auto extension_name = VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME;
- const auto handle_type = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR;
-#else
- const auto extension_name = VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME;
- const auto handle_type = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR;
-#endif
- // Check for external semaphore instance extensions
- if (InstanceExtensionSupported(VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME);
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- } else {
- printf("%s External semaphore extension not supported, skipping test\n", kSkipPrefix);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- // Check for external semaphore device extensions
- if (DeviceExtensionSupported(gpu(), nullptr, extension_name)) {
- m_device_extension_names.push_back(extension_name);
- m_device_extension_names.push_back(VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME);
- } else {
- printf("%s External semaphore extension not supported, skipping test\n", kSkipPrefix);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- // Check for external semaphore import and export capability
- VkPhysicalDeviceExternalSemaphoreInfoKHR esi = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR, nullptr,
- handle_type};
- VkExternalSemaphorePropertiesKHR esp = {VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR, nullptr};
- auto vkGetPhysicalDeviceExternalSemaphorePropertiesKHR =
- (PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR)vkGetInstanceProcAddr(
- instance(), "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR");
- vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(gpu(), &esi, &esp);
-
- if (!(esp.externalSemaphoreFeatures & VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT_KHR) ||
- !(esp.externalSemaphoreFeatures & VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT_KHR)) {
- printf("%s External semaphore does not support importing and exporting, skipping test\n", kSkipPrefix);
- return;
- }
-
- VkResult err;
-
- // Create a semaphore to export payload from
- VkExportSemaphoreCreateInfoKHR esci = {VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR, nullptr, handle_type};
- VkSemaphoreCreateInfo sci = {VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO, &esci, 0};
-
- VkSemaphore export_semaphore;
- err = vkCreateSemaphore(m_device->device(), &sci, nullptr, &export_semaphore);
- ASSERT_VK_SUCCESS(err);
-
- // Create a semaphore to import payload into
- sci.pNext = nullptr;
- VkSemaphore import_semaphore;
- err = vkCreateSemaphore(m_device->device(), &sci, nullptr, &import_semaphore);
- ASSERT_VK_SUCCESS(err);
-
-#ifdef _WIN32
- // Export semaphore payload to an opaque handle
- HANDLE handle = nullptr;
- VkSemaphoreGetWin32HandleInfoKHR ghi = {VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR, nullptr, export_semaphore,
- handle_type};
- auto vkGetSemaphoreWin32HandleKHR =
- (PFN_vkGetSemaphoreWin32HandleKHR)vkGetDeviceProcAddr(m_device->device(), "vkGetSemaphoreWin32HandleKHR");
- err = vkGetSemaphoreWin32HandleKHR(m_device->device(), &ghi, &handle);
- ASSERT_VK_SUCCESS(err);
-
- // Import opaque handle exported above *temporarily*
- VkImportSemaphoreWin32HandleInfoKHR ihi = {VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR,
- nullptr,
- import_semaphore,
- VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR,
- handle_type,
- handle,
- nullptr};
- auto vkImportSemaphoreWin32HandleKHR =
- (PFN_vkImportSemaphoreWin32HandleKHR)vkGetDeviceProcAddr(m_device->device(), "vkImportSemaphoreWin32HandleKHR");
- err = vkImportSemaphoreWin32HandleKHR(m_device->device(), &ihi);
- ASSERT_VK_SUCCESS(err);
-#else
- // Export semaphore payload to an opaque handle
- int fd = 0;
- VkSemaphoreGetFdInfoKHR ghi = {VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR, nullptr, export_semaphore, handle_type};
- auto vkGetSemaphoreFdKHR = (PFN_vkGetSemaphoreFdKHR)vkGetDeviceProcAddr(m_device->device(), "vkGetSemaphoreFdKHR");
- err = vkGetSemaphoreFdKHR(m_device->device(), &ghi, &fd);
- ASSERT_VK_SUCCESS(err);
-
- // Import opaque handle exported above *temporarily*
- VkImportSemaphoreFdInfoKHR ihi = {VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR, nullptr, import_semaphore,
- VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR, handle_type, fd};
- auto vkImportSemaphoreFdKHR = (PFN_vkImportSemaphoreFdKHR)vkGetDeviceProcAddr(m_device->device(), "vkImportSemaphoreFdKHR");
- err = vkImportSemaphoreFdKHR(m_device->device(), &ihi);
- ASSERT_VK_SUCCESS(err);
-#endif
-
- // Wait on the imported semaphore twice in vkQueueSubmit, the second wait should be an error
- VkPipelineStageFlags flags = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
- VkSubmitInfo si[] = {
- {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 0, nullptr, &flags, 0, nullptr, 1, &export_semaphore},
- {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 1, &import_semaphore, &flags, 0, nullptr, 0, nullptr},
- {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 0, nullptr, &flags, 0, nullptr, 1, &export_semaphore},
- {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 1, &import_semaphore, &flags, 0, nullptr, 0, nullptr},
- };
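- // The import above was temporary, so the wait in si[1] consumes the imported payload and the
- // semaphore reverts to its own (unsignaled) payload; the wait in si[3] therefore has no
- // corresponding signal and should be flagged.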
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "has no way to be signaled");
- vkQueueSubmit(m_device->m_queue, 4, si, VK_NULL_HANDLE);
- m_errorMonitor->VerifyFound();
-
- auto index = m_device->graphics_queue_node_index_;
- if (m_device->queue_props[index].queueFlags & VK_QUEUE_SPARSE_BINDING_BIT) {
- // Wait on the imported semaphore twice in vkQueueBindSparse, the second wait should be an error
- VkBindSparseInfo bi[] = {
- {VK_STRUCTURE_TYPE_BIND_SPARSE_INFO, nullptr, 0, nullptr, 0, nullptr, 0, nullptr, 0, nullptr, 1, &export_semaphore},
- {VK_STRUCTURE_TYPE_BIND_SPARSE_INFO, nullptr, 1, &import_semaphore, 0, nullptr, 0, nullptr, 0, nullptr, 0, nullptr},
- {VK_STRUCTURE_TYPE_BIND_SPARSE_INFO, nullptr, 0, nullptr, 0, nullptr, 0, nullptr, 0, nullptr, 1, &export_semaphore},
- {VK_STRUCTURE_TYPE_BIND_SPARSE_INFO, nullptr, 1, &import_semaphore, 0, nullptr, 0, nullptr, 0, nullptr, 0, nullptr},
- };
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "has no way to be signaled");
- vkQueueBindSparse(m_device->m_queue, 4, bi, VK_NULL_HANDLE);
- m_errorMonitor->VerifyFound();
- }
-
- // Cleanup
- err = vkQueueWaitIdle(m_device->m_queue);
- ASSERT_VK_SUCCESS(err);
- vkDestroySemaphore(m_device->device(), export_semaphore, nullptr);
- vkDestroySemaphore(m_device->device(), import_semaphore, nullptr);
-}
-
-TEST_F(VkLayerTest, TemporaryExternalFence) {
-#ifdef _WIN32
- const auto extension_name = VK_KHR_EXTERNAL_FENCE_WIN32_EXTENSION_NAME;
- const auto handle_type = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR;
-#else
- const auto extension_name = VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME;
- const auto handle_type = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR;
-#endif
- // Check for external fence instance extensions
- if (InstanceExtensionSupported(VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME);
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- } else {
- printf("%s External fence extension not supported, skipping test\n", kSkipPrefix);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- // Check for external fence device extensions
- if (DeviceExtensionSupported(gpu(), nullptr, extension_name)) {
- m_device_extension_names.push_back(extension_name);
- m_device_extension_names.push_back(VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME);
- } else {
- printf("%s External fence extension not supported, skipping test\n", kSkipPrefix);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- // Check for external fence import and export capability
- VkPhysicalDeviceExternalFenceInfoKHR efi = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR, nullptr, handle_type};
- VkExternalFencePropertiesKHR efp = {VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR, nullptr};
- auto vkGetPhysicalDeviceExternalFencePropertiesKHR = (PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR)vkGetInstanceProcAddr(
- instance(), "vkGetPhysicalDeviceExternalFencePropertiesKHR");
- vkGetPhysicalDeviceExternalFencePropertiesKHR(gpu(), &efi, &efp);
-
- if (!(efp.externalFenceFeatures & VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT_KHR) ||
- !(efp.externalFenceFeatures & VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT_KHR)) {
- printf("%s External fence does not support importing and exporting, skipping test\n", kSkipPrefix);
- return;
- }
-
- VkResult err;
-
- // Create a fence to export payload from
- VkFence export_fence;
- {
- VkExportFenceCreateInfoKHR efci = {VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR, nullptr, handle_type};
- VkFenceCreateInfo fci = {VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, &efci, 0};
- err = vkCreateFence(m_device->device(), &fci, nullptr, &export_fence);
- ASSERT_VK_SUCCESS(err);
- }
-
- // Create a fence to import payload into
- VkFence import_fence;
- {
- VkFenceCreateInfo fci = {VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, nullptr, 0};
- err = vkCreateFence(m_device->device(), &fci, nullptr, &import_fence);
- ASSERT_VK_SUCCESS(err);
- }
-
-#ifdef _WIN32
- // Export fence payload to an opaque handle
- HANDLE handle = nullptr;
- {
- VkFenceGetWin32HandleInfoKHR ghi = {VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR, nullptr, export_fence, handle_type};
- auto vkGetFenceWin32HandleKHR =
- (PFN_vkGetFenceWin32HandleKHR)vkGetDeviceProcAddr(m_device->device(), "vkGetFenceWin32HandleKHR");
- err = vkGetFenceWin32HandleKHR(m_device->device(), &ghi, &handle);
- ASSERT_VK_SUCCESS(err);
- }
-
- // Import opaque handle exported above
- {
- VkImportFenceWin32HandleInfoKHR ifi = {VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR,
- nullptr,
- import_fence,
- VK_FENCE_IMPORT_TEMPORARY_BIT_KHR,
- handle_type,
- handle,
- nullptr};
- auto vkImportFenceWin32HandleKHR =
- (PFN_vkImportFenceWin32HandleKHR)vkGetDeviceProcAddr(m_device->device(), "vkImportFenceWin32HandleKHR");
- err = vkImportFenceWin32HandleKHR(m_device->device(), &ifi);
- ASSERT_VK_SUCCESS(err);
- }
-#else
- // Export fence payload to an opaque handle
- int fd = 0;
- {
- VkFenceGetFdInfoKHR gfi = {VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR, nullptr, export_fence, handle_type};
- auto vkGetFenceFdKHR = (PFN_vkGetFenceFdKHR)vkGetDeviceProcAddr(m_device->device(), "vkGetFenceFdKHR");
- err = vkGetFenceFdKHR(m_device->device(), &gfi, &fd);
- ASSERT_VK_SUCCESS(err);
- }
-
- // Import opaque handle exported above
- {
- VkImportFenceFdInfoKHR ifi = {VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR, nullptr, import_fence,
- VK_FENCE_IMPORT_TEMPORARY_BIT_KHR, handle_type, fd};
- auto vkImportFenceFdKHR = (PFN_vkImportFenceFdKHR)vkGetDeviceProcAddr(m_device->device(), "vkImportFenceFdKHR");
- err = vkImportFenceFdKHR(m_device->device(), &ifi);
- ASSERT_VK_SUCCESS(err);
- }
-#endif
-
- // Undo the temporary import
- vkResetFences(m_device->device(), 1, &import_fence);
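- // Per the external fence semantics, resetting the fence removes the temporarily imported payload
- // and restores the fence's own permanent payload.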
-
- // Signal the previously imported fence twice, the second signal should produce a validation error
- vkQueueSubmit(m_device->m_queue, 0, nullptr, import_fence);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "is already in use by another submission.");
- vkQueueSubmit(m_device->m_queue, 0, nullptr, import_fence);
- m_errorMonitor->VerifyFound();
-
- // Cleanup
- err = vkQueueWaitIdle(m_device->m_queue);
- ASSERT_VK_SUCCESS(err);
- vkDestroyFence(m_device->device(), export_fence, nullptr);
- vkDestroyFence(m_device->device(), import_fence, nullptr);
-}
-
-TEST_F(VkLayerTest, InvalidCmdBufferEventDestroyed) {
- TEST_DESCRIPTION("Attempt to draw with a command buffer that is invalid due to an event dependency being destroyed.");
- ASSERT_NO_FATAL_FAILURE(Init());
-
- VkEvent event;
- VkEventCreateInfo evci = {};
- evci.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
- VkResult result = vkCreateEvent(m_device->device(), &evci, NULL, &event);
- ASSERT_VK_SUCCESS(result);
-
- m_commandBuffer->begin();
- vkCmdSetEvent(m_commandBuffer->handle(), event, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT);
- m_commandBuffer->end();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBuffer-VkEvent");
- // Destroy event dependency prior to submit to cause ERROR
- vkDestroyEvent(m_device->device(), event, NULL);
-
- VkSubmitInfo submit_info = {};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &m_commandBuffer->handle();
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
-
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, InvalidCmdBufferQueryPoolDestroyed) {
- TEST_DESCRIPTION("Attempt to draw with a command buffer that is invalid due to a query pool dependency being destroyed.");
- ASSERT_NO_FATAL_FAILURE(Init());
-
- VkQueryPool query_pool;
- VkQueryPoolCreateInfo qpci{};
- qpci.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
- qpci.queryType = VK_QUERY_TYPE_TIMESTAMP;
- qpci.queryCount = 1;
- VkResult result = vkCreateQueryPool(m_device->device(), &qpci, nullptr, &query_pool);
- ASSERT_VK_SUCCESS(result);
-
- m_commandBuffer->begin();
- vkCmdResetQueryPool(m_commandBuffer->handle(), query_pool, 0, 1);
- m_commandBuffer->end();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBuffer-VkQueryPool");
- // Destroy query pool dependency prior to submit to cause ERROR
- vkDestroyQueryPool(m_device->device(), query_pool, NULL);
-
- VkSubmitInfo submit_info = {};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &m_commandBuffer->handle();
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
-
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, DeviceFeature2AndVertexAttributeDivisorExtensionUnenabled) {
- TEST_DESCRIPTION(
- "Test using VK_KHR_get_physical_device_properties2 and VK_EXT_vertex_attribute_divisor structures "
- "without enabling either extension.");
-
- VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT vadf = {};
- vadf.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT;
- VkPhysicalDeviceFeatures2 pd_features2 = {};
- pd_features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
- pd_features2.pNext = &vadf;
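- // Neither VK_KHR_get_physical_device_properties2 nor VK_EXT_vertex_attribute_divisor is enabled,
- // so chaining these structures into VkDeviceCreateInfo should be reported at vkCreateDevice time.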
-
- ASSERT_NO_FATAL_FAILURE(Init());
- vk_testing::QueueCreateInfoArray queue_info(m_device->queue_props);
- VkDeviceCreateInfo device_create_info = {};
- device_create_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
- device_create_info.pNext = &pd_features2;
- device_create_info.queueCreateInfoCount = queue_info.size();
- device_create_info.pQueueCreateInfos = queue_info.data();
- VkDevice testDevice;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VK_KHR_get_physical_device_properties2 must be enabled when it creates an instance");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VK_EXT_vertex_attribute_divisor must be enabled when it creates a device");
- m_errorMonitor->SetUnexpectedError("Failed to create device chain");
- vkCreateDevice(gpu(), &device_create_info, NULL, &testDevice);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, InvalidDeviceMask) {
- TEST_DESCRIPTION("Invalid deviceMask.");
- SetTargetApiVersion(VK_API_VERSION_1_1);
-
- bool support_surface = true;
- if (!AddSurfaceInstanceExtension()) {
- printf("%s surface extensions not supported, skipping VkAcquireNextImageInfoKHR test\n", kSkipPrefix);
- support_surface = false;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- if (support_surface) {
- if (!AddSwapchainDeviceExtension()) {
- printf("%s swapchain extensions not supported, skipping BindSwapchainImageMemory test\n", kSkipPrefix);
- support_surface = false;
- }
- }
-
- if (DeviceValidationVersion() < VK_API_VERSION_1_1) {
- printf("%s Device Groups requires Vulkan 1.1+, skipping test\n", kSkipPrefix);
- return;
- }
- uint32_t physical_device_group_count = 0;
- vkEnumeratePhysicalDeviceGroups(instance(), &physical_device_group_count, nullptr);
-
- if (physical_device_group_count == 0) {
- printf("%s physical_device_group_count is 0, skipping test\n", kSkipPrefix);
- return;
- }
-
- std::vector<VkPhysicalDeviceGroupProperties> physical_device_group(physical_device_group_count,
- {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES});
- vkEnumeratePhysicalDeviceGroups(instance(), &physical_device_group_count, physical_device_group.data());
- VkDeviceGroupDeviceCreateInfo create_device_pnext = {};
- create_device_pnext.sType = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO;
- create_device_pnext.physicalDeviceCount = physical_device_group[0].physicalDeviceCount;
- create_device_pnext.pPhysicalDevices = physical_device_group[0].physicalDevices;
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &create_device_pnext, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- if (!InitSwapchain()) {
- printf("%s Cannot create surface or swapchain, skipping VkAcquireNextImageInfoKHR test\n", kSkipPrefix);
- support_surface = false;
- }
-
- // Test VkMemoryAllocateFlagsInfo
- VkMemoryAllocateFlagsInfo alloc_flags_info = {};
- alloc_flags_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO;
- alloc_flags_info.flags = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT;
- alloc_flags_info.deviceMask = 0xFFFFFFFF;
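- // 0xFFFFFFFF sets bits for physical devices that are not part of the device group (invalid), and a
- // deviceMask of 0 (tested below) is also invalid because at least one bit must be set.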
- VkMemoryAllocateInfo alloc_info = {};
- alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
- alloc_info.pNext = &alloc_flags_info;
- alloc_info.memoryTypeIndex = 0;
- alloc_info.allocationSize = 32;
-
- VkDeviceMemory mem;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMemoryAllocateFlagsInfo-deviceMask-00675");
- vkAllocateMemory(m_device->device(), &alloc_info, NULL, &mem);
- m_errorMonitor->VerifyFound();
-
- alloc_flags_info.deviceMask = 0;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMemoryAllocateFlagsInfo-deviceMask-00676");
- vkAllocateMemory(m_device->device(), &alloc_info, NULL, &mem);
- m_errorMonitor->VerifyFound();
-
- // Test VkDeviceGroupCommandBufferBeginInfo
- VkDeviceGroupCommandBufferBeginInfo dev_grp_cmd_buf_info = {};
- dev_grp_cmd_buf_info.sType = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO;
- dev_grp_cmd_buf_info.deviceMask = 0xFFFFFFFF;
- VkCommandBufferBeginInfo cmd_buf_info = {};
- cmd_buf_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
- cmd_buf_info.pNext = &dev_grp_cmd_buf_info;
-
- m_commandBuffer->reset();
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkDeviceGroupCommandBufferBeginInfo-deviceMask-00106");
- vkBeginCommandBuffer(m_commandBuffer->handle(), &cmd_buf_info);
- m_errorMonitor->VerifyFound();
-
- dev_grp_cmd_buf_info.deviceMask = 0;
- m_commandBuffer->reset();
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkDeviceGroupCommandBufferBeginInfo-deviceMask-00107");
- vkBeginCommandBuffer(m_commandBuffer->handle(), &cmd_buf_info);
- m_errorMonitor->VerifyFound();
-
- // Test VkDeviceGroupRenderPassBeginInfo
- dev_grp_cmd_buf_info.deviceMask = 0x00000001;
- m_commandBuffer->reset();
- vkBeginCommandBuffer(m_commandBuffer->handle(), &cmd_buf_info);
-
- VkDeviceGroupRenderPassBeginInfo dev_grp_rp_info = {};
- dev_grp_rp_info.sType = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO;
- dev_grp_rp_info.deviceMask = 0xFFFFFFFF;
- m_renderPassBeginInfo.pNext = &dev_grp_rp_info;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDeviceGroupRenderPassBeginInfo-deviceMask-00905");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDeviceGroupRenderPassBeginInfo-deviceMask-00907");
- vkCmdBeginRenderPass(m_commandBuffer->handle(), &m_renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
- m_errorMonitor->VerifyFound();
-
- dev_grp_rp_info.deviceMask = 0;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDeviceGroupRenderPassBeginInfo-deviceMask-00906");
- vkCmdBeginRenderPass(m_commandBuffer->handle(), &m_renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
- m_errorMonitor->VerifyFound();
-
- dev_grp_rp_info.deviceMask = 0x00000001;
- dev_grp_rp_info.deviceRenderAreaCount = physical_device_group[0].physicalDeviceCount + 1;
- std::vector<VkRect2D> device_render_areas(dev_grp_rp_info.deviceRenderAreaCount, m_renderPassBeginInfo.renderArea);
- dev_grp_rp_info.pDeviceRenderAreas = device_render_areas.data();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkDeviceGroupRenderPassBeginInfo-deviceRenderAreaCount-00908");
- vkCmdBeginRenderPass(m_commandBuffer->handle(), &m_renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
- m_errorMonitor->VerifyFound();
-
- // Test vkCmdSetDeviceMask()
- vkCmdSetDeviceMask(m_commandBuffer->handle(), 0x00000001);
-
- dev_grp_rp_info.deviceRenderAreaCount = physical_device_group[0].physicalDeviceCount;
- vkCmdBeginRenderPass(m_commandBuffer->handle(), &m_renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetDeviceMask-deviceMask-00108");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetDeviceMask-deviceMask-00110");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetDeviceMask-deviceMask-00111");
- vkCmdSetDeviceMask(m_commandBuffer->handle(), 0xFFFFFFFF);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetDeviceMask-deviceMask-00109");
- vkCmdSetDeviceMask(m_commandBuffer->handle(), 0);
- m_errorMonitor->VerifyFound();
-
- VkSemaphoreCreateInfo semaphore_create_info = {};
- semaphore_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
- VkSemaphore semaphore;
- ASSERT_VK_SUCCESS(vkCreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore));
- VkSemaphore semaphore2;
- ASSERT_VK_SUCCESS(vkCreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore2));
- VkFenceCreateInfo fence_create_info = {};
- fence_create_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
- VkFence fence;
- ASSERT_VK_SUCCESS(vkCreateFence(m_device->device(), &fence_create_info, nullptr, &fence));
-
- if (support_surface) {
- // Test VkAcquireNextImageInfoKHR
- uint32_t imageIndex = 0;
- VkAcquireNextImageInfoKHR acquire_next_image_info = {};
- acquire_next_image_info.sType = VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR;
- acquire_next_image_info.semaphore = semaphore;
- acquire_next_image_info.swapchain = m_swapchain;
- acquire_next_image_info.fence = fence;
- acquire_next_image_info.deviceMask = 0xFFFFFFFF;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkAcquireNextImageInfoKHR-deviceMask-01290");
- vkAcquireNextImage2KHR(m_device->device(), &acquire_next_image_info, &imageIndex);
- m_errorMonitor->VerifyFound();
-
- vkWaitForFences(m_device->device(), 1, &fence, VK_TRUE, std::numeric_limits<int>::max());
- vkResetFences(m_device->device(), 1, &fence);
-
- acquire_next_image_info.semaphore = semaphore2;
- acquire_next_image_info.deviceMask = 0;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkAcquireNextImageInfoKHR-deviceMask-01291");
- vkAcquireNextImage2KHR(m_device->device(), &acquire_next_image_info, &imageIndex);
- m_errorMonitor->VerifyFound();
- DestroySwapchain();
- }
-
- // Test VkDeviceGroupSubmitInfo
- VkDeviceGroupSubmitInfo device_group_submit_info = {};
- device_group_submit_info.sType = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO;
- device_group_submit_info.commandBufferCount = 1;
- std::array<uint32_t, 1> command_buffer_device_masks = {0xFFFFFFFF};
- device_group_submit_info.pCommandBufferDeviceMasks = command_buffer_device_masks.data();
-
- VkSubmitInfo submit_info = {};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.pNext = &device_group_submit_info;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &m_commandBuffer->handle();
-
- m_commandBuffer->reset();
- vkBeginCommandBuffer(m_commandBuffer->handle(), &cmd_buf_info);
- vkEndCommandBuffer(m_commandBuffer->handle());
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkDeviceGroupSubmitInfo-pCommandBufferDeviceMasks-00086");
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
- m_errorMonitor->VerifyFound();
- vkQueueWaitIdle(m_device->m_queue);
-
- vkWaitForFences(m_device->device(), 1, &fence, VK_TRUE, std::numeric_limits<int>::max());
- vkDestroyFence(m_device->device(), fence, nullptr);
- vkDestroySemaphore(m_device->device(), semaphore, nullptr);
- vkDestroySemaphore(m_device->device(), semaphore2, nullptr);
-}
-
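- // Editor's sketch, not part of the original test: a hedged illustration of how an
- // application would normally build a valid deviceMask for vkCmdSetDeviceMask. It
- // reuses physical_device_group[0] from the test above; the other names are illustrative.
- //
- //   // A valid mask is non-zero and only sets bits for devices that exist in the group
- //   // (the deviceMask VUIDs exercised above).
- //   uint32_t valid_mask = (1u << physical_device_group[0].physicalDeviceCount) - 1u;
- //   vkCmdSetDeviceMask(m_commandBuffer->handle(), valid_mask);
-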
-TEST_F(VkLayerTest, ValidationCacheTestBadMerge) {
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- if (DeviceExtensionSupported(gpu(), "VK_LAYER_LUNARG_core_validation", VK_EXT_VALIDATION_CACHE_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_EXT_VALIDATION_CACHE_EXTENSION_NAME);
- } else {
- printf("%s %s not supported, skipping test\n", kSkipPrefix, VK_EXT_VALIDATION_CACHE_EXTENSION_NAME);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- // Load extension functions
- auto fpCreateValidationCache =
- (PFN_vkCreateValidationCacheEXT)vkGetDeviceProcAddr(m_device->device(), "vkCreateValidationCacheEXT");
- auto fpDestroyValidationCache =
- (PFN_vkDestroyValidationCacheEXT)vkGetDeviceProcAddr(m_device->device(), "vkDestroyValidationCacheEXT");
- auto fpMergeValidationCaches =
- (PFN_vkMergeValidationCachesEXT)vkGetDeviceProcAddr(m_device->device(), "vkMergeValidationCachesEXT");
- if (!fpCreateValidationCache || !fpDestroyValidationCache || !fpMergeValidationCaches) {
- printf("%s Failed to load function pointers for %s\n", kSkipPrefix, VK_EXT_VALIDATION_CACHE_EXTENSION_NAME);
- return;
- }
-
- VkValidationCacheCreateInfoEXT validationCacheCreateInfo;
- validationCacheCreateInfo.sType = VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT;
- validationCacheCreateInfo.pNext = NULL;
- validationCacheCreateInfo.initialDataSize = 0;
- validationCacheCreateInfo.pInitialData = NULL;
- validationCacheCreateInfo.flags = 0;
- VkValidationCacheEXT validationCache = VK_NULL_HANDLE;
- VkResult res = fpCreateValidationCache(m_device->device(), &validationCacheCreateInfo, nullptr, &validationCache);
- ASSERT_VK_SUCCESS(res);
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkMergeValidationCachesEXT-dstCache-01536");
- res = fpMergeValidationCaches(m_device->device(), validationCache, 1, &validationCache);
- m_errorMonitor->VerifyFound();
-
- fpDestroyValidationCache(m_device->device(), validationCache, nullptr);
-}
-
-TEST_F(VkLayerTest, InvalidQueueFamilyIndex) {
- // Miscellaneous queueFamilyIndex validation tests
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
- VkBufferCreateInfo buffCI = {};
- buffCI.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
- buffCI.size = 1024;
- buffCI.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
- buffCI.queueFamilyIndexCount = 2;
- // Introduce failure by specifying invalid queue_family_index
- uint32_t qfi[2];
- qfi[0] = 777;
- qfi[1] = 0;
-
- buffCI.pQueueFamilyIndices = qfi;
- buffCI.sharingMode = VK_SHARING_MODE_CONCURRENT; // qfi only matters in CONCURRENT mode
-
- // Test for queue family index out of range
- CreateBufferTest(*this, &buffCI, "VUID-VkBufferCreateInfo-sharingMode-01419");
-
- // Test for non-unique QFI in array
- qfi[0] = 0;
- CreateBufferTest(*this, &buffCI, "VUID-VkBufferCreateInfo-sharingMode-01419");
-
- if (m_device->queue_props.size() > 2) {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "which was not created allowing concurrent");
-
- // Create buffer shared to queue families 1 and 2, but submitted on queue family 0
- buffCI.queueFamilyIndexCount = 2;
- qfi[0] = 1;
- qfi[1] = 2;
- VkBufferObj ib;
- ib.init(*m_device, buffCI);
-
- m_commandBuffer->begin();
- vkCmdFillBuffer(m_commandBuffer->handle(), ib.handle(), 0, 16, 5);
- m_commandBuffer->end();
- m_commandBuffer->QueueCommandBuffer(false);
- m_errorMonitor->VerifyFound();
- }
-}
-
-TEST_F(VkLayerTest, InvalidQueryPoolCreate) {
- TEST_DESCRIPTION("Attempt to create a query pool for PIPELINE_STATISTICS without enabling pipeline stats for the device.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- vk_testing::QueueCreateInfoArray queue_info(m_device->queue_props);
-
- VkDevice local_device;
- VkDeviceCreateInfo device_create_info = {};
- auto features = m_device->phy().features();
- // Intentionally disable pipeline stats
- features.pipelineStatisticsQuery = VK_FALSE;
- device_create_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
- device_create_info.pNext = NULL;
- device_create_info.queueCreateInfoCount = queue_info.size();
- device_create_info.pQueueCreateInfos = queue_info.data();
- device_create_info.enabledLayerCount = 0;
- device_create_info.ppEnabledLayerNames = NULL;
- device_create_info.pEnabledFeatures = &features;
- VkResult err = vkCreateDevice(gpu(), &device_create_info, nullptr, &local_device);
- ASSERT_VK_SUCCESS(err);
-
- VkQueryPoolCreateInfo qpci{};
- qpci.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
- qpci.queryType = VK_QUERY_TYPE_PIPELINE_STATISTICS;
- qpci.queryCount = 1;
- VkQueryPool query_pool;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkQueryPoolCreateInfo-queryType-00791");
- vkCreateQueryPool(local_device, &qpci, nullptr, &query_pool);
- m_errorMonitor->VerifyFound();
-
- vkDestroyDevice(local_device, nullptr);
-}
-
-TEST_F(VkLayerTest, UnclosedQuery) {
- TEST_DESCRIPTION("End a command buffer with a query still in progress.");
-
- const char *invalid_query = "VUID-vkEndCommandBuffer-commandBuffer-00061";
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- VkEvent event;
- VkEventCreateInfo event_create_info{};
- event_create_info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
- vkCreateEvent(m_device->device(), &event_create_info, nullptr, &event);
-
- VkQueue queue = VK_NULL_HANDLE;
- vkGetDeviceQueue(m_device->device(), m_device->graphics_queue_node_index_, 0, &queue);
-
- m_commandBuffer->begin();
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, invalid_query);
-
- VkQueryPool query_pool;
- VkQueryPoolCreateInfo query_pool_create_info = {};
- query_pool_create_info.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
- query_pool_create_info.queryType = VK_QUERY_TYPE_OCCLUSION;
- query_pool_create_info.queryCount = 1;
- vkCreateQueryPool(m_device->device(), &query_pool_create_info, nullptr, &query_pool);
-
- vkCmdResetQueryPool(m_commandBuffer->handle(), query_pool, 0 /*startQuery*/, 1 /*queryCount*/);
- vkCmdBeginQuery(m_commandBuffer->handle(), query_pool, 0, 0);
-
- vkEndCommandBuffer(m_commandBuffer->handle());
- m_errorMonitor->VerifyFound();
-
- vkDestroyQueryPool(m_device->device(), query_pool, nullptr);
- vkDestroyEvent(m_device->device(), event, nullptr);
-}
-
-TEST_F(VkLayerTest, QueryPreciseBit) {
- TEST_DESCRIPTION("Check for correct Query Precise Bit circumstances.");
- ASSERT_NO_FATAL_FAILURE(Init());
-
- // These tests require that the device support pipeline statistics query
- VkPhysicalDeviceFeatures device_features = {};
- ASSERT_NO_FATAL_FAILURE(GetPhysicalDeviceFeatures(&device_features));
- if (VK_TRUE != device_features.pipelineStatisticsQuery) {
- printf("%s Test requires unsupported pipelineStatisticsQuery feature. Skipped.\n", kSkipPrefix);
- return;
- }
-
- std::vector<const char *> device_extension_names;
- auto features = m_device->phy().features();
-
- // Test for precise bit when query type is not OCCLUSION
- if (features.occlusionQueryPrecise) {
- VkEvent event;
- VkEventCreateInfo event_create_info{};
- event_create_info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
- vkCreateEvent(m_device->handle(), &event_create_info, nullptr, &event);
-
- m_commandBuffer->begin();
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBeginQuery-queryType-00800");
-
- VkQueryPool query_pool;
- VkQueryPoolCreateInfo query_pool_create_info = {};
- query_pool_create_info.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
- query_pool_create_info.queryType = VK_QUERY_TYPE_PIPELINE_STATISTICS;
- query_pool_create_info.queryCount = 1;
- vkCreateQueryPool(m_device->handle(), &query_pool_create_info, nullptr, &query_pool);
-
- vkCmdResetQueryPool(m_commandBuffer->handle(), query_pool, 0, 1);
- vkCmdBeginQuery(m_commandBuffer->handle(), query_pool, 0, VK_QUERY_CONTROL_PRECISE_BIT);
- m_errorMonitor->VerifyFound();
-
- m_commandBuffer->end();
- vkDestroyQueryPool(m_device->handle(), query_pool, nullptr);
- vkDestroyEvent(m_device->handle(), event, nullptr);
- }
-
- // Test for precise bit when precise feature is not available
- features.occlusionQueryPrecise = false;
- VkDeviceObj test_device(0, gpu(), device_extension_names, &features);
-
- VkCommandPoolCreateInfo pool_create_info{};
- pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
- pool_create_info.queueFamilyIndex = test_device.graphics_queue_node_index_;
-
- VkCommandPool command_pool;
- vkCreateCommandPool(test_device.handle(), &pool_create_info, nullptr, &command_pool);
-
- VkCommandBufferAllocateInfo cmd = {};
- cmd.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
- cmd.pNext = NULL;
- cmd.commandPool = command_pool;
- cmd.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
- cmd.commandBufferCount = 1;
-
- VkCommandBuffer cmd_buffer;
- VkResult err = vkAllocateCommandBuffers(test_device.handle(), &cmd, &cmd_buffer);
- ASSERT_VK_SUCCESS(err);
-
- VkEvent event;
- VkEventCreateInfo event_create_info{};
- event_create_info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
- vkCreateEvent(test_device.handle(), &event_create_info, nullptr, &event);
-
- VkCommandBufferBeginInfo begin_info = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, nullptr,
- VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT, nullptr};
-
- vkBeginCommandBuffer(cmd_buffer, &begin_info);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBeginQuery-queryType-00800");
-
- VkQueryPool query_pool;
- VkQueryPoolCreateInfo query_pool_create_info = {};
- query_pool_create_info.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
- query_pool_create_info.queryType = VK_QUERY_TYPE_OCCLUSION;
- query_pool_create_info.queryCount = 1;
- vkCreateQueryPool(test_device.handle(), &query_pool_create_info, nullptr, &query_pool);
-
- vkCmdResetQueryPool(cmd_buffer, query_pool, 0, 1);
- vkCmdBeginQuery(cmd_buffer, query_pool, 0, VK_QUERY_CONTROL_PRECISE_BIT);
- m_errorMonitor->VerifyFound();
-
- vkEndCommandBuffer(cmd_buffer);
- vkDestroyQueryPool(test_device.handle(), query_pool, nullptr);
- vkDestroyEvent(test_device.handle(), event, nullptr);
- vkDestroyCommandPool(test_device.handle(), command_pool, nullptr);
-}
-
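- // Editor's sketch, not part of the original test: applications typically avoid
- // VUID-vkCmdBeginQuery-queryType-00800 by requesting the precise bit only for
- // occlusion queries and only when occlusionQueryPrecise is supported. The
- // pool_type variable below is illustrative.
- //
- //   VkPhysicalDeviceFeatures feats{};
- //   vkGetPhysicalDeviceFeatures(gpu(), &feats);
- //   VkQueryControlFlags flags = 0;
- //   if (pool_type == VK_QUERY_TYPE_OCCLUSION && feats.occlusionQueryPrecise) {
- //       flags = VK_QUERY_CONTROL_PRECISE_BIT;
- //   }
- //   vkCmdBeginQuery(cmd_buffer, query_pool, 0, flags);
-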
-TEST_F(VkLayerTest, StageMaskGsTsEnabled) {
- TEST_DESCRIPTION(
- "Attempt to use a stageMask w/ geometry shader and tesselation shader bits enabled when those features are disabled on the "
- "device.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- std::vector<const char *> device_extension_names;
- auto features = m_device->phy().features();
- // Make sure gs & ts are disabled
- features.geometryShader = false;
- features.tessellationShader = false;
- // The sacrificial device object
- VkDeviceObj test_device(0, gpu(), device_extension_names, &features);
-
- VkCommandPoolCreateInfo pool_create_info{};
- pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
- pool_create_info.queueFamilyIndex = test_device.graphics_queue_node_index_;
-
- VkCommandPool command_pool;
- vkCreateCommandPool(test_device.handle(), &pool_create_info, nullptr, &command_pool);
-
- VkCommandBufferAllocateInfo cmd = {};
- cmd.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
- cmd.pNext = NULL;
- cmd.commandPool = command_pool;
- cmd.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
- cmd.commandBufferCount = 1;
-
- VkCommandBuffer cmd_buffer;
- VkResult err = vkAllocateCommandBuffers(test_device.handle(), &cmd, &cmd_buffer);
- ASSERT_VK_SUCCESS(err);
-
- VkEvent event;
- VkEventCreateInfo evci = {};
- evci.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
- VkResult result = vkCreateEvent(test_device.handle(), &evci, NULL, &event);
- ASSERT_VK_SUCCESS(result);
-
- VkCommandBufferBeginInfo cbbi = {};
- cbbi.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
- vkBeginCommandBuffer(cmd_buffer, &cbbi);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetEvent-stageMask-01150");
- vkCmdSetEvent(cmd_buffer, event, VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetEvent-stageMask-01151");
- vkCmdSetEvent(cmd_buffer, event, VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT);
- m_errorMonitor->VerifyFound();
-
- vkDestroyEvent(test_device.handle(), event, NULL);
- vkDestroyCommandPool(test_device.handle(), command_pool, NULL);
-}
-
-TEST_F(VkLayerTest, DescriptorPoolInUseDestroyedSignaled) {
- TEST_DESCRIPTION("Delete a DescriptorPool with a DescriptorSet that is in use.");
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitViewport());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- // Create image to update the descriptor with
- VkImageObj image(m_device);
- image.Init(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
- ASSERT_TRUE(image.initialized());
-
- VkImageView view = image.targetView(VK_FORMAT_B8G8R8A8_UNORM);
- // Create Sampler
- VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
- VkSampler sampler;
- VkResult err = vkCreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
- ASSERT_VK_SUCCESS(err);
-
- // Create PSO to be used for draw-time errors below
- VkShaderObj fs(m_device, bindStateFragSamplerShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- CreatePipelineHelper pipe(*this);
- pipe.InitInfo();
- pipe.shader_stages_ = {pipe.vs_->GetStageCreateInfo(), fs.GetStageCreateInfo()};
- pipe.dsl_bindings_ = {
- {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
- };
- const VkDynamicState dyn_states[] = {VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR};
- VkPipelineDynamicStateCreateInfo dyn_state_ci = {};
- dyn_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
- dyn_state_ci.dynamicStateCount = size(dyn_states);
- dyn_state_ci.pDynamicStates = dyn_states;
- pipe.dyn_state_ci_ = dyn_state_ci;
- pipe.InitState();
- pipe.CreateGraphicsPipeline();
-
- // Update descriptor with image and sampler
- pipe.descriptor_set_->WriteDescriptorImageInfo(0, view, sampler, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
- pipe.descriptor_set_->UpdateDescriptorSets();
-
- m_commandBuffer->begin();
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
- vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_layout_.handle(), 0, 1,
- &pipe.descriptor_set_->set_, 0, NULL);
-
- VkViewport viewport = {0, 0, 16, 16, 0, 1};
- VkRect2D scissor = {{0, 0}, {16, 16}};
- vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
- vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
-
- m_commandBuffer->Draw(1, 0, 0, 0);
- m_commandBuffer->EndRenderPass();
- m_commandBuffer->end();
- // Submit cmd buffer to put pool in-flight
- VkSubmitInfo submit_info = {};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &m_commandBuffer->handle();
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
- // Destroy pool while in-flight, causing error
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkDestroyDescriptorPool-descriptorPool-00303");
- vkDestroyDescriptorPool(m_device->device(), pipe.descriptor_set_->pool_, NULL);
- m_errorMonitor->VerifyFound();
- vkQueueWaitIdle(m_device->m_queue);
- // Cleanup
- vkDestroySampler(m_device->device(), sampler, NULL);
- m_errorMonitor->SetUnexpectedError(
- "If descriptorPool is not VK_NULL_HANDLE, descriptorPool must be a valid VkDescriptorPool handle");
- m_errorMonitor->SetUnexpectedError("Unable to remove DescriptorPool obj");
- // TODO : It seems Validation layers think ds_pool was already destroyed, even though it wasn't?
-}
-
-TEST_F(VkLayerTest, FramebufferInUseDestroyedSignaled) {
- TEST_DESCRIPTION("Delete in-use framebuffer.");
- ASSERT_NO_FATAL_FAILURE(Init());
- VkFormatProperties format_properties;
- VkResult err = VK_SUCCESS;
- vkGetPhysicalDeviceFormatProperties(gpu(), VK_FORMAT_B8G8R8A8_UNORM, &format_properties);
-
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkImageObj image(m_device);
- image.Init(256, 256, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
- ASSERT_TRUE(image.initialized());
- VkImageView view = image.targetView(VK_FORMAT_B8G8R8A8_UNORM);
-
- VkFramebufferCreateInfo fci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, m_renderPass, 1, &view, 256, 256, 1};
- VkFramebuffer fb;
- err = vkCreateFramebuffer(m_device->device(), &fci, nullptr, &fb);
- ASSERT_VK_SUCCESS(err);
-
- // Just use default renderpass with our framebuffer
- m_renderPassBeginInfo.framebuffer = fb;
- // Record an empty cmd buffer for submit
- m_commandBuffer->begin();
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
- m_commandBuffer->EndRenderPass();
- m_commandBuffer->end();
- // Submit cmd buffer to put it in-flight
- VkSubmitInfo submit_info = {};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &m_commandBuffer->handle();
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
- // Destroy framebuffer while in-flight
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkDestroyFramebuffer-framebuffer-00892");
- vkDestroyFramebuffer(m_device->device(), fb, NULL);
- m_errorMonitor->VerifyFound();
- // Wait for queue to complete so we can safely destroy everything
- vkQueueWaitIdle(m_device->m_queue);
- m_errorMonitor->SetUnexpectedError("If framebuffer is not VK_NULL_HANDLE, framebuffer must be a valid VkFramebuffer handle");
- m_errorMonitor->SetUnexpectedError("Unable to remove Framebuffer obj");
- vkDestroyFramebuffer(m_device->device(), fb, nullptr);
-}
-
-TEST_F(VkLayerTest, FramebufferImageInUseDestroyedSignaled) {
- TEST_DESCRIPTION("Delete in-use image that's child of framebuffer.");
- ASSERT_NO_FATAL_FAILURE(Init());
- VkFormatProperties format_properties;
- VkResult err = VK_SUCCESS;
- vkGetPhysicalDeviceFormatProperties(gpu(), VK_FORMAT_B8G8R8A8_UNORM, &format_properties);
-
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkImageCreateInfo image_ci = {};
- image_ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- image_ci.pNext = NULL;
- image_ci.imageType = VK_IMAGE_TYPE_2D;
- image_ci.format = VK_FORMAT_B8G8R8A8_UNORM;
- image_ci.extent.width = 256;
- image_ci.extent.height = 256;
- image_ci.extent.depth = 1;
- image_ci.mipLevels = 1;
- image_ci.arrayLayers = 1;
- image_ci.samples = VK_SAMPLE_COUNT_1_BIT;
- image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
- image_ci.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
- image_ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
- image_ci.flags = 0;
- VkImageObj image(m_device);
- image.init(&image_ci);
-
- VkImageView view = image.targetView(VK_FORMAT_B8G8R8A8_UNORM);
-
- VkFramebufferCreateInfo fci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, m_renderPass, 1, &view, 256, 256, 1};
- VkFramebuffer fb;
- err = vkCreateFramebuffer(m_device->device(), &fci, nullptr, &fb);
- ASSERT_VK_SUCCESS(err);
-
- // Just use default renderpass with our framebuffer
- m_renderPassBeginInfo.framebuffer = fb;
- // Record an empty cmd buffer for submit
- m_commandBuffer->begin();
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
- m_commandBuffer->EndRenderPass();
- m_commandBuffer->end();
- // Submit cmd buffer to put it (and attached imageView) in-flight
- VkSubmitInfo submit_info = {};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &m_commandBuffer->handle();
- // Submit cmd buffer to put framebuffer and children in-flight
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
- // Destroy image attached to framebuffer while in-flight
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkDestroyImage-image-01000");
- vkDestroyImage(m_device->device(), image.handle(), NULL);
- m_errorMonitor->VerifyFound();
- // Wait for queue to complete so we can safely destroy image and other objects
- vkQueueWaitIdle(m_device->m_queue);
- m_errorMonitor->SetUnexpectedError("If image is not VK_NULL_HANDLE, image must be a valid VkImage handle");
- m_errorMonitor->SetUnexpectedError("Unable to remove Image obj");
- vkDestroyFramebuffer(m_device->device(), fb, nullptr);
-}
-
-TEST_F(VkLayerTest, EventInUseDestroyedSignaled) {
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- m_commandBuffer->begin();
-
- VkEvent event;
- VkEventCreateInfo event_create_info = {};
- event_create_info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
- vkCreateEvent(m_device->device(), &event_create_info, nullptr, &event);
- vkCmdSetEvent(m_commandBuffer->handle(), event, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT);
-
- m_commandBuffer->end();
- vkDestroyEvent(m_device->device(), event, nullptr);
-
- VkSubmitInfo submit_info = {};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &m_commandBuffer->handle();
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "that is invalid because bound");
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, InUseDestroyedSignaled) {
- TEST_DESCRIPTION(
- "Submit a command buffer and then attempt to delete the event, semaphore, and fence it uses while they are still in flight.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- m_errorMonitor->ExpectSuccess();
-
- VkSemaphoreCreateInfo semaphore_create_info = {};
- semaphore_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
- VkSemaphore semaphore;
- ASSERT_VK_SUCCESS(vkCreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore));
- VkFenceCreateInfo fence_create_info = {};
- fence_create_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
- VkFence fence;
- ASSERT_VK_SUCCESS(vkCreateFence(m_device->device(), &fence_create_info, nullptr, &fence));
-
- VkBufferTest buffer_test(m_device, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT);
-
- CreatePipelineHelper pipe(*this);
- pipe.InitInfo();
- pipe.InitState();
- pipe.CreateGraphicsPipeline();
-
- pipe.descriptor_set_->WriteDescriptorBufferInfo(0, buffer_test.GetBuffer(), 1024, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER);
- pipe.descriptor_set_->UpdateDescriptorSets();
-
- VkEvent event;
- VkEventCreateInfo event_create_info = {};
- event_create_info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
- vkCreateEvent(m_device->device(), &event_create_info, nullptr, &event);
-
- m_commandBuffer->begin();
-
- vkCmdSetEvent(m_commandBuffer->handle(), event, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT);
-
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
- vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_layout_.handle(), 0, 1,
- &pipe.descriptor_set_->set_, 0, NULL);
-
- m_commandBuffer->end();
-
- VkSubmitInfo submit_info = {};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &m_commandBuffer->handle();
- submit_info.signalSemaphoreCount = 1;
- submit_info.pSignalSemaphores = &semaphore;
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, fence);
- m_errorMonitor->Reset(); // resume logmsg processing
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkDestroyEvent-event-01145");
- vkDestroyEvent(m_device->device(), event, nullptr);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkDestroySemaphore-semaphore-01137");
- vkDestroySemaphore(m_device->device(), semaphore, nullptr);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkDestroyFence-fence-01120");
- vkDestroyFence(m_device->device(), fence, nullptr);
- m_errorMonitor->VerifyFound();
-
- vkQueueWaitIdle(m_device->m_queue);
- m_errorMonitor->SetUnexpectedError("If semaphore is not VK_NULL_HANDLE, semaphore must be a valid VkSemaphore handle");
- m_errorMonitor->SetUnexpectedError("Unable to remove Semaphore obj");
- vkDestroySemaphore(m_device->device(), semaphore, nullptr);
- m_errorMonitor->SetUnexpectedError("If fence is not VK_NULL_HANDLE, fence must be a valid VkFence handle");
- m_errorMonitor->SetUnexpectedError("Unable to remove Fence obj");
- vkDestroyFence(m_device->device(), fence, nullptr);
- m_errorMonitor->SetUnexpectedError("If event is not VK_NULL_HANDLE, event must be a valid VkEvent handle");
- m_errorMonitor->SetUnexpectedError("Unable to remove Event obj");
- vkDestroyEvent(m_device->device(), event, nullptr);
-}
-
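- // Editor's sketch, not part of the original test: the valid teardown order is to wait
- // on the fence (or the queue) before destroying objects referenced by the submitted
- // command buffer. The handles are the ones created in the test above.
- //
- //   vkWaitForFences(m_device->device(), 1, &fence, VK_TRUE, UINT64_MAX);
- //   vkDestroyEvent(m_device->device(), event, nullptr);
- //   vkDestroySemaphore(m_device->device(), semaphore, nullptr);
- //   vkDestroyFence(m_device->device(), fence, nullptr);
-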
-TEST_F(VkLayerTest, QueryPoolPartialTimestamp) {
- TEST_DESCRIPTION("Request partial result on timestamp query.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkQueryPool query_pool;
- VkQueryPoolCreateInfo query_pool_ci{};
- query_pool_ci.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
- query_pool_ci.queryType = VK_QUERY_TYPE_TIMESTAMP;
- query_pool_ci.queryCount = 1;
- vkCreateQueryPool(m_device->device(), &query_pool_ci, nullptr, &query_pool);
-
- m_commandBuffer->begin();
- vkCmdResetQueryPool(m_commandBuffer->handle(), query_pool, 0, 1);
- vkCmdWriteTimestamp(m_commandBuffer->handle(), VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, query_pool, 0);
- m_commandBuffer->end();
-
- // Submit cmd buffer and wait for it.
- VkSubmitInfo submit_info = {};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &m_commandBuffer->handle();
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
- vkQueueWaitIdle(m_device->m_queue);
-
- // Attempt to obtain partial results.
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkGetQueryPoolResults-queryType-00818");
- uint32_t data_space[16];
- m_errorMonitor->SetUnexpectedError("Cannot get query results on queryPool");
- vkGetQueryPoolResults(m_device->handle(), query_pool, 0, 1, sizeof(data_space), &data_space, sizeof(uint32_t),
- VK_QUERY_RESULT_PARTIAL_BIT);
- m_errorMonitor->VerifyFound();
-
- // Destroy query pool.
- vkDestroyQueryPool(m_device->handle(), query_pool, NULL);
-}
-
-TEST_F(VkLayerTest, QueryPoolInUseDestroyedSignaled) {
- TEST_DESCRIPTION("Delete in-use query pool.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkQueryPool query_pool;
- VkQueryPoolCreateInfo query_pool_ci{};
- query_pool_ci.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
- query_pool_ci.queryType = VK_QUERY_TYPE_TIMESTAMP;
- query_pool_ci.queryCount = 1;
- vkCreateQueryPool(m_device->device(), &query_pool_ci, nullptr, &query_pool);
-
- m_commandBuffer->begin();
- // Use query pool to create binding with cmd buffer
- vkCmdResetQueryPool(m_commandBuffer->handle(), query_pool, 0, 1);
- vkCmdWriteTimestamp(m_commandBuffer->handle(), VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, query_pool, 0);
- m_commandBuffer->end();
-
- // Submit cmd buffer and then destroy query pool while in-flight
- VkSubmitInfo submit_info = {};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &m_commandBuffer->handle();
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkDestroyQueryPool-queryPool-00793");
- vkDestroyQueryPool(m_device->handle(), query_pool, NULL);
- m_errorMonitor->VerifyFound();
-
- vkQueueWaitIdle(m_device->m_queue);
- // Now that cmd buffer done we can safely destroy query_pool
- m_errorMonitor->SetUnexpectedError("If queryPool is not VK_NULL_HANDLE, queryPool must be a valid VkQueryPool handle");
- m_errorMonitor->SetUnexpectedError("Unable to remove QueryPool obj");
- vkDestroyQueryPool(m_device->handle(), query_pool, NULL);
-}
-
-TEST_F(VkLayerTest, PipelineInUseDestroyedSignaled) {
- TEST_DESCRIPTION("Delete in-use pipeline.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- const VkPipelineLayoutObj pipeline_layout(m_device);
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkDestroyPipeline-pipeline-00765");
- // Create PSO to be used for draw-time errors below
-
- // Store pipeline handle so we can actually delete it before test finishes
- VkPipeline delete_this_pipeline;
- { // Scope pipeline so it will be auto-deleted
- CreatePipelineHelper pipe(*this);
- pipe.InitInfo();
- pipe.InitState();
- pipe.CreateGraphicsPipeline();
-
- delete_this_pipeline = pipe.pipeline_;
-
- m_commandBuffer->begin();
- // Bind pipeline to cmd buffer
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
-
- m_commandBuffer->end();
-
- VkSubmitInfo submit_info = {};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &m_commandBuffer->handle();
- // Submit cmd buffer; the pipeline will be destroyed while in-flight when this scope ends
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
- } // Pipeline deletion triggered here
- m_errorMonitor->VerifyFound();
- // Make sure queue finished and then actually delete pipeline
- vkQueueWaitIdle(m_device->m_queue);
- m_errorMonitor->SetUnexpectedError("If pipeline is not VK_NULL_HANDLE, pipeline must be a valid VkPipeline handle");
- m_errorMonitor->SetUnexpectedError("Unable to remove Pipeline obj");
- vkDestroyPipeline(m_device->handle(), delete_this_pipeline, nullptr);
-}
-
-TEST_F(VkLayerTest, ImageViewInUseDestroyedSignaled) {
- TEST_DESCRIPTION("Delete in-use imageView.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
- VkSampler sampler;
-
- VkResult err;
- err = vkCreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
- ASSERT_VK_SUCCESS(err);
-
- VkImageObj image(m_device);
- image.Init(128, 128, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
- ASSERT_TRUE(image.initialized());
-
- VkImageView view = image.targetView(VK_FORMAT_R8G8B8A8_UNORM);
-
- // Create PSO to use the sampler
- VkShaderObj fs(m_device, bindStateFragSamplerShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- CreatePipelineHelper pipe(*this);
- pipe.InitInfo();
- pipe.shader_stages_ = {pipe.vs_->GetStageCreateInfo(), fs.GetStageCreateInfo()};
- pipe.dsl_bindings_ = {
- {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
- };
- const VkDynamicState dyn_states[] = {VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR};
- VkPipelineDynamicStateCreateInfo dyn_state_ci = {};
- dyn_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
- dyn_state_ci.dynamicStateCount = size(dyn_states);
- dyn_state_ci.pDynamicStates = dyn_states;
- pipe.dyn_state_ci_ = dyn_state_ci;
- pipe.InitState();
- pipe.CreateGraphicsPipeline();
-
- pipe.descriptor_set_->WriteDescriptorImageInfo(0, view, sampler, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
- pipe.descriptor_set_->UpdateDescriptorSets();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkDestroyImageView-imageView-01026");
-
- m_commandBuffer->begin();
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
- // Bind pipeline to cmd buffer
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
- vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_layout_.handle(), 0, 1,
- &pipe.descriptor_set_->set_, 0, nullptr);
-
- VkViewport viewport = {0, 0, 16, 16, 0, 1};
- VkRect2D scissor = {{0, 0}, {16, 16}};
- vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
- vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
-
- m_commandBuffer->Draw(1, 0, 0, 0);
- m_commandBuffer->EndRenderPass();
- m_commandBuffer->end();
- // Submit cmd buffer then destroy imageView
- VkSubmitInfo submit_info = {};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &m_commandBuffer->handle();
- // Submit cmd buffer and then destroy imageView while in-flight
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
-
- vkDestroyImageView(m_device->device(), view, nullptr);
- m_errorMonitor->VerifyFound();
- vkQueueWaitIdle(m_device->m_queue);
- // Now that the queue is idle, clean up the remaining sampler
- m_errorMonitor->SetUnexpectedError("If imageView is not VK_NULL_HANDLE, imageView must be a valid VkImageView handle");
- m_errorMonitor->SetUnexpectedError("Unable to remove ImageView obj");
- vkDestroySampler(m_device->device(), sampler, nullptr);
-}
-
-TEST_F(VkLayerTest, BufferViewInUseDestroyedSignaled) {
- TEST_DESCRIPTION("Delete in-use bufferView.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- uint32_t queue_family_index = 0;
- VkBufferCreateInfo buffer_create_info = {};
- buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
- buffer_create_info.size = 1024;
- buffer_create_info.usage = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT;
- buffer_create_info.queueFamilyIndexCount = 1;
- buffer_create_info.pQueueFamilyIndices = &queue_family_index;
- VkBufferObj buffer;
- buffer.init(*m_device, buffer_create_info);
-
- VkBufferView view;
- VkBufferViewCreateInfo bvci = {};
- bvci.sType = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO;
- bvci.buffer = buffer.handle();
- bvci.format = VK_FORMAT_R32_SFLOAT;
- bvci.range = VK_WHOLE_SIZE;
-
- VkResult err = vkCreateBufferView(m_device->device(), &bvci, NULL, &view);
- ASSERT_VK_SUCCESS(err);
-
- char const *fsSource =
- "#version 450\n"
- "\n"
- "layout(set=0, binding=0, r32f) uniform readonly imageBuffer s;\n"
- "layout(location=0) out vec4 x;\n"
- "void main(){\n"
- " x = imageLoad(s, 0);\n"
- "}\n";
- VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- CreatePipelineHelper pipe(*this);
- pipe.InitInfo();
- pipe.shader_stages_ = {pipe.vs_->GetStageCreateInfo(), fs.GetStageCreateInfo()};
- pipe.dsl_bindings_ = {
- {0, VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
- };
- const VkDynamicState dyn_states[] = {VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR};
- VkPipelineDynamicStateCreateInfo dyn_state_ci = {};
- dyn_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
- dyn_state_ci.dynamicStateCount = size(dyn_states);
- dyn_state_ci.pDynamicStates = dyn_states;
- pipe.dyn_state_ci_ = dyn_state_ci;
- pipe.InitState();
- pipe.CreateGraphicsPipeline();
-
- pipe.descriptor_set_->WriteDescriptorBufferView(0, view, VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER);
- pipe.descriptor_set_->UpdateDescriptorSets();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkDestroyBufferView-bufferView-00936");
-
- m_commandBuffer->begin();
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
- VkViewport viewport = {0, 0, 16, 16, 0, 1};
- vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
- VkRect2D scissor = {{0, 0}, {16, 16}};
- vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
- // Bind pipeline to cmd buffer
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
- vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_layout_.handle(), 0, 1,
- &pipe.descriptor_set_->set_, 0, nullptr);
- m_commandBuffer->Draw(1, 0, 0, 0);
- m_commandBuffer->EndRenderPass();
- m_commandBuffer->end();
-
- VkSubmitInfo submit_info = {};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &m_commandBuffer->handle();
- // Submit cmd buffer and then destroy bufferView while in-flight
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
-
- vkDestroyBufferView(m_device->device(), view, nullptr);
- m_errorMonitor->VerifyFound();
- vkQueueWaitIdle(m_device->m_queue);
- // Now we can actually destroy bufferView
- m_errorMonitor->SetUnexpectedError("If bufferView is not VK_NULL_HANDLE, bufferView must be a valid VkBufferView handle");
- m_errorMonitor->SetUnexpectedError("Unable to remove BufferView obj");
- vkDestroyBufferView(m_device->device(), view, NULL);
-}
-
-TEST_F(VkLayerTest, SamplerInUseDestroyedSignaled) {
- TEST_DESCRIPTION("Delete in-use sampler.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
- VkSampler sampler;
-
- VkResult err;
- err = vkCreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
- ASSERT_VK_SUCCESS(err);
-
- VkImageObj image(m_device);
- image.Init(128, 128, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
- ASSERT_TRUE(image.initialized());
-
- VkImageView view = image.targetView(VK_FORMAT_R8G8B8A8_UNORM);
-
- // Create PSO to use the sampler
- VkShaderObj fs(m_device, bindStateFragSamplerShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- CreatePipelineHelper pipe(*this);
- pipe.InitInfo();
- pipe.shader_stages_ = {pipe.vs_->GetStageCreateInfo(), fs.GetStageCreateInfo()};
- pipe.dsl_bindings_ = {
- {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
- };
- const VkDynamicState dyn_states[] = {VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR};
- VkPipelineDynamicStateCreateInfo dyn_state_ci = {};
- dyn_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
- dyn_state_ci.dynamicStateCount = size(dyn_states);
- dyn_state_ci.pDynamicStates = dyn_states;
- pipe.dyn_state_ci_ = dyn_state_ci;
- pipe.InitState();
- pipe.CreateGraphicsPipeline();
-
- pipe.descriptor_set_->WriteDescriptorImageInfo(0, view, sampler, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
- pipe.descriptor_set_->UpdateDescriptorSets();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkDestroySampler-sampler-01082");
-
- m_commandBuffer->begin();
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
- // Bind pipeline to cmd buffer
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
- vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_layout_.handle(), 0, 1,
- &pipe.descriptor_set_->set_, 0, nullptr);
-
- VkViewport viewport = {0, 0, 16, 16, 0, 1};
- VkRect2D scissor = {{0, 0}, {16, 16}};
- vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
- vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
-
- m_commandBuffer->Draw(1, 0, 0, 0);
- m_commandBuffer->EndRenderPass();
- m_commandBuffer->end();
- // Submit cmd buffer then destroy sampler
- VkSubmitInfo submit_info = {};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &m_commandBuffer->handle();
- // Submit cmd buffer and then destroy sampler while in-flight
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
-
- vkDestroySampler(m_device->device(), sampler, nullptr); // Destroyed too soon
- m_errorMonitor->VerifyFound();
- vkQueueWaitIdle(m_device->m_queue);
-
- // Now we can actually destroy sampler
- m_errorMonitor->SetUnexpectedError("If sampler is not VK_NULL_HANDLE, sampler must be a valid VkSampler handle");
- m_errorMonitor->SetUnexpectedError("Unable to remove Sampler obj");
- vkDestroySampler(m_device->device(), sampler, NULL); // Destroyed for real
-}
-
-TEST_F(VkLayerTest, QueueForwardProgressFenceWait) {
- TEST_DESCRIPTION("Call VkQueueSubmit with a semaphore that is already signaled but not waited on by the queue.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- const char *queue_forward_progress_message = "UNASSIGNED-CoreValidation-DrawState-QueueForwardProgress";
-
- VkCommandBufferObj cb1(m_device, m_commandPool);
- cb1.begin();
- cb1.end();
-
- VkSemaphoreCreateInfo semaphore_create_info = {};
- semaphore_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
- VkSemaphore semaphore;
- ASSERT_VK_SUCCESS(vkCreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore));
- VkSubmitInfo submit_info = {};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &cb1.handle();
- submit_info.signalSemaphoreCount = 1;
- submit_info.pSignalSemaphores = &semaphore;
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
-
- m_commandBuffer->begin();
- m_commandBuffer->end();
- submit_info.pCommandBuffers = &m_commandBuffer->handle();
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, queue_forward_progress_message);
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
- m_errorMonitor->VerifyFound();
-
- vkDeviceWaitIdle(m_device->device());
- vkDestroySemaphore(m_device->device(), semaphore, nullptr);
-}
-
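- // Editor's sketch, not part of the original test: the forward-progress warning is
- // avoided when the second submission waits on the semaphore signaled by the first.
- // The wait stage chosen here is illustrative.
- //
- //   VkPipelineStageFlags wait_stage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
- //   submit_info.waitSemaphoreCount = 1;
- //   submit_info.pWaitSemaphores = &semaphore;
- //   submit_info.pWaitDstStageMask = &wait_stage;
- //   vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
-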
-#if GTEST_IS_THREADSAFE
-TEST_F(VkLayerTest, ThreadCommandBufferCollision) {
- test_platform_thread thread;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "THREADING ERROR");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitViewport());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- // Calls AllocateCommandBuffers
- VkCommandBufferObj commandBuffer(m_device, m_commandPool);
-
- commandBuffer.begin();
-
- VkEventCreateInfo event_info;
- VkEvent event;
- VkResult err;
-
- memset(&event_info, 0, sizeof(event_info));
- event_info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
-
- err = vkCreateEvent(device(), &event_info, NULL, &event);
- ASSERT_VK_SUCCESS(err);
-
- err = vkResetEvent(device(), event);
- ASSERT_VK_SUCCESS(err);
-
- struct thread_data_struct data;
- data.commandBuffer = commandBuffer.handle();
- data.event = event;
- data.bailout = false;
- m_errorMonitor->SetBailout(&data.bailout);
-
- // First do some correct operations using multiple threads.
- // Add many entries to command buffer from another thread.
- test_platform_thread_create(&thread, AddToCommandBuffer, (void *)&data);
- // Make non-conflicting calls from this thread at the same time.
- for (int i = 0; i < 80000; i++) {
- uint32_t count;
- vkEnumeratePhysicalDevices(instance(), &count, NULL);
- }
- test_platform_thread_join(thread, NULL);
-
- // Then do some incorrect operations using multiple threads.
- // Add many entries to command buffer from another thread.
- test_platform_thread_create(&thread, AddToCommandBuffer, (void *)&data);
- // Add many entries to command buffer from this thread at the same time.
- AddToCommandBuffer(&data);
-
- test_platform_thread_join(thread, NULL);
- commandBuffer.end();
-
- m_errorMonitor->SetBailout(NULL);
-
- m_errorMonitor->VerifyFound();
-
- vkDestroyEvent(device(), event, NULL);
-}
-#endif // GTEST_IS_THREADSAFE
-
-TEST_F(VkLayerTest, ExecuteUnrecordedPrimaryCB) {
- TEST_DESCRIPTION("Attempt vkQueueSubmit with a CB in the initial state");
- ASSERT_NO_FATAL_FAILURE(Init());
- // never record m_commandBuffer
-
- VkSubmitInfo si = {};
- si.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- si.commandBufferCount = 1;
- si.pCommandBuffers = &m_commandBuffer->handle();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkQueueSubmit-pCommandBuffers-00072");
- vkQueueSubmit(m_device->m_queue, 1, &si, VK_NULL_HANDLE);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, Maintenance1AndNegativeViewport) {
- TEST_DESCRIPTION("Attempt to enable AMD_negative_viewport_height and Maintenance1_KHR extension simultaneously");
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- if (!((DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME)) &&
- (DeviceExtensionSupported(gpu(), nullptr, VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_EXTENSION_NAME)))) {
- printf("%s Maintenance1 and AMD_negative viewport height extensions not supported, skipping test\n", kSkipPrefix);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- vk_testing::QueueCreateInfoArray queue_info(m_device->queue_props);
- const char *extension_names[2] = {"VK_KHR_maintenance1", "VK_AMD_negative_viewport_height"};
- VkDevice testDevice;
- VkDeviceCreateInfo device_create_info = {};
- auto features = m_device->phy().features();
- device_create_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
- device_create_info.pNext = NULL;
- device_create_info.queueCreateInfoCount = queue_info.size();
- device_create_info.pQueueCreateInfos = queue_info.data();
- device_create_info.enabledLayerCount = 0;
- device_create_info.ppEnabledLayerNames = NULL;
- device_create_info.enabledExtensionCount = 2;
- device_create_info.ppEnabledExtensionNames = (const char *const *)extension_names;
- device_create_info.pEnabledFeatures = &features;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDeviceCreateInfo-ppEnabledExtensionNames-00374");
- // The following unexpected error is coming from the LunarG loader. Do not make it a desired message because platforms that do
- // not use the LunarG loader (e.g. Android) will not see the message and the test will fail.
- m_errorMonitor->SetUnexpectedError("Failed to create device chain.");
- vkCreateDevice(gpu(), &device_create_info, NULL, &testDevice);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, HostQueryResetNotEnabled) {
- TEST_DESCRIPTION("Use vkResetQueryPoolEXT without enabling the feature");
-
- if (!InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
- VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- return;
- }
-
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- if (!DeviceExtensionSupported(gpu(), nullptr, VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME)) {
- printf("%s Extension %s not supported by device; skipped.\n", kSkipPrefix, VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME);
- return;
- }
-
- m_device_extension_names.push_back(VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME);
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- auto fpvkResetQueryPoolEXT = (PFN_vkResetQueryPoolEXT)vkGetDeviceProcAddr(m_device->device(), "vkResetQueryPoolEXT");
-
- VkQueryPool query_pool;
- VkQueryPoolCreateInfo query_pool_create_info{};
- query_pool_create_info.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
- query_pool_create_info.queryType = VK_QUERY_TYPE_TIMESTAMP;
- query_pool_create_info.queryCount = 1;
- vkCreateQueryPool(m_device->device(), &query_pool_create_info, nullptr, &query_pool);
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkResetQueryPoolEXT-None-02665");
- fpvkResetQueryPoolEXT(m_device->device(), query_pool, 0, 1);
- m_errorMonitor->VerifyFound();
-
- vkDestroyQueryPool(m_device->device(), query_pool, nullptr);
-}
-
-TEST_F(VkLayerTest, HostQueryResetBadFirstQuery) {
- TEST_DESCRIPTION("Bad firstQuery in vkResetQueryPoolEXT");
-
- if (!InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
- VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- return;
- }
-
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- if (!DeviceExtensionSupported(gpu(), nullptr, VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME)) {
- printf("%s Extension %s not supported by device; skipped.\n", kSkipPrefix, VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME);
- return;
- }
-
- m_device_extension_names.push_back(VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME);
-
- VkPhysicalDeviceHostQueryResetFeaturesEXT host_query_reset_features{};
- host_query_reset_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT;
- host_query_reset_features.hostQueryReset = VK_TRUE;
-
- VkPhysicalDeviceFeatures2 pd_features2{};
- pd_features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
- pd_features2.pNext = &host_query_reset_features;
-
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &pd_features2));
-
- auto fpvkResetQueryPoolEXT = (PFN_vkResetQueryPoolEXT)vkGetDeviceProcAddr(m_device->device(), "vkResetQueryPoolEXT");
-
- VkQueryPool query_pool;
- VkQueryPoolCreateInfo query_pool_create_info{};
- query_pool_create_info.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
- query_pool_create_info.queryType = VK_QUERY_TYPE_TIMESTAMP;
- query_pool_create_info.queryCount = 1;
- vkCreateQueryPool(m_device->device(), &query_pool_create_info, nullptr, &query_pool);
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkResetQueryPoolEXT-firstQuery-02666");
- fpvkResetQueryPoolEXT(m_device->device(), query_pool, 1, 0);
- m_errorMonitor->VerifyFound();
-
- vkDestroyQueryPool(m_device->device(), query_pool, nullptr);
-}
-
-TEST_F(VkLayerTest, HostQueryResetBadRange) {
- TEST_DESCRIPTION("Bad range in vkResetQueryPoolEXT");
-
- if (!InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
- VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- return;
- }
-
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- if (!DeviceExtensionSupported(gpu(), nullptr, VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME)) {
- printf("%s Extension %s not supported by device; skipped.\n", kSkipPrefix, VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME);
- return;
- }
-
- m_device_extension_names.push_back(VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME);
-
- VkPhysicalDeviceHostQueryResetFeaturesEXT host_query_reset_features{};
- host_query_reset_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT;
- host_query_reset_features.hostQueryReset = VK_TRUE;
-
- VkPhysicalDeviceFeatures2 pd_features2{};
- pd_features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
- pd_features2.pNext = &host_query_reset_features;
-
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &pd_features2));
-
- auto fpvkResetQueryPoolEXT = (PFN_vkResetQueryPoolEXT)vkGetDeviceProcAddr(m_device->device(), "vkResetQueryPoolEXT");
-
- VkQueryPool query_pool;
- VkQueryPoolCreateInfo query_pool_create_info{};
- query_pool_create_info.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
- query_pool_create_info.queryType = VK_QUERY_TYPE_TIMESTAMP;
- query_pool_create_info.queryCount = 1;
- vkCreateQueryPool(m_device->device(), &query_pool_create_info, nullptr, &query_pool);
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkResetQueryPoolEXT-firstQuery-02667");
- fpvkResetQueryPoolEXT(m_device->device(), query_pool, 0, 2);
- m_errorMonitor->VerifyFound();
-
- vkDestroyQueryPool(m_device->device(), query_pool, nullptr);
-}
-
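- // Editor's sketch, not part of the original tests: with hostQueryReset enabled, a
- // valid host-side reset keeps firstQuery and firstQuery + queryCount within the
- // pool's queryCount (1 in these tests), for example:
- //
- //   fpvkResetQueryPoolEXT(m_device->device(), query_pool, 0, 1);
-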
-TEST_F(VkLayerTest, HostQueryResetInvalidQueryPool) {
- TEST_DESCRIPTION("Invalid queryPool in vkResetQueryPoolEXT");
-
- if (!InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
- VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- return;
- }
-
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- if (!DeviceExtensionSupported(gpu(), nullptr, VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME)) {
- printf("%s Extension %s not supported by device; skipped.\n", kSkipPrefix, VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME);
- return;
- }
-
- m_device_extension_names.push_back(VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME);
-
- VkPhysicalDeviceHostQueryResetFeaturesEXT host_query_reset_features{};
- host_query_reset_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT;
- host_query_reset_features.hostQueryReset = VK_TRUE;
-
- VkPhysicalDeviceFeatures2 pd_features2{};
- pd_features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
- pd_features2.pNext = &host_query_reset_features;
-
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &pd_features2));
-
- auto fpvkResetQueryPoolEXT = (PFN_vkResetQueryPoolEXT)vkGetDeviceProcAddr(m_device->device(), "vkResetQueryPoolEXT");
-
- // Create and destroy a query pool.
- VkQueryPool query_pool;
- VkQueryPoolCreateInfo query_pool_create_info{};
- query_pool_create_info.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
- query_pool_create_info.queryType = VK_QUERY_TYPE_TIMESTAMP;
- query_pool_create_info.queryCount = 1;
- vkCreateQueryPool(m_device->device(), &query_pool_create_info, nullptr, &query_pool);
- vkDestroyQueryPool(m_device->device(), query_pool, nullptr);
-
- // Attempt to reset using the destroyed query pool handle.
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkResetQueryPoolEXT-queryPool-parameter");
- fpvkResetQueryPoolEXT(m_device->device(), query_pool, 0, 1);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, HostQueryResetWrongDevice) {
- TEST_DESCRIPTION("Device not matching queryPool in vkResetQueryPoolEXT");
-
- if (!InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
- VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- return;
- }
-
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- if (!DeviceExtensionSupported(gpu(), nullptr, VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME)) {
- printf("%s Extension %s not supported by device; skipped.\n", kSkipPrefix, VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME);
- return;
- }
-
- m_device_extension_names.push_back(VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME);
-
- VkPhysicalDeviceHostQueryResetFeaturesEXT host_query_reset_features{};
- host_query_reset_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT;
- host_query_reset_features.hostQueryReset = VK_TRUE;
-
- VkPhysicalDeviceFeatures2 pd_features2{};
- pd_features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
- pd_features2.pNext = &host_query_reset_features;
-
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &pd_features2));
-
- auto fpvkResetQueryPoolEXT = (PFN_vkResetQueryPoolEXT)vkGetDeviceProcAddr(m_device->device(), "vkResetQueryPoolEXT");
-
- VkQueryPool query_pool;
- VkQueryPoolCreateInfo query_pool_create_info{};
- query_pool_create_info.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
- query_pool_create_info.queryType = VK_QUERY_TYPE_TIMESTAMP;
- query_pool_create_info.queryCount = 1;
- vkCreateQueryPool(m_device->device(), &query_pool_create_info, nullptr, &query_pool);
-
- // Create a second device with the feature enabled.
- vk_testing::QueueCreateInfoArray queue_info(m_device->queue_props);
- auto features = m_device->phy().features();
-
- VkDeviceCreateInfo device_create_info = {};
- device_create_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
- device_create_info.pNext = &host_query_reset_features;
- device_create_info.queueCreateInfoCount = queue_info.size();
- device_create_info.pQueueCreateInfos = queue_info.data();
- device_create_info.pEnabledFeatures = &features;
- device_create_info.enabledExtensionCount = m_device_extension_names.size();
- device_create_info.ppEnabledExtensionNames = m_device_extension_names.data();
-
- VkDevice second_device;
- ASSERT_VK_SUCCESS(vkCreateDevice(gpu(), &device_create_info, nullptr, &second_device));
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkResetQueryPoolEXT-queryPool-parent");
- // Run vkResetQueryPoolEXT on the wrong device.
- fpvkResetQueryPoolEXT(second_device, query_pool, 0, 1);
- m_errorMonitor->VerifyFound();
-
- vkDestroyQueryPool(m_device->device(), query_pool, nullptr);
- vkDestroyDevice(second_device, nullptr);
-}
-
-TEST_F(VkLayerTest, ResetEventThenSet) {
- TEST_DESCRIPTION("Reset an event then set it after the reset has been submitted.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- VkEvent event;
- VkEventCreateInfo event_create_info{};
- event_create_info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
- vkCreateEvent(m_device->device(), &event_create_info, nullptr, &event);
-
- VkCommandPool command_pool;
- VkCommandPoolCreateInfo pool_create_info{};
- pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
- pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
- pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
- vkCreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool);
-
- VkCommandBuffer command_buffer;
- VkCommandBufferAllocateInfo command_buffer_allocate_info{};
- command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
- command_buffer_allocate_info.commandPool = command_pool;
- command_buffer_allocate_info.commandBufferCount = 1;
- command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
- vkAllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, &command_buffer);
-
- VkQueue queue = VK_NULL_HANDLE;
- vkGetDeviceQueue(m_device->device(), m_device->graphics_queue_node_index_, 0, &queue);
-
- {
- VkCommandBufferBeginInfo begin_info{};
- begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
- vkBeginCommandBuffer(command_buffer, &begin_info);
-
- vkCmdResetEvent(command_buffer, event, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT);
- vkEndCommandBuffer(command_buffer);
- }
- {
- VkSubmitInfo submit_info{};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &command_buffer;
- submit_info.signalSemaphoreCount = 0;
- submit_info.pSignalSemaphores = nullptr;
- vkQueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);
- }
- {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "that is already in use by a command buffer.");
- vkSetEvent(m_device->device(), event);
- m_errorMonitor->VerifyFound();
- }
-
- vkQueueWaitIdle(queue);
-
- vkDestroyEvent(m_device->device(), event, nullptr);
- vkFreeCommandBuffers(m_device->device(), command_pool, 1, &command_buffer);
- vkDestroyCommandPool(m_device->device(), command_pool, NULL);
-}
-
-TEST_F(VkLayerTest, ShadingRateImageNV) {
- TEST_DESCRIPTION("Test VK_NV_shading_rate_image.");
-
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- } else {
- printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
- VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- std::array<const char *, 1> required_device_extensions = {{VK_NV_SHADING_RATE_IMAGE_EXTENSION_NAME}};
- for (auto device_extension : required_device_extensions) {
- if (DeviceExtensionSupported(gpu(), nullptr, device_extension)) {
- m_device_extension_names.push_back(device_extension);
- } else {
- printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, device_extension);
- return;
- }
- }
-
- if (DeviceIsMockICD() || DeviceSimulation()) {
- printf("%s Test not supported by MockICD, skipping tests\n", kSkipPrefix);
- return;
- }
-
- PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
- (PFN_vkGetPhysicalDeviceFeatures2KHR)vkGetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
- ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
-
- // Create a device that enables shading_rate_image but disables multiViewport
- auto shading_rate_image_features = lvl_init_struct<VkPhysicalDeviceShadingRateImageFeaturesNV>();
- auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&shading_rate_image_features);
- vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
-
- features2.features.multiViewport = VK_FALSE;
-
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- // Test shading rate image creation
- VkResult result = VK_RESULT_MAX_ENUM;
- VkImageCreateInfo image_create_info = {};
- image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- image_create_info.pNext = NULL;
- image_create_info.imageType = VK_IMAGE_TYPE_2D;
- image_create_info.format = VK_FORMAT_R8_UINT;
- image_create_info.extent.width = 4;
- image_create_info.extent.height = 4;
- image_create_info.extent.depth = 1;
- image_create_info.mipLevels = 1;
- image_create_info.arrayLayers = 1;
- image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
- image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
- image_create_info.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
- image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV;
- image_create_info.queueFamilyIndexCount = 0;
- image_create_info.pQueueFamilyIndices = NULL;
- image_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
- image_create_info.flags = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
-
- // image type must be 2D
- image_create_info.imageType = VK_IMAGE_TYPE_3D;
- CreateImageTest(*this, &image_create_info, "VUID-VkImageCreateInfo-imageType-02082");
-
- image_create_info.imageType = VK_IMAGE_TYPE_2D;
-
- // must be single sample
- image_create_info.samples = VK_SAMPLE_COUNT_2_BIT;
- CreateImageTest(*this, &image_create_info, "VUID-VkImageCreateInfo-samples-02083");
-
- image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
-
- // tiling must be optimal
- image_create_info.tiling = VK_IMAGE_TILING_LINEAR;
- CreateImageTest(*this, &image_create_info, "VUID-VkImageCreateInfo-tiling-02084");
-
- image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
-
- // Should succeed.
- VkImageObj image(m_device);
- image.init(&image_create_info);
-
- // Test image view creation
- VkImageView view;
- VkImageViewCreateInfo ivci = {};
- ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
- ivci.image = image.handle();
- ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
- ivci.format = VK_FORMAT_R8_UINT;
- ivci.subresourceRange.layerCount = 1;
- ivci.subresourceRange.baseMipLevel = 0;
- ivci.subresourceRange.levelCount = 1;
- ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
-
- // view type must be 2D or 2D_ARRAY
- ivci.viewType = VK_IMAGE_VIEW_TYPE_CUBE;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageViewCreateInfo-image-02086");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageViewCreateInfo-image-01003");
- result = vkCreateImageView(m_device->device(), &ivci, nullptr, &view);
- m_errorMonitor->VerifyFound();
- if (VK_SUCCESS == result) {
- vkDestroyImageView(m_device->device(), view, NULL);
- view = VK_NULL_HANDLE;
- }
- ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
-
- // format must be R8_UINT
- ivci.format = VK_FORMAT_R8_UNORM;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageViewCreateInfo-image-02087");
- result = vkCreateImageView(m_device->device(), &ivci, nullptr, &view);
- m_errorMonitor->VerifyFound();
- if (VK_SUCCESS == result) {
- vkDestroyImageView(m_device->device(), view, NULL);
- view = VK_NULL_HANDLE;
- }
- ivci.format = VK_FORMAT_R8_UINT;
-
- vkCreateImageView(m_device->device(), &ivci, nullptr, &view);
- m_errorMonitor->VerifyNotFound();
-
- // Test pipeline creation
- VkPipelineViewportShadingRateImageStateCreateInfoNV vsrisci = {
- VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV};
-
- VkViewport viewport = {0.0f, 0.0f, 64.0f, 64.0f, 0.0f, 1.0f};
- VkViewport viewports[20] = {viewport, viewport};
- VkRect2D scissor = {{0, 0}, {64, 64}};
- VkRect2D scissors[20] = {scissor, scissor};
- VkDynamicState dynPalette = VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV;
- VkPipelineDynamicStateCreateInfo dyn = {VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO, nullptr, 0, 1, &dynPalette};
-
- // viewportCount must be 0 or 1 when multiViewport is disabled
- {
- const auto break_vp = [&](CreatePipelineHelper &helper) {
- helper.vp_state_ci_.viewportCount = 2;
- helper.vp_state_ci_.pViewports = viewports;
- helper.vp_state_ci_.scissorCount = 2;
- helper.vp_state_ci_.pScissors = scissors;
- helper.vp_state_ci_.pNext = &vsrisci;
- helper.dyn_state_ci_ = dyn;
-
- vsrisci.shadingRateImageEnable = VK_TRUE;
- vsrisci.viewportCount = 2;
- };
- CreatePipelineHelper::OneshotTest(
- *this, break_vp, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- vector<std::string>({"VUID-VkPipelineViewportShadingRateImageStateCreateInfoNV-viewportCount-02054",
- "VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216",
- "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217"}));
- }
-
- // viewportCounts must match
- {
- const auto break_vp = [&](CreatePipelineHelper &helper) {
- helper.vp_state_ci_.viewportCount = 1;
- helper.vp_state_ci_.pViewports = viewports;
- helper.vp_state_ci_.scissorCount = 1;
- helper.vp_state_ci_.pScissors = scissors;
- helper.vp_state_ci_.pNext = &vsrisci;
- helper.dyn_state_ci_ = dyn;
-
- vsrisci.shadingRateImageEnable = VK_TRUE;
- vsrisci.viewportCount = 0;
- };
- CreatePipelineHelper::OneshotTest(
- *this, break_vp, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- vector<std::string>({"VUID-VkPipelineViewportShadingRateImageStateCreateInfoNV-shadingRateImageEnable-02056"}));
- }
-
- // pShadingRatePalettes must not be NULL.
- {
- const auto break_vp = [&](CreatePipelineHelper &helper) {
- helper.vp_state_ci_.viewportCount = 1;
- helper.vp_state_ci_.pViewports = viewports;
- helper.vp_state_ci_.scissorCount = 1;
- helper.vp_state_ci_.pScissors = scissors;
- helper.vp_state_ci_.pNext = &vsrisci;
-
- vsrisci.shadingRateImageEnable = VK_TRUE;
- vsrisci.viewportCount = 1;
- };
- CreatePipelineHelper::OneshotTest(
- *this, break_vp, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- vector<std::string>({"VUID-VkPipelineViewportShadingRateImageStateCreateInfoNV-pDynamicStates-02057"}));
- }
-
- // Create an image without the SRI bit
- VkImageObj nonSRIimage(m_device);
- nonSRIimage.Init(256, 256, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
- ASSERT_TRUE(nonSRIimage.initialized());
- VkImageView nonSRIview = nonSRIimage.targetView(VK_FORMAT_B8G8R8A8_UNORM);
-
- // Test SRI layout on non-SRI image
- VkImageMemoryBarrier img_barrier = {};
- img_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
- img_barrier.pNext = nullptr;
- img_barrier.srcAccessMask = 0;
- img_barrier.dstAccessMask = 0;
- img_barrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
- img_barrier.newLayout = VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV;
- img_barrier.image = nonSRIimage.handle();
- img_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
- img_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
- img_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- img_barrier.subresourceRange.baseArrayLayer = 0;
- img_barrier.subresourceRange.baseMipLevel = 0;
- img_barrier.subresourceRange.layerCount = 1;
- img_barrier.subresourceRange.levelCount = 1;
-
- m_commandBuffer->begin();
-
- // Error trying to convert it to SRI layout
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-oldLayout-02088");
- vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, 0, 0,
- nullptr, 0, nullptr, 1, &img_barrier);
- m_errorMonitor->VerifyFound();
-
- // succeed converting it to GENERAL
- img_barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
- vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, 0, 0,
- nullptr, 0, nullptr, 1, &img_barrier);
- m_errorMonitor->VerifyNotFound();
-
- // Test vkCmdBindShadingRateImageNV errors
- auto vkCmdBindShadingRateImageNV =
- (PFN_vkCmdBindShadingRateImageNV)vkGetDeviceProcAddr(m_device->device(), "vkCmdBindShadingRateImageNV");
-
- // if the view is non-NULL, it must be R8_UINT, USAGE_SRI, image layout must match, layout must be valid
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBindShadingRateImageNV-imageView-02060");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBindShadingRateImageNV-imageView-02061");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBindShadingRateImageNV-imageView-02062");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBindShadingRateImageNV-imageLayout-02063");
- vkCmdBindShadingRateImageNV(m_commandBuffer->handle(), nonSRIview, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL);
- m_errorMonitor->VerifyFound();
-
- // Test vkCmdSetViewportShadingRatePaletteNV errors
- auto vkCmdSetViewportShadingRatePaletteNV =
- (PFN_vkCmdSetViewportShadingRatePaletteNV)vkGetDeviceProcAddr(m_device->device(), "vkCmdSetViewportShadingRatePaletteNV");
-
- VkShadingRatePaletteEntryNV paletteEntries[100] = {};
- VkShadingRatePaletteNV palette = {100, paletteEntries};
- VkShadingRatePaletteNV palettes[] = {palette, palette};
-
- // errors on firstViewport/viewportCount
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdSetViewportShadingRatePaletteNV-firstViewport-02066");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdSetViewportShadingRatePaletteNV-firstViewport-02067");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdSetViewportShadingRatePaletteNV-firstViewport-02068");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdSetViewportShadingRatePaletteNV-viewportCount-02069");
- vkCmdSetViewportShadingRatePaletteNV(m_commandBuffer->handle(), 20, 2, palettes);
- m_errorMonitor->VerifyFound();
-
- // shadingRatePaletteEntryCount must be in range
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkShadingRatePaletteNV-shadingRatePaletteEntryCount-02071");
- vkCmdSetViewportShadingRatePaletteNV(m_commandBuffer->handle(), 0, 1, palettes);
- m_errorMonitor->VerifyFound();
-
- VkCoarseSampleLocationNV locations[100] = {
- {0, 0, 0}, {0, 0, 1}, {0, 1, 0}, {0, 1, 1}, {0, 1, 1}, // duplicate
- {1000, 0, 0}, // pixelX too large
- {0, 1000, 0}, // pixelY too large
- {0, 0, 1000}, // sample too large
- };
-
- // Test custom sample orders, both via pipeline state and via dynamic state
- {
- VkCoarseSampleOrderCustomNV sampOrdBadShadingRate = {VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_PIXEL_NV, 1, 1,
- locations};
- VkCoarseSampleOrderCustomNV sampOrdBadSampleCount = {VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV, 3, 1,
- locations};
- VkCoarseSampleOrderCustomNV sampOrdBadSampleLocationCount = {VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV,
- 2, 2, locations};
- VkCoarseSampleOrderCustomNV sampOrdDuplicateLocations = {VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV, 2,
- 1 * 2 * 2, &locations[1]};
- VkCoarseSampleOrderCustomNV sampOrdOutOfRangeLocations = {VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV, 2,
- 1 * 2 * 2, &locations[4]};
- VkCoarseSampleOrderCustomNV sampOrdTooLargeSampleLocationCount = {
- VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV, 4, 64, &locations[8]};
- VkCoarseSampleOrderCustomNV sampOrdGood = {VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV, 2, 1 * 2 * 2,
- &locations[0]};
-
- VkPipelineViewportCoarseSampleOrderStateCreateInfoNV csosci = {
- VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV};
- csosci.sampleOrderType = VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV;
- csosci.customSampleOrderCount = 1;
-
- using std::vector;
- struct TestCase {
- const VkCoarseSampleOrderCustomNV *order;
- vector<std::string> vuids;
- };
-
- vector<TestCase> test_cases = {
- {&sampOrdBadShadingRate, {"VUID-VkCoarseSampleOrderCustomNV-shadingRate-02073"}},
- {&sampOrdBadSampleCount,
- {"VUID-VkCoarseSampleOrderCustomNV-sampleCount-02074", "VUID-VkCoarseSampleOrderCustomNV-sampleLocationCount-02075"}},
- {&sampOrdBadSampleLocationCount, {"VUID-VkCoarseSampleOrderCustomNV-sampleLocationCount-02075"}},
- {&sampOrdDuplicateLocations, {"VUID-VkCoarseSampleOrderCustomNV-pSampleLocations-02077"}},
- {&sampOrdOutOfRangeLocations,
- {"VUID-VkCoarseSampleOrderCustomNV-pSampleLocations-02077", "VUID-VkCoarseSampleLocationNV-pixelX-02078",
- "VUID-VkCoarseSampleLocationNV-pixelY-02079", "VUID-VkCoarseSampleLocationNV-sample-02080"}},
- {&sampOrdTooLargeSampleLocationCount,
- {"VUID-VkCoarseSampleOrderCustomNV-sampleLocationCount-02076",
- "VUID-VkCoarseSampleOrderCustomNV-pSampleLocations-02077"}},
- {&sampOrdGood, {}},
- };
-
- for (const auto &test_case : test_cases) {
- const auto break_vp = [&](CreatePipelineHelper &helper) {
- helper.vp_state_ci_.pNext = &csosci;
- csosci.pCustomSampleOrders = test_case.order;
- };
- CreatePipelineHelper::OneshotTest(*this, break_vp, VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.vuids);
- }
-
- // Test vkCmdSetCoarseSampleOrderNV errors
- auto vkCmdSetCoarseSampleOrderNV =
- (PFN_vkCmdSetCoarseSampleOrderNV)vkGetDeviceProcAddr(m_device->device(), "vkCmdSetCoarseSampleOrderNV");
-
- for (const auto &test_case : test_cases) {
- for (uint32_t i = 0; i < test_case.vuids.size(); ++i) {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.vuids[i]);
- }
- vkCmdSetCoarseSampleOrderNV(m_commandBuffer->handle(), VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV, 1, test_case.order);
- if (test_case.vuids.size()) {
- m_errorMonitor->VerifyFound();
- } else {
- m_errorMonitor->VerifyNotFound();
- }
- }
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdSetCoarseSampleOrderNV-sampleOrderType-02081");
- vkCmdSetCoarseSampleOrderNV(m_commandBuffer->handle(), VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV, 1, &sampOrdGood);
- m_errorMonitor->VerifyFound();
- }
-
- m_commandBuffer->end();
-
- vkDestroyImageView(m_device->device(), view, NULL);
-}
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-#include "android_ndk_types.h"
-
-TEST_F(VkLayerTest, AndroidHardwareBufferImageCreate) {
- TEST_DESCRIPTION("Verify AndroidHardwareBuffer image create info.");
-
- SetTargetApiVersion(VK_API_VERSION_1_1);
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- if ((DeviceExtensionSupported(gpu(), nullptr, VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME)) &&
- // Also skip on devices that advertise AHB, but not the pre-requisite foreign_queue extension
- (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME))) {
- m_device_extension_names.push_back(VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME);
- } else {
- printf("%s %s extension not supported, skipping tests\n", kSkipPrefix,
- VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitState());
- VkDevice dev = m_device->device();
-
- VkImage img = VK_NULL_HANDLE;
- auto reset_img = [&img, dev]() {
- if (VK_NULL_HANDLE != img) vkDestroyImage(dev, img, NULL);
- img = VK_NULL_HANDLE;
- };
-
- VkImageCreateInfo ici = {};
- ici.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- ici.pNext = nullptr;
- ici.imageType = VK_IMAGE_TYPE_2D;
- ici.arrayLayers = 1;
- ici.extent = {64, 64, 1};
- ici.format = VK_FORMAT_UNDEFINED;
- ici.mipLevels = 1;
- ici.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
- ici.samples = VK_SAMPLE_COUNT_1_BIT;
- ici.tiling = VK_IMAGE_TILING_OPTIMAL;
- ici.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
-
- // undefined format
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-pNext-01975");
- m_errorMonitor->SetUnexpectedError("VUID_Undefined");
- vkCreateImage(dev, &ici, NULL, &img);
- m_errorMonitor->VerifyFound();
- reset_img();
-
- // also undefined format
- VkExternalFormatANDROID efa = {};
- efa.sType = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID;
- efa.externalFormat = 0;
- ici.pNext = &efa;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-pNext-01975");
- vkCreateImage(dev, &ici, NULL, &img);
- m_errorMonitor->VerifyFound();
- reset_img();
-
- // undefined format with an unknown external format
- efa.externalFormat = 0xBADC0DE;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkExternalFormatANDROID-externalFormat-01894");
- vkCreateImage(dev, &ici, NULL, &img);
- m_errorMonitor->VerifyFound();
- reset_img();
-
- AHardwareBuffer *ahb;
- AHardwareBuffer_Desc ahb_desc = {};
- ahb_desc.format = AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM;
- ahb_desc.usage = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
- ahb_desc.width = 64;
- ahb_desc.height = 64;
- ahb_desc.layers = 1;
- // Allocate an AHardwareBuffer
- AHardwareBuffer_allocate(&ahb_desc, &ahb);
-
- // Retrieve its properties to make its external format 'known' (AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM)
- VkAndroidHardwareBufferFormatPropertiesANDROID ahb_fmt_props = {};
- ahb_fmt_props.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID;
- VkAndroidHardwareBufferPropertiesANDROID ahb_props = {};
- ahb_props.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
- ahb_props.pNext = &ahb_fmt_props;
- PFN_vkGetAndroidHardwareBufferPropertiesANDROID pfn_GetAHBProps =
- (PFN_vkGetAndroidHardwareBufferPropertiesANDROID)vkGetDeviceProcAddr(dev, "vkGetAndroidHardwareBufferPropertiesANDROID");
- ASSERT_TRUE(pfn_GetAHBProps != nullptr);
- pfn_GetAHBProps(dev, ahb, &ahb_props);
-
- // a defined image format with a non-zero external format
- ici.format = VK_FORMAT_R8G8B8A8_UNORM;
- efa.externalFormat = ahb_fmt_props.externalFormat;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-pNext-01974");
- vkCreateImage(dev, &ici, NULL, &img);
- m_errorMonitor->VerifyFound();
- reset_img();
- ici.format = VK_FORMAT_UNDEFINED;
-
- // external format while MUTABLE
- ici.flags = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-pNext-02396");
- vkCreateImage(dev, &ici, NULL, &img);
- m_errorMonitor->VerifyFound();
- reset_img();
- ici.flags = 0;
-
- // external format while usage other than SAMPLED
- ici.usage |= VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-pNext-02397");
- vkCreateImage(dev, &ici, NULL, &img);
- m_errorMonitor->VerifyFound();
- reset_img();
- ici.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
-
- // external format while tiling other than OPTIMAL
- ici.tiling = VK_IMAGE_TILING_LINEAR;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-pNext-02398");
- vkCreateImage(dev, &ici, NULL, &img);
- m_errorMonitor->VerifyFound();
- reset_img();
- ici.tiling = VK_IMAGE_TILING_OPTIMAL;
-
- // imageType
- VkExternalMemoryImageCreateInfo emici = {};
- emici.sType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO;
- emici.handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
- ici.pNext = &emici; // remove efa from chain, insert emici
- ici.format = VK_FORMAT_R8G8B8A8_UNORM;
- ici.imageType = VK_IMAGE_TYPE_3D;
- ici.extent = {64, 64, 64};
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-pNext-02393");
- vkCreateImage(dev, &ici, NULL, &img);
- m_errorMonitor->VerifyFound();
- reset_img();
-
- // wrong mipLevels
- ici.imageType = VK_IMAGE_TYPE_2D;
- ici.extent = {64, 64, 1};
- ici.mipLevels = 6; // should be 7
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-pNext-02394");
- vkCreateImage(dev, &ici, NULL, &img);
- m_errorMonitor->VerifyFound();
- reset_img();
-}
-
-TEST_F(VkLayerTest, AndroidHardwareBufferFetchUnboundImageInfo) {
- TEST_DESCRIPTION("Verify AndroidHardwareBuffer retreive image properties while memory unbound.");
-
- SetTargetApiVersion(VK_API_VERSION_1_1);
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- if ((DeviceExtensionSupported(gpu(), nullptr, VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME)) &&
- // Also skip on devices that advertise AHB, but not the pre-requisite foreign_queue extension
- (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME))) {
- m_device_extension_names.push_back(VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME);
- } else {
- printf("%s %s extension not supported, skipping tests\n", kSkipPrefix,
- VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitState());
- VkDevice dev = m_device->device();
-
- VkImage img = VK_NULL_HANDLE;
- auto reset_img = [&img, dev]() {
- if (VK_NULL_HANDLE != img) vkDestroyImage(dev, img, NULL);
- img = VK_NULL_HANDLE;
- };
-
- VkImageCreateInfo ici = {};
- ici.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- ici.pNext = nullptr;
- ici.imageType = VK_IMAGE_TYPE_2D;
- ici.arrayLayers = 1;
- ici.extent = {64, 64, 1};
- ici.format = VK_FORMAT_R8G8B8A8_UNORM;
- ici.mipLevels = 1;
- ici.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
- ici.samples = VK_SAMPLE_COUNT_1_BIT;
- ici.tiling = VK_IMAGE_TILING_LINEAR;
- ici.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
-
- VkExternalMemoryImageCreateInfo emici = {};
- emici.sType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO;
- emici.handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
- ici.pNext = &emici;
-
- m_errorMonitor->ExpectSuccess();
- vkCreateImage(dev, &ici, NULL, &img);
- m_errorMonitor->VerifyNotFound();
-
- // attempt to fetch layout from unbound image
- VkImageSubresource sub_rsrc = {};
- sub_rsrc.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- VkSubresourceLayout sub_layout = {};
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkGetImageSubresourceLayout-image-01895");
- vkGetImageSubresourceLayout(dev, img, &sub_rsrc, &sub_layout);
- m_errorMonitor->VerifyFound();
-
- // attempt to get memory reqs from unbound image
- VkImageMemoryRequirementsInfo2 imri = {};
- imri.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2;
- imri.image = img;
- VkMemoryRequirements2 mem_reqs = {};
- mem_reqs.sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryRequirementsInfo2-image-01897");
- vkGetImageMemoryRequirements2(dev, &imri, &mem_reqs);
- m_errorMonitor->VerifyFound();
-
- reset_img();
-}
-
-TEST_F(VkLayerTest, AndroidHardwareBufferMemoryAllocation) {
- TEST_DESCRIPTION("Verify AndroidHardwareBuffer memory allocation.");
-
- SetTargetApiVersion(VK_API_VERSION_1_1);
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- if ((DeviceExtensionSupported(gpu(), nullptr, VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME)) &&
- // Also skip on devices that advertise AHB, but not the pre-requisite foreign_queue extension
- (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME))) {
- m_device_extension_names.push_back(VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME);
- } else {
- printf("%s %s extension not supported, skipping tests\n", kSkipPrefix,
- VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitState());
- VkDevice dev = m_device->device();
-
- VkImage img = VK_NULL_HANDLE;
- auto reset_img = [&img, dev]() {
- if (VK_NULL_HANDLE != img) vkDestroyImage(dev, img, NULL);
- img = VK_NULL_HANDLE;
- };
- VkDeviceMemory mem_handle = VK_NULL_HANDLE;
- auto reset_mem = [&mem_handle, dev]() {
- if (VK_NULL_HANDLE != mem_handle) vkFreeMemory(dev, mem_handle, NULL);
- mem_handle = VK_NULL_HANDLE;
- };
-
- PFN_vkGetAndroidHardwareBufferPropertiesANDROID pfn_GetAHBProps =
- (PFN_vkGetAndroidHardwareBufferPropertiesANDROID)vkGetDeviceProcAddr(dev, "vkGetAndroidHardwareBufferPropertiesANDROID");
- ASSERT_TRUE(pfn_GetAHBProps != nullptr);
-
- // AHB structs
- AHardwareBuffer *ahb = nullptr;
- AHardwareBuffer_Desc ahb_desc = {};
- VkAndroidHardwareBufferFormatPropertiesANDROID ahb_fmt_props = {};
- ahb_fmt_props.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID;
- VkAndroidHardwareBufferPropertiesANDROID ahb_props = {};
- ahb_props.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
- ahb_props.pNext = &ahb_fmt_props;
- VkImportAndroidHardwareBufferInfoANDROID iahbi = {};
- iahbi.sType = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID;
-
- // destroy and re-acquire an AHB, and fetch its properties
- auto recreate_ahb = [&ahb, &iahbi, &ahb_desc, &ahb_props, dev, pfn_GetAHBProps]() {
- if (ahb) AHardwareBuffer_release(ahb);
- ahb = nullptr;
- AHardwareBuffer_allocate(&ahb_desc, &ahb);
- if (ahb) {
- pfn_GetAHBProps(dev, ahb, &ahb_props);
- iahbi.buffer = ahb;
- }
- };
-
- // Allocate an AHardwareBuffer
- ahb_desc.format = AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM;
- ahb_desc.usage = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
- ahb_desc.width = 64;
- ahb_desc.height = 64;
- ahb_desc.layers = 1;
- recreate_ahb();
-
- // Create an image w/ external format
- VkExternalFormatANDROID efa = {};
- efa.sType = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID;
- efa.externalFormat = ahb_fmt_props.externalFormat;
-
- VkImageCreateInfo ici = {};
- ici.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- ici.pNext = &efa;
- ici.imageType = VK_IMAGE_TYPE_2D;
- ici.arrayLayers = 1;
- ici.extent = {64, 64, 1};
- ici.format = VK_FORMAT_UNDEFINED;
- ici.mipLevels = 1;
- ici.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
- ici.samples = VK_SAMPLE_COUNT_1_BIT;
- ici.tiling = VK_IMAGE_TILING_OPTIMAL;
- ici.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
- VkResult res = vkCreateImage(dev, &ici, NULL, &img);
- ASSERT_VK_SUCCESS(res);
-
- VkMemoryAllocateInfo mai = {};
- mai.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
- mai.pNext = &iahbi; // Chained import struct
- mai.allocationSize = ahb_props.allocationSize;
- mai.memoryTypeIndex = 32;
- // Set index to match one of the bits in ahb_props
- for (int i = 0; i < 32; i++) {
- if (ahb_props.memoryTypeBits & (1 << i)) {
- mai.memoryTypeIndex = i;
- break;
- }
- }
- ASSERT_NE(32, mai.memoryTypeIndex);
-
- // Import w/ non-dedicated memory allocation
-
- // Import requires format AHB_FMT_BLOB and usage AHB_USAGE_GPU_DATA_BUFFER
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMemoryAllocateInfo-pNext-02384");
- vkAllocateMemory(dev, &mai, NULL, &mem_handle);
- m_errorMonitor->VerifyFound();
- reset_mem();
-
- // Allocation size mismatch
- ahb_desc.format = AHARDWAREBUFFER_FORMAT_BLOB;
- ahb_desc.usage = AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER;
- ahb_desc.height = 1;
- recreate_ahb();
- mai.allocationSize = ahb_props.allocationSize + 1;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMemoryAllocateInfo-allocationSize-02383");
- vkAllocateMemory(dev, &mai, NULL, &mem_handle);
- m_errorMonitor->VerifyFound();
- mai.allocationSize = ahb_props.allocationSize;
- reset_mem();
-
- // memoryTypeIndex mismatch
- mai.memoryTypeIndex++;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMemoryAllocateInfo-memoryTypeIndex-02385");
- vkAllocateMemory(dev, &mai, NULL, &mem_handle);
- m_errorMonitor->VerifyFound();
- mai.memoryTypeIndex--;
- reset_mem();
-
- // Insert dedicated image memory allocation to mai chain
- VkMemoryDedicatedAllocateInfo mdai = {};
- mdai.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO;
- mdai.image = img;
- mdai.buffer = VK_NULL_HANDLE;
- mdai.pNext = mai.pNext;
- mai.pNext = &mdai;
-
- // Dedicated allocation with unmatched usage bits
- ahb_desc.format = AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM;
- ahb_desc.usage = AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT;
- ahb_desc.height = 64;
- recreate_ahb();
- mai.allocationSize = ahb_props.allocationSize;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMemoryAllocateInfo-pNext-02390");
- vkAllocateMemory(dev, &mai, NULL, &mem_handle);
- m_errorMonitor->VerifyFound();
- reset_mem();
-
- // Dedicated allocation with incomplete mip chain
- reset_img();
- ici.mipLevels = 2;
- vkCreateImage(dev, &ici, NULL, &img);
- mdai.image = img;
- ahb_desc.usage = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE | AHARDWAREBUFFER_USAGE_GPU_MIPMAP_COMPLETE;
- recreate_ahb();
-
- if (ahb) {
- mai.allocationSize = ahb_props.allocationSize;
- for (int i = 0; i < 32; i++) {
- if (ahb_props.memoryTypeBits & (1 << i)) {
- mai.memoryTypeIndex = i;
- break;
- }
- }
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMemoryAllocateInfo-pNext-02389");
- vkAllocateMemory(dev, &mai, NULL, &mem_handle);
- m_errorMonitor->VerifyFound();
- reset_mem();
- } else {
- // ERROR: AHardwareBuffer_allocate() with MIPMAP_COMPLETE fails. It returns -12, NO_MEMORY.
- // The problem seems to happen on Pixel 2, not Pixel 3.
- printf("%s AHARDWAREBUFFER_USAGE_GPU_MIPMAP_COMPLETE not supported, skipping tests\n", kSkipPrefix);
- }
-
- // Dedicated allocation with mis-matched dimension
- ahb_desc.usage = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
- ahb_desc.height = 32;
- ahb_desc.width = 128;
- recreate_ahb();
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMemoryAllocateInfo-pNext-02388");
- vkAllocateMemory(dev, &mai, NULL, &mem_handle);
- m_errorMonitor->VerifyFound();
- reset_mem();
-
- // Dedicated allocation with mis-matched VkFormat
- ahb_desc.usage = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
- ahb_desc.height = 64;
- ahb_desc.width = 64;
- recreate_ahb();
- ici.mipLevels = 1;
- ici.format = VK_FORMAT_B8G8R8A8_UNORM;
- ici.pNext = NULL;
- VkImage img2;
- vkCreateImage(dev, &ici, NULL, &img2);
- mdai.image = img2;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMemoryAllocateInfo-pNext-02387");
- vkAllocateMemory(dev, &mai, NULL, &mem_handle);
- m_errorMonitor->VerifyFound();
- vkDestroyImage(dev, img2, NULL);
- mdai.image = img;
- reset_mem();
-
- // Missing required ahb usage
- ahb_desc.usage = AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkGetAndroidHardwareBufferPropertiesANDROID-buffer-01884");
- recreate_ahb();
- m_errorMonitor->VerifyFound();
-
- // Dedicated allocation with missing usage bits
- // Setting up this test also triggers a slew of others
- mai.allocationSize = ahb_props.allocationSize + 1;
- mai.memoryTypeIndex = 0;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMemoryAllocateInfo-pNext-02390");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMemoryAllocateInfo-memoryTypeIndex-02385");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMemoryAllocateInfo-allocationSize-02383");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMemoryAllocateInfo-pNext-02386");
- vkAllocateMemory(dev, &mai, NULL, &mem_handle);
- m_errorMonitor->VerifyFound();
- reset_mem();
-
- // Non-import allocation - replace import struct in chain with export struct
- VkExportMemoryAllocateInfo emai = {};
- emai.sType = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO;
- emai.handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
- mai.pNext = &emai;
- emai.pNext = &mdai; // still dedicated
- mdai.pNext = nullptr;
-
- // Export with allocation size non-zero
- ahb_desc.usage = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
- recreate_ahb();
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMemoryAllocateInfo-pNext-01874");
- vkAllocateMemory(dev, &mai, NULL, &mem_handle);
- m_errorMonitor->VerifyFound();
- reset_mem();
-
- AHardwareBuffer_release(ahb);
- reset_mem();
- reset_img();
-}
-
-TEST_F(VkLayerTest, AndroidHardwareBufferCreateYCbCrSampler) {
- TEST_DESCRIPTION("Verify AndroidHardwareBuffer YCbCr sampler creation.");
-
- SetTargetApiVersion(VK_API_VERSION_1_1);
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- if ((DeviceExtensionSupported(gpu(), nullptr, VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME)) &&
- // Also skip on devices that advertise AHB, but not the pre-requisite foreign_queue extension
- (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME))) {
- m_device_extension_names.push_back(VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME);
- } else {
- printf("%s %s extension not supported, skipping tests\n", kSkipPrefix,
- VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitState());
- VkDevice dev = m_device->device();
-
- VkSamplerYcbcrConversion ycbcr_conv = VK_NULL_HANDLE;
- VkSamplerYcbcrConversionCreateInfo sycci = {};
- sycci.sType = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO;
- sycci.format = VK_FORMAT_UNDEFINED;
- sycci.ycbcrModel = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY;
- sycci.ycbcrRange = VK_SAMPLER_YCBCR_RANGE_ITU_FULL;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkSamplerYcbcrConversionCreateInfo-format-01904");
- vkCreateSamplerYcbcrConversion(dev, &sycci, NULL, &ycbcr_conv);
- m_errorMonitor->VerifyFound();
-
- VkExternalFormatANDROID efa = {};
- efa.sType = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID;
- efa.externalFormat = AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM;
- sycci.format = VK_FORMAT_R8G8B8A8_UNORM;
- sycci.pNext = &efa;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkSamplerYcbcrConversionCreateInfo-format-01904");
- vkCreateSamplerYcbcrConversion(dev, &sycci, NULL, &ycbcr_conv);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, AndroidHardwareBufferPhysDevImageFormatProp2) {
- TEST_DESCRIPTION("Verify AndroidHardwareBuffer GetPhysicalDeviceImageFormatProperties.");
-
- SetTargetApiVersion(VK_API_VERSION_1_1);
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- if ((DeviceExtensionSupported(gpu(), nullptr, VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME)) &&
- // Also skip on devices that advertise AHB, but not the pre-requisite foreign_queue extension
- (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME))) {
- m_device_extension_names.push_back(VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME);
- } else {
- printf("%s %s extension not supported, skipping test\n", kSkipPrefix,
- VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- if ((m_instance_api_version < VK_API_VERSION_1_1) &&
- !InstanceExtensionEnabled(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- printf("%s %s extension not supported, skipping test\n", kSkipPrefix,
- VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- return;
- }
-
- VkImageFormatProperties2 ifp = {};
- ifp.sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2;
- VkPhysicalDeviceImageFormatInfo2 pdifi = {};
- pdifi.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2;
- pdifi.format = VK_FORMAT_R8G8B8A8_UNORM;
- pdifi.tiling = VK_IMAGE_TILING_OPTIMAL;
- pdifi.type = VK_IMAGE_TYPE_2D;
- pdifi.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
- VkAndroidHardwareBufferUsageANDROID ahbu = {};
- ahbu.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID;
- ahbu.androidHardwareBufferUsage = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
- ifp.pNext = &ahbu;
-
- // AHB_usage chained to input without a matching external image format struct chained to output
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkGetPhysicalDeviceImageFormatProperties2-pNext-01868");
- vkGetPhysicalDeviceImageFormatProperties2(m_device->phy().handle(), &pdifi, &ifp);
- m_errorMonitor->VerifyFound();
-
- // output struct chained, but does not include the VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID handle type
- VkPhysicalDeviceExternalImageFormatInfo pdeifi = {};
- pdeifi.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO;
- pdeifi.handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT;
- pdifi.pNext = &pdeifi;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkGetPhysicalDeviceImageFormatProperties2-pNext-01868");
- vkGetPhysicalDeviceImageFormatProperties2(m_device->phy().handle(), &pdifi, &ifp);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, AndroidHardwareBufferCreateImageView) {
- TEST_DESCRIPTION("Verify AndroidHardwareBuffer image view creation.");
-
- SetTargetApiVersion(VK_API_VERSION_1_1);
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- if ((DeviceExtensionSupported(gpu(), nullptr, VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME)) &&
- // Also skip on devices that advertise AHB, but not the pre-requisite foreign_queue extension
- (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME))) {
- m_device_extension_names.push_back(VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME);
- } else {
- printf("%s %s extension not supported, skipping tests\n", kSkipPrefix,
- VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitState());
- VkDevice dev = m_device->device();
-
- // Allocate an AHB and fetch its properties
- AHardwareBuffer *ahb = nullptr;
- AHardwareBuffer_Desc ahb_desc = {};
- ahb_desc.format = AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM;
- ahb_desc.usage = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
- ahb_desc.width = 64;
- ahb_desc.height = 64;
- ahb_desc.layers = 1;
- AHardwareBuffer_allocate(&ahb_desc, &ahb);
-
- // Retrieve AHB properties to make its external format 'known'
- VkAndroidHardwareBufferFormatPropertiesANDROID ahb_fmt_props = {};
- ahb_fmt_props.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID;
- VkAndroidHardwareBufferPropertiesANDROID ahb_props = {};
- ahb_props.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
- ahb_props.pNext = &ahb_fmt_props;
- PFN_vkGetAndroidHardwareBufferPropertiesANDROID pfn_GetAHBProps =
- (PFN_vkGetAndroidHardwareBufferPropertiesANDROID)vkGetDeviceProcAddr(dev, "vkGetAndroidHardwareBufferPropertiesANDROID");
- ASSERT_TRUE(pfn_GetAHBProps != nullptr);
- pfn_GetAHBProps(dev, ahb, &ahb_props);
- AHardwareBuffer_release(ahb);
-
- // Give image an external format
- VkExternalFormatANDROID efa = {};
- efa.sType = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID;
- efa.externalFormat = ahb_fmt_props.externalFormat;
-
- ahb_desc.format = AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM;
- ahb_desc.usage = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
- ahb_desc.width = 64;
- ahb_desc.height = 1;
- ahb_desc.layers = 1;
- AHardwareBuffer_allocate(&ahb_desc, &ahb);
-
- // Create another VkExternalFormatANDROID to test VUID-VkImageViewCreateInfo-image-02400
- VkAndroidHardwareBufferFormatPropertiesANDROID ahb_fmt_props_Ycbcr = {};
- ahb_fmt_props_Ycbcr.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID;
- VkAndroidHardwareBufferPropertiesANDROID ahb_props_Ycbcr = {};
- ahb_props_Ycbcr.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
- ahb_props_Ycbcr.pNext = &ahb_fmt_props_Ycbcr;
- pfn_GetAHBProps(dev, ahb, &ahb_props_Ycbcr);
- AHardwareBuffer_release(ahb);
-
- VkExternalFormatANDROID efa_Ycbcr = {};
- efa_Ycbcr.sType = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID;
- efa_Ycbcr.externalFormat = ahb_fmt_props_Ycbcr.externalFormat;
-
- // Create the image
- VkImage img = VK_NULL_HANDLE;
- VkImageCreateInfo ici = {};
- ici.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- ici.pNext = &efa;
- ici.imageType = VK_IMAGE_TYPE_2D;
- ici.arrayLayers = 1;
- ici.extent = {64, 64, 1};
- ici.format = VK_FORMAT_UNDEFINED;
- ici.mipLevels = 1;
- ici.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
- ici.samples = VK_SAMPLE_COUNT_1_BIT;
- ici.tiling = VK_IMAGE_TILING_OPTIMAL;
- ici.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
- vkCreateImage(dev, &ici, NULL, &img);
-
- // Set up memory allocation
- VkDeviceMemory img_mem = VK_NULL_HANDLE;
- VkMemoryAllocateInfo mai = {};
- mai.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
- mai.allocationSize = 64 * 64 * 4;
- mai.memoryTypeIndex = 0;
- vkAllocateMemory(dev, &mai, NULL, &img_mem);
-
- // vkGetImageMemoryRequirements should not be used for an AndroidHardwareBuffer-backed image.
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "UNASSIGNED-CoreValidation-DrawState-InvalidImage");
- VkMemoryRequirements img_mem_reqs = {};
- vkGetImageMemoryRequirements(m_device->device(), img, &img_mem_reqs);
- vkBindImageMemory(dev, img, img_mem, 0);
- m_errorMonitor->VerifyFound();
-
- // Bind image to memory
- vkDestroyImage(dev, img, NULL);
- vkFreeMemory(dev, img_mem, NULL);
- vkCreateImage(dev, &ici, NULL, &img);
- vkAllocateMemory(dev, &mai, NULL, &img_mem);
- vkBindImageMemory(dev, img, img_mem, 0);
-
- // Create a YCbCr conversion, with different external format, chain to view
- VkSamplerYcbcrConversion ycbcr_conv = VK_NULL_HANDLE;
- VkSamplerYcbcrConversionCreateInfo sycci = {};
- sycci.sType = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO;
- sycci.pNext = &efa_Ycbcr;
- sycci.format = VK_FORMAT_UNDEFINED;
- sycci.ycbcrModel = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY;
- sycci.ycbcrRange = VK_SAMPLER_YCBCR_RANGE_ITU_FULL;
- vkCreateSamplerYcbcrConversion(dev, &sycci, NULL, &ycbcr_conv);
- VkSamplerYcbcrConversionInfo syci = {};
- syci.sType = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO;
- syci.conversion = ycbcr_conv;
-
- // Create a view
- VkImageView image_view = VK_NULL_HANDLE;
- VkImageViewCreateInfo ivci = {};
- ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
- ivci.pNext = &syci;
- ivci.image = img;
- ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
- ivci.format = VK_FORMAT_UNDEFINED;
- ivci.subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
-
- auto reset_view = [&image_view, dev]() {
- if (VK_NULL_HANDLE != image_view) vkDestroyImageView(dev, image_view, NULL);
- image_view = VK_NULL_HANDLE;
- };
-
- // Up to this point, no errors expected
- m_errorMonitor->VerifyNotFound();
-
- // Chained ycbcr conversion has different (external) format than image
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageViewCreateInfo-image-02400");
- // Also causes "unsupported format" - should be removed in future spec update
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageViewCreateInfo-None-02273");
- vkCreateImageView(dev, &ivci, NULL, &image_view);
- m_errorMonitor->VerifyFound();
-
- reset_view();
- vkDestroySamplerYcbcrConversion(dev, ycbcr_conv, NULL);
- sycci.pNext = &efa;
- vkCreateSamplerYcbcrConversion(dev, &sycci, NULL, &ycbcr_conv);
- syci.conversion = ycbcr_conv;
-
- // View component swizzle not IDENTITY
- ivci.components.r = VK_COMPONENT_SWIZZLE_B;
- ivci.components.b = VK_COMPONENT_SWIZZLE_R;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageViewCreateInfo-image-02401");
- // Also causes "unsupported format" - should be removed in future spec update
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageViewCreateInfo-None-02273");
- vkCreateImageView(dev, &ivci, NULL, &image_view);
- m_errorMonitor->VerifyFound();
-
- reset_view();
- ivci.components.r = VK_COMPONENT_SWIZZLE_IDENTITY;
- ivci.components.b = VK_COMPONENT_SWIZZLE_IDENTITY;
-
- // View with external format, when format is not UNDEFINED
- ivci.format = VK_FORMAT_R5G6B5_UNORM_PACK16;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageViewCreateInfo-image-02399");
- // Also causes "view format different from image format"
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageViewCreateInfo-image-01019");
- vkCreateImageView(dev, &ivci, NULL, &image_view);
- m_errorMonitor->VerifyFound();
-
- reset_view();
- vkDestroySamplerYcbcrConversion(dev, ycbcr_conv, NULL);
- vkDestroyImageView(dev, image_view, NULL);
- vkDestroyImage(dev, img, NULL);
- vkFreeMemory(dev, img_mem, NULL);
-}
-
-TEST_F(VkLayerTest, AndroidHardwareBufferImportBuffer) {
- TEST_DESCRIPTION("Verify AndroidHardwareBuffer import as buffer.");
-
- SetTargetApiVersion(VK_API_VERSION_1_1);
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- if ((DeviceExtensionSupported(gpu(), nullptr, VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME)) &&
- // Also skip on devices that advertise AHB, but not the pre-requisite foreign_queue extension
- (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME))) {
- m_device_extension_names.push_back(VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME);
- } else {
- printf("%s %s extension not supported, skipping tests\n", kSkipPrefix,
- VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitState());
- VkDevice dev = m_device->device();
-
- VkDeviceMemory mem_handle = VK_NULL_HANDLE;
- auto reset_mem = [&mem_handle, dev]() {
- if (VK_NULL_HANDLE != mem_handle) vkFreeMemory(dev, mem_handle, NULL);
- mem_handle = VK_NULL_HANDLE;
- };
-
- PFN_vkGetAndroidHardwareBufferPropertiesANDROID pfn_GetAHBProps =
- (PFN_vkGetAndroidHardwareBufferPropertiesANDROID)vkGetDeviceProcAddr(dev, "vkGetAndroidHardwareBufferPropertiesANDROID");
- ASSERT_TRUE(pfn_GetAHBProps != nullptr);
-
- // AHB structs
- AHardwareBuffer *ahb = nullptr;
- AHardwareBuffer_Desc ahb_desc = {};
- VkAndroidHardwareBufferPropertiesANDROID ahb_props = {};
- ahb_props.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
- VkImportAndroidHardwareBufferInfoANDROID iahbi = {};
- iahbi.sType = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID;
-
- // Allocate an AHardwareBuffer
- ahb_desc.format = AHARDWAREBUFFER_FORMAT_BLOB;
- ahb_desc.usage = AHARDWAREBUFFER_USAGE_SENSOR_DIRECT_DATA;
- ahb_desc.width = 512;
- ahb_desc.height = 1;
- ahb_desc.layers = 1;
- AHardwareBuffer_allocate(&ahb_desc, &ahb);
- m_errorMonitor->SetUnexpectedError("VUID-vkGetAndroidHardwareBufferPropertiesANDROID-buffer-01884");
- pfn_GetAHBProps(dev, ahb, &ahb_props);
- iahbi.buffer = ahb;
-
- // Create export and import buffers
- VkExternalMemoryBufferCreateInfo ext_buf_info = {};
- ext_buf_info.sType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR;
- ext_buf_info.handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT;
-
- VkBufferCreateInfo bci = {};
- bci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
- bci.pNext = &ext_buf_info;
- bci.size = ahb_props.allocationSize;
- bci.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
-
- VkBuffer buf = VK_NULL_HANDLE;
- vkCreateBuffer(dev, &bci, NULL, &buf);
- VkMemoryRequirements mem_reqs;
- vkGetBufferMemoryRequirements(dev, buf, &mem_reqs);
-
- // Allocation info
- VkMemoryAllocateInfo mai = vk_testing::DeviceMemory::get_resource_alloc_info(*m_device, mem_reqs, 0);
- mai.pNext = &iahbi; // Chained import struct
- VkPhysicalDeviceMemoryProperties memory_info;
- vkGetPhysicalDeviceMemoryProperties(gpu(), &memory_info);
- unsigned int i;
- for (i = 0; i < memory_info.memoryTypeCount; i++) {
- if ((ahb_props.memoryTypeBits & (1 << i))) {
- mai.memoryTypeIndex = i;
- break;
- }
- }
- if (i >= memory_info.memoryTypeCount) {
- printf("%s No invalid memory type index could be found; skipped.\n", kSkipPrefix);
- AHardwareBuffer_release(ahb);
- reset_mem();
- vkDestroyBuffer(dev, buf, NULL);
- return;
- }
-
- // Import as buffer requires format AHB_FMT_BLOB and usage AHB_USAGE_GPU_DATA_BUFFER
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkImportAndroidHardwareBufferInfoANDROID-buffer-01881");
- // Also causes "non-dedicated allocation format/usage" error
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMemoryAllocateInfo-pNext-02384");
- vkAllocateMemory(dev, &mai, NULL, &mem_handle);
- m_errorMonitor->VerifyFound();
-
- AHardwareBuffer_release(ahb);
- reset_mem();
- vkDestroyBuffer(dev, buf, NULL);
-}
-
-TEST_F(VkLayerTest, AndroidHardwareBufferExporttBuffer) {
- TEST_DESCRIPTION("Verify AndroidHardwareBuffer export memory as AHB.");
-
- SetTargetApiVersion(VK_API_VERSION_1_1);
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- if ((DeviceExtensionSupported(gpu(), nullptr, VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME)) &&
- // Also skip on devices that advertise AHB, but not the pre-requisite foreign_queue extension
- (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME))) {
- m_device_extension_names.push_back(VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME);
- } else {
- printf("%s %s extension not supported, skipping tests\n", kSkipPrefix,
- VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitState());
- VkDevice dev = m_device->device();
-
- VkDeviceMemory mem_handle = VK_NULL_HANDLE;
-
- // Allocate device memory, no linked export struct indicating AHB handle type
- VkMemoryAllocateInfo mai = {};
- mai.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
- mai.allocationSize = 65536;
- mai.memoryTypeIndex = 0;
- vkAllocateMemory(dev, &mai, NULL, &mem_handle);
-
- PFN_vkGetMemoryAndroidHardwareBufferANDROID pfn_GetMemAHB =
- (PFN_vkGetMemoryAndroidHardwareBufferANDROID)vkGetDeviceProcAddr(dev, "vkGetMemoryAndroidHardwareBufferANDROID");
- ASSERT_TRUE(pfn_GetMemAHB != nullptr);
-
- VkMemoryGetAndroidHardwareBufferInfoANDROID mgahbi = {};
- mgahbi.sType = VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID;
- mgahbi.memory = mem_handle;
- AHardwareBuffer *ahb = nullptr;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-handleTypes-01882");
- pfn_GetMemAHB(dev, &mgahbi, &ahb);
- m_errorMonitor->VerifyFound();
-
- if (ahb) AHardwareBuffer_release(ahb);
- ahb = nullptr;
- if (VK_NULL_HANDLE != mem_handle) vkFreeMemory(dev, mem_handle, NULL);
- mem_handle = VK_NULL_HANDLE;
-
- // Add an export struct with AHB handle type to allocation info
- VkExportMemoryAllocateInfo emai = {};
- emai.sType = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO;
- emai.handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
- mai.pNext = &emai;
-
- // Create an image, do not bind memory
- VkImage img = VK_NULL_HANDLE;
- VkImageCreateInfo ici = {};
- ici.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- ici.imageType = VK_IMAGE_TYPE_2D;
- ici.arrayLayers = 1;
- ici.extent = {128, 128, 1};
- ici.format = VK_FORMAT_R8G8B8A8_UNORM;
- ici.mipLevels = 1;
- ici.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
- ici.samples = VK_SAMPLE_COUNT_1_BIT;
- ici.tiling = VK_IMAGE_TILING_OPTIMAL;
- ici.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
- vkCreateImage(dev, &ici, NULL, &img);
- ASSERT_TRUE(VK_NULL_HANDLE != img);
-
- // Add image to allocation chain as dedicated info, re-allocate
- VkMemoryDedicatedAllocateInfo mdai = {VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO};
- mdai.image = img;
- emai.pNext = &mdai;
- mai.allocationSize = 0;
- vkAllocateMemory(dev, &mai, NULL, &mem_handle);
- mgahbi.memory = mem_handle;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-pNext-01883");
- pfn_GetMemAHB(dev, &mgahbi, &ahb);
- m_errorMonitor->VerifyFound();
-
- if (ahb) AHardwareBuffer_release(ahb);
- if (VK_NULL_HANDLE != mem_handle) vkFreeMemory(dev, mem_handle, NULL);
- vkDestroyImage(dev, img, NULL);
-}
-
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-
-TEST_F(VkLayerTest, ValidateStride) {
- TEST_DESCRIPTION("Validate Stride.");
- ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
- ASSERT_NO_FATAL_FAILURE(InitViewport());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkQueryPool query_pool;
- VkQueryPoolCreateInfo query_pool_ci{};
- query_pool_ci.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
- query_pool_ci.queryType = VK_QUERY_TYPE_TIMESTAMP;
- query_pool_ci.queryCount = 1;
- vkCreateQueryPool(m_device->device(), &query_pool_ci, nullptr, &query_pool);
-
- m_commandBuffer->begin();
- vkCmdResetQueryPool(m_commandBuffer->handle(), query_pool, 0, 1);
- vkCmdWriteTimestamp(m_commandBuffer->handle(), VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, query_pool, 0);
- m_commandBuffer->end();
-
- VkSubmitInfo submit_info = {};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &m_commandBuffer->handle();
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
- vkQueueWaitIdle(m_device->m_queue);
-
- char data_space;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkGetQueryPoolResults-flags-00814");
- vkGetQueryPoolResults(m_device->handle(), query_pool, 0, 1, sizeof(data_space), &data_space, 1, VK_QUERY_RESULT_WAIT_BIT);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkGetQueryPoolResults-flags-00815");
- vkGetQueryPoolResults(m_device->handle(), query_pool, 0, 1, sizeof(data_space), &data_space, 1,
- (VK_QUERY_RESULT_WAIT_BIT | VK_QUERY_RESULT_64_BIT));
- m_errorMonitor->VerifyFound();
-
- char data_space4[4] = "";
- m_errorMonitor->ExpectSuccess();
- vkGetQueryPoolResults(m_device->handle(), query_pool, 0, 1, sizeof(data_space4), &data_space4, 4, VK_QUERY_RESULT_WAIT_BIT);
- m_errorMonitor->VerifyNotFound();
-
- char data_space8[8] = "";
- m_errorMonitor->ExpectSuccess();
- vkGetQueryPoolResults(m_device->handle(), query_pool, 0, 1, sizeof(data_space8), &data_space8, 8,
- (VK_QUERY_RESULT_WAIT_BIT | VK_QUERY_RESULT_64_BIT));
- m_errorMonitor->VerifyNotFound();
-
- uint32_t qfi = 0;
- VkBufferCreateInfo buff_create_info = {};
- buff_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
- buff_create_info.size = 128;
- buff_create_info.usage =
- VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
- buff_create_info.queueFamilyIndexCount = 1;
- buff_create_info.pQueueFamilyIndices = &qfi;
- VkBufferObj buffer;
- buffer.init(*m_device, buff_create_info);
-
- m_commandBuffer->reset();
- m_commandBuffer->begin();
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyQueryPoolResults-flags-00822");
- vkCmdCopyQueryPoolResults(m_commandBuffer->handle(), query_pool, 0, 1, buffer.handle(), 1, 1, 0);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyQueryPoolResults-flags-00823");
- vkCmdCopyQueryPoolResults(m_commandBuffer->handle(), query_pool, 0, 1, buffer.handle(), 1, 1, VK_QUERY_RESULT_64_BIT);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->ExpectSuccess();
- vkCmdCopyQueryPoolResults(m_commandBuffer->handle(), query_pool, 0, 1, buffer.handle(), 4, 4, 0);
- m_errorMonitor->VerifyNotFound();
-
- m_errorMonitor->ExpectSuccess();
- vkCmdCopyQueryPoolResults(m_commandBuffer->handle(), query_pool, 0, 1, buffer.handle(), 8, 8, VK_QUERY_RESULT_64_BIT);
- m_errorMonitor->VerifyNotFound();
-
- if (m_device->phy().features().multiDrawIndirect) {
- CreatePipelineHelper helper(*this);
- helper.InitInfo();
- helper.InitState();
- helper.CreateGraphicsPipeline();
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, helper.pipeline_);
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndirect-drawCount-00476");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndirect-drawCount-00488");
- vkCmdDrawIndirect(m_commandBuffer->handle(), buffer.handle(), 0, 100, 2);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->ExpectSuccess();
- vkCmdDrawIndirect(m_commandBuffer->handle(), buffer.handle(), 0, 2, 24);
- m_errorMonitor->VerifyNotFound();
-
- vkCmdBindIndexBuffer(m_commandBuffer->handle(), buffer.handle(), 0, VK_INDEX_TYPE_UINT16);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndexedIndirect-drawCount-00528");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndexedIndirect-drawCount-00540");
- vkCmdDrawIndexedIndirect(m_commandBuffer->handle(), buffer.handle(), 0, 100, 2);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->ExpectSuccess();
- vkCmdDrawIndexedIndirect(m_commandBuffer->handle(), buffer.handle(), 0, 2, 24);
- m_errorMonitor->VerifyNotFound();
-
- vkCmdEndRenderPass(m_commandBuffer->handle());
- m_commandBuffer->end();
-
- } else {
- printf("%s Test requires unsupported multiDrawIndirect feature. Skipped.\n", kSkipPrefix);
- }
- vkDestroyQueryPool(m_device->handle(), query_pool, NULL);
-}
-
-TEST_F(VkLayerTest, WarningSwapchainCreateInfoPreTransform) {
- TEST_DESCRIPTION("Print warning when preTransform doesn't match curretTransform");
-
- if (!AddSurfaceInstanceExtension()) {
- printf("%s surface extensions not supported, skipping test\n", kSkipPrefix);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- if (!AddSwapchainDeviceExtension()) {
- printf("%s swapchain extensions not supported, skipping test\n", kSkipPrefix);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitState());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
- "UNASSIGNED-CoreValidation-SwapchainPreTransform");
- m_errorMonitor->SetUnexpectedError("VUID-VkSwapchainCreateInfoKHR-preTransform-01279");
- InitSwapchain(VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR);
- m_errorMonitor->VerifyFound();
-}
-
-bool InitFrameworkForRayTracingTest(VkRenderFramework *renderFramework, std::vector<const char *> &instance_extension_names,
- std::vector<const char *> &device_extension_names, void *user_data) {
- const std::array<const char *, 1> required_instance_extensions = {VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME};
- for (const char *required_instance_extension : required_instance_extensions) {
- if (renderFramework->InstanceExtensionSupported(required_instance_extension)) {
- instance_extension_names.push_back(required_instance_extension);
- } else {
- printf("%s %s instance extension not supported, skipping test\n", kSkipPrefix, required_instance_extension);
- return false;
- }
- }
- renderFramework->InitFramework(myDbgFunc, user_data);
-
- if (renderFramework->DeviceIsMockICD() || renderFramework->DeviceSimulation()) {
- printf("%s Test not supported by MockICD, skipping tests\n", kSkipPrefix);
- return false;
- }
-
- const std::array<const char *, 2> required_device_extensions = {
- VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME,
- VK_NV_RAY_TRACING_EXTENSION_NAME,
- };
- for (const char *required_device_extension : required_device_extensions) {
- if (renderFramework->DeviceExtensionSupported(renderFramework->gpu(), nullptr, required_device_extension)) {
- device_extension_names.push_back(required_device_extension);
- } else {
- printf("%s %s device extension not supported, skipping test\n", kSkipPrefix, required_device_extension);
- return false;
- }
- }
- renderFramework->InitState();
- return true;
-}
-
-TEST_F(VkLayerTest, ValidateGeometryNV) {
- TEST_DESCRIPTION("Validate acceleration structure geometries.");
- if (!InitFrameworkForRayTracingTest(this, m_instance_extension_names, m_device_extension_names, m_errorMonitor)) {
- return;
- }
-
- VkBufferObj vbo;
- vbo.init(*m_device, 1024, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
- VK_BUFFER_USAGE_RAY_TRACING_BIT_NV);
-
- VkBufferObj ibo;
- ibo.init(*m_device, 1024, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
- VK_BUFFER_USAGE_RAY_TRACING_BIT_NV);
-
- VkBufferObj tbo;
- tbo.init(*m_device, 1024, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
- VK_BUFFER_USAGE_RAY_TRACING_BIT_NV);
-
- VkBufferObj aabbbo;
- aabbbo.init(*m_device, 1024, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
- VK_BUFFER_USAGE_RAY_TRACING_BIT_NV);
-
- VkBufferCreateInfo unbound_buffer_ci = {};
- unbound_buffer_ci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
- unbound_buffer_ci.size = 1024;
- unbound_buffer_ci.usage = VK_BUFFER_USAGE_RAY_TRACING_BIT_NV;
- VkBufferObj unbound_buffer;
- unbound_buffer.init_no_mem(*m_device, unbound_buffer_ci);
-
- const std::vector<float> vertices = {1.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, -1.0f, 0.0f, 0.0f};
- const std::vector<uint32_t> indicies = {0, 1, 2};
- const std::vector<float> aabbs = {0.0f, 0.0f, 0.0f, 1.0f, 1.0f, 1.0f};
- const std::vector<float> transforms = {1.0f, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f};
-
- uint8_t *mapped_vbo_buffer_data = (uint8_t *)vbo.memory().map();
- std::memcpy(mapped_vbo_buffer_data, (uint8_t *)vertices.data(), sizeof(float) * vertices.size());
- vbo.memory().unmap();
-
- uint8_t *mapped_ibo_buffer_data = (uint8_t *)ibo.memory().map();
-    std::memcpy(mapped_ibo_buffer_data, (uint8_t *)indices.data(), sizeof(uint32_t) * indices.size());
- ibo.memory().unmap();
-
- uint8_t *mapped_tbo_buffer_data = (uint8_t *)tbo.memory().map();
- std::memcpy(mapped_tbo_buffer_data, (uint8_t *)transforms.data(), sizeof(float) * transforms.size());
- tbo.memory().unmap();
-
- uint8_t *mapped_aabbbo_buffer_data = (uint8_t *)aabbbo.memory().map();
- std::memcpy(mapped_aabbbo_buffer_data, (uint8_t *)aabbs.data(), sizeof(float) * aabbs.size());
- aabbbo.memory().unmap();
-
- VkGeometryNV valid_geometry_triangles = {};
- valid_geometry_triangles.sType = VK_STRUCTURE_TYPE_GEOMETRY_NV;
- valid_geometry_triangles.geometryType = VK_GEOMETRY_TYPE_TRIANGLES_NV;
- valid_geometry_triangles.geometry.triangles.sType = VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV;
- valid_geometry_triangles.geometry.triangles.vertexData = vbo.handle();
- valid_geometry_triangles.geometry.triangles.vertexOffset = 0;
- valid_geometry_triangles.geometry.triangles.vertexCount = 3;
- valid_geometry_triangles.geometry.triangles.vertexStride = 12;
- valid_geometry_triangles.geometry.triangles.vertexFormat = VK_FORMAT_R32G32B32_SFLOAT;
- valid_geometry_triangles.geometry.triangles.indexData = ibo.handle();
- valid_geometry_triangles.geometry.triangles.indexOffset = 0;
- valid_geometry_triangles.geometry.triangles.indexCount = 3;
- valid_geometry_triangles.geometry.triangles.indexType = VK_INDEX_TYPE_UINT32;
- valid_geometry_triangles.geometry.triangles.transformData = tbo.handle();
- valid_geometry_triangles.geometry.triangles.transformOffset = 0;
- valid_geometry_triangles.geometry.aabbs = {};
- valid_geometry_triangles.geometry.aabbs.sType = VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV;
-
- VkGeometryNV valid_geometry_aabbs = {};
- valid_geometry_aabbs.sType = VK_STRUCTURE_TYPE_GEOMETRY_NV;
- valid_geometry_aabbs.geometryType = VK_GEOMETRY_TYPE_AABBS_NV;
- valid_geometry_aabbs.geometry.triangles = {};
- valid_geometry_aabbs.geometry.triangles.sType = VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV;
- valid_geometry_aabbs.geometry.aabbs = {};
- valid_geometry_aabbs.geometry.aabbs.sType = VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV;
- valid_geometry_aabbs.geometry.aabbs.aabbData = aabbbo.handle();
- valid_geometry_aabbs.geometry.aabbs.numAABBs = 1;
- valid_geometry_aabbs.geometry.aabbs.offset = 0;
- valid_geometry_aabbs.geometry.aabbs.stride = 24;
-
- PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV = reinterpret_cast<PFN_vkCreateAccelerationStructureNV>(
- vkGetDeviceProcAddr(m_device->handle(), "vkCreateAccelerationStructureNV"));
- assert(vkCreateAccelerationStructureNV != nullptr);
-
- const auto GetCreateInfo = [](const VkGeometryNV &geometry) {
- VkAccelerationStructureCreateInfoNV as_create_info = {};
- as_create_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV;
- as_create_info.info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV;
- as_create_info.info.type = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV;
- as_create_info.info.instanceCount = 0;
- as_create_info.info.geometryCount = 1;
- as_create_info.info.pGeometries = &geometry;
- return as_create_info;
- };
-
- VkAccelerationStructureNV as;
-
- // Invalid vertex format.
- {
- VkGeometryNV geometry = valid_geometry_triangles;
- geometry.geometry.triangles.vertexFormat = VK_FORMAT_R64_UINT;
-
- VkAccelerationStructureCreateInfoNV as_create_info = GetCreateInfo(geometry);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkGeometryTrianglesNV-vertexFormat-02430");
- vkCreateAccelerationStructureNV(m_device->handle(), &as_create_info, nullptr, &as);
- m_errorMonitor->VerifyFound();
- }
- // Invalid vertex offset - not multiple of component size.
- {
- VkGeometryNV geometry = valid_geometry_triangles;
- geometry.geometry.triangles.vertexOffset = 1;
-
- VkAccelerationStructureCreateInfoNV as_create_info = GetCreateInfo(geometry);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkGeometryTrianglesNV-vertexOffset-02429");
- vkCreateAccelerationStructureNV(m_device->handle(), &as_create_info, nullptr, &as);
- m_errorMonitor->VerifyFound();
- }
- // Invalid vertex offset - bigger than buffer.
- {
- VkGeometryNV geometry = valid_geometry_triangles;
- geometry.geometry.triangles.vertexOffset = 12 * 1024;
-
- VkAccelerationStructureCreateInfoNV as_create_info = GetCreateInfo(geometry);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkGeometryTrianglesNV-vertexOffset-02428");
- vkCreateAccelerationStructureNV(m_device->handle(), &as_create_info, nullptr, &as);
- m_errorMonitor->VerifyFound();
- }
- // Invalid vertex buffer - no such buffer.
- {
- VkGeometryNV geometry = valid_geometry_triangles;
- geometry.geometry.triangles.vertexData = VkBuffer(123456789);
-
- VkAccelerationStructureCreateInfoNV as_create_info = GetCreateInfo(geometry);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkGeometryTrianglesNV-vertexData-parameter");
- vkCreateAccelerationStructureNV(m_device->handle(), &as_create_info, nullptr, &as);
- m_errorMonitor->VerifyFound();
- }
- // Invalid vertex buffer - no memory bound.
- {
- VkGeometryNV geometry = valid_geometry_triangles;
- geometry.geometry.triangles.vertexData = unbound_buffer.handle();
-
- VkAccelerationStructureCreateInfoNV as_create_info = GetCreateInfo(geometry);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkGeometryTrianglesNV-vertexOffset-02428");
- vkCreateAccelerationStructureNV(m_device->handle(), &as_create_info, nullptr, &as);
- m_errorMonitor->VerifyFound();
- }
-
- // Invalid index offset - not multiple of index size.
- {
- VkGeometryNV geometry = valid_geometry_triangles;
- geometry.geometry.triangles.indexOffset = 1;
-
- VkAccelerationStructureCreateInfoNV as_create_info = GetCreateInfo(geometry);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkGeometryTrianglesNV-indexOffset-02432");
- vkCreateAccelerationStructureNV(m_device->handle(), &as_create_info, nullptr, &as);
- m_errorMonitor->VerifyFound();
- }
- // Invalid index offset - bigger than buffer.
- {
- VkGeometryNV geometry = valid_geometry_triangles;
- geometry.geometry.triangles.indexOffset = 2048;
-
- VkAccelerationStructureCreateInfoNV as_create_info = GetCreateInfo(geometry);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkGeometryTrianglesNV-indexOffset-02431");
- vkCreateAccelerationStructureNV(m_device->handle(), &as_create_info, nullptr, &as);
- m_errorMonitor->VerifyFound();
- }
- // Invalid index count - must be 0 if type is VK_INDEX_TYPE_NONE_NV.
- {
- VkGeometryNV geometry = valid_geometry_triangles;
- geometry.geometry.triangles.indexType = VK_INDEX_TYPE_NONE_NV;
- geometry.geometry.triangles.indexData = VK_NULL_HANDLE;
- geometry.geometry.triangles.indexCount = 1;
-
- VkAccelerationStructureCreateInfoNV as_create_info = GetCreateInfo(geometry);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkGeometryTrianglesNV-indexCount-02436");
- vkCreateAccelerationStructureNV(m_device->handle(), &as_create_info, nullptr, &as);
- m_errorMonitor->VerifyFound();
- }
- // Invalid index data - must be VK_NULL_HANDLE if type is VK_INDEX_TYPE_NONE_NV.
- {
- VkGeometryNV geometry = valid_geometry_triangles;
- geometry.geometry.triangles.indexType = VK_INDEX_TYPE_NONE_NV;
- geometry.geometry.triangles.indexData = ibo.handle();
- geometry.geometry.triangles.indexCount = 0;
-
- VkAccelerationStructureCreateInfoNV as_create_info = GetCreateInfo(geometry);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkGeometryTrianglesNV-indexData-02434");
- vkCreateAccelerationStructureNV(m_device->handle(), &as_create_info, nullptr, &as);
- m_errorMonitor->VerifyFound();
- }
-
- // Invalid transform offset - not multiple of 16.
- {
- VkGeometryNV geometry = valid_geometry_triangles;
- geometry.geometry.triangles.transformOffset = 1;
-
- VkAccelerationStructureCreateInfoNV as_create_info = GetCreateInfo(geometry);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkGeometryTrianglesNV-transformOffset-02438");
- vkCreateAccelerationStructureNV(m_device->handle(), &as_create_info, nullptr, &as);
- m_errorMonitor->VerifyFound();
- }
- // Invalid transform offset - bigger than buffer.
- {
- VkGeometryNV geometry = valid_geometry_triangles;
- geometry.geometry.triangles.transformOffset = 2048;
-
- VkAccelerationStructureCreateInfoNV as_create_info = GetCreateInfo(geometry);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkGeometryTrianglesNV-transformOffset-02437");
- vkCreateAccelerationStructureNV(m_device->handle(), &as_create_info, nullptr, &as);
- m_errorMonitor->VerifyFound();
- }
-
- // Invalid aabb offset - not multiple of 8.
- {
- VkGeometryNV geometry = valid_geometry_aabbs;
- geometry.geometry.aabbs.offset = 1;
-
- VkAccelerationStructureCreateInfoNV as_create_info = GetCreateInfo(geometry);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkGeometryAABBNV-offset-02440");
- vkCreateAccelerationStructureNV(m_device->handle(), &as_create_info, nullptr, &as);
- m_errorMonitor->VerifyFound();
- }
- // Invalid aabb offset - bigger than buffer.
- {
- VkGeometryNV geometry = valid_geometry_aabbs;
- geometry.geometry.aabbs.offset = 8 * 1024;
-
- VkAccelerationStructureCreateInfoNV as_create_info = GetCreateInfo(geometry);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkGeometryAABBNV-offset-02439");
- vkCreateAccelerationStructureNV(m_device->handle(), &as_create_info, nullptr, &as);
- m_errorMonitor->VerifyFound();
- }
- // Invalid aabb stride - not multiple of 8.
- {
- VkGeometryNV geometry = valid_geometry_aabbs;
- geometry.geometry.aabbs.stride = 1;
-
- VkAccelerationStructureCreateInfoNV as_create_info = GetCreateInfo(geometry);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkGeometryAABBNV-stride-02441");
- vkCreateAccelerationStructureNV(m_device->handle(), &as_create_info, nullptr, &as);
- m_errorMonitor->VerifyFound();
- }
-}
-
-void GetSimpleGeometryForAccelerationStructureTests(const VkDeviceObj &device, VkBufferObj *vbo, VkBufferObj *ibo,
- VkGeometryNV *geometry) {
- vbo->init(device, 1024);
- ibo->init(device, 1024);
-
- *geometry = {};
- geometry->sType = VK_STRUCTURE_TYPE_GEOMETRY_NV;
- geometry->geometryType = VK_GEOMETRY_TYPE_TRIANGLES_NV;
- geometry->geometry.triangles.sType = VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV;
- geometry->geometry.triangles.vertexData = vbo->handle();
- geometry->geometry.triangles.vertexOffset = 0;
- geometry->geometry.triangles.vertexCount = 3;
- geometry->geometry.triangles.vertexStride = 12;
- geometry->geometry.triangles.vertexFormat = VK_FORMAT_R32G32B32_SFLOAT;
- geometry->geometry.triangles.indexData = ibo->handle();
- geometry->geometry.triangles.indexOffset = 0;
- geometry->geometry.triangles.indexCount = 3;
- geometry->geometry.triangles.indexType = VK_INDEX_TYPE_UINT32;
- geometry->geometry.triangles.transformData = VK_NULL_HANDLE;
- geometry->geometry.triangles.transformOffset = 0;
- geometry->geometry.aabbs = {};
- geometry->geometry.aabbs.sType = VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV;
-}
-
-TEST_F(VkLayerTest, ValidateCreateAccelerationStructureNV) {
- TEST_DESCRIPTION("Validate acceleration structure creation.");
- if (!InitFrameworkForRayTracingTest(this, m_instance_extension_names, m_device_extension_names, m_errorMonitor)) {
- return;
- }
-
- PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV = reinterpret_cast<PFN_vkCreateAccelerationStructureNV>(
- vkGetDeviceProcAddr(m_device->handle(), "vkCreateAccelerationStructureNV"));
- assert(vkCreateAccelerationStructureNV != nullptr);
-
- VkBufferObj vbo;
- VkBufferObj ibo;
- VkGeometryNV geometry;
- GetSimpleGeometryForAccelerationStructureTests(*m_device, &vbo, &ibo, &geometry);
-
- VkAccelerationStructureCreateInfoNV as_create_info = {};
- as_create_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV;
- as_create_info.info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV;
-
- VkAccelerationStructureNV as = VK_NULL_HANDLE;
-
- // Top level can not have geometry
- {
- VkAccelerationStructureCreateInfoNV bad_top_level_create_info = as_create_info;
- bad_top_level_create_info.info.type = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV;
- bad_top_level_create_info.info.instanceCount = 0;
- bad_top_level_create_info.info.geometryCount = 1;
- bad_top_level_create_info.info.pGeometries = &geometry;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkAccelerationStructureInfoNV-type-02425");
- vkCreateAccelerationStructureNV(m_device->handle(), &bad_top_level_create_info, nullptr, &as);
- m_errorMonitor->VerifyFound();
- }
-
- // Bot level can not have instances
- {
- VkAccelerationStructureCreateInfoNV bad_bot_level_create_info = as_create_info;
- bad_bot_level_create_info.info.type = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV;
- bad_bot_level_create_info.info.instanceCount = 1;
- bad_bot_level_create_info.info.geometryCount = 0;
- bad_bot_level_create_info.info.pGeometries = nullptr;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkAccelerationStructureInfoNV-type-02426");
- vkCreateAccelerationStructureNV(m_device->handle(), &bad_bot_level_create_info, nullptr, &as);
- m_errorMonitor->VerifyFound();
- }
-
- // Can not prefer both fast trace and fast build
- {
- VkAccelerationStructureCreateInfoNV bad_flags_level_create_info = as_create_info;
- bad_flags_level_create_info.info.type = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV;
- bad_flags_level_create_info.info.instanceCount = 0;
- bad_flags_level_create_info.info.geometryCount = 1;
- bad_flags_level_create_info.info.pGeometries = &geometry;
- bad_flags_level_create_info.info.flags =
- VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV | VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkAccelerationStructureInfoNV-flags-02592");
- vkCreateAccelerationStructureNV(m_device->handle(), &bad_flags_level_create_info, nullptr, &as);
- m_errorMonitor->VerifyFound();
- }
-
- // Can not have geometry or instance for compacting
- {
- VkAccelerationStructureCreateInfoNV bad_compacting_as_create_info = as_create_info;
- bad_compacting_as_create_info.info.type = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV;
- bad_compacting_as_create_info.info.instanceCount = 0;
- bad_compacting_as_create_info.info.geometryCount = 1;
- bad_compacting_as_create_info.info.pGeometries = &geometry;
- bad_compacting_as_create_info.info.flags = 0;
- bad_compacting_as_create_info.compactedSize = 1024;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkAccelerationStructureCreateInfoNV-compactedSize-02421");
- vkCreateAccelerationStructureNV(m_device->handle(), &bad_compacting_as_create_info, nullptr, &as);
- m_errorMonitor->VerifyFound();
- }
-
- // Can not mix different geometry types into single bottom level acceleration structure
- {
- VkGeometryNV aabb_geometry = {};
- aabb_geometry.sType = VK_STRUCTURE_TYPE_GEOMETRY_NV;
- aabb_geometry.geometryType = VK_GEOMETRY_TYPE_AABBS_NV;
- aabb_geometry.geometry.triangles.sType = VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV;
- aabb_geometry.geometry.aabbs = {};
- aabb_geometry.geometry.aabbs.sType = VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV;
- // Buffer contents do not matter for this test.
- aabb_geometry.geometry.aabbs.aabbData = geometry.geometry.triangles.vertexData;
- aabb_geometry.geometry.aabbs.numAABBs = 1;
- aabb_geometry.geometry.aabbs.offset = 0;
- aabb_geometry.geometry.aabbs.stride = 24;
-
- std::vector<VkGeometryNV> geometries = {geometry, aabb_geometry};
-
- VkAccelerationStructureCreateInfoNV mix_geometry_types_as_create_info = as_create_info;
- mix_geometry_types_as_create_info.info.type = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV;
- mix_geometry_types_as_create_info.info.instanceCount = 0;
- mix_geometry_types_as_create_info.info.geometryCount = static_cast<uint32_t>(geometries.size());
- mix_geometry_types_as_create_info.info.pGeometries = geometries.data();
- mix_geometry_types_as_create_info.info.flags = 0;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "UNASSIGNED-VkAccelerationStructureInfoNV-pGeometries-XXXX");
- vkCreateAccelerationStructureNV(m_device->handle(), &mix_geometry_types_as_create_info, nullptr, &as);
- m_errorMonitor->VerifyFound();
- }
-}
-
-TEST_F(VkLayerTest, ValidateBindAccelerationStructureNV) {
- TEST_DESCRIPTION("Validate acceleration structure binding.");
- if (!InitFrameworkForRayTracingTest(this, m_instance_extension_names, m_device_extension_names, m_errorMonitor)) {
- return;
- }
-
- PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV =
- reinterpret_cast<PFN_vkBindAccelerationStructureMemoryNV>(
- vkGetDeviceProcAddr(m_device->handle(), "vkBindAccelerationStructureMemoryNV"));
- assert(vkBindAccelerationStructureMemoryNV != nullptr);
-
- VkBufferObj vbo;
- VkBufferObj ibo;
- VkGeometryNV geometry;
- GetSimpleGeometryForAccelerationStructureTests(*m_device, &vbo, &ibo, &geometry);
-
- VkAccelerationStructureCreateInfoNV as_create_info = {};
- as_create_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV;
- as_create_info.info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV;
- as_create_info.info.type = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV;
- as_create_info.info.geometryCount = 1;
- as_create_info.info.pGeometries = &geometry;
- as_create_info.info.instanceCount = 0;
-
- VkAccelerationStructureObj as(*m_device, as_create_info, false);
- m_errorMonitor->VerifyNotFound();
-
- VkMemoryRequirements as_memory_requirements = as.memory_requirements().memoryRequirements;
-
- VkBindAccelerationStructureMemoryInfoNV as_bind_info = {};
- as_bind_info.sType = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV;
- as_bind_info.accelerationStructure = as.handle();
-
- VkMemoryAllocateInfo as_memory_alloc = {};
- as_memory_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
- as_memory_alloc.allocationSize = as_memory_requirements.size;
- ASSERT_TRUE(m_device->phy().set_memory_type(as_memory_requirements.memoryTypeBits, &as_memory_alloc, 0));
-
- // Can not bind already freed memory
- {
- VkDeviceMemory as_memory_freed = VK_NULL_HANDLE;
- ASSERT_VK_SUCCESS(vkAllocateMemory(device(), &as_memory_alloc, NULL, &as_memory_freed));
- vkFreeMemory(device(), as_memory_freed, NULL);
-
- VkBindAccelerationStructureMemoryInfoNV as_bind_info_freed = as_bind_info;
- as_bind_info_freed.memory = as_memory_freed;
- as_bind_info_freed.memoryOffset = 0;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkBindAccelerationStructureMemoryInfoNV-memory-parameter");
- (void)vkBindAccelerationStructureMemoryNV(device(), 1, &as_bind_info_freed);
- m_errorMonitor->VerifyFound();
- }
-
- // Can not bind with bad alignment
- if (as_memory_requirements.alignment > 1) {
- VkMemoryAllocateInfo as_memory_alloc_bad_alignment = as_memory_alloc;
- as_memory_alloc_bad_alignment.allocationSize += 1;
-
- VkDeviceMemory as_memory_bad_alignment = VK_NULL_HANDLE;
- ASSERT_VK_SUCCESS(vkAllocateMemory(device(), &as_memory_alloc_bad_alignment, NULL, &as_memory_bad_alignment));
-
- VkBindAccelerationStructureMemoryInfoNV as_bind_info_bad_alignment = as_bind_info;
- as_bind_info_bad_alignment.memory = as_memory_bad_alignment;
- as_bind_info_bad_alignment.memoryOffset = 1;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkBindAccelerationStructureMemoryInfoNV-memoryOffset-02594");
- (void)vkBindAccelerationStructureMemoryNV(device(), 1, &as_bind_info_bad_alignment);
- m_errorMonitor->VerifyFound();
-
- vkFreeMemory(device(), as_memory_bad_alignment, NULL);
- }
-
- // Can not bind with offset outside the allocation
- {
- VkDeviceMemory as_memory_bad_offset = VK_NULL_HANDLE;
- ASSERT_VK_SUCCESS(vkAllocateMemory(device(), &as_memory_alloc, NULL, &as_memory_bad_offset));
-
- VkBindAccelerationStructureMemoryInfoNV as_bind_info_bad_offset = as_bind_info;
- as_bind_info_bad_offset.memory = as_memory_bad_offset;
- as_bind_info_bad_offset.memoryOffset =
- (as_memory_alloc.allocationSize + as_memory_requirements.alignment) & ~(as_memory_requirements.alignment - 1);
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkBindAccelerationStructureMemoryInfoNV-memoryOffset-02451");
- (void)vkBindAccelerationStructureMemoryNV(device(), 1, &as_bind_info_bad_offset);
- m_errorMonitor->VerifyFound();
-
- vkFreeMemory(device(), as_memory_bad_offset, NULL);
- }
-
- // Can not bind with offset that doesn't leave enough size
- {
- VkDeviceSize offset = (as_memory_requirements.size - 1) & ~(as_memory_requirements.alignment - 1);
- if (offset > 0 && (as_memory_requirements.size < (as_memory_alloc.allocationSize - as_memory_requirements.alignment))) {
- VkDeviceMemory as_memory_bad_offset = VK_NULL_HANDLE;
- ASSERT_VK_SUCCESS(vkAllocateMemory(device(), &as_memory_alloc, NULL, &as_memory_bad_offset));
-
- VkBindAccelerationStructureMemoryInfoNV as_bind_info_bad_offset = as_bind_info;
- as_bind_info_bad_offset.memory = as_memory_bad_offset;
- as_bind_info_bad_offset.memoryOffset = offset;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkBindAccelerationStructureMemoryInfoNV-size-02595");
- (void)vkBindAccelerationStructureMemoryNV(device(), 1, &as_bind_info_bad_offset);
- m_errorMonitor->VerifyFound();
-
- vkFreeMemory(device(), as_memory_bad_offset, NULL);
- }
- }
-
- // Can not bind with memory that has unsupported memory type
- {
- VkPhysicalDeviceMemoryProperties memory_properties = {};
- vkGetPhysicalDeviceMemoryProperties(m_device->phy().handle(), &memory_properties);
-
- uint32_t supported_memory_type_bits = as_memory_requirements.memoryTypeBits;
- uint32_t unsupported_mem_type_bits = ((1 << memory_properties.memoryTypeCount) - 1) & ~supported_memory_type_bits;
- if (unsupported_mem_type_bits != 0) {
- VkMemoryAllocateInfo as_memory_alloc_bad_type = as_memory_alloc;
- ASSERT_TRUE(m_device->phy().set_memory_type(unsupported_mem_type_bits, &as_memory_alloc_bad_type, 0));
-
- VkDeviceMemory as_memory_bad_type = VK_NULL_HANDLE;
- ASSERT_VK_SUCCESS(vkAllocateMemory(device(), &as_memory_alloc_bad_type, NULL, &as_memory_bad_type));
-
- VkBindAccelerationStructureMemoryInfoNV as_bind_info_bad_type = as_bind_info;
- as_bind_info_bad_type.memory = as_memory_bad_type;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkBindAccelerationStructureMemoryInfoNV-memory-02593");
- (void)vkBindAccelerationStructureMemoryNV(device(), 1, &as_bind_info_bad_type);
- m_errorMonitor->VerifyFound();
-
- vkFreeMemory(device(), as_memory_bad_type, NULL);
- }
- }
-
- // Can not bind memory twice
- {
- VkAccelerationStructureObj as_twice(*m_device, as_create_info, false);
-
- VkDeviceMemory as_memory_twice_1 = VK_NULL_HANDLE;
- VkDeviceMemory as_memory_twice_2 = VK_NULL_HANDLE;
- ASSERT_VK_SUCCESS(vkAllocateMemory(device(), &as_memory_alloc, NULL, &as_memory_twice_1));
- ASSERT_VK_SUCCESS(vkAllocateMemory(device(), &as_memory_alloc, NULL, &as_memory_twice_2));
- VkBindAccelerationStructureMemoryInfoNV as_bind_info_twice_1 = as_bind_info;
- VkBindAccelerationStructureMemoryInfoNV as_bind_info_twice_2 = as_bind_info;
- as_bind_info_twice_1.accelerationStructure = as_twice.handle();
- as_bind_info_twice_2.accelerationStructure = as_twice.handle();
- as_bind_info_twice_1.memory = as_memory_twice_1;
- as_bind_info_twice_2.memory = as_memory_twice_2;
-
- ASSERT_VK_SUCCESS(vkBindAccelerationStructureMemoryNV(device(), 1, &as_bind_info_twice_1));
- m_errorMonitor->VerifyNotFound();
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkBindAccelerationStructureMemoryInfoNV-accelerationStructure-02450");
- (void)vkBindAccelerationStructureMemoryNV(device(), 1, &as_bind_info_twice_2);
- m_errorMonitor->VerifyFound();
-
- vkFreeMemory(device(), as_memory_twice_1, NULL);
- vkFreeMemory(device(), as_memory_twice_2, NULL);
- }
-}
-
-TEST_F(VkLayerTest, ValidateCmdBuildAccelerationStructureNV) {
- TEST_DESCRIPTION("Validate acceleration structure building.");
- if (!InitFrameworkForRayTracingTest(this, m_instance_extension_names, m_device_extension_names, m_errorMonitor)) {
- return;
- }
-
- PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV =
- reinterpret_cast<PFN_vkCmdBuildAccelerationStructureNV>(
- vkGetDeviceProcAddr(m_device->handle(), "vkCmdBuildAccelerationStructureNV"));
- assert(vkCmdBuildAccelerationStructureNV != nullptr);
-
- VkBufferObj vbo;
- VkBufferObj ibo;
- VkGeometryNV geometry;
- GetSimpleGeometryForAccelerationStructureTests(*m_device, &vbo, &ibo, &geometry);
-
- VkAccelerationStructureCreateInfoNV bot_level_as_create_info = {};
- bot_level_as_create_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV;
- bot_level_as_create_info.info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV;
- bot_level_as_create_info.info.type = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV;
- bot_level_as_create_info.info.instanceCount = 0;
- bot_level_as_create_info.info.geometryCount = 1;
- bot_level_as_create_info.info.pGeometries = &geometry;
-
- VkAccelerationStructureObj bot_level_as(*m_device, bot_level_as_create_info);
- m_errorMonitor->VerifyNotFound();
-
- VkBufferObj bot_level_as_scratch;
- bot_level_as.create_scratch_buffer(*m_device, &bot_level_as_scratch);
-
- // Command buffer must be in recording state
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdBuildAccelerationStructureNV-commandBuffer-recording");
- vkCmdBuildAccelerationStructureNV(m_commandBuffer->handle(), &bot_level_as_create_info.info, VK_NULL_HANDLE, 0, VK_FALSE,
- bot_level_as.handle(), VK_NULL_HANDLE, bot_level_as_scratch.handle(), 0);
- m_errorMonitor->VerifyFound();
-
- m_commandBuffer->begin();
-
- // Incompatible type
- VkAccelerationStructureInfoNV as_build_info_with_incompatible_type = bot_level_as_create_info.info;
- as_build_info_with_incompatible_type.type = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV;
- as_build_info_with_incompatible_type.instanceCount = 1;
- as_build_info_with_incompatible_type.geometryCount = 0;
-
- // This is duplicated since it triggers one error for different types and one error for lower instance count - the
- // build info is incompatible but still needs to be valid to get past the stateless checks.
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBuildAccelerationStructureNV-dst-02488");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBuildAccelerationStructureNV-dst-02488");
- vkCmdBuildAccelerationStructureNV(m_commandBuffer->handle(), &as_build_info_with_incompatible_type, VK_NULL_HANDLE, 0, VK_FALSE,
- bot_level_as.handle(), VK_NULL_HANDLE, bot_level_as_scratch.handle(), 0);
- m_errorMonitor->VerifyFound();
-
- // Incompatible flags
- VkAccelerationStructureInfoNV as_build_info_with_incompatible_flags = bot_level_as_create_info.info;
- as_build_info_with_incompatible_flags.flags = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_NV;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBuildAccelerationStructureNV-dst-02488");
- vkCmdBuildAccelerationStructureNV(m_commandBuffer->handle(), &as_build_info_with_incompatible_flags, VK_NULL_HANDLE, 0,
- VK_FALSE, bot_level_as.handle(), VK_NULL_HANDLE, bot_level_as_scratch.handle(), 0);
- m_errorMonitor->VerifyFound();
-
- // Incompatible build size
- VkGeometryNV geometry_with_more_vertices = geometry;
- geometry_with_more_vertices.geometry.triangles.vertexCount += 1;
-
- VkAccelerationStructureInfoNV as_build_info_with_incompatible_geometry = bot_level_as_create_info.info;
- as_build_info_with_incompatible_geometry.pGeometries = &geometry_with_more_vertices;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBuildAccelerationStructureNV-dst-02488");
- vkCmdBuildAccelerationStructureNV(m_commandBuffer->handle(), &as_build_info_with_incompatible_geometry, VK_NULL_HANDLE, 0,
- VK_FALSE, bot_level_as.handle(), VK_NULL_HANDLE, bot_level_as_scratch.handle(), 0);
- m_errorMonitor->VerifyFound();
-
- // Scratch buffer too small
- VkBufferCreateInfo too_small_scratch_buffer_info = {};
- too_small_scratch_buffer_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
- too_small_scratch_buffer_info.usage = VK_BUFFER_USAGE_RAY_TRACING_BIT_NV;
- too_small_scratch_buffer_info.size = 1;
- VkBufferObj too_small_scratch_buffer(*m_device, too_small_scratch_buffer_info);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBuildAccelerationStructureNV-update-02491");
- vkCmdBuildAccelerationStructureNV(m_commandBuffer->handle(), &bot_level_as_create_info.info, VK_NULL_HANDLE, 0, VK_FALSE,
- bot_level_as.handle(), VK_NULL_HANDLE, too_small_scratch_buffer.handle(), 0);
- m_errorMonitor->VerifyFound();
-
- // Scratch buffer with offset too small
- VkDeviceSize scratch_buffer_offset = 5;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBuildAccelerationStructureNV-update-02491");
- vkCmdBuildAccelerationStructureNV(m_commandBuffer->handle(), &bot_level_as_create_info.info, VK_NULL_HANDLE, 0, VK_FALSE,
- bot_level_as.handle(), VK_NULL_HANDLE, bot_level_as_scratch.handle(), scratch_buffer_offset);
- m_errorMonitor->VerifyFound();
-
- // Src must have been built before
- VkAccelerationStructureObj bot_level_as_updated(*m_device, bot_level_as_create_info);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBuildAccelerationStructureNV-update-02489");
- vkCmdBuildAccelerationStructureNV(m_commandBuffer->handle(), &bot_level_as_create_info.info, VK_NULL_HANDLE, 0, VK_TRUE,
- bot_level_as_updated.handle(), bot_level_as.handle(), bot_level_as_scratch.handle(), 0);
- m_errorMonitor->VerifyFound();
-
- // Src must have been built before with the VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV flag
- vkCmdBuildAccelerationStructureNV(m_commandBuffer->handle(), &bot_level_as_create_info.info, VK_NULL_HANDLE, 0, VK_FALSE,
- bot_level_as.handle(), VK_NULL_HANDLE, bot_level_as_scratch.handle(), 0);
- m_errorMonitor->VerifyNotFound();
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBuildAccelerationStructureNV-update-02489");
- vkCmdBuildAccelerationStructureNV(m_commandBuffer->handle(), &bot_level_as_create_info.info, VK_NULL_HANDLE, 0, VK_TRUE,
- bot_level_as_updated.handle(), bot_level_as.handle(), bot_level_as_scratch.handle(), 0);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, ValidateGetAccelerationStructureHandleNV) {
- TEST_DESCRIPTION("Validate acceleration structure handle querying.");
- if (!InitFrameworkForRayTracingTest(this, m_instance_extension_names, m_device_extension_names, m_errorMonitor)) {
- return;
- }
-
- PFN_vkGetAccelerationStructureHandleNV vkGetAccelerationStructureHandleNV =
- reinterpret_cast<PFN_vkGetAccelerationStructureHandleNV>(
- vkGetDeviceProcAddr(m_device->handle(), "vkGetAccelerationStructureHandleNV"));
- assert(vkGetAccelerationStructureHandleNV != nullptr);
-
- VkBufferObj vbo;
- VkBufferObj ibo;
- VkGeometryNV geometry;
- GetSimpleGeometryForAccelerationStructureTests(*m_device, &vbo, &ibo, &geometry);
-
- VkAccelerationStructureCreateInfoNV bot_level_as_create_info = {};
- bot_level_as_create_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV;
- bot_level_as_create_info.info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV;
- bot_level_as_create_info.info.type = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV;
- bot_level_as_create_info.info.instanceCount = 0;
- bot_level_as_create_info.info.geometryCount = 1;
- bot_level_as_create_info.info.pGeometries = &geometry;
-
- // Not enough space for the handle
- {
- VkAccelerationStructureObj bot_level_as(*m_device, bot_level_as_create_info);
- m_errorMonitor->VerifyNotFound();
-
- uint64_t handle = 0;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkGetAccelerationStructureHandleNV-dataSize-02240");
- vkGetAccelerationStructureHandleNV(m_device->handle(), bot_level_as.handle(), sizeof(uint8_t), &handle);
- m_errorMonitor->VerifyFound();
- }
-
- // No memory bound to acceleration structure
- {
- VkAccelerationStructureObj bot_level_as(*m_device, bot_level_as_create_info, /*init_memory=*/false);
- m_errorMonitor->VerifyNotFound();
-
- uint64_t handle = 0;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "UNASSIGNED-vkGetAccelerationStructureHandleNV-accelerationStructure-XXXX");
- vkGetAccelerationStructureHandleNV(m_device->handle(), bot_level_as.handle(), sizeof(uint64_t), &handle);
- m_errorMonitor->VerifyFound();
- }
-}
-
-TEST_F(VkLayerTest, ValidateCmdCopyAccelerationStructureNV) {
- TEST_DESCRIPTION("Validate acceleration structure copying.");
- if (!InitFrameworkForRayTracingTest(this, m_instance_extension_names, m_device_extension_names, m_errorMonitor)) {
- return;
- }
-
- PFN_vkCmdCopyAccelerationStructureNV vkCmdCopyAccelerationStructureNV = reinterpret_cast<PFN_vkCmdCopyAccelerationStructureNV>(
- vkGetDeviceProcAddr(m_device->handle(), "vkCmdCopyAccelerationStructureNV"));
- assert(vkCmdCopyAccelerationStructureNV != nullptr);
-
- VkBufferObj vbo;
- VkBufferObj ibo;
- VkGeometryNV geometry;
- GetSimpleGeometryForAccelerationStructureTests(*m_device, &vbo, &ibo, &geometry);
-
- VkAccelerationStructureCreateInfoNV as_create_info = {};
- as_create_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV;
- as_create_info.info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV;
- as_create_info.info.type = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV;
- as_create_info.info.instanceCount = 0;
- as_create_info.info.geometryCount = 1;
- as_create_info.info.pGeometries = &geometry;
-
- VkAccelerationStructureObj src_as(*m_device, as_create_info);
- VkAccelerationStructureObj dst_as(*m_device, as_create_info);
- VkAccelerationStructureObj dst_as_without_mem(*m_device, as_create_info, false);
- m_errorMonitor->VerifyNotFound();
-
- // Command buffer must be in recording state
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-vkCmdCopyAccelerationStructureNV-commandBuffer-recording");
- vkCmdCopyAccelerationStructureNV(m_commandBuffer->handle(), dst_as.handle(), src_as.handle(),
- VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV);
- m_errorMonitor->VerifyFound();
-
- m_commandBuffer->begin();
-
- // Src must have been created with allow compaction flag
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyAccelerationStructureNV-src-02497");
- vkCmdCopyAccelerationStructureNV(m_commandBuffer->handle(), dst_as.handle(), src_as.handle(),
- VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV);
- m_errorMonitor->VerifyFound();
-
- // Dst must have been bound with memory
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBuffer-VkAccelerationStructureNV");
- vkCmdCopyAccelerationStructureNV(m_commandBuffer->handle(), dst_as_without_mem.handle(), src_as.handle(),
- VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV);
- m_errorMonitor->VerifyFound();
-}
diff --git a/tests/vklayertests_pipeline_shader.cpp b/tests/vklayertests_pipeline_shader.cpp
deleted file mode 100644
index daa6df0f5..000000000
--- a/tests/vklayertests_pipeline_shader.cpp
+++ /dev/null
@@ -1,5754 +0,0 @@
-/*
- * Copyright (c) 2015-2019 The Khronos Group Inc.
- * Copyright (c) 2015-2019 Valve Corporation
- * Copyright (c) 2015-2019 LunarG, Inc.
- * Copyright (c) 2015-2019 Google, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Author: Chia-I Wu <olvaffe@gmail.com>
- * Author: Chris Forbes <chrisf@ijw.co.nz>
- * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
- * Author: Mark Lobodzinski <mark@lunarg.com>
- * Author: Mike Stroyan <mike@LunarG.com>
- * Author: Tobin Ehlis <tobine@google.com>
- * Author: Tony Barbour <tony@LunarG.com>
- * Author: Cody Northrop <cnorthrop@google.com>
- * Author: Dave Houlton <daveh@lunarg.com>
- * Author: Jeremy Kniager <jeremyk@lunarg.com>
- * Author: Shannon McPherson <shannon@lunarg.com>
- * Author: John Zulauf <jzulauf@lunarg.com>
- */
-
-#include "cast_utils.h"
-#include "layer_validation_tests.h"
-
-TEST_F(VkLayerTest, PSOPolygonModeInvalid) {
- TEST_DESCRIPTION("Attempt to use invalid polygon fill modes.");
- VkPhysicalDeviceFeatures device_features = {};
- device_features.fillModeNonSolid = VK_FALSE;
- // The sacrificial device object
- ASSERT_NO_FATAL_FAILURE(Init(&device_features));
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkPipelineRasterizationStateCreateInfo rs_ci = {};
- rs_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
- rs_ci.pNext = nullptr;
- rs_ci.lineWidth = 1.0f;
- rs_ci.rasterizerDiscardEnable = VK_TRUE;
-
- auto set_polygonMode = [&](CreatePipelineHelper &helper) { helper.rs_state_ci_ = rs_ci; };
-
- // Set polygonMode to POINT while the non-solid fill mode feature is disabled.
- // Introduce failure by setting unsupported polygon mode
- rs_ci.polygonMode = VK_POLYGON_MODE_POINT;
- CreatePipelineHelper::OneshotTest(*this, set_polygonMode, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "polygonMode cannot be VK_POLYGON_MODE_POINT or VK_POLYGON_MODE_LINE");
-
- // Set polygonMode to LINE while the non-solid fill mode feature is disabled.
- // Introduce failure by setting unsupported polygon mode
- rs_ci.polygonMode = VK_POLYGON_MODE_LINE;
- CreatePipelineHelper::OneshotTest(*this, set_polygonMode, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "polygonMode cannot be VK_POLYGON_MODE_POINT or VK_POLYGON_MODE_LINE");
-
- // Set polygonMode to FILL_RECTANGLE_NV while the extension is not enabled.
- // Introduce failure by setting unsupported polygon mode
- rs_ci.polygonMode = VK_POLYGON_MODE_FILL_RECTANGLE_NV;
- CreatePipelineHelper::OneshotTest(*this, set_polygonMode, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkPipelineRasterizationStateCreateInfo-polygonMode-01414");
-}
-
-TEST_F(VkLayerTest, PipelineNotBound) {
- TEST_DESCRIPTION("Pass in an invalid pipeline object handle into a Vulkan API call.");
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBindPipeline-pipeline-parameter");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkPipeline badPipeline = CastToHandle<VkPipeline, uintptr_t>(0xbaadb1be);
-
- m_commandBuffer->begin();
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, badPipeline);
-
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, PipelineWrongBindPointGraphics) {
- TEST_DESCRIPTION("Bind a compute pipeline in the graphics bind point");
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBindPipeline-pipelineBindPoint-00779");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- CreateComputePipelineHelper pipe(*this);
- pipe.InitInfo();
- pipe.InitState();
- pipe.CreateComputePipeline();
-
- m_commandBuffer->begin();
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
-
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, PipelineWrongBindPointCompute) {
- TEST_DESCRIPTION("Bind a graphics pipeline in the compute bind point");
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBindPipeline-pipelineBindPoint-00780");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- CreatePipelineHelper pipe(*this);
- pipe.InitInfo();
- pipe.InitState();
- pipe.CreateGraphicsPipeline();
-
- m_commandBuffer->begin();
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_COMPUTE, pipe.pipeline_);
-
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, PipelineWrongBindPointRayTracing) {
- TEST_DESCRIPTION("Bind a graphics pipeline in the ray-tracing bind point");
-
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- } else {
- printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- if (DeviceExtensionSupported(gpu(), nullptr, VK_NV_RAY_TRACING_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_NV_RAY_TRACING_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- } else {
- printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_NV_RAY_TRACING_EXTENSION_NAME);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBindPipeline-pipelineBindPoint-02392");
-
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- if (!EnableDeviceProfileLayer()) {
- printf("%s Failed to enable device profile layer.\n", kSkipPrefix);
- return;
- }
-
- CreatePipelineHelper pipe(*this);
- pipe.InitInfo();
- pipe.InitState();
- pipe.CreateGraphicsPipeline();
-
- m_commandBuffer->begin();
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_RAY_TRACING_NV, pipe.pipeline_);
-
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, CreatePipelineBadVertexAttributeFormat) {
- TEST_DESCRIPTION("Test that pipeline validation catches invalid vertex attribute formats");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkVertexInputBindingDescription input_binding;
- memset(&input_binding, 0, sizeof(input_binding));
-
- VkVertexInputAttributeDescription input_attribs;
- memset(&input_attribs, 0, sizeof(input_attribs));
-
-    // Pick a really bad format for this purpose and make sure the implementation does not support it as a vertex buffer format
- input_attribs.format = VK_FORMAT_BC2_UNORM_BLOCK;
- VkFormatProperties format_props = m_device->format_properties(input_attribs.format);
- if ((format_props.bufferFeatures & VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT) != 0) {
- printf("%s Format unsuitable for test; skipped.\n", kSkipPrefix);
- return;
- }
-
- input_attribs.location = 0;
-
- auto set_info = [&](CreatePipelineHelper &helper) {
- helper.vi_ci_.pVertexBindingDescriptions = &input_binding;
- helper.vi_ci_.vertexBindingDescriptionCount = 1;
- helper.vi_ci_.pVertexAttributeDescriptions = &input_attribs;
- helper.vi_ci_.vertexAttributeDescriptionCount = 1;
- };
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkVertexInputAttributeDescription-format-00623");
-}
-
-TEST_F(VkLayerTest, DisabledIndependentBlend) {
- TEST_DESCRIPTION(
- "Generate INDEPENDENT_BLEND by disabling independent blend and then specifying different blend states for two "
- "attachments");
- VkPhysicalDeviceFeatures features = {};
- features.independentBlend = VK_FALSE;
- ASSERT_NO_FATAL_FAILURE(Init(&features));
-
- m_errorMonitor->SetDesiredFailureMsg(
- VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "Invalid Pipeline CreateInfo: If independent blend feature not enabled, all elements of pAttachments must be identical");
-
- VkDescriptorSetObj descriptorSet(m_device);
- descriptorSet.AppendDummy();
- descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
-
- VkPipelineObj pipeline(m_device);
- // Create a renderPass with two color attachments
- VkAttachmentReference attachments[2] = {};
- attachments[0].layout = VK_IMAGE_LAYOUT_GENERAL;
- attachments[1].attachment = 1;
- attachments[1].layout = VK_IMAGE_LAYOUT_GENERAL;
-
- VkSubpassDescription subpass = {};
- subpass.pColorAttachments = attachments;
- subpass.colorAttachmentCount = 2;
-
- VkRenderPassCreateInfo rpci = {};
- rpci.subpassCount = 1;
- rpci.pSubpasses = &subpass;
- rpci.attachmentCount = 2;
-
- VkAttachmentDescription attach_desc[2] = {};
- attach_desc[0].format = VK_FORMAT_B8G8R8A8_UNORM;
- attach_desc[0].samples = VK_SAMPLE_COUNT_1_BIT;
- attach_desc[0].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
- attach_desc[0].finalLayout = VK_IMAGE_LAYOUT_GENERAL;
- attach_desc[1].format = VK_FORMAT_B8G8R8A8_UNORM;
- attach_desc[1].samples = VK_SAMPLE_COUNT_1_BIT;
- attach_desc[1].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
- attach_desc[1].finalLayout = VK_IMAGE_LAYOUT_GENERAL;
-
- rpci.pAttachments = attach_desc;
- rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
-
- VkRenderPass renderpass;
- vkCreateRenderPass(m_device->device(), &rpci, NULL, &renderpass);
- VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
- pipeline.AddShader(&vs);
-
- VkPipelineColorBlendAttachmentState att_state1 = {}, att_state2 = {};
- att_state1.dstAlphaBlendFactor = VK_BLEND_FACTOR_CONSTANT_COLOR;
- att_state1.blendEnable = VK_TRUE;
- att_state2.dstAlphaBlendFactor = VK_BLEND_FACTOR_CONSTANT_COLOR;
- att_state2.blendEnable = VK_FALSE;
- pipeline.AddColorAttachment(0, att_state1);
- pipeline.AddColorAttachment(1, att_state2);
- pipeline.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderpass);
- m_errorMonitor->VerifyFound();
- vkDestroyRenderPass(m_device->device(), renderpass, NULL);
-}
-
-// Is the Pipeline compatible with the expectations of the Renderpass/subpasses?
-TEST_F(VkLayerTest, PipelineRenderpassCompatibility) {
- TEST_DESCRIPTION(
- "Create a graphics pipeline that is incompatible with the requirements of its contained Renderpass/subpasses.");
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkPipelineColorBlendAttachmentState att_state1 = {};
- att_state1.dstAlphaBlendFactor = VK_BLEND_FACTOR_CONSTANT_COLOR;
- att_state1.blendEnable = VK_TRUE;
-
- auto set_info = [&](CreatePipelineHelper &helper) {
- helper.cb_attachments_ = att_state1;
- helper.gp_ci_.pColorBlendState = nullptr;
- };
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkGraphicsPipelineCreateInfo-rasterizerDiscardEnable-00753");
-}
-
-TEST_F(VkLayerTest, PointSizeFailure) {
- TEST_DESCRIPTION("Create a pipeline using TOPOLOGY_POINT_LIST but do not set PointSize in vertex shader.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
- ASSERT_NO_FATAL_FAILURE(InitViewport());
-
- // Create VS declaring PointSize but not writing to it
- const char NoPointSizeVertShader[] =
- "#version 450\n"
- "vec2 vertices[3];\n"
- "out gl_PerVertex\n"
- "{\n"
- " vec4 gl_Position;\n"
- " float gl_PointSize;\n"
- "};\n"
- "void main() {\n"
- " vertices[0] = vec2(-1.0, -1.0);\n"
- " vertices[1] = vec2( 1.0, -1.0);\n"
- " vertices[2] = vec2( 0.0, 1.0);\n"
- " gl_Position = vec4(vertices[gl_VertexIndex % 3], 0.0, 1.0);\n"
- "}\n";
- VkShaderObj vs(m_device, NoPointSizeVertShader, VK_SHADER_STAGE_VERTEX_BIT, this);
-
- auto set_info = [&](CreatePipelineHelper &helper) {
- // Set Input Assembly to TOPOLOGY POINT LIST
- helper.ia_ci_.topology = VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
-
- helper.shader_stages_ = {vs.GetStageCreateInfo(), helper.fs_->GetStageCreateInfo()};
- };
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT, "Pipeline topology is set to POINT_LIST");
-}
-
-TEST_F(VkLayerTest, InvalidTopology) {
- TEST_DESCRIPTION("InvalidTopology.");
- VkPhysicalDeviceFeatures deviceFeatures = {};
- deviceFeatures.geometryShader = VK_FALSE;
- deviceFeatures.tessellationShader = VK_FALSE;
-
- ASSERT_NO_FATAL_FAILURE(Init(&deviceFeatures));
- ASSERT_NO_FATAL_FAILURE(InitViewport());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkShaderObj vs(m_device, bindStateVertPointSizeShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
-
- VkPrimitiveTopology topology;
-
- auto set_info = [&](CreatePipelineHelper &helper) {
- helper.ia_ci_.topology = topology;
- helper.ia_ci_.primitiveRestartEnable = VK_TRUE;
- helper.shader_stages_ = {vs.GetStageCreateInfo(), helper.fs_->GetStageCreateInfo()};
- };
-
- topology = VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00428");
-
- topology = VK_PRIMITIVE_TOPOLOGY_LINE_LIST;
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00428");
-
- topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00428");
-
- topology = VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY;
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- std::vector<string>{"VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00428",
- "VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00429"});
-
- topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY;
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- std::vector<string>{"VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00428",
- "VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00429"});
-
- topology = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST;
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- std::vector<string>{"VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00428",
- "VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00430",
- "VUID-VkGraphicsPipelineCreateInfo-topology-00737"});
-
- topology = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY;
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00429");
-
- topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY;
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00429");
-}
-
-TEST_F(VkLayerTest, PointSizeGeomShaderFailure) {
- TEST_DESCRIPTION(
- "Create a pipeline using TOPOLOGY_POINT_LIST, set PointSize vertex shader, but not in the final geometry stage.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- if ((!m_device->phy().features().geometryShader) || (!m_device->phy().features().shaderTessellationAndGeometryPointSize)) {
- printf("%s Device does not support the required geometry shader features; skipped.\n", kSkipPrefix);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
- ASSERT_NO_FATAL_FAILURE(InitViewport());
-
- // Create GS that emits a point but does not write PointSize
- static char const *gsSource =
- "#version 450\n"
- "layout (points) in;\n"
- "layout (points) out;\n"
- "layout (max_vertices = 1) out;\n"
- "void main() {\n"
- " gl_Position = vec4(1.0, 0.5, 0.5, 0.0);\n"
- " EmitVertex();\n"
- "}\n";
-
- VkShaderObj vs(m_device, bindStateVertPointSizeShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj gs(m_device, gsSource, VK_SHADER_STAGE_GEOMETRY_BIT, this);
-
- auto set_info = [&](CreatePipelineHelper &helper) {
- helper.ia_ci_.topology = VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
- helper.shader_stages_ = {vs.GetStageCreateInfo(), gs.GetStageCreateInfo(), helper.fs_->GetStageCreateInfo()};
- };
-
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT, "Pipeline topology is set to POINT_LIST");
-}
-
-TEST_F(VkLayerTest, BuiltinBlockOrderMismatchVsGs) {
- TEST_DESCRIPTION("Use different order of gl_Position and gl_PointSize in builtin block interface between VS and GS.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- if (!m_device->phy().features().geometryShader) {
- printf("%s Device does not support geometry shaders; Skipped.\n", kSkipPrefix);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
- ASSERT_NO_FATAL_FAILURE(InitViewport());
-
- // Compiled using the GLSL code below. GlslangValidator rearranges the members, but here they are kept in the order provided.
- // #version 450
- // layout (points) in;
- // layout (points) out;
- // layout (max_vertices = 1) out;
- // in gl_PerVertex {
- // float gl_PointSize;
- // vec4 gl_Position;
- // } gl_in[];
- // void main() {
- // gl_Position = gl_in[0].gl_Position;
- // gl_PointSize = gl_in[0].gl_PointSize;
- // EmitVertex();
- // }
-
- const std::string gsSource = R"(
- OpCapability Geometry
- OpCapability GeometryPointSize
- %1 = OpExtInstImport "GLSL.std.450"
- OpMemoryModel Logical GLSL450
- OpEntryPoint Geometry %main "main" %_ %gl_in
- OpExecutionMode %main InputPoints
- OpExecutionMode %main Invocations 1
- OpExecutionMode %main OutputPoints
- OpExecutionMode %main OutputVertices 1
- OpSource GLSL 450
- OpMemberDecorate %gl_PerVertex 0 BuiltIn Position
- OpMemberDecorate %gl_PerVertex 1 BuiltIn PointSize
- OpMemberDecorate %gl_PerVertex 2 BuiltIn ClipDistance
- OpMemberDecorate %gl_PerVertex 3 BuiltIn CullDistance
- OpDecorate %gl_PerVertex Block
- OpMemberDecorate %gl_PerVertex_0 0 BuiltIn PointSize
- OpMemberDecorate %gl_PerVertex_0 1 BuiltIn Position
- OpDecorate %gl_PerVertex_0 Block
- %void = OpTypeVoid
- %3 = OpTypeFunction %void
- %float = OpTypeFloat 32
- %v4float = OpTypeVector %float 4
- %uint = OpTypeInt 32 0
- %uint_1 = OpConstant %uint 1
-%_arr_float_uint_1 = OpTypeArray %float %uint_1
-%gl_PerVertex = OpTypeStruct %v4float %float %_arr_float_uint_1 %_arr_float_uint_1
-%_ptr_Output_gl_PerVertex = OpTypePointer Output %gl_PerVertex
- %_ = OpVariable %_ptr_Output_gl_PerVertex Output
- %int = OpTypeInt 32 1
- %int_0 = OpConstant %int 0
-%gl_PerVertex_0 = OpTypeStruct %float %v4float
-%_arr_gl_PerVertex_0_uint_1 = OpTypeArray %gl_PerVertex_0 %uint_1
-%_ptr_Input__arr_gl_PerVertex_0_uint_1 = OpTypePointer Input %_arr_gl_PerVertex_0_uint_1
- %gl_in = OpVariable %_ptr_Input__arr_gl_PerVertex_0_uint_1 Input
-%_ptr_Input_v4float = OpTypePointer Input %v4float
-%_ptr_Output_v4float = OpTypePointer Output %v4float
- %int_1 = OpConstant %int 1
-%_ptr_Input_float = OpTypePointer Input %float
-%_ptr_Output_float = OpTypePointer Output %float
- %main = OpFunction %void None %3
- %5 = OpLabel
- %21 = OpAccessChain %_ptr_Input_v4float %gl_in %int_0 %int_1
- %22 = OpLoad %v4float %21
- %24 = OpAccessChain %_ptr_Output_v4float %_ %int_0
- OpStore %24 %22
- %27 = OpAccessChain %_ptr_Input_float %gl_in %int_0 %int_0
- %28 = OpLoad %float %27
- %30 = OpAccessChain %_ptr_Output_float %_ %int_1
- OpStore %30 %28
- OpEmitVertex
- OpReturn
- OpFunctionEnd
- )";
-
- VkShaderObj vs(m_device, bindStateVertPointSizeShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj gs(m_device, gsSource, VK_SHADER_STAGE_GEOMETRY_BIT, this);
-
- auto set_info = [&](CreatePipelineHelper &helper) {
- helper.ia_ci_.topology = VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
- helper.shader_stages_ = {vs.GetStageCreateInfo(), gs.GetStageCreateInfo(), helper.fs_->GetStageCreateInfo()};
- };
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "Builtin variable inside block doesn't match between");
-}
-
-TEST_F(VkLayerTest, BuiltinBlockSizeMismatchVsGs) {
- TEST_DESCRIPTION("Use different number of elements in builtin block interface between VS and GS.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- if (!m_device->phy().features().geometryShader) {
- printf("%s Device does not support geometry shaders; Skipped.\n", kSkipPrefix);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
- ASSERT_NO_FATAL_FAILURE(InitViewport());
-
- static const char *gsSource =
- "#version 450\n"
- "layout (points) in;\n"
- "layout (points) out;\n"
- "layout (max_vertices = 1) out;\n"
- "in gl_PerVertex\n"
- "{\n"
- " vec4 gl_Position;\n"
- " float gl_PointSize;\n"
- " float gl_ClipDistance[];\n"
- "} gl_in[];\n"
- "void main()\n"
- "{\n"
- " gl_Position = gl_in[0].gl_Position;\n"
- " gl_PointSize = gl_in[0].gl_PointSize;\n"
- " EmitVertex();\n"
- "}\n";
-
- VkShaderObj vs(m_device, bindStateVertPointSizeShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj gs(m_device, gsSource, VK_SHADER_STAGE_GEOMETRY_BIT, this);
-
- auto set_info = [&](CreatePipelineHelper &helper) {
- helper.ia_ci_.topology = VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
- helper.shader_stages_ = {vs.GetStageCreateInfo(), gs.GetStageCreateInfo(), helper.fs_->GetStageCreateInfo()};
- };
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "Number of elements inside builtin block differ between stages");
-}
-
-TEST_F(VkLayerTest, CreatePipelineLayoutExceedsSetLimit) {
- TEST_DESCRIPTION("Attempt to create a pipeline layout using more than the physical limit of SetLayouts.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- VkDescriptorSetLayoutBinding layout_binding = {};
- layout_binding.binding = 0;
- layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
- layout_binding.descriptorCount = 1;
- layout_binding.stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
- layout_binding.pImmutableSamplers = NULL;
-
- VkDescriptorSetLayoutCreateInfo ds_layout_ci = {};
- ds_layout_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
- ds_layout_ci.bindingCount = 1;
- ds_layout_ci.pBindings = &layout_binding;
- VkDescriptorSetLayout ds_layout = {};
- VkResult err = vkCreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
- ASSERT_VK_SUCCESS(err);
-
- // Create an array of DSLs, one larger than the physical limit
- const auto excess_layouts = 1 + m_device->phy().properties().limits.maxBoundDescriptorSets;
- std::vector<VkDescriptorSetLayout> dsl_array(excess_layouts, ds_layout);
-
- VkPipelineLayoutCreateInfo pipeline_layout_ci = {};
- pipeline_layout_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
- pipeline_layout_ci.pNext = NULL;
- pipeline_layout_ci.setLayoutCount = excess_layouts;
- pipeline_layout_ci.pSetLayouts = dsl_array.data();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-setLayoutCount-00286");
- VkPipelineLayout pipeline_layout = VK_NULL_HANDLE;
- err = vkCreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
- m_errorMonitor->VerifyFound();
-
- // Clean up
- vkDestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
-}
-
-TEST_F(VkLayerTest, CreatePipelineLayoutExcessPerStageDescriptors) {
- TEST_DESCRIPTION("Attempt to create a pipeline layout where total descriptors exceed per-stage limits");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- uint32_t max_uniform_buffers = m_device->phy().properties().limits.maxPerStageDescriptorUniformBuffers;
- uint32_t max_storage_buffers = m_device->phy().properties().limits.maxPerStageDescriptorStorageBuffers;
- uint32_t max_sampled_images = m_device->phy().properties().limits.maxPerStageDescriptorSampledImages;
- uint32_t max_storage_images = m_device->phy().properties().limits.maxPerStageDescriptorStorageImages;
- uint32_t max_samplers = m_device->phy().properties().limits.maxPerStageDescriptorSamplers;
- uint32_t max_combined = std::min(max_samplers, max_sampled_images);
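- // A combined image sampler counts against both the per-stage sampler and sampled-image limits, hence the min() above.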
- uint32_t max_input_attachments = m_device->phy().properties().limits.maxPerStageDescriptorInputAttachments;
-
- uint32_t sum_dyn_uniform_buffers = m_device->phy().properties().limits.maxDescriptorSetUniformBuffersDynamic;
- uint32_t sum_uniform_buffers = m_device->phy().properties().limits.maxDescriptorSetUniformBuffers;
- uint32_t sum_dyn_storage_buffers = m_device->phy().properties().limits.maxDescriptorSetStorageBuffersDynamic;
- uint32_t sum_storage_buffers = m_device->phy().properties().limits.maxDescriptorSetStorageBuffers;
- uint32_t sum_sampled_images = m_device->phy().properties().limits.maxDescriptorSetSampledImages;
- uint32_t sum_storage_images = m_device->phy().properties().limits.maxDescriptorSetStorageImages;
- uint32_t sum_samplers = m_device->phy().properties().limits.maxDescriptorSetSamplers;
- uint32_t sum_input_attachments = m_device->phy().properties().limits.maxDescriptorSetInputAttachments;
-
- // Devices that report UINT32_MAX for any of these limits can't run this test
- if (UINT32_MAX == std::max({max_uniform_buffers, max_storage_buffers, max_sampled_images, max_storage_images, max_samplers})) {
- printf("%s Physical device limits report as 2^32-1. Skipping test.\n", kSkipPrefix);
- return;
- }
-
- VkDescriptorSetLayoutBinding dslb = {};
- std::vector<VkDescriptorSetLayoutBinding> dslb_vec = {};
- VkDescriptorSetLayout ds_layout = VK_NULL_HANDLE;
- VkDescriptorSetLayoutCreateInfo ds_layout_ci = {};
- ds_layout_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
- ds_layout_ci.pNext = NULL;
- VkPipelineLayoutCreateInfo pipeline_layout_ci = {};
- pipeline_layout_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
- pipeline_layout_ci.pNext = NULL;
- pipeline_layout_ci.setLayoutCount = 1;
- pipeline_layout_ci.pSetLayouts = &ds_layout;
- VkPipelineLayout pipeline_layout = VK_NULL_HANDLE;
-
- // VU 0fe0023e - too many sampler type descriptors in fragment stage
- dslb_vec.clear();
- dslb.binding = 0;
- dslb.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
- dslb.descriptorCount = max_samplers;
- dslb.stageFlags = VK_SHADER_STAGE_ALL_GRAPHICS;
- dslb.pImmutableSamplers = NULL;
- dslb_vec.push_back(dslb);
- dslb.binding = 1;
- dslb.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
- dslb.descriptorCount = max_combined;
- dslb.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
- dslb_vec.push_back(dslb);
-
- ds_layout_ci.bindingCount = dslb_vec.size();
- ds_layout_ci.pBindings = dslb_vec.data();
- VkResult err = vkCreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
- ASSERT_VK_SUCCESS(err);
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00287");
- if ((max_samplers + max_combined) > sum_samplers) {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01677"); // expect all-stages sum too
- }
- if (max_combined > sum_sampled_images) {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01682"); // expect all-stages sum too
- }
- err = vkCreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
- m_errorMonitor->VerifyFound();
- vkDestroyPipelineLayout(m_device->device(), pipeline_layout, NULL); // Unnecessary but harmless if test passed
- pipeline_layout = VK_NULL_HANDLE;
- vkDestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
-
- // VU 0fe00240 - too many uniform buffer type descriptors in vertex stage
- dslb_vec.clear();
- dslb.binding = 0;
- dslb.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
- dslb.descriptorCount = max_uniform_buffers + 1;
- dslb.stageFlags = VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT;
- dslb_vec.push_back(dslb);
- dslb.binding = 1;
- dslb.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
- dslb.stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
- dslb_vec.push_back(dslb);
-
- ds_layout_ci.bindingCount = dslb_vec.size();
- ds_layout_ci.pBindings = dslb_vec.data();
- err = vkCreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
- ASSERT_VK_SUCCESS(err);
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00288");
- if (dslb.descriptorCount > sum_uniform_buffers) {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01678"); // expect all-stages sum too
- }
- if (dslb.descriptorCount > sum_dyn_uniform_buffers) {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01679"); // expect all-stages sum too
- }
- err = vkCreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
- m_errorMonitor->VerifyFound();
- vkDestroyPipelineLayout(m_device->device(), pipeline_layout, NULL); // Unnecessary but harmless if test passed
- pipeline_layout = VK_NULL_HANDLE;
- vkDestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
-
- // VU 0fe00242 - too many storage buffer type descriptors in compute stage
- dslb_vec.clear();
- dslb.binding = 0;
- dslb.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
- dslb.descriptorCount = max_storage_buffers + 1;
- dslb.stageFlags = VK_SHADER_STAGE_ALL;
- dslb_vec.push_back(dslb);
- dslb.binding = 1;
- dslb.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC;
- dslb_vec.push_back(dslb);
- dslb.binding = 2;
- dslb.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
- dslb.stageFlags = VK_SHADER_STAGE_COMPUTE_BIT;
- dslb_vec.push_back(dslb);
-
- ds_layout_ci.bindingCount = dslb_vec.size();
- ds_layout_ci.pBindings = dslb_vec.data();
- err = vkCreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
- ASSERT_VK_SUCCESS(err);
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00289");
- if (dslb.descriptorCount > sum_dyn_storage_buffers) {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01681"); // expect all-stages sum too
- }
- if (dslb_vec[0].descriptorCount + dslb_vec[2].descriptorCount > sum_storage_buffers) {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01680"); // expect all-stages sum too
- }
- err = vkCreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
- m_errorMonitor->VerifyFound();
- vkDestroyPipelineLayout(m_device->device(), pipeline_layout, NULL); // Unnecessary but harmless if test passed
- pipeline_layout = VK_NULL_HANDLE;
- vkDestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
-
- // VU 0fe00244 - too many sampled image type descriptors in multiple stages
- dslb_vec.clear();
- dslb.binding = 0;
- dslb.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
- dslb.descriptorCount = max_sampled_images;
- dslb.stageFlags = VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT;
- dslb_vec.push_back(dslb);
- dslb.binding = 1;
- dslb.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
- dslb.stageFlags = VK_SHADER_STAGE_ALL_GRAPHICS;
- dslb_vec.push_back(dslb);
- dslb.binding = 2;
- dslb.descriptorCount = max_combined;
- dslb.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
- dslb_vec.push_back(dslb);
-
- ds_layout_ci.bindingCount = dslb_vec.size();
- ds_layout_ci.pBindings = dslb_vec.data();
- err = vkCreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
- ASSERT_VK_SUCCESS(err);
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00290");
- if (max_combined + 2 * max_sampled_images > sum_sampled_images) {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01682"); // expect all-stages sum too
- }
- if (max_combined > sum_samplers) {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01677"); // expect all-stages sum too
- }
- err = vkCreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
- m_errorMonitor->VerifyFound();
- vkDestroyPipelineLayout(m_device->device(), pipeline_layout, NULL); // Unnecessary but harmless if test passed
- pipeline_layout = VK_NULL_HANDLE;
- vkDestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
-
- // VU 0fe00246 - too many storage image type descriptors in fragment stage
- dslb_vec.clear();
- dslb.binding = 0;
- dslb.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
- dslb.descriptorCount = 1 + (max_storage_images / 2);
- dslb.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
- dslb_vec.push_back(dslb);
- dslb.binding = 1;
- dslb.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER;
- dslb.stageFlags = VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT | VK_SHADER_STAGE_COMPUTE_BIT;
- dslb_vec.push_back(dslb);
-
- ds_layout_ci.bindingCount = dslb_vec.size();
- ds_layout_ci.pBindings = dslb_vec.data();
- err = vkCreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
- ASSERT_VK_SUCCESS(err);
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00291");
- if (2 * dslb.descriptorCount > sum_storage_images) {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01683"); // expect all-stages sum too
- }
- err = vkCreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
- m_errorMonitor->VerifyFound();
- vkDestroyPipelineLayout(m_device->device(), pipeline_layout, NULL); // Unnecessary but harmless if test passed
- pipeline_layout = VK_NULL_HANDLE;
- vkDestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
-
- // VU 0fe00d18 - too many input attachments in fragment stage
- dslb_vec.clear();
- dslb.binding = 0;
- dslb.descriptorType = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT;
- dslb.descriptorCount = 1 + max_input_attachments;
- dslb.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
- dslb_vec.push_back(dslb);
-
- ds_layout_ci.bindingCount = dslb_vec.size();
- ds_layout_ci.pBindings = dslb_vec.data();
- err = vkCreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
- ASSERT_VK_SUCCESS(err);
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01676");
- if (dslb.descriptorCount > sum_input_attachments) {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01684"); // expect all-stages sum too
- }
- err = vkCreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
- m_errorMonitor->VerifyFound();
- vkDestroyPipelineLayout(m_device->device(), pipeline_layout, NULL); // Unnecessary but harmless if test passed
- pipeline_layout = VK_NULL_HANDLE;
- vkDestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
-}
-
-TEST_F(VkLayerTest, CreatePipelineLayoutExcessDescriptorsOverall) {
- TEST_DESCRIPTION("Attempt to create a pipeline layout where total descriptors exceed limits");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- uint32_t max_uniform_buffers = m_device->phy().properties().limits.maxPerStageDescriptorUniformBuffers;
- uint32_t max_storage_buffers = m_device->phy().properties().limits.maxPerStageDescriptorStorageBuffers;
- uint32_t max_sampled_images = m_device->phy().properties().limits.maxPerStageDescriptorSampledImages;
- uint32_t max_storage_images = m_device->phy().properties().limits.maxPerStageDescriptorStorageImages;
- uint32_t max_samplers = m_device->phy().properties().limits.maxPerStageDescriptorSamplers;
- uint32_t max_input_attachments = m_device->phy().properties().limits.maxPerStageDescriptorInputAttachments;
-
- uint32_t sum_dyn_uniform_buffers = m_device->phy().properties().limits.maxDescriptorSetUniformBuffersDynamic;
- uint32_t sum_uniform_buffers = m_device->phy().properties().limits.maxDescriptorSetUniformBuffers;
- uint32_t sum_dyn_storage_buffers = m_device->phy().properties().limits.maxDescriptorSetStorageBuffersDynamic;
- uint32_t sum_storage_buffers = m_device->phy().properties().limits.maxDescriptorSetStorageBuffers;
- uint32_t sum_sampled_images = m_device->phy().properties().limits.maxDescriptorSetSampledImages;
- uint32_t sum_storage_images = m_device->phy().properties().limits.maxDescriptorSetStorageImages;
- uint32_t sum_samplers = m_device->phy().properties().limits.maxDescriptorSetSamplers;
- uint32_t sum_input_attachments = m_device->phy().properties().limits.maxDescriptorSetInputAttachments;
-
- // Devices that report UINT32_MAX for any of these limits can't run this test
- if (UINT32_MAX == std::max({sum_dyn_uniform_buffers, sum_uniform_buffers, sum_dyn_storage_buffers, sum_storage_buffers,
- sum_sampled_images, sum_storage_images, sum_samplers, sum_input_attachments})) {
- printf("%s Physical device limits report as 2^32-1. Skipping test.\n", kSkipPrefix);
- return;
- }
-
- VkDescriptorSetLayoutBinding dslb = {};
- std::vector<VkDescriptorSetLayoutBinding> dslb_vec = {};
- VkDescriptorSetLayout ds_layout = VK_NULL_HANDLE;
- VkDescriptorSetLayoutCreateInfo ds_layout_ci = {};
- ds_layout_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
- ds_layout_ci.pNext = NULL;
- VkPipelineLayoutCreateInfo pipeline_layout_ci = {};
- pipeline_layout_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
- pipeline_layout_ci.pNext = NULL;
- pipeline_layout_ci.setLayoutCount = 1;
- pipeline_layout_ci.pSetLayouts = &ds_layout;
- VkPipelineLayout pipeline_layout = VK_NULL_HANDLE;
-
- // VU 0fe00d1a - too many sampler type descriptors overall
- dslb_vec.clear();
- dslb.binding = 0;
- dslb.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
- dslb.descriptorCount = sum_samplers / 2;
- dslb.stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
- dslb.pImmutableSamplers = NULL;
- dslb_vec.push_back(dslb);
- dslb.binding = 1;
- dslb.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
- dslb.descriptorCount = sum_samplers - dslb.descriptorCount + 1;
- dslb.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
- dslb_vec.push_back(dslb);
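- // Across the two bindings the layout declares sum_samplers + 1 sampler-type descriptors (SAMPLER plus
- // COMBINED_IMAGE_SAMPLER), one more than maxDescriptorSetSamplers allows.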
-
- ds_layout_ci.bindingCount = dslb_vec.size();
- ds_layout_ci.pBindings = dslb_vec.data();
- VkResult err = vkCreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
- ASSERT_VK_SUCCESS(err);
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01677");
- if (dslb.descriptorCount > max_samplers) {
- m_errorMonitor->SetDesiredFailureMsg(
- VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00287"); // Expect max-per-stage samplers exceeds limits
- }
- if (dslb.descriptorCount > sum_sampled_images) {
- m_errorMonitor->SetDesiredFailureMsg(
- VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01682"); // Expect max overall sampled image count exceeds limits
- }
- if (dslb.descriptorCount > max_sampled_images) {
- m_errorMonitor->SetDesiredFailureMsg(
- VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00290"); // Expect max per-stage sampled image count exceeds limits
- }
- err = vkCreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
- m_errorMonitor->VerifyFound();
- vkDestroyPipelineLayout(m_device->device(), pipeline_layout, NULL); // Unnecessary but harmless if test passed
- pipeline_layout = VK_NULL_HANDLE;
- vkDestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
-
- // VU 0fe00d1c - too many uniform buffer type descriptors overall
- dslb_vec.clear();
- dslb.binding = 0;
- dslb.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
- dslb.descriptorCount = sum_uniform_buffers + 1;
- dslb.stageFlags = VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT;
- dslb.pImmutableSamplers = NULL;
- dslb_vec.push_back(dslb);
-
- ds_layout_ci.bindingCount = dslb_vec.size();
- ds_layout_ci.pBindings = dslb_vec.data();
- err = vkCreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
- ASSERT_VK_SUCCESS(err);
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01678");
- if (dslb.descriptorCount > max_uniform_buffers) {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00288"); // expect max-per-stage too
- }
- err = vkCreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
- m_errorMonitor->VerifyFound();
- vkDestroyPipelineLayout(m_device->device(), pipeline_layout, NULL); // Unnecessary but harmless if test passed
- pipeline_layout = VK_NULL_HANDLE;
- vkDestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
-
- // VU 0fe00d1e - too many dynamic uniform buffer type descriptors overall
- dslb_vec.clear();
- dslb.binding = 0;
- dslb.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
- dslb.descriptorCount = sum_dyn_uniform_buffers + 1;
- dslb.stageFlags = VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT;
- dslb.pImmutableSamplers = NULL;
- dslb_vec.push_back(dslb);
-
- ds_layout_ci.bindingCount = dslb_vec.size();
- ds_layout_ci.pBindings = dslb_vec.data();
- err = vkCreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
- ASSERT_VK_SUCCESS(err);
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01679");
- if (dslb.descriptorCount > max_uniform_buffers) {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00288"); // expect max-per-stage too
- }
- err = vkCreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
- m_errorMonitor->VerifyFound();
- vkDestroyPipelineLayout(m_device->device(), pipeline_layout, NULL); // Unnecessary but harmless if test passed
- pipeline_layout = VK_NULL_HANDLE;
- vkDestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
-
- // VU 0fe00d20 - too many storage buffer type descriptors overall
- dslb_vec.clear();
- dslb.binding = 0;
- dslb.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
- dslb.descriptorCount = sum_storage_buffers + 1;
- dslb.stageFlags = VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT;
- dslb.pImmutableSamplers = NULL;
- dslb_vec.push_back(dslb);
-
- ds_layout_ci.bindingCount = dslb_vec.size();
- ds_layout_ci.pBindings = dslb_vec.data();
- err = vkCreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
- ASSERT_VK_SUCCESS(err);
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01680");
- if (dslb.descriptorCount > max_storage_buffers) {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00289"); // expect max-per-stage too
- }
- err = vkCreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
- m_errorMonitor->VerifyFound();
- vkDestroyPipelineLayout(m_device->device(), pipeline_layout, NULL); // Unnecessary but harmless if test passed
- pipeline_layout = VK_NULL_HANDLE;
- vkDestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
-
- // VU 0fe00d22 - too many dynamic storage buffer type descriptors overall
- dslb_vec.clear();
- dslb.binding = 0;
- dslb.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC;
- dslb.descriptorCount = sum_dyn_storage_buffers + 1;
- dslb.stageFlags = VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT;
- dslb.pImmutableSamplers = NULL;
- dslb_vec.push_back(dslb);
-
- ds_layout_ci.bindingCount = dslb_vec.size();
- ds_layout_ci.pBindings = dslb_vec.data();
- err = vkCreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
- ASSERT_VK_SUCCESS(err);
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01681");
- if (dslb.descriptorCount > max_storage_buffers) {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00289"); // expect max-per-stage too
- }
- err = vkCreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
- m_errorMonitor->VerifyFound();
- vkDestroyPipelineLayout(m_device->device(), pipeline_layout, NULL); // Unnecessary but harmless if test passed
- pipeline_layout = VK_NULL_HANDLE;
- vkDestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
-
- // VU 0fe00d24 - too many sampled image type descriptors overall
- dslb_vec.clear();
- dslb.binding = 0;
- dslb.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
- dslb.descriptorCount = max_samplers;
- dslb.stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
- dslb.pImmutableSamplers = NULL;
- dslb_vec.push_back(dslb);
- dslb.binding = 1;
- dslb.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
- // revisit: not robust to odd limits.
- uint32_t remaining = (max_samplers > sum_sampled_images ? 0 : (sum_sampled_images - max_samplers) / 2);
- dslb.descriptorCount = 1 + remaining;
- dslb.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
- dslb_vec.push_back(dslb);
- dslb.binding = 2;
- dslb.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
- dslb.stageFlags = VK_SHADER_STAGE_COMPUTE_BIT;
- dslb_vec.push_back(dslb);
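- // Combined image samplers, sampled images, and uniform texel buffers all count toward the sampled-image total, so the
- // three bindings together exceed maxDescriptorSetSampledImages.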
-
- ds_layout_ci.bindingCount = dslb_vec.size();
- ds_layout_ci.pBindings = dslb_vec.data();
- err = vkCreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
- ASSERT_VK_SUCCESS(err);
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01682");
- if (std::max(dslb_vec[0].descriptorCount, dslb_vec[1].descriptorCount) > max_sampled_images) {
- m_errorMonitor->SetDesiredFailureMsg(
- VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00290"); // Expect max-per-stage sampled images to exceed limits
- }
- err = vkCreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
- m_errorMonitor->VerifyFound();
- vkDestroyPipelineLayout(m_device->device(), pipeline_layout, NULL); // Unnecessary but harmless if test passed
- pipeline_layout = VK_NULL_HANDLE;
- vkDestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
-
- // VU 0fe00d26 - too many storage image type descriptors overall
- dslb_vec.clear();
- dslb.binding = 0;
- dslb.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
- dslb.descriptorCount = sum_storage_images / 2;
- dslb.stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
- dslb.pImmutableSamplers = NULL;
- dslb_vec.push_back(dslb);
- dslb.binding = 1;
- dslb.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER;
- dslb.descriptorCount = sum_storage_images - dslb.descriptorCount + 1;
- dslb.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
- dslb_vec.push_back(dslb);
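- // Storage texel buffers count toward the storage-image total, so the two bindings declare sum_storage_images + 1 such
- // descriptors, exceeding maxDescriptorSetStorageImages.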
-
- ds_layout_ci.bindingCount = dslb_vec.size();
- ds_layout_ci.pBindings = dslb_vec.data();
- err = vkCreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
- ASSERT_VK_SUCCESS(err);
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01683");
- if (dslb.descriptorCount > max_storage_images) {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00291"); // expect max-per-stage too
- }
- err = vkCreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
- m_errorMonitor->VerifyFound();
- vkDestroyPipelineLayout(m_device->device(), pipeline_layout, NULL); // Unnecessary but harmless if test passed
- pipeline_layout = VK_NULL_HANDLE;
- vkDestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
-
- // VU 0fe00d28 - too many input attachment type descriptors overall
- dslb_vec.clear();
- dslb.binding = 0;
- dslb.descriptorType = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT;
- dslb.descriptorCount = sum_input_attachments + 1;
- dslb.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
- dslb.pImmutableSamplers = NULL;
- dslb_vec.push_back(dslb);
-
- ds_layout_ci.bindingCount = dslb_vec.size();
- ds_layout_ci.pBindings = dslb_vec.data();
- err = vkCreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
- ASSERT_VK_SUCCESS(err);
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01684");
- if (dslb.descriptorCount > max_input_attachments) {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01676"); // expect max-per-stage too
- }
- err = vkCreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
- m_errorMonitor->VerifyFound();
- vkDestroyPipelineLayout(m_device->device(), pipeline_layout, NULL); // Unnecessary but harmless if test passed
- pipeline_layout = VK_NULL_HANDLE;
- vkDestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
-}
-
-TEST_F(VkLayerTest, InvalidCmdBufferPipelineDestroyed) {
- TEST_DESCRIPTION("Attempt to draw with a command buffer that is invalid due to a pipeline dependency being destroyed.");
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- {
- // Use helper to create graphics pipeline
- CreatePipelineHelper helper(*this);
- helper.InitInfo();
- helper.InitState();
- helper.CreateGraphicsPipeline();
-
- // Bind helper pipeline to command buffer
- m_commandBuffer->begin();
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, helper.pipeline_);
- m_commandBuffer->end();
-
- // pipeline will be destroyed when helper goes out of scope
- }
-
- // Cause error by submitting command buffer that references destroyed pipeline
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBuffer-VkPipeline");
- m_commandBuffer->QueueCommandBuffer(false);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, InvalidPipeline) {
- uint64_t fake_pipeline_handle = 0xbaad6001;
- VkPipeline bad_pipeline = reinterpret_cast<VkPipeline &>(fake_pipeline_handle);
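- // The forged handle was never created by the driver, so the bind below is rejected and each subsequent draw/dispatch
- // then complains that no valid pipeline is bound.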
-
- // Enable VK_KHR_draw_indirect_count for KHR variants
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME);
- }
- ASSERT_NO_FATAL_FAILURE(InitState());
- bool has_khr_indirect = DeviceExtensionEnabled(VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME);
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- // Attempt to bind an invalid Pipeline to a valid Command Buffer
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBindPipeline-pipeline-parameter");
- m_commandBuffer->begin();
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, bad_pipeline);
- m_errorMonitor->VerifyFound();
-
- // Try each of the 6 flavors of Draw()
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo); // Draw*() calls must be recorded within a renderpass
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDraw-None-02700");
- m_commandBuffer->Draw(1, 0, 0, 0);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndexed-None-02700");
- m_commandBuffer->DrawIndexed(1, 1, 0, 0, 0);
- m_errorMonitor->VerifyFound();
-
- VkBufferObj buffer;
- VkBufferCreateInfo ci = {};
- ci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
- ci.usage = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT;
- ci.size = 1024;
- buffer.init(*m_device, ci);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndirect-None-02700");
- vkCmdDrawIndirect(m_commandBuffer->handle(), buffer.handle(), 0, 1, 0);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndexedIndirect-None-02700");
- vkCmdDrawIndexedIndirect(m_commandBuffer->handle(), buffer.handle(), 0, 1, 0);
- m_errorMonitor->VerifyFound();
-
- if (has_khr_indirect) {
- auto fpCmdDrawIndirectCountKHR =
- (PFN_vkCmdDrawIndirectCountKHR)vkGetDeviceProcAddr(m_device->device(), "vkCmdDrawIndirectCountKHR");
- ASSERT_NE(fpCmdDrawIndirectCountKHR, nullptr);
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndirectCountKHR-None-02700");
- // stride must be a multiple of 4 and must be greater than or equal to sizeof(VkDrawIndirectCommand)
- fpCmdDrawIndirectCountKHR(m_commandBuffer->handle(), buffer.handle(), 0, buffer.handle(), 512, 1, 512);
- m_errorMonitor->VerifyFound();
-
- auto fpCmdDrawIndexedIndirectCountKHR =
- (PFN_vkCmdDrawIndexedIndirectCountKHR)vkGetDeviceProcAddr(m_device->device(), "vkCmdDrawIndexedIndirectCountKHR");
- ASSERT_NE(fpCmdDrawIndexedIndirectCountKHR, nullptr);
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndexedIndirectCountKHR-None-02700");
- // stride must be a multiple of 4 and must be greater than or equal to sizeof(VkDrawIndexedIndirectCommand)
- fpCmdDrawIndexedIndirectCountKHR(m_commandBuffer->handle(), buffer.handle(), 0, buffer.handle(), 512, 1, 512);
- m_errorMonitor->VerifyFound();
- }
-
- // Also try the Dispatch variants
- vkCmdEndRenderPass(m_commandBuffer->handle()); // Dispatch calls must be recorded outside a renderpass
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDispatch-None-02700");
- vkCmdDispatch(m_commandBuffer->handle(), 0, 0, 0);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDispatchIndirect-None-02700");
- vkCmdDispatchIndirect(m_commandBuffer->handle(), buffer.handle(), 0);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, CmdDispatchExceedLimits) {
- TEST_DESCRIPTION("Compute dispatch with dimensions that exceed device limits");
-
- // Enable KHR device group extensions, if available
- if (InstanceExtensionSupported(VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME);
- }
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- bool khx_dg_ext_available = false;
- if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_DEVICE_GROUP_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_KHR_DEVICE_GROUP_EXTENSION_NAME);
- khx_dg_ext_available = true;
- }
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- uint32_t x_count_limit = m_device->props.limits.maxComputeWorkGroupCount[0];
- uint32_t y_count_limit = m_device->props.limits.maxComputeWorkGroupCount[1];
- uint32_t z_count_limit = m_device->props.limits.maxComputeWorkGroupCount[2];
- if (std::max({x_count_limit, y_count_limit, z_count_limit}) == UINT32_MAX) {
- printf("%s device maxComputeWorkGroupCount limit reports UINT32_MAX, test not possible, skipping.\n", kSkipPrefix);
- return;
- }
-
- uint32_t x_size_limit = m_device->props.limits.maxComputeWorkGroupSize[0];
- uint32_t y_size_limit = m_device->props.limits.maxComputeWorkGroupSize[1];
- uint32_t z_size_limit = m_device->props.limits.maxComputeWorkGroupSize[2];
-
- std::string spv_source = R"(
- OpCapability Shader
- OpMemoryModel Logical GLSL450
- OpEntryPoint GLCompute %main "main"
- OpExecutionMode %main LocalSize )";
- spv_source.append(std::to_string(x_size_limit + 1) + " " + std::to_string(y_size_limit + 1) + " " +
- std::to_string(z_size_limit + 1));
- spv_source.append(R"(
- %void = OpTypeVoid
- %3 = OpTypeFunction %void
- %main = OpFunction %void None %3
- %5 = OpLabel
- OpReturn
- OpFunctionEnd)");
-
- CreateComputePipelineHelper pipe(*this);
- pipe.InitInfo();
- pipe.cs_.reset(new VkShaderObj(m_device, spv_source, VK_SHADER_STAGE_COMPUTE_BIT, this));
- pipe.InitState();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "exceeds device limit maxComputeWorkGroupSize[0]");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "exceeds device limit maxComputeWorkGroupSize[1]");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "exceeds device limit maxComputeWorkGroupSize[2]");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "features-limits-maxComputeWorkGroupInvocations");
- pipe.CreateComputePipeline();
- m_errorMonitor->VerifyFound();
-
- // Create a minimal compute pipeline
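- // Clamp each work-group dimension, then shrink them further so their product fits within maxComputeWorkGroupInvocations.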
- x_size_limit = (x_size_limit > 1024) ? 1024 : x_size_limit;
- y_size_limit = (y_size_limit > 1024) ? 1024 : y_size_limit;
- z_size_limit = (z_size_limit > 64) ? 64 : z_size_limit;
-
- uint32_t invocations_limit = m_device->props.limits.maxComputeWorkGroupInvocations;
- x_size_limit = (x_size_limit > invocations_limit) ? invocations_limit : x_size_limit;
- invocations_limit /= x_size_limit;
- y_size_limit = (y_size_limit > invocations_limit) ? invocations_limit : y_size_limit;
- invocations_limit /= y_size_limit;
- z_size_limit = (z_size_limit > invocations_limit) ? invocations_limit : z_size_limit;
-
- char cs_text[128] = "";
- sprintf(cs_text, "#version 450\nlayout(local_size_x = %d, local_size_y = %d, local_size_z = %d) in;\nvoid main() {}\n",
- x_size_limit, y_size_limit, z_size_limit);
-
- pipe.cs_.reset(new VkShaderObj(m_device, cs_text, VK_SHADER_STAGE_COMPUTE_BIT, this));
- pipe.CreateComputePipeline();
-
- // Bind pipeline to command buffer
- m_commandBuffer->begin();
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_COMPUTE, pipe.pipeline_);
-
- // Dispatch counts that exceed device limits
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDispatch-groupCountX-00386");
- vkCmdDispatch(m_commandBuffer->handle(), x_count_limit + 1, y_count_limit, z_count_limit);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDispatch-groupCountY-00387");
- vkCmdDispatch(m_commandBuffer->handle(), x_count_limit, y_count_limit + 1, z_count_limit);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDispatch-groupCountZ-00388");
- vkCmdDispatch(m_commandBuffer->handle(), x_count_limit, y_count_limit, z_count_limit + 1);
- m_errorMonitor->VerifyFound();
-
- if (khx_dg_ext_available) {
- PFN_vkCmdDispatchBaseKHR fp_vkCmdDispatchBaseKHR =
- (PFN_vkCmdDispatchBaseKHR)vkGetInstanceProcAddr(instance(), "vkCmdDispatchBaseKHR");
-
- // Base equals or exceeds limit
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDispatchBase-baseGroupX-00421");
- fp_vkCmdDispatchBaseKHR(m_commandBuffer->handle(), x_count_limit, y_count_limit - 1, z_count_limit - 1, 0, 0, 0);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDispatchBase-baseGroupX-00422");
- fp_vkCmdDispatchBaseKHR(m_commandBuffer->handle(), x_count_limit - 1, y_count_limit, z_count_limit - 1, 0, 0, 0);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDispatchBase-baseGroupZ-00423");
- fp_vkCmdDispatchBaseKHR(m_commandBuffer->handle(), x_count_limit - 1, y_count_limit - 1, z_count_limit, 0, 0, 0);
- m_errorMonitor->VerifyFound();
-
- // (Base + count) exceeds limit
- uint32_t x_base = x_count_limit / 2;
- uint32_t y_base = y_count_limit / 2;
- uint32_t z_base = z_count_limit / 2;
- x_count_limit -= x_base;
- y_count_limit -= y_base;
- z_count_limit -= z_base;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDispatchBase-groupCountX-00424");
- fp_vkCmdDispatchBaseKHR(m_commandBuffer->handle(), x_base, y_base, z_base, x_count_limit + 1, y_count_limit, z_count_limit);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDispatchBase-groupCountY-00425");
- fp_vkCmdDispatchBaseKHR(m_commandBuffer->handle(), x_base, y_base, z_base, x_count_limit, y_count_limit + 1, z_count_limit);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDispatchBase-groupCountZ-00426");
- fp_vkCmdDispatchBaseKHR(m_commandBuffer->handle(), x_base, y_base, z_base, x_count_limit, y_count_limit, z_count_limit + 1);
- m_errorMonitor->VerifyFound();
- } else {
- printf("%s KHX_DEVICE_GROUP_* extensions not supported, skipping CmdDispatchBaseKHR() tests.\n", kSkipPrefix);
- }
-}
-
-TEST_F(VkLayerTest, InvalidPipelineCreateState) {
- // Attempt to create a graphics pipeline without a vertex shader
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
- ASSERT_NO_FATAL_FAILURE(InitViewport());
-
- VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- VkPipelineShaderStageCreateInfo shaderStage = fs.GetStageCreateInfo(); // should be: vs.GetStageCreateInfo();
-
- auto set_info = [&](CreatePipelineHelper &helper) { helper.shader_stages_ = {shaderStage}; };
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "Invalid Pipeline CreateInfo State: Vertex Shader required");
-
- // Finally, check the string validation for the shader stage pName field. Correct the shader stage data, then corrupt the
- // entry-point string before calling again
- shaderStage = vs.GetStageCreateInfo();
- const uint8_t cont_char = 0xf8;
- char bad_string[] = {static_cast<char>(cont_char), static_cast<char>(cont_char), static_cast<char>(cont_char),
- static_cast<char>(cont_char)};
- shaderStage.pName = bad_string;
-
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "contains invalid characters or is badly formed");
-}
-
-TEST_F(VkLayerTest, InvalidPipelineSampleRateFeatureDisable) {
- // Enable sample shading in pipeline when the feature is disabled.
- // Disable sampleRateShading here
- VkPhysicalDeviceFeatures device_features = {};
- device_features.sampleRateShading = VK_FALSE;
-
- ASSERT_NO_FATAL_FAILURE(Init(&device_features));
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- // Cause the error by enabling sample shading...
- auto set_shading_enable = [](CreatePipelineHelper &helper) { helper.pipe_ms_state_ci_.sampleShadingEnable = VK_TRUE; };
- CreatePipelineHelper::OneshotTest(*this, set_shading_enable, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkPipelineMultisampleStateCreateInfo-sampleShadingEnable-00784");
-}
-
-TEST_F(VkLayerTest, InvalidPipelineSampleRateFeatureEnable) {
- // Verify that minSampleShading is validated against the [0.0, 1.0] range when the sampleRateShading feature is enabled.
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- // Require sampleRateShading here
- VkPhysicalDeviceFeatures device_features = {};
- ASSERT_NO_FATAL_FAILURE(GetPhysicalDeviceFeatures(&device_features));
- if (device_features.sampleRateShading == VK_FALSE) {
- printf("%s SampleRateShading feature is disabled -- skipping related checks.\n", kSkipPrefix);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitState(&device_features));
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- auto range_test = [this](float value, bool positive_test) {
- auto info_override = [value](CreatePipelineHelper &helper) {
- helper.pipe_ms_state_ci_.sampleShadingEnable = VK_TRUE;
- helper.pipe_ms_state_ci_.minSampleShading = value;
- };
- CreatePipelineHelper::OneshotTest(*this, info_override, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkPipelineMultisampleStateCreateInfo-minSampleShading-00786", positive_test);
- };
-
- range_test(NearestSmaller(0.0F), false);
- range_test(NearestGreater(1.0F), false);
- range_test(0.0F, /* positive_test= */ true);
- range_test(1.0F, /* positive_test= */ true);
-}
-
-TEST_F(VkLayerTest, InvalidPipelineSamplePNext) {
- // Exercise valid and invalid pNext chains on VkPipelineMultisampleStateCreateInfo.
- // Check for VK_KHR_get_physical_device_properties2
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- }
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- // Set up the extension structs
- auto sampleLocations = chain_util::Init<VkPipelineSampleLocationsStateCreateInfoEXT>();
- sampleLocations.sampleLocationsInfo.sType = VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT;
- auto coverageToColor = chain_util::Init<VkPipelineCoverageToColorStateCreateInfoNV>();
- auto coverageModulation = chain_util::Init<VkPipelineCoverageModulationStateCreateInfoNV>();
- auto discriminatrix = [this](const char *name) { return DeviceExtensionSupported(gpu(), nullptr, name); };
- chain_util::ExtensionChain chain(discriminatrix, &m_device_extension_names);
- chain.Add(VK_EXT_SAMPLE_LOCATIONS_EXTENSION_NAME, sampleLocations);
- chain.Add(VK_NV_FRAGMENT_COVERAGE_TO_COLOR_EXTENSION_NAME, coverageToColor);
- chain.Add(VK_NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME, coverageModulation);
- const void *extension_head = chain.Head();
-
- ASSERT_NO_FATAL_FAILURE(InitState());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- if (extension_head) {
- auto good_chain = [extension_head](CreatePipelineHelper &helper) { helper.pipe_ms_state_ci_.pNext = extension_head; };
- CreatePipelineHelper::OneshotTest(*this, good_chain, (VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT),
- "No error", true);
- } else {
- printf("%s Required extension not present -- skipping positive checks.\n", kSkipPrefix);
- }
-
- auto instance_ci = chain_util::Init<VkInstanceCreateInfo>();
- auto bad_chain = [&instance_ci](CreatePipelineHelper &helper) { helper.pipe_ms_state_ci_.pNext = &instance_ci; };
- CreatePipelineHelper::OneshotTest(*this, bad_chain, VK_DEBUG_REPORT_WARNING_BIT_EXT,
- "VUID-VkPipelineMultisampleStateCreateInfo-pNext-pNext");
-}
-
-TEST_F(VkLayerTest, VertexAttributeDivisorExtension) {
- TEST_DESCRIPTION("Test VUIDs added with VK_EXT_vertex_attribute_divisor extension.");
-
- bool inst_ext = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (inst_ext) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- }
- if (inst_ext && DeviceExtensionSupported(gpu(), nullptr, VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME);
- } else {
- printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME);
- return;
- }
-
- VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT vadf = {};
- vadf.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT;
- vadf.vertexAttributeInstanceRateDivisor = VK_TRUE;
- vadf.vertexAttributeInstanceRateZeroDivisor = VK_TRUE;
-
- VkPhysicalDeviceFeatures2 pd_features2 = {};
- pd_features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
- pd_features2.pNext = &vadf;
-
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &pd_features2));
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- const VkPhysicalDeviceLimits &dev_limits = m_device->props.limits;
- VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT pdvad_props = {};
- pdvad_props.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT;
- VkPhysicalDeviceProperties2 pd_props2 = {};
- pd_props2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
- pd_props2.pNext = &pdvad_props;
- vkGetPhysicalDeviceProperties2(gpu(), &pd_props2);
-
- VkVertexInputBindingDivisorDescriptionEXT vibdd = {};
- VkPipelineVertexInputDivisorStateCreateInfoEXT pvids_ci = {};
- pvids_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT;
- pvids_ci.vertexBindingDivisorCount = 1;
- pvids_ci.pVertexBindingDivisors = &vibdd;
- VkVertexInputBindingDescription vibd = {};
- vibd.stride = 12;
- vibd.inputRate = VK_VERTEX_INPUT_RATE_VERTEX;
-
- if (pdvad_props.maxVertexAttribDivisor < pvids_ci.vertexBindingDivisorCount) {
- printf("%sThis device does not support %d vertexBindingDivisors, skipping tests\n", kSkipPrefix,
- pvids_ci.vertexBindingDivisorCount);
- return;
- }
-
- using std::vector;
- struct TestCase {
- uint32_t div_binding;
- uint32_t div_divisor;
- uint32_t desc_binding;
- VkVertexInputRate desc_rate;
- vector<std::string> vuids;
- };
-
- // clang-format off
- vector<TestCase> test_cases = {
- { 0,
- 1,
- 0,
- VK_VERTEX_INPUT_RATE_VERTEX,
- {"VUID-VkVertexInputBindingDivisorDescriptionEXT-inputRate-01871"}
- },
- { dev_limits.maxVertexInputBindings + 1,
- 1,
- 0,
- VK_VERTEX_INPUT_RATE_INSTANCE,
- {"VUID-VkVertexInputBindingDivisorDescriptionEXT-binding-01869",
- "VUID-VkVertexInputBindingDivisorDescriptionEXT-inputRate-01871"}
- }
- };
-
- if (UINT32_MAX != pdvad_props.maxVertexAttribDivisor) { // Can't test overflow if maxVAD is UINT32_MAX
- test_cases.push_back(
- { 0,
- pdvad_props.maxVertexAttribDivisor + 1,
- 0,
- VK_VERTEX_INPUT_RATE_INSTANCE,
- {"VUID-VkVertexInputBindingDivisorDescriptionEXT-divisor-01870"}
- } );
- }
- // clang-format on
-
- for (const auto &test_case : test_cases) {
- const auto bad_divisor_state = [&test_case, &vibdd, &pvids_ci, &vibd](CreatePipelineHelper &helper) {
- vibdd.binding = test_case.div_binding;
- vibdd.divisor = test_case.div_divisor;
- vibd.binding = test_case.desc_binding;
- vibd.inputRate = test_case.desc_rate;
- helper.vi_ci_.pNext = &pvids_ci;
- helper.vi_ci_.vertexBindingDescriptionCount = 1;
- helper.vi_ci_.pVertexBindingDescriptions = &vibd;
- };
- CreatePipelineHelper::OneshotTest(*this, bad_divisor_state, VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.vuids);
- }
-}
-
-TEST_F(VkLayerTest, VertexAttributeDivisorDisabled) {
- TEST_DESCRIPTION("Test instance divisor feature disabled for VK_EXT_vertex_attribute_divisor extension.");
-
- bool inst_ext = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (inst_ext) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- }
- if (inst_ext && DeviceExtensionSupported(gpu(), nullptr, VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME);
- } else {
- printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME);
- return;
- }
-
- VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT vadf = {};
- vadf.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT;
- vadf.vertexAttributeInstanceRateDivisor = VK_FALSE;
- vadf.vertexAttributeInstanceRateZeroDivisor = VK_FALSE;
- VkPhysicalDeviceFeatures2 pd_features2 = {};
- pd_features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
- pd_features2.pNext = &vadf;
-
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &pd_features2));
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT pdvad_props = {};
- pdvad_props.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT;
- VkPhysicalDeviceProperties2 pd_props2 = {};
- pd_props2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
- pd_props2.pNext = &pdvad_props;
- vkGetPhysicalDeviceProperties2(gpu(), &pd_props2);
-
- VkVertexInputBindingDivisorDescriptionEXT vibdd = {};
- vibdd.binding = 0;
- vibdd.divisor = 2;
- VkPipelineVertexInputDivisorStateCreateInfoEXT pvids_ci = {};
- pvids_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT;
- pvids_ci.vertexBindingDivisorCount = 1;
- pvids_ci.pVertexBindingDivisors = &vibdd;
- VkVertexInputBindingDescription vibd = {};
- vibd.binding = vibdd.binding;
- vibd.stride = 12;
- vibd.inputRate = VK_VERTEX_INPUT_RATE_INSTANCE;
-
- if (pdvad_props.maxVertexAttribDivisor < pvids_ci.vertexBindingDivisorCount) {
- printf("%sThis device does not support %d vertexBindingDivisors, skipping tests\n", kSkipPrefix,
- pvids_ci.vertexBindingDivisorCount);
- return;
- }
-
- const auto instance_rate = [&pvids_ci, &vibd](CreatePipelineHelper &helper) {
- helper.vi_ci_.pNext = &pvids_ci;
- helper.vi_ci_.vertexBindingDescriptionCount = 1;
- helper.vi_ci_.pVertexBindingDescriptions = &vibd;
- };
- CreatePipelineHelper::OneshotTest(*this, instance_rate, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkVertexInputBindingDivisorDescriptionEXT-vertexAttributeInstanceRateDivisor-02229");
-}
-
-TEST_F(VkLayerTest, VertexAttributeDivisorInstanceRateZero) {
- TEST_DESCRIPTION("Test instanceRateZero feature of VK_EXT_vertex_attribute_divisor extension.");
-
- bool inst_ext = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (inst_ext) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- }
- if (inst_ext && DeviceExtensionSupported(gpu(), nullptr, VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME);
- } else {
- printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME);
- return;
- }
-
- VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT vadf = {};
- vadf.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT;
- vadf.vertexAttributeInstanceRateDivisor = VK_TRUE;
- vadf.vertexAttributeInstanceRateZeroDivisor = VK_FALSE;
- VkPhysicalDeviceFeatures2 pd_features2 = {};
- pd_features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
- pd_features2.pNext = &vadf;
-
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &pd_features2));
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkVertexInputBindingDivisorDescriptionEXT vibdd = {};
- vibdd.binding = 0;
- vibdd.divisor = 0;
- VkPipelineVertexInputDivisorStateCreateInfoEXT pvids_ci = {};
- pvids_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT;
- pvids_ci.vertexBindingDivisorCount = 1;
- pvids_ci.pVertexBindingDivisors = &vibdd;
- VkVertexInputBindingDescription vibd = {};
- vibd.binding = vibdd.binding;
- vibd.stride = 12;
- vibd.inputRate = VK_VERTEX_INPUT_RATE_INSTANCE;
-
- const auto instance_rate = [&pvids_ci, &vibd](CreatePipelineHelper &helper) {
- helper.vi_ci_.pNext = &pvids_ci;
- helper.vi_ci_.vertexBindingDescriptionCount = 1;
- helper.vi_ci_.pVertexBindingDescriptions = &vibd;
- };
- CreatePipelineHelper::OneshotTest(
- *this, instance_rate, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkVertexInputBindingDivisorDescriptionEXT-vertexAttributeInstanceRateZeroDivisor-02228");
-}
-
-/*// TODO : This test should be good, but needs Tess support in compiler to run
-TEST_F(VkLayerTest, InvalidPatchControlPoints)
-{
-    // Attempt to create a graphics pipeline using patch topology with an invalid patchControlPoints value
- VkResult err;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "Invalid Pipeline CreateInfo State: VK_PRIMITIVE_TOPOLOGY_PATCH
-primitive ");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkDescriptorPoolSize ds_type_count = {};
- ds_type_count.type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
- ds_type_count.descriptorCount = 1;
-
- VkDescriptorPoolCreateInfo ds_pool_ci = {};
- ds_pool_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
- ds_pool_ci.pNext = NULL;
- ds_pool_ci.poolSizeCount = 1;
- ds_pool_ci.pPoolSizes = &ds_type_count;
-
- VkDescriptorPool ds_pool;
- err = vkCreateDescriptorPool(m_device->device(),
-VK_DESCRIPTOR_POOL_USAGE_NON_FREE, 1, &ds_pool_ci, NULL, &ds_pool);
- ASSERT_VK_SUCCESS(err);
-
- VkDescriptorSetLayoutBinding dsl_binding = {};
- dsl_binding.binding = 0;
- dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
- dsl_binding.descriptorCount = 1;
- dsl_binding.stageFlags = VK_SHADER_STAGE_ALL;
- dsl_binding.pImmutableSamplers = NULL;
-
- VkDescriptorSetLayoutCreateInfo ds_layout_ci = {};
- ds_layout_ci.sType =
-VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
- ds_layout_ci.pNext = NULL;
- ds_layout_ci.bindingCount = 1;
- ds_layout_ci.pBindings = &dsl_binding;
-
- VkDescriptorSetLayout ds_layout;
- err = vkCreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL,
-&ds_layout);
- ASSERT_VK_SUCCESS(err);
-
- VkDescriptorSet descriptorSet;
- err = vkAllocateDescriptorSets(m_device->device(), ds_pool,
-VK_DESCRIPTOR_SET_USAGE_NON_FREE, 1, &ds_layout, &descriptorSet);
- ASSERT_VK_SUCCESS(err);
-
- VkPipelineLayoutCreateInfo pipeline_layout_ci = {};
- pipeline_layout_ci.sType =
-VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
- pipeline_layout_ci.pNext = NULL;
- pipeline_layout_ci.setLayoutCount = 1;
- pipeline_layout_ci.pSetLayouts = &ds_layout;
-
- VkPipelineLayout pipeline_layout;
- err = vkCreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL,
-&pipeline_layout);
- ASSERT_VK_SUCCESS(err);
-
- VkPipelineShaderStageCreateInfo shaderStages[3];
- memset(&shaderStages, 0, 3 * sizeof(VkPipelineShaderStageCreateInfo));
-
- VkShaderObj vs(m_device,bindStateVertShaderText,VK_SHADER_STAGE_VERTEX_BIT,
-this);
- // Just using VS txt for Tess shaders as we don't care about functionality
- VkShaderObj
-tc(m_device,bindStateVertShaderText,VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT,
-this);
- VkShaderObj
-te(m_device,bindStateVertShaderText,VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT,
-this);
-
- shaderStages[0].sType =
-VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
- shaderStages[0].stage = VK_SHADER_STAGE_VERTEX_BIT;
- shaderStages[0].shader = vs.handle();
- shaderStages[1].sType =
-VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
- shaderStages[1].stage = VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT;
- shaderStages[1].shader = tc.handle();
- shaderStages[2].sType =
-VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
- shaderStages[2].stage = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT;
- shaderStages[2].shader = te.handle();
-
- VkPipelineInputAssemblyStateCreateInfo iaCI = {};
- iaCI.sType =
-VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
- iaCI.topology = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST;
-
- VkPipelineTessellationStateCreateInfo tsCI = {};
- tsCI.sType = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO;
- tsCI.patchControlPoints = 0; // This will cause an error
-
- VkGraphicsPipelineCreateInfo gp_ci = {};
- gp_ci.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
- gp_ci.pNext = NULL;
- gp_ci.stageCount = 3;
- gp_ci.pStages = shaderStages;
- gp_ci.pVertexInputState = NULL;
- gp_ci.pInputAssemblyState = &iaCI;
- gp_ci.pTessellationState = &tsCI;
- gp_ci.pViewportState = NULL;
- gp_ci.pRasterizationState = NULL;
- gp_ci.pMultisampleState = NULL;
- gp_ci.pDepthStencilState = NULL;
- gp_ci.pColorBlendState = NULL;
- gp_ci.flags = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT;
- gp_ci.layout = pipeline_layout;
- gp_ci.renderPass = renderPass();
-
- VkPipelineCacheCreateInfo pc_ci = {};
- pc_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
- pc_ci.pNext = NULL;
- pc_ci.initialSize = 0;
- pc_ci.initialData = 0;
- pc_ci.maxSize = 0;
-
- VkPipeline pipeline;
- VkPipelineCache pipelineCache;
-
- err = vkCreatePipelineCache(m_device->device(), &pc_ci, NULL,
-&pipelineCache);
- ASSERT_VK_SUCCESS(err);
- err = vkCreateGraphicsPipelines(m_device->device(), pipelineCache, 1,
-&gp_ci, NULL, &pipeline);
-
- m_errorMonitor->VerifyFound();
-
- vkDestroyPipelineCache(m_device->device(), pipelineCache, NULL);
- vkDestroyPipelineLayout(m_device->device(), pipeline_layout, NULL);
- vkDestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
- vkDestroyDescriptorPool(m_device->device(), ds_pool, NULL);
-}
-*/
-
-TEST_F(VkLayerTest, PSOViewportStateTests) {
- TEST_DESCRIPTION("Test VkPipelineViewportStateCreateInfo viewport and scissor count validation for non-multiViewport");
-
- VkPhysicalDeviceFeatures features{};
- ASSERT_NO_FATAL_FAILURE(Init(&features));
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- const auto break_vp_state = [](CreatePipelineHelper &helper) {
- helper.rs_state_ci_.rasterizerDiscardEnable = VK_FALSE;
- helper.gp_ci_.pViewportState = nullptr;
- };
- CreatePipelineHelper::OneshotTest(*this, break_vp_state, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkGraphicsPipelineCreateInfo-rasterizerDiscardEnable-00750");
-
- VkViewport viewport = {0.0f, 0.0f, 64.0f, 64.0f, 0.0f, 1.0f};
- VkViewport viewports[] = {viewport, viewport};
- VkRect2D scissor = {{0, 0}, {64, 64}};
- VkRect2D scissors[] = {scissor, scissor};
-
- // test viewport and scissor arrays
- using std::vector;
- struct TestCase {
- uint32_t viewport_count;
- VkViewport *viewports;
- uint32_t scissor_count;
- VkRect2D *scissors;
-
- vector<std::string> vuids;
- };
-
- vector<TestCase> test_cases = {
- {0,
- viewports,
- 1,
- scissors,
- {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216",
- "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
- {2,
- viewports,
- 1,
- scissors,
- {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216",
- "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
- {1,
- viewports,
- 0,
- scissors,
- {"VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217",
- "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
- {1,
- viewports,
- 2,
- scissors,
- {"VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217",
- "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
- {0,
- viewports,
- 0,
- scissors,
- {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216",
- "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217"}},
- {2,
- viewports,
- 2,
- scissors,
- {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216",
- "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217"}},
- {0,
- viewports,
- 2,
- scissors,
- {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216", "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217",
- "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
- {2,
- viewports,
- 0,
- scissors,
- {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216", "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217",
- "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
- {1, nullptr, 1, scissors, {"VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00747"}},
- {1, viewports, 1, nullptr, {"VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00748"}},
- {1,
- nullptr,
- 1,
- nullptr,
- {"VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00747", "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00748"}},
- {2,
- nullptr,
- 3,
- nullptr,
- {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216", "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217",
- "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220", "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00747",
- "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00748"}},
- {0,
- nullptr,
- 0,
- nullptr,
- {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216",
- "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217"}},
- };
-
- for (const auto &test_case : test_cases) {
- const auto break_vp = [&test_case](CreatePipelineHelper &helper) {
- helper.vp_state_ci_.viewportCount = test_case.viewport_count;
- helper.vp_state_ci_.pViewports = test_case.viewports;
- helper.vp_state_ci_.scissorCount = test_case.scissor_count;
- helper.vp_state_ci_.pScissors = test_case.scissors;
- };
- CreatePipelineHelper::OneshotTest(*this, break_vp, VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.vuids);
- }
-
- vector<TestCase> dyn_test_cases = {
- {0,
- viewports,
- 1,
- scissors,
- {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216",
- "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
- {2,
- viewports,
- 1,
- scissors,
- {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216",
- "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
- {1,
- viewports,
- 0,
- scissors,
- {"VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217",
- "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
- {1,
- viewports,
- 2,
- scissors,
- {"VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217",
- "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
- {0,
- viewports,
- 0,
- scissors,
- {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216",
- "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217"}},
- {2,
- viewports,
- 2,
- scissors,
- {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216",
- "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217"}},
- {0,
- viewports,
- 2,
- scissors,
- {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216", "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217",
- "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
- {2,
- viewports,
- 0,
- scissors,
- {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216", "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217",
- "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
- {2,
- nullptr,
- 3,
- nullptr,
- {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216", "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217",
- "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
- {0,
- nullptr,
- 0,
- nullptr,
- {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216",
- "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217"}},
- };
-
- const VkDynamicState dyn_states[] = {VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR};
-
- for (const auto &test_case : dyn_test_cases) {
- const auto break_vp = [&](CreatePipelineHelper &helper) {
- VkPipelineDynamicStateCreateInfo dyn_state_ci = {};
- dyn_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
- dyn_state_ci.dynamicStateCount = size(dyn_states);
- dyn_state_ci.pDynamicStates = dyn_states;
- helper.dyn_state_ci_ = dyn_state_ci;
-
- helper.vp_state_ci_.viewportCount = test_case.viewport_count;
- helper.vp_state_ci_.pViewports = test_case.viewports;
- helper.vp_state_ci_.scissorCount = test_case.scissor_count;
- helper.vp_state_ci_.pScissors = test_case.scissors;
- };
- CreatePipelineHelper::OneshotTest(*this, break_vp, VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.vuids);
- }
-}
-
-// Set Extension dynamic states without enabling the required Extensions.
-TEST_F(VkLayerTest, ExtensionDynamicStatesSetWOExtensionEnabled) {
- TEST_DESCRIPTION("Create a graphics pipeline with Extension dynamic states without enabling the required Extensions.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- using std::vector;
- struct TestCase {
- uint32_t dynamic_state_count;
- VkDynamicState dynamic_state;
-
- char const *errmsg;
- };
-
- vector<TestCase> dyn_test_cases = {
- {1, VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV,
- "contains VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV, but VK_NV_clip_space_w_scaling"},
- {1, VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT,
- "contains VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT, but VK_EXT_discard_rectangles"},
- {1, VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT, "contains VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT, but VK_EXT_sample_locations"},
- };
-
- for (const auto &test_case : dyn_test_cases) {
- VkDynamicState state[1];
- state[0] = test_case.dynamic_state;
- const auto break_vp = [&](CreatePipelineHelper &helper) {
- VkPipelineDynamicStateCreateInfo dyn_state_ci = {};
- dyn_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
- dyn_state_ci.dynamicStateCount = test_case.dynamic_state_count;
- dyn_state_ci.pDynamicStates = state;
- helper.dyn_state_ci_ = dyn_state_ci;
- };
- CreatePipelineHelper::OneshotTest(*this, break_vp, VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.errmsg);
- }
-}
-
-TEST_F(VkLayerTest, PSOViewportStateMultiViewportTests) {
- TEST_DESCRIPTION("Test VkPipelineViewportStateCreateInfo viewport and scissor count validation for multiViewport feature");
-
- ASSERT_NO_FATAL_FAILURE(Init()); // enables all supported features
-
- if (!m_device->phy().features().multiViewport) {
- printf("%s VkPhysicalDeviceFeatures::multiViewport is not supported -- skipping test.\n", kSkipPrefix);
- return;
- }
- // at least 16 viewports supported from here on
-
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkViewport viewport = {0.0f, 0.0f, 64.0f, 64.0f, 0.0f, 1.0f};
- VkViewport viewports[] = {viewport, viewport};
- VkRect2D scissor = {{0, 0}, {64, 64}};
- VkRect2D scissors[] = {scissor, scissor};
-
- using std::vector;
- struct TestCase {
- uint32_t viewport_count;
- VkViewport *viewports;
- uint32_t scissor_count;
- VkRect2D *scissors;
-
- vector<std::string> vuids;
- };
-
- vector<TestCase> test_cases = {
- {0,
- viewports,
- 2,
- scissors,
- {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-arraylength",
- "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
- {2,
- viewports,
- 0,
- scissors,
- {"VUID-VkPipelineViewportStateCreateInfo-scissorCount-arraylength",
- "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
- {0,
- viewports,
- 0,
- scissors,
- {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-arraylength",
- "VUID-VkPipelineViewportStateCreateInfo-scissorCount-arraylength"}},
- {2, nullptr, 2, scissors, {"VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00747"}},
- {2, viewports, 2, nullptr, {"VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00748"}},
- {2,
- nullptr,
- 2,
- nullptr,
- {"VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00747", "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00748"}},
- {0,
- nullptr,
- 0,
- nullptr,
- {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-arraylength",
- "VUID-VkPipelineViewportStateCreateInfo-scissorCount-arraylength"}},
- };
-
- const auto max_viewports = m_device->phy().properties().limits.maxViewports;
- const bool max_viewports_maxxed = max_viewports == std::numeric_limits<decltype(max_viewports)>::max();
- if (max_viewports_maxxed) {
- printf("%s VkPhysicalDeviceLimits::maxViewports is UINT32_MAX -- skipping part of test requiring to exceed maxViewports.\n",
- kSkipPrefix);
- } else {
- const auto too_much_viewports = max_viewports + 1;
- // avoid potentially big allocations by using only nullptr
- test_cases.push_back({too_much_viewports,
- nullptr,
- 2,
- scissors,
- {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01218",
- "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220",
- "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00747"}});
- test_cases.push_back({2,
- viewports,
- too_much_viewports,
- nullptr,
- {"VUID-VkPipelineViewportStateCreateInfo-scissorCount-01219",
- "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220",
- "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00748"}});
- test_cases.push_back(
- {too_much_viewports,
- nullptr,
- too_much_viewports,
- nullptr,
- {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01218",
- "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01219", "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00747",
- "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00748"}});
- }
-
- for (const auto &test_case : test_cases) {
- const auto break_vp = [&test_case](CreatePipelineHelper &helper) {
- helper.vp_state_ci_.viewportCount = test_case.viewport_count;
- helper.vp_state_ci_.pViewports = test_case.viewports;
- helper.vp_state_ci_.scissorCount = test_case.scissor_count;
- helper.vp_state_ci_.pScissors = test_case.scissors;
- };
- CreatePipelineHelper::OneshotTest(*this, break_vp, VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.vuids);
- }
-
- vector<TestCase> dyn_test_cases = {
- {0,
- viewports,
- 2,
- scissors,
- {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-arraylength",
- "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
- {2,
- viewports,
- 0,
- scissors,
- {"VUID-VkPipelineViewportStateCreateInfo-scissorCount-arraylength",
- "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
- {0,
- viewports,
- 0,
- scissors,
- {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-arraylength",
- "VUID-VkPipelineViewportStateCreateInfo-scissorCount-arraylength"}},
- {0,
- nullptr,
- 0,
- nullptr,
- {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-arraylength",
- "VUID-VkPipelineViewportStateCreateInfo-scissorCount-arraylength"}},
- };
-
- if (!max_viewports_maxxed) {
- const auto too_much_viewports = max_viewports + 1;
- // avoid potentially big allocations by using only nullptr
- dyn_test_cases.push_back({too_much_viewports,
- nullptr,
- 2,
- scissors,
- {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01218",
- "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}});
- dyn_test_cases.push_back({2,
- viewports,
- too_much_viewports,
- nullptr,
- {"VUID-VkPipelineViewportStateCreateInfo-scissorCount-01219",
- "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}});
- dyn_test_cases.push_back({too_much_viewports,
- nullptr,
- too_much_viewports,
- nullptr,
- {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01218",
- "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01219"}});
- }
-
- const VkDynamicState dyn_states[] = {VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR};
-
- for (const auto &test_case : dyn_test_cases) {
- const auto break_vp = [&](CreatePipelineHelper &helper) {
- VkPipelineDynamicStateCreateInfo dyn_state_ci = {};
- dyn_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
- dyn_state_ci.dynamicStateCount = size(dyn_states);
- dyn_state_ci.pDynamicStates = dyn_states;
- helper.dyn_state_ci_ = dyn_state_ci;
-
- helper.vp_state_ci_.viewportCount = test_case.viewport_count;
- helper.vp_state_ci_.pViewports = test_case.viewports;
- helper.vp_state_ci_.scissorCount = test_case.scissor_count;
- helper.vp_state_ci_.pScissors = test_case.scissors;
- };
- CreatePipelineHelper::OneshotTest(*this, break_vp, VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.vuids);
- }
-}
-
-TEST_F(VkLayerTest, DynViewportAndScissorUndefinedDrawState) {
- TEST_DESCRIPTION("Test viewport and scissor dynamic state that is not set before draw");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- // TODO: should also test on !multiViewport
- if (!m_device->phy().features().multiViewport) {
- printf("%s Device does not support multiple viewports/scissors; skipped.\n", kSkipPrefix);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitViewport());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- const VkPipelineLayoutObj pipeline_layout(m_device);
-
- VkPipelineObj pipeline_dyn_vp(m_device);
- pipeline_dyn_vp.AddShader(&vs);
- pipeline_dyn_vp.AddShader(&fs);
- pipeline_dyn_vp.AddDefaultColorAttachment();
- pipeline_dyn_vp.MakeDynamic(VK_DYNAMIC_STATE_VIEWPORT);
- pipeline_dyn_vp.SetScissor(m_scissors);
- ASSERT_VK_SUCCESS(pipeline_dyn_vp.CreateVKPipeline(pipeline_layout.handle(), m_renderPass));
-
- VkPipelineObj pipeline_dyn_sc(m_device);
- pipeline_dyn_sc.AddShader(&vs);
- pipeline_dyn_sc.AddShader(&fs);
- pipeline_dyn_sc.AddDefaultColorAttachment();
- pipeline_dyn_sc.SetViewport(m_viewports);
- pipeline_dyn_sc.MakeDynamic(VK_DYNAMIC_STATE_SCISSOR);
- ASSERT_VK_SUCCESS(pipeline_dyn_sc.CreateVKPipeline(pipeline_layout.handle(), m_renderPass));
-
- m_commandBuffer->begin();
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "Dynamic viewport(s) 0 are used by pipeline state object, ");
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_dyn_vp.handle());
- vkCmdSetViewport(m_commandBuffer->handle(), 1, 1,
- &m_viewports[0]); // Forgetting to set needed 0th viewport (PSO viewportCount == 1)
- m_commandBuffer->Draw(1, 0, 0, 0);
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Dynamic scissor(s) 0 are used by pipeline state object, ");
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_dyn_sc.handle());
- vkCmdSetScissor(m_commandBuffer->handle(), 1, 1,
- &m_scissors[0]); // Forgetting to set needed 0th scissor (PSO scissorCount == 1)
- m_commandBuffer->Draw(1, 0, 0, 0);
- m_errorMonitor->VerifyFound();
-
- m_commandBuffer->EndRenderPass();
- m_commandBuffer->end();
-}
-
-TEST_F(VkLayerTest, PSOLineWidthInvalid) {
- TEST_DESCRIPTION("Test non-1.0 lineWidth errors when pipeline is created and in vkCmdSetLineWidth");
- VkPhysicalDeviceFeatures features{};
- ASSERT_NO_FATAL_FAILURE(Init(&features));
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- const std::vector<float> test_cases = {-1.0f, 0.0f, NearestSmaller(1.0f), NearestGreater(1.0f), NAN};
-
- // test VkPipelineRasterizationStateCreateInfo::lineWidth
- for (const auto test_case : test_cases) {
- const auto set_lineWidth = [&](CreatePipelineHelper &helper) { helper.rs_state_ci_.lineWidth = test_case; };
- CreatePipelineHelper::OneshotTest(*this, set_lineWidth, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00749");
- }
-
- // test vkCmdSetLineWidth
- m_commandBuffer->begin();
-
- for (const auto test_case : test_cases) {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetLineWidth-lineWidth-00788");
- vkCmdSetLineWidth(m_commandBuffer->handle(), test_case);
- m_errorMonitor->VerifyFound();
- }
-}
-
-TEST_F(VkLayerTest, VUID_VkVertexInputBindingDescription_binding_00618) {
- TEST_DESCRIPTION(
- "Test VUID-VkVertexInputBindingDescription-binding-00618: binding must be less than "
- "VkPhysicalDeviceLimits::maxVertexInputBindings");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- // Test when binding is greater than or equal to VkPhysicalDeviceLimits::maxVertexInputBindings.
- VkVertexInputBindingDescription vertex_input_binding_description{};
- vertex_input_binding_description.binding = m_device->props.limits.maxVertexInputBindings;
-
- const auto set_binding = [&](CreatePipelineHelper &helper) {
- helper.vi_ci_.pVertexBindingDescriptions = &vertex_input_binding_description;
- helper.vi_ci_.vertexBindingDescriptionCount = 1;
- };
- CreatePipelineHelper::OneshotTest(*this, set_binding, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkVertexInputBindingDescription-binding-00618");
-}
-
-TEST_F(VkLayerTest, VUID_VkVertexInputBindingDescription_stride_00619) {
- TEST_DESCRIPTION(
- "Test VUID-VkVertexInputBindingDescription-stride-00619: stride must be less than or equal to "
- "VkPhysicalDeviceLimits::maxVertexInputBindingStride");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- // Test when stride is greater than VkPhysicalDeviceLimits::maxVertexInputBindingStride.
- VkVertexInputBindingDescription vertex_input_binding_description{};
- vertex_input_binding_description.stride = m_device->props.limits.maxVertexInputBindingStride + 1;
-
- const auto set_binding = [&](CreatePipelineHelper &helper) {
- helper.vi_ci_.pVertexBindingDescriptions = &vertex_input_binding_description;
- helper.vi_ci_.vertexBindingDescriptionCount = 1;
- };
- CreatePipelineHelper::OneshotTest(*this, set_binding, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkVertexInputBindingDescription-stride-00619");
-}
-
-TEST_F(VkLayerTest, VUID_VkVertexInputAttributeDescription_location_00620) {
- TEST_DESCRIPTION(
- "Test VUID-VkVertexInputAttributeDescription-location-00620: location must be less than "
- "VkPhysicalDeviceLimits::maxVertexInputAttributes");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- // Test when location is greater than or equal to VkPhysicalDeviceLimits::maxVertexInputAttributes.
- VkVertexInputAttributeDescription vertex_input_attribute_description{};
- vertex_input_attribute_description.location = m_device->props.limits.maxVertexInputAttributes;
-
- const auto set_attribute = [&](CreatePipelineHelper &helper) {
- helper.vi_ci_.pVertexAttributeDescriptions = &vertex_input_attribute_description;
- helper.vi_ci_.vertexAttributeDescriptionCount = 1;
- };
- CreatePipelineHelper::OneshotTest(*this, set_attribute, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- vector<string>{"VUID-VkVertexInputAttributeDescription-location-00620",
- "VUID-VkPipelineVertexInputStateCreateInfo-binding-00615"});
-}
-
-TEST_F(VkLayerTest, VUID_VkVertexInputAttributeDescription_binding_00621) {
- TEST_DESCRIPTION(
- "Test VUID-VkVertexInputAttributeDescription-binding-00621: binding must be less than "
- "VkPhysicalDeviceLimits::maxVertexInputBindings");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- // Test when binding is greater than or equal to VkPhysicalDeviceLimits::maxVertexInputBindings.
- VkVertexInputAttributeDescription vertex_input_attribute_description{};
- vertex_input_attribute_description.binding = m_device->props.limits.maxVertexInputBindings;
-
- const auto set_attribute = [&](CreatePipelineHelper &helper) {
- helper.vi_ci_.pVertexAttributeDescriptions = &vertex_input_attribute_description;
- helper.vi_ci_.vertexAttributeDescriptionCount = 1;
- };
- CreatePipelineHelper::OneshotTest(*this, set_attribute, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- vector<string>{"VUID-VkVertexInputAttributeDescription-binding-00621",
- "VUID-VkPipelineVertexInputStateCreateInfo-binding-00615"});
-}
-
-TEST_F(VkLayerTest, VUID_VkVertexInputAttributeDescription_offset_00622) {
- TEST_DESCRIPTION(
- "Test VUID-VkVertexInputAttributeDescription-offset-00622: offset must be less than or equal to "
- "VkPhysicalDeviceLimits::maxVertexInputAttributeOffset");
-
- EnableDeviceProfileLayer();
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- uint32_t maxVertexInputAttributeOffset = 0;
- {
- VkPhysicalDeviceProperties device_props = {};
- vkGetPhysicalDeviceProperties(gpu(), &device_props);
- maxVertexInputAttributeOffset = device_props.limits.maxVertexInputAttributeOffset;
- if (maxVertexInputAttributeOffset == 0xFFFFFFFF) {
- // Attempt to artificially lower maximum offset
- PFN_vkSetPhysicalDeviceLimitsEXT fpvkSetPhysicalDeviceLimitsEXT =
- (PFN_vkSetPhysicalDeviceLimitsEXT)vkGetInstanceProcAddr(instance(), "vkSetPhysicalDeviceLimitsEXT");
- if (!fpvkSetPhysicalDeviceLimitsEXT) {
- printf("%s All offsets are valid & device_profile_api not found; skipped.\n", kSkipPrefix);
- return;
- }
- device_props.limits.maxVertexInputAttributeOffset = device_props.limits.maxVertexInputBindingStride - 2;
- fpvkSetPhysicalDeviceLimitsEXT(gpu(), &device_props.limits);
- maxVertexInputAttributeOffset = device_props.limits.maxVertexInputAttributeOffset;
- }
- }
- ASSERT_NO_FATAL_FAILURE(InitState());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkVertexInputBindingDescription vertex_input_binding_description{};
- vertex_input_binding_description.binding = 0;
- vertex_input_binding_description.stride = m_device->props.limits.maxVertexInputBindingStride;
- vertex_input_binding_description.inputRate = VK_VERTEX_INPUT_RATE_VERTEX;
- // Test when offset is greater than maximum.
- VkVertexInputAttributeDescription vertex_input_attribute_description{};
- vertex_input_attribute_description.format = VK_FORMAT_R8_UNORM;
- vertex_input_attribute_description.offset = maxVertexInputAttributeOffset + 1;
-
- const auto set_attribute = [&](CreatePipelineHelper &helper) {
- helper.vi_ci_.pVertexBindingDescriptions = &vertex_input_binding_description;
- helper.vi_ci_.vertexBindingDescriptionCount = 1;
- helper.vi_ci_.pVertexAttributeDescriptions = &vertex_input_attribute_description;
- helper.vi_ci_.vertexAttributeDescriptionCount = 1;
- };
- CreatePipelineHelper::OneshotTest(*this, set_attribute, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkVertexInputAttributeDescription-offset-00622");
-}
-
-TEST_F(VkLayerTest, NumSamplesMismatch) {
-    // Create a command buffer where the pipeline's MSAA sample count doesn't match the render pass
-    // sampleCount
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Num samples mismatch! ");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- OneOffDescriptorSet descriptor_set(m_device, {
- {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
- });
-
- VkPipelineMultisampleStateCreateInfo pipe_ms_state_ci = {};
- pipe_ms_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
- pipe_ms_state_ci.pNext = NULL;
- pipe_ms_state_ci.rasterizationSamples = VK_SAMPLE_COUNT_4_BIT;
- pipe_ms_state_ci.sampleShadingEnable = 0;
- pipe_ms_state_ci.minSampleShading = 1.0;
- pipe_ms_state_ci.pSampleMask = NULL;
-
- const VkPipelineLayoutObj pipeline_layout(m_device, {&descriptor_set.layout_});
-
- VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this); // We shouldn't need a fragment shader
- // but add it to be able to run on more devices
- VkPipelineObj pipe(m_device);
- pipe.AddShader(&vs);
- pipe.AddShader(&fs);
- pipe.AddDefaultColorAttachment();
- pipe.SetMSAA(&pipe_ms_state_ci);
-
- m_errorMonitor->SetUnexpectedError("VUID-VkGraphicsPipelineCreateInfo-subpass-00757");
- pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());
-
- m_commandBuffer->begin();
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
-
- VkViewport viewport = {0, 0, 16, 16, 0, 1};
- vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
- VkRect2D scissor = {{0, 0}, {16, 16}};
- vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
-
- // Render triangle (the error should trigger on the attempt to draw).
- m_commandBuffer->Draw(3, 1, 0, 0);
-
- // Finalize recording of the command buffer
- m_commandBuffer->EndRenderPass();
- m_commandBuffer->end();
-
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, NumBlendAttachMismatch) {
- // Create Pipeline where the number of blend attachments doesn't match the
- // number of color attachments. In this case, we don't add any color
- // blend attachments even though we have a color attachment.
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkPipelineMultisampleStateCreateInfo pipe_ms_state_ci = {};
- pipe_ms_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
- pipe_ms_state_ci.pNext = NULL;
- pipe_ms_state_ci.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
- pipe_ms_state_ci.sampleShadingEnable = 0;
- pipe_ms_state_ci.minSampleShading = 1.0;
- pipe_ms_state_ci.pSampleMask = NULL;
-
- const auto set_MSAA = [&](CreatePipelineHelper &helper) {
- helper.pipe_ms_state_ci_ = pipe_ms_state_ci;
- helper.cb_ci_.attachmentCount = 0;
- };
- CreatePipelineHelper::OneshotTest(*this, set_MSAA, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkGraphicsPipelineCreateInfo-attachmentCount-00746");
-}
-
-TEST_F(VkLayerTest, CmdClearAttachmentTests) {
- TEST_DESCRIPTION("Various tests for validating usage of vkCmdClearAttachments");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- m_commandBuffer->begin();
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
-
- // Main thing we care about for this test is that the VkImage obj we're
- // clearing matches Color Attachment of FB
- // Also pass down other dummy params to keep driver and paramchecker happy
- VkClearAttachment color_attachment;
- color_attachment.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- color_attachment.clearValue.color.float32[0] = 1.0;
- color_attachment.clearValue.color.float32[1] = 1.0;
- color_attachment.clearValue.color.float32[2] = 1.0;
- color_attachment.clearValue.color.float32[3] = 1.0;
- color_attachment.colorAttachment = 0;
- VkClearRect clear_rect = {{{0, 0}, {(uint32_t)m_width, (uint32_t)m_height}}, 0, 1};
-
- // Call for full-sized FB Color attachment prior to issuing a Draw
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
- "UNASSIGNED-CoreValidation-DrawState-ClearCmdBeforeDraw");
- vkCmdClearAttachments(m_commandBuffer->handle(), 1, &color_attachment, 1, &clear_rect);
- m_errorMonitor->VerifyFound();
-
- clear_rect.rect.extent.width = renderPassBeginInfo().renderArea.extent.width + 4;
- clear_rect.rect.extent.height = clear_rect.rect.extent.height / 2;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearAttachments-pRects-00016");
- vkCmdClearAttachments(m_commandBuffer->handle(), 1, &color_attachment, 1, &clear_rect);
- m_errorMonitor->VerifyFound();
-
- // baseLayer >= view layers
- clear_rect.rect.extent.width = (uint32_t)m_width;
- clear_rect.baseArrayLayer = 1;
- clear_rect.layerCount = 1;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearAttachments-pRects-00017");
- vkCmdClearAttachments(m_commandBuffer->handle(), 1, &color_attachment, 1, &clear_rect);
- m_errorMonitor->VerifyFound();
-
- // baseLayer + layerCount > view layers
- clear_rect.rect.extent.width = (uint32_t)m_width;
- clear_rect.baseArrayLayer = 0;
- clear_rect.layerCount = 2;
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearAttachments-pRects-00017");
- vkCmdClearAttachments(m_commandBuffer->handle(), 1, &color_attachment, 1, &clear_rect);
- m_errorMonitor->VerifyFound();
-
- m_commandBuffer->EndRenderPass();
- m_commandBuffer->end();
-}
-
-TEST_F(VkLayerTest, VtxBufferBadIndex) {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
- "UNASSIGNED-CoreValidation-DrawState-VtxIndexOutOfBounds");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitViewport());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkPipelineMultisampleStateCreateInfo pipe_ms_state_ci = {};
- pipe_ms_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
- pipe_ms_state_ci.pNext = NULL;
- pipe_ms_state_ci.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
- pipe_ms_state_ci.sampleShadingEnable = 0;
- pipe_ms_state_ci.minSampleShading = 1.0;
- pipe_ms_state_ci.pSampleMask = NULL;
-
- CreatePipelineHelper pipe(*this);
- pipe.InitInfo();
- pipe.pipe_ms_state_ci_ = pipe_ms_state_ci;
- pipe.InitState();
- pipe.CreateGraphicsPipeline();
-
- m_commandBuffer->begin();
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
- // Don't care about actual data, just need to get to draw to flag error
- const float vbo_data[3] = {1.f, 0.f, 1.f};
- VkConstantBufferObj vbo(m_device, sizeof(vbo_data), (const void *)&vbo_data, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT);
- m_commandBuffer->BindVertexBuffer(&vbo, (VkDeviceSize)0, 1); // VBO idx 1, but no VBO in PSO
- m_commandBuffer->Draw(1, 0, 0, 0);
-
- m_errorMonitor->VerifyFound();
-
- m_commandBuffer->EndRenderPass();
- m_commandBuffer->end();
-}
-
-TEST_F(VkLayerTest, InvalidVertexBindingDescriptions) {
- TEST_DESCRIPTION(
- "Attempt to create a graphics pipeline where:"
- "1) count of vertex bindings exceeds device's maxVertexInputBindings limit"
- "2) requested bindings include a duplicate binding value");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- const uint32_t binding_count = m_device->props.limits.maxVertexInputBindings + 1;
-
- std::vector<VkVertexInputBindingDescription> input_bindings(binding_count);
- for (uint32_t i = 0; i < binding_count; ++i) {
- input_bindings[i].binding = i;
- input_bindings[i].stride = 4;
- input_bindings[i].inputRate = VK_VERTEX_INPUT_RATE_VERTEX;
- }
-    // Let the last binding description use the same binding as the first one
- input_bindings[binding_count - 1].binding = 0;
-
- VkVertexInputAttributeDescription input_attrib;
- input_attrib.binding = 0;
- input_attrib.location = 0;
- input_attrib.format = VK_FORMAT_R32G32B32_SFLOAT;
- input_attrib.offset = 0;
-
- const auto set_Info = [&](CreatePipelineHelper &helper) {
- helper.vi_ci_.pVertexBindingDescriptions = input_bindings.data();
- helper.vi_ci_.vertexBindingDescriptionCount = binding_count;
- helper.vi_ci_.pVertexAttributeDescriptions = &input_attrib;
- helper.vi_ci_.vertexAttributeDescriptionCount = 1;
- };
- CreatePipelineHelper::OneshotTest(
- *this, set_Info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- vector<string>{"VUID-VkPipelineVertexInputStateCreateInfo-vertexBindingDescriptionCount-00613",
- "VUID-VkPipelineVertexInputStateCreateInfo-pVertexBindingDescriptions-00616"});
-}
-
-TEST_F(VkLayerTest, InvalidVertexAttributeDescriptions) {
- TEST_DESCRIPTION(
- "Attempt to create a graphics pipeline where:"
- "1) count of vertex attributes exceeds device's maxVertexInputAttributes limit"
- "2) requested location include a duplicate location value"
- "3) binding used by one attribute is not defined by a binding description");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkVertexInputBindingDescription input_binding;
- input_binding.binding = 0;
- input_binding.stride = 4;
- input_binding.inputRate = VK_VERTEX_INPUT_RATE_VERTEX;
-
- const uint32_t attribute_count = m_device->props.limits.maxVertexInputAttributes + 1;
- std::vector<VkVertexInputAttributeDescription> input_attribs(attribute_count);
- for (uint32_t i = 0; i < attribute_count; ++i) {
- input_attribs[i].binding = 0;
- input_attribs[i].location = i;
- input_attribs[i].format = VK_FORMAT_R32G32B32_SFLOAT;
- input_attribs[i].offset = 0;
- }
-    // Let the last input_attribs description use the same location as the first one
- input_attribs[attribute_count - 1].location = 0;
-    // Let the last input_attribs description use a binding which is not defined
- input_attribs[attribute_count - 1].binding = 1;
-
- const auto set_Info = [&](CreatePipelineHelper &helper) {
- helper.vi_ci_.pVertexBindingDescriptions = &input_binding;
- helper.vi_ci_.vertexBindingDescriptionCount = 1;
- helper.vi_ci_.pVertexAttributeDescriptions = input_attribs.data();
- helper.vi_ci_.vertexAttributeDescriptionCount = attribute_count;
- };
- CreatePipelineHelper::OneshotTest(
- *this, set_Info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- vector<string>{"VUID-VkPipelineVertexInputStateCreateInfo-vertexAttributeDescriptionCount-00614",
- "VUID-VkPipelineVertexInputStateCreateInfo-binding-00615",
- "VUID-VkPipelineVertexInputStateCreateInfo-pVertexAttributeDescriptions-00617"});
-}
-
-TEST_F(VkLayerTest, ColorBlendInvalidLogicOp) {
- TEST_DESCRIPTION("Attempt to use invalid VkPipelineColorBlendStateCreateInfo::logicOp value.");
-
- ASSERT_NO_FATAL_FAILURE(Init()); // enables all supported features
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- if (!m_device->phy().features().logicOp) {
- printf("%s Device does not support logicOp feature; skipped.\n", kSkipPrefix);
- return;
- }
-
- const auto set_shading_enable = [](CreatePipelineHelper &helper) {
- helper.cb_ci_.logicOpEnable = VK_TRUE;
- helper.cb_ci_.logicOp = static_cast<VkLogicOp>(VK_LOGIC_OP_END_RANGE + 1); // invalid logicOp to be tested
- };
- CreatePipelineHelper::OneshotTest(*this, set_shading_enable, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkPipelineColorBlendStateCreateInfo-logicOpEnable-00607");
-}
-
-TEST_F(VkLayerTest, ColorBlendUnsupportedLogicOp) {
- TEST_DESCRIPTION("Attempt enabling VkPipelineColorBlendStateCreateInfo::logicOpEnable when logicOp feature is disabled.");
-
- VkPhysicalDeviceFeatures features{};
- ASSERT_NO_FATAL_FAILURE(Init(&features));
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- const auto set_shading_enable = [](CreatePipelineHelper &helper) { helper.cb_ci_.logicOpEnable = VK_TRUE; };
- CreatePipelineHelper::OneshotTest(*this, set_shading_enable, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkPipelineColorBlendStateCreateInfo-logicOpEnable-00606");
-}
-
-TEST_F(VkLayerTest, ColorBlendUnsupportedDualSourceBlend) {
- TEST_DESCRIPTION("Attempt to use dual-source blending when dualSrcBlend feature is disabled.");
-
- VkPhysicalDeviceFeatures features{};
- ASSERT_NO_FATAL_FAILURE(Init(&features));
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkPipelineColorBlendAttachmentState cb_attachments = {};
-
- const auto set_dsb_src_color_enable = [&](CreatePipelineHelper &helper) { helper.cb_attachments_ = cb_attachments; };
-
- cb_attachments.blendEnable = VK_TRUE;
- cb_attachments.srcColorBlendFactor = VK_BLEND_FACTOR_SRC1_COLOR; // bad!
- cb_attachments.dstColorBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR;
- cb_attachments.colorBlendOp = VK_BLEND_OP_ADD;
- cb_attachments.srcAlphaBlendFactor = VK_BLEND_FACTOR_SRC_ALPHA;
- cb_attachments.dstAlphaBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA;
- cb_attachments.alphaBlendOp = VK_BLEND_OP_ADD;
- CreatePipelineHelper::OneshotTest(*this, set_dsb_src_color_enable, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkPipelineColorBlendAttachmentState-srcColorBlendFactor-00608");
-
- cb_attachments.blendEnable = VK_TRUE;
- cb_attachments.srcColorBlendFactor = VK_BLEND_FACTOR_SRC_COLOR;
- cb_attachments.dstColorBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR; // bad
- cb_attachments.colorBlendOp = VK_BLEND_OP_ADD;
- cb_attachments.srcAlphaBlendFactor = VK_BLEND_FACTOR_SRC_ALPHA;
- cb_attachments.dstAlphaBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA;
- cb_attachments.alphaBlendOp = VK_BLEND_OP_ADD;
- CreatePipelineHelper::OneshotTest(*this, set_dsb_src_color_enable, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkPipelineColorBlendAttachmentState-dstColorBlendFactor-00609");
-
- cb_attachments.blendEnable = VK_TRUE;
- cb_attachments.srcColorBlendFactor = VK_BLEND_FACTOR_SRC_COLOR;
- cb_attachments.dstColorBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR;
- cb_attachments.colorBlendOp = VK_BLEND_OP_ADD;
- cb_attachments.srcAlphaBlendFactor = VK_BLEND_FACTOR_SRC1_ALPHA; // bad
- cb_attachments.dstAlphaBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA;
- cb_attachments.alphaBlendOp = VK_BLEND_OP_ADD;
- CreatePipelineHelper::OneshotTest(*this, set_dsb_src_color_enable, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkPipelineColorBlendAttachmentState-srcAlphaBlendFactor-00610");
-
- cb_attachments.blendEnable = VK_TRUE;
- cb_attachments.srcColorBlendFactor = VK_BLEND_FACTOR_SRC_COLOR;
- cb_attachments.dstColorBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR;
- cb_attachments.colorBlendOp = VK_BLEND_OP_ADD;
- cb_attachments.srcAlphaBlendFactor = VK_BLEND_FACTOR_SRC_ALPHA;
- cb_attachments.dstAlphaBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA; // bad!
- cb_attachments.alphaBlendOp = VK_BLEND_OP_ADD;
- CreatePipelineHelper::OneshotTest(*this, set_dsb_src_color_enable, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkPipelineColorBlendAttachmentState-dstAlphaBlendFactor-00611");
-}
-
-TEST_F(VkLayerTest, InvalidSPIRVCodeSize) {
- TEST_DESCRIPTION("Test that errors are produced for a spirv modules with invalid code sizes");
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Invalid SPIR-V header");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkShaderModule module;
- VkShaderModuleCreateInfo moduleCreateInfo;
- struct icd_spv_header spv;
-
- spv.magic = ICD_SPV_MAGIC;
- spv.version = ICD_SPV_VERSION;
- spv.gen_magic = 0;
-
- moduleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
- moduleCreateInfo.pNext = NULL;
- moduleCreateInfo.pCode = (const uint32_t *)&spv;
- moduleCreateInfo.codeSize = 4;
- moduleCreateInfo.flags = 0;
- vkCreateShaderModule(m_device->device(), &moduleCreateInfo, NULL, &module);
-
- m_errorMonitor->VerifyFound();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkShaderModuleCreateInfo-pCode-01376");
- std::vector<unsigned int> shader;
- VkShaderModuleCreateInfo module_create_info;
- VkShaderModule shader_module;
- module_create_info.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
- module_create_info.pNext = NULL;
- this->GLSLtoSPV(VK_SHADER_STAGE_VERTEX_BIT, bindStateVertShaderText, shader);
- module_create_info.pCode = shader.data();
- // Introduce failure by making codeSize a non-multiple of 4
- module_create_info.codeSize = shader.size() * sizeof(unsigned int) - 1;
- module_create_info.flags = 0;
- vkCreateShaderModule(m_device->handle(), &module_create_info, NULL, &shader_module);
-
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, InvalidSPIRVMagic) {
- TEST_DESCRIPTION("Test that an error is produced for a spirv module with a bad magic number");
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Invalid SPIR-V magic number");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkShaderModule module;
- VkShaderModuleCreateInfo moduleCreateInfo;
- struct icd_spv_header spv;
-
- spv.magic = (uint32_t)~ICD_SPV_MAGIC;
- spv.version = ICD_SPV_VERSION;
- spv.gen_magic = 0;
-
- moduleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
- moduleCreateInfo.pNext = NULL;
- moduleCreateInfo.pCode = (const uint32_t *)&spv;
- moduleCreateInfo.codeSize = sizeof(spv) + 16;
- moduleCreateInfo.flags = 0;
- vkCreateShaderModule(m_device->device(), &moduleCreateInfo, NULL, &module);
-
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, CreatePipelineVertexOutputNotConsumed) {
- TEST_DESCRIPTION("Test that a warning is produced for a vertex output that is not consumed by the fragment stage");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- char const *vsSource =
- "#version 450\n"
- "layout(location=0) out float x;\n"
- "void main(){\n"
- " gl_Position = vec4(1);\n"
- " x = 0;\n"
- "}\n";
- VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
-
- const auto set_info = [&](CreatePipelineHelper &helper) {
- helper.shader_stages_ = {vs.GetStageCreateInfo(), helper.fs_->GetStageCreateInfo()};
- };
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
- "not consumed by fragment shader");
-}
-
-TEST_F(VkLayerTest, CreatePipelineCheckShaderBadSpecialization) {
- TEST_DESCRIPTION("Challenge core_validation with shader validation issues related to vkCreateGraphicsPipelines.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- char const *fsSource =
- "#version 450\n"
- "layout (constant_id = 0) const float r = 0.0f;\n"
- "layout(location = 0) out vec4 uFragColor;\n"
- "void main(){\n"
- " uFragColor = vec4(r,1,0,1);\n"
- "}\n";
- VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- // This structure maps constant ids to data locations.
- const VkSpecializationMapEntry entry =
- // id, offset, size
- {0, 4, sizeof(uint32_t)}; // Challenge core validation by using a bogus offset.
-
- uint32_t data = 1;
-
- // Set up the info describing spec map and data
- const VkSpecializationInfo specialization_info = {
- 1,
- &entry,
- 1 * sizeof(float),
- &data,
- };
-
- const auto set_info = [&](CreatePipelineHelper &helper) {
- helper.shader_stages_ = {helper.vs_->GetStageCreateInfo(), fs.GetStageCreateInfo()};
- helper.shader_stages_[1].pSpecializationInfo = &specialization_info;
- };
- CreatePipelineHelper::OneshotTest(
- *this, set_info, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
- "Specialization entry 0 (for constant id 0) references memory outside provided specialization data ");
-}
-
-TEST_F(VkLayerTest, CreatePipelineCheckShaderDescriptorTypeMismatch) {
- TEST_DESCRIPTION("Challenge core_validation with shader validation issues related to vkCreateGraphicsPipelines.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- OneOffDescriptorSet descriptor_set(m_device, {
- {0, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
- });
-
- char const *vsSource =
- "#version 450\n"
- "\n"
- "layout (std140, set = 0, binding = 0) uniform buf {\n"
- " mat4 mvp;\n"
- "} ubuf;\n"
- "void main(){\n"
- " gl_Position = ubuf.mvp * vec4(1);\n"
- "}\n";
-
- VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
-
- CreatePipelineHelper pipe(*this);
- pipe.InitInfo();
- pipe.shader_stages_ = {vs.GetStageCreateInfo(), pipe.fs_->GetStageCreateInfo()};
- pipe.InitState();
- pipe.pipeline_layout_ = VkPipelineLayoutObj(m_device, {&descriptor_set.layout_});
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Type mismatch on descriptor slot 0.0 ");
- pipe.CreateGraphicsPipeline();
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, CreatePipelineCheckShaderDescriptorNotAccessible) {
- TEST_DESCRIPTION(
- "Create a pipeline in which a descriptor used by a shader stage does not include that stage in its stageFlags.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- OneOffDescriptorSet ds(m_device, {
- {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT /*!*/, nullptr},
- });
-
- char const *vsSource =
- "#version 450\n"
- "\n"
- "layout (std140, set = 0, binding = 0) uniform buf {\n"
- " mat4 mvp;\n"
- "} ubuf;\n"
- "void main(){\n"
- " gl_Position = ubuf.mvp * vec4(1);\n"
- "}\n";
-
- VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
-
- CreatePipelineHelper pipe(*this);
- pipe.InitInfo();
- pipe.shader_stages_ = {vs.GetStageCreateInfo(), pipe.fs_->GetStageCreateInfo()};
- pipe.InitState();
- pipe.pipeline_layout_ = VkPipelineLayoutObj(m_device, {&ds.layout_});
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Shader uses descriptor slot 0.0 ");
- pipe.CreateGraphicsPipeline();
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, CreatePipelineCheckShaderPushConstantNotAccessible) {
- TEST_DESCRIPTION(
- "Create a graphics pipeline in which a push constant range containing a push constant block member is not accessible from "
- "the current shader stage.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- char const *vsSource =
- "#version 450\n"
- "\n"
- "layout(push_constant, std430) uniform foo { float x; } consts;\n"
- "void main(){\n"
- " gl_Position = vec4(consts.x);\n"
- "}\n";
-
- VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
-
- // Set up a push constant range
- VkPushConstantRange push_constant_range = {};
- // Set to the wrong stage to challenge core_validation
- push_constant_range.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
- push_constant_range.size = 4;
-
- const VkPipelineLayoutObj pipeline_layout(m_device, {}, {push_constant_range});
-
- CreatePipelineHelper pipe(*this);
- pipe.InitInfo();
- pipe.shader_stages_ = {vs.GetStageCreateInfo(), pipe.fs_->GetStageCreateInfo()};
- pipe.InitState();
- pipe.pipeline_layout_ = VkPipelineLayoutObj(m_device, {}, {push_constant_range});
-
- m_errorMonitor->SetDesiredFailureMsg(
- VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "Push constant range covering variable starting at offset 0 not accessible from stage VK_SHADER_STAGE_VERTEX_BIT");
- pipe.CreateGraphicsPipeline();
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, CreatePipelineCheckShaderNotEnabled) {
- TEST_DESCRIPTION(
- "Create a graphics pipeline in which a capability declared by the shader requires a feature not enabled on the device.");
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- // Some awkward steps are required to test with custom device features.
- VkPhysicalDeviceFeatures device_features = {};
- // Disable support for 64 bit floats
- device_features.shaderFloat64 = false;
- // The sacrificial device object
- ASSERT_NO_FATAL_FAILURE(InitState(&device_features));
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- char const *fsSource =
- "#version 450\n"
- "\n"
- "layout(location=0) out vec4 color;\n"
- "void main(){\n"
- " dvec4 green = vec4(0.0, 1.0, 0.0, 1.0);\n"
- " color = vec4(green);\n"
- "}\n";
- VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- CreatePipelineHelper pipe(*this);
- pipe.InitInfo();
- pipe.shader_stages_ = {pipe.vs_->GetStageCreateInfo(), fs.GetStageCreateInfo()};
- pipe.InitState();
- pipe.pipeline_layout_ = VkPipelineLayoutObj(m_device);
-
- m_errorMonitor->SetDesiredFailureMsg(
- VK_DEBUG_REPORT_ERROR_BIT_EXT, "Shader requires VkPhysicalDeviceFeatures::shaderFloat64 but is not enabled on the device");
- pipe.CreateGraphicsPipeline();
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, CreateShaderModuleCheckBadCapability) {
- TEST_DESCRIPTION("Create a shader in which a capability declared by the shader is not supported.");
- // Note that this failure message comes from spirv-tools, specifically the validator.
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- const std::string spv_source = R"(
- OpCapability ImageRect
- OpEntryPoint Vertex %main "main"
- %main = OpFunction %void None %3
- OpReturn
- OpFunctionEnd
- )";
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Capability ImageRect is not allowed by Vulkan");
-
- std::vector<unsigned int> spv;
- VkShaderModuleCreateInfo module_create_info;
- VkShaderModule shader_module;
- module_create_info.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
- module_create_info.pNext = NULL;
- ASMtoSPV(SPV_ENV_VULKAN_1_0, 0, spv_source.data(), spv);
- module_create_info.pCode = spv.data();
- module_create_info.codeSize = spv.size() * sizeof(unsigned int);
- module_create_info.flags = 0;
-
- VkResult err = vkCreateShaderModule(m_device->handle(), &module_create_info, NULL, &shader_module);
- m_errorMonitor->VerifyFound();
- if (err == VK_SUCCESS) {
- vkDestroyShaderModule(m_device->handle(), shader_module, NULL);
- }
-}
-
-TEST_F(VkLayerTest, CreatePipelineFragmentInputNotProvided) {
- TEST_DESCRIPTION(
- "Test that an error is produced for a fragment shader input which is not present in the outputs of the previous stage");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- char const *fsSource =
- "#version 450\n"
- "\n"
- "layout(location=0) in float x;\n"
- "layout(location=0) out vec4 color;\n"
- "void main(){\n"
- " color = vec4(x);\n"
- "}\n";
- VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- const auto set_info = [&](CreatePipelineHelper &helper) {
- helper.shader_stages_ = {helper.vs_->GetStageCreateInfo(), fs.GetStageCreateInfo()};
- };
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT, "not written by vertex shader");
-}
-
-TEST_F(VkLayerTest, CreatePipelineFragmentInputNotProvidedInBlock) {
- TEST_DESCRIPTION(
-        "Test that an error is produced for a fragment shader input within an interface block, which is not present in the outputs "
- "of the previous stage.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- char const *fsSource =
- "#version 450\n"
- "\n"
- "in block { layout(location=0) float x; } ins;\n"
- "layout(location=0) out vec4 color;\n"
- "void main(){\n"
- " color = vec4(ins.x);\n"
- "}\n";
-
- VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- const auto set_info = [&](CreatePipelineHelper &helper) {
- helper.shader_stages_ = {helper.vs_->GetStageCreateInfo(), fs.GetStageCreateInfo()};
- };
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT, "not written by vertex shader");
-}
-
-TEST_F(VkLayerTest, CreatePipelineVsFsTypeMismatchArraySize) {
- TEST_DESCRIPTION("Test that an error is produced for mismatched array sizes across the vertex->fragment shader interface");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- char const *vsSource =
- "#version 450\n"
- "\n"
- "layout(location=0) out float x[2];\n"
- "void main(){\n"
- " x[0] = 0; x[1] = 0;\n"
- " gl_Position = vec4(1);\n"
- "}\n";
- char const *fsSource =
- "#version 450\n"
- "\n"
- "layout(location=0) in float x[1];\n"
- "layout(location=0) out vec4 color;\n"
- "void main(){\n"
- " color = vec4(x[0]);\n"
- "}\n";
-
- VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- const auto set_info = [&](CreatePipelineHelper &helper) {
- helper.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
- };
- CreatePipelineHelper::OneshotTest(
- *this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "Type mismatch on location 0.0: 'ptr to output arr[2] of float32' vs 'ptr to input arr[1] of float32'");
-}
-
-TEST_F(VkLayerTest, CreatePipelineVsFsTypeMismatch) {
- TEST_DESCRIPTION("Test that an error is produced for mismatched types across the vertex->fragment shader interface");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- char const *vsSource =
- "#version 450\n"
- "\n"
- "layout(location=0) out int x;\n"
- "void main(){\n"
- " x = 0;\n"
- " gl_Position = vec4(1);\n"
- "}\n";
- char const *fsSource =
- "#version 450\n"
- "\n"
- "layout(location=0) in float x;\n" /* VS writes int */
- "layout(location=0) out vec4 color;\n"
- "void main(){\n"
- " color = vec4(x);\n"
- "}\n";
-
- VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- const auto set_info = [&](CreatePipelineHelper &helper) {
- helper.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
- };
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT, "Type mismatch on location 0");
-}
-
-TEST_F(VkLayerTest, CreatePipelineVsFsTypeMismatchInBlock) {
- TEST_DESCRIPTION(
- "Test that an error is produced for mismatched types across the vertex->fragment shader interface, when the variable is "
- "contained within an interface block");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- char const *vsSource =
- "#version 450\n"
- "\n"
- "out block { layout(location=0) int x; } outs;\n"
- "void main(){\n"
- " outs.x = 0;\n"
- " gl_Position = vec4(1);\n"
- "}\n";
- char const *fsSource =
- "#version 450\n"
- "\n"
- "in block { layout(location=0) float x; } ins;\n" /* VS writes int */
- "layout(location=0) out vec4 color;\n"
- "void main(){\n"
- " color = vec4(ins.x);\n"
- "}\n";
-
- VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- const auto set_info = [&](CreatePipelineHelper &helper) {
- helper.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
- };
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT, "Type mismatch on location 0");
-}
-
-TEST_F(VkLayerTest, CreatePipelineVsFsMismatchByLocation) {
- TEST_DESCRIPTION(
-        "Test that an error is produced for location mismatches across the vertex->fragment shader interface. This should manifest "
- "as a not-written/not-consumed pair, but flushes out broken walking of the interfaces");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- char const *vsSource =
- "#version 450\n"
- "\n"
- "out block { layout(location=1) float x; } outs;\n"
- "void main(){\n"
- " outs.x = 0;\n"
- " gl_Position = vec4(1);\n"
- "}\n";
- char const *fsSource =
- "#version 450\n"
- "\n"
- "in block { layout(location=0) float x; } ins;\n"
- "layout(location=0) out vec4 color;\n"
- "void main(){\n"
- " color = vec4(ins.x);\n"
- "}\n";
-
- VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- const auto set_info = [&](CreatePipelineHelper &helper) {
- helper.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
- };
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "location 0.0 which is not written by vertex shader");
-}
-
-TEST_F(VkLayerTest, CreatePipelineVsFsMismatchByComponent) {
- TEST_DESCRIPTION(
- "Test that an error is produced for component mismatches across the vertex->fragment shader interface. It's not enough to "
- "have the same set of locations in use; matching is defined in terms of spirv variables.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- char const *vsSource =
- "#version 450\n"
- "\n"
- "out block { layout(location=0, component=0) float x; } outs;\n"
- "void main(){\n"
- " outs.x = 0;\n"
- " gl_Position = vec4(1);\n"
- "}\n";
- char const *fsSource =
- "#version 450\n"
- "\n"
- "in block { layout(location=0, component=1) float x; } ins;\n"
- "layout(location=0) out vec4 color;\n"
- "void main(){\n"
- " color = vec4(ins.x);\n"
- "}\n";
-
- VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- const auto set_info = [&](CreatePipelineHelper &helper) {
- helper.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
- };
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "location 0.1 which is not written by vertex shader");
-}
-
-TEST_F(VkLayerTest, CreatePipelineVsFsMismatchByPrecision) {
- TEST_DESCRIPTION("Test that the RelaxedPrecision decoration is validated to match");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- char const *vsSource =
- "#version 450\n"
- "layout(location=0) out mediump float x;\n"
- "void main() { gl_Position = vec4(0); x = 1.0; }\n";
- char const *fsSource =
- "#version 450\n"
- "layout(location=0) in highp float x;\n"
- "layout(location=0) out vec4 color;\n"
- "void main() { color = vec4(x); }\n";
-
- VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- const auto set_info = [&](CreatePipelineHelper &helper) {
- helper.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
- };
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT, "differ in precision");
-}
-
-TEST_F(VkLayerTest, CreatePipelineVsFsMismatchByPrecisionBlock) {
- TEST_DESCRIPTION("Test that the RelaxedPrecision decoration is validated to match");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- char const *vsSource =
- "#version 450\n"
- "out block { layout(location=0) mediump float x; };\n"
- "void main() { gl_Position = vec4(0); x = 1.0; }\n";
- char const *fsSource =
- "#version 450\n"
- "in block { layout(location=0) highp float x; };\n"
- "layout(location=0) out vec4 color;\n"
- "void main() { color = vec4(x); }\n";
-
- VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- const auto set_info = [&](CreatePipelineHelper &helper) {
- helper.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
- };
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT, "differ in precision");
-}
-
-TEST_F(VkLayerTest, CreatePipelineAttribNotConsumed) {
- TEST_DESCRIPTION("Test that a warning is produced for a vertex attribute which is not consumed by the vertex shader");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkVertexInputBindingDescription input_binding;
- memset(&input_binding, 0, sizeof(input_binding));
-
- VkVertexInputAttributeDescription input_attrib;
- memset(&input_attrib, 0, sizeof(input_attrib));
- input_attrib.format = VK_FORMAT_R32_SFLOAT;
-
- const auto set_info = [&](CreatePipelineHelper &helper) {
- helper.vi_ci_.pVertexBindingDescriptions = &input_binding;
- helper.vi_ci_.vertexBindingDescriptionCount = 1;
- helper.vi_ci_.pVertexAttributeDescriptions = &input_attrib;
- helper.vi_ci_.vertexAttributeDescriptionCount = 1;
- };
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
- "location 0 not consumed by vertex shader");
-}
-
-TEST_F(VkLayerTest, CreatePipelineAttribLocationMismatch) {
- TEST_DESCRIPTION(
- "Test that a warning is produced for a location mismatch on vertex attributes. This flushes out bad behavior in the "
- "interface walker");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkVertexInputBindingDescription input_binding;
- memset(&input_binding, 0, sizeof(input_binding));
-
- VkVertexInputAttributeDescription input_attrib;
- memset(&input_attrib, 0, sizeof(input_attrib));
- input_attrib.format = VK_FORMAT_R32_SFLOAT;
-
- const auto set_info = [&](CreatePipelineHelper &helper) {
- helper.vi_ci_.pVertexBindingDescriptions = &input_binding;
- helper.vi_ci_.vertexBindingDescriptionCount = 1;
- helper.vi_ci_.pVertexAttributeDescriptions = &input_attrib;
- helper.vi_ci_.vertexAttributeDescriptionCount = 1;
- };
- m_errorMonitor->SetUnexpectedError("Vertex shader consumes input at location 1 but not provided");
-
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
- "location 0 not consumed by vertex shader");
-}
-
-TEST_F(VkLayerTest, CreatePipelineAttribNotProvided) {
- TEST_DESCRIPTION("Test that an error is produced for a vertex shader input which is not provided by a vertex attribute");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- char const *vsSource =
- "#version 450\n"
- "\n"
- "layout(location=0) in vec4 x;\n" /* not provided */
- "void main(){\n"
- " gl_Position = x;\n"
- "}\n";
- VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
-
- const auto set_info = [&](CreatePipelineHelper &helper) {
- helper.shader_stages_ = {vs.GetStageCreateInfo(), helper.fs_->GetStageCreateInfo()};
- };
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "Vertex shader consumes input at location 0 but not provided");
-}
-
-TEST_F(VkLayerTest, CreatePipelineAttribTypeMismatch) {
- TEST_DESCRIPTION(
- "Test that an error is produced for a mismatch between the fundamental type (float/int/uint) of an attribute and the "
- "vertex shader input that consumes it");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkVertexInputBindingDescription input_binding;
- memset(&input_binding, 0, sizeof(input_binding));
-
- VkVertexInputAttributeDescription input_attrib;
- memset(&input_attrib, 0, sizeof(input_attrib));
- input_attrib.format = VK_FORMAT_R32_SFLOAT;
-
- char const *vsSource =
- "#version 450\n"
- "\n"
- "layout(location=0) in int x;\n" /* attrib provided float */
- "void main(){\n"
- " gl_Position = vec4(x);\n"
- "}\n";
- VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
-
- const auto set_info = [&](CreatePipelineHelper &helper) {
- helper.shader_stages_ = {vs.GetStageCreateInfo(), helper.fs_->GetStageCreateInfo()};
- helper.vi_ci_.pVertexBindingDescriptions = &input_binding;
- helper.vi_ci_.vertexBindingDescriptionCount = 1;
- helper.vi_ci_.pVertexAttributeDescriptions = &input_attrib;
- helper.vi_ci_.vertexAttributeDescriptionCount = 1;
- };
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "location 0 does not match vertex shader input type");
-}
-
-TEST_F(VkLayerTest, CreatePipelineDuplicateStage) {
- TEST_DESCRIPTION("Test that an error is produced for a pipeline containing multiple shaders for the same stage");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- const auto set_info = [&](CreatePipelineHelper &helper) {
- helper.shader_stages_ = {helper.vs_->GetStageCreateInfo(), helper.vs_->GetStageCreateInfo(),
- helper.fs_->GetStageCreateInfo()};
- };
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "Multiple shaders provided for stage VK_SHADER_STAGE_VERTEX_BIT");
-}
-
-TEST_F(VkLayerTest, CreatePipelineMissingEntrypoint) {
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this, "foo");
-
- const auto set_info = [&](CreatePipelineHelper &helper) {
- helper.shader_stages_ = {helper.vs_->GetStageCreateInfo(), fs.GetStageCreateInfo()};
- };
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT, "No entrypoint found named `foo`");
-}
-
-TEST_F(VkLayerTest, CreatePipelineDepthStencilRequired) {
- m_errorMonitor->SetDesiredFailureMsg(
- VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "pDepthStencilState is NULL when rasterization is enabled and subpass uses a depth/stencil attachment");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- VkPipelineObj pipe(m_device);
- pipe.AddDefaultColorAttachment();
- pipe.AddShader(&vs);
- pipe.AddShader(&fs);
-
- VkDescriptorSetObj descriptorSet(m_device);
- descriptorSet.AppendDummy();
- descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
-
- VkAttachmentDescription attachments[] = {
- {
- 0,
- VK_FORMAT_B8G8R8A8_UNORM,
- VK_SAMPLE_COUNT_1_BIT,
- VK_ATTACHMENT_LOAD_OP_DONT_CARE,
- VK_ATTACHMENT_STORE_OP_DONT_CARE,
- VK_ATTACHMENT_LOAD_OP_DONT_CARE,
- VK_ATTACHMENT_STORE_OP_DONT_CARE,
- VK_IMAGE_LAYOUT_UNDEFINED,
- VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
- },
- {
- 0,
- VK_FORMAT_D16_UNORM,
- VK_SAMPLE_COUNT_1_BIT,
- VK_ATTACHMENT_LOAD_OP_DONT_CARE,
- VK_ATTACHMENT_STORE_OP_DONT_CARE,
- VK_ATTACHMENT_LOAD_OP_DONT_CARE,
- VK_ATTACHMENT_STORE_OP_DONT_CARE,
- VK_IMAGE_LAYOUT_UNDEFINED,
- VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
- },
- };
- VkAttachmentReference refs[] = {
- {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
- {1, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL},
- };
- VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, &refs[0], nullptr, &refs[1], 0, nullptr};
- VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 2, attachments, 1, &subpass, 0, nullptr};
- VkRenderPass rp;
- VkResult err = vkCreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
- ASSERT_VK_SUCCESS(err);
-
- pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), rp);
-
- m_errorMonitor->VerifyFound();
-
- vkDestroyRenderPass(m_device->device(), rp, nullptr);
-}
-
-TEST_F(VkLayerTest, CreatePipelineTessPatchDecorationMismatch) {
- TEST_DESCRIPTION(
- "Test that an error is produced for a variable output from the TCS without the patch decoration, but consumed in the TES "
- "with the decoration.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- if (!m_device->phy().features().tessellationShader) {
- printf("%s Device does not support tessellation shaders; skipped.\n", kSkipPrefix);
- return;
- }
-
- char const *tcsSource =
- "#version 450\n"
- "layout(location=0) out int x[];\n"
- "layout(vertices=3) out;\n"
- "void main(){\n"
- " gl_TessLevelOuter[0] = gl_TessLevelOuter[1] = gl_TessLevelOuter[2] = 1;\n"
- " gl_TessLevelInner[0] = 1;\n"
- " x[gl_InvocationID] = gl_InvocationID;\n"
- "}\n";
- char const *tesSource =
- "#version 450\n"
- "layout(triangles, equal_spacing, cw) in;\n"
- "layout(location=0) patch in int x;\n"
- "void main(){\n"
- " gl_Position.xyz = gl_TessCoord;\n"
- " gl_Position.w = x;\n"
- "}\n";
- VkShaderObj tcs(m_device, tcsSource, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, this);
- VkShaderObj tes(m_device, tesSource, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, this);
-
- VkPipelineInputAssemblyStateCreateInfo iasci{VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO, nullptr, 0,
- VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, VK_FALSE};
-
- VkPipelineTessellationStateCreateInfo tsci{VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO, nullptr, 0, 3};
-
- const auto set_info = [&](CreatePipelineHelper &helper) {
- helper.gp_ci_.pTessellationState = &tsci;
- helper.gp_ci_.pInputAssemblyState = &iasci;
- helper.shader_stages_.emplace_back(tcs.GetStageCreateInfo());
- helper.shader_stages_.emplace_back(tes.GetStageCreateInfo());
- };
- CreatePipelineHelper::OneshotTest(
- *this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "is per-vertex in tessellation control shader stage but per-patch in tessellation evaluation shader stage");
-}
-
-TEST_F(VkLayerTest, CreatePipelineTessErrors) {
- TEST_DESCRIPTION("Test various errors when creating a graphics pipeline with tessellation stages active.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- if (!m_device->phy().features().tessellationShader) {
- printf("%s Device does not support tessellation shaders; skipped.\n", kSkipPrefix);
- return;
- }
-
- char const *tcsSource =
- "#version 450\n"
- "layout(vertices=3) out;\n"
- "void main(){\n"
- " gl_TessLevelOuter[0] = gl_TessLevelOuter[1] = gl_TessLevelOuter[2] = 1;\n"
- " gl_TessLevelInner[0] = 1;\n"
- "}\n";
- char const *tesSource =
- "#version 450\n"
- "layout(triangles, equal_spacing, cw) in;\n"
- "void main(){\n"
- " gl_Position.xyz = gl_TessCoord;\n"
- " gl_Position.w = 0;\n"
- "}\n";
- VkShaderObj tcs(m_device, tcsSource, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, this);
- VkShaderObj tes(m_device, tesSource, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, this);
-
- VkPipelineInputAssemblyStateCreateInfo iasci{VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO, nullptr, 0,
- VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, VK_FALSE};
-
- VkPipelineTessellationStateCreateInfo tsci{VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO, nullptr, 0, 3};
-
- std::vector<VkPipelineShaderStageCreateInfo> shader_stages = {};
- VkPipelineInputAssemblyStateCreateInfo iasci_bad = iasci;
- VkPipelineInputAssemblyStateCreateInfo *p_iasci = nullptr;
- VkPipelineTessellationStateCreateInfo tsci_bad = tsci;
- VkPipelineTessellationStateCreateInfo *p_tsci = nullptr;
-
- const auto set_info = [&](CreatePipelineHelper &helper) {
- helper.gp_ci_.pTessellationState = p_tsci;
- helper.gp_ci_.pInputAssemblyState = p_iasci;
- helper.shader_stages_ = {helper.vs_->GetStageCreateInfo(), helper.fs_->GetStageCreateInfo()};
- helper.shader_stages_.insert(helper.shader_stages_.end(), shader_stages.begin(), shader_stages.end());
- };
-
- iasci_bad.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST; // otherwise we get a failure about invalid topology
- p_iasci = &iasci_bad;
- // Pass a tess control shader without a tess eval shader
- shader_stages = {tcs.GetStageCreateInfo()};
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkGraphicsPipelineCreateInfo-pStages-00729");
-
- // Pass a tess eval shader without a tess control shader
- shader_stages = {tes.GetStageCreateInfo()};
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkGraphicsPipelineCreateInfo-pStages-00730");
-
- p_iasci = &iasci;
- shader_stages = {};
- // Pass patch topology without tessellation shaders
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkGraphicsPipelineCreateInfo-topology-00737");
-
- shader_stages = {tcs.GetStageCreateInfo(), tes.GetStageCreateInfo()};
- // Pass a NULL pTessellationState (with active tessellation shader stages)
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkGraphicsPipelineCreateInfo-pStages-00731");
-
- // Pass an invalid pTessellationState (bad sType)
- tsci_bad.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- p_tsci = &tsci_bad;
- shader_stages = {tcs.GetStageCreateInfo(), tes.GetStageCreateInfo()};
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkPipelineTessellationStateCreateInfo-sType-sType");
-
- // Pass out-of-range patchControlPoints
- p_iasci = &iasci;
- tsci_bad = tsci;
- tsci_bad.patchControlPoints = 0;
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkPipelineTessellationStateCreateInfo-patchControlPoints-01214");
-
- tsci_bad.patchControlPoints = m_device->props.limits.maxTessellationPatchSize + 1;
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkPipelineTessellationStateCreateInfo-patchControlPoints-01214");
-
- p_tsci = &tsci;
- // Pass an invalid primitive topology
- iasci_bad = iasci;
- iasci_bad.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
- p_iasci = &iasci_bad;
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkGraphicsPipelineCreateInfo-pStages-00736");
-}
-
-TEST_F(VkLayerTest, CreatePipelineAttribBindingConflict) {
- TEST_DESCRIPTION(
-        "Test that an error is produced for a vertex attribute setup where multiple binding descriptions use the same binding number");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- /* Two binding descriptions for binding 0 */
- VkVertexInputBindingDescription input_bindings[2];
- memset(input_bindings, 0, sizeof(input_bindings));
-
- VkVertexInputAttributeDescription input_attrib;
- memset(&input_attrib, 0, sizeof(input_attrib));
- input_attrib.format = VK_FORMAT_R32_SFLOAT;
-
- char const *vsSource =
- "#version 450\n"
- "\n"
- "layout(location=0) in float x;\n" /* attrib provided float */
- "void main(){\n"
- " gl_Position = vec4(x);\n"
- "}\n";
-
- VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
-
- m_errorMonitor->VerifyFound();
- const auto set_info = [&](CreatePipelineHelper &helper) {
- helper.shader_stages_ = {vs.GetStageCreateInfo(), helper.fs_->GetStageCreateInfo()};
- helper.vi_ci_.pVertexBindingDescriptions = input_bindings;
- helper.vi_ci_.vertexBindingDescriptionCount = 2;
- helper.vi_ci_.pVertexAttributeDescriptions = &input_attrib;
- helper.vi_ci_.vertexAttributeDescriptionCount = 1;
- };
- m_errorMonitor->SetUnexpectedError("VUID-VkPipelineVertexInputStateCreateInfo-pVertexBindingDescriptions-00616 ");
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "Duplicate vertex input binding descriptions for binding 0");
-}
-
-TEST_F(VkLayerTest, CreatePipelineFragmentOutputNotWritten) {
- TEST_DESCRIPTION(
- "Test that an error is produced for a fragment shader which does not provide an output for one of the pipeline's color "
- "attachments");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkShaderObj fs(m_device, bindStateMinimalShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- const auto set_info = [&](CreatePipelineHelper &helper) {
- helper.shader_stages_ = {helper.vs_->GetStageCreateInfo(), fs.GetStageCreateInfo()};
- helper.cb_attachments_.colorWriteMask = 1;
- };
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_WARNING_BIT_EXT,
- "Attachment 0 not written by fragment shader");
-}
-
-TEST_F(VkLayerTest, CreatePipelineFragmentOutputNotConsumed) {
- TEST_DESCRIPTION(
- "Test that a warning is produced for a fragment shader which provides a spurious output with no matching attachment");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- char const *fsSource =
- "#version 450\n"
- "\n"
- "layout(location=0) out vec4 x;\n"
- "layout(location=1) out vec4 y;\n" /* no matching attachment for this */
- "void main(){\n"
- " x = vec4(1);\n"
- " y = vec4(1);\n"
- "}\n";
- VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- const auto set_info = [&](CreatePipelineHelper &helper) {
- helper.shader_stages_ = {helper.vs_->GetStageCreateInfo(), fs.GetStageCreateInfo()};
- };
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_WARNING_BIT_EXT,
- "fragment shader writes to output location 1 with no matching attachment");
-}
-
-TEST_F(VkLayerTest, CreatePipelineFragmentNoOutputLocation0ButAlphaToCoverageEnabled) {
- TEST_DESCRIPTION("Test that an error is produced when alpha to coverage is enabled but no output at location 0 is declared.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget(0u));
-
- VkShaderObj fs(m_device, bindStateMinimalShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- VkPipelineMultisampleStateCreateInfo ms_state_ci = {};
- ms_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
- ms_state_ci.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
- ms_state_ci.alphaToCoverageEnable = VK_TRUE;
-
- const auto set_info = [&](CreatePipelineHelper &helper) {
- helper.shader_stages_ = {helper.vs_->GetStageCreateInfo(), fs.GetStageCreateInfo()};
- helper.pipe_ms_state_ci_ = ms_state_ci;
- };
- CreatePipelineHelper::OneshotTest(
- *this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "fragment shader doesn't declare alpha output at location 0 even though alpha to coverage is enabled.");
-}
-
-TEST_F(VkLayerTest, CreatePipelineFragmentNoAlphaLocation0ButAlphaToCoverageEnabled) {
- TEST_DESCRIPTION(
-        "Test that an error is produced when alpha to coverage is enabled but output at location 0 doesn't have an alpha channel.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget(0u));
-
- char const *fsSource =
- "#version 450\n"
- "layout(location=0) out vec3 x;\n"
- "\n"
- "void main(){\n"
- " x = vec3(1);\n"
- "}\n";
- VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- VkPipelineMultisampleStateCreateInfo ms_state_ci = {};
- ms_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
- ms_state_ci.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
- ms_state_ci.alphaToCoverageEnable = VK_TRUE;
-
- const auto set_info = [&](CreatePipelineHelper &helper) {
- helper.shader_stages_ = {helper.vs_->GetStageCreateInfo(), fs.GetStageCreateInfo()};
- helper.pipe_ms_state_ci_ = ms_state_ci;
- };
- CreatePipelineHelper::OneshotTest(
- *this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "fragment shader doesn't declare alpha output at location 0 even though alpha to coverage is enabled.");
-}
-
-TEST_F(VkLayerTest, CreatePipelineFragmentOutputTypeMismatch) {
- TEST_DESCRIPTION(
-        "Test that an error is produced for a mismatch between the fundamental type of a fragment shader output variable, and the "
- "format of the corresponding attachment");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- char const *fsSource =
- "#version 450\n"
- "\n"
- "layout(location=0) out ivec4 x;\n" /* not UNORM */
- "void main(){\n"
- " x = ivec4(1);\n"
- "}\n";
-
- VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- const auto set_info = [&](CreatePipelineHelper &helper) {
- helper.shader_stages_ = {helper.vs_->GetStageCreateInfo(), fs.GetStageCreateInfo()};
- };
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_WARNING_BIT_EXT,
- "does not match fragment shader output type");
-}
-
-TEST_F(VkLayerTest, CreatePipelineExceedMaxVertexOutputComponents) {
- TEST_DESCRIPTION(
- "Test that an error is produced when the number of output components from the vertex stage exceeds the device limit");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- for (int overflow = 0; overflow < 2; ++overflow) {
- m_errorMonitor->Reset();
-
- const uint32_t maxVsOutComp = m_device->props.limits.maxVertexOutputComponents + overflow;
- std::string vsSourceStr = "#version 450\n\n";
- const uint32_t numVec4 = maxVsOutComp / 4;
- uint32_t location = 0;
- for (uint32_t i = 0; i < numVec4; i++) {
- vsSourceStr += "layout(location=" + std::to_string(location) + ") out vec4 v" + std::to_string(i) + ";\n";
- location += 1;
- }
- const uint32_t remainder = maxVsOutComp % 4;
- if (remainder != 0) {
- if (remainder == 1) {
- vsSourceStr += "layout(location=" + std::to_string(location) + ") out float" + " vn;\n";
- } else {
- vsSourceStr += "layout(location=" + std::to_string(location) + ") out vec" + std::to_string(remainder) + " vn;\n";
- }
- location += 1;
- }
- vsSourceStr +=
- "void main(){\n"
- "}\n";
-
- std::string fsSourceStr =
- "#version 450\n"
- "\n"
- "layout(location=0) out vec4 color;\n"
- "\n"
- "void main(){\n"
- " color = vec4(1);\n"
- "}\n";
-
- VkShaderObj vs(m_device, vsSourceStr.c_str(), VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj fs(m_device, fsSourceStr.c_str(), VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- const auto set_info = [&](CreatePipelineHelper &helper) {
- helper.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
- };
- if (overflow) {
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_WARNING_BIT_EXT,
- "Vertex shader exceeds VkPhysicalDeviceLimits::maxVertexOutputComponents");
- } else {
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_WARNING_BIT_EXT, "", true);
- }
- }
-}
-
-TEST_F(VkLayerTest, CreatePipelineExceedMaxTessellationControlInputOutputComponents) {
- TEST_DESCRIPTION(
- "Test that errors are produced when the number of per-vertex input and/or output components to the tessellation control "
- "stage exceeds the device limit");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- for (int overflow = 0; overflow < 2; ++overflow) {
- m_errorMonitor->Reset();
- VkPhysicalDeviceFeatures feat;
- vkGetPhysicalDeviceFeatures(gpu(), &feat);
- if (!feat.tessellationShader) {
- printf("%s tessellation shader stage(s) unsupported.\n", kSkipPrefix);
- return;
- }
-
- // Tessellation control stage
- std::string tcsSourceStr =
- "#version 450\n"
- "\n";
- // Input components
- const uint32_t maxTescInComp = m_device->props.limits.maxTessellationControlPerVertexInputComponents + overflow;
- const uint32_t numInVec4 = maxTescInComp / 4;
- uint32_t inLocation = 0;
- for (uint32_t i = 0; i < numInVec4; i++) {
- tcsSourceStr += "layout(location=" + std::to_string(inLocation) + ") in vec4 v" + std::to_string(i) + "In[];\n";
- inLocation += 1;
- }
- const uint32_t inRemainder = maxTescInComp % 4;
- if (inRemainder != 0) {
- if (inRemainder == 1) {
- tcsSourceStr += "layout(location=" + std::to_string(inLocation) + ") in float" + " vnIn[];\n";
- } else {
- tcsSourceStr +=
- "layout(location=" + std::to_string(inLocation) + ") in vec" + std::to_string(inRemainder) + " vnIn[];\n";
- }
- inLocation += 1;
- }
-
- // Output components
- const uint32_t maxTescOutComp = m_device->props.limits.maxTessellationControlPerVertexOutputComponents + overflow;
- const uint32_t numOutVec4 = maxTescOutComp / 4;
- uint32_t outLocation = 0;
- for (uint32_t i = 0; i < numOutVec4; i++) {
- tcsSourceStr += "layout(location=" + std::to_string(outLocation) + ") out vec4 v" + std::to_string(i) + "Out[3];\n";
- outLocation += 1;
- }
- const uint32_t outRemainder = maxTescOutComp % 4;
- if (outRemainder != 0) {
- if (outRemainder == 1) {
- tcsSourceStr += "layout(location=" + std::to_string(outLocation) + ") out float" + " vnOut[3];\n";
- } else {
- tcsSourceStr +=
- "layout(location=" + std::to_string(outLocation) + ") out vec" + std::to_string(outRemainder) + " vnOut[3];\n";
- }
- outLocation += 1;
- }
-
- tcsSourceStr += "layout(vertices=3) out;\n";
- // Finalize
- tcsSourceStr +=
- "\n"
- "void main(){\n"
- "}\n";
-
- VkShaderObj tcs(m_device, tcsSourceStr.c_str(), VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, this);
- VkShaderObj tes(m_device, bindStateTeshaderText, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, this);
-
- VkPipelineInputAssemblyStateCreateInfo inputAssemblyInfo = {};
- inputAssemblyInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
- inputAssemblyInfo.pNext = NULL;
- inputAssemblyInfo.flags = 0;
- inputAssemblyInfo.topology = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST;
- inputAssemblyInfo.primitiveRestartEnable = VK_FALSE;
-
- VkPipelineTessellationStateCreateInfo tessInfo = {};
- tessInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO;
- tessInfo.pNext = NULL;
- tessInfo.flags = 0;
- tessInfo.patchControlPoints = 3;
-
- m_errorMonitor->SetUnexpectedError("UNASSIGNED-CoreValidation-Shader-InputNotProduced");
-
- const auto set_info = [&](CreatePipelineHelper &helper) {
- helper.shader_stages_ = {helper.vs_->GetStageCreateInfo(), tcs.GetStageCreateInfo(), tes.GetStageCreateInfo(),
- helper.fs_->GetStageCreateInfo()};
- helper.gp_ci_.pTessellationState = &tessInfo;
- helper.gp_ci_.pInputAssemblyState = &inputAssemblyInfo;
- };
- if (overflow) {
- CreatePipelineHelper::OneshotTest(
- *this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- vector<string>{
- "Tessellation control shader exceeds VkPhysicalDeviceLimits::maxTessellationControlPerVertexInputComponents",
- "Tessellation control shader exceeds VkPhysicalDeviceLimits::maxTessellationControlPerVertexOutputComponents"});
- } else {
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT, "", true);
- }
- }
-}
-
-TEST_F(VkLayerTest, CreatePipelineExceedMaxTessellationEvaluationInputOutputComponents) {
- TEST_DESCRIPTION(
- "Test that errors are produced when the number of input and/or output components to the tessellation evaluation stage "
- "exceeds the device limit");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- for (int overflow = 0; overflow < 2; ++overflow) {
- m_errorMonitor->Reset();
- VkPhysicalDeviceFeatures feat;
- vkGetPhysicalDeviceFeatures(gpu(), &feat);
- if (!feat.tessellationShader) {
- printf("%s tessellation shader stage(s) unsupported.\n", kSkipPrefix);
- return;
- }
-
- // Tessellation evaluation stage
- std::string tesSourceStr =
- "#version 450\n"
- "\n"
- "layout (triangles) in;\n"
- "\n";
- // Input components
- const uint32_t maxTeseInComp = m_device->props.limits.maxTessellationEvaluationInputComponents + overflow;
- const uint32_t numInVec4 = maxTeseInComp / 4;
- uint32_t inLocation = 0;
- for (uint32_t i = 0; i < numInVec4; i++) {
- tesSourceStr += "layout(location=" + std::to_string(inLocation) + ") in vec4 v" + std::to_string(i) + "In[];\n";
- inLocation += 1;
- }
- const uint32_t inRemainder = maxTeseInComp % 4;
- if (inRemainder != 0) {
- if (inRemainder == 1) {
- tesSourceStr += "layout(location=" + std::to_string(inLocation) + ") in float" + " vnIn[];\n";
- } else {
- tesSourceStr +=
- "layout(location=" + std::to_string(inLocation) + ") in vec" + std::to_string(inRemainder) + " vnIn[];\n";
- }
- inLocation += 1;
- }
-
- // Output components
- const uint32_t maxTeseOutComp = m_device->props.limits.maxTessellationEvaluationOutputComponents + overflow;
- const uint32_t numOutVec4 = maxTeseOutComp / 4;
- uint32_t outLocation = 0;
- for (uint32_t i = 0; i < numOutVec4; i++) {
- tesSourceStr += "layout(location=" + std::to_string(outLocation) + ") out vec4 v" + std::to_string(i) + "Out;\n";
- outLocation += 1;
- }
- const uint32_t outRemainder = maxTeseOutComp % 4;
- if (outRemainder != 0) {
- if (outRemainder == 1) {
- tesSourceStr += "layout(location=" + std::to_string(outLocation) + ") out float" + " vnOut;\n";
- } else {
- tesSourceStr +=
- "layout(location=" + std::to_string(outLocation) + ") out vec" + std::to_string(outRemainder) + " vnOut;\n";
- }
- outLocation += 1;
- }
-
- // Finalize
- tesSourceStr +=
- "\n"
- "void main(){\n"
- "}\n";
-
- VkShaderObj tcs(m_device, bindStateTscShaderText, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, this);
- VkShaderObj tes(m_device, tesSourceStr.c_str(), VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, this);
-
- VkPipelineInputAssemblyStateCreateInfo inputAssemblyInfo = {};
- inputAssemblyInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
- inputAssemblyInfo.pNext = NULL;
- inputAssemblyInfo.flags = 0;
- inputAssemblyInfo.topology = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST;
- inputAssemblyInfo.primitiveRestartEnable = VK_FALSE;
-
- VkPipelineTessellationStateCreateInfo tessInfo = {};
- tessInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO;
- tessInfo.pNext = NULL;
- tessInfo.flags = 0;
- tessInfo.patchControlPoints = 3;
-
- m_errorMonitor->SetUnexpectedError("UNASSIGNED-CoreValidation-Shader-InputNotProduced");
-
- const auto set_info = [&](CreatePipelineHelper &helper) {
- helper.shader_stages_ = {helper.vs_->GetStageCreateInfo(), tcs.GetStageCreateInfo(), tes.GetStageCreateInfo(),
- helper.fs_->GetStageCreateInfo()};
- helper.gp_ci_.pTessellationState = &tessInfo;
- helper.gp_ci_.pInputAssemblyState = &inputAssemblyInfo;
- };
- if (overflow) {
- CreatePipelineHelper::OneshotTest(
- *this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- vector<string>{
- "Tessellation evaluation shader exceeds VkPhysicalDeviceLimits::maxTessellationEvaluationInputComponents",
- "Tessellation evaluation shader exceeds VkPhysicalDeviceLimits::maxTessellationEvaluationOutputComponents"});
- } else {
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT, "", true);
- }
- }
-}
-
-TEST_F(VkLayerTest, CreatePipelineExceedMaxGeometryInputOutputComponents) {
- TEST_DESCRIPTION(
- "Test that errors are produced when the number of input and/or output components to the geometry stage exceeds the device "
- "limit");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- for (int overflow = 0; overflow < 2; ++overflow) {
- m_errorMonitor->Reset();
- VkPhysicalDeviceFeatures feat;
- vkGetPhysicalDeviceFeatures(gpu(), &feat);
- if (!feat.geometryShader) {
- printf("%s geometry shader stage unsupported.\n", kSkipPrefix);
- return;
- }
-
- std::string gsSourceStr =
- "#version 450\n"
- "\n"
- "layout(triangles) in;\n"
- "layout(invocations=1) in;\n";
-
- // Input components
- const uint32_t maxGeomInComp = m_device->props.limits.maxGeometryInputComponents + overflow;
- const uint32_t numInVec4 = maxGeomInComp / 4;
- uint32_t inLocation = 0;
- for (uint32_t i = 0; i < numInVec4; i++) {
- gsSourceStr += "layout(location=" + std::to_string(inLocation) + ") in vec4 v" + std::to_string(i) + "In[];\n";
- inLocation += 1;
- }
- const uint32_t inRemainder = maxGeomInComp % 4;
- if (inRemainder != 0) {
- if (inRemainder == 1) {
- gsSourceStr += "layout(location=" + std::to_string(inLocation) + ") in float" + " vnIn[];\n";
- } else {
- gsSourceStr +=
- "layout(location=" + std::to_string(inLocation) + ") in vec" + std::to_string(inRemainder) + " vnIn[];\n";
- }
- inLocation += 1;
- }
-
- // Output components
- const uint32_t maxGeomOutComp = m_device->props.limits.maxGeometryOutputComponents + overflow;
- const uint32_t numOutVec4 = maxGeomOutComp / 4;
- uint32_t outLocation = 0;
- for (uint32_t i = 0; i < numOutVec4; i++) {
- gsSourceStr += "layout(location=" + std::to_string(outLocation) + ") out vec4 v" + std::to_string(i) + "Out;\n";
- outLocation += 1;
- }
- const uint32_t outRemainder = maxGeomOutComp % 4;
- if (outRemainder != 0) {
- if (outRemainder == 1) {
- gsSourceStr += "layout(location=" + std::to_string(outLocation) + ") out float" + " vnOut;\n";
- } else {
- gsSourceStr +=
- "layout(location=" + std::to_string(outLocation) + ") out vec" + std::to_string(outRemainder) + " vnOut;\n";
- }
- outLocation += 1;
- }
-
- // Finalize
- int max_vertices = overflow ? (m_device->props.limits.maxGeometryTotalOutputComponents / maxGeomOutComp + 1) : 1;
- gsSourceStr += "layout(triangle_strip, max_vertices = " + std::to_string(max_vertices) +
- ") out;\n"
- "\n"
- "void main(){\n"
- "}\n";
-
- VkShaderObj gs(m_device, gsSourceStr.c_str(), VK_SHADER_STAGE_GEOMETRY_BIT, this);
-
- m_errorMonitor->SetUnexpectedError("UNASSIGNED-CoreValidation-Shader-InputNotProduced");
-
- const auto set_info = [&](CreatePipelineHelper &helper) {
- helper.shader_stages_ = {helper.vs_->GetStageCreateInfo(), gs.GetStageCreateInfo(), helper.fs_->GetStageCreateInfo()};
- };
- if (overflow) {
- CreatePipelineHelper::OneshotTest(
- *this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- vector<string>{"Geometry shader exceeds VkPhysicalDeviceLimits::maxGeometryInputComponents",
- "Geometry shader exceeds VkPhysicalDeviceLimits::maxGeometryOutputComponents",
- "Geometry shader exceeds VkPhysicalDeviceLimits::maxGeometryTotalOutputComponents"});
- } else {
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT, "", true);
- }
- }
-}
-
-TEST_F(VkLayerTest, CreatePipelineExceedMaxFragmentInputComponents) {
- TEST_DESCRIPTION(
-        "Test that an error is produced when the number of input components to the fragment stage exceeds the device limit");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- for (int overflow = 0; overflow < 2; ++overflow) {
- m_errorMonitor->Reset();
-
- const uint32_t maxFsInComp = m_device->props.limits.maxFragmentInputComponents + overflow;
- std::string fsSourceStr = "#version 450\n\n";
- const uint32_t numVec4 = maxFsInComp / 4;
- uint32_t location = 0;
- for (uint32_t i = 0; i < numVec4; i++) {
- fsSourceStr += "layout(location=" + std::to_string(location) + ") in vec4 v" + std::to_string(i) + ";\n";
- location += 1;
- }
- const uint32_t remainder = maxFsInComp % 4;
- if (remainder != 0) {
- if (remainder == 1) {
- fsSourceStr += "layout(location=" + std::to_string(location) + ") in float" + " vn;\n";
- } else {
- fsSourceStr += "layout(location=" + std::to_string(location) + ") in vec" + std::to_string(remainder) + " vn;\n";
- }
- location += 1;
- }
- fsSourceStr +=
- "layout(location=0) out vec4 color;"
- "\n"
- "void main(){\n"
- " color = vec4(1);\n"
- "}\n";
- VkShaderObj fs(m_device, fsSourceStr.c_str(), VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- m_errorMonitor->SetUnexpectedError("UNASSIGNED-CoreValidation-Shader-InputNotProduced");
- const auto set_info = [&](CreatePipelineHelper &helper) {
- helper.shader_stages_ = {helper.vs_->GetStageCreateInfo(), fs.GetStageCreateInfo()};
- };
- if (overflow) {
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "Fragment shader exceeds "
- "VkPhysicalDeviceLimits::maxFragmentInputComponents");
- } else {
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT, "", true);
- }
- }
-}
-
-TEST_F(VkLayerTest, CreatePipelineExceedMaxGeometryInstanceVertexCount) {
- TEST_DESCRIPTION(
- "Test that errors are produced when the number of output vertices/instances in the geometry stage exceeds the device "
- "limit");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- for (int overflow = 0; overflow < 2; ++overflow) {
- m_errorMonitor->Reset();
- VkPhysicalDeviceFeatures feat;
- vkGetPhysicalDeviceFeatures(gpu(), &feat);
- if (!feat.geometryShader) {
- printf("%s geometry shader stage unsupported.\n", kSkipPrefix);
- return;
- }
-
- std::string gsSourceStr = R"(
- OpCapability Geometry
- OpMemoryModel Logical GLSL450
- OpEntryPoint Geometry %main "main"
- OpExecutionMode %main InputPoints
- OpExecutionMode %main OutputTriangleStrip
- )";
- if (overflow) {
- gsSourceStr += "OpExecutionMode %main Invocations " +
- std::to_string(m_device->props.limits.maxGeometryShaderInvocations + 1) +
- "\n\
- OpExecutionMode %main OutputVertices " +
- std::to_string(m_device->props.limits.maxGeometryOutputVertices + 1);
- } else {
- gsSourceStr += R"(
- OpExecutionMode %main Invocations 1
- OpExecutionMode %main OutputVertices 1
- )";
- }
- gsSourceStr += R"(
- OpSource GLSL 450
- %void = OpTypeVoid
- %3 = OpTypeFunction %void
- %main = OpFunction %void None %3
- %5 = OpLabel
- OpReturn
- OpFunctionEnd
- )";
- VkShaderObj gs(m_device, gsSourceStr, VK_SHADER_STAGE_GEOMETRY_BIT, this);
-
- const auto set_info = [&](CreatePipelineHelper &helper) {
- helper.shader_stages_ = {helper.vs_->GetStageCreateInfo(), gs.GetStageCreateInfo(), helper.fs_->GetStageCreateInfo()};
- };
- if (overflow) {
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- vector<string>{"VUID-VkPipelineShaderStageCreateInfo-stage-00714",
- "VUID-VkPipelineShaderStageCreateInfo-stage-00715"});
- } else {
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT, "", true);
- }
- }
-}
-
-TEST_F(VkLayerTest, CreatePipelineUniformBlockNotProvided) {
- TEST_DESCRIPTION(
- "Test that an error is produced for a shader consuming a uniform block which has no corresponding binding in the pipeline "
- "layout");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "not declared in pipeline layout");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj fs(m_device, bindStateFragUniformShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- VkPipelineObj pipe(m_device);
- pipe.AddShader(&vs);
- pipe.AddShader(&fs);
-
- /* set up CB 0; type is UNORM by default */
- pipe.AddDefaultColorAttachment();
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkDescriptorSetObj descriptorSet(m_device);
- descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
-
- pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
-
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, CreatePipelinePushConstantsNotInLayout) {
- TEST_DESCRIPTION(
- "Test that an error is produced for a shader consuming push constants which are not provided in the pipeline layout");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- char const *vsSource =
- "#version 450\n"
- "\n"
- "layout(push_constant, std430) uniform foo { float x; } consts;\n"
- "void main(){\n"
- " gl_Position = vec4(consts.x);\n"
- "}\n";
-
- VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
-
- CreatePipelineHelper pipe(*this);
- pipe.InitInfo();
- pipe.shader_stages_ = {vs.GetStageCreateInfo(), pipe.fs_->GetStageCreateInfo()};
- pipe.InitState();
- pipe.pipeline_layout_ = VkPipelineLayoutObj(m_device, {});
- /* should have generated an error -- no push constant ranges provided! */
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "not declared in layout");
- pipe.CreateGraphicsPipeline();
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, CreatePipelineInputAttachmentMissing) {
- TEST_DESCRIPTION(
- "Test that an error is produced for a shader consuming an input attachment which is not included in the subpass "
- "description");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- char const *fsSource =
- "#version 450\n"
- "\n"
- "layout(input_attachment_index=0, set=0, binding=0) uniform subpassInput x;\n"
- "layout(location=0) out vec4 color;\n"
- "void main() {\n"
- " color = subpassLoad(x);\n"
- "}\n";
-
- VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- const auto set_info = [&](CreatePipelineHelper &helper) {
- helper.shader_stages_ = {helper.vs_->GetStageCreateInfo(), fs.GetStageCreateInfo()};
- helper.dsl_bindings_ = {{0, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr}};
- };
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "consumes input attachment index 0 but not provided in subpass");
-}
-
-TEST_F(VkLayerTest, CreatePipelineInputAttachmentTypeMismatch) {
- TEST_DESCRIPTION(
- "Test that an error is produced for a shader consuming an input attachment with a format having a different fundamental "
- "type");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "input attachment 0 format of VK_FORMAT_R8G8B8A8_UINT does not match");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- char const *fsSource =
- "#version 450\n"
- "\n"
- "layout(input_attachment_index=0, set=0, binding=0) uniform subpassInput x;\n"
- "layout(location=0) out vec4 color;\n"
- "void main() {\n"
- " color = subpassLoad(x);\n"
- "}\n";
-
- VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- VkPipelineObj pipe(m_device);
- pipe.AddShader(&vs);
- pipe.AddShader(&fs);
- pipe.AddDefaultColorAttachment();
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkDescriptorSetLayoutBinding dslb = {0, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr};
- const VkDescriptorSetLayoutObj dsl(m_device, {dslb});
-
- const VkPipelineLayoutObj pl(m_device, {&dsl});
-
- VkAttachmentDescription descs[2] = {
- {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_LOAD, VK_ATTACHMENT_STORE_OP_STORE,
- VK_ATTACHMENT_LOAD_OP_LOAD, VK_ATTACHMENT_STORE_OP_STORE, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
- VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
- {0, VK_FORMAT_R8G8B8A8_UINT, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_LOAD, VK_ATTACHMENT_STORE_OP_STORE,
- VK_ATTACHMENT_LOAD_OP_LOAD, VK_ATTACHMENT_STORE_OP_STORE, VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL},
- };
- VkAttachmentReference color = {
- 0,
- VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
- };
- VkAttachmentReference input = {
- 1,
- VK_IMAGE_LAYOUT_GENERAL,
- };
-
- VkSubpassDescription sd = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 1, &input, 1, &color, nullptr, nullptr, 0, nullptr};
-
- VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 2, descs, 1, &sd, 0, nullptr};
- VkRenderPass rp;
- VkResult err = vkCreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
- ASSERT_VK_SUCCESS(err);
-
- // error here.
- pipe.CreateVKPipeline(pl.handle(), rp);
-
- m_errorMonitor->VerifyFound();
-
- vkDestroyRenderPass(m_device->device(), rp, nullptr);
-}
-
-TEST_F(VkLayerTest, CreatePipelineInputAttachmentMissingArray) {
- TEST_DESCRIPTION(
- "Test that an error is produced for a shader consuming an input attachment which is not included in the subpass "
- "description -- array case");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- char const *fsSource =
- "#version 450\n"
- "\n"
- "layout(input_attachment_index=0, set=0, binding=0) uniform subpassInput xs[1];\n"
- "layout(location=0) out vec4 color;\n"
- "void main() {\n"
- " color = subpassLoad(xs[0]);\n"
- "}\n";
-
- VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- const auto set_info = [&](CreatePipelineHelper &helper) {
- helper.shader_stages_ = {helper.vs_->GetStageCreateInfo(), fs.GetStageCreateInfo()};
- helper.dsl_bindings_ = {{0, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 2, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr}};
- };
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "consumes input attachment index 0 but not provided in subpass");
-}
-
-TEST_F(VkLayerTest, CreateComputePipelineMissingDescriptor) {
- TEST_DESCRIPTION(
- "Test that an error is produced for a compute pipeline consuming a descriptor which is not provided in the pipeline "
- "layout");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- char const *csSource =
- "#version 450\n"
- "\n"
- "layout(local_size_x=1) in;\n"
- "layout(set=0, binding=0) buffer block { vec4 x; };\n"
- "void main(){\n"
- " x = vec4(1);\n"
- "}\n";
-
- CreateComputePipelineHelper pipe(*this);
- pipe.InitInfo();
- pipe.cs_.reset(new VkShaderObj(m_device, csSource, VK_SHADER_STAGE_COMPUTE_BIT, this));
- pipe.InitState();
- pipe.pipeline_layout_ = VkPipelineLayoutObj(m_device, {});
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Shader uses descriptor slot 0.0");
- pipe.CreateComputePipeline();
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, CreateComputePipelineDescriptorTypeMismatch) {
- TEST_DESCRIPTION("Test that an error is produced for a pipeline consuming a descriptor-backed resource of a mismatched type");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- char const *csSource =
- "#version 450\n"
- "\n"
- "layout(local_size_x=1) in;\n"
- "layout(set=0, binding=0) buffer block { vec4 x; };\n"
- "void main() {\n"
- " x.x = 1.0f;\n"
- "}\n";
-
- const auto set_info = [&](CreateComputePipelineHelper &helper) {
- helper.cs_.reset(new VkShaderObj(m_device, csSource, VK_SHADER_STAGE_COMPUTE_BIT, this));
- helper.dsl_bindings_ = {{0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr}};
- };
- CreateComputePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "but descriptor of type VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER");
-}
-
-TEST_F(VkLayerTest, MultiplePushDescriptorSets) {
- TEST_DESCRIPTION("Verify an error message for multiple push descriptor sets.");
-
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- } else {
- printf("%s Did not find VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME; skipped.\n", kSkipPrefix);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
- } else {
- printf("%s Push Descriptors Extension not supported, skipping tests\n", kSkipPrefix);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- auto push_descriptor_prop = GetPushDescriptorProperties(instance(), gpu());
- if (push_descriptor_prop.maxPushDescriptors < 1) {
- // Some implementations report an invalid maxPushDescriptors of 0
- printf("%s maxPushDescriptors is zero, skipping tests\n", kSkipPrefix);
- return;
- }
-
- VkDescriptorSetLayoutBinding dsl_binding = {};
- dsl_binding.binding = 0;
- dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
- dsl_binding.descriptorCount = 1;
- dsl_binding.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
- dsl_binding.pImmutableSamplers = NULL;
-
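-    // Building a pipeline layout from two descriptor set layouts that were both created with the push
-    // descriptor flag should trigger VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00293 below.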
- const unsigned int descriptor_set_layout_count = 2;
- std::vector<VkDescriptorSetLayoutObj> ds_layouts;
- for (uint32_t i = 0; i < descriptor_set_layout_count; ++i) {
- dsl_binding.binding = i;
- ds_layouts.emplace_back(m_device, std::vector<VkDescriptorSetLayoutBinding>(1, dsl_binding),
- VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR);
- }
- const auto &ds_vk_layouts = MakeVkHandles<VkDescriptorSetLayout>(ds_layouts);
-
- VkPipelineLayout pipeline_layout;
- VkPipelineLayoutCreateInfo pipeline_layout_ci = {};
- pipeline_layout_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
- pipeline_layout_ci.pNext = NULL;
- pipeline_layout_ci.pushConstantRangeCount = 0;
- pipeline_layout_ci.pPushConstantRanges = NULL;
- pipeline_layout_ci.setLayoutCount = ds_vk_layouts.size();
- pipeline_layout_ci.pSetLayouts = ds_vk_layouts.data();
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00293");
- vkCreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, AMDMixedAttachmentSamplesValidateGraphicsPipeline) {
- TEST_DESCRIPTION("Verify an error message for an incorrect graphics pipeline rasterization sample count.");
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- if (DeviceExtensionSupported(gpu(), nullptr, VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME);
- } else {
- printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitState());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- // Set a mismatched sample count
- VkPipelineMultisampleStateCreateInfo ms_state_ci = {};
- ms_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
- ms_state_ci.rasterizationSamples = VK_SAMPLE_COUNT_4_BIT;
-
- const auto set_info = [&](CreatePipelineHelper &helper) { helper.pipe_ms_state_ci_ = ms_state_ci; };
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkGraphicsPipelineCreateInfo-subpass-01505");
-}
-
-TEST_F(VkLayerTest, FramebufferMixedSamplesNV) {
- TEST_DESCRIPTION("Verify VK_NV_framebuffer_mixed_samples.");
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- if (DeviceExtensionSupported(gpu(), nullptr, VK_NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME);
- } else {
- printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME);
- return;
- }
-
- VkPhysicalDeviceFeatures device_features = {};
- ASSERT_NO_FATAL_FAILURE(GetPhysicalDeviceFeatures(&device_features));
- if (VK_TRUE != device_features.sampleRateShading) {
- printf("%s Test requires unsupported sampleRateShading feature.\n", kSkipPrefix);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitState());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- struct TestCase {
- VkSampleCountFlagBits color_samples;
- VkSampleCountFlagBits depth_samples;
- VkSampleCountFlagBits raster_samples;
- VkBool32 depth_test;
- VkBool32 sample_shading;
- uint32_t table_count;
- bool positiveTest;
- std::string vuid;
- };
-
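-    // Each case lists the color/depth/rasterization sample counts, the depth test and sample shading
-    // enables, the coverage modulation table count, whether pipeline creation should succeed, and the
-    // VUID exercised.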
- std::vector<TestCase> test_cases = {
- {VK_SAMPLE_COUNT_4_BIT, VK_SAMPLE_COUNT_4_BIT, VK_SAMPLE_COUNT_4_BIT, VK_FALSE, VK_FALSE, 1, true,
- "VUID-VkGraphicsPipelineCreateInfo-subpass-00757"},
- {VK_SAMPLE_COUNT_4_BIT, VK_SAMPLE_COUNT_1_BIT, VK_SAMPLE_COUNT_8_BIT, VK_FALSE, VK_FALSE, 4, false,
- "VUID-VkPipelineCoverageModulationStateCreateInfoNV-coverageModulationTableEnable-01405"},
- {VK_SAMPLE_COUNT_4_BIT, VK_SAMPLE_COUNT_1_BIT, VK_SAMPLE_COUNT_8_BIT, VK_FALSE, VK_FALSE, 2, true,
- "VUID-VkPipelineCoverageModulationStateCreateInfoNV-coverageModulationTableEnable-01405"},
- {VK_SAMPLE_COUNT_1_BIT, VK_SAMPLE_COUNT_4_BIT, VK_SAMPLE_COUNT_8_BIT, VK_TRUE, VK_FALSE, 1, false,
- "VUID-VkGraphicsPipelineCreateInfo-subpass-01411"},
- {VK_SAMPLE_COUNT_1_BIT, VK_SAMPLE_COUNT_8_BIT, VK_SAMPLE_COUNT_8_BIT, VK_TRUE, VK_FALSE, 1, true,
- "VUID-VkGraphicsPipelineCreateInfo-subpass-01411"},
- {VK_SAMPLE_COUNT_4_BIT, VK_SAMPLE_COUNT_1_BIT, VK_SAMPLE_COUNT_1_BIT, VK_FALSE, VK_FALSE, 1, false,
- "VUID-VkGraphicsPipelineCreateInfo-subpass-01412"},
- {VK_SAMPLE_COUNT_4_BIT, VK_SAMPLE_COUNT_1_BIT, VK_SAMPLE_COUNT_4_BIT, VK_FALSE, VK_FALSE, 1, true,
- "VUID-VkGraphicsPipelineCreateInfo-subpass-01412"},
- {VK_SAMPLE_COUNT_1_BIT, VK_SAMPLE_COUNT_4_BIT, VK_SAMPLE_COUNT_4_BIT, VK_FALSE, VK_TRUE, 1, false,
- "VUID-VkPipelineMultisampleStateCreateInfo-rasterizationSamples-01415"},
- {VK_SAMPLE_COUNT_1_BIT, VK_SAMPLE_COUNT_4_BIT, VK_SAMPLE_COUNT_4_BIT, VK_FALSE, VK_FALSE, 1, true,
- "VUID-VkPipelineMultisampleStateCreateInfo-rasterizationSamples-01415"},
- {VK_SAMPLE_COUNT_1_BIT, VK_SAMPLE_COUNT_4_BIT, VK_SAMPLE_COUNT_8_BIT, VK_FALSE, VK_FALSE, 1, true,
- "VUID-VkGraphicsPipelineCreateInfo-subpass-00757"}};
-
- for (const auto &test_case : test_cases) {
- VkAttachmentDescription att[2] = {{}, {}};
- att[0].format = VK_FORMAT_R8G8B8A8_UNORM;
- att[0].samples = test_case.color_samples;
- att[0].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
- att[0].finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
-
- att[1].format = VK_FORMAT_D24_UNORM_S8_UINT;
- att[1].samples = test_case.depth_samples;
- att[1].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
- att[1].finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
-
- VkAttachmentReference cr = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
- VkAttachmentReference dr = {1, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL};
-
- VkSubpassDescription sp = {};
- sp.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
- sp.colorAttachmentCount = 1;
- sp.pColorAttachments = &cr;
- sp.pResolveAttachments = NULL;
- sp.pDepthStencilAttachment = &dr;
-
- VkRenderPassCreateInfo rpi = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO};
- rpi.attachmentCount = 2;
- rpi.pAttachments = att;
- rpi.subpassCount = 1;
- rpi.pSubpasses = &sp;
-
- VkRenderPass rp;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkSubpassDescription-pDepthStencilAttachment-01418");
- VkResult err = vkCreateRenderPass(m_device->device(), &rpi, nullptr, &rp);
- m_errorMonitor->VerifyNotFound();
-
- ASSERT_VK_SUCCESS(err);
-
- VkPipelineDepthStencilStateCreateInfo ds = {VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO};
- VkPipelineCoverageModulationStateCreateInfoNV cmi = {VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV};
-
- // Create a dummy modulation table that can be used for the positive
- // coverageModulationTableCount test.
- std::vector<float> cm_table{};
-
- const auto break_samples = [&cmi, &rp, &ds, &cm_table, &test_case](CreatePipelineHelper &helper) {
- cm_table.resize(test_case.raster_samples / test_case.color_samples);
-
- cmi.flags = 0;
- cmi.coverageModulationTableEnable = (test_case.table_count > 1);
- cmi.coverageModulationTableCount = test_case.table_count;
- cmi.pCoverageModulationTable = cm_table.data();
-
- ds.depthTestEnable = test_case.depth_test;
-
- helper.pipe_ms_state_ci_.pNext = &cmi;
- helper.pipe_ms_state_ci_.rasterizationSamples = test_case.raster_samples;
- helper.pipe_ms_state_ci_.sampleShadingEnable = test_case.sample_shading;
-
- helper.gp_ci_.renderPass = rp;
- helper.gp_ci_.pDepthStencilState = &ds;
- };
-
- CreatePipelineHelper::OneshotTest(*this, break_samples, VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.vuid,
- test_case.positiveTest);
-
- vkDestroyRenderPass(m_device->device(), rp, nullptr);
- }
-}
-
-TEST_F(VkLayerTest, FramebufferMixedSamples) {
- TEST_DESCRIPTION("Verify that the expected VUIds are hits when VK_NV_framebuffer_mixed_samples is disabled.");
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- ASSERT_NO_FATAL_FAILURE(InitState());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- struct TestCase {
- VkSampleCountFlagBits color_samples;
- VkSampleCountFlagBits depth_samples;
- VkSampleCountFlagBits raster_samples;
- bool positiveTest;
- };
-
- std::vector<TestCase> test_cases = {
- {VK_SAMPLE_COUNT_2_BIT, VK_SAMPLE_COUNT_4_BIT, VK_SAMPLE_COUNT_8_BIT,
- false}, // Fails vkCreateRenderPass and vkCreateGraphicsPipeline
- {VK_SAMPLE_COUNT_4_BIT, VK_SAMPLE_COUNT_4_BIT, VK_SAMPLE_COUNT_8_BIT, false}, // Fails vkCreateGraphicsPipeline
- {VK_SAMPLE_COUNT_4_BIT, VK_SAMPLE_COUNT_4_BIT, VK_SAMPLE_COUNT_4_BIT, true} // Pass
- };
-
- for (const auto &test_case : test_cases) {
- VkAttachmentDescription att[2] = {{}, {}};
- att[0].format = VK_FORMAT_R8G8B8A8_UNORM;
- att[0].samples = test_case.color_samples;
- att[0].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
- att[0].finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
-
- att[1].format = VK_FORMAT_D24_UNORM_S8_UINT;
- att[1].samples = test_case.depth_samples;
- att[1].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
- att[1].finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
-
- VkAttachmentReference cr = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
- VkAttachmentReference dr = {1, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL};
-
- VkSubpassDescription sp = {};
- sp.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
- sp.colorAttachmentCount = 1;
- sp.pColorAttachments = &cr;
- sp.pResolveAttachments = NULL;
- sp.pDepthStencilAttachment = &dr;
-
- VkRenderPassCreateInfo rpi = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO};
- rpi.attachmentCount = 2;
- rpi.pAttachments = att;
- rpi.subpassCount = 1;
- rpi.pSubpasses = &sp;
-
- VkRenderPass rp;
-
- if (test_case.color_samples == test_case.depth_samples) {
- m_errorMonitor->ExpectSuccess();
- } else {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkSubpassDescription-pDepthStencilAttachment-01418");
- }
-
- VkResult err = vkCreateRenderPass(m_device->device(), &rpi, nullptr, &rp);
-
- if (test_case.color_samples == test_case.depth_samples) {
- m_errorMonitor->VerifyNotFound();
- } else {
- m_errorMonitor->VerifyFound();
- continue;
- }
-
- ASSERT_VK_SUCCESS(err);
-
- VkPipelineDepthStencilStateCreateInfo ds = {VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO};
-
- const auto break_samples = [&rp, &ds, &test_case](CreatePipelineHelper &helper) {
- helper.pipe_ms_state_ci_.rasterizationSamples = test_case.raster_samples;
-
- helper.gp_ci_.renderPass = rp;
- helper.gp_ci_.pDepthStencilState = &ds;
- };
-
- CreatePipelineHelper::OneshotTest(*this, break_samples, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkGraphicsPipelineCreateInfo-subpass-00757", test_case.positiveTest);
-
- vkDestroyRenderPass(m_device->device(), rp, nullptr);
- }
-}
-
-TEST_F(VkLayerTest, FragmentCoverageToColorNV) {
- TEST_DESCRIPTION("Verify VK_NV_fragment_coverage_to_color.");
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- if (DeviceExtensionSupported(gpu(), nullptr, VK_NV_FRAGMENT_COVERAGE_TO_COLOR_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_NV_FRAGMENT_COVERAGE_TO_COLOR_EXTENSION_NAME);
- } else {
- printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_NV_FRAGMENT_COVERAGE_TO_COLOR_EXTENSION_NAME);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitState());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- struct TestCase {
- VkFormat format;
- VkBool32 enabled;
- uint32_t location;
- bool positive;
- };
-
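-    // The coverage-to-color location must reference a valid color attachment with a single-component
-    // integer format; the failing cases below use a non-integer format, an unused attachment, or an
-    // out-of-range location.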
- const std::array<TestCase, 9> test_cases = {{
- {VK_FORMAT_R8G8B8A8_UNORM, VK_FALSE, 0, true},
- {VK_FORMAT_R8_UINT, VK_TRUE, 1, true},
- {VK_FORMAT_R16_UINT, VK_TRUE, 1, true},
- {VK_FORMAT_R16_SINT, VK_TRUE, 1, true},
- {VK_FORMAT_R32_UINT, VK_TRUE, 1, true},
- {VK_FORMAT_R32_SINT, VK_TRUE, 1, true},
- {VK_FORMAT_R32_SINT, VK_TRUE, 2, false},
- {VK_FORMAT_R8_SINT, VK_TRUE, 3, false},
- {VK_FORMAT_R8G8B8A8_UNORM, VK_TRUE, 1, false},
- }};
-
- for (const auto &test_case : test_cases) {
- std::array<VkAttachmentDescription, 2> att = {{{}, {}}};
- att[0].format = VK_FORMAT_R8G8B8A8_UNORM;
- att[0].samples = VK_SAMPLE_COUNT_1_BIT;
- att[0].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
- att[0].finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
-
- att[1].format = VK_FORMAT_R8G8B8A8_UNORM;
- att[1].samples = VK_SAMPLE_COUNT_1_BIT;
- att[1].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
- att[1].finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
-
- if (test_case.location < att.size()) {
- att[test_case.location].format = test_case.format;
- }
-
- const std::array<VkAttachmentReference, 3> cr = {{{0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
- {1, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
- {VK_ATTACHMENT_UNUSED, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL}}};
-
- VkSubpassDescription sp = {};
- sp.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
- sp.colorAttachmentCount = cr.size();
- sp.pColorAttachments = cr.data();
-
- VkRenderPassCreateInfo rpi = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO};
- rpi.attachmentCount = att.size();
- rpi.pAttachments = att.data();
- rpi.subpassCount = 1;
- rpi.pSubpasses = &sp;
-
- const std::array<VkPipelineColorBlendAttachmentState, 3> cba = {{{}, {}, {}}};
-
- VkPipelineColorBlendStateCreateInfo cbi = {VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO};
- cbi.attachmentCount = cba.size();
- cbi.pAttachments = cba.data();
-
- VkRenderPass rp;
- VkResult err = vkCreateRenderPass(m_device->device(), &rpi, nullptr, &rp);
- ASSERT_VK_SUCCESS(err);
-
- VkPipelineCoverageToColorStateCreateInfoNV cci = {VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV};
-
- const auto break_samples = [&cci, &cbi, &rp, &test_case](CreatePipelineHelper &helper) {
- cci.coverageToColorEnable = test_case.enabled;
- cci.coverageToColorLocation = test_case.location;
-
- helper.pipe_ms_state_ci_.pNext = &cci;
- helper.gp_ci_.renderPass = rp;
- helper.gp_ci_.pColorBlendState = &cbi;
- };
-
- CreatePipelineHelper::OneshotTest(*this, break_samples, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkPipelineCoverageToColorStateCreateInfoNV-coverageToColorEnable-01404",
- test_case.positive);
-
- vkDestroyRenderPass(m_device->device(), rp, nullptr);
- }
-}
-
-TEST_F(VkLayerTest, ViewportSwizzleNV) {
- TEST_DESCRIPTION("Verify VK_NV_viewprot_swizzle.");
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- if (DeviceExtensionSupported(gpu(), nullptr, VK_NV_VIEWPORT_SWIZZLE_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_NV_VIEWPORT_SWIZZLE_EXTENSION_NAME);
- } else {
- printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_NV_VIEWPORT_SWIZZLE_EXTENSION_NAME);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitState());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
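-    // Swizzle values outside the VkViewportCoordinateSwizzleNV range should trigger the x/y/z/w
-    // parameter VUIDs listed below.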
- VkViewportSwizzleNV invalid_swizzles = {
- VkViewportCoordinateSwizzleNV(-1),
- VkViewportCoordinateSwizzleNV(-1),
- VkViewportCoordinateSwizzleNV(-1),
- VkViewportCoordinateSwizzleNV(-1),
- };
-
- VkPipelineViewportSwizzleStateCreateInfoNV vp_swizzle_state = {
- VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV};
- vp_swizzle_state.viewportCount = 1;
- vp_swizzle_state.pViewportSwizzles = &invalid_swizzles;
-
- const std::vector<std::string> expected_vuids = {"VUID-VkViewportSwizzleNV-x-parameter", "VUID-VkViewportSwizzleNV-y-parameter",
- "VUID-VkViewportSwizzleNV-z-parameter",
- "VUID-VkViewportSwizzleNV-w-parameter"};
-
- auto break_swizzles = [&vp_swizzle_state](CreatePipelineHelper &helper) { helper.vp_state_ci_.pNext = &vp_swizzle_state; };
-
- CreatePipelineHelper::OneshotTest(*this, break_swizzles, VK_DEBUG_REPORT_ERROR_BIT_EXT, expected_vuids);
-
- struct TestCase {
- VkBool32 rasterizerDiscardEnable;
- uint32_t vp_count;
-        uint32_t swizzle_vp_count;
- bool positive;
- };
-
- const std::array<TestCase, 3> test_cases = {{{VK_TRUE, 1, 2, true}, {VK_FALSE, 1, 1, true}, {VK_FALSE, 1, 2, false}}};
-
- std::array<VkViewportSwizzleNV, 2> swizzles = {
- {{VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV, VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV,
- VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV, VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV},
- {VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV, VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV,
- VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV, VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV}}};
-
- for (const auto &test_case : test_cases) {
- assert(test_case.vp_count <= swizzles.size());
-
-        vp_swizzle_state.viewportCount = test_case.swizzle_vp_count;
- vp_swizzle_state.pViewportSwizzles = swizzles.data();
-
- auto break_vp_count = [&vp_swizzle_state, &test_case](CreatePipelineHelper &helper) {
- helper.rs_state_ci_.rasterizerDiscardEnable = test_case.rasterizerDiscardEnable;
- helper.vp_state_ci_.viewportCount = test_case.vp_count;
-
- helper.vp_state_ci_.pNext = &vp_swizzle_state;
- };
-
- CreatePipelineHelper::OneshotTest(*this, break_vp_count, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkPipelineViewportSwizzleStateCreateInfoNV-viewportCount-01215",
- test_case.positive);
- }
-}
-
-TEST_F(VkLayerTest, CooperativeMatrixNV) {
- TEST_DESCRIPTION("Test VK_NV_cooperative_matrix.");
-
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- } else {
- printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
- VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- std::array<const char *, 2> required_device_extensions = {
- {VK_NV_COOPERATIVE_MATRIX_EXTENSION_NAME, VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME}};
- for (auto device_extension : required_device_extensions) {
- if (DeviceExtensionSupported(gpu(), nullptr, device_extension)) {
- m_device_extension_names.push_back(device_extension);
- } else {
- printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, device_extension);
- return;
- }
- }
-
- if (DeviceIsMockICD() || DeviceSimulation()) {
- printf("%s Test not supported by MockICD, skipping tests\n", kSkipPrefix);
- return;
- }
-
- PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
- (PFN_vkGetPhysicalDeviceFeatures2KHR)vkGetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
- ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
-
- auto float16_features = lvl_init_struct<VkPhysicalDeviceFloat16Int8FeaturesKHR>();
- auto cooperative_matrix_features = lvl_init_struct<VkPhysicalDeviceCooperativeMatrixFeaturesNV>(&float16_features);
- auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&cooperative_matrix_features);
- vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
-
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
-
- std::vector<VkDescriptorSetLayoutBinding> bindings(0);
- const VkDescriptorSetLayoutObj dsl(m_device, bindings);
- const VkPipelineLayoutObj pl(m_device, {&dsl});
-
- char const *csSource =
- "#version 450\n"
- "#extension GL_NV_cooperative_matrix : enable\n"
- "#extension GL_KHR_shader_subgroup_basic : enable\n"
- "#extension GL_KHR_memory_scope_semantics : enable\n"
- "#extension GL_EXT_shader_explicit_arithmetic_types_float16 : enable\n"
- "layout(local_size_x = 32) in;\n"
- "layout(constant_id = 0) const uint C0 = 1;"
- "layout(constant_id = 1) const uint C1 = 1;"
- "void main() {\n"
- // Bad type
- " fcoopmatNV<16, gl_ScopeSubgroup, 3, 5> badSize = fcoopmatNV<16, gl_ScopeSubgroup, 3, 5>(float16_t(0.0));\n"
- // Not a valid multiply when C0 != C1
- " fcoopmatNV<16, gl_ScopeSubgroup, C0, C1> A;\n"
- " fcoopmatNV<16, gl_ScopeSubgroup, C0, C1> B;\n"
- " fcoopmatNV<16, gl_ScopeSubgroup, C0, C1> C;\n"
- " coopMatMulAddNV(A, B, C);\n"
- "}\n";
-
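-    // The specialization constants set C0 = 16 and C1 = 8, so A, B and C get mismatched dimensions and
-    // the coopMatMulAddNV call above is invalid, in addition to the unsupported matrix type.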
- const uint32_t specData[] = {
- 16,
- 8,
- };
- VkSpecializationMapEntry entries[] = {
- {0, sizeof(uint32_t) * 0, sizeof(uint32_t)},
- {1, sizeof(uint32_t) * 1, sizeof(uint32_t)},
- };
-
- VkSpecializationInfo specInfo = {
- 2,
- entries,
- sizeof(specData),
- specData,
- };
-
- CreateComputePipelineHelper pipe(*this);
- pipe.InitInfo();
- pipe.cs_.reset(new VkShaderObj(m_device, csSource, VK_SHADER_STAGE_COMPUTE_BIT, this, "main", false, &specInfo));
- pipe.InitState();
- pipe.pipeline_layout_ = VkPipelineLayoutObj(m_device, {});
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "UNASSIGNED-CoreValidation-Shader-CooperativeMatrixType");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "UNASSIGNED-CoreValidation-Shader-CooperativeMatrixMulAdd");
- pipe.CreateComputePipeline();
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, SubgroupSupportedOperations) {
- TEST_DESCRIPTION("Test shader validation support for subgroup supportedOperations.");
-
- SetTargetApiVersion(VK_API_VERSION_1_1);
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- ASSERT_NO_FATAL_FAILURE(InitState());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- // 1.1 and up only.
- if (m_device->props.apiVersion < VK_API_VERSION_1_1) {
- printf("%s Vulkan 1.1 not supported, skipping test\n", kSkipPrefix);
- return;
- }
-
- if (DeviceIsMockICD() || DeviceSimulation()) {
- printf("%s DevSim doesn't support Vulkan 1.1, skipping tests\n", kSkipPrefix);
- return;
- }
-
- VkPhysicalDeviceSubgroupProperties subgroup_prop = GetSubgroupProperties(instance(), gpu());
-
- // CreatePipelineLayout
- VkPipelineLayoutCreateInfo pipeline_layout_ci = {};
- pipeline_layout_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
- pipeline_layout_ci.pNext = NULL;
- pipeline_layout_ci.flags = 0;
- pipeline_layout_ci.setLayoutCount = 0;
- pipeline_layout_ci.pSetLayouts = VK_NULL_HANDLE;
- VkPipelineLayout pipeline_layout = VK_NULL_HANDLE;
- vkCreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
-
- const std::pair<const char *, VkSubgroupFeatureFlagBits> capabilities[] = {
- {"GroupNonUniform", VK_SUBGROUP_FEATURE_BASIC_BIT},
- {"GroupNonUniformVote", VK_SUBGROUP_FEATURE_VOTE_BIT},
- {"GroupNonUniformArithmetic", VK_SUBGROUP_FEATURE_ARITHMETIC_BIT},
- {"GroupNonUniformBallot", VK_SUBGROUP_FEATURE_BALLOT_BIT},
- {"GroupNonUniformShuffle", VK_SUBGROUP_FEATURE_SHUFFLE_BIT},
- {"GroupNonUniformShuffleRelative", VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT},
- {"GroupNonUniformClustered", VK_SUBGROUP_FEATURE_CLUSTERED_BIT},
- {"GroupNonUniformQuad", VK_SUBGROUP_FEATURE_QUAD_BIT},
- };
-
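-    // For each capability, assemble minimal compute, vertex and fragment shaders that declare it, and
-    // expect errors whenever the device does not advertise the matching supportedOperations or
-    // supportedStages bit.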
- for (auto &capability : capabilities) {
- std::string spv_source[3];
-
- spv_source[0] = "OpCapability " + std::string(capability.first) + "\n" + R"(
- OpCapability Shader
- OpMemoryModel Logical GLSL450
- OpEntryPoint GLCompute %main "main"
- OpExecutionMode %main LocalSize 1 1 1
- %void = OpTypeVoid
- %func = OpTypeFunction %void
- %main = OpFunction %void None %func
- %40 = OpLabel
- OpReturn
- OpFunctionEnd
- )";
-
- spv_source[1] = "OpCapability " + std::string(capability.first) + "\n" + R"(
- OpCapability Shader
- OpMemoryModel Logical GLSL450
- OpEntryPoint Vertex %main "main"
- %void = OpTypeVoid
- %func = OpTypeFunction %void
- %main = OpFunction %void None %func
- %40 = OpLabel
- OpReturn
- OpFunctionEnd
- )";
-
- spv_source[2] = "OpCapability " + std::string(capability.first) + "\n" + R"(
- OpCapability Shader
- OpMemoryModel Logical GLSL450
- OpEntryPoint Fragment %main "main"
- OpExecutionMode %main OriginUpperLeft
- %void = OpTypeVoid
- %func = OpTypeFunction %void
- %main = OpFunction %void None %func
- %40 = OpLabel
- OpReturn
- OpFunctionEnd
- )";
-
- VkShaderModule shader_module[3];
- VkPipelineShaderStageCreateInfo stage[3];
-
- for (int i = 0; i < 3; ++i) {
- // CreateShaderModule
- std::vector<unsigned int> spv;
- VkShaderModuleCreateInfo module_create_info;
- module_create_info.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
- module_create_info.pNext = NULL;
- ASMtoSPV(SPV_ENV_VULKAN_1_1, 0, spv_source[i].data(), spv);
- module_create_info.pCode = spv.data();
- module_create_info.codeSize = spv.size() * sizeof(unsigned int);
- module_create_info.flags = 0;
-
- VkResult result = vkCreateShaderModule(m_device->handle(), &module_create_info, NULL, &shader_module[i]);
-
- // NOTE: It appears that for the case of invalid capabilities some drivers (recent AMD) fail at CreateShaderModule time.
- // Likely the capability test should be moved up to CSM time, implementing ShaderModuleCreateInfo-pCode-01090
- // Note(2) -- yes I truncated the above VUID s.t. the VUID checking tools would not catch it.
- if (result != VK_SUCCESS) shader_module[i] = VK_NULL_HANDLE;
-
- stage[i].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
- stage[i].pNext = nullptr;
- stage[i].flags = 0;
- // stage[i].stage initialized later;
- stage[i].module = shader_module[i];
- stage[i].pName = "main";
- stage[i].pSpecializationInfo = nullptr;
- }
-
- // CreateComputePipelines
- VkComputePipelineCreateInfo pipeline_info = {};
- pipeline_info.sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO;
- pipeline_info.pNext = nullptr;
- pipeline_info.flags = 0;
- pipeline_info.layout = pipeline_layout;
- pipeline_info.basePipelineHandle = VK_NULL_HANDLE;
- pipeline_info.basePipelineIndex = -1;
- pipeline_info.stage = stage[0];
- pipeline_info.stage.stage = VK_SHADER_STAGE_COMPUTE_BIT;
-
- if (pipeline_info.stage.module != VK_NULL_HANDLE) {
- if (!(subgroup_prop.supportedOperations & capability.second)) {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VkPhysicalDeviceSubgroupProperties::supportedOperations");
- }
- if (!(subgroup_prop.supportedStages & VK_SHADER_STAGE_COMPUTE_BIT)) {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VkPhysicalDeviceSubgroupProperties::supportedStages");
- }
-
- VkPipeline cs_pipeline;
- vkCreateComputePipelines(device(), VK_NULL_HANDLE, 1, &pipeline_info, nullptr, &cs_pipeline);
- vkDestroyPipeline(device(), cs_pipeline, nullptr);
-
- m_errorMonitor->VerifyFound();
- }
-
- if ((stage[1].module != VK_NULL_HANDLE) && (stage[2].module != VK_NULL_HANDLE)) {
- stage[1].stage = VK_SHADER_STAGE_VERTEX_BIT;
- stage[2].stage = VK_SHADER_STAGE_FRAGMENT_BIT;
-
- VkPipelineObj pipe(m_device);
- pipe.AddShader(stage[1]);
- pipe.AddShader(stage[2]);
- pipe.AddDefaultColorAttachment();
-
- if (!(subgroup_prop.supportedOperations & capability.second)) {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VkPhysicalDeviceSubgroupProperties::supportedOperations");
- }
- if (!(subgroup_prop.supportedStages & VK_SHADER_STAGE_VERTEX_BIT)) {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VkPhysicalDeviceSubgroupProperties::supportedStages");
- }
- if (!(subgroup_prop.supportedOperations & capability.second)) {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VkPhysicalDeviceSubgroupProperties::supportedOperations");
- }
- if (!(subgroup_prop.supportedStages & VK_SHADER_STAGE_FRAGMENT_BIT)) {
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VkPhysicalDeviceSubgroupProperties::supportedStages");
- }
- pipe.CreateVKPipeline(pipeline_layout, renderPass());
-
- m_errorMonitor->VerifyFound();
- }
-
- vkDestroyShaderModule(device(), shader_module[0], nullptr);
- vkDestroyShaderModule(device(), shader_module[1], nullptr);
- vkDestroyShaderModule(device(), shader_module[2], nullptr);
- }
-
- vkDestroyPipelineLayout(device(), pipeline_layout, nullptr);
-}
-
-TEST_F(VkLayerTest, SubgroupRequired) {
- TEST_DESCRIPTION("Test that the minimum required functionality for subgroups is present.");
-
- SetTargetApiVersion(VK_API_VERSION_1_1);
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- // 1.1 and up only.
- if (m_device->props.apiVersion < VK_API_VERSION_1_1) {
- printf("%s Vulkan 1.1 not supported, skipping test\n", kSkipPrefix);
- return;
- }
-
- if (DeviceIsMockICD() || DeviceSimulation()) {
- printf("%s DevSim doesn't support Vulkan 1.1, skipping tests\n", kSkipPrefix);
- return;
- }
-
- VkPhysicalDeviceSubgroupProperties subgroup_prop = GetSubgroupProperties(instance(), gpu());
-
- auto queue_family_properties = m_device->phy().queue_properties();
-
- bool foundGraphics = false;
- bool foundCompute = false;
-
- for (auto queue_family : queue_family_properties) {
- if (queue_family.queueFlags & VK_QUEUE_COMPUTE_BIT) {
- foundCompute = true;
- break;
- }
-
- if (queue_family.queueFlags & VK_QUEUE_GRAPHICS_BIT) {
- foundGraphics = true;
- }
- }
-
- if (!(foundGraphics || foundCompute)) return;
-
- ASSERT_GE(subgroup_prop.subgroupSize, 1u);
-
- if (foundCompute) {
- ASSERT_TRUE(subgroup_prop.supportedStages & VK_SHADER_STAGE_COMPUTE_BIT);
- }
-
- ASSERT_TRUE(subgroup_prop.supportedOperations & VK_SUBGROUP_FEATURE_BASIC_BIT);
-}
-
-TEST_F(VkLayerTest, GraphicsPipelineStageCreationFeedbackCount) {
- TEST_DESCRIPTION("Test graphics pipeline feedback stage count check.");
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- if (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME);
- } else {
- printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitState());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- auto feedback_info = lvl_init_struct<VkPipelineCreationFeedbackCreateInfoEXT>();
- VkPipelineCreationFeedbackEXT feedbacks[3] = {};
-
- feedback_info.pPipelineCreationFeedback = &feedbacks[0];
- feedback_info.pipelineStageCreationFeedbackCount = 2;
- feedback_info.pPipelineStageCreationFeedbacks = &feedbacks[1];
-
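-    // The helper's default pipeline uses a vertex and a fragment stage, so a feedback count of 2 is
-    // expected to pass while a count of 1 should trigger the VUID below.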
- auto set_feedback = [&feedback_info](CreatePipelineHelper &helper) { helper.gp_ci_.pNext = &feedback_info; };
-
- CreatePipelineHelper::OneshotTest(*this, set_feedback, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkPipelineCreationFeedbackCreateInfoEXT-pipelineStageCreationFeedbackCount-02668",
- true);
-
- feedback_info.pipelineStageCreationFeedbackCount = 1;
- CreatePipelineHelper::OneshotTest(*this, set_feedback, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkPipelineCreationFeedbackCreateInfoEXT-pipelineStageCreationFeedbackCount-02668",
- false);
-}
-
-TEST_F(VkLayerTest, ComputePipelineStageCreationFeedbackCount) {
- TEST_DESCRIPTION("Test compute pipeline feedback stage count check.");
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- if (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME);
- } else {
- printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitState());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkPipelineCreationFeedbackCreateInfoEXT feedback_info = {};
- VkPipelineCreationFeedbackEXT feedbacks[3] = {};
- feedback_info.sType = VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT;
- feedback_info.pPipelineCreationFeedback = &feedbacks[0];
- feedback_info.pipelineStageCreationFeedbackCount = 1;
- feedback_info.pPipelineStageCreationFeedbacks = &feedbacks[1];
-
- const auto set_info = [&](CreateComputePipelineHelper &helper) { helper.cp_ci_.pNext = &feedback_info; };
-
- CreateComputePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT, "", true);
-
- feedback_info.pipelineStageCreationFeedbackCount = 2;
- CreateComputePipelineHelper::OneshotTest(
- *this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkPipelineCreationFeedbackCreateInfoEXT-pipelineStageCreationFeedbackCount-02669");
-}
-
-TEST_F(VkLayerTest, NVRayTracingPipelineStageCreationFeedbackCount) {
- TEST_DESCRIPTION("Test NV ray tracing pipeline feedback stage count check.");
-
- if (!CreateNVRayTracingPipelineHelper::InitInstanceExtensions(*this, m_instance_extension_names)) {
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- if (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME);
- } else {
- printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME);
- return;
- }
-
- if (!CreateNVRayTracingPipelineHelper::InitDeviceExtensions(*this, m_device_extension_names)) {
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- auto feedback_info = lvl_init_struct<VkPipelineCreationFeedbackCreateInfoEXT>();
- VkPipelineCreationFeedbackEXT feedbacks[4] = {};
-
- feedback_info.pPipelineCreationFeedback = &feedbacks[0];
- feedback_info.pipelineStageCreationFeedbackCount = 2;
- feedback_info.pPipelineStageCreationFeedbacks = &feedbacks[1];
-
- auto set_feedback = [&feedback_info](CreateNVRayTracingPipelineHelper &helper) { helper.rp_ci_.pNext = &feedback_info; };
-
- feedback_info.pipelineStageCreationFeedbackCount = 3;
- CreateNVRayTracingPipelineHelper::OneshotPositiveTest(*this, set_feedback);
-
- feedback_info.pipelineStageCreationFeedbackCount = 2;
- CreateNVRayTracingPipelineHelper::OneshotTest(
- *this, set_feedback, "VUID-VkPipelineCreationFeedbackCreateInfoEXT-pipelineStageCreationFeedbackCount-02670");
-}
-
-TEST_F(VkLayerTest, CreatePipelineCheckShaderImageFootprintEnabled) {
- TEST_DESCRIPTION("Create a pipeline requiring the shader image footprint feature which has not enabled on the device.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- if (!DeviceExtensionSupported(gpu(), nullptr, VK_NV_SHADER_IMAGE_FOOTPRINT_EXTENSION_NAME)) {
- printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_NV_SHADER_IMAGE_FOOTPRINT_EXTENSION_NAME);
- return;
- }
-
- std::vector<const char *> device_extension_names;
- auto features = m_device->phy().features();
-
- // Disable the image footprint feature.
- auto image_footprint_features = lvl_init_struct<VkPhysicalDeviceShaderImageFootprintFeaturesNV>();
- image_footprint_features.imageFootprint = VK_FALSE;
-
- VkDeviceObj test_device(0, gpu(), device_extension_names, &features, &image_footprint_features);
-
- char const *fsSource =
- "#version 450\n"
- "#extension GL_NV_shader_texture_footprint : require\n"
- "layout(set=0, binding=0) uniform sampler2D s;\n"
- "layout(location=0) out vec4 color;\n"
- "void main(){\n"
- " gl_TextureFootprint2DNV footprint;\n"
- " if (textureFootprintNV(s, vec2(1.0), 5, false, footprint)) {\n"
- " color = vec4(0.0, 1.0, 0.0, 1.0);\n"
- " } else {\n"
- " color = vec4(vec2(footprint.anchor), vec2(footprint.offset));\n"
- " }\n"
- "}\n";
-
- VkShaderObj vs(&test_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj fs(&test_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- VkRenderpassObj render_pass(&test_device);
-
- VkPipelineObj pipe(&test_device);
- pipe.AddDefaultColorAttachment();
- pipe.AddShader(&vs);
- pipe.AddShader(&fs);
-
- VkDescriptorSetLayoutBinding binding = {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr};
- const VkDescriptorSetLayoutObj ds_layout(&test_device, {binding});
- ASSERT_TRUE(ds_layout.initialized());
-
- const VkPipelineLayoutObj pipeline_layout(&test_device, {&ds_layout});
-
- m_errorMonitor->SetDesiredFailureMsg(
- VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "Shader requires VkPhysicalDeviceShaderImageFootprintFeaturesNV::imageFootprint but is not enabled on the device");
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "Shader requires extension VkPhysicalDeviceShaderImageFootprintFeaturesNV::imageFootprint "
- "but is not enabled on the device");
- pipe.CreateVKPipeline(pipeline_layout.handle(), render_pass.handle());
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, CreatePipelineCheckFragmentShaderBarycentricEnabled) {
- TEST_DESCRIPTION("Create a pipeline requiring the fragment shader barycentric feature which has not enabled on the device.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- std::vector<const char *> device_extension_names;
- auto features = m_device->phy().features();
-
- // Disable the fragment shader barycentric feature.
- auto fragment_shader_barycentric_features = lvl_init_struct<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV>();
- fragment_shader_barycentric_features.fragmentShaderBarycentric = VK_FALSE;
-
- VkDeviceObj test_device(0, gpu(), device_extension_names, &features, &fragment_shader_barycentric_features);
-
- char const *fsSource =
- "#version 450\n"
- "#extension GL_NV_fragment_shader_barycentric : require\n"
- "layout(location=0) out float value;\n"
- "void main(){\n"
- " value = gl_BaryCoordNV.x;\n"
- "}\n";
-
- VkShaderObj vs(&test_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj fs(&test_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- VkRenderpassObj render_pass(&test_device);
-
- VkPipelineObj pipe(&test_device);
- pipe.AddDefaultColorAttachment();
- pipe.AddShader(&vs);
- pipe.AddShader(&fs);
-
- const VkPipelineLayoutObj pipeline_layout(&test_device);
-
- m_errorMonitor->SetDesiredFailureMsg(
- VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "Shader requires VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV::fragmentShaderBarycentric but is not enabled on the "
- "device");
- m_errorMonitor->SetDesiredFailureMsg(
- VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "Shader requires extension VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV::fragmentShaderBarycentric but is not "
- "enabled on the device");
- pipe.CreateVKPipeline(pipeline_layout.handle(), render_pass.handle());
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, CreatePipelineCheckComputeShaderDerivativesEnabled) {
- TEST_DESCRIPTION("Create a pipeline requiring the compute shader derivatives feature which has not enabled on the device.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- std::vector<const char *> device_extension_names;
- auto features = m_device->phy().features();
-
- // Disable the compute shader derivatives features.
- auto compute_shader_derivatives_features = lvl_init_struct<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV>();
- compute_shader_derivatives_features.computeDerivativeGroupLinear = VK_FALSE;
- compute_shader_derivatives_features.computeDerivativeGroupQuads = VK_FALSE;
-
- VkDeviceObj test_device(0, gpu(), device_extension_names, &features, &compute_shader_derivatives_features);
-
- VkDescriptorSetLayoutBinding binding = {0, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr};
- const VkDescriptorSetLayoutObj dsl(&test_device, {binding});
- const VkPipelineLayoutObj pl(&test_device, {&dsl});
-
- char const *csSource =
- "#version 450\n"
- "#extension GL_NV_compute_shader_derivatives : require\n"
- "\n"
- "layout(local_size_x=2, local_size_y=4) in;\n"
- "layout(derivative_group_quadsNV) in;\n"
- "\n"
- "layout(set=0, binding=0) buffer InputOutputBuffer {\n"
- " float values[];\n"
- "};\n"
- "\n"
- "void main(){\n"
- " values[gl_LocalInvocationIndex] = dFdx(values[gl_LocalInvocationIndex]);"
- "}\n";
-
- VkShaderObj cs(&test_device, csSource, VK_SHADER_STAGE_COMPUTE_BIT, this);
-
- VkComputePipelineCreateInfo cpci = {VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
- nullptr,
- 0,
- {VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO, nullptr, 0,
- VK_SHADER_STAGE_COMPUTE_BIT, cs.handle(), "main", nullptr},
- pl.handle(),
- VK_NULL_HANDLE,
- -1};
-
- m_errorMonitor->SetDesiredFailureMsg(
- VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "Shader requires VkPhysicalDeviceComputeShaderDerivativesFeaturesNV::computeDerivativeGroupQuads but is not enabled on the "
- "device");
- m_errorMonitor->SetDesiredFailureMsg(
- VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "Shader requires extension VkPhysicalDeviceComputeShaderDerivativesFeaturesNV::computeDerivativeGroupQuads but is not "
- "enabled on the device");
-
- VkPipeline pipe = VK_NULL_HANDLE;
- vkCreateComputePipelines(test_device.device(), VK_NULL_HANDLE, 1, &cpci, nullptr, &pipe);
- m_errorMonitor->VerifyFound();
- vkDestroyPipeline(test_device.device(), pipe, nullptr);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, CreatePipelineCheckFragmentShaderInterlockEnabled) {
- TEST_DESCRIPTION("Create a pipeline requiring the fragment shader interlock feature which has not enabled on the device.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- std::vector<const char *> device_extension_names;
- if (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_FRAGMENT_SHADER_INTERLOCK_EXTENSION_NAME)) {
-        // Note: we intentionally do not add the required extension to the device extension list
-        // in order to create the error below.
- } else {
-        // We skip this test if the extension is not supported by the driver, as in some cases this causes
-        // vkCreateShaderModule to fail without generating an error message.
- printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_EXT_FRAGMENT_SHADER_INTERLOCK_EXTENSION_NAME);
- return;
- }
-
- auto features = m_device->phy().features();
-
- // Disable the fragment shader interlock feature.
- auto fragment_shader_interlock_features = lvl_init_struct<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT>();
- fragment_shader_interlock_features.fragmentShaderSampleInterlock = VK_FALSE;
- fragment_shader_interlock_features.fragmentShaderPixelInterlock = VK_FALSE;
- fragment_shader_interlock_features.fragmentShaderShadingRateInterlock = VK_FALSE;
-
- VkDeviceObj test_device(0, gpu(), device_extension_names, &features, &fragment_shader_interlock_features);
-
- char const *fsSource =
- "#version 450\n"
- "#extension GL_ARB_fragment_shader_interlock : require\n"
- "layout(sample_interlock_ordered) in;\n"
- "void main(){\n"
- "}\n";
-
- VkShaderObj vs(&test_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj fs(&test_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- VkRenderpassObj render_pass(&test_device);
-
- VkPipelineObj pipe(&test_device);
- pipe.AddDefaultColorAttachment();
- pipe.AddShader(&vs);
- pipe.AddShader(&fs);
-
- const VkPipelineLayoutObj pipeline_layout(&test_device);
-
- m_errorMonitor->SetDesiredFailureMsg(
- VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "Shader requires VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT::fragmentShaderSampleInterlock but is not enabled on "
- "the device");
- m_errorMonitor->SetDesiredFailureMsg(
- VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "Shader requires extension VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT::fragmentShaderSampleInterlock but is not "
- "enabled on the device");
- pipe.CreateVKPipeline(pipeline_layout.handle(), render_pass.handle());
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, CreatePipelineCheckDemoteToHelperInvocation) {
- TEST_DESCRIPTION("Create a pipeline requiring the demote to helper invocation feature which has not enabled on the device.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- std::vector<const char *> device_extension_names;
- if (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_EXTENSION_NAME)) {
-        // Note: we intentionally do not add the required extension to the device extension list
-        // in order to create the error below.
- } else {
-        // We skip this test if the extension is not supported by the driver, as in some cases this causes
-        // vkCreateShaderModule to fail without generating an error message.
- printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_EXTENSION_NAME);
- return;
- }
-
- auto features = m_device->phy().features();
-
- // Disable the demote to helper invocation feature.
- auto demote_features = lvl_init_struct<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>();
- demote_features.shaderDemoteToHelperInvocation = VK_FALSE;
-
- VkDeviceObj test_device(0, gpu(), device_extension_names, &features, &demote_features);
-
- char const *fsSource =
- "#version 450\n"
- "#extension GL_EXT_demote_to_helper_invocation : require\n"
- "void main(){\n"
- " demote;\n"
- "}\n";
-
- VkShaderObj vs(&test_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj fs(&test_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- VkRenderpassObj render_pass(&test_device);
-
- VkPipelineObj pipe(&test_device);
- pipe.AddDefaultColorAttachment();
- pipe.AddShader(&vs);
- pipe.AddShader(&fs);
-
- const VkPipelineLayoutObj pipeline_layout(&test_device);
-
- m_errorMonitor->SetDesiredFailureMsg(
- VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "Shader requires VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT::shaderDemoteToHelperInvocation but is not "
- "enabled on "
- "the device");
- m_errorMonitor->SetDesiredFailureMsg(
- VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "Shader requires extension VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT::shaderDemoteToHelperInvocation but "
- "is not "
- "enabled on the device");
- pipe.CreateVKPipeline(pipeline_layout.handle(), render_pass.handle());
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, CreatePipelineCheckLineRasterization) {
- TEST_DESCRIPTION("Test VK_EXT_line_rasterization state against feature enables.");
-
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- } else {
- printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
- VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- std::array<const char *, 1> required_device_extensions = {{VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME}};
- for (auto device_extension : required_device_extensions) {
- if (DeviceExtensionSupported(gpu(), nullptr, device_extension)) {
- m_device_extension_names.push_back(device_extension);
- } else {
- printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, device_extension);
- return;
- }
- }
-
- PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
- (PFN_vkGetPhysicalDeviceFeatures2KHR)vkGetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
- ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
-
- auto line_rasterization_features = lvl_init_struct<VkPhysicalDeviceLineRasterizationFeaturesEXT>();
- auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&line_rasterization_features);
- vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
-
- line_rasterization_features.rectangularLines = VK_FALSE;
- line_rasterization_features.bresenhamLines = VK_FALSE;
- line_rasterization_features.smoothLines = VK_FALSE;
- line_rasterization_features.stippledRectangularLines = VK_FALSE;
- line_rasterization_features.stippledBresenhamLines = VK_FALSE;
- line_rasterization_features.stippledSmoothLines = VK_FALSE;
-
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
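-    // With every line rasterization feature disabled above, each lineRasterizationMode / stippledLineEnable
-    // combination below is expected to produce the listed VUIDs.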
- CreatePipelineHelper::OneshotTest(
- *this,
- [&](CreatePipelineHelper &helper) {
- helper.line_state_ci_.lineRasterizationMode = VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT;
- helper.pipe_ms_state_ci_.alphaToCoverageEnable = VK_TRUE;
- },
- VK_DEBUG_REPORT_ERROR_BIT_EXT,
- std::vector<const char *>{"VUID-VkGraphicsPipelineCreateInfo-lineRasterizationMode-02766",
- "VUID-VkPipelineRasterizationLineStateCreateInfoEXT-lineRasterizationMode-02769"});
-
- CreatePipelineHelper::OneshotTest(
- *this,
- [&](CreatePipelineHelper &helper) {
- helper.line_state_ci_.lineRasterizationMode = VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT;
- helper.line_state_ci_.stippledLineEnable = VK_TRUE;
- },
- VK_DEBUG_REPORT_ERROR_BIT_EXT,
- std::vector<const char *>{"VUID-VkGraphicsPipelineCreateInfo-stippledLineEnable-02767",
- "VUID-VkPipelineRasterizationLineStateCreateInfoEXT-lineRasterizationMode-02769",
- "VUID-VkPipelineRasterizationLineStateCreateInfoEXT-stippledLineEnable-02772"});
-
- CreatePipelineHelper::OneshotTest(
- *this,
- [&](CreatePipelineHelper &helper) {
- helper.line_state_ci_.lineRasterizationMode = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT;
- helper.line_state_ci_.stippledLineEnable = VK_TRUE;
- },
- VK_DEBUG_REPORT_ERROR_BIT_EXT,
- std::vector<const char *>{"VUID-VkGraphicsPipelineCreateInfo-stippledLineEnable-02767",
- "VUID-VkPipelineRasterizationLineStateCreateInfoEXT-lineRasterizationMode-02768",
- "VUID-VkPipelineRasterizationLineStateCreateInfoEXT-stippledLineEnable-02771"});
-
- CreatePipelineHelper::OneshotTest(
- *this,
- [&](CreatePipelineHelper &helper) {
- helper.line_state_ci_.lineRasterizationMode = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT;
- helper.line_state_ci_.stippledLineEnable = VK_TRUE;
- },
- VK_DEBUG_REPORT_ERROR_BIT_EXT,
- std::vector<const char *>{"VUID-VkGraphicsPipelineCreateInfo-stippledLineEnable-02767",
- "VUID-VkPipelineRasterizationLineStateCreateInfoEXT-lineRasterizationMode-02770",
- "VUID-VkPipelineRasterizationLineStateCreateInfoEXT-stippledLineEnable-02773"});
-
- CreatePipelineHelper::OneshotTest(
- *this,
- [&](CreatePipelineHelper &helper) {
- helper.line_state_ci_.lineRasterizationMode = VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT;
- helper.line_state_ci_.stippledLineEnable = VK_TRUE;
- },
- VK_DEBUG_REPORT_ERROR_BIT_EXT,
- std::vector<const char *>{"VUID-VkGraphicsPipelineCreateInfo-stippledLineEnable-02767",
- "VUID-VkPipelineRasterizationLineStateCreateInfoEXT-stippledLineEnable-02774"});
-
- PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT =
- (PFN_vkCmdSetLineStippleEXT)vkGetDeviceProcAddr(m_device->device(), "vkCmdSetLineStippleEXT");
- ASSERT_TRUE(vkCmdSetLineStippleEXT != nullptr);
-
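-    // lineStippleFactor must be in the range [1, 256], so the factor of 0 passed below is expected to
-    // trigger VUID-vkCmdSetLineStippleEXT-lineStippleFactor-02776.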
- m_commandBuffer->begin();
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetLineStippleEXT-lineStippleFactor-02776");
- vkCmdSetLineStippleEXT(m_commandBuffer->handle(), 0, 0);
- m_errorMonitor->VerifyFound();
- vkCmdSetLineStippleEXT(m_commandBuffer->handle(), 1, 1);
- m_errorMonitor->VerifyFound();
-}
-
-TEST_F(VkLayerTest, FillRectangleNV) {
- TEST_DESCRIPTION("Verify VK_NV_fill_rectangle");
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- VkPhysicalDeviceFeatures device_features = {};
- ASSERT_NO_FATAL_FAILURE(GetPhysicalDeviceFeatures(&device_features));
-
- // Disable non-solid fill modes to make sure that the usage of VK_POLYGON_MODE_LINE and
- // VK_POLYGON_MODE_POINT will cause an error when the VK_NV_fill_rectangle extension is enabled.
- device_features.fillModeNonSolid = VK_FALSE;
-
- if (DeviceExtensionSupported(gpu(), nullptr, VK_NV_FILL_RECTANGLE_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_NV_FILL_RECTANGLE_EXTENSION_NAME);
- } else {
- printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_NV_FILL_RECTANGLE_EXTENSION_NAME);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitState(&device_features));
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkPolygonMode polygon_mode = VK_POLYGON_MODE_LINE;
-
- auto set_polygon_mode = [&polygon_mode](CreatePipelineHelper &helper) { helper.rs_state_ci_.polygonMode = polygon_mode; };
-
- // Set unsupported polygon mode VK_POLYGON_MODE_LINE
- CreatePipelineHelper::OneshotTest(*this, set_polygon_mode, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkPipelineRasterizationStateCreateInfo-polygonMode-01507", false);
-
- // Set unsupported polygon mode VK_POLYGON_MODE_POINT
- polygon_mode = VK_POLYGON_MODE_POINT;
- CreatePipelineHelper::OneshotTest(*this, set_polygon_mode, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkPipelineRasterizationStateCreateInfo-polygonMode-01507", false);
-
- // Set supported polygon mode VK_POLYGON_MODE_FILL
- polygon_mode = VK_POLYGON_MODE_FILL;
- CreatePipelineHelper::OneshotTest(*this, set_polygon_mode, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkPipelineRasterizationStateCreateInfo-polygonMode-01507", true);
-
- // Set supported polygon mode VK_POLYGON_MODE_FILL_RECTANGLE_NV
- polygon_mode = VK_POLYGON_MODE_FILL_RECTANGLE_NV;
- CreatePipelineHelper::OneshotTest(*this, set_polygon_mode, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- "VUID-VkPipelineRasterizationStateCreateInfo-polygonMode-01507", true);
-}
diff --git a/tests/vkpositivelayertests.cpp b/tests/vkpositivelayertests.cpp
deleted file mode 100644
index 610301ed5..000000000
--- a/tests/vkpositivelayertests.cpp
+++ /dev/null
@@ -1,8350 +0,0 @@
-/*
- * Copyright (c) 2015-2019 The Khronos Group Inc.
- * Copyright (c) 2015-2019 Valve Corporation
- * Copyright (c) 2015-2019 LunarG, Inc.
- * Copyright (c) 2015-2019 Google, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Author: Chia-I Wu <olvaffe@gmail.com>
- * Author: Chris Forbes <chrisf@ijw.co.nz>
- * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
- * Author: Mark Lobodzinski <mark@lunarg.com>
- * Author: Mike Stroyan <mike@LunarG.com>
- * Author: Tobin Ehlis <tobine@google.com>
- * Author: Tony Barbour <tony@LunarG.com>
- * Author: Cody Northrop <cnorthrop@google.com>
- * Author: Dave Houlton <daveh@lunarg.com>
- * Author: Jeremy Kniager <jeremyk@lunarg.com>
- * Author: Shannon McPherson <shannon@lunarg.com>
- * Author: John Zulauf <jzulauf@lunarg.com>
- */
-
-#include "cast_utils.h"
-#include "layer_validation_tests.h"
-//
-// POSITIVE VALIDATION TESTS
-//
-// These tests do not expect to encounter ANY validation errors; they pass only if that is true
-
-TEST_F(VkPositiveLayerTest, NullFunctionPointer) {
-    TEST_DESCRIPTION("On a 1.0 instance, call GetDeviceProcAddr on a promoted 1.1 device-level entrypoint");
- SetTargetApiVersion(VK_API_VERSION_1_0);
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- if (DeviceExtensionSupported(gpu(), nullptr, "VK_KHR_get_memory_requirements2")) {
- m_device_extension_names.push_back("VK_KHR_get_memory_requirements2");
- } else {
-        printf("%s VK_KHR_get_memory_requirements2 extension not supported, skipping NullFunctionPointer test\n", kSkipPrefix);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- m_errorMonitor->ExpectSuccess();
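-    // On a 1.0 device only the KHR-suffixed entrypoint provided by the extension should resolve; the
-    // promoted core name queried below is expected to come back as NULL.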
- auto fpGetBufferMemoryRequirements =
- (PFN_vkGetBufferMemoryRequirements2)vkGetDeviceProcAddr(m_device->device(), "vkGetBufferMemoryRequirements2");
- if (fpGetBufferMemoryRequirements) {
- m_errorMonitor->SetError("Null was expected!");
- }
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, SecondaryCommandBufferBarrier) {
- TEST_DESCRIPTION("Add a pipeline barrier in a secondary command buffer");
- ASSERT_NO_FATAL_FAILURE(Init());
-
- m_errorMonitor->ExpectSuccess();
-
-    // A renderpass with a single subpass that declares a self-dependency
- VkAttachmentDescription attach[] = {
- {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
- VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_UNDEFINED,
- VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
- };
- VkAttachmentReference ref = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
- VkSubpassDescription subpasses[] = {
- {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, &ref, nullptr, nullptr, 0, nullptr},
- };
- VkSubpassDependency dep = {0,
- 0,
- VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
- VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
- VK_ACCESS_SHADER_WRITE_BIT,
- VK_ACCESS_SHADER_WRITE_BIT,
- VK_DEPENDENCY_BY_REGION_BIT};
- VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, attach, 1, subpasses, 1, &dep};
- VkRenderPass rp;
-
- VkResult err = vkCreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
- ASSERT_VK_SUCCESS(err);
-
- VkImageObj image(m_device);
- image.Init(32, 32, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
- VkImageView imageView = image.targetView(VK_FORMAT_R8G8B8A8_UNORM);
-
- VkFramebufferCreateInfo fbci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 1, &imageView, 32, 32, 1};
- VkFramebuffer fb;
- err = vkCreateFramebuffer(m_device->device(), &fbci, nullptr, &fb);
- ASSERT_VK_SUCCESS(err);
-
- m_commandBuffer->begin();
-
- VkRenderPassBeginInfo rpbi = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
- nullptr,
- rp,
- fb,
- {{
- 0,
- 0,
- },
- {32, 32}},
- 0,
- nullptr};
-
- vkCmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS);
-
- VkCommandPoolObj pool(m_device, m_device->graphics_queue_node_index_, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
- VkCommandBufferObj secondary(m_device, &pool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
-
- VkCommandBufferInheritanceInfo cbii = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
- nullptr,
- rp,
- 0,
-                                           VK_NULL_HANDLE,  // Set to NULL FB handle intentionally to flush out any errors
- VK_FALSE,
- 0,
- 0};
- VkCommandBufferBeginInfo cbbi = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, nullptr,
- VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT | VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT,
- &cbii};
- vkBeginCommandBuffer(secondary.handle(), &cbbi);
- VkMemoryBarrier mem_barrier = {};
- mem_barrier.sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER;
- mem_barrier.pNext = NULL;
- mem_barrier.srcAccessMask = VK_ACCESS_SHADER_WRITE_BIT;
- mem_barrier.dstAccessMask = VK_ACCESS_SHADER_WRITE_BIT;
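-    // The stages and access masks of this barrier match the subpass self-dependency declared above, so
-    // recording it inside the render pass instance (via the secondary command buffer) is legal.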
- vkCmdPipelineBarrier(secondary.handle(), VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
- VK_DEPENDENCY_BY_REGION_BIT, 1, &mem_barrier, 0, nullptr, 0, nullptr);
-
- image.ImageMemoryBarrier(&secondary, VK_IMAGE_ASPECT_COLOR_BIT, VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_SHADER_WRITE_BIT,
- VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
- VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT);
- secondary.end();
-
- vkCmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary.handle());
- vkCmdEndRenderPass(m_commandBuffer->handle());
- m_commandBuffer->end();
-
- VkSubmitInfo submit_info = {};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &m_commandBuffer->handle();
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
- vkQueueWaitIdle(m_device->m_queue);
-
- vkDestroyFramebuffer(m_device->device(), fb, nullptr);
- vkDestroyRenderPass(m_device->device(), rp, nullptr);
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, RenderPassCreateAttachmentUsedTwiceOK) {
- TEST_DESCRIPTION("Attachment is used simultaneously as color and input, with the same layout. This is OK.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- VkAttachmentDescription attach[] = {
- {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_LOAD, VK_ATTACHMENT_STORE_OP_DONT_CARE,
- VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL},
- };
- VkAttachmentReference ref = {0, VK_IMAGE_LAYOUT_GENERAL};
- VkSubpassDescription subpasses[] = {
- {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 1, &ref, 1, &ref, nullptr, nullptr, 0, nullptr},
- };
-
- VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, attach, 1, subpasses, 0, nullptr};
- VkRenderPass rp;
-
- m_errorMonitor->ExpectSuccess();
- vkCreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
- m_errorMonitor->VerifyNotFound();
- vkDestroyRenderPass(m_device->device(), rp, nullptr);
-}
-
-TEST_F(VkPositiveLayerTest, RenderPassCreateInitialLayoutUndefined) {
- TEST_DESCRIPTION(
- "Ensure that CmdBeginRenderPass with an attachment's initialLayout of VK_IMAGE_LAYOUT_UNDEFINED works when the command "
- "buffer has prior knowledge of that attachment's layout.");
-
- m_errorMonitor->ExpectSuccess();
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- // A renderpass with one color attachment.
- VkAttachmentDescription attachment = {0,
- VK_FORMAT_R8G8B8A8_UNORM,
- VK_SAMPLE_COUNT_1_BIT,
- VK_ATTACHMENT_LOAD_OP_DONT_CARE,
- VK_ATTACHMENT_STORE_OP_STORE,
- VK_ATTACHMENT_LOAD_OP_DONT_CARE,
- VK_ATTACHMENT_STORE_OP_DONT_CARE,
- VK_IMAGE_LAYOUT_UNDEFINED,
- VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
-
- VkAttachmentReference att_ref = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
-
- VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, &att_ref, nullptr, nullptr, 0, nullptr};
-
- VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, &attachment, 1, &subpass, 0, nullptr};
-
- VkRenderPass rp;
- VkResult err = vkCreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
- ASSERT_VK_SUCCESS(err);
-
- // A compatible framebuffer.
- VkImageObj image(m_device);
- image.Init(32, 32, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
- ASSERT_TRUE(image.initialized());
-
- VkImageViewCreateInfo ivci = {
- VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
- nullptr,
- 0,
- image.handle(),
- VK_IMAGE_VIEW_TYPE_2D,
- VK_FORMAT_R8G8B8A8_UNORM,
- {VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY,
- VK_COMPONENT_SWIZZLE_IDENTITY},
- {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1},
- };
- VkImageView view;
- err = vkCreateImageView(m_device->device(), &ivci, nullptr, &view);
- ASSERT_VK_SUCCESS(err);
-
- VkFramebufferCreateInfo fci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 1, &view, 32, 32, 1};
- VkFramebuffer fb;
- err = vkCreateFramebuffer(m_device->device(), &fci, nullptr, &fb);
- ASSERT_VK_SUCCESS(err);
-
- // Record a single command buffer which uses this renderpass twice. The
- // bug is triggered at the beginning of the second renderpass, when the
- // command buffer already has a layout recorded for the attachment.
- VkRenderPassBeginInfo rpbi = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, nullptr, rp, fb, {{0, 0}, {32, 32}}, 0, nullptr};
- m_commandBuffer->begin();
- vkCmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
- vkCmdEndRenderPass(m_commandBuffer->handle());
- vkCmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
-
- m_errorMonitor->VerifyNotFound();
-
- vkCmdEndRenderPass(m_commandBuffer->handle());
- m_commandBuffer->end();
-
- vkDestroyFramebuffer(m_device->device(), fb, nullptr);
- vkDestroyRenderPass(m_device->device(), rp, nullptr);
- vkDestroyImageView(m_device->device(), view, nullptr);
-}
-
-TEST_F(VkPositiveLayerTest, RenderPassCreateAttachmentLayoutWithLoadOpThenReadOnly) {
- TEST_DESCRIPTION(
- "Positive test where we create a renderpass with an attachment that uses LOAD_OP_CLEAR, the first subpass has a valid "
- "layout, and a second subpass then uses a valid *READ_ONLY* layout.");
- m_errorMonitor->ExpectSuccess();
- ASSERT_NO_FATAL_FAILURE(Init());
- auto depth_format = FindSupportedDepthStencilFormat(gpu());
- if (!depth_format) {
- printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
- return;
- }
-
- VkAttachmentReference attach[2] = {};
- attach[0].attachment = 0;
- attach[0].layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
- attach[1].attachment = 0;
- attach[1].layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL;
- VkSubpassDescription subpasses[2] = {};
- // First subpass clears DS attach on load
- subpasses[0].pDepthStencilAttachment = &attach[0];
- // 2nd subpass reads in DS as input attachment
- subpasses[1].inputAttachmentCount = 1;
- subpasses[1].pInputAttachments = &attach[1];
- VkAttachmentDescription attach_desc = {};
- attach_desc.format = depth_format;
- attach_desc.samples = VK_SAMPLE_COUNT_1_BIT;
- attach_desc.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
- attach_desc.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
- attach_desc.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
- attach_desc.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
- attach_desc.initialLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
- attach_desc.finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL;
- VkRenderPassCreateInfo rpci = {};
- rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
- rpci.attachmentCount = 1;
- rpci.pAttachments = &attach_desc;
- rpci.subpassCount = 2;
- rpci.pSubpasses = subpasses;
-
- // Now create RenderPass and verify no errors
- VkRenderPass rp;
- vkCreateRenderPass(m_device->device(), &rpci, NULL, &rp);
- m_errorMonitor->VerifyNotFound();
-
- vkDestroyRenderPass(m_device->device(), rp, NULL);
-}
-
-TEST_F(VkPositiveLayerTest, RenderPassBeginSubpassZeroTransitionsApplied) {
- TEST_DESCRIPTION("Ensure that CmdBeginRenderPass applies the layout transitions for the first subpass");
-
- m_errorMonitor->ExpectSuccess();
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- // A renderpass with one color attachment.
- VkAttachmentDescription attachment = {0,
- VK_FORMAT_R8G8B8A8_UNORM,
- VK_SAMPLE_COUNT_1_BIT,
- VK_ATTACHMENT_LOAD_OP_DONT_CARE,
- VK_ATTACHMENT_STORE_OP_STORE,
- VK_ATTACHMENT_LOAD_OP_DONT_CARE,
- VK_ATTACHMENT_STORE_OP_DONT_CARE,
- VK_IMAGE_LAYOUT_UNDEFINED,
- VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
-
- VkAttachmentReference att_ref = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
-
- VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, &att_ref, nullptr, nullptr, 0, nullptr};
-
- VkSubpassDependency dep = {0,
- 0,
- VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
- VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
- VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
- VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
- VK_DEPENDENCY_BY_REGION_BIT};
-
- VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, &attachment, 1, &subpass, 1, &dep};
-
- VkResult err;
- VkRenderPass rp;
- err = vkCreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
- ASSERT_VK_SUCCESS(err);
-
- // A compatible framebuffer.
- VkImageObj image(m_device);
- image.Init(32, 32, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
- ASSERT_TRUE(image.initialized());
-
- VkImageView view = image.targetView(VK_FORMAT_R8G8B8A8_UNORM);
-
- VkFramebufferCreateInfo fci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 1, &view, 32, 32, 1};
- VkFramebuffer fb;
- err = vkCreateFramebuffer(m_device->device(), &fci, nullptr, &fb);
- ASSERT_VK_SUCCESS(err);
-
- // Record a single command buffer which issues a pipeline barrier w/
- // image memory barrier for the attachment. This detects the previously
- // missing tracking of the subpass layout by throwing a validation error
- // if it doesn't occur.
- VkRenderPassBeginInfo rpbi = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, nullptr, rp, fb, {{0, 0}, {32, 32}}, 0, nullptr};
- m_commandBuffer->begin();
- vkCmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
-
- image.ImageMemoryBarrier(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
- VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
- VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT);
-
- vkCmdEndRenderPass(m_commandBuffer->handle());
- m_errorMonitor->VerifyNotFound();
- m_commandBuffer->end();
-
- vkDestroyFramebuffer(m_device->device(), fb, nullptr);
- vkDestroyRenderPass(m_device->device(), rp, nullptr);
-}
-
-TEST_F(VkPositiveLayerTest, RenderPassBeginTransitionsAttachmentUnused) {
- TEST_DESCRIPTION(
- "Ensure that layout transitions work correctly without errors, when an attachment reference is VK_ATTACHMENT_UNUSED");
-
- m_errorMonitor->ExpectSuccess();
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- // A renderpass with no attachments
- VkAttachmentReference att_ref = {VK_ATTACHMENT_UNUSED, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
-
- VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, &att_ref, nullptr, nullptr, 0, nullptr};
-
- VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 0, nullptr, 1, &subpass, 0, nullptr};
-
- VkRenderPass rp;
- VkResult err = vkCreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
- ASSERT_VK_SUCCESS(err);
-
- // A compatible framebuffer.
- VkFramebufferCreateInfo fci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 0, nullptr, 32, 32, 1};
- VkFramebuffer fb;
- err = vkCreateFramebuffer(m_device->device(), &fci, nullptr, &fb);
- ASSERT_VK_SUCCESS(err);
-
- // Record a command buffer which just begins and ends the renderpass. The
- // bug manifests in BeginRenderPass.
- VkRenderPassBeginInfo rpbi = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, nullptr, rp, fb, {{0, 0}, {32, 32}}, 0, nullptr};
- m_commandBuffer->begin();
- vkCmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
- vkCmdEndRenderPass(m_commandBuffer->handle());
- m_errorMonitor->VerifyNotFound();
- m_commandBuffer->end();
-
- vkDestroyFramebuffer(m_device->device(), fb, nullptr);
- vkDestroyRenderPass(m_device->device(), rp, nullptr);
-}
-
-TEST_F(VkPositiveLayerTest, RenderPassBeginStencilLoadOp) {
- TEST_DESCRIPTION("Create a stencil-only attachment with a LOAD_OP set to CLEAR. stencil[Load|Store]Op used to be ignored.");
- VkResult result = VK_SUCCESS;
- ASSERT_NO_FATAL_FAILURE(Init());
- auto depth_format = FindSupportedDepthStencilFormat(gpu());
- if (!depth_format) {
- printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
- return;
- }
- VkImageFormatProperties formatProps;
- vkGetPhysicalDeviceImageFormatProperties(gpu(), depth_format, VK_IMAGE_TYPE_2D, VK_IMAGE_TILING_OPTIMAL,
- VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT, 0,
- &formatProps);
- if (formatProps.maxExtent.width < 100 || formatProps.maxExtent.height < 100) {
- printf("%s Image format max extent is too small.\n", kSkipPrefix);
- return;
- }
-
- VkFormat depth_stencil_fmt = depth_format;
- m_depthStencil->Init(m_device, 100, 100, depth_stencil_fmt,
- VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT);
- VkAttachmentDescription att = {};
- VkAttachmentReference ref = {};
- att.format = depth_stencil_fmt;
- att.samples = VK_SAMPLE_COUNT_1_BIT;
- att.loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
- att.storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
- att.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
- att.stencilStoreOp = VK_ATTACHMENT_STORE_OP_STORE;
- att.initialLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
- att.finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
-
- VkClearValue clear;
- clear.depthStencil.depth = 1.0;
- clear.depthStencil.stencil = 0;
- ref.attachment = 0;
- ref.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
-
- VkSubpassDescription subpass = {};
- subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
- subpass.flags = 0;
- subpass.inputAttachmentCount = 0;
- subpass.pInputAttachments = NULL;
- subpass.colorAttachmentCount = 0;
- subpass.pColorAttachments = NULL;
- subpass.pResolveAttachments = NULL;
- subpass.pDepthStencilAttachment = &ref;
- subpass.preserveAttachmentCount = 0;
- subpass.pPreserveAttachments = NULL;
-
- VkRenderPass rp;
- VkRenderPassCreateInfo rp_info = {};
- rp_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
- rp_info.attachmentCount = 1;
- rp_info.pAttachments = &att;
- rp_info.subpassCount = 1;
- rp_info.pSubpasses = &subpass;
- result = vkCreateRenderPass(device(), &rp_info, NULL, &rp);
- ASSERT_VK_SUCCESS(result);
-
- VkImageView *depthView = m_depthStencil->BindInfo();
- VkFramebufferCreateInfo fb_info = {};
- fb_info.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
- fb_info.pNext = NULL;
- fb_info.renderPass = rp;
- fb_info.attachmentCount = 1;
- fb_info.pAttachments = depthView;
- fb_info.width = 100;
- fb_info.height = 100;
- fb_info.layers = 1;
- VkFramebuffer fb;
- result = vkCreateFramebuffer(device(), &fb_info, NULL, &fb);
- ASSERT_VK_SUCCESS(result);
-
- VkRenderPassBeginInfo rpbinfo = {};
- rpbinfo.clearValueCount = 1;
- rpbinfo.pClearValues = &clear;
- rpbinfo.pNext = NULL;
- rpbinfo.renderPass = rp;
- rpbinfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
- rpbinfo.renderArea.extent.width = 100;
- rpbinfo.renderArea.extent.height = 100;
- rpbinfo.renderArea.offset.x = 0;
- rpbinfo.renderArea.offset.y = 0;
- rpbinfo.framebuffer = fb;
-
- VkFenceObj fence;
- fence.init(*m_device, VkFenceObj::create_info());
- ASSERT_TRUE(fence.initialized());
-
- m_commandBuffer->begin();
- m_commandBuffer->BeginRenderPass(rpbinfo);
- m_commandBuffer->EndRenderPass();
- m_commandBuffer->end();
- m_commandBuffer->QueueCommandBuffer(fence);
-
- VkImageObj destImage(m_device);
- destImage.Init(100, 100, 1, depth_stencil_fmt, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
- VK_IMAGE_TILING_OPTIMAL, 0);
- fence.wait(VK_TRUE, UINT64_MAX);
- VkCommandBufferObj cmdbuf(m_device, m_commandPool);
- cmdbuf.begin();
-
- m_depthStencil->ImageMemoryBarrier(&cmdbuf, VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT,
- VK_ACCESS_TRANSFER_READ_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT,
- VK_ACCESS_TRANSFER_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
- VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
-
- destImage.ImageMemoryBarrier(&cmdbuf, VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT,
- VK_ACCESS_TRANSFER_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT, 0,
- VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
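-    // Copy the rendered depth/stencil contents into destImage, covering both aspects in a single region.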
- VkImageCopy cregion;
- cregion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
- cregion.srcSubresource.mipLevel = 0;
- cregion.srcSubresource.baseArrayLayer = 0;
- cregion.srcSubresource.layerCount = 1;
- cregion.srcOffset.x = 0;
- cregion.srcOffset.y = 0;
- cregion.srcOffset.z = 0;
- cregion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
- cregion.dstSubresource.mipLevel = 0;
- cregion.dstSubresource.baseArrayLayer = 0;
- cregion.dstSubresource.layerCount = 1;
- cregion.dstOffset.x = 0;
- cregion.dstOffset.y = 0;
- cregion.dstOffset.z = 0;
- cregion.extent.width = 100;
- cregion.extent.height = 100;
- cregion.extent.depth = 1;
- cmdbuf.CopyImage(m_depthStencil->handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, destImage.handle(),
- VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &cregion);
- cmdbuf.end();
-
- VkSubmitInfo submit_info;
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.pNext = NULL;
- submit_info.waitSemaphoreCount = 0;
- submit_info.pWaitSemaphores = NULL;
- submit_info.pWaitDstStageMask = NULL;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &cmdbuf.handle();
- submit_info.signalSemaphoreCount = 0;
- submit_info.pSignalSemaphores = NULL;
-
- m_errorMonitor->ExpectSuccess();
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
- m_errorMonitor->VerifyNotFound();
-
- vkQueueWaitIdle(m_device->m_queue);
- vkDestroyRenderPass(m_device->device(), rp, nullptr);
- vkDestroyFramebuffer(m_device->device(), fb, nullptr);
-}
-
-TEST_F(VkPositiveLayerTest, RenderPassBeginInlineAndSecondaryCommandBuffers) {
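-    TEST_DESCRIPTION(
-        "Begin a render pass with VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS, end it, then begin it again with "
-        "VK_SUBPASS_CONTENTS_INLINE, expecting no validation errors.");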
- m_errorMonitor->ExpectSuccess();
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- m_commandBuffer->begin();
-
- vkCmdBeginRenderPass(m_commandBuffer->handle(), &m_renderPassBeginInfo, VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS);
- vkCmdEndRenderPass(m_commandBuffer->handle());
- m_errorMonitor->VerifyNotFound();
- vkCmdBeginRenderPass(m_commandBuffer->handle(), &m_renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
- m_errorMonitor->VerifyNotFound();
- vkCmdEndRenderPass(m_commandBuffer->handle());
- m_errorMonitor->VerifyNotFound();
-
- m_commandBuffer->end();
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, RenderPassBeginDepthStencilLayoutTransitionFromUndefined) {
-    TEST_DESCRIPTION(
-        "Create a render pass with a depth-stencil attachment whose layout transition from UNDEFINED to DS_ATTACHMENT_OPTIMAL is "
-        "performed by the render pass, and verify that the transition occurs at queue submit time with no validation errors.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- auto depth_format = FindSupportedDepthStencilFormat(gpu());
- if (!depth_format) {
- printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
- return;
- }
- VkImageFormatProperties format_props;
- vkGetPhysicalDeviceImageFormatProperties(gpu(), depth_format, VK_IMAGE_TYPE_2D, VK_IMAGE_TILING_OPTIMAL,
- VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, 0, &format_props);
- if (format_props.maxExtent.width < 32 || format_props.maxExtent.height < 32) {
-        printf("%s Depth extent too small, RenderPassBeginDepthStencilLayoutTransitionFromUndefined skipped.\n", kSkipPrefix);
- return;
- }
-
- m_errorMonitor->ExpectSuccess();
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- // A renderpass with one depth/stencil attachment.
- VkAttachmentDescription attachment = {0,
- depth_format,
- VK_SAMPLE_COUNT_1_BIT,
- VK_ATTACHMENT_LOAD_OP_DONT_CARE,
- VK_ATTACHMENT_STORE_OP_DONT_CARE,
- VK_ATTACHMENT_LOAD_OP_DONT_CARE,
- VK_ATTACHMENT_STORE_OP_DONT_CARE,
- VK_IMAGE_LAYOUT_UNDEFINED,
- VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL};
-
- VkAttachmentReference att_ref = {0, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL};
-
- VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 0, nullptr, nullptr, &att_ref, 0, nullptr};
-
- VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, &attachment, 1, &subpass, 0, nullptr};
-
- VkRenderPass rp;
- VkResult err = vkCreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
- ASSERT_VK_SUCCESS(err);
- // A compatible ds image.
- VkImageObj image(m_device);
- image.Init(32, 32, 1, depth_format, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
- ASSERT_TRUE(image.initialized());
-
- VkImageViewCreateInfo ivci = {
- VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
- nullptr,
- 0,
- image.handle(),
- VK_IMAGE_VIEW_TYPE_2D,
- depth_format,
- {VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY,
- VK_COMPONENT_SWIZZLE_IDENTITY},
- {VK_IMAGE_ASPECT_DEPTH_BIT, 0, 1, 0, 1},
- };
- VkImageView view;
- err = vkCreateImageView(m_device->device(), &ivci, nullptr, &view);
- ASSERT_VK_SUCCESS(err);
-
- VkFramebufferCreateInfo fci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 1, &view, 32, 32, 1};
- VkFramebuffer fb;
- err = vkCreateFramebuffer(m_device->device(), &fci, nullptr, &fb);
- ASSERT_VK_SUCCESS(err);
-
- VkRenderPassBeginInfo rpbi = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, nullptr, rp, fb, {{0, 0}, {32, 32}}, 0, nullptr};
- m_commandBuffer->begin();
- vkCmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
- vkCmdEndRenderPass(m_commandBuffer->handle());
- m_commandBuffer->end();
- m_commandBuffer->QueueCommandBuffer(false);
- m_errorMonitor->VerifyNotFound();
-
- // Cleanup
- vkDestroyImageView(m_device->device(), view, NULL);
- vkDestroyRenderPass(m_device->device(), rp, NULL);
- vkDestroyFramebuffer(m_device->device(), fb, NULL);
-}
-
-TEST_F(VkPositiveLayerTest, DestroyPipelineRenderPass) {
- TEST_DESCRIPTION("Draw using a pipeline whose create renderPass has been destroyed.");
- m_errorMonitor->ExpectSuccess();
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkResult err;
-
- // Create a renderPass that's compatible with Draw-time renderPass
- VkAttachmentDescription att = {};
- att.format = m_render_target_fmt;
- att.samples = VK_SAMPLE_COUNT_1_BIT;
- att.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
- att.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
- att.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
- att.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
- att.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
- att.finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
-
- VkAttachmentReference ref = {};
- ref.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
- ref.attachment = 0;
-
- m_renderPassClearValues.clear();
- VkClearValue clear = {};
- clear.color = m_clear_color;
-
- VkSubpassDescription subpass = {};
- subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
- subpass.flags = 0;
- subpass.inputAttachmentCount = 0;
- subpass.pInputAttachments = NULL;
- subpass.colorAttachmentCount = 1;
- subpass.pColorAttachments = &ref;
- subpass.pResolveAttachments = NULL;
-
- subpass.pDepthStencilAttachment = NULL;
- subpass.preserveAttachmentCount = 0;
- subpass.pPreserveAttachments = NULL;
-
- VkRenderPassCreateInfo rp_info = {};
- rp_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
- rp_info.attachmentCount = 1;
- rp_info.pAttachments = &att;
- rp_info.subpassCount = 1;
- rp_info.pSubpasses = &subpass;
-
- VkRenderPass rp;
- err = vkCreateRenderPass(device(), &rp_info, NULL, &rp);
- ASSERT_VK_SUCCESS(err);
-
- VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- VkPipelineObj pipe(m_device);
- pipe.AddDefaultColorAttachment();
- pipe.AddShader(&vs);
- pipe.AddShader(&fs);
- VkViewport viewport = {0.0f, 0.0f, 64.0f, 64.0f, 0.0f, 1.0f};
- m_viewports.push_back(viewport);
- pipe.SetViewport(m_viewports);
- VkRect2D rect = {{0, 0}, {64, 64}};
- m_scissors.push_back(rect);
- pipe.SetScissor(m_scissors);
-
- const VkPipelineLayoutObj pl(m_device);
- pipe.CreateVKPipeline(pl.handle(), rp);
-
- m_commandBuffer->begin();
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
- // Destroy renderPass before pipeline is used in Draw
- // We delay until after CmdBindPipeline to verify that invalid binding isn't
- // created between CB & renderPass, which we used to do.
- vkDestroyRenderPass(m_device->device(), rp, nullptr);
- vkCmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
- vkCmdEndRenderPass(m_commandBuffer->handle());
- m_commandBuffer->end();
-
- VkSubmitInfo submit_info = {};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &m_commandBuffer->handle();
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
- m_errorMonitor->VerifyNotFound();
- vkQueueWaitIdle(m_device->m_queue);
-}
-
-TEST_F(VkPositiveLayerTest, BasicQuery) {
-    TEST_DESCRIPTION("Use a couple of occlusion queries");
- m_errorMonitor->ExpectSuccess();
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- uint32_t qfi = 0;
- VkBufferCreateInfo bci = {};
- bci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
- bci.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
- bci.size = 4 * sizeof(uint64_t);
- bci.queueFamilyIndexCount = 1;
- bci.pQueueFamilyIndices = &qfi;
- VkBufferObj buffer;
- VkMemoryPropertyFlags mem_props = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
- buffer.init(*m_device, bci, mem_props);
-
- VkQueryPool query_pool;
- VkQueryPoolCreateInfo query_pool_info;
- query_pool_info.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
- query_pool_info.pNext = NULL;
- query_pool_info.queryType = VK_QUERY_TYPE_OCCLUSION;
- query_pool_info.flags = 0;
- query_pool_info.queryCount = 2;
- query_pool_info.pipelineStatistics = 0;
-
- VkResult res = vkCreateQueryPool(m_device->handle(), &query_pool_info, NULL, &query_pool);
- ASSERT_VK_SUCCESS(res);
-
- CreatePipelineHelper pipe(*this);
- pipe.InitInfo();
- pipe.InitState();
- pipe.CreateGraphicsPipeline();
-
- m_commandBuffer->begin();
- vkCmdResetQueryPool(m_commandBuffer->handle(), query_pool, 0, 2);
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
- vkCmdBeginQuery(m_commandBuffer->handle(), query_pool, 0, 0);
- vkCmdEndQuery(m_commandBuffer->handle(), query_pool, 0);
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
- vkCmdBeginQuery(m_commandBuffer->handle(), query_pool, 1, 0);
- vkCmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
- vkCmdEndRenderPass(m_commandBuffer->handle());
- vkCmdEndQuery(m_commandBuffer->handle(), query_pool, 1);
- vkCmdCopyQueryPoolResults(m_commandBuffer->handle(), query_pool, 0, 2, buffer.handle(), 0, sizeof(uint64_t),
- VK_QUERY_RESULT_64_BIT | VK_QUERY_RESULT_WAIT_BIT);
- m_commandBuffer->end();
-
- VkSubmitInfo submit_info = {};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &m_commandBuffer->handle();
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
-
- vkQueueWaitIdle(m_device->m_queue);
- uint64_t samples_passed[4];
- res = vkGetQueryPoolResults(m_device->handle(), query_pool, 0, 2, sizeof(samples_passed), samples_passed, sizeof(uint64_t),
- VK_QUERY_RESULT_64_BIT | VK_QUERY_RESULT_WAIT_BIT);
- ASSERT_VK_SUCCESS(res);
- m_errorMonitor->VerifyNotFound();
- vkDestroyQueryPool(m_device->handle(), query_pool, NULL);
-}
-
-TEST_F(VkPositiveLayerTest, MultiplaneGetImageSubresourceLayout) {
- TEST_DESCRIPTION("Positive test, query layout of a single plane of a multiplane image. (repro Github #2530)");
-
- // Enable KHR multiplane req'd extensions
-    bool mp_extensions = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
-                                                    VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION);
- if (mp_extensions) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- }
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME);
- mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
- mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
- if (mp_extensions) {
- m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
- } else {
- printf("%s test requires KHR multiplane extensions, not available. Skipping.\n", kSkipPrefix);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- VkImageCreateInfo ci = {};
- ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- ci.pNext = NULL;
- ci.flags = 0;
- ci.imageType = VK_IMAGE_TYPE_2D;
- ci.format = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR;
- ci.extent = {128, 128, 1};
- ci.mipLevels = 1;
- ci.arrayLayers = 1;
- ci.samples = VK_SAMPLE_COUNT_1_BIT;
- ci.tiling = VK_IMAGE_TILING_LINEAR;
- ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
- ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
- ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
-
- // Verify format
- bool supported = ImageFormatAndFeaturesSupported(instance(), gpu(), ci, VK_FORMAT_FEATURE_TRANSFER_SRC_BIT);
- if (!supported) {
- printf("%s Multiplane image format not supported. Skipping test.\n", kSkipPrefix);
- return; // Assume there's low ROI on searching for different mp formats
- }
-
- VkImage image;
- VkResult err = vkCreateImage(device(), &ci, NULL, &image);
- ASSERT_VK_SUCCESS(err);
-
- // Query layout of 3rd plane
- VkImageSubresource subres = {};
- subres.aspectMask = VK_IMAGE_ASPECT_PLANE_2_BIT_KHR;
- subres.mipLevel = 0;
- subres.arrayLayer = 0;
- VkSubresourceLayout layout = {};
-
- m_errorMonitor->ExpectSuccess();
- vkGetImageSubresourceLayout(device(), image, &subres, &layout);
- m_errorMonitor->VerifyNotFound();
-
- vkDestroyImage(device(), image, NULL);
-}
-
-TEST_F(VkPositiveLayerTest, OwnershipTranfersImage) {
- TEST_DESCRIPTION("Valid image ownership transfers that shouldn't create errors");
- ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
-
- uint32_t no_gfx = m_device->QueueFamilyWithoutCapabilities(VK_QUEUE_GRAPHICS_BIT);
- if (no_gfx == UINT32_MAX) {
- printf("%s Required queue families not present (non-graphics capable required).\n", kSkipPrefix);
- return;
- }
- VkQueueObj *no_gfx_queue = m_device->queue_family_queues(no_gfx)[0].get();
-
- VkCommandPoolObj no_gfx_pool(m_device, no_gfx, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
- VkCommandBufferObj no_gfx_cb(m_device, &no_gfx_pool, VK_COMMAND_BUFFER_LEVEL_PRIMARY, no_gfx_queue);
-
- // Create an "exclusive" image owned by the graphics queue.
- VkImageObj image(m_device);
- VkFlags image_use = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
- image.Init(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, image_use, VK_IMAGE_TILING_OPTIMAL, 0);
- ASSERT_TRUE(image.initialized());
- auto image_subres = image.subresource_range(VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1);
- auto image_barrier = image.image_memory_barrier(0, 0, image.Layout(), image.Layout(), image_subres);
- image_barrier.srcQueueFamilyIndex = m_device->graphics_queue_node_index_;
- image_barrier.dstQueueFamilyIndex = no_gfx;
-
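-    // ValidOwnershipTransfer is a test helper that is expected to record the release half of the queue family
-    // ownership transfer on the graphics command buffer and the matching acquire on no_gfx_cb.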
- ValidOwnershipTransfer(m_errorMonitor, m_commandBuffer, &no_gfx_cb, VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT,
- VK_PIPELINE_STAGE_TRANSFER_BIT, nullptr, &image_barrier);
-
- // Change layouts while changing ownership
- image_barrier.srcQueueFamilyIndex = no_gfx;
- image_barrier.dstQueueFamilyIndex = m_device->graphics_queue_node_index_;
- image_barrier.oldLayout = image.Layout();
- // Make sure the new layout is different from the old
- if (image_barrier.oldLayout == VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL) {
- image_barrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
- } else {
- image_barrier.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
- }
-
- ValidOwnershipTransfer(m_errorMonitor, &no_gfx_cb, m_commandBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT,
- VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, nullptr, &image_barrier);
-}
-
-TEST_F(VkPositiveLayerTest, OwnershipTranfersBuffer) {
- TEST_DESCRIPTION("Valid buffer ownership transfers that shouldn't create errors");
- ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
-
- uint32_t no_gfx = m_device->QueueFamilyWithoutCapabilities(VK_QUEUE_GRAPHICS_BIT);
- if (no_gfx == UINT32_MAX) {
- printf("%s Required queue families not present (non-graphics capable required).\n", kSkipPrefix);
- return;
- }
- VkQueueObj *no_gfx_queue = m_device->queue_family_queues(no_gfx)[0].get();
-
- VkCommandPoolObj no_gfx_pool(m_device, no_gfx, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
- VkCommandBufferObj no_gfx_cb(m_device, &no_gfx_pool, VK_COMMAND_BUFFER_LEVEL_PRIMARY, no_gfx_queue);
-
- // Create a buffer
- const VkDeviceSize buffer_size = 256;
- uint8_t data[buffer_size] = {0xFF};
- VkConstantBufferObj buffer(m_device, buffer_size, data, VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT);
- ASSERT_TRUE(buffer.initialized());
- auto buffer_barrier = buffer.buffer_memory_barrier(0, 0, 0, VK_WHOLE_SIZE);
-
- // Let gfx own it.
- buffer_barrier.srcQueueFamilyIndex = m_device->graphics_queue_node_index_;
- buffer_barrier.dstQueueFamilyIndex = m_device->graphics_queue_node_index_;
- ValidOwnershipTransferOp(m_errorMonitor, m_commandBuffer, VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
- &buffer_barrier, nullptr);
-
- // Transfer it to non-gfx
- buffer_barrier.dstQueueFamilyIndex = no_gfx;
- ValidOwnershipTransfer(m_errorMonitor, m_commandBuffer, &no_gfx_cb, VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT,
- VK_PIPELINE_STAGE_TRANSFER_BIT, &buffer_barrier, nullptr);
-
- // Transfer it to gfx
- buffer_barrier.srcQueueFamilyIndex = no_gfx;
- buffer_barrier.dstQueueFamilyIndex = m_device->graphics_queue_node_index_;
- ValidOwnershipTransfer(m_errorMonitor, &no_gfx_cb, m_commandBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT,
- VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, &buffer_barrier, nullptr);
-}
-
-TEST_F(VkPositiveLayerTest, LayoutFromPresentWithoutAccessMemoryRead) {
- // Transition an image away from PRESENT_SRC_KHR without ACCESS_MEMORY_READ
- // in srcAccessMask.
-
- // The required behavior here was a bit unclear in earlier versions of the
- // spec, but there is no memory dependency required here, so this should
- // work without warnings.
-
- m_errorMonitor->ExpectSuccess();
- ASSERT_NO_FATAL_FAILURE(Init());
- VkImageObj image(m_device);
- image.Init(128, 128, 1, VK_FORMAT_B8G8R8A8_UNORM, (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT),
- VK_IMAGE_TILING_OPTIMAL, 0);
- ASSERT_TRUE(image.initialized());
-
- VkImageMemoryBarrier barrier = {};
- VkImageSubresourceRange range;
- barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
- barrier.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
- barrier.dstAccessMask = 0;
- barrier.oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
- barrier.newLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;
- barrier.image = image.handle();
- range.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- range.baseMipLevel = 0;
- range.levelCount = 1;
- range.baseArrayLayer = 0;
- range.layerCount = 1;
- barrier.subresourceRange = range;
- VkCommandBufferObj cmdbuf(m_device, m_commandPool);
- cmdbuf.begin();
- cmdbuf.PipelineBarrier(VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0, nullptr, 0, nullptr, 1,
- &barrier);
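-    // Transition away from PRESENT_SRC_KHR with srcAccessMask = 0; as noted above, no memory dependency is
-    // required here, so no warning should be emitted.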
- barrier.oldLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;
- barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
- barrier.srcAccessMask = 0;
- barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
- cmdbuf.PipelineBarrier(VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0, nullptr, 0, nullptr, 1,
- &barrier);
-
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, CopyNonupdatedDescriptors) {
- TEST_DESCRIPTION("Copy non-updated descriptors");
- unsigned int i;
-
- ASSERT_NO_FATAL_FAILURE(Init());
- OneOffDescriptorSet src_descriptor_set(m_device, {
- {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
- {1, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1, VK_SHADER_STAGE_ALL, nullptr},
- {2, VK_DESCRIPTOR_TYPE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
- });
- OneOffDescriptorSet dst_descriptor_set(m_device, {
- {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
- {1, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1, VK_SHADER_STAGE_ALL, nullptr},
- });
-
- m_errorMonitor->ExpectSuccess();
-
- const unsigned int copy_size = 2;
- VkCopyDescriptorSet copy_ds_update[copy_size];
- memset(copy_ds_update, 0, sizeof(copy_ds_update));
- for (i = 0; i < copy_size; i++) {
- copy_ds_update[i].sType = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET;
- copy_ds_update[i].srcSet = src_descriptor_set.set_;
- copy_ds_update[i].srcBinding = i;
- copy_ds_update[i].dstSet = dst_descriptor_set.set_;
- copy_ds_update[i].dstBinding = i;
- copy_ds_update[i].descriptorCount = 1;
- }
- vkUpdateDescriptorSets(m_device->device(), 0, NULL, copy_size, copy_ds_update);
-
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, ConfirmNoVLErrorWhenVkCmdClearAttachmentsCalledInSecondaryCB) {
-    TEST_DESCRIPTION(
-        "Verify that when vkCmdClearAttachments is called from a secondary command buffer, the validation layers do not report "
-        "an error as long as the primary command buffer begins a render pass before executing the secondary command buffer.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkCommandBufferObj secondary(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
-
- VkCommandBufferBeginInfo info = {};
- VkCommandBufferInheritanceInfo hinfo = {};
- info.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT | VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT;
- info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
- info.pInheritanceInfo = &hinfo;
- hinfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
- hinfo.pNext = NULL;
- hinfo.renderPass = renderPass();
- hinfo.subpass = 0;
- hinfo.framebuffer = m_framebuffer;
- hinfo.occlusionQueryEnable = VK_FALSE;
- hinfo.queryFlags = 0;
- hinfo.pipelineStatistics = 0;
-
- secondary.begin(&info);
- VkClearAttachment color_attachment;
- color_attachment.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- color_attachment.clearValue.color.float32[0] = 0.0;
- color_attachment.clearValue.color.float32[1] = 0.0;
- color_attachment.clearValue.color.float32[2] = 0.0;
- color_attachment.clearValue.color.float32[3] = 0.0;
- color_attachment.colorAttachment = 0;
- VkClearRect clear_rect = {{{0, 0}, {(uint32_t)m_width, (uint32_t)m_height}}, 0, 1};
- vkCmdClearAttachments(secondary.handle(), 1, &color_attachment, 1, &clear_rect);
- secondary.end();
- // Modify clear rect here to verify that it doesn't cause validation error
- clear_rect = {{{0, 0}, {99999999, 99999999}}, 0, 0};
-
- m_commandBuffer->begin();
- vkCmdBeginRenderPass(m_commandBuffer->handle(), &m_renderPassBeginInfo, VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS);
- vkCmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary.handle());
- vkCmdEndRenderPass(m_commandBuffer->handle());
- m_commandBuffer->end();
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, CreatePipelineComplexTypes) {
- TEST_DESCRIPTION("Smoke test for complex types across VS/FS boundary");
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- if (!m_device->phy().features().tessellationShader) {
- printf("%s Device does not support tessellation shaders; skipped.\n", kSkipPrefix);
- return;
- }
-
- m_errorMonitor->ExpectSuccess();
-
- VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj tcs(m_device, bindStateTscShaderText, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, this);
- VkShaderObj tes(m_device, bindStateTeshaderText, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, this);
- VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- VkPipelineInputAssemblyStateCreateInfo iasci{VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO, nullptr, 0,
- VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, VK_FALSE};
- VkPipelineTessellationStateCreateInfo tsci{VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO, nullptr, 0, 3};
-
- CreatePipelineHelper pipe(*this);
- pipe.InitInfo();
- pipe.gp_ci_.pTessellationState = &tsci;
- pipe.gp_ci_.pInputAssemblyState = &iasci;
- pipe.shader_stages_ = {vs.GetStageCreateInfo(), tcs.GetStageCreateInfo(), tes.GetStageCreateInfo(), fs.GetStageCreateInfo()};
- pipe.InitState();
- pipe.CreateGraphicsPipeline();
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, ShaderRelaxedBlockLayout) {
- // This is a positive test, no errors expected
- // Verifies the ability to relax block layout rules with a shader that requires them to be relaxed
- TEST_DESCRIPTION("Create a shader that requires relaxed block layout.");
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- // The Relaxed Block Layout extension was promoted to core in 1.1.
- // Go ahead and check for it and turn it on in case a 1.0 device has it.
- if (!DeviceExtensionSupported(gpu(), nullptr, VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME)) {
- printf("%s Extension %s not supported, skipping this pass. \n", kSkipPrefix, VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME);
- return;
- }
- m_device_extension_names.push_back(VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME);
- ASSERT_NO_FATAL_FAILURE(InitState());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- // Vertex shader requiring relaxed layout.
- // Without relaxed layout, we would expect a message like:
- // "Structure id 2 decorated as Block for variable in Uniform storage class
- // must follow standard uniform buffer layout rules: member 1 at offset 4 is not aligned to 16"
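-    // With VK_KHR_relaxed_block_layout enabled, the vec3 at offset 4 only needs component (4-byte) alignment
-    // and does not straddle a 16-byte boundary, so no error is expected.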
-
- const std::string spv_source = R"(
- OpCapability Shader
- OpMemoryModel Logical GLSL450
- OpEntryPoint Vertex %main "main"
- OpSource GLSL 450
- OpMemberDecorate %S 0 Offset 0
- OpMemberDecorate %S 1 Offset 4
- OpDecorate %S Block
- OpDecorate %B DescriptorSet 0
- OpDecorate %B Binding 0
- %void = OpTypeVoid
- %3 = OpTypeFunction %void
- %float = OpTypeFloat 32
- %v3float = OpTypeVector %float 3
- %S = OpTypeStruct %float %v3float
-%_ptr_Uniform_S = OpTypePointer Uniform %S
- %B = OpVariable %_ptr_Uniform_S Uniform
- %main = OpFunction %void None %3
- %5 = OpLabel
- OpReturn
- OpFunctionEnd
- )";
- m_errorMonitor->ExpectSuccess();
- VkShaderObj vs(m_device, spv_source, VK_SHADER_STAGE_VERTEX_BIT, this);
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, ShaderUboStd430Layout) {
- // This is a positive test, no errors expected
-    // Verifies the ability to use std430 layout rules in a uniform buffer with a shader that requires them
- TEST_DESCRIPTION("Create a shader that requires UBO std430 layout.");
- // Enable req'd extensions
- if (!InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- printf("%s Extension %s not supported, skipping this pass. \n", kSkipPrefix,
- VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- return;
- }
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- // Check for the UBO standard block layout extension and turn it on if it's available
- if (!DeviceExtensionSupported(gpu(), nullptr, VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_EXTENSION_NAME)) {
- printf("%s Extension %s not supported, skipping this pass. \n", kSkipPrefix,
- VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_EXTENSION_NAME);
- return;
- }
- m_device_extension_names.push_back(VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_EXTENSION_NAME);
-
- PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 =
- (PFN_vkGetPhysicalDeviceFeatures2)vkGetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
-
- auto uniform_buffer_standard_layout_features = lvl_init_struct<VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR>(NULL);
- uniform_buffer_standard_layout_features.uniformBufferStandardLayout = VK_TRUE;
- auto query_features2 = lvl_init_struct<VkPhysicalDeviceFeatures2>(&uniform_buffer_standard_layout_features);
- vkGetPhysicalDeviceFeatures2(gpu(), &query_features2);
-
- auto set_features2 = lvl_init_struct<VkPhysicalDeviceFeatures2>(&uniform_buffer_standard_layout_features);
-
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &set_features2));
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- // Vertex shader requiring std430 in a uniform buffer.
- // Without uniform buffer standard layout, we would expect a message like:
- // "Structure id 3 decorated as Block for variable in Uniform storage class
- // must follow standard uniform buffer layout rules: member 0 is an array
- // with stride 4 not satisfying alignment to 16"
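-    // With uniformBufferStandardLayout enabled, a uniform block may use std430 packing, so the float array
-    // with ArrayStride 4 is accepted.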
-
- const std::string spv_source = R"(
- OpCapability Shader
- OpMemoryModel Logical GLSL450
- OpEntryPoint Vertex %main "main"
- OpSource GLSL 460
- OpDecorate %_arr_float_uint_8 ArrayStride 4
- OpMemberDecorate %foo 0 Offset 0
- OpDecorate %foo Block
- OpDecorate %b DescriptorSet 0
- OpDecorate %b Binding 0
- %void = OpTypeVoid
- %3 = OpTypeFunction %void
- %float = OpTypeFloat 32
- %uint = OpTypeInt 32 0
- %uint_8 = OpConstant %uint 8
-%_arr_float_uint_8 = OpTypeArray %float %uint_8
- %foo = OpTypeStruct %_arr_float_uint_8
-%_ptr_Uniform_foo = OpTypePointer Uniform %foo
- %b = OpVariable %_ptr_Uniform_foo Uniform
- %main = OpFunction %void None %3
- %5 = OpLabel
- OpReturn
- OpFunctionEnd
- )";
-
- std::vector<unsigned int> spv;
- VkShaderModuleCreateInfo module_create_info;
- VkShaderModule shader_module;
- module_create_info.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
- module_create_info.pNext = NULL;
- ASMtoSPV(SPV_ENV_VULKAN_1_0, 0, spv_source.data(), spv);
- module_create_info.pCode = spv.data();
- module_create_info.codeSize = spv.size() * sizeof(unsigned int);
- module_create_info.flags = 0;
-
- m_errorMonitor->ExpectSuccess();
- VkResult err = vkCreateShaderModule(m_device->handle(), &module_create_info, NULL, &shader_module);
- m_errorMonitor->VerifyNotFound();
- if (err == VK_SUCCESS) {
- vkDestroyShaderModule(m_device->handle(), shader_module, NULL);
- }
-}
-
-TEST_F(VkPositiveLayerTest, ShaderScalarBlockLayout) {
- // This is a positive test, no errors expected
-    // Verifies the ability to use scalar block layout rules with a shader that requires them
- TEST_DESCRIPTION("Create a shader that requires scalar block layout.");
- // Enable req'd extensions
- if (!InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- printf("%s Extension %s not supported, skipping this pass. \n", kSkipPrefix,
- VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- return;
- }
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- // Check for the Scalar Block Layout extension and turn it on if it's available
- if (!DeviceExtensionSupported(gpu(), nullptr, VK_EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME)) {
- printf("%s Extension %s not supported, skipping this pass. \n", kSkipPrefix, VK_EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME);
- return;
- }
- m_device_extension_names.push_back(VK_EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME);
-
- PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 =
- (PFN_vkGetPhysicalDeviceFeatures2)vkGetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
-
- auto scalar_block_features = lvl_init_struct<VkPhysicalDeviceScalarBlockLayoutFeaturesEXT>(NULL);
- scalar_block_features.scalarBlockLayout = VK_TRUE;
- auto query_features2 = lvl_init_struct<VkPhysicalDeviceFeatures2>(&scalar_block_features);
- vkGetPhysicalDeviceFeatures2(gpu(), &query_features2);
-
- auto set_features2 = lvl_init_struct<VkPhysicalDeviceFeatures2>(&scalar_block_features);
-
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &set_features2));
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- // Vertex shader requiring scalar layout.
- // Without scalar layout, we would expect a message like:
- // "Structure id 2 decorated as Block for variable in Uniform storage class
- // must follow standard uniform buffer layout rules: member 1 at offset 4 is not aligned to 16"
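-    // The vec3 at offset 8 straddles a 16-byte boundary, which even relaxed block layout forbids; enabling
-    // scalarBlockLayout drops that restriction, so no error is expected.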
-
- const std::string spv_source = R"(
- OpCapability Shader
- OpMemoryModel Logical GLSL450
- OpEntryPoint Vertex %main "main"
- OpSource GLSL 450
- OpMemberDecorate %S 0 Offset 0
- OpMemberDecorate %S 1 Offset 4
- OpMemberDecorate %S 2 Offset 8
- OpDecorate %S Block
- OpDecorate %B DescriptorSet 0
- OpDecorate %B Binding 0
- %void = OpTypeVoid
- %3 = OpTypeFunction %void
- %float = OpTypeFloat 32
- %v3float = OpTypeVector %float 3
- %S = OpTypeStruct %float %float %v3float
-%_ptr_Uniform_S = OpTypePointer Uniform %S
- %B = OpVariable %_ptr_Uniform_S Uniform
- %main = OpFunction %void None %3
- %5 = OpLabel
- OpReturn
- OpFunctionEnd
- )";
-
- m_errorMonitor->ExpectSuccess();
- VkShaderObj vs(m_device, spv_source, VK_SHADER_STAGE_VERTEX_BIT, this);
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, SpirvGroupDecorations) {
- TEST_DESCRIPTION("Test shader validation support for group decorations.");
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- ASSERT_NO_FATAL_FAILURE(InitState());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- const std::string spv_source = R"(
- OpCapability Shader
- OpMemoryModel Logical GLSL450
- OpEntryPoint GLCompute %main "main" %gl_GlobalInvocationID
- OpExecutionMode %main LocalSize 1 1 1
- OpSource GLSL 430
- OpName %main "main"
- OpName %gl_GlobalInvocationID "gl_GlobalInvocationID"
- OpDecorate %gl_GlobalInvocationID BuiltIn GlobalInvocationId
- OpDecorate %_runtimearr_float ArrayStride 4
- OpDecorate %4 BufferBlock
- OpDecorate %5 Offset 0
- %4 = OpDecorationGroup
- %5 = OpDecorationGroup
- OpGroupDecorate %4 %_struct_6 %_struct_7 %_struct_8 %_struct_9 %_struct_10 %_struct_11
- OpGroupMemberDecorate %5 %_struct_6 0 %_struct_7 0 %_struct_8 0 %_struct_9 0 %_struct_10 0 %_struct_11 0
- OpDecorate %12 DescriptorSet 0
- OpDecorate %13 DescriptorSet 0
- OpDecorate %13 NonWritable
- OpDecorate %13 Restrict
- %14 = OpDecorationGroup
- %12 = OpDecorationGroup
- %13 = OpDecorationGroup
- OpGroupDecorate %12 %15
- OpGroupDecorate %12 %15
- OpGroupDecorate %12 %15
- OpDecorate %15 DescriptorSet 0
- OpDecorate %15 Binding 5
- OpGroupDecorate %14 %16
- OpDecorate %16 DescriptorSet 0
- OpDecorate %16 Binding 0
- OpGroupDecorate %12 %17
- OpDecorate %17 Binding 1
- OpGroupDecorate %13 %18 %19
- OpDecorate %18 Binding 2
- OpDecorate %19 Binding 3
- OpGroupDecorate %14 %20
- OpGroupDecorate %12 %20
- OpGroupDecorate %13 %20
- OpDecorate %20 Binding 4
- %bool = OpTypeBool
- %void = OpTypeVoid
- %23 = OpTypeFunction %void
- %uint = OpTypeInt 32 0
- %int = OpTypeInt 32 1
- %float = OpTypeFloat 32
- %v3uint = OpTypeVector %uint 3
- %v3float = OpTypeVector %float 3
-%_ptr_Input_v3uint = OpTypePointer Input %v3uint
-%_ptr_Uniform_int = OpTypePointer Uniform %int
-%_ptr_Uniform_float = OpTypePointer Uniform %float
-%_runtimearr_int = OpTypeRuntimeArray %int
-%_runtimearr_float = OpTypeRuntimeArray %float
-%gl_GlobalInvocationID = OpVariable %_ptr_Input_v3uint Input
- %int_0 = OpConstant %int 0
- %_struct_6 = OpTypeStruct %_runtimearr_float
-%_ptr_Uniform__struct_6 = OpTypePointer Uniform %_struct_6
- %15 = OpVariable %_ptr_Uniform__struct_6 Uniform
- %_struct_7 = OpTypeStruct %_runtimearr_float
-%_ptr_Uniform__struct_7 = OpTypePointer Uniform %_struct_7
- %16 = OpVariable %_ptr_Uniform__struct_7 Uniform
- %_struct_8 = OpTypeStruct %_runtimearr_float
-%_ptr_Uniform__struct_8 = OpTypePointer Uniform %_struct_8
- %17 = OpVariable %_ptr_Uniform__struct_8 Uniform
- %_struct_9 = OpTypeStruct %_runtimearr_float
-%_ptr_Uniform__struct_9 = OpTypePointer Uniform %_struct_9
- %18 = OpVariable %_ptr_Uniform__struct_9 Uniform
- %_struct_10 = OpTypeStruct %_runtimearr_float
-%_ptr_Uniform__struct_10 = OpTypePointer Uniform %_struct_10
- %19 = OpVariable %_ptr_Uniform__struct_10 Uniform
- %_struct_11 = OpTypeStruct %_runtimearr_float
-%_ptr_Uniform__struct_11 = OpTypePointer Uniform %_struct_11
- %20 = OpVariable %_ptr_Uniform__struct_11 Uniform
- %main = OpFunction %void None %23
- %40 = OpLabel
- %41 = OpLoad %v3uint %gl_GlobalInvocationID
- %42 = OpCompositeExtract %uint %41 0
- %43 = OpAccessChain %_ptr_Uniform_float %16 %int_0 %42
- %44 = OpAccessChain %_ptr_Uniform_float %17 %int_0 %42
- %45 = OpAccessChain %_ptr_Uniform_float %18 %int_0 %42
- %46 = OpAccessChain %_ptr_Uniform_float %19 %int_0 %42
- %47 = OpAccessChain %_ptr_Uniform_float %20 %int_0 %42
- %48 = OpAccessChain %_ptr_Uniform_float %15 %int_0 %42
- %49 = OpLoad %float %43
- %50 = OpLoad %float %44
- %51 = OpLoad %float %45
- %52 = OpLoad %float %46
- %53 = OpLoad %float %47
- %54 = OpFAdd %float %49 %50
- %55 = OpFAdd %float %54 %51
- %56 = OpFAdd %float %55 %52
- %57 = OpFAdd %float %56 %53
- OpStore %48 %57
- OpReturn
- OpFunctionEnd
-)";
-
- // CreateDescriptorSetLayout
- VkDescriptorSetLayoutBinding dslb[6] = {};
- size_t dslb_size = size(dslb);
- for (size_t i = 0; i < dslb_size; i++) {
- dslb[i].binding = i;
- dslb[i].descriptorCount = 1;
- dslb[i].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
- dslb[i].pImmutableSamplers = NULL;
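- // Note: VK_SHADER_STAGE_ALL already includes the compute stage, so OR'ing in COMPUTE_BIT below is redundant but harmless.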
- dslb[i].stageFlags = VK_SHADER_STAGE_COMPUTE_BIT | VK_SHADER_STAGE_ALL;
- }
- if (m_device->props.limits.maxPerStageDescriptorStorageBuffers < dslb_size) {
- printf("%sNeeded storage buffer bindings exceeds this devices limit. Skipping tests.\n", kSkipPrefix);
- return;
- }
-
- CreateComputePipelineHelper pipe(*this);
- pipe.InitInfo();
- pipe.dsl_bindings_.resize(dslb_size);
- memcpy(pipe.dsl_bindings_.data(), dslb, dslb_size * sizeof(VkDescriptorSetLayoutBinding));
- pipe.cs_.reset(new VkShaderObj(m_device, bindStateMinimalShaderText, VK_SHADER_STAGE_COMPUTE_BIT, this));
- pipe.InitState();
- m_errorMonitor->ExpectSuccess();
- pipe.CreateComputePipeline();
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, CreatePipelineCheckShaderCapabilityExtension1of2) {
- // This is a positive test, no errors expected
- // Verifies the ability to handle a shader that declares a non-unique SPIR-V capability ID
- TEST_DESCRIPTION("Create a shader which uses a non-unique capability ID extension, 1 of 2");
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- if (!DeviceExtensionSupported(gpu(), nullptr, VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME)) {
- printf("%s Extension %s not supported, skipping this pass. \n", kSkipPrefix,
- VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME);
- return;
- }
- m_device_extension_names.push_back(VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME);
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- // These tests require that the device support multiViewport
- if (!m_device->phy().features().multiViewport) {
- printf("%s Device does not support multiViewport, test skipped.\n", kSkipPrefix);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- // Vertex shader using viewport array capability
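- // GL_ARB_shader_viewport_layer_array compiles to the ShaderViewportIndexLayer SPIR-V capability, whose numeric ID is
- // shared by the EXT and NV aliases; that shared (non-unique) ID is what this pair of tests exercises.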
- char const *vsSource =
- "#version 450\n"
- "#extension GL_ARB_shader_viewport_layer_array : enable\n"
- "void main() {\n"
- " gl_ViewportIndex = 1;\n"
- "}\n";
-
- VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
-
- CreatePipelineHelper pipe(*this);
- pipe.InitInfo();
- pipe.shader_stages_ = {vs.GetStageCreateInfo()};
- pipe.InitState();
- m_errorMonitor->ExpectSuccess();
- pipe.CreateGraphicsPipeline();
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, CreatePipelineCheckShaderCapabilityExtension2of2) {
- // This is a positive test, no errors expected
- // Verifies the ability to handle a shader that declares a non-unique SPIR-V capability ID
- TEST_DESCRIPTION("Create a shader which uses a non-unique capability ID extension, 2 of 2");
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- if (!DeviceExtensionSupported(gpu(), nullptr, VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME)) {
- printf("%s Extension %s not supported, skipping this pass. \n", kSkipPrefix, VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME);
- return;
- }
- m_device_extension_names.push_back(VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME);
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- // These tests require that the device support multiViewport
- if (!m_device->phy().features().multiViewport) {
- printf("%s Device does not support multiViewport, test skipped.\n", kSkipPrefix);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- // Vertex shader using viewport array capability
- char const *vsSource =
- "#version 450\n"
- "#extension GL_ARB_shader_viewport_layer_array : enable\n"
- "void main() {\n"
- " gl_ViewportIndex = 1;\n"
- "}\n";
-
- VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
-
- CreatePipelineHelper pipe(*this);
- pipe.InitInfo();
- pipe.shader_stages_ = {vs.GetStageCreateInfo()};
- pipe.InitState();
- m_errorMonitor->ExpectSuccess();
- pipe.CreateGraphicsPipeline();
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, CreatePipelineFragmentOutputNotWrittenButMasked) {
- TEST_DESCRIPTION(
- "Test that no error is produced when the fragment shader fails to declare an output, but the corresponding attachment's "
- "write mask is 0.");
- m_errorMonitor->ExpectSuccess();
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- char const *fsSource =
- "#version 450\n"
- "\n"
- "void main(){\n"
- "}\n";
-
- VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- VkPipelineObj pipe(m_device);
- pipe.AddShader(&vs);
- pipe.AddShader(&fs);
-
- /* set up CB 0, not written, but also masked */
- pipe.AddDefaultColorAttachment(0);
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkDescriptorSetObj descriptorSet(m_device);
- descriptorSet.AppendDummy();
- descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
-
- pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
-
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, StatelessValidationDisable) {
- TEST_DESCRIPTION("Specify a non-zero value for a reserved parameter with stateless validation disabled");
-
- VkValidationFeatureDisableEXT disables[] = {VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT};
- VkValidationFeaturesEXT features = {};
- features.sType = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT;
- features.disabledValidationFeatureCount = 1;
- features.pDisabledValidationFeatures = disables;
- VkCommandPoolCreateFlags pool_flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
- ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, pool_flags, &features));
-
- m_errorMonitor->ExpectSuccess();
- // Specify a non-zero value for a reserved VkFlags parameter. Normally this would trigger a stateless validation error,
- // but that validation was disabled via VK_EXT_validation_features, so no error should be reported.
- VkEvent event_handle = VK_NULL_HANDLE;
- VkEventCreateInfo event_info = {};
- event_info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
- event_info.flags = 1;
- vkCreateEvent(device(), &event_info, NULL, &event_handle);
- vkDestroyEvent(device(), event_handle, NULL);
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, PointSizeWriteInFunction) {
- TEST_DESCRIPTION("Create a pipeline using TOPOLOGY_POINT_LIST and write PointSize in vertex shader function.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- m_errorMonitor->ExpectSuccess();
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
- ASSERT_NO_FATAL_FAILURE(InitViewport());
-
- // Create VS declaring PointSize and write to it in a function call.
- VkShaderObj vs(m_device, bindStateVertPointSizeShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj ps(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
- {
- CreatePipelineHelper pipe(*this);
- pipe.InitInfo();
- pipe.shader_stages_ = {vs.GetStageCreateInfo(), ps.GetStageCreateInfo()};
- pipe.ia_ci_.topology = VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
- pipe.InitState();
- pipe.CreateGraphicsPipeline();
- }
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, PointSizeGeomShaderSuccess) {
- TEST_DESCRIPTION(
- "Create a pipeline using TOPOLOGY_POINT_LIST, set PointSize vertex shader, and write in the final geometry stage.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- m_errorMonitor->ExpectSuccess();
-
- if ((!m_device->phy().features().geometryShader) || (!m_device->phy().features().shaderTessellationAndGeometryPointSize)) {
- printf("%s Device does not support the required geometry shader features; skipped.\n", kSkipPrefix);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
- ASSERT_NO_FATAL_FAILURE(InitViewport());
-
- // Create VS declaring PointSize and writing to it
- VkShaderObj vs(m_device, bindStateVertPointSizeShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj gs(m_device, bindStateGeomPointSizeShaderText, VK_SHADER_STAGE_GEOMETRY_BIT, this);
- VkShaderObj ps(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- CreatePipelineHelper pipe(*this);
- pipe.InitInfo();
- pipe.shader_stages_ = {vs.GetStageCreateInfo(), gs.GetStageCreateInfo(), ps.GetStageCreateInfo()};
- // Set Input Assembly to TOPOLOGY POINT LIST
- pipe.ia_ci_.topology = VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
- pipe.InitState();
- pipe.CreateGraphicsPipeline();
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, LoosePointSizeWrite) {
- TEST_DESCRIPTION("Create a pipeline using TOPOLOGY_POINT_LIST and write PointSize outside of a structure.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- m_errorMonitor->ExpectSuccess();
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
- ASSERT_NO_FATAL_FAILURE(InitViewport());
-
- const std::string LoosePointSizeWrite = R"(
- OpCapability Shader
- %1 = OpExtInstImport "GLSL.std.450"
- OpMemoryModel Logical GLSL450
- OpEntryPoint Vertex %main "main" %glposition %glpointsize %gl_VertexIndex
- OpSource GLSL 450
- OpName %main "main"
- OpName %vertices "vertices"
- OpName %glposition "glposition"
- OpName %glpointsize "glpointsize"
- OpName %gl_VertexIndex "gl_VertexIndex"
- OpDecorate %glposition BuiltIn Position
- OpDecorate %glpointsize BuiltIn PointSize
- OpDecorate %gl_VertexIndex BuiltIn VertexIndex
- %void = OpTypeVoid
- %3 = OpTypeFunction %void
- %float = OpTypeFloat 32
- %v2float = OpTypeVector %float 2
- %uint = OpTypeInt 32 0
- %uint_3 = OpConstant %uint 3
- %_arr_v2float_uint_3 = OpTypeArray %v2float %uint_3
- %_ptr_Private__arr_v2float_uint_3 = OpTypePointer Private %_arr_v2float_uint_3
- %vertices = OpVariable %_ptr_Private__arr_v2float_uint_3 Private
- %int = OpTypeInt 32 1
- %int_0 = OpConstant %int 0
- %float_n1 = OpConstant %float -1
- %16 = OpConstantComposite %v2float %float_n1 %float_n1
- %_ptr_Private_v2float = OpTypePointer Private %v2float
- %int_1 = OpConstant %int 1
- %float_1 = OpConstant %float 1
- %21 = OpConstantComposite %v2float %float_1 %float_n1
- %int_2 = OpConstant %int 2
- %float_0 = OpConstant %float 0
- %25 = OpConstantComposite %v2float %float_0 %float_1
- %v4float = OpTypeVector %float 4
- %_ptr_Output_gl_Position = OpTypePointer Output %v4float
- %glposition = OpVariable %_ptr_Output_gl_Position Output
- %_ptr_Output_gl_PointSize = OpTypePointer Output %float
- %glpointsize = OpVariable %_ptr_Output_gl_PointSize Output
- %_ptr_Input_int = OpTypePointer Input %int
- %gl_VertexIndex = OpVariable %_ptr_Input_int Input
- %int_3 = OpConstant %int 3
- %_ptr_Output_v4float = OpTypePointer Output %v4float
- %_ptr_Output_float = OpTypePointer Output %float
- %main = OpFunction %void None %3
- %5 = OpLabel
- %18 = OpAccessChain %_ptr_Private_v2float %vertices %int_0
- OpStore %18 %16
- %22 = OpAccessChain %_ptr_Private_v2float %vertices %int_1
- OpStore %22 %21
- %26 = OpAccessChain %_ptr_Private_v2float %vertices %int_2
- OpStore %26 %25
- %33 = OpLoad %int %gl_VertexIndex
- %35 = OpSMod %int %33 %int_3
- %36 = OpAccessChain %_ptr_Private_v2float %vertices %35
- %37 = OpLoad %v2float %36
- %38 = OpCompositeExtract %float %37 0
- %39 = OpCompositeExtract %float %37 1
- %40 = OpCompositeConstruct %v4float %38 %39 %float_0 %float_1
- %42 = OpAccessChain %_ptr_Output_v4float %glposition
- OpStore %42 %40
- OpStore %glpointsize %float_1
- OpReturn
- OpFunctionEnd
- )";
-
- // Create VS that writes PointSize as a loose builtin, outside of any block.
- VkShaderObj vs(m_device, LoosePointSizeWrite, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj ps(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- {
- CreatePipelineHelper pipe(*this);
- pipe.InitInfo();
- pipe.shader_stages_ = {vs.GetStageCreateInfo(), ps.GetStageCreateInfo()};
- // Set Input Assembly to TOPOLOGY POINT LIST
- pipe.ia_ci_.topology = VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
- pipe.InitState();
- pipe.CreateGraphicsPipeline();
- }
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, UncompressedToCompressedImageCopy) {
- TEST_DESCRIPTION("Image copies between compressed and uncompressed images");
- ASSERT_NO_FATAL_FAILURE(Init());
-
- // Verify format support
- // Size-compatible (64-bit) formats. Uncompressed is 64 bits per texel, compressed is 64 bits per 4x4 block (or 4bpt).
- if (!ImageFormatAndFeaturesSupported(gpu(), VK_FORMAT_R16G16B16A16_UINT, VK_IMAGE_TILING_OPTIMAL,
- VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR | VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR) ||
- !ImageFormatAndFeaturesSupported(gpu(), VK_FORMAT_BC1_RGBA_SRGB_BLOCK, VK_IMAGE_TILING_OPTIMAL,
- VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR | VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR)) {
- printf("%s Required formats/features not supported - UncompressedToCompressedImageCopy skipped.\n", kSkipPrefix);
- return;
- }
-
- VkImageObj uncomp_10x10t_image(m_device);      // Size = 10 * 10 * 64 bits = 6400 bits
- VkImageObj comp_10x10b_40x40t_image(m_device); // Size = 40 * 40 * 4 bits = 6400 bits
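- // BC1 packs each 4x4 texel block into 64 bits, so the 40x40 compressed image is 10x10 blocks = 800 bytes,
- // the same 800 bytes as the 10x10 uncompressed image at 64 bits (8 bytes) per texel.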
-
- uncomp_10x10t_image.Init(10, 10, 1, VK_FORMAT_R16G16B16A16_UINT,
- VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL);
- comp_10x10b_40x40t_image.Init(40, 40, 1, VK_FORMAT_BC1_RGBA_SRGB_BLOCK,
- VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL);
-
- if (!uncomp_10x10t_image.initialized() || !comp_10x10b_40x40t_image.initialized()) {
- printf("%s Unable to initialize surfaces - UncompressedToCompressedImageCopy skipped.\n", kSkipPrefix);
- return;
- }
-
- // Both copies move the same number of bits: 64 bits per texel uncompressed, 4 bits per texel for BC1.
- // Set up a copy region shared by both directions
- VkImageCopy copy_region = {};
- copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- copy_region.srcSubresource.mipLevel = 0;
- copy_region.dstSubresource.mipLevel = 0;
- copy_region.srcSubresource.baseArrayLayer = 0;
- copy_region.dstSubresource.baseArrayLayer = 0;
- copy_region.srcSubresource.layerCount = 1;
- copy_region.dstSubresource.layerCount = 1;
- copy_region.srcOffset = {0, 0, 0};
- copy_region.dstOffset = {0, 0, 0};
-
- m_errorMonitor->ExpectSuccess();
- m_commandBuffer->begin();
-
- // Copy from uncompressed to compressed
- copy_region.extent = {10, 10, 1}; // Dimensions in (uncompressed) texels
- vkCmdCopyImage(m_commandBuffer->handle(), uncomp_10x10t_image.handle(), VK_IMAGE_LAYOUT_GENERAL,
- comp_10x10b_40x40t_image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &copy_region);
-
- // And from compressed to uncompressed
- copy_region.extent = {40, 40, 1}; // Dimensions in (compressed) texels
- vkCmdCopyImage(m_commandBuffer->handle(), comp_10x10b_40x40t_image.handle(), VK_IMAGE_LAYOUT_GENERAL,
- uncomp_10x10t_image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &copy_region);
-
- m_errorMonitor->VerifyNotFound();
- m_commandBuffer->end();
-}
-
-TEST_F(VkPositiveLayerTest, DeleteDescriptorSetLayoutsBeforeDescriptorSets) {
- TEST_DESCRIPTION("Create DSLayouts and DescriptorSets and then delete the DSLayouts before the DescriptorSets.");
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
- VkResult err;
-
- m_errorMonitor->ExpectSuccess();
-
- VkDescriptorPoolSize ds_type_count = {};
- ds_type_count.type = VK_DESCRIPTOR_TYPE_SAMPLER;
- ds_type_count.descriptorCount = 1;
-
- VkDescriptorPoolCreateInfo ds_pool_ci = {};
- ds_pool_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
- ds_pool_ci.pNext = NULL;
- ds_pool_ci.flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT;
- ds_pool_ci.maxSets = 1;
- ds_pool_ci.poolSizeCount = 1;
- ds_pool_ci.pPoolSizes = &ds_type_count;
-
- VkDescriptorPool ds_pool_one;
- err = vkCreateDescriptorPool(m_device->device(), &ds_pool_ci, NULL, &ds_pool_one);
- ASSERT_VK_SUCCESS(err);
-
- VkDescriptorSetLayoutBinding dsl_binding = {};
- dsl_binding.binding = 0;
- dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
- dsl_binding.descriptorCount = 1;
- dsl_binding.stageFlags = VK_SHADER_STAGE_ALL;
- dsl_binding.pImmutableSamplers = NULL;
-
- VkDescriptorSet descriptorSet;
- {
- const VkDescriptorSetLayoutObj ds_layout(m_device, {dsl_binding});
-
- VkDescriptorSetAllocateInfo alloc_info = {};
- alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
- alloc_info.descriptorSetCount = 1;
- alloc_info.descriptorPool = ds_pool_one;
- alloc_info.pSetLayouts = &ds_layout.handle();
- err = vkAllocateDescriptorSets(m_device->device(), &alloc_info, &descriptorSet);
- ASSERT_VK_SUCCESS(err);
- } // ds_layout destroyed
- err = vkFreeDescriptorSets(m_device->device(), ds_pool_one, 1, &descriptorSet);
-
- vkDestroyDescriptorPool(m_device->device(), ds_pool_one, NULL);
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, CommandPoolDeleteWithReferences) {
- TEST_DESCRIPTION("Ensure the validation layers bookkeeping tracks the implicit command buffer frees.");
- ASSERT_NO_FATAL_FAILURE(Init());
-
- VkCommandPoolCreateInfo cmd_pool_info = {};
- cmd_pool_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
- cmd_pool_info.pNext = NULL;
- cmd_pool_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
- cmd_pool_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
- cmd_pool_info.flags = 0;
-
- VkCommandPool secondary_cmd_pool;
- VkResult res = vkCreateCommandPool(m_device->handle(), &cmd_pool_info, NULL, &secondary_cmd_pool);
- ASSERT_VK_SUCCESS(res);
-
- VkCommandBufferAllocateInfo cmdalloc = vk_testing::CommandBuffer::create_info(secondary_cmd_pool);
- cmdalloc.level = VK_COMMAND_BUFFER_LEVEL_SECONDARY;
-
- VkCommandBuffer secondary_cmds;
- res = vkAllocateCommandBuffers(m_device->handle(), &cmdalloc, &secondary_cmds);
-
- VkCommandBufferInheritanceInfo cmd_buf_inheritance_info = {};
- cmd_buf_inheritance_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
- cmd_buf_inheritance_info.pNext = NULL;
- cmd_buf_inheritance_info.renderPass = VK_NULL_HANDLE;
- cmd_buf_inheritance_info.subpass = 0;
- cmd_buf_inheritance_info.framebuffer = VK_NULL_HANDLE;
- cmd_buf_inheritance_info.occlusionQueryEnable = VK_FALSE;
- cmd_buf_inheritance_info.queryFlags = 0;
- cmd_buf_inheritance_info.pipelineStatistics = 0;
-
- VkCommandBufferBeginInfo secondary_begin = {};
- secondary_begin.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
- secondary_begin.pNext = NULL;
- secondary_begin.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
- secondary_begin.pInheritanceInfo = &cmd_buf_inheritance_info;
-
- res = vkBeginCommandBuffer(secondary_cmds, &secondary_begin);
- ASSERT_VK_SUCCESS(res);
- vkEndCommandBuffer(secondary_cmds);
-
- m_commandBuffer->begin();
- vkCmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary_cmds);
- m_commandBuffer->end();
-
- // DestroyCommandPool *implicitly* frees the command buffers allocated from it
- vkDestroyCommandPool(m_device->handle(), secondary_cmd_pool, NULL);
- // If bookkeeping has been lax, validating the reset will attempt to touch deleted data
- res = vkResetCommandPool(m_device->handle(), m_commandPool->handle(), 0);
- ASSERT_VK_SUCCESS(res);
-}
-
-TEST_F(VkPositiveLayerTest, SecondaryCommandBufferClearColorAttachments) {
- TEST_DESCRIPTION("Create a secondary command buffer and record a CmdClearAttachments call into it");
- m_errorMonitor->ExpectSuccess();
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkCommandBufferAllocateInfo command_buffer_allocate_info = {};
- command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
- command_buffer_allocate_info.commandPool = m_commandPool->handle();
- command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_SECONDARY;
- command_buffer_allocate_info.commandBufferCount = 1;
-
- VkCommandBuffer secondary_command_buffer;
- ASSERT_VK_SUCCESS(vkAllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, &secondary_command_buffer));
- VkCommandBufferBeginInfo command_buffer_begin_info = {};
- VkCommandBufferInheritanceInfo command_buffer_inheritance_info = {};
- command_buffer_inheritance_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
- command_buffer_inheritance_info.renderPass = m_renderPass;
- command_buffer_inheritance_info.framebuffer = m_framebuffer;
-
- command_buffer_begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
- command_buffer_begin_info.flags =
- VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT | VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT;
- command_buffer_begin_info.pInheritanceInfo = &command_buffer_inheritance_info;
-
- vkBeginCommandBuffer(secondary_command_buffer, &command_buffer_begin_info);
- VkClearAttachment color_attachment;
- color_attachment.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- color_attachment.clearValue.color.float32[0] = 0;
- color_attachment.clearValue.color.float32[1] = 0;
- color_attachment.clearValue.color.float32[2] = 0;
- color_attachment.clearValue.color.float32[3] = 0;
- color_attachment.colorAttachment = 0;
- VkClearRect clear_rect = {{{0, 0}, {32, 32}}, 0, 1};
- vkCmdClearAttachments(secondary_command_buffer, 1, &color_attachment, 1, &clear_rect);
- vkEndCommandBuffer(secondary_command_buffer);
- m_commandBuffer->begin();
- vkCmdBeginRenderPass(m_commandBuffer->handle(), &m_renderPassBeginInfo, VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS);
- vkCmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary_command_buffer);
- vkCmdEndRenderPass(m_commandBuffer->handle());
- m_commandBuffer->end();
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, SecondaryCommandBufferImageLayoutTransitions) {
- TEST_DESCRIPTION("Perform an image layout transition in a secondary command buffer followed by a transition in the primary.");
- VkResult err;
- m_errorMonitor->ExpectSuccess();
- ASSERT_NO_FATAL_FAILURE(Init());
- auto depth_format = FindSupportedDepthStencilFormat(gpu());
- if (!depth_format) {
- printf("%s Couldn't find depth stencil format.\n", kSkipPrefix);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
- // Allocate a secondary and primary cmd buffer
- VkCommandBufferAllocateInfo command_buffer_allocate_info = {};
- command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
- command_buffer_allocate_info.commandPool = m_commandPool->handle();
- command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_SECONDARY;
- command_buffer_allocate_info.commandBufferCount = 1;
-
- VkCommandBuffer secondary_command_buffer;
- ASSERT_VK_SUCCESS(vkAllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, &secondary_command_buffer));
- command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
- VkCommandBuffer primary_command_buffer;
- ASSERT_VK_SUCCESS(vkAllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, &primary_command_buffer));
- VkCommandBufferBeginInfo command_buffer_begin_info = {};
- VkCommandBufferInheritanceInfo command_buffer_inheritance_info = {};
- command_buffer_inheritance_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
- command_buffer_begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
- command_buffer_begin_info.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
- command_buffer_begin_info.pInheritanceInfo = &command_buffer_inheritance_info;
-
- err = vkBeginCommandBuffer(secondary_command_buffer, &command_buffer_begin_info);
- ASSERT_VK_SUCCESS(err);
- VkImageObj image(m_device);
- image.Init(128, 128, 1, depth_format, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
- ASSERT_TRUE(image.initialized());
- VkImageMemoryBarrier img_barrier = {};
- img_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
- img_barrier.srcAccessMask = VK_ACCESS_HOST_WRITE_BIT;
- img_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
- img_barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
- img_barrier.newLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
- img_barrier.image = image.handle();
- img_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
- img_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
- img_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
- img_barrier.subresourceRange.baseArrayLayer = 0;
- img_barrier.subresourceRange.baseMipLevel = 0;
- img_barrier.subresourceRange.layerCount = 1;
- img_barrier.subresourceRange.levelCount = 1;
- vkCmdPipelineBarrier(secondary_command_buffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, 0, 0, nullptr,
- 0, nullptr, 1, &img_barrier);
- err = vkEndCommandBuffer(secondary_command_buffer);
- ASSERT_VK_SUCCESS(err);
-
- // Now record the primary command buffer to execute the secondary and then transition the image
- command_buffer_begin_info.pInheritanceInfo = nullptr;
- err = vkBeginCommandBuffer(primary_command_buffer, &command_buffer_begin_info);
- ASSERT_VK_SUCCESS(err);
- vkCmdExecuteCommands(primary_command_buffer, 1, &secondary_command_buffer);
- VkImageMemoryBarrier img_barrier2 = {};
- img_barrier2.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
- img_barrier2.srcAccessMask = VK_ACCESS_HOST_WRITE_BIT;
- img_barrier2.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
- img_barrier2.oldLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
- img_barrier2.newLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
- img_barrier2.image = image.handle();
- img_barrier2.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
- img_barrier2.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
- img_barrier2.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
- img_barrier2.subresourceRange.baseArrayLayer = 0;
- img_barrier2.subresourceRange.baseMipLevel = 0;
- img_barrier2.subresourceRange.layerCount = 1;
- img_barrier2.subresourceRange.levelCount = 1;
- vkCmdPipelineBarrier(primary_command_buffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, 0, 0, nullptr, 0,
- nullptr, 1, &img_barrier2);
- err = vkEndCommandBuffer(primary_command_buffer);
- ASSERT_VK_SUCCESS(err);
- VkSubmitInfo submit_info = {};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &primary_command_buffer;
- err = vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
- ASSERT_VK_SUCCESS(err);
- m_errorMonitor->VerifyNotFound();
- err = vkDeviceWaitIdle(m_device->device());
- ASSERT_VK_SUCCESS(err);
- vkFreeCommandBuffers(m_device->device(), m_commandPool->handle(), 1, &secondary_command_buffer);
- vkFreeCommandBuffers(m_device->device(), m_commandPool->handle(), 1, &primary_command_buffer);
-}
-
-// This is a positive test. No failures are expected.
-TEST_F(VkPositiveLayerTest, IgnoreUnrelatedDescriptor) {
- TEST_DESCRIPTION(
- "Ensure that the vkUpdateDescriptorSets validation code is ignoring VkWriteDescriptorSet members that are not related to "
- "the descriptor type specified by VkWriteDescriptorSet::descriptorType. Correct validation behavior will result in the "
- "test running to completion without validation errors.");
-
- const uintptr_t invalid_ptr = 0xcdcdcdcd;
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- // Verify VK_FORMAT_R8_UNORM supports VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT
- const VkFormat format_texel_case = VK_FORMAT_R8_UNORM;
- const char *format_texel_case_string = "VK_FORMAT_R8_UNORM";
- VkFormatProperties format_properties;
- vkGetPhysicalDeviceFormatProperties(gpu(), format_texel_case, &format_properties);
- if (!(format_properties.bufferFeatures & VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT)) {
- printf("%s Test requires %s to support VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT\n", kSkipPrefix, format_texel_case_string);
- return;
- }
-
- // Image Case
- {
- m_errorMonitor->ExpectSuccess();
-
- VkImageObj image(m_device);
- image.Init(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
-
- VkImageView view = image.targetView(VK_FORMAT_B8G8R8A8_UNORM);
-
- OneOffDescriptorSet descriptor_set(m_device, {
- {0, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1, VK_SHADER_STAGE_ALL, nullptr},
- });
-
- VkDescriptorImageInfo image_info = {};
- image_info.imageView = view;
- image_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
-
- VkWriteDescriptorSet descriptor_write;
- memset(&descriptor_write, 0, sizeof(descriptor_write));
- descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
- descriptor_write.dstSet = descriptor_set.set_;
- descriptor_write.dstBinding = 0;
- descriptor_write.descriptorCount = 1;
- descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
- descriptor_write.pImageInfo = &image_info;
-
- // Set pBufferInfo and pTexelBufferView to invalid values, which should be
- // ignored for descriptorType == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE.
- // This will most likely produce a crash if the parameter_validation
- // layer does not correctly ignore pBufferInfo.
- descriptor_write.pBufferInfo = reinterpret_cast<const VkDescriptorBufferInfo *>(invalid_ptr);
- descriptor_write.pTexelBufferView = reinterpret_cast<const VkBufferView *>(invalid_ptr);
-
- vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
-
- m_errorMonitor->VerifyNotFound();
- }
-
- // Buffer Case
- {
- m_errorMonitor->ExpectSuccess();
-
- uint32_t queue_family_index = 0;
- VkBufferCreateInfo buffer_create_info = {};
- buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
- buffer_create_info.size = 1024;
- buffer_create_info.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
- buffer_create_info.queueFamilyIndexCount = 1;
- buffer_create_info.pQueueFamilyIndices = &queue_family_index;
-
- VkBufferObj buffer;
- buffer.init(*m_device, buffer_create_info);
-
- OneOffDescriptorSet descriptor_set(m_device, {
- {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
- });
-
- VkDescriptorBufferInfo buffer_info = {};
- buffer_info.buffer = buffer.handle();
- buffer_info.offset = 0;
- buffer_info.range = 1024;
-
- VkWriteDescriptorSet descriptor_write;
- memset(&descriptor_write, 0, sizeof(descriptor_write));
- descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
- descriptor_write.dstSet = descriptor_set.set_;
- descriptor_write.dstBinding = 0;
- descriptor_write.descriptorCount = 1;
- descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
- descriptor_write.pBufferInfo = &buffer_info;
-
- // Set pImageInfo and pTexelBufferView to invalid values, which should be
- // ignored for descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER.
- // This will most likely produce a crash if the parameter_validation
- // layer does not correctly ignore pImageInfo.
- descriptor_write.pImageInfo = reinterpret_cast<const VkDescriptorImageInfo *>(invalid_ptr);
- descriptor_write.pTexelBufferView = reinterpret_cast<const VkBufferView *>(invalid_ptr);
-
- vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
-
- m_errorMonitor->VerifyNotFound();
- }
-
- // Texel Buffer Case
- {
- m_errorMonitor->ExpectSuccess();
-
- uint32_t queue_family_index = 0;
- VkBufferCreateInfo buffer_create_info = {};
- buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
- buffer_create_info.size = 1024;
- buffer_create_info.usage = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT;
- buffer_create_info.queueFamilyIndexCount = 1;
- buffer_create_info.pQueueFamilyIndices = &queue_family_index;
-
- VkBufferObj buffer;
- buffer.init(*m_device, buffer_create_info);
-
- VkBufferViewCreateInfo buff_view_ci = {};
- buff_view_ci.sType = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO;
- buff_view_ci.buffer = buffer.handle();
- buff_view_ci.format = format_texel_case;
- buff_view_ci.range = VK_WHOLE_SIZE;
- VkBufferView buffer_view;
- VkResult err = vkCreateBufferView(m_device->device(), &buff_view_ci, NULL, &buffer_view);
- ASSERT_VK_SUCCESS(err);
- OneOffDescriptorSet descriptor_set(m_device,
- {
- {0, VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
- });
-
- VkWriteDescriptorSet descriptor_write;
- memset(&descriptor_write, 0, sizeof(descriptor_write));
- descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
- descriptor_write.dstSet = descriptor_set.set_;
- descriptor_write.dstBinding = 0;
- descriptor_write.descriptorCount = 1;
- descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER;
- descriptor_write.pTexelBufferView = &buffer_view;
-
- // Set pImageInfo and pBufferInfo to invalid values, which should be
- // ignored for descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER.
- // This will most likely produce a crash if the parameter_validation
- // layer does not correctly ignore pImageInfo and pBufferInfo.
- descriptor_write.pImageInfo = reinterpret_cast<const VkDescriptorImageInfo *>(invalid_ptr);
- descriptor_write.pBufferInfo = reinterpret_cast<const VkDescriptorBufferInfo *>(invalid_ptr);
-
- vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
-
- m_errorMonitor->VerifyNotFound();
-
- vkDestroyBufferView(m_device->device(), buffer_view, NULL);
- }
-}
-
-TEST_F(VkPositiveLayerTest, ImmutableSamplerOnlyDescriptor) {
- TEST_DESCRIPTION("Bind a DescriptorSet with only an immutable sampler and make sure that we don't warn for no update.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- OneOffDescriptorSet descriptor_set(m_device, {
- {0, VK_DESCRIPTOR_TYPE_SAMPLER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
- });
-
- VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
- VkSampler sampler;
- VkResult err = vkCreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
- ASSERT_VK_SUCCESS(err);
-
- const VkPipelineLayoutObj pipeline_layout(m_device, {&descriptor_set.layout_});
-
- m_errorMonitor->ExpectSuccess();
- m_commandBuffer->begin();
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
-
- vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
- &descriptor_set.set_, 0, nullptr);
- m_errorMonitor->VerifyNotFound();
-
- vkDestroySampler(m_device->device(), sampler, NULL);
-
- m_commandBuffer->EndRenderPass();
- m_commandBuffer->end();
-}
-
-// This is a positive test. No failures are expected.
-TEST_F(VkPositiveLayerTest, EmptyDescriptorUpdateTest) {
- TEST_DESCRIPTION("Update last descriptor in a set that includes an empty binding");
- VkResult err;
-
- ASSERT_NO_FATAL_FAILURE(Init());
- m_errorMonitor->ExpectSuccess();
-
- // Create layout with two uniform buffer descriptors w/ empty binding between them
- OneOffDescriptorSet ds(m_device, {
- {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
- {1, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 0 /*!*/, 0, nullptr},
- {2, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
- });
-
- // Create a buffer to be used for update
- VkBufferCreateInfo buff_ci = {};
- buff_ci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
- buff_ci.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
- buff_ci.size = 256;
- buff_ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
- VkBuffer buffer;
- err = vkCreateBuffer(m_device->device(), &buff_ci, NULL, &buffer);
- ASSERT_VK_SUCCESS(err);
- // Have to bind memory to buffer before descriptor update
- VkMemoryAllocateInfo mem_alloc = {};
- mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
- mem_alloc.pNext = NULL;
- mem_alloc.allocationSize = 512;  // initial size; bumped below if the buffer's requirements are larger
- mem_alloc.memoryTypeIndex = 0;
-
- VkMemoryRequirements mem_reqs;
- vkGetBufferMemoryRequirements(m_device->device(), buffer, &mem_reqs);
- bool pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &mem_alloc, 0);
- if (!pass) {
- printf("%s Failed to allocate memory.\n", kSkipPrefix);
- vkDestroyBuffer(m_device->device(), buffer, NULL);
- return;
- }
- // Make sure allocation is sufficiently large to accommodate buffer requirements
- if (mem_reqs.size > mem_alloc.allocationSize) {
- mem_alloc.allocationSize = mem_reqs.size;
- }
-
- VkDeviceMemory mem;
- err = vkAllocateMemory(m_device->device(), &mem_alloc, NULL, &mem);
- ASSERT_VK_SUCCESS(err);
- err = vkBindBufferMemory(m_device->device(), buffer, mem, 0);
- ASSERT_VK_SUCCESS(err);
-
- // Only update the descriptor at binding 2
- VkDescriptorBufferInfo buff_info = {};
- buff_info.buffer = buffer;
- buff_info.offset = 0;
- buff_info.range = VK_WHOLE_SIZE;
- VkWriteDescriptorSet descriptor_write = {};
- descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
- descriptor_write.dstBinding = 2;
- descriptor_write.descriptorCount = 1;
- descriptor_write.pTexelBufferView = nullptr;
- descriptor_write.pBufferInfo = &buff_info;
- descriptor_write.pImageInfo = nullptr;
- descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
- descriptor_write.dstSet = ds.set_;
-
- vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
-
- m_errorMonitor->VerifyNotFound();
- // Cleanup
- vkFreeMemory(m_device->device(), mem, NULL);
- vkDestroyBuffer(m_device->device(), buffer, NULL);
-}
-
-// This is a positive test. No failures are expected.
-TEST_F(VkPositiveLayerTest, PushDescriptorNullDstSetTest) {
- TEST_DESCRIPTION("Use null dstSet in CmdPushDescriptorSetKHR");
-
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- } else {
- printf("%s Did not find VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME; skipped.\n", kSkipPrefix);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
- } else {
- printf("%s Push Descriptors Extension not supported, skipping tests\n", kSkipPrefix);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitState());
- m_errorMonitor->ExpectSuccess();
-
- auto push_descriptor_prop = GetPushDescriptorProperties(instance(), gpu());
- if (push_descriptor_prop.maxPushDescriptors < 1) {
- // Some implementations report an invalid maxPushDescriptors of 0
- printf("%s maxPushDescriptors is zero, skipping tests\n", kSkipPrefix);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitViewport());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkDescriptorSetLayoutBinding dsl_binding = {};
- dsl_binding.binding = 2;
- dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
- dsl_binding.descriptorCount = 1;
- dsl_binding.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
- dsl_binding.pImmutableSamplers = NULL;
-
- const VkDescriptorSetLayoutObj ds_layout(m_device, {dsl_binding});
- // Create push descriptor set layout
- const VkDescriptorSetLayoutObj push_ds_layout(m_device, {dsl_binding}, VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR);
-
- // Use helper to create graphics pipeline
- CreatePipelineHelper helper(*this);
- helper.InitInfo();
- helper.InitState();
- helper.pipeline_layout_ = VkPipelineLayoutObj(m_device, {&push_ds_layout, &ds_layout});
- helper.CreateGraphicsPipeline();
-
- const float vbo_data[3] = {1.f, 0.f, 1.f};
- VkConstantBufferObj vbo(m_device, sizeof(vbo_data), (const void *)&vbo_data, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT);
-
- VkDescriptorBufferInfo buff_info;
- buff_info.buffer = vbo.handle();
- buff_info.offset = 0;
- buff_info.range = sizeof(vbo_data);
- VkWriteDescriptorSet descriptor_write = {};
- descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
- descriptor_write.dstBinding = 2;
- descriptor_write.descriptorCount = 1;
- descriptor_write.pTexelBufferView = nullptr;
- descriptor_write.pBufferInfo = &buff_info;
- descriptor_write.pImageInfo = nullptr;
- descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
- descriptor_write.dstSet = 0; // Should not cause a validation error
-
- // Find address of extension call and make the call
- PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR =
- (PFN_vkCmdPushDescriptorSetKHR)vkGetDeviceProcAddr(m_device->device(), "vkCmdPushDescriptorSetKHR");
- assert(vkCmdPushDescriptorSetKHR != nullptr);
-
- m_commandBuffer->begin();
-
- // On Intel GPUs, the pipeline must be bound before pushing the descriptor set.
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, helper.pipeline_);
- vkCmdPushDescriptorSetKHR(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, helper.pipeline_layout_.handle(), 0, 1,
- &descriptor_write);
-
- m_errorMonitor->VerifyNotFound();
-}
-
-// This is a positive test. No failures are expected.
-TEST_F(VkPositiveLayerTest, PushDescriptorUnboundSetTest) {
- TEST_DESCRIPTION("Ensure that no validation errors are produced for not bound push descriptor sets");
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- } else {
- printf("%s Did not find VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME; skipped.\n", kSkipPrefix);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
- } else {
- printf("%s Push Descriptors Extension not supported, skipping tests\n", kSkipPrefix);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- auto push_descriptor_prop = GetPushDescriptorProperties(instance(), gpu());
- if (push_descriptor_prop.maxPushDescriptors < 1) {
- // Some implementations report an invalid maxPushDescriptors of 0
- printf("%s maxPushDescriptors is zero, skipping tests\n", kSkipPrefix);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitViewport());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
- m_errorMonitor->ExpectSuccess();
-
- // Create descriptor set layout
- VkDescriptorSetLayoutBinding dsl_binding = {};
- dsl_binding.binding = 2;
- dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
- dsl_binding.descriptorCount = 1;
- dsl_binding.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
- dsl_binding.pImmutableSamplers = NULL;
-
- OneOffDescriptorSet descriptor_set(m_device, {dsl_binding}, 0, nullptr, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
- nullptr);
-
- // Create push descriptor set layout
- const VkDescriptorSetLayoutObj push_ds_layout(m_device, {dsl_binding}, VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR);
-
- // Create PSO
- char const fsSource[] =
- "#version 450\n"
- "\n"
- "layout(location=0) out vec4 x;\n"
- "layout(set=0) layout(binding=2) uniform foo1 { float x; } bar1;\n"
- "layout(set=1) layout(binding=2) uniform foo2 { float y; } bar2;\n"
- "void main(){\n"
- " x = vec4(bar1.x) + vec4(bar2.y);\n"
- "}\n";
- VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
- CreatePipelineHelper pipe(*this);
- pipe.InitInfo();
- pipe.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
- pipe.InitState();
- // Now use the descriptor layouts to create a pipeline layout
- pipe.pipeline_layout_ = VkPipelineLayoutObj(m_device, {&push_ds_layout, &descriptor_set.layout_});
- pipe.CreateGraphicsPipeline();
-
- const float bo_data[1] = {1.f};
- VkConstantBufferObj buffer(m_device, sizeof(bo_data), (const void *)&bo_data, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT);
-
- // Update descriptor set
- descriptor_set.WriteDescriptorBufferInfo(2, buffer.handle(), sizeof(bo_data));
- descriptor_set.UpdateDescriptorSets();
-
- PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR =
- (PFN_vkCmdPushDescriptorSetKHR)vkGetDeviceProcAddr(m_device->device(), "vkCmdPushDescriptorSetKHR");
- assert(vkCmdPushDescriptorSetKHR != nullptr);
-
- m_commandBuffer->begin();
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
-
- // Push descriptors and bind descriptor set
- vkCmdPushDescriptorSetKHR(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_layout_.handle(), 0, 1,
- descriptor_set.descriptor_writes.data());
- vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_layout_.handle(), 1, 1,
- &descriptor_set.set_, 0, NULL);
-
- // No errors should be generated.
- vkCmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
-
- m_errorMonitor->VerifyNotFound();
-
- m_commandBuffer->EndRenderPass();
- m_commandBuffer->end();
-}
-
-TEST_F(VkPositiveLayerTest, PushDescriptorSetUpdatingSetNumber) {
- TEST_DESCRIPTION(
- "Ensure that no validation errors are produced when the push descriptor set number changes "
- "between two vkCmdPushDescriptorSetKHR calls.");
-
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- } else {
- printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix,
- VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
- } else {
- printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitState());
- auto push_descriptor_prop = GetPushDescriptorProperties(instance(), gpu());
- if (push_descriptor_prop.maxPushDescriptors < 1) {
- // Some implementations report an invalid maxPushDescriptors of 0
- printf("%s maxPushDescriptors is zero, skipping tests\n", kSkipPrefix);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitViewport());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
- m_errorMonitor->ExpectSuccess();
-
- // Create a descriptor to push
- const uint32_t buffer_data[4] = {4, 5, 6, 7};
- VkConstantBufferObj buffer_obj(
- m_device, sizeof(buffer_data), &buffer_data,
- VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT);
- ASSERT_TRUE(buffer_obj.initialized());
-
- VkDescriptorBufferInfo buffer_info = {buffer_obj.handle(), 0, VK_WHOLE_SIZE};
-
- PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR =
- (PFN_vkCmdPushDescriptorSetKHR)vkGetDeviceProcAddr(m_device->device(), "vkCmdPushDescriptorSetKHR");
- ASSERT_TRUE(vkCmdPushDescriptorSetKHR != nullptr);
-
- const VkDescriptorSetLayoutBinding ds_binding_0 = {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT,
- nullptr};
- const VkDescriptorSetLayoutBinding ds_binding_1 = {1, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT,
- nullptr};
- const VkDescriptorSetLayoutObj ds_layout(m_device, {ds_binding_0, ds_binding_1});
- ASSERT_TRUE(ds_layout.initialized());
-
- const VkDescriptorSetLayoutBinding push_ds_binding_0 = {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT,
- nullptr};
- const VkDescriptorSetLayoutObj push_ds_layout(m_device, {push_ds_binding_0},
- VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR);
- ASSERT_TRUE(push_ds_layout.initialized());
-
- m_commandBuffer->begin();
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
-
- VkPipelineObj pipe0(m_device);
- VkPipelineObj pipe1(m_device);
- {
- // Note: the push descriptor set is set number 2.
- const VkPipelineLayoutObj pipeline_layout(m_device, {&ds_layout, &ds_layout, &push_ds_layout, &ds_layout});
- ASSERT_TRUE(pipeline_layout.initialized());
-
- char const *fsSource =
- "#version 450\n"
- "\n"
- "layout(location=0) out vec4 x;\n"
- "layout(set=2) layout(binding=0) uniform foo { vec4 y; } bar;\n"
- "void main(){\n"
- " x = bar.y;\n"
- "}\n";
-
- VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
- VkPipelineObj &pipe = pipe0;
- pipe.SetViewport(m_viewports);
- pipe.SetScissor(m_scissors);
- pipe.AddShader(&vs);
- pipe.AddShader(&fs);
- pipe.AddDefaultColorAttachment();
- pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());
-
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
-
- const VkWriteDescriptorSet descriptor_write = vk_testing::Device::write_descriptor_set(
- vk_testing::DescriptorSet(), 0, 0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, &buffer_info);
-
- // Note: pushing to descriptor set number 2.
- vkCmdPushDescriptorSetKHR(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 2, 1,
- &descriptor_write);
- vkCmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
- }
-
- m_errorMonitor->VerifyNotFound();
-
- {
- // Note: the push descriptor set is now set number 3.
- const VkPipelineLayoutObj pipeline_layout(m_device, {&ds_layout, &ds_layout, &ds_layout, &push_ds_layout});
- ASSERT_TRUE(pipeline_layout.initialized());
-
- const VkWriteDescriptorSet descriptor_write = vk_testing::Device::write_descriptor_set(
- vk_testing::DescriptorSet(), 0, 0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, &buffer_info);
-
- char const *fsSource =
- "#version 450\n"
- "\n"
- "layout(location=0) out vec4 x;\n"
- "layout(set=3) layout(binding=0) uniform foo { vec4 y; } bar;\n"
- "void main(){\n"
- " x = bar.y;\n"
- "}\n";
-
- VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
- VkPipelineObj &pipe = pipe1;
- pipe.SetViewport(m_viewports);
- pipe.SetScissor(m_scissors);
- pipe.AddShader(&vs);
- pipe.AddShader(&fs);
- pipe.AddDefaultColorAttachment();
- pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());
-
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
-
- // Note: now pushing to descriptor set number 3.
- vkCmdPushDescriptorSetKHR(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 3, 1,
- &descriptor_write);
- vkCmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
- }
-
- m_errorMonitor->VerifyNotFound();
-
- m_commandBuffer->EndRenderPass();
- m_commandBuffer->end();
-}
-
-// This is a positive test. No failures are expected.
-TEST_F(VkPositiveLayerTest, TestAliasedMemoryTracking) {
- VkResult err;
- bool pass;
-
- TEST_DESCRIPTION(
- "Create a buffer, allocate memory, bind memory, destroy the buffer, create an image, and bind the same memory to it");
-
- m_errorMonitor->ExpectSuccess();
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- VkBuffer buffer;
- VkImage image;
- VkDeviceMemory mem;
- VkMemoryRequirements mem_reqs;
-
- VkBufferCreateInfo buf_info = {};
- buf_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
- buf_info.pNext = NULL;
- buf_info.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
- buf_info.size = 256;
- buf_info.queueFamilyIndexCount = 0;
- buf_info.pQueueFamilyIndices = NULL;
- buf_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
- buf_info.flags = 0;
- err = vkCreateBuffer(m_device->device(), &buf_info, NULL, &buffer);
- ASSERT_VK_SUCCESS(err);
-
- vkGetBufferMemoryRequirements(m_device->device(), buffer, &mem_reqs);
-
- VkMemoryAllocateInfo alloc_info = {};
- alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
- alloc_info.pNext = NULL;
- alloc_info.memoryTypeIndex = 0;
-
- // Ensure memory is big enough for both bindings
- alloc_info.allocationSize = 0x10000;
-
- pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &alloc_info, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
- if (!pass) {
- printf("%s Failed to allocate memory.\n", kSkipPrefix);
- vkDestroyBuffer(m_device->device(), buffer, NULL);
- return;
- }
-
- err = vkAllocateMemory(m_device->device(), &alloc_info, NULL, &mem);
- ASSERT_VK_SUCCESS(err);
-
- uint8_t *pData;
- err = vkMapMemory(m_device->device(), mem, 0, mem_reqs.size, 0, (void **)&pData);
- ASSERT_VK_SUCCESS(err);
-
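-    // Fill the mapping with a pattern; note that memset only uses the low byte of the value, so every byte becomes 0xDE.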
- memset(pData, 0xCADECADE, static_cast<size_t>(mem_reqs.size));
-
- vkUnmapMemory(m_device->device(), mem);
-
- err = vkBindBufferMemory(m_device->device(), buffer, mem, 0);
- ASSERT_VK_SUCCESS(err);
-
-    // Now destroy the buffer. The resource no longer occupies this memory;
-    // in fact, it was never used by the GPU.
-    // Just to be sure, wait for the device to go idle.
- vkDestroyBuffer(m_device->device(), buffer, NULL);
- vkDeviceWaitIdle(m_device->device());
-
- // Use optimal as some platforms report linear support but then fail image creation
- VkImageTiling image_tiling = VK_IMAGE_TILING_OPTIMAL;
- VkImageFormatProperties image_format_properties;
- vkGetPhysicalDeviceImageFormatProperties(gpu(), VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_TYPE_2D, image_tiling,
- VK_IMAGE_USAGE_TRANSFER_SRC_BIT, 0, &image_format_properties);
- if (image_format_properties.maxExtent.width == 0) {
- printf("%s Image format not supported; skipped.\n", kSkipPrefix);
- vkFreeMemory(m_device->device(), mem, NULL);
- return;
- }
- VkImageCreateInfo image_create_info = {};
- image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- image_create_info.pNext = NULL;
- image_create_info.imageType = VK_IMAGE_TYPE_2D;
- image_create_info.format = VK_FORMAT_R8G8B8A8_UNORM;
- image_create_info.extent.width = 64;
- image_create_info.extent.height = 64;
- image_create_info.extent.depth = 1;
- image_create_info.mipLevels = 1;
- image_create_info.arrayLayers = 1;
- image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
- image_create_info.tiling = image_tiling;
- image_create_info.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
- image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
- image_create_info.queueFamilyIndexCount = 0;
- image_create_info.pQueueFamilyIndices = NULL;
- image_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
- image_create_info.flags = 0;
-
-    /* Create an image that will be bound to the same memory the destroyed
-     * buffer previously occupied.
-     */
- err = vkCreateImage(m_device->device(), &image_create_info, NULL, &image);
- ASSERT_VK_SUCCESS(err);
-
- vkGetImageMemoryRequirements(m_device->device(), image, &mem_reqs);
-
- VkMemoryAllocateInfo mem_alloc = {};
- mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
- mem_alloc.pNext = NULL;
-    mem_alloc.memoryTypeIndex = 0;
-    mem_alloc.allocationSize = mem_reqs.size;
-
- pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &mem_alloc, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
- if (!pass) {
- printf("%s Failed to allocate memory.\n", kSkipPrefix);
- vkFreeMemory(m_device->device(), mem, NULL);
- vkDestroyImage(m_device->device(), image, NULL);
- return;
- }
-
-    // Bind the image to the memory previously used by the destroyed buffer; validation must not flag this.
- err = vkBindImageMemory(m_device->device(), image, mem, 0);
- ASSERT_VK_SUCCESS(err);
-
- m_errorMonitor->VerifyNotFound();
-
- vkFreeMemory(m_device->device(), mem, NULL);
- vkDestroyImage(m_device->device(), image, NULL);
-}
-
-// This is a positive test. No failures are expected.
-TEST_F(VkPositiveLayerTest, TestDestroyFreeNullHandles) {
- VkResult err;
-
- TEST_DESCRIPTION("Call all applicable destroy and free routines with NULL handles, expecting no validation errors");
-
- m_errorMonitor->ExpectSuccess();
-
- ASSERT_NO_FATAL_FAILURE(Init());
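-    // Destroying or freeing VK_NULL_HANDLE is defined to be a no-op, so none of the calls below should generate errors.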
- vkDestroyBuffer(m_device->device(), VK_NULL_HANDLE, NULL);
- vkDestroyBufferView(m_device->device(), VK_NULL_HANDLE, NULL);
- vkDestroyCommandPool(m_device->device(), VK_NULL_HANDLE, NULL);
- vkDestroyDescriptorPool(m_device->device(), VK_NULL_HANDLE, NULL);
- vkDestroyDescriptorSetLayout(m_device->device(), VK_NULL_HANDLE, NULL);
- vkDestroyDevice(VK_NULL_HANDLE, NULL);
- vkDestroyEvent(m_device->device(), VK_NULL_HANDLE, NULL);
- vkDestroyFence(m_device->device(), VK_NULL_HANDLE, NULL);
- vkDestroyFramebuffer(m_device->device(), VK_NULL_HANDLE, NULL);
- vkDestroyImage(m_device->device(), VK_NULL_HANDLE, NULL);
- vkDestroyImageView(m_device->device(), VK_NULL_HANDLE, NULL);
- vkDestroyInstance(VK_NULL_HANDLE, NULL);
- vkDestroyPipeline(m_device->device(), VK_NULL_HANDLE, NULL);
- vkDestroyPipelineCache(m_device->device(), VK_NULL_HANDLE, NULL);
- vkDestroyPipelineLayout(m_device->device(), VK_NULL_HANDLE, NULL);
- vkDestroyQueryPool(m_device->device(), VK_NULL_HANDLE, NULL);
- vkDestroyRenderPass(m_device->device(), VK_NULL_HANDLE, NULL);
- vkDestroySampler(m_device->device(), VK_NULL_HANDLE, NULL);
- vkDestroySemaphore(m_device->device(), VK_NULL_HANDLE, NULL);
- vkDestroyShaderModule(m_device->device(), VK_NULL_HANDLE, NULL);
-
- VkCommandPool command_pool;
- VkCommandPoolCreateInfo pool_create_info{};
- pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
- pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
- pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
- vkCreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool);
- VkCommandBuffer command_buffers[3] = {};
- VkCommandBufferAllocateInfo command_buffer_allocate_info{};
- command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
- command_buffer_allocate_info.commandPool = command_pool;
- command_buffer_allocate_info.commandBufferCount = 1;
- command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
- vkAllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, &command_buffers[1]);
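-    // Only command_buffers[1] is a real handle; freeing the whole array (including two VK_NULL_HANDLEs) must not generate errors.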
- vkFreeCommandBuffers(m_device->device(), command_pool, 3, command_buffers);
- vkDestroyCommandPool(m_device->device(), command_pool, NULL);
-
- VkDescriptorPoolSize ds_type_count = {};
- ds_type_count.type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
- ds_type_count.descriptorCount = 1;
-
- VkDescriptorPoolCreateInfo ds_pool_ci = {};
- ds_pool_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
- ds_pool_ci.pNext = NULL;
- ds_pool_ci.maxSets = 1;
- ds_pool_ci.poolSizeCount = 1;
- ds_pool_ci.flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT;
- ds_pool_ci.pPoolSizes = &ds_type_count;
-
- VkDescriptorPool ds_pool;
- err = vkCreateDescriptorPool(m_device->device(), &ds_pool_ci, NULL, &ds_pool);
- ASSERT_VK_SUCCESS(err);
-
- VkDescriptorSetLayoutBinding dsl_binding = {};
- dsl_binding.binding = 2;
- dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
- dsl_binding.descriptorCount = 1;
- dsl_binding.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
- dsl_binding.pImmutableSamplers = NULL;
-
- const VkDescriptorSetLayoutObj ds_layout(m_device, {dsl_binding});
-
- VkDescriptorSet descriptor_sets[3] = {};
- VkDescriptorSetAllocateInfo alloc_info = {};
- alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
- alloc_info.descriptorSetCount = 1;
- alloc_info.descriptorPool = ds_pool;
- alloc_info.pSetLayouts = &ds_layout.handle();
- err = vkAllocateDescriptorSets(m_device->device(), &alloc_info, &descriptor_sets[1]);
- ASSERT_VK_SUCCESS(err);
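-    // Only descriptor_sets[1] is a real handle; freeing all three (including two VK_NULL_HANDLEs) must not generate errors.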
- vkFreeDescriptorSets(m_device->device(), ds_pool, 3, descriptor_sets);
- vkDestroyDescriptorPool(m_device->device(), ds_pool, NULL);
-
- vkFreeMemory(m_device->device(), VK_NULL_HANDLE, NULL);
-
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, QueueSubmitSemaphoresAndLayoutTracking) {
- TEST_DESCRIPTION("Submit multiple command buffers with chained semaphore signals and layout transitions");
-
- m_errorMonitor->ExpectSuccess();
-
- ASSERT_NO_FATAL_FAILURE(Init());
- VkCommandBuffer cmd_bufs[4];
- VkCommandBufferAllocateInfo alloc_info;
- alloc_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
- alloc_info.pNext = NULL;
- alloc_info.commandBufferCount = 4;
- alloc_info.commandPool = m_commandPool->handle();
- alloc_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
- vkAllocateCommandBuffers(m_device->device(), &alloc_info, cmd_bufs);
- VkImageObj image(m_device);
- image.Init(128, 128, 1, VK_FORMAT_B8G8R8A8_UNORM,
- (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT),
- VK_IMAGE_TILING_OPTIMAL, 0);
- ASSERT_TRUE(image.initialized());
- VkCommandBufferBeginInfo cb_binfo;
- cb_binfo.pNext = NULL;
- cb_binfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
- cb_binfo.pInheritanceInfo = VK_NULL_HANDLE;
- cb_binfo.flags = 0;
- // Use 4 command buffers, each with an image layout transition, ColorAO->General->ColorAO->TransferSrc->TransferDst
- vkBeginCommandBuffer(cmd_bufs[0], &cb_binfo);
- VkImageMemoryBarrier img_barrier = {};
- img_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
- img_barrier.pNext = NULL;
- img_barrier.srcAccessMask = VK_ACCESS_HOST_WRITE_BIT;
- img_barrier.dstAccessMask = VK_ACCESS_HOST_WRITE_BIT;
- img_barrier.oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
- img_barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
- img_barrier.image = image.handle();
- img_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
- img_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
- img_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- img_barrier.subresourceRange.baseArrayLayer = 0;
- img_barrier.subresourceRange.baseMipLevel = 0;
- img_barrier.subresourceRange.layerCount = 1;
- img_barrier.subresourceRange.levelCount = 1;
- vkCmdPipelineBarrier(cmd_bufs[0], VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_HOST_BIT, 0, 0, nullptr, 0, nullptr, 1,
- &img_barrier);
- vkEndCommandBuffer(cmd_bufs[0]);
- vkBeginCommandBuffer(cmd_bufs[1], &cb_binfo);
- img_barrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
- img_barrier.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
- vkCmdPipelineBarrier(cmd_bufs[1], VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_HOST_BIT, 0, 0, nullptr, 0, nullptr, 1,
- &img_barrier);
- vkEndCommandBuffer(cmd_bufs[1]);
- vkBeginCommandBuffer(cmd_bufs[2], &cb_binfo);
- img_barrier.oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
- img_barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
- vkCmdPipelineBarrier(cmd_bufs[2], VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_HOST_BIT, 0, 0, nullptr, 0, nullptr, 1,
- &img_barrier);
- vkEndCommandBuffer(cmd_bufs[2]);
- vkBeginCommandBuffer(cmd_bufs[3], &cb_binfo);
- img_barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
- img_barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
- vkCmdPipelineBarrier(cmd_bufs[3], VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_HOST_BIT, 0, 0, nullptr, 0, nullptr, 1,
- &img_barrier);
- vkEndCommandBuffer(cmd_bufs[3]);
-
- // Submit 4 command buffers in 3 submits, with submits 2 and 3 waiting for semaphores from submits 1 and 2
- VkSemaphore semaphore1, semaphore2;
- VkSemaphoreCreateInfo semaphore_create_info{};
- semaphore_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
- vkCreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore1);
- vkCreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore2);
- VkPipelineStageFlags flags[]{VK_PIPELINE_STAGE_ALL_COMMANDS_BIT};
- VkSubmitInfo submit_info[3];
- submit_info[0].sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info[0].pNext = nullptr;
- submit_info[0].commandBufferCount = 1;
- submit_info[0].pCommandBuffers = &cmd_bufs[0];
- submit_info[0].signalSemaphoreCount = 1;
- submit_info[0].pSignalSemaphores = &semaphore1;
- submit_info[0].waitSemaphoreCount = 0;
-    submit_info[0].pWaitDstStageMask = flags;
- submit_info[1].sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info[1].pNext = nullptr;
- submit_info[1].commandBufferCount = 1;
- submit_info[1].pCommandBuffers = &cmd_bufs[1];
- submit_info[1].waitSemaphoreCount = 1;
- submit_info[1].pWaitSemaphores = &semaphore1;
- submit_info[1].signalSemaphoreCount = 1;
- submit_info[1].pSignalSemaphores = &semaphore2;
- submit_info[1].pWaitDstStageMask = flags;
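-    // The third submit batches cmd_bufs[2] and cmd_bufs[3] together and waits on semaphore2.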
- submit_info[2].sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info[2].pNext = nullptr;
- submit_info[2].commandBufferCount = 2;
- submit_info[2].pCommandBuffers = &cmd_bufs[2];
- submit_info[2].waitSemaphoreCount = 1;
- submit_info[2].pWaitSemaphores = &semaphore2;
- submit_info[2].signalSemaphoreCount = 0;
- submit_info[2].pSignalSemaphores = nullptr;
- submit_info[2].pWaitDstStageMask = flags;
- vkQueueSubmit(m_device->m_queue, 3, submit_info, VK_NULL_HANDLE);
- vkQueueWaitIdle(m_device->m_queue);
-
- vkDestroySemaphore(m_device->device(), semaphore1, NULL);
- vkDestroySemaphore(m_device->device(), semaphore2, NULL);
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, DynamicOffsetWithInactiveBinding) {
- // Create a descriptorSet w/ dynamic descriptors where 1 binding is inactive
-    // We previously had a bug where the dynamic offset of inactive bindings was still being used
- m_errorMonitor->ExpectSuccess();
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitViewport());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- OneOffDescriptorSet descriptor_set(m_device,
- {
- {2, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
- {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
- {1, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
- });
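-    // The bindings are listed out of order (2, 0, 1); binding 1 is never referenced by the shader below, making it the inactive binding.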
-
- // Create two buffers to update the descriptors with
- // The first will be 2k and used for bindings 0 & 1, the second is 1k for binding 2
- uint32_t qfi = 0;
- VkBufferCreateInfo buffCI = {};
- buffCI.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
- buffCI.size = 2048;
- buffCI.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
- buffCI.queueFamilyIndexCount = 1;
- buffCI.pQueueFamilyIndices = &qfi;
-
- VkBufferObj dynamic_uniform_buffer_1, dynamic_uniform_buffer_2;
- dynamic_uniform_buffer_1.init(*m_device, buffCI);
- buffCI.size = 1024;
- dynamic_uniform_buffer_2.init(*m_device, buffCI);
-
- // Update descriptors
- const uint32_t BINDING_COUNT = 3;
- VkDescriptorBufferInfo buff_info[BINDING_COUNT] = {};
- buff_info[0].buffer = dynamic_uniform_buffer_1.handle();
- buff_info[0].offset = 0;
- buff_info[0].range = 256;
- buff_info[1].buffer = dynamic_uniform_buffer_1.handle();
- buff_info[1].offset = 256;
- buff_info[1].range = 512;
- buff_info[2].buffer = dynamic_uniform_buffer_2.handle();
- buff_info[2].offset = 0;
- buff_info[2].range = 512;
-
- VkWriteDescriptorSet descriptor_write;
- memset(&descriptor_write, 0, sizeof(descriptor_write));
- descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
- descriptor_write.dstSet = descriptor_set.set_;
- descriptor_write.dstBinding = 0;
- descriptor_write.descriptorCount = BINDING_COUNT;
- descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
- descriptor_write.pBufferInfo = buff_info;
-
- vkUpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
-
- m_commandBuffer->begin();
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
-
- // Create PSO to be used for draw-time errors below
- char const *fsSource =
- "#version 450\n"
- "\n"
- "layout(location=0) out vec4 x;\n"
- "layout(set=0) layout(binding=0) uniform foo1 { int x; int y; } bar1;\n"
- "layout(set=0) layout(binding=2) uniform foo2 { int x; int y; } bar2;\n"
- "void main(){\n"
- " x = vec4(bar1.y) + vec4(bar2.y);\n"
- "}\n";
- VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- CreatePipelineHelper pipe(*this);
- pipe.InitInfo();
- pipe.InitState();
- pipe.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
- pipe.pipeline_layout_ = VkPipelineLayoutObj(m_device, {&descriptor_set.layout_});
- pipe.CreateGraphicsPipeline();
-
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
-    // This bind should succeed even though the dynamic offset of inactive binding 1 oversteps binding 2's buffer size;
-    // we used to have a bug in this case.
- uint32_t dyn_off[BINDING_COUNT] = {0, 1024, 256};
- vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_layout_.handle(), 0, 1,
- &descriptor_set.set_, BINDING_COUNT, dyn_off);
- m_commandBuffer->Draw(1, 0, 0, 0);
- m_errorMonitor->VerifyNotFound();
-
- m_commandBuffer->EndRenderPass();
- m_commandBuffer->end();
-}
-
-TEST_F(VkPositiveLayerTest, NonCoherentMemoryMapping) {
-    TEST_DESCRIPTION(
-        "Ensure that validation's handling of non-coherent memory mapping while using VK_WHOLE_SIZE does not cause access "
-        "violations");
- VkResult err;
- uint8_t *pData;
- ASSERT_NO_FATAL_FAILURE(Init());
-
- VkDeviceMemory mem;
- VkMemoryRequirements mem_reqs;
- mem_reqs.memoryTypeBits = 0xFFFFFFFF;
- const VkDeviceSize atom_size = m_device->props.limits.nonCoherentAtomSize;
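-    // All map/flush/invalidate offsets and sizes below are multiples of nonCoherentAtomSize (or VK_WHOLE_SIZE), as the spec requires for non-coherent memory.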
- VkMemoryAllocateInfo alloc_info = {};
- alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
- alloc_info.pNext = NULL;
- alloc_info.memoryTypeIndex = 0;
-
- static const VkDeviceSize allocation_size = 32 * atom_size;
- alloc_info.allocationSize = allocation_size;
-
-    // Find a memory configuration WITHOUT the COHERENT bit, otherwise skip the test
- bool pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &alloc_info, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT,
- VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
- if (!pass) {
- pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &alloc_info,
- VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
- VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
- if (!pass) {
- pass = m_device->phy().set_memory_type(
- mem_reqs.memoryTypeBits, &alloc_info,
- VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT,
- VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
- if (!pass) {
-                printf("%s Couldn't find a memory type without a COHERENT bit.\n", kSkipPrefix);
- return;
- }
- }
- }
-
- err = vkAllocateMemory(m_device->device(), &alloc_info, NULL, &mem);
- ASSERT_VK_SUCCESS(err);
-
- // Map/Flush/Invalidate using WHOLE_SIZE and zero offsets and entire mapped range
- m_errorMonitor->ExpectSuccess();
- err = vkMapMemory(m_device->device(), mem, 0, VK_WHOLE_SIZE, 0, (void **)&pData);
- ASSERT_VK_SUCCESS(err);
- VkMappedMemoryRange mmr = {};
- mmr.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
- mmr.memory = mem;
- mmr.offset = 0;
- mmr.size = VK_WHOLE_SIZE;
- err = vkFlushMappedMemoryRanges(m_device->device(), 1, &mmr);
- ASSERT_VK_SUCCESS(err);
- err = vkInvalidateMappedMemoryRanges(m_device->device(), 1, &mmr);
- ASSERT_VK_SUCCESS(err);
- m_errorMonitor->VerifyNotFound();
- vkUnmapMemory(m_device->device(), mem);
-
- // Map/Flush/Invalidate using WHOLE_SIZE and an offset and entire mapped range
- m_errorMonitor->ExpectSuccess();
- err = vkMapMemory(m_device->device(), mem, 5 * atom_size, VK_WHOLE_SIZE, 0, (void **)&pData);
- ASSERT_VK_SUCCESS(err);
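-    // VkMappedMemoryRange offsets are relative to the start of the memory object, not the mapping, so 6 * atom_size still falls inside the mapped region.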
- mmr.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
- mmr.memory = mem;
- mmr.offset = 6 * atom_size;
- mmr.size = VK_WHOLE_SIZE;
- err = vkFlushMappedMemoryRanges(m_device->device(), 1, &mmr);
- ASSERT_VK_SUCCESS(err);
- err = vkInvalidateMappedMemoryRanges(m_device->device(), 1, &mmr);
- ASSERT_VK_SUCCESS(err);
- m_errorMonitor->VerifyNotFound();
- vkUnmapMemory(m_device->device(), mem);
-
- // Map with offset and size
- // Flush/Invalidate subrange of mapped area with offset and size
- m_errorMonitor->ExpectSuccess();
- err = vkMapMemory(m_device->device(), mem, 3 * atom_size, 9 * atom_size, 0, (void **)&pData);
- ASSERT_VK_SUCCESS(err);
- mmr.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
- mmr.memory = mem;
- mmr.offset = 4 * atom_size;
- mmr.size = 2 * atom_size;
- err = vkFlushMappedMemoryRanges(m_device->device(), 1, &mmr);
- ASSERT_VK_SUCCESS(err);
- err = vkInvalidateMappedMemoryRanges(m_device->device(), 1, &mmr);
- ASSERT_VK_SUCCESS(err);
- m_errorMonitor->VerifyNotFound();
- vkUnmapMemory(m_device->device(), mem);
-
- // Map without offset and flush WHOLE_SIZE with two separate offsets
- m_errorMonitor->ExpectSuccess();
- err = vkMapMemory(m_device->device(), mem, 0, VK_WHOLE_SIZE, 0, (void **)&pData);
- ASSERT_VK_SUCCESS(err);
- mmr.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
- mmr.memory = mem;
- mmr.offset = allocation_size - (4 * atom_size);
- mmr.size = VK_WHOLE_SIZE;
- err = vkFlushMappedMemoryRanges(m_device->device(), 1, &mmr);
- ASSERT_VK_SUCCESS(err);
- mmr.offset = allocation_size - (6 * atom_size);
- mmr.size = VK_WHOLE_SIZE;
- err = vkFlushMappedMemoryRanges(m_device->device(), 1, &mmr);
- ASSERT_VK_SUCCESS(err);
- m_errorMonitor->VerifyNotFound();
- vkUnmapMemory(m_device->device(), mem);
-
- vkFreeMemory(m_device->device(), mem, NULL);
-}
-
-// This is a positive test. We used to expect an error in this case, but the spec now allows it.
-TEST_F(VkPositiveLayerTest, ResetUnsignaledFence) {
- m_errorMonitor->ExpectSuccess();
- vk_testing::Fence testFence;
- VkFenceCreateInfo fenceInfo = {};
- fenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
- fenceInfo.pNext = NULL;
-
- ASSERT_NO_FATAL_FAILURE(Init());
- testFence.init(*m_device, fenceInfo);
- VkFence fences[1] = {testFence.handle()};
- VkResult result = vkResetFences(m_device->device(), 1, fences);
- ASSERT_VK_SUCCESS(result);
-
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, CommandBufferSimultaneousUseSync) {
- m_errorMonitor->ExpectSuccess();
-
- ASSERT_NO_FATAL_FAILURE(Init());
- VkResult err;
-
-    // Record an (empty!) command buffer that can be submitted multiple times
- // simultaneously.
- VkCommandBufferBeginInfo cbbi = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, nullptr,
- VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT, nullptr};
- m_commandBuffer->begin(&cbbi);
- m_commandBuffer->end();
-
- VkFenceCreateInfo fci = {VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, nullptr, 0};
- VkFence fence;
- err = vkCreateFence(m_device->device(), &fci, nullptr, &fence);
- ASSERT_VK_SUCCESS(err);
-
- VkSemaphoreCreateInfo sci = {VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO, nullptr, 0};
- VkSemaphore s1, s2;
- err = vkCreateSemaphore(m_device->device(), &sci, nullptr, &s1);
- ASSERT_VK_SUCCESS(err);
- err = vkCreateSemaphore(m_device->device(), &sci, nullptr, &s2);
- ASSERT_VK_SUCCESS(err);
-
- // Submit CB once signaling s1, with fence so we can roll forward to its retirement.
- VkSubmitInfo si = {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 0, nullptr, nullptr, 1, &m_commandBuffer->handle(), 1, &s1};
- err = vkQueueSubmit(m_device->m_queue, 1, &si, fence);
- ASSERT_VK_SUCCESS(err);
-
- // Submit CB again, signaling s2.
- si.pSignalSemaphores = &s2;
- err = vkQueueSubmit(m_device->m_queue, 1, &si, VK_NULL_HANDLE);
- ASSERT_VK_SUCCESS(err);
-
- // Wait for fence.
- err = vkWaitForFences(m_device->device(), 1, &fence, VK_TRUE, UINT64_MAX);
- ASSERT_VK_SUCCESS(err);
-
-    // The CB is still in flight from the second submission, but semaphore s1 is no
-    // longer in flight, so it can be destroyed.
- vkDestroySemaphore(m_device->device(), s1, nullptr);
-
- m_errorMonitor->VerifyNotFound();
-
- // Force device idle and clean up remaining objects
- vkDeviceWaitIdle(m_device->device());
- vkDestroySemaphore(m_device->device(), s2, nullptr);
- vkDestroyFence(m_device->device(), fence, nullptr);
-}
-
-TEST_F(VkPositiveLayerTest, FenceCreateSignaledWaitHandling) {
- m_errorMonitor->ExpectSuccess();
-
- ASSERT_NO_FATAL_FAILURE(Init());
- VkResult err;
-
- // A fence created signaled
- VkFenceCreateInfo fci1 = {VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, nullptr, VK_FENCE_CREATE_SIGNALED_BIT};
- VkFence f1;
- err = vkCreateFence(m_device->device(), &fci1, nullptr, &f1);
- ASSERT_VK_SUCCESS(err);
-
-    // A fence created unsignaled
- VkFenceCreateInfo fci2 = {VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, nullptr, 0};
- VkFence f2;
- err = vkCreateFence(m_device->device(), &fci2, nullptr, &f2);
- ASSERT_VK_SUCCESS(err);
-
- // Submit the unsignaled fence
- VkSubmitInfo si = {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 0, nullptr, nullptr, 0, nullptr, 0, nullptr};
- err = vkQueueSubmit(m_device->m_queue, 1, &si, f2);
-
- // Wait on both fences, with signaled first.
- VkFence fences[] = {f1, f2};
- vkWaitForFences(m_device->device(), 2, fences, VK_TRUE, UINT64_MAX);
-
- // Should have both retired!
- vkDestroyFence(m_device->device(), f1, nullptr);
- vkDestroyFence(m_device->device(), f2, nullptr);
-
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, CreateImageViewFollowsParameterCompatibilityRequirements) {
- TEST_DESCRIPTION("Verify that creating an ImageView with valid usage does not generate validation errors.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- m_errorMonitor->ExpectSuccess();
-
- VkImageCreateInfo imgInfo = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
- nullptr,
- VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT,
- VK_IMAGE_TYPE_2D,
- VK_FORMAT_R8G8B8A8_UNORM,
- {128, 128, 1},
- 1,
- 1,
- VK_SAMPLE_COUNT_1_BIT,
- VK_IMAGE_TILING_OPTIMAL,
- VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
- VK_SHARING_MODE_EXCLUSIVE,
- 0,
- nullptr,
- VK_IMAGE_LAYOUT_UNDEFINED};
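-    // MUTABLE_FORMAT_BIT allows views with a different format than the image; the view below simply reuses the image's own format.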
- VkImageObj image(m_device);
- image.init(&imgInfo);
- ASSERT_TRUE(image.initialized());
- image.targetView(VK_FORMAT_R8G8B8A8_UNORM);
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, ValidUsage) {
- TEST_DESCRIPTION("Verify that creating an image view from an image with valid usage doesn't generate validation errors");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- m_errorMonitor->ExpectSuccess();
- // Verify that we can create a view with usage INPUT_ATTACHMENT
- VkImageObj image(m_device);
- image.Init(128, 128, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
- ASSERT_TRUE(image.initialized());
- VkImageView imageView;
- VkImageViewCreateInfo ivci = {};
- ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
- ivci.image = image.handle();
- ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
- ivci.format = VK_FORMAT_R8G8B8A8_UNORM;
- ivci.subresourceRange.layerCount = 1;
- ivci.subresourceRange.baseMipLevel = 0;
- ivci.subresourceRange.levelCount = 1;
- ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
-
- vkCreateImageView(m_device->device(), &ivci, NULL, &imageView);
- m_errorMonitor->VerifyNotFound();
- vkDestroyImageView(m_device->device(), imageView, NULL);
-}
-
-// This is a positive test. No failures are expected.
-TEST_F(VkPositiveLayerTest, BindSparse) {
- TEST_DESCRIPTION("Bind 2 memory ranges to one image using vkQueueBindSparse, destroy the image and then free the memory");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- auto index = m_device->graphics_queue_node_index_;
- if (!(m_device->queue_props[index].queueFlags & VK_QUEUE_SPARSE_BINDING_BIT)) {
- printf("%s Graphics queue does not have sparse binding bit.\n", kSkipPrefix);
- return;
- }
- if (!m_device->phy().features().sparseBinding) {
- printf("%s Device does not support sparse bindings.\n", kSkipPrefix);
- return;
- }
-
- m_errorMonitor->ExpectSuccess(VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT);
-
- VkImage image;
- VkImageCreateInfo image_create_info = {};
- image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- image_create_info.pNext = NULL;
- image_create_info.imageType = VK_IMAGE_TYPE_2D;
- image_create_info.format = VK_FORMAT_B8G8R8A8_UNORM;
- image_create_info.extent.width = 64;
- image_create_info.extent.height = 64;
- image_create_info.extent.depth = 1;
- image_create_info.mipLevels = 1;
- image_create_info.arrayLayers = 1;
- image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
- image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
- image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
- image_create_info.flags = VK_IMAGE_CREATE_SPARSE_BINDING_BIT;
- VkResult err = vkCreateImage(m_device->device(), &image_create_info, NULL, &image);
- ASSERT_VK_SUCCESS(err);
-
- VkMemoryRequirements memory_reqs;
- VkDeviceMemory memory_one, memory_two;
- bool pass;
- VkMemoryAllocateInfo memory_info = {};
- memory_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
- memory_info.pNext = NULL;
- memory_info.allocationSize = 0;
- memory_info.memoryTypeIndex = 0;
- vkGetImageMemoryRequirements(m_device->device(), image, &memory_reqs);
- // Find an image big enough to allow sparse mapping of 2 memory regions
- // Increase the image size until it is at least twice the
- // size of the required alignment, to ensure we can bind both
- // allocated memory blocks to the image on aligned offsets.
- while (memory_reqs.size < (memory_reqs.alignment * 2)) {
- vkDestroyImage(m_device->device(), image, nullptr);
- image_create_info.extent.width *= 2;
- image_create_info.extent.height *= 2;
- err = vkCreateImage(m_device->device(), &image_create_info, nullptr, &image);
- ASSERT_VK_SUCCESS(err);
- vkGetImageMemoryRequirements(m_device->device(), image, &memory_reqs);
- }
- // Allocate 2 memory regions of minimum alignment size, bind one at 0, the other
- // at the end of the first
- memory_info.allocationSize = memory_reqs.alignment;
- pass = m_device->phy().set_memory_type(memory_reqs.memoryTypeBits, &memory_info, 0);
- ASSERT_TRUE(pass);
- err = vkAllocateMemory(m_device->device(), &memory_info, NULL, &memory_one);
- ASSERT_VK_SUCCESS(err);
- err = vkAllocateMemory(m_device->device(), &memory_info, NULL, &memory_two);
- ASSERT_VK_SUCCESS(err);
- VkSparseMemoryBind binds[2];
- binds[0].flags = 0;
- binds[0].memory = memory_one;
- binds[0].memoryOffset = 0;
- binds[0].resourceOffset = 0;
- binds[0].size = memory_info.allocationSize;
- binds[1].flags = 0;
- binds[1].memory = memory_two;
- binds[1].memoryOffset = 0;
- binds[1].resourceOffset = memory_info.allocationSize;
- binds[1].size = memory_info.allocationSize;
-
- VkSparseImageOpaqueMemoryBindInfo opaqueBindInfo;
- opaqueBindInfo.image = image;
- opaqueBindInfo.bindCount = 2;
- opaqueBindInfo.pBinds = binds;
-
- VkFence fence = VK_NULL_HANDLE;
- VkBindSparseInfo bindSparseInfo = {};
- bindSparseInfo.sType = VK_STRUCTURE_TYPE_BIND_SPARSE_INFO;
- bindSparseInfo.imageOpaqueBindCount = 1;
- bindSparseInfo.pImageOpaqueBinds = &opaqueBindInfo;
-
- vkQueueBindSparse(m_device->m_queue, 1, &bindSparseInfo, fence);
- vkQueueWaitIdle(m_device->m_queue);
- vkDestroyImage(m_device->device(), image, NULL);
- vkFreeMemory(m_device->device(), memory_one, NULL);
- vkFreeMemory(m_device->device(), memory_two, NULL);
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, BindSparseMetadata) {
- TEST_DESCRIPTION("Bind memory for the metadata aspect of a sparse image");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- auto index = m_device->graphics_queue_node_index_;
- if (!(m_device->queue_props[index].queueFlags & VK_QUEUE_SPARSE_BINDING_BIT)) {
- printf("%s Graphics queue does not have sparse binding bit.\n", kSkipPrefix);
- return;
- }
- if (!m_device->phy().features().sparseResidencyImage2D) {
- printf("%s Device does not support sparse residency for images.\n", kSkipPrefix);
- return;
- }
-
- m_errorMonitor->ExpectSuccess(VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT);
-
- // Create a sparse image
- VkImage image;
- VkImageCreateInfo image_create_info = {};
- image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- image_create_info.pNext = NULL;
- image_create_info.imageType = VK_IMAGE_TYPE_2D;
- image_create_info.format = VK_FORMAT_B8G8R8A8_UNORM;
- image_create_info.extent.width = 64;
- image_create_info.extent.height = 64;
- image_create_info.extent.depth = 1;
- image_create_info.mipLevels = 1;
- image_create_info.arrayLayers = 1;
- image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
- image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
- image_create_info.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
- image_create_info.flags = VK_IMAGE_CREATE_SPARSE_BINDING_BIT | VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT;
- VkResult err = vkCreateImage(m_device->device(), &image_create_info, NULL, &image);
- ASSERT_VK_SUCCESS(err);
-
- // Query image memory requirements
- VkMemoryRequirements memory_reqs;
- vkGetImageMemoryRequirements(m_device->device(), image, &memory_reqs);
-
- // Query sparse memory requirements
- uint32_t sparse_reqs_count = 0;
- vkGetImageSparseMemoryRequirements(m_device->device(), image, &sparse_reqs_count, nullptr);
- std::vector<VkSparseImageMemoryRequirements> sparse_reqs(sparse_reqs_count);
- vkGetImageSparseMemoryRequirements(m_device->device(), image, &sparse_reqs_count, sparse_reqs.data());
-
- // Find requirements for metadata aspect
- const VkSparseImageMemoryRequirements *metadata_reqs = nullptr;
- for (auto const &aspect_sparse_reqs : sparse_reqs) {
- if (aspect_sparse_reqs.formatProperties.aspectMask == VK_IMAGE_ASPECT_METADATA_BIT) {
- metadata_reqs = &aspect_sparse_reqs;
- }
- }
-
- if (!metadata_reqs) {
- printf("%s Sparse image does not require memory for metadata.\n", kSkipPrefix);
- } else {
- // Allocate memory for the metadata
- VkDeviceMemory metadata_memory = VK_NULL_HANDLE;
- VkMemoryAllocateInfo metadata_memory_info = {};
- metadata_memory_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
- metadata_memory_info.allocationSize = metadata_reqs->imageMipTailSize;
- m_device->phy().set_memory_type(memory_reqs.memoryTypeBits, &metadata_memory_info, 0);
- err = vkAllocateMemory(m_device->device(), &metadata_memory_info, NULL, &metadata_memory);
- ASSERT_VK_SUCCESS(err);
-
- // Bind metadata
- VkSparseMemoryBind sparse_bind = {};
- sparse_bind.resourceOffset = metadata_reqs->imageMipTailOffset;
- sparse_bind.size = metadata_reqs->imageMipTailSize;
- sparse_bind.memory = metadata_memory;
- sparse_bind.memoryOffset = 0;
- sparse_bind.flags = VK_SPARSE_MEMORY_BIND_METADATA_BIT;
-
- VkSparseImageOpaqueMemoryBindInfo opaque_bind_info = {};
- opaque_bind_info.image = image;
- opaque_bind_info.bindCount = 1;
- opaque_bind_info.pBinds = &sparse_bind;
-
- VkBindSparseInfo bind_info = {};
- bind_info.sType = VK_STRUCTURE_TYPE_BIND_SPARSE_INFO;
- bind_info.imageOpaqueBindCount = 1;
- bind_info.pImageOpaqueBinds = &opaque_bind_info;
-
- vkQueueBindSparse(m_device->m_queue, 1, &bind_info, VK_NULL_HANDLE);
- m_errorMonitor->VerifyNotFound();
-
- // Cleanup
- vkQueueWaitIdle(m_device->m_queue);
- vkFreeMemory(m_device->device(), metadata_memory, NULL);
- }
-
- vkDestroyImage(m_device->device(), image, NULL);
-}
-
-TEST_F(VkPositiveLayerTest, FramebufferBindingDestroyCommandPool) {
- TEST_DESCRIPTION(
- "This test should pass. Create a Framebuffer and command buffer, bind them together, then destroy command pool and "
- "framebuffer and verify there are no errors.");
-
- m_errorMonitor->ExpectSuccess();
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- // A renderpass with one color attachment.
- VkAttachmentDescription attachment = {0,
- VK_FORMAT_R8G8B8A8_UNORM,
- VK_SAMPLE_COUNT_1_BIT,
- VK_ATTACHMENT_LOAD_OP_DONT_CARE,
- VK_ATTACHMENT_STORE_OP_STORE,
- VK_ATTACHMENT_LOAD_OP_DONT_CARE,
- VK_ATTACHMENT_STORE_OP_DONT_CARE,
- VK_IMAGE_LAYOUT_UNDEFINED,
- VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
-
- VkAttachmentReference att_ref = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
-
- VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, &att_ref, nullptr, nullptr, 0, nullptr};
-
- VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, &attachment, 1, &subpass, 0, nullptr};
-
- VkRenderPass rp;
- VkResult err = vkCreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
- ASSERT_VK_SUCCESS(err);
-
- // A compatible framebuffer.
- VkImageObj image(m_device);
- image.Init(32, 32, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
- ASSERT_TRUE(image.initialized());
-
- VkImageView view = image.targetView(VK_FORMAT_R8G8B8A8_UNORM);
-
- VkFramebufferCreateInfo fci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 1, &view, 32, 32, 1};
- VkFramebuffer fb;
- err = vkCreateFramebuffer(m_device->device(), &fci, nullptr, &fb);
- ASSERT_VK_SUCCESS(err);
-
- // Explicitly create a command buffer to bind the FB to so that we can then
- // destroy the command pool in order to implicitly free command buffer
- VkCommandPool command_pool;
- VkCommandPoolCreateInfo pool_create_info{};
- pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
- pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
- pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
- vkCreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool);
-
- VkCommandBuffer command_buffer;
- VkCommandBufferAllocateInfo command_buffer_allocate_info{};
- command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
- command_buffer_allocate_info.commandPool = command_pool;
- command_buffer_allocate_info.commandBufferCount = 1;
- command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
- vkAllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, &command_buffer);
-
- // Begin our cmd buffer with renderpass using our framebuffer
- VkRenderPassBeginInfo rpbi = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, nullptr, rp, fb, {{0, 0}, {32, 32}}, 0, nullptr};
- VkCommandBufferBeginInfo begin_info{};
- begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
- vkBeginCommandBuffer(command_buffer, &begin_info);
-
- vkCmdBeginRenderPass(command_buffer, &rpbi, VK_SUBPASS_CONTENTS_INLINE);
- vkCmdEndRenderPass(command_buffer);
- vkEndCommandBuffer(command_buffer);
- // Destroy command pool to implicitly free command buffer
- vkDestroyCommandPool(m_device->device(), command_pool, NULL);
- vkDestroyFramebuffer(m_device->device(), fb, nullptr);
- vkDestroyRenderPass(m_device->device(), rp, nullptr);
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, FramebufferCreateDepthStencilLayoutTransitionForDepthOnlyImageView) {
- TEST_DESCRIPTION(
- "Validate that when an imageView of a depth/stencil image is used as a depth/stencil framebuffer attachment, the "
- "aspectMask is ignored and both depth and stencil image subresources are used.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- VkFormatProperties format_properties;
- vkGetPhysicalDeviceFormatProperties(gpu(), VK_FORMAT_D32_SFLOAT_S8_UINT, &format_properties);
- if (!(format_properties.optimalTilingFeatures & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT)) {
- printf("%s Image format does not support sampling.\n", kSkipPrefix);
- return;
- }
-
- m_errorMonitor->ExpectSuccess();
-
- VkAttachmentDescription attachment = {0,
- VK_FORMAT_D32_SFLOAT_S8_UINT,
- VK_SAMPLE_COUNT_1_BIT,
- VK_ATTACHMENT_LOAD_OP_DONT_CARE,
- VK_ATTACHMENT_STORE_OP_STORE,
- VK_ATTACHMENT_LOAD_OP_DONT_CARE,
- VK_ATTACHMENT_STORE_OP_DONT_CARE,
- VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
- VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL};
-
- VkAttachmentReference att_ref = {0, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL};
-
- VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 0, nullptr, nullptr, &att_ref, 0, nullptr};
-
- VkSubpassDependency dep = {0,
- 0,
- VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
- VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
- VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
- VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
- VK_DEPENDENCY_BY_REGION_BIT};
-
- VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, &attachment, 1, &subpass, 1, &dep};
-
- VkResult err;
- VkRenderPass rp;
- err = vkCreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
- ASSERT_VK_SUCCESS(err);
-
- VkImageObj image(m_device);
- image.InitNoLayout(32, 32, 1, VK_FORMAT_D32_SFLOAT_S8_UINT,
-                       0x26,  // usage: TRANSFER_DST | SAMPLED | DEPTH_STENCIL_ATTACHMENT
- VK_IMAGE_TILING_OPTIMAL, 0);
- ASSERT_TRUE(image.initialized());
-    image.SetLayout(0x6 /* DEPTH | STENCIL */, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL);
-
- VkImageView view = image.targetView(VK_FORMAT_D32_SFLOAT_S8_UINT, VK_IMAGE_ASPECT_DEPTH_BIT);
-
- VkFramebufferCreateInfo fci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 1, &view, 32, 32, 1};
- VkFramebuffer fb;
- err = vkCreateFramebuffer(m_device->device(), &fci, nullptr, &fb);
- ASSERT_VK_SUCCESS(err);
-
- m_commandBuffer->begin();
-
- VkImageMemoryBarrier imb = {};
- imb.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
- imb.pNext = nullptr;
- imb.srcAccessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
- imb.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
- imb.oldLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
- imb.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
- imb.srcQueueFamilyIndex = 0;
- imb.dstQueueFamilyIndex = 0;
- imb.image = image.handle();
-    imb.subresourceRange.aspectMask = 0x6;  // DEPTH | STENCIL
- imb.subresourceRange.baseMipLevel = 0;
- imb.subresourceRange.levelCount = 0x1;
- imb.subresourceRange.baseArrayLayer = 0;
- imb.subresourceRange.layerCount = 0x1;
-
- vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
- VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1, &imb);
-
- m_commandBuffer->end();
- m_commandBuffer->QueueCommandBuffer(false);
- m_errorMonitor->VerifyNotFound();
-
- vkDestroyFramebuffer(m_device->device(), fb, nullptr);
- vkDestroyRenderPass(m_device->device(), rp, nullptr);
-}
-
-// This is a positive test. No errors should be generated.
-TEST_F(VkPositiveLayerTest, BarrierLayoutToImageUsage) {
- TEST_DESCRIPTION("Ensure barriers' new and old VkImageLayout are compatible with their images' VkImageUsageFlags");
-
- m_errorMonitor->ExpectSuccess();
-
- ASSERT_NO_FATAL_FAILURE(Init());
- auto depth_format = FindSupportedDepthStencilFormat(gpu());
- if (!depth_format) {
- printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkImageMemoryBarrier img_barrier = {};
- img_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
- img_barrier.pNext = NULL;
- img_barrier.srcAccessMask = VK_ACCESS_HOST_WRITE_BIT;
- img_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
- img_barrier.oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
- img_barrier.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
- img_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
- img_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
- img_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- img_barrier.subresourceRange.baseArrayLayer = 0;
- img_barrier.subresourceRange.baseMipLevel = 0;
- img_barrier.subresourceRange.layerCount = 1;
- img_barrier.subresourceRange.levelCount = 1;
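-    // This barrier template is reused for every image below; only image, aspectMask, and the layouts change per iteration.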
-
- {
- VkImageObj img_color(m_device);
- img_color.Init(128, 128, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL);
- ASSERT_TRUE(img_color.initialized());
-
- VkImageObj img_ds1(m_device);
- img_ds1.Init(128, 128, 1, depth_format, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL);
- ASSERT_TRUE(img_ds1.initialized());
-
- VkImageObj img_ds2(m_device);
- img_ds2.Init(128, 128, 1, depth_format, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL);
- ASSERT_TRUE(img_ds2.initialized());
-
- VkImageObj img_xfer_src(m_device);
- img_xfer_src.Init(128, 128, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, VK_IMAGE_TILING_OPTIMAL);
- ASSERT_TRUE(img_xfer_src.initialized());
-
- VkImageObj img_xfer_dst(m_device);
- img_xfer_dst.Init(128, 128, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL);
- ASSERT_TRUE(img_xfer_dst.initialized());
-
- VkImageObj img_sampled(m_device);
- img_sampled.Init(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_TILING_OPTIMAL);
- ASSERT_TRUE(img_sampled.initialized());
-
- VkImageObj img_input(m_device);
- img_input.Init(128, 128, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL);
- ASSERT_TRUE(img_input.initialized());
-
- const struct {
- VkImageObj &image_obj;
- VkImageLayout old_layout;
- VkImageLayout new_layout;
- } buffer_layouts[] = {
- // clang-format off
- {img_color, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL},
- {img_ds1, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL},
- {img_ds2, VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL},
- {img_sampled, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL},
- {img_input, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL},
- {img_xfer_src, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL},
- {img_xfer_dst, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL},
- // clang-format on
- };
- const uint32_t layout_count = sizeof(buffer_layouts) / sizeof(buffer_layouts[0]);
-
- m_commandBuffer->begin();
- for (uint32_t i = 0; i < layout_count; ++i) {
- img_barrier.image = buffer_layouts[i].image_obj.handle();
- const VkImageUsageFlags usage = buffer_layouts[i].image_obj.usage();
- img_barrier.subresourceRange.aspectMask = (usage == VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)
- ? (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)
- : VK_IMAGE_ASPECT_COLOR_BIT;
-
- img_barrier.oldLayout = buffer_layouts[i].old_layout;
- img_barrier.newLayout = buffer_layouts[i].new_layout;
- vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, 0, 0,
- nullptr, 0, nullptr, 1, &img_barrier);
-
- img_barrier.oldLayout = buffer_layouts[i].new_layout;
- img_barrier.newLayout = buffer_layouts[i].old_layout;
- vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, 0, 0,
- nullptr, 0, nullptr, 1, &img_barrier);
- }
- m_commandBuffer->end();
-
- img_barrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
- img_barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
- }
- m_errorMonitor->VerifyNotFound();
-}
-
-// This is a positive test. No errors should be generated.
-TEST_F(VkPositiveLayerTest, WaitEventThenSet) {
-    TEST_DESCRIPTION("Wait on an event, then set it after the wait has been submitted.");
-
- m_errorMonitor->ExpectSuccess();
- ASSERT_NO_FATAL_FAILURE(Init());
-
- VkEvent event;
- VkEventCreateInfo event_create_info{};
- event_create_info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
- vkCreateEvent(m_device->device(), &event_create_info, nullptr, &event);
-
- VkCommandPool command_pool;
- VkCommandPoolCreateInfo pool_create_info{};
- pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
- pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
- pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
- vkCreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool);
-
- VkCommandBuffer command_buffer;
- VkCommandBufferAllocateInfo command_buffer_allocate_info{};
- command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
- command_buffer_allocate_info.commandPool = command_pool;
- command_buffer_allocate_info.commandBufferCount = 1;
- command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
- vkAllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, &command_buffer);
-
- VkQueue queue = VK_NULL_HANDLE;
- vkGetDeviceQueue(m_device->device(), m_device->graphics_queue_node_index_, 0, &queue);
-
- {
- VkCommandBufferBeginInfo begin_info{};
- begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
- vkBeginCommandBuffer(command_buffer, &begin_info);
-
- vkCmdWaitEvents(command_buffer, 1, &event, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, nullptr, 0,
- nullptr, 0, nullptr);
- vkCmdResetEvent(command_buffer, event, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT);
- vkEndCommandBuffer(command_buffer);
- }
- {
- VkSubmitInfo submit_info{};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &command_buffer;
- submit_info.signalSemaphoreCount = 0;
- submit_info.pSignalSemaphores = nullptr;
- vkQueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);
- }
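-    // Signal the event from the host only after the wait has already been submitted to the queue.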
- { vkSetEvent(m_device->device(), event); }
-
- vkQueueWaitIdle(queue);
-
- vkDestroyEvent(m_device->device(), event, nullptr);
- vkFreeCommandBuffers(m_device->device(), command_pool, 1, &command_buffer);
- vkDestroyCommandPool(m_device->device(), command_pool, NULL);
-
- m_errorMonitor->VerifyNotFound();
-}
-// This is a positive test. No errors should be generated.
-TEST_F(VkPositiveLayerTest, QueryAndCopySecondaryCommandBuffers) {
- TEST_DESCRIPTION("Issue a query on a secondary command buffer and copy it on a primary.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- if ((m_device->queue_props.empty()) || (m_device->queue_props[0].queueCount < 2)) {
- printf("%s Queue family needs to have multiple queues to run this test.\n", kSkipPrefix);
- return;
- }
-
- m_errorMonitor->ExpectSuccess();
-
- VkQueryPool query_pool;
- VkQueryPoolCreateInfo query_pool_create_info{};
- query_pool_create_info.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
- query_pool_create_info.queryType = VK_QUERY_TYPE_TIMESTAMP;
- query_pool_create_info.queryCount = 1;
- vkCreateQueryPool(m_device->device(), &query_pool_create_info, nullptr, &query_pool);
-
- VkCommandPoolObj command_pool(m_device, m_device->graphics_queue_node_index_, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
- VkCommandBufferObj primary_buffer(m_device, &command_pool);
- VkCommandBufferObj secondary_buffer(m_device, &command_pool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
-
- VkQueue queue = VK_NULL_HANDLE;
- vkGetDeviceQueue(m_device->device(), m_device->graphics_queue_node_index_, 1, &queue);
-
- uint32_t qfi = 0;
- VkBufferCreateInfo buff_create_info = {};
- buff_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
- buff_create_info.size = 1024;
- buff_create_info.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
- buff_create_info.queueFamilyIndexCount = 1;
- buff_create_info.pQueueFamilyIndices = &qfi;
-
- VkBufferObj buffer;
- buffer.init(*m_device, buff_create_info);
-
- VkCommandBufferInheritanceInfo hinfo = {};
- hinfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
- hinfo.renderPass = VK_NULL_HANDLE;
- hinfo.subpass = 0;
- hinfo.framebuffer = VK_NULL_HANDLE;
- hinfo.occlusionQueryEnable = VK_FALSE;
- hinfo.queryFlags = 0;
- hinfo.pipelineStatistics = 0;
-
- {
- VkCommandBufferBeginInfo begin_info{};
- begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
- begin_info.pInheritanceInfo = &hinfo;
- secondary_buffer.begin(&begin_info);
- vkCmdResetQueryPool(secondary_buffer.handle(), query_pool, 0, 1);
- vkCmdWriteTimestamp(secondary_buffer.handle(), VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, query_pool, 0);
- secondary_buffer.end();
-
- primary_buffer.begin();
- vkCmdExecuteCommands(primary_buffer.handle(), 1, &secondary_buffer.handle());
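-        // Copy results of a query that was written inside the secondary command buffer; WAIT_BIT makes the copy wait for availability.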
- vkCmdCopyQueryPoolResults(primary_buffer.handle(), query_pool, 0, 1, buffer.handle(), 0, 0, VK_QUERY_RESULT_WAIT_BIT);
- primary_buffer.end();
- }
-
- primary_buffer.QueueCommandBuffer();
- vkQueueWaitIdle(queue);
-
- vkDestroyQueryPool(m_device->device(), query_pool, nullptr);
- m_errorMonitor->VerifyNotFound();
-}
-
-// This is a positive test. No errors should be generated.
-TEST_F(VkPositiveLayerTest, QueryAndCopyMultipleCommandBuffers) {
- TEST_DESCRIPTION("Issue a query and copy from it on a second command buffer.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- if ((m_device->queue_props.empty()) || (m_device->queue_props[0].queueCount < 2)) {
- printf("%s Queue family needs to have multiple queues to run this test.\n", kSkipPrefix);
- return;
- }
-
- m_errorMonitor->ExpectSuccess();
-
- VkQueryPool query_pool;
- VkQueryPoolCreateInfo query_pool_create_info{};
- query_pool_create_info.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
- query_pool_create_info.queryType = VK_QUERY_TYPE_TIMESTAMP;
- query_pool_create_info.queryCount = 1;
- vkCreateQueryPool(m_device->device(), &query_pool_create_info, nullptr, &query_pool);
-
- VkCommandPool command_pool;
- VkCommandPoolCreateInfo pool_create_info{};
- pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
- pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
- pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
- vkCreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool);
-
- VkCommandBuffer command_buffer[2];
- VkCommandBufferAllocateInfo command_buffer_allocate_info{};
- command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
- command_buffer_allocate_info.commandPool = command_pool;
- command_buffer_allocate_info.commandBufferCount = 2;
- command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
- vkAllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, command_buffer);
-
- VkQueue queue = VK_NULL_HANDLE;
- vkGetDeviceQueue(m_device->device(), m_device->graphics_queue_node_index_, 1, &queue);
-
- uint32_t qfi = 0;
- VkBufferCreateInfo buff_create_info = {};
- buff_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
- buff_create_info.size = 1024;
- buff_create_info.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
- buff_create_info.queueFamilyIndexCount = 1;
- buff_create_info.pQueueFamilyIndices = &qfi;
-
- VkBufferObj buffer;
- buffer.init(*m_device, buff_create_info);
-
- {
- VkCommandBufferBeginInfo begin_info{};
- begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
- vkBeginCommandBuffer(command_buffer[0], &begin_info);
-
- vkCmdResetQueryPool(command_buffer[0], query_pool, 0, 1);
- vkCmdWriteTimestamp(command_buffer[0], VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, query_pool, 0);
-
- vkEndCommandBuffer(command_buffer[0]);
-
- vkBeginCommandBuffer(command_buffer[1], &begin_info);
-
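-        // The timestamp was written in command_buffer[0]; copying it from command_buffer[1] in the same submit is valid, and WAIT_BIT makes the copy wait for the result.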
- vkCmdCopyQueryPoolResults(command_buffer[1], query_pool, 0, 1, buffer.handle(), 0, 0, VK_QUERY_RESULT_WAIT_BIT);
-
- vkEndCommandBuffer(command_buffer[1]);
- }
- {
- VkSubmitInfo submit_info{};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 2;
- submit_info.pCommandBuffers = command_buffer;
- submit_info.signalSemaphoreCount = 0;
- submit_info.pSignalSemaphores = nullptr;
- vkQueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);
- }
-
- vkQueueWaitIdle(queue);
-
- vkDestroyQueryPool(m_device->device(), query_pool, nullptr);
- vkFreeCommandBuffers(m_device->device(), command_pool, 2, command_buffer);
- vkDestroyCommandPool(m_device->device(), command_pool, NULL);
-
- m_errorMonitor->VerifyNotFound();
-}
-
-// This is a positive test. No errors should be generated.
-TEST_F(VkPositiveLayerTest, TwoFencesThreeFrames) {
- TEST_DESCRIPTION(
- "Two command buffers with two separate fences are each run through a Submit & WaitForFences cycle 3 times. This previously "
-        "revealed a bug, so this positive test is run to prevent a regression.");
- m_errorMonitor->ExpectSuccess();
-
- ASSERT_NO_FATAL_FAILURE(Init());
- VkQueue queue = VK_NULL_HANDLE;
- vkGetDeviceQueue(m_device->device(), m_device->graphics_queue_node_index_, 0, &queue);
-
- static const uint32_t NUM_OBJECTS = 2;
- static const uint32_t NUM_FRAMES = 3;
- VkCommandBuffer cmd_buffers[NUM_OBJECTS] = {};
- VkFence fences[NUM_OBJECTS] = {};
-
- VkCommandPool cmd_pool;
- VkCommandPoolCreateInfo cmd_pool_ci = {};
- cmd_pool_ci.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
- cmd_pool_ci.queueFamilyIndex = m_device->graphics_queue_node_index_;
- cmd_pool_ci.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
- VkResult err = vkCreateCommandPool(m_device->device(), &cmd_pool_ci, nullptr, &cmd_pool);
- ASSERT_VK_SUCCESS(err);
-
- VkCommandBufferAllocateInfo cmd_buf_info = {};
- cmd_buf_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
- cmd_buf_info.commandPool = cmd_pool;
- cmd_buf_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
- cmd_buf_info.commandBufferCount = 1;
-
- VkFenceCreateInfo fence_ci = {};
- fence_ci.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
- fence_ci.pNext = nullptr;
- fence_ci.flags = 0;
-
- for (uint32_t i = 0; i < NUM_OBJECTS; ++i) {
- err = vkAllocateCommandBuffers(m_device->device(), &cmd_buf_info, &cmd_buffers[i]);
- ASSERT_VK_SUCCESS(err);
- err = vkCreateFence(m_device->device(), &fence_ci, nullptr, &fences[i]);
- ASSERT_VK_SUCCESS(err);
- }
-
- for (uint32_t frame = 0; frame < NUM_FRAMES; ++frame) {
- for (uint32_t obj = 0; obj < NUM_OBJECTS; ++obj) {
- // Create empty cmd buffer
- VkCommandBufferBeginInfo cmdBufBeginDesc = {};
- cmdBufBeginDesc.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
-
- err = vkBeginCommandBuffer(cmd_buffers[obj], &cmdBufBeginDesc);
- ASSERT_VK_SUCCESS(err);
- err = vkEndCommandBuffer(cmd_buffers[obj]);
- ASSERT_VK_SUCCESS(err);
-
- VkSubmitInfo submit_info = {};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &cmd_buffers[obj];
- // Submit cmd buffer and wait for fence
- err = vkQueueSubmit(queue, 1, &submit_info, fences[obj]);
- ASSERT_VK_SUCCESS(err);
- err = vkWaitForFences(m_device->device(), 1, &fences[obj], VK_TRUE, UINT64_MAX);
- ASSERT_VK_SUCCESS(err);
- err = vkResetFences(m_device->device(), 1, &fences[obj]);
- ASSERT_VK_SUCCESS(err);
- }
- }
- m_errorMonitor->VerifyNotFound();
- vkDestroyCommandPool(m_device->device(), cmd_pool, NULL);
- for (uint32_t i = 0; i < NUM_OBJECTS; ++i) {
- vkDestroyFence(m_device->device(), fences[i], nullptr);
- }
-}
-// This is a positive test. No errors should be generated.
-TEST_F(VkPositiveLayerTest, TwoQueueSubmitsSeparateQueuesWithSemaphoreAndOneFenceQWI) {
- TEST_DESCRIPTION(
- "Two command buffers, each in a separate QueueSubmit call submitted on separate queues followed by a QueueWaitIdle.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- if ((m_device->queue_props.empty()) || (m_device->queue_props[0].queueCount < 2)) {
- printf("%s Queue family needs to have multiple queues to run this test.\n", kSkipPrefix);
- return;
- }
-
- m_errorMonitor->ExpectSuccess();
-
- VkSemaphore semaphore;
- VkSemaphoreCreateInfo semaphore_create_info{};
- semaphore_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
- vkCreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore);
-
- VkCommandPool command_pool;
- VkCommandPoolCreateInfo pool_create_info{};
- pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
- pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
- pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
- vkCreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool);
-
- VkCommandBuffer command_buffer[2];
- VkCommandBufferAllocateInfo command_buffer_allocate_info{};
- command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
- command_buffer_allocate_info.commandPool = command_pool;
- command_buffer_allocate_info.commandBufferCount = 2;
- command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
- vkAllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, command_buffer);
-
- VkQueue queue = VK_NULL_HANDLE;
- vkGetDeviceQueue(m_device->device(), m_device->graphics_queue_node_index_, 1, &queue);
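- // "queue" is the second queue (index 1) in the graphics family; m_device->m_queue is the default queue used below.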
-
- {
- VkCommandBufferBeginInfo begin_info{};
- begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
- vkBeginCommandBuffer(command_buffer[0], &begin_info);
-
- vkCmdPipelineBarrier(command_buffer[0], VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
- nullptr, 0, nullptr, 0, nullptr);
-
- VkViewport viewport{};
- viewport.maxDepth = 1.0f;
- viewport.minDepth = 0.0f;
- viewport.width = 512;
- viewport.height = 512;
- viewport.x = 0;
- viewport.y = 0;
- vkCmdSetViewport(command_buffer[0], 0, 1, &viewport);
- vkEndCommandBuffer(command_buffer[0]);
- }
- {
- VkCommandBufferBeginInfo begin_info{};
- begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
- vkBeginCommandBuffer(command_buffer[1], &begin_info);
-
- VkViewport viewport{};
- viewport.maxDepth = 1.0f;
- viewport.minDepth = 0.0f;
- viewport.width = 512;
- viewport.height = 512;
- viewport.x = 0;
- viewport.y = 0;
- vkCmdSetViewport(command_buffer[1], 0, 1, &viewport);
- vkEndCommandBuffer(command_buffer[1]);
- }
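- // The first submission signals the semaphore on the second queue; the submission on the default queue below waits on it.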
- {
- VkSubmitInfo submit_info{};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &command_buffer[0];
- submit_info.signalSemaphoreCount = 1;
- submit_info.pSignalSemaphores = &semaphore;
- vkQueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);
- }
- {
- VkPipelineStageFlags flags[]{VK_PIPELINE_STAGE_ALL_COMMANDS_BIT};
- VkSubmitInfo submit_info{};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &command_buffer[1];
- submit_info.waitSemaphoreCount = 1;
- submit_info.pWaitSemaphores = &semaphore;
- submit_info.pWaitDstStageMask = flags;
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
- }
-
- vkQueueWaitIdle(m_device->m_queue);
-
- vkDestroySemaphore(m_device->device(), semaphore, nullptr);
- vkFreeCommandBuffers(m_device->device(), command_pool, 2, &command_buffer[0]);
- vkDestroyCommandPool(m_device->device(), command_pool, NULL);
-
- m_errorMonitor->VerifyNotFound();
-}
-
-// This is a positive test. No errors should be generated.
-TEST_F(VkPositiveLayerTest, TwoQueueSubmitsSeparateQueuesWithSemaphoreAndOneFenceQWIFence) {
- TEST_DESCRIPTION(
- "Two command buffers, each in a separate QueueSubmit call submitted on separate queues, the second having a fence followed "
- "by a QueueWaitIdle.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- if ((m_device->queue_props.empty()) || (m_device->queue_props[0].queueCount < 2)) {
- printf("%s Queue family needs to have multiple queues to run this test.\n", kSkipPrefix);
- return;
- }
-
- m_errorMonitor->ExpectSuccess();
-
- VkFence fence;
- VkFenceCreateInfo fence_create_info{};
- fence_create_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
- vkCreateFence(m_device->device(), &fence_create_info, nullptr, &fence);
-
- VkSemaphore semaphore;
- VkSemaphoreCreateInfo semaphore_create_info{};
- semaphore_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
- vkCreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore);
-
- VkCommandPool command_pool;
- VkCommandPoolCreateInfo pool_create_info{};
- pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
- pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
- pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
- vkCreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool);
-
- VkCommandBuffer command_buffer[2];
- VkCommandBufferAllocateInfo command_buffer_allocate_info{};
- command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
- command_buffer_allocate_info.commandPool = command_pool;
- command_buffer_allocate_info.commandBufferCount = 2;
- command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
- vkAllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, command_buffer);
-
- VkQueue queue = VK_NULL_HANDLE;
- vkGetDeviceQueue(m_device->device(), m_device->graphics_queue_node_index_, 1, &queue);
-
- {
- VkCommandBufferBeginInfo begin_info{};
- begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
- vkBeginCommandBuffer(command_buffer[0], &begin_info);
-
- vkCmdPipelineBarrier(command_buffer[0], VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
- nullptr, 0, nullptr, 0, nullptr);
-
- VkViewport viewport{};
- viewport.maxDepth = 1.0f;
- viewport.minDepth = 0.0f;
- viewport.width = 512;
- viewport.height = 512;
- viewport.x = 0;
- viewport.y = 0;
- vkCmdSetViewport(command_buffer[0], 0, 1, &viewport);
- vkEndCommandBuffer(command_buffer[0]);
- }
- {
- VkCommandBufferBeginInfo begin_info{};
- begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
- vkBeginCommandBuffer(command_buffer[1], &begin_info);
-
- VkViewport viewport{};
- viewport.maxDepth = 1.0f;
- viewport.minDepth = 0.0f;
- viewport.width = 512;
- viewport.height = 512;
- viewport.x = 0;
- viewport.y = 0;
- vkCmdSetViewport(command_buffer[1], 0, 1, &viewport);
- vkEndCommandBuffer(command_buffer[1]);
- }
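- // The first submission signals the semaphore on the second queue; the waiting submission on the default queue also signals the fence.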
- {
- VkSubmitInfo submit_info{};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &command_buffer[0];
- submit_info.signalSemaphoreCount = 1;
- submit_info.pSignalSemaphores = &semaphore;
- vkQueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);
- }
- {
- VkPipelineStageFlags flags[]{VK_PIPELINE_STAGE_ALL_COMMANDS_BIT};
- VkSubmitInfo submit_info{};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &command_buffer[1];
- submit_info.waitSemaphoreCount = 1;
- submit_info.pWaitSemaphores = &semaphore;
- submit_info.pWaitDstStageMask = flags;
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, fence);
- }
-
- vkQueueWaitIdle(m_device->m_queue);
-
- vkDestroyFence(m_device->device(), fence, nullptr);
- vkDestroySemaphore(m_device->device(), semaphore, nullptr);
- vkFreeCommandBuffers(m_device->device(), command_pool, 2, &command_buffer[0]);
- vkDestroyCommandPool(m_device->device(), command_pool, NULL);
-
- m_errorMonitor->VerifyNotFound();
-}
-
-// This is a positive test. No errors should be generated.
-TEST_F(VkPositiveLayerTest, TwoQueueSubmitsSeparateQueuesWithSemaphoreAndOneFenceTwoWFF) {
- TEST_DESCRIPTION(
- "Two command buffers, each in a separate QueueSubmit call submitted on separate queues, the second having a fence followed "
- "by two consecutive WaitForFences calls on the same fence.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- if ((m_device->queue_props.empty()) || (m_device->queue_props[0].queueCount < 2)) {
- printf("%s Queue family needs to have multiple queues to run this test.\n", kSkipPrefix);
- return;
- }
-
- m_errorMonitor->ExpectSuccess();
-
- VkFence fence;
- VkFenceCreateInfo fence_create_info{};
- fence_create_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
- vkCreateFence(m_device->device(), &fence_create_info, nullptr, &fence);
-
- VkSemaphore semaphore;
- VkSemaphoreCreateInfo semaphore_create_info{};
- semaphore_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
- vkCreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore);
-
- VkCommandPool command_pool;
- VkCommandPoolCreateInfo pool_create_info{};
- pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
- pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
- pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
- vkCreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool);
-
- VkCommandBuffer command_buffer[2];
- VkCommandBufferAllocateInfo command_buffer_allocate_info{};
- command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
- command_buffer_allocate_info.commandPool = command_pool;
- command_buffer_allocate_info.commandBufferCount = 2;
- command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
- vkAllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, command_buffer);
-
- VkQueue queue = VK_NULL_HANDLE;
- vkGetDeviceQueue(m_device->device(), m_device->graphics_queue_node_index_, 1, &queue);
-
- {
- VkCommandBufferBeginInfo begin_info{};
- begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
- vkBeginCommandBuffer(command_buffer[0], &begin_info);
-
- vkCmdPipelineBarrier(command_buffer[0], VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
- nullptr, 0, nullptr, 0, nullptr);
-
- VkViewport viewport{};
- viewport.maxDepth = 1.0f;
- viewport.minDepth = 0.0f;
- viewport.width = 512;
- viewport.height = 512;
- viewport.x = 0;
- viewport.y = 0;
- vkCmdSetViewport(command_buffer[0], 0, 1, &viewport);
- vkEndCommandBuffer(command_buffer[0]);
- }
- {
- VkCommandBufferBeginInfo begin_info{};
- begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
- vkBeginCommandBuffer(command_buffer[1], &begin_info);
-
- VkViewport viewport{};
- viewport.maxDepth = 1.0f;
- viewport.minDepth = 0.0f;
- viewport.width = 512;
- viewport.height = 512;
- viewport.x = 0;
- viewport.y = 0;
- vkCmdSetViewport(command_buffer[1], 0, 1, &viewport);
- vkEndCommandBuffer(command_buffer[1]);
- }
- {
- VkSubmitInfo submit_info{};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &command_buffer[0];
- submit_info.signalSemaphoreCount = 1;
- submit_info.pSignalSemaphores = &semaphore;
- vkQueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);
- }
- {
- VkPipelineStageFlags flags[]{VK_PIPELINE_STAGE_ALL_COMMANDS_BIT};
- VkSubmitInfo submit_info{};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &command_buffer[1];
- submit_info.waitSemaphoreCount = 1;
- submit_info.pWaitSemaphores = &semaphore;
- submit_info.pWaitDstStageMask = flags;
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, fence);
- }
-
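- // Two consecutive waits on the same fence; the second wait on an already-signaled fence must not trigger errors.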
- vkWaitForFences(m_device->device(), 1, &fence, VK_TRUE, UINT64_MAX);
- vkWaitForFences(m_device->device(), 1, &fence, VK_TRUE, UINT64_MAX);
-
- vkDestroyFence(m_device->device(), fence, nullptr);
- vkDestroySemaphore(m_device->device(), semaphore, nullptr);
- vkFreeCommandBuffers(m_device->device(), command_pool, 2, &command_buffer[0]);
- vkDestroyCommandPool(m_device->device(), command_pool, NULL);
-
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, TwoQueuesEnsureCorrectRetirementWithWorkStolen) {
- ASSERT_NO_FATAL_FAILURE(Init());
- if ((m_device->queue_props.empty()) || (m_device->queue_props[0].queueCount < 2)) {
- printf("%s Test requires two queues, skipping\n", kSkipPrefix);
- return;
- }
-
- VkResult err;
-
- m_errorMonitor->ExpectSuccess();
-
- VkQueue q0 = m_device->m_queue;
- VkQueue q1 = nullptr;
- vkGetDeviceQueue(m_device->device(), m_device->graphics_queue_node_index_, 1, &q1);
- ASSERT_NE(q1, nullptr);
-
- // An (empty) command buffer. We must have work in the first submission --
- // the layer treats unfenced work differently from fenced work.
- VkCommandPoolCreateInfo cpci = {VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO, nullptr, 0, 0};
- VkCommandPool pool;
- err = vkCreateCommandPool(m_device->device(), &cpci, nullptr, &pool);
- ASSERT_VK_SUCCESS(err);
- VkCommandBufferAllocateInfo cbai = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, nullptr, pool,
- VK_COMMAND_BUFFER_LEVEL_PRIMARY, 1};
- VkCommandBuffer cb;
- err = vkAllocateCommandBuffers(m_device->device(), &cbai, &cb);
- ASSERT_VK_SUCCESS(err);
- VkCommandBufferBeginInfo cbbi = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, nullptr, 0, nullptr};
- err = vkBeginCommandBuffer(cb, &cbbi);
- ASSERT_VK_SUCCESS(err);
- err = vkEndCommandBuffer(cb);
- ASSERT_VK_SUCCESS(err);
-
- // A semaphore
- VkSemaphoreCreateInfo sci = {VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO, nullptr, 0};
- VkSemaphore s;
- err = vkCreateSemaphore(m_device->device(), &sci, nullptr, &s);
- ASSERT_VK_SUCCESS(err);
-
- // First submission, to q0
- VkSubmitInfo s0 = {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 0, nullptr, nullptr, 1, &cb, 1, &s};
-
- err = vkQueueSubmit(q0, 1, &s0, VK_NULL_HANDLE);
- ASSERT_VK_SUCCESS(err);
-
- // Second submission, to q1, waiting on s
- VkFlags waitmask = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT; // doesn't really matter what this value is.
- VkSubmitInfo s1 = {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 1, &s, &waitmask, 0, nullptr, 0, nullptr};
-
- err = vkQueueSubmit(q1, 1, &s1, VK_NULL_HANDLE);
- ASSERT_VK_SUCCESS(err);
-
- // Wait for q0 idle
- err = vkQueueWaitIdle(q0);
- ASSERT_VK_SUCCESS(err);
-
- // Command buffer should have completed (it was on q0); free it.
- vkFreeCommandBuffers(m_device->device(), pool, 1, &cb);
-
- m_errorMonitor->VerifyNotFound();
-
- // Force device completely idle and clean up resources
- vkDeviceWaitIdle(m_device->device());
- vkDestroyCommandPool(m_device->device(), pool, nullptr);
- vkDestroySemaphore(m_device->device(), s, nullptr);
-}
-
-// This is a positive test. No errors should be generated.
-TEST_F(VkPositiveLayerTest, TwoQueueSubmitsSeparateQueuesWithSemaphoreAndOneFence) {
- TEST_DESCRIPTION(
- "Two command buffers, each in a separate QueueSubmit call submitted on separate queues, the second having a fence, "
- "followed by a WaitForFences call.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- if ((m_device->queue_props.empty()) || (m_device->queue_props[0].queueCount < 2)) {
- printf("%s Queue family needs to have multiple queues to run this test.\n", kSkipPrefix);
- return;
- }
-
- m_errorMonitor->ExpectSuccess();
-
- VkFence fence;
- VkFenceCreateInfo fence_create_info{};
- fence_create_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
- vkCreateFence(m_device->device(), &fence_create_info, nullptr, &fence);
-
- VkSemaphore semaphore;
- VkSemaphoreCreateInfo semaphore_create_info{};
- semaphore_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
- vkCreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore);
-
- VkCommandPool command_pool;
- VkCommandPoolCreateInfo pool_create_info{};
- pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
- pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
- pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
- vkCreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool);
-
- VkCommandBuffer command_buffer[2];
- VkCommandBufferAllocateInfo command_buffer_allocate_info{};
- command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
- command_buffer_allocate_info.commandPool = command_pool;
- command_buffer_allocate_info.commandBufferCount = 2;
- command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
- vkAllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, command_buffer);
-
- VkQueue queue = VK_NULL_HANDLE;
- vkGetDeviceQueue(m_device->device(), m_device->graphics_queue_node_index_, 1, &queue);
-
- {
- VkCommandBufferBeginInfo begin_info{};
- begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
- vkBeginCommandBuffer(command_buffer[0], &begin_info);
-
- vkCmdPipelineBarrier(command_buffer[0], VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
- nullptr, 0, nullptr, 0, nullptr);
-
- VkViewport viewport{};
- viewport.maxDepth = 1.0f;
- viewport.minDepth = 0.0f;
- viewport.width = 512;
- viewport.height = 512;
- viewport.x = 0;
- viewport.y = 0;
- vkCmdSetViewport(command_buffer[0], 0, 1, &viewport);
- vkEndCommandBuffer(command_buffer[0]);
- }
- {
- VkCommandBufferBeginInfo begin_info{};
- begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
- vkBeginCommandBuffer(command_buffer[1], &begin_info);
-
- VkViewport viewport{};
- viewport.maxDepth = 1.0f;
- viewport.minDepth = 0.0f;
- viewport.width = 512;
- viewport.height = 512;
- viewport.x = 0;
- viewport.y = 0;
- vkCmdSetViewport(command_buffer[1], 0, 1, &viewport);
- vkEndCommandBuffer(command_buffer[1]);
- }
- {
- VkSubmitInfo submit_info{};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &command_buffer[0];
- submit_info.signalSemaphoreCount = 1;
- submit_info.pSignalSemaphores = &semaphore;
- vkQueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);
- }
- {
- VkPipelineStageFlags flags[]{VK_PIPELINE_STAGE_ALL_COMMANDS_BIT};
- VkSubmitInfo submit_info{};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &command_buffer[1];
- submit_info.waitSemaphoreCount = 1;
- submit_info.pWaitSemaphores = &semaphore;
- submit_info.pWaitDstStageMask = flags;
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, fence);
- }
-
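- // Wait on the single fence; the semaphore already ordered the two submissions across the queues.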
- vkWaitForFences(m_device->device(), 1, &fence, VK_TRUE, UINT64_MAX);
-
- vkDestroyFence(m_device->device(), fence, nullptr);
- vkDestroySemaphore(m_device->device(), semaphore, nullptr);
- vkFreeCommandBuffers(m_device->device(), command_pool, 2, &command_buffer[0]);
- vkDestroyCommandPool(m_device->device(), command_pool, NULL);
-
- m_errorMonitor->VerifyNotFound();
-}
-
-// This is a positive test. No errors should be generated.
-TEST_F(VkPositiveLayerTest, TwoQueueSubmitsOneQueueWithSemaphoreAndOneFence) {
- TEST_DESCRIPTION(
- "Two command buffers, each in a separate QueueSubmit call on the same queue, sharing a signal/wait semaphore, the second "
- "having a fence, followed by a WaitForFences call.");
-
- m_errorMonitor->ExpectSuccess();
-
- ASSERT_NO_FATAL_FAILURE(Init());
- VkFence fence;
- VkFenceCreateInfo fence_create_info{};
- fence_create_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
- vkCreateFence(m_device->device(), &fence_create_info, nullptr, &fence);
-
- VkSemaphore semaphore;
- VkSemaphoreCreateInfo semaphore_create_info{};
- semaphore_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
- vkCreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore);
-
- VkCommandPool command_pool;
- VkCommandPoolCreateInfo pool_create_info{};
- pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
- pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
- pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
- vkCreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool);
-
- VkCommandBuffer command_buffer[2];
- VkCommandBufferAllocateInfo command_buffer_allocate_info{};
- command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
- command_buffer_allocate_info.commandPool = command_pool;
- command_buffer_allocate_info.commandBufferCount = 2;
- command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
- vkAllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, command_buffer);
-
- {
- VkCommandBufferBeginInfo begin_info{};
- begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
- vkBeginCommandBuffer(command_buffer[0], &begin_info);
-
- vkCmdPipelineBarrier(command_buffer[0], VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
- nullptr, 0, nullptr, 0, nullptr);
-
- VkViewport viewport{};
- viewport.maxDepth = 1.0f;
- viewport.minDepth = 0.0f;
- viewport.width = 512;
- viewport.height = 512;
- viewport.x = 0;
- viewport.y = 0;
- vkCmdSetViewport(command_buffer[0], 0, 1, &viewport);
- vkEndCommandBuffer(command_buffer[0]);
- }
- {
- VkCommandBufferBeginInfo begin_info{};
- begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
- vkBeginCommandBuffer(command_buffer[1], &begin_info);
-
- VkViewport viewport{};
- viewport.maxDepth = 1.0f;
- viewport.minDepth = 0.0f;
- viewport.width = 512;
- viewport.height = 512;
- viewport.x = 0;
- viewport.y = 0;
- vkCmdSetViewport(command_buffer[1], 0, 1, &viewport);
- vkEndCommandBuffer(command_buffer[1]);
- }
- {
- VkSubmitInfo submit_info{};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &command_buffer[0];
- submit_info.signalSemaphoreCount = 1;
- submit_info.pSignalSemaphores = &semaphore;
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
- }
- {
- VkPipelineStageFlags flags[]{VK_PIPELINE_STAGE_ALL_COMMANDS_BIT};
- VkSubmitInfo submit_info{};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &command_buffer[1];
- submit_info.waitSemaphoreCount = 1;
- submit_info.pWaitSemaphores = &semaphore;
- submit_info.pWaitDstStageMask = flags;
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, fence);
- }
-
- vkWaitForFences(m_device->device(), 1, &fence, VK_TRUE, UINT64_MAX);
-
- vkDestroyFence(m_device->device(), fence, nullptr);
- vkDestroySemaphore(m_device->device(), semaphore, nullptr);
- vkFreeCommandBuffers(m_device->device(), command_pool, 2, &command_buffer[0]);
- vkDestroyCommandPool(m_device->device(), command_pool, NULL);
-
- m_errorMonitor->VerifyNotFound();
-}
-
-// This is a positive test. No errors should be generated.
-TEST_F(VkPositiveLayerTest, TwoQueueSubmitsOneQueueNullQueueSubmitWithFence) {
- TEST_DESCRIPTION(
- "Two command buffers, each in a separate QueueSubmit call on the same queue, no fences, followed by a third QueueSubmit "
- "with NO SubmitInfos but with a fence, followed by a WaitForFences call.");
-
- m_errorMonitor->ExpectSuccess();
-
- ASSERT_NO_FATAL_FAILURE(Init());
- VkFence fence;
- VkFenceCreateInfo fence_create_info{};
- fence_create_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
- vkCreateFence(m_device->device(), &fence_create_info, nullptr, &fence);
-
- VkCommandPool command_pool;
- VkCommandPoolCreateInfo pool_create_info{};
- pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
- pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
- pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
- vkCreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool);
-
- VkCommandBuffer command_buffer[2];
- VkCommandBufferAllocateInfo command_buffer_allocate_info{};
- command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
- command_buffer_allocate_info.commandPool = command_pool;
- command_buffer_allocate_info.commandBufferCount = 2;
- command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
- vkAllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, command_buffer);
-
- {
- VkCommandBufferBeginInfo begin_info{};
- begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
- vkBeginCommandBuffer(command_buffer[0], &begin_info);
-
- vkCmdPipelineBarrier(command_buffer[0], VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
- nullptr, 0, nullptr, 0, nullptr);
-
- VkViewport viewport{};
- viewport.maxDepth = 1.0f;
- viewport.minDepth = 0.0f;
- viewport.width = 512;
- viewport.height = 512;
- viewport.x = 0;
- viewport.y = 0;
- vkCmdSetViewport(command_buffer[0], 0, 1, &viewport);
- vkEndCommandBuffer(command_buffer[0]);
- }
- {
- VkCommandBufferBeginInfo begin_info{};
- begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
- vkBeginCommandBuffer(command_buffer[1], &begin_info);
-
- VkViewport viewport{};
- viewport.maxDepth = 1.0f;
- viewport.minDepth = 0.0f;
- viewport.width = 512;
- viewport.height = 512;
- viewport.x = 0;
- viewport.y = 0;
- vkCmdSetViewport(command_buffer[1], 0, 1, &viewport);
- vkEndCommandBuffer(command_buffer[1]);
- }
- {
- VkSubmitInfo submit_info{};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &command_buffer[0];
- submit_info.signalSemaphoreCount = 0;
- submit_info.pSignalSemaphores = VK_NULL_HANDLE;
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
- }
- {
- VkPipelineStageFlags flags[]{VK_PIPELINE_STAGE_ALL_COMMANDS_BIT};
- VkSubmitInfo submit_info{};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &command_buffer[1];
- submit_info.waitSemaphoreCount = 0;
- submit_info.pWaitSemaphores = VK_NULL_HANDLE;
- submit_info.pWaitDstStageMask = flags;
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
- }
-
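- // Fence-only submit: no SubmitInfos, just a fence that signals once previously submitted work on the queue completes.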
- vkQueueSubmit(m_device->m_queue, 0, NULL, fence);
-
- VkResult err = vkWaitForFences(m_device->device(), 1, &fence, VK_TRUE, UINT64_MAX);
- ASSERT_VK_SUCCESS(err);
-
- vkDestroyFence(m_device->device(), fence, nullptr);
- vkFreeCommandBuffers(m_device->device(), command_pool, 2, &command_buffer[0]);
- vkDestroyCommandPool(m_device->device(), command_pool, NULL);
-
- m_errorMonitor->VerifyNotFound();
-}
-
-// This is a positive test. No errors should be generated.
-TEST_F(VkPositiveLayerTest, TwoQueueSubmitsOneQueueOneFence) {
- TEST_DESCRIPTION(
- "Two command buffers, each in a separate QueueSubmit call on the same queue, the second having a fence, followed by a "
- "WaitForFences call.");
-
- m_errorMonitor->ExpectSuccess();
-
- ASSERT_NO_FATAL_FAILURE(Init());
- VkFence fence;
- VkFenceCreateInfo fence_create_info{};
- fence_create_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
- vkCreateFence(m_device->device(), &fence_create_info, nullptr, &fence);
-
- VkCommandPool command_pool;
- VkCommandPoolCreateInfo pool_create_info{};
- pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
- pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
- pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
- vkCreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool);
-
- VkCommandBuffer command_buffer[2];
- VkCommandBufferAllocateInfo command_buffer_allocate_info{};
- command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
- command_buffer_allocate_info.commandPool = command_pool;
- command_buffer_allocate_info.commandBufferCount = 2;
- command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
- vkAllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, command_buffer);
-
- {
- VkCommandBufferBeginInfo begin_info{};
- begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
- vkBeginCommandBuffer(command_buffer[0], &begin_info);
-
- vkCmdPipelineBarrier(command_buffer[0], VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
- nullptr, 0, nullptr, 0, nullptr);
-
- VkViewport viewport{};
- viewport.maxDepth = 1.0f;
- viewport.minDepth = 0.0f;
- viewport.width = 512;
- viewport.height = 512;
- viewport.x = 0;
- viewport.y = 0;
- vkCmdSetViewport(command_buffer[0], 0, 1, &viewport);
- vkEndCommandBuffer(command_buffer[0]);
- }
- {
- VkCommandBufferBeginInfo begin_info{};
- begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
- vkBeginCommandBuffer(command_buffer[1], &begin_info);
-
- VkViewport viewport{};
- viewport.maxDepth = 1.0f;
- viewport.minDepth = 0.0f;
- viewport.width = 512;
- viewport.height = 512;
- viewport.x = 0;
- viewport.y = 0;
- vkCmdSetViewport(command_buffer[1], 0, 1, &viewport);
- vkEndCommandBuffer(command_buffer[1]);
- }
- {
- VkSubmitInfo submit_info{};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &command_buffer[0];
- submit_info.signalSemaphoreCount = 0;
- submit_info.pSignalSemaphores = VK_NULL_HANDLE;
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
- }
- {
- VkPipelineStageFlags flags[]{VK_PIPELINE_STAGE_ALL_COMMANDS_BIT};
- VkSubmitInfo submit_info{};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &command_buffer[1];
- submit_info.waitSemaphoreCount = 0;
- submit_info.pWaitSemaphores = VK_NULL_HANDLE;
- submit_info.pWaitDstStageMask = flags;
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, fence);
- }
-
- vkWaitForFences(m_device->device(), 1, &fence, VK_TRUE, UINT64_MAX);
-
- vkDestroyFence(m_device->device(), fence, nullptr);
- vkFreeCommandBuffers(m_device->device(), command_pool, 2, &command_buffer[0]);
- vkDestroyCommandPool(m_device->device(), command_pool, NULL);
-
- m_errorMonitor->VerifyNotFound();
-}
-
-// This is a positive test. No errors should be generated.
-TEST_F(VkPositiveLayerTest, TwoSubmitInfosWithSemaphoreOneQueueSubmitsOneFence) {
- TEST_DESCRIPTION(
- "Two command buffers each in a separate SubmitInfo sent in a single QueueSubmit call followed by a WaitForFences call.");
- ASSERT_NO_FATAL_FAILURE(Init());
-
- m_errorMonitor->ExpectSuccess();
-
- VkFence fence;
- VkFenceCreateInfo fence_create_info{};
- fence_create_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
- vkCreateFence(m_device->device(), &fence_create_info, nullptr, &fence);
-
- VkSemaphore semaphore;
- VkSemaphoreCreateInfo semaphore_create_info{};
- semaphore_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
- vkCreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore);
-
- VkCommandPool command_pool;
- VkCommandPoolCreateInfo pool_create_info{};
- pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
- pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
- pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
- vkCreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool);
-
- VkCommandBuffer command_buffer[2];
- VkCommandBufferAllocateInfo command_buffer_allocate_info{};
- command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
- command_buffer_allocate_info.commandPool = command_pool;
- command_buffer_allocate_info.commandBufferCount = 2;
- command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
- vkAllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, command_buffer);
-
- {
- VkCommandBufferBeginInfo begin_info{};
- begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
- vkBeginCommandBuffer(command_buffer[0], &begin_info);
-
- vkCmdPipelineBarrier(command_buffer[0], VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
- nullptr, 0, nullptr, 0, nullptr);
-
- VkViewport viewport{};
- viewport.maxDepth = 1.0f;
- viewport.minDepth = 0.0f;
- viewport.width = 512;
- viewport.height = 512;
- viewport.x = 0;
- viewport.y = 0;
- vkCmdSetViewport(command_buffer[0], 0, 1, &viewport);
- vkEndCommandBuffer(command_buffer[0]);
- }
- {
- VkCommandBufferBeginInfo begin_info{};
- begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
- vkBeginCommandBuffer(command_buffer[1], &begin_info);
-
- VkViewport viewport{};
- viewport.maxDepth = 1.0f;
- viewport.minDepth = 0.0f;
- viewport.width = 512;
- viewport.height = 512;
- viewport.x = 0;
- viewport.y = 0;
- vkCmdSetViewport(command_buffer[1], 0, 1, &viewport);
- vkEndCommandBuffer(command_buffer[1]);
- }
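- // Both SubmitInfos go into one vkQueueSubmit: the first signals the semaphore, the second waits on it before executing.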
- {
- VkSubmitInfo submit_info[2];
- VkPipelineStageFlags flags[]{VK_PIPELINE_STAGE_ALL_COMMANDS_BIT};
-
- submit_info[0].sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info[0].pNext = NULL;
- submit_info[0].commandBufferCount = 1;
- submit_info[0].pCommandBuffers = &command_buffer[0];
- submit_info[0].signalSemaphoreCount = 1;
- submit_info[0].pSignalSemaphores = &semaphore;
- submit_info[0].waitSemaphoreCount = 0;
- submit_info[0].pWaitSemaphores = NULL;
- submit_info[0].pWaitDstStageMask = 0;
-
- submit_info[1].sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info[1].pNext = NULL;
- submit_info[1].commandBufferCount = 1;
- submit_info[1].pCommandBuffers = &command_buffer[1];
- submit_info[1].waitSemaphoreCount = 1;
- submit_info[1].pWaitSemaphores = &semaphore;
- submit_info[1].pWaitDstStageMask = flags;
- submit_info[1].signalSemaphoreCount = 0;
- submit_info[1].pSignalSemaphores = NULL;
- vkQueueSubmit(m_device->m_queue, 2, &submit_info[0], fence);
- }
-
- vkWaitForFences(m_device->device(), 1, &fence, VK_TRUE, UINT64_MAX);
-
- vkDestroyFence(m_device->device(), fence, nullptr);
- vkFreeCommandBuffers(m_device->device(), command_pool, 2, &command_buffer[0]);
- vkDestroyCommandPool(m_device->device(), command_pool, NULL);
- vkDestroySemaphore(m_device->device(), semaphore, nullptr);
-
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, CreatePipelineAttribMatrixType) {
- TEST_DESCRIPTION("Test that pipeline validation accepts matrices passed as vertex attributes");
- m_errorMonitor->ExpectSuccess();
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkVertexInputBindingDescription input_binding;
- memset(&input_binding, 0, sizeof(input_binding));
-
- VkVertexInputAttributeDescription input_attribs[2];
- memset(input_attribs, 0, sizeof(input_attribs));
-
- for (int i = 0; i < 2; i++) {
- input_attribs[i].format = VK_FORMAT_R32G32B32A32_SFLOAT;
- input_attribs[i].location = i;
- }
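- // The mat2x4 input below consumes two consecutive locations, one vec4 column per location.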
-
- char const *vsSource =
- "#version 450\n"
- "\n"
- "layout(location=0) in mat2x4 x;\n"
- "void main(){\n"
- " gl_Position = x[0] + x[1];\n"
- "}\n";
-
- VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- CreatePipelineHelper pipe(*this);
- pipe.InitInfo();
- pipe.vi_ci_.pVertexBindingDescriptions = &input_binding;
- pipe.vi_ci_.vertexBindingDescriptionCount = 1;
- pipe.vi_ci_.pVertexAttributeDescriptions = input_attribs;
- pipe.vi_ci_.vertexAttributeDescriptionCount = 2;
- pipe.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
- pipe.InitState();
- pipe.CreateGraphicsPipeline();
- /* expect success */
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, CreatePipelineAttribArrayType) {
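- // Verifies that pipeline validation accepts a vertex attribute consumed as an array input (vec4 x[2]) spanning two locations.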
- m_errorMonitor->ExpectSuccess();
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkVertexInputBindingDescription input_binding;
- memset(&input_binding, 0, sizeof(input_binding));
-
- VkVertexInputAttributeDescription input_attribs[2];
- memset(input_attribs, 0, sizeof(input_attribs));
-
- for (int i = 0; i < 2; i++) {
- input_attribs[i].format = VK_FORMAT_R32G32B32A32_SFLOAT;
- input_attribs[i].location = i;
- }
-
- char const *vsSource =
- "#version 450\n"
- "\n"
- "layout(location=0) in vec4 x[2];\n"
- "void main(){\n"
- " gl_Position = x[0] + x[1];\n"
- "}\n";
-
- VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- CreatePipelineHelper pipe(*this);
- pipe.InitInfo();
- pipe.vi_ci_.pVertexBindingDescriptions = &input_binding;
- pipe.vi_ci_.vertexBindingDescriptionCount = 1;
- pipe.vi_ci_.pVertexAttributeDescriptions = input_attribs;
- pipe.vi_ci_.vertexAttributeDescriptionCount = 2;
- pipe.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
- pipe.InitState();
- pipe.CreateGraphicsPipeline();
-
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, CreatePipelineAttribComponents) {
- TEST_DESCRIPTION(
- "Test that pipeline validation accepts consuming a vertex attribute through multiple vertex shader inputs, each consuming "
- "a different subset of the components, and that fragment shader-attachment validation tolerates multiple duplicate "
- "location outputs");
- m_errorMonitor->ExpectSuccess(VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT);
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkVertexInputBindingDescription input_binding;
- memset(&input_binding, 0, sizeof(input_binding));
-
- VkVertexInputAttributeDescription input_attribs[3];
- memset(input_attribs, 0, sizeof(input_attribs));
-
- for (int i = 0; i < 3; i++) {
- input_attribs[i].format = VK_FORMAT_R32G32B32A32_SFLOAT;
- input_attribs[i].location = i;
- }
-
- char const *vsSource =
- "#version 450\n"
- "\n"
- "layout(location=0) in vec4 x;\n"
- "layout(location=1) in vec3 y1;\n"
- "layout(location=1, component=3) in float y2;\n"
- "layout(location=2) in vec4 z;\n"
- "void main(){\n"
- " gl_Position = x + vec4(y1, y2) + z;\n"
- "}\n";
- char const *fsSource =
- "#version 450\n"
- "\n"
- "layout(location=0, component=0) out float color0;\n"
- "layout(location=0, component=1) out float color1;\n"
- "layout(location=0, component=2) out float color2;\n"
- "layout(location=0, component=3) out float color3;\n"
- "layout(location=1, component=0) out vec2 second_color0;\n"
- "layout(location=1, component=2) out vec2 second_color1;\n"
- "void main(){\n"
- " color0 = float(1);\n"
- " second_color0 = vec2(1);\n"
- "}\n";
-
- VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- VkPipelineObj pipe(m_device);
-
- VkDescriptorSetObj descriptorSet(m_device);
- descriptorSet.AppendDummy();
- descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
-
- // Create a renderPass with two color attachments
- VkAttachmentReference attachments[2] = {};
- attachments[0].layout = VK_IMAGE_LAYOUT_GENERAL;
- attachments[1].attachment = 1;
- attachments[1].layout = VK_IMAGE_LAYOUT_GENERAL;
-
- VkSubpassDescription subpass = {};
- subpass.pColorAttachments = attachments;
- subpass.colorAttachmentCount = 2;
-
- VkRenderPassCreateInfo rpci = {};
- rpci.subpassCount = 1;
- rpci.pSubpasses = &subpass;
- rpci.attachmentCount = 2;
-
- VkAttachmentDescription attach_desc[2] = {};
- attach_desc[0].format = VK_FORMAT_B8G8R8A8_UNORM;
- attach_desc[0].samples = VK_SAMPLE_COUNT_1_BIT;
- attach_desc[0].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
- attach_desc[0].finalLayout = VK_IMAGE_LAYOUT_GENERAL;
- attach_desc[0].loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
- attach_desc[1].format = VK_FORMAT_B8G8R8A8_UNORM;
- attach_desc[1].samples = VK_SAMPLE_COUNT_1_BIT;
- attach_desc[1].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
- attach_desc[1].finalLayout = VK_IMAGE_LAYOUT_GENERAL;
- attach_desc[1].loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
-
- rpci.pAttachments = attach_desc;
- rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
-
- VkRenderPass renderpass;
- vkCreateRenderPass(m_device->device(), &rpci, NULL, &renderpass);
- pipe.AddShader(&vs);
- pipe.AddShader(&fs);
-
- VkPipelineColorBlendAttachmentState att_state1 = {};
- att_state1.dstAlphaBlendFactor = VK_BLEND_FACTOR_CONSTANT_COLOR;
- att_state1.blendEnable = VK_FALSE;
-
- pipe.AddColorAttachment(0, att_state1);
- pipe.AddColorAttachment(1, att_state1);
- pipe.AddVertexInputBindings(&input_binding, 1);
- pipe.AddVertexInputAttribs(input_attribs, 3);
- pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderpass);
- vkDestroyRenderPass(m_device->device(), renderpass, nullptr);
-
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, CreatePipelineSimplePositive) {
- m_errorMonitor->ExpectSuccess();
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- CreatePipelineHelper pipe(*this);
- pipe.InitInfo();
- pipe.InitState();
- pipe.CreateGraphicsPipeline();
-
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, CreatePipelineRelaxedTypeMatch) {
- TEST_DESCRIPTION(
- "Test that pipeline validation accepts the relaxed type matching rules set out in 14.1.3: fundamental type must match, and "
- "producer side must have at least as many components");
- m_errorMonitor->ExpectSuccess();
-
- // VK 1.0.8 Specification, 14.1.3 "Additionally,..." block
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
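- // The vertex outputs are wider than the matching fragment inputs (vec3 -> float, ivec3 -> int, vec3 -> vec2), which the relaxed rules allow.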
- char const *vsSource =
- "#version 450\n"
- "layout(location=0) out vec3 x;\n"
- "layout(location=1) out ivec3 y;\n"
- "layout(location=2) out vec3 z;\n"
- "void main(){\n"
- " gl_Position = vec4(0);\n"
- " x = vec3(0); y = ivec3(0); z = vec3(0);\n"
- "}\n";
- char const *fsSource =
- "#version 450\n"
- "\n"
- "layout(location=0) out vec4 color;\n"
- "layout(location=0) in float x;\n"
- "layout(location=1) flat in int y;\n"
- "layout(location=2) in vec2 z;\n"
- "void main(){\n"
- " color = vec4(1 + x + y + z.x);\n"
- "}\n";
-
- VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- CreatePipelineHelper pipe(*this);
- pipe.InitInfo();
- pipe.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
- pipe.InitState();
- pipe.CreateGraphicsPipeline();
-
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, CreatePipelineTessPerVertex) {
- TEST_DESCRIPTION("Test that pipeline validation accepts per-vertex variables passed between the TCS and TES stages");
- m_errorMonitor->ExpectSuccess();
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- if (!m_device->phy().features().tessellationShader) {
- printf("%s Device does not support tessellation shaders; skipped.\n", kSkipPrefix);
- return;
- }
-
- char const *tcsSource =
- "#version 450\n"
- "layout(location=0) out int x[];\n"
- "layout(vertices=3) out;\n"
- "void main(){\n"
- " gl_TessLevelOuter[0] = gl_TessLevelOuter[1] = gl_TessLevelOuter[2] = 1;\n"
- " gl_TessLevelInner[0] = 1;\n"
- " x[gl_InvocationID] = gl_InvocationID;\n"
- "}\n";
- char const *tesSource =
- "#version 450\n"
- "layout(triangles, equal_spacing, cw) in;\n"
- "layout(location=0) in int x[];\n"
- "void main(){\n"
- " gl_Position.xyz = gl_TessCoord;\n"
- " gl_Position.w = x[0] + x[1] + x[2];\n"
- "}\n";
-
- VkShaderObj vs(m_device, bindStateMinimalShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj tcs(m_device, tcsSource, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, this);
- VkShaderObj tes(m_device, tesSource, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, this);
- VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- VkPipelineInputAssemblyStateCreateInfo iasci{VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO, nullptr, 0,
- VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, VK_FALSE};
-
- VkPipelineTessellationStateCreateInfo tsci{VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO, nullptr, 0, 3};
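- // patchControlPoints = 3 matches the "layout(vertices=3) out" declaration in the TCS.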
-
- CreatePipelineHelper pipe(*this);
- pipe.InitInfo();
- pipe.gp_ci_.pTessellationState = &tsci;
- pipe.gp_ci_.pInputAssemblyState = &iasci;
- pipe.shader_stages_ = {vs.GetStageCreateInfo(), tcs.GetStageCreateInfo(), tes.GetStageCreateInfo(), fs.GetStageCreateInfo()};
- pipe.InitState();
- pipe.CreateGraphicsPipeline();
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, CreatePipelineGeometryInputBlockPositive) {
- TEST_DESCRIPTION(
- "Test that pipeline validation accepts a user-defined interface block passed into the geometry shader. This is interesting "
- "because the 'extra' array level is not present on the member type, but on the block instance.");
- m_errorMonitor->ExpectSuccess();
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- if (!m_device->phy().features().geometryShader) {
- printf("%s Device does not support geometry shaders; skipped.\n", kSkipPrefix);
- return;
- }
-
- char const *gsSource =
- "#version 450\n"
- "layout(triangles) in;\n"
- "layout(triangle_strip, max_vertices=3) out;\n"
- "layout(location=0) in VertexData { vec4 x; } gs_in[];\n"
- "void main() {\n"
- " gl_Position = gs_in[0].x;\n"
- " EmitVertex();\n"
- "}\n";
-
- VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj gs(m_device, gsSource, VK_SHADER_STAGE_GEOMETRY_BIT, this);
- VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- CreatePipelineHelper pipe(*this);
- pipe.InitInfo();
- pipe.shader_stages_ = {vs.GetStageCreateInfo(), gs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
- pipe.InitState();
- pipe.CreateGraphicsPipeline();
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, CreatePipeline64BitAttributesPositive) {
- TEST_DESCRIPTION(
- "Test that pipeline validation accepts basic use of 64bit vertex attributes. This is interesting because they consume "
- "multiple locations.");
- m_errorMonitor->ExpectSuccess();
-
- if (!EnableDeviceProfileLayer()) {
- printf("%s Failed to enable device profile layer.\n", kSkipPrefix);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- ASSERT_NO_FATAL_FAILURE(InitState());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- if (!m_device->phy().features().shaderFloat64) {
- printf("%s Device does not support 64bit vertex attributes; skipped.\n", kSkipPrefix);
- return;
- }
- // Mark the 64-bit format as supporting the vertex buffer feature (via the device profile layer)
- PFN_vkSetPhysicalDeviceFormatPropertiesEXT fpvkSetPhysicalDeviceFormatPropertiesEXT = nullptr;
- PFN_vkGetOriginalPhysicalDeviceFormatPropertiesEXT fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT = nullptr;
-
- // Load required functions
- if (!LoadDeviceProfileLayer(fpvkSetPhysicalDeviceFormatPropertiesEXT, fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT)) {
- return;
- }
- VkFormatProperties format_props;
- fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_R64G64B64A64_SFLOAT, &format_props);
- format_props.bufferFeatures |= VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT;
- fpvkSetPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_R64G64B64A64_SFLOAT, format_props);
-
- VkVertexInputBindingDescription input_bindings[1];
- memset(input_bindings, 0, sizeof(input_bindings));
-
- VkVertexInputAttributeDescription input_attribs[4];
- memset(input_attribs, 0, sizeof(input_attribs));
- input_attribs[0].location = 0;
- input_attribs[0].offset = 0;
- input_attribs[0].format = VK_FORMAT_R64G64B64A64_SFLOAT;
- input_attribs[1].location = 2;
- input_attribs[1].offset = 32;
- input_attribs[1].format = VK_FORMAT_R64G64B64A64_SFLOAT;
- input_attribs[2].location = 4;
- input_attribs[2].offset = 64;
- input_attribs[2].format = VK_FORMAT_R64G64B64A64_SFLOAT;
- input_attribs[3].location = 6;
- input_attribs[3].offset = 96;
- input_attribs[3].format = VK_FORMAT_R64G64B64A64_SFLOAT;
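- // A dmat4 consumes eight locations: each dvec4 column takes two, hence the attribute locations 0, 2, 4, and 6.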
-
- char const *vsSource =
- "#version 450\n"
- "\n"
- "layout(location=0) in dmat4 x;\n"
- "void main(){\n"
- " gl_Position = vec4(x[0][0]);\n"
- "}\n";
-
- VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- CreatePipelineHelper pipe(*this);
- pipe.InitInfo();
- pipe.vi_ci_.pVertexBindingDescriptions = input_bindings;
- pipe.vi_ci_.vertexBindingDescriptionCount = 1;
- pipe.vi_ci_.pVertexAttributeDescriptions = input_attribs;
- pipe.vi_ci_.vertexAttributeDescriptionCount = 4;
- pipe.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
- pipe.InitState();
- pipe.CreateGraphicsPipeline();
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, CreatePipelineInputAttachmentPositive) {
- TEST_DESCRIPTION("Positive test for a correctly matched input attachment");
- m_errorMonitor->ExpectSuccess();
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- char const *fsSource =
- "#version 450\n"
- "\n"
- "layout(input_attachment_index=0, set=0, binding=0) uniform subpassInput x;\n"
- "layout(location=0) out vec4 color;\n"
- "void main() {\n"
- " color = subpassLoad(x);\n"
- "}\n";
-
- VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- VkPipelineObj pipe(m_device);
- pipe.AddShader(&vs);
- pipe.AddShader(&fs);
- pipe.AddDefaultColorAttachment();
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkDescriptorSetLayoutBinding dslb = {0, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr};
- const VkDescriptorSetLayoutObj dsl(m_device, {dslb});
- const VkPipelineLayoutObj pl(m_device, {&dsl});
-
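- // Attachment 0 is the color target; attachment 1 (VK_IMAGE_LAYOUT_GENERAL) is the input attachment read by the fragment shader.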
- VkAttachmentDescription descs[2] = {
- {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_LOAD, VK_ATTACHMENT_STORE_OP_STORE,
- VK_ATTACHMENT_LOAD_OP_LOAD, VK_ATTACHMENT_STORE_OP_STORE, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
- VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
- {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_LOAD, VK_ATTACHMENT_STORE_OP_STORE,
- VK_ATTACHMENT_LOAD_OP_LOAD, VK_ATTACHMENT_STORE_OP_STORE, VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL},
- };
- VkAttachmentReference color = {
- 0,
- VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
- };
- VkAttachmentReference input = {
- 1,
- VK_IMAGE_LAYOUT_GENERAL,
- };
-
- VkSubpassDescription sd = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 1, &input, 1, &color, nullptr, nullptr, 0, nullptr};
-
- VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 2, descs, 1, &sd, 0, nullptr};
- VkRenderPass rp;
- VkResult err = vkCreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
- ASSERT_VK_SUCCESS(err);
-
- // Should be OK; any input-attachment mismatch would be reported here.
- pipe.CreateVKPipeline(pl.handle(), rp);
-
- m_errorMonitor->VerifyNotFound();
-
- vkDestroyRenderPass(m_device->device(), rp, nullptr);
-}
-
-TEST_F(VkPositiveLayerTest, CreateComputePipelineMissingDescriptorUnusedPositive) {
- TEST_DESCRIPTION(
- "Test that pipeline validation accepts a compute pipeline which declares a descriptor-backed resource which is not "
- "provided, but the shader does not statically use it. This is interesting because it requires compute pipelines to have a "
- "proper descriptor use walk, which they didn't for some time.");
- m_errorMonitor->ExpectSuccess();
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- char const *csSource =
- "#version 450\n"
- "\n"
- "layout(local_size_x=1) in;\n"
- "layout(set=0, binding=0) buffer block { vec4 x; };\n"
- "void main(){\n"
- " // x is not used.\n"
- "}\n";
-
- CreateComputePipelineHelper pipe(*this);
- pipe.InitInfo();
- pipe.cs_.reset(new VkShaderObj(m_device, csSource, VK_SHADER_STAGE_COMPUTE_BIT, this));
- pipe.InitState();
- pipe.CreateComputePipeline();
-
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, CreateComputePipelineCombinedImageSamplerConsumedAsSampler) {
- TEST_DESCRIPTION(
- "Test that pipeline validation accepts a shader consuming only the sampler portion of a combined image + sampler");
- m_errorMonitor->ExpectSuccess();
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- std::vector<VkDescriptorSetLayoutBinding> bindings = {
- {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
- {1, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
- {2, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
- };
-
- char const *csSource =
- "#version 450\n"
- "\n"
- "layout(local_size_x=1) in;\n"
- "layout(set=0, binding=0) uniform sampler s;\n"
- "layout(set=0, binding=1) uniform texture2D t;\n"
- "layout(set=0, binding=2) buffer block { vec4 x; };\n"
- "void main() {\n"
- " x = texture(sampler2D(t, s), vec2(0));\n"
- "}\n";
- CreateComputePipelineHelper pipe(*this);
- pipe.InitInfo();
- pipe.dsl_bindings_.resize(bindings.size());
- memcpy(pipe.dsl_bindings_.data(), bindings.data(), bindings.size() * sizeof(VkDescriptorSetLayoutBinding));
- pipe.cs_.reset(new VkShaderObj(m_device, csSource, VK_SHADER_STAGE_COMPUTE_BIT, this));
- pipe.InitState();
- m_errorMonitor->ExpectSuccess();
- pipe.CreateComputePipeline();
-
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, CreateComputePipelineCombinedImageSamplerConsumedAsImage) {
- TEST_DESCRIPTION(
- "Test that pipeline validation accepts a shader consuming only the image portion of a combined image + sampler");
- m_errorMonitor->ExpectSuccess();
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- std::vector<VkDescriptorSetLayoutBinding> bindings = {
- {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
- {1, VK_DESCRIPTOR_TYPE_SAMPLER, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
- {2, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
- };
-
- char const *csSource =
- "#version 450\n"
- "\n"
- "layout(local_size_x=1) in;\n"
- "layout(set=0, binding=0) uniform texture2D t;\n"
- "layout(set=0, binding=1) uniform sampler s;\n"
- "layout(set=0, binding=2) buffer block { vec4 x; };\n"
- "void main() {\n"
- " x = texture(sampler2D(t, s), vec2(0));\n"
- "}\n";
- CreateComputePipelineHelper pipe(*this);
- pipe.InitInfo();
- pipe.dsl_bindings_.resize(bindings.size());
- memcpy(pipe.dsl_bindings_.data(), bindings.data(), bindings.size() * sizeof(VkDescriptorSetLayoutBinding));
- pipe.cs_.reset(new VkShaderObj(m_device, csSource, VK_SHADER_STAGE_COMPUTE_BIT, this));
- pipe.InitState();
- m_errorMonitor->ExpectSuccess();
- pipe.CreateComputePipeline();
-
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, CreateComputePipelineCombinedImageSamplerConsumedAsBoth) {
- TEST_DESCRIPTION(
- "Test that pipeline validation accepts a shader consuming both the sampler and the image of a combined image+sampler but "
- "via separate variables");
- m_errorMonitor->ExpectSuccess();
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- std::vector<VkDescriptorSetLayoutBinding> bindings = {
- {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
- {1, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
- };
-
- char const *csSource =
- "#version 450\n"
- "\n"
- "layout(local_size_x=1) in;\n"
- "layout(set=0, binding=0) uniform texture2D t;\n"
- "layout(set=0, binding=0) uniform sampler s; // both binding 0!\n"
- "layout(set=0, binding=1) buffer block { vec4 x; };\n"
- "void main() {\n"
- " x = texture(sampler2D(t, s), vec2(0));\n"
- "}\n";
- CreateComputePipelineHelper pipe(*this);
- pipe.InitInfo();
- pipe.dsl_bindings_.resize(bindings.size());
- memcpy(pipe.dsl_bindings_.data(), bindings.data(), bindings.size() * sizeof(VkDescriptorSetLayoutBinding));
- pipe.cs_.reset(new VkShaderObj(m_device, csSource, VK_SHADER_STAGE_COMPUTE_BIT, this));
- pipe.InitState();
- m_errorMonitor->ExpectSuccess();
- pipe.CreateComputePipeline();
-
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, CreateDescriptorSetBindingWithIgnoredSamplers) {
- TEST_DESCRIPTION("Test that layers conditionally do ignore the pImmutableSamplers on vkCreateDescriptorSetLayout");
-
- bool prop2_found = false;
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- prop2_found = true;
- } else {
- printf("%s %s Extension not supported, skipping push descriptor sub-tests\n", kSkipPrefix,
- VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- }
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- bool push_descriptor_found = false;
- if (prop2_found && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
-
- // In addition to the extension being supported we need to have at least one available
- // Some implementations report an invalid maxPushDescriptors of 0
- push_descriptor_found = GetPushDescriptorProperties(instance(), gpu()).maxPushDescriptors > 0;
- } else {
- printf("%s %s Extension not supported, skipping push descriptor sub-tests\n", kSkipPrefix,
- VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
- }
-
- ASSERT_NO_FATAL_FAILURE(InitState());
- const uint64_t fake_address_64 = 0xCDCDCDCDCDCDCDCD;
- const uint64_t fake_address_32 = 0xCDCDCDCD;
- const void *fake_pointer =
- sizeof(void *) == 8 ? reinterpret_cast<void *>(fake_address_64) : reinterpret_cast<void *>(fake_address_32);
- const VkSampler *hopefully_undereferencable_pointer = reinterpret_cast<const VkSampler *>(fake_pointer);
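-    // For descriptor types other than SAMPLER and COMBINED_IMAGE_SAMPLER the spec requires pImmutableSamplers to be
-    // ignored, so the layers must never dereference this garbage pointer.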
-
- // regular descriptors
- m_errorMonitor->ExpectSuccess();
- {
- const VkDescriptorSetLayoutBinding non_sampler_bindings[] = {
- {0, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
- {1, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
- {2, VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
- {3, VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
- {4, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
- {5, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
- {6, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
- {7, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
- {8, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
- };
- const VkDescriptorSetLayoutCreateInfo dslci = {VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, nullptr, 0,
- static_cast<uint32_t>(size(non_sampler_bindings)), non_sampler_bindings};
- VkDescriptorSetLayout dsl;
- const VkResult err = vkCreateDescriptorSetLayout(m_device->device(), &dslci, nullptr, &dsl);
- ASSERT_VK_SUCCESS(err);
- vkDestroyDescriptorSetLayout(m_device->device(), dsl, nullptr);
- }
- m_errorMonitor->VerifyNotFound();
-
- if (push_descriptor_found) {
- // push descriptors
- m_errorMonitor->ExpectSuccess();
- {
- const VkDescriptorSetLayoutBinding non_sampler_bindings[] = {
- {0, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
- {1, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
- {2, VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
- {3, VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
- {4, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
- {5, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
- {6, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
- };
- const VkDescriptorSetLayoutCreateInfo dslci = {VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, nullptr,
- VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR,
- static_cast<uint32_t>(size(non_sampler_bindings)), non_sampler_bindings};
- VkDescriptorSetLayout dsl;
- const VkResult err = vkCreateDescriptorSetLayout(m_device->device(), &dslci, nullptr, &dsl);
- ASSERT_VK_SUCCESS(err);
- vkDestroyDescriptorSetLayout(m_device->device(), dsl, nullptr);
- }
- m_errorMonitor->VerifyNotFound();
- }
-}
-
-TEST_F(VkPositiveLayerTest, GpuValidationInlineUniformBlock) {
- TEST_DESCRIPTION("GPU validation: Make sure inline uniform blocks don't generate false validation errors");
- m_errorMonitor->ExpectSuccess();
- VkValidationFeatureEnableEXT enables[] = {VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT};
- VkValidationFeaturesEXT features = {};
- features.sType = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT;
- features.enabledValidationFeatureCount = 1;
- features.pEnabledValidationFeatures = enables;
- bool descriptor_indexing = CheckDescriptorIndexingSupportAndInitFramework(this, m_instance_extension_names,
- m_device_extension_names, &features, m_errorMonitor);
- if (DeviceIsMockICD() || DeviceSimulation()) {
- printf("%s Test not supported by MockICD, skipping tests\n", kSkipPrefix);
- return;
- }
- VkPhysicalDeviceFeatures2KHR features2 = {};
- auto indexing_features = lvl_init_struct<VkPhysicalDeviceDescriptorIndexingFeaturesEXT>();
- auto inline_uniform_block_features = lvl_init_struct<VkPhysicalDeviceInlineUniformBlockFeaturesEXT>(&indexing_features);
- bool inline_uniform_block = DeviceExtensionSupported(gpu(), nullptr, VK_EXT_INLINE_UNIFORM_BLOCK_EXTENSION_NAME);
- if (!(descriptor_indexing && inline_uniform_block)) {
- printf("Descriptor indexing and/or inline uniform block not supported Skipping test\n");
- return;
- }
- m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_EXT_INLINE_UNIFORM_BLOCK_EXTENSION_NAME);
- PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
- (PFN_vkGetPhysicalDeviceFeatures2KHR)vkGetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
- ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
-
- features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&inline_uniform_block_features);
- vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
- if (!indexing_features.descriptorBindingPartiallyBound || !inline_uniform_block_features.inlineUniformBlock) {
- printf("Not all features supported, skipping test\n");
- return;
- }
- auto inline_uniform_props = lvl_init_struct<VkPhysicalDeviceInlineUniformBlockPropertiesEXT>();
- auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&inline_uniform_props);
- vkGetPhysicalDeviceProperties2(gpu(), &prop2);
-
- VkCommandPoolCreateFlags pool_flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2, pool_flags));
- if (m_device->props.apiVersion < VK_API_VERSION_1_1) {
- printf("%s GPU-Assisted validation test requires Vulkan 1.1+.\n", kSkipPrefix);
- return;
- }
- auto c_queue = m_device->GetDefaultComputeQueue();
- if (nullptr == c_queue) {
- printf("Compute not supported, skipping test\n");
- return;
- }
-
- uint32_t qfi = 0;
- VkBufferCreateInfo bci = {};
- bci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
- bci.usage = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
- bci.size = 4;
- bci.queueFamilyIndexCount = 1;
- bci.pQueueFamilyIndices = &qfi;
- VkBufferObj buffer0;
- VkMemoryPropertyFlags mem_props = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
- buffer0.init(*m_device, bci, mem_props);
-
- VkDescriptorBindingFlagsEXT ds_binding_flags[2] = {};
- ds_binding_flags[1] = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT;
- VkDescriptorSetLayoutBindingFlagsCreateInfoEXT layout_createinfo_binding_flags[1] = {};
- layout_createinfo_binding_flags[0].sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT;
- layout_createinfo_binding_flags[0].pNext = NULL;
- layout_createinfo_binding_flags[0].bindingCount = 2;
- layout_createinfo_binding_flags[0].pBindingFlags = ds_binding_flags;
-
- OneOffDescriptorSet descriptor_set(m_device,
- {
- {0, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
- {1, VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT, 20, VK_SHADER_STAGE_ALL,
- nullptr}, // 16 bytes for ivec4, 4 more for int
- },
- 0, layout_createinfo_binding_flags, 0);
- const VkPipelineLayoutObj pipeline_layout(m_device, {&descriptor_set.layout_});
-
- VkDescriptorBufferInfo buffer_info[1] = {};
- buffer_info[0].buffer = buffer0.handle();
- buffer_info[0].offset = 0;
- buffer_info[0].range = sizeof(uint32_t);
-
- const uint32_t test_data = 0xdeadca7;
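-    // For an inline uniform block the payload is supplied via VkWriteDescriptorSetInlineUniformBlockEXT chained through
-    // pNext; its dataSize and the write's dstArrayElement/descriptorCount are expressed in bytes.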
- VkWriteDescriptorSetInlineUniformBlockEXT write_inline_uniform = {};
- write_inline_uniform.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT;
- write_inline_uniform.dataSize = 4;
- write_inline_uniform.pData = &test_data;
-
- VkWriteDescriptorSet descriptor_writes[2] = {};
- descriptor_writes[0].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
- descriptor_writes[0].dstSet = descriptor_set.set_;
- descriptor_writes[0].dstBinding = 0;
- descriptor_writes[0].descriptorCount = 1;
- descriptor_writes[0].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
- descriptor_writes[0].pBufferInfo = buffer_info;
-
- descriptor_writes[1].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
- descriptor_writes[1].dstSet = descriptor_set.set_;
- descriptor_writes[1].dstBinding = 1;
- descriptor_writes[1].dstArrayElement = 16; // Skip first 16 bytes (dummy)
- descriptor_writes[1].descriptorCount = 4; // Write 4 bytes to val
- descriptor_writes[1].descriptorType = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT;
- descriptor_writes[1].pNext = &write_inline_uniform;
- vkUpdateDescriptorSets(m_device->device(), 2, descriptor_writes, 0, NULL);
-
- char const *csSource =
- "#version 450\n"
- "#extension GL_EXT_nonuniform_qualifier : enable\n "
- "layout(set = 0, binding = 0) buffer StorageBuffer { uint index; } u_index;"
- "layout(set = 0, binding = 1) uniform inlineubodef { ivec4 dummy; int val; } inlineubo;\n"
-
- "void main() {\n"
- " u_index.index = inlineubo.val;\n"
- "}\n";
-
- auto shader_module = new VkShaderObj(m_device, csSource, VK_SHADER_STAGE_COMPUTE_BIT, this);
-
- VkPipelineShaderStageCreateInfo stage;
- stage.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
- stage.pNext = nullptr;
- stage.flags = 0;
- stage.stage = VK_SHADER_STAGE_COMPUTE_BIT;
- stage.module = shader_module->handle();
- stage.pName = "main";
- stage.pSpecializationInfo = nullptr;
-
- // CreateComputePipelines
- VkComputePipelineCreateInfo pipeline_info = {};
- pipeline_info.sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO;
- pipeline_info.pNext = nullptr;
- pipeline_info.flags = 0;
- pipeline_info.layout = pipeline_layout.handle();
- pipeline_info.basePipelineHandle = VK_NULL_HANDLE;
- pipeline_info.basePipelineIndex = -1;
- pipeline_info.stage = stage;
-
- VkPipeline c_pipeline;
- vkCreateComputePipelines(device(), VK_NULL_HANDLE, 1, &pipeline_info, nullptr, &c_pipeline);
-
- m_commandBuffer->begin();
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_COMPUTE, c_pipeline);
- vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_COMPUTE, pipeline_layout.handle(), 0, 1,
- &descriptor_set.set_, 0, nullptr);
- vkCmdDispatch(m_commandBuffer->handle(), 1, 1, 1);
- m_commandBuffer->end();
-
- VkSubmitInfo submit_info = {};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &m_commandBuffer->handle();
- vkQueueSubmit(c_queue->handle(), 1, &submit_info, VK_NULL_HANDLE);
-    // Wait on the compute queue the work was submitted to before mapping the buffer for readback
-    vkQueueWaitIdle(c_queue->handle());
- m_errorMonitor->VerifyNotFound();
- vkDestroyPipeline(m_device->handle(), c_pipeline, NULL);
- vkDestroyShaderModule(m_device->handle(), shader_module->handle(), NULL);
-
- uint32_t *data = (uint32_t *)buffer0.memory().map();
-    ASSERT_TRUE(*data == test_data);
- buffer0.memory().unmap();
-}
-
-TEST_F(VkPositiveLayerTest, Maintenance1Tests) {
- TEST_DESCRIPTION("Validate various special cases for the Maintenance1_KHR extension");
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
- } else {
- printf("%s Maintenance1 Extension not supported, skipping tests\n", kSkipPrefix);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- m_errorMonitor->ExpectSuccess();
-
- VkCommandBufferObj cmd_buf(m_device, m_commandPool);
- cmd_buf.begin();
- // Set Negative height, should give error if Maintenance 1 is not enabled
- VkViewport viewport = {0, 0, 16, -16, 0, 1};
- vkCmdSetViewport(cmd_buf.handle(), 0, 1, &viewport);
- cmd_buf.end();
-
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, ValidStructPNext) {
- TEST_DESCRIPTION("Verify that a valid pNext value is handled correctly");
-
- // Positive test to check parameter_validation and unique_objects support for NV_dedicated_allocation
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- if (DeviceExtensionSupported(gpu(), nullptr, VK_NV_DEDICATED_ALLOCATION_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_NV_DEDICATED_ALLOCATION_EXTENSION_NAME);
- } else {
- printf("%s VK_NV_DEDICATED_ALLOCATION_EXTENSION_NAME Extension not supported, skipping test\n", kSkipPrefix);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- m_errorMonitor->ExpectSuccess();
-
- VkDedicatedAllocationBufferCreateInfoNV dedicated_buffer_create_info = {};
- dedicated_buffer_create_info.sType = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV;
- dedicated_buffer_create_info.pNext = nullptr;
- dedicated_buffer_create_info.dedicatedAllocation = VK_TRUE;
-
- uint32_t queue_family_index = 0;
- VkBufferCreateInfo buffer_create_info = {};
- buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
- buffer_create_info.pNext = &dedicated_buffer_create_info;
- buffer_create_info.size = 1024;
- buffer_create_info.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
- buffer_create_info.queueFamilyIndexCount = 1;
- buffer_create_info.pQueueFamilyIndices = &queue_family_index;
-
- VkBuffer buffer;
- VkResult err = vkCreateBuffer(m_device->device(), &buffer_create_info, NULL, &buffer);
- ASSERT_VK_SUCCESS(err);
-
- VkMemoryRequirements memory_reqs;
- vkGetBufferMemoryRequirements(m_device->device(), buffer, &memory_reqs);
-
- VkDedicatedAllocationMemoryAllocateInfoNV dedicated_memory_info = {};
- dedicated_memory_info.sType = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV;
- dedicated_memory_info.pNext = nullptr;
- dedicated_memory_info.buffer = buffer;
- dedicated_memory_info.image = VK_NULL_HANDLE;
-
- VkMemoryAllocateInfo memory_info = {};
- memory_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
- memory_info.pNext = &dedicated_memory_info;
- memory_info.allocationSize = memory_reqs.size;
-
- bool pass;
- pass = m_device->phy().set_memory_type(memory_reqs.memoryTypeBits, &memory_info, 0);
- ASSERT_TRUE(pass);
-
- VkDeviceMemory buffer_memory;
- err = vkAllocateMemory(m_device->device(), &memory_info, NULL, &buffer_memory);
- ASSERT_VK_SUCCESS(err);
-
- err = vkBindBufferMemory(m_device->device(), buffer, buffer_memory, 0);
- ASSERT_VK_SUCCESS(err);
-
- vkDestroyBuffer(m_device->device(), buffer, NULL);
- vkFreeMemory(m_device->device(), buffer_memory, NULL);
-
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, PSOPolygonModeValid) {
- TEST_DESCRIPTION("Verify that using a solid polygon fill mode works correctly.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- std::vector<const char *> device_extension_names;
- auto features = m_device->phy().features();
- // Artificially disable support for non-solid fill modes
- features.fillModeNonSolid = false;
- // The sacrificial device object
- VkDeviceObj test_device(0, gpu(), device_extension_names, &features);
-
- VkRenderpassObj render_pass(&test_device);
-
- const VkPipelineLayoutObj pipeline_layout(&test_device);
-
- VkPipelineRasterizationStateCreateInfo rs_ci = {};
- rs_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
- rs_ci.pNext = nullptr;
- rs_ci.lineWidth = 1.0f;
- rs_ci.rasterizerDiscardEnable = false;
-
- VkShaderObj vs(&test_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj fs(&test_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- // Set polygonMode=FILL. No error is expected
- m_errorMonitor->ExpectSuccess();
- {
- VkPipelineObj pipe(&test_device);
- pipe.AddShader(&vs);
- pipe.AddShader(&fs);
- pipe.AddDefaultColorAttachment();
- // Set polygonMode to a good value
- rs_ci.polygonMode = VK_POLYGON_MODE_FILL;
- pipe.SetRasterization(&rs_ci);
- pipe.CreateVKPipeline(pipeline_layout.handle(), render_pass.handle());
- }
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, LongSemaphoreChain) {
- m_errorMonitor->ExpectSuccess();
-
- ASSERT_NO_FATAL_FAILURE(Init());
- VkResult err;
-
- std::vector<VkSemaphore> semaphores;
-
- const int chainLength = 32768;
- VkPipelineStageFlags flags = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
-
- for (int i = 0; i < chainLength; i++) {
- VkSemaphoreCreateInfo sci = {VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO, nullptr, 0};
- VkSemaphore semaphore;
- err = vkCreateSemaphore(m_device->device(), &sci, nullptr, &semaphore);
- ASSERT_VK_SUCCESS(err);
-
- semaphores.push_back(semaphore);
-
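-        // Each submit waits on the previously created semaphore (when one exists) and signals the newest one,
-        // forming a chain of chainLength dependent submissions.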
- VkSubmitInfo si = {VK_STRUCTURE_TYPE_SUBMIT_INFO,
- nullptr,
- semaphores.size() > 1 ? 1u : 0u,
- semaphores.size() > 1 ? &semaphores[semaphores.size() - 2] : nullptr,
- &flags,
- 0,
- nullptr,
- 1,
- &semaphores[semaphores.size() - 1]};
- err = vkQueueSubmit(m_device->m_queue, 1, &si, VK_NULL_HANDLE);
- ASSERT_VK_SUCCESS(err);
- }
-
- VkFenceCreateInfo fci = {VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, nullptr, 0};
- VkFence fence;
- err = vkCreateFence(m_device->device(), &fci, nullptr, &fence);
- ASSERT_VK_SUCCESS(err);
- VkSubmitInfo si = {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 1, &semaphores.back(), &flags, 0, nullptr, 0, nullptr};
- err = vkQueueSubmit(m_device->m_queue, 1, &si, fence);
- ASSERT_VK_SUCCESS(err);
-
- vkWaitForFences(m_device->device(), 1, &fence, VK_TRUE, UINT64_MAX);
-
- for (auto semaphore : semaphores) vkDestroySemaphore(m_device->device(), semaphore, nullptr);
-
- vkDestroyFence(m_device->device(), fence, nullptr);
-
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, ExternalSemaphore) {
-#ifdef _WIN32
- const auto extension_name = VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME;
- const auto handle_type = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR;
-#else
- const auto extension_name = VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME;
- const auto handle_type = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR;
-#endif
- // Check for external semaphore instance extensions
- if (InstanceExtensionSupported(VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME);
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- } else {
- printf("%s External semaphore extension not supported, skipping test\n", kSkipPrefix);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- // Check for external semaphore device extensions
- if (DeviceExtensionSupported(gpu(), nullptr, extension_name)) {
- m_device_extension_names.push_back(extension_name);
- m_device_extension_names.push_back(VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME);
- } else {
- printf("%s External semaphore extension not supported, skipping test\n", kSkipPrefix);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- // Check for external semaphore import and export capability
- VkPhysicalDeviceExternalSemaphoreInfoKHR esi = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR, nullptr,
- handle_type};
- VkExternalSemaphorePropertiesKHR esp = {VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR, nullptr};
- auto vkGetPhysicalDeviceExternalSemaphorePropertiesKHR =
- (PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR)vkGetInstanceProcAddr(
- instance(), "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR");
- vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(gpu(), &esi, &esp);
-
- if (!(esp.externalSemaphoreFeatures & VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT_KHR) ||
- !(esp.externalSemaphoreFeatures & VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT_KHR)) {
- printf("%s External semaphore does not support importing and exporting, skipping test\n", kSkipPrefix);
- return;
- }
-
- VkResult err;
- m_errorMonitor->ExpectSuccess();
-
- // Create a semaphore to export payload from
- VkExportSemaphoreCreateInfoKHR esci = {VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR, nullptr, handle_type};
- VkSemaphoreCreateInfo sci = {VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO, &esci, 0};
-
- VkSemaphore export_semaphore;
- err = vkCreateSemaphore(m_device->device(), &sci, nullptr, &export_semaphore);
- ASSERT_VK_SUCCESS(err);
-
- // Create a semaphore to import payload into
- sci.pNext = nullptr;
- VkSemaphore import_semaphore;
- err = vkCreateSemaphore(m_device->device(), &sci, nullptr, &import_semaphore);
- ASSERT_VK_SUCCESS(err);
-
-#ifdef _WIN32
- // Export semaphore payload to an opaque handle
- HANDLE handle = nullptr;
- VkSemaphoreGetWin32HandleInfoKHR ghi = {VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR, nullptr, export_semaphore,
- handle_type};
- auto vkGetSemaphoreWin32HandleKHR =
- (PFN_vkGetSemaphoreWin32HandleKHR)vkGetDeviceProcAddr(m_device->device(), "vkGetSemaphoreWin32HandleKHR");
- err = vkGetSemaphoreWin32HandleKHR(m_device->device(), &ghi, &handle);
- ASSERT_VK_SUCCESS(err);
-
- // Import opaque handle exported above
- VkImportSemaphoreWin32HandleInfoKHR ihi = {
- VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR, nullptr, import_semaphore, 0, handle_type, handle, nullptr};
- auto vkImportSemaphoreWin32HandleKHR =
- (PFN_vkImportSemaphoreWin32HandleKHR)vkGetDeviceProcAddr(m_device->device(), "vkImportSemaphoreWin32HandleKHR");
- err = vkImportSemaphoreWin32HandleKHR(m_device->device(), &ihi);
- ASSERT_VK_SUCCESS(err);
-#else
- // Export semaphore payload to an opaque handle
- int fd = 0;
- VkSemaphoreGetFdInfoKHR ghi = {VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR, nullptr, export_semaphore, handle_type};
- auto vkGetSemaphoreFdKHR = (PFN_vkGetSemaphoreFdKHR)vkGetDeviceProcAddr(m_device->device(), "vkGetSemaphoreFdKHR");
- err = vkGetSemaphoreFdKHR(m_device->device(), &ghi, &fd);
- ASSERT_VK_SUCCESS(err);
-
- // Import opaque handle exported above
- VkImportSemaphoreFdInfoKHR ihi = {
- VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR, nullptr, import_semaphore, 0, handle_type, fd};
- auto vkImportSemaphoreFdKHR = (PFN_vkImportSemaphoreFdKHR)vkGetDeviceProcAddr(m_device->device(), "vkImportSemaphoreFdKHR");
- err = vkImportSemaphoreFdKHR(m_device->device(), &ihi);
- ASSERT_VK_SUCCESS(err);
-#endif
-
- // Signal the exported semaphore and wait on the imported semaphore
- VkPipelineStageFlags flags = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
- VkSubmitInfo si[] = {
- {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 0, nullptr, &flags, 0, nullptr, 1, &export_semaphore},
- {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 1, &import_semaphore, &flags, 0, nullptr, 0, nullptr},
- {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 0, nullptr, &flags, 0, nullptr, 1, &export_semaphore},
- {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 1, &import_semaphore, &flags, 0, nullptr, 0, nullptr},
- };
- err = vkQueueSubmit(m_device->m_queue, 4, si, VK_NULL_HANDLE);
- ASSERT_VK_SUCCESS(err);
-
- if (m_device->phy().features().sparseBinding) {
- // Signal the imported semaphore and wait on the exported semaphore
- VkBindSparseInfo bi[] = {
- {VK_STRUCTURE_TYPE_BIND_SPARSE_INFO, nullptr, 0, nullptr, 0, nullptr, 0, nullptr, 0, nullptr, 1, &import_semaphore},
- {VK_STRUCTURE_TYPE_BIND_SPARSE_INFO, nullptr, 1, &export_semaphore, 0, nullptr, 0, nullptr, 0, nullptr, 0, nullptr},
- {VK_STRUCTURE_TYPE_BIND_SPARSE_INFO, nullptr, 0, nullptr, 0, nullptr, 0, nullptr, 0, nullptr, 1, &import_semaphore},
- {VK_STRUCTURE_TYPE_BIND_SPARSE_INFO, nullptr, 1, &export_semaphore, 0, nullptr, 0, nullptr, 0, nullptr, 0, nullptr},
- };
- err = vkQueueBindSparse(m_device->m_queue, 4, bi, VK_NULL_HANDLE);
- ASSERT_VK_SUCCESS(err);
- }
-
- // Cleanup
- err = vkQueueWaitIdle(m_device->m_queue);
- ASSERT_VK_SUCCESS(err);
- vkDestroySemaphore(m_device->device(), export_semaphore, nullptr);
- vkDestroySemaphore(m_device->device(), import_semaphore, nullptr);
-
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, ExternalFence) {
-#ifdef _WIN32
- const auto extension_name = VK_KHR_EXTERNAL_FENCE_WIN32_EXTENSION_NAME;
- const auto handle_type = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR;
-#else
- const auto extension_name = VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME;
- const auto handle_type = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR;
-#endif
- // Check for external fence instance extensions
- if (InstanceExtensionSupported(VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME);
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- } else {
- printf("%s External fence extension not supported, skipping test\n", kSkipPrefix);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- // Check for external fence device extensions
- if (DeviceExtensionSupported(gpu(), nullptr, extension_name)) {
- m_device_extension_names.push_back(extension_name);
- m_device_extension_names.push_back(VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME);
- } else {
- printf("%s External fence extension not supported, skipping test\n", kSkipPrefix);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- // Check for external fence import and export capability
- VkPhysicalDeviceExternalFenceInfoKHR efi = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR, nullptr, handle_type};
- VkExternalFencePropertiesKHR efp = {VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR, nullptr};
- auto vkGetPhysicalDeviceExternalFencePropertiesKHR = (PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR)vkGetInstanceProcAddr(
- instance(), "vkGetPhysicalDeviceExternalFencePropertiesKHR");
- vkGetPhysicalDeviceExternalFencePropertiesKHR(gpu(), &efi, &efp);
-
- if (!(efp.externalFenceFeatures & VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT_KHR) ||
- !(efp.externalFenceFeatures & VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT_KHR)) {
- printf("%s External fence does not support importing and exporting, skipping test\n", kSkipPrefix);
- return;
- }
-
- VkResult err;
- m_errorMonitor->ExpectSuccess();
-
- // Create a fence to export payload from
- VkFence export_fence;
- {
- VkExportFenceCreateInfoKHR efci = {VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR, nullptr, handle_type};
- VkFenceCreateInfo fci = {VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, &efci, 0};
- err = vkCreateFence(m_device->device(), &fci, nullptr, &export_fence);
- ASSERT_VK_SUCCESS(err);
- }
-
- // Create a fence to import payload into
- VkFence import_fence;
- {
- VkFenceCreateInfo fci = {VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, nullptr, 0};
- err = vkCreateFence(m_device->device(), &fci, nullptr, &import_fence);
- ASSERT_VK_SUCCESS(err);
- }
-
-#ifdef _WIN32
- // Export fence payload to an opaque handle
- HANDLE handle = nullptr;
- {
- VkFenceGetWin32HandleInfoKHR ghi = {VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR, nullptr, export_fence, handle_type};
- auto vkGetFenceWin32HandleKHR =
- (PFN_vkGetFenceWin32HandleKHR)vkGetDeviceProcAddr(m_device->device(), "vkGetFenceWin32HandleKHR");
- err = vkGetFenceWin32HandleKHR(m_device->device(), &ghi, &handle);
- ASSERT_VK_SUCCESS(err);
- }
-
- // Import opaque handle exported above
- {
- VkImportFenceWin32HandleInfoKHR ifi = {
- VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR, nullptr, import_fence, 0, handle_type, handle, nullptr};
- auto vkImportFenceWin32HandleKHR =
- (PFN_vkImportFenceWin32HandleKHR)vkGetDeviceProcAddr(m_device->device(), "vkImportFenceWin32HandleKHR");
- err = vkImportFenceWin32HandleKHR(m_device->device(), &ifi);
- ASSERT_VK_SUCCESS(err);
- }
-#else
- // Export fence payload to an opaque handle
- int fd = 0;
- {
- VkFenceGetFdInfoKHR gfi = {VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR, nullptr, export_fence, handle_type};
- auto vkGetFenceFdKHR = (PFN_vkGetFenceFdKHR)vkGetDeviceProcAddr(m_device->device(), "vkGetFenceFdKHR");
- err = vkGetFenceFdKHR(m_device->device(), &gfi, &fd);
- ASSERT_VK_SUCCESS(err);
- }
-
- // Import opaque handle exported above
- {
- VkImportFenceFdInfoKHR ifi = {VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR, nullptr, import_fence, 0, handle_type, fd};
- auto vkImportFenceFdKHR = (PFN_vkImportFenceFdKHR)vkGetDeviceProcAddr(m_device->device(), "vkImportFenceFdKHR");
- err = vkImportFenceFdKHR(m_device->device(), &ifi);
- ASSERT_VK_SUCCESS(err);
- }
-#endif
-
- // Signal the exported fence and wait on the imported fence
- vkQueueSubmit(m_device->m_queue, 0, nullptr, export_fence);
- vkWaitForFences(m_device->device(), 1, &import_fence, VK_TRUE, 1000000000);
- vkResetFences(m_device->device(), 1, &import_fence);
- vkQueueSubmit(m_device->m_queue, 0, nullptr, export_fence);
- vkWaitForFences(m_device->device(), 1, &import_fence, VK_TRUE, 1000000000);
- vkResetFences(m_device->device(), 1, &import_fence);
-
- // Signal the imported fence and wait on the exported fence
- vkQueueSubmit(m_device->m_queue, 0, nullptr, import_fence);
- vkWaitForFences(m_device->device(), 1, &export_fence, VK_TRUE, 1000000000);
- vkResetFences(m_device->device(), 1, &export_fence);
- vkQueueSubmit(m_device->m_queue, 0, nullptr, import_fence);
- vkWaitForFences(m_device->device(), 1, &export_fence, VK_TRUE, 1000000000);
- vkResetFences(m_device->device(), 1, &export_fence);
-
- // Cleanup
- err = vkQueueWaitIdle(m_device->m_queue);
- ASSERT_VK_SUCCESS(err);
- vkDestroyFence(m_device->device(), export_fence, nullptr);
- vkDestroyFence(m_device->device(), import_fence, nullptr);
-
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, ThreadNullFenceCollision) {
- test_platform_thread thread;
-
- m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "THREADING ERROR");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- struct thread_data_struct data;
- data.device = m_device->device();
- data.bailout = false;
- m_errorMonitor->SetBailout(&data.bailout);
-
- // Call vkDestroyFence of VK_NULL_HANDLE repeatedly using multiple threads.
- // There should be no validation error from collision of that non-object.
- test_platform_thread_create(&thread, ReleaseNullFence, (void *)&data);
- for (int i = 0; i < 40000; i++) {
- vkDestroyFence(m_device->device(), VK_NULL_HANDLE, NULL);
- }
- test_platform_thread_join(thread, NULL);
-
- m_errorMonitor->SetBailout(NULL);
-
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, ClearColorImageWithValidRange) {
- TEST_DESCRIPTION("Record clear color with a valid VkImageSubresourceRange");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkImageObj image(m_device);
- image.Init(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL);
- ASSERT_TRUE(image.create_info().arrayLayers == 1);
- ASSERT_TRUE(image.initialized());
- image.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
-
- const VkClearColorValue clear_color = {{0.0f, 0.0f, 0.0f, 1.0f}};
-
- m_commandBuffer->begin();
- const auto cb_handle = m_commandBuffer->handle();
-
- // Try good case
- {
- m_errorMonitor->ExpectSuccess();
- VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
- vkCmdClearColorImage(cb_handle, image.handle(), image.Layout(), &clear_color, 1, &range);
- m_errorMonitor->VerifyNotFound();
- }
-
- // Try good case with VK_REMAINING
- {
- m_errorMonitor->ExpectSuccess();
- VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, VK_REMAINING_MIP_LEVELS, 0, VK_REMAINING_ARRAY_LAYERS};
- vkCmdClearColorImage(cb_handle, image.handle(), image.Layout(), &clear_color, 1, &range);
- m_errorMonitor->VerifyNotFound();
- }
-}
-
-TEST_F(VkPositiveLayerTest, ClearDepthStencilWithValidRange) {
- TEST_DESCRIPTION("Record clear depth with a valid VkImageSubresourceRange");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- auto depth_format = FindSupportedDepthStencilFormat(gpu());
- if (!depth_format) {
- printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
- return;
- }
-
- VkImageObj image(m_device);
- image.Init(32, 32, 1, depth_format, VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL);
- ASSERT_TRUE(image.create_info().arrayLayers == 1);
- ASSERT_TRUE(image.initialized());
- const VkImageAspectFlags ds_aspect = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
- image.SetLayout(ds_aspect, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
-
- const VkClearDepthStencilValue clear_value = {};
-
- m_commandBuffer->begin();
- const auto cb_handle = m_commandBuffer->handle();
-
- // Try good case
- {
- m_errorMonitor->ExpectSuccess();
- VkImageSubresourceRange range = {ds_aspect, 0, 1, 0, 1};
- vkCmdClearDepthStencilImage(cb_handle, image.handle(), image.Layout(), &clear_value, 1, &range);
- m_errorMonitor->VerifyNotFound();
- }
-
- // Try good case with VK_REMAINING
- {
- m_errorMonitor->ExpectSuccess();
- VkImageSubresourceRange range = {ds_aspect, 0, VK_REMAINING_MIP_LEVELS, 0, VK_REMAINING_ARRAY_LAYERS};
- vkCmdClearDepthStencilImage(cb_handle, image.handle(), image.Layout(), &clear_value, 1, &range);
- m_errorMonitor->VerifyNotFound();
- }
-}
-
-TEST_F(VkPositiveLayerTest, CreateGraphicsPipelineWithIgnoredPointers) {
- TEST_DESCRIPTION("Create Graphics Pipeline with pointers that must be ignored by layers");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- m_depth_stencil_fmt = FindSupportedDepthStencilFormat(gpu());
- ASSERT_TRUE(m_depth_stencil_fmt != 0);
-
- m_depthStencil->Init(m_device, static_cast<int32_t>(m_width), static_cast<int32_t>(m_height), m_depth_stencil_fmt);
-
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget(m_depthStencil->BindInfo()));
-
- const uint64_t fake_address_64 = 0xCDCDCDCDCDCDCDCD;
- const uint64_t fake_address_32 = 0xCDCDCDCD;
- void *hopefully_undereferencable_pointer =
- sizeof(void *) == 8 ? reinterpret_cast<void *>(fake_address_64) : reinterpret_cast<void *>(fake_address_32);
-
- VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
-
- const VkPipelineVertexInputStateCreateInfo pipeline_vertex_input_state_create_info{
- VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
- nullptr, // pNext
- 0, // flags
- 0,
- nullptr, // bindings
- 0,
- nullptr // attributes
- };
-
- const VkPipelineInputAssemblyStateCreateInfo pipeline_input_assembly_state_create_info{
- VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
- nullptr, // pNext
- 0, // flags
- VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,
- VK_FALSE // primitive restart
- };
-
- const VkPipelineRasterizationStateCreateInfo pipeline_rasterization_state_create_info_template{
- VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
- nullptr, // pNext
- 0, // flags
- VK_FALSE, // depthClamp
- VK_FALSE, // rasterizerDiscardEnable
- VK_POLYGON_MODE_FILL,
- VK_CULL_MODE_NONE,
- VK_FRONT_FACE_COUNTER_CLOCKWISE,
- VK_FALSE, // depthBias
- 0.0f,
- 0.0f,
- 0.0f, // depthBias params
- 1.0f // lineWidth
- };
-
- VkPipelineLayout pipeline_layout;
- {
- VkPipelineLayoutCreateInfo pipeline_layout_create_info{
- VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
- nullptr, // pNext
- 0, // flags
- 0,
- nullptr, // layouts
- 0,
- nullptr // push constants
- };
-
- VkResult err = vkCreatePipelineLayout(m_device->device(), &pipeline_layout_create_info, nullptr, &pipeline_layout);
- ASSERT_VK_SUCCESS(err);
- }
-
- // try disabled rasterizer and no tessellation
- {
- m_errorMonitor->ExpectSuccess();
-
- VkPipelineRasterizationStateCreateInfo pipeline_rasterization_state_create_info =
- pipeline_rasterization_state_create_info_template;
- pipeline_rasterization_state_create_info.rasterizerDiscardEnable = VK_TRUE;
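-        // With rasterization discarded, the viewport, multisample, depth/stencil and color blend state pointers are
-        // ignored, so the garbage pointers passed below are legal.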
-
- VkGraphicsPipelineCreateInfo graphics_pipeline_create_info{
- VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
- nullptr, // pNext
- 0, // flags
- 1, // stageCount
- &vs.GetStageCreateInfo(),
- &pipeline_vertex_input_state_create_info,
- &pipeline_input_assembly_state_create_info,
- reinterpret_cast<const VkPipelineTessellationStateCreateInfo *>(hopefully_undereferencable_pointer),
- reinterpret_cast<const VkPipelineViewportStateCreateInfo *>(hopefully_undereferencable_pointer),
- &pipeline_rasterization_state_create_info,
- reinterpret_cast<const VkPipelineMultisampleStateCreateInfo *>(hopefully_undereferencable_pointer),
- reinterpret_cast<const VkPipelineDepthStencilStateCreateInfo *>(hopefully_undereferencable_pointer),
- reinterpret_cast<const VkPipelineColorBlendStateCreateInfo *>(hopefully_undereferencable_pointer),
- nullptr, // dynamic states
- pipeline_layout,
- m_renderPass,
- 0, // subpass
- VK_NULL_HANDLE,
- 0};
-
- VkPipeline pipeline;
- vkCreateGraphicsPipelines(m_device->handle(), VK_NULL_HANDLE, 1, &graphics_pipeline_create_info, nullptr, &pipeline);
-
- m_errorMonitor->VerifyNotFound();
-
- vkDestroyPipeline(m_device->handle(), pipeline, nullptr);
- }
-
- const VkPipelineMultisampleStateCreateInfo pipeline_multisample_state_create_info{
- VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
- nullptr, // pNext
- 0, // flags
- VK_SAMPLE_COUNT_1_BIT,
- VK_FALSE, // sample shading
- 0.0f, // minSampleShading
- nullptr, // pSampleMask
- VK_FALSE, // alphaToCoverageEnable
- VK_FALSE // alphaToOneEnable
- };
-
- // try enabled rasterizer but no subpass attachments
- {
- m_errorMonitor->ExpectSuccess();
-
- VkPipelineRasterizationStateCreateInfo pipeline_rasterization_state_create_info =
- pipeline_rasterization_state_create_info_template;
- pipeline_rasterization_state_create_info.rasterizerDiscardEnable = VK_FALSE;
-
- VkViewport viewport = {0.0f, 0.0f, 1.0f, 1.0f, 0.0f, 1.0f};
- VkRect2D scissor = {{0, 0}, {static_cast<uint32_t>(m_width), static_cast<uint32_t>(m_height)}};
-
- const VkPipelineViewportStateCreateInfo pipeline_viewport_state_create_info{
- VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
- nullptr, // pNext
- 0, // flags
- 1,
- &viewport,
- 1,
- &scissor};
-
- VkRenderPass render_pass;
- {
- VkSubpassDescription subpass_desc = {};
-
- VkRenderPassCreateInfo render_pass_create_info{
- VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
- nullptr, // pNext
- 0, // flags
- 0,
- nullptr, // attachments
- 1,
- &subpass_desc,
- 0,
- nullptr // subpass dependencies
- };
-
- VkResult err = vkCreateRenderPass(m_device->handle(), &render_pass_create_info, nullptr, &render_pass);
- ASSERT_VK_SUCCESS(err);
- }
-
- VkGraphicsPipelineCreateInfo graphics_pipeline_create_info{
- VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
- nullptr, // pNext
- 0, // flags
- 1, // stageCount
- &vs.GetStageCreateInfo(),
- &pipeline_vertex_input_state_create_info,
- &pipeline_input_assembly_state_create_info,
- nullptr,
- &pipeline_viewport_state_create_info,
- &pipeline_rasterization_state_create_info,
- &pipeline_multisample_state_create_info,
- reinterpret_cast<const VkPipelineDepthStencilStateCreateInfo *>(hopefully_undereferencable_pointer),
- reinterpret_cast<const VkPipelineColorBlendStateCreateInfo *>(hopefully_undereferencable_pointer),
- nullptr, // dynamic states
- pipeline_layout,
- render_pass,
- 0, // subpass
- VK_NULL_HANDLE,
- 0};
-
- VkPipeline pipeline;
- vkCreateGraphicsPipelines(m_device->handle(), VK_NULL_HANDLE, 1, &graphics_pipeline_create_info, nullptr, &pipeline);
-
- m_errorMonitor->VerifyNotFound();
-
- vkDestroyPipeline(m_device->handle(), pipeline, nullptr);
- vkDestroyRenderPass(m_device->handle(), render_pass, nullptr);
- }
-
- // try dynamic viewport and scissor
- {
- m_errorMonitor->ExpectSuccess();
-
- VkPipelineRasterizationStateCreateInfo pipeline_rasterization_state_create_info =
- pipeline_rasterization_state_create_info_template;
- pipeline_rasterization_state_create_info.rasterizerDiscardEnable = VK_FALSE;
-
- const VkPipelineViewportStateCreateInfo pipeline_viewport_state_create_info{
- VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
- nullptr, // pNext
- 0, // flags
- 1,
- reinterpret_cast<const VkViewport *>(hopefully_undereferencable_pointer),
- 1,
- reinterpret_cast<const VkRect2D *>(hopefully_undereferencable_pointer)};
-
- const VkPipelineDepthStencilStateCreateInfo pipeline_depth_stencil_state_create_info{
- VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
- nullptr, // pNext
- 0, // flags
- };
-
- const VkPipelineColorBlendAttachmentState pipeline_color_blend_attachment_state = {};
-
- const VkPipelineColorBlendStateCreateInfo pipeline_color_blend_state_create_info{
- VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
- nullptr, // pNext
- 0, // flags
- VK_FALSE,
- VK_LOGIC_OP_CLEAR,
- 1,
- &pipeline_color_blend_attachment_state,
- {0.0f, 0.0f, 0.0f, 0.0f}};
-
- const VkDynamicState dynamic_states[2] = {VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR};
-
- const VkPipelineDynamicStateCreateInfo pipeline_dynamic_state_create_info{
- VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,
- nullptr, // pNext
- 0, // flags
- 2, dynamic_states};
-
- VkGraphicsPipelineCreateInfo graphics_pipeline_create_info{VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
- nullptr, // pNext
- 0, // flags
- 1, // stageCount
- &vs.GetStageCreateInfo(),
- &pipeline_vertex_input_state_create_info,
- &pipeline_input_assembly_state_create_info,
- nullptr,
- &pipeline_viewport_state_create_info,
- &pipeline_rasterization_state_create_info,
- &pipeline_multisample_state_create_info,
- &pipeline_depth_stencil_state_create_info,
- &pipeline_color_blend_state_create_info,
- &pipeline_dynamic_state_create_info, // dynamic states
- pipeline_layout,
- m_renderPass,
- 0, // subpass
- VK_NULL_HANDLE,
- 0};
-
- VkPipeline pipeline;
- vkCreateGraphicsPipelines(m_device->handle(), VK_NULL_HANDLE, 1, &graphics_pipeline_create_info, nullptr, &pipeline);
-
- m_errorMonitor->VerifyNotFound();
-
- vkDestroyPipeline(m_device->handle(), pipeline, nullptr);
- }
-
- vkDestroyPipelineLayout(m_device->handle(), pipeline_layout, nullptr);
-}
-
-TEST_F(VkPositiveLayerTest, ExternalMemory) {
- TEST_DESCRIPTION("Perform a copy through a pair of buffers linked by external memory");
-
-#ifdef _WIN32
- const auto ext_mem_extension_name = VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME;
- const auto handle_type = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR;
-#else
- const auto ext_mem_extension_name = VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME;
- const auto handle_type = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR;
-#endif
-
- // Check for external memory instance extensions
- std::vector<const char *> reqd_instance_extensions = {
- {VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME, VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME}};
- for (auto extension_name : reqd_instance_extensions) {
- if (InstanceExtensionSupported(extension_name)) {
- m_instance_extension_names.push_back(extension_name);
- } else {
- printf("%s Required instance extension %s not supported, skipping test\n", kSkipPrefix, extension_name);
- return;
- }
- }
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- // Check for import/export capability
- VkPhysicalDeviceExternalBufferInfoKHR ebi = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR, nullptr, 0,
- VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT, handle_type};
- VkExternalBufferPropertiesKHR ebp = {VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES_KHR, nullptr, {0, 0, 0}};
- auto vkGetPhysicalDeviceExternalBufferPropertiesKHR = (PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR)vkGetInstanceProcAddr(
- instance(), "vkGetPhysicalDeviceExternalBufferPropertiesKHR");
- ASSERT_TRUE(vkGetPhysicalDeviceExternalBufferPropertiesKHR != nullptr);
- vkGetPhysicalDeviceExternalBufferPropertiesKHR(gpu(), &ebi, &ebp);
- if (!(ebp.externalMemoryProperties.compatibleHandleTypes & handle_type) ||
- !(ebp.externalMemoryProperties.externalMemoryFeatures & VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_KHR) ||
- !(ebp.externalMemoryProperties.externalMemoryFeatures & VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_KHR)) {
- printf("%s External buffer does not support importing and exporting, skipping test\n", kSkipPrefix);
- return;
- }
-
- // Check if dedicated allocation is required
- bool dedicated_allocation =
- ebp.externalMemoryProperties.externalMemoryFeatures & VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_KHR;
- if (dedicated_allocation) {
- if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- } else {
- printf("%s Dedicated allocation extension not supported, skipping test\n", kSkipPrefix);
- return;
- }
- }
-
- // Check for external memory device extensions
- if (DeviceExtensionSupported(gpu(), nullptr, ext_mem_extension_name)) {
- m_device_extension_names.push_back(ext_mem_extension_name);
- m_device_extension_names.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
- } else {
- printf("%s External memory extension not supported, skipping test\n", kSkipPrefix);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- m_errorMonitor->ExpectSuccess(VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT);
-
- VkMemoryPropertyFlags mem_flags = 0;
- const VkDeviceSize buffer_size = 1024;
-
- // Create export and import buffers
- const VkExternalMemoryBufferCreateInfoKHR external_buffer_info = {VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR,
- nullptr, handle_type};
- auto buffer_info = VkBufferObj::create_info(buffer_size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT);
- buffer_info.pNext = &external_buffer_info;
- VkBufferObj buffer_export;
- buffer_export.init_no_mem(*m_device, buffer_info);
- VkBufferObj buffer_import;
- buffer_import.init_no_mem(*m_device, buffer_info);
-
- // Allocation info
- auto alloc_info = vk_testing::DeviceMemory::get_resource_alloc_info(*m_device, buffer_export.memory_requirements(), mem_flags);
-
- // Add export allocation info to pNext chain
- VkExportMemoryAllocateInfoKHR export_info = {VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR, nullptr, handle_type};
- alloc_info.pNext = &export_info;
-
- // Add dedicated allocation info to pNext chain if required
- VkMemoryDedicatedAllocateInfoKHR dedicated_info = {VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR, nullptr,
- VK_NULL_HANDLE, buffer_export.handle()};
- if (dedicated_allocation) {
- export_info.pNext = &dedicated_info;
- }
-
- // Allocate memory to be exported
- vk_testing::DeviceMemory memory_export;
- memory_export.init(*m_device, alloc_info);
-
- // Bind exported memory
- buffer_export.bind_memory(memory_export, 0);
-
-#ifdef _WIN32
- // Export memory to handle
- auto vkGetMemoryWin32HandleKHR = (PFN_vkGetMemoryWin32HandleKHR)vkGetInstanceProcAddr(instance(), "vkGetMemoryWin32HandleKHR");
- ASSERT_TRUE(vkGetMemoryWin32HandleKHR != nullptr);
- VkMemoryGetWin32HandleInfoKHR mghi = {VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR, nullptr, memory_export.handle(),
- handle_type};
- HANDLE handle;
- ASSERT_VK_SUCCESS(vkGetMemoryWin32HandleKHR(m_device->device(), &mghi, &handle));
-
- VkImportMemoryWin32HandleInfoKHR import_info = {VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR, nullptr, handle_type,
- handle};
-#else
- // Export memory to fd
- auto vkGetMemoryFdKHR = (PFN_vkGetMemoryFdKHR)vkGetInstanceProcAddr(instance(), "vkGetMemoryFdKHR");
- ASSERT_TRUE(vkGetMemoryFdKHR != nullptr);
- VkMemoryGetFdInfoKHR mgfi = {VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR, nullptr, memory_export.handle(), handle_type};
- int fd;
- ASSERT_VK_SUCCESS(vkGetMemoryFdKHR(m_device->device(), &mgfi, &fd));
-
- VkImportMemoryFdInfoKHR import_info = {VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR, nullptr, handle_type, fd};
-#endif
-
- // Import memory
- alloc_info = vk_testing::DeviceMemory::get_resource_alloc_info(*m_device, buffer_import.memory_requirements(), mem_flags);
- alloc_info.pNext = &import_info;
- vk_testing::DeviceMemory memory_import;
- memory_import.init(*m_device, alloc_info);
-
- // Bind imported memory
- buffer_import.bind_memory(memory_import, 0);
-
- // Create test buffers and fill input buffer
- VkMemoryPropertyFlags mem_prop = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
- VkBufferObj buffer_input;
- buffer_input.init_as_src_and_dst(*m_device, buffer_size, mem_prop);
- auto input_mem = (uint8_t *)buffer_input.memory().map();
- for (uint32_t i = 0; i < buffer_size; i++) {
- input_mem[i] = (i & 0xFF);
- }
- buffer_input.memory().unmap();
- VkBufferObj buffer_output;
- buffer_output.init_as_src_and_dst(*m_device, buffer_size, mem_prop);
-
- // Copy from input buffer to output buffer through the exported/imported memory
- m_commandBuffer->begin();
- VkBufferCopy copy_info = {0, 0, buffer_size};
- vkCmdCopyBuffer(m_commandBuffer->handle(), buffer_input.handle(), buffer_export.handle(), 1, &copy_info);
- // Insert memory barrier to guarantee copy order
- VkMemoryBarrier mem_barrier = {VK_STRUCTURE_TYPE_MEMORY_BARRIER, nullptr, VK_ACCESS_TRANSFER_WRITE_BIT,
- VK_ACCESS_TRANSFER_READ_BIT};
- vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1,
- &mem_barrier, 0, nullptr, 0, nullptr);
- vkCmdCopyBuffer(m_commandBuffer->handle(), buffer_import.handle(), buffer_output.handle(), 1, &copy_info);
- m_commandBuffer->end();
- m_commandBuffer->QueueCommandBuffer();
-
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, ParameterLayerFeatures2Capture) {
- TEST_DESCRIPTION("Ensure parameter_validation_layer correctly captures physical device features");
- if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- } else {
- printf("%s Did not find VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME; skipped.\n", kSkipPrefix);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
- (PFN_vkGetPhysicalDeviceFeatures2KHR)vkGetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
- ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
-
- VkResult err;
- m_errorMonitor->ExpectSuccess();
-
- VkPhysicalDeviceFeatures2KHR features2;
- features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR;
- features2.pNext = nullptr;
-
- vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
-
- // We're not creating a valid m_device, but the phy wrapper is useful
- vk_testing::PhysicalDevice physical_device(gpu());
- vk_testing::QueueCreateInfoArray queue_info(physical_device.queue_properties());
- // Only request creation with queuefamilies that have at least one queue
- std::vector<VkDeviceQueueCreateInfo> create_queue_infos;
- auto qci = queue_info.data();
- for (uint32_t i = 0; i < queue_info.size(); ++i) {
- if (qci[i].queueCount) {
- create_queue_infos.push_back(qci[i]);
- }
- }
-
- VkDeviceCreateInfo dev_info = {};
- dev_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
- dev_info.pNext = &features2;
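-    // Features are enabled via the chained VkPhysicalDeviceFeatures2, so pEnabledFeatures below must stay NULL.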
- dev_info.flags = 0;
-    dev_info.queueCreateInfoCount = static_cast<uint32_t>(create_queue_infos.size());
- dev_info.pQueueCreateInfos = create_queue_infos.data();
- dev_info.enabledLayerCount = 0;
- dev_info.ppEnabledLayerNames = nullptr;
- dev_info.enabledExtensionCount = 0;
- dev_info.ppEnabledExtensionNames = nullptr;
- dev_info.pEnabledFeatures = nullptr;
-
- VkDevice device;
- err = vkCreateDevice(gpu(), &dev_info, nullptr, &device);
- ASSERT_VK_SUCCESS(err);
-
- if (features2.features.samplerAnisotropy) {
- // Test that the parameter layer is caching the features correctly using CreateSampler
- VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
- // If the features were not captured correctly, this should cause an error
- sampler_ci.anisotropyEnable = VK_TRUE;
- sampler_ci.maxAnisotropy = physical_device.properties().limits.maxSamplerAnisotropy;
-
- VkSampler sampler = VK_NULL_HANDLE;
- err = vkCreateSampler(device, &sampler_ci, nullptr, &sampler);
- ASSERT_VK_SUCCESS(err);
- vkDestroySampler(device, sampler, nullptr);
- } else {
- printf("%s Feature samplerAnisotropy not enabled; parameter_layer check skipped.\n", kSkipPrefix);
- }
-
-    // Verify the core validation layer has captured the physical device features by creating a query pool.
- if (features2.features.pipelineStatisticsQuery) {
- VkQueryPool query_pool;
- VkQueryPoolCreateInfo qpci{};
- qpci.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
- qpci.queryType = VK_QUERY_TYPE_PIPELINE_STATISTICS;
- qpci.queryCount = 1;
- err = vkCreateQueryPool(device, &qpci, nullptr, &query_pool);
- ASSERT_VK_SUCCESS(err);
-
- vkDestroyQueryPool(device, query_pool, nullptr);
- } else {
- printf("%s Feature pipelineStatisticsQuery not enabled; core_validation_layer check skipped.\n", kSkipPrefix);
- }
-
- vkDestroyDevice(device, nullptr);
-
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, GetMemoryRequirements2) {
- TEST_DESCRIPTION(
- "Get memory requirements with VK_KHR_get_memory_requirements2 instead of core entry points and verify layers do not emit "
- "errors when objects are bound and used");
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
-    // Check for the VK_KHR_get_memory_requirements2 extension
- if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- } else {
- printf("%s %s not supported, skipping test\n", kSkipPrefix, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- m_errorMonitor->ExpectSuccess(VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT);
-
- // Create a test buffer
- VkBufferObj buffer;
- buffer.init_no_mem(*m_device,
- VkBufferObj::create_info(1024, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT));
-
- // Use extension to get buffer memory requirements
- auto vkGetBufferMemoryRequirements2KHR = reinterpret_cast<PFN_vkGetBufferMemoryRequirements2KHR>(
- vkGetDeviceProcAddr(m_device->device(), "vkGetBufferMemoryRequirements2KHR"));
- ASSERT_TRUE(vkGetBufferMemoryRequirements2KHR != nullptr);
- VkBufferMemoryRequirementsInfo2KHR buffer_info = {VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR, nullptr,
- buffer.handle()};
- VkMemoryRequirements2KHR buffer_reqs = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR};
- vkGetBufferMemoryRequirements2KHR(m_device->device(), &buffer_info, &buffer_reqs);
-
- // Allocate and bind buffer memory
- vk_testing::DeviceMemory buffer_memory;
- buffer_memory.init(*m_device, vk_testing::DeviceMemory::get_resource_alloc_info(*m_device, buffer_reqs.memoryRequirements, 0));
- vkBindBufferMemory(m_device->device(), buffer.handle(), buffer_memory.handle(), 0);
-
- // Create a test image
- auto image_ci = vk_testing::Image::create_info();
- image_ci.imageType = VK_IMAGE_TYPE_2D;
- image_ci.extent.width = 32;
- image_ci.extent.height = 32;
- image_ci.format = VK_FORMAT_R8G8B8A8_UNORM;
- image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
- image_ci.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
- vk_testing::Image image;
- image.init_no_mem(*m_device, image_ci);
-
- // Use extension to get image memory requirements
- auto vkGetImageMemoryRequirements2KHR = reinterpret_cast<PFN_vkGetImageMemoryRequirements2KHR>(
- vkGetDeviceProcAddr(m_device->device(), "vkGetImageMemoryRequirements2KHR"));
- ASSERT_TRUE(vkGetImageMemoryRequirements2KHR != nullptr);
- VkImageMemoryRequirementsInfo2KHR image_info = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR, nullptr,
- image.handle()};
- VkMemoryRequirements2KHR image_reqs = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR};
- vkGetImageMemoryRequirements2KHR(m_device->device(), &image_info, &image_reqs);
-
- // Allocate and bind image memory
- vk_testing::DeviceMemory image_memory;
- image_memory.init(*m_device, vk_testing::DeviceMemory::get_resource_alloc_info(*m_device, image_reqs.memoryRequirements, 0));
- vkBindImageMemory(m_device->device(), image.handle(), image_memory.handle(), 0);
-
- // Now execute arbitrary commands that use the test buffer and image
- m_commandBuffer->begin();
-
- // Fill buffer with 0
- vkCmdFillBuffer(m_commandBuffer->handle(), buffer.handle(), 0, VK_WHOLE_SIZE, 0);
-
- // Transition and clear image
- const auto subresource_range = image.subresource_range(VK_IMAGE_ASPECT_COLOR_BIT);
- const auto barrier = image.image_memory_barrier(0, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED,
- VK_IMAGE_LAYOUT_GENERAL, subresource_range);
- vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
- nullptr, 0, nullptr, 1, &barrier);
- const VkClearColorValue color = {};
- vkCmdClearColorImage(m_commandBuffer->handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, &color, 1, &subresource_range);
-
- // Submit and verify no validation errors
- m_commandBuffer->end();
- m_commandBuffer->QueueCommandBuffer();
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, BindMemory2) {
- TEST_DESCRIPTION(
- "Bind memory with VK_KHR_bind_memory2 instead of core entry points and verify layers do not emit errors when objects are "
- "used");
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
-    // Check for the VK_KHR_bind_memory2 extension
- if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
- } else {
- printf("%s %s not supported, skipping test\n", kSkipPrefix, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- m_errorMonitor->ExpectSuccess(VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT);
-
- // Create a test buffer
- VkBufferObj buffer;
- buffer.init_no_mem(*m_device, VkBufferObj::create_info(1024, VK_BUFFER_USAGE_TRANSFER_DST_BIT));
-
- // Allocate buffer memory
- vk_testing::DeviceMemory buffer_memory;
- buffer_memory.init(*m_device, vk_testing::DeviceMemory::get_resource_alloc_info(*m_device, buffer.memory_requirements(), 0));
-
- // Bind buffer memory with extension
- auto vkBindBufferMemory2KHR =
- reinterpret_cast<PFN_vkBindBufferMemory2KHR>(vkGetDeviceProcAddr(m_device->device(), "vkBindBufferMemory2KHR"));
- ASSERT_TRUE(vkBindBufferMemory2KHR != nullptr);
- VkBindBufferMemoryInfoKHR buffer_bind_info = {VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR, nullptr, buffer.handle(),
- buffer_memory.handle(), 0};
- vkBindBufferMemory2KHR(m_device->device(), 1, &buffer_bind_info);
-
- // Create a test image
- auto image_ci = vk_testing::Image::create_info();
- image_ci.imageType = VK_IMAGE_TYPE_2D;
- image_ci.extent.width = 32;
- image_ci.extent.height = 32;
- image_ci.format = VK_FORMAT_R8G8B8A8_UNORM;
- image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
- image_ci.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
- vk_testing::Image image;
- image.init_no_mem(*m_device, image_ci);
-
- // Allocate image memory
- vk_testing::DeviceMemory image_memory;
- image_memory.init(*m_device, vk_testing::DeviceMemory::get_resource_alloc_info(*m_device, image.memory_requirements(), 0));
-
- // Bind image memory with extension
- auto vkBindImageMemory2KHR =
- reinterpret_cast<PFN_vkBindImageMemory2KHR>(vkGetDeviceProcAddr(m_device->device(), "vkBindImageMemory2KHR"));
- ASSERT_TRUE(vkBindImageMemory2KHR != nullptr);
- VkBindImageMemoryInfoKHR image_bind_info = {VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR, nullptr, image.handle(),
- image_memory.handle(), 0};
- vkBindImageMemory2KHR(m_device->device(), 1, &image_bind_info);
-
- // Now execute arbitrary commands that use the test buffer and image
- m_commandBuffer->begin();
-
- // Fill buffer with 0
- vkCmdFillBuffer(m_commandBuffer->handle(), buffer.handle(), 0, VK_WHOLE_SIZE, 0);
-
- // Transition and clear image
- const auto subresource_range = image.subresource_range(VK_IMAGE_ASPECT_COLOR_BIT);
- const auto barrier = image.image_memory_barrier(0, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED,
- VK_IMAGE_LAYOUT_GENERAL, subresource_range);
- vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
- nullptr, 0, nullptr, 1, &barrier);
- const VkClearColorValue color = {};
- vkCmdClearColorImage(m_commandBuffer->handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, &color, 1, &subresource_range);
-
- // Submit and verify no validation errors
- m_commandBuffer->end();
- m_commandBuffer->QueueCommandBuffer();
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, CreatePipelineWithCoreChecksDisabled) {
- TEST_DESCRIPTION("Test CreatePipeline while the CoreChecks validation object is disabled");
-
- // Enable KHR validation features extension
- VkValidationFeatureDisableEXT disables[] = {VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT};
- VkValidationFeaturesEXT features = {};
- features.sType = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT;
- features.disabledValidationFeatureCount = 1;
- features.pDisabledValidationFeatures = disables;
-
- VkCommandPoolCreateFlags pool_flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
- ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, pool_flags, &features));
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
- VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
- VkPipelineInputAssemblyStateCreateInfo iasci{VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO, nullptr, 0,
- VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, VK_FALSE};
-
- CreatePipelineHelper pipe(*this);
- pipe.InitInfo();
- pipe.gp_ci_.pInputAssemblyState = &iasci;
- pipe.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
- pipe.InitState();
- m_errorMonitor->ExpectSuccess();
- pipe.CreateGraphicsPipeline();
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, CreatePipelineWithTessellationDomainOrigin) {
- TEST_DESCRIPTION(
- "Test CreatePipeline when VkPipelineTessellationStateCreateInfo.pNext include "
- "VkPipelineTessellationDomainOriginStateCreateInfo");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- if (!m_device->phy().features().tessellationShader) {
- printf("%s Device does not support tessellation shaders; skipped.\n", kSkipPrefix);
- return;
- }
-
- VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj tcs(m_device, bindStateTscShaderText, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, this);
- VkShaderObj tes(m_device, bindStateTeshaderText, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, this);
- VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- VkPipelineInputAssemblyStateCreateInfo iasci{VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO, nullptr, 0,
- VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, VK_FALSE};
-
- VkPipelineTessellationDomainOriginStateCreateInfo tessellationDomainOriginStateInfo = {
- VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO, VK_NULL_HANDLE,
- VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT};
-
- VkPipelineTessellationStateCreateInfo tsci{VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO,
- &tessellationDomainOriginStateInfo, 0, 3};
-
- CreatePipelineHelper pipe(*this);
- pipe.InitInfo();
- pipe.gp_ci_.pTessellationState = &tsci;
- pipe.gp_ci_.pInputAssemblyState = &iasci;
- pipe.shader_stages_ = {vs.GetStageCreateInfo(), tcs.GetStageCreateInfo(), tes.GetStageCreateInfo(), fs.GetStageCreateInfo()};
- pipe.InitState();
- m_errorMonitor->ExpectSuccess();
- pipe.CreateGraphicsPipeline();
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, MultiplaneImageCopyBufferToImage) {
- TEST_DESCRIPTION("Positive test of multiplane copy buffer to image");
- // Enable KHR multiplane req'd extensions
- bool mp_extensions = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
- VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION);
- if (mp_extensions) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- }
- SetTargetApiVersion(VK_API_VERSION_1_1);
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME);
- mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
- mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
- if (mp_extensions) {
- m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
- } else {
- printf("%s test requires KHR multiplane extensions, not available. Skipping.\n", kSkipPrefix);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkImageCreateInfo ci = {};
- ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- ci.pNext = NULL;
- ci.flags = 0;
- ci.imageType = VK_IMAGE_TYPE_2D;
- ci.format = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR; // All planes of equal extent
- ci.tiling = VK_IMAGE_TILING_OPTIMAL;
- ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
- ci.extent = {16, 16, 1};
- ci.mipLevels = 1;
- ci.arrayLayers = 1;
- ci.samples = VK_SAMPLE_COUNT_1_BIT;
- ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
- ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
-
- VkFormatFeatureFlags features = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT | VK_FORMAT_FEATURE_TRANSFER_DST_BIT;
- bool supported = ImageFormatAndFeaturesSupported(instance(), gpu(), ci, features);
- if (!supported) {
- printf("%s Multiplane image format not supported. Skipping test.\n", kSkipPrefix);
- return; // Assume there's low ROI on searching for different mp formats
- }
-
- VkImageObj image(m_device);
- image.init(&ci);
-
- m_commandBuffer->reset();
- m_errorMonitor->ExpectSuccess();
- m_commandBuffer->begin();
- image.ImageMemoryBarrier(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, 0, VK_ACCESS_TRANSFER_WRITE_BIT,
- VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
-
- std::array<VkImageAspectFlagBits, 3> aspects = {VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT,
- VK_IMAGE_ASPECT_PLANE_2_BIT};
- std::array<VkBufferObj, 3> buffers;
- VkMemoryPropertyFlags reqs = 0;
-
- VkBufferImageCopy copy = {};
- copy.imageSubresource.layerCount = 1;
- copy.imageExtent.depth = 1;
- copy.imageExtent.height = 16;
- copy.imageExtent.width = 16;
-
- for (size_t i = 0; i < aspects.size(); ++i) {
- buffers[i].init_as_src(*m_device, (VkDeviceSize)16 * 16 * 1, reqs);
- copy.imageSubresource.aspectMask = aspects[i];
- vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffers[i].handle(), image.handle(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
- 1, &copy);
- }
- m_commandBuffer->end();
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, MultiplaneImageTests) {
- TEST_DESCRIPTION("Positive test of multiplane image operations");
-
- // Enable KHR multiplane req'd extensions
- bool mp_extensions = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
- VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION);
- if (mp_extensions) {
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- }
- SetTargetApiVersion(VK_API_VERSION_1_1);
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME);
- mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
- if (mp_extensions) {
- m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
- m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
- } else {
- printf("%s test requires KHR multiplane extensions, not available. Skipping.\n", kSkipPrefix);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- // Create aliased function pointers for 1.0 and 1.1 contexts
-
- PFN_vkBindImageMemory2KHR vkBindImageMemory2Function = nullptr;
- PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2Function = nullptr;
- PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2Function = nullptr;
-
- if (DeviceValidationVersion() >= VK_API_VERSION_1_1) {
- vkBindImageMemory2Function = vkBindImageMemory2;
- vkGetImageMemoryRequirements2Function = vkGetImageMemoryRequirements2;
- vkGetPhysicalDeviceMemoryProperties2Function = vkGetPhysicalDeviceMemoryProperties2;
- } else {
- vkBindImageMemory2Function = (PFN_vkBindImageMemory2KHR)vkGetDeviceProcAddr(m_device->handle(), "vkBindImageMemory2KHR");
- vkGetImageMemoryRequirements2Function =
- (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_device->handle(), "vkGetImageMemoryRequirements2KHR");
- vkGetPhysicalDeviceMemoryProperties2Function = (PFN_vkGetPhysicalDeviceMemoryProperties2KHR)vkGetDeviceProcAddr(
- m_device->handle(), "vkGetPhysicalDeviceMemoryProperties2KHR");
- }
-
- if (!vkBindImageMemory2Function || !vkGetImageMemoryRequirements2Function || !vkGetPhysicalDeviceMemoryProperties2Function) {
- printf("%s Did not find required device extension support; test skipped.\n", kSkipPrefix);
- return;
- }
-
- VkImageCreateInfo ci = {};
- ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
- ci.pNext = NULL;
- ci.flags = 0;
- ci.imageType = VK_IMAGE_TYPE_2D;
- ci.format = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR; // All planes of equal extent
- ci.tiling = VK_IMAGE_TILING_OPTIMAL;
- ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
- ci.extent = {128, 128, 1};
- ci.mipLevels = 1;
- ci.arrayLayers = 1;
- ci.samples = VK_SAMPLE_COUNT_1_BIT;
- ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
- ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
-
- // Verify format
- VkFormatFeatureFlags features = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT | VK_FORMAT_FEATURE_TRANSFER_DST_BIT;
- bool supported = ImageFormatAndFeaturesSupported(instance(), gpu(), ci, features);
- if (!supported) {
- printf("%s Multiplane image format not supported. Skipping test.\n", kSkipPrefix);
- return; // Assume there's low ROI on searching for different mp formats
- }
-
- VkImage image;
- ASSERT_VK_SUCCESS(vkCreateImage(device(), &ci, NULL, &image));
-
- // Allocate & bind memory
- VkPhysicalDeviceMemoryProperties phys_mem_props;
- vkGetPhysicalDeviceMemoryProperties(gpu(), &phys_mem_props);
- VkMemoryRequirements mem_reqs;
- vkGetImageMemoryRequirements(device(), image, &mem_reqs);
- VkDeviceMemory mem_obj = VK_NULL_HANDLE;
- VkMemoryPropertyFlagBits mem_props = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
- for (uint32_t type = 0; type < phys_mem_props.memoryTypeCount; type++) {
- if ((mem_reqs.memoryTypeBits & (1 << type)) &&
- ((phys_mem_props.memoryTypes[type].propertyFlags & mem_props) == mem_props)) {
- VkMemoryAllocateInfo alloc_info = {};
- alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
- alloc_info.allocationSize = mem_reqs.size;
- alloc_info.memoryTypeIndex = type;
- ASSERT_VK_SUCCESS(vkAllocateMemory(device(), &alloc_info, NULL, &mem_obj));
- break;
- }
- }
-
- if (VK_NULL_HANDLE == mem_obj) {
- printf("%s Unable to allocate image memory. Skipping test.\n", kSkipPrefix);
- vkDestroyImage(device(), image, NULL);
- return;
- }
- ASSERT_VK_SUCCESS(vkBindImageMemory(device(), image, mem_obj, 0));
-
- // Copy plane 0 to plane 2
- VkImageCopy copyRegion = {};
- copyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_PLANE_0_BIT_KHR;
- copyRegion.srcSubresource.mipLevel = 0;
- copyRegion.srcSubresource.baseArrayLayer = 0;
- copyRegion.srcSubresource.layerCount = 1;
- copyRegion.srcOffset = {0, 0, 0};
- copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_PLANE_2_BIT_KHR;
- copyRegion.dstSubresource.mipLevel = 0;
- copyRegion.dstSubresource.baseArrayLayer = 0;
- copyRegion.dstSubresource.layerCount = 1;
- copyRegion.dstOffset = {0, 0, 0};
- copyRegion.extent.width = 128;
- copyRegion.extent.height = 128;
- copyRegion.extent.depth = 1;
-
- m_errorMonitor->ExpectSuccess();
- m_commandBuffer->begin();
- m_commandBuffer->CopyImage(image, VK_IMAGE_LAYOUT_GENERAL, image, VK_IMAGE_LAYOUT_GENERAL, 1, &copyRegion);
- m_commandBuffer->end();
- m_errorMonitor->VerifyNotFound();
-
- vkFreeMemory(device(), mem_obj, NULL);
- vkDestroyImage(device(), image, NULL);
-
- // Repeat bind test on a DISJOINT multi-planar image, with per-plane memory objects, using API2 variants
- //
- features |= VK_FORMAT_FEATURE_DISJOINT_BIT;
- ci.flags = VK_IMAGE_CREATE_DISJOINT_BIT;
- if (ImageFormatAndFeaturesSupported(instance(), gpu(), ci, features)) {
- ASSERT_VK_SUCCESS(vkCreateImage(device(), &ci, NULL, &image));
-
- // Allocate & bind memory
- VkPhysicalDeviceMemoryProperties2 phys_mem_props2 = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2};
- vkGetPhysicalDeviceMemoryProperties2Function(gpu(), &phys_mem_props2);
- VkImagePlaneMemoryRequirementsInfo image_plane_req = {VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO};
- VkImageMemoryRequirementsInfo2 mem_req_info2 = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2};
- mem_req_info2.pNext = &image_plane_req;
- mem_req_info2.image = image;
- VkMemoryRequirements2 mem_reqs2 = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2};
-
- VkDeviceMemory p0_mem, p1_mem, p2_mem;
- mem_props = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
- VkMemoryAllocateInfo alloc_info = {VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO};
-
- // Plane 0
- image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_0_BIT;
- vkGetImageMemoryRequirements2Function(device(), &mem_req_info2, &mem_reqs2);
- uint32_t mem_type = 0;
- for (mem_type = 0; mem_type < phys_mem_props2.memoryProperties.memoryTypeCount; mem_type++) {
- if ((mem_reqs2.memoryRequirements.memoryTypeBits & (1 << mem_type)) &&
- ((phys_mem_props2.memoryProperties.memoryTypes[mem_type].propertyFlags & mem_props) == mem_props)) {
- alloc_info.memoryTypeIndex = mem_type;
- break;
- }
- }
- alloc_info.allocationSize = mem_reqs2.memoryRequirements.size;
- ASSERT_VK_SUCCESS(vkAllocateMemory(device(), &alloc_info, NULL, &p0_mem));
-
- // Plane 1 & 2 use same memory type
- image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_1_BIT;
- vkGetImageMemoryRequirements2Function(device(), &mem_req_info2, &mem_reqs2);
- alloc_info.allocationSize = mem_reqs2.memoryRequirements.size;
- ASSERT_VK_SUCCESS(vkAllocateMemory(device(), &alloc_info, NULL, &p1_mem));
-
- image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_2_BIT;
- vkGetImageMemoryRequirements2Function(device(), &mem_req_info2, &mem_reqs2);
- alloc_info.allocationSize = mem_reqs2.memoryRequirements.size;
- ASSERT_VK_SUCCESS(vkAllocateMemory(device(), &alloc_info, NULL, &p2_mem));
-
- // Set up 3-plane binding
- VkBindImageMemoryInfo bind_info[3];
- for (int plane = 0; plane < 3; plane++) {
- bind_info[plane].sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
- bind_info[plane].pNext = nullptr;
- bind_info[plane].image = image;
- bind_info[plane].memoryOffset = 0;
- }
- bind_info[0].memory = p0_mem;
- bind_info[1].memory = p1_mem;
- bind_info[2].memory = p2_mem;
-
- m_errorMonitor->ExpectSuccess();
- vkBindImageMemory2Function(device(), 3, bind_info);
- m_errorMonitor->VerifyNotFound();
-
- vkFreeMemory(device(), p0_mem, NULL);
- vkFreeMemory(device(), p1_mem, NULL);
- vkFreeMemory(device(), p2_mem, NULL);
- vkDestroyImage(device(), image, NULL);
- }
-
- // Test that changing the layout of ASPECT_COLOR also changes the layout of the individual planes
- VkBufferObj buffer;
- VkMemoryPropertyFlags reqs = 0;
- buffer.init_as_src(*m_device, (VkDeviceSize)128 * 128 * 3, reqs);
- VkImageObj mpimage(m_device);
- mpimage.Init(256, 256, 1, VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR, VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT,
- VK_IMAGE_TILING_OPTIMAL, 0);
- VkBufferImageCopy copy_region = {};
- copy_region.bufferRowLength = 128;
- copy_region.bufferImageHeight = 128;
- copy_region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_PLANE_1_BIT_KHR;
- copy_region.imageSubresource.layerCount = 1;
- copy_region.imageExtent.height = 64;
- copy_region.imageExtent.width = 64;
- copy_region.imageExtent.depth = 1;
-
- vkResetCommandBuffer(m_commandBuffer->handle(), 0);
- m_commandBuffer->begin();
- mpimage.ImageMemoryBarrier(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
- vkCmdCopyBufferToImage(m_commandBuffer->handle(), buffer.handle(), mpimage.handle(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1,
- &copy_region);
- m_commandBuffer->end();
- m_commandBuffer->QueueCommandBuffer(false);
- m_errorMonitor->VerifyNotFound();
-
- // Test to verify that views of multiplanar images have layouts tracked correctly
- // by changing the image's layout then using a view of that image
- VkImageView view;
- VkImageViewCreateInfo ivci = {};
- ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
- ivci.image = mpimage.handle();
- ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
- ivci.format = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR;
- ivci.subresourceRange.layerCount = 1;
- ivci.subresourceRange.baseMipLevel = 0;
- ivci.subresourceRange.levelCount = 1;
- ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- vkCreateImageView(m_device->device(), &ivci, nullptr, &view);
-
- OneOffDescriptorSet descriptor_set(m_device,
- {
- {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
- });
-
- VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
- VkSampler sampler;
-
- VkResult err;
- err = vkCreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
- ASSERT_VK_SUCCESS(err);
-
- const VkPipelineLayoutObj pipeline_layout(m_device, {&descriptor_set.layout_});
- descriptor_set.WriteDescriptorImageInfo(0, view, sampler);
- descriptor_set.UpdateDescriptorSets();
-
- VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
- VkShaderObj fs(m_device, bindStateFragSamplerShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
- VkPipelineObj pipe(m_device);
- pipe.AddShader(&vs);
- pipe.AddShader(&fs);
- pipe.AddDefaultColorAttachment();
- pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());
-
- m_errorMonitor->ExpectSuccess();
- m_commandBuffer->begin();
- VkImageMemoryBarrier img_barrier = {};
- img_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
- img_barrier.srcAccessMask = VK_ACCESS_SHADER_WRITE_BIT;
- img_barrier.dstAccessMask = VK_ACCESS_SHADER_WRITE_BIT;
- img_barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
- img_barrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
- img_barrier.image = mpimage.handle();
- img_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
- img_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
- img_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- img_barrier.subresourceRange.baseArrayLayer = 0;
- img_barrier.subresourceRange.baseMipLevel = 0;
- img_barrier.subresourceRange.layerCount = 1;
- img_barrier.subresourceRange.levelCount = 1;
- vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
- VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1, &img_barrier);
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
- vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
- &descriptor_set.set_, 0, nullptr);
-
- VkViewport viewport = {0, 0, 16, 16, 0, 1};
- VkRect2D scissor = {{0, 0}, {16, 16}};
- vkCmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
- vkCmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
-
- m_commandBuffer->Draw(1, 0, 0, 0);
- m_commandBuffer->EndRenderPass();
- m_commandBuffer->end();
- VkSubmitInfo submit_info = {};
- submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
- submit_info.commandBufferCount = 1;
- submit_info.pCommandBuffers = &m_commandBuffer->handle();
- vkQueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
- m_errorMonitor->VerifyNotFound();
-
- vkQueueWaitIdle(m_device->m_queue);
- vkDestroyImageView(m_device->device(), view, NULL);
- vkDestroySampler(m_device->device(), sampler, nullptr);
-}
-
-TEST_F(VkPositiveLayerTest, ApiVersionZero) {
- TEST_DESCRIPTION("Check that apiVersion = 0 is valid.");
- m_errorMonitor->ExpectSuccess();
- app_info.apiVersion = 0U;
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, RayTracingPipelineNV) {
- TEST_DESCRIPTION("Test VK_NV_ray_tracing.");
-
- if (!CreateNVRayTracingPipelineHelper::InitInstanceExtensions(*this, m_instance_extension_names)) {
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
- (PFN_vkGetPhysicalDeviceFeatures2KHR)vkGetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
- ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
-
- if (!CreateNVRayTracingPipelineHelper::InitDeviceExtensions(*this, m_device_extension_names)) {
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- auto ignore_update = [](CreateNVRayTracingPipelineHelper &helper) {};
- CreateNVRayTracingPipelineHelper::OneshotPositiveTest(*this, ignore_update);
-}
-
-TEST_F(VkPositiveLayerTest, ViewportArray2NV) {
- TEST_DESCRIPTION("Test to validate VK_NV_viewport_array2");
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- VkPhysicalDeviceFeatures available_features = {};
- ASSERT_NO_FATAL_FAILURE(GetPhysicalDeviceFeatures(&available_features));
-
- if (!available_features.multiViewport) {
- printf("%s VkPhysicalDeviceFeatures::multiViewport is not supported, skipping tests\n", kSkipPrefix);
- return;
- }
- if (!available_features.tessellationShader) {
- printf("%s VkPhysicalDeviceFeatures::tessellationShader is not supported, skipping tests\n", kSkipPrefix);
- return;
- }
- if (!available_features.geometryShader) {
- printf("%s VkPhysicalDeviceFeatures::geometryShader is not supported, skipping tests\n", kSkipPrefix);
- return;
- }
-
- if (DeviceExtensionSupported(gpu(), nullptr, VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME);
- } else {
- printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitState());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- const char tcs_src[] = R"(
- #version 450
- layout(vertices = 3) out;
-
- void main() {
- gl_TessLevelOuter[0] = 4.0f;
- gl_TessLevelOuter[1] = 4.0f;
- gl_TessLevelOuter[2] = 4.0f;
- gl_TessLevelInner[0] = 3.0f;
-
- gl_out[gl_InvocationID].gl_Position = gl_in[gl_InvocationID].gl_Position;
- })";
-
-    // Create the tessellation control and fragment shaders here since they will not be
- // modified by the different test cases.
- VkShaderObj tcs(m_device, tcs_src, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, this);
- VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- std::vector<VkViewport> vps = {{0.0f, 0.0f, m_width / 2.0f, m_height}, {m_width / 2.0f, 0.0f, m_width / 2.0f, m_height}};
- std::vector<VkRect2D> scs = {
- {{0, 0}, {static_cast<uint32_t>(m_width) / 2, static_cast<uint32_t>(m_height)}},
- {{static_cast<int32_t>(m_width) / 2, 0}, {static_cast<uint32_t>(m_width) / 2, static_cast<uint32_t>(m_height)}}};
-
- enum class TestStage { VERTEX = 0, TESSELLATION_EVAL = 1, GEOMETRY = 2 };
- std::array<TestStage, 3> vertex_stages = {{TestStage::VERTEX, TestStage::TESSELLATION_EVAL, TestStage::GEOMETRY}};
-
- // Verify that the usage of gl_ViewportMask[] in the allowed vertex processing
- // stages does not cause any errors.
- for (auto stage : vertex_stages) {
- m_errorMonitor->ExpectSuccess();
-
- VkPipelineInputAssemblyStateCreateInfo iaci = {VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO};
- iaci.topology = (stage != TestStage::VERTEX) ? VK_PRIMITIVE_TOPOLOGY_PATCH_LIST : VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
-
- VkPipelineTessellationStateCreateInfo tsci = {VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO};
- tsci.patchControlPoints = 3;
-
- const VkPipelineLayoutObj pl(m_device);
-
- VkPipelineObj pipe(m_device);
- pipe.AddDefaultColorAttachment();
- pipe.SetInputAssembly(&iaci);
- pipe.SetViewport(vps);
- pipe.SetScissor(scs);
- pipe.AddShader(&fs);
-
- std::stringstream vs_src, tes_src, geom_src;
-
- vs_src << R"(
- #version 450
- #extension GL_NV_viewport_array2 : require
-
- vec2 positions[3] = { vec2( 0.0f, -0.5f),
- vec2( 0.5f, 0.5f),
- vec2(-0.5f, 0.5f)
- };
- void main() {)";
- // Write viewportMask if the vertex shader is the last vertex processing stage.
- if (stage == TestStage::VERTEX) {
- vs_src << "gl_ViewportMask[0] = 3;\n";
- }
- vs_src << R"(
- gl_Position = vec4(positions[gl_VertexIndex % 3], 0.0, 1.0);
- })";
-
- VkShaderObj vs(m_device, vs_src.str().c_str(), VK_SHADER_STAGE_VERTEX_BIT, this);
- pipe.AddShader(&vs);
-
- std::unique_ptr<VkShaderObj> tes, geom;
-
- if (stage >= TestStage::TESSELLATION_EVAL) {
- tes_src << R"(
- #version 450
- #extension GL_NV_viewport_array2 : require
- layout(triangles) in;
-
- void main() {
- gl_Position = (gl_in[0].gl_Position * gl_TessCoord.x +
- gl_in[1].gl_Position * gl_TessCoord.y +
- gl_in[2].gl_Position * gl_TessCoord.z);)";
- // Write viewportMask if the tess eval shader is the last vertex processing stage.
- if (stage == TestStage::TESSELLATION_EVAL) {
- tes_src << "gl_ViewportMask[0] = 3;\n";
- }
- tes_src << "}";
-
- tes = std::unique_ptr<VkShaderObj>(
- new VkShaderObj(m_device, tes_src.str().c_str(), VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, this));
- pipe.AddShader(tes.get());
- pipe.AddShader(&tcs);
- pipe.SetTessellation(&tsci);
- }
-
- if (stage >= TestStage::GEOMETRY) {
- geom_src << R"(
- #version 450
- #extension GL_NV_viewport_array2 : require
- layout(triangles) in;
- layout(triangle_strip, max_vertices = 3) out;
-
- void main() {
- gl_ViewportMask[0] = 3;
- for(int i = 0; i < 3; ++i) {
- gl_Position = gl_in[i].gl_Position;
- EmitVertex();
- }
- })";
-
- geom =
- std::unique_ptr<VkShaderObj>(new VkShaderObj(m_device, geom_src.str().c_str(), VK_SHADER_STAGE_GEOMETRY_BIT, this));
- pipe.AddShader(geom.get());
- }
-
- pipe.CreateVKPipeline(pl.handle(), renderPass());
- m_errorMonitor->VerifyNotFound();
- }
-}
-
-TEST_F(VkPositiveLayerTest, HostQueryResetSuccess) {
- // This is a positive test. No failures are expected.
- TEST_DESCRIPTION("Use vkResetQueryPoolEXT normally");
-
- if (!InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
- printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
- VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- return;
- }
-
- m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- if (!DeviceExtensionSupported(gpu(), nullptr, VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME)) {
- printf("%s Extension %s not supported by device; skipped.\n", kSkipPrefix, VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME);
- return;
- }
-
- m_device_extension_names.push_back(VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME);
-
- VkPhysicalDeviceHostQueryResetFeaturesEXT host_query_reset_features{};
- host_query_reset_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT;
- host_query_reset_features.hostQueryReset = VK_TRUE;
-
- VkPhysicalDeviceFeatures2 pd_features2{};
- pd_features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
- pd_features2.pNext = &host_query_reset_features;
-
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &pd_features2));
-
- auto fpvkResetQueryPoolEXT = (PFN_vkResetQueryPoolEXT)vkGetDeviceProcAddr(m_device->device(), "vkResetQueryPoolEXT");
-
- m_errorMonitor->ExpectSuccess();
-
- VkQueryPool query_pool;
- VkQueryPoolCreateInfo query_pool_create_info{};
- query_pool_create_info.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
- query_pool_create_info.queryType = VK_QUERY_TYPE_TIMESTAMP;
- query_pool_create_info.queryCount = 1;
- vkCreateQueryPool(m_device->device(), &query_pool_create_info, nullptr, &query_pool);
- fpvkResetQueryPoolEXT(m_device->device(), query_pool, 0, 1);
- vkDestroyQueryPool(m_device->device(), query_pool, nullptr);
-
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, CreatePipelineFragmentOutputNotConsumedButAlphaToCoverageEnabled) {
- TEST_DESCRIPTION(
- "Test that no warning is produced when writing to non-existing color attachment if alpha to coverage is enabled.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget(0u));
-
- VkPipelineMultisampleStateCreateInfo ms_state_ci = {};
- ms_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
- ms_state_ci.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
- ms_state_ci.alphaToCoverageEnable = VK_TRUE;
-
- const auto set_info = [&](CreatePipelineHelper &helper) {
- helper.pipe_ms_state_ci_ = ms_state_ci;
- helper.cb_ci_.attachmentCount = 0;
- };
- CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT, "", true);
-}
-
-TEST_F(VkPositiveLayerTest, UseFirstQueueUnqueried) {
- TEST_DESCRIPTION("Use first queue family and one queue without first querying with vkGetPhysicalDeviceQueueFamilyProperties");
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- const float q_priority[] = {1.0f};
- VkDeviceQueueCreateInfo queue_ci = {};
- queue_ci.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
- queue_ci.queueFamilyIndex = 0;
- queue_ci.queueCount = 1;
- queue_ci.pQueuePriorities = q_priority;
-
- VkDeviceCreateInfo device_ci = {};
- device_ci.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
- device_ci.queueCreateInfoCount = 1;
- device_ci.pQueueCreateInfos = &queue_ci;
-
- m_errorMonitor->ExpectSuccess();
- VkDevice test_device;
- vkCreateDevice(gpu(), &device_ci, nullptr, &test_device);
- m_errorMonitor->VerifyNotFound();
-
- vkDestroyDevice(test_device, nullptr);
-}
-
-// Android loader returns an error in this case
-#if !defined(ANDROID)
-TEST_F(VkPositiveLayerTest, GetDevProcAddrNullPtr) {
- TEST_DESCRIPTION("Call GetDeviceProcAddr on an enabled instance extension expecting nullptr");
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- if (InstanceExtensionSupported(VK_KHR_SURFACE_EXTENSION_NAME)) {
- m_instance_extension_names.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
- } else {
- printf("%s %s not supported, skipping test\n", kSkipPrefix, VK_KHR_SURFACE_EXTENSION_NAME);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- m_errorMonitor->ExpectSuccess();
- auto fpDestroySurface = (PFN_vkCreateValidationCacheEXT)vkGetDeviceProcAddr(m_device->device(), "vkDestroySurfaceKHR");
- if (fpDestroySurface) {
- m_errorMonitor->SetError("Null was expected!");
- }
- m_errorMonitor->VerifyNotFound();
-}
-#endif
-
-TEST_F(VkPositiveLayerTest, CmdCopySwapchainImage) {
- TEST_DESCRIPTION("Run vkCmdCopyImage with a swapchain image");
-
-#if defined(VK_USE_PLATFORM_ANDROID_KHR)
- printf(
- "%s According to VUID-01631, VkBindImageMemoryInfo-memory should be NULL. But Android will crash if memory is NULL, "
- "skipping CmdCopySwapchainImage test\n",
- kSkipPrefix);
- return;
-#endif
-
- SetTargetApiVersion(VK_API_VERSION_1_1);
-
- if (!AddSurfaceInstanceExtension()) {
- printf("%s surface extensions not supported, skipping CmdCopySwapchainImage test\n", kSkipPrefix);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- if (!AddSwapchainDeviceExtension()) {
- printf("%s swapchain extensions not supported, skipping CmdCopySwapchainImage test\n", kSkipPrefix);
- return;
- }
-
- if (DeviceValidationVersion() < VK_API_VERSION_1_1) {
- printf("%s VkBindImageMemoryInfo requires Vulkan 1.1+, skipping test\n", kSkipPrefix);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitState());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
- if (!InitSwapchain()) {
- printf("%s Cannot create surface or swapchain, skipping CmdCopySwapchainImage test\n", kSkipPrefix);
- return;
- }
-
- auto image_create_info = lvl_init_struct<VkImageCreateInfo>();
- image_create_info.imageType = VK_IMAGE_TYPE_2D;
- image_create_info.format = VK_FORMAT_R8G8B8A8_UNORM;
- image_create_info.extent.width = 64;
- image_create_info.extent.height = 64;
- image_create_info.extent.depth = 1;
- image_create_info.mipLevels = 1;
- image_create_info.arrayLayers = 1;
- image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
- image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
- image_create_info.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
- image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
- image_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
-
- VkImageObj srcImage(m_device);
- srcImage.init(&image_create_info);
-
- image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
-
- auto image_swapchain_create_info = lvl_init_struct<VkImageSwapchainCreateInfoKHR>();
- image_swapchain_create_info.swapchain = m_swapchain;
- image_create_info.pNext = &image_swapchain_create_info;
-
- VkImage image_from_swapchain;
- vkCreateImage(device(), &image_create_info, NULL, &image_from_swapchain);
-
- auto bind_swapchain_info = lvl_init_struct<VkBindImageMemorySwapchainInfoKHR>();
- bind_swapchain_info.swapchain = m_swapchain;
- bind_swapchain_info.imageIndex = 0;
-
- auto bind_info = lvl_init_struct<VkBindImageMemoryInfo>(&bind_swapchain_info);
- bind_info.image = image_from_swapchain;
- bind_info.memory = VK_NULL_HANDLE;
- bind_info.memoryOffset = 0;
-
- vkBindImageMemory2(m_device->device(), 1, &bind_info);
-
- VkImageCopy copy_region = {};
- copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- copy_region.srcSubresource.mipLevel = 0;
- copy_region.dstSubresource.mipLevel = 0;
- copy_region.srcSubresource.baseArrayLayer = 0;
- copy_region.dstSubresource.baseArrayLayer = 0;
- copy_region.srcSubresource.layerCount = 1;
- copy_region.dstSubresource.layerCount = 1;
- copy_region.srcOffset = {0, 0, 0};
- copy_region.dstOffset = {0, 0, 0};
- copy_region.extent = {10, 10, 1};
-
- m_commandBuffer->begin();
-
- m_errorMonitor->ExpectSuccess();
- vkCmdCopyImage(m_commandBuffer->handle(), srcImage.handle(), VK_IMAGE_LAYOUT_GENERAL, image_from_swapchain,
- VK_IMAGE_LAYOUT_GENERAL, 1, &copy_region);
- m_errorMonitor->VerifyNotFound();
-
- vkDestroyImage(m_device->device(), image_from_swapchain, NULL);
- DestroySwapchain();
-}
-
-TEST_F(VkPositiveLayerTest, TransferImageToSwapchainDeviceGroup) {
- TEST_DESCRIPTION("Transfer an image to a swapchain's image between device group");
-
-#if defined(VK_USE_PLATFORM_ANDROID_KHR)
- printf(
- "%s According to VUID-01631, VkBindImageMemoryInfo-memory should be NULL. But Android will crash if memory is NULL, "
- "skipping test\n",
- kSkipPrefix);
- return;
-#endif
-
- SetTargetApiVersion(VK_API_VERSION_1_1);
-
- if (!AddSurfaceInstanceExtension()) {
- printf("%s surface extensions not supported, skipping test\n", kSkipPrefix);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- if (!AddSwapchainDeviceExtension()) {
- printf("%s swapchain extensions not supported, skipping test\n", kSkipPrefix);
- return;
- }
-
- if (DeviceValidationVersion() < VK_API_VERSION_1_1) {
- printf("%s VkBindImageMemoryInfo requires Vulkan 1.1+, skipping test\n", kSkipPrefix);
- return;
- }
- uint32_t physical_device_group_count = 0;
- vkEnumeratePhysicalDeviceGroups(instance(), &physical_device_group_count, nullptr);
-
- if (physical_device_group_count == 0) {
- printf("%s physical_device_group_count is 0, skipping test\n", kSkipPrefix);
- return;
- }
-
- std::vector<VkPhysicalDeviceGroupProperties> physical_device_group(physical_device_group_count,
- {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES});
- vkEnumeratePhysicalDeviceGroups(instance(), &physical_device_group_count, physical_device_group.data());
- VkDeviceGroupDeviceCreateInfo create_device_pnext = {};
- create_device_pnext.sType = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO;
- create_device_pnext.physicalDeviceCount = physical_device_group[0].physicalDeviceCount;
- create_device_pnext.pPhysicalDevices = physical_device_group[0].physicalDevices;
- ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &create_device_pnext, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
- if (!InitSwapchain(VK_IMAGE_USAGE_TRANSFER_DST_BIT)) {
- printf("%s Cannot create surface or swapchain, skipping test\n", kSkipPrefix);
- return;
- }
-
- auto image_create_info = lvl_init_struct<VkImageCreateInfo>();
- image_create_info.imageType = VK_IMAGE_TYPE_2D;
- image_create_info.format = VK_FORMAT_R8G8B8A8_UNORM;
- image_create_info.extent.width = 64;
- image_create_info.extent.height = 64;
- image_create_info.extent.depth = 1;
- image_create_info.mipLevels = 1;
- image_create_info.arrayLayers = 1;
- image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
- image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
- image_create_info.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
- image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
- image_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
-
- VkImageObj src_Image(m_device);
- src_Image.init(&image_create_info);
-
- image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
- image_create_info.flags = VK_IMAGE_CREATE_ALIAS_BIT;
-
- auto image_swapchain_create_info = lvl_init_struct<VkImageSwapchainCreateInfoKHR>();
- image_swapchain_create_info.swapchain = m_swapchain;
- image_create_info.pNext = &image_swapchain_create_info;
-
- VkImage peer_image;
- vkCreateImage(device(), &image_create_info, NULL, &peer_image);
-
- auto bind_devicegroup_info = lvl_init_struct<VkBindImageMemoryDeviceGroupInfo>();
- bind_devicegroup_info.deviceIndexCount = 2;
- std::array<uint32_t, 2> deviceIndices = {0, 0};
- bind_devicegroup_info.pDeviceIndices = deviceIndices.data();
- bind_devicegroup_info.splitInstanceBindRegionCount = 0;
- bind_devicegroup_info.pSplitInstanceBindRegions = nullptr;
-
- auto bind_swapchain_info = lvl_init_struct<VkBindImageMemorySwapchainInfoKHR>(&bind_devicegroup_info);
- bind_swapchain_info.swapchain = m_swapchain;
- bind_swapchain_info.imageIndex = 0;
-
- auto bind_info = lvl_init_struct<VkBindImageMemoryInfo>(&bind_swapchain_info);
- bind_info.image = peer_image;
- bind_info.memory = VK_NULL_HANDLE;
- bind_info.memoryOffset = 0;
-
- vkBindImageMemory2(m_device->device(), 1, &bind_info);
-
- uint32_t swapchain_images_count = 0;
- vkGetSwapchainImagesKHR(device(), m_swapchain, &swapchain_images_count, nullptr);
- std::vector<VkImage> swapchain_images;
- swapchain_images.resize(swapchain_images_count);
- vkGetSwapchainImagesKHR(device(), m_swapchain, &swapchain_images_count, swapchain_images.data());
-
- m_commandBuffer->begin();
-
- auto img_barrier = lvl_init_struct<VkImageMemoryBarrier>();
- img_barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
- img_barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
- img_barrier.image = swapchain_images[0];
- img_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
- img_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
- img_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- img_barrier.subresourceRange.baseArrayLayer = 0;
- img_barrier.subresourceRange.baseMipLevel = 0;
- img_barrier.subresourceRange.layerCount = 1;
- img_barrier.subresourceRange.levelCount = 1;
- vkCmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, 0, 0,
- nullptr, 0, nullptr, 1, &img_barrier);
-
- m_commandBuffer->end();
- m_commandBuffer->QueueCommandBuffer();
-
- m_commandBuffer->reset();
- m_commandBuffer->begin();
-
- VkImageCopy copy_region = {};
- copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
- copy_region.srcSubresource.mipLevel = 0;
- copy_region.dstSubresource.mipLevel = 0;
- copy_region.srcSubresource.baseArrayLayer = 0;
- copy_region.dstSubresource.baseArrayLayer = 0;
- copy_region.srcSubresource.layerCount = 1;
- copy_region.dstSubresource.layerCount = 1;
- copy_region.srcOffset = {0, 0, 0};
- copy_region.dstOffset = {0, 0, 0};
- copy_region.extent = {10, 10, 1};
- vkCmdCopyImage(m_commandBuffer->handle(), src_Image.handle(), VK_IMAGE_LAYOUT_GENERAL, peer_image,
- VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &copy_region);
-
- m_commandBuffer->end();
- m_errorMonitor->ExpectSuccess();
- m_commandBuffer->QueueCommandBuffer();
- m_errorMonitor->VerifyNotFound();
-
- vkDestroyImage(m_device->device(), peer_image, NULL);
- DestroySwapchain();
-}
-
-TEST_F(VkPositiveLayerTest, RenderPassValidStages) {
- TEST_DESCRIPTION("Create render pass with valid stages");
-
- bool rp2_supported = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
- if (rp2_supported) m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
- if (rp2_supported) rp2_supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- VkSubpassDescription sci[2] = {};
- sci[0].pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
- sci[1].pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
-
- VkSubpassDependency dependency = {};
- // to be filled later by tests
-
- VkRenderPassCreateInfo rpci = {};
- rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
- rpci.subpassCount = 2;
- rpci.pSubpasses = sci;
- rpci.dependencyCount = 1;
- rpci.pDependencies = &dependency;
-
- const VkPipelineStageFlags kGraphicsStages =
- VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT | VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT | VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT |
- VK_PIPELINE_STAGE_VERTEX_INPUT_BIT | VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT |
- VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT |
- VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
-
- dependency.srcSubpass = 0;
- dependency.dstSubpass = 1;
- dependency.srcStageMask = kGraphicsStages;
- dependency.dstStageMask = kGraphicsStages;
- PositiveTestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported);
-
- dependency.srcSubpass = VK_SUBPASS_EXTERNAL;
- dependency.dstSubpass = 0;
- dependency.srcStageMask = kGraphicsStages | VK_PIPELINE_STAGE_HOST_BIT;
- dependency.dstStageMask = kGraphicsStages;
- PositiveTestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported);
-
- dependency.srcSubpass = 0;
- dependency.dstSubpass = VK_SUBPASS_EXTERNAL;
- dependency.srcStageMask = kGraphicsStages;
- dependency.dstStageMask = VK_PIPELINE_STAGE_HOST_BIT;
- PositiveTestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported);
-}
-
-TEST_F(VkPositiveLayerTest, SampleMaskOverrideCoverageNV) {
- TEST_DESCRIPTION("Test to validate VK_NV_sample_mask_override_coverage");
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- if (DeviceExtensionSupported(gpu(), nullptr, VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_EXTENSION_NAME);
- } else {
- printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_EXTENSION_NAME);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitState());
-
- const char vs_src[] = R"(
- #version 450
- layout(location=0) out vec4 fragColor;
-
- const vec2 pos[3] = { vec2( 0.0f, -0.5f),
- vec2( 0.5f, 0.5f),
- vec2(-0.5f, 0.5f)
- };
- void main()
- {
- gl_Position = vec4(pos[gl_VertexIndex % 3], 0.0f, 1.0f);
- fragColor = vec4(0.0f, 1.0f, 0.0f, 1.0f);
- })";
-
- const char fs_src[] = R"(
- #version 450
- #extension GL_NV_sample_mask_override_coverage : require
-
- layout(location = 0) in vec4 fragColor;
- layout(location = 0) out vec4 outColor;
-
- layout(override_coverage) out int gl_SampleMask[];
-
- void main()
- {
- gl_SampleMask[0] = 0xff;
- outColor = fragColor;
- })";
-
- m_errorMonitor->ExpectSuccess();
-
- const VkSampleCountFlagBits sampleCount = VK_SAMPLE_COUNT_8_BIT;
-
- VkAttachmentDescription cAttachment = {};
- cAttachment.format = VK_FORMAT_B8G8R8A8_UNORM;
- cAttachment.samples = sampleCount;
- cAttachment.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
- cAttachment.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
- cAttachment.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
- cAttachment.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
- cAttachment.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
- cAttachment.finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
-
- VkAttachmentReference cAttachRef = {};
- cAttachRef.attachment = 0;
- cAttachRef.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
-
- VkSubpassDescription subpass = {};
- subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
- subpass.colorAttachmentCount = 1;
- subpass.pColorAttachments = &cAttachRef;
-
- VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO};
- rpci.attachmentCount = 1;
- rpci.pAttachments = &cAttachment;
- rpci.subpassCount = 1;
- rpci.pSubpasses = &subpass;
-
- VkRenderPass rp;
- vkCreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
-
- const VkPipelineLayoutObj pl(m_device);
-
- VkSampleMask sampleMask = 0x01;
- VkPipelineMultisampleStateCreateInfo msaa = {VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO};
- msaa.rasterizationSamples = sampleCount;
- msaa.sampleShadingEnable = VK_FALSE;
- msaa.pSampleMask = &sampleMask;
-
- VkPipelineObj pipe(m_device);
- pipe.AddDefaultColorAttachment();
- pipe.SetMSAA(&msaa);
-
- VkShaderObj vs(m_device, vs_src, VK_SHADER_STAGE_VERTEX_BIT, this);
- pipe.AddShader(&vs);
-
- VkShaderObj fs(m_device, fs_src, VK_SHADER_STAGE_FRAGMENT_BIT, this);
- pipe.AddShader(&fs);
-
- // Create pipeline and make sure that the usage of NV_sample_mask_override_coverage
- // in the fragment shader does not cause any errors.
- pipe.CreateVKPipeline(pl.handle(), rp);
-
- vkDestroyRenderPass(m_device->device(), rp, nullptr);
-
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, TestRasterizationDiscardEnableTrue) {
- TEST_DESCRIPTION("Ensure it doesn't crash and trigger error msg when rasterizerDiscardEnable = true");
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- VkAttachmentDescription att[1] = {{}};
- att[0].format = VK_FORMAT_R8G8B8A8_UNORM;
- att[0].samples = VK_SAMPLE_COUNT_4_BIT;
- att[0].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
- att[0].finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
- VkAttachmentReference cr = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
- VkSubpassDescription sp = {};
- sp.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
- sp.colorAttachmentCount = 1;
- sp.pColorAttachments = &cr;
- VkRenderPassCreateInfo rpi = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO};
- rpi.attachmentCount = 1;
- rpi.pAttachments = att;
- rpi.subpassCount = 1;
- rpi.pSubpasses = &sp;
- VkRenderPass rp;
- vkCreateRenderPass(m_device->device(), &rpi, nullptr, &rp);
-
- CreatePipelineHelper pipe(*this);
- pipe.InitInfo();
- pipe.gp_ci_.pViewportState = nullptr;
- pipe.gp_ci_.pMultisampleState = nullptr;
- pipe.gp_ci_.pDepthStencilState = nullptr;
- pipe.gp_ci_.pColorBlendState = nullptr;
- pipe.gp_ci_.renderPass = rp;
-
- m_errorMonitor->ExpectSuccess();
-    // Skip the test on NexusPlayer; its driver crashes when pViewportState, pMultisampleState, pDepthStencilState, and
-    // pColorBlendState are NULL.
- pipe.rs_state_ci_.rasterizerDiscardEnable = VK_TRUE;
- pipe.InitState();
- pipe.CreateGraphicsPipeline();
- m_errorMonitor->VerifyNotFound();
- vkDestroyRenderPass(m_device->device(), rp, nullptr);
-}
-
-TEST_F(VkPositiveLayerTest, TestSamplerDataForCombinedImageSampler) {
- TEST_DESCRIPTION("Shader code uses sampler data for CombinedImageSampler");
- ASSERT_NO_FATAL_FAILURE(Init());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- const std::string fsSource = R"(
- OpCapability Shader
- OpMemoryModel Logical GLSL450
- OpEntryPoint Fragment %main "main"
- OpExecutionMode %main OriginUpperLeft
-
- OpDecorate %InputData DescriptorSet 0
- OpDecorate %InputData Binding 0
- OpDecorate %SamplerData DescriptorSet 0
- OpDecorate %SamplerData Binding 0
-
- %void = OpTypeVoid
- %f32 = OpTypeFloat 32
- %Image = OpTypeImage %f32 2D 0 0 0 1 Rgba32f
- %ImagePtr = OpTypePointer UniformConstant %Image
- %InputData = OpVariable %ImagePtr UniformConstant
- %Sampler = OpTypeSampler
- %SamplerPtr = OpTypePointer UniformConstant %Sampler
- %SamplerData = OpVariable %SamplerPtr UniformConstant
- %SampledImage = OpTypeSampledImage %Image
-
- %func = OpTypeFunction %void
- %main = OpFunction %void None %func
- %40 = OpLabel
- %call_smp = OpLoad %Sampler %SamplerData
- OpReturn
- OpFunctionEnd)";
-
- VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
-
- CreatePipelineHelper pipe(*this);
- pipe.InitInfo();
- pipe.dsl_bindings_ = {
- {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
- };
- pipe.shader_stages_ = {fs.GetStageCreateInfo(), pipe.vs_->GetStageCreateInfo()};
- pipe.InitState();
- pipe.CreateGraphicsPipeline();
-
- VkImageObj image(m_device);
- image.Init(32, 32, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
- VkImageView view = image.targetView(VK_FORMAT_R8G8B8A8_UNORM);
-
- VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
- VkSampler sampler;
- vkCreateSampler(m_device->device(), &sampler_ci, nullptr, &sampler);
-
- uint32_t qfi = 0;
- VkBufferCreateInfo buffer_create_info = {};
- buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
- buffer_create_info.size = 1024;
- buffer_create_info.usage = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
- buffer_create_info.queueFamilyIndexCount = 1;
- buffer_create_info.pQueueFamilyIndices = &qfi;
-
- VkBufferObj buffer;
- buffer.init(*m_device, buffer_create_info);
-
- pipe.descriptor_set_->WriteDescriptorImageInfo(0, view, sampler, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
- pipe.descriptor_set_->UpdateDescriptorSets();
-
- m_commandBuffer->begin();
- m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
- vkCmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
- vkCmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_layout_.handle(), 0, 1,
- &pipe.descriptor_set_->set_, 0, NULL);
-
- m_errorMonitor->ExpectSuccess();
- vkCmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
- m_errorMonitor->VerifyNotFound();
-
- vkCmdEndRenderPass(m_commandBuffer->handle());
- m_commandBuffer->end();
- vkDestroySampler(m_device->device(), sampler, NULL);
-}
-
-TEST_F(VkPositiveLayerTest, NotPointSizeGeometryShaderSuccess) {
- TEST_DESCRIPTION("Create a pipeline using TOPOLOGY_POINT_LIST, but geometry shader doesn't include PointSize.");
-
- ASSERT_NO_FATAL_FAILURE(Init());
-
- if ((!m_device->phy().features().geometryShader)) {
- printf("%s Device does not support the required geometry shader features; skipped.\n", kSkipPrefix);
- return;
- }
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
- ASSERT_NO_FATAL_FAILURE(InitViewport());
-
- VkShaderObj gs(m_device, bindStateGeomShaderText, VK_SHADER_STAGE_GEOMETRY_BIT, this);
-
- CreatePipelineHelper pipe(*this);
- pipe.InitInfo();
- pipe.shader_stages_ = {pipe.vs_->GetStageCreateInfo(), gs.GetStageCreateInfo(), pipe.fs_->GetStageCreateInfo()};
- pipe.ia_ci_.topology = VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
- pipe.InitState();
-
- m_errorMonitor->ExpectSuccess();
- pipe.CreateGraphicsPipeline();
- m_errorMonitor->VerifyNotFound();
-}
-
-TEST_F(VkPositiveLayerTest, SubpassWithReadOnlyLayoutWithoutDependency) {
- TEST_DESCRIPTION("When both subpasses' attachments are the same and layouts are read-only, they don't need dependency.");
- ASSERT_NO_FATAL_FAILURE(Init());
-
- auto depth_format = FindSupportedDepthStencilFormat(gpu());
- if (!depth_format) {
- printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
- return;
- }
-
- // A renderpass with one color attachment.
- VkAttachmentDescription attachment = {0,
- depth_format,
- VK_SAMPLE_COUNT_1_BIT,
- VK_ATTACHMENT_LOAD_OP_DONT_CARE,
- VK_ATTACHMENT_STORE_OP_STORE,
- VK_ATTACHMENT_LOAD_OP_DONT_CARE,
- VK_ATTACHMENT_STORE_OP_DONT_CARE,
- VK_IMAGE_LAYOUT_UNDEFINED,
- VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL};
- const int size = 2;
- std::array<VkAttachmentDescription, size> attachments = {attachment, attachment};
-
- VkAttachmentReference att_ref_depth_stencil = {0, VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL};
-
- std::array<VkSubpassDescription, size> subpasses;
- subpasses[0] = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, 0, 0, nullptr, nullptr, &att_ref_depth_stencil, 0, nullptr};
- subpasses[1] = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, 0, 0, nullptr, nullptr, &att_ref_depth_stencil, 0, nullptr};
-
- VkRenderPassCreateInfo rpci = {
- VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, size, attachments.data(), size, subpasses.data(), 0, nullptr};
-
- VkRenderPass rp;
- VkResult err = vkCreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
- ASSERT_VK_SUCCESS(err);
-
- // A compatible framebuffer.
- VkImageObj image(m_device);
- image.Init(32, 32, 1, depth_format, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_TILING_LINEAR, 0);
- ASSERT_TRUE(image.initialized());
-
- VkImageViewCreateInfo ivci = {VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
- nullptr,
- 0,
- image.handle(),
- VK_IMAGE_VIEW_TYPE_2D,
- depth_format,
- {VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY,
- VK_COMPONENT_SWIZZLE_IDENTITY},
- {VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT, 0, 1, 0, 1}};
-
- VkImageView view;
- err = vkCreateImageView(m_device->device(), &ivci, nullptr, &view);
- ASSERT_VK_SUCCESS(err);
- std::array<VkImageView, size> views = {view, view};
-
- VkFramebufferCreateInfo fci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, size, views.data(), 32, 32, 1};
- VkFramebuffer fb;
- err = vkCreateFramebuffer(m_device->device(), &fci, nullptr, &fb);
- ASSERT_VK_SUCCESS(err);
-
- VkRenderPassBeginInfo rpbi = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, nullptr, rp, fb, {{0, 0}, {32, 32}}, 0, nullptr};
- m_commandBuffer->begin();
- vkCmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
- vkCmdNextSubpass(m_commandBuffer->handle(), VK_SUBPASS_CONTENTS_INLINE);
- vkCmdEndRenderPass(m_commandBuffer->handle());
- m_commandBuffer->end();
-
- vkDestroyFramebuffer(m_device->device(), fb, nullptr);
- vkDestroyRenderPass(m_device->device(), rp, nullptr);
- vkDestroyImageView(m_device->device(), view, nullptr);
-}
-
-TEST_F(VkPositiveLayerTest, GeometryShaderPassthroughNV) {
- TEST_DESCRIPTION("Test to validate VK_NV_geometry_shader_passthrough");
-
- ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
-
- VkPhysicalDeviceFeatures available_features = {};
- ASSERT_NO_FATAL_FAILURE(GetPhysicalDeviceFeatures(&available_features));
-
- if (!available_features.geometryShader) {
- printf("%s VkPhysicalDeviceFeatures::geometryShader is not supported, skipping test\n", kSkipPrefix);
- return;
- }
-
- if (DeviceExtensionSupported(gpu(), nullptr, VK_NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME)) {
- m_device_extension_names.push_back(VK_NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME);
- } else {
- printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME);
- return;
- }
-
- ASSERT_NO_FATAL_FAILURE(InitState());
- ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
-
- const char vs_src[] = R"(
- #version 450
-
- out gl_PerVertex {
- vec4 gl_Position;
- };
-
- layout(location = 0) out ColorBlock {vec4 vertexColor;};
-
- const vec2 positions[3] = { vec2( 0.0f, -0.5f),
- vec2( 0.5f, 0.5f),
- vec2(-0.5f, 0.5f)
- };
-
- const vec4 colors[3] = { vec4(1.0f, 0.0f, 0.0f, 1.0f),
- vec4(0.0f, 1.0f, 0.0f, 1.0f),
- vec4(0.0f, 0.0f, 1.0f, 1.0f)
- };
- void main()
- {
- vertexColor = colors[gl_VertexIndex % 3];
- gl_Position = vec4(positions[gl_VertexIndex % 3], 0.0, 1.0);
- })";
-
- const char gs_src[] = R"(
- #version 450
- #extension GL_NV_geometry_shader_passthrough: require
-
- layout(triangles) in;
- layout(triangle_strip, max_vertices = 3) out;
-
- layout(passthrough) in gl_PerVertex {vec4 gl_Position;};
- layout(location = 0, passthrough) in ColorBlock {vec4 vertexColor;};
-
- void main()
- {
- gl_Layer = 0;
- })";
-
- const char fs_src[] = R"(
- #version 450
-
- layout(location = 0) in ColorBlock {vec4 vertexColor;};
- layout(location = 0) out vec4 outColor;
-
- void main() {
- outColor = vertexColor;
- })";
-
- m_errorMonitor->ExpectSuccess();
-
- const VkPipelineLayoutObj pl(m_device);
-
- VkPipelineObj pipe(m_device);
- pipe.AddDefaultColorAttachment();
-
- VkShaderObj vs(m_device, vs_src, VK_SHADER_STAGE_VERTEX_BIT, this);
- pipe.AddShader(&vs);
-
- VkShaderObj gs(m_device, gs_src, VK_SHADER_STAGE_GEOMETRY_BIT, this);
- pipe.AddShader(&gs);
-
- VkShaderObj fs(m_device, fs_src, VK_SHADER_STAGE_FRAGMENT_BIT, this);
- pipe.AddShader(&fs);
-
- // Create pipeline and make sure that the usage of NV_geometry_shader_passthrough
- // in the fragment shader does not cause any errors.
- pipe.CreateVKPipeline(pl.handle(), renderPass());
-
- m_errorMonitor->VerifyNotFound();
-}
diff --git a/tests/vkrenderframework.cpp b/tests/vkrenderframework.cpp
index f0008355f..63b8f7b76 100644
--- a/tests/vkrenderframework.cpp
+++ b/tests/vkrenderframework.cpp
@@ -38,8 +38,6 @@ VkRenderFramework::VkRenderFramework()
m_commandBuffer(NULL),
m_renderPass(VK_NULL_HANDLE),
m_framebuffer(VK_NULL_HANDLE),
- m_surface(VK_NULL_HANDLE),
- m_swapchain(VK_NULL_HANDLE),
m_addRenderPassSelfDependency(false),
m_width(256.0), // default window width
m_height(256.0), // default window height
@@ -63,7 +61,7 @@ VkRenderFramework::VkRenderFramework()
m_clear_color.float32[3] = 0.0f;
}
-VkRenderFramework::~VkRenderFramework() { ShutdownFramework(); }
+VkRenderFramework::~VkRenderFramework() {}
VkPhysicalDevice VkRenderFramework::gpu() {
EXPECT_NE((VkInstance)0, inst); // Invalid to request gpu before instance exists
@@ -190,9 +188,6 @@ bool VkRenderFramework::DeviceIsMockICD() {
return false;
}
-// Some tests may need to be skipped if the devsim layer is in use.
-bool VkRenderFramework::DeviceSimulation() { return m_devsim_layer; }
-
// Render into a RenderTarget and read the pixels back to see if the device can really draw.
// Note: This cannot be called from inside an initialized VkRenderFramework because frameworks cannot be "nested".
// It is best to call it before "Init()".
@@ -473,153 +468,6 @@ void VkRenderFramework::InitViewport(float width, float height) {
}
void VkRenderFramework::InitViewport() { InitViewport(m_width, m_height); }
-
-bool VkRenderFramework::InitSurface() { return InitSurface(m_width, m_height); }
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-LRESULT CALLBACK WindowProc(HWND hwnd, UINT uMsg, WPARAM wParam, LPARAM lParam) {
- return DefWindowProc(hwnd, uMsg, wParam, lParam);
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-bool VkRenderFramework::InitSurface(float width, float height) {
-#if defined(VK_USE_PLATFORM_WIN32_KHR)
- HINSTANCE window_instance = GetModuleHandle(nullptr);
- const char class_name[] = "test";
- WNDCLASS wc = {};
- wc.lpfnWndProc = WindowProc;
- wc.hInstance = window_instance;
- wc.lpszClassName = class_name;
- RegisterClass(&wc);
- HWND window = CreateWindowEx(0, class_name, 0, 0, 0, 0, (int)m_width, (int)m_height, NULL, NULL, window_instance, NULL);
- ShowWindow(window, SW_HIDE);
-
- VkWin32SurfaceCreateInfoKHR surface_create_info = {};
- surface_create_info.sType = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR;
- surface_create_info.hinstance = window_instance;
- surface_create_info.hwnd = window;
- VkResult err = vkCreateWin32SurfaceKHR(instance(), &surface_create_info, nullptr, &m_surface);
- if (err != VK_SUCCESS) return false;
-#endif
-
-#if defined(VK_USE_PLATFORM_ANDROID_KHR) && defined(VALIDATION_APK)
- VkAndroidSurfaceCreateInfoKHR surface_create_info = {};
- surface_create_info.sType = VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR;
- surface_create_info.window = VkTestFramework::window;
- VkResult err = vkCreateAndroidSurfaceKHR(instance(), &surface_create_info, nullptr, &m_surface);
- if (err != VK_SUCCESS) return false;
-#endif
-
-#if defined(VK_USE_PLATFORM_XLIB_KHR)
- Display *dpy = XOpenDisplay(NULL);
- if (dpy) {
- int s = DefaultScreen(dpy);
- Window window = XCreateSimpleWindow(dpy, RootWindow(dpy, s), 0, 0, (int)m_width, (int)m_height, 1, BlackPixel(dpy, s),
- WhitePixel(dpy, s));
- VkXlibSurfaceCreateInfoKHR surface_create_info = {};
- surface_create_info.sType = VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR;
- surface_create_info.dpy = dpy;
- surface_create_info.window = window;
- VkResult err = vkCreateXlibSurfaceKHR(instance(), &surface_create_info, nullptr, &m_surface);
- if (err != VK_SUCCESS) return false;
- }
-#endif
-
-#if defined(VK_USE_PLATFORM_XCB_KHR)
- if (m_surface == VK_NULL_HANDLE) {
- xcb_connection_t *connection = xcb_connect(NULL, NULL);
- if (connection) {
- xcb_window_t window = xcb_generate_id(connection);
- VkXcbSurfaceCreateInfoKHR surface_create_info = {};
- surface_create_info.sType = VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR;
- surface_create_info.connection = connection;
- surface_create_info.window = window;
- VkResult err = vkCreateXcbSurfaceKHR(instance(), &surface_create_info, nullptr, &m_surface);
- if (err != VK_SUCCESS) return false;
- }
- }
-#endif
-
- return (m_surface == VK_NULL_HANDLE) ? false : true;
-}
-
-bool VkRenderFramework::InitSwapchain(VkImageUsageFlags imageUsage, VkSurfaceTransformFlagBitsKHR preTransform) {
- if (InitSurface()) {
- return InitSwapchain(m_surface, imageUsage, preTransform);
- }
- return false;
-}
-
-bool VkRenderFramework::InitSwapchain(VkSurfaceKHR &surface, VkImageUsageFlags imageUsage,
- VkSurfaceTransformFlagBitsKHR preTransform) {
- for (size_t i = 0; i < m_device->queue_props.size(); ++i) {
- VkBool32 presentSupport = false;
- vkGetPhysicalDeviceSurfaceSupportKHR(m_device->phy().handle(), i, surface, &presentSupport);
- }
-
- VkSurfaceCapabilitiesKHR capabilities;
- vkGetPhysicalDeviceSurfaceCapabilitiesKHR(m_device->phy().handle(), surface, &capabilities);
-
- uint32_t format_count;
- vkGetPhysicalDeviceSurfaceFormatsKHR(m_device->phy().handle(), surface, &format_count, nullptr);
- std::vector<VkSurfaceFormatKHR> formats;
- if (format_count != 0) {
- formats.resize(format_count);
- vkGetPhysicalDeviceSurfaceFormatsKHR(m_device->phy().handle(), surface, &format_count, formats.data());
- }
-
- uint32_t present_mode_count;
- vkGetPhysicalDeviceSurfacePresentModesKHR(m_device->phy().handle(), surface, &present_mode_count, nullptr);
- std::vector<VkPresentModeKHR> present_modes;
- if (present_mode_count != 0) {
- present_modes.resize(present_mode_count);
- vkGetPhysicalDeviceSurfacePresentModesKHR(m_device->phy().handle(), surface, &present_mode_count, present_modes.data());
- }
-
- VkSwapchainCreateInfoKHR swapchain_create_info = {};
- swapchain_create_info.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
- swapchain_create_info.pNext = 0;
- swapchain_create_info.surface = surface;
- swapchain_create_info.minImageCount = capabilities.minImageCount;
- swapchain_create_info.imageFormat = formats[0].format;
- swapchain_create_info.imageColorSpace = formats[0].colorSpace;
- swapchain_create_info.imageExtent = {capabilities.minImageExtent.width, capabilities.minImageExtent.height};
- swapchain_create_info.imageArrayLayers = capabilities.maxImageArrayLayers;
- swapchain_create_info.imageUsage = imageUsage;
- swapchain_create_info.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
- swapchain_create_info.preTransform = preTransform;
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
- swapchain_create_info.compositeAlpha = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR;
-#else
- swapchain_create_info.compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
-#endif
- swapchain_create_info.presentMode = present_modes[0];
- swapchain_create_info.clipped = VK_FALSE;
- swapchain_create_info.oldSwapchain = 0;
-
- VkResult err = vkCreateSwapchainKHR(device(), &swapchain_create_info, nullptr, &m_swapchain);
- if (err != VK_SUCCESS) {
- return false;
- }
- uint32_t imageCount = 0;
- vkGetSwapchainImagesKHR(device(), m_swapchain, &imageCount, nullptr);
- std::vector<VkImage> swapchainImages;
- swapchainImages.resize(imageCount);
- vkGetSwapchainImagesKHR(device(), m_swapchain, &imageCount, swapchainImages.data());
- return true;
-}
-
-void VkRenderFramework::DestroySwapchain() {
- if (m_swapchain != VK_NULL_HANDLE) {
- vkDestroySwapchainKHR(device(), m_swapchain, nullptr);
- m_swapchain = VK_NULL_HANDLE;
- }
- if (m_surface != VK_NULL_HANDLE) {
- vkDestroySurfaceKHR(instance(), m_surface, nullptr);
- m_surface = VK_NULL_HANDLE;
- }
-}
-
void VkRenderFramework::InitRenderTarget() { InitRenderTarget(1); }
void VkRenderFramework::InitRenderTarget(uint32_t targets) { InitRenderTarget(targets, NULL); }
@@ -818,18 +666,11 @@ VkQueueObj *VkDeviceObj::GetDefaultQueue() {
if (graphics_queues().empty()) return nullptr;
return graphics_queues()[0];
}
-
-VkQueueObj *VkDeviceObj::GetDefaultComputeQueue() {
- if (compute_queues().empty()) return nullptr;
- return compute_queues()[0];
-}
-
VkDescriptorSetLayoutObj::VkDescriptorSetLayoutObj(const VkDeviceObj *device,
const std::vector<VkDescriptorSetLayoutBinding> &descriptor_set_bindings,
- VkDescriptorSetLayoutCreateFlags flags, void *pNext) {
+ VkDescriptorSetLayoutCreateFlags flags) {
VkDescriptorSetLayoutCreateInfo dsl_ci = {};
dsl_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
- dsl_ci.pNext = pNext;
dsl_ci.flags = flags;
dsl_ci.bindingCount = static_cast<uint32_t>(descriptor_set_bindings.size());
dsl_ci.pBindings = descriptor_set_bindings.data();
@@ -1013,22 +854,21 @@ void VkImageObj::ImageMemoryBarrier(VkCommandBufferObj *cmd_buf, VkImageAspectFl
VK_ACCESS_SHADER_READ_BIT |
VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
- VK_MEMORY_INPUT_COPY_BIT*/, VkImageLayout image_layout,
- VkPipelineStageFlags src_stages, VkPipelineStageFlags dest_stages,
- uint32_t srcQueueFamilyIndex, uint32_t dstQueueFamilyIndex) {
+ VK_MEMORY_INPUT_COPY_BIT*/, VkImageLayout image_layout) {
// clang-format on
// TODO: Mali device crashing with VK_REMAINING_MIP_LEVELS
const VkImageSubresourceRange subresourceRange =
subresource_range(aspect, 0, /*VK_REMAINING_MIP_LEVELS*/ 1, 0, 1 /*VK_REMAINING_ARRAY_LAYERS*/);
VkImageMemoryBarrier barrier;
- barrier = image_memory_barrier(output_mask, input_mask, Layout(), image_layout, subresourceRange, srcQueueFamilyIndex,
- dstQueueFamilyIndex);
+ barrier = image_memory_barrier(output_mask, input_mask, Layout(), image_layout, subresourceRange);
VkImageMemoryBarrier *pmemory_barrier = &barrier;
+ VkPipelineStageFlags src_stages = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
+ VkPipelineStageFlags dest_stages = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
+
// write barrier to the command buffer
- vkCmdPipelineBarrier(cmd_buf->handle(), src_stages, dest_stages, VK_DEPENDENCY_BY_REGION_BIT, 0, NULL, 0, NULL, 1,
- pmemory_barrier);
+ vkCmdPipelineBarrier(cmd_buf->handle(), src_stages, dest_stages, 0, 0, NULL, 0, NULL, 1, pmemory_barrier);
}
void VkImageObj::SetLayout(VkCommandBufferObj *cmd_buf, VkImageAspectFlags aspect, VkImageLayout image_layout) {
@@ -1165,7 +1005,7 @@ bool VkImageObj::IsCompatible(const VkImageUsageFlags usages, const VkFormatFeat
void VkImageObj::InitNoLayout(uint32_t const width, uint32_t const height, uint32_t const mipLevels, VkFormat const format,
VkFlags const usage, VkImageTiling const requested_tiling, VkMemoryPropertyFlags const reqs,
- const std::vector<uint32_t> *queue_families, bool memory) {
+ const std::vector<uint32_t> *queue_families) {
VkFormatProperties image_fmt;
VkImageTiling tiling = VK_IMAGE_TILING_OPTIMAL;
@@ -1207,18 +1047,16 @@ void VkImageObj::InitNoLayout(uint32_t const width, uint32_t const height, uint3
Layout(imageCreateInfo.initialLayout);
imageCreateInfo.usage = usage;
- if (memory)
- vk_testing::Image::init(*m_device, imageCreateInfo, reqs);
- else
- vk_testing::Image::init_no_mem(*m_device, imageCreateInfo);
+
+ vk_testing::Image::init(*m_device, imageCreateInfo, reqs);
}
void VkImageObj::Init(uint32_t const width, uint32_t const height, uint32_t const mipLevels, VkFormat const format,
VkFlags const usage, VkImageTiling const requested_tiling, VkMemoryPropertyFlags const reqs,
- const std::vector<uint32_t> *queue_families, bool memory) {
- InitNoLayout(width, height, mipLevels, format, usage, requested_tiling, reqs, queue_families, memory);
+ const std::vector<uint32_t> *queue_families) {
+ InitNoLayout(width, height, mipLevels, format, usage, requested_tiling, reqs, queue_families);
- if (!initialized() || !memory) return; // We don't have a valid handle from early stage init, and thus SetLayout will fail
+ if (!initialized()) return; // We don't have a valid handle from early stage init, and thus SetLayout will fail
VkImageLayout newLayout;
if (usage & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT)
@@ -1336,7 +1174,7 @@ VkResult VkImageObj::CopyImageOut(VkImageObj &dst_image) {
src_image_layout = this->Layout();
this->SetLayout(&cmd_buf, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
- dest_image_layout = (dst_image.Layout() == VK_IMAGE_LAYOUT_UNDEFINED) ? VK_IMAGE_LAYOUT_GENERAL : dst_image.Layout();
+ dest_image_layout = (dst_image.Layout() == VK_IMAGE_LAYOUT_UNDEFINED) ? VK_IMAGE_LAYOUT_GENERAL : this->Layout();
dst_image.SetLayout(&cmd_buf, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
VkImageCopy copy_region = {};
@@ -1512,7 +1350,7 @@ VkConstantBufferObj::VkConstantBufferObj(VkDeviceObj *device, VkDeviceSize alloc
VkPipelineShaderStageCreateInfo const &VkShaderObj::GetStageCreateInfo() const { return m_stage_info; }
VkShaderObj::VkShaderObj(VkDeviceObj *device, const char *shader_code, VkShaderStageFlagBits stage, VkRenderFramework *framework,
- char const *name, bool debug, VkSpecializationInfo *specInfo) {
+ char const *name, bool debug) {
VkResult U_ASSERT_ONLY err = VK_SUCCESS;
std::vector<unsigned int> spv;
VkShaderModuleCreateInfo moduleCreateInfo;
@@ -1524,7 +1362,7 @@ VkShaderObj::VkShaderObj(VkDeviceObj *device, const char *shader_code, VkShaderS
m_stage_info.stage = stage;
m_stage_info.module = VK_NULL_HANDLE;
m_stage_info.pName = name;
- m_stage_info.pSpecializationInfo = specInfo;
+ m_stage_info.pSpecializationInfo = nullptr;
moduleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
moduleCreateInfo.pNext = nullptr;
@@ -1540,7 +1378,7 @@ VkShaderObj::VkShaderObj(VkDeviceObj *device, const char *shader_code, VkShaderS
}
VkShaderObj::VkShaderObj(VkDeviceObj *device, const std::string spv_source, VkShaderStageFlagBits stage,
- VkRenderFramework *framework, char const *name, VkSpecializationInfo *specInfo) {
+ VkRenderFramework *framework, char const *name) {
VkResult U_ASSERT_ONLY err = VK_SUCCESS;
std::vector<unsigned int> spv;
VkShaderModuleCreateInfo moduleCreateInfo;
@@ -1552,7 +1390,7 @@ VkShaderObj::VkShaderObj(VkDeviceObj *device, const std::string spv_source, VkSh
m_stage_info.stage = stage;
m_stage_info.module = VK_NULL_HANDLE;
m_stage_info.pName = name;
- m_stage_info.pSpecializationInfo = specInfo;
+ m_stage_info.pSpecializationInfo = nullptr;
moduleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
moduleCreateInfo.pNext = nullptr;
@@ -1610,7 +1448,7 @@ VkPipelineObj::VkPipelineObj(VkDeviceObj *device) {
m_vp_state.pScissors = nullptr;
m_rs_state.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
- m_rs_state.pNext = &m_line_state;
+ m_rs_state.pNext = nullptr;
m_rs_state.flags = 0;
m_rs_state.depthClampEnable = VK_FALSE;
m_rs_state.rasterizerDiscardEnable = VK_FALSE;
@@ -1623,13 +1461,6 @@ VkPipelineObj::VkPipelineObj(VkDeviceObj *device) {
m_rs_state.depthBiasSlopeFactor = 0.0f;
m_rs_state.lineWidth = 1.0f;
- m_line_state.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT;
- m_line_state.pNext = nullptr;
- m_line_state.lineRasterizationMode = VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT;
- m_line_state.stippledLineEnable = VK_FALSE;
- m_line_state.lineStippleFactor = 0;
- m_line_state.lineStipplePattern = 0;
-
m_ms_state.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
m_ms_state.pNext = nullptr;
m_ms_state.flags = 0;
@@ -1705,15 +1536,10 @@ void VkPipelineObj::SetMSAA(const VkPipelineMultisampleStateCreateInfo *ms_state
void VkPipelineObj::SetInputAssembly(const VkPipelineInputAssemblyStateCreateInfo *ia_state) { m_ia_state = *ia_state; }
-void VkPipelineObj::SetRasterization(const VkPipelineRasterizationStateCreateInfo *rs_state) {
- m_rs_state = *rs_state;
- m_rs_state.pNext = &m_line_state;
-}
+void VkPipelineObj::SetRasterization(const VkPipelineRasterizationStateCreateInfo *rs_state) { m_rs_state = *rs_state; }
void VkPipelineObj::SetTessellation(const VkPipelineTessellationStateCreateInfo *te_state) { m_te_state = te_state; }
-void VkPipelineObj::SetLineState(const VkPipelineRasterizationLineStateCreateInfoEXT *line_state) { m_line_state = *line_state; }
-
void VkPipelineObj::InitGraphicsPipelineCreateInfo(VkGraphicsPipelineCreateInfo *gp_ci) {
gp_ci->stageCount = m_shaderStages.size();
gp_ci->pStages = m_shaderStages.size() ? m_shaderStages.data() : nullptr;
@@ -1863,19 +1689,6 @@ void VkCommandBufferObj::ClearDepthStencilImage(VkImage image, VkImageLayout ima
vkCmdClearDepthStencilImage(handle(), image, imageLayout, pColor, rangeCount, pRanges);
}
-void VkCommandBufferObj::BuildAccelerationStructure(VkAccelerationStructureObj *as, VkBuffer scratchBuffer) {
- BuildAccelerationStructure(as, scratchBuffer, VK_NULL_HANDLE);
-}
-
-void VkCommandBufferObj::BuildAccelerationStructure(VkAccelerationStructureObj *as, VkBuffer scratchBuffer, VkBuffer instanceData) {
- PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV =
- (PFN_vkCmdBuildAccelerationStructureNV)vkGetDeviceProcAddr(as->dev(), "vkCmdBuildAccelerationStructureNV");
- assert(vkCmdBuildAccelerationStructureNV != nullptr);
-
- vkCmdBuildAccelerationStructureNV(handle(), &as->info(), instanceData, 0, VK_FALSE, as->handle(), VK_NULL_HANDLE, scratchBuffer,
- 0);
-}
-
void VkCommandBufferObj::PrepareAttachments(const vector<std::unique_ptr<VkImageObj>> &color_atts,
VkDepthStencilObj *depth_stencil_att) {
for (const auto &color_att : color_atts) {
diff --git a/tests/vkrenderframework.h b/tests/vkrenderframework.h
index f6e8d33b6..58a5c44a4 100644
--- a/tests/vkrenderframework.h
+++ b/tests/vkrenderframework.h
@@ -63,7 +63,6 @@ class VkDeviceObj : public vk_testing::Device {
VkDevice device() { return handle(); }
void SetDeviceQueue();
VkQueueObj *GetDefaultQueue();
- VkQueueObj *GetDefaultComputeQueue();
uint32_t id;
VkPhysicalDeviceProperties props;
@@ -78,6 +77,9 @@ class VkDepthStencilObj;
class VkRenderFramework : public VkTestFramework {
public:
+ VkRenderFramework();
+ ~VkRenderFramework();
+
VkInstance instance() { return inst; }
VkDevice device() { return m_device->device(); }
VkDeviceObj *DeviceObj() const { return m_device; }
@@ -87,13 +89,6 @@ class VkRenderFramework : public VkTestFramework {
VkFramebuffer framebuffer() { return m_framebuffer; }
void InitViewport(float width, float height);
void InitViewport();
- bool InitSurface();
- bool InitSurface(float width, float height);
- bool InitSwapchain(VkSurfaceKHR &surface, VkImageUsageFlags imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
- VkSurfaceTransformFlagBitsKHR preTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR);
- bool InitSwapchain(VkImageUsageFlags imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
- VkSurfaceTransformFlagBitsKHR preTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR);
- void DestroySwapchain();
void InitRenderTarget();
void InitRenderTarget(uint32_t targets);
void InitRenderTarget(VkImageView *dsBinding);
@@ -116,13 +111,9 @@ class VkRenderFramework : public VkTestFramework {
bool DeviceExtensionSupported(VkPhysicalDevice dev, const char *layer, const char *name, uint32_t specVersion = 0);
bool DeviceExtensionEnabled(const char *name);
bool DeviceIsMockICD();
- bool DeviceSimulation();
bool DeviceCanDraw();
protected:
- VkRenderFramework();
- virtual ~VkRenderFramework() = 0;
-
VkApplicationInfo app_info;
VkInstance inst;
VkPhysicalDevice objs[16];
@@ -133,8 +124,6 @@ class VkRenderFramework : public VkTestFramework {
VkRenderPass m_renderPass;
VkRenderPassCreateInfo renderPass_info_ = {};
VkFramebuffer m_framebuffer;
- VkSurfaceKHR m_surface;
- VkSwapchainKHR m_swapchain;
std::vector<VkViewport> m_viewports;
std::vector<VkRect2D> m_scissors;
float m_lineWidth;
@@ -168,6 +157,24 @@ class VkRenderFramework : public VkTestFramework {
std::vector<const char *> m_instance_layer_names;
std::vector<const char *> m_instance_extension_names;
std::vector<const char *> m_device_extension_names;
+
+ /*
+ * SetUp and TearDown are called by the Google Test framework
+ * to initialize a test framework based on this class.
+ */
+ virtual void SetUp() {
+ this->app_info.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
+ this->app_info.pNext = NULL;
+ this->app_info.pApplicationName = "base";
+ this->app_info.applicationVersion = 1;
+ this->app_info.pEngineName = "unittest";
+ this->app_info.engineVersion = 1;
+ this->app_info.apiVersion = VK_API_VERSION_1_0;
+
+ InitFramework();
+ }
+
+ virtual void TearDown() { ShutdownFramework(); }
};
class VkDescriptorSetObj;
@@ -176,7 +183,6 @@ class VkPipelineObj;
class VkDescriptorSetObj;
typedef vk_testing::Fence VkFenceObj;
typedef vk_testing::Buffer VkBufferObj;
-typedef vk_testing::AccelerationStructure VkAccelerationStructureObj;
class VkCommandPoolObj : public vk_testing::CommandPool {
public:
@@ -216,8 +222,6 @@ class VkCommandBufferObj : public vk_testing::CommandBuffer {
const VkImageSubresourceRange *pRanges);
void ClearDepthStencilImage(VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue *pColor,
uint32_t rangeCount, const VkImageSubresourceRange *pRanges);
- void BuildAccelerationStructure(VkAccelerationStructureObj *as, VkBuffer scratchBuffer);
- void BuildAccelerationStructure(VkAccelerationStructureObj *as, VkBuffer scratchBuffer, VkBuffer instanceData);
protected:
VkDeviceObj *m_device;
@@ -256,12 +260,13 @@ class VkImageObj : public vk_testing::Image {
public:
void Init(uint32_t const width, uint32_t const height, uint32_t const mipLevels, VkFormat const format, VkFlags const usage,
VkImageTiling const tiling = VK_IMAGE_TILING_LINEAR, VkMemoryPropertyFlags const reqs = 0,
- const std::vector<uint32_t> *queue_families = nullptr, bool memory = true);
+ const std::vector<uint32_t> *queue_families = nullptr);
+
void init(const VkImageCreateInfo *create_info);
void InitNoLayout(uint32_t const width, uint32_t const height, uint32_t const mipLevels, VkFormat const format,
VkFlags const usage, VkImageTiling tiling = VK_IMAGE_TILING_LINEAR, VkMemoryPropertyFlags reqs = 0,
- const std::vector<uint32_t> *queue_families = nullptr, bool memory = true);
+ const std::vector<uint32_t> *queue_families = nullptr);
// void clear( CommandBuffer*, uint32_t[4] );
@@ -274,10 +279,7 @@ class VkImageObj : public vk_testing::Image {
void UnmapMemory() { Image::memory().unmap(); }
void ImageMemoryBarrier(VkCommandBufferObj *cmd, VkImageAspectFlags aspect, VkFlags output_mask, VkFlags input_mask,
- VkImageLayout image_layout, VkPipelineStageFlags src_stages = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
- VkPipelineStageFlags dest_stages = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
- uint32_t srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
- uint32_t dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED);
+ VkImageLayout image_layout);
VkResult CopyImage(VkImageObj &src_image);
@@ -287,7 +289,7 @@ class VkImageObj : public vk_testing::Image {
VkImage image() const { return handle(); }
- VkImageView targetView(VkFormat format, VkImageAspectFlags aspect = VK_IMAGE_ASPECT_COLOR_BIT) {
+ VkImageView targetView(VkFormat format) {
if (!m_targetView.initialized()) {
VkImageViewCreateInfo createView = {};
createView.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
@@ -298,7 +300,7 @@ class VkImageObj : public vk_testing::Image {
createView.components.g = VK_COMPONENT_SWIZZLE_G;
createView.components.b = VK_COMPONENT_SWIZZLE_B;
createView.components.a = VK_COMPONENT_SWIZZLE_A;
- createView.subresourceRange = {aspect, 0, 1, 0, 1};
+ createView.subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
createView.flags = 0;
m_targetView.init(*m_device, createView);
}
@@ -362,7 +364,7 @@ class VkDescriptorSetLayoutObj : public vk_testing::DescriptorSetLayout {
VkDescriptorSetLayoutObj() = default;
VkDescriptorSetLayoutObj(const VkDeviceObj *device,
const std::vector<VkDescriptorSetLayoutBinding> &descriptor_set_bindings = {},
- VkDescriptorSetLayoutCreateFlags flags = 0, void *pNext = NULL);
+ VkDescriptorSetLayoutCreateFlags flags = 0);
// Move constructor and move assignment operator for Visual Studio 2013
VkDescriptorSetLayoutObj(VkDescriptorSetLayoutObj &&src) : DescriptorSetLayout(std::move(src)){};
@@ -402,9 +404,9 @@ class VkDescriptorSetObj : public vk_testing::DescriptorPool {
class VkShaderObj : public vk_testing::ShaderModule {
public:
VkShaderObj(VkDeviceObj *device, const char *shaderText, VkShaderStageFlagBits stage, VkRenderFramework *framework,
- char const *name = "main", bool debug = false, VkSpecializationInfo *specInfo = nullptr);
+ char const *name = "main", bool debug = false);
VkShaderObj(VkDeviceObj *device, const std::string spv_source, VkShaderStageFlagBits stage, VkRenderFramework *framework,
- char const *name = "main", VkSpecializationInfo *specInfo = nullptr);
+ char const *name = "main");
VkPipelineShaderStageCreateInfo const &GetStageCreateInfo() const;
protected:
@@ -452,7 +454,6 @@ class VkPipelineObj : public vk_testing::Pipeline {
void SetTessellation(const VkPipelineTessellationStateCreateInfo *te_state);
void SetViewport(const vector<VkViewport> viewports);
void SetScissor(const vector<VkRect2D> scissors);
- void SetLineState(const VkPipelineRasterizationLineStateCreateInfoEXT *line_state);
void InitGraphicsPipelineCreateInfo(VkGraphicsPipelineCreateInfo *gp_ci);
@@ -468,7 +469,6 @@ class VkPipelineObj : public vk_testing::Pipeline {
VkPipelineMultisampleStateCreateInfo m_ms_state;
VkPipelineTessellationStateCreateInfo const *m_te_state;
VkPipelineDynamicStateCreateInfo m_pd_state;
- VkPipelineRasterizationLineStateCreateInfoEXT m_line_state;
vector<VkDynamicState> m_dynamic_state_enables;
vector<VkViewport> m_viewports;
vector<VkRect2D> m_scissors;
diff --git a/tests/vktestbinding.cpp b/tests/vktestbinding.cpp
index 3444c959e..399442ac8 100644
--- a/tests/vktestbinding.cpp
+++ b/tests/vktestbinding.cpp
@@ -1,7 +1,7 @@
/*
- * Copyright (c) 2015-2019 The Khronos Group Inc.
- * Copyright (c) 2015-2019 Valve Corporation
- * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (c) 2015-2016 The Khronos Group Inc.
+ * Copyright (c) 2015-2016 Valve Corporation
+ * Copyright (c) 2015-2016 LunarG, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -605,88 +605,6 @@ void ImageView::init(const Device &dev, const VkImageViewCreateInfo &info) {
NON_DISPATCHABLE_HANDLE_INIT(vkCreateImageView, dev, &info);
}
-AccelerationStructure::~AccelerationStructure() {
- if (initialized()) {
- PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV =
- (PFN_vkDestroyAccelerationStructureNV)vkGetDeviceProcAddr(device(), "vkDestroyAccelerationStructureNV");
- assert(vkDestroyAccelerationStructureNV != nullptr);
-
- vkDestroyAccelerationStructureNV(device(), handle(), nullptr);
- }
-}
-
-VkMemoryRequirements2 AccelerationStructure::memory_requirements() const {
- PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV =
- (PFN_vkGetAccelerationStructureMemoryRequirementsNV)vkGetDeviceProcAddr(device(),
- "vkGetAccelerationStructureMemoryRequirementsNV");
- assert(vkGetAccelerationStructureMemoryRequirementsNV != nullptr);
-
- VkAccelerationStructureMemoryRequirementsInfoNV memoryRequirementsInfo = {};
- memoryRequirementsInfo.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
- memoryRequirementsInfo.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV;
- memoryRequirementsInfo.accelerationStructure = handle();
-
- VkMemoryRequirements2 memoryRequirements = {};
- vkGetAccelerationStructureMemoryRequirementsNV(device(), &memoryRequirementsInfo, &memoryRequirements);
- return memoryRequirements;
-}
-
-VkMemoryRequirements2 AccelerationStructure::build_scratch_memory_requirements() const {
- PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV =
- (PFN_vkGetAccelerationStructureMemoryRequirementsNV)vkGetDeviceProcAddr(device(),
- "vkGetAccelerationStructureMemoryRequirementsNV");
- assert(vkGetAccelerationStructureMemoryRequirementsNV != nullptr);
-
- VkAccelerationStructureMemoryRequirementsInfoNV memoryRequirementsInfo = {};
- memoryRequirementsInfo.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
- memoryRequirementsInfo.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV;
- memoryRequirementsInfo.accelerationStructure = handle();
-
- VkMemoryRequirements2 memoryRequirements = {};
- vkGetAccelerationStructureMemoryRequirementsNV(device(), &memoryRequirementsInfo, &memoryRequirements);
- return memoryRequirements;
-}
-
-void AccelerationStructure::init(const Device &dev, const VkAccelerationStructureCreateInfoNV &info, bool init_memory) {
- PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV =
- (PFN_vkCreateAccelerationStructureNV)vkGetDeviceProcAddr(dev.handle(), "vkCreateAccelerationStructureNV");
- assert(vkCreateAccelerationStructureNV != nullptr);
-
- NON_DISPATCHABLE_HANDLE_INIT(vkCreateAccelerationStructureNV, dev, &info);
-
- info_ = info.info;
-
- if (init_memory) {
- memory_.init(dev, DeviceMemory::get_resource_alloc_info(dev, memory_requirements().memoryRequirements,
- VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT));
-
- PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV =
- (PFN_vkBindAccelerationStructureMemoryNV)vkGetDeviceProcAddr(dev.handle(), "vkBindAccelerationStructureMemoryNV");
- assert(vkBindAccelerationStructureMemoryNV != nullptr);
-
- VkBindAccelerationStructureMemoryInfoNV bind_info = {};
- bind_info.sType = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV;
- bind_info.accelerationStructure = handle();
- bind_info.memory = memory_.handle();
- EXPECT(vkBindAccelerationStructureMemoryNV(dev.handle(), 1, &bind_info) == VK_SUCCESS);
-
- PFN_vkGetAccelerationStructureHandleNV vkGetAccelerationStructureHandleNV =
- (PFN_vkGetAccelerationStructureHandleNV)vkGetDeviceProcAddr(dev.handle(), "vkGetAccelerationStructureHandleNV");
- assert(vkGetAccelerationStructureHandleNV != nullptr);
- EXPECT(vkGetAccelerationStructureHandleNV(dev.handle(), handle(), sizeof(uint64_t), &opaque_handle_) == VK_SUCCESS);
- }
-}
-
-void AccelerationStructure::create_scratch_buffer(const Device &dev, Buffer *buffer) {
- VkMemoryRequirements scratch_buffer_memory_requirements = build_scratch_memory_requirements().memoryRequirements;
-
- VkBufferCreateInfo create_info = {};
- create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
- create_info.size = scratch_buffer_memory_requirements.size;
- create_info.usage = VK_BUFFER_USAGE_RAY_TRACING_BIT_NV;
- return buffer->init(dev, create_info, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
-}
-
NON_DISPATCHABLE_HANDLE_DTOR(ShaderModule, vkDestroyShaderModule)
void ShaderModule::init(const Device &dev, const VkShaderModuleCreateInfo &info) {
diff --git a/tests/vktestbinding.h b/tests/vktestbinding.h
index d2a65393a..0b47f02e3 100644
--- a/tests/vktestbinding.h
+++ b/tests/vktestbinding.h
@@ -1,7 +1,7 @@
/*
- * Copyright (c) 2015-2016, 2019 The Khronos Group Inc.
- * Copyright (c) 2015-2016, 2019 Valve Corporation
- * Copyright (c) 2015-2016, 2019 LunarG, Inc.
+ * Copyright (c) 2015-2016 The Khronos Group Inc.
+ * Copyright (c) 2015-2016 Valve Corporation
+ * Copyright (c) 2015-2016 LunarG, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -387,10 +387,7 @@ class Buffer : public internal::NonDispHandle<VkBuffer> {
// vkCreateBuffer()
void init(const Device &dev, const VkBufferCreateInfo &info, VkMemoryPropertyFlags mem_props);
void init(const Device &dev, const VkBufferCreateInfo &info) { init(dev, info, 0); }
- void init(const Device &dev, VkDeviceSize size, VkMemoryPropertyFlags mem_props,
- VkBufferUsageFlags usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, const std::vector<uint32_t> &queue_families = {}) {
- init(dev, create_info(size, usage, &queue_families), mem_props);
- }
+ void init(const Device &dev, VkDeviceSize size, VkMemoryPropertyFlags mem_props) { init(dev, create_info(size, 0), mem_props); }
void init(const Device &dev, VkDeviceSize size) { init(dev, size, 0); }
void init_as_src(const Device &dev, VkDeviceSize size, VkMemoryPropertyFlags &reqs,
const std::vector<uint32_t> *queue_families = nullptr) {
@@ -401,12 +398,8 @@ class Buffer : public internal::NonDispHandle<VkBuffer> {
init(dev, create_info(size, VK_BUFFER_USAGE_TRANSFER_DST_BIT, queue_families), reqs);
}
void init_as_src_and_dst(const Device &dev, VkDeviceSize size, VkMemoryPropertyFlags &reqs,
- const std::vector<uint32_t> *queue_families = nullptr, bool memory = true) {
- if (memory)
- init(dev, create_info(size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT, queue_families), reqs);
- else
- init_no_mem(dev,
- create_info(size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT, queue_families));
+ const std::vector<uint32_t> *queue_families = nullptr) {
+ init(dev, create_info(size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT, queue_families), reqs);
}
void init_no_mem(const Device &dev, const VkBufferCreateInfo &info);
@@ -496,16 +489,16 @@ class Image : public internal::NonDispHandle<VkImage> {
bool transparent() const;
bool copyable() const { return (format_features_ & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT); }
- VkImageSubresourceRange subresource_range(VkImageAspectFlags aspect) const { return subresource_range(create_info_, aspect); }
+ VkImageSubresourceRange subresource_range(VkImageAspectFlagBits aspect) const {
+ return subresource_range(create_info_, aspect);
+ }
VkExtent3D extent() const { return create_info_.extent; }
VkExtent3D extent(uint32_t mip_level) const { return extent(create_info_.extent, mip_level); }
VkFormat format() const { return create_info_.format; }
VkImageUsageFlags usage() const { return create_info_.usage; }
VkSharingMode sharing_mode() const { return create_info_.sharingMode; }
VkImageMemoryBarrier image_memory_barrier(VkFlags output_mask, VkFlags input_mask, VkImageLayout old_layout,
- VkImageLayout new_layout, const VkImageSubresourceRange &range,
- uint32_t srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
- uint32_t dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED) const {
+ VkImageLayout new_layout, const VkImageSubresourceRange &range) const {
VkImageMemoryBarrier barrier = {};
barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
barrier.srcAccessMask = output_mask;
@@ -514,8 +507,11 @@ class Image : public internal::NonDispHandle<VkImage> {
barrier.newLayout = new_layout;
barrier.image = handle();
barrier.subresourceRange = range;
- barrier.srcQueueFamilyIndex = srcQueueFamilyIndex;
- barrier.dstQueueFamilyIndex = dstQueueFamilyIndex;
+
+ if (sharing_mode() == VK_SHARING_MODE_CONCURRENT) {
+ barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+ barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+ }
return barrier;
}
@@ -555,34 +551,6 @@ class ImageView : public internal::NonDispHandle<VkImageView> {
void init(const Device &dev, const VkImageViewCreateInfo &info);
};
-class AccelerationStructure : public internal::NonDispHandle<VkAccelerationStructureNV> {
- public:
- explicit AccelerationStructure(const Device &dev, const VkAccelerationStructureCreateInfoNV &info, bool init_memory = true) {
- init(dev, info, init_memory);
- }
- ~AccelerationStructure();
-
- // vkCreateAccelerationStructureNV
- void init(const Device &dev, const VkAccelerationStructureCreateInfoNV &info, bool init_memory = true);
-
- // vkGetAccelerationStructureMemoryRequirementsNV()
- VkMemoryRequirements2 memory_requirements() const;
- VkMemoryRequirements2 build_scratch_memory_requirements() const;
-
- uint64_t opaque_handle() const { return opaque_handle_; }
-
- const VkAccelerationStructureInfoNV &info() const { return info_; }
-
- const VkDevice &dev() const { return device(); }
-
- void create_scratch_buffer(const Device &dev, Buffer *buffer);
-
- private:
- VkAccelerationStructureInfoNV info_;
- DeviceMemory memory_;
- uint64_t opaque_handle_;
-};
-
class ShaderModule : public internal::NonDispHandle<VkShaderModule> {
public:
~ShaderModule();
diff --git a/tests/vktestframework.cpp b/tests/vktestframework.cpp
index 0d9dd584a..9819a3bd6 100644
--- a/tests/vktestframework.cpp
+++ b/tests/vktestframework.cpp
@@ -144,7 +144,6 @@ bool VkTestFramework::m_canonicalize_spv = false;
bool VkTestFramework::m_strip_spv = false;
bool VkTestFramework::m_do_everything_spv = false;
bool VkTestFramework::m_devsim_layer = false;
-bool VkTestFramework::m_khronos_layer_disable = false;
int VkTestFramework::m_width = 0;
int VkTestFramework::m_height = 0;
@@ -165,8 +164,6 @@ void VkTestFramework::InitArgs(int *argc, char *argv[]) {
m_canonicalize_spv = true;
else if (optionMatch("--devsim", argv[i]))
m_devsim_layer = true;
- else if (optionMatch("--disable_uberlayer", argv[i]))
- m_khronos_layer_disable = true;
else if (optionMatch("--help", argv[i]) || optionMatch("-h", argv[i])) {
printf("\nOther options:\n");
printf(
diff --git a/tests/vktestframework.h b/tests/vktestframework.h
index 0ac0e8e52..c73600f7a 100644
--- a/tests/vktestframework.h
+++ b/tests/vktestframework.h
@@ -59,6 +59,9 @@ class VkImageObj;
class VkTestFramework : public ::testing::Test {
public:
+ VkTestFramework();
+ ~VkTestFramework();
+
VkFormat GetFormat(VkInstance instance, vk_testing::Device *device);
static bool optionMatch(const char *option, char *optionLine);
static void InitArgs(int *argc, char *argv[]);
@@ -71,15 +74,10 @@ class VkTestFramework : public ::testing::Test {
static bool m_strip_spv;
static bool m_do_everything_spv;
static bool m_devsim_layer;
- static bool m_khronos_layer_disable;
char **ReadFileData(const char *fileName);
void FreeFileData(char **data);
- protected:
- VkTestFramework();
- virtual ~VkTestFramework() = 0;
-
private:
int m_compile_options;
int m_num_shader_strings;
diff --git a/tests/vktestframeworkandroid.cpp b/tests/vktestframeworkandroid.cpp
index a069b236b..77e892011 100644
--- a/tests/vktestframeworkandroid.cpp
+++ b/tests/vktestframeworkandroid.cpp
@@ -26,8 +26,6 @@ VkTestFramework::~VkTestFramework() {}
// Define static elements
bool VkTestFramework::m_devsim_layer = false;
-bool VkTestFramework::m_khronos_layer_disable = false;
-ANativeWindow *VkTestFramework::window = nullptr;
VkFormat VkTestFramework::GetFormat(VkInstance instance, vk_testing::Device *device) {
VkFormatProperties format_props;
@@ -91,7 +89,7 @@ bool VkTestFramework::GLSLtoSPV(const VkShaderStageFlagBits shader_type, const c
shaderc::SpvCompilationResult result =
compiler.CompileGlslToSpv(pshader, strlen(pshader), MapShadercType(shader_type), "shader", options);
if (result.GetCompilationStatus() != shaderc_compilation_status_success) {
- __android_log_print(ANDROID_LOG_ERROR, "VkLayerValidationTests", "GLSLtoSPV compilation failed: %s",
+ __android_log_print(ANDROID_LOG_ERROR, "VkLayerValidationTest", "GLSLtoSPV compilation failed: %s",
result.GetErrorMessage().c_str());
return false;
}
diff --git a/tests/vktestframeworkandroid.h b/tests/vktestframeworkandroid.h
index 4be0d81e7..631f8ebd9 100644
--- a/tests/vktestframeworkandroid.h
+++ b/tests/vktestframeworkandroid.h
@@ -47,8 +47,6 @@ class VkTestFramework : public ::testing::Test {
bool debug = false);
bool ASMtoSPV(const spv_target_env target_env, const uint32_t options, const char *pasm, std::vector<unsigned int> &spv);
static bool m_devsim_layer;
- static bool m_khronos_layer_disable;
- static ANativeWindow *window;
};
class TestEnvironment : public ::testing::Environment {